merged

changeset   72828 18bc50e58e38
author      wenzelm
date        Sat, 05 Dec 2020 19:30:37 +0100
parent      72813 b09f358f3eb0 (current diff)
parent      72827 1975f397eabb (diff)
child       72829 a28a4105883f
--- a/src/Pure/Isar/document_structure.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/Isar/document_structure.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -101,7 +101,7 @@
     /* result structure */
 
     val spans = syntax.parse_spans(text)
-    spans.foreach(span => add(Command(Document_ID.none, node_name, Command.no_blobs, span)))
+    spans.foreach(span => add(Command(Document_ID.none, node_name, Command.Blobs_Info.none, span)))
     result()
   }
 
@@ -174,7 +174,7 @@
     for { span <- syntax.parse_spans(text) } {
       sections.add(
         new Command_Item(syntax.keywords,
-          Command(Document_ID.none, node_name, Command.no_blobs, span)))
+          Command(Document_ID.none, node_name, Command.Blobs_Info.none, span)))
     }
     sections.result()
   }
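
Both call sites above switch from the former `Command.no_blobs` tuple to the new `Command.Blobs_Info.none` value introduced in command.scala below. A minimal self-contained sketch of that tuple-to-case-class refactoring, with placeholder types standing in for the actual Isabelle classes:

object Blobs_Info_Demo {
  // Placeholders for isabelle.Exn.Result and Command.Blob, for illustration only.
  sealed trait Result[+A]
  case class Res[A](value: A) extends Result[A]
  case class Err(message: String) extends Result[Nothing]
  case class Blob(name: String)

  // Former representation: type Blobs_Info = (List[Result[Blob]], Int); val no_blobs = (Nil, -1).
  // New representation: a named case class, with its canonical values in the companion object.
  object Blobs_Info {
    val none: Blobs_Info = Blobs_Info(Nil, -1)
    def errors(msgs: List[String]): Blobs_Info = Blobs_Info(msgs.map(Err), -1)
  }
  case class Blobs_Info(blobs: List[Result[Blob]], index: Int)

  def main(args: Array[String]): Unit = {
    println(Blobs_Info.none)                         // Blobs_Info(List(),-1)
    println(Blobs_Info.errors(List("bad import")))   // Blobs_Info(List(Err(bad import)),-1)
  }
}
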
--- a/src/Pure/ML/ml_compiler.ML	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/ML/ml_compiler.ML	Sat Dec 05 19:30:37 2020 +0100
@@ -117,7 +117,8 @@
       |> map (fn (pos, markup, text) => Position.reported_text pos (markup ()) (text ()))
       |> Output.report;
     val _ =
-      if not (null persistent_reports) andalso redirect andalso Future.enabled ()
+      if not (null persistent_reports) andalso redirect andalso
+        not (Options.default_bool "build_pide_reports") andalso Future.enabled ()
       then
         Execution.print
           {name = "ML_Compiler.report", pos = Position.thread_data (), pri = Task_Queue.urgent_pri}
--- a/src/Pure/PIDE/command.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/command.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -8,21 +8,44 @@
 package isabelle
 
 
-import scala.collection.mutable
 import scala.collection.immutable.SortedMap
 
 
 object Command
 {
-  type Edit = (Option[Command], Option[Command])
+  /* blobs */
+
+  object Blob
+  {
+    def read_file(name: Document.Node.Name, src_path: Path): Blob =
+    {
+      val bytes = Bytes.read(name.path)
+      val chunk = Symbol.Text_Chunk(bytes.text)
+      Blob(name, src_path, Some((bytes.sha1_digest, chunk)))
+    }
+  }
 
   sealed case class Blob(
     name: Document.Node.Name,
     src_path: Path,
     content: Option[(SHA1.Digest, Symbol.Text_Chunk)])
+  {
+    def read_file: String = File.read(name.path)
 
-  type Blobs_Info = (List[Exn.Result[Blob]], Int)
-  val no_blobs: Blobs_Info = (Nil, -1)
+    def chunk_file: Symbol.Text_Chunk.File =
+      Symbol.Text_Chunk.File(name.node)
+  }
+
+  object Blobs_Info
+  {
+    val none: Blobs_Info = Blobs_Info(Nil, -1)
+
+    def errors(msgs: List[String]): Blobs_Info =
+      Blobs_Info(msgs.map(msg => Exn.Exn[Blob](ERROR(msg))), -1)
+  }
+
+  sealed case class Blobs_Info(blobs: List[Exn.Result[Blob]], index: Int)
+
 
 
   /** accumulated results from prover **/
@@ -101,6 +124,7 @@
   object Markup_Index
   {
     val markup: Markup_Index = Markup_Index(false, Symbol.Text_Chunk.Default)
+    def blob(blob: Blob): Markup_Index = Markup_Index(false, blob.chunk_file)
   }
 
   sealed case class Markup_Index(status: Boolean, chunk_name: Symbol.Text_Chunk.Name)
@@ -287,10 +311,9 @@
               def bad(): Unit = Output.warning("Ignored report message: " + msg)
 
               msg match {
-                case XML.Elem(Markup(name, atts1), args) =>
-                  val atts = atts1 ::: atts0
-                  command.reported_position(atts) match {
-                    case Some((id, chunk_name)) =>
+                case XML.Elem(Markup(name, atts), args) =>
+                  command.reported_position(atts) orElse command.reported_position(atts0) match {
+                    case Some((id, chunk_name, target_range)) =>
                       val target =
                         if (self_id(id) && command.chunks.isDefinedAt(chunk_name))
                           Some((chunk_name, command.chunks(chunk_name)))
@@ -298,8 +321,8 @@
                           other_id(command.node_name, id)
                         else None
 
-                      (target, atts) match {
-                        case (Some((target_name, target_chunk)), Position.Range(symbol_range)) =>
+                      (target, target_range) match {
+                        case (Some((target_name, target_chunk)), Some(symbol_range)) =>
                           target_chunk.incorporate(symbol_range) match {
                             case Some(range) =>
                               val props = atts.filterNot(Markup.position_property)
@@ -359,18 +382,19 @@
   }
 
   val empty: Command =
-    Command(Document_ID.none, Document.Node.Name.empty, no_blobs, Command_Span.empty)
+    Command(Document_ID.none, Document.Node.Name.empty, Blobs_Info.none, Command_Span.empty)
 
   def unparsed(
     source: String,
     theory: Boolean = false,
     id: Document_ID.Command = Document_ID.none,
     node_name: Document.Node.Name = Document.Node.Name.empty,
+    blobs_info: Blobs_Info = Blobs_Info.none,
     results: Results = Results.empty,
     markups: Markups = Markups.empty): Command =
   {
     val (source1, span1) = Command_Span.unparsed(source, theory).compact_source
-    new Command(id, node_name, no_blobs, span1, source1, results, markups)
+    new Command(id, node_name, blobs_info, span1, source1, results, markups)
   }
 
   def text(source: String): Command = unparsed(source)
@@ -380,7 +404,9 @@
       markups = Markups.init(Markup_Tree.from_XML(body)))
 
 
-  /* perspective */
+  /* edits and perspective */
+
+  type Edit = (Option[Command], Option[Command])
 
   object Perspective
   {
@@ -431,13 +457,11 @@
           yield {
             val completion =
               if (Thy_Header.is_base_name(s)) resources.complete_import_name(node_name, s) else Nil
-            val msg =
-              "Bad theory import " +
-                Markup.Path(import_name.node).markup(quote(import_name.toString)) +
-                Position.here(pos) + Completion.report_theories(pos, completion)
-            Exn.Exn[Command.Blob](ERROR(msg))
+            "Bad theory import " +
+              Markup.Path(import_name.node).markup(quote(import_name.toString)) +
+              Position.here(pos) + Completion.report_theories(pos, completion)
           }
-        (errors, -1)
+        Blobs_Info.errors(errors)
 
       // auxiliary files
       case _ =>
@@ -450,9 +474,29 @@
               val content = get_blob(name).map(blob => (blob.bytes.sha1_digest, blob.chunk))
               Blob(name, src_path, content)
             }).user_error)
-        (blobs, loaded_files.index)
+        Blobs_Info(blobs, loaded_files.index)
     }
   }
+
+  def build_blobs_info(
+    syntax: Outer_Syntax,
+    node_name: Document.Node.Name,
+    load_commands: List[Command_Span.Span]): Blobs_Info =
+  {
+    val blobs =
+      for {
+        span <- load_commands
+        file <- span.loaded_files(syntax).files
+      } yield {
+        (Exn.capture {
+          val dir = node_name.master_dir_path
+          val src_path = Path.explode(file)
+          val name = Document.Node.Name((dir + src_path).expand.implode_symbolic)
+          Blob.read_file(name, src_path)
+        }).user_error
+      }
+    Blobs_Info(blobs, -1)
+  }
 }
 
 
@@ -482,8 +526,8 @@
 
   /* blobs */
 
-  def blobs: List[Exn.Result[Command.Blob]] = blobs_info._1
-  def blobs_index: Int = blobs_info._2
+  def blobs: List[Exn.Result[Command.Blob]] = blobs_info.blobs
+  def blobs_index: Int = blobs_info.index
 
   def blobs_ok: Boolean =
     blobs.forall({ case Exn.Res(_) => true case _ => false })
@@ -508,7 +552,7 @@
   val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] =
     ((Symbol.Text_Chunk.Default -> chunk) ::
       (for (Exn.Res(blob) <- blobs; (_, file) <- blob.content)
-        yield Symbol.Text_Chunk.File(blob.name.node) -> file)).toMap
+        yield blob.chunk_file -> file)).toMap
 
   def length: Int = source.length
   def range: Text.Range = chunk.range
@@ -522,7 +566,9 @@
 
   /* reported positions */
 
-  def reported_position(pos: Position.T): Option[(Document_ID.Generic, Symbol.Text_Chunk.Name)] =
+  def reported_position(pos: Position.T)
+    : Option[(Document_ID.Generic, Symbol.Text_Chunk.Name, Option[Symbol.Range])] =
+  {
     pos match {
       case Position.Id(id) =>
         val chunk_name =
@@ -531,9 +577,10 @@
               Symbol.Text_Chunk.File(name)
             case _ => Symbol.Text_Chunk.Default
           }
-        Some((id, chunk_name))
+        Some((id, chunk_name, Position.Range.unapply(pos)))
       case _ => None
     }
+  }
 
   def message_positions(
     self_id: Document_ID.Generic => Boolean,
@@ -543,11 +590,11 @@
   {
     def elem(props: Properties.T, set: Set[Text.Range]): Set[Text.Range] =
       reported_position(props) match {
-        case Some((id, name)) if self_id(id) && name == chunk_name =>
+        case Some((id, name, reported_range)) if self_id(id) && name == chunk_name =>
           val opt_range =
-            Position.Range.unapply(props) orElse {
+            reported_range orElse {
               if (name == Symbol.Text_Chunk.Default)
-                Position.Range.unapply(span.position)
+                Position.Range.unapply(span.absolute_position)
               else None
             }
           opt_range match {
--- a/src/Pure/PIDE/command_span.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/command_span.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -122,6 +122,9 @@
       else Nil
     }
 
+    def is_load_command(syntax: Outer_Syntax): Boolean =
+      syntax.load_command(name).isDefined
+
     def loaded_files(syntax: Outer_Syntax): Loaded_Files =
       syntax.load_command(name) match {
         case None => Loaded_Files.none
--- a/src/Pure/PIDE/document.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/document.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -128,6 +128,11 @@
       def path: Path = Path.explode(File.standard_path(node))
       def master_dir_path: Path = Path.explode(File.standard_path(master_dir))
 
+      def expand: Name =
+        Name(path.expand.implode, master_dir_path.expand.implode, theory)
+      def symbolic: Name =
+        Name(path.implode_symbolic, master_dir_path.implode_symbolic, theory)
+
       def is_theory: Boolean = theory.nonEmpty
 
       def theory_base_name: String = Long_Name.base_name(theory)
@@ -526,31 +531,66 @@
   }
 
 
-  /* snapshot */
+  /* snapshot: persistent user-view of document state */
 
   object Snapshot
   {
     val init: Snapshot = State.init.snapshot()
   }
 
-  abstract class Snapshot
+  class Snapshot private[Document](
+    val state: State,
+    val version: Version,
+    val node_name: Node.Name,
+    edits: List[Text.Edit],
+    snippet_command: Option[Command])
   {
-    def state: State
-    def version: Version
-    def is_outdated: Boolean
+    override def toString: String =
+      "Snapshot(node = " + node_name.node + ", version = " + version.id +
+        (if (is_outdated) ", outdated" else "") + ")"
+
+
+    /* nodes */
+
+    def get_node(name: Node.Name): Node = version.nodes(name)
+
+    val node: Node = get_node(node_name)
+
+    def node_files: List[Node.Name] =
+      node_name :: (node.load_commands ::: snippet_command.toList).flatMap(_.blobs_names)
+
+
+    /* edits */
+
+    def is_outdated: Boolean = edits.nonEmpty
 
-    def convert(i: Text.Offset): Text.Offset
-    def revert(i: Text.Offset): Text.Offset
-    def convert(range: Text.Range): Text.Range
-    def revert(range: Text.Range): Text.Range
+    private lazy val reverse_edits = edits.reverse
+
+    def convert(offset: Text.Offset): Text.Offset =
+      (offset /: edits)((i, edit) => edit.convert(i))
+    def revert(offset: Text.Offset): Text.Offset =
+      (offset /: reverse_edits)((i, edit) => edit.revert(i))
+
+    def convert(range: Text.Range): Text.Range = range.map(convert)
+    def revert(range: Text.Range): Text.Range = range.map(revert)
+
+
+    /* theory load commands */
 
-    def node_name: Node.Name
-    def node: Node
-    def nodes: List[(Node.Name, Node)]
+    val commands_loading: List[Command] =
+      if (node_name.is_theory) Nil
+      else version.nodes.commands_loading(node_name)
 
-    def commands_loading: List[Command]
-    def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range]
-    def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command]
+    def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range] =
+      (for {
+        cmd <- node.load_commands.iterator
+        blob_name <- cmd.blobs_names.iterator
+        if pred(blob_name)
+        start <- node.command_start(cmd)
+      } yield convert(cmd.core_range + start)).toList
+
+
+    /* command as add-on snippet */
 
     def command_snippet(command: Command): Snapshot =
     {
@@ -569,37 +609,156 @@
           .define_version(version1, state0.the_assignment(version))
           .assign(version1.id, Nil, List(command.id -> List(Document_ID.make())))._2
 
-      state1.snapshot(name = node_name)
+      state1.snapshot(node_name = node_name, snippet_command = Some(command))
     }
 
+
+    /* XML markup */
+
     def xml_markup(
-      range: Text.Range = Text.Range.full,
-      elements: Markup.Elements = Markup.Elements.full): XML.Body
-    def messages: List[(XML.Tree, Position.T)]
-    def exports: List[Export.Entry]
-    def exports_map: Map[String, Export.Entry]
+        range: Text.Range = Text.Range.full,
+        elements: Markup.Elements = Markup.Elements.full): XML.Body =
+      state.xml_markup(version, node_name, range = range, elements = elements)
+
+    def xml_markup_blobs(elements: Markup.Elements = Markup.Elements.full): List[XML.Body] =
+    {
+      snippet_command match {
+        case None => Nil
+        case Some(command) =>
+          for (Exn.Res(blob) <- command.blobs)
+          yield {
+            val text = blob.read_file
+            val markup = command.init_markups(Command.Markup_Index.blob(blob))
+            markup.to_XML(Text.Range(0, text.length), text, elements)
+          }
+      }
+    }
+
+
+    /* messages */
+
+    lazy val messages: List[(XML.Tree, Position.T)] =
+      (for {
+        (command, start) <-
+          Document.Node.Commands.starts_pos(
+            node.commands.iterator, Token.Pos.file(node_name.node))
+        pos = command.span.keyword_pos(start).position(command.span.name)
+        (_, tree) <- state.command_results(version, command).iterator
+       } yield (tree, pos)).toList
+
+
+    /* exports */
+
+    lazy val exports: List[Export.Entry] =
+      state.node_exports(version, node_name).iterator.map(_._2).toList
+
+    lazy val exports_map: Map[String, Export.Entry] =
+      (for (entry <- exports.iterator) yield (entry.name, entry)).toMap
+
+
+    /* find command */
 
-    def find_command(id: Document_ID.Generic): Option[(Node, Command)]
+    def find_command(id: Document_ID.Generic): Option[(Node, Command)] =
+      state.lookup_id(id) match {
+        case None => None
+        case Some(st) =>
+          val command = st.command
+          val command_node = get_node(command.node_name)
+          if (command_node.commands.contains(command)) Some((command_node, command)) else None
+      }
+
     def find_command_position(id: Document_ID.Generic, offset: Symbol.Offset)
-      : Option[Line.Node_Position]
+        : Option[Line.Node_Position] =
+      for ((node, command) <- find_command(id))
+      yield {
+        val name = command.node_name.node
+        val sources_iterator =
+          node.commands.iterator.takeWhile(_ != command).map(_.source) ++
+            (if (offset == 0) Iterator.empty
+             else Iterator.single(command.source(Text.Range(0, command.chunk.decode(offset)))))
+        val pos = (Line.Position.zero /: sources_iterator)(_.advance(_))
+        Line.Node_Position(name, pos)
+      }
+
+    def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command] =
+      if (other_node_name.is_theory) {
+        val other_node = get_node(other_node_name)
+        val iterator = other_node.command_iterator(revert(offset) max 0)
+        if (iterator.hasNext) {
+          val (command0, _) = iterator.next
+          other_node.commands.reverse.iterator(command0).find(command => !command.is_ignored)
+        }
+        else other_node.commands.reverse.iterator.find(command => !command.is_ignored)
+      }
+      else version.nodes.commands_loading(other_node_name).headOption
+
+
+    /* command results */
+
+    def command_results(range: Text.Range): Command.Results =
+      Command.State.merge_results(
+        select[List[Command.State]](range, Markup.Elements.full, command_states =>
+          { case _ => Some(command_states) }).flatMap(_.info))
+
+    def command_results(command: Command): Command.Results =
+      state.command_results(version, command)
+
+
+    /* command ids: static and dynamic */
+
+    def command_id_map: Map[Document_ID.Generic, Command] =
+      state.command_id_map(version, get_node(node_name).commands)
+
+
+    /* cumulate markup */
 
     def cumulate[A](
       range: Text.Range,
       info: A,
       elements: Markup.Elements,
       result: List[Command.State] => (A, Text.Markup) => Option[A],
-      status: Boolean = false): List[Text.Info[A]]
+      status: Boolean = false): List[Text.Info[A]] =
+    {
+      val former_range = revert(range).inflate_singularity
+      val (chunk_name, command_iterator) =
+        commands_loading.headOption match {
+          case None => (Symbol.Text_Chunk.Default, node.command_iterator(former_range))
+          case Some(command) => (Symbol.Text_Chunk.File(node_name.node), Iterator((command, 0)))
+        }
+      val markup_index = Command.Markup_Index(status, chunk_name)
+      (for {
+        (command, command_start) <- command_iterator
+        chunk <- command.chunks.get(chunk_name).iterator
+        states = state.command_states(version, command)
+        res = result(states)
+        markup_range <- (former_range - command_start).try_restrict(chunk.range).iterator
+        markup = Command.State.merge_markup(states, markup_index, markup_range, elements)
+        Text.Info(r0, a) <- markup.cumulate[A](markup_range, info, elements,
+          {
+            case (a, Text.Info(r0, b)) => res(a, Text.Info(convert(r0 + command_start), b))
+          }).iterator
+        r1 <- convert(r0 + command_start).try_restrict(range).iterator
+      } yield Text.Info(r1, a)).toList
+    }
 
     def select[A](
       range: Text.Range,
       elements: Markup.Elements,
       result: List[Command.State] => Text.Markup => Option[A],
-      status: Boolean = false): List[Text.Info[A]]
-
-    def command_results(range: Text.Range): Command.Results
-    def command_results(command: Command): Command.Results
-
-    def command_id_map: Map[Document_ID.Generic, Command]
+      status: Boolean = false): List[Text.Info[A]] =
+    {
+      def result1(states: List[Command.State]): (Option[A], Text.Markup) => Option[Option[A]] =
+      {
+        val res = result(states)
+        (_: Option[A], x: Text.Markup) =>
+          res(x) match {
+            case None => None
+            case some => Some(some)
+          }
+      }
+      for (Text.Info(r, Some(x)) <- cumulate(range, None, elements, result1, status))
+        yield Text.Info(r, x)
+    }
   }
 
 
@@ -814,12 +973,20 @@
           st <- command_states(version, command).iterator
         } yield st.exports)
 
-    def begin_theory(node_name: Node.Name, id: Document_ID.Exec, source: String): State =
+    def begin_theory(
+      node_name: Node.Name,
+      id: Document_ID.Exec,
+      source: String,
+      blobs_info: Command.Blobs_Info): State =
+    {
       if (theories.isDefinedAt(id)) fail
       else {
-        val command = Command.unparsed(source, theory = true, id = id, node_name = node_name)
+        val command =
+          Command.unparsed(source, theory = true, id = id, node_name = node_name,
+            blobs_info = blobs_info)
         copy(theories = theories + (id -> command.empty_state))
       }
+    }
 
     def end_theory(theory: String): (Snapshot, State) =
       theories.collectFirst({ case (_, st) if st.command.node_name.theory == theory => st }) match {
@@ -829,9 +996,9 @@
           val node_name = command.node_name
           val command1 =
             Command.unparsed(command.source, theory = true, id = command.id, node_name = node_name,
-              results = st.results, markups = st.markups)
+              blobs_info = command.blobs_info, results = st.results, markups = st.markups)
           val state1 = copy(theories = theories - command1.id)
-          val snapshot = state1.snapshot(name = node_name).command_snippet(command1)
+          val snapshot = state1.snapshot(node_name = node_name).command_snippet(command1)
           (snapshot, state1)
       }
 
@@ -1061,21 +1228,21 @@
         it.hasNext && command_states(version, it.next).exists(_.consolidated)
       }
 
-    // persistent user-view
-    def snapshot(name: Node.Name = Node.Name.empty, pending_edits: List[Text.Edit] = Nil)
-      : Snapshot =
+    def snapshot(
+      node_name: Node.Name = Node.Name.empty,
+      pending_edits: List[Text.Edit] = Nil,
+      snippet_command: Option[Command] = None): Snapshot =
     {
+      /* pending edits and unstable changes */
+
       val stable = recent_stable
-      val latest = history.tip
-
-
-      /* pending edits and unstable changes */
+      val version = stable.version.get_finished
 
       val rev_pending_changes =
         for {
           change <- history.undo_list.takeWhile(_ != stable)
-          (a, edits) <- change.rev_edits
-          if a == name
+          (name, edits) <- change.rev_edits
+          if name == node_name
         } yield edits
 
       val edits =
@@ -1083,176 +1250,8 @@
           case (edits, Node.Edits(es)) => es ::: edits
           case (edits, _) => edits
         })
-      lazy val reverse_edits = edits.reverse
 
-      new Snapshot
-      {
-        /* global information */
-
-        val state: State = State.this
-        val version: Version = stable.version.get_finished
-        val is_outdated: Boolean = pending_edits.nonEmpty || latest != stable
-
-
-        /* local node content */
-
-        def convert(offset: Text.Offset): Text.Offset = (offset /: edits)((i, edit) => edit.convert(i))
-        def revert(offset: Text.Offset): Text.Offset = (offset /: reverse_edits)((i, edit) => edit.revert(i))
-
-        def convert(range: Text.Range): Text.Range = range.map(convert)
-        def revert(range: Text.Range): Text.Range = range.map(revert)
-
-        val node_name: Node.Name = name
-        val node: Node = version.nodes(name)
-
-        def nodes: List[(Node.Name, Node)] =
-          (node_name :: node.load_commands.flatMap(_.blobs_names)).
-            map(name => (name, version.nodes(name)))
-
-        val commands_loading: List[Command] =
-          if (node_name.is_theory) Nil
-          else version.nodes.commands_loading(node_name)
-
-        def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range] =
-          (for {
-            cmd <- node.load_commands.iterator
-            blob_name <- cmd.blobs_names.iterator
-            if pred(blob_name)
-            start <- node.command_start(cmd)
-          } yield convert(cmd.core_range + start)).toList
-
-        def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command] =
-          if (other_node_name.is_theory) {
-            val other_node = version.nodes(other_node_name)
-            val iterator = other_node.command_iterator(revert(offset) max 0)
-            if (iterator.hasNext) {
-              val (command0, _) = iterator.next
-              other_node.commands.reverse.iterator(command0).find(command => !command.is_ignored)
-            }
-            else other_node.commands.reverse.iterator.find(command => !command.is_ignored)
-          }
-          else version.nodes.commands_loading(other_node_name).headOption
-
-        def xml_markup(
-            range: Text.Range = Text.Range.full,
-            elements: Markup.Elements = Markup.Elements.full): XML.Body =
-          state.xml_markup(version, node_name, range = range, elements = elements)
-
-        lazy val messages: List[(XML.Tree, Position.T)] =
-          (for {
-            (command, start) <-
-              Document.Node.Commands.starts_pos(
-                node.commands.iterator, Token.Pos.file(node_name.node))
-            pos = command.span.keyword_pos(start).position(command.span.name)
-            (_, tree) <- state.command_results(version, command).iterator
-           } yield (tree, pos)).toList
-
-        lazy val exports: List[Export.Entry] =
-          state.node_exports(version, node_name).iterator.map(_._2).toList
-
-        lazy val exports_map: Map[String, Export.Entry] =
-          (for (entry <- exports.iterator) yield (entry.name, entry)).toMap
-
-
-        /* find command */
-
-        def find_command(id: Document_ID.Generic): Option[(Node, Command)] =
-          state.lookup_id(id) match {
-            case None => None
-            case Some(st) =>
-              val command = st.command
-              val node = version.nodes(command.node_name)
-              if (node.commands.contains(command)) Some((node, command)) else None
-          }
-
-        def find_command_position(id: Document_ID.Generic, offset: Symbol.Offset)
-            : Option[Line.Node_Position] =
-          for ((node, command) <- find_command(id))
-          yield {
-            val name = command.node_name.node
-            val sources_iterator =
-              node.commands.iterator.takeWhile(_ != command).map(_.source) ++
-                (if (offset == 0) Iterator.empty
-                 else Iterator.single(command.source(Text.Range(0, command.chunk.decode(offset)))))
-            val pos = (Line.Position.zero /: sources_iterator)(_.advance(_))
-            Line.Node_Position(name, pos)
-          }
-
-
-        /* cumulate markup */
-
-        def cumulate[A](
-          range: Text.Range,
-          info: A,
-          elements: Markup.Elements,
-          result: List[Command.State] => (A, Text.Markup) => Option[A],
-          status: Boolean = false): List[Text.Info[A]] =
-        {
-          val former_range = revert(range).inflate_singularity
-          val (chunk_name, command_iterator) =
-            commands_loading.headOption match {
-              case None => (Symbol.Text_Chunk.Default, node.command_iterator(former_range))
-              case Some(command) => (Symbol.Text_Chunk.File(node_name.node), Iterator((command, 0)))
-            }
-          val markup_index = Command.Markup_Index(status, chunk_name)
-          (for {
-            (command, command_start) <- command_iterator
-            chunk <- command.chunks.get(chunk_name).iterator
-            states = state.command_states(version, command)
-            res = result(states)
-            markup_range <- (former_range - command_start).try_restrict(chunk.range).iterator
-            markup = Command.State.merge_markup(states, markup_index, markup_range, elements)
-            Text.Info(r0, a) <- markup.cumulate[A](markup_range, info, elements,
-              {
-                case (a, Text.Info(r0, b)) => res(a, Text.Info(convert(r0 + command_start), b))
-              }).iterator
-            r1 <- convert(r0 + command_start).try_restrict(range).iterator
-          } yield Text.Info(r1, a)).toList
-        }
-
-        def select[A](
-          range: Text.Range,
-          elements: Markup.Elements,
-          result: List[Command.State] => Text.Markup => Option[A],
-          status: Boolean = false): List[Text.Info[A]] =
-        {
-          def result1(states: List[Command.State]): (Option[A], Text.Markup) => Option[Option[A]] =
-          {
-            val res = result(states)
-            (_: Option[A], x: Text.Markup) =>
-              res(x) match {
-                case None => None
-                case some => Some(some)
-              }
-          }
-          for (Text.Info(r, Some(x)) <- cumulate(range, None, elements, result1, status))
-            yield Text.Info(r, x)
-        }
-
-
-        /* command results */
-
-        def command_results(range: Text.Range): Command.Results =
-          Command.State.merge_results(
-            select[List[Command.State]](range, Markup.Elements.full, command_states =>
-              { case _ => Some(command_states) }).flatMap(_.info))
-
-        def command_results(command: Command): Command.Results =
-          state.command_results(version, command)
-
-
-        /* command ids: static and dynamic */
-
-        def command_id_map: Map[Document_ID.Generic, Command] =
-          state.command_id_map(version, version.nodes(node_name).commands)
-
-
-        /* output */
-
-        override def toString: String =
-          "Snapshot(node = " + node_name.node + ", version = " + version.id +
-            (if (is_outdated) ", outdated" else "") + ")"
-      }
+      new Snapshot(this, version, node_name, edits, snippet_command)
     }
   }
 }
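
`Document.Snapshot` above changes from an abstract class, implemented anonymously inside `State.snapshot`, into a concrete class whose constructor is private to `Document`; in particular `convert`/`revert` now fold the pending edits directly. A minimal self-contained sketch of that fold, using a simplified insertion-only edit in place of `Text.Edit`:

object Snapshot_Edits_Demo {
  // Simplified stand-in for isabelle.Text.Edit (insertions only).
  final case class Edit(start: Int, text: String) {
    def convert(i: Int): Int = if (i < start) i else i + text.length
    def revert(i: Int): Int = if (i < start) i else (i - text.length) max start
  }

  final class Snapshot(edits: List[Edit]) {
    def is_outdated: Boolean = edits.nonEmpty
    private lazy val reverse_edits = edits.reverse

    def convert(offset: Int): Int = edits.foldLeft(offset)((i, e) => e.convert(i))
    def revert(offset: Int): Int = reverse_edits.foldLeft(offset)((i, e) => e.revert(i))
  }

  def main(args: Array[String]): Unit = {
    val snapshot = new Snapshot(List(Edit(2, "xy"), Edit(0, "ab")))
    println(snapshot.is_outdated)   // true
    println(snapshot.convert(3))    // 7: offset shifted by both pending insertions
    println(snapshot.revert(7))     // 3: mapped back to the stable version
  }
}
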
--- a/src/Pure/PIDE/headless.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/headless.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -256,8 +256,8 @@
       val dep_theories = dependencies.theories
       val dep_theories_set = dep_theories.toSet
       val dep_files =
-        dependencies.loaded_files.flatMap(_._2).
-          map(path => Document.Node.Name(resources.append("", path)))
+        for (path <- dependencies.loaded_files)
+          yield Document.Node.Name(resources.append("", path))
 
       val use_theories_state =
       {
--- a/src/Pure/PIDE/markup_tree.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/markup_tree.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -58,7 +58,7 @@
       var result: List[XML.Elem] = Nil
       for (elem <- rev_markup if elements(elem.name))
         result ::= elem
-      result.toList
+      result
     }
 
     def + (markup: Text.Markup): Entry = copy(rev_markup = markup.info :: rev_markup)
--- a/src/Pure/PIDE/query_operation.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/query_operation.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -84,7 +84,7 @@
               (_, elem @ XML.Elem(Markup(Markup.RESULT, props), _)) <- command_results.iterator
               if props.contains((Markup.INSTANCE, state0.instance))
             } yield elem).toList
-          val removed = !snapshot.version.nodes(cmd.node_name).commands.contains(cmd)
+          val removed = !snapshot.get_node(cmd.node_name).commands.contains(cmd)
           (snapshot, command_results, results, removed)
         case None =>
           (Document.Snapshot.init, Command.Results.empty, Nil, true)
--- a/src/Pure/PIDE/resources.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/resources.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -106,36 +106,38 @@
 
   /* theory files */
 
-  def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name): () => List[Path] =
+  def load_commands(syntax: Outer_Syntax, name: Document.Node.Name)
+    : () => List[Command_Span.Span] =
   {
     val (is_utf8, raw_text) =
       with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
-    () => {
-      if (syntax.has_load_commands(raw_text)) {
-        val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
-        val spans = syntax.parse_spans(text)
-        val dir = Path.explode(name.master_dir)
-        (for {
-          span <- spans.iterator
-          file <- span.loaded_files(syntax).files
-        } yield (dir + Path.explode(file)).expand).toList
+    () =>
+      {
+        if (syntax.has_load_commands(raw_text)) {
+          val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
+          syntax.parse_spans(text).filter(_.is_load_command(syntax))
+        }
+        else Nil
       }
-      else Nil
-    }
+  }
+
+  def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name, spans: List[Command_Span.Span])
+    : List[Path] =
+  {
+    val dir = name.master_dir_path
+    for { span <- spans; file <- span.loaded_files(syntax).files }
+      yield (dir + Path.explode(file)).expand
   }
 
   def pure_files(syntax: Outer_Syntax): List[Path] =
   {
     val pure_dir = Path.explode("~~/src/Pure")
-    val roots =
-      for { (name, _) <- Thy_Header.ml_roots }
-      yield (pure_dir + Path.explode(name)).expand
-    val files =
-      for {
-        (path, (_, theory)) <- roots zip Thy_Header.ml_roots
-        file <- loaded_files(syntax, Document.Node.Name(path.implode, path.dir.implode, theory))()
-      } yield file
-    roots ::: files
+    for {
+      (name, theory) <- Thy_Header.ml_roots
+      path = (pure_dir + Path.explode(name)).expand
+      node_name = Document.Node.Name(path.implode, path.dir.implode, theory)
+      file <- loaded_files(syntax, node_name, load_commands(syntax, node_name)())
+    } yield file
   }
 
   def theory_name(qualifier: String, theory: String): String =
@@ -398,19 +400,31 @@
         graph2.map_node(name, _ => syntax)
       })
 
-    def loaded_files: List[(String, List[Path])] =
-    {
+    def get_syntax(name: Document.Node.Name): Outer_Syntax =
+      loaded_theories.get_node(name.theory)
+
+    def load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
       theories.zip(
-        Par_List.map((e: () => List[Path]) => e(),
-          theories.map(name =>
-            resources.loaded_files(loaded_theories.get_node(name.theory), name))))
-        .map({ case (name, files) =>
-          val files1 =
-            if (name.theory == Thy_Header.PURE) resources.pure_files(overall_syntax) ::: files
-            else files
-          (name.theory, files1) })
+        Par_List.map((e: () => List[Command_Span.Span]) => e(),
+          theories.map(name => resources.load_commands(get_syntax(name), name))))
+      .filter(p => p._2.nonEmpty)
+
+    def loaded_files(name: Document.Node.Name, spans: List[Command_Span.Span])
+      : (String, List[Path]) =
+    {
+      val theory = name.theory
+      val syntax = get_syntax(name)
+      val files1 = resources.loaded_files(syntax, name, spans)
+      val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil
+      (theory, files1 ::: files2)
     }
 
+    def loaded_files: List[Path] =
+      for {
+        (name, spans) <- load_commands
+        file <- loaded_files(name, spans)._2
+      } yield file
+
     def imported_files: List[Path] =
     {
       val base_theories =
@@ -418,7 +432,7 @@
           filter(session_base.loaded_theories.defined)
 
       base_theories.map(theory => session_base.known_theories(theory).name.path) :::
-      base_theories.flatMap(session_base.known_loaded_files(_))
+      base_theories.flatMap(session_base.known_loaded_files.withDefaultValue(Nil))
     }
 
     lazy val overall_syntax: Outer_Syntax =
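
`Resources.loaded_files` is split above into two phases: `load_commands` parses the theory text once and keeps only the spans of load commands, while `loaded_files` resolves the referenced paths from those spans. A minimal self-contained sketch of that split, with a toy one-line-per-command parser in place of `Outer_Syntax.parse_spans`:

object Load_Commands_Demo {
  case class Span(name: String, files: List[String])

  // Phase 1: parse the source and keep only spans that are load commands.
  def load_commands(text: String): List[Span] =
    text.split("\n").toList.collect {
      case line if line.startsWith("ML_file ") =>
        Span("ML_file", List(line.stripPrefix("ML_file ").trim))
    }

  // Phase 2: resolve the files referenced by those spans against the master directory.
  def loaded_files(dir: String, spans: List[Span]): List[String] =
    for { span <- spans; file <- span.files } yield dir + "/" + file

  def main(args: Array[String]): Unit = {
    val spans = load_commands("theory A imports Pure begin\nML_file a.ML\nML_file b.ML\nend")
    println(loaded_files("src/Pure", spans))  // List(src/Pure/a.ML, src/Pure/b.ML)
  }
}
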
--- a/src/Pure/PIDE/session.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/PIDE/session.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -130,6 +130,9 @@
   val xml_cache: XML.Cache = XML.make_cache()
   val xz_cache: XZ.Cache = XZ.make_cache()
 
+  def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
+    Command.Blobs_Info.none
+
 
   /* global flags */
 
@@ -506,7 +509,8 @@
                 change_command(_.add_export(id, (args.serial, export)))
 
               case Protocol.Loading_Theory(node_name, id) =>
-                try { global_state.change(_.begin_theory(node_name, id, msg.text)) }
+                val blobs_info = build_blobs_info(node_name)
+                try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) }
                 catch { case _: Document.State.Fail => bad_output() }
 
               case Markup.Finished_Theory(theory) =>
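
`Session` above gains an overridable `build_blobs_info` hook, consulted when a `Loading_Theory` protocol message arrives; the default attaches no blob information, and the batch build (build_job.scala below) overrides it from the session dependencies. A minimal sketch of that hook pattern, with placeholder types and an illustrative file list:

object Session_Hook_Demo {
  case class Blobs_Info(files: List[String])
  object Blobs_Info { val none: Blobs_Info = Blobs_Info(Nil) }

  class Session {
    // Default: interactive sessions attach no blob information at theory load time.
    def build_blobs_info(theory_name: String): Blobs_Info = Blobs_Info.none
  }

  // Hypothetical batch-build session that knows the loaded files of each theory.
  class Build_Session(loaded: Map[String, List[String]]) extends Session {
    override def build_blobs_info(theory_name: String): Blobs_Info =
      Blobs_Info(loaded.getOrElse(theory_name, Nil))
  }

  def main(args: Array[String]): Unit = {
    val session = new Build_Session(Map("Pure" -> List("ROOT0.ML", "ROOT.ML")))
    println(session.build_blobs_info("Pure"))  // Blobs_Info(List(ROOT0.ML, ROOT.ML))
  }
}
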
--- a/src/Pure/Pure.thy	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/Pure.thy	Sat Dec 05 19:30:37 2020 +0100
@@ -168,6 +168,9 @@
 
 in end\<close>
 
+external_file "ROOT0.ML"
+external_file "ROOT.ML"
+
 
 subsection \<open>Embedded ML text\<close>
 
--- a/src/Pure/Thy/export.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/Thy/export.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -16,6 +16,7 @@
   /* artefact names */
 
   val MARKUP = "PIDE/markup"
+  val FILES = "PIDE/files"
   val MESSAGES = "PIDE/messages"
   val DOCUMENT_PREFIX = "document/"
   val CITATIONS = DOCUMENT_PREFIX + "citations"
--- a/src/Pure/Thy/sessions.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/Thy/sessions.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -55,6 +55,7 @@
     document_theories: List[Document.Node.Name] = Nil,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
     used_theories: List[(Document.Node.Name, Options)] = Nil,
+    load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty,
     known_theories: Map[String, Document.Node.Entry] = Map.empty,
     known_loaded_files: Map[String, List[Path]] = Map.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
@@ -172,10 +173,15 @@
 
             val theory_files = dependencies.theories.map(_.path)
 
-            val (loaded_files, loaded_files_errors) =
-              try { if (inlined_files) (dependencies.loaded_files, Nil) else (Nil, Nil) }
+            dependencies.load_commands
+
+            val (load_commands, load_commands_errors) =
+              try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
               catch { case ERROR(msg) => (Nil, List(msg)) }
 
+            val loaded_files =
+              load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) })
+
             val session_files =
               (theory_files ::: loaded_files.flatMap(_._2) :::
                 info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)
@@ -325,13 +331,14 @@
                 document_theories = document_theories,
                 loaded_theories = dependencies.loaded_theories,
                 used_theories = dependencies.theories_adjunct,
+                load_commands = load_commands.toMap,
                 known_theories = known_theories,
                 known_loaded_files = known_loaded_files,
                 overall_syntax = overall_syntax,
                 imported_sources = check_sources(imported_files),
                 sources = check_sources(session_files),
                 session_graph_display = session_graph_display,
-                errors = dependencies.errors ::: loaded_files_errors ::: import_errors :::
+                errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
                   document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
                   bibtex_errors)
 
--- a/src/Pure/Tools/build_job.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/Tools/build_job.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -28,6 +28,7 @@
     Future.thread("build", uninterruptible = true) {
       val parent = info.parent.getOrElse("")
       val base = deps(parent)
+      val result_base = deps(session_name)
 
       val env =
         Isabelle_System.settings() +
@@ -50,6 +51,16 @@
         new Session(options, resources) {
           override val xml_cache: XML.Cache = store.xml_cache
           override val xz_cache: XZ.Cache = store.xz_cache
+
+          override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
+          {
+            result_base.load_commands.get(name.expand) match {
+              case Some(spans) =>
+                val syntax = result_base.theory_syntax(name)
+                Command.build_blobs_info(syntax, name, spans)
+              case None => Command.Blobs_Info.none
+            }
+          }
         }
       def make_rendering(snapshot: Document.Snapshot): Rendering =
         new Rendering(snapshot, options, session) {
@@ -176,6 +187,11 @@
             def export_text(name: String, text: String): Unit =
               export(name, List(XML.Text(text)))
 
+            export_text(Export.FILES, cat_lines(snapshot.node_files.map(_.symbolic.node)))
+
+            for ((xml, i) <- snapshot.xml_markup_blobs().zipWithIndex) {
+              export(Export.MARKUP + (i + 1), xml)
+            }
             export(Export.MARKUP, snapshot.xml_markup())
             export(Export.MESSAGES, snapshot.messages.map(_._1))
 
--- a/src/Pure/Tools/update.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Pure/Tools/update.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -43,7 +43,8 @@
         progress.echo("Processing theory " + args.print_node + " ...")
 
         val snapshot = args.snapshot
-        for ((node_name, node) <- snapshot.nodes) {
+        for (node_name <- snapshot.node_files) {
+          val node = snapshot.get_node(node_name)
           val xml =
             snapshot.state.xml_markup(snapshot.version, node_name,
               elements = Markup.Elements(Markup.UPDATE, Markup.LANGUAGE))
--- a/src/Tools/jEdit/src/jedit_editor.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Tools/jEdit/src/jedit_editor.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -282,7 +282,7 @@
     pos match {
       case Position.Item_Id(id, range) if range.start > 0 =>
         snapshot.find_command(id) match {
-          case Some((node, command)) if snapshot.version.nodes(command.node_name) eq node =>
+          case Some((node, command)) if snapshot.get_node(command.node_name) eq node =>
             node.command_start(command) match {
               case Some(start) => text_offset == start + command.chunk.decode(range.start)
               case None => false
--- a/src/Tools/jEdit/src/timing_dockable.scala	Fri Dec 04 17:55:17 2020 +0000
+++ b/src/Tools/jEdit/src/timing_dockable.scala	Sat Dec 05 19:30:37 2020 +0100
@@ -183,7 +183,7 @@
 
     val iterator =
       restriction match {
-        case Some(names) => names.iterator.map(name => (name, snapshot.version.nodes(name)))
+        case Some(names) => names.iterator.map(name => (name, snapshot.get_node(name)))
         case None => snapshot.version.nodes.iterator
       }
     val nodes_timing1 =