changeset:   72749:38d001186621
parent:      72739:e7c2848b78e8 (current diff)
parent:      72748:04d5f6d769a7 (diff)
child:       72752:80ae03520fda
author:      wenzelm
date:        Fri, 27 Nov 2020 23:51:37 +0100
description: merged
files:       NEWS src/Pure/Pure.thy
--- a/NEWS	Fri Nov 27 06:48:35 2020 +0000
+++ b/NEWS	Fri Nov 27 23:51:37 2020 +0100
@@ -278,6 +278,13 @@
 "isabelle_scala_tools" and "isabelle_file_format": minor
 INCOMPATIBILITY.
 
+* The syntax of theory load commands (for auxiliary files) is now
+specified in Isabelle/Scala, as an instance of class
+isabelle.Command_Span.Load_Command registered via isabelle_scala_service
+in etc/settings. This allows more flexible schemes than just a list of
+file extensions. Minor INCOMPATIBILITY: see e.g. theory
+HOL-SPARK.SPARK_Setup for how to emulate the old behaviour.
+
 * Isabelle server allows user-defined commands via
 isabelle_scala_service.
 
--- a/src/HOL/SMT_Examples/Boogie.thy	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/HOL/SMT_Examples/Boogie.thy	Fri Nov 27 23:51:37 2020 +0100
@@ -6,7 +6,7 @@
 
 theory Boogie
 imports Main
-keywords "boogie_file" :: thy_load ("b2i")
+keywords "boogie_file" :: thy_load
 begin
 
 text \<open>
@@ -58,19 +58,19 @@
 external_file \<open>Boogie_Max.certs\<close>
 declare [[smt_certificates = "Boogie_Max.certs"]]
 
-boogie_file \<open>Boogie_Max\<close>
+boogie_file \<open>Boogie_Max.b2i\<close>
 
 
 external_file \<open>Boogie_Dijkstra.certs\<close>
 declare [[smt_certificates = "Boogie_Dijkstra.certs"]]
 
-boogie_file \<open>Boogie_Dijkstra\<close>
+boogie_file \<open>Boogie_Dijkstra.b2i\<close>
 
 
 declare [[z3_extensions = true]]
 external_file \<open>VCC_Max.certs\<close>
 declare [[smt_certificates = "VCC_Max.certs"]]
 
-boogie_file \<open>VCC_Max\<close>
+boogie_file \<open>VCC_Max.b2i\<close>
 
 end
--- a/src/HOL/SMT_Examples/boogie.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/HOL/SMT_Examples/boogie.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -268,10 +268,10 @@
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>boogie_file\<close>
     "prove verification condition from .b2i file"
-    (Resources.provide_parse_files "boogie_file" >> (fn files =>
+    (Resources.provide_parse_file >> (fn get_file =>
       Toplevel.theory (fn thy =>
         let
-          val ([{lines, ...}], thy') = files thy;
+          val ({lines, ...}, thy') = get_file thy;
           val _ = boogie_prove thy' lines;
         in thy' end)))
 
--- a/src/HOL/SPARK/SPARK_Setup.thy	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/HOL/SPARK/SPARK_Setup.thy	Fri Nov 27 23:51:37 2020 +0100
@@ -9,8 +9,8 @@
   imports
   "HOL-Library.Word"
 keywords
-  "spark_open_vcg" :: thy_load ("vcg", "fdl", "rls") and
-  "spark_open" :: thy_load ("siv", "fdl", "rls") and
+  "spark_open_vcg" :: thy_load (spark_vcg) and
+  "spark_open" :: thy_load (spark_siv) and
   "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
   "spark_vc" :: thy_goal and
   "spark_status" :: diag
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/SPARK/Tools/spark.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -0,0 +1,23 @@
+/*  Title:      HOL/SPARK/Tools/spark.scala
+    Author:     Makarius
+
+Scala support for HOL-SPARK.
+*/
+
+package isabelle.spark
+
+import isabelle._
+
+
+object SPARK
+{
+  class Load_Command1 extends Command_Span.Load_Command("spark_vcg")
+  {
+    override val extensions: List[String] = List("vcg", "fdl", "rls")
+  }
+
+  class Load_Command2 extends Command_Span.Load_Command("spark_siv")
+  {
+    override val extensions: List[String] = List("siv", "fdl", "rls")
+  }
+}
--- a/src/HOL/SPARK/Tools/spark_commands.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/HOL/SPARK/Tools/spark_commands.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -94,13 +94,13 @@
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>spark_open_vcg\<close>
     "open a new SPARK environment and load a SPARK-generated .vcg file"
-    (Resources.provide_parse_files "spark_open_vcg" -- Parse.parname
+    (Resources.provide_parse_files (Resources.extensions ["vcg", "fdl", "rls"]) -- Parse.parname
       >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>spark_open\<close>
     "open a new SPARK environment and load a SPARK-generated .siv file"
-    (Resources.provide_parse_files "spark_open" -- Parse.parname
+    (Resources.provide_parse_files (Resources.extensions ["siv", "fdl", "rls"]) -- Parse.parname
       >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
 
 val pfun_type = Scan.option
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/SPARK/etc/settings	Fri Nov 27 23:51:37 2020 +0100
@@ -0,0 +1,4 @@
+# -*- shell-script -*- :mode=shellscript:
+
+isabelle_scala_service 'isabelle.spark.SPARK$Load_Command1'
+isabelle_scala_service 'isabelle.spark.SPARK$Load_Command2'
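
The isabelle_scala_service declarations above are what make these classes
reachable from Isabelle/Scala: all registered instances are collected through
the generic service mechanism, as in this sketch (cf. Command_Span.load_commands
in src/Pure/PIDE/command_span.scala further below):

  // collect all registered load commands, with the nameless default first
  val known: List[Command_Span.Load_Command] =
    new Command_Span.Load_Command("") ::
      Isabelle_System.make_services(classOf[Command_Span.Load_Command])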
--- a/src/Pure/General/path.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/General/path.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -150,8 +150,15 @@
 }
 
 
-final class Path private(private val elems: List[Path.Elem]) // reversed elements
+final class Path private(protected val elems: List[Path.Elem]) // reversed elements
 {
+  override def hashCode: Int = elems.hashCode
+  override def equals(that: Any): Boolean =
+    that match {
+      case other: Path => elems == other.elems
+      case _ => false
+    }
+
   def is_current: Boolean = elems.isEmpty
   def is_absolute: Boolean = elems.nonEmpty && elems.last.isInstanceOf[Path.Root]
   def is_root: Boolean = elems match { case List(Path.Root(_)) => true case _ => false }
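
Structural equality on Path is presumably needed because blobs now carry an
explicit src_path: Path field (see src/Pure/PIDE/command.scala below). A small
illustration of the changed behaviour:

  // logically equal paths now compare equal and hash consistently
  val p1 = Path.explode("dir/file.vcg")
  val p2 = Path.basic("dir") + Path.explode("file.vcg")
  assert(p1 == p2)                     // formerly reference equality
  assert(p1.hashCode == p2.hashCode)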
--- a/src/Pure/General/symbol.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/General/symbol.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -214,24 +214,6 @@
     case class Id(id: Document_ID.Generic) extends Name
     case class File(name: String) extends Name
 
-    val encode_name: XML.Encode.T[Name] =
-    {
-      import XML.Encode._
-      variant(List(
-        { case Default => (Nil, Nil) },
-        { case Id(a) => (List(long_atom(a)), Nil) },
-        { case File(a) => (List(a), Nil) }))
-    }
-
-    val decode_name: XML.Decode.T[Name] =
-    {
-      import XML.Decode._
-      variant(List(
-        { case (Nil, Nil) => Default },
-        { case (List(a), Nil) => Id(long_atom(a)) },
-        { case (List(a), Nil) => File(a) }))
-    }
-
     def apply(text: CharSequence): Text_Chunk =
       new Text_Chunk(Text.Range(0, text.length), Index(text))
   }
--- a/src/Pure/Isar/keyword.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Isar/keyword.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -38,7 +38,7 @@
   val prf_script_asm_goal: string
   val before_command: string
   val quasi_command: string
-  type spec = (string * string list) * string list
+  type spec = string * string list
   val no_spec: spec
   val before_command_spec: spec
   val quasi_command_spec: spec
@@ -57,7 +57,6 @@
   val dest_commands: keywords -> string list
   val command_markup: keywords -> string -> Markup.T option
   val command_kind: keywords -> string -> string option
-  val command_files: keywords -> string -> Path.T -> Path.T list
   val command_tags: keywords -> string -> string list
   val is_vacuous: keywords -> string -> bool
   val is_diag: keywords -> string -> bool
@@ -132,27 +131,24 @@
 
 (* specifications *)
 
-type spec = (string * string list) * string list;
+type spec = string * string list;
 
-val no_spec: spec = (("", []), []);
-val before_command_spec: spec = ((before_command, []), []);
-val quasi_command_spec: spec = ((quasi_command, []), []);
-val document_heading_spec: spec = (("document_heading", []), ["document"]);
-val document_body_spec: spec = (("document_body", []), ["document"]);
+val no_spec: spec = ("", []);
+val before_command_spec: spec = (before_command, []);
+val quasi_command_spec: spec = (quasi_command, []);
+val document_heading_spec: spec = ("document_heading", ["document"]);
+val document_body_spec: spec = ("document_body", ["document"]);
 
 type entry =
  {pos: Position.T,
   id: serial,
   kind: string,
-  files: string list,  (*extensions of embedded files*)
   tags: string list};
 
-fun check_spec pos ((kind, files), tags) : entry =
+fun check_spec pos (kind, tags) : entry =
   if not (member (op =) command_kinds kind) then
     error ("Unknown outer syntax keyword kind " ^ quote kind)
-  else if not (null files) andalso kind <> thy_load then
-    error ("Illegal specification of files for " ^ quote kind)
-  else {pos = pos, id = serial (), kind = kind, files = files, tags = tags};
+  else {pos = pos, id = serial (), kind = kind, tags = tags};
 
 
 (** keyword tables **)
@@ -191,7 +187,7 @@
     Symtab.merge (K true) (commands1, commands2));
 
 val add_keywords =
-  fold (fn ((name, pos), spec as ((kind, _), _)) => map_keywords (fn (minor, major, commands) =>
+  fold (fn ((name, pos), spec as (kind, _)) => map_keywords (fn (minor, major, commands) =>
     if kind = "" orelse kind = before_command orelse kind = quasi_command then
       let
         val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
@@ -225,14 +221,6 @@
 
 fun command_kind keywords = Option.map #kind o lookup_command keywords;
 
-fun command_files keywords name path =
-  (case lookup_command keywords name of
-    NONE => []
-  | SOME {kind, files, ...} =>
-      if kind <> thy_load then []
-      else if null files then [path]
-      else map (fn ext => Path.ext ext path) files);
-
 fun command_tags keywords name =
   (case lookup_command keywords name of
     SOME {tags, ...} => tags
--- a/src/Pure/Isar/keyword.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Isar/keyword.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -110,13 +110,13 @@
   {
     val none: Spec = Spec("")
   }
-  sealed case class Spec(kind: String, exts: List[String] = Nil, tags: List[String] = Nil)
+  sealed case class Spec(kind: String, load_command: String = "", tags: List[String] = Nil)
   {
     def is_none: Boolean = kind == ""
 
     override def toString: String =
       kind +
-        (if (exts.isEmpty) "" else " (" + commas_quote(exts) + ")") +
+        (if (load_command.isEmpty) "" else " (" + quote(load_command) + ")") +
         (if (tags.isEmpty) "" else tags.map(quote).mkString(" % ", " % ", ""))
   }
 
@@ -127,16 +127,20 @@
 
   class Keywords private(
     val kinds: Map[String, String] = Map.empty,
-    val load_commands: Map[String, List[String]] = Map.empty)
+    val load_commands: Map[String, String] = Map.empty)
   {
     override def toString: String =
     {
       val entries =
         for ((name, kind) <- kinds.toList.sortBy(_._1)) yield {
-          val exts = load_commands.getOrElse(name, Nil)
+          val load_decl =
+            load_commands.get(name) match {
+              case Some(load_command) => " (" + quote(load_command) + ")"
+              case None => ""
+            }
           val kind_decl =
             if (kind == "") ""
-            else " :: " + quote(kind) + (if (exts.isEmpty) "" else " (" + commas_quote(exts) + ")")
+            else " :: " + quote(kind) + load_decl
           quote(name) + kind_decl
         }
       entries.mkString("keywords\n  ", " and\n  ", "")
@@ -167,14 +171,14 @@
 
     /* add keywords */
 
-    def + (name: String, kind: String = "", exts: List[String] = Nil): Keywords =
+    def + (name: String, kind: String = "", load_command: String = ""): Keywords =
     {
       val kinds1 = kinds + (name -> kind)
       val load_commands1 =
         if (kind == THY_LOAD) {
           if (!Symbol.iterator(name).forall(Symbol.is_ascii))
             error("Bad theory load command " + quote(name))
-          load_commands + (name -> exts)
+          load_commands + (name -> load_command)
         }
         else load_commands
       new Keywords(kinds1, load_commands1)
@@ -187,8 +191,8 @@
             keywords + Symbol.decode(name) + Symbol.encode(name)
           else
             keywords +
-              (Symbol.decode(name), spec.kind, spec.exts) +
-              (Symbol.encode(name), spec.kind, spec.exts)
+              (Symbol.decode(name), spec.kind, spec.load_command) +
+              (Symbol.encode(name), spec.kind, spec.load_command)
       }
 
 
@@ -208,12 +212,6 @@
       token.is_begin_or_command || is_quasi_command(token)
 
 
-    /* load commands */
-
-    def load_commands_in(text: String): Boolean =
-      load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
-
-
     /* lexicons */
 
     private def make_lexicon(is_minor: Boolean): Scan.Lexicon =
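
As data, a thy_load keyword now carries the name of its load command instead
of a list of file extensions; a sketch with values from the SPARK setup above:

  // "spark_open" :: thy_load (spark_siv) is represented as
  val spec = Keyword.Spec(Keyword.THY_LOAD, load_command = "spark_siv")
  // spec.toString == "thy_load (\"spark_siv\")"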
--- a/src/Pure/Isar/outer_syntax.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Isar/outer_syntax.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -56,16 +56,16 @@
 
   /* keywords */
 
-  def + (name: String, kind: String = "", exts: List[String] = Nil): Outer_Syntax =
+  def + (name: String, kind: String = "", load_command: String = ""): Outer_Syntax =
     new Outer_Syntax(
-      keywords + (name, kind, exts), rev_abbrevs, language_context, has_tokens = true)
+      keywords + (name, kind, load_command), rev_abbrevs, language_context, has_tokens = true)
 
   def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
     (this /: keywords) {
       case (syntax, (name, spec)) =>
         syntax +
-          (Symbol.decode(name), spec.kind, spec.exts) +
-          (Symbol.encode(name), spec.kind, spec.exts)
+          (Symbol.decode(name), spec.kind, spec.load_command) +
+          (Symbol.encode(name), spec.kind, spec.load_command)
     }
 
 
@@ -133,8 +133,11 @@
 
   /* load commands */
 
-  def load_command(name: String): Option[List[String]] = keywords.load_commands.get(name)
-  def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)
+  def load_command(name: String): Option[String] =
+    keywords.load_commands.get(name)
+
+  def has_load_commands(text: String): Boolean =
+    keywords.load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
 
 
   /* language context */
--- a/src/Pure/Isar/token.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Isar/token.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -256,22 +256,6 @@
 
   def reader(tokens: List[Token], start: Token.Pos): Reader =
     new Token_Reader(tokens, start)
-
-
-  /* XML data representation */
-
-  val encode: XML.Encode.T[Token] = (tok: Token) =>
-  {
-    import XML.Encode._
-    pair(int, string)(tok.kind.id, tok.source)
-  }
-
-  val decode: XML.Decode.T[Token] = (body: XML.Body) =>
-  {
-    import XML.Decode._
-    val (k, s) = pair(int, string)(body)
-    Token(Kind(k), s)
-  }
 }
 
 
--- a/src/Pure/ML/ml_file.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/ML/ml_file.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -6,18 +6,18 @@
 
 signature ML_FILE =
 sig
-  val command: string -> bool option -> (theory -> Token.file list) ->
+  val command: string -> bool option -> (theory -> Token.file) ->
     Toplevel.transition -> Toplevel.transition
-  val ML: bool option -> (theory -> Token.file list) -> Toplevel.transition -> Toplevel.transition
-  val SML: bool option -> (theory -> Token.file list) -> Toplevel.transition -> Toplevel.transition
+  val ML: bool option -> (theory -> Token.file) -> Toplevel.transition -> Toplevel.transition
+  val SML: bool option -> (theory -> Token.file) -> Toplevel.transition -> Toplevel.transition
 end;
 
 structure ML_File: ML_FILE =
 struct
 
-fun command environment debug files = Toplevel.generic_theory (fn gthy =>
+fun command environment debug get_file = Toplevel.generic_theory (fn gthy =>
   let
-    val [file: Token.file] = files (Context.theory_of gthy);
+    val file = get_file (Context.theory_of gthy);
     val provide = Resources.provide_file file;
     val source = Token.file_source file;
 
--- a/src/Pure/PIDE/command.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/command.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -6,11 +6,11 @@
 
 signature COMMAND =
 sig
-  type blob = (string * (SHA1.digest * string list) option) Exn.result
+  type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option}
   val read_file: Path.T -> Position.T -> Path.T -> Token.file
   val read_thy: Toplevel.state -> theory
   val read: Keyword.keywords -> theory -> Path.T-> (unit -> theory) ->
-    blob list * int -> Token.T list -> Toplevel.transition
+    blob Exn.result list * int -> Token.T list -> Toplevel.transition
   type eval
   val eval_command_id: eval -> Document_ID.command
   val eval_exec_id: eval -> Document_ID.exec
@@ -20,7 +20,7 @@
   val eval_result_command: eval -> Toplevel.transition
   val eval_result_state: eval -> Toplevel.state
   val eval: Keyword.keywords -> Path.T -> (unit -> theory) ->
-    blob list * int -> Document_ID.command -> Token.T list -> eval -> eval
+    blob Exn.result list * int -> Document_ID.command -> Token.T list -> eval -> eval
   type print
   type print_fn = Toplevel.transition -> Toplevel.state -> unit
   val print0: {pri: int, print_fn: print_fn} -> eval -> print
@@ -53,8 +53,7 @@
 
 (* read *)
 
-type blob =
-  (string * (SHA1.digest * string list) option) Exn.result;  (*file node name, digest, lines*)
+type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option};
 
 fun read_file_node file_node master_dir pos src_path =
   let
@@ -89,29 +88,21 @@
       | SOME exec_id => Position.put_id exec_id);
   in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end
 
-fun resolve_files keywords master_dir (blobs, blobs_index) toks =
+fun resolve_files master_dir (blobs, blobs_index) toks =
   (case Outer_Syntax.parse_spans toks of
     [Command_Span.Span (Command_Span.Command_Span (cmd, _), _)] =>
       (case try (nth toks) blobs_index of
         SOME tok =>
           let
             val pos = Token.pos_of tok;
-            val path = Path.explode (Token.content_of tok)
-              handle ERROR msg => error (msg ^ Position.here pos);
-            fun make_file src_path (Exn.Res (file_node, NONE)) =
+            fun make_file (Exn.Res {file_node, src_path, content = NONE}) =
                   Exn.interruptible_capture (fn () =>
                     read_file_node file_node master_dir pos src_path) ()
-              | make_file src_path (Exn.Res (file_node, SOME (digest, lines))) =
+              | make_file (Exn.Res {file_node, src_path, content = SOME (digest, lines)}) =
                   (Position.report pos Markup.language_path;
                     Exn.Res (blob_file src_path lines digest file_node))
-              | make_file _ (Exn.Exn e) = Exn.Exn e;
-            val src_paths = Keyword.command_files keywords cmd path;
-            val files =
-              if null blobs then
-                map2 make_file src_paths (map (K (Exn.Res ("", NONE))) src_paths)
-              else if length src_paths = length blobs then
-                map2 make_file src_paths blobs
-              else error ("Misalignment of inlined files" ^ Position.here pos);
+              | make_file (Exn.Exn e) = Exn.Exn e;
+            val files = map make_file blobs;
           in
             toks |> map_index (fn (i, tok) =>
               if i = blobs_index then Token.put_files files tok else tok)
@@ -157,7 +148,7 @@
         Position.here_list verbatim);
   in
     if exists #1 token_reports then Toplevel.malformed pos "Malformed command syntax"
-    else Outer_Syntax.parse_span thy init (resolve_files keywords master_dir blobs_info span)
+    else Outer_Syntax.parse_span thy init (resolve_files master_dir blobs_info span)
   end;
 
 end;
--- a/src/Pure/PIDE/command.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/command.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -16,8 +16,12 @@
 {
   type Edit = (Option[Command], Option[Command])
 
-  type Blob = Exn.Result[(Document.Node.Name, Option[(SHA1.Digest, Symbol.Text_Chunk)])]
-  type Blobs_Info = (List[Blob], Int)
+  sealed case class Blob(
+    name: Document.Node.Name,
+    src_path: Path,
+    content: Option[(SHA1.Digest, Symbol.Text_Chunk)])
+
+  type Blobs_Info = (List[Exn.Result[Blob]], Int)
   val no_blobs: Blobs_Info = (Nil, -1)
 
 
@@ -31,15 +35,6 @@
     val empty: Results = new Results(SortedMap.empty)
     def make(args: TraversableOnce[Results.Entry]): Results = (empty /: args)(_ + _)
     def merge(args: TraversableOnce[Results]): Results = (empty /: args)(_ ++ _)
-
-
-    /* XML data representation */
-
-    val encode: XML.Encode.T[Results] = (results: Results) =>
-    { import XML.Encode._; list(pair(long, tree))(results.rep.toList) }
-
-    val decode: XML.Decode.T[Results] = (body: XML.Body) =>
-    { import XML.Decode._; make(list(pair(long, tree))(body)) }
   }
 
   final class Results private(private val rep: SortedMap[Long, XML.Tree])
@@ -115,35 +110,6 @@
     val empty: Markups = new Markups(Map.empty)
     def init(markup: Markup_Tree): Markups = new Markups(Map(Markup_Index.markup -> markup))
     def merge(args: TraversableOnce[Markups]): Markups = (empty /: args)(_ ++ _)
-
-
-    /* XML data representation */
-
-    def encode(source_length: Int): XML.Encode.T[Markups] = (markups: Markups) =>
-    {
-      import XML.Encode._
-
-      val markup_index: T[Markup_Index] = (index: Markup_Index) =>
-        pair(bool, Symbol.Text_Chunk.encode_name)(index.status, index.chunk_name)
-
-      val markup_tree: T[Markup_Tree] =
-        _.to_XML(Text.Range(0, source_length), Symbol.spaces(source_length), Markup.Elements.full)
-
-      list(pair(markup_index, markup_tree))(markups.rep.toList)
-    }
-
-    val decode: XML.Decode.T[Markups] = (body: XML.Body) =>
-    {
-      import XML.Decode._
-
-      val markup_index: T[Markup_Index] = (body: XML.Body) =>
-      {
-        val (status, chunk_name) = pair(bool, Symbol.Text_Chunk.decode_name)(body)
-        Markup_Index(status, chunk_name)
-      }
-
-      (Markups.empty /: list(pair(markup_index, Markup_Tree.from_XML))(body))(_ + _)
-    }
   }
 
   final class Markups private(private val rep: Map[Markup_Index, Markup_Tree])
@@ -221,37 +187,6 @@
     def merge(command: Command, states: List[State]): State =
       State(command, states.flatMap(_.status), merge_results(states),
         merge_exports(states), merge_markups(states))
-
-
-    /* XML data representation */
-
-    val encode: XML.Encode.T[State] = (st: State) =>
-    {
-      import XML.Encode._
-
-      val command = st.command
-      val blobs_names = command.blobs_names.map(_.node)
-      val blobs_index = command.blobs_index
-      require(command.blobs_ok)
-
-      pair(long, pair(string, pair(pair(list(string), int), pair(Command_Span.encode,
-          pair(list(Markup.encode), pair(Results.encode, Markups.encode(command.source.length)))))))(
-        (command.id, (command.node_name.node, ((blobs_names, blobs_index), (command.span,
-          (st.status, (st.results, st.markups)))))))
-    }
-
-    def decode(node_name: String => Document.Node.Name): XML.Decode.T[State] = (body: XML.Body) =>
-    {
-      import XML.Decode._
-      val (id, (node, ((blobs_names, blobs_index), (span, (status, (results, markups)))))) =
-        pair(long, pair(string, pair(pair(list(string), int), pair(Command_Span.decode,
-          pair(list(Markup.decode), pair(Results.decode, Markups.decode))))))(body)
-
-      val blobs_info: Blobs_Info =
-        (blobs_names.map(name => Exn.Res((node_name(name), None)): Blob), blobs_index)
-      val command = Command(id, node_name(node), blobs_info, span)
-      State(command, status, results, Exports.empty, markups)
-    }
   }
 
   sealed case class State(
@@ -468,37 +403,6 @@
 
   /* blobs: inlined errors and auxiliary files */
 
-  private def clean_tokens(tokens: List[Token]): List[(Token, Int)] =
-  {
-    def clean(toks: List[(Token, Int)]): List[(Token, Int)] =
-      toks match {
-        case (t1, i1) :: (t2, i2) :: rest =>
-          if (t1.is_keyword && t1.source == "%" && t2.is_name) clean(rest)
-          else (t1, i1) :: clean((t2, i2) :: rest)
-        case _ => toks
-      }
-    clean(tokens.zipWithIndex.filter({ case (t, _) => t.is_proper }))
-  }
-
-  private def find_file(tokens: List[(Token, Int)]): Option[(String, Int)] =
-    if (tokens.exists({ case (t, _) => t.is_command })) {
-      tokens.dropWhile({ case (t, _) => !t.is_command }).
-        collectFirst({ case (t, i) if t.is_embedded => (t.content, i) })
-    }
-    else None
-
-  def span_files(syntax: Outer_Syntax, span: Command_Span.Span): (List[String], Int) =
-    syntax.load_command(span.name) match {
-      case Some(exts) =>
-        find_file(clean_tokens(span.content)) match {
-          case Some((file, i)) =>
-            if (exts.isEmpty) (List(file), i)
-            else (exts.map(ext => file + "." + ext), i)
-          case None => (Nil, -1)
-        }
-      case None => (Nil, -1)
-    }
-
   def blobs_info(
     resources: Resources,
     syntax: Outer_Syntax,
@@ -511,36 +415,41 @@
       // inlined errors
       case Thy_Header.THEORY =>
         val reader = Scan.char_reader(Token.implode(span.content))
-        val imports_pos = resources.check_thy_reader(node_name, reader).imports_pos
+        val header = resources.check_thy_reader(node_name, reader)
+        val imports_pos = header.imports_pos
         val raw_imports =
           try {
             val read_imports = Thy_Header.read(reader, Token.Pos.none).imports
             if (imports_pos.length == read_imports.length) read_imports else error("")
           }
-          catch { case exn: Throwable => List.fill(imports_pos.length)("") }
+          catch { case _: Throwable => List.fill(imports_pos.length)("") }
 
-        val errors =
+        val errs1 =
           for { ((import_name, pos), s) <- imports_pos zip raw_imports if !can_import(import_name) }
           yield {
             val completion =
               if (Thy_Header.is_base_name(s)) resources.complete_import_name(node_name, s) else Nil
-            val msg =
-              "Bad theory import " +
-                Markup.Path(import_name.node).markup(quote(import_name.toString)) +
-                Position.here(pos) + Completion.report_theories(pos, completion)
-            Exn.Exn(ERROR(msg)): Command.Blob
+            "Bad theory import " +
+              Markup.Path(import_name.node).markup(quote(import_name.toString)) +
+              Position.here(pos) + Completion.report_theories(pos, completion)
           }
-        (errors, -1)
+        val errs2 =
+          for {
+            (_, spec) <- header.keywords
+            if !Command_Span.load_commands.exists(_.name == spec.load_command)
+          } yield { "Unknown load command specification: " + quote(spec.load_command) }
+        ((errs1 ::: errs2).map(msg => Exn.Exn[Command.Blob](ERROR(msg))), -1)
 
       // auxiliary files
       case _ =>
-        val (files, index) = span_files(syntax, span)
+        val (files, index) = span.loaded_files(syntax)
         val blobs =
           files.map(file =>
             (Exn.capture {
-              val name = Document.Node.Name(resources.append(node_name, Path.explode(file)))
-              val blob = get_blob(name).map(blob => ((blob.bytes.sha1_digest, blob.chunk)))
-              (name, blob)
+              val src_path = Path.explode(file)
+              val name = Document.Node.Name(resources.append(node_name, src_path))
+              val content = get_blob(name).map(blob => (blob.bytes.sha1_digest, blob.chunk))
+              Blob(name, src_path, content)
             }).user_error)
         (blobs, index)
     }
@@ -574,23 +483,23 @@
 
   /* blobs */
 
-  def blobs: List[Command.Blob] = blobs_info._1
+  def blobs: List[Exn.Result[Command.Blob]] = blobs_info._1
   def blobs_index: Int = blobs_info._2
 
   def blobs_ok: Boolean =
     blobs.forall({ case Exn.Res(_) => true case _ => false })
 
   def blobs_names: List[Document.Node.Name] =
-    for (Exn.Res((name, _)) <- blobs) yield name
+    for (Exn.Res(blob) <- blobs) yield blob.name
 
   def blobs_undefined: List[Document.Node.Name] =
-    for (Exn.Res((name, None)) <- blobs) yield name
+    for (Exn.Res(blob) <- blobs if blob.content.isEmpty) yield blob.name
 
   def blobs_defined: List[(Document.Node.Name, SHA1.Digest)] =
-    for (Exn.Res((name, Some((digest, _)))) <- blobs) yield (name, digest)
+    for (Exn.Res(blob) <- blobs; (digest, _) <- blob.content) yield (blob.name, digest)
 
   def blobs_changed(doc_blobs: Document.Blobs): Boolean =
-    blobs.exists({ case Exn.Res((name, _)) => doc_blobs.changed(name) case _ => false })
+    blobs.exists({ case Exn.Res(blob) => doc_blobs.changed(blob.name) case _ => false })
 
 
   /* source chunks */
@@ -599,8 +508,8 @@
 
   val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] =
     ((Symbol.Text_Chunk.Default -> chunk) ::
-      (for (Exn.Res((name, Some((_, file)))) <- blobs)
-        yield Symbol.Text_Chunk.File(name.node) -> file)).toMap
+      (for (Exn.Res(blob) <- blobs; (_, file) <- blob.content)
+        yield Symbol.Text_Chunk.File(blob.name.node) -> file)).toMap
 
   def length: Int = source.length
   def range: Text.Range = chunk.range
--- a/src/Pure/PIDE/command_span.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/command_span.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -12,6 +12,35 @@
 
 object Command_Span
 {
+  /* loaded files */
+
+  type Loaded_Files = (List[String], Int)
+
+  val no_loaded_files: Loaded_Files = (Nil, -1)
+
+  class Load_Command(val name: String) extends Isabelle_System.Service
+  {
+    override def toString: String = name
+
+    def extensions: List[String] = Nil
+
+    def loaded_files(tokens: List[(Token, Int)]): Loaded_Files =
+      tokens.collectFirst({ case (t, i) if t.is_embedded => (t.content, i) }) match {
+        case Some((file, i)) =>
+          extensions match {
+            case Nil => (List(file), i)
+            case exts => (exts.map(ext => file + "." + ext), i)
+          }
+        case None => no_loaded_files
+      }
+  }
+
+  lazy val load_commands: List[Load_Command] =
+    new Load_Command("") :: Isabelle_System.make_services(classOf[Load_Command])
+
+
+  /* span kind */
+
   sealed abstract class Kind {
     override def toString: String =
       this match {
@@ -26,6 +55,9 @@
   case object Malformed_Span extends Kind
   case object Theory_Span extends Kind
 
+
+  /* span */
+
   sealed case class Span(kind: Kind, content: List[Token])
   {
     def is_theory: Boolean = kind == Theory_Span
@@ -67,6 +99,33 @@
       }
       (source, Span(kind, content1.toList))
     }
+
+    def clean_arguments: List[(Token, Int)] =
+    {
+      if (name.nonEmpty) {
+        def clean(toks: List[(Token, Int)]): List[(Token, Int)] =
+          toks match {
+            case (t1, i1) :: (t2, i2) :: rest =>
+              if (t1.is_keyword && t1.source == "%" && t2.is_name) clean(rest)
+              else (t1, i1) :: clean((t2, i2) :: rest)
+            case _ => toks
+          }
+        clean(content.zipWithIndex.filter({ case (t, _) => t.is_proper }))
+          .dropWhile({ case (t, _) => !t.is_command })
+          .dropWhile({ case (t, _) => t.is_command })
+      }
+      else Nil
+    }
+
+    def loaded_files(syntax: Outer_Syntax): Loaded_Files =
+      syntax.load_command(name) match {
+        case None => no_loaded_files
+        case Some(a) =>
+          load_commands.find(_.name == a) match {
+            case Some(load_command) => load_command.loaded_files(clean_arguments)
+            case None => error("Undefined load command function: " + a)
+          }
+      }
   }
 
   val empty: Span = Span(Ignored_Span, Nil)
@@ -76,30 +135,4 @@
     val kind = if (theory) Theory_Span else Malformed_Span
     Span(kind, List(Token(Token.Kind.UNPARSED, source)))
   }
-
-
-  /* XML data representation */
-
-  def encode: XML.Encode.T[Span] = (span: Span) =>
-  {
-    import XML.Encode._
-    val kind: T[Kind] =
-      variant(List(
-        { case Command_Span(a, b) => (List(a), properties(b)) },
-        { case Ignored_Span => (Nil, Nil) },
-        { case Malformed_Span => (Nil, Nil) }))
-    pair(kind, list(Token.encode))((span.kind, span.content))
-  }
-
-  def decode: XML.Decode.T[Span] = (body: XML.Body) =>
-  {
-    import XML.Decode._
-    val kind: T[Kind] =
-      variant(List(
-        { case (List(a), b) => Command_Span(a, properties(b)) },
-        { case (Nil, Nil) => Ignored_Span },
-        { case (Nil, Nil) => Malformed_Span }))
-    val (k, toks) = pair(kind, list(Token.decode))(body)
-    Span(k, toks)
-  }
 }
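
Since loaded_files is an ordinary method of the Load_Command service class, a
registration may implement schemes beyond a fixed extension list, as the NEWS
entry indicates. A hypothetical sketch:

  import isabelle._

  // hypothetical: derive a companion file from the argument itself,
  // rather than from a fixed list of extensions
  class Indexed_Load extends Command_Span.Load_Command("indexed")
  {
    override def loaded_files(tokens: List[(Token, Int)]): Command_Span.Loaded_Files =
      tokens.collectFirst({ case (t, i) if t.is_embedded => (t.content, i) }) match {
        case Some((file, i)) => (List(file, file + ".index"), i)
        case None => Command_Span.no_loaded_files
      }
  }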
--- a/src/Pure/PIDE/document.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/document.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -18,7 +18,7 @@
   type state
   val init_state: state
   val define_blob: string -> string -> state -> state
-  type blob_digest = (string * string option) Exn.result
+  type blob_digest = {file_node: string, src_path: Path.T, digest: string option} Exn.result
   type command =
    {command_id: Document_ID.command,
     name: string,
@@ -315,7 +315,7 @@
 
 (** main state -- document structure and execution process **)
 
-type blob_digest = (string * string option) Exn.result;  (*file node name, raw digest*)
+type blob_digest = {file_node: string, src_path: Path.T, digest: string option} Exn.result;
 
 type execution =
  {version_id: Document_ID.version,  (*static version id*)
@@ -398,11 +398,11 @@
   | SOME content => content);
 
 fun resolve_blob state (blob_digest: blob_digest) =
-  blob_digest |> Exn.map_res (fn (file_node, raw_digest) =>
-    (file_node, Option.map (the_blob state) raw_digest));
+  blob_digest |> Exn.map_res (fn {file_node, src_path, digest} =>
+    {file_node = file_node, src_path = src_path, content = Option.map (the_blob state) digest});
 
 fun blob_reports pos (blob_digest: blob_digest) =
-  (case blob_digest of Exn.Res (file_node, _) => [(pos, Markup.path file_node)] | _ => []);
+  (case blob_digest of Exn.Res {file_node, ...} => [(pos, Markup.path file_node)] | _ => []);
 
 
 (* commands *)
@@ -478,7 +478,7 @@
     val blobs' =
       (commands', Symtab.empty) |->
         Inttab.fold (fn (_, (_, blobs, _, _)) => blobs |>
-          fold (fn Exn.Res (_, SOME b) => Symtab.update (b, the_blob state b) | _ => I));
+          fold (fn Exn.Res {digest = SOME b, ...} => Symtab.update (b, the_blob state b) | _ => I));
 
   in (versions', blobs', commands', execution) end);
 
--- a/src/Pure/PIDE/protocol.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/protocol.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -38,10 +38,13 @@
       blobs_xml |>
         let
           val message = YXML.string_of_body o Protocol_Message.command_positions id;
+          val blob =
+            triple string string (option string)
+            #> (fn (a, b, c) => {file_node = a, src_path = Path.explode b, digest = c});
         in
           pair
             (list (variant
-             [fn ([], a) => Exn.Res (pair string (option string) a),
+             [fn ([], a) => Exn.Res (blob a),
               fn ([], a) => Exn.Exn (ERROR (message a))]))
             int
         end;
@@ -109,8 +112,7 @@
                         let
                           val (master, (name, (imports, (keywords, errors)))) =
                             pair string (pair string (pair (list string)
-                              (pair (list (pair string
-                                (pair (pair string (list string)) (list string))))
+                              (pair (list (pair string (pair string (list string))))
                                 (list YXML.string_of_body)))) a;
                           val imports' = map (rpair Position.none) imports;
                           val keywords' = map (fn (x, y) => ((x, Position.none), y)) keywords;
--- a/src/Pure/PIDE/protocol.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/protocol.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -315,10 +315,11 @@
 
     val blobs_yxml =
     {
-      val encode_blob: T[Command.Blob] =
+      val encode_blob: T[Exn.Result[Command.Blob]] =
         variant(List(
-          { case Exn.Res((a, b)) =>
-              (Nil, pair(string, option(string))((a.node, b.map(p => p._1.toString)))) },
+          { case Exn.Res(Command.Blob(a, b, c)) =>
+              (Nil, triple(string, string, option(string))(
+                (a.node, b.implode, c.map(p => p._1.toString)))) },
           { case Exn.Exn(e) => (Nil, string(Exn.message(e))) }))
 
       Symbol.encode_yxml(pair(list(encode_blob), int)(command.blobs, command.blobs_index))
@@ -397,11 +398,10 @@
           { case Document.Node.Deps(header) =>
               val master_dir = File.standard_url(name.master_dir)
               val imports = header.imports.map(_.node)
-              val keywords =
-                header.keywords.map({ case (a, Keyword.Spec(b, c, d)) => (a, ((b, c), d)) })
+              val keywords = header.keywords.map({ case (a, Keyword.Spec(b, _, c)) => (a, (b, c)) })
               (Nil,
-                pair(string, pair(string, pair(list(string), pair(list(pair(string,
-                    pair(pair(string, list(string)), list(string)))), list(string)))))(
+                pair(string, pair(string, pair(list(string),
+                  pair(list(pair(string, pair(string, list(string)))), list(string)))))(
                 (master_dir, (name.theory, (imports, (keywords, header.errors)))))) },
           { case Document.Node.Perspective(a, b, c) =>
               (bool_atom(a) :: b.commands.map(cmd => long_atom(cmd.id)),
--- a/src/Pure/PIDE/resources.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/resources.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -37,10 +37,13 @@
   val check_thy: Path.T -> string ->
    {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
     imports: (string * Position.T) list, keywords: Thy_Header.keywords}
-  val parse_files: string -> (theory -> Token.file list) parser
+  val extensions: string list -> Path.T -> Path.T list
+  val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser
+  val parse_file: (theory -> Token.file) parser
   val provide: Path.T * SHA1.digest -> theory -> theory
   val provide_file: Token.file -> theory -> theory
-  val provide_parse_files: string -> (theory -> Token.file list * theory) parser
+  val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser
+  val provide_parse_file: (theory -> Token.file * theory) parser
   val loaded_files_current: theory -> bool
   val check_path: Proof.context -> Path.T option -> string * Position.T -> Path.T
   val check_file: Proof.context -> Path.T option -> string * Position.T -> Path.T
@@ -290,18 +293,22 @@
 
 (* load files *)
 
-fun parse_files cmd =
+fun extensions exts path = map (fn ext => Path.ext ext path) exts;
+
+fun parse_files make_paths =
   Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
     (case Token.get_files tok of
       [] =>
         let
-          val keywords = Thy_Header.get_keywords thy;
           val master_dir = master_directory thy;
           val pos = Token.pos_of tok;
-          val src_paths = Keyword.command_files keywords cmd (Path.explode name);
+          val src_paths = make_paths (Path.explode name);
         in map (Command.read_file master_dir pos) src_paths end
     | files => map Exn.release files));
 
+val parse_file = parse_files single >> (fn f => f #> the_single);
+
+
 fun provide (src_path, id) =
   map_files (fn (master_dir, imports, provided) =>
     if AList.defined (op =) provided src_path then
@@ -310,13 +317,16 @@
 
 fun provide_file (file: Token.file) = provide (#src_path file, #digest file);
 
-fun provide_parse_files cmd =
-  parse_files cmd >> (fn files => fn thy =>
+fun provide_parse_files make_paths =
+  parse_files make_paths >> (fn files => fn thy =>
     let
       val fs = files thy;
       val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy;
     in (fs, thy') end);
 
+val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single);
+
+
 fun load_file thy src_path =
   let
     val full_path = check_file (master_directory thy) src_path;
--- a/src/Pure/PIDE/resources.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/PIDE/resources.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -113,12 +113,14 @@
     val (is_utf8, raw_text) =
       with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
     () => {
-      if (syntax.load_commands_in(raw_text)) {
+      if (syntax.has_load_commands(raw_text)) {
         val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
         val spans = syntax.parse_spans(text)
         val dir = Path.explode(name.master_dir)
-        spans.iterator.flatMap(Command.span_files(syntax, _)._1).
-          map(a => (dir + Path.explode(a)).expand).toList
+        (for {
+          span <- spans.iterator
+          file <- span.loaded_files(syntax)._1
+        } yield (dir + Path.explode(file)).expand).toList
       }
       else Nil
     }
--- a/src/Pure/Pure.thy	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Pure.thy	Fri Nov 27 23:51:37 2020 +0100
@@ -112,14 +112,14 @@
 local
   val _ =
     Outer_Syntax.command \<^command_keyword>\<open>external_file\<close> "formal dependency on external file"
-      (Resources.provide_parse_files "external_file" >> (fn files => Toplevel.theory (#2 o files)));
+      (Resources.provide_parse_file >> (fn get_file => Toplevel.theory (#2 o get_file)));
 
   val _ =
     Outer_Syntax.command \<^command_keyword>\<open>bibtex_file\<close> "check bibtex database file in Prover IDE"
-      (Resources.provide_parse_files "bibtex_file" >> (fn files =>
+      (Resources.provide_parse_file >> (fn get_file =>
         Toplevel.theory (fn thy =>
           let
-            val ([{lines, pos, ...}], thy') = files thy;
+            val ({lines, pos, ...}, thy') = get_file thy;
             val _ = Bibtex.check_database_output pos (cat_lines lines);
           in thy' end)));
 
@@ -178,31 +178,31 @@
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>ML_file\<close> "read and evaluate Isabelle/ML file"
-    (Resources.parse_files "ML_file" --| semi >> ML_File.ML NONE);
+    (Resources.parse_file --| semi >> ML_File.ML NONE);
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>ML_file_debug\<close>
     "read and evaluate Isabelle/ML file (with debugger information)"
-    (Resources.parse_files "ML_file_debug" --| semi >> ML_File.ML (SOME true));
+    (Resources.parse_file --| semi >> ML_File.ML (SOME true));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>ML_file_no_debug\<close>
     "read and evaluate Isabelle/ML file (no debugger information)"
-    (Resources.parse_files "ML_file_no_debug" --| semi >> ML_File.ML (SOME false));
+    (Resources.parse_file --| semi >> ML_File.ML (SOME false));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_file\<close> "read and evaluate Standard ML file"
-    (Resources.parse_files "SML_file" --| semi >> ML_File.SML NONE);
+    (Resources.parse_file --| semi >> ML_File.SML NONE);
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_file_debug\<close>
     "read and evaluate Standard ML file (with debugger information)"
-    (Resources.parse_files "SML_file_debug" --| semi >> ML_File.SML (SOME true));
+    (Resources.parse_file --| semi >> ML_File.SML (SOME true));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_file_no_debug\<close>
     "read and evaluate Standard ML file (no debugger information)"
-    (Resources.parse_files "SML_file_no_debug" --| semi >> ML_File.SML (SOME false));
+    (Resources.parse_file --| semi >> ML_File.SML (SOME false));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_export\<close> "evaluate SML within Isabelle/ML environment"
--- a/src/Pure/Thy/thy_header.ML	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Thy/thy_header.ML	Fri Nov 27 23:51:37 2020 +0100
@@ -85,9 +85,9 @@
      ((subparagraphN, \<^here>), Keyword.document_heading_spec),
      ((textN, \<^here>), Keyword.document_body_spec),
      ((txtN, \<^here>), Keyword.document_body_spec),
-     ((text_rawN, \<^here>), ((Keyword.document_raw, []), ["document"])),
-     ((theoryN, \<^here>), ((Keyword.thy_begin, []), ["theory"])),
-     (("ML", \<^here>), ((Keyword.thy_decl, []), ["ML"]))];
+     ((text_rawN, \<^here>), (Keyword.document_raw, ["document"])),
+     ((theoryN, \<^here>), (Keyword.thy_begin, ["theory"])),
+     (("ML", \<^here>), (Keyword.thy_decl, ["ML"]))];
 
 
 (* theory data *)
@@ -132,12 +132,14 @@
   if name = Context.PureN then Scan.succeed []
   else Parse.$$$ importsN |-- Parse.!!! (Scan.repeat1 Parse.theory_name);
 
-val opt_files =
-  Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.name) --| Parse.$$$ ")") [];
+fun loaded_files kind =
+  if kind = Keyword.thy_load then
+    Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.name) --| Parse.$$$ ")") []
+  else Scan.succeed [];
 
 val keyword_spec =
   Parse.group (fn () => "outer syntax keyword specification")
-    (Parse.name -- opt_files -- Document_Source.old_tags);
+    ((Parse.name :-- loaded_files >> #1) -- Document_Source.old_tags);
 
 val keyword_decl =
   Scan.repeat1 Parse.string_position --
--- a/src/Pure/Thy/thy_header.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Thy/thy_header.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -7,8 +7,6 @@
 package isabelle
 
 
-import scala.annotation.tailrec
-import scala.collection.mutable
 import scala.util.parsing.input.Reader
 import scala.util.matching.Regex
 
@@ -131,13 +129,15 @@
   {
     val header: Parser[Thy_Header] =
     {
-      val opt_files =
-        $$$("(") ~! (rep1sep(name, $$$(",")) <~ $$$(")")) ^^ { case _ ~ x => x } |
-        success(Nil)
+      def load_command =
+        ($$$("(") ~! (name <~ $$$(")")) ^^ { case _ ~ x => x }) | success("")
+
+      def load_command_spec(kind: String) =
+        (if (kind == Keyword.THY_LOAD) load_command else success("")) ^^ (x => (kind, x))
 
       val keyword_spec =
-        atom("outer syntax keyword specification", _.is_name) ~ opt_files ~ tags ^^
-        { case x ~ y ~ z => Keyword.Spec(x, y, z) }
+        (atom("outer syntax keyword specification", _.is_name) >> load_command_spec) ~ tags ^^
+        { case (x, y) ~ z => Keyword.Spec(x, y, z) }
 
       val keyword_decl =
         rep1(string) ~
@@ -166,7 +166,7 @@
             Thy_Header((f(name), pos),
               imports.map({ case (a, b) => (f(a), b) }),
               keywords.map({ case (a, Keyword.Spec(b, c, d)) =>
-                (f(a), Keyword.Spec(f(b), c.map(f), d.map(f))) }),
+                (f(a), Keyword.Spec(f(b), f(c), d.map(f))) }),
               abbrevs.map({ case (a, b) => (f(a), f(b)) }))
         }
 
--- a/src/Pure/Tools/scala_project.scala	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/Tools/scala_project.scala	Fri Nov 27 23:51:37 2020 +0100
@@ -58,6 +58,7 @@
       "src/Tools/VSCode/" -> Path.explode("isabelle.vscode"),
       "src/Tools/jEdit/src-base/" -> Path.explode("isabelle.jedit_base"),
       "src/Tools/jEdit/src/" -> Path.explode("isabelle.jedit"),
+      "src/HOL/SPARK/Tools" -> Path.explode("isabelle.spark"),
       "src/HOL/Tools/Nitpick" -> Path.explode("isabelle.nitpick"))
 
 
--- a/src/Pure/build-jars	Fri Nov 27 06:48:35 2020 +0000
+++ b/src/Pure/build-jars	Fri Nov 27 23:51:37 2020 +0100
@@ -9,6 +9,7 @@
 ## sources
 
 declare -a SOURCES=(
+  src/HOL/SPARK/Tools/spark.scala
   src/HOL/Tools/Nitpick/kodkod.scala
   src/Pure/Admin/afp.scala
   src/Pure/Admin/build_csdp.scala