clarified module name;
author wenzelm
Tue, 18 Mar 2014 17:39:03 +0100
changeset 56208 06cc31dff138
parent 56207 52d5067ca06a
child 56209 3c89e21d9be2
clarified module name;
src/Doc/Codegen/Setup.thy
src/Doc/Tutorial/ToyList/ToyList_Test.thy
src/Doc/antiquote_setup.ML
src/HOL/SMT_Examples/boogie.ML
src/HOL/SPARK/Tools/spark_commands.ML
src/HOL/Tools/SMT/smt_config.ML
src/HOL/Tools/SMT2/smt2_config.ML
src/Pure/Isar/parse.scala
src/Pure/PIDE/document.ML
src/Pure/PIDE/resources.ML
src/Pure/PIDE/resources.scala
src/Pure/ROOT
src/Pure/ROOT.ML
src/Pure/System/session.scala
src/Pure/Thy/thy_info.ML
src/Pure/Thy/thy_info.scala
src/Pure/Thy/thy_load.ML
src/Pure/Thy/thy_load.scala
src/Pure/Thy/thy_syntax.scala
src/Pure/Tools/build.scala
src/Pure/Tools/proof_general.ML
src/Pure/build-jars
src/Pure/pure_syn.ML
src/Tools/Code/code_runtime.ML
src/Tools/Code/code_target.ML
src/Tools/jEdit/lib/Tools/jedit
src/Tools/jEdit/src/document_model.scala
src/Tools/jEdit/src/find_dockable.scala
src/Tools/jEdit/src/isabelle_sidekick.scala
src/Tools/jEdit/src/jedit_resources.scala
src/Tools/jEdit/src/jedit_thy_load.scala
src/Tools/jEdit/src/plugin.scala
src/Tools/jEdit/src/rendering.scala
src/Tools/jEdit/src/theories_dockable.scala
src/Tools/jEdit/src/timing_dockable.scala
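
The hunks below all follow the same mechanical rename: the ML structure Thy_Load becomes Resources (moved to Pure/PIDE/resources.ML) and the Scala class Thy_Load becomes Resources (moved to Pure/PIDE/resources.scala), with call sites updated accordingly. A minimal call-site sketch in Scala, mirroring the build.scala and plugin.scala hunks (Set.empty and Outer_Syntax.empty are placeholder arguments, not part of this changeset):

    // hypothetical wiring after the rename (formerly: new Thy_Load(...), new Session(thy_load))
    val resources = new Resources(Set.empty, Outer_Syntax.empty)
    val thy_info = new Thy_Info(resources)
    val session = new Session(resources)
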
--- a/src/Doc/Codegen/Setup.thy	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Doc/Codegen/Setup.thy	Tue Mar 18 17:39:03 2014 +0100
@@ -9,7 +9,7 @@
 
 (* FIXME avoid writing into source directory *)
 ML {*
-  Isabelle_System.mkdirs (Path.append (Thy_Load.master_directory @{theory}) (Path.basic "examples"))
+  Isabelle_System.mkdirs (Path.append (Resources.master_directory @{theory}) (Path.basic "examples"))
 *}
 
 ML_file "../antiquote_setup.ML"
--- a/src/Doc/Tutorial/ToyList/ToyList_Test.thy	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Doc/Tutorial/ToyList/ToyList_Test.thy	Tue Mar 18 17:39:03 2014 +0100
@@ -5,7 +5,7 @@
 ML {*  (* FIXME somewhat non-standard, fragile *)
   let
     val texts =
-      map (File.read o Path.append (Thy_Load.master_directory @{theory}) o Path.explode)
+      map (File.read o Path.append (Resources.master_directory @{theory}) o Path.explode)
         ["ToyList1", "ToyList2"];
     val trs = Outer_Syntax.parse Position.start (implode texts);
     val end_state = fold (Toplevel.command_exception false) trs Toplevel.toplevel;
--- a/src/Doc/antiquote_setup.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Doc/antiquote_setup.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -152,7 +152,7 @@
 val _ =
   Theory.setup (Thy_Output.antiquotation @{binding thy_file} (Scan.lift Args.name)
     (fn {context = ctxt, ...} =>
-      fn name => (Thy_Load.check_thy Path.current name; Thy_Output.output ctxt [Pretty.str name])));
+      fn name => (Resources.check_thy Path.current name; Thy_Output.output ctxt [Pretty.str name])));
 
 
 (* Isabelle/jEdit elements *)
--- a/src/HOL/SMT_Examples/boogie.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/HOL/SMT_Examples/boogie.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -317,7 +317,7 @@
 val _ =
   Outer_Syntax.command @{command_spec "boogie_file"}
     "prove verification condition from .b2i file"
-    (Thy_Load.provide_parse_files "boogie_file" >> (fn files =>
+    (Resources.provide_parse_files "boogie_file" >> (fn files =>
       Toplevel.theory (fn thy =>
         let
           val ([{lines, ...}], thy') = files thy;
--- a/src/HOL/SPARK/Tools/spark_commands.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/HOL/SPARK/Tools/spark_commands.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -12,7 +12,7 @@
     val ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file,
       {lines = fdl_lines, pos = fdl_pos, ...},
       {lines = rls_lines, pos = rls_pos, ...}], thy') = files thy;
-    val base = fst (Path.split_ext (File.full_path (Thy_Load.master_directory thy') src_path));
+    val base = fst (Path.split_ext (File.full_path (Resources.master_directory thy') src_path));
   in
     SPARK_VCs.set_vcs
       (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines)))
@@ -24,7 +24,7 @@
 (* FIXME *)
 fun spark_open_old (vc_name, prfx) thy =
   let
-    val ((vc_path, vc_id), vc_text) = Thy_Load.load_file thy (Path.explode vc_name);
+    val ((vc_path, vc_id), vc_text) = Resources.load_file thy (Path.explode vc_name);
     val (base, header) =
       (case Path.split_ext vc_path of
         (base, "vcg") => (base, Fdl_Lexer.vcg_header >> K ())
@@ -117,13 +117,13 @@
 val _ =
   Outer_Syntax.command @{command_spec "spark_open_vcg"}
     "open a new SPARK environment and load a SPARK-generated .vcg file"
-    (Parse.parname -- Thy_Load.provide_parse_files "spark_open_vcg"
+    (Parse.parname -- Resources.provide_parse_files "spark_open_vcg"
       >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
 
 val _ =
   Outer_Syntax.command @{command_spec "spark_open_siv"}
     "open a new SPARK environment and load a SPARK-generated .siv file"
-    (Parse.parname -- Thy_Load.provide_parse_files "spark_open_siv"
+    (Parse.parname -- Resources.provide_parse_files "spark_open_siv"
       >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
 
 val pfun_type = Scan.option
--- a/src/HOL/Tools/SMT/smt_config.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/HOL/Tools/SMT/smt_config.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -198,7 +198,7 @@
   if name = "" then NONE
   else
     Path.explode name
-    |> Path.append (Thy_Load.master_directory (Context.theory_of context))
+    |> Path.append (Resources.master_directory (Context.theory_of context))
     |> SOME o Cache_IO.unsynchronized_init)
 
 val certificates_of = Certificates.get o Context.Proof
--- a/src/HOL/Tools/SMT2/smt2_config.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/HOL/Tools/SMT2/smt2_config.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -195,7 +195,7 @@
   if name = "" then NONE
   else
     Path.explode name
-    |> Path.append (Thy_Load.master_directory (Context.theory_of context))
+    |> Path.append (Resources.master_directory (Context.theory_of context))
     |> SOME o Cache_IO.unsynchronized_init)
 
 val certificates_of = Certificates.get o Context.Proof
--- a/src/Pure/Isar/parse.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/Isar/parse.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -64,7 +64,7 @@
     def path: Parser[String] =
       atom("file name/path specification", tok => tok.is_name && Path.is_wellformed(tok.content))
     def theory_name: Parser[String] =
-      atom("theory name", tok => tok.is_name && Thy_Load.is_wellformed(tok.content))
+      atom("theory name", tok => tok.is_name && Resources.is_wellformed_thy_path(tok.content))
 
     private def tag_name: Parser[String] =
       atom("tag name", tok =>
--- a/src/Pure/PIDE/document.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/PIDE/document.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -443,7 +443,7 @@
                 NONE => Toplevel.toplevel
               | SOME eval => Command.eval_result_state eval)));
     val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents);
-  in Thy_Load.begin_theory master_dir header parents end;
+  in Resources.begin_theory master_dir header parents end;
 
 fun check_theory full name node =
   is_some (loaded_theory name) orelse
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/PIDE/resources.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -0,0 +1,243 @@
+(*  Title:      Pure/PIDE/resources.ML
+    Author:     Makarius
+
+Resources for theories and auxiliary files.
+*)
+
+signature RESOURCES =
+sig
+  val master_directory: theory -> Path.T
+  val imports_of: theory -> (string * Position.T) list
+  val thy_path: Path.T -> Path.T
+  val check_thy: Path.T -> string ->
+   {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
+    imports: (string * Position.T) list, keywords: Thy_Header.keywords}
+  val parse_files: string -> (theory -> Token.file list) parser
+  val provide: Path.T * SHA1.digest -> theory -> theory
+  val provide_parse_files: string -> (theory -> Token.file list * theory) parser
+  val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
+  val loaded_files: theory -> Path.T list
+  val loaded_files_current: theory -> bool
+  val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
+  val load_thy: bool -> (Toplevel.transition -> Time.time option) -> int -> Path.T ->
+    Thy_Header.header -> Position.T -> string -> theory list -> theory * (unit -> unit) * int
+end;
+
+structure Resources: RESOURCES =
+struct
+
+(* manage source files *)
+
+type files =
+ {master_dir: Path.T,  (*master directory of theory source*)
+  imports: (string * Position.T) list,  (*source specification of imports*)
+  provided: (Path.T * SHA1.digest) list};  (*source path, digest*)
+
+fun make_files (master_dir, imports, provided): files =
+ {master_dir = master_dir, imports = imports, provided = provided};
+
+structure Files = Theory_Data
+(
+  type T = files;
+  val empty = make_files (Path.current, [], []);
+  fun extend _ = empty;
+  fun merge _ = empty;
+);
+
+fun map_files f =
+  Files.map (fn {master_dir, imports, provided} =>
+    make_files (f (master_dir, imports, provided)));
+
+
+val master_directory = #master_dir o Files.get;
+val imports_of = #imports o Files.get;
+
+fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, []));
+
+
+(* theory files *)
+
+val thy_path = Path.ext "thy";
+
+fun check_file dir file = File.check_file (File.full_path dir file);
+
+fun check_thy dir thy_name =
+  let
+    val path = thy_path (Path.basic thy_name);
+    val master_file = check_file dir path;
+    val text = File.read master_file;
+
+    val {name = (name, pos), imports, keywords} =
+      Thy_Header.read (Path.position master_file) text;
+    val _ = thy_name <> name andalso
+      error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name ^ Position.here pos);
+  in
+   {master = (master_file, SHA1.digest text), text = text, theory_pos = pos,
+    imports = imports, keywords = keywords}
+  end;
+
+
+(* load files *)
+
+fun parse_files cmd =
+  Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
+    (case Token.get_files tok of
+      [] =>
+        let
+          val master_dir = master_directory thy;
+          val pos = Token.pos_of tok;
+          val src_paths = Keyword.command_files cmd (Path.explode name);
+        in map (Command.read_file master_dir pos) src_paths end
+    | files => map Exn.release files));
+
+fun provide (src_path, id) =
+  map_files (fn (master_dir, imports, provided) =>
+    if AList.defined (op =) provided src_path then
+      error ("Duplicate use of source file: " ^ Path.print src_path)
+    else (master_dir, imports, (src_path, id) :: provided));
+
+fun provide_parse_files cmd =
+  parse_files cmd >> (fn files => fn thy =>
+    let
+      val fs = files thy;
+      val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy;
+    in (fs, thy') end);
+
+fun load_file thy src_path =
+  let
+    val full_path = check_file (master_directory thy) src_path;
+    val text = File.read full_path;
+    val id = SHA1.digest text;
+  in ((full_path, id), text) end;
+
+fun loaded_files_current thy =
+  #provided (Files.get thy) |>
+    forall (fn (src_path, id) =>
+      (case try (load_file thy) src_path of
+        NONE => false
+      | SOME ((_, id'), _) => id = id'));
+
+(*Proof General legacy*)
+fun loaded_files thy =
+  let val {master_dir, provided, ...} = Files.get thy
+  in map (File.full_path master_dir o #1) provided end;
+
+
+(* load theory *)
+
+fun begin_theory master_dir {name, imports, keywords} parents =
+  Theory.begin_theory name parents
+  |> put_deps master_dir imports
+  |> fold Thy_Header.declare_keyword keywords;
+
+fun excursion master_dir last_timing init elements =
+  let
+    fun prepare_span span =
+      Thy_Syntax.span_content span
+      |> Command.read init master_dir []
+      |> (fn tr => Toplevel.put_timing (last_timing tr) tr);
+
+    fun element_result span_elem (st, _) =
+      let
+        val elem = Thy_Syntax.map_element prepare_span span_elem;
+        val (results, st') = Toplevel.element_result elem st;
+        val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem);
+      in (results, (st', pos')) end;
+
+    val (results, (end_state, end_pos)) =
+      fold_map element_result elements (Toplevel.toplevel, Position.none);
+
+    val thy = Toplevel.end_theory end_pos end_state;
+  in (results, thy) end;
+
+fun load_thy document last_timing update_time master_dir header text_pos text parents =
+  let
+    val time = ! Toplevel.timing;
+
+    val {name = (name, _), ...} = header;
+    val _ = Thy_Header.define_keywords header;
+
+    val lexs = Keyword.get_lexicons ();
+    val toks = Thy_Syntax.parse_tokens lexs text_pos text;
+    val spans = Thy_Syntax.parse_spans toks;
+    val elements = Thy_Syntax.parse_elements spans;
+
+    fun init () =
+      begin_theory master_dir header parents
+      |> Present.begin_theory update_time
+          (fn () => HTML.html_mode (implode o map Thy_Syntax.present_span) spans);
+
+    val _ = if time then writeln ("\n**** Starting theory " ^ quote name ^ " ****") else ();
+    val (results, thy) =
+      cond_timeit time "" (fn () => excursion master_dir last_timing init elements);
+    val _ = if time then writeln ("**** Finished theory " ^ quote name ^ " ****\n") else ();
+
+    fun present () =
+      let
+        val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results);
+        val ((minor, _), outer_syntax) = Outer_Syntax.get_syntax ();
+      in
+        if exists (Toplevel.is_skipped_proof o #2) res then
+          warning ("Cannot present theory with skipped proofs: " ^ quote name)
+        else
+          let val tex_source =
+            Thy_Output.present_thy minor Keyword.command_tags
+              (Outer_Syntax.is_markup outer_syntax) res toks
+            |> Buffer.content;
+          in if document then Present.theory_output name tex_source else () end
+      end;
+
+  in (thy, present, size text) end;
+
+
+(* antiquotations *)
+
+local
+
+fun check_path strict ctxt dir (name, pos) =
+  let
+    val _ = Context_Position.report ctxt pos Markup.language_path;
+
+    val path = Path.append dir (Path.explode name)
+      handle ERROR msg => error (msg ^ Position.here pos);
+
+    val _ = Context_Position.report ctxt pos (Markup.path (Path.smart_implode path));
+    val _ =
+      if can Path.expand path andalso File.exists path then ()
+      else
+        let
+          val path' = perhaps (try Path.expand) path;
+          val msg = "Bad file: " ^ Path.print path' ^ Position.here pos;
+        in
+          if strict then error msg
+          else
+            Context_Position.if_visible ctxt Output.report
+              (Markup.markup (Markup.bad |> Markup.properties (Position.properties_of pos)) msg)
+        end;
+  in path end;
+
+fun file_antiq strict ctxt (name, pos) =
+  let
+    val dir = master_directory (Proof_Context.theory_of ctxt);
+    val _ = check_path strict ctxt dir (name, pos);
+  in
+    space_explode "/" name
+    |> map Thy_Output.verb_text
+    |> space_implode (Thy_Output.verb_text "/" ^ "\\discretionary{}{}{}")
+  end;
+
+in
+
+val _ = Theory.setup
+ (Thy_Output.antiquotation @{binding file} (Scan.lift (Parse.position Parse.path))
+    (file_antiq true o #context) #>
+  Thy_Output.antiquotation @{binding file_unchecked} (Scan.lift (Parse.position Parse.path))
+    (file_antiq false o #context) #>
+  ML_Antiquotation.value @{binding path}
+    (Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, arg) =>
+      let val path = check_path true ctxt Path.current arg
+      in "Path.explode " ^ ML_Syntax.print_string (Path.implode path) end)));
+
+end;
+
+end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/PIDE/resources.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -0,0 +1,111 @@
+/*  Title:      Pure/PIDE/resources.scala
+    Author:     Makarius
+
+Resources for theories and auxiliary files.
+*/
+
+package isabelle
+
+
+import scala.annotation.tailrec
+
+import java.io.{File => JFile}
+
+
+object Resources
+{
+  /* paths */
+
+  def thy_path(path: Path): Path = path.ext("thy")
+
+  def is_wellformed_thy_path(str: String): Boolean =
+    try { thy_path(Path.explode(str)); true }
+    catch { case ERROR(_) => false }
+}
+
+
+class Resources(val loaded_theories: Set[String] = Set.empty, val base_syntax: Outer_Syntax)
+{
+  /* document node names */
+
+  def node_name(raw_path: Path): Document.Node.Name =
+  {
+    val path = raw_path.expand
+    val node = path.implode
+    val theory = Thy_Header.thy_name(node).getOrElse("")
+    val master_dir = if (theory == "") "" else path.dir.implode
+    Document.Node.Name(node, master_dir, theory)
+  }
+
+
+  /* file-system operations */
+
+  def append(dir: String, source_path: Path): String =
+    (Path.explode(dir) + source_path).expand.implode
+
+  def with_thy_text[A](name: Document.Node.Name, f: CharSequence => A): A =
+  {
+    val path = Path.explode(name.node)
+    if (!path.is_file) error("No such file: " + path.toString)
+
+    val text = File.read(path)
+    Symbol.decode_strict(text)
+    f(text)
+  }
+
+
+  /* theory files */
+
+  def body_files_test(syntax: Outer_Syntax, text: String): Boolean =
+    syntax.thy_load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
+
+  def body_files(syntax: Outer_Syntax, text: String): List[String] =
+  {
+    val spans = Thy_Syntax.parse_spans(syntax.scan(text))
+    spans.iterator.map(Thy_Syntax.span_files(syntax, _)).flatten.toList
+  }
+
+  def import_name(master: Document.Node.Name, s: String): Document.Node.Name =
+  {
+    val theory = Thy_Header.base_name(s)
+    if (loaded_theories(theory)) Document.Node.Name(theory + ".thy", "", theory)
+    else {
+      val path = Path.explode(s)
+      val node = append(master.master_dir, Resources.thy_path(path))
+      val master_dir = append(master.master_dir, path.dir)
+      Document.Node.Name(node, master_dir, theory)
+    }
+  }
+
+  def check_thy_text(name: Document.Node.Name, text: CharSequence): Document.Node.Header =
+  {
+    try {
+      val header = Thy_Header.read(text)
+
+      val name1 = header.name
+      if (name.theory != name1)
+        error("Bad file name " + Resources.thy_path(Path.basic(name.theory)) +
+          " for theory " + quote(name1))
+
+      val imports = header.imports.map(import_name(name, _))
+      Document.Node.Header(imports, header.keywords, Nil)
+    }
+    catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
+  }
+
+  def check_thy(name: Document.Node.Name): Document.Node.Header =
+    with_thy_text(name, check_thy_text(name, _))
+
+
+  /* theory text edits */
+
+  def text_edits(
+    reparse_limit: Int,
+    previous: Document.Version,
+    doc_blobs: Document.Blobs,
+    edits: List[Document.Edit_Text]): (Boolean, List[Document.Edit_Command], Document.Version) =
+    Thy_Syntax.text_edits(this, reparse_limit, previous, doc_blobs, edits)
+
+  def syntax_changed() { }
+}
+
--- a/src/Pure/ROOT	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/ROOT	Tue Mar 18 17:39:03 2014 +0100
@@ -161,6 +161,7 @@
     "PIDE/markup.ML"
     "PIDE/protocol.ML"
     "PIDE/query_operation.ML"
+    "PIDE/resources.ML"
     "PIDE/xml.ML"
     "PIDE/yxml.ML"
     "Proof/extraction.ML"
@@ -198,7 +199,6 @@
     "Thy/thm_deps.ML"
     "Thy/thy_header.ML"
     "Thy/thy_info.ML"
-    "Thy/thy_load.ML"
     "Thy/thy_output.ML"
     "Thy/thy_syntax.ML"
     "Tools/build.ML"
--- a/src/Pure/ROOT.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/ROOT.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -281,7 +281,7 @@
 use "Thy/present.ML";
 use "PIDE/command.ML";
 use "PIDE/query_operation.ML";
-use "Thy/thy_load.ML";
+use "PIDE/resources.ML";
 use "Thy/thy_info.ML";
 use "PIDE/document.ML";
 
--- a/src/Pure/System/session.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/System/session.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -109,7 +109,7 @@
 }
 
 
-class Session(val thy_load: Thy_Load)
+class Session(val resources: Resources)
 {
   /* global flags */
 
@@ -180,8 +180,8 @@
         case Text_Edits(previous, doc_blobs, text_edits, version_result) =>
           val prev = previous.get_finished
           val (syntax_changed, doc_edits, version) =
-            Timing.timeit("Thy_Load.text_edits", timing) {
-              thy_load.text_edits(reparse_limit, prev, doc_blobs, text_edits)
+            Timing.timeit("text_edits", timing) {
+              resources.text_edits(reparse_limit, prev, doc_blobs, text_edits)
             }
           version_result.fulfill(version)
           sender ! Change(doc_blobs, syntax_changed, doc_edits, prev, version)
@@ -216,7 +216,7 @@
   def recent_syntax(): Outer_Syntax =
   {
     val version = current_state().recent_finished.version.get_finished
-    if (version.is_init) thy_load.base_syntax
+    if (version.is_init) resources.base_syntax
     else version.syntax
   }
 
@@ -238,7 +238,7 @@
   def header_edit(name: Document.Node.Name, header: Document.Node.Header): Document.Edit_Text =
   {
     val header1 =
-      if (thy_load.loaded_theories(name.theory))
+      if (resources.loaded_theories(name.theory))
         header.error("Cannot update finished theory " + quote(name.theory))
       else header
     (name, Document.Node.Deps(header1))
@@ -401,7 +401,7 @@
       global_state >> (_.define_version(version, assignment))
       prover.get.update(previous.id, version.id, doc_edits)
 
-      if (syntax_changed) thy_load.syntax_changed()
+      if (syntax_changed) resources.syntax_changed()
     }
     //}}}
 
--- a/src/Pure/Thy/thy_info.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/Thy/thy_info.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -123,13 +123,13 @@
     SOME theory => theory
   | _ => error ("Theory loader: undefined entry for theory " ^ quote name));
 
-val get_imports = Thy_Load.imports_of o get_theory;
+val get_imports = Resources.imports_of o get_theory;
 
 (*Proof General legacy*)
 fun loaded_files name = NAMED_CRITICAL "Thy_Info" (fn () =>
   (case get_deps name of
     NONE => []
-  | SOME {master = (thy_path, _), ...} => thy_path :: Thy_Load.loaded_files (get_theory name)));
+  | SOME {master = (thy_path, _), ...} => thy_path :: Resources.loaded_files (get_theory name)));
 
 
 
@@ -284,7 +284,7 @@
 
     val text_pos = Position.put_id (Document_ID.print exec_id) (Path.position thy_path);
     val (theory, present, weight) =
-      Thy_Load.load_thy document last_timing update_time dir header text_pos text
+      Resources.load_thy document last_timing update_time dir header text_pos text
         (if name = Context.PureN then [ML_Context.the_global_context ()] else parents);
     fun commit () = update_thy deps theory;
   in
@@ -295,18 +295,18 @@
   (case lookup_deps name of
     SOME NONE => (true, NONE, Position.none, get_imports name, [])
   | NONE =>
-      let val {master, text, theory_pos, imports, keywords} = Thy_Load.check_thy dir name
+      let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name
       in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end
   | SOME (SOME {master, ...}) =>
       let
         val {master = master', text = text', theory_pos = theory_pos', imports = imports',
-          keywords = keywords'} = Thy_Load.check_thy dir name;
+          keywords = keywords'} = Resources.check_thy dir name;
         val deps' = SOME (make_deps master' imports', text');
         val current =
           #2 master = #2 master' andalso
             (case lookup_theory name of
               NONE => false
-            | SOME theory => Thy_Load.loaded_files_current theory);
+            | SOME theory => Resources.loaded_files_current theory);
       in (current, deps', theory_pos', imports', keywords') end);
 
 in
@@ -317,7 +317,7 @@
   let
     val path = Path.expand (Path.explode str);
     val name = Path.implode (Path.base path);
-    val node_name = File.full_path dir (Thy_Load.thy_path path);
+    val node_name = File.full_path dir (Resources.thy_path path);
     fun check_entry (Task (node_name', _, _)) =
           if node_name = node_name' then ()
           else
@@ -384,7 +384,7 @@
     val _ = use_theories {document = false, last_timing = K NONE, master_dir = master_dir} imports;
     val _ = Thy_Header.define_keywords header;
     val parents = map (get_theory o base_name o fst) imports;
-  in Thy_Load.begin_theory master_dir header parents end;
+  in Resources.begin_theory master_dir header parents end;
 
 
 (* register theory *)
@@ -392,8 +392,8 @@
 fun register_thy theory =
   let
     val name = Context.theory_name theory;
-    val {master, ...} = Thy_Load.check_thy (Thy_Load.master_directory theory) name;
-    val imports = Thy_Load.imports_of theory;
+    val {master, ...} = Resources.check_thy (Resources.master_directory theory) name;
+    val imports = Resources.imports_of theory;
   in
     NAMED_CRITICAL "Thy_Info" (fn () =>
      (kill_thy name;
--- a/src/Pure/Thy/thy_info.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/Thy/thy_info.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -10,7 +10,7 @@
 import java.util.concurrent.{Future => JFuture}
 
 
-class Thy_Info(thy_load: Thy_Load)
+class Thy_Info(resources: Resources)
 {
   /* messages */
 
@@ -33,9 +33,9 @@
   {
     def load_files(syntax: Outer_Syntax): List[String] =
     {
-      val string = thy_load.with_thy_text(name, _.toString)
-      if (thy_load.body_files_test(syntax, string))
-        thy_load.body_files(syntax, string)
+      val string = resources.with_thy_text(name, _.toString)
+      if (resources.body_files_test(syntax, string))
+        resources.body_files(syntax, string)
       else Nil
     }
   }
@@ -71,7 +71,7 @@
       val import_errors =
         (for {
           (theory, names) <- seen_names.iterator_list
-          if !thy_load.loaded_theories(theory)
+          if !resources.loaded_theories(theory)
           if names.length > 1
         } yield
           "Incoherent imports for theory " + quote(theory) + ":\n" +
@@ -82,10 +82,10 @@
       header_errors ::: import_errors
     }
 
-    lazy val syntax: Outer_Syntax = thy_load.base_syntax.add_keywords(keywords)
+    lazy val syntax: Outer_Syntax = resources.base_syntax.add_keywords(keywords)
 
     def loaded_theories: Set[String] =
-      (thy_load.loaded_theories /: rev_deps) { case (loaded, dep) => loaded + dep.name.theory }
+      (resources.loaded_theories /: rev_deps) { case (loaded, dep) => loaded + dep.name.theory }
 
     def load_files: List[Path] =
     {
@@ -115,13 +115,13 @@
         required_by(initiators) + Position.here(require_pos)
 
     val required1 = required + thy
-    if (required.seen_names.isDefinedAt(theory) || thy_load.loaded_theories(theory))
+    if (required.seen_names.isDefinedAt(theory) || resources.loaded_theories(theory))
       required1
     else {
       try {
         if (initiators.contains(name)) error(cycle_msg(initiators))
         val header =
-          try { thy_load.check_thy(name).cat_errors(message) }
+          try { resources.check_thy(name).cat_errors(message) }
           catch { case ERROR(msg) => cat_error(msg, message) }
         val imports = header.imports.map((_, Position.File(name.node)))
         Dep(name, header) :: require_thys(name :: initiators, required1, imports)
--- a/src/Pure/Thy/thy_load.ML	Tue Mar 18 16:45:14 2014 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,243 +0,0 @@
-(*  Title:      Pure/Thy/thy_load.ML
-    Author:     Makarius
-
-Management of theory sources and auxiliary files.
-*)
-
-signature THY_LOAD =
-sig
-  val master_directory: theory -> Path.T
-  val imports_of: theory -> (string * Position.T) list
-  val thy_path: Path.T -> Path.T
-  val check_thy: Path.T -> string ->
-   {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
-    imports: (string * Position.T) list, keywords: Thy_Header.keywords}
-  val parse_files: string -> (theory -> Token.file list) parser
-  val provide: Path.T * SHA1.digest -> theory -> theory
-  val provide_parse_files: string -> (theory -> Token.file list * theory) parser
-  val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
-  val loaded_files: theory -> Path.T list
-  val loaded_files_current: theory -> bool
-  val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
-  val load_thy: bool -> (Toplevel.transition -> Time.time option) -> int -> Path.T ->
-    Thy_Header.header -> Position.T -> string -> theory list -> theory * (unit -> unit) * int
-end;
-
-structure Thy_Load: THY_LOAD =
-struct
-
-(* manage source files *)
-
-type files =
- {master_dir: Path.T,  (*master directory of theory source*)
-  imports: (string * Position.T) list,  (*source specification of imports*)
-  provided: (Path.T * SHA1.digest) list};  (*source path, digest*)
-
-fun make_files (master_dir, imports, provided): files =
- {master_dir = master_dir, imports = imports, provided = provided};
-
-structure Files = Theory_Data
-(
-  type T = files;
-  val empty = make_files (Path.current, [], []);
-  fun extend _ = empty;
-  fun merge _ = empty;
-);
-
-fun map_files f =
-  Files.map (fn {master_dir, imports, provided} =>
-    make_files (f (master_dir, imports, provided)));
-
-
-val master_directory = #master_dir o Files.get;
-val imports_of = #imports o Files.get;
-
-fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, []));
-
-
-(* theory files *)
-
-val thy_path = Path.ext "thy";
-
-fun check_file dir file = File.check_file (File.full_path dir file);
-
-fun check_thy dir thy_name =
-  let
-    val path = thy_path (Path.basic thy_name);
-    val master_file = check_file dir path;
-    val text = File.read master_file;
-
-    val {name = (name, pos), imports, keywords} =
-      Thy_Header.read (Path.position master_file) text;
-    val _ = thy_name <> name andalso
-      error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name ^ Position.here pos);
-  in
-   {master = (master_file, SHA1.digest text), text = text, theory_pos = pos,
-    imports = imports, keywords = keywords}
-  end;
-
-
-(* load files *)
-
-fun parse_files cmd =
-  Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
-    (case Token.get_files tok of
-      [] =>
-        let
-          val master_dir = master_directory thy;
-          val pos = Token.pos_of tok;
-          val src_paths = Keyword.command_files cmd (Path.explode name);
-        in map (Command.read_file master_dir pos) src_paths end
-    | files => map Exn.release files));
-
-fun provide (src_path, id) =
-  map_files (fn (master_dir, imports, provided) =>
-    if AList.defined (op =) provided src_path then
-      error ("Duplicate use of source file: " ^ Path.print src_path)
-    else (master_dir, imports, (src_path, id) :: provided));
-
-fun provide_parse_files cmd =
-  parse_files cmd >> (fn files => fn thy =>
-    let
-      val fs = files thy;
-      val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy;
-    in (fs, thy') end);
-
-fun load_file thy src_path =
-  let
-    val full_path = check_file (master_directory thy) src_path;
-    val text = File.read full_path;
-    val id = SHA1.digest text;
-  in ((full_path, id), text) end;
-
-fun loaded_files_current thy =
-  #provided (Files.get thy) |>
-    forall (fn (src_path, id) =>
-      (case try (load_file thy) src_path of
-        NONE => false
-      | SOME ((_, id'), _) => id = id'));
-
-(*Proof General legacy*)
-fun loaded_files thy =
-  let val {master_dir, provided, ...} = Files.get thy
-  in map (File.full_path master_dir o #1) provided end;
-
-
-(* load theory *)
-
-fun begin_theory master_dir {name, imports, keywords} parents =
-  Theory.begin_theory name parents
-  |> put_deps master_dir imports
-  |> fold Thy_Header.declare_keyword keywords;
-
-fun excursion master_dir last_timing init elements =
-  let
-    fun prepare_span span =
-      Thy_Syntax.span_content span
-      |> Command.read init master_dir []
-      |> (fn tr => Toplevel.put_timing (last_timing tr) tr);
-
-    fun element_result span_elem (st, _) =
-      let
-        val elem = Thy_Syntax.map_element prepare_span span_elem;
-        val (results, st') = Toplevel.element_result elem st;
-        val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem);
-      in (results, (st', pos')) end;
-
-    val (results, (end_state, end_pos)) =
-      fold_map element_result elements (Toplevel.toplevel, Position.none);
-
-    val thy = Toplevel.end_theory end_pos end_state;
-  in (results, thy) end;
-
-fun load_thy document last_timing update_time master_dir header text_pos text parents =
-  let
-    val time = ! Toplevel.timing;
-
-    val {name = (name, _), ...} = header;
-    val _ = Thy_Header.define_keywords header;
-
-    val lexs = Keyword.get_lexicons ();
-    val toks = Thy_Syntax.parse_tokens lexs text_pos text;
-    val spans = Thy_Syntax.parse_spans toks;
-    val elements = Thy_Syntax.parse_elements spans;
-
-    fun init () =
-      begin_theory master_dir header parents
-      |> Present.begin_theory update_time
-          (fn () => HTML.html_mode (implode o map Thy_Syntax.present_span) spans);
-
-    val _ = if time then writeln ("\n**** Starting theory " ^ quote name ^ " ****") else ();
-    val (results, thy) =
-      cond_timeit time "" (fn () => excursion master_dir last_timing init elements);
-    val _ = if time then writeln ("**** Finished theory " ^ quote name ^ " ****\n") else ();
-
-    fun present () =
-      let
-        val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results);
-        val ((minor, _), outer_syntax) = Outer_Syntax.get_syntax ();
-      in
-        if exists (Toplevel.is_skipped_proof o #2) res then
-          warning ("Cannot present theory with skipped proofs: " ^ quote name)
-        else
-          let val tex_source =
-            Thy_Output.present_thy minor Keyword.command_tags
-              (Outer_Syntax.is_markup outer_syntax) res toks
-            |> Buffer.content;
-          in if document then Present.theory_output name tex_source else () end
-      end;
-
-  in (thy, present, size text) end;
-
-
-(* antiquotations *)
-
-local
-
-fun check_path strict ctxt dir (name, pos) =
-  let
-    val _ = Context_Position.report ctxt pos Markup.language_path;
-
-    val path = Path.append dir (Path.explode name)
-      handle ERROR msg => error (msg ^ Position.here pos);
-
-    val _ = Context_Position.report ctxt pos (Markup.path (Path.smart_implode path));
-    val _ =
-      if can Path.expand path andalso File.exists path then ()
-      else
-        let
-          val path' = perhaps (try Path.expand) path;
-          val msg = "Bad file: " ^ Path.print path' ^ Position.here pos;
-        in
-          if strict then error msg
-          else
-            Context_Position.if_visible ctxt Output.report
-              (Markup.markup (Markup.bad |> Markup.properties (Position.properties_of pos)) msg)
-        end;
-  in path end;
-
-fun file_antiq strict ctxt (name, pos) =
-  let
-    val dir = master_directory (Proof_Context.theory_of ctxt);
-    val _ = check_path strict ctxt dir (name, pos);
-  in
-    space_explode "/" name
-    |> map Thy_Output.verb_text
-    |> space_implode (Thy_Output.verb_text "/" ^ "\\discretionary{}{}{}")
-  end;
-
-in
-
-val _ = Theory.setup
- (Thy_Output.antiquotation @{binding file} (Scan.lift (Parse.position Parse.path))
-    (file_antiq true o #context) #>
-  Thy_Output.antiquotation @{binding file_unchecked} (Scan.lift (Parse.position Parse.path))
-    (file_antiq false o #context) #>
-  ML_Antiquotation.value @{binding path}
-    (Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, arg) =>
-      let val path = check_path true ctxt Path.current arg
-      in "Path.explode " ^ ML_Syntax.print_string (Path.implode path) end)));
-
-end;
-
-end;
--- a/src/Pure/Thy/thy_load.scala	Tue Mar 18 16:45:14 2014 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,111 +0,0 @@
-/*  Title:      Pure/Thy/thy_load.scala
-    Author:     Makarius
-
-Primitives for loading theory files.
-*/
-
-package isabelle
-
-
-import scala.annotation.tailrec
-
-import java.io.{File => JFile}
-
-
-object Thy_Load
-{
-  /* paths */
-
-  def thy_path(path: Path): Path = path.ext("thy")
-
-  def is_wellformed(str: String): Boolean =
-    try { thy_path(Path.explode(str)); true }
-    catch { case ERROR(_) => false }
-}
-
-
-class Thy_Load(val loaded_theories: Set[String] = Set.empty, val base_syntax: Outer_Syntax)
-{
-  /* document node names */
-
-  def node_name(raw_path: Path): Document.Node.Name =
-  {
-    val path = raw_path.expand
-    val node = path.implode
-    val theory = Thy_Header.thy_name(node).getOrElse("")
-    val master_dir = if (theory == "") "" else path.dir.implode
-    Document.Node.Name(node, master_dir, theory)
-  }
-
-
-  /* file-system operations */
-
-  def append(dir: String, source_path: Path): String =
-    (Path.explode(dir) + source_path).expand.implode
-
-  def with_thy_text[A](name: Document.Node.Name, f: CharSequence => A): A =
-  {
-    val path = Path.explode(name.node)
-    if (!path.is_file) error("No such file: " + path.toString)
-
-    val text = File.read(path)
-    Symbol.decode_strict(text)
-    f(text)
-  }
-
-
-  /* theory files */
-
-  def body_files_test(syntax: Outer_Syntax, text: String): Boolean =
-    syntax.thy_load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
-
-  def body_files(syntax: Outer_Syntax, text: String): List[String] =
-  {
-    val spans = Thy_Syntax.parse_spans(syntax.scan(text))
-    spans.iterator.map(Thy_Syntax.span_files(syntax, _)).flatten.toList
-  }
-
-  def import_name(master: Document.Node.Name, s: String): Document.Node.Name =
-  {
-    val theory = Thy_Header.base_name(s)
-    if (loaded_theories(theory)) Document.Node.Name(theory + ".thy", "", theory)
-    else {
-      val path = Path.explode(s)
-      val node = append(master.master_dir, Thy_Load.thy_path(path))
-      val master_dir = append(master.master_dir, path.dir)
-      Document.Node.Name(node, master_dir, theory)
-    }
-  }
-
-  def check_thy_text(name: Document.Node.Name, text: CharSequence): Document.Node.Header =
-  {
-    try {
-      val header = Thy_Header.read(text)
-
-      val name1 = header.name
-      if (name.theory != name1)
-        error("Bad file name " + Thy_Load.thy_path(Path.basic(name.theory)) +
-          " for theory " + quote(name1))
-
-      val imports = header.imports.map(import_name(name, _))
-      Document.Node.Header(imports, header.keywords, Nil)
-    }
-    catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
-  }
-
-  def check_thy(name: Document.Node.Name): Document.Node.Header =
-    with_thy_text(name, check_thy_text(name, _))
-
-
-  /* theory text edits */
-
-  def text_edits(
-    reparse_limit: Int,
-    previous: Document.Version,
-    doc_blobs: Document.Blobs,
-    edits: List[Document.Edit_Text]): (Boolean, List[Document.Edit_Command], Document.Version) =
-    Thy_Syntax.text_edits(this, reparse_limit, previous, doc_blobs, edits)
-
-  def syntax_changed() { }
-}
-
--- a/src/Pure/Thy/thy_syntax.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/Thy/thy_syntax.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -257,7 +257,7 @@
     }
 
   def resolve_files(
-      thy_load: Thy_Load,
+      resources: Resources,
       syntax: Outer_Syntax,
       node_name: Document.Node.Name,
       span: List[Token],
@@ -267,7 +267,7 @@
     span_files(syntax, span).map(file_name =>
       Exn.capture {
         val name =
-          Document.Node.Name(thy_load.append(node_name.master_dir, Path.explode(file_name)))
+          Document.Node.Name(resources.append(node_name.master_dir, Path.explode(file_name)))
         val blob = doc_blobs.get(name).map(blob => ((blob.bytes.sha1_digest, blob.file)))
         (name, blob)
       })
@@ -289,7 +289,7 @@
   }
 
   private def reparse_spans(
-    thy_load: Thy_Load,
+    resources: Resources,
     syntax: Outer_Syntax,
     doc_blobs: Document.Blobs,
     name: Document.Node.Name,
@@ -299,7 +299,7 @@
     val cmds0 = commands.iterator(first, last).toList
     val blobs_spans0 =
       parse_spans(syntax.scan(cmds0.iterator.map(_.source).mkString)).
-        map(span => (resolve_files(thy_load, syntax, name, span, doc_blobs), span))
+        map(span => (resolve_files(resources, syntax, name, span, doc_blobs), span))
 
     val (cmds1, blobs_spans1) = chop_common(cmds0, blobs_spans0)
 
@@ -324,7 +324,7 @@
 
   // FIXME somewhat slow
   private def recover_spans(
-    thy_load: Thy_Load,
+    resources: Resources,
     syntax: Outer_Syntax,
     doc_blobs: Document.Blobs,
     name: Document.Node.Name,
@@ -342,7 +342,7 @@
         case Some(first_unparsed) =>
           val first = next_invisible_command(cmds.reverse, first_unparsed)
           val last = next_invisible_command(cmds, first_unparsed)
-          recover(reparse_spans(thy_load, syntax, doc_blobs, name, cmds, first, last))
+          recover(reparse_spans(resources, syntax, doc_blobs, name, cmds, first, last))
         case None => cmds
       }
     recover(commands)
@@ -352,7 +352,7 @@
   /* consolidate unfinished spans */
 
   private def consolidate_spans(
-    thy_load: Thy_Load,
+    resources: Resources,
     syntax: Outer_Syntax,
     doc_blobs: Document.Blobs,
     reparse_limit: Int,
@@ -374,7 +374,7 @@
                 last = it.next
                 i += last.length
               }
-              reparse_spans(thy_load, syntax, doc_blobs, name, commands, first_unfinished, last)
+              reparse_spans(resources, syntax, doc_blobs, name, commands, first_unfinished, last)
             case None => commands
           }
         case None => commands
@@ -396,7 +396,7 @@
   }
 
   private def text_edit(
-    thy_load: Thy_Load,
+    resources: Resources,
     syntax: Outer_Syntax,
     doc_blobs: Document.Blobs,
     reparse_limit: Int,
@@ -413,7 +413,7 @@
           val commands0 = node.commands
           val commands1 = edit_text(text_edits, commands0)
           val commands2 =
-            recover_spans(thy_load, syntax, doc_blobs, name, node.perspective.visible, commands1)
+            recover_spans(resources, syntax, doc_blobs, name, node.perspective.visible, commands1)
           node.update_commands(commands2)
         }
 
@@ -426,13 +426,13 @@
         if (node.same_perspective(perspective)) node
         else
           node.update_perspective(perspective).update_commands(
-            consolidate_spans(thy_load, syntax, doc_blobs, reparse_limit,
+            consolidate_spans(resources, syntax, doc_blobs, reparse_limit,
               name, visible, node.commands))
     }
   }
 
   def text_edits(
-      thy_load: Thy_Load,
+      resources: Resources,
       reparse_limit: Int,
       previous: Document.Version,
       doc_blobs: Document.Blobs,
@@ -440,7 +440,7 @@
     : (Boolean, List[Document.Edit_Command], Document.Version) =
   {
     val ((syntax, syntax_changed), reparse0, nodes0, doc_edits0) =
-      header_edits(thy_load.base_syntax, previous, edits)
+      header_edits(resources.base_syntax, previous, edits)
 
     if (edits.isEmpty)
       (false, Nil, Document.Version.make(syntax, previous.nodes))
@@ -469,10 +469,10 @@
           val node1 =
             if (reparse_set(name) && !commands.isEmpty)
               node.update_commands(
-                reparse_spans(thy_load, syntax, doc_blobs,
+                reparse_spans(resources, syntax, doc_blobs,
                   name, commands, commands.head, commands.last))
             else node
-          val node2 = (node1 /: edits)(text_edit(thy_load, syntax, doc_blobs, reparse_limit, _, _))
+          val node2 = (node1 /: edits)(text_edit(resources, syntax, doc_blobs, reparse_limit, _, _))
 
           if (!(node.same_perspective(node2.perspective)))
             doc_edits += (name -> node2.perspective)
--- a/src/Pure/Tools/build.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/Tools/build.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -419,8 +419,8 @@
                   val parent = deps(parent_name)
                   (parent.loaded_theories, parent.syntax)
               }
-            val thy_load = new Thy_Load(preloaded, parent_syntax)
-            val thy_info = new Thy_Info(thy_load)
+            val resources = new Resources(preloaded, parent_syntax)
+            val thy_info = new Thy_Info(resources)
 
             if (verbose || list_files) {
               val groups =
@@ -432,7 +432,7 @@
             val thy_deps =
               thy_info.dependencies(
                 info.theories.map(_._2).flatten.
-                  map(thy => (thy_load.node_name(info.dir + Thy_Load.thy_path(thy)), info.pos)))
+                  map(thy => (resources.node_name(info.dir + Resources.thy_path(thy)), info.pos)))
 
             thy_deps.errors match {
               case Nil =>
--- a/src/Pure/Tools/proof_general.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/Tools/proof_general.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -337,7 +337,7 @@
       Thy_Info.register_thy (Toplevel.end_theory Position.none (Isar.state ()))
         handle ERROR msg =>
           (warning (cat_lines ["Failed to register theory: " ^ quote name, msg]);
-            tell_file_retracted (Thy_Load.thy_path (Path.basic name)))
+            tell_file_retracted (Resources.thy_path (Path.basic name)))
     val _ = Isar.init ();
   in () end;
 
--- a/src/Pure/build-jars	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/build-jars	Tue Mar 18 17:39:03 2014 +0100
@@ -53,6 +53,7 @@
   PIDE/markup_tree.scala
   PIDE/protocol.scala
   PIDE/query_operation.scala
+  PIDE/resources.scala
   PIDE/text.scala
   PIDE/xml.scala
   PIDE/yxml.scala
@@ -73,7 +74,6 @@
   Thy/present.scala
   Thy/thy_header.scala
   Thy/thy_info.scala
-  Thy/thy_load.scala
   Thy/thy_syntax.scala
   Tools/build.scala
   Tools/doc.scala
--- a/src/Pure/pure_syn.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Pure/pure_syn.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -18,10 +18,10 @@
 val _ =
   Outer_Syntax.command
     (("ML_file", Keyword.tag_ml Keyword.thy_load), @{here}) "ML text from file"
-    (Thy_Load.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
+    (Resources.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
         let
           val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy);
-          val provide = Thy_Load.provide (src_path, digest);
+          val provide = Resources.provide (src_path, digest);
           val source = {delimited = true, text = cat_lines lines, pos = pos};
         in
           gthy
--- a/src/Tools/Code/code_runtime.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/Code/code_runtime.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -325,7 +325,7 @@
       let
         val preamble =
           "(* Generated from " ^
-            Path.implode (Thy_Load.thy_path (Path.basic (Context.theory_name thy))) ^
+            Path.implode (Resources.thy_path (Path.basic (Context.theory_name thy))) ^
           "; DO NOT EDIT! *)";
         val _ = File.write (Path.explode file_name) (preamble ^ "\n\n" ^ code);
       in
--- a/src/Tools/Code/code_target.ML	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/Code/code_target.ML	Tue Mar 18 17:39:03 2014 +0100
@@ -380,7 +380,8 @@
   | assert_module_name module_name = module_name;
 
 fun using_master_directory ctxt =
-  Option.map (Path.append (File.pwd ()) o Path.append (Thy_Load.master_directory (Proof_Context.theory_of ctxt)));
+  Option.map (Path.append (File.pwd ()) o
+    Path.append (Resources.master_directory (Proof_Context.theory_of ctxt)));
 
 in
 
--- a/src/Tools/jEdit/lib/Tools/jedit	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/lib/Tools/jedit	Tue Mar 18 17:39:03 2014 +0100
@@ -27,7 +27,7 @@
   "src/jedit_editor.scala"
   "src/jedit_lib.scala"
   "src/jedit_options.scala"
-  "src/jedit_thy_load.scala"
+  "src/jedit_resources.scala"
   "src/monitor_dockable.scala"
   "src/output_dockable.scala"
   "src/plugin.scala"
--- a/src/Tools/jEdit/src/document_model.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/document_model.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -70,7 +70,7 @@
     if (is_theory) {
       JEdit_Lib.buffer_lock(buffer) {
         Exn.capture {
-          PIDE.thy_load.check_thy_text(node_name, buffer.getSegment(0, buffer.getLength))
+          PIDE.resources.check_thy_text(node_name, buffer.getSegment(0, buffer.getLength))
         } match {
           case Exn.Res(header) => header
           case Exn.Exn(exn) => Document.Node.bad_header(Exn.message(exn))
@@ -150,7 +150,7 @@
           _blob match {
             case Some(x) => x
             case None =>
-              val bytes = PIDE.thy_load.file_content(buffer)
+              val bytes = PIDE.resources.file_content(buffer)
               val file = new Command.File(node_name.node, buffer.getSegment(0, buffer.getLength))
               _blob = Some((bytes, file))
               (bytes, file)
--- a/src/Tools/jEdit/src/find_dockable.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/find_dockable.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -131,7 +131,7 @@
 
   private val context_entries =
     new Context_Entry("", "current context") ::
-      PIDE.thy_load.loaded_theories.toList.sorted.map(name => Context_Entry(name, name))
+      PIDE.resources.loaded_theories.toList.sorted.map(name => Context_Entry(name, name))
 
   private val context = new ComboBox[Context_Entry](context_entries) {
     tooltip = "Search in pre-loaded theory (default: context of current command)"
--- a/src/Tools/jEdit/src/isabelle_sidekick.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/isabelle_sidekick.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -133,7 +133,7 @@
 
 
 class Isabelle_Sidekick_Default extends
-  Isabelle_Sidekick_Structure("isabelle", PIDE.thy_load.theory_node_name)
+  Isabelle_Sidekick_Structure("isabelle", PIDE.resources.theory_node_name)
 
 
 class Isabelle_Sidekick_Options extends
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/src/jedit_resources.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -0,0 +1,123 @@
+/*  Title:      Tools/jEdit/src/jedit_resources.scala
+    Author:     Makarius
+
+Resources for theories and auxiliary files, based on jEdit buffer
+content and virtual file-systems.
+*/
+
+package isabelle.jedit
+
+
+import isabelle._
+
+import java.io.{File => JFile, IOException, ByteArrayOutputStream}
+import javax.swing.text.Segment
+
+import org.gjt.sp.jedit.io.{VFS, FileVFS, VFSFile, VFSManager}
+import org.gjt.sp.jedit.MiscUtilities
+import org.gjt.sp.jedit.{jEdit, View, Buffer}
+import org.gjt.sp.jedit.bufferio.BufferIORequest
+
+
+class JEdit_Resources(loaded_theories: Set[String] = Set.empty, base_syntax: Outer_Syntax)
+  extends Resources(loaded_theories, base_syntax)
+{
+  /* document node names */
+
+  def node_name(buffer: Buffer): Document.Node.Name =
+  {
+    val node = JEdit_Lib.buffer_name(buffer)
+    val theory = Thy_Header.thy_name(node).getOrElse("")
+    val master_dir = if (theory == "") "" else buffer.getDirectory
+    Document.Node.Name(node, master_dir, theory)
+  }
+
+  def theory_node_name(buffer: Buffer): Option[Document.Node.Name] =
+  {
+    val name = node_name(buffer)
+    if (name.is_theory) Some(name) else None
+  }
+
+
+  /* file-system operations */
+
+  override def append(dir: String, source_path: Path): String =
+  {
+    val path = source_path.expand
+    if (dir == "" || path.is_absolute) Isabelle_System.platform_path(path)
+    else {
+      val vfs = VFSManager.getVFSForPath(dir)
+      if (vfs.isInstanceOf[FileVFS])
+        MiscUtilities.resolveSymlinks(
+          vfs.constructPath(dir, Isabelle_System.platform_path(path)))
+      else vfs.constructPath(dir, Isabelle_System.standard_path(path))
+    }
+  }
+
+  override def with_thy_text[A](name: Document.Node.Name, f: CharSequence => A): A =
+  {
+    Swing_Thread.now {
+      JEdit_Lib.jedit_buffer(name.node) match {
+        case Some(buffer) =>
+          JEdit_Lib.buffer_lock(buffer) {
+            Some(f(buffer.getSegment(0, buffer.getLength)))
+          }
+        case None => None
+      }
+    } getOrElse {
+      val file = new JFile(name.node)  // FIXME load URL via jEdit VFS (!?)
+      if (!file.exists || !file.isFile) error("No such file: " + quote(file.toString))
+      f(File.read(file))
+    }
+  }
+
+  def check_file(view: View, path: String): Boolean =
+  {
+    val vfs = VFSManager.getVFSForPath(path)
+    val session = vfs.createVFSSession(path, view)
+
+    try {
+      session != null && {
+        try {
+          val file = vfs._getFile(session, path, view)
+          file != null && file.isReadable && file.getType == VFSFile.FILE
+        }
+        catch { case _: IOException => false }
+      }
+    }
+    finally {
+      try { vfs._endVFSSession(session, view) }
+      catch { case _: IOException => }
+    }
+  }
+
+
+  /* file content */
+
+  def file_content(buffer: Buffer): Bytes =
+  {
+    val path = buffer.getPath
+    val vfs = VFSManager.getVFSForPath(path)
+    val content =
+      new BufferIORequest(null, buffer, null, vfs, path) {
+        def _run() { }
+        def apply(): Bytes =
+        {
+          val out =
+            new ByteArrayOutputStream(buffer.getLength + 1) {
+              def content(): Bytes = Bytes(this.buf, 0, this.count)
+            }
+          write(buffer, out)
+          out.content()
+        }
+      }
+    content()
+  }
+
+
+  /* theory text edits */
+
+  override def syntax_changed(): Unit =
+    Swing_Thread.later { jEdit.propertiesChanged() }
+}
+
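For front-ends other than jEdit the same extension pattern applies: subclass Resources and override the file-system hooks. A hypothetical sketch (the class name and overrides are illustrative, not part of this changeset):

    // minimal specialisation of Resources, analogous to JEdit_Resources above
    class My_Resources(loaded_theories: Set[String], base_syntax: Outer_Syntax)
      extends Resources(loaded_theories, base_syntax)
    {
      // resolve paths against a custom file-system instead of the default
      override def append(dir: String, source_path: Path): String =
        super.append(dir, source_path)

      // react to changed outer syntax, e.g. refresh editor keyword tables
      override def syntax_changed(): Unit = ()
    }
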
--- a/src/Tools/jEdit/src/jedit_thy_load.scala	Tue Mar 18 16:45:14 2014 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,121 +0,0 @@
-/*  Title:      Tools/jEdit/src/jedit_thy_load.scala
-    Author:     Makarius
-
-Primitives for loading theory files, based on jEdit buffer content.
-*/
-
-package isabelle.jedit
-
-
-import isabelle._
-
-import java.io.{File => JFile, IOException, ByteArrayOutputStream}
-import javax.swing.text.Segment
-
-import org.gjt.sp.jedit.io.{VFS, FileVFS, VFSFile, VFSManager}
-import org.gjt.sp.jedit.MiscUtilities
-import org.gjt.sp.jedit.{jEdit, View, Buffer}
-import org.gjt.sp.jedit.bufferio.BufferIORequest
-
-class JEdit_Thy_Load(loaded_theories: Set[String] = Set.empty, base_syntax: Outer_Syntax)
-  extends Thy_Load(loaded_theories, base_syntax)
-{
-  /* document node names */
-
-  def node_name(buffer: Buffer): Document.Node.Name =
-  {
-    val node = JEdit_Lib.buffer_name(buffer)
-    val theory = Thy_Header.thy_name(node).getOrElse("")
-    val master_dir = if (theory == "") "" else buffer.getDirectory
-    Document.Node.Name(node, master_dir, theory)
-  }
-
-  def theory_node_name(buffer: Buffer): Option[Document.Node.Name] =
-  {
-    val name = node_name(buffer)
-    if (name.is_theory) Some(name) else None
-  }
-
-
-  /* file-system operations */
-
-  override def append(dir: String, source_path: Path): String =
-  {
-    val path = source_path.expand
-    if (dir == "" || path.is_absolute) Isabelle_System.platform_path(path)
-    else {
-      val vfs = VFSManager.getVFSForPath(dir)
-      if (vfs.isInstanceOf[FileVFS])
-        MiscUtilities.resolveSymlinks(
-          vfs.constructPath(dir, Isabelle_System.platform_path(path)))
-      else vfs.constructPath(dir, Isabelle_System.standard_path(path))
-    }
-  }
-
-  override def with_thy_text[A](name: Document.Node.Name, f: CharSequence => A): A =
-  {
-    Swing_Thread.now {
-      JEdit_Lib.jedit_buffer(name.node) match {
-        case Some(buffer) =>
-          JEdit_Lib.buffer_lock(buffer) {
-            Some(f(buffer.getSegment(0, buffer.getLength)))
-          }
-        case None => None
-      }
-    } getOrElse {
-      val file = new JFile(name.node)  // FIXME load URL via jEdit VFS (!?)
-      if (!file.exists || !file.isFile) error("No such file: " + quote(file.toString))
-      f(File.read(file))
-    }
-  }
-
-  def check_file(view: View, path: String): Boolean =
-  {
-    val vfs = VFSManager.getVFSForPath(path)
-    val session = vfs.createVFSSession(path, view)
-
-    try {
-      session != null && {
-        try {
-          val file = vfs._getFile(session, path, view)
-          file != null && file.isReadable && file.getType == VFSFile.FILE
-        }
-        catch { case _: IOException => false }
-      }
-    }
-    finally {
-      try { vfs._endVFSSession(session, view) }
-      catch { case _: IOException => }
-    }
-  }
-
-
-  /* file content */
-
-  def file_content(buffer: Buffer): Bytes =
-  {
-    val path = buffer.getPath
-    val vfs = VFSManager.getVFSForPath(path)
-    val content =
-      new BufferIORequest(null, buffer, null, vfs, path) {
-        def _run() { }
-        def apply(): Bytes =
-        {
-          val out =
-            new ByteArrayOutputStream(buffer.getLength + 1) {
-              def content(): Bytes = Bytes(this.buf, 0, this.count)
-            }
-          write(buffer, out)
-          out.content()
-        }
-      }
-    content()
-  }
-
-
-  /* theory text edits */
-
-  override def syntax_changed(): Unit =
-    Swing_Thread.later { jEdit.propertiesChanged() }
-}
-
--- a/src/Tools/jEdit/src/plugin.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/plugin.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -36,12 +36,12 @@
   @volatile var startup_notified = false
 
   @volatile var plugin: Plugin = null
-  @volatile var session: Session = new Session(new JEdit_Thy_Load(Set.empty, Outer_Syntax.empty))
+  @volatile var session: Session = new Session(new JEdit_Resources(Set.empty, Outer_Syntax.empty))
 
   def options_changed() { plugin.options_changed() }
 
-  def thy_load(): JEdit_Thy_Load =
-    session.thy_load.asInstanceOf[JEdit_Thy_Load]
+  def resources(): JEdit_Resources =
+    session.resources.asInstanceOf[JEdit_Resources]
 
   lazy val editor = new JEdit_Editor
 
@@ -109,7 +109,7 @@
         if buffer != null && !buffer.isLoading && !buffer.getBooleanProperty(Buffer.GZIPPED)
       } {
         JEdit_Lib.buffer_lock(buffer) {
-          val node_name = thy_load.node_name(buffer)
+          val node_name = resources.node_name(buffer)
           val model =
             document_model(buffer) match {
               case Some(model) if model.node_name == node_name => model
@@ -202,10 +202,10 @@
               if model.is_theory
             } yield (model.node_name, Position.none)
 
-          val thy_info = new Thy_Info(PIDE.thy_load)
+          val thy_info = new Thy_Info(PIDE.resources)
           // FIXME avoid I/O in Swing thread!?!
           val files = thy_info.dependencies(thys).deps.map(_.name.node).
-            filter(file => !loaded_buffer(file) && PIDE.thy_load.check_file(view, file))
+            filter(file => !loaded_buffer(file) && PIDE.resources.check_file(view, file))
 
           if (!files.isEmpty) {
             if (PIDE.options.bool("jedit_auto_load")) {
@@ -349,9 +349,9 @@
       JEdit_Lib.jedit_text_areas.foreach(Completion_Popup.Text_Area.init _)
 
       val content = Isabelle_Logic.session_content(false)
-      val thy_load = new JEdit_Thy_Load(content.loaded_theories, content.syntax)
+      val resources = new JEdit_Resources(content.loaded_theories, content.syntax)
 
-      PIDE.session = new Session(thy_load) {
+      PIDE.session = new Session(resources) {
         override def output_delay = PIDE.options.seconds("editor_output_delay")
         override def reparse_limit = PIDE.options.int("editor_reparse_limit")
       }
--- a/src/Tools/jEdit/src/rendering.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/rendering.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -336,7 +336,7 @@
         {
           case (links, Text.Info(info_range, XML.Elem(Markup.Path(name), _)))
           if Path.is_valid(name) =>
-            val jedit_file = PIDE.thy_load.append(snapshot.node_name.master_dir, Path.explode(name))
+            val jedit_file = PIDE.resources.append(snapshot.node_name.master_dir, Path.explode(name))
             val link = PIDE.editor.hyperlink_file(jedit_file)
             Some(links :+ Text.Info(snapshot.convert(info_range), link))
 
@@ -470,7 +470,7 @@
             Some(add(prev, r, (true, XML.Text(txt1 + txt2))))
           case (prev, Text.Info(r, XML.Elem(Markup.Path(name), _)))
           if Path.is_valid(name) =>
-            val jedit_file = PIDE.thy_load.append(snapshot.node_name.master_dir, Path.explode(name))
+            val jedit_file = PIDE.resources.append(snapshot.node_name.master_dir, Path.explode(name))
             val text = "path " + quote(name) + "\nfile " + quote(jedit_file)
             Some(add(prev, r, (true, XML.Text(text))))
           case (prev, Text.Info(r, XML.Elem(Markup.Url(name), _))) =>
--- a/src/Tools/jEdit/src/theories_dockable.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/theories_dockable.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -192,7 +192,7 @@
       }).filter(_._1.is_theory)
     val nodes_status1 =
       (nodes_status /: iterator)({ case (status, (name, node)) =>
-          if (PIDE.thy_load.loaded_theories(name.theory)) status
+          if (PIDE.resources.loaded_theories(name.theory)) status
           else status + (name -> Protocol.node_status(snapshot.state, snapshot.version, node)) })
 
     if (nodes_status != nodes_status1) {
--- a/src/Tools/jEdit/src/timing_dockable.scala	Tue Mar 18 16:45:14 2014 +0100
+++ b/src/Tools/jEdit/src/timing_dockable.scala	Tue Mar 18 17:39:03 2014 +0100
@@ -186,7 +186,7 @@
       }
     val nodes_timing1 =
       (nodes_timing /: iterator)({ case (timing1, (name, node)) =>
-          if (PIDE.thy_load.loaded_theories(name.theory)) timing1
+          if (PIDE.resources.loaded_theories(name.theory)) timing1
           else {
             val node_timing =
               Protocol.node_timing(snapshot.state, snapshot.version, node, timing_threshold)