changeset:   51296:77e71d54efda
parent:      51292:8a635bf2c86c
parent:      51295:71fc3776c453
child:       51297:d9f3d91208af
author:      wenzelm
date:        Wed, 27 Feb 2013 17:44:08 +0100
description:
merged
--- a/NEWS	Wed Feb 27 13:48:15 2013 +0100
+++ b/NEWS	Wed Feb 27 17:44:08 2013 +0100
@@ -4,6 +4,16 @@
 New in this Isabelle version
 ----------------------------
 
+*** General ***
+
+* Discontinued obsolete 'uses' within theory header.  Note that
+commands like 'ML_file' work without separate declaration of file
+dependencies.  Minor INCOMPATIBILITY.
+
+* Discontinued redundant 'use' command, which was superseded by
+'ML_file' in Isabelle2013.  Minor INCOMPATIBILITY.
+
+
 *** HOL ***
 
 * Discontinued obsolete src/HOL/IsaMakefile (considered legacy since
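
To illustrate the migration described in the NEWS entries above (a minimal sketch; the theory and file names are hypothetical): a theory that formerly declared its ML files via 'uses' in the header and loaded them with 'use' in the body now simply invokes 'ML_file' in the body, with no separate header declaration.

    (* old style, up to Isabelle2012 (sketch, hypothetical names) *)
    theory Foo
    imports Main
    uses ("helper.ML")
    begin

    use "helper.ML"

    end

    (* new style, Isabelle2013 and later *)
    theory Foo
    imports Main
    begin

    ML_file "helper.ML"

    end
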
--- a/etc/isar-keywords-ZF.el	Wed Feb 27 13:48:15 2013 +0100
+++ b/etc/isar-keywords-ZF.el	Wed Feb 27 17:44:08 2013 +0100
@@ -200,7 +200,6 @@
     "undos_proof"
     "unfolding"
     "unused_thms"
-    "use"
     "use_thy"
     "using"
     "welcome"
@@ -249,7 +248,6 @@
     "type_elims"
     "type_intros"
     "unchecked"
-    "uses"
     "where"))
 
 (defconst isar-keywords-control
@@ -408,8 +406,7 @@
     "type_notation"
     "type_synonym"
     "typed_print_translation"
-    "typedecl"
-    "use"))
+    "typedecl"))
 
 (defconst isar-keywords-theory-script
   '("inductive_cases"))
--- a/etc/isar-keywords.el	Wed Feb 27 13:48:15 2013 +0100
+++ b/etc/isar-keywords.el	Wed Feb 27 17:44:08 2013 +0100
@@ -288,7 +288,6 @@
     "undos_proof"
     "unfolding"
     "unused_thms"
-    "use"
     "use_thy"
     "using"
     "value"
@@ -346,7 +345,6 @@
     "structure"
     "unchecked"
     "unsafe"
-    "uses"
     "where"))
 
 (defconst isar-keywords-control
@@ -574,8 +572,7 @@
     "type_notation"
     "type_synonym"
     "typed_print_translation"
-    "typedecl"
-    "use"))
+    "typedecl"))
 
 (defconst isar-keywords-theory-script
   '("inductive_cases"
--- a/src/Doc/IsarImplementation/ML.thy	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Doc/IsarImplementation/ML.thy	Wed Feb 27 17:44:08 2013 +0100
@@ -540,7 +540,7 @@
 
 text {* The primary Isar source language provides facilities to ``open
   a window'' to the underlying ML compiler.  Especially see the Isar
-  commands @{command_ref "use"} and @{command_ref "ML"}: both work the
+  commands @{command_ref "ML_file"} and @{command_ref "ML"}: both work the
   same way, only the source text is provided via a file vs.\ inlined,
   respectively.  Apart from embedding ML into the main theory
   definition like that, there are many more commands that refer to ML
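
For instance (the file name below is hypothetical), the two commands feed equivalent ML source to the compiler, once read from a file and once inlined in the theory text:

    ML_file "util.ML"

    ML {*
      (* inlined ML source, compiled in the current theory context *)
      fun twice f x = f (f x);
    *}
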
--- a/src/HOL/Mirabelle/lib/scripts/mirabelle.pl	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/HOL/Mirabelle/lib/scripts/mirabelle.pl	Wed Feb 27 17:44:08 2013 +0100
@@ -69,7 +69,7 @@
   my $s = join ("\n", @action_files);
   my @action_files = split(/\n/, $s . "\n" . $s);
   %action_files = sort(@action_files);
-  $tools = "uses " . join(" ", sort(keys(%action_files)));
+  $tools = join("", map { "ML_file " . $_ . "\n" } (sort(keys(%action_files))));
 }
 
 open(SETUP_FILE, ">$setup_file") || die "Could not create file '$setup_file'";
@@ -77,8 +77,9 @@
 print SETUP_FILE <<END;
 theory "$setup_thy_name"
 imports "$mirabelle_thy" "$mirabelle_theory"
+begin
+
 $tools
-begin
 
 setup {*
   Config.put_global Mirabelle.logfile "$log_file" #>
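
With this change the generated setup theory places the ML_file loads after 'begin' rather than emitting a 'uses' clause before it. Roughly, assuming hypothetical action and theory names, the generated file now has this shape:

    theory "Mirabelle_Setup"
    imports "Mirabelle" "Mirabelle_Test"
    begin

    ML_file "mirabelle_sledgehammer.ML"

    setup {*
      Config.put_global Mirabelle.logfile "mirabelle.log"
    *}

    end
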
--- a/src/Pure/Isar/isar_syn.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Isar/isar_syn.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -270,13 +270,6 @@
 (* use ML text *)
 
 val _ =
-  Outer_Syntax.command @{command_spec "use"} "ML text from file"
-    (Parse.path >> (fn name =>
-      Toplevel.generic_theory (fn gthy =>
-        (legacy_feature "Old 'use' command -- use 'ML_file' instead";
-         Thy_Load.exec_ml (Path.explode name) gthy))));
-
-val _ =
   Outer_Syntax.command @{command_spec "ML"} "ML text within theory or local theory"
     (Parse.ML_source >> (fn (txt, pos) =>
       Toplevel.generic_theory
--- a/src/Pure/PIDE/document.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/PIDE/document.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -83,7 +83,7 @@
 fun make_perspective command_ids : perspective =
   (Inttab.defined (Inttab.make (map (rpair ()) command_ids)), try List.last command_ids);
 
-val no_header = ("", Thy_Header.make ("", Position.none) [] [] [], ["Bad theory header"]);
+val no_header = ("", Thy_Header.make ("", Position.none) [] [], ["Bad theory header"]);
 val no_perspective = make_perspective [];
 
 val empty_node = make_node (no_header, no_perspective, (Entries.empty, true), NONE);
@@ -102,9 +102,9 @@
 
 fun read_header node span =
   let
-    val (dir, {name = (name, _), imports, keywords, uses}) = get_header node;
+    val (dir, {name = (name, _), imports, keywords}) = get_header node;
     val {name = (_, pos), imports = imports', ...} = Thy_Header.read_tokens span;
-  in (dir, Thy_Header.make (name, pos) (map #1 imports ~~ map #2 imports') keywords uses) end;
+  in (dir, Thy_Header.make (name, pos) (map #1 imports ~~ map #2 imports') keywords) end;
 
 fun get_perspective (Node {perspective, ...}) = perspective;
 fun set_perspective ids =
--- a/src/Pure/PIDE/document.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/PIDE/document.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -40,13 +40,12 @@
     sealed case class Header(
       imports: List[Name],
       keywords: Thy_Header.Keywords,
-      uses: List[(String, Boolean)],
       errors: List[String] = Nil)
     {
       def error(msg: String): Header = copy(errors = errors ::: List(msg))
     }
 
-    def bad_header(msg: String): Header = Header(Nil, Nil, Nil, List(msg))
+    def bad_header(msg: String): Header = Header(Nil, Nil, List(msg))
 
     object Name
     {
--- a/src/Pure/PIDE/protocol.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/PIDE/protocol.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -37,17 +37,14 @@
                   fn ([], a) => Document.Edits (list (pair (option int) (option int)) a),
                   fn ([], a) =>
                     let
-                      val (master, (name, (imports, (keywords, (uses, errors))))) =
+                      val (master, (name, (imports, (keywords, errors)))) =
                         pair string (pair string (pair (list string)
                           (pair (list (pair string
                             (option (pair (pair string (list string)) (list string)))))
-                            (pair (list (pair string bool)) (list string))))) a;
+                            (list string)))) a;
                       val imports' = map (rpair Position.none) imports;
-                      val (uses', errors') =
-                        (map (apfst Path.explode) uses, errors)
-                          handle ERROR msg => ([], errors @ [msg]);
-                      val header = Thy_Header.make (name, Position.none) imports' keywords uses';
-                    in Document.Deps (master, header, errors') end,
+                      val header = Thy_Header.make (name, Position.none) imports' keywords;
+                    in Document.Deps (master, header, errors) end,
                   fn (a, []) => Document.Perspective (map int_atom a)]))
             end;
 
--- a/src/Pure/PIDE/protocol.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/PIDE/protocol.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -289,14 +289,12 @@
               val dir = Isabelle_System.posix_path(name.dir)
               val imports = header.imports.map(_.node)
               val keywords = header.keywords.map({ case (a, b, _) => (a, b) })
-              // FIXME val uses = deps.uses.map(p => (Isabelle_System.posix_path(p._1), p._2))
-              val uses = header.uses
               (Nil,
                 pair(Encode.string, pair(Encode.string, pair(list(Encode.string),
                   pair(list(pair(Encode.string,
                     option(pair(pair(Encode.string, list(Encode.string)), list(Encode.string))))),
-                  pair(list(pair(Encode.string, bool)), list(Encode.string))))))(
-                (dir, (name.theory, (imports, (keywords, (uses, header.errors))))))) },
+                  list(Encode.string)))))(
+                (dir, (name.theory, (imports, (keywords, header.errors)))))) },
           { case Document.Node.Perspective(a) => (a.commands.map(c => long_atom(c.id)), Nil) }))
       def encode_edits: T[List[Document.Edit_Command]] = list((node_edit: Document.Edit_Command) =>
       {
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -592,10 +592,9 @@
 
         fun filerefs f =
             let val thy = thy_name f
-                val filerefs = map #1 (#uses (Thy_Load.check_thy (Path.dir f) thy))
             in
                 issue_pgip (Setrefs {url=url, thyname=NONE, objtype=SOME PgipTypes.ObjFile,
-                                     name=NONE, idtables=[], fileurls=filerefs})
+                                     name=NONE, idtables=[], fileurls=[]})
             end
 
         fun thyrefs thy =
--- a/src/Pure/Pure.thy	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Pure.thy	Wed Feb 27 17:44:08 2013 +0100
@@ -12,8 +12,7 @@
     "attach" "begin" "binder" "constrains" "defines" "fixes" "for"
     "identifier" "if" "imports" "in" "includes" "infix" "infixl"
     "infixr" "is" "keywords" "notes" "obtains" "open" "output"
-    "overloaded" "pervasive" "shows" "structure" "unchecked" "uses"
-    "where" "|"
+    "overloaded" "pervasive" "shows" "structure" "unchecked" "where" "|"
   and "header" :: diag
   and "chapter" :: thy_heading1
   and "section" :: thy_heading2
@@ -30,7 +29,7 @@
     "abbreviation" "type_notation" "no_type_notation" "notation"
     "no_notation" "axiomatization" "theorems" "lemmas" "declare"
     "hide_class" "hide_type" "hide_const" "hide_fact" :: thy_decl
-  and "use" "ML" :: thy_decl % "ML"
+  and "ML" :: thy_decl % "ML"
   and "ML_prf" :: prf_decl % "proof"  (* FIXME % "ML" ?? *)
   and "ML_val" "ML_command" :: diag % "ML"
   and "setup" "local_setup" "attribute_setup" "method_setup"
--- a/src/Pure/Thy/present.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/present.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -21,7 +21,7 @@
   val init_theory: string -> unit
   val theory_source: string -> (unit -> HTML.text) -> unit
   val theory_output: string -> string -> unit
-  val begin_theory: int -> Path.T -> (Path.T * bool) list -> theory -> theory
+  val begin_theory: int -> Path.T -> theory -> theory
   val drafts: string -> Path.T list -> Path.T
 end;
 
@@ -451,13 +451,14 @@
         else Url.File (Path.append (mk_rel_path curr_session session) (html_path name)));
   in (link, name) end;
 
-fun begin_theory update_time dir files thy =
+fun begin_theory update_time dir thy =
     session_default thy (fn {name = sess_name, session, path, html_prefix, remote_path, ...} =>
   let
     val name = Context.theory_name thy;
     val parents = Theory.parents_of thy;
     val parent_specs = map (parent_link remote_path path) parents;
 
+    val files = [];  (* FIXME *)
     val files_html = files |> map (fn (raw_path, loadit) =>
       let
         val path = File.check_file (File.full_path dir raw_path);
--- a/src/Pure/Thy/thy_header.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/thy_header.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -10,10 +10,8 @@
   type header =
    {name: string * Position.T,
     imports: (string * Position.T) list,
-    keywords: keywords,
-    uses: (Path.T * bool) list}
-  val make: string * Position.T -> (string * Position.T) list -> keywords ->
-    (Path.T * bool) list -> header
+    keywords: keywords}
+  val make: string * Position.T -> (string * Position.T) list -> keywords -> header
   val define_keywords: header -> unit
   val declare_keyword: string * Keyword.spec option -> theory -> theory
   val the_keyword: theory -> string -> Keyword.spec option
@@ -30,11 +28,10 @@
 type header =
  {name: string * Position.T,
   imports: (string * Position.T) list,
-  keywords: keywords,
-  uses: (Path.T * bool) list};
+  keywords: keywords};
 
-fun make name imports keywords uses : header =
-  {name = name, imports = imports, keywords = keywords, uses = uses};
+fun make name imports keywords : header =
+  {name = name, imports = imports, keywords = keywords};
 
 
 
@@ -73,12 +70,11 @@
 val theoryN = "theory";
 val importsN = "imports";
 val keywordsN = "keywords";
-val usesN = "uses";
 val beginN = "begin";
 
 val header_lexicons =
   pairself (Scan.make_lexicon o map Symbol.explode)
-   (["%", "(", ")", ",", "::", ";", "==", "and", beginN, importsN, keywordsN, usesN],
+   (["%", "(", ")", ",", "::", ";", "==", "and", beginN, importsN, keywordsN],
     [headerN, theoryN]);
 
 
@@ -86,7 +82,6 @@
 
 local
 
-val file_name = Parse.group (fn () => "file name") Parse.path >> Path.explode;
 val theory_name = Parse.group (fn () => "theory name") (Parse.position Parse.name);
 
 val opt_files =
@@ -107,19 +102,14 @@
 
 val keyword_decls = Parse.and_list1 keyword_decl >> flat;
 
-val file =
-  Parse.$$$ "(" |-- Parse.!!! (file_name --| Parse.$$$ ")") >> rpair false ||
-  file_name >> rpair true;
-
 in
 
 val args =
   theory_name --
   Scan.optional (Parse.$$$ importsN |-- Parse.!!! (Scan.repeat1 theory_name)) [] --
-  Scan.optional (Parse.$$$ keywordsN |-- Parse.!!! keyword_decls) [] --
-  Scan.optional (Parse.$$$ usesN |-- Parse.!!! (Scan.repeat1 file)) [] --|
+  Scan.optional (Parse.$$$ keywordsN |-- Parse.!!! keyword_decls) [] --|
   Parse.$$$ beginN >>
-  (fn (((name, imports), keywords), uses) => make name imports keywords uses);
+  (fn ((name, imports), keywords) => make name imports keywords);
 
 end;
 
--- a/src/Pure/Thy/thy_header.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/thy_header.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -12,8 +12,6 @@
 import scala.util.parsing.input.{Reader, CharSequenceReader}
 import scala.util.matching.Regex
 
-import java.io.{File => JFile}
-
 
 object Thy_Header extends Parse.Parser
 {
@@ -22,12 +20,11 @@
   val IMPORTS = "imports"
   val KEYWORDS = "keywords"
   val AND = "and"
-  val USES = "uses"
   val BEGIN = "begin"
 
   private val lexicon =
     Scan.Lexicon("%", "(", ")", ",", "::", ";", "==",
-      AND, BEGIN, HEADER, IMPORTS, KEYWORDS, THEORY, USES)
+      AND, BEGIN, HEADER, IMPORTS, KEYWORDS, THEORY)
 
 
   /* theory file name */
@@ -72,9 +69,8 @@
       theory_name ~
       (opt(keyword(IMPORTS) ~! (rep1(theory_name))) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~
       (opt(keyword(KEYWORDS) ~! keyword_decls) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~
-      (opt(keyword(USES) ~! (rep1(file))) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~
       keyword(BEGIN) ^^
-      { case x ~ ys ~ zs ~ ws ~ _ => Thy_Header(x, ys, zs, ws) }
+      { case x ~ ys ~ zs ~ _ => Thy_Header(x, ys, zs) }
 
     (keyword(HEADER) ~ tags) ~!
       ((doc_source ~ rep(keyword(";")) ~ keyword(THEORY) ~ tags) ~> args) ^^ { case _ ~ x => x } |
@@ -119,10 +115,9 @@
 sealed case class Thy_Header(
   name: String,
   imports: List[String],
-  keywords: Thy_Header.Keywords,
-  uses: List[(String, Boolean)])
+  keywords: Thy_Header.Keywords)
 {
   def map(f: String => String): Thy_Header =
-    Thy_Header(f(name), imports.map(f), keywords, uses.map(p => (f(p._1), p._2)))
+    Thy_Header(f(name), imports.map(f), keywords)
 }
 
--- a/src/Pure/Thy/thy_info.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/thy_info.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -235,7 +235,7 @@
 fun required_by _ [] = ""
   | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")";
 
-fun load_thy last_timing initiators update_time deps text (name, pos) uses keywords parents =
+fun load_thy last_timing initiators update_time deps text (name, pos) keywords parents =
   let
     val _ = kill_thy name;
     val _ = Output.urgent_message ("Loading theory " ^ quote name ^ required_by " " initiators);
@@ -243,7 +243,7 @@
 
     val {master = (thy_path, _), imports} = deps;
     val dir = Path.dir thy_path;
-    val header = Thy_Header.make (name, pos) imports keywords uses;
+    val header = Thy_Header.make (name, pos) imports keywords;
 
     val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents);
 
@@ -257,21 +257,21 @@
 
 fun check_deps dir name =
   (case lookup_deps name of
-    SOME NONE => (true, NONE, Position.none, get_imports name, [], [])
+    SOME NONE => (true, NONE, Position.none, get_imports name, [])
   | NONE =>
-      let val {master, text, theory_pos, imports, keywords, uses} = Thy_Load.check_thy dir name
-      in (false, SOME (make_deps master imports, text), theory_pos, imports, uses, keywords) end
+      let val {master, text, theory_pos, imports, keywords} = Thy_Load.check_thy dir name
+      in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end
   | SOME (SOME {master, ...}) =>
       let
         val {master = master', text = text', theory_pos = theory_pos', imports = imports',
-          uses = uses', keywords = keywords'} = Thy_Load.check_thy dir name;
+          keywords = keywords'} = Thy_Load.check_thy dir name;
         val deps' = SOME (make_deps master' imports', text');
         val current =
           #2 master = #2 master' andalso
             (case lookup_theory name of
               NONE => false
             | SOME theory => Thy_Load.load_current theory);
-      in (current, deps', theory_pos', imports', uses', keywords') end);
+      in (current, deps', theory_pos', imports', keywords') end);
 
 in
 
@@ -289,7 +289,7 @@
           val dir' = Path.append dir (Path.dir path);
           val _ = member (op =) initiators name andalso error (cycle_msg initiators);
 
-          val (current, deps, theory_pos, imports, uses, keywords) = check_deps dir' name
+          val (current, deps, theory_pos, imports, keywords) = check_deps dir' name
             handle ERROR msg => cat_error msg
               (loader_msg "the error(s) above occurred while examining theory" [name] ^
                 Position.here require_pos ^ required_by "\n" initiators);
@@ -310,7 +310,7 @@
                     val update_time = serial ();
                     val load =
                       load_thy last_timing initiators update_time dep
-                        text (name, theory_pos) uses keywords;
+                        text (name, theory_pos) keywords;
                   in Task (parents, load) end);
 
           val tasks'' = new_entry name parents task tasks';
--- a/src/Pure/Thy/thy_info.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/thy_info.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -29,10 +29,15 @@
 
   sealed case class Dep(
     name: Document.Node.Name,
-    header0: Document.Node.Header,
-    future_header: JFuture[Exn.Result[Document.Node.Header]])
+    header: Document.Node.Header)
   {
-    def join_header: Document.Node.Header = Exn.release(future_header.get)
+    def load_files(syntax: Outer_Syntax): List[String] =
+    {
+      val string = thy_load.with_thy_text(name, _.toString)
+      if (thy_load.body_files_test(syntax, string))
+        thy_load.body_files(syntax, string)
+      else Nil
+    }
   }
 
   object Dependencies
@@ -46,24 +51,33 @@
     val seen: Set[Document.Node.Name])
   {
     def :: (dep: Dep): Dependencies =
-      new Dependencies(dep :: rev_deps, dep.header0.keywords ::: keywords, seen)
+      new Dependencies(dep :: rev_deps, dep.header.keywords ::: keywords, seen)
 
     def + (name: Document.Node.Name): Dependencies =
       new Dependencies(rev_deps, keywords, seen = seen + name)
 
     def deps: List[Dep] = rev_deps.reverse
 
+    lazy val syntax: Outer_Syntax = thy_load.base_syntax.add_keywords(keywords)
+
     def loaded_theories: Set[String] =
       (thy_load.loaded_theories /: rev_deps) { case (loaded, dep) => loaded + dep.name.theory }
 
-    def make_syntax: Outer_Syntax = thy_load.base_syntax.add_keywords(keywords)
+    def load_files: List[Path] =
+    {
+      // FIXME par.map (!?)
+      ((Nil: List[Path]) /: rev_deps) {
+        case (files, dep) =>
+          dep.load_files(syntax).map(a => Path.explode(dep.name.dir) + Path.explode(a)) ::: files
+      }
+    }
   }
 
-  private def require_thys(files: Boolean, initiators: List[Document.Node.Name],
+  private def require_thys(initiators: List[Document.Node.Name],
       required: Dependencies, names: List[Document.Node.Name]): Dependencies =
-    (required /: names)(require_thy(files, initiators, _, _))
+    (required /: names)(require_thy(initiators, _, _))
 
-  private def require_thy(files: Boolean, initiators: List[Document.Node.Name],
+  private def require_thy(initiators: List[Document.Node.Name],
       required: Dependencies, name: Document.Node.Name): Dependencies =
   {
     if (required.seen(name)) required
@@ -75,46 +89,18 @@
 
       try {
         if (initiators.contains(name)) error(cycle_msg(initiators))
-        val syntax = required.make_syntax
-
-        val header0 =
+        val header =
           try { thy_load.check_thy(name) }
           catch { case ERROR(msg) => err(msg) }
-
-        val future_header: JFuture[Exn.Result[Document.Node.Header]] =
-          if (files) {
-            val string = thy_load.with_thy_text(name, _.toString)
-            val syntax0 = syntax.add_keywords(header0.keywords)
-
-            if (thy_load.body_files_test(syntax0, string)) {
-              /* FIXME
-                  unstable in scala-2.9.2 on multicore hardware -- spurious NPE
-                  OK in scala-2.10.0.RC3 */
-              // default_thread_pool.submit(() =>
-                Library.future_value(Exn.capture {
-                  try {
-                    val files = thy_load.body_files(syntax0, string)
-                    header0.copy(uses = header0.uses ::: files.map((_, false)))
-                  }
-                  catch { case ERROR(msg) => err(msg) }
-                })
-              //)
-            }
-            else Library.future_value(Exn.Res(header0))
-          }
-          else Library.future_value(Exn.Res(header0))
-
-        Dep(name, header0, future_header) ::
-          require_thys(files, name :: initiators, required + name, header0.imports)
+        Dep(name, header) :: require_thys(name :: initiators, required + name, header.imports)
       }
       catch {
         case e: Throwable =>
-          val bad_header = Document.Node.bad_header(Exn.message(e))
-          Dep(name, bad_header, Library.future_value(Exn.Res(bad_header))) :: (required + name)
+          Dep(name, Document.Node.bad_header(Exn.message(e))) :: (required + name)
       }
     }
   }
 
-  def dependencies(inlined_files: Boolean, names: List[Document.Node.Name]): Dependencies =
-    require_thys(inlined_files, Nil, Dependencies.empty, names)
+  def dependencies(names: List[Document.Node.Name]): Dependencies =
+    require_thys(Nil, Dependencies.empty, names)
 }
--- a/src/Pure/Thy/thy_load.ML	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/thy_load.ML	Wed Feb 27 17:44:08 2013 +0100
@@ -12,7 +12,7 @@
   val parse_files: string -> (theory -> Token.file list) parser
   val check_thy: Path.T -> string ->
    {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
-    imports: (string * Position.T) list, uses: (Path.T * bool) list, keywords: Thy_Header.keywords}
+    imports: (string * Position.T) list, keywords: Thy_Header.keywords}
   val provide: Path.T * SHA1.digest -> theory -> theory
   val provide_parse_files: string -> (theory -> Token.file list * theory) parser
   val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
@@ -132,13 +132,13 @@
     val master_file = check_file dir path;
     val text = File.read master_file;
 
-    val {name = (name, pos), imports, uses, keywords} =
+    val {name = (name, pos), imports, keywords} =
       Thy_Header.read (Path.position master_file) text;
     val _ = thy_name <> name andalso
       error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name ^ Position.here pos);
   in
    {master = (master_file, SHA1.digest text), text = text, theory_pos = pos,
-    imports = imports, uses = uses, keywords = keywords}
+    imports = imports, keywords = keywords}
   end;
 
 
@@ -207,11 +207,10 @@
 
 (* load_thy *)
 
-fun begin_theory master_dir {name, imports, keywords, uses} parents =
+fun begin_theory master_dir {name, imports, keywords} parents =
   Theory.begin_theory name parents
   |> put_deps master_dir imports
   |> fold Thy_Header.declare_keyword keywords
-  |> fold (fn (path, true) => Context.theory_map (exec_ml path) o Theory.checkpoint | _ => I) uses
   |> Theory.checkpoint;
 
 fun excursion last_timing init elements =
@@ -239,12 +238,12 @@
   let
     val time = ! Toplevel.timing;
 
-    val {name = (name, _), uses, ...} = header;
+    val {name = (name, _), ...} = header;
     val _ = Thy_Header.define_keywords header;
     val _ = Present.init_theory name;
     fun init () =
       begin_theory master_dir header parents
-      |> Present.begin_theory update_time master_dir uses;
+      |> Present.begin_theory update_time master_dir;
 
     val lexs = Keyword.get_lexicons ();
 
--- a/src/Pure/Thy/thy_load.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Thy/thy_load.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -114,8 +114,7 @@
           " for theory " + quote(name1))
 
       val imports = header.imports.map(import_name(name.dir, _))
-      val uses = header.uses
-      Document.Node.Header(imports, header.keywords, uses)
+      Document.Node.Header(imports, header.keywords, Nil)
     }
     catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
   }
--- a/src/Pure/Tools/build.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Pure/Tools/build.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -407,20 +407,18 @@
           }
 
           val thy_deps =
-            thy_info.dependencies(inlined_files,
+            thy_info.dependencies(
               info.theories.map(_._2).flatten.
                 map(thy => Thy_Load.path_node_name(info.dir + Thy_Load.thy_path(thy))))
 
           val loaded_theories = thy_deps.loaded_theories
-          val syntax = thy_deps.make_syntax
+          val syntax = thy_deps.syntax
+
+          val body_files = if (inlined_files) thy_deps.load_files else Nil
 
           val all_files =
-            (thy_deps.deps.map({ case dep =>
-              val thy = Path.explode(dep.name.node)
-              val uses = dep.join_header.uses.map(p =>
-                Path.explode(dep.name.dir) + Path.explode(p._1))
-              thy :: uses
-            }).flatten ::: info.files.map(file => info.dir + file)).map(_.expand)
+            (thy_deps.deps.map(dep => Path.explode(dep.name.node)) ::: body_files :::
+              info.files.map(file => info.dir + file)).map(_.expand)
 
           if (list_files)
             progress.echo(cat_lines(all_files.map(_.implode).sorted.map("  " + _)))
@@ -432,7 +430,7 @@
                 error(msg + "\nThe error(s) above occurred in session " +
                   quote(name) + Position.here(info.pos))
             }
-          val errors = parent_errors ::: thy_deps.deps.map(dep => dep.join_header.errors).flatten
+          val errors = parent_errors
 
           deps + (name -> Session_Content(loaded_theories, syntax, sources, errors))
       }))
--- a/src/Tools/jEdit/src/plugin.scala	Wed Feb 27 13:48:15 2013 +0100
+++ b/src/Tools/jEdit/src/plugin.scala	Wed Feb 27 17:44:08 2013 +0100
@@ -157,7 +157,7 @@
 
         val thy_info = new Thy_Info(PIDE.thy_load)
         // FIXME avoid I/O in Swing thread!?!
-        val files = thy_info.dependencies(true, thys).deps.map(_.name.node).
+        val files = thy_info.dependencies(thys).deps.map(_.name.node).
           filter(file => !loaded_buffer(file) && PIDE.thy_load.check_file(view, file))
 
         if (!files.isEmpty) {