refined Thy_Load.check_thy: find more uses in body text, based on keywords;
author wenzelm
Tue, 21 Aug 2012 20:32:33 +0200
changeset 48874 ff9cd47be39b
parent 48873 18b17f15bc62
child 48875 b629f037a0cb
refined Thy_Load.check_thy: find more uses in body text, based on keywords; refined Thy_Info.require_thy(s): cumulate additional keywords; slightly more value-oriented type Keyword.keywords;
src/Pure/Isar/keyword.ML
src/Pure/ProofGeneral/proof_general_pgip.ML
src/Pure/Thy/thy_header.ML
src/Pure/Thy/thy_info.ML
src/Pure/Thy/thy_load.ML
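
The sketch below is not part of the changeset; it only illustrates how the refined interfaces fit together, using hypothetical theory names ("A", "B") and a hypothetical master directory. Thy_Load.check_thy now takes a list of base keywords and additionally returns the keywords declared in the theory header; Thy_Info.require_thy(s) cumulates these so that thy_load commands introduced by earlier theories are recognized in the body text of later ones.

    (*illustrative sketch only -- hypothetical names, not part of the changeset*)
    local
      val dir = Path.explode "~~/src/HOL";   (*assumed master directory*)
    in
      (*examine theory A with no additional base keywords yet*)
      val {keywords = keywords_A, ...} = Thy_Load.check_thy [] dir "A";

      (*keywords declared by A are passed as base keywords when examining B,
        so thy_load commands introduced by A are also found in B's body text*)
      val {uses = uses_B, ...} = Thy_Load.check_thy keywords_A dir "B";

      (*value-oriented access to the (command, file extensions) table*)
      val loaders = Keyword.thy_load_commands (Keyword.get_keywords ());
    end;

Within the changeset itself, only Thy_Info.require_thy(s) threads keywords in this cumulative fashion; the other call sites (proof_general_pgip.ML and Thy_Info.register_thy) simply pass the empty list.
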
--- a/src/Pure/Isar/keyword.ML	Tue Aug 21 16:56:18 2012 +0200
+++ b/src/Pure/Isar/keyword.ML	Tue Aug 21 20:32:33 2012 +0200
@@ -47,14 +47,18 @@
   type spec = (string * string list) * string list
   val spec: spec -> T
   val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
+  type keywords
+  val lexicons_of: keywords -> Scan.lexicon * Scan.lexicon
+  val get_keywords: unit -> keywords
   val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
   val is_keyword: string -> bool
   val command_keyword: string -> T option
   val command_files: string -> string list
   val command_tags: string -> string list
-  val thy_load_commands: unit -> string list
   val dest: unit -> string list * string list
   val status: unit -> unit
+  val thy_load_commands: keywords -> (string * string list) list
+  val define_keywords: string * T option -> keywords -> keywords
   val define: string * T option -> unit
   val is_diag: string -> bool
   val is_control: string -> bool
@@ -161,12 +165,17 @@
 
 (** global keyword tables **)
 
-type keywords =
+datatype keywords = Keywords of
  {lexicons: Scan.lexicon * Scan.lexicon,  (*minor, major*)
   commands: T Symtab.table};  (*command classification*)
 
-fun make_keywords (lexicons, commands) : keywords =
-  {lexicons = lexicons, commands = commands};
+fun make_keywords (lexicons, commands) =
+  Keywords {lexicons = lexicons, commands = commands};
+
+fun map_keywords f (Keywords {lexicons, commands}) =
+  make_keywords (f (lexicons, commands));
+
+fun lexicons_of (Keywords {lexicons, ...}) = lexicons;
 
 local
 
@@ -177,14 +186,12 @@
 
 fun get_keywords () = ! global_keywords;
 
-fun change_keywords f = CRITICAL (fn () =>
-  Unsynchronized.change global_keywords
-    (fn {lexicons, commands} => make_keywords (f (lexicons, commands))));
+fun change_keywords f = CRITICAL (fn () => Unsynchronized.change global_keywords f);
 
 end;
 
-fun get_lexicons () = #lexicons (get_keywords ());
-fun get_commands () = #commands (get_keywords ());
+fun get_lexicons () = lexicons_of (get_keywords ());
+fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands);
 
 
 (* lookup *)
@@ -199,9 +206,6 @@
 val command_files = these o Option.map (#2 o kind_files_of) o command_keyword;
 val command_tags = these o Option.map tags_of o command_keyword;
 
-fun thy_load_commands () =
-  Symtab.fold (fn (name, k) => kind_of k = kind_of thy_load ? cons name) (get_commands ()) [];
-
 fun dest () = pairself (sort_strings o Scan.dest_lexicon) (get_lexicons ());
 
 
@@ -209,7 +213,7 @@
 
 fun status () =
   let
-    val {lexicons = (minor, _), commands} = get_keywords ();
+    val Keywords {lexicons = (minor, _), commands} = get_keywords ();
     val _ = sort_strings (Scan.dest_lexicon minor) |> List.app (fn name =>
       writeln ("Outer syntax keyword " ^ quote name));
     val _ = sort_wrt #1 (Symtab.dest commands) |> List.app (fn (name, kind) =>
@@ -217,9 +221,14 @@
   in () end;
 
 
+fun thy_load_commands (Keywords {commands, ...}) =
+  Symtab.fold (fn (name, Keyword {kind, files, ...}) =>
+      kind = kind_of thy_load ? cons (name, files)) commands [];
+
+
 (* define *)
 
-fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) =>
+fun define_keywords (name, opt_kind) = map_keywords (fn ((minor, major), commands) =>
   (case opt_kind of
     NONE =>
       let
@@ -231,6 +240,8 @@
         val commands' = Symtab.update (name, kind) commands;
       in ((minor, major'), commands') end));
 
+val define = change_keywords o define_keywords;
+
 
 (* command categories *)
 
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML	Tue Aug 21 16:56:18 2012 +0200
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML	Tue Aug 21 20:32:33 2012 +0200
@@ -594,14 +594,14 @@
 
         fun filerefs f =
             let val thy = thy_name f
-                val filerefs = map #1 (#uses (Thy_Load.check_thy (Path.dir f) thy))
+                val filerefs = map #1 (#uses (Thy_Load.check_thy [] (Path.dir f) thy))
             in
                 issue_pgip (Setrefs {url=url, thyname=NONE, objtype=SOME PgipTypes.ObjFile,
                                      name=NONE, idtables=[], fileurls=filerefs})
             end
 
         fun thyrefs thy =
-            let val thyrefs = #imports (Thy_Load.check_thy Path.current thy)
+            let val thyrefs = #imports (Thy_Load.check_thy [] Path.current thy)
             in
                 issue_pgip (Setrefs {url=url, thyname=thyname, objtype=SOME PgipTypes.ObjTheory,
                                      name=NONE, idtables=[{context=NONE, objtype=PgipTypes.ObjTheory,
--- a/src/Pure/Thy/thy_header.ML	Tue Aug 21 16:56:18 2012 +0200
+++ b/src/Pure/Thy/thy_header.ML	Tue Aug 21 20:32:33 2012 +0200
@@ -6,13 +6,13 @@
 
 signature THY_HEADER =
 sig
+  type keywords = (string * Keyword.spec option) list
   type header =
    {name: string,
     imports: string list,
-    keywords: (string * Keyword.spec option) list,
+    keywords: keywords,
     uses: (Path.T * bool) list}
-  val make: string -> string list -> (string * Keyword.spec option) list ->
-    (Path.T * bool) list -> header
+  val make: string -> string list -> keywords -> (Path.T * bool) list -> header
   val define_keywords: header -> unit
   val declare_keyword: string * Keyword.spec option -> theory -> theory
   val the_keyword: theory -> string -> Keyword.spec option
@@ -23,10 +23,12 @@
 structure Thy_Header: THY_HEADER =
 struct
 
+type keywords = (string * Keyword.spec option) list;
+
 type header =
  {name: string,
   imports: string list,
-  keywords: (string * Keyword.spec option) list,
+  keywords: keywords,
   uses: (Path.T * bool) list};
 
 fun make name imports keywords uses : header =
--- a/src/Pure/Thy/thy_info.ML	Tue Aug 21 16:56:18 2012 +0200
+++ b/src/Pure/Thy/thy_info.ML	Tue Aug 21 20:32:33 2012 +0200
@@ -237,48 +237,49 @@
     fun commit () = update_thy deps theory;
   in (theory, present, commit) end;
 
-fun check_deps dir name =
+fun check_deps base_keywords dir name =
   (case lookup_deps name of
-    SOME NONE => (true, NONE, get_imports name)
+    SOME NONE => (true, NONE, get_imports name, [])
   | NONE =>
-      let val {master, text, imports, ...} = Thy_Load.check_thy dir name
-      in (false, SOME (make_deps master imports, text), imports) end
+      let val {master, text, imports, keywords, ...} = Thy_Load.check_thy base_keywords dir name
+      in (false, SOME (make_deps master imports, text), imports, keywords) end
   | SOME (SOME {master, ...}) =>
       let
-        val {master = master', text = text', imports = imports', ...} = Thy_Load.check_thy dir name;
+        val {master = master', text = text', imports = imports', keywords, ...} =
+          Thy_Load.check_thy base_keywords dir name;
         val deps' = SOME (make_deps master' imports', text');
         val current =
           #2 master = #2 master' andalso
             (case lookup_theory name of
               NONE => false
             | SOME theory => Thy_Load.all_current theory);
-      in (current, deps', imports') end);
+      in (current, deps', imports', keywords) end);
 
 in
 
-fun require_thys initiators dir strs tasks =
-      fold_map (require_thy initiators dir) strs tasks |>> forall I
-and require_thy initiators dir str tasks =
+fun require_thys initiators dir strs result =
+      fold_map (require_thy initiators dir) strs result |>> forall I
+and require_thy initiators dir str (base_keywords, tasks) =
   let
     val path = Path.expand (Path.explode str);
     val name = Path.implode (Path.base path);
   in
     (case try (Graph.get_node tasks) name of
-      SOME task => (task_finished task, tasks)
+      SOME task => (task_finished task, (base_keywords, tasks))
     | NONE =>
         let
           val dir' = Path.append dir (Path.dir path);
           val _ = member (op =) initiators name andalso error (cycle_msg initiators);
 
-          val (current, deps, imports) = check_deps dir' name
+          val (current, deps, imports, keywords) = check_deps base_keywords dir' name
             handle ERROR msg => cat_error msg
               (loader_msg "the error(s) above occurred while examining theory" [name] ^
                 required_by "\n" initiators);
 
           val parents = map base_name imports;
-          val (parents_current, tasks') =
+          val (parents_current, (base_keywords', tasks')) =
             require_thys (name :: initiators)
-              (Path.append dir (master_dir (Option.map #1 deps))) imports tasks;
+              (Path.append dir (master_dir (Option.map #1 deps))) imports (base_keywords, tasks);
 
           val all_current = current andalso parents_current;
           val task =
@@ -289,7 +290,10 @@
               | SOME (dep, text) =>
                   let val update_time = serial ()
                   in Task (parents, load_thy initiators update_time dep text name) end);
-        in (all_current, new_entry name parents task tasks') end)
+
+          val base_keywords'' = keywords @ base_keywords';
+          val tasks'' = new_entry name parents task tasks';
+        in (all_current, (base_keywords'', tasks'')) end)
   end;
 
 end;
@@ -298,7 +302,7 @@
 (* use_thy *)
 
 fun use_thys_wrt dir arg =
-  schedule_tasks (snd (require_thys [] dir arg Graph.empty));
+  schedule_tasks (snd (snd (require_thys [] dir arg ([], Graph.empty))));
 
 val use_thys = use_thys_wrt Path.current;
 val use_thy = use_thys o single;
@@ -321,7 +325,7 @@
 fun register_thy theory =
   let
     val name = Context.theory_name theory;
-    val {master, ...} = Thy_Load.check_thy (Thy_Load.master_directory theory) name;
+    val {master, ...} = Thy_Load.check_thy [] (Thy_Load.master_directory theory) name;
     val imports = Thy_Load.imports_of theory;
   in
     NAMED_CRITICAL "Thy_Info" (fn () =>
--- a/src/Pure/Thy/thy_load.ML	Tue Aug 21 16:56:18 2012 +0200
+++ b/src/Pure/Thy/thy_load.ML	Tue Aug 21 20:32:33 2012 +0200
@@ -11,8 +11,9 @@
   val provide: Path.T * (Path.T * SHA1.digest) -> theory -> theory
   val check_file: Path.T -> Path.T -> Path.T
   val thy_path: Path.T -> Path.T
-  val check_thy: Path.T -> string ->
-    {master: Path.T * SHA1.digest, text: string, imports: string list, uses: (Path.T * bool) list}
+  val check_thy: (string * Keyword.T option) list -> Path.T -> string ->
+   {master: Path.T * SHA1.digest, text: string, imports: string list,
+    uses: (Path.T * bool) list, keywords: (string * Keyword.T option) list}
   val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
   val use_file: Path.T -> theory -> string * theory
   val loaded_files: theory -> Path.T list
@@ -75,17 +76,52 @@
 
 (* inlined files *)
 
-fun command_files cmd path =
-  (case Keyword.command_files cmd of
-    [] => [path]
-  | exts => map (fn ext => Path.ext ext path) exts);
+local
+
+fun clean ((i1, t1) :: (i2, t2) :: toks) =
+      if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks
+      else (i1, t1) :: clean ((i2, t2) :: toks)
+  | clean toks = toks;
+
+fun clean_tokens toks =
+  ((0 upto length toks - 1) ~~ toks)
+  |> filter (fn (_, tok) => Token.is_proper tok)
+  |> clean;
+
+fun find_file toks =
+  rev (clean_tokens toks) |> get_first (fn (i, tok) =>
+    if Token.is_name tok then SOME (i, Path.explode (Token.content_of tok))
+    else NONE);
+
+fun command_files path exts =
+  if null exts then [path]
+  else map (fn ext => Path.ext ext path) exts;
+
+in
+
+fun find_files syntax text =
+  let val thy_load_commands = Keyword.thy_load_commands syntax in
+    if exists (fn (cmd, _) => String.isSubstring cmd text) thy_load_commands then
+      Thy_Syntax.parse_tokens (Keyword.lexicons_of syntax) Position.none text
+      |> Thy_Syntax.parse_spans
+      |> maps
+        (fn Thy_Syntax.Span (Thy_Syntax.Command cmd, toks) =>
+              (case AList.lookup (op =) thy_load_commands cmd of
+                SOME exts =>
+                  (case find_file toks of
+                    SOME (_, path) => command_files path exts
+                  | NONE => [])
+              | NONE => [])
+          | _ => [])
+    else []
+  end;
 
 fun read_files cmd dir tok =
   let
     val path = Path.explode (Token.content_of tok);
     val files =
-      command_files cmd path
-      |> map (Path.append dir #> (fn p => (File.read p, Path.position p)));
+      command_files path (Keyword.command_files cmd)
+      |> map (Path.append dir #> (fn path => (File.read path, Path.position path)));
   in (dir, files) end;
 
 fun parse_files cmd =
@@ -97,31 +133,20 @@
           (warning ("Dynamic loading of files: " ^ quote name ^ Token.pos_of tok);
             read_files cmd (master_directory thy) tok)));
 
-local
-
-fun clean ((i1, t1) :: (i2, t2) :: toks) =
-      if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks
-      else (i1, t1) :: clean ((i2, t2) :: toks)
-  | clean toks = toks;
-
-fun clean_tokens toks =
-  ((0 upto length toks - 1) ~~ toks)
-  |> filter (fn (_, tok) => Token.is_proper tok) |> clean;
-
-in
-
-fun resolve_files dir (span as Thy_Syntax.Span (Thy_Syntax.Command cmd, toks)) =
+fun resolve_files dir span =
+  (case span of
+    Thy_Syntax.Span (Thy_Syntax.Command cmd, toks) =>
       if Keyword.is_theory_load cmd then
         (case find_first (Token.is_name o #2) (rev (clean_tokens toks)) of
-          NONE => span
-        | SOME (i, _) =>
+          NONE => span  (* FIXME error *)
+        | SOME (i, path) =>
             let
               val toks' = toks |> map_index (fn (j, tok) =>
-                if i = j then Token.put_files (read_files cmd dir tok) tok
+                if i = j then Token.put_files (read_files cmd dir path) tok
                 else tok);
             in Thy_Syntax.Span (Thy_Syntax.Command cmd, toks') end)
       else span
-  | resolve_files _ span = span;
+  | span => span);
 
 end;
 
@@ -132,19 +157,29 @@
 
 val thy_path = Path.ext "thy";
 
-fun check_thy dir name =
+fun check_thy base_keywords dir name =
   let
     val path = thy_path (Path.basic name);
     val master_file = check_file dir path;
     val text = File.read master_file;
-    val (name', imports, uses) =
-      if name = Context.PureN then (Context.PureN, [], [])
+    val (name', imports, uses, more_keywords) =
+      if name = Context.PureN then (Context.PureN, [], [], [])
       else
-        let val {name, imports, uses, ...} = Thy_Header.read (Path.position master_file) text
-        in (name, imports, uses) end;
+        let
+          val {name, imports, uses, keywords} = Thy_Header.read (Path.position master_file) text;
+          val more_keywords = map (apsnd (Option.map Keyword.spec)) keywords;
+          val syntax =
+            Keyword.get_keywords ()
+            |> fold Keyword.define_keywords base_keywords
+            |> fold Keyword.define_keywords more_keywords;
+          val more_uses = map (rpair false) (find_files syntax text);
+        in (name, imports, uses @ more_uses, more_keywords) end;
     val _ = name <> name' andalso
       error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name');
-  in {master = (master_file, SHA1.digest text), text = text, imports = imports, uses = uses} end;
+  in
+   {master = (master_file, SHA1.digest text), text = text,
+    imports = imports, uses = uses, keywords = more_keywords}
+  end;
 
 
 (* load files *)