some support for inlining file content into outer syntax token language;
author wenzelm
date Mon, 20 Aug 2012 17:05:53 +0200
changeset 48867 e9beabf045ab
parent 48866 034df7b05759
child 48868 aeea516440c8
etc/isar-keywords-ZF.el
etc/isar-keywords.el
src/Pure/Isar/keyword.ML
src/Pure/Isar/keyword.scala
src/Pure/Isar/token.ML
src/Pure/ROOT.ML
src/Pure/System/build.scala
src/Pure/Thy/thy_load.ML
src/Pure/Thy/thy_syntax.ML
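
In outline: ML_file is registered as a theory-level command of the new thy_load kind. When command spans are parsed, Thy_Load.resolve_files reads the referenced file relative to the master directory and attaches its content (with position) to the file-name token via Token.put_files; at execution time Thy_Load.parse_files retrieves that inlined content, falling back to dynamic loading with a warning if nothing was attached. A minimal usage sketch, with hypothetical theory and file names:

    theory Demo
    imports Pure
    begin

    ML_file "demo_tools.ML"    (*content of demo_tools.ML is inlined into this command span*)

    end
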
--- a/etc/isar-keywords-ZF.el	Mon Aug 20 15:43:10 2012 +0200
+++ b/etc/isar-keywords-ZF.el	Mon Aug 20 17:05:53 2012 +0200
@@ -10,6 +10,7 @@
     "Isabelle\\.command"
     "ML"
     "ML_command"
+    "ML_file"
     "ML_prf"
     "ML_val"
     "ProofGeneral\\.inform_file_processed"
@@ -343,6 +344,7 @@
 
 (defconst isar-keywords-theory-decl
   '("ML"
+    "ML_file"
     "abbreviation"
     "arities"
     "attribute_setup"
--- a/etc/isar-keywords.el	Mon Aug 20 15:43:10 2012 +0200
+++ b/etc/isar-keywords.el	Mon Aug 20 17:05:53 2012 +0200
@@ -10,6 +10,7 @@
     "Isabelle\\.command"
     "ML"
     "ML_command"
+    "ML_file"
     "ML_prf"
     "ML_val"
     "ProofGeneral\\.inform_file_processed"
@@ -453,6 +454,7 @@
 
 (defconst isar-keywords-theory-decl
   '("ML"
+    "ML_file"
     "abbreviation"
     "arities"
     "atom_decl"
--- a/src/Pure/Isar/keyword.ML	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/Isar/keyword.ML	Mon Aug 20 17:05:53 2012 +0200
@@ -50,6 +50,7 @@
   val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
   val is_keyword: string -> bool
   val command_keyword: string -> T option
+  val command_files: string -> string list
   val command_tags: string -> string list
   val dest: unit -> string list * string list
   val status: unit -> unit
@@ -59,6 +60,7 @@
   val is_regular: string -> bool
   val is_heading: string -> bool
   val is_theory_begin: string -> bool
+  val is_theory_load: string -> bool
   val is_theory: string -> bool
   val is_proof: string -> bool
   val is_theory_goal: string -> bool
@@ -96,9 +98,9 @@
 val thy_heading2 = kind "thy_heading2";
 val thy_heading3 = kind "thy_heading3";
 val thy_heading4 = kind "thy_heading4";
+val thy_decl = kind "thy_decl";
 val thy_load = kind "thy_load";
 fun thy_load_files files = Keyword {kind = "thy_load", files = files, tags = []};
-val thy_decl = kind "thy_decl";
 val thy_script = kind "thy_script";
 val thy_goal = kind "thy_goal";
 val thy_schematic_goal = kind "thy_schematic_goal";
@@ -193,7 +195,8 @@
   in Scan.is_literal minor syms orelse Scan.is_literal major syms end;
 
 fun command_keyword name = Symtab.lookup (get_commands ()) name;
-fun command_tags name = these (Option.map tags_of (command_keyword name));
+val command_files = these o Option.map (#2 o kind_files_of) o command_keyword;
+val command_tags = these o Option.map tags_of o command_keyword;
 
 fun dest () = pairself (sort_strings o Scan.dest_lexicon) (get_lexicons ());
 
@@ -242,6 +245,8 @@
 
 val is_theory_begin = command_category [thy_begin];
 
+val is_theory_load = command_category [thy_load];
+
 val is_theory = command_category
   [thy_begin, thy_end, thy_heading1, thy_heading2, thy_heading3, thy_heading4,
     thy_load, thy_decl, thy_script, thy_goal, thy_schematic_goal];
--- a/src/Pure/Isar/keyword.scala	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/Isar/keyword.scala	Mon Aug 20 17:05:53 2012 +0200
@@ -21,6 +21,7 @@
   val THY_HEADING3 = "thy_heading3"
   val THY_HEADING4 = "thy_heading4"
   val THY_DECL = "thy_decl"
+  val THY_LOAD = "thy_load"
   val THY_SCRIPT = "thy_script"
   val THY_GOAL = "thy_goal"
   val THY_SCHEMATIC_GOAL = "thy_schematic_goal"
--- a/src/Pure/Isar/token.ML	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/Isar/token.ML	Mon Aug 20 17:05:53 2012 +0200
@@ -10,9 +10,10 @@
     Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
     Nat | Float | String | AltString | Verbatim | Space | Comment | InternalValue |
     Error of string | Sync | EOF
+  type files = Path.T * (string * Position.T) list
   datatype value =
     Text of string | Typ of typ | Term of term | Fact of thm list |
-    Attribute of morphism -> attribute
+    Attribute of morphism -> attribute | Files of files
   type T
   val str_of_kind: kind -> string
   val position_of: T -> Position.T
@@ -28,6 +29,7 @@
   val keyword_with: (string -> bool) -> T -> bool
   val ident_with: (string -> bool) -> T -> bool
   val is_command: T -> bool
+  val is_name: T -> bool
   val is_proper: T -> bool
   val is_semicolon: T -> bool
   val is_comment: T -> bool
@@ -42,6 +44,8 @@
   val content_of: T -> string
   val unparse: T -> string
   val text_of: T -> string * string
+  val get_files: T -> files option
+  val put_files: files -> T -> T
   val get_value: T -> value option
   val map_value: (value -> value) -> T -> T
   val mk_text: string -> T
@@ -74,12 +78,15 @@
   args.ML).  Note that an assignable ref designates an intermediate
   state of internalization -- it is NOT meant to persist.*)
 
+type files = Path.T * (string * Position.T) list;  (*master dir, multiple file content*)
+
 datatype value =
   Text of string |
   Typ of typ |
   Term of term |
   Fact of thm list |
-  Attribute of morphism -> attribute;
+  Attribute of morphism -> attribute |
+  Files of files;
 
 datatype slot =
   Slot |
@@ -149,6 +156,7 @@
 fun is_kind k (Token (_, (k', _), _)) = k = k';
 
 val is_command = is_kind Command;
+val is_name = is_kind Ident orf is_kind SymIdent orf is_kind String orf is_kind Nat;
 
 fun keyword_with pred (Token (_, (Keyword, x), _)) = pred x
   | keyword_with _ _ = false;
@@ -229,6 +237,16 @@
 
 (** associated values **)
 
+(* inlined file content *)
+
+fun get_files (Token (_, _, Value (SOME (Files files)))) = SOME files
+  | get_files _ = NONE;
+
+fun put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files)))
+  | put_files _ tok =
+      raise Fail ("Cannot put inlined files here" ^ Position.str_of (position_of tok));
+
+
 (* access values *)
 
 fun get_value (Token (_, _, Value v)) = v
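
A token's value slot can now carry inlined file content. A minimal sketch of the new interface, assuming tok is a freshly scanned name token whose slot is still empty, with a made-up directory and content:

    val files: Token.files =
      (Path.explode "demo_dir", [("val it = 42;", Position.none)]);
    val tok' = Token.put_files files tok;               (*fill the empty value slot*)
    val (_, [(txt, _)]) = the (Token.get_files tok');   (*txt = "val it = 42;"*)
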
--- a/src/Pure/ROOT.ML	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/ROOT.ML	Mon Aug 20 17:05:53 2012 +0200
@@ -321,6 +321,16 @@
         Toplevel.init_theory
           (fn () => Thy_Info.toplevel_begin_theory (Thy_Load.get_master_path ()) header)));
 
+val _ =
+  Outer_Syntax.command
+    (("ML_file", Keyword.tag_ml Keyword.thy_load), Position.none) "ML text from file"
+    (Thy_Load.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
+      let val (_, [(txt, pos)]) = files (Context.theory_of gthy) in
+        gthy
+        |> ML_Context.exec (fn () => ML_Context.eval_text true pos txt)
+        |> Local_Theory.propagate_ml_env
+      end)));
+
 Unsynchronized.setmp Multithreading.max_threads 1
   use_thy "Pure";
 Context.set_thread_data NONE;
--- a/src/Pure/System/build.scala	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/System/build.scala	Mon Aug 20 17:05:53 2012 +0200
@@ -341,6 +341,7 @@
                   Outer_Syntax.init() +
                     // FIXME avoid hardwired stuff!?
                     ("theory", Keyword.THY_BEGIN) +
+                    ("ML_file", Keyword.THY_LOAD) +
                     ("hence", (Keyword.PRF_ASM_GOAL, Nil), "then have") +
                     ("thus", (Keyword.PRF_ASM_GOAL, Nil), "then show")
                 (Set.empty[String], init_syntax, Nil)
--- a/src/Pure/Thy/thy_load.ML	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/Thy/thy_load.ML	Mon Aug 20 17:05:53 2012 +0200
@@ -22,6 +22,8 @@
   val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
   val load_thy: int -> Path.T -> Thy_Header.header -> Position.T -> string ->
     theory list -> theory * unit future
+  val resolve_files: Path.T -> Thy_Syntax.span -> Thy_Syntax.span
+  val parse_files: string -> (theory -> Token.files) parser
   val set_master_path: Path.T -> unit
   val get_master_path: unit -> Path.T
 end;
@@ -158,6 +160,42 @@
 fun exec_ml src_path = ML_Context.exec (fn () => use_ml src_path);
 
 
+(* inlined files *)
+
+fun read_files cmd dir tok =
+  let
+    val path = Path.explode (Token.content_of tok);
+    val exts = Keyword.command_files cmd;
+    val variants =
+      if null exts then [path]
+      else map (fn ext => Path.ext ext path) exts;
+  in (dir, variants |> map (Path.append dir #> (fn p => (File.read p, Path.position p)))) end;
+
+fun resolve_files dir (span as Thy_Syntax.Span (Thy_Syntax.Command cmd, toks)) =
+      if Keyword.is_theory_load cmd then
+        (case find_index Token.is_name (rev toks) of  (* FIXME allow trailing cmt (!?!) *)
+          ~1 => span
+        | i' =>
+            let
+              val i = length toks - 1 - i';
+              val toks' = toks |> map_index (fn (j, tok) =>
+                if i = j then Token.put_files (read_files cmd dir tok) tok
+                else tok);
+            in Thy_Syntax.Span (Thy_Syntax.Command cmd, toks') end)
+      else span
+  | resolve_files _ span = span;
+
+fun parse_files cmd =
+  Scan.ahead Parse.not_eof -- Parse.group (fn () => "file name/path specification") Parse.name
+    >> (fn (tok, name) => fn thy =>
+      (case Token.get_files tok of
+        SOME files => files
+      | NONE =>
+          (warning ("Dynamic loading of files: " ^ quote name ^
+              Position.str_of (Token.position_of tok));
+            read_files cmd (master_directory thy) tok)));
+
+
 (* load_thy *)
 
 fun begin_theory dir {name, imports, keywords, uses} parents =
@@ -192,7 +230,7 @@
   let
     val time = ! Toplevel.timing;
 
-    val {name, imports, uses, ...} = header;
+    val {name, uses, ...} = header;
     val _ = Thy_Header.define_keywords header;
     val _ = Present.init_theory name;
     fun init () =
@@ -202,7 +240,7 @@
     val lexs = Keyword.get_lexicons ();
 
     val toks = Thy_Syntax.parse_tokens lexs pos text;
-    val spans = Thy_Syntax.parse_spans toks;
+    val spans = map (resolve_files dir) (Thy_Syntax.parse_spans toks);
     val elements = Thy_Syntax.parse_elements spans;
 
     val _ = Present.theory_source name
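
Note on read_files: file names are interpreted relative to the master directory, and the keyword's declared extensions (exposed via the new Keyword.command_files) determine which variants are read. ML_file declares no extensions, so the token text is taken as a literal path; a hypothetical keyword registered with Keyword.thy_load_files ["ML"] would instead resolve a token "foo" to foo.ML under the master directory via Path.ext.
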
--- a/src/Pure/Thy/thy_syntax.ML	Mon Aug 20 15:43:10 2012 +0200
+++ b/src/Pure/Thy/thy_syntax.ML	Mon Aug 20 17:05:53 2012 +0200
@@ -10,7 +10,7 @@
   val reports_of_tokens: Token.T list -> bool * (Position.report * string) list
   val present_token: Token.T -> Output.output
   datatype span_kind = Command of string | Ignored | Malformed
-  type span
+  datatype span = Span of span_kind * Token.T list
   val span_kind: span -> span_kind
   val span_content: span -> Token.T list
   val present_span: span -> Output.output