changeset:   74562:8403bd51f8b1
parent:      74561:8e6c973003c8
child:       74563:042041c0ebeb
author:      wenzelm
date:        Wed, 20 Oct 2021 20:04:28 +0200
description: clarified modules;

files:
    src/Pure/Isar/parse.ML
    src/Pure/Isar/token.ML
    src/Pure/ML/ml_antiquotations.ML
    src/Pure/ML/ml_context.ML

--- a/src/Pure/Isar/parse.ML	Wed Oct 20 18:13:17 2021 +0200
+++ b/src/Pure/Isar/parse.ML	Wed Oct 20 20:04:28 2021 +0200
@@ -118,6 +118,8 @@
   val thm: (Facts.ref * Token.src list) parser
   val thms1: (Facts.ref * Token.src list) list parser
   val options: ((string * Position.T) * (string * Position.T)) list parser
+  val embedded_ml: ML_Lex.token Antiquote.antiquote list parser
+  val embedded_ml_underscore: ML_Lex.token Antiquote.antiquote list parser
 end;
 
 structure Parse: PARSE =
@@ -494,4 +496,14 @@
 
 val options = $$$ "[" |-- list1 option --| $$$ "]";
 
+
+(* embedded ML *)
+
+val embedded_ml =
+  embedded_input >> ML_Lex.read_source ||
+  control >> (ML_Lex.read_symbols o Antiquote.control_symbols);
+
+val embedded_ml_underscore =
+  input underscore >> ML_Lex.read_source || embedded_ml;
+
 end;
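
Note: a minimal sketch of how the newly exported combinators compose with other parsers, mirroring their later use in ml_antiquotations.ML below; the name parse_name_with_ml_args is hypothetical and only for illustration:

    (* hypothetical combination: an embedded name followed by zero or more
       embedded ML arguments, each possibly the placeholder "_" *)
    val parse_name_with_ml_args =
      Parse.input Parse.name -- Scan.repeat Parse.embedded_ml_underscore;
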
--- a/src/Pure/Isar/token.ML	Wed Oct 20 18:13:17 2021 +0200
+++ b/src/Pure/Isar/token.ML	Wed Oct 20 20:04:28 2021 +0200
@@ -793,7 +793,6 @@
   end;
 
 
-
 (* wrapped syntax *)
 
 fun syntax_generic scan src context =
--- a/src/Pure/ML/ml_antiquotations.ML	Wed Oct 20 18:13:17 2021 +0200
+++ b/src/Pure/ML/ml_antiquotations.ML	Wed Oct 20 20:04:28 2021 +0200
@@ -221,32 +221,9 @@
 
 (* type/term constructors *)
 
-fun read_embedded ctxt keywords src parse =
-  let
-    val input = #1 (Token.syntax (Scan.lift Args.embedded_input) src ctxt);
-    val toks = input
-      |> Input.source_explode
-      |> Token.tokenize keywords {strict = true}
-      |> filter Token.is_proper;
-    val _ = Context_Position.reports_text ctxt (maps (Token.reports keywords) toks);
-  in
-    (case Scan.read Token.stopper parse toks of
-      SOME res => res
-    | NONE => error ("Bad input" ^ Position.here (Input.pos_of input)))
-  end;
-
-val parse_embedded_ml =
-  Parse.embedded_input >> ML_Lex.read_source ||
-  Parse.control >> (ML_Lex.read_symbols o Antiquote.control_symbols);
-
-val parse_embedded_ml_underscore =
-  Parse.input Parse.underscore >> ML_Lex.read_source || parse_embedded_ml;
-
-fun ml_args ctxt args =
-  let
-    val (decls, ctxt') = fold_map (ML_Context.expand_antiquotes) args ctxt;
-    fun decl' ctxt'' = map (fn decl => decl ctxt'') decls;
-  in (decl', ctxt') end
+fun read_embedded ctxt keywords parse src =
+  Token.syntax (Scan.lift Args.embedded_input) src ctxt
+  |> #1 |> Token.read_embedded ctxt keywords parse;
 
 local
 
@@ -254,7 +231,7 @@
 
 val parse_name = Parse.input Parse.name;
 
-val parse_args = Scan.repeat parse_embedded_ml_underscore;
+val parse_args = Scan.repeat Parse.embedded_ml_underscore;
 val parse_for_args = Scan.optional (Parse.$$$ "for" |-- Parse.!!! parse_args) [];
 
 fun parse_body b =
@@ -279,8 +256,8 @@
   ML_Context.add_antiquotation binding true
     (fn range => fn src => fn ctxt =>
       let
-        val ((s, type_args), fn_body) =
-          read_embedded ctxt keywords src (parse_name -- parse_args -- parse_body function);
+        val ((s, type_args), fn_body) = src
+          |> read_embedded ctxt keywords (parse_name -- parse_args -- parse_body function);
         val pos = Input.pos_of s;
 
         val Type (c, Ts) =
@@ -292,8 +269,8 @@
             error ("Type constructor " ^ quote (Proof_Context.markup_type ctxt c) ^
               " takes " ^ string_of_int n ^ " argument(s)" ^ Position.here pos);
 
-        val (decls1, ctxt1) = ml_args ctxt type_args;
-        val (decls2, ctxt2) = ml_args ctxt1 fn_body;
+        val (decls1, ctxt1) = ML_Context.expand_antiquotes_list type_args ctxt;
+        val (decls2, ctxt2) = ML_Context.expand_antiquotes_list fn_body ctxt1;
         fun decl' ctxt' =
           let
             val (ml_args_env, ml_args_body) = split_list (decls1 ctxt');
@@ -314,9 +291,9 @@
   ML_Context.add_antiquotation binding true
     (fn range => fn src => fn ctxt =>
       let
-        val (((s, type_args), term_args), fn_body) =
-          read_embedded ctxt keywords src
-            (parse_name -- parse_args -- parse_for_args -- parse_body function);
+        val (((s, type_args), term_args), fn_body) = src
+          |> read_embedded ctxt keywords
+              (parse_name -- parse_args -- parse_for_args -- parse_body function);
 
         val Const (c, T) =
           Proof_Context.read_const {proper = true, strict = true} ctxt
@@ -338,9 +315,9 @@
           length term_args > m andalso Term.is_Type (Term.body_type T) andalso
             err (" cannot have more than " ^ string_of_int m ^ " argument(s)");
 
-        val (decls1, ctxt1) = ml_args ctxt type_args;
-        val (decls2, ctxt2) = ml_args ctxt1 term_args;
-        val (decls3, ctxt3) = ml_args ctxt2 fn_body;
+        val (decls1, ctxt1) = ML_Context.expand_antiquotes_list type_args ctxt;
+        val (decls2, ctxt2) = ML_Context.expand_antiquotes_list term_args ctxt1;
+        val (decls3, ctxt3) = ML_Context.expand_antiquotes_list fn_body ctxt2;
         fun decl' ctxt' =
           let
             val (ml_args_env1, ml_args_body1) = split_list (decls1 ctxt');
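
Note: the removed local helper ml_args has, up to argument order, the same body as the new ML_Context.expand_antiquotes_list introduced in ml_context.ML below; a hedged sketch of the correspondence:

    (* sketch: the old local helper expressed via the new library function *)
    fun ml_args ctxt args = ML_Context.expand_antiquotes_list args ctxt;
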
--- a/src/Pure/ML/ml_context.ML	Wed Oct 20 18:13:17 2021 +0200
+++ b/src/Pure/ML/ml_context.ML	Wed Oct 20 20:04:28 2021 +0200
@@ -10,11 +10,14 @@
   val struct_name: Proof.context -> string
   val variant: string -> Proof.context -> string * Proof.context
   type decl = Proof.context -> ML_Lex.token list * ML_Lex.token list
+  type decls = Proof.context -> (ML_Lex.token list * ML_Lex.token list) list
   type antiquotation = Position.range -> Token.src -> Proof.context -> decl * Proof.context
   val add_antiquotation: binding -> bool -> antiquotation -> theory -> theory
   val print_antiquotations: bool -> Proof.context -> unit
   val expand_antiquotes: ML_Lex.token Antiquote.antiquote list ->
     Proof.context -> decl * Proof.context
+  val expand_antiquotes_list: ML_Lex.token Antiquote.antiquote list list ->
+    Proof.context -> decls * Proof.context
   val eval: ML_Compiler.flags -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit
   val eval_file: ML_Compiler.flags -> Path.T -> unit
   val eval_source: ML_Compiler.flags -> Input.source -> unit
@@ -56,6 +59,8 @@
 type decl =
   Proof.context -> ML_Lex.token list * ML_Lex.token list;  (*final context -> ML env, ML body*)
 
+type decls = Proof.context -> (ML_Lex.token list * ML_Lex.token list) list;
+
 type antiquotation =
   Position.range -> Token.src -> Proof.context -> decl * Proof.context;
 
@@ -116,6 +121,12 @@
     fun decl ctxt'' = decls |> map (fn d => d ctxt'') |> split_list |> apply2 flat;
   in (decl, ctxt') end;
 
+fun expand_antiquotes_list antss ctxt =
+  let
+    val (decls, ctxt') = fold_map expand_antiquotes antss ctxt;
+    fun decl' ctxt'' = map (fn decl => decl ctxt'') decls;
+  in (decl', ctxt') end
+
 end;
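
Note: a hedged sketch of the intended calling pattern for expand_antiquotes_list, following its use in ml_antiquotations.ML above; the identifiers antss (a list of antiquoted ML token lists) and ctxt (a Proof.context) are placeholders:

    (* each inner antiquote list is expanded separately; applying the resulting
       decls value to the final context yields one (ML env, ML body) pair per
       input element *)
    val (decls, ctxt') = ML_Context.expand_antiquotes_list antss ctxt;
    val args: (ML_Lex.token list * ML_Lex.token list) list = decls ctxt';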