src/Pure/Isar/outer_syntax.ML
changeset 7750 e63416829538
parent 7746 b52c1a632e6a
child 7755 01e3d545ced8
--- a/src/Pure/Isar/outer_syntax.ML	Wed Oct 06 00:32:53 1999 +0200
+++ b/src/Pure/Isar/outer_syntax.ML	Wed Oct 06 00:33:53 1999 +0200
@@ -42,6 +42,8 @@
   type parser
   val command: string -> string -> string ->
     (token list -> (Toplevel.transition -> Toplevel.transition) * token list) -> parser
+  val markup_command: string -> string -> string ->
+    (token list -> (Toplevel.transition -> Toplevel.transition) * token list) -> parser
   val improper_command: string -> string -> string ->
     (token list -> (Toplevel.transition -> Toplevel.transition) * token list) -> parser
   val dest_keywords: unit -> string list
@@ -59,6 +61,7 @@
 structure OuterSyntax: OUTER_SYNTAX =
 struct
 
+structure T = OuterLex;
 structure P = OuterParse;
 
 
@@ -93,13 +96,14 @@
 
 (* parsers *)
 
-type token = OuterLex.token;
+type token = T.token;
 type parser_fn = token list -> (Toplevel.transition -> Toplevel.transition) * token list;
 
 datatype parser =
-  Parser of string * (string * string) * bool * parser_fn;
+  Parser of string * (string * string * bool) * bool * parser_fn;
 
-fun parser int_only name comment kind parse = Parser (name, (comment, kind), int_only, parse);
+fun parser int_only markup name comment kind parse =
+  Parser (name, (comment, kind, markup), int_only, parse);
 
 
 (* parse command *)
@@ -133,7 +137,9 @@
 local
 
 val global_lexicons = ref (Scan.empty_lexicon, Scan.empty_lexicon);
-val global_parsers = ref (Symtab.empty: ((string * string) * (bool * parser_fn)) Symtab.table);
+val global_parsers =
+  ref (Symtab.empty: (((string * string) * (bool * parser_fn)) * bool) Symtab.table);
+val global_markups = ref ([]: string list);
 
 fun change_lexicons f =
   let val lexs = f (! global_lexicons) in
@@ -142,18 +148,24 @@
     | bads => error ("Clash of outer syntax commands and keywords: " ^ commas_quote bads))
   end;
 
-fun change_parsers f = global_parsers := f (! global_parsers);
+fun get_markup (names, (name, (_, true))) = name :: names
+  | get_markup (names, _) = names;
+
+fun make_markups () = global_markups := Symtab.foldl get_markup ([], ! global_parsers);
+fun change_parsers f = (global_parsers := f (! global_parsers); make_markups ());
 
 in
 
-(* get current lexers / parsers *)
+
+(* get current syntax *)
 
 (*Note: the syntax for files is statically determined at the very
   beginning; for interactive processing it may change dynamically.*)
 
 fun get_lexicons () = ! global_lexicons;
 fun get_parsers () = ! global_parsers;
-fun get_parser () = apsome snd o curry Symtab.lookup (! global_parsers);
+fun get_parser () = apsome (#2 o #1) o curry Symtab.lookup (! global_parsers);
+fun is_markup name = name mem_string ! global_markups;
 
 
 (* augment syntax *)
@@ -161,10 +173,10 @@
 fun add_keywords keywords = change_lexicons (apfst (fn lex =>
   (Scan.extend_lexicon lex (map Symbol.explode keywords))));
 
-fun add_parser (tab, Parser (name, comment, int_only, parse)) =
+fun add_parser (tab, Parser (name, (comment, kind, markup), int_only, parse)) =
  (if is_none (Symtab.lookup (tab, name)) then ()
   else warning ("Redefined outer syntax command " ^ quote name);
-  Symtab.update ((name, (comment, (int_only, parse))), tab));
+  Symtab.update ((name, (((comment, kind), (int_only, parse)), markup)), tab));
 
 fun add_parsers parsers =
   (change_parsers (fn tab => foldl add_parser (tab, parsers));
@@ -179,7 +191,7 @@
 fun dest_keywords () = Scan.dest_lexicon (#1 (get_lexicons ()));
 
 fun dest_parsers () =
-  map (fn (name, ((cmt, kind), (int_only, _))) => (name, cmt, kind, int_only))
+  map (fn (name, (((cmt, kind), (int_only, _)), _)) => (name, cmt, kind, int_only))
     (Symtab.dest (get_parsers ()));
 
 fun print_outer_syntax () =
@@ -211,8 +223,8 @@
 val headerN = "header";
 val theoryN = "theory";
 
-val theory_keyword = OuterParse.$$$ theoryN;
-val header_keyword = OuterParse.$$$ headerN;
+val theory_keyword = P.$$$ theoryN;
+val header_keyword = P.$$$ headerN;
 val semicolon = P.$$$ ";";
 
 
@@ -221,7 +233,7 @@
 local
 
 val no_terminator =
-  Scan.unless semicolon (Scan.one (OuterLex.not_sync andf OuterLex.not_eof));
+  Scan.unless semicolon (Scan.one (T.not_sync andf T.not_eof));
 
 val recover = Scan.prompt "recover# " (Scan.repeat no_terminator);
 
@@ -229,8 +241,8 @@
 
 fun source term do_recover cmd src =
   src
-  |> Source.source OuterLex.stopper
-    (Scan.bulk (fn xs => OuterParse.!!! (command term (cmd ())) xs))
+  |> Source.source T.stopper
+    (Scan.bulk (fn xs => P.!!! (command term (cmd ())) xs))
     (if do_recover then Some recover else None)
   |> Source.mapfilter I;
 
@@ -239,11 +251,11 @@
 fun token_source (src, pos) =
   src
   |> Symbol.source false
-  |> OuterLex.source false (K (get_lexicons ())) pos;
+  |> T.source false (K (get_lexicons ())) pos;
 
 fun filter_proper src =
   src
-  |> Source.filter OuterLex.is_proper;
+  |> Source.filter T.is_proper;
 
 
 (* scan header *)
@@ -251,9 +263,9 @@
 fun scan_header get_lex scan (src, pos) =
   src
   |> Symbol.source false
-  |> OuterLex.source false (fn () => (get_lex (), Scan.empty_lexicon)) pos
+  |> T.source false (fn () => (get_lex (), Scan.empty_lexicon)) pos
   |> filter_proper
-  |> Source.source OuterLex.stopper (Scan.single scan) None
+  |> Source.source T.stopper (Scan.single scan) None
   |> (fst o the o Source.get_single);
 
 
@@ -317,6 +329,35 @@
 end;
 
 
+(* present theory source *)
+
+local
+
+val scan_markup =
+  Scan.one T.not_eof :-- (fn tok =>
+    if T.is_kind T.Command tok andalso is_markup (T.val_of tok) then
+      (Scan.any (not o T.is_proper) |-- P.text) >> Some
+    else Scan.succeed None);
+
+fun invisible_token (tok, arg) =
+  is_none arg andalso T.keyword_with (equal ";") tok orelse T.is_kind T.Comment tok;
+
+in
+
+(*note: lazy evaluation ahead*)
+
+fun present_toks text pos () =
+  token_source (Source.of_list (Library.untabify text), pos)
+  |> Source.source T.stopper (Scan.bulk scan_markup) None
+  |> Source.exhaust
+  |> filter_out invisible_token;
+
+fun present_text text () =
+  Source.exhaust (Symbol.source false (Source.of_list (Library.untabify text)));
+
+end;
+
+
 (* load_thy --- read text (including header) *)
 
 local
@@ -343,16 +384,13 @@
     val text = explode (File.read path);
     val src = Source.of_list text;
    val pos = Path.position path;
-
-    fun present_toks () =
-      Source.exhaust (token_source (Source.of_list (Library.untabify text), pos));
-    fun present_text () =
-      Source.exhaust (Symbol.source false (Source.of_list (Library.untabify text)));
   in
     Present.init_theory name;
     if is_old_theory (src, pos) then ThySyn.load_thy name text
-    else (Toplevel.excursion_error (parse_thy (src, pos)); Present.token_source name present_toks);
-    Present.verbatim_source name present_text
+    else
+     (Toplevel.excursion_error (parse_thy (src, pos));
+      Present.token_source name (present_toks text pos));
+    Present.verbatim_source name (present_text text)
   end;
 
 in
@@ -375,7 +413,7 @@
 fun isar term no_pos =
   Source.tty
   |> Symbol.source true
-  |> OuterLex.source true get_lexicons
+  |> T.source true get_lexicons
     (if no_pos then Position.none else Position.line_name 1 "stdin")
   |> filter_proper
   |> source term true get_parser;
@@ -410,8 +448,9 @@
 
 
 (*final declarations of this structure!*)
-val command = parser false;
-val improper_command = parser true;
+val command = parser false false;
+val markup_command = parser false true;
+val improper_command = parser true false;
 
 
 end;
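
A command registered through the new markup_command entry point goes into the same parser table as an ordinary command, but is additionally recorded in global_markups, so that present_toks can recognise it in the token stream and keep its text argument for theory presentation. A minimal usage sketch of such a registration follows; the command name "section", the kind string "theory", and the semantic function IsarThy.add_section are illustrative assumptions only and are not part of this changeset.

  (*hypothetical markup command registration -- "section", the kind string
    "theory" and IsarThy.add_section are assumed for illustration*)
  val sectionP =
    OuterSyntax.markup_command "section" "section heading" "theory"
      (OuterParse.text >> (Toplevel.theory o IsarThy.add_section));

  val _ = OuterSyntax.add_parsers [sectionP];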