explicit type Keyword.keywords;
author wenzelm
Wed Nov 05 20:20:57 2014 +0100 (2014-11-05)
changeset 58903 38c72f5f6c2e
parent 58902 938bbacda12d
child 58904 f49c4f883c58
explicit type Keyword.keywords;
tuned signature;
src/Doc/antiquote_setup.ML
src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML
src/HOL/Tools/Lifting/lifting_setup.ML
src/HOL/Tools/Sledgehammer/sledgehammer_util.ML
src/HOL/Tools/try0.ML
src/Pure/Isar/attrib.ML
src/Pure/Isar/keyword.ML
src/Pure/Isar/outer_syntax.ML
src/Pure/Isar/token.ML
src/Pure/ML/ml_context.ML
src/Pure/PIDE/document.ML
src/Pure/PIDE/resources.ML
src/Pure/ROOT.ML
src/Pure/Thy/thy_header.ML
src/Pure/Thy/thy_header.scala
src/Pure/Thy/thy_output.ML
src/Pure/Tools/find_consts.ML
src/Pure/Tools/find_theorems.ML
src/Pure/Tools/rail.ML
src/Tools/Code/code_target.ML
     1.1 --- a/src/Doc/antiquote_setup.ML	Wed Nov 05 20:05:32 2014 +0100
     1.2 +++ b/src/Doc/antiquote_setup.ML	Wed Nov 05 20:20:57 2014 +0100
     1.3 @@ -195,7 +195,7 @@
     1.4    is_some (Keyword.command_keyword name) andalso
     1.5      let
     1.6        val markup =
     1.7 -        Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none name
     1.8 +        Outer_Syntax.scan (Keyword.get_keywords ()) Position.none name
     1.9          |> maps (Outer_Syntax.command_reports (#2 (Outer_Syntax.get_syntax ())))
    1.10          |> map (snd o fst);
    1.11        val _ = Context_Position.reports ctxt (map (pair pos) markup);
     2.1 --- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Nov 05 20:05:32 2014 +0100
     2.2 +++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Nov 05 20:20:57 2014 +0100
     2.3 @@ -535,7 +535,7 @@
     2.4      fun do_method named_thms ctxt =
     2.5        let
     2.6          val ref_of_str =
     2.7 -          suffix ";" #> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none #> Parse.xthm
     2.8 +          suffix ";" #> Outer_Syntax.scan (Keyword.get_keywords ()) Position.none #> Parse.xthm
     2.9            #> fst
    2.10          val thms = named_thms |> maps snd
    2.11          val facts = named_thms |> map (ref_of_str o fst o fst)
    2.12 @@ -561,7 +561,7 @@
    2.13            let
    2.14              val (type_encs, lam_trans) =
    2.15                !meth
    2.16 -              |> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.start
    2.17 +              |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start
    2.18                |> filter Token.is_proper |> tl
    2.19                |> Metis_Tactic.parse_metis_options |> fst
    2.20                |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
     3.1 --- a/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Nov 05 20:05:32 2014 +0100
     3.2 +++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Nov 05 20:20:57 2014 +0100
     3.3 @@ -221,7 +221,7 @@
     3.4  
     3.5      val thy = Proof_Context.theory_of lthy
     3.6      val dummy_thm = Thm.transfer thy Drule.dummy_thm
     3.7 -    val pointer = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none bundle_name
     3.8 +    val pointer = Outer_Syntax.scan (Keyword.get_keywords ()) Position.none bundle_name
     3.9      val restore_lifting_att = 
    3.10        ([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer])
    3.11    in
     4.1 --- a/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML	Wed Nov 05 20:05:32 2014 +0100
     4.2 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML	Wed Nov 05 20:20:57 2014 +0100
     4.3 @@ -80,7 +80,7 @@
     4.4  val subgoal_count = Try.subgoal_count
     4.5  
     4.6  fun reserved_isar_keyword_table () =
     4.7 -  Keyword.dest () |-> union (op =) |> map (rpair ()) |> Symtab.make
     4.8 +  Symtab.make_set (Scan.dest_lexicon (Scan.merge_lexicons (Keyword.get_lexicons ())));
     4.9  
    4.10  exception TOO_MANY of unit
    4.11  
    4.12 @@ -126,12 +126,11 @@
    4.13  
    4.14  fun thms_of_name ctxt name =
    4.15    let
    4.16 -    val lex = Keyword.get_lexicons
    4.17      val get = maps (Proof_Context.get_fact ctxt o fst)
    4.18    in
    4.19      Source.of_string name
    4.20      |> Symbol.source
    4.21 -    |> Token.source lex Position.start
    4.22 +    |> Token.source Keyword.get_keywords Position.start
    4.23      |> Token.source_proper
    4.24      |> Source.source Token.stopper (Parse.xthms1 >> get)
    4.25      |> Source.exhaust
     5.1 --- a/src/HOL/Tools/try0.ML	Wed Nov 05 20:05:32 2014 +0100
     5.2 +++ b/src/HOL/Tools/try0.ML	Wed Nov 05 20:20:57 2014 +0100
     5.3 @@ -43,7 +43,7 @@
     5.4  
     5.5  fun parse_method s =
     5.6    enclose "(" ")" s
     5.7 -  |> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.start
     5.8 +  |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start
     5.9    |> filter Token.is_proper
    5.10    |> Scan.read Token.stopper Method.parse
    5.11    |> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");
     6.1 --- a/src/Pure/Isar/attrib.ML	Wed Nov 05 20:05:32 2014 +0100
     6.2 +++ b/src/Pure/Isar/attrib.ML	Wed Nov 05 20:20:57 2014 +0100
     6.3 @@ -289,9 +289,9 @@
     6.4  fun read_attribs ctxt source =
     6.5    let
     6.6      val syms = Symbol_Pos.source_explode source;
     6.7 -    val lex = #1 (Keyword.get_lexicons ());
     6.8 +    val keywords = Keyword.get_keywords ();
     6.9    in
    6.10 -    (case Token.read lex Parse.attribs syms of
    6.11 +    (case Token.read_no_commands keywords Parse.attribs syms of
    6.12        [raw_srcs] => check_attribs ctxt raw_srcs
    6.13      | _ => error ("Malformed attributes" ^ Position.here (#pos source)))
    6.14    end;
     7.1 --- a/src/Pure/Isar/keyword.ML	Wed Nov 05 20:05:32 2014 +0100
     7.2 +++ b/src/Pure/Isar/keyword.ML	Wed Nov 05 20:20:57 2014 +0100
     7.3 @@ -41,13 +41,20 @@
     7.4    type spec = (string * string list) * string list
     7.5    val spec: spec -> T
     7.6    val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
     7.7 +  type keywords
     7.8 +  val minor_keywords: keywords -> Scan.lexicon
     7.9 +  val major_keywords: keywords -> Scan.lexicon
    7.10 +  val empty_keywords: keywords
    7.11 +  val merge_keywords: keywords * keywords -> keywords
    7.12 +  val no_command_keywords: keywords -> keywords
    7.13 +  val add: string * T option -> keywords -> keywords
    7.14 +  val define: string * T option -> unit
    7.15 +  val get_keywords: unit -> keywords
    7.16    val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
    7.17    val is_keyword: string -> bool
    7.18    val command_keyword: string -> T option
    7.19    val command_files: string -> Path.T -> Path.T list
    7.20    val command_tags: string -> string list
    7.21 -  val dest: unit -> string list * string list
    7.22 -  val define: string * T option -> unit
    7.23    val is_diag: string -> bool
    7.24    val is_heading: string -> bool
    7.25    val is_theory_begin: string -> bool
    7.26 @@ -144,32 +151,69 @@
    7.27  
    7.28  
    7.29  
    7.30 -(** global keyword tables **)
    7.31 +(** keyword tables **)
    7.32 +
    7.33 +(* type keywords *)
    7.34  
    7.35  datatype keywords = Keywords of
    7.36 - {lexicons: Scan.lexicon * Scan.lexicon,  (*minor, major*)
    7.37 -  commands: T Symtab.table};  (*command classification*)
    7.38 + {minor: Scan.lexicon,
    7.39 +  major: Scan.lexicon,
    7.40 +  command_kinds: T Symtab.table};
    7.41 +
    7.42 +fun minor_keywords (Keywords {minor, ...}) = minor;
    7.43 +fun major_keywords (Keywords {major, ...}) = major;
    7.44 +fun command_kinds (Keywords {command_kinds, ...}) = command_kinds;
    7.45 +
    7.46 +fun make_keywords (minor, major, command_kinds) =
    7.47 +  Keywords {minor = minor, major = major, command_kinds = command_kinds};
    7.48  
    7.49 -fun make_keywords (lexicons, commands) =
    7.50 -  Keywords {lexicons = lexicons, commands = commands};
    7.51 +fun map_keywords f (Keywords {minor, major, command_kinds}) =
    7.52 +  make_keywords (f (minor, major, command_kinds));
    7.53 +
    7.54 +val empty_keywords =
    7.55 +  make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty);
    7.56  
    7.57 -local
    7.58 +fun merge_keywords
    7.59 +  (Keywords {minor = minor1, major = major1, command_kinds = command_kinds1},
    7.60 +    Keywords {minor = minor2, major = major2, command_kinds = command_kinds2}) =
    7.61 +  make_keywords
    7.62 +   (Scan.merge_lexicons (minor1, minor2),
    7.63 +    Scan.merge_lexicons (major1, major2),
    7.64 +    Symtab.merge (K true) (command_kinds1, command_kinds2));
    7.65  
    7.66 -val global_keywords =
    7.67 -  Unsynchronized.ref (make_keywords ((Scan.empty_lexicon, Scan.empty_lexicon), Symtab.empty));
    7.68 +val no_command_keywords =
    7.69 +  map_keywords (fn (minor, _, _) => (minor, Scan.empty_lexicon, Symtab.empty));
    7.70  
    7.71 -in
    7.72 +
    7.73 +(* add keywords *)
    7.74  
    7.75 -fun get_keywords () = ! global_keywords;
    7.76 +fun add (name, opt_kind) = map_keywords (fn (minor, major, command_kinds) =>
    7.77 +  (case opt_kind of
    7.78 +    NONE =>
    7.79 +      let
    7.80 +        val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
    7.81 +      in (minor', major, command_kinds) end
    7.82 +  | SOME kind =>
    7.83 +      let
    7.84 +        val major' = Scan.extend_lexicon (Symbol.explode name) major;
    7.85 +        val command_kinds' = Symtab.update (name, kind) command_kinds;
    7.86 +      in (minor, major', command_kinds') end));
    7.87  
    7.88 -fun change_keywords f = CRITICAL (fn () =>
    7.89 -  Unsynchronized.change global_keywords
    7.90 -    (fn Keywords {lexicons, commands} => make_keywords (f (lexicons, commands))));
    7.91 +
    7.92 +(* global state *)
    7.93 +
    7.94 +local val global_keywords = Unsynchronized.ref empty_keywords in
    7.95 +
    7.96 +fun define decl = CRITICAL (fn () => Unsynchronized.change global_keywords (add decl));
    7.97 +fun get_keywords () = ! global_keywords;
    7.98  
    7.99  end;
   7.100  
   7.101 -fun get_lexicons () = get_keywords () |> (fn Keywords {lexicons, ...} => lexicons);
   7.102 -fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands);
   7.103 +fun get_lexicons () =
   7.104 +  let val keywords = get_keywords ()
   7.105 +  in (minor_keywords keywords, major_keywords keywords) end;
   7.106 +
   7.107 +fun get_commands () = command_kinds (get_keywords ());
   7.108  
   7.109  
   7.110  (* lookup *)
   7.111 @@ -192,23 +236,6 @@
   7.112  
   7.113  val command_tags = these o Option.map tags_of o command_keyword;
   7.114  
   7.115 -fun dest () = pairself (sort_strings o Scan.dest_lexicon) (get_lexicons ());
   7.116 -
   7.117 -
   7.118 -(* define *)
   7.119 -
   7.120 -fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) =>
   7.121 -  (case opt_kind of
   7.122 -    NONE =>
   7.123 -      let
   7.124 -        val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
   7.125 -      in ((minor', major), commands) end
   7.126 -  | SOME kind =>
   7.127 -      let
   7.128 -        val major' = Scan.extend_lexicon (Symbol.explode name) major;
   7.129 -        val commands' = Symtab.update (name, kind) commands;
   7.130 -      in ((minor, major'), commands') end));
   7.131 -
   7.132  
   7.133  (* command categories *)
   7.134  
     8.1 --- a/src/Pure/Isar/outer_syntax.ML	Wed Nov 05 20:05:32 2014 +0100
     8.2 +++ b/src/Pure/Isar/outer_syntax.ML	Wed Nov 05 20:20:57 2014 +0100
     8.3 @@ -12,7 +12,7 @@
     8.4    type syntax
     8.5    val batch_mode: bool Unsynchronized.ref
     8.6    val is_markup: syntax -> Thy_Output.markup -> string -> bool
     8.7 -  val get_syntax: unit -> (Scan.lexicon * Scan.lexicon) * syntax
     8.8 +  val get_syntax: unit -> Keyword.keywords * syntax
     8.9    val check_syntax: unit -> unit
    8.10    type command_spec = (string * Keyword.T) * Position.T
    8.11    val command: command_spec -> string ->
    8.12 @@ -29,9 +29,8 @@
    8.13      (local_theory -> Proof.state) parser -> unit
    8.14    val help_syntax: string list -> unit
    8.15    val print_syntax: unit -> unit
    8.16 -  val scan: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list
    8.17 -  val parse: (Scan.lexicon * Scan.lexicon) * syntax ->
    8.18 -    Position.T -> string -> Toplevel.transition list
    8.19 +  val scan: Keyword.keywords -> Position.T -> string -> Token.T list
    8.20 +  val parse: Keyword.keywords * syntax -> Position.T -> string -> Toplevel.transition list
    8.21    val parse_spans: Token.T list -> Command_Span.span list
    8.22    val side_comments: Token.T list -> Token.T list
    8.23    val command_reports: syntax -> Token.T -> Position.report_text list
    8.24 @@ -137,13 +136,14 @@
    8.25  
    8.26  in
    8.27  
    8.28 -fun get_syntax () = CRITICAL (fn () => (Keyword.get_lexicons (), ! global_syntax));
    8.29 +fun get_syntax () = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax));
    8.30  
    8.31  fun check_syntax () =
    8.32    let
    8.33 -    val ((_, major), syntax) = CRITICAL (fn () => (Keyword.dest (), ! global_syntax));
    8.34 +    val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax));
    8.35 +    val major = Keyword.major_keywords keywords;
    8.36    in
    8.37 -    (case subtract (op =) (map #1 (dest_commands syntax)) major of
    8.38 +    (case subtract (op =) (map #1 (dest_commands syntax)) (Scan.dest_lexicon major) of
    8.39        [] => ()
    8.40      | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing))
    8.41    end;
    8.42 @@ -178,10 +178,11 @@
    8.43  
    8.44  fun print_syntax () =
    8.45    let
    8.46 -    val ((keywords, _), syntax) = CRITICAL (fn () => (Keyword.dest (), #2 (get_syntax ())));
    8.47 +    val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), #2 (get_syntax ())));
    8.48 +    val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords);
    8.49      val commands = dest_commands syntax;
    8.50    in
    8.51 -    [Pretty.strs ("syntax keywords:" :: map quote keywords),
    8.52 +    [Pretty.strs ("keywords:" :: map quote minor),
    8.53        Pretty.big_list "commands:" (map pretty_command commands)]
    8.54      |> Pretty.writeln_chunks
    8.55    end;
    8.56 @@ -222,16 +223,16 @@
    8.57  
    8.58  (* off-line scanning/parsing *)
    8.59  
    8.60 -fun scan lexs pos =
    8.61 +fun scan keywords pos =
    8.62    Source.of_string #>
    8.63    Symbol.source #>
    8.64 -  Token.source (K lexs) pos #>
    8.65 +  Token.source (K keywords) pos #>
    8.66    Source.exhaust;
    8.67  
    8.68 -fun parse (lexs, syntax) pos str =
    8.69 +fun parse (keywords, syntax) pos str =
    8.70    Source.of_string str
    8.71    |> Symbol.source
    8.72 -  |> Token.source (K lexs) pos
    8.73 +  |> Token.source (K keywords) pos
    8.74    |> commands_source syntax
    8.75    |> Source.exhaust;
    8.76  
     9.1 --- a/src/Pure/Isar/token.ML	Wed Nov 05 20:05:32 2014 +0100
     9.2 +++ b/src/Pure/Isar/token.ML	Wed Nov 05 20:20:57 2014 +0100
     9.3 @@ -79,16 +79,16 @@
     9.4    val pretty_src: Proof.context -> src -> Pretty.T
     9.5    val ident_or_symbolic: string -> bool
     9.6    val source_proper: (T, 'a) Source.source -> (T, (T, 'a) Source.source) Source.source
     9.7 -  val source: (unit -> Scan.lexicon * Scan.lexicon) ->
     9.8 +  val source: (unit -> Keyword.keywords) ->
     9.9      Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
    9.10        (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
    9.11 -  val source_strict: (unit -> Scan.lexicon * Scan.lexicon) ->
    9.12 +  val source_strict: (unit -> Keyword.keywords) ->
    9.13      Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
    9.14        (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
    9.15    type 'a parser = T list -> 'a * T list
    9.16    type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
    9.17 -  val read: Scan.lexicon -> 'a parser -> Symbol_Pos.T list -> 'a list
    9.18 -  val read_antiq: Scan.lexicon -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a
    9.19 +  val read_no_commands: Keyword.keywords -> 'a parser -> Symbol_Pos.T list -> 'a list
    9.20 +  val read_antiq: Keyword.keywords -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a
    9.21    val syntax_generic: 'a context_parser -> src -> Context.generic -> 'a * Context.generic
    9.22    val syntax: 'a context_parser -> src -> Proof.context -> 'a * Proof.context
    9.23  end;
    9.24 @@ -530,7 +530,7 @@
    9.25  fun token_range k (pos1, (ss, pos2)) =
    9.26    Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.content ss), Slot);
    9.27  
    9.28 -fun scan (lex1, lex2) = !!! "bad input"
    9.29 +fun scan keywords = !!! "bad input"
    9.30    (Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
    9.31      Symbol_Pos.scan_string_bq err_prefix >> token_range AltString ||
    9.32      scan_verbatim >> token_range Verbatim ||
    9.33 @@ -539,8 +539,8 @@
    9.34      scan_space >> token Space ||
    9.35      (Scan.max token_leq
    9.36        (Scan.max token_leq
    9.37 -        (Scan.literal lex2 >> pair Command)
    9.38 -        (Scan.literal lex1 >> pair Keyword))
    9.39 +        (Scan.literal (Keyword.major_keywords keywords) >> pair Command)
    9.40 +        (Scan.literal (Keyword.minor_keywords keywords) >> pair Keyword))
    9.41        (Lexicon.scan_longid >> pair LongIdent ||
    9.42          Lexicon.scan_id >> pair Ident ||
    9.43          Lexicon.scan_var >> pair Var ||
    9.44 @@ -561,14 +561,14 @@
    9.45  
    9.46  in
    9.47  
    9.48 -fun source' strict get_lex =
    9.49 +fun source' strict get_keywords =
    9.50    let
    9.51 -    val scan_strict = Scan.bulk (fn xs => scan (get_lex ()) xs);
    9.52 +    val scan_strict = Scan.bulk (fn xs => scan (get_keywords ()) xs);
    9.53      val scan = if strict then scan_strict else Scan.recover scan_strict recover;
    9.54    in Source.source Symbol_Pos.stopper scan end;
    9.55  
    9.56 -fun source get_lex pos src = Symbol_Pos.source pos src |> source' false get_lex;
    9.57 -fun source_strict get_lex pos src = Symbol_Pos.source pos src |> source' true get_lex;
    9.58 +fun source get pos src = Symbol_Pos.source pos src |> source' false get;
    9.59 +fun source_strict get pos src = Symbol_Pos.source pos src |> source' true get;
    9.60  
    9.61  end;
    9.62  
    9.63 @@ -582,19 +582,19 @@
    9.64  
    9.65  (* read source *)
    9.66  
    9.67 -fun read lex scan syms =
    9.68 +fun read_no_commands keywords scan syms =
    9.69    Source.of_list syms
    9.70 -  |> source' true (K (lex, Scan.empty_lexicon))
    9.71 +  |> source' true (K (Keyword.no_command_keywords keywords))
    9.72    |> source_proper
    9.73    |> Source.source stopper (Scan.error (Scan.bulk scan))
    9.74    |> Source.exhaust;
    9.75  
    9.76 -fun read_antiq lex scan (syms, pos) =
    9.77 +fun read_antiq keywords scan (syms, pos) =
    9.78    let
    9.79      fun err msg =
    9.80        cat_error msg ("Malformed antiquotation" ^ Position.here pos ^ ":\n" ^
    9.81          "@{" ^ Symbol_Pos.content syms ^ "}");
    9.82 -    val res = read lex scan syms handle ERROR msg => err msg;
    9.83 +    val res = read_no_commands keywords scan syms handle ERROR msg => err msg;
    9.84    in (case res of [x] => x | _ => err "") end;
    9.85  
    9.86  
    10.1 --- a/src/Pure/ML/ml_context.ML	Wed Nov 05 20:05:32 2014 +0100
    10.2 +++ b/src/Pure/ML/ml_context.ML	Wed Nov 05 20:20:57 2014 +0100
    10.3 @@ -116,14 +116,14 @@
    10.4        then ((begin_env0, map (fn Antiquote.Text tok => tok) ants), opt_ctxt)
    10.5        else
    10.6          let
    10.7 -          val lex = #1 (Keyword.get_lexicons ());
    10.8 +          val keywords = Keyword.get_keywords ();
    10.9            fun no_decl _ = ([], []);
   10.10  
   10.11            fun expand (Antiquote.Text tok) ctxt = (K ([], [tok]), ctxt)
   10.12              | expand (Antiquote.Antiq (ss, {range, ...})) ctxt =
   10.13                  let
   10.14                    val (decl, ctxt') =
   10.15 -                    apply_antiquotation (Token.read_antiq lex antiq (ss, #1 range)) ctxt;
   10.16 +                    apply_antiquotation (Token.read_antiq keywords antiq (ss, #1 range)) ctxt;
   10.17                    val decl' = decl #> pairself (ML_Lex.tokenize #> map (ML_Lex.set_range range));
   10.18                  in (decl', ctxt') end;
   10.19  
    11.1 --- a/src/Pure/PIDE/document.ML	Wed Nov 05 20:05:32 2014 +0100
    11.2 +++ b/src/Pure/PIDE/document.ML	Wed Nov 05 20:20:57 2014 +0100
    11.3 @@ -318,7 +318,7 @@
    11.4        val span =
    11.5          Lazy.lazy (fn () =>
    11.6            Position.setmp_thread_data (Position.id_only id)
    11.7 -            (fn () => Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.id id) text) ());
    11.8 +            (fn () => Outer_Syntax.scan (Keyword.get_keywords ()) (Position.id id) text) ());
    11.9        val _ =
   11.10          Position.setmp_thread_data (Position.id_only id)
   11.11            (fn () => Output.status (Markup.markup_only Markup.accepted)) ();
    12.1 --- a/src/Pure/PIDE/resources.ML	Wed Nov 05 20:05:32 2014 +0100
    12.2 +++ b/src/Pure/PIDE/resources.ML	Wed Nov 05 20:20:57 2014 +0100
    12.3 @@ -148,8 +148,7 @@
    12.4      val {name = (name, _), ...} = header;
    12.5      val _ = Thy_Header.define_keywords header;
    12.6  
    12.7 -    val lexs = Keyword.get_lexicons ();
    12.8 -    val toks = Outer_Syntax.scan lexs text_pos text;
    12.9 +    val toks = Outer_Syntax.scan (Keyword.get_keywords ()) text_pos text;
   12.10      val spans = Outer_Syntax.parse_spans toks;
   12.11      val elements = Thy_Syntax.parse_elements spans;
   12.12  
   12.13 @@ -165,13 +164,13 @@
   12.14      fun present () =
   12.15        let
   12.16          val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results);
   12.17 -        val ((minor, _), syntax) = Outer_Syntax.get_syntax ();
   12.18 +        val (keywords, syntax) = Outer_Syntax.get_syntax ();
   12.19        in
   12.20          if exists (Toplevel.is_skipped_proof o #2) res then
   12.21            warning ("Cannot present theory with skipped proofs: " ^ quote name)
   12.22          else
   12.23            let val tex_source =
   12.24 -            Thy_Output.present_thy minor Keyword.command_tags
   12.25 +            Thy_Output.present_thy keywords Keyword.command_tags
   12.26                (Outer_Syntax.is_markup syntax) res toks
   12.27              |> Buffer.content;
   12.28            in if document then Present.theory_output name tex_source else () end
    13.1 --- a/src/Pure/ROOT.ML	Wed Nov 05 20:05:32 2014 +0100
    13.2 +++ b/src/Pure/ROOT.ML	Wed Nov 05 20:20:57 2014 +0100
    13.3 @@ -230,8 +230,8 @@
    13.4  use "Isar/local_defs.ML";
    13.5  
    13.6  (*outer syntax*)
    13.7 +use "Isar/keyword.ML";
    13.8  use "Isar/token.ML";
    13.9 -use "Isar/keyword.ML";
   13.10  use "Isar/parse.ML";
   13.11  use "Isar/args.ML";
   13.12  
    14.1 --- a/src/Pure/Thy/thy_header.ML	Wed Nov 05 20:05:32 2014 +0100
    14.2 +++ b/src/Pure/Thy/thy_header.ML	Wed Nov 05 20:20:57 2014 +0100
    14.3 @@ -77,10 +77,13 @@
    14.4  val keywordsN = "keywords";
    14.5  val beginN = "begin";
    14.6  
    14.7 -val header_lexicons =
    14.8 -  pairself (Scan.make_lexicon o map Symbol.explode)
    14.9 -   (["%", "(", ")", ",", "::", "==", "and", beginN, importsN, keywordsN],
   14.10 -    [headerN, chapterN, sectionN, subsectionN, subsubsectionN, theoryN]);
   14.11 +val header_keywords =
   14.12 +  Keyword.empty_keywords
   14.13 +  |> fold (Keyword.add o rpair NONE)
   14.14 +    ["%", "(", ")", ",", "::", "==", "and", beginN, importsN, keywordsN]
   14.15 +  |> fold (Keyword.add o rpair (SOME Keyword.heading))
   14.16 +    [headerN, chapterN, sectionN, subsectionN, subsubsectionN]
   14.17 +  |> Keyword.add (theoryN, SOME Keyword.thy_begin);
   14.18  
   14.19  
   14.20  (* header args *)
   14.21 @@ -138,7 +141,7 @@
   14.22    str
   14.23    |> Source.of_string_limited 8000
   14.24    |> Symbol.source
   14.25 -  |> Token.source_strict (K header_lexicons) pos;
   14.26 +  |> Token.source_strict (K header_keywords) pos;
   14.27  
   14.28  fun read_source pos source =
   14.29    let val res =
    15.1 --- a/src/Pure/Thy/thy_header.scala	Wed Nov 05 20:05:32 2014 +0100
    15.2 +++ b/src/Pure/Thy/thy_header.scala	Wed Nov 05 20:20:57 2014 +0100
    15.3 @@ -27,7 +27,7 @@
    15.4    val AND = "and"
    15.5    val BEGIN = "begin"
    15.6  
    15.7 -  private val keywords =
    15.8 +  private val header_keywords =
    15.9      Keyword.Keywords(
   15.10        List("%", "(", ")", ",", "::", "==", AND, BEGIN,
   15.11          HEADER, CHAPTER, SECTION, SUBSECTION, SUBSUBSECTION, IMPORTS, KEYWORDS, THEORY))
   15.12 @@ -96,7 +96,7 @@
   15.13  
   15.14    def read(reader: Reader[Char]): Thy_Header =
   15.15    {
   15.16 -    val token = Token.Parsers.token(keywords)
   15.17 +    val token = Token.Parsers.token(header_keywords)
   15.18      val toks = new mutable.ListBuffer[Token]
   15.19  
   15.20      @tailrec def scan_to_begin(in: Reader[Char])
    16.1 --- a/src/Pure/Thy/thy_output.ML	Wed Nov 05 20:05:32 2014 +0100
    16.2 +++ b/src/Pure/Thy/thy_output.ML	Wed Nov 05 20:20:57 2014 +0100
    16.3 @@ -23,9 +23,9 @@
    16.4    val boolean: string -> bool
    16.5    val integer: string -> int
    16.6    datatype markup = Markup | MarkupEnv | Verbatim
    16.7 -  val eval_antiq: Scan.lexicon -> Toplevel.state -> Antiquote.antiq -> string
    16.8 +  val eval_antiq: Keyword.keywords -> Toplevel.state -> Antiquote.antiq -> string
    16.9    val check_text: Symbol_Pos.source -> Toplevel.state -> unit
   16.10 -  val present_thy: Scan.lexicon -> (string -> string list) -> (markup -> string -> bool) ->
   16.11 +  val present_thy: Keyword.keywords -> (string -> string list) -> (markup -> string -> bool) ->
   16.12      (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
   16.13    val pretty_text: Proof.context -> string -> Pretty.T
   16.14    val pretty_term: Proof.context -> term -> Pretty.T
   16.15 @@ -158,9 +158,9 @@
   16.16  
   16.17  (* eval_antiq *)
   16.18  
   16.19 -fun eval_antiq lex state ((ss, {range = (pos, _), ...}): Antiquote.antiq) =
   16.20 +fun eval_antiq keywords state ((ss, {range = (pos, _), ...}): Antiquote.antiq) =
   16.21    let
   16.22 -    val (opts, src) = Token.read_antiq lex antiq (ss, pos);
   16.23 +    val (opts, src) = Token.read_antiq keywords antiq (ss, pos);
   16.24      fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) ();
   16.25      val preview_ctxt = fold option opts (Toplevel.presentation_context_of state);
   16.26      val print_ctxt = Context_Position.set_visible false preview_ctxt;
   16.27 @@ -171,13 +171,13 @@
   16.28  
   16.29  (* check_text *)
   16.30  
   16.31 -fun eval_antiquote lex state (txt, pos) =
   16.32 +fun eval_antiquote keywords state (txt, pos) =
   16.33    let
   16.34      fun words (Antiquote.Text ss) = [(#1 (Symbol_Pos.range ss), Markup.words)]
   16.35        | words (Antiquote.Antiq _) = [];
   16.36  
   16.37      fun expand (Antiquote.Text ss) = Symbol_Pos.content ss
   16.38 -      | expand (Antiquote.Antiq antiq) = eval_antiq lex state antiq;
   16.39 +      | expand (Antiquote.Antiq antiq) = eval_antiq keywords state antiq;
   16.40  
   16.41      val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos);
   16.42      val _ = Position.reports (maps words ants);
   16.43 @@ -190,7 +190,7 @@
   16.44  fun check_text {delimited, text, pos} state =
   16.45   (Position.report pos (Markup.language_document delimited);
   16.46    if Toplevel.is_skipped_proof state then ()
   16.47 -  else ignore (eval_antiquote (#1 (Keyword.get_lexicons ())) state (text, pos)));
   16.48 +  else ignore (eval_antiquote (Keyword.get_keywords ()) state (text, pos)));
   16.49  
   16.50  
   16.51  
   16.52 @@ -207,8 +207,8 @@
   16.53    | MarkupEnvToken of string * (string * Position.T)
   16.54    | VerbatimToken of string * Position.T;
   16.55  
   16.56 -fun output_token lex state =
   16.57 -  let val eval = eval_antiquote lex state in
   16.58 +fun output_token keywords state =
   16.59 +  let val eval = eval_antiquote keywords state in
   16.60      fn NoToken => ""
   16.61       | BasicToken tok => Latex.output_basic tok
   16.62       | MarkupToken (cmd, txt) => Latex.output_markup cmd (eval txt)
   16.63 @@ -265,11 +265,11 @@
   16.64  
   16.65  in
   16.66  
   16.67 -fun present_span lex default_tags span state state'
   16.68 +fun present_span keywords default_tags span state state'
   16.69      (tag_stack, active_tag, newline, buffer, present_cont) =
   16.70    let
   16.71      val present = fold (fn (tok, (flag, 0)) =>
   16.72 -        Buffer.add (output_token lex state' tok)
   16.73 +        Buffer.add (output_token keywords state' tok)
   16.74          #> Buffer.add flag
   16.75        | _ => I);
   16.76  
   16.77 @@ -351,7 +351,7 @@
   16.78  
   16.79  in
   16.80  
   16.81 -fun present_thy lex default_tags is_markup command_results toks =
   16.82 +fun present_thy keywords default_tags is_markup command_results toks =
   16.83    let
   16.84      (* tokens *)
   16.85  
   16.86 @@ -423,7 +423,7 @@
   16.87      (* present commands *)
   16.88  
   16.89      fun present_command tr span st st' =
   16.90 -      Toplevel.setmp_thread_position tr (present_span lex default_tags span st st');
   16.91 +      Toplevel.setmp_thread_position tr (present_span keywords default_tags span st st');
   16.92  
   16.93      fun present _ [] = I
   16.94        | present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest;
    17.1 --- a/src/Pure/Tools/find_consts.ML	Wed Nov 05 20:05:32 2014 +0100
    17.2 +++ b/src/Pure/Tools/find_consts.ML	Wed Nov 05 20:20:57 2014 +0100
    17.3 @@ -143,7 +143,7 @@
    17.4  in
    17.5  
    17.6  fun read_query pos str =
    17.7 -  Outer_Syntax.scan (Keyword.get_lexicons ()) pos str
    17.8 +  Outer_Syntax.scan (Keyword.get_keywords ()) pos str
    17.9    |> filter Token.is_proper
   17.10    |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   17.11    |> #1;
    18.1 --- a/src/Pure/Tools/find_theorems.ML	Wed Nov 05 20:05:32 2014 +0100
    18.2 +++ b/src/Pure/Tools/find_theorems.ML	Wed Nov 05 20:20:57 2014 +0100
    18.3 @@ -529,7 +529,7 @@
    18.4  in
    18.5  
    18.6  fun read_query pos str =
    18.7 -  Outer_Syntax.scan (Keyword.get_lexicons ()) pos str
    18.8 +  Outer_Syntax.scan (Keyword.get_keywords ()) pos str
    18.9    |> filter Token.is_proper
   18.10    |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   18.11    |> #1;
    19.1 --- a/src/Pure/Tools/rail.ML	Wed Nov 05 20:05:32 2014 +0100
    19.2 +++ b/src/Pure/Tools/rail.ML	Wed Nov 05 20:20:57 2014 +0100
    19.3 @@ -316,7 +316,7 @@
    19.4  
    19.5  fun output_rules state rules =
    19.6    let
    19.7 -    val output_antiq = Thy_Output.eval_antiq (#1 (Keyword.get_lexicons ())) state;
    19.8 +    val output_antiq = Thy_Output.eval_antiq (Keyword.get_keywords ()) state;
    19.9      fun output_text b s =
   19.10        Output.output s
   19.11        |> b ? enclose "\\isakeyword{" "}"
    20.1 --- a/src/Tools/Code/code_target.ML	Wed Nov 05 20:05:32 2014 +0100
    20.2 +++ b/src/Tools/Code/code_target.ML	Wed Nov 05 20:20:57 2014 +0100
    20.3 @@ -695,7 +695,7 @@
    20.4    let
    20.5      val ctxt = Proof_Context.init_global (Thy_Info.get_theory thyname);
    20.6      val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o
    20.7 -      (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none);
    20.8 +      (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_keywords ()) Position.none);
    20.9    in case parse cmd_expr
   20.10     of SOME f => (writeln "Now generating code..."; f ctxt)
   20.11      | NONE => error ("Bad directive " ^ quote cmd_expr)