--- a/src/Pure/Isar/token.ML Wed Jan 24 11:56:54 2018 +0100
+++ b/src/Pure/Isar/token.ML Wed Jan 24 16:34:24 2018 +0100
@@ -82,8 +82,6 @@
val check_src: Proof.context -> (Proof.context -> 'a Name_Space.table) -> src -> src * 'a
val pretty_src: Proof.context -> src -> Pretty.T
val ident_or_symbolic: string -> bool
- val source': bool -> Keyword.keywords -> (Symbol_Pos.T, 'a) Source.source ->
- (T, (Symbol_Pos.T, 'a) Source.source) Source.source
val source_proper: (T, 'a) Source.source -> (T, (T, 'a) Source.source) Source.source
val source: Keyword.keywords ->
Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
@@ -92,7 +90,7 @@
Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
(Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
val read_cartouche: Symbol_Pos.T list -> T
- val tokenize: Keyword.keywords -> Symbol_Pos.T list -> T list
+ val tokenize: Keyword.keywords -> {strict: bool} -> Symbol_Pos.T list -> T list
val explode: Keyword.keywords -> Position.T -> string -> T list
val explode0: Keyword.keywords -> string -> T list
val print_name: Keyword.keywords -> string -> string
@@ -652,14 +650,17 @@

in

-fun source' strict keywords =
+fun make_source keywords {strict} =
let
val scan_strict = Scan.bulk (scan_token keywords);
val scan = if strict then scan_strict else Scan.recover scan_strict recover;
in Source.source Symbol_Pos.stopper scan end;

-fun source keywords pos src = Symbol_Pos.source pos src |> source' false keywords;
-fun source_strict keywords pos src = Symbol_Pos.source pos src |> source' true keywords;
+fun source keywords pos src =
+ Symbol_Pos.source pos src |> make_source keywords {strict = false};
+
+fun source_strict keywords pos src =
+  Symbol_Pos.source pos src |> make_source keywords {strict = true};

fun read_cartouche syms =
(case Scan.read Symbol_Pos.stopper (scan_cartouche >> token_range Cartouche) syms of
@@ -671,11 +672,11 @@

(* explode *)

-fun tokenize keywords syms =
- Source.of_list syms |> source' false keywords |> Source.exhaust;
+fun tokenize keywords strict syms =
+  Source.of_list syms |> make_source keywords strict |> Source.exhaust;

fun explode keywords pos text =
- Symbol_Pos.explode (text, pos) |> tokenize keywords;
+  Symbol_Pos.explode (text, pos) |> tokenize keywords {strict = false};

fun explode0 keywords = explode keywords Position.none;

@@ -725,7 +726,7 @@

fun read_no_commands keywords scan syms =
Source.of_list syms
- |> source' true (Keyword.no_command_keywords keywords)
+ |> make_source (Keyword.no_command_keywords keywords) {strict = true}
|> source_proper
|> Source.source stopper (Scan.error (Scan.bulk scan))
|> Source.exhaust;
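A minimal usage sketch of the reworked interface (not part of the changeset), in Isabelle/ML, assuming values keywords : Keyword.keywords and syms : Symbol_Pos.T list are already at hand:

  (* non-strict scanning: Scan.recover around the bulk token scanner, as before *)
  val toks = Token.tokenize keywords {strict = false} syms;

  (* strict scanning: plain Scan.bulk, the behaviour formerly selected via source' true *)
  val toks' = Token.tokenize keywords {strict = true} syms;

  (* the explode/explode0 wrappers remain non-strict *)
  val toks'' = Token.explode keywords Position.none "lemma";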
--- a/src/Pure/Thy/document_antiquotations.ML Wed Jan 24 11:56:54 2018 +0100
+++ b/src/Pure/Thy/document_antiquotations.ML Wed Jan 24 16:34:24 2018 +0100
@@ -177,9 +177,7 @@
val _ = report_text ctxt text;
val _ =
Input.source_explode text
- |> Source.of_list
- |> Token.source' true keywords
- |> Source.exhaust
+ |> Token.tokenize keywords {strict = true}
|> maps (Token.reports keywords)
|> Context_Position.reports_text ctxt;
in
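The document_antiquotations.ML hunk illustrates the migration for former callers of Token.source', which is no longer exported from the Token signature: an explicit Source pipeline over a Symbol_Pos.T list collapses into a single Token.tokenize call. A before/after sketch, with keywords and syms standing for a Keyword.keywords value and a Symbol_Pos.T list:

  (* before this changeset: *)
  val toks = syms |> Source.of_list |> Token.source' true keywords |> Source.exhaust;

  (* after: *)
  val toks = Token.tokenize keywords {strict = true} syms;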