--- a/src/Doc/antiquote_setup.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Doc/antiquote_setup.ML Tue Aug 12 20:18:27 2014 +0200
@@ -208,7 +208,7 @@
is_some (Keyword.command_keyword name) andalso
let
val markup =
- Outer_Syntax.scan Position.none name
+ Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none name
|> maps (Outer_Syntax.command_reports (#2 (Outer_Syntax.get_syntax ())))
|> map (snd o fst);
val _ = Context_Position.reports ctxt (map (pair pos) markup);
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Tue Aug 12 20:18:27 2014 +0200
@@ -525,7 +525,7 @@
fun do_method named_thms ctxt =
let
val ref_of_str =
- suffix ";" #> Outer_Syntax.scan Position.none #> Parse_Spec.xthm
+ suffix ";" #> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none #> Parse_Spec.xthm
#> fst
val thms = named_thms |> maps snd
val facts = named_thms |> map (ref_of_str o fst o fst)
@@ -551,7 +551,7 @@
let
val (type_encs, lam_trans) =
!meth
- |> Outer_Syntax.scan Position.start
+ |> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.start
|> filter Token.is_proper |> tl
|> Metis_Tactic.parse_metis_options |> fst
|>> the_default [ATP_Proof_Reconstruct.partial_typesN]
--- a/src/HOL/Tools/Lifting/lifting_setup.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML Tue Aug 12 20:18:27 2014 +0200
@@ -221,7 +221,7 @@
val thy = Proof_Context.theory_of lthy
val dummy_thm = Thm.transfer thy Drule.dummy_thm
- val pointer = Outer_Syntax.scan Position.none bundle_name
+ val pointer = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none bundle_name
val restore_lifting_att =
([dummy_thm], [Args.src ("Lifting.lifting_restore_internal", Position.none) pointer])
in
--- a/src/HOL/Tools/try0.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/HOL/Tools/try0.ML Tue Aug 12 20:18:27 2014 +0200
@@ -48,12 +48,12 @@
NONE
end;

-val parse_method =
- enclose "(" ")"
- #> Outer_Syntax.scan Position.start
- #> filter Token.is_proper
- #> Scan.read Token.stopper Method.parse
- #> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");
+fun parse_method s =
+ enclose "(" ")" s
+ |> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.start
+ |> filter Token.is_proper
+ |> Scan.read Token.stopper Method.parse
+  |> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");

fun apply_named_method_on_first_goal ctxt =
parse_method
--- a/src/Pure/Isar/isar_syn.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/Isar/isar_syn.ML Tue Aug 12 20:18:27 2014 +0200
@@ -744,7 +744,7 @@
val _ =
Outer_Syntax.improper_command @{command_spec "Isabelle.command"} "evaluate nested Isabelle command"
(props_text :|-- (fn (pos, str) =>
- (case Outer_Syntax.parse pos str of
+ (case Outer_Syntax.parse (Outer_Syntax.get_syntax ()) pos str of
[tr] => Scan.succeed (K tr)
| _ => Scan.fail_with (K (fn () => "exactly one command expected")))
handle ERROR msg => Scan.fail_with (K (fn () => msg))));
--- a/src/Pure/Isar/outer_syntax.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/Isar/outer_syntax.ML Tue Aug 12 20:18:27 2014 +0200
@@ -31,9 +31,9 @@
(local_theory -> Proof.state) parser -> unit
val help_outer_syntax: string list -> unit
val print_outer_syntax: unit -> unit
- val scan: Position.T -> string -> Token.T list
- val parse: Position.T -> string -> Toplevel.transition list
- val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list
+ val scan: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list
+ val parse: (Scan.lexicon * Scan.lexicon) * outer_syntax ->
+ Position.T -> string -> Toplevel.transition list
val parse_spans: Token.T list -> Command_Span.span list
type isar
val isar: TextIO.instream -> bool -> isar
@@ -258,25 +258,19 @@

(* off-line scanning/parsing *)

-fun scan pos str =
- Source.of_string str
- |> Symbol.source
- |> Token.source {do_recover = SOME false} Keyword.get_lexicons pos
- |> Source.exhaust;
-
-fun parse pos str =
- Source.of_string str
- |> Symbol.source
- |> Token.source {do_recover = SOME false} Keyword.get_lexicons pos
- |> toplevel_source false NONE lookup_commands_dynamic
- |> Source.exhaust;
-
-fun parse_tokens lexs pos =
+fun scan lexs pos =
Source.of_string #>
Symbol.source #>
Token.source {do_recover = SOME false} (K lexs) pos #>
Source.exhaust;

+fun parse (lexs, syntax) pos str =
+ Source.of_string str
+ |> Symbol.source
+ |> Token.source {do_recover = SOME false} (K lexs) pos
+ |> toplevel_source false NONE (K (lookup_commands syntax))
+ |> Source.exhaust;
+

(* parse spans *)
--- a/src/Pure/PIDE/document.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/PIDE/document.ML Tue Aug 12 20:18:27 2014 +0200
@@ -318,7 +318,7 @@
val span =
Lazy.lazy (fn () =>
Position.setmp_thread_data (Position.id_only id)
- (fn () => Outer_Syntax.parse_tokens (Keyword.get_lexicons ()) (Position.id id) text) ());
+ (fn () => Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.id id) text) ());
val _ =
Position.setmp_thread_data (Position.id_only id)
(fn () => Output.status (Markup.markup_only Markup.accepted)) ();
--- a/src/Pure/PIDE/resources.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/PIDE/resources.ML Tue Aug 12 20:18:27 2014 +0200
@@ -157,7 +157,7 @@
val _ = Thy_Header.define_keywords header;
val lexs = Keyword.get_lexicons ();
- val toks = Outer_Syntax.parse_tokens lexs text_pos text;
+ val toks = Outer_Syntax.scan lexs text_pos text;
val spans = Outer_Syntax.parse_spans toks;
val elements = Thy_Syntax.parse_elements spans;
--- a/src/Pure/Thy/thy_info.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/Thy/thy_info.ML Tue Aug 12 20:18:27 2014 +0200
@@ -380,7 +380,7 @@

fun script_thy pos txt =
let
- val trs = Outer_Syntax.parse pos txt;
+ val trs = Outer_Syntax.parse (Outer_Syntax.get_syntax ()) pos txt;
val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs);
val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel;
in Toplevel.end_theory end_pos end_state end;
--- a/src/Pure/Tools/find_consts.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/Tools/find_consts.ML Tue Aug 12 20:18:27 2014 +0200
@@ -143,7 +143,7 @@
in

fun read_query pos str =
- Outer_Syntax.scan pos str
+ Outer_Syntax.scan (Keyword.get_lexicons ()) pos str
|> filter Token.is_proper
|> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
|> #1;
--- a/src/Pure/Tools/find_theorems.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Pure/Tools/find_theorems.ML Tue Aug 12 20:18:27 2014 +0200
@@ -528,7 +528,7 @@
in

fun read_query pos str =
- Outer_Syntax.scan pos str
+ Outer_Syntax.scan (Keyword.get_lexicons ()) pos str
|> filter Token.is_proper
|> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
|> #1;
--- a/src/Tools/Code/code_target.ML Tue Aug 12 18:54:53 2014 +0200
+++ b/src/Tools/Code/code_target.ML Tue Aug 12 20:18:27 2014 +0200
@@ -695,7 +695,7 @@
let
val ctxt = Proof_Context.init_global (Thy_Info.get_theory thyname);
val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o
- (filter Token.is_proper o Outer_Syntax.scan Position.none);
+ (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none);
in case parse cmd_expr
of SOME f => (writeln "Now generating code..."; f ctxt)
| NONE => error ("Bad directive " ^ quote cmd_expr)