removed obsolete Proof.context argument from Parser.parse and Syntax.parse (cf. 954e9d6782ea);
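The context argument was no longer used by the parser itself, so Parser.parse, Syntax.parse, and the internal earley/produce/PROCESSS functions now take only the grammar data, the start symbol, and the token list. A minimal before/after sketch of the call-site adjustment, assuming a syntax value syn, a root nonterminal root, and a token list toks are already in scope (the actual update is in the syntax_phases.ML hunk below):

  (*before this change: ctxt was threaded through but ignored by the parser*)
  val pts = Syntax.parse ctxt syn root (filter Lexicon.is_proper toks);

  (*after this change: the context argument is simply dropped*)
  val pts = Syntax.parse syn root (filter Lexicon.is_proper toks);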
--- a/src/Pure/Syntax/parser.ML Fri Nov 25 22:21:37 2011 +0100
+++ b/src/Pure/Syntax/parser.ML Fri Nov 25 23:04:12 2011 +0100
@@ -16,7 +16,7 @@
Tip of Lexicon.token
exception PARSETREE of parsetree
val pretty_parsetree: parsetree -> Pretty.T
- val parse: Proof.context -> gram -> string -> Lexicon.token list -> parsetree list
+ val parse: gram -> string -> Lexicon.token list -> parsetree list
val guess_infix_lr: gram -> string -> (string * bool * bool * int) option
val branching_level: int Config.T
end;
@@ -649,7 +649,7 @@
in get_prods (connected_with chains nts nts) [] end;

-fun PROCESSS ctxt prods chains Estate i c states =
+fun PROCESSS prods chains Estate i c states =
let
fun all_prods_for nt = prods_for prods chains true c [nt];
@@ -697,7 +697,7 @@
in processS Inttab.empty states ([], []) end;

-fun produce ctxt prods tags chains stateset i indata prev_token =
+fun produce prods tags chains stateset i indata prev_token =
(case Array.sub (stateset, i) of
[] =>
let
@@ -715,15 +715,15 @@
[] => s
| c :: cs =>
let
- val (si, sii) = PROCESSS ctxt prods chains stateset i c s;
+ val (si, sii) = PROCESSS prods chains stateset i c s;
val _ = Array.update (stateset, i, si);
val _ = Array.update (stateset, i + 1, sii);
- in produce ctxt prods tags chains stateset (i + 1) cs c end));
+ in produce prods tags chains stateset (i + 1) cs c end));

fun get_trees states = map_filter (fn (_, _, [pt], _, _, _) => SOME pt | _ => NONE) states;

-fun earley ctxt prods tags chains startsymbol indata =
+fun earley prods tags chains startsymbol indata =
let
val start_tag =
(case Symtab.lookup tags startsymbol of
@@ -734,19 +734,18 @@
val Estate = Array.array (s, []);
val _ = Array.update (Estate, 0, S0);
in
- get_trees
- (produce ctxt prods tags chains Estate 0 indata Lexicon.eof)
+ get_trees (produce prods tags chains Estate 0 indata Lexicon.eof)
end;

-fun parse ctxt (Gram {tags, prods, chains, ...}) start toks =
+fun parse (Gram {tags, prods, chains, ...}) start toks =
let
val end_pos =
(case try List.last toks of
NONE => Position.none
| SOME (Lexicon.Token (_, _, (_, end_pos))) => end_pos);
val r =
- (case earley ctxt prods tags chains start (toks @ [Lexicon.mk_eof end_pos]) of
+ (case earley prods tags chains start (toks @ [Lexicon.mk_eof end_pos]) of
[] => raise Fail "Inner syntax: no parse trees"
| pts => pts);
in r end;
--- a/src/Pure/Syntax/syntax.ML Fri Nov 25 22:21:37 2011 +0100
+++ b/src/Pure/Syntax/syntax.ML Fri Nov 25 23:04:12 2011 +0100
@@ -78,7 +78,7 @@
val lookup_const: syntax -> string -> string option
val is_keyword: syntax -> string -> bool
val tokenize: syntax -> bool -> Symbol_Pos.T list -> Lexicon.token list
- val parse: Proof.context -> syntax -> string -> Lexicon.token list -> Parser.parsetree list
+ val parse: syntax -> string -> Lexicon.token list -> Parser.parsetree list
val parse_ast_translation: syntax -> string -> (Proof.context -> Ast.ast list -> Ast.ast) option
val parse_rules: syntax -> string -> (Ast.ast * Ast.ast) list
val parse_translation: syntax -> string -> (Proof.context -> term list -> term) option
@@ -401,7 +401,7 @@
fun lookup_const (Syntax ({consts, ...}, _)) = Symtab.lookup consts;
fun is_keyword (Syntax ({lexicon, ...}, _)) = Scan.is_literal lexicon o Symbol.explode;
fun tokenize (Syntax ({lexicon, ...}, _)) = Lexicon.tokenize lexicon;
-fun parse ctxt (Syntax ({gram, ...}, _)) = Parser.parse ctxt (Lazy.force gram);
+fun parse (Syntax ({gram, ...}, _)) = Parser.parse (Lazy.force gram);

fun parse_ast_translation (Syntax ({parse_ast_trtab, ...}, _)) = lookup_tr parse_ast_trtab;
fun parse_translation (Syntax ({parse_trtab, ...}, _)) = lookup_tr parse_trtab;
--- a/src/Pure/Syntax/syntax_phases.ML Fri Nov 25 22:21:37 2011 +0100
+++ b/src/Pure/Syntax/syntax_phases.ML Fri Nov 25 23:04:12 2011 +0100
@@ -281,7 +281,7 @@
val toks = Syntax.tokenize syn raw syms;
val _ = Context_Position.reports ctxt (map Lexicon.report_of_token toks);

- val pts = Syntax.parse ctxt syn root (filter Lexicon.is_proper toks)
+ val pts = Syntax.parse syn root (filter Lexicon.is_proper toks)
handle ERROR msg =>
error (msg ^
implode (map (Markup.markup Markup.report o Lexicon.reported_token_range ctxt) toks));
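Note on the remaining uses of the context: syntax_phases.ML still keeps ctxt in scope for token position reports (Context_Position.reports) and for error markup via Lexicon.reported_token_range, as the unchanged lines of the final hunk show; only the parsing functions themselves lose the argument.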