(*  Title:      Pure/Syntax/parser.ML
    Author:     Carsten Clasohm, Sonia Mahjoub
    Author:     Makarius

General context-free parser for the inner syntax of terms and types.
*)

signature PARSER =
sig
  type gram
  val empty_gram: gram
  val make_gram: Syntax_Ext.xprod list -> Syntax_Ext.xprod list -> gram option -> gram
  val pretty_gram: gram -> Pretty.T list
  datatype parsetree =
    Node of string * parsetree list |
    Tip of Lexicon.token
  val pretty_parsetree: parsetree -> Pretty.T list
  val parse: gram -> string -> Lexicon.token list -> parsetree list
  val branching_level: int Config.T
end;

structure Parser: PARSER =
struct

(** datatype gram **)

(* nonterminals *)

(*productions for the NTs are stored in a vector, indexed by the NT tag*)
type nt = int;

type tags = nt Symtab.table;
val tags_empty: tags = Symtab.empty;
fun tags_size (tags: tags) = Symtab.size tags;
fun tags_content (tags: tags) = sort_by #1 (Symtab.dest tags);
fun tags_lookup (tags: tags) = Symtab.lookup tags;
fun tags_insert tag (tags: tags) = Symtab.update_new tag tags;
fun tags_name (tags: tags) =
  the o Inttab.lookup (Inttab.build (Symtab.fold (Inttab.update_new o swap) tags));

type nts = Bitset.T;
val nts_empty: nts = Bitset.empty;
val nts_merge: nts * nts -> nts = Bitset.merge;
fun nts_insert nt : nts -> nts = Bitset.insert nt;
fun nts_member (nts: nts) = Bitset.member nts;
fun nts_fold f (nts: nts) = Bitset.fold f nts;
fun nts_subset (nts1: nts, nts2: nts) = Bitset.forall (nts_member nts2) nts1;
fun nts_is_empty (nts: nts) = Bitset.is_empty nts;
fun nts_is_unique (nts: nts) = Bitset.is_unique nts;


(* tokens *)

structure Tokens = Set(type key = Lexicon.token val ord = Lexicon.tokens_match_ord);

fun tokens_find P tokens = Tokens.get_first (fn tok => if P tok then SOME tok else NONE) tokens;
fun tokens_add (nt: nt, tokens) = if Tokens.is_empty tokens then I else cons (nt, tokens);


(* productions *)

datatype symb =
  Terminal of Lexicon.token |
  Nonterminal of nt * int;  (*(tag, prio)*)

structure Prods = Table(Tokens.Key);
type prods = (symb list * string * int) list Prods.table;  (*start_token ~> [(rhs, name, prio)]*)

val prods_empty: prods = Prods.empty;
fun prods_lookup (prods: prods) = Prods.lookup_list prods;
fun prods_update entry : prods -> prods = Prods.update entry;
fun prods_content (prods: prods) = distinct (op =) (maps #2 (Prods.dest prods));

type nt_gram = (nts * Tokens.T) * prods;  (*dependent_nts, start_tokens, prods*)
  (*dependent_nts is a set of all NTs whose lookahead depends on this NT's lookahead*)

val nt_gram_empty: nt_gram = ((nts_empty, Tokens.empty), prods_empty);
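
(*Illustration (added commentary, not part of the original source): an nt_gram
  entry pairs ((dependent_nts, start_tokens), prods).  For a nonterminal whose
  productions may begin with the literal "(" or with any start token of some
  other NT, start_tokens contains "(" plus that NT's start tokens, and prods
  maps each such token to the list of productions it can begin.*)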

type chains = unit Int_Graph.T;
fun chains_preds (chains: chains) = Int_Graph.immediate_preds chains;
fun chains_all_preds (chains: chains) = Int_Graph.all_preds chains;
fun chains_all_succs (chains: chains) = Int_Graph.all_succs chains;
val chains_empty: chains = Int_Graph.empty;
fun chains_member (chains: chains) = Int_Graph.is_edge chains;
fun chains_declare nt : chains -> chains = Int_Graph.default_node (nt, ());
fun chains_insert (from, to) =
  chains_declare from #> chains_declare to #> Int_Graph.add_edge (from, to);

datatype gram =
  Gram of
   {tags: tags,
    chains: chains,
    lambdas: nts,
    prods: nt_gram Vector.vector};
    (*"tags" is used to map NT names (i.e. strings) to tags;
      chain productions are not stored as normal productions
      but instead as an entry in "chains": from -> to;
      lambda productions are stored as normal productions
      and also as an entry in "lambdas"*)
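
(*Example (added commentary, illustration only): a production  A = B  with
  priority ~1 whose rhs is the single nonterminal B (also at priority ~1) is a
  chain production; it is recorded as the edge B -> A in "chains" rather than
  stored as a normal production.  A production whose rhs is empty, or consists
  only of lambda NTs, makes its lhs a lambda NT and is recorded in "lambdas"
  in addition to being stored normally.*)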

(*productions for which no starting token is
  known yet are associated with this token*)
val unknown_start = Lexicon.eof;

fun get_start tks =
  (case Tokens.min tks of
    SOME tk => tk
  | NONE => unknown_start);

fun add_production array_prods (lhs, new_prod as (rhs, _, pri)) (chains, lambdas) =
  let
    (*store chain if it does not already exist*)
    val (chain, new_chain, chains') =
      (case (pri, rhs) of
        (~1, [Nonterminal (from, ~1)]) =>
          if chains_member chains (from, lhs)
          then (SOME from, false, chains)
          else (SOME from, true, chains_insert (from, lhs) chains)
      | _ =>
          let
            val chains' = chains
              |> chains_declare lhs
              |> fold (fn Nonterminal (nt, _) => chains_declare nt | _ => I) rhs;
          in (NONE, false, chains') end);

    (*propagate new chain in lookahead and lambdas;
      added_starts is used later to associate existing
      productions with new starting tokens*)
    val (added_starts, lambdas') =
      if not new_chain then ([], lambdas)
      else
        let (*lookahead of chain's source*)
          val ((_, from_tks), _) = Array.nth array_prods (the chain);

          (*copy from's lookahead to chain's destinations*)
          fun copy_lookahead to =
            let
              val ((to_nts, to_tks), ps) = Array.nth array_prods to;

              val new_tks = Tokens.subtract to_tks from_tks;  (*added lookahead tokens*)
              val to_tks' = Tokens.merge (to_tks, new_tks);
              val _ = Array.upd array_prods to ((to_nts, to_tks'), ps);
            in tokens_add (to, new_tks) end;

          val tos = chains_all_succs chains' [lhs];
        in
          (fold copy_lookahead tos [],
            lambdas |> nts_member lambdas lhs ? fold nts_insert tos)
        end;

    (*test if new production can produce lambda
      (rhs must either be empty or only consist of lambda NTs)*)
    val new_lambdas =
      if forall
        (fn Nonterminal (id, _) => nts_member lambdas' id
          | Terminal _ => false) rhs
      then SOME (filter_out (nts_member lambdas') (chains_all_succs chains' [lhs]))
      else NONE;
    val lambdas'' = fold nts_insert (these new_lambdas) lambdas';

    (*list optional terminal and all nonterminals on which the lookahead
      of a production depends*)
    fun lookahead_dependency _ [] nts = (NONE, nts)
      | lookahead_dependency _ (Terminal tk :: _) nts = (SOME tk, nts)
      | lookahead_dependency lambdas (Nonterminal (nt, _) :: symbs) nts =
          if nts_member lambdas nt then
            lookahead_dependency lambdas symbs (nts_insert nt nts)
          else (NONE, nts_insert nt nts);
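
    (*Example (added commentary, illustration only): for an rhs of the form
      [Nonterminal (A, p), Terminal plus, ...], where plus is the literal
      token "+" and A is a lambda NT, lookahead_dependency yields
      (SOME plus, {A}): the production can start with any start token of A,
      or with "+" if A derives the empty string.  If A is not a lambda NT,
      the result is (NONE, {A}).*)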

    (*get all known starting tokens for a nonterminal*)
    fun starts_for_nt nt = snd (fst (Array.nth array_prods nt));

    (*update prods, lookaheads, and lambdas according to new lambda NTs*)
    val (added_starts', lambdas') =
      let
        (*propagate added lambda NT*)
        fun propagate_lambda [] added_starts lambdas = (added_starts, lambdas)
          | propagate_lambda (l :: ls) added_starts lambdas =
              let
                (*get lookahead for lambda NT*)
                val ((dependent, l_starts), _) = Array.nth array_prods l;

                (*check productions whose lookahead may depend on lambda NT*)
                fun examine_prods [] add_lambda nt_dependencies added_tks nt_prods =
                      (add_lambda, nt_dependencies, added_tks, nt_prods)
                  | examine_prods ((p as (rhs, _, _)) :: ps) add_lambda
                        nt_dependencies added_tks nt_prods =
                      let val (tk, nts) = lookahead_dependency lambdas rhs nts_empty in
                        if nts_member nts l then  (*update production's lookahead*)
                          let
                            val new_lambda =
                              is_none tk andalso nts_subset (nts, lambdas);

                            val new_tks =
                              Tokens.empty
                              |> fold Tokens.insert (the_list tk)
                              |> nts_fold (curry Tokens.merge o starts_for_nt) nts
                              |> Tokens.subtract l_starts;

                            val added_tks' = Tokens.merge (added_tks, new_tks);

                            val nt_dependencies' = nts_merge (nt_dependencies, nts);

                            (*associate production with new starting tokens*)
                            fun copy tk nt_prods =
                              prods_update (tk, p :: prods_lookup nt_prods tk) nt_prods;

                            val nt_prods' = nt_prods
                              |> Tokens.fold copy new_tks
                              |> new_lambda ? copy Lexicon.dummy;
                          in
                            examine_prods ps (add_lambda orelse new_lambda)
                              nt_dependencies' added_tks' nt_prods'
                          end
                        else (*skip production*)
                          examine_prods ps add_lambda nt_dependencies added_tks nt_prods
                      end;

                (*check each NT whose lookahead depends on new lambda NT*)
                fun process_nts nt (added_lambdas, added_starts) =
                  let
                    val ((old_nts, old_tks), nt_prods) = Array.nth array_prods nt;

                    (*existing productions whose lookahead may depend on l*)
                    val tk_prods = prods_lookup nt_prods (get_start l_starts);

                    (*add_lambda is true if an existing production of the nt
                      produces lambda due to the new lambda NT l*)
                    val (add_lambda, nt_dependencies, added_tks, nt_prods') =
                      examine_prods tk_prods false nts_empty Tokens.empty nt_prods;

                    val new_nts = nts_merge (old_nts, nt_dependencies);
                    val new_tks = Tokens.merge (old_tks, added_tks);

                    val added_lambdas' = added_lambdas |> add_lambda ? cons nt;
                    val _ = Array.upd array_prods nt ((new_nts, new_tks), nt_prods');
                      (*N.B. that because the tks component is used to access
                        existing productions we have to add new tokens at the
                        _end_ of the list*)
                    val added_starts' = tokens_add (nt, added_tks) added_starts;
                  in (added_lambdas', added_starts') end;

                val (added_lambdas, added_starts') =
                  nts_fold process_nts dependent ([], added_starts);
                val added_lambdas' = filter_out (nts_member lambdas) added_lambdas;
              in
                propagate_lambda (ls @ added_lambdas') added_starts'
                  (fold nts_insert added_lambdas' lambdas)
              end;
      in
        propagate_lambda
          (nts_fold (fn l => not (nts_member lambdas l) ? cons l) lambdas'' [])
          added_starts lambdas''
      end;

    (*insert production into grammar*)
    val added_starts' =
      if is_some chain then added_starts'  (*don't store chain production*)
      else
        let
          (*lookahead tokens of new production and on which
            NTs lookahead depends*)
          val (start_tk, start_nts) = lookahead_dependency lambdas' rhs nts_empty;

          val start_tks =
            Tokens.empty
            |> fold Tokens.insert (the_list start_tk)
            |> nts_fold (curry Tokens.merge o starts_for_nt) start_nts;

          val start_tks' =
            start_tks
            |> (if is_some new_lambdas then Tokens.insert Lexicon.dummy
                else if Tokens.is_empty start_tks then Tokens.insert unknown_start
                else I);

          (*add lhs NT to list of dependent NTs in lookahead*)
          fun add_nts nt initial =
            (if initial then
              let val ((old_nts, old_tks), ps) = Array.nth array_prods nt in
                if nts_member old_nts lhs then ()
                else Array.upd array_prods nt ((nts_insert lhs old_nts, old_tks), ps)
              end
            else (); false);

          (*add new start tokens to chained NTs' lookahead list;
            also store new production for lhs NT*)
          fun add_tks [] added = added
            | add_tks (nt :: nts) added =
                let
                  val ((old_nts, old_tks), nt_prods) = Array.nth array_prods nt;

                  val new_tks = Tokens.subtract old_tks start_tks;

                  (*store new production*)
                  fun store tk (prods, _) =
                    let
                      val tk_prods = prods_lookup prods tk;
                      val tk_prods' = new_prod :: tk_prods;
                      val prods' = prods_update (tk, tk_prods') prods;
                    in (prods', true) end;

                  val (nt_prods', changed) = (nt_prods, false)
                    |> nt = lhs ? Tokens.fold store start_tks';
                  val _ =
                    if not changed andalso Tokens.is_empty new_tks then ()
                    else Array.upd array_prods nt ((old_nts, Tokens.merge (old_tks, new_tks)), nt_prods');
                in add_tks nts (tokens_add (nt, new_tks) added) end;
          val _ = nts_fold add_nts start_nts true;
        in add_tks (chains_all_succs chains' [lhs]) [] end;

    (*associate productions with new lookaheads*)
    val _ =
      let
        (*propagate added start tokens*)
        fun add_starts [] = ()
          | add_starts ((changed_nt, new_tks) :: starts) =
              let
                (*token under which old productions which
                  depend on changed_nt could be stored*)
                val key =
                  tokens_find (not o Tokens.member new_tks) (starts_for_nt changed_nt)
                  |> the_default unknown_start;

                (*copy productions whose lookahead depends on changed_nt;
                  if key = unknown_start then tk_prods is used to hold
                  the productions not copied*)
                fun update_prods [] result = result
                  | update_prods ((p as (rhs, _: string, _: nt)) :: ps)
                        (tk_prods, nt_prods) =
                      let
                        (*lookahead dependency for production*)
                        val (tk, depends) = lookahead_dependency lambdas' rhs nts_empty;

                        (*test if this production has to be copied*)
                        val update = nts_member depends changed_nt;

                        (*test if production could already be associated with
                          a member of new_tks*)
                        val lambda =
                          not (nts_is_unique depends) orelse
                          not (nts_is_empty depends) andalso is_some tk
                          andalso Tokens.member new_tks (the tk);

                        (*associate production with new starting tokens*)
                        fun copy tk nt_prods =
                          let
                            val tk_prods = prods_lookup nt_prods tk;
                            val tk_prods' =
                              if not lambda then p :: tk_prods
                              else insert (op =) p tk_prods;
                              (*if production depends on lambda NT we
                                have to look for duplicates*)
                          in prods_update (tk, tk_prods') nt_prods end;
                        val result =
                          if update then (tk_prods, Tokens.fold copy new_tks nt_prods)
                          else if key = unknown_start then (p :: tk_prods, nt_prods)
                          else (tk_prods, nt_prods);
                      in update_prods ps result end;

                (*copy existing productions for new starting tokens*)
                fun process_nts nt =
                  let
                    val ((nts, tks), nt_prods) = Array.nth array_prods nt;

                    val tk_prods = prods_lookup nt_prods key;

                    (*associate productions with new lookahead tokens*)
                    val (tk_prods', nt_prods') = update_prods tk_prods ([], nt_prods);

                    val nt_prods'' =
                      if key = unknown_start then
                        prods_update (key, tk_prods') nt_prods'
                      else nt_prods';

                    val added_tks = Tokens.subtract tks new_tks;
                    val tks' = Tokens.merge (tks, added_tks);
                    val _ = Array.upd array_prods nt ((nts, tks'), nt_prods'');
                  in tokens_add (nt, added_tks) end;

                val ((dependent, _), _) = Array.nth array_prods changed_nt;
              in add_starts (starts @ nts_fold process_nts dependent []) end;
      in add_starts added_starts' end;
  in (chains', lambdas') end;
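
(*Note (added commentary): add_production updates array_prods destructively,
  while chains and lambdas are threaded functionally; the array is created
  afresh in extend_gram below, so the mutation stays local to grammar
  construction.*)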
|
237
a7d3e712767a
MAJOR INTERNAL CHANGE: extend and merge operations of syntax tables
wenzelm
parents:
46
diff
changeset
|
385 |
|
18 | 386 |
|
237
a7d3e712767a
MAJOR INTERNAL CHANGE: extend and merge operations of syntax tables
wenzelm
parents:
46
diff
changeset
|
387 |
(* pretty_gram *) |
18 | 388 |
|
1147
57b5f55bf879
removed 'raw' productions from gram datatype; replaced mk_gram by add_prods;
clasohm
parents:
890
diff
changeset
|
389 |
fun pretty_gram (Gram {tags, prods, chains, ...}) = |
237
a7d3e712767a
MAJOR INTERNAL CHANGE: extend and merge operations of syntax tables
wenzelm
parents:
46
diff
changeset
|
390 |
let |
67531 | 391 |
val print_nt = tags_name tags; |
67518 | 392 |
fun print_pri p = if p < 0 then "" else Symbol.make_sup ("(" ^ string_of_int p ^ ")"); |
1147
57b5f55bf879
removed 'raw' productions from gram datatype; replaced mk_gram by add_prods;
clasohm
parents:
890
diff
changeset
|
393 |
|
67552 | 394 |
fun pretty_symb (Terminal tok) = |
395 |
if Lexicon.is_literal tok |
|
396 |
then Pretty.quote (Pretty.keyword1 (Lexicon.str_of_token tok)) |
|
397 |
else Pretty.str (Lexicon.str_of_token tok) |
|
67513 | 398 |
| pretty_symb (Nonterminal (tag, p)) = Pretty.str (print_nt tag ^ print_pri p); |
18 | 399 |
|
237
a7d3e712767a
MAJOR INTERNAL CHANGE: extend and merge operations of syntax tables
wenzelm
parents:
46
diff
changeset
|
400 |
fun pretty_const "" = [] |
67513 | 401 |
| pretty_const c = [Pretty.str ("\<^bold>\<Rightarrow> " ^ quote c)]; |
237
a7d3e712767a
MAJOR INTERNAL CHANGE: extend and merge operations of syntax tables
wenzelm
parents:
46
diff
changeset
|
402 |
|
67513 | 403 |
fun prod_of_chain from = ([Nonterminal (from, ~1)], "", ~1); |
1147
57b5f55bf879
removed 'raw' productions from gram datatype; replaced mk_gram by add_prods;
clasohm
parents:
890
diff
changeset
|
404 |
|
67513 | 405 |
fun pretty_prod (name, tag) = |
77846
5ba68d3bd741
more operations, following Isabelle/ML conventions;
wenzelm
parents:
77823
diff
changeset
|
406 |
(prods_content (#2 (Vector.nth prods tag)) @ map prod_of_chain (chains_preds chains tag)) |
67513 | 407 |
|> map (fn (symbs, const, p) => |
408 |
Pretty.block (Pretty.breaks |
|
409 |
(Pretty.str (name ^ print_pri p ^ " =") :: map pretty_symb symbs @ pretty_const const))); |
|
67531 | 410 |
in maps pretty_prod (tags_content tags) end; |


(** operations on grammars **)

val empty_gram =
  Gram
   {tags = tags_empty,
    chains = chains_empty,
    lambdas = nts_empty,
    prods = Vector.fromList [nt_gram_empty]};

local

fun make_tag s tags =
  (case tags_lookup tags s of
    SOME tag => (tag, tags)
  | NONE =>
      let val tag = tags_size tags
      in (tag, tags_insert (s, tag) tags) end);

fun make_arg (s, p) tags =
  (case Lexicon.get_terminal s of
    NONE =>
      let val (tag, tags') = make_tag s tags;
      in (Nonterminal (tag, p), tags') end
  | SOME tok => (Terminal tok, tags));

fun extend_gram xprods gram =
  let
    fun make_symbs (Syntax_Ext.Delim s :: xsyms) result tags =
          make_symbs xsyms (Terminal (Lexicon.literal s) :: result) tags
      | make_symbs (Syntax_Ext.Argument a :: xsyms) result tags =
          let val (new_symb, tags') = make_arg a tags
          in make_symbs xsyms (new_symb :: result) tags' end
      | make_symbs (_ :: xsyms) result tags = make_symbs xsyms result tags
      | make_symbs [] result tags = (rev result, tags);

    fun make_prod (Syntax_Ext.XProd (lhs, xsymbs, const, pri)) (result, tags) =
      let
        val (tag, tags') = make_tag lhs tags;
        val (symbs, tags'') = make_symbs xsymbs [] tags';
      in ((tag, (symbs, const, pri)) :: result, tags'') end;


    val Gram {tags, chains, lambdas, prods} = gram;

    val (new_prods, tags') = fold make_prod xprods ([], tags);

    val array_prods' =
      Array.tabulate (tags_size tags', fn i =>
        if i < Vector.length prods then Vector.nth prods i
        else nt_gram_empty);

    val (chains', lambdas') =
      (chains, lambdas) |> fold (add_production array_prods') new_prods;
  in
    Gram
     {tags = tags',
      chains = chains',
      lambdas = lambdas',
      prods = Array.vector array_prods'}
  end;

in

fun make_gram [] _ (SOME gram) = gram
  | make_gram new_xprods _ (SOME gram) = extend_gram new_xprods gram
  | make_gram [] [] NONE = empty_gram
  | make_gram new_xprods old_xprods NONE = extend_gram (new_xprods @ old_xprods) empty_gram;
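
(*Note (added commentary): with a cached grammar the old productions are
  already present, so only the new xprods are added; without one, the grammar
  is rebuilt from empty_gram using both the new and the old xprods.*)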

end;


(** parser **)

(* parsetree *)

datatype parsetree =
  Node of string * parsetree list |
  Tip of Lexicon.token;

fun pretty_parsetree (Node (c, pts)) =
      [Pretty.enclose "(" ")"
        (Pretty.breaks (Pretty.quote (Pretty.str c) :: maps pretty_parsetree pts))]
  | pretty_parsetree (Tip tok) =
      if Lexicon.valued_token tok then [Pretty.str (Lexicon.str_of_token tok)] else [];


(* parser state *)

type state =
  (nt * int *        (*identification and production precedence*)
   string *          (*name of production*)
   int) *            (*index for previous state list*)
  symb list *        (*input: rest of rhs*)
  parsetree list;    (*output (reversed): already parsed nonterminals on rhs*)
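
(*Example (added commentary, illustration only): a state predicted in state
  set j for a production of nonterminal A with priority p and constant name c,
  with the dot still at the beginning of its rhs, has the form
    ((tag of A, p, c, j), full rhs as symb list, [])
  where the last component collects the parse trees of rhs symbols already
  recognized, in reverse order.*)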


(*Get all rhss with precedence >= min_prec*)
fun get_RHS min_prec = filter (fn (_, _, prec: int) => prec >= min_prec);

(*Get all rhss with precedence >= min_prec and < max_prec*)
fun get_RHS' min_prec max_prec =
  filter (fn (_, _, prec: int) => prec >= min_prec andalso prec < max_prec);

(*Add parse tree to list and eliminate duplicates,
  saving the maximum precedence*)
fun conc (t: parsetree list, prec: int) [] = (NONE, [(t, prec)])
  | conc (t, prec) ((t', prec') :: ts) =
      if t = t' then
        (SOME prec',
          if prec' >= prec then (t', prec') :: ts
          else (t, prec) :: ts)
      else
        let val (n, ts') = conc (t, prec) ts
        in (n, (t', prec') :: ts') end;
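
(*Example (added commentary, illustration only):
    conc (t, 2) [(t, 1), (u, 3)] = (SOME 1, [(t, 2), (u, 3)])
  -- the duplicate entry for t is kept once with the higher precedence,
  and the previously stored precedence is reported.*)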

(*Update entry in used*)
fun update_trees (A, t) used =
  let
    val (i, ts) = the (Inttab.lookup used A);
    val (n, ts') = conc t ts;
  in (n, Inttab.update (A, (i, ts')) used) end;

(*Replace entry in used*)
fun update_prec (A, prec) =
  Inttab.map_entry A (fn (_, ts) => (prec, ts));

fun get_states_lambda A max opt_min Si : state list =
  let
    val prec =
      (case opt_min of
        NONE => (fn p => p <= max)
      | SOME min => (fn p => p <= max andalso p > min));
  in filter (fn (_, Nonterminal (B, p) :: _, _) => A = B andalso prec p | _ => false) Si end;

fun get_states A max_prec =
  filter (fn (_, Nonterminal (B, prec) :: _, _) => A = B andalso prec <= max_prec | _ => false);


fun movedot_nonterm tt (info, Nonterminal _ :: sa, ts) : state = (info, sa, tt @ ts);

fun movedot_lambda p ((info, sa, ts): state) =
  map_filter (fn (t, k) => if p <= k then SOME (info, sa, t @ ts) else NONE);


(*trigger value for warnings*)
val branching_level = Config.declare_int ("syntax_branching_level", \<^here>) (K 600);


local

fun process_states (Gram {prods = gram_prods, chains = gram_chains, ...}) stateset i c states =
  let
    (*get all productions of an NT, and of NTs chained to it, which can
      be started by the specified token*)
    fun prods_for tok nt =
      let
        fun token_prods prods =
          fold cons (prods_lookup prods tok) #>
          fold cons (prods_lookup prods Lexicon.dummy);
        val nt_prods = #2 o Vector.nth gram_prods;
      in fold (token_prods o nt_prods) (chains_all_preds gram_chains [nt]) [] end;

    fun process _ [] (Si, Sii) = (Si, Sii)
      | process used ((S as (info, symbs, ts)) :: States) (Si, Sii) =
          (case symbs of
            Nonterminal (nt, min_prec) :: sa =>
              let (*predictor operation*)
                fun mk_state (rhs, id, prod_prec) = ((nt, prod_prec, id, i), rhs, []);
                fun movedot_lambda (t, k) = if min_prec <= k then SOME (info, sa, t @ ts) else NONE;
                val (used', new_states) =
                  (case Inttab.lookup used nt of
                    SOME (used_prec, l) =>  (*nonterminal has been processed*)
                      if used_prec <= min_prec then
                        (*wanted precedence has been processed*)
                        (used, map_filter movedot_lambda l)
                      else (*wanted precedence hasn't been parsed yet*)
                        let
                          val States2 = map mk_state (get_RHS' min_prec used_prec (prods_for c nt));
                          val States1 = map_filter movedot_lambda l;
                        in (update_prec (nt, min_prec) used, States1 @ States2) end
                  | NONE =>  (*nonterminal is parsed for the first time*)
                      let val States' = map mk_state (get_RHS min_prec (prods_for c nt))
                      in (Inttab.update (nt, (min_prec, [])) used, States') end);
              in process used' (new_states @ States) (S :: Si, Sii) end
          | Terminal a :: sa =>  (*scanner operation*)
              let
                val (_, _, id, _) = info;
                val Sii' =
                  if Lexicon.tokens_match_ord (a, c) <> EQUAL then Sii
                  else (*move dot*)
                    let val ts' = if Lexicon.valued_token c orelse id <> "" then Tip c :: ts else ts
                    in (info, sa, ts') :: Sii end;
              in process used States (S :: Si, Sii') end
          | [] =>  (*completer operation*)
              let
                val (A, prec, id, j) = info;
                val tt = if id = "" then ts else [Node (id, rev ts)];
                val (used', Slist) =
                  if j = i then (*lambda production?*)
                    let val (prec', used') = update_trees (A, (tt, prec)) used
                    in (used', get_states_lambda A prec prec' Si) end
                  else (used, get_states A prec (Array.nth stateset j));
                val States' = map (movedot_nonterm tt) Slist;
              in process used' (States' @ States) (S :: Si, Sii) end)

    val (Si, Sii) = process Inttab.empty states ([], []);
  in
    Array.upd stateset i Si;
    Array.upd stateset (i + 1) Sii
  end;
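
(*Note (added commentary): this is the classic Earley scheme.  For state set i,
  "process" runs the predictor and completer operations to saturate Si, while
  the scanner operation moves items over the current token c into Sii, which
  becomes state set i + 1.*)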

fun produce gram stateset i input prev_token =
  (case Array.nth stateset i of
    [] =>
      let
        val toks = if Lexicon.is_eof prev_token then input else prev_token :: input;
        val pos = Position.here (Lexicon.pos_of_token prev_token);
      in
        if null toks then
          error ("Inner syntax error: unexpected end of input" ^ pos)
        else
          error ("Inner syntax error" ^ pos ^
            Markup.markup Markup.no_report
              ("\n" ^ Pretty.string_of
                (Pretty.block [
                  Pretty.str "at", Pretty.brk 1,
                  Pretty.block
                   (Pretty.str "\"" ::
                    Pretty.breaks (map (Pretty.str o Lexicon.str_of_token) (#1 (split_last toks))) @
                    [Pretty.str "\""])])))
      end
  | states =>
      (case input of
        [] => states
      | c :: cs => (process_states gram stateset i c states; produce gram stateset (i + 1) cs c)));

in

fun parse (gram as Gram {tags, ...}) start toks =
  let
    val start_tag =
      (case tags_lookup tags start of
        SOME tag => tag
      | NONE => error ("Inner syntax: bad grammar root symbol " ^ quote start));

    val end_pos =
      (case try List.last toks of
        NONE => Position.none
      | SOME tok => Lexicon.end_pos_of_token tok);
    val input = toks @ [Lexicon.mk_eof end_pos];

    val S0: state = ((~1, 0, "", 0), [Nonterminal (start_tag, 0), Terminal Lexicon.eof], []);
    val stateset = Array.array (length input + 1, []);
    val _ = Array.upd stateset 0 [S0];

    val pts =
      produce gram stateset 0 input Lexicon.eof
      |> map_filter (fn (_, _, [pt]) => SOME pt | _ => NONE);
  in if null pts then raise Fail "Inner syntax: no parse trees" else pts end;
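
(*Note (added commentary): parse drives the Earley chart: it allocates one
  state list per input position (plus one), seeds position 0 with the single
  item for the root symbol followed by eof, and lets produce advance through
  the token list; the complete items left at the end yield the parse trees.*)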

end;

end;