tuned;

author     wenzelm
date       Tue, 11 Apr 2023 13:23:46 +0200
changeset  77823  e60fe51f6f59
parent     77822  353c4d3e6dda
child      77824  e3fe192fa4a8
files      src/Pure/Syntax/parser.ML
--- a/src/Pure/Syntax/parser.ML	Tue Apr 11 13:06:43 2023 +0200
+++ b/src/Pure/Syntax/parser.ML	Tue Apr 11 13:23:46 2023 +0200
@@ -51,11 +51,10 @@
 
 (* tokens *)
 
-structure Tokens = Table(type key = Lexicon.token val ord = Lexicon.tokens_match_ord);
-structure Tokenset = Set(Tokens.Key);
+structure Tokens = Set(type key = Lexicon.token val ord = Lexicon.tokens_match_ord);
 
-fun tokens_find P tokens = Tokenset.get_first (fn tok => if P tok then SOME tok else NONE) tokens;
-fun tokens_add (nt: nt, tokens) = if Tokenset.is_empty tokens then I else cons (nt, tokens);
+fun tokens_find P tokens = Tokens.get_first (fn tok => if P tok then SOME tok else NONE) tokens;
+fun tokens_add (nt: nt, tokens) = if Tokens.is_empty tokens then I else cons (nt, tokens);
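A minimal sketch (not part of the changeset) of the merged Tokens interface, using only Set operations that appear elsewhere in this file; toks and tok are hypothetical values:

  (*sketch: build a token set from a hypothetical list and test membership*)
  val tks = Tokens.empty |> fold Tokens.insert toks;
  val ok = Tokens.member tks tok;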
 
 
 (* productions *)
@@ -64,17 +63,18 @@
   Terminal of Lexicon.token |
   Nonterminal of nt * int;  (*(tag, prio)*)
 
-type prods = (symb list * string * int) list Tokens.table;  (*start_token ~> [(rhs, name, prio)]*)
+structure Prods = Table(Tokens.Key);
+type prods = (symb list * string * int) list Prods.table;  (*start_token ~> [(rhs, name, prio)]*)
 
-val prods_empty: prods = Tokens.empty;
-fun prods_lookup (prods: prods) = Tokens.lookup_list prods;
-fun prods_update entry : prods -> prods = Tokens.update entry;
-fun prods_content (prods: prods) = distinct (op =) (maps #2 (Tokens.dest prods));
+val prods_empty: prods = Prods.empty;
+fun prods_lookup (prods: prods) = Prods.lookup_list prods;
+fun prods_update entry : prods -> prods = Prods.update entry;
+fun prods_content (prods: prods) = distinct (op =) (maps #2 (Prods.dest prods));
 
-type nt_gram = (nts * Tokenset.T) * prods; (*dependent_nts, start_tokens, prods*)
+type nt_gram = (nts * Tokens.T) * prods; (*dependent_nts, start_tokens, prods*)
  (*dependent_nts is the set of all NTs whose lookahead depends on this NT's lookahead*)
 
-val nt_gram_empty: nt_gram = ((nts_empty, Tokenset.empty), prods_empty);
+val nt_gram_empty: nt_gram = ((nts_empty, Tokens.empty), prods_empty);
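The per-NT production table is now keyed by the same token order via Prods = Table(Tokens.Key). A minimal sketch of the intended roundtrip, with tok, rhs, name, and prio as hypothetical values:

  (*sketch: store and retrieve productions under their start token*)
  val ps = prods_empty |> prods_update (tok, [(rhs, name, prio)]);
  val found = prods_lookup ps tok;   (*[(rhs, name, prio)]; [] for unknown keys*)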
 
 type chains = unit Int_Graph.T;
 fun chains_preds (chains: chains) = Int_Graph.immediate_preds chains;
@@ -105,7 +105,7 @@
 val unknown_start = Lexicon.eof;
 
 fun get_start tks =
-  (case Tokenset.min tks of
+  (case Tokens.min tks of
     SOME tk => tk
   | NONE => unknown_start);
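Since unknown_start = Lexicon.eof, an empty lookahead set degrades to the end-of-file token; schematically:

  (*sketch: Tokens.min on an empty set yields NONE, hence the fallback*)
  val tk = get_start Tokens.empty;   (*= unknown_start, i.e. Lexicon.eof*)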
 
@@ -139,8 +139,8 @@
             let
               val ((to_nts, to_tks), ps) = Array.sub (prods, to);
 
-              val new_tks = Tokenset.subtract to_tks from_tks;  (*added lookahead tokens*)
-              val to_tks' = Tokenset.merge (to_tks, new_tks);
+              val new_tks = Tokens.subtract to_tks from_tks;  (*added lookahead tokens*)
+              val to_tks' = Tokens.merge (to_tks, new_tks);
               val _ = Array.update (prods, to, ((to_nts, to_tks'), ps));
             in tokens_add (to, new_tks) end;
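The subtract-then-merge idiom here isolates the genuinely new lookahead tokens before widening the stored set, so tokens_add records only real additions; schematically:

  (*sketch: Tokens.subtract removes the first set from the second*)
  val new_tks = Tokens.subtract to_tks from_tks;   (*from_tks not already in to_tks*)
  val to_tks' = Tokens.merge (to_tks, new_tks);    (*equals merge (to_tks, from_tks)*)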
 
@@ -194,12 +194,12 @@
                               is_none tk andalso nts_subset (nts, lambdas);
 
                             val new_tks =
-                              Tokenset.empty
-                              |> fold Tokenset.insert (the_list tk)
-                              |> nts_fold (curry Tokenset.merge o starts_for_nt) nts
-                              |> Tokenset.subtract l_starts;
+                              Tokens.empty
+                              |> fold Tokens.insert (the_list tk)
+                              |> nts_fold (curry Tokens.merge o starts_for_nt) nts
+                              |> Tokens.subtract l_starts;
 
-                            val added_tks' = Tokenset.merge (added_tks, new_tks);
+                            val added_tks' = Tokens.merge (added_tks, new_tks);
 
                             val nt_dependencies' = nts_merge (nt_dependencies, nts);
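The new_tks pipeline above assembles the added lookahead in one pass: the optional terminal tk, the union of start tokens of the dependent NTs, minus what l already contributes via l_starts; annotated for readability (not part of the changeset):

  (*sketch of the pipeline: optional terminal + NT start tokens, minus l_starts*)
  Tokens.empty
  |> fold Tokens.insert (the_list tk)                   (*at most one terminal*)
  |> nts_fold (curry Tokens.merge o starts_for_nt) nts  (*union over dependent NTs*)
  |> Tokens.subtract l_starts                           (*drop already-known tokens*)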
 
@@ -208,7 +208,7 @@
                               prods_update (tk, p :: prods_lookup nt_prods tk) nt_prods;
 
                             val nt_prods' = nt_prods
-                              |> Tokenset.fold copy new_tks
+                              |> Tokens.fold copy new_tks
                               |> new_lambda ? copy Lexicon.dummy;
                           in
                             examine_prods ps (add_lambda orelse new_lambda)
@@ -229,10 +229,10 @@
                     (*add_lambda is true if an existing production of the nt
                       produces lambda due to the new lambda NT l*)
                     val (add_lambda, nt_dependencies, added_tks, nt_prods') =
-                      examine_prods tk_prods false nts_empty Tokenset.empty nt_prods;
+                      examine_prods tk_prods false nts_empty Tokens.empty nt_prods;
 
                     val new_nts = nts_merge (old_nts, nt_dependencies);
-                    val new_tks = Tokenset.merge (old_tks, added_tks);
+                    val new_tks = Tokens.merge (old_tks, added_tks);
 
                     val added_lambdas' = added_lambdas |> add_lambda ? cons nt;
                     val _ = Array.update (prods, nt, ((new_nts, new_tks), nt_prods'));
@@ -266,14 +266,14 @@
           val (start_tk, start_nts) = lookahead_dependency lambdas' rhs nts_empty;
 
           val start_tks =
-            Tokenset.empty
-            |> fold Tokenset.insert (the_list start_tk)
-            |> nts_fold (curry Tokenset.merge o starts_for_nt) start_nts;
+            Tokens.empty
+            |> fold Tokens.insert (the_list start_tk)
+            |> nts_fold (curry Tokens.merge o starts_for_nt) start_nts;
 
           val start_tks' =
             start_tks
-            |> (if is_some new_lambdas then Tokenset.insert Lexicon.dummy
-                else if Tokenset.is_empty start_tks then Tokenset.insert unknown_start
+            |> (if is_some new_lambdas then Tokens.insert Lexicon.dummy
+                else if Tokens.is_empty start_tks then Tokens.insert unknown_start
                 else I);
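Two boundary cases are patched into start_tks': productions that may produce lambda are additionally keyed under Lexicon.dummy (cf. the copy under new_lambda above), and productions with no computable start token fall back to the catch-all key; summarized:

  (*sketch: sentinel keys in the lookahead tables*)
  (*Lexicon.dummy  -- key for productions that can produce lambda*)
  (*unknown_start  -- catch-all key when no start token is known (= Lexicon.eof)*)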
 
           (*add lhs NT to list of dependent NTs in lookahead*)
@@ -292,7 +292,7 @@
                 let
                   val ((old_nts, old_tks), nt_prods) = Array.sub (prods, nt);
 
-                  val new_tks = Tokenset.subtract old_tks start_tks;
+                  val new_tks = Tokens.subtract old_tks start_tks;
 
                   (*store new production*)
                   fun store tk (prods, _) =
@@ -303,12 +303,12 @@
                     in (prods', true) end;
 
                   val (nt_prods', changed) = (nt_prods, false)
-                    |> nt = lhs ? Tokenset.fold store start_tks';
+                    |> nt = lhs ? Tokens.fold store start_tks';
                   val _ =
-                    if not changed andalso Tokenset.is_empty new_tks then ()
+                    if not changed andalso Tokens.is_empty new_tks then ()
                     else
                       Array.update
-                        (prods, nt, ((old_nts, Tokenset.merge (old_tks, new_tks)), nt_prods'));
+                        (prods, nt, ((old_nts, Tokens.merge (old_tks, new_tks)), nt_prods'));
                 in add_tks nts (tokens_add (nt, new_tks) added) end;
           val _ = nts_fold add_nts start_nts true;
         in add_tks (chains_all_succs chains' [lhs]) [] end;
@@ -323,7 +323,7 @@
                (*token under which old productions that
                  depend on changed_nt could be stored*)
                 val key =
-                  tokens_find (not o Tokenset.member new_tks) (starts_for_nt changed_nt)
+                  tokens_find (not o Tokens.member new_tks) (starts_for_nt changed_nt)
                   |> the_default unknown_start;
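tokens_find scans changed_nt's start tokens in tokens_match_ord order for one that is not among the new tokens, since productions stored under such a key need not be moved; if every start token is new, the catch-all key is used. Annotated:

  (*sketch: choose a key whose productions need not move*)
  val key =
    tokens_find (not o Tokens.member new_tks) (starts_for_nt changed_nt)  (*first stable start token*)
    |> the_default unknown_start;                                         (*else the catch-all key*)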
 
                 (*copy productions whose lookahead depends on changed_nt;
@@ -344,7 +344,7 @@
                         val lambda =
                           not (nts_is_unique depends) orelse
                           not (nts_is_empty depends) andalso is_some tk
-                          andalso Tokenset.member new_tks (the tk);
+                          andalso Tokens.member new_tks (the tk);
 
                         (*associate production with new starting tokens*)
                         fun copy tk nt_prods =
@@ -357,7 +357,7 @@
                                  have to look for duplicates*)
                           in prods_update (tk, tk_prods') nt_prods end;
                         val result =
-                          if update then (tk_prods, Tokenset.fold copy new_tks nt_prods)
+                          if update then (tk_prods, Tokens.fold copy new_tks nt_prods)
                           else if key = unknown_start then (p :: tk_prods, nt_prods)
                           else (tk_prods, nt_prods);
                       in update_prods ps result end;
@@ -377,8 +377,8 @@
                         prods_update (key, tk_prods') nt_prods'
                       else nt_prods';
 
-                    val added_tks = Tokenset.subtract tks new_tks;
-                    val tks' = Tokenset.merge (tks, added_tks);
+                    val added_tks = Tokens.subtract tks new_tks;
+                    val tks' = Tokens.merge (tks, added_tks);
                     val _ = Array.update (prods, nt, ((nts, tks'), nt_prods''));
                   in tokens_add (nt, added_tks) end;