explicit dummy token;

author:     wenzelm
date:       Wed, 31 Jan 2018 11:23:53 +0100
changeset:  67550 3b666615e3ce
parent:     67549 7b399522d6c1
child:      67551 4a087b9a29c5
files:      src/Pure/Syntax/lexicon.ML
            src/Pure/Syntax/parser.ML
--- a/src/Pure/Syntax/lexicon.ML	Wed Jan 31 11:18:36 2018 +0100
+++ b/src/Pure/Syntax/lexicon.ML	Wed Jan 31 11:23:53 2018 +0100
@@ -22,7 +22,7 @@
   val is_tid: string -> bool
   datatype token_kind =
     Literal | Ident | Long_Ident | Var | Type_Ident | Type_Var | Num | Float | Str |
-    String | Cartouche | Space | Comment of Comment.kind option | EOF
+    String | Cartouche | Space | Comment of Comment.kind option | Dummy | EOF
   val token_kind_ord: token_kind * token_kind -> order
   datatype token = Token of token_kind * string * Position.range
   val kind_of_token: token -> token_kind
@@ -30,6 +30,7 @@
   val pos_of_token: token -> Position.T
   val tokens_match_ord: token * token -> order
   val is_proper: token -> bool
+  val dummy: token
   val mk_eof: Position.T -> token
   val eof: token
   val is_eof: token -> bool
@@ -115,7 +116,7 @@
 
 datatype token_kind =
   Literal | Ident | Long_Ident | Var | Type_Ident | Type_Var | Num | Float | Str |
-  String | Cartouche | Space | Comment of Comment.kind option | EOF;
+  String | Cartouche | Space | Comment of Comment.kind option | Dummy | EOF;
 
 val token_kind_index =
   fn Literal => 0
@@ -134,7 +135,8 @@
    | Comment (SOME Comment.Comment) => 13
    | Comment (SOME Comment.Cancel) => 14
    | Comment (SOME Comment.Latex) => 15
-   | EOF => 16;
+   | Dummy => 16
+   | EOF => 17;
 
 val token_kind_ord = int_ord o apply2 token_kind_index;
 
@@ -152,6 +154,8 @@
 
 val is_proper = kind_of_token #> (fn Space => false | Comment _ => false | _ => true);
 
+val dummy = Token (Dummy, "", Position.no_range);
+
 
 (* stopper *)
 
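
A minimal standalone sketch of the index-based kind ordering and the new dummy
token, in plain Standard ML (abbreviated kind list; the pair no_range stands in
for Position.no_range; not Isabelle's actual Lexicon module):

  datatype token_kind = Literal | Space | Dummy | EOF;

  (*each kind maps to a fixed index, so comparing kinds reduces to Int.compare*)
  val token_kind_index =
    fn Literal => 0
     | Space => 1
     | Dummy => 2
     | EOF => 3;

  fun token_kind_ord (k1, k2) =
    Int.compare (token_kind_index k1, token_kind_index k2);

  datatype token = Token of token_kind * string * (int * int);

  val no_range = (0, 0);

  (*the dummy token carries no text and no position*)
  val dummy = Token (Dummy, "", no_range);
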
--- a/src/Pure/Syntax/parser.ML	Wed Jan 31 11:18:36 2018 +0100
+++ b/src/Pure/Syntax/parser.ML	Wed Jan 31 11:23:53 2018 +0100
@@ -111,8 +111,6 @@
      lambda productions are stored as normal productions
      and also as an entry in "lambdas"*)
 
-val token_none = Lexicon.Token (Lexicon.Space, "", Position.no_range);
-
 (*productions for which no starting token is
   known yet are associated with this token*)
 val unknown_start = Lexicon.eof;
@@ -217,7 +215,7 @@
 
                             val nt_prods' = nt_prods
                               |> tokens_fold copy new_tks
-                              |> new_lambda ? copy token_none;
+                              |> new_lambda ? copy Lexicon.dummy;
                           in
                             examine_prods ps (add_lambda orelse new_lambda)
                               nt_dependencies' added_tks' nt_prods'
@@ -280,7 +278,7 @@
 
           val start_tks' =
             start_tks
-            |> (if is_some new_lambdas then tokens_insert token_none
+            |> (if is_some new_lambdas then tokens_insert Lexicon.dummy
                 else if tokens_is_empty start_tks then tokens_insert unknown_start
                 else I);
 
@@ -608,7 +606,7 @@
   let
     fun token_prods prods =
       fold cons (prods_lookup prods tk) #>
-      fold cons (prods_lookup prods token_none);
+      fold cons (prods_lookup prods Lexicon.dummy);
     fun nt_prods nt = #2 (Vector.sub (prods, nt));
   in fold (token_prods o nt_prods) (chains_all_preds chains [nt]) [] end;
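
Why the dummy token: parser.ML files productions connected with lambda
(nullable) nonterminals under a single distinguished key, and token_prods
always consults that key in addition to the entry for the concrete lookahead
token; this changeset replaces the ad-hoc local token_none with the shared
Lexicon.dummy for that purpose. A minimal sketch of the lookup idea in plain
Standard ML, using a hypothetical association-list table and a string
dummy_key instead of Isabelle's actual prods structure:

  (*look up the production list filed under a key; empty if absent*)
  fun prods_lookup table key =
    case List.find (fn (k, _) => k = key) table of
      SOME (_, ps) => ps
    | NONE => [];

  val dummy_key = "";  (*stands in for Lexicon.dummy*)

  (*collect productions filed under the given token and under the dummy key*)
  fun token_prods table tok =
    prods_lookup table tok @ prods_lookup table dummy_key;

  (*productions stored under dummy_key are found for any lookahead token*)
  val table = [("x", ["P1"]), (dummy_key, ["P_lambda"])];
  val _ = token_prods table "x";  (*yields ["P1", "P_lambda"]*)
  val _ = token_prods table "y";  (*yields ["P_lambda"]*)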