Lexicon.tokenize: do not append EndToken yet;
author wenzelm
Mon, 13 Aug 2007 18:10:22 +0200
changeset 24245 4ffeb1dd048a
parent 24244 d7ee11ba1534
child 24246 3a915c75f7b6
Lexicon.tokenize: do not append EndToken yet;
src/Pure/Syntax/lexicon.ML
src/Pure/Syntax/parser.ML
--- a/src/Pure/Syntax/lexicon.ML	Mon Aug 13 18:10:22 2007 +0200
+++ b/src/Pure/Syntax/lexicon.ML	Mon Aug 13 18:10:22 2007 +0200
@@ -275,7 +275,7 @@
       Scan.one Symbol.is_blank >> K NONE;
   in
     (case Scan.error (Scan.finite Symbol.stopper (Scan.repeat scan_token)) chs of
-      (toks, []) => map_filter I toks @ [EndToken]
+      (toks, []) => map_filter I toks
     | (_, cs) => error ("Inner lexical error at: " ^ quote (implode cs)))
   end;
 
--- a/src/Pure/Syntax/parser.ML	Mon Aug 13 18:10:22 2007 +0200
+++ b/src/Pure/Syntax/parser.ML	Mon Aug 13 18:10:22 2007 +0200
@@ -882,7 +882,7 @@
 
 fun parse (Gram {tags, prods, chains, ...}) start toks =
 let val r =
-  (case earley prods tags chains start toks of
+  (case earley prods tags chains start (toks @ [Lexicon.EndToken]) of
     [] => sys_error "parse: no parse trees"
   | pts => pts);
 in r end