src/Pure/ML/ml_lex.ML
changeset 67362 221612c942de
parent 66067 cdbcb417db67
child 67363 408a137e2793
--- a/src/Pure/ML/ml_lex.ML	Sun Jan 07 15:12:00 2018 +0100
+++ b/src/Pure/ML/ml_lex.ML	Sun Jan 07 16:55:45 2018 +0100
@@ -9,11 +9,12 @@
   val keywords: string list
   datatype token_kind =
     Keyword | Ident | Long_Ident | Type_Var | Word | Int | Real | Char | String |
-    Space | Comment | Error of string | EOF
+    Space | Comment | Comment_Cartouche | Error of string | EOF
   eqtype token
   val stopper: token Scan.stopper
   val is_regular: token -> bool
   val is_improper: token -> bool
+  val is_comment: token -> bool
   val set_range: Position.range -> token -> token
   val range_of: token -> Position.range
   val pos_of: token -> Position.T
@@ -63,7 +64,7 @@
 
 datatype token_kind =
   Keyword | Ident | Long_Ident | Type_Var | Word | Int | Real | Char | String |
-  Space | Comment | Error of string | EOF;
+  Space | Comment | Comment_Cartouche | Error of string | EOF;
 
 datatype token = Token of Position.range * (token_kind * string);
 
@@ -108,8 +109,13 @@
 
 fun is_improper (Token (_, (Space, _))) = true
   | is_improper (Token (_, (Comment, _))) = true
+  | is_improper (Token (_, (Comment_Cartouche, _))) = true
   | is_improper _ = false;
 
+fun is_comment (Token (_, (Comment, _))) = true
+  | is_comment (Token (_, (Comment_Cartouche, _))) = true
+  | is_comment _ = false;
+
 fun warn tok =
   (case tok of
     Token (_, (Keyword, ":>")) =>
@@ -147,6 +153,7 @@
   | Char => (Markup.ML_char, "")
   | String => (if SML then Markup.SML_string else Markup.ML_string, "")
   | Comment => (if SML then Markup.SML_comment else Markup.ML_comment, "")
+  | Comment_Cartouche => (if SML then Markup.SML_comment else Markup.ML_comment, "")
   | Error msg => (Markup.bad (), msg)
   | _ => (Markup.empty, "");
 
@@ -292,6 +299,7 @@
   scan_string >> token String ||
   scan_blanks1 >> token Space ||
   Symbol_Pos.scan_comment err_prefix >> token Comment ||
+  Symbol_Pos.scan_comment_cartouche err_prefix >> token Comment_Cartouche ||
   Scan.max token_leq
    (Scan.literal lexicon >> token Keyword)
    (scan_word >> token Word ||
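
For illustration only (not part of the changeset above): a minimal Isabelle/ML sketch of how the new is_comment predicate could be used together with the existing ML_Lex.tokenize entry point. The helper name count_comments and the sample input are assumptions made up for this example, not code from the repository.

(* Hypothetical helper: count comment tokens, covering both ordinary ML
   comments and the new cartouche comments, in a piece of ML source text. *)
fun count_comments (source: string) : int =
  source
  |> ML_Lex.tokenize           (* existing lexer entry point *)
  |> filter ML_Lex.is_comment  (* predicate introduced by this change *)
  |> length;

(* e.g. count_comments "fun id x = x  (*note*)" is expected to yield 1 *)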