changeset:   67446:1f4d167b6ac9
parent:      67445:4311845b0412
child:       67447:c98c6eb3dd4c
author:      wenzelm
date:        Tue, 16 Jan 2018 11:27:52 +0100
description:
discontinued old form of marginal comments;

files:
NEWS
src/Pure/Isar/outer_syntax.ML
src/Pure/Isar/token.ML
src/Pure/Isar/token.scala
src/Pure/PIDE/command.ML
src/Pure/PIDE/command.scala
src/Pure/Pure.thy
src/Pure/Thy/thy_output.ML
--- a/NEWS	Tue Jan 16 09:58:17 2018 +0100
+++ b/NEWS	Tue Jan 16 11:27:52 2018 +0100
@@ -9,6 +9,11 @@
 
 *** General ***
 
+* Marginal comments need to be written exclusively in the new-style form
+"\<comment> \<open>text\<close>", old ASCII variants like "-- {* ... *}" are no longer
+supported. INCOMPATIBILITY, use the command-line tool "isabelle
+update_comments" to update existing theory files.
+
 * The "op <infix-op>" syntax for infix operators has been replaced by
 "(<infix-op>)". If <infix-op> begins or ends with a "*", there needs to
 be a space between the "*" and the corresponding parenthesis.
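
Concretely, the migration described in the NEWS entry above amounts to a rewrite like the following in a theory file. The lemma and its proof are hypothetical placeholders; only the two comment forms come from the entry itself:

    (* before this changeset: old ASCII variant of a marginal comment *)
    lemma "n + (0::nat) = n"
      by simp -- {* trivial by simplification *}

    (* after this changeset: only the new-style form is accepted *)
    lemma "n + (0::nat) = n"
      by simp \<comment> \<open>trivial by simplification\<close>

Existing theory files can be converted in bulk with the command-line tool "isabelle update_comments" named in the entry; the exact invocation (e.g. which files or directories to pass as arguments) is not part of this changeset and is described in the tool's own documentation.
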
--- a/src/Pure/Isar/outer_syntax.ML	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/Isar/outer_syntax.ML	Tue Jan 16 11:27:52 2018 +0100
@@ -25,7 +25,6 @@
   val parse: theory -> Position.T -> string -> Toplevel.transition list
   val parse_tokens: theory -> Token.T list -> Toplevel.transition list
   val parse_spans: Token.T list -> Command_Span.span list
-  val side_comments: Token.T list -> Token.T list
   val command_reports: theory -> Token.T -> Position.report_text list
   val check_command: Proof.context -> command_keyword -> string
 end;
@@ -210,12 +209,8 @@
             in msg ^ quote (Markup.markup Markup.keyword1 name) end))
     end);
 
-val parse_cmt = (Parse.$$$ "--" || Parse.$$$ Symbol.comment) -- Parse.!!! Parse.document_source;
-
 fun commands_source thy =
   Token.source_proper #>
-  Source.source Token.stopper (Scan.bulk (parse_cmt >> K NONE || Parse.not_eof >> SOME)) #>
-  Source.map_filter I #>
   Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command thy) xs));
 
 in
@@ -271,26 +266,6 @@
 end;
 
 
-(* side-comments *)
-
-local
-
-fun is_cmt tok = Token.kind_of tok = Token.Comment (SOME Comment.Comment);
-
-fun cmts (t1 :: t2 :: toks) =
-      if Token.keyword_with (fn s => s = "--" orelse s = Symbol.comment) t1 then t2 :: cmts toks
-      else if is_cmt t1 then t1 :: cmts (t2 :: toks)
-      else cmts (t2 :: toks)
-  | cmts _ = [];
-
-in
-
-val side_comments =
-  cmts o filter_out (fn tok => Token.is_informal_comment tok orelse Token.is_space tok);
-
-end;
-
-
 (* check commands *)
 
 fun command_reports thy tok =
--- a/src/Pure/Isar/token.ML	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/Isar/token.ML	Tue Jan 16 11:27:52 2018 +0100
@@ -624,7 +624,7 @@
     scan_verbatim >> token_range Verbatim ||
     scan_cartouche >> token_range Cartouche ||
     scan_comment >> token_range (Comment NONE) ||
-    (Comment.scan_cancel || Comment.scan_latex) >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
+    Comment.scan >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
     scan_space >> token Space ||
     (Scan.max token_leq
       (Scan.max token_leq
--- a/src/Pure/Isar/token.scala	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/Isar/token.scala	Tue Jan 16 11:27:52 2018 +0100
@@ -48,9 +48,6 @@
 
   trait Parsers extends Scan.Parsers with Comment.Parsers
   {
-    private def comment_marker: Parser[Token] =
-      one(Symbol.is_comment) ^^ (x => Token(Token.Kind.KEYWORD, x))
-
     private def delimited_token: Parser[Token] =
     {
       val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
@@ -111,7 +108,7 @@
     }
 
     def token(keywords: Keyword.Keywords): Parser[Token] =
-      comment_marker | (delimited_token | other_token(keywords))
+      delimited_token | other_token(keywords)
 
     def token_line(keywords: Keyword.Keywords, ctxt: Scan.Line_Context)
       : Parser[(Token, Scan.Line_Context)] =
--- a/src/Pure/PIDE/command.ML	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/PIDE/command.ML	Tue Jan 16 11:27:52 2018 +0100
@@ -205,8 +205,10 @@
     (fn () =>
       (span |> maps (fn tok =>
         check_error (fn () => Thy_Output.check_token_comments ctxt tok))) @
-      (Outer_Syntax.side_comments span |> maps (fn tok =>
-        check_error (fn () => Thy_Output.output_text ctxt {markdown = false} (Token.input_of tok)))))
+      (span |> maps (fn tok =>
+        if Token.kind_of tok = Token.Comment (SOME Comment.Comment) then
+          check_error (fn () => Thy_Output.output_text ctxt {markdown = false} (Token.input_of tok))
+        else [])))
     ();
 
 fun report tr m =
--- a/src/Pure/PIDE/command.scala	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/PIDE/command.scala	Tue Jan 16 11:27:52 2018 +0100
@@ -396,11 +396,10 @@
 
   private def clean_tokens(tokens: List[Token]): List[(Token, Int)] =
   {
-    val markers = Set("%", "--", Symbol.comment, Symbol.comment_decoded)
     def clean(toks: List[(Token, Int)]): List[(Token, Int)] =
       toks match {
         case (t1, i1) :: (t2, i2) :: rest =>
-          if (t1.is_keyword && markers(t1.source)) clean(rest)
+          if (t1.is_keyword && t1.source == "%" && t2.is_name) clean(rest)
           else (t1, i1) :: clean((t2, i2) :: rest)
         case _ => toks
       }
--- a/src/Pure/Pure.thy	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/Pure.thy	Tue Jan 16 11:27:52 2018 +0100
@@ -6,7 +6,7 @@
 
 theory Pure
 keywords
-    "!!" "!" "+" "--" ":" ";" "<" "<=" "==" "=>" "?" "[" "\<comment>" "\<equiv>" "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
+    "!!" "!" "+" ":" ";" "<" "<=" "==" "=>" "?" "[" "\<comment>" "\<equiv>" "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
     "\<subseteq>" "]" "attach" "binder" "in" "infix" "infixl" "infixr" "is" "open" "output"
     "overloaded" "pervasive" "premises" "structure" "unchecked"
   and "private" "qualified" :: before_command
--- a/src/Pure/Thy/thy_output.ML	Tue Jan 16 09:58:17 2018 +0100
+++ b/src/Pure/Thy/thy_output.ML	Tue Jan 16 11:27:52 2018 +0100
@@ -355,11 +355,7 @@
             (Basic_Token cmd, (markup_false, d)))]));
 
     val cmt = Scan.peek (fn d =>
-      Scan.one is_black_comment >>
-        (fn tok => (NONE, (Basic_Token tok, ("", d)))) ||
-      (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |--
-        Parse.!!!! (improper |-- Parse.document_source) >>
-        (fn source => (NONE, (Markup_Token ("cmt", source), ("", d)))));
+      Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
 
     val other = Scan.peek (fn d =>
        Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));