discontinued old form of marginal comments;
author wenzelm
Tue Jan 16 11:27:52 2018 +0100
changeset 67446 1f4d167b6ac9
parent 67445 4311845b0412
child 67447 c98c6eb3dd4c
NEWS
src/Pure/Isar/outer_syntax.ML
src/Pure/Isar/token.ML
src/Pure/Isar/token.scala
src/Pure/PIDE/command.ML
src/Pure/PIDE/command.scala
src/Pure/Pure.thy
src/Pure/Thy/thy_output.ML
     1.1 --- a/NEWS	Tue Jan 16 09:58:17 2018 +0100
     1.2 +++ b/NEWS	Tue Jan 16 11:27:52 2018 +0100
     1.3 @@ -9,6 +9,11 @@
     1.4  
     1.5  *** General ***
     1.6  
     1.7 +* Marginal comments need to be written exclusively in the new-style form
     1.8 +"\<comment> \<open>text\<close>", old ASCII variants like "-- {* ... *}" are no longer
     1.9 +supported. INCOMPATIBILITY, use the command-line tool "isabelle
    1.10 +update_comments" to update existing theory files.
    1.11 +
    1.12  * The "op <infix-op>" syntax for infix operators has been replaced by
    1.13  "(<infix-op>)". If <infix-op> begins or ends with a "*", there needs to
    1.14  be a space between the "*" and the corresponding parenthesis.
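
For readers of the NEWS entry above, a minimal before/after sketch of a marginal comment in theory source (the apply step and the comment text are invented for illustration, not taken from this changeset):

    (* old ASCII form, no longer accepted after this changeset *)
    apply (simp add: foo_def)  -- {* unfold the local definition *}

    (* new form *)
    apply (simp add: foo_def)  \<comment> \<open>unfold the local definition\<close>

Existing theories can be converted in bulk with the tool named above, e.g. "isabelle update_comments FILE...", pointing it at the theory files to update.
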
     2.1 --- a/src/Pure/Isar/outer_syntax.ML	Tue Jan 16 09:58:17 2018 +0100
     2.2 +++ b/src/Pure/Isar/outer_syntax.ML	Tue Jan 16 11:27:52 2018 +0100
     2.3 @@ -25,7 +25,6 @@
     2.4    val parse: theory -> Position.T -> string -> Toplevel.transition list
     2.5    val parse_tokens: theory -> Token.T list -> Toplevel.transition list
     2.6    val parse_spans: Token.T list -> Command_Span.span list
     2.7 -  val side_comments: Token.T list -> Token.T list
     2.8    val command_reports: theory -> Token.T -> Position.report_text list
     2.9    val check_command: Proof.context -> command_keyword -> string
    2.10  end;
    2.11 @@ -210,12 +209,8 @@
    2.12              in msg ^ quote (Markup.markup Markup.keyword1 name) end))
    2.13      end);
    2.14  
    2.15 -val parse_cmt = (Parse.$$$ "--" || Parse.$$$ Symbol.comment) -- Parse.!!! Parse.document_source;
    2.16 -
    2.17  fun commands_source thy =
    2.18    Token.source_proper #>
    2.19 -  Source.source Token.stopper (Scan.bulk (parse_cmt >> K NONE || Parse.not_eof >> SOME)) #>
    2.20 -  Source.map_filter I #>
    2.21    Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command thy) xs));
    2.22  
    2.23  in
    2.24 @@ -271,26 +266,6 @@
    2.25  end;
    2.26  
    2.27  
    2.28 -(* side-comments *)
    2.29 -
    2.30 -local
    2.31 -
    2.32 -fun is_cmt tok = Token.kind_of tok = Token.Comment (SOME Comment.Comment);
    2.33 -
    2.34 -fun cmts (t1 :: t2 :: toks) =
    2.35 -      if Token.keyword_with (fn s => s = "--" orelse s = Symbol.comment) t1 then t2 :: cmts toks
    2.36 -      else if is_cmt t1 then t1 :: cmts (t2 :: toks)
    2.37 -      else cmts (t2 :: toks)
    2.38 -  | cmts _ = [];
    2.39 -
    2.40 -in
    2.41 -
    2.42 -val side_comments =
    2.43 -  cmts o filter_out (fn tok => Token.is_informal_comment tok orelse Token.is_space tok);
    2.44 -
    2.45 -end;
    2.46 -
    2.47 -
    2.48  (* check commands *)
    2.49  
    2.50  fun command_reports thy tok =
     3.1 --- a/src/Pure/Isar/token.ML	Tue Jan 16 09:58:17 2018 +0100
     3.2 +++ b/src/Pure/Isar/token.ML	Tue Jan 16 11:27:52 2018 +0100
     3.3 @@ -624,7 +624,7 @@
     3.4      scan_verbatim >> token_range Verbatim ||
     3.5      scan_cartouche >> token_range Cartouche ||
     3.6      scan_comment >> token_range (Comment NONE) ||
     3.7 -    (Comment.scan_cancel || Comment.scan_latex) >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
     3.8 +    Comment.scan >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
     3.9      scan_space >> token Space ||
    3.10      (Scan.max token_leq
    3.11        (Scan.max token_leq
     4.1 --- a/src/Pure/Isar/token.scala	Tue Jan 16 09:58:17 2018 +0100
     4.2 +++ b/src/Pure/Isar/token.scala	Tue Jan 16 11:27:52 2018 +0100
     4.3 @@ -48,9 +48,6 @@
     4.4  
     4.5    trait Parsers extends Scan.Parsers with Comment.Parsers
     4.6    {
     4.7 -    private def comment_marker: Parser[Token] =
     4.8 -      one(Symbol.is_comment) ^^ (x => Token(Token.Kind.KEYWORD, x))
     4.9 -
    4.10      private def delimited_token: Parser[Token] =
    4.11      {
    4.12        val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
    4.13 @@ -111,7 +108,7 @@
    4.14      }
    4.15  
    4.16      def token(keywords: Keyword.Keywords): Parser[Token] =
    4.17 -      comment_marker | (delimited_token | other_token(keywords))
    4.18 +      delimited_token | other_token(keywords)
    4.19  
    4.20      def token_line(keywords: Keyword.Keywords, ctxt: Scan.Line_Context)
    4.21        : Parser[(Token, Scan.Line_Context)] =
     5.1 --- a/src/Pure/PIDE/command.ML	Tue Jan 16 09:58:17 2018 +0100
     5.2 +++ b/src/Pure/PIDE/command.ML	Tue Jan 16 11:27:52 2018 +0100
     5.3 @@ -205,8 +205,10 @@
     5.4      (fn () =>
     5.5        (span |> maps (fn tok =>
     5.6          check_error (fn () => Thy_Output.check_token_comments ctxt tok))) @
     5.7 -      (Outer_Syntax.side_comments span |> maps (fn tok =>
     5.8 -        check_error (fn () => Thy_Output.output_text ctxt {markdown = false} (Token.input_of tok)))))
     5.9 +      (span |> maps (fn tok =>
    5.10 +        if Token.kind_of tok = Token.Comment (SOME Comment.Comment) then
    5.11 +          check_error (fn () => Thy_Output.output_text ctxt {markdown = false} (Token.input_of tok))
    5.12 +        else [])))
    5.13      ();
    5.14  
    5.15  fun report tr m =
     6.1 --- a/src/Pure/PIDE/command.scala	Tue Jan 16 09:58:17 2018 +0100
     6.2 +++ b/src/Pure/PIDE/command.scala	Tue Jan 16 11:27:52 2018 +0100
     6.3 @@ -396,11 +396,10 @@
     6.4  
     6.5    private def clean_tokens(tokens: List[Token]): List[(Token, Int)] =
     6.6    {
     6.7 -    val markers = Set("%", "--", Symbol.comment, Symbol.comment_decoded)
     6.8      def clean(toks: List[(Token, Int)]): List[(Token, Int)] =
     6.9        toks match {
    6.10          case (t1, i1) :: (t2, i2) :: rest =>
    6.11 -          if (t1.is_keyword && markers(t1.source)) clean(rest)
    6.12 +          if (t1.is_keyword && t1.source == "%" && t2.is_name) clean(rest)
    6.13            else (t1, i1) :: clean((t2, i2) :: rest)
    6.14          case _ => toks
    6.15        }
     7.1 --- a/src/Pure/Pure.thy	Tue Jan 16 09:58:17 2018 +0100
     7.2 +++ b/src/Pure/Pure.thy	Tue Jan 16 11:27:52 2018 +0100
     7.3 @@ -6,7 +6,7 @@
     7.4  
     7.5  theory Pure
     7.6  keywords
     7.7 -    "!!" "!" "+" "--" ":" ";" "<" "<=" "==" "=>" "?" "[" "\<comment>" "\<equiv>" "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
     7.8 +    "!!" "!" "+" ":" ";" "<" "<=" "==" "=>" "?" "[" "\<comment>" "\<equiv>" "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
     7.9      "\<subseteq>" "]" "attach" "binder" "in" "infix" "infixl" "infixr" "is" "open" "output"
    7.10      "overloaded" "pervasive" "premises" "structure" "unchecked"
    7.11    and "private" "qualified" :: before_command
     8.1 --- a/src/Pure/Thy/thy_output.ML	Tue Jan 16 09:58:17 2018 +0100
     8.2 +++ b/src/Pure/Thy/thy_output.ML	Tue Jan 16 11:27:52 2018 +0100
     8.3 @@ -355,11 +355,7 @@
     8.4              (Basic_Token cmd, (markup_false, d)))]));
     8.5  
     8.6      val cmt = Scan.peek (fn d =>
     8.7 -      Scan.one is_black_comment >>
     8.8 -        (fn tok => (NONE, (Basic_Token tok, ("", d)))) ||
     8.9 -      (Parse.$$$ "--" || Parse.$$$ Symbol.comment) |--
    8.10 -        Parse.!!!! (improper |-- Parse.document_source) >>
    8.11 -        (fn source => (NONE, (Markup_Token ("cmt", source), ("", d)))));
    8.12 +      Scan.one is_black_comment >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));
    8.13  
    8.14      val other = Scan.peek (fn d =>
    8.15         Parse.not_eof >> (fn tok => (NONE, (Basic_Token tok, ("", d)))));