discontinued old-style {* verbatim *} tokens;
author wenzelm
Mon, 06 Dec 2021 15:34:54 +0100
changeset 74887 56247fdb8bbb
parent 74886 fa5476c54731
child 74888 1c50ddcf6a01
NEWS
lib/texinputs/isabelle.sty
src/Doc/Isar_Ref/Outer_Syntax.thy
src/Doc/Tutorial/ToyList/ToyList.thy
src/HOL/Bali/Term.thy
src/HOL/Eisbach/match_method.ML
src/Pure/General/scan.scala
src/Pure/Isar/args.ML
src/Pure/Isar/method.ML
src/Pure/Isar/parse.ML
src/Pure/Isar/parse.scala
src/Pure/Isar/token.ML
src/Pure/Isar/token.scala
src/Pure/PIDE/command.ML
src/Pure/PIDE/markup.scala
src/Pure/PIDE/rendering.scala
src/Pure/Pure.thy
src/Pure/Thy/bibtex.ML
src/Pure/Thy/document_antiquotations.ML
src/Pure/Thy/document_output.ML
src/Pure/Thy/sessions.ML
src/Pure/Thy/thy_header.ML
src/Pure/Tools/rail.ML
src/Pure/Tools/update_cartouches.scala
src/Pure/Tools/update_comments.scala
src/Tools/Code/code_target.ML
src/Tools/jEdit/src/jedit_rendering.scala
--- a/NEWS	Mon Dec 06 15:10:15 2021 +0100
+++ b/NEWS	Mon Dec 06 15:34:54 2021 +0100
@@ -7,6 +7,12 @@
 New in this Isabelle version
 ----------------------------
 
+*** General ***
+
+* Old-style {* verbatim *} tokens have been discontinued (legacy feature
+since Isabelle2019). INCOMPATIBILITY, use \<open>cartouche\<close> syntax instead.
+
+
 *** HOL ***
 
 * Theory "HOL.Relation": Added lemmas asymp_less and asymp_greater to
--- a/lib/texinputs/isabelle.sty	Mon Dec 06 15:10:15 2021 +0100
+++ b/lib/texinputs/isabelle.sty	Mon Dec 06 15:34:54 2021 +0100
@@ -140,8 +140,6 @@
 \chardef\isacharbar=`\|%
 \chardef\isacharbraceright=`\}%
 \chardef\isachartilde=`\~%
-\def\isacharverbatimopen{\isacharbraceleft\isacharasterisk}%
-\def\isacharverbatimclose{\isacharasterisk\isacharbraceright}%
 \def\isacartoucheopen{\isatext{\guilsinglleft}}%
 \def\isacartoucheclose{\isatext{\guilsinglright}}%
 }
--- a/src/Doc/Isar_Ref/Outer_Syntax.thy	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Doc/Isar_Ref/Outer_Syntax.thy	Mon Dec 06 15:34:54 2021 +0100
@@ -234,12 +234,11 @@
 
 text \<open>
   A chunk of document @{syntax text} is usually given as @{syntax cartouche}
-  \<open>\<open>\<dots>\<close>\<close> or @{syntax verbatim}, i.e.\ enclosed in \<^verbatim>\<open>{*\<close>~\<open>\<dots>\<close>~\<^verbatim>\<open>*}\<close>. For
-  convenience, any of the smaller text unit that conforms to @{syntax name} is
-  admitted as well.
+  \<open>\<open>\<dots>\<close>\<close>. For convenience, any of the smaller text unit that conforms to
+  @{syntax name} is admitted as well.
 
   \<^rail>\<open>
-    @{syntax_def text}: @{syntax embedded} | @{syntax verbatim}
+    @{syntax_def text}: @{syntax embedded}
   \<close>
 
   Typical uses are document markup commands, like \<^theory_text>\<open>chapter\<close>, \<^theory_text>\<open>section\<close> etc.
--- a/src/Doc/Tutorial/ToyList/ToyList.thy	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Doc/Tutorial/ToyList/ToyList.thy	Mon Dec 06 15:34:54 2021 +0100
@@ -107,7 +107,7 @@
 When Isabelle prints a syntax error message, it refers to the HOL syntax as
 the \textbf{inner syntax} and the enclosing theory language as the \textbf{outer syntax}.
 
-Comments\index{comment} must be in enclosed in \texttt{(* }and\texttt{ *)}.
+Comments\index{comment} must be in enclosed in \texttt{(*}and\texttt{*)}.
 
 \section{Evaluation}
 \index{evaluation}
--- a/src/HOL/Bali/Term.thy	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/HOL/Bali/Term.thy	Mon Dec 06 15:34:54 2021 +0100
@@ -110,7 +110,7 @@
                | UNot     \<comment> \<open>{\tt !} logical complement\<close>
 
 \<comment> \<open>function codes for binary operations\<close>
-datatype binop = Mul     \<comment> \<open>{\tt * }   multiplication\<close>
+datatype binop = Mul     \<comment> \<open>{\tt *}   multiplication\<close>
                | Div     \<comment> \<open>{\tt /}   division\<close>
                | Mod     \<comment> \<open>{\tt \%}   remainder\<close>
                | Plus    \<comment> \<open>{\tt +}   addition\<close>
--- a/src/HOL/Eisbach/match_method.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/HOL/Eisbach/match_method.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -95,7 +95,7 @@
         else
           let val b = #1 (the opt_dyn)
           in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end)) --
-  Scan.lift (for_fixes -- (\<^keyword>\<open>\<Rightarrow>\<close> |-- Parse.token Parse.text))
+  Scan.lift (for_fixes -- (\<^keyword>\<open>\<Rightarrow>\<close> |-- Parse.token Parse.embedded))
   >> (fn ((ctxt, ts), (fixes, body)) =>
     (case Token.get_value body of
       SOME (Token.Source src) =>
--- a/src/Pure/General/scan.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/General/scan.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -24,7 +24,6 @@
   abstract class Line_Context
   case object Finished extends Line_Context
   case class Quoted(quote: String) extends Line_Context
-  case object Verbatim extends Line_Context
   case class Cartouche(depth: Int) extends Line_Context
   case class Comment_Prefix(symbol: Symbol.Symbol) extends Line_Context
   case class Cartouche_Comment(depth: Int) extends Line_Context
@@ -136,41 +135,6 @@
       quote ~ quoted_body(quote) ^^ { case x ~ y => x + y }
 
 
-    /* verbatim text */
-
-    private def verbatim_body: Parser[String] =
-      rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString)
-
-    def verbatim: Parser[String] =
-    {
-      "{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z }
-    }.named("verbatim")
-
-    def verbatim_content(source: String): String =
-    {
-      require(parseAll(verbatim, source).successful, "no verbatim text")
-      source.substring(2, source.length - 2)
-    }
-
-    def verbatim_line(ctxt: Line_Context): Parser[(String, Line_Context)] =
-    {
-      ctxt match {
-        case Finished =>
-          "{*" ~ verbatim_body ~ opt_term("*}") ^^
-            { case x ~ y ~ Some(z) => (x + y + z, Finished)
-              case x ~ y ~ None => (x + y, Verbatim) }
-        case Verbatim =>
-          verbatim_body ~ opt_term("*}") ^^
-            { case x ~ Some(y) => (x + y, Finished)
-              case x ~ None => (x, Verbatim) }
-        case _ => failure("")
-      }
-    }.named("verbatim_line")
-
-    val recover_verbatim: Parser[String] =
-      "{*" ~ verbatim_body ^^ { case x ~ y => x + y }
-
-
     /* nested text cartouches */
 
     def cartouche_depth(depth: Int): Parser[(String, Int)] = new Parser[(String, Int)]
--- a/src/Pure/Isar/args.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Isar/args.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -27,9 +27,6 @@
   val name_position: (string * Position.T) parser
   val cartouche_inner_syntax: string parser
   val cartouche_input: Input.source parser
-  val text_token: Token.T parser
-  val text_input: Input.source parser
-  val text: string parser
   val binding: binding parser
   val alt_name: string parser
   val liberal_name: string parser
@@ -47,8 +44,7 @@
   val named_fact: (string -> string option * thm list) -> thm list parser
   val named_attribute: (string * Position.T -> morphism -> attribute) ->
     (morphism -> attribute) parser
-  val text_declaration: (Input.source -> declaration) -> declaration parser
-  val cartouche_declaration: (Input.source -> declaration) -> declaration parser
+  val embedded_declaration: (Input.source -> declaration) -> declaration parser
   val typ_abbrev: typ context_parser
   val typ: typ context_parser
   val term: term context_parser
@@ -110,10 +106,6 @@
 val cartouche_inner_syntax = cartouche >> Token.inner_syntax_of;
 val cartouche_input = cartouche >> Token.input_of;
 
-val text_token = Parse.token (Parse.embedded || Parse.verbatim);
-val text_input = text_token >> Token.input_of;
-val text = text_token >> Token.content_of;
-
 val binding = Parse.input name >> (Binding.make o Input.source_content);
 val alt_name = alt_string >> Token.content_of;
 val liberal_name = (symbolic >> Token.content_of) || name;
@@ -157,11 +149,9 @@
   name_token >>
     Token.evaluate Token.Attribute (fn tok => att (Token.content_of tok, Token.pos_of tok));
 
-fun text_declaration read =
-  internal_declaration || text_token >> Token.evaluate Token.Declaration (read o Token.input_of);
-
-fun cartouche_declaration read =
-  internal_declaration || cartouche >> Token.evaluate Token.Declaration (read o Token.input_of);
+fun embedded_declaration read =
+  internal_declaration ||
+  Parse.token Parse.embedded >> Token.evaluate Token.Declaration (read o Token.input_of);
 
 
 (* terms and types *)
--- a/src/Pure/Isar/method.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Isar/method.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -327,7 +327,7 @@
 
 val parse_tactic =
   Scan.state :|-- (fn context =>
-    Scan.lift (Args.text_declaration (fn source =>
+    Scan.lift (Args.embedded_declaration (fn source =>
       let
         val tac =
           context
@@ -749,7 +749,7 @@
   in Parse.read_embedded ctxt keywords (Scan.many Token.not_eof) #> read_closure ctxt end;
 
 val text_closure =
-  Args.context -- Scan.lift (Parse.token Parse.text) >> (fn (ctxt, tok) =>
+  Args.context -- Scan.lift (Parse.token Parse.embedded) >> (fn (ctxt, tok) =>
     (case Token.get_value tok of
       SOME (Token.Source src) => read ctxt src
     | _ =>
--- a/src/Pure/Isar/parse.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Isar/parse.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -30,7 +30,6 @@
   val string: string parser
   val string_position: (string * Position.T) parser
   val alt_string: string parser
-  val verbatim: string parser
   val cartouche: string parser
   val control: Antiquote.control parser
   val eof: string parser
@@ -70,7 +69,6 @@
   val embedded_inner_syntax: string parser
   val embedded_input: Input.source parser
   val embedded_position: (string * Position.T) parser
-  val text: string parser
   val path_input: Input.source parser
   val path: string parser
   val path_binding: (string * Position.T) parser
@@ -199,7 +197,6 @@
 val float_number = kind Token.Float;
 val string = kind Token.String;
 val alt_string = kind Token.Alt_String;
-val verbatim = kind Token.Verbatim;
 val cartouche = kind Token.Cartouche;
 val control = token (kind Token.control_kind) >> (the o Token.get_control);
 val eof = kind Token.EOF;
@@ -288,8 +285,6 @@
 val embedded_input = input embedded;
 val embedded_position = embedded_input >> Input.source_content;
 
-val text = group (fn () => "text") (embedded || verbatim);
-
 val path_input = group (fn () => "file name/path specification") embedded_input;
 val path = path_input >> Input.string_of;
 val path_binding = group (fn () => "path binding (strict file name)") (position embedded);
@@ -399,8 +394,8 @@
 
 (* embedded source text *)
 
-val ML_source = input (group (fn () => "ML source") text);
-val document_source = input (group (fn () => "document source") text);
+val ML_source = input (group (fn () => "ML source") embedded);
+val document_source = input (group (fn () => "document source") embedded);
 
 val document_marker =
   group (fn () => "document marker")
@@ -440,7 +435,7 @@
 
 val argument_kinds =
  [Token.Ident, Token.Long_Ident, Token.Sym_Ident, Token.Var, Token.Type_Ident, Token.Type_Var,
-  Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche, Token.Verbatim];
+  Token.Nat, Token.Float, Token.String, Token.Alt_String, Token.Cartouche];
 
 fun arguments is_symid =
   let
--- a/src/Pure/Isar/parse.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Isar/parse.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -65,9 +65,9 @@
     def nat: Parser[Int] = atom("natural number", _.is_nat) ^^ (s => Integer.parseInt(s))
     def name: Parser[String] = atom("name", _.is_name)
     def embedded: Parser[String] = atom("embedded content", _.is_embedded)
-    def text: Parser[String] = atom("text", _.is_text)
-    def ML_source: Parser[String] = atom("ML source", _.is_text)
-    def document_source: Parser[String] = atom("document source", _.is_text)
+    def text: Parser[String] = atom("text", _.is_embedded)
+    def ML_source: Parser[String] = atom("ML source", _.is_embedded)
+    def document_source: Parser[String] = atom("document source", _.is_embedded)
 
     def opt_keyword(s: String): Parser[Boolean] =
       ($$$("(") ~! $$$(s) ~ $$$(")")) ^^ { case _ => true } | success(false)
--- a/src/Pure/Isar/token.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Isar/token.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -11,7 +11,7 @@
     Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat |
     Float | Space |
     (*delimited content*)
-    String | Alt_String | Verbatim | Cartouche |
+    String | Alt_String | Cartouche |
     Control of Antiquote.control |
     Comment of Comment.kind option |
     (*special content*)
@@ -123,7 +123,7 @@
   Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat |
   Float | Space |
   (*delimited content*)
-  String | Alt_String | Verbatim | Cartouche |
+  String | Alt_String | Cartouche |
   Control of Antiquote.control |
   Comment of Comment.kind option |
   (*special content*)
@@ -151,7 +151,6 @@
   | Space => "white space"
   | String => "quoted string"
   | Alt_String => "back-quoted string"
-  | Verbatim => "verbatim text"
   | Cartouche => "text cartouche"
   | Control _ => "control cartouche"
   | Comment NONE => "informal comment"
@@ -166,7 +165,6 @@
 val delimited_kind =
   (fn String => true
     | Alt_String => true
-    | Verbatim => true
     | Cartouche => true
     | Control _ => true
     | Comment _ => true
@@ -323,7 +321,6 @@
   | Type_Var => (Markup.tvar, "")
   | String => (Markup.string, "")
   | Alt_String => (Markup.alt_string, "")
-  | Verbatim => (Markup.verbatim, "")
   | Cartouche => (Markup.cartouche, "")
   | Control _ => (Markup.cartouche, "")
   | Comment _ => (Markup.comment, "")
@@ -374,7 +371,6 @@
   (case kind of
     String => Symbol_Pos.quote_string_qq x
   | Alt_String => Symbol_Pos.quote_string_bq x
-  | Verbatim => enclose "{*" "*}" x
   | Cartouche => cartouche x
   | Control control => Symbol_Pos.content (Antiquote.control_symbols control)
   | Comment NONE => enclose "(*" "*)" x
@@ -624,22 +620,6 @@
   | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s;
 
 
-(* scan verbatim text *)
-
-val scan_verb =
-  $$$ "*" --| Scan.ahead (~$$ "}") ||
-  Scan.one (fn (s, _) => s <> "*" andalso Symbol.not_eof s) >> single;
-
-val scan_verbatim =
-  Scan.ahead ($$ "{" -- $$ "*") |--
-    !!! "unclosed verbatim text"
-      ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") --
-        (Scan.repeats scan_verb -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos)));
-
-val recover_verbatim =
-  $$$ "{" @@@ $$$ "*" @@@ Scan.repeats scan_verb;
-
-
 (* scan cartouche *)
 
 val scan_cartouche =
@@ -678,7 +658,6 @@
 fun scan_token keywords = !!! "bad input"
   (Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
     Symbol_Pos.scan_string_bq err_prefix >> token_range Alt_String ||
-    scan_verbatim >> token_range Verbatim ||
     scan_comment >> token_range (Comment NONE) ||
     Comment.scan_outer >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
     scan_cartouche >> token_range Cartouche ||
@@ -701,7 +680,6 @@
 fun recover msg =
   (Symbol_Pos.recover_string_qq ||
     Symbol_Pos.recover_string_bq ||
-    recover_verbatim ||
     Symbol_Pos.recover_cartouche ||
     Symbol_Pos.recover_comment ||
     Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single)
--- a/src/Pure/Isar/token.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Isar/token.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -32,7 +32,6 @@
     /*delimited content*/
     val STRING = Value("string")
     val ALT_STRING = Value("back-quoted string")
-    val VERBATIM = Value("verbatim text")
     val CARTOUCHE = Value("text cartouche")
     val CONTROL = Value("control cartouche")
     val INFORMAL_COMMENT = Value("informal comment")
@@ -53,13 +52,12 @@
     {
       val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
       val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
-      val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
       val cmt = comment ^^ (x => Token(Token.Kind.INFORMAL_COMMENT, x))
       val formal_cmt = comment_cartouche ^^ (x => Token(Token.Kind.FORMAL_COMMENT, x))
       val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
       val ctrl = control_cartouche ^^ (x => Token(Token.Kind.CONTROL, x))
 
-      string | (alt_string | (verb | (cmt | (formal_cmt | (cart | ctrl)))))
+      string | (alt_string | (cmt | (formal_cmt | (cart | ctrl))))
     }
 
     private def other_token(keywords: Keyword.Keywords): Parser[Token] =
@@ -99,8 +97,7 @@
       val recover_delimited =
         (recover_quoted("\"") |
           (recover_quoted("`") |
-            (recover_verbatim |
-              (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
+            (recover_cartouche | recover_comment))) ^^ (x => Token(Token.Kind.ERROR, x))
 
       val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
 
@@ -119,14 +116,13 @@
         quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
       val alt_string =
         quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
-      val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
       val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
       val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.INFORMAL_COMMENT, x), c) }
       val formal_cmt =
         comment_cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.FORMAL_COMMENT, x), c) }
       val other = other_token(keywords) ^^ { case x => (x, Scan.Finished) }
 
-      string | (alt_string | (verb | (cart | (cmt | (formal_cmt | other)))))
+      string | (alt_string | (cart | (cmt | (formal_cmt | other))))
     }
   }
 
@@ -286,7 +282,6 @@
     kind == Token.Kind.VAR ||
     kind == Token.Kind.TYPE_IDENT ||
     kind == Token.Kind.TYPE_VAR
-  def is_text: Boolean = is_embedded || kind == Token.Kind.VERBATIM
   def is_space: Boolean = kind == Token.Kind.SPACE
   def is_informal_comment: Boolean = kind == Token.Kind.INFORMAL_COMMENT
   def is_formal_comment: Boolean = kind == Token.Kind.FORMAL_COMMENT
@@ -302,7 +297,6 @@
   def is_unfinished: Boolean = is_error &&
    (source.startsWith("\"") ||
     source.startsWith("`") ||
-    source.startsWith("{*") ||
     source.startsWith("(*") ||
     source.startsWith(Symbol.open) ||
     source.startsWith(Symbol.open_decoded))
@@ -319,7 +313,6 @@
   def content: String =
     if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source)
     else if (kind == Token.Kind.ALT_STRING) Scan.Parsers.quoted_content("`", source)
-    else if (kind == Token.Kind.VERBATIM) Scan.Parsers.verbatim_content(source)
     else if (kind == Token.Kind.CARTOUCHE) Scan.Parsers.cartouche_content(source)
     else if (kind == Token.Kind.INFORMAL_COMMENT) Scan.Parsers.comment_content(source)
     else if (kind == Token.Kind.FORMAL_COMMENT) Comment.content(source)
--- a/src/Pure/PIDE/command.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/PIDE/command.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -147,14 +147,6 @@
     val token_reports = map (reports_of_token keywords) span;
     val _ = Position.reports_text (maps #2 token_reports @ maps command_reports span);
 
-    val verbatim =
-      span |> map_filter (fn tok =>
-        if Token.kind_of tok = Token.Verbatim then SOME (Token.pos_of tok) else NONE);
-    val _ =
-      if null verbatim then ()
-      else legacy_feature ("Old-style {* verbatim *} token -- use \<open>cartouche\<close> instead" ^
-        Position.here_list verbatim);
-
     val core_range = Token.core_range_of span;
     val tr =
       if exists #1 token_reports
--- a/src/Pure/PIDE/markup.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/PIDE/markup.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -433,7 +433,6 @@
   val OPERATOR = "operator"
   val STRING = "string"
   val ALT_STRING = "alt_string"
-  val VERBATIM = "verbatim"
   val CARTOUCHE = "cartouche"
   val COMMENT = "comment"
 
--- a/src/Pure/PIDE/rendering.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/PIDE/rendering.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -117,7 +117,6 @@
     Markup.OPERATOR -> Color.operator,
     Markup.STRING -> Color.main,
     Markup.ALT_STRING -> Color.main,
-    Markup.VERBATIM -> Color.main,
     Markup.CARTOUCHE -> Color.main,
     Markup.LITERAL -> Color.keyword1,
     Markup.DELIMITER -> Color.main,
@@ -151,7 +150,6 @@
     Map(
       Markup.STRING -> Color.quoted,
       Markup.ALT_STRING -> Color.quoted,
-      Markup.VERBATIM -> Color.quoted,
       Markup.CARTOUCHE -> Color.quoted,
       Markup.ANTIQUOTED -> Color.antiquoted)
 
@@ -209,7 +207,7 @@
     Markup.Elements(Markup.COMPLETION, Markup.NO_COMPLETION)
 
   val language_context_elements =
-    Markup.Elements(Markup.STRING, Markup.ALT_STRING, Markup.VERBATIM,
+    Markup.Elements(Markup.STRING, Markup.ALT_STRING,
       Markup.CARTOUCHE, Markup.COMMENT, Markup.LANGUAGE,
       Markup.ML_STRING, Markup.ML_COMMENT)
 
--- a/src/Pure/Pure.thy	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Pure.thy	Mon Dec 06 15:34:54 2021 +0100
@@ -301,13 +301,13 @@
 val _ =
   Outer_Syntax.local_theory \<^command_keyword>\<open>attribute_setup\<close> "define attribute in ML"
     (Parse.name_position --
-        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.text "")
+        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.embedded "")
       >> (fn (name, (txt, cmt)) => Attrib.attribute_setup name txt cmt));
 
 val _ =
   Outer_Syntax.local_theory \<^command_keyword>\<open>method_setup\<close> "define proof method in ML"
     (Parse.name_position --
-        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.text "")
+        Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.ML_source -- Scan.optional Parse.embedded "")
       >> (fn (name, (txt, cmt)) => Method.method_setup name txt cmt));
 
 val _ =
@@ -572,7 +572,7 @@
 val _ =
   Outer_Syntax.local_theory \<^command_keyword>\<open>named_theorems\<close>
     "declare named collection of theorems"
-    (Parse.and_list1 (Parse.binding -- Scan.optional Parse.text "") >>
+    (Parse.and_list1 (Parse.binding -- Scan.optional Parse.embedded "") >>
       fold (fn (b, descr) => snd o Named_Theorems.declare b descr));
 
 in end\<close>
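
With Parse.text removed, the optional descriptions of 'attribute_setup', 'method_setup' and 'named_theorems' are parsed as embedded tokens (name, string, or cartouche), so declarations of the following shape keep working; a minimal sketch (hypothetical declarations, assuming a standard Pure/HOL context):

    named_theorems my_simps \<open>ad-hoc simplification rules\<close>

    method_setup my_trivial =
      \<open>Scan.succeed (K (SIMPLE_METHOD all_tac))\<close>
      \<open>succeed without changing the goal\<close>
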
--- a/src/Pure/Thy/bibtex.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Thy/bibtex.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -42,8 +42,7 @@
   Theory.setup
    (Document_Antiquotation.setup_option \<^binding>\<open>cite_macro\<close> (Config.put cite_macro) #>
     Document_Output.antiquotation_raw \<^binding>\<open>cite\<close>
-      (Scan.lift
-        (Scan.option (Parse.verbatim || Parse.cartouche) -- Parse.and_list1 Args.name_position))
+      (Scan.lift (Scan.option Parse.cartouche -- Parse.and_list1 Args.name_position))
       (fn ctxt => fn (opt, citations) =>
         let
           val _ =
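
The optional text argument of the 'cite' document antiquotation is now given as a cartouche only; a hypothetical use (assuming the key is present in the loaded bibliography):

    text \<open>See @{cite \<open>Chapter 2\<close> "isabelle-isar-ref"} for the outer syntax of documents.\<close>
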
--- a/src/Pure/Thy/document_antiquotations.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Thy/document_antiquotations.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -195,7 +195,7 @@
   Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt;
 
 fun text_antiquotation name =
-  Document_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input)
+  Document_Output.antiquotation_raw_embedded name (Scan.lift Parse.embedded_input)
     (fn ctxt => fn text =>
       let
         val _ = report_text ctxt text;
@@ -206,7 +206,7 @@
       end);
 
 val theory_text_antiquotation =
-  Document_Output.antiquotation_raw_embedded \<^binding>\<open>theory_text\<close> (Scan.lift Args.text_input)
+  Document_Output.antiquotation_raw_embedded \<^binding>\<open>theory_text\<close> (Scan.lift Parse.embedded_input)
     (fn ctxt => fn text =>
       let
         val keywords = Thy_Header.get_keywords' ctxt;
@@ -273,7 +273,7 @@
 (* verbatim text *)
 
 val _ = Theory.setup
-  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>verbatim\<close> (Scan.lift Args.text_input)
+  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>verbatim\<close> (Scan.lift Parse.embedded_input)
     (fn ctxt => fn text =>
       let
         val pos = Input.pos_of text;
@@ -327,17 +327,17 @@
       ML_Lex.read (ML_Syntax.print_string (ML_Lex.content_of tok))
   | test_functor _ = raise Fail "Bad ML functor specification";
 
-val parse_ml0 = Args.text_input >> (fn source => ("", (source, Input.empty)));
+val parse_ml0 = Parse.embedded_input >> (fn source => ("", (source, Input.empty)));
 
 val parse_ml =
-  Args.text_input -- Scan.optional (Args.colon |-- Args.text_input) Input.empty >> pair "";
+  Parse.embedded_input -- Scan.optional (Args.colon |-- Parse.embedded_input) Input.empty >> pair "";
 
 val parse_exn =
-  Args.text_input -- Scan.optional (Args.$$$ "of" |-- Args.text_input) Input.empty >> pair "";
+  Parse.embedded_input -- Scan.optional (Args.$$$ "of" |-- Parse.embedded_input) Input.empty >> pair "";
 
 val parse_type =
   (Parse.type_args >> (fn [] => "" | [a] => a ^ " " | bs => enclose "(" ") " (commas bs))) --
-    (Args.text_input -- Scan.optional (Args.$$$ "=" |-- Args.text_input) Input.empty);
+    (Parse.embedded_input -- Scan.optional (Args.$$$ "=" |-- Parse.embedded_input) Input.empty);
 
 fun eval ctxt pos ml =
   ML_Context.eval_in (SOME ctxt) ML_Compiler.flags pos ml
--- a/src/Pure/Thy/document_output.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Thy/document_output.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -172,7 +172,6 @@
         else output false "" ""
     | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
     | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
-    | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
     | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
     | Token.Control control => output_body ctxt false "" "" (Antiquote.control_symbols control)
     | _ => output false "" "")
--- a/src/Pure/Thy/sessions.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Thy/sessions.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -52,7 +52,7 @@
   Scan.optional (Parse.$$$ "in" |-- Parse.!!! Parse.path_input) (Input.string ".") --
   (Parse.$$$ "=" |--
     Parse.!!! (Scan.option (Parse.session_name --| Parse.!!! (Parse.$$$ "+")) --
-      Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.text)) Input.empty --
+      Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.embedded)) Input.empty --
       Scan.optional (Parse.$$$ "options" |-- Parse.!!! Parse.options) [] --
       Scan.optional (Parse.$$$ "sessions" |--
         Parse.!!! (Scan.repeat1 Parse.session_name)) [] --
--- a/src/Pure/Thy/thy_header.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Thy/thy_header.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -147,7 +147,7 @@
 
 val abbrevs =
   Parse.and_list1
-    (Scan.repeat1 Parse.text -- (Parse.$$$ "=" |-- Parse.!!! (Scan.repeat1 Parse.text))
+    (Scan.repeat1 Parse.embedded -- (Parse.$$$ "=" |-- Parse.!!! (Scan.repeat1 Parse.embedded))
       >> uncurry (map_product pair)) >> flat;
 
 val keyword_decls = Parse.and_list1 keyword_decl >> flat;
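
Theory header 'abbrevs' entries are likewise plain embedded tokens (strings or cartouches); a minimal hypothetical header:

    theory Demo
      imports Main
      abbrevs "<->" = "\<longleftrightarrow>"
    begin
    end
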
--- a/src/Pure/Tools/rail.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Tools/rail.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -387,7 +387,7 @@
   in Latex.environment "railoutput" (maps output_rule rules) end;
 
 val _ = Theory.setup
-  (Document_Output.antiquotation_raw_embedded \<^binding>\<open>rail\<close> (Scan.lift Args.text_input)
+  (Document_Output.antiquotation_raw_embedded \<^binding>\<open>rail\<close> (Scan.lift Parse.embedded_input)
     (fn ctxt => output_rules ctxt o read ctxt));
 
 end;
--- a/src/Pure/Tools/update_cartouches.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Tools/update_cartouches.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -14,8 +14,6 @@
 {
   /* update cartouches */
 
-  private val Verbatim_Body = """(?s)[ \t]*(.*?)[ \t]*""".r
-
   val Text_Antiq: Regex = """(?s)@\{\s*text\b\s*(.+)\}""".r
 
   def update_text(content: String): String =
@@ -46,12 +44,6 @@
       (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator)
         yield {
           if (tok.kind == Token.Kind.ALT_STRING) Symbol.cartouche(tok.content)
-          else if (tok.kind == Token.Kind.VERBATIM) {
-            tok.content match {
-              case Verbatim_Body(s) => Symbol.cartouche(s)
-              case s => tok.source
-            }
-          }
           else tok.source
         }
       ).mkString
@@ -96,7 +88,7 @@
     -t           replace @{text} antiquotations within text tokens
 
   Recursively find .thy or ROOT files and update theory syntax to use
-  cartouches instead of old-style {* verbatim *} or `alt_string` tokens.
+  cartouches instead of `alt_string` tokens.
 
   Old versions of files are preserved by appending "~~".
 """,
--- a/src/Pure/Tools/update_comments.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Pure/Tools/update_comments.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -23,7 +23,7 @@
         case tok :: rest
         if tok.source == "--" || tok.source == Symbol.comment =>
           rest.dropWhile(_.is_space) match {
-            case tok1 :: rest1 if tok1.is_text =>
+            case tok1 :: rest1 if tok1.is_embedded =>
               update(rest1, make_comment(tok1) :: result)
             case _ => update(rest, tok.source :: result)
           }
--- a/src/Tools/Code/code_target.ML	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Tools/Code/code_target.ML	Mon Dec 06 15:34:54 2021 +0100
@@ -746,7 +746,7 @@
   Outer_Syntax.command \<^command_keyword>\<open>code_printing\<close> "declare dedicated printing for code symbols"
     (parse_symbol_pragmas (Code_Printer.parse_const_syntax) (Code_Printer.parse_tyco_syntax)
       Parse.string (Parse.minus >> K ()) (Parse.minus >> K ())
-      (Parse.text -- Scan.optional (\<^keyword>\<open>for\<close> |-- parse_simple_symbols) [])
+      (Parse.embedded -- Scan.optional (\<^keyword>\<open>for\<close> |-- parse_simple_symbols) [])
       >> (Toplevel.theory o fold set_printings_cmd));
 
 val _ =
--- a/src/Tools/jEdit/src/jedit_rendering.scala	Mon Dec 06 15:10:15 2021 +0100
+++ b/src/Tools/jEdit/src/jedit_rendering.scala	Mon Dec 06 15:34:54 2021 +0100
@@ -70,7 +70,6 @@
       Token.Kind.SPACE -> NULL,
       Token.Kind.STRING -> LITERAL1,
       Token.Kind.ALT_STRING -> LITERAL2,
-      Token.Kind.VERBATIM -> COMMENT3,
       Token.Kind.CARTOUCHE -> COMMENT3,
       Token.Kind.CONTROL -> COMMENT3,
       Token.Kind.INFORMAL_COMMENT -> COMMENT1,