changeset 73761 ef1a18e20ace
author    wenzelm
date      Fri, 21 May 2021 12:29:29 +0200
parent    73760 f4be1b0d7a51
child     73762 14841c6e4d5f

clarified modules;
src/Doc/Main/Main_Doc.thy
src/Doc/Prog_Prove/LaTeXsugar.thy
src/Doc/antiquote_setup.ML
src/Doc/more_antiquote.ML
src/HOL/Library/LaTeXsugar.thy
src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML
src/HOL/Tools/value_command.ML
src/Pure/ML/ml_file.ML
src/Pure/PIDE/command.ML
src/Pure/PIDE/resources.ML
src/Pure/ROOT.ML
src/Pure/System/isabelle_tool.ML
src/Pure/System/scala_compiler.ML
src/Pure/Thy/bibtex.ML
src/Pure/Thy/document_antiquotations.ML
src/Pure/Thy/document_output.ML
src/Pure/Thy/thy_info.ML
src/Pure/Thy/thy_output.ML
src/Pure/Tools/doc.ML
src/Pure/Tools/jedit.ML
src/Pure/Tools/rail.ML
src/Pure/pure_syn.ML
src/Tools/Code/code_target.ML
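
Note: the change renames structure Thy_Output (Pure/Thy/thy_output.ML) to Document_Output (Pure/Thy/document_output.ML) with an otherwise unchanged signature, and updates all call sites accordingly. As a minimal sketch of client code against the renamed interface — the antiquotation name my_term below is hypothetical, not part of this changeset:

  (* hypothetical document antiquotation registered via the renamed module;
     mirrors the Theory.setup pattern used in the updated call sites below *)
  val _ =
    Theory.setup
      (Document_Output.antiquotation_pretty \<^binding>\<open>my_term\<close> Args.term
        (fn ctxt => fn t => Document_Output.pretty_term ctxt t));
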
--- a/src/Doc/Main/Main_Doc.thy	Fri May 21 11:19:53 2021 +0200
+++ b/src/Doc/Main/Main_Doc.thy	Fri May 21 12:29:29 2021 +0200
@@ -4,14 +4,14 @@
 begin
 
 setup \<open>
-  Thy_Output.antiquotation_pretty_source \<^binding>\<open>term_type_only\<close> (Args.term -- Args.typ_abbrev)
+  Document_Output.antiquotation_pretty_source \<^binding>\<open>term_type_only\<close> (Args.term -- Args.typ_abbrev)
     (fn ctxt => fn (t, T) =>
       (if fastype_of t = Sign.certify_typ (Proof_Context.theory_of ctxt) T then ()
        else error "term_type_only: type mismatch";
        Syntax.pretty_typ ctxt T))
 \<close>
 setup \<open>
-  Thy_Output.antiquotation_pretty_source \<^binding>\<open>expanded_typ\<close> Args.typ
+  Document_Output.antiquotation_pretty_source \<^binding>\<open>expanded_typ\<close> Args.typ
     Syntax.pretty_typ
 \<close>
 (*>*)
--- a/src/Doc/Prog_Prove/LaTeXsugar.thy	Fri May 21 11:19:53 2021 +0200
+++ b/src/Doc/Prog_Prove/LaTeXsugar.thy	Fri May 21 12:29:29 2021 +0200
@@ -44,10 +44,11 @@
   "_asm" :: "prop \<Rightarrow> asms" ("_")
 
 setup \<open>
-  Thy_Output.antiquotation_pretty_source \<^binding>\<open>const_typ\<close> (Scan.lift Args.embedded_inner_syntax)
+  Document_Output.antiquotation_pretty_source \<^binding>\<open>const_typ\<close>
+    (Scan.lift Args.embedded_inner_syntax)
     (fn ctxt => fn c =>
       let val tc = Proof_Context.read_const {proper = false, strict = false} ctxt c in
-        Pretty.block [Thy_Output.pretty_term ctxt tc, Pretty.str " ::",
+        Pretty.block [Document_Output.pretty_term ctxt tc, Pretty.str " ::",
           Pretty.brk 1, Syntax.pretty_typ ctxt (fastype_of tc)]
       end)
 \<close>
--- a/src/Doc/antiquote_setup.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Doc/antiquote_setup.ML	Fri May 21 12:29:29 2021 +0200
@@ -73,7 +73,7 @@
 fun prep_ml source =
   (#1 (Input.source_content source), ML_Lex.read_source source);
 
-fun index_ml name kind ml = Thy_Output.antiquotation_raw name
+fun index_ml name kind ml = Document_Output.antiquotation_raw name
   (Scan.lift (Args.text_input -- Scan.option (Args.colon |-- Args.text_input)))
   (fn ctxt => fn (source1, opt_source2) =>
     let
@@ -100,7 +100,7 @@
     in
       Latex.block
        [Latex.string ("\\indexdef{}{" ^ kind' ^ "}{" ^ clean_string (ml_name txt1) ^ "}"),
-        Thy_Output.verbatim ctxt txt']
+        Document_Output.verbatim ctxt txt']
     end);
 
 in
@@ -120,17 +120,17 @@
 (* named theorems *)
 
 val _ =
-  Theory.setup (Thy_Output.antiquotation_raw \<^binding>\<open>named_thms\<close>
+  Theory.setup (Document_Output.antiquotation_raw \<^binding>\<open>named_thms\<close>
     (Scan.repeat (Attrib.thm -- Scan.lift (Args.parens Args.name)))
     (fn ctxt =>
       map (fn (thm, name) =>
         Output.output
           (Document_Antiquotation.format ctxt
-            (Document_Antiquotation.delimit ctxt (Thy_Output.pretty_thm ctxt thm))) ^
+            (Document_Antiquotation.delimit ctxt (Document_Output.pretty_thm ctxt thm))) ^
         enclose "\\rulename{" "}" (Output.output name))
       #> space_implode "\\par\\smallskip%\n"
       #> Latex.string #> single
-      #> Thy_Output.isabelle ctxt));
+      #> Document_Output.isabelle ctxt));
 
 
 (* Isabelle/Isar entities (with index) *)
@@ -150,7 +150,7 @@
 val arg = enclose "{" "}" o clean_string;
 
 fun entity check markup binding index =
-  Thy_Output.antiquotation_raw
+  Document_Output.antiquotation_raw
     (binding |> Binding.map_name (fn name => name ^
       (case index of NONE => "" | SOME true => "_def" | SOME false => "_ref")))
     (Scan.lift (Scan.optional (Args.parens Args.name) "" -- Args.name_position))
@@ -211,7 +211,7 @@
 (* show symbols *)
 
 val _ =
-  Theory.setup (Thy_Output.antiquotation_raw \<^binding>\<open>show_symbols\<close> (Scan.succeed ())
+  Theory.setup (Document_Output.antiquotation_raw \<^binding>\<open>show_symbols\<close> (Scan.succeed ())
     (fn _ => fn _ =>
       let
         val symbol_name =
--- a/src/Doc/more_antiquote.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Doc/more_antiquote.ML	Fri May 21 12:29:29 2021 +0200
@@ -10,7 +10,7 @@
 (* class specifications *)
 
 val _ =
-  Theory.setup (Thy_Output.antiquotation_pretty \<^binding>\<open>class_spec\<close> (Scan.lift Args.name)
+  Theory.setup (Document_Output.antiquotation_pretty \<^binding>\<open>class_spec\<close> (Scan.lift Args.name)
     (fn ctxt => fn s =>
       let
         val thy = Proof_Context.theory_of ctxt;
@@ -21,7 +21,7 @@
 (* code theorem antiquotation *)
 
 val _ =
-  Theory.setup (Thy_Output.antiquotation_pretty \<^binding>\<open>code_thms\<close> Args.term
+  Theory.setup (Document_Output.antiquotation_pretty \<^binding>\<open>code_thms\<close> Args.term
     (fn ctxt => fn raw_const =>
       let
         val thy = Proof_Context.theory_of ctxt;
@@ -33,6 +33,6 @@
           |> these
           |> map_filter (fn (_, (some_thm, proper)) => if proper then some_thm else NONE)
           |> map (HOLogic.mk_obj_eq o Variable.import_vars ctxt o Axclass.overload ctxt);
-      in Pretty.chunks (map (Thy_Output.pretty_thm ctxt) thms) end));
+      in Pretty.chunks (map (Document_Output.pretty_thm ctxt) thms) end));
 
 end;
--- a/src/HOL/Library/LaTeXsugar.thy	Fri May 21 11:19:53 2021 +0200
+++ b/src/HOL/Library/LaTeXsugar.thy	Fri May 21 12:29:29 2021 +0200
@@ -97,11 +97,11 @@
   "_asm" :: "prop \<Rightarrow> asms" ("_")
 
 setup \<open>
-  Thy_Output.antiquotation_pretty_source_embedded \<^binding>\<open>const_typ\<close>
+  Document_Output.antiquotation_pretty_source_embedded \<^binding>\<open>const_typ\<close>
     (Scan.lift Args.embedded_inner_syntax)
     (fn ctxt => fn c =>
       let val tc = Proof_Context.read_const {proper = false, strict = false} ctxt c in
-        Pretty.block [Thy_Output.pretty_term ctxt tc, Pretty.str " ::",
+        Pretty.block [Document_Output.pretty_term ctxt tc, Pretty.str " ::",
           Pretty.brk 1, Syntax.pretty_typ ctxt (fastype_of tc)]
       end)
 \<close>
--- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML	Fri May 21 12:29:29 2021 +0200
@@ -1200,7 +1200,7 @@
 local
 
 fun antiquote_setup binding co =
-  Thy_Output.antiquotation_pretty_source_embedded binding
+  Document_Output.antiquotation_pretty_source_embedded binding
     ((Scan.ahead (Scan.lift Parse.not_eof) >> Token.pos_of) --
       Args.type_name {proper = true, strict = true})
     (fn ctxt => fn (pos, type_name) =>
--- a/src/HOL/Tools/value_command.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/HOL/Tools/value_command.ML	Fri May 21 12:29:29 2021 +0200
@@ -73,10 +73,10 @@
       >> (fn ((name, modes), t) => Toplevel.keep (value_cmd name modes t)));
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_pretty_source_embedded \<^binding>\<open>value\<close>
+  (Document_Output.antiquotation_pretty_source_embedded \<^binding>\<open>value\<close>
     (Scan.lift opt_evaluator -- Term_Style.parse -- Args.term)
     (fn ctxt => fn ((name, style), t) =>
-      Thy_Output.pretty_term ctxt (style (value_select name ctxt t)))
+      Document_Output.pretty_term ctxt (style (value_select name ctxt t)))
   #> add_evaluator (\<^binding>\<open>simp\<close>, Code_Simp.dynamic_value) #> snd
   #> add_evaluator (\<^binding>\<open>nbe\<close>, Nbe.dynamic_value) #> snd
   #> add_evaluator (\<^binding>\<open>code\<close>, Code_Evaluation.dynamic_value_strict) #> snd);
--- a/src/Pure/ML/ml_file.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/ML/ml_file.ML	Fri May 21 12:29:29 2021 +0200
@@ -21,7 +21,7 @@
     val provide = Resources.provide_file file;
     val source = Token.file_source file;
 
-    val _ = Thy_Output.check_comments (Context.proof_of gthy) (Input.source_explode source);
+    val _ = Document_Output.check_comments (Context.proof_of gthy) (Input.source_explode source);
 
     val flags: ML_Compiler.flags =
       {environment = environment, redirect = true, verbose = true,
--- a/src/Pure/PIDE/command.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/PIDE/command.ML	Fri May 21 12:29:29 2021 +0200
@@ -226,7 +226,7 @@
   else Toplevel.command_errors int tr st;
 
 fun check_token_comments ctxt tok =
-  (Thy_Output.check_comments ctxt (Input.source_explode (Token.input_of tok)); [])
+  (Document_Output.check_comments ctxt (Input.source_explode (Token.input_of tok)); [])
     handle exn =>
       if Exn.is_interrupt exn then Exn.reraise exn
       else Runtime.exn_messages exn;
--- a/src/Pure/PIDE/resources.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/PIDE/resources.ML	Fri May 21 12:29:29 2021 +0200
@@ -198,7 +198,7 @@
   in (name, multi) end;
 
 val _ = Theory.setup
- (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>scala_function\<close>
+ (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>scala_function\<close>
     (Scan.lift Parse.embedded_position) (#1 oo check_scala_function) #>
   ML_Antiquotation.inline_embedded \<^binding>\<open>scala_function\<close>
     (Args.context -- Scan.lift Parse.embedded_position
@@ -424,11 +424,11 @@
 in
 
 val _ = Theory.setup
- (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>session\<close>
+ (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>session\<close>
     (Scan.lift Parse.embedded_position) check_session #>
-  Thy_Output.antiquotation_raw_embedded \<^binding>\<open>path\<close> (document_antiq check_path) (K I) #>
-  Thy_Output.antiquotation_raw_embedded \<^binding>\<open>file\<close> (document_antiq check_file) (K I) #>
-  Thy_Output.antiquotation_raw_embedded \<^binding>\<open>dir\<close> (document_antiq check_dir) (K I) #>
+  Document_Output.antiquotation_raw_embedded \<^binding>\<open>path\<close> (document_antiq check_path) (K I) #>
+  Document_Output.antiquotation_raw_embedded \<^binding>\<open>file\<close> (document_antiq check_file) (K I) #>
+  Document_Output.antiquotation_raw_embedded \<^binding>\<open>dir\<close> (document_antiq check_dir) (K I) #>
   ML_Antiquotation.value_embedded \<^binding>\<open>path\<close> (ML_antiq check_path) #>
   ML_Antiquotation.value_embedded \<^binding>\<open>file\<close> (ML_antiq check_file) #>
   ML_Antiquotation.value_embedded \<^binding>\<open>dir\<close> (ML_antiq check_dir) #>
--- a/src/Pure/ROOT.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/ROOT.ML	Fri May 21 12:29:29 2021 +0200
@@ -304,7 +304,7 @@
 ML_file "ML/ml_antiquotations2.ML";
 ML_file "ML/ml_pid.ML";
 ML_file "Thy/document_antiquotation.ML";
-ML_file "Thy/thy_output.ML";
+ML_file "Thy/document_output.ML";
 ML_file "Thy/document_antiquotations.ML";
 ML_file "General/graph_display.ML";
 ML_file "pure_syn.ML";
--- a/src/Pure/System/isabelle_tool.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/System/isabelle_tool.ML	Fri May 21 12:29:29 2021 +0200
@@ -38,7 +38,7 @@
 
 val _ =
   Theory.setup
-   (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>tool\<close>
+   (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>tool\<close>
       (Scan.lift Parse.embedded_position) check);
 
 end;
--- a/src/Pure/System/scala_compiler.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/System/scala_compiler.ML	Fri May 21 12:29:29 2021 +0200
@@ -65,21 +65,21 @@
 
 val _ =
   Theory.setup
-    (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>scala\<close>
+    (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>scala\<close>
       (Scan.lift Args.embedded_position)
       (fn _ => fn (s, pos) => (static_check (s, pos); s)) #>
 
-    Thy_Output.antiquotation_raw_embedded \<^binding>\<open>scala_type\<close>
+    Document_Output.antiquotation_raw_embedded \<^binding>\<open>scala_type\<close>
       (Scan.lift (Args.embedded_position -- (types >> print_types)))
       (fn _ => fn ((t, pos), type_args) =>
         (static_check ("type _Test_" ^ type_args ^ " = " ^ t ^ type_args, pos);
           scala_name (t ^ type_args))) #>
 
-    Thy_Output.antiquotation_raw_embedded \<^binding>\<open>scala_object\<close>
+    Document_Output.antiquotation_raw_embedded \<^binding>\<open>scala_object\<close>
       (Scan.lift Args.embedded_position)
       (fn _ => fn (x, pos) => (static_check ("val _test_ = " ^ x, pos); scala_name x)) #>
 
-    Thy_Output.antiquotation_raw_embedded \<^binding>\<open>scala_method\<close>
+    Document_Output.antiquotation_raw_embedded \<^binding>\<open>scala_method\<close>
       (Scan.lift (class -- Args.embedded_position -- types -- args))
       (fn _ => fn (((class_context, (method, pos)), method_types), method_args) =>
         let
--- a/src/Pure/Thy/bibtex.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/Thy/bibtex.ML	Fri May 21 12:29:29 2021 +0200
@@ -41,7 +41,7 @@
 val _ =
   Theory.setup
    (Document_Antiquotation.setup_option \<^binding>\<open>cite_macro\<close> (Config.put cite_macro) #>
-    Thy_Output.antiquotation_raw \<^binding>\<open>cite\<close>
+    Document_Output.antiquotation_raw \<^binding>\<open>cite\<close>
       (Scan.lift
         (Scan.option (Parse.verbatim || Parse.cartouche) -- Parse.and_list1 Args.name_position))
       (fn ctxt => fn (opt, citations) =>
--- a/src/Pure/Thy/document_antiquotations.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/Thy/document_antiquotations.ML	Fri May 21 12:29:29 2021 +0200
@@ -14,14 +14,14 @@
 type style = term -> term;
 
 fun pretty_term_style ctxt (style: style, t) =
-  Thy_Output.pretty_term ctxt (style t);
+  Document_Output.pretty_term ctxt (style t);
 
 fun pretty_thms_style ctxt (style: style, ths) =
-  map (fn th => Thy_Output.pretty_term ctxt (style (Thm.full_prop_of th))) ths;
+  map (fn th => Document_Output.pretty_term ctxt (style (Thm.full_prop_of th))) ths;
 
 fun pretty_term_typ ctxt (style: style, t) =
   let val t' = style t
-  in Thy_Output.pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end;
+  in Document_Output.pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end;
 
 fun pretty_term_typeof ctxt (style: style, t) =
   Syntax.pretty_typ ctxt (Term.fastype_of (style t));
@@ -31,7 +31,7 @@
     val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c)
       handle TYPE (msg, _, _) => error msg;
     val (t', _) = yield_singleton (Variable.import_terms true) t ctxt;
-  in Thy_Output.pretty_term ctxt t' end;
+  in Document_Output.pretty_term ctxt t' end;
 
 fun pretty_abbrev ctxt s =
   let
@@ -62,12 +62,12 @@
 fun pretty_theory ctxt (name, pos) =
   (Theory.check {long = true} ctxt (name, pos); Pretty.str name);
 
-val basic_entity = Thy_Output.antiquotation_pretty_source_embedded;
+val basic_entity = Document_Output.antiquotation_pretty_source_embedded;
 
 fun basic_entities name scan pretty =
   Document_Antiquotation.setup name scan
     (fn {context = ctxt, source = src, argument = xs} =>
-      Thy_Output.pretty_items_source ctxt {embedded = false} src (map (pretty ctxt) xs));
+      Document_Output.pretty_items_source ctxt {embedded = false} src (map (pretty ctxt) xs));
 
 val _ = Theory.setup
  (basic_entity \<^binding>\<open>prop\<close> (Term_Style.parse -- Args.prop) pretty_term_style #>
@@ -85,7 +85,7 @@
   basic_entities \<^binding>\<open>full_prf\<close> Attrib.thms (pretty_prf true) #>
   Document_Antiquotation.setup \<^binding>\<open>thm\<close> (Term_Style.parse -- Attrib.thms)
     (fn {context = ctxt, source = src, argument = arg} =>
-      Thy_Output.pretty_items_source ctxt {embedded = false} src (pretty_thms_style ctxt arg)));
+      Document_Output.pretty_items_source ctxt {embedded = false} src (pretty_thms_style ctxt arg)));
 
 in end;
 
@@ -113,13 +113,13 @@
 
 val _ =
   Theory.setup
-   (Thy_Output.antiquotation_raw \<^binding>\<open>noindent\<close> (Scan.succeed ())
+   (Document_Output.antiquotation_raw \<^binding>\<open>noindent\<close> (Scan.succeed ())
       (fn _ => fn () => Latex.string "\\noindent") #>
-    Thy_Output.antiquotation_raw \<^binding>\<open>smallskip\<close> (Scan.succeed ())
+    Document_Output.antiquotation_raw \<^binding>\<open>smallskip\<close> (Scan.succeed ())
       (fn _ => fn () => Latex.string "\\smallskip") #>
-    Thy_Output.antiquotation_raw \<^binding>\<open>medskip\<close> (Scan.succeed ())
+    Document_Output.antiquotation_raw \<^binding>\<open>medskip\<close> (Scan.succeed ())
       (fn _ => fn () => Latex.string "\\medskip") #>
-    Thy_Output.antiquotation_raw \<^binding>\<open>bigskip\<close> (Scan.succeed ())
+    Document_Output.antiquotation_raw \<^binding>\<open>bigskip\<close> (Scan.succeed ())
       (fn _ => fn () => Latex.string "\\bigskip"));
 
 
@@ -128,10 +128,10 @@
 local
 
 fun nested_antiquotation name s1 s2 =
-  Thy_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input)
+  Document_Output.antiquotation_raw_embedded name (Scan.lift Args.cartouche_input)
     (fn ctxt => fn txt =>
-      (Context_Position.reports ctxt (Thy_Output.document_reports txt);
-        Latex.enclose_block s1 s2 (Thy_Output.output_document ctxt {markdown = false} txt)));
+      (Context_Position.reports ctxt (Document_Output.document_reports txt);
+        Latex.enclose_block s1 s2 (Document_Output.output_document ctxt {markdown = false} txt)));
 
 val _ =
   Theory.setup
@@ -180,13 +180,13 @@
 val index_like = Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "is" |-- Args.name --| Parse.$$$ ")");
 val index_args = Parse.enum1 "!" (Args.embedded_input -- Scan.option index_like);
 
-fun output_text ctxt = Latex.block o Thy_Output.output_document ctxt {markdown = false};
+fun output_text ctxt = Latex.block o Document_Output.output_document ctxt {markdown = false};
 
 fun index binding def =
-  Thy_Output.antiquotation_raw binding (Scan.lift index_args)
+  Document_Output.antiquotation_raw binding (Scan.lift index_args)
     (fn ctxt => fn args =>
       let
-        val _ = Context_Position.reports ctxt (maps (Thy_Output.document_reports o #1) args);
+        val _ = Context_Position.reports ctxt (maps (Document_Output.document_reports o #1) args);
         fun make_item (txt, opt_like) =
           let
             val text = output_text ctxt txt;
@@ -222,18 +222,18 @@
   Input.source_content #> #1 #> Document_Antiquotation.prepare_lines ctxt;
 
 fun text_antiquotation name =
-  Thy_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input)
+  Document_Output.antiquotation_raw_embedded name (Scan.lift Args.text_input)
     (fn ctxt => fn text =>
       let
         val _ = report_text ctxt text;
       in
         prepare_text ctxt text
-        |> Thy_Output.output_source ctxt
-        |> Thy_Output.isabelle ctxt
+        |> Document_Output.output_source ctxt
+        |> Document_Output.isabelle ctxt
       end);
 
 val theory_text_antiquotation =
-  Thy_Output.antiquotation_raw_embedded \<^binding>\<open>theory_text\<close> (Scan.lift Args.text_input)
+  Document_Output.antiquotation_raw_embedded \<^binding>\<open>theory_text\<close> (Scan.lift Args.text_input)
     (fn ctxt => fn text =>
       let
         val keywords = Thy_Header.get_keywords' ctxt;
@@ -247,8 +247,8 @@
       in
         prepare_text ctxt text
         |> Token.explode0 keywords
-        |> maps (Thy_Output.output_token ctxt)
-        |> Thy_Output.isabelle ctxt
+        |> maps (Document_Output.output_token ctxt)
+        |> Document_Output.isabelle ctxt
       end);
 
 val _ =
@@ -265,7 +265,7 @@
 local
 
 fun goal_state name main =
-  Thy_Output.antiquotation_pretty name (Scan.succeed ())
+  Document_Output.antiquotation_pretty name (Scan.succeed ())
     (fn ctxt => fn () =>
       Goal_Display.pretty_goal
         (Config.put Goal_Display.show_main_goal main ctxt)
@@ -296,15 +296,15 @@
           |> Proof.theorem NONE (K I) [[(prop, [])]]
           |> Proof.global_terminal_proof (m1, m2);
       in
-        Thy_Output.pretty_source ctxt {embedded = false}
-          [hd src, prop_tok] (Thy_Output.pretty_term ctxt prop)
+        Document_Output.pretty_source ctxt {embedded = false}
+          [hd src, prop_tok] (Document_Output.pretty_term ctxt prop)
       end));
 
 
 (* verbatim text *)
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>verbatim\<close> (Scan.lift Args.text_input)
+  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>verbatim\<close> (Scan.lift Args.text_input)
     (fn ctxt => fn text =>
       let
         val pos = Input.pos_of text;
@@ -318,14 +318,14 @@
 (* bash functions *)
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>bash_function\<close>
+  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>bash_function\<close>
     (Scan.lift Args.embedded_position) Isabelle_System.check_bash_function);
 
 
 (* system options *)
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>system_option\<close>
+  (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>system_option\<close>
     (Scan.lift Args.embedded_position)
     (fn ctxt => fn (name, pos) =>
       let val _ = Completion.check_option (Options.default ()) ctxt (name, pos);
@@ -337,7 +337,7 @@
 local
 
 fun ml_text name ml =
-  Thy_Output.antiquotation_verbatim_embedded name (Scan.lift Args.text_input)
+  Document_Output.antiquotation_verbatim_embedded name (Scan.lift Args.text_input)
     (fn ctxt => fn text =>
       let val _ = ML_Context.eval_in (SOME ctxt) ML_Compiler.flags (Input.pos_of text) (ml text)
       in #1 (Input.source_content text) end);
@@ -368,7 +368,7 @@
   translate_string (fn c => if c = "%" orelse c = "#" orelse c = "^" then "\\" ^ c else c);
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_raw_embedded \<^binding>\<open>url\<close> (Scan.lift Args.embedded_input)
+  (Document_Output.antiquotation_raw_embedded \<^binding>\<open>url\<close> (Scan.lift Args.embedded_input)
     (fn ctxt => fn source =>
       let
         val url = Input.string_of source;
@@ -385,7 +385,7 @@
 local
 
 fun entity_antiquotation name check bg en =
-  Thy_Output.antiquotation_raw name (Scan.lift Args.name_position)
+  Document_Output.antiquotation_raw name (Scan.lift Args.name_position)
     (fn ctxt => fn (name, pos) =>
       let val _ = check ctxt (name, pos)
       in Latex.enclose_block bg en [Latex.string (Output.output name)] end);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Thy/document_output.ML	Fri May 21 12:29:29 2021 +0200
@@ -0,0 +1,577 @@
+(*  Title:      Pure/Thy/document_output.ML
+    Author:     Makarius
+
+Theory document output.
+*)
+
+signature DOCUMENT_OUTPUT =
+sig
+  val document_reports: Input.source -> Position.report list
+  val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
+  val check_comments: Proof.context -> Symbol_Pos.T list -> unit
+  val output_token: Proof.context -> Token.T -> Latex.text list
+  val output_source: Proof.context -> string -> Latex.text list
+  type segment =
+    {span: Command_Span.span, command: Toplevel.transition,
+     prev_state: Toplevel.state, state: Toplevel.state}
+  val present_thy: Options.T -> theory -> segment list -> Latex.text list
+  val pretty_term: Proof.context -> term -> Pretty.T
+  val pretty_thm: Proof.context -> thm -> Pretty.T
+  val lines: Latex.text list -> Latex.text list
+  val items: Latex.text list -> Latex.text list
+  val isabelle: Proof.context -> Latex.text list -> Latex.text
+  val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
+  val typewriter: Proof.context -> string -> Latex.text
+  val verbatim: Proof.context -> string -> Latex.text
+  val source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text
+  val pretty: Proof.context -> Pretty.T -> Latex.text
+  val pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text
+  val pretty_items: Proof.context -> Pretty.T list -> Latex.text
+  val pretty_items_source: Proof.context -> {embedded: bool} -> Token.src ->
+    Pretty.T list -> Latex.text
+  val antiquotation_pretty:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
+  val antiquotation_pretty_embedded:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
+  val antiquotation_pretty_source:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
+  val antiquotation_pretty_source_embedded:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
+  val antiquotation_raw:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
+  val antiquotation_raw_embedded:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
+  val antiquotation_verbatim:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
+  val antiquotation_verbatim_embedded:
+    binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
+end;
+
+structure Document_Output: DOCUMENT_OUTPUT =
+struct
+
+(* output document source *)
+
+fun document_reports txt =
+  let val pos = Input.pos_of txt in
+    [(pos, Markup.language_document (Input.is_delimited txt)),
+     (pos, Markup.plain_text)]
+  end;
+
+val output_symbols = single o Latex.symbols_output;
+
+fun output_comment ctxt (kind, syms) =
+  (case kind of
+    Comment.Comment =>
+      Input.cartouche_content syms
+      |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
+          {markdown = false}
+      |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
+  | Comment.Cancel =>
+      Symbol_Pos.cartouche_content syms
+      |> output_symbols
+      |> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
+  | Comment.Latex => [Latex.symbols (Symbol_Pos.cartouche_content syms)]
+  | Comment.Marker => [])
+and output_comment_document ctxt (comment, syms) =
+  (case comment of
+    SOME kind => output_comment ctxt (kind, syms)
+  | NONE => [Latex.symbols syms])
+and output_document_text ctxt syms =
+  Comment.read_body syms |> maps (output_comment_document ctxt)
+and output_document ctxt {markdown} source =
+  let
+    val pos = Input.pos_of source;
+    val syms = Input.source_explode source;
+
+    val output_antiquotes =
+      maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
+
+    fun output_line line =
+      (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
+        output_antiquotes (Markdown.line_content line);
+
+    fun output_block (Markdown.Par lines) =
+          Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
+      | output_block (Markdown.List {kind, body, ...}) =
+          Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
+    and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
+  in
+    if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
+    else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
+    then
+      let
+        val ants = Antiquote.parse_comments pos syms;
+        val reports = Antiquote.antiq_reports ants;
+        val blocks = Markdown.read_antiquotes ants;
+        val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
+      in output_blocks blocks end
+    else
+      let
+        val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
+        val reports = Antiquote.antiq_reports ants;
+        val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
+      in output_antiquotes ants end
+  end;
+
+
+(* output tokens with formal comments *)
+
+local
+
+val output_symbols_antiq =
+  (fn Antiquote.Text syms => output_symbols syms
+    | Antiquote.Control {name = (name, _), body, ...} =>
+        Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
+          output_symbols body
+    | Antiquote.Antiq {body, ...} =>
+        Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
+
+fun output_comment_symbols ctxt {antiq} (comment, syms) =
+  (case (comment, antiq) of
+    (NONE, false) => output_symbols syms
+  | (NONE, true) =>
+      Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
+      |> maps output_symbols_antiq
+  | (SOME comment, _) => output_comment ctxt (comment, syms));
+
+fun output_body ctxt antiq bg en syms =
+  Comment.read_body syms
+  |> maps (output_comment_symbols ctxt {antiq = antiq})
+  |> Latex.enclose_body bg en;
+
+in
+
+fun output_token ctxt tok =
+  let
+    fun output antiq bg en =
+      output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
+  in
+    (case Token.kind_of tok of
+      Token.Comment NONE => []
+    | Token.Comment (SOME Comment.Marker) => []
+    | Token.Command => output false "\\isacommand{" "}"
+    | Token.Keyword =>
+        if Symbol.is_ascii_identifier (Token.content_of tok)
+        then output false "\\isakeyword{" "}"
+        else output false "" ""
+    | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
+    | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
+    | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
+    | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
+    | _ => output false "" "")
+  end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
+
+fun output_source ctxt s =
+  output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
+
+fun check_comments ctxt =
+  Comment.read_body #> List.app (fn (comment, syms) =>
+    let
+      val pos = #1 (Symbol_Pos.range syms);
+      val _ =
+        comment |> Option.app (fn kind =>
+          Context_Position.reports ctxt (map (pair pos) (Comment.kind_markups kind)));
+      val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
+    in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
+
+end;
+
+
+
+(** present theory source **)
+
+(* presentation tokens *)
+
+datatype token =
+    Ignore
+  | Token of Token.T
+  | Heading of string * Input.source
+  | Body of string * Input.source
+  | Raw of Input.source;
+
+fun token_with pred (Token tok) = pred tok
+  | token_with _ _ = false;
+
+val white_token = token_with Document_Source.is_white;
+val white_comment_token = token_with Document_Source.is_white_comment;
+val blank_token = token_with Token.is_blank;
+val newline_token = token_with Token.is_newline;
+
+fun present_token ctxt tok =
+  (case tok of
+    Ignore => []
+  | Token tok => output_token ctxt tok
+  | Heading (cmd, source) =>
+      Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
+        (output_document ctxt {markdown = false} source)
+  | Body (cmd, source) =>
+      [Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
+  | Raw source =>
+      Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
+
+
+(* command spans *)
+
+type command = string * Position.T;   (*name, position*)
+type source = (token * (string * int)) list;        (*token, markup flag, meta-comment depth*)
+
+datatype span = Span of command * (source * source * source * source) * bool;
+
+fun make_span cmd src =
+  let
+    fun chop_newline (tok :: toks) =
+          if newline_token (fst tok) then ([tok], toks, true)
+          else ([], tok :: toks, false)
+      | chop_newline [] = ([], [], false);
+    val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
+      src
+      |> chop_prefix (white_token o fst)
+      ||>> chop_suffix (white_token o fst)
+      ||>> chop_prefix (white_comment_token o fst)
+      ||> chop_newline;
+  in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
+
+
+(* present spans *)
+
+local
+
+fun err_bad_nesting here =
+  error ("Bad nesting of commands in presentation" ^ here);
+
+fun edge which f (x: string option, y) =
+  if x = y then I
+  else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
+
+val begin_tag = edge #2 Latex.begin_tag;
+val end_tag = edge #1 Latex.end_tag;
+fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
+fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
+
+fun document_tag cmd_pos state state' tagging_stack =
+  let
+    val ctxt' = Toplevel.presentation_context state';
+    val nesting = Toplevel.level state' - Toplevel.level state;
+
+    val (tagging, taggings) = tagging_stack;
+    val (tag', tagging') = Document_Source.update_tagging ctxt' tagging;
+
+    val tagging_stack' =
+      if nesting = 0 andalso not (Toplevel.is_proof state) then tagging_stack
+      else if nesting >= 0 then (tagging', replicate nesting tagging @ taggings)
+      else
+        (case drop (~ nesting - 1) taggings of
+          tg :: tgs => (tg, tgs)
+        | [] => err_bad_nesting (Position.here cmd_pos));
+  in (tag', tagging_stack') end;
+
+fun read_tag s =
+  (case space_explode "%" s of
+    ["", b] => (SOME b, NONE)
+  | [a, b] => (NONE, SOME (a, b))
+  | _ => error ("Bad document_tags specification: " ^ quote s));
+
+in
+
+fun make_command_tag options keywords =
+  let
+    val document_tags =
+      map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
+    val document_tags_default = map_filter #1 document_tags;
+    val document_tags_command = map_filter #2 document_tags;
+  in
+    fn cmd_name => fn state => fn state' => fn active_tag =>
+      let
+        val keyword_tags =
+          if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
+          else Keyword.command_tags keywords cmd_name;
+        val command_tags =
+          the_list (AList.lookup (op =) document_tags_command cmd_name) @
+          keyword_tags @ document_tags_default;
+        val active_tag' =
+          (case command_tags of
+            default_tag :: _ => SOME default_tag
+          | [] =>
+              if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
+              then active_tag
+              else NONE);
+      in active_tag' end
+  end;
+
+fun present_span command_tag span state state'
+    (tagging_stack, active_tag, newline, latex, present_cont) =
+  let
+    val ctxt' = Toplevel.presentation_context state';
+    val present = fold (fn (tok, (flag, 0)) =>
+        fold cons (present_token ctxt' tok)
+        #> cons (Latex.string flag)
+      | _ => I);
+
+    val Span ((cmd_name, cmd_pos), srcs, span_newline) = span;
+
+    val (tag', tagging_stack') = document_tag cmd_pos state state' tagging_stack;
+    val active_tag' =
+      if is_some tag' then Option.map #1 tag'
+      else command_tag cmd_name state state' active_tag;
+    val edge = (active_tag, active_tag');
+
+    val newline' =
+      if is_none active_tag' then span_newline else newline;
+
+    val latex' =
+      latex
+      |> end_tag edge
+      |> close_delim (fst present_cont) edge
+      |> snd present_cont
+      |> open_delim (present (#1 srcs)) edge
+      |> begin_tag edge
+      |> present (#2 srcs);
+    val present_cont' =
+      if newline then (present (#3 srcs), present (#4 srcs))
+      else (I, present (#3 srcs) #> present (#4 srcs));
+  in (tagging_stack', active_tag', newline', latex', present_cont') end;
+
+fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
+  if not (null tags) then err_bad_nesting " at end of theory"
+  else
+    latex
+    |> end_tag (active_tag, NONE)
+    |> close_delim (fst present_cont) (active_tag, NONE)
+    |> snd present_cont;
+
+end;
+
+
+(* present_thy *)
+
+local
+
+val markup_true = "\\isamarkuptrue%\n";
+val markup_false = "\\isamarkupfalse%\n";
+
+val opt_newline = Scan.option (Scan.one Token.is_newline);
+
+val ignore =
+  Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
+    >> pair (d + 1)) ||
+  Scan.depend (fn d => Scan.one Token.is_end_ignore --|
+    (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
+    >> pair (d - 1));
+
+val locale =
+  Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |--
+    Parse.!!!
+      (Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")")));
+
+in
+
+type segment =
+  {span: Command_Span.span, command: Toplevel.transition,
+   prev_state: Toplevel.state, state: Toplevel.state};
+
+fun present_thy options thy (segments: segment list) =
+  let
+    val keywords = Thy_Header.get_keywords thy;
+
+
+    (* tokens *)
+
+    val ignored = Scan.state --| ignore
+      >> (fn d => (NONE, (Ignore, ("", d))));
+
+    fun markup pred mk flag = Scan.peek (fn d =>
+      Document_Source.improper |--
+        Parse.position (Scan.one (fn tok =>
+          Token.is_command tok andalso pred keywords (Token.content_of tok))) --
+      (Document_Source.annotation |--
+        Parse.!!!! ((Document_Source.improper -- locale -- Document_Source.improper) |--
+          Parse.document_source --| Document_Source.improper_end))
+            >> (fn ((tok, pos'), source) =>
+              let val name = Token.content_of tok
+              in (SOME (name, pos'), (mk (name, source), (flag, d))) end));
+
+    val command = Scan.peek (fn d =>
+      Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] --
+      Scan.one Token.is_command --| Document_Source.annotation
+      >> (fn (cmd_mod, cmd) =>
+        map (fn tok => (NONE, (Token tok, ("", d)))) cmd_mod @
+          [(SOME (Token.content_of cmd, Token.pos_of cmd),
+            (Token cmd, (markup_false, d)))]));
+
+    val cmt = Scan.peek (fn d =>
+      Scan.one Document_Source.is_black_comment >> (fn tok => (NONE, (Token tok, ("", d)))));
+
+    val other = Scan.peek (fn d =>
+       Parse.not_eof >> (fn tok => (NONE, (Token tok, ("", d)))));
+
+    val tokens =
+      (ignored ||
+        markup Keyword.is_document_heading Heading markup_true ||
+        markup Keyword.is_document_body Body markup_true ||
+        markup Keyword.is_document_raw (Raw o #2) "") >> single ||
+      command ||
+      (cmt || other) >> single;
+
+
+    (* spans *)
+
+    val is_eof = fn (_, (Token x, _)) => Token.is_eof x | _ => false;
+    val stopper = Scan.stopper (K (NONE, (Token Token.eof, ("", 0)))) is_eof;
+
+    val cmd = Scan.one (is_some o fst);
+    val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
+
+    val white_comments = Scan.many (white_comment_token o fst o snd);
+    val blank = Scan.one (blank_token o fst o snd);
+    val newline = Scan.one (newline_token o fst o snd);
+    val before_cmd =
+      Scan.option (newline -- white_comments) --
+      Scan.option (newline -- white_comments) --
+      Scan.option (blank -- white_comments) -- cmd;
+
+    val span =
+      Scan.repeat non_cmd -- cmd --
+        Scan.repeat (Scan.unless before_cmd non_cmd) --
+        Scan.option (newline >> (single o snd))
+      >> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
+          make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
+
+    val spans = segments
+      |> maps (Command_Span.content o #span)
+      |> drop_suffix Token.is_space
+      |> Source.of_list
+      |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
+      |> Source.source stopper (Scan.error (Scan.bulk span))
+      |> Source.exhaust;
+
+    val command_results =
+      segments |> map_filter (fn {command, state, ...} =>
+        if Toplevel.is_ignored command then NONE else SOME (command, state));
+
+
+    (* present commands *)
+
+    val command_tag = make_command_tag options keywords;
+
+    fun present_command tr span st st' =
+      Toplevel.setmp_thread_position tr (present_span command_tag span st st');
+
+    fun present _ [] = I
+      | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
+  in
+    if length command_results = length spans then
+      (([], []), NONE, true, [], (I, I))
+      |> present (Toplevel.init_toplevel ()) (spans ~~ command_results)
+      |> present_trailer
+      |> rev
+    else error "Messed-up outer syntax for presentation"
+  end;
+
+end;
+
+
+
+(** standard output operations **)
+
+(* pretty printing *)
+
+fun pretty_term ctxt t =
+  Syntax.pretty_term (Proof_Context.augment t ctxt) t;
+
+fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
+
+
+(* default output *)
+
+val lines = separate (Latex.string "\\isanewline%\n");
+val items = separate (Latex.string "\\isasep\\isanewline%\n");
+
+fun isabelle ctxt body =
+  if Config.get ctxt Document_Antiquotation.thy_output_display
+  then Latex.environment_block "isabelle" body
+  else Latex.block (Latex.enclose_body "\\isa{" "}" body);
+
+fun isabelle_typewriter ctxt body =
+  if Config.get ctxt Document_Antiquotation.thy_output_display
+  then Latex.environment_block "isabellett" body
+  else Latex.block (Latex.enclose_body "\\isatt{" "}" body);
+
+fun typewriter ctxt s =
+  isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];
+
+fun verbatim ctxt =
+  if Config.get ctxt Document_Antiquotation.thy_output_display
+  then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
+  else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
+
+fun token_source ctxt {embedded} tok =
+  if Token.is_kind Token.Cartouche tok andalso embedded andalso
+    not (Config.get ctxt Document_Antiquotation.thy_output_source_cartouche)
+  then Token.content_of tok
+  else Token.unparse tok;
+
+fun is_source ctxt =
+  Config.get ctxt Document_Antiquotation.thy_output_source orelse
+  Config.get ctxt Document_Antiquotation.thy_output_source_cartouche;
+
+fun source ctxt embedded =
+  Token.args_of_src
+  #> map (token_source ctxt embedded #> Document_Antiquotation.prepare_lines ctxt)
+  #> space_implode " "
+  #> output_source ctxt
+  #> isabelle ctxt;
+
+fun pretty ctxt =
+  Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;
+
+fun pretty_source ctxt embedded src prt =
+  if is_source ctxt then source ctxt embedded src else pretty ctxt prt;
+
+fun pretty_items ctxt =
+  map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;
+
+fun pretty_items_source ctxt embedded src prts =
+  if is_source ctxt then source ctxt embedded src else pretty_items ctxt prts;
+
+
+(* antiquotation variants *)
+
+local
+
+fun gen_setup name embedded =
+  if embedded
+  then Document_Antiquotation.setup_embedded name
+  else Document_Antiquotation.setup name;
+
+fun gen_antiquotation_pretty name embedded scan f =
+  gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));
+
+fun gen_antiquotation_pretty_source name embedded scan f =
+  gen_setup name embedded scan
+    (fn {context = ctxt, source = src, argument = x} =>
+      pretty_source ctxt {embedded = embedded} src (f ctxt x));
+
+fun gen_antiquotation_raw name embedded scan f =
+  gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => f ctxt x);
+
+fun gen_antiquotation_verbatim name embedded scan f =
+  gen_antiquotation_raw name embedded scan (fn ctxt => verbatim ctxt o f ctxt);
+
+in
+
+fun antiquotation_pretty name = gen_antiquotation_pretty name false;
+fun antiquotation_pretty_embedded name = gen_antiquotation_pretty name true;
+
+fun antiquotation_pretty_source name = gen_antiquotation_pretty_source name false;
+fun antiquotation_pretty_source_embedded name = gen_antiquotation_pretty_source name true;
+
+fun antiquotation_raw name = gen_antiquotation_raw name false;
+fun antiquotation_raw_embedded name = gen_antiquotation_raw name true;
+
+fun antiquotation_verbatim name = gen_antiquotation_verbatim name false;
+fun antiquotation_verbatim_embedded name = gen_antiquotation_verbatim name true;
+
+end;
+
+end;
--- a/src/Pure/Thy/thy_info.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/Thy/thy_info.ML	Fri May 21 12:29:29 2021 +0200
@@ -9,7 +9,7 @@
 sig
   type presentation_context =
     {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
-      segments: Thy_Output.segment list}
+      segments: Document_Output.segment list}
   val adjust_pos_properties: presentation_context -> Position.T -> Properties.T
   val apply_presentation: presentation_context -> theory -> unit
   val add_presentation: (presentation_context -> theory -> unit) -> theory -> theory
@@ -42,7 +42,7 @@
 
 type presentation_context =
   {options: Options.T, file_pos: Position.T, adjust_pos: Position.T -> Position.T,
-    segments: Thy_Output.segment list};
+    segments: Document_Output.segment list};
 
 fun adjust_pos_properties (context: presentation_context) pos =
   Position.offset_properties_of (#adjust_pos context pos) @ Position.id_properties_of pos;
@@ -71,7 +71,7 @@
     if exists (Toplevel.is_skipped_proof o #state) segments then ()
     else
       let
-        val body = Thy_Output.present_thy options thy segments;
+        val body = Document_Output.present_thy options thy segments;
       in
         if Options.string options "document" = "false" then ()
         else
--- a/src/Pure/Thy/thy_output.ML	Fri May 21 11:19:53 2021 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,577 +0,0 @@
-(*  Title:      Pure/Thy/thy_output.ML
-    Author:     Makarius
-
-Theory document output.
-*)
-
-signature THY_OUTPUT =
-sig
-  val document_reports: Input.source -> Position.report list
-  val output_document: Proof.context -> {markdown: bool} -> Input.source -> Latex.text list
-  val check_comments: Proof.context -> Symbol_Pos.T list -> unit
-  val output_token: Proof.context -> Token.T -> Latex.text list
-  val output_source: Proof.context -> string -> Latex.text list
-  type segment =
-    {span: Command_Span.span, command: Toplevel.transition,
-     prev_state: Toplevel.state, state: Toplevel.state}
-  val present_thy: Options.T -> theory -> segment list -> Latex.text list
-  val pretty_term: Proof.context -> term -> Pretty.T
-  val pretty_thm: Proof.context -> thm -> Pretty.T
-  val lines: Latex.text list -> Latex.text list
-  val items: Latex.text list -> Latex.text list
-  val isabelle: Proof.context -> Latex.text list -> Latex.text
-  val isabelle_typewriter: Proof.context -> Latex.text list -> Latex.text
-  val typewriter: Proof.context -> string -> Latex.text
-  val verbatim: Proof.context -> string -> Latex.text
-  val source: Proof.context -> {embedded: bool} -> Token.src -> Latex.text
-  val pretty: Proof.context -> Pretty.T -> Latex.text
-  val pretty_source: Proof.context -> {embedded: bool} -> Token.src -> Pretty.T -> Latex.text
-  val pretty_items: Proof.context -> Pretty.T list -> Latex.text
-  val pretty_items_source: Proof.context -> {embedded: bool} -> Token.src ->
-    Pretty.T list -> Latex.text
-  val antiquotation_pretty:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
-  val antiquotation_pretty_embedded:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
-  val antiquotation_pretty_source:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
-  val antiquotation_pretty_source_embedded:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> Pretty.T) -> theory -> theory
-  val antiquotation_raw:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
-  val antiquotation_raw_embedded:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> Latex.text) -> theory -> theory
-  val antiquotation_verbatim:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
-  val antiquotation_verbatim_embedded:
-    binding -> 'a context_parser -> (Proof.context -> 'a -> string) -> theory -> theory
-end;
-
-structure Thy_Output: THY_OUTPUT =
-struct
-
-(* output document source *)
-
-fun document_reports txt =
-  let val pos = Input.pos_of txt in
-    [(pos, Markup.language_document (Input.is_delimited txt)),
-     (pos, Markup.plain_text)]
-  end;
-
-val output_symbols = single o Latex.symbols_output;
-
-fun output_comment ctxt (kind, syms) =
-  (case kind of
-    Comment.Comment =>
-      Input.cartouche_content syms
-      |> output_document (ctxt |> Config.put Document_Antiquotation.thy_output_display false)
-          {markdown = false}
-      |> Latex.enclose_body "%\n\\isamarkupcmt{" "%\n}"
-  | Comment.Cancel =>
-      Symbol_Pos.cartouche_content syms
-      |> output_symbols
-      |> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
-  | Comment.Latex => [Latex.symbols (Symbol_Pos.cartouche_content syms)]
-  | Comment.Marker => [])
-and output_comment_document ctxt (comment, syms) =
-  (case comment of
-    SOME kind => output_comment ctxt (kind, syms)
-  | NONE => [Latex.symbols syms])
-and output_document_text ctxt syms =
-  Comment.read_body syms |> maps (output_comment_document ctxt)
-and output_document ctxt {markdown} source =
-  let
-    val pos = Input.pos_of source;
-    val syms = Input.source_explode source;
-
-    val output_antiquotes =
-      maps (Document_Antiquotation.evaluate (output_document_text ctxt) ctxt);
-
-    fun output_line line =
-      (if Markdown.line_is_item line then [Latex.string "\\item "] else []) @
-        output_antiquotes (Markdown.line_content line);
-
-    fun output_block (Markdown.Par lines) =
-          Latex.block (separate (Latex.string "\n") (map (Latex.block o output_line) lines))
-      | output_block (Markdown.List {kind, body, ...}) =
-          Latex.environment_block (Markdown.print_kind kind) (output_blocks body)
-    and output_blocks blocks = separate (Latex.string "\n\n") (map output_block blocks);
-  in
-    if Toplevel.is_skipped_proof (Toplevel.presentation_state ctxt) then []
-    else if markdown andalso exists (Markdown.is_control o Symbol_Pos.symbol) syms
-    then
-      let
-        val ants = Antiquote.parse_comments pos syms;
-        val reports = Antiquote.antiq_reports ants;
-        val blocks = Markdown.read_antiquotes ants;
-        val _ = Context_Position.reports ctxt (reports @ Markdown.reports blocks);
-      in output_blocks blocks end
-    else
-      let
-        val ants = Antiquote.parse_comments pos (trim (Symbol.is_blank o Symbol_Pos.symbol) syms);
-        val reports = Antiquote.antiq_reports ants;
-        val _ = Context_Position.reports ctxt (reports @ Markdown.text_reports ants);
-      in output_antiquotes ants end
-  end;
-
-
-(* output tokens with formal comments *)
-
-local
-
-val output_symbols_antiq =
-  (fn Antiquote.Text syms => output_symbols syms
-    | Antiquote.Control {name = (name, _), body, ...} =>
-        Latex.string (Latex.output_symbols [Symbol.encode (Symbol.Control name)]) ::
-          output_symbols body
-    | Antiquote.Antiq {body, ...} =>
-        Latex.enclose_body "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" (output_symbols body));
-
-fun output_comment_symbols ctxt {antiq} (comment, syms) =
-  (case (comment, antiq) of
-    (NONE, false) => output_symbols syms
-  | (NONE, true) =>
-      Antiquote.parse_comments (#1 (Symbol_Pos.range syms)) syms
-      |> maps output_symbols_antiq
-  | (SOME comment, _) => output_comment ctxt (comment, syms));
-
-fun output_body ctxt antiq bg en syms =
-  Comment.read_body syms
-  |> maps (output_comment_symbols ctxt {antiq = antiq})
-  |> Latex.enclose_body bg en;
-
-in
-
-fun output_token ctxt tok =
-  let
-    fun output antiq bg en =
-      output_body ctxt antiq bg en (Input.source_explode (Token.input_of tok));
-  in
-    (case Token.kind_of tok of
-      Token.Comment NONE => []
-    | Token.Comment (SOME Comment.Marker) => []
-    | Token.Command => output false "\\isacommand{" "}"
-    | Token.Keyword =>
-        if Symbol.is_ascii_identifier (Token.content_of tok)
-        then output false "\\isakeyword{" "}"
-        else output false "" ""
-    | Token.String => output false "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}"
-    | Token.Alt_String => output false "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}"
-    | Token.Verbatim => output true "{\\isacharverbatimopen}" "{\\isacharverbatimclose}"
-    | Token.Cartouche => output false "{\\isacartoucheopen}" "{\\isacartoucheclose}"
-    | _ => output false "" "")
-  end handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok));
-
-fun output_source ctxt s =
-  output_body ctxt false "" "" (Symbol_Pos.explode (s, Position.none));
-
-fun check_comments ctxt =
-  Comment.read_body #> List.app (fn (comment, syms) =>
-    let
-      val pos = #1 (Symbol_Pos.range syms);
-      val _ =
-        comment |> Option.app (fn kind =>
-          Context_Position.reports ctxt (map (pair pos) (Comment.kind_markups kind)));
-      val _ = output_comment_symbols ctxt {antiq = false} (comment, syms);
-    in if comment = SOME Comment.Comment then check_comments ctxt syms else () end);
-
-end;
-
-
-
-(** present theory source **)
-
-(* presentation tokens *)
-
-datatype token =
-    Ignore
-  | Token of Token.T
-  | Heading of string * Input.source
-  | Body of string * Input.source
-  | Raw of Input.source;
-
-fun token_with pred (Token tok) = pred tok
-  | token_with _ _ = false;
-
-val white_token = token_with Document_Source.is_white;
-val white_comment_token = token_with Document_Source.is_white_comment;
-val blank_token = token_with Token.is_blank;
-val newline_token = token_with Token.is_newline;
-
-fun present_token ctxt tok =
-  (case tok of
-    Ignore => []
-  | Token tok => output_token ctxt tok
-  | Heading (cmd, source) =>
-      Latex.enclose_body ("%\n\\isamarkup" ^ cmd ^ "{") "%\n}\n"
-        (output_document ctxt {markdown = false} source)
-  | Body (cmd, source) =>
-      [Latex.environment_block ("isamarkup" ^ cmd) (output_document ctxt {markdown = true} source)]
-  | Raw source =>
-      Latex.string "%\n" :: output_document ctxt {markdown = true} source @ [Latex.string "\n"]);
-
-
-(* command spans *)
-
-type command = string * Position.T;   (*name, position*)
-type source = (token * (string * int)) list;        (*token, markup flag, meta-comment depth*)
-
-datatype span = Span of command * (source * source * source * source) * bool;
-
-fun make_span cmd src =
-  let
-    fun chop_newline (tok :: toks) =
-          if newline_token (fst tok) then ([tok], toks, true)
-          else ([], tok :: toks, false)
-      | chop_newline [] = ([], [], false);
-    val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) =
-      src
-      |> chop_prefix (white_token o fst)
-      ||>> chop_suffix (white_token o fst)
-      ||>> chop_prefix (white_comment_token o fst)
-      ||> chop_newline;
-  in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end;
-
-
-(* present spans *)
-
-local
-
-fun err_bad_nesting here =
-  error ("Bad nesting of commands in presentation" ^ here);
-
-fun edge which f (x: string option, y) =
-  if x = y then I
-  else (case which (x, y) of NONE => I | SOME txt => cons (Latex.string (f txt)));
-
-val begin_tag = edge #2 Latex.begin_tag;
-val end_tag = edge #1 Latex.end_tag;
-fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e;
-fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e;
-
-fun document_tag cmd_pos state state' tagging_stack =
-  let
-    val ctxt' = Toplevel.presentation_context state';
-    val nesting = Toplevel.level state' - Toplevel.level state;
-
-    val (tagging, taggings) = tagging_stack;
-    val (tag', tagging') = Document_Source.update_tagging ctxt' tagging;
-
-    val tagging_stack' =
-      if nesting = 0 andalso not (Toplevel.is_proof state) then tagging_stack
-      else if nesting >= 0 then (tagging', replicate nesting tagging @ taggings)
-      else
-        (case drop (~ nesting - 1) taggings of
-          tg :: tgs => (tg, tgs)
-        | [] => err_bad_nesting (Position.here cmd_pos));
-  in (tag', tagging_stack') end;
-
-fun read_tag s =
-  (case space_explode "%" s of
-    ["", b] => (SOME b, NONE)
-  | [a, b] => (NONE, SOME (a, b))
-  | _ => error ("Bad document_tags specification: " ^ quote s));
-
-in
-
-fun make_command_tag options keywords =
-  let
-    val document_tags =
-      map read_tag (space_explode "," (Options.string options \<^system_option>\<open>document_tags\<close>));
-    val document_tags_default = map_filter #1 document_tags;
-    val document_tags_command = map_filter #2 document_tags;
-  in
-    fn cmd_name => fn state => fn state' => fn active_tag =>
-      let
-        val keyword_tags =
-          if cmd_name = "end" andalso Toplevel.is_end_theory state' then ["theory"]
-          else Keyword.command_tags keywords cmd_name;
-        val command_tags =
-          the_list (AList.lookup (op =) document_tags_command cmd_name) @
-          keyword_tags @ document_tags_default;
-        val active_tag' =
-          (case command_tags of
-            default_tag :: _ => SOME default_tag
-          | [] =>
-              if Keyword.is_vacuous keywords cmd_name andalso Toplevel.is_proof state
-              then active_tag
-              else NONE);
-      in active_tag' end
-  end;
-
-fun present_span command_tag span state state'
-    (tagging_stack, active_tag, newline, latex, present_cont) =
-  let
-    val ctxt' = Toplevel.presentation_context state';
-    val present = fold (fn (tok, (flag, 0)) =>
-        fold cons (present_token ctxt' tok)
-        #> cons (Latex.string flag)
-      | _ => I);
-
-    val Span ((cmd_name, cmd_pos), srcs, span_newline) = span;
-
-    val (tag', tagging_stack') = document_tag cmd_pos state state' tagging_stack;
-    val active_tag' =
-      if is_some tag' then Option.map #1 tag'
-      else command_tag cmd_name state state' active_tag;
-    val edge = (active_tag, active_tag');
-
-    val newline' =
-      if is_none active_tag' then span_newline else newline;
-
-    val latex' =
-      latex
-      |> end_tag edge
-      |> close_delim (fst present_cont) edge
-      |> snd present_cont
-      |> open_delim (present (#1 srcs)) edge
-      |> begin_tag edge
-      |> present (#2 srcs);
-    val present_cont' =
-      if newline then (present (#3 srcs), present (#4 srcs))
-      else (I, present (#3 srcs) #> present (#4 srcs));
-  in (tagging_stack', active_tag', newline', latex', present_cont') end;
-
-fun present_trailer ((_, tags), active_tag, _, latex, present_cont) =
-  if not (null tags) then err_bad_nesting " at end of theory"
-  else
-    latex
-    |> end_tag (active_tag, NONE)
-    |> close_delim (fst present_cont) (active_tag, NONE)
-    |> snd present_cont;
-
-end;
-
-
-(* present_thy *)
-
-local
-
-val markup_true = "\\isamarkuptrue%\n";
-val markup_false = "\\isamarkupfalse%\n";
-
-val opt_newline = Scan.option (Scan.one Token.is_newline);
-
-val ignore =
-  Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
-    >> pair (d + 1)) ||
-  Scan.depend (fn d => Scan.one Token.is_end_ignore --|
-    (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline)
-    >> pair (d - 1));
-
-val locale =
-  Scan.option ((Parse.$$$ "(" -- Document_Source.improper -- Parse.$$$ "in") |--
-    Parse.!!!
-      (Document_Source.improper |-- Parse.name --| (Document_Source.improper -- Parse.$$$ ")")));
-
-in
-
-type segment =
-  {span: Command_Span.span, command: Toplevel.transition,
-   prev_state: Toplevel.state, state: Toplevel.state};
-
-fun present_thy options thy (segments: segment list) =
-  let
-    val keywords = Thy_Header.get_keywords thy;
-
-
-    (* tokens *)
-
-    val ignored = Scan.state --| ignore
-      >> (fn d => (NONE, (Ignore, ("", d))));
-
-    fun markup pred mk flag = Scan.peek (fn d =>
-      Document_Source.improper |--
-        Parse.position (Scan.one (fn tok =>
-          Token.is_command tok andalso pred keywords (Token.content_of tok))) --
-      (Document_Source.annotation |--
-        Parse.!!!! ((Document_Source.improper -- locale -- Document_Source.improper) |--
-          Parse.document_source --| Document_Source.improper_end))
-            >> (fn ((tok, pos'), source) =>
-              let val name = Token.content_of tok
-              in (SOME (name, pos'), (mk (name, source), (flag, d))) end));
-
-    val command = Scan.peek (fn d =>
-      Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] --
-      Scan.one Token.is_command --| Document_Source.annotation
-      >> (fn (cmd_mod, cmd) =>
-        map (fn tok => (NONE, (Token tok, ("", d)))) cmd_mod @
-          [(SOME (Token.content_of cmd, Token.pos_of cmd),
-            (Token cmd, (markup_false, d)))]));
-
-    val cmt = Scan.peek (fn d =>
-      Scan.one Document_Source.is_black_comment >> (fn tok => (NONE, (Token tok, ("", d)))));
-
-    val other = Scan.peek (fn d =>
-       Parse.not_eof >> (fn tok => (NONE, (Token tok, ("", d)))));
-
-    val tokens =
-      (ignored ||
-        markup Keyword.is_document_heading Heading markup_true ||
-        markup Keyword.is_document_body Body markup_true ||
-        markup Keyword.is_document_raw (Raw o #2) "") >> single ||
-      command ||
-      (cmt || other) >> single;
-
-
-    (* spans *)
-
-    val is_eof = fn (_, (Token x, _)) => Token.is_eof x | _ => false;
-    val stopper = Scan.stopper (K (NONE, (Token Token.eof, ("", 0)))) is_eof;
-
-    val cmd = Scan.one (is_some o fst);
-    val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
-
-    val white_comments = Scan.many (white_comment_token o fst o snd);
-    val blank = Scan.one (blank_token o fst o snd);
-    val newline = Scan.one (newline_token o fst o snd);
-    val before_cmd =
-      Scan.option (newline -- white_comments) --
-      Scan.option (newline -- white_comments) --
-      Scan.option (blank -- white_comments) -- cmd;
-
-    val span =
-      Scan.repeat non_cmd -- cmd --
-        Scan.repeat (Scan.unless before_cmd non_cmd) --
-        Scan.option (newline >> (single o snd))
-      >> (fn (((toks1, (cmd, tok2)), toks3), tok4) =>
-          make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4))));
-
-    val spans = segments
-      |> maps (Command_Span.content o #span)
-      |> drop_suffix Token.is_space
-      |> Source.of_list
-      |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk tokens >> flat))
-      |> Source.source stopper (Scan.error (Scan.bulk span))
-      |> Source.exhaust;
-
-    val command_results =
-      segments |> map_filter (fn {command, state, ...} =>
-        if Toplevel.is_ignored command then NONE else SOME (command, state));
-
-
-    (* present commands *)
-
-    val command_tag = make_command_tag options keywords;
-
-    fun present_command tr span st st' =
-      Toplevel.setmp_thread_position tr (present_span command_tag span st st');
-
-    fun present _ [] = I
-      | present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
-  in
-    if length command_results = length spans then
-      (([], []), NONE, true, [], (I, I))
-      |> present (Toplevel.init_toplevel ()) (spans ~~ command_results)
-      |> present_trailer
-      |> rev
-    else error "Messed-up outer syntax for presentation"
-  end;
-
-end;
-
-
-
-(** standard output operations **)
-
-(* pretty printing *)
-
-fun pretty_term ctxt t =
-  Syntax.pretty_term (Proof_Context.augment t ctxt) t;
-
-fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
-
-
-(* default output *)
-
-val lines = separate (Latex.string "\\isanewline%\n");
-val items = separate (Latex.string "\\isasep\\isanewline%\n");
-
-fun isabelle ctxt body =
-  if Config.get ctxt Document_Antiquotation.thy_output_display
-  then Latex.environment_block "isabelle" body
-  else Latex.block (Latex.enclose_body "\\isa{" "}" body);
-
-fun isabelle_typewriter ctxt body =
-  if Config.get ctxt Document_Antiquotation.thy_output_display
-  then Latex.environment_block "isabellett" body
-  else Latex.block (Latex.enclose_body "\\isatt{" "}" body);
-
-fun typewriter ctxt s =
-  isabelle_typewriter ctxt [Latex.string (Latex.output_ascii s)];
-
-fun verbatim ctxt =
-  if Config.get ctxt Document_Antiquotation.thy_output_display
-  then Document_Antiquotation.indent_lines ctxt #> typewriter ctxt
-  else split_lines #> map (typewriter ctxt) #> lines #> Latex.block;
-
-fun token_source ctxt {embedded} tok =
-  if Token.is_kind Token.Cartouche tok andalso embedded andalso
-    not (Config.get ctxt Document_Antiquotation.thy_output_source_cartouche)
-  then Token.content_of tok
-  else Token.unparse tok;
-
-fun is_source ctxt =
-  Config.get ctxt Document_Antiquotation.thy_output_source orelse
-  Config.get ctxt Document_Antiquotation.thy_output_source_cartouche;
-
-fun source ctxt embedded =
-  Token.args_of_src
-  #> map (token_source ctxt embedded #> Document_Antiquotation.prepare_lines ctxt)
-  #> space_implode " "
-  #> output_source ctxt
-  #> isabelle ctxt;
-
-fun pretty ctxt =
-  Document_Antiquotation.output ctxt #> Latex.string #> single #> isabelle ctxt;
-
-fun pretty_source ctxt embedded src prt =
-  if is_source ctxt then source ctxt embedded src else pretty ctxt prt;
-
-fun pretty_items ctxt =
-  map (Document_Antiquotation.output ctxt #> Latex.string) #> items #> isabelle ctxt;
-
-fun pretty_items_source ctxt embedded src prts =
-  if is_source ctxt then source ctxt embedded src else pretty_items ctxt prts;
-
-
-(* antiquotation variants *)
-
-local
-
-fun gen_setup name embedded =
-  if embedded
-  then Document_Antiquotation.setup_embedded name
-  else Document_Antiquotation.setup name;
-
-fun gen_antiquotation_pretty name embedded scan f =
-  gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => pretty ctxt (f ctxt x));
-
-fun gen_antiquotation_pretty_source name embedded scan f =
-  gen_setup name embedded scan
-    (fn {context = ctxt, source = src, argument = x} =>
-      pretty_source ctxt {embedded = embedded} src (f ctxt x));
-
-fun gen_antiquotation_raw name embedded scan f =
-  gen_setup name embedded scan (fn {context = ctxt, argument = x, ...} => f ctxt x);
-
-fun gen_antiquotation_verbatim name embedded scan f =
-  gen_antiquotation_raw name embedded scan (fn ctxt => verbatim ctxt o f ctxt);
-
-in
-
-fun antiquotation_pretty name = gen_antiquotation_pretty name false;
-fun antiquotation_pretty_embedded name = gen_antiquotation_pretty name true;
-
-fun antiquotation_pretty_source name = gen_antiquotation_pretty_source name false;
-fun antiquotation_pretty_source_embedded name = gen_antiquotation_pretty_source name true;
-
-fun antiquotation_raw name = gen_antiquotation_raw name false;
-fun antiquotation_raw_embedded name = gen_antiquotation_raw name true;
-
-fun antiquotation_verbatim name = gen_antiquotation_verbatim name false;
-fun antiquotation_verbatim_embedded name = gen_antiquotation_verbatim name true;
-
-end;
-
-end;
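For orientation, the antiquotation variants defined above (pretty, pretty_source, raw, verbatim, each with an _embedded form) are exactly the entry points that the call sites in the following hunks switch from Thy_Output to Document_Output. A minimal sketch of a registration against the renamed module, assuming a purely hypothetical binding "demo" and patterned on the doc.ML and jedit.ML call sites below:

  (* illustrative sketch only: the binding "demo" and its body are hypothetical,
     modelled on the adjusted call sites in this changeset *)
  val _ =
    Theory.setup
      (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>demo\<close>
        (Scan.lift Args.embedded_position)
        (fn _ => fn (name, _) => name));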
--- a/src/Pure/Tools/doc.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/Tools/doc.ML	Fri May 21 12:29:29 2021 +0200
@@ -18,7 +18,7 @@
 
 val _ =
   Theory.setup
-   (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>doc\<close>
+   (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>doc\<close>
       (Scan.lift Parse.embedded_position) check);
 
 end;
--- a/src/Pure/Tools/jedit.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/Tools/jedit.ML	Fri May 21 12:29:29 2021 +0200
@@ -78,7 +78,7 @@
 
 val _ =
   Theory.setup
-    (Thy_Output.antiquotation_verbatim_embedded \<^binding>\<open>action\<close> (Scan.lift Args.embedded_position)
+    (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>action\<close> (Scan.lift Args.embedded_position)
       (fn ctxt => fn (name, pos) =>
         let
           val _ =
--- a/src/Pure/Tools/rail.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/Tools/rail.ML	Fri May 21 12:29:29 2021 +0200
@@ -384,7 +384,7 @@
   in Latex.string (Latex.environment "railoutput" (implode (map output_rule rules))) end;
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_raw_embedded \<^binding>\<open>rail\<close> (Scan.lift Args.text_input)
+  (Document_Output.antiquotation_raw_embedded \<^binding>\<open>rail\<close> (Scan.lift Args.text_input)
     (fn ctxt => output_rules ctxt o read ctxt));
 
 end;
--- a/src/Pure/pure_syn.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Pure/pure_syn.ML	Fri May 21 12:29:29 2021 +0200
@@ -20,8 +20,8 @@
 fun output_document state markdown txt =
   let
     val ctxt = Toplevel.presentation_context state;
-    val _ = Context_Position.reports ctxt (Thy_Output.document_reports txt);
-  in Thy_Output.output_document ctxt markdown txt end;
+    val _ = Context_Position.reports ctxt (Document_Output.document_reports txt);
+  in Document_Output.output_document ctxt markdown txt end;
 
 fun document_command markdown (loc, txt) =
   Toplevel.keep (fn state =>
--- a/src/Tools/Code/code_target.ML	Fri May 21 11:19:53 2021 +0200
+++ b/src/Tools/Code/code_target.ML	Fri May 21 12:29:29 2021 +0200
@@ -782,7 +782,7 @@
 in
 
 val _ = Theory.setup
-  (Thy_Output.antiquotation_raw \<^binding>\<open>code_stmts\<close>
+  (Document_Output.antiquotation_raw \<^binding>\<open>code_stmts\<close>
     (parse_const_terms --
       Scan.repeats (parse_consts || parse_types || parse_classes || parse_instances)
       -- Scan.lift (Args.parens (Args.name -- Scan.option Parse.int)))
@@ -790,7 +790,7 @@
        present_code ctxt consts symbols
          target_name "Example" some_width []
        |> trim_line
-       |> Thy_Output.verbatim (Config.put Document_Antiquotation.thy_output_display true ctxt)));
+       |> Document_Output.verbatim (Config.put Document_Antiquotation.thy_output_display true ctxt)));
 
 end;
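The code_target.ML hunk above also shows the intended way to force block-style output after the rename: verbatim consults Document_Antiquotation.thy_output_display, so a caller flips that flag locally via Config.put instead of calling a separate display variant. A minimal sketch under that assumption, with the helper name verbatim_display chosen here for illustration only:

  (* illustrative helper, not part of the changeset: forces display mode
     for verbatim output, as code_target.ML does inline above *)
  fun verbatim_display ctxt =
    Document_Output.verbatim
      (Config.put Document_Antiquotation.thy_output_display true ctxt);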