suppress markup for literal tokens with block control symbols, for better PIDE/HTML output (see also d15fe10593ff);
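For illustration only, not part of the changeset: with the revised definitions below,
Lexicon.literal_markup now yields a list of markups, and literal tokens containing block
control symbols produce no markup at all. A minimal Isabelle/ML sketch of the intended
behaviour, using only the functions touched by this change:

  Symbol.has_block_ctrl (Symbol.explode "A\<^bsub>i\<^esub>");
    (*true: the string contains a block control symbol*)

  Lexicon.literal_markup "==>";       (*[Markup.delimiter]*)
  Lexicon.literal_markup "if";        (*[Markup.literal]*)
  Lexicon.literal_markup "\<^bsub>";  (*[]: markup suppressed*)
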
--- a/src/Pure/General/symbol.ML Tue Jan 19 14:14:23 2021 +0100
+++ b/src/Pure/General/symbol.ML Tue Jan 19 20:23:13 2021 +0100
@@ -59,6 +59,7 @@
val is_quasi: symbol -> bool
val is_blank: symbol -> bool
val is_block_ctrl: symbol -> bool
+ val has_block_ctrl: symbol list -> bool
val is_quasi_letter: symbol -> bool
val is_letdig: symbol -> bool
val beginning: int -> symbol list -> string
@@ -408,6 +409,7 @@
fun is_blank s = kind s = Blank;
val is_block_ctrl = member (op =) ["\<^bsub>", "\<^esub>", "\<^bsup>", "\<^esup>"];
+val has_block_ctrl = exists is_block_ctrl;
fun is_quasi_letter s = let val k = kind s in k = Letter orelse k = Quasi end;
fun is_letdig s = let val k = kind s in k = Letter orelse k = Digit orelse k = Quasi end;
--- a/src/Pure/Syntax/lexicon.ML Tue Jan 19 14:14:23 2021 +0100
+++ b/src/Pure/Syntax/lexicon.ML Tue Jan 19 20:23:13 2021 +0100
@@ -40,8 +40,10 @@
val terminals: string list
val is_terminal: string -> bool
val get_terminal: string -> token option
- val literal_markup: string -> Markup.T
- val report_of_token: token -> Position.report
+ val suppress_literal_markup: string -> bool
+ val suppress_markup: token -> bool
+ val literal_markup: string -> Markup.T list
+ val reports_of_token: token -> Position.report list
val reported_token_range: Proof.context -> token -> string
val valued_token: token -> bool
val implode_string: Symbol.symbol list -> string
@@ -197,28 +199,35 @@
(* markup *)
+val suppress_literal_markup = Symbol.has_block_ctrl o Symbol.explode;
+fun suppress_markup tok = is_literal tok andalso suppress_literal_markup (str_of_token tok);
+
fun literal_markup s =
- if Symbol.is_ascii_identifier s orelse exists Symbol.is_letter (Symbol.explode s)
- then Markup.literal
- else Markup.delimiter;
+ let val syms = Symbol.explode s in
+ if Symbol.has_block_ctrl syms then []
+ else if Symbol.is_ascii_identifier s orelse exists Symbol.is_letter syms
+ then [Markup.literal]
+ else [Markup.delimiter]
+ end;
val token_kind_markup =
- fn Type_Ident => Markup.tfree
- | Type_Var => Markup.tvar
- | Num => Markup.numeral
- | Float => Markup.numeral
- | Str => Markup.inner_string
- | String => Markup.inner_string
- | Cartouche => Markup.inner_cartouche
- | Comment _ => Markup.comment1
- | _ => Markup.empty;
+ fn Type_Ident => [Markup.tfree]
+ | Type_Var => [Markup.tvar]
+ | Num => [Markup.numeral]
+ | Float => [Markup.numeral]
+ | Str => [Markup.inner_string]
+ | String => [Markup.inner_string]
+ | Cartouche => [Markup.inner_cartouche]
+ | Comment _ => [Markup.comment1]
+ | _ => [];
-fun report_of_token tok =
+fun reports_of_token tok =
let
- val markup =
+ val pos = pos_of_token tok;
+ val markups =
if is_literal tok then literal_markup (str_of_token tok)
else token_kind_markup (kind_of_token tok);
- in (pos_of_token tok, markup) end;
+ in map (pair pos) markups end;
fun reported_token_range ctxt tok =
if is_proper tok
--- a/src/Pure/Syntax/printer.ML Tue Jan 19 14:14:23 2021 +0100
+++ b/src/Pure/Syntax/printer.ML Tue Jan 19 20:23:13 2021 +0100
@@ -138,7 +138,7 @@
(if Lexicon.is_terminal s then 1000 else p);
fun xsyms_to_syms (Syntax_Ext.Delim s :: xsyms) =
- apfst (cons (String (not (exists Symbol.is_block_ctrl (Symbol.explode s)), s)))
+ apfst (cons (String (not (Lexicon.suppress_literal_markup s), s)))
(xsyms_to_syms xsyms)
| xsyms_to_syms (Syntax_Ext.Argument s_p :: xsyms) =
apfst (cons (arg s_p)) (xsyms_to_syms xsyms)
@@ -232,7 +232,7 @@
val (Ts, args') = synT m (symbs, args);
val T =
if do_mark
- then Pretty.marks_str (m @ [Lexicon.literal_markup s], s)
+ then Pretty.marks_str (m @ Lexicon.literal_markup s, s)
else Pretty.str s;
in (T :: Ts, args') end
| synT m (Block ({markup, consistent, unbreakable, indent}, bsymbs) :: symbs, args) =
--- a/src/Pure/Syntax/syntax_phases.ML Tue Jan 19 14:14:23 2021 +0100
+++ b/src/Pure/Syntax/syntax_phases.ML Tue Jan 19 20:23:13 2021 +0100
@@ -158,6 +158,10 @@
fun report pos = Position.store_reports reports [pos];
val append_reports = Position.append_reports reports;
+ fun report_pos tok =
+ if Lexicon.suppress_markup tok then Position.none
+ else Lexicon.pos_of_token tok;
+
fun trans a args =
(case trf a of
NONE => Ast.mk_appl (Ast.Constant a) args
@@ -169,7 +173,7 @@
else [];
fun ast_of_position tok =
- Ast.Variable (Term_Position.encode (Lexicon.pos_of_token tok));
+ Ast.Variable (Term_Position.encode (report_pos tok));
fun ast_of_dummy a tok =
Ast.Appl [Ast.Constant "_constrain", Ast.Constant a, ast_of_position tok];
@@ -179,13 +183,13 @@
and asts_of (Parser.Node ("_class_name", [Parser.Tip tok])) =
let
- val pos = Lexicon.pos_of_token tok;
+ val pos = report_pos tok;
val (c, rs) = Proof_Context.check_class ctxt (Lexicon.str_of_token tok, pos);
val _ = append_reports rs;
in [Ast.Constant (Lexicon.mark_class c)] end
| asts_of (Parser.Node ("_type_name", [Parser.Tip tok])) =
let
- val pos = Lexicon.pos_of_token tok;
+ val pos = report_pos tok;
val (Type (c, _), rs) =
Proof_Context.check_type_name {proper = true, strict = false} ctxt
(Lexicon.str_of_token tok, pos);
@@ -204,7 +208,7 @@
val _ = pts |> List.app
(fn Parser.Node _ => () | Parser.Tip tok =>
if Lexicon.valued_token tok then ()
- else report (Lexicon.pos_of_token tok) (markup_entity ctxt) a);
+ else report (report_pos tok) (markup_entity ctxt) a);
in [trans a (maps asts_of pts)] end
| asts_of (Parser.Tip tok) = asts_of_token tok
@@ -340,7 +344,7 @@
val ast_tr = Syntax.parse_ast_translation syn;
val toks = Syntax.tokenize syn raw syms;
- val _ = Context_Position.reports ctxt (map Lexicon.report_of_token toks);
+ val _ = Context_Position.reports ctxt (maps Lexicon.reports_of_token toks);
val pts = Syntax.parse syn root (filter Lexicon.is_proper toks)
handle ERROR msg =>