clarified language markup: added "delimited" property;
type Symbol_Pos.source preserves information about delimited outer tokens (e.g. string, cartouche);
observe Completion.Language_Context only for delimited languages, which is important for completing keywords after undelimited inner tokens, e.g. "lemma A pro";
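
For reference, a minimal sketch (not part of the changeset) of how the new
Symbol_Pos.source record and the derived eval_source_in interface fit together;
the text, position, and the ambient Proof.context "ctxt" are assumed for
illustration only:

  (*outer token was delimited, e.g. a string or cartouche*)
  val source: Symbol_Pos.source =
    {delimited = true, text = "val x = 40 + 2;", pos = Position.none};

  (*read_source reports Markup.language_ML with the delimited flag and then
    lexes the text at its recorded position (cf. ml_lex.ML below)*)
  val ants = ML_Lex.read_source source;

  (*derived evaluation form, replacing the old eval_text_in (txt, pos) interface;
    assumes some Proof.context ctxt in scope*)
  val _ = ML_Context.eval_source_in (SOME ctxt) true source;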
--- a/src/Doc/antiquote_setup.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Doc/antiquote_setup.ML Sat Mar 01 22:46:31 2014 +0100
@@ -86,7 +86,9 @@
then txt1 ^ ": " ^ txt2
else txt1 ^ " : " ^ txt2;
val txt' = if kind = "" then txt else kind ^ " " ^ txt;
- val _ = ML_Context.eval_text_in (SOME ctxt) false Position.none (ml (txt1, txt2)); (* ML_Lex.read (!?) *)
+ val _ = (* ML_Lex.read (!?) *)
+ ML_Context.eval_source_in (SOME ctxt) false
+ {delimited = false, text = ml (txt1, txt2), pos = Position.none};
val kind' = if kind = "" then "ML" else "ML " ^ kind;
in
"\\indexdef{}{" ^ kind' ^ "}{" ^ clean_string (ml_name txt1) ^ "}" ^
--- a/src/HOL/ex/Cartouche_Examples.thy Sat Mar 01 19:55:01 2014 +0100
+++ b/src/HOL/ex/Cartouche_Examples.thy Sat Mar 01 22:46:31 2014 +0100
@@ -114,13 +114,13 @@
setup -- "document antiquotation"
{*
Thy_Output.antiquotation @{binding ML_cartouche}
- (Scan.lift Args.cartouche_source_position) (fn {context, ...} => fn (txt, pos) =>
+ (Scan.lift Args.cartouche_source_position) (fn {context, ...} => fn source =>
let
val toks =
ML_Lex.read Position.none "fn _ => (" @
- ML_Lex.read pos txt @
+ ML_Lex.read_source source @
ML_Lex.read Position.none ");";
- val _ = ML_Context.eval_in (SOME context) false pos toks;
+ val _ = ML_Context.eval_in (SOME context) false (#pos source) toks;
in "" end);
*}
@@ -177,19 +177,19 @@
structure ML_Tactic:
sig
val set: (Proof.context -> tactic) -> Proof.context -> Proof.context
- val ml_tactic: string * Position.T -> Proof.context -> tactic
+ val ml_tactic: Symbol_Pos.source -> Proof.context -> tactic
end =
struct
structure Data = Proof_Data(type T = Proof.context -> tactic fun init _ = K no_tac);
val set = Data.put;
- fun ml_tactic (txt, pos) ctxt =
+ fun ml_tactic source ctxt =
let
val ctxt' = ctxt |> Context.proof_map
- (ML_Context.expression pos
+ (ML_Context.expression (#pos source)
"fun tactic (ctxt : Proof.context) : tactic"
- "Context.map_proof (ML_Tactic.set tactic)" (ML_Lex.read pos txt));
+ "Context.map_proof (ML_Tactic.set tactic)" (ML_Lex.read_source source));
in Data.get ctxt' ctxt end;
end;
*}
--- a/src/Pure/General/symbol_pos.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/General/symbol_pos.ML Sat Mar 01 22:46:31 2014 +0100
@@ -37,6 +37,7 @@
val source: Position.T -> (Symbol.symbol, 'a) Source.source ->
(T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source
type text = string
+ type source = {delimited: bool, text: text, pos: Position.T}
val implode: T list -> text
val implode_range: Position.T -> Position.T -> T list -> text * Position.range
val explode: text * Position.T -> T list
@@ -232,6 +233,7 @@
(* compact representation -- with Symbol.DEL padding *)
type text = string;
+type source = {delimited: bool, text: text, pos: Position.T}
fun pad [] = []
| pad [(s, _)] = [s]
--- a/src/Pure/Isar/args.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/args.ML Sat Mar 01 22:46:31 2014 +0100
@@ -30,9 +30,9 @@
val mode: string -> bool parser
val maybe: 'a parser -> 'a option parser
val cartouche_inner_syntax: string parser
- val cartouche_source_position: (Symbol_Pos.text * Position.T) parser
+ val cartouche_source_position: Symbol_Pos.source parser
val name_inner_syntax: string parser
- val name_source_position: (Symbol_Pos.text * Position.T) parser
+ val name_source_position: Symbol_Pos.source parser
val name: string parser
val binding: binding parser
val alt_name: string parser
--- a/src/Pure/Isar/attrib.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/attrib.ML Sat Mar 01 22:46:31 2014 +0100
@@ -35,8 +35,7 @@
Context.generic -> (string * thm list) list * Context.generic
val eval_thms: Proof.context -> (Facts.ref * src list) list -> thm list
val setup: Binding.binding -> attribute context_parser -> string -> theory -> theory
- val attribute_setup: bstring * Position.T -> Symbol_Pos.text * Position.T -> string ->
- theory -> theory
+ val attribute_setup: bstring * Position.T -> Symbol_Pos.source -> string -> theory -> theory
val internal: (morphism -> attribute) -> src
val add_del: attribute -> attribute -> attribute context_parser
val thm_sel: Facts.interval list parser
@@ -185,12 +184,12 @@
add_attribute name
(fn src => fn (ctxt, th) => let val (a, ctxt') = syntax scan src ctxt in a (ctxt', th) end);
-fun attribute_setup name (txt, pos) cmt =
- Context.theory_map (ML_Context.expression pos
+fun attribute_setup name source cmt =
+ Context.theory_map (ML_Context.expression (#pos source)
"val (name, scan, comment): binding * attribute context_parser * string"
"Context.map_theory (Attrib.setup name scan comment)"
(ML_Lex.read Position.none ("(" ^ ML_Syntax.make_binding name ^ ", ") @
- ML_Lex.read pos txt @
+ ML_Lex.read_source source @
ML_Lex.read Position.none (", " ^ ML_Syntax.print_string cmt ^ ")")));
--- a/src/Pure/Isar/isar_cmd.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/isar_cmd.ML Sat Mar 01 22:46:31 2014 +0100
@@ -6,20 +6,20 @@
signature ISAR_CMD =
sig
- val global_setup: Symbol_Pos.text * Position.T -> theory -> theory
- val local_setup: Symbol_Pos.text * Position.T -> Proof.context -> Proof.context
- val parse_ast_translation: Symbol_Pos.text * Position.T -> theory -> theory
- val parse_translation: Symbol_Pos.text * Position.T -> theory -> theory
- val print_translation: Symbol_Pos.text * Position.T -> theory -> theory
- val typed_print_translation: Symbol_Pos.text * Position.T -> theory -> theory
- val print_ast_translation: Symbol_Pos.text * Position.T -> theory -> theory
+ val global_setup: Symbol_Pos.source -> theory -> theory
+ val local_setup: Symbol_Pos.source -> Proof.context -> Proof.context
+ val parse_ast_translation: Symbol_Pos.source -> theory -> theory
+ val parse_translation: Symbol_Pos.source -> theory -> theory
+ val print_translation: Symbol_Pos.source -> theory -> theory
+ val typed_print_translation: Symbol_Pos.source -> theory -> theory
+ val print_ast_translation: Symbol_Pos.source -> theory -> theory
val translations: (xstring * string) Syntax.trrule list -> theory -> theory
val no_translations: (xstring * string) Syntax.trrule list -> theory -> theory
- val oracle: bstring * Position.T -> Symbol_Pos.text * Position.T -> theory -> theory
+ val oracle: bstring * Position.T -> Symbol_Pos.source -> theory -> theory
val add_defs: (bool * bool) * ((binding * string) * Attrib.src list) list -> theory -> theory
val declaration: {syntax: bool, pervasive: bool} ->
- Symbol_Pos.text * Position.T -> local_theory -> local_theory
- val simproc_setup: string * Position.T -> string list -> Symbol_Pos.text * Position.T ->
+ Symbol_Pos.source -> local_theory -> local_theory
+ val simproc_setup: string * Position.T -> string list -> Symbol_Pos.source ->
string list -> local_theory -> local_theory
val hide_class: bool -> xstring list -> theory -> theory
val hide_type: bool -> xstring list -> theory -> theory
@@ -36,7 +36,7 @@
val immediate_proof: Toplevel.transition -> Toplevel.transition
val done_proof: Toplevel.transition -> Toplevel.transition
val skip_proof: Toplevel.transition -> Toplevel.transition
- val ml_diag: bool -> Symbol_Pos.text * Position.T -> Toplevel.transition -> Toplevel.transition
+ val ml_diag: bool -> Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
val diag_state: Proof.context -> Toplevel.state
val diag_goal: Proof.context -> {context: Proof.context, facts: thm list, goal: thm}
val print_theorems: bool -> Toplevel.transition -> Toplevel.transition
@@ -56,10 +56,10 @@
val print_term: (string list * string) -> Toplevel.transition -> Toplevel.transition
val print_type: (string list * (string * string option)) ->
Toplevel.transition -> Toplevel.transition
- val header_markup: Symbol_Pos.text * Position.T -> Toplevel.transition -> Toplevel.transition
- val local_theory_markup: (xstring * Position.T) option * (Symbol_Pos.text * Position.T) ->
+ val header_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
+ val local_theory_markup: (xstring * Position.T) option * (Symbol_Pos.source) ->
Toplevel.transition -> Toplevel.transition
- val proof_markup: Symbol_Pos.text * Position.T -> Toplevel.transition -> Toplevel.transition
+ val proof_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
end;
structure Isar_Cmd: ISAR_CMD =
@@ -70,50 +70,50 @@
(* generic setup *)
-fun global_setup (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos "val setup: theory -> theory" "Context.map_theory setup"
+fun global_setup source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source) "val setup: theory -> theory" "Context.map_theory setup"
|> Context.theory_map;
-fun local_setup (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos "val setup: local_theory -> local_theory" "Context.map_proof setup"
+fun local_setup source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source) "val setup: local_theory -> local_theory" "Context.map_proof setup"
|> Context.proof_map;
(* translation functions *)
-fun parse_ast_translation (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun parse_ast_translation source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val parse_ast_translation: (string * (Proof.context -> Ast.ast list -> Ast.ast)) list"
"Context.map_theory (Sign.parse_ast_translation parse_ast_translation)"
|> Context.theory_map;
-fun parse_translation (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun parse_translation source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val parse_translation: (string * (Proof.context -> term list -> term)) list"
"Context.map_theory (Sign.parse_translation parse_translation)"
|> Context.theory_map;
-fun print_translation (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun print_translation source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val print_translation: (string * (Proof.context -> term list -> term)) list"
"Context.map_theory (Sign.print_translation print_translation)"
|> Context.theory_map;
-fun typed_print_translation (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun typed_print_translation source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val typed_print_translation: (string * (Proof.context -> typ -> term list -> term)) list"
"Context.map_theory (Sign.typed_print_translation typed_print_translation)"
|> Context.theory_map;
-fun print_ast_translation (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun print_ast_translation source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val print_ast_translation: (string * (Proof.context -> Ast.ast list -> Ast.ast)) list"
"Context.map_theory (Sign.print_ast_translation print_ast_translation)"
|> Context.theory_map;
@@ -135,18 +135,19 @@
(* oracles *)
-fun oracle (name, pos) (body_txt, body_pos) =
+fun oracle (name, pos) source =
let
- val body = ML_Lex.read body_pos body_txt;
+ val body = ML_Lex.read_source source;
val ants =
ML_Lex.read Position.none
("local\n\
\ val binding = " ^ ML_Syntax.make_binding (name, pos) ^ ";\n\
\ val body = ") @ body @ ML_Lex.read Position.none (";\n\
\in\n\
- \ val " ^ name ^ " = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (binding, body))));\n\
+ \ val " ^ name ^
+ " = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (binding, body))));\n\
\end;\n");
- in Context.theory_map (ML_Context.exec (fn () => ML_Context.eval false body_pos ants)) end;
+ in Context.theory_map (ML_Context.exec (fn () => ML_Context.eval false (#pos source) ants)) end;
(* old-style defs *)
@@ -161,9 +162,9 @@
(* declarations *)
-fun declaration {syntax, pervasive} (txt, pos) =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun declaration {syntax, pervasive} source =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val declaration: Morphism.declaration"
("Context.map_proof (Local_Theory.declaration {syntax = " ^ Bool.toString syntax ^ ", \
\pervasive = " ^ Bool.toString pervasive ^ "} declaration)")
@@ -172,9 +173,9 @@
(* simprocs *)
-fun simproc_setup name lhss (txt, pos) identifier =
- ML_Lex.read pos txt
- |> ML_Context.expression pos
+fun simproc_setup name lhss source identifier =
+ ML_Lex.read_source source
+ |> ML_Context.expression (#pos source)
"val proc: Morphism.morphism -> Proof.context -> cterm -> thm option"
("Context.map_proof (Simplifier.def_simproc_cmd {name = " ^ ML_Syntax.make_binding name ^ ", \
\lhss = " ^ ML_Syntax.print_strings lhss ^ ", proc = proc, \
@@ -253,11 +254,11 @@
fun init _ = Toplevel.toplevel;
);
-fun ml_diag verbose (txt, pos) = Toplevel.keep (fn state =>
+fun ml_diag verbose source = Toplevel.keep (fn state =>
let val opt_ctxt =
try Toplevel.generic_theory_of state
|> Option.map (Context.proof_of #> Diag_State.put state)
- in ML_Context.eval_text_in opt_ctxt verbose pos txt end);
+ in ML_Context.eval_source_in opt_ctxt verbose source end);
val diag_state = Diag_State.get;
--- a/src/Pure/Isar/isar_syn.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/isar_syn.ML Sat Mar 01 22:46:31 2014 +0100
@@ -248,16 +248,17 @@
val _ =
Outer_Syntax.command @{command_spec "ML"} "ML text within theory or local theory"
- (Parse.ML_source >> (fn (txt, pos) =>
+ (Parse.ML_source >> (fn source =>
Toplevel.generic_theory
- (ML_Context.exec (fn () => ML_Context.eval_text true pos txt) #>
+ (ML_Context.exec (fn () => ML_Context.eval_source true source) #>
Local_Theory.propagate_ml_env)));
val _ =
Outer_Syntax.command @{command_spec "ML_prf"} "ML text within proof"
- (Parse.ML_source >> (fn (txt, pos) =>
+ (Parse.ML_source >> (fn source =>
Toplevel.proof (Proof.map_context (Context.proof_map
- (ML_Context.exec (fn () => ML_Context.eval_text true pos txt))) #> Proof.propagate_ml_env)));
+ (ML_Context.exec (fn () => ML_Context.eval_source true source))) #>
+ Proof.propagate_ml_env)));
val _ =
Outer_Syntax.command @{command_spec "ML_val"} "diagnostic ML text"
--- a/src/Pure/Isar/method.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/method.ML Sat Mar 01 22:46:31 2014 +0100
@@ -42,8 +42,8 @@
val drule: Proof.context -> int -> thm list -> method
val frule: Proof.context -> int -> thm list -> method
val set_tactic: (thm list -> tactic) -> Proof.context -> Proof.context
- val tactic: string * Position.T -> Proof.context -> method
- val raw_tactic: string * Position.T -> Proof.context -> method
+ val tactic: Symbol_Pos.source -> Proof.context -> method
+ val raw_tactic: Symbol_Pos.source -> Proof.context -> method
type src = Args.src
type combinator_info
val no_combinator_info: combinator_info
@@ -68,8 +68,7 @@
val check_source: Proof.context -> src -> src
val syntax: 'a context_parser -> src -> Proof.context -> 'a * Proof.context
val setup: binding -> (Proof.context -> method) context_parser -> string -> theory -> theory
- val method_setup: bstring * Position.T -> Symbol_Pos.text * Position.T -> string ->
- theory -> theory
+ val method_setup: bstring * Position.T -> Symbol_Pos.source -> string -> theory -> theory
type modifier = (Proof.context -> Proof.context) * attribute
val section: modifier parser list -> thm list context_parser
val sections: modifier parser list -> thm list list context_parser
@@ -266,16 +265,16 @@
val set_tactic = ML_Tactic.put;
-fun ml_tactic (txt, pos) ctxt =
+fun ml_tactic source ctxt =
let
val ctxt' = ctxt |> Context.proof_map
- (ML_Context.expression pos
+ (ML_Context.expression (#pos source)
"fun tactic (facts: thm list) : tactic"
- "Context.map_proof (Method.set_tactic tactic)" (ML_Lex.read pos txt));
+ "Context.map_proof (Method.set_tactic tactic)" (ML_Lex.read_source source));
in Context.setmp_thread_data (SOME (Context.Proof ctxt)) (ML_Tactic.get ctxt') end;
-fun tactic txt ctxt = METHOD (ml_tactic txt ctxt);
-fun raw_tactic txt ctxt = RAW_METHOD (ml_tactic txt ctxt);
+fun tactic source ctxt = METHOD (ml_tactic source ctxt);
+fun raw_tactic source ctxt = RAW_METHOD (ml_tactic source ctxt);
@@ -366,12 +365,12 @@
add_method name
(fn src => fn ctxt => let val (m, ctxt') = syntax scan src ctxt in m ctxt' end);
-fun method_setup name (txt, pos) cmt =
- Context.theory_map (ML_Context.expression pos
+fun method_setup name source cmt =
+ Context.theory_map (ML_Context.expression (#pos source)
"val (name, scan, comment): binding * (Proof.context -> Proof.method) context_parser * string"
"Context.map_theory (Method.setup name scan comment)"
(ML_Lex.read Position.none ("(" ^ ML_Syntax.make_binding name ^ ", ") @
- ML_Lex.read pos txt @
+ ML_Lex.read_source source @
ML_Lex.read Position.none (", " ^ ML_Syntax.print_string cmt ^ ")")));
--- a/src/Pure/Isar/parse.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/parse.ML Sat Mar 01 22:46:31 2014 +0100
@@ -16,7 +16,7 @@
val triple_swap: ('a * 'b) * 'c -> ('a * 'c) * 'b
val not_eof: Token.T parser
val position: 'a parser -> ('a * Position.T) parser
- val source_position: 'a parser -> (Symbol_Pos.text * Position.T) parser
+ val source_position: 'a parser -> Symbol_Pos.source parser
val inner_syntax: 'a parser -> string parser
val command: string parser
val keyword: string parser
@@ -93,8 +93,8 @@
val simple_fixes: (binding * string option) list parser
val fixes: (binding * string option * mixfix) list parser
val for_fixes: (binding * string option * mixfix) list parser
- val ML_source: (Symbol_Pos.text * Position.T) parser
- val document_source: (Symbol_Pos.text * Position.T) parser
+ val ML_source: Symbol_Pos.source parser
+ val document_source: Symbol_Pos.source parser
val term_group: string parser
val prop_group: string parser
val term: string parser
--- a/src/Pure/Isar/proof_context.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/proof_context.ML Sat Mar 01 22:46:31 2014 +0100
@@ -377,7 +377,8 @@
fun read_class ctxt text =
let
val tsig = tsig_of ctxt;
- val (syms, pos) = Syntax.read_token text;
+ val {text, pos, ...} = Syntax.read_token_source text;
+ val syms = Symbol_Pos.explode (text, pos);
val c = Type.cert_class tsig (Type.intern_class tsig (Symbol_Pos.content syms))
handle TYPE (msg, _, _) => error (msg ^ Position.here pos);
val _ = Context_Position.report ctxt pos (Name_Space.markup (Type.class_space tsig) c);
@@ -439,8 +440,6 @@
local
-val token_content = Syntax.read_token #>> Symbol_Pos.content;
-
fun prep_const_proper ctxt strict (c, pos) =
let
fun err msg = error (msg ^ Position.here pos);
@@ -461,7 +460,7 @@
fun read_type_name ctxt strict text =
let
val tsig = tsig_of ctxt;
- val (c, pos) = token_content text;
+ val (c, pos) = Syntax.read_token_content text;
in
if Lexicon.is_tid c then
(Context_Position.report ctxt pos Markup.tfree;
@@ -486,11 +485,12 @@
| T => error ("Not a type constructor: " ^ Syntax.string_of_typ ctxt T));
-fun read_const_proper ctxt strict = prep_const_proper ctxt strict o token_content;
+fun read_const_proper ctxt strict =
+ prep_const_proper ctxt strict o Syntax.read_token_content;
fun read_const ctxt strict ty text =
let
- val (c, pos) = token_content text;
+ val (c, pos) = Syntax.read_token_content text;
val _ = no_skolem false c;
in
(case (Variable.lookup_fixed ctxt c, Variable.is_const ctxt c) of
--- a/src/Pure/Isar/token.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Isar/token.ML Sat Mar 01 22:46:31 2014 +0100
@@ -40,7 +40,7 @@
val is_blank: T -> bool
val is_newline: T -> bool
val inner_syntax_of: T -> string
- val source_position_of: T -> Symbol_Pos.text * Position.T
+ val source_position_of: T -> Symbol_Pos.source
val content_of: T -> string
val markup: T -> Markup.T * string
val unparse: T -> string
@@ -127,6 +127,8 @@
| Sync => "sync marker"
| EOF => "end-of-input";
+val delimited_kind = member (op =) [String, AltString, Verbatim, Cartouche, Comment];
+
(* position *)
@@ -206,11 +208,16 @@
(* token content *)
-fun inner_syntax_of (Token ((source, (pos, _)), (_, x), _)) =
+fun inner_syntax_of (Token ((source, (pos, _)), (kind, x), _)) =
if YXML.detect x then x
- else YXML.string_of (XML.Elem (Markup.token (Position.properties_of pos), [XML.Text source]));
+ else
+ let
+ val delimited = delimited_kind kind;
+ val tree = XML.Elem (Markup.token delimited (Position.properties_of pos), [XML.Text source]);
+ in YXML.string_of tree end;
-fun source_position_of (Token ((source, (pos, _)), _, _)) = (source, pos);
+fun source_position_of (Token ((source, (pos, _)), (kind, _), _)) =
+ {delimited = delimited_kind kind, text = source, pos = pos};
fun content_of (Token (_, (_, x), _)) = x;
--- a/src/Pure/ML/ml_context.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/ML/ml_context.ML Sat Mar 01 22:46:31 2014 +0100
@@ -31,10 +31,10 @@
val eval_antiquotes: ML_Lex.token Antiquote.antiquote list * Position.T ->
Context.generic option -> (ML_Lex.token list * ML_Lex.token list) * Context.generic option
val eval: bool -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit
- val eval_text: bool -> Position.T -> Symbol_Pos.text -> unit
+ val eval_source: bool -> Symbol_Pos.source -> unit
val eval_in: Proof.context option -> bool -> Position.T ->
ML_Lex.token Antiquote.antiquote list -> unit
- val eval_text_in: Proof.context option -> bool -> Position.T -> Symbol_Pos.text -> unit
+ val eval_source_in: Proof.context option -> bool -> Symbol_Pos.source -> unit
val expression: Position.T -> string -> string -> ML_Lex.token Antiquote.antiquote list ->
Context.generic -> Context.generic
end
@@ -208,13 +208,14 @@
(* derived versions *)
-fun eval_text verbose pos txt = eval verbose pos (ML_Lex.read pos txt);
+fun eval_source verbose source =
+ eval verbose (#pos source) (ML_Lex.read_source source);
fun eval_in ctxt verbose pos ants =
Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval verbose pos ants) ();
-fun eval_text_in ctxt verbose pos txt =
- Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval_text verbose pos txt) ();
+fun eval_source_in ctxt verbose source =
+ Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval_source verbose source) ();
fun expression pos bind body ants =
exec (fn () => eval false pos
--- a/src/Pure/ML/ml_lex.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/ML/ml_lex.ML Sat Mar 01 22:46:31 2014 +0100
@@ -26,6 +26,7 @@
Source.source) Source.source
val tokenize: string -> token list
val read: Position.T -> Symbol_Pos.text -> token Antiquote.antiquote list
+ val read_source: Symbol_Pos.source -> token Antiquote.antiquote list
end;
structure ML_Lex: ML_LEX =
@@ -298,10 +299,9 @@
val tokenize = Source.of_string #> Symbol.source #> source #> Source.exhaust;
-fun read pos txt =
+fun read pos text =
let
- val _ = Position.report pos Markup.language_ML;
- val syms = Symbol_Pos.explode (txt, pos);
+ val syms = Symbol_Pos.explode (text, pos);
val termination =
if null syms then []
else
@@ -318,6 +318,9 @@
|> tap (List.app (fn Antiquote.Text tok => (check_content_of tok; warn tok) | _ => ())));
in input @ termination end;
+fun read_source {delimited, text, pos} =
+ (Position.report pos (Markup.language_ML delimited); read pos text);
+
end;
end;
--- a/src/Pure/PIDE/markup.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/PIDE/markup.ML Sat Mar 01 22:46:31 2014 +0100
@@ -20,18 +20,21 @@
val name: string -> T -> T
val kindN: string
val instanceN: string
+ val languageN: string
val symbolsN: string
- val languageN: string
- val language: {name: string, symbols: bool, antiquotes: bool} -> T
+ val delimitedN: string
+ val is_delimited: Properties.T -> bool
+ val language: {name: string, symbols: bool, antiquotes: bool, delimited: bool} -> T
+ val language': {name: string, symbols: bool, antiquotes: bool} -> bool -> T
val language_method: T
- val language_sort: T
- val language_type: T
- val language_term: T
- val language_prop: T
- val language_ML: T
- val language_document: T
+ val language_sort: bool -> T
+ val language_type: bool -> T
+ val language_term: bool -> T
+ val language_prop: bool -> T
+ val language_ML: bool -> T
+ val language_document: bool -> T
val language_antiquotation: T
- val language_text: T
+ val language_text: bool -> T
val language_rail: T
val bindingN: string val binding: T
val entityN: string val entity: string -> string -> T
@@ -106,7 +109,7 @@
val cartoucheN: string val cartouche: T
val commentN: string val comment: T
val controlN: string val control: T
- val tokenN: string val token: Properties.T -> T
+ val tokenN: string val token: bool -> Properties.T -> T
val keyword1N: string val keyword1: T
val keyword2N: string val keyword2: T
val keyword3N: string val keyword3: T
@@ -251,24 +254,36 @@
(* embedded languages *)
+val languageN = "language";
val symbolsN = "symbols";
val antiquotesN = "antiquotes";
-val languageN = "language";
+val delimitedN = "delimited"
+
+fun is_delimited props =
+ Properties.get props delimitedN = SOME "true";
-fun language {name, symbols, antiquotes} =
+fun language {name, symbols, antiquotes, delimited} =
(languageN,
- [(nameN, name), (symbolsN, print_bool symbols), (antiquotesN, print_bool antiquotes)]);
+ [(nameN, name),
+ (symbolsN, print_bool symbols),
+ (antiquotesN, print_bool antiquotes),
+ (delimitedN, print_bool delimited)]);
-val language_method = language {name = "method", symbols = true, antiquotes = false};
-val language_sort = language {name = "sort", symbols = true, antiquotes = false};
-val language_type = language {name = "type", symbols = true, antiquotes = false};
-val language_term = language {name = "term", symbols = true, antiquotes = false};
-val language_prop = language {name = "prop", symbols = true, antiquotes = false};
-val language_ML = language {name = "ML", symbols = false, antiquotes = true};
-val language_document = language {name = "document", symbols = false, antiquotes = true};
-val language_antiquotation = language {name = "antiquotation", symbols = true, antiquotes = false};
-val language_text = language {name = "text", symbols = true, antiquotes = false};
-val language_rail = language {name = "rail", symbols = true, antiquotes = true};
+fun language' {name, symbols, antiquotes} delimited =
+ language {name = name, symbols = symbols, antiquotes = antiquotes, delimited = delimited};
+
+val language_method =
+ language {name = "method", symbols = true, antiquotes = false, delimited = false};
+val language_sort = language' {name = "sort", symbols = true, antiquotes = false};
+val language_type = language' {name = "type", symbols = true, antiquotes = false};
+val language_term = language' {name = "term", symbols = true, antiquotes = false};
+val language_prop = language' {name = "prop", symbols = true, antiquotes = false};
+val language_ML = language' {name = "ML", symbols = false, antiquotes = true};
+val language_document = language' {name = "document", symbols = false, antiquotes = true};
+val language_antiquotation =
+ language {name = "antiquotation", symbols = true, antiquotes = false, delimited = true};
+val language_text = language' {name = "text", symbols = true, antiquotes = false};
+val language_rail = language {name = "rail", symbols = true, antiquotes = true, delimited = true};
(* formal entities *)
@@ -411,7 +426,7 @@
val (controlN, control) = markup_elem "control";
val tokenN = "token";
-fun token props = (tokenN, props);
+fun token delimited props = (tokenN, (delimitedN, print_bool delimited) :: props);
(* timing *)
--- a/src/Pure/PIDE/markup.scala Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/PIDE/markup.scala Sat Mar 01 22:46:31 2014 +0100
@@ -94,6 +94,7 @@
val Symbols = new Properties.Boolean("symbols")
val Antiquotes = new Properties.Boolean("antiquotes")
+ val Delimited = new Properties.Boolean("delimited")
val LANGUAGE = "language"
object Language
@@ -101,12 +102,12 @@
val ML = "ML"
val UNKNOWN = "unknown"
- def unapply(markup: Markup): Option[(String, Boolean, Boolean)] =
+ def unapply(markup: Markup): Option[(String, Boolean, Boolean, Boolean)] =
markup match {
case Markup(LANGUAGE, props) =>
- (props, props, props) match {
- case (Name(name), Symbols(symbols), Antiquotes(antiquotes)) =>
- Some((name, symbols, antiquotes))
+ (props, props, props, props) match {
+ case (Name(name), Symbols(symbols), Antiquotes(antiquotes), Delimited(delimited)) =>
+ Some((name, symbols, antiquotes, delimited))
case _ => None
}
case _ => None
--- a/src/Pure/Syntax/syntax.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Syntax/syntax.ML Sat Mar 01 22:46:31 2014 +0100
@@ -14,10 +14,11 @@
val ambiguity_warning: bool Config.T
val ambiguity_limit_raw: Config.raw
val ambiguity_limit: int Config.T
- val read_token: string -> Symbol_Pos.T list * Position.T
val read_token_pos: string -> Position.T
+ val read_token_source: string -> Symbol_Pos.source
+ val read_token_content: string -> string * Position.T
val parse_token: Proof.context -> (XML.tree list -> 'a) ->
- Markup.T -> (Symbol_Pos.T list * Position.T -> 'a) -> string -> 'a
+ (bool -> Markup.T) -> (Symbol_Pos.T list * Position.T -> 'a) -> string -> 'a
val parse_sort: Proof.context -> string -> sort
val parse_typ: Proof.context -> string -> typ
val parse_term: Proof.context -> string -> term
@@ -159,26 +160,39 @@
(* outer syntax token -- with optional YXML content *)
+local
+
fun token_position (XML.Elem ((name, props), _)) =
- if name = Markup.tokenN then Position.of_properties props
- else Position.none
- | token_position (XML.Text _) = Position.none;
+ if name = Markup.tokenN
+ then (Markup.is_delimited props, Position.of_properties props)
+ else (false, Position.none)
+ | token_position (XML.Text _) = (false, Position.none);
-fun explode_token tree =
+fun token_source tree =
let
val text = XML.content_of [tree];
- val pos = token_position tree;
- in (Symbol_Pos.explode (text, pos), pos) end;
+ val (delimited, pos) = token_position tree;
+ in {delimited = delimited, text = text, pos = pos} end;
+
+in
+
+fun read_token_pos str = #2 (token_position (YXML.parse str handle Fail msg => error msg));
-fun read_token str = explode_token (YXML.parse str handle Fail msg => error msg);
-fun read_token_pos str = token_position (YXML.parse str handle Fail msg => error msg);
+fun read_token_source str = token_source (YXML.parse str handle Fail msg => error msg);
+
+fun read_token_content str =
+ let
+ val {text, pos, ...} = read_token_source str;
+ val syms = Symbol_Pos.explode (text, pos);
+ in (Symbol_Pos.content syms, pos) end;
fun parse_token ctxt decode markup parse str =
let
fun parse_tree tree =
let
- val (syms, pos) = explode_token tree;
- val _ = Context_Position.report ctxt pos markup;
+ val {delimited, text, pos} = token_source tree;
+ val syms = Symbol_Pos.explode (text, pos);
+ val _ = Context_Position.report ctxt pos (markup delimited);
in parse (syms, pos) end;
in
(case YXML.parse_body str handle Fail msg => error msg of
@@ -188,6 +202,8 @@
| body => decode body)
end;
+end;
+
(* (un)parsing *)
--- a/src/Pure/Syntax/syntax_phases.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Syntax/syntax_phases.ML Sat Mar 01 22:46:31 2014 +0100
@@ -440,7 +440,8 @@
else decode_appl ps asts
| decode ps (Ast.Appl asts) = decode_appl ps asts;
- val (syms, pos) = Syntax.read_token str;
+ val {text, pos, ...} = Syntax.read_token_source str;
+ val syms = Symbol_Pos.explode (text, pos);
val ast =
parse_asts ctxt true root (syms, pos)
|> uncurry (report_result ctxt pos)
@@ -749,8 +750,8 @@
in
-val unparse_sort = unparse_t sort_to_ast Printer.pretty_typ_ast Markup.language_sort;
-val unparse_typ = unparse_t typ_to_ast Printer.pretty_typ_ast Markup.language_type;
+val unparse_sort = unparse_t sort_to_ast Printer.pretty_typ_ast (Markup.language_sort false);
+val unparse_typ = unparse_t typ_to_ast Printer.pretty_typ_ast (Markup.language_type false);
fun unparse_term ctxt =
let
@@ -760,7 +761,7 @@
in
unparse_t (term_to_ast idents (is_some o Syntax.lookup_const syn))
(Printer.pretty_term_ast (not (Pure_Thy.old_appl_syntax thy)))
- Markup.language_term ctxt
+ (Markup.language_term false) ctxt
end;
end;
--- a/src/Pure/Thy/latex.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Thy/latex.ML Sat Mar 01 22:46:31 2014 +0100
@@ -121,8 +121,8 @@
enclose "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" (output_syms s)
else if Token.is_kind Token.Verbatim tok then
let
- val (txt, pos) = Token.source_position_of tok;
- val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos);
+ val {text, pos, ...} = Token.source_position_of tok;
+ val ants = Antiquote.read (Symbol_Pos.explode (text, pos), pos);
val out = implode (map output_syms_antiq ants);
in enclose "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" out end
else if Token.is_kind Token.Cartouche tok then
--- a/src/Pure/Thy/thy_output.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Thy/thy_output.ML Sat Mar 01 22:46:31 2014 +0100
@@ -25,7 +25,7 @@
datatype markup = Markup | MarkupEnv | Verbatim
val eval_antiq: Scan.lexicon -> Toplevel.state -> Antiquote.antiq -> string
val eval_antiquote: Scan.lexicon -> Toplevel.state -> Symbol_Pos.text * Position.T -> string
- val check_text: Symbol_Pos.text * Position.T -> Toplevel.state -> unit
+ val check_text: Symbol_Pos.source -> Toplevel.state -> unit
val present_thy: Scan.lexicon -> (string -> string list) -> (markup -> string -> bool) ->
(Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
val pretty_text: Proof.context -> string -> Pretty.T
@@ -184,10 +184,10 @@
end;
-fun check_text (txt, pos) state =
- (Position.report pos Markup.language_document;
+fun check_text {delimited, text, pos} state =
+ (Position.report pos (Markup.language_document delimited);
if Toplevel.is_skipped_proof state then ()
- else ignore (eval_antiquote (#1 (Keyword.get_lexicons ())) state (txt, pos)));
+ else ignore (eval_antiquote (#1 (Keyword.get_lexicons ())) state (text, pos)));
@@ -360,9 +360,9 @@
Parse.position (Scan.one (Token.is_command andf is_markup mark o Token.content_of)) --
Scan.repeat tag --
Parse.!!!! ((improper -- locale -- improper) |-- Parse.document_source --| improper_end)
- >> (fn (((tok, pos), tags), txt) =>
+ >> (fn (((tok, pos'), tags), {text, pos, ...}) =>
let val name = Token.content_of tok
- in (SOME (name, pos, tags), (mk (name, txt), (flag, d))) end));
+ in (SOME (name, pos', tags), (mk (name, (text, pos)), (flag, d))) end));
val command = Scan.peek (fn d =>
Parse.position (Scan.one (Token.is_command)) --
@@ -373,7 +373,7 @@
val cmt = Scan.peek (fn d =>
Parse.$$$ "--" |-- Parse.!!!! (improper |-- Parse.document_source)
- >> (fn txt => (NONE, (MarkupToken ("cmt", txt), ("", d)))));
+ >> (fn {text, pos, ...} => (NONE, (MarkupToken ("cmt", (text, pos)), ("", d)))));
val other = Scan.peek (fn d =>
Parse.not_eof >> (fn tok => (NONE, (BasicToken tok, ("", d)))));
@@ -467,8 +467,11 @@
fun pretty_text ctxt =
Pretty.chunks o map Pretty.str o map (tweak_line ctxt) o split_lines;
-fun pretty_text_report ctxt (s, pos) =
- (Context_Position.report ctxt pos Markup.language_text; pretty_text ctxt s);
+fun pretty_text_report ctxt {delimited, text, pos} =
+ let
+ val _ = Context_Position.report ctxt pos (Markup.language_text delimited);
+ val s = Symbol_Pos.content (Symbol_Pos.explode (text, pos));
+ in pretty_text ctxt s end;
fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
@@ -574,7 +577,7 @@
basic_entity (Binding.name "typ") Args.typ_abbrev Syntax.pretty_typ #>
basic_entity (Binding.name "class") (Scan.lift Args.name_inner_syntax) pretty_class #>
basic_entity (Binding.name "type") (Scan.lift Args.name) pretty_type #>
- basic_entity (Binding.name "text") (Scan.lift (Parse.position Args.name)) pretty_text_report #>
+ basic_entity (Binding.name "text") (Scan.lift Args.name_source_position) pretty_text_report #>
basic_entities (Binding.name "prf") Attrib.thms (pretty_prf false) #>
basic_entities (Binding.name "full_prf") Attrib.thms (pretty_prf true) #>
basic_entity (Binding.name "theory") (Scan.lift (Parse.position Args.name)) pretty_theory);
@@ -638,15 +641,17 @@
local
fun ml_text name ml = antiquotation name (Scan.lift Args.name_source_position)
- (fn {context, ...} => fn (txt, pos) =>
- (ML_Context.eval_in (SOME context) false pos (ml pos txt);
- Symbol_Pos.content (Symbol_Pos.explode (txt, pos))
+ (fn {context, ...} => fn source as {text, pos, ...} =>
+ (ML_Context.eval_in (SOME context) false pos (ml source);
+ Symbol_Pos.content (Symbol_Pos.explode (text, pos))
|> (if Config.get context quotes then quote else I)
|> (if Config.get context display then enclose "\\begin{verbatim}\n" "\n\\end{verbatim}"
else verb_text)));
-fun ml_enclose bg en pos txt =
- ML_Lex.read Position.none bg @ ML_Lex.read pos txt @ ML_Lex.read Position.none en;
+fun ml_enclose bg en source =
+ ML_Lex.read Position.none bg @
+ ML_Lex.read_source source @
+ ML_Lex.read Position.none en;
in
@@ -658,11 +663,11 @@
(ml_enclose "functor XXX() = struct structure XX = " " end;") #>
ml_text (Binding.name "ML_functor") (* FIXME formal treatment of functor name (!?) *)
- (fn pos => fn txt =>
+ (fn {text, pos, ...} =>
ML_Lex.read Position.none ("ML_Env.check_functor " ^
- ML_Syntax.print_string (Symbol_Pos.content (Symbol_Pos.explode (txt, pos))))) #>
+ ML_Syntax.print_string (Symbol_Pos.content (Symbol_Pos.explode (text, pos))))) #>
- ml_text (Binding.name "ML_text") (K (K [])));
+ ml_text (Binding.name "ML_text") (K []));
end;
--- a/src/Pure/Thy/thy_syntax.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Thy/thy_syntax.ML Sat Mar 01 22:46:31 2014 +0100
@@ -44,14 +44,15 @@
fun reports_of_token tok =
let
+ val {text, pos, ...} = Token.source_position_of tok;
val malformed_symbols =
- Symbol_Pos.explode (Token.source_position_of tok)
+ Symbol_Pos.explode (text, pos)
|> map_filter (fn (sym, pos) =>
if Symbol.is_malformed sym
then SOME ((pos, Markup.bad), "Malformed symbolic character") else NONE);
val is_malformed = Token.is_error tok orelse not (null malformed_symbols);
- val (markup, txt) = Token.markup tok;
- val reports = ((Token.pos_of tok, markup), txt) :: malformed_symbols;
+ val (markup, msg) = Token.markup tok;
+ val reports = ((Token.pos_of tok, markup), msg) :: malformed_symbols;
in (is_malformed, reports) end;
in
--- a/src/Pure/Tools/rail.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/Tools/rail.ML Sat Mar 01 22:46:31 2014 +0100
@@ -192,10 +192,11 @@
in
-fun read ctxt (s, pos) =
+fun read ctxt (source: Symbol_Pos.source) =
let
+ val {text, pos, ...} = source;
val _ = Context_Position.report ctxt pos Markup.language_rail;
- val toks = tokenize (Symbol_Pos.explode (s, pos));
+ val toks = tokenize (Symbol_Pos.explode (text, pos));
val _ = Context_Position.reports ctxt (maps reports_of_token toks);
in #1 (Scan.error (Scan.finite stopper (rules --| !!! (Scan.ahead (Scan.one is_eof)))) toks) end;
--- a/src/Pure/pure_syn.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Pure/pure_syn.ML Sat Mar 01 22:46:31 2014 +0100
@@ -24,9 +24,10 @@
let
val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy);
val provide = Thy_Load.provide (src_path, digest);
+ val source = {delimited = true, text = cat_lines lines, pos = pos};
in
gthy
- |> ML_Context.exec (fn () => ML_Context.eval_text true pos (cat_lines lines))
+ |> ML_Context.exec (fn () => ML_Context.eval_source true source)
|> Local_Theory.propagate_ml_env
|> Context.mapping provide (Local_Theory.background_theory provide)
end)));
--- a/src/Tools/Code/code_thingol.ML Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Tools/Code/code_thingol.ML Sat Mar 01 22:46:31 2014 +0100
@@ -882,7 +882,7 @@
(fn thy'' => not (Sign.declared_const thy'' c)) (Theory.parents_of thy');
fun consts_of_select thy' = filter (belongs_here thy') (consts_of thy');
fun read_const_expr str =
- (case Syntax.parse_token ctxt (K NONE) Markup.empty (SOME o Symbol_Pos.implode o #1) str of
+ (case Syntax.parse_token ctxt (K NONE) (K Markup.empty) (SOME o Symbol_Pos.implode o #1) str of
SOME "_" => ([], consts_of thy)
| SOME s =>
if String.isSuffix "._" s
--- a/src/Tools/jEdit/src/completion_popup.scala Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Tools/jEdit/src/completion_popup.scala Sat Mar 01 22:46:31 2014 +0100
@@ -150,7 +150,7 @@
val context =
(opt_rendering orElse PIDE.document_view(text_area).map(_.get_rendering()) match {
case Some(rendering) =>
- rendering.language_context(before_caret_range(rendering))
+ rendering.completion_language(before_caret_range(rendering))
case None => None
}) getOrElse syntax.language_context
--- a/src/Tools/jEdit/src/rendering.scala Sat Mar 01 19:55:01 2014 +0100
+++ b/src/Tools/jEdit/src/rendering.scala Sat Mar 01 22:46:31 2014 +0100
@@ -132,7 +132,7 @@
private val completion_names_elements =
Document.Elements(Markup.COMPLETION)
- private val language_context_elements =
+ private val completion_language_elements =
Document.Elements(Markup.STRING, Markup.ALTSTRING, Markup.VERBATIM,
Markup.CARTOUCHE, Markup.COMMENT, Markup.LANGUAGE,
Markup.ML_STRING, Markup.ML_COMMENT)
@@ -267,11 +267,12 @@
}).headOption.map(_.info)
}
- def language_context(range: Text.Range): Option[Completion.Language_Context] =
- snapshot.select(range, Rendering.language_context_elements, _ =>
+ def completion_language(range: Text.Range): Option[Completion.Language_Context] =
+ snapshot.select(range, Rendering.completion_language_elements, _ =>
{
- case Text.Info(_, XML.Elem(Markup.Language(language, symbols, antiquotes), _)) =>
- Some(Completion.Language_Context(language, symbols, antiquotes))
+ case Text.Info(_, XML.Elem(Markup.Language(language, symbols, antiquotes, delimited), _)) =>
+ if (delimited) Some(Completion.Language_Context(language, symbols, antiquotes))
+ else None
case Text.Info(_, elem)
if elem.name == Markup.ML_STRING || elem.name == Markup.ML_COMMENT =>
Some(Completion.Language_Context.ML_inner)
@@ -485,7 +486,7 @@
Some(add(prev, r, (true, pretty_typing("::", body))))
case (prev, Text.Info(r, XML.Elem(Markup(Markup.ML_TYPING, _), body))) =>
Some(add(prev, r, (false, pretty_typing("ML:", body))))
- case (prev, Text.Info(r, XML.Elem(Markup.Language(language, _, _), _))) =>
+ case (prev, Text.Info(r, XML.Elem(Markup.Language(language, _, _, _), _))) =>
Some(add(prev, r, (true, XML.Text("language: " + language))))
case (prev, Text.Info(r, XML.Elem(Markup(name, _), _))) =>
Rendering.tooltip_descriptions.get(name).