# HG changeset patch
# User wenzelm
# Date 1302295229 -7200
# Node ID d622145603eefe81048c042e5019eed105f4c02a
# Parent 140f283266b774cac15504ef8718b4caf4f77bc4
more accurate markup for syntax consts, notably binders which point back to the original logical entity;
tuned;

diff -r 140f283266b7 -r d622145603ee src/Pure/ML/ml_antiquote.ML
--- a/src/Pure/ML/ml_antiquote.ML	Fri Apr 08 21:11:29 2011 +0200
+++ b/src/Pure/ML/ml_antiquote.ML	Fri Apr 08 22:40:29 2011 +0200
@@ -151,7 +151,8 @@
 
 val _ = inline "syntax_const"
   (Args.context -- Scan.lift (Parse.position Args.name) >> (fn (ctxt, (c, pos)) =>
-    if Syntax.is_const (ProofContext.syn_of ctxt) c then ML_Syntax.print_string c
+    if is_some (Syntax.lookup_const (ProofContext.syn_of ctxt) c)
+    then ML_Syntax.print_string c
     else error ("Unknown syntax const: " ^ quote c ^ Position.str_of pos)));
 
 val _ = inline "const"
diff -r 140f283266b7 -r d622145603ee src/Pure/Syntax/mixfix.ML
--- a/src/Pure/Syntax/mixfix.ML	Fri Apr 08 21:11:29 2011 +0200
+++ b/src/Pure/Syntax/mixfix.ML	Fri Apr 08 22:40:29 2011 +0200
@@ -110,8 +110,8 @@
 
     val mfix = map mfix_of type_decls;
     val _ = map2 check_args type_decls mfix;
-    val xconsts = map #1 type_decls;
-  in Syntax_Ext.syn_ext (map_filter I mfix) xconsts ([], [], [], []) ([], []) end;
+    val consts = map (fn (t, _, _) => (t, "")) type_decls;
+  in Syntax_Ext.syn_ext (map_filter I mfix) consts ([], [], [], []) ([], []) end;
 
 
 (* syn_ext_consts *)
@@ -143,15 +143,16 @@
       | binder _ = NONE;
 
     val mfix = maps mfix_of const_decls;
-    val xconsts = map #1 const_decls;
     val binders = map_filter binder const_decls;
     val binder_trs = binders
       |> map (Syntax_Ext.stamp_trfun binder_stamp o apsnd K o Syntax_Trans.mk_binder_tr);
     val binder_trs' = binders
       |> map (Syntax_Ext.stamp_trfun binder_stamp o
          apsnd (K o Syntax_Trans.non_typed_tr') o Syntax_Trans.mk_binder_tr' o swap);
+
+    val consts = binders @ map (fn (c, _, _) => (c, "")) const_decls;
   in
-    Syntax_Ext.syn_ext' is_logtype mfix xconsts ([], binder_trs, binder_trs', []) ([], [])
+    Syntax_Ext.syn_ext' is_logtype mfix consts ([], binder_trs, binder_trs', []) ([], [])
   end;
 
 end;
diff -r 140f283266b7 -r d622145603ee src/Pure/Syntax/syntax.ML
--- a/src/Pure/Syntax/syntax.ML	Fri Apr 08 21:11:29 2011 +0200
+++ b/src/Pure/Syntax/syntax.ML	Fri Apr 08 22:40:29 2011 +0200
@@ -94,7 +94,7 @@
   val pp_global: theory -> Pretty.pp
   type syntax
   val eq_syntax: syntax * syntax -> bool
-  val is_const: syntax -> string -> bool
+  val lookup_const: syntax -> string -> string option
   val is_keyword: syntax -> string -> bool
   val tokenize: syntax -> bool -> Symbol_Pos.T list -> Lexicon.token list
   val parse: Proof.context -> syntax -> string -> Lexicon.token list -> Parser.parsetree list
@@ -458,7 +458,7 @@
     input: Syntax_Ext.xprod list,
     lexicon: Scan.lexicon,
     gram: Parser.gram,
-    consts: string list,
+    consts: string Symtab.table,
     prmodes: string list,
     parse_ast_trtab: ((Proof.context -> Ast.ast list -> Ast.ast) * stamp) Symtab.table,
     parse_ruletab: ruletab,
@@ -470,7 +470,7 @@
 
 fun eq_syntax (Syntax (_, s1), Syntax (_, s2)) = s1 = s2;
 
-fun is_const (Syntax ({consts, ...}, _)) c = member (op =) consts c;
+fun lookup_const (Syntax ({consts, ...}, _)) = Symtab.lookup consts;
 fun is_keyword (Syntax ({lexicon, ...}, _)) = Scan.is_literal lexicon o Symbol.explode;
 fun tokenize (Syntax ({lexicon, ...}, _)) = Lexicon.tokenize lexicon;
 fun parse ctxt (Syntax ({gram, ...}, _)) = Parser.parse ctxt gram;
@@ -495,7 +495,7 @@
   ({input = [],
     lexicon = Scan.empty_lexicon,
    gram = Parser.empty_gram,
-    consts = [],
+    consts = Symtab.empty,
    prmodes = [],
    parse_ast_trtab = Symtab.empty,
    parse_ruletab = Symtab.empty,
@@ -508,6 +508,11 @@
 
 (* update_syntax *)
 
+fun update_const (c, b) tab =
+  if c = "" orelse (b = "" andalso (Lexicon.is_marked c orelse Symtab.defined tab c))
+  then tab
+  else Symtab.update (c, b) tab;
+
 fun update_syntax (mode, inout) syn_ext (Syntax (tabs, _)) =
   let
     val {input, lexicon, gram, consts = consts1, prmodes, parse_ast_trtab, parse_ruletab,
@@ -523,7 +528,7 @@
   ({input = new_xprods @ input,
    lexicon = fold Scan.extend_lexicon (Syntax_Ext.delims_of new_xprods) lexicon,
    gram = Parser.extend_gram new_xprods gram,
-    consts = Library.merge (op =) (consts1, filter_out Lexicon.is_marked consts2),
+    consts = fold update_const consts2 consts1,
    prmodes = insert (op =) mode prmodes,
    parse_ast_trtab =
      update_trtab "parse ast translation" (if_inout parse_ast_translation) parse_ast_trtab,
@@ -582,7 +587,7 @@
   ({input = Library.merge (op =) (input1, input2),
    lexicon = Scan.merge_lexicons (lexicon1, lexicon2),
    gram = Parser.merge_gram (gram1, gram2),
-    consts = sort_distinct string_ord (consts1 @ consts2),
+    consts = Symtab.merge (K true) (consts1, consts2),
    prmodes = Library.merge (op =) (prmodes1, prmodes2),
    parse_ast_trtab =
      merge_trtabs "parse ast translation" parse_ast_trtab1 parse_ast_trtab2,
@@ -627,8 +632,8 @@
 
 fun pretty_trans (Syntax (tabs, _)) =
   let
-    fun pretty_trtab name tab =
-      pretty_strs_qs name (Symtab.keys tab);
+    fun pretty_tab name tab =
+      pretty_strs_qs name (sort_strings (Symtab.keys tab));
 
     fun pretty_ruletab name tab =
      Pretty.big_list name (map Ast.pretty_rule (dest_ruletab tab));
@@ -636,13 +641,13 @@
     val {consts, parse_ast_trtab, parse_ruletab, parse_trtab, print_trtab,
      print_ruletab, print_ast_trtab, ...} = tabs;
   in
-    [pretty_strs_qs "consts:" consts,
-     pretty_trtab "parse_ast_translation:" parse_ast_trtab,
-     pretty_ruletab "parse_rules:" parse_ruletab,
-     pretty_trtab "parse_translation:" parse_trtab,
-     pretty_trtab "print_translation:" print_trtab,
-     pretty_ruletab "print_rules:" print_ruletab,
-     pretty_trtab "print_ast_translation:" print_ast_trtab]
+    [pretty_tab "consts:" consts,
+     pretty_tab "parse_ast_translation:" parse_ast_trtab,
+     pretty_ruletab "parse_rules:" parse_ruletab,
+     pretty_tab "parse_translation:" parse_trtab,
+     pretty_tab "print_translation:" print_trtab,
+     pretty_ruletab "print_rules:" print_ruletab,
+     pretty_tab "print_ast_translation:" print_ast_trtab]
   end;
 
 in
diff -r 140f283266b7 -r d622145603ee src/Pure/Syntax/syntax_ext.ML
--- a/src/Pure/Syntax/syntax_ext.ML	Fri Apr 08 21:11:29 2011 +0200
+++ b/src/Pure/Syntax/syntax_ext.ML	Fri Apr 08 22:40:29 2011 +0200
@@ -21,7 +21,7 @@
   datatype syn_ext =
     Syn_Ext of {
      xprods: xprod list,
-      consts: string list,
+      consts: (string * string) list,
      parse_ast_translation: (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list,
      parse_rules: (Ast.ast * Ast.ast) list,
      parse_translation: (string * ((Proof.context -> term list -> term) * stamp)) list,
@@ -32,18 +32,17 @@
   val mfix_args: string -> int
   val escape: string -> string
   val syn_ext': (string -> bool) -> mfix list ->
-    string list -> (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list *
+    (string * string) list -> (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list *
    (string * ((Proof.context -> term list -> term) * stamp)) list *
    (string * ((Proof.context -> typ -> term list -> term) * stamp)) list *
    (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list ->
    (Ast.ast * Ast.ast) list * (Ast.ast * Ast.ast) list -> syn_ext
-  val syn_ext: mfix list -> string list ->
+  val syn_ext: mfix list -> (string * string) list ->
    (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list *
    (string * ((Proof.context -> term list -> term) * stamp)) list *
    (string * ((Proof.context -> typ -> term list -> term) * stamp)) list *
    (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list ->
    (Ast.ast * Ast.ast) list * (Ast.ast * Ast.ast) list -> syn_ext
-  val syn_ext_const_names: string list -> syn_ext
   val syn_ext_rules: (Ast.ast * Ast.ast) list * (Ast.ast * Ast.ast) list -> syn_ext
   val syn_ext_trfuns:
    (string * ((Ast.ast list -> Ast.ast) * stamp)) list *
@@ -283,7 +282,7 @@
 datatype syn_ext =
   Syn_Ext of {
    xprods: xprod list,
-    consts: string list,
+    consts: (string * string) list,
    parse_ast_translation: (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list,
    parse_rules: (Ast.ast * Ast.ast) list,
    parse_translation: (string * ((Proof.context -> term list -> term) * stamp)) list,
@@ -301,12 +300,11 @@
     val (xprods, parse_rules') =
      map (mfix_to_xprod is_logtype) mfixes
      |> split_list |> apsnd (rev o flat);
-    val mfix_consts =
-      distinct (op =) (map (fn Mfix x => #3 x) mfixes @ map (fn XProd x => #3 x) xprods);
+    val mfix_consts = map (fn Mfix x => (#3 x, "")) mfixes @ map (fn XProd x => (#3 x, "")) xprods;
   in
    Syn_Ext {
      xprods = xprods,
-      consts = union (op =) consts mfix_consts,
+      consts = mfix_consts @ consts,
      parse_ast_translation = parse_ast_translation,
      parse_rules = parse_rules' @ parse_rules,
      parse_translation = parse_translation,
@@ -318,7 +316,6 @@
 
 val syn_ext = syn_ext' (K false);
 
-fun syn_ext_const_names cs = syn_ext [] cs ([], [], [], []) ([], []);
 fun syn_ext_rules rules = syn_ext [] [] ([], [], [], []) rules;
 fun syn_ext_advanced_trfuns trfuns = syn_ext [] [] trfuns ([], []);
 
diff -r 140f283266b7 -r d622145603ee src/Pure/Syntax/syntax_phases.ML
--- a/src/Pure/Syntax/syntax_phases.ML	Fri Apr 08 21:11:29 2011 +0200
+++ b/src/Pure/Syntax/syntax_phases.ML	Fri Apr 08 22:40:29 2011 +0200
@@ -18,6 +18,40 @@
 structure Syntax_Phases: SYNTAX_PHASES =
 struct
 
+(** markup logical entities **)
+
+fun markup_class ctxt c =
+  [Name_Space.markup_entry (Type.class_space (ProofContext.tsig_of ctxt)) c];
+
+fun markup_type ctxt c =
+  [Name_Space.markup_entry (Type.type_space (ProofContext.tsig_of ctxt)) c];
+
+fun markup_const ctxt c =
+  [Name_Space.markup_entry (Consts.space_of (ProofContext.consts_of ctxt)) c];
+
+fun markup_free ctxt x =
+  [if can Name.dest_skolem x then Markup.skolem else Markup.free] @
+  (if not (Variable.is_body ctxt) orelse Variable.is_fixed ctxt x then []
+   else [Markup.hilite]);
+
+fun markup_var xi = [Markup.name (Term.string_of_vname xi) Markup.var];
+
+fun markup_bound def id =
+  [Markup.properties [(if def then Markup.defN else Markup.refN, id)] Markup.bound];
+
+fun markup_entity ctxt c =
+  (case Syntax.lookup_const (ProofContext.syn_of ctxt) c of
+    SOME "" => []
+  | SOME b => markup_entity ctxt b
+  | NONE =>
+      c |> Lexicon.unmark
+       {case_class = markup_class ctxt,
+        case_type = markup_type ctxt,
+        case_const = markup_const ctxt,
+        case_fixed = markup_free ctxt,
+        case_default = K []});
+
+
+
 (** decode parse trees **)
 
 (* sort_of_term *)
@@ -89,29 +123,10 @@
 
 (* parsetree_to_ast *)
 
-fun markup_const ctxt c =
-  [Name_Space.markup_entry (Consts.space_of (ProofContext.consts_of ctxt)) c];
-
-fun markup_free ctxt x =
-  [if can Name.dest_skolem x then Markup.skolem else Markup.free] @
-  (if not (Variable.is_body ctxt) orelse Variable.is_fixed ctxt x then []
-   else [Markup.hilite]);
-
 fun parsetree_to_ast ctxt constrain_pos trf parsetree =
   let
-    val tsig = ProofContext.tsig_of ctxt;
-
     val get_class = ProofContext.read_class ctxt;
     val get_type = #1 o dest_Type o ProofContext.read_type_name_proper ctxt false;
-    fun markup_class c = [Name_Space.markup_entry (Type.class_space tsig) c];
-    fun markup_type c = [Name_Space.markup_entry (Type.type_space tsig) c];
-
-    val markup_entity = Lexicon.unmark
-      {case_class = markup_class,
-       case_type = markup_type,
-       case_const = markup_const ctxt,
-       case_fixed = markup_free ctxt,
-       case_default = K []};
 
     val reports = Unsynchronized.ref ([]: Position.reports);
     fun report pos = Position.reports reports [pos];
@@ -124,12 +139,12 @@
     fun asts_of (Parser.Node ("_class_name", [Parser.Tip tok])) =
          let
            val c = get_class (Lexicon.str_of_token tok);
-            val _ = report (Lexicon.pos_of_token tok) markup_class c;
+            val _ = report (Lexicon.pos_of_token tok) (markup_class ctxt) c;
          in [Ast.Constant (Lexicon.mark_class c)] end
      | asts_of (Parser.Node ("_type_name", [Parser.Tip tok])) =
          let
            val c = get_type (Lexicon.str_of_token tok);
-            val _ = report (Lexicon.pos_of_token tok) markup_type c;
+            val _ = report (Lexicon.pos_of_token tok) (markup_type ctxt) c;
          in [Ast.Constant (Lexicon.mark_type c)] end
      | asts_of (Parser.Node ("_constrain_position", [pt as Parser.Tip tok])) =
          if constrain_pos then
@@ -141,7 +156,7 @@
            val _ = pts |> List.app
              (fn Parser.Node _ => () | Parser.Tip tok =>
                if Lexicon.valued_token tok then ()
-                else report (Lexicon.pos_of_token tok) markup_entity a);
+                else report (Lexicon.pos_of_token tok) (markup_entity ctxt) a);
          in [trans a (maps asts_of pts)] end
      | asts_of (Parser.Tip tok) =
          if Lexicon.valued_token tok
@@ -185,9 +200,6 @@
      ((true, #1 (Term.dest_Const (ProofContext.read_const_proper ctxt false a)))
        handle ERROR _ => (false, Consts.intern (ProofContext.consts_of ctxt) a));
     val get_free = ProofContext.intern_skolem ctxt;
-    fun markup_var xi = [Markup.name (Term.string_of_vname xi) Markup.var];
-    fun markup_bound def id =
-      [Markup.properties [(if def then Markup.defN else Markup.refN, id)] Markup.bound];
 
     val decodeT = typ_of_term (ProofContext.get_sort ctxt (term_sorts tm));
 
@@ -390,7 +402,8 @@
 
     fun constify (ast as Ast.Constant _) = ast
      | constify (ast as Ast.Variable x) =
-          if Syntax.is_const syn x orelse Long_Name.is_qualified x then Ast.Constant x
+          if is_some (Syntax.lookup_const syn x) orelse Long_Name.is_qualified x
+          then Ast.Constant x
          else ast
      | constify (Ast.Appl asts) = Ast.Appl (map constify asts);
 
@@ -626,7 +639,7 @@
     val syn = ProofContext.syn_of ctxt;
     val idents = Local_Syntax.idents_of (ProofContext.syntax_of ctxt);
   in
-    unparse_t (term_to_ast idents (Syntax.is_const syn))
+    unparse_t (term_to_ast idents (is_some o Syntax.lookup_const syn))
      (Printer.pretty_term_ast (not (Pure_Thy.old_appl_syntax thy)))
      Markup.term ctxt
   end;
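
Note (not part of the changeset): a minimal Isabelle/ML sketch of how client code adapts
to the revised interface, where Syntax.lookup_const replaces the boolean Syntax.is_const
and returns the name of the logical entity behind a syntax constant.  Only
Syntax.lookup_const and ProofContext.syn_of are taken from the patch above; the helper
names below are hypothetical.

  (*old boolean membership test, now expressed via the option-valued lookup*)
  fun is_syntax_const ctxt c =
    is_some (Syntax.lookup_const (ProofContext.syn_of ctxt) c);

  (*table entries record the originating entity: SOME "" marks an authentic syntax
    constant, SOME b points back to the underlying logical constant b (e.g. for
    binders), NONE means the name is not a syntax constant at all*)
  fun originating_entity ctxt c =
    (case Syntax.lookup_const (ProofContext.syn_of ctxt) c of
      SOME "" => c
    | SOME b => b
    | NONE => c);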