--- a/src/HOL/Eisbach/Tests.thy Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Eisbach/Tests.thy Wed Dec 09 18:28:28 2015 +0100
@@ -480,7 +480,7 @@
subsection \<open>Shallow parser tests\<close>
-method all_args for A B methods m1 m2 uses f1 f2 declares my_thms_named = (fail)
+method all_args for A B methods m1 m2 uses f1 f2 declares my_thms_named = fail
lemma True
by (all_args True False \<open>-\<close> \<open>fail\<close> f1: TrueI f2: TrueI my_thms_named: TrueI | rule TrueI)
--- a/src/HOL/Eisbach/match_method.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Eisbach/match_method.ML Wed Dec 09 18:28:28 2015 +0100
@@ -70,8 +70,8 @@
(*FIXME: Dynamic facts modify the background theory, so we have to resort
to token replacement for matched facts. *)
-fun dynamic_fact ctxt =
- bound_term -- Args.opt_attribs (Attrib.check_name ctxt);
+val dynamic_fact =
+ Scan.lift bound_term -- Attrib.opt_attribs;
type match_args = {multi : bool, cut : int};
@@ -86,17 +86,17 @@
ss {multi = false, cut = ~1});
fun parse_named_pats match_kind =
- Args.context :|-- (fn ctxt =>
- Scan.lift (Parse.and_list1
- (Scan.option (dynamic_fact ctxt --| Args.colon) :--
- (fn opt_dyn =>
- if is_none opt_dyn orelse nameable_match match_kind
- then Parse_Tools.name_term -- parse_match_args
- else
- let val b = #1 (the opt_dyn)
- in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end))
- -- for_fixes -- (@{keyword "\<Rightarrow>"} |-- Parse.token Parse.cartouche))
- >> (fn ((ts, fixes), cartouche) =>
+ Args.context --
+ Parse.and_list1'
+ (Scan.option (dynamic_fact --| Scan.lift Args.colon) :--
+ (fn opt_dyn =>
+ if is_none opt_dyn orelse nameable_match match_kind
+ then Scan.lift (Parse_Tools.name_term -- parse_match_args)
+ else
+ let val b = #1 (the opt_dyn)
+ in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end)) --
+ Scan.lift (for_fixes -- (@{keyword "\<Rightarrow>"} |-- Parse.token Parse.cartouche))
+ >> (fn ((ctxt, ts), (fixes, cartouche)) =>
(case Token.get_value cartouche of
SOME (Token.Source src) =>
let
@@ -205,7 +205,7 @@
val morphism =
Variable.export_morphism ctxt6
(ctxt
- |> Token.declare_maxidx_src src
+ |> fold Token.declare_maxidx src
|> Variable.declare_maxidx (Variable.maxidx_of ctxt6));
val pats' = map (Term.map_types Type_Infer.paramify_vars #> Morphism.term morphism) pats;
@@ -213,14 +213,13 @@
fun close_src src =
let
- val src' = Token.closure_src src |> Token.transform_src morphism;
+ val src' = src |> map (Token.closure #> Token.transform morphism);
val _ =
- map2 (fn tok1 => fn tok2 =>
- (case Token.get_value tok2 of
- SOME value => Token.assign (SOME value) tok1
- | NONE => ()))
- (Token.args_of_src src)
- (Token.args_of_src src');
+ (Token.args_of_src src ~~ Token.args_of_src src')
+ |> List.app (fn (tok, tok') =>
+ (case Token.get_value tok' of
+ SOME value => ignore (Token.assign (SOME value) tok)
+ | NONE => ()));
in src' end;
val binds' =
@@ -241,11 +240,11 @@
val match_args = map (fn (_, (_, match_args)) => match_args) ts;
val binds'' = (binds' ~~ match_args) ~~ pats';
- val src' = Token.transform_src morphism src;
+ val src' = map (Token.transform morphism) src;
val _ = Token.assign (SOME (Token.Source src')) cartouche;
in
(binds'', real_fixes', text)
- end)));
+ end));
fun dest_internal_fact t =
@@ -285,14 +284,14 @@
fun apply_attribute (head, (fact, atts)) (fact_insts, ctxt) =
let
val morphism = fact_morphism fact_insts;
- val atts' = map (Attrib.attribute ctxt o Token.transform_src morphism) atts;
+ val atts' = map (Attrib.attribute ctxt o map (Token.transform morphism)) atts;
val (fact'', ctxt') = fold_map (Thm.proof_attributes atts') fact ctxt;
in ((head, fact'') :: fact_insts, ctxt') end;
(*TODO: What to do about attributes that raise errors?*)
val (fact_insts', ctxt') = fold_rev (apply_attribute) fact_insts ([], ctxt);
- val text' = Method.map_source (Token.transform_src (fact_morphism fact_insts')) text;
+ val text' = (Method.map_source o map) (Token.transform (fact_morphism fact_insts')) text;
in
(text', ctxt')
end;
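
Note on the recurring idiom in this changeset: since Token.src is now an ordinary token list (see the
src/Pure/Isar/token.ML hunks further down), the former bulk operations Token.transform_src,
Token.closure_src, Token.init_assignable_src and Token.declare_maxidx_src are expressed by mapping
(or folding) the corresponding per-token functions, hence the (Method.map_source o map) and
(map o map) compositions above. A minimal sketch only, not part of any interface:

  (* sketch: the per-token operations compose to the former bulk operations *)
  fun transform_src (phi: morphism) : Token.src -> Token.src = map (Token.transform phi);
  val closure_src : Token.src -> Token.src = map Token.closure;
  fun declare_maxidx_src (src: Token.src) = fold Token.declare_maxidx src;
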
--- a/src/HOL/Eisbach/method_closure.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Eisbach/method_closure.ML Wed Dec 09 18:28:28 2015 +0100
@@ -87,13 +87,13 @@
fun free_aware_attribute thy {handle_all_errs, declaration} src (context, thm) =
let
- val src' = Token.init_assignable_src src;
+ val src' = map Token.init_assignable src;
fun apply_att thm = (Attrib.attribute_global thy src') (context, thm);
val _ =
if handle_all_errs then (try apply_att Drule.dummy_thm; ())
else (apply_att Drule.dummy_thm; ()) handle THM _ => () | TERM _ => () | TYPE _ => ();
- val src'' = Token.closure_src src';
+ val src'' = map Token.closure src';
val thms =
map_filter Token.get_value (Token.args_of_src src'')
|> map_filter (fn (Token.Fact (_, f)) => SOME f | _ => NONE)
@@ -109,7 +109,8 @@
let
val name = Binding.name_of binding;
val name' = Attrib.check_name_generic (Context.Theory thy) (name, Position.none);
- fun get_src src = Token.src (name', Token.range_of_src src) (Token.args_of_src src);
+ fun get_src src =
+ Token.make_src (name', Position.set_range (Token.range_of src)) (Token.args_of_src src);
in
Attrib.define_global binding (free_aware_attribute thy args o get_src) "" thy
|> snd
@@ -121,24 +122,22 @@
fun read_inner_method ctxt src =
let
val toks = Token.args_of_src src;
- val parser = Parse.!!! (Method.parser' ctxt 0 --| Scan.ahead Parse.eof);
+ val parser =
+ Parse.!!! (Method.parser 0 --| Scan.ahead Parse.eof)
+ >> apfst (Method.check_text ctxt);
in
(case Scan.read Token.stopper parser toks of
SOME (method_text, pos) => (Method.report (method_text, pos); method_text)
| NONE => error ("Failed to parse method" ^ Position.here (#2 (Token.name_of_src src))))
end;
-fun read_text_closure ctxt source =
+fun read_text_closure ctxt src0 =
let
- val src = Token.init_assignable_src source;
- val method_text = read_inner_method ctxt src;
+ val src1 = map Token.init_assignable src0;
+ val method_text = read_inner_method ctxt src1;
val method_text' = Method.map_source (Method.method_closure ctxt) method_text;
- (*FIXME: Does not markup method parameters. Needs to be done by Method.parser' directly. *)
- val _ =
- Method.map_source (fn src => (try (Method.check_name ctxt) (Token.name_of_src src); src))
- method_text;
- val src' = Token.closure_src src;
- in (src', method_text') end;
+ val src2 = map Token.closure src1;
+ in (src2, method_text') end;
fun read_inner_text_closure ctxt input =
let
@@ -146,7 +145,7 @@
val toks =
Input.source_explode input
|> Token.read_no_commands keywords (Scan.one Token.not_eof);
- in read_text_closure ctxt (Token.src ("", Input.pos_of input) toks) end;
+ in read_text_closure ctxt (Token.make_src ("", Input.pos_of input) toks) end;
val parse_method =
@@ -154,9 +153,9 @@
(case Token.get_value tok of
NONE =>
let
- val input = Token.input_of tok;
- val (src, text) = read_inner_text_closure ctxt input;
- val _ = Token.assign (SOME (Token.Source src)) tok;
+ val input = Token.input_of tok;
+ val (src, text) = read_inner_text_closure ctxt input;
+ val _ = Token.assign (SOME (Token.Source src)) tok;
in text end
| SOME (Token.Source src) => read_inner_method ctxt src
| SOME _ =>
@@ -181,7 +180,7 @@
| do_parse t = error ("Unexpected method parameter: " ^ Syntax.string_of_term ctxt' t);
fun rep [] x = Scan.succeed [] x
- | rep (t :: ts) x = (do_parse t -- rep ts >> op ::) x;
+ | rep (t :: ts) x = (do_parse t ::: rep ts) x;
fun check ts =
let
@@ -213,7 +212,7 @@
fun instantiate_text env text =
let val morphism = Morphism.term_morphism "instantiate_text" (Envir.norm_term env);
- in Method.map_source (Token.transform_src morphism) text end;
+ in (Method.map_source o map) (Token.transform morphism) text end;
fun evaluate_dynamic_thm ctxt name =
(case try (Named_Theorems.get ctxt) name of
@@ -222,7 +221,7 @@
fun evaluate_named_theorems ctxt =
- (Method.map_source o Token.map_values)
+ (Method.map_source o map o Token.map_nested_values)
(fn Token.Fact (SOME name, _) =>
Token.Fact (SOME name, evaluate_dynamic_thm ctxt name)
| x => x);
@@ -274,7 +273,7 @@
fun do_parse t = parse_method >> pair t;
fun rep [] x = Scan.succeed [] x
- | rep (t :: ts) x = (do_parse t -- rep ts >> op ::) x;
+ | rep (t :: ts) x = (do_parse t ::: rep ts) x;
in rep method_names >> fold bind_method end;
@@ -289,7 +288,7 @@
(Symtab.update (Local_Theory.full_name lthy binding,
(((map (Morphism.term phi) fixes),
(map (Morphism_name phi) declares, map (Morphism_name phi) methods)),
- Method.map_source (Token.transform_src phi) text))));
+ (Method.map_source o map) (Token.transform phi) text))));
fun get_inner_method ctxt (xname, pos) =
let
@@ -355,10 +354,10 @@
Variable.export_morphism lthy3
(lthy
|> Proof_Context.transfer (Proof_Context.theory_of lthy3)
- |> Token.declare_maxidx_src src
+ |> fold Token.declare_maxidx src
|> Variable.declare_maxidx (Variable.maxidx_of lthy3));
- val text' = Method.map_source (Token.transform_src morphism) text;
+ val text' = (Method.map_source o map) (Token.transform morphism) text;
val term_args' = map (Morphism.term morphism) term_args;
fun real_exec ts ctxt =
@@ -383,8 +382,9 @@
((Scan.optional (@{keyword "methods"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) --
(Scan.optional (@{keyword "uses"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) [])) --
(Scan.optional (@{keyword "declares"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) --
- Parse.!!! (@{keyword "="}
- |-- (Parse.position (Parse.args1 (K true)) >> (fn (args, pos) => Token.src ("", pos) args)))
- >> (fn ((((name, vars), (methods, uses)), attribs), source) =>
- method_definition_cmd name vars uses attribs methods source));
+ Parse.!!! (@{keyword "="} |-- Parse.position (Parse.args1 (K true))
+ >> (fn (args, pos) => Token.make_src ("", pos) args))
+ >> (fn ((((name, vars), (methods, uses)), attribs), src) =>
+ method_definition_cmd name vars uses attribs methods src));
+
end;
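
The method command now stores its right-hand side as an anonymous source: an empty-named head token
followed by the unparsed body tokens, which read_inner_method later retrieves via Token.args_of_src.
A small sketch, where pos and body_toks stand for the parsed position and token list:

  val body_src = Token.make_src ("", pos) body_toks;
  val (name, _) = Token.name_of_src body_src;   (* "" -- no method name attached *)
  val args = Token.args_of_src body_src;        (* the original body tokens, fed to Method.parser *)
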
--- a/src/HOL/Eisbach/parse_tools.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Eisbach/parse_tools.ML Wed Dec 09 18:28:28 2015 +0100
@@ -6,7 +6,6 @@
signature PARSE_TOOLS =
sig
-
datatype ('a, 'b) parse_val =
Real_Val of 'a
| Parse_Val of 'b * ('a -> unit);
@@ -33,7 +32,7 @@
fun parse_term_val scan =
Scan.ahead Parse.not_eof -- Scan.ahead (Scan.option Args.internal_term) -- scan >>
(fn ((_, SOME t), _) => Real_Val t
- | ((tok, NONE), v) => Parse_Val (v, fn t => Token.assign (SOME (Token.Term t)) tok));
+ | ((tok, NONE), v) => Parse_Val (v, fn t => ignore (Token.assign (SOME (Token.Term t)) tok)));
val name_term = parse_term_val Args.name_inner_syntax;
--- a/src/HOL/Library/old_recdef.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Library/old_recdef.ML Wed Dec 09 18:28:28 2015 +0100
@@ -2873,8 +2873,7 @@
val hints =
@{keyword "("} |--
- Parse.!!! (Parse.position @{keyword "hints"} -- Parse.args --| @{keyword ")"})
- >> uncurry Token.src;
+ Parse.!!! ((Parse.token @{keyword "hints"} ::: Parse.args) --| @{keyword ")"});
val recdef_decl =
Scan.optional
--- a/src/HOL/Library/simps_case_conv.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Library/simps_case_conv.ML Wed Dec 09 18:28:28 2015 +0100
@@ -212,7 +212,7 @@
fun case_of_simps_cmd (bind, thms_ref) lthy =
let
val bind' = apsnd (map (Attrib.check_src lthy)) bind
- val thm = (Attrib.eval_thms lthy) thms_ref |> to_case lthy
+ val thm = Attrib.eval_thms lthy thms_ref |> to_case lthy
in
Local_Theory.note (bind', [thm]) lthy |> snd
end
--- a/src/HOL/Tools/Lifting/lifting_setup.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML Wed Dec 09 18:28:28 2015 +0100
@@ -247,10 +247,10 @@
val thy = Proof_Context.theory_of lthy
val dummy_thm = Thm.transfer thy Drule.dummy_thm
- val pointer =
- Token.explode (Thy_Header.get_keywords thy) Position.none (cartouche bundle_name)
val restore_lifting_att =
- ([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer])
+ ([dummy_thm],
+ [map (Token.make_string o rpair Position.none)
+ ["Lifting.lifting_restore_internal", bundle_name]])
in
lthy
|> Local_Theory.declaration {syntax = false, pervasive = true}
@@ -936,7 +936,7 @@
val lifting_restore_internal_attribute_setup =
Attrib.setup @{binding lifting_restore_internal}
- (Scan.lift Parse.cartouche >>
+ (Scan.lift Parse.string >>
(fn name => Thm.declaration_attribute (K (lifting_restore_internal name))))
"restoring lifting infrastructure; internal attribute; not meant to be used directly by regular users"
@@ -982,14 +982,15 @@
fun pointer_of_bundle_name bundle_name ctxt =
let
val bundle = Bundle.get_bundle_cmd ctxt bundle_name
+ fun err () = error "The provided bundle is not a lifting bundle"
in
- case bundle of
+ (case bundle of
[(_, [arg_src])] =>
let
- val (name, _) = Token.syntax (Scan.lift Parse.cartouche) arg_src ctxt
- handle ERROR _ => error "The provided bundle is not a lifting bundle."
+ val (name, _) = Token.syntax (Scan.lift Parse.string) arg_src ctxt
+ handle ERROR _ => err ()
in name end
- | _ => error "The provided bundle is not a lifting bundle."
+ | _ => err ())
end
fun pointer_of_bundle_binding ctxt binding = Name_Space.full_name (Name_Space.naming_of
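
The stored pointer to a lifting bundle is now a plain string token instead of an exploded cartouche;
Token.make_string (introduced in src/Pure/Isar/token.ML below) packages a string and a position into
a single token. A hypothetical sketch of the saved declaration payload for a bundle named
"int.lifting" (the bundle name is made up for illustration):

  val restore_src =
    map (Token.make_string o rpair Position.none)
      ["Lifting.lifting_restore_internal", "int.lifting"];
  (* the head token names the attribute; the remaining token is the string argument
     expected by the Scan.lift Parse.string parser of lifting_restore_internal *)
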
--- a/src/Pure/Isar/args.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/args.ML Wed Dec 09 18:28:28 2015 +0100
@@ -9,6 +9,7 @@
sig
val context: Proof.context context_parser
val theory: theory context_parser
+ val symbolic: Token.T parser
val $$$ : string -> string parser
val add: string parser
val del: string parser
@@ -32,11 +33,10 @@
val text: string parser
val binding: binding parser
val alt_name: string parser
- val symbol: string parser
val liberal_name: string parser
val var: indexname parser
val internal_source: Token.src parser
- val internal_name: (string * morphism) parser
+ val internal_name: Token.name_value parser
val internal_typ: typ parser
val internal_term: term parser
val internal_fact: thm list parser
@@ -59,9 +59,6 @@
val type_name: {proper: bool, strict: bool} -> string context_parser
val const: {proper: bool, strict: bool} -> string context_parser
val goal_spec: ((int -> tactic) -> tactic) context_parser
- val attribs: (xstring * Position.T -> string) -> Token.src list parser
- val opt_attribs: (xstring * Position.T -> string) -> Token.src list parser
- val checked_name: (xstring * Position.T -> string) -> string parser
end;
structure Args: ARGS =
@@ -121,8 +118,7 @@
val binding = Parse.position name >> Binding.make;
val alt_name = alt_string >> Token.content_of;
-val symbol = symbolic >> Token.content_of;
-val liberal_name = symbol || name;
+val liberal_name = (symbolic >> Token.content_of) || name;
val var = (ident >> Token.content_of) :|-- (fn x =>
(case Lexicon.read_variable x of SOME v => Scan.succeed v | NONE => Scan.fail));
@@ -133,39 +129,38 @@
fun value dest = Scan.some (fn arg =>
(case Token.get_value arg of SOME v => (SOME (dest v) handle Match => NONE) | NONE => NONE));
-fun evaluate mk eval arg =
- let val x = eval arg in (Token.assign (SOME (mk x)) arg; x) end;
-
val internal_source = value (fn Token.Source src => src);
-val internal_name = value (fn Token.Name a => a);
+val internal_name = value (fn Token.Name (a, _) => a);
val internal_typ = value (fn Token.Typ T => T);
val internal_term = value (fn Token.Term t => t);
val internal_fact = value (fn Token.Fact (_, ths) => ths);
val internal_attribute = value (fn Token.Attribute att => att);
val internal_declaration = value (fn Token.Declaration decl => decl);
-fun named_source read = internal_source || name_token >> evaluate Token.Source read;
+fun named_source read =
+ internal_source || name_token >> Token.evaluate Token.Source read;
fun named_typ read =
- internal_typ || name_token >> evaluate Token.Typ (read o Token.inner_syntax_of);
+ internal_typ || name_token >> Token.evaluate Token.Typ (read o Token.inner_syntax_of);
fun named_term read =
- internal_term || name_token >> evaluate Token.Term (read o Token.inner_syntax_of);
+ internal_term || name_token >> Token.evaluate Token.Term (read o Token.inner_syntax_of);
fun named_fact get =
internal_fact ||
- name_token >> evaluate Token.Fact (get o Token.content_of) >> #2 ||
- alt_string >> evaluate Token.Fact (get o Token.inner_syntax_of) >> #2;
+ name_token >> Token.evaluate Token.Fact (get o Token.content_of) >> #2 ||
+ alt_string >> Token.evaluate Token.Fact (get o Token.inner_syntax_of) >> #2;
fun named_attribute att =
internal_attribute ||
- name_token >> evaluate Token.Attribute (fn tok => att (Token.content_of tok, Token.pos_of tok));
+ name_token >>
+ Token.evaluate Token.Attribute (fn tok => att (Token.content_of tok, Token.pos_of tok));
fun text_declaration read =
- internal_declaration || text_token >> evaluate Token.Declaration (read o Token.input_of);
+ internal_declaration || text_token >> Token.evaluate Token.Declaration (read o Token.input_of);
fun cartouche_declaration read =
- internal_declaration || cartouche >> evaluate Token.Declaration (read o Token.input_of);
+ internal_declaration || cartouche >> Token.evaluate Token.Declaration (read o Token.input_of);
(* terms and types *)
@@ -200,23 +195,4 @@
val goal = Parse.keyword_improper "[" |-- Parse.!!! (from_to --| Parse.keyword_improper "]");
fun goal_spec x = Scan.lift (Scan.optional goal (fn tac => tac 1)) x;
-
-(* attributes *)
-
-fun attribs check =
- let
- fun intern tok = check (Token.content_of tok, Token.pos_of tok);
- val attrib_name = internal_name >> #1 || (symbolic || name_token) >> evaluate Token.name0 intern;
- val attrib = Parse.position attrib_name -- Parse.!!! Parse.args >> uncurry Token.src;
- in $$$ "[" |-- Parse.!!! (Parse.list attrib --| $$$ "]") end;
-
-fun opt_attribs check = Scan.optional (attribs check) [];
-
-
-(* checked name *)
-
-fun checked_name check =
- let fun intern tok = check (Token.content_of tok, Token.pos_of tok);
- in internal_name >> #1 || Parse.token Parse.xname >> evaluate Token.name0 intern end;
-
end;
--- a/src/Pure/Isar/attrib.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/attrib.ML Wed Dec 09 18:28:28 2015 +0100
@@ -17,6 +17,9 @@
val check_name_generic: Context.generic -> xstring * Position.T -> string
val check_name: Proof.context -> xstring * Position.T -> string
val check_src: Proof.context -> Token.src -> Token.src
+ val attribs: Token.src list context_parser
+ val opt_attribs: Token.src list context_parser
+ val markup_extern: Proof.context -> string -> Markup.T * xstring
val pretty_attribs: Proof.context -> Token.src list -> Pretty.T list
val pretty_binding: Proof.context -> binding -> string -> Pretty.T list
val attribute: Proof.context -> Token.src -> attribute
@@ -49,8 +52,6 @@
val thm: thm context_parser
val thms: thm list context_parser
val multi_thm: thm list context_parser
- val check_attribs: Proof.context -> Token.src list -> Token.src list
- val read_attribs: Proof.context -> Input.source -> Token.src list
val transform_facts: morphism ->
(Attrib.binding * (thm list * Token.src list) list) list ->
(Attrib.binding * (thm list * Token.src list) list) list
@@ -150,12 +151,26 @@
val check_name = check_name_generic o Context.Proof;
fun check_src ctxt src =
- (Context_Position.report ctxt (Token.range_of_src src) Markup.language_attribute;
- #1 (Token.check_src ctxt (get_attributes ctxt) src));
+ let
+ val _ =
+ if Token.checked_src src then ()
+ else
+ Context_Position.report ctxt
+ (Position.set_range (Token.range_of src)) Markup.language_attribute;
+ in #1 (Token.check_src ctxt get_attributes src) end;
+
+val attribs =
+ Args.context -- Scan.lift Parse.attribs
+ >> (fn (ctxt, srcs) => map (check_src ctxt) srcs);
+
+val opt_attribs = Scan.optional attribs [];
(* pretty printing *)
+fun markup_extern ctxt =
+ Name_Space.markup_extern ctxt (Name_Space.space_of_table (get_attributes ctxt));
+
fun pretty_attribs _ [] = []
| pretty_attribs ctxt srcs = [Pretty.enum "," "[" "]" (map (Token.pretty_src ctxt) srcs)];
@@ -227,7 +242,7 @@
(* internal attribute *)
fun internal att =
- Token.src ("Pure.attribute", Position.none)
+ Token.make_src ("Pure.attribute", Position.none)
[Token.make_value "<attribute>" (Token.Attribute att)];
val _ = Theory.setup
@@ -256,7 +271,7 @@
let val (a, ths) = get (Facts.Named ((name, pos), NONE))
in (if is_sel then NONE else a, ths) end;
in
- Parse.$$$ "[" |-- Args.attribs (check_name_generic context) --| Parse.$$$ "]" >> (fn srcs =>
+ Parse.$$$ "[" |-- Scan.pass context attribs --| Parse.$$$ "]" >> (fn srcs =>
let
val atts = map (attribute_generic context) srcs;
val (th', context') = fold (uncurry o Thm.apply_attribute) atts (Drule.dummy_thm, context);
@@ -268,7 +283,7 @@
(fn ((name, pos), sel) =>
Args.named_fact (get_named (is_some sel) pos) --| Scan.option Parse.thm_sel
>> (fn fact => (name, Facts.Named ((name, pos), sel), fact))))
- -- Args.opt_attribs (check_name_generic context) >> (fn ((name, thmref, fact), srcs) =>
+ -- Scan.pass context opt_attribs >> (fn ((name, thmref, fact), srcs) =>
let
val ths = Facts.select thmref fact;
val atts = map (attribute_generic context) srcs;
@@ -286,30 +301,11 @@
end;
-(* read attribute source *)
-
-fun check_attribs ctxt raw_srcs =
- let
- val srcs = map (check_src ctxt) raw_srcs;
- val _ = map (attribute ctxt) srcs;
- in srcs end;
-
-fun read_attribs ctxt source =
- let
- val keywords = Thy_Header.get_keywords' ctxt;
- val syms = Input.source_explode source;
- in
- (case Token.read_no_commands keywords Parse.attribs syms of
- [raw_srcs] => check_attribs ctxt raw_srcs
- | _ => error ("Malformed attributes" ^ Position.here (Input.pos_of source)))
- end;
-
-
(* transform fact expressions *)
fun transform_facts phi = map (fn ((a, atts), bs) =>
- ((Morphism.binding phi a, map (Token.transform_src phi) atts),
- bs |> map (fn (ths, btts) => (Morphism.fact phi ths, map (Token.transform_src phi) btts))));
+ ((Morphism.binding phi a, (map o map) (Token.transform phi) atts),
+ bs |> map (fn (ths, btts) => (Morphism.fact phi ths, (map o map) (Token.transform phi) btts))));
@@ -321,9 +317,9 @@
fun apply_att src (context, th) =
let
- val src1 = Token.init_assignable_src src;
+ val src1 = map Token.init_assignable src;
val result = attribute_generic context src1 (context, th);
- val src2 = Token.closure_src src1;
+ val src2 = map Token.closure src1;
in (src2, result) end;
fun err msg src =
--- a/src/Pure/Isar/bundle.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/bundle.ML Wed Dec 09 18:28:28 2015 +0100
@@ -35,7 +35,7 @@
type bundle = (thm list * Token.src list) list;
fun transform_bundle phi : bundle -> bundle =
- map (fn (fact, atts) => (Morphism.fact phi fact, map (Token.transform_src phi) atts));
+ map (fn (fact, atts) => (Morphism.fact phi fact, (map o map) (Token.transform phi) atts));
structure Data = Generic_Data
(
--- a/src/Pure/Isar/element.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/element.ML Wed Dec 09 18:28:28 2015 +0100
@@ -108,7 +108,7 @@
term = Morphism.term phi,
pattern = Morphism.term phi,
fact = Morphism.fact phi,
- attrib = Token.transform_src phi};
+ attrib = map (Token.transform phi)};
@@ -513,14 +513,14 @@
fun activate_i elem ctxt =
let
val elem' =
- (case map_ctxt_attrib Token.init_assignable_src elem of
+ (case (map_ctxt_attrib o map) Token.init_assignable elem of
Defines defs =>
Defines (defs |> map (fn ((a, atts), (t, ps)) =>
((Thm.def_binding_optional (Binding.name (#1 (#1 (Local_Defs.cert_def ctxt t)))) a, atts),
(t, ps))))
| e => e);
val ctxt' = Context.proof_map (init elem') ctxt;
- in (map_ctxt_attrib Token.closure_src elem', ctxt') end;
+ in ((map_ctxt_attrib o map) Token.closure elem', ctxt') end;
fun activate raw_elem ctxt =
let val elem = raw_elem |> map_ctxt
--- a/src/Pure/Isar/locale.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/locale.ML Wed Dec 09 18:28:28 2015 +0100
@@ -577,7 +577,7 @@
local
val trim_fact = map Thm.trim_context;
-val trim_srcs = (map o Token.map_facts_src) trim_fact;
+val trim_srcs = (map o map o Token.map_facts) trim_fact;
fun trim_context_facts facts = facts |> map (fn ((b, atts), args) =>
((b, trim_srcs atts), map (fn (a, more_atts) => (trim_fact a, trim_srcs more_atts)) args));
--- a/src/Pure/Isar/method.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/method.ML Wed Dec 09 18:28:28 2015 +0100
@@ -58,6 +58,7 @@
val print_methods: bool -> Proof.context -> unit
val check_name: Proof.context -> xstring * Position.T -> string
val check_src: Proof.context -> Token.src -> Token.src
+ val check_text: Proof.context -> text -> text
val method_syntax: (Proof.context -> method) context_parser ->
Token.src -> Proof.context -> method
val setup: binding -> (Proof.context -> method) context_parser -> string -> theory -> theory
@@ -82,7 +83,6 @@
val position: text_range option -> Position.T
val reports_of: text_range -> Position.report list
val report: text_range -> unit
- val parser': Proof.context -> int -> text_range parser
val parser: int -> text_range parser
val parse: text_range parser
end;
@@ -335,7 +335,7 @@
fun primitive_text r = Basic (SIMPLE_METHOD o PRIMITIVE o r);
val succeed_text = Basic (K succeed);
-val standard_text = Source (Token.src ("standard", Position.none) []);
+val standard_text = Source (Token.make_src ("standard", Position.none) []);
val this_text = Basic this;
val done_text = Basic (K (SIMPLE_METHOD all_tac));
fun sorry_text int = Basic (fn ctxt => cheating ctxt int);
@@ -390,11 +390,11 @@
in #1 o Name_Space.check context (get_methods context) end;
fun check_src ctxt =
- #1 o Token.check_src ctxt (get_methods (Context.Proof ctxt));
+ #1 o Token.check_src ctxt (get_methods o Context.Proof);
-fun checked_info ctxt name =
- let val space = Name_Space.space_of_table (get_methods (Context.Proof ctxt))
- in (Name_Space.kind_of space, Name_Space.markup space name) end;
+fun check_text ctxt (Source src) = Source (check_src ctxt src)
+ | check_text _ (Basic m) = Basic m
+ | check_text ctxt (Combinator (x, y, body)) = Combinator (x, y, map (check_text ctxt) body);
(* method setup *)
@@ -422,10 +422,10 @@
fun method_closure ctxt src =
let
- val src' = Token.init_assignable_src src;
+ val src' = map Token.init_assignable src;
val ctxt' = Context_Position.not_really ctxt;
val _ = Seq.pull (method ctxt' src' ctxt' [] (Goal.protect 0 Drule.dummy_thm));
- in Token.closure_src src' end;
+ in map Token.closure src' end;
val closure = Config.bool (Config.declare ("Method.closure", @{here}) (K (Config.Bool true)));
@@ -552,11 +552,7 @@
| _ =>
let
val ctxt = Context.proof_of context;
- fun prep_att src =
- let
- val src' = Attrib.check_src ctxt src;
- val _ = List.app (Token.assign NONE) (Token.args_of_src src');
- in src' end;
+ val prep_att = Attrib.check_src ctxt #> map (Token.assign NONE);
val thms =
map (fn (a, bs) => (Proof_Context.get_fact ctxt a, map prep_att bs)) xthms;
val facts =
@@ -633,19 +629,16 @@
fun is_symid_meth s =
s <> "|" andalso s <> "?" andalso s <> "+" andalso Token.ident_or_symbolic s;
-fun gen_parser check_ctxt pri =
+in
+
+fun parser pri =
let
- val (meth_name, mk_src) =
- (case check_ctxt of
- NONE => (Parse.xname, Token.src)
- | SOME ctxt =>
- (Args.checked_name (fn (xname, _) => check_name ctxt (xname, Position.none)),
- fn (name, pos) => fn args => Token.src_checked (name, pos) args (checked_info ctxt name)));
+ val meth_name = Parse.token Parse.xname;
fun meth5 x =
- (Parse.position meth_name >> (fn name => Source (mk_src name [])) ||
+ (meth_name >> (Source o single) ||
Scan.ahead Parse.cartouche |-- Parse.not_eof >> (fn tok =>
- Source (mk_src ("cartouche", Token.pos_of tok) [tok])) ||
+ Source (Token.make_src ("cartouche", Position.none) [tok])) ||
Parse.$$$ "(" |-- Parse.!!! (meth0 --| Parse.$$$ ")")) x
and meth4 x =
(meth5 -- Parse.position (Parse.$$$ "?")
@@ -658,7 +651,7 @@
Combinator (combinator_info [pos1, pos2], Select_Goals n, [m])) ||
meth5) x
and meth3 x =
- (Parse.position meth_name -- Parse.args1 is_symid_meth >> (Source o uncurry mk_src) ||
+ (meth_name ::: Parse.args1 is_symid_meth >> Source ||
meth4) x
and meth2 x =
(Parse.enum1_positions "," meth3
@@ -675,10 +668,6 @@
handle General.Subscript => raise Fail ("Bad method parser priority " ^ string_of_int pri);
in Scan.trace meth >> (fn (m, toks) => (m, Token.range_of toks)) end;
-in
-
-val parser' = gen_parser o SOME;
-val parser = gen_parser NONE;
val parse = parser 4;
end;
@@ -694,7 +683,7 @@
setup @{binding "-"} (Scan.succeed (K insert_facts))
"insert current facts, nothing else" #>
setup @{binding goal_cases} (Scan.lift (Scan.repeat Args.name_token) >> (fn names => fn ctxt =>
- METHOD_CASES (fn facts => fn st =>
+ METHOD_CASES (fn _ => fn st =>
let
val _ =
(case drop (Thm.nprems_of st) names of
--- a/src/Pure/Isar/parse.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/parse.ML Wed Dec 09 18:28:28 2015 +0100
@@ -435,7 +435,7 @@
(* attributes *)
-val attrib = position liberal_name -- !!! args >> uncurry Token.src;
+val attrib = token liberal_name ::: !!! args;
val attribs = $$$ "[" |-- list attrib --| $$$ "]";
val opt_attribs = Scan.optional attribs [];
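
Here p ::: q is the scanner combinator that conses a single result onto a list result, i.e. it is
interchangeable with p -- q >> op :: (compare the rep functions in method_closure.ML above). With
attribute sources being plain token lists, an attribute is simply its name token consed onto its
argument tokens; a sketch of the equivalence:

  val attrib  = Parse.token Parse.liberal_name ::: Parse.!!! Parse.args;
  val attrib' = Parse.token Parse.liberal_name -- Parse.!!! Parse.args >> op ::;
  (* both denote the same Token.T list parser *)
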
--- a/src/Pure/Isar/proof_context.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/proof_context.ML Wed Dec 09 18:28:28 2015 +0100
@@ -123,6 +123,8 @@
val get_fact_single: Proof.context -> Facts.ref -> thm
val get_thms: Proof.context -> xstring -> thm list
val get_thm: Proof.context -> xstring -> thm
+ val add_thms_dynamic: binding * (Context.generic -> thm list) ->
+ Proof.context -> string * Proof.context
val note_thmss: string -> (Thm.binding * (thm list * attribute list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val put_thms: bool -> string * thm list option -> Proof.context -> Proof.context
@@ -234,9 +236,13 @@
fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep);
-fun map_data f =
- Data.map (fn Data {mode, syntax, tsig, consts, facts, cases} =>
- make_data (f (mode, syntax, tsig, consts, facts, cases)));
+fun map_data_result f ctxt =
+ let
+ val Data {mode, syntax, tsig, consts, facts, cases} = Data.get ctxt;
+ val (res, data') = f (mode, syntax, tsig, consts, facts, cases) ||> make_data;
+ in (res, Data.put data' ctxt) end;
+
+fun map_data f = snd o map_data_result (pair () o f);
fun set_mode mode = map_data (fn (_, syntax, tsig, consts, facts, cases) =>
(mode, syntax, tsig, consts, facts, cases));
@@ -264,9 +270,12 @@
map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
(mode, syntax, tsig, f consts, facts, cases));
-fun map_facts f =
- map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
- (mode, syntax, tsig, consts, f facts, cases));
+fun map_facts_result f =
+ map_data_result (fn (mode, syntax, tsig, consts, facts, cases) =>
+ let val (res, facts') = f facts
+ in (res, (mode, syntax, tsig, consts, facts', cases)) end);
+
+fun map_facts f = snd o map_facts_result (pair () o f);
fun map_cases f =
map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
@@ -991,6 +1000,9 @@
(* facts *)
+fun add_thms_dynamic arg ctxt =
+ ctxt |> map_facts_result (Facts.add_dynamic (Context.Proof ctxt) arg);
+
local
fun update_thms _ (b, NONE) ctxt = ctxt |> map_facts (Facts.del (full_name ctxt b))
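
map_data_result threads a result through the context data update, which is what add_thms_dynamic
needs in order to return the full internal fact name together with the updated context. A minimal
usage sketch; ctxt, the binding name and the (trivial) dynamic fact function are made up for
illustration:

  val (full_name, ctxt') =
    ctxt |> Proof_Context.add_thms_dynamic
      (Binding.name "my_dynamic_facts", fn _ => []);
  (* the function is re-evaluated against the generic context whenever the fact is retrieved *)
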
--- a/src/Pure/Isar/token.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Isar/token.ML Wed Dec 09 18:28:28 2015 +0100
@@ -17,26 +17,20 @@
val str_of_kind: kind -> string
type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T}
type T
- type src
+ type src = T list
+ type name_value = {name: string, kind: string, print: Proof.context -> Markup.T * xstring}
datatype value =
Source of src |
Literal of bool * Markup.T |
- Name of string * morphism |
+ Name of name_value * morphism |
Typ of typ |
Term of term |
Fact of string option * thm list |
Attribute of morphism -> attribute |
Declaration of declaration |
Files of file Exn.result list
- val name0: string -> value
val pos_of: T -> Position.T
val range_of: T list -> Position.range
- val src: xstring * Position.T -> T list -> src
- val src_checked: string * Position.T -> T list -> string * Markup.T -> src
- val name_of_src: src -> string * Position.T
- val args_of_src: src -> T list
- val range_of_src: src -> Position.T
- val check_src: Proof.context -> 'a Name_Space.table -> src -> src * 'a
val eof: T
val is_eof: T -> bool
val not_eof: T -> bool
@@ -65,7 +59,6 @@
val reports: Keyword.keywords -> T -> Position.report_text list
val markups: Keyword.keywords -> T -> Markup.T list
val unparse: T -> string
- val unparse_src: src -> string list
val print: T -> string
val text_of: T -> string * string
val get_files: T -> file Exn.result list
@@ -73,20 +66,22 @@
val make_value: string -> value -> T
val get_value: T -> value option
val map_value: (value -> value) -> T -> T
+ val name_value: name_value -> value
+ val get_name: T -> name_value option
val reports_of_value: T -> Position.report list
- val map_values: (value -> value) -> src -> src
+ val map_nested_values: (value -> value) -> T -> T
val declare_maxidx: T -> Proof.context -> Proof.context
- val declare_maxidx_src: src -> Proof.context -> Proof.context
val map_facts: (thm list -> thm list) -> T -> T
- val map_facts_src: (thm list -> thm list) -> src -> src
val transform: morphism -> T -> T
- val transform_src: morphism -> src -> src
val init_assignable: T -> T
- val init_assignable_src: src -> src
- val assign: value option -> T -> unit
+ val assign: value option -> T -> T
+ val evaluate: ('a -> value) -> (T -> 'a) -> T -> 'a
val closure: T -> T
- val closure_src: src -> src
val pretty_value: Proof.context -> T -> Pretty.T
+ val name_of_src: src -> string * Position.T
+ val args_of_src: src -> T list
+ val checked_src: src -> bool
+ val check_src: Proof.context -> (Proof.context -> 'a Name_Space.table) -> src -> src * 'a
val pretty_src: Proof.context -> src -> Pretty.T
val ident_or_symbolic: string -> bool
val source': bool -> Keyword.keywords -> (Symbol_Pos.T, 'a) Source.source ->
@@ -101,6 +96,8 @@
val read_cartouche: Symbol_Pos.T list -> T
val explode: Keyword.keywords -> Position.T -> string -> T list
val make: (int * int) * string -> Position.T -> T * Position.T
+ val make_string: string * Position.T -> T
+ val make_src: string * Position.T -> T list -> src
type 'a parser = T list -> 'a * T list
type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
val read_no_commands: Keyword.keywords -> 'a parser -> Symbol_Pos.T list -> 'a list
@@ -162,13 +159,9 @@
type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T};
-datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot
+type name_value = {name: string, kind: string, print: Proof.context -> Markup.T * xstring};
-and src =
- Src of
- {name: string * Position.T,
- args: T list,
- checked: (string * Markup.T) option}
+datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot
and slot =
Slot |
@@ -176,9 +169,9 @@
Assignable of value option Unsynchronized.ref
and value =
- Source of src |
+ Source of T list |
Literal of bool * Markup.T |
- Name of string * morphism |
+ Name of name_value * morphism |
Typ of typ |
Term of term |
Fact of string option * thm list | (*optional name for dynamic fact, i.e. fact "variable"*)
@@ -186,7 +179,7 @@
Declaration of declaration |
Files of file Exn.result list;
-fun name0 a = Name (a, Morphism.identity);
+type src = T list;
(* position *)
@@ -194,38 +187,16 @@
fun pos_of (Token ((_, (pos, _)), _, _)) = pos;
fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos;
+fun reset_pos pos (Token ((x, _), y, z)) =
+ let val pos' = Position.reset_range pos
+ in Token ((x, (pos', pos')), y, z) end;
+
fun range_of (toks as tok :: _) =
let val pos' = end_pos_of (List.last toks)
in Position.range (pos_of tok) pos' end
| range_of [] = Position.no_range;
-(* src *)
-
-fun src name args = Src {name = name, args = args, checked = NONE};
-fun src_checked name args checked = Src {name = name, args = args, checked = SOME checked};
-
-fun map_args f (Src {name, args, checked}) =
- Src {name = name, args = map f args, checked = checked};
-
-fun name_of_src (Src {name, ...}) = name;
-fun args_of_src (Src {args, ...}) = args;
-
-fun range_of_src (Src {name = (_, pos), args, ...}) =
- if null args then pos
- else Position.set_range (pos, #2 (range_of args));
-
-fun check_src _ table (src as Src {name = (name, _), checked = SOME _, ...}) =
- (src, Name_Space.get table name)
- | check_src ctxt table (Src {name = (xname, pos), args, checked = NONE}) =
- let
- val (name, x) = Name_Space.check (Context.Proof ctxt) table (xname, pos);
- val space = Name_Space.space_of_table table;
- val kind = Name_Space.kind_of space;
- val markup = Name_Space.markup space name;
- in (src_checked (name, pos) args (kind, markup), x) end;
-
-
(* stopper *)
fun mk_eof pos = Token (("", (pos, Position.none)), (EOF, ""), Slot);
@@ -360,8 +331,6 @@
| EOF => ""
| _ => x);
-fun unparse_src (Src {args, ...}) = map unparse args;
-
fun print tok = Markup.markups (markups Keyword.empty_keywords tok) (unparse tok);
fun text_of tok =
@@ -398,19 +367,29 @@
fun get_value (Token (_, _, Value v)) = v
| get_value _ = NONE;
+fun get_assignable_value (Token (_, _, Assignable r)) = ! r
+ | get_assignable_value (Token (_, _, Value v)) = v
+ | get_assignable_value _ = NONE;
+
fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v)))
| map_value _ tok = tok;
-fun map_values f =
- (map_args o map_value) (fn Source src => Source (map_values f src) | x => f x);
+fun map_nested_values f =
+ map_value (fn Source src => Source (map (map_nested_values f) src) | x => f x);
+
+
+(* name value *)
+
+fun name_value a = Name (a, Morphism.identity);
+
+fun get_name tok =
+ (case get_assignable_value tok of
+ SOME (Name (a, _)) => SOME a
+ | _ => NONE);
(* reports of value *)
-fun get_assignable_value (Token (_, _, Assignable r)) = ! r
- | get_assignable_value (Token (_, _, Value v)) = v
- | get_assignable_value _ = NONE;
-
fun reports_of_value tok =
(case get_assignable_value tok of
SOME (Literal markup) =>
@@ -429,7 +408,7 @@
fun declare_maxidx tok =
(case get_value tok of
- SOME (Source src) => declare_maxidx_src src
+ SOME (Source src) => fold declare_maxidx src
| SOME (Typ T) => Variable.declare_maxidx (Term.maxidx_of_typ T)
| SOME (Term t) => Variable.declare_maxidx (Term.maxidx_of_term t)
| SOME (Fact (_, ths)) => fold (Variable.declare_maxidx o Thm.maxidx_of) ths
@@ -438,8 +417,7 @@
(fn ctxt =>
let val ctxt' = Context.proof_map (Morphism.form decl) ctxt
in Variable.declare_maxidx (Variable.maxidx_of ctxt') ctxt end)
- | _ => I)
-and declare_maxidx_src src = fold declare_maxidx (args_of_src src);
+ | _ => I);
(* fact values *)
@@ -447,10 +425,9 @@
fun map_facts f =
map_value (fn v =>
(case v of
- Source src => Source (map_facts_src f src)
+ Source src => Source (map (map_facts f) src)
| Fact (a, ths) => Fact (a, f ths)
- | _ => v))
-and map_facts_src f = map_args (map_facts f);
+ | _ => v));
(* transform *)
@@ -458,7 +435,7 @@
fun transform phi =
map_value (fn v =>
(case v of
- Source src => Source (transform_src phi src)
+ Source src => Source (map (transform phi) src)
| Literal _ => v
| Name (a, psi) => Name (a, psi $> phi)
| Typ T => Typ (Morphism.typ phi T)
@@ -466,29 +443,32 @@
| Fact (a, ths) => Fact (a, Morphism.fact phi ths)
| Attribute att => Attribute (Morphism.transform phi att)
| Declaration decl => Declaration (Morphism.transform phi decl)
- | Files _ => v))
-and transform_src phi = map_args (transform phi);
+ | Files _ => v));
(* static binding *)
(*1st stage: initialize assignable slots*)
-fun init_assignable (Token (x, y, Slot)) = Token (x, y, Assignable (Unsynchronized.ref NONE))
- | init_assignable (tok as Token (_, _, Assignable r)) = (r := NONE; tok)
- | init_assignable tok = tok;
-
-val init_assignable_src = map_args init_assignable;
+fun init_assignable tok =
+ (case tok of
+ Token (x, y, Slot) => Token (x, y, Assignable (Unsynchronized.ref NONE))
+ | Token (_, _, Value _) => tok
+ | Token (_, _, Assignable r) => (r := NONE; tok));
(*2nd stage: assign values as side-effect of scanning*)
-fun assign v (Token (_, _, Assignable r)) = r := v
- | assign _ _ = ();
+fun assign v tok =
+ (case tok of
+ Token (x, y, Slot) => Token (x, y, Value v)
+ | Token (_, _, Value _) => tok
+ | Token (_, _, Assignable r) => (r := v; tok));
+
+fun evaluate mk eval arg =
+ let val x = eval arg in (assign (SOME (mk x)) arg; x) end;
(*3rd stage: static closure of final values*)
fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v)
| closure tok = tok;
-val closure_src = map_args closure;
-
(* pretty *)
@@ -497,22 +477,60 @@
SOME (Literal markup) =>
let val x = content_of tok
in Pretty.mark_str (keyword_markup markup x, x) end
- | SOME (Name (a, _)) => Pretty.str (quote a)
+ | SOME (Name ({print, ...}, _)) => Pretty.quote (Pretty.mark_str (print ctxt))
| SOME (Typ T) => Syntax.pretty_typ ctxt T
| SOME (Term t) => Syntax.pretty_term ctxt t
| SOME (Fact (_, ths)) =>
Pretty.enclose "(" ")" (Pretty.breaks (map (Pretty.backquote o Thm.pretty_thm ctxt) ths))
| _ => Pretty.marks_str (markups Keyword.empty_keywords tok, unparse tok));
+
+(* src *)
+
+fun dest_src [] = raise Fail "Empty token source"
+ | dest_src (head :: args) = (head, args);
+
+fun name_of_src src =
+ let
+ val head = #1 (dest_src src);
+ val name =
+ (case get_name head of
+ SOME {name, ...} => name
+ | NONE => content_of head);
+ in (name, pos_of head) end;
+
+val args_of_src = #2 o dest_src;
+
fun pretty_src ctxt src =
let
- val Src {name = (name, _), args, checked} = src;
+ val (head, args) = dest_src src;
val prt_name =
- (case checked of
- NONE => Pretty.str name
- | SOME (_, markup) => Pretty.mark_str (markup, name));
- val prt_arg = pretty_value ctxt;
- in Pretty.block (Pretty.breaks (prt_name :: map prt_arg args)) end;
+ (case get_name head of
+ SOME {print, ...} => Pretty.mark_str (print ctxt)
+ | NONE => Pretty.str (content_of head));
+ in Pretty.block (Pretty.breaks (Pretty.quote prt_name :: map (pretty_value ctxt) args)) end;
+
+fun checked_src (head :: _) = is_some (get_name head)
+ | checked_src [] = true;
+
+fun check_src ctxt get_table src =
+ let
+ val (head, args) = dest_src src;
+ val table = get_table ctxt;
+ in
+ (case get_name head of
+ SOME {name, ...} => (src, Name_Space.get table name)
+ | NONE =>
+ let
+ val (name, x) =
+ Name_Space.check (Context.Proof ctxt) table (content_of head, pos_of head);
+ val kind = Name_Space.kind_of (Name_Space.space_of_table table);
+ fun print ctxt' =
+ Name_Space.markup_extern ctxt' (Name_Space.space_of_table (get_table ctxt')) name;
+ val value = name_value {name = name, kind = kind, print = print};
+ val head' = closure (assign (SOME value) head);
+ in (head' :: args, x) end)
+ end;
@@ -660,7 +678,7 @@
val pos' = Position.advance_offset n pos;
val range = Position.range pos pos';
val tok =
- if k < Vector.length immediate_kinds then
+ if 0 <= k andalso k < Vector.length immediate_kinds then
Token ((s, range), (Vector.sub (immediate_kinds, k), s), Slot)
else
(case explode Keyword.empty_keywords pos s of
@@ -668,6 +686,12 @@
| _ => Token ((s, range), (Error (err_prefix ^ "exactly one token expected"), s), Slot))
in (tok, pos') end;
+fun make_string (s, pos) =
+ #1 (make ((~1, 0), Symbol_Pos.quote_string_qq s) Position.none)
+ |> reset_pos pos;
+
+fun make_src a args = make_string a :: args;
+
(** parsers **)
@@ -696,9 +720,10 @@
(* wrapped syntax *)
-fun syntax_generic scan (Src {name = (name, pos), args = args0, checked}) context =
+fun syntax_generic scan src context =
let
- val args1 = map init_assignable args0;
+ val (name, pos) = name_of_src src;
+ val args1 = map init_assignable (args_of_src src);
fun reported_text () =
if Context_Position.is_visible_generic context then
((pos, Markup.operator) :: maps (reports_of_value o closure) args1)
@@ -709,12 +734,16 @@
(SOME x, (context', [])) =>
let val _ = Output.report (reported_text ())
in (x, context') end
- | (_, (_, args2)) =>
+ | (_, (context', args2)) =>
let
val print_name =
- (case checked of
+ (case get_name (hd src) of
NONE => quote name
- | SOME (kind, markup) => plain_words kind ^ " " ^ quote (Markup.markup markup name));
+ | SOME {kind, print, ...} =>
+ let
+ val ctxt' = Context.proof_of context';
+ val (markup, xname) = print ctxt';
+ in plain_words kind ^ " " ^ quote (Markup.markup markup xname) end);
val print_args =
if null args2 then "" else ":\n " ^ space_implode " " (map print args2);
in
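
Under the new Token.src discipline a source is its head token followed by the argument tokens, and
checking assigns a name_value to the head, so the result is self-describing and repeated checks are
cheap. A sketch of the round trip, where ctxt, arg_toks and
get_table : Proof.context -> 'a Name_Space.table are placeholders:

  val src = Token.make_src ("foo", Position.none) arg_toks;  (* head token plus argument tokens *)
  val (src', _) = Token.check_src ctxt get_table src;        (* head now carries a Name value *)
  val checked = Token.checked_src src';                      (* true from here on *)
  val (name, _) = Token.name_of_src src';                    (* fully qualified name of the head *)
  (* checking src' again just looks up the stored name, as in the SOME branch of check_src above *)
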
--- a/src/Pure/ML/ml_context.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/ML/ml_context.ML Wed Dec 09 18:28:28 2015 +0100
@@ -108,7 +108,7 @@
|> Pretty.writeln;
fun apply_antiquotation src ctxt =
- let val (src', f) = Token.check_src ctxt (get_antiquotations ctxt) src
+ let val (src', f) = Token.check_src ctxt get_antiquotations src
in f src' ctxt end;
@@ -117,8 +117,7 @@
local
val antiq =
- Parse.!!! (Parse.position Parse.xname -- Parse.args --| Scan.ahead Parse.eof)
- >> uncurry Token.src;
+ Parse.!!! ((Parse.token Parse.xname ::: Parse.args) --| Scan.ahead Parse.eof);
fun make_env name visible =
(ML_Lex.tokenize
@@ -154,7 +153,7 @@
fun expand (Antiquote.Text tok) ctxt = (K ([], [tok]), ctxt)
| expand (Antiquote.Control {name, range, body}) ctxt =
expand_src range
- (Token.src name (if null body then [] else [Token.read_cartouche body])) ctxt
+ (Token.make_src name (if null body then [] else [Token.read_cartouche body])) ctxt
| expand (Antiquote.Antiq {range, body, ...}) ctxt =
expand_src range
(Token.read_antiq (Thy_Header.get_keywords' ctxt) antiq (body, #1 range)) ctxt;
--- a/src/Pure/ML/ml_lex.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/ML/ml_lex.ML Wed Dec 09 18:28:28 2015 +0100
@@ -183,7 +183,7 @@
Scan.many (Symbol.is_ascii_letdig o Symbol_Pos.symbol);
val scan_alphanumeric =
- Scan.one (Symbol.is_ascii_letter o Symbol_Pos.symbol) -- scan_letdigs >> op ::;
+ Scan.one (Symbol.is_ascii_letter o Symbol_Pos.symbol) ::: scan_letdigs;
val scan_symbolic =
Scan.many1 (member (op =) (raw_explode "!#$%&*+-/:<=>?@\\^`|~") o Symbol_Pos.symbol);
--- a/src/Pure/ML/ml_thms.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/ML/ml_thms.ML Wed Dec 09 18:28:28 2015 +0100
@@ -40,11 +40,11 @@
(* attribute source *)
val _ = Theory.setup
- (ML_Antiquotation.declaration @{binding attributes} (Scan.lift Parse.attribs)
- (fn _ => fn raw_srcs => fn ctxt =>
+ (ML_Antiquotation.declaration @{binding attributes} Attrib.attribs
+ (fn _ => fn srcs => fn ctxt =>
let val i = serial () in
ctxt
- |> put_attributes (i, Attrib.check_attribs ctxt raw_srcs)
+ |> put_attributes (i, srcs)
|> ML_Context.value_decl "attributes"
("ML_Thms.the_attributes ML_context " ^ string_of_int i)
end));
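
With Attrib.attribs doing the checking inside the antiquotation, @{attributes [...]} now yields
already-checked sources of type Token.src list. A usage sketch on the ML side; the binding, theorem
and local theory are placeholders:

  val simp_atts : Token.src list = @{attributes [simp]};
  (* e.g.  Local_Theory.note ((@{binding my_fact}, simp_atts), [my_thm]) lthy *)
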
--- a/src/Pure/Thy/document_antiquotations.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Thy/document_antiquotations.ML Wed Dec 09 18:28:28 2015 +0100
@@ -115,8 +115,6 @@
Scan.lift (Parse.position (Parse.reserved "by") -- Method.parse -- Scan.option Method.parse))
(fn {source, context = ctxt, ...} => fn ((prop_token, prop), (((_, by_pos), m1), m2)) =>
let
- val prop_src = Token.src (Token.name_of_src source) [prop_token];
-
val reports = (by_pos, Markup.keyword1) :: maps Method.reports_of (m1 :: the_list m2);
val _ = Context_Position.reports ctxt reports;
@@ -126,7 +124,8 @@
|> Proof.global_terminal_proof (m1, m2);
in
Thy_Output.output ctxt
- (Thy_Output.maybe_pretty_source Thy_Output.pretty_term ctxt prop_src [prop])
+ (Thy_Output.maybe_pretty_source
+ Thy_Output.pretty_term ctxt [hd source, prop_token] [prop])
end));
--- a/src/Pure/Thy/term_style.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Thy/term_style.ML Wed Dec 09 18:28:28 2015 +0100
@@ -24,7 +24,6 @@
);
val get_data = Data.get o Proof_Context.theory_of;
-val get_style = Name_Space.get o get_data;
fun setup binding style thy =
Data.map (#2 o Name_Space.define (Context.Theory thy) true (binding, style)) thy;
@@ -33,9 +32,9 @@
(* style parsing *)
fun parse_single ctxt =
- Parse.position Parse.xname -- Parse.args >> (fn (name, args) =>
+ Parse.token Parse.xname ::: Parse.args >> (fn src0 =>
let
- val (src, parse) = Token.check_src ctxt (get_data ctxt) (Token.src name args);
+ val (src, parse) = Token.check_src ctxt get_data src0;
val (f, _) = Token.syntax (Scan.lift parse) src ctxt;
in f ctxt end);
--- a/src/Pure/Thy/thy_output.ML Tue Dec 08 20:21:59 2015 +0100
+++ b/src/Pure/Thy/thy_output.ML Wed Dec 09 18:28:28 2015 +0100
@@ -95,7 +95,7 @@
fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt));
fun command src state ctxt =
- let val (src', f) = Token.check_src ctxt (#1 (get_antiquotations ctxt)) src
+ let val (src', f) = Token.check_src ctxt (#1 o get_antiquotations) src
in f src' state ctxt end;
fun option ((xname, pos), s) ctxt =
@@ -153,8 +153,8 @@
val antiq =
Parse.!!!
- (Parse.position Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof)
- >> (fn ((name, props), args) => (props, Token.src name args));
+ (Parse.token Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof)
+ >> (fn ((name, props), args) => (props, name :: args));
end;
@@ -177,7 +177,8 @@
fun eval_antiquote _ (Antiquote.Text ss) = Symbol_Pos.content ss
| eval_antiquote state (Antiquote.Control {name, body, ...}) =
- eval_antiq state ([], Token.src name (if null body then [] else [Token.read_cartouche body]))
+ eval_antiq state
+ ([], Token.make_src name (if null body then [] else [Token.read_cartouche body]))
| eval_antiquote state (Antiquote.Antiq {range = (pos, _), body, ...}) =
let
val keywords =
@@ -418,7 +419,7 @@
in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end));
val command = Scan.peek (fn d =>
- Scan.optional (Scan.one Token.is_command_modifier -- improper >> op ::) [] --
+ Scan.optional (Scan.one Token.is_command_modifier ::: improper) [] --
Scan.one Token.is_command -- Scan.repeat tag
>> (fn ((cmd_mod, cmd), tags) =>
map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
@@ -575,7 +576,7 @@
(* default output *)
-val str_of_source = space_implode " " o Token.unparse_src;
+val str_of_source = space_implode " " o map Token.unparse o Token.args_of_src;
fun maybe_pretty_source pretty ctxt src xs =
map (pretty ctxt) xs (*always pretty in order to exhibit errors!*)