# HG changeset patch
# User wenzelm
# Date 1415389393 -3600
# Node ID dcad9bad43e7654c68df404f27923ce0b63c44af
# Parent a3be9a47e2d72950e8f095b9568e9ffa5e24d043
# Parent 385a4cc7426fc4e7ad838e391fb966a5c2d878fd
merged

diff -r a3be9a47e2d7 -r dcad9bad43e7 NEWS
--- a/NEWS	Fri Nov 07 15:40:08 2014 +0000
+++ b/NEWS	Fri Nov 07 20:43:13 2014 +0100
@@ -18,6 +18,10 @@
 structure in the body: 'oops' is no longer supported here. Minor
 INCOMPATIBILITY, use 'sorry' instead.
 
+* Outer syntax commands are managed authentically within the theory
+context, without implicit global state. Potential for accidental
+INCOMPATIBILITY, make sure that required theories are really imported.
+
 
 *** Prover IDE -- Isabelle/Scala/jEdit ***
 
diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Doc/Datatypes/Datatypes.thy
--- a/src/Doc/Datatypes/Datatypes.thy	Fri Nov 07 15:40:08 2014 +0000
+++ b/src/Doc/Datatypes/Datatypes.thy	Fri Nov 07 20:43:13 2014 +0100
@@ -1554,7 +1554,7 @@
 
 \item Introduce a fully unspecified constant @{text "un_D\<^sub>0 \ 'a"} using
-@{keyword consts}.
+@{command consts}.
 
 \item Define the datatype, specifying @{text "un_D\<^sub>0"} as the selector's default
@@ -2615,7 +2615,7 @@
 
 text {*
 \noindent
-Using \keyw{print_theorems} and @{keyword print_bnfs}, we can contemplate and
+Using \keyw{print_theorems} and @{command print_bnfs}, we can contemplate and
 show the world what we have achieved.
 
 This particular example does not need any nonemptiness witness, because the
diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Doc/System/Sessions.thy
--- a/src/Doc/System/Sessions.thy	Fri Nov 07 15:40:08 2014 +0000
+++ b/src/Doc/System/Sessions.thy	Fri Nov 07 20:43:13 2014 +0100
@@ -126,7 +126,7 @@
   files that are involved in the processing of this session. This
   should cover anything outside the formal content of the theory
   sources. In contrast, files that are loaded formally
-  within a theory, e.g.\ via @{keyword "ML_file"}, need not be
+  within a theory, e.g.\ via @{command "ML_file"}, need not be
   declared again.
 
 \item \isakeyword{document_files}~@{text "("}\isakeyword{in}~@{text
diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Doc/Tutorial/ToyList/ToyList.thy
--- a/src/Doc/Tutorial/ToyList/ToyList.thy	Fri Nov 07 15:40:08 2014 +0000
+++ b/src/Doc/Tutorial/ToyList/ToyList.thy	Fri Nov 07 20:43:13 2014 +0100
@@ -1,16 +1,18 @@
 theory ToyList
-imports BNF_Least_Fixpoint
+imports Main
 begin
 
 text{*\noindent
 HOL already has a predefined theory of lists called @{text List} ---
-@{text ToyList} is merely a small fragment of it chosen as an example. In
-contrast to what is recommended in \S\ref{sec:Basic:Theories},
-@{text ToyList} is not based on @{text Main} but on
-@{text BNF_Least_Fixpoint}, a theory that contains pretty much everything
-but lists, thus avoiding ambiguities caused by defining lists twice.
+@{text ToyList} is merely a small fragment of it chosen as an example.
+To avoid some ambiguities caused by defining lists twice, we manipulate
+the concrete syntax and name space of theory @{theory Main} as follows.
*} +no_notation Nil ("[]") and Cons (infixr "#" 65) and append (infixr "@" 65) +hide_type list +hide_const rev + datatype 'a list = Nil ("[]") | Cons 'a "'a list" (infixr "#" 65) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Doc/Tutorial/ToyList/ToyList1.txt --- a/src/Doc/Tutorial/ToyList/ToyList1.txt Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Doc/Tutorial/ToyList/ToyList1.txt Fri Nov 07 20:43:13 2014 +0100 @@ -1,7 +1,11 @@ theory ToyList -imports BNF_Least_Fixpoint +imports Main begin +no_notation Nil ("[]") and Cons (infixr "#" 65) and append (infixr "@" 65) +hide_type list +hide_const rev + datatype 'a list = Nil ("[]") | Cons 'a "'a list" (infixr "#" 65) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Doc/Tutorial/ToyList/ToyList_Test.thy --- a/src/Doc/Tutorial/ToyList/ToyList_Test.thy Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Doc/Tutorial/ToyList/ToyList_Test.thy Fri Nov 07 20:43:13 2014 +0100 @@ -1,5 +1,5 @@ theory ToyList_Test -imports BNF_Least_Fixpoint +imports Main begin ML {* @@ -7,7 +7,7 @@ map (File.read o Path.append (Resources.master_directory @{theory}) o Path.explode) ["ToyList1.txt", "ToyList2.txt"] |> implode - in Thy_Info.script_thy Position.start text @{theory BNF_Least_Fixpoint} end + in Thy_Info.script_thy Position.start text @{theory} end *} end diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Doc/antiquote_setup.ML --- a/src/Doc/antiquote_setup.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Doc/antiquote_setup.ML Fri Nov 07 20:43:13 2014 +0100 @@ -191,15 +191,23 @@ fun no_check _ _ = true; +fun is_keyword ctxt (name, _) = + Keyword.is_keyword (Thy_Header.get_keywords' ctxt) name; + fun check_command ctxt (name, pos) = - is_some (Keyword.command_keyword name) andalso - let - val markup = - Outer_Syntax.scan (Keyword.get_keywords ()) Position.none name - |> maps (Outer_Syntax.command_reports (#2 (Outer_Syntax.get_syntax ()))) - |> map (snd o fst); - val _ = Context_Position.reports ctxt (map (pair pos) markup); - in true end; + let + val thy = Proof_Context.theory_of ctxt; + val keywords = Thy_Header.get_keywords thy; + in + Keyword.is_command keywords name andalso + let + val markup = + Outer_Syntax.scan keywords Position.none name + |> maps (Outer_Syntax.command_reports thy) + |> map (snd o fst); + val _ = Context_Position.reports ctxt (map (pair pos) markup); + in true end + end; fun check_system_option ctxt (name, pos) = (Context_Position.report ctxt pos (Options.default_markup (name, pos)); true) @@ -259,8 +267,8 @@ Theory.setup (entity_antiqs no_check "" @{binding syntax} #> entity_antiqs check_command "isacommand" @{binding command} #> - entity_antiqs (K (Keyword.is_keyword o #1)) "isakeyword" @{binding keyword} #> - entity_antiqs (K (Keyword.is_keyword o #1)) "isakeyword" @{binding element} #> + entity_antiqs is_keyword "isakeyword" @{binding keyword} #> + entity_antiqs is_keyword "isakeyword" @{binding element} #> entity_antiqs (can o Method.check_name) "" @{binding method} #> entity_antiqs (can o Attrib.check_name) "" @{binding attribute} #> entity_antiqs no_check "" @{binding fact} #> diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Groebner_Basis.thy --- a/src/HOL/Groebner_Basis.thy Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Groebner_Basis.thy Fri Nov 07 20:43:13 2014 +0100 @@ -6,7 +6,6 @@ theory Groebner_Basis imports Semiring_Normalization Parity -keywords "try0" :: diag begin subsection {* Groebner Bases *} diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/IMP/Abs_Int_ITP/Collecting_ITP.thy --- a/src/HOL/IMP/Abs_Int_ITP/Collecting_ITP.thy Fri Nov 07 15:40:08 2014 +0000 +++ 
b/src/HOL/IMP/Abs_Int_ITP/Collecting_ITP.thy Fri Nov 07 20:43:13 2014 +0100 @@ -1,5 +1,5 @@ theory Collecting_ITP -imports Complete_Lattice_ix "ACom_ITP" +imports "~~/src/Tools/Permanent_Interpretation" Complete_Lattice_ix "ACom_ITP" begin subsection "Collecting Semantics of Commands" diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML --- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Fri Nov 07 20:43:13 2014 +0100 @@ -414,11 +414,11 @@ time_isa) = time_limit (Mirabelle.cpu_time (fn () => let val ho_atp = Sledgehammer_Prover_ATP.is_ho_atp ctxt prover_name - val reserved = Sledgehammer_Util.reserved_isar_keyword_table () + val keywords = Thy_Header.get_keywords' ctxt val css_table = Sledgehammer_Fact.clasimpset_rule_table_of ctxt val facts = Sledgehammer_Fact.nearly_all_facts ctxt ho_atp - Sledgehammer_Fact.no_fact_override reserved css_table chained_ths + Sledgehammer_Fact.no_fact_override keywords css_table chained_ths hyp_ts concl_t val factss = facts @@ -535,8 +535,8 @@ fun do_method named_thms ctxt = let val ref_of_str = - suffix ";" #> Outer_Syntax.scan (Keyword.get_keywords ()) Position.none #> Parse.xthm - #> fst + suffix ";" #> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.none + #> Parse.xthm #> fst val thms = named_thms |> maps snd val facts = named_thms |> map (ref_of_str o fst o fst) val fact_override = {add = facts, del = [], only = true} @@ -561,7 +561,7 @@ let val (type_encs, lam_trans) = !meth - |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start + |> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.start |> filter Token.is_proper |> tl |> Metis_Tactic.parse_metis_options |> fst |>> the_default [ATP_Proof_Reconstruct.partial_typesN] diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML --- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML Fri Nov 07 20:43:13 2014 +0100 @@ -110,11 +110,11 @@ val subgoal = 1 val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal subgoal ctxt val ho_atp = Sledgehammer_Prover_ATP.is_ho_atp ctxt prover - val reserved = Sledgehammer_Util.reserved_isar_keyword_table () + val keywords = Thy_Header.get_keywords' ctxt val css_table = Sledgehammer_Fact.clasimpset_rule_table_of ctxt val facts = Sledgehammer_Fact.nearly_all_facts ctxt ho_atp - Sledgehammer_Fact.no_fact_override reserved css_table chained_ths + Sledgehammer_Fact.no_fact_override keywords css_table chained_ths hyp_ts concl_t |> Sledgehammer_Fact.drop_duplicate_facts |> Sledgehammer_MePo.mepo_suggested_facts ctxt params diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Presburger.thy --- a/src/HOL/Presburger.thy Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Presburger.thy Fri Nov 07 20:43:13 2014 +0100 @@ -6,6 +6,7 @@ theory Presburger imports Groebner_Basis Set_Interval +keywords "try0" :: diag begin ML_file "Tools/Qelim/qelim.ML" diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/TPTP/atp_theory_export.ML --- a/src/HOL/TPTP/atp_theory_export.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/TPTP/atp_theory_export.ML Fri Nov 07 20:43:13 2014 +0100 @@ -165,7 +165,7 @@ val mono = not (is_type_enc_polymorphic type_enc) val facts = Sledgehammer_Fact.all_facts (Proof_Context.init_global thy) true false - Symtab.empty [] [] css_table + Keyword.empty_keywords [] [] css_table |> sort 
(Sledgehammer_MaSh.crude_thm_ord o pairself snd) val problem = facts diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/TPTP/mash_eval.ML --- a/src/HOL/TPTP/mash_eval.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/TPTP/mash_eval.ML Fri Nov 07 20:43:13 2014 +0100 @@ -53,7 +53,7 @@ val liness' = Ctr_Sugar_Util.transpose (map pad liness0) val css = clasimpset_rule_table_of ctxt - val facts = all_facts ctxt true false Symtab.empty [] [] css + val facts = all_facts ctxt true false Keyword.empty_keywords [] [] css val name_tabs = build_name_tables nickname_of_thm facts fun with_index facts s = (find_index (curry (op =) s) facts + 1, s) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/TPTP/mash_export.ML --- a/src/HOL/TPTP/mash_export.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/TPTP/mash_export.ML Fri Nov 07 20:43:13 2014 +0100 @@ -64,7 +64,7 @@ fun all_facts ctxt = let val css = Sledgehammer_Fact.clasimpset_rule_table_of ctxt in - Sledgehammer_Fact.all_facts ctxt true false Symtab.empty [] [] css + Sledgehammer_Fact.all_facts ctxt true false Keyword.empty_keywords [] [] css |> sort (crude_thm_ord o pairself snd) end diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/TPTP/sledgehammer_tactics.ML --- a/src/HOL/TPTP/sledgehammer_tactics.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/TPTP/sledgehammer_tactics.ML Fri Nov 07 20:43:13 2014 +0100 @@ -36,10 +36,10 @@ val default_max_facts = default_max_facts_of_prover ctxt name val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal i ctxt val ho_atp = exists (is_ho_atp ctxt) provers - val reserved = reserved_isar_keyword_table () + val keywords = Thy_Header.get_keywords' ctxt val css_table = clasimpset_rule_table_of ctxt val facts = - nearly_all_facts ctxt ho_atp fact_override reserved css_table chained hyp_ts concl_t + nearly_all_facts ctxt ho_atp fact_override keywords css_table chained hyp_ts concl_t |> relevant_facts ctxt params name (the_default default_max_facts max_facts) fact_override hyp_ts concl_t |> hd |> snd diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/ATP/atp_util.ML --- a/src/HOL/Tools/ATP/atp_util.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/ATP/atp_util.ML Fri Nov 07 20:43:13 2014 +0100 @@ -19,7 +19,7 @@ val nat_subscript : int -> string val unquote_tvar : string -> string val unyxml : string -> string - val maybe_quote : string -> string + val maybe_quote : Keyword.keywords -> string -> string val string_of_ext_time : bool * Time.time -> string val string_of_time : Time.time -> string val type_instance : theory -> typ -> typ -> bool @@ -136,11 +136,11 @@ val unyxml = XML.content_of o YXML.parse_body val is_long_identifier = forall Symbol_Pos.is_identifier o Long_Name.explode -fun maybe_quote y = +fun maybe_quote keywords y = let val s = unyxml y in y |> ((not (is_long_identifier (unquote_tvar s)) andalso not (is_long_identifier (unquery_var s))) orelse - Keyword.is_keyword s) ? quote + Keyword.is_literal keywords s) ? 
quote end fun string_of_ext_time (plus, time) = diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/BNF/bnf_fp_def_sugar.ML --- a/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML Fri Nov 07 20:43:13 2014 +0100 @@ -2016,7 +2016,7 @@ val fake_T = qsoty (unfreeze_fp X); val fake_T_backdrop = qsoty (unfreeze_fp X_backdrop); fun register_hint () = - "\nUse the " ^ quote (fst (fst @{command_spec "bnf"})) ^ " command to register " ^ + "\nUse the " ^ quote (#1 @{command_spec "bnf"}) ^ " command to register " ^ quote bad_tc ^ " as a bounded natural functor to allow nested (co)recursion through \ \it"; in diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Lifting/lifting_setup.ML --- a/src/HOL/Tools/Lifting/lifting_setup.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Lifting/lifting_setup.ML Fri Nov 07 20:43:13 2014 +0100 @@ -221,7 +221,7 @@ val thy = Proof_Context.theory_of lthy val dummy_thm = Thm.transfer thy Drule.dummy_thm - val pointer = Outer_Syntax.scan (Keyword.get_keywords ()) Position.none bundle_name + val pointer = Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none bundle_name val restore_lifting_att = ([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer]) in diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Nitpick/nitpick.ML --- a/src/HOL/Tools/Nitpick/nitpick.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Nitpick/nitpick.ML Fri Nov 07 20:43:13 2014 +0100 @@ -220,6 +220,7 @@ val timer = Timer.startRealTimer () val thy = Proof.theory_of state val ctxt = Proof.context_of state + val keywords = Thy_Header.get_keywords thy (* FIXME: reintroduce code before new release: val nitpick_thy = Thy_Info.get_theory "Nitpick" @@ -365,7 +366,7 @@ val unique_scope = forall (curry (op =) 1 o length o snd) cards_assigns fun monotonicity_message Ts extra = - let val pretties = map (pretty_maybe_quote o pretty_for_type ctxt) Ts in + let val pretties = map (pretty_maybe_quote keywords o pretty_for_type ctxt) Ts in Pretty.blk (0, pstrs ("The type" ^ plural_s_for_list pretties ^ " ") @ pretty_serial_commas "and" pretties @ diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Nitpick/nitpick_scope.ML --- a/src/HOL/Tools/Nitpick/nitpick_scope.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Nitpick/nitpick_scope.ML Fri Nov 07 20:43:13 2014 +0100 @@ -180,9 +180,10 @@ let fun standard_blocks s = map (Pretty.block o cons (Pretty.str (s ^ " "))) val (primary_cards, secondary_cards, maxes, iters, miscs) = - quintuple_for_scope (pretty_maybe_quote oo pretty_for_type) - (pretty_maybe_quote oo Syntax.pretty_term) - Pretty.str scope + quintuple_for_scope + (fn ctxt => pretty_maybe_quote (Thy_Header.get_keywords' ctxt) o pretty_for_type ctxt) + (fn ctxt => pretty_maybe_quote (Thy_Header.get_keywords' ctxt) o Syntax.pretty_term ctxt) + Pretty.str scope in standard_blocks "card" primary_cards @ (if verbose then diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Nitpick/nitpick_util.ML --- a/src/HOL/Tools/Nitpick/nitpick_util.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Nitpick/nitpick_util.ML Fri Nov 07 20:43:13 2014 +0100 @@ -72,7 +72,7 @@ val indent_size : int val pstrs : string -> Pretty.T list val unyxml : string -> string - val pretty_maybe_quote : Pretty.T -> Pretty.T + val pretty_maybe_quote : Keyword.keywords -> Pretty.T -> Pretty.T val hash_term : term -> int val spying : bool -> (unit -> Proof.state * int * string) -> unit end; @@ -287,9 +287,9 @@ val maybe_quote = 
ATP_Util.maybe_quote -fun pretty_maybe_quote pretty = +fun pretty_maybe_quote keywords pretty = let val s = Pretty.str_of pretty in - if maybe_quote s = s then pretty else Pretty.enum "" "\"" "\"" [pretty] + if maybe_quote keywords s = s then pretty else Pretty.quote pretty end val hashw = ATP_Util.hashw diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Sledgehammer/sledgehammer.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer.ML Fri Nov 07 20:43:13 2014 +0100 @@ -264,13 +264,13 @@ val print = if mode = Normal andalso is_none output_result then writeln else K () val ctxt = Proof.context_of state + val keywords = Thy_Header.get_keywords' ctxt val {facts = chained, goal, ...} = Proof.goal state val (_, hyp_ts, concl_t) = strip_subgoal goal i ctxt val ho_atp = exists (is_ho_atp ctxt) provers - val reserved = reserved_isar_keyword_table () val css = clasimpset_rule_table_of ctxt val all_facts = - nearly_all_facts ctxt ho_atp fact_override reserved css chained hyp_ts concl_t + nearly_all_facts ctxt ho_atp fact_override keywords css chained hyp_ts concl_t val _ = () |> not blocking ? kill_provers val _ = (case find_first (not o is_prover_supported ctxt) provers of diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Fri Nov 07 20:43:13 2014 +0100 @@ -21,7 +21,7 @@ val instantiate_inducts : bool Config.T val no_fact_override : fact_override - val fact_of_ref : Proof.context -> unit Symtab.table -> thm list -> status Termtab.table -> + val fact_of_ref : Proof.context -> Keyword.keywords -> thm list -> status Termtab.table -> Facts.ref * Token.src list -> ((string * stature) * thm) list val backquote_thm : Proof.context -> thm -> string val is_blacklisted_or_something : Proof.context -> bool -> string -> bool @@ -33,9 +33,9 @@ val fact_of_raw_fact : raw_fact -> fact val is_useful_unnamed_local_fact : Proof.context -> thm -> bool - val all_facts : Proof.context -> bool -> bool -> unit Symtab.table -> thm list -> thm list -> + val all_facts : Proof.context -> bool -> bool -> Keyword.keywords -> thm list -> thm list -> status Termtab.table -> raw_fact list - val nearly_all_facts : Proof.context -> bool -> fact_override -> unit Symtab.table -> + val nearly_all_facts : Proof.context -> bool -> fact_override -> Keyword.keywords -> status Termtab.table -> thm list -> term list -> term -> raw_fact list val drop_duplicate_facts : raw_fact list -> raw_fact list end; @@ -66,12 +66,12 @@ val no_fact_override = {add = [], del = [], only = false} -fun needs_quoting reserved s = - Symtab.defined reserved s orelse +fun needs_quoting keywords s = + Keyword.is_literal keywords s orelse exists (not o Symbol_Pos.is_identifier) (Long_Name.explode s) -fun make_name reserved multi j name = - (name |> needs_quoting reserved name ? quote) ^ +fun make_name keywords multi j name = + (name |> needs_quoting keywords name ? 
quote) ^ (if multi then "(" ^ string_of_int j ^ ")" else "") fun explode_interval _ (Facts.FromTo (i, j)) = i upto j @@ -157,7 +157,7 @@ fun stature_of_thm global assms chained css name th = (scope_of_thm global assms chained th, status_of_thm css name th) -fun fact_of_ref ctxt reserved chained css (xthm as (xref, args)) = +fun fact_of_ref ctxt keywords chained css (xthm as (xref, args)) = let val ths = Attrib.eval_thms ctxt [xthm] val bracket = implode (map (enclose "[" "]" o Pretty.str_of o Token.pretty_src ctxt) args) @@ -166,9 +166,9 @@ (case xref of Facts.Fact s => backquote (simplify_spaces (unyxml s)) ^ bracket | Facts.Named (("", _), _) => "[" ^ bracket ^ "]" - | Facts.Named ((name, _), NONE) => make_name reserved (length ths > 1) (j + 1) name ^ bracket + | Facts.Named ((name, _), NONE) => make_name keywords (length ths > 1) (j + 1) name ^ bracket | Facts.Named ((name, _), SOME intervals) => - make_name reserved true + make_name keywords true (nth (maps (explode_interval (length ths)) intervals) j) name ^ bracket) fun add_nth th (j, rest) = @@ -455,7 +455,7 @@ forall (fn (_, ths) => not (member Thm.eq_thm_prop ths th)) named_locals end -fun all_facts ctxt generous ho_atp reserved add_ths chained css = +fun all_facts ctxt generous ho_atp keywords add_ths chained css = let val thy = Proof_Context.theory_of ctxt val global_facts = Global_Theory.facts_of thy @@ -514,7 +514,7 @@ name0 end end - |> make_name reserved multi j + |> make_name keywords multi j val stature = stature_of_thm global assms chained css name0 th val new = ((get_name, stature), th) in @@ -530,7 +530,7 @@ |> op @ end -fun nearly_all_facts ctxt ho_atp {add, del, only} reserved css chained hyp_ts concl_t = +fun nearly_all_facts ctxt ho_atp {add, del, only} keywords css chained hyp_ts concl_t = if only andalso null add then [] else @@ -539,12 +539,12 @@ in (if only then maps (map (fn ((name, stature), th) => ((K name, stature), th)) - o fact_of_ref ctxt reserved chained css) add + o fact_of_ref ctxt keywords chained css) add else let val (add, del) = pairself (Attrib.eval_thms ctxt) (add, del) val facts = - all_facts ctxt false ho_atp reserved add chained css + all_facts ctxt false ho_atp keywords add chained css |> filter_out ((member Thm.eq_thm_prop del orf (Named_Theorems.member ctxt @{named_theorems no_atp} andf not o member Thm.eq_thm_prop add)) o snd) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML Fri Nov 07 20:43:13 2014 +0100 @@ -267,6 +267,8 @@ fun string_of_isar_proof ctxt0 i n proof = let + val keywords = Thy_Header.get_keywords' ctxt0 + (* Make sure only type constraints inserted by the type annotation code are printed. 
*) val ctxt = ctxt0 |> Config.put show_markup false @@ -300,7 +302,7 @@ |> annotate_types_in_term ctxt |> with_vanilla_print_mode (Syntax.unparse_term ctxt #> Pretty.string_of) |> simplify_spaces - |> maybe_quote), + |> maybe_quote keywords), ctxt |> perhaps (try (Variable.auto_fixes term))) fun using_facts [] [] = "" @@ -317,8 +319,8 @@ end fun of_free (s, T) = - maybe_quote s ^ " :: " ^ - maybe_quote (simplify_spaces (with_vanilla_print_mode (Syntax.string_of_typ ctxt) T)) + maybe_quote keywords s ^ " :: " ^ + maybe_quote keywords (simplify_spaces (with_vanilla_print_mode (Syntax.string_of_typ ctxt) T)) fun add_frees xs (s, ctxt) = (s ^ space_implode " and " (map of_free xs), ctxt |> fold Variable.auto_fixes (map Free xs)) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML Fri Nov 07 20:43:13 2014 +0100 @@ -1467,7 +1467,8 @@ let val css = Sledgehammer_Fact.clasimpset_rule_table_of ctxt val ctxt = ctxt |> Config.put instantiate_inducts false - val facts = nearly_all_facts ctxt false fact_override Symtab.empty css chained [] @{prop True} + val facts = + nearly_all_facts ctxt false fact_override Keyword.empty_keywords css chained [] @{prop True} |> sort (crude_thm_ord o pairself snd o swap) val num_facts = length facts val prover = hd provers diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/Sledgehammer/sledgehammer_util.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML Fri Nov 07 20:43:13 2014 +0100 @@ -17,7 +17,6 @@ val parse_bool_option : bool -> string -> string -> bool option val parse_time : string -> string -> Time.time val subgoal_count : Proof.state -> int - val reserved_isar_keyword_table : unit -> unit Symtab.table val thms_in_proof : int -> (string Symtab.table * string Symtab.table) option -> thm -> string list option val thms_of_name : Proof.context -> string -> thm list @@ -79,9 +78,6 @@ val subgoal_count = Try.subgoal_count -fun reserved_isar_keyword_table () = - Symtab.make_set (Scan.dest_lexicon (Scan.merge_lexicons (Keyword.get_lexicons ()))); - exception TOO_MANY of unit (* FIXME: Similar yet different code in "mirabelle.ML". 
The code here has a few fixes that seem to @@ -127,7 +123,7 @@ fun thms_of_name ctxt name = let val get = maps (Proof_Context.get_fact ctxt o fst) - val keywords = Keyword.get_keywords () + val keywords = Thy_Header.get_keywords' ctxt in Source.of_string name |> Symbol.source diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/Tools/try0.ML --- a/src/HOL/Tools/try0.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/Tools/try0.ML Fri Nov 07 20:43:13 2014 +0100 @@ -41,15 +41,15 @@ NONE end; -fun parse_method s = +fun parse_method keywords s = enclose "(" ")" s - |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start + |> Outer_Syntax.scan keywords Position.start |> filter Token.is_proper |> Scan.read Token.stopper Method.parse |> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source"); fun apply_named_method_on_first_goal ctxt = - parse_method + parse_method (Thy_Header.get_keywords' ctxt) #> Method.method_cmd ctxt #> Method.Basic #> (fn m => Method.Combinator (Method.no_combinator_info, Method.Select_Goals 1, [m])) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/HOL/ex/Cartouche_Examples.thy --- a/src/HOL/ex/Cartouche_Examples.thy Fri Nov 07 15:40:08 2014 +0000 +++ b/src/HOL/ex/Cartouche_Examples.thy Fri Nov 07 20:43:13 2014 +0100 @@ -178,7 +178,7 @@ ML {* Outer_Syntax.command @{command_spec "text_cartouche"} "" - (Parse.opt_target -- Parse.source_position Parse.cartouche >> Isar_Cmd.local_theory_markup) + (Parse.opt_target -- Parse.source_position Parse.cartouche >> Thy_Output.local_theory_markup) *} text_cartouche diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/attrib.ML --- a/src/Pure/Isar/attrib.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/attrib.ML Fri Nov 07 20:43:13 2014 +0100 @@ -288,8 +288,8 @@ fun read_attribs ctxt source = let + val keywords = Thy_Header.get_keywords' ctxt; val syms = Symbol_Pos.source_explode source; - val keywords = Keyword.get_keywords (); in (case Token.read_no_commands keywords Parse.attribs syms of [raw_srcs] => check_attribs ctxt raw_srcs diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/isar_cmd.ML --- a/src/Pure/Isar/isar_cmd.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/isar_cmd.ML Fri Nov 07 20:43:13 2014 +0100 @@ -48,12 +48,6 @@ val print_term: (string list * string) -> Toplevel.transition -> Toplevel.transition val print_type: (string list * (string * string option)) -> Toplevel.transition -> Toplevel.transition - val local_theory_markup: (xstring * Position.T) option * Symbol_Pos.source -> - Toplevel.transition -> Toplevel.transition - val proof_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition - val header_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition - val heading_markup: (xstring * Position.T) option * Symbol_Pos.source -> - Toplevel.transition -> Toplevel.transition end; structure Isar_Cmd: ISAR_CMD = @@ -376,28 +370,4 @@ end; - -(* markup commands *) - -fun local_theory_markup (loc, txt) = Toplevel.present_local_theory loc (Thy_Output.check_text txt); -val proof_markup = Toplevel.present_proof o Thy_Output.check_text; - -fun reject_target NONE = () - | reject_target (SOME (_, pos)) = - error ("Illegal target specification -- not a theory context" ^ Position.here pos); - -fun header_markup txt = - Toplevel.keep (fn state => - if Toplevel.is_toplevel state then - (legacy_feature "Obsolete 'header' command -- use 'chapter', 'section' etc. 
instead"; - Thy_Output.check_text txt state) - else raise Toplevel.UNDEF); - -fun heading_markup (loc, txt) = - Toplevel.keep (fn state => - if Toplevel.is_toplevel state then (reject_target loc; Thy_Output.check_text txt state) - else raise Toplevel.UNDEF) o - local_theory_markup (loc, txt) o - Toplevel.present_proof (fn state => (reject_target loc; Thy_Output.check_text txt state)); - end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/isar_syn.ML --- a/src/Pure/Isar/isar_syn.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/isar_syn.ML Fri Nov 07 20:43:13 2014 +0100 @@ -10,49 +10,24 @@ (** markup commands **) val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "header"} "theory header" - (Parse.document_source >> Isar_Cmd.header_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "chapter"} "chapter heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "section"} "section heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "subsection"} "subsection heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup); + Outer_Syntax.markup_command Outer_Syntax.Markup_Env + @{command_spec "text"} "formal comment (theory)" + (Parse.opt_target -- Parse.document_source >> Thy_Output.local_theory_markup); val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "subsubsection"} "subsubsection heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.MarkupEnv - @{command_spec "text"} "formal comment (theory)" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); + Outer_Syntax.markup_command Outer_Syntax.Verbatim + @{command_spec "text_raw"} "raw document preparation text" + (Parse.opt_target -- Parse.document_source >> Thy_Output.local_theory_markup); val _ = - Outer_Syntax.markup_command Thy_Output.Verbatim - @{command_spec "text_raw"} "raw document preparation text" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); + Outer_Syntax.markup_command Outer_Syntax.Markup_Env + @{command_spec "txt"} "formal comment (proof)" + (Parse.document_source >> Thy_Output.proof_markup); val _ = - Outer_Syntax.markup_command Thy_Output.MarkupEnv - @{command_spec "txt"} "formal comment (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Verbatim + Outer_Syntax.markup_command Outer_Syntax.Verbatim @{command_spec "txt_raw"} "raw document preparation text (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); + (Parse.document_source >> Thy_Output.proof_markup); @@ -730,11 +705,11 @@ Outer_Syntax.command @{command_spec "help"} "retrieve outer syntax commands according to name patterns" (Scan.repeat Parse.name >> - (fn pats => Toplevel.imperative (fn () => Outer_Syntax.help_syntax pats))); + (fn pats => Toplevel.keep (fn st => Outer_Syntax.help (Toplevel.theory_of st) pats))); val _ = Outer_Syntax.command @{command_spec "print_commands"} "print outer syntax commands" - (Scan.succeed (Toplevel.imperative Outer_Syntax.print_syntax)); + (Scan.succeed (Toplevel.keep (Outer_Syntax.print_commands o Toplevel.theory_of))); val _ = Outer_Syntax.command @{command_spec "print_options"} "print configuration options" diff -r 
a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/keyword.ML --- a/src/Pure/Isar/keyword.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/keyword.ML Fri Nov 07 20:43:13 2014 +0100 @@ -6,68 +6,54 @@ signature KEYWORD = sig - type T - val kind_of: T -> string - val kind_files_of: T -> string * string list - val diag: T - val heading: T - val thy_begin: T - val thy_end: T - val thy_decl: T - val thy_decl_block: T - val thy_load: T - val thy_load_files: string list -> T - val thy_goal: T - val qed: T - val qed_script: T - val qed_block: T - val qed_global: T - val prf_goal: T - val prf_block: T - val prf_open: T - val prf_close: T - val prf_chain: T - val prf_decl: T - val prf_asm: T - val prf_asm_goal: T - val prf_asm_goal_script: T - val prf_script: T - val kinds: T list - val tag: string -> T -> T - val tags_of: T -> string list - val tag_theory: T -> T - val tag_proof: T -> T - val tag_ml: T -> T + val diag: string + val heading: string + val thy_begin: string + val thy_end: string + val thy_decl: string + val thy_decl_block: string + val thy_load: string + val thy_goal: string + val qed: string + val qed_script: string + val qed_block: string + val qed_global: string + val prf_goal: string + val prf_block: string + val prf_open: string + val prf_close: string + val prf_chain: string + val prf_decl: string + val prf_asm: string + val prf_asm_goal: string + val prf_asm_goal_script: string + val prf_script: string type spec = (string * string list) * string list - val spec: spec -> T - val command_spec: (string * spec) * Position.T -> (string * T) * Position.T type keywords val minor_keywords: keywords -> Scan.lexicon val major_keywords: keywords -> Scan.lexicon + val no_command_keywords: keywords -> keywords val empty_keywords: keywords val merge_keywords: keywords * keywords -> keywords - val no_command_keywords: keywords -> keywords - val add: string * T option -> keywords -> keywords - val define: string * T option -> unit - val get_keywords: unit -> keywords - val get_lexicons: unit -> Scan.lexicon * Scan.lexicon - val is_keyword: string -> bool - val command_keyword: string -> T option - val command_files: string -> Path.T -> Path.T list - val command_tags: string -> string list - val is_diag: string -> bool - val is_heading: string -> bool - val is_theory_begin: string -> bool - val is_theory_load: string -> bool - val is_theory: string -> bool - val is_theory_body: string -> bool - val is_proof: string -> bool - val is_proof_body: string -> bool - val is_theory_goal: string -> bool - val is_proof_goal: string -> bool - val is_qed: string -> bool - val is_qed_global: string -> bool - val is_printed: string -> bool + val add_keywords: (string * spec option) list -> keywords -> keywords + val is_keyword: keywords -> string -> bool + val is_command: keywords -> string -> bool + val is_literal: keywords -> string -> bool + val command_files: keywords -> string -> Path.T -> Path.T list + val command_tags: keywords -> string -> string list + val is_diag: keywords -> string -> bool + val is_heading: keywords -> string -> bool + val is_theory_begin: keywords -> string -> bool + val is_theory_load: keywords -> string -> bool + val is_theory: keywords -> string -> bool + val is_theory_body: keywords -> string -> bool + val is_proof: keywords -> string -> bool + val is_proof_body: keywords -> string -> bool + val is_theory_goal: keywords -> string -> bool + val is_proof_goal: keywords -> string -> bool + val is_qed: keywords -> string -> bool + val is_qed_global: keywords -> string -> bool + val 
is_printed: keywords -> string -> bool end; structure Keyword: KEYWORD = @@ -75,44 +61,30 @@ (** keyword classification **) -datatype T = Keyword of - {kind: string, - files: string list, (*extensions of embedded files*) - tags: string list}; (*tags in canonical reverse order*) - -fun kind s = Keyword {kind = s, files = [], tags = []}; -fun kind_of (Keyword {kind, ...}) = kind; -fun kind_files_of (Keyword {kind, files, ...}) = (kind, files); - -fun add_files fs (Keyword {kind, files, tags}) = - Keyword {kind = kind, files = files @ fs, tags = tags}; - - (* kinds *) -val diag = kind "diag"; -val heading = kind "heading"; -val thy_begin = kind "thy_begin"; -val thy_end = kind "thy_end"; -val thy_decl = kind "thy_decl"; -val thy_decl_block = kind "thy_decl_block"; -val thy_load = kind "thy_load"; -fun thy_load_files files = Keyword {kind = "thy_load", files = files, tags = []}; -val thy_goal = kind "thy_goal"; -val qed = kind "qed"; -val qed_script = kind "qed_script"; -val qed_block = kind "qed_block"; -val qed_global = kind "qed_global"; -val prf_goal = kind "prf_goal"; -val prf_block = kind "prf_block"; -val prf_open = kind "prf_open"; -val prf_close = kind "prf_close"; -val prf_chain = kind "prf_chain"; -val prf_decl = kind "prf_decl"; -val prf_asm = kind "prf_asm"; -val prf_asm_goal = kind "prf_asm_goal"; -val prf_asm_goal_script = kind "prf_asm_goal_script"; -val prf_script = kind "prf_script"; +val diag = "diag"; +val heading = "heading"; +val thy_begin = "thy_begin"; +val thy_end = "thy_end"; +val thy_decl = "thy_decl"; +val thy_decl_block = "thy_decl_block"; +val thy_load = "thy_load"; +val thy_goal = "thy_goal"; +val qed = "qed"; +val qed_script = "qed_script"; +val qed_block = "qed_block"; +val qed_global = "qed_global"; +val prf_goal = "prf_goal"; +val prf_block = "prf_block"; +val prf_open = "prf_open"; +val prf_close = "prf_close"; +val prf_chain = "prf_chain"; +val prf_decl = "prf_decl"; +val prf_asm = "prf_asm"; +val prf_asm_goal = "prf_asm_goal"; +val prf_asm_goal_script = "prf_asm_goal_script"; +val prf_script = "prf_script"; val kinds = [diag, heading, thy_begin, thy_end, thy_load, thy_decl, thy_decl_block, thy_goal, @@ -120,34 +92,21 @@ prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_asm_goal_script, prf_script]; -(* tags *) - -fun tag t (Keyword {kind, files, tags}) = - Keyword {kind = kind, files = files, tags = update (op =) t tags}; -fun tags_of (Keyword {tags, ...}) = tags; +(* specifications *) -val tag_theory = tag "theory"; -val tag_proof = tag "proof"; -val tag_ml = tag "ML"; - - -(* external names *) - -val name_table = Symtab.make (map (`kind_of) kinds); +type T = + {kind: string, + files: string list, (*extensions of embedded files*) + tags: string list}; type spec = (string * string list) * string list; -fun spec ((name, files), tags) = - (case Symtab.lookup name_table name of - SOME kind => - let val kind' = kind |> fold tag tags in - if null files then kind' - else if name = kind_of thy_load then kind' |> add_files files - else error ("Illegal specification of files for " ^ quote name) - end - | NONE => error ("Unknown outer syntax keyword kind " ^ quote name)); - -fun command_spec ((name, s), pos) = ((name, spec s), pos); +fun check_spec ((kind, files), tags) : T = + if not (member (op =) kinds kind) then + error ("Unknown outer syntax keyword kind " ^ quote kind) + else if not (null files) andalso kind <> thy_load then + error ("Illegal specification of files for " ^ quote kind) + else {kind = kind, files = files, tags = tags}; @@ -162,7 +121,6 @@ fun 
minor_keywords (Keywords {minor, ...}) = minor; fun major_keywords (Keywords {major, ...}) = major; -fun commands (Keywords {commands, ...}) = commands; fun make_keywords (minor, major, commands) = Keywords {minor = minor, major = major, commands = commands}; @@ -170,6 +128,12 @@ fun map_keywords f (Keywords {minor, major, commands}) = make_keywords (f (minor, major, commands)); +val no_command_keywords = + map_keywords (fn (minor, _, _) => (minor, Scan.empty_lexicon, Symtab.empty)); + + +(* build keywords *) + val empty_keywords = make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty); @@ -181,71 +145,56 @@ Scan.merge_lexicons (major1, major2), Symtab.merge (K true) (commands1, commands2)); -val no_command_keywords = - map_keywords (fn (minor, _, _) => (minor, Scan.empty_lexicon, Symtab.empty)); - - -(* add keywords *) - -fun add (name, opt_kind) = map_keywords (fn (minor, major, commands) => - (case opt_kind of - NONE => - let - val minor' = Scan.extend_lexicon (Symbol.explode name) minor; - in (minor', major, commands) end - | SOME kind => - let - val major' = Scan.extend_lexicon (Symbol.explode name) major; - val commands' = Symtab.update (name, kind) commands; - in (minor, major', commands') end)); +val add_keywords = + fold (fn (name, opt_spec) => map_keywords (fn (minor, major, commands) => + (case opt_spec of + NONE => + let + val minor' = Scan.extend_lexicon (Symbol.explode name) minor; + in (minor', major, commands) end + | SOME spec => + let + val kind = check_spec spec; + val major' = Scan.extend_lexicon (Symbol.explode name) major; + val commands' = Symtab.update (name, kind) commands; + in (minor, major', commands') end))); -(* global state *) - -local val global_keywords = Unsynchronized.ref empty_keywords in - -fun define decl = CRITICAL (fn () => Unsynchronized.change global_keywords (add decl)); -fun get_keywords () = ! 
global_keywords; +(* keyword status *) -end; - -fun get_lexicons () = - let val keywords = get_keywords () - in (minor_keywords keywords, major_keywords keywords) end; - -fun get_commands () = commands (get_keywords ()); +fun is_keyword keywords s = Scan.is_literal (minor_keywords keywords) (Symbol.explode s); +fun is_command (Keywords {commands, ...}) = Symtab.defined commands; +fun is_literal keywords = is_keyword keywords orf is_command keywords; -(* lookup *) +(* command kind *) -fun is_keyword s = - let - val (minor, major) = get_lexicons (); - val syms = Symbol.explode s; - in Scan.is_literal minor syms orelse Scan.is_literal major syms end; +fun command_kind (Keywords {commands, ...}) = Symtab.lookup commands; -fun command_keyword name = Symtab.lookup (get_commands ()) name; - -fun command_files name path = - (case command_keyword name of +fun command_files keywords name path = + (case command_kind keywords name of NONE => [] - | SOME (Keyword {kind, files, ...}) => - if kind <> kind_of thy_load then [] + | SOME {kind, files, ...} => + if kind <> thy_load then [] else if null files then [path] else map (fn ext => Path.ext ext path) files); -val command_tags = these o Option.map tags_of o command_keyword; +fun command_tags keywords name = + (case command_kind keywords name of + SOME {tags, ...} => tags + | NONE => []); (* command categories *) fun command_category ks = - let val tab = Symtab.make_set (map kind_of ks) in - fn name => - (case command_keyword name of + let + val tab = Symtab.make_set ks; + fun pred keywords name = + (case command_kind keywords name of NONE => false - | SOME k => Symtab.defined tab (kind_of k)) - end; + | SOME {kind, ...} => Symtab.defined tab kind); + in pred end; val is_diag = command_category [diag]; @@ -274,7 +223,7 @@ val is_qed = command_category [qed, qed_script, qed_block]; val is_qed_global = command_category [qed_global]; -val is_printed = is_theory_goal orf is_proof; +fun is_printed keywords = is_theory_goal keywords orf is_proof keywords; end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/keyword.scala --- a/src/Pure/Isar/keyword.scala Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/keyword.scala Fri Nov 07 20:43:13 2014 +0100 @@ -63,12 +63,11 @@ val qed_global = Set(QED_GLOBAL) - type Spec = ((String, List[String]), List[String]) - - /** keyword tables **/ + type Spec = ((String, List[String]), List[String]) + object Keywords { def empty: Keywords = new Keywords() @@ -99,13 +98,21 @@ def + (name: String): Keywords = new Keywords(minor + name, major, commands) def + (name: String, kind: String): Keywords = this + (name, (kind, Nil)) - def + (name: String, kind: (String, List[String])): Keywords = + def + (name: String, kind_tags: (String, List[String])): Keywords = { val major1 = major + name - val commands1 = commands + (name -> kind) + val commands1 = commands + (name -> kind_tags) new Keywords(minor, major1, commands1) } + def add_keywords(header: Thy_Header.Keywords): Keywords = + (this /: header) { + case (keywords, (name, None, _)) => + keywords + Symbol.decode(name) + Symbol.encode(name) + case (keywords, (name, Some((kind_tags, _)), _)) => + keywords + (Symbol.decode(name), kind_tags) + (Symbol.encode(name), kind_tags) + } + /* command kind */ diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/outer_syntax.ML --- a/src/Pure/Isar/outer_syntax.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/outer_syntax.ML Fri Nov 07 20:43:13 2014 +0100 @@ -1,23 +1,19 @@ (* Title: Pure/Isar/outer_syntax.ML Author: Markus Wenzel, TU 
Muenchen -The global Isabelle/Isar outer syntax. - -Note: the syntax for files is statically determined at the very -beginning; for interactive processing it may change dynamically. +Isabelle/Isar outer syntax. *) signature OUTER_SYNTAX = sig - type syntax - val batch_mode: bool Unsynchronized.ref - val is_markup: syntax -> Thy_Output.markup -> string -> bool - val get_syntax: unit -> Keyword.keywords * syntax - val check_syntax: unit -> unit - type command_spec = (string * Keyword.T) * Position.T + datatype markup = Markup | Markup_Env | Verbatim + val is_markup: theory -> markup -> string -> bool + val help: theory -> string list -> unit + val print_commands: theory -> unit + type command_spec = string * Position.T val command: command_spec -> string -> (Toplevel.transition -> Toplevel.transition) parser -> unit - val markup_command: Thy_Output.markup -> command_spec -> string -> + val markup_command: markup -> command_spec -> string -> (Toplevel.transition -> Toplevel.transition) parser -> unit val local_theory': command_spec -> string -> (bool -> local_theory -> local_theory) parser -> unit @@ -27,14 +23,12 @@ (bool -> local_theory -> Proof.state) parser -> unit val local_theory_to_proof: command_spec -> string -> (local_theory -> Proof.state) parser -> unit - val help_syntax: string list -> unit - val print_syntax: unit -> unit val scan: Keyword.keywords -> Position.T -> string -> Token.T list - val parse: Keyword.keywords * syntax -> Position.T -> string -> Toplevel.transition list + val parse: theory -> Position.T -> string -> Toplevel.transition list + val parse_tokens: theory -> Token.T list -> Toplevel.transition list val parse_spans: Token.T list -> Command_Span.span list val side_comments: Token.T list -> Token.T list - val command_reports: syntax -> Token.T -> Position.report_text list - val read_spans: syntax -> Token.T list -> Toplevel.transition list + val command_reports: theory -> Token.T -> Position.report_text list end; structure Outer_Syntax: OUTER_SYNTAX = @@ -42,18 +36,33 @@ (** outer syntax **) +(* errors *) + +fun err_command msg name ps = + error (msg ^ quote (Markup.markup Markup.keyword1 name) ^ Position.here_list ps); + +fun err_dup_command name ps = + err_command "Duplicate outer syntax command " name ps; + + (* command parsers *) +datatype markup = Markup | Markup_Env | Verbatim; + datatype command = Command of {comment: string, - markup: Thy_Output.markup option, + markup: markup option, parse: (Toplevel.transition -> Toplevel.transition) parser, pos: Position.T, id: serial}; +fun eq_command (Command {id = id1, ...}, Command {id = id2, ...}) = id1 = id2; + fun new_command comment markup parse pos = Command {comment = comment, markup = markup, parse = parse, pos = pos, id = serial ()}; +fun command_pos (Command {pos, ...}) = pos; + fun command_markup def (name, Command {pos, id, ...}) = Markup.properties (Position.entity_properties_of def id pos) (Markup.entity Markup.commandN name); @@ -69,95 +78,104 @@ datatype syntax = Syntax of {commands: command Symtab.table, - markups: (string * Thy_Output.markup) list}; + markups: (string * markup) list}; -fun make_syntax commands markups = +fun make_syntax (commands, markups) = Syntax {commands = commands, markups = markups}; -val empty_syntax = make_syntax Symtab.empty []; +structure Data = Theory_Data +( + type T = syntax; + val empty = make_syntax (Symtab.empty, []); + val extend = I; + fun merge (Syntax {commands = commands1, markups = markups1}, + Syntax {commands = commands2, markups = markups2}) = + let + val 
commands' = (commands1, commands2) + |> Symtab.join (fn name => fn (cmd1, cmd2) => + if eq_command (cmd1, cmd2) then raise Symtab.SAME + else err_dup_command name [command_pos cmd1, command_pos cmd2]); + val markups' = AList.merge (op =) (K true) (markups1, markups2); + in make_syntax (commands', markups') end; +); -fun map_commands f (Syntax {commands, ...}) = +(* inspect syntax *) + +val get_syntax = Data.get; + +val dest_commands = + get_syntax #> (fn Syntax {commands, ...} => commands |> Symtab.dest |> sort_wrt #1); + +val lookup_commands = + get_syntax #> (fn Syntax {commands, ...} => Symtab.lookup commands); + +val is_markup = + get_syntax #> (fn Syntax {markups, ...} => fn kind => fn name => + AList.lookup (op =) markups name = SOME kind); + +fun help thy pats = + dest_commands thy + |> filter (fn (name, _) => forall (fn pat => match_string pat name) pats) + |> map pretty_command + |> Pretty.writeln_chunks; + +fun print_commands thy = let - val commands' = f commands; + val keywords = Thy_Header.get_keywords thy; + val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords); + val commands = dest_commands thy; + in + [Pretty.strs ("keywords:" :: map quote minor), + Pretty.big_list "commands:" (map pretty_command commands)] + |> Pretty.writeln_chunks + end; + + +(* build syntax *) + +fun add_command name cmd thy = thy |> Data.map (fn Syntax {commands, ...} => + let + val keywords = Thy_Header.get_keywords thy; + val _ = + Keyword.is_command keywords name orelse + err_command "Undeclared outer syntax command " name [command_pos cmd]; + + val _ = + (case Symtab.lookup commands name of + NONE => () + | SOME cmd' => err_dup_command name [command_pos cmd, command_pos cmd']); + + val _ = + Context_Position.report_generic (ML_Context.the_generic_context ()) + (command_pos cmd) (command_markup true (name, cmd)); + + val commands' = Symtab.update (name, cmd) commands; val markups' = Symtab.fold (fn (name, Command {markup = SOME m, ...}) => cons (name, m) | _ => I) commands' []; - in make_syntax commands' markups' end; - -fun dest_commands (Syntax {commands, ...}) = - commands |> Symtab.dest |> sort_wrt #1; + in make_syntax (commands', markups') end); -fun lookup_commands (Syntax {commands, ...}) = Symtab.lookup commands; - -fun is_markup (Syntax {markups, ...}) kind name = - AList.lookup (op =) markups name = SOME kind; - +val _ = Theory.setup (Theory.at_end (fn thy => + let + val keywords = Thy_Header.get_keywords thy; + val major = Keyword.major_keywords keywords; + val _ = + (case subtract (op =) (map #1 (dest_commands thy)) (Scan.dest_lexicon major) of + [] => () + | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing)) + in NONE end)); -(** global outer syntax **) - -type command_spec = (string * Keyword.T) * Position.T; +(* implicit theory setup *) -val batch_mode = Unsynchronized.ref false; - -local - -(*synchronized wrt. 
Keywords*) -val global_syntax = Unsynchronized.ref empty_syntax; +type command_spec = string * Position.T; -fun add_command (name, kind) cmd = CRITICAL (fn () => - let - val context = ML_Context.the_generic_context (); - val thy = Context.theory_of context; - val Command {pos, ...} = cmd; - val command_name = quote (Markup.markup Markup.keyword1 name); - val _ = - (case try (Thy_Header.the_keyword thy) name of - SOME spec => - if Option.map #1 spec = SOME (Keyword.kind_files_of kind) then () - else error ("Inconsistent outer syntax keyword declaration " ^ - command_name ^ Position.here pos) - | NONE => - if Context.theory_name thy = Context.PureN - then Keyword.define (name, SOME kind) - else error ("Undeclared outer syntax command " ^ command_name ^ Position.here pos)); - val _ = Context_Position.report_generic context pos (command_markup true (name, cmd)); - in - Unsynchronized.change global_syntax (map_commands (fn commands => - (if not (Symtab.defined commands name) then () - else if ! batch_mode then - error ("Attempt to redefine outer syntax command " ^ command_name) - else - warning ("Redefining outer syntax command " ^ command_name ^ - Position.here (Position.thread_data ())); - Symtab.update (name, cmd) commands))) - end); +fun command (name, pos) comment parse = + Theory.setup (add_command name (new_command comment NONE parse pos)); -in - -fun get_syntax () = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax)); - -fun check_syntax () = - let - val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax)); - val major = Keyword.major_keywords keywords; - in - (case subtract (op =) (map #1 (dest_commands syntax)) (Scan.dest_lexicon major) of - [] => () - | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing)) - end; - -fun command (spec, pos) comment parse = - add_command spec (new_command comment NONE parse pos); - -fun markup_command markup (spec, pos) comment parse = - add_command spec (new_command comment (SOME markup) parse pos); - -end; - - -(* local_theory commands *) +fun markup_command markup (name, pos) comment parse = + Theory.setup (add_command name (new_command comment (SOME markup) parse pos)); fun local_theory_command trans command_spec comment parse = command command_spec comment (Parse.opt_target -- parse >> (fn (loc, f) => trans loc f)); @@ -168,60 +186,10 @@ val local_theory_to_proof = local_theory_command Toplevel.local_theory_to_proof; -(* inspect syntax *) - -fun help_syntax pats = - dest_commands (#2 (get_syntax ())) - |> filter (fn (name, _) => forall (fn pat => match_string pat name) pats) - |> map pretty_command - |> Pretty.writeln_chunks; - -fun print_syntax () = - let - val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), #2 (get_syntax ()))); - val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords); - val commands = dest_commands syntax; - in - [Pretty.strs ("keywords:" :: map quote minor), - Pretty.big_list "commands:" (map pretty_command commands)] - |> Pretty.writeln_chunks - end; - - (** toplevel parsing **) -(* parse commands *) - -local - -fun parse_command syntax = - Parse.position Parse.command_ :|-- (fn (name, pos) => - let - val tr0 = Toplevel.empty |> Toplevel.name name |> Toplevel.position pos; - in - (case lookup_commands syntax name of - SOME (Command {parse, ...}) => Parse.!!! 
(Parse.tags |-- parse) >> (fn f => f tr0) - | NONE => - Scan.succeed - (tr0 |> Toplevel.imperative (fn () => - error ("Bad parser for outer syntax command " ^ quote name ^ Position.here pos)))) - end); - -val parse_cmt = Parse.$$$ "--" -- Parse.!!! Parse.document_source; - -in - -fun commands_source syntax = - Token.source_proper #> - Source.source Token.stopper (Scan.bulk (parse_cmt >> K NONE || Parse.not_eof >> SOME)) #> - Source.map_filter I #> - Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command syntax) xs)); - -end; - - -(* off-line scanning/parsing *) +(* scan tokens *) fun scan keywords pos = Source.of_string #> @@ -229,11 +197,45 @@ Token.source keywords pos #> Source.exhaust; -fun parse (keywords, syntax) pos str = + +(* parse commands *) + +local + +fun parse_command thy = + Parse.position Parse.command_ :|-- (fn (name, pos) => + let + val tr0 = Toplevel.empty |> Toplevel.name name |> Toplevel.position pos; + in + (case lookup_commands thy name of + SOME (Command {parse, ...}) => Parse.!!! (Parse.tags |-- parse) >> (fn f => f tr0) + | NONE => + Scan.succeed + (tr0 |> Toplevel.imperative (fn () => err_command "Undefined command " name [pos]))) + end); + +val parse_cmt = Parse.$$$ "--" -- Parse.!!! Parse.document_source; + +in + +fun commands_source thy = + Token.source_proper #> + Source.source Token.stopper (Scan.bulk (parse_cmt >> K NONE || Parse.not_eof >> SOME)) #> + Source.map_filter I #> + Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command thy) xs)); + +end; + +fun parse thy pos str = Source.of_string str |> Symbol.source - |> Token.source keywords pos - |> commands_source syntax + |> Token.source (Thy_Header.get_keywords thy) pos + |> commands_source thy + |> Source.exhaust; + +fun parse_tokens thy toks = + Source.of_list toks + |> commands_source thy |> Source.exhaust; @@ -280,19 +282,14 @@ (* read commands *) -fun command_reports syntax tok = +fun command_reports thy tok = if Token.is_command tok then let val name = Token.content_of tok in - (case lookup_commands syntax name of + (case lookup_commands thy name of NONE => [] | SOME cmd => [((Token.pos_of tok, command_markup false (name, cmd)), "")]) end else []; -fun read_spans syntax toks = - Source.of_list toks - |> commands_source syntax - |> Source.exhaust; - end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Isar/toplevel.ML --- a/src/Pure/Isar/toplevel.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Isar/toplevel.ML Fri Nov 07 20:43:13 2014 +0100 @@ -88,7 +88,7 @@ val reset_proof: state -> state option type result val join_results: result -> (transition * state) list - val element_result: transition Thy_Syntax.element -> state -> result * state + val element_result: Keyword.keywords -> transition Thy_Syntax.element -> state -> result * state end; structure Toplevel: TOPLEVEL = @@ -681,10 +681,10 @@ NONE => Goal.future_enabled 2 | SOME t => Goal.future_enabled_timing t))); -fun atom_result tr st = +fun atom_result keywords tr st = let val st' = - if Goal.future_enabled 1 andalso Keyword.is_diag (name_of tr) then + if Goal.future_enabled 1 andalso Keyword.is_diag keywords (name_of tr) then (Execution.fork {name = "Toplevel.diag", pos = pos_of tr, pri = priority (timing_estimate true (Thy_Syntax.atom tr))} @@ -694,10 +694,10 @@ in -fun element_result (Thy_Syntax.Element (tr, NONE)) st = atom_result tr st - | element_result (elem as Thy_Syntax.Element (head_tr, SOME element_rest)) st = +fun element_result keywords (Thy_Syntax.Element (tr, NONE)) st = atom_result keywords tr 
st + | element_result keywords (elem as Thy_Syntax.Element (head_tr, SOME element_rest)) st = let - val (head_result, st') = atom_result head_tr st; + val (head_result, st') = atom_result keywords head_tr st; val (body_elems, end_tr) = element_rest; val estimate = timing_estimate false elem; in @@ -705,7 +705,7 @@ then let val proof_trs = maps Thy_Syntax.flat_element body_elems @ [end_tr]; - val (proof_results, st'') = fold_map atom_result proof_trs st'; + val (proof_results, st'') = fold_map (atom_result keywords) proof_trs st'; in (Result_List (head_result :: proof_results), st'') end else let @@ -721,7 +721,7 @@ val prf' = Proof_Node.apply (K state) prf; val (result, result_state) = State (SOME (Proof (prf', (finish, orig_gthy))), prev) - |> fold_map element_result body_elems ||> command end_tr; + |> fold_map (element_result keywords) body_elems ||> command end_tr; in (Result_List result, presentation_context_of result_state) end)) #> (fn (res, state') => state' |> put_result (Result_Future res)); diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ML/ml_antiquotations.ML --- a/src/Pure/ML/ml_antiquotations.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/ML/ml_antiquotations.ML Fri Nov 07 20:43:13 2014 +0100 @@ -233,30 +233,17 @@ (* outer syntax *) -fun with_keyword f = - Args.theory -- Scan.lift (Parse.position Parse.string) >> (fn (thy, (name, pos)) => - (f ((name, Thy_Header.the_keyword thy name), pos) - handle ERROR msg => error (msg ^ Position.here pos))); - val _ = Theory.setup (ML_Antiquotation.value @{binding keyword} - (with_keyword - (fn ((name, NONE), _) => "Parse.$$$ " ^ ML_Syntax.print_string name - | ((name, SOME _), pos) => - error ("Expected minor keyword " ^ quote name ^ Position.here pos))) #> + (Args.theory -- Scan.lift (Parse.position Parse.string) >> (fn (thy, (name, pos)) => + if Keyword.is_keyword (Thy_Header.get_keywords thy) name then + "Parse.$$$ " ^ ML_Syntax.print_string name + else error ("Bad outer syntax keyword " ^ quote name ^ Position.here pos))) #> ML_Antiquotation.value @{binding command_spec} - (with_keyword - (fn ((name, SOME kind), pos) => - "Keyword.command_spec " ^ ML_Syntax.atomic - ((ML_Syntax.print_pair - (ML_Syntax.print_pair ML_Syntax.print_string - (ML_Syntax.print_pair - (ML_Syntax.print_pair ML_Syntax.print_string - (ML_Syntax.print_list ML_Syntax.print_string)) - (ML_Syntax.print_list ML_Syntax.print_string))) - ML_Syntax.print_position) ((name, kind), pos)) - | ((name, NONE), pos) => - error ("Expected command keyword " ^ quote name ^ Position.here pos)))); + (Args.theory -- Scan.lift (Parse.position Parse.string) >> (fn (thy, (name, pos)) => + if Keyword.is_command (Thy_Header.get_keywords thy) name then + ML_Syntax.print_pair ML_Syntax.print_string ML_Syntax.print_position (name, pos) + else error ("Bad outer syntax command " ^ quote name ^ Position.here pos)))); end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ML/ml_context.ML --- a/src/Pure/ML/ml_context.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/ML/ml_context.ML Fri Nov 07 20:43:13 2014 +0100 @@ -116,12 +116,12 @@ then ((begin_env0, map (fn Antiquote.Text tok => tok) ants), opt_ctxt) else let - val keywords = Keyword.get_keywords (); fun no_decl _ = ([], []); fun expand (Antiquote.Text tok) ctxt = (K ([], [tok]), ctxt) | expand (Antiquote.Antiq (ss, {range, ...})) ctxt = let + val keywords = Thy_Header.get_keywords' ctxt; val (decl, ctxt') = apply_antiquotation (Token.read_antiq keywords antiq (ss, #1 range)) ctxt; val decl' = decl #> pairself (ML_Lex.tokenize #> map 
(ML_Lex.set_range range)); diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ML/ml_file.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/ML/ml_file.ML Fri Nov 07 20:43:13 2014 +0100 @@ -0,0 +1,25 @@ +(* Title: Pure/ML/ml_file.ML + Author: Makarius + +The 'ML_file' command. +*) + +structure ML_File: sig end = +struct + +val _ = + Outer_Syntax.command ("ML_file", @{here}) "ML text from file" + (Resources.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy => + let + val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy); + val provide = Resources.provide (src_path, digest); + val source = {delimited = true, text = cat_lines lines, pos = pos}; + val flags = {SML = false, exchange = false, redirect = true, verbose = true}; + in + gthy + |> ML_Context.exec (fn () => ML_Context.eval_source flags source) + |> Local_Theory.propagate_ml_env + |> Context.mapping provide (Local_Theory.background_theory provide) + end))); + +end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ML/ml_lex.ML --- a/src/Pure/ML/ml_lex.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/ML/ml_lex.ML Fri Nov 07 20:43:13 2014 +0100 @@ -44,7 +44,7 @@ "type", "val", "where", "while", "with", "withtype"]; val keywords2 = - ["case", "do", "else", "end", "if", "in", "let", "local", "of", + ["and", "case", "do", "else", "end", "if", "in", "let", "local", "of", "sig", "struct", "then", "while", "with"]; val keywords3 = diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ML/ml_lex.scala --- a/src/Pure/ML/ml_lex.scala Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/ML/ml_lex.scala Fri Nov 07 20:43:13 2014 +0100 @@ -26,8 +26,8 @@ "with", "withtype") val keywords2: Set[String] = - Set("case", "do", "else", "end", "if", "in", "let", "local", "of", - "sig", "struct", "then", "while", "with") + Set("and", "case", "do", "else", "end", "if", "in", "let", "local", + "of", "sig", "struct", "then", "while", "with") val keywords3: Set[String] = Set("handle", "open", "raise") diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/command.ML --- a/src/Pure/PIDE/command.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/command.ML Fri Nov 07 20:43:13 2014 +0100 @@ -8,19 +8,22 @@ sig type blob = (string * (SHA1.digest * string list) option) Exn.result val read_file: Path.T -> Position.T -> Path.T -> Token.file - val read: (unit -> theory) -> Path.T -> blob list -> Token.T list -> Toplevel.transition + val read_thy: Toplevel.state -> theory + val read: Keyword.keywords -> theory -> Path.T-> (unit -> theory) -> + blob list -> Token.T list -> Toplevel.transition type eval val eval_eq: eval * eval -> bool val eval_running: eval -> bool val eval_finished: eval -> bool val eval_result_state: eval -> Toplevel.state - val eval: (unit -> theory) -> Path.T -> blob list -> Token.T list -> eval -> eval + val eval: Keyword.keywords -> Path.T -> (unit -> theory) -> + blob list -> Token.T list -> eval -> eval type print - val print: bool -> (string * string list) list -> string -> + val print: bool -> (string * string list) list -> Keyword.keywords -> string -> eval -> print list -> print list option type print_fn = Toplevel.transition -> Toplevel.state -> unit type print_function = - {command_name: string, args: string list, exec_id: Document_ID.exec} -> + {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option val print_function: string -> print_function -> unit val no_print_function: string -> 
unit @@ -120,10 +123,10 @@ | SOME exec_id => Position.put_id exec_id); in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end -fun resolve_files master_dir blobs toks = +fun resolve_files keywords master_dir blobs toks = (case Outer_Syntax.parse_spans toks of [span] => span - |> Command_Span.resolve_files (fn cmd => fn (path, pos) => + |> Command_Span.resolve_files keywords (fn cmd => fn (path, pos) => let fun make_file src_path (Exn.Res (file_node, NONE)) = Exn.interruptible_capture (fn () => @@ -132,7 +135,7 @@ (Position.reports [(pos, Markup.language_path), (pos, Markup.path file_node)]; Exn.Res (blob_file src_path lines digest file_node)) | make_file _ (Exn.Exn e) = Exn.Exn e; - val src_paths = Keyword.command_files cmd path; + val src_paths = Keyword.command_files keywords cmd path; in if null blobs then map2 make_file src_paths (map (K (Exn.Res ("", NONE))) src_paths) @@ -143,12 +146,15 @@ |> Command_Span.content | _ => toks); +val bootstrap_thy = ML_Context.the_global_context (); + in -fun read init master_dir blobs span = +fun read_thy st = Toplevel.theory_of st handle Toplevel.UNDEF => bootstrap_thy; + +fun read keywords thy master_dir init blobs span = let - val syntax = #2 (Outer_Syntax.get_syntax ()); - val command_reports = Outer_Syntax.command_reports syntax; + val command_reports = Outer_Syntax.command_reports thy; val proper_range = Token.range_of (#1 (take_suffix Token.is_improper span)); val pos = @@ -161,7 +167,7 @@ in if is_malformed then Toplevel.malformed pos "Malformed command syntax" else - (case Outer_Syntax.read_spans syntax (resolve_files master_dir blobs span) of + (case Outer_Syntax.parse_tokens thy (resolve_files keywords master_dir blobs span) of [tr] => Toplevel.modify_init init tr | [] => Toplevel.ignored (#1 (Token.range_of span)) | _ => Toplevel.malformed (#1 proper_range) "Exactly one command expected") @@ -191,12 +197,12 @@ local -fun reset_state tr st0 = Toplevel.setmp_thread_position tr (fn () => +fun reset_state keywords tr st0 = Toplevel.setmp_thread_position tr (fn () => let val name = Toplevel.name_of tr; val res = - if Keyword.is_theory_body name then Toplevel.reset_theory st0 - else if Keyword.is_proof name then Toplevel.reset_proof st0 + if Keyword.is_theory_body keywords name then Toplevel.reset_theory st0 + else if Keyword.is_proof keywords name then Toplevel.reset_proof st0 else NONE; in (case res of @@ -204,8 +210,8 @@ | SOME st => (Output.error_message (Toplevel.type_error tr st0 ^ " -- using reset state"); st)) end) (); -fun run int tr st = - if Goal.future_enabled 1 andalso Keyword.is_diag (Toplevel.name_of tr) then +fun run keywords int tr st = + if Goal.future_enabled 1 andalso Keyword.is_diag keywords (Toplevel.name_of tr) then (Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1} (fn () => Toplevel.command_exception int tr st); ([], SOME st)) else Toplevel.command_errors int tr st; @@ -230,7 +236,7 @@ SOME prf => status tr (Proof.status_markup prf) | NONE => ()); -fun eval_state span tr ({malformed, state, ...}: eval_state) = +fun eval_state keywords span tr ({malformed, state, ...}: eval_state) = if malformed then {failed = true, malformed = malformed, command = tr, state = Toplevel.toplevel} else @@ -238,10 +244,10 @@ val _ = Multithreading.interrupted (); val malformed' = Toplevel.is_malformed tr; - val st = reset_state tr state; + val st = reset_state keywords tr state; val _ = status tr Markup.running; - val (errs1, result) = run true tr st; + val (errs1, result) = run keywords true 
tr st; val errs2 = (case result of NONE => [] | SOME st' => check_cmts span tr st'); val errs = errs1 @ errs2; val _ = List.app (Future.error_message (Toplevel.pos_of tr)) errs; @@ -262,15 +268,17 @@ in -fun eval init master_dir blobs span eval0 = +fun eval keywords master_dir init blobs span eval0 = let val exec_id = Document_ID.make (); fun process () = let + val eval_state0 = eval_result eval0; + val thy = read_thy (#state eval_state0); val tr = Position.setmp_thread_data (Position.id_only (Document_ID.print exec_id)) - (fn () => read init master_dir blobs span |> Toplevel.exec_id exec_id) (); - in eval_state span tr (eval_result eval0) end; + (fn () => read keywords thy master_dir init blobs span |> Toplevel.exec_id exec_id) (); + in eval_state keywords span tr eval_state0 end; in Eval {exec_id = exec_id, eval_process = memo exec_id process} end; end; @@ -288,7 +296,7 @@ type print_fn = Toplevel.transition -> Toplevel.state -> unit; type print_function = - {command_name: string, args: string list, exec_id: Document_ID.exec} -> + {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} -> {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option; local @@ -310,7 +318,7 @@ in -fun print command_visible command_overlays command_name eval old_prints = +fun print command_visible command_overlays keywords command_name eval old_prints = let val print_functions = Synchronized.value print_functions; @@ -338,7 +346,11 @@ fun new_print name args get_pr = let - val params = {command_name = command_name, args = args, exec_id = eval_exec_id eval}; + val params = + {keywords = keywords, + command_name = command_name, + args = args, + exec_id = eval_exec_id eval}; in (case Exn.capture (Runtime.controlled_execution NONE get_pr) params of Exn.Res NONE => NONE @@ -385,8 +397,8 @@ val _ = print_function "print_state" - (fn {command_name, ...} => - if Keyword.is_printed command_name then + (fn {keywords, command_name, ...} => + if Keyword.is_printed keywords command_name then SOME {delay = NONE, pri = 1, persistent = false, strict = true, print_fn = fn _ => fn st => if Toplevel.is_proof st then Toplevel.print_state st else ()} else NONE); diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/command_span.ML --- a/src/Pure/PIDE/command_span.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/command_span.ML Fri Nov 07 20:43:13 2014 +0100 @@ -10,7 +10,8 @@ datatype span = Span of kind * Token.T list val kind: span -> kind val content: span -> Token.T list - val resolve_files: (string -> Path.T * Position.T -> Token.file Exn.result list) -> span -> span + val resolve_files: Keyword.keywords -> + (string -> Path.T * Position.T -> Token.file Exn.result list) -> span -> span end; structure Command_Span: COMMAND_SPAN = @@ -49,10 +50,10 @@ in -fun resolve_files read_files span = +fun resolve_files keywords read_files span = (case span of Span (Command_Span (cmd, pos), toks) => - if Keyword.is_theory_load cmd then + if Keyword.is_theory_load keywords cmd then (case find_file (clean_tokens toks) of NONE => error ("Bad file argument of command " ^ quote cmd ^ Position.here pos) | SOME (i, path) => diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/document.ML --- a/src/Pure/PIDE/document.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/document.ML Fri Nov 07 20:43:13 2014 +0100 @@ -8,6 +8,7 @@ signature DOCUMENT = sig val timing: bool Unsynchronized.ref + val init_keywords: unit -> unit type node_header = string * Thy_Header.header * 
string list type overlay = Document_ID.command * (string * string list) datatype node_edit = @@ -37,6 +38,20 @@ +(** global keywords **) + +val global_keywords = Synchronized.var "global_keywords" Keyword.empty_keywords; + +fun init_keywords () = + Synchronized.change global_keywords + (fn _ => + fold (curry Keyword.merge_keywords o Thy_Header.get_keywords o Thy_Info.get_theory) + (Thy_Info.get_names ()) Keyword.empty_keywords); + +fun get_keywords () = Synchronized.value global_keywords; + + + (** document structure **) fun err_dup kind id = error ("Duplicate " ^ kind ^ ": " ^ Document_ID.print id); @@ -208,7 +223,7 @@ let val imports = map fst (#imports header); val errors1 = - (Thy_Header.define_keywords header; errors) + (Synchronized.change global_keywords (Keyword.add_keywords (#keywords header)); errors) handle ERROR msg => errors @ [msg]; val nodes1 = nodes |> default_node name @@ -318,7 +333,7 @@ val span = Lazy.lazy (fn () => Position.setmp_thread_data (Position.id_only id) - (fn () => Outer_Syntax.scan (Keyword.get_keywords ()) (Position.id id) text) ()); + (fn () => Outer_Syntax.scan (get_keywords ()) (Position.id id) text) ()); val _ = Position.setmp_thread_data (Position.id_only id) (fn () => Output.status (Markup.markup_only Markup.accepted)) (); @@ -466,7 +481,7 @@ is_some (loaded_theory name) orelse can get_header node andalso (not full orelse is_some (get_result node)); -fun last_common state node_required node0 node = +fun last_common keywords state node_required node0 node = let fun update_flags prev (visible, initial) = let @@ -474,7 +489,8 @@ val initial' = initial andalso (case prev of NONE => true - | SOME command_id => not (Keyword.is_theory_begin (the_command_name state command_id))); + | SOME command_id => + not (Keyword.is_theory_begin keywords (the_command_name state command_id))); in (visible', initial') end; fun get_common ((prev, command_id), opt_exec) (_, ok, flags, assign_update) = @@ -495,7 +511,9 @@ val command_overlays = overlays node command_id; val command_name = the_command_name state command_id; in - (case Command.print command_visible command_overlays command_name eval prints of + (case + Command.print command_visible command_overlays keywords command_name eval prints + of SOME prints' => assign_update_new (command_id, SOME (eval, prints')) | NONE => I) end @@ -513,7 +531,7 @@ fun illegal_init _ = error "Illegal theory header after end of theory"; -fun new_exec state node proper_init command_id' (assign_update, command_exec, init) = +fun new_exec keywords state node proper_init command_id' (assign_update, command_exec, init) = if not proper_init andalso is_none init then NONE else let @@ -526,13 +544,14 @@ val span = Lazy.force span0; val eval' = - Command.eval (fn () => the_default illegal_init init span) - (master_directory node) blobs span eval; - val prints' = perhaps (Command.print command_visible command_overlays command_name eval') []; + Command.eval keywords (master_directory node) + (fn () => the_default illegal_init init span) blobs span eval; + val prints' = + perhaps (Command.print command_visible command_overlays keywords command_name eval') []; val exec' = (eval', prints'); val assign_update' = assign_update_new (command_id', SOME exec') assign_update; - val init' = if Keyword.is_theory_begin command_name then NONE else init; + val init' = if Keyword.is_theory_begin keywords command_name then NONE else init; in SOME (assign_update', (command_id', (eval', prints')), init') end; fun removed_execs node0 (command_id, exec_ids) = @@ -558,6 
+577,7 @@ deps = map (Future.task_of o #2) deps, pri = 1, interrupts = false} (fn () => timeit ("Document.update " ^ name) (fn () => let + val keywords = get_keywords (); val imports = map (apsnd Future.join) deps; val imports_result_changed = exists (#4 o #1 o #2) imports; val node_required = Symtab.defined required name; @@ -574,7 +594,7 @@ val (print_execs, common, (still_visible, initial)) = if imports_result_changed then (assign_update_empty, NONE, (true, true)) - else last_common state node_required node0 node; + else last_common keywords state node_required node0 node; val common_command_exec = the_default_entry node common; val (updated_execs, (command_id', (eval', _)), _) = @@ -583,7 +603,7 @@ iterate_entries_after common (fn ((prev, id), _) => fn res => if not node_required andalso prev = visible_last node then NONE - else new_exec state node proper_init id res) node; + else new_exec keywords state node proper_init id res) node; val assigned_execs = (node0, updated_execs) |-> iterate_entries_after common diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/protocol.ML --- a/src/Pure/PIDE/protocol.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/protocol.ML Fri Nov 07 20:43:13 2014 +0100 @@ -23,6 +23,10 @@ end); val _ = + Isabelle_Process.protocol_command "Document.init_keywords" + (fn [] => Document.init_keywords ()); + +val _ = Isabelle_Process.protocol_command "Document.define_blob" (fn [digest, content] => Document.change_state (Document.define_blob digest content)); diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/resources.ML Fri Nov 07 20:43:13 2014 +0100 @@ -82,9 +82,10 @@ (case Token.get_files tok of [] => let + val keywords = Thy_Header.get_keywords thy; val master_dir = master_directory thy; val pos = Token.pos_of tok; - val src_paths = Keyword.command_files cmd (Path.explode name); + val src_paths = Keyword.command_files keywords cmd (Path.explode name); in map (Command.read_file master_dir pos) src_paths end | files => map Exn.release files)); @@ -121,19 +122,19 @@ fun begin_theory master_dir {name, imports, keywords} parents = Theory.begin_theory name parents |> put_deps master_dir imports - |> fold Thy_Header.declare_keyword keywords; + |> Thy_Header.add_keywords keywords; -fun excursion master_dir last_timing init elements = +fun excursion keywords master_dir last_timing init elements = let - fun prepare_span span = + fun prepare_span st span = Command_Span.content span - |> Command.read init master_dir [] + |> Command.read keywords (Command.read_thy st) master_dir init [] |> (fn tr => Toplevel.put_timing (last_timing tr) tr); fun element_result span_elem (st, _) = let - val elem = Thy_Syntax.map_element prepare_span span_elem; - val (results, st') = Toplevel.element_result elem st; + val elem = Thy_Syntax.map_element (prepare_span st) span_elem; + val (results, st') = Toplevel.element_result keywords elem st; val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem); in (results, (st', pos')) end; @@ -145,13 +146,14 @@ fun load_thy document last_timing update_time master_dir header text_pos text parents = let - val {name = (name, _), ...} = header; - val _ = Thy_Header.define_keywords header; - val keywords = Keyword.get_keywords (); + val (name, _) = #name header; + val keywords = + fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents + (Keyword.add_keywords (#keywords header) Keyword.empty_keywords); val toks = Outer_Syntax.scan keywords 
text_pos text; val spans = Outer_Syntax.parse_spans toks; - val elements = Thy_Syntax.parse_elements spans; + val elements = Thy_Syntax.parse_elements keywords spans; fun init () = begin_theory master_dir header parents @@ -160,20 +162,16 @@ val (results, thy) = cond_timeit true ("theory " ^ quote name) - (fn () => excursion master_dir last_timing init elements); + (fn () => excursion keywords master_dir last_timing init elements); fun present () = let val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results); - val (keywords, syntax) = Outer_Syntax.get_syntax (); in if exists (Toplevel.is_skipped_proof o #2) res then warning ("Cannot present theory with skipped proofs: " ^ quote name) else - let val tex_source = - Thy_Output.present_thy keywords Keyword.command_tags - (Outer_Syntax.is_markup syntax) res toks - |> Buffer.content; + let val tex_source = Thy_Output.present_thy thy res toks |> Buffer.content; in if document then Present.theory_output name tex_source else () end end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/session.ML --- a/src/Pure/PIDE/session.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/session.ML Fri Nov 07 20:43:13 2014 +0100 @@ -54,7 +54,6 @@ (Execution.shutdown (); Thy_Info.finish (); Present.finish (); - Outer_Syntax.check_syntax (); Future.shutdown (); Event_Timer.shutdown (); Future.shutdown (); diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/PIDE/session.scala --- a/src/Pure/PIDE/session.scala Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/PIDE/session.scala Fri Nov 07 20:43:13 2014 +0100 @@ -610,6 +610,12 @@ def update_options(options: Options) { manager.send_wait(Update_Options(options)) } + def init_options(options: Options) + { + update_options(options) + protocol_command("Document.init_keywords") + } + def dialog_result(id: Document_ID.Generic, serial: Long, result: String) { manager.send(Session.Dialog_Result(id, serial, result)) } } diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Pure.thy --- a/src/Pure/Pure.thy Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Pure.thy Fri Nov 07 20:43:13 2014 +0100 @@ -6,19 +6,12 @@ theory Pure keywords - "!!" "!" "%" "(" ")" "+" "," "--" ":" "::" ";" "<" "<=" "=" "==" - "=>" "?" "[" "\" "\" "\" - "\" "\" "]" "and" "assumes" - "attach" "begin" "binder" "constrains" "defines" "fixes" "for" - "identifier" "if" "imports" "in" "includes" "infix" "infixl" - "infixr" "is" "keywords" "notes" "obtains" "open" "output" + "!!" "!" "+" "--" ":" ";" "<" "<=" "=" "=>" "?" "[" "\" + "\" "\" "\" + "\" "]" "assumes" "attach" "binder" "constrains" + "defines" "fixes" "for" "identifier" "if" "in" "includes" "infix" + "infixl" "infixr" "is" "notes" "obtains" "open" "output" "overloaded" "pervasive" "shows" "structure" "unchecked" "where" "|" - and "theory" :: thy_begin % "theory" - and "header" :: heading - and "chapter" :: heading - and "section" :: heading - and "subsection" :: heading - and "subsubsection" :: heading and "text" "text_raw" :: thy_decl and "txt" "txt_raw" :: prf_decl % "proof" and "default_sort" :: thy_decl == "" @@ -27,7 +20,7 @@ "definition" "abbreviation" "type_notation" "no_type_notation" "notation" "no_notation" "axiomatization" "theorems" "lemmas" "declare" "hide_class" "hide_type" "hide_const" "hide_fact" :: thy_decl - and "SML_file" "ML_file" :: thy_load % "ML" + and "SML_file" :: thy_load % "ML" and "SML_import" "SML_export" :: thy_decl % "ML" and "ML" :: thy_decl % "ML" and "ML_prf" :: prf_decl % "proof" (* FIXME % "ML" ?? 
*) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ROOT --- a/src/Pure/ROOT Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/ROOT Fri Nov 07 20:43:13 2014 +0100 @@ -154,6 +154,7 @@ "ML/ml_compiler_polyml.ML" "ML/ml_context.ML" "ML/ml_env.ML" + "ML/ml_file.ML" "ML/ml_lex.ML" "ML/ml_parse.ML" "ML/ml_options.ML" diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/ROOT.ML --- a/src/Pure/ROOT.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/ROOT.ML Fri Nov 07 20:43:13 2014 +0100 @@ -302,10 +302,11 @@ (*theory documents*) use "System/isabelle_system.ML"; use "Thy/term_style.ML"; +use "Isar/outer_syntax.ML"; use "Thy/thy_output.ML"; -use "Isar/outer_syntax.ML"; use "General/graph_display.ML"; use "Thy/present.ML"; +use "pure_syn.ML"; use "PIDE/command.ML"; use "PIDE/query_operation.ML"; use "PIDE/resources.ML"; @@ -361,7 +362,7 @@ (* the Pure theory *) -use "pure_syn.ML"; +use "ML/ml_file.ML"; Runtime.toplevel_program (fn () => Thy_Info.use_thy ("Pure", Position.none)); Context.set_thread_data NONE; structure Pure = struct val thy = Thy_Info.get_theory "Pure" end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Thy/thy_header.ML --- a/src/Pure/Thy/thy_header.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Thy/thy_header.ML Fri Nov 07 20:43:13 2014 +0100 @@ -12,9 +12,10 @@ imports: (string * Position.T) list, keywords: keywords} val make: string * Position.T -> (string * Position.T) list -> keywords -> header - val define_keywords: header -> unit - val declare_keyword: string * Keyword.spec option -> theory -> theory - val the_keyword: theory -> string -> Keyword.spec option + val bootstrap_keywords: Keyword.keywords + val add_keywords: keywords -> theory -> theory + val get_keywords: theory -> Keyword.keywords + val get_keywords': Proof.context -> Keyword.keywords val args: header parser val read: Position.T -> string -> header val read_tokens: Token.T list -> header @@ -23,6 +24,10 @@ structure Thy_Header: THY_HEADER = struct +(** keyword declarations **) + +(* header *) + type keywords = (string * Keyword.spec option) list; type header = @@ -34,37 +39,7 @@ {name = name, imports = imports, keywords = keywords}; - -(** keyword declarations **) - -fun define_keywords ({keywords, ...}: header) = - List.app (Keyword.define o apsnd (Option.map Keyword.spec)) keywords; - -fun err_dup name = error ("Duplicate declaration of outer syntax keyword " ^ quote name); - -structure Data = Theory_Data -( - type T = Keyword.spec option Symtab.table; - val empty = Symtab.empty; - val extend = I; - fun merge data : T = Symtab.merge (op =) data handle Symtab.DUP name => err_dup name; -); - -fun declare_keyword (name, spec) = - Data.map (fn data => - (Option.map Keyword.spec spec; - Symtab.update_new (name, spec) data handle Symtab.DUP dup => err_dup dup)); - -fun the_keyword thy name = - (case Symtab.lookup (Data.get thy) name of - SOME spec => spec - | NONE => error ("Undeclared outer syntax keyword " ^ quote name)); - - - -(** concrete syntax **) - -(* header keywords *) +(* bootstrap keywords *) val headerN = "header"; val chapterN = "chapter"; @@ -77,19 +52,47 @@ val keywordsN = "keywords"; val beginN = "begin"; -val header_keywords = +val bootstrap_keywords = Keyword.empty_keywords - |> fold (Keyword.add o rpair NONE) - ["%", "(", ")", ",", "::", "==", "and", beginN, importsN, keywordsN] - |> fold Keyword.add - [(headerN, SOME Keyword.heading), - (chapterN, SOME Keyword.heading), - (sectionN, SOME Keyword.heading), - (subsectionN, SOME Keyword.heading), - (subsubsectionN, SOME Keyword.heading), - (theoryN, SOME 
Keyword.thy_begin)]; + |> Keyword.add_keywords + [("%", NONE), + ("(", NONE), + (")", NONE), + (",", NONE), + ("::", NONE), + ("==", NONE), + ("and", NONE), + (beginN, NONE), + (importsN, NONE), + (keywordsN, NONE), + (headerN, SOME ((Keyword.heading, []), [])), + (chapterN, SOME ((Keyword.heading, []), [])), + (sectionN, SOME ((Keyword.heading, []), [])), + (subsectionN, SOME ((Keyword.heading, []), [])), + (subsubsectionN, SOME ((Keyword.heading, []), [])), + (theoryN, SOME ((Keyword.thy_begin, []), ["theory"])), + ("ML_file", SOME ((Keyword.thy_load, []), ["ML"]))]; +(* theory data *) + +structure Data = Theory_Data +( + type T = Keyword.keywords; + val empty = bootstrap_keywords; + val extend = I; + val merge = Keyword.merge_keywords; +); + +val add_keywords = Data.map o Keyword.add_keywords; + +val get_keywords = Data.get; +val get_keywords' = get_keywords o Proof_Context.theory_of; + + + +(** concrete syntax **) + (* header args *) local @@ -145,7 +148,7 @@ str |> Source.of_string_limited 8000 |> Symbol.source - |> Token.source_strict header_keywords pos; + |> Token.source_strict bootstrap_keywords pos; fun read_source pos source = let val res = diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Thy/thy_header.scala --- a/src/Pure/Thy/thy_header.scala Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Thy/thy_header.scala Fri Nov 07 20:43:13 2014 +0100 @@ -15,6 +15,10 @@ object Thy_Header extends Parse.Parser { + /* bootstrap keywords */ + + type Keywords = List[(String, Option[Keyword.Spec], Option[String])] + val HEADER = "header" val CHAPTER = "chapter" val SECTION = "section" @@ -27,15 +31,31 @@ val AND = "and" val BEGIN = "begin" - private val header_keywords = - Keyword.Keywords.empty + - "%" + "(" + ")" + "," + "::" + "==" + AND + BEGIN + IMPORTS + KEYWORDS + - (HEADER, Keyword.HEADING) + - (CHAPTER, Keyword.HEADING) + - (SECTION, Keyword.HEADING) + - (SUBSECTION, Keyword.HEADING) + - (SUBSUBSECTION, Keyword.HEADING) + - (THEORY, Keyword.THY_BEGIN) + private val bootstrap_header: Keywords = + List( + ("%", None, None), + ("(", None, None), + (")", None, None), + (",", None, None), + ("::", None, None), + ("==", None, None), + (AND, None, None), + (BEGIN, None, None), + (IMPORTS, None, None), + (KEYWORDS, None, None), + (HEADER, Some(((Keyword.HEADING, Nil), Nil)), None), + (CHAPTER, Some(((Keyword.HEADING, Nil), Nil)), None), + (SECTION, Some(((Keyword.HEADING, Nil), Nil)), None), + (SUBSECTION, Some(((Keyword.HEADING, Nil), Nil)), None), + (SUBSUBSECTION, Some(((Keyword.HEADING, Nil), Nil)), None), + (THEORY, Some((Keyword.THY_BEGIN, Nil), List("theory")), None), + ("ML_file", Some((Keyword.THY_LOAD, Nil), List("ML")), None)) + + private val bootstrap_keywords = + Keyword.Keywords.empty.add_keywords(bootstrap_header) + + def bootstrap_syntax(): Outer_Syntax = + Outer_Syntax.init().add_keywords(bootstrap_header) /* theory file name */ @@ -101,7 +121,7 @@ def read(reader: Reader[Char]): Thy_Header = { - val token = Token.Parsers.token(header_keywords) + val token = Token.Parsers.token(bootstrap_keywords) val toks = new mutable.ListBuffer[Token] @tailrec def scan_to_begin(in: Reader[Char]) @@ -123,11 +143,6 @@ def read(source: CharSequence): Thy_Header = read(new CharSequenceReader(source)) - - - /* keywords */ - - type Keywords = List[(String, Option[Keyword.Spec], Option[String])] } diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Thy/thy_info.ML --- a/src/Pure/Thy/thy_info.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Thy/thy_info.ML Fri Nov 07 20:43:13 2014 +0100 @@ -373,7 +373,7 @@ 
fun script_thy pos txt thy = let val trs = - Outer_Syntax.parse (Outer_Syntax.get_syntax ()) pos txt + Outer_Syntax.parse thy pos txt |> map (Toplevel.modify_init (K thy)); val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs); val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Thy/thy_output.ML --- a/src/Pure/Thy/thy_output.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Thy/thy_output.ML Fri Nov 07 20:43:13 2014 +0100 @@ -22,11 +22,9 @@ theory -> theory val boolean: string -> bool val integer: string -> int - datatype markup = Markup | MarkupEnv | Verbatim - val eval_antiq: Keyword.keywords -> Toplevel.state -> Antiquote.antiq -> string + val eval_antiq: Toplevel.state -> Antiquote.antiq -> string val check_text: Symbol_Pos.source -> Toplevel.state -> unit - val present_thy: Keyword.keywords -> (string -> string list) -> (markup -> string -> bool) -> - (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T + val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T val pretty_text: Proof.context -> string -> Pretty.T val pretty_term: Proof.context -> term -> Pretty.T val pretty_thm: Proof.context -> thm -> Pretty.T @@ -35,6 +33,12 @@ Token.src -> 'a list -> Pretty.T list val output: Proof.context -> Pretty.T list -> string val verbatim_text: Proof.context -> string -> string + val local_theory_markup: (xstring * Position.T) option * Symbol_Pos.source -> + Toplevel.transition -> Toplevel.transition + val proof_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition + val header_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition + val heading_markup: (xstring * Position.T) option * Symbol_Pos.source -> + Toplevel.transition -> Toplevel.transition end; structure Thy_Output: THY_OUTPUT = @@ -158,10 +162,18 @@ (* eval_antiq *) -fun eval_antiq keywords state ((ss, {range = (pos, _), ...}): Antiquote.antiq) = +fun eval_antiq state ((ss, {range = (pos, _), ...}): Antiquote.antiq) = let + val keywords = + (case try Toplevel.presentation_context_of state of + SOME ctxt => Thy_Header.get_keywords' ctxt + | NONE => + error ("Unknown context -- cannot expand document antiquotations" ^ + Position.here pos)); + val (opts, src) = Token.read_antiq keywords antiq (ss, pos); fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) (); + val preview_ctxt = fold option opts (Toplevel.presentation_context_of state); val print_ctxt = Context_Position.set_visible false preview_ctxt; val _ = cmd preview_ctxt; @@ -171,26 +183,22 @@ (* check_text *) -fun eval_antiquote keywords state (txt, pos) = +fun eval_antiquote state (txt, pos) = let fun words (Antiquote.Text ss) = [(#1 (Symbol_Pos.range ss), Markup.words)] | words (Antiquote.Antiq _) = []; fun expand (Antiquote.Text ss) = Symbol_Pos.content ss - | expand (Antiquote.Antiq antiq) = eval_antiq keywords state antiq; + | expand (Antiquote.Antiq antiq) = eval_antiq state antiq; val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos); val _ = Position.reports (maps words ants); - in - if Toplevel.is_toplevel state andalso not (forall Antiquote.is_text ants) then - error ("Unknown context -- cannot expand document antiquotations" ^ Position.here pos) - else implode (map expand ants) - end; + in implode (map expand ants) end; fun check_text {delimited, text, pos} state = (Position.report pos (Markup.language_document delimited); if Toplevel.is_skipped_proof state then () - 
else ignore (eval_antiquote (Keyword.get_keywords ()) state (text, pos))); + else ignore (eval_antiquote state (text, pos))); @@ -207,8 +215,8 @@ | MarkupEnvToken of string * (string * Position.T) | VerbatimToken of string * Position.T; -fun output_token keywords state = - let val eval = eval_antiquote keywords state in +fun output_token state = + let val eval = eval_antiquote state in fn NoToken => "" | BasicToken tok => Latex.output_basic tok | MarkupToken (cmd, txt) => Latex.output_markup cmd (eval txt) @@ -265,11 +273,10 @@ in -fun present_span keywords default_tags span state state' - (tag_stack, active_tag, newline, buffer, present_cont) = +fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) = let val present = fold (fn (tok, (flag, 0)) => - Buffer.add (output_token keywords state' tok) + Buffer.add (output_token state' tok) #> Buffer.add flag | _ => I); @@ -281,7 +288,7 @@ val active_tag' = if is_some tag' then tag' else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE - else try hd (default_tags cmd_name); + else try hd (Keyword.command_tags keywords cmd_name); val edge = (active_tag, active_tag'); val newline' = @@ -322,8 +329,6 @@ (* present_thy *) -datatype markup = Markup | MarkupEnv | Verbatim; - local val space_proper = @@ -351,8 +356,12 @@ in -fun present_thy keywords default_tags is_markup command_results toks = +fun present_thy thy command_results toks = let + val keywords = Thy_Header.get_keywords thy; + val is_markup = Outer_Syntax.is_markup thy; + + (* tokens *) val ignored = Scan.state --| ignore @@ -383,9 +392,9 @@ val token = ignored || - markup Markup MarkupToken Latex.markup_true || - markup MarkupEnv MarkupEnvToken Latex.markup_true || - markup Verbatim (VerbatimToken o #2) "" || + markup Outer_Syntax.Markup MarkupToken Latex.markup_true || + markup Outer_Syntax.Markup_Env MarkupEnvToken Latex.markup_true || + markup Outer_Syntax.Verbatim (VerbatimToken o #2) "" || command || cmt || other; @@ -423,7 +432,7 @@ (* present commands *) fun present_command tr span st st' = - Toplevel.setmp_thread_position tr (present_span keywords default_tags span st st'); + Toplevel.setmp_thread_position tr (present_span keywords span st st'); fun present _ [] = I | present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest; @@ -691,4 +700,29 @@ (Context_Position.reports ctxt [(pos, Markup.language_path), (pos, Markup.url name)]; enclose "\\url{" "}" name))); + + +(** markup commands **) + +fun local_theory_markup (loc, txt) = Toplevel.present_local_theory loc (check_text txt); +val proof_markup = Toplevel.present_proof o check_text; + +fun reject_target NONE = () + | reject_target (SOME (_, pos)) = + error ("Illegal target specification -- not a theory context" ^ Position.here pos); + +fun header_markup txt = + Toplevel.keep (fn state => + if Toplevel.is_toplevel state then + (legacy_feature "Obsolete 'header' command -- use 'chapter', 'section' etc. 
instead"; + check_text txt state) + else raise Toplevel.UNDEF); + +fun heading_markup (loc, txt) = + Toplevel.keep (fn state => + if Toplevel.is_toplevel state then (reject_target loc; check_text txt state) + else raise Toplevel.UNDEF) o + local_theory_markup (loc, txt) o + Toplevel.present_proof (fn state => (reject_target loc; check_text txt state)); + end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Thy/thy_syntax.ML --- a/src/Pure/Thy/thy_syntax.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Thy/thy_syntax.ML Fri Nov 07 20:43:13 2014 +0100 @@ -14,7 +14,7 @@ val map_element: ('a -> 'b) -> 'a element -> 'b element val flat_element: 'a element -> 'a list val last_element: 'a element -> 'a - val parse_elements: Command_Span.span list -> Command_Span.span element list + val parse_elements: Keyword.keywords -> Command_Span.span list -> Command_Span.span element list end; structure Thy_Syntax: THY_SYNTAX = @@ -89,23 +89,27 @@ Scan.one (fn (Command_Span.Span (Command_Span.Command_Span (name, _), _)) => pred name | _ => false); -val proof_atom = - Scan.one - (fn (Command_Span.Span (Command_Span.Command_Span (name, _), _)) => Keyword.is_proof_body name - | _ => true) >> atom; - -fun proof_element x = (command_with Keyword.is_proof_goal -- proof_rest >> element || proof_atom) x -and proof_rest x = (Scan.repeat proof_element -- command_with Keyword.is_qed) x; - -val other_element = - command_with Keyword.is_theory_goal -- proof_rest >> element || - Scan.one not_eof >> atom; +fun parse_element keywords = + let + val proof_atom = + Scan.one + (fn (Command_Span.Span (Command_Span.Command_Span (name, _), _)) => + Keyword.is_proof_body keywords name + | _ => true) >> atom; + fun proof_element x = + (command_with (Keyword.is_proof_goal keywords) -- proof_rest >> element || proof_atom) x + and proof_rest x = + (Scan.repeat proof_element -- command_with (Keyword.is_qed keywords)) x; + in + command_with (Keyword.is_theory_goal keywords) -- proof_rest >> element || + Scan.one not_eof >> atom + end; in -val parse_elements = +fun parse_elements keywords = Source.of_list #> - Source.source stopper (Scan.bulk other_element) #> + Source.source stopper (Scan.bulk (parse_element keywords)) #> Source.exhaust; end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Tools/build.ML --- a/src/Pure/Tools/build.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Tools/build.ML Fri Nov 07 20:43:13 2014 +0100 @@ -157,7 +157,6 @@ theories |> (List.app (use_theories_condition last_timing) |> session_timing name verbose - |> Unsynchronized.setmp Outer_Syntax.batch_mode true |> Unsynchronized.setmp Output.protocol_message_fn protocol_message |> Multithreading.max_threads_setmp (Options.int options "threads") |> Exn.capture); diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Tools/build.scala Fri Nov 07 20:43:13 2014 +0100 @@ -437,7 +437,8 @@ val (loaded_theories0, known_theories0, syntax0) = info.parent.map(deps(_)) match { case None => - (Set.empty[String], Map.empty[String, Document.Node.Name], Outer_Syntax.init()) + (Set.empty[String], Map.empty[String, Document.Node.Name], + Thy_Header.bootstrap_syntax()) case Some(parent) => (parent.loaded_theories, parent.known_theories, parent.syntax) } diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Tools/find_consts.ML --- a/src/Pure/Tools/find_consts.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Tools/find_consts.ML Fri Nov 07 20:43:13 2014 +0100 @@ -140,7 +140,7 @@ val query = Scan.repeat 
((Scan.option Parse.minus >> is_none) -- criterion); -val query_keywords = Keyword.add (":", NONE) Keyword.empty_keywords; +val query_keywords = Keyword.add_keywords [(":", NONE)] Keyword.empty_keywords; in diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Tools/find_theorems.ML --- a/src/Pure/Tools/find_theorems.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Tools/find_theorems.ML Fri Nov 07 20:43:13 2014 +0100 @@ -526,7 +526,7 @@ val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion); -val query_keywords = Keyword.add (":", NONE) Keyword.empty_keywords; +val query_keywords = Keyword.add_keywords [(":", NONE)] Keyword.empty_keywords; in diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/Tools/rail.ML --- a/src/Pure/Tools/rail.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/Tools/rail.ML Fri Nov 07 20:43:13 2014 +0100 @@ -316,7 +316,7 @@ fun output_rules state rules = let - val output_antiq = Thy_Output.eval_antiq (Keyword.get_keywords ()) state; + val output_antiq = Thy_Output.eval_antiq state; fun output_text b s = Output.output s |> b ? enclose "\\isakeyword{" "}" diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/build-jars diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Pure/pure_syn.ML --- a/src/Pure/pure_syn.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Pure/pure_syn.ML Fri Nov 07 20:43:13 2014 +0100 @@ -1,33 +1,36 @@ (* Title: Pure/pure_syn.ML Author: Makarius -Minimal outer syntax for bootstrapping Isabelle/Pure. +Outer syntax for bootstrapping Isabelle/Pure. *) structure Pure_Syn: sig end = struct val _ = - Outer_Syntax.command - (("theory", Keyword.tag_theory Keyword.thy_begin), @{here}) "begin theory" + Outer_Syntax.markup_command Outer_Syntax.Markup ("header", @{here}) "theory header" + (Parse.document_source >> Thy_Output.header_markup); + +val _ = + Outer_Syntax.markup_command Outer_Syntax.Markup ("chapter", @{here}) "chapter heading" + (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup); + +val _ = + Outer_Syntax.markup_command Outer_Syntax.Markup ("section", @{here}) "section heading" + (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup); + +val _ = + Outer_Syntax.markup_command Outer_Syntax.Markup ("subsection", @{here}) "subsection heading" + (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup); + +val _ = + Outer_Syntax.markup_command Outer_Syntax.Markup ("subsubsection", @{here}) "subsubsection heading" + (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup); + +val _ = + Outer_Syntax.command ("theory", @{here}) "begin theory" (Thy_Header.args >> (fn _ => Toplevel.init_theory (fn () => error "Missing theory initialization"))); -val _ = - Outer_Syntax.command - (("ML_file", Keyword.tag_ml Keyword.thy_load), @{here}) "ML text from file" - (Resources.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy => - let - val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy); - val provide = Resources.provide (src_path, digest); - val source = {delimited = true, text = cat_lines lines, pos = pos}; - val flags = {SML = false, exchange = false, redirect = true, verbose = true}; - in - gthy - |> ML_Context.exec (fn () => ML_Context.eval_source flags source) - |> Local_Theory.propagate_ml_env - |> Context.mapping provide (Local_Theory.background_theory provide) - end))); - end; diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Tools/Code/code_target.ML --- a/src/Tools/Code/code_target.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Tools/Code/code_target.ML Fri Nov 07 20:43:13 2014 
+0100 @@ -693,9 +693,10 @@ fun codegen_tool thyname cmd_expr = let - val ctxt = Proof_Context.init_global (Thy_Info.get_theory thyname); + val thy = Thy_Info.get_theory thyname; + val ctxt = Proof_Context.init_global thy; val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o - (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_keywords ()) Position.none); + (filter Token.is_proper o Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none); in case parse cmd_expr of SOME f => (writeln "Now generating code..."; f ctxt) | NONE => error ("Bad directive " ^ quote cmd_expr) diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Tools/jEdit/src/plugin.scala --- a/src/Tools/jEdit/src/plugin.scala Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Tools/jEdit/src/plugin.scala Fri Nov 07 20:43:13 2014 +0100 @@ -261,7 +261,7 @@ } case Session.Ready => - PIDE.session.update_options(PIDE.options.value) + PIDE.session.init_options(PIDE.options.value) PIDE.init_models() if (!Isabelle.continuous_checking) { diff -r a3be9a47e2d7 -r dcad9bad43e7 src/Tools/try.ML --- a/src/Tools/try.ML Fri Nov 07 15:40:08 2014 +0000 +++ b/src/Tools/try.ML Fri Nov 07 20:43:13 2014 +0100 @@ -96,8 +96,8 @@ fun print_function ((name, (weight, auto, tool)): tool) = Command.print_function ("auto_" ^ name) - (fn {command_name, ...} => - if Keyword.is_theory_goal command_name andalso Options.default_bool auto then + (fn {keywords, command_name, ...} => + if Keyword.is_theory_goal keywords command_name andalso Options.default_bool auto then SOME {delay = SOME (seconds (Options.default_real @{system_option auto_time_start})), pri = ~ weight,