plain value Keyword.keywords, which may be used outside the theory context for bootstrap purposes;
plain value Outer_Syntax maintained within the theory context: parsing requires the current theory context;
renamed Keyword.is_keyword to Keyword.is_literal, according to its semantics;
eliminated pointless type Keyword.T;
simplified @{command_spec};
clarified bootstrap keywords and syntax: these now serve as the basis rather than a side branch;
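
Illustrative migration sketch (not part of this changeset), assuming a proof context ctxt is at hand: code that formerly consulted the implicit global keyword table now obtains keywords from the theory or proof context.

  (*before: implicit global state*)
  fun needs_quoting0 s = Keyword.is_keyword (Keyword.get_keywords ()) s;

  (*after: keywords from the explicit context*)
  fun needs_quoting ctxt s = Keyword.is_literal (Thy_Header.get_keywords' ctxt) s;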
--- a/NEWS Fri Nov 07 16:22:25 2014 +0100
+++ b/NEWS Fri Nov 07 16:36:55 2014 +0100
@@ -18,6 +18,10 @@
structure in the body: 'oops' is no longer supported here. Minor
INCOMPATIBILITY, use 'sorry' instead.
+* Outer syntax commands are managed authentically within the theory
+context, without implicit global state. Potential for accidental
+INCOMPATIBILITY, make sure that required theories are really imported.
+
*** Prover IDE -- Isabelle/Scala/jEdit ***
--- a/src/Doc/antiquote_setup.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Doc/antiquote_setup.ML Fri Nov 07 16:36:55 2014 +0100
@@ -191,16 +191,19 @@
fun no_check _ _ = true;
-fun is_keyword _ (name, _) =
- Keyword.is_keyword (Keyword.get_keywords ()) name;
+fun is_keyword ctxt (name, _) =
+ Keyword.is_literal (Thy_Header.get_keywords' ctxt) name;
fun check_command ctxt (name, pos) =
- let val keywords = Keyword.get_keywords () in
- is_some (Keyword.command_keyword keywords name) andalso
+ let
+ val thy = Proof_Context.theory_of ctxt;
+ val keywords = Thy_Header.get_keywords thy;
+ in
+ Keyword.is_command keywords name andalso
let
val markup =
Outer_Syntax.scan keywords Position.none name
- |> maps (Outer_Syntax.command_reports (#2 (Outer_Syntax.get_syntax ())))
+ |> maps (Outer_Syntax.command_reports thy)
|> map (snd o fst);
val _ = Context_Position.reports ctxt (map (pair pos) markup);
in true end
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Fri Nov 07 16:36:55 2014 +0100
@@ -414,7 +414,7 @@
time_isa) = time_limit (Mirabelle.cpu_time (fn () =>
let
val ho_atp = Sledgehammer_Prover_ATP.is_ho_atp ctxt prover_name
- val keywords = Keyword.get_keywords ()
+ val keywords = Thy_Header.get_keywords' ctxt
val css_table = Sledgehammer_Fact.clasimpset_rule_table_of ctxt
val facts =
Sledgehammer_Fact.nearly_all_facts ctxt ho_atp
@@ -535,8 +535,8 @@
fun do_method named_thms ctxt =
let
val ref_of_str =
- suffix ";" #> Outer_Syntax.scan (Keyword.get_keywords ()) Position.none #> Parse.xthm
- #> fst
+ suffix ";" #> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.none
+ #> Parse.xthm #> fst
val thms = named_thms |> maps snd
val facts = named_thms |> map (ref_of_str o fst o fst)
val fact_override = {add = facts, del = [], only = true}
@@ -561,7 +561,7 @@
let
val (type_encs, lam_trans) =
!meth
- |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start
+ |> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.start
|> filter Token.is_proper |> tl
|> Metis_Tactic.parse_metis_options |> fst
|>> the_default [ATP_Proof_Reconstruct.partial_typesN]
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML Fri Nov 07 16:36:55 2014 +0100
@@ -110,7 +110,7 @@
val subgoal = 1
val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal subgoal ctxt
val ho_atp = Sledgehammer_Prover_ATP.is_ho_atp ctxt prover
- val keywords = Keyword.get_keywords ()
+ val keywords = Thy_Header.get_keywords' ctxt
val css_table = Sledgehammer_Fact.clasimpset_rule_table_of ctxt
val facts =
Sledgehammer_Fact.nearly_all_facts ctxt ho_atp
--- a/src/HOL/TPTP/sledgehammer_tactics.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/TPTP/sledgehammer_tactics.ML Fri Nov 07 16:36:55 2014 +0100
@@ -36,7 +36,7 @@
val default_max_facts = default_max_facts_of_prover ctxt name
val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal i ctxt
val ho_atp = exists (is_ho_atp ctxt) provers
- val keywords = Keyword.get_keywords ()
+ val keywords = Thy_Header.get_keywords' ctxt
val css_table = clasimpset_rule_table_of ctxt
val facts =
nearly_all_facts ctxt ho_atp fact_override keywords css_table chained hyp_ts concl_t
--- a/src/HOL/Tools/ATP/atp_util.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/ATP/atp_util.ML Fri Nov 07 16:36:55 2014 +0100
@@ -19,7 +19,7 @@
val nat_subscript : int -> string
val unquote_tvar : string -> string
val unyxml : string -> string
- val maybe_quote : string -> string
+ val maybe_quote : Keyword.keywords -> string -> string
val string_of_ext_time : bool * Time.time -> string
val string_of_time : Time.time -> string
val type_instance : theory -> typ -> typ -> bool
@@ -136,11 +136,11 @@
val unyxml = XML.content_of o YXML.parse_body
val is_long_identifier = forall Symbol_Pos.is_identifier o Long_Name.explode
-fun maybe_quote y =
+fun maybe_quote keywords y =
let val s = unyxml y in
y |> ((not (is_long_identifier (unquote_tvar s)) andalso
not (is_long_identifier (unquery_var s))) orelse
- Keyword.is_keyword (Keyword.get_keywords ()) s) ? quote
+ Keyword.is_literal keywords s) ? quote
end
fun string_of_ext_time (plus, time) =
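
Illustrative note (not part of this changeset): maybe_quote now takes the keywords explicitly; a hypothetical caller would supply them from the context, e.g.

  (*hypothetical usage sketch*)
  fun quote_if_needed ctxt s = ATP_Util.maybe_quote (Thy_Header.get_keywords' ctxt) s;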
--- a/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML Fri Nov 07 16:36:55 2014 +0100
@@ -2016,7 +2016,7 @@
val fake_T = qsoty (unfreeze_fp X);
val fake_T_backdrop = qsoty (unfreeze_fp X_backdrop);
fun register_hint () =
- "\nUse the " ^ quote (fst (fst @{command_spec "bnf"})) ^ " command to register " ^
+ "\nUse the " ^ quote (#1 @{command_spec "bnf"}) ^ " command to register " ^
quote bad_tc ^ " as a bounded natural functor to allow nested (co)recursion through \
\it";
in
--- a/src/HOL/Tools/Lifting/lifting_setup.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML Fri Nov 07 16:36:55 2014 +0100
@@ -221,7 +221,7 @@
val thy = Proof_Context.theory_of lthy
val dummy_thm = Thm.transfer thy Drule.dummy_thm
- val pointer = Outer_Syntax.scan (Keyword.get_keywords ()) Position.none bundle_name
+ val pointer = Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none bundle_name
val restore_lifting_att =
([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer])
in
--- a/src/HOL/Tools/Nitpick/nitpick.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick.ML Fri Nov 07 16:36:55 2014 +0100
@@ -220,6 +220,7 @@
val timer = Timer.startRealTimer ()
val thy = Proof.theory_of state
val ctxt = Proof.context_of state
+ val keywords = Thy_Header.get_keywords thy
(* FIXME: reintroduce code before new release:
val nitpick_thy = Thy_Info.get_theory "Nitpick"
@@ -365,7 +366,7 @@
val unique_scope = forall (curry (op =) 1 o length o snd) cards_assigns
fun monotonicity_message Ts extra =
- let val pretties = map (pretty_maybe_quote o pretty_for_type ctxt) Ts in
+ let val pretties = map (pretty_maybe_quote keywords o pretty_for_type ctxt) Ts in
Pretty.blk (0,
pstrs ("The type" ^ plural_s_for_list pretties ^ " ") @
pretty_serial_commas "and" pretties @
--- a/src/HOL/Tools/Nitpick/nitpick_scope.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_scope.ML Fri Nov 07 16:36:55 2014 +0100
@@ -180,9 +180,10 @@
let
fun standard_blocks s = map (Pretty.block o cons (Pretty.str (s ^ " ")))
val (primary_cards, secondary_cards, maxes, iters, miscs) =
- quintuple_for_scope (pretty_maybe_quote oo pretty_for_type)
- (pretty_maybe_quote oo Syntax.pretty_term)
- Pretty.str scope
+ quintuple_for_scope
+ (fn ctxt => pretty_maybe_quote (Thy_Header.get_keywords' ctxt) o pretty_for_type ctxt)
+ (fn ctxt => pretty_maybe_quote (Thy_Header.get_keywords' ctxt) o Syntax.pretty_term ctxt)
+ Pretty.str scope
in
standard_blocks "card" primary_cards @
(if verbose then
--- a/src/HOL/Tools/Nitpick/nitpick_util.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_util.ML Fri Nov 07 16:36:55 2014 +0100
@@ -72,7 +72,7 @@
val indent_size : int
val pstrs : string -> Pretty.T list
val unyxml : string -> string
- val pretty_maybe_quote : Pretty.T -> Pretty.T
+ val pretty_maybe_quote : Keyword.keywords -> Pretty.T -> Pretty.T
val hash_term : term -> int
val spying : bool -> (unit -> Proof.state * int * string) -> unit
end;
@@ -287,9 +287,9 @@
val maybe_quote = ATP_Util.maybe_quote
-fun pretty_maybe_quote pretty =
+fun pretty_maybe_quote keywords pretty =
let val s = Pretty.str_of pretty in
- if maybe_quote s = s then pretty else Pretty.enum "" "\"" "\"" [pretty]
+ if maybe_quote keywords s = s then pretty else Pretty.enum "" "\"" "\"" [pretty]
end
val hashw = ATP_Util.hashw
--- a/src/HOL/Tools/Sledgehammer/sledgehammer.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer.ML Fri Nov 07 16:36:55 2014 +0100
@@ -264,10 +264,10 @@
val print =
if mode = Normal andalso is_none output_result then writeln else K ()
val ctxt = Proof.context_of state
+ val keywords = Thy_Header.get_keywords' ctxt
val {facts = chained, goal, ...} = Proof.goal state
val (_, hyp_ts, concl_t) = strip_subgoal goal i ctxt
val ho_atp = exists (is_ho_atp ctxt) provers
- val keywords = Keyword.get_keywords ()
val css = clasimpset_rule_table_of ctxt
val all_facts =
nearly_all_facts ctxt ho_atp fact_override keywords css chained hyp_ts concl_t
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Fri Nov 07 16:36:55 2014 +0100
@@ -67,7 +67,7 @@
val no_fact_override = {add = [], del = [], only = false}
fun needs_quoting keywords s =
- Keyword.is_keyword keywords s orelse
+ Keyword.is_literal keywords s orelse
exists (not o Symbol_Pos.is_identifier) (Long_Name.explode s)
fun make_name keywords multi j name =
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar_proof.ML Fri Nov 07 16:36:55 2014 +0100
@@ -267,6 +267,8 @@
fun string_of_isar_proof ctxt0 i n proof =
let
+ val keywords = Thy_Header.get_keywords' ctxt0
+
(* Make sure only type constraints inserted by the type annotation code are printed. *)
val ctxt = ctxt0
|> Config.put show_markup false
@@ -300,7 +302,7 @@
|> annotate_types_in_term ctxt
|> with_vanilla_print_mode (Syntax.unparse_term ctxt #> Pretty.string_of)
|> simplify_spaces
- |> maybe_quote),
+ |> maybe_quote keywords),
ctxt |> perhaps (try (Variable.auto_fixes term)))
fun using_facts [] [] = ""
@@ -317,8 +319,8 @@
end
fun of_free (s, T) =
- maybe_quote s ^ " :: " ^
- maybe_quote (simplify_spaces (with_vanilla_print_mode (Syntax.string_of_typ ctxt) T))
+ maybe_quote keywords s ^ " :: " ^
+ maybe_quote keywords (simplify_spaces (with_vanilla_print_mode (Syntax.string_of_typ ctxt) T))
fun add_frees xs (s, ctxt) =
(s ^ space_implode " and " (map of_free xs), ctxt |> fold Variable.auto_fixes (map Free xs))
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML Fri Nov 07 16:36:55 2014 +0100
@@ -123,7 +123,7 @@
fun thms_of_name ctxt name =
let
val get = maps (Proof_Context.get_fact ctxt o fst)
- val keywords = Keyword.get_keywords ()
+ val keywords = Thy_Header.get_keywords' ctxt
in
Source.of_string name
|> Symbol.source
--- a/src/HOL/Tools/try0.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/Tools/try0.ML Fri Nov 07 16:36:55 2014 +0100
@@ -41,15 +41,15 @@
NONE
end;
-fun parse_method s =
+fun parse_method keywords s =
enclose "(" ")" s
- |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start
+ |> Outer_Syntax.scan keywords Position.start
|> filter Token.is_proper
|> Scan.read Token.stopper Method.parse
|> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");
fun apply_named_method_on_first_goal ctxt =
- parse_method
+ parse_method (Thy_Header.get_keywords' ctxt)
#> Method.method_cmd ctxt
#> Method.Basic
#> (fn m => Method.Combinator (Method.no_combinator_info, Method.Select_Goals 1, [m]))
--- a/src/HOL/ex/Cartouche_Examples.thy Fri Nov 07 16:22:25 2014 +0100
+++ b/src/HOL/ex/Cartouche_Examples.thy Fri Nov 07 16:36:55 2014 +0100
@@ -178,7 +178,7 @@
ML {*
Outer_Syntax.command
@{command_spec "text_cartouche"} ""
- (Parse.opt_target -- Parse.source_position Parse.cartouche >> Isar_Cmd.local_theory_markup)
+ (Parse.opt_target -- Parse.source_position Parse.cartouche >> Thy_Output.local_theory_markup)
*}
text_cartouche
--- a/src/Pure/Isar/attrib.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Isar/attrib.ML Fri Nov 07 16:36:55 2014 +0100
@@ -288,8 +288,8 @@
fun read_attribs ctxt source =
let
+ val keywords = Thy_Header.get_keywords' ctxt;
val syms = Symbol_Pos.source_explode source;
- val keywords = Keyword.get_keywords ();
in
(case Token.read_no_commands keywords Parse.attribs syms of
[raw_srcs] => check_attribs ctxt raw_srcs
--- a/src/Pure/Isar/isar_cmd.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Isar/isar_cmd.ML Fri Nov 07 16:36:55 2014 +0100
@@ -48,12 +48,6 @@
val print_term: (string list * string) -> Toplevel.transition -> Toplevel.transition
val print_type: (string list * (string * string option)) ->
Toplevel.transition -> Toplevel.transition
- val local_theory_markup: (xstring * Position.T) option * Symbol_Pos.source ->
- Toplevel.transition -> Toplevel.transition
- val proof_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
- val header_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
- val heading_markup: (xstring * Position.T) option * Symbol_Pos.source ->
- Toplevel.transition -> Toplevel.transition
end;
structure Isar_Cmd: ISAR_CMD =
@@ -376,28 +370,4 @@
end;
-
-(* markup commands *)
-
-fun local_theory_markup (loc, txt) = Toplevel.present_local_theory loc (Thy_Output.check_text txt);
-val proof_markup = Toplevel.present_proof o Thy_Output.check_text;
-
-fun reject_target NONE = ()
- | reject_target (SOME (_, pos)) =
- error ("Illegal target specification -- not a theory context" ^ Position.here pos);
-
-fun header_markup txt =
- Toplevel.keep (fn state =>
- if Toplevel.is_toplevel state then
- (legacy_feature "Obsolete 'header' command -- use 'chapter', 'section' etc. instead";
- Thy_Output.check_text txt state)
- else raise Toplevel.UNDEF);
-
-fun heading_markup (loc, txt) =
- Toplevel.keep (fn state =>
- if Toplevel.is_toplevel state then (reject_target loc; Thy_Output.check_text txt state)
- else raise Toplevel.UNDEF) o
- local_theory_markup (loc, txt) o
- Toplevel.present_proof (fn state => (reject_target loc; Thy_Output.check_text txt state));
-
end;
--- a/src/Pure/Isar/isar_syn.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Isar/isar_syn.ML Fri Nov 07 16:36:55 2014 +0100
@@ -10,49 +10,24 @@
(** markup commands **)
val _ =
- Outer_Syntax.markup_command Thy_Output.Markup
- @{command_spec "header"} "theory header"
- (Parse.document_source >> Isar_Cmd.header_markup);
-
-val _ =
- Outer_Syntax.markup_command Thy_Output.Markup
- @{command_spec "chapter"} "chapter heading"
- (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup);
-
-val _ =
- Outer_Syntax.markup_command Thy_Output.Markup
- @{command_spec "section"} "section heading"
- (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup);
-
-val _ =
- Outer_Syntax.markup_command Thy_Output.Markup
- @{command_spec "subsection"} "subsection heading"
- (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup);
+ Outer_Syntax.markup_command Outer_Syntax.Markup_Env
+ @{command_spec "text"} "formal comment (theory)"
+ (Parse.opt_target -- Parse.document_source >> Thy_Output.local_theory_markup);
val _ =
- Outer_Syntax.markup_command Thy_Output.Markup
- @{command_spec "subsubsection"} "subsubsection heading"
- (Parse.opt_target -- Parse.document_source >> Isar_Cmd.heading_markup);
-
-val _ =
- Outer_Syntax.markup_command Thy_Output.MarkupEnv
- @{command_spec "text"} "formal comment (theory)"
- (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup);
+ Outer_Syntax.markup_command Outer_Syntax.Verbatim
+ @{command_spec "text_raw"} "raw document preparation text"
+ (Parse.opt_target -- Parse.document_source >> Thy_Output.local_theory_markup);
val _ =
- Outer_Syntax.markup_command Thy_Output.Verbatim
- @{command_spec "text_raw"} "raw document preparation text"
- (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup);
+ Outer_Syntax.markup_command Outer_Syntax.Markup_Env
+ @{command_spec "txt"} "formal comment (proof)"
+ (Parse.document_source >> Thy_Output.proof_markup);
val _ =
- Outer_Syntax.markup_command Thy_Output.MarkupEnv
- @{command_spec "txt"} "formal comment (proof)"
- (Parse.document_source >> Isar_Cmd.proof_markup);
-
-val _ =
- Outer_Syntax.markup_command Thy_Output.Verbatim
+ Outer_Syntax.markup_command Outer_Syntax.Verbatim
@{command_spec "txt_raw"} "raw document preparation text (proof)"
- (Parse.document_source >> Isar_Cmd.proof_markup);
+ (Parse.document_source >> Thy_Output.proof_markup);
@@ -730,11 +705,11 @@
Outer_Syntax.command @{command_spec "help"}
"retrieve outer syntax commands according to name patterns"
(Scan.repeat Parse.name >>
- (fn pats => Toplevel.imperative (fn () => Outer_Syntax.help_syntax pats)));
+ (fn pats => Toplevel.keep (fn st => Outer_Syntax.help_syntax (Toplevel.theory_of st) pats)));
val _ =
Outer_Syntax.command @{command_spec "print_commands"} "print outer syntax commands"
- (Scan.succeed (Toplevel.imperative Outer_Syntax.print_syntax));
+ (Scan.succeed (Toplevel.keep (Outer_Syntax.print_syntax o Toplevel.theory_of)));
val _ =
Outer_Syntax.command @{command_spec "print_options"} "print configuration options"
--- a/src/Pure/Isar/keyword.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Isar/keyword.ML Fri Nov 07 16:36:55 2014 +0100
@@ -6,43 +6,38 @@
signature KEYWORD =
sig
- type T
- val kind_of: T -> string
- val kind_files_of: T -> string * string list
- val diag: T
- val heading: T
- val thy_begin: T
- val thy_end: T
- val thy_decl: T
- val thy_decl_block: T
- val thy_load: T
- val thy_goal: T
- val qed: T
- val qed_script: T
- val qed_block: T
- val qed_global: T
- val prf_goal: T
- val prf_block: T
- val prf_open: T
- val prf_close: T
- val prf_chain: T
- val prf_decl: T
- val prf_asm: T
- val prf_asm_goal: T
- val prf_asm_goal_script: T
- val prf_script: T
+ val diag: string
+ val heading: string
+ val thy_begin: string
+ val thy_end: string
+ val thy_decl: string
+ val thy_decl_block: string
+ val thy_load: string
+ val thy_goal: string
+ val qed: string
+ val qed_script: string
+ val qed_block: string
+ val qed_global: string
+ val prf_goal: string
+ val prf_block: string
+ val prf_open: string
+ val prf_close: string
+ val prf_chain: string
+ val prf_decl: string
+ val prf_asm: string
+ val prf_asm_goal: string
+ val prf_asm_goal_script: string
+ val prf_script: string
type spec = (string * string list) * string list
- val check_spec: spec -> T
- val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
type keywords
val minor_keywords: keywords -> Scan.lexicon
val major_keywords: keywords -> Scan.lexicon
val no_command_keywords: keywords -> keywords
val empty_keywords: keywords
val merge_keywords: keywords * keywords -> keywords
- val add_keywords: string * T option -> keywords -> keywords
- val is_keyword: keywords -> string -> bool
- val command_keyword: keywords -> string -> T option
+ val add_keywords: (string * spec option) list -> keywords -> keywords
+ val is_literal: keywords -> string -> bool
+ val is_command: keywords -> string -> bool
val command_files: keywords -> string -> Path.T -> Path.T list
val command_tags: keywords -> string -> string list
val is_diag: keywords -> string -> bool
@@ -58,8 +53,6 @@
val is_qed: keywords -> string -> bool
val is_qed_global: keywords -> string -> bool
val is_printed: keywords -> string -> bool
- val define: string * T option -> unit
- val get_keywords: unit -> keywords
end;
structure Keyword: KEYWORD =
@@ -69,57 +62,50 @@
(* kinds *)
-datatype T = Keyword of
+val diag = "diag";
+val heading = "heading";
+val thy_begin = "thy_begin";
+val thy_end = "thy_end";
+val thy_decl = "thy_decl";
+val thy_decl_block = "thy_decl_block";
+val thy_load = "thy_load";
+val thy_goal = "thy_goal";
+val qed = "qed";
+val qed_script = "qed_script";
+val qed_block = "qed_block";
+val qed_global = "qed_global";
+val prf_goal = "prf_goal";
+val prf_block = "prf_block";
+val prf_open = "prf_open";
+val prf_close = "prf_close";
+val prf_chain = "prf_chain";
+val prf_decl = "prf_decl";
+val prf_asm = "prf_asm";
+val prf_asm_goal = "prf_asm_goal";
+val prf_asm_goal_script = "prf_asm_goal_script";
+val prf_script = "prf_script";
+
+val kinds =
+ [diag, heading, thy_begin, thy_end, thy_load, thy_decl, thy_decl_block, thy_goal,
+ qed, qed_script, qed_block, qed_global, prf_goal, prf_block, prf_open, prf_close,
+ prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_asm_goal_script, prf_script];
+
+
+(* specifications *)
+
+type T =
{kind: string,
files: string list, (*extensions of embedded files*)
tags: string list};
-fun kind s = Keyword {kind = s, files = [], tags = []};
-fun kind_of (Keyword {kind, ...}) = kind;
-fun kind_files_of (Keyword {kind, files, ...}) = (kind, files);
-
-val diag = kind "diag";
-val heading = kind "heading";
-val thy_begin = kind "thy_begin";
-val thy_end = kind "thy_end";
-val thy_decl = kind "thy_decl";
-val thy_decl_block = kind "thy_decl_block";
-val thy_load = kind "thy_load";
-val thy_goal = kind "thy_goal";
-val qed = kind "qed";
-val qed_script = kind "qed_script";
-val qed_block = kind "qed_block";
-val qed_global = kind "qed_global";
-val prf_goal = kind "prf_goal";
-val prf_block = kind "prf_block";
-val prf_open = kind "prf_open";
-val prf_close = kind "prf_close";
-val prf_chain = kind "prf_chain";
-val prf_decl = kind "prf_decl";
-val prf_asm = kind "prf_asm";
-val prf_asm_goal = kind "prf_asm_goal";
-val prf_asm_goal_script = kind "prf_asm_goal_script";
-val prf_script = kind "prf_script";
-
-val kinds =
- [diag, heading, thy_begin, thy_end, thy_load, thy_decl, thy_decl_block, thy_goal,
- qed, qed_script, qed_block, qed_global, prf_goal, prf_block, prf_open, prf_close,
- prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_asm_goal_script, prf_script]
- |> map kind_of;
-
-
-(* specifications *)
-
type spec = (string * string list) * string list;
-fun check_spec ((kind, files), tags) =
+fun check_spec ((kind, files), tags) : T =
if not (member (op =) kinds kind) then
error ("Unknown outer syntax keyword kind " ^ quote kind)
- else if not (null files) andalso kind <> kind_of thy_load then
+ else if not (null files) andalso kind <> thy_load then
error ("Illegal specification of files for " ^ quote kind)
- else Keyword {kind = kind, files = files, tags = tags};
-
-fun command_spec ((name, s), pos) = ((name, check_spec s), pos);
+ else {kind = kind, files = files, tags = tags};
@@ -158,41 +144,45 @@
Scan.merge_lexicons (major1, major2),
Symtab.merge (K true) (commands1, commands2));
-fun add_keywords (name, opt_kind) = map_keywords (fn (minor, major, commands) =>
- (case opt_kind of
- NONE =>
- let
- val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
- in (minor', major, commands) end
- | SOME kind =>
- let
- val major' = Scan.extend_lexicon (Symbol.explode name) major;
- val commands' = Symtab.update (name, kind) commands;
- in (minor, major', commands') end));
+val add_keywords =
+ fold (fn (name, opt_spec) => map_keywords (fn (minor, major, commands) =>
+ (case opt_spec of
+ NONE =>
+ let
+ val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
+ in (minor', major, commands) end
+ | SOME spec =>
+ let
+ val kind = check_spec spec;
+ val major' = Scan.extend_lexicon (Symbol.explode name) major;
+ val commands' = Symtab.update (name, kind) commands;
+ in (minor, major', commands') end)));
-(* lookup *)
+(* lookup keywords *)
-fun is_keyword keywords s =
+fun is_literal keywords s =
let
val minor = minor_keywords keywords;
val major = major_keywords keywords;
val syms = Symbol.explode s;
in Scan.is_literal minor syms orelse Scan.is_literal major syms end;
+fun is_command (Keywords {commands, ...}) = Symtab.defined commands;
+
fun command_keyword (Keywords {commands, ...}) = Symtab.lookup commands;
fun command_files keywords name path =
(case command_keyword keywords name of
NONE => []
- | SOME (Keyword {kind, files, ...}) =>
- if kind <> kind_of thy_load then []
+ | SOME {kind, files, ...} =>
+ if kind <> thy_load then []
else if null files then [path]
else map (fn ext => Path.ext ext path) files);
fun command_tags keywords name =
(case command_keyword keywords name of
- SOME (Keyword {tags, ...}) => tags
+ SOME {tags, ...} => tags
| NONE => []);
@@ -200,11 +190,11 @@
fun command_category ks =
let
- val tab = Symtab.make_set (map kind_of ks);
+ val tab = Symtab.make_set ks;
fun pred keywords name =
(case command_keyword keywords name of
NONE => false
- | SOME k => Symtab.defined tab (kind_of k));
+ | SOME {kind, ...} => Symtab.defined tab kind);
in pred end;
val is_diag = command_category [diag];
@@ -236,16 +226,5 @@
fun is_printed keywords = is_theory_goal keywords orf is_proof keywords;
-
-
-(** global state **)
-
-local val global_keywords = Unsynchronized.ref empty_keywords in
-
-fun define decl = CRITICAL (fn () => Unsynchronized.change global_keywords (add_keywords decl));
-fun get_keywords () = ! global_keywords;
-
end;
-end;
-
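
Illustrative note (not part of this changeset): keyword specifications are now plain values, and Keyword.add_keywords takes a batch of (name, spec option) entries. A hypothetical declaration, with made-up names:

  (*hypothetical sketch of the new batch interface*)
  val my_keywords =
    Keyword.empty_keywords
    |> Keyword.add_keywords
        [("my_minor_keyword", NONE),
         ("my_command", SOME ((Keyword.thy_decl, []), []))];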
--- a/src/Pure/Isar/keyword.scala Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Isar/keyword.scala Fri Nov 07 16:36:55 2014 +0100
@@ -63,12 +63,11 @@
val qed_global = Set(QED_GLOBAL)
- type Spec = ((String, List[String]), List[String])
-
-
/** keyword tables **/
+ type Spec = ((String, List[String]), List[String])
+
object Keywords
{
def empty: Keywords = new Keywords()
@@ -99,13 +98,21 @@
def + (name: String): Keywords = new Keywords(minor + name, major, commands)
def + (name: String, kind: String): Keywords = this + (name, (kind, Nil))
- def + (name: String, kind: (String, List[String])): Keywords =
+ def + (name: String, kind_tags: (String, List[String])): Keywords =
{
val major1 = major + name
- val commands1 = commands + (name -> kind)
+ val commands1 = commands + (name -> kind_tags)
new Keywords(minor, major1, commands1)
}
+ def add_keywords(header: Thy_Header.Keywords): Keywords =
+ (this /: header) {
+ case (keywords, (name, None, _)) =>
+ keywords + Symbol.decode(name) + Symbol.encode(name)
+ case (keywords, (name, Some((kind_tags, _)), _)) =>
+ keywords + (Symbol.decode(name), kind_tags) + (Symbol.encode(name), kind_tags)
+ }
+
/* command kind */
--- a/src/Pure/Isar/outer_syntax.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.ML Fri Nov 07 16:36:55 2014 +0100
@@ -1,23 +1,19 @@
(* Title: Pure/Isar/outer_syntax.ML
Author: Markus Wenzel, TU Muenchen
-The global Isabelle/Isar outer syntax.
-
-Note: the syntax for files is statically determined at the very
-beginning; for interactive processing it may change dynamically.
+Isabelle/Isar outer syntax.
*)
signature OUTER_SYNTAX =
sig
- type syntax
- val batch_mode: bool Unsynchronized.ref
- val is_markup: syntax -> Thy_Output.markup -> string -> bool
- val get_syntax: unit -> Keyword.keywords * syntax
- val check_syntax: unit -> unit
- type command_spec = (string * Keyword.T) * Position.T
+ datatype markup = Markup | Markup_Env | Verbatim
+ val is_markup: theory -> markup -> string -> bool
+ val help_syntax: theory -> string list -> unit
+ val print_syntax: theory -> unit
+ type command_spec = string * Position.T
val command: command_spec -> string ->
(Toplevel.transition -> Toplevel.transition) parser -> unit
- val markup_command: Thy_Output.markup -> command_spec -> string ->
+ val markup_command: markup -> command_spec -> string ->
(Toplevel.transition -> Toplevel.transition) parser -> unit
val local_theory': command_spec -> string ->
(bool -> local_theory -> local_theory) parser -> unit
@@ -27,14 +23,12 @@
(bool -> local_theory -> Proof.state) parser -> unit
val local_theory_to_proof: command_spec -> string ->
(local_theory -> Proof.state) parser -> unit
- val help_syntax: string list -> unit
- val print_syntax: unit -> unit
val scan: Keyword.keywords -> Position.T -> string -> Token.T list
- val parse: Keyword.keywords * syntax -> Position.T -> string -> Toplevel.transition list
+ val parse: theory -> Position.T -> string -> Toplevel.transition list
+ val parse_tokens: theory -> Token.T list -> Toplevel.transition list
val parse_spans: Token.T list -> Command_Span.span list
val side_comments: Token.T list -> Token.T list
- val command_reports: syntax -> Token.T -> Position.report_text list
- val read_spans: syntax -> Token.T list -> Toplevel.transition list
+ val command_reports: theory -> Token.T -> Position.report_text list
end;
structure Outer_Syntax: OUTER_SYNTAX =
@@ -42,18 +36,33 @@
(** outer syntax **)
+(* errors *)
+
+fun err_command msg name ps =
+ error (msg ^ quote (Markup.markup Markup.keyword1 name) ^ Position.here_list ps);
+
+fun err_dup_command name ps =
+ err_command "Duplicate outer syntax command " name ps;
+
+
(* command parsers *)
+datatype markup = Markup | Markup_Env | Verbatim;
+
datatype command = Command of
{comment: string,
- markup: Thy_Output.markup option,
+ markup: markup option,
parse: (Toplevel.transition -> Toplevel.transition) parser,
pos: Position.T,
id: serial};
+fun eq_command (Command {id = id1, ...}, Command {id = id2, ...}) = id1 = id2;
+
fun new_command comment markup parse pos =
Command {comment = comment, markup = markup, parse = parse, pos = pos, id = serial ()};
+fun command_pos (Command {pos, ...}) = pos;
+
fun command_markup def (name, Command {pos, id, ...}) =
Markup.properties (Position.entity_properties_of def id pos)
(Markup.entity Markup.commandN name);
@@ -69,94 +78,104 @@
datatype syntax = Syntax of
{commands: command Symtab.table,
- markups: (string * Thy_Output.markup) list};
+ markups: (string * markup) list};
-fun make_syntax commands markups =
+fun make_syntax (commands, markups) =
Syntax {commands = commands, markups = markups};
-val empty_syntax = make_syntax Symtab.empty [];
+structure Data = Theory_Data
+(
+ type T = syntax;
+ val empty = make_syntax (Symtab.empty, []);
+ val extend = I;
+ fun merge (Syntax {commands = commands1, markups = markups1},
+ Syntax {commands = commands2, markups = markups2}) =
+ let
+ val commands' = (commands1, commands2)
+ |> Symtab.join (fn name => fn (cmd1, cmd2) =>
+ if eq_command (cmd1, cmd2) then raise Symtab.SAME
+ else err_dup_command name [command_pos cmd1, command_pos cmd2]);
+ val markups' = AList.merge (op =) (K true) (markups1, markups2);
+ in make_syntax (commands', markups') end;
+);
-fun map_commands f (Syntax {commands, ...}) =
+(* inspect syntax *)
+
+val get_syntax = Data.get;
+
+val dest_commands =
+ get_syntax #> (fn Syntax {commands, ...} => commands |> Symtab.dest |> sort_wrt #1);
+
+val lookup_commands =
+ get_syntax #> (fn Syntax {commands, ...} => Symtab.lookup commands);
+
+val is_markup =
+ get_syntax #> (fn Syntax {markups, ...} => fn kind => fn name =>
+ AList.lookup (op =) markups name = SOME kind);
+
+fun help_syntax thy pats =
+ dest_commands thy
+ |> filter (fn (name, _) => forall (fn pat => match_string pat name) pats)
+ |> map pretty_command
+ |> Pretty.writeln_chunks;
+
+fun print_syntax thy =
let
- val commands' = f commands;
+ val keywords = Thy_Header.get_keywords thy;
+ val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords);
+ val commands = dest_commands thy;
+ in
+ [Pretty.strs ("keywords:" :: map quote minor),
+ Pretty.big_list "commands:" (map pretty_command commands)]
+ |> Pretty.writeln_chunks
+ end;
+
+
+(* build syntax *)
+
+fun add_command name cmd thy = thy |> Data.map (fn Syntax {commands, ...} =>
+ let
+ val keywords = Thy_Header.get_keywords thy;
+ val _ =
+ Keyword.is_command keywords name orelse
+ err_command "Undeclared outer syntax command " name [command_pos cmd];
+
+ val _ =
+ (case Symtab.lookup commands name of
+ NONE => ()
+ | SOME cmd' => err_dup_command name [command_pos cmd, command_pos cmd']);
+
+ val _ =
+ Context_Position.report_generic (ML_Context.the_generic_context ())
+ (command_pos cmd) (command_markup true (name, cmd));
+
+ val commands' = Symtab.update (name, cmd) commands;
val markups' =
Symtab.fold (fn (name, Command {markup = SOME m, ...}) => cons (name, m) | _ => I)
commands' [];
- in make_syntax commands' markups' end;
-
-fun dest_commands (Syntax {commands, ...}) =
- commands |> Symtab.dest |> sort_wrt #1;
+ in make_syntax (commands', markups') end);
-fun lookup_commands (Syntax {commands, ...}) = Symtab.lookup commands;
-
-fun is_markup (Syntax {markups, ...}) kind name =
- AList.lookup (op =) markups name = SOME kind;
-
+val _ = Theory.setup (Theory.at_end (fn thy =>
+ let
+ val keywords = Thy_Header.get_keywords thy;
+ val major = Keyword.major_keywords keywords;
+ val _ =
+ (case subtract (op =) (map #1 (dest_commands thy)) (Scan.dest_lexicon major) of
+ [] => ()
+ | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing))
+ in NONE end));
-(** global outer syntax **)
-
-type command_spec = (string * Keyword.T) * Position.T;
+(* implicit theory setup *)
-val batch_mode = Unsynchronized.ref false;
-
-local
-
-(*synchronized wrt. Keywords*)
-val global_syntax = Unsynchronized.ref empty_syntax;
+type command_spec = string * Position.T;
-fun add_command (name, kind) cmd = CRITICAL (fn () =>
- let
- val context = ML_Context.the_generic_context ();
- val thy = Context.theory_of context;
- val Command {pos, ...} = cmd;
- val command_name = quote (Markup.markup Markup.keyword1 name);
- val _ =
- (case try (Thy_Header.the_keyword thy) name of
- SOME spec =>
- if Option.map #1 spec = SOME (Keyword.kind_files_of kind) then ()
- else
- error ("Inconsistent outer syntax keyword declaration " ^
- command_name ^ Position.here pos)
- | NONE =>
- error ("Undeclared outer syntax command " ^ command_name ^ Position.here pos));
- val _ = Context_Position.report_generic context pos (command_markup true (name, cmd));
- in
- Unsynchronized.change global_syntax (map_commands (fn commands =>
- (if not (Symtab.defined commands name) then ()
- else if ! batch_mode then
- error ("Attempt to redefine outer syntax command " ^ command_name)
- else
- warning ("Redefining outer syntax command " ^ command_name ^
- Position.here (Position.thread_data ()));
- Symtab.update (name, cmd) commands)))
- end);
+fun command (name, pos) comment parse =
+ Theory.setup (add_command name (new_command comment NONE parse pos));
-in
-
-fun get_syntax () = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax));
-
-fun check_syntax () =
- let
- val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax));
- val major = Keyword.major_keywords keywords;
- in
- (case subtract (op =) (map #1 (dest_commands syntax)) (Scan.dest_lexicon major) of
- [] => ()
- | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing))
- end;
-
-fun command (spec, pos) comment parse =
- add_command spec (new_command comment NONE parse pos);
-
-fun markup_command markup (spec, pos) comment parse =
- add_command spec (new_command comment (SOME markup) parse pos);
-
-end;
-
-
-(* local_theory commands *)
+fun markup_command markup (name, pos) comment parse =
+ Theory.setup (add_command name (new_command comment (SOME markup) parse pos));
fun local_theory_command trans command_spec comment parse =
command command_spec comment (Parse.opt_target -- parse >> (fn (loc, f) => trans loc f));
@@ -167,60 +186,10 @@
val local_theory_to_proof = local_theory_command Toplevel.local_theory_to_proof;
-(* inspect syntax *)
-
-fun help_syntax pats =
- dest_commands (#2 (get_syntax ()))
- |> filter (fn (name, _) => forall (fn pat => match_string pat name) pats)
- |> map pretty_command
- |> Pretty.writeln_chunks;
-
-fun print_syntax () =
- let
- val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), #2 (get_syntax ())));
- val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords);
- val commands = dest_commands syntax;
- in
- [Pretty.strs ("keywords:" :: map quote minor),
- Pretty.big_list "commands:" (map pretty_command commands)]
- |> Pretty.writeln_chunks
- end;
-
-
(** toplevel parsing **)
-(* parse commands *)
-
-local
-
-fun parse_command syntax =
- Parse.position Parse.command_ :|-- (fn (name, pos) =>
- let
- val tr0 = Toplevel.empty |> Toplevel.name name |> Toplevel.position pos;
- in
- (case lookup_commands syntax name of
- SOME (Command {parse, ...}) => Parse.!!! (Parse.tags |-- parse) >> (fn f => f tr0)
- | NONE =>
- Scan.succeed
- (tr0 |> Toplevel.imperative (fn () =>
- error ("Bad parser for outer syntax command " ^ quote name ^ Position.here pos))))
- end);
-
-val parse_cmt = Parse.$$$ "--" -- Parse.!!! Parse.document_source;
-
-in
-
-fun commands_source syntax =
- Token.source_proper #>
- Source.source Token.stopper (Scan.bulk (parse_cmt >> K NONE || Parse.not_eof >> SOME)) #>
- Source.map_filter I #>
- Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command syntax) xs));
-
-end;
-
-
-(* off-line scanning/parsing *)
+(* scan tokens *)
fun scan keywords pos =
Source.of_string #>
@@ -228,11 +197,46 @@
Token.source keywords pos #>
Source.exhaust;
-fun parse (keywords, syntax) pos str =
+
+(* parse commands *)
+
+local
+
+fun parse_command thy =
+ Parse.position Parse.command_ :|-- (fn (name, pos) =>
+ let
+ val tr0 = Toplevel.empty |> Toplevel.name name |> Toplevel.position pos;
+ in
+ (case lookup_commands thy name of
+ SOME (Command {parse, ...}) => Parse.!!! (Parse.tags |-- parse) >> (fn f => f tr0)
+ | NONE =>
+ Scan.succeed
+ (tr0 |> Toplevel.imperative (fn () =>
+ err_command "Bad parser for outer syntax command " name [pos])))
+ end);
+
+val parse_cmt = Parse.$$$ "--" -- Parse.!!! Parse.document_source;
+
+in
+
+fun commands_source thy =
+ Token.source_proper #>
+ Source.source Token.stopper (Scan.bulk (parse_cmt >> K NONE || Parse.not_eof >> SOME)) #>
+ Source.map_filter I #>
+ Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command thy) xs));
+
+end;
+
+fun parse thy pos str =
Source.of_string str
|> Symbol.source
- |> Token.source keywords pos
- |> commands_source syntax
+ |> Token.source (Thy_Header.get_keywords thy) pos
+ |> commands_source thy
+ |> Source.exhaust;
+
+fun parse_tokens thy toks =
+ Source.of_list toks
+ |> commands_source thy
|> Source.exhaust;
@@ -279,19 +283,14 @@
(* read commands *)
-fun command_reports syntax tok =
+fun command_reports thy tok =
if Token.is_command tok then
let val name = Token.content_of tok in
- (case lookup_commands syntax name of
+ (case lookup_commands thy name of
NONE => []
| SOME cmd => [((Token.pos_of tok, command_markup false (name, cmd)), "")])
end
else [];
-fun read_spans syntax toks =
- Source.of_list toks
- |> commands_source syntax
- |> Source.exhaust;
-
end;
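
Illustrative note (not part of this changeset): with command_spec reduced to string * Position.T, command registration happens via Theory.setup against keywords declared in the theory header. A hypothetical command definition, assuming "my_command" has been declared in the header's keywords section:

  (*hypothetical sketch*)
  val _ =
    Outer_Syntax.command @{command_spec "my_command"} "print a greeting"
      (Scan.succeed (Toplevel.keep (fn _ => writeln "hello")));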
--- a/src/Pure/ML/ml_antiquotations.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/ML/ml_antiquotations.ML Fri Nov 07 16:36:55 2014 +0100
@@ -234,29 +234,19 @@
(* outer syntax *)
fun with_keyword f =
- Args.theory -- Scan.lift (Parse.position Parse.string) >> (fn (thy, (name, pos)) =>
- (f ((name, Thy_Header.the_keyword thy name), pos)
- handle ERROR msg => error (msg ^ Position.here pos)));
+ Args.theory -- Scan.lift (Parse.position Parse.string)
+ >> (fn (thy, (name, pos)) => (f (name, pos, Thy_Header.the_keyword thy (name, pos))));
val _ = Theory.setup
(ML_Antiquotation.value @{binding keyword}
- (with_keyword
- (fn ((name, NONE), _) => "Parse.$$$ " ^ ML_Syntax.print_string name
- | ((name, SOME _), pos) =>
- error ("Expected minor keyword " ^ quote name ^ Position.here pos))) #>
+ (with_keyword (fn (name, pos, is_command) =>
+ if not is_command then "Parse.$$$ " ^ ML_Syntax.print_string name
+ else error ("Expected minor keyword " ^ quote name ^ Position.here pos))) #>
ML_Antiquotation.value @{binding command_spec}
- (with_keyword
- (fn ((name, SOME kind), pos) =>
- "Keyword.command_spec " ^ ML_Syntax.atomic
- ((ML_Syntax.print_pair
- (ML_Syntax.print_pair ML_Syntax.print_string
- (ML_Syntax.print_pair
- (ML_Syntax.print_pair ML_Syntax.print_string
- (ML_Syntax.print_list ML_Syntax.print_string))
- (ML_Syntax.print_list ML_Syntax.print_string)))
- ML_Syntax.print_position) ((name, kind), pos))
- | ((name, NONE), pos) =>
- error ("Expected command keyword " ^ quote name ^ Position.here pos))));
+ (with_keyword (fn (name, pos, is_command) =>
+ if is_command then
+ ML_Syntax.print_pair ML_Syntax.print_string ML_Syntax.print_position (name, pos)
+ else error ("Expected command keyword " ^ quote name ^ Position.here pos))));
end;
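
Illustrative note (not part of this changeset): the @{keyword} antiquotation still yields a parser for a minor keyword, while @{command_spec} now yields just the name/position pair. A hypothetical use of the former, with a made-up value name:

  (*hypothetical sketch*)
  val opt_where = Scan.option (@{keyword "where"} |-- Parse.prop);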
--- a/src/Pure/ML/ml_context.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/ML/ml_context.ML Fri Nov 07 16:36:55 2014 +0100
@@ -116,12 +116,12 @@
then ((begin_env0, map (fn Antiquote.Text tok => tok) ants), opt_ctxt)
else
let
- val keywords = Keyword.get_keywords ();
fun no_decl _ = ([], []);
fun expand (Antiquote.Text tok) ctxt = (K ([], [tok]), ctxt)
| expand (Antiquote.Antiq (ss, {range, ...})) ctxt =
let
+ val keywords = Thy_Header.get_keywords' ctxt;
val (decl, ctxt') =
apply_antiquotation (Token.read_antiq keywords antiq (ss, #1 range)) ctxt;
val decl' = decl #> pairself (ML_Lex.tokenize #> map (ML_Lex.set_range range));
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/ML/ml_file.ML Fri Nov 07 16:36:55 2014 +0100
@@ -0,0 +1,25 @@
+(* Title: Pure/ML/ml_file.ML
+ Author: Makarius
+
+The 'ML_file' command.
+*)
+
+structure ML_File: sig end =
+struct
+
+val _ =
+ Outer_Syntax.command ("ML_file", @{here}) "ML text from file"
+ (Resources.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
+ let
+ val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy);
+ val provide = Resources.provide (src_path, digest);
+ val source = {delimited = true, text = cat_lines lines, pos = pos};
+ val flags = {SML = false, exchange = false, redirect = true, verbose = true};
+ in
+ gthy
+ |> ML_Context.exec (fn () => ML_Context.eval_source flags source)
+ |> Local_Theory.propagate_ml_env
+ |> Context.mapping provide (Local_Theory.background_theory provide)
+ end)));
+
+end;
--- a/src/Pure/PIDE/command.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/PIDE/command.ML Fri Nov 07 16:36:55 2014 +0100
@@ -8,15 +8,15 @@
sig
type blob = (string * (SHA1.digest * string list) option) Exn.result
val read_file: Path.T -> Position.T -> Path.T -> Token.file
- val read: Keyword.keywords -> Path.T -> (unit -> theory) -> blob list -> Token.T list ->
+ val read_thy: Toplevel.state -> theory
+ val read: theory -> Path.T -> (unit -> theory) -> blob list -> Token.T list ->
Toplevel.transition
type eval
val eval_eq: eval * eval -> bool
val eval_running: eval -> bool
val eval_finished: eval -> bool
val eval_result_state: eval -> Toplevel.state
- val eval: Keyword.keywords -> Path.T -> (unit -> theory) -> blob list -> Token.T list ->
- eval -> eval
+ val eval: Path.T -> (unit -> theory) -> blob list -> Token.T list -> eval -> eval
type print
val print: bool -> (string * string list) list -> Keyword.keywords -> string ->
eval -> print list -> print list option
@@ -145,12 +145,16 @@
|> Command_Span.content
| _ => toks);
+val bootstrap_thy = ML_Context.the_global_context ();
+
in
-fun read keywords master_dir init blobs span =
+fun read_thy st = Toplevel.theory_of st handle Toplevel.UNDEF => bootstrap_thy;
+
+fun read thy master_dir init blobs span =
let
- val syntax = #2 (Outer_Syntax.get_syntax ());
- val command_reports = Outer_Syntax.command_reports syntax;
+ val keywords = Thy_Header.get_keywords thy;
+ val command_reports = Outer_Syntax.command_reports thy;
val proper_range = Token.range_of (#1 (take_suffix Token.is_improper span));
val pos =
@@ -163,7 +167,7 @@
in
if is_malformed then Toplevel.malformed pos "Malformed command syntax"
else
- (case Outer_Syntax.read_spans syntax (resolve_files keywords master_dir blobs span) of
+ (case Outer_Syntax.parse_tokens thy (resolve_files keywords master_dir blobs span) of
[tr] => Toplevel.modify_init init tr
| [] => Toplevel.ignored (#1 (Token.range_of span))
| _ => Toplevel.malformed (#1 proper_range) "Exactly one command expected")
@@ -264,15 +268,17 @@
in
-fun eval keywords master_dir init blobs span eval0 =
+fun eval master_dir init blobs span eval0 =
let
val exec_id = Document_ID.make ();
fun process () =
let
+ val eval_state0 = eval_result eval0;
+ val thy = read_thy (#state eval_state0);
val tr =
Position.setmp_thread_data (Position.id_only (Document_ID.print exec_id))
- (fn () => read keywords master_dir init blobs span |> Toplevel.exec_id exec_id) ();
- in eval_state keywords span tr (eval_result eval0) end;
+ (fn () => read thy master_dir init blobs span |> Toplevel.exec_id exec_id) ();
+ in eval_state (Thy_Header.get_keywords thy) span tr eval_state0 end;
in Eval {exec_id = exec_id, eval_process = memo exec_id process} end;
end;
--- a/src/Pure/PIDE/document.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/PIDE/document.ML Fri Nov 07 16:36:55 2014 +0100
@@ -8,6 +8,7 @@
signature DOCUMENT =
sig
val timing: bool Unsynchronized.ref
+ val init_keywords: unit -> unit
type node_header = string * Thy_Header.header * string list
type overlay = Document_ID.command * (string * string list)
datatype node_edit =
@@ -37,6 +38,20 @@
+(** global keywords **)
+
+val global_keywords = Synchronized.var "global_keywords" Keyword.empty_keywords;
+
+fun init_keywords () =
+ Synchronized.change global_keywords
+ (fn _ =>
+ fold (curry Keyword.merge_keywords o Thy_Header.get_keywords o Thy_Info.get_theory)
+ (Thy_Info.get_names ()) Keyword.empty_keywords);
+
+fun get_keywords () = Synchronized.value global_keywords;
+
+
+
(** document structure **)
fun err_dup kind id = error ("Duplicate " ^ kind ^ ": " ^ Document_ID.print id);
@@ -208,7 +223,7 @@
let
val imports = map fst (#imports header);
val errors1 =
- (Thy_Header.define_keywords (#keywords header); errors)
+ (Synchronized.change global_keywords (Keyword.add_keywords (#keywords header)); errors)
handle ERROR msg => errors @ [msg];
val nodes1 = nodes
|> default_node name
@@ -318,7 +333,7 @@
val span =
Lazy.lazy (fn () =>
Position.setmp_thread_data (Position.id_only id)
- (fn () => Outer_Syntax.scan (Keyword.get_keywords ()) (Position.id id) text) ());
+ (fn () => Outer_Syntax.scan (get_keywords ()) (Position.id id) text) ());
val _ =
Position.setmp_thread_data (Position.id_only id)
(fn () => Output.status (Markup.markup_only Markup.accepted)) ();
@@ -529,7 +544,7 @@
val span = Lazy.force span0;
val eval' =
- Command.eval keywords (master_directory node)
+ Command.eval (master_directory node)
(fn () => the_default illegal_init init span) blobs span eval;
val prints' =
perhaps (Command.print command_visible command_overlays keywords command_name eval') [];
@@ -562,7 +577,7 @@
deps = map (Future.task_of o #2) deps, pri = 1, interrupts = false}
(fn () => timeit ("Document.update " ^ name) (fn () =>
let
- val keywords = Keyword.get_keywords ();
+ val keywords = get_keywords ();
val imports = map (apsnd Future.join) deps;
val imports_result_changed = exists (#4 o #1 o #2) imports;
val node_required = Symtab.defined required name;
--- a/src/Pure/PIDE/protocol.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/PIDE/protocol.ML Fri Nov 07 16:36:55 2014 +0100
@@ -23,6 +23,10 @@
end);
val _ =
+ Isabelle_Process.protocol_command "Document.init_keywords"
+ (fn [] => Document.init_keywords ());
+
+val _ =
Isabelle_Process.protocol_command "Document.define_blob"
(fn [digest, content] => Document.change_state (Document.define_blob digest content));
--- a/src/Pure/PIDE/resources.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/PIDE/resources.ML Fri Nov 07 16:36:55 2014 +0100
@@ -82,7 +82,7 @@
(case Token.get_files tok of
[] =>
let
- val keywords = Keyword.get_keywords ();
+ val keywords = Thy_Header.get_keywords thy;
val master_dir = master_directory thy;
val pos = Token.pos_of tok;
val src_paths = Keyword.command_files keywords cmd (Path.explode name);
@@ -122,18 +122,18 @@
fun begin_theory master_dir {name, imports, keywords} parents =
Theory.begin_theory name parents
|> put_deps master_dir imports
- |> fold Thy_Header.declare_keyword keywords;
+ |> Thy_Header.add_keywords keywords;
fun excursion keywords master_dir last_timing init elements =
let
- fun prepare_span span =
+ fun prepare_span st span =
Command_Span.content span
- |> Command.read keywords master_dir init []
+ |> Command.read (Command.read_thy st) master_dir init []
|> (fn tr => Toplevel.put_timing (last_timing tr) tr);
fun element_result span_elem (st, _) =
let
- val elem = Thy_Syntax.map_element prepare_span span_elem;
+ val elem = Thy_Syntax.map_element (prepare_span st) span_elem;
val (results, st') = Toplevel.element_result keywords elem st;
val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem);
in (results, (st', pos')) end;
@@ -147,8 +147,9 @@
fun load_thy document last_timing update_time master_dir header text_pos text parents =
let
val (name, _) = #name header;
- val _ = Thy_Header.define_keywords (#keywords header);
- val keywords = Keyword.get_keywords ();
+ val keywords =
+ fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents
+ (Keyword.add_keywords (#keywords header) Keyword.empty_keywords);
val toks = Outer_Syntax.scan keywords text_pos text;
val spans = Outer_Syntax.parse_spans toks;
@@ -166,14 +167,11 @@
fun present () =
let
val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results);
- val (keywords, syntax) = Outer_Syntax.get_syntax ();
in
if exists (Toplevel.is_skipped_proof o #2) res then
warning ("Cannot present theory with skipped proofs: " ^ quote name)
else
- let val tex_source =
- Thy_Output.present_thy keywords (Outer_Syntax.is_markup syntax) res toks
- |> Buffer.content;
+ let val tex_source = Thy_Output.present_thy thy res toks |> Buffer.content;
in if document then Present.theory_output name tex_source else () end
end;
--- a/src/Pure/PIDE/session.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/PIDE/session.ML Fri Nov 07 16:36:55 2014 +0100
@@ -54,7 +54,6 @@
(Execution.shutdown ();
Thy_Info.finish ();
Present.finish ();
- Outer_Syntax.check_syntax ();
Future.shutdown ();
Event_Timer.shutdown ();
Future.shutdown ();
--- a/src/Pure/PIDE/session.scala Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/PIDE/session.scala Fri Nov 07 16:36:55 2014 +0100
@@ -610,6 +610,12 @@
def update_options(options: Options)
{ manager.send_wait(Update_Options(options)) }
+ def init_options(options: Options)
+ {
+ update_options(options)
+ protocol_command("Document.init_keywords")
+ }
+
def dialog_result(id: Document_ID.Generic, serial: Long, result: String)
{ manager.send(Session.Dialog_Result(id, serial, result)) }
}
--- a/src/Pure/Pure.thy Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Pure.thy Fri Nov 07 16:36:55 2014 +0100
@@ -6,18 +6,12 @@
theory Pure
keywords
- "!!" "!" "%" "(" ")" "+" "," "--" ":" "::" ";" "<" "<=" "=" "=="
- "=>" "?" "[" "\<equiv>" "\<leftharpoondown>" "\<rightharpoonup>"
- "\<rightleftharpoons>" "\<subseteq>" "]" "and" "assumes"
- "attach" "begin" "binder" "constrains" "defines" "fixes" "for"
- "identifier" "if" "imports" "in" "includes" "infix" "infixl"
- "infixr" "is" "keywords" "notes" "obtains" "open" "output"
+ "!!" "!" "+" "--" ":" ";" "<" "<=" "=" "=>" "?" "[" "\<equiv>"
+ "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
+ "\<subseteq>" "]" "assumes" "attach" "binder" "constrains"
+ "defines" "fixes" "for" "identifier" "if" "in" "includes" "infix"
+ "infixl" "infixr" "is" "notes" "obtains" "open" "output"
"overloaded" "pervasive" "shows" "structure" "unchecked" "where" "|"
- and "header" :: heading
- and "chapter" :: heading
- and "section" :: heading
- and "subsection" :: heading
- and "subsubsection" :: heading
and "text" "text_raw" :: thy_decl
and "txt" "txt_raw" :: prf_decl % "proof"
and "default_sort" :: thy_decl == ""
--- a/src/Pure/ROOT Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/ROOT Fri Nov 07 16:36:55 2014 +0100
@@ -154,6 +154,7 @@
"ML/ml_compiler_polyml.ML"
"ML/ml_context.ML"
"ML/ml_env.ML"
+ "ML/ml_file.ML"
"ML/ml_lex.ML"
"ML/ml_parse.ML"
"ML/ml_options.ML"
--- a/src/Pure/ROOT.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/ROOT.ML Fri Nov 07 16:36:55 2014 +0100
@@ -302,10 +302,11 @@
(*theory documents*)
use "System/isabelle_system.ML";
use "Thy/term_style.ML";
+use "Isar/outer_syntax.ML";
use "Thy/thy_output.ML";
-use "Isar/outer_syntax.ML";
use "General/graph_display.ML";
use "Thy/present.ML";
+use "pure_syn.ML";
use "PIDE/command.ML";
use "PIDE/query_operation.ML";
use "PIDE/resources.ML";
@@ -361,7 +362,7 @@
(* the Pure theory *)
-use "pure_syn.ML";
+use "ML/ml_file.ML";
Runtime.toplevel_program (fn () => Thy_Info.use_thy ("Pure", Position.none));
Context.set_thread_data NONE;
structure Pure = struct val thy = Thy_Info.get_theory "Pure" end;
--- a/src/Pure/Thy/thy_header.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Thy/thy_header.ML Fri Nov 07 16:36:55 2014 +0100
@@ -12,9 +12,11 @@
imports: (string * Position.T) list,
keywords: keywords}
val make: string * Position.T -> (string * Position.T) list -> keywords -> header
- val define_keywords: keywords -> unit
- val declare_keyword: string * Keyword.spec option -> theory -> theory
- val the_keyword: theory -> string -> Keyword.spec option
+ val bootstrap_keywords: Keyword.keywords
+ val add_keywords: keywords -> theory -> theory
+ val get_keywords: theory -> Keyword.keywords
+ val get_keywords': Proof.context -> Keyword.keywords
+ val the_keyword: theory -> string * Position.T -> bool
val args: header parser
val read: Position.T -> string -> header
val read_tokens: Token.T list -> header
@@ -23,6 +25,10 @@
structure Thy_Header: THY_HEADER =
struct
+(** keyword declarations **)
+
+(* header *)
+
type keywords = (string * Keyword.spec option) list;
type header =
@@ -34,37 +40,7 @@
{name = name, imports = imports, keywords = keywords};
-
-(** keyword declarations **)
-
-fun define_keywords (keywords: keywords) =
- List.app (Keyword.define o apsnd (Option.map Keyword.check_spec)) keywords;
-
-fun err_dup name = error ("Duplicate declaration of outer syntax keyword " ^ quote name);
-
-structure Data = Theory_Data
-(
- type T = Keyword.spec option Symtab.table;
- val empty = Symtab.empty;
- val extend = I;
- fun merge data : T = Symtab.merge (op =) data handle Symtab.DUP name => err_dup name;
-);
-
-fun declare_keyword (name, spec) =
- Data.map (fn data =>
- (Option.map Keyword.check_spec spec;
- Symtab.update_new (name, spec) data handle Symtab.DUP dup => err_dup dup));
-
-fun the_keyword thy name =
- (case Symtab.lookup (Data.get thy) name of
- SOME spec => spec
- | NONE => error ("Undeclared outer syntax keyword " ^ quote name));
-
-
-
-(** concrete syntax **)
-
-(* header keywords *)
+(* bootstrap keywords *)
val headerN = "header";
val chapterN = "chapter";
@@ -77,18 +53,53 @@
val keywordsN = "keywords";
val beginN = "begin";
-val header_keywords =
+val bootstrap_keywords =
Keyword.empty_keywords
- |> fold (Keyword.add_keywords o rpair NONE)
- ["%", "(", ")", ",", "::", "==", "and", beginN, importsN, keywordsN]
- |> fold Keyword.add_keywords
- [(headerN, SOME Keyword.heading),
- (chapterN, SOME Keyword.heading),
- (sectionN, SOME Keyword.heading),
- (subsectionN, SOME Keyword.heading),
- (subsubsectionN, SOME Keyword.heading),
- (theoryN, SOME Keyword.thy_begin)];
+ |> Keyword.add_keywords
+ [("%", NONE),
+ ("(", NONE),
+ (")", NONE),
+ (",", NONE),
+ ("::", NONE),
+ ("==", NONE),
+ ("and", NONE),
+ (beginN, NONE),
+ (importsN, NONE),
+ (keywordsN, NONE),
+ (headerN, SOME ((Keyword.heading, []), [])),
+ (chapterN, SOME ((Keyword.heading, []), [])),
+ (sectionN, SOME ((Keyword.heading, []), [])),
+ (subsectionN, SOME ((Keyword.heading, []), [])),
+ (subsubsectionN, SOME ((Keyword.heading, []), [])),
+ (theoryN, SOME ((Keyword.thy_begin, []), ["theory"])),
+ ("ML_file", SOME ((Keyword.thy_load, []), ["ML"]))];
+
+
+(* theory data *)
+structure Data = Theory_Data
+(
+ type T = Keyword.keywords;
+ val empty = bootstrap_keywords;
+ val extend = I;
+ val merge = Keyword.merge_keywords;
+);
+
+val add_keywords = Data.map o Keyword.add_keywords;
+
+val get_keywords = Data.get;
+val get_keywords' = get_keywords o Proof_Context.theory_of;
+
+fun the_keyword thy (name, pos) =
+ let val keywords = get_keywords thy in
+ if Keyword.is_literal keywords name
+ then Keyword.is_command keywords name
+ else error ("Undeclared outer syntax keyword " ^ quote name ^ Position.here pos)
+ end;
+
+
+
+(** concrete syntax **)
(* header args *)
@@ -145,7 +156,7 @@
str
|> Source.of_string_limited 8000
|> Symbol.source
- |> Token.source_strict header_keywords pos;
+ |> Token.source_strict bootstrap_keywords pos;
fun read_source pos source =
let val res =
--- a/src/Pure/Thy/thy_header.scala Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Thy/thy_header.scala Fri Nov 07 16:36:55 2014 +0100
@@ -15,6 +15,10 @@
object Thy_Header extends Parse.Parser
{
+ /* bootstrap keywords */
+
+ type Keywords = List[(String, Option[Keyword.Spec], Option[String])]
+
val HEADER = "header"
val CHAPTER = "chapter"
val SECTION = "section"
@@ -27,15 +31,31 @@
val AND = "and"
val BEGIN = "begin"
- private val header_keywords =
- Keyword.Keywords.empty +
- "%" + "(" + ")" + "," + "::" + "==" + AND + BEGIN + IMPORTS + KEYWORDS +
- (HEADER, Keyword.HEADING) +
- (CHAPTER, Keyword.HEADING) +
- (SECTION, Keyword.HEADING) +
- (SUBSECTION, Keyword.HEADING) +
- (SUBSUBSECTION, Keyword.HEADING) +
- (THEORY, Keyword.THY_BEGIN)
+ private val bootstrap_header: Keywords =
+ List(
+ ("%", None, None),
+ ("(", None, None),
+ (")", None, None),
+ (",", None, None),
+ ("::", None, None),
+ ("==", None, None),
+ (AND, None, None),
+ (BEGIN, None, None),
+ (IMPORTS, None, None),
+ (KEYWORDS, None, None),
+ (HEADER, Some(((Keyword.HEADING, Nil), Nil)), None),
+ (CHAPTER, Some(((Keyword.HEADING, Nil), Nil)), None),
+ (SECTION, Some(((Keyword.HEADING, Nil), Nil)), None),
+ (SUBSECTION, Some(((Keyword.HEADING, Nil), Nil)), None),
+ (SUBSUBSECTION, Some(((Keyword.HEADING, Nil), Nil)), None),
+ (THEORY, Some(((Keyword.THY_BEGIN, Nil), List("theory"))), None),
+ ("ML_file", Some(((Keyword.THY_LOAD, Nil), List("ML"))), None))
+
+ private val bootstrap_keywords =
+ Keyword.Keywords.empty.add_keywords(bootstrap_header)
+
+ def bootstrap_syntax(): Outer_Syntax =
+ Outer_Syntax.init().add_keywords(bootstrap_header)
/* theory file name */
@@ -101,7 +121,7 @@
def read(reader: Reader[Char]): Thy_Header =
{
- val token = Token.Parsers.token(header_keywords)
+ val token = Token.Parsers.token(bootstrap_keywords)
val toks = new mutable.ListBuffer[Token]
@tailrec def scan_to_begin(in: Reader[Char])
@@ -123,11 +143,6 @@
def read(source: CharSequence): Thy_Header =
read(new CharSequenceReader(source))
-
-
- /* keywords */
-
- type Keywords = List[(String, Option[Keyword.Spec], Option[String])]
}
--- a/src/Pure/Thy/thy_info.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Thy/thy_info.ML Fri Nov 07 16:36:55 2014 +0100
@@ -373,7 +373,7 @@
fun script_thy pos txt thy =
let
val trs =
- Outer_Syntax.parse (Outer_Syntax.get_syntax ()) pos txt
+ Outer_Syntax.parse thy pos txt
|> map (Toplevel.modify_init (K thy));
val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs);
val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel;
--- a/src/Pure/Thy/thy_output.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Thy/thy_output.ML Fri Nov 07 16:36:55 2014 +0100
@@ -22,11 +22,9 @@
theory -> theory
val boolean: string -> bool
val integer: string -> int
- datatype markup = Markup | MarkupEnv | Verbatim
- val eval_antiq: Keyword.keywords -> Toplevel.state -> Antiquote.antiq -> string
+ val eval_antiq: Toplevel.state -> Antiquote.antiq -> string
val check_text: Symbol_Pos.source -> Toplevel.state -> unit
- val present_thy: Keyword.keywords -> (markup -> string -> bool) ->
- (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
+ val present_thy: theory -> (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
val pretty_text: Proof.context -> string -> Pretty.T
val pretty_term: Proof.context -> term -> Pretty.T
val pretty_thm: Proof.context -> thm -> Pretty.T
@@ -35,6 +33,12 @@
Token.src -> 'a list -> Pretty.T list
val output: Proof.context -> Pretty.T list -> string
val verbatim_text: Proof.context -> string -> string
+ val local_theory_markup: (xstring * Position.T) option * Symbol_Pos.source ->
+ Toplevel.transition -> Toplevel.transition
+ val proof_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
+ val header_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition
+ val heading_markup: (xstring * Position.T) option * Symbol_Pos.source ->
+ Toplevel.transition -> Toplevel.transition
end;
structure Thy_Output: THY_OUTPUT =
@@ -158,10 +162,18 @@
(* eval_antiq *)
-fun eval_antiq keywords state ((ss, {range = (pos, _), ...}): Antiquote.antiq) =
+fun eval_antiq state ((ss, {range = (pos, _), ...}): Antiquote.antiq) =
let
+ val keywords =
+ (case try Toplevel.presentation_context_of state of
+ SOME ctxt => Thy_Header.get_keywords' ctxt
+ | NONE =>
+ error ("Unknown context -- cannot expand document antiquotations" ^
+ Position.here pos));
+
val (opts, src) = Token.read_antiq keywords antiq (ss, pos);
fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) ();
+
val preview_ctxt = fold option opts (Toplevel.presentation_context_of state);
val print_ctxt = Context_Position.set_visible false preview_ctxt;
val _ = cmd preview_ctxt;
@@ -171,26 +183,22 @@
(* check_text *)
-fun eval_antiquote keywords state (txt, pos) =
+fun eval_antiquote state (txt, pos) =
let
fun words (Antiquote.Text ss) = [(#1 (Symbol_Pos.range ss), Markup.words)]
| words (Antiquote.Antiq _) = [];
fun expand (Antiquote.Text ss) = Symbol_Pos.content ss
- | expand (Antiquote.Antiq antiq) = eval_antiq keywords state antiq;
+ | expand (Antiquote.Antiq antiq) = eval_antiq state antiq;
val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos);
val _ = Position.reports (maps words ants);
- in
- if Toplevel.is_toplevel state andalso not (forall Antiquote.is_text ants) then
- error ("Unknown context -- cannot expand document antiquotations" ^ Position.here pos)
- else implode (map expand ants)
- end;
+ in implode (map expand ants) end;
fun check_text {delimited, text, pos} state =
(Position.report pos (Markup.language_document delimited);
if Toplevel.is_skipped_proof state then ()
- else ignore (eval_antiquote (Keyword.get_keywords ()) state (text, pos)));
+ else ignore (eval_antiquote state (text, pos)));
@@ -207,8 +215,8 @@
| MarkupEnvToken of string * (string * Position.T)
| VerbatimToken of string * Position.T;
-fun output_token keywords state =
- let val eval = eval_antiquote keywords state in
+fun output_token state =
+ let val eval = eval_antiquote state in
fn NoToken => ""
| BasicToken tok => Latex.output_basic tok
| MarkupToken (cmd, txt) => Latex.output_markup cmd (eval txt)
@@ -268,7 +276,7 @@
fun present_span keywords span state state' (tag_stack, active_tag, newline, buffer, present_cont) =
let
val present = fold (fn (tok, (flag, 0)) =>
- Buffer.add (output_token keywords state' tok)
+ Buffer.add (output_token state' tok)
#> Buffer.add flag
| _ => I);
@@ -321,8 +329,6 @@
(* present_thy *)
-datatype markup = Markup | MarkupEnv | Verbatim;
-
local
val space_proper =
@@ -350,8 +356,12 @@
in
-fun present_thy keywords is_markup command_results toks =
+fun present_thy thy command_results toks =
let
+ val keywords = Thy_Header.get_keywords thy;
+ val is_markup = Outer_Syntax.is_markup thy;
+
+
(* tokens *)
val ignored = Scan.state --| ignore
@@ -382,9 +392,9 @@
val token =
ignored ||
- markup Markup MarkupToken Latex.markup_true ||
- markup MarkupEnv MarkupEnvToken Latex.markup_true ||
- markup Verbatim (VerbatimToken o #2) "" ||
+ markup Outer_Syntax.Markup MarkupToken Latex.markup_true ||
+ markup Outer_Syntax.Markup_Env MarkupEnvToken Latex.markup_true ||
+ markup Outer_Syntax.Verbatim (VerbatimToken o #2) "" ||
command || cmt || other;
@@ -690,4 +700,29 @@
(Context_Position.reports ctxt [(pos, Markup.language_path), (pos, Markup.url name)];
enclose "\\url{" "}" name)));
+
+
+(** markup commands **)
+
+fun local_theory_markup (loc, txt) = Toplevel.present_local_theory loc (check_text txt);
+val proof_markup = Toplevel.present_proof o check_text;
+
+fun reject_target NONE = ()
+ | reject_target (SOME (_, pos)) =
+ error ("Illegal target specification -- not a theory context" ^ Position.here pos);
+
+fun header_markup txt =
+ Toplevel.keep (fn state =>
+ if Toplevel.is_toplevel state then
+ (legacy_feature "Obsolete 'header' command -- use 'chapter', 'section' etc. instead";
+ check_text txt state)
+ else raise Toplevel.UNDEF);
+
+fun heading_markup (loc, txt) =
+ Toplevel.keep (fn state =>
+ if Toplevel.is_toplevel state then (reject_target loc; check_text txt state)
+ else raise Toplevel.UNDEF) o
+ local_theory_markup (loc, txt) o
+ Toplevel.present_proof (fn state => (reject_target loc; check_text txt state));
+
end;
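
For illustration only, outside the changeset: present_thy now takes the theory, which
carries both its keywords and its markup commands, instead of explicit keyword and markup
arguments. A hypothetical caller, assuming command results and source tokens are already
at hand:

  (*sketch: render a presented theory to its LaTeX source*)
  fun example_present thy command_results tokens =
    Thy_Output.present_thy thy command_results tokens
    |> Buffer.content;
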
--- a/src/Pure/Tools/build.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Tools/build.ML Fri Nov 07 16:36:55 2014 +0100
@@ -157,7 +157,6 @@
theories |>
(List.app (use_theories_condition last_timing)
|> session_timing name verbose
- |> Unsynchronized.setmp Outer_Syntax.batch_mode true
|> Unsynchronized.setmp Output.protocol_message_fn protocol_message
|> Multithreading.max_threads_setmp (Options.int options "threads")
|> Exn.capture);
--- a/src/Pure/Tools/build.scala Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Tools/build.scala Fri Nov 07 16:36:55 2014 +0100
@@ -437,7 +437,8 @@
val (loaded_theories0, known_theories0, syntax0) =
info.parent.map(deps(_)) match {
case None =>
- (Set.empty[String], Map.empty[String, Document.Node.Name], Pure_Syn.init())
+ (Set.empty[String], Map.empty[String, Document.Node.Name],
+ Thy_Header.bootstrap_syntax())
case Some(parent) =>
(parent.loaded_theories, parent.known_theories, parent.syntax)
}
--- a/src/Pure/Tools/find_consts.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Tools/find_consts.ML Fri Nov 07 16:36:55 2014 +0100
@@ -140,7 +140,7 @@
val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion);
-val query_keywords = Keyword.add_keywords (":", NONE) Keyword.empty_keywords;
+val query_keywords = Keyword.add_keywords [(":", NONE)] Keyword.empty_keywords;
in
--- a/src/Pure/Tools/find_theorems.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Tools/find_theorems.ML Fri Nov 07 16:36:55 2014 +0100
@@ -526,7 +526,7 @@
val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion);
-val query_keywords = Keyword.add_keywords (":", NONE) Keyword.empty_keywords;
+val query_keywords = Keyword.add_keywords [(":", NONE)] Keyword.empty_keywords;
in
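
For illustration only, outside the changeset: Keyword.add_keywords now takes a list of
(name, spec option) entries, so a tool-private keyword table is set up in one step. The
name example_keywords below is hypothetical.

  (*sketch: private keywords for an ad-hoc query parser*)
  val example_keywords =
    Keyword.empty_keywords
    |> Keyword.add_keywords [(":", NONE), ("::", NONE)];
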
--- a/src/Pure/Tools/rail.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/Tools/rail.ML Fri Nov 07 16:36:55 2014 +0100
@@ -316,7 +316,7 @@
fun output_rules state rules =
let
- val output_antiq = Thy_Output.eval_antiq (Keyword.get_keywords ()) state;
+ val output_antiq = Thy_Output.eval_antiq state;
fun output_text b s =
Output.output s
|> b ? enclose "\\isakeyword{" "}"
--- a/src/Pure/build-jars Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/build-jars Fri Nov 07 16:36:55 2014 +0100
@@ -99,7 +99,6 @@
Tools/update_header.scala
Tools/update_semicolons.scala
library.scala
- pure_syn.scala
term.scala
term_xml.scala
"../Tools/Graphview/src/graph_panel.scala"
--- a/src/Pure/pure_syn.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Pure/pure_syn.ML Fri Nov 07 16:36:55 2014 +0100
@@ -1,50 +1,36 @@
(* Title: Pure/pure_syn.ML
Author: Makarius
-Minimal outer syntax for bootstrapping Isabelle/Pure.
+Outer syntax for bootstrapping Isabelle/Pure.
*)
structure Pure_Syn: sig end =
struct
-(* keywords *)
-
-val keywords: Thy_Header.keywords =
- [("theory", SOME (("thy_begin", []), ["theory"])),
- ("ML_file", SOME (("thy_load", []), ["ML"]))];
+val _ =
+ Outer_Syntax.markup_command Outer_Syntax.Markup ("header", @{here}) "theory header"
+ (Parse.document_source >> Thy_Output.header_markup);
-fun command_spec (name, pos) =
- (case AList.lookup (op =) keywords name of
- SOME (SOME spec) => Keyword.command_spec ((name, spec), pos)
- | _ => error ("Bad command specification " ^ quote name ^ Position.here pos));
-
-val _ = Thy_Header.define_keywords keywords;
-val _ = Theory.setup (fold Thy_Header.declare_keyword keywords);
-
-
-(* commands *)
+val _ =
+ Outer_Syntax.markup_command Outer_Syntax.Markup ("chapter", @{here}) "chapter heading"
+ (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup);
val _ =
- Outer_Syntax.command
- (command_spec ("theory", @{here})) "begin theory"
+ Outer_Syntax.markup_command Outer_Syntax.Markup ("section", @{here}) "section heading"
+ (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup);
+
+val _ =
+ Outer_Syntax.markup_command Outer_Syntax.Markup ("subsection", @{here}) "subsection heading"
+ (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup);
+
+val _ =
+ Outer_Syntax.markup_command Outer_Syntax.Markup ("subsubsection", @{here}) "subsubsection heading"
+ (Parse.opt_target -- Parse.document_source >> Thy_Output.heading_markup);
+
+val _ =
+ Outer_Syntax.command ("theory", @{here}) "begin theory"
(Thy_Header.args >>
(fn _ => Toplevel.init_theory (fn () => error "Missing theory initialization")));
-val _ =
- Outer_Syntax.command
- (command_spec ("ML_file", @{here})) "ML text from file"
- (Resources.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
- let
- val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy);
- val provide = Resources.provide (src_path, digest);
- val source = {delimited = true, text = cat_lines lines, pos = pos};
- val flags = {SML = false, exchange = false, redirect = true, verbose = true};
- in
- gthy
- |> ML_Context.exec (fn () => ML_Context.eval_source flags source)
- |> Local_Theory.propagate_ml_env
- |> Context.mapping provide (Local_Theory.background_theory provide)
- end)));
-
end;
--- a/src/Pure/pure_syn.scala Fri Nov 07 16:22:25 2014 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-/* Title: Pure/pure_syn.scala
- Author: Makarius
-
-Minimal outer syntax for bootstrapping Isabelle/Pure.
-*/
-
-package isabelle
-
-
-object Pure_Syn
-{
- private val keywords: Thy_Header.Keywords =
- List(
- ("theory", Some((("thy_begin", Nil), List("theory"))), None),
- ("ML_file", Some((("thy_load", Nil), List("ML"))), None))
-
- def init(): Outer_Syntax = Outer_Syntax.init().add_keywords(keywords)
-}
-
--- a/src/Tools/Code/code_target.ML Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Tools/Code/code_target.ML Fri Nov 07 16:36:55 2014 +0100
@@ -693,9 +693,10 @@
fun codegen_tool thyname cmd_expr =
let
- val ctxt = Proof_Context.init_global (Thy_Info.get_theory thyname);
+ val thy = Thy_Info.get_theory thyname;
+ val ctxt = Proof_Context.init_global thy;
val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o
- (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_keywords ()) Position.none);
+ (filter Token.is_proper o Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none);
in case parse cmd_expr
of SOME f => (writeln "Now generating code..."; f ctxt)
| NONE => error ("Bad directive " ^ quote cmd_expr)
--- a/src/Tools/jEdit/src/plugin.scala Fri Nov 07 16:22:25 2014 +0100
+++ b/src/Tools/jEdit/src/plugin.scala Fri Nov 07 16:36:55 2014 +0100
@@ -261,7 +261,7 @@
}
case Session.Ready =>
- PIDE.session.update_options(PIDE.options.value)
+ PIDE.session.init_options(PIDE.options.value)
PIDE.init_models()
if (!Isabelle.continuous_checking) {