--- a/Admin/update-keywords Thu Mar 15 17:38:05 2012 +0000
+++ b/Admin/update-keywords Thu Mar 15 22:21:28 2012 +0100
@@ -13,7 +13,7 @@
isabelle keywords \
"$LOG/Pure.gz" "$LOG/HOL.gz" "$LOG/HOLCF.gz" "$LOG/HOL-Boogie.gz" \
"$LOG/HOL-Library.gz" "$LOG/HOL-Nominal.gz" "$LOG/HOL-Statespace.gz" \
- "$LOG/HOL-SPARK.gz"
+ "$LOG/HOL-SPARK.gz" "$LOG/HOL-TPTP.gz"
isabelle keywords -k ZF \
"$LOG/Pure.gz" "$LOG/FOL.gz" "$LOG/ZF.gz"
--- a/NEWS Thu Mar 15 17:38:05 2012 +0000
+++ b/NEWS Thu Mar 15 22:21:28 2012 +0100
@@ -371,6 +371,9 @@
*** ML ***
+* Antiquotation @{keyword "name"} produces a parser for outer syntax
+from a minor keyword introduced via theory header declaration.
+
* Local_Theory.define no longer hard-wires default theorem name
"foo_def": definitional packages need to make this explicit, and may
choose to omit theorem bindings for definitions by using
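The @{keyword} antiquotation described in the NEWS entry above replaces uses of Parse.$$$ for minor keywords; it is checked when the ML source is evaluated, so a name that is not declared as a minor keyword via a theory header is rejected early. A minimal ML sketch of the resulting parser style (the value name is illustrative only; the pattern matches the parser changes further down in this changeset):

  val paren_typ : string parser =
    @{keyword "("} |-- Parse.typ --| @{keyword ")"};
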
--- a/etc/isar-keywords-ZF.el Thu Mar 15 17:38:05 2012 +0000
+++ b/etc/isar-keywords-ZF.el Thu Mar 15 22:21:28 2012 +0100
@@ -229,6 +229,7 @@
"infixr"
"intros"
"is"
+ "keywords"
"monos"
"notes"
"obtains"
--- a/etc/isar-keywords.el Thu Mar 15 17:38:05 2012 +0000
+++ b/etc/isar-keywords.el Thu Mar 15 22:21:28 2012 +0100
@@ -1,6 +1,6 @@
;;
;; Keyword classification tables for Isabelle/Isar.
-;; Generated from Pure + HOL + HOLCF + HOL-Boogie + HOL-Library + HOL-Nominal + HOL-Statespace + HOL-SPARK.
+;; Generated from Pure + HOL + HOLCF + HOL-Boogie + HOL-Library + HOL-Nominal + HOL-Statespace + HOL-SPARK + HOL-TPTP.
;; *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT ***
;;
@@ -108,6 +108,7 @@
"hide_const"
"hide_fact"
"hide_type"
+ "import_tptp"
"inductive"
"inductive_cases"
"inductive_set"
@@ -305,6 +306,7 @@
"infixl"
"infixr"
"is"
+ "keywords"
"lazy"
"module_name"
"monos"
@@ -483,6 +485,7 @@
"hide_const"
"hide_fact"
"hide_type"
+ "import_tptp"
"inductive"
"inductive_set"
"instantiation"
--- a/src/FOL/FOL.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/FOL/FOL.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory FOL
imports IFOL
+keywords "print_claset" "print_induct_rules" :: diag
uses
"~~/src/Provers/classical.ML"
"~~/src/Provers/blast.ML"
--- a/src/HOL/Boogie/Boogie.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Boogie/Boogie.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,8 @@
theory Boogie
imports Word
+keywords
+ "boogie_open" "boogie_end" :: thy_decl and "boogie_vc" :: thy_goal and "boogie_status" :: diag
uses
("Tools/boogie_vcs.ML")
("Tools/boogie_loader.ML")
--- a/src/HOL/Datatype.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Datatype.thy Thu Mar 15 22:21:28 2012 +0100
@@ -7,6 +7,7 @@
theory Datatype
imports Product_Type Sum_Type Nat
+keywords "datatype" :: thy_decl
uses
("Tools/Datatype/datatype.ML")
("Tools/inductive_realizer.ML")
--- a/src/HOL/Fun.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Fun.thy Thu Mar 15 22:21:28 2012 +0100
@@ -7,6 +7,7 @@
theory Fun
imports Complete_Lattices
+keywords "enriched_type" :: thy_goal
uses ("Tools/enriched_type.ML")
begin
--- a/src/HOL/FunDef.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/FunDef.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory FunDef
imports Partial_Function Wellfounded
+keywords "function" "termination" :: thy_goal and "fun" :: thy_decl
uses
"Tools/prop_logic.ML"
"Tools/sat_solver.ML"
--- a/src/HOL/HOL.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOL.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,8 @@
theory HOL
imports Pure "~~/src/Tools/Code_Generator"
+keywords
+ "print_coercions" "print_coercion_maps" "print_claset" "print_induct_rules" :: diag
uses
("Tools/hologic.ML")
"~~/src/Tools/IsaPlanner/zipper.ML"
--- a/src/HOL/HOLCF/Cpodef.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Cpodef.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Cpodef
imports Adm
+keywords "pcpodef" "cpodef" :: thy_goal
uses ("Tools/cpodef.ML")
begin
--- a/src/HOL/HOLCF/Domain.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Domain.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,9 @@
theory Domain
imports Representable Domain_Aux
+keywords
+ "domaindef" :: thy_decl and "lazy" "unsafe" and
+ "domain_isomorphism" "domain" :: thy_decl
uses
("Tools/domaindef.ML")
("Tools/Domain/domain_isomorphism.ML")
--- a/src/HOL/HOLCF/Fixrec.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Fixrec.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Fixrec
imports Plain_HOLCF
+keywords "fixrec" :: thy_decl
uses
("Tools/holcf_library.ML")
("Tools/fixrec.ML")
--- a/src/HOL/HOLCF/Tools/Domain/domain.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Tools/Domain/domain.ML Thu Mar 15 22:21:28 2012 +0100
@@ -228,25 +228,22 @@
(** outer syntax **)
-val _ = Keyword.keyword "lazy"
-val _ = Keyword.keyword "unsafe"
-
val dest_decl : (bool * binding option * string) parser =
- Parse.$$$ "(" |-- Scan.optional (Parse.$$$ "lazy" >> K true) false --
- (Parse.binding >> SOME) -- (Parse.$$$ "::" |-- Parse.typ) --| Parse.$$$ ")" >> Parse.triple1
- || Parse.$$$ "(" |-- Parse.$$$ "lazy" |-- Parse.typ --| Parse.$$$ ")"
- >> (fn t => (true,NONE,t))
- || Parse.typ >> (fn t => (false,NONE,t))
+ @{keyword "("} |-- Scan.optional (@{keyword "lazy"} >> K true) false --
+ (Parse.binding >> SOME) -- (@{keyword "::"} |-- Parse.typ) --| @{keyword ")"} >> Parse.triple1
+ || @{keyword "("} |-- @{keyword "lazy"} |-- Parse.typ --| @{keyword ")"}
+ >> (fn t => (true, NONE, t))
+ || Parse.typ >> (fn t => (false, NONE, t))
val cons_decl =
Parse.binding -- Scan.repeat dest_decl -- Parse.opt_mixfix
val domain_decl =
(Parse.type_args_constrained -- Parse.binding -- Parse.opt_mixfix) --
- (Parse.$$$ "=" |-- Parse.enum1 "|" cons_decl)
+ (@{keyword "="} |-- Parse.enum1 "|" cons_decl)
val domains_decl =
- Scan.optional (Parse.$$$ "(" |-- (Parse.$$$ "unsafe" >> K true) --| Parse.$$$ ")") false --
+ Scan.optional (@{keyword "("} |-- (@{keyword "unsafe"} >> K true) --| @{keyword ")"}) false --
Parse.and_list1 domain_decl
fun mk_domain
--- a/src/HOL/HOLCF/Tools/Domain/domain_isomorphism.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Tools/Domain/domain_isomorphism.ML Thu Mar 15 22:21:28 2012 +0100
@@ -762,8 +762,8 @@
val parse_domain_iso :
(string list * binding * mixfix * string * (binding * binding) option)
parser =
- (Parse.type_args -- Parse.binding -- Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.typ) --
- Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding)))
+ (Parse.type_args -- Parse.binding -- Parse.opt_mixfix -- (@{keyword "="} |-- Parse.typ) --
+ Scan.option (@{keyword "morphisms"} |-- Parse.!!! (Parse.binding -- Parse.binding)))
>> (fn ((((vs, t), mx), rhs), morphs) => (vs, t, mx, rhs, morphs))
val parse_domain_isos = Parse.and_list1 parse_domain_iso
--- a/src/HOL/HOLCF/Tools/cpodef.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Tools/cpodef.ML Thu Mar 15 22:21:28 2012 +0100
@@ -344,13 +344,13 @@
(** outer syntax **)
val typedef_proof_decl =
- Scan.optional (Parse.$$$ "(" |--
- ((Parse.$$$ "open" >> K false) -- Scan.option Parse.binding ||
+ Scan.optional (@{keyword "("} |--
+ ((@{keyword "open"} >> K false) -- Scan.option Parse.binding ||
Parse.binding >> (fn s => (true, SOME s)))
- --| Parse.$$$ ")") (true, NONE) --
+ --| @{keyword ")"}) (true, NONE) --
(Parse.type_args_constrained -- Parse.binding) -- Parse.opt_mixfix --
- (Parse.$$$ "=" |-- Parse.term) --
- Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding))
+ (@{keyword "="} |-- Parse.term) --
+ Scan.option (@{keyword "morphisms"} |-- Parse.!!! (Parse.binding -- Parse.binding))
fun mk_pcpodef_proof pcpo ((((((def, opt_name), (args, t)), mx), A), morphs)) =
(if pcpo then pcpodef_proof_cmd else cpodef_proof_cmd)
--- a/src/HOL/HOLCF/Tools/domaindef.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Tools/domaindef.ML Thu Mar 15 22:21:28 2012 +0100
@@ -212,13 +212,13 @@
(** outer syntax **)
val domaindef_decl =
- Scan.optional (Parse.$$$ "(" |--
- ((Parse.$$$ "open" >> K false) -- Scan.option Parse.binding ||
+ Scan.optional (@{keyword "("} |--
+ ((@{keyword "open"} >> K false) -- Scan.option Parse.binding ||
Parse.binding >> (fn s => (true, SOME s)))
- --| Parse.$$$ ")") (true, NONE) --
+ --| @{keyword ")"}) (true, NONE) --
(Parse.type_args_constrained -- Parse.binding) --
- Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.term) --
- Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding))
+ Parse.opt_mixfix -- (@{keyword "="} |-- Parse.term) --
+ Scan.option (@{keyword "morphisms"} |-- Parse.!!! (Parse.binding -- Parse.binding))
fun mk_domaindef ((((((def, opt_name), (args, t)), mx), A), morphs)) =
domaindef_cmd ((def, the_default t opt_name), (t, args, mx), A, morphs)
--- a/src/HOL/HOLCF/Tools/fixrec.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/HOLCF/Tools/fixrec.ML Thu Mar 15 22:21:28 2012 +0100
@@ -391,15 +391,15 @@
(*************************************************************************)
val opt_thm_name' : (bool * Attrib.binding) parser =
- Parse.$$$ "(" -- Parse.$$$ "unchecked" -- Parse.$$$ ")" >> K (true, Attrib.empty_binding)
+ @{keyword "("} -- @{keyword "unchecked"} -- @{keyword ")"} >> K (true, Attrib.empty_binding)
|| Parse_Spec.opt_thm_name ":" >> pair false
val spec' : (bool * (Attrib.binding * string)) parser =
opt_thm_name' -- Parse.prop >> (fn ((a, b), c) => (a, (b, c)))
val alt_specs' : (bool * (Attrib.binding * string)) list parser =
- let val unexpected = Scan.ahead (Parse.name || Parse.$$$ "[" || Parse.$$$ "(")
- in Parse.enum1 "|" (spec' --| Scan.option (unexpected -- Parse.!!! (Parse.$$$ "|"))) end
+ let val unexpected = Scan.ahead (Parse.name || @{keyword "["} || @{keyword "("})
+ in Parse.enum1 "|" (spec' --| Scan.option (unexpected -- Parse.!!! (@{keyword "|"}))) end
val _ =
Outer_Syntax.local_theory "fixrec" "define recursive functions (HOLCF)" Keyword.thy_decl
--- a/src/HOL/Hilbert_Choice.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Hilbert_Choice.thy Thu Mar 15 22:21:28 2012 +0100
@@ -7,6 +7,7 @@
theory Hilbert_Choice
imports Nat Wellfounded Plain
+keywords "specification" "ax_specification" :: thy_goal
uses ("Tools/choice_specification.ML")
begin
--- a/src/HOL/Import/Importer.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Import/Importer.thy Thu Mar 15 22:21:28 2012 +0100
@@ -4,6 +4,10 @@
theory Importer
imports Main
+keywords
+ "import_segment" "import_theory" "end_import" "setup_theory" "end_setup"
+ "setup_dump" "append_dump" "flush_dump" "ignore_thms" "type_maps"
+ "def_maps" "thm_maps" "const_renames" "const_moves" "const_maps" :: thy_decl and ">"
uses "shuffler.ML" "import_rews.ML" ("proof_kernel.ML") ("replay.ML") ("import.ML")
begin
--- a/src/HOL/Import/import.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Import/import.ML Thu Mar 15 22:21:28 2012 +0100
@@ -238,8 +238,7 @@
val append_dump = (Parse.verbatim || Parse.string) >> add_dump
val _ =
- (Keyword.keyword ">";
- Outer_Syntax.command "import_segment"
+ (Outer_Syntax.command "import_segment"
"Set import segment name"
Keyword.thy_decl (import_segment >> Toplevel.theory);
Outer_Syntax.command "import_theory"
--- a/src/HOL/Inductive.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Inductive.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,11 @@
theory Inductive
imports Complete_Lattices
+keywords
+ "inductive" "coinductive" :: thy_decl and
+ "inductive_cases" "inductive_simps" :: thy_script and "monos" and
+ "rep_datatype" :: thy_goal and
+ "primrec" :: thy_decl
uses
"Tools/dseq.ML"
("Tools/inductive.ML")
--- a/src/HOL/IsaMakefile Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/IsaMakefile Thu Mar 15 22:21:28 2012 +0100
@@ -1232,14 +1232,21 @@
HOL-TPTP: HOL $(LOG)/HOL-TPTP.gz
-$(LOG)/HOL-TPTP.gz: \
- $(OUT)/HOL \
+$(LOG)/HOL-TPTP.gz: $(OUT)/HOL \
+ TPTP/ATP_Problem_Import.thy \
+ TPTP/ATP_Theory_Export.thy \
+ TPTP/CASC_Setup.thy \
TPTP/ROOT.ML \
+ TPTP/TPTP_Parser.thy \
+ TPTP/TPTP_Parser/ml_yacc_lib.ML \
+ TPTP/TPTP_Parser/tptp_interpret.ML \
+ TPTP/TPTP_Parser/tptp_lexyacc.ML \
+ TPTP/TPTP_Parser/tptp_parser.ML \
+ TPTP/TPTP_Parser/tptp_problem_name.ML \
+ TPTP/TPTP_Parser/tptp_syntax.ML \
+ TPTP/TPTP_Parser_Test.thy \
TPTP/atp_problem_import.ML \
- TPTP/ATP_Problem_Import.thy \
- TPTP/atp_theory_export.ML \
- TPTP/ATP_Theory_Export.thy \
- TPTP/CASC_Setup.thy
+ TPTP/atp_theory_export.ML
@$(ISABELLE_TOOL) usedir $(OUT)/HOL TPTP
--- a/src/HOL/Library/Old_Recdef.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Library/Old_Recdef.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,10 @@
theory Old_Recdef
imports Wfrec
+keywords
+ "recdef" "defer_recdef" :: thy_decl and
+ "recdef_tc" :: thy_goal and
+ "permissive" "congs" "hints"
uses
("~~/src/HOL/Tools/TFL/casesplit.ML")
("~~/src/HOL/Tools/TFL/utils.ML")
@@ -41,7 +45,7 @@
lemma tfl_eq_True: "(x = True) --> x"
by blast
-lemma tfl_rev_eq_mp: "(x = y) --> y --> x";
+lemma tfl_rev_eq_mp: "(x = y) --> y --> x"
by blast
lemma tfl_simp_thm: "(x --> y) --> (x = x') --> (x' --> y)"
--- a/src/HOL/Metis.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Metis.thy Thu Mar 15 22:21:28 2012 +0100
@@ -8,6 +8,7 @@
theory Metis
imports ATP
+keywords "try0" :: diag
uses "~~/src/Tools/Metis/metis.ML"
("Tools/Metis/metis_generate.ML")
("Tools/Metis/metis_reconstruct.ML")
--- a/src/HOL/Nitpick.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Nitpick.thy Thu Mar 15 22:21:28 2012 +0100
@@ -9,6 +9,7 @@
theory Nitpick
imports Map Quotient SAT Record
+keywords "nitpick" :: diag and "nitpick_params" :: thy_decl
uses ("Tools/Nitpick/kodkod.ML")
("Tools/Nitpick/kodkod_sat.ML")
("Tools/Nitpick/nitpick_util.ML")
--- a/src/HOL/Nominal/Nominal.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Nominal/Nominal.thy Thu Mar 15 22:21:28 2012 +0100
@@ -1,5 +1,9 @@
theory Nominal
imports Main "~~/src/HOL/Library/Infinite_Set"
+keywords
+ "atom_decl" "nominal_datatype" "equivariance" :: thy_decl and
+ "nominal_primrec" "nominal_inductive" "nominal_inductive2" :: thy_goal and
+ "avoids"
uses
("nominal_thmdecls.ML")
("nominal_atoms.ML")
--- a/src/HOL/Nominal/nominal_inductive.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Nominal/nominal_inductive.ML Thu Mar 15 22:21:28 2012 +0100
@@ -669,20 +669,18 @@
(* outer syntax *)
-val _ = Keyword.keyword "avoids";
-
val _ =
Outer_Syntax.local_theory_to_proof "nominal_inductive"
"prove equivariance and strong induction theorem for inductive predicate involving nominal datatypes"
Keyword.thy_goal
- (Parse.xname -- Scan.optional (Parse.$$$ "avoids" |-- Parse.and_list1 (Parse.name --
- (Parse.$$$ ":" |-- Scan.repeat1 Parse.name))) [] >> (fn (name, avoids) =>
+ (Parse.xname -- Scan.optional (@{keyword "avoids"} |-- Parse.and_list1 (Parse.name --
+ (@{keyword ":"} |-- Scan.repeat1 Parse.name))) [] >> (fn (name, avoids) =>
prove_strong_ind name avoids));
val _ =
Outer_Syntax.local_theory "equivariance"
"prove equivariance for inductive predicate involving nominal datatypes" Keyword.thy_decl
- (Parse.xname -- Scan.optional (Parse.$$$ "[" |-- Parse.list1 Parse.name --| Parse.$$$ "]") [] >>
+ (Parse.xname -- Scan.optional (@{keyword "["} |-- Parse.list1 Parse.name --| @{keyword "]"}) [] >>
(fn (name, atoms) => prove_eqvt name atoms));
end
--- a/src/HOL/Nominal/nominal_inductive2.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Nominal/nominal_inductive2.ML Thu Mar 15 22:21:28 2012 +0100
@@ -488,9 +488,9 @@
"prove strong induction theorem for inductive predicate involving nominal datatypes"
Keyword.thy_goal
(Parse.xname --
- Scan.option (Parse.$$$ "(" |-- Parse.!!! (Parse.name --| Parse.$$$ ")")) --
- (Scan.optional (Parse.$$$ "avoids" |-- Parse.enum1 "|" (Parse.name --
- (Parse.$$$ ":" |-- Parse.and_list1 Parse.term))) []) >> (fn ((name, rule_name), avoids) =>
+ Scan.option (@{keyword "("} |-- Parse.!!! (Parse.name --| @{keyword ")"})) --
+ (Scan.optional (@{keyword "avoids"} |-- Parse.enum1 "|" (Parse.name --
+ (@{keyword ":"} |-- Parse.and_list1 Parse.term))) []) >> (fn ((name, rule_name), avoids) =>
prove_strong_ind name rule_name avoids));
end
--- a/src/HOL/Nominal/nominal_primrec.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Nominal/nominal_primrec.ML Thu Mar 15 22:21:28 2012 +0100
@@ -392,14 +392,14 @@
val freshness_context = Parse.reserved "freshness_context";
val invariant = Parse.reserved "invariant";
-fun unless_flag scan = Scan.unless ((freshness_context || invariant) -- Parse.$$$ ":") scan;
+fun unless_flag scan = Scan.unless ((freshness_context || invariant) -- @{keyword ":"}) scan;
-val parser1 = (freshness_context -- Parse.$$$ ":") |-- unless_flag Parse.term >> SOME;
-val parser2 = (invariant -- Parse.$$$ ":") |--
+val parser1 = (freshness_context -- @{keyword ":"}) |-- unless_flag Parse.term >> SOME;
+val parser2 = (invariant -- @{keyword ":"}) |--
(Scan.repeat1 (unless_flag Parse.term) >> SOME) -- Scan.optional parser1 NONE ||
(parser1 >> pair NONE);
val options =
- Scan.optional (Parse.$$$ "(" |-- Parse.!!! (parser2 --| Parse.$$$ ")")) (NONE, NONE);
+ Scan.optional (@{keyword "("} |-- Parse.!!! (parser2 --| @{keyword ")"})) (NONE, NONE);
val _ =
Outer_Syntax.local_theory_to_proof "nominal_primrec"
--- a/src/HOL/Orderings.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Orderings.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Orderings
imports HOL
+keywords "print_orders" :: diag
uses
"~~/src/Provers/order.ML"
"~~/src/Provers/quasi.ML" (* FIXME unused? *)
--- a/src/HOL/Partial_Function.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Partial_Function.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Partial_Function
imports Complete_Partial_Order Option
+keywords "partial_function" :: thy_decl
uses
"Tools/Function/function_lib.ML"
"Tools/Function/partial_function.ML"
--- a/src/HOL/Predicate_Compile.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Predicate_Compile.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Predicate_Compile
imports Predicate New_Random_Sequence Quickcheck_Exhaustive
+keywords "code_pred" :: thy_goal and "values" :: diag
uses
"Tools/Predicate_Compile/predicate_compile_aux.ML"
"Tools/Predicate_Compile/predicate_compile_compilations.ML"
--- a/src/HOL/Product_Type.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Product_Type.thy Thu Mar 15 22:21:28 2012 +0100
@@ -7,6 +7,7 @@
theory Product_Type
imports Typedef Inductive Fun
+keywords "inductive_set" "coinductive_set" :: thy_decl
uses
("Tools/split_rule.ML")
("Tools/inductive_set.ML")
--- a/src/HOL/Quickcheck_Exhaustive.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Quickcheck_Exhaustive.thy Thu Mar 15 22:21:28 2012 +0100
@@ -4,6 +4,7 @@
theory Quickcheck_Exhaustive
imports Quickcheck
+keywords "quickcheck_generator" :: thy_decl
uses
("Tools/Quickcheck/exhaustive_generators.ML")
("Tools/Quickcheck/abstract_generators.ML")
--- a/src/HOL/Quickcheck_Narrowing.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Quickcheck_Narrowing.thy Thu Mar 15 22:21:28 2012 +0100
@@ -4,6 +4,7 @@
theory Quickcheck_Narrowing
imports Quickcheck_Exhaustive
+keywords "find_unused_assms" :: diag
uses
("Tools/Quickcheck/PNF_Narrowing_Engine.hs")
("Tools/Quickcheck/Narrowing_Engine.hs")
--- a/src/HOL/Quotient.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Quotient.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,10 @@
theory Quotient
imports Plain Hilbert_Choice Equiv_Relations
+keywords
+ "print_quotmaps" "print_quotients" "print_quotconsts" :: diag and
+ "quotient_type" :: thy_goal and "/" and
+ "quotient_definition" :: thy_decl
uses
("Tools/Quotient/quotient_info.ML")
("Tools/Quotient/quotient_type.ML")
--- a/src/HOL/Record.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Record.thy Thu Mar 15 22:21:28 2012 +0100
@@ -10,6 +10,7 @@
theory Record
imports Plain Quickcheck_Narrowing
+keywords "record" :: thy_decl
uses ("Tools/record.ML")
begin
--- a/src/HOL/Refute.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Refute.thy Thu Mar 15 22:21:28 2012 +0100
@@ -9,6 +9,7 @@
theory Refute
imports Hilbert_Choice List Sledgehammer
+keywords "refute" :: diag and "refute_params" :: thy_decl
uses "Tools/refute.ML"
begin
--- a/src/HOL/SMT.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/SMT.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory SMT
imports Record
+keywords "smt_status" :: diag
uses
"Tools/SMT/smt_utils.ML"
"Tools/SMT/smt_failure.ML"
--- a/src/HOL/SPARK/SPARK_Setup.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/SPARK/SPARK_Setup.thy Thu Mar 15 22:21:28 2012 +0100
@@ -7,6 +7,9 @@
theory SPARK_Setup
imports Word
+keywords
+ "spark_open" "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
+ "spark_vc" :: thy_goal and "spark_status" :: diag
uses
"Tools/fdl_lexer.ML"
"Tools/fdl_parser.ML"
--- a/src/HOL/Sledgehammer.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Sledgehammer.thy Thu Mar 15 22:21:28 2012 +0100
@@ -8,6 +8,7 @@
theory Sledgehammer
imports ATP SMT
+keywords "sledgehammer" :: diag and "sledgehammer_params" :: thy_decl
uses "Tools/Sledgehammer/async_manager.ML"
"Tools/Sledgehammer/sledgehammer_util.ML"
"Tools/Sledgehammer/sledgehammer_filter.ML"
--- a/src/HOL/Statespace/StateSpaceLocale.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Statespace/StateSpaceLocale.thy Thu Mar 15 22:21:28 2012 +0100
@@ -5,6 +5,7 @@
header {* Setup for State Space Locales \label{sec:StateSpaceLocale}*}
theory StateSpaceLocale imports StateFun
+keywords "statespace" :: thy_decl
uses "state_space.ML" "state_fun.ML"
begin
--- a/src/HOL/Statespace/state_space.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Statespace/state_space.ML Thu Mar 15 22:21:28 2012 +0100
@@ -676,15 +676,15 @@
val type_insts =
Parse.typ >> single ||
- Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.typ --| Parse.$$$ ")")
+ @{keyword "("} |-- Parse.!!! (Parse.list1 Parse.typ --| @{keyword ")"})
-val comp = Parse.name -- (Parse.$$$ "::" |-- Parse.!!! Parse.typ);
+val comp = Parse.name -- (@{keyword "::"} |-- Parse.!!! Parse.typ);
fun plus1_unless test scan =
- scan ::: Scan.repeat (Parse.$$$ "+" |-- Scan.unless test (Parse.!!! scan));
+ scan ::: Scan.repeat (@{keyword "+"} |-- Scan.unless test (Parse.!!! scan));
-val mapsto = Parse.$$$ "=";
+val mapsto = @{keyword "="};
val rename = Parse.name -- (mapsto |-- Parse.name);
-val renames = Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.list1 rename --| Parse.$$$ "]")) [];
+val renames = Scan.optional (@{keyword "["} |-- Parse.!!! (Parse.list1 rename --| @{keyword "]"})) [];
val parent =
((type_insts -- Parse.xname) || (Parse.xname >> pair [])) -- renames
@@ -694,10 +694,10 @@
val statespace_decl =
Parse.type_args -- Parse.name --
- (Parse.$$$ "=" |--
+ (@{keyword "="} |--
((Scan.repeat1 comp >> pair []) ||
(plus1_unless comp parent --
- Scan.optional (Parse.$$$ "+" |-- Parse.!!! (Scan.repeat1 comp)) [])));
+ Scan.optional (@{keyword "+"} |-- Parse.!!! (Scan.repeat1 comp)) [])));
val _ =
Outer_Syntax.command "statespace" "define state space" Keyword.thy_decl
(statespace_decl >> (fn ((args, name), (parents, comps)) =>
--- a/src/HOL/TPTP/TPTP_Parser.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/TPTP/TPTP_Parser.thy Thu Mar 15 22:21:28 2012 +0100
@@ -9,6 +9,7 @@
theory TPTP_Parser
imports Main
+keywords "import_tptp" :: thy_decl
uses
"TPTP_Parser/ml_yacc_lib.ML" (*generated from ML-Yacc's lib*)
--- a/src/HOL/TPTP/TPTP_Parser/tptp_interpret.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_interpret.ML Thu Mar 15 22:21:28 2012 +0100
@@ -887,14 +887,9 @@
in TPTP_Data.map (cons ((prob_name, result))) thy' end
val _ =
- Outer_Syntax.improper_command "import_tptp" "import TPTP problem"
- Keyword.diag (*FIXME why diag?*)
- (Parse.string >>
- (fn file_name =>
- Toplevel.theory (fn thy =>
- import_file true (Path.explode file_name |> Path.dir)
- (Path.explode file_name) [] [] thy
- )))
+ Outer_Syntax.improper_command "import_tptp" "import TPTP problem" Keyword.thy_decl
+ (Parse.path >> (fn path =>
+ Toplevel.theory (fn thy => import_file true (Path.dir path) path [] [] thy)))
(*Used for debugging. Returns all files contained within a directory or its
--- a/src/HOL/Tools/Datatype/datatype.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Datatype/datatype.ML Thu Mar 15 22:21:28 2012 +0100
@@ -788,7 +788,7 @@
val spec_cmd =
Parse.type_args_constrained -- Parse.binding -- Parse.opt_mixfix --
- (Parse.$$$ "=" |-- Parse.enum1 "|" (Parse.binding -- Scan.repeat Parse.typ -- Parse.opt_mixfix))
+ (@{keyword "="} |-- Parse.enum1 "|" (Parse.binding -- Scan.repeat Parse.typ -- Parse.opt_mixfix))
>> (fn (((vs, t), mx), cons) => ((t, vs, mx), map Parse.triple1 cons));
val _ =
--- a/src/HOL/Tools/Function/function_common.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Function/function_common.ML Thu Mar 15 22:21:28 2012 +0100
@@ -345,7 +345,7 @@
|| (Parse.reserved "no_partials" >> K No_Partials))
fun config_parser default =
- (Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 option_parser) --| Parse.$$$ ")") [])
+ (Scan.optional (@{keyword "("} |-- Parse.!!! (Parse.list1 option_parser) --| @{keyword ")"}) [])
>> (fn opts => fold apply_opt opts default)
in
fun function_parser default_cfg =
--- a/src/HOL/Tools/Function/partial_function.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Function/partial_function.ML Thu Mar 15 22:21:28 2012 +0100
@@ -279,7 +279,7 @@
val add_partial_function = gen_add_partial_function Specification.check_spec;
val add_partial_function_cmd = gen_add_partial_function Specification.read_spec;
-val mode = Parse.$$$ "(" |-- Parse.xname --| Parse.$$$ ")";
+val mode = @{keyword "("} |-- Parse.xname --| @{keyword ")"};
val _ = Outer_Syntax.local_theory
"partial_function" "define partial function" Keyword.thy_decl
--- a/src/HOL/Tools/Metis/metis_tactic.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Metis/metis_tactic.ML Thu Mar 15 22:21:28 2012 +0100
@@ -292,7 +292,7 @@
val parse_metis_options =
Scan.optional
(Args.parens (Parse.short_ident
- -- Scan.option (Parse.$$$ "," |-- Parse.short_ident))
+ -- Scan.option (@{keyword ","} |-- Parse.short_ident))
>> (fn (s, s') =>
(NONE, NONE) |> consider_opt s
|> (case s' of SOME s' => consider_opt s' | _ => I)))
--- a/src/HOL/Tools/Nitpick/nitpick_isar.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Nitpick/nitpick_isar.ML Thu Mar 15 22:21:28 2012 +0100
@@ -317,11 +317,11 @@
Scan.repeat1 (Parse.minus >> single
|| Scan.repeat1 (Scan.unless Parse.minus
(Parse.name || Parse.float_number))
- || Parse.$$$ "," |-- Parse.number >> prefix "," >> single)
+ || @{keyword ","} |-- Parse.number >> prefix "," >> single)
>> flat
-val parse_param = parse_key -- Scan.optional (Parse.$$$ "=" |-- parse_value) []
+val parse_param = parse_key -- Scan.optional (@{keyword "="} |-- parse_value) []
val parse_params =
- Scan.optional (Parse.$$$ "[" |-- Parse.list parse_param --| Parse.$$$ "]") []
+ Scan.optional (@{keyword "["} |-- Parse.list parse_param --| @{keyword "]"}) []
fun handle_exceptions ctxt f x =
f x
--- a/src/HOL/Tools/Predicate_Compile/code_prolog.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Predicate_Compile/code_prolog.ML Thu Mar 15 22:21:28 2012 +0100
@@ -1002,7 +1002,7 @@
(* renewing the values command for Prolog queries *)
val opt_print_modes =
- Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.xname --| Parse.$$$ ")")) [];
+ Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.xname --| @{keyword ")"})) [];
val _ = Outer_Syntax.improper_command "values" "enumerate and print comprehensions" Keyword.diag
(opt_print_modes -- Scan.optional (Parse.nat >> SOME) NONE -- Parse.term
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile.ML Thu Mar 15 22:21:28 2012 +0100
@@ -221,15 +221,15 @@
val opt_modes =
- Scan.optional (Parse.$$$ "(" |-- Args.$$$ "modes" |-- Parse.$$$ ":" |--
- (((Parse.enum1 "and" (Parse.xname --| Parse.$$$ ":" --
+ Scan.optional (@{keyword "("} |-- Args.$$$ "modes" |-- @{keyword ":"} |--
+ (((Parse.enum1 "and" (Parse.xname --| @{keyword ":"} --
(Parse.enum "," mode_and_opt_proposal))) >> Multiple_Preds)
|| ((Parse.enum "," mode_and_opt_proposal) >> Single_Pred))
- --| Parse.$$$ ")") (Multiple_Preds [])
+ --| @{keyword ")"}) (Multiple_Preds [])
val opt_expected_modes =
- Scan.optional (Parse.$$$ "(" |-- Args.$$$ "expected_modes" |-- Parse.$$$ ":" |--
- Parse.enum "," parse_mode_expr --| Parse.$$$ ")" >> SOME) NONE
+ Scan.optional (@{keyword "("} |-- Args.$$$ "expected_modes" |-- @{keyword ":"} |--
+ Parse.enum "," parse_mode_expr --| @{keyword ")"} >> SOME) NONE
(* Parser for options *)
@@ -238,18 +238,18 @@
val scan_bool_option = foldl1 (op ||) (map Args.$$$ bool_options)
val scan_compilation = foldl1 (op ||) (map (fn (s, c) => Args.$$$ s >> K c) compilation_names)
in
- Scan.optional (Parse.$$$ "[" |-- Scan.optional scan_compilation Pred
- -- Parse.enum "," scan_bool_option --| Parse.$$$ "]")
+ Scan.optional (@{keyword "["} |-- Scan.optional scan_compilation Pred
+ -- Parse.enum "," scan_bool_option --| @{keyword "]"})
(Pred, [])
end
val opt_print_modes =
- Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.xname --| Parse.$$$ ")")) [];
+ Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.xname --| @{keyword ")"})) [];
-val opt_mode = (Parse.$$$ "_" >> K NONE) || (parse_mode_expr >> SOME)
+val opt_mode = (Args.$$$ "_" >> K NONE) || (parse_mode_expr >> SOME)
-val opt_param_modes = Scan.optional (Parse.$$$ "[" |-- Args.$$$ "mode" |-- Parse.$$$ ":" |--
- Parse.enum ", " opt_mode --| Parse.$$$ "]" >> SOME) NONE
+val opt_param_modes = Scan.optional (@{keyword "["} |-- Args.$$$ "mode" |-- @{keyword ":"} |--
+ Parse.enum ", " opt_mode --| @{keyword "]"} >> SOME) NONE
val stats = Scan.optional (Args.$$$ "stats" >> K true) false
@@ -264,7 +264,7 @@
(Pred, [])
in
Scan.optional
- (Parse.$$$ "[" |-- (expected_values -- stats) -- scan_compilation --| Parse.$$$ "]")
+ (@{keyword "["} |-- (expected_values -- stats) -- scan_compilation --| @{keyword "]"})
((NONE, false), (Pred, []))
end
--- a/src/HOL/Tools/Quickcheck/abstract_generators.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Quickcheck/abstract_generators.ML Thu Mar 15 22:21:28 2012 +0100
@@ -66,8 +66,8 @@
val _ = Outer_Syntax.command "quickcheck_generator" "define quickcheck generators for abstract types"
Keyword.thy_decl ((Parse.type_const --
- Scan.option (Args.$$$ "predicate" |-- Parse.$$$ ":" |-- Parse.term)) --
- (Args.$$$ "constructors" |-- Parse.$$$ ":" |-- Parse.list1 Parse.term)
+ Scan.option (Args.$$$ "predicate" |-- @{keyword ":"} |-- Parse.term)) --
+ (Args.$$$ "constructors" |-- @{keyword ":"} |-- Parse.list1 Parse.term)
>> (fn ((tyco, opt_pred), constrs) =>
Toplevel.theory (quickcheck_generator_cmd tyco opt_pred constrs)))
--- a/src/HOL/Tools/Quotient/quotient_def.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Quotient/quotient_def.ML Thu Mar 15 22:21:28 2012 +0100
@@ -109,7 +109,7 @@
(* parser and command *)
val quotdef_parser =
Scan.option Parse_Spec.constdecl --
- Parse.!!! (Parse_Spec.opt_thm_name ":" -- (Parse.term --| Parse.$$$ "is" -- Parse.term))
+ Parse.!!! (Parse_Spec.opt_thm_name ":" -- (Parse.term --| @{keyword "is"} -- Parse.term))
val _ =
Outer_Syntax.local_theory "quotient_definition"
--- a/src/HOL/Tools/Quotient/quotient_info.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Quotient/quotient_info.ML Thu Mar 15 22:21:28 2012 +0100
@@ -71,7 +71,7 @@
val quotmaps_attribute_setup =
Attrib.setup @{binding map}
- ((Args.type_name false --| Scan.lift (Parse.$$$ "=")) -- (* FIXME Args.type_name true (requires "set" type) *)
+ ((Args.type_name false --| Scan.lift (@{keyword "="})) -- (* FIXME Args.type_name true (requires "set" type) *)
Args.const_proper true >>
(fn (tyname, relname) =>
let val minfo = {relmap = relname}
--- a/src/HOL/Tools/Quotient/quotient_type.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Quotient/quotient_type.ML Thu Mar 15 22:21:28 2012 +0100
@@ -262,17 +262,15 @@
quotient_type spec' lthy
end
-val partial = Scan.optional (Parse.reserved "partial" -- Parse.$$$ ":" >> K true) false
+val partial = Scan.optional (Parse.reserved "partial" -- @{keyword ":"} >> K true) false
val quotspec_parser =
Parse.and_list1
((Parse.type_args -- Parse.binding) --
(* FIXME Parse.type_args_constrained and standard treatment of sort constraints *)
- Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.typ) --
- (Parse.$$$ "/" |-- (partial -- Parse.term)) --
- Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding)))
-
-val _ = Keyword.keyword "/"
+ Parse.opt_mixfix -- (@{keyword "="} |-- Parse.typ) --
+ (@{keyword "/"} |-- (partial -- Parse.term)) --
+ Scan.option (@{keyword "morphisms"} |-- Parse.!!! (Parse.binding -- Parse.binding)))
val _ =
Outer_Syntax.local_theory_to_proof "quotient_type"
--- a/src/HOL/Tools/SMT/smt_config.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/SMT/smt_config.ML Thu Mar 15 22:21:28 2012 +0100
@@ -86,7 +86,7 @@
context
|> Solvers.map (apfst (Symtab.update (name, (info, []))))
|> Context.map_theory (Attrib.setup (Binding.name (name ^ "_options"))
- (Scan.lift (Parse.$$$ "=" |-- Args.name) >>
+ (Scan.lift (@{keyword "="} |-- Args.name) >>
(Thm.declaration_attribute o K o set_solver_options o pair name))
("Additional command line options for SMT solver " ^ quote name))
@@ -142,7 +142,7 @@
val setup_solver =
Attrib.setup @{binding smt_solver}
- (Scan.lift (Parse.$$$ "=" |-- Args.name) >>
+ (Scan.lift (@{keyword "="} |-- Args.name) >>
(Thm.declaration_attribute o K o select_solver))
"SMT solver configuration"
@@ -207,7 +207,7 @@
val setup_certificates =
Attrib.setup @{binding smt_certificates}
- (Scan.lift (Parse.$$$ "=" |-- Args.name) >>
+ (Scan.lift (@{keyword "="} |-- Args.name) >>
(Thm.declaration_attribute o K o select_certificates))
"SMT certificates configuration"
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Thu Mar 15 22:21:28 2012 +0100
@@ -396,12 +396,12 @@
|> sort_strings |> cat_lines))
end))
-val parse_query_bang = Parse.$$$ "?" || Parse.$$$ "!" || Parse.$$$ "!!"
+val parse_query_bang = @{keyword "?"} || @{keyword "!"} || @{keyword "!!"}
val parse_key =
Scan.repeat1 (Parse.typ_group || parse_query_bang) >> implode_param
val parse_value =
Scan.repeat1 (Parse.xname || Parse.float_number || parse_query_bang)
-val parse_param = parse_key -- Scan.optional (Parse.$$$ "=" |-- parse_value) []
+val parse_param = parse_key -- Scan.optional (@{keyword "="} |-- parse_value) []
val parse_params = Scan.optional (Args.bracks (Parse.list parse_param)) []
val parse_fact_refs =
Scan.repeat1 (Scan.unless (Parse.name -- Args.colon) Parse_Spec.xthm)
--- a/src/HOL/Tools/choice_specification.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/choice_specification.ML Thu Mar 15 22:21:28 2012 +0100
@@ -234,11 +234,11 @@
(* outer syntax *)
-val opt_name = Scan.optional (Parse.name --| Parse.$$$ ":") ""
+val opt_name = Scan.optional (Parse.name --| @{keyword ":"}) ""
val opt_overloaded = Parse.opt_keyword "overloaded"
val specification_decl =
- Parse.$$$ "(" |-- Scan.repeat1 (opt_name -- Parse.term -- opt_overloaded) --| Parse.$$$ ")" --
+ @{keyword "("} |-- Scan.repeat1 (opt_name -- Parse.term -- opt_overloaded) --| @{keyword ")"} --
Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.prop)
val _ =
@@ -249,7 +249,7 @@
val ax_specification_decl =
Parse.name --
- (Parse.$$$ "(" |-- Scan.repeat1 (opt_name -- Parse.term -- opt_overloaded) --| Parse.$$$ ")" --
+ (@{keyword "("} |-- Scan.repeat1 (opt_name -- Parse.term -- opt_overloaded) --| @{keyword ")"} --
Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.prop))
val _ =
--- a/src/HOL/Tools/enriched_type.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/enriched_type.ML Thu Mar 15 22:21:28 2012 +0100
@@ -268,7 +268,7 @@
val _ = Outer_Syntax.local_theory_to_proof "enriched_type"
"register operations managing the functorial structure of a type"
- Keyword.thy_goal (Scan.option (Parse.name --| Parse.$$$ ":") -- Parse.term
+ Keyword.thy_goal (Scan.option (Parse.name --| @{keyword ":"}) -- Parse.term
>> (fn (prfx, t) => enriched_type_cmd prfx t));
end;
--- a/src/HOL/Tools/inductive.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/inductive.ML Thu Mar 15 22:21:28 2012 +0100
@@ -1134,12 +1134,10 @@
(* outer syntax *)
-val _ = Keyword.keyword "monos";
-
fun gen_ind_decl mk_def coind =
Parse.fixes -- Parse.for_fixes --
Scan.optional Parse_Spec.where_alt_specs [] --
- Scan.optional (Parse.$$$ "monos" |-- Parse.!!! Parse_Spec.xthms1) []
+ Scan.optional (@{keyword "monos"} |-- Parse.!!! Parse_Spec.xthms1) []
>> (fn (((preds, params), specs), monos) =>
(snd oo gen_add_inductive mk_def true coind preds params specs monos));
--- a/src/HOL/Tools/recdef.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/recdef.ML Thu Mar 15 22:21:28 2012 +0100
@@ -290,15 +290,13 @@
(* outer syntax *)
-val _ = List.app Keyword.keyword ["permissive", "congs", "hints"];
-
val hints =
- Parse.$$$ "(" |--
- Parse.!!! (Parse.position (Parse.$$$ "hints" -- Args.parse) --| Parse.$$$ ")") >> Args.src;
+ @{keyword "("} |--
+ Parse.!!! (Parse.position (@{keyword "hints"} -- Args.parse) --| @{keyword ")"}) >> Args.src;
val recdef_decl =
Scan.optional
- (Parse.$$$ "(" -- Parse.!!! (Parse.$$$ "permissive" -- Parse.$$$ ")") >> K false) true --
+ (@{keyword "("} -- Parse.!!! (@{keyword "permissive"} -- @{keyword ")"}) >> K false) true --
Parse.name -- Parse.term -- Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- Parse.prop)
-- Scan.option hints
>> (fn ((((p, f), R), eqs), src) => #1 o add_recdef p f R (map Parse.triple_swap eqs) src);
@@ -311,7 +309,7 @@
val defer_recdef_decl =
Parse.name -- Scan.repeat1 Parse.prop --
Scan.optional
- (Parse.$$$ "(" |-- Parse.$$$ "congs" |-- Parse.!!! (Parse_Spec.xthms1 --| Parse.$$$ ")")) []
+ (@{keyword "("} |-- @{keyword "congs"} |-- Parse.!!! (Parse_Spec.xthms1 --| @{keyword ")"})) []
>> (fn ((f, eqs), congs) => #1 o defer_recdef f eqs congs);
val _ =
@@ -322,7 +320,7 @@
Outer_Syntax.local_theory_to_proof' "recdef_tc" "recommence proof of termination condition (TFL)"
Keyword.thy_goal
((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.xname --
- Scan.option (Parse.$$$ "(" |-- Parse.nat --| Parse.$$$ ")")
+ Scan.option (@{keyword "("} |-- Parse.nat --| @{keyword ")"})
>> (fn ((thm_name, name), i) => recdef_tc thm_name name i));
end;
--- a/src/HOL/Tools/record.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/record.ML Thu Mar 15 22:21:28 2012 +0100
@@ -2313,7 +2313,7 @@
val _ =
Outer_Syntax.command "record" "define extensible record" Keyword.thy_decl
(Parse.type_args_constrained -- Parse.binding --
- (Parse.$$$ "=" |-- Scan.option (Parse.typ --| Parse.$$$ "+") --
+ (@{keyword "="} |-- Scan.option (Parse.typ --| @{keyword "+"}) --
Scan.repeat1 Parse.const_binding)
>> (fn (x, (y, z)) => Toplevel.theory (add_record_cmd x y z)));
--- a/src/HOL/Tools/refute.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/refute.ML Thu Mar 15 22:21:28 2012 +0100
@@ -3202,8 +3202,8 @@
(*optional list of arguments of the form [name1=value1, name2=value2, ...]*)
-val scan_parm = Parse.name -- (Scan.optional (Parse.$$$ "=" |-- Parse.name) "true")
-val scan_parms = Scan.optional (Parse.$$$ "[" |-- Parse.list scan_parm --| Parse.$$$ "]") [];
+val scan_parm = Parse.name -- (Scan.optional (@{keyword "="} |-- Parse.name) "true")
+val scan_parms = Scan.optional (@{keyword "["} |-- Parse.list scan_parm --| @{keyword "]"}) [];
(* 'refute' command *)
--- a/src/HOL/Tools/typedef.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Tools/typedef.ML Thu Mar 15 22:21:28 2012 +0100
@@ -299,17 +299,15 @@
(** outer syntax **)
-val _ = Keyword.keyword "morphisms";
-
val _ =
Outer_Syntax.local_theory_to_proof "typedef" "HOL type definition (requires non-emptiness proof)"
Keyword.thy_goal
- (Scan.optional (Parse.$$$ "(" |--
- ((Parse.$$$ "open" >> K false) -- Scan.option Parse.binding ||
- Parse.binding >> (fn s => (true, SOME s))) --| Parse.$$$ ")") (true, NONE) --
+ (Scan.optional (@{keyword "("} |--
+ ((@{keyword "open"} >> K false) -- Scan.option Parse.binding ||
+ Parse.binding >> (fn s => (true, SOME s))) --| @{keyword ")"}) (true, NONE) --
(Parse.type_args_constrained -- Parse.binding) --
- Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.term) --
- Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding))
+ Parse.opt_mixfix -- (@{keyword "="} |-- Parse.term) --
+ Scan.option (@{keyword "morphisms"} |-- Parse.!!! (Parse.binding -- Parse.binding))
>> (fn ((((((def, opt_name), (args, t)), mx), A), morphs)) =>
typedef_cmd ((def, the_default t opt_name), (t, args, mx), A, morphs)));
--- a/src/HOL/Typedef.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/Typedef.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Typedef
imports Set
+keywords "typedef" :: thy_goal and "morphisms"
uses ("Tools/typedef.ML")
begin
--- a/src/HOL/ex/Interpretation_with_Defs.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/HOL/ex/Interpretation_with_Defs.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,7 @@
theory Interpretation_with_Defs
imports Pure
+keywords "interpretation" :: thy_goal
uses "~~/src/Tools/interpretation_with_defs.ML"
begin
--- a/src/Pure/Isar/isar_syn.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Isar/isar_syn.ML Thu Mar 15 22:21:28 2012 +0100
@@ -12,15 +12,16 @@
(*keep keywords consistent with the parsers, otherwise be prepared for
unexpected errors*)
-val _ = List.app Keyword.keyword
- ["!!", "!", "%", "(", ")", "+", ",", "--", ":", "::", ";", "<", "<=",
- "=", "==", "=>", "?", "[", "\\<equiv>", "\\<leftharpoondown>",
- "\\<rightharpoonup>", "\\<rightleftharpoons>", "\\<subseteq>", "]",
- "advanced", "and", "assumes", "attach", "begin", "binder",
- "constrains", "defines", "fixes", "for", "identifier", "if",
- "imports", "in", "infix", "infixl", "infixr", "is",
- "notes", "obtains", "open", "output", "overloaded", "pervasive",
- "shows", "structure", "unchecked", "uses", "where", "|"];
+val _ = Context.>> (Context.map_theory
+ (fold (fn name => Thy_Header.declare_keyword (name, NONE))
+ ["!!", "!", "%", "(", ")", "+", ",", "--", ":", "::", ";", "<", "<=",
+ "=", "==", "=>", "?", "[", "\\<equiv>", "\\<leftharpoondown>",
+ "\\<rightharpoonup>", "\\<rightleftharpoons>", "\\<subseteq>", "]",
+ "advanced", "and", "assumes", "attach", "begin", "binder",
+ "constrains", "defines", "fixes", "for", "identifier", "if",
+ "imports", "in", "infix", "infixl", "infixr", "is", "keywords",
+ "notes", "obtains", "open", "output", "overloaded", "pervasive",
+ "shows", "structure", "unchecked", "uses", "where", "|"]));
@@ -28,12 +29,10 @@
val _ =
Outer_Syntax.command "theory" "begin theory" (Keyword.tag_theory Keyword.thy_begin)
- (Thy_Header.args >> (fn (name, imports, uses) =>
+ (Thy_Header.args >> (fn header =>
Toplevel.print o
Toplevel.init_theory
- (fn () =>
- Thy_Info.toplevel_begin_theory (Thy_Load.get_master_path ())
- name imports (map (apfst Path.explode) uses))));
+ (fn () => Thy_Info.toplevel_begin_theory (Thy_Load.get_master_path ()) header)));
val _ =
Outer_Syntax.command "end" "end (local) theory" (Keyword.tag_theory Keyword.thy_end)
--- a/src/Pure/Isar/keyword.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Isar/keyword.ML Thu Mar 15 22:21:28 2012 +0100
@@ -37,6 +37,7 @@
val tag_theory: T -> T
val tag_proof: T -> T
val tag_ml: T -> T
+ val make: string * string list -> T
val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
val is_keyword: string -> bool
val dest_keywords: unit -> string list
@@ -47,6 +48,7 @@
val status: unit -> unit
val keyword: string -> unit
val command: string -> T -> unit
+ val declare: string -> T option -> unit
val is_diag: string -> bool
val is_control: string -> bool
val is_regular: string -> bool
@@ -115,6 +117,39 @@
val tag_ml = tag "ML";
+(* external names *)
+
+val name_table = Symtab.make
+ [("control", control),
+ ("diag", diag),
+ ("thy_begin", thy_begin),
+ ("thy_switch", thy_switch),
+ ("thy_end", thy_end),
+ ("thy_heading", thy_heading),
+ ("thy_decl", thy_decl),
+ ("thy_script", thy_script),
+ ("thy_goal", thy_goal),
+ ("thy_schematic_goal", thy_schematic_goal),
+ ("qed", qed),
+ ("qed_block", qed_block),
+ ("qed_global", qed_global),
+ ("prf_heading", prf_heading),
+ ("prf_goal", prf_goal),
+ ("prf_block", prf_block),
+ ("prf_open", prf_open),
+ ("prf_close", prf_close),
+ ("prf_chain", prf_chain),
+ ("prf_decl", prf_decl),
+ ("prf_asm", prf_asm),
+ ("prf_asm_goal", prf_asm_goal),
+ ("prf_script", prf_script)];
+
+fun make (kind, tags) =
+ (case Symtab.lookup name_table kind of
+ SOME k => k |> fold tag tags
+ | NONE => error ("Unknown outer syntax keyword kind " ^ quote kind));
+
+
(** global keyword tables **)
@@ -180,6 +215,9 @@
change_commands (Symtab.update (name, kind));
command_status (name, kind));
+fun declare name NONE = keyword name
+ | declare name (SOME kind) = command name kind;
+
(* command categories *)
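Keyword.make maps the textual kind names used in theory headers to the corresponding keyword kinds, and Keyword.declare covers both forms of a header entry: no kind yields a minor keyword, a kind yields an outer-syntax command. A rough sketch of how the two forms are interpreted, using names from the header changes above (cf. the Keyword.declare call added to src/Pure/PIDE/document.ML further down):

  (*bare header entry, e.g. "morphisms": minor keyword*)
  val _ = Keyword.declare "morphisms" NONE;

  (*annotated entry, e.g. "spark_vc" :: thy_goal: outer-syntax command*)
  val _ = Keyword.declare "spark_vc" (SOME (Keyword.make ("thy_goal", [])));
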
--- a/src/Pure/Isar/outer_syntax.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Isar/outer_syntax.ML Thu Mar 15 22:21:28 2012 +0100
@@ -123,11 +123,23 @@
val global_outer_syntax = Unsynchronized.ref empty_outer_syntax;
fun add_command name kind cmd = CRITICAL (fn () =>
- (Keyword.command name kind;
- Unsynchronized.change global_outer_syntax (map_commands (fn commands =>
- (if not (Symtab.defined commands name) then ()
- else warning ("Redefining outer syntax command " ^ quote name);
- Symtab.update (name, cmd) commands)))));
+ let
+ val thy = ML_Context.the_global_context ();
+ val _ =
+ (case try (Thy_Header.the_keyword thy) name of
+ SOME k =>
+ if k = SOME kind then ()
+ else error ("Inconsistent outer syntax keyword declaration " ^ quote name)
+ | NONE =>
+ (Keyword.command name kind;
+ if Context.theory_name thy = Context.PureN then ()
+ else error ("Undeclared outer syntax command " ^ quote name)));
+ in
+ Unsynchronized.change global_outer_syntax (map_commands (fn commands =>
+ (if not (Symtab.defined commands name) then ()
+ else warning ("Redefining outer syntax command " ^ quote name);
+ Symtab.update (name, cmd) commands)))
+ end);
in
--- a/src/Pure/Isar/outer_syntax.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Isar/outer_syntax.scala Thu Mar 15 22:21:28 2012 +0100
@@ -34,13 +34,16 @@
result.toString
}
- def init(): Outer_Syntax = new Outer_Syntax()
+ type Decl = (String, Option[(String, List[String])])
+
+ val empty: Outer_Syntax = new Outer_Syntax()
+ def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())
}
final class Outer_Syntax private(
keywords: Map[String, String] = Map((";" -> Keyword.DIAG)),
lexicon: Scan.Lexicon = Scan.Lexicon.empty,
- val completion: Completion = Completion.init())
+ val completion: Completion = Completion.empty)
{
def keyword_kind(name: String): Option[String] = keywords.get(name)
@@ -51,8 +54,12 @@
if (Keyword.control(kind)) completion else completion + (name, replace))
def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)
-
def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)
+ def + (decl: Outer_Syntax.Decl): Outer_Syntax =
+ decl match {
+ case ((name, Some((kind, _)))) => this + (name, kind)
+ case ((name, None)) => this + name
+ }
def is_command(name: String): Boolean =
keyword_kind(name) match {
--- a/src/Pure/Isar/parse.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Isar/parse.scala Thu Mar 15 22:21:28 2012 +0100
@@ -53,6 +53,7 @@
atom(Token.Kind.KEYWORD.toString + " " + quote(name),
tok => tok.kind == Token.Kind.KEYWORD && tok.content == name)
+ def string: Parser[String] = atom("string", _.is_string)
def name: Parser[String] = atom("name declaration", _.is_name)
def xname: Parser[String] = atom("name reference", _.is_xname)
def text: Parser[String] = atom("text", _.is_text)
--- a/src/Pure/Isar/token.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Isar/token.scala Thu Mar 15 22:21:28 2012 +0100
@@ -70,6 +70,7 @@
kind == Token.Kind.ALT_STRING ||
kind == Token.Kind.VERBATIM ||
kind == Token.Kind.COMMENT
+ def is_string: Boolean = kind == Token.Kind.STRING
def is_name: Boolean =
kind == Token.Kind.IDENT ||
kind == Token.Kind.SYM_IDENT ||
--- a/src/Pure/ML/ml_antiquote.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/ML/ml_antiquote.ML Thu Mar 15 22:21:28 2012 +0100
@@ -182,5 +182,16 @@
val const = Const (c, Consts.instance (Proof_Context.consts_of ctxt) (c, Ts));
in ML_Syntax.atomic (ML_Syntax.print_term const) end))));
+
+(* outer syntax *)
+
+val _ = Context.>> (Context.map_theory
+ (value (Binding.name "keyword")
+ (Args.theory -- Scan.lift (Parse.position Parse.string) >> (fn (thy, (name, pos)) =>
+ (if is_none (Thy_Header.the_keyword thy name) then
+ ML_Syntax.atomic ("Parse.$$$ " ^ ML_Syntax.print_string name)
+ else error ("Unknown minor keyword " ^ quote name))
+ handle ERROR msg => error (msg ^ Position.str_of pos)))));
+
end;
--- a/src/Pure/PIDE/document.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/PIDE/document.ML Thu Mar 15 22:21:28 2012 +0100
@@ -15,7 +15,7 @@
val new_id: unit -> id
val parse_id: string -> id
val print_id: id -> string
- type node_header = ((string * string) * string list * (string * bool) list) Exn.result
+ type node_header = (string * Thy_Header.header) Exn.result
datatype node_edit =
Clear |
Edits of (command_id option * command_id option) list |
@@ -58,7 +58,7 @@
(** document structure **)
-type node_header = ((string * string) * string list * (string * bool) list) Exn.result;
+type node_header = (string * Thy_Header.header) Exn.result;
type perspective = (command_id -> bool) * command_id option; (*visible commands, last*)
structure Entries = Linear_Set(type key = command_id val ord = int_ord);
@@ -212,17 +212,22 @@
|> touch_node name
| Header header =>
let
- val imports = (case header of Exn.Res (_, imports, _) => imports | _ => []);
+ val imports = (case header of Exn.Res (_, {imports, ...}) => imports | _ => []);
+ val keywords = (case header of Exn.Res (_, {keywords, ...}) => keywords | _ => []);
+ val header' =
+ (List.app (fn (name, decl) =>
+ Keyword.declare name (Option.map Keyword.make decl)) keywords; header)
+ handle exn as ERROR _ => Exn.Exn exn;
val nodes1 = nodes
|> default_node name
|> fold default_node imports;
val nodes2 = nodes1
|> Graph.Keys.fold
(fn dep => Graph.del_edge (dep, name)) (Graph.imm_preds nodes1 name);
- val (header', nodes3) =
- (header, Graph.add_deps_acyclic (name, imports) nodes2)
+ val (header'', nodes3) =
+ (header', Graph.add_deps_acyclic (name, imports) nodes2)
handle Graph.CYCLES cs => (Exn.Exn (ERROR (cat_lines (map cycle_msg cs))), nodes2);
- in Graph.map_node name (set_header header') nodes3 end
+ in Graph.map_node name (set_header header'') nodes3 end
|> touch_node name
| Perspective perspective =>
update_node name (set_perspective perspective) nodes);
@@ -240,7 +245,7 @@
abstype state = State of
{versions: version Inttab.table, (*version_id -> document content*)
- commands: (string * Token.T list future) Inttab.table, (*command_id -> name * span*)
+ commands: (string * Token.T list lazy) Inttab.table, (*command_id -> name * span*)
execution: version_id * Future.group} (*current execution process*)
with
@@ -284,13 +289,9 @@
fun define_command (id: command_id) name text =
map_state (fn (versions, commands, execution) =>
let
- val future =
- (singleton o Future.forks)
- {name = "Document.define_command", group = SOME (Future.new_group NONE),
- deps = [], pri = ~1, interrupts = false}
- (fn () => parse_command (print_id id) text);
+ val span = Lazy.lazy (fn () => parse_command (print_id id) text);
val commands' =
- Inttab.update_new (id, (name, future)) commands
+ Inttab.update_new (id, (name, span)) commands
handle Inttab.DUP dup => err_dup "command" dup;
in (versions, commands', execution) end);
@@ -307,7 +308,6 @@
end;
-
(* toplevel transactions *)
local
@@ -413,7 +413,7 @@
if bad_init andalso is_none init then NONE
else
let
- val (name, span) = the_command state command_id' ||> Future.join;
+ val (name, span) = the_command state command_id' ||> Lazy.force;
val (modify_init, init') =
if Keyword.is_theory_begin name then
(Toplevel.modify_init (the_default illegal_init init), NONE)
@@ -447,16 +447,15 @@
fun init_theory deps node =
let
(* FIXME provide files via Scala layer, not master_dir *)
- val ((master_dir, thy_name), imports, uses) = Exn.release (get_header node);
- val files = map (apfst Path.explode) uses;
+ val (master_dir, header) = Exn.release (get_header node);
val parents =
- imports |> map (fn import =>
+ #imports header |> map (fn import =>
(case Thy_Info.lookup_theory import of (* FIXME more robust!? *)
SOME thy => thy
| NONE =>
get_theory (Position.file_only import)
(snd (Future.join (the (AList.lookup (op =) deps import))))));
- in Thy_Load.begin_theory (Path.explode master_dir) thy_name imports files parents end;
+ in Thy_Load.begin_theory (Path.explode master_dir) header parents end;
in
--- a/src/Pure/PIDE/document.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/PIDE/document.scala Thu Mar 15 22:21:28 2012 +0100
@@ -39,7 +39,10 @@
object Node
{
- sealed case class Deps(imports: List[Name], uses: List[(String, Boolean)])
+ sealed case class Deps(
+ imports: List[Name],
+ keywords: List[Outer_Syntax.Decl],
+ uses: List[(String, Boolean)])
object Name
{
@@ -119,6 +122,12 @@
def update_commands(new_commands: Linear_Set[Command]): Node =
new Node(header, perspective, blobs, new_commands)
+ def imports: List[Node.Name] =
+ header match { case Exn.Res(deps) => deps.imports case _ => Nil }
+
+ def keywords: List[Outer_Syntax.Decl] =
+ header match { case Exn.Res(deps) => deps.keywords case _ => Nil }
+
/* commands */
@@ -181,11 +190,7 @@
def + (entry: (Node.Name, Node)): Nodes =
{
val (name, node) = entry
- val imports =
- node.header match {
- case Exn.Res(deps) => deps.imports
- case _ => Nil
- }
+ val imports = node.imports
val graph1 =
(graph.default_node(name, Node.empty) /: imports)((g, p) => g.default_node(p, Node.empty))
val graph2 = (graph1 /: graph1.imm_preds(name))((g, dep) => g.del_edge(dep, name))
@@ -196,6 +201,7 @@
def entries: Iterator[(Node.Name, Node)] =
graph.entries.map({ case (name, (node, _)) => (name, node) })
+ def descendants(names: List[Node.Name]): List[Node.Name] = graph.all_succs(names)
def topological_order: List[Node.Name] = graph.topological_order
}
@@ -209,12 +215,17 @@
{
val init: Version = new Version()
- def make(nodes: Nodes): Version = new Version(new_id(), nodes)
+ def make(syntax: Outer_Syntax, nodes: Nodes): Version =
+ new Version(new_id(), syntax, nodes)
}
final class Version private(
val id: Version_ID = no_id,
+ val syntax: Outer_Syntax = Outer_Syntax.empty,
val nodes: Nodes = Nodes.empty)
+ {
+ def is_init: Boolean = id == no_id
+ }
/* changes of plain text, eventually resulting in document edits */
@@ -408,6 +419,7 @@
def is_stable(change: Change): Boolean =
change.is_finished && is_assigned(change.version.get_finished)
+ def recent_finished: Change = history.undo_list.find(_.is_finished) getOrElse fail
def recent_stable: Change = history.undo_list.find(is_stable) getOrElse fail
def tip_stable: Boolean = is_stable(history.tip)
def tip_version: Version = history.tip.version.get_finished
--- a/src/Pure/PIDE/protocol.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/PIDE/protocol.ML Thu Mar 15 22:21:28 2012 +0100
@@ -46,9 +46,16 @@
[fn ([], []) => Document.Clear,
fn ([], a) => Document.Edits (list (pair (option int) (option int)) a),
fn ([], a) =>
- Document.Header
- (Exn.Res
- (triple (pair string string) (list string) (list (pair string bool)) a)),
+ let
+ val ((((master, name), imports), keywords), uses) =
+ pair (pair (pair (pair string string) (list string))
+ (list (pair string (option (pair string (list string))))))
+ (list (pair string bool)) a;
+ val res =
+ Exn.capture (fn () =>
+ (master, Thy_Header.make name imports keywords
+ (map (apfst Path.explode) uses))) ();
+ in Document.Header res end,
fn ([a], []) => Document.Header (Exn.Exn (ERROR a)),
fn (a, []) => Document.Perspective (map int_atom a)]))
end;
--- a/src/Pure/PIDE/protocol.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/PIDE/protocol.scala Thu Mar 15 22:21:28 2012 +0100
@@ -225,8 +225,10 @@
// FIXME val uses = deps.uses.map(p => (Isabelle_System.posix_path(p._1), p._2))
val uses = deps.uses
(Nil,
- triple(pair(symbol_string, symbol_string), list(symbol_string),
- list(pair(symbol_string, bool)))((dir, name.theory), imports, uses)) },
+ pair(pair(pair(pair(symbol_string, symbol_string), list(symbol_string)),
+ list(pair(symbol_string, option(pair(symbol_string, list(symbol_string)))))),
+ list(pair(symbol_string, bool)))(
+ (((dir, name.theory), imports), deps.keywords), uses)) },
{ case Document.Node.Header(Exn.Exn(e)) => (List(Symbol.encode(Exn.message(e))), Nil) },
{ case Document.Node.Perspective(a) => (a.commands.map(c => long_atom(c.id)), Nil) }))
def encode: T[List[Document.Edit_Command]] = list((node_edit: Document.Edit_Command) =>
--- a/src/Pure/ProofGeneral/pgip_parser.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/ProofGeneral/pgip_parser.ML Thu Mar 15 22:21:28 2012 +0100
@@ -21,7 +21,7 @@
fun thy_begin text =
(case try (Thy_Header.read Position.none) text of
NONE => D.Opentheory {thyname = NONE, parentnames = [], text = text}
- | SOME (name, imports, _) =>
+ | SOME {name, imports, ...} =>
D.Opentheory {thyname = SOME name, parentnames = imports, text = text})
:: [D.Openblock {metavarid = NONE, name = NONE, objtype = SOME I.ObjTheoryBody}];
--- a/src/Pure/System/session.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/System/session.scala Thu Mar 15 22:21:28 2012 +0100
@@ -86,7 +86,6 @@
//{{{
private case class Text_Edits(
- syntax: Outer_Syntax,
name: Document.Node.Name,
previous: Future[Document.Version],
text_edits: List[Document.Edit_Text],
@@ -99,9 +98,9 @@
receive {
case Stop => finished = true; reply(())
- case Text_Edits(syntax, name, previous, text_edits, version_result) =>
+ case Text_Edits(name, previous, text_edits, version_result) =>
val prev = previous.get_finished
- val (doc_edits, version) = Thy_Syntax.text_edits(syntax, prev, text_edits)
+ val (doc_edits, version) = Thy_Syntax.text_edits(prover_syntax, prev, text_edits)
version_result.fulfill(version)
sender ! Change_Node(name, doc_edits, prev, version)
@@ -118,8 +117,11 @@
@volatile var verbose: Boolean = false
- @volatile private var syntax = Outer_Syntax.init()
- def current_syntax(): Outer_Syntax = syntax
+ @volatile private var prover_syntax =
+ Outer_Syntax.init() +
+ // FIXME avoid hardwired stuff!?
+ ("hence", Keyword.PRF_ASM_GOAL, "then have") +
+ ("thus", Keyword.PRF_ASM_GOAL, "then show")
private val syslog = Volatile(Queue.empty[XML.Elem])
def current_syslog(): String = cat_lines(syslog().iterator.map(msg => XML.content(msg).mkString))
@@ -136,6 +138,13 @@
private val global_state = Volatile(Document.State.init)
def current_state(): Document.State = global_state()
+ def recent_syntax(): Outer_Syntax =
+ {
+ val version = current_state().recent_finished.version.get_finished
+ if (version.is_init) prover_syntax
+ else version.syntax
+ }
+
def snapshot(name: Document.Node.Name = Document.Node.Name.empty,
pending_edits: List[Text.Edit] = Nil): Document.Snapshot =
global_state().snapshot(name, pending_edits)
@@ -279,7 +288,6 @@
header: Document.Node_Header, edits: List[Document.Node.Edit[Text.Edit, Text.Perspective]])
//{{{
{
- val syntax = current_syntax()
val previous = global_state().history.tip.version
prover.get.cancel_execution()
@@ -288,7 +296,7 @@
val version = Future.promise[Document.Version]
val change = global_state >>> (_.continue_history(previous, text_edits, version))
- change_parser ! Text_Edits(syntax, name, previous, text_edits, version)
+ change_parser ! Text_Edits(name, previous, text_edits, version)
}
//}}}
@@ -398,19 +406,16 @@
System.err.println("cancel_scala " + id) // FIXME actually cancel JVM task
case Isabelle_Markup.Ready if output.is_protocol =>
- // FIXME move to ML side (!?)
- syntax += ("hence", Keyword.PRF_ASM_GOAL, "then have")
- syntax += ("thus", Keyword.PRF_ASM_GOAL, "then show")
phase = Session.Ready
case Isabelle_Markup.Loaded_Theory(name) if output.is_protocol =>
thy_load.register_thy(name)
case Isabelle_Markup.Command_Decl(name, kind) if output.is_protocol =>
- syntax += (name, kind)
+ prover_syntax += (name, kind)
case Isabelle_Markup.Keyword_Decl(name) if output.is_protocol =>
- syntax += name
+ prover_syntax += name
case _ =>
if (output.is_exit && phase == Session.Startup) phase = Session.Failed
--- a/src/Pure/Thy/thy_header.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Thy/thy_header.ML Thu Mar 15 22:21:28 2012 +0100
@@ -1,45 +1,106 @@
(* Title: Pure/Thy/thy_header.ML
- Author: Markus Wenzel, TU Muenchen
+ Author: Makarius
-Theory headers -- independent of outer syntax.
+Static theory header information.
*)
signature THY_HEADER =
sig
- val args: Token.T list -> (string * string list * (string * bool) list) * Token.T list
- val read: Position.T -> string -> string * string list * (Path.T * bool) list
+ type header =
+ {name: string, imports: string list,
+ keywords: (string * (string * string list) option) list,
+ uses: (Path.T * bool) list}
+ val make: string -> string list -> (string * (string * string list) option) list ->
+ (Path.T * bool) list -> header
+ val declare_keyword: string * (string * string list) option -> theory -> theory
+ val the_keyword: theory -> string -> Keyword.T option
+ val args: header parser
+ val read: Position.T -> string -> header
end;
structure Thy_Header: THY_HEADER =
struct
-(* keywords *)
+type header =
+ {name: string, imports: string list,
+ keywords: (string * (string * string list) option) list,
+ uses: (Path.T * bool) list};
+
+fun make name imports keywords uses : header =
+ {name = name, imports = imports, keywords = keywords, uses = uses};
+
+
+
+(** keyword declarations **)
+
+fun err_dup name = error ("Inconsistent declaration of outer syntax keyword " ^ quote name);
+
+structure Data = Theory_Data
+(
+ type T = Keyword.T option Symtab.table;
+ val empty = Symtab.empty;
+ val extend = I;
+ fun merge data : T = Symtab.merge (op =) data handle Symtab.DUP name => err_dup name;
+);
+
+fun declare_keyword (name, decl) = Data.map (fn data =>
+ let
+ val kind = Option.map Keyword.make decl;
+ val _ = Keyword.declare name kind;
+ in Symtab.update_new (name, kind) data handle Symtab.DUP name => err_dup name end);
+
+fun the_keyword thy name =
+ (case Symtab.lookup (Data.get thy) name of
+ SOME kind => kind
+ | NONE => error ("Unknown outer syntax keyword " ^ quote name));
+
+
+
+(** concrete syntax **)
+
+(* header keywords *)
val headerN = "header";
val theoryN = "theory";
val importsN = "imports";
+val keywordsN = "keywords";
val usesN = "uses";
val beginN = "begin";
-val header_lexicon = Scan.make_lexicon
- (map Symbol.explode ["%", "(", ")", ";", beginN, headerN, importsN, theoryN, usesN]);
+val header_lexicon =
+ Scan.make_lexicon
+ (map Symbol.explode
+ ["%", "(", ")", "::", ";", "and", beginN, headerN, importsN, keywordsN, theoryN, usesN]);
(* header args *)
-val file_name = Parse.group (fn () => "file name") Parse.name;
+local
+
+val file_name = Parse.group (fn () => "file name") Parse.path;
val theory_name = Parse.group (fn () => "theory name") Parse.name;
+val keyword_kind = Parse.group (fn () => "outer syntax keyword kind") (Parse.name -- Parse.tags);
+val keyword_decl =
+ Scan.repeat1 Parse.string -- Scan.option (Parse.$$$ "::" |-- Parse.!!! keyword_kind) >>
+ (fn (names, kind) => map (rpair kind) names);
+val keyword_decls = Parse.and_list1 keyword_decl >> flat;
+
val file =
Parse.$$$ "(" |-- Parse.!!! (file_name --| Parse.$$$ ")") >> rpair false ||
file_name >> rpair true;
-val uses = Scan.optional (Parse.$$$ usesN |-- Parse.!!! (Scan.repeat1 file)) [];
+in
val args =
- theory_name -- (Parse.$$$ importsN |--
- Parse.!!! (Scan.repeat1 theory_name -- uses --| Parse.$$$ beginN))
- >> Parse.triple2;
+ theory_name --
+ (Parse.$$$ importsN |-- Parse.!!! (Scan.repeat1 theory_name)) --
+ Scan.optional (Parse.$$$ keywordsN |-- Parse.!!! keyword_decls) [] --
+ Scan.optional (Parse.$$$ usesN |-- Parse.!!! (Scan.repeat1 file)) [] --|
+ Parse.$$$ beginN >>
+ (fn (((name, imports), keywords), uses) => make name imports keywords uses);
+
+end;
(* read header *)
@@ -61,7 +122,7 @@
|> Source.get_single;
in
(case res of
- SOME ((name, imports, uses), _) => (name, imports, map (apfst Path.explode) uses)
+ SOME (h, _) => h
| NONE => error ("Unexpected end of input" ^ Position.str_of pos))
end;
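
For orientation, a header accepted by the revised args grammar above might look
as follows; the theory name, commands, and file are hypothetical, and keywords
given without a "::" kind are declared as minor keywords:

    theory Demo
    imports Main
    keywords
      "demo_cmd" :: thy_decl and
      "demo_goal" :: thy_goal and
      "using_files"
    uses "Tools/demo.ML"
    begin
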
--- a/src/Pure/Thy/thy_header.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Thy/thy_header.scala Thu Mar 15 22:21:28 2012 +0100
@@ -1,7 +1,7 @@
/* Title: Pure/Thy/thy_header.scala
Author: Makarius
-Theory headers -- independent of outer syntax.
+Static theory header information.
*/
package isabelle
@@ -20,10 +20,13 @@
val HEADER = "header"
val THEORY = "theory"
val IMPORTS = "imports"
+ val KEYWORDS = "keywords"
+ val AND = "and"
val USES = "uses"
val BEGIN = "begin"
- private val lexicon = Scan.Lexicon("%", "(", ")", ";", BEGIN, HEADER, IMPORTS, THEORY, USES)
+ private val lexicon =
+ Scan.Lexicon("%", "(", ")", "::", ";", AND, BEGIN, HEADER, IMPORTS, KEYWORDS, THEORY, USES)
/* theory file name */
@@ -45,15 +48,26 @@
val file_name = atom("file name", _.is_name)
val theory_name = atom("theory name", _.is_name)
+ val keyword_kind =
+ atom("outer syntax keyword kind", _.is_name) ~ tags ^^ { case x ~ y => (x, y) }
+ val keyword_decl =
+ rep1(string) ~ opt(keyword("::") ~! keyword_kind ^^ { case _ ~ x => x }) ^^
+ { case xs ~ y => xs.map((_, y)) }
+ val keyword_decls =
+ keyword_decl ~ rep(keyword(AND) ~! keyword_decl ^^ { case _ ~ x => x }) ^^
+ { case xs ~ yss => (xs :: yss).flatten }
+
val file =
keyword("(") ~! (file_name ~ keyword(")")) ^^ { case _ ~ (x ~ _) => (x, false) } |
file_name ^^ (x => (x, true))
- val uses = opt(keyword(USES) ~! (rep1(file))) ^^ { case None => Nil case Some(_ ~ xs) => xs }
-
val args =
- theory_name ~ (keyword(IMPORTS) ~! (rep1(theory_name) ~ uses ~ keyword(BEGIN))) ^^
- { case x ~ (_ ~ (ys ~ zs ~ _)) => Thy_Header(x, ys, zs) }
+ theory_name ~
+ (keyword(IMPORTS) ~! (rep1(theory_name)) ^^ { case _ ~ xs => xs }) ~
+ (opt(keyword(KEYWORDS) ~! keyword_decls) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~
+ (opt(keyword(USES) ~! (rep1(file))) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~
+ keyword(BEGIN) ^^
+ { case x ~ ys ~ zs ~ ws ~ _ => Thy_Header(x, ys, zs, ws) }
(keyword(HEADER) ~ tags) ~!
((doc_source ~ rep(keyword(";")) ~ keyword(THEORY) ~ tags) ~> args) ^^ { case _ ~ x => x } |
@@ -98,9 +112,11 @@
sealed case class Thy_Header(
- val name: String, val imports: List[String], val uses: List[(String, Boolean)])
+ name: String, imports: List[String],
+ keywords: List[Outer_Syntax.Decl],
+ uses: List[(String, Boolean)])
{
def map(f: String => String): Thy_Header =
- Thy_Header(f(name), imports.map(f), uses.map(p => (f(p._1), p._2)))
+ Thy_Header(f(name), imports.map(f), keywords, uses.map(p => (f(p._1), p._2)))
}
--- a/src/Pure/Thy/thy_info.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Thy/thy_info.ML Thu Mar 15 22:21:28 2012 +0100
@@ -21,7 +21,7 @@
val use_thys_wrt: Path.T -> string list -> unit
val use_thys: string list -> unit
val use_thy: string -> unit
- val toplevel_begin_theory: Path.T -> string -> string list -> (Path.T * bool) list -> theory
+ val toplevel_begin_theory: Path.T -> Thy_Header.header -> theory
val register_thy: theory -> unit
val finish: unit -> unit
end;
@@ -233,9 +233,10 @@
val {master = (thy_path, _), imports} = deps;
val dir = Path.dir thy_path;
val pos = Path.position thy_path;
- val (_, _, uses) = Thy_Header.read pos text;
+ val {uses, keywords, ...} = Thy_Header.read pos text;
+ val header = Thy_Header.make name imports keywords uses;
- val (theory, present) = Thy_Load.load_thy update_time dir name imports uses pos text parents;
+ val (theory, present) = Thy_Load.load_thy update_time dir header pos text parents;
fun commit () = update_thy deps theory;
in (theory, present, commit) end;
@@ -308,12 +309,13 @@
(* toplevel begin theory -- without maintaining database *)
-fun toplevel_begin_theory dir name imports uses =
+fun toplevel_begin_theory dir (header: Thy_Header.header) =
let
+ val {name, imports, ...} = header;
val _ = kill_thy name;
val _ = use_thys_wrt dir imports;
val parents = map (get_theory o base_name) imports;
- in Thy_Load.begin_theory dir name imports uses parents end;
+ in Thy_Load.begin_theory dir header parents end;
(* register theory *)
--- a/src/Pure/Thy/thy_load.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Thy/thy_load.ML Thu Mar 15 22:21:28 2012 +0100
@@ -19,10 +19,9 @@
val all_current: theory -> bool
val use_ml: Path.T -> unit
val exec_ml: Path.T -> generic_theory -> generic_theory
- val begin_theory: Path.T -> string -> string list -> (Path.T * bool) list ->
- theory list -> theory
- val load_thy: int -> Path.T -> string -> string list -> (Path.T * bool) list ->
- Position.T -> string -> theory list -> theory * unit future
+ val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
+ val load_thy: int -> Path.T -> Thy_Header.header -> Position.T -> string ->
+ theory list -> theory * unit future
val set_master_path: Path.T -> unit
val get_master_path: unit -> Path.T
end;
@@ -85,7 +84,9 @@
val text = File.read master_file;
val (name', imports, uses) =
if name = Context.PureN then (Context.PureN, [], [])
- else Thy_Header.read (Path.position master_file) text;
+ else
+ let val {name, imports, uses, ...} = Thy_Header.read (Path.position master_file) text
+ in (name, imports, uses) end;
val _ = name <> name' andalso
error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name');
in {master = (master_file, SHA1.digest text), text = text, imports = imports, uses = uses} end;
@@ -159,21 +160,23 @@
(* load_thy *)
-fun begin_theory dir name imports uses parents =
+fun begin_theory dir {name, imports, keywords, uses} parents =
Theory.begin_theory name parents
|> put_deps dir imports
+ |> fold Thy_Header.declare_keyword keywords
|> fold (require o fst) uses
|> fold (fn (path, true) => Context.theory_map (exec_ml path) o Theory.checkpoint | _ => I) uses
|> Theory.checkpoint;
-fun load_thy update_time dir name imports uses pos text parents =
+fun load_thy update_time dir header pos text parents =
let
val (lexs, outer_syntax) = Outer_Syntax.get_syntax ();
val time = ! Toplevel.timing;
+ val {name, imports, uses, ...} = header;
val _ = Present.init_theory name;
fun init () =
- begin_theory dir name imports uses parents
+ begin_theory dir header parents
|> Present.begin_theory update_time dir uses;
val toks = Thy_Syntax.parse_tokens lexs pos text;
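
For orientation, a caller of the record-based interface above would construct a
header value once and pass it through; the names below are hypothetical and the
snippet is only a sketch of the new signatures:

    val header =
      Thy_Header.make "Demo" ["Pure"]
        [("demo_cmd", SOME ("thy_decl", [])), ("using_files", NONE)] [];
    val thy =
      Thy_Load.begin_theory Path.current header [Thy_Info.get_theory "Pure"];
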
--- a/src/Pure/Thy/thy_load.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Thy/thy_load.scala Thu Mar 15 22:21:28 2012 +0100
@@ -61,7 +61,7 @@
val uses = header.uses
if (name.theory != name1)
error("Bad file name " + thy_path(Path.basic(name.theory)) + " for theory " + quote(name1))
- Document.Node.Deps(imports, uses)
+ Document.Node.Deps(imports, header.keywords, uses)
}
def check_thy(name: Document.Node.Name): Document.Node.Deps =
--- a/src/Pure/Thy/thy_syntax.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Pure/Thy/thy_syntax.scala Thu Mar 15 22:21:28 2012 +0100
@@ -136,136 +136,188 @@
{
val nodes = previous.nodes
val (perspective, new_nodes) = update_perspective(nodes, name, text_perspective)
- val version = Document.Version.make(new_nodes getOrElse nodes)
+ val version = Document.Version.make(previous.syntax, new_nodes getOrElse nodes)
(perspective, version)
}
+ /** header edits: structure and outer syntax **/
+
+ private def header_edits(
+ base_syntax: Outer_Syntax,
+ previous: Document.Version,
+ edits: List[Document.Edit_Text])
+ : (Outer_Syntax, List[Document.Node.Name], Document.Nodes, List[Document.Edit_Command]) =
+ {
+ var rebuild_syntax = previous.is_init
+ var nodes = previous.nodes
+ val doc_edits = new mutable.ListBuffer[Document.Edit_Command]
+
+ edits foreach {
+ case (name, Document.Node.Header(header)) =>
+ val node = nodes(name)
+ val update_header =
+ (node.header, header) match {
+ case (Exn.Res(deps0), Exn.Res(deps)) => deps0 != deps
+ case _ => true
+ }
+ if (update_header) {
+ val node1 = node.update_header(header)
+ rebuild_syntax = rebuild_syntax || (node.keywords != node1.keywords)
+ nodes += (name -> node1)
+ doc_edits += (name -> Document.Node.Header(header))
+ }
+ case _ =>
+ }
+
+ val syntax =
+ if (rebuild_syntax)
+ (base_syntax /: nodes.entries)({ case (syn, (_, node)) => (syn /: node.keywords)(_ + _) })
+ else previous.syntax
+
+ val reparse =
+ if (rebuild_syntax) nodes.descendants(doc_edits.iterator.map(_._1).toList)
+ else Nil
+
+ (syntax, reparse, nodes, doc_edits.toList)
+ }
+
+
+
/** text edits **/
+ /* phase 1: edit individual command source */
+
+ @tailrec private def edit_text(eds: List[Text.Edit], commands: Linear_Set[Command])
+ : Linear_Set[Command] =
+ {
+ eds match {
+ case e :: es =>
+ Document.Node.command_starts(commands.iterator).find {
+ case (cmd, cmd_start) =>
+ e.can_edit(cmd.source, cmd_start) ||
+ e.is_insert && e.start == cmd_start + cmd.length
+ } match {
+ case Some((cmd, cmd_start)) if e.can_edit(cmd.source, cmd_start) =>
+ val (rest, text) = e.edit(cmd.source, cmd_start)
+ val new_commands = commands.insert_after(Some(cmd), Command.unparsed(text)) - cmd
+ edit_text(rest.toList ::: es, new_commands)
+
+ case Some((cmd, cmd_start)) =>
+ edit_text(es, commands.insert_after(Some(cmd), Command.unparsed(e.text)))
+
+ case None =>
+ require(e.is_insert && e.start == 0)
+ edit_text(es, commands.insert_after(None, Command.unparsed(e.text)))
+ }
+ case Nil => commands
+ }
+ }
+
+
+ /* phase 2: recover command spans */
+
+ @tailrec private def recover_spans(
+ syntax: Outer_Syntax,
+ node_name: Document.Node.Name,
+ commands: Linear_Set[Command]): Linear_Set[Command] =
+ {
+ commands.iterator.find(cmd => !cmd.is_defined) match {
+ case Some(first_unparsed) =>
+ val first =
+ commands.reverse_iterator(first_unparsed).
+ dropWhile(_.newlines == 0).find(_.is_command) getOrElse commands.head
+ val last =
+ commands.iterator(first_unparsed).
+ dropWhile(_.newlines == 0).find(_.is_command) getOrElse commands.last
+ val range =
+ commands.iterator(first).takeWhile(_ != last).toList ::: List(last)
+
+ val spans0 = parse_spans(syntax.scan(range.map(_.source).mkString))
+
+ val (before_edit, spans1) =
+ if (!spans0.isEmpty && first.is_command && first.span == spans0.head)
+ (Some(first), spans0.tail)
+ else (commands.prev(first), spans0)
+
+ val (after_edit, spans2) =
+ if (!spans1.isEmpty && last.is_command && last.span == spans1.last)
+ (Some(last), spans1.take(spans1.length - 1))
+ else (commands.next(last), spans1)
+
+ val inserted = spans2.map(span => Command(Document.new_id(), node_name, span))
+ val new_commands =
+ commands.delete_between(before_edit, after_edit).append_after(before_edit, inserted)
+ recover_spans(syntax, node_name, new_commands)
+
+ case None => commands
+ }
+ }
+
+
+ /* phase 3: full reparsing after syntax change */
+
+ private def reparse_spans(
+ syntax: Outer_Syntax,
+ node_name: Document.Node.Name,
+ commands: Linear_Set[Command]): Linear_Set[Command] =
+ {
+ val cmds = commands.toList
+ val spans1 = parse_spans(syntax.scan(cmds.map(_.source).mkString))
+ if (cmds.map(_.span) == spans1) commands
+ else Linear_Set(spans1.map(span => Command(Document.new_id(), node_name, span)): _*)
+ }
+
+
+ /* main phase */
+
def text_edits(
- syntax: Outer_Syntax,
+ base_syntax: Outer_Syntax,
previous: Document.Version,
edits: List[Document.Edit_Text])
: (List[Document.Edit_Command], Document.Version) =
{
- /* phase 1: edit individual command source */
+ val (syntax, reparse, nodes0, doc_edits0) = header_edits(base_syntax, previous, edits)
+ val reparse_set = reparse.toSet
- @tailrec def edit_text(eds: List[Text.Edit], commands: Linear_Set[Command])
- : Linear_Set[Command] =
- {
- eds match {
- case e :: es =>
- Document.Node.command_starts(commands.iterator).find {
- case (cmd, cmd_start) =>
- e.can_edit(cmd.source, cmd_start) ||
- e.is_insert && e.start == cmd_start + cmd.length
- } match {
- case Some((cmd, cmd_start)) if e.can_edit(cmd.source, cmd_start) =>
- val (rest, text) = e.edit(cmd.source, cmd_start)
- val new_commands = commands.insert_after(Some(cmd), Command.unparsed(text)) - cmd
- edit_text(rest.toList ::: es, new_commands)
-
- case Some((cmd, cmd_start)) =>
- edit_text(es, commands.insert_after(Some(cmd), Command.unparsed(e.text)))
-
- case None =>
- require(e.is_insert && e.start == 0)
- edit_text(es, commands.insert_after(None, Command.unparsed(e.text)))
- }
- case Nil => commands
- }
- }
-
+ var nodes = nodes0
+ val doc_edits = new mutable.ListBuffer[Document.Edit_Command]; doc_edits ++= doc_edits0
- /* phase 2: recover command spans */
+ (edits ::: reparse.map((_, Document.Node.Edits(Nil)))) foreach {
+ case (name, Document.Node.Clear()) =>
+ doc_edits += (name -> Document.Node.Clear())
+ nodes += (name -> nodes(name).clear)
- @tailrec def recover_spans(node_name: Document.Node.Name, commands: Linear_Set[Command])
- : Linear_Set[Command] =
- {
- commands.iterator.find(cmd => !cmd.is_defined) match {
- case Some(first_unparsed) =>
- val first =
- commands.reverse_iterator(first_unparsed).
- dropWhile(_.newlines == 0).find(_.is_command) getOrElse commands.head
- val last =
- commands.iterator(first_unparsed).
- dropWhile(_.newlines == 0).find(_.is_command) getOrElse commands.last
- val range =
- commands.iterator(first).takeWhile(_ != last).toList ::: List(last)
-
- val sources = range.flatMap(_.span.map(_.source))
- val spans0 = parse_spans(syntax.scan(sources.mkString))
-
- val (before_edit, spans1) =
- if (!spans0.isEmpty && first.is_command && first.span == spans0.head)
- (Some(first), spans0.tail)
- else (commands.prev(first), spans0)
-
- val (after_edit, spans2) =
- if (!spans1.isEmpty && last.is_command && last.span == spans1.last)
- (Some(last), spans1.take(spans1.length - 1))
- else (commands.next(last), spans1)
+ case (name, Document.Node.Edits(text_edits)) =>
+ val node = nodes(name)
+ val commands0 = node.commands
+ val commands1 = edit_text(text_edits, commands0)
+ val commands2 = recover_spans(syntax, name, commands1) // FIXME somewhat slow
+ val commands3 =
+ if (reparse_set.contains(name)) reparse_spans(syntax, name, commands2) // slow
+ else commands2
- val inserted = spans2.map(span => Command(Document.new_id(), node_name, span))
- val new_commands =
- commands.delete_between(before_edit, after_edit).append_after(before_edit, inserted)
- recover_spans(node_name, new_commands)
-
- case None => commands
- }
- }
-
-
- /* resulting document edits */
+ val removed_commands = commands0.iterator.filter(!commands3.contains(_)).toList
+ val inserted_commands = commands3.iterator.filter(!commands0.contains(_)).toList
- {
- val doc_edits = new mutable.ListBuffer[Document.Edit_Command]
- var nodes = previous.nodes
-
- edits foreach {
- case (name, Document.Node.Clear()) =>
- doc_edits += (name -> Document.Node.Clear())
- nodes += (name -> nodes(name).clear)
+ val cmd_edits =
+ removed_commands.reverse.map(cmd => (commands0.prev(cmd), None)) :::
+ inserted_commands.map(cmd => (commands3.prev(cmd), Some(cmd)))
- case (name, Document.Node.Edits(text_edits)) =>
- val node = nodes(name)
- val commands0 = node.commands
- val commands1 = edit_text(text_edits, commands0)
- val commands2 = recover_spans(name, commands1) // FIXME somewhat slow
-
- val removed_commands = commands0.iterator.filter(!commands2.contains(_)).toList
- val inserted_commands = commands2.iterator.filter(!commands0.contains(_)).toList
+ doc_edits += (name -> Document.Node.Edits(cmd_edits))
+ nodes += (name -> node.update_commands(commands3))
- val cmd_edits =
- removed_commands.reverse.map(cmd => (commands0.prev(cmd), None)) :::
- inserted_commands.map(cmd => (commands2.prev(cmd), Some(cmd)))
-
- doc_edits += (name -> Document.Node.Edits(cmd_edits))
- nodes += (name -> node.update_commands(commands2))
+ case (name, Document.Node.Header(_)) =>
- case (name, Document.Node.Header(header)) =>
- val node = nodes(name)
- val update_header =
- (node.header, header) match {
- case (Exn.Res(deps0), Exn.Res(deps)) => deps0 != deps
- case _ => true
- }
- if (update_header) {
- doc_edits += (name -> Document.Node.Header(header))
- nodes += (name -> node.update_header(header))
- }
-
- case (name, Document.Node.Perspective(text_perspective)) =>
- update_perspective(nodes, name, text_perspective) match {
- case (_, None) =>
- case (perspective, Some(nodes1)) =>
- doc_edits += (name -> Document.Node.Perspective(perspective))
- nodes = nodes1
- }
- }
- (doc_edits.toList, Document.Version.make(nodes))
+ case (name, Document.Node.Perspective(text_perspective)) =>
+ update_perspective(nodes, name, text_perspective) match {
+ case (_, None) =>
+ case (perspective, Some(nodes1)) =>
+ doc_edits += (name -> Document.Node.Perspective(perspective))
+ nodes = nodes1
+ }
}
+ (doc_edits.toList, Document.Version.make(syntax, nodes))
}
}
--- a/src/Tools/Code/code_printer.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/Code/code_printer.ML Thu Mar 15 22:21:28 2012 +0100
@@ -394,19 +394,15 @@
| _ => Scan.!! (err s) Scan.fail ()
end;
-val (infixK, infixlK, infixrK) = ("infix", "infixl", "infixr");
-
fun parse_syntax mk_plain mk_complex prep_arg =
Scan.option (
- ((Parse.$$$ infixK >> K X)
- || (Parse.$$$ infixlK >> K L)
- || (Parse.$$$ infixrK >> K R))
+ ((@{keyword "infix"} >> K X)
+ || (@{keyword "infixl"} >> K L)
+ || (@{keyword "infixr"} >> K R))
-- Parse.nat -- Parse.string
>> (fn ((x, i), s) => mk_complex (parse_infix prep_arg (x, i) s))
|| Parse.string >> (fn s => (parse_mixfix mk_plain mk_complex prep_arg s)));
-val _ = List.app Keyword.keyword [infixK, infixlK, infixrK];
-
fun parse_tyco_syntax x = parse_syntax (fn s => (0, (K o K o K o str) s)) I I x;
val parse_const_syntax = parse_syntax plain_const_syntax simple_const_syntax fst;
--- a/src/Tools/Code/code_runtime.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/Code/code_runtime.ML Thu Mar 15 22:21:28 2012 +0100
@@ -344,26 +344,19 @@
local
-val datatypesK = "datatypes";
-val functionsK = "functions";
-val fileK = "file";
-val andK = "and"
-
-val _ = List.app Keyword.keyword [datatypesK, functionsK];
-
val parse_datatype =
- Parse.name --| Parse.$$$ "=" --
+ Parse.name --| @{keyword "="} --
(((Parse.sym_ident || Parse.string) >> (fn "_" => NONE | _ => Scan.fail ()))
- || ((Parse.term ::: (Scan.repeat (Parse.$$$ "|" |-- Parse.term))) >> SOME));
+ || ((Parse.term ::: (Scan.repeat (@{keyword "|"} |-- Parse.term))) >> SOME));
in
val _ =
Outer_Syntax.command "code_reflect" "enrich runtime environment with generated code"
- Keyword.thy_decl (Parse.name -- Scan.optional (Parse.$$$ datatypesK |-- (parse_datatype
- ::: Scan.repeat (Parse.$$$ andK |-- parse_datatype))) []
- -- Scan.optional (Parse.$$$ functionsK |-- Scan.repeat1 Parse.name) []
- -- Scan.option (Parse.$$$ fileK |-- Parse.name)
+ Keyword.thy_decl (Parse.name -- Scan.optional (@{keyword "datatypes"} |-- (parse_datatype
+ ::: Scan.repeat (@{keyword "and"} |-- parse_datatype))) []
+ -- Scan.optional (@{keyword "functions"} |-- Scan.repeat1 Parse.name) []
+ -- Scan.option (@{keyword "file"} |-- Parse.name)
>> (fn (((module_name, raw_datatypes), raw_functions), some_file) => Toplevel.theory
(code_reflect_cmd raw_datatypes raw_functions module_name some_file)));
--- a/src/Tools/Code/code_target.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/Code/code_target.ML Thu Mar 15 22:21:28 2012 +0100
@@ -638,8 +638,8 @@
fun process_multi_syntax parse_thing parse_syntax change =
(Parse.and_list1 parse_thing
- :|-- (fn things => Scan.repeat1 (Parse.$$$ "(" |-- Parse.name --
- (zip_list things parse_syntax (Parse.$$$ "and")) --| Parse.$$$ ")")))
+ :|-- (fn things => Scan.repeat1 (@{keyword "("} |-- Parse.name --
+ (zip_list things parse_syntax (@{keyword "and"})) --| @{keyword ")"})))
>> (Toplevel.theory oo fold)
(fn (target, syns) => fold (fn (raw_x, syn) => change target raw_x syn) syns);
@@ -667,22 +667,18 @@
(** Isar setup **)
-val (inK, module_nameK, fileK, checkingK) = ("in", "module_name", "file", "checking");
-
-val code_expr_argsP = Scan.optional (Parse.$$$ "(" |-- Args.parse --| Parse.$$$ ")") [];
+val code_expr_argsP = Scan.optional (@{keyword "("} |-- Args.parse --| @{keyword ")"}) [];
val code_exprP =
Scan.repeat1 Parse.term_group :|-- (fn raw_cs =>
- ((Parse.$$$ checkingK |-- Scan.repeat (Parse.name
- -- ((Parse.$$$ "?" |-- Scan.succeed false) || Scan.succeed true) -- code_expr_argsP))
+ ((@{keyword "checking"} |-- Scan.repeat (Parse.name
+ -- ((@{keyword "?"} |-- Scan.succeed false) || Scan.succeed true) -- code_expr_argsP))
>> (fn seris => check_code_cmd raw_cs seris)
- || Scan.repeat (Parse.$$$ inK |-- Parse.name
- -- Scan.optional (Parse.$$$ module_nameK |-- Parse.name) ""
- -- Scan.optional (Parse.$$$ fileK |-- Parse.name) ""
+ || Scan.repeat (@{keyword "in"} |-- Parse.name
+ -- Scan.optional (@{keyword "module_name"} |-- Parse.name) ""
+ -- Scan.optional (@{keyword "file"} |-- Parse.name) ""
-- code_expr_argsP) >> (fn seris => export_code_cmd raw_cs seris)));
-val _ = List.app Keyword.keyword [inK, module_nameK, fileK, checkingK];
-
val _ =
Outer_Syntax.command "code_class" "define code syntax for class" Keyword.thy_decl (
process_multi_syntax Parse.xname (Scan.option Parse.string)
@@ -690,7 +686,7 @@
val _ =
Outer_Syntax.command "code_instance" "define code syntax for instance" Keyword.thy_decl (
- process_multi_syntax (Parse.xname --| Parse.$$$ "::" -- Parse.xname)
+ process_multi_syntax (Parse.xname --| @{keyword "::"} -- Parse.xname)
(Scan.option (Parse.minus >> K ()))
add_instance_syntax_cmd);
@@ -715,7 +711,7 @@
Outer_Syntax.command "code_include" "declare piece of code to be included in generated code"
Keyword.thy_decl (
Parse.name -- Parse.name -- (Parse.text :|-- (fn "-" => Scan.succeed NONE
- | s => Scan.optional (Parse.$$$ "attach" |-- Scan.repeat1 Parse.term) [] >> pair s >> SOME))
+ | s => Scan.optional (@{keyword "attach"} |-- Scan.repeat1 Parse.term) [] >> pair s >> SOME))
>> (fn ((target, name), content_consts) =>
(Toplevel.theory o add_include_cmd target) (name, content_consts))
);
--- a/src/Tools/Code_Generator.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/Code_Generator.thy Thu Mar 15 22:21:28 2012 +0100
@@ -6,6 +6,13 @@
theory Code_Generator
imports Pure
+keywords
+ "try" "solve_direct" "quickcheck" "value" "print_codeproc"
+ "code_thms" "code_deps" "export_code" :: diag and
+ "quickcheck_params" "code_class" "code_instance" "code_type"
+ "code_const" "code_reserved" "code_include" "code_modulename"
+ "code_abort" "code_monad" "code_reflect" :: thy_decl and
+ "datatypes" "functions" "module_name" "file" "checking"
uses
"~~/src/Tools/misc_legacy.ML"
"~~/src/Tools/cache_io.ML"
--- a/src/Tools/interpretation_with_defs.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/interpretation_with_defs.ML Thu Mar 15 22:21:28 2012 +0100
@@ -79,15 +79,12 @@
end;
-val definesK = "defines";
-val _ = Keyword.keyword definesK;
-
val _ =
Outer_Syntax.command "interpretation"
"prove interpretation of locale expression in theory" Keyword.thy_goal
(Parse.!!! (Parse_Spec.locale_expression true) --
- Scan.optional (Parse.$$$ definesK |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":"
- -- ((Parse.binding -- Parse.opt_mixfix') --| Parse.$$$ "is" -- Parse.term))) [] --
+ Scan.optional (@{keyword "defines"} |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":"
+ -- ((Parse.binding -- Parse.opt_mixfix') --| @{keyword "is"} -- Parse.term))) [] --
Scan.optional (Parse.where_ |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":" -- Parse.prop)) []
>> (fn ((expr, defs), equations) => Toplevel.print o
Toplevel.theory_to_proof (interpretation_cmd expr defs equations)));
--- a/src/Tools/jEdit/src/isabelle_sidekick.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/jEdit/src/isabelle_sidekick.scala Thu Mar 15 22:21:28 2012 +0100
@@ -92,7 +92,7 @@
val start = buffer.getLineStartOffset(line)
val text = buffer.getSegment(start, caret - start)
- val completion = model.session.current_syntax().completion
+ val completion = model.session.recent_syntax().completion
completion.complete(text) match {
case None => null
case Some((word, cs)) =>
@@ -116,7 +116,7 @@
def parser(data: SideKickParsedData, model: Document_Model)
{
- val syntax = model.session.current_syntax()
+ val syntax = model.session.recent_syntax()
def make_tree(offset: Text.Offset, entry: Structure.Entry): List[DefaultMutableTreeNode] =
entry match {
--- a/src/Tools/jEdit/src/token_markup.scala Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/jEdit/src/token_markup.scala Thu Mar 15 22:21:28 2012 +0100
@@ -178,7 +178,7 @@
{
val (styled_tokens, context1) =
if (line_ctxt.isDefined && Isabelle.session.is_ready) {
- val syntax = Isabelle.session.current_syntax()
+ val syntax = Isabelle.session.recent_syntax()
val (tokens, ctxt1) = syntax.scan_context(line, line_ctxt.get)
val styled_tokens =
tokens.map(tok => (Isabelle_Rendering.token_markup(syntax, tok), tok))
--- a/src/Tools/quickcheck.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/quickcheck.ML Thu Mar 15 22:21:28 2012 +0100
@@ -522,12 +522,12 @@
val parse_arg =
Parse.name --
- (Scan.optional (Parse.$$$ "=" |--
+ (Scan.optional (@{keyword "="} |--
(((Parse.name || Parse.float_number) >> single) ||
- (Parse.$$$ "[" |-- Parse.list1 Parse.name --| Parse.$$$ "]"))) ["true"]);
+ (@{keyword "["} |-- Parse.list1 Parse.name --| @{keyword "]"}))) ["true"]);
val parse_args =
- Parse.$$$ "[" |-- Parse.list1 parse_arg --| Parse.$$$ "]" || Scan.succeed [];
+ @{keyword "["} |-- Parse.list1 parse_arg --| @{keyword "]"} || Scan.succeed [];
val _ =
Outer_Syntax.command quickcheck_paramsN "set parameters for random testing" Keyword.thy_decl
--- a/src/Tools/value.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/Tools/value.ML Thu Mar 15 22:21:28 2012 +0100
@@ -57,10 +57,10 @@
in Pretty.writeln p end;
val opt_modes =
- Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.xname --| Parse.$$$ ")")) [];
+ Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.xname --| @{keyword ")"})) [];
val opt_evaluator =
- Scan.option (Parse.$$$ "[" |-- Parse.xname --| Parse.$$$ "]")
+ Scan.option (@{keyword "["} |-- Parse.xname --| @{keyword "]"})
val _ =
Outer_Syntax.improper_command "value" "evaluate and print term" Keyword.diag
--- a/src/ZF/Datatype_ZF.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/ZF/Datatype_ZF.thy Thu Mar 15 22:21:28 2012 +0100
@@ -7,6 +7,7 @@
theory Datatype_ZF
imports Inductive_ZF Univ QUniv
+keywords "datatype" "codatatype" :: thy_decl
uses "Tools/datatype_package.ML"
begin
--- a/src/ZF/Inductive_ZF.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/ZF/Inductive_ZF.thy Thu Mar 15 22:21:28 2012 +0100
@@ -13,6 +13,11 @@
theory Inductive_ZF
imports Fixedpt QPair Nat_ZF
+keywords
+ "inductive" "coinductive" "rep_datatype" "primrec" :: thy_decl and
+ "inductive_cases" :: thy_script and
+ "domains" "intros" "monos" "con_defs" "type_intros" "type_elims"
+ "elimination" "induction" "case_eqns" "recursor_eqns"
uses
("ind_syntax.ML")
("Tools/cartprod.ML")
--- a/src/ZF/Tools/datatype_package.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/ZF/Tools/datatype_package.ML Thu Mar 15 22:21:28 2012 +0100
@@ -427,15 +427,15 @@
#1 o add_datatype (dom, map fst dts) (map snd dts) (monos, type_intrs, type_elims);
val con_decl =
- Parse.name -- Scan.optional (Parse.$$$ "(" |-- Parse.list1 Parse.term --| Parse.$$$ ")") [] --
+ Parse.name -- Scan.optional (@{keyword "("} |-- Parse.list1 Parse.term --| @{keyword ")"}) [] --
Parse.opt_mixfix >> Parse.triple1;
val datatype_decl =
- (Scan.optional ((Parse.$$$ "\<subseteq>" || Parse.$$$ "<=") |-- Parse.!!! Parse.term) "") --
- Parse.and_list1 (Parse.term -- (Parse.$$$ "=" |-- Parse.enum1 "|" con_decl)) --
- Scan.optional (Parse.$$$ "monos" |-- Parse.!!! Parse_Spec.xthms1) [] --
- Scan.optional (Parse.$$$ "type_intros" |-- Parse.!!! Parse_Spec.xthms1) [] --
- Scan.optional (Parse.$$$ "type_elims" |-- Parse.!!! Parse_Spec.xthms1) []
+ (Scan.optional ((@{keyword "\<subseteq>"} || @{keyword "<="}) |-- Parse.!!! Parse.term) "") --
+ Parse.and_list1 (Parse.term -- (@{keyword "="} |-- Parse.enum1 "|" con_decl)) --
+ Scan.optional (@{keyword "monos"} |-- Parse.!!! Parse_Spec.xthms1) [] --
+ Scan.optional (@{keyword "type_intros"} |-- Parse.!!! Parse_Spec.xthms1) [] --
+ Scan.optional (@{keyword "type_elims"} |-- Parse.!!! Parse_Spec.xthms1) []
>> (Toplevel.theory o mk_datatype);
val coind_prefix = if coind then "co" else "";
--- a/src/ZF/Tools/induct_tacs.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/ZF/Tools/induct_tacs.ML Thu Mar 15 22:21:28 2012 +0100
@@ -188,13 +188,11 @@
(* outer syntax *)
-val _ = List.app Keyword.keyword ["elimination", "induction", "case_eqns", "recursor_eqns"];
-
val rep_datatype_decl =
- (Parse.$$$ "elimination" |-- Parse.!!! Parse_Spec.xthm) --
- (Parse.$$$ "induction" |-- Parse.!!! Parse_Spec.xthm) --
- (Parse.$$$ "case_eqns" |-- Parse.!!! Parse_Spec.xthms1) --
- Scan.optional (Parse.$$$ "recursor_eqns" |-- Parse.!!! Parse_Spec.xthms1) []
+ (@{keyword "elimination"} |-- Parse.!!! Parse_Spec.xthm) --
+ (@{keyword "induction"} |-- Parse.!!! Parse_Spec.xthm) --
+ (@{keyword "case_eqns"} |-- Parse.!!! Parse_Spec.xthms1) --
+ Scan.optional (@{keyword "recursor_eqns"} |-- Parse.!!! Parse_Spec.xthms1) []
>> (fn (((x, y), z), w) => rep_datatype x y z w);
val _ =
--- a/src/ZF/Tools/inductive_package.ML Thu Mar 15 17:38:05 2012 +0000
+++ b/src/ZF/Tools/inductive_package.ML Thu Mar 15 22:21:28 2012 +0100
@@ -576,21 +576,18 @@
(* outer syntax *)
-val _ = List.app Keyword.keyword
- ["domains", "intros", "monos", "con_defs", "type_intros", "type_elims"];
-
fun mk_ind (((((doms, intrs), monos), con_defs), type_intrs), type_elims) =
#1 o add_inductive doms (map Parse.triple_swap intrs) (monos, con_defs, type_intrs, type_elims);
val ind_decl =
- (Parse.$$$ "domains" |-- Parse.!!! (Parse.enum1 "+" Parse.term --
- ((Parse.$$$ "\<subseteq>" || Parse.$$$ "<=") |-- Parse.term))) --
- (Parse.$$$ "intros" |--
+ (@{keyword "domains"} |-- Parse.!!! (Parse.enum1 "+" Parse.term --
+ ((@{keyword "\<subseteq>"} || @{keyword "<="}) |-- Parse.term))) --
+ (@{keyword "intros"} |--
Parse.!!! (Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- Parse.prop))) --
- Scan.optional (Parse.$$$ "monos" |-- Parse.!!! Parse_Spec.xthms1) [] --
- Scan.optional (Parse.$$$ "con_defs" |-- Parse.!!! Parse_Spec.xthms1) [] --
- Scan.optional (Parse.$$$ "type_intros" |-- Parse.!!! Parse_Spec.xthms1) [] --
- Scan.optional (Parse.$$$ "type_elims" |-- Parse.!!! Parse_Spec.xthms1) []
+ Scan.optional (@{keyword "monos"} |-- Parse.!!! Parse_Spec.xthms1) [] --
+ Scan.optional (@{keyword "con_defs"} |-- Parse.!!! Parse_Spec.xthms1) [] --
+ Scan.optional (@{keyword "type_intros"} |-- Parse.!!! Parse_Spec.xthms1) [] --
+ Scan.optional (@{keyword "type_elims"} |-- Parse.!!! Parse_Spec.xthms1) []
>> (Toplevel.theory o mk_ind);
val _ =
--- a/src/ZF/upair.thy Thu Mar 15 17:38:05 2012 +0000
+++ b/src/ZF/upair.thy Thu Mar 15 22:21:28 2012 +0100
@@ -11,8 +11,11 @@
header{*Unordered Pairs*}
-theory upair imports ZF
-uses "Tools/typechk.ML" begin
+theory upair
+imports ZF
+keywords "print_tcset" :: diag
+uses "Tools/typechk.ML"
+begin
setup TypeCheck.setup