--- a/NEWS Tue Aug 21 09:02:29 2012 +0200
+++ b/NEWS Wed Aug 22 23:23:48 2012 +0200
@@ -6,6 +6,9 @@
*** General ***
+* Command 'ML_file' evaluates ML text from a file directly within the
+theory, without any predeclaration via 'uses' in the theory header.
+
* Discontinued obsolete method fastsimp / tactic fast_simp_tac, which
is called fastforce / fast_force_tac already since Isabelle2011-1.
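
For example (a minimal sketch using the hypothetical names Foo and "foo.ML"), a theory
that previously had to predeclare the file in its header:

    theory Foo
    imports Main
    uses "foo.ML"
    begin
    end

can now load it directly within the theory body:

    theory Foo
    imports Main
    begin
    ML_file "foo.ML"
    end
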
--- a/doc-src/Classes/Thy/Setup.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/Classes/Thy/Setup.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,10 +1,10 @@
theory Setup
imports Main "~~/src/HOL/Library/Code_Integer"
-uses
- "../../antiquote_setup.ML"
- "../../more_antiquote.ML"
begin
+ML_file "../../antiquote_setup.ML"
+ML_file "../../more_antiquote.ML"
+
setup {*
Antiquote_Setup.setup #>
More_Antiquote.setup #>
--- a/doc-src/Codegen/Thy/Setup.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/Codegen/Thy/Setup.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,11 +4,11 @@
"~~/src/HOL/Library/Dlist"
"~~/src/HOL/Library/RBT"
"~~/src/HOL/Library/Mapping"
-uses
- "../../antiquote_setup.ML"
- "../../more_antiquote.ML"
begin
+ML_file "../../antiquote_setup.ML"
+ML_file "../../more_antiquote.ML"
+
setup {*
Antiquote_Setup.setup #>
More_Antiquote.setup #>
--- a/doc-src/IsarImplementation/Thy/Base.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/IsarImplementation/Thy/Base.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,8 +1,8 @@
theory Base
imports Main
-uses "../../antiquote_setup.ML"
begin
+ML_file "../../antiquote_setup.ML"
setup {* Antiquote_Setup.setup *}
end
--- a/doc-src/IsarRef/Thy/Base.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/IsarRef/Thy/Base.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,8 +1,9 @@
theory Base
imports Pure
-uses "../../antiquote_setup.ML"
begin
+ML_file "../../antiquote_setup.ML"
+
setup {*
Antiquote_Setup.setup #>
member (op =) (Session.id ()) "ZF" ? Pure_Thy.old_appl_syntax_setup
--- a/doc-src/IsarRef/Thy/Spec.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/IsarRef/Thy/Spec.thy Wed Aug 22 23:23:48 2012 +0200
@@ -960,7 +960,7 @@
text {*
\begin{matharray}{rcl}
- @{command_def "use"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+ @{command_def "ML_file"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
@{command_def "ML"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
@{command_def "ML_prf"} & : & @{text "proof \<rightarrow> proof"} \\
@{command_def "ML_val"} & : & @{text "any \<rightarrow>"} \\
@@ -971,7 +971,7 @@
\end{matharray}
@{rail "
- @@{command use} @{syntax name}
+ @@{command ML_file} @{syntax name}
;
(@@{command ML} | @@{command ML_prf} | @@{command ML_val} |
@@{command ML_command} | @@{command setup} | @@{command local_setup}) @{syntax text}
@@ -981,28 +981,22 @@
\begin{description}
- \item @{command "use"}~@{text "file"} reads and executes ML
- commands from @{text "file"}. The current theory context is passed
- down to the ML toplevel and may be modified, using @{ML
- "Context.>>"} or derived ML commands. The file name is checked with
- the @{keyword_ref "uses"} dependency declaration given in the theory
- header (see also \secref{sec:begin-thy}).
-
- Top-level ML bindings are stored within the (global or local) theory
- context.
+ \item @{command "ML_file"}~@{text "name"} reads and evaluates the
+ given ML file. The current theory context is passed down to the ML
+ toplevel and may be modified, using @{ML "Context.>>"} or derived ML
+ commands. Top-level ML bindings are stored within the (global or
+ local) theory context.
- \item @{command "ML"}~@{text "text"} is similar to @{command "use"},
- but executes ML commands directly from the given @{text "text"}.
+ \item @{command "ML"}~@{text "text"} is similar to @{command
+ "ML_file"}, but evaluates directly the given @{text "text"}.
Top-level ML bindings are stored within the (global or local) theory
context.
\item @{command "ML_prf"} is analogous to @{command "ML"} but works
- within a proof context.
-
- Top-level ML bindings are stored within the proof context in a
- purely sequential fashion, disregarding the nested proof structure.
- ML bindings introduced by @{command "ML_prf"} are discarded at the
- end of the proof.
+ within a proof context. Top-level ML bindings are stored within the
+ proof context in a purely sequential fashion, disregarding the
+ nested proof structure. ML bindings introduced by @{command
+ "ML_prf"} are discarded at the end of the proof.
\item @{command "ML_val"} and @{command "ML_command"} are diagnostic
versions of @{command "ML"}, which means that the context may not be
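
A minimal sketch of the behaviour described above for @{command "ML_file"} and
@{command "ML"} (toy values only): top-level ML bindings persist across commands
of the same theory, and the theory context can be updated via @{ML "Context.>>"}:

    ML {* val answer = 6 * 7 *}                                      (* binding stored in the theory context *)
    ML {* writeln ("answer = " ^ string_of_int answer) *}            (* still visible in a later command *)
    ML {* Context.>> (Context.map_theory (Sign.add_path "demo")) *}  (* update the underlying theory *)
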
--- a/doc-src/IsarRef/Thy/document/Spec.tex Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/IsarRef/Thy/document/Spec.tex Wed Aug 22 23:23:48 2012 +0200
@@ -1444,7 +1444,7 @@
%
\begin{isamarkuptext}%
\begin{matharray}{rcl}
- \indexdef{}{command}{use}\hypertarget{command.use}{\hyperlink{command.use}{\mbox{\isa{\isacommand{use}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
+ \indexdef{}{command}{ML\_file}\hypertarget{command.ML-file}{\hyperlink{command.ML-file}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}file}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
\indexdef{}{command}{ML}\hypertarget{command.ML}{\hyperlink{command.ML}{\mbox{\isa{\isacommand{ML}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
\indexdef{}{command}{ML\_prf}\hypertarget{command.ML-prf}{\hyperlink{command.ML-prf}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}prf}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}proof\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ proof{\isaliteral{22}{\isachardoublequote}}} \\
\indexdef{}{command}{ML\_val}\hypertarget{command.ML-val}{\hyperlink{command.ML-val}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}val}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}any\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}{\isaliteral{22}{\isachardoublequote}}} \\
@@ -1456,7 +1456,7 @@
\begin{railoutput}
\rail@begin{1}{}
-\rail@term{\hyperlink{command.use}{\mbox{\isa{\isacommand{use}}}}}[]
+\rail@term{\hyperlink{command.ML-file}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}file}}}}}[]
\rail@nont{\hyperlink{syntax.name}{\mbox{\isa{name}}}}[]
\rail@end
\rail@begin{6}{}
@@ -1490,27 +1490,20 @@
\begin{description}
- \item \hyperlink{command.use}{\mbox{\isa{\isacommand{use}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}file{\isaliteral{22}{\isachardoublequote}}} reads and executes ML
- commands from \isa{{\isaliteral{22}{\isachardoublequote}}file{\isaliteral{22}{\isachardoublequote}}}. The current theory context is passed
- down to the ML toplevel and may be modified, using \verb|Context.>>| or derived ML commands. The file name is checked with
- the \indexref{}{keyword}{uses}\hyperlink{keyword.uses}{\mbox{\isa{\isakeyword{uses}}}} dependency declaration given in the theory
- header (see also \secref{sec:begin-thy}).
-
- Top-level ML bindings are stored within the (global or local) theory
- context.
+ \item \hyperlink{command.ML-file}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}file}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}name{\isaliteral{22}{\isachardoublequote}}} reads and evaluates the
+ given ML file. The current theory context is passed down to the ML
+ toplevel and may be modified, using \verb|Context.>>| or derived ML
+ commands. Top-level ML bindings are stored within the (global or
+ local) theory context.
- \item \hyperlink{command.ML}{\mbox{\isa{\isacommand{ML}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}text{\isaliteral{22}{\isachardoublequote}}} is similar to \hyperlink{command.use}{\mbox{\isa{\isacommand{use}}}},
- but executes ML commands directly from the given \isa{{\isaliteral{22}{\isachardoublequote}}text{\isaliteral{22}{\isachardoublequote}}}.
+ \item \hyperlink{command.ML}{\mbox{\isa{\isacommand{ML}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}text{\isaliteral{22}{\isachardoublequote}}} is similar to \hyperlink{command.ML-file}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}file}}}}, but evaluates the given \isa{{\isaliteral{22}{\isachardoublequote}}text{\isaliteral{22}{\isachardoublequote}}} directly.
Top-level ML bindings are stored within the (global or local) theory
context.
\item \hyperlink{command.ML-prf}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}prf}}}} is analogous to \hyperlink{command.ML}{\mbox{\isa{\isacommand{ML}}}} but works
- within a proof context.
-
- Top-level ML bindings are stored within the proof context in a
- purely sequential fashion, disregarding the nested proof structure.
- ML bindings introduced by \hyperlink{command.ML-prf}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}prf}}}} are discarded at the
- end of the proof.
+ within a proof context. Top-level ML bindings are stored within the
+ proof context in a purely sequential fashion, disregarding the
+ nested proof structure. ML bindings introduced by \hyperlink{command.ML-prf}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}prf}}}} are discarded at the end of the proof.
\item \hyperlink{command.ML-val}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}val}}}} and \hyperlink{command.ML-command}{\mbox{\isa{\isacommand{ML{\isaliteral{5F}{\isacharunderscore}}command}}}} are diagnostic
versions of \hyperlink{command.ML}{\mbox{\isa{\isacommand{ML}}}}, which means that the context may not be
--- a/doc-src/System/Thy/Base.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/System/Thy/Base.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,9 +1,9 @@
theory Base
imports Pure
-uses "../../antiquote_setup.ML"
begin
-setup {* Antiquote_Setup.setup *}
+ML_file "../../antiquote_setup.ML"
+setup Antiquote_Setup.setup
declare [[thy_output_source]]
--- a/doc-src/TutorialI/Types/Setup.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/TutorialI/Types/Setup.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,11 +1,10 @@
theory Setup
imports Main
-uses
- "../../antiquote_setup.ML"
- "../../more_antiquote.ML"
begin
+ML_file "../../antiquote_setup.ML"
+ML_file "../../more_antiquote.ML"
+
setup {* Antiquote_Setup.setup #> More_Antiquote.setup *}
-
end
\ No newline at end of file
--- a/doc-src/antiquote_setup.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/doc-src/antiquote_setup.ML Wed Aug 22 23:23:48 2012 +0200
@@ -135,7 +135,7 @@
val thy_file_setup =
Thy_Output.antiquotation @{binding thy_file} (Scan.lift Args.name)
(fn {context = ctxt, ...} =>
- fn name => (Thy_Load.check_thy Path.current name; Thy_Output.output ctxt [Pretty.str name]));
+ fn name => (Thy_Load.check_thy [] Path.current name; Thy_Output.output ctxt [Pretty.str name]));
(* Isabelle/Isar entities (with index) *)
--- a/src/CTT/CTT.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/CTT/CTT.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,9 +7,9 @@
theory CTT
imports Pure
-uses "~~/src/Provers/typedsimp.ML" ("rew.ML")
begin
+ML_file "~~/src/Provers/typedsimp.ML"
setup Pure_Thy.old_appl_syntax_setup
typedecl i
@@ -460,7 +460,7 @@
fun pc_tac thms = DEPTH_SOLVE_1 o (step_tac thms)
*}
-use "rew.ML"
+ML_file "rew.ML"
subsection {* The elimination rules for fst/snd *}
--- a/src/FOL/FOL.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/FOL/FOL.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,15 +7,14 @@
theory FOL
imports IFOL
keywords "print_claset" "print_induct_rules" :: diag
-uses
- "~~/src/Provers/classical.ML"
- "~~/src/Provers/blast.ML"
- "~~/src/Provers/clasimp.ML"
- "~~/src/Tools/induct.ML"
- "~~/src/Tools/case_product.ML"
- ("simpdata.ML")
begin
+ML_file "~~/src/Provers/classical.ML"
+ML_file "~~/src/Provers/blast.ML"
+ML_file "~~/src/Provers/clasimp.ML"
+ML_file "~~/src/Tools/induct.ML"
+ML_file "~~/src/Tools/case_product.ML"
+
subsection {* The classical axiom *}
@@ -329,7 +328,7 @@
not_imp not_all not_ex cases_simp cla_simps_misc
-use "simpdata.ML"
+ML_file "simpdata.ML"
simproc_setup defined_Ex ("EX x. P(x)") = {* fn _ => Quantifier1.rearrange_ex *}
simproc_setup defined_All ("ALL x. P(x)") = {* fn _ => Quantifier1.rearrange_all *}
--- a/src/FOL/IFOL.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/FOL/IFOL.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,23 +6,21 @@
theory IFOL
imports Pure
-uses
- "~~/src/Tools/misc_legacy.ML"
- "~~/src/Provers/splitter.ML"
- "~~/src/Provers/hypsubst.ML"
- "~~/src/Tools/IsaPlanner/zipper.ML"
- "~~/src/Tools/IsaPlanner/isand.ML"
- "~~/src/Tools/IsaPlanner/rw_tools.ML"
- "~~/src/Tools/IsaPlanner/rw_inst.ML"
- "~~/src/Tools/eqsubst.ML"
- "~~/src/Provers/quantifier1.ML"
- "~~/src/Tools/intuitionistic.ML"
- "~~/src/Tools/project_rule.ML"
- "~~/src/Tools/atomize_elim.ML"
- ("fologic.ML")
- ("intprover.ML")
begin
+ML_file "~~/src/Tools/misc_legacy.ML"
+ML_file "~~/src/Provers/splitter.ML"
+ML_file "~~/src/Provers/hypsubst.ML"
+ML_file "~~/src/Tools/IsaPlanner/zipper.ML"
+ML_file "~~/src/Tools/IsaPlanner/isand.ML"
+ML_file "~~/src/Tools/IsaPlanner/rw_tools.ML"
+ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML"
+ML_file "~~/src/Tools/eqsubst.ML"
+ML_file "~~/src/Provers/quantifier1.ML"
+ML_file "~~/src/Tools/intuitionistic.ML"
+ML_file "~~/src/Tools/project_rule.ML"
+ML_file "~~/src/Tools/atomize_elim.ML"
+
subsection {* Syntax and axiomatic basis *}
@@ -575,7 +573,7 @@
)
*}
-use "fologic.ML"
+ML_file "fologic.ML"
lemma thin_refl: "[|x=x; PROP W|] ==> PROP W" .
@@ -597,7 +595,7 @@
*}
setup hypsubst_setup
-use "intprover.ML"
+ML_file "intprover.ML"
subsection {* Intuitionistic Reasoning *}
--- a/src/FOLP/FOLP.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/FOLP/FOLP.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,8 +7,6 @@
theory FOLP
imports IFOLP
-uses
- ("classical.ML") ("simp.ML") ("simpdata.ML")
begin
axiomatization cla :: "[p=>p]=>p"
@@ -99,8 +97,8 @@
apply assumption
done
-use "classical.ML" (* Patched 'cos matching won't instantiate proof *)
-use "simp.ML" (* Patched 'cos matching won't instantiate proof *)
+ML_file "classical.ML" (* Patched because matching won't instantiate proof *)
+ML_file "simp.ML" (* Patched because matching won't instantiate proof *)
ML {*
structure Cla = Classical
@@ -139,6 +137,6 @@
apply (tactic {* ALLGOALS (Cla.fast_tac FOLP_cs) *})
done
-use "simpdata.ML"
+ML_file "simpdata.ML"
end
--- a/src/FOLP/IFOLP.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/FOLP/IFOLP.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,9 +7,10 @@
theory IFOLP
imports Pure
-uses "~~/src/Tools/misc_legacy.ML" ("hypsubst.ML") ("intprover.ML")
begin
+ML_file "~~/src/Tools/misc_legacy.ML"
+
setup Pure_Thy.old_appl_syntax_setup
classes "term"
@@ -587,7 +588,7 @@
lemma thin_refl: "!!X. [|p:x=x; PROP W|] ==> PROP W" .
-use "hypsubst.ML"
+ML_file "hypsubst.ML"
ML {*
structure Hypsubst = Hypsubst
@@ -609,7 +610,7 @@
open Hypsubst;
*}
-use "intprover.ML"
+ML_file "intprover.ML"
(*** Rewrite rules ***)
--- a/src/HOL/ATP.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/ATP.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,17 +7,15 @@
theory ATP
imports Meson
-uses "Tools/lambda_lifting.ML"
- "Tools/monomorph.ML"
- "Tools/ATP/atp_util.ML"
- "Tools/ATP/atp_problem.ML"
- "Tools/ATP/atp_proof.ML"
- "Tools/ATP/atp_proof_redirect.ML"
- ("Tools/ATP/atp_problem_generate.ML")
- ("Tools/ATP/atp_proof_reconstruct.ML")
- ("Tools/ATP/atp_systems.ML")
begin
+ML_file "Tools/lambda_lifting.ML"
+ML_file "Tools/monomorph.ML"
+ML_file "Tools/ATP/atp_util.ML"
+ML_file "Tools/ATP/atp_problem.ML"
+ML_file "Tools/ATP/atp_proof.ML"
+ML_file "Tools/ATP/atp_proof_redirect.ML"
+
subsection {* Higher-order reasoning helpers *}
definition fFalse :: bool where [no_atp]:
@@ -132,9 +130,9 @@
subsection {* Setup *}
-use "Tools/ATP/atp_problem_generate.ML"
-use "Tools/ATP/atp_proof_reconstruct.ML"
-use "Tools/ATP/atp_systems.ML"
+ML_file "Tools/ATP/atp_problem_generate.ML"
+ML_file "Tools/ATP/atp_proof_reconstruct.ML"
+ML_file "Tools/ATP/atp_systems.ML"
setup ATP_Systems.setup
--- a/src/HOL/Algebra/Ring.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Algebra/Ring.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,7 +5,6 @@
theory Ring
imports FiniteProduct
-uses ("ringsimp.ML")
begin
section {* The Algebraic Hierarchy of Rings *}
@@ -389,7 +388,7 @@
text {* Setup algebra method:
compute distributive normal form in locale contexts *}
-use "ringsimp.ML"
+ML_file "ringsimp.ML"
setup Algebra.attrib_setup
--- a/src/HOL/Boogie/Boogie.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Boogie/Boogie.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,11 +8,6 @@
imports Word
keywords
"boogie_open" "boogie_end" :: thy_decl and "boogie_vc" :: thy_goal and "boogie_status" :: diag
-uses
- ("Tools/boogie_vcs.ML")
- ("Tools/boogie_loader.ML")
- ("Tools/boogie_tactics.ML")
- ("Tools/boogie_commands.ML")
begin
text {*
@@ -95,12 +90,12 @@
*}
setup Boogie_Axioms.setup
-use "Tools/boogie_vcs.ML"
-use "Tools/boogie_loader.ML"
-use "Tools/boogie_tactics.ML"
+ML_file "Tools/boogie_vcs.ML"
+ML_file "Tools/boogie_loader.ML"
+ML_file "Tools/boogie_tactics.ML"
setup Boogie_Tactics.setup
-use "Tools/boogie_commands.ML"
+ML_file "Tools/boogie_commands.ML"
setup Boogie_Commands.setup
end
--- a/src/HOL/Code_Evaluation.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Code_Evaluation.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory Code_Evaluation
imports Plain Typerep Code_Numeral Predicate
-uses ("Tools/code_evaluation.ML")
begin
subsection {* Term representation *}
@@ -73,7 +72,7 @@
shows "x \<equiv> y"
using assms by simp
-use "Tools/code_evaluation.ML"
+ML_file "Tools/code_evaluation.ML"
code_reserved Eval Code_Evaluation
--- a/src/HOL/Datatype.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Datatype.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,10 +8,6 @@
theory Datatype
imports Product_Type Sum_Type Nat
keywords "datatype" :: thy_decl
-uses
- ("Tools/Datatype/datatype.ML")
- ("Tools/inductive_realizer.ML")
- ("Tools/Datatype/datatype_realizer.ML")
begin
subsection {* The datatype universe *}
@@ -517,12 +513,12 @@
hide_type (open) node item
hide_const (open) Push Node Atom Leaf Numb Lim Split Case
-use "Tools/Datatype/datatype.ML"
+ML_file "Tools/Datatype/datatype.ML"
-use "Tools/inductive_realizer.ML"
+ML_file "Tools/inductive_realizer.ML"
setup InductiveRealizer.setup
-use "Tools/Datatype/datatype_realizer.ML"
+ML_file "Tools/Datatype/datatype_realizer.ML"
setup Datatype_Realizer.setup
end
--- a/src/HOL/Decision_Procs/Commutative_Ring.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Decision_Procs/Commutative_Ring.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,7 +7,6 @@
theory Commutative_Ring
imports Main Parity
-uses ("commutative_ring_tac.ML")
begin
text {* Syntax of multivariate polynomials (pol) and polynomial expressions. *}
@@ -313,7 +312,7 @@
qed
-use "commutative_ring_tac.ML"
+ML_file "commutative_ring_tac.ML"
method_setup comm_ring = {*
Scan.succeed (SIMPLE_METHOD' o Commutative_Ring_Tac.tac)
--- a/src/HOL/Decision_Procs/Cooper.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Decision_Procs/Cooper.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,7 +4,6 @@
theory Cooper
imports Complex_Main "~~/src/HOL/Library/Efficient_Nat" "~~/src/HOL/Library/Old_Recdef"
-uses ("cooper_tac.ML")
begin
(* Periodicity of dvd *)
@@ -2004,7 +2003,7 @@
end;
*}
-use "cooper_tac.ML"
+ML_file "cooper_tac.ML"
method_setup cooper = {*
Args.mode "no_quantify" >>
--- a/src/HOL/Decision_Procs/Dense_Linear_Order.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Decision_Procs/Dense_Linear_Order.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,13 +7,11 @@
theory Dense_Linear_Order
imports Main
-uses
- "langford_data.ML"
- "ferrante_rackoff_data.ML"
- ("langford.ML")
- ("ferrante_rackoff.ML")
begin
+ML_file "langford_data.ML"
+ML_file "ferrante_rackoff_data.ML"
+
setup {* Langford_Data.setup #> Ferrante_Rackoff_Data.setup *}
context linorder
@@ -290,7 +288,7 @@
lemmas dnf_simps[no_atp] = weak_dnf_simps nnf_simps ex_distrib
-use "langford.ML"
+ML_file "langford.ML"
method_setup dlo = {*
Scan.succeed (SIMPLE_METHOD' o LangfordQE.dlo_tac)
*} "Langford's algorithm for quantifier elimination in dense linear orders"
@@ -540,7 +538,7 @@
end
-use "ferrante_rackoff.ML"
+ML_file "ferrante_rackoff.ML"
method_setup ferrack = {*
Scan.succeed (SIMPLE_METHOD' o FerranteRackoff.dlo_tac)
--- a/src/HOL/Decision_Procs/Ferrack.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Decision_Procs/Ferrack.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,7 +5,6 @@
theory Ferrack
imports Complex_Main Dense_Linear_Order DP_Library
"~~/src/HOL/Library/Efficient_Nat" "~~/src/HOL/Library/Old_Recdef"
-uses ("ferrack_tac.ML")
begin
section {* Quantifier elimination for @{text "\<real> (0, 1, +, <)"} *}
@@ -2003,7 +2002,7 @@
end;
*}
-use "ferrack_tac.ML"
+ML_file "ferrack_tac.ML"
method_setup rferrack = {*
Args.mode "no_quantify" >>
--- a/src/HOL/Decision_Procs/MIR.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Decision_Procs/MIR.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,7 +5,6 @@
theory MIR
imports Complex_Main Dense_Linear_Order DP_Library
"~~/src/HOL/Library/Efficient_Nat" "~~/src/HOL/Library/Old_Recdef"
-uses ("mir_tac.ML")
begin
section {* Quantifier elimination for @{text "\<real> (0, 1, +, floor, <)"} *}
@@ -5634,7 +5633,7 @@
end;
*}
-use "mir_tac.ML"
+ML_file "mir_tac.ML"
method_setup mir = {*
Args.mode "no_quantify" >>
--- a/src/HOL/Divides.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Divides.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,9 +7,10 @@
theory Divides
imports Nat_Transfer
-uses "~~/src/Provers/Arith/cancel_div_mod.ML"
begin
+ML_file "~~/src/Provers/Arith/cancel_div_mod.ML"
+
subsection {* Syntactic division operations *}
class div = dvd +
--- a/src/HOL/Extraction.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Extraction.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory Extraction
imports Option
-uses "Tools/rewrite_hol_proof.ML"
begin
+ML_file "Tools/rewrite_hol_proof.ML"
+
subsection {* Setup *}
setup {*
--- a/src/HOL/Finite_Set.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Finite_Set.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,7 +7,6 @@
theory Finite_Set
imports Option Power
-uses ("Tools/set_comprehension_pointfree.ML")
begin
subsection {* Predicate for finite sets *}
@@ -18,7 +17,7 @@
| insertI [simp, intro!]: "finite A \<Longrightarrow> finite (insert a A)"
(* FIXME: move to Set theory *)
-use "Tools/set_comprehension_pointfree.ML"
+ML_file "Tools/set_comprehension_pointfree.ML"
simproc_setup finite_Collect ("finite (Collect P)") =
{* K Set_Comprehension_Pointfree.simproc *}
--- a/src/HOL/Fun.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Fun.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,7 +8,6 @@
theory Fun
imports Complete_Lattices
keywords "enriched_type" :: thy_goal
-uses ("Tools/enriched_type.ML")
begin
lemma apply_inverse:
@@ -801,7 +800,7 @@
subsubsection {* Functorial structure of types *}
-use "Tools/enriched_type.ML"
+ML_file "Tools/enriched_type.ML"
enriched_type map_fun: map_fun
by (simp_all add: fun_eq_iff)
--- a/src/HOL/FunDef.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/FunDef.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,27 +7,11 @@
theory FunDef
imports Partial_Function Wellfounded
keywords "function" "termination" :: thy_goal and "fun" :: thy_decl
-uses
- "Tools/prop_logic.ML"
- "Tools/sat_solver.ML"
- ("Tools/Function/function_common.ML")
- ("Tools/Function/context_tree.ML")
- ("Tools/Function/function_core.ML")
- ("Tools/Function/sum_tree.ML")
- ("Tools/Function/mutual.ML")
- ("Tools/Function/pattern_split.ML")
- ("Tools/Function/function.ML")
- ("Tools/Function/relation.ML")
- ("Tools/Function/measure_functions.ML")
- ("Tools/Function/lexicographic_order.ML")
- ("Tools/Function/pat_completeness.ML")
- ("Tools/Function/fun.ML")
- ("Tools/Function/induction_schema.ML")
- ("Tools/Function/termination.ML")
- ("Tools/Function/scnp_solve.ML")
- ("Tools/Function/scnp_reconstruct.ML")
begin
+ML_file "Tools/prop_logic.ML"
+ML_file "Tools/sat_solver.ML"
+
subsection {* Definitions with default value. *}
definition
@@ -101,27 +85,27 @@
"wf R \<Longrightarrow> wfP (in_rel R)"
by (simp add: wfP_def)
-use "Tools/Function/function_common.ML"
-use "Tools/Function/context_tree.ML"
-use "Tools/Function/function_core.ML"
-use "Tools/Function/sum_tree.ML"
-use "Tools/Function/mutual.ML"
-use "Tools/Function/pattern_split.ML"
-use "Tools/Function/relation.ML"
+ML_file "Tools/Function/function_common.ML"
+ML_file "Tools/Function/context_tree.ML"
+ML_file "Tools/Function/function_core.ML"
+ML_file "Tools/Function/sum_tree.ML"
+ML_file "Tools/Function/mutual.ML"
+ML_file "Tools/Function/pattern_split.ML"
+ML_file "Tools/Function/relation.ML"
method_setup relation = {*
Args.term >> (fn t => fn ctxt => SIMPLE_METHOD' (Function_Relation.relation_infer_tac ctxt t))
*} "prove termination using a user-specified wellfounded relation"
-use "Tools/Function/function.ML"
-use "Tools/Function/pat_completeness.ML"
+ML_file "Tools/Function/function.ML"
+ML_file "Tools/Function/pat_completeness.ML"
method_setup pat_completeness = {*
Scan.succeed (SIMPLE_METHOD' o Pat_Completeness.pat_completeness_tac)
*} "prove completeness of datatype patterns"
-use "Tools/Function/fun.ML"
-use "Tools/Function/induction_schema.ML"
+ML_file "Tools/Function/fun.ML"
+ML_file "Tools/Function/induction_schema.ML"
method_setup induction_schema = {*
Scan.succeed (RAW_METHOD o Induction_Schema.induction_schema_tac)
@@ -137,7 +121,7 @@
inductive is_measure :: "('a \<Rightarrow> nat) \<Rightarrow> bool"
where is_measure_trivial: "is_measure f"
-use "Tools/Function/measure_functions.ML"
+ML_file "Tools/Function/measure_functions.ML"
setup MeasureFunctions.setup
lemma measure_size[measure_function]: "is_measure size"
@@ -148,7 +132,7 @@
lemma measure_snd[measure_function]: "is_measure f \<Longrightarrow> is_measure (\<lambda>p. f (snd p))"
by (rule is_measure_trivial)
-use "Tools/Function/lexicographic_order.ML"
+ML_file "Tools/Function/lexicographic_order.ML"
method_setup lexicographic_order = {*
Method.sections clasimp_modifiers >>
@@ -323,9 +307,9 @@
subsection {* Tool setup *}
-use "Tools/Function/termination.ML"
-use "Tools/Function/scnp_solve.ML"
-use "Tools/Function/scnp_reconstruct.ML"
+ML_file "Tools/Function/termination.ML"
+ML_file "Tools/Function/scnp_solve.ML"
+ML_file "Tools/Function/scnp_reconstruct.ML"
setup {* ScnpReconstruct.setup *}
--- a/src/HOL/Groebner_Basis.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Groebner_Basis.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,8 +6,6 @@
theory Groebner_Basis
imports Semiring_Normalization
-uses
- ("Tools/groebner.ML")
begin
subsection {* Groebner Bases *}
@@ -41,7 +39,7 @@
setup Algebra_Simplification.setup
-use "Tools/groebner.ML"
+ML_file "Tools/groebner.ML"
method_setup algebra = {*
let
--- a/src/HOL/Groups.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Groups.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory Groups
imports Orderings
-uses ("Tools/group_cancel.ML")
begin
subsection {* Fact collections *}
@@ -830,7 +829,7 @@
end
-use "Tools/group_cancel.ML"
+ML_file "Tools/group_cancel.ML"
simproc_setup group_cancel_add ("a + b::'a::ab_group_add") =
{* fn phi => fn ss => try Group_Cancel.cancel_add_conv *}
--- a/src/HOL/HOL.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/HOL.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,37 +10,32 @@
"try" "solve_direct" "quickcheck"
"print_coercions" "print_coercion_maps" "print_claset" "print_induct_rules" :: diag and
"quickcheck_params" :: thy_decl
-uses
- ("Tools/hologic.ML")
- "~~/src/Tools/misc_legacy.ML"
- "~~/src/Tools/try.ML"
- "~~/src/Tools/quickcheck.ML"
- "~~/src/Tools/solve_direct.ML"
- "~~/src/Tools/IsaPlanner/zipper.ML"
- "~~/src/Tools/IsaPlanner/isand.ML"
- "~~/src/Tools/IsaPlanner/rw_tools.ML"
- "~~/src/Tools/IsaPlanner/rw_inst.ML"
- "~~/src/Provers/hypsubst.ML"
- "~~/src/Provers/splitter.ML"
- "~~/src/Provers/classical.ML"
- "~~/src/Provers/blast.ML"
- "~~/src/Provers/clasimp.ML"
- "~~/src/Tools/coherent.ML"
- "~~/src/Tools/eqsubst.ML"
- "~~/src/Provers/quantifier1.ML"
- ("Tools/simpdata.ML")
- "~~/src/Tools/atomize_elim.ML"
- "~~/src/Tools/induct.ML"
- "~~/src/Tools/cong_tac.ML"
- "~~/src/Tools/intuitionistic.ML"
- "~~/src/Tools/project_rule.ML"
- ("~~/src/Tools/induction.ML")
- ("~~/src/Tools/induct_tacs.ML")
- ("Tools/cnf_funcs.ML")
- "~~/src/Tools/subtyping.ML"
- "~~/src/Tools/case_product.ML"
begin
+ML_file "~~/src/Tools/misc_legacy.ML"
+ML_file "~~/src/Tools/try.ML"
+ML_file "~~/src/Tools/quickcheck.ML"
+ML_file "~~/src/Tools/solve_direct.ML"
+ML_file "~~/src/Tools/IsaPlanner/zipper.ML"
+ML_file "~~/src/Tools/IsaPlanner/isand.ML"
+ML_file "~~/src/Tools/IsaPlanner/rw_tools.ML"
+ML_file "~~/src/Tools/IsaPlanner/rw_inst.ML"
+ML_file "~~/src/Provers/hypsubst.ML"
+ML_file "~~/src/Provers/splitter.ML"
+ML_file "~~/src/Provers/classical.ML"
+ML_file "~~/src/Provers/blast.ML"
+ML_file "~~/src/Provers/clasimp.ML"
+ML_file "~~/src/Tools/coherent.ML"
+ML_file "~~/src/Tools/eqsubst.ML"
+ML_file "~~/src/Provers/quantifier1.ML"
+ML_file "~~/src/Tools/atomize_elim.ML"
+ML_file "~~/src/Tools/induct.ML"
+ML_file "~~/src/Tools/cong_tac.ML"
+ML_file "~~/src/Tools/intuitionistic.ML"
+ML_file "~~/src/Tools/project_rule.ML"
+ML_file "~~/src/Tools/subtyping.ML"
+ML_file "~~/src/Tools/case_product.ML"
+
setup {*
Intuitionistic.method_setup @{binding iprover}
#> Quickcheck.setup
@@ -702,7 +697,7 @@
and [sym] = sym not_sym
and [Pure.elim?] = iffD1 iffD2 impE
-use "Tools/hologic.ML"
+ML_file "Tools/hologic.ML"
subsubsection {* Atomizing meta-level connectives *}
@@ -1193,7 +1188,7 @@
"(\<exists>x y. P x y) = (\<exists>y x. P x y)"
by blast
-use "Tools/simpdata.ML"
+ML_file "Tools/simpdata.ML"
ML {* open Simpdata *}
setup {* Simplifier.map_simpset_global (K HOL_basic_ss) *}
@@ -1481,7 +1476,7 @@
)
*}
-use "~~/src/Tools/induction.ML"
+ML_file "~~/src/Tools/induction.ML"
setup {*
Induct.setup #> Induction.setup #>
@@ -1563,7 +1558,7 @@
hide_const induct_forall induct_implies induct_equal induct_conj induct_true induct_false
-use "~~/src/Tools/induct_tacs.ML"
+ML_file "~~/src/Tools/induct_tacs.ML"
setup Induct_Tacs.setup
@@ -1701,7 +1696,7 @@
val trans = @{thm trans}
*}
-use "Tools/cnf_funcs.ML"
+ML_file "Tools/cnf_funcs.ML"
subsection {* Code generator setup *}
--- a/src/HOL/HOLCF/Cpodef.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/HOLCF/Cpodef.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,7 +7,6 @@
theory Cpodef
imports Adm
keywords "pcpodef" "cpodef" :: thy_goal
-uses ("Tools/cpodef.ML")
begin
subsection {* Proving a subtype is a partial order *}
@@ -268,6 +267,6 @@
subsection {* HOLCF type definition package *}
-use "Tools/cpodef.ML"
+ML_file "Tools/cpodef.ML"
end
--- a/src/HOL/HOLCF/Domain.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/HOLCF/Domain.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,11 +9,6 @@
keywords
"domaindef" :: thy_decl and "lazy" "unsafe" and
"domain_isomorphism" "domain" :: thy_decl
-uses
- ("Tools/domaindef.ML")
- ("Tools/Domain/domain_isomorphism.ML")
- ("Tools/Domain/domain_axioms.ML")
- ("Tools/Domain/domain.ML")
begin
default_sort "domain"
@@ -155,7 +150,7 @@
, (@{const_name liftprj}, SOME @{typ "udom u \<rightarrow> 'a::predomain u"}) ]
*}
-use "Tools/domaindef.ML"
+ML_file "Tools/domaindef.ML"
subsection {* Isomorphic deflations *}
@@ -321,9 +316,9 @@
subsection {* Setting up the domain package *}
-use "Tools/Domain/domain_isomorphism.ML"
-use "Tools/Domain/domain_axioms.ML"
-use "Tools/Domain/domain.ML"
+ML_file "Tools/Domain/domain_isomorphism.ML"
+ML_file "Tools/Domain/domain_axioms.ML"
+ML_file "Tools/Domain/domain.ML"
setup Domain_Isomorphism.setup
--- a/src/HOL/HOLCF/Domain_Aux.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/HOLCF/Domain_Aux.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,12 +6,6 @@
theory Domain_Aux
imports Map_Functions Fixrec
-uses
- ("Tools/Domain/domain_take_proofs.ML")
- ("Tools/cont_consts.ML")
- ("Tools/cont_proc.ML")
- ("Tools/Domain/domain_constructors.ML")
- ("Tools/Domain/domain_induction.ML")
begin
subsection {* Continuous isomorphisms *}
@@ -350,11 +344,11 @@
subsection {* ML setup *}
-use "Tools/Domain/domain_take_proofs.ML"
-use "Tools/cont_consts.ML"
-use "Tools/cont_proc.ML"
-use "Tools/Domain/domain_constructors.ML"
-use "Tools/Domain/domain_induction.ML"
+ML_file "Tools/Domain/domain_take_proofs.ML"
+ML_file "Tools/cont_consts.ML"
+ML_file "Tools/cont_proc.ML"
+ML_file "Tools/Domain/domain_constructors.ML"
+ML_file "Tools/Domain/domain_induction.ML"
setup Domain_Take_Proofs.setup
--- a/src/HOL/HOLCF/Fixrec.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/HOLCF/Fixrec.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,9 +7,6 @@
theory Fixrec
imports Plain_HOLCF
keywords "fixrec" :: thy_decl
-uses
- ("Tools/holcf_library.ML")
- ("Tools/fixrec.ML")
begin
subsection {* Pattern-match monad *}
@@ -227,8 +224,8 @@
subsection {* Initializing the fixrec package *}
-use "Tools/holcf_library.ML"
-use "Tools/fixrec.ML"
+ML_file "Tools/holcf_library.ML"
+ML_file "Tools/fixrec.ML"
method_setup fixrec_simp = {*
Scan.succeed (SIMPLE_METHOD' o Fixrec.fixrec_simp_tac)
--- a/src/HOL/HOLCF/IOA/ABP/Correctness.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/HOLCF/IOA/ABP/Correctness.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory Correctness
imports IOA Env Impl Impl_finite
-uses "Check.ML"
begin
+ML_file "Check.ML"
+
primrec reduce :: "'a list => 'a list"
where
reduce_Nil: "reduce [] = []"
--- a/src/HOL/Hilbert_Choice.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Hilbert_Choice.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,7 +8,6 @@
theory Hilbert_Choice
imports Nat Wellfounded Plain
keywords "specification" "ax_specification" :: thy_goal
-uses ("Tools/choice_specification.ML")
begin
subsection {* Hilbert's epsilon *}
@@ -649,6 +648,6 @@
lemma exE_some: "[| Ex P ; c == Eps P |] ==> P c"
by (simp only: someI_ex)
-use "Tools/choice_specification.ML"
+ML_file "Tools/choice_specification.ML"
end
--- a/src/HOL/Hoare/Hoare_Logic.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Hoare/Hoare_Logic.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,7 +10,6 @@
theory Hoare_Logic
imports Main
-uses ("hoare_syntax.ML") ("hoare_tac.ML")
begin
type_synonym 'a bexp = "'a set"
@@ -54,7 +53,7 @@
"_hoare" :: "['a assn,'a com,'a assn] => bool"
("{_} // _ // {_}" [0,55,0] 50)
-use "hoare_syntax.ML"
+ML_file "hoare_syntax.ML"
parse_translation {* [(@{syntax_const "_hoare_vars"}, Hoare_Syntax.hoare_vars_tr)] *}
print_translation {* [(@{const_syntax Valid}, Hoare_Syntax.spec_tr' @{syntax_const "_hoare"})] *}
@@ -94,7 +93,7 @@
by blast
lemmas AbortRule = SkipRule -- "dummy version"
-use "hoare_tac.ML"
+ML_file "hoare_tac.ML"
method_setup vcg = {*
Scan.succeed (fn ctxt => SIMPLE_METHOD' (hoare_tac ctxt (K all_tac))) *}
--- a/src/HOL/Hoare/Hoare_Logic_Abort.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Hoare/Hoare_Logic_Abort.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,7 +7,6 @@
theory Hoare_Logic_Abort
imports Main
-uses ("hoare_syntax.ML") ("hoare_tac.ML")
begin
type_synonym 'a bexp = "'a set"
@@ -56,7 +55,7 @@
"_hoare_abort" :: "['a assn,'a com,'a assn] => bool"
("{_} // _ // {_}" [0,55,0] 50)
-use "hoare_syntax.ML"
+ML_file "hoare_syntax.ML"
parse_translation {* [(@{syntax_const "_hoare_abort_vars"}, Hoare_Syntax.hoare_vars_tr)] *}
print_translation
{* [(@{const_syntax Valid}, Hoare_Syntax.spec_tr' @{syntax_const "_hoare_abort"})] *}
@@ -105,7 +104,7 @@
lemma Compl_Collect: "-(Collect b) = {x. ~(b x)}"
by blast
-use "hoare_tac.ML"
+ML_file "hoare_tac.ML"
method_setup vcg = {*
Scan.succeed (fn ctxt => SIMPLE_METHOD' (hoare_tac ctxt (K all_tac))) *}
--- a/src/HOL/Import/Import_Setup.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Import/Import_Setup.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,12 +7,11 @@
theory Import_Setup
imports "~~/src/HOL/Parity" "~~/src/HOL/Fact"
-keywords
- "import_type_map" :: thy_decl and "import_const_map" :: thy_decl and
- "import_file" :: thy_decl
-uses "import_data.ML" ("import_rule.ML")
+keywords "import_type_map" "import_const_map" "import_file" :: thy_decl
begin
+ML_file "import_data.ML"
+
lemma light_ex_imp_nonempty:
"P t \<Longrightarrow> \<exists>x. x \<in> Collect P"
by auto
@@ -27,7 +26,7 @@
"(\<And>x. f x = g x) \<Longrightarrow> f = g"
by auto
-use "import_rule.ML"
+ML_file "import_rule.ML"
end
--- a/src/HOL/Import/import_rule.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Import/import_rule.ML Wed Aug 22 23:23:48 2012 +0200
@@ -444,7 +444,8 @@
(thy, init_state) |> File.fold_lines process_line path |> fst
val _ = Outer_Syntax.command @{command_spec "import_file"}
- "Import a recorded proof file" (Parse.path >> process_file >> Toplevel.theory)
+ "Import a recorded proof file"
+ (Parse.path >> (fn name => Toplevel.theory (fn thy => process_file (Path.explode name) thy)))
end
--- a/src/HOL/Inductive.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Inductive.thy Wed Aug 22 23:23:48 2012 +0200
@@ -11,15 +11,6 @@
"inductive_cases" "inductive_simps" :: thy_script and "monos" and
"rep_datatype" :: thy_goal and
"primrec" :: thy_decl
-uses
- ("Tools/inductive.ML")
- ("Tools/Datatype/datatype_aux.ML")
- ("Tools/Datatype/datatype_prop.ML")
- ("Tools/Datatype/datatype_data.ML")
- ("Tools/Datatype/datatype_case.ML")
- ("Tools/Datatype/rep_datatype.ML")
- ("Tools/Datatype/datatype_codegen.ML")
- ("Tools/Datatype/primrec.ML")
begin
subsection {* Least and greatest fixed points *}
@@ -264,7 +255,7 @@
subset_refl imp_refl disj_mono conj_mono ex_mono all_mono if_bool_eq_conj
Collect_mono in_mono vimage_mono
-use "Tools/inductive.ML"
+ML_file "Tools/inductive.ML"
setup Inductive.setup
theorems [mono] =
@@ -278,13 +269,13 @@
text {* Package setup. *}
-use "Tools/Datatype/datatype_aux.ML"
-use "Tools/Datatype/datatype_prop.ML"
-use "Tools/Datatype/datatype_data.ML" setup Datatype_Data.setup
-use "Tools/Datatype/datatype_case.ML" setup Datatype_Case.setup
-use "Tools/Datatype/rep_datatype.ML"
-use "Tools/Datatype/datatype_codegen.ML" setup Datatype_Codegen.setup
-use "Tools/Datatype/primrec.ML"
+ML_file "Tools/Datatype/datatype_aux.ML"
+ML_file "Tools/Datatype/datatype_prop.ML"
+ML_file "Tools/Datatype/datatype_data.ML" setup Datatype_Data.setup
+ML_file "Tools/Datatype/datatype_case.ML" setup Datatype_Case.setup
+ML_file "Tools/Datatype/rep_datatype.ML"
+ML_file "Tools/Datatype/datatype_codegen.ML" setup Datatype_Codegen.setup
+ML_file "Tools/Datatype/primrec.ML"
text{* Lambda-abstractions with pattern matching: *}
--- a/src/HOL/Int.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Int.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,8 +7,6 @@
theory Int
imports Equiv_Relations Wellfounded Quotient
-uses
- ("Tools/int_arith.ML")
begin
subsection {* Definition of integers as a quotient type *}
@@ -719,7 +717,7 @@
of_nat_0 of_nat_1 of_nat_Suc of_nat_add of_nat_mult
of_int_0 of_int_1 of_int_add of_int_mult
-use "Tools/int_arith.ML"
+ML_file "Tools/int_arith.ML"
declaration {* K Int_Arith.setup *}
simproc_setup fast_arith ("(m::'a::linordered_idom) < n" |
--- a/src/HOL/Isar_Examples/Hoare.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Isar_Examples/Hoare.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,7 +8,6 @@
theory Hoare
imports Main
-uses ("~~/src/HOL/Hoare/hoare_tac.ML")
begin
subsection {* Abstract syntax and semantics *}
@@ -402,7 +401,7 @@
lemmas AbortRule = SkipRule -- "dummy version"
-use "~~/src/HOL/Hoare/hoare_tac.ML"
+ML_file "~~/src/HOL/Hoare/hoare_tac.ML"
method_setup hoare = {*
Scan.succeed (fn ctxt =>
--- a/src/HOL/Library/Code_Prolog.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Library/Code_Prolog.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory Code_Prolog
imports Main
-uses "~~/src/HOL/Tools/Predicate_Compile/code_prolog.ML"
begin
+ML_file "~~/src/HOL/Tools/Predicate_Compile/code_prolog.ML"
+
section {* Setup for Numerals *}
setup {* Predicate_Compile_Data.ignore_consts
--- a/src/HOL/Library/Old_Recdef.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Library/Old_Recdef.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,17 +10,6 @@
"recdef" "defer_recdef" :: thy_decl and
"recdef_tc" :: thy_goal and
"permissive" "congs" "hints"
-uses
- ("~~/src/HOL/Tools/TFL/casesplit.ML")
- ("~~/src/HOL/Tools/TFL/utils.ML")
- ("~~/src/HOL/Tools/TFL/usyntax.ML")
- ("~~/src/HOL/Tools/TFL/dcterm.ML")
- ("~~/src/HOL/Tools/TFL/thms.ML")
- ("~~/src/HOL/Tools/TFL/rules.ML")
- ("~~/src/HOL/Tools/TFL/thry.ML")
- ("~~/src/HOL/Tools/TFL/tfl.ML")
- ("~~/src/HOL/Tools/TFL/post.ML")
- ("~~/src/HOL/Tools/recdef.ML")
begin
subsection {* Lemmas for TFL *}
@@ -66,16 +55,16 @@
lemma tfl_exE: "\<exists>x. P x ==> \<forall>x. P x --> Q ==> Q"
by blast
-use "~~/src/HOL/Tools/TFL/casesplit.ML"
-use "~~/src/HOL/Tools/TFL/utils.ML"
-use "~~/src/HOL/Tools/TFL/usyntax.ML"
-use "~~/src/HOL/Tools/TFL/dcterm.ML"
-use "~~/src/HOL/Tools/TFL/thms.ML"
-use "~~/src/HOL/Tools/TFL/rules.ML"
-use "~~/src/HOL/Tools/TFL/thry.ML"
-use "~~/src/HOL/Tools/TFL/tfl.ML"
-use "~~/src/HOL/Tools/TFL/post.ML"
-use "~~/src/HOL/Tools/recdef.ML"
+ML_file "~~/src/HOL/Tools/TFL/casesplit.ML"
+ML_file "~~/src/HOL/Tools/TFL/utils.ML"
+ML_file "~~/src/HOL/Tools/TFL/usyntax.ML"
+ML_file "~~/src/HOL/Tools/TFL/dcterm.ML"
+ML_file "~~/src/HOL/Tools/TFL/thms.ML"
+ML_file "~~/src/HOL/Tools/TFL/rules.ML"
+ML_file "~~/src/HOL/Tools/TFL/thry.ML"
+ML_file "~~/src/HOL/Tools/TFL/tfl.ML"
+ML_file "~~/src/HOL/Tools/TFL/post.ML"
+ML_file "~~/src/HOL/Tools/recdef.ML"
setup Recdef.setup
subsection {* Rule setup *}
--- a/src/HOL/Library/Predicate_Compile_Quickcheck.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Library/Predicate_Compile_Quickcheck.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,9 +4,10 @@
theory Predicate_Compile_Quickcheck
imports Main Predicate_Compile_Alternative_Defs
-uses "../Tools/Predicate_Compile/predicate_compile_quickcheck.ML"
begin
+ML_file "../Tools/Predicate_Compile/predicate_compile_quickcheck.ML"
+
setup {* Predicate_Compile_Quickcheck.setup *}
end
\ No newline at end of file
--- a/src/HOL/Library/Reflection.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Library/Reflection.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,11 +6,11 @@
theory Reflection
imports Main
-uses
- "~~/src/HOL/Library/reify_data.ML"
- "~~/src/HOL/Library/reflection.ML"
begin
+ML_file "~~/src/HOL/Library/reify_data.ML"
+ML_file "~~/src/HOL/Library/reflection.ML"
+
setup {* Reify_Data.setup *}
method_setup reify = {*
--- a/src/HOL/Library/Sum_of_Squares.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Library/Sum_of_Squares.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,13 +8,13 @@
theory Sum_of_Squares
imports Complex_Main
-uses
- "positivstellensatz.ML"
- "Sum_of_Squares/sum_of_squares.ML"
- "Sum_of_Squares/positivstellensatz_tools.ML"
- "Sum_of_Squares/sos_wrapper.ML"
begin
+ML_file "positivstellensatz.ML"
+ML_file "Sum_of_Squares/sum_of_squares.ML"
+ML_file "Sum_of_Squares/positivstellensatz_tools.ML"
+ML_file "Sum_of_Squares/sos_wrapper.ML"
+
text {*
Proof method sos invocations:
\begin{itemize}
--- a/src/HOL/Lifting.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Lifting.thy Wed Aug 22 23:23:48 2012 +0200
@@ -11,12 +11,6 @@
"print_quotmaps" "print_quotients" :: diag and
"lift_definition" :: thy_goal and
"setup_lifting" :: thy_decl
-uses
- ("Tools/Lifting/lifting_util.ML")
- ("Tools/Lifting/lifting_info.ML")
- ("Tools/Lifting/lifting_term.ML")
- ("Tools/Lifting/lifting_def.ML")
- ("Tools/Lifting/lifting_setup.ML")
begin
subsection {* Function map *}
@@ -418,19 +412,19 @@
subsection {* ML setup *}
-use "Tools/Lifting/lifting_util.ML"
+ML_file "Tools/Lifting/lifting_util.ML"
-use "Tools/Lifting/lifting_info.ML"
+ML_file "Tools/Lifting/lifting_info.ML"
setup Lifting_Info.setup
declare fun_quotient[quot_map]
lemmas [reflexivity_rule] = reflp_equality reflp_Quotient_composition
-use "Tools/Lifting/lifting_term.ML"
+ML_file "Tools/Lifting/lifting_term.ML"
-use "Tools/Lifting/lifting_def.ML"
+ML_file "Tools/Lifting/lifting_def.ML"
-use "Tools/Lifting/lifting_setup.ML"
+ML_file "Tools/Lifting/lifting_setup.ML"
hide_const (open) invariant
--- a/src/HOL/List.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/List.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,6 @@
theory List
imports Plain Presburger Code_Numeral Quotient ATP
-uses
- ("Tools/list_code.ML")
- ("Tools/list_to_set_comprehension.ML")
begin
datatype 'a list =
@@ -485,7 +482,7 @@
*)
-use "Tools/list_to_set_comprehension.ML"
+ML_file "Tools/list_to_set_comprehension.ML"
simproc_setup list_to_set_comprehension ("set xs") = {* K List_to_Set_Comprehension.simproc *}
@@ -5522,7 +5519,7 @@
subsubsection {* Pretty lists *}
-use "Tools/list_code.ML"
+ML_file "Tools/list_code.ML"
code_type list
(SML "_ list")
--- a/src/HOL/Matrix_LP/ComputeFloat.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Matrix_LP/ComputeFloat.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory ComputeFloat
imports Complex_Main "~~/src/HOL/Library/Lattice_Algebras"
-uses "~~/src/Tools/float.ML" ("~~/src/HOL/Tools/float_arith.ML")
begin
+ML_file "~~/src/Tools/float.ML"
+
definition int_of_real :: "real \<Rightarrow> int"
where "int_of_real x = (SOME y. real y = x)"
@@ -238,6 +239,6 @@
lemmas arith = arith_simps rel_simps diff_nat_numeral nat_0
nat_neg_numeral powerarith floatarith not_false_eq_true not_true_eq_false
-use "~~/src/HOL/Tools/float_arith.ML"
+ML_file "~~/src/HOL/Tools/float_arith.ML"
end
--- a/src/HOL/Matrix_LP/Compute_Oracle/Compute_Oracle.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/Compute_Oracle.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,7 +5,15 @@
*)
theory Compute_Oracle imports HOL
-uses "am.ML" "am_compiler.ML" "am_interpreter.ML" "am_ghc.ML" "am_sml.ML" "report.ML" "compute.ML" "linker.ML"
begin
+ML_file "am.ML"
+ML_file "am_compiler.ML"
+ML_file "am_interpreter.ML"
+ML_file "am_ghc.ML"
+ML_file "am_sml.ML"
+ML_file "report.ML"
+ML_file "compute.ML"
+ML_file "linker.ML"
+
end
\ No newline at end of file
--- a/src/HOL/Matrix_LP/Cplex.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Matrix_LP/Cplex.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,10 +4,13 @@
theory Cplex
imports SparseMatrix LP ComputeFloat ComputeNumeral
-uses "Cplex_tools.ML" "CplexMatrixConverter.ML" "FloatSparseMatrixBuilder.ML"
- "fspmlp.ML" ("matrixlp.ML")
begin
+ML_file "Cplex_tools.ML"
+ML_file "CplexMatrixConverter.ML"
+ML_file "FloatSparseMatrixBuilder.ML"
+ML_file "fspmlp.ML"
+
lemma spm_mult_le_dual_prts:
assumes
"sorted_sparse_matrix A1"
@@ -61,7 +64,7 @@
(mult_est_spmat r1 r2 (diff_spmat c1 (mult_spmat y A2)) (diff_spmat c2 (mult_spmat y A1))))"
by (simp add: assms mult_est_spmat_def spm_mult_le_dual_prts[where A=A, simplified Let_def])
-use "matrixlp.ML"
+ML_file "matrixlp.ML"
end
--- a/src/HOL/Meson.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Meson.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,9 +9,6 @@
theory Meson
imports Datatype
-uses ("Tools/Meson/meson.ML")
- ("Tools/Meson/meson_clausify.ML")
- ("Tools/Meson/meson_tactic.ML")
begin
subsection {* Negation Normal Form *}
@@ -194,9 +191,9 @@
subsection {* Meson package *}
-use "Tools/Meson/meson.ML"
-use "Tools/Meson/meson_clausify.ML"
-use "Tools/Meson/meson_tactic.ML"
+ML_file "Tools/Meson/meson.ML"
+ML_file "Tools/Meson/meson_clausify.ML"
+ML_file "Tools/Meson/meson_tactic.ML"
setup {* Meson_Tactic.setup *}
--- a/src/HOL/Metis.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Metis.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,13 +9,10 @@
theory Metis
imports ATP
keywords "try0" :: diag
-uses "~~/src/Tools/Metis/metis.ML"
- ("Tools/Metis/metis_generate.ML")
- ("Tools/Metis/metis_reconstruct.ML")
- ("Tools/Metis/metis_tactic.ML")
- ("Tools/try0.ML")
begin
+ML_file "~~/src/Tools/Metis/metis.ML"
+
subsection {* Literal selection and lambda-lifting helpers *}
definition select :: "'a \<Rightarrow> 'a" where
@@ -41,9 +38,9 @@
subsection {* Metis package *}
-use "Tools/Metis/metis_generate.ML"
-use "Tools/Metis/metis_reconstruct.ML"
-use "Tools/Metis/metis_tactic.ML"
+ML_file "Tools/Metis/metis_generate.ML"
+ML_file "Tools/Metis/metis_reconstruct.ML"
+ML_file "Tools/Metis/metis_tactic.ML"
setup {* Metis_Tactic.setup *}
@@ -58,8 +55,7 @@
subsection {* Try0 *}
-use "Tools/try0.ML"
-
+ML_file "Tools/try0.ML"
setup {* Try0.setup *}
end
--- a/src/HOL/Mirabelle/Mirabelle.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Mirabelle/Mirabelle.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,10 +4,11 @@
theory Mirabelle
imports Sledgehammer
-uses "Tools/mirabelle.ML"
- "../TPTP/sledgehammer_tactics.ML"
begin
+ML_file "Tools/mirabelle.ML"
+ML_file "../TPTP/sledgehammer_tactics.ML"
+
(* no multithreading, no parallel proofs *) (* FIXME *)
ML {* Multithreading.max_threads := 1 *}
ML {* Goal.parallel_proofs := 0 *}
--- a/src/HOL/Mirabelle/Mirabelle_Test.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Mirabelle/Mirabelle_Test.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,16 +6,16 @@
theory Mirabelle_Test
imports Main Mirabelle
-uses
- "Tools/mirabelle_arith.ML"
- "Tools/mirabelle_metis.ML"
- "Tools/mirabelle_quickcheck.ML"
- "Tools/mirabelle_refute.ML"
- "Tools/mirabelle_sledgehammer.ML"
- "Tools/mirabelle_sledgehammer_filter.ML"
- "Tools/mirabelle_try0.ML"
begin
+ML_file "Tools/mirabelle_arith.ML"
+ML_file "Tools/mirabelle_metis.ML"
+ML_file "Tools/mirabelle_quickcheck.ML"
+ML_file "Tools/mirabelle_refute.ML"
+ML_file "Tools/mirabelle_sledgehammer.ML"
+ML_file "Tools/mirabelle_sledgehammer_filter.ML"
+ML_file "Tools/mirabelle_try0.ML"
+
text {*
Only perform type-checking of the actions,
any reasonable test would be too complicated.
--- a/src/HOL/Multivariate_Analysis/Norm_Arith.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Multivariate_Analysis/Norm_Arith.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory Norm_Arith
imports "~~/src/HOL/Library/Sum_of_Squares"
-uses ("normarith.ML")
begin
lemma norm_cmul_rule_thm:
@@ -111,7 +110,7 @@
mult_1_left
mult_1_right
-use "normarith.ML"
+ML_file "normarith.ML"
method_setup norm = {* Scan.succeed (SIMPLE_METHOD' o NormArith.norm_arith_tac)
*} "prove simple linear statements about vector norms"
--- a/src/HOL/Mutabelle/Mutabelle.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Mutabelle/Mutabelle.thy Wed Aug 22 23:23:48 2012 +0200
@@ -2,9 +2,10 @@
theory Mutabelle
imports Main
-uses "mutabelle.ML"
begin
+ML_file "mutabelle.ML"
+
ML {*
val comms = [@{const_name HOL.eq}, @{const_name HOL.disj}, @{const_name HOL.conj}];
--- a/src/HOL/Mutabelle/MutabelleExtra.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Mutabelle/MutabelleExtra.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,11 +9,11 @@
"~/repos/afp/thys/Polynomials/Polynomial"
"~/repos/afp/thys/Presburger-Automata/Presburger_Automata"
"~/repos/afp/thys/Abstract-Rewriting/Abstract_Rewriting"*)
-uses
- "mutabelle.ML"
- "mutabelle_extra.ML"
begin
+ML_file "mutabelle.ML"
+ML_file "mutabelle_extra.ML"
+
section {* configuration *}
--- a/src/HOL/NSA/StarDef.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/NSA/StarDef.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory StarDef
imports Filter
-uses ("transfer.ML")
begin
subsection {* A Free Ultrafilter over the Naturals *}
@@ -88,7 +87,7 @@
by (subgoal_tac "P \<equiv> Q", simp, simp add: atomize_eq)
text {*Initialize transfer tactic.*}
-use "transfer.ML"
+ML_file "transfer.ML"
setup Transfer_Principle.setup
method_setup transfer = {*
--- a/src/HOL/Nat.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Nat.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,14 +9,13 @@
theory Nat
imports Inductive Typedef Fun Fields
-uses
- "~~/src/Tools/rat.ML"
- "Tools/arith_data.ML"
- ("Tools/nat_arith.ML")
- "~~/src/Provers/Arith/fast_lin_arith.ML"
- ("Tools/lin_arith.ML")
begin
+ML_file "~~/src/Tools/rat.ML"
+ML_file "Tools/arith_data.ML"
+ML_file "~~/src/Provers/Arith/fast_lin_arith.ML"
+
+
subsection {* Type @{text ind} *}
typedecl ind
@@ -1492,7 +1491,7 @@
setup Arith_Data.setup
-use "Tools/nat_arith.ML"
+ML_file "Tools/nat_arith.ML"
simproc_setup nateq_cancel_sums
("(l::nat) + m = n" | "(l::nat) = m + n" | "Suc m = n" | "m = Suc n") =
@@ -1510,7 +1509,7 @@
("(l::nat) + m - n" | "(l::nat) - (m + n)" | "Suc m - n" | "m - Suc n") =
{* fn phi => fn ss => try Nat_Arith.cancel_diff_conv *}
-use "Tools/lin_arith.ML"
+ML_file "Tools/lin_arith.ML"
setup {* Lin_Arith.global_setup *}
declaration {* K Lin_Arith.setup *}
--- a/src/HOL/Nat_Transfer.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Nat_Transfer.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,7 +5,6 @@
theory Nat_Transfer
imports Int
-uses ("Tools/legacy_transfer.ML")
begin
subsection {* Generic transfer machinery *}
@@ -16,8 +15,7 @@
lemma transfer_morphismI[intro]: "transfer_morphism f A"
by (simp add: transfer_morphism_def)
-use "Tools/legacy_transfer.ML"
-
+ML_file "Tools/legacy_transfer.ML"
setup Legacy_Transfer.setup
--- a/src/HOL/Nitpick.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Nitpick.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,21 +10,6 @@
theory Nitpick
imports Map Quotient SAT Record
keywords "nitpick" :: diag and "nitpick_params" :: thy_decl
-uses ("Tools/Nitpick/kodkod.ML")
- ("Tools/Nitpick/kodkod_sat.ML")
- ("Tools/Nitpick/nitpick_util.ML")
- ("Tools/Nitpick/nitpick_hol.ML")
- ("Tools/Nitpick/nitpick_preproc.ML")
- ("Tools/Nitpick/nitpick_mono.ML")
- ("Tools/Nitpick/nitpick_scope.ML")
- ("Tools/Nitpick/nitpick_peephole.ML")
- ("Tools/Nitpick/nitpick_rep.ML")
- ("Tools/Nitpick/nitpick_nut.ML")
- ("Tools/Nitpick/nitpick_kodkod.ML")
- ("Tools/Nitpick/nitpick_model.ML")
- ("Tools/Nitpick/nitpick.ML")
- ("Tools/Nitpick/nitpick_isar.ML")
- ("Tools/Nitpick/nitpick_tests.ML")
begin
typedecl bisim_iterator
@@ -212,21 +197,21 @@
definition of_frac :: "'a \<Rightarrow> 'b\<Colon>{inverse,ring_1}" where
"of_frac q \<equiv> of_int (num q) / of_int (denom q)"
-use "Tools/Nitpick/kodkod.ML"
-use "Tools/Nitpick/kodkod_sat.ML"
-use "Tools/Nitpick/nitpick_util.ML"
-use "Tools/Nitpick/nitpick_hol.ML"
-use "Tools/Nitpick/nitpick_mono.ML"
-use "Tools/Nitpick/nitpick_preproc.ML"
-use "Tools/Nitpick/nitpick_scope.ML"
-use "Tools/Nitpick/nitpick_peephole.ML"
-use "Tools/Nitpick/nitpick_rep.ML"
-use "Tools/Nitpick/nitpick_nut.ML"
-use "Tools/Nitpick/nitpick_kodkod.ML"
-use "Tools/Nitpick/nitpick_model.ML"
-use "Tools/Nitpick/nitpick.ML"
-use "Tools/Nitpick/nitpick_isar.ML"
-use "Tools/Nitpick/nitpick_tests.ML"
+ML_file "Tools/Nitpick/kodkod.ML"
+ML_file "Tools/Nitpick/kodkod_sat.ML"
+ML_file "Tools/Nitpick/nitpick_util.ML"
+ML_file "Tools/Nitpick/nitpick_hol.ML"
+ML_file "Tools/Nitpick/nitpick_mono.ML"
+ML_file "Tools/Nitpick/nitpick_preproc.ML"
+ML_file "Tools/Nitpick/nitpick_scope.ML"
+ML_file "Tools/Nitpick/nitpick_peephole.ML"
+ML_file "Tools/Nitpick/nitpick_rep.ML"
+ML_file "Tools/Nitpick/nitpick_nut.ML"
+ML_file "Tools/Nitpick/nitpick_kodkod.ML"
+ML_file "Tools/Nitpick/nitpick_model.ML"
+ML_file "Tools/Nitpick/nitpick.ML"
+ML_file "Tools/Nitpick/nitpick_isar.ML"
+ML_file "Tools/Nitpick/nitpick_tests.ML"
setup {*
Nitpick_Isar.setup #>
--- a/src/HOL/Nitpick_Examples/Mini_Nits.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Nitpick_Examples/Mini_Nits.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,9 +9,10 @@
theory Mini_Nits
imports Main
-uses "minipick.ML"
begin
+ML_file "minipick.ML"
+
nitpick_params [verbose, sat_solver = MiniSat_JNI, max_threads = 1]
nitpick_params [total_consts = smart]
--- a/src/HOL/Nominal/Nominal.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Nominal/Nominal.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,16 +4,6 @@
"atom_decl" "nominal_datatype" "equivariance" :: thy_decl and
"nominal_primrec" "nominal_inductive" "nominal_inductive2" :: thy_goal and
"avoids"
-uses
- ("nominal_thmdecls.ML")
- ("nominal_atoms.ML")
- ("nominal_datatype.ML")
- ("nominal_induct.ML")
- ("nominal_permeq.ML")
- ("nominal_fresh_fun.ML")
- ("nominal_primrec.ML")
- ("nominal_inductive.ML")
- ("nominal_inductive2.ML")
begin
section {* Permutations *}
@@ -3562,7 +3552,7 @@
(*******************************************************)
(* Setup of the theorem attributes eqvt and eqvt_force *)
-use "nominal_thmdecls.ML"
+ML_file "nominal_thmdecls.ML"
setup "NominalThmDecls.setup"
lemmas [eqvt] =
@@ -3598,11 +3588,11 @@
(***************************************)
(* setup for the individial atom-kinds *)
(* and nominal datatypes *)
-use "nominal_atoms.ML"
+ML_file "nominal_atoms.ML"
(************************************************************)
(* various tactics for analysing permutations, supports etc *)
-use "nominal_permeq.ML"
+ML_file "nominal_permeq.ML"
method_setup perm_simp =
{* NominalPermeq.perm_simp_meth *}
@@ -3646,7 +3636,7 @@
(*****************************************************************)
(* tactics for generating fresh names and simplifying fresh_funs *)
-use "nominal_fresh_fun.ML"
+ML_file "nominal_fresh_fun.ML"
method_setup generate_fresh =
{* setup_generate_fresh *}
@@ -3662,20 +3652,20 @@
lemma allE_Nil: assumes "\<forall>x. P x" obtains "P []"
using assms ..
-use "nominal_datatype.ML"
+ML_file "nominal_datatype.ML"
(******************************************************)
(* primitive recursive functions on nominal datatypes *)
-use "nominal_primrec.ML"
+ML_file "nominal_primrec.ML"
(****************************************************)
(* inductive definition involving nominal datatypes *)
-use "nominal_inductive.ML"
-use "nominal_inductive2.ML"
+ML_file "nominal_inductive.ML"
+ML_file "nominal_inductive2.ML"
(*****************************************)
(* setup for induction principles method *)
-use "nominal_induct.ML"
+ML_file "nominal_induct.ML"
method_setup nominal_induct =
{* NominalInduct.nominal_induct_method *}
{* nominal induction *}
--- a/src/HOL/Num.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Num.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,8 +7,6 @@
theory Num
imports Datatype
-uses
- ("Tools/numeral.ML")
begin
subsection {* The @{text num} type *}
@@ -333,7 +331,7 @@
(@{const_syntax neg_numeral}, num_tr' "-")] end
*}
-use "Tools/numeral.ML"
+ML_file "Tools/numeral.ML"
subsection {* Class-specific numeral rules *}
--- a/src/HOL/Numeral_Simprocs.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Numeral_Simprocs.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,16 +4,14 @@
theory Numeral_Simprocs
imports Divides
-uses
- "~~/src/Provers/Arith/assoc_fold.ML"
- "~~/src/Provers/Arith/cancel_numerals.ML"
- "~~/src/Provers/Arith/combine_numerals.ML"
- "~~/src/Provers/Arith/cancel_numeral_factor.ML"
- "~~/src/Provers/Arith/extract_common_term.ML"
- ("Tools/numeral_simprocs.ML")
- ("Tools/nat_numeral_simprocs.ML")
begin
+ML_file "~~/src/Provers/Arith/assoc_fold.ML"
+ML_file "~~/src/Provers/Arith/cancel_numerals.ML"
+ML_file "~~/src/Provers/Arith/combine_numerals.ML"
+ML_file "~~/src/Provers/Arith/cancel_numeral_factor.ML"
+ML_file "~~/src/Provers/Arith/extract_common_term.ML"
+
lemmas semiring_norm =
Let_def arith_simps nat_arith rel_simps
if_False if_True
@@ -100,7 +98,7 @@
"(k*m) div (k*n) = (if k = (0::nat) then 0 else m div n)"
by (simp add: nat_mult_div_cancel1)
-use "Tools/numeral_simprocs.ML"
+ML_file "Tools/numeral_simprocs.ML"
simproc_setup semiring_assoc_fold
("(a::'a::comm_semiring_1_cancel) * b") =
@@ -210,7 +208,7 @@
|"(l::'a::field_inverse_zero) / (m * n)") =
{* fn phi => Numeral_Simprocs.divide_cancel_factor *}
-use "Tools/nat_numeral_simprocs.ML"
+ML_file "Tools/nat_numeral_simprocs.ML"
simproc_setup nat_combine_numerals
("(i::nat) + j" | "Suc (i + j)") =
--- a/src/HOL/Orderings.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Orderings.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,11 +7,11 @@
theory Orderings
imports HOL
keywords "print_orders" :: diag
-uses
- "~~/src/Provers/order.ML"
- "~~/src/Provers/quasi.ML" (* FIXME unused? *)
begin
+ML_file "~~/src/Provers/order.ML"
+ML_file "~~/src/Provers/quasi.ML" (* FIXME unused? *)
+
subsection {* Syntactic orders *}
class ord =
--- a/src/HOL/Partial_Function.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Partial_Function.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,11 +7,10 @@
theory Partial_Function
imports Complete_Partial_Order Option
keywords "partial_function" :: thy_decl
-uses
- "Tools/Function/function_lib.ML"
- "Tools/Function/partial_function.ML"
begin
+ML_file "Tools/Function/function_lib.ML"
+ML_file "Tools/Function/partial_function.ML"
setup Partial_Function.setup
subsection {* Axiomatic setup *}
--- a/src/HOL/Predicate_Compile.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Predicate_Compile.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,20 +7,20 @@
theory Predicate_Compile
imports Predicate New_Random_Sequence Quickcheck_Exhaustive
keywords "code_pred" :: thy_goal and "values" :: diag
-uses
- "Tools/Predicate_Compile/predicate_compile_aux.ML"
- "Tools/Predicate_Compile/predicate_compile_compilations.ML"
- "Tools/Predicate_Compile/core_data.ML"
- "Tools/Predicate_Compile/mode_inference.ML"
- "Tools/Predicate_Compile/predicate_compile_proof.ML"
- "Tools/Predicate_Compile/predicate_compile_core.ML"
- "Tools/Predicate_Compile/predicate_compile_data.ML"
- "Tools/Predicate_Compile/predicate_compile_fun.ML"
- "Tools/Predicate_Compile/predicate_compile_pred.ML"
- "Tools/Predicate_Compile/predicate_compile_specialisation.ML"
- "Tools/Predicate_Compile/predicate_compile.ML"
begin
+ML_file "Tools/Predicate_Compile/predicate_compile_aux.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_compilations.ML"
+ML_file "Tools/Predicate_Compile/core_data.ML"
+ML_file "Tools/Predicate_Compile/mode_inference.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_proof.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_core.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_data.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_fun.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_pred.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile_specialisation.ML"
+ML_file "Tools/Predicate_Compile/predicate_compile.ML"
+
setup Predicate_Compile.setup
end
--- a/src/HOL/Predicate_Compile_Examples/Hotel_Example_Small_Generator.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Predicate_Compile_Examples/Hotel_Example_Small_Generator.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,8 +1,9 @@
theory Hotel_Example_Small_Generator
imports Hotel_Example "~~/src/HOL/Library/Predicate_Compile_Alternative_Defs"
-uses "~~/src/HOL/Tools/Predicate_Compile/predicate_compile_quickcheck.ML"
begin
+ML_file "~~/src/HOL/Tools/Predicate_Compile/predicate_compile_quickcheck.ML"
+
declare Let_def[code_pred_inline]
lemma [code_pred_inline]: "insert == (%y A x. y = x | A x)"
--- a/src/HOL/Presburger.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Presburger.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,12 +6,11 @@
theory Presburger
imports Groebner_Basis Set_Interval
-uses
- "Tools/Qelim/qelim.ML"
- "Tools/Qelim/cooper_procedure.ML"
- ("Tools/Qelim/cooper.ML")
begin
+ML_file "Tools/Qelim/qelim.ML"
+ML_file "Tools/Qelim/cooper_procedure.ML"
+
subsection{* The @{text "-\<infinity>"} and @{text "+\<infinity>"} Properties *}
lemma minf:
@@ -386,7 +385,7 @@
"\<lbrakk>x = x'; 0 \<le> x' \<Longrightarrow> P = P'\<rbrakk> \<Longrightarrow> (0 \<le> (x::int) \<and> P) = (0 \<le> x' \<and> P')"
by (simp cong: conj_cong)
-use "Tools/Qelim/cooper.ML"
+ML_file "Tools/Qelim/cooper.ML"
setup Cooper.setup
method_setup presburger = {*
--- a/src/HOL/Product_Type.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Product_Type.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,9 +8,6 @@
theory Product_Type
imports Typedef Inductive Fun
keywords "inductive_set" "coinductive_set" :: thy_decl
-uses
- ("Tools/split_rule.ML")
- ("Tools/inductive_set.ML")
begin
subsection {* @{typ bool} is a datatype *}
@@ -690,7 +687,7 @@
lemma internal_split_conv: "internal_split c (a, b) = c a b"
by (simp only: internal_split_def split_conv)
-use "Tools/split_rule.ML"
+ML_file "Tools/split_rule.ML"
setup Split_Rule.setup
hide_const internal_split
@@ -1122,7 +1119,7 @@
subsection {* Inductively defined sets *}
-use "Tools/inductive_set.ML"
+ML_file "Tools/inductive_set.ML"
setup Inductive_Set.setup
--- a/src/HOL/Prolog/HOHH.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Prolog/HOHH.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory HOHH
imports HOL
-uses "prolog.ML"
begin
+ML_file "prolog.ML"
+
method_setup ptac =
{* Attrib.thms >> (fn thms => fn ctxt => SIMPLE_METHOD' (Prolog.ptac ctxt thms)) *}
"basic Lambda Prolog interpreter"
--- a/src/HOL/Quickcheck.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Quickcheck.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,9 +4,6 @@
theory Quickcheck
imports Random Code_Evaluation Enum
-uses
- ("Tools/Quickcheck/quickcheck_common.ML")
- ("Tools/Quickcheck/random_generators.ML")
begin
notation fcomp (infixl "\<circ>>" 60)
@@ -182,8 +179,8 @@
subsection {* Deriving random generators for datatypes *}
-use "Tools/Quickcheck/quickcheck_common.ML"
-use "Tools/Quickcheck/random_generators.ML"
+ML_file "Tools/Quickcheck/quickcheck_common.ML"
+ML_file "Tools/Quickcheck/random_generators.ML"
setup Random_Generators.setup
--- a/src/HOL/Quickcheck_Exhaustive.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Quickcheck_Exhaustive.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,9 +5,6 @@
theory Quickcheck_Exhaustive
imports Quickcheck
keywords "quickcheck_generator" :: thy_decl
-uses
- ("Tools/Quickcheck/exhaustive_generators.ML")
- ("Tools/Quickcheck/abstract_generators.ML")
begin
subsection {* basic operations for exhaustive generators *}
@@ -577,7 +574,7 @@
notation (output) unknown ("?")
-use "Tools/Quickcheck/exhaustive_generators.ML"
+ML_file "Tools/Quickcheck/exhaustive_generators.ML"
setup {* Exhaustive_Generators.setup *}
@@ -585,7 +582,7 @@
subsection {* Defining generators for abstract types *}
-use "Tools/Quickcheck/abstract_generators.ML"
+ML_file "Tools/Quickcheck/abstract_generators.ML"
hide_fact (open) orelse_def
no_notation orelse (infixr "orelse" 55)
--- a/src/HOL/Quickcheck_Narrowing.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Quickcheck_Narrowing.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,11 +5,9 @@
theory Quickcheck_Narrowing
imports Quickcheck_Exhaustive
keywords "find_unused_assms" :: diag
-uses
+uses (* FIXME session files *)
("Tools/Quickcheck/PNF_Narrowing_Engine.hs")
("Tools/Quickcheck/Narrowing_Engine.hs")
- ("Tools/Quickcheck/narrowing_generators.ML")
- ("Tools/Quickcheck/find_unused_assms.ML")
begin
subsection {* Counterexample generator *}
@@ -340,7 +338,7 @@
subsubsection {* Setting up the counterexample generator *}
-use "Tools/Quickcheck/narrowing_generators.ML"
+ML_file "Tools/Quickcheck/narrowing_generators.ML"
setup {* Narrowing_Generators.setup *}
@@ -447,7 +445,7 @@
subsection {* The @{text find_unused_assms} command *}
-use "Tools/Quickcheck/find_unused_assms.ML"
+ML_file "Tools/Quickcheck/find_unused_assms.ML"
subsection {* Closing up *}
--- a/src/HOL/Quotient.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Quotient.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,12 +10,6 @@
"print_quotmapsQ3" "print_quotientsQ3" "print_quotconsts" :: diag and
"quotient_type" :: thy_goal and "/" and
"quotient_definition" :: thy_goal
-uses
- ("Tools/Quotient/quotient_info.ML")
- ("Tools/Quotient/quotient_type.ML")
- ("Tools/Quotient/quotient_def.ML")
- ("Tools/Quotient/quotient_term.ML")
- ("Tools/Quotient/quotient_tacs.ML")
begin
text {*
@@ -765,7 +759,7 @@
text {* Auxiliary data for the quotient package *}
-use "Tools/Quotient/quotient_info.ML"
+ML_file "Tools/Quotient/quotient_info.ML"
setup Quotient_Info.setup
declare [[mapQ3 "fun" = (fun_rel, fun_quotient3)]]
@@ -787,15 +781,15 @@
vimage_id
text {* Translation functions for the lifting process. *}
-use "Tools/Quotient/quotient_term.ML"
+ML_file "Tools/Quotient/quotient_term.ML"
text {* Definitions of the quotient types. *}
-use "Tools/Quotient/quotient_type.ML"
+ML_file "Tools/Quotient/quotient_type.ML"
text {* Definitions for quotient constants. *}
-use "Tools/Quotient/quotient_def.ML"
+ML_file "Tools/Quotient/quotient_def.ML"
text {*
@@ -820,7 +814,7 @@
text {* Tactics for proving the lifted theorems *}
-use "Tools/Quotient/quotient_tacs.ML"
+ML_file "Tools/Quotient/quotient_tacs.ML"
subsection {* Methods / Interface *}
--- a/src/HOL/Rat.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Rat.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory Rat
imports GCD Archimedean_Field
-uses ("Tools/float_syntax.ML")
begin
subsection {* Rational numbers as quotient *}
@@ -1107,7 +1106,7 @@
syntax "_Float" :: "float_const \<Rightarrow> 'a" ("_")
-use "Tools/float_syntax.ML"
+ML_file "Tools/float_syntax.ML"
setup Float_Syntax.setup
text{* Test: *}
--- a/src/HOL/Real.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Real.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,8 +1,8 @@
theory Real
imports RComplete RealVector
-uses "Tools/SMT/smt_real.ML"
begin
-setup {* SMT_Real.setup *}
+ML_file "Tools/SMT/smt_real.ML"
+setup SMT_Real.setup
end
--- a/src/HOL/Record.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Record.thy Wed Aug 22 23:23:48 2012 +0200
@@ -11,7 +11,6 @@
theory Record
imports Plain Quickcheck_Narrowing
keywords "record" :: thy_decl
-uses ("Tools/record.ML")
begin
subsection {* Introduction *}
@@ -461,7 +460,7 @@
subsection {* Record package *}
-use "Tools/record.ML" setup Record.setup
+ML_file "Tools/record.ML" setup Record.setup
hide_const (open) Tuple_Isomorphism repr abst iso_tuple_fst iso_tuple_snd
iso_tuple_fst_update iso_tuple_snd_update iso_tuple_cons
--- a/src/HOL/Refute.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Refute.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,9 +10,9 @@
theory Refute
imports Hilbert_Choice List Sledgehammer
keywords "refute" :: diag and "refute_params" :: thy_decl
-uses "Tools/refute.ML"
begin
+ML_file "Tools/refute.ML"
setup Refute.setup
refute_params
--- a/src/HOL/SAT.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/SAT.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,10 +9,10 @@
theory SAT
imports Refute
-uses
- "Tools/sat_funcs.ML"
begin
+ML_file "Tools/sat_funcs.ML"
+
ML {* structure sat = SATFunc(cnf) *}
method_setup sat = {* Scan.succeed (SIMPLE_METHOD' o sat.sat_tac) *}
--- a/src/HOL/SMT.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/SMT.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,26 +7,11 @@
theory SMT
imports Record
keywords "smt_status" :: diag
-uses
- "Tools/SMT/smt_utils.ML"
- "Tools/SMT/smt_failure.ML"
- "Tools/SMT/smt_config.ML"
- ("Tools/SMT/smt_builtin.ML")
- ("Tools/SMT/smt_datatypes.ML")
- ("Tools/SMT/smt_normalize.ML")
- ("Tools/SMT/smt_translate.ML")
- ("Tools/SMT/smt_solver.ML")
- ("Tools/SMT/smtlib_interface.ML")
- ("Tools/SMT/z3_interface.ML")
- ("Tools/SMT/z3_proof_parser.ML")
- ("Tools/SMT/z3_proof_tools.ML")
- ("Tools/SMT/z3_proof_literals.ML")
- ("Tools/SMT/z3_proof_methods.ML")
- ("Tools/SMT/z3_proof_reconstruction.ML")
- ("Tools/SMT/z3_model.ML")
- ("Tools/SMT/smt_setup_solvers.ML")
begin
+ML_file "Tools/SMT/smt_utils.ML"
+ML_file "Tools/SMT/smt_failure.ML"
+ML_file "Tools/SMT/smt_config.ML"
subsection {* Triggers for quantifier instantiation *}
@@ -135,20 +120,20 @@
subsection {* Setup *}
-use "Tools/SMT/smt_builtin.ML"
-use "Tools/SMT/smt_datatypes.ML"
-use "Tools/SMT/smt_normalize.ML"
-use "Tools/SMT/smt_translate.ML"
-use "Tools/SMT/smt_solver.ML"
-use "Tools/SMT/smtlib_interface.ML"
-use "Tools/SMT/z3_interface.ML"
-use "Tools/SMT/z3_proof_parser.ML"
-use "Tools/SMT/z3_proof_tools.ML"
-use "Tools/SMT/z3_proof_literals.ML"
-use "Tools/SMT/z3_proof_methods.ML"
-use "Tools/SMT/z3_proof_reconstruction.ML"
-use "Tools/SMT/z3_model.ML"
-use "Tools/SMT/smt_setup_solvers.ML"
+ML_file "Tools/SMT/smt_builtin.ML"
+ML_file "Tools/SMT/smt_datatypes.ML"
+ML_file "Tools/SMT/smt_normalize.ML"
+ML_file "Tools/SMT/smt_translate.ML"
+ML_file "Tools/SMT/smt_solver.ML"
+ML_file "Tools/SMT/smtlib_interface.ML"
+ML_file "Tools/SMT/z3_interface.ML"
+ML_file "Tools/SMT/z3_proof_parser.ML"
+ML_file "Tools/SMT/z3_proof_tools.ML"
+ML_file "Tools/SMT/z3_proof_literals.ML"
+ML_file "Tools/SMT/z3_proof_methods.ML"
+ML_file "Tools/SMT/z3_proof_reconstruction.ML"
+ML_file "Tools/SMT/z3_model.ML"
+ML_file "Tools/SMT/smt_setup_solvers.ML"
setup {*
SMT_Config.setup #>
--- a/src/HOL/SPARK/SPARK_Setup.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/SPARK/SPARK_Setup.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,13 +10,11 @@
keywords
"spark_open" "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
"spark_vc" :: thy_goal and "spark_status" :: diag
-uses
- "Tools/fdl_lexer.ML"
- "Tools/fdl_parser.ML"
- ("Tools/spark_vcs.ML")
- ("Tools/spark_commands.ML")
begin
+ML_file "Tools/fdl_lexer.ML"
+ML_file "Tools/fdl_parser.ML"
+
text {*
SPARK version of div, see section 4.4.1.1 of SPARK Proof Manual
*}
@@ -180,8 +178,8 @@
text {* Load the package *}
-use "Tools/spark_vcs.ML"
-use "Tools/spark_commands.ML"
+ML_file "Tools/spark_vcs.ML"
+ML_file "Tools/spark_commands.ML"
setup SPARK_Commands.setup
--- a/src/HOL/Semiring_Normalization.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Semiring_Normalization.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,10 +6,10 @@
theory Semiring_Normalization
imports Numeral_Simprocs Nat_Transfer
-uses
- "Tools/semiring_normalizer.ML"
begin
+ML_file "Tools/semiring_normalizer.ML"
+
text {* Prelude *}
class comm_semiring_1_cancel_crossproduct = comm_semiring_1_cancel +
--- a/src/HOL/Sledgehammer.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Sledgehammer.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,17 +9,18 @@
theory Sledgehammer
imports ATP SMT
keywords "sledgehammer" :: diag and "sledgehammer_params" :: thy_decl
-uses "Tools/Sledgehammer/async_manager.ML"
- "Tools/Sledgehammer/sledgehammer_util.ML"
- "Tools/Sledgehammer/sledgehammer_fact.ML"
- "Tools/Sledgehammer/sledgehammer_provers.ML"
- "Tools/Sledgehammer/sledgehammer_minimize.ML"
- "Tools/Sledgehammer/sledgehammer_mepo.ML"
- "Tools/Sledgehammer/sledgehammer_mash.ML"
- "Tools/Sledgehammer/sledgehammer_run.ML"
- "Tools/Sledgehammer/sledgehammer_isar.ML"
begin
+ML_file "Tools/Sledgehammer/async_manager.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_util.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_fact.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_provers.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_minimize.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_mepo.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_mash.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_run.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_isar.ML"
+
setup {* Sledgehammer_Isar.setup *}
end
--- a/src/HOL/Statespace/DistinctTreeProver.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Statespace/DistinctTreeProver.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory DistinctTreeProver
imports Main
-uses ("distinct_tree_prover.ML")
begin
text {* A state space manages a set of (abstract) names and assumes
@@ -631,7 +630,7 @@
text {* Now we have all the theorems in place that are needed for the
certificate generating ML functions. *}
-use "distinct_tree_prover.ML"
+ML_file "distinct_tree_prover.ML"
(* Uncomment for profiling or debugging *)
(*
--- a/src/HOL/Statespace/StateSpaceLocale.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Statespace/StateSpaceLocale.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory StateSpaceLocale imports StateFun
keywords "statespace" :: thy_decl
-uses "state_space.ML" "state_fun.ML"
begin
+ML_file "state_space.ML"
+ML_file "state_fun.ML"
setup StateFun.setup
text {* For every type that is to be stored in a state space, an
--- a/src/HOL/String.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/String.thy Wed Aug 22 23:23:48 2012 +0200
@@ -4,9 +4,6 @@
theory String
imports List
-uses
- ("Tools/string_syntax.ML")
- ("Tools/string_code.ML")
begin
subsection {* Characters *}
@@ -79,7 +76,7 @@
syntax
"_String" :: "str_position => string" ("_")
-use "Tools/string_syntax.ML"
+ML_file "Tools/string_syntax.ML"
setup String_Syntax.setup
definition chars :: string where
@@ -188,7 +185,7 @@
subsection {* Code generator *}
-use "Tools/string_code.ML"
+ML_file "Tools/string_code.ML"
code_reserved SML string
code_reserved OCaml string
--- a/src/HOL/TPTP/ATP_Problem_Import.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/ATP_Problem_Import.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,10 +6,10 @@
theory ATP_Problem_Import
imports Complex_Main TPTP_Interpret
-uses "sledgehammer_tactics.ML"
- ("atp_problem_import.ML")
begin
+ML_file "sledgehammer_tactics.ML"
+
ML {* Proofterm.proofs := 0 *}
declare [[show_consts]] (* for Refute *)
@@ -18,7 +18,7 @@
declare [[unify_search_bound = 60]]
declare [[unify_trace_bound = 60]]
-use "atp_problem_import.ML"
+ML_file "atp_problem_import.ML"
(*
ML {* ATP_Problem_Import.isabelle_tptp_file @{theory} 50
--- a/src/HOL/TPTP/ATP_Theory_Export.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/ATP_Theory_Export.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory ATP_Theory_Export
imports Complex_Main
-uses "atp_theory_export.ML"
begin
+ML_file "atp_theory_export.ML"
+
ML {*
open ATP_Problem
open ATP_Theory_Export
--- a/src/HOL/TPTP/MaSh_Eval.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/MaSh_Eval.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory MaSh_Eval
imports Complex_Main
-uses "mash_eval.ML"
begin
+ML_file "mash_eval.ML"
+
sledgehammer_params
[provers = e, max_relevant = 40, strict, dont_slice, type_enc = poly_guards??,
lam_trans = combs_and_lifting, timeout = 5, dont_preplay, minimize]
--- a/src/HOL/TPTP/MaSh_Export.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/MaSh_Export.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory MaSh_Export
imports Complex_Main
-uses "mash_export.ML"
begin
+ML_file "mash_export.ML"
+
sledgehammer_params
[provers = e, max_relevant = 40, strict, dont_slice, type_enc = poly_guards??,
lam_trans = combs_and_lifting, timeout = 1, dont_preplay, minimize]
--- a/src/HOL/TPTP/TPTP_Interpret.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Interpret.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,9 +8,10 @@
theory TPTP_Interpret
imports Main TPTP_Parser
keywords "import_tptp" :: thy_decl
-uses ("TPTP_Parser/tptp_interpret.ML")
+begin
+
+typedecl "ind"
-begin
-typedecl "ind"
-use "TPTP_Parser/tptp_interpret.ML"
+ML_file "TPTP_Parser/tptp_interpret.ML"
+
end
\ No newline at end of file
--- a/src/HOL/TPTP/TPTP_Parser.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,16 +6,15 @@
theory TPTP_Parser
imports Pure
-uses
- "TPTP_Parser/ml_yacc_lib.ML" (*generated from ML-Yacc's lib*)
+begin
+
+ML_file "TPTP_Parser/ml_yacc_lib.ML" (*generated from ML-Yacc's lib*)
- "TPTP_Parser/tptp_syntax.ML"
- "TPTP_Parser/tptp_lexyacc.ML" (*generated from tptp.lex and tptp.yacc*)
- "TPTP_Parser/tptp_parser.ML"
- "TPTP_Parser/tptp_problem_name.ML"
- "TPTP_Parser/tptp_proof.ML"
-
-begin
+ML_file "TPTP_Parser/tptp_syntax.ML"
+ML_file "TPTP_Parser/tptp_lexyacc.ML" (*generated from tptp.lex and tptp.yacc*)
+ML_file "TPTP_Parser/tptp_parser.ML"
+ML_file "TPTP_Parser/tptp_problem_name.ML"
+ML_file "TPTP_Parser/tptp_proof.ML"
text {*The TPTP parser was generated using ML-Yacc, and needs the
ML-Yacc library to operate. This library is included with the parser,
--- a/src/HOL/TPTP/TPTP_Parser/tptp_interpret.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_interpret.ML Wed Aug 22 23:23:48 2012 +0200
@@ -878,10 +878,11 @@
val _ =
Outer_Syntax.improper_command @{command_spec "import_tptp"} "import TPTP problem"
- (Parse.path >> (fn path =>
+ (Parse.path >> (fn name =>
Toplevel.theory (fn thy =>
- (*NOTE: assumes include files are located at $TPTP/Axioms
- (which is how TPTP is organised)*)
- import_file true [Path.dir path, Path.explode "$TPTP"] path [] [] thy)))
+ let val path = Path.explode name
+ (*NOTE: assumes include files are located at $TPTP/Axioms
+ (which is how TPTP is organised)*)
+ in import_file true [Path.dir path, Path.explode "$TPTP"] path [] [] thy end)))
end
--- a/src/HOL/TPTP/TPTP_Parser_Example.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser_Example.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
theory TPTP_Parser_Example
imports TPTP_Parser TPTP_Interpret
-uses "sledgehammer_tactics.ML"
begin
+ML_file "sledgehammer_tactics.ML"
+
import_tptp "$TPTP/Problems/CSR/CSR077+1.p"
ML {*
--- a/src/HOL/Transfer.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Transfer.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,6 @@
theory Transfer
imports Plain Hilbert_Choice
-uses ("Tools/transfer.ML")
begin
subsection {* Relator for function space *}
@@ -96,8 +95,7 @@
shows "Rel (A ===> B) (\<lambda>x. f x) (\<lambda>y. g y)"
using assms unfolding Rel_def fun_rel_def by fast
-use "Tools/transfer.ML"
-
+ML_file "Tools/transfer.ML"
setup Transfer.setup
declare fun_rel_eq [relator_eq]
--- a/src/HOL/Transitive_Closure.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Transitive_Closure.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,9 +7,10 @@
theory Transitive_Closure
imports Relation
-uses "~~/src/Provers/trancl.ML"
begin
+ML_file "~~/src/Provers/trancl.ML"
+
text {*
@{text rtrancl} is reflexive/transitive closure,
@{text trancl} is transitive closure,
--- a/src/HOL/Typedef.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Typedef.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,7 +7,6 @@
theory Typedef
imports Set
keywords "typedef" :: thy_goal and "morphisms"
-uses ("Tools/typedef.ML")
begin
locale type_definition =
@@ -109,6 +108,6 @@
end
-use "Tools/typedef.ML" setup Typedef.setup
+ML_file "Tools/typedef.ML" setup Typedef.setup
end
--- a/src/HOL/UNITY/UNITY_Main.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/UNITY/UNITY_Main.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,9 +7,10 @@
theory UNITY_Main
imports Detects PPROD Follows ProgressSets
-uses "UNITY_tactics.ML"
begin
+ML_file "UNITY_tactics.ML"
+
method_setup safety = {*
Scan.succeed (SIMPLE_METHOD' o constrains_tac) *}
"for proving safety properties"
--- a/src/HOL/Wellfounded.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Wellfounded.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,7 +9,6 @@
theory Wellfounded
imports Transitive_Closure
-uses ("Tools/Function/size.ML")
begin
subsection {* Basic Definitions *}
@@ -845,8 +844,7 @@
subsection {* size of a datatype value *}
-use "Tools/Function/size.ML"
-
+ML_file "Tools/Function/size.ML"
setup Size.setup
lemma size_bool [code]:
--- a/src/HOL/Word/Word.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/Word/Word.thy Wed Aug 22 23:23:48 2012 +0200
@@ -10,9 +10,6 @@
Misc_Typedef
"~~/src/HOL/Library/Boolean_Algebra"
Bool_List_Representation
-uses
- ("~~/src/HOL/Word/Tools/smt_word.ML")
- ("~~/src/HOL/Word/Tools/word_lib.ML")
begin
text {* see @{text "Examples/WordExamples.thy"} for examples *}
@@ -4647,8 +4644,8 @@
declare bin_to_bl_def [simp]
-use "~~/src/HOL/Word/Tools/word_lib.ML"
-use "~~/src/HOL/Word/Tools/smt_word.ML"
+ML_file "~~/src/HOL/Word/Tools/word_lib.ML"
+ML_file "~~/src/HOL/Word/Tools/smt_word.ML"
setup {* SMT_Word.setup *}
hide_const (open) Word
--- a/src/HOL/ex/Interpretation_with_Defs.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/ex/Interpretation_with_Defs.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,7 +6,8 @@
theory Interpretation_with_Defs
imports Pure
-uses "~~/src/Tools/interpretation_with_defs.ML"
begin
+ML_file "~~/src/Tools/interpretation_with_defs.ML"
+
end
--- a/src/HOL/ex/SVC_Oracle.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/HOL/ex/SVC_Oracle.thy Wed Aug 22 23:23:48 2012 +0200
@@ -9,9 +9,10 @@
theory SVC_Oracle
imports Main
-uses "svc_funcs.ML"
begin
+ML_file "svc_funcs.ML"
+
consts
iff_keep :: "[bool, bool] => bool"
iff_unfold :: "[bool, bool] => bool"
--- a/src/Pure/Isar/isar_cmd.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Isar/isar_cmd.ML Wed Aug 22 23:23:48 2012 +0200
@@ -40,8 +40,8 @@
val ml_diag: bool -> Symbol_Pos.text * Position.T -> Toplevel.transition -> Toplevel.transition
val diag_state: Proof.context -> Toplevel.state
val diag_goal: Proof.context -> {context: Proof.context, facts: thm list, goal: thm}
- val display_drafts: Path.T list -> Toplevel.transition -> Toplevel.transition
- val print_drafts: Path.T list -> Toplevel.transition -> Toplevel.transition
+ val display_drafts: string list -> Toplevel.transition -> Toplevel.transition
+ val print_drafts: string list -> Toplevel.transition -> Toplevel.transition
val print_context: Toplevel.transition -> Toplevel.transition
val print_theory: bool -> Toplevel.transition -> Toplevel.transition
val print_syntax: Toplevel.transition -> Toplevel.transition
@@ -314,12 +314,16 @@
(* present draft files *)
-fun display_drafts files = Toplevel.imperative (fn () =>
- let val outfile = File.shell_path (Present.drafts (getenv "ISABELLE_DOC_FORMAT") files)
+fun display_drafts names = Toplevel.imperative (fn () =>
+ let
+ val paths = map Path.explode names;
+ val outfile = File.shell_path (Present.drafts (getenv "ISABELLE_DOC_FORMAT") paths);
in Isabelle_System.isabelle_tool "display" ("-c " ^ outfile ^ " &"); () end);
-fun print_drafts files = Toplevel.imperative (fn () =>
- let val outfile = File.shell_path (Present.drafts "ps" files)
+fun print_drafts names = Toplevel.imperative (fn () =>
+ let
+ val paths = map Path.explode names;
+ val outfile = File.shell_path (Present.drafts "ps" paths);
in Isabelle_System.isabelle_tool "print" ("-c " ^ outfile); () end);
--- a/src/Pure/Isar/isar_syn.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Isar/isar_syn.ML Wed Aug 22 23:23:48 2012 +0200
@@ -271,7 +271,8 @@
val _ =
Outer_Syntax.command @{command_spec "use"} "ML text from file"
- (Parse.path >> (fn path => Toplevel.generic_theory (Thy_Load.exec_ml path)));
+ (Parse.path >> (fn name =>
+ Toplevel.generic_theory (fn gthy => Thy_Load.exec_ml (Path.explode name) gthy)));
val _ =
Outer_Syntax.command @{command_spec "ML"} "ML text within theory or local theory"
@@ -923,7 +924,8 @@
val _ =
Outer_Syntax.improper_command @{command_spec "cd"} "change current working directory"
- (Parse.path >> (fn path => Toplevel.no_timing o Toplevel.imperative (fn () => File.cd path)));
+ (Parse.path >> (fn name =>
+ Toplevel.no_timing o Toplevel.imperative (fn () => File.cd (Path.explode name))));
val _ =
Outer_Syntax.improper_command @{command_spec "pwd"} "print current working directory"
--- a/src/Pure/Isar/keyword.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Isar/keyword.ML Wed Aug 22 23:23:48 2012 +0200
@@ -47,6 +47,9 @@
type spec = (string * string list) * string list
val spec: spec -> T
val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
+ type keywords
+ val lexicons_of: keywords -> Scan.lexicon * Scan.lexicon
+ val get_keywords: unit -> keywords
val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
val is_keyword: string -> bool
val command_keyword: string -> T option
@@ -54,6 +57,8 @@
val command_tags: string -> string list
val dest: unit -> string list * string list
val status: unit -> unit
+ val thy_load_commands: keywords -> (string * string list) list
+ val define_keywords: string * T option -> keywords -> keywords
val define: string * T option -> unit
val is_diag: string -> bool
val is_control: string -> bool
@@ -160,12 +165,17 @@
(** global keyword tables **)
-type keywords =
+datatype keywords = Keywords of
{lexicons: Scan.lexicon * Scan.lexicon, (*minor, major*)
commands: T Symtab.table}; (*command classification*)
-fun make_keywords (lexicons, commands) : keywords =
- {lexicons = lexicons, commands = commands};
+fun make_keywords (lexicons, commands) =
+ Keywords {lexicons = lexicons, commands = commands};
+
+fun map_keywords f (Keywords {lexicons, commands}) =
+ make_keywords (f (lexicons, commands));
+
+fun lexicons_of (Keywords {lexicons, ...}) = lexicons;
local
@@ -176,14 +186,12 @@
fun get_keywords () = ! global_keywords;
-fun change_keywords f = CRITICAL (fn () =>
- Unsynchronized.change global_keywords
- (fn {lexicons, commands} => make_keywords (f (lexicons, commands))));
+fun change_keywords f = CRITICAL (fn () => Unsynchronized.change global_keywords f);
end;
-fun get_lexicons () = #lexicons (get_keywords ());
-fun get_commands () = #commands (get_keywords ());
+fun get_lexicons () = lexicons_of (get_keywords ());
+fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands);
(* lookup *)
@@ -205,7 +213,7 @@
fun status () =
let
- val {lexicons = (minor, _), commands} = get_keywords ();
+ val Keywords {lexicons = (minor, _), commands} = get_keywords ();
val _ = sort_strings (Scan.dest_lexicon minor) |> List.app (fn name =>
writeln ("Outer syntax keyword " ^ quote name));
val _ = sort_wrt #1 (Symtab.dest commands) |> List.app (fn (name, kind) =>
@@ -213,9 +221,14 @@
in () end;
+fun thy_load_commands (Keywords {commands, ...}) =
+ Symtab.fold (fn (name, Keyword {kind, files, ...}) =>
+ kind = kind_of thy_load ? cons (name, files)) commands [];
+
+
(* define *)
-fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) =>
+fun define_keywords (name, opt_kind) = map_keywords (fn ((minor, major), commands) =>
(case opt_kind of
NONE =>
let
@@ -227,6 +240,8 @@
val commands' = Symtab.update (name, kind) commands;
in ((minor, major'), commands') end));
+val define = change_keywords o define_keywords;
+
(* command categories *)
--- a/src/Pure/Isar/outer_syntax.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Isar/outer_syntax.scala Wed Aug 22 23:23:48 2012 +0200
@@ -35,7 +35,11 @@
}
val empty: Outer_Syntax = new Outer_Syntax()
+
def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())
+
+ def init_pure(): Outer_Syntax =
+ init() + ("theory", Keyword.THY_BEGIN) + ("ML_file", Keyword.THY_LOAD)
}
final class Outer_Syntax private(
@@ -54,6 +58,9 @@
def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name)
def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1)
+ def thy_load_commands: List[(String, List[String])] =
+ (for ((name, (Keyword.THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList
+
def + (name: String, kind: (String, List[String]), replace: String): Outer_Syntax =
new Outer_Syntax(
keywords + (name -> kind),
@@ -64,8 +71,8 @@
def + (name: String, kind: String): Outer_Syntax = this + (name, (kind, Nil), name)
def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)
- def add_keywords(header: Document.Node.Header): Outer_Syntax =
- (this /: header.keywords) {
+ def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
+ (this /: keywords) {
case (syntax, ((name, Some((kind, _))))) =>
syntax + (Symbol.decode(name), kind) + (Symbol.encode(name), kind)
case (syntax, ((name, None))) =>
--- a/src/Pure/Isar/parse.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Isar/parse.ML Wed Aug 22 23:23:48 2012 +0200
@@ -65,7 +65,7 @@
val binding: binding parser
val xname: xstring parser
val text: string parser
- val path: Path.T parser
+ val path: string parser
val liberal_name: xstring parser
val parname: string parser
val parbinding: binding parser
@@ -250,7 +250,7 @@
val text = group (fn () => "text")
(short_ident || long_ident || sym_ident || string || number || verbatim);
-val path = group (fn () => "file name/path specification") name >> Path.explode;
+val path = group (fn () => "file name/path specification") name;
val liberal_name = keyword_ident_or_symbolic || xname;
--- a/src/Pure/ML-Systems/multithreading_polyml.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/ML-Systems/multithreading_polyml.ML Wed Aug 22 23:23:48 2012 +0200
@@ -29,7 +29,7 @@
val available = true;
-val max_threads = ref 0;
+val max_threads = ref 1;
fun max_threads_value () =
let val m = ! max_threads in
--- a/src/Pure/ProofGeneral/pgip_parser.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/ProofGeneral/pgip_parser.ML Wed Aug 22 23:23:48 2012 +0200
@@ -101,7 +101,7 @@
val text = implode (map (Print_Mode.setmp [] Thy_Syntax.present_token) toks);
in
(case kind of
- Thy_Syntax.Command name => parse_command name text
+ Thy_Syntax.Command (name, _) => parse_command name text
| Thy_Syntax.Ignored => [D.Whitespace {text = text}]
| Thy_Syntax.Malformed => [D.Unparseable {text = text}])
end;
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML Wed Aug 22 23:23:48 2012 +0200
@@ -594,14 +594,14 @@
fun filerefs f =
let val thy = thy_name f
- val filerefs = map #1 (#uses (Thy_Load.check_thy (Path.dir f) thy))
+ val filerefs = map #1 (#uses (Thy_Load.check_thy [] (Path.dir f) thy))
in
issue_pgip (Setrefs {url=url, thyname=NONE, objtype=SOME PgipTypes.ObjFile,
name=NONE, idtables=[], fileurls=filerefs})
end
fun thyrefs thy =
- let val thyrefs = #imports (Thy_Load.check_thy Path.current thy)
+ let val thyrefs = #imports (Thy_Load.check_thy [] Path.current thy)
in
issue_pgip (Setrefs {url=url, thyname=thyname, objtype=SOME PgipTypes.ObjTheory,
name=NONE, idtables=[{context=NONE, objtype=PgipTypes.ObjTheory,
--- a/src/Pure/Pure.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Pure.thy Wed Aug 22 23:23:48 2012 +0200
@@ -85,12 +85,13 @@
and "end" :: thy_end % "theory"
and "realizers" "realizability" "extract_type" "extract" :: thy_decl
and "find_theorems" "find_consts" :: diag
- uses
- "Isar/isar_syn.ML"
- "Tools/find_theorems.ML"
- "Tools/find_consts.ML"
begin
+ML_file "Isar/isar_syn.ML"
+ML_file "Tools/find_theorems.ML"
+ML_file "Tools/find_consts.ML"
+
+
section {* Further content for the Pure theory *}
subsection {* Meta-level connectives in assumptions *}
--- a/src/Pure/ROOT Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/ROOT Wed Aug 22 23:23:48 2012 +0200
@@ -226,6 +226,7 @@
"pattern.ML"
"primitive_defs.ML"
"proofterm.ML"
+ "pure_syn.ML"
"pure_thy.ML"
"raw_simplifier.ML"
"search.ML"
--- a/src/Pure/ROOT.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/ROOT.ML Wed Aug 22 23:23:48 2012 +0200
@@ -303,39 +303,15 @@
use "ProofGeneral/proof_general_emacs.ML";
-(* ML toplevel use commands *)
-
-fun use name = Toplevel.program (fn () => Thy_Load.use_ml (Path.explode name));
-
-fun use_thys name = Toplevel.program (fn () => Thy_Info.use_thys name);
-fun use_thy name = Toplevel.program (fn () => Thy_Info.use_thy name);
-
-
(* the Pure theory *)
-val _ =
- Outer_Syntax.command
- (("theory", Keyword.tag_theory Keyword.thy_begin), Position.none) "begin theory context"
- (Thy_Header.args >> (fn header =>
- Toplevel.print o
- Toplevel.init_theory
- (fn () => Thy_Info.toplevel_begin_theory (Thy_Load.get_master_path ()) header)));
+use "pure_syn.ML";
+Thy_Info.use_thy "Pure";
+Context.set_thread_data NONE;
+structure Pure = struct val thy = Thy_Info.get_theory "Pure" end;
-val _ =
- Outer_Syntax.command
- (("ML_file", Keyword.tag_ml Keyword.thy_load), Position.none) "ML text from file"
- (Thy_Load.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
- let val (_, [(txt, pos)]) = files (Context.theory_of gthy) in
- gthy
- |> ML_Context.exec (fn () => ML_Context.eval_text true pos txt)
- |> Local_Theory.propagate_ml_env
- end)));
-Unsynchronized.setmp Multithreading.max_threads 1
- use_thy "Pure";
-Context.set_thread_data NONE;
-
-structure Pure = struct val thy = Thy_Info.get_theory "Pure" end;
+use "ProofGeneral/pgip_tests.ML";
(* ML toplevel pretty printing *)
@@ -361,11 +337,15 @@
if ML_System.is_polyml then use "ML/install_pp_polyml.ML" else ();
-(* misc *)
+(* ML toplevel commands *)
+
+fun use name = Toplevel.program (fn () => Thy_Load.use_ml (Path.explode name));
+
+fun use_thys name = Toplevel.program (fn () => Thy_Info.use_thys name);
+fun use_thy name = Toplevel.program (fn () => Thy_Info.use_thy name);
val cd = File.cd o Path.explode;
Proofterm.proofs := 0;
+Multithreading.max_threads := 0;
-use "ProofGeneral/pgip_tests.ML";
-
--- a/src/Pure/System/build.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/System/build.scala Wed Aug 22 23:23:48 2012 +0200
@@ -333,20 +333,13 @@
{ case (deps, (name, info)) =>
val (preloaded, parent_syntax, parent_errors) =
info.parent match {
+ case None =>
+ (Set.empty[String], Outer_Syntax.init_pure(), Nil)
case Some(parent_name) =>
val parent = deps(parent_name)
(parent.loaded_theories, parent.syntax, parent.errors)
- case None =>
- val init_syntax =
- Outer_Syntax.init() +
- // FIXME avoid hardwired stuff!?
- ("theory", Keyword.THY_BEGIN) +
- ("ML_file", Keyword.THY_LOAD) +
- ("hence", (Keyword.PRF_ASM_GOAL, Nil), "then have") +
- ("thus", (Keyword.PRF_ASM_GOAL, Nil), "then show")
- (Set.empty[String], init_syntax, Nil)
}
- val thy_info = new Thy_Info(new Thy_Load(preloaded))
+ val thy_info = new Thy_Info(new Thy_Load(preloaded, parent_syntax))
if (verbose) {
val groups =
@@ -360,11 +353,11 @@
info.theories.map(_._2).flatten.
map(thy => Document.Node.Name(info.dir + Thy_Load.thy_path(thy))))
- val loaded_theories = preloaded ++ thy_deps.map(_._1.theory)
- val syntax = (parent_syntax /: thy_deps) { case (syn, (_, h)) => syn.add_keywords(h) }
+ val loaded_theories = thy_deps.loaded_theories
+ val syntax = thy_deps.make_syntax
val all_files =
- thy_deps.map({ case (n, h) =>
+ thy_deps.deps.map({ case (n, h) =>
val thy = Path.explode(n.node).expand
val uses = h.uses.map(p => (Path.explode(n.dir) + Path.explode(p._1)).expand)
thy :: uses
@@ -376,7 +369,7 @@
error(msg + "\nThe error(s) above occurred in session " +
quote(name) + Position.str_of(info.pos))
}
- val errors = parent_errors ::: thy_deps.map(d => d._2.errors).flatten
+ val errors = parent_errors ::: thy_deps.deps.map(d => d._2.errors).flatten
deps + (name -> Session_Content(loaded_theories, syntax, sources, errors))
}))
--- a/src/Pure/System/session.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/System/session.scala Wed Aug 22 23:23:48 2012 +0200
@@ -37,7 +37,7 @@
}
-class Session(val thy_load: Thy_Load = new Thy_Load())
+class Session(val thy_load: Thy_Load)
{
/* global flags */
@@ -108,7 +108,7 @@
val prev = previous.get_finished
val (doc_edits, version) =
Timing.timeit("Thy_Syntax.text_edits", timing) {
- Thy_Syntax.text_edits(base_syntax, prev, text_edits)
+ Thy_Syntax.text_edits(thy_load.base_syntax, prev, text_edits)
}
version_result.fulfill(version)
sender ! Change(doc_edits, prev, version)
@@ -125,8 +125,6 @@
/* global state */
- @volatile private var base_syntax = Outer_Syntax.init()
-
private val syslog = Volatile(Queue.empty[XML.Elem])
def current_syslog(): String = cat_lines(syslog().iterator.map(msg => XML.content(msg).mkString))
@@ -145,12 +143,9 @@
def recent_syntax(): Outer_Syntax =
{
val version = current_state().recent_finished.version.get_finished
- if (version.is_init) base_syntax
+ if (version.is_init) thy_load.base_syntax
else version.syntax
}
- def get_recent_syntax(): Option[Outer_Syntax] =
- if (is_ready) Some(recent_syntax)
- else None
def snapshot(name: Document.Node.Name = Document.Node.Name.empty,
pending_edits: List[Text.Edit] = Nil): Document.Snapshot =
@@ -162,7 +157,7 @@
def header_edit(name: Document.Node.Name, header: Document.Node.Header): Document.Edit_Text =
{
val header1 =
- if (thy_load.is_loaded(name.theory))
+ if (thy_load.loaded_theories(name.theory))
header.error("Attempt to update loaded theory " + quote(name.theory))
else header
(name, Document.Node.Deps(header1))
@@ -171,7 +166,7 @@
/* actor messages */
- private case class Start(dirs: List[Path], logic: String, args: List[String])
+ private case class Start(args: List[String])
private case object Cancel_Execution
private case class Edit(edits: List[Document.Edit_Text])
private case class Change(
@@ -377,15 +372,9 @@
receiveWithin(delay_commands_changed.flush_timeout) {
case TIMEOUT => delay_commands_changed.flush()
- case Start(dirs, name, args) if prover.isEmpty =>
+ case Start(args) if prover.isEmpty =>
if (phase == Session.Inactive || phase == Session.Failed) {
phase = Session.Startup
-
- // FIXME static init in main constructor
- val content = Build.session_content(dirs, name).check_errors
- thy_load.register_thys(content.loaded_theories)
- base_syntax = content.syntax
-
prover = Some(new Isabelle_Process(receiver.invoke _, args) with Protocol)
}
@@ -440,8 +429,8 @@
/* actions */
- def start(dirs: List[Path], name: String, args: List[String])
- { session_actor ! Start(dirs, name, args) }
+ def start(args: List[String])
+ { session_actor ! Start(args) }
def stop() { commands_changed_buffer !? Stop; change_parser !? Stop; session_actor !? Stop }
--- a/src/Pure/Thy/thy_header.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_header.ML Wed Aug 22 23:23:48 2012 +0200
@@ -6,13 +6,13 @@
signature THY_HEADER =
sig
+ type keywords = (string * Keyword.spec option) list
type header =
{name: string,
imports: string list,
- keywords: (string * Keyword.spec option) list,
+ keywords: keywords,
uses: (Path.T * bool) list}
- val make: string -> string list -> (string * Keyword.spec option) list ->
- (Path.T * bool) list -> header
+ val make: string -> string list -> keywords -> (Path.T * bool) list -> header
val define_keywords: header -> unit
val declare_keyword: string * Keyword.spec option -> theory -> theory
val the_keyword: theory -> string -> Keyword.spec option
@@ -23,10 +23,12 @@
structure Thy_Header: THY_HEADER =
struct
+type keywords = (string * Keyword.spec option) list;
+
type header =
{name: string,
imports: string list,
- keywords: (string * Keyword.spec option) list,
+ keywords: keywords,
uses: (Path.T * bool) list};
fun make name imports keywords uses : header =
@@ -82,7 +84,7 @@
local
-val file_name = Parse.group (fn () => "file name") Parse.path;
+val file_name = Parse.group (fn () => "file name") Parse.path >> Path.explode;
val theory_name = Parse.group (fn () => "theory name") Parse.name;
val opt_files =
--- a/src/Pure/Thy/thy_header.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_header.scala Wed Aug 22 23:23:48 2012 +0200
@@ -106,13 +106,6 @@
def read(source: CharSequence): Thy_Header =
read(new CharSequenceReader(source))
- def read(file: JFile): Thy_Header =
- {
- val reader = Scan.byte_reader(file)
- try { read(reader).map(Standard_System.decode_permissive_utf8) }
- finally { reader.close }
- }
-
/* keywords */
--- a/src/Pure/Thy/thy_info.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_info.ML Wed Aug 22 23:23:48 2012 +0200
@@ -220,7 +220,7 @@
fun required_by _ [] = ""
| required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")";
-fun load_thy initiators update_time deps text name parents =
+fun load_thy initiators update_time deps text name uses keywords parents =
let
val _ = kill_thy name;
val _ = Output.urgent_message ("Loading theory " ^ quote name ^ required_by " " initiators);
@@ -228,7 +228,6 @@
val {master = (thy_path, _), imports} = deps;
val dir = Path.dir thy_path;
val pos = Path.position thy_path;
- val {uses, keywords, ...} = Thy_Header.read pos text;
val header = Thy_Header.make name imports keywords uses;
val (theory, present) =
@@ -237,48 +236,49 @@
fun commit () = update_thy deps theory;
in (theory, present, commit) end;
-fun check_deps dir name =
+fun check_deps base_keywords dir name =
(case lookup_deps name of
- SOME NONE => (true, NONE, get_imports name)
+ SOME NONE => (true, NONE, get_imports name, [], [])
| NONE =>
- let val {master, text, imports, ...} = Thy_Load.check_thy dir name
- in (false, SOME (make_deps master imports, text), imports) end
+ let val {master, text, imports, keywords, uses} = Thy_Load.check_thy base_keywords dir name
+ in (false, SOME (make_deps master imports, text), imports, uses, keywords) end
| SOME (SOME {master, ...}) =>
let
- val {master = master', text = text', imports = imports', ...} = Thy_Load.check_thy dir name;
+ val {master = master', text = text', imports = imports', uses = uses', keywords = keywords'}
+ = Thy_Load.check_thy base_keywords dir name;
val deps' = SOME (make_deps master' imports', text');
val current =
#2 master = #2 master' andalso
(case lookup_theory name of
NONE => false
- | SOME theory => Thy_Load.all_current theory);
- in (current, deps', imports') end);
+ | SOME theory => Thy_Load.load_current theory);
+ in (current, deps', imports', uses', keywords') end);
in
-fun require_thys initiators dir strs tasks =
- fold_map (require_thy initiators dir) strs tasks |>> forall I
-and require_thy initiators dir str tasks =
+fun require_thys initiators dir strs result =
+ fold_map (require_thy initiators dir) strs result |>> forall I
+and require_thy initiators dir str (base_keywords, tasks) =
let
val path = Path.expand (Path.explode str);
val name = Path.implode (Path.base path);
in
(case try (Graph.get_node tasks) name of
- SOME task => (task_finished task, tasks)
+ SOME task => (task_finished task, (base_keywords, tasks))
| NONE =>
let
val dir' = Path.append dir (Path.dir path);
val _ = member (op =) initiators name andalso error (cycle_msg initiators);
- val (current, deps, imports) = check_deps dir' name
+ val (current, deps, imports, uses, keywords) = check_deps base_keywords dir' name
handle ERROR msg => cat_error msg
(loader_msg "the error(s) above occurred while examining theory" [name] ^
required_by "\n" initiators);
val parents = map base_name imports;
- val (parents_current, tasks') =
+ val (parents_current, (base_keywords', tasks')) =
require_thys (name :: initiators)
- (Path.append dir (master_dir (Option.map #1 deps))) imports tasks;
+ (Path.append dir (master_dir (Option.map #1 deps))) imports (base_keywords, tasks);
val all_current = current andalso parents_current;
val task =
@@ -287,9 +287,14 @@
(case deps of
NONE => raise Fail "Malformed deps"
| SOME (dep, text) =>
- let val update_time = serial ()
- in Task (parents, load_thy initiators update_time dep text name) end);
- in (all_current, new_entry name parents task tasks') end)
+ let
+ val update_time = serial ();
+ val load = load_thy initiators update_time dep text name uses keywords;
+ in Task (parents, load) end);
+
+ val base_keywords'' = keywords @ base_keywords';
+ val tasks'' = new_entry name parents task tasks';
+ in (all_current, (base_keywords'', tasks'')) end)
end;
end;
@@ -298,7 +303,7 @@
(* use_thy *)
fun use_thys_wrt dir arg =
- schedule_tasks (snd (require_thys [] dir arg Graph.empty));
+ schedule_tasks (snd (snd (require_thys [] dir arg ([], Graph.empty))));
val use_thys = use_thys_wrt Path.current;
val use_thy = use_thys o single;
@@ -306,14 +311,14 @@
(* toplevel begin theory -- without maintaining database *)
-fun toplevel_begin_theory dir (header: Thy_Header.header) =
+fun toplevel_begin_theory master_dir (header: Thy_Header.header) =
let
val {name, imports, ...} = header;
val _ = kill_thy name;
- val _ = use_thys_wrt dir imports;
+ val _ = use_thys_wrt master_dir imports;
val _ = Thy_Header.define_keywords header;
val parents = map (get_theory o base_name) imports;
- in Thy_Load.begin_theory dir header parents end;
+ in Thy_Load.begin_theory master_dir header parents end;
(* register theory *)
@@ -321,7 +326,7 @@
fun register_thy theory =
let
val name = Context.theory_name theory;
- val {master, ...} = Thy_Load.check_thy (Thy_Load.master_directory theory) name;
+ val {master, ...} = Thy_Load.check_thy [] (Thy_Load.master_directory theory) name;
val imports = Thy_Load.imports_of theory;
in
NAMED_CRITICAL "Thy_Info" (fn () =>
--- a/src/Pure/Thy/thy_info.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_info.scala Wed Aug 22 23:23:48 2012 +0200
@@ -25,39 +25,60 @@
/* dependencies */
type Dep = (Document.Node.Name, Document.Node.Header)
- private type Required = (List[Dep], Set[Document.Node.Name])
+
+ object Dependencies
+ {
+ val empty = new Dependencies(Nil, Nil, Set.empty)
+ }
+
+ final class Dependencies private(
+ rev_deps: List[Dep],
+ val keywords: Thy_Header.Keywords,
+ val seen: Set[Document.Node.Name])
+ {
+ def :: (dep: Dep): Dependencies =
+ new Dependencies(dep :: rev_deps, dep._2.keywords ::: keywords, seen)
+
+ def + (name: Document.Node.Name): Dependencies =
+ new Dependencies(rev_deps, keywords, seen = seen + name)
+
+ def deps: List[Dep] = rev_deps.reverse
+
+ def loaded_theories: Set[String] =
+ (thy_load.loaded_theories /: rev_deps) { case (loaded, (name, _)) => loaded + name.theory }
+
+ def make_syntax: Outer_Syntax = thy_load.base_syntax.add_keywords(keywords)
+ }
private def require_thys(initiators: List[Document.Node.Name],
- required: Required, names: List[Document.Node.Name]): Required =
+ required: Dependencies, names: List[Document.Node.Name]): Dependencies =
(required /: names)(require_thy(initiators, _, _))
private def require_thy(initiators: List[Document.Node.Name],
- required: Required, name: Document.Node.Name): Required =
+ required: Dependencies, name: Document.Node.Name): Dependencies =
{
- val (deps, seen) = required
- if (seen(name)) required
- else if (thy_load.is_loaded(name.theory)) (deps, seen + name)
+ if (required.seen(name)) required
+ else if (thy_load.loaded_theories(name.theory)) required + name
else {
try {
if (initiators.contains(name)) error(cycle_msg(initiators))
+ val syntax = required.make_syntax
val header =
- try { thy_load.check_thy(name) }
+ try { thy_load.check_thy_files(syntax, name) }
catch {
case ERROR(msg) =>
cat_error(msg, "The error(s) above occurred while examining theory " +
quote(name.theory) + required_by(initiators))
}
- val (deps1, seen1) =
- require_thys(name :: initiators, (deps, seen + name), header.imports)
- ((name, header) :: deps1, seen1)
+ (name, header) :: require_thys(name :: initiators, required + name, header.imports)
}
catch {
case e: Throwable =>
- ((name, Document.Node.bad_header(Exn.message(e))) :: deps, seen + name)
+ (name, Document.Node.bad_header(Exn.message(e))) :: (required + name)
}
}
}
- def dependencies(names: List[Document.Node.Name]): List[Dep] =
- require_thys(Nil, (Nil, Set.empty), names)._1.reverse
+ def dependencies(names: List[Document.Node.Name]): Dependencies =
+ require_thys(Nil, Dependencies.empty, names)
}
--- a/src/Pure/Thy/thy_load.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_load.ML Wed Aug 22 23:23:48 2012 +0200
@@ -1,7 +1,7 @@
(* Title: Pure/Thy/thy_load.ML
Author: Makarius
-Loading files that contribute to a theory. Global master path.
+Loading files that contribute to a theory. Global master path for TTY loop.
*)
signature THY_LOAD =
@@ -11,18 +11,18 @@
val provide: Path.T * (Path.T * SHA1.digest) -> theory -> theory
val check_file: Path.T -> Path.T -> Path.T
val thy_path: Path.T -> Path.T
- val check_thy: Path.T -> string ->
- {master: Path.T * SHA1.digest, text: string, imports: string list, uses: (Path.T * bool) list}
+ val check_thy: Thy_Header.keywords -> Path.T -> string ->
+ {master: Path.T * SHA1.digest, text: string, imports: string list,
+ uses: (Path.T * bool) list, keywords: Thy_Header.keywords}
val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
val use_file: Path.T -> theory -> string * theory
val loaded_files: theory -> Path.T list
- val all_current: theory -> bool
+ val load_current: theory -> bool
val use_ml: Path.T -> unit
val exec_ml: Path.T -> generic_theory -> generic_theory
val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
val load_thy: int -> Path.T -> Thy_Header.header -> Position.T -> string ->
theory list -> theory * unit future
- val resolve_files: Path.T -> Thy_Syntax.span -> Thy_Syntax.span
val parse_files: string -> (theory -> Token.files) parser
val set_master_path: Path.T -> unit
val get_master_path: unit -> Path.T
@@ -36,62 +36,135 @@
type files =
{master_dir: Path.T, (*master directory of theory source*)
imports: string list, (*source specification of imports*)
- required: Path.T list, (*source path*)
provided: (Path.T * (Path.T * SHA1.digest)) list}; (*source path, physical path, digest*)
-fun make_files (master_dir, imports, required, provided): files =
- {master_dir = master_dir, imports = imports, required = required, provided = provided};
+fun make_files (master_dir, imports, provided): files =
+ {master_dir = master_dir, imports = imports, provided = provided};
structure Files = Theory_Data
(
type T = files;
- val empty = make_files (Path.current, [], [], []);
+ val empty = make_files (Path.current, [], []);
fun extend _ = empty;
fun merge _ = empty;
);
fun map_files f =
- Files.map (fn {master_dir, imports, required, provided} =>
- make_files (f (master_dir, imports, required, provided)));
+ Files.map (fn {master_dir, imports, provided} =>
+ make_files (f (master_dir, imports, provided)));
val master_directory = #master_dir o Files.get;
val imports_of = #imports o Files.get;
-fun put_deps dir imports = map_files (fn _ => (dir, imports, [], []));
-
-fun require src_path =
- map_files (fn (master_dir, imports, required, provided) =>
- if member (op =) required src_path then
- error ("Duplicate source file dependency: " ^ Path.print src_path)
- else (master_dir, imports, src_path :: required, provided));
+fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, []));
fun provide (src_path, path_id) =
- map_files (fn (master_dir, imports, required, provided) =>
+ map_files (fn (master_dir, imports, provided) =>
if AList.defined (op =) provided src_path then
- error ("Duplicate resolution of source file dependency: " ^ Path.print src_path)
- else (master_dir, imports, required, (src_path, path_id) :: provided));
+ error ("Duplicate use of source file: " ^ Path.print src_path)
+ else (master_dir, imports, (src_path, path_id) :: provided));
+
+
+(* inlined files *)
+
+fun check_file dir file = File.check_file (File.full_path dir file);
+
+local
+
+fun clean ((i1, t1) :: (i2, t2) :: toks) =
+ if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks
+ else (i1, t1) :: clean ((i2, t2) :: toks)
+ | clean toks = toks;
+
+fun clean_tokens toks =
+ ((0 upto length toks - 1) ~~ toks)
+ |> filter (fn (_, tok) => Token.is_proper tok)
+ |> clean;
+
+fun find_file toks =
+ rev (clean_tokens toks) |> get_first (fn (i, tok) =>
+ if Token.is_name tok then
+ SOME (i, Path.explode (Token.content_of tok))
+ handle ERROR msg => error (msg ^ Token.pos_of tok)
+ else NONE);
+
+fun command_files path exts =
+ if null exts then [path]
+ else map (fn ext => Path.ext ext path) exts;
+
+in
+
+fun find_files syntax text =
+ let val thy_load_commands = Keyword.thy_load_commands syntax in
+ if exists (fn (cmd, _) => String.isSubstring cmd text) thy_load_commands then
+ Thy_Syntax.parse_tokens (Keyword.lexicons_of syntax) Position.none text
+ |> Thy_Syntax.parse_spans
+ |> maps
+ (fn Thy_Syntax.Span (Thy_Syntax.Command (cmd, _), toks) =>
+ (case AList.lookup (op =) thy_load_commands cmd of
+ SOME exts =>
+ (case find_file toks of
+ SOME (_, path) => command_files path exts
+ | NONE => [])
+ | NONE => [])
+ | _ => [])
+ else []
+ end;
+
+fun read_files cmd dir path =
+ let
+ val files =
+ command_files path (Keyword.command_files cmd)
+ |> map (check_file dir #> (fn p => (File.read p, Path.position p)));
+ in (dir, files) end;
+
+fun parse_files cmd =
+ Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
+ (case Token.get_files tok of
+ SOME files => files
+ | NONE => read_files cmd (master_directory thy) (Path.explode name)));
+
+fun resolve_files master_dir span =
+ (case span of
+ Thy_Syntax.Span (Thy_Syntax.Command (cmd, pos), toks) =>
+ if Keyword.is_theory_load cmd then
+ (case find_file toks of
+ NONE => error ("Bad file argument of command " ^ quote cmd ^ Position.str_of pos)
+ | SOME (i, path) =>
+ let
+ val toks' = toks |> map_index (fn (j, tok) =>
+ if i = j then Token.put_files (read_files cmd master_dir path) tok
+ else tok);
+ in Thy_Syntax.Span (Thy_Syntax.Command (cmd, pos), toks') end)
+ else span
+ | span => span);
+
+end;
(* check files *)
-fun check_file dir file = File.check_file (File.full_path dir file);
-
val thy_path = Path.ext "thy";
-fun check_thy dir name =
+fun check_thy base_keywords dir thy_name =
let
- val path = thy_path (Path.basic name);
+ val path = thy_path (Path.basic thy_name);
val master_file = check_file dir path;
val text = File.read master_file;
- val (name', imports, uses) =
- if name = Context.PureN then (Context.PureN, [], [])
- else
- let val {name, imports, uses, ...} = Thy_Header.read (Path.position master_file) text
- in (name, imports, uses) end;
- val _ = name <> name' andalso
- error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name');
- in {master = (master_file, SHA1.digest text), text = text, imports = imports, uses = uses} end;
+
+ val {name, imports, uses, keywords} = Thy_Header.read (Path.position master_file) text;
+ val _ = thy_name <> name andalso
+ error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name);
+
+ val syntax =
+ fold (Keyword.define_keywords o apsnd (Option.map Keyword.spec))
+ (keywords @ base_keywords) (Keyword.get_keywords ());
+ val more_uses = map (rpair false) (find_files syntax text);
+ in
+ {master = (master_file, SHA1.digest text), text = text,
+ imports = imports, uses = uses @ more_uses, keywords = keywords}
+ end;
(* load files *)
@@ -111,30 +184,12 @@
val loaded_files = map (#1 o #2) o #provided o Files.get;
-fun check_loaded thy =
- let
- val {required, provided, ...} = Files.get thy;
- val provided_paths = map #1 provided;
- val _ =
- (case subtract (op =) provided_paths required of
- [] => NONE
- | bad => error ("Pending source file dependencies: " ^ commas (map Path.print (rev bad))));
- val _ =
- (case subtract (op =) required provided_paths of
- [] => NONE
- | bad => error ("Undeclared source file dependencies: " ^ commas (map Path.print (rev bad))));
- in () end;
-
-fun all_current thy =
- let
- val {provided, ...} = Files.get thy;
- fun current (src_path, (_, id)) =
+fun load_current thy =
+ #provided (Files.get thy) |>
+ forall (fn (src_path, (_, id)) =>
(case try (load_file thy) src_path of
NONE => false
- | SOME ((_, id'), _) => id = id');
- in can check_loaded thy andalso forall current provided end;
-
-val _ = Context.>> (Context.map_theory (Theory.at_end (fn thy => (check_loaded thy; NONE))));
+ | SOME ((_, id'), _) => id = id'));
(* provide files *)
@@ -160,58 +215,12 @@
fun exec_ml src_path = ML_Context.exec (fn () => use_ml src_path);
-(* inlined files *)
-
-fun read_files cmd dir tok =
- let
- val path = Path.explode (Token.content_of tok);
- val exts = Keyword.command_files cmd;
- val variants =
- if null exts then [path]
- else map (fn ext => Path.ext ext path) exts;
- in (dir, variants |> map (Path.append dir #> (fn p => (File.read p, Path.position p)))) end;
-
-
-fun clean ((i1, t1) :: (i2, t2) :: toks) =
- if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks
- else (i1, t1) :: clean ((i2, t2) :: toks)
- | clean toks = toks;
-
-fun clean_tokens toks =
- ((0 upto length toks - 1) ~~ toks)
- |> filter (fn (_, tok) => Token.is_proper tok) |> clean;
-
-fun resolve_files dir (span as Thy_Syntax.Span (Thy_Syntax.Command cmd, toks)) =
- if Keyword.is_theory_load cmd then
- (case find_first (Token.is_name o #2) (rev (clean_tokens toks)) of
- NONE => span
- | SOME (i, _) =>
- let
- val toks' = toks |> map_index (fn (j, tok) =>
- if i = j then Token.put_files (read_files cmd dir tok) tok
- else tok);
- in Thy_Syntax.Span (Thy_Syntax.Command cmd, toks') end)
- else span
- | resolve_files _ span = span;
-
-fun parse_files cmd =
- Scan.ahead Parse.not_eof -- Parse.group (fn () => "file name/path specification") Parse.name
- >> (fn (tok, name) => fn thy =>
- (case Token.get_files tok of
- SOME files => files
- | NONE =>
- (warning ("Dynamic loading of files: " ^ quote name ^
- Position.str_of (Token.position_of tok));
- read_files cmd (master_directory thy) tok)));
-
-
(* load_thy *)
-fun begin_theory dir {name, imports, keywords, uses} parents =
+fun begin_theory master_dir {name, imports, keywords, uses} parents =
Theory.begin_theory name parents
- |> put_deps dir imports
+ |> put_deps master_dir imports
|> fold Thy_Header.declare_keyword keywords
- |> fold (require o fst) uses
|> fold (fn (path, true) => Context.theory_map (exec_ml path) o Theory.checkpoint | _ => I) uses
|> Theory.checkpoint;
@@ -235,7 +244,7 @@
val thy = Toplevel.end_theory end_pos end_state;
in (flat results, thy) end;
-fun load_thy update_time dir header pos text parents =
+fun load_thy update_time master_dir header pos text parents =
let
val time = ! Toplevel.timing;
@@ -243,13 +252,13 @@
val _ = Thy_Header.define_keywords header;
val _ = Present.init_theory name;
fun init () =
- begin_theory dir header parents
- |> Present.begin_theory update_time dir uses;
+ begin_theory master_dir header parents
+ |> Present.begin_theory update_time master_dir uses;
val lexs = Keyword.get_lexicons ();
val toks = Thy_Syntax.parse_tokens lexs pos text;
- val spans = map (resolve_files dir) (Thy_Syntax.parse_spans toks);
+ val spans = map (resolve_files master_dir) (Thy_Syntax.parse_spans toks);
val elements = Thy_Syntax.parse_elements spans;
val _ = Present.theory_source name
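A minimal standalone sketch (not part of the changeset; object and example names are hypothetical) of the command_files rule introduced in thy_load.ML above: a theory-load command that declares no file extensions takes its path argument as-is, otherwise one file per declared extension is expected.

  object CommandFiles {
    def commandFiles(path: String, exts: List[String]): List[String] =
      if (exts.isEmpty) List(path)             // e.g. ML_file "simpdata.ML"
      else exts.map(ext => path + "." + ext)   // e.g. a command declaring extensions ("ML", "thy")

    def main(args: Array[String]): Unit = {
      println(commandFiles("simpdata.ML", Nil))         // List(simpdata.ML)
      println(commandFiles("theory_spec", List("ML")))  // List(theory_spec.ML)
    }
  }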
--- a/src/Pure/Thy/thy_load.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_load.scala Wed Aug 22 23:23:48 2012 +0200
@@ -7,6 +7,8 @@
package isabelle
+import scala.annotation.tailrec
+
import java.io.{File => JFile}
@@ -20,32 +22,18 @@
}
-class Thy_Load(preloaded: Set[String] = Set.empty)
+class Thy_Load(val loaded_theories: Set[String] = Set.empty, val base_syntax: Outer_Syntax)
{
- /* loaded theories provided by prover */
-
- private var loaded_theories: Set[String] = preloaded
-
- def register_thy(name: String): Unit =
- synchronized { loaded_theories += name }
-
- def register_thys(names: Set[String]): Unit =
- synchronized { loaded_theories ++= names }
-
- def is_loaded(thy_name: String): Boolean =
- synchronized { loaded_theories.contains(thy_name) }
-
-
/* file-system operations */
def append(dir: String, source_path: Path): String =
(Path.explode(dir) + source_path).expand.implode
- def read_header(name: Document.Node.Name): Thy_Header =
+ def with_thy_text[A](name: Document.Node.Name, f: CharSequence => A): A =
{
val path = Path.explode(name.node)
if (!path.is_file) error("No such file: " + path.toString)
- Thy_Header.read(path.file)
+ f(File.read(path))
}
@@ -54,7 +42,7 @@
private def import_name(dir: String, s: String): Document.Node.Name =
{
val theory = Thy_Header.base_name(s)
- if (is_loaded(theory)) Document.Node.Name(theory, "", theory)
+ if (loaded_theories(theory)) Document.Node.Name(theory, "", theory)
else {
val path = Path.explode(s)
val node = append(dir, Thy_Load.thy_path(path))
@@ -63,19 +51,65 @@
}
}
- def check_header(name: Document.Node.Name, header: Thy_Header): Document.Node.Header =
+ private def find_file(tokens: List[Token]): Option[String] =
+ {
+ def clean(toks: List[Token]): List[Token] =
+ toks match {
+ case t :: _ :: ts if t.is_keyword && (t.source == "%" || t.source == "--") => clean(ts)
+ case t :: ts => t :: clean(ts)
+ case Nil => Nil
+ }
+ val clean_tokens = clean(tokens.filter(_.is_proper))
+ clean_tokens.reverse.find(_.is_name).map(_.content)
+ }
+
+ def find_files(syntax: Outer_Syntax, text: String): List[String] =
{
+ val thy_load_commands = syntax.thy_load_commands
+ if (thy_load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })) {
+ val spans = Thy_Syntax.parse_spans(syntax.scan(text))
+ spans.iterator.map({
+ case toks @ (tok :: _) if tok.is_command =>
+ thy_load_commands.find({ case (cmd, _) => cmd == tok.content }) match {
+ case Some((_, exts)) =>
+ find_file(toks) match {
+ case Some(file) =>
+ if (exts.isEmpty) List(file)
+ else exts.map(ext => file + "." + ext)
+ case None => Nil
+ }
+ case None => Nil
+ }
+ case _ => Nil
+ }).flatten.toList
+ }
+ else Nil
+ }
+
+ def check_thy_text(name: Document.Node.Name, text: CharSequence): Document.Node.Header =
+ {
+ val header = Thy_Header.read(text)
+
val name1 = header.name
- val imports = header.imports.map(import_name(name.dir, _))
- // FIXME val uses = header.uses.map(p => (append(name.dir, Path.explode(p._1)), p._2))
- val uses = header.uses
if (name.theory != name1)
error("Bad file name " + Thy_Load.thy_path(Path.basic(name.theory)) +
" for theory " + quote(name1))
+
+ val imports = header.imports.map(import_name(name.dir, _))
+ val uses = header.uses
Document.Node.Header(imports, header.keywords, uses)
}
def check_thy(name: Document.Node.Name): Document.Node.Header =
- check_header(name, read_header(name))
+ with_thy_text(name, check_thy_text(name, _))
+
+ def check_thy_files(syntax: Outer_Syntax, name: Document.Node.Name): Document.Node.Header =
+ with_thy_text(name, text =>
+ {
+ val string = text.toString
+ val header = check_thy_text(name, string)
+ val more_uses = find_files(syntax.add_keywords(header.keywords), string)
+ header.copy(uses = header.uses ::: more_uses.map((_, false)))
+ })
}
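A minimal sketch of the find_file strategy used above, with a hypothetical Tok model standing in for Isabelle's Token class: "%" and "--" annotations are dropped together with their argument, and the last remaining name token is taken as the file argument.

  sealed trait Tok
  case class Keyword(source: String) extends Tok
  case class Name(content: String) extends Tok

  object FindFile {
    // drop "%"/"--" markers together with the token that follows them
    def clean(toks: List[Tok]): List[Tok] =
      toks match {
        case Keyword(s) :: _ :: ts if s == "%" || s == "--" => clean(ts)
        case t :: ts => t :: clean(ts)
        case Nil => Nil
      }

    // the file argument is the last name token of the cleaned span
    def findFile(toks: List[Tok]): Option[String] =
      clean(toks).reverse.collectFirst { case Name(c) => c }

    def main(args: Array[String]): Unit =
      println(findFile(List(Name("prover.ML"), Keyword("--"), Name("comment"))))  // Some(prover.ML)
  }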
--- a/src/Pure/Thy/thy_syntax.ML Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_syntax.ML Wed Aug 22 23:23:48 2012 +0200
@@ -9,7 +9,7 @@
val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list
val reports_of_tokens: Token.T list -> bool * (Position.report * string) list
val present_token: Token.T -> Output.output
- datatype span_kind = Command of string | Ignored | Malformed
+ datatype span_kind = Command of string * Position.T | Ignored | Malformed
datatype span = Span of span_kind * Token.T list
val span_kind: span -> span_kind
val span_content: span -> Token.T list
@@ -100,7 +100,7 @@
(* type span *)
-datatype span_kind = Command of string | Ignored | Malformed;
+datatype span_kind = Command of string * Position.T | Ignored | Malformed;
datatype span = Span of span_kind * Token.T list;
fun span_kind (Span (k, _)) = k;
@@ -115,7 +115,7 @@
fun make_span toks =
if not (null toks) andalso Token.is_command (hd toks) then
- Span (Command (Token.content_of (hd toks)), toks)
+ Span (Command (Token.content_of (hd toks), Token.position_of (hd toks)), toks)
else if forall (not o Token.is_proper) toks then Span (Ignored, toks)
else Span (Malformed, toks);
@@ -142,9 +142,9 @@
(* scanning spans *)
-val eof = Span (Command "", []);
+val eof = Span (Command ("", Position.none), []);
-fun is_eof (Span (Command "", _)) = true
+fun is_eof (Span (Command ("", _), _)) = true
| is_eof _ = false;
val not_eof = not o is_eof;
@@ -156,7 +156,8 @@
local
-fun command_with pred = Scan.one (fn (Span (Command name, _)) => pred name | _ => false);
+fun command_with pred =
+ Scan.one (fn (Span (Command (name, _), _)) => pred name | _ => false);
val proof = Scan.pass 1 (Scan.repeat (Scan.depend (fn d =>
if d <= 0 then Scan.fail
--- a/src/Pure/Thy/thy_syntax.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Pure/Thy/thy_syntax.scala Wed Aug 22 23:23:48 2012 +0200
@@ -150,7 +150,9 @@
val syntax =
if (previous.is_init || updated_keywords)
- (base_syntax /: nodes.entries) { case (syn, (_, node)) => syn.add_keywords(node.header) }
+ (base_syntax /: nodes.entries) {
+ case (syn, (_, node)) => syn.add_keywords(node.header.keywords)
+ }
else previous.syntax
val reparse =
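The header change above means the outer syntax is rebuilt by folding each node's declared keywords onto the base syntax whenever keywords change. A schematic version of that left fold, with hypothetical Syntax and Header types in place of the Isabelle classes:

  case class Header(keywords: List[String])
  case class Syntax(keywords: Set[String]) {
    def addKeywords(ks: List[String]): Syntax = Syntax(keywords ++ ks)
  }

  object RebuildSyntax {
    // (base_syntax /: nodes.entries) { ... add_keywords ... } in the patch is this fold
    def rebuild(base: Syntax, headers: List[Header]): Syntax =
      headers.foldLeft(base)((syn, h) => syn.addKeywords(h.keywords))
  }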
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/pure_syn.ML Wed Aug 22 23:23:48 2012 +0200
@@ -0,0 +1,35 @@
+(* Title: Pure/pure_syn.ML
+ Author: Makarius
+
+Minimal outer syntax for bootstrapping Pure.
+*)
+
+structure Pure_Syn: sig end =
+struct
+
+val _ =
+ Outer_Syntax.command
+ (("theory", Keyword.tag_theory Keyword.thy_begin), Position.none) "begin theory context"
+ (Thy_Header.args >> (fn header =>
+ Toplevel.print o
+ Toplevel.init_theory
+ (fn () => Thy_Info.toplevel_begin_theory (Thy_Load.get_master_path ()) header)));
+
+val _ =
+ Outer_Syntax.command
+ (("ML_file", Keyword.tag_ml Keyword.thy_load), Position.none) "ML text from file"
+ (Scan.ahead Parse.path -- Thy_Load.parse_files "ML_file"
+ >> (fn (name, files) => Toplevel.generic_theory (fn gthy =>
+ let
+ val src_path = Path.explode name;
+ val (dir, [(txt, pos)]) = files (Context.theory_of gthy);
+ val provide = Thy_Load.provide (src_path, (File.full_path dir src_path, SHA1.digest txt));
+ in
+ gthy
+ |> ML_Context.exec (fn () => ML_Context.eval_text true pos txt)
+ |> Local_Theory.propagate_ml_env
+ |> Context.mapping provide (Local_Theory.background_theory provide)
+ end)));
+
+end;
+
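The ML_file command defined above evaluates the file in the theory's ML context and records it via Thy_Load.provide together with a SHA-1 digest, so that load_current can later detect stale files by re-reading and comparing digests. A plain-JVM sketch of that digest bookkeeping (assumed helper names, not the Isabelle API):

  import java.nio.file.{Files, Paths}
  import java.security.MessageDigest

  object ProvideSketch {
    def sha1(path: String): String = {
      val bytes = Files.readAllBytes(Paths.get(path))
      MessageDigest.getInstance("SHA-1").digest(bytes).map(b => f"${b & 0xff}%02x").mkString
    }

    // provided: source path -> digest recorded when the file was loaded
    def loadCurrent(provided: Map[String, String]): Boolean =
      provided.forall { case (path, digest) =>
        try sha1(path) == digest catch { case _: Exception => false }
      }
  }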
--- a/src/Sequents/LK.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Sequents/LK.thy Wed Aug 22 23:23:48 2012 +0200
@@ -14,7 +14,6 @@
theory LK
imports LK0
-uses ("simpdata.ML")
begin
axiomatization where
@@ -215,7 +214,7 @@
apply (tactic {* fast_tac (LK_pack add_safes @{thms subst}) 1 *})
done
-use "simpdata.ML"
+ML_file "simpdata.ML"
setup {* Simplifier.map_simpset_global (K LK_ss) *}
--- a/src/Sequents/Modal0.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Sequents/Modal0.thy Wed Aug 22 23:23:48 2012 +0200
@@ -5,9 +5,10 @@
theory Modal0
imports LK0
-uses "modal.ML"
begin
+ML_file "modal.ML"
+
consts
box :: "o=>o" ("[]_" [50] 50)
dia :: "o=>o" ("<>_" [50] 50)
--- a/src/Sequents/Sequents.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Sequents/Sequents.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,7 +7,6 @@
theory Sequents
imports Pure
-uses ("prover.ML")
begin
setup Pure_Thy.old_appl_syntax_setup
@@ -142,7 +141,7 @@
parse_translation {* [(@{syntax_const "_Side"}, side_tr)] *}
-use "prover.ML"
+ML_file "prover.ML"
end
--- a/src/Tools/Adhoc_Overloading.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/Adhoc_Overloading.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,9 @@
theory Adhoc_Overloading
imports Pure
-uses "adhoc_overloading.ML"
begin
+ML_file "adhoc_overloading.ML"
setup Adhoc_Overloading.setup
end
--- a/src/Tools/Code_Generator.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/Code_Generator.thy Wed Aug 22 23:23:48 2012 +0200
@@ -12,22 +12,20 @@
"code_const" "code_reserved" "code_include" "code_modulename"
"code_abort" "code_monad" "code_reflect" :: thy_decl and
"datatypes" "functions" "module_name" "file" "checking"
-uses
- "~~/src/Tools/value.ML"
- "~~/src/Tools/cache_io.ML"
- "~~/src/Tools/Code/code_preproc.ML"
- "~~/src/Tools/Code/code_thingol.ML"
- "~~/src/Tools/Code/code_simp.ML"
- "~~/src/Tools/Code/code_printer.ML"
- "~~/src/Tools/Code/code_target.ML"
- "~~/src/Tools/Code/code_namespace.ML"
- "~~/src/Tools/Code/code_ml.ML"
- "~~/src/Tools/Code/code_haskell.ML"
- "~~/src/Tools/Code/code_scala.ML"
- ("~~/src/Tools/Code/code_runtime.ML")
- ("~~/src/Tools/nbe.ML")
begin
+ML_file "~~/src/Tools/value.ML"
+ML_file "~~/src/Tools/cache_io.ML"
+ML_file "~~/src/Tools/Code/code_preproc.ML"
+ML_file "~~/src/Tools/Code/code_thingol.ML"
+ML_file "~~/src/Tools/Code/code_simp.ML"
+ML_file "~~/src/Tools/Code/code_printer.ML"
+ML_file "~~/src/Tools/Code/code_target.ML"
+ML_file "~~/src/Tools/Code/code_namespace.ML"
+ML_file "~~/src/Tools/Code/code_ml.ML"
+ML_file "~~/src/Tools/Code/code_haskell.ML"
+ML_file "~~/src/Tools/Code/code_scala.ML"
+
setup {*
Value.setup
#> Code_Preproc.setup
@@ -65,9 +63,8 @@
by rule (rule holds)+
qed
-use "~~/src/Tools/Code/code_runtime.ML"
-use "~~/src/Tools/nbe.ML"
-
+ML_file "~~/src/Tools/Code/code_runtime.ML"
+ML_file "~~/src/Tools/nbe.ML"
setup Nbe.setup
hide_const (open) holds
--- a/src/Tools/WWW_Find/WWW_Find.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/WWW_Find/WWW_Find.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,20 +1,20 @@
theory WWW_Find
imports Pure
-uses
- "unicode_symbols.ML"
- "html_unicode.ML"
- "mime.ML"
- "http_status.ML"
- "http_util.ML"
- "xhtml.ML"
- "socket_util.ML"
- "scgi_req.ML"
- "scgi_server.ML"
- "echo.ML"
- "html_templates.ML"
- "find_theorems.ML"
- "yxml_find_theorems.ML"
begin
+ML_file "unicode_symbols.ML"
+ML_file "html_unicode.ML"
+ML_file "mime.ML"
+ML_file "http_status.ML"
+ML_file "http_util.ML"
+ML_file "xhtml.ML"
+ML_file "socket_util.ML"
+ML_file "scgi_req.ML"
+ML_file "scgi_server.ML"
+ML_file "echo.ML"
+ML_file "html_templates.ML"
+ML_file "find_theorems.ML"
+ML_file "yxml_find_theorems.ML"
+
end
--- a/src/Tools/jEdit/src/document_model.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/jEdit/src/document_model.scala Wed Aug 22 23:23:48 2012 +0200
@@ -68,8 +68,7 @@
Swing_Thread.require()
Isabelle.buffer_lock(buffer) {
Exn.capture {
- Isabelle.thy_load.check_header(name,
- Thy_Header.read(buffer.getSegment(0, buffer.getLength)))
+ Isabelle.thy_load.check_thy_text(name, buffer.getSegment(0, buffer.getLength))
} match {
case Exn.Res(header) => header
case Exn.Exn(exn) => Document.Node.bad_header(Exn.message(exn))
--- a/src/Tools/jEdit/src/isabelle_sidekick.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/jEdit/src/isabelle_sidekick.scala Wed Aug 22 23:23:48 2012 +0200
@@ -177,7 +177,7 @@
class Isabelle_Sidekick_Default extends Isabelle_Sidekick_Structure(
- "isabelle", Isabelle.session.get_recent_syntax, Isabelle.buffer_node_name)
+ "isabelle", Isabelle.get_recent_syntax, Isabelle.buffer_node_name)
class Isabelle_Sidekick_Options extends Isabelle_Sidekick_Structure(
@@ -189,7 +189,7 @@
class Isabelle_Sidekick_Raw
- extends Isabelle_Sidekick("isabelle-raw", Isabelle.session.get_recent_syntax)
+ extends Isabelle_Sidekick("isabelle-raw", Isabelle.get_recent_syntax)
{
override def parser(buffer: Buffer, syntax: Outer_Syntax, data: SideKickParsedData): Boolean =
{
--- a/src/Tools/jEdit/src/jedit_thy_load.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/jEdit/src/jedit_thy_load.scala Wed Aug 22 23:23:48 2012 +0200
@@ -17,7 +17,8 @@
import org.gjt.sp.jedit.View
-class JEdit_Thy_Load extends Thy_Load()
+class JEdit_Thy_Load(loaded_theories: Set[String] = Set.empty, base_syntax: Outer_Syntax)
+ extends Thy_Load(loaded_theories, base_syntax)
{
override def append(dir: String, source_path: Path): String =
{
@@ -32,6 +33,23 @@
}
}
+ override def with_thy_text[A](name: Document.Node.Name, f: CharSequence => A): A =
+ {
+ Swing_Thread.now {
+ Isabelle.jedit_buffer(name.node) match {
+ case Some(buffer) =>
+ Isabelle.buffer_lock(buffer) {
+ Some(f(buffer.getSegment(0, buffer.getLength)))
+ }
+ case None => None
+ }
+ } getOrElse {
+ val file = new JFile(name.node) // FIXME load URL via jEdit VFS (!?)
+ if (!file.exists || !file.isFile) error("No such file: " + quote(file.toString))
+ f(File.read(file))
+ }
+ }
+
def check_file(view: View, path: String): Boolean =
{
val vfs = VFSManager.getVFSForPath(path)
@@ -51,24 +69,5 @@
catch { case _: IOException => }
}
}
-
- override def read_header(name: Document.Node.Name): Thy_Header =
- {
- Swing_Thread.now {
- Isabelle.jedit_buffer(name.node) match {
- case Some(buffer) =>
- Isabelle.buffer_lock(buffer) {
- val text = new Segment
- buffer.getText(0, buffer.getLength, text)
- Some(Thy_Header.read(text))
- }
- case None => None
- }
- } getOrElse {
- val file = new JFile(name.node) // FIXME load URL via jEdit VFS (!?)
- if (!file.exists || !file.isFile) error("No such file: " + quote(file.toString))
- Thy_Header.read(file)
- }
- }
}
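The with_thy_text override above prefers the (possibly unsaved) jEdit buffer content and only falls back to the file on disk. A simplified sketch of that pattern, with a hypothetical openBuffers map in place of the jEdit buffer registry:

  import java.nio.file.{Files, Paths}

  object WithThyText {
    // hypothetical stand-in for the jEdit buffer registry
    val openBuffers = Map.empty[String, CharSequence]

    def withThyText[A](node: String)(f: CharSequence => A): A =
      openBuffers.get(node) match {
        case Some(text) => f(text)  // unsaved editor content wins
        case None => f(new String(Files.readAllBytes(Paths.get(node)), "UTF-8"))
      }
  }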
--- a/src/Tools/jEdit/src/plugin.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/jEdit/src/plugin.scala Wed Aug 22 23:23:48 2012 +0200
@@ -37,8 +37,15 @@
var plugin: Plugin = null
var session: Session = null
- val thy_load = new JEdit_Thy_Load
- val thy_info = new Thy_Info(thy_load)
+ def thy_load(): JEdit_Thy_Load =
+ session.thy_load.asInstanceOf[JEdit_Thy_Load]
+
+ def get_recent_syntax(): Option[Outer_Syntax] =
+ {
+ val current_session = session
+ if (current_session != null) Some(current_session.recent_syntax)
+ else None
+ }
/* properties */
@@ -298,17 +305,22 @@
component
}
- def start_session()
+ def session_args(): List[String] =
{
- val dirs = Path.split(Isabelle_System.getenv("JEDIT_SESSION_DIRS"))
val modes = space_explode(',', Isabelle_System.getenv("JEDIT_PRINT_MODE")).map("-m" + _)
val logic = {
val logic = Property("logic")
if (logic != null && logic != "") logic
else Isabelle.default_logic()
}
- val name = Path.explode(logic).base.implode // FIXME more robust session name
- session.start(dirs, name, modes ::: List(logic))
+ modes ::: List(logic)
+ }
+
+ def session_content(): Build.Session_Content =
+ {
+ val dirs = Path.split(Isabelle_System.getenv("JEDIT_SESSION_DIRS"))
+ val name = Path.explode(session_args().last).base.implode // FIXME more robust
+ Build.session_content(dirs, name).check_errors
}
@@ -359,8 +371,9 @@
for (buffer <- buffers; model <- Isabelle.document_model(buffer))
yield model.name
+ val thy_info = new Thy_Info(Isabelle.thy_load)
// FIXME avoid I/O in Swing thread!?!
- val files = Isabelle.thy_info.dependencies(thys).map(_._1.node).
+ val files = thy_info.dependencies(thys).deps.map(_._1.node).
filter(file => !loaded_buffer(file) && Isabelle.thy_load.check_file(view, file))
if (!files.isEmpty) {
@@ -422,7 +435,7 @@
message match {
case msg: EditorStarted =>
if (Isabelle.Boolean_Property("auto-start"))
- Isabelle.start_session()
+ Isabelle.session.start(Isabelle.session_args())
case msg: BufferUpdate
if msg.getWhat == BufferUpdate.LOADED || msg.getWhat == BufferUpdate.PROPERTIES_CHANGED =>
@@ -459,12 +472,16 @@
}
override def start()
- {
+ { // FIXME more robust error handling
Isabelle.plugin = this
Isabelle.setup_tooltips()
Isabelle_System.init()
Isabelle_System.install_fonts()
- Isabelle.session = new Session(Isabelle.thy_load)
+
+ val content = Isabelle.session_content()
+ val thy_load = new JEdit_Thy_Load(content.loaded_theories, content.syntax)
+ Isabelle.session = new Session(thy_load)
+
SyntaxUtilities.setStyleExtender(new Token_Markup.Style_Extender)
if (ModeProvider.instance.isInstanceOf[ModeProvider])
ModeProvider.instance = new Token_Markup.Mode_Provider(ModeProvider.instance)
--- a/src/Tools/jEdit/src/session_dockable.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/jEdit/src/session_dockable.scala Wed Aug 22 23:23:48 2012 +0200
@@ -134,7 +134,7 @@
}
val nodes_status1 =
(nodes_status /: iterator)({ case (status, (name, node)) =>
- if (Isabelle.thy_load.is_loaded(name.theory)) status
+ if (Isabelle.thy_load.loaded_theories(name.theory)) status
else status + (name -> Protocol.node_status(snapshot.state, snapshot.version, node)) })
if (nodes_status != nodes_status1) {
--- a/src/Tools/jEdit/src/token_markup.scala Tue Aug 21 09:02:29 2012 +0200
+++ b/src/Tools/jEdit/src/token_markup.scala Wed Aug 22 23:23:48 2012 +0200
@@ -224,7 +224,7 @@
/* mode provider */
private val markers = Map(
- "isabelle" -> new Token_Markup.Marker(true, Isabelle.session.get_recent_syntax()),
+ "isabelle" -> new Token_Markup.Marker(true, Isabelle.get_recent_syntax()),
"isabelle-options" -> new Token_Markup.Marker(false, Some(Options.options_syntax)),
"isabelle-root" -> new Token_Markup.Marker(false, Some(Build.root_syntax)))
--- a/src/ZF/ArithSimp.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/ArithSimp.thy Wed Aug 22 23:23:48 2012 +0200
@@ -7,11 +7,13 @@
theory ArithSimp
imports Arith
-uses "~~/src/Provers/Arith/cancel_numerals.ML"
- "~~/src/Provers/Arith/combine_numerals.ML"
- "arith_data.ML"
begin
+ML_file "~~/src/Provers/Arith/cancel_numerals.ML"
+ML_file "~~/src/Provers/Arith/combine_numerals.ML"
+ML_file "arith_data.ML"
+
+
subsection{*Difference*}
lemma diff_self_eq_0 [simp]: "m #- m = 0"
--- a/src/ZF/Bin.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/Bin.thy Wed Aug 22 23:23:48 2012 +0200
@@ -17,7 +17,6 @@
theory Bin
imports Int_ZF Datatype_ZF
-uses ("Tools/numeral_syntax.ML")
begin
consts bin :: i
@@ -104,7 +103,7 @@
syntax
"_Int" :: "xnum_token => i" ("_")
-use "Tools/numeral_syntax.ML"
+ML_file "Tools/numeral_syntax.ML"
setup Numeral_Syntax.setup
--- a/src/ZF/Datatype_ZF.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/Datatype_ZF.thy Wed Aug 22 23:23:48 2012 +0200
@@ -8,9 +8,10 @@
theory Datatype_ZF
imports Inductive_ZF Univ QUniv
keywords "datatype" "codatatype" :: thy_decl
-uses "Tools/datatype_package.ML"
begin
+ML_file "Tools/datatype_package.ML"
+
ML {*
(*Typechecking rules for most datatypes involving univ*)
structure Data_Arg =
--- a/src/ZF/Inductive_ZF.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/Inductive_ZF.thy Wed Aug 22 23:23:48 2012 +0200
@@ -18,13 +18,6 @@
"inductive_cases" :: thy_script and
"domains" "intros" "monos" "con_defs" "type_intros" "type_elims"
"elimination" "induction" "case_eqns" "recursor_eqns"
-uses
- ("ind_syntax.ML")
- ("Tools/cartprod.ML")
- ("Tools/ind_cases.ML")
- ("Tools/inductive_package.ML")
- ("Tools/induct_tacs.ML")
- ("Tools/primrec_package.ML")
begin
lemma def_swap_iff: "a == b ==> a = c \<longleftrightarrow> c = b"
@@ -35,12 +28,12 @@
lemma refl_thin: "!!P. a = a ==> P ==> P" .
-use "ind_syntax.ML"
-use "Tools/ind_cases.ML"
-use "Tools/cartprod.ML"
-use "Tools/inductive_package.ML"
-use "Tools/induct_tacs.ML"
-use "Tools/primrec_package.ML"
+ML_file "ind_syntax.ML"
+ML_file "Tools/ind_cases.ML"
+ML_file "Tools/cartprod.ML"
+ML_file "Tools/inductive_package.ML"
+ML_file "Tools/induct_tacs.ML"
+ML_file "Tools/primrec_package.ML"
setup IndCases.setup
setup DatatypeTactics.setup
--- a/src/ZF/IntArith.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/IntArith.thy Wed Aug 22 23:23:48 2012 +0200
@@ -1,6 +1,5 @@
theory IntArith imports Bin
-uses ("int_arith.ML")
begin
@@ -90,6 +89,6 @@
apply (simp add: zadd_ac)
done
-use "int_arith.ML"
+ML_file "int_arith.ML"
end
--- a/src/ZF/pair.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/pair.thy Wed Aug 22 23:23:48 2012 +0200
@@ -6,9 +6,10 @@
header{*Ordered Pairs*}
theory pair imports upair
-uses "simpdata.ML"
begin
+ML_file "simpdata.ML"
+
setup {*
Simplifier.map_simpset_global
(Simplifier.set_mksimps (K (map mk_eq o ZF_atomize o gen_all))
--- a/src/ZF/upair.thy Tue Aug 21 09:02:29 2012 +0200
+++ b/src/ZF/upair.thy Wed Aug 22 23:23:48 2012 +0200
@@ -14,9 +14,9 @@
theory upair
imports ZF
keywords "print_tcset" :: diag
-uses "Tools/typechk.ML"
begin
+ML_file "Tools/typechk.ML"
setup TypeCheck.setup
lemma atomize_ball [symmetric, rulify]: