--- a/NEWS Mon May 31 19:36:13 2010 +0200
+++ b/NEWS Mon May 31 21:06:57 2010 +0200
@@ -503,9 +503,12 @@
OuterLex ~> Token
OuterParse ~> Parse
OuterSyntax ~> Outer_Syntax
+ PrintMode ~> Print_Mode
SpecParse ~> Parse_Spec
+ ThyInfo ~> Thy_Info
+ ThyLoad ~> Thy_Load
+ ThyOutput ~> Thy_Output
TypeInfer ~> Type_Infer
- PrintMode ~> Print_Mode
Note that "open Legacy" simplifies porting of sources, but forgetting
to remove it again will complicate porting again in the future.
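A rough porting sketch (illustrative only: it assumes the Legacy structure simply aliases the old names listed above to their new counterparts, and the base theory "Pure" is used purely as an example argument):

    (* transitional: bring the old names back into scope *)
    open Legacy;
    val thy = ThyInfo.get_theory "Pure";    (* old spelling still compiles *)

    (* preferred end state: drop "open Legacy" and use the new names *)
    val thy' = Thy_Info.get_theory "Pure";

Leaving "open Legacy" in place keeps old sources compiling, but, as noted above, it should be removed once the sources themselves have been updated.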
--- a/doc-src/Classes/Thy/document/Classes.tex Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/Classes/Thy/document/Classes.tex Mon May 31 21:06:57 2010 +0200
@@ -792,8 +792,7 @@
\isamarkuptrue%
%
\begin{isamarkuptext}%
-Isabelle locales support a concept of local definitions
- in locales:%
+Isabelle locales are targets which support local definitions:%
\end{isamarkuptext}%
\isamarkuptrue%
%
--- a/doc-src/IsarImplementation/Thy/Integration.thy Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/IsarImplementation/Thy/Integration.thy Mon May 31 21:06:57 2010 +0200
@@ -368,13 +368,13 @@
@{index_ML theory: "string -> theory"} \\
@{index_ML use_thy: "string -> unit"} \\
@{index_ML use_thys: "string list -> unit"} \\
- @{index_ML ThyInfo.touch_thy: "string -> unit"} \\
- @{index_ML ThyInfo.remove_thy: "string -> unit"} \\[1ex]
- @{index_ML ThyInfo.begin_theory}@{verbatim ": ... -> bool -> theory"} \\
- @{index_ML ThyInfo.end_theory: "theory -> unit"} \\
- @{index_ML ThyInfo.register_theory: "theory -> unit"} \\[1ex]
+ @{index_ML Thy_Info.touch_thy: "string -> unit"} \\
+ @{index_ML Thy_Info.remove_thy: "string -> unit"} \\[1ex]
+ @{index_ML Thy_Info.begin_theory}@{verbatim ": ... -> bool -> theory"} \\
+ @{index_ML Thy_Info.end_theory: "theory -> unit"} \\
+ @{index_ML Thy_Info.register_theory: "theory -> unit"} \\[1ex]
@{verbatim "datatype action = Update | Outdate | Remove"} \\
- @{index_ML ThyInfo.add_hook: "(ThyInfo.action -> string -> unit) -> unit"} \\
+ @{index_ML Thy_Info.add_hook: "(Thy_Info.action -> string -> unit) -> unit"} \\
\end{mldecls}
\begin{description}
@@ -395,24 +395,24 @@
intrinsic parallelism can be exploited by the system, to speedup
loading.
- \item @{ML ThyInfo.touch_thy}~@{text A} performs and @{text outdate} action
+ \item @{ML Thy_Info.touch_thy}~@{text A} performs an @{text outdate} action
on theory @{text A} and all descendants.
- \item @{ML ThyInfo.remove_thy}~@{text A} deletes theory @{text A} and all
+ \item @{ML Thy_Info.remove_thy}~@{text A} deletes theory @{text A} and all
descendants from the theory database.
- \item @{ML ThyInfo.begin_theory} is the basic operation behind a
+ \item @{ML Thy_Info.begin_theory} is the basic operation behind a
@{text \<THEORY>} header declaration. This {\ML} function is
normally not invoked directly.
- \item @{ML ThyInfo.end_theory} concludes the loading of a theory
+ \item @{ML Thy_Info.end_theory} concludes the loading of a theory
proper and stores the result in the theory database.
- \item @{ML ThyInfo.register_theory}~@{text "text thy"} registers an
+ \item @{ML Thy_Info.register_theory}~@{text "text thy"} registers an
existing theory value with the theory loader database. There is no
management of associated sources.
- \item @{ML "ThyInfo.add_hook"}~@{text f} registers function @{text
+ \item @{ML "Thy_Info.add_hook"}~@{text f} registers function @{text
f} as a hook for theory database actions. The function will be
invoked with the action and theory name being involved; thus derived
actions may be performed in associated system components, e.g.\
--- a/doc-src/IsarImplementation/Thy/document/Integration.tex Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/IsarImplementation/Thy/document/Integration.tex Mon May 31 21:06:57 2010 +0200
@@ -440,13 +440,13 @@
\indexdef{}{ML}{theory}\verb|theory: string -> theory| \\
\indexdef{}{ML}{use\_thy}\verb|use_thy: string -> unit| \\
\indexdef{}{ML}{use\_thys}\verb|use_thys: string list -> unit| \\
- \indexdef{}{ML}{ThyInfo.touch\_thy}\verb|ThyInfo.touch_thy: string -> unit| \\
- \indexdef{}{ML}{ThyInfo.remove\_thy}\verb|ThyInfo.remove_thy: string -> unit| \\[1ex]
- \indexdef{}{ML}{ThyInfo.begin\_theory}\verb|ThyInfo.begin_theory|\verb|: ... -> bool -> theory| \\
- \indexdef{}{ML}{ThyInfo.end\_theory}\verb|ThyInfo.end_theory: theory -> unit| \\
- \indexdef{}{ML}{ThyInfo.register\_theory}\verb|ThyInfo.register_theory: theory -> unit| \\[1ex]
+ \indexdef{}{ML}{Thy\_Info.touch\_thy}\verb|Thy_Info.touch_thy: string -> unit| \\
+ \indexdef{}{ML}{Thy\_Info.remove\_thy}\verb|Thy_Info.remove_thy: string -> unit| \\[1ex]
+ \indexdef{}{ML}{Thy\_Info.begin\_theory}\verb|Thy_Info.begin_theory|\verb|: ... -> bool -> theory| \\
+ \indexdef{}{ML}{Thy\_Info.end\_theory}\verb|Thy_Info.end_theory: theory -> unit| \\
+ \indexdef{}{ML}{Thy\_Info.register\_theory}\verb|Thy_Info.register_theory: theory -> unit| \\[1ex]
\verb|datatype action = Update |\verb,|,\verb| Outdate |\verb,|,\verb| Remove| \\
- \indexdef{}{ML}{ThyInfo.add\_hook}\verb|ThyInfo.add_hook: (ThyInfo.action -> string -> unit) -> unit| \\
+ \indexdef{}{ML}{Thy\_Info.add\_hook}\verb|Thy_Info.add_hook: (Thy_Info.action -> string -> unit) -> unit| \\
\end{mldecls}
\begin{description}
@@ -466,24 +466,24 @@
intrinsic parallelism can be exploited by the system, to speedup
loading.
- \item \verb|ThyInfo.touch_thy|~\isa{A} performs and \isa{outdate} action
+ \item \verb|Thy_Info.touch_thy|~\isa{A} performs an \isa{outdate} action
on theory \isa{A} and all descendants.
- \item \verb|ThyInfo.remove_thy|~\isa{A} deletes theory \isa{A} and all
+ \item \verb|Thy_Info.remove_thy|~\isa{A} deletes theory \isa{A} and all
descendants from the theory database.
- \item \verb|ThyInfo.begin_theory| is the basic operation behind a
+ \item \verb|Thy_Info.begin_theory| is the basic operation behind a
\isa{{\isasymTHEORY}} header declaration. This {\ML} function is
normally not invoked directly.
- \item \verb|ThyInfo.end_theory| concludes the loading of a theory
+ \item \verb|Thy_Info.end_theory| concludes the loading of a theory
proper and stores the result in the theory database.
- \item \verb|ThyInfo.register_theory|~\isa{text\ thy} registers an
+ \item \verb|Thy_Info.register_theory|~\isa{text\ thy} registers an
existing theory value with the theory loader database. There is no
management of associated sources.
- \item \verb|ThyInfo.add_hook|~\isa{f} registers function \isa{f} as a hook for theory database actions. The function will be
+ \item \verb|Thy_Info.add_hook|~\isa{f} registers function \isa{f} as a hook for theory database actions. The function will be
invoked with the action and theory name being involved; thus derived
actions may be performed in associated system components, e.g.\
maintaining the state of an editor for the theory sources.
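As a usage illustration of the renamed hook interface documented above (a minimal sketch based only on the signature and action datatype shown in the mldecls block; the tracing messages are invented for illustration):

    val _ = Thy_Info.add_hook (fn action => fn name =>
      (case action of
        Thy_Info.Update => tracing ("theory updated: " ^ name)
      | Thy_Info.Outdate => tracing ("theory outdated: " ^ name)
      | Thy_Info.Remove => tracing ("theory removed: " ^ name)));

Such a hook would then be invoked by the theory loader on every Update, Outdate, or Remove action, e.g.\ to keep an editor's view of the theory sources in sync.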
--- a/doc-src/IsarRef/Thy/ROOT-HOLCF.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/IsarRef/Thy/ROOT-HOLCF.ML Mon May 31 21:06:57 2010 +0200
@@ -1,4 +1,4 @@
-Unsynchronized.set ThyOutput.source;
+Unsynchronized.set Thy_Output.source;
use "../../antiquote_setup.ML";
use_thy "HOLCF_Specific";
--- a/doc-src/IsarRef/Thy/ROOT-ZF.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/IsarRef/Thy/ROOT-ZF.ML Mon May 31 21:06:57 2010 +0200
@@ -1,4 +1,4 @@
-Unsynchronized.set ThyOutput.source;
+Unsynchronized.set Thy_Output.source;
use "../../antiquote_setup.ML";
use_thy "ZF_Specific";
--- a/doc-src/IsarRef/Thy/ROOT.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/IsarRef/Thy/ROOT.ML Mon May 31 21:06:57 2010 +0200
@@ -1,5 +1,5 @@
Unsynchronized.set quick_and_dirty;
-Unsynchronized.set ThyOutput.source;
+Unsynchronized.set Thy_Output.source;
use "../../antiquote_setup.ML";
use_thys [
--- a/doc-src/Main/Docs/Main_Doc.thy Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/Main/Docs/Main_Doc.thy Mon May 31 21:06:57 2010 +0200
@@ -9,10 +9,10 @@
else error "term_type_only: type mismatch";
Syntax.pretty_typ ctxt T)
-val _ = ThyOutput.antiquotation "term_type_only" (Args.term -- Args.typ_abbrev)
+val _ = Thy_Output.antiquotation "term_type_only" (Args.term -- Args.typ_abbrev)
(fn {source, context, ...} => fn arg =>
- ThyOutput.output
- (ThyOutput.maybe_pretty_source (pretty_term_type_only context) source [arg]));
+ Thy_Output.output
+ (Thy_Output.maybe_pretty_source (pretty_term_type_only context) source [arg]));
*}
(*>*)
text{*
--- a/doc-src/System/Thy/ROOT.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/System/Thy/ROOT.ML Mon May 31 21:06:57 2010 +0200
@@ -1,4 +1,4 @@
-Unsynchronized.set ThyOutput.source;
+Unsynchronized.set Thy_Output.source;
use "../../antiquote_setup.ML";
use_thys ["Basics", "Interfaces", "Presentation", "Misc"];
--- a/doc-src/TutorialI/Rules/Primes.thy Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/TutorialI/Rules/Primes.thy Mon May 31 21:06:57 2010 +0200
@@ -10,7 +10,7 @@
ML "Pretty.margin_default := 64"
-ML "ThyOutput.indent := 5" (*that is, Doc/TutorialI/settings.ML*)
+ML "Thy_Output.indent := 5" (*that is, Doc/TutorialI/settings.ML*)
text {*Now in Basic.thy!
--- a/doc-src/TutorialI/Types/Numbers.thy Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/TutorialI/Types/Numbers.thy Mon May 31 21:06:57 2010 +0200
@@ -3,7 +3,7 @@
begin
ML "Pretty.margin_default := 64"
-ML "ThyOutput.indent := 0" (*we don't want 5 for listing theorems*)
+ML "Thy_Output.indent := 0" (*we don't want 5 for listing theorems*)
text{*
--- a/doc-src/TutorialI/Types/document/Numbers.tex Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/TutorialI/Types/document/Numbers.tex Mon May 31 21:06:57 2010 +0200
@@ -28,7 +28,7 @@
\isacommand{ML}\isamarkupfalse%
\ {\isachardoublequoteopen}Pretty{\isachardot}margin{\isacharunderscore}default\ {\isacharcolon}{\isacharequal}\ {\isadigit{6}}{\isadigit{4}}{\isachardoublequoteclose}\isanewline
\isacommand{ML}\isamarkupfalse%
-\ {\isachardoublequoteopen}ThyOutput{\isachardot}indent\ {\isacharcolon}{\isacharequal}\ {\isadigit{0}}{\isachardoublequoteclose}%
+\ {\isachardoublequoteopen}Thy{\isacharunderscore}Output{\isachardot}indent\ {\isacharcolon}{\isacharequal}\ {\isadigit{0}}{\isachardoublequoteclose}%
\endisatagML
{\isafoldML}%
%
--- a/doc-src/TutorialI/Types/document/Pairs.tex Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/TutorialI/Types/document/Pairs.tex Mon May 31 21:06:57 2010 +0200
@@ -60,7 +60,7 @@
Internally, \isa{{\isasymlambda}{\isacharparenleft}x{\isacharcomma}\ y{\isacharparenright}{\isachardot}\ t} becomes \isa{split\ {\isacharparenleft}{\isasymlambda}x\ y{\isachardot}\ t{\isacharparenright}}, where
\cdx{split} is the uncurrying function of type \isa{{\isacharparenleft}{\isacharprime}a\ {\isasymRightarrow}\ {\isacharprime}b\ {\isasymRightarrow}\ {\isacharprime}c{\isacharparenright}\ {\isasymRightarrow}\ {\isacharprime}a\ {\isasymtimes}\ {\isacharprime}b\ {\isasymRightarrow}\ {\isacharprime}c} defined as
\begin{center}
-\isa{split\ {\isasymequiv}\ {\isasymlambda}c\ p{\isachardot}\ c\ {\isacharparenleft}fst\ p{\isacharparenright}\ {\isacharparenleft}snd\ p{\isacharparenright}}
+\isa{split\ {\isacharequal}\ {\isacharparenleft}{\isasymlambda}c\ p{\isachardot}\ c\ {\isacharparenleft}fst\ p{\isacharparenright}\ {\isacharparenleft}snd\ p{\isacharparenright}{\isacharparenright}}
\hfill(\isa{split{\isacharunderscore}def})
\end{center}
Pattern matching in
--- a/doc-src/TutorialI/settings.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/TutorialI/settings.ML Mon May 31 21:06:57 2010 +0200
@@ -1,3 +1,3 @@
(* $Id$ *)
-ThyOutput.indent := 5;
+Thy_Output.indent := 5;
--- a/doc-src/antiquote_setup.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/antiquote_setup.ML Mon May 31 21:06:57 2010 +0200
@@ -35,7 +35,7 @@
val verbatim = space_implode "\\verb,|," o map (enclose "\\verb|" "|") o space_explode "|";
-val _ = ThyOutput.antiquotation "verbatim" (Scan.lift Args.name)
+val _ = Thy_Output.antiquotation "verbatim" (Scan.lift Args.name)
(K (split_lines #> map verbatim #> space_implode "\\isasep\\isanewline%\n"));
@@ -56,7 +56,7 @@
fun ml_functor (txt, _) = "ML_Env.check_functor " ^ ML_Syntax.print_string txt;
-fun index_ml name kind ml = ThyOutput.antiquotation name
+fun index_ml name kind ml = Thy_Output.antiquotation name
(Scan.lift (Args.name -- Scan.optional (Args.colon |-- Args.name) ""))
(fn {context = ctxt, ...} => fn (txt1, txt2) =>
let
@@ -71,8 +71,8 @@
in
"\\indexdef{}{" ^ kind' ^ "}{" ^ clean_string txt1 ^ "}" ^
(txt'
- |> (if ! ThyOutput.quotes then quote else I)
- |> (if ! ThyOutput.display then enclose "\\begin{verbatim}\n" "\n\\end{verbatim}"
+ |> (if ! Thy_Output.quotes then quote else I)
+ |> (if ! Thy_Output.display then enclose "\\begin{verbatim}\n" "\n\\end{verbatim}"
else split_lines #> map verbatim #> space_implode "\\isasep\\isanewline%\n"))
end);
@@ -89,30 +89,30 @@
(* named theorems *)
-val _ = ThyOutput.antiquotation "named_thms"
+val _ = Thy_Output.antiquotation "named_thms"
(Scan.repeat (Attrib.thm -- Scan.lift (Args.parens Args.name)))
(fn {context = ctxt, ...} =>
- map (apfst (ThyOutput.pretty_thm ctxt))
- #> (if ! ThyOutput.quotes then map (apfst Pretty.quote) else I)
- #> (if ! ThyOutput.display
+ map (apfst (Thy_Output.pretty_thm ctxt))
+ #> (if ! Thy_Output.quotes then map (apfst Pretty.quote) else I)
+ #> (if ! Thy_Output.display
then
map (fn (p, name) =>
- Output.output (Pretty.string_of (Pretty.indent (! ThyOutput.indent) p)) ^
- "\\rulename{" ^ Output.output (Pretty.str_of (ThyOutput.pretty_text name)) ^ "}")
+ Output.output (Pretty.string_of (Pretty.indent (! Thy_Output.indent) p)) ^
+ "\\rulename{" ^ Output.output (Pretty.str_of (Thy_Output.pretty_text name)) ^ "}")
#> space_implode "\\par\\smallskip%\n"
#> enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
else
map (fn (p, name) =>
Output.output (Pretty.str_of p) ^
- "\\rulename{" ^ Output.output (Pretty.str_of (ThyOutput.pretty_text name)) ^ "}")
+ "\\rulename{" ^ Output.output (Pretty.str_of (Thy_Output.pretty_text name)) ^ "}")
#> space_implode "\\par\\smallskip%\n"
#> enclose "\\isa{" "}"));
(* theory file *)
-val _ = ThyOutput.antiquotation "thy_file" (Scan.lift Args.name)
- (fn _ => fn name => (ThyLoad.check_thy Path.current name; ThyOutput.output [Pretty.str name]));
+val _ = Thy_Output.antiquotation "thy_file" (Scan.lift Args.name)
+ (fn _ => fn name => (Thy_Load.check_thy Path.current name; Thy_Output.output [Pretty.str name]));
(* Isabelle/Isar entities (with index) *)
@@ -131,7 +131,7 @@
val arg = enclose "{" "}" o clean_string;
fun entity check markup kind index =
- ThyOutput.antiquotation
+ Thy_Output.antiquotation
(translate (fn " " => "_" | c => c) kind ^
(case index of NONE => "" | SOME true => "_def" | SOME false => "_ref"))
(Scan.lift (Scan.optional (Args.parens Args.name) "" -- Args.name))
@@ -152,8 +152,8 @@
idx ^
(Output.output name
|> (if markup = "" then I else enclose ("\\" ^ markup ^ "{") "}")
- |> (if ! ThyOutput.quotes then quote else I)
- |> (if ! ThyOutput.display then enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
+ |> (if ! Thy_Output.quotes then quote else I)
+ |> (if ! Thy_Output.display then enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
else hyper o enclose "\\mbox{\\isa{" "}}"))
else error ("Bad " ^ kind ^ " " ^ quote name)
end);
@@ -174,14 +174,14 @@
val _ = entity_antiqs no_check "" "fact";
val _ = entity_antiqs no_check "" "variable";
val _ = entity_antiqs no_check "" "case";
-val _ = entity_antiqs (K ThyOutput.defined_command) "" "antiquotation";
-val _ = entity_antiqs (K ThyOutput.defined_option) "" "antiquotation option";
+val _ = entity_antiqs (K Thy_Output.defined_command) "" "antiquotation";
+val _ = entity_antiqs (K Thy_Output.defined_option) "" "antiquotation option";
val _ = entity_antiqs (fn _ => fn name => is_some (OS.Process.getEnv name)) "isatt" "setting";
val _ = entity_antiqs no_check "" "inference";
val _ = entity_antiqs no_check "isatt" "executable";
val _ = entity_antiqs (K check_tool) "isatt" "tool";
val _ = entity_antiqs (K (File.exists o Path.explode)) "isatt" "file";
-val _ = entity_antiqs (K ThyInfo.known_thy) "" "theory";
+val _ = entity_antiqs (K Thy_Info.known_thy) "" "theory";
end;
--- a/doc-src/more_antiquote.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/more_antiquote.ML Mon May 31 21:06:57 2010 +0200
@@ -64,8 +64,8 @@
in
-val _ = ThyOutput.antiquotation "class" (Scan.lift Args.name) (pr_class o #context);
-val _ = ThyOutput.antiquotation "type" (Scan.lift Args.name) (pr_type o #context);
+val _ = Thy_Output.antiquotation "class" (Scan.lift Args.name) (pr_class o #context);
+val _ = Thy_Output.antiquotation "type" (Scan.lift Args.name) (pr_type o #context);
end;
@@ -95,11 +95,11 @@
|> snd
|> map_filter (fn (_, (some_thm, proper)) => if proper then some_thm else NONE)
|> map (holize o no_vars ctxt o AxClass.overload thy);
- in ThyOutput.output (ThyOutput.maybe_pretty_source (pretty_thm ctxt) src thms) end;
+ in Thy_Output.output (Thy_Output.maybe_pretty_source (pretty_thm ctxt) src thms) end;
in
-val _ = ThyOutput.antiquotation "code_thms" Args.term
+val _ = Thy_Output.antiquotation "code_thms" Args.term
(fn {source, context, ...} => pretty_code_thm source context);
end;
@@ -123,7 +123,7 @@
in
-val _ = ThyOutput.antiquotation "code_stmts"
+val _ = Thy_Output.antiquotation "code_stmts"
(parse_const_terms -- Scan.repeat parse_names -- Scan.lift (Args.parens Args.name))
(fn {context = ctxt, ...} => fn ((mk_cs, mk_stmtss), target) =>
let val thy = ProofContext.theory_of ctxt in
--- a/doc-src/rail.ML Mon May 31 19:36:13 2010 +0200
+++ b/doc-src/rail.ML Mon May 31 21:06:57 2010 +0200
@@ -67,14 +67,14 @@
("fact", ("", no_check, true)),
("variable", ("", no_check, true)),
("case", ("", no_check, true)),
- ("antiquotation", ("", K ThyOutput.defined_command, true)),
- ("antiquotation option", ("", K ThyOutput.defined_option, true)), (* kann mein scanner nicht erkennen *)
+ ("antiquotation", ("", K Thy_Output.defined_command, true)),
+ ("antiquotation option", ("", K Thy_Output.defined_option, true)), (* kann mein scanner nicht erkennen *)
("setting", ("isatt", (fn _ => fn name => is_some (OS.Process.getEnv name)), true)),
("inference", ("", no_check, true)),
("executable", ("isatt", no_check, true)),
("tool", ("isatt", K check_tool, true)),
("file", ("isatt", K (File.exists o Path.explode), true)),
- ("theory", ("", K ThyInfo.known_thy, true))
+ ("theory", ("", K Thy_Info.known_thy, true))
];
in
@@ -97,8 +97,8 @@
(idx ^
(Output.output name
|> (if markup = "" then I else enclose ("\\" ^ markup ^ "{") "}")
- |> (if ! ThyOutput.quotes then quote else I)
- |> (if ! ThyOutput.display then enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
+ |> (if ! Thy_Output.quotes then quote else I)
+ |> (if ! Thy_Output.display then enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
else hyper o enclose "\\mbox{\\isa{" "}}")), style)
else ("Bad " ^ kind ^ " " ^ name, false)
end;
@@ -473,7 +473,7 @@
|> parse
|> print;
-val _ = ThyOutput.antiquotation "rail" (Scan.lift (Parse.position Args.name))
+val _ = Thy_Output.antiquotation "rail" (Scan.lift (Parse.position Args.name))
(fn {context = ctxt,...} => fn txt => process txt ctxt);
end;
--- a/src/HOL/Tools/ATP_Manager/atp_manager.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/HOL/Tools/ATP_Manager/atp_manager.ML Mon May 31 21:06:57 2010 +0200
@@ -219,7 +219,7 @@
then NONE
else
let
- val _ = List.app (SimpleThread.interrupt o #1) cancelling;
+ val _ = List.app (Simple_Thread.interrupt o #1) cancelling;
val cancelling' = filter (Thread.isActive o #1) cancelling;
val state' = make_state manager timeout_heap' active cancelling' messages store;
in SOME (map #2 timeout_threads, state') end
--- a/src/HOL/Tools/Datatype/datatype_data.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/HOL/Tools/Datatype/datatype_data.ML Mon May 31 21:06:57 2010 +0200
@@ -236,7 +236,7 @@
(** document antiquotation **)
-val _ = ThyOutput.antiquotation "datatype" (Args.type_name true)
+val _ = Thy_Output.antiquotation "datatype" (Args.type_name true)
(fn {source = src, context = ctxt, ...} => fn dtco =>
let
val thy = ProofContext.theory_of ctxt;
@@ -257,7 +257,7 @@
Pretty.str " =" :: Pretty.brk 1 ::
flat (separate [Pretty.brk 1, Pretty.str "| "]
(map (single o pretty_constr) cos)));
- in ThyOutput.output (ThyOutput.maybe_pretty_source (K pretty_datatype) src [()]) end);
+ in Thy_Output.output (Thy_Output.maybe_pretty_source (K pretty_datatype) src [()]) end);
--- a/src/HOL/Tools/Nitpick/nitpick.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick.ML Mon May 31 21:06:57 2010 +0200
@@ -181,7 +181,7 @@
val ctxt = Proof.context_of state
(* FIXME: reintroduce code before new release:
- val nitpick_thy = ThyInfo.get_theory "Nitpick"
+ val nitpick_thy = Thy_Info.get_theory "Nitpick"
val _ = Theory.subthy (nitpick_thy, thy) orelse
error "You must import the theory \"Nitpick\" to use Nitpick"
*)
--- a/src/Pure/Concurrent/future.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Concurrent/future.ML Mon May 31 21:06:57 2010 +0200
@@ -126,7 +126,7 @@
val lock = Mutex.mutex ();
in
-fun SYNCHRONIZED name = SimpleThread.synchronized name lock;
+fun SYNCHRONIZED name = Simple_Thread.synchronized name lock;
fun wait cond = (*requires SYNCHRONIZED*)
Multithreading.sync_wait NONE NONE cond lock;
@@ -238,7 +238,7 @@
| SOME work => (execute work; worker_loop name));
fun worker_start name = (*requires SYNCHRONIZED*)
- Unsynchronized.change workers (cons (SimpleThread.fork false (fn () => worker_loop name),
+ Unsynchronized.change workers (cons (Simple_Thread.fork false (fn () => worker_loop name),
Unsynchronized.ref Working));
@@ -371,7 +371,7 @@
fun scheduler_check () = (*requires SYNCHRONIZED*)
(do_shutdown := false;
if scheduler_active () then ()
- else scheduler := SOME (SimpleThread.fork false scheduler_loop));
+ else scheduler := SOME (Simple_Thread.fork false scheduler_loop));
--- a/src/Pure/Concurrent/simple_thread.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Concurrent/simple_thread.ML Mon May 31 21:06:57 2010 +0200
@@ -12,7 +12,7 @@
val synchronized: string -> Mutex.mutex -> (unit -> 'a) -> 'a
end;
-structure SimpleThread: SIMPLE_THREAD =
+structure Simple_Thread: SIMPLE_THREAD =
struct
fun attributes interrupts =
--- a/src/Pure/Concurrent/single_assignment.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Concurrent/single_assignment.ML Mon May 31 21:06:57 2010 +0200
@@ -32,7 +32,7 @@
fun peek (Var {var, ...}) = SingleAssignment.savalue var;
fun await (v as Var {name, lock, cond, var}) =
- SimpleThread.synchronized name lock (fn () =>
+ Simple_Thread.synchronized name lock (fn () =>
let
fun wait () =
(case peek v of
@@ -44,7 +44,7 @@
in wait () end);
fun assign (v as Var {name, lock, cond, var}) x =
- SimpleThread.synchronized name lock (fn () =>
+ Simple_Thread.synchronized name lock (fn () =>
(case peek v of
SOME _ => raise Fail ("Duplicate assignment to variable " ^ quote name)
| NONE =>
--- a/src/Pure/Concurrent/synchronized.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Concurrent/synchronized.ML Mon May 31 21:06:57 2010 +0200
@@ -39,7 +39,7 @@
(* synchronized access *)
fun timed_access (Var {name, lock, cond, var}) time_limit f =
- SimpleThread.synchronized name lock (fn () =>
+ Simple_Thread.synchronized name lock (fn () =>
let
fun try_change () =
let val x = ! var in
--- a/src/Pure/Concurrent/task_queue.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Concurrent/task_queue.ML Mon May 31 21:06:57 2010 +0200
@@ -162,7 +162,7 @@
val tasks = Inttab.lookup_list groups (group_id group);
val running =
fold (get_job jobs #> (fn Running t => insert Thread.equal t | _ => I)) tasks [];
- val _ = List.app SimpleThread.interrupt running;
+ val _ = List.app Simple_Thread.interrupt running;
in null running end;
fun cancel_all (Queue {groups, jobs}) =
@@ -173,7 +173,7 @@
Running t => (insert eq_group group groups, insert Thread.equal t running)
| _ => (groups, running)));
val (running_groups, running) = Task_Graph.fold (cancel_job o #1 o #2) jobs ([], []);
- val _ = List.app SimpleThread.interrupt running;
+ val _ = List.app Simple_Thread.interrupt running;
in running_groups end;
--- a/src/Pure/General/scan.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/General/scan.ML Mon May 31 21:06:57 2010 +0200
@@ -322,5 +322,5 @@
end;
-structure BasicScan: BASIC_SCAN = Scan;
-open BasicScan;
+structure Basic_Scan: BASIC_SCAN = Scan;
+open Basic_Scan;
--- a/src/Pure/Isar/isar_cmd.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/isar_cmd.ML Mon May 31 21:06:57 2010 +0200
@@ -85,7 +85,7 @@
val proof_markup: Symbol_Pos.text * Position.T -> Toplevel.transition -> Toplevel.transition
end;
-structure IsarCmd: ISAR_CMD =
+structure Isar_Cmd: ISAR_CMD =
struct
@@ -272,10 +272,10 @@
(* init and exit *)
fun init_theory (name, imports, uses) =
- Toplevel.init_theory name (ThyInfo.begin_theory name imports (map (apfst Path.explode) uses))
+ Toplevel.init_theory name (Thy_Info.begin_theory name imports (map (apfst Path.explode) uses))
(fn thy =>
- if ThyInfo.check_known_thy (Context.theory_name thy)
- then ThyInfo.end_theory thy else ());
+ if Thy_Info.check_known_thy (Context.theory_name thy)
+ then Thy_Info.end_theory thy else ());
val exit = Toplevel.keep (fn state =>
(Context.set_thread_data (try Toplevel.generic_theory_of state);
@@ -384,18 +384,18 @@
val print_methods = Toplevel.unknown_theory o
Toplevel.keep (Method.print_methods o Toplevel.theory_of);
-val print_antiquotations = Toplevel.imperative ThyOutput.print_antiquotations;
+val print_antiquotations = Toplevel.imperative Thy_Output.print_antiquotations;
val thy_deps = Toplevel.unknown_theory o Toplevel.keep (fn state =>
let
val thy = Toplevel.theory_of state;
- val all_thys = sort ThyInfo.thy_ord (thy :: Theory.ancestors_of thy);
+ val all_thys = sort Thy_Info.thy_ord (thy :: Theory.ancestors_of thy);
val gr = all_thys |> map (fn node =>
let
val name = Context.theory_name node;
val parents = map Context.theory_name (Theory.parents_of node);
val dir = Present.session_name node;
- val unfold = not (ThyInfo.known_thy name andalso ThyInfo.is_finished name);
+ val unfold = not (Thy_Info.known_thy name andalso Thy_Info.is_finished name);
in {name = name, ID = name, parents = parents, dir = dir, unfold = unfold, path = ""} end);
in Present.display_graph gr end);
@@ -427,8 +427,8 @@
Thm_Deps.unused_thms
(case opt_range of
NONE => (Theory.parents_of thy, [thy])
- | SOME (xs, NONE) => (map ThyInfo.get_theory xs, [thy])
- | SOME (xs, SOME ys) => (map ThyInfo.get_theory xs, map ThyInfo.get_theory ys))
+ | SOME (xs, NONE) => (map Thy_Info.get_theory xs, [thy])
+ | SOME (xs, SOME ys) => (map Thy_Info.get_theory xs, map Thy_Info.get_theory ys))
|> map pretty_thm |> Pretty.chunks |> Pretty.writeln
end);
@@ -517,7 +517,7 @@
fun check_text (txt, pos) state =
(Position.report Markup.doc_source pos;
- ignore (ThyOutput.eval_antiquote (#1 (Keyword.get_lexicons ())) state (txt, pos)));
+ ignore (Thy_Output.eval_antiquote (#1 (Keyword.get_lexicons ())) state (txt, pos)));
fun header_markup txt = Toplevel.keep (fn state =>
if Toplevel.is_toplevel state then check_text txt state
--- a/src/Pure/Isar/isar_document.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/isar_document.ML Mon May 31 21:06:57 2010 +0200
@@ -181,7 +181,7 @@
fun begin_document (id: document_id) path =
let
- val (dir, name) = ThyLoad.split_thy_path path;
+ val (dir, name) = Thy_Load.split_thy_path path;
val _ = define_document id (make_document dir name no_entries);
in () end;
@@ -197,8 +197,8 @@
(case Lazy.force end_state of
SOME st =>
(Toplevel.run_command name (Toplevel.put_id id (Toplevel.commit_exit Position.none)) st;
- ThyInfo.touch_child_thys name;
- ThyInfo.register_thy name)
+ Thy_Info.touch_child_thys name;
+ Thy_Info.register_thy name)
| NONE => error ("Failed to finish theory " ^ quote name)));
in () end);
--- a/src/Pure/Isar/isar_syn.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/isar_syn.ML Mon May 31 21:06:57 2010 +0200
@@ -4,7 +4,7 @@
Isar/Pure outer syntax.
*)
-structure IsarSyn: sig end =
+structure Isar_Syn: sig end =
struct
(** keywords **)
@@ -28,7 +28,7 @@
val _ =
Outer_Syntax.command "theory" "begin theory" (Keyword.tag_theory Keyword.thy_begin)
- (Thy_Header.args >> (Toplevel.print oo IsarCmd.init_theory));
+ (Thy_Header.args >> (Toplevel.print oo Isar_Cmd.init_theory));
val _ =
Outer_Syntax.command "end" "end (local) theory" (Keyword.tag_theory Keyword.thy_end)
@@ -38,43 +38,43 @@
(** markup commands **)
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "header" "theory header" Keyword.diag
- (Parse.doc_source >> IsarCmd.header_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "header" "theory header" Keyword.diag
+ (Parse.doc_source >> Isar_Cmd.header_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "chapter" "chapter heading"
- Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> IsarCmd.local_theory_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "chapter" "chapter heading"
+ Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> Isar_Cmd.local_theory_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "section" "section heading"
- Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> IsarCmd.local_theory_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "section" "section heading"
+ Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> Isar_Cmd.local_theory_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "subsection" "subsection heading"
- Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> IsarCmd.local_theory_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "subsection" "subsection heading"
+ Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> Isar_Cmd.local_theory_markup);
val _ =
- Outer_Syntax.markup_command ThyOutput.Markup "subsubsection" "subsubsection heading"
- Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> IsarCmd.local_theory_markup);
+ Outer_Syntax.markup_command Thy_Output.Markup "subsubsection" "subsubsection heading"
+ Keyword.thy_heading (Parse.opt_target -- Parse.doc_source >> Isar_Cmd.local_theory_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.MarkupEnv "text" "formal comment (theory)"
- Keyword.thy_decl (Parse.opt_target -- Parse.doc_source >> IsarCmd.local_theory_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.MarkupEnv "text" "formal comment (theory)"
+ Keyword.thy_decl (Parse.opt_target -- Parse.doc_source >> Isar_Cmd.local_theory_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Verbatim "text_raw" "raw document preparation text"
- Keyword.thy_decl (Parse.opt_target -- Parse.doc_source >> IsarCmd.local_theory_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Verbatim "text_raw" "raw document preparation text"
+ Keyword.thy_decl (Parse.opt_target -- Parse.doc_source >> Isar_Cmd.local_theory_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "sect" "formal comment (proof)"
- (Keyword.tag_proof Keyword.prf_heading) (Parse.doc_source >> IsarCmd.proof_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "sect" "formal comment (proof)"
+ (Keyword.tag_proof Keyword.prf_heading) (Parse.doc_source >> Isar_Cmd.proof_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "subsect" "formal comment (proof)"
- (Keyword.tag_proof Keyword.prf_heading) (Parse.doc_source >> IsarCmd.proof_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "subsect" "formal comment (proof)"
+ (Keyword.tag_proof Keyword.prf_heading) (Parse.doc_source >> Isar_Cmd.proof_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Markup "subsubsect" "formal comment (proof)"
- (Keyword.tag_proof Keyword.prf_heading) (Parse.doc_source >> IsarCmd.proof_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.Markup "subsubsect" "formal comment (proof)"
+ (Keyword.tag_proof Keyword.prf_heading) (Parse.doc_source >> Isar_Cmd.proof_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.MarkupEnv "txt" "formal comment (proof)"
- (Keyword.tag_proof Keyword.prf_decl) (Parse.doc_source >> IsarCmd.proof_markup);
+val _ = Outer_Syntax.markup_command Thy_Output.MarkupEnv "txt" "formal comment (proof)"
+ (Keyword.tag_proof Keyword.prf_decl) (Parse.doc_source >> Isar_Cmd.proof_markup);
-val _ = Outer_Syntax.markup_command ThyOutput.Verbatim "txt_raw"
+val _ = Outer_Syntax.markup_command Thy_Output.Verbatim "txt_raw"
"raw document preparation text (proof)" (Keyword.tag_proof Keyword.prf_decl)
- (Parse.doc_source >> IsarCmd.proof_markup);
+ (Parse.doc_source >> Isar_Cmd.proof_markup);
@@ -185,7 +185,7 @@
Outer_Syntax.command "axioms" "state arbitrary propositions (axiomatic!)" Keyword.thy_decl
(Scan.repeat1 Parse_Spec.spec >>
(Toplevel.theory o
- (IsarCmd.add_axioms o
+ (Isar_Cmd.add_axioms o
tap (fn _ => legacy_feature "Old 'axioms' command -- use 'axiomatization' instead"))));
val opt_unchecked_overloaded =
@@ -197,7 +197,7 @@
Outer_Syntax.command "defs" "define constants" Keyword.thy_decl
(opt_unchecked_overloaded --
Scan.repeat1 (Parse_Spec.thm_name ":" -- Parse.prop >> (fn ((x, y), z) => ((x, z), y)))
- >> (Toplevel.theory o IsarCmd.add_defs));
+ >> (Toplevel.theory o Isar_Cmd.add_defs));
(* old constdefs *)
@@ -296,10 +296,10 @@
((Parse.opt_keyword "open" >> not) -- Scan.repeat1 Parse.xname >>
(Toplevel.theory o uncurry hide));
-val _ = hide_names "hide_class" IsarCmd.hide_class "classes";
-val _ = hide_names "hide_type" IsarCmd.hide_type "types";
-val _ = hide_names "hide_const" IsarCmd.hide_const "constants";
-val _ = hide_names "hide_fact" IsarCmd.hide_fact "facts";
+val _ = hide_names "hide_class" Isar_Cmd.hide_class "classes";
+val _ = hide_names "hide_type" Isar_Cmd.hide_type "types";
+val _ = hide_names "hide_const" Isar_Cmd.hide_const "constants";
+val _ = hide_names "hide_fact" Isar_Cmd.hide_fact "facts";
(* use ML text *)
@@ -314,7 +314,7 @@
val _ =
Outer_Syntax.command "use" "ML text from file" (Keyword.tag_ml Keyword.thy_decl)
(Parse.path >>
- (fn path => Toplevel.generic_theory (ThyInfo.exec_file false path #> propagate_env)));
+ (fn path => Toplevel.generic_theory (Thy_Info.exec_file false path #> propagate_env)));
val _ =
Outer_Syntax.command "ML" "ML text within theory or local theory"
@@ -331,19 +331,19 @@
val _ =
Outer_Syntax.command "ML_val" "diagnostic ML text" (Keyword.tag_ml Keyword.diag)
- (Parse.ML_source >> IsarCmd.ml_diag true);
+ (Parse.ML_source >> Isar_Cmd.ml_diag true);
val _ =
Outer_Syntax.command "ML_command" "diagnostic ML text (silent)" (Keyword.tag_ml Keyword.diag)
- (Parse.ML_source >> (Toplevel.no_timing oo IsarCmd.ml_diag false));
+ (Parse.ML_source >> (Toplevel.no_timing oo Isar_Cmd.ml_diag false));
val _ =
Outer_Syntax.command "setup" "ML theory setup" (Keyword.tag_ml Keyword.thy_decl)
- (Parse.ML_source >> (Toplevel.theory o IsarCmd.global_setup));
+ (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.global_setup));
val _ =
Outer_Syntax.local_theory "local_setup" "ML local theory setup" (Keyword.tag_ml Keyword.thy_decl)
- (Parse.ML_source >> IsarCmd.local_setup);
+ (Parse.ML_source >> Isar_Cmd.local_setup);
val _ =
Outer_Syntax.command "attribute_setup" "define attribute in ML" (Keyword.tag_ml Keyword.thy_decl)
@@ -357,14 +357,14 @@
val _ =
Outer_Syntax.local_theory "declaration" "generic ML declaration" (Keyword.tag_ml Keyword.thy_decl)
- (Parse.opt_keyword "pervasive" -- Parse.ML_source >> uncurry IsarCmd.declaration);
+ (Parse.opt_keyword "pervasive" -- Parse.ML_source >> uncurry Isar_Cmd.declaration);
val _ =
Outer_Syntax.local_theory "simproc_setup" "define simproc in ML" (Keyword.tag_ml Keyword.thy_decl)
(Parse.name --
(Parse.$$$ "(" |-- Parse.enum1 "|" Parse.term --| Parse.$$$ ")" --| Parse.$$$ "=") --
Parse.ML_source -- Scan.optional (Parse.$$$ "identifier" |-- Scan.repeat1 Parse.xname) []
- >> (fn (((a, b), c), d) => IsarCmd.simproc_setup a b c d));
+ >> (fn (((a, b), c), d) => Isar_Cmd.simproc_setup a b c d));
(* translation functions *)
@@ -374,27 +374,27 @@
val _ =
Outer_Syntax.command "parse_ast_translation" "install parse ast translation functions"
(Keyword.tag_ml Keyword.thy_decl)
- (trfun >> (Toplevel.theory o IsarCmd.parse_ast_translation));
+ (trfun >> (Toplevel.theory o Isar_Cmd.parse_ast_translation));
val _ =
Outer_Syntax.command "parse_translation" "install parse translation functions"
(Keyword.tag_ml Keyword.thy_decl)
- (trfun >> (Toplevel.theory o IsarCmd.parse_translation));
+ (trfun >> (Toplevel.theory o Isar_Cmd.parse_translation));
val _ =
Outer_Syntax.command "print_translation" "install print translation functions"
(Keyword.tag_ml Keyword.thy_decl)
- (trfun >> (Toplevel.theory o IsarCmd.print_translation));
+ (trfun >> (Toplevel.theory o Isar_Cmd.print_translation));
val _ =
Outer_Syntax.command "typed_print_translation" "install typed print translation functions"
(Keyword.tag_ml Keyword.thy_decl)
- (trfun >> (Toplevel.theory o IsarCmd.typed_print_translation));
+ (trfun >> (Toplevel.theory o Isar_Cmd.typed_print_translation));
val _ =
Outer_Syntax.command "print_ast_translation" "install print ast translation functions"
(Keyword.tag_ml Keyword.thy_decl)
- (trfun >> (Toplevel.theory o IsarCmd.print_ast_translation));
+ (trfun >> (Toplevel.theory o Isar_Cmd.print_ast_translation));
(* oracles *)
@@ -402,7 +402,7 @@
val _ =
Outer_Syntax.command "oracle" "declare oracle" (Keyword.tag_ml Keyword.thy_decl)
(Parse.position Parse.name -- (Parse.$$$ "=" |-- Parse.ML_source) >>
- (fn (x, y) => Toplevel.theory (IsarCmd.oracle x y)));
+ (fn (x, y) => Toplevel.theory (Isar_Cmd.oracle x y)));
(* local theories *)
@@ -537,22 +537,22 @@
val _ =
Outer_Syntax.command "have" "state local goal"
(Keyword.tag_proof Keyword.prf_goal)
- (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o IsarCmd.have));
+ (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o Isar_Cmd.have));
val _ =
Outer_Syntax.command "hence" "abbreviates \"then have\""
(Keyword.tag_proof Keyword.prf_goal)
- (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o IsarCmd.hence));
+ (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o Isar_Cmd.hence));
val _ =
Outer_Syntax.command "show" "state local goal, solving current obligation"
(Keyword.tag_proof Keyword.prf_asm_goal)
- (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o IsarCmd.show));
+ (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o Isar_Cmd.show));
val _ =
Outer_Syntax.command "thus" "abbreviates \"then show\""
(Keyword.tag_proof Keyword.prf_asm_goal)
- (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o IsarCmd.thus));
+ (Parse_Spec.statement >> ((Toplevel.print oo Toplevel.proof') o Isar_Cmd.thus));
(* facts *)
@@ -673,32 +673,32 @@
val _ =
Outer_Syntax.command "qed" "conclude (sub-)proof"
(Keyword.tag_proof Keyword.qed_block)
- (Scan.option Method.parse >> IsarCmd.qed);
+ (Scan.option Method.parse >> Isar_Cmd.qed);
val _ =
Outer_Syntax.command "by" "terminal backward proof"
(Keyword.tag_proof Keyword.qed)
- (Method.parse -- Scan.option Method.parse >> IsarCmd.terminal_proof);
+ (Method.parse -- Scan.option Method.parse >> Isar_Cmd.terminal_proof);
val _ =
Outer_Syntax.command ".." "default proof"
(Keyword.tag_proof Keyword.qed)
- (Scan.succeed IsarCmd.default_proof);
+ (Scan.succeed Isar_Cmd.default_proof);
val _ =
Outer_Syntax.command "." "immediate proof"
(Keyword.tag_proof Keyword.qed)
- (Scan.succeed IsarCmd.immediate_proof);
+ (Scan.succeed Isar_Cmd.immediate_proof);
val _ =
Outer_Syntax.command "done" "done proof"
(Keyword.tag_proof Keyword.qed)
- (Scan.succeed IsarCmd.done_proof);
+ (Scan.succeed Isar_Cmd.done_proof);
val _ =
Outer_Syntax.command "sorry" "skip proof (quick-and-dirty mode only!)"
(Keyword.tag_proof Keyword.qed)
- (Scan.succeed IsarCmd.skip_proof);
+ (Scan.succeed Isar_Cmd.skip_proof);
val _ =
Outer_Syntax.command "oops" "forget proof"
@@ -796,7 +796,7 @@
val _ =
Outer_Syntax.improper_command "pretty_setmargin" "change default margin for pretty printing"
- Keyword.diag (Parse.nat >> (Toplevel.no_timing oo IsarCmd.pretty_setmargin));
+ Keyword.diag (Parse.nat >> (Toplevel.no_timing oo Isar_Cmd.pretty_setmargin));
val _ =
Outer_Syntax.improper_command "help" "print outer syntax commands" Keyword.diag
@@ -808,32 +808,32 @@
val _ =
Outer_Syntax.improper_command "print_configs" "print configuration options" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_configs));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_configs));
val _ =
Outer_Syntax.improper_command "print_context" "print theory context name" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_context));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_context));
val _ =
Outer_Syntax.improper_command "print_theory" "print logical theory contents (verbose!)"
- Keyword.diag (opt_bang >> (Toplevel.no_timing oo IsarCmd.print_theory));
+ Keyword.diag (opt_bang >> (Toplevel.no_timing oo Isar_Cmd.print_theory));
val _ =
Outer_Syntax.improper_command "print_syntax" "print inner syntax of context (verbose!)"
- Keyword.diag (Scan.succeed (Toplevel.no_timing o IsarCmd.print_syntax));
+ Keyword.diag (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_syntax));
val _ =
Outer_Syntax.improper_command "print_abbrevs" "print constant abbreviation of context"
- Keyword.diag (Scan.succeed (Toplevel.no_timing o IsarCmd.print_abbrevs));
+ Keyword.diag (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_abbrevs));
val _ =
Outer_Syntax.improper_command "print_theorems"
"print theorems of local theory or proof context" Keyword.diag
- (opt_bang >> (Toplevel.no_timing oo IsarCmd.print_theorems));
+ (opt_bang >> (Toplevel.no_timing oo Isar_Cmd.print_theorems));
val _ =
Outer_Syntax.improper_command "print_locales" "print locales of this theory" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_locales));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_locales));
val _ =
Outer_Syntax.improper_command "print_classes" "print classes of this theory" Keyword.diag
@@ -842,89 +842,89 @@
val _ =
Outer_Syntax.improper_command "print_locale" "print locale of this theory" Keyword.diag
- (opt_bang -- Parse.xname >> (Toplevel.no_timing oo IsarCmd.print_locale));
+ (opt_bang -- Parse.xname >> (Toplevel.no_timing oo Isar_Cmd.print_locale));
val _ =
Outer_Syntax.improper_command "print_interps"
"print interpretations of locale for this theory" Keyword.diag
- (Parse.xname >> (Toplevel.no_timing oo IsarCmd.print_registrations));
+ (Parse.xname >> (Toplevel.no_timing oo Isar_Cmd.print_registrations));
val _ =
Outer_Syntax.improper_command "print_attributes" "print attributes of this theory" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_attributes));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_attributes));
val _ =
Outer_Syntax.improper_command "print_simpset" "print context of Simplifier" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_simpset));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_simpset));
val _ =
Outer_Syntax.improper_command "print_rules" "print intro/elim rules" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_rules));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_rules));
val _ =
Outer_Syntax.improper_command "print_trans_rules" "print transitivity rules" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_trans_rules));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_trans_rules));
val _ =
Outer_Syntax.improper_command "print_methods" "print methods of this theory" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_methods));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_methods));
val _ =
Outer_Syntax.improper_command "print_antiquotations" "print antiquotations (global)" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_antiquotations));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_antiquotations));
val _ =
Outer_Syntax.improper_command "thy_deps" "visualize theory dependencies"
- Keyword.diag (Scan.succeed (Toplevel.no_timing o IsarCmd.thy_deps));
+ Keyword.diag (Scan.succeed (Toplevel.no_timing o Isar_Cmd.thy_deps));
val _ =
Outer_Syntax.improper_command "class_deps" "visualize class dependencies"
- Keyword.diag (Scan.succeed (Toplevel.no_timing o IsarCmd.class_deps));
+ Keyword.diag (Scan.succeed (Toplevel.no_timing o Isar_Cmd.class_deps));
val _ =
Outer_Syntax.improper_command "thm_deps" "visualize theorem dependencies"
- Keyword.diag (Parse_Spec.xthms1 >> (Toplevel.no_timing oo IsarCmd.thm_deps));
+ Keyword.diag (Parse_Spec.xthms1 >> (Toplevel.no_timing oo Isar_Cmd.thm_deps));
val _ =
Outer_Syntax.improper_command "print_binds" "print term bindings of proof context" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_binds));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_binds));
val _ =
Outer_Syntax.improper_command "print_facts" "print facts of proof context" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_facts));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_facts));
val _ =
Outer_Syntax.improper_command "print_cases" "print cases of proof context" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.print_cases));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.print_cases));
val _ =
Outer_Syntax.improper_command "print_statement" "print theorems as long statements" Keyword.diag
- (opt_modes -- Parse_Spec.xthms1 >> (Toplevel.no_timing oo IsarCmd.print_stmts));
+ (opt_modes -- Parse_Spec.xthms1 >> (Toplevel.no_timing oo Isar_Cmd.print_stmts));
val _ =
Outer_Syntax.improper_command "thm" "print theorems" Keyword.diag
- (opt_modes -- Parse_Spec.xthms1 >> (Toplevel.no_timing oo IsarCmd.print_thms));
+ (opt_modes -- Parse_Spec.xthms1 >> (Toplevel.no_timing oo Isar_Cmd.print_thms));
val _ =
Outer_Syntax.improper_command "prf" "print proof terms of theorems" Keyword.diag
(opt_modes -- Scan.option Parse_Spec.xthms1
- >> (Toplevel.no_timing oo IsarCmd.print_prfs false));
+ >> (Toplevel.no_timing oo Isar_Cmd.print_prfs false));
val _ =
Outer_Syntax.improper_command "full_prf" "print full proof terms of theorems" Keyword.diag
- (opt_modes -- Scan.option Parse_Spec.xthms1 >> (Toplevel.no_timing oo IsarCmd.print_prfs true));
+ (opt_modes -- Scan.option Parse_Spec.xthms1 >> (Toplevel.no_timing oo Isar_Cmd.print_prfs true));
val _ =
Outer_Syntax.improper_command "prop" "read and print proposition" Keyword.diag
- (opt_modes -- Parse.term >> (Toplevel.no_timing oo IsarCmd.print_prop));
+ (opt_modes -- Parse.term >> (Toplevel.no_timing oo Isar_Cmd.print_prop));
val _ =
Outer_Syntax.improper_command "term" "read and print term" Keyword.diag
- (opt_modes -- Parse.term >> (Toplevel.no_timing oo IsarCmd.print_term));
+ (opt_modes -- Parse.term >> (Toplevel.no_timing oo Isar_Cmd.print_term));
val _ =
Outer_Syntax.improper_command "typ" "read and print type" Keyword.diag
- (opt_modes -- Parse.typ >> (Toplevel.no_timing oo IsarCmd.print_type));
+ (opt_modes -- Parse.typ >> (Toplevel.no_timing oo Isar_Cmd.print_type));
val _ =
Outer_Syntax.improper_command "print_codesetup" "print code generator setup" Keyword.diag
@@ -936,7 +936,7 @@
Outer_Syntax.improper_command "unused_thms" "find unused theorems" Keyword.diag
(Scan.option ((Scan.repeat1 (Scan.unless Parse.minus Parse.name) --| Parse.minus) --
Scan.option (Scan.repeat1 (Scan.unless Parse.minus Parse.name))) >>
- (Toplevel.no_timing oo IsarCmd.unused_thms));
+ (Toplevel.no_timing oo Isar_Cmd.unused_thms));
@@ -944,54 +944,54 @@
val _ =
Outer_Syntax.improper_command "cd" "change current working directory" Keyword.diag
- (Parse.path >> (Toplevel.no_timing oo IsarCmd.cd));
+ (Parse.path >> (Toplevel.no_timing oo Isar_Cmd.cd));
val _ =
Outer_Syntax.improper_command "pwd" "print current working directory" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.pwd));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.pwd));
val _ =
Outer_Syntax.improper_command "use_thy" "use theory file" Keyword.diag
(Parse.name >> (fn name =>
- Toplevel.no_timing o Toplevel.imperative (fn () => ThyInfo.use_thy name)));
+ Toplevel.no_timing o Toplevel.imperative (fn () => Thy_Info.use_thy name)));
val _ =
Outer_Syntax.improper_command "touch_thy" "outdate theory, including descendants" Keyword.diag
(Parse.name >> (fn name =>
- Toplevel.no_timing o Toplevel.imperative (fn () => ThyInfo.touch_thy name)));
+ Toplevel.no_timing o Toplevel.imperative (fn () => Thy_Info.touch_thy name)));
val _ =
Outer_Syntax.improper_command "remove_thy" "remove theory from loader database" Keyword.diag
(Parse.name >> (fn name =>
- Toplevel.no_timing o Toplevel.imperative (fn () => ThyInfo.remove_thy name)));
+ Toplevel.no_timing o Toplevel.imperative (fn () => Thy_Info.remove_thy name)));
val _ =
Outer_Syntax.improper_command "kill_thy" "kill theory -- try to remove from loader database"
Keyword.diag (Parse.name >> (fn name =>
- Toplevel.no_timing o Toplevel.imperative (fn () => ThyInfo.kill_thy name)));
+ Toplevel.no_timing o Toplevel.imperative (fn () => Thy_Info.kill_thy name)));
val _ =
Outer_Syntax.improper_command "display_drafts" "display raw source files with symbols"
- Keyword.diag (Scan.repeat1 Parse.path >> (Toplevel.no_timing oo IsarCmd.display_drafts));
+ Keyword.diag (Scan.repeat1 Parse.path >> (Toplevel.no_timing oo Isar_Cmd.display_drafts));
val _ =
Outer_Syntax.improper_command "print_drafts" "print raw source files with symbols"
- Keyword.diag (Scan.repeat1 Parse.path >> (Toplevel.no_timing oo IsarCmd.print_drafts));
+ Keyword.diag (Scan.repeat1 Parse.path >> (Toplevel.no_timing oo Isar_Cmd.print_drafts));
val opt_limits =
Scan.option Parse.nat -- Scan.option (Parse.$$$ "," |-- Parse.!!! Parse.nat);
val _ =
Outer_Syntax.improper_command "pr" "print current proof state (if present)" Keyword.diag
- (opt_modes -- opt_limits >> (Toplevel.no_timing oo IsarCmd.pr));
+ (opt_modes -- opt_limits >> (Toplevel.no_timing oo Isar_Cmd.pr));
val _ =
Outer_Syntax.improper_command "disable_pr" "disable printing of toplevel state" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.disable_pr));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.disable_pr));
val _ =
Outer_Syntax.improper_command "enable_pr" "enable printing of toplevel state" Keyword.diag
- (Scan.succeed (Toplevel.no_timing o IsarCmd.enable_pr));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.enable_pr));
val _ =
Outer_Syntax.improper_command "commit" "commit current session to ML database" Keyword.diag
@@ -999,11 +999,11 @@
val _ =
Outer_Syntax.improper_command "quit" "quit Isabelle" Keyword.control
- (Parse.opt_unit >> (Toplevel.no_timing oo K IsarCmd.quit));
+ (Parse.opt_unit >> (Toplevel.no_timing oo K Isar_Cmd.quit));
val _ =
Outer_Syntax.improper_command "exit" "exit Isar loop" Keyword.control
- (Scan.succeed (Toplevel.no_timing o IsarCmd.exit));
+ (Scan.succeed (Toplevel.no_timing o Isar_Cmd.exit));
end;
--- a/src/Pure/Isar/method.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/method.ML Mon May 31 21:06:57 2010 +0200
@@ -271,15 +271,16 @@
val intros_tac = gen_intros_tac ALLGOALS;
val try_intros_tac = gen_intros_tac TRYALL;
+
(* ML tactics *)
-structure TacticData = Proof_Data
+structure ML_Tactic = Proof_Data
(
type T = thm list -> tactic;
fun init _ = undefined;
);
-val set_tactic = TacticData.put;
+val set_tactic = ML_Tactic.put;
fun ml_tactic (txt, pos) ctxt =
let
@@ -287,7 +288,7 @@
(ML_Context.expression pos
"fun tactic (facts: thm list) : tactic"
"Context.map_proof (Method.set_tactic tactic)" (ML_Lex.read pos txt));
- in Context.setmp_thread_data (SOME (Context.Proof ctxt)) (TacticData.get ctxt') end;
+ in Context.setmp_thread_data (SOME (Context.Proof ctxt)) (ML_Tactic.get ctxt') end;
fun tactic txt ctxt = METHOD (ml_tactic txt ctxt);
fun raw_tactic txt ctxt = RAW_METHOD (ml_tactic txt ctxt);
--- a/src/Pure/Isar/object_logic.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/object_logic.ML Mon May 31 21:06:57 2010 +0200
@@ -46,7 +46,7 @@
fun make_data (base_sort, judgment, atomize_rulify) =
Data {base_sort = base_sort, judgment = judgment, atomize_rulify = atomize_rulify};
-structure ObjectLogicData = Theory_Data
+structure Data = Theory_Data
(
type T = data;
val empty = make_data (NONE, NONE, ([], []));
@@ -63,10 +63,10 @@
(Thm.merge_thms (atomize1, atomize2), Thm.merge_thms (rulify1, rulify2)));
);
-fun map_data f = ObjectLogicData.map (fn (Data {base_sort, judgment, atomize_rulify}) =>
+fun map_data f = Data.map (fn (Data {base_sort, judgment, atomize_rulify}) =>
make_data (f (base_sort, judgment, atomize_rulify)));
-fun get_data thy = ObjectLogicData.get thy |> (fn Data args => args);
+fun get_data thy = Data.get thy |> (fn Data args => args);
--- a/src/Pure/Isar/outer_syntax.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/outer_syntax.ML Mon May 31 21:06:57 2010 +0200
@@ -11,7 +11,7 @@
sig
val command: string -> string -> Keyword.T ->
(Toplevel.transition -> Toplevel.transition) parser -> unit
- val markup_command: ThyOutput.markup -> string -> string -> Keyword.T ->
+ val markup_command: Thy_Output.markup -> string -> string -> Keyword.T ->
(Toplevel.transition -> Toplevel.transition) parser -> unit
val improper_command: string -> string -> Keyword.T ->
(Toplevel.transition -> Toplevel.transition) parser -> unit
@@ -43,7 +43,7 @@
datatype command = Command of
{comment: string,
- markup: ThyOutput.markup option,
+ markup: Thy_Output.markup option,
int_only: bool,
parse: (Toplevel.transition -> Toplevel.transition) parser};
@@ -83,7 +83,7 @@
local
val global_commands = Unsynchronized.ref (Symtab.empty: command Symtab.table);
-val global_markups = Unsynchronized.ref ([]: (string * ThyOutput.markup) list);
+val global_markups = Unsynchronized.ref ([]: (string * Thy_Output.markup) list);
fun change_commands f = CRITICAL (fn () =>
(Unsynchronized.change global_commands f;
@@ -235,9 +235,9 @@
fun prepare_span commands span =
let
- val range_pos = Position.encode_range (ThySyntax.span_range span);
- val toks = ThySyntax.span_content span;
- val _ = List.app ThySyntax.report_token toks;
+ val range_pos = Position.encode_range (Thy_Syntax.span_range span);
+ val toks = Thy_Syntax.span_content span;
+ val _ = List.app Thy_Syntax.report_token toks;
in
(case Source.exhaust (toplevel_source false NONE (K commands) (Source.of_list toks)) of
[tr] => (tr, true)
@@ -257,7 +257,7 @@
fun prepare_command pos str =
let val (lexs, commands) = get_syntax () in
- (case ThySyntax.parse_spans lexs pos str of
+ (case Thy_Syntax.parse_spans lexs pos str of
[span] => #1 (prepare_span commands span)
| _ => Toplevel.malformed pos not_singleton)
end;
@@ -271,21 +271,21 @@
val _ = Present.init_theory name;
- val toks = Source.exhausted (ThySyntax.token_source lexs pos (Source.of_list text));
- val spans = Source.exhaust (ThySyntax.span_source toks);
- val _ = List.app ThySyntax.report_span spans;
- val units = Source.exhaust (ThySyntax.unit_source (Source.of_list spans))
+ val toks = Source.exhausted (Thy_Syntax.token_source lexs pos (Source.of_list text));
+ val spans = Source.exhaust (Thy_Syntax.span_source toks);
+ val _ = List.app Thy_Syntax.report_span spans;
+ val units = Source.exhaust (Thy_Syntax.unit_source (Source.of_list spans))
|> maps (prepare_unit commands);
val _ = Present.theory_source name
- (fn () => HTML.html_mode (implode o map ThySyntax.present_span) spans);
+ (fn () => HTML.html_mode (implode o map Thy_Syntax.present_span) spans);
val _ = if time then writeln ("\n**** Starting theory " ^ quote name ^ " ****") else ();
val results = cond_timeit time "" (fn () => Toplevel.excursion units);
val _ = if time then writeln ("**** Finished theory " ^ quote name ^ " ****\n") else ();
fun after_load () =
- ThyOutput.present_thy (#1 lexs) Keyword.command_tags is_markup (Lazy.force results) toks
+ Thy_Output.present_thy (#1 lexs) Keyword.command_tags is_markup (Lazy.force results) toks
|> Buffer.content
|> Present.theory_output name;
in after_load end;
--- a/src/Pure/Isar/proof_context.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/proof_context.ML Mon May 31 21:06:57 2010 +0200
@@ -193,7 +193,7 @@
val local_naming = Name_Space.default_naming |> Name_Space.add_path "local";
-structure ContextData = Proof_Data
+structure Data = Proof_Data
(
type T = ctxt;
fun init thy =
@@ -202,10 +202,10 @@
(Sign.consts_of thy, Sign.consts_of thy), Facts.empty, []);
);
-fun rep_context ctxt = ContextData.get ctxt |> (fn Ctxt args => args);
+fun rep_context ctxt = Data.get ctxt |> (fn Ctxt args => args);
fun map_context f =
- ContextData.map (fn Ctxt {mode, naming, syntax, tsig, consts, facts, cases} =>
+ Data.map (fn Ctxt {mode, naming, syntax, tsig, consts, facts, cases} =>
make_ctxt (f (mode, naming, syntax, tsig, consts, facts, cases)));
fun set_mode mode = map_context (fn (_, naming, syntax, tsig, consts, facts, cases) =>
--- a/src/Pure/Isar/specification.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/specification.ML Mon May 31 21:06:57 2010 +0200
@@ -361,7 +361,7 @@
#2 #> pair ths);
in ((prems, stmt, SOME facts), goal_ctxt) end);
-structure TheoremHook = Generic_Data
+structure Theorem_Hook = Generic_Data
(
type T = ((bool -> Proof.state -> Proof.state) * stamp) list;
val empty = [];
@@ -407,7 +407,7 @@
|> (case facts of NONE => I | SOME ths => Proof.refine_insert ths)
|> tap (fn state => not schematic andalso Proof.schematic_goal state andalso
error "Illegal schematic goal statement")
- |> Library.apply (map (fn (f, _) => f int) (rev (TheoremHook.get (Context.Proof goal_ctxt))))
+ |> Library.apply (map (fn (f, _) => f int) (rev (Theorem_Hook.get (Context.Proof goal_ctxt))))
end;
in
@@ -418,7 +418,7 @@
val schematic_theorem = gen_theorem true (K I) Expression.cert_statement;
val schematic_theorem_cmd = gen_theorem true Attrib.intern_src Expression.read_statement;
-fun add_theorem_hook f = TheoremHook.map (cons (f, stamp ()));
+fun add_theorem_hook f = Theorem_Hook.map (cons (f, stamp ()));
end;
--- a/src/Pure/Isar/toplevel.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Isar/toplevel.ML Mon May 31 21:06:57 2010 +0200
@@ -238,7 +238,7 @@
|> Runtime.debugging
|> Runtime.toplevel_error
(fn exn => priority ("## INTERNAL ERROR ##\n" ^ ML_Compiler.exn_message exn))),
- SimpleThread.attributes interrupts);
+ Simple_Thread.attributes interrupts);
(* node transactions -- maintaining stable checkpoints *)
@@ -638,7 +638,7 @@
fun run_command thy_name tr st =
(case
(case init_of tr of
- SOME name => Exn.capture (fn () => ThyLoad.check_name thy_name name) ()
+ SOME name => Exn.capture (fn () => Thy_Load.check_name thy_name name) ()
| NONE => Exn.Result ()) of
Exn.Result () =>
(case transition true tr st of
--- a/src/Pure/ML/ml_antiquote.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ML/ml_antiquote.ML Mon May 31 21:06:57 2010 +0200
@@ -20,7 +20,7 @@
(* ML names *)
-structure NamesData = Proof_Data
+structure Names = Proof_Data
(
type T = Name.context;
fun init _ = ML_Syntax.reserved;
@@ -28,9 +28,9 @@
fun variant a ctxt =
let
- val names = NamesData.get ctxt;
+ val names = Names.get ctxt;
val ([b], names') = Name.variants [a] names;
- val ctxt' = NamesData.put names' ctxt;
+ val ctxt' = Names.put names' ctxt;
in (b, ctxt') end;
@@ -64,12 +64,12 @@
>> (fn name => ML_Syntax.atomic (ML_Syntax.make_binding name)));
val _ = value "theory"
- (Scan.lift Args.name >> (fn name => "ThyInfo.get_theory " ^ ML_Syntax.print_string name)
+ (Scan.lift Args.name >> (fn name => "Thy_Info.get_theory " ^ ML_Syntax.print_string name)
|| Scan.succeed "ML_Context.the_global_context ()");
val _ = value "theory_ref"
(Scan.lift Args.name >> (fn name =>
- "Theory.check_thy (ThyInfo.theory " ^ ML_Syntax.print_string name ^ ")")
+ "Theory.check_thy (Thy_Info.theory " ^ ML_Syntax.print_string name ^ ")")
|| Scan.succeed "Theory.check_thy (ML_Context.the_global_context ())");
val _ = value "context" (Scan.succeed "ML_Context.the_local_context ()");
--- a/src/Pure/ML/ml_thms.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ML/ml_thms.ML Mon May 31 21:06:57 2010 +0200
@@ -15,15 +15,15 @@
(* auxiliary facts table *)
-structure AuxFacts = Proof_Data
+structure Aux_Facts = Proof_Data
(
type T = thm list Inttab.table;
fun init _ = Inttab.empty;
);
-val put_thms = AuxFacts.map o Inttab.update;
+val put_thms = Aux_Facts.map o Inttab.update;
-fun the_thms ctxt name = the (Inttab.lookup (AuxFacts.get ctxt) name);
+fun the_thms ctxt name = the (Inttab.lookup (Aux_Facts.get ctxt) name);
fun the_thm ctxt name = the_single (the_thms ctxt name);
fun thm_bind kind a i =
--- a/src/Pure/ProofGeneral/pgip_isabelle.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ProofGeneral/pgip_isabelle.ML Mon May 31 21:06:57 2010 +0200
@@ -80,7 +80,7 @@
NONE => (NONE, NONE)
| SOME fname =>
let val path = Path.explode fname in
- case ThyLoad.check_file Path.current path of
+ case Thy_Load.check_file Path.current path of
SOME _ => (SOME (PgipTypes.pgipurl_of_path path), NONE)
| NONE => (NONE, SOME fname)
end);
--- a/src/Pure/ProofGeneral/pgip_parser.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ProofGeneral/pgip_parser.ML Mon May 31 21:06:57 2010 +0200
@@ -91,18 +91,18 @@
fun parse_span span =
let
- val kind = ThySyntax.span_kind span;
- val toks = ThySyntax.span_content span;
- val text = implode (map (Print_Mode.setmp [] ThySyntax.present_token) toks);
+ val kind = Thy_Syntax.span_kind span;
+ val toks = Thy_Syntax.span_content span;
+ val text = implode (map (Print_Mode.setmp [] Thy_Syntax.present_token) toks);
in
(case kind of
- ThySyntax.Command name => parse_command name text
- | ThySyntax.Ignored => [D.Whitespace {text = text}]
- | ThySyntax.Malformed => [D.Unparseable {text = text}])
+ Thy_Syntax.Command name => parse_command name text
+ | Thy_Syntax.Ignored => [D.Whitespace {text = text}]
+ | Thy_Syntax.Malformed => [D.Unparseable {text = text}])
end;
fun pgip_parser pos str =
- maps parse_span (ThySyntax.parse_spans (Keyword.get_lexicons ()) pos str);
+ maps parse_span (Thy_Syntax.parse_spans (Keyword.get_lexicons ()) pos str);
end;
--- a/src/Pure/ProofGeneral/proof_general_emacs.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ProofGeneral/proof_general_emacs.ML Mon May 31 21:06:57 2010 +0200
@@ -113,15 +113,15 @@
local
fun trace_action action name =
- if action = ThyInfo.Update then
- List.app tell_file_loaded (ThyInfo.loaded_files name)
- else if action = ThyInfo.Outdate orelse action = ThyInfo.Remove then
- List.app tell_file_retracted (ThyInfo.loaded_files name)
+ if action = Thy_Info.Update then
+ List.app tell_file_loaded (Thy_Info.loaded_files name)
+ else if action = Thy_Info.Outdate orelse action = Thy_Info.Remove then
+ List.app tell_file_retracted (Thy_Info.loaded_files name)
else ();
in
- fun setup_thy_loader () = ThyInfo.add_hook trace_action;
- fun sync_thy_loader () = List.app (trace_action ThyInfo.Update) (ThyInfo.get_names ());
+ fun setup_thy_loader () = Thy_Info.add_hook trace_action;
+ fun sync_thy_loader () = List.app (trace_action Thy_Info.Update) (Thy_Info.get_names ());
end;
@@ -130,19 +130,19 @@
(*liberal low-level version*)
val thy_name = perhaps (try (unsuffix ".thy")) o List.last o space_explode "/";
-val inform_file_retracted = ThyInfo.if_known_thy ThyInfo.remove_thy o thy_name;
+val inform_file_retracted = Thy_Info.if_known_thy Thy_Info.remove_thy o thy_name;
fun inform_file_processed file =
let
val name = thy_name file;
val _ = name = "" andalso error ("Bad file name: " ^ quote file);
val _ =
- if ThyInfo.check_known_thy name then
+ if Thy_Info.check_known_thy name then
(Isar.>> (Toplevel.commit_exit Position.none);
- ThyInfo.touch_child_thys name; ThyInfo.register_thy name)
+ Thy_Info.touch_child_thys name; Thy_Info.register_thy name)
handle ERROR msg =>
(warning (cat_lines ["Failed to register theory: " ^ quote name, msg]);
- tell_file_retracted (ThyLoad.thy_path name))
+ tell_file_retracted (Thy_Load.thy_path name))
else ();
val _ = Isar.init ();
in () end;
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML Mon May 31 21:06:57 2010 +0200
@@ -198,18 +198,18 @@
operations).
*)
fun trace_action action name =
- if action = ThyInfo.Update then
- List.app (tell_file_loaded true) (ThyInfo.loaded_files name)
- else if action = ThyInfo.Outdate then
- List.app (tell_file_outdated true) (ThyInfo.loaded_files name)
- else if action = ThyInfo.Remove then
- List.app (tell_file_retracted true) (ThyInfo.loaded_files name)
+ if action = Thy_Info.Update then
+ List.app (tell_file_loaded true) (Thy_Info.loaded_files name)
+ else if action = Thy_Info.Outdate then
+ List.app (tell_file_outdated true) (Thy_Info.loaded_files name)
+ else if action = Thy_Info.Remove then
+ List.app (tell_file_retracted true) (Thy_Info.loaded_files name)
else ()
in
- fun setup_thy_loader () = ThyInfo.add_hook trace_action;
- fun sync_thy_loader () = List.app (trace_action ThyInfo.Update) (ThyInfo.get_names ());
+ fun setup_thy_loader () = Thy_Info.add_hook trace_action;
+ fun sync_thy_loader () = List.app (trace_action Thy_Info.Update) (Thy_Info.get_names ());
end;
@@ -217,14 +217,14 @@
val thy_name = Path.implode o #1 o Path.split_ext o Path.base;
-val inform_file_retracted = ThyInfo.if_known_thy ThyInfo.remove_thy o thy_name;
-val inform_file_processed = ThyInfo.if_known_thy ThyInfo.touch_child_thys o thy_name;
+val inform_file_retracted = Thy_Info.if_known_thy Thy_Info.remove_thy o thy_name;
+val inform_file_processed = Thy_Info.if_known_thy Thy_Info.touch_child_thys o thy_name;
fun proper_inform_file_processed path state =
let val name = thy_name path in
- if Toplevel.is_toplevel state andalso ThyInfo.known_thy name then
- (ThyInfo.touch_child_thys name;
- ThyInfo.register_thy name handle ERROR msg =>
+ if Toplevel.is_toplevel state andalso Thy_Info.known_thy name then
+ (Thy_Info.touch_child_thys name;
+ Thy_Info.register_thy name handle ERROR msg =>
(warning (cat_lines [msg, "Failed to register theory: " ^ quote name]);
tell_file_retracted true (Path.base path)))
else raise Toplevel.UNDEF
@@ -522,7 +522,7 @@
local
fun theory_facts name =
- let val thy = ThyInfo.get_theory name
+ let val thy = Thy_Info.get_theory name
in (map PureThy.facts_of (Theory.parents_of thy), PureThy.facts_of thy) end;
fun thms_of_thy name = map fst (theory_facts name |-> Facts.extern_static);
@@ -556,13 +556,13 @@
in
case (thyname,objtype) of
(NONE, NONE) =>
- setids (idtable ObjFile NONE (ThyInfo.get_names())) (*FIXME: uris*)
+ setids (idtable ObjFile NONE (Thy_Info.get_names())) (*FIXME: uris*)
| (NONE, SOME ObjFile) =>
- setids (idtable ObjFile NONE (ThyInfo.get_names())) (*FIXME: uris*)
+ setids (idtable ObjFile NONE (Thy_Info.get_names())) (*FIXME: uris*)
| (SOME fi, SOME ObjFile) =>
setids (idtable ObjTheory (SOME fi) [fi]) (* TODO: check exists *)
| (NONE, SOME ObjTheory) =>
- setids (idtable ObjTheory NONE (ThyInfo.get_names()))
+ setids (idtable ObjTheory NONE (Thy_Info.get_names()))
| (SOME thy, SOME ObjTheory) =>
setids (idtable ObjTheory (SOME thy) (subthys_of_thy thy))
| (SOME thy, SOME ObjTheorem) =>
@@ -571,9 +571,9 @@
(* A large query, but not unreasonable. ~5000 results for HOL.*)
(* Several setids should be allowed, but Eclipse code is currently broken:
List.app (fn thy => setids (idtable ObjTheorem (SOME thy) (subthms_of_thy thy)))
- (ThyInfo.get_names()) *)
+ (Thy_Info.get_names()) *)
setids (idtable ObjTheorem NONE (* this one gives ~7000 for HOL *)
- (maps qualified_thms_of_thy (ThyInfo.get_names())))
+ (maps qualified_thms_of_thy (Thy_Info.get_names())))
| _ => warning ("askids: ignored argument combination")
end
@@ -592,14 +592,14 @@
fun filerefs f =
let val thy = thy_name f
- val filerefs = #uses (ThyLoad.deps_thy (Path.dir f) thy)
+ val filerefs = #uses (Thy_Load.deps_thy (Path.dir f) thy)
in
issue_pgip (Setrefs {url=url, thyname=NONE, objtype=SOME PgipTypes.ObjFile,
name=NONE, idtables=[], fileurls=filerefs})
end
fun thyrefs thy =
- let val thyrefs = #imports (ThyLoad.deps_thy Path.current thy)
+ let val thyrefs = #imports (Thy_Load.deps_thy Path.current thy)
in
issue_pgip (Setrefs {url=url, thyname=thyname, objtype=SOME PgipTypes.ObjTheory,
name=NONE, idtables=[{context=NONE, objtype=PgipTypes.ObjTheory,
@@ -642,7 +642,7 @@
fun splitthy id =
let val comps = Long_Name.explode id
in case comps of
- (thy::(rest as _::_)) => (ThyInfo.get_theory thy, space_implode "." rest)
+ (thy::(rest as _::_)) => (Thy_Info.get_theory thy, space_implode "." rest)
| [plainid] => (topthy(),plainid)
| _ => raise Toplevel.UNDEF (* assert false *)
end
@@ -659,8 +659,8 @@
val string_of_thy = Pretty.string_of o Proof_Display.pretty_full_theory false
in
case (thyname, objtype) of
- (_, ObjTheory) => idvalue [string_of_thy (ThyInfo.get_theory name)]
- | (SOME thy, ObjTheorem) => idvalue (strings_of_thm (ThyInfo.get_theory thy, name))
+ (_, ObjTheory) => idvalue [string_of_thy (Thy_Info.get_theory name)]
+ | (SOME thy, ObjTheorem) => idvalue (strings_of_thm (Thy_Info.get_theory thy, name))
| (NONE, ObjTheorem) => idvalue (strings_of_thm (splitthy name))
| (_, ot) => error ("Cannot show objects of type "^(PgipTypes.name_of_objtype ot))
end
@@ -785,8 +785,8 @@
in
(case (!current_working_dir) of
NONE => ()
- | SOME dir => ThyLoad.del_path dir;
- ThyLoad.add_path newdir;
+ | SOME dir => Thy_Load.del_path dir;
+ Thy_Load.add_path newdir;
current_working_dir := SOME newdir)
end
end
@@ -806,7 +806,7 @@
val filedir = Path.dir filepath
val thy_name = Path.implode o #1 o Path.split_ext o Path.base
val openfile_retract = Output.no_warnings_CRITICAL
- (ThyInfo.if_known_thy ThyInfo.remove_thy) o thy_name;
+ (Thy_Info.if_known_thy Thy_Info.remove_thy) o thy_name;
in
case !currently_open_file of
SOME f => raise PGIP ("<openfile> when a file is already open!\nCurrently open file: " ^
@@ -856,7 +856,7 @@
(* TODO: next should be in thy loader, here just for testing *)
let
val name = thy_name url
- in List.app (tell_file_retracted false) (ThyInfo.loaded_files name) end;
+ in List.app (tell_file_retracted false) (Thy_Info.loaded_files name) end;
inform_file_retracted url)
end
--- a/src/Pure/ROOT.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/ROOT.ML Mon May 31 21:06:57 2010 +0200
@@ -299,18 +299,15 @@
struct
structure OuterKeyword = Keyword;
-
-structure OuterLex =
-struct
- open Token;
- type token = T;
-end;
-
+structure OuterLex = struct open Token type token = T end;
structure OuterParse = Parse;
structure OuterSyntax = Outer_Syntax;
+structure PrintMode = Print_Mode;
structure SpecParse = Parse_Spec;
+structure ThyInfo = Thy_Info;
+structure ThyLoad = Thy_Load;
+structure ThyOutput = Thy_Output;
structure TypeInfer = Type_Infer;
-structure PrintMode = Print_Mode;
end;
--- a/src/Pure/Syntax/mixfix.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/mixfix.ML Mon May 31 21:06:57 2010 +0200
@@ -31,8 +31,8 @@
signature MIXFIX =
sig
include MIXFIX1
- val syn_ext_types: (string * typ * mixfix) list -> SynExt.syn_ext
- val syn_ext_consts: (string -> bool) -> (string * typ * mixfix) list -> SynExt.syn_ext
+ val syn_ext_types: (string * typ * mixfix) list -> Syn_Ext.syn_ext
+ val syn_ext_consts: (string -> bool) -> (string * typ * mixfix) list -> Syn_Ext.syn_ext
end;
structure Mixfix: MIXFIX =
@@ -83,11 +83,11 @@
(* syntax specifications *)
fun mixfix_args NoSyn = 0
- | mixfix_args (Mixfix (sy, _, _)) = SynExt.mfix_args sy
- | mixfix_args (Delimfix sy) = SynExt.mfix_args sy
- | mixfix_args (Infix (sy, _)) = 2 + SynExt.mfix_args sy
- | mixfix_args (Infixl (sy, _)) = 2 + SynExt.mfix_args sy
- | mixfix_args (Infixr (sy, _)) = 2 + SynExt.mfix_args sy
+ | mixfix_args (Mixfix (sy, _, _)) = Syn_Ext.mfix_args sy
+ | mixfix_args (Delimfix sy) = Syn_Ext.mfix_args sy
+ | mixfix_args (Infix (sy, _)) = 2 + Syn_Ext.mfix_args sy
+ | mixfix_args (Infixl (sy, _)) = 2 + Syn_Ext.mfix_args sy
+ | mixfix_args (Infixr (sy, _)) = 2 + Syn_Ext.mfix_args sy
| mixfix_args (Binder _) = 1
| mixfix_args Structure = 0;
@@ -97,29 +97,29 @@
(* syn_ext_types *)
-fun make_type n = replicate n SynExt.typeT ---> SynExt.typeT;
+fun make_type n = replicate n Syn_Ext.typeT ---> Syn_Ext.typeT;
fun syn_ext_types type_decls =
let
- fun mk_infix sy ty t p1 p2 p3 = SynExt.Mfix ("(_ " ^ sy ^ "/ _)", ty, t, [p1, p2], p3);
+ fun mk_infix sy ty t p1 p2 p3 = Syn_Ext.Mfix ("(_ " ^ sy ^ "/ _)", ty, t, [p1, p2], p3);
fun mfix_of (_, _, NoSyn) = NONE
- | mfix_of (t, ty, Mixfix (sy, ps, p)) = SOME (SynExt.Mfix (sy, ty, t, ps, p))
- | mfix_of (t, ty, Delimfix sy) = SOME (SynExt.Mfix (sy, ty, t, [], SynExt.max_pri))
+ | mfix_of (t, ty, Mixfix (sy, ps, p)) = SOME (Syn_Ext.Mfix (sy, ty, t, ps, p))
+ | mfix_of (t, ty, Delimfix sy) = SOME (Syn_Ext.Mfix (sy, ty, t, [], Syn_Ext.max_pri))
| mfix_of (t, ty, Infix (sy, p)) = SOME (mk_infix sy ty t (p + 1) (p + 1) p)
| mfix_of (t, ty, Infixl (sy, p)) = SOME (mk_infix sy ty t p (p + 1) p)
| mfix_of (t, ty, Infixr (sy, p)) = SOME (mk_infix sy ty t (p + 1) p p)
| mfix_of (t, _, _) = error ("Bad mixfix declaration for " ^ quote t); (* FIXME printable t (!?) *)
- fun check_args (_, ty, _) (SOME (mfix as SynExt.Mfix (sy, _, _, _, _))) =
- if length (Term.binder_types ty) = SynExt.mfix_args sy then ()
- else SynExt.err_in_mfix "Bad number of type constructor arguments" mfix
+ fun check_args (_, ty, _) (SOME (mfix as Syn_Ext.Mfix (sy, _, _, _, _))) =
+ if length (Term.binder_types ty) = Syn_Ext.mfix_args sy then ()
+ else Syn_Ext.err_in_mfix "Bad number of type constructor arguments" mfix
| check_args _ NONE = ();
val mfix = map mfix_of type_decls;
val _ = map2 check_args type_decls mfix;
val xconsts = map #1 type_decls;
- in SynExt.syn_ext (map_filter I mfix) xconsts ([], [], [], []) [] ([], []) end;
+ in Syn_Ext.syn_ext (map_filter I mfix) xconsts ([], [], [], []) [] ([], []) end;
(* syn_ext_consts *)
@@ -130,21 +130,21 @@
fun syn_ext_consts is_logtype const_decls =
let
fun mk_infix sy ty c p1 p2 p3 =
- [SynExt.Mfix ("op " ^ sy, ty, c, [], SynExt.max_pri),
- SynExt.Mfix ("(_ " ^ sy ^ "/ _)", ty, c, [p1, p2], p3)];
+ [Syn_Ext.Mfix ("op " ^ sy, ty, c, [], Syn_Ext.max_pri),
+ Syn_Ext.Mfix ("(_ " ^ sy ^ "/ _)", ty, c, [p1, p2], p3)];
fun binder_typ _ (Type ("fun", [Type ("fun", [_, ty2]), ty3])) =
[Type ("idts", []), ty2] ---> ty3
| binder_typ c _ = error ("Bad type of binder: " ^ quote c);
fun mfix_of (_, _, NoSyn) = []
- | mfix_of (c, ty, Mixfix (sy, ps, p)) = [SynExt.Mfix (sy, ty, c, ps, p)]
- | mfix_of (c, ty, Delimfix sy) = [SynExt.Mfix (sy, ty, c, [], SynExt.max_pri)]
+ | mfix_of (c, ty, Mixfix (sy, ps, p)) = [Syn_Ext.Mfix (sy, ty, c, ps, p)]
+ | mfix_of (c, ty, Delimfix sy) = [Syn_Ext.Mfix (sy, ty, c, [], Syn_Ext.max_pri)]
| mfix_of (c, ty, Infix (sy, p)) = mk_infix sy ty c (p + 1) (p + 1) p
| mfix_of (c, ty, Infixl (sy, p)) = mk_infix sy ty c p (p + 1) p
| mfix_of (c, ty, Infixr (sy, p)) = mk_infix sy ty c (p + 1) p p
| mfix_of (c, ty, Binder (sy, p, q)) =
- [SynExt.Mfix ("(3" ^ sy ^ "_./ _)", binder_typ c ty, (binder_name c), [0, p], q)]
+ [Syn_Ext.Mfix ("(3" ^ sy ^ "_./ _)", binder_typ c ty, (binder_name c), [0, p], q)]
| mfix_of (c, _, _) = error ("Bad mixfix declaration for " ^ quote c); (* FIXME printable c (!?) *)
fun binder (c, _, Binder _) = SOME (binder_name c, c)
@@ -154,12 +154,12 @@
val xconsts = map #1 const_decls;
val binders = map_filter binder const_decls;
val binder_trs = binders
- |> map (SynExt.stamp_trfun binder_stamp o apsnd K o SynTrans.mk_binder_tr);
+ |> map (Syn_Ext.stamp_trfun binder_stamp o apsnd K o Syn_Trans.mk_binder_tr);
val binder_trs' = binders
- |> map (SynExt.stamp_trfun binder_stamp o
- apsnd (K o SynTrans.non_typed_tr') o SynTrans.mk_binder_tr' o swap);
+ |> map (Syn_Ext.stamp_trfun binder_stamp o
+ apsnd (K o Syn_Trans.non_typed_tr') o Syn_Trans.mk_binder_tr' o swap);
in
- SynExt.syn_ext' true is_logtype
+ Syn_Ext.syn_ext' true is_logtype
mfix xconsts ([], binder_trs, binder_trs', []) [] ([], [])
end;
--- a/src/Pure/Syntax/parser.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/parser.ML Mon May 31 21:06:57 2010 +0200
@@ -8,8 +8,8 @@
sig
type gram
val empty_gram: gram
- val extend_gram: gram -> SynExt.xprod list -> gram
- val make_gram: SynExt.xprod list -> gram
+ val extend_gram: gram -> Syn_Ext.xprod list -> gram
+ val make_gram: Syn_Ext.xprod list -> gram
val merge_grams: gram -> gram -> gram
val pretty_gram: gram -> Pretty.T list
datatype parsetree =
@@ -23,7 +23,7 @@
structure Parser: PARSER =
struct
-open Lexicon SynExt;
+open Lexicon Syn_Ext;
(** datatype gram **)
--- a/src/Pure/Syntax/printer.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/printer.ML Mon May 31 21:06:57 2010 +0200
@@ -26,8 +26,8 @@
(string -> (Proof.context -> bool -> typ -> term list -> term) list) -> term -> Ast.ast
type prtabs
val empty_prtabs: prtabs
- val update_prtabs: string -> SynExt.xprod list -> prtabs -> prtabs
- val remove_prtabs: string -> SynExt.xprod list -> prtabs -> prtabs
+ val update_prtabs: string -> Syn_Ext.xprod list -> prtabs -> prtabs
+ val remove_prtabs: string -> Syn_Ext.xprod list -> prtabs -> prtabs
val merge_prtabs: prtabs -> prtabs -> prtabs
val pretty_term_ast: {extern_class: string -> xstring, extern_type: string -> xstring,
extern_const: string -> xstring} -> Proof.context -> bool -> prtabs ->
@@ -101,8 +101,8 @@
handle Match => Ast.mk_appl (Ast.Constant a) (map ast_of args);
in ast_of tm end;
-fun sort_to_ast ctxt trf S = ast_of_termT ctxt trf (TypeExt.term_of_sort S);
-fun typ_to_ast ctxt trf T = ast_of_termT ctxt trf (TypeExt.term_of_typ (! show_sorts) T);
+fun sort_to_ast ctxt trf S = ast_of_termT ctxt trf (Type_Ext.term_of_sort S);
+fun typ_to_ast ctxt trf T = ast_of_termT ctxt trf (Type_Ext.term_of_typ (! show_sorts) T);
@@ -114,7 +114,7 @@
val {structs, fixes} = idents;
fun mark_atoms ((t as Const (c, T)) $ u) =
- if member (op =) SynExt.standard_token_markers c
+ if member (op =) Syn_Ext.standard_token_markers c
then t $ u else mark_atoms t $ mark_atoms u
| mark_atoms (t $ u) = mark_atoms t $ mark_atoms u
| mark_atoms (Abs (x, T, t)) = Abs (x, T, mark_atoms t)
@@ -130,7 +130,7 @@
else Lexicon.const "_free" $ t
end
| mark_atoms (t as Var (xi, T)) =
- if xi = SynExt.dddot_indexname then Const ("_DDDOT", T)
+ if xi = Syn_Ext.dddot_indexname then Const ("_DDDOT", T)
else Lexicon.const "_var" $ t
| mark_atoms a = a;
@@ -155,7 +155,7 @@
fun ast_of tm =
(case strip_comb tm of
- (t as Abs _, ts) => Ast.mk_appl (ast_of (SynTrans.abs_tr' t)) (map ast_of ts)
+ (t as Abs _, ts) => Ast.mk_appl (ast_of (Syn_Trans.abs_tr' t)) (map ast_of ts)
| ((c as Const ("_free", _)), Free (x, T) :: ts) =>
Ast.mk_appl (constrain (c $ Lexicon.free x) T) (map ast_of ts)
| ((c as Const ("_var", _)), Var (xi, T) :: ts) =>
@@ -176,12 +176,12 @@
and constrain t T =
if show_types andalso T <> dummyT then
- Ast.Appl [Ast.Constant SynExt.constrainC, simple_ast_of t,
- ast_of_termT ctxt trf (TypeExt.term_of_typ show_sorts T)]
+ Ast.Appl [Ast.Constant Syn_Ext.constrainC, simple_ast_of t,
+ ast_of_termT ctxt trf (Type_Ext.term_of_typ show_sorts T)]
else simple_ast_of t;
in
tm
- |> SynTrans.prop_tr'
+ |> Syn_Trans.prop_tr'
|> show_types ? (#1 o prune_typs o rpair [])
|> mark_atoms
|> ast_of
@@ -211,29 +211,29 @@
(* xprod_to_fmt *)
-fun xprod_to_fmt (SynExt.XProd (_, _, "", _)) = NONE
- | xprod_to_fmt (SynExt.XProd (_, xsymbs, const, pri)) =
+fun xprod_to_fmt (Syn_Ext.XProd (_, _, "", _)) = NONE
+ | xprod_to_fmt (Syn_Ext.XProd (_, xsymbs, const, pri)) =
let
fun arg (s, p) =
(if s = "type" then TypArg else Arg)
- (if Lexicon.is_terminal s then SynExt.max_pri else p);
+ (if Lexicon.is_terminal s then Syn_Ext.max_pri else p);
- fun xsyms_to_syms (SynExt.Delim s :: xsyms) =
+ fun xsyms_to_syms (Syn_Ext.Delim s :: xsyms) =
apfst (cons (String s)) (xsyms_to_syms xsyms)
- | xsyms_to_syms (SynExt.Argument s_p :: xsyms) =
+ | xsyms_to_syms (Syn_Ext.Argument s_p :: xsyms) =
apfst (cons (arg s_p)) (xsyms_to_syms xsyms)
- | xsyms_to_syms (SynExt.Space s :: xsyms) =
+ | xsyms_to_syms (Syn_Ext.Space s :: xsyms) =
apfst (cons (Space s)) (xsyms_to_syms xsyms)
- | xsyms_to_syms (SynExt.Bg i :: xsyms) =
+ | xsyms_to_syms (Syn_Ext.Bg i :: xsyms) =
let
val (bsyms, xsyms') = xsyms_to_syms xsyms;
val (syms, xsyms'') = xsyms_to_syms xsyms';
in
(Block (i, bsyms) :: syms, xsyms'')
end
- | xsyms_to_syms (SynExt.Brk i :: xsyms) =
+ | xsyms_to_syms (Syn_Ext.Brk i :: xsyms) =
apfst (cons (Break i)) (xsyms_to_syms xsyms)
- | xsyms_to_syms (SynExt.En :: xsyms) = ([], xsyms)
+ | xsyms_to_syms (Syn_Ext.En :: xsyms) = ([], xsyms)
| xsyms_to_syms [] = ([], []);
fun nargs (Arg _ :: syms) = nargs syms + 1
@@ -263,7 +263,7 @@
fun remove_prtabs mode xprods prtabs =
let
val tab = mode_tab prtabs mode;
- val fmts = map_filter (fn xprod as SynExt.XProd (_, _, c, _) =>
+ val fmts = map_filter (fn xprod as Syn_Ext.XProd (_, _, c, _) =>
if null (Symtab.lookup_list tab c) then NONE
else xprod_to_fmt xprod) xprods;
val tab' = fold (Symtab.remove_list (op =)) fmts tab;
@@ -290,15 +290,15 @@
fun token_trans a x =
(case tokentrf a of
NONE =>
- if member (op =) SynExt.standard_token_classes a
+ if member (op =) Syn_Ext.standard_token_classes a
then SOME (Pretty.str x) else NONE
| SOME f => SOME (f ctxt x));
(*default applications: prefix / postfix*)
val appT =
- if type_mode then TypeExt.tappl_ast_tr'
- else if curried then SynTrans.applC_ast_tr'
- else SynTrans.appl_ast_tr';
+ if type_mode then Type_Ext.tappl_ast_tr'
+ else if curried then Syn_Trans.applC_ast_tr'
+ else Syn_Trans.appl_ast_tr';
fun synT _ ([], args) = ([], args)
| synT markup (Arg p :: symbs, t :: args) =
@@ -333,7 +333,7 @@
and parT markup (pr, args, p, p': int) = #1 (synT markup
(if p > p' orelse
- (! show_brackets andalso p' <> SynExt.max_pri andalso not (is_chain pr))
+ (! show_brackets andalso p' <> Syn_Ext.max_pri andalso not (is_chain pr))
then [Block (1, Space "(" :: pr @ [Space ")"])]
else pr, args))
--- a/src/Pure/Syntax/syn_ext.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/syn_ext.ML Mon May 31 21:06:57 2010 +0200
@@ -41,7 +41,7 @@
val chain_pri: int
val delims_of: xprod list -> string list list
datatype syn_ext =
- SynExt of {
+ Syn_Ext of {
xprods: xprod list,
consts: string list,
prmodes: string list,
@@ -86,7 +86,7 @@
val pure_ext: syn_ext
end;
-structure SynExt: SYN_EXT =
+structure Syn_Ext: SYN_EXT =
struct
@@ -326,7 +326,7 @@
(** datatype syn_ext **)
datatype syn_ext =
- SynExt of {
+ Syn_Ext of {
xprods: xprod list,
consts: string list,
prmodes: string list,
@@ -352,7 +352,7 @@
val mfix_consts =
distinct (op =) (map (fn Mfix x => #3 x) mfixes @ map (fn XProd x => #3 x) xprods);
in
- SynExt {
+ Syn_Ext {
xprods = xprods,
consts = union (op =) consts mfix_consts,
prmodes = distinct (op =) (map (fn (m, _, _) => m) tokentrfuns),
--- a/src/Pure/Syntax/syn_trans.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/syn_trans.ML Mon May 31 21:06:57 2010 +0200
@@ -60,7 +60,7 @@
(string -> (Proof.context -> term list -> term) option) -> Ast.ast list -> term list
end;
-structure SynTrans: SYN_TRANS =
+structure Syn_Trans: SYN_TRANS =
struct
@@ -166,7 +166,7 @@
(* dddot *)
-fun dddot_tr (*"_DDDOT"*) ts = Term.list_comb (Lexicon.var SynExt.dddot_indexname, ts);
+fun dddot_tr (*"_DDDOT"*) ts = Term.list_comb (Lexicon.var Syn_Ext.dddot_indexname, ts);
(* quote / antiquote *)
@@ -354,9 +354,9 @@
(* type propositions *)
fun type_prop_tr' _ (*"_type_prop"*) T [Const ("\\<^const>Pure.sort_constraint", _)] =
- Lexicon.const "_sort_constraint" $ TypeExt.term_of_typ true T
+ Lexicon.const "_sort_constraint" $ Type_Ext.term_of_typ true T
| type_prop_tr' show_sorts (*"_type_prop"*) T [t] =
- Lexicon.const "_ofclass" $ TypeExt.term_of_typ show_sorts T $ t
+ Lexicon.const "_ofclass" $ Type_Ext.term_of_typ show_sorts T $ t
| type_prop_tr' _ (*"_type_prop"*) T ts = raise TYPE ("type_prop_tr'", [T], ts);
@@ -394,7 +394,7 @@
(* meta implication *)
fun impl_ast_tr' (*"==>"*) asts =
- if TypeExt.no_brackets () then raise Match
+ if Type_Ext.no_brackets () then raise Match
else
(case Ast.unfold_ast_p "\\<^const>==>" (Ast.Appl (Ast.Constant "\\<^const>==>" :: asts)) of
(prems as _ :: _ :: _, concl) =>
@@ -408,14 +408,14 @@
(* type reflection *)
fun type_tr' show_sorts (*"TYPE"*) (Type ("itself", [T])) ts =
- Term.list_comb (Lexicon.const "_TYPE" $ TypeExt.term_of_typ show_sorts T, ts)
+ Term.list_comb (Lexicon.const "_TYPE" $ Type_Ext.term_of_typ show_sorts T, ts)
| type_tr' _ _ _ = raise Match;
(* type constraints *)
fun type_constraint_tr' show_sorts (*"_type_constraint_"*) (Type ("fun", [T, _])) (t :: ts) =
- Term.list_comb (Lexicon.const SynExt.constrainC $ t $ TypeExt.term_of_typ show_sorts T, ts)
+ Term.list_comb (Lexicon.const Syn_Ext.constrainC $ t $ Type_Ext.term_of_typ show_sorts T, ts)
| type_constraint_tr' _ _ _ = raise Match;
--- a/src/Pure/Syntax/syntax.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/syntax.ML Mon May 31 21:06:57 2010 +0200
@@ -168,21 +168,21 @@
fun err_dup_trfun name c =
error ("More than one " ^ name ^ " for " ^ quote c);
-fun remove_trtab trfuns = fold (Symtab.remove SynExt.eq_trfun) trfuns;
+fun remove_trtab trfuns = fold (Symtab.remove Syn_Ext.eq_trfun) trfuns;
fun update_trtab name trfuns tab = fold Symtab.update_new trfuns (remove_trtab trfuns tab)
handle Symtab.DUP c => err_dup_trfun name c;
-fun merge_trtabs name tab1 tab2 = Symtab.merge SynExt.eq_trfun (tab1, tab2)
+fun merge_trtabs name tab1 tab2 = Symtab.merge Syn_Ext.eq_trfun (tab1, tab2)
handle Symtab.DUP c => err_dup_trfun name c;
(* print (ast) translations *)
fun lookup_tr' tab c = map fst (Symtab.lookup_list tab c);
-fun update_tr'tab trfuns = fold_rev (Symtab.update_list SynExt.eq_trfun) trfuns;
-fun remove_tr'tab trfuns = fold (Symtab.remove_list SynExt.eq_trfun) trfuns;
-fun merge_tr'tabs tab1 tab2 = Symtab.merge_list SynExt.eq_trfun (tab1, tab2);
+fun update_tr'tab trfuns = fold_rev (Symtab.update_list Syn_Ext.eq_trfun) trfuns;
+fun remove_tr'tab trfuns = fold (Symtab.remove_list Syn_Ext.eq_trfun) trfuns;
+fun merge_tr'tabs tab1 tab2 = Symtab.merge_list Syn_Ext.eq_trfun (tab1, tab2);
@@ -239,7 +239,7 @@
datatype syntax =
Syntax of {
- input: SynExt.xprod list,
+ input: Syn_Ext.xprod list,
lexicon: Scan.lexicon,
gram: Parser.gram,
consts: string list,
@@ -287,7 +287,7 @@
val {input, lexicon, gram, consts = consts1, prmodes = prmodes1,
parse_ast_trtab, parse_ruletab, parse_trtab, print_trtab, print_ruletab,
print_ast_trtab, tokentrtab, prtabs} = tabs;
- val SynExt.SynExt {xprods, consts = consts2, prmodes = prmodes2,
+ val Syn_Ext.Syn_Ext {xprods, consts = consts2, prmodes = prmodes2,
parse_ast_translation, parse_rules, parse_translation, print_translation, print_rules,
print_ast_translation, token_translation} = syn_ext;
val new_xprods =
@@ -296,7 +296,7 @@
in
Syntax
({input = new_xprods @ input,
- lexicon = fold Scan.extend_lexicon (SynExt.delims_of new_xprods) lexicon,
+ lexicon = fold Scan.extend_lexicon (Syn_Ext.delims_of new_xprods) lexicon,
gram = Parser.extend_gram gram new_xprods,
consts = Library.merge (op =) (consts1, filter_out Lexicon.is_marked consts2),
prmodes = insert (op =) mode (Library.merge (op =) (prmodes1, prmodes2)),
@@ -316,7 +316,7 @@
fun remove_syntax (mode, inout) syn_ext (Syntax (tabs, _)) =
let
- val SynExt.SynExt {xprods, consts = _, prmodes = _,
+ val Syn_Ext.Syn_Ext {xprods, consts = _, prmodes = _,
parse_ast_translation, parse_rules, parse_translation, print_translation, print_rules,
print_ast_translation, token_translation = _} = syn_ext;
val {input, lexicon, gram, consts, prmodes,
@@ -328,7 +328,7 @@
in
Syntax
({input = input',
- lexicon = if changed then Scan.make_lexicon (SynExt.delims_of input') else lexicon,
+ lexicon = if changed then Scan.make_lexicon (Syn_Ext.delims_of input') else lexicon,
gram = if changed then Parser.make_gram input' else gram,
consts = consts,
prmodes = prmodes,
@@ -381,13 +381,13 @@
val basic_syntax =
empty_syntax
- |> update_syntax mode_default TypeExt.type_ext
- |> update_syntax mode_default SynExt.pure_ext;
+ |> update_syntax mode_default Type_Ext.type_ext
+ |> update_syntax mode_default Syn_Ext.pure_ext;
val basic_nonterms =
- (Lexicon.terminals @ [SynExt.logic, "type", "types", "sort", "classes",
- SynExt.args, SynExt.cargs, "pttrn", "pttrns", "idt", "idts", "aprop",
- "asms", SynExt.any, SynExt.sprop, "num_const", "float_const",
+ (Lexicon.terminals @ [Syn_Ext.logic, "type", "types", "sort", "classes",
+ Syn_Ext.args, Syn_Ext.cargs, "pttrn", "pttrns", "idt", "idts", "aprop",
+ "asms", Syn_Ext.any, Syn_Ext.sprop, "num_const", "float_const",
"index", "struct"]);
@@ -481,13 +481,13 @@
val toks = Lexicon.tokenize lexicon xids syms;
val _ = List.app Lexicon.report_token toks;
- val root' = if root <> "prop" andalso is_logtype root then SynExt.logic else root;
+ val root' = if root <> "prop" andalso is_logtype root then Syn_Ext.logic else root;
val pts = Parser.parse gram root' (filter Lexicon.is_proper toks);
val len = length pts;
val limit = ! ambiguity_limit;
fun show_pt pt =
- Pretty.string_of (Ast.pretty_ast (hd (SynTrans.pts_to_asts ctxt (K NONE) [pt])));
+ Pretty.string_of (Ast.pretty_ast (hd (Syn_Trans.pts_to_asts ctxt (K NONE) [pt])));
in
if len <= ! ambiguity_level then ()
else if ! ambiguity_is_error then error (ambiguity_msg pos)
@@ -497,7 +497,7 @@
"\nproduces " ^ string_of_int len ^ " parse trees" ^
(if len <= limit then "" else " (" ^ string_of_int limit ^ " displayed)") ^ ":") ::
map show_pt (take limit pts))));
- SynTrans.pts_to_asts ctxt (lookup_tr parse_ast_trtab) pts
+ Syn_Trans.pts_to_asts ctxt (lookup_tr parse_ast_trtab) pts
end;
@@ -506,9 +506,9 @@
fun read ctxt is_logtype (syn as Syntax (tabs, _)) ty inp =
let
val {parse_ruletab, parse_trtab, ...} = tabs;
- val asts = read_asts ctxt is_logtype syn false (SynExt.typ_to_nonterm ty) inp;
+ val asts = read_asts ctxt is_logtype syn false (Syn_Ext.typ_to_nonterm ty) inp;
in
- SynTrans.asts_to_terms ctxt (lookup_tr parse_trtab)
+ Syn_Trans.asts_to_terms ctxt (lookup_tr parse_trtab)
(map (Ast.normalize_ast (Symtab.lookup_list parse_ruletab)) asts)
end;
@@ -547,23 +547,23 @@
fun standard_parse_term pp check get_sort map_const map_free ctxt is_logtype syn ty (syms, pos) =
read ctxt is_logtype syn ty (syms, pos)
- |> map (TypeExt.decode_term get_sort map_const map_free)
+ |> map (Type_Ext.decode_term get_sort map_const map_free)
|> disambig (Printer.pp_show_brackets pp) check;
(* read types *)
fun standard_parse_typ ctxt syn get_sort (syms, pos) =
- (case read ctxt (K false) syn SynExt.typeT (syms, pos) of
- [t] => TypeExt.typ_of_term (get_sort (TypeExt.term_sorts t)) t
+ (case read ctxt (K false) syn Syn_Ext.typeT (syms, pos) of
+ [t] => Type_Ext.typ_of_term (get_sort (Type_Ext.term_sorts t)) t
| _ => error (ambiguity_msg pos));
(* read sorts *)
fun standard_parse_sort ctxt syn (syms, pos) =
- (case read ctxt (K false) syn TypeExt.sortT (syms, pos) of
- [t] => TypeExt.sort_of_term t
+ (case read ctxt (K false) syn Type_Ext.sortT (syms, pos) of
+ [t] => Type_Ext.sort_of_term t
| _ => error (ambiguity_msg pos));
@@ -666,9 +666,9 @@
fun ext_syntax f decls = update_syntax mode_default (f decls);
-val update_trfuns = ext_syntax SynExt.syn_ext_trfuns;
-val update_advanced_trfuns = ext_syntax SynExt.syn_ext_advanced_trfuns;
-val extend_tokentrfuns = ext_syntax SynExt.syn_ext_tokentrfuns;
+val update_trfuns = ext_syntax Syn_Ext.syn_ext_trfuns;
+val update_advanced_trfuns = ext_syntax Syn_Ext.syn_ext_advanced_trfuns;
+val extend_tokentrfuns = ext_syntax Syn_Ext.syn_ext_tokentrfuns;
fun update_type_gram add prmode decls =
(if add then update_syntax else remove_syntax) prmode (Mixfix.syn_ext_types decls);
@@ -677,13 +677,13 @@
(if add then update_syntax else remove_syntax) prmode (Mixfix.syn_ext_consts is_logtype decls);
fun update_trrules ctxt is_logtype syn =
- ext_syntax SynExt.syn_ext_rules o read_rules ctxt is_logtype syn;
+ ext_syntax Syn_Ext.syn_ext_rules o read_rules ctxt is_logtype syn;
fun remove_trrules ctxt is_logtype syn =
- remove_syntax mode_default o SynExt.syn_ext_rules o read_rules ctxt is_logtype syn;
+ remove_syntax mode_default o Syn_Ext.syn_ext_rules o read_rules ctxt is_logtype syn;
-val update_trrules_i = ext_syntax SynExt.syn_ext_rules o cert_rules;
-val remove_trrules_i = remove_syntax mode_default o SynExt.syn_ext_rules o cert_rules;
+val update_trrules_i = ext_syntax Syn_Ext.syn_ext_rules o cert_rules;
+val remove_trrules_i = remove_syntax mode_default o Syn_Ext.syn_ext_rules o cert_rules;
@@ -798,7 +798,7 @@
val check_typs = gen_check fst false;
val check_terms = gen_check snd false;
-fun check_props ctxt = map (TypeExt.type_constraint propT) #> check_terms ctxt;
+fun check_props ctxt = map (Type_Ext.type_constraint propT) #> check_terms ctxt;
val check_typ = singleton o check_typs;
val check_term = singleton o check_terms;
@@ -905,7 +905,7 @@
(*export parts of internal Syntax structures*)
-open Lexicon SynExt Ast Parser TypeExt SynTrans Mixfix Printer;
+open Lexicon Syn_Ext Ast Parser Type_Ext Syn_Trans Mixfix Printer;
end;
@@ -913,9 +913,9 @@
open Basic_Syntax;
forget_structure "Ast";
-forget_structure "SynExt";
+forget_structure "Syn_Ext";
forget_structure "Parser";
-forget_structure "TypeExt";
-forget_structure "SynTrans";
+forget_structure "Type_Ext";
+forget_structure "Syn_Trans";
forget_structure "Mixfix";
forget_structure "Printer";
--- a/src/Pure/Syntax/type_ext.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Syntax/type_ext.ML Mon May 31 21:06:57 2010 +0200
@@ -27,10 +27,10 @@
val term_of_sort: sort -> term
val tappl_ast_tr': Ast.ast * Ast.ast list -> Ast.ast
val sortT: typ
- val type_ext: SynExt.syn_ext
+ val type_ext: Syn_Ext.syn_ext
end;
-structure TypeExt: TYPE_EXT =
+structure Type_Ext: TYPE_EXT =
struct
(** input utils **)
@@ -242,7 +242,7 @@
val classesT = Type ("classes", []);
val typesT = Type ("types", []);
-local open Lexicon SynExt in
+local open Lexicon Syn_Ext in
val type_ext = syn_ext' false (K false)
[Mfix ("_", tidT --> typeT, "", [], max_pri),
@@ -271,7 +271,7 @@
Mfix ("'(_')", typeT --> typeT, "", [0], max_pri),
Mfix ("'_", typeT, "\\<^type>dummy", [], max_pri)]
["_type_prop"]
- ([], [], [], map SynExt.mk_trfun [("\\<^type>fun", K fun_ast_tr')])
+ ([], [], [], map Syn_Ext.mk_trfun [("\\<^type>fun", K fun_ast_tr')])
[]
([], []);
--- a/src/Pure/System/isabelle_process.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/System/isabelle_process.ML Mon May 31 21:06:57 2010 +0200
@@ -67,9 +67,9 @@
val path = File.platform_path (Path.explode out);
val out_stream = TextIO.openOut path; (*fifo blocks until reader is ready*)
val _ = OS.FileSys.remove path; (*prevent alien access, indicate writer is ready*)
- val _ = SimpleThread.fork false (auto_flush out_stream);
- val _ = SimpleThread.fork false (auto_flush TextIO.stdOut);
- val _ = SimpleThread.fork false (auto_flush TextIO.stdErr);
+ val _ = Simple_Thread.fork false (auto_flush out_stream);
+ val _ = Simple_Thread.fork false (auto_flush TextIO.stdOut);
+ val _ = Simple_Thread.fork false (auto_flush TextIO.stdErr);
in
Output.status_fn := standard_message out_stream "B";
Output.writeln_fn := standard_message out_stream "C";
--- a/src/Pure/System/isar.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/System/isar.ML Mon May 31 21:06:57 2010 +0200
@@ -72,7 +72,7 @@
fun find_and_undo _ [] = error "Undo history exhausted"
| find_and_undo which ((prev, tr) :: hist) =
- ((case Toplevel.init_of tr of SOME name => ThyInfo.kill_thy name | NONE => ());
+ ((case Toplevel.init_of tr of SOME name => Thy_Info.kill_thy name | NONE => ());
if which (Toplevel.name_of tr) then (prev, hist) else find_and_undo which hist);
in
--- a/src/Pure/System/session.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/System/session.ML Mon May 31 21:06:57 2010 +0200
@@ -73,7 +73,7 @@
(* finish *)
fun finish () =
- (ThyInfo.finish ();
+ (Thy_Info.finish ();
Present.finish ();
Future.shutdown ();
session_finished := true);
@@ -97,7 +97,7 @@
((fn () =>
(init reset parent name;
Present.init build info doc doc_graph doc_versions (path ()) name
- (dumping dump) (get_rpath rpath) verbose (map ThyInfo.get_theory (ThyInfo.get_names ()));
+ (dumping dump) (get_rpath rpath) verbose (map Thy_Info.get_theory (Thy_Info.get_names ()));
if timing then
let
val start = start_timing ();
--- a/src/Pure/Thy/present.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Thy/present.ML Mon May 31 21:06:57 2010 +0200
@@ -461,7 +461,7 @@
val parent_specs = map (parent_link remote_path path) parents;
fun prep_file (raw_path, loadit) =
- (case ThyLoad.check_ml dir raw_path of
+ (case Thy_Load.check_ml dir raw_path of
SOME (path, _) =>
let
val base = Path.base path;
--- a/src/Pure/Thy/thm_deps.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Thy/thm_deps.ML Mon May 31 21:06:57 2010 +0200
@@ -24,7 +24,7 @@
val session =
(case prefix of
a :: _ =>
- (case ThyInfo.lookup_theory a of
+ (case Thy_Info.lookup_theory a of
SOME thy =>
(case Present.session_name thy of
"" => []
--- a/src/Pure/Thy/thy_info.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Thy/thy_info.ML Mon May 31 21:06:57 2010 +0200
@@ -41,7 +41,7 @@
val finish: unit -> unit
end;
-structure ThyInfo: THY_INFO =
+structure Thy_Info: THY_INFO =
struct
(** theory loader actions and hooks **)
@@ -292,12 +292,12 @@
fun run_file path =
(case Option.map (Context.theory_name o Context.the_theory) (Context.thread_data ()) of
- NONE => (ThyLoad.load_ml Path.current path; ())
+ NONE => (Thy_Load.load_ml Path.current path; ())
| SOME name =>
(case lookup_deps name of
SOME deps =>
- change_deps name (provide path name (ThyLoad.load_ml (master_dir' deps) path))
- | NONE => (ThyLoad.load_ml Path.current path; ())));
+ change_deps name (provide path name (Thy_Load.load_ml (master_dir' deps) path))
+ | NONE => (Thy_Load.load_ml Path.current path; ())));
in
@@ -306,7 +306,7 @@
val dir = master_directory name;
val _ = check_unfinished error name;
in
- (case ThyLoad.check_file dir path of
+ (case Thy_Load.check_file dir path of
SOME path_info => change_deps name (provide path name path_info)
| NONE => error ("Could not find file " ^ quote (Path.implode path)))
end;
@@ -429,7 +429,7 @@
let val info' =
(case info of NONE => NONE
| SOME (_, id) =>
- (case ThyLoad.check_ml (master_dir master) src_path of NONE => NONE
+ (case Thy_Load.check_ml (master_dir master) src_path of NONE => NONE
| SOME (path', id') => if id <> id' then NONE else SOME (path', id')))
in (src_path, info') end;
@@ -437,16 +437,16 @@
(case lookup_deps name of
SOME NONE => (true, NONE, get_parents name)
| NONE =>
- let val {master, text, imports = parents, uses = files} = ThyLoad.deps_thy dir name
+ let val {master, text, imports = parents, uses = files} = Thy_Load.deps_thy dir name
in (false, init_deps (SOME master) text parents files, parents) end
| SOME (SOME {update_time, master, text, parents, files}) =>
let
- val (thy_path, thy_id) = ThyLoad.check_thy dir name;
+ val (thy_path, thy_id) = Thy_Load.check_thy dir name;
val master' = SOME (thy_path, thy_id);
in
if Option.map #2 master <> SOME thy_id then
let val {text = text', imports = parents', uses = files', ...} =
- ThyLoad.deps_thy dir name;
+ Thy_Load.deps_thy dir name;
in (false, init_deps master' text' parents' files', parents') end
else
let
@@ -571,7 +571,7 @@
val _ = map get_theory (get_parents name);
val _ = check_unfinished error name;
val _ = touch_thy name;
- val master = #master (ThyLoad.deps_thy Path.current name);
+ val master = #master (Thy_Load.deps_thy Path.current name);
val upd_time = #2 (Management_Data.get thy);
in
CRITICAL (fn () =>
--- a/src/Pure/Thy/thy_load.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Thy/thy_load.ML Mon May 31 21:06:57 2010 +0200
@@ -31,10 +31,9 @@
val load_ml: Path.T -> Path.T -> Path.T * File.ident
end;
-structure ThyLoad: THY_LOAD =
+structure Thy_Load: THY_LOAD =
struct
-
(* maintain load path *)
local val load_path = Unsynchronized.ref [Path.current] in
@@ -132,5 +131,5 @@
end;
-structure Basic_Thy_Load: BASIC_THY_LOAD = ThyLoad;
+structure Basic_Thy_Load: BASIC_THY_LOAD = Thy_Load;
open Basic_Thy_Load;
--- a/src/Pure/Thy/thy_output.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Thy/thy_output.ML Mon May 31 21:06:57 2010 +0200
@@ -33,7 +33,7 @@
val output: Pretty.T list -> string
end;
-structure ThyOutput: THY_OUTPUT =
+structure Thy_Output: THY_OUTPUT =
struct
(** global options **)
--- a/src/Pure/Thy/thy_syntax.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/Thy/thy_syntax.ML Mon May 31 21:06:57 2010 +0200
@@ -25,7 +25,7 @@
(span * span list * bool, (span, 'a) Source.source) Source.source
end;
-structure ThySyntax: THY_SYNTAX =
+structure Thy_Syntax: THY_SYNTAX =
struct
(** tokens **)
--- a/src/Pure/context.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/context.ML Mon May 31 21:06:57 2010 +0200
@@ -325,7 +325,7 @@
local
val lock = Mutex.mutex ();
in
- fun SYNCHRONIZED e = SimpleThread.synchronized "theory" lock e;
+ fun SYNCHRONIZED e = Simple_Thread.synchronized "theory" lock e;
end;
fun create_thy self draft ids data ancestry history =
--- a/src/Pure/morphism.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/morphism.ML Mon May 31 21:06:57 2010 +0200
@@ -79,5 +79,5 @@
end;
-structure BasicMorphism: BASIC_MORPHISM = Morphism;
-open BasicMorphism;
+structure Basic_Morphism: BASIC_MORPHISM = Morphism;
+open Basic_Morphism;
--- a/src/Pure/proofterm.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/proofterm.ML Mon May 31 21:06:57 2010 +0200
@@ -1328,7 +1328,7 @@
(**** theory data ****)
-structure ProofData = Theory_Data
+structure Data = Theory_Data
(
type T =
(stamp * (proof * proof)) list *
@@ -1341,11 +1341,11 @@
AList.merge (op =) (K true) (procs1, procs2));
);
-fun get_data thy = let val (rules, procs) = ProofData.get thy in (map #2 rules, map #2 procs) end;
+fun get_data thy = let val (rules, procs) = Data.get thy in (map #2 rules, map #2 procs) end;
fun rew_proof thy = rewrite_prf fst (get_data thy);
-fun add_prf_rrule r = (ProofData.map o apfst) (cons (stamp (), r));
-fun add_prf_rproc p = (ProofData.map o apsnd) (cons (stamp (), p));
+fun add_prf_rrule r = (Data.map o apfst) (cons (stamp (), r));
+fun add_prf_rproc p = (Data.map o apsnd) (cons (stamp (), p));
(***** promises *****)
--- a/src/Pure/pure_setup.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/pure_setup.ML Mon May 31 21:06:57 2010 +0200
@@ -6,14 +6,14 @@
(* the Pure theories *)
-val theory = ThyInfo.get_theory;
+val theory = Thy_Info.get_theory;
Context.>> (Context.map_theory
(Outer_Syntax.process_file (Path.explode "Pure.thy") #>
Theory.end_theory));
structure Pure = struct val thy = ML_Context.the_global_context () end;
Context.set_thread_data NONE;
-ThyInfo.register_theory Pure.thy;
+Thy_Info.register_theory Pure.thy;
(* ML toplevel pretty printing *)
@@ -47,11 +47,11 @@
(* ML toplevel use commands *)
-fun use name = Toplevel.program (fn () => ThyInfo.use name);
-fun use_thys name = Toplevel.program (fn () => ThyInfo.use_thys name);
-fun use_thy name = Toplevel.program (fn () => ThyInfo.use_thy name);
-fun time_use name = Toplevel.program (fn () => ThyInfo.time_use name);
-fun time_use_thy name = Toplevel.program (fn () => ThyInfo.time_use_thy name);
+fun use name = Toplevel.program (fn () => Thy_Info.use name);
+fun use_thys name = Toplevel.program (fn () => Thy_Info.use_thys name);
+fun use_thy name = Toplevel.program (fn () => Thy_Info.use_thy name);
+fun time_use name = Toplevel.program (fn () => Thy_Info.time_use name);
+fun time_use_thy name = Toplevel.program (fn () => Thy_Info.time_use_thy name);
(* misc *)
--- a/src/Pure/pure_thy.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/pure_thy.ML Mon May 31 21:06:57 2010 +0200
@@ -52,7 +52,7 @@
(** theory data **)
-structure FactsData = Theory_Data
+structure Global_Facts = Theory_Data
(
type T = Facts.T * thm list;
val empty = (Facts.empty, []);
@@ -63,19 +63,19 @@
(* facts *)
-val facts_of = #1 o FactsData.get;
+val facts_of = #1 o Global_Facts.get;
val intern_fact = Facts.intern o facts_of;
val defined_fact = Facts.defined o facts_of;
-fun hide_fact fully name = FactsData.map (apfst (Facts.hide fully name));
+fun hide_fact fully name = Global_Facts.map (apfst (Facts.hide fully name));
(* proofs *)
-fun register_proofs (thy, thms) = (FactsData.map (apsnd (append thms)) thy, thms);
+fun register_proofs (thy, thms) = (Global_Facts.map (apsnd (append thms)) thy, thms);
-fun join_proofs thy = Thm.join_proofs (rev (#2 (FactsData.get thy)));
+fun join_proofs thy = Thm.join_proofs (rev (#2 (Global_Facts.get thy)));
@@ -143,7 +143,7 @@
val (thy', thms') =
register_proofs (apsnd (post_name name) (app_att (thy, pre_name name thms)));
val thms'' = map (Thm.transfer thy') thms';
- val thy'' = thy' |> (FactsData.map o apfst) (Facts.add_global naming (b, thms'') #> snd);
+ val thy'' = thy' |> (Global_Facts.map o apfst) (Facts.add_global naming (b, thms'') #> snd);
in (thms'', thy'') end;
@@ -178,7 +178,7 @@
(* add_thms_dynamic *)
fun add_thms_dynamic (b, f) thy = thy
- |> (FactsData.map o apfst)
+ |> (Global_Facts.map o apfst)
(Facts.add_dynamic (Sign.naming_of thy) (b, f) #> snd);
@@ -252,7 +252,7 @@
("_applC", typ "('b => 'a) => cargs => logic", Mixfix ("(1_/ _)", [1000, 1000], 999)),
("_applC", typ "('b => 'a) => cargs => aprop", Mixfix ("(1_/ _)", [1000, 1000], 999))];
-structure OldApplSyntax = Theory_Data
+structure Old_Appl_Syntax = Theory_Data
(
type T = bool;
val empty = false;
@@ -262,10 +262,10 @@
else error "Cannot merge theories with different application syntax";
);
-val old_appl_syntax = OldApplSyntax.get;
+val old_appl_syntax = Old_Appl_Syntax.get;
val old_appl_syntax_setup =
- OldApplSyntax.put true #>
+ Old_Appl_Syntax.put true #>
Sign.del_modesyntax_i Syntax.mode_default applC_syntax #>
Sign.add_syntax_i appl_syntax;
@@ -274,7 +274,7 @@
val _ = Context.>> (Context.map_theory
(Sign.map_naming (Name_Space.set_theory_name Context.PureN) #>
- OldApplSyntax.put false #>
+ Old_Appl_Syntax.put false #>
Sign.add_types
[(Binding.name "fun", 2, NoSyn),
(Binding.name "prop", 0, NoSyn),
--- a/src/Pure/simplifier.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Pure/simplifier.ML Mon May 31 21:06:57 2010 +0200
@@ -100,7 +100,7 @@
(** simpset data **)
-structure SimpsetData = Generic_Data
+structure Simpset = Generic_Data
(
type T = simpset;
val empty = empty_ss;
@@ -108,8 +108,8 @@
val merge = merge_ss;
);
-val get_ss = SimpsetData.get;
-fun map_ss f context = SimpsetData.map (with_context (Context.proof_of context) f) context;
+val get_ss = Simpset.get;
+fun map_ss f context = Simpset.map (with_context (Context.proof_of context) f) context;
(* attributes *)
--- a/src/Tools/Code/code_eval.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Tools/Code/code_eval.ML Mon May 31 21:06:57 2010 +0200
@@ -173,7 +173,7 @@
end
| process (code_body, _) _ (SOME file_name) thy =
let
- val preamble = "(* Generated from " ^ Path.implode (ThyLoad.thy_path (Context.theory_name thy))
+ val preamble = "(* Generated from " ^ Path.implode (Thy_Load.thy_path (Context.theory_name thy))
^ "; DO NOT EDIT! *)";
val _ = File.write (Path.explode file_name) (preamble ^ "\n\n" ^ code_body);
in
--- a/src/Tools/Code/code_thingol.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Tools/Code/code_thingol.ML Mon May 31 21:06:57 2010 +0200
@@ -882,7 +882,7 @@
fun consts_of_select thy' = filter (belongs_here thy') (consts_of thy');
fun read_const_expr "*" = ([], consts_of thy)
| read_const_expr s = if String.isSuffix ".*" s
- then ([], consts_of_select (ThyInfo.the_theory (unsuffix ".*" s) thy))
+ then ([], consts_of_select (Thy_Info.the_theory (unsuffix ".*" s) thy))
else ([Code.read_const thy s], []);
in pairself flat o split_list o map read_const_expr end;
--- a/src/Tools/WWW_Find/find_theorems.ML Mon May 31 19:36:13 2010 +0200
+++ b/src/Tools/WWW_Find/find_theorems.ML Mon May 31 21:06:57 2010 +0200
@@ -5,8 +5,9 @@
*)
local
+
val default_limit = 20;
-val thy_names = sort string_ord (ThyInfo.get_names ());
+val thy_names = sort string_ord (Thy_Info.get_names ());
val find_theorems_url = "find_theorems";
@@ -234,7 +235,9 @@
Xhtml.write_close send header
end;
in
+
val () = show_question_marks := false;
val () = ScgiServer.register (find_theorems_url, SOME Mime.html, find_theorems);
+
end;