changeset:   36444:027879c5637d
parent:      36443:e62e32e163a4 (current diff)
parent:      36426:cc8db7295249 (diff)
child:       36445:0685b4336132
author:      huffman
date:        Tue, 27 Apr 2010 11:17:50 -0700
summary:     merged
files:       src/HOL/Multivariate_Analysis/Determinants.thy
             src/HOL/Tools/ATP_Manager/atp_minimal.ML
             src/HOL/Tools/ATP_Manager/atp_wrapper.ML
--- a/CONTRIBUTORS	Tue Apr 27 11:03:04 2010 -0700
+++ b/CONTRIBUTORS	Tue Apr 27 11:17:50 2010 -0700
@@ -6,6 +6,14 @@
 Contributions to this Isabelle version
 --------------------------------------
 
+* April 2010, Florian Haftmann, TUM
+  Reorganization of abstract algebra type classes.
+
+* April 2010, Florian Haftmann, TUM
+  Code generation for data representations involving invariants;
+  various collections available in theories Fset, Dlist, RBT,
+  Mapping and AssocList.
+
 
 Contributions to Isabelle2009-1
 -------------------------------
--- a/NEWS	Tue Apr 27 11:03:04 2010 -0700
+++ b/NEWS	Tue Apr 27 11:17:50 2010 -0700
@@ -103,6 +103,9 @@
 datatype constructors have been renamed from InfixName to Infix etc.
 Minor INCOMPATIBILITY.
 
+* Command 'example_proof' opens an empty proof body.  This allows one
+to experiment with Isar, without producing any persistent result.
+
 * Commands 'type_notation' and 'no_type_notation' declare type syntax
 within a local theory context, with explicit checking of the
 constructors involved (in contrast to the raw 'syntax' versions).
@@ -118,17 +121,6 @@
 
 *** HOL ***
 
-* Abstract algebra:
-  * classes division_ring_inverse_zero, field_inverse_zero, linordered_field_inverse_zero
-    include rule inverse 0 = 0 -- subsumes former division_by_zero class.
-  * numerous lemmas have been ported from field to division_ring;
-  * dropped theorem group group_simps, use algebra_simps instead;
-  * dropped theorem group ring_simps, use field_simps instead;
-  * proper theorem collection field_simps subsumes former theorem groups field_eq_simps and field_simps;
-  * dropped lemma eq_minus_self_iff which is a duplicate for equal_neg_zero.
-
-  INCOMPATIBILITY.
-
 * Library theory 'RBT' renamed to 'RBT_Impl'; new library theory 'RBT'
 provides abstract red-black tree type which is backed by RBT_Impl
 as implementation.  INCOMPATIBILITY.
@@ -157,16 +149,11 @@
 INCOMPATIBILITY.
 
 * Some generic constants have been put to appropriate theories:
-
-    less_eq, less: Orderings
-    zero, one, plus, minus, uminus, times, abs, sgn: Groups
-    inverse, divide: Rings
-
+  * less_eq, less: Orderings
+  * zero, one, plus, minus, uminus, times, abs, sgn: Groups
+  * inverse, divide: Rings
 INCOMPATIBILITY.
 
-* Class division_ring also requires proof of fact divide_inverse.  However instantiation
-of parameter divide has also been required previously.  INCOMPATIBILITY.
-
 * More consistent naming of type classes involving orderings (and lattices):
 
     lower_semilattice                   ~> semilattice_inf
@@ -218,33 +205,18 @@
 
 INCOMPATIBILITY.
 
-* HOLogic.strip_psplit: types are returned in syntactic order, similar
-to other strip and tuple operations.  INCOMPATIBILITY.
-
-* Various old-style primrec specifications in the HOL theories have been
-replaced by new-style primrec, especially in theory List.  The corresponding
-constants now have authentic syntax.  INCOMPATIBILITY.
-
-* Reorganized theory Multiset: swapped notation of pointwise and multiset order:
-  * pointwise ordering is instance of class order with standard syntax <= and <;
-  * multiset ordering has syntax <=# and <#; partial order properties are provided
-      by means of interpretation with prefix multiset_order.
-Less duplication, less historical organization of sections,
-conversion from associations lists to multisets, rudimentary code generation.
-Use insert_DiffM2 [symmetric] instead of elem_imp_eq_diff_union, if needed.
-INCOMPATIBILITY.
-
-* Reorganized theory Sum_Type; Inl and Inr now have authentic syntax.
-INCOMPATIBILITY.
-
-* Code generation: ML and OCaml code is decorated with signatures.
-
-* Theory Complete_Lattice: lemmas top_def and bot_def have been
-replaced by the more convenient lemmas Inf_empty and Sup_empty.
-Dropped lemmas Inf_insert_simp and Sup_insert_simp, which are subsumed
-by Inf_insert and Sup_insert.  Lemmas Inf_UNIV and Sup_UNIV replace
-former Inf_Univ and Sup_Univ.  Lemmas inf_top_right and sup_bot_right
-subsume inf_top and sup_bot respectively.  INCOMPATIBILITY.
+* Refined field classes:
+  * classes division_ring_inverse_zero, field_inverse_zero, linordered_field_inverse_zero
+    include rule inverse 0 = 0 -- subsumes former division_by_zero class.
+  * numerous lemmas have been ported from field to division_ring;
+  INCOMPATIBILITY.
+
+* Refined algebra theorem collections:
+  * dropped theorem group group_simps, use algebra_simps instead;
+  * dropped theorem group ring_simps, use field_simps instead;
+  * proper theorem collection field_simps subsumes former theorem groups field_eq_simps and field_simps;
+  * dropped lemma eq_minus_self_iff, which is a duplicate of equal_neg_zero.
+  INCOMPATIBILITY.
 
 * Theory Finite_Set and List: some lemmas have been generalized from
 sets to lattices:
@@ -260,6 +232,27 @@
   INTER_fold_inter              ~> INFI_fold_inf
   UNION_fold_union              ~> SUPR_fold_sup
 
+* Theory Complete_Lattice: lemmas top_def and bot_def have been
+replaced by the more convenient lemmas Inf_empty and Sup_empty.
+Dropped lemmas Inf_insert_simp and Sup_insert_simp, which are subsumed
+by Inf_insert and Sup_insert.  Lemmas Inf_UNIV and Sup_UNIV replace
+former Inf_Univ and Sup_Univ.  Lemmas inf_top_right and sup_bot_right
+subsume inf_top and sup_bot respectively.  INCOMPATIBILITY.
+
+* HOLogic.strip_psplit: types are returned in syntactic order, similar
+to other strip and tuple operations.  INCOMPATIBILITY.
+
+* Reorganized theory Multiset: swapped notation of pointwise and multiset order:
+  * pointwise ordering is instance of class order with standard syntax <= and <;
+  * multiset ordering has syntax <=# and <#; partial order properties are provided
+      by means of interpretation with prefix multiset_order;
+  * less duplication, less historical organization of sections,
+      conversion from association lists to multisets, rudimentary code generation;
+  * use insert_DiffM2 [symmetric] instead of elem_imp_eq_diff_union, if needed.
+  INCOMPATIBILITY.
+
+* Code generation: ML and OCaml code is decorated with signatures.
+
 * Theory List: added transpose.
 
 * Renamed Library/Quotient.thy to Library/Quotient_Type.thy to avoid
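
Illustration (not part of this changeset): the refined field classes above hinge on
the single rule "inverse 0 = 0".  A minimal Isar sketch, assuming the fact names
inverse_zero and divide_inverse mentioned in the NEWS entries:

    lemma "inverse (0::'a::field_inverse_zero) = 0"
      by (simp add: inverse_zero)   (* the class rule from the entry above *)

    lemma "(x::'a::field_inverse_zero) / 0 = 0"
      by (simp add: divide_inverse inverse_zero)   (* x / 0 = x * inverse 0 = 0 *)
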
--- a/doc-src/IsarRef/Thy/Framework.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/doc-src/IsarRef/Thy/Framework.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -79,8 +79,7 @@
 text_raw {*\medskip\begin{minipage}{0.6\textwidth}*}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
     assume "x \<in> A" and "x \<in> B"
     then have "x \<in> A \<inter> B" ..
@@ -107,8 +106,7 @@
 *}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
     assume "x \<in> A" and "x \<in> B"
     then have "x \<in> A \<inter> B" by (rule IntI)
@@ -130,8 +128,7 @@
 text_raw {*\medskip\begin{minipage}{0.6\textwidth}*}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
     have "x \<in> \<Inter>\<A>"
     proof
@@ -178,8 +175,7 @@
 text_raw {*\medskip\begin{minipage}{0.6\textwidth}*}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
     assume "x \<in> \<Union>\<A>"
     then have C
@@ -212,8 +208,7 @@
 *}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
     assume "x \<in> \<Union>\<A>"
     then obtain A where "x \<in> A" and "A \<in> \<A>" ..
@@ -817,8 +812,7 @@
 *}
 
 text_raw {* \begingroup\footnotesize *}
-(*<*)lemma True
-proof
+(*<*)example_proof
 (*>*)
   txt_raw {* \begin{minipage}[t]{0.18\textwidth} *}
   have "A \<longrightarrow> B"
@@ -877,8 +871,7 @@
 text_raw {*\begin{minipage}{0.5\textwidth}*}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
   have "\<And>x y. A x \<Longrightarrow> B y \<Longrightarrow> C x y"
   proof -
@@ -987,8 +980,7 @@
 *}
 
 (*<*)
-lemma True
-proof
+example_proof
 (*>*)
   have "a = b" sorry
   also have "\<dots> = c" sorry
--- a/doc-src/IsarRef/Thy/Proof.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/doc-src/IsarRef/Thy/Proof.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -46,6 +46,28 @@
 
 section {* Proof structure *}
 
+subsection {* Example proofs *}
+
+text {*
+  \begin{matharray}{rcl}
+    @{command_def "example_proof"} & : & @{text "local_theory \<rightarrow> proof(state)"} \\
+  \end{matharray}
+
+  \begin{description}
+
+  \item @{command "example_proof"} opens an empty proof body.  This
+  allows one to experiment with Isar, without producing any persistent
+  result.
+
+  Structurally, this is like a vacuous @{command "lemma"} statement
+  followed by ``@{command "proof"}~@{text "-"}'', which means the
+  example proof may be closed by a regular @{command "qed"}, or
+  discontinued by @{command "oops"}.
+
+  \end{description}
+*}
+
+
 subsection {* Blocks *}
 
 text {*
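
Illustration (not part of this changeset): a throwaway use of the new command,
discontinued with 'oops' so that no persistent result is recorded, modelled on the
Framework.thy examples above:

    example_proof
      assume "x \<in> A" and "x \<in> B"
      then have "x \<in> A \<inter> B" ..
    oops
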
--- a/doc-src/IsarRef/Thy/document/Framework.tex	Tue Apr 27 11:03:04 2010 -0700
+++ b/doc-src/IsarRef/Thy/document/Framework.tex	Tue Apr 27 11:17:50 2010 -0700
@@ -97,11 +97,11 @@
 \medskip\begin{minipage}{0.6\textwidth}
 %
 \isadelimproof
-%
+\ \ \ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
+\isacommand{assume}\isamarkupfalse%
 \ {\isachardoublequoteopen}x\ {\isasymin}\ A{\isachardoublequoteclose}\ \isakeyword{and}\ {\isachardoublequoteopen}x\ {\isasymin}\ B{\isachardoublequoteclose}\isanewline
 \ \ \ \ \isacommand{then}\isamarkupfalse%
 \ \isacommand{have}\isamarkupfalse%
@@ -135,11 +135,11 @@
 \isamarkuptrue%
 %
 \isadelimproof
-%
+\ \ \ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
+\isacommand{assume}\isamarkupfalse%
 \ {\isachardoublequoteopen}x\ {\isasymin}\ A{\isachardoublequoteclose}\ \isakeyword{and}\ {\isachardoublequoteopen}x\ {\isasymin}\ B{\isachardoublequoteclose}\isanewline
 \ \ \ \ \isacommand{then}\isamarkupfalse%
 \ \isacommand{have}\isamarkupfalse%
@@ -166,11 +166,11 @@
 \medskip\begin{minipage}{0.6\textwidth}
 %
 \isadelimproof
-%
+\ \ \ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \ \ \isacommand{have}\isamarkupfalse%
+\isacommand{have}\isamarkupfalse%
 \ {\isachardoublequoteopen}x\ {\isasymin}\ {\isasymInter}{\isasymA}{\isachardoublequoteclose}\isanewline
 \ \ \ \ \isacommand{proof}\isamarkupfalse%
 \isanewline
@@ -198,9 +198,9 @@
 {\isafoldnoproof}%
 %
 \isadelimnoproof
-\isanewline
 %
 \endisadelimnoproof
+\isanewline
 %
 \isadelimproof
 \ \ \ \ %
@@ -251,11 +251,11 @@
 \medskip\begin{minipage}{0.6\textwidth}
 %
 \isadelimproof
-%
+\ \ \ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
+\isacommand{assume}\isamarkupfalse%
 \ {\isachardoublequoteopen}x\ {\isasymin}\ {\isasymUnion}{\isasymA}{\isachardoublequoteclose}\isanewline
 \ \ \ \ \isacommand{then}\isamarkupfalse%
 \ \isacommand{have}\isamarkupfalse%
@@ -286,9 +286,9 @@
 {\isafoldnoproof}%
 %
 \isadelimnoproof
-\isanewline
 %
 \endisadelimnoproof
+\isanewline
 %
 \isadelimproof
 \ \ \ \ %
@@ -326,11 +326,11 @@
 \isamarkuptrue%
 %
 \isadelimproof
-%
+\ \ \ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
+\isacommand{assume}\isamarkupfalse%
 \ {\isachardoublequoteopen}x\ {\isasymin}\ {\isasymUnion}{\isasymA}{\isachardoublequoteclose}\isanewline
 \ \ \ \ \isacommand{then}\isamarkupfalse%
 \ \isacommand{obtain}\isamarkupfalse%
@@ -1186,9 +1186,9 @@
 {\isafoldproof}%
 %
 \isadelimproof
-\isanewline
 %
 \endisadelimproof
+\isanewline
 %
 \isadelimnoproof
 \ \ \ \ \ \ %
@@ -1201,9 +1201,9 @@
 {\isafoldnoproof}%
 %
 \isadelimnoproof
-\isanewline
 %
 \endisadelimnoproof
+\isanewline
 %
 \isadelimproof
 \ \ %
@@ -1268,11 +1268,11 @@
 \begin{minipage}{0.5\textwidth}
 %
 \isadelimproof
-%
+\ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \isacommand{have}\isamarkupfalse%
+\isacommand{have}\isamarkupfalse%
 \ {\isachardoublequoteopen}{\isasymAnd}x\ y{\isachardot}\ A\ x\ {\isasymLongrightarrow}\ B\ y\ {\isasymLongrightarrow}\ C\ x\ y{\isachardoublequoteclose}\isanewline
 \ \ \isacommand{proof}\isamarkupfalse%
 \ {\isacharminus}\isanewline
@@ -1300,9 +1300,9 @@
 {\isafoldnoproof}%
 %
 \isadelimnoproof
-\isanewline
 %
 \endisadelimnoproof
+\isanewline
 %
 \isadelimproof
 \ \ %
@@ -1342,9 +1342,9 @@
 {\isafoldnoproof}%
 %
 \isadelimnoproof
-\isanewline
 %
 \endisadelimnoproof
+\isanewline
 %
 \isadelimproof
 \ \ %
@@ -1456,11 +1456,11 @@
 \isamarkuptrue%
 %
 \isadelimproof
-%
+\ \ %
 \endisadelimproof
 %
 \isatagproof
-\ \ \isacommand{have}\isamarkupfalse%
+\isacommand{have}\isamarkupfalse%
 \ {\isachardoublequoteopen}a\ {\isacharequal}\ b{\isachardoublequoteclose}\ \isacommand{sorry}\isamarkupfalse%
 \isanewline
 \ \ \isacommand{also}\isamarkupfalse%
--- a/doc-src/IsarRef/Thy/document/Proof.tex	Tue Apr 27 11:03:04 2010 -0700
+++ b/doc-src/IsarRef/Thy/document/Proof.tex	Tue Apr 27 11:17:50 2010 -0700
@@ -65,6 +65,30 @@
 }
 \isamarkuptrue%
 %
+\isamarkupsubsection{Example proofs%
+}
+\isamarkuptrue%
+%
+\begin{isamarkuptext}%
+\begin{matharray}{rcl}
+    \indexdef{}{command}{example\_proof}\hypertarget{command.example-proof}{\hyperlink{command.example-proof}{\mbox{\isa{\isacommand{example{\isacharunderscore}proof}}}}} & : & \isa{{\isachardoublequote}local{\isacharunderscore}theory\ {\isasymrightarrow}\ proof{\isacharparenleft}state{\isacharparenright}{\isachardoublequote}} \\
+  \end{matharray}
+
+  \begin{description}
+
+  \item \hyperlink{command.example-proof}{\mbox{\isa{\isacommand{example{\isacharunderscore}proof}}}} opens an empty proof body.  This
+  allows one to experiment with Isar, without producing any persistent
+  result.
+
+  Structurally, this is like a vacuous \hyperlink{command.lemma}{\mbox{\isa{\isacommand{lemma}}}} statement
+  followed by ``\hyperlink{command.proof}{\mbox{\isa{\isacommand{proof}}}}~\isa{{\isachardoublequote}{\isacharminus}{\isachardoublequote}}'', which means the
+  example proof may be closed by a regular \hyperlink{command.qed}{\mbox{\isa{\isacommand{qed}}}}, or
+  discontinued by \hyperlink{command.oops}{\mbox{\isa{\isacommand{oops}}}}.
+
+  \end{description}%
+\end{isamarkuptext}%
+\isamarkuptrue%
+%
 \isamarkupsubsection{Blocks%
 }
 \isamarkuptrue%
--- a/doc-src/Nitpick/nitpick.tex	Tue Apr 27 11:03:04 2010 -0700
+++ b/doc-src/Nitpick/nitpick.tex	Tue Apr 27 11:17:50 2010 -0700
@@ -428,9 +428,6 @@
 $\mathit{sym}.y$ are simply the bound variables $x$ and $y$
 from \textit{sym}'s definition.
 
-Although skolemization is a useful optimization, you can disable it by invoking
-Nitpick with \textit{dont\_skolemize}. See \S\ref{optimizations} for details.
-
 \subsection{Natural Numbers and Integers}
 \label{natural-numbers-and-integers}
 
@@ -2193,15 +2190,6 @@
 {\small See also \textit{overlord} (\S\ref{mode-of-operation}) and
 \textit{batch\_size} (\S\ref{optimizations}).}
 
-\optrue{show\_skolems}{hide\_skolem}
-Specifies whether the values of Skolem constants should be displayed as part of
-counterexamples. Skolem constants correspond to bound variables in the original
-formula and usually help us to understand why the counterexample falsifies the
-formula.
-
-\nopagebreak
-{\small See also \textit{skolemize} (\S\ref{optimizations}).}
-
 \opfalse{show\_datatypes}{hide\_datatypes}
 Specifies whether the subsets used to approximate (co)in\-duc\-tive datatypes should
 be displayed as part of counterexamples. Such subsets are sometimes helpful when
@@ -2215,8 +2203,8 @@
 genuine, but they can clutter the output.
 
 \opfalse{show\_all}{dont\_show\_all}
-Enabling this option effectively enables \textit{show\_skolems},
-\textit{show\_datatypes}, and \textit{show\_consts}.
+Enabling this option effectively enables \textit{show\_datatypes} and
+\textit{show\_consts}.
 
 \opdefault{max\_potential}{int}{$\mathbf{1}$}
 Specifies the maximum number of potential counterexamples to display. Setting
@@ -2258,9 +2246,6 @@
 arguments that are not accounted for are left alone, as if the specification had
 been $1,\ldots,1,n_1,\ldots,n_k$.
 
-\nopagebreak
-{\small See also \textit{uncurry} (\S\ref{optimizations}).}
-
 \opdefault{format}{int\_seq}{$\mathbf{1}$}
 Specifies the default format to use. Irrespective of the default format, the
 extra arguments to a Skolem constant corresponding to the outer bound variables
@@ -2454,15 +2439,6 @@
 {\small See also \textit{debug} (\S\ref{output-format}) and
 \textit{show\_consts} (\S\ref{output-format}).}
 
-\optrue{skolemize}{dont\_skolemize}
-Specifies whether the formula should be skolemized. For performance reasons,
-(positive) $\forall$-quanti\-fiers that occur in the scope of a higher-order
-(positive) $\exists$-quanti\-fier are left unchanged.
-
-\nopagebreak
-{\small See also \textit{debug} (\S\ref{output-format}) and
-\textit{show\_skolems} (\S\ref{output-format}).}
-
 \optrue{star\_linear\_preds}{dont\_star\_linear\_preds}
 Specifies whether Nitpick should use Kodkod's transitive closure operator to
 encode non-well-founded ``linear inductive predicates,'' i.e., inductive
@@ -2474,15 +2450,6 @@
 {\small See also \textit{wf} (\S\ref{scope-of-search}), \textit{debug}
 (\S\ref{output-format}), and \textit{iter} (\S\ref{scope-of-search}).}
 
-\optrue{uncurry}{dont\_uncurry}
-Specifies whether Nitpick should uncurry functions. Uncurrying has on its own no
-tangible effect on efficiency, but it creates opportunities for the boxing 
-optimization.
-
-\nopagebreak
-{\small See also \textit{box} (\S\ref{scope-of-search}), \textit{debug}
-(\S\ref{output-format}), and \textit{format} (\S\ref{output-format}).}
-
 \optrue{fast\_descrs}{full\_descrs}
 Specifies whether Nitpick should optimize the definite and indefinite
 description operators (THE and SOME). The optimized versions usually help
@@ -2498,25 +2465,6 @@
 Unless you are tracking down a bug in Nitpick or distrust the peephole
 optimizer, you should leave this option enabled.
 
-\opdefault{sym\_break}{int}{20}
-Specifies an upper bound on the number of relations for which Kodkod generates
-symmetry breaking predicates. According to the Kodkod documentation
-\cite{kodkod-2009-options}, ``in general, the higher this value, the more
-symmetries will be broken, and the faster the formula will be solved. But,
-setting the value too high may have the opposite effect and slow down the
-solving.''
-
-\opdefault{sharing\_depth}{int}{3}
-Specifies the depth to which Kodkod should check circuits for equivalence during
-the translation to SAT. The default of 3 is the same as in Alloy. The minimum
-allowed depth is 1. Increasing the sharing may result in a smaller SAT problem,
-but can also slow down Kodkod.
-
-\opfalse{flatten\_props}{dont\_flatten\_props}
-Specifies whether Kodkod should try to eliminate intermediate Boolean variables.
-Although this might sound like a good idea, in practice it can drastically slow
-down Kodkod.
-
 \opdefault{max\_threads}{int}{0}
 Specifies the maximum number of threads to use in Kodkod. If this option is set
 to 0, Kodkod will compute an appropriate value based on the number of processor
@@ -2569,7 +2517,7 @@
 Behind the scenes, Isabelle's built-in packages and theories rely on the
 following attributes to affect Nitpick's behavior:
 
-\begin{itemize}
+\begin{enum}
 \flushitem{\textit{nitpick\_def}}
 
 \nopagebreak
@@ -2611,8 +2559,8 @@
 must be of the form
 
 \qquad $\lbrakk P_1;\> \ldots;\> P_m;\> M~(c\ t_{11}\ \ldots\ t_{1n});\>
-\ldots;\> M~(c\ t_{k1}\ \ldots\ t_{kn})\rbrakk \,\Longrightarrow\, c\ u_1\
-\ldots\ u_n$,
+\ldots;\> M~(c\ t_{k1}\ \ldots\ t_{kn})\rbrakk$ \\
+\hbox{}\qquad ${\Longrightarrow}\;\, c\ u_1\ \ldots\ u_n$,
 
 where the $P_i$'s are side conditions that do not involve $c$ and $M$ is an
 optional monotonic operator. The order of the assumptions is irrelevant.
@@ -2623,7 +2571,7 @@
 This attribute specifies the (free-form) specification of a constant defined
 using the \hbox{(\textbf{ax\_})}\allowbreak\textbf{specification} command.
 
-\end{itemize}
+\end{enum}
 
 When faced with a constant, Nitpick proceeds as follows:
 
--- a/etc/isar-keywords-ZF.el	Tue Apr 27 11:03:04 2010 -0700
+++ b/etc/isar-keywords-ZF.el	Tue Apr 27 11:17:50 2010 -0700
@@ -66,6 +66,7 @@
     "done"
     "enable_pr"
     "end"
+    "example_proof"
     "exit"
     "extract"
     "extract_type"
@@ -425,6 +426,7 @@
 
 (defconst isar-keywords-theory-goal
   '("corollary"
+    "example_proof"
     "instance"
     "interpretation"
     "lemma"
--- a/etc/isar-keywords.el	Tue Apr 27 11:03:04 2010 -0700
+++ b/etc/isar-keywords.el	Tue Apr 27 11:17:50 2010 -0700
@@ -94,6 +94,7 @@
     "enable_pr"
     "end"
     "equivariance"
+    "example_proof"
     "exit"
     "export_code"
     "extract"
@@ -562,6 +563,7 @@
     "code_pred"
     "corollary"
     "cpodef"
+    "example_proof"
     "function"
     "instance"
     "interpretation"
--- a/src/HOL/Big_Operators.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Big_Operators.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -1033,12 +1033,12 @@
   by (erule finite_induct) (auto simp add: insert_Diff_if)
 
 lemma setprod_inversef: 
-  fixes f :: "'b \<Rightarrow> 'a::{field,division_ring_inverse_zero}"
+  fixes f :: "'b \<Rightarrow> 'a::field_inverse_zero"
   shows "finite A ==> setprod (inverse \<circ> f) A = inverse (setprod f A)"
 by (erule finite_induct) auto
 
 lemma setprod_dividef:
-  fixes f :: "'b \<Rightarrow> 'a::{field,division_ring_inverse_zero}"
+  fixes f :: "'b \<Rightarrow> 'a::field_inverse_zero"
   shows "finite A
     ==> setprod (%x. f x / g x) A = setprod f A / setprod g A"
 apply (subgoal_tac
--- a/src/HOL/Complex.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Complex.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -99,7 +99,7 @@
 
 subsection {* Multiplication and Division *}
 
-instantiation complex :: "{field, division_ring_inverse_zero}"
+instantiation complex :: field_inverse_zero
 begin
 
 definition
--- a/src/HOL/Decision_Procs/Parametric_Ferrante_Rackoff.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Decision_Procs/Parametric_Ferrante_Rackoff.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -27,7 +27,7 @@
   "tmsize (CNP n c a) = 3 + polysize c + tmsize a "
 
   (* Semantics of terms tm *)
-consts Itm :: "'a::{ring_char_0,division_ring_inverse_zero,field} list \<Rightarrow> 'a list \<Rightarrow> tm \<Rightarrow> 'a"
+consts Itm :: "'a::{field_char_0, field_inverse_zero} list \<Rightarrow> 'a list \<Rightarrow> tm \<Rightarrow> 'a"
 primrec
   "Itm vs bs (CP c) = (Ipoly vs c)"
   "Itm vs bs (Bound n) = bs!n"
@@ -270,7 +270,7 @@
 by (induct t arbitrary: i rule: tmmul.induct, auto simp add: Let_def)
 
 lemma tmmul_allpolys_npoly[simp]: 
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "allpolys isnpoly t \<Longrightarrow> isnpoly c \<Longrightarrow> allpolys isnpoly (tmmul t c)" by (induct t rule: tmmul.induct, simp_all add: Let_def polymul_norm)
 
 definition tmneg :: "tm \<Rightarrow> tm" where
@@ -296,7 +296,7 @@
 using tmneg_def by simp
 lemma [simp]: "isnpoly (C (-1,1))" unfolding isnpoly_def by simp
 lemma tmneg_allpolys_npoly[simp]: 
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "allpolys isnpoly t \<Longrightarrow> allpolys isnpoly (tmneg t)" 
   unfolding tmneg_def by auto
 
@@ -310,7 +310,7 @@
 lemma tmsub_blt[simp]: "\<lbrakk>tmboundslt n t ; tmboundslt n s\<rbrakk> \<Longrightarrow> tmboundslt n (tmsub t s )"
 using tmsub_def by simp
 lemma tmsub_allpolys_npoly[simp]: 
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "allpolys isnpoly t \<Longrightarrow> allpolys isnpoly s \<Longrightarrow> allpolys isnpoly (tmsub t s)" 
   unfolding tmsub_def by (simp add: isnpoly_def)
 
@@ -324,8 +324,8 @@
   "simptm (CNP n c t) = (let c' = polynate c in if c' = 0\<^sub>p then simptm t else tmadd (CNP n c' (CP 0\<^sub>p ), simptm t))"
 
 lemma polynate_stupid: 
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
-  shows "polynate t = 0\<^sub>p \<Longrightarrow> Ipoly bs t = (0::'a::{ring_char_0,division_ring_inverse_zero, field})" 
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
+  shows "polynate t = 0\<^sub>p \<Longrightarrow> Ipoly bs t = (0::'a::{field_char_0, field_inverse_zero})" 
 apply (subst polynate[symmetric])
 apply simp
 done
@@ -345,7 +345,7 @@
 lemma [simp]: "isnpoly 0\<^sub>p" and [simp]: "isnpoly (C(1,1))" 
   by (simp_all add: isnpoly_def)
 lemma simptm_allpolys_npoly[simp]: 
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "allpolys isnpoly (simptm p)"
   by (induct p rule: simptm.induct, auto simp add: Let_def)
 
@@ -387,7 +387,7 @@
 qed
 
 lemma split0_nb0: 
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "split0 t = (c',t') \<Longrightarrow>  tmbound 0 t'"
 proof-
   fix c' t'
@@ -395,7 +395,7 @@
   with conjunct1[OF split0[where t="t"]] show "tmbound 0 t'" by simp
 qed
 
-lemma split0_nb0'[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+lemma split0_nb0'[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "tmbound0 (snd (split0 t))"
   using split0_nb0[of t "fst (split0 t)" "snd (split0 t)"] by (simp add: tmbound0_tmbound_iff)
 
@@ -418,7 +418,7 @@
 lemma allpolys_split0: "allpolys isnpoly p \<Longrightarrow> allpolys isnpoly (snd (split0 p))"
 by (induct p rule: split0.induct, auto simp  add: isnpoly_def Let_def split_def split0_stupid)
 
-lemma isnpoly_fst_split0:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero, field})"
+lemma isnpoly_fst_split0:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows 
   "allpolys isnpoly p \<Longrightarrow> isnpoly (fst (split0 p))"
   by (induct p rule: split0.induct, 
@@ -447,7 +447,7 @@
 by (induct p rule: fmsize.induct) simp_all
 
   (* Semantics of formulae (fm) *)
-consts Ifm ::"'a::{division_ring_inverse_zero,linordered_field} list \<Rightarrow> 'a list \<Rightarrow> fm \<Rightarrow> bool"
+consts Ifm ::"'a::{linordered_field_inverse_zero} list \<Rightarrow> 'a list \<Rightarrow> fm \<Rightarrow> bool"
 primrec
   "Ifm vs bs T = True"
   "Ifm vs bs F = False"
@@ -969,24 +969,24 @@
 definition "simpeq t = (let (c,s) = split0 (simptm t) in if c= 0\<^sub>p then eq s else Eq (CNP 0 c s))"
 definition "simpneq t = (let (c,s) = split0 (simptm t) in if c= 0\<^sub>p then neq s else NEq (CNP 0 c s))"
 
-lemma simplt_islin[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simplt_islin[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "islin (simplt t)"
   unfolding simplt_def 
   using split0_nb0'
 by (auto simp add: lt_lin Let_def split_def isnpoly_fst_split0[OF simptm_allpolys_npoly] islin_stupid allpolys_split0[OF simptm_allpolys_npoly])
   
-lemma simple_islin[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simple_islin[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "islin (simple t)"
   unfolding simple_def 
   using split0_nb0'
 by (auto simp add: Let_def split_def isnpoly_fst_split0[OF simptm_allpolys_npoly] islin_stupid allpolys_split0[OF simptm_allpolys_npoly] le_lin)
-lemma simpeq_islin[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simpeq_islin[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "islin (simpeq t)"
   unfolding simpeq_def 
   using split0_nb0'
 by (auto simp add: Let_def split_def isnpoly_fst_split0[OF simptm_allpolys_npoly] islin_stupid allpolys_split0[OF simptm_allpolys_npoly] eq_lin)
 
-lemma simpneq_islin[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simpneq_islin[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "islin (simpneq t)"
   unfolding simpneq_def 
   using split0_nb0'
@@ -994,7 +994,7 @@
 
 lemma really_stupid: "\<not> (\<forall>c1 s'. (c1, s') \<noteq> split0 s)"
   by (cases "split0 s", auto)
-lemma split0_npoly:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma split0_npoly:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and n: "allpolys isnpoly t"
   shows "isnpoly (fst (split0 t))" and "allpolys isnpoly (snd (split0 t))"
   using n
@@ -1083,7 +1083,7 @@
   apply (case_tac poly, auto)
   done
 
-lemma simplt_nb[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simplt_nb[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "tmbound0 t \<Longrightarrow> bound0 (simplt t)"
   using split0 [of "simptm t" vs bs]
 proof(simp add: simplt_def Let_def split_def)
@@ -1100,7 +1100,7 @@
        fst (split0 (simptm t)) = 0\<^sub>p" by (simp add: simplt_def Let_def split_def lt_nb)
 qed
 
-lemma simple_nb[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simple_nb[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "tmbound0 t \<Longrightarrow> bound0 (simple t)"
   using split0 [of "simptm t" vs bs]
 proof(simp add: simple_def Let_def split_def)
@@ -1117,7 +1117,7 @@
        fst (split0 (simptm t)) = 0\<^sub>p" by (simp add: simplt_def Let_def split_def le_nb)
 qed
 
-lemma simpeq_nb[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simpeq_nb[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "tmbound0 t \<Longrightarrow> bound0 (simpeq t)"
   using split0 [of "simptm t" vs bs]
 proof(simp add: simpeq_def Let_def split_def)
@@ -1134,7 +1134,7 @@
        fst (split0 (simptm t)) = 0\<^sub>p" by (simp add: simpeq_def Let_def split_def eq_nb)
 qed
 
-lemma simpneq_nb[simp]:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simpneq_nb[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "tmbound0 t \<Longrightarrow> bound0 (simpneq t)"
   using split0 [of "simptm t" vs bs]
 proof(simp add: simpneq_def Let_def split_def)
@@ -1267,7 +1267,7 @@
 lemma simpfm[simp]: "Ifm vs bs (simpfm p) = Ifm vs bs p"
 by(induct p arbitrary: bs rule: simpfm.induct, auto)
 
-lemma simpfm_bound0:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simpfm_bound0:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "bound0 p \<Longrightarrow> bound0 (simpfm p)"
 by (induct p rule: simpfm.induct, auto)
 
@@ -1296,7 +1296,7 @@
 lemma disj_lin: "islin p \<Longrightarrow> islin q \<Longrightarrow> islin (disj p q)" by (simp add: disj_def)
 lemma conj_lin: "islin p \<Longrightarrow> islin q \<Longrightarrow> islin (conj p q)" by (simp add: conj_def)
 
-lemma   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "qfree p \<Longrightarrow> islin (simpfm p)" 
   apply (induct p rule: simpfm.induct)
   apply (simp_all add: conj_lin disj_lin)
@@ -2519,7 +2519,7 @@
 lemma remdps_set[simp]: "set (remdps xs) = set xs"
   by (induct xs rule: remdps.induct, auto)
 
-lemma simpfm_lin:   assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+lemma simpfm_lin:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "qfree p \<Longrightarrow> islin (simpfm p)"
   by (induct p rule: simpfm.induct, auto simp add: conj_lin disj_lin)
 
@@ -2747,12 +2747,12 @@
 using lp tnb
 by (induct p c t rule: msubstpos.induct, auto simp add: msubsteq2_nb msubstltpos_nb msubstlepos_nb)
 
-lemma msubstneg_nb: assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})" and lp: "islin p" and tnb: "tmbound0 t"
+lemma msubstneg_nb: assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})" and lp: "islin p" and tnb: "tmbound0 t"
   shows "bound0 (msubstneg p c t)"
 using lp tnb
 by (induct p c t rule: msubstneg.induct, auto simp add: msubsteq2_nb msubstltneg_nb msubstleneg_nb)
 
-lemma msubst2_nb: assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})" and lp: "islin p" and tnb: "tmbound0 t"
+lemma msubst2_nb: assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})" and lp: "islin p" and tnb: "tmbound0 t"
   shows "bound0 (msubst2 p c t)"
 using lp tnb
 by (simp add: msubst2_def msubstneg_nb msubstpos_nb conj_nb disj_nb lt_nb simpfm_bound0)
@@ -3156,54 +3156,54 @@
 *} "Parametric QE for linear Arithmetic over fields, Version 2"
 
 
-lemma "\<exists>(x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). y \<noteq> -1 \<longrightarrow> (y + 1)*x < 0"
-  apply (frpar type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "y::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+lemma "\<exists>(x::'a::{linordered_field_inverse_zero, number_ring}). y \<noteq> -1 \<longrightarrow> (y + 1)*x < 0"
+  apply (frpar type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "y::'a::{linordered_field_inverse_zero, number_ring}")
   apply (simp add: field_simps)
   apply (rule spec[where x=y])
-  apply (frpar type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "z::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+  apply (frpar type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "z::'a::{linordered_field_inverse_zero, number_ring}")
   by simp
 
 text{* Collins/Jones Problem *}
 (*
-lemma "\<exists>(r::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0"
+lemma "\<exists>(r::'a::{linordered_field_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0"
 proof-
-  have "(\<exists>(r::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0) \<longleftrightarrow> (\<exists>(r::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < 2 *(a^2 + b^2) - (3*(a^2 + b^2)) * r + (2*a)*r \<and> 2*(a^2 + b^2) - (3*(a^2 + b^2) - 4*a + 1)*r - 2*a < 0)" (is "?lhs \<longleftrightarrow> ?rhs")
+  have "(\<exists>(r::'a::{linordered_field_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0) \<longleftrightarrow> (\<exists>(r::'a::{linordered_field_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < 2 *(a^2 + b^2) - (3*(a^2 + b^2)) * r + (2*a)*r \<and> 2*(a^2 + b^2) - (3*(a^2 + b^2) - 4*a + 1)*r - 2*a < 0)" (is "?lhs \<longleftrightarrow> ?rhs")
 by (simp add: field_simps)
 have "?rhs"
 
-  apply (frpar type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "a::'a::{linordered_field, division_ring_inverse_zero, number_ring}" "b::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+  apply (frpar type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "a::'a::{linordered_field_inverse_zero, number_ring}" "b::'a::{linordered_field_inverse_zero, number_ring}")
   apply (simp add: field_simps)
 oops
 *)
 (*
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (1 - t)*x \<le> (1+t)*y \<and> (1 - t)*y \<le> (1+t)*x --> 0 \<le> y"
-apply (frpar type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "t::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (1 - t)*x \<le> (1+t)*y \<and> (1 - t)*y \<le> (1+t)*x --> 0 \<le> y"
+apply (frpar type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "t::'a::{linordered_field_inverse_zero, number_ring}")
 oops
 *)
 
-lemma "\<exists>(x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). y \<noteq> -1 \<longrightarrow> (y + 1)*x < 0"
-  apply (frpar2 type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "y::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+lemma "\<exists>(x::'a::{linordered_field_inverse_zero, number_ring}). y \<noteq> -1 \<longrightarrow> (y + 1)*x < 0"
+  apply (frpar2 type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "y::'a::{linordered_field_inverse_zero, number_ring}")
   apply (simp add: field_simps)
   apply (rule spec[where x=y])
-  apply (frpar2 type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "z::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+  apply (frpar2 type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "z::'a::{linordered_field_inverse_zero, number_ring}")
   by simp
 
 text{* Collins/Jones Problem *}
 
 (*
-lemma "\<exists>(r::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0"
+lemma "\<exists>(r::'a::{linordered_field_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0"
 proof-
-  have "(\<exists>(r::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0) \<longleftrightarrow> (\<exists>(r::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < 2 *(a^2 + b^2) - (3*(a^2 + b^2)) * r + (2*a)*r \<and> 2*(a^2 + b^2) - (3*(a^2 + b^2) - 4*a + 1)*r - 2*a < 0)" (is "?lhs \<longleftrightarrow> ?rhs")
+  have "(\<exists>(r::'a::{linordered_field_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < (2 - 3*r) *(a^2 + b^2) + (2*a)*r \<and> (2 - 3*r) *(a^2 + b^2) + 4*a*r - 2*a - r < 0) \<longleftrightarrow> (\<exists>(r::'a::{linordered_field_inverse_zero, number_ring}). 0 < r \<and> r < 1 \<and> 0 < 2 *(a^2 + b^2) - (3*(a^2 + b^2)) * r + (2*a)*r \<and> 2*(a^2 + b^2) - (3*(a^2 + b^2) - 4*a + 1)*r - 2*a < 0)" (is "?lhs \<longleftrightarrow> ?rhs")
 by (simp add: field_simps)
 have "?rhs"
-  apply (frpar2 type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "a::'a::{linordered_field, division_ring_inverse_zero, number_ring}" "b::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+  apply (frpar2 type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "a::'a::{linordered_field_inverse_zero, number_ring}" "b::'a::{linordered_field_inverse_zero, number_ring}")
   apply simp
 oops
 *)
 
 (*
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (1 - t)*x \<le> (1+t)*y \<and> (1 - t)*y \<le> (1+t)*x --> 0 \<le> y"
-apply (frpar2 type: "'a::{linordered_field, division_ring_inverse_zero, number_ring}" pars: "t::'a::{linordered_field, division_ring_inverse_zero, number_ring}")
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (1 - t)*x \<le> (1+t)*y \<and> (1 - t)*y \<le> (1+t)*x --> 0 \<le> y"
+apply (frpar2 type: "'a::{linordered_field_inverse_zero, number_ring}" pars: "t::'a::{linordered_field_inverse_zero, number_ring}")
 apply (simp add: field_simps linorder_neq_iff[symmetric])
 apply ferrack
 oops
--- a/src/HOL/Decision_Procs/Reflected_Multivariate_Polynomial.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Decision_Procs/Reflected_Multivariate_Polynomial.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -230,7 +230,7 @@
 
 subsection{* Semantics of the polynomial representation *}
 
-consts Ipoly :: "'a list \<Rightarrow> poly \<Rightarrow> 'a::{ring_char_0,power,division_ring_inverse_zero,field}"
+consts Ipoly :: "'a list \<Rightarrow> poly \<Rightarrow> 'a::{field_char_0, field_inverse_zero, power}"
 primrec
   "Ipoly bs (C c) = INum c"
   "Ipoly bs (Bound n) = bs!n"
@@ -241,7 +241,7 @@
   "Ipoly bs (Pw t n) = (Ipoly bs t) ^ n"
   "Ipoly bs (CN c n p) = (Ipoly bs c) + (bs!n)*(Ipoly bs p)"
 abbreviation
-  Ipoly_syntax :: "poly \<Rightarrow> 'a list \<Rightarrow>'a::{ring_char_0,power,division_ring_inverse_zero,field}" ("\<lparr>_\<rparr>\<^sub>p\<^bsup>_\<^esup>")
+  Ipoly_syntax :: "poly \<Rightarrow> 'a list \<Rightarrow>'a::{field_char_0, field_inverse_zero, power}" ("\<lparr>_\<rparr>\<^sub>p\<^bsup>_\<^esup>")
   where "\<lparr>p\<rparr>\<^sub>p\<^bsup>bs\<^esup> \<equiv> Ipoly bs p"
 
 lemma Ipoly_CInt: "Ipoly bs (C (i,1)) = of_int i" 
@@ -394,7 +394,7 @@
 qed simp_all
 
 lemma polymul_properties:
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" and nq: "isnpolyh q n1" and m: "m \<le> min n0 n1"
   shows "isnpolyh (p *\<^sub>p q) (min n0 n1)" 
   and "(p *\<^sub>p q = 0\<^sub>p) = (p = 0\<^sub>p \<or> q = 0\<^sub>p)" 
@@ -568,19 +568,19 @@
 by(induct p q rule: polymul.induct, auto simp add: field_simps)
 
 lemma polymul_normh: 
-    assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+    assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "\<lbrakk>isnpolyh p n0 ; isnpolyh q n1\<rbrakk> \<Longrightarrow> isnpolyh (p *\<^sub>p q) (min n0 n1)"
   using polymul_properties(1)  by blast
 lemma polymul_eq0_iff: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "\<lbrakk> isnpolyh p n0 ; isnpolyh q n1\<rbrakk> \<Longrightarrow> (p *\<^sub>p q = 0\<^sub>p) = (p = 0\<^sub>p \<or> q = 0\<^sub>p) "
   using polymul_properties(2)  by blast
 lemma polymul_degreen:  
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "\<lbrakk> isnpolyh p n0 ; isnpolyh q n1 ; m \<le> min n0 n1\<rbrakk> \<Longrightarrow> degreen (p *\<^sub>p q) m = (if (p = 0\<^sub>p \<or> q = 0\<^sub>p) then 0 else degreen p m + degreen q m)"
   using polymul_properties(3) by blast
 lemma polymul_norm:   
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "\<lbrakk> isnpoly p; isnpoly q\<rbrakk> \<Longrightarrow> isnpoly (polymul (p,q))"
   using polymul_normh[of "p" "0" "q" "0"] isnpoly_def by simp
 
@@ -591,7 +591,7 @@
   by (induct p arbitrary: n0, auto)
 
 lemma monic_eqI: assumes np: "isnpolyh p n0" 
-  shows "INum (headconst p) * Ipoly bs (fst (monic p)) = (Ipoly bs p ::'a::{ring_char_0,power,division_ring_inverse_zero,field})"
+  shows "INum (headconst p) * Ipoly bs (fst (monic p)) = (Ipoly bs p ::'a::{field_char_0, field_inverse_zero, power})"
   unfolding monic_def Let_def
 proof(cases "headconst p = 0\<^sub>N", simp_all add: headconst_zero[OF np])
   let ?h = "headconst p"
@@ -629,13 +629,13 @@
 
 lemma polysub_norm: "\<lbrakk> isnpoly p; isnpoly q\<rbrakk> \<Longrightarrow> isnpoly (polysub(p,q))"
   using polyadd_norm polyneg_norm by (simp add: polysub_def) 
-lemma polysub_same_0[simp]:   assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+lemma polysub_same_0[simp]:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "isnpolyh p n0 \<Longrightarrow> polysub (p, p) = 0\<^sub>p"
 unfolding polysub_def split_def fst_conv snd_conv
 by (induct p arbitrary: n0,auto simp add: Let_def Nsub0[simplified Nsub_def])
 
 lemma polysub_0: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "\<lbrakk> isnpolyh p n0 ; isnpolyh q n1\<rbrakk> \<Longrightarrow> (p -\<^sub>p q = 0\<^sub>p) = (p = q)"
   unfolding polysub_def split_def fst_conv snd_conv
   apply (induct p q arbitrary: n0 n1 rule:polyadd.induct, simp_all add: Nsub0[simplified Nsub_def])
@@ -657,7 +657,7 @@
   done
 
 text{* polypow is a power function and preserves normal forms *}
-lemma polypow[simp]: "Ipoly bs (polypow n p) = ((Ipoly bs p :: 'a::{field, division_ring_inverse_zero, ring_char_0})) ^ n"
+lemma polypow[simp]: "Ipoly bs (polypow n p) = ((Ipoly bs p :: 'a::{field_char_0, field_inverse_zero})) ^ n"
 proof(induct n rule: polypow.induct)
   case 1 thus ?case by simp
 next
@@ -688,7 +688,7 @@
 qed
 
 lemma polypow_normh: 
-    assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+    assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "isnpolyh p n \<Longrightarrow> isnpolyh (polypow k p) n"
 proof (induct k arbitrary: n rule: polypow.induct)
   case (2 k n)
@@ -701,17 +701,17 @@
 qed auto 
 
 lemma polypow_norm:   
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "isnpoly p \<Longrightarrow> isnpoly (polypow k p)"
   by (simp add: polypow_normh isnpoly_def)
 
 text{* Finally the whole normalization*}
 
-lemma polynate[simp]: "Ipoly bs (polynate p) = (Ipoly bs p :: 'a ::{field, division_ring_inverse_zero, ring_char_0})"
+lemma polynate[simp]: "Ipoly bs (polynate p) = (Ipoly bs p :: 'a ::{field_char_0, field_inverse_zero})"
 by (induct p rule:polynate.induct, auto)
 
 lemma polynate_norm[simp]: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "isnpoly (polynate p)"
   by (induct p rule: polynate.induct, simp_all add: polyadd_norm polymul_norm polysub_norm polyneg_norm polypow_norm) (simp_all add: isnpoly_def)
 
@@ -736,14 +736,14 @@
   shows "isnpolyh (funpow k f p) n"
   using f np by (induct k arbitrary: p, auto)
 
-lemma funpow_shift1: "(Ipoly bs (funpow n shift1 p) :: 'a :: {field, division_ring_inverse_zero, ring_char_0}) = Ipoly bs (Mul (Pw (Bound 0) n) p)"
+lemma funpow_shift1: "(Ipoly bs (funpow n shift1 p) :: 'a :: {field_char_0, field_inverse_zero}) = Ipoly bs (Mul (Pw (Bound 0) n) p)"
   by (induct n arbitrary: p, simp_all add: shift1_isnpoly shift1 power_Suc )
 
 lemma shift1_isnpolyh: "isnpolyh p n0 \<Longrightarrow> p\<noteq> 0\<^sub>p \<Longrightarrow> isnpolyh (shift1 p) 0"
   using isnpolyh_mono[where n="n0" and n'="0" and p="p"] by (simp add: shift1_def)
 
 lemma funpow_shift1_1: 
-  "(Ipoly bs (funpow n shift1 p) :: 'a :: {field, division_ring_inverse_zero, ring_char_0}) = Ipoly bs (funpow n shift1 1\<^sub>p *\<^sub>p p)"
+  "(Ipoly bs (funpow n shift1 p) :: 'a :: {field_char_0, field_inverse_zero}) = Ipoly bs (funpow n shift1 1\<^sub>p *\<^sub>p p)"
   by (simp add: funpow_shift1)
 
 lemma poly_cmul[simp]: "Ipoly bs (poly_cmul c p) = Ipoly bs (Mul (C c) p)"
@@ -751,7 +751,7 @@
 
 lemma behead:
   assumes np: "isnpolyh p n"
-  shows "Ipoly bs (Add (Mul (head p) (Pw (Bound 0) (degree p))) (behead p)) = (Ipoly bs p :: 'a :: {field, division_ring_inverse_zero, ring_char_0})"
+  shows "Ipoly bs (Add (Mul (head p) (Pw (Bound 0) (degree p))) (behead p)) = (Ipoly bs p :: 'a :: {field_char_0, field_inverse_zero})"
   using np
 proof (induct p arbitrary: n rule: behead.induct)
   case (1 c p n) hence pn: "isnpolyh p n" by simp
@@ -981,7 +981,7 @@
   by (simp add: head_eq_headn0)
 
 lemma isnpolyh_zero_iff: 
-  assumes nq: "isnpolyh p n0" and eq :"\<forall>bs. wf_bs bs p \<longrightarrow> \<lparr>p\<rparr>\<^sub>p\<^bsup>bs\<^esup> = (0::'a::{ring_char_0,power,division_ring_inverse_zero,field})"
+  assumes nq: "isnpolyh p n0" and eq :"\<forall>bs. wf_bs bs p \<longrightarrow> \<lparr>p\<rparr>\<^sub>p\<^bsup>bs\<^esup> = (0::'a::{field_char_0, field_inverse_zero, power})"
   shows "p = 0\<^sub>p"
 using nq eq
 proof (induct "maxindex p" arbitrary: p n0 rule: less_induct)
@@ -1033,7 +1033,7 @@
 
 lemma isnpolyh_unique:  
   assumes np:"isnpolyh p n0" and nq: "isnpolyh q n1"
-  shows "(\<forall>bs. \<lparr>p\<rparr>\<^sub>p\<^bsup>bs\<^esup> = (\<lparr>q\<rparr>\<^sub>p\<^bsup>bs\<^esup> :: 'a::{ring_char_0,power,division_ring_inverse_zero,field})) \<longleftrightarrow>  p = q"
+  shows "(\<forall>bs. \<lparr>p\<rparr>\<^sub>p\<^bsup>bs\<^esup> = (\<lparr>q\<rparr>\<^sub>p\<^bsup>bs\<^esup> :: 'a::{field_char_0, field_inverse_zero, power})) \<longleftrightarrow>  p = q"
 proof(auto)
   assume H: "\<forall>bs. (\<lparr>p\<rparr>\<^sub>p\<^bsup>bs\<^esup> ::'a)= \<lparr>q\<rparr>\<^sub>p\<^bsup>bs\<^esup>"
   hence "\<forall>bs.\<lparr>p -\<^sub>p q\<rparr>\<^sub>p\<^bsup>bs\<^esup>= (0::'a)" by simp
@@ -1046,50 +1046,50 @@
 
 text{* consequences of unicity on the algorithms for polynomial normalization *}
 
-lemma polyadd_commute:   assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+lemma polyadd_commute:   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" and nq: "isnpolyh q n1" shows "p +\<^sub>p q = q +\<^sub>p p"
   using isnpolyh_unique[OF polyadd_normh[OF np nq] polyadd_normh[OF nq np]] by simp
 
 lemma zero_normh: "isnpolyh 0\<^sub>p n" by simp
 lemma one_normh: "isnpolyh 1\<^sub>p n" by simp
 lemma polyadd_0[simp]: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" shows "p +\<^sub>p 0\<^sub>p = p" and "0\<^sub>p +\<^sub>p p = p"
   using isnpolyh_unique[OF polyadd_normh[OF np zero_normh] np] 
     isnpolyh_unique[OF polyadd_normh[OF zero_normh np] np] by simp_all
 
 lemma polymul_1[simp]: 
-    assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+    assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" shows "p *\<^sub>p 1\<^sub>p = p" and "1\<^sub>p *\<^sub>p p = p"
   using isnpolyh_unique[OF polymul_normh[OF np one_normh] np] 
     isnpolyh_unique[OF polymul_normh[OF one_normh np] np] by simp_all
 lemma polymul_0[simp]: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" shows "p *\<^sub>p 0\<^sub>p = 0\<^sub>p" and "0\<^sub>p *\<^sub>p p = 0\<^sub>p"
   using isnpolyh_unique[OF polymul_normh[OF np zero_normh] zero_normh] 
     isnpolyh_unique[OF polymul_normh[OF zero_normh np] zero_normh] by simp_all
 
 lemma polymul_commute: 
-    assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+    assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np:"isnpolyh p n0" and nq: "isnpolyh q n1"
   shows "p *\<^sub>p q = q *\<^sub>p p"
-using isnpolyh_unique[OF polymul_normh[OF np nq] polymul_normh[OF nq np], where ?'a = "'a\<Colon>{ring_char_0,power,division_ring_inverse_zero,field}"] by simp
+using isnpolyh_unique[OF polymul_normh[OF np nq] polymul_normh[OF nq np], where ?'a = "'a\<Colon>{field_char_0, field_inverse_zero, power}"] by simp
 
 declare polyneg_polyneg[simp]
   
 lemma isnpolyh_polynate_id[simp]: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np:"isnpolyh p n0" shows "polynate p = p"
-  using isnpolyh_unique[where ?'a= "'a::{field, division_ring_inverse_zero, ring_char_0}", OF polynate_norm[of p, unfolded isnpoly_def] np] polynate[where ?'a = "'a::{field, division_ring_inverse_zero, ring_char_0}"] by simp
+  using isnpolyh_unique[where ?'a= "'a::{field_char_0, field_inverse_zero}", OF polynate_norm[of p, unfolded isnpoly_def] np] polynate[where ?'a = "'a::{field_char_0, field_inverse_zero}"] by simp
 
 lemma polynate_idempotent[simp]: 
-    assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+    assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "polynate (polynate p) = polynate p"
   using isnpolyh_polynate_id[OF polynate_norm[of p, unfolded isnpoly_def]] .
 
 lemma poly_nate_polypoly': "poly_nate bs p = polypoly' bs (polynate p)"
   unfolding poly_nate_def polypoly'_def ..
-lemma poly_nate_poly: shows "poly (poly_nate bs p) = (\<lambda>x:: 'a ::{field, division_ring_inverse_zero, ring_char_0}. \<lparr>p\<rparr>\<^sub>p\<^bsup>x # bs\<^esup>)"
+lemma poly_nate_poly: shows "poly (poly_nate bs p) = (\<lambda>x:: 'a ::{field_char_0, field_inverse_zero}. \<lparr>p\<rparr>\<^sub>p\<^bsup>x # bs\<^esup>)"
   using polypoly'_poly[OF polynate_norm[unfolded isnpoly_def], symmetric, of bs p]
   unfolding poly_nate_polypoly' by (auto intro: ext)
 
@@ -1116,7 +1116,7 @@
 qed
 
 lemma degree_polysub_samehead: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" and nq: "isnpolyh q n1" and h: "head p = head q" 
   and d: "degree p = degree q"
   shows "degree (p -\<^sub>p q) < degree p \<or> (p -\<^sub>p q = 0\<^sub>p)"
@@ -1226,7 +1226,7 @@
 done
 
 lemma polymul_head_polyeq: 
-   assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+   assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "\<lbrakk>isnpolyh p n0; isnpolyh q n1 ; p \<noteq> 0\<^sub>p ; q \<noteq> 0\<^sub>p \<rbrakk> \<Longrightarrow> head (p *\<^sub>p q) = head p *\<^sub>p head q"
 proof (induct p q arbitrary: n0 n1 rule: polymul.induct)
   case (2 a b c' n' p' n0 n1)
@@ -1300,7 +1300,7 @@
   by (induct p arbitrary: n0 rule: polyneg.induct, auto)
 
 lemma degree_polymul:
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" and nq: "isnpolyh q n1"
   shows "degree (p *\<^sub>p q) \<le> degree p + degree q"
   using polymul_degreen[OF np nq, where m="0"]  degree_eq_degreen0 by simp
@@ -1344,7 +1344,7 @@
 qed
 
 lemma polydivide_aux_properties:
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" and ns: "isnpolyh s n1"
   and ap: "head p = a" and ndp: "degree p = n" and pnz: "p \<noteq> 0\<^sub>p"
   shows "polydivide_aux_dom (a,n,p,k,s) \<and> 
@@ -1415,19 +1415,19 @@
             from polyadd_normh[OF polymul_normh[OF np 
               polyadd_normh[OF polymul_normh[OF nakk' nxdn] nq]] nr']
             have nqr': "isnpolyh (p *\<^sub>p (?akk' *\<^sub>p ?xdn +\<^sub>p q) +\<^sub>p r) 0" by simp 
-            from asp have "\<forall> (bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a^\<^sub>p (k' - k) *\<^sub>p (s -\<^sub>p ?p')) = 
+            from asp have "\<forall> (bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a^\<^sub>p (k' - k) *\<^sub>p (s -\<^sub>p ?p')) = 
               Ipoly bs (p *\<^sub>p q +\<^sub>p r)" by simp
-            hence " \<forall>(bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a^\<^sub>p (k' - k)*\<^sub>p s) = 
+            hence " \<forall>(bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a^\<^sub>p (k' - k)*\<^sub>p s) = 
               Ipoly bs (a^\<^sub>p (k' - k)) * Ipoly bs ?p' + Ipoly bs p * Ipoly bs q + Ipoly bs r" 
               by (simp add: field_simps)
-            hence " \<forall>(bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
+            hence " \<forall>(bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
               Ipoly bs (a^\<^sub>p (k' - k)) * Ipoly bs (funpow (degree s - n) shift1 1\<^sub>p *\<^sub>p p) 
               + Ipoly bs p * Ipoly bs q + Ipoly bs r"
               by (auto simp only: funpow_shift1_1) 
-            hence "\<forall>(bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
+            hence "\<forall>(bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
               Ipoly bs p * (Ipoly bs (a^\<^sub>p (k' - k)) * Ipoly bs (funpow (degree s - n) shift1 1\<^sub>p) 
               + Ipoly bs q) + Ipoly bs r" by (simp add: field_simps)
-            hence "\<forall>(bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
+            hence "\<forall>(bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
               Ipoly bs (p *\<^sub>p ((a^\<^sub>p (k' - k)) *\<^sub>p (funpow (degree s - n) shift1 1\<^sub>p) +\<^sub>p q) +\<^sub>p r)" by simp
             with isnpolyh_unique[OF nakks' nqr']
             have "a ^\<^sub>p (k' - k) *\<^sub>p s = 
@@ -1444,9 +1444,9 @@
             apply (simp) by (rule polydivide_aux_real_domintros, simp_all)
           have dom: ?dths apply (rule polydivide_aux_real_domintros) 
             using ba dn' domsp by simp_all
-          from spz isnpolyh_unique[OF polysub_normh[OF ns np'], where q="0\<^sub>p", symmetric, where ?'a = "'a::{field, division_ring_inverse_zero, ring_char_0}"]
-          have " \<forall>(bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs s = Ipoly bs ?p'" by simp
-          hence "\<forall>(bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs s = Ipoly bs (?xdn *\<^sub>p p)" using np nxdn apply simp
+          from spz isnpolyh_unique[OF polysub_normh[OF ns np'], where q="0\<^sub>p", symmetric, where ?'a = "'a::{field_char_0, field_inverse_zero}"]
+          have " \<forall>(bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs s = Ipoly bs ?p'" by simp
+          hence "\<forall>(bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs s = Ipoly bs (?xdn *\<^sub>p p)" using np nxdn apply simp
             by (simp only: funpow_shift1_1) simp
           hence sp': "s = ?xdn *\<^sub>p p" using isnpolyh_unique[OF ns polymul_normh[OF nxdn np]] by blast
           {assume h1: "polydivide_aux (a,n,p,k,s) = (k',r)"
@@ -1501,7 +1501,7 @@
               and dr: "degree r = 0 \<or> degree r < degree p"
               and qr: "a ^\<^sub>p (k' - Suc k) *\<^sub>p ((a *\<^sub>p s) -\<^sub>p (?b *\<^sub>p ?p')) = p *\<^sub>p q +\<^sub>p r" by auto
             from kk' have kk'':"Suc (k' - Suc k) = k' - k" by arith
-            {fix bs:: "'a::{field, division_ring_inverse_zero, ring_char_0} list"
+            {fix bs:: "'a::{field_char_0, field_inverse_zero} list"
               
             from qr isnpolyh_unique[OF polypow_normh[OF head_isnpolyh[OF np], where k="k' - Suc k", simplified ap] nasbp', symmetric]
             have "Ipoly bs (a ^\<^sub>p (k' - Suc k) *\<^sub>p ((a *\<^sub>p s) -\<^sub>p (?b *\<^sub>p ?p'))) = Ipoly bs (p *\<^sub>p q +\<^sub>p r)" by simp
@@ -1511,7 +1511,7 @@
               by (simp add:kk'' funpow_shift1_1[where n="degree s - n" and p="p"])
             hence "Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = Ipoly bs p * (Ipoly bs q + Ipoly bs a ^ (k' - Suc k) * Ipoly bs ?b * Ipoly bs ?xdn) + Ipoly bs r"
               by (simp add: field_simps)}
-            hence ieq:"\<forall>(bs :: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
+            hence ieq:"\<forall>(bs :: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a ^\<^sub>p (k' - k) *\<^sub>p s) = 
               Ipoly bs (p *\<^sub>p (q +\<^sub>p (a ^\<^sub>p (k' - Suc k) *\<^sub>p ?b *\<^sub>p ?xdn)) +\<^sub>p r)" by auto 
             let ?q = "q +\<^sub>p (a ^\<^sub>p (k' - Suc k) *\<^sub>p ?b *\<^sub>p ?xdn)"
             from polyadd_normh[OF nq polymul_normh[OF polymul_normh[OF polypow_normh[OF head_isnpolyh[OF np], where k="k' - Suc k"] head_isnpolyh[OF ns], simplified ap ] nxdn]]
@@ -1532,17 +1532,17 @@
             apply (simp) by (rule polydivide_aux_real_domintros, simp_all)
           have dom: ?dths using sz ba dn' domsp 
             by - (rule polydivide_aux_real_domintros, simp_all)
-          {fix bs :: "'a::{field, division_ring_inverse_zero, ring_char_0} list"
+          {fix bs :: "'a::{field_char_0, field_inverse_zero} list"
             from isnpolyh_unique[OF nth, where ?'a="'a" and q="0\<^sub>p",simplified,symmetric] spz
           have "Ipoly bs (a*\<^sub>p s) = Ipoly bs ?b * Ipoly bs ?p'" by simp
           hence "Ipoly bs (a*\<^sub>p s) = Ipoly bs (?b *\<^sub>p ?xdn) * Ipoly bs p" 
             by (simp add: funpow_shift1_1[where n="degree s - n" and p="p"])
           hence "Ipoly bs (a*\<^sub>p s) = Ipoly bs (p *\<^sub>p (?b *\<^sub>p ?xdn))" by simp
         }
-        hence hth: "\<forall> (bs:: 'a::{field, division_ring_inverse_zero, ring_char_0} list). Ipoly bs (a*\<^sub>p s) = Ipoly bs (p *\<^sub>p (?b *\<^sub>p ?xdn))" ..
+        hence hth: "\<forall> (bs:: 'a::{field_char_0, field_inverse_zero} list). Ipoly bs (a*\<^sub>p s) = Ipoly bs (p *\<^sub>p (?b *\<^sub>p ?xdn))" ..
           from hth
           have asq: "a *\<^sub>p s = p *\<^sub>p (?b *\<^sub>p ?xdn)" 
-            using isnpolyh_unique[where ?'a = "'a::{field, division_ring_inverse_zero, ring_char_0}", OF polymul_normh[OF head_isnpolyh[OF np] ns] 
+            using isnpolyh_unique[where ?'a = "'a::{field_char_0, field_inverse_zero}", OF polymul_normh[OF head_isnpolyh[OF np] ns] 
                     polymul_normh[OF np polymul_normh[OF head_isnpolyh[OF ns] nxdn]],
               simplified ap] by simp
           {assume h1: "polydivide_aux (a,n,p,k,s) = (k', r)"
@@ -1566,7 +1566,7 @@
 qed
 
 lemma polydivide_properties: 
-  assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   and np: "isnpolyh p n0" and ns: "isnpolyh s n1" and pnz: "p \<noteq> 0\<^sub>p"
   shows "(\<exists> k r. polydivide s p = (k,r) \<and> (\<exists>nr. isnpolyh r nr) \<and> (degree r = 0 \<or> degree r < degree p) 
   \<and> (\<exists>q n1. isnpolyh q n1 \<and> ((polypow k (head p)) *\<^sub>p s = p *\<^sub>p q +\<^sub>p r)))"
@@ -1698,11 +1698,11 @@
 definition "swapnorm n m t = polynate (swap n m t)"
 
 lemma swapnorm: assumes nbs: "n < length bs" and mbs: "m < length bs"
-  shows "((Ipoly bs (swapnorm n m t) :: 'a\<Colon>{field, division_ring_inverse_zero, ring_char_0})) = Ipoly ((bs[n:= bs!m])[m:= bs!n]) t"
+  shows "((Ipoly bs (swapnorm n m t) :: 'a\<Colon>{field_char_0, field_inverse_zero})) = Ipoly ((bs[n:= bs!m])[m:= bs!n]) t"
   using swap[OF prems] swapnorm_def by simp
 
 lemma swapnorm_isnpoly[simp]: 
-    assumes "SORT_CONSTRAINT('a::{field, division_ring_inverse_zero, ring_char_0})"
+    assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "isnpoly (swapnorm n m p)"
   unfolding swapnorm_def by simp
 
--- a/src/HOL/Decision_Procs/ex/Dense_Linear_Order_Ex.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Decision_Procs/ex/Dense_Linear_Order_Ex.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -7,147 +7,147 @@
 begin
 
 lemma
-  "\<exists>(y::'a::{linordered_field, division_ring_inverse_zero, number_ring}) <2. x + 3* y < 0 \<and> x - y >0"
+  "\<exists>(y::'a::{linordered_field_inverse_zero, number_ring}) <2. x + 3* y < 0 \<and> x - y >0"
   by ferrack
 
-lemma "~ (ALL x (y::'a::{linordered_field, division_ring_inverse_zero, number_ring}). x < y --> 10*x < 11*y)"
+lemma "~ (ALL x (y::'a::{linordered_field_inverse_zero, number_ring}). x < y --> 10*x < 11*y)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. x < y --> (10*(x + 5*y + -1) < 60*y)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. x < y --> (10*(x + 5*y + -1) < 60*y)"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. x ~= y --> x < y"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y. x ~= y --> x < y"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (x ~= y & 10*x ~= 9*y & 10*x < y) --> x < y"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y. (x ~= y & 10*x ~= 9*y & 10*x < y) --> x < y"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (x ~= y & 5*x <= y) --> 500*x <= 100*y"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (x ~= y & 5*x <= y) --> 500*x <= 100*y"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX (y::'a::{linordered_field, division_ring_inverse_zero, number_ring}). 4*x + 3*y <= 0 & 4*x + 3*y >= -1)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX (y::'a::{linordered_field_inverse_zero, number_ring}). 4*x + 3*y <= 0 & 4*x + 3*y >= -1)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) < 0. (EX (y::'a::{linordered_field, division_ring_inverse_zero, number_ring}) > 0. 7*x + y > 0 & x - y <= 9)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) < 0. (EX (y::'a::{linordered_field_inverse_zero, number_ring}) > 0. 7*x + y > 0 & x - y <= 9)"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (0 < x & x < 1) --> (ALL y > 1. x + y ~= 1)"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}). (0 < x & x < 1) --> (ALL y > 1. x + y ~= 1)"
   by ferrack
 
-lemma "EX x. (ALL (y::'a::{linordered_field, division_ring_inverse_zero, number_ring}). y < 2 -->  2*(y - x) \<le> 0 )"
+lemma "EX x. (ALL (y::'a::{linordered_field_inverse_zero, number_ring}). y < 2 -->  2*(y - x) \<le> 0 )"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). x < 10 | x > 20 | (EX y. y>= 0 & y <= 10 & x+y = 20)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). x < 10 | x > 20 | (EX y. y>= 0 & y <= 10 & x+y = 20)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. x + y < z --> y >= z --> x < 0"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y z. x + y < z --> y >= z --> x < 0"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. x + 7*y < 5* z & 5*y >= 7*z & x < 0"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z. x + 7*y < 5* z & 5*y >= 7*z & x < 0"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. abs (x + y) <= z --> (abs z = z)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y z. abs (x + y) <= z --> (abs z = z)"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. x + 7*y - 5* z < 0 & 5*y + 7*z + 3*x < 0"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z. x + 7*y - 5* z < 0 & 5*y + 7*z + 3*x < 0"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. (abs (5*x+3*y+z) <= 5*x+3*y+z & abs (5*x+3*y+z) >= - (5*x+3*y+z)) | (abs (5*x+3*y+z) >= 5*x+3*y+z & abs (5*x+3*y+z) <= - (5*x+3*y+z))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y z. (abs (5*x+3*y+z) <= 5*x+3*y+z & abs (5*x+3*y+z) >= - (5*x+3*y+z)) | (abs (5*x+3*y+z) >= 5*x+3*y+z & abs (5*x+3*y+z) <= - (5*x+3*y+z))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. x < y --> (EX z>0. x+z = y)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. x < y --> (EX z>0. x+z = y)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. x < y --> (EX z>0. x+z = y)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. x < y --> (EX z>0. x+z = y)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (EX z>0. abs (x - y) <= z )"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (EX z>0. abs (x - y) <= z )"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (ALL z<0. (z < x --> z <= y) & (z > y --> z >= x))"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y. (ALL z<0. (z < x --> z <= y) & (z > y --> z >= x))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (ALL z>=0. abs (3*x+7*y) <= 2*z + 1)"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y. (ALL z>=0. abs (3*x+7*y) <= 2*z + 1)"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (ALL z<0. (z < x --> z <= y) & (z > y --> z >= x))"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y. (ALL z<0. (z < x --> z <= y) & (z > y --> z >= x))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring})>0. (ALL y. (EX z. 13* abs z \<noteq> abs (12*y - x) & 5*x - 3*(abs y) <= 7*z))"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring})>0. (ALL y. (EX z. 13* abs z \<noteq> abs (12*y - x) & 5*x - 3*(abs y) <= 7*z))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). abs (4*x + 17) < 4 & (ALL y . abs (x*34 - 34*y - 9) \<noteq> 0 \<longrightarrow> (EX z. 5*x - 3*abs y <= 7*z))"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}). abs (4*x + 17) < 4 & (ALL y . abs (x*34 - 34*y - 9) \<noteq> 0 \<longrightarrow> (EX z. 5*x - 3*abs y <= 7*z))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y > abs (23*x - 9). (ALL z > abs (3*y - 19* abs x). x+z > 2*y))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y > abs (23*x - 9). (ALL z > abs (3*y - 19* abs x). x+z > 2*y))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y< abs (3*x - 1). (ALL z >= (3*abs x - 1). abs (12*x - 13*y + 19*z) > abs (23*x) ))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y< abs (3*x - 1). (ALL z >= (3*abs x - 1). abs (12*x - 13*y + 19*z) > abs (23*x) ))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). abs x < 100 & (ALL y > x. (EX z<2*y - x. 5*x - 3*y <= 7*z))"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}). abs x < 100 & (ALL y > x. (EX z<2*y - x. 5*x - 3*y <= 7*z))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z w. 7*x<3*y --> 5*y < 7*z --> z < 2*w --> 7*(2*w-x) > 2*y"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y z w. 7*x<3*y --> 5*y < 7*z --> z < 2*w --> 7*(2*w-x) > 2*y"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z w. 5*x + 3*z - 17*w + abs (y - 8*x + z) <= 89"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z w. 5*x + 3*z - 17*w + abs (y - 8*x + z) <= 89"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z w. 5*x + 3*z - 17*w + 7* (y - 8*x + z) <= max y (7*z - x + w)"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z w. 5*x + 3*z - 17*w + 7* (y - 8*x + z) <= max y (7*z - x + w)"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w)"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. (EX w >= (x+y+z). w <= abs x + abs y + abs z)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y z. (EX w >= (x+y+z). w <= abs x + abs y + abs z)"
   by ferrack
 
-lemma "~(ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y z w. 3* x + z*4 = 3*y & x + y < z & x> w & 3*x < w + y))"
+lemma "~(ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y z w. 3* x + z*4 = 3*y & x + y < z & x> w & 3*x < w + y))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (EX z w. abs (x-y) = (z-w) & z*1234 < 233*x & w ~= y)"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (EX z w. abs (x-y) = (z-w) & z*1234 < 233*x & w ~= y)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z. (ALL w >= abs (x+y+z). w >= abs x + abs y + abs z)"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z. (ALL w >= abs (x+y+z). w >= abs x + abs y + abs z)"
   by ferrack
 
-lemma "EX z. (ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (EX w >= (x+y+z). w <= abs x + abs y + abs z))"
+lemma "EX z. (ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (EX w >= (x+y+z). w <= abs x + abs y + abs z))"
   by ferrack
 
-lemma "EX z. (ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) < abs z. (EX y w. x< y & x < z & x> w & 3*x < w + y))"
+lemma "EX z. (ALL (x::'a::{linordered_field_inverse_zero, number_ring}) < abs z. (EX y w. x< y & x < z & x> w & 3*x < w + y))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y. (EX z. (ALL w. abs (x-y) = abs (z-w) --> z < x & w ~= y))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}) y. (EX z. (ALL w. abs (x-y) = abs (z-w) --> z < x & w ~= y))"
   by ferrack
 
-lemma "EX y. (ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w)))"
+lemma "EX y. (ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w)))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) z. (ALL w >= 13*x - 4*z. (EX y. w >= abs x + abs y + z))"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) z. (ALL w >= 13*x - 4*z. (EX y. w >= abs x + abs y + z))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (ALL y < x. (EX z > (x+y).
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}). (ALL y < x. (EX z > (x+y).
   (ALL w. 5*w + 10*x - z >= y --> w + 7*x + 3*z >= 2*y)))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (ALL y. (EX z > y.
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}). (ALL y. (EX z > y.
   (ALL w . w < 13 --> w + 10*x - z >= y --> 5*w + 7*x + 13*z >= 2*y)))"
   by ferrack
 
-lemma "EX (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}) y z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w)"
+lemma "EX (x::'a::{linordered_field_inverse_zero, number_ring}) y z w. min (5*x + 3*z) (17*w) + 5* abs (y - 8*x + z) <= max y (7*z - x + w)"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y. (ALL z>19. y <= x + z & (EX w. abs (y - x) < w)))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y. (ALL z>19. y <= x + z & (EX w. abs (y - x) < w)))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y. (ALL z>19. y <= x + z & (EX w. abs (x + z) < w - y)))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y. (ALL z>19. y <= x + z & (EX w. abs (x + z) < w - y)))"
   by ferrack
 
-lemma "ALL (x::'a::{linordered_field, division_ring_inverse_zero, number_ring}). (EX y. abs y ~= abs x & (ALL z> max x y. (EX w. w ~= y & w ~= z & 3*w - z >= x + y)))"
+lemma "ALL (x::'a::{linordered_field_inverse_zero, number_ring}). (EX y. abs y ~= abs x & (ALL z> max x y. (EX w. w ~= y & w ~= z & 3*w - z >= x + y)))"
   by ferrack
 
 end
--- a/src/HOL/Fields.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Fields.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -129,22 +129,20 @@
 subclass division_ring_inverse_zero proof
 qed (fact field_inverse_zero)
 
-end
-
 text{*This version builds in division by zero while also re-orienting
       the right-hand side.*}
 lemma inverse_mult_distrib [simp]:
-     "inverse(a*b) = inverse(a) * inverse(b::'a::{field,division_ring_inverse_zero})"
-  proof cases
-    assume "a \<noteq> 0 & b \<noteq> 0" 
-    thus ?thesis by (simp add: nonzero_inverse_mult_distrib mult_ac)
-  next
-    assume "~ (a \<noteq> 0 & b \<noteq> 0)" 
-    thus ?thesis by force
-  qed
+  "inverse (a * b) = inverse a * inverse b"
+proof cases
+  assume "a \<noteq> 0 & b \<noteq> 0" 
+  thus ?thesis by (simp add: nonzero_inverse_mult_distrib mult_ac)
+next
+  assume "~ (a \<noteq> 0 & b \<noteq> 0)" 
+  thus ?thesis by force
+qed
 
 lemma inverse_divide [simp]:
-  "inverse (a/b) = b / (a::'a::{field,division_ring_inverse_zero})"
+  "inverse (a / b) = b / a"
   by (simp add: divide_inverse mult_commute)
 
 
@@ -155,86 +153,88 @@
 because the latter are covered by a simproc. *}
 
 lemma mult_divide_mult_cancel_left:
-  "c\<noteq>0 ==> (c*a) / (c*b) = a / (b::'a::{field,division_ring_inverse_zero})"
+  "c \<noteq> 0 \<Longrightarrow> (c * a) / (c * b) = a / b"
 apply (cases "b = 0")
 apply simp_all
 done
 
 lemma mult_divide_mult_cancel_right:
-  "c\<noteq>0 ==> (a*c) / (b*c) = a / (b::'a::{field,division_ring_inverse_zero})"
+  "c \<noteq> 0 \<Longrightarrow> (a * c) / (b * c) = a / b"
 apply (cases "b = 0")
 apply simp_all
 done
 
-lemma divide_divide_eq_right [simp,no_atp]:
-  "a / (b/c) = (a*c) / (b::'a::{field,division_ring_inverse_zero})"
-by (simp add: divide_inverse mult_ac)
+lemma divide_divide_eq_right [simp, no_atp]:
+  "a / (b / c) = (a * c) / b"
+  by (simp add: divide_inverse mult_ac)
 
-lemma divide_divide_eq_left [simp,no_atp]:
-  "(a / b) / (c::'a::{field,division_ring_inverse_zero}) = a / (b*c)"
-by (simp add: divide_inverse mult_assoc)
+lemma divide_divide_eq_left [simp, no_atp]:
+  "(a / b) / c = a / (b * c)"
+  by (simp add: divide_inverse mult_assoc)
 
 
 text {*Special Cancellation Simprules for Division*}
 
-lemma mult_divide_mult_cancel_left_if[simp,no_atp]:
-fixes c :: "'a :: {field,division_ring_inverse_zero}"
-shows "(c*a) / (c*b) = (if c=0 then 0 else a/b)"
-by (simp add: mult_divide_mult_cancel_left)
+lemma mult_divide_mult_cancel_left_if [simp,no_atp]:
+  shows "(c * a) / (c * b) = (if c = 0 then 0 else a / b)"
+  by (simp add: mult_divide_mult_cancel_left)
 
 
 text {* Division and Unary Minus *}
 
-lemma minus_divide_right: "- (a/b) = a / -(b::'a::{field,division_ring_inverse_zero})"
-by (simp add: divide_inverse)
+lemma minus_divide_right:
+  "- (a / b) = a / - b"
+  by (simp add: divide_inverse)
 
 lemma divide_minus_right [simp, no_atp]:
-  "a / -(b::'a::{field,division_ring_inverse_zero}) = -(a / b)"
-by (simp add: divide_inverse)
+  "a / - b = - (a / b)"
+  by (simp add: divide_inverse)
 
 lemma minus_divide_divide:
-  "(-a)/(-b) = a / (b::'a::{field,division_ring_inverse_zero})"
+  "(- a) / (- b) = a / b"
 apply (cases "b=0", simp) 
 apply (simp add: nonzero_minus_divide_divide) 
 done
 
 lemma eq_divide_eq:
-  "((a::'a::{field,division_ring_inverse_zero}) = b/c) = (if c\<noteq>0 then a*c = b else a=0)"
-by (simp add: nonzero_eq_divide_eq)
+  "a = b / c \<longleftrightarrow> (if c \<noteq> 0 then a * c = b else a = 0)"
+  by (simp add: nonzero_eq_divide_eq)
 
 lemma divide_eq_eq:
-  "(b/c = (a::'a::{field,division_ring_inverse_zero})) = (if c\<noteq>0 then b = a*c else a=0)"
-by (force simp add: nonzero_divide_eq_eq)
+  "b / c = a \<longleftrightarrow> (if c \<noteq> 0 then b = a * c else a = 0)"
+  by (force simp add: nonzero_divide_eq_eq)
 
 lemma inverse_eq_1_iff [simp]:
-  "(inverse x = 1) = (x = (1::'a::{field,division_ring_inverse_zero}))"
-by (insert inverse_eq_iff_eq [of x 1], simp) 
+  "inverse x = 1 \<longleftrightarrow> x = 1"
+  by (insert inverse_eq_iff_eq [of x 1], simp) 
 
-lemma divide_eq_0_iff [simp,no_atp]:
-     "(a/b = 0) = (a=0 | b=(0::'a::{field,division_ring_inverse_zero}))"
-by (simp add: divide_inverse)
+lemma divide_eq_0_iff [simp, no_atp]:
+  "a / b = 0 \<longleftrightarrow> a = 0 \<or> b = 0"
+  by (simp add: divide_inverse)
 
-lemma divide_cancel_right [simp,no_atp]:
-     "(a/c = b/c) = (c = 0 | a = (b::'a::{field,division_ring_inverse_zero}))"
-apply (cases "c=0", simp)
-apply (simp add: divide_inverse)
-done
+lemma divide_cancel_right [simp, no_atp]:
+  "a / c = b / c \<longleftrightarrow> c = 0 \<or> a = b"
+  apply (cases "c=0", simp)
+  apply (simp add: divide_inverse)
+  done
 
-lemma divide_cancel_left [simp,no_atp]:
-     "(c/a = c/b) = (c = 0 | a = (b::'a::{field,division_ring_inverse_zero}))" 
-apply (cases "c=0", simp)
-apply (simp add: divide_inverse)
-done
+lemma divide_cancel_left [simp, no_atp]:
+  "c / a = c / b \<longleftrightarrow> c = 0 \<or> a = b" 
+  apply (cases "c=0", simp)
+  apply (simp add: divide_inverse)
+  done
 
-lemma divide_eq_1_iff [simp,no_atp]:
-     "(a/b = 1) = (b \<noteq> 0 & a = (b::'a::{field,division_ring_inverse_zero}))"
-apply (cases "b=0", simp)
-apply (simp add: right_inverse_eq)
-done
+lemma divide_eq_1_iff [simp, no_atp]:
+  "a / b = 1 \<longleftrightarrow> b \<noteq> 0 \<and> a = b"
+  apply (cases "b=0", simp)
+  apply (simp add: right_inverse_eq)
+  done
 
-lemma one_eq_divide_iff [simp,no_atp]:
-     "(1 = a/b) = (b \<noteq> 0 & a = (b::'a::{field,division_ring_inverse_zero}))"
-by (simp add: eq_commute [of 1])
+lemma one_eq_divide_iff [simp, no_atp]:
+  "1 = a / b \<longleftrightarrow> b \<noteq> 0 \<and> a = b"
+  by (simp add: eq_commute [of 1])
+
+end
 
 
 text {* Ordered Fields *}
@@ -643,46 +643,40 @@
 
 end
 
-class linordered_field_inverse_zero = linordered_field +
-  assumes linordered_field_inverse_zero: "inverse 0 = 0"
+class linordered_field_inverse_zero = linordered_field + field_inverse_zero
 begin
 
-subclass field_inverse_zero proof
-qed (fact linordered_field_inverse_zero)
-
-end
-
 lemma le_divide_eq:
   "(a \<le> b/c) = 
    (if 0 < c then a*c \<le> b
              else if c < 0 then b \<le> a*c
-             else  a \<le> (0::'a::{linordered_field,division_ring_inverse_zero}))"
+             else  a \<le> 0)"
 apply (cases "c=0", simp) 
 apply (force simp add: pos_le_divide_eq neg_le_divide_eq linorder_neq_iff) 
 done
 
 lemma inverse_positive_iff_positive [simp]:
-  "(0 < inverse a) = (0 < (a::'a::{linordered_field,division_ring_inverse_zero}))"
+  "(0 < inverse a) = (0 < a)"
 apply (cases "a = 0", simp)
 apply (blast intro: inverse_positive_imp_positive positive_imp_inverse_positive)
 done
 
 lemma inverse_negative_iff_negative [simp]:
-  "(inverse a < 0) = (a < (0::'a::{linordered_field,division_ring_inverse_zero}))"
+  "(inverse a < 0) = (a < 0)"
 apply (cases "a = 0", simp)
 apply (blast intro: inverse_negative_imp_negative negative_imp_inverse_negative)
 done
 
 lemma inverse_nonnegative_iff_nonnegative [simp]:
-  "(0 \<le> inverse a) = (0 \<le> (a::'a::{linordered_field,division_ring_inverse_zero}))"
-by (simp add: linorder_not_less [symmetric])
+  "0 \<le> inverse a \<longleftrightarrow> 0 \<le> a"
+  by (simp add: not_less [symmetric])
 
 lemma inverse_nonpositive_iff_nonpositive [simp]:
-  "(inverse a \<le> 0) = (a \<le> (0::'a::{linordered_field,division_ring_inverse_zero}))"
-by (simp add: linorder_not_less [symmetric])
+  "inverse a \<le> 0 \<longleftrightarrow> a \<le> 0"
+  by (simp add: not_less [symmetric])
 
 lemma one_less_inverse_iff:
-  "(1 < inverse x) = (0 < x & x < (1::'a::{linordered_field,division_ring_inverse_zero}))"
+  "1 < inverse x \<longleftrightarrow> 0 < x \<and> x < 1"
 proof cases
   assume "0 < x"
     with inverse_less_iff_less [OF zero_less_one, of x]
@@ -692,7 +686,7 @@
   have "~ (1 < inverse x)"
   proof
     assume "1 < inverse x"
-    also with notless have "... \<le> 0" by (simp add: linorder_not_less)
+    also with notless have "... \<le> 0" by simp
     also have "... < 1" by (rule zero_less_one) 
     finally show False by auto
   qed
@@ -700,62 +694,69 @@
 qed
 
 lemma one_le_inverse_iff:
-  "(1 \<le> inverse x) = (0 < x & x \<le> (1::'a::{linordered_field,division_ring_inverse_zero}))"
-by (force simp add: order_le_less one_less_inverse_iff)
+  "1 \<le> inverse x \<longleftrightarrow> 0 < x \<and> x \<le> 1"
+proof (cases "x = 1")
+  case True then show ?thesis by simp
+next
+  case False then have "inverse x \<noteq> 1" by simp
+  then have "1 \<noteq> inverse x" by blast
+  then have "1 \<le> inverse x \<longleftrightarrow> 1 < inverse x" by (simp add: le_less)
+  with False show ?thesis by (auto simp add: one_less_inverse_iff)
+qed
 
 lemma inverse_less_1_iff:
-  "(inverse x < 1) = (x \<le> 0 | 1 < (x::'a::{linordered_field,division_ring_inverse_zero}))"
-by (simp add: linorder_not_le [symmetric] one_le_inverse_iff) 
+  "inverse x < 1 \<longleftrightarrow> x \<le> 0 \<or> 1 < x"
+  by (simp add: not_le [symmetric] one_le_inverse_iff) 
 
 lemma inverse_le_1_iff:
-  "(inverse x \<le> 1) = (x \<le> 0 | 1 \<le> (x::'a::{linordered_field,division_ring_inverse_zero}))"
-by (simp add: linorder_not_less [symmetric] one_less_inverse_iff) 
+  "inverse x \<le> 1 \<longleftrightarrow> x \<le> 0 \<or> 1 \<le> x"
+  by (simp add: not_less [symmetric] one_less_inverse_iff) 
 
 lemma divide_le_eq:
   "(b/c \<le> a) = 
    (if 0 < c then b \<le> a*c
              else if c < 0 then a*c \<le> b
-             else 0 \<le> (a::'a::{linordered_field,division_ring_inverse_zero}))"
+             else 0 \<le> a)"
 apply (cases "c=0", simp) 
-apply (force simp add: pos_divide_le_eq neg_divide_le_eq linorder_neq_iff) 
+apply (force simp add: pos_divide_le_eq neg_divide_le_eq) 
 done
 
 lemma less_divide_eq:
   "(a < b/c) = 
    (if 0 < c then a*c < b
              else if c < 0 then b < a*c
-             else  a < (0::'a::{linordered_field,division_ring_inverse_zero}))"
+             else  a < 0)"
 apply (cases "c=0", simp) 
-apply (force simp add: pos_less_divide_eq neg_less_divide_eq linorder_neq_iff) 
+apply (force simp add: pos_less_divide_eq neg_less_divide_eq) 
 done
 
 lemma divide_less_eq:
   "(b/c < a) = 
    (if 0 < c then b < a*c
              else if c < 0 then a*c < b
-             else 0 < (a::'a::{linordered_field,division_ring_inverse_zero}))"
+             else 0 < a)"
 apply (cases "c=0", simp) 
-apply (force simp add: pos_divide_less_eq neg_divide_less_eq linorder_neq_iff) 
+apply (force simp add: pos_divide_less_eq neg_divide_less_eq)
 done
 
 text {*Division and Signs*}
 
 lemma zero_less_divide_iff:
-     "((0::'a::{linordered_field,division_ring_inverse_zero}) < a/b) = (0 < a & 0 < b | a < 0 & b < 0)"
+     "(0 < a/b) = (0 < a & 0 < b | a < 0 & b < 0)"
 by (simp add: divide_inverse zero_less_mult_iff)
 
 lemma divide_less_0_iff:
-     "(a/b < (0::'a::{linordered_field,division_ring_inverse_zero})) = 
+     "(a/b < 0) = 
       (0 < a & b < 0 | a < 0 & 0 < b)"
 by (simp add: divide_inverse mult_less_0_iff)
 
 lemma zero_le_divide_iff:
-     "((0::'a::{linordered_field,division_ring_inverse_zero}) \<le> a/b) =
+     "(0 \<le> a/b) =
       (0 \<le> a & 0 \<le> b | a \<le> 0 & b \<le> 0)"
 by (simp add: divide_inverse zero_le_mult_iff)
 
 lemma divide_le_0_iff:
-     "(a/b \<le> (0::'a::{linordered_field,division_ring_inverse_zero})) =
+     "(a/b \<le> 0) =
       (0 \<le> a & b \<le> 0 | a \<le> 0 & 0 \<le> b)"
 by (simp add: divide_inverse mult_le_0_iff)
 
@@ -764,143 +765,133 @@
 text{*Simplify expressions equated with 1*}
 
 lemma zero_eq_1_divide_iff [simp,no_atp]:
-     "((0::'a::{linordered_field,division_ring_inverse_zero}) = 1/a) = (a = 0)"
+     "(0 = 1/a) = (a = 0)"
 apply (cases "a=0", simp)
 apply (auto simp add: nonzero_eq_divide_eq)
 done
 
 lemma one_divide_eq_0_iff [simp,no_atp]:
-     "(1/a = (0::'a::{linordered_field,division_ring_inverse_zero})) = (a = 0)"
+     "(1/a = 0) = (a = 0)"
 apply (cases "a=0", simp)
 apply (insert zero_neq_one [THEN not_sym])
 apply (auto simp add: nonzero_divide_eq_eq)
 done
 
 text{*Simplify expressions such as @{text "0 < 1/x"} to @{text "0 < x"}*}
-lemmas zero_less_divide_1_iff = zero_less_divide_iff [of 1, simplified]
-lemmas divide_less_0_1_iff = divide_less_0_iff [of 1, simplified]
-lemmas zero_le_divide_1_iff = zero_le_divide_iff [of 1, simplified]
-lemmas divide_le_0_1_iff = divide_le_0_iff [of 1, simplified]
+
+lemma zero_le_divide_1_iff [simp, no_atp]:
+  "0 \<le> 1 / a \<longleftrightarrow> 0 \<le> a"
+  by (simp add: zero_le_divide_iff)
 
-declare zero_less_divide_1_iff [simp,no_atp]
-declare divide_less_0_1_iff [simp,no_atp]
-declare zero_le_divide_1_iff [simp,no_atp]
-declare divide_le_0_1_iff [simp,no_atp]
+lemma zero_less_divide_1_iff [simp, no_atp]:
+  "0 < 1 / a \<longleftrightarrow> 0 < a"
+  by (simp add: zero_less_divide_iff)
+
+lemma divide_le_0_1_iff [simp, no_atp]:
+  "1 / a \<le> 0 \<longleftrightarrow> a \<le> 0"
+  by (simp add: divide_le_0_iff)
+
+lemma divide_less_0_1_iff [simp, no_atp]:
+  "1 / a < 0 \<longleftrightarrow> a < 0"
+  by (simp add: divide_less_0_iff)
 
 lemma divide_right_mono:
-     "[|a \<le> b; 0 \<le> c|] ==> a/c \<le> b/(c::'a::{linordered_field,division_ring_inverse_zero})"
-by (force simp add: divide_strict_right_mono order_le_less)
+     "[|a \<le> b; 0 \<le> c|] ==> a/c \<le> b/c"
+by (force simp add: divide_strict_right_mono le_less)
 
-lemma divide_right_mono_neg: "(a::'a::{linordered_field,division_ring_inverse_zero}) <= b 
+lemma divide_right_mono_neg: "a <= b 
     ==> c <= 0 ==> b / c <= a / c"
 apply (drule divide_right_mono [of _ _ "- c"])
 apply auto
 done
 
-lemma divide_left_mono_neg: "(a::'a::{linordered_field,division_ring_inverse_zero}) <= b 
+lemma divide_left_mono_neg: "a <= b 
     ==> c <= 0 ==> 0 < a * b ==> c / a <= c / b"
   apply (drule divide_left_mono [of _ _ "- c"])
   apply (auto simp add: mult_commute)
 done
 
-
-
 text{*Simplify quotients that are compared with the value 1.*}
 
 lemma le_divide_eq_1 [no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "(1 \<le> b / a) = ((0 < a & a \<le> b) | (a < 0 & b \<le> a))"
+  "(1 \<le> b / a) = ((0 < a & a \<le> b) | (a < 0 & b \<le> a))"
 by (auto simp add: le_divide_eq)
 
 lemma divide_le_eq_1 [no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "(b / a \<le> 1) = ((0 < a & b \<le> a) | (a < 0 & a \<le> b) | a=0)"
+  "(b / a \<le> 1) = ((0 < a & b \<le> a) | (a < 0 & a \<le> b) | a=0)"
 by (auto simp add: divide_le_eq)
 
 lemma less_divide_eq_1 [no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "(1 < b / a) = ((0 < a & a < b) | (a < 0 & b < a))"
+  "(1 < b / a) = ((0 < a & a < b) | (a < 0 & b < a))"
 by (auto simp add: less_divide_eq)
 
 lemma divide_less_eq_1 [no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "(b / a < 1) = ((0 < a & b < a) | (a < 0 & a < b) | a=0)"
+  "(b / a < 1) = ((0 < a & b < a) | (a < 0 & a < b) | a=0)"
 by (auto simp add: divide_less_eq)
 
 
 text {*Conditional Simplification Rules: No Case Splits*}
 
 lemma le_divide_eq_1_pos [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "0 < a \<Longrightarrow> (1 \<le> b/a) = (a \<le> b)"
+  "0 < a \<Longrightarrow> (1 \<le> b/a) = (a \<le> b)"
 by (auto simp add: le_divide_eq)
 
 lemma le_divide_eq_1_neg [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "a < 0 \<Longrightarrow> (1 \<le> b/a) = (b \<le> a)"
+  "a < 0 \<Longrightarrow> (1 \<le> b/a) = (b \<le> a)"
 by (auto simp add: le_divide_eq)
 
 lemma divide_le_eq_1_pos [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "0 < a \<Longrightarrow> (b/a \<le> 1) = (b \<le> a)"
+  "0 < a \<Longrightarrow> (b/a \<le> 1) = (b \<le> a)"
 by (auto simp add: divide_le_eq)
 
 lemma divide_le_eq_1_neg [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "a < 0 \<Longrightarrow> (b/a \<le> 1) = (a \<le> b)"
+  "a < 0 \<Longrightarrow> (b/a \<le> 1) = (a \<le> b)"
 by (auto simp add: divide_le_eq)
 
 lemma less_divide_eq_1_pos [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "0 < a \<Longrightarrow> (1 < b/a) = (a < b)"
+  "0 < a \<Longrightarrow> (1 < b/a) = (a < b)"
 by (auto simp add: less_divide_eq)
 
 lemma less_divide_eq_1_neg [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "a < 0 \<Longrightarrow> (1 < b/a) = (b < a)"
+  "a < 0 \<Longrightarrow> (1 < b/a) = (b < a)"
 by (auto simp add: less_divide_eq)
 
 lemma divide_less_eq_1_pos [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "0 < a \<Longrightarrow> (b/a < 1) = (b < a)"
+  "0 < a \<Longrightarrow> (b/a < 1) = (b < a)"
 by (auto simp add: divide_less_eq)
 
 lemma divide_less_eq_1_neg [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "a < 0 \<Longrightarrow> b/a < 1 <-> a < b"
+  "a < 0 \<Longrightarrow> b/a < 1 <-> a < b"
 by (auto simp add: divide_less_eq)
 
 lemma eq_divide_eq_1 [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "(1 = b/a) = ((a \<noteq> 0 & a = b))"
+  "(1 = b/a) = ((a \<noteq> 0 & a = b))"
 by (auto simp add: eq_divide_eq)
 
 lemma divide_eq_eq_1 [simp,no_atp]:
-  fixes a :: "'a :: {linordered_field,division_ring_inverse_zero}"
-  shows "(b/a = 1) = ((a \<noteq> 0 & a = b))"
+  "(b/a = 1) = ((a \<noteq> 0 & a = b))"
 by (auto simp add: divide_eq_eq)
 
 lemma abs_inverse [simp]:
-     "\<bar>inverse (a::'a::{linordered_field,division_ring_inverse_zero})\<bar> = 
+     "\<bar>inverse a\<bar> = 
       inverse \<bar>a\<bar>"
 apply (cases "a=0", simp) 
 apply (simp add: nonzero_abs_inverse) 
 done
 
 lemma abs_divide [simp]:
-     "\<bar>a / (b::'a::{linordered_field,division_ring_inverse_zero})\<bar> = \<bar>a\<bar> / \<bar>b\<bar>"
+     "\<bar>a / b\<bar> = \<bar>a\<bar> / \<bar>b\<bar>"
 apply (cases "b=0", simp) 
 apply (simp add: nonzero_abs_divide) 
 done
 
-lemma abs_div_pos: "(0::'a::{linordered_field,division_ring_inverse_zero}) < y ==> 
+lemma abs_div_pos: "0 < y ==> 
     \<bar>x\<bar> / y = \<bar>x / y\<bar>"
   apply (subst abs_divide)
   apply (simp add: order_less_imp_le)
 done
 
 lemma field_le_mult_one_interval:
-  fixes x :: "'a\<Colon>{linordered_field,division_ring_inverse_zero}"
   assumes *: "\<And>z. \<lbrakk> 0 < z ; z < 1 \<rbrakk> \<Longrightarrow> z * x \<le> y"
   shows "x \<le> y"
 proof (cases "0 < x")
@@ -916,6 +907,8 @@
   finally show ?thesis .
 qed
 
+end
+
 code_modulename SML
   Fields Arith
 
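The Fields.thy hunks above all follow one pattern: the division and inverse lemmas move inside the class contexts field_inverse_zero and linordered_field_inverse_zero, so the per-lemma sort annotations disappear. A minimal before/after sketch of that pattern, modelled on the inverse_divide hunk above (the *_sketch lemma names are hypothetical and not part of the changeset):

  (* old style: each statement repeats the full sort constraint *)
  lemma inverse_divide_sketch_old:
    "inverse (a / b) = b / (a::'a::{field,division_ring_inverse_zero})"
    by (simp add: divide_inverse mult_commute)

  (* new style: stated once inside the class context, no annotation needed *)
  context field_inverse_zero
  begin

  lemma inverse_divide_sketch_new:
    "inverse (a / b) = b / a"
    by (simp add: divide_inverse mult_commute)

  end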
--- a/src/HOL/Groebner_Basis.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Groebner_Basis.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -473,21 +473,21 @@
 interpretation class_fieldgb:
   fieldgb "op +" "op *" "op ^" "0::'a::{field,number_ring}" "1" "op -" "uminus" "op /" "inverse" apply (unfold_locales) by (simp_all add: divide_inverse)
 
-lemma divide_Numeral1: "(x::'a::{field,number_ring}) / Numeral1 = x" by simp
-lemma divide_Numeral0: "(x::'a::{field,number_ring, division_ring_inverse_zero}) / Numeral0 = 0"
+lemma divide_Numeral1: "(x::'a::{field, number_ring}) / Numeral1 = x" by simp
+lemma divide_Numeral0: "(x::'a::{field_inverse_zero, number_ring}) / Numeral0 = 0"
   by simp
-lemma mult_frac_frac: "((x::'a::{field,division_ring_inverse_zero}) / y) * (z / w) = (x*z) / (y*w)"
+lemma mult_frac_frac: "((x::'a::field_inverse_zero) / y) * (z / w) = (x*z) / (y*w)"
   by simp
-lemma mult_frac_num: "((x::'a::{field, division_ring_inverse_zero}) / y) * z  = (x*z) / y"
+lemma mult_frac_num: "((x::'a::field_inverse_zero) / y) * z  = (x*z) / y"
   by simp
-lemma mult_num_frac: "((x::'a::{field, division_ring_inverse_zero}) / y) * z  = (x*z) / y"
+lemma mult_num_frac: "((x::'a::field_inverse_zero) / y) * z  = (x*z) / y"
   by simp
 
 lemma Numeral1_eq1_nat: "(1::nat) = Numeral1" by simp
 
-lemma add_frac_num: "y\<noteq> 0 \<Longrightarrow> (x::'a::{field, division_ring_inverse_zero}) / y + z = (x + z*y) / y"
+lemma add_frac_num: "y\<noteq> 0 \<Longrightarrow> (x::'a::field_inverse_zero) / y + z = (x + z*y) / y"
   by (simp add: add_divide_distrib)
-lemma add_num_frac: "y\<noteq> 0 \<Longrightarrow> z + (x::'a::{field, division_ring_inverse_zero}) / y = (x + z*y) / y"
+lemma add_num_frac: "y\<noteq> 0 \<Longrightarrow> z + (x::'a::field_inverse_zero) / y = (x + z*y) / y"
   by (simp add: add_divide_distrib)
 
 ML {*
--- a/src/HOL/Int.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Int.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -324,27 +324,6 @@
 
 end
 
-context linordered_idom
-begin
-
-lemma of_int_le_iff [simp]:
-  "of_int w \<le> of_int z \<longleftrightarrow> w \<le> z"
-by (cases w, cases z, simp add: of_int le minus algebra_simps of_nat_add [symmetric] del: of_nat_add)
-
-text{*Special cases where either operand is zero*}
-lemmas of_int_0_le_iff [simp] = of_int_le_iff [of 0, simplified]
-lemmas of_int_le_0_iff [simp] = of_int_le_iff [of _ 0, simplified]
-
-lemma of_int_less_iff [simp]:
-  "of_int w < of_int z \<longleftrightarrow> w < z"
-  by (simp add: not_le [symmetric] linorder_not_le [symmetric])
-
-text{*Special cases where either operand is zero*}
-lemmas of_int_0_less_iff [simp] = of_int_less_iff [of 0, simplified]
-lemmas of_int_less_0_iff [simp] = of_int_less_iff [of _ 0, simplified]
-
-end
-
 text{*Class for unital rings with characteristic zero.
  Includes non-ordered rings like the complex numbers.*}
 class ring_char_0 = ring_1 + semiring_char_0
@@ -358,13 +337,47 @@
 done
 
 text{*Special cases where either operand is zero*}
-lemmas of_int_0_eq_iff [simp] = of_int_eq_iff [of 0, simplified]
-lemmas of_int_eq_0_iff [simp] = of_int_eq_iff [of _ 0, simplified]
+lemma of_int_eq_0_iff [simp]:
+  "of_int z = 0 \<longleftrightarrow> z = 0"
+  using of_int_eq_iff [of z 0] by simp
+
+lemma of_int_0_eq_iff [simp]:
+  "0 = of_int z \<longleftrightarrow> z = 0"
+  using of_int_eq_iff [of 0 z] by simp
 
 end
 
+context linordered_idom
+begin
+
 text{*Every @{text linordered_idom} has characteristic zero.*}
-subclass (in linordered_idom) ring_char_0 by intro_locales
+subclass ring_char_0 ..
+
+lemma of_int_le_iff [simp]:
+  "of_int w \<le> of_int z \<longleftrightarrow> w \<le> z"
+  by (cases w, cases z, simp add: of_int le minus algebra_simps of_nat_add [symmetric] del: of_nat_add)
+
+lemma of_int_less_iff [simp]:
+  "of_int w < of_int z \<longleftrightarrow> w < z"
+  by (simp add: less_le order_less_le)
+
+lemma of_int_0_le_iff [simp]:
+  "0 \<le> of_int z \<longleftrightarrow> 0 \<le> z"
+  using of_int_le_iff [of 0 z] by simp
+
+lemma of_int_le_0_iff [simp]:
+  "of_int z \<le> 0 \<longleftrightarrow> z \<le> 0"
+  using of_int_le_iff [of z 0] by simp
+
+lemma of_int_0_less_iff [simp]:
+  "0 < of_int z \<longleftrightarrow> 0 < z"
+  using of_int_less_iff [of 0 z] by simp
+
+lemma of_int_less_0_iff [simp]:
+  "of_int z < 0 \<longleftrightarrow> z < 0"
+  using of_int_less_iff [of z 0] by simp
+
+end
 
 lemma of_int_eq_id [simp]: "of_int = id"
 proof
@@ -1995,15 +2008,15 @@
 text{*Division By @{text "-1"}*}
 
 lemma divide_minus1 [simp]:
-     "x/-1 = -(x::'a::{field,division_ring_inverse_zero,number_ring})"
+     "x/-1 = -(x::'a::{field_inverse_zero, number_ring})"
 by simp
 
 lemma minus1_divide [simp]:
-     "-1 / (x::'a::{field,division_ring_inverse_zero,number_ring}) = - (1/x)"
+     "-1 / (x::'a::{field_inverse_zero, number_ring}) = - (1/x)"
 by (simp add: divide_inverse)
 
 lemma half_gt_zero_iff:
-     "(0 < r/2) = (0 < (r::'a::{linordered_field,division_ring_inverse_zero,number_ring}))"
+     "(0 < r/2) = (0 < (r::'a::{linordered_field_inverse_zero,number_ring}))"
 by auto
 
 lemmas half_gt_zero [simp] = half_gt_zero_iff [THEN iffD2, standard]
--- a/src/HOL/IsaMakefile	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/IsaMakefile	Tue Apr 27 11:17:50 2010 -0700
@@ -282,8 +282,7 @@
   $(SRC)/Provers/Arith/extract_common_term.ML \
   $(SRC)/Tools/Metis/metis.ML \
   Tools/ATP_Manager/atp_manager.ML \
-  Tools/ATP_Manager/atp_minimal.ML \
-  Tools/ATP_Manager/atp_wrapper.ML \
+  Tools/ATP_Manager/atp_systems.ML \
   Tools/Groebner_Basis/groebner.ML \
   Tools/Groebner_Basis/misc.ML \
   Tools/Groebner_Basis/normalizer.ML \
@@ -319,6 +318,7 @@
   Tools/Sledgehammer/meson_tactic.ML \
   Tools/Sledgehammer/metis_tactics.ML \
   Tools/Sledgehammer/sledgehammer_fact_filter.ML \
+  Tools/Sledgehammer/sledgehammer_fact_minimizer.ML \
   Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML \
   Tools/Sledgehammer/sledgehammer_fol_clause.ML \
   Tools/Sledgehammer/sledgehammer_hol_clause.ML \
--- a/src/HOL/Library/Abstract_Rat.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Library/Abstract_Rat.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -5,7 +5,7 @@
 header {* Abstract rational numbers *}
 
 theory Abstract_Rat
-imports GCD Main
+imports Complex_Main
 begin
 
 types Num = "int \<times> int"
@@ -184,7 +184,7 @@
 
 lemma isnormNum_unique[simp]: 
   assumes na: "isnormNum x" and nb: "isnormNum y" 
-  shows "((INum x ::'a::{ring_char_0,field, division_ring_inverse_zero}) = INum y) = (x = y)" (is "?lhs = ?rhs")
+  shows "((INum x ::'a::{field_char_0, field_inverse_zero}) = INum y) = (x = y)" (is "?lhs = ?rhs")
 proof
   have "\<exists> a b a' b'. x = (a,b) \<and> y = (a',b')" by auto
   then obtain a b a' b' where xy[simp]: "x = (a,b)" "y=(a',b')" by blast
@@ -217,11 +217,11 @@
 qed
 
 
-lemma isnormNum0[simp]: "isnormNum x \<Longrightarrow> (INum x = (0::'a::{ring_char_0, field,division_ring_inverse_zero})) = (x = 0\<^sub>N)"
+lemma isnormNum0[simp]: "isnormNum x \<Longrightarrow> (INum x = (0::'a::{field_char_0, field_inverse_zero})) = (x = 0\<^sub>N)"
   unfolding INum_int(2)[symmetric]
   by (rule isnormNum_unique, simp_all)
 
-lemma of_int_div_aux: "d ~= 0 ==> ((of_int x)::'a::{field, ring_char_0}) / (of_int d) = 
+lemma of_int_div_aux: "d ~= 0 ==> ((of_int x)::'a::field_char_0) / (of_int d) = 
     of_int (x div d) + (of_int (x mod d)) / ((of_int d)::'a)"
 proof -
   assume "d ~= 0"
@@ -238,14 +238,14 @@
 qed
 
 lemma of_int_div: "(d::int) ~= 0 ==> d dvd n ==>
-    (of_int(n div d)::'a::{field, ring_char_0}) = of_int n / of_int d"
+    (of_int(n div d)::'a::field_char_0) = of_int n / of_int d"
   apply (frule of_int_div_aux [of d n, where ?'a = 'a])
   apply simp
   apply (simp add: dvd_eq_mod_eq_0)
 done
 
 
-lemma normNum[simp]: "INum (normNum x) = (INum x :: 'a::{ring_char_0,field, division_ring_inverse_zero})"
+lemma normNum[simp]: "INum (normNum x) = (INum x :: 'a::{field_char_0, field_inverse_zero})"
 proof-
   have "\<exists> a b. x = (a,b)" by auto
   then obtain a b where x[simp]: "x = (a,b)" by blast
@@ -260,7 +260,7 @@
   ultimately show ?thesis by blast
 qed
 
-lemma INum_normNum_iff: "(INum x ::'a::{field, division_ring_inverse_zero, ring_char_0}) = INum y \<longleftrightarrow> normNum x = normNum y" (is "?lhs = ?rhs")
+lemma INum_normNum_iff: "(INum x ::'a::{field_char_0, field_inverse_zero}) = INum y \<longleftrightarrow> normNum x = normNum y" (is "?lhs = ?rhs")
 proof -
   have "normNum x = normNum y \<longleftrightarrow> (INum (normNum x) :: 'a) = INum (normNum y)"
     by (simp del: normNum)
@@ -268,7 +268,7 @@
   finally show ?thesis by simp
 qed
 
-lemma Nadd[simp]: "INum (x +\<^sub>N y) = INum x + (INum y :: 'a :: {ring_char_0,division_ring_inverse_zero,field})"
+lemma Nadd[simp]: "INum (x +\<^sub>N y) = INum x + (INum y :: 'a :: {field_char_0, field_inverse_zero})"
 proof-
 let ?z = "0:: 'a"
   have " \<exists> a b. x = (a,b)" " \<exists> a' b'. y = (a',b')" by auto
@@ -300,7 +300,7 @@
   ultimately show ?thesis by blast
 qed
 
-lemma Nmul[simp]: "INum (x *\<^sub>N y) = INum x * (INum y:: 'a :: {ring_char_0,division_ring_inverse_zero,field}) "
+lemma Nmul[simp]: "INum (x *\<^sub>N y) = INum x * (INum y:: 'a :: {field_char_0, field_inverse_zero}) "
 proof-
   let ?z = "0::'a"
   have " \<exists> a b. x = (a,b)" " \<exists> a' b'. y = (a',b')" by auto
@@ -323,16 +323,16 @@
 lemma Nneg[simp]: "INum (~\<^sub>N x) = - (INum x ::'a:: field)"
   by (simp add: Nneg_def split_def INum_def)
 
-lemma Nsub[simp]: shows "INum (x -\<^sub>N y) = INum x - (INum y:: 'a :: {ring_char_0,division_ring_inverse_zero,field})"
+lemma Nsub[simp]: shows "INum (x -\<^sub>N y) = INum x - (INum y:: 'a :: {field_char_0, field_inverse_zero})"
 by (simp add: Nsub_def split_def)
 
-lemma Ninv[simp]: "INum (Ninv x) = (1::'a :: {division_ring_inverse_zero,field}) / (INum x)"
+lemma Ninv[simp]: "INum (Ninv x) = (1::'a :: field_inverse_zero) / (INum x)"
   by (simp add: Ninv_def INum_def split_def)
 
-lemma Ndiv[simp]: "INum (x \<div>\<^sub>N y) = INum x / (INum y ::'a :: {ring_char_0, division_ring_inverse_zero,field})" by (simp add: Ndiv_def)
+lemma Ndiv[simp]: "INum (x \<div>\<^sub>N y) = INum x / (INum y ::'a :: {field_char_0, field_inverse_zero})" by (simp add: Ndiv_def)
 
 lemma Nlt0_iff[simp]: assumes nx: "isnormNum x" 
-  shows "((INum x :: 'a :: {ring_char_0,division_ring_inverse_zero,linordered_field})< 0) = 0>\<^sub>N x "
+  shows "((INum x :: 'a :: {field_char_0, linordered_field_inverse_zero})< 0) = 0>\<^sub>N x "
 proof-
   have " \<exists> a b. x = (a,b)" by simp
   then obtain a b where x[simp]:"x = (a,b)" by blast
@@ -345,7 +345,7 @@
 qed
 
 lemma Nle0_iff[simp]:assumes nx: "isnormNum x" 
-  shows "((INum x :: 'a :: {ring_char_0,division_ring_inverse_zero,linordered_field}) \<le> 0) = 0\<ge>\<^sub>N x"
+  shows "((INum x :: 'a :: {field_char_0, linordered_field_inverse_zero}) \<le> 0) = 0\<ge>\<^sub>N x"
 proof-
   have " \<exists> a b. x = (a,b)" by simp
   then obtain a b where x[simp]:"x = (a,b)" by blast
@@ -357,7 +357,7 @@
   ultimately show ?thesis by blast
 qed
 
-lemma Ngt0_iff[simp]:assumes nx: "isnormNum x" shows "((INum x :: 'a :: {ring_char_0,division_ring_inverse_zero,linordered_field})> 0) = 0<\<^sub>N x"
+lemma Ngt0_iff[simp]:assumes nx: "isnormNum x" shows "((INum x :: 'a :: {field_char_0, linordered_field_inverse_zero})> 0) = 0<\<^sub>N x"
 proof-
   have " \<exists> a b. x = (a,b)" by simp
   then obtain a b where x[simp]:"x = (a,b)" by blast
@@ -369,7 +369,7 @@
   ultimately show ?thesis by blast
 qed
 lemma Nge0_iff[simp]:assumes nx: "isnormNum x" 
-  shows "((INum x :: 'a :: {ring_char_0,division_ring_inverse_zero,linordered_field}) \<ge> 0) = 0\<le>\<^sub>N x"
+  shows "((INum x :: 'a :: {field_char_0, linordered_field_inverse_zero}) \<ge> 0) = 0\<le>\<^sub>N x"
 proof-
   have " \<exists> a b. x = (a,b)" by simp
   then obtain a b where x[simp]:"x = (a,b)" by blast
@@ -382,7 +382,7 @@
 qed
 
 lemma Nlt_iff[simp]: assumes nx: "isnormNum x" and ny: "isnormNum y"
-  shows "((INum x :: 'a :: {ring_char_0,division_ring_inverse_zero,linordered_field}) < INum y) = (x <\<^sub>N y)"
+  shows "((INum x :: 'a :: {field_char_0, linordered_field_inverse_zero}) < INum y) = (x <\<^sub>N y)"
 proof-
   let ?z = "0::'a"
   have "((INum x ::'a) < INum y) = (INum (x -\<^sub>N y) < ?z)" using nx ny by simp
@@ -391,7 +391,7 @@
 qed
 
 lemma Nle_iff[simp]: assumes nx: "isnormNum x" and ny: "isnormNum y"
-  shows "((INum x :: 'a :: {ring_char_0,division_ring_inverse_zero,linordered_field})\<le> INum y) = (x \<le>\<^sub>N y)"
+  shows "((INum x :: 'a :: {field_char_0, linordered_field_inverse_zero})\<le> INum y) = (x \<le>\<^sub>N y)"
 proof-
   have "((INum x ::'a) \<le> INum y) = (INum (x -\<^sub>N y) \<le> (0::'a))" using nx ny by simp
   also have "\<dots> = (0\<ge>\<^sub>N (x -\<^sub>N y))" using Nle0_iff[OF Nsub_normN[OF ny]] by simp
@@ -399,7 +399,7 @@
 qed
 
 lemma Nadd_commute:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "x +\<^sub>N y = y +\<^sub>N x"
 proof-
   have n: "isnormNum (x +\<^sub>N y)" "isnormNum (y +\<^sub>N x)" by simp_all
@@ -408,7 +408,7 @@
 qed
 
 lemma [simp]:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "(0, b) +\<^sub>N y = normNum y"
     and "(a, 0) +\<^sub>N y = normNum y" 
     and "x +\<^sub>N (0, b) = normNum x"
@@ -420,7 +420,7 @@
   done
 
 lemma normNum_nilpotent_aux[simp]:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   assumes nx: "isnormNum x" 
   shows "normNum x = x"
 proof-
@@ -432,7 +432,7 @@
 qed
 
 lemma normNum_nilpotent[simp]:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "normNum (normNum x) = normNum x"
   by simp
 
@@ -440,11 +440,11 @@
   by (simp_all add: normNum_def)
 
 lemma normNum_Nadd:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "normNum (x +\<^sub>N y) = x +\<^sub>N y" by simp
 
 lemma Nadd_normNum1[simp]:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "normNum x +\<^sub>N y = x +\<^sub>N y"
 proof-
   have n: "isnormNum (normNum x +\<^sub>N y)" "isnormNum (x +\<^sub>N y)" by simp_all
@@ -454,7 +454,7 @@
 qed
 
 lemma Nadd_normNum2[simp]:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "x +\<^sub>N normNum y = x +\<^sub>N y"
 proof-
   have n: "isnormNum (x +\<^sub>N normNum y)" "isnormNum (x +\<^sub>N y)" by simp_all
@@ -464,7 +464,7 @@
 qed
 
 lemma Nadd_assoc:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   shows "x +\<^sub>N y +\<^sub>N z = x +\<^sub>N (y +\<^sub>N z)"
 proof-
   have n: "isnormNum (x +\<^sub>N y +\<^sub>N z)" "isnormNum (x +\<^sub>N (y +\<^sub>N z))" by simp_all
@@ -476,7 +476,7 @@
   by (simp add: Nmul_def split_def Let_def gcd_commute_int mult_commute)
 
 lemma Nmul_assoc:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   assumes nx: "isnormNum x" and ny:"isnormNum y" and nz:"isnormNum z"
   shows "x *\<^sub>N y *\<^sub>N z = x *\<^sub>N (y *\<^sub>N z)"
 proof-
@@ -487,7 +487,7 @@
 qed
 
 lemma Nsub0:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   assumes x: "isnormNum x" and y:"isnormNum y" shows "(x -\<^sub>N y = 0\<^sub>N) = (x = y)"
 proof-
   { fix h :: 'a
@@ -502,7 +502,7 @@
   by (simp_all add: Nmul_def Let_def split_def)
 
 lemma Nmul_eq0[simp]:
-  assumes "SORT_CONSTRAINT('a::{ring_char_0,division_ring_inverse_zero,field})"
+  assumes "SORT_CONSTRAINT('a::{field_char_0, field_inverse_zero})"
   assumes nx:"isnormNum x" and ny: "isnormNum y"
   shows "(x*\<^sub>N y = 0\<^sub>N) = (x = 0\<^sub>N \<or> y = 0\<^sub>N)"
 proof-
--- a/src/HOL/Library/Bit.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Library/Bit.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -49,7 +49,7 @@
 
 subsection {* Type @{typ bit} forms a field *}
 
-instantiation bit :: "{field, division_ring_inverse_zero}"
+instantiation bit :: field_inverse_zero
 begin
 
 definition plus_bit_def:
--- a/src/HOL/Library/Formal_Power_Series.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Library/Formal_Power_Series.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -28,7 +28,7 @@
 
 text{* Definition of the basic elements 0 and 1 and the basic operations of addition, negation and multiplication *}
 
-instantiation fps :: (zero)  zero
+instantiation fps :: (zero) zero
 begin
 
 definition fps_zero_def:
@@ -40,7 +40,7 @@
 lemma fps_zero_nth [simp]: "0 $ n = 0"
   unfolding fps_zero_def by simp
 
-instantiation fps :: ("{one,zero}")  one
+instantiation fps :: ("{one, zero}") one
 begin
 
 definition fps_one_def:
@@ -2649,7 +2649,7 @@
 text{* The generalized binomial theorem as a  consequence of @{thm E_add_mult} *}
 
 lemma gbinomial_theorem: 
-  "((a::'a::{field_char_0, division_ring_inverse_zero})+b) ^ n = (\<Sum>k=0..n. of_nat (n choose k) * a^k * b^(n-k))"
+  "((a::'a::{field_char_0, field_inverse_zero})+b) ^ n = (\<Sum>k=0..n. of_nat (n choose k) * a^k * b^(n-k))"
 proof-
   from E_add_mult[of a b] 
   have "(E (a + b)) $ n = (E a * E b)$n" by simp
@@ -3252,7 +3252,7 @@
 subsection {* Hypergeometric series *}
 
 
-definition "F as bs (c::'a::{field_char_0, division_ring_inverse_zero}) = Abs_fps (%n. (foldl (%r a. r* pochhammer a n) 1 as * c^n)/ (foldl (%r b. r * pochhammer b n) 1 bs * of_nat (fact n)))"
+definition "F as bs (c::'a::{field_char_0, field_inverse_zero}) = Abs_fps (%n. (foldl (%r a. r* pochhammer a n) 1 as * c^n)/ (foldl (%r b. r * pochhammer b n) 1 bs * of_nat (fact n)))"
 
 lemma F_nth[simp]: "F as bs c $ n =  (foldl (%r a. r* pochhammer a n) 1 as * c^n)/ (foldl (%r b. r * pochhammer b n) 1 bs * of_nat (fact n))"
   by (simp add: F_def)
@@ -3321,9 +3321,9 @@
   by (simp add: fps_eq_iff fps_integral_def)
 
 lemma F_minus_nat: 
-  "F [- of_nat n] [- of_nat (n + m)] (c::'a::{field_char_0, division_ring_inverse_zero}) $ k = (if k <= n then pochhammer (- of_nat n) k * c ^ k /
+  "F [- of_nat n] [- of_nat (n + m)] (c::'a::{field_char_0, field_inverse_zero}) $ k = (if k <= n then pochhammer (- of_nat n) k * c ^ k /
     (pochhammer (- of_nat (n + m)) k * of_nat (fact k)) else 0)"
-  "F [- of_nat m] [- of_nat (m + n)] (c::'a::{field_char_0, division_ring_inverse_zero}) $ k = (if k <= m then pochhammer (- of_nat m) k * c ^ k /
+  "F [- of_nat m] [- of_nat (m + n)] (c::'a::{field_char_0, field_inverse_zero}) $ k = (if k <= m then pochhammer (- of_nat m) k * c ^ k /
     (pochhammer (- of_nat (m + n)) k * of_nat (fact k)) else 0)"
   by (auto simp add: pochhammer_eq_0_iff)
 
--- a/src/HOL/Library/Fraction_Field.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Library/Fraction_Field.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -232,7 +232,7 @@
 thm mult_eq_0_iff
 end
 
-instantiation fract :: (idom) field
+instantiation fract :: (idom) field_inverse_zero
 begin
 
 definition
@@ -263,16 +263,13 @@
 next
   fix q r :: "'a fract"
   show "q / r = q * inverse r" by (simp add: divide_fract_def)
+next
+  show "inverse 0 = (0:: 'a fract)" by (simp add: fract_expand)
+    (simp add: fract_collapse)
 qed
 
 end
 
-instance fract :: (idom) division_ring_inverse_zero
-proof
-  show "inverse 0 = (0:: 'a fract)" by (simp add: fract_expand)
-    (simp add: fract_collapse)
-qed
-
 
 subsubsection {* The ordered field of fractions over an ordered idom *}
 
@@ -434,7 +431,7 @@
 
 end
 
-instance fract :: (linordered_idom) linordered_field
+instance fract :: (linordered_idom) linordered_field_inverse_zero
 proof
   fix q r s :: "'a fract"
   show "q \<le> r ==> s + q \<le> s + r"
--- a/src/HOL/Metis_Examples/BigO.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Metis_Examples/BigO.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -26,9 +26,9 @@
   apply (metis abs_ge_minus_self abs_ge_zero abs_minus_cancel abs_of_nonneg equation_minus_iff Orderings.xt1(6) abs_mult)
   done
 
-(*** Now various verions with an increasing modulus ***)
+(*** Now various versions with an increasing shrink factor ***)
 
-sledgehammer_params [modulus = 1]
+sledgehammer_params [shrink_factor = 1]
 
 lemma (*bigo_pos_const:*) "(EX (c::'a::linordered_idom). 
     ALL x. (abs (h x)) <= (c * (abs (f x))))
@@ -97,7 +97,7 @@
   by (metis abs_le_iff 5 18 14)
 qed
 
-sledgehammer_params [modulus = 2]
+sledgehammer_params [shrink_factor = 2]
 
 lemma (*bigo_pos_const:*) "(EX (c::'a::linordered_idom). 
     ALL x. (abs (h x)) <= (c * (abs (f x))))
@@ -138,7 +138,7 @@
   by (metis 8 abs_ge_zero abs_mult_pos abs_mult 1 9 3 abs_le_iff)
 qed
 
-sledgehammer_params [modulus = 3]
+sledgehammer_params [shrink_factor = 3]
 
 lemma (*bigo_pos_const:*) "(EX (c::'a::linordered_idom). 
     ALL x. (abs (h x)) <= (c * (abs (f x))))
@@ -170,7 +170,7 @@
 qed
 
 
-sledgehammer_params [modulus = 4]
+sledgehammer_params [shrink_factor = 4]
 
 lemma (*bigo_pos_const:*) "(EX (c::'a::linordered_idom). 
     ALL x. (abs (h x)) <= (c * (abs (f x))))
--- a/src/HOL/Metis_Examples/set.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Metis_Examples/set.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -19,7 +19,7 @@
 lemma "P(n::nat) ==> ~P(0) ==> n ~= 0"
 by metis
 
-sledgehammer_params [modulus = 1]
+sledgehammer_params [shrink_factor = 1]
 
 
 (*multiple versions of this example*)
@@ -90,7 +90,7 @@
   by (metis 31 29)
 qed
 
-sledgehammer_params [modulus = 2]
+sledgehammer_params [shrink_factor = 2]
 
 lemma (*equal_union: *)
    "(X = Y \<union> Z) =
@@ -133,7 +133,7 @@
   by (metis 18 17)
 qed
 
-sledgehammer_params [modulus = 3]
+sledgehammer_params [shrink_factor = 3]
 
 lemma (*equal_union: *)
    "(X = Y \<union> Z) =
@@ -168,7 +168,7 @@
 
 (*Example included in TPHOLs paper*)
 
-sledgehammer_params [modulus = 4]
+sledgehammer_params [shrink_factor = 4]
 
 lemma (*equal_union: *)
    "(X = Y \<union> Z) =
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -283,10 +283,7 @@
     fun default_atp_name () =
       hd (#atps (Sledgehammer_Isar.default_params thy []))
       handle Empty => error "No ATP available."
-    fun get_prover name =
-      (case ATP_Manager.get_prover thy name of
-        SOME prover => (name, prover)
-      | NONE => error ("Bad ATP: " ^ quote name))
+    fun get_prover name = (name, ATP_Manager.get_prover thy name)
   in
     (case AList.lookup (op =) args proverK of
       SOME name => get_prover name
@@ -304,11 +301,11 @@
   let
     val {context = ctxt, facts, goal} = Proof.goal st
     val thy = ProofContext.theory_of ctxt
-    val change_dir = (fn SOME d => Config.put ATP_Wrapper.destdir d | _ => I)
+    val change_dir = (fn SOME d => Config.put ATP_Systems.dest_dir d | _ => I)
     val ctxt' =
       ctxt
       |> change_dir dir
-      |> Config.put ATP_Wrapper.measure_runtime true
+      |> Config.put ATP_Systems.measure_runtime true
     val params = Sledgehammer_Isar.default_params thy []
     val problem =
       {subgoal = 1, goal = (ctxt', (facts, goal)),
@@ -318,13 +315,14 @@
       (case hard_timeout of
         NONE => I
       | SOME secs => TimeLimit.timeLimit (Time.fromSeconds secs))
-    val ({success, message, relevant_thm_names,
+    val ({outcome, message, relevant_thm_names,
           atp_run_time_in_msecs = time_atp, ...}: ATP_Manager.prover_result,
         time_isa) = time_limit (Mirabelle.cpu_time
                       (prover params (K "") (Time.fromSeconds timeout))) problem
   in
-    if success then (message, SH_OK (time_isa, time_atp, relevant_thm_names))
-    else (message, SH_FAIL (time_isa, time_atp))
+    case outcome of
+      NONE => (message, SH_OK (time_isa, time_atp, relevant_thm_names))
+    | SOME _ => (message, SH_FAIL (time_isa, time_atp))
   end
   handle Sledgehammer_HOL_Clause.TRIVIAL => ("trivial", SH_OK (0, 0, []))
        | ERROR msg => ("error: " ^ msg, SH_ERROR)
@@ -381,17 +379,18 @@
 
 fun run_minimize args named_thms id ({pre=st, log, ...}: Mirabelle.run_args) =
   let
-    open ATP_Minimal
+    open Sledgehammer_Fact_Minimizer
     open Sledgehammer_Isar
     val thy = Proof.theory_of st
     val n0 = length (these (!named_thms))
-    val (prover_name, prover) = get_atp thy args
+    val (prover_name, _) = get_atp thy args
     val timeout =
       AList.lookup (op =) args minimize_timeoutK
       |> Option.map (fst o read_int o explode)
       |> the_default 5
-    val params = default_params thy [("minimize_timeout", Int.toString timeout)]
-    val minimize = minimize_theorems params prover prover_name 1
+    val params = default_params thy
+      [("atps", prover_name), ("minimize_timeout", Int.toString timeout)]
+    val minimize = minimize_theorems params 1
     val _ = log separator
   in
     case minimize st (these (!named_thms)) of
@@ -475,7 +474,7 @@
 
 fun invoke args =
   let
-    val _ = ATP_Manager.full_types := AList.defined (op =) args full_typesK
+    val _ = Sledgehammer_Isar.full_types := AList.defined (op =) args full_typesK
   in Mirabelle.register (init, sledgehammer_action args, done) end
 
 end
--- a/src/HOL/Multivariate_Analysis/Determinants.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Multivariate_Analysis/Determinants.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -55,7 +55,7 @@
 done
 
   (* FIXME: In Finite_Set there is a useless further assumption *)
-lemma setprod_inversef: "finite A ==> setprod (inverse \<circ> f) A = (inverse (setprod f A) :: 'a:: {division_ring_inverse_zero, field})"
+lemma setprod_inversef: "finite A ==> setprod (inverse \<circ> f) A = (inverse (setprod f A) :: 'a:: field_inverse_zero)"
   apply (erule finite_induct)
   apply (simp)
   apply simp
--- a/src/HOL/NSA/HyperDef.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/NSA/HyperDef.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -140,12 +140,12 @@
 
 lemma of_hypreal_inverse [simp]:
   "\<And>x. of_hypreal (inverse x) =
-   inverse (of_hypreal x :: 'a::{real_div_algebra,division_ring_inverse_zero} star)"
+   inverse (of_hypreal x :: 'a::{real_div_algebra, division_ring_inverse_zero} star)"
 by transfer (rule of_real_inverse)
 
 lemma of_hypreal_divide [simp]:
   "\<And>x y. of_hypreal (x / y) =
-   (of_hypreal x / of_hypreal y :: 'a::{real_field,division_ring_inverse_zero} star)"
+   (of_hypreal x / of_hypreal y :: 'a::{real_field, field_inverse_zero} star)"
 by transfer (rule of_real_divide)
 
 lemma of_hypreal_eq_iff [simp]:
@@ -454,7 +454,7 @@
 by transfer (rule field_power_not_zero)
 
 lemma hyperpow_inverse:
-  "\<And>r n. r \<noteq> (0::'a::{division_ring_inverse_zero,field} star)
+  "\<And>r n. r \<noteq> (0::'a::field_inverse_zero star)
    \<Longrightarrow> inverse (r pow n) = (inverse r) pow n"
 by transfer (rule power_inverse)
   
--- a/src/HOL/NSA/NSA.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/NSA/NSA.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -145,12 +145,12 @@
 by transfer (rule nonzero_norm_inverse)
 
 lemma hnorm_inverse:
-  "\<And>a::'a::{real_normed_div_algebra,division_ring_inverse_zero} star.
+  "\<And>a::'a::{real_normed_div_algebra, division_ring_inverse_zero} star.
    hnorm (inverse a) = inverse (hnorm a)"
 by transfer (rule norm_inverse)
 
 lemma hnorm_divide:
-  "\<And>a b::'a::{real_normed_field,division_ring_inverse_zero} star.
+  "\<And>a b::'a::{real_normed_field, field_inverse_zero} star.
    hnorm (a / b) = hnorm a / hnorm b"
 by transfer (rule norm_divide)
 
--- a/src/HOL/NSA/StarDef.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/NSA/StarDef.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -896,14 +896,19 @@
 apply (transfer, fact divide_inverse)
 done
 
+instance star :: (division_ring_inverse_zero) division_ring_inverse_zero
+by (intro_classes, transfer, rule inverse_zero)
+
 instance star :: (field) field
 apply (intro_classes)
 apply (transfer, erule left_inverse)
 apply (transfer, rule divide_inverse)
 done
 
-instance star :: (division_ring_inverse_zero) division_ring_inverse_zero
-by (intro_classes, transfer, rule inverse_zero)
+instance star :: (field_inverse_zero) field_inverse_zero
+apply intro_classes
+apply (rule inverse_zero)
+done
 
 instance star :: (ordered_semiring) ordered_semiring
 apply (intro_classes)
@@ -945,6 +950,7 @@
 
 instance star :: (linordered_idom) linordered_idom ..
 instance star :: (linordered_field) linordered_field ..
+instance star :: (linordered_field_inverse_zero) linordered_field_inverse_zero ..
 
 
 subsection {* Power *}
--- a/src/HOL/Nitpick_Examples/Mono_Nits.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Nitpick_Examples/Mono_Nits.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -21,9 +21,9 @@
   {thy = @{theory}, ctxt = @{context}, max_bisim_depth = ~1, boxes = [],
    stds = [(NONE, true)], wfs = [], user_axioms = NONE, debug = false,
    binary_ints = SOME false, destroy_constrs = false, specialize = false,
-   skolemize = false, star_linear_preds = false, uncurry = false,
-   fast_descrs = false, tac_timeout = NONE, evals = [], case_names = [],
-   def_table = def_table, nondef_table = Symtab.empty, user_nondefs = [],
+   star_linear_preds = false, fast_descrs = false, tac_timeout = NONE,
+   evals = [], case_names = [], def_table = def_table,
+   nondef_table = Symtab.empty, user_nondefs = [],
    simp_table = Unsynchronized.ref Symtab.empty, psimp_table = Symtab.empty,
    choice_spec_table = Symtab.empty, intro_table = Symtab.empty,
    ground_thm_table = Inttab.empty, ersatz_table = [],
--- a/src/HOL/Nitpick_Examples/Special_Nits.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Nitpick_Examples/Special_Nits.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -73,8 +73,6 @@
        \<and> (\<forall>u. b = u \<longrightarrow> f3 b b u b b = f3 u u b u u)"
 nitpick [expect = none]
 nitpick [dont_specialize, expect = none]
-nitpick [dont_skolemize, expect = none]
-nitpick [dont_specialize, dont_skolemize, expect = none]
 sorry
 
 function f4 :: "nat \<Rightarrow> nat \<Rightarrow> nat" where
--- a/src/HOL/Power.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Power.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -392,27 +392,26 @@
   by (induct n)
     (auto simp add: no_zero_divisors elim: contrapos_pp)
 
-lemma power_diff:
-  fixes a :: "'a::field"
+lemma (in field) power_diff:
   assumes nz: "a \<noteq> 0"
   shows "n \<le> m \<Longrightarrow> a ^ (m - n) = a ^ m / a ^ n"
-  by (induct m n rule: diff_induct) (simp_all add: nz)
+  by (induct m n rule: diff_induct) (simp_all add: nz field_power_not_zero)
 
 text{*Perhaps these should be simprules.*}
 lemma power_inverse:
-  fixes a :: "'a::{division_ring,division_ring_inverse_zero,power}"
-  shows "inverse (a ^ n) = (inverse a) ^ n"
+  fixes a :: "'a::division_ring_inverse_zero"
+  shows "inverse (a ^ n) = inverse a ^ n"
 apply (cases "a = 0")
 apply (simp add: power_0_left)
 apply (simp add: nonzero_power_inverse)
 done (* TODO: reorient or rename to inverse_power *)
 
 lemma power_one_over:
-  "1 / (a::'a::{field,division_ring_inverse_zero, power}) ^ n =  (1 / a) ^ n"
+  "1 / (a::'a::{field_inverse_zero, power}) ^ n =  (1 / a) ^ n"
   by (simp add: divide_inverse) (rule power_inverse)
 
 lemma power_divide:
-  "(a / b) ^ n = (a::'a::{field,division_ring_inverse_zero}) ^ n / b ^ n"
+  "(a / b) ^ n = (a::'a::field_inverse_zero) ^ n / b ^ n"
 apply (cases "b = 0")
 apply (simp add: power_0_left)
 apply (rule nonzero_power_divide)
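
Illustration (not part of the changeset): with power_inverse and power_divide now stated for division_ring_inverse_zero and field_inverse_zero, no separate zero case is needed at such types. A minimal Isar sketch at type rat (made an instance further down in this changeset):

  lemma "inverse ((x::rat) ^ n) = inverse x ^ n"
    by (rule power_inverse)

  lemma "((x::rat) / y) ^ n = x ^ n / y ^ n"
    by (rule power_divide)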
--- a/src/HOL/Rat.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Rat.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -411,7 +411,7 @@
 
 subsubsection {* The field of rational numbers *}
 
-instantiation rat :: field
+instantiation rat :: field_inverse_zero
 begin
 
 definition
@@ -440,13 +440,12 @@
 next
   fix q r :: rat
   show "q / r = q * inverse r" by (simp add: divide_rat_def)
+next
+  show "inverse 0 = (0::rat)" by (simp add: rat_number_expand, simp add: rat_number_collapse)
 qed
 
 end
 
-instance rat :: division_ring_inverse_zero proof
-qed (simp add: rat_number_expand, simp add: rat_number_collapse)
-
 
 subsubsection {* Various *}
 
@@ -624,7 +623,7 @@
 
 end
 
-instance rat :: linordered_field
+instance rat :: linordered_field_inverse_zero
 proof
   fix q r s :: rat
   show "q \<le> r ==> s + q \<le> s + r"
@@ -724,8 +723,7 @@
     by (cases "b = 0", simp, simp add: of_int_rat)
   moreover have "0 \<le> Fract (a mod b) b \<and> Fract (a mod b) b < 1"
     unfolding Fract_of_int_quotient
-    by (rule linorder_cases [of b 0])
-       (simp add: divide_nonpos_neg, simp, simp add: divide_nonneg_pos)
+    by (rule linorder_cases [of b 0]) (simp add: divide_nonpos_neg, simp, simp add: divide_nonneg_pos)
   ultimately show ?thesis by simp
 qed
 
@@ -818,7 +816,7 @@
 done
 
 lemma of_rat_inverse:
-  "(of_rat (inverse a)::'a::{field_char_0,division_ring_inverse_zero}) =
+  "(of_rat (inverse a)::'a::{field_char_0, field_inverse_zero}) =
    inverse (of_rat a)"
 by (cases "a = 0", simp_all add: nonzero_of_rat_inverse)
 
@@ -827,7 +825,7 @@
 by (simp add: divide_inverse of_rat_mult nonzero_of_rat_inverse)
 
 lemma of_rat_divide:
-  "(of_rat (a / b)::'a::{field_char_0,division_ring_inverse_zero})
+  "(of_rat (a / b)::'a::{field_char_0, field_inverse_zero})
    = of_rat a / of_rat b"
 by (cases "b = 0") (simp_all add: nonzero_of_rat_divide)
 
@@ -968,7 +966,7 @@
 done
 
 lemma Rats_inverse [simp]:
-  fixes a :: "'a::{field_char_0,division_ring_inverse_zero}"
+  fixes a :: "'a::{field_char_0, field_inverse_zero}"
   shows "a \<in> Rats \<Longrightarrow> inverse a \<in> Rats"
 apply (auto simp add: Rats_def)
 apply (rule range_eqI)
@@ -984,7 +982,7 @@
 done
 
 lemma Rats_divide [simp]:
-  fixes a b :: "'a::{field_char_0,division_ring_inverse_zero}"
+  fixes a b :: "'a::{field_char_0, field_inverse_zero}"
   shows "\<lbrakk>a \<in> Rats; b \<in> Rats\<rbrakk> \<Longrightarrow> a / b \<in> Rats"
 apply (auto simp add: Rats_def)
 apply (rule range_eqI)
--- a/src/HOL/RealDef.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/RealDef.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -266,23 +266,16 @@
 
 subsection{*The Real Numbers form a Field*}
 
-instance real :: field
+instance real :: field_inverse_zero
 proof
   fix x y z :: real
   show "x \<noteq> 0 ==> inverse x * x = 1" by (rule real_mult_inverse_left)
   show "x / y = x * inverse y" by (simp add: real_divide_def)
+  show "inverse 0 = (0::real)" by (simp add: real_inverse_def)
 qed
 
-
-text{*Inverse of zero!  Useful to simplify certain equations*}
-
 lemma INVERSE_ZERO: "inverse 0 = (0::real)"
-by (simp add: real_inverse_def)
-
-instance real :: division_ring_inverse_zero
-proof
-  show "inverse 0 = (0::real)" by (rule INVERSE_ZERO)
-qed
+  by (fact inverse_zero)
 
 
 subsection{*The @{text "\<le>"} Ordering*}
@@ -416,7 +409,7 @@
 
 subsection{*The Reals Form an Ordered Field*}
 
-instance real :: linordered_field
+instance real :: linordered_field_inverse_zero
 proof
   fix x y z :: real
   show "x \<le> y ==> z + x \<le> z + y" by (rule real_add_left_mono)
--- a/src/HOL/RealVector.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/RealVector.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -207,7 +207,7 @@
 by (rule inverse_unique, simp)
 
 lemma inverse_scaleR_distrib:
-  fixes x :: "'a::{real_div_algebra,division_ring_inverse_zero}"
+  fixes x :: "'a::{real_div_algebra, division_ring_inverse_zero}"
   shows "inverse (scaleR a x) = scaleR (inverse a) (inverse x)"
 apply (case_tac "a = 0", simp)
 apply (case_tac "x = 0", simp)
@@ -250,7 +250,7 @@
 
 lemma of_real_inverse [simp]:
   "of_real (inverse x) =
-   inverse (of_real x :: 'a::{real_div_algebra,division_ring_inverse_zero})"
+   inverse (of_real x :: 'a::{real_div_algebra, division_ring_inverse_zero})"
 by (simp add: of_real_def inverse_scaleR_distrib)
 
 lemma nonzero_of_real_divide:
@@ -260,7 +260,7 @@
 
 lemma of_real_divide [simp]:
   "of_real (x / y) =
-   (of_real x / of_real y :: 'a::{real_field,division_ring_inverse_zero})"
+   (of_real x / of_real y :: 'a::{real_field, field_inverse_zero})"
 by (simp add: divide_inverse)
 
 lemma of_real_power [simp]:
@@ -370,7 +370,7 @@
 done
 
 lemma Reals_inverse [simp]:
-  fixes a :: "'a::{real_div_algebra,division_ring_inverse_zero}"
+  fixes a :: "'a::{real_div_algebra, division_ring_inverse_zero}"
   shows "a \<in> Reals \<Longrightarrow> inverse a \<in> Reals"
 apply (auto simp add: Reals_def)
 apply (rule range_eqI)
@@ -386,7 +386,7 @@
 done
 
 lemma Reals_divide [simp]:
-  fixes a b :: "'a::{real_field,division_ring_inverse_zero}"
+  fixes a b :: "'a::{real_field, field_inverse_zero}"
   shows "\<lbrakk>a \<in> Reals; b \<in> Reals\<rbrakk> \<Longrightarrow> a / b \<in> Reals"
 apply (auto simp add: Reals_def)
 apply (rule range_eqI)
@@ -726,7 +726,7 @@
 done
 
 lemma norm_inverse:
-  fixes a :: "'a::{real_normed_div_algebra,division_ring_inverse_zero}"
+  fixes a :: "'a::{real_normed_div_algebra, division_ring_inverse_zero}"
   shows "norm (inverse a) = inverse (norm a)"
 apply (case_tac "a = 0", simp)
 apply (erule nonzero_norm_inverse)
@@ -738,7 +738,7 @@
 by (simp add: divide_inverse norm_mult nonzero_norm_inverse)
 
 lemma norm_divide:
-  fixes a b :: "'a::{real_normed_field,division_ring_inverse_zero}"
+  fixes a b :: "'a::{real_normed_field, field_inverse_zero}"
   shows "norm (a / b) = norm a / norm b"
 by (simp add: divide_inverse norm_mult norm_inverse)
 
--- a/src/HOL/Series.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Series.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -381,7 +381,7 @@
   shows "norm x < 1 \<Longrightarrow> summable (\<lambda>n. x ^ n)"
 by (rule geometric_sums [THEN sums_summable])
 
-lemma half: "0 < 1 / (2::'a::{number_ring,division_ring_inverse_zero,linordered_field})"
+lemma half: "0 < 1 / (2::'a::{number_ring,linordered_field_inverse_zero})"
   by arith 
 
 lemma power_half_series: "(\<lambda>n. (1/2::real)^Suc n) sums 1"
--- a/src/HOL/Sledgehammer.thy	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Sledgehammer.thy	Tue Apr 27 11:17:50 2010 -0700
@@ -18,8 +18,8 @@
   ("Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML")
   ("Tools/Sledgehammer/sledgehammer_fact_filter.ML")
   ("Tools/ATP_Manager/atp_manager.ML")
-  ("Tools/ATP_Manager/atp_wrapper.ML")
-  ("Tools/ATP_Manager/atp_minimal.ML")
+  ("Tools/ATP_Manager/atp_systems.ML")
+  ("Tools/Sledgehammer/sledgehammer_fact_minimizer.ML")
   ("Tools/Sledgehammer/sledgehammer_isar.ML")
   ("Tools/Sledgehammer/meson_tactic.ML")
   ("Tools/Sledgehammer/metis_tactics.ML")
@@ -101,10 +101,11 @@
 use "Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML"
 use "Tools/Sledgehammer/sledgehammer_fact_filter.ML"
 use "Tools/ATP_Manager/atp_manager.ML"
-use "Tools/ATP_Manager/atp_wrapper.ML"
-setup ATP_Wrapper.setup
-use "Tools/ATP_Manager/atp_minimal.ML"
+use "Tools/ATP_Manager/atp_systems.ML"
+setup ATP_Systems.setup
+use "Tools/Sledgehammer/sledgehammer_fact_minimizer.ML"
 use "Tools/Sledgehammer/sledgehammer_isar.ML"
+setup Sledgehammer_Isar.setup
 
 
 subsection {* The MESON prover *}
--- a/src/HOL/Tools/ATP_Manager/SystemOnTPTP	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/ATP_Manager/SystemOnTPTP	Tue Apr 27 11:17:50 2010 -0700
@@ -136,7 +136,7 @@
   print $Response->content;
   exit(0);
 }else {
-  print "Remote-script could not extract proof:\n".$Response->content;
+  print "Remote script could not extract proof:\n".$Response->content;
   exit(-1);
 }
 
--- a/src/HOL/Tools/ATP_Manager/atp_manager.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/ATP_Manager/atp_manager.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -8,6 +8,7 @@
 
 signature ATP_MANAGER =
 sig
+  type name_pool = Sledgehammer_HOL_Clause.name_pool
   type relevance_override = Sledgehammer_Fact_Filter.relevance_override
   type minimize_command = Sledgehammer_Proof_Reconstruct.minimize_command
   type params =
@@ -24,7 +25,7 @@
      higher_order: bool option,
      follow_defs: bool,
      isar_proof: bool,
-     modulus: int,
+     shrink_factor: int,
      sorts: bool,
      timeout: Time.time,
      minimize_timeout: Time.time}
@@ -34,29 +35,32 @@
      relevance_override: relevance_override,
      axiom_clauses: (thm * (string * int)) list option,
      filtered_clauses: (thm * (string * int)) list option}
+  datatype failure =
+    Unprovable | TimedOut | OutOfResources | OldSpass | MalformedOutput |
+    UnknownError
   type prover_result =
-    {success: bool,
+    {outcome: failure option,
      message: string,
+     pool: name_pool option,
      relevant_thm_names: string list,
      atp_run_time_in_msecs: int,
+     output: string,
      proof: string,
      internal_thm_names: string Vector.vector,
+     conjecture_shape: int list list,
      filtered_clauses: (thm * (string * int)) list}
   type prover =
     params -> minimize_command -> Time.time -> problem -> prover_result
 
-  val atps: string Unsynchronized.ref
-  val timeout: int Unsynchronized.ref
-  val full_types: bool Unsynchronized.ref
   val kill_atps: unit -> unit
   val running_atps: unit -> unit
   val messages: int option -> unit
   val add_prover: string * prover -> theory -> theory
-  val get_prover: theory -> string -> prover option
+  val get_prover: theory -> string -> prover
   val available_atps: theory -> unit
-  val sledgehammer:
-    params -> int -> relevance_override -> (string -> minimize_command)
-    -> Proof.state -> unit
+  val start_prover_thread:
+    params -> Time.time -> Time.time -> int -> relevance_override
+    -> (string -> minimize_command) -> Proof.state -> string -> unit
 end;
 
 structure ATP_Manager : ATP_MANAGER =
@@ -82,7 +86,7 @@
    higher_order: bool option,
    follow_defs: bool,
    isar_proof: bool,
-   modulus: int,
+   shrink_factor: int,
    sorts: bool,
    timeout: Time.time,
    minimize_timeout: Time.time}
@@ -94,13 +98,20 @@
    axiom_clauses: (thm * (string * int)) list option,
    filtered_clauses: (thm * (string * int)) list option};
 
+datatype failure =
+  Unprovable | TimedOut | OutOfResources | OldSpass | MalformedOutput |
+  UnknownError
+
 type prover_result =
-  {success: bool,
+  {outcome: failure option,
    message: string,
+   pool: name_pool option,
    relevant_thm_names: string list,
    atp_run_time_in_msecs: int,
+   output: string,
    proof: string,
    internal_thm_names: string Vector.vector,
+   conjecture_shape: int list list,
    filtered_clauses: (thm * (string * int)) list};
 
 type prover =
@@ -112,26 +123,6 @@
 val message_store_limit = 20;
 val message_display_limit = 5;
 
-val atps = Unsynchronized.ref "e spass remote_vampire"; (* set in "ATP_Wrapper" *)
-val timeout = Unsynchronized.ref 60;
-val full_types = Unsynchronized.ref false;
-
-val _ =
-  ProofGeneralPgip.add_preference Preferences.category_proof
-    (Preferences.string_pref atps
-      "ATP: provers" "Default automatic provers (separated by whitespace)");
-
-val _ =
-  ProofGeneralPgip.add_preference Preferences.category_proof
-    (Preferences.int_pref timeout
-      "ATP: timeout" "ATPs will be interrupted after this time (in seconds)");
-
-val _ =
-  ProofGeneralPgip.add_preference Preferences.category_proof
-    (Preferences.bool_pref full_types
-      "ATP: full types" "ATPs will use full type information");
-
-
 
 (** thread management **)
 
@@ -172,13 +163,13 @@
   Synchronized.change global_state
   (fn state as {manager, timeout_heap, active, cancelling, messages, store} =>
     (case lookup_thread active thread of
-      SOME (birth_time, _, description) =>
+      SOME (birth_time, _, desc) =>
         let
           val active' = delete_thread thread active;
           val now = Time.now ()
-          val cancelling' = (thread, (now, description)) :: cancelling;
+          val cancelling' = (thread, (now, desc)) :: cancelling;
           val message' =
-            description ^ "\n" ^ message ^
+            desc ^ "\n" ^ message ^
             (if verbose then
                "Total time: " ^ Int.toString (Time.toMilliseconds
                                           (Time.- (now, birth_time))) ^ " ms.\n"
@@ -246,7 +237,7 @@
         do
           (Synchronized.timed_access global_state (SOME o time_limit o #timeout_heap) action
             |> these
-            |> List.app (unregister params "Timed out.");
+            |> List.app (unregister params "Timed out.\n");
             print_new_messages ();
             (*give threads some time to respond to interrupt*)
             OS.Process.sleep min_wait_time)
@@ -322,7 +313,7 @@
 
 fun err_dup_prover name = error ("Duplicate prover: " ^ quote name ^ ".");
 
-structure Provers = Theory_Data
+structure Data = Theory_Data
 (
   type T = (prover * stamp) Symtab.table;
   val empty = Symtab.empty;
@@ -332,60 +323,43 @@
 );
 
 fun add_prover (name, prover) thy =
-  Provers.map (Symtab.update_new (name, (prover, stamp ()))) thy
-    handle Symtab.DUP name => err_dup_prover name;
+  Data.map (Symtab.update_new (name, (prover, stamp ()))) thy
+  handle Symtab.DUP name => err_dup_prover name;
 
 fun get_prover thy name =
-  Option.map #1 (Symtab.lookup (Provers.get thy) name);
+  case Symtab.lookup (Data.get thy) name of
+    SOME (prover, _) => prover
+  | NONE => error ("Unknown ATP: " ^ name)
 
 fun available_atps thy =
   priority ("Available ATPs: " ^
-            commas (sort_strings (Symtab.keys (Provers.get thy))) ^ ".")
+            commas (sort_strings (Symtab.keys (Data.get thy))) ^ ".")
 
 
 (* start prover thread *)
 
-fun start_prover (params as {timeout, ...}) birth_time death_time i
-                 relevance_override minimize_command proof_state name =
-  (case get_prover (Proof.theory_of proof_state) name of
-    NONE => warning ("Unknown ATP: " ^ quote name ^ ".")
-  | SOME prover =>
+fun start_prover_thread (params as {timeout, ...}) birth_time death_time i
+                        relevance_override minimize_command proof_state name =
+  let
+    val prover = get_prover (Proof.theory_of proof_state) name
+    val {context = ctxt, facts, goal} = Proof.goal proof_state;
+    val n = Logic.count_prems (prop_of goal)
+    val desc =
+      "ATP " ^ quote name ^ " for subgoal " ^ string_of_int i ^ ":\n" ^
+      Syntax.string_of_term ctxt (Thm.term_of (Thm.cprem_of goal i));
+    val _ = Toplevel.thread true (fn () =>
       let
-        val {context = ctxt, facts, goal} = Proof.goal proof_state;
-        val n = Logic.count_prems (prop_of goal)
-        val desc =
-          "ATP " ^ quote name ^ " for subgoal " ^ string_of_int i ^ ":\n" ^
-            Syntax.string_of_term ctxt (Thm.term_of (Thm.cprem_of goal i));
-
-        val _ = Toplevel.thread true (fn () =>
-          let
-            val _ = register params birth_time death_time (Thread.self (), desc)
-            val problem =
-              {subgoal = i, goal = (ctxt, (facts, goal)),
-               relevance_override = relevance_override, axiom_clauses = NONE,
-               filtered_clauses = NONE}
-            val message =
-              #message (prover params (minimize_command name) timeout problem)
-              handle Sledgehammer_HOL_Clause.TRIVIAL =>
-                  metis_line i n []
-                | ERROR msg => "Error: " ^ msg ^ ".\n";
-            val _ = unregister params message (Thread.self ());
-          in () end);
-      in () end);
-
-
-(* Sledgehammer the given subgoal *)
-
-fun sledgehammer (params as {atps, timeout, ...}) i relevance_override
-                 minimize_command proof_state =
-  let
-    val birth_time = Time.now ()
-    val death_time = Time.+ (birth_time, timeout)
-    val _ = kill_atps () (* RACE w.r.t. other invocations of Sledgehammer *)
-    val _ = priority "Sledgehammering..."
-    val _ = List.app (start_prover params birth_time death_time i
-                                   relevance_override minimize_command
-                                   proof_state) atps
+        val _ = register params birth_time death_time (Thread.self (), desc)
+        val problem =
+          {subgoal = i, goal = (ctxt, (facts, goal)),
+           relevance_override = relevance_override, axiom_clauses = NONE,
+           filtered_clauses = NONE}
+        val message =
+          #message (prover params (minimize_command name) timeout problem)
+          handle Sledgehammer_HOL_Clause.TRIVIAL => metis_line i n []
+               | ERROR message => "Error: " ^ message ^ "\n"
+        val _ = unregister params message (Thread.self ());
+      in () end)
   in () end
 
 end;
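
Illustration (not part of the changeset): callers of a prover now inspect the outcome field of prover_result instead of the removed success flag. A minimal Standard ML sketch, using a hypothetical report function:

  fun report ({outcome, message, atp_run_time_in_msecs, ...}
              : ATP_Manager.prover_result) =
    case outcome of
      NONE => "Success (" ^ Int.toString atp_run_time_in_msecs ^ " ms): " ^ message
    | SOME _ => "Failure: " ^ message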
--- a/src/HOL/Tools/ATP_Manager/atp_minimal.ML	Tue Apr 27 11:03:04 2010 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,144 +0,0 @@
-(*  Title:      HOL/Tools/ATP_Manager/atp_minimal.ML
-    Author:     Philipp Meyer, TU Muenchen
-
-Minimization of theorem list for Metis using automatic theorem provers.
-*)
-
-signature ATP_MINIMAL =
-sig
-  type params = ATP_Manager.params
-  type prover = ATP_Manager.prover
-  type prover_result = ATP_Manager.prover_result
-
-  val minimize_theorems :
-    params -> prover -> string -> int -> Proof.state -> (string * thm list) list
-    -> (string * thm list) list option * string
-end;
-
-structure ATP_Minimal : ATP_MINIMAL =
-struct
-
-open Sledgehammer_Util
-open Sledgehammer_Fact_Preprocessor
-open Sledgehammer_Proof_Reconstruct
-open ATP_Manager
-
-(* Linear minimization algorithm *)
-
-fun linear_minimize test s =
-  let
-    fun aux [] p = p
-      | aux (x :: xs) (needed, result) =
-        case test (xs @ needed) of
-          SOME result => aux xs (needed, result)
-        | NONE => aux xs (x :: needed, result)
-  in aux s end
-
-
-(* failure check and producing answer *)
-
-datatype outcome = Success | Failure | Timeout | Error
-
-val string_of_outcome =
-  fn Success => "Success"
-   | Failure => "Failure"
-   | Timeout => "Timeout"
-   | Error => "Error"
-
-val failure_strings =
-  [("SPASS beiseite: Ran out of time.", Timeout),
-   ("Timeout", Timeout),
-   ("time limit exceeded", Timeout),
-   ("# Cannot determine problem status within resource limit", Timeout),
-   ("Error", Error)]
-
-fun outcome_of_result (result as {success, proof, ...} : prover_result) =
-  if success then
-    Success
-  else case get_first (fn (s, outcome) =>
-                          if String.isSubstring s proof then SOME outcome
-                          else NONE) failure_strings of
-    SOME outcome => outcome
-  | NONE => Failure
-
-(* wrapper for calling external prover *)
-
-fun sledgehammer_test_theorems (params as {full_types, ...} : params) prover
-        timeout subgoal state filtered_clauses name_thms_pairs =
-  let
-    val num_theorems = length name_thms_pairs
-    val _ = priority ("Testing " ^ string_of_int num_theorems ^
-                      " theorem" ^ plural_s num_theorems ^ "...")
-    val name_thm_pairs = maps (fn (n, ths) => map (pair n) ths) name_thms_pairs
-    val axclauses = cnf_rules_pairs (Proof.theory_of state) name_thm_pairs
-    val {context = ctxt, facts, goal} = Proof.goal state
-    val problem =
-     {subgoal = subgoal, goal = (ctxt, (facts, goal)),
-      relevance_override = {add = [], del = [], only = false},
-      axiom_clauses = SOME axclauses,
-      filtered_clauses = SOME (the_default axclauses filtered_clauses)}
-  in
-    `outcome_of_result (prover params (K "") timeout problem)
-    |>> tap (priority o string_of_outcome)
-  end
-
-(* minimalization of thms *)
-
-fun minimize_theorems (params as {debug, minimize_timeout, isar_proof, modulus,
-                                  sorts, ...})
-                      prover atp_name i state name_thms_pairs =
-  let
-    val msecs = Time.toMilliseconds minimize_timeout
-    val n = length name_thms_pairs
-    val _ =
-      priority ("Sledgehammer minimizer: ATP " ^ quote atp_name ^
-                " with a time limit of " ^ string_of_int msecs ^ " ms.")
-    val test_thms_fun =
-      sledgehammer_test_theorems params prover minimize_timeout i state
-    fun test_thms filtered thms =
-      case test_thms_fun filtered thms of
-        (Success, result) => SOME result
-      | _ => NONE
-
-    val {context = ctxt, facts, goal} = Proof.goal state;
-    val n = Logic.count_prems (prop_of goal)
-  in
-    (* try prove first to check result and get used theorems *)
-    (case test_thms_fun NONE name_thms_pairs of
-      (Success, result as {internal_thm_names, filtered_clauses, ...}) =>
-        let
-          val used = internal_thm_names |> Vector.foldr (op ::) []
-                                        |> sort_distinct string_ord
-          val to_use =
-            if length used < length name_thms_pairs then
-              filter (fn (name1, _) => List.exists (curry (op =) name1) used)
-                     name_thms_pairs
-            else name_thms_pairs
-          val (min_thms, {proof, internal_thm_names, ...}) =
-            linear_minimize (test_thms (SOME filtered_clauses)) to_use
-                            ([], result)
-          val n = length min_thms
-          val _ = priority (cat_lines
-            ["Minimized: " ^ string_of_int n ^ " theorem" ^ plural_s n] ^ ".")
-        in
-          (SOME min_thms,
-           proof_text isar_proof debug modulus sorts ctxt
-                      (K "", proof, internal_thm_names, goal, i) |> fst)
-        end
-    | (Timeout, _) =>
-        (NONE, "Timeout: You can increase the time limit using the \"timeout\" \
-               \option (e.g., \"timeout = " ^
-               string_of_int (10 + msecs div 1000) ^ " s\").")
-    | (Error, {message, ...}) => (NONE, "ATP error: " ^ message)
-    | (Failure, _) =>
-        (* Failure sometimes mean timeout, unfortunately. *)
-        (NONE, "Failure: No proof was found with the current time limit. You \
-               \can increase the time limit using the \"timeout\" \
-               \option (e.g., \"timeout = " ^
-               string_of_int (10 + msecs div 1000) ^ " s\")."))
-    handle Sledgehammer_HOL_Clause.TRIVIAL =>
-        (SOME [], metis_line i n [])
-      | ERROR msg => (NONE, "Error: " ^ msg)
-  end
-
-end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/ATP_Manager/atp_systems.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -0,0 +1,424 @@
+(*  Title:      HOL/Tools/ATP_Manager/atp_systems.ML
+    Author:     Fabian Immler, TU Muenchen
+    Author:     Jasmin Blanchette, TU Muenchen
+
+Setup for supported ATPs.
+*)
+
+signature ATP_SYSTEMS =
+sig
+  type prover = ATP_Manager.prover
+
+  (* hooks for problem files *)
+  val dest_dir : string Config.T
+  val problem_prefix : string Config.T
+  val measure_runtime : bool Config.T
+
+  val refresh_systems_on_tptp : unit -> unit
+  val default_atps_param_value : unit -> string
+  val setup : theory -> theory
+end;
+
+structure ATP_Systems : ATP_SYSTEMS =
+struct
+
+open Sledgehammer_Util
+open Sledgehammer_Fact_Preprocessor
+open Sledgehammer_HOL_Clause
+open Sledgehammer_Fact_Filter
+open Sledgehammer_Proof_Reconstruct
+open ATP_Manager
+
+(** generic ATP **)
+
+(* external problem files *)
+
+val (dest_dir, dest_dir_setup) = Attrib.config_string "atp_dest_dir" (K "");
+  (*Empty string means create files in Isabelle's temporary files directory.*)
+
+val (problem_prefix, problem_prefix_setup) =
+  Attrib.config_string "atp_problem_prefix" (K "prob");
+
+val (measure_runtime, measure_runtime_setup) =
+  Attrib.config_bool "atp_measure_runtime" (K false);
+
+
+(* prover configuration *)
+
+type prover_config =
+  {home: string,
+   executable: string,
+   arguments: Time.time -> string,
+   proof_delims: (string * string) list,
+   known_failures: (failure * string) list,
+   max_axiom_clauses: int,
+   prefers_theory_relevant: bool};
+
+
+(* basic template *)
+
+val remotify = prefix "remote_"
+
+fun with_path cleanup after f path =
+  Exn.capture f path
+  |> tap (fn _ => cleanup path)
+  |> Exn.release
+  |> tap (after path)
+
+(* Extracts the proof using the first begin/end delimiter pair found in the output. *)
+fun extract_proof delims output =
+  case pairself (find_first (fn s => String.isSubstring s output))
+                (ListPair.unzip delims) of
+    (SOME begin_delim, SOME end_delim) =>
+    output |> first_field begin_delim |> the |> snd
+           |> first_field end_delim |> the |> fst
+  | _ => ""
+
+fun extract_proof_and_outcome res_code proof_delims known_failures output =
+  case map_filter (fn (failure, pattern) =>
+                      if String.isSubstring pattern output then SOME failure
+                      else NONE) known_failures of
+    [] => (case extract_proof proof_delims output of
+             "" => ("", SOME UnknownError)
+           | proof => if res_code = 0 then (proof, NONE)
+                      else ("", SOME UnknownError))
+  | (failure :: _) => ("", SOME failure)
+
+fun string_for_failure Unprovable = "The ATP problem is unprovable."
+  | string_for_failure TimedOut = "Timed out."
+  | string_for_failure OutOfResources = "The ATP ran out of resources."
+  | string_for_failure OldSpass =
+    (* FIXME: Change the error message below to point to the Isabelle download
+       page once the package is there (around the Isabelle2010 release). *)
+    "Warning: Sledgehammer requires a more recent version of SPASS with \
+    \support for the TPTP syntax. To install it, download and untar the \
+    \package \"http://isabelle.in.tum.de/~blanchet/spass-3.7.tgz\" and add the \
+    \\"spass-3.7\" directory's full path to \"" ^
+    Path.implode (Path.expand (Path.appends
+        (Path.variable "ISABELLE_HOME_USER" ::
+         map Path.basic ["etc", "components"]))) ^
+    "\" on a line of its own."
+  | string_for_failure MalformedOutput = "Error: The ATP output is malformed."
+  | string_for_failure UnknownError = "Error: An unknown ATP error occurred."
+
+fun shape_of_clauses _ [] = []
+  | shape_of_clauses j ([] :: clauses) = [] :: shape_of_clauses j clauses
+  | shape_of_clauses j ((lit :: lits) :: clauses) =
+    let val shape = shape_of_clauses (j + 1) (lits :: clauses) in
+      (j :: hd shape) :: tl shape
+    end
+
+fun generic_prover overlord get_facts prepare write_file home executable args
+        proof_delims known_failures name
+        ({debug, full_types, explicit_apply, isar_proof, shrink_factor, sorts,
+         ...} : params) minimize_command
+        ({subgoal, goal, relevance_override, axiom_clauses, filtered_clauses}
+         : problem) =
+  let
+    (* get clauses and prepare them for writing *)
+    val (ctxt, (chain_ths, th)) = goal;
+    val thy = ProofContext.theory_of ctxt;
+    val chain_ths = map (Thm.put_name_hint chained_hint) chain_ths;
+    val goal_clss = #1 (neg_conjecture_clauses ctxt th subgoal)
+    val goal_cls = List.concat goal_clss
+    val the_filtered_clauses =
+      (case filtered_clauses of
+        NONE => get_facts relevance_override goal goal_cls
+      | SOME fcls => fcls);
+    val the_axiom_clauses =
+      (case axiom_clauses of
+        NONE => the_filtered_clauses
+      | SOME axcls => axcls);
+    val (internal_thm_names, clauses) =
+      prepare goal_cls chain_ths the_axiom_clauses the_filtered_clauses thy;
+
+    (* path to unique problem file *)
+    val the_dest_dir = if overlord then getenv "ISABELLE_HOME_USER"
+                       else Config.get ctxt dest_dir;
+    val the_problem_prefix = Config.get ctxt problem_prefix;
+    fun prob_pathname nr =
+      let
+        val probfile =
+          Path.basic (the_problem_prefix ^
+                      (if overlord then "_" ^ name else serial_string ())
+                      ^ "_" ^ string_of_int nr)
+      in
+        if the_dest_dir = "" then File.tmp_path probfile
+        else if File.exists (Path.explode the_dest_dir)
+        then Path.append (Path.explode the_dest_dir) probfile
+        else error ("No such directory: " ^ the_dest_dir ^ ".")
+      end;
+
+    val command = Path.explode (home ^ "/" ^ executable)
+    (* write out problem file and call prover *)
+    fun command_line probfile =
+      (if Config.get ctxt measure_runtime then
+         "TIMEFORMAT='%3U'; { time " ^
+         space_implode " " [File.shell_path command, args,
+                            File.shell_path probfile] ^ " ; } 2>&1"
+       else
+         space_implode " " ["exec", File.shell_path command, args,
+                            File.shell_path probfile, "2>&1"]) ^
+      (if overlord then
+         " | sed 's/,/, /g' \
+         \| sed 's/\\([^!=]\\)\\([=|]\\)\\([^=]\\)/\\1 \\2 \\3/g' \
+         \| sed 's/! =/ !=/g' \
+         \| sed 's/  / /g' | sed 's/| |/||/g' \
+         \| sed 's/ = = =/===/g' \
+         \| sed 's/= = /== /g'"
+       else
+         "")
+    fun split_time s =
+      let
+        val split = String.tokens (fn c => str c = "\n");
+        val (output, t) = s |> split |> split_last |> apfst cat_lines;
+        fun as_num f = f >> (fst o read_int);
+        val num = as_num (Scan.many1 Symbol.is_ascii_digit);
+        val digit = Scan.one Symbol.is_ascii_digit;
+        val num3 = as_num (digit ::: digit ::: (digit >> single));
+        val time = num --| Scan.$$ "." -- num3 >> (fn (a, b) => a * 1000 + b);
+        val as_time = the_default 0 o Scan.read Symbol.stopper time o explode;
+      in (output, as_time t) end;
+    fun split_time' s =
+      if Config.get ctxt measure_runtime then split_time s else (s, 0)
+    fun run_on probfile =
+      if File.exists command then
+        write_file full_types explicit_apply probfile clauses
+        |> pair (apfst split_time' (bash_output (command_line probfile)))
+      else
+        error ("Bad executable: " ^ Path.implode command ^ ".");
+
+    (* If the problem file has not been exported, remove it; otherwise, export
+       the proof file too. *)
+    fun cleanup probfile =
+      if the_dest_dir = "" then try File.rm probfile else NONE
+    fun export probfile (((output, _), _), _) =
+      if the_dest_dir = "" then
+        ()
+      else
+        File.write (Path.explode (Path.implode probfile ^ "_proof"))
+                   ((if overlord then
+                       "% " ^ command_line probfile ^ "\n% " ^ timestamp () ^
+                       "\n"
+                     else
+                       "") ^ output)
+
+    val (((output, atp_run_time_in_msecs), res_code),
+         (pool, conjecture_offset)) =
+      with_path cleanup export run_on (prob_pathname subgoal);
+    val conjecture_shape = shape_of_clauses (conjecture_offset + 1) goal_clss
+    (* Check for success and print out some information on failure. *)
+    val (proof, outcome) =
+      extract_proof_and_outcome res_code proof_delims known_failures output
+    val (message, relevant_thm_names) =
+      case outcome of
+        NONE =>
+        proof_text isar_proof
+                   (pool, debug, shrink_factor, sorts, ctxt, conjecture_shape)
+                   (minimize_command, proof, internal_thm_names, th, subgoal)
+      | SOME failure => (string_for_failure failure ^ "\n", [])
+  in
+    {outcome = outcome, message = message, pool = pool,
+     relevant_thm_names = relevant_thm_names,
+     atp_run_time_in_msecs = atp_run_time_in_msecs, output = output,
+     proof = proof, internal_thm_names = internal_thm_names,
+     conjecture_shape = conjecture_shape,
+     filtered_clauses = the_filtered_clauses}
+  end;
+
+
+(* generic TPTP-based provers *)
+
+fun generic_tptp_prover
+        (name, {home, executable, arguments, proof_delims, known_failures,
+                max_axiom_clauses, prefers_theory_relevant})
+        (params as {debug, overlord, respect_no_atp, relevance_threshold,
+                    convergence, theory_relevant, higher_order, follow_defs,
+                    isar_proof, ...})
+        minimize_command timeout =
+  generic_prover overlord
+      (get_relevant_facts respect_no_atp relevance_threshold convergence
+                          higher_order follow_defs max_axiom_clauses
+                          (the_default prefers_theory_relevant theory_relevant))
+      (prepare_clauses higher_order false)
+      (write_tptp_file (debug andalso overlord)) home
+      executable (arguments timeout) proof_delims known_failures name params
+      minimize_command
+
+fun tptp_prover name p = (name, generic_tptp_prover (name, p));
+
+
+(** common provers **)
+
+fun to_generous_secs time = (Time.toMilliseconds time + 999) div 1000
+
+(* Vampire *)
+
+(* Vampire requires an explicit time limit. *)
+
+val vampire_config : prover_config =
+  {home = getenv "VAMPIRE_HOME",
+   executable = "vampire",
+   arguments = fn timeout =>
+     "--output_syntax tptp --mode casc -t " ^
+     string_of_int (to_generous_secs timeout),
+   proof_delims = [("=========== Refutation ==========",
+                    "======= End of refutation =======")],
+   known_failures =
+     [(Unprovable, "Satisfiability detected"),
+      (OutOfResources, "CANNOT PROVE"),
+      (OutOfResources, "Refutation not found")],
+   max_axiom_clauses = 60,
+   prefers_theory_relevant = false}
+val vampire = tptp_prover "vampire" vampire_config
+
+
+(* E prover *)
+
+val tstp_proof_delims =
+  ("# SZS output start CNFRefutation.", "# SZS output end CNFRefutation")
+
+val e_config : prover_config =
+  {home = getenv "E_HOME",
+   executable = "eproof",
+   arguments = fn timeout =>
+     "--tstp-in --tstp-out -l5 -xAutoDev -tAutoDev --silent --cpu-limit=" ^
+     string_of_int (to_generous_secs timeout),
+   proof_delims = [tstp_proof_delims],
+   known_failures =
+     [(Unprovable, "SZS status: Satisfiable"),
+      (Unprovable, "SZS status Satisfiable"),
+      (TimedOut, "Failure: Resource limit exceeded (time)"),
+      (TimedOut, "time limit exceeded"),
+      (OutOfResources,
+       "# Cannot determine problem status within resource limit"),
+      (OutOfResources, "SZS status: ResourceOut"),
+      (OutOfResources, "SZS status ResourceOut")],
+   max_axiom_clauses = 100,
+   prefers_theory_relevant = false}
+val e = tptp_prover "e" e_config
+
+
+(* SPASS *)
+
+fun generic_dfg_prover
+        (name, {home, executable, arguments, proof_delims, known_failures,
+                max_axiom_clauses, prefers_theory_relevant})
+        (params as {overlord, respect_no_atp, relevance_threshold, convergence,
+                    theory_relevant, higher_order, follow_defs, ...})
+        minimize_command timeout =
+  generic_prover overlord
+      (get_relevant_facts respect_no_atp relevance_threshold convergence
+                          higher_order follow_defs max_axiom_clauses
+                          (the_default prefers_theory_relevant theory_relevant))
+      (prepare_clauses higher_order true) write_dfg_file home executable
+      (arguments timeout) proof_delims known_failures name params
+      minimize_command
+
+fun dfg_prover name p = (name, generic_dfg_prover (name, p))
+
+(* The "-VarWeight=3" option helps the higher-order problems, probably by
+   counteracting the presence of "hAPP". *)
+val spass_config : prover_config =
+  {home = getenv "SPASS_HOME",
+   executable = "SPASS",
+   arguments = fn timeout =>
+     "-Auto -SOS=1 -PGiven=0 -PProblem=0 -Splits=0 -FullRed=0 -DocProof \
+     \-VarWeight=3 -TimeLimit=" ^ string_of_int (to_generous_secs timeout),
+   proof_delims = [("Here is a proof", "Formulae used in the proof")],
+   known_failures =
+     [(Unprovable, "SPASS beiseite: Completion found"),
+      (TimedOut, "SPASS beiseite: Ran out of time"),
+      (OutOfResources, "SPASS beiseite: Maximal number of loops exceeded")],
+   max_axiom_clauses = 40,
+   prefers_theory_relevant = true}
+val spass = dfg_prover "spass" spass_config
+
+(* SPASS 3.7 supports both the DFG and the TPTP syntax, whereas SPASS 3.0
+   supports only the DFG syntax. As soon as all Isabelle repository/snapshot
+   users have upgraded to 3.7, we can kill "spass" (and all DFG support in
+   Sledgehammer) and rename "spass_tptp" "spass". *)
+
+val spass_tptp_config =
+  {home = #home spass_config,
+   executable = #executable spass_config,
+   arguments = prefix "-TPTP " o #arguments spass_config,
+   proof_delims = #proof_delims spass_config,
+   known_failures =
+     #known_failures spass_config @
+     [(OldSpass, "unrecognized option `-TPTP'"),
+      (OldSpass, "Unrecognized option TPTP")],
+   max_axiom_clauses = #max_axiom_clauses spass_config,
+   prefers_theory_relevant = #prefers_theory_relevant spass_config}
+val spass_tptp = tptp_prover "spass_tptp" spass_tptp_config
+
+(* remote prover invocation via SystemOnTPTP *)
+
+val systems = Synchronized.var "atp_systems" ([]: string list);
+
+fun get_systems () =
+  case bash_output "\"$ISABELLE_ATP_MANAGER/SystemOnTPTP\" -w" of
+    (answer, 0) => split_lines answer
+  | (answer, _) =>
+    error ("Failed to get available systems at SystemOnTPTP:\n" ^ answer)
+
+fun refresh_systems_on_tptp () =
+  Synchronized.change systems (fn _ => get_systems ());
+
+fun get_system prefix = Synchronized.change_result systems (fn systems =>
+  (if null systems then get_systems () else systems)
+  |> `(find_first (String.isPrefix prefix)));
+
+fun the_system prefix =
+  (case get_system prefix of
+    NONE => error ("System " ^ quote prefix ^
+                   " not available at SystemOnTPTP.")
+  | SOME sys => sys);
+
+val remote_known_failures =
+  [(TimedOut, "says Timeout"),
+   (MalformedOutput, "Remote script could not extract proof")]
+
+fun remote_prover_config atp_prefix args
+        ({proof_delims, known_failures, max_axiom_clauses,
+          prefers_theory_relevant, ...} : prover_config) : prover_config =
+  {home = getenv "ISABELLE_ATP_MANAGER",
+   executable = "SystemOnTPTP",
+   arguments = fn timeout =>
+     args ^ " -t " ^ string_of_int (to_generous_secs timeout) ^ " -s " ^
+     the_system atp_prefix,
+   proof_delims = insert (op =) tstp_proof_delims proof_delims,
+   known_failures = remote_known_failures @ known_failures,
+   max_axiom_clauses = max_axiom_clauses,
+   prefers_theory_relevant = prefers_theory_relevant}
+
+val remote_vampire =
+  tptp_prover (remotify (fst vampire))
+              (remote_prover_config "Vampire---9" "" vampire_config)
+
+val remote_e =
+  tptp_prover (remotify (fst e))
+              (remote_prover_config "EP---" "" e_config)
+
+val remote_spass =
+  tptp_prover (remotify (fst spass))
+              (remote_prover_config "SPASS---" "-x" spass_config)
+
+fun maybe_remote (name, _) ({home, ...} : prover_config) =
+  name |> home = "" ? remotify
+
+fun default_atps_param_value () =
+  space_implode " " [maybe_remote e e_config, maybe_remote spass spass_config,
+                     remotify (fst vampire)]
+
+val provers =
+  [spass, spass_tptp, vampire, e, remote_vampire, remote_spass, remote_e]
+val prover_setup = fold add_prover provers
+
+val setup =
+  dest_dir_setup
+  #> problem_prefix_setup
+  #> measure_runtime_setup
+  #> prover_setup
+
+end;
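
Illustration (not part of the changeset): inside ATP_Systems, an additional TPTP-based prover would be added by filling in a prover_config record and registering it with add_prover. A minimal Standard ML sketch; the executable name, arguments and failure string are made up:

  val my_atp_config : prover_config =
    {home = getenv "MY_ATP_HOME",
     executable = "my_atp",
     arguments = fn timeout =>
       "--tstp-in --tstp-out --cpu-limit=" ^
       string_of_int (to_generous_secs timeout),
     proof_delims = [tstp_proof_delims],
     known_failures = [(TimedOut, "CPU time limit exceeded")],
     max_axiom_clauses = 60,
     prefers_theory_relevant = false}
  val my_atp = tptp_prover "my_atp" my_atp_config
  (* register it, e.g. by extending prover_setup with:  fold add_prover [my_atp]  *)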
--- a/src/HOL/Tools/ATP_Manager/atp_wrapper.ML	Tue Apr 27 11:03:04 2010 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,384 +0,0 @@
-(*  Title:      HOL/Tools/ATP_Manager/atp_wrapper.ML
-    Author:     Fabian Immler, TU Muenchen
-
-Wrapper functions for external ATPs.
-*)
-
-signature ATP_WRAPPER =
-sig
-  type prover = ATP_Manager.prover
-
-  (* hooks for problem files *)
-  val destdir : string Config.T
-  val problem_prefix : string Config.T
-  val measure_runtime : bool Config.T
-
-  val refresh_systems_on_tptp : unit -> unit
-  val setup : theory -> theory
-end;
-
-structure ATP_Wrapper : ATP_WRAPPER =
-struct
-
-open Sledgehammer_Util
-open Sledgehammer_Fact_Preprocessor
-open Sledgehammer_HOL_Clause
-open Sledgehammer_Fact_Filter
-open Sledgehammer_Proof_Reconstruct
-open ATP_Manager
-
-(** generic ATP wrapper **)
-
-(* external problem files *)
-
-val (destdir, destdir_setup) = Attrib.config_string "atp_destdir" (K "");
-  (*Empty string means create files in Isabelle's temporary files directory.*)
-
-val (problem_prefix, problem_prefix_setup) =
-  Attrib.config_string "atp_problem_prefix" (K "prob");
-
-val (measure_runtime, measure_runtime_setup) =
-  Attrib.config_bool "atp_measure_runtime" (K false);
-
-
-(* prover configuration *)
-
-val remotify = prefix "remote_"
-
-type prover_config =
- {home: string,
-  executable: string,
-  arguments: Time.time -> string,
-  known_failures: (string list * string) list,
-  max_new_clauses: int,
-  prefers_theory_relevant: bool};
-
-
-(* basic template *)
-
-fun with_path cleanup after f path =
-  Exn.capture f path
-  |> tap (fn _ => cleanup path)
-  |> Exn.release
-  |> tap (after path);
-
-fun find_known_failure known_failures proof =
-  case map_filter (fn (patterns, message) =>
-                      if exists (fn pattern => String.isSubstring pattern proof)
-                                patterns then
-                        SOME message
-                      else
-                        NONE) known_failures of
-    [] => if is_proof_well_formed proof then ""
-          else "Error: The ATP output is ill-formed."
-  | (message :: _) => message
-
-fun generic_prover overlord get_facts prepare write_file home executable args
-        known_failures name
-        ({debug, full_types, explicit_apply, isar_proof, modulus, sorts, ...}
-         : params) minimize_command
-        ({subgoal, goal, relevance_override, axiom_clauses, filtered_clauses}
-         : problem) =
-  let
-    (* get clauses and prepare them for writing *)
-    val (ctxt, (chain_ths, th)) = goal;
-    val thy = ProofContext.theory_of ctxt;
-    val chain_ths = map (Thm.put_name_hint chained_hint) chain_ths;
-    val goal_cls = #1 (neg_conjecture_clauses ctxt th subgoal);
-    val the_filtered_clauses =
-      (case filtered_clauses of
-        NONE => get_facts relevance_override goal goal_cls
-      | SOME fcls => fcls);
-    val the_axiom_clauses =
-      (case axiom_clauses of
-        NONE => the_filtered_clauses
-      | SOME axcls => axcls);
-    val (internal_thm_names, clauses) =
-      prepare goal_cls chain_ths the_axiom_clauses the_filtered_clauses thy;
-
-    (* path to unique problem file *)
-    val destdir' = if overlord then getenv "ISABELLE_HOME_USER"
-                   else Config.get ctxt destdir;
-    val problem_prefix' = Config.get ctxt problem_prefix;
-    fun prob_pathname nr =
-      let
-        val probfile =
-          Path.basic (problem_prefix' ^
-                      (if overlord then "_" ^ name else serial_string ())
-                      ^ "_" ^ string_of_int nr)
-      in
-        if destdir' = "" then File.tmp_path probfile
-        else if File.exists (Path.explode destdir')
-        then Path.append (Path.explode destdir') probfile
-        else error ("No such directory: " ^ destdir' ^ ".")
-      end;
-
-    val command = Path.explode (home ^ "/" ^ executable)
-    (* write out problem file and call prover *)
-    fun command_line probfile =
-      (if Config.get ctxt measure_runtime then
-         "TIMEFORMAT='%3U'; { time " ^
-         space_implode " " [File.shell_path command, args,
-                            File.shell_path probfile] ^ " ; } 2>&1"
-       else
-         space_implode " " ["exec", File.shell_path command, args,
-                            File.shell_path probfile, "2>&1"]) ^
-      (if overlord then
-         " | sed 's/,/, /g' \
-         \| sed 's/\\([^!=]\\)\\([=|]\\)\\([^=]\\)/\\1 \\2 \\3/g' \
-         \| sed 's/! =/ !=/g' \
-         \| sed 's/  / /g' | sed 's/| |/||/g' \
-         \| sed 's/ = = =/===/g' \
-         \| sed 's/= = /== /g'"
-       else
-         "")
-    fun split_time s =
-      let
-        val split = String.tokens (fn c => str c = "\n");
-        val (proof, t) = s |> split |> split_last |> apfst cat_lines;
-        fun as_num f = f >> (fst o read_int);
-        val num = as_num (Scan.many1 Symbol.is_ascii_digit);
-        val digit = Scan.one Symbol.is_ascii_digit;
-        val num3 = as_num (digit ::: digit ::: (digit >> single));
-        val time = num --| Scan.$$ "." -- num3 >> (fn (a, b) => a * 1000 + b);
-        val as_time = the_default 0 o Scan.read Symbol.stopper time o explode;
-      in (proof, as_time t) end;
-    fun split_time' s =
-      if Config.get ctxt measure_runtime then split_time s else (s, 0)
-    fun run_on probfile =
-      if File.exists command then
-        write_file full_types explicit_apply probfile clauses
-        |> pair (apfst split_time' (bash_output (command_line probfile)))
-      else error ("Bad executable: " ^ Path.implode command ^ ".");
-
-    (* If the problem file has not been exported, remove it; otherwise, export
-       the proof file too. *)
-    fun cleanup probfile = if destdir' = "" then try File.rm probfile else NONE;
-    fun export probfile (((proof, _), _), _) =
-      if destdir' = "" then
-        ()
-      else
-        File.write (Path.explode (Path.implode probfile ^ "_proof"))
-                   ((if overlord then
-                       "% " ^ command_line probfile ^ "\n% " ^ timestamp () ^
-                       "\n"
-                     else
-                        "") ^ proof)
-
-    val (((proof, atp_run_time_in_msecs), rc), _) =
-      with_path cleanup export run_on (prob_pathname subgoal);
-
-    (* Check for success and print out some information on failure. *)
-    val failure = find_known_failure known_failures proof;
-    val success = rc = 0 andalso failure = "";
-    val (message, relevant_thm_names) =
-      if success then
-        proof_text isar_proof debug modulus sorts ctxt
-                   (minimize_command, proof, internal_thm_names, th, subgoal)
-      else if failure <> "" then
-        (failure ^ "\n", [])
-      else
-        ("Unknown ATP error: " ^ proof ^ ".\n", [])
-  in
-    {success = success, message = message,
-     relevant_thm_names = relevant_thm_names,
-     atp_run_time_in_msecs = atp_run_time_in_msecs, proof = proof,
-     internal_thm_names = internal_thm_names,
-     filtered_clauses = the_filtered_clauses}
-  end;
-
-
-(* generic TPTP-based provers *)
-
-fun generic_tptp_prover
-        (name, {home, executable, arguments, known_failures, max_new_clauses,
-                prefers_theory_relevant})
-        (params as {debug, overlord, respect_no_atp, relevance_threshold,
-                    convergence, theory_relevant, higher_order, follow_defs,
-                    isar_proof, ...})
-        minimize_command timeout =
-  generic_prover overlord
-      (get_relevant_facts respect_no_atp relevance_threshold convergence
-                          higher_order follow_defs max_new_clauses
-                          (the_default prefers_theory_relevant theory_relevant))
-      (prepare_clauses higher_order false)
-      (write_tptp_file (debug andalso overlord andalso not isar_proof)) home
-      executable (arguments timeout) known_failures name params minimize_command
-
-fun tptp_prover name p = (name, generic_tptp_prover (name, p));
-
-
-(** common provers **)
-
-fun generous_to_secs time = (Time.toMilliseconds time + 999) div 1000
-
-(* Vampire *)
-
-(* Vampire requires an explicit time limit. *)
-
-val vampire_config : prover_config =
-  {home = getenv "VAMPIRE_HOME",
-   executable = "vampire",
-   arguments = (fn timeout => "--output_syntax tptp --mode casc -t " ^
-                              string_of_int (generous_to_secs timeout)),
-   known_failures =
-     [(["Satisfiability detected", "CANNOT PROVE"],
-       "The ATP problem is unprovable."),
-      (["Refutation not found"],
-       "The ATP failed to determine the problem's status.")],
-   max_new_clauses = 60,
-   prefers_theory_relevant = false}
-val vampire = tptp_prover "vampire" vampire_config
-
-
-(* E prover *)
-
-val e_config : prover_config =
-  {home = getenv "E_HOME",
-   executable = "eproof",
-   arguments = (fn timeout => "--tstp-in --tstp-out -l5 -xAutoDev \
-                              \-tAutoDev --silent --cpu-limit=" ^
-                              string_of_int (generous_to_secs timeout)),
-   known_failures =
-       [(["SZS status: Satisfiable", "SZS status Satisfiable"],
-         "The ATP problem is unprovable."),
-        (["SZS status: ResourceOut", "SZS status ResourceOut"],
-         "The ATP ran out of resources."),
-        (["# Cannot determine problem status"],
-         "The ATP failed to determine the problem's status.")],
-   max_new_clauses = 100,
-   prefers_theory_relevant = false}
-val e = tptp_prover "e" e_config
-
-
-(* SPASS *)
-
-fun generic_dfg_prover
-        (name, {home, executable, arguments, known_failures, max_new_clauses,
-                prefers_theory_relevant})
-        (params as {overlord, respect_no_atp, relevance_threshold, convergence,
-                    theory_relevant, higher_order, follow_defs, ...})
-        minimize_command timeout =
-  generic_prover overlord
-      (get_relevant_facts respect_no_atp relevance_threshold convergence
-                          higher_order follow_defs max_new_clauses
-                          (the_default prefers_theory_relevant theory_relevant))
-      (prepare_clauses higher_order true) write_dfg_file home executable
-      (arguments timeout) known_failures name params minimize_command
-
-fun dfg_prover name p = (name, generic_dfg_prover (name, p))
-
-(* The "-VarWeight=3" option helps the higher-order problems, probably by
-   counteracting the presence of "hAPP". *)
-val spass_config : prover_config =
-  {home = getenv "SPASS_HOME",
-   executable = "SPASS",
-   arguments = (fn timeout => "-Auto -SOS=1 -PGiven=0 -PProblem=0 -Splits=0" ^
-     " -FullRed=0 -DocProof -VarWeight=3 -TimeLimit=" ^
-     string_of_int (generous_to_secs timeout)),
-   known_failures =
-     [(["SPASS beiseite: Completion found."], "The ATP problem is unprovable."),
-      (["SPASS beiseite: Ran out of time."], "The ATP timed out."),
-      (["SPASS beiseite: Maximal number of loops exceeded."],
-       "The ATP hit its loop limit.")],
-   max_new_clauses = 40,
-   prefers_theory_relevant = true}
-val spass = dfg_prover "spass" spass_config
-
-(* SPASS 3.7 supports both the DFG and the TPTP syntax, whereas SPASS 3.0
-   supports only the DFG syntax. As soon as all Isabelle repository/snapshot
-   users have upgraded to 3.7, we can kill "spass" (and all DFG support in
-   Sledgehammer) and rename "spass_tptp" "spass". *)
-
-(* FIXME: Change the error message below to point to the Isabelle download
-   page once the package is there (around the Isabelle2010 release). *)
-
-val spass_tptp_config =
-  {home = #home spass_config,
-   executable = #executable spass_config,
-   arguments = prefix "-TPTP " o #arguments spass_config,
-   known_failures =
-     #known_failures spass_config @
-     [(["unrecognized option `-TPTP'", "Unrecognized option TPTP"],
-       "Warning: Sledgehammer requires a more recent version of SPASS with \
-       \support for the TPTP syntax. To install it, download and untar the \
-       \package \"http://isabelle.in.tum.de/~blanchet/spass-3.7.tgz\" and add \
-       \the \"spass-3.7\" directory's full path to \"" ^
-       Path.implode (Path.expand (Path.appends
-           (Path.variable "ISABELLE_HOME_USER" ::
-            map Path.basic ["etc", "components"]))) ^
-       "\" on a line of its own.")],
-   max_new_clauses = #max_new_clauses spass_config,
-   prefers_theory_relevant = #prefers_theory_relevant spass_config}
-val spass_tptp = tptp_prover "spass_tptp" spass_tptp_config
-
-(* remote prover invocation via SystemOnTPTP *)
-
-val systems = Synchronized.var "atp_wrapper_systems" ([]: string list);
-
-fun get_systems () =
-  let
-    val (answer, rc) = bash_output "\"$ISABELLE_ATP_MANAGER/SystemOnTPTP\" -w"
-  in
-    if rc <> 0 then
-      error ("Failed to get available systems at SystemOnTPTP:\n" ^ answer)
-    else
-      split_lines answer
-  end;
-
-fun refresh_systems_on_tptp () =
-  Synchronized.change systems (fn _ => get_systems ());
-
-fun get_system prefix = Synchronized.change_result systems (fn systems =>
-  (if null systems then get_systems () else systems)
-  |> `(find_first (String.isPrefix prefix)));
-
-fun the_system prefix =
-  (case get_system prefix of
-    NONE => error ("System " ^ quote prefix ^ " not available at SystemOnTPTP")
-  | SOME sys => sys);
-
-val remote_known_failures =
-  [(["Remote-script could not extract proof"],
-    "Error: The remote ATP proof is ill-formed.")]
-
-fun remote_prover_config prover_prefix args
-        ({known_failures, max_new_clauses, prefers_theory_relevant, ...}
-         : prover_config) : prover_config =
-  {home = getenv "ISABELLE_ATP_MANAGER",
-   executable = "SystemOnTPTP",
-   arguments = (fn timeout =>
-     args ^ " -t " ^ string_of_int (generous_to_secs timeout) ^ " -s " ^
-     the_system prover_prefix),
-   known_failures = remote_known_failures @ known_failures,
-   max_new_clauses = max_new_clauses,
-   prefers_theory_relevant = prefers_theory_relevant}
-
-val remote_vampire =
-  tptp_prover (remotify (fst vampire))
-              (remote_prover_config "Vampire---9" "" vampire_config)
-
-val remote_e =
-  tptp_prover (remotify (fst e))
-              (remote_prover_config "EP---" "" e_config)
-
-val remote_spass =
-  tptp_prover (remotify (fst spass))
-              (remote_prover_config "SPASS---" "-x" spass_config)
-
-val provers =
-  [spass, spass_tptp, vampire, e, remote_vampire, remote_spass, remote_e]
-val prover_setup = fold add_prover provers
-
-val setup =
-  destdir_setup
-  #> problem_prefix_setup
-  #> measure_runtime_setup
-  #> prover_setup;
-
-fun maybe_remote (name, _) ({home, ...} : prover_config) =
-  name |> home = "" ? remotify
-
-val _ = atps := ([maybe_remote e e_config, maybe_remote spass spass_config,
-                  remotify (fst vampire)] |> space_implode " ")
-end;
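The removed wrapper classified ATP output by scanning it for known failure patterns (find_known_failure above). Below is a minimal standalone sketch of that lookup in plain SML rather than Isabelle/ML; the pattern table in the example is illustrative, only String.isSubstring and List.exists from the SML Basis are used, and the original's extra well-formedness check on unmatched output is omitted.

(* Return the message of the first entry whose patterns occur in the
   prover output; "" means no known failure was recognized. *)
fun find_known_failure known_failures output =
  let
    fun matches pattern = String.isSubstring pattern output
    fun first [] = ""
      | first ((patterns, message) :: rest) =
          if List.exists matches patterns then message else first rest
  in first known_failures end

(* Example: classify a SPASS run that hit its time limit. *)
val msg =
  find_known_failure
    [(["SPASS beiseite: Completion found."], "The ATP problem is unprovable."),
     (["SPASS beiseite: Ran out of time."], "The ATP timed out.")]
    "SPASS beiseite: Ran out of time.\n"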
--- a/src/HOL/Tools/Nitpick/HISTORY	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/HISTORY	Tue Apr 27 11:17:50 2010 -0700
@@ -16,7 +16,9 @@
   * Fixed soundness bug related to higher-order constructors
   * Added cache to speed up repeated Kodkod invocations on the same problems
   * Renamed "MiniSatJNI", "zChaffJNI", "BerkMinAlloy", and "SAT4JLight" to
- 	"MiniSat_JNI", "zChaff_JNI", "BerkMin_Alloy", and "SAT4J_Light"
+    "MiniSat_JNI", "zChaff_JNI", "BerkMin_Alloy", and "SAT4J_Light"
+  * Removed "skolemize", "uncurry", "sym_break", "flatten_prop",
+    "sharing_depth", and "show_skolems" options
 
 Version 2009-1
 
--- a/src/HOL/Tools/Nitpick/kodkod.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/kodkod.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -120,11 +120,10 @@
     AssignRelReg of n_ary_index * rel_expr |
     AssignIntReg of int * int_expr
 
-  type 'a fold_expr_funcs = {
-    formula_func: formula -> 'a -> 'a,
-    rel_expr_func: rel_expr -> 'a -> 'a,
-    int_expr_func: int_expr -> 'a -> 'a
-  }
+  type 'a fold_expr_funcs =
+    {formula_func: formula -> 'a -> 'a,
+     rel_expr_func: rel_expr -> 'a -> 'a,
+     int_expr_func: int_expr -> 'a -> 'a}
 
   val fold_formula : 'a fold_expr_funcs -> formula -> 'a -> 'a
   val fold_rel_expr : 'a fold_expr_funcs -> rel_expr -> 'a -> 'a
@@ -132,10 +131,9 @@
   val fold_decl : 'a fold_expr_funcs -> decl -> 'a -> 'a
   val fold_expr_assign : 'a fold_expr_funcs -> expr_assign -> 'a -> 'a
 
-  type 'a fold_tuple_funcs = {
-    tuple_func: tuple -> 'a -> 'a,
-    tuple_set_func: tuple_set -> 'a -> 'a
-  }
+  type 'a fold_tuple_funcs =
+    {tuple_func: tuple -> 'a -> 'a,
+     tuple_set_func: tuple_set -> 'a -> 'a}
 
   val fold_tuple : 'a fold_tuple_funcs -> tuple -> 'a -> 'a
   val fold_tuple_set : 'a fold_tuple_funcs -> tuple_set -> 'a -> 'a
@@ -144,15 +142,15 @@
       'a fold_expr_funcs -> 'a fold_tuple_funcs -> bound -> 'a -> 'a
   val fold_int_bound : 'a fold_tuple_funcs -> int_bound -> 'a -> 'a
 
-  type problem = {
-    comment: string,
-    settings: setting list,
-    univ_card: int,
-    tuple_assigns: tuple_assign list,
-    bounds: bound list,
-    int_bounds: int_bound list,
-    expr_assigns: expr_assign list,
-    formula: formula}
+  type problem =
+    {comment: string,
+     settings: setting list,
+     univ_card: int,
+     tuple_assigns: tuple_assign list,
+     bounds: bound list,
+     int_bounds: int_bound list,
+     expr_assigns: expr_assign list,
+     formula: formula}
 
   type raw_bound = n_ary_index * int list list
 
@@ -291,15 +289,15 @@
   AssignRelReg of n_ary_index * rel_expr |
   AssignIntReg of int * int_expr
 
-type problem = {
-  comment: string,
-  settings: setting list,
-  univ_card: int,
-  tuple_assigns: tuple_assign list,
-  bounds: bound list,
-  int_bounds: int_bound list,
-  expr_assigns: expr_assign list,
-  formula: formula}
+type problem =
+  {comment: string,
+   settings: setting list,
+   univ_card: int,
+   tuple_assigns: tuple_assign list,
+   bounds: bound list,
+   int_bounds: int_bound list,
+   expr_assigns: expr_assign list,
+   formula: formula}
 
 type raw_bound = n_ary_index * int list list
 
@@ -313,15 +311,13 @@
 
 exception SYNTAX of string * string
 
-type 'a fold_expr_funcs = {
-  formula_func: formula -> 'a -> 'a,
-  rel_expr_func: rel_expr -> 'a -> 'a,
-  int_expr_func: int_expr -> 'a -> 'a
-}
+type 'a fold_expr_funcs =
+  {formula_func: formula -> 'a -> 'a,
+   rel_expr_func: rel_expr -> 'a -> 'a,
+   int_expr_func: int_expr -> 'a -> 'a}
 
 (** Auxiliary functions on ML representation of Kodkod problems **)
 
-(* 'a fold_expr_funcs -> formula -> 'a -> 'a *)
 fun fold_formula (F : 'a fold_expr_funcs) formula =
   case formula of
     All (ds, f) => fold (fold_decl F) ds #> fold_formula F f
@@ -354,7 +350,6 @@
   | False => #formula_func F formula
   | True => #formula_func F formula
   | FormulaReg _ => #formula_func F formula
-(* 'a fold_expr_funcs -> rel_expr -> 'a -> 'a *)
 and fold_rel_expr F rel_expr =
   case rel_expr of
     RelLet (bs, r) => fold (fold_expr_assign F) bs #> fold_rel_expr F r
@@ -383,7 +378,6 @@
   | Rel _ => #rel_expr_func F rel_expr
   | Var _ => #rel_expr_func F rel_expr
   | RelReg _ => #rel_expr_func F rel_expr
-(* 'a fold_expr_funcs -> int_expr -> 'a -> 'a *)
 and fold_int_expr F int_expr =
   case int_expr of
     Sum (ds, i) => fold (fold_decl F) ds #> fold_int_expr F i
@@ -409,7 +403,6 @@
   | Signum i => fold_int_expr F i
   | Num _ => #int_expr_func F int_expr
   | IntReg _ => #int_expr_func F int_expr
-(* 'a fold_expr_funcs -> decl -> 'a -> 'a *)
 and fold_decl F decl =
   case decl of
     DeclNo (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r
@@ -417,21 +410,17 @@
   | DeclOne (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r
   | DeclSome (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r
   | DeclSet (x, r) => fold_rel_expr F (Var x) #> fold_rel_expr F r
-(* 'a fold_expr_funcs -> expr_assign -> 'a -> 'a *)
 and fold_expr_assign F assign =
   case assign of
     AssignFormulaReg (x, f) => fold_formula F (FormulaReg x) #> fold_formula F f
   | AssignRelReg (x, r) => fold_rel_expr F (RelReg x) #> fold_rel_expr F r
   | AssignIntReg (x, i) => fold_int_expr F (IntReg x) #> fold_int_expr F i
 
-type 'a fold_tuple_funcs = {
-  tuple_func: tuple -> 'a -> 'a,
-  tuple_set_func: tuple_set -> 'a -> 'a
-}
+type 'a fold_tuple_funcs =
+  {tuple_func: tuple -> 'a -> 'a,
+   tuple_set_func: tuple_set -> 'a -> 'a}
 
-(* 'a fold_tuple_funcs -> tuple -> 'a -> 'a *)
 fun fold_tuple (F : 'a fold_tuple_funcs) = #tuple_func F
-(* 'a fold_tuple_funcs -> tuple_set -> 'a -> 'a *)
 fun fold_tuple_set F tuple_set =
   case tuple_set of
     TupleUnion (ts1, ts2) => fold_tuple_set F ts1 #> fold_tuple_set F ts2
@@ -444,23 +433,18 @@
   | TupleArea (t1, t2) => fold_tuple F t1 #> fold_tuple F t2
   | TupleAtomSeq _ => #tuple_set_func F tuple_set
   | TupleSetReg _ => #tuple_set_func F tuple_set
-(* 'a fold_tuple_funcs -> tuple_assign -> 'a -> 'a *)
 fun fold_tuple_assign F assign =
   case assign of
     AssignTuple (x, t) => fold_tuple F (TupleReg x) #> fold_tuple F t
   | AssignTupleSet (x, ts) =>
     fold_tuple_set F (TupleSetReg x) #> fold_tuple_set F ts
-(* 'a fold_expr_funcs -> 'a fold_tuple_funcs -> bound -> 'a -> 'a *)
 fun fold_bound expr_F tuple_F (zs, tss) =
   fold (fold_rel_expr expr_F) (map (Rel o fst) zs)
   #> fold (fold_tuple_set tuple_F) tss
-(* 'a fold_tuple_funcs -> int_bound -> 'a -> 'a *)
 fun fold_int_bound F (_, tss) = fold (fold_tuple_set F) tss
 
-(* int -> int *)
 fun max_arity univ_card = floor (Math.ln 2147483647.0
                                  / Math.ln (Real.fromInt univ_card))
-(* rel_expr -> int *)
 fun arity_of_rel_expr (RelLet (_, r)) = arity_of_rel_expr r
   | arity_of_rel_expr (RelIf (_, r1, _)) = arity_of_rel_expr r1
   | arity_of_rel_expr (Union (r1, _)) = arity_of_rel_expr r1
@@ -487,23 +471,18 @@
   | arity_of_rel_expr (Rel (n, _)) = n
   | arity_of_rel_expr (Var (n, _)) = n
   | arity_of_rel_expr (RelReg (n, _)) = n
-(* rel_expr -> rel_expr -> int *)
 and sum_arities_of_rel_exprs r1 r2 = arity_of_rel_expr r1 + arity_of_rel_expr r2
-(* decl -> int *)
 and arity_of_decl (DeclNo ((n, _), _)) = n
   | arity_of_decl (DeclLone ((n, _), _)) = n
   | arity_of_decl (DeclOne ((n, _), _)) = n
   | arity_of_decl (DeclSome ((n, _), _)) = n
   | arity_of_decl (DeclSet ((n, _), _)) = n
 
-(* problem -> bool *)
 fun is_problem_trivially_false ({formula = False, ...} : problem) = true
   | is_problem_trivially_false _ = false
 
-(* string -> string list *)
 val chop_solver = take 2 o space_explode ","
 
-(* setting list * setting list -> bool *)
 fun settings_equivalent ([], []) = true
   | settings_equivalent ((key1, value1) :: settings1,
                          (key2, value2) :: settings2) =
@@ -513,7 +492,6 @@
     settings_equivalent (settings1, settings2)
   | settings_equivalent _ = false
 
-(* problem * problem -> bool *)
 fun problems_equivalent (p1 : problem, p2 : problem) =
   #univ_card p1 = #univ_card p2 andalso
   #formula p1 = #formula p2 andalso
@@ -525,16 +503,13 @@
 
 (** Serialization of problem **)
 
-(* int -> string *)
 fun base_name j =
   if j < 0 then string_of_int (~j - 1) ^ "'" else string_of_int j
 
-(* n_ary_index -> string -> string -> string -> string *)
 fun n_ary_name (1, j) prefix _ _ = prefix ^ base_name j
   | n_ary_name (2, j) _ prefix _ = prefix ^ base_name j
   | n_ary_name (n, j) _ _ prefix = prefix ^ string_of_int n ^ "_" ^ base_name j
 
-(* int -> string *)
 fun atom_name j = "A" ^ base_name j
 fun atom_seq_name (k, 0) = "u" ^ base_name k
   | atom_seq_name (k, j0) = "u" ^ base_name k ^ "@" ^ base_name j0
@@ -542,14 +517,12 @@
 fun rel_reg_name j = "$e" ^ base_name j
 fun int_reg_name j = "$i" ^ base_name j
 
-(* n_ary_index -> string *)
 fun tuple_name x = n_ary_name x "A" "P" "T"
 fun rel_name x = n_ary_name x "s" "r" "m"
 fun var_name x = n_ary_name x "S" "R" "M"
 fun tuple_reg_name x = n_ary_name x "$A" "$P" "$T"
 fun tuple_set_reg_name x = n_ary_name x "$a" "$p" "$t"
 
-(* string -> string *)
 fun inline_comment "" = ""
   | inline_comment comment =
     " /* " ^ translate_string (fn "\n" => " " | "*" => "* " | s => s) comment ^
@@ -557,10 +530,8 @@
 fun block_comment "" = ""
   | block_comment comment = prefix_lines "// " comment ^ "\n"
 
-(* (n_ary_index * string) -> string *)
 fun commented_rel_name (x, s) = rel_name x ^ inline_comment s
 
-(* tuple -> string *)
 fun string_for_tuple (Tuple js) = "[" ^ commas (map atom_name js) ^ "]"
   | string_for_tuple (TupleIndex x) = tuple_name x
   | string_for_tuple (TupleReg x) = tuple_reg_name x
@@ -571,7 +542,6 @@
 val prec_TupleProduct = 3
 val prec_TupleProject = 4
 
-(* tuple_set -> int *)
 fun precedence_ts (TupleUnion _) = prec_TupleUnion
   | precedence_ts (TupleDifference _) = prec_TupleUnion
   | precedence_ts (TupleIntersect _) = prec_TupleIntersect
@@ -579,10 +549,8 @@
   | precedence_ts (TupleProject _) = prec_TupleProject
   | precedence_ts _ = no_prec
 
-(* tuple_set -> string *)
 fun string_for_tuple_set tuple_set =
   let
-    (* tuple_set -> int -> string *)
     fun sub tuple_set outer_prec =
       let
         val prec = precedence_ts tuple_set
@@ -608,19 +576,16 @@
       end
   in sub tuple_set 0 end
 
-(* tuple_assign -> string *)
 fun string_for_tuple_assign (AssignTuple (x, t)) =
     tuple_reg_name x ^ " := " ^ string_for_tuple t ^ "\n"
   | string_for_tuple_assign (AssignTupleSet (x, ts)) =
     tuple_set_reg_name x ^ " := " ^ string_for_tuple_set ts ^ "\n"
 
-(* bound -> string *)
 fun string_for_bound (zs, tss) =
   "bounds " ^ commas (map commented_rel_name zs) ^ ": " ^
   (if length tss = 1 then "" else "[") ^ commas (map string_for_tuple_set tss) ^
   (if length tss = 1 then "" else "]") ^ "\n"
 
-(* int_bound -> string *)
 fun int_string_for_bound (opt_n, tss) =
   (case opt_n of
      SOME n => signed_string_of_int n ^ ": "
@@ -645,7 +610,6 @@
 val prec_Join = 18
 val prec_BitNot = 19
 
-(* formula -> int *)
 fun precedence_f (All _) = prec_All
   | precedence_f (Exist _) = prec_All
   | precedence_f (FormulaLet _) = prec_All
@@ -671,7 +635,6 @@
   | precedence_f False = no_prec
   | precedence_f True = no_prec
   | precedence_f (FormulaReg _) = no_prec
-(* rel_expr -> int *)
 and precedence_r (RelLet _) = prec_All
   | precedence_r (RelIf _) = prec_All
   | precedence_r (Union _) = prec_Add
@@ -697,7 +660,6 @@
   | precedence_r (Rel _) = no_prec
   | precedence_r (Var _) = no_prec
   | precedence_r (RelReg _) = no_prec
-(* int_expr -> int *)
 and precedence_i (Sum _) = prec_All
   | precedence_i (IntLet _) = prec_All
   | precedence_i (IntIf _) = prec_All
@@ -721,14 +683,11 @@
   | precedence_i (Num _) = no_prec
   | precedence_i (IntReg _) = no_prec
 
-(* (string -> unit) -> problem list -> unit *)
 fun write_problem_file out problems =
   let
-    (* formula -> unit *)
     fun out_outmost_f (And (f1, f2)) =
         (out_outmost_f f1; out "\n   && "; out_outmost_f f2)
       | out_outmost_f f = out_f f prec_And
-    (* formula -> int -> unit *)
     and out_f formula outer_prec =
       let
         val prec = precedence_f formula
@@ -773,7 +732,6 @@
          | FormulaReg j => out (formula_reg_name j));
         (if need_parens then out ")" else ())
       end
-    (* rel_expr -> int -> unit *)
     and out_r rel_expr outer_prec =
       let
         val prec = precedence_r rel_expr
@@ -813,7 +771,6 @@
          | RelReg (_, j) => out (rel_reg_name j));
         (if need_parens then out ")" else ())
       end
-    (* int_expr -> int -> unit *)
     and out_i int_expr outer_prec =
       let
         val prec = precedence_i int_expr
@@ -848,11 +805,9 @@
          | IntReg j => out (int_reg_name j));
         (if need_parens then out ")" else ())
       end
-    (* decl list -> unit *)
     and out_decls [] = ()
       | out_decls [d] = out_decl d
       | out_decls (d :: ds) = (out_decl d; out ", "; out_decls ds)
-    (* decl -> unit *)
     and out_decl (DeclNo (x, r)) =
         (out (var_name x); out " : no "; out_r r 0)
       | out_decl (DeclLone (x, r)) =
@@ -863,22 +818,18 @@
         (out (var_name x); out " : some "; out_r r 0)
       | out_decl (DeclSet (x, r)) =
         (out (var_name x); out " : set "; out_r r 0)
-    (* assign_expr list -> unit *)
     and out_assigns [] = ()
       | out_assigns [b] = out_assign b
       | out_assigns (b :: bs) = (out_assign b; out ", "; out_assigns bs)
-    (* assign_expr -> unit *)
     and out_assign (AssignFormulaReg (j, f)) =
         (out (formula_reg_name j); out " := "; out_f f 0)
       | out_assign (AssignRelReg ((_, j), r)) =
         (out (rel_reg_name j); out " := "; out_r r 0)
       | out_assign (AssignIntReg (j, i)) =
         (out (int_reg_name j); out " := "; out_i i 0)
-    (* int_expr list -> unit *)
     and out_columns [] = ()
       | out_columns [i] = out_i i 0
       | out_columns (i :: is) = (out_i i 0; out ", "; out_columns is)
-    (* problem -> unit *)
     and out_problem {comment, settings, univ_card, tuple_assigns, bounds,
                      int_bounds, expr_assigns, formula} =
         (out ("\n" ^ block_comment comment ^
@@ -896,19 +847,16 @@
          out "solve "; out_outmost_f formula; out ";\n")
   in
     out ("// This file was generated by Isabelle (most likely Nitpick)\n" ^
-         "// " ^ Date.fmt "%Y-%m-%d %H:%M:%S"
-                          (Date.fromTimeLocal (Time.now ())) ^ "\n");
+         "// " ^ Sledgehammer_Util.timestamp () ^ "\n");
     map out_problem problems
   end
 
 (** Parsing of solution **)
 
-(* string -> bool *)
 fun is_ident_char s =
   Symbol.is_ascii_letter s orelse Symbol.is_ascii_digit s orelse
   s = "_" orelse s = "'" orelse s = "$"
 
-(* string list -> string list *)
 fun strip_blanks [] = []
   | strip_blanks (" " :: ss) = strip_blanks ss
   | strip_blanks [s1, " "] = [s1]
@@ -919,29 +867,20 @@
       strip_blanks (s1 :: s2 :: ss)
   | strip_blanks (s :: ss) = s :: strip_blanks ss
 
-(* (string list -> 'a * string list) -> string list -> 'a list * string list *)
 fun scan_non_empty_list scan = scan ::: Scan.repeat ($$ "," |-- scan)
 fun scan_list scan = scan_non_empty_list scan || Scan.succeed []
-(* string list -> int * string list *)
 val scan_nat = Scan.repeat1 (Scan.one Symbol.is_ascii_digit)
                >> (the o Int.fromString o space_implode "")
-(*  string list -> (int * int) * string list *)
 val scan_rel_name = $$ "s" |-- scan_nat >> pair 1
                     || $$ "r" |-- scan_nat >> pair 2
                     || ($$ "m" |-- scan_nat --| $$ "_") -- scan_nat
-(* string list -> int * string list *)
 val scan_atom = $$ "A" |-- scan_nat
-(* string list -> int list * string list *)
 val scan_tuple = $$ "[" |-- scan_list scan_atom --| $$ "]"
-(* string list -> int list list * string list *)
 val scan_tuple_set = $$ "[" |-- scan_list scan_tuple --| $$ "]"
-(* string list -> ((int * int) * int list list) * string list *)
 val scan_assignment = (scan_rel_name --| $$ "=") -- scan_tuple_set
-(* string list -> ((int * int) * int list list) list * string list *)
 val scan_instance = Scan.this_string "relations:" |--
                     $$ "{" |-- scan_list scan_assignment --| $$ "}"
 
-(* string -> raw_bound list *)
 val parse_instance =
   fst o Scan.finite Symbol.stopper
             (Scan.error (!! (fn _ =>
@@ -954,12 +893,10 @@
 val outcome_marker = "---OUTCOME---\n"
 val instance_marker = "---INSTANCE---\n"
 
-(* string -> substring -> string *)
 fun read_section_body marker =
   Substring.string o fst o Substring.position "\n\n"
   o Substring.triml (size marker)
 
-(* substring -> raw_bound list *)
 fun read_next_instance s =
   let val s = Substring.position instance_marker s |> snd in
     if Substring.isEmpty s then
@@ -968,8 +905,6 @@
       read_section_body instance_marker s |> parse_instance
   end
 
-(* int -> substring * (int * raw_bound list) list * int list
-   -> substring * (int * raw_bound list) list * int list *)
 fun read_next_outcomes j (s, ps, js) =
   let val (s1, s2) = Substring.position outcome_marker s in
     if Substring.isEmpty s2 orelse
@@ -991,8 +926,6 @@
       end
   end
 
-(* substring * (int * raw_bound list) list * int list
-   -> (int * raw_bound list) list * int list *)
 fun read_next_problems (s, ps, js) =
   let val s = Substring.position problem_marker s |> snd in
     if Substring.isEmpty s then
@@ -1008,7 +941,6 @@
   handle Option.Option => raise SYNTAX ("Kodkod.read_next_problems",
                                         "expected number after \"PROBLEM\"")
 
-(* Path.T -> bool * ((int * raw_bound list) list * int list) *)
 fun read_output_file path =
   (false, read_next_problems (Substring.full (File.read path), [], [])
           |>> rev ||> rev)
@@ -1018,7 +950,6 @@
 
 val created_temp_dir = Unsynchronized.ref false
 
-(* bool -> string * string *)
 fun serial_string_and_temporary_dir_for_overlord overlord =
   if overlord then
     ("", getenv "ISABELLE_HOME_USER")
@@ -1033,14 +964,12 @@
    is partly due to the JVM and partly due to the ML "bash" function. *)
 val fudge_ms = 250
 
-(* Time.time option -> int *)
 fun milliseconds_until_deadline deadline =
   case deadline of
     NONE => ~1
   | SOME time =>
     Int.max (0, Time.toMilliseconds (Time.- (time, Time.now ())) - fudge_ms)
 
-(* bool -> Time.time option -> int -> int -> problem list -> outcome *)
 fun uncached_solve_any_problem overlord deadline max_threads max_solutions
                                problems =
   let
@@ -1052,7 +981,6 @@
                                         (0 upto length problems - 1 ~~ problems)
     val triv_js = filter_out (AList.defined (op =) indexed_problems)
                              (0 upto length problems - 1)
-    (* int -> int *)
     val reindex = fst o nth indexed_problems
   in
     if null indexed_problems then
@@ -1061,18 +989,15 @@
       let
         val (serial_str, temp_dir) =
           serial_string_and_temporary_dir_for_overlord overlord
-        (* string -> Path.T *)
         fun path_for suf =
           Path.explode (temp_dir ^ "/kodkodi" ^ serial_str ^ "." ^ suf)
         val in_path = path_for "kki"
         val in_buf = Unsynchronized.ref Buffer.empty
-        (* string -> unit *)
         fun out s = Unsynchronized.change in_buf (Buffer.add s)
         val out_path = path_for "out"
         val err_path = path_for "err"
         val _ = write_problem_file out (map snd indexed_problems)
         val _ = File.write_buffer in_path (!in_buf)
-        (* unit -> unit *)
         fun remove_temporary_files () =
           if overlord then ()
           else List.app (K () o try File.rm) [in_path, out_path, err_path]
@@ -1151,10 +1076,8 @@
   Synchronized.var "Kodkod.cached_outcome"
                    (NONE : ((int * problem list) * outcome) option)
 
-(* bool -> Time.time option -> int -> int -> problem list -> outcome *)
 fun solve_any_problem overlord deadline max_threads max_solutions problems =
   let
-    (* unit -> outcome *)
     fun do_solve () = uncached_solve_any_problem overlord deadline max_threads
                                                  max_solutions problems
   in
--- a/src/HOL/Tools/Nitpick/kodkod_sat.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/kodkod_sat.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -51,8 +51,6 @@
    ("HaifaSat", ExternalV2 (ToStdout, "HAIFASAT_HOME", "HaifaSat", ["-p", "1"],
                             "s SATISFIABLE", "v ", "s UNSATISFIABLE"))]
 
-(* string -> sink -> string -> string -> string list -> string list
-   -> (string * (unit -> string list)) option *)
 fun dynamic_entry_for_external name dev home exec args markers =
   case getenv home of
     "" => NONE
@@ -74,8 +72,6 @@
                       if dev = ToFile then out_file else ""] @ markers @
                       (if dev = ToFile then [out_file] else []) @ args
                    end)
-(* bool -> bool -> string * sat_solver_info
-   -> (string * (unit -> string list)) option *)
 fun dynamic_entry_for_info incremental (name, Internal (Java, mode, ss)) =
     if incremental andalso mode = Batch then NONE else SOME (name, K ss)
   | dynamic_entry_for_info incremental (name, Internal (JNI, mode, ss)) =
@@ -98,20 +94,15 @@
         (name, ExternalV2 (dev, home, exec, args, m1, m2, m3)) =
     dynamic_entry_for_external name dev home exec args [m1, m2, m3]
   | dynamic_entry_for_info true _ = NONE
-(* bool -> (string * (unit -> string list)) list *)
 fun dynamic_list incremental =
   map_filter (dynamic_entry_for_info incremental) static_list
 
-(* bool -> string list *)
 val configured_sat_solvers = map fst o dynamic_list
-(* bool -> string *)
 val smart_sat_solver_name = fst o hd o dynamic_list
 
-(* string -> string * string list *)
 fun sat_solver_spec name =
   let
     val dyn_list = dynamic_list false
-    (* (string * 'a) list -> string *)
     fun enum_solvers solvers =
       commas (distinct (op =) (map (quote o fst) solvers))
   in
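The kodkod_sat.ML hunk above likewise strips type comments from the solver table. The underlying idea, offering an external solver only when its home environment variable is set, can be sketched in plain SML as follows; OS.Process.getEnv stands in for Isabelle's getenv, and the solver names and variables are illustrative.

(* Keep a solver entry only if its home variable is defined, pairing the
   solver name with the full path of its executable. *)
fun dynamic_entry (name, home_var, exec) =
  case OS.Process.getEnv home_var of
    NONE => NONE
  | SOME home => SOME (name, home ^ "/" ^ exec)

val configured_solvers =
  List.mapPartial dynamic_entry
    [("MiniSat", "MINISAT_HOME", "minisat"),
     ("zChaff", "ZCHAFF_HOME", "zchaff")]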
--- a/src/HOL/Tools/Nitpick/minipick.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/minipick.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -35,7 +35,6 @@
 
 datatype rep = SRep | RRep
 
-(* Proof.context -> typ -> unit *)
 fun check_type ctxt (Type (@{type_name fun}, Ts)) =
     List.app (check_type ctxt) Ts
   | check_type ctxt (Type (@{type_name "*"}, Ts)) =
@@ -46,7 +45,6 @@
   | check_type ctxt T =
     raise NOT_SUPPORTED ("type " ^ quote (Syntax.string_of_typ ctxt T))
 
-(* rep -> (typ -> int) -> typ -> int list *)
 fun atom_schema_of SRep card (Type (@{type_name fun}, [T1, T2])) =
     replicate_list (card T1) (atom_schema_of SRep card T2)
   | atom_schema_of RRep card (Type (@{type_name fun}, [T1, @{typ bool}])) =
@@ -56,42 +54,32 @@
   | atom_schema_of _ card (Type (@{type_name "*"}, Ts)) =
     maps (atom_schema_of SRep card) Ts
   | atom_schema_of _ card T = [card T]
-(* rep -> (typ -> int) -> typ -> int *)
 val arity_of = length ooo atom_schema_of
 
-(* (typ -> int) -> typ list -> int -> int *)
 fun index_for_bound_var _ [_] 0 = 0
   | index_for_bound_var card (_ :: Ts) 0 =
     index_for_bound_var card Ts 0 + arity_of SRep card (hd Ts)
   | index_for_bound_var card Ts n = index_for_bound_var card (tl Ts) (n - 1)
-(* (typ -> int) -> rep -> typ list -> int -> rel_expr list *)
 fun vars_for_bound_var card R Ts j =
   map (curry Var 1) (index_seq (index_for_bound_var card Ts j)
                                (arity_of R card (nth Ts j)))
-(* (typ -> int) -> rep -> typ list -> int -> rel_expr *)
 val rel_expr_for_bound_var = foldl1 Product oooo vars_for_bound_var
-(* rep -> (typ -> int) -> typ list -> typ -> decl list *)
 fun decls_for R card Ts T =
   map2 (curry DeclOne o pair 1)
        (index_seq (index_for_bound_var card (T :: Ts) 0)
                   (arity_of R card (nth (T :: Ts) 0)))
        (map (AtomSeq o rpair 0) (atom_schema_of R card T))
 
-(* int list -> rel_expr *)
 val atom_product = foldl1 Product o map Atom
 
 val false_atom = Atom 0
 val true_atom = Atom 1
 
-(* rel_expr -> formula *)
 fun formula_from_atom r = RelEq (r, true_atom)
-(* formula -> rel_expr *)
 fun atom_from_formula f = RelIf (f, true_atom, false_atom)
 
-(* Proof.context -> (typ -> int) -> styp list -> term -> formula *)
 fun kodkod_formula_from_term ctxt card frees =
   let
-    (* typ -> rel_expr -> rel_expr *)
     fun R_rep_from_S_rep (T as Type (@{type_name fun}, [T1, @{typ bool}])) r =
         let
           val jss = atom_schema_of SRep card T1 |> map (rpair 0)
@@ -117,13 +105,11 @@
           |> foldl1 Union
         end
       | R_rep_from_S_rep _ r = r
-    (* typ list -> typ -> rel_expr -> rel_expr *)
     fun S_rep_from_R_rep Ts (T as Type (@{type_name fun}, _)) r =
         Comprehension (decls_for SRep card Ts T,
             RelEq (R_rep_from_S_rep T
                        (rel_expr_for_bound_var card SRep (T :: Ts) 0), r))
       | S_rep_from_R_rep _ _ r = r
-    (* typ list -> term -> formula *)
     fun to_F Ts t =
       (case t of
          @{const Not} $ t1 => Not (to_F Ts t1)
@@ -154,28 +140,26 @@
        | Const (s, _) => raise NOT_SUPPORTED ("constant " ^ quote s)
        | _ => raise TERM ("Minipick.kodkod_formula_from_term.to_F", [t]))
       handle SAME () => formula_from_atom (to_R_rep Ts t)
-    (* typ list -> term -> rel_expr *)
     and to_S_rep Ts t =
-        case t of
-          Const (@{const_name Pair}, _) $ t1 $ t2 =>
-          Product (to_S_rep Ts t1, to_S_rep Ts t2)
-        | Const (@{const_name Pair}, _) $ _ => to_S_rep Ts (eta_expand Ts t 1)
-        | Const (@{const_name Pair}, _) => to_S_rep Ts (eta_expand Ts t 2)
-        | Const (@{const_name fst}, _) $ t1 =>
-          let val fst_arity = arity_of SRep card (fastype_of1 (Ts, t)) in
-            Project (to_S_rep Ts t1, num_seq 0 fst_arity)
-          end
-        | Const (@{const_name fst}, _) => to_S_rep Ts (eta_expand Ts t 1)
-        | Const (@{const_name snd}, _) $ t1 =>
-          let
-            val pair_arity = arity_of SRep card (fastype_of1 (Ts, t1))
-            val snd_arity = arity_of SRep card (fastype_of1 (Ts, t))
-            val fst_arity = pair_arity - snd_arity
-          in Project (to_S_rep Ts t1, num_seq fst_arity snd_arity) end
-        | Const (@{const_name snd}, _) => to_S_rep Ts (eta_expand Ts t 1)
-        | Bound j => rel_expr_for_bound_var card SRep Ts j
-        | _ => S_rep_from_R_rep Ts (fastype_of1 (Ts, t)) (to_R_rep Ts t)
-    (* term -> rel_expr *)
+      case t of
+        Const (@{const_name Pair}, _) $ t1 $ t2 =>
+        Product (to_S_rep Ts t1, to_S_rep Ts t2)
+      | Const (@{const_name Pair}, _) $ _ => to_S_rep Ts (eta_expand Ts t 1)
+      | Const (@{const_name Pair}, _) => to_S_rep Ts (eta_expand Ts t 2)
+      | Const (@{const_name fst}, _) $ t1 =>
+        let val fst_arity = arity_of SRep card (fastype_of1 (Ts, t)) in
+          Project (to_S_rep Ts t1, num_seq 0 fst_arity)
+        end
+      | Const (@{const_name fst}, _) => to_S_rep Ts (eta_expand Ts t 1)
+      | Const (@{const_name snd}, _) $ t1 =>
+        let
+          val pair_arity = arity_of SRep card (fastype_of1 (Ts, t1))
+          val snd_arity = arity_of SRep card (fastype_of1 (Ts, t))
+          val fst_arity = pair_arity - snd_arity
+        in Project (to_S_rep Ts t1, num_seq fst_arity snd_arity) end
+      | Const (@{const_name snd}, _) => to_S_rep Ts (eta_expand Ts t 1)
+      | Bound j => rel_expr_for_bound_var card SRep Ts j
+      | _ => S_rep_from_R_rep Ts (fastype_of1 (Ts, t)) (to_R_rep Ts t)
     and to_R_rep Ts t =
       (case t of
          @{const Not} => to_R_rep Ts (eta_expand Ts t 1)
@@ -282,7 +266,6 @@
       handle SAME () => R_rep_from_S_rep (fastype_of1 (Ts, t)) (to_S_rep Ts t)
   in to_F [] end
 
-(* (typ -> int) -> int -> styp -> bound *)
 fun bound_for_free card i (s, T) =
   let val js = atom_schema_of RRep card T in
     ([((length js, i), s)],
@@ -290,7 +273,6 @@
                    |> tuple_set_from_atom_schema])
   end
 
-(* (typ -> int) -> typ list -> typ -> rel_expr -> formula *)
 fun declarative_axiom_for_rel_expr card Ts (Type (@{type_name fun}, [T1, T2]))
                                    r =
     if body_type T2 = bool_T then
@@ -300,15 +282,12 @@
            declarative_axiom_for_rel_expr card (T1 :: Ts) T2
                (List.foldl Join r (vars_for_bound_var card SRep (T1 :: Ts) 0)))
   | declarative_axiom_for_rel_expr _ _ _ r = One r
-(* (typ -> int) -> bool -> int -> styp -> formula *)
 fun declarative_axiom_for_free card i (_, T) =
   declarative_axiom_for_rel_expr card [] T (Rel (arity_of RRep card T, i))
 
-(* Proof.context -> (typ -> int) -> term -> problem *)
 fun kodkod_problem_from_term ctxt raw_card t =
   let
     val thy = ProofContext.theory_of ctxt
-    (* typ -> int *)
     fun card (Type (@{type_name fun}, [T1, T2])) =
         reasonable_power (card T2) (card T1)
       | card (Type (@{type_name "*"}, [T1, T2])) = card T1 * card T2
@@ -328,7 +307,6 @@
      bounds = bounds, int_bounds = [], expr_assigns = [], formula = formula}
   end
 
-(* theory -> problem list -> string *)
 fun solve_any_kodkod_problem thy problems =
   let
     val {overlord, ...} = Nitpick_Isar.default_params thy []
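In minipick.ML, the cardinality function visible in the last hunk drives kodkod_problem_from_term: function types multiply out as powers, product types as products. A standalone plain-SML sketch of that computation, with a naive power function standing in for reasonable_power and an illustrative typ datatype:

datatype typ = Base of string | Fun of typ * typ | Prod of typ * typ

fun power _ 0 = 1
  | power b n = b * power b (n - 1)

(* base_card supplies the size of each base type. *)
fun card base_card (Fun (T1, T2)) =
    power (card base_card T2) (card base_card T1)
  | card base_card (Prod (T1, T2)) = card base_card T1 * card base_card T2
  | card base_card (Base s) = base_card s

(* Example: card (bool -> bool * bool) = (2 * 2) ^ 2 = 16. *)
val n = card (fn _ => 2) (Fun (Base "bool", Prod (Base "bool", Base "bool")))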
--- a/src/HOL/Tools/Nitpick/nitpick.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -9,51 +9,45 @@
 sig
   type styp = Nitpick_Util.styp
   type term_postprocessor = Nitpick_Model.term_postprocessor
-  type params = {
-    cards_assigns: (typ option * int list) list,
-    maxes_assigns: (styp option * int list) list,
-    iters_assigns: (styp option * int list) list,
-    bitss: int list,
-    bisim_depths: int list,
-    boxes: (typ option * bool option) list,
-    finitizes: (typ option * bool option) list,
-    monos: (typ option * bool option) list,
-    stds: (typ option * bool) list,
-    wfs: (styp option * bool option) list,
-    sat_solver: string,
-    blocking: bool,
-    falsify: bool,
-    debug: bool,
-    verbose: bool,
-    overlord: bool,
-    user_axioms: bool option,
-    assms: bool,
-    merge_type_vars: bool,
-    binary_ints: bool option,
-    destroy_constrs: bool,
-    specialize: bool,
-    skolemize: bool,
-    star_linear_preds: bool,
-    uncurry: bool,
-    fast_descrs: bool,
-    peephole_optim: bool,
-    timeout: Time.time option,
-    tac_timeout: Time.time option,
-    sym_break: int,
-    sharing_depth: int,
-    flatten_props: bool,
-    max_threads: int,
-    show_skolems: bool,
-    show_datatypes: bool,
-    show_consts: bool,
-    evals: term list,
-    formats: (term option * int list) list,
-    max_potential: int,
-    max_genuine: int,
-    check_potential: bool,
-    check_genuine: bool,
-    batch_size: int,
-    expect: string}
+  type params =
+    {cards_assigns: (typ option * int list) list,
+     maxes_assigns: (styp option * int list) list,
+     iters_assigns: (styp option * int list) list,
+     bitss: int list,
+     bisim_depths: int list,
+     boxes: (typ option * bool option) list,
+     finitizes: (typ option * bool option) list,
+     monos: (typ option * bool option) list,
+     stds: (typ option * bool) list,
+     wfs: (styp option * bool option) list,
+     sat_solver: string,
+     blocking: bool,
+     falsify: bool,
+     debug: bool,
+     verbose: bool,
+     overlord: bool,
+     user_axioms: bool option,
+     assms: bool,
+     merge_type_vars: bool,
+     binary_ints: bool option,
+     destroy_constrs: bool,
+     specialize: bool,
+     star_linear_preds: bool,
+     fast_descrs: bool,
+     peephole_optim: bool,
+     timeout: Time.time option,
+     tac_timeout: Time.time option,
+     max_threads: int,
+     show_datatypes: bool,
+     show_consts: bool,
+     evals: term list,
+     formats: (term option * int list) list,
+     max_potential: int,
+     max_genuine: int,
+     check_potential: bool,
+     check_genuine: bool,
+     batch_size: int,
+     expect: string}
 
   val register_frac_type : string -> (string * string) list -> theory -> theory
   val unregister_frac_type : string -> theory -> theory
@@ -85,63 +79,56 @@
 
 structure KK = Kodkod
 
-type params = {
-  cards_assigns: (typ option * int list) list,
-  maxes_assigns: (styp option * int list) list,
-  iters_assigns: (styp option * int list) list,
-  bitss: int list,
-  bisim_depths: int list,
-  boxes: (typ option * bool option) list,
-  finitizes: (typ option * bool option) list,
-  monos: (typ option * bool option) list,
-  stds: (typ option * bool) list,
-  wfs: (styp option * bool option) list,
-  sat_solver: string,
-  blocking: bool,
-  falsify: bool,
-  debug: bool,
-  verbose: bool,
-  overlord: bool,
-  user_axioms: bool option,
-  assms: bool,
-  merge_type_vars: bool,
-  binary_ints: bool option,
-  destroy_constrs: bool,
-  specialize: bool,
-  skolemize: bool,
-  star_linear_preds: bool,
-  uncurry: bool,
-  fast_descrs: bool,
-  peephole_optim: bool,
-  timeout: Time.time option,
-  tac_timeout: Time.time option,
-  sym_break: int,
-  sharing_depth: int,
-  flatten_props: bool,
-  max_threads: int,
-  show_skolems: bool,
-  show_datatypes: bool,
-  show_consts: bool,
-  evals: term list,
-  formats: (term option * int list) list,
-  max_potential: int,
-  max_genuine: int,
-  check_potential: bool,
-  check_genuine: bool,
-  batch_size: int,
-  expect: string}
+type params =
+  {cards_assigns: (typ option * int list) list,
+   maxes_assigns: (styp option * int list) list,
+   iters_assigns: (styp option * int list) list,
+   bitss: int list,
+   bisim_depths: int list,
+   boxes: (typ option * bool option) list,
+   finitizes: (typ option * bool option) list,
+   monos: (typ option * bool option) list,
+   stds: (typ option * bool) list,
+   wfs: (styp option * bool option) list,
+   sat_solver: string,
+   blocking: bool,
+   falsify: bool,
+   debug: bool,
+   verbose: bool,
+   overlord: bool,
+   user_axioms: bool option,
+   assms: bool,
+   merge_type_vars: bool,
+   binary_ints: bool option,
+   destroy_constrs: bool,
+   specialize: bool,
+   star_linear_preds: bool,
+   fast_descrs: bool,
+   peephole_optim: bool,
+   timeout: Time.time option,
+   tac_timeout: Time.time option,
+   max_threads: int,
+   show_datatypes: bool,
+   show_consts: bool,
+   evals: term list,
+   formats: (term option * int list) list,
+   max_potential: int,
+   max_genuine: int,
+   check_potential: bool,
+   check_genuine: bool,
+   batch_size: int,
+   expect: string}
 
-type problem_extension = {
-  free_names: nut list,
-  sel_names: nut list,
-  nonsel_names: nut list,
-  rel_table: nut NameTable.table,
-  unsound: bool,
-  scope: scope}
-
+type problem_extension =
+  {free_names: nut list,
+   sel_names: nut list,
+   nonsel_names: nut list,
+   rel_table: nut NameTable.table,
+   unsound: bool,
+   scope: scope}
+

 type rich_problem = KK.problem * problem_extension
 
-(* Proof.context -> string -> term list -> Pretty.T list *)
 fun pretties_for_formulas _ _ [] = []
   | pretties_for_formulas ctxt s ts =
     [Pretty.str (s ^ plural_s_for_list ts ^ ":"),
@@ -152,10 +139,8 @@
                                  Pretty.str (if j = 1 then "." else ";")])
                (length ts downto 1) ts))]
 
-(* unit -> string *)
 fun install_java_message () =
   "Nitpick requires a Java 1.5 virtual machine called \"java\"."
-(* unit -> string *)
 fun install_kodkodi_message () =
   "Nitpick requires the external Java program Kodkodi. To install it, download \
   \the package from Isabelle's web page and add the \"kodkodi-x.y.z\" \
@@ -167,35 +152,27 @@
 val max_unsound_delay_ms = 200
 val max_unsound_delay_percent = 2
 
-(* Time.time option -> int *)
 fun unsound_delay_for_timeout NONE = max_unsound_delay_ms
   | unsound_delay_for_timeout (SOME timeout) =
     Int.max (0, Int.min (max_unsound_delay_ms,
                          Time.toMilliseconds timeout
                          * max_unsound_delay_percent div 100))
 
-(* Time.time option -> bool *)
 fun passed_deadline NONE = false
   | passed_deadline (SOME time) = Time.compare (Time.now (), time) <> LESS
 
-(* ('a * bool option) list -> bool *)
 fun none_true assigns = forall (not_equal (SOME true) o snd) assigns
 
 val syntactic_sorts =
   @{sort "{default,zero,one,plus,minus,uminus,times,inverse,abs,sgn,ord,eq}"} @
   @{sort number}
-(* typ -> bool *)
 fun has_tfree_syntactic_sort (TFree (_, S as _ :: _)) =
     subset (op =) (S, syntactic_sorts)
   | has_tfree_syntactic_sort _ = false
-(* term -> bool *)
 val has_syntactic_sorts = exists_type (exists_subtype has_tfree_syntactic_sort)
 
-(* (unit -> string) -> Pretty.T *)
 fun plazy f = Pretty.blk (0, pstrs (f ()))
 
-(* Time.time -> Proof.state -> params -> bool -> int -> int -> int
-   -> (term * term) list -> term list -> term -> string * Proof.state *)
 fun pick_them_nits_in_term deadline state (params : params) auto i n step
                            subst orig_assm_ts orig_t =
   let
@@ -211,14 +188,11 @@
     val {cards_assigns, maxes_assigns, iters_assigns, bitss, bisim_depths,
          boxes, finitizes, monos, stds, wfs, sat_solver, falsify, debug,
          verbose, overlord, user_axioms, assms, merge_type_vars, binary_ints,
-         destroy_constrs, specialize, skolemize, star_linear_preds, uncurry,
-         fast_descrs, peephole_optim, tac_timeout, sym_break, sharing_depth,
-         flatten_props, max_threads, show_skolems, show_datatypes, show_consts,
+         destroy_constrs, specialize, star_linear_preds, fast_descrs,
+         peephole_optim, tac_timeout, max_threads, show_datatypes, show_consts,
          evals, formats, max_potential, max_genuine, check_potential,
-         check_genuine, batch_size, ...} =
-      params
+         check_genuine, batch_size, ...} = params
     val state_ref = Unsynchronized.ref state
-    (* Pretty.T -> unit *)
     val pprint =
       if auto then
         Unsynchronized.change state_ref o Proof.goal_message o K
@@ -227,22 +201,17 @@
       else
         (fn s => (priority s; if debug then tracing s else ()))
         o Pretty.string_of
-    (* (unit -> Pretty.T) -> unit *)
     fun pprint_m f = () |> not auto ? pprint o f
     fun pprint_v f = () |> verbose ? pprint o f
     fun pprint_d f = () |> debug ? pprint o f
-    (* string -> unit *)
     val print = pprint o curry Pretty.blk 0 o pstrs
     val print_g = pprint o Pretty.str
-    (* (unit -> string) -> unit *)
     val print_m = pprint_m o K o plazy
     val print_v = pprint_v o K o plazy
 
-    (* unit -> unit *)
     fun check_deadline () =
       if debug andalso passed_deadline deadline then raise TimeLimit.TimeOut
       else ()
-    (* unit -> 'a *)
     fun do_interrupted () =
       if passed_deadline deadline then raise TimeLimit.TimeOut
       else raise Interrupt
@@ -288,8 +257,7 @@
       {thy = thy, ctxt = ctxt, max_bisim_depth = max_bisim_depth, boxes = boxes,
        stds = stds, wfs = wfs, user_axioms = user_axioms, debug = debug,
        binary_ints = binary_ints, destroy_constrs = destroy_constrs,
-       specialize = specialize, skolemize = skolemize,
-       star_linear_preds = star_linear_preds, uncurry = uncurry,
+       specialize = specialize, star_linear_preds = star_linear_preds,
        fast_descrs = fast_descrs, tac_timeout = tac_timeout, evals = evals,
        case_names = case_names, def_table = def_table,
        nondef_table = nondef_table, user_nondefs = user_nondefs,
@@ -307,7 +275,6 @@
     val got_all_user_axioms =
       got_all_mono_user_axioms andalso no_poly_user_axioms
 
-    (* styp * (bool * bool) -> unit *)
     fun print_wf (x, (gfp, wf)) =
       pprint (Pretty.blk (0,
           pstrs ("The " ^ (if gfp then "co" else "") ^ "inductive predicate \"")
@@ -344,18 +311,16 @@
 *)
 
     val unique_scope = forall (curry (op =) 1 o length o snd) cards_assigns
-    (* typ list -> string -> string *)
     fun monotonicity_message Ts extra =
       let val ss = map (quote o string_for_type ctxt) Ts in
         "The type" ^ plural_s_for_list ss ^ " " ^
-        space_implode " " (Sledgehammer_Util.serial_commas "and" ss) ^ " " ^
+        space_implode " " (serial_commas "and" ss) ^ " " ^
         (if none_true monos then
            "passed the monotonicity test"
          else
            (if length ss = 1 then "is" else "are") ^ " considered monotonic") ^
         ". " ^ extra
       end
-    (* typ -> bool *)
     fun is_type_fundamentally_monotonic T =
       (is_datatype thy stds T andalso not (is_quot_type thy T) andalso
        (not (is_pure_typedef thy T) orelse is_univ_typedef thy T)) orelse
@@ -416,7 +381,6 @@
         ()
     (* This detection code is an ugly hack. Fortunately, it is used only to
        provide a hint to the user. *)
-    (* string * (Rule_Cases.T * bool) -> bool *)
     fun is_struct_induct_step (name, (Rule_Cases.Case {fixes, assumes, ...}, _)) =
       not (null fixes) andalso
       exists (String.isSuffix ".hyps" o fst) assumes andalso
@@ -439,10 +403,10 @@
     val _ = List.app (print_g o string_for_type ctxt) nonmono_Ts
 *)
 
-    val need_incremental = Int.max (max_potential, max_genuine) >= 2
-    val effective_sat_solver =
+    val incremental = Int.max (max_potential, max_genuine) >= 2
+    val actual_sat_solver =
       if sat_solver <> "smart" then
-        if need_incremental andalso
+        if incremental andalso
            not (member (op =) (Kodkod_SAT.configured_sat_solvers true)
                        sat_solver) then
           (print_m (K ("An incremental SAT solver is required: \"SAT4J\" will \
@@ -451,21 +415,19 @@
         else
           sat_solver
       else
-        Kodkod_SAT.smart_sat_solver_name need_incremental
+        Kodkod_SAT.smart_sat_solver_name incremental
     val _ =
       if sat_solver = "smart" then
-        print_v (fn () => "Using SAT solver " ^ quote effective_sat_solver ^
-                          ". The following" ^
-                          (if need_incremental then " incremental " else " ") ^
-                          "solvers are configured: " ^
-                          commas (map quote (Kodkod_SAT.configured_sat_solvers
-                                                       need_incremental)) ^ ".")
+        print_v (fn () =>
+            "Using SAT solver " ^ quote actual_sat_solver ^ ". The following" ^
+            (if incremental then " incremental " else " ") ^
+            "solvers are configured: " ^
+            commas_quote (Kodkod_SAT.configured_sat_solvers incremental) ^ ".")
       else
         ()
 
     val too_big_scopes = Unsynchronized.ref []
 
-    (* bool -> scope -> rich_problem option *)
     fun problem_for_scope unsound
             (scope as {card_assigns, bits, bisim_depth, datatypes, ofs, ...}) =
       let
@@ -481,7 +443,6 @@
                          (Typtab.dest ofs)
 *)
         val all_exact = forall (is_exact_type datatypes true) all_Ts
-        (* nut list -> rep NameTable.table -> nut list * rep NameTable.table *)
         val repify_consts = choose_reps_for_consts scope all_exact
         val main_j0 = offset_of_type ofs bool_T
         val (nat_card, nat_j0) = spec_of_type scope nat_T
@@ -495,9 +456,9 @@
         val (sel_names, rep_table) = choose_reps_for_all_sels scope rep_table
         val (nonsel_names, rep_table) = repify_consts nonsel_names rep_table
         val min_highest_arity =
-          NameTable.fold (curry Int.max o arity_of_rep o snd) rep_table 1
+          NameTable.fold (Integer.max o arity_of_rep o snd) rep_table 1
         val min_univ_card =
-          NameTable.fold (curry Int.max o min_univ_card_of_rep o snd) rep_table
+          NameTable.fold (Integer.max o min_univ_card_of_rep o snd) rep_table
                          (univ_card nat_card int_card main_j0 [] KK.True)
         val _ = check_arity min_univ_card min_highest_arity
 
@@ -524,20 +485,20 @@
         val comment = (if unsound then "unsound" else "sound") ^ "\n" ^
                       PrintMode.setmp [] multiline_string_for_scope scope
         val kodkod_sat_solver =
-          Kodkod_SAT.sat_solver_spec effective_sat_solver |> snd
+          Kodkod_SAT.sat_solver_spec actual_sat_solver |> snd
         val bit_width = if bits = 0 then 16 else bits + 1
-        val delay = if unsound then
-                      Option.map (fn time => Time.- (time, Time.now ()))
-                                 deadline
-                      |> unsound_delay_for_timeout
-                    else
-                      0
-        val settings = [("solver", commas (map quote kodkod_sat_solver)),
+        val delay =
+          if unsound then
+            Option.map (fn time => Time.- (time, Time.now ())) deadline
+            |> unsound_delay_for_timeout
+          else
+            0
+        val settings = [("solver", commas_quote kodkod_sat_solver),
                         ("skolem_depth", "-1"),
                         ("bit_width", string_of_int bit_width),
-                        ("symmetry_breaking", signed_string_of_int sym_break),
-                        ("sharing", signed_string_of_int sharing_depth),
-                        ("flatten", Bool.toString flatten_props),
+                        ("symmetry_breaking", "20"),
+                        ("sharing", "3"),
+                        ("flatten", "false"),
                         ("delay", signed_string_of_int delay)]
         val plain_rels = free_rels @ other_rels
         val plain_bounds = map (bound_for_plain_rel ctxt debug) plain_rels
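The hunks above hard-code the former sym_break, sharing_depth and flatten_props options as fixed Kodkodi settings and rename the solver-selection bindings to incremental and actual_sat_solver. A standalone plain-SML sketch of that selection logic, with illustrative solver lists and without the warning the original prints when falling back to SAT4J:

(* An incremental SAT solver is needed as soon as more than one model is
   requested; a non-incremental explicit choice then falls back to SAT4J,
   and "smart" picks the first suitable configured solver. *)
fun actual_sat_solver requested max_potential max_genuine =
  let
    val incremental = Int.max (max_potential, max_genuine) >= 2
    val incremental_solvers = ["SAT4J", "MiniSat_JNI"]
    val all_solvers = "zChaff" :: incremental_solvers
  in
    if requested <> "smart" then
      if incremental andalso
         not (List.exists (fn s => s = requested) incremental_solvers) then
        "SAT4J"
      else
        requested
    else
      hd (if incremental then incremental_solvers else all_solvers)
  end

(* Example: asking for zChaff with up to 2 genuine models falls back to SAT4J. *)
val solver = actual_sat_solver "zChaff" 1 2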
@@ -605,22 +566,18 @@
     val checked_problems = Unsynchronized.ref (SOME [])
     val met_potential = Unsynchronized.ref 0
 
-    (* rich_problem list -> int list -> unit *)
     fun update_checked_problems problems =
       List.app (Unsynchronized.change checked_problems o Option.map o cons
                 o nth problems)
-    (* string -> unit *)
     fun show_kodkod_warning "" = ()
       | show_kodkod_warning s = print_m (fn () => "Kodkod warning: " ^ s ^ ".")
 
-    (* bool -> KK.raw_bound list -> problem_extension -> bool * bool option *)
     fun print_and_check_model genuine bounds
             ({free_names, sel_names, nonsel_names, rel_table, scope, ...}
              : problem_extension) =
       let
         val (reconstructed_model, codatatypes_ok) =
-          reconstruct_hol_model {show_skolems = show_skolems,
-                                 show_datatypes = show_datatypes,
+          reconstruct_hol_model {show_datatypes = show_datatypes,
                                  show_consts = show_consts}
               scope formats frees free_names sel_names nonsel_names rel_table
               bounds
@@ -686,8 +643,7 @@
                         options
                 in
                   print ("Try again with " ^
-                         space_implode " "
-                             (Sledgehammer_Util.serial_commas "and" ss) ^
+                         space_implode " " (serial_commas "and" ss) ^
                          " to confirm that the " ^ das_wort_model ^
                          " is genuine.")
                 end
@@ -721,18 +677,15 @@
              NONE)
         |> pair genuine_means_genuine
       end
-    (* bool * int * int * int -> bool -> rich_problem list
-       -> bool * int * int * int *)
     fun solve_any_problem (found_really_genuine, max_potential, max_genuine,
                            donno) first_time problems =
       let
         val max_potential = Int.max (0, max_potential)
         val max_genuine = Int.max (0, max_genuine)
-        (* bool -> int * KK.raw_bound list -> bool * bool option *)
         fun print_and_check genuine (j, bounds) =
           print_and_check_model genuine bounds (snd (nth problems j))
         val max_solutions = max_potential + max_genuine
-                            |> not need_incremental ? curry Int.min 1
+                            |> not incremental ? Integer.min 1
       in
         if max_solutions <= 0 then
           (found_really_genuine, 0, 0, donno)
@@ -828,8 +781,6 @@
              (found_really_genuine, max_potential, max_genuine, donno + 1))
       end
 
-    (* int -> int -> scope list -> bool * int * int * int
-       -> bool * int * int * int *)
     fun run_batch j n scopes (found_really_genuine, max_potential, max_genuine,
                               donno) =
       let
@@ -857,8 +808,6 @@
                           (length scopes downto 1) scopes))])
           else
             ()
-        (* scope * bool -> rich_problem list * bool
-           -> rich_problem list * bool *)
         fun add_problem_for_scope (scope, unsound) (problems, donno) =
           (check_deadline ();
            case problem_for_scope unsound scope of
@@ -904,13 +853,10 @@
                            donno) true (rev problems)
       end
 
-    (* rich_problem list -> scope -> int *)
     fun scope_count (problems : rich_problem list) scope =
       length (filter (curry scopes_equivalent scope o #scope o snd) problems)
-    (* string -> string *)
     fun excipit did_so_and_so =
       let
-        (* rich_problem list -> rich_problem list *)
         val do_filter =
           if !met_potential = max_potential then filter_out (#unsound o snd)
           else I
@@ -932,7 +878,6 @@
            "") ^ "."
       end
 
-    (* int -> int -> scope list -> bool * int * int * int -> KK.outcome *)
     fun run_batches _ _ []
                     (found_really_genuine, max_potential, max_genuine, donno) =
         if donno > 0 andalso max_genuine > 0 then
@@ -961,8 +906,8 @@
         end
 
     val (skipped, the_scopes) =
-      all_scopes hol_ctxt binarize sym_break cards_assigns maxes_assigns
-                 iters_assigns bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs
+      all_scopes hol_ctxt binarize cards_assigns maxes_assigns iters_assigns
+                 bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs
                  finitizable_dataTs
     val _ = if skipped > 0 then
               print_m (fn () => "Too many scopes. Skipping " ^
@@ -998,8 +943,6 @@
            else
              error "Nitpick was interrupted."
 
-(* Proof.state -> params -> bool -> int -> int -> int -> (term * term) list
-   -> term list -> term -> string * Proof.state *)
 fun pick_nits_in_term state (params as {debug, timeout, expect, ...}) auto i n
                       step subst orig_assm_ts orig_t =
   if getenv "KODKODI" = "" then
@@ -1018,12 +961,10 @@
       else error ("Unexpected outcome: " ^ quote outcome_code ^ ".")
     end
 
-(* string list -> term -> bool *)
 fun is_fixed_equation fixes
                       (Const (@{const_name "=="}, _) $ Free (s, _) $ Const _) =
     member (op =) fixes s
   | is_fixed_equation _ _ = false
-(* Proof.context -> term list * term -> (term * term) list * term list * term *)
 fun extract_fixed_frees ctxt (assms, t) =
   let
     val fixes = Variable.fixes_of ctxt |> map snd
@@ -1032,7 +973,6 @@
       |>> map Logic.dest_equals
   in (subst, other_assms, subst_atomic subst t) end
 
-(* Proof.state -> params -> bool -> int -> int -> string * Proof.state *)
 fun pick_nits_in_subgoal state params auto i step =
   let
     val ctxt = Proof.context_of state
@@ -1042,8 +982,7 @@
       0 => (priority "No subgoal!"; ("none", state))
     | n =>
       let
-        val (t, frees) = Logic.goal_params t i
-        val t = subst_bounds (frees, t)
+        val t = Logic.goal_params t i |> fst
         val assms = map term_of (Assumption.all_assms_of ctxt)
         val (subst, assms, t) = extract_fixed_frees ctxt (assms, t)
       in pick_nits_in_term state params auto i n step subst assms t end
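
The hunk above replaces "curry Int.max" with "Integer.max" inside the
"NameTable.fold" calls.  The following is a minimal, self-contained
Isabelle/ML sketch of why the two spellings are interchangeable in fold
position; it assumes only the standard "fold" combinator and the
"Integer" library, and the list "arities" is invented for illustration.

  (* Int.max from the SML Basis is uncurried (int * int -> int), so it
     needs "curry" before it fits fold's argument type; Isabelle's
     Integer.max is already curried (int -> int -> int). *)
  val arities = [3, 1, 4, 1, 5]                    (* hypothetical data *)
  val m1 = fold (curry Int.max) arities 1          (* old spelling: 5 *)
  val m2 = fold Integer.max arities 1              (* new spelling: 5 *)
  val _ = if m1 = m2 then () else raise Fail "spellings disagree"
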
--- a/src/HOL/Tools/Nitpick/nitpick_hol.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -13,39 +13,37 @@
   type unrolled = styp * styp
   type wf_cache = (styp * (bool * bool)) list
 
-  type hol_context = {
-    thy: theory,
-    ctxt: Proof.context,
-    max_bisim_depth: int,
-    boxes: (typ option * bool option) list,
-    stds: (typ option * bool) list,
-    wfs: (styp option * bool option) list,
-    user_axioms: bool option,
-    debug: bool,
-    binary_ints: bool option,
-    destroy_constrs: bool,
-    specialize: bool,
-    skolemize: bool,
-    star_linear_preds: bool,
-    uncurry: bool,
-    fast_descrs: bool,
-    tac_timeout: Time.time option,
-    evals: term list,
-    case_names: (string * int) list,
-    def_table: const_table,
-    nondef_table: const_table,
-    user_nondefs: term list,
-    simp_table: const_table Unsynchronized.ref,
-    psimp_table: const_table,
-    choice_spec_table: const_table,
-    intro_table: const_table,
-    ground_thm_table: term list Inttab.table,
-    ersatz_table: (string * string) list,
-    skolems: (string * string list) list Unsynchronized.ref,
-    special_funs: special_fun list Unsynchronized.ref,
-    unrolled_preds: unrolled list Unsynchronized.ref,
-    wf_cache: wf_cache Unsynchronized.ref,
-    constr_cache: (typ * styp list) list Unsynchronized.ref}
+  type hol_context =
+    {thy: theory,
+     ctxt: Proof.context,
+     max_bisim_depth: int,
+     boxes: (typ option * bool option) list,
+     stds: (typ option * bool) list,
+     wfs: (styp option * bool option) list,
+     user_axioms: bool option,
+     debug: bool,
+     binary_ints: bool option,
+     destroy_constrs: bool,
+     specialize: bool,
+     star_linear_preds: bool,
+     fast_descrs: bool,
+     tac_timeout: Time.time option,
+     evals: term list,
+     case_names: (string * int) list,
+     def_table: const_table,
+     nondef_table: const_table,
+     user_nondefs: term list,
+     simp_table: const_table Unsynchronized.ref,
+     psimp_table: const_table,
+     choice_spec_table: const_table,
+     intro_table: const_table,
+     ground_thm_table: term list Inttab.table,
+     ersatz_table: (string * string) list,
+     skolems: (string * string list) list Unsynchronized.ref,
+     special_funs: special_fun list Unsynchronized.ref,
+     unrolled_preds: unrolled list Unsynchronized.ref,
+     wf_cache: wf_cache Unsynchronized.ref,
+     constr_cache: (typ * styp list) list Unsynchronized.ref}
 
   datatype fixpoint_kind = Lfp | Gfp | NoFp
   datatype boxability =
@@ -217,7 +215,6 @@
 structure Nitpick_HOL : NITPICK_HOL =
 struct
 
-open Sledgehammer_Util
 open Nitpick_Util
 
 type const_table = term list Symtab.table
@@ -225,39 +222,37 @@
 type unrolled = styp * styp
 type wf_cache = (styp * (bool * bool)) list
 
-type hol_context = {
-  thy: theory,
-  ctxt: Proof.context,
-  max_bisim_depth: int,
-  boxes: (typ option * bool option) list,
-  stds: (typ option * bool) list,
-  wfs: (styp option * bool option) list,
-  user_axioms: bool option,
-  debug: bool,
-  binary_ints: bool option,
-  destroy_constrs: bool,
-  specialize: bool,
-  skolemize: bool,
-  star_linear_preds: bool,
-  uncurry: bool,
-  fast_descrs: bool,
-  tac_timeout: Time.time option,
-  evals: term list,
-  case_names: (string * int) list,
-  def_table: const_table,
-  nondef_table: const_table,
-  user_nondefs: term list,
-  simp_table: const_table Unsynchronized.ref,
-  psimp_table: const_table,
-  choice_spec_table: const_table,
-  intro_table: const_table,
-  ground_thm_table: term list Inttab.table,
-  ersatz_table: (string * string) list,
-  skolems: (string * string list) list Unsynchronized.ref,
-  special_funs: special_fun list Unsynchronized.ref,
-  unrolled_preds: unrolled list Unsynchronized.ref,
-  wf_cache: wf_cache Unsynchronized.ref,
-  constr_cache: (typ * styp list) list Unsynchronized.ref}
+type hol_context =
+  {thy: theory,
+   ctxt: Proof.context,
+   max_bisim_depth: int,
+   boxes: (typ option * bool option) list,
+   stds: (typ option * bool) list,
+   wfs: (styp option * bool option) list,
+   user_axioms: bool option,
+   debug: bool,
+   binary_ints: bool option,
+   destroy_constrs: bool,
+   specialize: bool,
+   star_linear_preds: bool,
+   fast_descrs: bool,
+   tac_timeout: Time.time option,
+   evals: term list,
+   case_names: (string * int) list,
+   def_table: const_table,
+   nondef_table: const_table,
+   user_nondefs: term list,
+   simp_table: const_table Unsynchronized.ref,
+   psimp_table: const_table,
+   choice_spec_table: const_table,
+   intro_table: const_table,
+   ground_thm_table: term list Inttab.table,
+   ersatz_table: (string * string) list,
+   skolems: (string * string list) list Unsynchronized.ref,
+   special_funs: special_fun list Unsynchronized.ref,
+   unrolled_preds: unrolled list Unsynchronized.ref,
+   wf_cache: wf_cache Unsynchronized.ref,
+   constr_cache: (typ * styp list) list Unsynchronized.ref}
 
 datatype fixpoint_kind = Lfp | Gfp | NoFp
 datatype boxability =
@@ -269,7 +264,7 @@
   val empty = {frac_types = [], codatatypes = []}
   val extend = I
   fun merge ({frac_types = fs1, codatatypes = cs1},
-               {frac_types = fs2, codatatypes = cs2}) : T =
+             {frac_types = fs2, codatatypes = cs2}) : T =
     {frac_types = AList.merge (op =) (K true) (fs1, fs2),
      codatatypes = AList.merge (op =) (K true) (cs1, cs2)})
 
@@ -294,31 +289,24 @@
 
 (** Constant/type information and term/type manipulation **)
 
-(* int -> string *)
 fun sel_prefix_for j = sel_prefix ^ string_of_int j ^ name_sep
-(* Proof.context -> typ -> string *)
 fun quot_normal_name_for_type ctxt T =
   quot_normal_prefix ^ unyxml (Syntax.string_of_typ ctxt T)
 
-(* string -> string * string *)
 val strip_first_name_sep =
   Substring.full #> Substring.position name_sep ##> Substring.triml 1
   #> pairself Substring.string
-(* string -> string *)
 fun original_name s =
   if String.isPrefix nitpick_prefix s then
     case strip_first_name_sep s of (s1, "") => s1 | (_, s2) => original_name s2
   else
     s
 
-(* term * term -> term *)
 fun s_betapply (Const (@{const_name If}, _) $ @{const True} $ t, _) = t
   | s_betapply (Const (@{const_name If}, _) $ @{const False} $ _, t) = t
   | s_betapply p = betapply p
-(* term * term list -> term *)
 val s_betapplys = Library.foldl s_betapply
 
-(* term * term -> term *)
 fun s_conj (t1, @{const True}) = t1
   | s_conj (@{const True}, t2) = t2
   | s_conj (t1, t2) =
@@ -330,18 +318,15 @@
     if t1 = @{const True} orelse t2 = @{const True} then @{const True}
     else HOLogic.mk_disj (t1, t2)
 
-(* term -> term -> term list *)
 fun strip_connective conn_t (t as (t0 $ t1 $ t2)) =
     if t0 = conn_t then strip_connective t0 t2 @ strip_connective t0 t1 else [t]
   | strip_connective _ t = [t]
-(* term -> term list * term *)
 fun strip_any_connective (t as (t0 $ _ $ _)) =
     if t0 = @{const "op &"} orelse t0 = @{const "op |"} then
       (strip_connective t0 t, t0)
     else
       ([t], @{const Not})
   | strip_any_connective t = ([t], @{const Not})
-(* term -> term list *)
 val conjuncts_of = strip_connective @{const "op &"}
 val disjuncts_of = strip_connective @{const "op |"}
 
@@ -416,7 +401,6 @@
    (@{const_name minus_class.minus}, 2),
    (@{const_name ord_class.less_eq}, 2)]
 
-(* typ -> typ *)
 fun unarize_type @{typ "unsigned_bit word"} = nat_T
   | unarize_type @{typ "signed_bit word"} = int_T
   | unarize_type (Type (s, Ts as _ :: _)) = Type (s, map unarize_type Ts)
@@ -437,44 +421,33 @@
   | uniterize_type T = T
 val uniterize_unarize_unbox_etc_type = uniterize_type o unarize_unbox_etc_type
 
-(* Proof.context -> typ -> string *)
 fun string_for_type ctxt = Syntax.string_of_typ ctxt o unarize_unbox_etc_type
 
-(* string -> string -> string *)
 val prefix_name = Long_Name.qualify o Long_Name.base_name
-(* string -> string *)
 fun shortest_name s = List.last (space_explode "." s) handle List.Empty => ""
-(* string -> term -> term *)
 val prefix_abs_vars = Term.map_abs_vars o prefix_name
-(* string -> string *)
 fun short_name s =
   case space_explode name_sep s of
     [_] => s |> String.isPrefix nitpick_prefix s ? unprefix nitpick_prefix
   | ss => map shortest_name ss |> space_implode "_"
-(* typ -> typ *)
 fun shorten_names_in_type (Type (s, Ts)) =
     Type (short_name s, map shorten_names_in_type Ts)
   | shorten_names_in_type T = T
-(* term -> term *)
 val shorten_names_in_term =
   map_aterms (fn Const (s, T) => Const (short_name s, T) | t => t)
   #> map_types shorten_names_in_type
 
-(* theory -> typ * typ -> bool *)
 fun strict_type_match thy (T1, T2) =
   (Sign.typ_match thy (T2, T1) Vartab.empty; true)
   handle Type.TYPE_MATCH => false
 fun type_match thy = strict_type_match thy o pairself unarize_unbox_etc_type
-(* theory -> styp * styp -> bool *)
 fun const_match thy ((s1, T1), (s2, T2)) =
   s1 = s2 andalso type_match thy (T1, T2)
-(* theory -> term * term -> bool *)
 fun term_match thy (Const x1, Const x2) = const_match thy (x1, x2)
   | term_match thy (Free (s1, T1), Free (s2, T2)) =
     const_match thy ((shortest_name s1, T1), (shortest_name s2, T2))
   | term_match _ (t1, t2) = t1 aconv t2
 
-(* typ -> term -> term -> term *)
 fun frac_from_term_pair T t1 t2 =
   case snd (HOLogic.dest_number t1) of
     0 => HOLogic.mk_number T 0
@@ -483,7 +456,6 @@
           | n2 => Const (@{const_name divide}, T --> T --> T)
                   $ HOLogic.mk_number T n1 $ HOLogic.mk_number T n2
 
-(* typ -> bool *)
 fun is_TFree (TFree _) = true
   | is_TFree _ = false
 fun is_higher_order_type (Type (@{type_name fun}, _)) = true
@@ -509,50 +481,41 @@
   | is_word_type _ = false
 val is_integer_like_type = is_iterator_type orf is_integer_type orf is_word_type
 val is_record_type = not o null o Record.dest_recTs
-(* theory -> typ -> bool *)
 fun is_frac_type thy (Type (s, [])) =
     not (null (these (AList.lookup (op =) (#frac_types (Data.get thy)) s)))
   | is_frac_type _ _ = false
 fun is_number_type thy = is_integer_like_type orf is_frac_type thy
 
-(* bool -> styp -> typ *)
 fun iterator_type_for_const gfp (s, T) =
   Type ((if gfp then gfp_iterator_prefix else lfp_iterator_prefix) ^ s,
         binder_types T)
-(* typ -> styp *)
 fun const_for_iterator_type (Type (s, Ts)) =
     (strip_first_name_sep s |> snd, Ts ---> bool_T)
   | const_for_iterator_type T =
     raise TYPE ("Nitpick_HOL.const_for_iterator_type", [T], [])
 
-(* int -> typ -> typ list * typ *)
 fun strip_n_binders 0 T = ([], T)
   | strip_n_binders n (Type (@{type_name fun}, [T1, T2])) =
     strip_n_binders (n - 1) T2 |>> cons T1
   | strip_n_binders n (Type (@{type_name fun_box}, Ts)) =
     strip_n_binders n (Type (@{type_name fun}, Ts))
   | strip_n_binders _ T = raise TYPE ("Nitpick_HOL.strip_n_binders", [T], [])
-(* typ -> typ *)
 val nth_range_type = snd oo strip_n_binders
 
-(* typ -> int *)
 fun num_factors_in_type (Type (@{type_name "*"}, [T1, T2])) =
     fold (Integer.add o num_factors_in_type) [T1, T2] 0
   | num_factors_in_type _ = 1
 fun num_binder_types (Type (@{type_name fun}, [_, T2])) =
     1 + num_binder_types T2
   | num_binder_types _ = 0
-(* typ -> typ list *)
 val curried_binder_types = maps HOLogic.flatten_tupleT o binder_types
 fun maybe_curried_binder_types T =
   (if is_pair_type (body_type T) then binder_types else curried_binder_types) T
 
-(* typ -> term list -> term *)
 fun mk_flat_tuple _ [t] = t
   | mk_flat_tuple (Type (@{type_name "*"}, [T1, T2])) (t :: ts) =
     HOLogic.pair_const T1 T2 $ t $ (mk_flat_tuple T2 ts)
   | mk_flat_tuple T ts = raise TYPE ("Nitpick_HOL.mk_flat_tuple", [T], ts)
-(* int -> term -> term list *)
 fun dest_n_tuple 1 t = [t]
   | dest_n_tuple n t = HOLogic.dest_prod t ||> dest_n_tuple (n - 1) |> op ::
 
@@ -561,7 +524,6 @@
    set_def: thm option, prop_of_Rep: thm, set_name: string,
    Abs_inverse: thm option, Rep_inverse: thm option}
 
-(* theory -> string -> typedef_info *)
 fun typedef_info thy s =
   if is_frac_type thy (Type (s, [])) then
     SOME {abs_type = Type (s, []), rep_type = @{typ "int * int"},
@@ -579,21 +541,17 @@
           Rep_inverse = SOME Rep_inverse}
   | _ => NONE
 
-(* theory -> string -> bool *)
 val is_typedef = is_some oo typedef_info
 val is_real_datatype = is_some oo Datatype.get_info
-(* theory -> (typ option * bool) list -> typ -> bool *)
 fun is_standard_datatype thy = the oo triple_lookup (type_match thy)
 
 (* FIXME: Use antiquotation for "code_numeral" below or detect "rep_datatype",
    e.g., by adding a field to "Datatype_Aux.info". *)
-(* theory -> (typ option * bool) list -> string -> bool *)
 fun is_basic_datatype thy stds s =
   member (op =) [@{type_name "*"}, @{type_name bool}, @{type_name unit},
                  @{type_name int}, "Code_Numeral.code_numeral"] s orelse
   (s = @{type_name nat} andalso is_standard_datatype thy stds nat_T)
 
-(* theory -> typ -> typ -> typ -> typ *)
 fun instantiate_type thy T1 T1' T2 =
   Same.commit (Envir.subst_type_same
                    (Sign.typ_match thy (T1, T1') Vartab.empty)) T2
@@ -602,20 +560,16 @@
 fun varify_and_instantiate_type thy T1 T1' T2 =
   instantiate_type thy (Logic.varifyT_global T1) T1' (Logic.varifyT_global T2)
 
-(* theory -> typ -> typ -> styp *)
 fun repair_constr_type thy body_T' T =
   varify_and_instantiate_type thy (body_type T) body_T' T
 
-(* string -> (string * string) list -> theory -> theory *)
 fun register_frac_type frac_s ersaetze thy =
   let
     val {frac_types, codatatypes} = Data.get thy
     val frac_types = AList.update (op =) (frac_s, ersaetze) frac_types
   in Data.put {frac_types = frac_types, codatatypes = codatatypes} thy end
-(* string -> theory -> theory *)
 fun unregister_frac_type frac_s = register_frac_type frac_s []
 
-(* typ -> string -> styp list -> theory -> theory *)
 fun register_codatatype co_T case_name constr_xs thy =
   let
     val {frac_types, codatatypes} = Data.get thy
@@ -631,10 +585,8 @@
     val codatatypes = AList.update (op =) (co_s, (case_name, constr_xs))
                                    codatatypes
   in Data.put {frac_types = frac_types, codatatypes = codatatypes} thy end
-(* typ -> theory -> theory *)
 fun unregister_codatatype co_T = register_codatatype co_T "" []
 
-(* theory -> typ -> bool *)
 fun is_quot_type thy (Type (s, _)) =
     is_some (Quotient_Info.quotdata_lookup_raw thy s)
   | is_quot_type _ _ = false
@@ -671,32 +623,26 @@
        end
      | NONE => false)
   | is_univ_typedef _ _ = false
-(* theory -> (typ option * bool) list -> typ -> bool *)
 fun is_datatype thy stds (T as Type (s, _)) =
     (is_typedef thy s orelse is_codatatype thy T orelse T = @{typ ind} orelse
      is_quot_type thy T) andalso not (is_basic_datatype thy stds s)
   | is_datatype _ _ _ = false
 
-(* theory -> typ -> (string * typ) list * (string * typ) *)
 fun all_record_fields thy T =
   let val (recs, more) = Record.get_extT_fields thy T in
     recs @ more :: all_record_fields thy (snd more)
   end
   handle TYPE _ => []
-(* styp -> bool *)
 fun is_record_constr (s, T) =
   String.isSuffix Record.extN s andalso
   let val dataT = body_type T in
     is_record_type dataT andalso
     s = unsuffix Record.ext_typeN (fst (dest_Type dataT)) ^ Record.extN
   end
-(* theory -> typ -> int *)
 val num_record_fields = Integer.add 1 o length o fst oo Record.get_extT_fields
-(* theory -> string -> typ -> int *)
 fun no_of_record_field thy s T1 =
   find_index (curry (op =) s o fst)
              (Record.get_extT_fields thy T1 ||> single |> op @)
-(* theory -> styp -> bool *)
 fun is_record_get thy (s, Type (@{type_name fun}, [T1, _])) =
     exists (curry (op =) s o fst) (all_record_fields thy T1)
   | is_record_get _ _ = false
@@ -715,7 +661,6 @@
        SOME {Rep_name, ...} => s = Rep_name
      | NONE => false)
   | is_rep_fun _ _ = false
-(* Proof.context -> styp -> bool *)
 fun is_quot_abs_fun ctxt
                     (x as (_, Type (@{type_name fun}, [_, Type (s', _)]))) =
     (try (Quotient_Term.absrep_const_chk Quotient_Term.AbsF ctxt) s'
@@ -727,19 +672,16 @@
      = SOME (Const x))
   | is_quot_rep_fun _ _ = false
 
-(* theory -> styp -> styp *)
 fun mate_of_rep_fun thy (x as (_, Type (@{type_name fun},
                                         [T1 as Type (s', _), T2]))) =
     (case typedef_info thy s' of
        SOME {Abs_name, ...} => (Abs_name, Type (@{type_name fun}, [T2, T1]))
      | NONE => raise TERM ("Nitpick_HOL.mate_of_rep_fun", [Const x]))
   | mate_of_rep_fun _ x = raise TERM ("Nitpick_HOL.mate_of_rep_fun", [Const x])
-(* theory -> typ -> typ *)
 fun rep_type_for_quot_type thy (T as Type (s, _)) =
   let val {qtyp, rtyp, ...} = Quotient_Info.quotdata_lookup thy s in
     instantiate_type thy qtyp T rtyp
   end
-(* theory -> typ -> term *)
 fun equiv_relation_for_quot_type thy (Type (s, Ts)) =
     let
       val {qtyp, equiv_rel, ...} = Quotient_Info.quotdata_lookup thy s
@@ -748,7 +690,6 @@
   | equiv_relation_for_quot_type _ T =
     raise TYPE ("Nitpick_HOL.equiv_relation_for_quot_type", [T], [])
 
-(* theory -> styp -> bool *)
 fun is_coconstr thy (s, T) =
   let
     val {codatatypes, ...} = Data.get thy
@@ -771,19 +712,16 @@
 fun is_stale_constr thy (x as (_, T)) =
   is_codatatype thy (body_type T) andalso is_constr_like thy x andalso
   not (is_coconstr thy x)
-(* theory -> (typ option * bool) list -> styp -> bool *)
 fun is_constr thy stds (x as (_, T)) =
   is_constr_like thy x andalso
   not (is_basic_datatype thy stds
                          (fst (dest_Type (unarize_type (body_type T))))) andalso
   not (is_stale_constr thy x)
-(* string -> bool *)
 val is_sel = String.isPrefix discr_prefix orf String.isPrefix sel_prefix
 val is_sel_like_and_no_discr =
   String.isPrefix sel_prefix orf
   (member (op =) [@{const_name fst}, @{const_name snd}])
 
-(* boxability -> boxability *)
 fun in_fun_lhs_for InConstr = InSel
   | in_fun_lhs_for _ = InFunLHS
 fun in_fun_rhs_for InConstr = InConstr
@@ -791,7 +729,6 @@
   | in_fun_rhs_for InFunRHS1 = InFunRHS2
   | in_fun_rhs_for _ = InFunRHS1
 
-(* hol_context -> boxability -> typ -> bool *)
 fun is_boxing_worth_it (hol_ctxt : hol_context) boxy T =
   case T of
     Type (@{type_name fun}, _) =>
@@ -803,12 +740,10 @@
      exists (is_boxing_worth_it hol_ctxt InPair)
             (map (box_type hol_ctxt InPair) Ts))
   | _ => false
-(* hol_context -> boxability -> string * typ list -> string *)
 and should_box_type (hol_ctxt as {thy, boxes, ...}) boxy z =
   case triple_lookup (type_match thy) boxes (Type z) of
     SOME (SOME box_me) => box_me
   | _ => is_boxing_worth_it hol_ctxt boxy (Type z)
-(* hol_context -> boxability -> typ -> typ *)
 and box_type hol_ctxt boxy T =
   case T of
     Type (z as (@{type_name fun}, [T1, T2])) =>
@@ -830,37 +765,29 @@
                            else InPair)) Ts)
   | _ => T
 
-(* typ -> typ *)
 fun binarize_nat_and_int_in_type @{typ nat} = @{typ "unsigned_bit word"}
   | binarize_nat_and_int_in_type @{typ int} = @{typ "signed_bit word"}
   | binarize_nat_and_int_in_type (Type (s, Ts)) =
     Type (s, map binarize_nat_and_int_in_type Ts)
   | binarize_nat_and_int_in_type T = T
-(* term -> term *)
 val binarize_nat_and_int_in_term = map_types binarize_nat_and_int_in_type
 
-(* styp -> styp *)
 fun discr_for_constr (s, T) = (discr_prefix ^ s, body_type T --> bool_T)
 
-(* typ -> int *)
 fun num_sels_for_constr_type T = length (maybe_curried_binder_types T)
-(* string -> int -> string *)
 fun nth_sel_name_for_constr_name s n =
   if s = @{const_name Pair} then
     if n = 0 then @{const_name fst} else @{const_name snd}
   else
     sel_prefix_for n ^ s
-(* styp -> int -> styp *)
 fun nth_sel_for_constr x ~1 = discr_for_constr x
   | nth_sel_for_constr (s, T) n =
     (nth_sel_name_for_constr_name s n,
      body_type T --> nth (maybe_curried_binder_types T) n)
-(* hol_context -> bool -> styp -> int -> styp *)
 fun binarized_and_boxed_nth_sel_for_constr hol_ctxt binarize =
   apsnd ((binarize ? binarize_nat_and_int_in_type) o box_type hol_ctxt InSel)
   oo nth_sel_for_constr
 
-(* string -> int *)
 fun sel_no_from_name s =
   if String.isPrefix discr_prefix s then
     ~1
@@ -871,15 +798,12 @@
   else
     0
 
-(* term -> term *)
 val close_form =
   let
-    (* (indexname * typ) list -> (indexname * typ) list -> term -> term *)
     fun close_up zs zs' =
       fold (fn (z as ((s, _), T)) => fn t' =>
                Term.all T $ Abs (s, T, abstract_over (Var z, t')))
            (take (length zs' - length zs) zs')
-    (* (indexname * typ) list -> term -> term *)
     fun aux zs (@{const "==>"} $ t1 $ t2) =
         let val zs' = Term.add_vars t1 zs in
           close_up zs zs' (Logic.mk_implies (t1, aux zs' t2))
@@ -887,7 +811,6 @@
       | aux zs t = close_up zs (Term.add_vars t zs) t
   in aux [] end
 
-(* typ list -> term -> int -> term *)
 fun eta_expand _ t 0 = t
   | eta_expand Ts (Abs (s, T, t')) n =
     Abs (s, T, eta_expand (T :: Ts) t' (n - 1))
@@ -896,7 +819,6 @@
              (List.take (binder_types (fastype_of1 (Ts, t)), n))
              (list_comb (incr_boundvars n t, map Bound (n - 1 downto 0)))
 
-(* term -> term *)
 fun extensionalize t =
   case t of
     (t0 as @{const Trueprop}) $ t1 => t0 $ extensionalize t1
@@ -906,17 +828,14 @@
     end
   | _ => t
 
-(* typ -> term list -> term *)
 fun distinctness_formula T =
   all_distinct_unordered_pairs_of
   #> map (fn (t1, t2) => @{const Not} $ (HOLogic.eq_const T $ t1 $ t2))
   #> List.foldr (s_conj o swap) @{const True}
 
-(* typ -> term *)
 fun zero_const T = Const (@{const_name zero_class.zero}, T)
 fun suc_const T = Const (@{const_name Suc}, T --> T)
 
-(* hol_context -> typ -> styp list *)
 fun uncached_datatype_constrs ({thy, stds, ...} : hol_context)
                               (T as Type (s, Ts)) =
     (case AList.lookup (op =) (#codatatypes (Data.get thy)) s of
@@ -953,7 +872,6 @@
        else
          [])
   | uncached_datatype_constrs _ _ = []
-(* hol_context -> typ -> styp list *)
 fun datatype_constrs (hol_ctxt as {constr_cache, ...}) T =
   case AList.lookup (op =) (!constr_cache) T of
     SOME xs => xs
@@ -961,18 +879,14 @@
     let val xs = uncached_datatype_constrs hol_ctxt T in
       (Unsynchronized.change constr_cache (cons (T, xs)); xs)
     end
-(* hol_context -> bool -> typ -> styp list *)
 fun binarized_and_boxed_datatype_constrs hol_ctxt binarize =
   map (apsnd ((binarize ? binarize_nat_and_int_in_type)
               o box_type hol_ctxt InConstr)) o datatype_constrs hol_ctxt
-(* hol_context -> typ -> int *)
 val num_datatype_constrs = length oo datatype_constrs
 
-(* string -> string *)
 fun constr_name_for_sel_like @{const_name fst} = @{const_name Pair}
   | constr_name_for_sel_like @{const_name snd} = @{const_name Pair}
   | constr_name_for_sel_like s' = original_name s'
-(* hol_context -> bool -> styp -> styp *)
 fun binarized_and_boxed_constr_for_sel hol_ctxt binarize (s', T') =
   let val s = constr_name_for_sel_like s' in
     AList.lookup (op =)
@@ -981,7 +895,6 @@
     |> the |> pair s
   end
 
-(* hol_context -> styp -> term *)
 fun discr_term_for_constr hol_ctxt (x as (s, T)) =
   let val dataT = body_type T in
     if s = @{const_name Suc} then
@@ -992,7 +905,6 @@
     else
       Abs (Name.uu, dataT, @{const True})
   end
-(* hol_context -> styp -> term -> term *)
 fun discriminate_value (hol_ctxt as {thy, ...}) x t =
   case head_of t of
     Const x' =>
@@ -1001,7 +913,6 @@
     else betapply (discr_term_for_constr hol_ctxt x, t)
   | _ => betapply (discr_term_for_constr hol_ctxt x, t)
 
-(* theory -> (typ option * bool) list -> styp -> term -> term *)
 fun nth_arg_sel_term_for_constr thy stds (x as (s, T)) n =
   let val (arg_Ts, dataT) = strip_type T in
     if dataT = nat_T andalso is_standard_datatype thy stds nat_T then
@@ -1010,7 +921,6 @@
       Const (nth_sel_for_constr x n)
     else
       let
-        (* int -> typ -> int * term *)
         fun aux m (Type (@{type_name "*"}, [T1, T2])) =
             let
               val (m, t1) = aux m T1
@@ -1023,7 +933,6 @@
                      (List.take (arg_Ts, n)) 0
       in Abs ("x", dataT, aux m (nth arg_Ts n) |> snd) end
   end
-(* theory -> (typ option * bool) list -> styp -> term -> int -> typ -> term *)
 fun select_nth_constr_arg thy stds x t n res_T =
   (case strip_comb t of
      (Const x', args) =>
@@ -1033,7 +942,6 @@
    | _ => raise SAME())
   handle SAME () => betapply (nth_arg_sel_term_for_constr thy stds x n, t)
 
-(* theory -> (typ option * bool) list -> styp -> term list -> term *)
 fun construct_value _ _ x [] = Const x
   | construct_value thy stds (x as (s, _)) args =
     let val args = map Envir.eta_contract args in
@@ -1050,7 +958,6 @@
       | _ => list_comb (Const x, args)
     end
 
-(* hol_context -> typ -> term -> term *)
 fun constr_expand (hol_ctxt as {thy, stds, ...}) T t =
   (case head_of t of
      Const x => if is_constr_like thy x then t else raise SAME ()
@@ -1070,17 +977,14 @@
                                      (index_seq 0 (length arg_Ts)) arg_Ts)
          end
 
-(* (term -> term) -> int -> term -> term *)
 fun coerce_bound_no f j t =
   case t of
     t1 $ t2 => coerce_bound_no f j t1 $ coerce_bound_no f j t2
   | Abs (s, T, t') => Abs (s, T, coerce_bound_no f (j + 1) t')
   | Bound j' => if j' = j then f t else t
   | _ => t
-(* hol_context -> typ -> typ -> term -> term *)
 fun coerce_bound_0_in_term hol_ctxt new_T old_T =
   old_T <> new_T ? coerce_bound_no (coerce_term hol_ctxt [new_T] old_T new_T) 0
-(* hol_context -> typ list -> typ -> typ -> term -> term *)
 and coerce_term (hol_ctxt as {thy, stds, fast_descrs, ...}) Ts new_T old_T t =
   if old_T = new_T then
     t
@@ -1125,7 +1029,6 @@
         raise TYPE ("Nitpick_HOL.coerce_term", [new_T, old_T], [t])
     | _ => raise TYPE ("Nitpick_HOL.coerce_term", [new_T, old_T], [t])
 
-(* (typ * int) list -> typ -> int *)
 fun card_of_type assigns (Type (@{type_name fun}, [T1, T2])) =
     reasonable_power (card_of_type assigns T2) (card_of_type assigns T1)
   | card_of_type assigns (Type (@{type_name "*"}, [T1, T2])) =
@@ -1139,7 +1042,6 @@
       SOME k => k
     | NONE => if T = @{typ bisim_iterator} then 0
               else raise TYPE ("Nitpick_HOL.card_of_type", [T], [])
-(* int -> (typ * int) list -> typ -> int *)
 fun bounded_card_of_type max default_card assigns
                          (Type (@{type_name fun}, [T1, T2])) =
     let
@@ -1162,11 +1064,9 @@
                     card_of_type assigns T
                     handle TYPE ("Nitpick_HOL.card_of_type", _, _) =>
                            default_card)
-(* hol_context -> typ list -> int -> (typ * int) list -> typ -> int *)
 fun bounded_exact_card_of_type hol_ctxt finitizable_dataTs max default_card
                                assigns T =
   let
-    (* typ list -> typ -> int *)
     fun aux avoid T =
       (if member (op =) avoid T then
          0
@@ -1215,47 +1115,36 @@
 
 val small_type_max_card = 5
 
-(* hol_context -> typ -> bool *)
 fun is_finite_type hol_ctxt T =
   bounded_exact_card_of_type hol_ctxt [] 1 2 [] T > 0
-(* hol_context -> typ -> bool *)
 fun is_small_finite_type hol_ctxt T =
   let val n = bounded_exact_card_of_type hol_ctxt [] 1 2 [] T in
     n > 0 andalso n <= small_type_max_card
   end
 
-(* term -> bool *)
 fun is_ground_term (t1 $ t2) = is_ground_term t1 andalso is_ground_term t2
   | is_ground_term (Const _) = true
   | is_ground_term _ = false
 
-(* term -> word -> word *)
 fun hashw_term (t1 $ t2) = hashw (hashw_term t1, hashw_term t2)
   | hashw_term (Const (s, _)) = hashw_string (s, 0w0)
   | hashw_term _ = 0w0
-(* term -> int *)
 val hash_term = Word.toInt o hashw_term
 
-(* term list -> (indexname * typ) list *)
 fun special_bounds ts =
   fold Term.add_vars ts [] |> sort (Term_Ord.fast_indexname_ord o pairself fst)
 
-(* indexname * typ -> term -> term *)
 fun abs_var ((s, j), T) body = Abs (s, T, abstract_over (Var ((s, j), T), body))
 
-(* theory -> string -> bool *)
 fun is_funky_typedef_name thy s =
   member (op =) [@{type_name unit}, @{type_name "*"}, @{type_name "+"},
                  @{type_name int}] s orelse
   is_frac_type thy (Type (s, []))
-(* theory -> typ -> bool *)
 fun is_funky_typedef thy (Type (s, _)) = is_funky_typedef_name thy s
   | is_funky_typedef _ _ = false
-(* term -> bool *)
 fun is_arity_type_axiom (Const (@{const_name HOL.type_class}, _)
                          $ Const (@{const_name TYPE}, _)) = true
   | is_arity_type_axiom _ = false
-(* theory -> bool -> term -> bool *)
 fun is_typedef_axiom thy boring (@{const "==>"} $ _ $ t2) =
     is_typedef_axiom thy boring t2
   | is_typedef_axiom thy boring
@@ -1264,7 +1153,6 @@
          $ Const _ $ _)) =
     boring <> is_funky_typedef_name thy s andalso is_typedef thy s
   | is_typedef_axiom _ _ _ = false
-(* term -> bool *)
 val is_class_axiom =
   Logic.strip_horn #> swap #> op :: #> forall (can Logic.dest_of_class)
 
@@ -1272,7 +1160,6 @@
    typedef axioms, and (3) other axioms, and returns the pair ((1), (3)).
    Typedef axioms are uninteresting to Nitpick, because it can retrieve them
    using "typedef_info". *)
-(* theory -> (string * term) list -> string list -> term list * term list *)
 fun partition_axioms_by_definitionality thy axioms def_names =
   let
     val axioms = sort (fast_string_ord o pairself fst) axioms
@@ -1285,15 +1172,12 @@
 (* Ideally we would check against "Complex_Main", not "Refute", but any theory
    will do as long as it contains all the "axioms" and "axiomatization"
    commands. *)
-(* theory -> bool *)
 fun is_built_in_theory thy = Theory.subthy (thy, @{theory Refute})
 
-(* term -> bool *)
 val is_trivial_definition =
   the_default false o try (op aconv o Logic.dest_equals)
 val is_plain_definition =
   let
-    (* term -> bool *)
     fun do_lhs t1 =
       case strip_comb t1 of
         (Const _, args) =>
@@ -1305,10 +1189,8 @@
       | do_eq _ = false
   in do_eq end
 
-(* theory -> (term * term) list -> term list * term list * term list *)
 fun all_axioms_of thy subst =
   let
-    (* theory list -> term list *)
     val axioms_of_thys =
       maps Thm.axioms_of
       #> map (apsnd (subst_atomic subst o prop_of))
@@ -1337,7 +1219,6 @@
       user_defs @ built_in_defs
   in (defs, built_in_nondefs, user_nondefs) end
 
-(* theory -> (typ option * bool) list -> bool -> styp -> int option *)
 fun arity_of_built_in_const thy stds fast_descrs (s, T) =
   if s = @{const_name If} then
     if nth_range_type 3 T = @{typ bool} then NONE else SOME 3
@@ -1365,12 +1246,10 @@
                  else
                    NONE
     end
-(* theory -> (typ option * bool) list -> bool -> styp -> bool *)
 val is_built_in_const = is_some oooo arity_of_built_in_const
 
 (* This function is designed to work for both real definition axioms and
    simplification rules (equational specifications). *)
-(* term -> term *)
 fun term_under_def t =
   case t of
     @{const "==>"} $ _ $ t2 => term_under_def t2
@@ -1384,8 +1263,6 @@
 (* Here we crucially rely on "Refute.specialize_type" performing a preorder
    traversal of the term, without which the wrong occurrence of a constant could
    be matched in the face of overloading. *)
-(* theory -> (typ option * bool) list -> bool -> const_table -> styp
-   -> term list *)
 fun def_props_for_const thy stds fast_descrs table (x as (s, _)) =
   if is_built_in_const thy stds fast_descrs x then
     []
@@ -1394,10 +1271,8 @@
     |> map_filter (try (Refute.specialize_type thy x))
     |> filter (curry (op =) (Const x) o term_under_def)
 
-(* term -> term option *)
 fun normalized_rhs_of t =
   let
-    (* term option -> term option *)
     fun aux (v as Var _) (SOME t) = SOME (lambda v t)
       | aux (c as Const (@{const_name TYPE}, _)) (SOME t) = SOME (lambda c t)
       | aux _ _ = NONE
@@ -1410,7 +1285,6 @@
     val args = strip_comb lhs |> snd
   in fold_rev aux args (SOME rhs) end
 
-(* theory -> const_table -> styp -> term option *)
 fun def_of_const thy table (x as (s, _)) =
   if is_built_in_const thy [(NONE, false)] false x orelse
      original_name s <> s then
@@ -1420,16 +1294,13 @@
       |> normalized_rhs_of |> Option.map (prefix_abs_vars s)
     handle List.Empty => NONE
 
-(* term -> fixpoint_kind *)
 fun fixpoint_kind_of_rhs (Abs (_, _, t)) = fixpoint_kind_of_rhs t
   | fixpoint_kind_of_rhs (Const (@{const_name lfp}, _) $ Abs _) = Lfp
   | fixpoint_kind_of_rhs (Const (@{const_name gfp}, _) $ Abs _) = Gfp
   | fixpoint_kind_of_rhs _ = NoFp
 
-(* theory -> const_table -> term -> bool *)
 fun is_mutually_inductive_pred_def thy table t =
   let
-    (* term -> bool *)
     fun is_good_arg (Bound _) = true
       | is_good_arg (Const (s, _)) =
         s = @{const_name True} orelse s = @{const_name False} orelse
@@ -1443,7 +1314,6 @@
        | NONE => false)
     | _ => false
   end
-(* theory -> const_table -> term -> term *)
 fun unfold_mutually_inductive_preds thy table =
   map_aterms (fn t as Const x =>
                  (case def_of_const thy table x of
@@ -1455,7 +1325,6 @@
                  | NONE => t)
                | t => t)
 
-(* theory -> (typ option * bool) list -> (string * int) list *)
 fun case_const_names thy stds =
   Symtab.fold (fn (dtype_s, {index, descr, case_name, ...}) =>
                   if is_basic_datatype thy stds dtype_s then
@@ -1466,7 +1335,6 @@
               (Datatype.get_all thy) [] @
   map (apsnd length o snd) (#codatatypes (Data.get thy))
 
-(* theory -> const_table -> string * typ -> fixpoint_kind *)
 fun fixpoint_kind_of_const thy table x =
   if is_built_in_const thy [(NONE, false)] false x then
     NoFp
@@ -1474,7 +1342,6 @@
     fixpoint_kind_of_rhs (the (def_of_const thy table x))
     handle Option.Option => NoFp
 
-(* hol_context -> styp -> bool *)
 fun is_real_inductive_pred ({thy, stds, fast_descrs, def_table, intro_table,
                              ...} : hol_context) x =
   fixpoint_kind_of_const thy def_table x <> NoFp andalso
@@ -1490,7 +1357,6 @@
   (is_real_equational_fun hol_ctxt orf is_real_inductive_pred hol_ctxt orf
    (String.isPrefix ubfp_prefix orf String.isPrefix lbfp_prefix) o fst)
 
-(* term -> term *)
 fun lhs_of_equation t =
   case t of
     Const (@{const_name all}, _) $ Abs (_, _, t1) => lhs_of_equation t1
@@ -1501,7 +1367,6 @@
   | Const (@{const_name "op ="}, _) $ t1 $ _ => SOME t1
   | @{const "op -->"} $ _ $ t2 => lhs_of_equation t2
   | _ => NONE
-(* theory -> term -> bool *)
 fun is_constr_pattern _ (Bound _) = true
   | is_constr_pattern _ (Var _) = true
   | is_constr_pattern thy t =
@@ -1518,10 +1383,8 @@
 
 (* Similar to "Refute.specialize_type" but returns all matches rather than only
    the first (preorder) match. *)
-(* theory -> styp -> term -> term list *)
 fun multi_specialize_type thy slack (s, T) t =
   let
-    (* term -> (typ * term) list -> (typ * term) list *)
     fun aux (Const (s', T')) ys =
         if s = s' then
           ys |> (if AList.defined (op =) ys T' then
@@ -1540,22 +1403,18 @@
           ys
       | aux _ ys = ys
   in map snd (fold_aterms aux t []) end
-(* theory -> bool -> const_table -> styp -> term list *)
 fun nondef_props_for_const thy slack table (x as (s, _)) =
   these (Symtab.lookup table s) |> maps (multi_specialize_type thy slack x)
 
-(* term -> term *)
 fun unvarify_term (t1 $ t2) = unvarify_term t1 $ unvarify_term t2
   | unvarify_term (Var ((s, 0), T)) = Free (s, T)
   | unvarify_term (Abs (s, T, t')) = Abs (s, T, unvarify_term t')
   | unvarify_term t = t
-(* theory -> term -> term *)
 fun axiom_for_choice_spec thy =
   unvarify_term
   #> Object_Logic.atomize_term thy
   #> Choice_Specification.close_form
   #> HOLogic.mk_Trueprop
-(* hol_context -> styp -> bool *)
 fun is_choice_spec_fun ({thy, def_table, nondef_table, choice_spec_table, ...}
                         : hol_context) x =
   case nondef_props_for_const thy true choice_spec_table x of
@@ -1571,7 +1430,6 @@
                                 ts') ts
             end
 
-(* theory -> const_table -> term -> bool *)
 fun is_choice_spec_axiom thy choice_spec_table t =
   Symtab.exists (fn (_, ts) =>
                     exists (curry (op aconv) t o axiom_for_choice_spec thy) ts)
@@ -1579,18 +1437,15 @@
 
 (** Constant unfolding **)
 
-(* theory -> (typ option * bool) list -> int * styp -> term *)
 fun constr_case_body thy stds (j, (x as (_, T))) =
   let val arg_Ts = binder_types T in
     list_comb (Bound j, map2 (select_nth_constr_arg thy stds x (Bound 0))
                              (index_seq 0 (length arg_Ts)) arg_Ts)
   end
-(* hol_context -> typ -> int * styp -> term -> term *)
 fun add_constr_case (hol_ctxt as {thy, stds, ...}) res_T (j, x) res_t =
   Const (@{const_name If}, bool_T --> res_T --> res_T --> res_T)
   $ discriminate_value hol_ctxt x (Bound 0) $ constr_case_body thy stds (j, x)
   $ res_t
-(* hol_context -> typ -> typ -> term *)
 fun optimized_case_def (hol_ctxt as {thy, stds, ...}) dataT res_T =
   let
     val xs = datatype_constrs hol_ctxt dataT
@@ -1601,7 +1456,6 @@
     |> fold_rev (add_constr_case hol_ctxt res_T) (length xs downto 2 ~~ xs')
     |> fold_rev (curry absdummy) (func_Ts @ [dataT])
   end
-(* hol_context -> string -> typ -> typ -> term -> term *)
 fun optimized_record_get (hol_ctxt as {thy, stds, ...}) s rec_T res_T t =
   let val constr_x = hd (datatype_constrs hol_ctxt rec_T) in
     case no_of_record_field thy s rec_T of
@@ -1618,7 +1472,6 @@
                                 []))
     | j => select_nth_constr_arg thy stds constr_x t j res_T
   end
-(* hol_context -> string -> typ -> term -> term -> term *)
 fun optimized_record_update (hol_ctxt as {thy, stds, ...}) s rec_T fun_t rec_t =
   let
     val constr_x as (_, constr_T) = hd (datatype_constrs hol_ctxt rec_T)
@@ -1641,12 +1494,10 @@
 (* Prevents divergence in case of cyclic or infinite definition dependencies. *)
 val unfold_max_depth = 255
 
-(* hol_context -> term -> term *)
 fun unfold_defs_in_term (hol_ctxt as {thy, ctxt, stds, fast_descrs, case_names,
                                       def_table, ground_thm_table, ersatz_table,
                                       ...}) =
   let
-    (* int -> typ list -> term -> term *)
     fun do_term depth Ts t =
       case t of
         (t0 as Const (@{const_name Int.number_class.number_of},
@@ -1696,13 +1547,11 @@
       | Var _ => t
       | Bound _ => t
       | Abs (s, T, body) => Abs (s, T, do_term depth (T :: Ts) body)
-    (* int -> typ list -> styp -> term list -> int -> typ -> term * term list *)
     and select_nth_constr_arg_with_args _ _ (x as (_, T)) [] n res_T =
         (Abs (Name.uu, body_type T,
               select_nth_constr_arg thy stds x (Bound 0) n res_T), [])
       | select_nth_constr_arg_with_args depth Ts x (t :: ts) n res_T =
         (select_nth_constr_arg thy stds x (do_term depth Ts t) n res_T, ts)
-    (* int -> typ list -> term -> styp -> term list -> term *)
     and do_const depth Ts t (x as (s, T)) ts =
       case AList.lookup (op =) ersatz_table s of
         SOME s' =>
@@ -1783,39 +1632,30 @@
 
 (** Axiom extraction/generation **)
 
-(* term -> string * term *)
 fun pair_for_prop t =
   case term_under_def t of
     Const (s, _) => (s, t)
   | t' => raise TERM ("Nitpick_HOL.pair_for_prop", [t, t'])
-(* (Proof.context -> term list) -> Proof.context -> (term * term) list
-   -> const_table *)
 fun def_table_for get ctxt subst =
   ctxt |> get |> map (pair_for_prop o subst_atomic subst)
        |> AList.group (op =) |> Symtab.make
-(* term -> string * term *)
 fun paired_with_consts t = map (rpair t) (Term.add_const_names t [])
-(* Proof.context -> (term * term) list -> term list -> const_table *)
 fun const_def_table ctxt subst ts =
   def_table_for (map prop_of o Nitpick_Defs.get) ctxt subst
   |> fold (fn (s, t) => Symtab.map_default (s, []) (cons t))
           (map pair_for_prop ts)
-(* term list -> const_table *)
 fun const_nondef_table ts =
   fold (append o paired_with_consts) ts [] |> AList.group (op =) |> Symtab.make
-(* Proof.context -> (term * term) list -> const_table *)
 val const_simp_table = def_table_for (map prop_of o Nitpick_Simps.get)
 val const_psimp_table = def_table_for (map prop_of o Nitpick_Psimps.get)
 fun const_choice_spec_table ctxt subst =
   map (subst_atomic subst o prop_of) (Nitpick_Choice_Specs.get ctxt)
   |> const_nondef_table
-(* Proof.context -> (term * term) list -> const_table -> const_table *)
 fun inductive_intro_table ctxt subst def_table =
   def_table_for
       (map (unfold_mutually_inductive_preds (ProofContext.theory_of ctxt)
                                             def_table o prop_of)
            o Nitpick_Intros.get) ctxt subst
-(* theory -> term list Inttab.table *)
 fun ground_theorem_table thy =
   fold ((fn @{const Trueprop} $ t1 =>
             is_ground_term t1 ? Inttab.map_default (hash_term t1, []) (cons t1)
@@ -1831,16 +1671,13 @@
    (@{const_name wf_wfrec}, @{const_name wf_wfrec'}),
    (@{const_name wfrec}, @{const_name wfrec'})]
 
-(* theory -> (string * string) list *)
 fun ersatz_table thy =
   fold (append o snd) (#frac_types (Data.get thy)) basic_ersatz_table
 
-(* const_table Unsynchronized.ref -> string -> term list -> unit *)
 fun add_simps simp_table s eqs =
   Unsynchronized.change simp_table
       (Symtab.update (s, eqs @ these (Symtab.lookup (!simp_table) s)))
 
-(* theory -> styp -> term list *)
 fun inverse_axioms_for_rep_fun thy (x as (_, T)) =
   let val abs_T = domain_type T in
     typedef_info thy (fst (dest_Type abs_T)) |> the
@@ -1848,7 +1685,6 @@
     |> pairself (Refute.specialize_type thy x o prop_of o the)
     ||> single |> op ::
   end
-(* theory -> string * typ list -> term list *)
 fun optimized_typedef_axioms thy (abs_z as (abs_s, _)) =
   let val abs_T = Type abs_z in
     if is_univ_typedef thy abs_T then
@@ -1871,7 +1707,6 @@
       end
     | NONE => []
   end
-(* Proof.context -> string * typ list -> term list *)
 fun optimized_quot_type_axioms ctxt stds abs_z =
   let
     val thy = ProofContext.theory_of ctxt
@@ -1900,7 +1735,6 @@
           HOLogic.mk_Trueprop (equiv_rel $ x_var $ normal_x))]
   end
 
-(* hol_context -> typ -> term list *)
 fun codatatype_bisim_axioms (hol_ctxt as {thy, stds, ...}) T =
   let
     val xs = datatype_constrs hol_ctxt T
@@ -1915,13 +1749,11 @@
                           $ (suc_const iter_T $ Bound 0) $ n_var)
     val x_var = Var (("x", 0), T)
     val y_var = Var (("y", 0), T)
-    (* styp -> int -> typ -> term *)
     fun nth_sub_bisim x n nth_T =
       (if is_codatatype thy nth_T then bisim_const $ n_var_minus_1
        else HOLogic.eq_const nth_T)
       $ select_nth_constr_arg thy stds x x_var n nth_T
       $ select_nth_constr_arg thy stds x y_var n nth_T
-    (* styp -> term *)
     fun case_func (x as (_, T)) =
       let
         val arg_Ts = binder_types T
@@ -1943,22 +1775,18 @@
 
 exception NO_TRIPLE of unit
 
-(* theory -> styp -> term -> term list * term list * term *)
 fun triple_for_intro_rule thy x t =
   let
     val prems = Logic.strip_imp_prems t |> map (Object_Logic.atomize_term thy)
     val concl = Logic.strip_imp_concl t |> Object_Logic.atomize_term thy
     val (main, side) = List.partition (exists_Const (curry (op =) x)) prems
-    (* term -> bool *)
-     val is_good_head = curry (op =) (Const x) o head_of
+    val is_good_head = curry (op =) (Const x) o head_of
   in
     if forall is_good_head main then (side, main, concl) else raise NO_TRIPLE ()
   end
 
-(* term -> term *)
 val tuple_for_args = HOLogic.mk_tuple o snd o strip_comb
 
-(* indexname * typ -> term list -> term -> term -> term *)
 fun wf_constraint_for rel side concl main =
   let
     val core = HOLogic.mk_mem (HOLogic.mk_prod (tuple_for_args main,
@@ -1972,12 +1800,9 @@
                   (t, vars)
   end
 
-(* indexname * typ -> term list * term list * term -> term *)
 fun wf_constraint_for_triple rel (side, main, concl) =
   map (wf_constraint_for rel side concl) main |> foldr1 s_conj
 
-(* Proof.context -> Time.time option -> thm
-   -> (Proof.context -> tactic -> tactic) -> bool *)
 fun terminates_by ctxt timeout goal tac =
   can (SINGLE (Classical.safe_tac (claset_of ctxt)) #> the
        #> SINGLE (DETERM_TIMEOUT timeout
@@ -1993,7 +1818,6 @@
 val termination_tacs = [Lexicographic_Order.lex_order_tac true,
                         ScnpReconstruct.sizechange_tac]
 
-(* hol_context -> const_table -> styp -> bool *)
 fun uncached_is_well_founded_inductive_pred
         ({thy, ctxt, stds, debug, fast_descrs, tac_timeout, intro_table, ...}
          : hol_context) (x as (_, T)) =
@@ -2038,7 +1862,6 @@
 
 (* The type constraint below is a workaround for a Poly/ML crash. *)
 
-(* hol_context -> styp -> bool *)
 fun is_well_founded_inductive_pred
         (hol_ctxt as {thy, wfs, def_table, wf_cache, ...} : hol_context)
         (x as (s, _)) =
@@ -2055,7 +1878,6 @@
              Unsynchronized.change wf_cache (cons (x, (gfp, wf))); wf
            end
 
-(* typ list -> typ -> term -> term *)
 fun ap_curry [_] _ t = t
   | ap_curry arg_Ts tuple_T t =
     let val n = length arg_Ts in
@@ -2064,7 +1886,6 @@
                 $ mk_flat_tuple tuple_T (map Bound (n - 1 downto 0)))
     end
 
-(* int -> term -> int *)
 fun num_occs_of_bound_in_term j (t1 $ t2) =
     op + (pairself (num_occs_of_bound_in_term j) (t1, t2))
   | num_occs_of_bound_in_term j (Abs (_, _, t')) =
@@ -2072,10 +1893,8 @@
   | num_occs_of_bound_in_term j (Bound j') = if j' = j then 1 else 0
   | num_occs_of_bound_in_term _ _ = 0
 
-(* term -> bool *)
 val is_linear_inductive_pred_def =
   let
-    (* int -> term -> bool *)
     fun do_disjunct j (Const (@{const_name Ex}, _) $ Abs (_, _, t2)) =
         do_disjunct (j + 1) t2
       | do_disjunct j t =
@@ -2083,7 +1902,6 @@
           0 => true
         | 1 => exists (curry (op =) (Bound j) o head_of) (conjuncts_of t)
         | _ => false
-    (* term -> bool *)
     fun do_lfp_def (Const (@{const_name lfp}, _) $ t2) =
         let val (xs, body) = strip_abs t2 in
           case length xs of
@@ -2093,24 +1911,19 @@
       | do_lfp_def _ = false
   in do_lfp_def o strip_abs_body end
 
-(* int -> int list list *)
 fun n_ptuple_paths 0 = []
   | n_ptuple_paths 1 = []
   | n_ptuple_paths n = [] :: map (cons 2) (n_ptuple_paths (n - 1))
-(* int -> typ -> typ -> term -> term *)
 val ap_n_split = HOLogic.mk_psplits o n_ptuple_paths
 
-(* term -> term * term *)
 val linear_pred_base_and_step_rhss =
   let
-    (* term -> term *)
     fun aux (Const (@{const_name lfp}, _) $ t2) =
         let
           val (xs, body) = strip_abs t2
           val arg_Ts = map snd (tl xs)
           val tuple_T = HOLogic.mk_tupleT arg_Ts
           val j = length arg_Ts
-          (* int -> term -> term *)
           fun repair_rec j (Const (@{const_name Ex}, T1) $ Abs (s2, T2, t2')) =
               Const (@{const_name Ex}, T1)
               $ Abs (s2, T2, repair_rec (j + 1) t2')
@@ -2140,7 +1953,6 @@
         raise TERM ("Nitpick_HOL.linear_pred_base_and_step_rhss.aux", [t])
   in aux end
 
-(* hol_context -> styp -> term -> term *)
 fun starred_linear_pred_const (hol_ctxt as {simp_table, ...}) (s, T) def =
   let
     val j = maxidx_of_term def + 1
@@ -2174,7 +1986,6 @@
     |> unfold_defs_in_term hol_ctxt
   end
 
-(* hol_context -> bool -> styp -> term *)
 fun unrolled_inductive_pred_const (hol_ctxt as {thy, star_linear_preds,
                                                 def_table, simp_table, ...})
                                   gfp (x as (s, T)) =
@@ -2211,7 +2022,6 @@
       in unrolled_const end
   end
 
-(* hol_context -> styp -> term *)
 fun raw_inductive_pred_axiom ({thy, def_table, ...} : hol_context) x =
   let
     val def = the (def_of_const thy def_table x)
@@ -2238,7 +2048,6 @@
   else
     raw_inductive_pred_axiom hol_ctxt x
 
-(* hol_context -> styp -> term list *)
 fun raw_equational_fun_axioms (hol_ctxt as {thy, stds, fast_descrs, simp_table,
                                             psimp_table, ...}) x =
   case def_props_for_const thy stds fast_descrs (!simp_table) x of
@@ -2247,7 +2056,6 @@
            | psimps => psimps)
   | simps => simps
 val equational_fun_axioms = map extensionalize oo raw_equational_fun_axioms
-(* hol_context -> styp -> bool *)
 fun is_equational_fun_surely_complete hol_ctxt x =
   case raw_equational_fun_axioms hol_ctxt x of
     [@{const Trueprop} $ (Const (@{const_name "op ="}, _) $ t1 $ _)] =>
@@ -2256,10 +2064,8 @@
 
 (** Type preprocessing **)
 
-(* term list -> term list *)
 fun merge_type_vars_in_terms ts =
   let
-    (* typ -> (sort * string) list -> (sort * string) list *)
     fun add_type (TFree (s, S)) table =
         (case AList.lookup (op =) table S of
            SOME s' =>
@@ -2268,12 +2074,10 @@
          | NONE => (S, s) :: table)
       | add_type _ table = table
     val table = fold (fold_types (fold_atyps add_type)) ts []
-    (* typ -> typ *)
     fun coalesce (TFree (_, S)) = TFree (AList.lookup (op =) table S |> the, S)
       | coalesce T = T
   in map (map_types (map_atyps coalesce)) ts end
 
-(* hol_context -> bool -> typ -> typ list -> typ list *)
 fun add_ground_types hol_ctxt binarize =
   let
     fun aux T accum =
@@ -2294,10 +2098,8 @@
       | _ => insert (op =) T accum
   in aux end
 
-(* hol_context -> bool -> typ -> typ list *)
 fun ground_types_in_type hol_ctxt binarize T =
   add_ground_types hol_ctxt binarize T []
-(* hol_context -> term list -> typ list *)
 fun ground_types_in_terms hol_ctxt binarize ts =
   fold (fold_types (add_ground_types hol_ctxt binarize)) ts []
 
--- a/src/HOL/Tools/Nitpick/nitpick_isar.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_isar.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -55,22 +55,16 @@
    ("binary_ints", "smart"),
    ("destroy_constrs", "true"),
    ("specialize", "true"),
-   ("skolemize", "true"),
    ("star_linear_preds", "true"),
-   ("uncurry", "true"),
    ("fast_descrs", "true"),
    ("peephole_optim", "true"),
    ("timeout", "30 s"),
    ("tac_timeout", "500 ms"),
-   ("sym_break", "20"),
-   ("sharing_depth", "3"),
-   ("flatten_props", "false"),
    ("max_threads", "0"),
    ("debug", "false"),
    ("verbose", "false"),
    ("overlord", "false"),
    ("show_all", "false"),
-   ("show_skolems", "true"),
    ("show_datatypes", "false"),
    ("show_consts", "false"),
    ("format", "1"),
@@ -93,23 +87,18 @@
    ("unary_ints", "binary_ints"),
    ("dont_destroy_constrs", "destroy_constrs"),
    ("dont_specialize", "specialize"),
-   ("dont_skolemize", "skolemize"),
    ("dont_star_linear_preds", "star_linear_preds"),
-   ("dont_uncurry", "uncurry"),
    ("full_descrs", "fast_descrs"),
    ("no_peephole_optim", "peephole_optim"),
-   ("dont_flatten_props", "flatten_props"),
    ("no_debug", "debug"),
    ("quiet", "verbose"),
    ("no_overlord", "overlord"),
    ("dont_show_all", "show_all"),
-   ("hide_skolems", "show_skolems"),
    ("hide_datatypes", "show_datatypes"),
    ("hide_consts", "show_consts"),
    ("trust_potential", "check_potential"),
    ("trust_genuine", "check_genuine")]
 
-(* string -> bool *)
 fun is_known_raw_param s =
   AList.defined (op =) default_default_params s orelse
   AList.defined (op =) negated_params s orelse
@@ -118,19 +107,16 @@
          ["card", "max", "iter", "box", "dont_box", "finitize", "dont_finitize",
           "mono", "non_mono", "std", "non_std", "wf", "non_wf", "format"]
 
-(* string * 'a -> unit *)
 fun check_raw_param (s, _) =
   if is_known_raw_param s then ()
   else error ("Unknown parameter: " ^ quote s ^ ".")  
 
-(* string -> string option *)
 fun unnegate_param_name name =
   case AList.lookup (op =) negated_params name of
     NONE => if String.isPrefix "dont_" name then SOME (unprefix "dont_" name)
             else if String.isPrefix "non_" name then SOME (unprefix "non_" name)
             else NONE
   | some_name => some_name
-(* raw_param -> raw_param *)
 fun unnegate_raw_param (name, value) =
   case unnegate_param_name name of
     SOME name' => (name', case value of
@@ -142,47 +128,36 @@
 
 structure Data = Theory_Data(
   type T = raw_param list
-  val empty = default_default_params |> map (apsnd single)
+  val empty = map (apsnd single) default_default_params
   val extend = I
-  fun merge p : T = AList.merge (op =) (K true) p)
+  val merge = AList.merge (op =) (K true))
 
-(* raw_param -> theory -> theory *)
 val set_default_raw_param = Data.map o AList.update (op =) o unnegate_raw_param
-(* theory -> raw_param list *)
 val default_raw_params = Data.get
 
-(* string -> bool *)
 fun is_punctuation s = (s = "," orelse s = "-" orelse s = "\<midarrow>")
 
-(* string list -> string *)
 fun stringify_raw_param_value [] = ""
   | stringify_raw_param_value [s] = s
   | stringify_raw_param_value (s1 :: s2 :: ss) =
     s1 ^ (if is_punctuation s1 orelse is_punctuation s2 then "" else " ") ^
     stringify_raw_param_value (s2 :: ss)
 
-(* int -> string -> int *)
 fun maxed_int_from_string min_int s = Int.max (min_int, the (Int.fromString s))
 
-(* Proof.context -> bool -> raw_param list -> raw_param list -> params *)
 fun extract_params ctxt auto default_params override_params =
   let
     val override_params = map unnegate_raw_param override_params
     val raw_params = rev override_params @ rev default_params
-    (* string -> string *)
     val lookup =
       Option.map stringify_raw_param_value o AList.lookup (op =) raw_params
     val lookup_string = the_default "" o lookup
-    (* bool -> bool option -> string -> bool option *)
     fun general_lookup_bool option default_value name =
       case lookup name of
-        SOME s => Sledgehammer_Util.parse_bool_option option name s
+        SOME s => parse_bool_option option name s
       | NONE => default_value
-    (* string -> bool *)
     val lookup_bool = the o general_lookup_bool false (SOME false)
-    (* string -> bool option *)
     val lookup_bool_option = general_lookup_bool true NONE
-    (* string -> string option -> int *)
     fun do_int name value =
       case value of
         SOME s => (case Int.fromString s of
@@ -190,14 +165,11 @@
                    | NONE => error ("Parameter " ^ quote name ^
                                     " must be assigned an integer value."))
       | NONE => 0
-    (* string -> int *)
     fun lookup_int name = do_int name (lookup name)
-    (* string -> int option *)
     fun lookup_int_option name =
       case lookup name of
         SOME "smart" => NONE
       | value => SOME (do_int name value)
-    (* string -> int -> string -> int list *)
     fun int_range_from_string name min_int s =
       let
         val (k1, k2) =
@@ -211,17 +183,14 @@
       handle Option.Option =>
              error ("Parameter " ^ quote name ^
                     " must be assigned a sequence of integers.")
-    (* string -> int -> string -> int list *)
     fun int_seq_from_string name min_int s =
       maps (int_range_from_string name min_int) (space_explode "," s)
-    (* string -> int -> int list *)
     fun lookup_int_seq name min_int =
       case lookup name of
         SOME s => (case int_seq_from_string name min_int s of
                      [] => [min_int]
                    | value => value)
       | NONE => [min_int]
-    (* (string -> 'a) -> int -> string -> ('a option * int list) list *)
     fun lookup_ints_assigns read prefix min_int =
       (NONE, lookup_int_seq prefix min_int)
       :: map (fn (name, value) =>
@@ -229,38 +198,31 @@
                   value |> stringify_raw_param_value
                         |> int_seq_from_string name min_int))
              (filter (String.isPrefix (prefix ^ " ") o fst) raw_params)
-    (* (string -> 'a) -> string -> ('a option * bool) list *)
     fun lookup_bool_assigns read prefix =
       (NONE, lookup_bool prefix)
       :: map (fn (name, value) =>
                  (SOME (read (String.extract (name, size prefix + 1, NONE))),
                   value |> stringify_raw_param_value
-                        |> Sledgehammer_Util.parse_bool_option false name
-                        |> the))
+                        |> parse_bool_option false name |> the))
              (filter (String.isPrefix (prefix ^ " ") o fst) raw_params)
-    (* (string -> 'a) -> string -> ('a option * bool option) list *)
     fun lookup_bool_option_assigns read prefix =
       (NONE, lookup_bool_option prefix)
       :: map (fn (name, value) =>
                  (SOME (read (String.extract (name, size prefix + 1, NONE))),
                   value |> stringify_raw_param_value
-                        |> Sledgehammer_Util.parse_bool_option true name))
+                        |> parse_bool_option true name))
              (filter (String.isPrefix (prefix ^ " ") o fst) raw_params)
-    (* string -> Time.time option *)
     fun lookup_time name =
       case lookup name of
         NONE => NONE
-      | SOME s => Sledgehammer_Util.parse_time_option name s
-    (* string -> term list *)
+      | SOME s => parse_time_option name s
     val lookup_term_list =
       AList.lookup (op =) raw_params #> these #> Syntax.read_terms ctxt
     val read_type_polymorphic =
       Syntax.read_typ ctxt #> Logic.mk_type
       #> singleton (Variable.polymorphic ctxt) #> Logic.dest_type
-    (* string -> term *)
     val read_term_polymorphic =
       Syntax.read_term ctxt #> singleton (Variable.polymorphic ctxt)
-    (* string -> styp *)
     val read_const_polymorphic = read_term_polymorphic #> dest_Const
     val cards_assigns = lookup_ints_assigns read_type_polymorphic "card" 1
     val maxes_assigns = lookup_ints_assigns read_const_polymorphic "max" ~1
@@ -284,19 +246,13 @@
     val binary_ints = lookup_bool_option "binary_ints"
     val destroy_constrs = lookup_bool "destroy_constrs"
     val specialize = lookup_bool "specialize"
-    val skolemize = lookup_bool "skolemize"
     val star_linear_preds = lookup_bool "star_linear_preds"
-    val uncurry = lookup_bool "uncurry"
     val fast_descrs = lookup_bool "fast_descrs"
     val peephole_optim = lookup_bool "peephole_optim"
     val timeout = if auto then NONE else lookup_time "timeout"
     val tac_timeout = lookup_time "tac_timeout"
-    val sym_break = Int.max (0, lookup_int "sym_break")
-    val sharing_depth = Int.max (1, lookup_int "sharing_depth")
-    val flatten_props = lookup_bool "flatten_props"
     val max_threads = Int.max (0, lookup_int "max_threads")
     val show_all = debug orelse lookup_bool "show_all"
-    val show_skolems = show_all orelse lookup_bool "show_skolems"
     val show_datatypes = show_all orelse lookup_bool "show_datatypes"
     val show_consts = show_all orelse lookup_bool "show_consts"
     val formats = lookup_ints_assigns read_term_polymorphic "format" 0
@@ -306,9 +262,10 @@
     val max_genuine = Int.max (0, lookup_int "max_genuine")
     val check_potential = lookup_bool "check_potential"
     val check_genuine = lookup_bool "check_genuine"
-    val batch_size = case lookup_int_option "batch_size" of
-                       SOME n => Int.max (1, n)
-                     | NONE => if debug then 1 else 64
+    val batch_size =
+      case lookup_int_option "batch_size" of
+        SOME n => Int.max (1, n)
+      | NONE => if debug then 1 else 64
     val expect = lookup_string "expect"
   in
     {cards_assigns = cards_assigns, maxes_assigns = maxes_assigns,
@@ -319,37 +276,28 @@
      user_axioms = user_axioms, assms = assms,
      merge_type_vars = merge_type_vars, binary_ints = binary_ints,
      destroy_constrs = destroy_constrs, specialize = specialize,
-     skolemize = skolemize, star_linear_preds = star_linear_preds,
-     uncurry = uncurry, fast_descrs = fast_descrs,
+     star_linear_preds = star_linear_preds, fast_descrs = fast_descrs,
      peephole_optim = peephole_optim, timeout = timeout,
-     tac_timeout = tac_timeout, sym_break = sym_break,
-     sharing_depth = sharing_depth, flatten_props = flatten_props,
-     max_threads = max_threads, show_skolems = show_skolems,
+     tac_timeout = tac_timeout, max_threads = max_threads,
      show_datatypes = show_datatypes, show_consts = show_consts,
      formats = formats, evals = evals, max_potential = max_potential,
      max_genuine = max_genuine, check_potential = check_potential,
      check_genuine = check_genuine, batch_size = batch_size, expect = expect}
   end
 
-(* theory -> (string * string) list -> params *)
 fun default_params thy =
   extract_params (ProofContext.init thy) false (default_raw_params thy)
   o map (apsnd single)
 
-(* P.token list -> string * P.token list *)
 val parse_key = Scan.repeat1 P.typ_group >> space_implode " "
-(* P.token list -> string list * P.token list *)
 val parse_value =
   Scan.repeat1 (P.minus >> single
                 || Scan.repeat1 (Scan.unless P.minus P.name)
                 || P.$$$ "," |-- P.number >> prefix "," >> single) >> flat
-(* P.token list -> raw_param * P.token list *)
 val parse_param = parse_key -- Scan.optional (P.$$$ "=" |-- parse_value) []
-(* P.token list -> raw_param list * P.token list *)
 val parse_params =
   Scan.optional (P.$$$ "[" |-- P.list parse_param --| P.$$$ "]") []
 
-(* Proof.context -> ('a -> 'a) -> 'a -> 'a *)
 fun handle_exceptions ctxt f x =
   f x
   handle ARG (loc, details) =>
@@ -387,7 +335,6 @@
        | Refute.REFUTE (loc, details) =>
          error ("Unhandled Refute error (" ^ quote loc ^ "): " ^ details ^ ".")
 
-(* raw_param list -> bool -> int -> int -> Proof.state -> bool * Proof.state *)
 fun pick_nits override_params auto i step state =
   let
     val thy = Proof.theory_of state
@@ -395,7 +342,6 @@
     val _ = List.app check_raw_param override_params
     val params as {blocking, debug, ...} =
       extract_params ctxt auto (default_raw_params thy) override_params
-    (* unit -> bool * Proof.state *)
     fun go () =
       (false, state)
       |> (if auto then perhaps o try
@@ -408,17 +354,14 @@
     else (Toplevel.thread true (fn () => (go (); ())); (false, state))
   end
 
-(* raw_param list * int -> Toplevel.transition -> Toplevel.transition *)
 fun nitpick_trans (params, i) =
   Toplevel.keep (fn st =>
       (pick_nits params false i (Toplevel.proof_position_of st)
                  (Toplevel.proof_of st); ()))
 
-(* raw_param -> string *)
 fun string_for_raw_param (name, value) =
   name ^ " = " ^ stringify_raw_param_value value
 
-(* raw_param list -> Toplevel.transition -> Toplevel.transition *)
 fun nitpick_params_trans params =
   Toplevel.theory
       (fold set_default_raw_param params
@@ -431,20 +374,17 @@
                                params |> map string_for_raw_param
                                       |> sort_strings |> cat_lines)))))
 
-(* P.token list
-   -> (Toplevel.transition -> Toplevel.transition) * P.token list *)
 val parse_nitpick_command =
   (parse_params -- Scan.optional P.nat 1) #>> nitpick_trans
 val parse_nitpick_params_command = parse_params #>> nitpick_params_trans
 
 val _ = OuterSyntax.improper_command "nitpick"
-            "try to find a counterexample for a given subgoal using Kodkod"
+            "try to find a counterexample for a given subgoal using Nitpick"
             K.diag parse_nitpick_command
 val _ = OuterSyntax.command "nitpick_params"
             "set and display the default parameters for Nitpick"
             K.thy_decl parse_nitpick_params_command
 
-(* Proof.state -> bool * Proof.state *)
 fun auto_nitpick state =
   if not (!auto) then (false, state) else pick_nits [] true 1 0 state
 
--- a/src/HOL/Tools/Nitpick/nitpick_kodkod.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_kodkod.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -57,24 +57,19 @@
 
 structure NfaGraph = Typ_Graph
 
-(* int -> KK.int_expr list *)
 fun flip_nums n = index_seq 1 n @ [0] |> map KK.Num
 
-(* int -> int -> int -> KK.bound list -> KK.formula -> int *)
 fun univ_card nat_card int_card main_j0 bounds formula =
   let
-    (* KK.rel_expr -> int -> int *)
     fun rel_expr_func r k =
       Int.max (k, case r of
                     KK.Atom j => j + 1
                   | KK.AtomSeq (k', j0) => j0 + k'
                   | _ => 0)
-    (* KK.tuple -> int -> int *)
     fun tuple_func t k =
       case t of
         KK.Tuple js => fold Integer.max (map (Integer.add 1) js) k
       | _ => k
-    (* KK.tuple_set -> int -> int *)
     fun tuple_set_func ts k =
       Int.max (k, case ts of KK.TupleAtomSeq (k', j0) => j0 + k' | _ => 0)
     val expr_F = {formula_func = K I, rel_expr_func = rel_expr_func,
@@ -84,10 +79,8 @@
                |> KK.fold_formula expr_F formula
   in Int.max (main_j0 + fold Integer.max [2, nat_card, int_card] 0, card) end
 
-(* int -> KK.formula -> unit *)
 fun check_bits bits formula =
   let
-    (* KK.int_expr -> unit -> unit *)
     fun int_expr_func (KK.Num k) () =
         if is_twos_complement_representable bits k then
           ()
@@ -100,7 +93,6 @@
                   int_expr_func = int_expr_func}
   in KK.fold_formula expr_F formula () end
 
-(* int -> int -> unit *)
 fun check_arity univ_card n =
   if n > KK.max_arity univ_card then
     raise TOO_LARGE ("Nitpick_Kodkod.check_arity",
@@ -109,7 +101,6 @@
   else
     ()
 
-(* bool -> int -> int list -> KK.tuple *)
 fun kk_tuple debug univ_card js =
   if debug then
     KK.Tuple js
@@ -117,19 +108,13 @@
     KK.TupleIndex (length js,
                    fold (fn j => fn accum => accum * univ_card + j) js 0)
 
-(* (int * int) list -> KK.tuple_set *)
 val tuple_set_from_atom_schema = foldl1 KK.TupleProduct o map KK.TupleAtomSeq
-(* rep -> KK.tuple_set *)
 val upper_bound_for_rep = tuple_set_from_atom_schema o atom_schema_of_rep
 
-(* int -> KK.tuple_set *)
 val single_atom = KK.TupleSet o single o KK.Tuple o single
-(* int -> KK.int_bound list *)
 fun sequential_int_bounds n = [(NONE, map single_atom (index_seq 0 n))]
-(* int -> int -> KK.int_bound list *)
 fun pow_of_two_int_bounds bits j0 =
   let
-    (* int -> int -> int -> KK.int_bound list *)
     fun aux 0  _ _ = []
       | aux 1 pow_of_two j = [(SOME (~ pow_of_two), [single_atom j])]
       | aux iter pow_of_two j =
@@ -137,10 +122,8 @@
         aux (iter - 1) (2 * pow_of_two) (j + 1)
   in aux (bits + 1) 1 j0 end
 
-(* KK.formula -> KK.n_ary_index list *)
 fun built_in_rels_in_formula formula =
   let
-    (* KK.rel_expr -> KK.n_ary_index list -> KK.n_ary_index list *)
     fun rel_expr_func (KK.Rel (x as (n, j))) =
         if x = unsigned_bit_word_sel_rel orelse x = signed_bit_word_sel_rel then
           I
@@ -155,7 +138,6 @@
 
 val max_table_size = 65536
 
-(* int -> unit *)
 fun check_table_size k =
   if k > max_table_size then
     raise TOO_LARGE ("Nitpick_Kodkod.check_table_size",
@@ -163,7 +145,6 @@
   else
     ()
 
-(* bool -> int -> int * int -> (int -> int) -> KK.tuple list *)
 fun tabulate_func1 debug univ_card (k, j0) f =
   (check_table_size k;
    map_filter (fn j1 => let val j2 = f j1 in
@@ -172,7 +153,6 @@
                           else
                             NONE
                         end) (index_seq 0 k))
-(* bool -> int -> int * int -> int -> (int * int -> int) -> KK.tuple list *)
 fun tabulate_op2 debug univ_card (k, j0) res_j0 f =
   (check_table_size (k * k);
    map_filter (fn j => let
@@ -186,8 +166,6 @@
                          else
                            NONE
                        end) (index_seq 0 (k * k)))
-(* bool -> int -> int * int -> int -> (int * int -> int * int)
-   -> KK.tuple list *)
 fun tabulate_op2_2 debug univ_card (k, j0) res_j0 f =
   (check_table_size (k * k);
    map_filter (fn j => let
@@ -202,33 +180,27 @@
                          else
                            NONE
                        end) (index_seq 0 (k * k)))
-(* bool -> int -> int * int -> (int * int -> int) -> KK.tuple list *)
 fun tabulate_nat_op2 debug univ_card (k, j0) f =
   tabulate_op2 debug univ_card (k, j0) j0 (atom_for_nat (k, 0) o f)
 fun tabulate_int_op2 debug univ_card (k, j0) f =
   tabulate_op2 debug univ_card (k, j0) j0
                (atom_for_int (k, 0) o f o pairself (int_for_atom (k, 0)))
-(* bool -> int -> int * int -> (int * int -> int * int) -> KK.tuple list *)
 fun tabulate_int_op2_2 debug univ_card (k, j0) f =
   tabulate_op2_2 debug univ_card (k, j0) j0
                  (pairself (atom_for_int (k, 0)) o f
                   o pairself (int_for_atom (k, 0)))
 
-(* int * int -> int *)
 fun isa_div (m, n) = m div n handle General.Div => 0
 fun isa_mod (m, n) = m mod n handle General.Div => m
 fun isa_gcd (m, 0) = m
   | isa_gcd (m, n) = isa_gcd (n, isa_mod (m, n))
 fun isa_lcm (m, n) = isa_div (m * n, isa_gcd (m, n))
 val isa_zgcd = isa_gcd o pairself abs
-(* int * int -> int * int *)
 fun isa_norm_frac (m, n) =
   if n < 0 then isa_norm_frac (~m, ~n)
   else if m = 0 orelse n = 0 then (0, 1)
   else let val p = isa_zgcd (m, n) in (isa_div (m, p), isa_div (n, p)) end
 
-(* bool -> int -> int -> int -> int -> int * int
-   -> string * bool * KK.tuple list *)
 fun tabulate_built_in_rel debug univ_card nat_card int_card j0 (x as (n, _)) =
   (check_arity univ_card n;
    if x = not3_rel then
@@ -269,25 +241,21 @@
    else
      raise ARG ("Nitpick_Kodkod.tabulate_built_in_rel", "unknown relation"))
 
-(* bool -> int -> int -> int -> int -> int * int -> KK.rel_expr -> KK.bound *)
 fun bound_for_built_in_rel debug univ_card nat_card int_card j0 x =
   let
     val (nick, ts) = tabulate_built_in_rel debug univ_card nat_card int_card
                                            j0 x
   in ([(x, nick)], [KK.TupleSet ts]) end
 
-(* bool -> int -> int -> int -> int -> KK.formula -> KK.bound list *)
 fun bounds_for_built_in_rels_in_formula debug univ_card nat_card int_card j0 =
   map (bound_for_built_in_rel debug univ_card nat_card int_card j0)
   o built_in_rels_in_formula
 
-(* Proof.context -> bool -> string -> typ -> rep -> string *)
 fun bound_comment ctxt debug nick T R =
   short_name nick ^
   (if debug then " :: " ^ unyxml (Syntax.string_of_typ ctxt T) else "") ^
   " : " ^ string_for_rep R
 
-(* Proof.context -> bool -> nut -> KK.bound *)
 fun bound_for_plain_rel ctxt debug (u as FreeRel (x, T, R, nick)) =
     ([(x, bound_comment ctxt debug nick T R)],
      if nick = @{const_name bisim_iterator_max} then
@@ -299,7 +267,6 @@
   | bound_for_plain_rel _ _ u =
     raise NUT ("Nitpick_Kodkod.bound_for_plain_rel", [u])
 
-(* Proof.context -> bool -> dtype_spec list -> nut -> KK.bound *)
 fun bound_for_sel_rel ctxt debug dtypes
         (FreeRel (x, T as Type (@{type_name fun}, [T1, T2]),
                   R as Func (Atom (_, j0), R2), nick)) =
@@ -331,12 +298,9 @@
   | bound_for_sel_rel _ _ _ u =
     raise NUT ("Nitpick_Kodkod.bound_for_sel_rel", [u])
 
-(* KK.bound list -> KK.bound list *)
 fun merge_bounds bs =
   let
-    (* KK.bound -> int *)
     fun arity (zs, _) = fst (fst (hd zs))
-    (* KK.bound list -> KK.bound -> KK.bound list -> KK.bound list *)
     fun add_bound ds b [] = List.revAppend (ds, [b])
       | add_bound ds b (c :: cs) =
         if arity b = arity c andalso snd b = snd c then
@@ -345,40 +309,33 @@
           add_bound (c :: ds) b cs
   in fold (add_bound []) bs [] end
 
-(* int -> int -> KK.rel_expr list *)
 fun unary_var_seq j0 n = map (curry KK.Var 1) (index_seq j0 n)
 
-(* int list -> KK.rel_expr *)
 val singleton_from_combination = foldl1 KK.Product o map KK.Atom
-(* rep -> KK.rel_expr list *)
 fun all_singletons_for_rep R =
   if is_lone_rep R then
     all_combinations_for_rep R |> map singleton_from_combination
   else
     raise REP ("Nitpick_Kodkod.all_singletons_for_rep", [R])
 
-(* KK.rel_expr -> KK.rel_expr list *)
 fun unpack_products (KK.Product (r1, r2)) =
     unpack_products r1 @ unpack_products r2
   | unpack_products r = [r]
 fun unpack_joins (KK.Join (r1, r2)) = unpack_joins r1 @ unpack_joins r2
   | unpack_joins r = [r]
 
-(* rep -> KK.rel_expr *)
 val empty_rel_for_rep = empty_n_ary_rel o arity_of_rep
 fun full_rel_for_rep R =
   case atom_schema_of_rep R of
     [] => raise REP ("Nitpick_Kodkod.full_rel_for_rep", [R])
   | schema => foldl1 KK.Product (map KK.AtomSeq schema)
 
-(* int -> int list -> KK.decl list *)
 fun decls_for_atom_schema j0 schema =
   map2 (fn j => fn x => KK.DeclOne ((1, j), KK.AtomSeq x))
        (index_seq j0 (length schema)) schema
 
 (* The type constraint below is a workaround for a Poly/ML bug. *)
 
-(* kodkod_constrs -> rep -> KK.rel_expr -> KK.formula *)
 fun d_n_ary_function ({kk_all, kk_join, kk_lone, kk_one, ...} : kodkod_constrs)
                      R r =
   let val body_R = body_rep R in
@@ -420,14 +377,11 @@
       d_n_ary_function kk R r
   | kk_n_ary_function kk R r = d_n_ary_function kk R r
 
-(* kodkod_constrs -> KK.rel_expr list -> KK.formula *)
 fun kk_disjoint_sets _ [] = KK.True
   | kk_disjoint_sets (kk as {kk_and, kk_no, kk_intersect, ...} : kodkod_constrs)
                      (r :: rs) =
     fold (kk_and o kk_no o kk_intersect r) rs (kk_disjoint_sets kk rs)
 
-(* int -> kodkod_constrs -> (KK.rel_expr -> KK.rel_expr) -> KK.rel_expr
-   -> KK.rel_expr *)
 fun basic_rel_rel_let j ({kk_rel_let, ...} : kodkod_constrs) f r =
   if inline_rel_expr r then
     f r
@@ -435,36 +389,25 @@
     let val x = (KK.arity_of_rel_expr r, j) in
       kk_rel_let [KK.AssignRelReg (x, r)] (f (KK.RelReg x))
     end
-(* kodkod_constrs -> (KK.rel_expr -> KK.rel_expr) -> KK.rel_expr
-   -> KK.rel_expr *)
 val single_rel_rel_let = basic_rel_rel_let 0
-(* kodkod_constrs -> (KK.rel_expr -> KK.rel_expr -> KK.rel_expr) -> KK.rel_expr
-   -> KK.rel_expr -> KK.rel_expr *)
 fun double_rel_rel_let kk f r1 r2 =
   single_rel_rel_let kk (fn r1 => basic_rel_rel_let 1 kk (f r1) r2) r1
-(* kodkod_constrs -> (KK.rel_expr -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr)
-   -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr -> KK.rel_expr *)
 fun triple_rel_rel_let kk f r1 r2 r3 =
   double_rel_rel_let kk
       (fn r1 => fn r2 => basic_rel_rel_let 2 kk (f r1 r2) r3) r1 r2
 
-(* kodkod_constrs -> int -> KK.formula -> KK.rel_expr *)
 fun atom_from_formula ({kk_rel_if, ...} : kodkod_constrs) j0 f =
   kk_rel_if f (KK.Atom (j0 + 1)) (KK.Atom j0)
-(* kodkod_constrs -> rep -> KK.formula -> KK.rel_expr *)
 fun rel_expr_from_formula kk R f =
   case unopt_rep R of
     Atom (2, j0) => atom_from_formula kk j0 f
   | _ => raise REP ("Nitpick_Kodkod.rel_expr_from_formula", [R])
 
-(* kodkod_constrs -> int -> int -> KK.rel_expr -> KK.rel_expr list *)
 fun unpack_vect_in_chunks ({kk_project_seq, ...} : kodkod_constrs) chunk_arity
                           num_chunks r =
   List.tabulate (num_chunks, fn j => kk_project_seq r (j * chunk_arity)
                                                     chunk_arity)
 
-(* kodkod_constrs -> bool -> rep -> rep -> KK.rel_expr -> KK.rel_expr
-   -> KK.rel_expr *)
 fun kk_n_fold_join
         (kk as {kk_intersect, kk_product, kk_join, kk_project_seq, ...}) one R1
         res_R r1 r2 =
@@ -484,8 +427,6 @@
             arity1 (arity_of_rep res_R)
     end
 
-(* kodkod_constrs -> rep -> rep -> KK.rel_expr -> KK.rel_expr list
-   -> KK.rel_expr list -> KK.rel_expr *)
 fun kk_case_switch (kk as {kk_union, kk_product, ...}) R1 R2 r rs1 rs2 =
   if rs1 = rs2 then r
   else kk_n_fold_join kk true R1 R2 r (fold1 kk_union (map2 kk_product rs1 rs2))
@@ -493,7 +434,6 @@
 val lone_rep_fallback_max_card = 4096
 val some_j0 = 0
 
-(* kodkod_constrs -> rep -> rep -> KK.rel_expr -> KK.rel_expr *)
 fun lone_rep_fallback kk new_R old_R r =
   if old_R = new_R then
     r
@@ -510,7 +450,6 @@
       else
         raise REP ("Nitpick_Kodkod.lone_rep_fallback", [old_R, new_R])
     end
-(* kodkod_constrs -> int * int -> rep -> KK.rel_expr -> KK.rel_expr *)
 and atom_from_rel_expr kk x old_R r =
   case old_R of
     Func (R1, R2) =>
@@ -523,7 +462,6 @@
     end
   | Opt _ => raise REP ("Nitpick_Kodkod.atom_from_rel_expr", [old_R])
   | _ => lone_rep_fallback kk (Atom x) old_R r
-(* kodkod_constrs -> rep list -> rep -> KK.rel_expr -> KK.rel_expr *)
 and struct_from_rel_expr kk Rs old_R r =
   case old_R of
     Atom _ => lone_rep_fallback kk (Struct Rs) old_R r
@@ -547,7 +485,6 @@
         lone_rep_fallback kk (Struct Rs) old_R r
     end
   | _ => raise REP ("Nitpick_Kodkod.struct_from_rel_expr", [old_R])
-(* kodkod_constrs -> int -> rep -> rep -> KK.rel_expr -> KK.rel_expr *)
 and vect_from_rel_expr kk k R old_R r =
   case old_R of
     Atom _ => lone_rep_fallback kk (Vect (k, R)) old_R r
@@ -570,7 +507,6 @@
                                          (kk_n_fold_join kk true R1 R2 arg_r r))
                (all_singletons_for_rep R1))
   | _ => raise REP ("Nitpick_Kodkod.vect_from_rel_expr", [old_R])
-(* kodkod_constrs -> rep -> rep -> rep -> KK.rel_expr -> KK.rel_expr *)
 and func_from_no_opt_rel_expr kk R1 R2 (Atom x) r =
     let
       val dom_card = card_of_rep R1
@@ -599,7 +535,6 @@
        let
          val args_rs = all_singletons_for_rep R1
          val vals_rs = unpack_vect_in_chunks kk 1 k r
-         (* KK.rel_expr -> KK.rel_expr -> KK.rel_expr *)
          fun empty_or_singleton_set_for arg_r val_r =
            #kk_join kk val_r (#kk_product kk (KK.Atom (j0 + 1)) arg_r)
        in
@@ -682,7 +617,6 @@
         end
     | _ => raise REP ("Nitpick_Kodkod.func_from_no_opt_rel_expr",
                       [old_R, Func (R1, R2)])
-(* kodkod_constrs -> rep -> rep -> KK.rel_expr -> KK.rel_expr *)
 and rel_expr_from_rel_expr kk new_R old_R r =
   let
     val unopt_old_R = unopt_rep old_R
@@ -702,25 +636,20 @@
                          [old_R, new_R]))
           unopt_old_R r
   end
-(* kodkod_constrs -> rep -> rep -> rep -> KK.rel_expr -> KK.rel_expr *)
 and rel_expr_to_func kk R1 R2 = rel_expr_from_rel_expr kk (Func (R1, R2))
 
-(* kodkod_constrs -> typ -> KK.rel_expr -> KK.rel_expr *)
 fun bit_set_from_atom ({kk_join, ...} : kodkod_constrs) T r =
   kk_join r (KK.Rel (if T = @{typ "unsigned_bit word"} then
                        unsigned_bit_word_sel_rel
                      else
                        signed_bit_word_sel_rel))
-(* kodkod_constrs -> typ -> KK.rel_expr -> KK.int_expr *)
 val int_expr_from_atom = KK.SetSum ooo bit_set_from_atom
-(* kodkod_constrs -> typ -> rep -> KK.int_expr -> KK.rel_expr *)
 fun atom_from_int_expr (kk as {kk_rel_eq, kk_comprehension, ...}
                         : kodkod_constrs) T R i =
   kk_comprehension (decls_for_atom_schema ~1 (atom_schema_of_rep R))
                    (kk_rel_eq (bit_set_from_atom kk T (KK.Var (1, ~1)))
                               (KK.Bits i))
 
-(* kodkod_constrs -> nut -> KK.formula *)
 fun declarative_axiom_for_plain_rel kk (FreeRel (x, _, R as Func _, nick)) =
     kk_n_ary_function kk (R |> nick = @{const_name List.set} ? unopt_rep)
                       (KK.Rel x)
@@ -732,17 +661,13 @@
   | declarative_axiom_for_plain_rel _ u =
     raise NUT ("Nitpick_Kodkod.declarative_axiom_for_plain_rel", [u])
 
-(* nut NameTable.table -> styp -> KK.rel_expr * rep * int *)
 fun const_triple rel_table (x as (s, T)) =
   case the_name rel_table (ConstName (s, T, Any)) of
     FreeRel ((n, j), _, R, _) => (KK.Rel (n, j), R, n)
   | _ => raise TERM ("Nitpick_Kodkod.const_triple", [Const x])
 
-(* nut NameTable.table -> styp -> KK.rel_expr *)
 fun discr_rel_expr rel_table = #1 o const_triple rel_table o discr_for_constr
 
-(* hol_context -> bool -> kodkod_constrs -> nut NameTable.table
-   -> dtype_spec list -> styp -> int -> nfa_transition list *)
 fun nfa_transitions_for_sel hol_ctxt binarize
                             ({kk_project, ...} : kodkod_constrs) rel_table
                             (dtypes : dtype_spec list) constr_x n =
@@ -757,14 +682,10 @@
                    else SOME (kk_project r (map KK.Num [0, j]), T))
                (index_seq 1 (arity - 1) ~~ tl type_schema)
   end
-(* hol_context -> bool -> kodkod_constrs -> nut NameTable.table
-   -> dtype_spec list -> styp -> nfa_transition list *)
 fun nfa_transitions_for_constr hol_ctxt binarize kk rel_table dtypes
                                (x as (_, T)) =
   maps (nfa_transitions_for_sel hol_ctxt binarize kk rel_table dtypes x)
        (index_seq 0 (num_sels_for_constr_type T))
-(* hol_context -> bool -> kodkod_constrs -> nut NameTable.table
-   -> dtype_spec list -> dtype_spec -> nfa_entry option *)
 fun nfa_entry_for_datatype _ _ _ _ _ ({co = true, ...} : dtype_spec) = NONE
   | nfa_entry_for_datatype _ _ _ _ _ {standard = false, ...} = NONE
   | nfa_entry_for_datatype _ _ _ _ _ {deep = false, ...} = NONE
@@ -775,12 +696,10 @@
 
 val empty_rel = KK.Product (KK.None, KK.None)
 
-(* nfa_table -> typ -> typ -> KK.rel_expr list *)
 fun direct_path_rel_exprs nfa start_T final_T =
   case AList.lookup (op =) nfa final_T of
     SOME trans => map fst (filter (curry (op =) start_T o snd) trans)
   | NONE => []
-(* kodkod_constrs -> nfa_table -> typ list -> typ -> typ -> KK.rel_expr *)
 and any_path_rel_expr ({kk_union, ...} : kodkod_constrs) nfa [] start_T
                       final_T =
     fold kk_union (direct_path_rel_exprs nfa start_T final_T)
@@ -788,14 +707,11 @@
   | any_path_rel_expr (kk as {kk_union, ...}) nfa (T :: Ts) start_T final_T =
     kk_union (any_path_rel_expr kk nfa Ts start_T final_T)
              (knot_path_rel_expr kk nfa Ts start_T T final_T)
-(* kodkod_constrs -> nfa_table -> typ list -> typ -> typ -> typ
-   -> KK.rel_expr *)
 and knot_path_rel_expr (kk as {kk_join, kk_reflexive_closure, ...}) nfa Ts
                        start_T knot_T final_T =
   kk_join (kk_join (any_path_rel_expr kk nfa Ts knot_T final_T)
                    (kk_reflexive_closure (loop_path_rel_expr kk nfa Ts knot_T)))
           (any_path_rel_expr kk nfa Ts start_T knot_T)
-(* kodkod_constrs -> nfa_table -> typ list -> typ -> KK.rel_expr *)
 and loop_path_rel_expr ({kk_union, ...} : kodkod_constrs) nfa [] start_T =
     fold kk_union (direct_path_rel_exprs nfa start_T start_T) empty_rel
   | loop_path_rel_expr (kk as {kk_union, kk_closure, ...}) nfa (T :: Ts)
@@ -806,12 +722,9 @@
       kk_union (loop_path_rel_expr kk nfa Ts start_T)
                (knot_path_rel_expr kk nfa Ts start_T T start_T)
 
-(* nfa_table -> unit NfaGraph.T *)
 fun graph_for_nfa nfa =
   let
-    (* typ -> unit NfaGraph.T -> unit NfaGraph.T *)
     fun new_node T = perhaps (try (NfaGraph.new_node (T, ())))
-    (* nfa_table -> unit NfaGraph.T -> unit NfaGraph.T *)
     fun add_nfa [] = I
       | add_nfa ((_, []) :: nfa) = add_nfa nfa
       | add_nfa ((T, ((_, T') :: transitions)) :: nfa) =
@@ -819,25 +732,19 @@
         new_node T' o new_node T
   in add_nfa nfa NfaGraph.empty end
 
-(* nfa_table -> nfa_table list *)
 fun strongly_connected_sub_nfas nfa =
   nfa |> graph_for_nfa |> NfaGraph.strong_conn
       |> map (fn keys => filter (member (op =) keys o fst) nfa)
 
-(* kodkod_constrs -> nfa_table -> typ -> KK.formula *)
 fun acyclicity_axiom_for_datatype kk nfa start_T =
   #kk_no kk (#kk_intersect kk
                  (loop_path_rel_expr kk nfa (map fst nfa) start_T) KK.Iden)
-(* hol_context -> bool -> kodkod_constrs -> nut NameTable.table
-   -> dtype_spec list -> KK.formula list *)
 fun acyclicity_axioms_for_datatypes hol_ctxt binarize kk rel_table dtypes =
   map_filter (nfa_entry_for_datatype hol_ctxt binarize kk rel_table dtypes)
              dtypes
   |> strongly_connected_sub_nfas
   |> maps (fn nfa => map (acyclicity_axiom_for_datatype kk nfa o fst) nfa)
 
-(* hol_context -> bool -> int -> kodkod_constrs -> nut NameTable.table
-   -> KK.rel_expr -> constr_spec -> int -> KK.formula *)
 fun sel_axiom_for_sel hol_ctxt binarize j0
         (kk as {kk_all, kk_formula_if, kk_subset, kk_no, kk_join, ...})
         rel_table dom_r ({const, delta, epsilon, exclusive, ...} : constr_spec)
@@ -857,8 +764,6 @@
                               (kk_n_ary_function kk R2 r') (kk_no r'))
       end
   end
-(* hol_context -> bool -> int -> int -> kodkod_constrs -> nut NameTable.table
-   -> constr_spec -> KK.formula list *)
 fun sel_axioms_for_constr hol_ctxt binarize bits j0 kk rel_table
         (constr as {const, delta, epsilon, explicit_max, ...}) =
   let
@@ -885,19 +790,14 @@
             (index_seq 0 (num_sels_for_constr_type (snd const)))
       end
   end
-(* hol_context -> bool -> int -> int -> kodkod_constrs -> nut NameTable.table
-   -> dtype_spec -> KK.formula list *)
 fun sel_axioms_for_datatype hol_ctxt binarize bits j0 kk rel_table
                             ({constrs, ...} : dtype_spec) =
   maps (sel_axioms_for_constr hol_ctxt binarize bits j0 kk rel_table) constrs
 
-(* hol_context -> bool -> kodkod_constrs -> nut NameTable.table -> constr_spec
-   -> KK.formula list *)
 fun uniqueness_axiom_for_constr hol_ctxt binarize
         ({kk_all, kk_implies, kk_and, kk_rel_eq, kk_lone, kk_join, ...}
          : kodkod_constrs) rel_table ({const, ...} : constr_spec) =
   let
-    (* KK.rel_expr -> KK.formula *)
     fun conjunct_for_sel r =
       kk_rel_eq (kk_join (KK.Var (1, 0)) r) (kk_join (KK.Var (1, 1)) r)
     val num_sels = num_sels_for_constr_type (snd const)
@@ -915,16 +815,11 @@
                   (fold1 kk_and (map (conjunct_for_sel o #1) (tl triples)))
                   (kk_rel_eq (KK.Var (1, 0)) (KK.Var (1, 1))))
   end
-(* hol_context -> bool -> kodkod_constrs -> nut NameTable.table -> dtype_spec
-   -> KK.formula list *)
 fun uniqueness_axioms_for_datatype hol_ctxt binarize kk rel_table
                                    ({constrs, ...} : dtype_spec) =
   map (uniqueness_axiom_for_constr hol_ctxt binarize kk rel_table) constrs
 
-(* constr_spec -> int *)
 fun effective_constr_max ({delta, epsilon, ...} : constr_spec) = epsilon - delta
-(* int -> kodkod_constrs -> nut NameTable.table -> dtype_spec
-   -> KK.formula list *)
 fun partition_axioms_for_datatype j0 (kk as {kk_rel_eq, kk_union, ...})
                                   rel_table
                                   ({card, constrs, ...} : dtype_spec) =
@@ -936,8 +831,6 @@
        kk_disjoint_sets kk rs]
     end
 
-(* hol_context -> bool -> int -> int Typtab.table -> kodkod_constrs
-   -> nut NameTable.table -> dtype_spec -> KK.formula list *)
 fun other_axioms_for_datatype _ _ _ _ _ _ {deep = false, ...} = []
   | other_axioms_for_datatype hol_ctxt binarize bits ofs kk rel_table
                               (dtype as {typ, ...}) =
@@ -947,15 +840,12 @@
       partition_axioms_for_datatype j0 kk rel_table dtype
     end
 
-(* hol_context -> bool -> int -> int Typtab.table -> kodkod_constrs
-   -> nut NameTable.table -> dtype_spec list -> KK.formula list *)
 fun declarative_axioms_for_datatypes hol_ctxt binarize bits ofs kk rel_table
                                      dtypes =
   acyclicity_axioms_for_datatypes hol_ctxt binarize kk rel_table dtypes @
   maps (other_axioms_for_datatype hol_ctxt binarize bits ofs kk rel_table)
        dtypes
 
-(* int Typtab.table -> kodkod_constrs -> nut -> KK.formula *)
 fun kodkod_formula_from_nut ofs
         (kk as {kk_all, kk_exist, kk_formula_let, kk_formula_if, kk_or, kk_not,
                 kk_iff, kk_implies, kk_and, kk_subset, kk_rel_eq, kk_no,
@@ -970,17 +860,13 @@
     val false_atom = KK.Atom bool_j0
     val true_atom = KK.Atom (bool_j0 + 1)
 
-    (* polarity -> int -> KK.rel_expr -> KK.formula *)
     fun formula_from_opt_atom polar j0 r =
       case polar of
         Neg => kk_not (kk_rel_eq r (KK.Atom j0))
       | _ => kk_rel_eq r (KK.Atom (j0 + 1))
-    (* int -> KK.rel_expr -> KK.formula *)
     val formula_from_atom = formula_from_opt_atom Pos
 
-    (* KK.formula -> KK.formula -> KK.formula *)
     fun kk_notimplies f1 f2 = kk_and f1 (kk_not f2)
-    (* KK.rel_expr -> KK.rel_expr -> KK.rel_expr *)
     val kk_or3 =
       double_rel_rel_let kk
           (fn r1 => fn r2 =>
@@ -993,21 +879,15 @@
                         (kk_intersect r1 r2))
     fun kk_notimplies3 r1 r2 = kk_and3 r1 (kk_not3 r2)
 
-    (* int -> KK.rel_expr -> KK.formula list *)
     val unpack_formulas =
       map (formula_from_atom bool_j0) oo unpack_vect_in_chunks kk 1
-    (* (KK.formula -> KK.formula -> KK.formula) -> int -> KK.rel_expr
-       -> KK.rel_expr -> KK.rel_expr *)
     fun kk_vect_set_op connective k r1 r2 =
       fold1 kk_product (map2 (atom_from_formula kk bool_j0 oo connective)
                              (unpack_formulas k r1) (unpack_formulas k r2))
-    (* (KK.formula -> KK.formula -> KK.formula) -> int -> KK.rel_expr
-       -> KK.rel_expr -> KK.formula *)
     fun kk_vect_set_bool_op connective k r1 r2 =
       fold1 kk_and (map2 connective (unpack_formulas k r1)
                          (unpack_formulas k r2))
 
-    (* nut -> KK.formula *)
     fun to_f u =
       case rep_of u of
         Formula polar =>
@@ -1060,7 +940,6 @@
              else
                let
                  (* FIXME: merge with similar code below *)
-                 (* bool -> nut -> KK.rel_expr *)
                  fun set_to_r widen u =
                    if widen then
                      kk_difference (full_rel_for_rep dom_R)
@@ -1078,7 +957,6 @@
               kk_iff (to_f_with_polarity polar u1) (to_f_with_polarity polar u2)
             | min_R =>
               let
-                (* nut -> nut list *)
                 fun args (Op2 (Apply, _, _, u1, u2)) = u2 :: args u1
                   | args (Tuple (_, _, us)) = us
                   | args _ = []
@@ -1177,14 +1055,12 @@
          | _ => raise NUT ("Nitpick_Kodkod.to_f", [u]))
       | Atom (2, j0) => formula_from_atom j0 (to_r u)
       | _ => raise NUT ("Nitpick_Kodkod.to_f", [u])
-    (* polarity -> nut -> KK.formula *)
     and to_f_with_polarity polar u =
       case rep_of u of
         Formula _ => to_f u
       | Atom (2, j0) => formula_from_atom j0 (to_r u)
       | Opt (Atom (2, j0)) => formula_from_opt_atom polar j0 (to_r u)
       | _ => raise NUT ("Nitpick_Kodkod.to_f_with_polarity", [u])
-    (* nut -> KK.rel_expr *)
     and to_r u =
       case u of
         Cst (False, _, Atom _) => false_atom
@@ -1523,7 +1399,6 @@
            | Opt (Atom (2, _)) =>
              let
                (* FIXME: merge with similar code above *)
-               (* rep -> rep -> nut -> KK.rel_expr *)
                fun must R1 R2 u =
                  kk_join (to_rep (Func (Struct [R1, R2], body_R)) u) true_atom
                fun may R1 R2 u =
@@ -1558,9 +1433,7 @@
                         (to_rep (Func (b_R, Formula Neut)) u2)
            | Opt (Atom (2, _)) =>
              let
-               (* KK.rel_expr -> rep -> nut -> KK.rel_expr *)
                fun do_nut r R u = kk_join (to_rep (Func (R, body_R)) u) r
-               (* KK.rel_expr -> KK.rel_expr *)
                fun do_term r =
                  kk_product (kk_product (do_nut r a_R u1) (do_nut r b_R u2)) r
              in kk_union (do_term true_atom) (do_term false_atom) end
@@ -1572,7 +1445,6 @@
            (Func (R11, R12), Func (R21, Formula Neut)) =>
            if R21 = R11 andalso is_lone_rep R12 then
              let
-               (* KK.rel_expr -> KK.rel_expr *)
                fun big_join r = kk_n_fold_join kk false R21 R12 r (to_r u1)
                val core_r = big_join (to_r u2)
                val core_R = Func (R12, Formula Neut)
@@ -1666,39 +1538,32 @@
       | FreeRel (x, _, _, _) => KK.Rel x
       | RelReg (j, _, R) => KK.RelReg (arity_of_rep R, j)
       | u => raise NUT ("Nitpick_Kodkod.to_r", [u])
-    (* nut -> KK.decl *)
     and to_decl (BoundRel (x, _, R, _)) =
         KK.DeclOne (x, KK.AtomSeq (the_single (atom_schema_of_rep R)))
       | to_decl u = raise NUT ("Nitpick_Kodkod.to_decl", [u])
-    (* nut -> KK.expr_assign *)
     and to_expr_assign (FormulaReg (j, _, _)) u =
         KK.AssignFormulaReg (j, to_f u)
       | to_expr_assign (RelReg (j, _, R)) u =
         KK.AssignRelReg ((arity_of_rep R, j), to_r u)
       | to_expr_assign u1 _ = raise NUT ("Nitpick_Kodkod.to_expr_assign", [u1])
-    (* int * int -> nut -> KK.rel_expr *)
     and to_atom (x as (k, j0)) u =
       case rep_of u of
         Formula _ => atom_from_formula kk j0 (to_f u)
       | Unit => if k = 1 then KK.Atom j0
                 else raise NUT ("Nitpick_Kodkod.to_atom", [u])
       | R => atom_from_rel_expr kk x R (to_r u)
-    (* rep list -> nut -> KK.rel_expr *)
     and to_struct Rs u =
       case rep_of u of
         Unit => full_rel_for_rep (Struct Rs)
       | R' => struct_from_rel_expr kk Rs R' (to_r u)
-    (* int -> rep -> nut -> KK.rel_expr *)
     and to_vect k R u =
       case rep_of u of
         Unit => full_rel_for_rep (Vect (k, R))
       | R' => vect_from_rel_expr kk k R R' (to_r u)
-    (* rep -> rep -> nut -> KK.rel_expr *)
     and to_func R1 R2 u =
       case rep_of u of
         Unit => full_rel_for_rep (Func (R1, R2))
       | R' => rel_expr_to_func kk R1 R2 R' (to_r u)
-    (* rep -> nut -> KK.rel_expr *)
     and to_opt R u =
       let val old_R = rep_of u in
         if is_opt_rep old_R then
@@ -1706,16 +1571,13 @@
         else
           to_rep R u
       end
-    (* rep -> nut -> KK.rel_expr *)
     and to_rep (Atom x) u = to_atom x u
       | to_rep (Struct Rs) u = to_struct Rs u
       | to_rep (Vect (k, R)) u = to_vect k R u
       | to_rep (Func (R1, R2)) u = to_func R1 R2 u
       | to_rep (Opt R) u = to_opt R u
       | to_rep R _ = raise REP ("Nitpick_Kodkod.to_rep", [R])
-    (* nut -> KK.rel_expr *)
     and to_integer u = to_opt (one_rep ofs (type_of u) (rep_of u)) u
-    (* nut list -> rep -> KK.rel_expr -> KK.rel_expr *)
     and to_guard guard_us R r =
       let
         val unpacked_rs = unpack_joins r
@@ -1733,16 +1595,13 @@
         if null guard_fs then r
         else kk_rel_if (fold1 kk_or guard_fs) (empty_rel_for_rep R) r
       end
-    (* rep -> rep -> KK.rel_expr -> int -> KK.rel_expr *)
     and to_project new_R old_R r j0 =
       rel_expr_from_rel_expr kk new_R old_R
                              (kk_project_seq r j0 (arity_of_rep old_R))
-    (* rep list -> nut list -> KK.rel_expr *)
     and to_product Rs us =
       case map (uncurry to_opt) (filter (not_equal Unit o fst) (Rs ~~ us)) of
         [] => raise REP ("Nitpick_Kodkod.to_product", Rs)
       | rs => fold1 kk_product rs
-    (* int -> typ -> rep -> nut -> KK.rel_expr *)
     and to_nth_pair_sel n res_T res_R u =
       case u of
         Tuple (_, _, us) => to_rep res_R (nth us n)
@@ -1770,9 +1629,6 @@
                                (to_rep res_R (Cst (Unity, res_T, Unit)))
                | _ => to_project res_R nth_R (to_rep (Opt (Struct Rs)) u) j0
              end
-    (* (KK.formula -> KK.formula -> KK.formula)
-       -> (KK.rel_expr -> KK.rel_expr -> KK.formula) -> nut -> nut
-       -> KK.formula *)
     and to_set_bool_op connective set_oper u1 u2 =
       let
         val min_R = min_rep (rep_of u1) (rep_of u2)
@@ -1788,12 +1644,6 @@
                                        (kk_join r2 true_atom)
         | _ => raise REP ("Nitpick_Kodkod.to_set_bool_op", [min_R])
       end
-    (* (KK.formula -> KK.formula -> KK.formula)
-       -> (KK.rel_expr -> KK.rel_expr -> KK.rel_expr)
-       -> (KK.rel_expr -> KK.rel_expr -> KK.formula)
-       -> (KK.rel_expr -> KK.rel_expr -> KK.formula)
-       -> (KK.rel_expr -> KK.rel_expr -> KK.formula) -> bool -> rep -> nut
-       -> nut -> KK.rel_expr *)
     and to_set_op connective connective3 set_oper true_set_oper false_set_oper
                   neg_second R u1 u2 =
       let
@@ -1825,11 +1675,9 @@
                    r1 r2
              | _ => raise REP ("Nitpick_Kodkod.to_set_op", [min_R]))
       end
-    (* typ -> rep -> (KK.int_expr -> KK.int_expr) -> KK.rel_expr *)
     and to_bit_word_unary_op T R oper =
       let
         val Ts = strip_type T ||> single |> op @
-        (* int -> KK.int_expr *)
         fun int_arg j = int_expr_from_atom kk (nth Ts j) (KK.Var (1, j))
       in
         kk_comprehension (decls_for_atom_schema 0 (atom_schema_of_rep R))
@@ -1837,12 +1685,9 @@
                  (map (fn j => KK.AssignIntReg (j, int_arg j)) (0 upto 1),
                   KK.IntEq (KK.IntReg 1, oper (KK.IntReg 0))))
       end
-    (* typ -> rep -> (KK.int_expr -> KK.int_expr -> KK.int_expr -> bool) option
-       -> (KK.int_expr -> KK.int_expr -> KK.int_expr) option -> KK.rel_expr *)
     and to_bit_word_binary_op T R opt_guard opt_oper =
       let
         val Ts = strip_type T ||> single |> op @
-        (* int -> KK.int_expr *)
         fun int_arg j = int_expr_from_atom kk (nth Ts j) (KK.Var (1, j))
       in
         kk_comprehension (decls_for_atom_schema 0 (atom_schema_of_rep R))
@@ -1859,7 +1704,6 @@
                             [KK.IntEq (KK.IntReg 2,
                                        oper (KK.IntReg 0) (KK.IntReg 1))]))))
       end
-    (* rep -> rep -> KK.rel_expr -> nut -> KK.rel_expr *)
     and to_apply (R as Formula _) func_u arg_u =
         raise REP ("Nitpick_Kodkod.to_apply", [R])
       | to_apply res_R func_u arg_u =
@@ -1896,7 +1740,6 @@
               (kk_n_fold_join kk true R1 R2 (to_opt R1 arg_u) (to_r func_u))
           |> body_rep R2 = Formula Neut ? to_guard [arg_u] res_R
         | _ => raise NUT ("Nitpick_Kodkod.to_apply", [func_u])
-    (* int -> rep -> rep -> KK.rel_expr -> nut *)
     and to_apply_vect k R' res_R func_r arg_u =
       let
         val arg_R = one_rep ofs (type_of arg_u) (unopt_rep (rep_of arg_u))
@@ -1906,10 +1749,8 @@
         kk_case_switch kk arg_R res_R (to_opt arg_R arg_u)
                        (all_singletons_for_rep arg_R) vect_rs
       end
-    (* bool -> nut -> KK.formula *)
     and to_could_be_unrep neg u =
       if neg andalso is_opt_rep (rep_of u) then kk_no (to_r u) else KK.False
-    (* nut -> KK.rel_expr -> KK.rel_expr *)
     and to_compare_with_unrep u r =
       if is_opt_rep (rep_of u) then
         kk_rel_if (kk_some (to_r u)) r (empty_rel_for_rep (rep_of u))
--- a/src/HOL/Tools/Nitpick/nitpick_model.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_model.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -12,10 +12,10 @@
   type rep = Nitpick_Rep.rep
   type nut = Nitpick_Nut.nut
 
-  type params = {
-    show_skolems: bool,
-    show_datatypes: bool,
-    show_consts: bool}
+  type params =
+    {show_datatypes: bool,
+     show_consts: bool}
+
   type term_postprocessor =
     Proof.context -> string -> (typ -> term list) -> typ -> term -> term
 
@@ -51,10 +51,9 @@
 
 structure KK = Kodkod
 
-type params = {
-  show_skolems: bool,
-  show_datatypes: bool,
-  show_consts: bool}
+type params =
+  {show_datatypes: bool,
+   show_consts: bool}
 
 type term_postprocessor =
   Proof.context -> string -> (typ -> term list) -> typ -> term -> term
@@ -63,7 +62,7 @@
   type T = (typ * term_postprocessor) list
   val empty = []
   val extend = I
-  fun merge (ps1, ps2) = AList.merge (op =) (K true) (ps1, ps2))
+  val merge = AList.merge (op =) (K true))
 
 val irrelevant = "_"
 val unknown = "?"
@@ -81,10 +80,8 @@
 
 type atom_pool = ((string * int) * int list) list
 
-(* Proof.context -> ((string * string) * (string * string)) * Proof.context *)
 fun add_wacky_syntax ctxt =
   let
-    (* term -> string *)
     val name_of = fst o dest_Const
     val thy = ProofContext.theory_of ctxt |> Context.reject_draft
     val (maybe_t, thy) =
@@ -106,7 +103,6 @@
 
 (** Term reconstruction **)
 
-(* atom_pool Unsynchronized.ref -> string -> int -> int -> string *)
 fun nth_atom_suffix pool s j k =
   (case AList.lookup (op =) (!pool) (s, k) of
      SOME js =>
@@ -118,7 +114,6 @@
   |> nat_subscript
   |> (s <> "" andalso Symbol.is_ascii_digit (List.last (explode s)))
      ? prefix "\<^isub>,"
-(* atom_pool Unsynchronized.ref -> string -> typ -> int -> int -> string *)
 fun nth_atom_name pool prefix (Type (s, _)) j k =
     let val s' = shortest_name s in
       prefix ^ (if String.isPrefix "\\" s' then s' else substring (s', 0, 1)) ^
@@ -128,18 +123,15 @@
     prefix ^ perhaps (try (unprefix "'")) s ^ nth_atom_suffix pool s j k
   | nth_atom_name _ _ T _ _ =
     raise TYPE ("Nitpick_Model.nth_atom_name", [T], [])
-(* atom_pool Unsynchronized.ref -> bool -> typ -> int -> int -> term *)
 fun nth_atom pool for_auto T j k =
   if for_auto then
     Free (nth_atom_name pool (hd (space_explode "." nitpick_prefix)) T j k, T)
   else
     Const (nth_atom_name pool "" T j k, T)
 
-(* term -> real *)
 fun extract_real_number (Const (@{const_name divide}, _) $ t1 $ t2) =
     real (snd (HOLogic.dest_number t1)) / real (snd (HOLogic.dest_number t2))
   | extract_real_number t = real (snd (HOLogic.dest_number t))
-(* term * term -> order *)
 fun nice_term_ord (Abs (_, _, t1), Abs (_, _, t2)) = nice_term_ord (t1, t2)
   | nice_term_ord tp = Real.compare (pairself extract_real_number tp)
     handle TERM ("dest_number", _) =>
@@ -150,16 +142,12 @@
               | ord => ord)
            | _ => Term_Ord.fast_term_ord tp
 
-(* typ -> term_postprocessor -> theory -> theory *)
 fun register_term_postprocessor T p = Data.map (cons (T, p))
-(* typ -> theory -> theory *)
 fun unregister_term_postprocessor T = Data.map (AList.delete (op =) T)
 
-(* nut NameTable.table -> KK.raw_bound list -> nut -> int list list *)
 fun tuple_list_for_name rel_table bounds name =
   the (AList.lookup (op =) bounds (the_rel rel_table name)) handle NUT _ => [[]]
 
-(* term -> term *)
 fun unarize_unbox_etc_term (Const (@{const_name FinFun}, _) $ t1) =
     unarize_unbox_etc_term t1
   | unarize_unbox_etc_term (Const (@{const_name FunBox}, _) $ t1) =
@@ -184,7 +172,6 @@
   | unarize_unbox_etc_term (Abs (s, T, t')) =
     Abs (s, uniterize_unarize_unbox_etc_type T, unarize_unbox_etc_term t')
 
-(* typ -> typ -> (typ * typ) * (typ * typ) *)
 fun factor_out_types (T1 as Type (@{type_name "*"}, [T11, T12]))
                      (T2 as Type (@{type_name "*"}, [T21, T22])) =
     let val (n1, n2) = pairself num_factors_in_type (T11, T21) in
@@ -209,25 +196,20 @@
     ((T1, NONE), (T21, SOME T22))
   | factor_out_types T1 T2 = ((T1, NONE), (T2, NONE))
 
-(* bool -> typ -> typ -> (term * term) list -> term *)
 fun make_plain_fun maybe_opt T1 T2 =
   let
-    (* typ -> typ -> (term * term) list -> term *)
     fun aux T1 T2 [] =
         Const (if maybe_opt then opt_flag else non_opt_flag, T1 --> T2)
       | aux T1 T2 ((t1, t2) :: tps) =
         Const (@{const_name fun_upd}, (T1 --> T2) --> T1 --> T2 --> T1 --> T2)
         $ aux T1 T2 tps $ t1 $ t2
   in aux T1 T2 o rev end
-(* term -> bool *)
 fun is_plain_fun (Const (s, _)) = (s = opt_flag orelse s = non_opt_flag)
   | is_plain_fun (Const (@{const_name fun_upd}, _) $ t0 $ _ $ _) =
     is_plain_fun t0
   | is_plain_fun _ = false
-(* term -> bool * (term list * term list) *)
 val dest_plain_fun =
   let
-    (* term -> bool * (term list * term list) *)
     fun aux (Abs (_, _, Const (s, _))) = (s <> irrelevant, ([], []))
       | aux (Const (s, _)) = (s <> non_opt_flag, ([], []))
       | aux (Const (@{const_name fun_upd}, _) $ t0 $ t1 $ t2) =
@@ -237,7 +219,6 @@
       | aux t = raise TERM ("Nitpick_Model.dest_plain_fun", [t])
   in apsnd (pairself rev) o aux end
 
-(* typ -> typ -> typ -> term -> term * term *)
 fun break_in_two T T1 T2 t =
   let
     val ps = HOLogic.flat_tupleT_paths T
@@ -245,7 +226,6 @@
     val (ps1, ps2) = pairself HOLogic.flat_tupleT_paths (T1, T2)
     val (ts1, ts2) = t |> HOLogic.strip_ptuple ps |> chop cut
   in (HOLogic.mk_ptuple ps1 T1 ts1, HOLogic.mk_ptuple ps2 T2 ts2) end
-(* typ -> term -> term -> term *)
 fun pair_up (Type (@{type_name "*"}, [T1', T2']))
             (t1 as Const (@{const_name Pair},
                           Type (@{type_name fun},
@@ -254,13 +234,10 @@
     if T1 = T1' then HOLogic.mk_prod (t1, t2)
     else HOLogic.mk_prod (t11, pair_up T2' t12 t2)
   | pair_up _ t1 t2 = HOLogic.mk_prod (t1, t2)
-(* typ -> term -> term list * term list -> (term * term) list*)
 fun multi_pair_up T1 t1 (ts2, ts3) = map2 (pair o pair_up T1 t1) ts2 ts3
 
-(* typ -> typ -> typ -> term -> term *)
 fun typecast_fun (Type (@{type_name fun}, [T1', T2'])) T1 T2 t =
     let
-      (* typ -> typ -> typ -> typ -> term -> term *)
       fun do_curry T1 T1a T1b T2 t =
         let
           val (maybe_opt, tsp) = dest_plain_fun t
@@ -270,7 +247,6 @@
                 |> AList.coalesce (op =)
                 |> map (apsnd (make_plain_fun maybe_opt T1b T2))
         in make_plain_fun maybe_opt T1a (T1b --> T2) tps end
-      (* typ -> typ -> term -> term *)
       and do_uncurry T1 T2 t =
         let
           val (maybe_opt, tsp) = dest_plain_fun t
@@ -279,7 +255,6 @@
                 |> maps (fn (t1, t2) =>
                             multi_pair_up T1 t1 (snd (dest_plain_fun t2)))
         in make_plain_fun maybe_opt T1 T2 tps end
-      (* typ -> typ -> typ -> typ -> term -> term *)
       and do_arrow T1' T2' _ _ (Const (s, _)) = Const (s, T1' --> T2')
         | do_arrow T1' T2' T1 T2
                    (Const (@{const_name fun_upd}, _) $ t0 $ t1 $ t2) =
@@ -296,7 +271,6 @@
         | ((T1a', SOME T1b'), (_, NONE)) =>
           t |> do_arrow T1a' (T1b' --> T2') T1 T2 |> do_uncurry T1' T2'
         | _ => raise TYPE ("Nitpick_Model.typecast_fun.do_fun", [T1, T1'], [])
-      (* typ -> typ -> term -> term *)
       and do_term (Type (@{type_name fun}, [T1', T2']))
                   (Type (@{type_name fun}, [T1, T2])) t =
           do_fun T1' T2' T1 T2 t
@@ -312,33 +286,25 @@
   | typecast_fun T' _ _ _ =
     raise TYPE ("Nitpick_Model.typecast_fun", [T'], [])
 
-(* term -> string *)
 fun truth_const_sort_key @{const True} = "0"
   | truth_const_sort_key @{const False} = "2"
   | truth_const_sort_key _ = "1"
 
-(* typ -> term list -> term *)
 fun mk_tuple (Type (@{type_name "*"}, [T1, T2])) ts =
     HOLogic.mk_prod (mk_tuple T1 ts,
         mk_tuple T2 (List.drop (ts, length (HOLogic.flatten_tupleT T1))))
   | mk_tuple _ (t :: _) = t
   | mk_tuple T [] = raise TYPE ("Nitpick_Model.mk_tuple", [T], [])
 
-(* theory -> typ * typ -> bool *)
 fun varified_type_match thy (candid_T, pat_T) =
   strict_type_match thy (candid_T, Logic.varifyT_global pat_T)
 
-(* atom_pool -> (string * string) * (string * string) -> scope -> nut list
-   -> nut list -> nut list -> nut NameTable.table -> KK.raw_bound list -> typ
-   -> term list *)
 fun all_values_of_type pool wacky_names (scope as {card_assigns, ...} : scope)
                        sel_names rel_table bounds card T =
   let
     val card = if card = 0 then card_of_type card_assigns T else card
-    (* nat -> term *)
     fun nth_value_of_type n =
       let
-        (* bool -> term *)
         fun term unfold =
           reconstruct_term unfold pool wacky_names scope sel_names rel_table
                            bounds T T (Atom (card, 0)) [[n]]
@@ -352,15 +318,11 @@
         | t => t
       end
   in index_seq 0 card |> map nth_value_of_type |> sort nice_term_ord end
-(* bool -> atom_pool -> (string * string) * (string * string) -> scope
-   -> nut list -> nut list -> nut list -> nut NameTable.table
-   -> KK.raw_bound list -> typ -> typ -> rep -> int list list -> term *)
 and reconstruct_term unfold pool (wacky_names as ((maybe_name, abs_name), _))
         (scope as {hol_ctxt as {ctxt, thy, stds, ...}, binarize, card_assigns,
                    bits, datatypes, ofs, ...}) sel_names rel_table bounds =
   let
     val for_auto = (maybe_name = "")
-    (* int list list -> int *)
     fun value_of_bits jss =
       let
         val j0 = offset_of_type ofs @{typ unsigned_bit}
@@ -369,10 +331,8 @@
         fold (fn j => Integer.add (reasonable_power 2 j |> j = bits ? op ~))
              js 0
       end
-    (* typ -> term list *)
     val all_values =
       all_values_of_type pool wacky_names scope sel_names rel_table bounds 0
-    (* typ -> term -> term *)
     fun postprocess_term (Type (@{type_name fun}, _)) = I
       | postprocess_term T =
         if null (Data.get thy) then
@@ -380,7 +340,6 @@
         else case AList.lookup (varified_type_match thy) (Data.get thy) T of
           SOME postproc => postproc ctxt maybe_name all_values T
         | NONE => I
-    (* typ list -> term -> term *)
     fun postprocess_subterms Ts (t1 $ t2) =
         let val t = postprocess_subterms Ts t1 $ postprocess_subterms Ts t2 in
           postprocess_term (fastype_of1 (Ts, t)) t
@@ -388,13 +347,11 @@
       | postprocess_subterms Ts (Abs (s, T, t')) =
         Abs (s, T, postprocess_subterms (T :: Ts) t')
       | postprocess_subterms Ts t = postprocess_term (fastype_of1 (Ts, t)) t
-    (* bool -> typ -> typ -> (term * term) list -> term *)
     fun make_set maybe_opt T1 T2 tps =
       let
         val empty_const = Const (@{const_abbrev Set.empty}, T1 --> T2)
         val insert_const = Const (@{const_name insert},
                                   T1 --> (T1 --> T2) --> T1 --> T2)
-        (* (term * term) list -> term *)
         fun aux [] =
             if maybe_opt andalso not (is_complete_type datatypes false T1) then
               insert_const $ Const (unrep, T1) $ empty_const
@@ -415,12 +372,10 @@
         else
           aux tps
       end
-    (* bool -> typ -> typ -> typ -> (term * term) list -> term *)
     fun make_map maybe_opt T1 T2 T2' =
       let
         val update_const = Const (@{const_name fun_upd},
                                   (T1 --> T2) --> T1 --> T2 --> T1 --> T2)
-        (* (term * term) list -> term *)
         fun aux' [] = Const (@{const_abbrev Map.empty}, T1 --> T2)
           | aux' ((t1, t2) :: tps) =
             (case t2 of
@@ -433,7 +388,6 @@
           else
             aux' tps
       in aux end
-    (* typ list -> term -> term *)
     fun polish_funs Ts t =
       (case fastype_of1 (Ts, t) of
          Type (@{type_name fun}, [T1, T2]) =>
@@ -474,7 +428,6 @@
                else
                  t
              | t => t
-    (* bool -> typ -> typ -> typ -> term list -> term list -> term *)
     fun make_fun maybe_opt T1 T2 T' ts1 ts2 =
       ts1 ~~ ts2 |> sort (nice_term_ord o pairself fst)
                  |> make_plain_fun maybe_opt T1 T2
@@ -482,7 +435,6 @@
                  |> typecast_fun (uniterize_unarize_unbox_etc_type T')
                                  (uniterize_unarize_unbox_etc_type T1)
                                  (uniterize_unarize_unbox_etc_type T2)
-    (* (typ * int) list -> typ -> typ -> int -> term *)
     fun term_for_atom seen (T as Type (@{type_name fun}, [T1, T2])) T' j _ =
         let
           val k1 = card_of_type card_assigns T1
@@ -523,10 +475,8 @@
         | SOME {deep = false, ...} => nth_atom pool for_auto T j k
         | SOME {co, standard, constrs, ...} =>
           let
-            (* styp -> int list *)
             fun tuples_for_const (s, T) =
               tuple_list_for_name rel_table bounds (ConstName (s, T, Any))
-            (* unit -> term *)
             fun cyclic_atom () =
               nth_atom pool for_auto (Type (cyclic_type_name, [])) j k
             fun cyclic_var () = Var ((nth_atom_name pool "" T j k, 0), T)
@@ -615,14 +565,11 @@
                   t
               end
           end
-    (* (typ * int) list -> int -> rep -> typ -> typ -> typ -> int list
-       -> term *)
     and term_for_vect seen k R T1 T2 T' js =
       make_fun true T1 T2 T'
                (map (fn j => term_for_atom seen T1 T1 j k) (index_seq 0 k))
                (map (term_for_rep true seen T2 T2 R o single)
                     (batch_list (arity_of_rep R) js))
-    (* bool -> (typ * int) list -> typ -> typ -> rep -> int list list -> term *)
     and term_for_rep _ seen T T' Unit [[]] = term_for_atom seen T T' 0 1
       | term_for_rep _ seen T T' (R as Atom (k, j0)) [[j]] =
         if j >= j0 andalso j < j0 + k then term_for_atom seen T T' (j - j0) k
@@ -674,14 +621,12 @@
 
 (** Constant postprocessing **)
 
-(* int -> typ -> typ list *)
 fun dest_n_tuple_type 1 T = [T]
   | dest_n_tuple_type n (Type (_, [T1, T2])) =
     T1 :: dest_n_tuple_type (n - 1) T2
   | dest_n_tuple_type _ T =
     raise TYPE ("Nitpick_Model.dest_n_tuple_type", [T], [])
 
-(* theory -> const_table -> styp -> int list *)
 fun const_format thy def_table (x as (s, T)) =
   if String.isPrefix unrolled_prefix s then
     const_format thy def_table (original_name s, range_type T)
@@ -701,7 +646,6 @@
                else
                  [num_binder_types T]
   | NONE => [num_binder_types T]
-(* int list -> int list -> int list *)
 fun intersect_formats _ [] = []
   | intersect_formats [] _ = []
   | intersect_formats ks1 ks2 =
@@ -711,7 +655,6 @@
       [Int.min (k1, k2)]
     end
 
-(* theory -> const_table -> (term option * int list) list -> term -> int list *)
 fun lookup_format thy def_table formats t =
   case AList.lookup (fn (SOME x, SOME y) =>
                         (term_match thy) (x, y) | _ => false)
@@ -724,7 +667,6 @@
               | _ => format
             end
 
-(* int list -> int list -> typ -> typ *)
 fun format_type default_format format T =
   let
     val T = uniterize_unarize_unbox_etc_type T
@@ -742,28 +684,22 @@
           |> map (HOLogic.mk_tupleT o rev)
       in List.foldl (op -->) body_T batched end
   end
-(* theory -> const_table -> (term option * int list) list -> term -> typ *)
 fun format_term_type thy def_table formats t =
   format_type (the (AList.lookup (op =) formats NONE))
               (lookup_format thy def_table formats t) (fastype_of t)
 
-(* int list -> int -> int list -> int list *)
 fun repair_special_format js m format =
   m - 1 downto 0 |> chunk_list_unevenly (rev format)
                  |> map (rev o filter_out (member (op =) js))
                  |> filter_out null |> map length |> rev
 
-(* hol_context -> string * string -> (term option * int list) list
-   -> styp -> term * typ *)
 fun user_friendly_const ({thy, evals, def_table, skolems, special_funs, ...}
                          : hol_context) (base_name, step_name) formats =
   let
     val default_format = the (AList.lookup (op =) formats NONE)
-    (* styp -> term * typ *)
     fun do_const (x as (s, T)) =
       (if String.isPrefix special_prefix s then
          let
-           (* term -> term *)
            val do_term = map_aterms (fn Const x => fst (do_const x) | t' => t')
            val (x' as (_, T'), js, ts) =
              AList.find (op =) (!special_funs) (s, unarize_unbox_etc_type T)
@@ -772,7 +708,6 @@
            val Ts = List.take (binder_types T', max_j + 1)
            val missing_js = filter_out (member (op =) js) (0 upto max_j)
            val missing_Ts = filter_indices missing_js Ts
-           (* int -> indexname *)
            fun nth_missing_var n =
              ((arg_var_prefix ^ nat_subscript (n + 1), 0), nth missing_Ts n)
            val missing_vars = map nth_missing_var (0 upto length missing_js - 1)
@@ -864,7 +799,6 @@
       |>> shorten_names_in_term |>> Term.map_abs_vars shortest_name
   in do_const end
 
-(* styp -> string *)
 fun assign_operator_for_const (s, T) =
   if String.isPrefix ubfp_prefix s then
     if is_fun_type T then "\<subseteq>" else "\<le>"
@@ -877,8 +811,6 @@
 
 (** Model reconstruction **)
 
-(* atom_pool -> scope -> nut list -> nut NameTable.table -> KK.raw_bound list
-   -> nut -> term *)
 fun term_for_name pool scope sel_names rel_table bounds name =
   let val T = type_of name in
     tuple_list_for_name rel_table bounds name
@@ -886,13 +818,11 @@
                         rel_table bounds T T (rep_of name)
   end
 
-(* term -> term *)
 fun unfold_outer_the_binders (t as Const (@{const_name The}, _)
                                    $ Abs (s, T, Const (@{const_name "op ="}, _)
                                                 $ Bound 0 $ t')) =
     betapply (Abs (s, T, t'), t) |> unfold_outer_the_binders
   | unfold_outer_the_binders t = t
-(* typ list -> int -> term * term -> bool *)
 fun bisimilar_values _ 0 _ = true
   | bisimilar_values coTs max_depth (t1, t2) =
     let val T = fastype_of t1 in
@@ -909,17 +839,14 @@
         t1 = t2
     end
 
-(* params -> scope -> (term option * int list) list -> styp list -> nut list
-  -> nut list -> nut list -> nut NameTable.table -> KK.raw_bound list
-  -> Pretty.T * bool *)
-fun reconstruct_hol_model {show_skolems, show_datatypes, show_consts}
+fun reconstruct_hol_model {show_datatypes, show_consts}
         ({hol_ctxt = {thy, ctxt, max_bisim_depth, boxes, stds, wfs, user_axioms,
                       debug, binary_ints, destroy_constrs, specialize,
-                      skolemize, star_linear_preds, uncurry, fast_descrs,
-                      tac_timeout, evals, case_names, def_table, nondef_table,
-                      user_nondefs, simp_table, psimp_table, choice_spec_table,
-                      intro_table, ground_thm_table, ersatz_table, skolems,
-                      special_funs, unrolled_preds, wf_cache, constr_cache},
+                      star_linear_preds, fast_descrs, tac_timeout, evals,
+                      case_names, def_table, nondef_table, user_nondefs,
+                      simp_table, psimp_table, choice_spec_table, intro_table,
+                      ground_thm_table, ersatz_table, skolems, special_funs,
+                      unrolled_preds, wf_cache, constr_cache},
          binarize, card_assigns, bits, bisim_depth, datatypes, ofs} : scope)
         formats all_frees free_names sel_names nonsel_names rel_table bounds =
   let
@@ -930,8 +857,7 @@
       {thy = thy, ctxt = ctxt, max_bisim_depth = max_bisim_depth, boxes = boxes,
        stds = stds, wfs = wfs, user_axioms = user_axioms, debug = debug,
        binary_ints = binary_ints, destroy_constrs = destroy_constrs,
-       specialize = specialize, skolemize = skolemize,
-       star_linear_preds = star_linear_preds, uncurry = uncurry,
+       specialize = specialize, star_linear_preds = star_linear_preds,
        fast_descrs = fast_descrs, tac_timeout = tac_timeout, evals = evals,
        case_names = case_names, def_table = def_table,
        nondef_table = nondef_table, user_nondefs = user_nondefs,
@@ -941,16 +867,13 @@
        skolems = skolems, special_funs = special_funs,
        unrolled_preds = unrolled_preds, wf_cache = wf_cache,
        constr_cache = constr_cache}
-    val scope = {hol_ctxt = hol_ctxt, binarize = binarize,
-                 card_assigns = card_assigns, bits = bits,
-                 bisim_depth = bisim_depth, datatypes = datatypes, ofs = ofs}
-    (* bool -> typ -> typ -> rep -> int list list -> term *)
+    val scope =
+      {hol_ctxt = hol_ctxt, binarize = binarize, card_assigns = card_assigns,
+       bits = bits, bisim_depth = bisim_depth, datatypes = datatypes, ofs = ofs}
     fun term_for_rep unfold =
       reconstruct_term unfold pool wacky_names scope sel_names rel_table bounds
-    (* nat -> typ -> nat -> term *)
     fun nth_value_of_type card T n =
       let
-        (* bool -> term *)
         fun aux unfold = term_for_rep unfold T T (Atom (card, 0)) [[n]]
       in
         case aux false of
@@ -961,10 +884,8 @@
             t
         | t => t
       end
-    (* nat -> typ -> term list *)
     val all_values =
       all_values_of_type pool wacky_names scope sel_names rel_table bounds
-    (* dtype_spec list -> dtype_spec -> bool *)
     fun is_codatatype_wellformed (cos : dtype_spec list)
                                  ({typ, card, ...} : dtype_spec) =
       let
@@ -974,7 +895,6 @@
         forall (not o bisimilar_values (map #typ cos) max_depth)
                (all_distinct_unordered_pairs_of ts)
       end
-    (* string -> Pretty.T *)
     fun pretty_for_assign name =
       let
         val (oper, (t1, T'), T) =
@@ -998,7 +918,6 @@
             [setmp_show_all_types (Syntax.pretty_term ctxt) t1,
              Pretty.str oper, Syntax.pretty_term ctxt t2])
       end
-    (* dtype_spec -> Pretty.T *)
     fun pretty_for_datatype ({typ, card, complete, ...} : dtype_spec) =
       Pretty.block (Pretty.breaks
           (Syntax.pretty_typ ctxt (uniterize_unarize_unbox_etc_type typ) ::
@@ -1012,7 +931,6 @@
                 (map (Syntax.pretty_term ctxt) (all_values card typ) @
                  (if fun_from_pair complete false then []
                   else [Pretty.str unrep]))]))
-    (* typ -> dtype_spec list *)
     fun integer_datatype T =
       [{typ = T, card = card_of_type card_assigns T, co = false,
         standard = true, complete = (false, false), concrete = (true, true),
@@ -1035,7 +953,6 @@
                          (map pretty_for_datatype codatatypes)]
       else
         []
-    (* bool -> string -> nut list -> Pretty.T list *)
     fun block_of_names show title names =
       if show andalso not (null names) then
         Pretty.str (title ^ plural_s_for_list names ^ ":")
@@ -1062,7 +979,7 @@
               | _ => raise TERM ("Nitpick_Model.reconstruct_hol_model",
                                  [Const x])) all_frees
     val chunks = block_of_names true "Free variable" free_names @
-                 block_of_names show_skolems "Skolem constant" skolem_names @
+                 block_of_names true "Skolem constant" skolem_names @
                  block_of_names true "Evaluated term" eval_names @
                  block_of_datatypes @ block_of_codatatypes @
                  block_of_names show_consts "Constant"
@@ -1074,17 +991,13 @@
      forall (is_codatatype_wellformed codatatypes) codatatypes)
   end
 
-(* scope -> Time.time option -> nut list -> nut list -> nut NameTable.table
-   -> KK.raw_bound list -> term -> bool option *)
 fun prove_hol_model (scope as {hol_ctxt = {thy, ctxt, debug, ...},
                                card_assigns, ...})
                     auto_timeout free_names sel_names rel_table bounds prop =
   let
     val pool = Unsynchronized.ref []
-    (* typ * int -> term *)
     fun free_type_assm (T, k) =
       let
-        (* int -> term *)
         fun atom j = nth_atom pool true T j k
         fun equation_for_atom j = HOLogic.eq_const T $ Bound 0 $ atom j
         val eqs = map equation_for_atom (index_seq 0 k)
@@ -1093,14 +1006,12 @@
               $ Abs ("x", T, foldl1 HOLogic.mk_disj eqs)
         val distinct_assm = distinctness_formula T (map atom (index_seq 0 k))
       in s_conj (compreh_assm, distinct_assm) end
-    (* nut -> term *)
     fun free_name_assm name =
       HOLogic.mk_eq (Free (nickname_of name, type_of name),
                      term_for_name pool scope sel_names rel_table bounds name)
     val freeT_assms = map free_type_assm (filter (is_TFree o fst) card_assigns)
     val model_assms = map free_name_assm free_names
     val assm = foldr1 s_conj (freeT_assms @ model_assms)
-    (* bool -> bool *)
     fun try_out negate =
       let
         val concl = (negate ? curry (op $) @{const Not})
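
The make_plain_fun, make_set and make_map code above rebuilds set, map and function values from lists of (argument, result) term pairs, rendering a map as a chain of fun_upd updates applied to Map.empty. A minimal stand-alone sketch of that reading in plain Standard ML; fun_upd and empty_map here are illustrative analogues of the HOL constants, not the Isabelle terms themselves:

(* functional update: a rebuilt term of the shape
   "fun_upd (fun_upd Map.empty 1 (Some a)) 2 (Some b)" denotes a value of this kind *)
fun fun_upd f x y = fn z => if z = x then y else f z

val empty_map : int -> string option = fn _ => NONE

val m = fun_upd (fun_upd empty_map 1 (SOME "a")) 2 (SOME "b")

val at_2 = m 2   (* SOME "b" *)
val at_3 = m 3   (* NONE *)
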
--- a/src/HOL/Tools/Nitpick/nitpick_mono.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_mono.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -54,55 +54,42 @@
 exception MTYPE of string * mtyp list * typ list
 exception MTERM of string * mterm list
 
-(* string -> unit *)
 fun print_g (_ : string) = ()
 (* val print_g = tracing *)
 
-(* var -> string *)
 val string_for_var = signed_string_of_int
-(* string -> var list -> string *)
 fun string_for_vars sep [] = "0\<^bsub>" ^ sep ^ "\<^esub>"
   | string_for_vars sep xs = space_implode sep (map string_for_var xs)
 fun subscript_string_for_vars sep xs =
   if null xs then "" else "\<^bsub>" ^ string_for_vars sep xs ^ "\<^esub>"
 
-(* sign -> string *)
 fun string_for_sign Plus = "+"
   | string_for_sign Minus = "-"
 
-(* sign -> sign -> sign *)
 fun xor sn1 sn2 = if sn1 = sn2 then Plus else Minus
-(* sign -> sign *)
 val negate = xor Minus
 
-(* sign_atom -> string *)
 fun string_for_sign_atom (S sn) = string_for_sign sn
   | string_for_sign_atom (V x) = string_for_var x
 
-(* literal -> string *)
 fun string_for_literal (x, sn) = string_for_var x ^ " = " ^ string_for_sign sn
 
 val bool_M = MType (@{type_name bool}, [])
 val dummy_M = MType (nitpick_prefix ^ "dummy", [])
 
-(* mtyp -> bool *)
 fun is_MRec (MRec _) = true
   | is_MRec _ = false
-(* mtyp -> mtyp * sign_atom * mtyp *)
 fun dest_MFun (MFun z) = z
   | dest_MFun M = raise MTYPE ("Nitpick_Mono.dest_MFun", [M], [])
 
 val no_prec = 100
 
-(* mtyp -> int *)
 fun precedence_of_mtype (MFun _) = 1
   | precedence_of_mtype (MPair _) = 2
   | precedence_of_mtype _ = no_prec
 
-(* mtyp -> string *)
 val string_for_mtype =
   let
-    (* int -> mtyp -> string *)
     fun aux outer_prec M =
       let
         val prec = precedence_of_mtype M
@@ -126,22 +113,17 @@
       end
   in aux 0 end
 
-(* mtyp -> mtyp list *)
 fun flatten_mtype (MPair (M1, M2)) = maps flatten_mtype [M1, M2]
   | flatten_mtype (MType (_, Ms)) = maps flatten_mtype Ms
   | flatten_mtype M = [M]
 
-(* mterm -> bool *)
 fun precedence_of_mterm (MRaw _) = no_prec
   | precedence_of_mterm (MAbs _) = 1
   | precedence_of_mterm (MApp _) = 2
 
-(* Proof.context -> mterm -> string *)
 fun string_for_mterm ctxt =
   let
-    (* mtype -> string *)
     fun mtype_annotation M = "\<^bsup>" ^ string_for_mtype M ^ "\<^esup>"
-    (* int -> mterm -> string *)
     fun aux outer_prec m =
       let
         val prec = precedence_of_mterm m
@@ -158,7 +140,6 @@
       end
   in aux 0 end
 
-(* mterm -> mtyp *)
 fun mtype_of_mterm (MRaw (_, M)) = M
   | mtype_of_mterm (MAbs (_, _, M, a, m)) = MFun (M, a, mtype_of_mterm m)
   | mtype_of_mterm (MApp (m1, _)) =
@@ -166,29 +147,24 @@
       MFun (_, _, M12) => M12
     | M1 => raise MTYPE ("Nitpick_Mono.mtype_of_mterm", [M1], [])
 
-(* mterm -> mterm * mterm list *)
 fun strip_mcomb (MApp (m1, m2)) = strip_mcomb m1 ||> (fn ms => append ms [m2])
   | strip_mcomb m = (m, [])
 
-(* hol_context -> bool -> bool -> typ -> mdata *)
 fun initial_mdata hol_ctxt binarize no_harmless alpha_T =
   ({hol_ctxt = hol_ctxt, binarize = binarize, alpha_T = alpha_T,
     no_harmless = no_harmless, max_fresh = Unsynchronized.ref 0,
     datatype_mcache = Unsynchronized.ref [],
     constr_mcache = Unsynchronized.ref []} : mdata)
 
-(* typ -> typ -> bool *)
 fun could_exist_alpha_subtype alpha_T (T as Type (_, Ts)) =
     T = alpha_T orelse (not (is_fp_iterator_type T) andalso
                         exists (could_exist_alpha_subtype alpha_T) Ts)
   | could_exist_alpha_subtype alpha_T T = (T = alpha_T)
-(* theory -> typ -> typ -> bool *)
 fun could_exist_alpha_sub_mtype _ (alpha_T as TFree _) T =
     could_exist_alpha_subtype alpha_T T
   | could_exist_alpha_sub_mtype thy alpha_T T =
     (T = alpha_T orelse is_datatype thy [(NONE, true)] T)
 
-(* mtyp -> bool *)
 fun exists_alpha_sub_mtype MAlpha = true
   | exists_alpha_sub_mtype (MFun (M1, _, M2)) =
     exists exists_alpha_sub_mtype [M1, M2]
@@ -197,7 +173,6 @@
   | exists_alpha_sub_mtype (MType (_, Ms)) = exists exists_alpha_sub_mtype Ms
   | exists_alpha_sub_mtype (MRec _) = true
 
-(* mtyp -> bool *)
 fun exists_alpha_sub_mtype_fresh MAlpha = true
   | exists_alpha_sub_mtype_fresh (MFun (_, V _, _)) = true
   | exists_alpha_sub_mtype_fresh (MFun (_, _, M2)) =
@@ -208,11 +183,9 @@
     exists exists_alpha_sub_mtype_fresh Ms
   | exists_alpha_sub_mtype_fresh (MRec _) = true
 
-(* string * typ list -> mtyp list -> mtyp *)
 fun constr_mtype_for_binders z Ms =
   fold_rev (fn M => curry3 MFun M (S Minus)) Ms (MRec z)
 
-(* ((string * typ list) * mtyp) list -> mtyp list -> mtyp -> mtyp *)
 fun repair_mtype _ _ MAlpha = MAlpha
   | repair_mtype cache seen (MFun (M1, a, M2)) =
     MFun (repair_mtype cache seen M1, a, repair_mtype cache seen M2)
@@ -226,30 +199,24 @@
     | M => if member (op =) seen M then MType (s, [])
            else repair_mtype cache (M :: seen) M
 
-(* ((string * typ list) * mtyp) list Unsynchronized.ref -> unit *)
 fun repair_datatype_mcache cache =
   let
-    (* (string * typ list) * mtyp -> unit *)
     fun repair_one (z, M) =
       Unsynchronized.change cache
           (AList.update (op =) (z, repair_mtype (!cache) [] M))
   in List.app repair_one (rev (!cache)) end
 
-(* (typ * mtyp) list -> (styp * mtyp) list Unsynchronized.ref -> unit *)
 fun repair_constr_mcache dtype_cache constr_mcache =
   let
-    (* styp * mtyp -> unit *)
     fun repair_one (x, M) =
       Unsynchronized.change constr_mcache
           (AList.update (op =) (x, repair_mtype dtype_cache [] M))
   in List.app repair_one (!constr_mcache) end
 
-(* typ -> bool *)
 fun is_fin_fun_supported_type @{typ prop} = true
   | is_fin_fun_supported_type @{typ bool} = true
   | is_fin_fun_supported_type (Type (@{type_name option}, _)) = true
   | is_fin_fun_supported_type _ = false
-(* typ -> typ -> term -> term option *)
 fun fin_fun_body _ _ (t as @{term False}) = SOME t
   | fin_fun_body _ _ (t as Const (@{const_name None}, _)) = SOME t
   | fin_fun_body dom_T ran_T
@@ -265,7 +232,6 @@
                 $ (Const (@{const_name unknown}, ran_T)) $ (t0 $ t1 $ t2 $ t3)))
   | fin_fun_body _ _ _ = NONE
 
-(* mdata -> bool -> typ -> typ -> mtyp * sign_atom * mtyp *)
 fun fresh_mfun_for_fun_type (mdata as {max_fresh, ...} : mdata) all_minus
                             T1 T2 =
   let
@@ -277,12 +243,10 @@
             else
               S Minus
   in (M1, a, M2) end
-(* mdata -> bool -> typ -> mtyp *)
 and fresh_mtype_for_type (mdata as {hol_ctxt as {thy, ...}, binarize, alpha_T,
                                     datatype_mcache, constr_mcache, ...})
                          all_minus =
   let
-    (* typ -> mtyp *)
     fun do_type T =
       if T = alpha_T then
         MAlpha
@@ -329,21 +293,17 @@
       | _ => MType (Refute.string_of_typ T, [])
   in do_type end
 
-(* mtyp -> mtyp list *)
 fun prodM_factors (MPair (M1, M2)) = maps prodM_factors [M1, M2]
   | prodM_factors M = [M]
-(* mtyp -> mtyp list * mtyp *)
 fun curried_strip_mtype (MFun (M1, _, M2)) =
     curried_strip_mtype M2 |>> append (prodM_factors M1)
   | curried_strip_mtype M = ([], M)
-(* string -> mtyp -> mtyp *)
 fun sel_mtype_from_constr_mtype s M =
   let val (arg_Ms, dataM) = curried_strip_mtype M in
     MFun (dataM, S Minus,
           case sel_no_from_name s of ~1 => bool_M | n => nth arg_Ms n)
   end
 
-(* mdata -> styp -> mtyp *)
 fun mtype_for_constr (mdata as {hol_ctxt = {thy, ...}, alpha_T, constr_mcache,
                                 ...}) (x as (_, T)) =
   if could_exist_alpha_sub_mtype thy alpha_T T then
@@ -362,14 +322,11 @@
   x |> binarized_and_boxed_constr_for_sel hol_ctxt binarize
     |> mtype_for_constr mdata |> sel_mtype_from_constr_mtype s
 
-(* literal list -> sign_atom -> sign_atom *)
 fun resolve_sign_atom lits (V x) =
     x |> AList.lookup (op =) lits |> Option.map S |> the_default (V x)
   | resolve_sign_atom _ a = a
-(* literal list -> mtyp -> mtyp *)
 fun resolve_mtype lits =
   let
-    (* mtyp -> mtyp *)
     fun aux MAlpha = MAlpha
       | aux (MFun (M1, a, M2)) = MFun (aux M1, resolve_sign_atom lits a, aux M2)
       | aux (MPair Mp) = MPair (pairself aux Mp)
@@ -384,24 +341,19 @@
 
 type constraint_set = literal list * comp list * sign_expr list
 
-(* comp_op -> string *)
 fun string_for_comp_op Eq = "="
   | string_for_comp_op Leq = "\<le>"
 
-(* sign_expr -> string *)
 fun string_for_sign_expr [] = "\<bot>"
   | string_for_sign_expr lits =
     space_implode " \<or> " (map string_for_literal lits)
 
-(* literal -> literal list option -> literal list option *)
 fun do_literal _ NONE = NONE
   | do_literal (x, sn) (SOME lits) =
     case AList.lookup (op =) lits x of
       SOME sn' => if sn = sn' then SOME lits else NONE
     | NONE => SOME ((x, sn) :: lits)
 
-(* comp_op -> var list -> sign_atom -> sign_atom -> literal list * comp list
-   -> (literal list * comp list) option *)
 fun do_sign_atom_comp Eq [] a1 a2 (accum as (lits, comps)) =
     (case (a1, a2) of
        (S sn1, S sn2) => if sn1 = sn2 then SOME accum else NONE
@@ -419,8 +371,6 @@
   | do_sign_atom_comp cmp xs a1 a2 (lits, comps) =
     SOME (lits, insert (op =) (a1, a2, cmp, xs) comps)
 
-(* comp -> var list -> mtyp -> mtyp -> (literal list * comp list) option
-   -> (literal list * comp list) option *)
 fun do_mtype_comp _ _ _ _ NONE = NONE
   | do_mtype_comp _ _ MAlpha MAlpha accum = accum
   | do_mtype_comp Eq xs (MFun (M11, a1, M12)) (MFun (M21, a2, M22))
@@ -450,7 +400,6 @@
     raise MTYPE ("Nitpick_Mono.do_mtype_comp (" ^ string_for_comp_op cmp ^ ")",
                  [M1, M2], [])
 
-(* comp_op -> mtyp -> mtyp -> constraint_set -> constraint_set *)
 fun add_mtype_comp cmp M1 M2 ((lits, comps, sexps) : constraint_set) =
     (print_g ("*** Add " ^ string_for_mtype M1 ^ " " ^ string_for_comp_op cmp ^
               " " ^ string_for_mtype M2);
@@ -458,12 +407,9 @@
        NONE => (print_g "**** Unsolvable"; raise UNSOLVABLE ())
      | SOME (lits, comps) => (lits, comps, sexps))
 
-(* mtyp -> mtyp -> constraint_set -> constraint_set *)
 val add_mtypes_equal = add_mtype_comp Eq
 val add_is_sub_mtype = add_mtype_comp Leq
 
-(* sign -> sign_expr -> mtyp -> (literal list * sign_expr list) option
-   -> (literal list * sign_expr list) option *)
 fun do_notin_mtype_fv _ _ _ NONE = NONE
   | do_notin_mtype_fv Minus _ MAlpha accum = accum
   | do_notin_mtype_fv Plus [] MAlpha _ = NONE
@@ -499,7 +445,6 @@
   | do_notin_mtype_fv _ _ M _ =
     raise MTYPE ("Nitpick_Mono.do_notin_mtype_fv", [M], [])
 
-(* sign -> mtyp -> constraint_set -> constraint_set *)
 fun add_notin_mtype_fv sn M ((lits, comps, sexps) : constraint_set) =
     (print_g ("*** Add " ^ string_for_mtype M ^ " is " ^
               (case sn of Minus => "concrete" | Plus => "complete") ^ ".");
@@ -507,31 +452,23 @@
        NONE => (print_g "**** Unsolvable"; raise UNSOLVABLE ())
      | SOME (lits, sexps) => (lits, comps, sexps))
 
-(* mtyp -> constraint_set -> constraint_set *)
 val add_mtype_is_concrete = add_notin_mtype_fv Minus
 val add_mtype_is_complete = add_notin_mtype_fv Plus
 
 val bool_from_minus = true
 
-(* sign -> bool *)
 fun bool_from_sign Plus = not bool_from_minus
   | bool_from_sign Minus = bool_from_minus
-(* bool -> sign *)
 fun sign_from_bool b = if b = bool_from_minus then Minus else Plus
 
-(* literal -> PropLogic.prop_formula *)
 fun prop_for_literal (x, sn) =
   (not (bool_from_sign sn) ? PropLogic.Not) (PropLogic.BoolVar x)
-(* sign_atom -> PropLogic.prop_formula *)
 fun prop_for_sign_atom_eq (S sn', sn) =
     if sn = sn' then PropLogic.True else PropLogic.False
   | prop_for_sign_atom_eq (V x, sn) = prop_for_literal (x, sn)
-(* sign_expr -> PropLogic.prop_formula *)
 fun prop_for_sign_expr xs = PropLogic.exists (map prop_for_literal xs)
-(* var list -> sign -> PropLogic.prop_formula *)
 fun prop_for_exists_eq xs sn =
   PropLogic.exists (map (fn x => prop_for_literal (x, sn)) xs)
-(* comp -> PropLogic.prop_formula *)
 fun prop_for_comp (a1, a2, Eq, []) =
     PropLogic.SAnd (prop_for_comp (a1, a2, Leq, []),
                     prop_for_comp (a2, a1, Leq, []))
@@ -541,7 +478,6 @@
   | prop_for_comp (a1, a2, cmp, xs) =
     PropLogic.SOr (prop_for_exists_eq xs Minus, prop_for_comp (a1, a2, cmp, []))
 
-(* var -> (int -> bool option) -> literal list -> literal list *)
 fun literals_from_assignments max_var assigns lits =
   fold (fn x => fn accum =>
            if AList.defined (op =) lits x then
@@ -550,18 +486,15 @@
              SOME b => (x, sign_from_bool b) :: accum
            | NONE => accum) (max_var downto 1) lits
 
-(* comp -> string *)
 fun string_for_comp (a1, a2, cmp, xs) =
   string_for_sign_atom a1 ^ " " ^ string_for_comp_op cmp ^
   subscript_string_for_vars " \<and> " xs ^ " " ^ string_for_sign_atom a2
 
-(* literal list -> comp list -> sign_expr list -> unit *)
 fun print_problem lits comps sexps =
   print_g ("*** Problem:\n" ^ cat_lines (map string_for_literal lits @
                                          map string_for_comp comps @
                                          map string_for_sign_expr sexps))
 
-(* literal list -> unit *)
 fun print_solution lits =
   let val (pos, neg) = List.partition (curry (op =) Plus o snd) lits in
     print_g ("*** Solution:\n" ^
@@ -569,10 +502,8 @@
              "-: " ^ commas (map (string_for_var o fst) neg))
   end
 
-(* var -> constraint_set -> literal list option *)
 fun solve max_var (lits, comps, sexps) =
     let
-      (* (int -> bool option) -> literal list option *)
       fun do_assigns assigns =
         SOME (literals_from_assignments max_var assigns lits
               |> tap print_solution)
@@ -607,27 +538,21 @@
 
 val initial_gamma = {bound_Ts = [], bound_Ms = [], frees = [], consts = []}
 
-(* typ -> mtyp -> mtype_context -> mtype_context *)
 fun push_bound T M {bound_Ts, bound_Ms, frees, consts} =
   {bound_Ts = T :: bound_Ts, bound_Ms = M :: bound_Ms, frees = frees,
    consts = consts}
-(* mtype_context -> mtype_context *)
 fun pop_bound {bound_Ts, bound_Ms, frees, consts} =
   {bound_Ts = tl bound_Ts, bound_Ms = tl bound_Ms, frees = frees,
    consts = consts}
   handle List.Empty => initial_gamma (* FIXME: needed? *)
 
-(* mdata -> term -> accumulator -> mterm * accumulator *)
 fun consider_term (mdata as {hol_ctxt as {thy, ctxt, stds, fast_descrs,
                                           def_table, ...},
                              alpha_T, max_fresh, ...}) =
   let
-    (* typ -> mtyp *)
     val mtype_for = fresh_mtype_for_type mdata false
-    (* mtyp -> mtyp *)
     fun plus_set_mtype_for_dom M =
       MFun (M, S (if exists_alpha_sub_mtype M then Plus else Minus), bool_M)
-    (* typ -> accumulator -> mterm * accumulator *)
     fun do_all T (gamma, cset) =
       let
         val abs_M = mtype_for (domain_type (domain_type T))
@@ -656,7 +581,6 @@
       let
         val set_T = domain_type T
         val set_M = mtype_for set_T
-        (* typ -> mtyp *)
         fun custom_mtype_for (T as Type (@{type_name fun}, [T1, T2])) =
             if T = set_T then set_M
             else MFun (custom_mtype_for T1, S Minus, custom_mtype_for T2)
@@ -664,20 +588,16 @@
       in
         (custom_mtype_for T, (gamma, cset |> add_mtype_is_concrete set_M))
       end
-    (* typ -> accumulator -> mtyp * accumulator *)
     fun do_pair_constr T accum =
       case mtype_for (nth_range_type 2 T) of
         M as MPair (a_M, b_M) =>
         (MFun (a_M, S Minus, MFun (b_M, S Minus, M)), accum)
       | M => raise MTYPE ("Nitpick_Mono.consider_term.do_pair_constr", [M], [])
-    (* int -> typ -> accumulator -> mtyp * accumulator *)
     fun do_nth_pair_sel n T =
       case mtype_for (domain_type T) of
         M as MPair (a_M, b_M) =>
         pair (MFun (M, S Minus, if n = 0 then a_M else b_M))
       | M => raise MTYPE ("Nitpick_Mono.consider_term.do_nth_pair_sel", [M], [])
-    (* term -> string -> typ -> term -> term -> term -> accumulator
-       -> mterm * accumulator *)
     fun do_bounded_quantifier t0 abs_s abs_T connective_t bound_t body_t accum =
       let
         val abs_M = mtype_for abs_T
@@ -697,7 +617,6 @@
                                  MApp (bound_m, MRaw (Bound 0, M1))),
                            body_m))), accum)
       end
-    (* term -> accumulator -> mterm * accumulator *)
     and do_term t (accum as (gamma as {bound_Ts, bound_Ms, frees, consts},
                              cset)) =
         (case t of
@@ -747,7 +666,6 @@
               | @{const_name converse} =>
                 let
                   val x = Unsynchronized.inc max_fresh
-                  (* typ -> mtyp *)
                   fun mtype_for_set T =
                     MFun (mtype_for (domain_type T), V x, bool_M)
                   val ab_set_M = domain_type T |> mtype_for_set
@@ -757,7 +675,6 @@
               | @{const_name rel_comp} =>
                 let
                   val x = Unsynchronized.inc max_fresh
-                  (* typ -> mtyp *)
                   fun mtype_for_set T =
                     MFun (mtype_for (domain_type T), V x, bool_M)
                   val bc_set_M = domain_type T |> mtype_for_set
@@ -783,7 +700,6 @@
               | @{const_name Sigma} =>
                 let
                   val x = Unsynchronized.inc max_fresh
-                  (* typ -> mtyp *)
                   fun mtype_for_set T =
                     MFun (mtype_for (domain_type T), V x, bool_M)
                   val a_set_T = domain_type T
@@ -891,14 +807,12 @@
                                       string_for_mterm ctxt m))
   in do_term end
 
-(* int -> mtyp -> accumulator -> accumulator *)
 fun force_minus_funs 0 _ = I
   | force_minus_funs n (M as MFun (M1, _, M2)) =
     add_mtypes_equal M (MFun (M1, S Minus, M2))
     #> force_minus_funs (n - 1) M2
   | force_minus_funs _ M =
     raise MTYPE ("Nitpick_Mono.force_minus_funs", [M], [])
-(* mdata -> bool -> styp -> term -> term -> mterm * accumulator *)
 fun consider_general_equals mdata def (x as (_, T)) t1 t2 accum =
   let
     val (m1, accum) = consider_term mdata t1 accum
@@ -918,17 +832,12 @@
           accum)
   end
 
-(* mdata -> sign -> term -> accumulator -> mterm * accumulator *)
 fun consider_general_formula (mdata as {hol_ctxt = {ctxt, ...}, ...}) =
   let
-    (* typ -> mtyp *)
     val mtype_for = fresh_mtype_for_type mdata false
-    (* term -> accumulator -> mterm * accumulator *)
     val do_term = consider_term mdata
-    (* sign -> term -> accumulator -> mterm * accumulator *)
     fun do_formula sn t accum =
         let
-          (* styp -> string -> typ -> term -> mterm * accumulator *)
           fun do_quantifier (quant_x as (quant_s, _)) abs_s abs_T body_t =
             let
               val abs_M = mtype_for abs_T
@@ -944,7 +853,6 @@
                      MAbs (abs_s, abs_T, abs_M, S Minus, body_m)),
                accum |>> pop_bound)
             end
-          (* styp -> term -> term -> mterm * accumulator *)
           fun do_equals x t1 t2 =
             case sn of
               Plus => do_term t accum
@@ -1005,7 +913,6 @@
   [@{const_name ord_class.less}, @{const_name ord_class.less_eq}]
 val bounteous_consts = [@{const_name bisim}]
 
-(* mdata -> term -> bool *)
 fun is_harmless_axiom ({no_harmless = true, ...} : mdata) _ = false
   | is_harmless_axiom {hol_ctxt = {thy, stds, fast_descrs, ...}, ...} t =
     Term.add_consts t []
@@ -1013,12 +920,10 @@
     |> (forall (member (op =) harmless_consts o original_name o fst) orf
         exists (member (op =) bounteous_consts o fst))
 
-(* mdata -> term -> accumulator -> mterm * accumulator *)
 fun consider_nondefinitional_axiom mdata t =
   if is_harmless_axiom mdata t then pair (MRaw (t, dummy_M))
   else consider_general_formula mdata Plus t
 
-(* mdata -> term -> accumulator -> mterm * accumulator *)
 fun consider_definitional_axiom (mdata as {hol_ctxt = {thy, ...}, ...}) t =
   if not (is_constr_pattern_formula thy t) then
     consider_nondefinitional_axiom mdata t
@@ -1026,11 +931,8 @@
     pair (MRaw (t, dummy_M))
   else
     let
-      (* typ -> mtyp *)
       val mtype_for = fresh_mtype_for_type mdata false
-      (* term -> accumulator -> mterm * accumulator *)
       val do_term = consider_term mdata
-      (* term -> string -> typ -> term -> accumulator -> mterm * accumulator *)
       fun do_all quant_t abs_s abs_T body_t accum =
         let
           val abs_M = mtype_for abs_T
@@ -1043,7 +945,6 @@
                  MAbs (abs_s, abs_T, abs_M, S Minus, body_m)),
            accum |>> pop_bound)
         end
-      (* term -> term -> term -> accumulator -> mterm * accumulator *)
       and do_conjunction t0 t1 t2 accum =
         let
           val (m1, accum) = do_formula t1 accum
@@ -1058,7 +959,6 @@
         in
           (MApp (MApp (MRaw (t0, mtype_for (fastype_of t0)), m1), m2), accum)
         end
-      (* term -> accumulator -> accumulator *)
       and do_formula t accum =
           case t of
             (t0 as Const (@{const_name all}, _)) $ Abs (s1, T1, t1) =>
@@ -1083,22 +983,17 @@
                              \do_formula", [t])
     in do_formula t end
 
-(* Proof.context -> literal list -> term -> mtyp -> string *)
 fun string_for_mtype_of_term ctxt lits t M =
   Syntax.string_of_term ctxt t ^ " : " ^ string_for_mtype (resolve_mtype lits M)
 
-(* theory -> literal list -> mtype_context -> unit *)
 fun print_mtype_context ctxt lits ({frees, consts, ...} : mtype_context) =
   map (fn (x, M) => string_for_mtype_of_term ctxt lits (Free x) M) frees @
   map (fn (x, M) => string_for_mtype_of_term ctxt lits (Const x) M) consts
   |> cat_lines |> print_g
 
-(* ('a -> 'b -> 'c * 'd) -> 'a -> 'c list * 'b -> 'c list * 'd *)
 fun amass f t (ms, accum) =
   let val (m, accum) = f t accum in (m :: ms, accum) end
 
-(* string -> bool -> hol_context -> bool -> typ -> term list * term list
-   -> (literal list * (mterm list * mterm list) * (styp * mtyp) list) option *)
 fun infer which no_harmless (hol_ctxt as {ctxt, ...}) binarize alpha_T
           (nondef_ts, def_ts) =
   let
@@ -1127,15 +1022,11 @@
        | MTERM (loc, ms) =>
          raise BAD (loc, commas (map (string_for_mterm ctxt) ms))
 
-(* hol_context -> bool -> typ -> term list * term list -> bool *)
 val formulas_monotonic = is_some oooo infer "Monotonicity" false
 
-(* typ -> typ -> styp *)
 fun fin_fun_constr T1 T2 =
   (@{const_name FinFun}, (T1 --> T2) --> Type (@{type_name fin_fun}, [T1, T2]))
 
-(* hol_context -> bool -> (typ option * bool option) list -> typ
-   -> term list * term list -> term list * term list *)
 fun finitize_funs (hol_ctxt as {thy, stds, fast_descrs, constr_cache, ...})
                   binarize finitizes alpha_T tsp =
   case infer "Finiteness" true hol_ctxt binarize alpha_T tsp of
@@ -1144,12 +1035,10 @@
       tsp
     else
       let
-        (* typ -> sign_atom -> bool *)
         fun should_finitize T a =
           case triple_lookup (type_match thy) finitizes T of
             SOME (SOME false) => false
           | _ => resolve_sign_atom lits a = S Plus
-        (* typ -> mtyp -> typ *)
         fun type_from_mtype T M =
           case (M, T) of
             (MAlpha, _) => T
@@ -1161,12 +1050,10 @@
           | (MType _, _) => T
           | _ => raise MTYPE ("Nitpick_Mono.finitize_funs.type_from_mtype",
                               [M], [T])
-        (* styp -> styp *)
         fun finitize_constr (x as (s, T)) =
           (s, case AList.lookup (op =) constr_mtypes x of
                 SOME M => type_from_mtype T M
               | NONE => T)
-        (* typ list -> mterm -> term *)
         fun term_from_mterm Ts m =
           case m of
             MRaw (t, M) =>
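
The sign-constraint machinery above (xor, negate, do_literal, add_mtype_comp) accumulates literals that assign a sign to each annotation variable and gives up as soon as some variable would need both signs. A self-contained plain-Standard-ML sketch of that bookkeeping, with List.find standing in for the AList library used in the real code:

datatype sign = Plus | Minus

fun xor sn1 sn2 = if sn1 = sn2 then Plus else Minus
val negate = xor Minus

(* NONE = constraints already unsatisfiable; SOME lits = consistent so far *)
fun do_literal _ NONE = NONE
  | do_literal (x, sn) (SOME lits) =
      (case List.find (fn (x', _) => x' = x) lits of
         SOME (_, sn') => if sn = sn' then SOME lits else NONE
       | NONE => SOME ((x, sn) :: lits))

val consistent = do_literal (1, Plus) (SOME [])      (* SOME [(1, Plus)] *)
val clash = do_literal (1, Minus) consistent         (* NONE *)
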
--- a/src/HOL/Tools/Nitpick/nitpick_nut.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_nut.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -205,7 +205,6 @@
 
 exception NUT of string * nut list
 
-(* cst -> string *)
 fun string_for_cst Unity = "Unity"
   | string_for_cst False = "False"
   | string_for_cst True = "True"
@@ -225,7 +224,6 @@
   | string_for_cst NatToInt = "NatToInt"
   | string_for_cst IntToNat = "IntToNat"
 
-(* op1 -> string *)
 fun string_for_op1 Not = "Not"
   | string_for_op1 Finite = "Finite"
   | string_for_op1 Converse = "Converse"
@@ -237,7 +235,6 @@
   | string_for_op1 Second = "Second"
   | string_for_op1 Cast = "Cast"
 
-(* op2 -> string *)
 fun string_for_op2 All = "All"
   | string_for_op2 Exist = "Exist"
   | string_for_op2 Or = "Or"
@@ -258,14 +255,11 @@
   | string_for_op2 Apply = "Apply"
   | string_for_op2 Lambda = "Lambda"
 
-(* op3 -> string *)
 fun string_for_op3 Let = "Let"
   | string_for_op3 If = "If"
 
-(* int -> Proof.context -> nut -> string *)
 fun basic_string_for_nut indent ctxt u =
   let
-    (* nut -> string *)
     val sub = basic_string_for_nut (indent + 1) ctxt
   in
     (if indent = 0 then "" else "\n" ^ implode (replicate (2 * indent) " ")) ^
@@ -313,17 +307,14 @@
        Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R) ^
     ")"
   end
-(* Proof.context -> nut -> string *)
 val string_for_nut = basic_string_for_nut 0
 
-(* nut -> bool *)
 fun inline_nut (Op1 _) = false
   | inline_nut (Op2 _) = false
   | inline_nut (Op3 _) = false
   | inline_nut (Tuple (_, _, us)) = forall inline_nut us
   | inline_nut _ = true
 
-(* nut -> typ *)
 fun type_of (Cst (_, T, _)) = T
   | type_of (Op1 (_, T, _, _)) = T
   | type_of (Op2 (_, T, _, _, _)) = T
@@ -338,7 +329,6 @@
   | type_of (RelReg (_, T, _)) = T
   | type_of (FormulaReg (_, T, _)) = T
 
-(* nut -> rep *)
 fun rep_of (Cst (_, _, R)) = R
   | rep_of (Op1 (_, _, R, _)) = R
   | rep_of (Op2 (_, _, R, _, _)) = R
@@ -353,7 +343,6 @@
   | rep_of (RelReg (_, _, R)) = R
   | rep_of (FormulaReg (_, _, R)) = R
 
-(* nut -> string *)
 fun nickname_of (BoundName (_, _, _, nick)) = nick
   | nickname_of (FreeName (s, _, _)) = s
   | nickname_of (ConstName (s, _, _)) = s
@@ -361,7 +350,6 @@
   | nickname_of (FreeRel (_, _, _, nick)) = nick
   | nickname_of u = raise NUT ("Nitpick_Nut.nickname_of", [u])
 
-(* nut -> bool *)
 fun is_skolem_name u =
   space_explode name_sep (nickname_of u)
   |> exists (String.isPrefix skolem_prefix)
@@ -369,11 +357,9 @@
 fun is_eval_name u =
   String.isPrefix eval_prefix (nickname_of u)
   handle NUT ("Nitpick_Nut.nickname_of", _) => false
-(* cst -> nut -> bool *)
 fun is_Cst cst (Cst (cst', _, _)) = (cst = cst')
   | is_Cst _ _ = false
 
-(* (nut -> 'a -> 'a) -> nut -> 'a -> 'a *)
 fun fold_nut f u =
   case u of
     Op1 (_, _, _, u1) => fold_nut f u1
@@ -382,7 +368,6 @@
   | Tuple (_, _, us) => fold (fold_nut f) us
   | Construct (us', _, _, us) => fold (fold_nut f) us #> fold (fold_nut f) us'
   | _ => f u
-(* (nut -> nut) -> nut -> nut *)
 fun map_nut f u =
   case u of
     Op1 (oper, T, R, u1) => Op1 (oper, T, R, map_nut f u1)
@@ -394,7 +379,6 @@
     Construct (map (map_nut f) us', T, R, map (map_nut f) us)
   | _ => f u
 
-(* nut * nut -> order *)
 fun name_ord (BoundName (j1, _, _, _), BoundName (j2, _, _, _)) =
     int_ord (j1, j2)
   | name_ord (BoundName _, _) = LESS
@@ -411,24 +395,19 @@
      | ord => ord)
   | name_ord (u1, u2) = raise NUT ("Nitpick_Nut.name_ord", [u1, u2])
 
-(* nut -> nut -> int *)
 fun num_occs_in_nut needle_u stack_u =
   fold_nut (fn u => if u = needle_u then Integer.add 1 else I) stack_u 0
-(* nut -> nut -> bool *)
 val is_subterm_of = not_equal 0 oo num_occs_in_nut
 
-(* nut -> nut -> nut -> nut *)
 fun substitute_in_nut needle_u needle_u' =
   map_nut (fn u => if u = needle_u then needle_u' else u)
 
-(* nut -> nut list * nut list -> nut list * nut list *)
 val add_free_and_const_names =
   fold_nut (fn u => case u of
                       FreeName _ => apfst (insert (op =) u)
                     | ConstName _ => apsnd (insert (op =) u)
                     | _ => I)
 
-(* nut -> rep -> nut *)
 fun modify_name_rep (BoundName (j, T, _, nick)) R = BoundName (j, T, R, nick)
   | modify_name_rep (FreeName (s, T, _)) R = FreeName (s, T, R)
   | modify_name_rep (ConstName (s, T, _)) R = ConstName (s, T, R)
@@ -436,18 +415,15 @@
 
 structure NameTable = Table(type key = nut val ord = name_ord)
 
-(* 'a NameTable.table -> nut -> 'a *)
 fun the_name table name =
   case NameTable.lookup table name of
     SOME u => u
   | NONE => raise NUT ("Nitpick_Nut.the_name", [name])
-(* nut NameTable.table -> nut -> KK.n_ary_index *)
 fun the_rel table name =
   case the_name table name of
     FreeRel (x, _, _, _) => x
   | u => raise NUT ("Nitpick_Nut.the_rel", [u])
 
-(* typ * term -> typ * term *)
 fun mk_fst (_, Const (@{const_name Pair}, T) $ t1 $ _) = (domain_type T, t1)
   | mk_fst (T, t) =
     let val res_T = fst (HOLogic.dest_prodT T) in
@@ -459,23 +435,17 @@
     let val res_T = snd (HOLogic.dest_prodT T) in
       (res_T, Const (@{const_name snd}, T --> res_T) $ t)
     end
-(* typ * term -> (typ * term) list *)
 fun factorize (z as (Type (@{type_name "*"}, _), _)) =
     maps factorize [mk_fst z, mk_snd z]
   | factorize z = [z]
 
-(* hol_context -> op2 -> term -> nut *)
 fun nut_from_term (hol_ctxt as {thy, stds, fast_descrs, ...}) eq =
   let
-    (* string list -> typ list -> term -> nut *)
     fun aux eq ss Ts t =
       let
-        (* term -> nut *)
         val sub = aux Eq ss Ts
         val sub' = aux eq ss Ts
-        (* string -> typ -> term -> nut *)
         fun sub_abs s T = aux eq (s :: ss) (T :: Ts)
-        (* typ -> term -> term -> nut *)
         fun sub_equals T t1 t2 =
           let
             val (binder_Ts, body_T) = strip_type (domain_type T)
@@ -498,7 +468,6 @@
             else
               Op2 (eq, bool_T, Any, aux Eq ss Ts t1, aux Eq ss Ts t2)
           end
-        (* op2 -> string -> typ -> term -> nut *)
         fun do_quantifier quant s T t1 =
           let
             val bound_u = BoundName (length Ts, T, Any, s)
@@ -509,21 +478,18 @@
             else
               body_u
           end
-        (* term -> term list -> nut *)
         fun do_apply t0 ts =
           let
             val (ts', t2) = split_last ts
             val t1 = list_comb (t0, ts')
             val T1 = fastype_of1 (Ts, t1)
           in Op2 (Apply, range_type T1, Any, sub t1, sub t2) end
-        (* op2 -> string -> styp -> term -> nut *)
         fun do_description_operator oper undef_s (x as (_, T)) t1 =
           if fast_descrs then
             Op2 (oper, range_type T, Any, sub t1,
                  sub (Const (undef_s, range_type T)))
           else
             do_apply (Const x) [t1]
-        (* styp -> term list -> nut *)
         fun do_construct (x as (_, T)) ts =
           case num_binder_types T - length ts of
             0 => Construct (map ((fn (s', T') => ConstName (s', T', Any))
@@ -716,21 +682,16 @@
       end
   in aux eq [] [] end
 
-(* scope -> typ -> rep *)
 fun rep_for_abs_fun scope T =
   let val (R1, R2) = best_non_opt_symmetric_reps_for_fun_type scope T in
     Func (R1, (card_of_rep R1 <> card_of_rep R2 ? Opt) R2)
   end
 
-(* scope -> nut -> nut list * rep NameTable.table
-   -> nut list * rep NameTable.table *)
 fun choose_rep_for_free_var scope v (vs, table) =
   let
     val R = best_non_opt_set_rep_for_type scope (type_of v)
     val v = modify_name_rep v R
   in (v :: vs, NameTable.update (v, R) table) end
-(* scope -> bool -> nut -> nut list * rep NameTable.table
-   -> nut list * rep NameTable.table *)
 fun choose_rep_for_const (scope as {hol_ctxt = {thy, ...}, ...}) all_exact v
                          (vs, table) =
   let
@@ -756,16 +717,11 @@
     val v = modify_name_rep v R
   in (v :: vs, NameTable.update (v, R) table) end
 
-(* scope -> nut list -> rep NameTable.table -> nut list * rep NameTable.table *)
 fun choose_reps_for_free_vars scope vs table =
   fold (choose_rep_for_free_var scope) vs ([], table)
-(* scope -> bool -> nut list -> rep NameTable.table
-   -> nut list * rep NameTable.table *)
 fun choose_reps_for_consts scope all_exact vs table =
   fold (choose_rep_for_const scope all_exact) vs ([], table)
 
-(* scope -> styp -> int -> nut list * rep NameTable.table
-   -> nut list * rep NameTable.table *)
 fun choose_rep_for_nth_sel_for_constr (scope as {hol_ctxt, binarize, ...})
                                       (x as (_, T)) n (vs, table) =
   let
@@ -778,21 +734,15 @@
                best_opt_set_rep_for_type scope T' |> unopt_rep
     val v = ConstName (s', T', R')
   in (v :: vs, NameTable.update (v, R') table) end
-(* scope -> styp -> nut list * rep NameTable.table
-   -> nut list * rep NameTable.table *)
 fun choose_rep_for_sels_for_constr scope (x as (_, T)) =
   fold_rev (choose_rep_for_nth_sel_for_constr scope x)
            (~1 upto num_sels_for_constr_type T - 1)
-(* scope -> dtype_spec -> nut list * rep NameTable.table
-   -> nut list * rep NameTable.table *)
 fun choose_rep_for_sels_of_datatype _ ({deep = false, ...} : dtype_spec) = I
   | choose_rep_for_sels_of_datatype scope {constrs, ...} =
     fold_rev (choose_rep_for_sels_for_constr scope o #const) constrs
-(* scope -> rep NameTable.table -> nut list * rep NameTable.table *)
 fun choose_reps_for_all_sels (scope as {datatypes, ...}) =
   fold (choose_rep_for_sels_of_datatype scope) datatypes o pair []
 
-(* scope -> nut -> rep NameTable.table -> rep NameTable.table *)
 fun choose_rep_for_bound_var scope v table =
   let val R = best_one_rep_for_type scope (type_of v) in
     NameTable.update (v, R) table
@@ -802,7 +752,6 @@
    three-valued logic, it would evaluate to an unrepresentable value ("Unrep")
    according to the HOL semantics. For example, "Suc n" is constructive if "n"
    is representable or "Unrep", because unknown implies "Unrep". *)
-(* nut -> bool *)
 fun is_constructive u =
   is_Cst Unrep u orelse
   (not (is_fun_type (type_of u)) andalso not (is_opt_rep (rep_of u))) orelse
@@ -817,14 +766,11 @@
   | Construct (_, _, _, us) => forall is_constructive us
   | _ => false
 
-(* nut -> nut *)
 fun optimize_unit u =
   if rep_of u = Unit then Cst (Unity, type_of u, Unit) else u
-(* typ -> rep -> nut *)
 fun unknown_boolean T R =
   Cst (case R of Formula Pos => False | Formula Neg => True | _ => Unknown,
        T, R)
-(* nut -> bool *)
 fun is_fully_representable_set u =
   not (is_opt_rep (rep_of u)) andalso
   case u of
@@ -835,7 +781,6 @@
     forall is_fully_representable_set [u1, u2]
   | _ => false
 
-(* op1 -> typ -> rep -> nut -> nut *)
 fun s_op1 oper T R u1 =
   ((if oper = Not then
       if is_Cst True u1 then Cst (False, T, R)
@@ -845,7 +790,6 @@
       raise SAME ())
    handle SAME () => Op1 (oper, T, R, u1))
   |> optimize_unit
-(* op2 -> typ -> rep -> nut -> nut -> nut *)
 fun s_op2 oper T R u1 u2 =
   ((case oper of
       Or =>
@@ -886,7 +830,6 @@
     | _ => raise SAME ())
    handle SAME () => Op2 (oper, T, R, u1, u2))
   |> optimize_unit
-(* op3 -> typ -> rep -> nut -> nut -> nut -> nut *)
 fun s_op3 oper T R u1 u2 u3 =
   ((case oper of
       Let =>
@@ -897,12 +840,10 @@
     | _ => raise SAME ())
    handle SAME () => Op3 (oper, T, R, u1, u2, u3))
   |> optimize_unit
-(* typ -> rep -> nut list -> nut *)
 fun s_tuple T R us =
   (if exists (is_Cst Unrep) us then Cst (Unrep, T, R) else Tuple (T, R, us))
   |> optimize_unit
 
-(* theory -> nut -> nut *)
 fun optimize_nut u =
   case u of
     Op1 (oper, T, R, u1) => s_op1 oper T R (optimize_nut u1)
@@ -914,35 +855,26 @@
   | Construct (us', T, R, us) => Construct (us', T, R, map optimize_nut us)
   | _ => optimize_unit u
 
-(* (nut -> 'a) -> nut -> 'a list *)
 fun untuple f (Tuple (_, _, us)) = maps (untuple f) us
   | untuple f u = if rep_of u = Unit then [] else [f u]
 
-(* scope -> bool -> rep NameTable.table -> bool -> nut -> nut *)
 fun choose_reps_in_nut (scope as {card_assigns, bits, datatypes, ofs, ...})
                        unsound table def =
   let
     val bool_atom_R = Atom (2, offset_of_type ofs bool_T)
-    (* polarity -> bool -> rep *)
     fun bool_rep polar opt =
       if polar = Neut andalso opt then Opt bool_atom_R else Formula polar
-    (* nut -> nut -> nut *)
     fun triad u1 u2 = s_op2 Triad (type_of u1) (Opt bool_atom_R) u1 u2
-    (* (polarity -> nut) -> nut *)
     fun triad_fn f = triad (f Pos) (f Neg)
-    (* rep NameTable.table -> bool -> polarity -> nut -> nut -> nut *)
     fun unrepify_nut_in_nut table def polar needle_u =
       let val needle_T = type_of needle_u in
         substitute_in_nut needle_u (Cst (if is_fun_type needle_T then Unknown
                                          else Unrep, needle_T, Any))
         #> aux table def polar
       end
-    (* rep NameTable.table -> bool -> polarity -> nut -> nut *)
     and aux table def polar u =
       let
-        (* bool -> polarity -> nut -> nut *)
         val gsub = aux table
-        (* nut -> nut *)
         val sub = gsub false Neut
       in
         case u of
@@ -1050,15 +982,12 @@
           let
             val u1' = sub u1
             val u2' = sub u2
-            (* unit -> nut *)
             fun non_opt_case () = s_op2 Eq T (Formula polar) u1' u2'
-            (* unit -> nut *)
             fun opt_opt_case () =
               if polar = Neut then
                 triad_fn (fn polar => s_op2 Eq T (Formula polar) u1' u2')
               else
                 non_opt_case ()
-            (* nut -> nut *)
             fun hybrid_case u =
               (* hackish optimization *)
               if is_constructive u then s_op2 Eq T (Formula Neut) u1' u2'
@@ -1275,35 +1204,27 @@
       |> optimize_unit
   in aux table def Pos end
 
-(* int -> KK.n_ary_index list -> KK.n_ary_index list
-   -> int * KK.n_ary_index list *)
 fun fresh_n_ary_index n [] ys = (0, (n, 1) :: ys)
   | fresh_n_ary_index n ((m, j) :: xs) ys =
     if m = n then (j, ys @ ((m, j + 1) :: xs))
     else fresh_n_ary_index n xs ((m, j) :: ys)
-(* int -> name_pool -> int * name_pool *)
 fun fresh_rel n {rels, vars, formula_reg, rel_reg} =
   let val (j, rels') = fresh_n_ary_index n rels [] in
     (j, {rels = rels', vars = vars, formula_reg = formula_reg,
          rel_reg = rel_reg})
   end
-(* int -> name_pool -> int * name_pool *)
 fun fresh_var n {rels, vars, formula_reg, rel_reg} =
   let val (j, vars') = fresh_n_ary_index n vars [] in
     (j, {rels = rels, vars = vars', formula_reg = formula_reg,
          rel_reg = rel_reg})
   end
-(* int -> name_pool -> int * name_pool *)
 fun fresh_formula_reg {rels, vars, formula_reg, rel_reg} =
   (formula_reg, {rels = rels, vars = vars, formula_reg = formula_reg + 1,
                  rel_reg = rel_reg})
-(* int -> name_pool -> int * name_pool *)
 fun fresh_rel_reg {rels, vars, formula_reg, rel_reg} =
   (rel_reg, {rels = rels, vars = vars, formula_reg = formula_reg,
              rel_reg = rel_reg + 1})
 
-(* nut -> nut list * name_pool * nut NameTable.table
-   -> nut list * name_pool * nut NameTable.table *)
 fun rename_plain_var v (ws, pool, table) =
   let
     val is_formula = (rep_of v = Formula Neut)
@@ -1313,7 +1234,6 @@
     val w = constr (j, type_of v, rep_of v)
   in (w :: ws, pool, NameTable.update (v, w) table) end
 
-(* typ -> rep -> nut list -> nut *)
 fun shape_tuple (T as Type (@{type_name "*"}, [T1, T2])) (R as Struct [R1, R2])
                 us =
     let val arity1 = arity_of_rep R1 in
@@ -1327,8 +1247,6 @@
   | shape_tuple T Unit [] = Cst (Unity, T, Unit)
   | shape_tuple _ _ us = raise NUT ("Nitpick_Nut.shape_tuple", us)
 
-(* bool -> nut -> nut list * name_pool * nut NameTable.table
-   -> nut list * name_pool * nut NameTable.table *)
 fun rename_n_ary_var rename_free v (ws, pool, table) =
   let
     val T = type_of v
@@ -1370,15 +1288,12 @@
       in (w :: ws, pool, NameTable.update (v, w) table) end
   end
 
-(* nut list -> name_pool -> nut NameTable.table
-  -> nut list * name_pool * nut NameTable.table *)
 fun rename_free_vars vs pool table =
   let
     val vs = filter (not_equal Unit o rep_of) vs
     val (vs, pool, table) = fold (rename_n_ary_var true) vs ([], pool, table)
   in (rev vs, pool, table) end
 
-(* name_pool -> nut NameTable.table -> nut -> nut *)
 fun rename_vars_in_nut pool table u =
   case u of
     Cst _ => u
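
The fresh_n_ary_index function above (used by fresh_rel and fresh_var) hands out consecutive indices per arity and threads the updated pool back to the caller. It is small enough to run stand-alone in plain Standard ML; the trace below is only illustrative of how the pool evolves:

fun fresh_n_ary_index n [] ys = (0, (n, 1) :: ys)
  | fresh_n_ary_index n ((m, j) :: xs) ys =
    if m = n then (j, ys @ ((m, j + 1) :: xs))
    else fresh_n_ary_index n xs ((m, j) :: ys)

val (j1, pool1) = fresh_n_ary_index 2 [] []        (* j1 = 0, pool1 = [(2, 1)] *)
val (j2, pool2) = fresh_n_ary_index 2 pool1 []     (* j2 = 1, pool2 = [(2, 2)] *)
val (j3, pool3) = fresh_n_ary_index 3 pool2 []     (* j3 = 0, pool3 = [(3, 1), (2, 2)] *)
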
--- a/src/HOL/Tools/Nitpick/nitpick_peephole.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_peephole.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -14,11 +14,11 @@
   type decl = Kodkod.decl
   type expr_assign = Kodkod.expr_assign
 
-  type name_pool = {
-    rels: n_ary_index list,
-    vars: n_ary_index list,
-    formula_reg: int,
-    rel_reg: int}
+  type name_pool =
+    {rels: n_ary_index list,
+     vars: n_ary_index list,
+     formula_reg: int,
+     rel_reg: int}
 
   val initial_pool : name_pool
   val not3_rel : n_ary_index
@@ -51,39 +51,38 @@
   val num_seq : int -> int -> int_expr list
   val s_and : formula -> formula -> formula
 
-  type kodkod_constrs = {
-    kk_all: decl list -> formula -> formula,
-    kk_exist: decl list -> formula -> formula,
-    kk_formula_let: expr_assign list -> formula -> formula,
-    kk_formula_if: formula -> formula -> formula -> formula,
-    kk_or: formula -> formula -> formula,
-    kk_not: formula -> formula,
-    kk_iff: formula -> formula -> formula,
-    kk_implies: formula -> formula -> formula,
-    kk_and: formula -> formula -> formula,
-    kk_subset: rel_expr -> rel_expr -> formula,
-    kk_rel_eq: rel_expr -> rel_expr -> formula,
-    kk_no: rel_expr -> formula,
-    kk_lone: rel_expr -> formula,
-    kk_one: rel_expr -> formula,
-    kk_some: rel_expr -> formula,
-    kk_rel_let: expr_assign list -> rel_expr -> rel_expr,
-    kk_rel_if: formula -> rel_expr -> rel_expr -> rel_expr,
-    kk_union: rel_expr -> rel_expr -> rel_expr,
-    kk_difference: rel_expr -> rel_expr -> rel_expr,
-    kk_override: rel_expr -> rel_expr -> rel_expr,
-    kk_intersect: rel_expr -> rel_expr -> rel_expr,
-    kk_product: rel_expr -> rel_expr -> rel_expr,
-    kk_join: rel_expr -> rel_expr -> rel_expr,
-    kk_closure: rel_expr -> rel_expr,
-    kk_reflexive_closure: rel_expr -> rel_expr,
-    kk_comprehension: decl list -> formula -> rel_expr,
-    kk_project: rel_expr -> int_expr list -> rel_expr,
-    kk_project_seq: rel_expr -> int -> int -> rel_expr,
-    kk_not3: rel_expr -> rel_expr,
-    kk_nat_less: rel_expr -> rel_expr -> rel_expr,
-    kk_int_less: rel_expr -> rel_expr -> rel_expr
-  }
+  type kodkod_constrs =
+    {kk_all: decl list -> formula -> formula,
+     kk_exist: decl list -> formula -> formula,
+     kk_formula_let: expr_assign list -> formula -> formula,
+     kk_formula_if: formula -> formula -> formula -> formula,
+     kk_or: formula -> formula -> formula,
+     kk_not: formula -> formula,
+     kk_iff: formula -> formula -> formula,
+     kk_implies: formula -> formula -> formula,
+     kk_and: formula -> formula -> formula,
+     kk_subset: rel_expr -> rel_expr -> formula,
+     kk_rel_eq: rel_expr -> rel_expr -> formula,
+     kk_no: rel_expr -> formula,
+     kk_lone: rel_expr -> formula,
+     kk_one: rel_expr -> formula,
+     kk_some: rel_expr -> formula,
+     kk_rel_let: expr_assign list -> rel_expr -> rel_expr,
+     kk_rel_if: formula -> rel_expr -> rel_expr -> rel_expr,
+     kk_union: rel_expr -> rel_expr -> rel_expr,
+     kk_difference: rel_expr -> rel_expr -> rel_expr,
+     kk_override: rel_expr -> rel_expr -> rel_expr,
+     kk_intersect: rel_expr -> rel_expr -> rel_expr,
+     kk_product: rel_expr -> rel_expr -> rel_expr,
+     kk_join: rel_expr -> rel_expr -> rel_expr,
+     kk_closure: rel_expr -> rel_expr,
+     kk_reflexive_closure: rel_expr -> rel_expr,
+     kk_comprehension: decl list -> formula -> rel_expr,
+     kk_project: rel_expr -> int_expr list -> rel_expr,
+     kk_project_seq: rel_expr -> int -> int -> rel_expr,
+     kk_not3: rel_expr -> rel_expr,
+     kk_nat_less: rel_expr -> rel_expr -> rel_expr,
+     kk_int_less: rel_expr -> rel_expr -> rel_expr}
 
   val kodkod_constrs : bool -> int -> int -> int -> kodkod_constrs
 end;
@@ -94,11 +93,11 @@
 open Kodkod
 open Nitpick_Util
 
-type name_pool = {
-  rels: n_ary_index list,
-  vars: n_ary_index list,
-  formula_reg: int,
-  rel_reg: int}
+type name_pool =
+  {rels: n_ary_index list,
+   vars: n_ary_index list,
+   formula_reg: int,
+   rel_reg: int}
 
 (* If you add new built-in relations, make sure to increment the counters here
    as well to avoid name clashes (which fortunately would be detected by
@@ -125,40 +124,31 @@
 val lcm_rel = (3, 11)
 val norm_frac_rel = (4, 0)
 
-(* int -> bool -> rel_expr *)
 fun atom_for_bool j0 = Atom o Integer.add j0 o int_from_bool
-(* bool -> formula *)
 fun formula_for_bool b = if b then True else False
 
-(* int * int -> int -> int *)
 fun atom_for_nat (k, j0) n = if n < 0 orelse n >= k then ~1 else n + j0
-(* int -> int *)
 fun min_int_for_card k = ~k div 2 + 1
 fun max_int_for_card k = k div 2
-(* int * int -> int -> int *)
 fun int_for_atom (k, j0) j =
   let val j = j - j0 in if j <= max_int_for_card k then j else j - k end
 fun atom_for_int (k, j0) n =
   if n < min_int_for_card k orelse n > max_int_for_card k then ~1
   else if n < 0 then n + k + j0
   else n + j0
-(* int -> int -> bool *)
 fun is_twos_complement_representable bits n =
   let val max = reasonable_power 2 bits in n >= ~ max andalso n < max end
 
-(* rel_expr -> bool *)
 fun is_none_product (Product (r1, r2)) =
     is_none_product r1 orelse is_none_product r2
   | is_none_product None = true
   | is_none_product _ = false
 
-(* rel_expr -> bool *)
 fun is_one_rel_expr (Atom _) = true
   | is_one_rel_expr (AtomSeq (1, _)) = true
   | is_one_rel_expr (Var _) = true
   | is_one_rel_expr _ = false
 
-(* rel_expr -> bool *)
 fun inline_rel_expr (Product (r1, r2)) =
     inline_rel_expr r1 andalso inline_rel_expr r2
   | inline_rel_expr Iden = true
@@ -172,7 +162,6 @@
   | inline_rel_expr (RelReg _) = true
   | inline_rel_expr _ = false
 
-(* rel_expr -> rel_expr -> bool option *)
 fun rel_expr_equal None (Atom _) = SOME false
   | rel_expr_equal None (AtomSeq (k, _)) = SOME (k = 0)
   | rel_expr_equal (Atom _) None = SOME false
@@ -183,7 +172,6 @@
   | rel_expr_equal (AtomSeq x1) (AtomSeq x2) = SOME (x1 = x2)
   | rel_expr_equal r1 r2 = if r1 = r2 then SOME true else NONE
 
-(* rel_expr -> rel_expr -> bool option *)
 fun rel_expr_intersects (Atom j1) (Atom j2) = SOME (j1 = j2)
   | rel_expr_intersects (Atom j) (AtomSeq (k, j0)) = SOME (j < j0 + k)
   | rel_expr_intersects (AtomSeq (k, j0)) (Atom j) = SOME (j < j0 + k)
@@ -192,84 +180,71 @@
   | rel_expr_intersects r1 r2 =
     if is_none_product r1 orelse is_none_product r2 then SOME false else NONE
 
-(* int -> rel_expr *)
 fun empty_n_ary_rel 0 = raise ARG ("Nitpick_Peephole.empty_n_ary_rel", "0")
   | empty_n_ary_rel n = funpow (n - 1) (curry Product None) None
 
-(* decl -> rel_expr *)
 fun decl_one_set (DeclOne (_, r)) = r
   | decl_one_set _ =
     raise ARG ("Nitpick_Peephole.decl_one_set", "not \"DeclOne\"")
 
-(* int_expr -> bool *)
 fun is_Num (Num _) = true
   | is_Num _ = false
-(* int_expr -> int *)
 fun dest_Num (Num k) = k
   | dest_Num _ = raise ARG ("Nitpick_Peephole.dest_Num", "not \"Num\"")
-(* int -> int -> int_expr list *)
 fun num_seq j0 n = map Num (index_seq j0 n)
 
-(* rel_expr -> rel_expr -> bool *)
 fun occurs_in_union r (Union (r1, r2)) =
     occurs_in_union r r1 orelse occurs_in_union r r2
   | occurs_in_union r r' = (r = r')
 
-(* rel_expr -> rel_expr -> rel_expr *)
 fun s_and True f2 = f2
   | s_and False _ = False
   | s_and f1 True = f1
   | s_and _ False = False
   | s_and f1 f2 = And (f1, f2)
 
-type kodkod_constrs = {
-  kk_all: decl list -> formula -> formula,
-  kk_exist: decl list -> formula -> formula,
-  kk_formula_let: expr_assign list -> formula -> formula,
-  kk_formula_if: formula -> formula -> formula -> formula,
-  kk_or: formula -> formula -> formula,
-  kk_not: formula -> formula,
-  kk_iff: formula -> formula -> formula,
-  kk_implies: formula -> formula -> formula,
-  kk_and: formula -> formula -> formula,
-  kk_subset: rel_expr -> rel_expr -> formula,
-  kk_rel_eq: rel_expr -> rel_expr -> formula,
-  kk_no: rel_expr -> formula,
-  kk_lone: rel_expr -> formula,
-  kk_one: rel_expr -> formula,
-  kk_some: rel_expr -> formula,
-  kk_rel_let: expr_assign list -> rel_expr -> rel_expr,
-  kk_rel_if: formula -> rel_expr -> rel_expr -> rel_expr,
-  kk_union: rel_expr -> rel_expr -> rel_expr,
-  kk_difference: rel_expr -> rel_expr -> rel_expr,
-  kk_override: rel_expr -> rel_expr -> rel_expr,
-  kk_intersect: rel_expr -> rel_expr -> rel_expr,
-  kk_product: rel_expr -> rel_expr -> rel_expr,
-  kk_join: rel_expr -> rel_expr -> rel_expr,
-  kk_closure: rel_expr -> rel_expr,
-  kk_reflexive_closure: rel_expr -> rel_expr,
-  kk_comprehension: decl list -> formula -> rel_expr,
-  kk_project: rel_expr -> int_expr list -> rel_expr,
-  kk_project_seq: rel_expr -> int -> int -> rel_expr,
-  kk_not3: rel_expr -> rel_expr,
-  kk_nat_less: rel_expr -> rel_expr -> rel_expr,
-  kk_int_less: rel_expr -> rel_expr -> rel_expr
-}
+type kodkod_constrs =
+  {kk_all: decl list -> formula -> formula,
+   kk_exist: decl list -> formula -> formula,
+   kk_formula_let: expr_assign list -> formula -> formula,
+   kk_formula_if: formula -> formula -> formula -> formula,
+   kk_or: formula -> formula -> formula,
+   kk_not: formula -> formula,
+   kk_iff: formula -> formula -> formula,
+   kk_implies: formula -> formula -> formula,
+   kk_and: formula -> formula -> formula,
+   kk_subset: rel_expr -> rel_expr -> formula,
+   kk_rel_eq: rel_expr -> rel_expr -> formula,
+   kk_no: rel_expr -> formula,
+   kk_lone: rel_expr -> formula,
+   kk_one: rel_expr -> formula,
+   kk_some: rel_expr -> formula,
+   kk_rel_let: expr_assign list -> rel_expr -> rel_expr,
+   kk_rel_if: formula -> rel_expr -> rel_expr -> rel_expr,
+   kk_union: rel_expr -> rel_expr -> rel_expr,
+   kk_difference: rel_expr -> rel_expr -> rel_expr,
+   kk_override: rel_expr -> rel_expr -> rel_expr,
+   kk_intersect: rel_expr -> rel_expr -> rel_expr,
+   kk_product: rel_expr -> rel_expr -> rel_expr,
+   kk_join: rel_expr -> rel_expr -> rel_expr,
+   kk_closure: rel_expr -> rel_expr,
+   kk_reflexive_closure: rel_expr -> rel_expr,
+   kk_comprehension: decl list -> formula -> rel_expr,
+   kk_project: rel_expr -> int_expr list -> rel_expr,
+   kk_project_seq: rel_expr -> int -> int -> rel_expr,
+   kk_not3: rel_expr -> rel_expr,
+   kk_nat_less: rel_expr -> rel_expr -> rel_expr,
+   kk_int_less: rel_expr -> rel_expr -> rel_expr}
 
 (* We assume throughout that Kodkod variables have a "one" constraint. This is
    always the case if Kodkod's skolemization is disabled. *)
-(* bool -> int -> int -> int -> kodkod_constrs *)
 fun kodkod_constrs optim nat_card int_card main_j0 =
   let
-    (* bool -> int *)
     val from_bool = atom_for_bool main_j0
-    (* int -> rel_expr *)
     fun from_nat n = Atom (n + main_j0)
-    (* int -> int *)
     fun to_nat j = j - main_j0
     val to_int = int_for_atom (int_card, main_j0)
 
-    (* decl list -> formula -> formula *)
     fun s_all _ True = True
       | s_all _ False = False
       | s_all [] f = f
@@ -281,12 +256,10 @@
       | s_exist ds (Exist (ds', f)) = Exist (ds @ ds', f)
       | s_exist ds f = Exist (ds, f)
 
-    (* expr_assign list -> formula -> formula *)
     fun s_formula_let _ True = True
       | s_formula_let _ False = False
       | s_formula_let assigns f = FormulaLet (assigns, f)
 
-    (* formula -> formula *)
     fun s_not True = False
       | s_not False = True
       | s_not (All (ds, f)) = Exist (ds, s_not f)
@@ -299,7 +272,6 @@
       | s_not (Some r) = No r
       | s_not f = Not f
 
-    (* formula -> formula -> formula *)
     fun s_or True _ = True
       | s_or False f2 = f2
       | s_or _ True = True
@@ -316,7 +288,6 @@
       | s_implies f1 False = s_not f1
       | s_implies f1 f2 = if f1 = f2 then True else Implies (f1, f2)
 
-    (* formula -> formula -> formula -> formula *)
     fun s_formula_if True f2 _ = f2
       | s_formula_if False _ f3 = f3
       | s_formula_if f1 True f3 = s_or f1 f3
@@ -325,7 +296,6 @@
       | s_formula_if f1 f2 False = s_and f1 f2
       | s_formula_if f f1 f2 = FormulaIf (f, f1, f2)
 
-    (* rel_expr -> int_expr list -> rel_expr *)
     fun s_project r is =
       (case r of
          Project (r1, is') =>
@@ -340,7 +310,6 @@
                else Project (r, is)
              end
 
-    (* (rel_expr -> formula) -> rel_expr -> formula *)
     fun s_xone xone r =
       if is_one_rel_expr r then
         True
@@ -348,7 +317,6 @@
         1 => xone r
       | arity => foldl1 And (map (xone o s_project r o single o Num)
                                  (index_seq 0 arity))
-    (* rel_expr -> formula *)
     fun s_no None = True
       | s_no (Product (r1, r2)) = s_or (s_no r1) (s_no r2)
       | s_no (Intersect (Closure (Rel x), Iden)) = Acyclic x
@@ -362,13 +330,11 @@
       | s_some (Product (r1, r2)) = s_and (s_some r1) (s_some r2)
       | s_some r = if is_one_rel_expr r then True else Some r
 
-    (* rel_expr -> rel_expr *)
     fun s_not3 (Atom j) = Atom (if j = main_j0 then j + 1 else j - 1)
       | s_not3 (r as Join (r1, r2)) =
         if r2 = Rel not3_rel then r1 else Join (r, Rel not3_rel)
       | s_not3 r = Join (r, Rel not3_rel)
 
-    (* rel_expr -> rel_expr -> formula *)
     fun s_rel_eq r1 r2 =
       (case (r1, r2) of
          (Join (r11, Rel x), _) =>
@@ -427,12 +393,10 @@
         else if forall is_one_rel_expr [r1, r2] then s_rel_eq r1 r2
         else Subset (r1, r2)
 
-    (* expr_assign list -> rel_expr -> rel_expr *)
     fun s_rel_let [b as AssignRelReg (x', r')] (r as RelReg x) =
         if x = x' then r' else RelLet ([b], r)
       | s_rel_let bs r = RelLet (bs, r)
 
-    (* formula -> rel_expr -> rel_expr -> rel_expr *)
     fun s_rel_if f r1 r2 =
       (case (f, r1, r2) of
          (True, _, _) => r1
@@ -443,7 +407,6 @@
        | _ => raise SAME ())
       handle SAME () => if r1 = r2 then r1 else RelIf (f, r1, r2)
 
-    (* rel_expr -> rel_expr -> rel_expr *)
     fun s_union r1 (Union (r21, r22)) = s_union (s_union r1 r21) r22
       | s_union r1 r2 =
         if is_none_product r1 then r2
@@ -561,14 +524,12 @@
          handle SAME () => List.foldr Join r22 [r1, r21])
       | s_join r1 r2 = Join (r1, r2)
 
-    (* rel_expr -> rel_expr *)
     fun s_closure Iden = Iden
       | s_closure r = if is_none_product r then r else Closure r
     fun s_reflexive_closure Iden = Iden
       | s_reflexive_closure r =
         if is_none_product r then Iden else ReflexiveClosure r
 
-    (* decl list -> formula -> rel_expr *)
     fun s_comprehension ds False = empty_n_ary_rel (length ds)
       | s_comprehension ds True = fold1 s_product (map decl_one_set ds)
       | s_comprehension [d as DeclOne ((1, j1), r)]
@@ -579,10 +540,8 @@
           Comprehension ([d], f)
       | s_comprehension ds f = Comprehension (ds, f)
 
-    (* rel_expr -> int -> int -> rel_expr *)
     fun s_project_seq r =
       let
-        (* int -> rel_expr -> int -> int -> rel_expr *)
         fun aux arity r j0 n =
           if j0 = 0 andalso arity = n then
             r
@@ -595,7 +554,6 @@
               val arity1 = arity - arity2
               val n1 = Int.min (nat_minus arity1 j0, n)
               val n2 = n - n1
-              (* unit -> rel_expr *)
               fun one () = aux arity1 r1 j0 n1
               fun two () = aux arity2 r2 (nat_minus j0 arity1) n2
             in
@@ -607,17 +565,13 @@
           | _ => s_project r (num_seq j0 n)
       in aux (arity_of_rel_expr r) r end
 
-    (* rel_expr -> rel_expr -> rel_expr *)
     fun s_nat_less (Atom j1) (Atom j2) = from_bool (j1 < j2)
       | s_nat_less r1 r2 = fold s_join [r1, r2] (Rel nat_less_rel)
     fun s_int_less (Atom j1) (Atom j2) = from_bool (to_int j1 < to_int j2)
       | s_int_less r1 r2 = fold s_join [r1, r2] (Rel int_less_rel)
 
-    (* rel_expr -> int -> int -> rel_expr *)
     fun d_project_seq r j0 n = Project (r, num_seq j0 n)
-    (* rel_expr -> rel_expr *)
     fun d_not3 r = Join (r, Rel not3_rel)
-    (* rel_expr -> rel_expr -> rel_expr *)
     fun d_nat_less r1 r2 = List.foldl Join (Rel nat_less_rel) [r1, r2]
     fun d_int_less r1 r2 = List.foldl Join (Rel int_less_rel) [r1, r2]
   in
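
Aside, not part of the patch: the s_* functions above are peephole smart constructors that simplify Kodkod formulas and relational expressions as they are built. A minimal Standard ML sketch of the idea over a toy formula datatype; the datatype and the double-negation clause are invented for illustration, while s_and mirrors the clauses visible in the hunk:

    datatype fm = True | False | Not of fm | And of fm * fm

    fun s_not True = False
      | s_not False = True
      | s_not (Not f) = f
      | s_not f = Not f

    fun s_and True f2 = f2
      | s_and False _ = False
      | s_and f1 True = f1
      | s_and _ False = False
      | s_and f1 f2 = And (f1, f2)

    (* s_and (s_not False) (Not True) yields Not True instead of
       And (True, Not True): constant subformulas never reach later passes. *)

The d_* variants near the end of kodkod_constrs build the same expressions without simplification, which the "optim" flag appears to select between.
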
--- a/src/HOL/Tools/Nitpick/nitpick_preproc.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_preproc.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -21,7 +21,6 @@
 open Nitpick_HOL
 open Nitpick_Mono
 
-(* polarity -> string -> bool *)
 fun is_positive_existential polar quant_s =
   (polar = Pos andalso quant_s = @{const_name Ex}) orelse
   (polar = Neg andalso quant_s <> @{const_name Ex})
@@ -33,10 +32,8 @@
    binary coding. *)
 val binary_int_threshold = 3
 
-(* bool -> term -> bool *)
 val may_use_binary_ints =
   let
-    (* bool -> term -> bool *)
     fun aux def (Const (@{const_name "=="}, _) $ t1 $ t2) =
         aux def t1 andalso aux false t2
       | aux def (@{const "==>"} $ t1 $ t2) = aux false t1 andalso aux def t2
@@ -52,10 +49,8 @@
       | aux def (Abs (_, _, t')) = aux def t'
       | aux _ _ = true
   in aux end
-(* term -> bool *)
 val should_use_binary_ints =
   let
-    (* term -> bool *)
     fun aux (t1 $ t2) = aux t1 orelse aux t2
       | aux (Const (s, T)) =
         ((s = @{const_name times} orelse s = @{const_name div}) andalso
@@ -70,10 +65,8 @@
 
 (** Uncurrying **)
 
-(* theory -> term -> int Termtab.tab -> int Termtab.tab *)
 fun add_to_uncurry_table thy t =
   let
-    (* term -> term list -> int Termtab.tab -> int Termtab.tab *)
     fun aux (t1 $ t2) args table =
         let val table = aux t2 [] table in aux t1 (t2 :: args) table end
       | aux (Abs (_, _, t')) _ table = aux t' [] table
@@ -83,18 +76,15 @@
            is_sel s orelse s = @{const_name Sigma} then
           table
         else
-          Termtab.map_default (t, 65536) (curry Int.min (length args)) table
+          Termtab.map_default (t, 65536) (Integer.min (length args)) table
       | aux _ _ table = table
   in aux t [] end
 
-(* int -> int -> string *)
 fun uncurry_prefix_for k j =
   uncurry_prefix ^ string_of_int k ^ "@" ^ string_of_int j ^ name_sep
 
-(* int Termtab.tab term -> term *)
 fun uncurry_term table t =
   let
-    (* term -> term list -> term *)
     fun aux (t1 $ t2) args = aux t1 (aux t2 [] :: args)
       | aux (Abs (s, T, t')) args = betapplys (Abs (s, T, aux t' []), args)
       | aux (t as Const (s, T)) args =
@@ -131,17 +121,14 @@
 
 (** Boxing **)
 
-(* hol_context -> bool -> term -> term *)
 fun box_fun_and_pair_in_term (hol_ctxt as {thy, stds, fast_descrs, ...}) def
                              orig_t =
   let
-    (* typ -> typ *)
     fun box_relational_operator_type (Type (@{type_name fun}, Ts)) =
         Type (@{type_name fun}, map box_relational_operator_type Ts)
       | box_relational_operator_type (Type (@{type_name "*"}, Ts)) =
         Type (@{type_name "*"}, map (box_type hol_ctxt InPair) Ts)
       | box_relational_operator_type T = T
-    (* indexname * typ -> typ * term -> typ option list -> typ option list *)
     fun add_boxed_types_for_var (z as (_, T)) (T', t') =
       case t' of
         Var z' => z' = z ? insert (op =) T'
@@ -152,7 +139,6 @@
          | _ => raise TYPE ("Nitpick_Preproc.box_fun_and_pair_in_term.\
                             \add_boxed_types_for_var", [T'], []))
       | _ => exists_subterm (curry (op =) (Var z)) t' ? insert (op =) T
-    (* typ list -> typ list -> term -> indexname * typ -> typ *)
     fun box_var_in_def new_Ts old_Ts t (z as (_, T)) =
       case t of
         @{const Trueprop} $ t1 => box_var_in_def new_Ts old_Ts t1 z
@@ -170,8 +156,6 @@
         else
           T
       | _ => T
-    (* typ list -> typ list -> polarity -> string -> typ -> string -> typ
-       -> term -> term *)
     and do_quantifier new_Ts old_Ts polar quant_s quant_T abs_s abs_T t =
       let
         val abs_T' =
@@ -185,7 +169,6 @@
         $ Abs (abs_s, abs_T',
                t |> do_term (abs_T' :: new_Ts) (abs_T :: old_Ts) polar)
       end
-    (* typ list -> typ list -> string -> typ -> term -> term -> term *)
     and do_equals new_Ts old_Ts s0 T0 t1 t2 =
       let
         val (t1, t2) = pairself (do_term new_Ts old_Ts Neut) (t1, t2)
@@ -195,12 +178,10 @@
         list_comb (Const (s0, T --> T --> body_type T0),
                    map2 (coerce_term hol_ctxt new_Ts T) [T1, T2] [t1, t2])
       end
-    (* string -> typ -> term *)
     and do_description_operator s T =
       let val T1 = box_type hol_ctxt InFunLHS (range_type T) in
         Const (s, (T1 --> bool_T) --> T1)
       end
-    (* typ list -> typ list -> polarity -> term -> term *)
     and do_term new_Ts old_Ts polar t =
       case t of
         Const (s0 as @{const_name all}, T0) $ Abs (s1, T1, t1) =>
@@ -302,21 +283,16 @@
 
 val val_var_prefix = nitpick_prefix ^ "v"
 
-(* typ list -> int -> int -> int -> term -> term *)
 fun fresh_value_var Ts k n j t =
   Var ((val_var_prefix ^ nat_subscript (n - j), k), fastype_of1 (Ts, t))
 
-(* typ list -> term -> bool *)
 fun has_heavy_bounds_or_vars Ts t =
   let
-    (* typ list -> bool *)
     fun aux [] = false
       | aux [T] = is_fun_type T orelse is_pair_type T
       | aux _ = true
   in aux (map snd (Term.add_vars t []) @ map (nth Ts) (loose_bnos t)) end
 
-(* hol_context -> typ list -> bool -> int -> int -> term -> term list
-   -> term list -> term * term list *)
 fun pull_out_constr_comb ({thy, stds, ...} : hol_context) Ts relax k level t
                          args seen =
   let val t_comb = list_comb (t, args) in
@@ -336,18 +312,15 @@
     | _ => (t_comb, seen)
   end
 
-(* (term -> term) -> typ list -> int -> term list -> term list *)
 fun equations_for_pulled_out_constrs mk_eq Ts k seen =
   let val n = length seen in
     map2 (fn j => fn t => mk_eq (fresh_value_var Ts k n j t, t))
          (index_seq 0 n) seen
   end
 
-(* hol_context -> bool -> term -> term *)
 fun pull_out_universal_constrs hol_ctxt def t =
   let
     val k = maxidx_of_term t + 1
-    (* typ list -> bool -> term -> term list -> term list -> term * term list *)
     fun do_term Ts def t args seen =
       case t of
         (t0 as Const (@{const_name "=="}, _)) $ t1 $ t2 =>
@@ -367,8 +340,6 @@
           do_term Ts def t1 (t2 :: args) seen
         end
       | _ => pull_out_constr_comb hol_ctxt Ts def k 0 t args seen
-    (* typ list -> bool -> bool -> term -> term -> term -> term list
-       -> term * term list *)
     and do_eq_or_imp Ts eq def t0 t1 t2 seen =
       let
         val (t2, seen) = if eq andalso def then (t2, seen)
@@ -381,22 +352,18 @@
                                                          seen, concl)
   end
 
-(* term -> term -> term *)
 fun mk_exists v t =
   HOLogic.exists_const (fastype_of v) $ lambda v (incr_boundvars 1 t)
 
-(* hol_context -> term -> term *)
 fun pull_out_existential_constrs hol_ctxt t =
   let
     val k = maxidx_of_term t + 1
-    (* typ list -> int -> term -> term list -> term list -> term * term list *)
     fun aux Ts num_exists t args seen =
       case t of
         (t0 as Const (@{const_name Ex}, _)) $ Abs (s1, T1, t1) =>
         let
           val (t1, seen') = aux (T1 :: Ts) (num_exists + 1) t1 [] []
           val n = length seen'
-          (* unit -> term list *)
           fun vars () = map2 (fresh_value_var Ts k n) (index_seq 0 n) seen'
         in
           (equations_for_pulled_out_constrs HOLogic.mk_eq Ts k seen'
@@ -421,7 +388,6 @@
 val let_var_prefix = nitpick_prefix ^ "l"
 val let_inline_threshold = 32
 
-(* int -> typ -> term -> (term -> term) -> term *)
 fun hol_let n abs_T body_T f t =
   if n * size_of_term t <= let_inline_threshold then
     f t
@@ -431,14 +397,11 @@
       $ t $ abs_var z (incr_boundvars 1 (f (Var z)))
     end
 
-(* hol_context -> bool -> term -> term *)
 fun destroy_pulled_out_constrs (hol_ctxt as {thy, stds, ...}) axiom t =
   let
-    (* styp -> int *)
     val num_occs_of_var =
       fold_aterms (fn Var z => (fn f => fn z' => f z' |> z = z' ? Integer.add 1)
                     | _ => I) t (K 0)
-    (* bool -> term -> term *)
     fun aux careful ((t0 as Const (@{const_name "=="}, _)) $ t1 $ t2) =
         aux_eq careful true t0 t1 t2
       | aux careful ((t0 as @{const "==>"}) $ t1 $ t2) =
@@ -450,7 +413,6 @@
       | aux careful (Abs (s, T, t')) = Abs (s, T, aux careful t')
       | aux careful (t1 $ t2) = aux careful t1 $ aux careful t2
       | aux _ t = t
-    (* bool -> bool -> term -> term -> term -> term *)
     and aux_eq careful pass1 t0 t1 t2 =
       ((if careful then
           raise SAME ()
@@ -485,7 +447,6 @@
        |> body_type (type_of t0) = prop_T ? HOLogic.mk_Trueprop)
       handle SAME () => if pass1 then aux_eq careful false t0 t2 t1
                         else t0 $ aux false t2 $ aux false t1
-    (* styp -> term -> int -> typ -> term -> term *)
     and sel_eq x t n nth_T nth_t =
       HOLogic.eq_const nth_T $ nth_t
                              $ select_nth_constr_arg thy stds x t n nth_T
@@ -494,7 +455,6 @@
 
 (** Destruction of universal and existential equalities **)
 
-(* term -> term *)
 fun curry_assms (@{const "==>"} $ (@{const Trueprop}
                                    $ (@{const "op &"} $ t1 $ t2)) $ t3) =
     curry_assms (Logic.list_implies ([t1, t2] |> map HOLogic.mk_Trueprop, t3))
@@ -502,15 +462,12 @@
     @{const "==>"} $ curry_assms t1 $ curry_assms t2
   | curry_assms t = t
 
-(* term -> term *)
 val destroy_universal_equalities =
   let
-    (* term list -> (indexname * typ) list -> term -> term *)
     fun aux prems zs t =
       case t of
         @{const "==>"} $ t1 $ t2 => aux_implies prems zs t1 t2
       | _ => Logic.list_implies (rev prems, t)
-    (* term list -> (indexname * typ) list -> term -> term -> term *)
     and aux_implies prems zs t1 t2 =
       case t1 of
         Const (@{const_name "=="}, _) $ Var z $ t' => aux_eq prems zs z t' t1 t2
@@ -519,8 +476,6 @@
       | @{const Trueprop} $ (Const (@{const_name "op ="}, _) $ t' $ Var z) =>
         aux_eq prems zs z t' t1 t2
       | _ => aux (t1 :: prems) (Term.add_vars t1 zs) t2
-    (* term list -> (indexname * typ) list -> indexname * typ -> term -> term
-       -> term -> term *)
     and aux_eq prems zs z t' t1 t2 =
       if not (member (op =) zs z) andalso
          not (exists_subterm (curry (op =) (Var z)) t') then
@@ -529,15 +484,11 @@
         aux (t1 :: prems) (Term.add_vars t1 zs) t2
   in aux [] [] end
 
-(* theory -> (typ option * bool) list -> int -> term list -> term list
-   -> (term * term list) option *)
 fun find_bound_assign thy stds j =
   let
-    (* term list -> term list -> (term * term list) option *)
     fun do_term _ [] = NONE
       | do_term seen (t :: ts) =
         let
-          (* bool -> term -> term -> (term * term list) option *)
           fun do_eq pass1 t1 t2 =
             (if loose_bvar1 (t2, j) then
                if pass1 then do_eq false t2 t1 else raise SAME ()
@@ -559,10 +510,8 @@
         end
   in do_term end
 
-(* int -> term -> term -> term *)
 fun subst_one_bound j arg t =
   let
-    (* term * int -> term *)
     fun aux (Bound i, lev) =
         if i < lev then raise SAME ()
         else if i = lev then incr_boundvars (lev - j) arg
@@ -574,10 +523,8 @@
       | aux _ = raise SAME ()
   in aux (t, j) handle SAME () => t end
 
-(* hol_context -> term -> term *)
 fun destroy_existential_equalities ({thy, stds, ...} : hol_context) =
   let
-    (* string list -> typ list -> term list -> term *)
     fun kill [] [] ts = foldr1 s_conj ts
       | kill (s :: ss) (T :: Ts) ts =
         (case find_bound_assign thy stds (length ss) [] ts of
@@ -589,7 +536,6 @@
            Const (@{const_name Ex}, (T --> bool_T) --> bool_T)
            $ Abs (s, T, kill ss Ts ts))
       | kill _ _ _ = raise UnequalLengths
-    (* string list -> typ list -> term -> term *)
     fun gather ss Ts (Const (@{const_name Ex}, _) $ Abs (s1, T1, t1)) =
         gather (ss @ [s1]) (Ts @ [T1]) t1
       | gather [] [] (Abs (s, T, t1)) = Abs (s, T, gather [] [] t1)
@@ -600,20 +546,15 @@
 
 (** Skolemization **)
 
-(* int -> int -> string *)
 fun skolem_prefix_for k j =
   skolem_prefix ^ string_of_int k ^ "@" ^ string_of_int j ^ name_sep
 
-(* hol_context -> int -> term -> term *)
 fun skolemize_term_and_more (hol_ctxt as {thy, def_table, skolems, ...})
                             skolem_depth =
   let
-    (* int list -> int list *)
     val incrs = map (Integer.add 1)
-    (* string list -> typ list -> int list -> int -> polarity -> term -> term *)
     fun aux ss Ts js depth polar t =
       let
-        (* string -> typ -> string -> typ -> term -> term *)
         fun do_quantifier quant_s quant_T abs_s abs_T t =
           if not (loose_bvar1 (t, 0)) then
             aux ss Ts js depth polar (incr_boundvars ~1 t)
@@ -679,7 +620,6 @@
                 else
                   (ubfp_prefix, @{const "op &"},
                    @{const_name semilattice_inf_class.inf})
-              (* unit -> term *)
               fun pos () = unrolled_inductive_pred_const hol_ctxt gfp x
                            |> aux ss Ts js depth polar
               fun neg () = Const (pref ^ s, T)
@@ -693,7 +633,6 @@
                      val ((trunk_arg_Ts, rump_arg_T), body_T) =
                        T |> strip_type |>> split_last
                      val set_T = rump_arg_T --> body_T
-                     (* (unit -> term) -> term *)
                      fun app f =
                        list_comb (f (),
                                   map Bound (length trunk_arg_Ts - 1 downto 0))
@@ -717,21 +656,18 @@
 
 (** Function specialization **)
 
-(* term -> term list *)
 fun params_in_equation (@{const "==>"} $ _ $ t2) = params_in_equation t2
   | params_in_equation (@{const Trueprop} $ t1) = params_in_equation t1
   | params_in_equation (Const (@{const_name "op ="}, _) $ t1 $ _) =
     snd (strip_comb t1)
   | params_in_equation _ = []
 
-(* styp -> styp -> int list -> term list -> term list -> term -> term *)
 fun specialize_fun_axiom x x' fixed_js fixed_args extra_args t =
   let
     val k = fold Integer.max (map maxidx_of_term (fixed_args @ extra_args)) 0
             + 1
     val t = map_aterms (fn Var ((s, i), T) => Var ((s, k + i), T) | t' => t') t
     val fixed_params = filter_indices fixed_js (params_in_equation t)
-    (* term list -> term -> term *)
     fun aux args (Abs (s, T, t)) = list_comb (Abs (s, T, aux [] t), args)
       | aux args (t1 $ t2) = aux (aux [] t2 :: args) t1
       | aux args t =
@@ -743,10 +679,8 @@
           end
   in aux [] t end
 
-(* hol_context -> styp -> (int * term option) list *)
 fun static_args_in_term ({ersatz_table, ...} : hol_context) x t =
   let
-    (* term -> term list -> term list -> term list list *)
     fun fun_calls (Abs (_, _, t)) _ = fun_calls t []
       | fun_calls (t1 $ t2) args = fun_calls t2 [] #> fun_calls t1 (t2 :: args)
       | fun_calls t args =
@@ -756,7 +690,6 @@
                             SOME s'' => x = (s'', T')
                           | NONE => false)
          | _ => false) ? cons args
-    (* term list list -> term list list -> term list -> term list list *)
     fun call_sets [] [] vs = [vs]
       | call_sets [] uss vs = vs :: call_sets uss [] []
       | call_sets ([] :: _) _ _ = []
@@ -773,12 +706,10 @@
                  | [t as Free _] => cons (j, SOME t)
                  | _ => I) indexed_sets []
   end
-(* hol_context -> styp -> term list -> (int * term option) list *)
 fun static_args_in_terms hol_ctxt x =
   map (static_args_in_term hol_ctxt x)
   #> fold1 (OrdList.inter (prod_ord int_ord (option_ord Term_Ord.term_ord)))
 
-(* (int * term option) list -> (int * term) list -> int list *)
 fun overlapping_indices [] _ = []
   | overlapping_indices _ [] = []
   | overlapping_indices (ps1 as (j1, t1) :: ps1') (ps2 as (j2, t2) :: ps2') =
@@ -786,7 +717,6 @@
     else if j1 > j2 then overlapping_indices ps1 ps2'
     else overlapping_indices ps1' ps2' |> the_default t2 t1 = t2 ? cons j1
 
-(* typ list -> term -> bool *)
 fun is_eligible_arg Ts t =
   let val bad_Ts = map snd (Term.add_vars t []) @ map (nth Ts) (loose_bnos t) in
     null bad_Ts orelse
@@ -794,7 +724,6 @@
      forall (not o is_higher_order_type) bad_Ts)
   end
 
-(* int -> string *)
 fun special_prefix_for j = special_prefix ^ string_of_int j ^ name_sep
 
 (* If a constant's definition is picked up deeper than this threshold, we
@@ -803,7 +732,6 @@
 
 val bound_var_prefix = "b"
 
-(* hol_context -> int -> term -> term *)
 fun specialize_consts_in_term (hol_ctxt as {specialize, simp_table,
                                             special_funs, ...}) depth t =
   if not specialize orelse depth > special_max_depth then
@@ -812,7 +740,6 @@
     let
       val blacklist = if depth = 0 then []
                       else case term_under_def t of Const x => [x] | _ => []
-      (* term list -> typ list -> term -> term *)
       fun aux args Ts (Const (x as (s, T))) =
           ((if not (member (op =) blacklist x) andalso not (null args) andalso
                not (String.isPrefix special_prefix s) andalso
@@ -836,7 +763,6 @@
                 val extra_args = map Var vars @ map Bound bound_js @ live_args
                 val extra_Ts = map snd vars @ filter_indices bound_js Ts
                 val k = maxidx_of_term t + 1
-                (* int -> term *)
                 fun var_for_bound_no j =
                   Var ((bound_var_prefix ^
                         nat_subscript (find_index (curry (op =) j) bound_js
@@ -880,7 +806,6 @@
 
 val cong_var_prefix = "c"
 
-(* typ -> special_triple -> special_triple -> term *)
 fun special_congruence_axiom T (js1, ts1, x1) (js2, ts2, x2) =
   let
     val (bounds1, bounds2) = pairself (map Var o special_bounds) (ts1, ts2)
@@ -905,7 +830,6 @@
     |> close_form (* TODO: needed? *)
   end
 
-(* hol_context -> styp list -> term list *)
 fun special_congruence_axioms (hol_ctxt as {special_funs, ...}) xs =
   let
     val groups =
@@ -914,14 +838,10 @@
       |> AList.group (op =)
       |> filter_out (is_equational_fun_surely_complete hol_ctxt o fst)
       |> map (fn (x, zs) => (x, zs |> member (op =) xs x ? cons ([], [], x)))
-    (* special_triple -> int *)
     fun generality (js, _, _) = ~(length js)
-    (* special_triple -> special_triple -> bool *)
     fun is_more_specific (j1, t1, x1) (j2, t2, x2) =
       x1 <> x2 andalso OrdList.subset (prod_ord int_ord Term_Ord.term_ord)
                                       (j2 ~~ t2, j1 ~~ t1)
-    (* typ -> special_triple list -> special_triple list -> special_triple list
-       -> term list -> term list *)
     fun do_pass_1 _ [] [_] [_] = I
       | do_pass_1 T skipped _ [] = do_pass_2 T skipped
       | do_pass_1 T skipped all (z :: zs) =
@@ -930,7 +850,6 @@
           [] => do_pass_1 T (z :: skipped) all zs
         | (z' :: _) => cons (special_congruence_axiom T z z')
                        #> do_pass_1 T skipped all zs
-    (* typ -> special_triple list -> term list -> term list *)
     and do_pass_2 _ [] = I
       | do_pass_2 T (z :: zs) =
         fold (cons o special_congruence_axiom T z) zs #> do_pass_2 T zs
@@ -938,32 +857,23 @@
 
 (** Axiom selection **)
 
-(* 'a Symtab.table -> 'a list *)
 fun all_table_entries table = Symtab.fold (append o snd) table []
-(* const_table -> string -> const_table *)
 fun extra_table table s = Symtab.make [(s, all_table_entries table)]
 
-(* int -> term -> term *)
 fun eval_axiom_for_term j t =
   Logic.mk_equals (Const (eval_prefix ^ string_of_int j, fastype_of t), t)
 
-(* term -> bool *)
 val is_trivial_equation = the_default false o try (op aconv o Logic.dest_equals)
 
 (* Prevents divergence in case of cyclic or infinite axiom dependencies. *)
 val axioms_max_depth = 255
 
-(* hol_context -> term -> term list * term list * bool * bool *)
 fun axioms_for_term
         (hol_ctxt as {thy, ctxt, max_bisim_depth, stds, user_axioms,
                       fast_descrs, evals, def_table, nondef_table,
                       choice_spec_table, user_nondefs, ...}) t =
   let
     type accumulator = styp list * (term list * term list)
-    (* (term list * term list -> term list)
-       -> ((term list -> term list) -> term list * term list
-           -> term list * term list)
-       -> int -> term -> accumulator -> accumulator *)
     fun add_axiom get app depth t (accum as (xs, axs)) =
       let
         val t = t |> unfold_defs_in_term hol_ctxt
@@ -977,7 +887,6 @@
             else add_axioms_for_term (depth + 1) t' (xs, app (cons t') axs)
           end
       end
-    (* int -> term -> accumulator -> accumulator *)
     and add_def_axiom depth = add_axiom fst apfst depth
     and add_nondef_axiom depth = add_axiom snd apsnd depth
     and add_maybe_def_axiom depth t =
@@ -986,7 +895,6 @@
     and add_eq_axiom depth t =
       (if is_constr_pattern_formula thy t then add_def_axiom
        else add_nondef_axiom) depth t
-    (* int -> term -> accumulator -> accumulator *)
     and add_axioms_for_term depth t (accum as (xs, axs)) =
       case t of
         t1 $ t2 => accum |> fold (add_axioms_for_term depth) [t1, t2]
@@ -1058,7 +966,6 @@
       | Bound _ => accum
       | Abs (_, T, t) => accum |> add_axioms_for_term depth t
                                |> add_axioms_for_type depth T
-    (* int -> typ -> accumulator -> accumulator *)
     and add_axioms_for_type depth T =
       case T of
         Type (@{type_name fun}, Ts) => fold (add_axioms_for_type depth) Ts
@@ -1080,7 +987,6 @@
                    (codatatype_bisim_axioms hol_ctxt T)
             else
               I)
-    (* int -> typ -> sort -> accumulator -> accumulator *)
     and add_axioms_for_sort depth T S =
       let
         val supers = Sign.complete_sort thy S
@@ -1112,15 +1018,12 @@
 
 (** Simplification of constructor/selector terms **)
 
-(* theory -> term -> term *)
 fun simplify_constrs_and_sels thy t =
   let
-    (* term -> int -> term *)
     fun is_nth_sel_on t' n (Const (s, _) $ t) =
         (t = t' andalso is_sel_like_and_no_discr s andalso
          sel_no_from_name s = n)
       | is_nth_sel_on _ _ _ = false
-    (* term -> term list -> term *)
     fun do_term (Const (@{const_name Rep_Frac}, _)
                  $ (Const (@{const_name Abs_Frac}, _) $ t1)) [] = do_term t1 []
       | do_term (Const (@{const_name Abs_Frac}, _)
@@ -1160,7 +1063,6 @@
 
 (** Quantifier massaging: Distributing quantifiers **)
 
-(* term -> term *)
 fun distribute_quantifiers t =
   case t of
     (t0 as Const (@{const_name All}, T0)) $ Abs (s, T1, t1) =>
@@ -1199,7 +1101,6 @@
 
 (** Quantifier massaging: Pushing quantifiers inward **)
 
-(* int -> int -> (int -> int) -> term -> term *)
 fun renumber_bounds j n f t =
   case t of
     t1 $ t2 => renumber_bounds j n f t1 $ renumber_bounds j n f t2
@@ -1214,10 +1115,8 @@
    paper). *)
 val quantifier_cluster_threshold = 7
 
-(* term -> term *)
 val push_quantifiers_inward =
   let
-    (* string -> string list -> typ list -> term -> term *)
     fun aux quant_s ss Ts t =
       (case t of
          Const (s0, _) $ Abs (s1, T1, t1 as _ $ _) =>
@@ -1237,7 +1136,6 @@
                else
                  let
                    val typical_card = 4
-                   (* ('a -> ''b list) -> 'a list -> ''b list *)
                    fun big_union proj ps =
                      fold (fold (insert (op =)) o proj) ps []
                    val (ts, connective) = strip_any_connective t
@@ -1245,11 +1143,8 @@
                      map (bounded_card_of_type 65536 typical_card []) Ts
                    val t_costs = map size_of_term ts
                    val num_Ts = length Ts
-                   (* int -> int *)
                    val flip = curry (op -) (num_Ts - 1)
                    val t_boundss = map (map flip o loose_bnos) ts
-                   (* (int list * int) list -> int list
-                      -> (int list * int) list *)
                    fun merge costly_boundss [] = costly_boundss
                      | merge costly_boundss (j :: js) =
                        let
@@ -1261,9 +1156,7 @@
                          val yeas_cost = Integer.sum (map snd yeas)
                                          * nth T_costs j
                        in merge ((yeas_bounds, yeas_cost) :: nays) js end
-                   (* (int list * int) list -> int list -> int *)
                    val cost = Integer.sum o map snd oo merge
-                   (* (int list * int) list -> int list -> int list *)
                    fun heuristically_best_permutation _ [] = []
                      | heuristically_best_permutation costly_boundss js =
                        let
@@ -1287,14 +1180,12 @@
                                      (index_seq 0 num_Ts)
                    val ts = map (renumber_bounds 0 num_Ts (nth back_js o flip))
                                 ts
-                   (* (term * int list) list -> term *)
                    fun mk_connection [] =
                        raise ARG ("Nitpick_Preproc.push_quantifiers_inward.aux.\
                                   \mk_connection", "")
                      | mk_connection ts_cum_bounds =
                        ts_cum_bounds |> map fst
                        |> foldr1 (fn (t1, t2) => connective $ t1 $ t2)
-                   (* (term * int list) list -> int list -> term *)
                    fun build ts_cum_bounds [] = ts_cum_bounds |> mk_connection
                      | build ts_cum_bounds (j :: js) =
                        let
@@ -1321,9 +1212,6 @@
 
 (** Inference of finite functions **)
 
-(* hol_context -> bool -> (typ option * bool option) list
-   -> (typ option * bool option) list -> term list * term list
-   -> term list * term list *)
 fun finitize_all_types_of_funs (hol_ctxt as {thy, ...}) binarize finitizes monos
                                (nondef_ts, def_ts) =
   let
@@ -1338,18 +1226,15 @@
 
 (** Preprocessor entry point **)
 
-(* hol_context -> (typ option * bool option) list
-   -> (typ option * bool option) list -> term
-   -> term list * term list * bool * bool * bool *)
+val max_skolem_depth = 4
+
 fun preprocess_term (hol_ctxt as {thy, stds, binary_ints, destroy_constrs,
-                                  boxes, skolemize, uncurry, ...})
-                    finitizes monos t =
+                                  boxes, ...}) finitizes monos t =
   let
-    val skolem_depth = if skolemize then 4 else ~1
     val (nondef_ts, def_ts, got_all_mono_user_axioms, no_poly_user_axioms) =
       t |> unfold_defs_in_term hol_ctxt
         |> close_form
-        |> skolemize_term_and_more hol_ctxt skolem_depth
+        |> skolemize_term_and_more hol_ctxt max_skolem_depth
         |> specialize_consts_in_term hol_ctxt 0
         |> axioms_for_term hol_ctxt
     val binarize =
@@ -1361,14 +1246,12 @@
              (binary_ints = SOME true orelse
               exists should_use_binary_ints (nondef_ts @ def_ts))
     val box = exists (not_equal (SOME false) o snd) boxes
-    val uncurry = uncurry andalso box
     val table =
       Termtab.empty
-      |> uncurry ? fold (add_to_uncurry_table thy) (nondef_ts @ def_ts)
-    (* bool -> term -> term *)
+      |> box ? fold (add_to_uncurry_table thy) (nondef_ts @ def_ts)
     fun do_rest def =
       binarize ? binarize_nat_and_int_in_term
-      #> uncurry ? uncurry_term table
+      #> box ? uncurry_term table
       #> box ? box_fun_and_pair_in_term hol_ctxt def
       #> destroy_constrs ? (pull_out_universal_constrs hol_ctxt def
                             #> pull_out_existential_constrs hol_ctxt
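
Aside, not part of the patch: two details of the hunks above, illustrated with self-contained Standard ML. First, add_to_uncurry_table records, for each constant, the smallest number of arguments it is ever applied to (Termtab.map_default with 65536 as the "no information yet" default); since the hunk substitutes Integer.min for curry Int.min in the same position, Integer.min is evidently a curried counterpart of the pair-taking Int.min from the basis library. A sketch of that relationship, with curry defined locally because it is an Isabelle library convenience:

    fun curry f x y = f (x, y)

    val a = Int.min (2, 5)        (* 2, basis-library version on a pair *)
    val b = curry Int.min 2 5     (* 2, the same function, curried *)

Second, uncurrying itself replaces an application such as f x y by an application of a variant constant to the arguments bundled together:

    fun curried_add x y : int = x + y        (* int -> int -> int *)
    fun uncurried_add (x, y) : int = x + y   (* int * int -> int *)

With the change above, that pass runs whenever boxing is enabled ("box ? uncurry_term table") instead of being additionally guarded by the removed uncurry option.
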
--- a/src/HOL/Tools/Nitpick/nitpick_rep.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_rep.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -77,18 +77,15 @@
 
 exception REP of string * rep list
 
-(* polarity -> string *)
 fun string_for_polarity Pos = "+"
   | string_for_polarity Neg = "-"
   | string_for_polarity Neut = "="
 
-(* rep -> string *)
 fun atomic_string_for_rep rep =
   let val s = string_for_rep rep in
     if String.isPrefix "[" s orelse not (is_substring_of " " s) then s
     else "(" ^ s ^ ")"
   end
-(* rep -> string *)
 and string_for_rep Any = "X"
   | string_for_rep (Formula polar) = "F" ^ string_for_polarity polar
   | string_for_rep Unit = "U"
@@ -101,7 +98,6 @@
     atomic_string_for_rep R1 ^ " => " ^ string_for_rep R2
   | string_for_rep (Opt R) = atomic_string_for_rep R ^ "?"
 
-(* rep -> bool *)
 fun is_Func (Func _) = true
   | is_Func _ = false
 fun is_Opt (Opt _) = true
@@ -110,7 +106,6 @@
   | is_opt_rep (Opt _) = true
   | is_opt_rep _ = false
 
-(* rep -> int *)
 fun card_of_rep Any = raise REP ("Nitpick_Rep.card_of_rep", [Any])
   | card_of_rep (Formula _) = 2
   | card_of_rep Unit = 1
@@ -140,7 +135,6 @@
     Int.max (min_univ_card_of_rep R1, min_univ_card_of_rep R2)
   | min_univ_card_of_rep (Opt R) = min_univ_card_of_rep R
 
-(* rep -> bool *)
 fun is_one_rep Unit = true
   | is_one_rep (Atom _) = true
   | is_one_rep (Struct _) = true
@@ -149,10 +143,8 @@
 fun is_lone_rep (Opt R) = is_one_rep R
   | is_lone_rep R = is_one_rep R
 
-(* rep -> rep * rep *)
 fun dest_Func (Func z) = z
   | dest_Func R = raise REP ("Nitpick_Rep.dest_Func", [R])
-(* int Typtab.table -> typ -> (unit -> int) -> rep -> rep *)
 fun lazy_range_rep _ _ _ Unit = Unit
   | lazy_range_rep _ _ _ (Vect (_, R)) = R
   | lazy_range_rep _ _ _ (Func (_, R2)) = R2
@@ -164,19 +156,15 @@
     Atom (ran_card (), offset_of_type ofs T2)
   | lazy_range_rep _ _ _ R = raise REP ("Nitpick_Rep.lazy_range_rep", [R])
 
-(* rep -> rep list *)
 fun binder_reps (Func (R1, R2)) = R1 :: binder_reps R2
   | binder_reps _ = []
-(* rep -> rep *)
 fun body_rep (Func (_, R2)) = body_rep R2
   | body_rep R = R
 
-(* rep -> rep *)
 fun flip_rep_polarity (Formula polar) = Formula (flip_polarity polar)
   | flip_rep_polarity (Func (R1, R2)) = Func (R1, flip_rep_polarity R2)
   | flip_rep_polarity R = R
 
-(* int Typtab.table -> rep -> rep *)
 fun one_rep _ _ Any = raise REP ("Nitpick_Rep.one_rep", [Any])
   | one_rep _ _ (Atom x) = Atom x
   | one_rep _ _ (Struct Rs) = Struct Rs
@@ -189,12 +177,10 @@
 fun opt_rep ofs (Type (@{type_name fun}, [_, T2])) (Func (R1, R2)) =
     Func (R1, opt_rep ofs T2 R2)
   | opt_rep ofs T R = Opt (optable_rep ofs T R)
-(* rep -> rep *)
 fun unopt_rep (Func (R1, R2)) = Func (R1, unopt_rep R2)
   | unopt_rep (Opt R) = R
   | unopt_rep R = R
 
-(* polarity -> polarity -> polarity *)
 fun min_polarity polar1 polar2 =
   if polar1 = polar2 then
     polar1
@@ -208,7 +194,6 @@
 
 (* It's important that Func is before Vect, because if the range is Opt we
    could lose information by converting a Func to a Vect. *)
-(* rep -> rep -> rep *)
 fun min_rep (Opt R1) (Opt R2) = Opt (min_rep R1 R2)
   | min_rep (Opt R) _ = Opt R
   | min_rep _ (Opt R) = Opt R
@@ -237,7 +222,6 @@
     else if k1 > k2 then Vect (k2, R2)
     else Vect (k1, min_rep R1 R2)
   | min_rep R1 R2 = raise REP ("Nitpick_Rep.min_rep", [R1, R2])
-(* rep list -> rep list -> rep list *)
 and min_reps [] _ = []
   | min_reps _ [] = []
   | min_reps (R1 :: Rs1) (R2 :: Rs2) =
@@ -245,7 +229,6 @@
     else if min_rep R1 R2 = R1 then R1 :: Rs1
     else R2 :: Rs2
 
-(* int -> rep -> int *)
 fun card_of_domain_from_rep ran_card R =
   case R of
     Unit => 1
@@ -255,14 +238,12 @@
   | Opt R => card_of_domain_from_rep ran_card R
   | _ => raise REP ("Nitpick_Rep.card_of_domain_from_rep", [R])
 
-(* int Typtab.table -> typ -> rep -> rep *)
 fun rep_to_binary_rel_rep ofs T R =
   let
     val k = exact_root 2 (card_of_domain_from_rep 2 R)
     val j0 = offset_of_type ofs (fst (HOLogic.dest_prodT (domain_type T)))
   in Func (Struct [Atom (k, j0), Atom (k, j0)], Formula Neut) end
 
-(* scope -> typ -> rep *)
 fun best_one_rep_for_type (scope as {card_assigns, ...} : scope)
                           (Type (@{type_name fun}, [T1, T2])) =
     (case best_one_rep_for_type scope T2 of
@@ -283,7 +264,6 @@
 
 (* Datatypes are never represented by Unit, because it would confuse
    "nfa_transitions_for_ctor". *)
-(* scope -> typ -> rep *)
 fun best_opt_set_rep_for_type scope (Type (@{type_name fun}, [T1, T2])) =
     Func (best_one_rep_for_type scope T1, best_opt_set_rep_for_type scope T2)
   | best_opt_set_rep_for_type (scope as {ofs, ...}) T =
@@ -308,7 +288,6 @@
   | best_non_opt_symmetric_reps_for_fun_type _ T =
     raise TYPE ("Nitpick_Rep.best_non_opt_symmetric_reps_for_fun_type", [T], [])
 
-(* rep -> (int * int) list *)
 fun atom_schema_of_rep Any = raise REP ("Nitpick_Rep.atom_schema_of_rep", [Any])
   | atom_schema_of_rep (Formula _) = []
   | atom_schema_of_rep Unit = []
@@ -318,10 +297,8 @@
   | atom_schema_of_rep (Func (R1, R2)) =
     atom_schema_of_rep R1 @ atom_schema_of_rep R2
   | atom_schema_of_rep (Opt R) = atom_schema_of_rep R
-(* rep list -> (int * int) list *)
 and atom_schema_of_reps Rs = maps atom_schema_of_rep Rs
 
-(* typ -> rep -> typ list *)
 fun type_schema_of_rep _ (Formula _) = []
   | type_schema_of_rep _ Unit = []
   | type_schema_of_rep T (Atom _) = [T]
@@ -333,12 +310,9 @@
     type_schema_of_rep T1 R1 @ type_schema_of_rep T2 R2
   | type_schema_of_rep T (Opt R) = type_schema_of_rep T R
   | type_schema_of_rep _ R = raise REP ("Nitpick_Rep.type_schema_of_rep", [R])
-(* typ list -> rep list -> typ list *)
 and type_schema_of_reps Ts Rs = flat (map2 type_schema_of_rep Ts Rs)
 
-(* rep -> int list list *)
 val all_combinations_for_rep = all_combinations o atom_schema_of_rep
-(* rep list -> int list list *)
 val all_combinations_for_reps = all_combinations o atom_schema_of_reps
 
 end;
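
Aside, not part of the patch: the comment before min_rep notes that Func must be tried before Vect so that an Opt range is never silently discarded; clause order in the function doubles as a precedence list among representations. A toy Standard ML sketch of that pattern, with an invented two-constructor datatype mirroring the Opt clauses of min_rep:

    datatype toy = Opt of toy | Atom of int

    fun min_toy (Opt r1) (Opt r2) = Opt (min_toy r1 r2)
      | min_toy (Opt r) _ = Opt r
      | min_toy _ (Opt r) = Opt r
      | min_toy (Atom k1) (Atom k2) = Atom (Int.min (k1, k2))

    (* min_toy (Opt (Atom 3)) (Atom 5) = Opt (Atom 3): the Opt clauses fire
       first, so optionality is preserved, just as min_rep keeps Opt. *)
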
--- a/src/HOL/Tools/Nitpick/nitpick_scope.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_scope.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -10,32 +10,32 @@
   type styp = Nitpick_Util.styp
   type hol_context = Nitpick_HOL.hol_context
 
-  type constr_spec = {
-    const: styp,
-    delta: int,
-    epsilon: int,
-    exclusive: bool,
-    explicit_max: int,
-    total: bool}
+  type constr_spec =
+    {const: styp,
+     delta: int,
+     epsilon: int,
+     exclusive: bool,
+     explicit_max: int,
+     total: bool}
 
-  type dtype_spec = {
-    typ: typ,
-    card: int,
-    co: bool,
-    standard: bool,
-    complete: bool * bool,
-    concrete: bool * bool,
-    deep: bool,
-    constrs: constr_spec list}
+  type dtype_spec =
+    {typ: typ,
+     card: int,
+     co: bool,
+     standard: bool,
+     complete: bool * bool,
+     concrete: bool * bool,
+     deep: bool,
+     constrs: constr_spec list}
 
-  type scope = {
-    hol_ctxt: hol_context,
-    binarize: bool,
-    card_assigns: (typ * int) list,
-    bits: int,
-    bisim_depth: int,
-    datatypes: dtype_spec list,
-    ofs: int Typtab.table}
+  type scope =
+    {hol_ctxt: hol_context,
+     binarize: bool,
+     card_assigns: (typ * int) list,
+     bits: int,
+     bisim_depth: int,
+     datatypes: dtype_spec list,
+     ofs: int Typtab.table}
 
   val datatype_spec : dtype_spec list -> typ -> dtype_spec option
   val constr_spec : dtype_spec list -> styp -> constr_spec
@@ -49,7 +49,7 @@
   val scopes_equivalent : scope * scope -> bool
   val scope_less_eq : scope -> scope -> bool
   val all_scopes :
-    hol_context -> bool -> int -> (typ option * int list) list
+    hol_context -> bool -> (typ option * int list) list
     -> (styp option * int list) list -> (styp option * int list) list
     -> int list -> int list -> typ list -> typ list -> typ list -> typ list
     -> int * scope list
@@ -61,43 +61,41 @@
 open Nitpick_Util
 open Nitpick_HOL
 
-type constr_spec = {
-  const: styp,
-  delta: int,
-  epsilon: int,
-  exclusive: bool,
-  explicit_max: int,
-  total: bool}
+type constr_spec =
+  {const: styp,
+   delta: int,
+   epsilon: int,
+   exclusive: bool,
+   explicit_max: int,
+   total: bool}
 
-type dtype_spec = {
-  typ: typ,
-  card: int,
-  co: bool,
-  standard: bool,
-  complete: bool * bool,
-  concrete: bool * bool,
-  deep: bool,
-  constrs: constr_spec list}
+type dtype_spec =
+  {typ: typ,
+   card: int,
+   co: bool,
+   standard: bool,
+   complete: bool * bool,
+   concrete: bool * bool,
+   deep: bool,
+   constrs: constr_spec list}
 
-type scope = {
-  hol_ctxt: hol_context,
-  binarize: bool,
-  card_assigns: (typ * int) list,
-  bits: int,
-  bisim_depth: int,
-  datatypes: dtype_spec list,
-  ofs: int Typtab.table}
+type scope =
+  {hol_ctxt: hol_context,
+   binarize: bool,
+   card_assigns: (typ * int) list,
+   bits: int,
+   bisim_depth: int,
+   datatypes: dtype_spec list,
+   ofs: int Typtab.table}
 
 datatype row_kind = Card of typ | Max of styp
 
 type row = row_kind * int list
 type block = row list
 
-(* dtype_spec list -> typ -> dtype_spec option *)
 fun datatype_spec (dtypes : dtype_spec list) T =
   List.find (curry (op =) T o #typ) dtypes
 
-(* dtype_spec list -> styp -> constr_spec *)
 fun constr_spec [] x = raise TERM ("Nitpick_Scope.constr_spec", [Const x])
   | constr_spec ({constrs, ...} :: dtypes : dtype_spec list) (x as (s, T)) =
     case List.find (curry (op =) (s, body_type T) o (apsnd body_type o #const))
@@ -105,7 +103,6 @@
       SOME c => c
     | NONE => constr_spec dtypes x
 
-(* dtype_spec list -> bool -> typ -> bool *)
 fun is_complete_type dtypes facto (Type (@{type_name fun}, [T1, T2])) =
     is_concrete_type dtypes facto T1 andalso is_complete_type dtypes facto T2
   | is_complete_type dtypes facto (Type (@{type_name fin_fun}, [T1, T2])) =
@@ -128,19 +125,15 @@
 and is_exact_type dtypes facto =
   is_complete_type dtypes facto andf is_concrete_type dtypes facto
 
-(* int Typtab.table -> typ -> int *)
 fun offset_of_type ofs T =
   case Typtab.lookup ofs T of
     SOME j0 => j0
   | NONE => Typtab.lookup ofs dummyT |> the_default 0
 
-(* scope -> typ -> int * int *)
 fun spec_of_type ({card_assigns, ofs, ...} : scope) T =
   (card_of_type card_assigns T
    handle TYPE ("Nitpick_HOL.card_of_type", _, _) => ~1, offset_of_type ofs T)
 
-(* (string -> string) -> scope
-   -> string list * string list * string list * string list * string list *)
 fun quintuple_for_scope quote
         ({hol_ctxt = {thy, ctxt, stds, ...}, card_assigns, bits, bisim_depth,
          datatypes, ...} : scope) =
@@ -180,7 +173,6 @@
                    maxes (), iters (), miscs ())) ()
   end
 
-(* scope -> bool -> Pretty.T list *)
 fun pretties_for_scope scope verbose =
   let
     val (primary_cards, secondary_cards, maxes, iters, bisim_depths) =
@@ -194,14 +186,10 @@
               else
                 [])
   in
-    if null ss then
-      []
-    else
-      Sledgehammer_Util.serial_commas "and" ss
-      |> map Pretty.str |> Pretty.breaks
+    if null ss then []
+    else serial_commas "and" ss |> map Pretty.str |> Pretty.breaks
   end
 
-(* scope -> string *)
 fun multiline_string_for_scope scope =
   let
     val (primary_cards, secondary_cards, maxes, iters, bisim_depths) =
@@ -216,47 +204,35 @@
     | lines => space_implode "\n" lines
   end
 
-(* scope * scope -> bool *)
 fun scopes_equivalent (s1 : scope, s2 : scope) =
   #datatypes s1 = #datatypes s2 andalso #card_assigns s1 = #card_assigns s2
 fun scope_less_eq (s1 : scope) (s2 : scope) =
   (s1, s2) |> pairself (map snd o #card_assigns) |> op ~~ |> forall (op <=)
 
-(* row -> int *)
 fun rank_of_row (_, ks) = length ks
-(* block -> int *)
 fun rank_of_block block = fold Integer.max (map rank_of_row block) 1
-(* int -> typ * int list -> typ * int list *)
 fun project_row column (y, ks) = (y, [nth ks (Int.min (column, length ks - 1))])
-(* int -> block -> block *)
 fun project_block (column, block) = map (project_row column) block
 
-(* (''a * ''a -> bool) -> (''a option * int list) list -> ''a -> int list *)
 fun lookup_ints_assign eq assigns key =
   case triple_lookup eq assigns key of
     SOME ks => ks
   | NONE => raise ARG ("Nitpick_Scope.lookup_ints_assign", "")
-(* theory -> (typ option * int list) list -> typ -> int list *)
 fun lookup_type_ints_assign thy assigns T =
-  map (curry Int.max 1) (lookup_ints_assign (type_match thy) assigns T)
+  map (Integer.max 1) (lookup_ints_assign (type_match thy) assigns T)
   handle ARG ("Nitpick_Scope.lookup_ints_assign", _) =>
          raise TYPE ("Nitpick_Scope.lookup_type_ints_assign", [T], [])
-(* theory -> (styp option * int list) list -> styp -> int list *)
 fun lookup_const_ints_assign thy assigns x =
   lookup_ints_assign (const_match thy) assigns x
   handle ARG ("Nitpick_Scope.lookup_ints_assign", _) =>
          raise TERM ("Nitpick_Scope.lookup_const_ints_assign", [Const x])
 
-(* theory -> (styp option * int list) list -> styp -> row option *)
 fun row_for_constr thy maxes_assigns constr =
   SOME (Max constr, lookup_const_ints_assign thy maxes_assigns constr)
   handle TERM ("lookup_const_ints_assign", _) => NONE
 
 val max_bits = 31 (* Kodkod limit *)
 
-(* hol_context -> bool -> (typ option * int list) list
-   -> (styp option * int list) list -> (styp option * int list) list -> int list
-   -> int list -> typ -> block *)
 fun block_for_type (hol_ctxt as {thy, ...}) binarize cards_assigns maxes_assigns
                    iters_assigns bitss bisim_depths T =
   if T = @{typ unsigned_bit} then
@@ -279,13 +255,9 @@
        [_] => []
      | constrs => map_filter (row_for_constr thy maxes_assigns) constrs)
 
-(* hol_context -> bool -> (typ option * int list) list
-   -> (styp option * int list) list -> (styp option * int list) list -> int list
-   -> int list -> typ list -> typ list -> block list *)
 fun blocks_for_types hol_ctxt binarize cards_assigns maxes_assigns iters_assigns
                      bitss bisim_depths mono_Ts nonmono_Ts =
   let
-    (* typ -> block *)
     val block_for = block_for_type hol_ctxt binarize cards_assigns maxes_assigns
                                    iters_assigns bitss bisim_depths
     val mono_block = maps block_for mono_Ts
@@ -294,10 +266,8 @@
 
 val sync_threshold = 5
 
-(* int list -> int list list *)
 fun all_combinations_ordered_smartly ks =
   let
-    (* int list -> int *)
     fun cost_with_monos [] = 0
       | cost_with_monos (k :: ks) =
         if k < sync_threshold andalso forall (curry (op =) k) ks then
@@ -318,16 +288,13 @@
        |> sort (int_ord o pairself fst) |> map snd
   end
 
-(* typ -> bool *)
 fun is_self_recursive_constr_type T =
   exists (exists_subtype (curry (op =) (body_type T))) (binder_types T)
 
-(* (styp * int) list -> styp -> int *)
 fun constr_max maxes x = the_default ~1 (AList.lookup (op =) maxes x)
 
 type scope_desc = (typ * int) list * (styp * int) list
 
-(* hol_context -> bool -> scope_desc -> typ * int -> bool *)
 fun is_surely_inconsistent_card_assign hol_ctxt binarize
                                        (card_assigns, max_assigns) (T, k) =
   case binarized_and_boxed_datatype_constrs hol_ctxt binarize T of
@@ -338,22 +305,17 @@
         map (Integer.prod o map (bounded_card_of_type k ~1 card_assigns)
              o binder_types o snd) xs
       val maxes = map (constr_max max_assigns) xs
-      (* int -> int -> int *)
       fun effective_max card ~1 = card
         | effective_max card max = Int.min (card, max)
       val max = map2 effective_max dom_cards maxes |> Integer.sum
     in max < k end
-(* hol_context -> bool -> (typ * int) list -> (typ * int) list
-   -> (styp * int) list -> bool *)
 fun is_surely_inconsistent_scope_description hol_ctxt binarize seen rest
                                              max_assigns =
   exists (is_surely_inconsistent_card_assign hol_ctxt binarize
                                              (seen @ rest, max_assigns)) seen
 
-(* hol_context -> bool -> scope_desc -> (typ * int) list option *)
 fun repair_card_assigns hol_ctxt binarize (card_assigns, max_assigns) =
   let
-    (* (typ * int) list -> (typ * int) list -> (typ * int) list option *)
     fun aux seen [] = SOME seen
       | aux _ ((_, 0) :: _) = NONE
       | aux seen ((T, k) :: rest) =
@@ -367,7 +329,6 @@
         handle SAME () => aux seen ((T, k - 1) :: rest)
   in aux [] (rev card_assigns) end
 
-(* theory -> (typ * int) list -> typ * int -> typ * int *)
 fun repair_iterator_assign thy assigns (T as Type (_, Ts), k) =
     (T, if T = @{typ bisim_iterator} then
           let
@@ -381,15 +342,12 @@
           k)
   | repair_iterator_assign _ _ assign = assign
 
-(* row -> scope_desc -> scope_desc *)
 fun add_row_to_scope_descriptor (kind, ks) (card_assigns, max_assigns) =
   case kind of
     Card T => ((T, the_single ks) :: card_assigns, max_assigns)
   | Max x => (card_assigns, (x, the_single ks) :: max_assigns)
-(* block -> scope_desc *)
 fun scope_descriptor_from_block block =
   fold_rev add_row_to_scope_descriptor block ([], [])
-(* hol_context -> bool -> block list -> int list -> scope_desc option *)
 fun scope_descriptor_from_combination (hol_ctxt as {thy, ...}) binarize blocks
                                       columns =
   let
@@ -403,11 +361,8 @@
   end
   handle Option.Option => NONE
 
-(* (typ * int) list -> dtype_spec list -> int Typtab.table *)
 fun offset_table_for_card_assigns assigns dtypes =
   let
-    (* int -> (int * int) list -> (typ * int) list -> int Typtab.table
-       -> int Typtab.table *)
     fun aux next _ [] = Typtab.update_new (dummyT, next)
       | aux next reusable ((T, k) :: assigns) =
         if k = 1 orelse is_iterator_type T orelse is_integer_type T
@@ -423,18 +378,14 @@
                     #> aux (next + k) ((k, next) :: reusable) assigns
   in aux 0 [] assigns Typtab.empty end
 
-(* int -> (typ * int) list -> typ -> int *)
 fun domain_card max card_assigns =
   Integer.prod o map (bounded_card_of_type max max card_assigns) o binder_types
 
-(* scope_desc -> bool -> int -> (int -> int) -> int -> int -> bool * styp
-   -> constr_spec list -> constr_spec list *)
 fun add_constr_spec (card_assigns, max_assigns) co card sum_dom_cards
                     num_self_recs num_non_self_recs (self_rec, x as (_, T))
                     constrs =
   let
     val max = constr_max max_assigns x
-    (* unit -> int *)
     fun next_delta () = if null constrs then 0 else #epsilon (hd constrs)
     val {delta, epsilon, exclusive, total} =
       if max = 0 then
@@ -470,7 +421,6 @@
      explicit_max = max, total = total} :: constrs
   end
 
-(* hol_context -> bool -> typ list -> (typ * int) list -> typ -> bool *)
 fun has_exact_card hol_ctxt facto finitizable_dataTs card_assigns T =
   let val card = card_of_type card_assigns T in
     card = bounded_exact_card_of_type hol_ctxt
@@ -478,8 +428,6 @@
                card_assigns T
   end
 
-(* hol_context -> bool -> typ list -> typ list -> scope_desc -> typ * int
-   -> dtype_spec *)
 fun datatype_spec_from_scope_descriptor (hol_ctxt as {thy, stds, ...}) binarize
         deep_dataTs finitizable_dataTs (desc as (card_assigns, _)) (T, card) =
   let
@@ -490,7 +438,6 @@
     val self_recs = map (is_self_recursive_constr_type o snd) xs
     val (num_self_recs, num_non_self_recs) =
       List.partition I self_recs |> pairself length
-    (* bool -> bool *)
     fun is_complete facto =
       has_exact_card hol_ctxt facto finitizable_dataTs card_assigns T
     fun is_concrete facto =
@@ -500,7 +447,6 @@
                                    card_assigns)
     val complete = pair_from_fun is_complete
     val concrete = pair_from_fun is_concrete
-    (* int -> int *)
     fun sum_dom_cards max =
       map (domain_card max card_assigns o snd) xs |> Integer.sum
     val constrs =
@@ -512,10 +458,8 @@
      concrete = concrete, deep = deep, constrs = constrs}
   end
 
-(* hol_context -> bool -> int -> typ list -> typ list -> scope_desc -> scope *)
-fun scope_from_descriptor (hol_ctxt as {thy, stds, ...}) binarize sym_break
-                          deep_dataTs finitizable_dataTs
-                          (desc as (card_assigns, _)) =
+fun scope_from_descriptor (hol_ctxt as {thy, stds, ...}) binarize deep_dataTs
+                          finitizable_dataTs (desc as (card_assigns, _)) =
   let
     val datatypes =
       map (datatype_spec_from_scope_descriptor hol_ctxt binarize deep_dataTs
@@ -529,12 +473,9 @@
   in
     {hol_ctxt = hol_ctxt, binarize = binarize, card_assigns = card_assigns,
      datatypes = datatypes, bits = bits, bisim_depth = bisim_depth,
-     ofs = if sym_break <= 0 then Typtab.empty
-           else offset_table_for_card_assigns card_assigns datatypes}
+     ofs = offset_table_for_card_assigns card_assigns datatypes}
   end
 
-(* theory -> typ list -> (typ option * int list) list
-   -> (typ option * int list) list *)
 fun repair_cards_assigns_wrt_boxing_etc _ _ [] = []
   | repair_cards_assigns_wrt_boxing_etc thy Ts ((SOME T, ks) :: cards_assigns) =
     (if is_fun_type T orelse is_pair_type T then
@@ -548,12 +489,9 @@
 val max_scopes = 4096
 val distinct_threshold = 512
 
-(* hol_context -> bool -> int -> (typ option * int list) list
-   -> (styp option * int list) list -> (styp option * int list) list -> int list
-   -> typ list -> typ list -> typ list ->typ list -> int * scope list *)
-fun all_scopes (hol_ctxt as {thy, ...}) binarize sym_break cards_assigns
-               maxes_assigns iters_assigns bitss bisim_depths mono_Ts nonmono_Ts
-               deep_dataTs finitizable_dataTs =
+fun all_scopes (hol_ctxt as {thy, ...}) binarize cards_assigns maxes_assigns
+               iters_assigns bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs
+               finitizable_dataTs =
   let
     val cards_assigns = repair_cards_assigns_wrt_boxing_etc thy mono_Ts
                                                             cards_assigns
@@ -569,8 +507,8 @@
   in
     (length all - length head,
      descs |> length descs <= distinct_threshold ? distinct (op =)
-           |> map (scope_from_descriptor hol_ctxt binarize sym_break
-                                         deep_dataTs finitizable_dataTs))
+           |> map (scope_from_descriptor hol_ctxt binarize deep_dataTs
+                                         finitizable_dataTs))
   end
 
 end;
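
As a side note on the cardinality check performed by is_surely_inconsistent_card_assign above: a requested cardinality k for a datatype is rejected when the per-constructor domain cardinalities, capped by any explicit "max" constraints, cannot add up to k. A standalone Standard ML sketch with made-up numbers (not taken from Nitpick):

    (* Made-up domain cardinalities for three constructors and their "max"
       constraints; ~1 encodes "no constraint", as in the code above. *)
    val dom_cards = [4, 6, 2]
    val maxes = [~1, 3, ~1]
    fun effective_max card ~1 = card
      | effective_max card max = Int.min (card, max)
    val total =
      List.foldl (op +) 0 (ListPair.map (fn (c, m) => effective_max c m)
                                        (dom_cards, maxes))
    (* total = 4 + 3 + 2 = 9, so any requested cardinality k > 9 is surely
       inconsistent and the corresponding scope can be discarded. *)
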
--- a/src/HOL/Tools/Nitpick/nitpick_tests.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_tests.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -292,7 +292,6 @@
 *)
   ]
 
-(* Proof.context -> string * nut -> Kodkod.problem *)
 fun problem_for_nut ctxt (name, u) =
   let
     val debug = false
@@ -319,11 +318,10 @@
      formula = formula}
   end
 
-(* unit -> unit *)
 fun run_all_tests () =
   case Kodkod.solve_any_problem false NONE 0 1
                                 (map (problem_for_nut @{context}) tests) of
     Kodkod.Normal ([], _, _) => ()
-  | _ => error "Tests failed"
+  | _ => error "Tests failed."
 
 end;
--- a/src/HOL/Tools/Nitpick/nitpick_util.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Nitpick/nitpick_util.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -48,6 +48,10 @@
   val is_substring_of : string -> string -> bool
   val plural_s : int -> string
   val plural_s_for_list : 'a list -> string
+  val serial_commas : string -> string list -> string list
+  val timestamp : unit -> string
+  val parse_bool_option : bool -> string -> string -> bool option
+  val parse_time_option : string -> string -> Time.time option
   val flip_polarity : polarity -> polarity
   val prop_T : typ
   val bool_T : typ
@@ -61,6 +65,8 @@
   val pstrs : string -> Pretty.T list
   val unyxml : string -> string
   val maybe_quote : string -> string
+  val hashw : word * word -> word
+  val hashw_string : string * word -> word
 end;
 
 structure Nitpick_Util : NITPICK_UTIL =
@@ -79,25 +85,18 @@
 
 val nitpick_prefix = "Nitpick."
 
-(* ('a * 'b * 'c -> 'd) -> 'a -> 'b -> 'c -> 'd *)
 fun curry3 f = fn x => fn y => fn z => f (x, y, z)
 
-(* ('a -> 'b) -> ('a -> 'c) -> 'a -> 'b * 'c *)
 fun pairf f g x = (f x, g x)
 
-(* (bool -> 'a) -> 'a * 'a *)
 fun pair_from_fun f = (f false, f true)
-(* 'a * 'a -> bool -> 'a *)
 fun fun_from_pair (f, t) b = if b then t else f
 
-(* bool -> int *)
 fun int_from_bool b = if b then 1 else 0
-(* int -> int -> int *)
 fun nat_minus i j = if i > j then i - j else 0
 
 val max_exponent = 16384
 
-(* int -> int -> int *)
 fun reasonable_power _ 0 = 1
   | reasonable_power a 1 = a
   | reasonable_power 0 _ = 0
@@ -114,7 +113,6 @@
         c * c * reasonable_power a (b mod 2)
       end
 
-(* int -> int -> int *)
 fun exact_log m n =
   let
     val r = Math.ln (Real.fromInt n) / Math.ln (Real.fromInt m) |> Real.round
@@ -126,7 +124,6 @@
                  commas (map signed_string_of_int [m, n]))
   end
 
-(* int -> int -> int *)
 fun exact_root m n =
   let val r = Math.pow (Real.fromInt n, 1.0 / (Real.fromInt m)) |> Real.round in
     if reasonable_power r m = n then
@@ -136,22 +133,16 @@
                  commas (map signed_string_of_int [m, n]))
   end
 
-(* ('a -> 'a -> 'a) -> 'a list -> 'a *)
 fun fold1 f = foldl1 (uncurry f)
 
-(* int -> 'a list -> 'a list *)
 fun replicate_list 0 _ = []
   | replicate_list n xs = xs @ replicate_list (n - 1) xs
 
-(* int list -> int list *)
 fun offset_list ns = rev (tl (fold (fn x => fn xs => (x + hd xs) :: xs) ns [0]))
-(* int -> int -> int list *)
 fun index_seq j0 n = if j0 < 0 then j0 downto j0 - n + 1 else j0 upto j0 + n - 1
 
-(* int list -> 'a list -> 'a list *)
 fun filter_indices js xs =
   let
-    (* int -> int list -> 'a list -> 'a list *)
     fun aux _ [] _ = []
       | aux i (j :: js) (x :: xs) =
         if i = j then x :: aux (i + 1) js xs else aux (i + 1) (j :: js) xs
@@ -160,7 +151,6 @@
   in aux 0 js xs end
 fun filter_out_indices js xs =
   let
-    (* int -> int list -> 'a list -> 'a list *)
     fun aux _ [] xs = xs
       | aux i (j :: js) (x :: xs) =
         if i = j then aux (i + 1) js xs else x :: aux (i + 1) (j :: js) xs
@@ -168,76 +158,66 @@
                                "indices unordered or out of range")
   in aux 0 js xs end
 
-(* 'a list -> 'a list list -> 'a list list *)
 fun cartesian_product [] _ = []
   | cartesian_product (x :: xs) yss =
     map (cons x) yss @ cartesian_product xs yss
-(* 'a list list -> 'a list list *)
 fun n_fold_cartesian_product xss = fold_rev cartesian_product xss [[]]
-(* ''a list -> (''a * ''a) list *)
 fun all_distinct_unordered_pairs_of [] = []
   | all_distinct_unordered_pairs_of (x :: xs) =
     map (pair x) xs @ all_distinct_unordered_pairs_of xs
 
-(* (int * int) list -> int -> int list *)
 val nth_combination =
   let
-    (* (int * int) list -> int -> int list * int *)
     fun aux [] n = ([], n)
       | aux ((k, j0) :: xs) n =
         let val (js, n) = aux xs n in ((n mod k) + j0 :: js, n div k) end
   in fst oo aux end
 
-(* (int * int) list -> int list list *)
 val all_combinations = n_fold_cartesian_product o map (uncurry index_seq o swap)
 
-(* 'a list -> 'a list list *)
 fun all_permutations [] = [[]]
   | all_permutations xs =
     maps (fn j => map (cons (nth xs j)) (all_permutations (nth_drop j xs)))
          (index_seq 0 (length xs))
 
-(* int -> 'a list -> 'a list list *)
 fun batch_list _ [] = []
   | batch_list k xs =
     if length xs <= k then [xs]
     else List.take (xs, k) :: batch_list k (List.drop (xs, k))
 
-(* int list -> 'a list -> 'a list list *)
 fun chunk_list_unevenly _ [] = []
   | chunk_list_unevenly [] ys = map single ys
   | chunk_list_unevenly (k :: ks) ys =
     let val (ys1, ys2) = chop k ys in ys1 :: chunk_list_unevenly ks ys2 end
 
-(* ('a -> 'b -> 'c -> 'd) -> 'a list -> 'b list -> 'c list -> 'd list *)
 fun map3 _ [] [] [] = []
   | map3 f (x :: xs) (y :: ys) (z :: zs) = f x y z :: map3 f xs ys zs
   | map3 _ _ _ _ = raise UnequalLengths
 
-(* ('a * 'a -> bool) -> ('a option * 'b) list -> 'a -> 'b option *)
 fun double_lookup eq ps key =
   case AList.lookup (fn (SOME x, SOME y) => eq (x, y) | _ => false) ps
                     (SOME key) of
     SOME z => SOME z
   | NONE => ps |> find_first (is_none o fst) |> Option.map snd
-(* (''a * ''a -> bool) -> (''a option * 'b) list -> ''a -> 'b option *)
 fun triple_lookup _ [(NONE, z)] _ = SOME z
   | triple_lookup eq ps key =
     case AList.lookup (op =) ps (SOME key) of
       SOME z => SOME z
     | NONE => double_lookup eq ps key
 
-(* string -> string -> bool *)
 fun is_substring_of needle stack =
   not (Substring.isEmpty (snd (Substring.position needle
                                                   (Substring.full stack))))
 
-(* int -> string *)
-fun plural_s n = if n = 1 then "" else "s"
-(* 'a list -> string *)
+val plural_s = Sledgehammer_Util.plural_s
 fun plural_s_for_list xs = plural_s (length xs)
 
-(* polarity -> polarity *)
+val serial_commas = Sledgehammer_Util.serial_commas
+
+val timestamp = Sledgehammer_Util.timestamp
+val parse_bool_option = Sledgehammer_Util.parse_bool_option
+val parse_time_option = Sledgehammer_Util.parse_time_option
+
 fun flip_polarity Pos = Neg
   | flip_polarity Neg = Pos
   | flip_polarity Neut = Neut
@@ -247,42 +227,32 @@
 val nat_T = @{typ nat}
 val int_T = @{typ int}
 
-(* string -> string *)
 val subscript = implode o map (prefix "\<^isub>") o explode
-(* int -> string *)
 fun nat_subscript n =
   (* cheap trick to ensure proper alphanumeric ordering for one- and two-digit
      numbers *)
   if n <= 9 then "\<^bsub>" ^ signed_string_of_int n ^ "\<^esub>"
   else subscript (string_of_int n)
 
-(* Time.time option -> ('a -> 'b) -> 'a -> 'b *)
 fun time_limit NONE = I
   | time_limit (SOME delay) = TimeLimit.timeLimit delay
 
-(* Time.time option -> tactic -> tactic *)
 fun DETERM_TIMEOUT delay tac st =
   Seq.of_list (the_list (time_limit delay (fn () => SINGLE tac st) ()))
 
-(* ('a -> 'b) -> 'a -> 'b *)
 fun setmp_show_all_types f =
   setmp_CRITICAL show_all_types
                  (! show_types orelse ! show_sorts orelse ! show_all_types) f
 
 val indent_size = 2
 
-(* string -> Pretty.T list *)
 val pstrs = Pretty.breaks o map Pretty.str o space_explode " "
 
-(* XML.tree -> string *)
 fun plain_string_from_xml_tree t =
   Buffer.empty |> XML.add_content t |> Buffer.content
-(* string -> string *)
 val unyxml = plain_string_from_xml_tree o YXML.parse
 
-(* string -> bool *)
 val is_long_identifier = forall Syntax.is_identifier o space_explode "."
-(* string -> string *)
 fun maybe_quote y =
   let val s = unyxml y in
     y |> ((not (is_long_identifier (perhaps (try (unprefix "'")) s)) andalso
@@ -290,4 +260,11 @@
            OuterKeyword.is_keyword s) ? quote
   end
 
+(* This hash function is recommended in Compilers: Principles, Techniques, and
+   Tools, by Aho, Sethi and Ullman. The hashpjw function, which they
+   particularly recommend, triggers a bug in versions of Poly/ML up to 4.2.0. *)
+fun hashw (u, w) = Word.+ (u, Word.* (0w65599, w))
+fun hashw_char (c, w) = hashw (Word.fromInt (Char.ord c), w)
+fun hashw_string (s:string, w) = CharVector.foldl hashw_char w s
+
 end;
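
For reference, the hash functions added above are self-contained Standard ML; a minimal usage sketch follows (the sample constant name and the rendering via Word.toString are illustrative assumptions, not output recorded from Isabelle):

    (* 65599-based string hash, as added to nitpick_util.ML above. *)
    fun hashw (u, w) = Word.+ (u, Word.* (0w65599, w))
    fun hashw_char (c, w) = hashw (Word.fromInt (Char.ord c), w)
    fun hashw_string (s : string, w) = CharVector.foldl hashw_char w s

    (* Example: fold the hash over a (made-up) constant name and render it as a
       short hexadecimal token. *)
    val token = Word.toString (hashw_string ("c_Nitpick.bisim", 0w0))
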
--- a/src/HOL/Tools/Sledgehammer/metis_tactics.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/metis_tactics.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -693,10 +693,11 @@
                 val unused = th_cls_pairs |> map_filter (fn (name, cls) =>
                   if common_thm used cls then NONE else SOME name)
             in
-                if not (common_thm used cls) then
-                  warning "Metis: The goal is provable because the context is \
-                          \inconsistent."
-                else if not (null unused) then
+                if not (null cls) andalso not (common_thm used cls) then
+                  warning "Metis: The assumptions are inconsistent."
+                else
+                  ();
+                if not (null unused) then
                   warning ("Metis: Unused theorems: " ^ commas_quote unused
                            ^ ".")
                 else
@@ -720,7 +721,7 @@
     if exists_type type_has_topsort (prop_of st0)
     then raise METIS "Metis: Proof state contains the universal sort {}"
     else
-      (Meson.MESON neg_clausify
+      (Meson.MESON (maps neg_clausify)
         (fn cls => resolve_tac (FOL_SOLVE mode ctxt cls ths) 1) ctxt i
           THEN Meson_Tactic.expand_defs_tac st0) st0
   end
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1,5 +1,6 @@
 (*  Title:      HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML
     Author:     Jia Meng, Cambridge University Computer Laboratory, NICTA
+    Author:     Jasmin Blanchette, TU Muenchen
 *)
 
 signature SLEDGEHAMMER_FACT_FILTER =
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_minimizer.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -0,0 +1,133 @@
+(*  Title:      HOL/Tools/Sledgehammer/sledgehammer_fact_minimizer.ML
+    Author:     Philipp Meyer, TU Muenchen
+    Author:     Jasmin Blanchette, TU Muenchen
+
+Minimization of theorem list for Metis using automatic theorem provers.
+*)
+
+signature SLEDGEHAMMER_FACT_MINIMIZER =
+sig
+  type params = ATP_Manager.params
+  type prover_result = ATP_Manager.prover_result
+
+  val minimize_theorems :
+    params -> int -> Proof.state -> (string * thm list) list
+    -> (string * thm list) list option * string
+end;
+
+structure Sledgehammer_Fact_Minimizer : SLEDGEHAMMER_FACT_MINIMIZER =
+struct
+
+open Sledgehammer_Util
+open Sledgehammer_Fact_Preprocessor
+open Sledgehammer_Proof_Reconstruct
+open ATP_Manager
+
+(* Linear minimization algorithm *)
+
+fun linear_minimize test s =
+  let
+    fun aux [] p = p
+      | aux (x :: xs) (needed, result) =
+        case test (xs @ needed) of
+          SOME result => aux xs (needed, result)
+        | NONE => aux xs (x :: needed, result)
+  in aux s end
+
+
+(* Wrapper for calling the external prover *)
+
+fun string_for_failure Unprovable = "Unprovable."
+  | string_for_failure TimedOut = "Timed out."
+  | string_for_failure OutOfResources = "Failed."
+  | string_for_failure OldSpass = "Error."
+  | string_for_failure MalformedOutput = "Error."
+  | string_for_failure UnknownError = "Failed."
+fun string_for_outcome NONE = "Success."
+  | string_for_outcome (SOME failure) = string_for_failure failure
+
+fun sledgehammer_test_theorems (params as {full_types, ...} : params) prover
+        timeout subgoal state filtered_clauses name_thms_pairs =
+  let
+    val num_theorems = length name_thms_pairs
+    val _ = priority ("Testing " ^ string_of_int num_theorems ^
+                      " theorem" ^ plural_s num_theorems ^ "...")
+    val name_thm_pairs = maps (fn (n, ths) => map (pair n) ths) name_thms_pairs
+    val axclauses = cnf_rules_pairs (Proof.theory_of state) name_thm_pairs
+    val {context = ctxt, facts, goal} = Proof.goal state
+    val problem =
+     {subgoal = subgoal, goal = (ctxt, (facts, goal)),
+      relevance_override = {add = [], del = [], only = false},
+      axiom_clauses = SOME axclauses,
+      filtered_clauses = SOME (the_default axclauses filtered_clauses)}
+  in
+    prover params (K "") timeout problem
+    |> tap (priority o string_for_outcome o #outcome)
+  end
+
+(* Minimization of theorems *)
+
+fun minimize_theorems (params as {debug, atps, minimize_timeout, isar_proof,
+                                  shrink_factor, sorts, ...})
+                      i state name_thms_pairs =
+  let
+    val thy = Proof.theory_of state
+    val prover = case atps of
+                   [atp_name] => get_prover thy atp_name
+                 | _ => error "Expected a single ATP."
+    val msecs = Time.toMilliseconds minimize_timeout
+    val n = length name_thms_pairs
+    val _ =
+      priority ("Sledgehammer minimizer: ATP " ^ quote (the_single atps) ^
+                " with a time limit of " ^ string_of_int msecs ^ " ms.")
+    val test_thms_fun =
+      sledgehammer_test_theorems params prover minimize_timeout i state
+    fun test_thms filtered thms =
+      case test_thms_fun filtered thms of
+        (result as {outcome = NONE, ...}) => SOME result
+      | _ => NONE
+
+    val {context = ctxt, facts, goal} = Proof.goal state;
+    val n = Logic.count_prems (prop_of goal)
+  in
+    (* Try to prove the goal first, to check the result and obtain the used
+       theorems. *)
+    (case test_thms_fun NONE name_thms_pairs of
+      result as {outcome = NONE, pool, internal_thm_names, conjecture_shape,
+                 filtered_clauses, ...} =>
+        let
+          val used = internal_thm_names |> Vector.foldr (op ::) []
+                                        |> sort_distinct string_ord
+          val to_use =
+            if length used < length name_thms_pairs then
+              filter (fn (name1, _) => exists (curry (op =) name1) used)
+                     name_thms_pairs
+            else name_thms_pairs
+          val (min_thms, {proof, internal_thm_names, ...}) =
+            linear_minimize (test_thms (SOME filtered_clauses)) to_use
+                            ([], result)
+          val n = length min_thms
+          val _ = priority (cat_lines
+            ["Minimized: " ^ string_of_int n ^ " theorem" ^ plural_s n] ^ ".")
+        in
+          (SOME min_thms,
+           proof_text isar_proof
+                      (pool, debug, shrink_factor, sorts, ctxt,
+                       conjecture_shape)
+                      (K "", proof, internal_thm_names, goal, i) |> fst)
+        end
+    | {outcome = SOME TimedOut, ...} =>
+        (NONE, "Timeout: You can increase the time limit using the \"timeout\" \
+               \option (e.g., \"timeout = " ^
+               string_of_int (10 + msecs div 1000) ^ " s\").")
+    | {outcome = SOME UnknownError, ...} =>
+        (* Failure sometimes means timeout, unfortunately. *)
+        (NONE, "Failure: No proof was found with the current time limit. You \
+               \can increase the time limit using the \"timeout\" \
+               \option (e.g., \"timeout = " ^
+               string_of_int (10 + msecs div 1000) ^ " s\").")
+    | {message, ...} => (NONE, "ATP error: " ^ message))
+    handle Sledgehammer_HOL_Clause.TRIVIAL => (SOME [], metis_line i n [])
+         | ERROR msg => (NONE, "Error: " ^ msg)
+  end
+
+end;
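
The linear minimization loop at the top of this new file is generic; here is a standalone sketch that exercises the same algorithm with a made-up test function (keep a sublist only as long as its sum stays above 10), unrelated to ATPs:

    (* Same algorithm as linear_minimize above: drop each element in turn and
       keep it only if the test fails without it. *)
    fun linear_minimize test xs =
      let
        fun aux [] p = p
          | aux (x :: xs) (needed, result) =
            case test (xs @ needed) of
              SOME result => aux xs (needed, result)
            | NONE => aux xs (x :: needed, result)
      in aux xs end

    (* Made-up test: succeed (and report the sum) if the sum exceeds 10. *)
    fun test ys =
      let val s = List.foldl (op +) 0 ys in if s > 10 then SOME s else NONE end

    (* ([], 24): nothing kept yet, 24 being the (made-up) result for the full
       list. *)
    val (kept, last_sum) = linear_minimize test [7, 1, 5, 2, 9] ([], 24)
    (* kept = [9, 2], last_sum = 11: a locally minimal sublist that still
       passes the test. *)
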
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1,5 +1,6 @@
 (*  Title:      HOL/Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML
     Author:     Jia Meng, Cambridge University Computer Laboratory
+    Author:     Jasmin Blanchette, TU Muenchen
 
 Transformation of axiom rules (elim/intro/etc) into CNF forms.
 *)
@@ -14,11 +15,13 @@
   val bad_for_atp: thm -> bool
   val type_has_topsort: typ -> bool
   val cnf_rules_pairs: theory -> (string * thm) list -> (thm * (string * int)) list
-  val neg_clausify: thm list -> thm list
-  val combinators: thm -> thm
-  val neg_conjecture_clauses: Proof.context -> thm -> int -> thm list * (string * typ) list
   val suppress_endtheory: bool Unsynchronized.ref
     (*for emergency use where endtheory causes problems*)
+  val strip_subgoal : thm -> int -> term list * term list * term
+  val neg_clausify: thm -> thm list
+  val neg_conjecture_clauses:
+    Proof.context -> thm -> int -> thm list list * (string * typ) list
+  val neg_clausify_tac: Proof.context -> int -> tactic
   val setup: theory -> theory
 end;
 
@@ -455,19 +458,31 @@
   lambda_free, but then the individual theory caches become much bigger.*)
 
 
+fun strip_subgoal goal i =
+  let
+    val (t, frees) = Logic.goal_params (prop_of goal) i
+    val hyp_ts = t |> Logic.strip_assums_hyp |> map (curry subst_bounds frees)
+    val concl_t = t |> Logic.strip_assums_concl |> curry subst_bounds frees
+  in (rev frees, hyp_ts, concl_t) end
+
 (*** Converting a subgoal into negated conjecture clauses. ***)
 
 fun neg_skolemize_tac ctxt =
   EVERY' [rtac ccontr, Object_Logic.atomize_prems_tac, Meson.skolemize_tac ctxt];
 
+fun neg_skolemize_tac ctxt =
+  EVERY' [rtac ccontr, Object_Logic.atomize_prems_tac, Meson.skolemize_tac ctxt];
+
 val neg_clausify =
-  Meson.make_clauses_unsorted #> map combinators #> Meson.finish_cnf;
+  single #> Meson.make_clauses_unsorted #> map combinators #> Meson.finish_cnf
 
 fun neg_conjecture_clauses ctxt st0 n =
   let
     val st = Seq.hd (neg_skolemize_tac ctxt n st0)
     val ({params, prems, ...}, _) = Subgoal.focus (Variable.set_body false ctxt) n st
-  in (neg_clausify prems, map (Term.dest_Free o Thm.term_of o #2) params) end;
+  in
+    (map neg_clausify prems, map (Term.dest_Free o Thm.term_of o #2) params)
+  end
 
 (*Conversion of a subgoal to conjecture clauses. Each clause has
   leading !!-bound universal variables, to express generality. *)
@@ -479,30 +494,14 @@
        [Subgoal.FOCUS
          (fn {prems, ...} =>
            (Method.insert_tac
-             (map forall_intr_vars (neg_clausify prems)) i)) ctxt,
+             (map forall_intr_vars (maps neg_clausify prems)) i)) ctxt,
         REPEAT_DETERM_N (length ts) o etac thin_rl] i
      end);
 
-val neg_clausify_setup =
-  Method.setup @{binding neg_clausify} (Scan.succeed (SIMPLE_METHOD' o neg_clausify_tac))
-  "conversion of goal to conjecture clauses";
-
-
-(** Attribute for converting a theorem into clauses **)
-
-val clausify_setup =
-  Attrib.setup @{binding clausify}
-    (Scan.lift OuterParse.nat >>
-      (fn i => Thm.rule_attribute (fn context => fn th =>
-          Meson.make_meta_clause (nth (cnf_axiom (Context.theory_of context) th) i))))
-  "conversion of theorem to clauses";
-
 
 (** setup **)
 
 val setup =
-  neg_clausify_setup #>
-  clausify_setup #>
   perhaps saturate_skolem_cache #>
   Theory.at_end clause_cache_endtheory;
 
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fol_clause.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fol_clause.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1,5 +1,6 @@
 (*  Title:      HOL/Tools/Sledgehammer/sledgehammer_fol_clause.ML
     Author:     Jia Meng, Cambridge University Computer Laboratory
+    Author:     Jasmin Blanchette, TU Muenchen
 
 Storing/printing FOL clauses and arity clauses.  Typed equality is
 treated differently.
@@ -190,11 +191,15 @@
       tvar_prefix ^ (ascii_of_indexname (trim_type_var x,i));
 fun make_fixed_type_var x = tfree_prefix ^ (ascii_of (trim_type_var x));
 
-(* HACK because SPASS 3.0 truncates identifiers to 63 characters. (This is
-   solved in 3.7 and perhaps in earlier versions too.) *)
-(* 32-bit hash, so we expect no collisions. *)
+val max_dfg_symbol_length = 63
+
+(* HACK because SPASS 3.0 truncates identifiers to 63 characters. *)
 fun controlled_length dfg s =
-  if dfg andalso size s > 60 then Word.toString (hashw_string (s, 0w0)) else s;
+  if dfg andalso size s > max_dfg_symbol_length then
+    String.extract (s, 0, SOME (max_dfg_symbol_length div 2 - 1)) ^ "__" ^
+    String.extract (s, size s - max_dfg_symbol_length div 2 + 1, NONE)
+  else
+    s
 
 fun lookup_const dfg c =
     case Symtab.lookup const_trans_table c of
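
A standalone sketch of the new identifier-shortening scheme for DFG files (the overly long sample name is made up, and the real controlled_length additionally takes a flag saying whether DFG output is being produced):

    val max_dfg_symbol_length = 63

    (* Keep the first and last (63 div 2 - 1) = 30 characters and join them
       with "__", so the result stays within SPASS's 63-character limit. *)
    fun shorten s =
      if size s > max_dfg_symbol_length then
        String.extract (s, 0, SOME (max_dfg_symbol_length div 2 - 1)) ^ "__" ^
        String.extract (s, size s - max_dfg_symbol_length div 2 + 1, NONE)
      else
        s

    val long_name = String.implode (List.tabulate (80, fn _ => #"c"))
    val short_name = shorten long_name   (* 62 characters long *)
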
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_hol_clause.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_hol_clause.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1,5 +1,6 @@
-(*  Title:      HOL/Sledgehammer/sledgehammer_hol_clause.ML
+(*  Title:      HOL/Tools/Sledgehammer/sledgehammer_hol_clause.ML
     Author:     Jia Meng, NICTA
+    Author:     Jasmin Blanchette, TU Muenchen
 
 FOL clauses translated from HOL formulae.
 *)
@@ -7,6 +8,7 @@
 signature SLEDGEHAMMER_HOL_CLAUSE =
 sig
   type name = Sledgehammer_FOL_Clause.name
+  type name_pool = Sledgehammer_FOL_Clause.name_pool
   type kind = Sledgehammer_FOL_Clause.kind
   type fol_type = Sledgehammer_FOL_Clause.fol_type
   type classrel_clause = Sledgehammer_FOL_Clause.classrel_clause
@@ -36,10 +38,10 @@
        hol_clause list
   val write_tptp_file : bool -> bool -> bool -> Path.T ->
     hol_clause list * hol_clause list * hol_clause list * hol_clause list *
-    classrel_clause list * arity_clause list -> unit
+    classrel_clause list * arity_clause list -> name_pool option * int
   val write_dfg_file : bool -> bool -> Path.T ->
     hol_clause list * hol_clause list * hol_clause list * hol_clause list *
-    classrel_clause list * arity_clause list -> unit
+    classrel_clause list * arity_clause list -> name_pool option * int
 end
 
 structure Sledgehammer_HOL_Clause : SLEDGEHAMMER_HOL_CLAUSE =
@@ -54,7 +56,7 @@
 
    If "explicit_apply" is false, each function will be directly applied to as
    many arguments as possible, avoiding use of the "apply" operator. Use of
-   hBOOL is also minimized.
+   "hBOOL" is also minimized.
 *)
 
 fun min_arity_of const_min_arity c = the_default 0 (Symtab.lookup const_min_arity c);
@@ -499,7 +501,9 @@
 fun write_tptp_file readable_names full_types explicit_apply file clauses =
   let
     fun section _ [] = []
-      | section name ss = "\n% " ^ name ^ plural_s (length ss) ^ "\n" :: ss
+      | section name ss =
+        "\n% " ^ name ^ plural_s (length ss) ^ " (" ^ Int.toString (length ss) ^
+        ")\n" :: ss
     val pool = empty_name_pool readable_names
     val (conjectures, axclauses, _, helper_clauses,
       classrel_clauses, arity_clauses) = clauses
@@ -514,17 +518,19 @@
     val arity_clss = map tptp_arity_clause arity_clauses
     val (helper_clss, pool) = pool_map (apfst fst oo tptp_clause params)
                                        helper_clauses pool
-  in
-    File.write_list file
-        (header () ::
-         section "Relevant fact" ax_clss @
-         section "Type variable" tfree_clss @
-         section "Conjecture" conjecture_clss @
-         section "Class relationship" classrel_clss @
-         section "Arity declaration" arity_clss @
-         section "Helper fact" helper_clss)
-  end
-
+    val conjecture_offset =
+      length ax_clss + length classrel_clss + length arity_clss
+      + length helper_clss
+    val _ =
+      File.write_list file
+          (header () ::
+           section "Relevant fact" ax_clss @
+           section "Class relationship" classrel_clss @
+           section "Arity declaration" arity_clss @
+           section "Helper fact" helper_clss @
+           section "Conjecture" conjecture_clss @
+           section "Type variable" tfree_clss)
+  in (pool, conjecture_offset) end
 
 (* DFG format *)
 
@@ -540,30 +546,33 @@
     val params = (full_types, explicit_apply, cma, cnh)
     val ((conjecture_clss, tfree_litss), pool) =
       pool_map (dfg_clause params) conjectures pool |>> ListPair.unzip
-    and probname = Path.implode (Path.base file)
+    and problem_name = Path.implode (Path.base file)
     val (axstrs, pool) = pool_map (apfst fst oo dfg_clause params) axclauses pool
     val tfree_clss = map dfg_tfree_clause (union_all tfree_litss)
     val (helper_clauses_strs, pool) =
       pool_map (apfst fst oo dfg_clause params) helper_clauses pool
     val (funcs, cl_preds) = decls_of_clauses params (helper_clauses @ conjectures @ axclauses) arity_clauses
     and ty_preds = preds_of_clauses axclauses classrel_clauses arity_clauses
-  in
-    File.write_list file
-        (header () ::
-         string_of_start probname ::
-         string_of_descrip probname ::
-         string_of_symbols (string_of_funcs funcs)
-                           (string_of_preds (cl_preds @ ty_preds)) ::
-         "list_of_clauses(axioms, cnf).\n" ::
-         axstrs @
-         map dfg_classrel_clause classrel_clauses @
-         map dfg_arity_clause arity_clauses @
-         helper_clauses_strs @
-         ["end_of_list.\n\nlist_of_clauses(conjectures, cnf).\n"] @
-         tfree_clss @
-         conjecture_clss @
-         ["end_of_list.\n\n",
-          "end_problem.\n"])
-  end
+    val conjecture_offset =
+      length axclauses + length classrel_clauses + length arity_clauses
+      + length helper_clauses
+    val _ =
+      File.write_list file
+          (header () ::
+           string_of_start problem_name ::
+           string_of_descrip problem_name ::
+           string_of_symbols (string_of_funcs funcs)
+                             (string_of_preds (cl_preds @ ty_preds)) ::
+           "list_of_clauses(axioms, cnf).\n" ::
+           axstrs @
+           map dfg_classrel_clause classrel_clauses @
+           map dfg_arity_clause arity_clauses @
+           helper_clauses_strs @
+           ["end_of_list.\n\nlist_of_clauses(conjectures, cnf).\n"] @
+           conjecture_clss @
+           tfree_clss @
+           ["end_of_list.\n\n",
+            "end_problem.\n"])
+  in (pool, conjecture_offset) end
 
 end;
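
The conjecture_offset now returned by write_tptp_file and write_dfg_file is simply the number of clauses emitted before the conjecture section; a trivial sketch with made-up list lengths (how the caller uses the offset lies outside this hunk):

    val ax_clss = ["ax1", "ax2", "ax3"]
    val classrel_clss = ["cr1"]
    val arity_clss = []
    val helper_clss = ["h1", "h2"]
    val conjecture_offset =
      length ax_clss + length classrel_clss + length arity_clss
      + length helper_clss
    (* conjecture_offset = 6: the conjecture clauses start after the first six
       clauses of the generated problem file. *)
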
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Sledgehammer/sledgehammer_isar.ML
+(*  Title:      HOL/Tools/Sledgehammer/sledgehammer_isar.ML
     Author:     Jasmin Blanchette, TU Muenchen
 
 Adds "sledgehammer" and related commands to Isabelle/Isar's outer syntax.
@@ -8,19 +8,46 @@
 sig
   type params = ATP_Manager.params
 
+  val atps: string Unsynchronized.ref
+  val timeout: int Unsynchronized.ref
+  val full_types: bool Unsynchronized.ref
   val default_params : theory -> (string * string) list -> params
+  val setup: theory -> theory
 end;
 
 structure Sledgehammer_Isar : SLEDGEHAMMER_ISAR =
 struct
 
 open Sledgehammer_Util
+open Sledgehammer_Fact_Preprocessor
 open ATP_Manager
-open ATP_Minimal
-open ATP_Wrapper
+open ATP_Systems
+open Sledgehammer_Fact_Minimizer
 
 structure K = OuterKeyword and P = OuterParse
 
+(** Proof method used in Isar proofs **)
+
+val neg_clausify_setup =
+  Method.setup @{binding neg_clausify}
+               (Scan.succeed (SIMPLE_METHOD' o neg_clausify_tac))
+               "conversion of goal to negated conjecture clauses"
+
+(** Attribute for converting a theorem into clauses **)
+
+val parse_clausify_attribute =
+  Scan.lift OuterParse.nat
+  >> (fn i => Thm.rule_attribute (fn context => fn th =>
+                  let val thy = Context.theory_of context in
+                    Meson.make_meta_clause (nth (cnf_axiom thy th) i)
+                  end))
+
+val clausify_setup =
+  Attrib.setup @{binding clausify} parse_clausify_attribute
+               "conversion of theorem to clauses"
+
+(** Sledgehammer commands **)
+
 fun add_to_relevance_override ns : relevance_override =
   {add = ns, del = [], only = false}
 fun del_from_relevance_override ns : relevance_override =
@@ -35,6 +62,29 @@
 fun merge_relevance_overrides rs =
   fold merge_relevance_override_pairwise rs (only_relevance_override [])
 
+(*** parameters ***)
+
+val atps = Unsynchronized.ref (default_atps_param_value ())
+val timeout = Unsynchronized.ref 60
+val full_types = Unsynchronized.ref false
+
+val _ =
+  ProofGeneralPgip.add_preference Preferences.category_proof
+      (Preferences.string_pref atps
+          "Sledgehammer: ATPs"
+          "Default automatic provers (separated by whitespace)")
+
+val _ =
+  ProofGeneralPgip.add_preference Preferences.category_proof
+      (Preferences.int_pref timeout
+          "Sledgehammer: Time Limit"
+          "ATPs will be interrupted after this time (in seconds)")
+
+val _ =
+  ProofGeneralPgip.add_preference Preferences.category_proof
+      (Preferences.bool_pref full_types
+          "Sledgehammer: Full Types" "ATPs will use full type information")
+
 type raw_param = string * string list
 
 val default_default_params =
@@ -49,7 +99,7 @@
    ("higher_order", "smart"),
    ("follow_defs", "false"),
    ("isar_proof", "false"),
-   ("modulus", "1"),
+   ("shrink_factor", "1"),
    ("sorts", "false"),
    ("minimize_timeout", "5 s")]
 
@@ -59,9 +109,9 @@
   [("no_debug", "debug"),
    ("quiet", "verbose"),
    ("no_overlord", "overlord"),
+   ("partial_types", "full_types"),
    ("implicit_apply", "explicit_apply"),
    ("ignore_no_atp", "respect_no_atp"),
-   ("partial_types", "full_types"),
    ("theory_irrelevant", "theory_relevant"),
    ("first_order", "higher_order"),
    ("dont_follow_defs", "follow_defs"),
@@ -69,8 +119,8 @@
    ("no_sorts", "sorts")]
 
 val params_for_minimize =
-  ["full_types", "explicit_apply", "higher_order", "isar_proof", "modulus",
-   "sorts", "minimize_timeout"]
+  ["debug", "verbose", "overlord", "full_types", "explicit_apply",
+   "higher_order", "isar_proof", "shrink_factor", "sorts", "minimize_timeout"]
 
 val property_dependent_params = ["atps", "full_types", "timeout"]
 
@@ -153,7 +203,7 @@
     val higher_order = lookup_bool_option "higher_order"
     val follow_defs = lookup_bool "follow_defs"
     val isar_proof = lookup_bool "isar_proof"
-    val modulus = Int.max (1, lookup_int "modulus")
+    val shrink_factor = Int.max (1, lookup_int "shrink_factor")
     val sorts = lookup_bool "sorts"
     val timeout = lookup_time "timeout"
     val minimize_timeout = lookup_time "minimize_timeout"
@@ -163,49 +213,43 @@
      respect_no_atp = respect_no_atp, relevance_threshold = relevance_threshold,
      convergence = convergence, theory_relevant = theory_relevant,
      higher_order = higher_order, follow_defs = follow_defs,
-     isar_proof = isar_proof, modulus = modulus, sorts = sorts,
+     isar_proof = isar_proof, shrink_factor = shrink_factor, sorts = sorts,
      timeout = timeout, minimize_timeout = minimize_timeout}
   end
 
 fun get_params thy = extract_params thy (default_raw_params thy)
 fun default_params thy = get_params thy o map (apsnd single)
 
-fun minimize override_params old_style_args i fact_refs state =
+(* Sledgehammer the given subgoal *)
+
+fun run {atps = [], ...} _ _ _ _ = error "No ATP is set."
+  | run (params as {atps, timeout, ...}) i relevance_override minimize_command
+        proof_state =
+    let
+      val birth_time = Time.now ()
+      val death_time = Time.+ (birth_time, timeout)
+      val _ = kill_atps ()  (* race w.r.t. other invocations of Sledgehammer *)
+      val _ = priority "Sledgehammering..."
+      val _ = List.app (start_prover_thread params birth_time death_time i
+                                            relevance_override minimize_command
+                                            proof_state) atps
+    in () end
+
+fun minimize override_params i fact_refs state =
   let
     val thy = Proof.theory_of state
     val ctxt = Proof.context_of state
-    fun theorems_from_refs ctxt =
-      map (fn fact_ref =>
-              let
-                val ths = ProofContext.get_fact ctxt fact_ref
-                val name' = Facts.string_of_ref fact_ref
-              in (name', ths) end)
-    fun get_time_limit_arg s =
-      (case Int.fromString s of
-        SOME t => Time.fromSeconds t
-      | NONE => error ("Invalid time limit: " ^ quote s ^ "."))
-    fun get_opt (name, a) (p, t) =
-      (case name of
-        "time" => (p, get_time_limit_arg a)
-      | "atp" => (a, t)
-      | n => error ("Invalid argument: " ^ n ^ "."))
-    val {atps, minimize_timeout, ...} = get_params thy override_params
-    val (atp, timeout) = fold get_opt old_style_args (hd atps, minimize_timeout)
-    val params =
-      get_params thy
-          (override_params @
-           [("atps", [atp]),
-            ("minimize_timeout",
-             [string_of_int (Time.toMilliseconds timeout) ^ " ms"])])
-    val prover =
-      (case get_prover thy atp of
-        SOME prover => prover
-      | NONE => error ("Unknown ATP: " ^ quote atp ^ "."))
+    val theorems_from_refs =
+      map o pairf Facts.string_of_ref o ProofContext.get_fact
     val name_thms_pairs = theorems_from_refs ctxt fact_refs
   in
-    priority (#2 (minimize_theorems params prover atp i state name_thms_pairs))
+    priority (#2 (minimize_theorems (get_params thy override_params) i state
+                                    name_thms_pairs))
   end
 
+val sledgehammerN = "sledgehammer"
+val sledgehammer_paramsN = "sledgehammer_params"
+
 val runN = "run"
 val minimizeN = "minimize"
 val messagesN = "messages"
@@ -226,7 +270,7 @@
          | value => " = " ^ value)
 
 fun minimize_command override_params i atp_name facts =
-  "sledgehammer minimize [atp = " ^ atp_name ^
+  sledgehammerN ^ " " ^ minimizeN ^ " [atp = " ^ atp_name ^
   (override_params |> filter is_raw_param_relevant_for_minimize
                    |> implode o map (prefix ", " o string_for_raw_param)) ^
   "] (" ^ space_implode " " facts ^ ")" ^
@@ -239,11 +283,11 @@
   in
     if subcommand = runN then
       let val i = the_default 1 opt_i in
-        sledgehammer (get_params thy override_params) i relevance_override
-                     (minimize_command override_params i) state
+        run (get_params thy override_params) i relevance_override
+            (minimize_command override_params i) state
       end
     else if subcommand = minimizeN then
-      minimize (map (apfst minimizize_raw_param_name) override_params) []
+      minimize (map (apfst minimizize_raw_param_name) override_params)
                (the_default 1 opt_i) (#add relevance_override) state
     else if subcommand = messagesN then
       messages opt_i
@@ -301,40 +345,17 @@
 val parse_sledgehammer_params_command =
   parse_params #>> sledgehammer_params_trans
 
-val parse_minimize_args =
-  Scan.optional (Args.bracks (P.list (P.short_ident --| P.$$$ "=" -- P.xname)))
-                []
 val _ =
-  OuterSyntax.improper_command "atp_kill" "kill all managed provers" K.diag
-    (Scan.succeed (Toplevel.no_timing o Toplevel.imperative kill_atps))
-val _ =
-  OuterSyntax.improper_command "atp_info"
-    "print information about managed provers" K.diag
-    (Scan.succeed (Toplevel.no_timing o Toplevel.imperative running_atps))
-val _ =
-  OuterSyntax.improper_command "atp_messages"
-    "print recent messages issued by managed provers" K.diag
-    (Scan.option (P.$$$ "(" |-- P.nat --| P.$$$ ")") >>
-      (fn limit => Toplevel.no_timing
-                   o Toplevel.imperative (fn () => messages limit)))
+  OuterSyntax.improper_command sledgehammerN
+      "search for first-order proof using automatic theorem provers" K.diag
+      parse_sledgehammer_command
 val _ =
-  OuterSyntax.improper_command "print_atps" "print external provers" K.diag
-    (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_theory o
-      Toplevel.keep (available_atps o Toplevel.theory_of)))
-val _ =
-  OuterSyntax.improper_command "atp_minimize"
-    "minimize theorem list with external prover" K.diag
-    (parse_minimize_args -- parse_fact_refs >> (fn (args, fact_refs) =>
-      Toplevel.no_timing o Toplevel.unknown_proof o
-        Toplevel.keep (minimize [] args 1 fact_refs o Toplevel.proof_of)))
+  OuterSyntax.command sledgehammer_paramsN
+      "set and display the default parameters for Sledgehammer" K.thy_decl
+      parse_sledgehammer_params_command
 
-val _ =
-  OuterSyntax.improper_command "sledgehammer"
-    "search for first-order proof using automatic theorem provers" K.diag
-    parse_sledgehammer_command
-val _ =
-  OuterSyntax.command "sledgehammer_params"
-    "set and display the default parameters for Sledgehammer" K.thy_decl
-    parse_sledgehammer_params_command
+val setup =
+  neg_clausify_setup
+  #> clausify_setup
 
 end;
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1,5 +1,6 @@
 (*  Title:      HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML
     Author:     Lawrence C Paulson and Claire Quigley, Cambridge University Computer Laboratory
+    Author:     Jasmin Blanchette, TU Muenchen
 
 Transfer of proofs from external provers.
 *)
@@ -7,6 +8,7 @@
 signature SLEDGEHAMMER_PROOF_RECONSTRUCT =
 sig
   type minimize_command = string list -> string
+  type name_pool = Sledgehammer_FOL_Clause.name_pool
 
   val chained_hint: string
   val invert_const: string -> string
@@ -14,17 +16,16 @@
   val num_typargs: theory -> string -> int
   val make_tvar: string -> typ
   val strip_prefix: string -> string -> string option
-  val is_proof_well_formed: string -> bool
   val metis_line: int -> int -> string list -> string
   val metis_proof_text:
     minimize_command * string * string vector * thm * int
     -> string * string list
   val isar_proof_text:
-    bool -> int -> bool -> Proof.context
+    name_pool option * bool * int * bool * Proof.context * int list list
     -> minimize_command * string * string vector * thm * int
     -> string * string list
   val proof_text:
-    bool -> bool -> int -> bool -> Proof.context
+    bool -> name_pool option * bool * int * bool * Proof.context * int list list
     -> minimize_command * string * string vector * thm * int
     -> string * string list
 end;
@@ -37,110 +38,122 @@
 
 type minimize_command = string list -> string
 
-val trace_proof_path = Path.basic "atp_trace";
+fun is_ident_char c = Char.isAlphaNum c orelse c = #"_"
+fun is_head_digit s = Char.isDigit (String.sub (s, 0))
 
-fun trace_proof_msg f =
-  if !trace then File.append (File.tmp_path trace_proof_path) (f ()) else ();
+fun is_axiom_clause_number thm_names num = num <= Vector.length thm_names
 
-fun string_of_thm ctxt = PrintMode.setmp [] (Display.string_of_thm ctxt);
-
-fun is_ident_char c = Char.isAlphaNum c orelse c = #"_"
-
-fun is_axiom thm_names line_no = line_no <= Vector.length thm_names
+fun ugly_name NONE s = s
+  | ugly_name (SOME the_pool) s =
+    case Symtab.lookup (snd the_pool) s of
+      SOME s' => s'
+    | NONE => s
 
 (**** PARSING OF TSTP FORMAT ****)
 
 (* Syntax trees, either term list or formulae *)
-datatype stree = Int of int | Br of string * stree list;
+datatype stree = SInt of int | SBranch of string * stree list;
 
-fun atom x = Br(x,[]);
+fun atom x = SBranch (x, [])
 
-fun scons (x,y) = Br("cons", [x,y]);
-val listof = List.foldl scons (atom "nil");
+fun scons (x, y) = SBranch ("cons", [x, y])
+val slist_of = List.foldl scons (atom "nil")
 
 (*Strings enclosed in single quotes, e.g. filenames*)
-val quoted = $$ "'" |-- Scan.repeat (~$$ "'") --| $$ "'" >> implode;
-
-(*Intended for $true and $false*)
-fun tf s = "c_" ^ str (Char.toUpper (String.sub(s,0))) ^ String.extract(s,1,NONE);
-val truefalse = $$ "$" |-- Symbol.scan_id >> (atom o tf);
+val parse_quoted = $$ "'" |-- Scan.repeat (~$$ "'") --| $$ "'" >> implode;
 
 (*Integer constants, typically proof line numbers*)
-fun is_digit s = Char.isDigit (String.sub(s,0));
-val integer = Scan.many1 is_digit >> (the o Int.fromString o implode);
-
-(* needed for SPASS's nonstandard output format *)
-fun fix_symbol "equal" = "c_equal"
-  | fix_symbol s = s
+val parse_integer = Scan.many1 is_head_digit >> (the o Int.fromString o implode)
 
-(*Generalized FO terms, which include filenames, numbers, etc.*)
-fun term x = (quoted >> atom || integer >> Int || truefalse
-              || (Symbol.scan_id >> fix_symbol)
-                 -- Scan.optional ($$ "(" |-- terms --| $$ ")") [] >> Br
-              || $$ "(" |-- term --| $$ ")"
-              || $$ "[" |-- Scan.optional terms [] --| $$ "]" >> listof) x
-and terms x = (term ::: Scan.repeat ($$ "," |-- term)) x
+(* needed for SPASS's output format *)
+fun repair_bool_literal "true" = "c_True"
+  | repair_bool_literal "false" = "c_False"
+fun repair_name pool "equal" = "c_equal"
+  | repair_name pool s = ugly_name pool s
+(* Generalized first-order terms, which include file names, numbers, etc. *)
+(* The "x" argument is not strictly necessary, but without it Poly/ML loops
+   forever at compile time. *)
+fun parse_term pool x =
+  (parse_quoted >> atom
+   || parse_integer >> SInt
+   || $$ "$" |-- Symbol.scan_id >> (atom o repair_bool_literal)
+   || (Symbol.scan_id >> repair_name pool)
+      -- Scan.optional ($$ "(" |-- parse_terms pool --| $$ ")") [] >> SBranch
+   || $$ "(" |-- parse_term pool --| $$ ")"
+   || $$ "[" |-- Scan.optional (parse_terms pool) [] --| $$ "]" >> slist_of) x
+and parse_terms pool x =
+  (parse_term pool ::: Scan.repeat ($$ "," |-- parse_term pool)) x
 
-fun negate t = Br ("c_Not", [t])
-fun equate t1 t2 = Br ("c_equal", [t1, t2]);
+fun negate_stree t = SBranch ("c_Not", [t])
+fun equate_strees t1 t2 = SBranch ("c_equal", [t1, t2]);
 
-(*Apply equal or not-equal to a term*)
-fun syn_equal (t, NONE) = t
-  | syn_equal (t1, SOME (NONE, t2)) = equate t1 t2
-  | syn_equal (t1, SOME (SOME _, t2)) = negate (equate t1 t2)
-
+(* Apply equal or not-equal to a term. *)
+fun repair_predicate_term (t, NONE) = t
+  | repair_predicate_term (t1, SOME (NONE, t2)) = equate_strees t1 t2
+  | repair_predicate_term (t1, SOME (SOME _, t2)) =
+    negate_stree (equate_strees t1 t2)
+fun parse_predicate_term pool =
+  parse_term pool -- Scan.option (Scan.option ($$ "!") --| $$ "="
+                                  -- parse_term pool)
+  >> repair_predicate_term
 (*Literals can involve negation, = and !=.*)
-fun literal x =
-  ($$ "~" |-- literal >> negate
-   || (term -- Scan.option (Scan.option ($$ "!") --| $$ "=" -- term)
-       >> syn_equal)) x
+fun parse_literal pool x =
+  ($$ "~" |-- parse_literal pool >> negate_stree || parse_predicate_term pool) x
 
-val literals = literal ::: Scan.repeat ($$ "|" |-- literal);
+fun parse_literals pool =
+  parse_literal pool ::: Scan.repeat ($$ "|" |-- parse_literal pool)
 
-(*Clause: a list of literals separated by the disjunction sign*)
-val clause = $$ "(" |-- literals --| $$ ")" || Scan.single literal;
+(* Clause: a list of literals separated by the disjunction sign. *)
+fun parse_clause pool =
+  $$ "(" |-- parse_literals pool --| $$ ")" || Scan.single (parse_literal pool)
 
-fun ints_of_stree (Int n) = cons n
-  | ints_of_stree (Br (_, ts)) = fold ints_of_stree ts
-
-val tstp_annotations =
-  Scan.optional ($$ "," |-- term --| Scan.option ($$ "," |-- terms)
+fun ints_of_stree (SInt n) = cons n
+  | ints_of_stree (SBranch (_, ts)) = fold ints_of_stree ts
+val parse_tstp_annotations =
+  Scan.optional ($$ "," |-- parse_term NONE
+                   --| Scan.option ($$ "," |-- parse_terms NONE)
                  >> (fn source => ints_of_stree source [])) []
 
-fun retuple_tstp_line ((name, ts), deps) = (name, ts, deps)
-
 (* <cnf_annotated> ::= cnf(<name>, <formula_role>, <cnf_formula> <annotations>).
    The <name> could be an identifier, but we assume integers. *)
-val parse_tstp_line =
-  (Scan.this_string "cnf" -- $$ "(") |-- integer --| $$ "," --| Symbol.scan_id
-   --| $$ "," -- clause -- tstp_annotations --| $$ ")" --| $$ "."
+fun retuple_tstp_line ((name, ts), deps) = (name, ts, deps)
+fun parse_tstp_line pool =
+  (Scan.this_string "cnf" -- $$ "(") |-- parse_integer --| $$ ","
+   --| Symbol.scan_id --| $$ "," -- parse_clause pool -- parse_tstp_annotations
+   --| $$ ")" --| $$ "."
   >> retuple_tstp_line
 
 (**** PARSING OF SPASS OUTPUT ****)
 
-val dot_name = integer --| $$ "." --| integer
+(* SPASS returns clause references of the form "x.y". We ignore "y", whose role
+   is not clear anyway. *)
+val parse_dot_name = parse_integer --| $$ "." --| parse_integer
 
-val spass_annotations =
-  Scan.optional ($$ ":" |-- Scan.repeat (dot_name --| Scan.option ($$ ","))) []
+val parse_spass_annotations =
+  Scan.optional ($$ ":" |-- Scan.repeat (parse_dot_name
+                                         --| Scan.option ($$ ","))) []
 
-val starred_literal = literal --| Scan.repeat ($$ "*" || $$ " ")
+(* It is not clear why some literals are followed by sequences of stars. We
+   ignore them. *)
+fun parse_starred_predicate_term pool =
+  parse_predicate_term pool --| Scan.repeat ($$ "*" || $$ " ")
 
-val horn_clause =
-  Scan.repeat starred_literal --| $$ "-" --| $$ ">"
-  -- Scan.repeat starred_literal
-  >> (fn ([], []) => [atom (tf "false")]
-       | (clauses1, clauses2) => map negate clauses1 @ clauses2)
-
-fun retuple_spass_proof_line ((name, deps), ts) = (name, ts, deps)
+fun parse_horn_clause pool =
+  Scan.repeat (parse_starred_predicate_term pool) --| $$ "-" --| $$ ">"
+  -- Scan.repeat (parse_starred_predicate_term pool)
+  >> (fn ([], []) => [atom "c_False"]
+       | (clauses1, clauses2) => map negate_stree clauses1 @ clauses2)
 
-(* Syntax: <name>[0:<inference><annotations>] || -> <cnf_formula> **. *)
-val parse_spass_proof_line =
-  integer --| $$ "[" --| $$ "0" --| $$ ":" --| Symbol.scan_id
-  -- spass_annotations --| $$ "]" --| $$ "|" --| $$ "|" -- horn_clause
-  --| $$ "."
-  >> retuple_spass_proof_line
+(* Syntax: <name>[0:<inference><annotations>] ||
+           <cnf_formulas> -> <cnf_formulas>. *)
+fun retuple_spass_line ((name, deps), ts) = (name, ts, deps)
+fun parse_spass_line pool =
+  parse_integer --| $$ "[" --| $$ "0" --| $$ ":" --| Symbol.scan_id
+  -- parse_spass_annotations --| $$ "]" --| $$ "|" --| $$ "|"
+  -- parse_horn_clause pool --| $$ "."
+  >> retuple_spass_line
 
-val parse_proof_line = fst o (parse_tstp_line || parse_spass_proof_line)
+fun parse_line pool = fst o (parse_tstp_line pool || parse_spass_line pool)
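
As a standalone illustration of the renamed syntax-tree datatype and of how ints_of_stree gathers the clause numbers mentioned in an annotation: the sample tree below is made up, and Isabelle's fold is replaced by List.foldl to keep the sketch self-contained.

    datatype stree = SInt of int | SBranch of string * stree list

    fun ints_of_stree (SInt n) ns = n :: ns
      | ints_of_stree (SBranch (_, ts)) ns =
        List.foldl (fn (t, acc) => ints_of_stree t acc) ns ts

    val deps =
      ints_of_stree (SBranch ("inference", [SInt 12, SBranch ("file", []), SInt 7])) []
    (* deps = [7, 12] *)
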
 
 (**** INTERPRETATION OF TSTP SYNTAX TREES ****)
 
@@ -169,8 +182,8 @@
   by information from type literals, or by type inference.*)
 fun type_of_stree t =
   case t of
-      Int _ => raise STREE t
-    | Br (a,ts) =>
+      SInt _ => raise STREE t
+    | SBranch (a,ts) =>
         let val Ts = map type_of_stree ts
         in
           case strip_prefix tconst_prefix a of
@@ -188,13 +201,10 @@
 
 (*Invert the table of translations between Isabelle and ATPs*)
 val const_trans_table_inv =
-      Symtab.update ("fequal", "op =")
-        (Symtab.make (map swap (Symtab.dest const_trans_table)));
+  Symtab.update ("fequal", @{const_name "op ="})
+                (Symtab.make (map swap (Symtab.dest const_trans_table)))
 
-fun invert_const c =
-    case Symtab.lookup const_trans_table_inv c of
-        SOME c' => c'
-      | NONE => c;
+fun invert_const c = c |> Symtab.lookup const_trans_table_inv |> the_default c
 
 (*The number of type arguments of a constant, zero if it's monomorphic*)
 fun num_typargs thy s = length (Sign.const_typargs thy (s, Sign.the_const_type thy s));
@@ -206,10 +216,10 @@
   them to be inferred.*)
 fun term_of_stree args thy t =
   case t of
-      Int _ => raise STREE t
-    | Br ("hBOOL",[t]) => term_of_stree [] thy t  (*ignore hBOOL*)
-    | Br ("hAPP",[t,u]) => term_of_stree (u::args) thy t
-    | Br (a,ts) =>
+      SInt _ => raise STREE t
+    | SBranch ("hBOOL", [t]) => term_of_stree [] thy t  (*ignore hBOOL*)
+    | SBranch ("hAPP", [t, u]) => term_of_stree (u::args) thy t
+    | SBranch (a, ts) =>
         case strip_prefix const_prefix a of
             SOME "equal" =>
               list_comb(Const (@{const_name "op ="}, HOLogic.typeT), List.map (term_of_stree [] thy) ts)
@@ -232,11 +242,13 @@
                       | NONE => make_var (a,T)  (* Variable from the ATP, say "X1" *)
               in  list_comb (opr, List.map (term_of_stree [] thy) (ts@args))  end;
 
-(*Type class literal applied to a type. Returns triple of polarity, class, type.*)
-fun constraint_of_stree pol (Br("c_Not",[t])) = constraint_of_stree (not pol) t
+(* Type class literal applied to a type. Returns triple of polarity, class,
+   type. *)
+fun constraint_of_stree pol (SBranch ("c_Not", [t])) =
+    constraint_of_stree (not pol) t
   | constraint_of_stree pol t = case t of
-        Int _ => raise STREE t
-      | Br (a,ts) =>
+        SInt _ => raise STREE t
+      | SBranch (a, ts) =>
             (case (strip_prefix class_prefix a, map type_of_stree ts) of
                  (SOME b, [T]) => (pol, b, T)
                | _ => raise STREE t);
@@ -249,25 +261,55 @@
   | add_constraint ((false, cl, TVar(ix,_)), vt) = addix (ix,cl) vt
   | add_constraint (_, vt) = vt;
 
-(*False literals (which E includes in its proofs) are deleted*)
-val nofalses = filter (not o equal HOLogic.false_const);
+fun is_positive_literal (@{const Trueprop} $ t) = is_positive_literal t
+  | is_positive_literal (@{const Not} $ _) = false
+  | is_positive_literal t = true
 
-(*Final treatment of the list of "real" literals from a clause.*)
-fun finish [] = HOLogic.true_const  (*No "real" literals means only type information*)
-  | finish lits =
-      case nofalses lits of
-          [] => HOLogic.false_const  (*The empty clause, since we started with real literals*)
-        | xs => foldr1 HOLogic.mk_disj (rev xs);
+fun negate_term thy (@{const Trueprop} $ t) =
+    @{const Trueprop} $ negate_term thy t
+  | negate_term thy (Const (@{const_name All}, T) $ Abs (s, T', t')) =
+    Const (@{const_name Ex}, T) $ Abs (s, T', negate_term thy t')
+  | negate_term thy (Const (@{const_name Ex}, T) $ Abs (s, T', t')) =
+    Const (@{const_name All}, T) $ Abs (s, T', negate_term thy t')
+  | negate_term thy (@{const "op -->"} $ t1 $ t2) =
+    @{const "op &"} $ t1 $ negate_term thy t2
+  | negate_term thy (@{const "op &"} $ t1 $ t2) =
+    @{const "op |"} $ negate_term thy t1 $ negate_term thy t2
+  | negate_term thy (@{const "op |"} $ t1 $ t2) =
+    @{const "op &"} $ negate_term thy t1 $ negate_term thy t2
+  | negate_term thy (@{const Not} $ t) = t
+  | negate_term thy t =
+    if fastype_of t = @{typ prop} then
+      HOLogic.mk_Trueprop (negate_term thy (Object_Logic.atomize_term thy t))
+    else
+      @{const Not} $ t
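+(* Illustration (editor's note, not from the original patch): by the clauses
+   above, negating "ALL x. P x --> Q x" yields "EX x. P x & ~ Q x", and
+   negating "~ P" yields just "P". *)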
+
+fun clause_for_literals _ [] = HOLogic.false_const
+  | clause_for_literals _ [lit] = lit
+  | clause_for_literals thy lits =
+    case List.partition is_positive_literal lits of
+      (pos_lits as _ :: _, neg_lits as _ :: _) =>
+      @{const "op -->"}
+          $ foldr1 HOLogic.mk_conj (map (negate_term thy) neg_lits)
+          $ foldr1 HOLogic.mk_disj pos_lits
+    | _ => foldr1 HOLogic.mk_disj lits
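+(* Illustration (editor's note, not from the original patch): the literals
+   [~ A, B, C] are combined into the single clause "A --> B | C". *)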
+
+(* Final treatment of the list of "real" literals from a clause.
+   No "real" literals means only type information. *)
+fun finish_clause _ [] = HOLogic.true_const
+  | finish_clause thy lits =
+    lits |> filter_out (curry (op =) HOLogic.false_const) |> rev
+         |> clause_for_literals thy
 
 (*Accumulate sort constraints in vt, with "real" literals in lits.*)
-fun lits_of_strees _ (vt, lits) [] = (vt, finish lits)
-  | lits_of_strees ctxt (vt, lits) (t::ts) =
-      lits_of_strees ctxt (add_constraint (constraint_of_stree true t, vt), lits) ts
-      handle STREE _ =>
-      lits_of_strees ctxt (vt, term_of_stree [] (ProofContext.theory_of ctxt) t :: lits) ts;
+fun lits_of_strees thy (vt, lits) [] = (vt, finish_clause thy lits)
+  | lits_of_strees thy (vt, lits) (t :: ts) =
+    lits_of_strees thy (add_constraint (constraint_of_stree true t, vt), lits)
+                   ts
+    handle STREE _ => lits_of_strees thy (vt, term_of_stree [] thy t :: lits) ts
 
 (*Update TVars/TFrees with detected sort constraints.*)
-fun fix_sorts vt =
+fun repair_sorts vt =
   let fun tysubst (Type (a, Ts)) = Type (a, map tysubst Ts)
         | tysubst (TVar (xi, s)) = TVar (xi, the_default s (Vartab.lookup vt xi))
         | tysubst (TFree (x, s)) = TFree (x, the_default s (Vartab.lookup vt (x, ~1)))
@@ -281,14 +323,15 @@
 
 (*Interpret a list of syntax trees as a clause, given by "real" literals and sort constraints.
   vt0 holds the initial sort constraints, from the conjecture clauses.*)
-fun clause_of_strees ctxt vt0 ts =
-  let val (vt, dt) = lits_of_strees ctxt (vt0,[]) ts in
-    singleton (Syntax.check_terms ctxt) (TypeInfer.constrain HOLogic.boolT (fix_sorts vt dt))
+fun clause_of_strees ctxt vt ts =
+  let val (vt, dt) = lits_of_strees (ProofContext.theory_of ctxt) (vt, []) ts in
+    dt |> repair_sorts vt |> TypeInfer.constrain HOLogic.boolT
+       |> Syntax.check_term ctxt
   end
 
 fun gen_all_vars t = fold_rev Logic.all (OldTerm.term_vars t) t;
 
-fun decode_proof_step vt0 (name, ts, deps) ctxt =
+fun decode_line vt0 (name, ts, deps) ctxt =
   let val cl = clause_of_strees ctxt vt0 ts in
     ((name, cl, deps), fold Variable.declare_term (OldTerm.term_frees cl) ctxt)
   end
@@ -308,264 +351,110 @@
 
 (**** Translation of TSTP files to Isar Proofs ****)
 
-fun decode_proof_steps ctxt tuples =
+fun decode_lines ctxt tuples =
   let val vt0 = tfree_constraints_of_clauses Vartab.empty (map #2 tuples) in
-    #1 (fold_map (decode_proof_step vt0) tuples ctxt)
+    #1 (fold_map (decode_line vt0) tuples ctxt)
   end
 
-(** Finding a matching assumption. The literals may be permuted, and variable names
-    may disagree. We must try all combinations of literals (quadratic!) and
-    match the variable names consistently. **)
-
-fun strip_alls_aux n (Const(@{const_name all}, _)$Abs(a,T,t))  =
-      strip_alls_aux (n+1) (subst_bound (Var ((a,n), T), t))
-  | strip_alls_aux _ t  =  t;
-
-val strip_alls = strip_alls_aux 0;
-
-exception MATCH_LITERAL of unit
-
-(* Remark 1: Ignore types. They are not to be trusted.
-   Remark 2: Ignore order of arguments for equality. SPASS sometimes swaps
-   them for no apparent reason. *)
-fun match_literal (Const (@{const_name "op ="}, _) $ t1 $ u1)
-                  (Const (@{const_name "op ="}, _) $ t2 $ u2) env =
-    (env |> match_literal t1 t2 |> match_literal u1 u2
-     handle MATCH_LITERAL () =>
-            env |> match_literal t1 u2 |> match_literal u1 t2)
-  | match_literal (t1 $ u1) (t2 $ u2) env =
-    env |> match_literal t1 t2 |> match_literal u1 u2
-  | match_literal (Abs (_,_,t1)) (Abs (_,_,t2)) env =
-    match_literal t1 t2 env
-  | match_literal (Bound i1) (Bound i2) env =
-    if i1=i2 then env else raise MATCH_LITERAL ()
-  | match_literal (Const(a1,_)) (Const(a2,_)) env =
-    if a1=a2 then env else raise MATCH_LITERAL ()
-  | match_literal (Free(a1,_)) (Free(a2,_)) env =
-    if a1=a2 then env else raise MATCH_LITERAL ()
-  | match_literal (Var(ix1,_)) (Var(ix2,_)) env = insert (op =) (ix1,ix2) env
-  | match_literal _ _ _ = raise MATCH_LITERAL ()
-
-(* Checking that all variable associations are unique. The list "env" contains
-   no repetitions, but does it contain say (x, y) and (y, y)? *)
-fun good env =
-  let val (xs,ys) = ListPair.unzip env
-  in  not (has_duplicates (op=) xs orelse has_duplicates (op=) ys)  end;
-
-(*Match one list of literals against another, ignoring types and the order of
-  literals. Sorting is unreliable because we don't have types or variable names.*)
-fun matches_aux _ [] [] = true
-  | matches_aux env (lit::lits) ts =
-      let fun match1 us [] = false
-            | match1 us (t::ts) =
-                let val env' = match_literal lit t env
-                in  (good env' andalso matches_aux env' lits (us@ts)) orelse
-                    match1 (t::us) ts
-                end
-                handle MATCH_LITERAL () => match1 (t::us) ts
-      in  match1 [] ts  end;
-
-(*Is this length test useful?*)
-fun matches (lits1,lits2) =
-  length lits1 = length lits2  andalso
-  matches_aux [] (map Envir.eta_contract lits1) (map Envir.eta_contract lits2);
-
-fun permuted_clause t =
-  let val lits = HOLogic.disjuncts t
-      fun perm [] = NONE
-        | perm (ctm::ctms) =
-            if matches (lits, HOLogic.disjuncts (HOLogic.dest_Trueprop (strip_alls ctm)))
-            then SOME ctm else perm ctms
-  in perm end;
-
-(*ctms is a list of conjecture clauses as yielded by Isabelle. Those returned by the
-  ATP may have their literals reordered.*)
-fun isar_proof_body ctxt sorts ctms =
-  let
-    val _ = trace_proof_msg (K "\n\nisar_proof_body: start\n")
-    val string_of_term = 
-      PrintMode.setmp (filter (curry (op =) Symbol.xsymbolsN)
-                              (print_mode_value ()))
-                      (Syntax.string_of_term ctxt)
-    fun have_or_show "show" _ = "  show \""
-      | have_or_show have lname = "  " ^ have ^ " " ^ lname ^ ": \""
-    fun do_line _ (lname, t, []) =
-       (* No depedencies: it's a conjecture clause, with no proof. *)
-       (case permuted_clause t ctms of
-          SOME u => "  assume " ^ lname ^ ": \"" ^ string_of_term u ^ "\"\n"
-        | NONE => raise TERM ("Sledgehammer_Proof_Reconstruct.isar_proof_body",
-                              [t]))
-      | do_line have (lname, t, deps) =
-        have_or_show have lname ^
-        string_of_term (gen_all_vars (HOLogic.mk_Trueprop t)) ^
-        "\"\n    by (metis " ^ space_implode " " deps ^ ")\n"
-    fun do_lines [(lname, t, deps)] = [do_line "show" (lname, t, deps)]
-      | do_lines ((lname, t, deps) :: lines) =
-        do_line "have" (lname, t, deps) :: do_lines lines
-  in setmp_CRITICAL show_sorts sorts do_lines end;
-
 fun unequal t (_, t', _) = not (t aconv t');
 
 (*No "real" literals means only type information*)
 fun eq_types t = t aconv HOLogic.true_const;
 
-fun replace_dep (old:int, new) dep = if dep=old then new else [dep];
-
-fun replace_deps (old:int, new) (lno, t, deps) =
-      (lno, t, List.foldl (uncurry (union (op =))) [] (map (replace_dep (old, new)) deps));
+fun replace_dep (old, new) dep = if dep = old then new else [dep]
+fun replace_deps p (num, t, deps) =
+  (num, t, fold (union (op =) o replace_dep p) deps [])
 
 (*Discard axioms; consolidate adjacent lines that prove the same clause, since they differ
   only in type information.*)
-fun add_proof_line thm_names (lno, t, []) lines =
+fun add_line thm_names (num, t, []) lines =
       (* No dependencies: axiom or conjecture clause *)
-      if is_axiom thm_names lno then
+      if is_axiom_clause_number thm_names num then
         (* Axioms are not proof lines *)
         if eq_types t then
           (* Must be clsrel/clsarity: type information, so delete refs to it *)
-          map (replace_deps (lno, [])) lines
+          map (replace_deps (num, [])) lines
         else
           (case take_prefix (unequal t) lines of
              (_,[]) => lines                  (*no repetition of proof line*)
-           | (pre, (lno', _, _) :: post) =>   (*repetition: replace later line by earlier one*)
-               pre @ map (replace_deps (lno', [lno])) post)
+           | (pre, (num', _, _) :: post) =>   (*repetition: replace later line by earlier one*)
+               pre @ map (replace_deps (num', [num])) post)
       else
-        (lno, t, []) :: lines
-  | add_proof_line _ (lno, t, deps) lines =
-      if eq_types t then (lno, t, deps) :: lines
+        (num, t, []) :: lines
+  | add_line _ (num, t, deps) lines =
+      if eq_types t then (num, t, deps) :: lines
       (*Type information will be deleted later; skip repetition test.*)
       else (*FIXME: Doesn't this code risk conflating proofs involving different types??*)
       case take_prefix (unequal t) lines of
-         (_,[]) => (lno, t, deps) :: lines  (*no repetition of proof line*)
-       | (pre, (lno', t', _) :: post) =>
-           (lno, t', deps) ::               (*repetition: replace later line by earlier one*)
-           (pre @ map (replace_deps (lno', [lno])) post);
+         (_,[]) => (num, t, deps) :: lines  (*no repetition of proof line*)
+       | (pre, (num', t', _) :: post) =>
+           (num, t', deps) ::               (*repetition: replace later line by earlier one*)
+           (pre @ map (replace_deps (num', [num])) post);
 
 (*Recursively delete empty lines (type information) from the proof.*)
-fun add_nonnull_prfline ((lno, t, []), lines) = (*no dependencies, so a conjecture clause*)
-     if eq_types t (*must be type information, tfree_tcs, clsrel, clsarity: delete refs to it*)
-     then delete_dep lno lines
-     else (lno, t, []) :: lines
-  | add_nonnull_prfline ((lno, t, deps), lines) = (lno, t, deps) :: lines
-and delete_dep lno lines = List.foldr add_nonnull_prfline [] (map (replace_deps (lno, [])) lines);
+fun add_nonnull_line (num, t, []) lines = (*no dependencies, so a conjecture clause*)
+    if eq_types t then
+      (*must be type information, tfree_tcs, clsrel, clsarity: delete refs to it*)
+      delete_dep num lines
+    else
+      (num, t, []) :: lines
+  | add_nonnull_line (num, t, deps) lines = (num, t, deps) :: lines
+and delete_dep num lines =
+  fold_rev add_nonnull_line (map (replace_deps (num, [])) lines) []
 
 fun bad_free (Free (a,_)) = String.isPrefix skolem_prefix a
   | bad_free _ = false;
 
-(*TVars are forbidden in goals. Also, we don't want lines with <2 dependencies.
-  To further compress proofs, setting modulus:=n deletes every nth line, and nlines
-  counts the number of proof lines processed so far.
-  Deleted lines are replaced by their own dependencies. Note that the "add_nonnull_prfline"
-  phase may delete some dependencies, hence this phase comes later.*)
-fun add_wanted_prfline ctxt _ ((lno, t, []), (nlines, lines)) =
-      (nlines, (lno, t, []) :: lines)   (*conjecture clauses must be kept*)
-  | add_wanted_prfline ctxt modulus ((lno, t, deps), (nlines, lines)) =
-      if eq_types t orelse not (null (Term.add_tvars t [])) orelse
-         exists_subterm bad_free t orelse
-         (not (null lines) andalso   (*final line can't be deleted for these reasons*)
-          (length deps < 2 orelse nlines mod modulus <> 0))
-      then (nlines+1, map (replace_deps (lno, deps)) lines) (*Delete line*)
-      else (nlines+1, (lno, t, deps) :: lines);
+fun add_desired_line ctxt (num, t, []) (j, lines) =
+    (j, (num, t, []) :: lines)  (* conjecture clauses must be kept *)
+  | add_desired_line ctxt (num, t, deps) (j, lines) =
+    (j + 1,
+     if eq_types t orelse not (null (Term.add_tvars t [])) orelse
+        exists_subterm bad_free t orelse length deps < 2 then
+       map (replace_deps (num, deps)) lines  (* delete line *)
+     else
+       (num, t, deps) :: lines)
 
+(* ### *)
 (*Replace numeric proof lines by strings, either from thm_names or sequential line numbers*)
 fun stringify_deps thm_names deps_map [] = []
-  | stringify_deps thm_names deps_map ((lno, t, deps) :: lines) =
-      if is_axiom thm_names lno then
-        (Vector.sub(thm_names,lno-1), t, []) :: stringify_deps thm_names deps_map lines
-      else let val lname = Int.toString (length deps_map)
-               fun fix lno = if is_axiom thm_names lno
-                             then SOME(Vector.sub(thm_names,lno-1))
-                             else AList.lookup (op =) deps_map lno;
-           in  (lname, t, map_filter fix (distinct (op=) deps)) ::
-               stringify_deps thm_names ((lno,lname)::deps_map) lines
-           end;
+  | stringify_deps thm_names deps_map ((num, t, deps) :: lines) =
+    if is_axiom_clause_number thm_names num then
+      (Vector.sub (thm_names, num - 1), t, []) ::
+      stringify_deps thm_names deps_map lines
+    else
+      let
+        val label = Int.toString (length deps_map)
+        fun string_for_num num =
+          if is_axiom_clause_number thm_names num then
+            SOME (Vector.sub (thm_names, num - 1))
+          else
+            AList.lookup (op =) deps_map num
+      in
+        (label, t, map_filter string_for_num (distinct (op =) deps)) ::
+        stringify_deps thm_names ((num, label) :: deps_map) lines
+      end
 
-fun isar_proof_start i =
-  (if i = 1 then "" else "prefer " ^ string_of_int i ^ "\n") ^
-  "proof (neg_clausify)\n";
-fun isar_fixes [] = ""
-  | isar_fixes ts = "  fix " ^ space_implode " " ts ^ "\n";
-fun isar_proof_end 1 = "qed"
-  | isar_proof_end _ = "next"
+(** EXTRACTING LEMMAS **)
 
-fun isar_proof_from_atp_proof cnfs modulus sorts ctxt goal i thm_names =
+(* A list consisting of the first number in each line is returned.
+   TSTP: Interesting lines have the form "cnf(108, axiom, ...)", where the
+   number (108) is extracted.
+   SPASS: Lines have the form "108[0:Inp] ...", where the first number (108) is
+   extracted. *)
+fun extract_clause_numbers_in_atp_proof atp_proof =
   let
-    val _ = trace_proof_msg (K "\nisar_proof_from_atp_proof: start\n")
-    val tuples = map (parse_proof_line o explode) cnfs
-    val _ = trace_proof_msg (fn () =>
-      Int.toString (length tuples) ^ " tuples extracted\n")
-    val ctxt = ProofContext.set_mode ProofContext.mode_schematic ctxt
-    val raw_lines =
-      fold_rev (add_proof_line thm_names) (decode_proof_steps ctxt tuples) []
-    val _ = trace_proof_msg (fn () =>
-      Int.toString (length raw_lines) ^ " raw_lines extracted\n")
-    val nonnull_lines = List.foldr add_nonnull_prfline [] raw_lines
-    val _ = trace_proof_msg (fn () =>
-      Int.toString (length nonnull_lines) ^ " nonnull_lines extracted\n")
-    val (_, lines) = List.foldr (add_wanted_prfline ctxt modulus) (0,[]) nonnull_lines
-    val _ = trace_proof_msg (fn () =>
-      Int.toString (length lines) ^ " lines extracted\n")
-    val (ccls, fixes) = neg_conjecture_clauses ctxt goal i
-    val _ = trace_proof_msg (fn () =>
-      Int.toString (length ccls) ^ " conjecture clauses\n")
-    val ccls = map forall_intr_vars ccls
-    val _ = app (fn th => trace_proof_msg
-                              (fn () => "\nccl: " ^ string_of_thm ctxt th)) ccls
-    val body = isar_proof_body ctxt sorts (map prop_of ccls)
-                               (stringify_deps thm_names [] lines)
-    val n = Logic.count_prems (prop_of goal)
-    val _ = trace_proof_msg (K "\nisar_proof_from_atp_proof: finishing\n")
-  in
-    isar_proof_start i ^ isar_fixes (map #1 fixes) ^ implode body ^
-    isar_proof_end n ^ "\n"
-  end
-  handle STREE _ => raise Fail "Cannot parse ATP output";
-
-
-(*=== EXTRACTING PROOF-TEXT === *)
-
-val begin_proof_strs = ["# SZS output start CNFRefutation.",
-  "=========== Refutation ==========",
-  "Here is a proof"];
-
-val end_proof_strs = ["# SZS output end CNFRefutation",
-  "======= End of refutation =======",
-  "Formulae used in the proof"];
-
-fun get_proof_extract proof =
-  (* Splits by the first possible of a list of splitters. *)
-  case pairself (find_first (fn s => String.isSubstring s proof))
-                (begin_proof_strs, end_proof_strs) of
-    (SOME begin_string, SOME end_string) =>
-    proof |> first_field begin_string |> the |> snd
-          |> first_field end_string |> the |> fst
-  | _ => raise Fail "Cannot extract proof"
-
-(* ==== CHECK IF PROOF WAS SUCCESSFUL === *)
-
-fun is_proof_well_formed proof =
-  forall (exists (fn s => String.isSubstring s proof))
-         [begin_proof_strs, end_proof_strs]
-
-(* === EXTRACTING LEMMAS === *)
-(* A list consisting of the first number in each line is returned.
-   TPTP: Interesting lines have the form "cnf(108, axiom, ...)", where the
-   number (108) is extracted.
-   DFG: Lines have the form "108[0:Inp] ...", where the first number (108) is
-   extracted. *)
-fun get_step_nums proof_extract =
-  let
-    val toks = String.tokens (not o is_ident_char)
-    fun inputno ("cnf" :: ntok :: "axiom" :: _) = Int.fromString ntok
-      | inputno ("cnf" :: ntok :: "negated_conjecture" :: _) =
-        Int.fromString ntok
-      | inputno (ntok::"0"::"Inp"::_) = Int.fromString ntok  (* DFG format *)
-      | inputno _ = NONE
-    val lines = split_lines proof_extract
-  in map_filter (inputno o toks) lines end
+    val tokens_of = String.tokens (not o is_ident_char)
+    fun extract_num ("cnf" :: num :: "axiom" :: _) = Int.fromString num
+      | extract_num ("cnf" :: num :: "negated_conjecture" :: _) =
+        Int.fromString num
+      | extract_num (num :: "0" :: "Inp" :: _) = Int.fromString num
+      | extract_num _ = NONE
+  in atp_proof |> split_lines |> map_filter (extract_num o tokens_of) end
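+(* Illustration (editor's note, not from the original patch): for a TSTP line
+   "cnf(108, axiom, ...)" or a SPASS line "108[0:Inp] ...", "extract_num"
+   yields SOME 108; lines matching neither pattern are dropped by
+   "map_filter". *)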
   
-(*Used to label theorems chained into the sledgehammer call*)
-val chained_hint = "CHAINED";
-val kill_chained = filter_out (curry (op =) chained_hint)
+(* Used to label theorems chained into the Sledgehammer call (or, more
+   precisely, into the goal). *)
+val chained_hint = "sledgehammer_chained"
 
 fun apply_command _ 1 = "by "
   | apply_command 1 _ = "apply "
@@ -585,28 +474,59 @@
       "To minimize the number of lemmas, try this command: " ^
       Markup.markup Markup.sendback command ^ ".\n"
 
-fun metis_proof_text (minimize_command, proof, thm_names, goal, i) =
+fun metis_proof_text (minimize_command, atp_proof, thm_names, goal, i) =
   let
     val lemmas =
-      proof |> get_proof_extract
-            |> get_step_nums
-            |> filter (is_axiom thm_names)
-            |> map (fn i => Vector.sub (thm_names, i - 1))
-            |> filter (fn x => x <> "??.unknown")
-            |> sort_distinct string_ord
+      atp_proof |> extract_clause_numbers_in_atp_proof
+                |> filter (is_axiom_clause_number thm_names)
+                |> map (fn i => Vector.sub (thm_names, i - 1))
+                |> filter_out (fn s => s = "??.unknown" orelse s = chained_hint)
+                |> sort_distinct string_ord
     val n = Logic.count_prems (prop_of goal)
-    val xs = kill_chained lemmas
-  in
-    (metis_line i n xs ^ minimize_line minimize_command xs, kill_chained lemmas)
-  end
+  in (metis_line i n lemmas ^ minimize_line minimize_command lemmas, lemmas) end
+
+val is_valid_line = String.isPrefix "cnf(" orf String.isSubstring "||"
+
+(** NEW PROOF RECONSTRUCTION CODE **)
+
+type label = string * int
+type facts = label list * string list
+
+fun merge_fact_sets (ls1, ss1) (ls2, ss2) =
+  (union (op =) ls1 ls2, union (op =) ss1 ss2)
+
+datatype qualifier = Show | Then | Moreover | Ultimately
 
-val is_proof_line = String.isPrefix "cnf(" orf String.isSubstring "||"
+datatype step =
+  Fix of term list |
+  Assume of label * term |
+  Have of qualifier list * label * term * byline
+and byline =
+  Facts of facts |
+  CaseSplit of step list list * facts
+
+val raw_prefix = "X"
+val assum_prefix = "A"
+val fact_prefix = "F"
 
-fun do_space c = if Char.isSpace c then "" else str c
+(* ###
+fun add_fact_from_dep s =
+  case Int.fromString s of
+    SOME n => apfst (cons (raw_prefix, n))
+  | NONE => apsnd (cons s)
+*)
+
+val add_fact_from_dep = apfst o cons o pair raw_prefix
+
+fun step_for_tuple _ (label, t, []) = Assume ((raw_prefix, label), t)
+  | step_for_tuple j (label, t, deps) =
+    Have (if j = 1 then [Show] else [], (raw_prefix, label), t,
+          Facts (fold add_fact_from_dep deps ([], [])))
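+(* Illustration (editor's note, not from the original patch): with j <> 1, a
+   tuple (12, t, [7, 9]) becomes
+   Have ([], ("X", 12), t, Facts ([("X", 9), ("X", 7)], [])), whereas a tuple
+   without dependencies becomes an Assume step. *)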
 
 fun strip_spaces_in_list [] = ""
-  | strip_spaces_in_list [c1] = do_space c1
-  | strip_spaces_in_list [c1, c2] = do_space c1 ^ do_space c2
+  | strip_spaces_in_list [c1] = if Char.isSpace c1 then "" else str c1
+  | strip_spaces_in_list [c1, c2] =
+    strip_spaces_in_list [c1] ^ strip_spaces_in_list [c2]
   | strip_spaces_in_list (c1 :: c2 :: c3 :: cs) =
     if Char.isSpace c1 then
       strip_spaces_in_list (c2 :: c3 :: cs)
@@ -614,39 +534,336 @@
       if Char.isSpace c3 then
         strip_spaces_in_list (c1 :: c3 :: cs)
       else
-        str c1 ^
-        (if is_ident_char c1 andalso is_ident_char c3 then " " else "") ^
+        str c1 ^ (if forall is_ident_char [c1, c3] then " " else "") ^
         strip_spaces_in_list (c3 :: cs)
     else
       str c1 ^ strip_spaces_in_list (c2 :: c3 :: cs)
-
 val strip_spaces = strip_spaces_in_list o String.explode
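+(* Illustration (editor's note, not from the original patch): assuming
+   "is_ident_char" accepts letters and digits,
+   strip_spaces "cnf ( 108 , axiom )" evaluates to "cnf(108,axiom)", while
+   strip_spaces "P x" keeps the space because both neighbours are identifier
+   characters. *)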
 
-fun isar_proof_text debug modulus sorts ctxt
-                    (minimize_command, proof, thm_names, goal, i) =
+fun proof_from_atp_proof pool ctxt atp_proof thm_names frees =
+  let
+    val tuples =
+      atp_proof |> split_lines |> map strip_spaces
+                |> filter is_valid_line
+                |> map (parse_line pool o explode)
+                |> decode_lines ctxt
+    val tuples = fold_rev (add_line thm_names) tuples []
+    val tuples = fold_rev add_nonnull_line tuples []
+    val tuples = fold_rev (add_desired_line ctxt) tuples (0, []) |> snd
+  in
+    (if null frees then [] else [Fix frees]) @
+    map2 step_for_tuple (length tuples downto 1) tuples
+  end
+
+val indent_size = 2
+val no_label = ("", ~1)
+
+fun no_show qs = not (member (op =) qs Show)
+
+(* When redirecting proofs, we keep information about the labels seen so far in
+   the "backpatches" data structure. The first component indicates which facts
+   should be associated with forthcoming proof steps. The second component is a
+   pair ("keep_ls", "drop_ls"), where "keep_ls" are the labels to keep and
+   "drop_ls" are those that should be dropped in a case split. *)
+type backpatches = (label * facts) list * (label list * label list)
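+(* Sketch (editor's note, not from the original patch; all names hypothetical):
+   a backpatches value such as
+     ([(("X", 3), ([("X", 1)], ["some_thm"]))], ([("X", 1)], [("X", 2)]))
+   records that the step labelled ("X", 3) should use fact ("X", 1) and
+   theorem "some_thm", that ("X", 1) is to be kept, and that ("X", 2) is to
+   be dropped in a case split. *)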
+
+fun using_of_step (Have (_, _, _, by)) =
+    (case by of
+       Facts (ls, _) => ls
+     | CaseSplit (proofs, (ls, _)) => fold (union (op =) o using_of) proofs ls)
+  | using_of_step _ = []
+and using_of proof = fold (union (op =) o using_of_step) proof []
+
+fun new_labels_of_step (Fix _) = []
+  | new_labels_of_step (Assume (l, _)) = [l]
+  | new_labels_of_step (Have (_, l, _, _)) = [l]
+val new_labels_of = maps new_labels_of_step
+
+val join_proofs =
+  let
+    fun aux _ [] = NONE
+      | aux proof_tail (proofs as (proof1 :: _)) =
+        if exists null proofs then
+          NONE
+        else if forall (curry (op =) (hd proof1) o hd) (tl proofs) then
+          aux (hd proof1 :: proof_tail) (map tl proofs)
+        else case hd proof1 of
+          Have ([], l, t, by) =>
+          if forall (fn Have ([], l', t', _) :: _ => (l, t) = (l', t')
+                      | _ => false) (tl proofs) andalso
+             not (exists (member (op =) (maps new_labels_of proofs))
+                         (using_of proof_tail)) then
+            SOME (l, t, map rev proofs, proof_tail)
+          else
+            NONE
+        | _ => NONE
+  in aux [] o map rev end
+
+fun case_split_qualifiers proofs =
+  case length proofs of
+    0 => []
+  | 1 => [Then]
+  | _ => [Ultimately]
+
+val index_in_shape = find_index o exists o curry (op =)
+
+fun direct_proof thy conjecture_shape hyp_ts concl_t proof =
   let
-    val cnfs = proof |> get_proof_extract |> split_lines |> map strip_spaces
-                     |> filter is_proof_line
+    val concl_ls = map (pair raw_prefix) (List.last conjecture_shape)
+    fun find_hyp (_, j) = nth hyp_ts (index_in_shape j conjecture_shape)
+    fun first_pass ([], contra) = ([], contra)
+      | first_pass ((ps as Fix _) :: proof, contra) =
+        first_pass (proof, contra) |>> cons ps
+      | first_pass ((ps as Assume (l, t)) :: proof, contra) =
+        if member (op =) concl_ls l then
+          first_pass (proof, contra ||> cons ps)
+        else
+          first_pass (proof, contra) |>> cons (Assume (l, find_hyp l))
+      | first_pass ((ps as Have (qs, l, t, Facts (ls, ss))) :: proof, contra) =
+        if exists (member (op =) (fst contra)) ls then
+          first_pass (proof, contra |>> cons l ||> cons ps)
+        else
+          first_pass (proof, contra) |>> cons ps
+      | first_pass _ = raise Fail "malformed proof"
+    val (proof_top, (contra_ls, contra_proof)) =
+      first_pass (proof, (concl_ls, []))
+    val backpatch_label = the_default ([], []) oo AList.lookup (op =) o fst
+    fun backpatch_labels patches ls =
+      fold merge_fact_sets (map (backpatch_label patches) ls) ([], [])
+    fun second_pass end_qs ([], assums, patches) =
+        ([Have (end_qs, no_label,
+                if length assums < length concl_ls then
+                  clause_for_literals thy (map (negate_term thy o fst) assums)
+                else
+                  concl_t,
+                Facts (backpatch_labels patches (map snd assums)))], patches)
+      | second_pass end_qs (Assume (l, t) :: proof, assums, patches) =
+        second_pass end_qs (proof, (t, l) :: assums, patches)
+      | second_pass end_qs (Have (qs, l, t, Facts (ls, ss)) :: proof, assums,
+                            patches) =
+        if member (op =) (snd (snd patches)) l andalso
+           not (AList.defined (op =) (fst patches) l) then
+          second_pass end_qs (proof, assums, patches ||> apsnd (append ls))
+        else
+          (case List.partition (member (op =) contra_ls) ls of
+             ([contra_l], co_ls) =>
+             if no_show qs then
+               second_pass end_qs
+                           (proof, assums,
+                            patches |>> cons (contra_l, (l :: co_ls, ss)))
+               |>> cons (if member (op =) (fst (snd patches)) l then
+                           Assume (l, negate_term thy t)
+                         else
+                           Have (qs, l, negate_term thy t,
+                                 Facts (backpatch_label patches l)))
+             else
+               second_pass end_qs (proof, assums,
+                                   patches |>> cons (contra_l, (co_ls, ss)))
+           | (contra_ls as _ :: _, co_ls) =>
+             let
+               val proofs =
+                 map_filter
+                     (fn l =>
+                         if member (op =) concl_ls l then
+                           NONE
+                         else
+                           let
+                             val drop_ls = filter (curry (op <>) l) contra_ls
+                           in
+                             second_pass []
+                                 (proof, assums,
+                                  patches ||> apfst (insert (op =) l)
+                                          ||> apsnd (union (op =) drop_ls))
+                             |> fst |> SOME
+                           end) contra_ls
+               val facts = (co_ls, [])
+             in
+               (case join_proofs proofs of
+                  SOME (l, t, proofs, proof_tail) =>
+                  Have (case_split_qualifiers proofs @
+                        (if null proof_tail then end_qs else []), l, t,
+                        CaseSplit (proofs, facts)) :: proof_tail
+                | NONE =>
+                  [Have (case_split_qualifiers proofs @ end_qs, no_label,
+                         concl_t, CaseSplit (proofs, facts))],
+                patches)
+             end
+           | _ => raise Fail "malformed proof")
+       | second_pass _ _ = raise Fail "malformed proof"
+    val (proof_bottom, _) =
+      second_pass [Show] (contra_proof, [], ([], ([], [])))
+  in proof_top @ proof_bottom end
+
+val kill_duplicate_assumptions_in_proof =
+  let
+    fun relabel_facts subst =
+      apfst (map (fn l => AList.lookup (op =) subst l |> the_default l))
+    fun do_step (ps as Fix _) (proof, subst, assums) =
+        (ps :: proof, subst, assums)
+      | do_step (ps as Assume (l, t)) (proof, subst, assums) =
+        (case AList.lookup (op aconv) assums t of
+           SOME l' => (proof, (l', l) :: subst, assums)
+         | NONE => (ps :: proof, subst, (t, l) :: assums))
+      | do_step (Have (qs, l, t, by)) (proof, subst, assums) =
+        (Have (qs, l, t,
+               case by of
+                 Facts facts => Facts (relabel_facts subst facts)
+               | CaseSplit (proofs, facts) =>
+                 CaseSplit (map do_proof proofs, relabel_facts subst facts)) ::
+         proof, subst, assums)
+    and do_proof proof = fold do_step proof ([], [], []) |> #1 |> rev
+  in do_proof end
+
+(* FIXME: implement *)
+fun shrink_proof shrink_factor proof = proof
+
+val then_chain_proof =
+  let
+    fun aux _ [] = []
+      | aux _ ((ps as Fix _) :: proof) = ps :: aux no_label proof
+      | aux _ ((ps as Assume (l, _)) :: proof) = ps :: aux l proof
+      | aux l' (Have (qs, l, t, by) :: proof) =
+        (case by of
+           Facts (ls, ss) =>
+           Have (if member (op =) ls l' then
+                   (Then :: qs, l, t,
+                    Facts (filter_out (curry (op =) l') ls, ss))
+                 else
+                   (qs, l, t, Facts (ls, ss)))
+         | CaseSplit (proofs, facts) =>
+           Have (qs, l, t, CaseSplit (map (aux no_label) proofs, facts))) ::
+        aux l proof
+  in aux no_label end
+
+fun kill_useless_labels_in_proof proof =
+  let
+    val used_ls = using_of proof
+    fun do_label l = if member (op =) used_ls l then l else no_label
+    fun kill (Fix ts) = Fix ts
+      | kill (Assume (l, t)) = Assume (do_label l, t)
+      | kill (Have (qs, l, t, by)) =
+        Have (qs, do_label l, t,
+              case by of
+                CaseSplit (proofs, facts) =>
+                CaseSplit (map (map kill) proofs, facts)
+              | _ => by)
+  in map kill proof end
+
+fun prefix_for_depth n = replicate_string (n + 1)
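+(* E.g. (editor's note, not from the original patch):
+   prefix_for_depth 1 fact_prefix = "FF", so facts inside a nested case split
+   are labelled FF1, FF2, etc. *)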
+
+val relabel_proof =
+  let
+    fun aux _ _ _ [] = []
+      | aux subst depth nextp ((ps as Fix _) :: proof) =
+        ps :: aux subst depth nextp proof
+      | aux subst depth (next_assum, next_fact) (Assume (l, t) :: proof) =
+        if l = no_label then
+          Assume (l, t) :: aux subst depth (next_assum, next_fact) proof
+        else
+          let val l' = (prefix_for_depth depth assum_prefix, next_assum) in
+            Assume (l', t) ::
+            aux ((l, l') :: subst) depth (next_assum + 1, next_fact) proof
+          end
+      | aux subst depth (next_assum, next_fact) (Have (qs, l, t, by) :: proof) =
+        let
+          val (l', subst, next_fact) =
+            if l = no_label then
+              (l, subst, next_fact)
+            else
+              let
+                val l' = (prefix_for_depth depth fact_prefix, next_fact)
+              in (l', (l, l') :: subst, next_fact + 1) end
+          val relabel_facts = apfst (map (the o AList.lookup (op =) subst))
+          val by =
+            case by of
+              Facts facts => Facts (relabel_facts facts)
+            | CaseSplit (proofs, facts) =>
+              CaseSplit (map (aux subst (depth + 1) (1, 1)) proofs,
+                         relabel_facts facts)
+        in
+          Have (qs, l', t, by) ::
+          aux subst depth (next_assum, next_fact) proof
+        end
+  in aux [] 0 (1, 1) end
+
+fun string_for_proof ctxt sorts i n =
+  let
+    fun do_indent ind = replicate_string (ind * indent_size) " "
+    fun do_raw_label (s, j) = s ^ string_of_int j
+    fun do_label l = if l = no_label then "" else do_raw_label l ^ ": "
+    fun do_have qs =
+      (if member (op =) qs Moreover then "moreover " else "") ^
+      (if member (op =) qs Ultimately then "ultimately " else "") ^
+      (if member (op =) qs Then then
+         if member (op =) qs Show then "thus" else "hence"
+       else
+         if member (op =) qs Show then "show" else "have")
+    val do_term =
+      quote o PrintMode.setmp (filter (curry (op =) Symbol.xsymbolsN)
+                                      (print_mode_value ()))
+                              (Syntax.string_of_term ctxt)
+    fun do_using [] = ""
+      | do_using ls = "using " ^ (space_implode " " (map do_raw_label ls)) ^ " "
+    fun do_by_facts [] [] = "by blast"
+      | do_by_facts _ [] = "by metis"
+      | do_by_facts _ ss = "by (metis " ^ space_implode " " ss ^ ")"
+    fun do_facts ind (ls, ss) =
+      do_indent (ind + 1) ^ do_using ls ^ do_by_facts ls ss
+    and do_step ind (Fix ts) =
+        do_indent ind ^ "fix " ^ space_implode " and " (map do_term ts) ^ "\n"
+      | do_step ind (Assume (l, t)) =
+        do_indent ind ^ "assume " ^ do_label l ^ do_term t ^ "\n"
+      | do_step ind (Have (qs, l, t, Facts facts)) =
+        do_indent ind ^ do_have qs ^ " " ^
+        do_label l ^ do_term t ^ "\n" ^ do_facts ind facts ^ "\n"
+      | do_step ind (Have (qs, l, t, CaseSplit (proofs, facts))) =
+        space_implode (do_indent ind ^ "moreover\n")
+                      (map (do_block ind) proofs) ^
+        do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^ "\n" ^
+        do_facts ind facts ^ "\n"
+    and do_steps prefix suffix ind steps =
+      let val s = implode (map (do_step ind) steps) in
+        replicate_string (ind * indent_size - size prefix) " " ^ prefix ^
+        String.extract (s, ind * indent_size,
+                        SOME (size s - ind * indent_size - 1)) ^
+        suffix ^ "\n"
+      end
+    and do_block ind proof = do_steps "{ " " }" (ind + 1) proof
+    and do_proof proof =
+      (if i <> 1 then "prefer " ^ string_of_int i ^ "\n" else "") ^
+      do_indent 0 ^ "proof -\n" ^
+      do_steps "" "" 1 proof ^
+      do_indent 0 ^ (if n <> 1 then "next" else "qed") ^ "\n"
+  in setmp_CRITICAL show_sorts sorts do_proof end
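+(* Sketch of the generated text (editor's note, not from the original patch;
+   formulas and the lemma name are placeholders):
+     proof -
+       fix "x"
+       assume A1: "..."
+       have F1: "..."
+         using A1 by (metis some_lemma)
+       show "..."
+         using F1 by metis
+     qed
+   If the subgoal is not the first one (i <> 1), the text is preceded by
+   "prefer i"; if other subgoals remain (n <> 1), "qed" becomes "next". *)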
+
+fun isar_proof_text (pool, debug, shrink_factor, sorts, ctxt, conjecture_shape)
+                    (minimize_command, atp_proof, thm_names, goal, i) =
+  let
+    val thy = ProofContext.theory_of ctxt
+    val (frees, hyp_ts, concl_t) = strip_subgoal goal i
+    val n = Logic.count_prems (prop_of goal)
     val (one_line_proof, lemma_names) =
-      metis_proof_text (minimize_command, proof, thm_names, goal, i)
-    val tokens = String.tokens (fn c => c = #" ") one_line_proof
+      metis_proof_text (minimize_command, atp_proof, thm_names, goal, i)
     fun isar_proof_for () =
-      case isar_proof_from_atp_proof cnfs modulus sorts ctxt goal i thm_names of
+      case proof_from_atp_proof pool ctxt atp_proof thm_names frees
+           |> direct_proof thy conjecture_shape hyp_ts concl_t
+           |> kill_duplicate_assumptions_in_proof
+           |> shrink_proof shrink_factor
+           |> then_chain_proof
+           |> kill_useless_labels_in_proof
+           |> relabel_proof
+           |> string_for_proof ctxt sorts i n of
         "" => ""
-      | isar_proof =>
-        "\nStructured proof:\n" ^ Markup.markup Markup.sendback isar_proof
+      | proof => "\nStructured proof:\n" ^ Markup.markup Markup.sendback proof
     val isar_proof =
-      if member (op =) tokens chained_hint then
-        ""
-      else if debug then
+      if debug then
         isar_proof_for ()
       else
         try isar_proof_for ()
         |> the_default "Warning: The Isar proof construction failed.\n"
   in (one_line_proof ^ isar_proof, lemma_names) end
 
-fun proof_text isar_proof debug modulus sorts ctxt =
-  if isar_proof then isar_proof_text debug modulus sorts ctxt
-  else metis_proof_text
+fun proof_text isar_proof isar_params =
+  if isar_proof then isar_proof_text isar_params else metis_proof_text
 
 end;
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -6,6 +6,7 @@
 
 signature SLEDGEHAMMER_UTIL =
 sig
+  val pairf : ('a -> 'b) -> ('a -> 'c) -> 'a -> 'b * 'c
   val plural_s : int -> string
   val serial_commas : string -> string list -> string list
   val replace_all : string -> string -> string -> string
@@ -13,14 +14,13 @@
   val timestamp : unit -> string
   val parse_bool_option : bool -> string -> string -> bool option
   val parse_time_option : string -> string -> Time.time option
-  val hashw : word * word -> word
-  val hashw_char : Char.char * word -> word
-  val hashw_string : string * word -> word
 end;
  
 structure Sledgehammer_Util : SLEDGEHAMMER_UTIL =
 struct
 
+fun pairf f g x = (f x, g x)
+
 fun plural_s n = if n = 1 then "" else "s"
 
 fun serial_commas _ [] = ["??"]
@@ -38,7 +38,6 @@
         else
           aux (String.sub (s, 0) :: seen) (String.extract (s, 1, NONE))
   in aux [] end
-
 fun remove_all bef = replace_all bef ""
 
 val timestamp = Date.fmt "%Y-%m-%d %H:%M:%S" o Date.fromTimeLocal o Time.now
@@ -73,11 +72,4 @@
         SOME (Time.fromMilliseconds msecs)
     end
 
-(* This hash function is recommended in Compilers: Principles, Techniques, and
-   Tools, by Aho, Sethi and Ullman. The hashpjw function, which they
-   particularly recommend, triggers a bug in versions of Poly/ML up to 4.2.0. *)
-fun hashw (u, w) = Word.+ (u, Word.* (0w65599, w))
-fun hashw_char (c, w) = hashw (Word.fromInt (Char.ord c), w)
-fun hashw_string (s:string, w) = CharVector.foldl hashw_char w s
-
 end;
--- a/src/HOL/Tools/numeral_simprocs.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/numeral_simprocs.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -332,8 +332,8 @@
 val field_combine_numerals =
   Arith_Data.prep_simproc @{theory}
     ("field_combine_numerals", 
-     ["(i::'a::{number_ring,field,division_ring_inverse_zero}) + j",
-      "(i::'a::{number_ring,field,division_ring_inverse_zero}) - j"], 
+     ["(i::'a::{field_inverse_zero, number_ring}) + j",
+      "(i::'a::{field_inverse_zero, number_ring}) - j"], 
      K FieldCombineNumerals.proc);
 
 (** Constant folding for multiplication in semirings **)
@@ -442,9 +442,9 @@
       "(l::'a::{semiring_div,number_ring}) div (m * n)"],
      K DivCancelNumeralFactor.proc),
     ("divide_cancel_numeral_factor",
-     ["((l::'a::{division_ring_inverse_zero,field,number_ring}) * m) / n",
-      "(l::'a::{division_ring_inverse_zero,field,number_ring}) / (m * n)",
-      "((number_of v)::'a::{division_ring_inverse_zero,field,number_ring}) / (number_of w)"],
+     ["((l::'a::{field_inverse_zero,number_ring}) * m) / n",
+      "(l::'a::{field_inverse_zero,number_ring}) / (m * n)",
+      "((number_of v)::'a::{field_inverse_zero,number_ring}) / (number_of w)"],
      K DivideCancelNumeralFactor.proc)];
 
 val field_cancel_numeral_factors =
@@ -454,9 +454,9 @@
       "(l::'a::{field,number_ring}) = m * n"],
      K EqCancelNumeralFactor.proc),
     ("field_cancel_numeral_factor",
-     ["((l::'a::{division_ring_inverse_zero,field,number_ring}) * m) / n",
-      "(l::'a::{division_ring_inverse_zero,field,number_ring}) / (m * n)",
-      "((number_of v)::'a::{division_ring_inverse_zero,field,number_ring}) / (number_of w)"],
+     ["((l::'a::{field_inverse_zero,number_ring}) * m) / n",
+      "(l::'a::{field_inverse_zero,number_ring}) / (m * n)",
+      "((number_of v)::'a::{field_inverse_zero,number_ring}) / (number_of w)"],
      K DivideCancelNumeralFactor.proc)]
 
 
@@ -598,8 +598,8 @@
      ["((l::'a::idom) * m) dvd n", "(l::'a::idom) dvd (m * n)"],
      K DvdCancelFactor.proc),
     ("divide_cancel_factor",
-     ["((l::'a::{division_ring_inverse_zero,field}) * m) / n",
-      "(l::'a::{division_ring_inverse_zero,field}) / (m * n)"],
+     ["((l::'a::field_inverse_zero) * m) / n",
+      "(l::'a::field_inverse_zero) / (m * n)"],
      K DivideCancelFactor.proc)];
 
 end;
--- a/src/HOL/Tools/refute.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/HOL/Tools/refute.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -1357,7 +1357,6 @@
     val subst_t = Term.subst_bounds (map Free frees, strip_t)
   in
     find_model thy (actual_params thy params) assm_ts subst_t true
-    handle REFUTE (s, s') => error ("REFUTE " ^ s ^ " " ^ s') (* ### *)
   end;
 
 (* ------------------------------------------------------------------------- *)
--- a/src/Pure/Isar/isar_syn.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/Isar/isar_syn.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -510,6 +510,13 @@
 val _ = gen_theorem true Thm.corollaryK;
 
 val _ =
+  OuterSyntax.local_theory_to_proof "example_proof"
+    "example proof body, without any result" K.thy_schematic_goal
+    (Scan.succeed
+      (Specification.schematic_theorem_cmd "" NONE (K I)
+        Attrib.empty_binding [] (Element.Shows []) false #> Proof.enter_forward));
+
+val _ =
   OuterSyntax.command "have" "state local goal"
     (K.tag_proof K.prf_goal)
     (SpecParse.statement >> ((Toplevel.print oo Toplevel.proof') o IsarCmd.have));
--- a/src/Pure/Isar/overloading.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/Isar/overloading.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -67,8 +67,8 @@
 
 fun improve_term_check ts ctxt =
   let
-    val { primary_constraints, secondary_constraints, improve, subst,
-      consider_abbrevs, passed, ... } = ImprovableSyntax.get ctxt;
+    val { secondary_constraints, improve, subst, consider_abbrevs, passed, ... } =
+      ImprovableSyntax.get ctxt;
     val tsig = (Sign.tsig_of o ProofContext.theory_of) ctxt;
     val is_abbrev = consider_abbrevs andalso ProofContext.abbrev_mode ctxt;
     val passed_or_abbrev = passed orelse is_abbrev;
--- a/src/Pure/Isar/proof.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/Isar/proof.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -792,7 +792,7 @@
 
 fun implicit_vars props =
   let
-    val (var_props, props') = take_prefix is_var props;
+    val (var_props, _) = take_prefix is_var props;
     val explicit_vars = fold Term.add_vars var_props [];
     val vars = filter_out (member (op =) explicit_vars) (fold Term.add_vars props []);
   in map (Logic.mk_term o Var) vars end;
@@ -845,11 +845,10 @@
 
 fun generic_qed after_ctxt state =
   let
-    val (goal_ctxt, {statement, goal, after_qed, ...}) = current_goal state;
+    val (goal_ctxt, {statement = (_, stmt, _), goal, after_qed, ...}) = current_goal state;
     val outer_state = state |> close_block;
     val outer_ctxt = context_of outer_state;
 
-    val ((_, pos), stmt, _) = statement;
     val props =
       flat (tl stmt)
       |> Variable.exportT_terms goal_ctxt outer_ctxt;
--- a/src/Pure/Isar/rule_cases.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/Isar/rule_cases.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -368,7 +368,7 @@
         map (Name.internal o Name.clean o fst) (Logic.strip_params prem)
       in Logic.list_rename_params (xs, prem) end;
     fun rename_prems prop =
-      let val (As, C) = Logic.strip_horn (Thm.prop_of rule)
+      let val (As, C) = Logic.strip_horn prop
       in Logic.list_implies (map rename As, C) end;
   in Thm.equal_elim (Thm.reflexive (Drule.cterm_fun rename_prems (Thm.cprop_of rule))) rule end;
 
--- a/src/Pure/Isar/toplevel.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/Isar/toplevel.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -661,6 +661,7 @@
     if immediate orelse
       null proof_trs orelse
       OuterKeyword.is_schematic_goal (name_of tr) orelse
+      exists (OuterKeyword.is_qed_global o name_of) proof_trs orelse
       not (can proof_of st') orelse
       Proof.is_relevant (proof_of st')
     then
--- a/src/Pure/axclass.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/axclass.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -2,7 +2,7 @@
     Author:     Markus Wenzel, TU Muenchen
 
 Type classes defined as predicates, associated with a record of
-parameters.
+parameters.  Proven class relations and type arities.
 *)
 
 signature AX_CLASS =
@@ -72,20 +72,23 @@
 datatype data = Data of
  {axclasses: info Symtab.table,
   params: param list,
-  proven_classrels: (thm * proof) Symreltab.table,
-  proven_arities: ((class * sort list) * ((thm * string) * proof)) list Symtab.table,
+  proven_classrels: thm Symreltab.table,
+  proven_arities: ((class * sort list) * (thm * string)) list Symtab.table,
     (*arity theorems with theory name*)
   inst_params:
     (string * thm) Symtab.table Symtab.table *
       (*constant name ~> type constructor ~> (constant name, equation)*)
     (string * string) Symtab.table (*constant name ~> (constant name, type constructor)*),
-  diff_merge_classrels: (class * class) list};
+  diff_classrels: (class * class) list};
 
 fun make_data
-    (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels) =
+    (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels) =
   Data {axclasses = axclasses, params = params, proven_classrels = proven_classrels,
     proven_arities = proven_arities, inst_params = inst_params,
-    diff_merge_classrels = diff_merge_classrels};
+    diff_classrels = diff_classrels};
+
+fun diff_table tab1 tab2 =
+  Symreltab.fold (fn (x, _) => if Symreltab.defined tab2 x then I else cons x) tab1 [];
 
 structure Data = Theory_Data_PP
 (
@@ -96,62 +99,60 @@
   fun merge pp
       (Data {axclasses = axclasses1, params = params1, proven_classrels = proven_classrels1,
         proven_arities = proven_arities1, inst_params = inst_params1,
-        diff_merge_classrels = diff_merge_classrels1},
+        diff_classrels = diff_classrels1},
        Data {axclasses = axclasses2, params = params2, proven_classrels = proven_classrels2,
         proven_arities = proven_arities2, inst_params = inst_params2,
-        diff_merge_classrels = diff_merge_classrels2}) =
+        diff_classrels = diff_classrels2}) =
     let
       val axclasses' = Symtab.merge (K true) (axclasses1, axclasses2);
       val params' =
         if null params1 then params2
-        else fold_rev (fn q => if member (op =) params1 q then I else add_param pp q) params2 params1;
+        else fold_rev (fn p => if member (op =) params1 p then I else add_param pp p) params2 params1;
 
       (*transitive closure of classrels and arity completion is done in Theory.at_begin hook*)
       val proven_classrels' = Symreltab.join (K #1) (proven_classrels1, proven_classrels2);
       val proven_arities' =
         Symtab.join (K (Library.merge (eq_fst op =))) (proven_arities1, proven_arities2);
 
-      val classrels1 = Symreltab.keys proven_classrels1;
-      val classrels2 = Symreltab.keys proven_classrels2;
-      val diff_merge_classrels' =
-        subtract (op =) classrels1 classrels2 @
-        subtract (op =) classrels2 classrels1 @
-        diff_merge_classrels1 @ diff_merge_classrels2;
+      val diff_classrels' =
+        diff_table proven_classrels1 proven_classrels2 @
+        diff_table proven_classrels2 proven_classrels1 @
+        diff_classrels1 @ diff_classrels2;
 
       val inst_params' =
         (Symtab.join (K (Symtab.merge (K true))) (#1 inst_params1, #1 inst_params2),
           Symtab.merge (K true) (#2 inst_params1, #2 inst_params2));
     in
-      make_data (axclasses', params', proven_classrels', proven_arities', inst_params',
-        diff_merge_classrels')
+      make_data
+        (axclasses', params', proven_classrels', proven_arities', inst_params', diff_classrels')
     end;
 );
 
 fun map_data f =
-  Data.map (fn Data {axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels} =>
-    make_data (f (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels)));
+  Data.map (fn Data {axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels} =>
+    make_data (f (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels)));
 
 fun map_axclasses f =
-  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels) =>
-    (f axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels));
+  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels) =>
+    (f axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels));
 
 fun map_params f =
-  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels) =>
-    (axclasses, f params, proven_classrels, proven_arities, inst_params, diff_merge_classrels));
+  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels) =>
+    (axclasses, f params, proven_classrels, proven_arities, inst_params, diff_classrels));
 
 fun map_proven_classrels f =
-  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels) =>
-    (axclasses, params, f proven_classrels, proven_arities, inst_params, diff_merge_classrels));
+  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels) =>
+    (axclasses, params, f proven_classrels, proven_arities, inst_params, diff_classrels));
 
 fun map_proven_arities f =
-  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels) =>
-    (axclasses, params, proven_classrels, f proven_arities, inst_params, diff_merge_classrels));
+  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels) =>
+    (axclasses, params, proven_classrels, f proven_arities, inst_params, diff_classrels));
 
 fun map_inst_params f =
-  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_merge_classrels) =>
-    (axclasses, params, proven_classrels, proven_arities, f inst_params, diff_merge_classrels));
+  map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, diff_classrels) =>
+    (axclasses, params, proven_classrels, proven_arities, f inst_params, diff_classrels));
 
-val clear_diff_merge_classrels =
+val clear_diff_classrels =
   map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params, _) =>
     (axclasses, params, proven_classrels, proven_arities, inst_params, []));
 
@@ -162,7 +163,7 @@
 val proven_classrels_of = #proven_classrels o rep_data;
 val proven_arities_of = #proven_arities o rep_data;
 val inst_params_of = #inst_params o rep_data;
-val diff_merge_classrels_of = #diff_merge_classrels o rep_data;
+val diff_classrels_of = #diff_classrels o rep_data;
 
 
 (* axclasses with parameters *)
@@ -187,38 +188,36 @@
 fun instance_name (a, c) = Long_Name.base_name c ^ "_" ^ Long_Name.base_name a;
 
 
+infix 0 RSO;
+
+fun (SOME a RSO SOME b) = SOME (a RS b)
+  | (x RSO NONE) = x
+  | (NONE RSO y) = y;
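+(* Editor's note (not from the original patch): RSO composes optional theorems
+   with RS, treating NONE as an identity, e.g. SOME th RSO NONE = SOME th and
+   NONE RSO SOME th = SOME th.  Below, NONE plays the role of the trivial
+   (reflexive) class relation. *)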
+
 fun the_classrel thy (c1, c2) =
   (case Symreltab.lookup (proven_classrels_of thy) (c1, c2) of
-    SOME classrel => classrel
+    SOME thm => Thm.transfer thy thm
   | NONE => error ("Unproven class relation " ^
       Syntax.string_of_classrel (ProofContext.init thy) [c1, c2]));
 
-fun the_classrel_thm thy = Thm.transfer thy o #1 o the_classrel thy;
-fun the_classrel_prf thy = #2 o the_classrel thy;
-
 fun put_trancl_classrel ((c1, c2), th) thy =
   let
-    val cert = Thm.cterm_of thy;
-    val certT = Thm.ctyp_of thy;
-
     val classes = Sorts.classes_of (Sign.classes_of thy);
     val classrels = proven_classrels_of thy;
 
     fun reflcl_classrel (c1', c2') =
-      if c1' = c2'
-      then Thm.trivial (cert (Logic.mk_of_class (TVar ((Name.aT, 0), []), c1')))
-      else the_classrel_thm thy (c1', c2');
+      if c1' = c2' then NONE else SOME (the_classrel thy (c1', c2'));
     fun gen_classrel (c1_pred, c2_succ) =
       let
-        val th' = ((reflcl_classrel (c1_pred, c1) RS th) RS reflcl_classrel (c2, c2_succ))
-          |> Drule.instantiate' [SOME (certT (TVar ((Name.aT, 0), [])))] []
+        val th' =
+          the ((reflcl_classrel (c1_pred, c1) RSO SOME th) RSO reflcl_classrel (c2, c2_succ))
+          |> Drule.instantiate' [SOME (ctyp_of thy (TVar ((Name.aT, 0), [])))] []
           |> Thm.close_derivation;
-        val prf' = Thm.proof_of th';
-      in ((c1_pred, c2_succ), (th', prf')) end;
+      in ((c1_pred, c2_succ), th') end;
 
     val new_classrels =
       Library.map_product pair (c1 :: Graph.imm_preds classes c1) (c2 :: Graph.imm_succs classes c2)
-      |> filter_out (Symreltab.defined classrels)
+      |> filter_out ((op =) orf Symreltab.defined classrels)
       |> map gen_classrel;
     val needed = not (null new_classrels);
   in
@@ -230,75 +229,77 @@
 fun complete_classrels thy =
   let
     val classrels = proven_classrels_of thy;
-    val diff_merge_classrels = diff_merge_classrels_of thy;
+    val diff_classrels = diff_classrels_of thy;
     val (needed, thy') = (false, thy) |>
-      fold (fn c12 => fn (needed, thy) =>
-          put_trancl_classrel (c12, Symreltab.lookup classrels c12 |> the |> #1) thy
+      fold (fn rel => fn (needed, thy) =>
+          put_trancl_classrel (rel, Symreltab.lookup classrels rel |> the) thy
           |>> (fn b => needed orelse b))
-        diff_merge_classrels;
+        diff_classrels;
   in
-    if null diff_merge_classrels then NONE
-    else SOME (clear_diff_merge_classrels thy')
+    if null diff_classrels then NONE
+    else SOME (clear_diff_classrels thy')
   end;
 
 
 fun the_arity thy a (c, Ss) =
   (case AList.lookup (op =) (Symtab.lookup_list (proven_arities_of thy) a) (c, Ss) of
-    SOME arity => arity
+    SOME (thm, _) => Thm.transfer thy thm
   | NONE => error ("Unproven type arity " ^
       Syntax.string_of_arity (ProofContext.init thy) (a, Ss, [c])));
 
-fun the_arity_thm thy a c_Ss = the_arity thy a c_Ss |> #1 |> #1 |> Thm.transfer thy;
-fun the_arity_prf thy a c_Ss = the_arity thy a c_Ss |> #2;
-
 fun thynames_of_arity thy (c, a) =
   Symtab.lookup_list (proven_arities_of thy) a
-  |> map_filter (fn ((c', _), ((_, name),_)) => if c = c' then SOME name else NONE)
+  |> map_filter (fn ((c', _), (_, name)) => if c = c' then SOME name else NONE)
   |> rev;
 
-fun insert_arity_completions thy (t, ((c, Ss), ((th, thy_name), _))) arities =
+fun insert_arity_completions thy t ((c, Ss), (th, thy_name)) (finished, arities) =
   let
     val algebra = Sign.classes_of thy;
+    val ars = Symtab.lookup_list arities t;
     val super_class_completions =
       Sign.super_classes thy c
-      |> filter_out (fn c1 => exists (fn ((c2, Ss2), _) => c1 = c2
-          andalso Sorts.sorts_le algebra (Ss2, Ss)) (Symtab.lookup_list arities t));
-    val names_and_Ss = Name.names Name.context Name.aT (map (K []) Ss);
+      |> filter_out (fn c1 => exists (fn ((c2, Ss2), _) =>
+            c1 = c2 andalso Sorts.sorts_le algebra (Ss2, Ss)) ars);
+
+    val names = Name.invents Name.context Name.aT (length Ss);
+    val std_vars = map (fn a => SOME (ctyp_of thy (TVar ((a, 0), [])))) names;
+
     val completions = super_class_completions |> map (fn c1 =>
       let
-        val th1 = (th RS the_classrel_thm thy (c, c1))
-          |> Drule.instantiate' (map (SOME o ctyp_of thy o TVar o apfst (rpair 0)) names_and_Ss) []
+        val th1 =
+          (th RS the_classrel thy (c, c1))
+          |> Drule.instantiate' std_vars []
           |> Thm.close_derivation;
-        val prf1 = Thm.proof_of th1;
-      in (((th1, thy_name), prf1), c1) end);
-    val arities' = fold (fn (th_thy_prf1, c1) => Symtab.cons_list (t, ((c1, Ss), th_thy_prf1)))
-      completions arities;
-  in (null completions, arities') end;
+      in ((th1, thy_name), c1) end);
+
+    val finished' = finished andalso null completions;
+    val arities' = fold (fn (th, c1) => Symtab.cons_list (t, ((c1, Ss), th))) completions arities;
+  in (finished', arities') end;
 
 fun put_arity ((t, Ss, c), th) thy =
-  let
-    val arity' = (t, ((c, Ss), ((th, Context.theory_name thy), Thm.proof_of th)));
-  in
+  let val ar = ((c, Ss), (th, Context.theory_name thy)) in
     thy
     |> map_proven_arities
-      (Symtab.insert_list (eq_fst op =) arity' #>
-        insert_arity_completions thy arity' #> snd)
+      (Symtab.insert_list (eq_fst op =) (t, ar) #>
+       curry (insert_arity_completions thy t ar) true #> #2)
   end;
 
 fun complete_arities thy =
   let
     val arities = proven_arities_of thy;
-    val (finished, arities') = arities
-      |> fold_map (insert_arity_completions thy) (Symtab.dest_list arities);
+    val (finished, arities') =
+      Symtab.fold (fn (t, ars) => fold (insert_arity_completions thy t) ars) arities (true, arities);
   in
-    if forall I finished
-    then NONE
+    if finished then NONE
     else SOME (map_proven_arities (K arities') thy)
   end;
 
 val _ = Context.>> (Context.map_theory
   (Theory.at_begin complete_classrels #> Theory.at_begin complete_arities));
 
+val the_classrel_prf = Thm.proof_of oo the_classrel;
+val the_arity_prf = Thm.proof_of ooo the_arity;
+
 
 (* maintain instance parameters *)
 
@@ -309,15 +310,15 @@
 
 fun add_inst_param (c, tyco) inst =
   (map_inst_params o apfst o Symtab.map_default (c, Symtab.empty)) (Symtab.update_new (tyco, inst))
-  #> (map_inst_params o apsnd) (Symtab.update_new (fst inst, (c, tyco)));
+  #> (map_inst_params o apsnd) (Symtab.update_new (#1 inst, (c, tyco)));
 
 val inst_of_param = Symtab.lookup o #2 o inst_params_of;
-val param_of_inst = fst oo get_inst_param;
+val param_of_inst = #1 oo get_inst_param;
 
 fun inst_thms thy =
   Symtab.fold (Symtab.fold (cons o #2 o #2) o #2) (#1 (inst_params_of thy)) [];
 
-fun get_inst_tyco consts = try (fst o dest_Type o the_single o Consts.typargs consts);
+fun get_inst_tyco consts = try (#1 o dest_Type o the_single o Consts.typargs consts);
 
 fun unoverload thy = MetaSimplifier.simplify true (inst_thms thy);
 fun overload thy = MetaSimplifier.simplify true (map Thm.symmetric (inst_thms thy));
@@ -376,7 +377,7 @@
       | NONE => error ("Not a class parameter: " ^ quote c));
     val tyco = inst_tyco_of thy (c, T);
     val name_inst = instance_name (tyco, class) ^ "_inst";
-    val c' = Long_Name.base_name c ^ "_" ^ Long_Name.base_name tyco;
+    val c' = instance_name (tyco, c);
     val T' = Type.strip_sorts T;
   in
     thy
@@ -388,7 +389,7 @@
       #>> apsnd Thm.varifyT_global
       #-> (fn (_, thm) => add_inst_param (c, tyco) (c'', thm)
         #> PureThy.add_thm ((Binding.conceal (Binding.name c'), thm), [])
-        #> snd
+        #> #2
         #> pair (Const (c, T))))
     ||> Sign.restore_naming thy
   end;
@@ -399,8 +400,7 @@
     val tyco = inst_tyco_of thy (c, T);
     val (c', eq) = get_inst_param thy (c, tyco);
     val prop = Logic.mk_equals (Const (c', T), t);
-    val b' = Thm.def_binding_optional
-      (Binding.name (Long_Name.base_name c ^ "_" ^ Long_Name.base_name tyco)) b;
+    val b' = Thm.def_binding_optional (Binding.name (instance_name (tyco, c))) b;
   in
     thy
     |> Thm.add_def false false (b', prop)
@@ -426,7 +426,7 @@
   in
     thy
     |> Sign.primitive_classrel (c1, c2)
-    |> (snd oo put_trancl_classrel) ((c1, c2), th')
+    |> (#2 oo put_trancl_classrel) ((c1, c2), th')
     |> perhaps complete_arities
   end;
 
@@ -436,20 +436,23 @@
     val prop = Thm.plain_prop_of th;
     fun err () = raise THM ("add_arity: malformed type arity", 0, [th]);
     val (t, Ss, c) = Logic.dest_arity prop handle TERM _ => err ();
-    val names = Name.names Name.context Name.aT Ss;
-    val T = Type (t, map TFree names);
+
+    val args = Name.names Name.context Name.aT Ss;
+    val T = Type (t, map TFree args);
+    val std_vars = map (fn (a, S) => SOME (ctyp_of thy (TVar ((a, 0), S)))) args;
+
     val missing_params = Sign.complete_sort thy [c]
       |> maps (these o Option.map #params o try (get_info thy))
       |> filter_out (fn (const, _) => can (get_inst_param thy) (const, t))
       |> (map o apsnd o map_atyps) (K T);
     val _ = map (Sign.certify_sort thy) Ss = Ss orelse err ();
     val th' = th
-      |> Drule.instantiate' (map (SOME o ctyp_of thy o TVar o apfst (rpair 0)) names) []
+      |> Drule.instantiate' std_vars []
       |> Thm.unconstrain_allTs;
     val _ = shyps_topped th' orelse raise Fail "add_arity: nontop shyps after unconstrain";
   in
     thy
-    |> fold (snd oo declare_overloaded) missing_params
+    |> fold (#2 oo declare_overloaded) missing_params
     |> Sign.primitive_arity (t, Ss, [c])
     |> put_arity ((t, Ss, c), th')
   end;
@@ -585,9 +588,9 @@
     val axclass = make_axclass (def, intro, axioms, params);
     val result_thy =
       facts_thy
-      |> fold (snd oo put_trancl_classrel) (map (pair class) super ~~ classrel)
+      |> fold (#2 oo put_trancl_classrel) (map (pair class) super ~~ classrel)
       |> Sign.qualified_path false bconst
-      |> PureThy.note_thmss "" (name_atts ~~ map Thm.simple_fact (unflat axiomss axioms)) |> snd
+      |> PureThy.note_thmss "" (name_atts ~~ map Thm.simple_fact (unflat axiomss axioms)) |> #2
       |> Sign.restore_naming facts_thy
       |> map_axclasses (Symtab.update (class, axclass))
       |> map_params (fold (fn (x, _) => add_param pp (x, class)) params);
@@ -600,8 +603,7 @@
 
 local
 
-(* old-style axioms *)
-
+(*old-style axioms*)
 fun add_axiom (b, prop) =
   Thm.add_axiom (b, prop) #->
   (fn (_, thm) => PureThy.add_thm ((b, Drule.export_without_context thm), []));
--- a/src/Pure/meta_simplifier.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/meta_simplifier.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -834,7 +834,6 @@
   in if msg then trace_thm (fn () => "SUCCEEDED") ss thm' else (); SOME thm'' end
   handle THM _ =>
     let
-      val thy = Thm.theory_of_thm thm;
       val _ $ _ $ prop0 = Thm.prop_of thm;
     in
       trace_thm (fn () => "Proved wrong thm (Check subgoaler?)") ss thm';
--- a/src/Pure/tactic.ML	Tue Apr 27 11:03:04 2010 -0700
+++ b/src/Pure/tactic.ML	Tue Apr 27 11:17:50 2010 -0700
@@ -188,9 +188,6 @@
   let val (_, _, Bi, _) = dest_state (st, i)
   in Term.rename_wrt_term Bi (Logic.strip_params Bi) end;
 
-(*params of subgoal i as they are printed*)
-fun params_of_state i st = rev (innermost_params i st);
-
 
 (*** Applications of cut_rl ***)