*** empty log message ***
authornipkow
Wed, 11 Oct 2000 09:09:06 +0200
changeset 10186 499637e8f2c6
parent 10185 c452fea3ce74
child 10187 0376cccd9118
*** empty log message ***
doc-src/TutorialI/Advanced/advanced.tex
doc-src/TutorialI/Advanced/document/simp.tex
doc-src/TutorialI/Advanced/simp.thy
doc-src/TutorialI/CTL/Base.thy
doc-src/TutorialI/CTL/CTL.thy
doc-src/TutorialI/CTL/PDL.thy
doc-src/TutorialI/CTL/document/Base.tex
doc-src/TutorialI/CTL/document/CTL.tex
doc-src/TutorialI/CTL/document/PDL.tex
doc-src/TutorialI/Datatype/Nested.thy
doc-src/TutorialI/Datatype/document/Nested.tex
doc-src/TutorialI/IsaMakefile
doc-src/TutorialI/Misc/AdvancedInd.thy
doc-src/TutorialI/Misc/document/AdvancedInd.tex
doc-src/TutorialI/Recdef/Nested0.thy
doc-src/TutorialI/Recdef/Nested1.thy
doc-src/TutorialI/Recdef/Nested2.thy
doc-src/TutorialI/Recdef/ROOT.ML
doc-src/TutorialI/Recdef/document/Nested0.tex
doc-src/TutorialI/Recdef/document/Nested1.tex
doc-src/TutorialI/Recdef/document/termination.tex
doc-src/TutorialI/Recdef/termination.thy
doc-src/TutorialI/todo.tobias
doc-src/manual.bib
src/HOL/Gfp.ML
src/HOL/IMP/Denotation.ML
src/HOL/IMP/Hoare.ML
src/HOL/Induct/LList.ML
src/HOL/Lfp.ML
src/HOL/NatDef.ML
src/HOL/Real/PNat.ML
src/HOL/Tools/inductive_package.ML
src/HOL/Trancl.ML
src/HOL/While.ML
src/HOL/ex/set.ML
--- a/doc-src/TutorialI/Advanced/advanced.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Advanced/advanced.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -12,13 +12,19 @@
 \input{Advanced/document/simp.tex}
 
 \section{Advanced forms of recursion}
-\label{sec:advanced-recdef}
 \index{*recdef|(}
+
+\subsection{Recursion over nested datatypes}
+\label{sec:nested-recdef}
 \input{Recdef/document/Nested0.tex}
 \input{Recdef/document/Nested1.tex}
 \input{Recdef/document/Nested2.tex}
 \index{*recdef|)}
 
+\subsection{Beyond measure}
+\label{sec:wellfounded}
+\input{Recdef/document/WFrec.tex}
+
 \section{Advanced induction techniques}
 \label{sec:advanced-ind}
 \index{induction|(}
--- a/doc-src/TutorialI/Advanced/document/simp.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Advanced/document/simp.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -126,16 +126,71 @@
 \label{sec:SimpHow}
 Roughly speaking, the simplifier proceeds bottom-up (subterms are simplified
 first) and a conditional equation is only applied if its condition could be
-proved (again by simplification). Below we explain some special%
+proved (again by simplification). Below we explain some special features of the rewriting process.%
 \end{isamarkuptext}%
 %
 \isamarkupsubsubsection{Higher-order patterns}
 %
-\isamarkupsubsubsection{Local assumptions}
+\begin{isamarkuptext}%
+\index{simplification rule|(}
+So far we have pretended the simplifier can deal with arbitrary
+rewrite rules. This is not quite true.  For efficiency (and
+potentially also computability) reasons, the simplifier expects the
+left-hand side of each rule to be a so-called \emph{higher-order
+pattern}~\cite{nipkow-patterns}\indexbold{higher-order
+pattern}\indexbold{pattern, higher-order}. This restricts where
+unknowns may occur.  Higher-order patterns are terms in $\beta$-normal
+form (this will always be the case unless you have done something
+strange) where each occurrence of an unknown is of the form
+$\Var{f}~x@1~\dots~x@n$, where the $x@i$ are distinct bound
+variables. Thus all ``standard'' rewrite rules, where all unknowns are
+of base type, for example \isa{{\isacharquery}m\ {\isacharplus}\ {\isacharquery}n\ {\isacharplus}\ {\isacharquery}k\ {\isacharequal}\ {\isacharquery}m\ {\isacharplus}\ {\isacharparenleft}{\isacharquery}n\ {\isacharplus}\ {\isacharquery}k{\isacharparenright}}, are OK: if an unknown is
+of base type, it cannot have any arguments. Additionally, the rule
+\isa{{\isacharparenleft}{\isasymforall}x{\isachardot}\ {\isacharquery}P\ x\ {\isasymand}\ {\isacharquery}Q\ x{\isacharparenright}\ {\isacharequal}\ {\isacharparenleft}{\isacharparenleft}{\isasymforall}x{\isachardot}\ {\isacharquery}P\ x{\isacharparenright}\ {\isasymand}\ {\isacharparenleft}{\isasymforall}x{\isachardot}\ {\isacharquery}Q\ x{\isacharparenright}{\isacharparenright}} is also OK, in
+both directions: all arguments of the unknowns \isa{{\isacharquery}P} and
+\isa{{\isacharquery}Q} are distinct bound variables.
+
+If the left-hand side is not a higher-order pattern, not all is lost
+and the simplifier will still try to apply the rule, but only if it
+matches ``directly'', i.e.\ without much $\lambda$-calculus hocus
+pocus. For example, \isa{{\isacharquery}f\ {\isacharquery}x\ {\isasymin}\ range\ {\isacharquery}f\ {\isacharequal}\ True} rewrites
+\isa{g\ a\ {\isasymin}\ range\ g} to \isa{True}, but will fail to match
+\isa{g{\isacharparenleft}h\ b{\isacharparenright}\ {\isasymin}\ range{\isacharparenleft}{\isasymlambda}x{\isachardot}\ g{\isacharparenleft}h\ x{\isacharparenright}{\isacharparenright}}.  However, you can
+replace the offending subterms (in our case \isa{{\isacharquery}f\ {\isacharquery}x}, which
+is not a pattern) by adding new variables and conditions: \isa{{\isacharquery}y\ {\isacharequal}\ {\isacharquery}f\ {\isacharquery}x\ {\isasymLongrightarrow}\ {\isacharquery}y\ {\isasymin}\ range\ {\isacharquery}f\ {\isacharequal}\ True} is fine
+as a conditional rewrite rule since conditions can be arbitrary
+terms. However, this trick is not a panacea because the newly
+introduced conditions may be hard to prove, and this proof has to
+succeed before the rule can actually be applied.
+  
+There is basically no restriction on the form of the right-hand
+sides.  They may not contain extraneous term or type variables, though.%
+\end{isamarkuptext}%
 %
 \isamarkupsubsubsection{The preprocessor}
 %
 \begin{isamarkuptext}%
+When some theorem is declared a simplification rule, it need not be a
+conditional equation already.  The simplifier will turn it into a set of
+conditional equations automatically.  For example, given \isa{f\ x\ {\isacharequal}\ g\ x\ {\isasymand}\ h\ x\ {\isacharequal}\ k\ x} the simplifier will turn this into the two separate
+simplification rules \isa{f\ x\ {\isacharequal}\ g\ x} and \isa{h\ x\ {\isacharequal}\ k\ x}. In
+general, the input theorem is converted as follows:
+\begin{eqnarray}
+\neg P &\mapsto& P = False \nonumber\\
+P \longrightarrow Q &\mapsto& P \Longrightarrow Q \nonumber\\
+P \land Q &\mapsto& P,\ Q \nonumber\\
+\forall x.~P~x &\mapsto& P~\Var{x}\nonumber\\
+\forall x \in A.\ P~x &\mapsto& \Var{x} \in A \Longrightarrow P~\Var{x} \nonumber\\
+\isa{if}\ P\ \isa{then}\ Q\ \isa{else}\ R &\mapsto&
+ P \Longrightarrow Q,\ \neg P \Longrightarrow R \nonumber
+\end{eqnarray}
+Once this conversion process is finished, all remaining non-equations
+$P$ are turned into trivial equations $P = True$.
+For example, the formula \isa{{\isacharparenleft}p\ {\isasymlongrightarrow}\ q\ {\isasymand}\ r{\isacharparenright}\ {\isasymand}\ s} is converted into the three rules
+\begin{center}
+\isa{p\ {\isasymLongrightarrow}\ q\ {\isacharequal}\ True},\quad  \isa{p\ {\isasymLongrightarrow}\ r\ {\isacharequal}\ True},\quad  \isa{s\ {\isacharequal}\ True}.
+\end{center}
+\index{simplification rule|)}
 \index{simplification|)}%
 \end{isamarkuptext}%
 \end{isabellebody}%
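As a concrete rendering of the trick described above for non-pattern left-hand sides, here is a minimal sketch in Isabelle/HOL; the lemma name is invented for illustration and the concluding auto step is merely assumed to close the trivial proof:

  (* the non-pattern rule  ?f ?x \<in> range ?f = True  restated as a
     conditional rewrite rule whose left-hand side is a pattern;
     the name in_range_conv is hypothetical *)
  lemma in_range_conv [simp]: "y = f x \<Longrightarrow> (y \<in> range f) = True"
  by auto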
--- a/doc-src/TutorialI/Advanced/simp.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Advanced/simp.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -114,16 +114,72 @@
 text{*\label{sec:SimpHow}
 Roughly speaking, the simplifier proceeds bottom-up (subterms are simplified
 first) and a conditional equation is only applied if its condition could be
-proved (again by simplification). Below we explain some special 
+proved (again by simplification). Below we explain some special features of the rewriting process.
 *}
 
 subsubsection{*Higher-order patterns*}
 
-subsubsection{*Local assumptions*}
+text{*\index{simplification rule|(}
+So far we have pretended the simplifier can deal with arbitrary
+rewrite rules. This is not quite true.  For efficiency (and
+potentially also computability) reasons, the simplifier expects the
+left-hand side of each rule to be a so-called \emph{higher-order
+pattern}~\cite{nipkow-patterns}\indexbold{higher-order
+pattern}\indexbold{pattern, higher-order}. This restricts where
+unknowns may occur.  Higher-order patterns are terms in $\beta$-normal
+form (this will always be the case unless you have done something
+strange) where each occurrence of an unknown is of the form
+$\Var{f}~x@1~\dots~x@n$, where the $x@i$ are distinct bound
+variables. Thus all ``standard'' rewrite rules, where all unknowns are
+of base type, for example @{thm add_assoc}, are OK: if an unknown is
+of base type, it cannot have any arguments. Additionally, the rule
+@{text"(\<forall>x. ?P x \<and> ?Q x) = ((\<forall>x. ?P x) \<and> (\<forall>x. ?Q x))"} is also OK, in
+both directions: all arguments of the unknowns @{text"?P"} and
+@{text"?Q"} are distinct bound variables.
+
+If the left-hand side is not a higher-order pattern, not all is lost
+and the simplifier will still try to apply the rule, but only if it
+matches ``directly'', i.e.\ without much $\lambda$-calculus hocus
+pocus. For example, @{text"?f ?x \<in> range ?f = True"} rewrites
+@{term"g a \<in> range g"} to @{term True}, but will fail to match
+@{text"g(h b) \<in> range(\<lambda>x. g(h x))"}.  However, you can
+replace the offending subterms (in our case @{text"?f ?x"}, which
+is not a pattern) by adding new variables and conditions: @{text"?y =
+?f ?x \<Longrightarrow> ?y \<in> range ?f = True"} is fine
+as a conditional rewrite rule since conditions can be arbitrary
+terms. However, this trick is not a panacea because the newly
+introduced conditions may be hard to prove, and this proof has to
+succeed before the rule can actually be applied.
+  
+There is basically no restriction on the form of the right-hand
+sides.  They may not contain extraneous term or type variables, though.
+*}
 
 subsubsection{*The preprocessor*}
 
 text{*
+When some theorem is declared a simplification rule, it need not be a
+conditional equation already.  The simplifier will turn it into a set of
+conditional equations automatically.  For example, given @{prop"f x =
+g x & h x = k x"} the simplifier will turn this into the two separate
+simplification rules @{prop"f x = g x"} and @{prop"h x = k x"}. In
+general, the input theorem is converted as follows:
+\begin{eqnarray}
+\neg P &\mapsto& P = False \nonumber\\
+P \longrightarrow Q &\mapsto& P \Longrightarrow Q \nonumber\\
+P \land Q &\mapsto& P,\ Q \nonumber\\
+\forall x.~P~x &\mapsto& P~\Var{x}\nonumber\\
+\forall x \in A.\ P~x &\mapsto& \Var{x} \in A \Longrightarrow P~\Var{x} \nonumber\\
+@{text if}\ P\ @{text then}\ Q\ @{text else}\ R &\mapsto&
+ P \Longrightarrow Q,\ \neg P \Longrightarrow R \nonumber
+\end{eqnarray}
+Once this conversion process is finished, all remaining non-equations
+$P$ are turned into trivial equations $P = True$.
+For example, the formula @{prop"(p \<longrightarrow> q \<and> r) \<and> s"} is converted into the three rules
+\begin{center}
+@{prop"p \<Longrightarrow> q = True"},\quad  @{prop"p \<Longrightarrow> r = True"},\quad  @{prop"s = True"}.
+\end{center}
+\index{simplification rule|)}
 \index{simplification|)}
 *}
 (*<*)
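To see the preprocessor's conversion in action, one can declare a single conjunctive theorem as a simplification rule; a small sketch using two standard list facts (the lemma name rev_facts is made up, and simp is assumed to prove both conjuncts):

  (* declared as one theorem, but the preprocessor splits it into the two
     rewrite rules  rev (rev xs) = xs  and  length (rev xs) = length xs *)
  lemma rev_facts [simp]: "rev (rev xs) = xs \<and> length (rev xs) = length xs"
  by simp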
--- a/doc-src/TutorialI/CTL/Base.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/CTL/Base.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -1,12 +1,12 @@
 (*<*)theory Base = Main:(*>*)
 
-section{*A verified model checker*}
+section{*Case study: Verified model checkers*}
 
 text{*
 Model checking is a very popular technique for the verification of finite
 state systems (implementations) w.r.t.\ temporal logic formulae
-(specifications) \cite{Clark}. Its foundations are completely set theoretic
-and this section shall develop them in HOL. This is done in two steps: first
+(specifications) \cite{Clarke,Huth-Ryan-book}. Its foundations are completely set theoretic
+and this section shall explore them a little in HOL. This is done in two steps: first
 we consider a simple modal logic called propositional dynamic
 logic (PDL) which we then extend to the temporal logic CTL used in many real
 model checkers. In each case we give both a traditional semantics (@{text \<Turnstile>}) and a
@@ -45,8 +45,9 @@
 Each state has a unique name or number ($s_0,s_1,s_2$), and in each
 state certain \emph{atomic propositions} ($p,q,r$) are true.
 The aim of temporal logic is to formalize statements such as ``there is no
-transition sequence starting from $s_2$ leading to a state where $p$ or $q$
-are true''.
+path starting from $s_2$ leading to a state where $p$ or $q$
+are true'', which is true, and ``on all paths starting from $s_0$, $q$ is always true'',
+which is false.
 
 Abstracting from this concrete example, we assume there is some type of
 states
--- a/doc-src/TutorialI/CTL/CTL.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/CTL/CTL.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -91,9 +91,9 @@
 apply(rule subsetI);
 apply(simp, clarify);
 apply(erule converse_rtrancl_induct);
- apply(rule ssubst[OF lfp_Tarski[OF mono_ef]]);
+ apply(rule ssubst[OF lfp_unfold[OF mono_ef]]);
  apply(blast);
-apply(rule ssubst[OF lfp_Tarski[OF mono_ef]]);
+apply(rule ssubst[OF lfp_unfold[OF mono_ef]]);
 by(blast);
 (*>*)
 text{*
@@ -174,7 +174,7 @@
 lemma not_in_lfp_afD:
  "s \<notin> lfp(af A) \<Longrightarrow> s \<notin> A \<and> (\<exists> t. (s,t)\<in>M \<and> t \<notin> lfp(af A))";
 apply(erule swap);
-apply(rule ssubst[OF lfp_Tarski[OF mono_af]]);
+apply(rule ssubst[OF lfp_unfold[OF mono_af]]);
 apply(simp add:af_def);
 done;
 
@@ -380,19 +380,21 @@
 @{text[display]"s \<Turnstile> EU f g = (\<exists>p\<in>Paths s. \<exists>j. p j \<Turnstile> g \<and> (\<exists>i < j. p i \<Turnstile> f))"}
 and model checking algorithm
 @{text[display]"mc(EU f g) = lfp(\<lambda>T. mc g \<union> mc f \<inter> (M^-1 ^^ T))"}
-Prove the equivalence between semantics and model checking, i.e.\
-@{prop"mc(EU f g) = {s. s \<Turnstile> EU f g}"}.
+Prove the equivalence between semantics and model checking, i.e.\ that
+@{prop[display]"mc(EU f g) = {s. s \<Turnstile> EU f g}"}
+%For readability you may want to annotate {term EU} with its customary syntax
+%{text[display]"| EU formula formula    E[_ U _]"}
+%which enables you to read and write {text"E[f U g]"} instead of {term"EU f g"}.
+\end{exercise}
+For more CTL exercises see, for example, \cite{Huth-Ryan-book,Clarke-as-well?}.
+\bigskip
 
-For readability you may want to equip @{term EU} with the following customary syntax:
-@{text"E[f U g]"}.
-\end{exercise}
-
-Let us close this section with a few words about the executability of @{term mc}.
+Let us close this section with a few words about the executability of our model checkers.
 It is clear that if all sets are finite, they can be represented as lists and the usual
 set operations are easily implemented. Only @{term lfp} requires a little thought.
 Fortunately the HOL library proves that in the case of finite sets and a monotone @{term F},
 @{term"lfp F"} can be computed by iterated application of @{term F} to @{term"{}"} until
-a fixpoint is reached. It is possible to generate executable functional programs
+a fixpoint is reached. It is actually possible to generate executable functional programs
 from HOL definitions, but that is beyond the scope of the tutorial.
 *}
 
@@ -428,7 +430,7 @@
 apply(subgoal_tac "wf{(y,x). (x,y)\<in>M \<and> x \<in> Avoid s A \<and> y \<in> Avoid s A \<and> x \<notin> A}");
  apply(erule_tac a = t in wf_induct);
  apply(clarsimp);
- apply(rule ssubst [OF lfp_Tarski[OF mono_af]]);
+ apply(rule ssubst [OF lfp_unfold[OF mono_af]]);
  apply(unfold af_def);
  apply(blast intro:Avoid.intros);
 apply(erule contrapos2);
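The iteration alluded to above can be phrased with the while combinator from HOL/While.thy (cf. the src/HOL/While.ML changes below); a rough sketch, where lfp_iter is an invented name and the right-hand side mirrors the lfp_conv_while lemma:

  (* iterate f, starting from ({}, f {}), until both components agree;
     under the finiteness and monotonicity assumptions this reaches lfp f *)
  constdefs lfp_iter :: "('a set \<Rightarrow> 'a set) \<Rightarrow> 'a set"
    "lfp_iter f \<equiv> fst (while (\<lambda>(A,fA). A \<noteq> fA) (\<lambda>(A,fA). (fA, f fA)) ({}, f {}))"

Whether the loop terminates for a given f is of course a separate question; the point is only that the computation itself is expressible inside HOL.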
--- a/doc-src/TutorialI/CTL/PDL.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/CTL/PDL.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -158,7 +158,7 @@
 is solved by unrolling @{term lfp} once
 *}
 
- apply(rule ssubst[OF lfp_Tarski[OF mono_ef]])
+ apply(rule ssubst[OF lfp_unfold[OF mono_ef]])
 (*pr(latex xsymbols symbols);*)
 txt{*
 \begin{isabelle}
@@ -173,7 +173,7 @@
 The proof of the induction step is identical to the one for the base case:
 *}
 
-apply(rule ssubst[OF lfp_Tarski[OF mono_ef]])
+apply(rule ssubst[OF lfp_unfold[OF mono_ef]])
 apply(blast)
 done
 
@@ -213,7 +213,7 @@
 lemma "(s \<Turnstile> EF f) = (s \<Turnstile> f | s \<Turnstile> Neg(AX(Neg(EF f))))";
 apply(simp only:aux);
 apply(simp);
-apply(rule ssubst[OF lfp_Tarski[OF mono_ef]], fast);
+apply(rule ssubst[OF lfp_unfold[OF mono_ef]], fast);
 done
 
 end
--- a/doc-src/TutorialI/CTL/document/Base.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/CTL/document/Base.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -2,13 +2,13 @@
 \begin{isabellebody}%
 \def\isabellecontext{Base}%
 %
-\isamarkupsection{A verified model checker}
+\isamarkupsection{Case study: Verified model checkers}
 %
 \begin{isamarkuptext}%
 Model checking is a very popular technique for the verification of finite
 state systems (implementations) w.r.t.\ temporal logic formulae
-(specifications) \cite{Clark}. Its foundations are completely set theoretic
-and this section shall develop them in HOL. This is done in two steps: first
+(specifications) \cite{Clarke,Huth-Ryan-book}. Its foundations are completely set theoretic
+and this section shall explore them a little in HOL. This is done in two steps: first
 we consider a simple modal logic called propositional dynamic
 logic (PDL) which we then extend to the temporal logic CTL used in many real
 model checkers. In each case we give both a traditional semantics (\isa{{\isasymTurnstile}}) and a
@@ -47,8 +47,9 @@
 Each state has a unique name or number ($s_0,s_1,s_2$), and in each
 state certain \emph{atomic propositions} ($p,q,r$) are true.
 The aim of temporal logic is to formalize statements such as ``there is no
-transition sequence starting from $s_2$ leading to a state where $p$ or $q$
-are true''.
+path starting from $s_2$ leading to a state where $p$ or $q$
+are true'', which is true, and ``on all paths starting from $s_0$, $q$ is always true'',
+which is false.
 
 Abstracting from this concrete example, we assume there is some type of
 states%
--- a/doc-src/TutorialI/CTL/document/CTL.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/CTL/document/CTL.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -114,7 +114,7 @@
 \isacommand{lemma}\ not{\isacharunderscore}in{\isacharunderscore}lfp{\isacharunderscore}afD{\isacharcolon}\isanewline
 \ {\isachardoublequote}s\ {\isasymnotin}\ lfp{\isacharparenleft}af\ A{\isacharparenright}\ {\isasymLongrightarrow}\ s\ {\isasymnotin}\ A\ {\isasymand}\ {\isacharparenleft}{\isasymexists}\ t{\isachardot}\ {\isacharparenleft}s{\isacharcomma}t{\isacharparenright}{\isasymin}M\ {\isasymand}\ t\ {\isasymnotin}\ lfp{\isacharparenleft}af\ A{\isacharparenright}{\isacharparenright}{\isachardoublequote}\isanewline
 \isacommand{apply}{\isacharparenleft}erule\ swap{\isacharparenright}\isanewline
-\isacommand{apply}{\isacharparenleft}rule\ ssubst{\isacharbrackleft}OF\ lfp{\isacharunderscore}Tarski{\isacharbrackleft}OF\ mono{\isacharunderscore}af{\isacharbrackright}{\isacharbrackright}{\isacharparenright}\isanewline
+\isacommand{apply}{\isacharparenleft}rule\ ssubst{\isacharbrackleft}OF\ lfp{\isacharunderscore}unfold{\isacharbrackleft}OF\ mono{\isacharunderscore}af{\isacharbrackright}{\isacharbrackright}{\isacharparenright}\isanewline
 \isacommand{apply}{\isacharparenleft}simp\ add{\isacharcolon}af{\isacharunderscore}def{\isacharparenright}\isanewline
 \isacommand{done}%
 \begin{isamarkuptext}%
@@ -283,19 +283,23 @@
 \begin{isabelle}%
 \ \ \ \ \ mc{\isacharparenleft}EU\ f\ g{\isacharparenright}\ {\isacharequal}\ lfp{\isacharparenleft}{\isasymlambda}T{\isachardot}\ mc\ g\ {\isasymunion}\ mc\ f\ {\isasyminter}\ {\isacharparenleft}M{\isacharcircum}{\isacharminus}\isadigit{1}\ {\isacharcircum}{\isacharcircum}\ T{\isacharparenright}{\isacharparenright}%
 \end{isabelle}
-Prove the equivalence between semantics and model checking, i.e.\
-\isa{mc\ {\isacharparenleft}EU\ f\ g{\isacharparenright}\ {\isacharequal}\ {\isacharbraceleft}s{\isachardot}\ s\ {\isasymTurnstile}\ EU\ f\ g{\isacharbraceright}}.
+Prove the equivalence between semantics and model checking, i.e.\ that
+\begin{isabelle}%
+\ \ \ \ \ mc\ {\isacharparenleft}EU\ f\ g{\isacharparenright}\ {\isacharequal}\ {\isacharbraceleft}s{\isachardot}\ s\ {\isasymTurnstile}\ EU\ f\ g{\isacharbraceright}%
+\end{isabelle}
+%For readability you may want to annotate {term EU} with its customary syntax
+%{text[display]"| EU formula formula    E[_ U _]"}
+%which enables you to read and write {text"E[f U g]"} instead of {term"EU f g"}.
+\end{exercise}
+For more CTL exercises see, for example, \cite{Huth-Ryan-book,Clarke-as-well?}.
+\bigskip
 
-For readability you may want to equip \isa{EU} with the following customary syntax:
-\isa{E{\isacharbrackleft}f\ U\ g{\isacharbrackright}}.
-\end{exercise}
-
-Let us close this section with a few words about the executability of \isa{mc}.
+Let us close this section with a few words about the executability of our model checkers.
 It is clear that if all sets are finite, they can be represented as lists and the usual
 set operations are easily implemented. Only \isa{lfp} requires a little thought.
 Fortunately the HOL library proves that in the case of finite sets and a monotone \isa{F},
 \isa{lfp\ F} can be computed by iterated application of \isa{F} to \isa{{\isacharbraceleft}{\isacharbraceright}} until
-a fixpoint is reached. It is possible to generate executable functional programs
+a fixpoint is reached. It is actually possible to generate executable functional programs
 from HOL definitions, but that is beyond the scope of the tutorial.%
 \end{isamarkuptext}%
 \end{isabellebody}%
--- a/doc-src/TutorialI/CTL/document/PDL.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/CTL/document/PDL.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -152,7 +152,7 @@
 \end{isabelle}
 is solved by unrolling \isa{lfp} once%
 \end{isamarkuptxt}%
-\ \isacommand{apply}{\isacharparenleft}rule\ ssubst{\isacharbrackleft}OF\ lfp{\isacharunderscore}Tarski{\isacharbrackleft}OF\ mono{\isacharunderscore}ef{\isacharbrackright}{\isacharbrackright}{\isacharparenright}%
+\ \isacommand{apply}{\isacharparenleft}rule\ ssubst{\isacharbrackleft}OF\ lfp{\isacharunderscore}unfold{\isacharbrackleft}OF\ mono{\isacharunderscore}ef{\isacharbrackright}{\isacharbrackright}{\isacharparenright}%
 \begin{isamarkuptxt}%
 \begin{isabelle}
 \ \isadigit{1}{\isachardot}\ {\isasymAnd}t{\isachardot}\ t\ {\isasymin}\ A\ {\isasymLongrightarrow}\ t\ {\isasymin}\ A\ {\isasymunion}\ M{\isacharcircum}{\isacharminus}\isadigit{1}\ {\isacharcircum}{\isacharcircum}\ lfp\ {\isacharparenleft}{\isasymlambda}T{\isachardot}\ A\ {\isasymunion}\ M{\isacharcircum}{\isacharminus}\isadigit{1}\ {\isacharcircum}{\isacharcircum}\ T{\isacharparenright}
@@ -164,7 +164,7 @@
 \noindent
 The proof of the induction step is identical to the one for the base case:%
 \end{isamarkuptxt}%
-\isacommand{apply}{\isacharparenleft}rule\ ssubst{\isacharbrackleft}OF\ lfp{\isacharunderscore}Tarski{\isacharbrackleft}OF\ mono{\isacharunderscore}ef{\isacharbrackright}{\isacharbrackright}{\isacharparenright}\isanewline
+\isacommand{apply}{\isacharparenleft}rule\ ssubst{\isacharbrackleft}OF\ lfp{\isacharunderscore}unfold{\isacharbrackleft}OF\ mono{\isacharunderscore}ef{\isacharbrackright}{\isacharbrackright}{\isacharparenright}\isanewline
 \isacommand{apply}{\isacharparenleft}blast{\isacharparenright}\isanewline
 \isacommand{done}%
 \begin{isamarkuptext}%
--- a/doc-src/TutorialI/Datatype/Nested.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Datatype/Nested.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -118,7 +118,7 @@
 could derive a new induction principle as well (see
 \S\ref{sec:derive-ind}), but turns out to be simpler to define
 functions by \isacommand{recdef} instead of \isacommand{primrec}.
-The details are explained in \S\ref{sec:advanced-recdef} below.
+The details are explained in \S\ref{sec:nested-recdef} below.
 
 Of course, you may also combine mutual and nested recursion. For example,
 constructor @{text Sum} in \S\ref{sec:datatype-mut-rec} could take a list of
--- a/doc-src/TutorialI/Datatype/document/Nested.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Datatype/document/Nested.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -116,7 +116,7 @@
 could derive a new induction principle as well (see
 \S\ref{sec:derive-ind}), but turns out to be simpler to define
 functions by \isacommand{recdef} instead of \isacommand{primrec}.
-The details are explained in \S\ref{sec:advanced-recdef} below.
+The details are explained in \S\ref{sec:nested-recdef} below.
 
 Of course, you may also combine mutual and nested recursion. For example,
 constructor \isa{Sum} in \S\ref{sec:datatype-mut-rec} could take a list of
--- a/doc-src/TutorialI/IsaMakefile	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/IsaMakefile	Wed Oct 11 09:09:06 2000 +0200
@@ -87,9 +87,9 @@
 
 HOL-Recdef: HOL $(LOG)/HOL-Recdef.gz
 
-$(LOG)/HOL-Recdef.gz: $(OUT)/HOL Recdef/examples.thy Recdef/termination.thy \
+$(LOG)/HOL-Recdef.gz: $(OUT)/HOL Recdef/ROOT.ML Recdef/examples.thy Recdef/termination.thy \
   Recdef/simplification.thy Recdef/Induction.thy \
-  Recdef/Nested0.thy Recdef/Nested1.thy Recdef/Nested2.thy
+  Recdef/Nested0.thy Recdef/Nested1.thy Recdef/Nested2.thy Recdef/WFrec.thy
 	@$(ISATOOL) usedir -i true -d dvi -D document $(OUT)/HOL Recdef
 	@rm -f tutorial.dvi
 
--- a/doc-src/TutorialI/Misc/AdvancedInd.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Misc/AdvancedInd.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -275,13 +275,18 @@
 by(insert induct_lem, blast);
 
 text{*
+
 Finally we should mention that HOL already provides the mother of all
-inductions, \emph{wellfounded induction} (@{thm[source]wf_induct}):
-@{thm[display]"wf_induct"[no_vars]}
-where @{term"wf r"} means that the relation @{term"r"} is wellfounded.
+inductions, \textbf{wellfounded
+induction}\indexbold{induction!wellfounded}\index{wellfounded
+induction|see{induction, wellfounded}} (@{thm[source]wf_induct}):
+@{thm[display]wf_induct[no_vars]}
+where @{term"wf r"} means that the relation @{term r} is wellfounded
+(see \S\ref{sec:wellfounded}).
 For example, theorem @{thm[source]nat_less_induct} can be viewed (and
 derived) as a special case of @{thm[source]wf_induct} where 
-@{term"r"} is @{text"<"} on @{typ"nat"}. For details see the library.
+@{term r} is @{text"<"} on @{typ nat}. The details can be found in the HOL library.
+For a mathematical account of wellfounded induction see, for example, \cite{Baader-Nipkow}.
 *};
 
 (*<*)
--- a/doc-src/TutorialI/Misc/document/AdvancedInd.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Misc/document/AdvancedInd.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -251,14 +251,18 @@
 \isacommand{by}{\isacharparenleft}insert\ induct{\isacharunderscore}lem{\isacharcomma}\ blast{\isacharparenright}%
 \begin{isamarkuptext}%
 Finally we should mention that HOL already provides the mother of all
-inductions, \emph{wellfounded induction} (\isa{wf{\isacharunderscore}induct}):
+inductions, \textbf{wellfounded
+induction}\indexbold{induction!wellfounded}\index{wellfounded
+induction|see{induction, wellfounded}} (\isa{wf{\isacharunderscore}induct}):
 \begin{isabelle}%
 \ \ \ \ \ {\isasymlbrakk}wf\ r{\isacharsemicolon}\ {\isasymAnd}x{\isachardot}\ {\isasymforall}y{\isachardot}\ {\isacharparenleft}y{\isacharcomma}\ x{\isacharparenright}\ {\isasymin}\ r\ {\isasymlongrightarrow}\ P\ y\ {\isasymLongrightarrow}\ P\ x{\isasymrbrakk}\ {\isasymLongrightarrow}\ P\ a%
 \end{isabelle}
-where \isa{wf\ r} means that the relation \isa{r} is wellfounded.
+where \isa{wf\ r} means that the relation \isa{r} is wellfounded
+(see \S\ref{sec:wellfounded}).
 For example, theorem \isa{nat{\isacharunderscore}less{\isacharunderscore}induct} can be viewed (and
 derived) as a special case of \isa{wf{\isacharunderscore}induct} where 
-\isa{r} is \isa{{\isacharless}} on \isa{nat}. For details see the library.%
+\isa{r} is \isa{{\isacharless}} on \isa{nat}. The details can be found in the HOL library.
+For a mathematical account of wellfounded induction see, for example, \cite{Baader-Nipkow}.%
 \end{isamarkuptext}%
 \end{isabellebody}%
 %%% Local Variables:
--- a/doc-src/TutorialI/Recdef/Nested0.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/Nested0.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -17,7 +17,6 @@
 definitions and proofs about nested recursive datatypes. As an example we
 choose exercise~\ref{ex:trev-trev}:
 *}
-(* consts trev  :: "('a,'b)term => ('a,'b)term" *)
-(*<*)
-end
-(*>*)
+
+consts trev  :: "('a,'b)term \<Rightarrow> ('a,'b)term"
+(*<*)end(*>*)
--- a/doc-src/TutorialI/Recdef/Nested1.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/Nested1.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -1,7 +1,6 @@
 (*<*)
 theory Nested1 = Nested0:;
 (*>*)
-consts trev  :: "('a,'b)term \<Rightarrow> ('a,'b)term";
 
 text{*\noindent
 Although the definition of @{term trev} is quite natural, we will have
--- a/doc-src/TutorialI/Recdef/Nested2.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/Nested2.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -1,6 +1,5 @@
 (*<*)
 theory Nested2 = Nested0:;
-consts trev  :: "('a,'b)term => ('a,'b)term";
 (*>*)
 
 text{*\noindent
--- a/doc-src/TutorialI/Recdef/ROOT.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/ROOT.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -3,3 +3,4 @@
 use_thy "Induction";
 use_thy "Nested1";
 use_thy "Nested2";
+use_thy "WFrec";
--- a/doc-src/TutorialI/Recdef/document/Nested0.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/document/Nested0.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -16,7 +16,7 @@
 definitions and proofs about nested recursive datatypes. As an example we
 choose exercise~\ref{ex:trev-trev}:%
 \end{isamarkuptext}%
-\end{isabellebody}%
+\isacommand{consts}\ trev\ \ {\isacharcolon}{\isacharcolon}\ {\isachardoublequote}{\isacharparenleft}{\isacharprime}a{\isacharcomma}{\isacharprime}b{\isacharparenright}term\ {\isasymRightarrow}\ {\isacharparenleft}{\isacharprime}a{\isacharcomma}{\isacharprime}b{\isacharparenright}term{\isachardoublequote}\end{isabellebody}%
 %%% Local Variables:
 %%% mode: latex
 %%% TeX-master: "root"
--- a/doc-src/TutorialI/Recdef/document/Nested1.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/document/Nested1.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -1,7 +1,7 @@
 %
 \begin{isabellebody}%
 \def\isabellecontext{Nested1}%
-\isacommand{consts}\ trev\ \ {\isacharcolon}{\isacharcolon}\ {\isachardoublequote}{\isacharparenleft}{\isacharprime}a{\isacharcomma}{\isacharprime}b{\isacharparenright}term\ {\isasymRightarrow}\ {\isacharparenleft}{\isacharprime}a{\isacharcomma}{\isacharprime}b{\isacharparenright}term{\isachardoublequote}%
+%
 \begin{isamarkuptext}%
 \noindent
 Although the definition of \isa{trev} is quite natural, we will have
--- a/doc-src/TutorialI/Recdef/document/termination.tex	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/document/termination.tex	Wed Oct 11 09:09:06 2000 +0200
@@ -75,21 +75,7 @@
 it later on and then use it to remove the preconditions from the theorems
 about \isa{f}. In most cases this is more cumbersome than proving things
 up front.
-%FIXME, with one exception: nested recursion.
-
-Although all the above examples employ measure functions, \isacommand{recdef}
-allows arbitrary wellfounded relations. For example, termination of
-Ackermann's function requires the lexicographic product \isa{{\isacharless}{\isacharasterisk}lex{\isacharasterisk}{\isachargreater}}:%
-\end{isamarkuptext}%
-\isacommand{consts}\ ack\ {\isacharcolon}{\isacharcolon}\ {\isachardoublequote}nat{\isasymtimes}nat\ {\isasymRightarrow}\ nat{\isachardoublequote}\isanewline
-\isacommand{recdef}\ ack\ {\isachardoublequote}measure{\isacharparenleft}{\isasymlambda}m{\isachardot}\ m{\isacharparenright}\ {\isacharless}{\isacharasterisk}lex{\isacharasterisk}{\isachargreater}\ measure{\isacharparenleft}{\isasymlambda}n{\isachardot}\ n{\isacharparenright}{\isachardoublequote}\isanewline
-\ \ {\isachardoublequote}ack{\isacharparenleft}\isadigit{0}{\isacharcomma}n{\isacharparenright}\ \ \ \ \ \ \ \ \ {\isacharequal}\ Suc\ n{\isachardoublequote}\isanewline
-\ \ {\isachardoublequote}ack{\isacharparenleft}Suc\ m{\isacharcomma}\isadigit{0}{\isacharparenright}\ \ \ \ \ {\isacharequal}\ ack{\isacharparenleft}m{\isacharcomma}\ \isadigit{1}{\isacharparenright}{\isachardoublequote}\isanewline
-\ \ {\isachardoublequote}ack{\isacharparenleft}Suc\ m{\isacharcomma}Suc\ n{\isacharparenright}\ {\isacharequal}\ ack{\isacharparenleft}m{\isacharcomma}ack{\isacharparenleft}Suc\ m{\isacharcomma}n{\isacharparenright}{\isacharparenright}{\isachardoublequote}%
-\begin{isamarkuptext}%
-\noindent
-For details see the manual~\cite{isabelle-HOL} and the examples in the
-library.%
+%FIXME, with one exception: nested recursion.%
 \end{isamarkuptext}%
 \end{isabellebody}%
 %%% Local Variables:
--- a/doc-src/TutorialI/Recdef/termination.thy	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/Recdef/termination.thy	Wed Oct 11 09:09:06 2000 +0200
@@ -81,21 +81,6 @@
 about @{term f}. In most cases this is more cumbersome than proving things
 up front.
 %FIXME, with one exception: nested recursion.
-
-Although all the above examples employ measure functions, \isacommand{recdef}
-allows arbitrary wellfounded relations. For example, termination of
-Ackermann's function requires the lexicographic product @{text"<*lex*>"}:
-*}
-
-consts ack :: "nat\<times>nat \<Rightarrow> nat";
-recdef ack "measure(\<lambda>m. m) <*lex*> measure(\<lambda>n. n)"
-  "ack(0,n)         = Suc n"
-  "ack(Suc m,0)     = ack(m, 1)"
-  "ack(Suc m,Suc n) = ack(m,ack(Suc m,n))";
-
-text{*\noindent
-For details see the manual~\cite{isabelle-HOL} and the examples in the
-library.
 *}
 
 (*<*)
--- a/doc-src/TutorialI/todo.tobias	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/TutorialI/todo.tobias	Wed Oct 11 09:09:06 2000 +0200
@@ -27,15 +27,16 @@
 
 defs with = and pattern matching
 
-Why can't I declare trev at the end of Recdef/Nested0 and define it in
-Recdef/Nested1??
-
 use arith_tac in recdef to solve termination conditions?
 -> new example in Recdef/termination
 
 a tactic for replacing a specific occurrence:
 apply(substitute [2] thm)
 
+it would be nice if @term could deal with ?-vars.
+then a number of (unchecked!) @texts could be converted to @terms.
+
+
 Minor fixes in the tutorial
 ===========================
 
@@ -45,8 +46,6 @@
 
 Explain typographic conventions?
 
-how the simplifier works
-
 an example of induction: !y. A --> B --> C ??
 
 Advanced Ind expects rulify, mp and spec. How much really?
@@ -80,15 +79,18 @@
 literature. In Recdef/termination.thy, at the end.
 %FIXME, with one exception: nested recursion.
 
-Sets: rels and ^*
+Syntax section: syntax annotations not just for consts but also for constdefs and datatype.
+
+Prove EU exercise in CTL.
 
 
 Minor additions to the tutorial, unclear where
 ==============================================
 
-insert: lcp?
+Tacticals: , ? +
 
-Tacticals: , ? +
+"typedecl" is used in CTL/Base, but where is it introduced?
+In More Types chapter? Rephrase the CTL bit accordingly.
 
 print_simpset (-> print_simps?)
 (Another subsection Useful theorems, methods, and commands?)
@@ -107,7 +109,6 @@
 
 demonstrate x : set xs in Sets. Or Tricks chapter?
 
-Table with symbols \<forall> etc. Apendix?
 Appendix with HOL keywords. Say something about other keywords.
 
 
@@ -123,8 +124,6 @@
 Nested inductive datatypes: another example/exercise:
  size(t) <= size(subst s t)?
 
-Add Until to CTL.
-
 insertion sort: primrec, later recdef
 
 OTree:
--- a/doc-src/manual.bib	Wed Oct 11 00:03:22 2000 +0200
+++ b/doc-src/manual.bib	Wed Oct 11 09:09:06 2000 +0200
@@ -109,6 +109,9 @@
 
 %B
 
+@book{Baader-Nipkow,author={Franz Baader and Tobias Nipkow},
+title="Term Rewriting and All That",publisher=CUP,year=1998}
+
 @incollection{basin91,
   author	= {David Basin and Matt Kaufmann},
   title		= {The {Boyer-Moore} Prover and {Nuprl}: An Experimental
@@ -231,7 +234,7 @@
 
 @incollection{dybjer91,
   author	= {Peter Dybjer},
-  title		= {Inductive Sets and Families in {Martin-L\"of's} Type
+  title		= {Inductive Sets and Families in {Martin-L{\"o}f's} Type
 		  Theory and Their Set-Theoretic Semantics}, 
   crossref	= {huet-plotkin91},
   pages		= {280-306}}
@@ -400,7 +403,7 @@
   pages		= {31-55}}
 
 @inproceedings{huet88,
-  author	= {G\'erard Huet},
+  author	= {G{\'e}rard Huet},
   title		= {Induction Principles Formalized in the {Calculus of
 		 Constructions}}, 
   booktitle	= {Programming of Future Generation Computers},
@@ -409,6 +412,12 @@
   pages		= {205-216}, 
   publisher	= {Elsevier}}
 
+@Book{Huth-Ryan-book,
+  author	= {Michael Huth and Mark Ryan},
+  title		= {Logic in Computer Science. Modelling and reasoning about systems},
+  publisher	= CUP,
+  year		= 2000}
+
 @InProceedings{Harrison:1996:MizarHOL,
   author = 	 {J. Harrison},
   title = 	 {A {Mizar} Mode for {HOL}},
@@ -462,7 +471,7 @@
   pages		= {366-380}}
 
 @book{martinlof84,
-  author	= {Per Martin-L\"of},
+  author	= {Per Martin-L{\"o}f},
   title		= {Intuitionistic type theory},
   year		= 1984,
   publisher	= {Bibliopolis}}
@@ -520,7 +529,7 @@
 
 @article{MuellerNvOS99,
 author=
-{Olaf M\"uller and Tobias Nipkow and Oheimb, David von and Oscar Slotosch},
+{Olaf M{\"u}ller and Tobias Nipkow and Oheimb, David von and Oscar Slotosch},
 title={{HOLCF = HOL + LCF}},journal=JFP,year=1999}
 
 @Manual{Muzalewski:Mizar,
@@ -552,7 +561,7 @@
   author	= {Wolfgang Naraschewski and Tobias Nipkow},
   title		= {Type Inference Verified: Algorithm {W} in {Isabelle/HOL}},
   booktitle	= {Types for Proofs and Programs: Intl. Workshop TYPES '96},
-  editor	= {E. Gim\'enez and C. Paulin-Mohring},
+  editor	= {E. Gim{\'e}nez and C. Paulin-Mohring},
   publisher	= Springer,
   series	= LNCS,
   volume	= 1512,
@@ -607,8 +616,8 @@
 @manual{isabelle-HOL,
   author	= {Tobias Nipkow and Lawrence C. Paulson and Markus Wenzel},
   title		= {{Isabelle}'s Logics: {HOL}},
-  institution	= {Institut f\"ur Informatik, Technische Universi\"at
-                  M\"unchen and Computer Laboratory, University of Cambridge},
+  institution	= {Institut f{\"u}r Informatik, Technische Universit{\"a}t
+                  M{\"u}nchen and Computer Laboratory, University of Cambridge},
   note          = {\url{http://isabelle.in.tum.de/doc/logics-HOL.pdf}}}
 
 @article{nipkow-prehofer,
@@ -630,8 +639,8 @@
   year		= 1993}
 
 @book{nordstrom90,
-  author	= {Bengt {Nordstr\"om} and Kent Petersson and Jan Smith},
-  title		= {Programming in {Martin-L\"of}'s Type Theory.  An
+  author	= {Bengt {Nordstr{\"o}m} and Kent Petersson and Jan Smith},
+  title		= {Programming in {Martin-L{\"o}f}'s Type Theory.  An
 		 Introduction}, 
   publisher	= {Oxford University Press}, 
   year		= 1990}
@@ -974,7 +983,7 @@
 %V
 
 @Unpublished{voelker94,
-  author	= {Norbert V\"olker},
+  author	= {Norbert V{\"o}lker},
   title		= {The Verification of a Timer Program using {Isabelle/HOL}},
   url		= {\url{ftp://ftp.fernuni-hagen.de/pub/fachb/et/dvt/projects/verification/timer.tar.gz}},
   year		= 1994,
@@ -1123,7 +1132,7 @@
   publisher	= {Springer}}
 
 @book{types94,
-  editor	= {Peter Dybjer and Bengt Nordstr{\"om} and Jan Smith},
+  editor	= {Peter Dybjer and Bengt Nordstr{\"o}m and Jan Smith},
   title		= TYPES # {: International Workshop {TYPES '94}},
   booktitle	= TYPES # {: International Workshop {TYPES '94}},
   year		= 1995,
@@ -1131,14 +1140,14 @@
   series	= {LNCS 996}}
 
 @book{huet-plotkin91,
-  editor	= {{G\'erard} Huet and Gordon Plotkin},
+  editor	= {{G{\'e}rard} Huet and Gordon Plotkin},
   title		= {Logical Frameworks},
   booktitle	= {Logical Frameworks},
   publisher	= CUP,
   year		= 1991}
 
 @book{huet-plotkin93,
-  editor	= {{G\'erard} Huet and Gordon Plotkin},
+  editor	= {{G{\'e}rard} Huet and Gordon Plotkin},
   title		= {Logical Environments},
   booktitle	= {Logical Environments},
   publisher	= CUP,
@@ -1155,7 +1164,7 @@
   series	= {LNCS 780}}
 
 @proceedings{colog88,
-  editor	= {P. Martin-L\"of and G. Mints},
+  editor	= {P. Martin-L{\"o}f and G. Mints},
   title		= {COLOG-88: International Conference on Computer Logic},
   booktitle	= {COLOG-88: International Conference on Computer Logic},
   year		= {Published 1990},
--- a/src/HOL/Gfp.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/Gfp.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -32,7 +32,7 @@
 
 Goal "mono(f) ==> gfp(f) = f(gfp(f))";
 by (REPEAT (ares_tac [equalityI,gfp_lemma2,gfp_lemma3] 1));
-qed "gfp_Tarski";
+qed "gfp_unfold";
 
 (*** Coinduction rules for greatest fixed points ***)
 
@@ -72,26 +72,26 @@
 by (rtac (Un_least RS Un_least) 1);
 by (rtac subset_refl 1);
 by (assume_tac 1); 
-by (rtac (gfp_Tarski RS equalityD1 RS subset_trans) 1);
+by (rtac (gfp_unfold RS equalityD1 RS subset_trans) 1);
 by (assume_tac 1);
 by (rtac monoD 1 THEN assume_tac 1);
-by (stac (coinduct3_mono_lemma RS lfp_Tarski) 1);
+by (stac (coinduct3_mono_lemma RS lfp_unfold) 1);
 by Auto_tac;  
 qed "coinduct3_lemma";
 
 Goal
   "[| mono(f);  a:X;  X <= f(lfp(%x. f(x) Un X Un gfp(f))) |] ==> a : gfp(f)";
 by (rtac (coinduct3_lemma RSN (2,weak_coinduct)) 1);
-by (resolve_tac [coinduct3_mono_lemma RS lfp_Tarski RS ssubst] 1);
+by (resolve_tac [coinduct3_mono_lemma RS lfp_unfold RS ssubst] 1);
 by Auto_tac;
 qed "coinduct3";
 
 
-(** Definition forms of gfp_Tarski and coinduct, to control unfolding **)
+(** Definition forms of gfp_unfold and coinduct, to control unfolding **)
 
 Goal "[| A==gfp(f);  mono(f) |] ==> A = f(A)";
-by (auto_tac (claset() addSIs [gfp_Tarski], simpset()));  
-qed "def_gfp_Tarski";
+by (auto_tac (claset() addSIs [gfp_unfold], simpset()));  
+qed "def_gfp_unfold";
 
 Goal "[| A==gfp(f);  mono(f);  a:X;  X <= f(X Un A) |] ==> a: A";
 by (auto_tac (claset() addSIs [coinduct], simpset()));  
--- a/src/HOL/IMP/Denotation.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/IMP/Denotation.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -13,7 +13,7 @@
 
 Goal "C(WHILE b DO c) = C(IF b THEN c;WHILE b DO c ELSE SKIP)";
 by (Simp_tac 1);
-by (EVERY[stac (Gamma_mono RS lfp_Tarski) 1,
+by (EVERY[stac (Gamma_mono RS lfp_unfold) 1,
           stac (Gamma_def RS meta_eq_to_obj_eq RS fun_cong) 1,
           Simp_tac 1,
           IF_UNSOLVED no_tac]);
@@ -29,9 +29,9 @@
 by Auto_tac;
 (* while *)
 by (rewtac Gamma_def);
-by (stac (rewrite_rule [Gamma_def] (Gamma_mono RS lfp_Tarski)) 1);
+by (stac (rewrite_rule [Gamma_def] (Gamma_mono RS lfp_unfold)) 1);
 by (Fast_tac 1);
-by (stac (rewrite_rule [Gamma_def] (Gamma_mono RS lfp_Tarski)) 1);
+by (stac (rewrite_rule [Gamma_def] (Gamma_mono RS lfp_unfold)) 1);
 by (Fast_tac 1);
 
 qed "com1";
--- a/src/HOL/IMP/Hoare.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/IMP/Hoare.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -87,7 +87,7 @@
  by (assume_tac 2);
 by (etac induct2 1);
 by (fast_tac (claset() addSIs [monoI]) 1);
-by (stac gfp_Tarski 1);
+by (stac gfp_unfold 1);
  by (fast_tac (claset() addSIs [monoI]) 1);
 by (Fast_tac 1);
 qed "wp_While";
--- a/src/HOL/Induct/LList.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/Induct/LList.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -309,7 +309,7 @@
 qed "Lconst_fun_mono";
 
 (* Lconst(M) = CONS M (Lconst M) *)
-bind_thm ("Lconst", (Lconst_fun_mono RS (Lconst_def RS def_lfp_Tarski)));
+bind_thm ("Lconst", (Lconst_fun_mono RS (Lconst_def RS def_lfp_unfold)));
 
 (*A typical use of co-induction to show membership in the gfp.
   The containing set is simply the singleton {Lconst(M)}. *)
@@ -330,7 +330,7 @@
 Goal "gfp(%N. CONS M N) = LList_corec M (%x. Some(x,x))";
 by (rtac (equals_LList_corec RS fun_cong) 1);
 by (Simp_tac 1);
-by (rtac (Lconst_fun_mono RS gfp_Tarski) 1);
+by (rtac (Lconst_fun_mono RS gfp_unfold) 1);
 qed "gfp_Lconst_eq_LList_corec";
 
 
--- a/src/HOL/Lfp.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/Lfp.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -33,7 +33,7 @@
 
 Goal "mono(f) ==> lfp(f) = f(lfp(f))";
 by (REPEAT (ares_tac [equalityI,lfp_lemma2,lfp_lemma3] 1));
-qed "lfp_Tarski";
+qed "lfp_unfold";
 
 (*** General induction rule for least fixed points ***)
 
@@ -56,8 +56,8 @@
 (** Definition forms of lfp_Tarski and induct, to control unfolding **)
 
 Goal "[| h==lfp(f);  mono(f) |] ==> h = f(h)";
-by (auto_tac (claset() addSIs [lfp_Tarski], simpset()));  
-qed "def_lfp_Tarski";
+by (auto_tac (claset() addSIs [lfp_unfold], simpset()));  
+qed "def_lfp_unfold";
 
 val rew::prems = Goal
     "[| A == lfp(f);  mono(f);   a:A;                   \
--- a/src/HOL/NatDef.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/NatDef.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -8,7 +8,7 @@
 by (REPEAT (ares_tac [monoI, subset_refl, image_mono, Un_mono] 1));
 qed "Nat_fun_mono";
 
-bind_thm ("Nat_unfold", Nat_fun_mono RS (Nat_def RS def_lfp_Tarski));
+bind_thm ("Nat_unfold", Nat_fun_mono RS (Nat_def RS def_lfp_unfold));
 
 (* Zero is a natural number -- this also justifies the type definition*)
 Goal "Zero_Rep: Nat";
--- a/src/HOL/Real/PNat.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/Real/PNat.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -10,7 +10,7 @@
 by (REPEAT (ares_tac [monoI, subset_refl, image_mono, Un_mono] 1));
 qed "pnat_fun_mono";
 
-bind_thm ("pnat_unfold", pnat_fun_mono RS (pnat_def RS def_lfp_Tarski));
+bind_thm ("pnat_unfold", pnat_fun_mono RS (pnat_def RS def_lfp_unfold));
 
 Goal "1 : pnat";
 by (stac pnat_unfold 1);
--- a/src/HOL/Tools/inductive_package.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/Tools/inductive_package.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -416,7 +416,7 @@
     val _ = message "  Proving the introduction rules ...";
 
     val unfold = standard (mono RS (fp_def RS
-      (if coind then def_gfp_Tarski else def_lfp_Tarski)));
+      (if coind then def_gfp_unfold else def_lfp_unfold)));
 
     fun select_disj 1 1 = []
       | select_disj _ 1 = [rtac disjI1]
--- a/src/HOL/Trancl.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/Trancl.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -15,7 +15,7 @@
 by (REPEAT (ares_tac [monoI, subset_refl, comp_mono, Un_mono] 1));
 qed "rtrancl_fun_mono";
 
-bind_thm ("rtrancl_unfold", rtrancl_fun_mono RS (rtrancl_def RS def_lfp_Tarski));
+bind_thm ("rtrancl_unfold", rtrancl_fun_mono RS (rtrancl_def RS def_lfp_unfold));
 
 (*Reflexivity of rtrancl*)
 Goal "(a,a) : r^*";
--- a/src/HOL/While.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/While.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -54,7 +54,7 @@
 \ ==> lfp f = fst(while (%(A,fA). A~=fA) (%(A,fA). (fA, f fA)) ({},f{}))";
 by(res_inst_tac [("P","%(A,B).(A <= U & B = f A & A <= B & B <= lfp f)")]
      while_rule 1);
-   by(stac lfp_Tarski 1);
+   by(stac lfp_unfold 1);
     ba 1;
    by(Clarsimp_tac 1);
    by(blast_tac (claset() addDs [monoD]) 1);
@@ -67,7 +67,7 @@
  by(clarsimp_tac (claset(),simpset() addsimps
       [inv_image_def,finite_psubset_def,order_less_le]) 1);
  by(blast_tac (claset() addSIs [finite_Diff] addDs [monoD]) 1);
-by(stac lfp_Tarski 1);
+by(stac lfp_unfold 1);
  ba 1;
 by(asm_simp_tac (simpset() addsimps [monoD]) 1);
 qed "lfp_conv_while";
--- a/src/HOL/ex/set.ML	Wed Oct 11 00:03:22 2000 +0200
+++ b/src/HOL/ex/set.ML	Wed Oct 11 09:09:06 2000 +0200
@@ -102,7 +102,7 @@
 
 Goal "EX X. X = - (g``(- (f``X)))";
 by (rtac exI 1);
-by (rtac lfp_Tarski 1);
+by (rtac lfp_unfold 1);
 by (REPEAT (ares_tac [monoI, image_mono, Compl_anti_mono] 1));
 qed "decomposition";