# HG changeset patch
# User wenzelm
# Date 1352055686 -3600
# Node ID 869e485bbdba5b340ec7c53f9bdd19893543a7d8
# Parent 7c01dc2dcb8c92868070778877de6ba1698a8964
removed pointless historic material;

diff -r 7c01dc2dcb8c -r 869e485bbdba src/Doc/Ref/document/root.tex
--- a/src/Doc/Ref/document/root.tex Sun Nov 04 19:51:53 2012 +0100
+++ b/src/Doc/Ref/document/root.tex Sun Nov 04 20:01:26 2012 +0100
@@ -1,10 +1,6 @@
 \documentclass[12pt,a4paper]{report}
 \usepackage{graphicx,iman,extra,ttbox,proof,pdfsetup}
-%%% to index ids: \[\\tt \([a-zA-Z0-9][a-zA-Z0-9_'.]*\) [\\ttindexbold{\1}
-%%% to delete old ones: \\indexbold{\*[^}]*}
-%% run sedindex ref to prepare index file
-%%% needs chapter on Provers/typedsimp.ML?
 \title{\includegraphics[scale=0.5]{isabelle} \\[4ex] Old Isabelle Reference Manual}
 \author{{\em Lawrence C. Paulson}\\
@@ -12,8 +8,6 @@
 \texttt{lcp@cl.cam.ac.uk}\\[3ex]
 With Contributions by Tobias Nipkow and Markus Wenzel}
-\makeindex
-
 \setcounter{secnumdepth}{2} \setcounter{tocdepth}{2}
 \pagestyle{headings}
@@ -61,5 +55,4 @@
 \bibliography{manual}
 \endgroup
-\printindex
 \end{document}
diff -r 7c01dc2dcb8c -r 869e485bbdba src/Doc/Ref/document/simplifier.tex
--- a/src/Doc/Ref/document/simplifier.tex Sun Nov 04 19:51:53 2012 +0100
+++ b/src/Doc/Ref/document/simplifier.tex Sun Nov 04 20:01:26 2012 +0100
@@ -13,31 +13,6 @@
 rules or simplification procedures. Experienced users can exploit the other components to streamline proofs in more sophisticated manners.
-\subsection{Inspecting simpsets}
-\begin{ttbox}
-print_ss : simpset -> unit
-rep_ss : simpset -> \{mss : meta_simpset,
- subgoal_tac: simpset -> int -> tactic,
- loop_tacs : (string * (int -> tactic))list,
- finish_tac : solver list,
- unsafe_finish_tac : solver list\}
-\end{ttbox}
-\begin{ttdescription}
-
-\item[\ttindexbold{print_ss} $ss$;] displays the printable contents of
- simpset $ss$. This includes the rewrite rules and congruences in
- their internal form expressed as meta-equalities. The names of the
- simplification procedures and the patterns they are invoked on are
- also shown. The other parts, functions and tactics, are
- non-printable.
-
-\item[\ttindexbold{rep_ss} $ss$;] decomposes $ss$ as a record of its internal
- components, namely the meta_simpset, the subgoaler, the loop, and the safe
- and unsafe solvers.
-
-\end{ttdescription}
-
-
 \subsection{Building simpsets}
 \begin{ttbox}
 empty_ss : simpset
@@ -382,31 +357,6 @@
 \end{ttdescription}
-\medskip
-
-Local modifications of simpsets within a proof are often much cleaner
-by using above tactics in conjunction with explicit simpsets, rather
-than their capitalized counterparts. For example
-\begin{ttbox}
-Addsimps \(thms\);
-by (Simp_tac \(i\));
-Delsimps \(thms\);
-\end{ttbox}
-can be expressed more appropriately as
-\begin{ttbox}
-by (simp_tac (simpset() addsimps \(thms\)) \(i\));
-\end{ttbox}
-
-\medskip
-
-Also note that functions depending implicitly on the current theory
-context (like capital \texttt{Simp_tac} and the other commands of
-{\S}\ref{sec:simp-for-dummies}) should be considered harmful outside of
-actual proof scripts. In particular, ML programs like theory
-definition packages or special tactics should refer to simpsets only
-explicitly, via the above tactics used in conjunction with
-\texttt{simpset_of} or the \texttt{SIMPSET} tacticals.
-
 \section{Forward rules and conversions}
 \index{simplification!forward rules}\index{simplification!conversions}
@@ -432,12 +382,6 @@
 of the form $t \equiv u$ where the l.h.s.\ $t$ has been given as argument.
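As a concrete illustration (a hypothetical sketch, not taken from the manual itself): if the simpset contains an object-level rewrite rule such as \texttt{0 + n = n}, then applying one of these conversions to the certified term \texttt{0 + n} returns the corresponding meta-equality. The names \texttt{conv}, \texttt{ss} and \texttt{ct} below are placeholders, not part of the documented interface.
\begin{ttbox}
(* hypothetical sketch: "conv" is any conversion of the family described
   above, "ss" a simpset containing the rewrite rule  0 + n = n,  and
   "ct" the certified term  0 + n  *)
val eq = conv ss ct;
(* eq is now the meta-equality theorem   0 + n == n   *)
\end{ttbox}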
-\begin{warn} - Forward simplification rules and conversions should be used rarely - in ordinary proof scripts. The main intention is to provide an - internal interface to the simplifier for special utilities. -\end{warn} - \section{Permutative rewrite rules} \index{rewrite rules!permutative|(} @@ -601,223 +545,6 @@ \index{rewrite rules!permutative|)} - -\section{*Setting up the Simplifier}\label{sec:setting-up-simp} -\index{simplification!setting up} - -Setting up the simplifier for new logics is complicated in the general case. -This section describes how the simplifier is installed for intuitionistic -first-order logic; the code is largely taken from {\tt FOL/simpdata.ML} of the -Isabelle sources. - -The case splitting tactic, which resides on a separate files, is not part of -Pure Isabelle. It needs to be loaded explicitly by the object-logic as -follows (below \texttt{\~\relax\~\relax} refers to \texttt{\$ISABELLE_HOME}): -\begin{ttbox} -use "\~\relax\~\relax/src/Provers/splitter.ML"; -\end{ttbox} - -Simplification requires converting object-equalities to meta-level rewrite -rules. This demands rules stating that equal terms and equivalent formulae -are also equal at the meta-level. The rule declaration part of the file -\texttt{FOL/IFOL.thy} contains the two lines -\begin{ttbox}\index{*eq_reflection theorem}\index{*iff_reflection theorem} -eq_reflection "(x=y) ==> (x==y)" -iff_reflection "(P<->Q) ==> (P==Q)" -\end{ttbox} -Of course, you should only assert such rules if they are true for your -particular logic. In Constructive Type Theory, equality is a ternary -relation of the form $a=b\in A$; the type~$A$ determines the meaning -of the equality essentially as a partial equivalence relation. The -present simplifier cannot be used. Rewriting in \texttt{CTT} uses -another simplifier, which resides in the file {\tt - Provers/typedsimp.ML} and is not documented. Even this does not -work for later variants of Constructive Type Theory that use -intensional equality~\cite{nordstrom90}. - - -\subsection{A collection of standard rewrite rules} - -We first prove lots of standard rewrite rules about the logical -connectives. These include cancellation and associative laws. We -define a function that echoes the desired law and then supplies it the -prover for intuitionistic FOL: -\begin{ttbox} -fun int_prove_fun s = - (writeln s; - prove_goal IFOL.thy s - (fn prems => [ (cut_facts_tac prems 1), - (IntPr.fast_tac 1) ])); -\end{ttbox} -The following rewrite rules about conjunction are a selection of those -proved on \texttt{FOL/simpdata.ML}. Later, these will be supplied to the -standard simpset. -\begin{ttbox} -val conj_simps = map int_prove_fun - ["P & True <-> P", "True & P <-> P", - "P & False <-> False", "False & P <-> False", - "P & P <-> P", - "P & ~P <-> False", "~P & P <-> False", - "(P & Q) & R <-> P & (Q & R)"]; -\end{ttbox} -The file also proves some distributive laws. As they can cause exponential -blowup, they will not be included in the standard simpset. Instead they -are merely bound to an \ML{} identifier, for user reference. -\begin{ttbox} -val distrib_simps = map int_prove_fun - ["P & (Q | R) <-> P&Q | P&R", - "(Q | R) & P <-> Q&P | R&P", - "(P | Q --> R) <-> (P --> R) & (Q --> R)"]; -\end{ttbox} - - -\subsection{Functions for preprocessing the rewrite rules} -\label{sec:setmksimps} -\begin{ttbox}\indexbold{*setmksimps} -setmksimps : simpset * (thm -> thm list) -> simpset \hfill{\bf infix 4} -\end{ttbox} -The next step is to define the function for preprocessing rewrite rules. 
-This will be installed by calling \texttt{setmksimps} below. Preprocessing -occurs whenever rewrite rules are added, whether by user command or -automatically. Preprocessing involves extracting atomic rewrites at the -object-level, then reflecting them to the meta-level. - -To start, the function \texttt{gen_all} strips any meta-level -quantifiers from the front of the given theorem. - -The function \texttt{atomize} analyses a theorem in order to extract -atomic rewrite rules. The head of all the patterns, matched by the -wildcard~\texttt{_}, is the coercion function \texttt{Trueprop}. -\begin{ttbox} -fun atomize th = case concl_of th of - _ $ (Const("op &",_) $ _ $ _) => atomize(th RS conjunct1) \at - atomize(th RS conjunct2) - | _ $ (Const("op -->",_) $ _ $ _) => atomize(th RS mp) - | _ $ (Const("All",_) $ _) => atomize(th RS spec) - | _ $ (Const("True",_)) => [] - | _ $ (Const("False",_)) => [] - | _ => [th]; -\end{ttbox} -There are several cases, depending upon the form of the conclusion: -\begin{itemize} -\item Conjunction: extract rewrites from both conjuncts. -\item Implication: convert $P\imp Q$ to the meta-implication $P\Imp Q$ and - extract rewrites from~$Q$; these will be conditional rewrites with the - condition~$P$. -\item Universal quantification: remove the quantifier, replacing the bound - variable by a schematic variable, and extract rewrites from the body. -\item \texttt{True} and \texttt{False} contain no useful rewrites. -\item Anything else: return the theorem in a singleton list. -\end{itemize} -The resulting theorems are not literally atomic --- they could be -disjunctive, for example --- but are broken down as much as possible. -See the file \texttt{ZF/simpdata.ML} for a sophisticated translation of -set-theoretic formulae into rewrite rules. - -For standard situations like the above, -there is a generic auxiliary function \ttindexbold{mk_atomize} that takes a -list of pairs $(name, thms)$, where $name$ is an operator name and -$thms$ is a list of theorems to resolve with in case the pattern matches, -and returns a suitable \texttt{atomize} function. - - -The simplified rewrites must now be converted into meta-equalities. The -rule \texttt{eq_reflection} converts equality rewrites, while {\tt - iff_reflection} converts if-and-only-if rewrites. The latter possibility -can arise in two other ways: the negative theorem~$\neg P$ is converted to -$P\equiv\texttt{False}$, and any other theorem~$P$ is converted to -$P\equiv\texttt{True}$. The rules \texttt{iff_reflection_F} and {\tt - iff_reflection_T} accomplish this conversion. -\begin{ttbox} -val P_iff_F = int_prove_fun "~P ==> (P <-> False)"; -val iff_reflection_F = P_iff_F RS iff_reflection; -\ttbreak -val P_iff_T = int_prove_fun "P ==> (P <-> True)"; -val iff_reflection_T = P_iff_T RS iff_reflection; -\end{ttbox} -The function \texttt{mk_eq} converts a theorem to a meta-equality -using the case analysis described above. -\begin{ttbox} -fun mk_eq th = case concl_of th of - _ $ (Const("op =",_)$_$_) => th RS eq_reflection - | _ $ (Const("op <->",_)$_$_) => th RS iff_reflection - | _ $ (Const("Not",_)$_) => th RS iff_reflection_F - | _ => th RS iff_reflection_T; -\end{ttbox} -The -three functions \texttt{gen_all}, \texttt{atomize} and \texttt{mk_eq} -will be composed together and supplied below to \texttt{setmksimps}. - - -\subsection{Making the initial simpset} - -It is time to assemble these items. The list \texttt{IFOL_simps} contains the -default rewrite rules for intuitionistic first-order logic. 
The first of -these is the reflexive law expressed as the equivalence -$(a=a)\bimp\texttt{True}$; the rewrite rule $a=a$ is clearly useless. -\begin{ttbox} -val IFOL_simps = - [refl RS P_iff_T] \at conj_simps \at disj_simps \at not_simps \at - imp_simps \at iff_simps \at quant_simps; -\end{ttbox} -The list \texttt{triv_rls} contains trivial theorems for the solver. Any -subgoal that is simplified to one of these will be removed. -\begin{ttbox} -val notFalseI = int_prove_fun "~False"; -val triv_rls = [TrueI,refl,iff_refl,notFalseI]; -\end{ttbox} -We also define the function \ttindex{mk_meta_cong} to convert the conclusion -of congruence rules into meta-equalities. -\begin{ttbox} -fun mk_meta_cong rl = standard (mk_meta_eq (mk_meta_prems rl)); -\end{ttbox} -% -The basic simpset for intuitionistic FOL is \ttindexbold{FOL_basic_ss}. It -preprocess rewrites using -{\tt gen_all}, \texttt{atomize} and \texttt{mk_eq}. -It solves simplified subgoals using \texttt{triv_rls} and assumptions, and by -detecting contradictions. It uses \ttindex{asm_simp_tac} to tackle subgoals -of conditional rewrites. - -Other simpsets built from \texttt{FOL_basic_ss} will inherit these items. -In particular, \ttindexbold{IFOL_ss}, which introduces {\tt - IFOL_simps} as rewrite rules. \ttindexbold{FOL_ss} will later -extend \texttt{IFOL_ss} with classical rewrite rules such as $\neg\neg -P\bimp P$. -\index{*setmksimps}\index{*setSSolver}\index{*setSolver}\index{*setsubgoaler} -\index{*addsimps}\index{*addcongs} -\begin{ttbox} -fun unsafe_solver prems = FIRST'[resolve_tac (triv_rls {\at} prems), - atac, etac FalseE]; - -fun safe_solver prems = FIRST'[match_tac (triv_rls {\at} prems), - eq_assume_tac, ematch_tac [FalseE]]; - -val FOL_basic_ss = - empty_ss setsubgoaler asm_simp_tac - addsimprocs [defALL_regroup, defEX_regroup] - setSSolver safe_solver - setSolver unsafe_solver - setmksimps (map mk_eq o atomize o gen_all) - setmkcong mk_meta_cong; - -val IFOL_ss = - FOL_basic_ss addsimps (IFOL_simps {\at} - int_ex_simps {\at} int_all_simps) - addcongs [imp_cong]; -\end{ttbox} -This simpset takes \texttt{imp_cong} as a congruence rule in order to use -contextual information to simplify the conclusions of implications: -\[ \List{\Var{P}\bimp\Var{P'};\; \Var{P'} \Imp \Var{Q}\bimp\Var{Q'}} \Imp - (\Var{P}\imp\Var{Q}) \bimp (\Var{P'}\imp\Var{Q'}) -\] -By adding the congruence rule \texttt{conj_cong}, we could obtain a similar -effect for conjunctions. - - -\index{simplification|)} - - %%% Local Variables: %%% mode: latex %%% TeX-master: "ref"
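To make the removed description of rule preprocessing concrete, the following sketch traces how the composition \texttt{map mk_eq o atomize o gen_all} (installed above via \texttt{setmksimps} in \texttt{FOL_basic_ss}) would decompose one object-level theorem. The theorem \texttt{my_thm} and the result name \texttt{my_rewrites} are hypothetical; the functions are those defined in the text above.
\begin{ttbox}
(* hypothetical input theorem   my_thm:  |- P & (Q --> ~R)            *)
(* gen_all  strips outer meta-quantifiers (none here);                *)
(* atomize  splits the conjunction and turns the implication into a   *)
(*          conditional rule, giving    [ |- P,  Q ==> ~R ];          *)
(* mk_eq    converts each result to a meta-equality, giving           *)
(*          [ P == True,  Q ==> (R == False) ]                        *)
val my_rewrites = map mk_eq (atomize (gen_all my_thm));
\end{ttbox}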