# HG changeset patch # User wenzelm # Date 1009647372 -3600 # Node ID 8b9845807f7719216ef7b295f37f7bb15186bf25 # Parent fb073a34b537ae89d79784e1b576310881eff198 tuned document sources; diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Coind/ECR.thy --- a/src/ZF/Coind/ECR.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Coind/ECR.thy Sat Dec 29 18:36:12 2001 +0100 @@ -46,8 +46,9 @@ (* Specialised elimination rules *) -inductive_cases htr_constE [elim!]: " \ HasTyRel" -inductive_cases htr_closE [elim]: " \ HasTyRel" +inductive_cases + htr_constE [elim!]: " \ HasTyRel" + and htr_closE [elim]: " \ HasTyRel" (* Properties of the pointwise extension to environments *) diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Coind/Static.thy --- a/src/ZF/Coind/Static.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Coind/Static.thy Sat Dec 29 18:36:12 2001 +0100 @@ -54,11 +54,12 @@ type_intros te_appI Exp.intros Ty.intros -inductive_cases elab_constE [elim!]: " \ ElabRel" -inductive_cases elab_varE [elim!]: " \ ElabRel" -inductive_cases elab_fnE [elim]: " \ ElabRel" -inductive_cases elab_fixE [elim!]: " \ ElabRel" -inductive_cases elab_appE [elim]: " \ ElabRel" +inductive_cases + elab_constE [elim!]: " \ ElabRel" + and elab_varE [elim!]: " \ ElabRel" + and elab_fnE [elim]: " \ ElabRel" + and elab_fixE [elim!]: " \ ElabRel" + and elab_appE [elim]: " \ ElabRel" declare ElabRel.dom_subset [THEN subsetD, dest] diff -r fb073a34b537 -r 8b9845807f77 src/ZF/IMP/Com.thy --- a/src/ZF/IMP/Com.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/IMP/Com.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,160 +1,139 @@ (* Title: ZF/IMP/Com.thy ID: $Id$ - Author: Heiko Loetzbeyer & Robert Sandner, TUM - Copyright 1994 TUM + Author: Heiko Loetzbeyer and Robert Sandner, TU München +*) -Arithmetic expressions, Boolean expressions, Commands - -And their Operational semantics -*) +header {* Arithmetic expressions, boolean expressions, commands *} theory Com = Main: -(** Arithmetic expressions **) -consts loc :: i - aexp :: i + +subsection {* Arithmetic expressions *} -datatype <= "univ(loc Un (nat->nat) Un ((nat*nat) -> nat) )" - "aexp" = N ("n \ nat") - | X ("x \ loc") - | Op1 ("f \ nat -> nat", "a \ aexp") - | Op2 ("f \ (nat*nat) -> nat", "a0 \ aexp", "a1 \ aexp") +consts + loc :: i + aexp :: i + +datatype \ "univ(loc \ (nat -> nat) \ ((nat \ nat) -> nat))" + aexp = N ("n \ nat") + | X ("x \ loc") + | Op1 ("f \ nat -> nat", "a \ aexp") + | Op2 ("f \ (nat \ nat) -> nat", "a0 \ aexp", "a1 \ aexp") -(** Evaluation of arithmetic expressions **) -consts evala :: "i" - "-a->" :: "[i,i] => o" (infixl 50) -translations - "p -a-> n" == " \ evala" +consts evala :: i +syntax "_evala" :: "[i, i] => o" (infixl "-a->" 50) +translations "p -a-> n" == " \ evala" + inductive - domains "evala" <= "(aexp * (loc -> nat)) * nat" - intros + domains "evala" \ "(aexp \ (loc -> nat)) \ nat" + intros N: "[| n \ nat; sigma \ loc->nat |] ==> -a-> n" X: "[| x \ loc; sigma \ loc->nat |] ==> -a-> sigma`x" Op1: "[| -a-> n; f \ nat -> nat |] ==> -a-> f`n" - Op2: "[| -a-> n0; -a-> n1; f \ (nat*nat) -> nat |] + Op2: "[| -a-> n0; -a-> n1; f \ (nat\nat) -> nat |] ==> -a-> f`" type_intros aexp.intros apply_funtype -(** Boolean expressions **) -consts bexp :: i +subsection {* Boolean expressions *} + +consts bexp :: i -datatype <= "univ(aexp Un ((nat*nat)->bool) )" - "bexp" = true - | false - | ROp ("f \ (nat*nat)->bool", "a0 \ aexp", "a1 \ aexp") - | noti ("b \ bexp") - | andi ("b0 \ bexp", "b1 \ bexp") (infixl 60) - | ori ("b0 \ bexp", "b1 \ bexp") (infixl 60) +datatype \ "univ(aexp \ 
((nat \ nat)->bool))" + bexp = true + | false + | ROp ("f \ (nat \ nat)->bool", "a0 \ aexp", "a1 \ aexp") + | noti ("b \ bexp") + | andi ("b0 \ bexp", "b1 \ bexp") (infixl 60) + | ori ("b0 \ bexp", "b1 \ bexp") (infixl 60) -(** Evaluation of boolean expressions **) -consts evalb :: "i" - "-b->" :: "[i,i] => o" (infixl 50) - -translations - "p -b-> b" == " \ evalb" +consts evalb :: i +syntax "_evalb" :: "[i,i] => o" (infixl "-b->" 50) +translations "p -b-> b" == " \ evalb" inductive - domains "evalb" <= "(bexp * (loc -> nat)) * bool" - intros (*avoid clash with ML constructors true, false*) - tru: "[| sigma \ loc -> nat |] ==> -b-> 1" - fls: "[| sigma \ loc -> nat |] ==> -b-> 0" - ROp: "[| -a-> n0; -a-> n1; f \ (nat*nat)->bool |] + domains "evalb" \ "(bexp \ (loc -> nat)) \ bool" + intros + true: "[| sigma \ loc -> nat |] ==> -b-> 1" + false: "[| sigma \ loc -> nat |] ==> -b-> 0" + ROp: "[| -a-> n0; -a-> n1; f \ (nat*nat)->bool |] ==> -b-> f` " noti: "[| -b-> w |] ==> -b-> not(w)" - andi: "[| -b-> w0; -b-> w1 |] + andi: "[| -b-> w0; -b-> w1 |] ==> -b-> (w0 and w1)" - ori: "[| -b-> w0; -b-> w1 |] + ori: "[| -b-> w0; -b-> w1 |] ==> -b-> (w0 or w1)" - type_intros bexp.intros + type_intros bexp.intros apply_funtype and_type or_type bool_1I bool_0I not_type type_elims evala.dom_subset [THEN subsetD, elim_format] -(** Commands **) -consts com :: i - -datatype - "com" = skip - | asgt ("x \ loc", "a \ aexp") (infixl 60) - | semic("c0 \ com", "c1 \ com") ("_; _" [60, 60] 10) - | while("b \ bexp", "c \ com") ("while _ do _" 60) - | ifc ("b \ bexp", "c0 \ com", "c1 \ com") ("ifc _ then _ else _" 60) +subsection {* Commands *} -(*Constructor ";" has low precedence to avoid syntactic ambiguities - with [| m \ nat; x \ loc; ... |] ==> ... It usually will need parentheses.*) +consts com :: i +datatype com = + skip ("\" []) + | assignment ("x \ loc", "a \ aexp") (infixl "\" 60) + | semicolon ("c0 \ com", "c1 \ com") ("_\ _" [60, 60] 10) + | while ("b \ bexp", "c \ com") ("\ _ \ _" 60) + | if ("b \ bexp", "c0 \ com", "c1 \ com") ("\ _ \ _ \ _" 60) -(** Execution of commands **) -consts evalc :: "i" - "-c->" :: "[i,i] => o" (infixl 50) -translations - "p -c-> s" == " \ evalc" +consts evalc :: i +syntax "_evalc" :: "[i, i] => o" (infixl "-c->" 50) +translations "p -c-> s" == " \ evalc" inductive - domains "evalc" <= "(com * (loc -> nat)) * (loc -> nat)" + domains "evalc" \ "(com \ (loc -> nat)) \ (loc -> nat)" intros - skip: "[| sigma \ loc -> nat |] ==> -c-> sigma" + skip: "[| sigma \ loc -> nat |] ==> <\,sigma> -c-> sigma" - assign: "[| m \ nat; x \ loc; -a-> m |] - ==> -c-> sigma(x:=m)" + assign: "[| m \ nat; x \ loc; -a-> m |] + ==> a,sigma> -c-> sigma(x:=m)" - semi: "[| -c-> sigma2; -c-> sigma1 |] - ==> -c-> sigma1" + semi: "[| -c-> sigma2; -c-> sigma1 |] + ==> c1, sigma> -c-> sigma1" - ifc1: "[| b \ bexp; c1 \ com; sigma \ loc->nat; - -b-> 1; -c-> sigma1 |] - ==> -c-> sigma1" + if1: "[| b \ bexp; c1 \ com; sigma \ loc->nat; + -b-> 1; -c-> sigma1 |] + ==> <\ b \ c0 \ c1, sigma> -c-> sigma1" - ifc0: "[| b \ bexp; c0 \ com; sigma \ loc->nat; - -b-> 0; -c-> sigma1 |] - ==> -c-> sigma1" + if0: "[| b \ bexp; c0 \ com; sigma \ loc->nat; + -b-> 0; -c-> sigma1 |] + ==> <\ b \ c0 \ c1, sigma> -c-> sigma1" - while0: "[| c \ com; -b-> 0 |] - ==> -c-> sigma" + while0: "[| c \ com; -b-> 0 |] + ==> <\ b \ c,sigma> -c-> sigma" - while1: "[| c \ com; -b-> 1; -c-> sigma2; - -c-> sigma1 |] - ==> -c-> sigma1" + while1: "[| c \ com; -b-> 1; -c-> sigma2; + <\ b \ c, sigma2> -c-> sigma1 |] + ==> <\ b \ c, sigma> -c-> sigma1" 
type_intros com.intros update_type type_elims evala.dom_subset [THEN subsetD, elim_format] evalb.dom_subset [THEN subsetD, elim_format] -(*** type_intros for evala ***) +subsection {* Misc lemmas *} -lemmas evala_1 [simp] = - evala.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD1] -lemmas evala_2 [simp] = - evala.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD2] -lemmas evala_3 [simp] = - evala.dom_subset [THEN subsetD, THEN SigmaD2] +lemmas evala_1 [simp] = evala.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD1] + and evala_2 [simp] = evala.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD2] + and evala_3 [simp] = evala.dom_subset [THEN subsetD, THEN SigmaD2] - -(*** type_intros for evalb ***) +lemmas evalb_1 [simp] = evalb.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD1] + and evalb_2 [simp] = evalb.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD2] + and evalb_3 [simp] = evalb.dom_subset [THEN subsetD, THEN SigmaD2] -lemmas evalb_1 [simp] = - evalb.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD1] -lemmas evalb_2 [simp] = - evalb.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD2] -lemmas evalb_3 [simp] = - evalb.dom_subset [THEN subsetD, THEN SigmaD2] - -(*** type_intros for evalc ***) +lemmas evalc_1 [simp] = evalc.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD1] + and evalc_2 [simp] = evalc.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD2] + and evalc_3 [simp] = evalc.dom_subset [THEN subsetD, THEN SigmaD2] -lemmas evalc_1 [simp] = - evalc.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD1] -lemmas evalc_2 [simp] = - evalc.dom_subset [THEN subsetD, THEN SigmaD1, THEN SigmaD2] -lemmas evalc_3 [simp] = - evalc.dom_subset [THEN subsetD, THEN SigmaD2] - -inductive_cases evala_N_E [elim!]: " -a-> i" -inductive_cases evala_X_E [elim!]: " -a-> i" -inductive_cases evala_Op1_E [elim!]: " -a-> i" -inductive_cases evala_Op2_E [elim!]: " -a-> i" +inductive_cases + evala_N_E [elim!]: " -a-> i" + and evala_X_E [elim!]: " -a-> i" + and evala_Op1_E [elim!]: " -a-> i" + and evala_Op2_E [elim!]: " -a-> i" end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/IMP/Denotation.thy --- a/src/ZF/IMP/Denotation.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/IMP/Denotation.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,13 +1,13 @@ (* Title: ZF/IMP/Denotation.thy ID: $Id$ - Author: Heiko Loetzbeyer & Robert Sandner, TUM - Copyright 1994 TUM + Author: Heiko Loetzbeyer and Robert Sandner, TU München +*) -Denotational semantics of expressions & commands -*) +header {* Denotational semantics of expressions and commands *} theory Denotation = Com: +subsection {* Definitions *} consts A :: "i => i => i" @@ -15,73 +15,59 @@ C :: "i => i" constdefs - Gamma :: "[i,i,i] => i" - "Gamma(b,cden) == (%phi.{io \ (phi O cden). B(b,fst(io))=1} Un - {io \ id(loc->nat). B(b,fst(io))=0})" + Gamma :: "[i,i,i] => i" ("\") + "\(b,cden) == + (\phi. {io \ (phi O cden). B(b,fst(io))=1} \ + {io \ id(loc->nat). B(b,fst(io))=0})" primrec - "A(N(n), sigma) = n" - "A(X(x), sigma) = sigma`x" - "A(Op1(f,a), sigma) = f`A(a,sigma)" - "A(Op2(f,a0,a1), sigma) = f`" - - -primrec - "B(true, sigma) = 1" - "B(false, sigma) = 0" - "B(ROp(f,a0,a1), sigma) = f`" - "B(noti(b), sigma) = not(B(b,sigma))" - "B(b0 andi b1, sigma) = B(b0,sigma) and B(b1,sigma)" - "B(b0 ori b1, sigma) = B(b0,sigma) or B(b1,sigma)" - + "A(N(n), sigma) = n" + "A(X(x), sigma) = sigma`x" + "A(Op1(f,a), sigma) = f`A(a,sigma)" + "A(Op2(f,a0,a1), sigma) = f`" primrec - "C(skip) = id(loc->nat)" - - "C(x asgt a) = {io:(loc->nat)*(loc->nat). 
- snd(io) = fst(io)(x := A(a,fst(io)))}" + "B(true, sigma) = 1" + "B(false, sigma) = 0" + "B(ROp(f,a0,a1), sigma) = f`" + "B(noti(b), sigma) = not(B(b,sigma))" + "B(b0 andi b1, sigma) = B(b0,sigma) and B(b1,sigma)" + "B(b0 ori b1, sigma) = B(b0,sigma) or B(b1,sigma)" - "C(c0 ; c1) = C(c1) O C(c0)" - - "C(ifc b then c0 else c1) = {io \ C(c0). B(b,fst(io))=1} Un - {io \ C(c1). B(b,fst(io))=0}" - - "C(while b do c) = lfp((loc->nat)*(loc->nat), Gamma(b,C(c)))" +primrec + "C(\) = id(loc->nat)" + "C(x \ a) = + {io \ (loc->nat) \ (loc->nat). snd(io) = fst(io)(x := A(a,fst(io)))}" + "C(c0\ c1) = C(c1) O C(c0)" + "C(\ b \ c0 \ c1) = + {io \ C(c0). B(b,fst(io)) = 1} \ {io \ C(c1). B(b,fst(io)) = 0}" + "C(\ b \ c) = lfp((loc->nat) \ (loc->nat), \(b,C(c)))" -(** Type_intr for A **) +subsection {* Misc lemmas *} lemma A_type [TC]: "[|a \ aexp; sigma \ loc->nat|] ==> A(a,sigma) \ nat" -by (erule aexp.induct, simp_all) - - -(** Type_intr for B **) + by (erule aexp.induct) simp_all lemma B_type [TC]: "[|b \ bexp; sigma \ loc->nat|] ==> B(b,sigma) \ bool" by (erule bexp.induct, simp_all) -(** C_subset **) - -lemma C_subset: "c \ com ==> C(c) \ (loc->nat)*(loc->nat)" -apply (erule com.induct) -apply simp_all -apply (blast dest: lfp_subset [THEN subsetD])+ -done - -(** Type_elims for C **) +lemma C_subset: "c \ com ==> C(c) \ (loc->nat) \ (loc->nat)" + apply (erule com.induct) + apply simp_all + apply (blast dest: lfp_subset [THEN subsetD])+ + done lemma C_type_D [dest]: - "[| \ C(c); c \ com |] ==> x \ loc->nat & y \ loc->nat" -by (blast dest: C_subset [THEN subsetD]) + "[| \ C(c); c \ com |] ==> x \ loc->nat & y \ loc->nat" + by (blast dest: C_subset [THEN subsetD]) lemma C_type_fst [dest]: "[| x \ C(c); c \ com |] ==> fst(x) \ loc->nat" -by (auto dest!: C_subset [THEN subsetD]) - -(** bnd_mono (nat->nat*nat->nat,Gamma(b,c) **) + by (auto dest!: C_subset [THEN subsetD]) -lemma Gamma_bnd_mono: - "cden <= (loc->nat)*(loc->nat) - ==> bnd_mono ((loc->nat)*(loc->nat),Gamma(b,cden))" -by (unfold bnd_mono_def Gamma_def, blast) +lemma Gamma_bnd_mono: + "cden \ (loc->nat) \ (loc->nat) + ==> bnd_mono ((loc->nat) \ (loc->nat), \(b,cden))" + by (unfold bnd_mono_def Gamma_def) blast end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/IMP/Equiv.thy --- a/src/ZF/IMP/Equiv.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/IMP/Equiv.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,87 +1,87 @@ (* Title: ZF/IMP/Equiv.thy ID: $Id$ - Author: Heiko Loetzbeyer & Robert Sandner, TUM - Copyright 1994 TUM + Author: Heiko Loetzbeyer and Robert Sandner, TU München *) +header {* Equivalence *} + theory Equiv = Denotation + Com: lemma aexp_iff [rule_format]: - "[| a \ aexp; sigma: loc -> nat |] - ==> ALL n. -a-> n <-> A(a,sigma) = n" -apply (erule aexp.induct) -apply (force intro!: evala.intros)+ -done + "[| a \ aexp; sigma: loc -> nat |] + ==> ALL n. -a-> n <-> A(a,sigma) = n" + apply (erule aexp.induct) + apply (force intro!: evala.intros)+ + done declare aexp_iff [THEN iffD1, simp] aexp_iff [THEN iffD2, intro!] -inductive_cases [elim!]: " -b-> x" -inductive_cases [elim!]: " -b-> x" -inductive_cases [elim!]: " -b-> x" -inductive_cases [elim!]: " -b-> x" -inductive_cases [elim!]: " -b-> x" -inductive_cases [elim!]: " -b-> x" +inductive_cases [elim!]: + " -b-> x" + " -b-> x" + " -b-> x" + " -b-> x" + " -b-> x" + " -b-> x" lemma bexp_iff [rule_format]: - "[| b \ bexp; sigma: loc -> nat |] - ==> ALL w. -b-> w <-> B(b,sigma) = w" -apply (erule bexp.induct) -apply (auto intro!: evalb.intros) -done + "[| b \ bexp; sigma: loc -> nat |] + ==> ALL w. 
-b-> w <-> B(b,sigma) = w" + apply (erule bexp.induct) + apply (auto intro!: evalb.intros) + done declare bexp_iff [THEN iffD1, simp] bexp_iff [THEN iffD2, intro!] lemma com1: " -c-> sigma' ==> \ C(c)" -apply (erule evalc.induct) -apply simp_all -(* skip *) -apply fast -(* assign *) -apply (simp add: update_type) -(* comp *) -apply fast -(* while *) -apply (erule Gamma_bnd_mono [THEN lfp_unfold, THEN ssubst, OF C_subset]) -apply (simp add: Gamma_def) -apply (force ) -(* recursive case of while *) -apply (erule Gamma_bnd_mono [THEN lfp_unfold, THEN ssubst, OF C_subset]) -apply (simp add: Gamma_def) -apply auto -done - + apply (erule evalc.induct) + apply simp_all + txt {* @{text skip} *} + apply fast + txt {* @{text assign} *} + apply (simp add: update_type) + txt {* @{text comp} *} + apply fast + txt {* @{text while} *} + apply (erule Gamma_bnd_mono [THEN lfp_unfold, THEN ssubst, OF C_subset]) + apply (simp add: Gamma_def) + apply force + txt {* recursive case of @{text while} *} + apply (erule Gamma_bnd_mono [THEN lfp_unfold, THEN ssubst, OF C_subset]) + apply (auto simp add: Gamma_def) + done declare B_type [intro!] A_type [intro!] declare evalc.intros [intro] lemma com2 [rule_format]: "c \ com ==> \x \ C(c). -c-> snd(x)" -apply (erule com.induct) -(* skip *) -apply force -(* assign *) -apply force -(* comp *) -apply force -(* while *) -apply safe -apply simp_all -apply (frule Gamma_bnd_mono [OF C_subset], erule Fixedpt.induct, assumption) -apply (unfold Gamma_def) -apply force -(* if *) -apply auto -done + apply (erule com.induct) + txt {* @{text skip} *} + apply force + txt {* @{text assign} *} + apply force + txt {* @{text comp} *} + apply force + txt {* @{text while} *} + apply safe + apply simp_all + apply (frule Gamma_bnd_mono [OF C_subset], erule Fixedpt.induct, assumption) + apply (unfold Gamma_def) + apply force + txt {* @{text if} *} + apply auto + done -(**** Proof of Equivalence ****) +subsection {* Main theorem *} -lemma com_equivalence: - "\c \ com. C(c) = {io:(loc->nat)*(loc->nat). -c-> snd(io)}" -by (force intro: C_subset [THEN subsetD] elim: com2 dest: com1) +theorem com_equivalence: + "c \ com ==> C(c) = {io \ (loc->nat) \ (loc->nat). 
-c-> snd(io)}" + by (force intro: C_subset [THEN subsetD] elim: com2 dest: com1) end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/IMP/document/root.bib --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/ZF/IMP/document/root.bib Sat Dec 29 18:36:12 2001 +0100 @@ -0,0 +1,3 @@ + +@book{Winskel,author={Glynn Winskel}, +title={The Formal Semantics of Programming Languages},publisher=MIT,year=1993} diff -r fb073a34b537 -r 8b9845807f77 src/ZF/IMP/document/root.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/ZF/IMP/document/root.tex Sat Dec 29 18:36:12 2001 +0100 @@ -0,0 +1,49 @@ + +\documentclass[a4wide]{article} +\usepackage{isabelle,isabellesym} +\usepackage{pdfsetup} + +\urlstyle{rm} +\renewcommand{\isachardoublequote}{} + +% pretty printing for the Com language +%\newcommand{\CMD}[1]{\isatext{\bf\sffamily#1}} +\newcommand{\CMD}[1]{\isatext{\rm\sffamily#1}} +\newcommand{\isasymSKIP}{\CMD{skip}} +\newcommand{\isasymASSN}{\CMD{:=}} +\newcommand{\isasymSEQ}{\CMD{;}} +\newcommand{\isasymIF}{\CMD{if}} +\newcommand{\isasymTHEN}{\CMD{then}} +\newcommand{\isasymELSE}{\CMD{else}} +\newcommand{\isasymWHILE}{\CMD{while}} +\newcommand{\isasymDO}{\CMD{do}} + +\addtolength{\hoffset}{-1cm} +\addtolength{\textwidth}{2cm} + + +\begin{document} + +\title{IMP --- A {\tt WHILE}-language and two semantics} +\author{Heiko Loetzbeyer and Robert Sandner} +\maketitle + +\parindent 0pt\parskip 0.5ex + +\begin{abstract}\noindent + The formalization of the denotational and operational semantics of a simple + while-language together with an equivalence proof between the two semantics. + The whole development essentially formalizes/transcribes chapters 2 and 5 of + \cite{Winskel}. A much extended version of this development is found in + HOL/IMP of the Isabelle distribution. +\end{abstract} + +\tableofcontents + +\parindent 0pt\parskip 0.5ex +\input{session} + +\bibliographystyle{plain} +\bibliography{root} + +\end{document} diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Acc.thy --- a/src/ZF/Induct/Acc.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Acc.thy Sat Dec 29 18:36:12 2001 +0100 @@ -2,59 +2,61 @@ ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1994 University of Cambridge - -Inductive definition of acc(r) +*) -See Ch. Paulin-Mohring, Inductive Definitions in the System Coq. -Research Report 92-49, LIP, ENS Lyon. Dec 1992. -*) +header {* The accessible part of a relation *} theory Acc = Main: +text {* + Inductive definition of @{text "acc(r)"}; see \cite{paulin-tlca}. +*} + consts - acc :: "i=>i" + acc :: "i => i" inductive - domains "acc(r)" <= "field(r)" + domains "acc(r)" \ "field(r)" intros vimage: "[| r-``{a}: Pow(acc(r)); a \ field(r) |] ==> a \ acc(r)" monos Pow_mono - -(*The introduction rule must require a \ field(r) - Otherwise acc(r) would be a proper class! *) +text {* + The introduction rule must require @{prop "a \ field(r)"}, + otherwise @{text "acc(r)"} would be a proper class! -(*The intended introduction rule*) + \medskip + The intended introduction rule: +*} + lemma accI: "[| !!b. :r ==> b \ acc(r); a \ field(r) |] ==> a \ acc(r)" -by (blast intro: acc.intros) + by (blast intro: acc.intros) lemma acc_downward: "[| b \ acc(r); : r |] ==> a \ acc(r)" -by (erule acc.cases, blast) + by (erule acc.cases) blast -lemma acc_induct: - "[| a \ acc(r); - !!x. [| x \ acc(r); \y. :r --> P(y) |] ==> P(x) +lemma acc_induct [induct set: acc]: + "[| a \ acc(r); + !!x. [| x \ acc(r); \y. 
:r --> P(y) |] ==> P(x) |] ==> P(a)" -apply (erule acc.induct) -apply (blast intro: acc.intros) -done + by (erule acc.induct) (blast intro: acc.intros) lemma wf_on_acc: "wf[acc(r)](r)" -apply (rule wf_onI2) -apply (erule acc_induct) -apply fast -done + apply (rule wf_onI2) + apply (erule acc_induct) + apply fast + done -(* field(r) \ acc(r) ==> wf(r) *) -lemmas acc_wfI = wf_on_acc [THEN wf_on_subset_A, THEN wf_on_field_imp_wf] +lemma acc_wfI: "field(r) \ acc(r) \ wf(r)" + by (erule wf_on_acc [THEN wf_on_subset_A, THEN wf_on_field_imp_wf]) lemma acc_wfD: "wf(r) ==> field(r) \ acc(r)" -apply (rule subsetI) -apply (erule wf_induct2, assumption) -apply (blast intro: accI)+ -done + apply (rule subsetI) + apply (erule wf_induct2, assumption) + apply (blast intro: accI)+ + done lemma wf_acc_iff: "wf(r) <-> field(r) \ acc(r)" -by (rule iffI, erule acc_wfD, erule acc_wfI) + by (rule iffI, erule acc_wfD, erule acc_wfI) end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Brouwer.thy --- a/src/ZF/Induct/Brouwer.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Brouwer.thy Sat Dec 29 18:36:12 2001 +0100 @@ -13,8 +13,8 @@ consts brouwer :: i -datatype \\ "Vfrom(0, csucc(nat))" - "brouwer" = Zero | Suc ("b \\ brouwer") | Lim ("h \\ nat -> brouwer") +datatype \ "Vfrom(0, csucc(nat))" + "brouwer" = Zero | Suc ("b \ brouwer") | Lim ("h \ nat -> brouwer") monos Pi_mono type_intros inf_datatype_intrs @@ -23,16 +23,16 @@ elim: brouwer.cases [unfolded brouwer.con_defs]) lemma brouwer_induct2: - "[| b \\ brouwer; + "[| b \ brouwer; P(Zero); - !!b. [| b \\ brouwer; P(b) |] ==> P(Suc(b)); - !!h. [| h \\ nat -> brouwer; \\i \\ nat. P(h`i) + !!b. [| b \ brouwer; P(b) |] ==> P(Suc(b)); + !!h. [| h \ nat -> brouwer; \i \ nat. P(h`i) |] ==> P(Lim(h)) |] ==> P(b)" -- {* A nicer induction rule than the standard one. *} proof - case rule_context - assume "b \\ brouwer" + assume "b \ brouwer" thus ?thesis apply induct apply (assumption | rule rule_context)+ @@ -47,26 +47,26 @@ consts Well :: "[i, i => i] => i" -datatype \\ "Vfrom(A \\ (\\x \\ A. B(x)), csucc(nat \\ |\\x \\ A. B(x)|))" +datatype \ "Vfrom(A \ (\x \ A. B(x)), csucc(nat \ |\x \ A. B(x)|))" -- {* The union with @{text nat} ensures that the cardinal is infinite. *} - "Well(A, B)" = Sup ("a \\ A", "f \\ B(a) -> Well(A, B)") + "Well(A, B)" = Sup ("a \ A", "f \ B(a) -> Well(A, B)") monos Pi_mono type_intros le_trans [OF UN_upper_cardinal le_nat_Un_cardinal] inf_datatype_intrs -lemma Well_unfold: "Well(A, B) = (\\x \\ A. B(x) -> Well(A, B))" +lemma Well_unfold: "Well(A, B) = (\x \ A. B(x) -> Well(A, B))" by (fast intro!: Well.intros [unfolded Well.con_defs] elim: Well.cases [unfolded Well.con_defs]) lemma Well_induct2: - "[| w \\ Well(A, B); - !!a f. [| a \\ A; f \\ B(a) -> Well(A,B); \\y \\ B(a). P(f`y) + "[| w \ Well(A, B); + !!a f. [| a \ A; f \ B(a) -> Well(A,B); \y \ B(a). P(f`y) |] ==> P(Sup(a,f)) |] ==> P(w)" -- {* A nicer induction rule than the standard one. *} proof - case rule_context - assume "w \\ Well(A, B)" + assume "w \ Well(A, B)" thus ?thesis apply induct apply (assumption | rule rule_context)+ @@ -75,7 +75,7 @@ done qed -lemma Well_bool_unfold: "Well(bool, %x. x) = 1 + (1 -> Well(bool, %x. x))" +lemma Well_bool_unfold: "Well(bool, \x. x) = 1 + (1 -> Well(bool, \x. x))" -- {* In fact it's isomorphic to @{text nat}, but we need a recursion operator *} -- {* for @{text Well} to prove this. 
*} apply (rule Well_unfold [THEN trans]) diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Comb.thy --- a/src/ZF/Induct/Comb.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Comb.thy Sat Dec 29 18:36:12 2001 +0100 @@ -2,269 +2,279 @@ ID: $Id$ Author: Lawrence C Paulson Copyright 1994 University of Cambridge - -Combinatory Logic example: the Church-Rosser Theorem -Curiously, combinators do not include free variables. - -Example taken from - J. Camilleri and T. F. Melham. - Reasoning with Inductively Defined Relations in the HOL Theorem Prover. - Report 265, University of Cambridge Computer Laboratory, 1992. - -HOL system proofs may be found in -/usr/groups/theory/hvg-aftp/contrib/rule-induction/cl.ml *) +header {* Combinatory Logic example: the Church-Rosser Theorem *} theory Comb = Main: -(** Datatype definition of combinators S and K, with infixed application **) -consts comb :: i -datatype - "comb" = K - | S - | "#" ("p \ comb", "q \ comb") (infixl 90) +text {* + Curiously, combinators do not include free variables. + + Example taken from \cite{camilleri-melham}. +*} + +subsection {* Definitions *} + +text {* Datatype definition of combinators @{text S} and @{text K}. *} -(** Inductive definition of contractions, -1-> - and (multi-step) reductions, ---> -**) +consts comb :: i +datatype comb = + K + | S + | app ("p \ comb", "q \ comb") (infixl "#" 90) + +text {* + Inductive definition of contractions, @{text "-1->"} and + (multi-step) reductions, @{text "--->"}. +*} + consts - contract :: "i" - "-1->" :: "[i,i] => o" (infixl 50) - "--->" :: "[i,i] => o" (infixl 50) - + contract :: i +syntax + "_contract" :: "[i,i] => o" (infixl "-1->" 50) + "_contract_multi" :: "[i,i] => o" (infixl "--->" 50) translations "p -1-> q" == " \ contract" "p ---> q" == " \ contract^*" inductive - domains "contract" <= "comb*comb" + domains "contract" \ "comb \ comb" intros - K: "[| p \ comb; q \ comb |] ==> K#p#q -1-> p" - S: "[| p \ comb; q \ comb; r \ comb |] ==> S#p#q#r -1-> (p#r)#(q#r)" - Ap1: "[| p-1->q; r \ comb |] ==> p#r -1-> q#r" - Ap2: "[| p-1->q; r \ comb |] ==> r#p -1-> r#q" - type_intros "comb.intros" - + K: "[| p \ comb; q \ comb |] ==> K#p#q -1-> p" + S: "[| p \ comb; q \ comb; r \ comb |] ==> S#p#q#r -1-> (p#r)#(q#r)" + Ap1: "[| p-1->q; r \ comb |] ==> p#r -1-> q#r" + Ap2: "[| p-1->q; r \ comb |] ==> r#p -1-> r#q" + type_intros comb.intros -(** Inductive definition of parallel contractions, =1=> - and (multi-step) parallel reductions, ===> -**) +text {* + Inductive definition of parallel contractions, @{text "=1=>"} and + (multi-step) parallel reductions, @{text "===>"}. +*} + consts - parcontract :: "i" - "=1=>" :: "[i,i] => o" (infixl 50) - "===>" :: "[i,i] => o" (infixl 50) - + parcontract :: i +syntax + "_parcontract" :: "[i,i] => o" (infixl "=1=>" 50) + "_parcontract_multi" :: "[i,i] => o" (infixl "===>" 50) translations "p =1=> q" == " \ parcontract" "p ===> q" == " \ parcontract^+" inductive - domains "parcontract" <= "comb*comb" + domains "parcontract" \ "comb \ comb" intros - refl: "[| p \ comb |] ==> p =1=> p" - K: "[| p \ comb; q \ comb |] ==> K#p#q =1=> p" - S: "[| p \ comb; q \ comb; r \ comb |] ==> S#p#q#r =1=> (p#r)#(q#r)" - Ap: "[| p=1=>q; r=1=>s |] ==> p#r =1=> q#s" - type_intros "comb.intros" + refl: "[| p \ comb |] ==> p =1=> p" + K: "[| p \ comb; q \ comb |] ==> K#p#q =1=> p" + S: "[| p \ comb; q \ comb; r \ comb |] ==> S#p#q#r =1=> (p#r)#(q#r)" + Ap: "[| p=1=>q; r=1=>s |] ==> p#r =1=> q#s" + type_intros comb.intros +text {* + Misc definitions. 
+*} -(*Misc definitions*) constdefs I :: i "I == S#K#K" diamond :: "i => o" - "diamond(r) == \x y. \r --> - (\y'. \r --> - (\z. \r & \ r))" + "diamond(r) == + \x y. \r --> (\y'. \r --> (\z. \r & \ r))" - -(*** Transitive closure preserves the Church-Rosser property ***) +subsection {* Transitive closure preserves the Church-Rosser property *} lemma diamond_strip_lemmaD [rule_format]: - "[| diamond(r); :r^+ |] ==> - \y'. :r --> (\z. : r^+ & : r)" -apply (unfold diamond_def) -apply (erule trancl_induct) -apply (blast intro: r_into_trancl) -apply clarify -apply (drule spec [THEN mp], assumption) -apply (blast intro: r_into_trancl trans_trancl [THEN transD]) -done - + "[| diamond(r); :r^+ |] ==> + \y'. :r --> (\z. : r^+ & : r)" + apply (unfold diamond_def) + apply (erule trancl_induct) + apply (blast intro: r_into_trancl) + apply clarify + apply (drule spec [THEN mp], assumption) + apply (blast intro: r_into_trancl trans_trancl [THEN transD]) + done lemma diamond_trancl: "diamond(r) ==> diamond(r^+)" -apply (simp (no_asm_simp) add: diamond_def) -apply (rule impI [THEN allI, THEN allI]) -apply (erule trancl_induct) -apply (auto ); -apply (best intro: r_into_trancl trans_trancl [THEN transD] - dest: diamond_strip_lemmaD)+ -done - + apply (simp (no_asm_simp) add: diamond_def) + apply (rule impI [THEN allI, THEN allI]) + apply (erule trancl_induct) + apply auto + apply (best intro: r_into_trancl trans_trancl [THEN transD] + dest: diamond_strip_lemmaD)+ + done inductive_cases Ap_E [elim!]: "p#q \ comb" declare comb.intros [intro!] -(*** Results about Contraction ***) +subsection {* Results about Contraction *} -(*For type checking: replaces a-1->b by a,b \ comb *) +text {* + For type checking: replaces @{term "a -1-> b"} by @{text "a, b \ + comb"}. +*} + lemmas contract_combE2 = contract.dom_subset [THEN subsetD, THEN SigmaE2] -lemmas contract_combD1 = contract.dom_subset [THEN subsetD, THEN SigmaD1] -lemmas contract_combD2 = contract.dom_subset [THEN subsetD, THEN SigmaD2] + and contract_combD1 = contract.dom_subset [THEN subsetD, THEN SigmaD1] + and contract_combD2 = contract.dom_subset [THEN subsetD, THEN SigmaD2] lemma field_contract_eq: "field(contract) = comb" -by (blast intro: contract.K elim!: contract_combE2) + by (blast intro: contract.K elim!: contract_combE2) -lemmas reduction_refl = - field_contract_eq [THEN equalityD2, THEN subsetD, THEN rtrancl_refl] +lemmas reduction_refl = + field_contract_eq [THEN equalityD2, THEN subsetD, THEN rtrancl_refl] -lemmas rtrancl_into_rtrancl2 = - r_into_rtrancl [THEN trans_rtrancl [THEN transD]] +lemmas rtrancl_into_rtrancl2 = + r_into_rtrancl [THEN trans_rtrancl [THEN transD]] declare reduction_refl [intro!] contract.K [intro!] contract.S [intro!] 
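
Editorial illustration, not part of the patched theory sources: the following small Python sketch (with its own term representation and helper names `app` and `contracta`, which do not appear in Comb.thy) mimics the one-step contraction relation `-1->` defined above — the K and S rules plus the two application congruences Ap1 and Ap2. Applied to the term K#I#(I#I) it yields exactly the two contracta used later for the counterexample to the diamond property.

    # Editorial sketch, not part of the theory sources: combinator terms as
    # nested tuples and the one-step contraction relation -1-> of Comb.thy.
    K, S = "K", "S"                       # the atomic combinators
    def app(p, q):                        # application  p # q
        return (p, q)

    I = app(app(S, K), K)                 # I == S#K#K

    def contracta(t):
        """All terms reachable from t by exactly one contraction step."""
        out = []
        if isinstance(t, tuple):
            p, q = t
            if isinstance(p, tuple) and p[0] == K:     # rule K: K#x#y -1-> x
                out.append(p[1])
            if isinstance(p, tuple) and isinstance(p[0], tuple) and p[0][0] == S:
                x, y, z = p[0][1], p[1], q             # rule S: S#x#y#z -1-> (x#z)#(y#z)
                out.append(app(app(x, z), app(y, z)))
            out += [app(p2, q) for p2 in contracta(p)]  # rule Ap1
            out += [app(p, q2) for q2 in contracta(q)]  # rule Ap2
        return out

    # K#I#(I#I) has two distinct one-step contracta (cf. KIII_contract1/2 below),
    # so -1-> itself does not satisfy the diamond property.
    print(contracta(app(app(K, I), app(I, I))))
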
-lemmas reduction_rls = contract.K [THEN rtrancl_into_rtrancl2] - contract.S [THEN rtrancl_into_rtrancl2] - contract.Ap1 [THEN rtrancl_into_rtrancl2] - contract.Ap2 [THEN rtrancl_into_rtrancl2] +lemmas reduction_rls = + contract.K [THEN rtrancl_into_rtrancl2] + contract.S [THEN rtrancl_into_rtrancl2] + contract.Ap1 [THEN rtrancl_into_rtrancl2] + contract.Ap2 [THEN rtrancl_into_rtrancl2] -(*Example only: not used*) -lemma reduce_I: "p \ comb ==> I#p ---> p" -by (unfold I_def, blast intro: reduction_rls) +lemma "p \ comb ==> I#p ---> p" + -- {* Example only: not used *} + by (unfold I_def) (blast intro: reduction_rls) lemma comb_I: "I \ comb" -by (unfold I_def, blast) + by (unfold I_def) blast -(** Non-contraction results **) + +subsection {* Non-contraction results *} -(*Derive a case for each combinator constructor*) -inductive_cases K_contractE [elim!]: "K -1-> r" -inductive_cases S_contractE [elim!]: "S -1-> r" -inductive_cases Ap_contractE [elim!]: "p#q -1-> r" +text {* Derive a case for each combinator constructor. *} + +inductive_cases + K_contractE [elim!]: "K -1-> r" + and S_contractE [elim!]: "S -1-> r" + and Ap_contractE [elim!]: "p#q -1-> r" declare contract.Ap1 [intro] contract.Ap2 [intro] lemma I_contract_E: "I -1-> r ==> P" -by (auto simp add: I_def) + by (auto simp add: I_def) lemma K1_contractD: "K#p -1-> r ==> (\q. r = K#q & p -1-> q)" -by auto + by auto lemma Ap_reduce1: "[| p ---> q; r \ comb |] ==> p#r ---> q#r" -apply (frule rtrancl_type [THEN subsetD, THEN SigmaD1]) -apply (drule field_contract_eq [THEN equalityD1, THEN subsetD]) -apply (erule rtrancl_induct) -apply (blast intro: reduction_rls) -apply (erule trans_rtrancl [THEN transD]) -apply (blast intro: contract_combD2 reduction_rls) -done + apply (frule rtrancl_type [THEN subsetD, THEN SigmaD1]) + apply (drule field_contract_eq [THEN equalityD1, THEN subsetD]) + apply (erule rtrancl_induct) + apply (blast intro: reduction_rls) + apply (erule trans_rtrancl [THEN transD]) + apply (blast intro: contract_combD2 reduction_rls) + done lemma Ap_reduce2: "[| p ---> q; r \ comb |] ==> r#p ---> r#q" -apply (frule rtrancl_type [THEN subsetD, THEN SigmaD1]) -apply (drule field_contract_eq [THEN equalityD1, THEN subsetD]) -apply (erule rtrancl_induct) -apply (blast intro: reduction_rls) -apply (erule trans_rtrancl [THEN transD]) -apply (blast intro: contract_combD2 reduction_rls) -done + apply (frule rtrancl_type [THEN subsetD, THEN SigmaD1]) + apply (drule field_contract_eq [THEN equalityD1, THEN subsetD]) + apply (erule rtrancl_induct) + apply (blast intro: reduction_rls) + apply (erule trans_rtrancl [THEN transD]) + apply (blast intro: contract_combD2 reduction_rls) + done -(** Counterexample to the diamond property for -1-> **) +text {* Counterexample to the diamond property for @{text "-1->"}. 
*} lemma KIII_contract1: "K#I#(I#I) -1-> I" -by (blast intro: comb.intros contract.K comb_I) + by (blast intro: comb.intros contract.K comb_I) lemma KIII_contract2: "K#I#(I#I) -1-> K#I#((K#I)#(K#I))" -by (unfold I_def, blast intro: comb.intros contract.intros) - + by (unfold I_def) (blast intro: comb.intros contract.intros) lemma KIII_contract3: "K#I#((K#I)#(K#I)) -1-> I" -by (blast intro: comb.intros contract.K comb_I) + by (blast intro: comb.intros contract.K comb_I) -lemma not_diamond_contract: "~ diamond(contract)" -apply (unfold diamond_def) -apply (blast intro: KIII_contract1 KIII_contract2 KIII_contract3 - elim!: I_contract_E) -done - +lemma not_diamond_contract: "\ diamond(contract)" + apply (unfold diamond_def) + apply (blast intro: KIII_contract1 KIII_contract2 KIII_contract3 + elim!: I_contract_E) + done -(*** Results about Parallel Contraction ***) +subsection {* Results about Parallel Contraction *} -(*For type checking: replaces a=1=>b by a,b \ comb *) +text {* For type checking: replaces @{text "a =1=> b"} by @{text "a, b + \ comb"} *} lemmas parcontract_combE2 = parcontract.dom_subset [THEN subsetD, THEN SigmaE2] -lemmas parcontract_combD1 = parcontract.dom_subset [THEN subsetD, THEN SigmaD1] -lemmas parcontract_combD2 = parcontract.dom_subset [THEN subsetD, THEN SigmaD2] + and parcontract_combD1 = parcontract.dom_subset [THEN subsetD, THEN SigmaD1] + and parcontract_combD2 = parcontract.dom_subset [THEN subsetD, THEN SigmaD2] lemma field_parcontract_eq: "field(parcontract) = comb" -by (blast intro: parcontract.K elim!: parcontract_combE2) + by (blast intro: parcontract.K elim!: parcontract_combE2) -(*Derive a case for each combinator constructor*) -inductive_cases K_parcontractE [elim!]: "K =1=> r" -inductive_cases S_parcontractE [elim!]: "S =1=> r" -inductive_cases Ap_parcontractE [elim!]: "p#q =1=> r" +text {* Derive a case for each combinator constructor. *} +inductive_cases + K_parcontractE [elim!]: "K =1=> r" + and S_parcontractE [elim!]: "S =1=> r" + and Ap_parcontractE [elim!]: "p#q =1=> r" declare parcontract.intros [intro] -(*** Basic properties of parallel contraction ***) +subsection {* Basic properties of parallel contraction *} -lemma K1_parcontractD [dest!]: "K#p =1=> r ==> (\p'. r = K#p' & p =1=> p')" -by auto +lemma K1_parcontractD [dest!]: + "K#p =1=> r ==> (\p'. r = K#p' & p =1=> p')" + by auto -lemma S1_parcontractD [dest!]: "S#p =1=> r ==> (\p'. r = S#p' & p =1=> p')" -by auto +lemma S1_parcontractD [dest!]: + "S#p =1=> r ==> (\p'. r = S#p' & p =1=> p')" + by auto lemma S2_parcontractD [dest!]: - "S#p#q =1=> r ==> (\p' q'. r = S#p'#q' & p =1=> p' & q =1=> q')" -by auto + "S#p#q =1=> r ==> (\p' q'. r = S#p'#q' & p =1=> p' & q =1=> q')" + by auto - -(*Church-Rosser property for parallel contraction*) lemma diamond_parcontract: "diamond(parcontract)" -apply (unfold diamond_def) -apply (rule impI [THEN allI, THEN allI]) -apply (erule parcontract.induct) -apply (blast elim!: comb.free_elims intro: parcontract_combD2)+ -done + -- {* Church-Rosser property for parallel contraction *} + apply (unfold diamond_def) + apply (rule impI [THEN allI, THEN allI]) + apply (erule parcontract.induct) + apply (blast elim!: comb.free_elims intro: parcontract_combD2)+ + done -(*** Equivalence of p--->q and p===>q ***) +text {* + \medskip Equivalence of @{prop "p ---> q"} and @{prop "p ===> q"}. 
+*} lemma contract_imp_parcontract: "p-1->q ==> p=1=>q" -by (erule contract.induct, auto) + by (erule contract.induct) auto lemma reduce_imp_parreduce: "p--->q ==> p===>q" -apply (frule rtrancl_type [THEN subsetD, THEN SigmaD1]) -apply (drule field_contract_eq [THEN equalityD1, THEN subsetD]) -apply (erule rtrancl_induct) - apply (blast intro: r_into_trancl) -apply (blast intro: contract_imp_parcontract r_into_trancl - trans_trancl [THEN transD]) -done - + apply (frule rtrancl_type [THEN subsetD, THEN SigmaD1]) + apply (drule field_contract_eq [THEN equalityD1, THEN subsetD]) + apply (erule rtrancl_induct) + apply (blast intro: r_into_trancl) + apply (blast intro: contract_imp_parcontract r_into_trancl + trans_trancl [THEN transD]) + done lemma parcontract_imp_reduce: "p=1=>q ==> p--->q" -apply (erule parcontract.induct) + apply (erule parcontract.induct) + apply (blast intro: reduction_rls) + apply (blast intro: reduction_rls) apply (blast intro: reduction_rls) - apply (blast intro: reduction_rls) - apply (blast intro: reduction_rls) -apply (blast intro: trans_rtrancl [THEN transD] - Ap_reduce1 Ap_reduce2 parcontract_combD1 parcontract_combD2) -done + apply (blast intro: trans_rtrancl [THEN transD] + Ap_reduce1 Ap_reduce2 parcontract_combD1 parcontract_combD2) + done lemma parreduce_imp_reduce: "p===>q ==> p--->q" -apply (frule trancl_type [THEN subsetD, THEN SigmaD1]) -apply (drule field_parcontract_eq [THEN equalityD1, THEN subsetD]) -apply (erule trancl_induct, erule parcontract_imp_reduce) -apply (erule trans_rtrancl [THEN transD]) -apply (erule parcontract_imp_reduce) -done + apply (frule trancl_type [THEN subsetD, THEN SigmaD1]) + apply (drule field_parcontract_eq [THEN equalityD1, THEN subsetD]) + apply (erule trancl_induct, erule parcontract_imp_reduce) + apply (erule trans_rtrancl [THEN transD]) + apply (erule parcontract_imp_reduce) + done lemma parreduce_iff_reduce: "p===>q <-> p--->q" -by (blast intro: parreduce_imp_reduce reduce_imp_parreduce) + by (blast intro: parreduce_imp_reduce reduce_imp_parreduce) end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Datatypes.thy --- a/src/ZF/Induct/Datatypes.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Datatypes.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,4 +1,4 @@ -(* Title: ZF/ex/Datatypes.thy +(* Title: ZF/Induct/Datatypes.thy ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1994 University of Cambridge diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/FoldSet.ML --- a/src/ZF/Induct/FoldSet.ML Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/FoldSet.ML Sat Dec 29 18:36:12 2001 +0100 @@ -1,4 +1,4 @@ -(* Title: ZF/UNITY/FoldSet.thy +(* Title: ZF/Induct/FoldSet.thy ID: $Id$ Author: Sidi O Ehmety, Cambridge University Computer Laboratory Copyright 2001 University of Cambridge diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/FoldSet.thy --- a/src/ZF/Induct/FoldSet.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/FoldSet.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,4 +1,4 @@ -(* Title: ZF/UNITY/FoldSet.thy +(* Title: ZF/Induct/FoldSet.thy ID: $Id$ Author: Sidi O Ehmety, Cambridge University Computer Laboratory diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/ListN.thy --- a/src/ZF/Induct/ListN.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/ListN.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,58 +1,62 @@ -(* Title: ZF/Induct/ListN +(* Title: ZF/Induct/ListN.thy ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1994 University of Cambridge - 
-Inductive definition of lists of n elements +*) -See Ch. Paulin-Mohring, Inductive Definitions in the System Coq. -Research Report 92-49, LIP, ENS Lyon. Dec 1992. -*) +header {* Lists of n elements *} theory ListN = Main: -consts listn :: "i=>i" +text {* + Inductive definition of lists of @{text n} elements; see + \cite{paulin-tlca}. +*} + +consts listn :: "i=>i" inductive - domains "listn(A)" <= "nat*list(A)" + domains "listn(A)" \ "nat \ list(A)" intros - NilI: "<0,Nil> \ listn(A)" + NilI: "<0,Nil> \ listn(A)" ConsI: "[| a \ A; \ listn(A) |] ==> \ listn(A)" - type_intros nat_typechecks list.intros + type_intros nat_typechecks list.intros lemma list_into_listn: "l \ list(A) ==> \ listn(A)" -by (erule list.induct, simp_all add: listn.intros) + by (erule list.induct) (simp_all add: listn.intros) lemma listn_iff: " \ listn(A) <-> l \ list(A) & length(l)=n" -apply (rule iffI) -apply (erule listn.induct) -apply auto -apply (blast intro: list_into_listn) -done + apply (rule iffI) + apply (erule listn.induct) + apply auto + apply (blast intro: list_into_listn) + done lemma listn_image_eq: "listn(A)``{n} = {l \ list(A). length(l)=n}" -apply (rule equality_iffI) -apply (simp (no_asm) add: listn_iff separation image_singleton_iff) -done + apply (rule equality_iffI) + apply (simp add: listn_iff separation image_singleton_iff) + done lemma listn_mono: "A \ B ==> listn(A) \ listn(B)" -apply (unfold listn.defs ) -apply (rule lfp_mono) -apply (rule listn.bnd_mono)+ -apply (assumption | rule univ_mono Sigma_mono list_mono basic_monos)+ -done + apply (unfold listn.defs) + apply (rule lfp_mono) + apply (rule listn.bnd_mono)+ + apply (assumption | rule univ_mono Sigma_mono list_mono basic_monos)+ + done lemma listn_append: - "[| \ listn(A); \ listn(A) |] ==> \ listn(A)" -apply (erule listn.induct) -apply (frule listn.dom_subset [THEN subsetD]) -apply (simp_all add: listn.intros) -done + "[| \ listn(A); \ listn(A) |] ==> \ listn(A)" + apply (erule listn.induct) + apply (frule listn.dom_subset [THEN subsetD]) + apply (simp_all add: listn.intros) + done -inductive_cases Nil_listn_case: " \ listn(A)" -inductive_cases Cons_listn_case: " \ listn(A)" +inductive_cases + Nil_listn_case: " \ listn(A)" + and Cons_listn_case: " \ listn(A)" -inductive_cases zero_listn_case: "<0,l> \ listn(A)" -inductive_cases succ_listn_case: " \ listn(A)" +inductive_cases + zero_listn_case: "<0,l> \ listn(A)" + and succ_listn_case: " \ listn(A)" end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Multiset.ML --- a/src/ZF/Induct/Multiset.ML Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Multiset.ML Sat Dec 29 18:36:12 2001 +0100 @@ -1,4 +1,4 @@ -(* Title: ZF/UNITY/Multiset.thy +(* Title: ZF/Induct/Multiset.thy ID: $Id$ Author: Sidi O Ehmety, Cambridge University Computer Laboratory diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Multiset.thy --- a/src/ZF/Induct/Multiset.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Multiset.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,4 +1,4 @@ -(* Title: ZF/UNITY/Multiset.thy +(* Title: ZF/Induct/Multiset.thy ID: $Id$ Author: Sidi O Ehmety, Cambridge University Computer Laboratory diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Ntree.thy --- a/src/ZF/Induct/Ntree.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Ntree.thy Sat Dec 29 18:36:12 2001 +0100 @@ -34,11 +34,11 @@ constdefs ntree_rec :: "[[i, i, i] => i, i] => i" "ntree_rec(b) == - Vrecursor(%pr. ntree_case(%x h. b(x, h, \i \ domain(h). pr`(h`i))))" + Vrecursor(\pr. ntree_case(\x h. b(x, h, \i \ domain(h). 
pr`(h`i))))" constdefs ntree_copy :: "i => i" - "ntree_copy(z) == ntree_rec(%x h r. Branch(x,r), z)" + "ntree_copy(z) == ntree_rec(\x h r. Branch(x,r), z)" text {* diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Primrec.thy --- a/src/ZF/Induct/Primrec.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Primrec.thy Sat Dec 29 18:36:12 2001 +0100 @@ -2,369 +2,372 @@ ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1994 University of Cambridge - -Primitive Recursive Functions: the inductive definition +*) -Proof adopted from -Nora Szasz, -A Machine Checked Proof that Ackermann's Function is not Primitive Recursive, -In: Huet & Plotkin, eds., Logical Environments (CUP, 1993), 317-338. - -See also E. Mendelson, Introduction to Mathematical Logic. -(Van Nostrand, 1964), page 250, exercise 11. -*) +header {* Primitive Recursive Functions: the inductive definition *} theory Primrec = Main: +text {* + Proof adopted from \cite{szasz}. + + See also \cite[page 250, exercise 11]{mendelson}. +*} + + +subsection {* Basic definitions *} + constdefs - SC :: "i" - "SC == \l \ list(nat). list_case(0, %x xs. succ(x), l)" - - CONST :: "i=>i" - "CONST(k) == \l \ list(nat). k" + SC :: "i" + "SC == \l \ list(nat). list_case(0, \x xs. succ(x), l)" - PROJ :: "i=>i" - "PROJ(i) == \l \ list(nat). list_case(0, %x xs. x, drop(i,l))" + CONST :: "i=>i" + "CONST(k) == \l \ list(nat). k" - COMP :: "[i,i]=>i" - "COMP(g,fs) == \l \ list(nat). g ` List.map(%f. f`l, fs)" + PROJ :: "i=>i" + "PROJ(i) == \l \ list(nat). list_case(0, \x xs. x, drop(i,l))" - PREC :: "[i,i]=>i" - (*Note that g is applied first to PREC(f,g)`y and then to y!*) - "PREC(f,g) == - \l \ list(nat). list_case(0, - %x xs. rec(x, f`xs, %y r. g ` Cons(r, Cons(y, xs))), l)" + COMP :: "[i,i]=>i" + "COMP(g,fs) == \l \ list(nat). g ` List.map(\f. f`l, fs)" + + PREC :: "[i,i]=>i" + "PREC(f,g) == + \l \ list(nat). list_case(0, + \x xs. rec(x, f`xs, \y r. g ` Cons(r, Cons(y, xs))), l)" + -- {* Note that @{text g} is applied first to @{term "PREC(f,g)`y"} and then to @{text y}! *} consts - ACK :: "i=>i" - + ACK :: "i=>i" primrec - ACK_0: "ACK(0) = SC" - ACK_succ: "ACK(succ(i)) = PREC (CONST (ACK(i) ` [1]), - COMP(ACK(i), [PROJ(0)]))" + "ACK(0) = SC" + "ACK(succ(i)) = PREC (CONST (ACK(i) ` [1]), COMP(ACK(i), [PROJ(0)]))" syntax - ack :: "[i,i]=>i" - + ack :: "[i,i]=>i" translations - "ack(x,y)" == "ACK(x) ` [y]" - + "ack(x,y)" == "ACK(x) ` [y]" -(** Useful special cases of evaluation ***) +text {* + \medskip Useful special cases of evaluation. 
+*} lemma SC: "[| x \ nat; l \ list(nat) |] ==> SC ` (Cons(x,l)) = succ(x)" -by (simp add: SC_def) + by (simp add: SC_def) lemma CONST: "l \ list(nat) ==> CONST(k) ` l = k" -by (simp add: CONST_def) + by (simp add: CONST_def) lemma PROJ_0: "[| x \ nat; l \ list(nat) |] ==> PROJ(0) ` (Cons(x,l)) = x" -by (simp add: PROJ_def) + by (simp add: PROJ_def) lemma COMP_1: "l \ list(nat) ==> COMP(g,[f]) ` l = g` [f`l]" -by (simp add: COMP_def) + by (simp add: COMP_def) lemma PREC_0: "l \ list(nat) ==> PREC(f,g) ` (Cons(0,l)) = f`l" -by (simp add: PREC_def) + by (simp add: PREC_def) -lemma PREC_succ: - "[| x \ nat; l \ list(nat) |] - ==> PREC(f,g) ` (Cons(succ(x),l)) = - g ` Cons(PREC(f,g)`(Cons(x,l)), Cons(x,l))" -by (simp add: PREC_def) +lemma PREC_succ: + "[| x \ nat; l \ list(nat) |] + ==> PREC(f,g) ` (Cons(succ(x),l)) = + g ` Cons(PREC(f,g)`(Cons(x,l)), Cons(x,l))" + by (simp add: PREC_def) +subsection {* Inductive definition of the PR functions *} + consts - prim_rec :: "i" + prim_rec :: i inductive - domains "prim_rec" <= "list(nat)->nat" + domains prim_rec \ "list(nat)->nat" intros "SC \ prim_rec" "k \ nat ==> CONST(k) \ prim_rec" "i \ nat ==> PROJ(i) \ prim_rec" - "[| g \ prim_rec; fs\list(prim_rec) |] ==> COMP(g,fs): prim_rec" - "[| f \ prim_rec; g \ prim_rec |] ==> PREC(f,g): prim_rec" - monos list_mono - con_defs SC_def CONST_def PROJ_def COMP_def PREC_def - type_intros nat_typechecks list.intros - lam_type list_case_type drop_type List.map_type - apply_type rec_type + "[| g \ prim_rec; fs\list(prim_rec) |] ==> COMP(g,fs) \ prim_rec" + "[| f \ prim_rec; g \ prim_rec |] ==> PREC(f,g) \ prim_rec" + monos list_mono + con_defs SC_def CONST_def PROJ_def COMP_def PREC_def + type_intros nat_typechecks list.intros + lam_type list_case_type drop_type List.map_type + apply_type rec_type -(*** Inductive definition of the PR functions ***) +lemma prim_rec_into_fun [TC]: "c \ prim_rec ==> c \ list(nat) -> nat" + by (erule subsetD [OF prim_rec.dom_subset]) -(* c \ prim_rec ==> c \ list(nat) -> nat *) -lemmas prim_rec_into_fun [TC] = subsetD [OF prim_rec.dom_subset] lemmas [TC] = apply_type [OF prim_rec_into_fun] declare prim_rec.intros [TC] declare nat_into_Ord [TC] declare rec_type [TC] -lemma ACK_in_prim_rec [TC]: "i \ nat ==> ACK(i): prim_rec" -by (induct_tac "i", simp_all) - -lemma ack_type [TC]: "[| i \ nat; j \ nat |] ==> ack(i,j): nat" -by auto +lemma ACK_in_prim_rec [TC]: "i \ nat ==> ACK(i) \ prim_rec" + by (induct_tac i) simp_all -(** Ackermann's function cases **) +lemma ack_type [TC]: "[| i \ nat; j \ nat |] ==> ack(i,j) \ nat" + by auto -(*PROPERTY A 1*) -lemma ack_0: "j \ nat ==> ack(0,j) = succ(j)" -by (simp add: SC) + +subsection {* Ackermann's function cases *} -(*PROPERTY A 2*) +lemma ack_0: "j \ nat ==> ack(0,j) = succ(j)" + -- {* PROPERTY A 1 *} + by (simp add: SC) + lemma ack_succ_0: "ack(succ(i), 0) = ack(i,1)" -by (simp add: CONST PREC_0) + -- {* PROPERTY A 2 *} + by (simp add: CONST PREC_0) -(*PROPERTY A 3*) lemma ack_succ_succ: - "[| i\nat; j\nat |] ==> ack(succ(i), succ(j)) = ack(i, ack(succ(i), j))" -by (simp add: CONST PREC_succ COMP_1 PROJ_0) + "[| i\nat; j\nat |] ==> ack(succ(i), succ(j)) = ack(i, ack(succ(i), j))" + -- {* PROPERTY A 3 *} + by (simp add: CONST PREC_succ COMP_1 PROJ_0) -declare ack_0 [simp] ack_succ_0 [simp] ack_succ_succ [simp] ack_type [simp] -declare ACK_0 [simp del] ACK_succ [simp del] +lemmas [simp] = ack_0 ack_succ_0 ack_succ_succ ack_type + and [simp del] = ACK.simps -(*PROPERTY A 4*) lemma lt_ack2 [rule_format]: "i \ nat ==> \j \ nat. 
j < ack(i,j)" -apply (induct_tac "i") -apply simp -apply (rule ballI) -apply (induct_tac "j") -apply (erule_tac [2] succ_leI [THEN lt_trans1]) -apply (rule nat_0I [THEN nat_0_le, THEN lt_trans]) -apply auto -done + -- {* PROPERTY A 4 *} + apply (induct_tac i) + apply simp + apply (rule ballI) + apply (induct_tac j) + apply (erule_tac [2] succ_leI [THEN lt_trans1]) + apply (rule nat_0I [THEN nat_0_le, THEN lt_trans]) + apply auto + done -(*PROPERTY A 5-, the single-step lemma*) lemma ack_lt_ack_succ2: "[|i\nat; j\nat|] ==> ack(i,j) < ack(i, succ(j))" -by (induct_tac "i", simp_all add: lt_ack2) + -- {* PROPERTY A 5-, the single-step lemma *} + by (induct_tac i) (simp_all add: lt_ack2) -(*PROPERTY A 5, monotonicity for < *) lemma ack_lt_mono2: "[| j nat; k \ nat |] ==> ack(i,j) < ack(i,k)" -apply (frule lt_nat_in_nat , assumption) -apply (erule succ_lt_induct) -apply assumption -apply (rule_tac [2] lt_trans) -apply (auto intro: ack_lt_ack_succ2) -done + -- {* PROPERTY A 5, monotonicity for @{text "<"} *} + apply (frule lt_nat_in_nat, assumption) + apply (erule succ_lt_induct) + apply assumption + apply (rule_tac [2] lt_trans) + apply (auto intro: ack_lt_ack_succ2) + done -(*PROPERTY A 5', monotonicity for\*) lemma ack_le_mono2: "[|j\k; i\nat; k\nat|] ==> ack(i,j) \ ack(i,k)" -apply (rule_tac f = "%j. ack (i,j) " in Ord_lt_mono_imp_le_mono) -apply (assumption | rule ack_lt_mono2 ack_type [THEN nat_into_Ord])+; -done + -- {* PROPERTY A 5', monotonicity for @{text \} *} + apply (rule_tac f = "\j. ack (i,j) " in Ord_lt_mono_imp_le_mono) + apply (assumption | rule ack_lt_mono2 ack_type [THEN nat_into_Ord])+ + done -(*PROPERTY A 6*) lemma ack2_le_ack1: - "[| i\nat; j\nat |] ==> ack(i, succ(j)) \ ack(succ(i), j)" -apply (induct_tac "j") -apply (simp_all) -apply (rule ack_le_mono2) -apply (rule lt_ack2 [THEN succ_leI, THEN le_trans]) -apply auto -done + "[| i\nat; j\nat |] ==> ack(i, succ(j)) \ ack(succ(i), j)" + -- {* PROPERTY A 6 *} + apply (induct_tac j) + apply simp_all + apply (rule ack_le_mono2) + apply (rule lt_ack2 [THEN succ_leI, THEN le_trans]) + apply auto + done -(*PROPERTY A 7-, the single-step lemma*) lemma ack_lt_ack_succ1: "[| i \ nat; j \ nat |] ==> ack(i,j) < ack(succ(i),j)" -apply (rule ack_lt_mono2 [THEN lt_trans2]) -apply (rule_tac [4] ack2_le_ack1) -apply auto -done + -- {* PROPERTY A 7-, the single-step lemma *} + apply (rule ack_lt_mono2 [THEN lt_trans2]) + apply (rule_tac [4] ack2_le_ack1) + apply auto + done -(*PROPERTY A 7, monotonicity for < *) lemma ack_lt_mono1: "[| i nat; k \ nat |] ==> ack(i,k) < ack(j,k)" -apply (frule lt_nat_in_nat , assumption) -apply (erule succ_lt_induct) -apply assumption -apply (rule_tac [2] lt_trans) -apply (auto intro: ack_lt_ack_succ1) -done + -- {* PROPERTY A 7, monotonicity for @{text "<"} *} + apply (frule lt_nat_in_nat, assumption) + apply (erule succ_lt_induct) + apply assumption + apply (rule_tac [2] lt_trans) + apply (auto intro: ack_lt_ack_succ1) + done -(*PROPERTY A 7', monotonicity for\*) lemma ack_le_mono1: "[| i\j; j \ nat; k \ nat |] ==> ack(i,k) \ ack(j,k)" -apply (rule_tac f = "%j. ack (j,k) " in Ord_lt_mono_imp_le_mono) -apply (assumption | rule ack_lt_mono1 ack_type [THEN nat_into_Ord])+; -done + -- {* PROPERTY A 7', monotonicity for @{text \} *} + apply (rule_tac f = "\j. 
ack (j,k) " in Ord_lt_mono_imp_le_mono) + apply (assumption | rule ack_lt_mono1 ack_type [THEN nat_into_Ord])+ + done -(*PROPERTY A 8*) lemma ack_1: "j \ nat ==> ack(1,j) = succ(succ(j))" -by (induct_tac "j", simp_all) + -- {* PROPERTY A 8 *} + by (induct_tac j) simp_all -(*PROPERTY A 9*) lemma ack_2: "j \ nat ==> ack(succ(1),j) = succ(succ(succ(j#+j)))" -by (induct_tac "j", simp_all add: ack_1) + -- {* PROPERTY A 9 *} + by (induct_tac j) (simp_all add: ack_1) -(*PROPERTY A 10*) lemma ack_nest_bound: - "[| i1 \ nat; i2 \ nat; j \ nat |] - ==> ack(i1, ack(i2,j)) < ack(succ(succ(i1#+i2)), j)" -apply (rule lt_trans2 [OF _ ack2_le_ack1]) -apply simp -apply (rule add_le_self [THEN ack_le_mono1, THEN lt_trans1]) -apply auto -apply (force intro: add_le_self2 [THEN ack_lt_mono1, THEN ack_lt_mono2]) -done + "[| i1 \ nat; i2 \ nat; j \ nat |] + ==> ack(i1, ack(i2,j)) < ack(succ(succ(i1#+i2)), j)" + -- {* PROPERTY A 10 *} + apply (rule lt_trans2 [OF _ ack2_le_ack1]) + apply simp + apply (rule add_le_self [THEN ack_le_mono1, THEN lt_trans1]) + apply auto + apply (force intro: add_le_self2 [THEN ack_lt_mono1, THEN ack_lt_mono2]) + done -(*PROPERTY A 11*) lemma ack_add_bound: - "[| i1 \ nat; i2 \ nat; j \ nat |] - ==> ack(i1,j) #+ ack(i2,j) < ack(succ(succ(succ(succ(i1#+i2)))), j)" -apply (rule_tac j = "ack (succ (1) , ack (i1 #+ i2, j))" in lt_trans) -apply (simp add: ack_2) -apply (rule_tac [2] ack_nest_bound [THEN lt_trans2]) -apply (rule add_le_mono [THEN leI, THEN leI]) -apply (auto intro: add_le_self add_le_self2 ack_le_mono1) -done + "[| i1 \ nat; i2 \ nat; j \ nat |] + ==> ack(i1,j) #+ ack(i2,j) < ack(succ(succ(succ(succ(i1#+i2)))), j)" + -- {* PROPERTY A 11 *} + apply (rule_tac j = "ack (succ (1) , ack (i1 #+ i2, j))" in lt_trans) + apply (simp add: ack_2) + apply (rule_tac [2] ack_nest_bound [THEN lt_trans2]) + apply (rule add_le_mono [THEN leI, THEN leI]) + apply (auto intro: add_le_self add_le_self2 ack_le_mono1) + done -(*PROPERTY A 12. Article uses existential quantifier but the ALF proof - used k#+4. Quantified version must be nested \k'. \i,j... *) lemma ack_add_bound2: - "[| i < ack(k,j); j \ nat; k \ nat |] + "[| i < ack(k,j); j \ nat; k \ nat |] ==> i#+j < ack(succ(succ(succ(succ(k)))), j)" -apply (rule_tac j = "ack (k,j) #+ ack (0,j) " in lt_trans) -apply (rule_tac [2] ack_add_bound [THEN lt_trans2]) -apply (rule add_lt_mono) -apply auto -done + -- {* PROPERTY A 12. *} + -- {* Article uses existential quantifier but the ALF proof used @{term "k#+#4"}. *} + -- {* Quantified version must be nested @{text "\k'. \i,j \"}. *} + apply (rule_tac j = "ack (k,j) #+ ack (0,j) " in lt_trans) + apply (rule_tac [2] ack_add_bound [THEN lt_trans2]) + apply (rule add_lt_mono) + apply auto + done -(*** MAIN RESULT ***) + +subsection {* Main result *} declare list_add_type [simp] lemma SC_case: "l \ list(nat) ==> SC ` l < ack(1, list_add(l))" -apply (unfold SC_def) -apply (erule list.cases) -apply (simp add: succ_iff) -apply (simp add: ack_1 add_le_self) -done + apply (unfold SC_def) + apply (erule list.cases) + apply (simp add: succ_iff) + apply (simp add: ack_1 add_le_self) + done -(*PROPERTY A 4'? Extra lemma needed for CONST case, constant functions*) lemma lt_ack1: "[| i \ nat; j \ nat |] ==> i < ack(i,j)" -apply (induct_tac "i") -apply (simp add: nat_0_le) -apply (erule lt_trans1 [OF succ_leI ack_lt_ack_succ1]) -apply auto -done + -- {* PROPERTY A 4'? Extra lemma needed for @{text CONST} case, constant functions. 
*} + apply (induct_tac i) + apply (simp add: nat_0_le) + apply (erule lt_trans1 [OF succ_leI ack_lt_ack_succ1]) + apply auto + done -lemma CONST_case: +lemma CONST_case: "[| l \ list(nat); k \ nat |] ==> CONST(k) ` l < ack(k, list_add(l))" -by (simp add: CONST_def lt_ack1) + by (simp add: CONST_def lt_ack1) -lemma PROJ_case [rule_format]: +lemma PROJ_case [rule_format]: "l \ list(nat) ==> \i \ nat. PROJ(i) ` l < ack(0, list_add(l))" -apply (unfold PROJ_def) -apply simp -apply (erule list.induct) -apply (simp add: nat_0_le) -apply simp -apply (rule ballI) -apply (erule_tac n = "x" in natE) -apply (simp add: add_le_self) -apply simp -apply (erule bspec [THEN lt_trans2]) -apply (rule_tac [2] add_le_self2 [THEN succ_leI]) -apply auto -done + apply (unfold PROJ_def) + apply simp + apply (erule list.induct) + apply (simp add: nat_0_le) + apply simp + apply (rule ballI) + apply (erule_tac n = "x" in natE) + apply (simp add: add_le_self) + apply simp + apply (erule bspec [THEN lt_trans2]) + apply (rule_tac [2] add_le_self2 [THEN succ_leI]) + apply auto + done -(** COMP case **) +text {* + \medskip @{text COMP} case. +*} lemma COMP_map_lemma: - "fs \ list({f \ prim_rec . - \kf \ nat. \l \ list(nat). f`l < ack(kf, list_add(l))}) - ==> \k \ nat. \l \ list(nat). - list_add(map(%f. f ` l, fs)) < ack(k, list_add(l))" -apply (erule list.induct) -apply (rule_tac x = "0" in bexI) -apply (simp_all add: lt_ack1 nat_0_le) -apply clarify -apply (rule ballI [THEN bexI]) -apply (rule add_lt_mono [THEN lt_trans]) -apply (rule_tac [5] ack_add_bound) -apply blast -apply auto -done + "fs \ list({f \ prim_rec. \kf \ nat. \l \ list(nat). f`l < ack(kf, list_add(l))}) + ==> \k \ nat. \l \ list(nat). + list_add(map(\f. f ` l, fs)) < ack(k, list_add(l))" + apply (erule list.induct) + apply (rule_tac x = 0 in bexI) + apply (simp_all add: lt_ack1 nat_0_le) + apply clarify + apply (rule ballI [THEN bexI]) + apply (rule add_lt_mono [THEN lt_trans]) + apply (rule_tac [5] ack_add_bound) + apply blast + apply auto + done -lemma COMP_case: - "[| kg\nat; - \l \ list(nat). g`l < ack(kg, list_add(l)); - fs \ list({f \ prim_rec . - \kf \ nat. \l \ list(nat). +lemma COMP_case: + "[| kg\nat; + \l \ list(nat). g`l < ack(kg, list_add(l)); + fs \ list({f \ prim_rec . + \kf \ nat. \l \ list(nat). f`l < ack(kf, list_add(l))}) |] ==> \k \ nat. \l \ list(nat). COMP(g,fs)`l < ack(k, list_add(l))" -apply (simp add: COMP_def) -apply (frule list_CollectD) -apply (erule COMP_map_lemma [THEN bexE]) -apply (rule ballI [THEN bexI]) -apply (erule bspec [THEN lt_trans]) -apply (rule_tac [2] lt_trans) -apply (rule_tac [3] ack_nest_bound) -apply (erule_tac [2] bspec [THEN ack_lt_mono2]) -apply auto -done + apply (simp add: COMP_def) + apply (frule list_CollectD) + apply (erule COMP_map_lemma [THEN bexE]) + apply (rule ballI [THEN bexI]) + apply (erule bspec [THEN lt_trans]) + apply (rule_tac [2] lt_trans) + apply (rule_tac [3] ack_nest_bound) + apply (erule_tac [2] bspec [THEN ack_lt_mono2]) + apply auto + done -(** PREC case **) +text {* + \medskip @{text PREC} case. +*} -lemma PREC_case_lemma: - "[| \l \ list(nat). f`l #+ list_add(l) < ack(kf, list_add(l)); - \l \ list(nat). g`l #+ list_add(l) < ack(kg, list_add(l)); - f \ prim_rec; kf\nat; - g \ prim_rec; kg\nat; +lemma PREC_case_lemma: + "[| \l \ list(nat). f`l #+ list_add(l) < ack(kf, list_add(l)); + \l \ list(nat). 
g`l #+ list_add(l) < ack(kg, list_add(l)); + f \ prim_rec; kf\nat; + g \ prim_rec; kg\nat; l \ list(nat) |] ==> PREC(f,g)`l #+ list_add(l) < ack(succ(kf#+kg), list_add(l))" -apply (unfold PREC_def) -apply (erule list.cases) -apply (simp add: lt_trans [OF nat_le_refl lt_ack2]) -apply simp -apply (erule ssubst) (*get rid of the needless assumption*) -apply (induct_tac "a") -apply simp_all -(*base case*) -apply (rule lt_trans, erule bspec, assumption); -apply (simp add: add_le_self [THEN ack_lt_mono1]) -(*ind step*) -apply (rule succ_leI [THEN lt_trans1]) -apply (rule_tac j = "g ` ?ll #+ ?mm" in lt_trans1) -apply (erule_tac [2] bspec) -apply (rule nat_le_refl [THEN add_le_mono]) -apply typecheck -apply (simp add: add_le_self2) -(*final part of the simplification*) -apply simp -apply (rule add_le_self2 [THEN ack_le_mono1, THEN lt_trans1]) -apply (erule_tac [4] ack_lt_mono2) -apply auto -done + apply (unfold PREC_def) + apply (erule list.cases) + apply (simp add: lt_trans [OF nat_le_refl lt_ack2]) + apply simp + apply (erule ssubst) -- {* get rid of the needless assumption *} + apply (induct_tac a) + apply simp_all + txt {* base case *} + apply (rule lt_trans, erule bspec, assumption) + apply (simp add: add_le_self [THEN ack_lt_mono1]) + txt {* ind step *} + apply (rule succ_leI [THEN lt_trans1]) + apply (rule_tac j = "g ` ?ll #+ ?mm" in lt_trans1) + apply (erule_tac [2] bspec) + apply (rule nat_le_refl [THEN add_le_mono]) + apply typecheck + apply (simp add: add_le_self2) + txt {* final part of the simplification *} + apply simp + apply (rule add_le_self2 [THEN ack_le_mono1, THEN lt_trans1]) + apply (erule_tac [4] ack_lt_mono2) + apply auto + done lemma PREC_case: - "[| f \ prim_rec; kf\nat; - g \ prim_rec; kg\nat; - \l \ list(nat). f`l < ack(kf, list_add(l)); - \l \ list(nat). g`l < ack(kg, list_add(l)) |] - ==> \k \ nat. \l \ list(nat). PREC(f,g)`l< ack(k, list_add(l))" -apply (rule ballI [THEN bexI]) -apply (rule lt_trans1 [OF add_le_self PREC_case_lemma]) -apply typecheck -apply (blast intro: ack_add_bound2 list_add_type)+ -done + "[| f \ prim_rec; kf\nat; + g \ prim_rec; kg\nat; + \l \ list(nat). f`l < ack(kf, list_add(l)); + \l \ list(nat). g`l < ack(kg, list_add(l)) |] + ==> \k \ nat. \l \ list(nat). PREC(f,g)`l< ack(k, list_add(l))" + apply (rule ballI [THEN bexI]) + apply (rule lt_trans1 [OF add_le_self PREC_case_lemma]) + apply typecheck + apply (blast intro: ack_add_bound2 list_add_type)+ + done lemma ack_bounds_prim_rec: - "f \ prim_rec ==> \k \ nat. \l \ list(nat). f`l < ack(k, list_add(l))" -apply (erule prim_rec.induct) -apply (auto intro: SC_case CONST_case PROJ_case COMP_case PREC_case) -done + "f \ prim_rec ==> \k \ nat. \l \ list(nat). f`l < ack(k, list_add(l))" + apply (erule prim_rec.induct) + apply (auto intro: SC_case CONST_case PROJ_case COMP_case PREC_case) + done -lemma ack_not_prim_rec: - "~ (\l \ list(nat). list_case(0, %x xs. ack(x,x), l)) \ prim_rec" -apply (rule notI) -apply (drule ack_bounds_prim_rec) -apply force -done +theorem ack_not_prim_rec: + "(\l \ list(nat). list_case(0, \x xs. 
ack(x,x), l)) \ prim_rec" + apply (rule notI) + apply (drule ack_bounds_prim_rec) + apply force + done end - - - diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/PropLog.thy --- a/src/ZF/Induct/PropLog.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/PropLog.thy Sat Dec 29 18:36:12 2001 +0100 @@ -2,296 +2,339 @@ ID: $Id$ Author: Tobias Nipkow & Lawrence C Paulson Copyright 1993 University of Cambridge - -Datatype definition of propositional logic formulae and inductive definition -of the propositional tautologies. +*) -Inductive definition of propositional logic. -Soundness and completeness w.r.t. truth-tables. - -Prove: If H|=p then G|=p where G \ Fin(H) -*) +header {* Meta-theory of propositional logic *} theory PropLog = Main: -(** The datatype of propositions; note mixfix syntax **) -consts - propn :: "i" +text {* + Datatype definition of propositional logic formulae and inductive + definition of the propositional tautologies. + + Inductive definition of propositional logic. Soundness and + completeness w.r.t.\ truth-tables. -datatype - "propn" = Fls - | Var ("n \ nat") ("#_" [100] 100) - | "=>" ("p \ propn", "q \ propn") (infixr 90) + Prove: If @{text "H |= p"} then @{text "G |= p"} where @{text "G \ + Fin(H)"} +*} + + +subsection {* The datatype of propositions *} -(** The proof system **) consts - thms :: "i => i" + propn :: i -syntax - "|-" :: "[i,i] => o" (infixl 50) +datatype propn = + Fls + | Var ("n \ nat") ("#_" [100] 100) + | Imp ("p \ propn", "q \ propn") (infixr "=>" 90) + -translations - "H |- p" == "p \ thms(H)" +subsection {* The proof system *} + +consts thms :: "i => i" +syntax "_thms" :: "[i,i] => o" (infixl "|-" 50) +translations "H |- p" == "p \ thms(H)" inductive - domains "thms(H)" <= "propn" + domains "thms(H)" \ "propn" intros H: "[| p \ H; p \ propn |] ==> H |- p" K: "[| p \ propn; q \ propn |] ==> H |- p=>q=>p" - S: "[| p \ propn; q \ propn; r \ propn |] + S: "[| p \ propn; q \ propn; r \ propn |] ==> H |- (p=>q=>r) => (p=>q) => p=>r" DN: "p \ propn ==> H |- ((p=>Fls) => Fls) => p" MP: "[| H |- p=>q; H |- p; p \ propn; q \ propn |] ==> H |- q" type_intros "propn.intros" - -(** The semantics **) -consts - "|=" :: "[i,i] => o" (infixl 50) - hyps :: "[i,i] => i" - is_true_fun :: "[i,i] => i" - -constdefs (*this definitionis necessary since predicates can't be recursive*) - is_true :: "[i,i] => o" - "is_true(p,t) == is_true_fun(p,t)=1" - -defs - (*Logical consequence: for every valuation, if all elements of H are true - then so is p*) - logcon_def: "H |= p == \t. (\q \ H. is_true(q,t)) --> is_true(p,t)" - -primrec (** A finite set of hypotheses from t and the Vars in p **) - "hyps(Fls, t) = 0" - "hyps(Var(v), t) = (if v \ t then {#v} else {#v=>Fls})" - "hyps(p=>q, t) = hyps(p,t) Un hyps(q,t)" - -primrec (** Semantics of propositional logic **) - "is_true_fun(Fls, t) = 0" - "is_true_fun(Var(v), t) = (if v \ t then 1 else 0)" - "is_true_fun(p=>q, t) = (if is_true_fun(p,t)=1 then is_true_fun(q,t) - else 1)" - declare propn.intros [simp] -(*** Semantics of propositional logic ***) + +subsection {* The semantics *} + +subsubsection {* Semantics of propositional logic. 
*} -(** The function is_true **) +consts + is_true_fun :: "[i,i] => i" +primrec + "is_true_fun(Fls, t) = 0" + "is_true_fun(Var(v), t) = (if v \ t then 1 else 0)" + "is_true_fun(p=>q, t) = (if is_true_fun(p,t) = 1 then is_true_fun(q,t) else 1)" + +constdefs + is_true :: "[i,i] => o" + "is_true(p,t) == is_true_fun(p,t) = 1" + -- {* this definition is required since predicates can't be recursive *} lemma is_true_Fls [simp]: "is_true(Fls,t) <-> False" -by (simp add: is_true_def) + by (simp add: is_true_def) lemma is_true_Var [simp]: "is_true(#v,t) <-> v \ t" -by (simp add: is_true_def) + by (simp add: is_true_def) lemma is_true_Imp [simp]: "is_true(p=>q,t) <-> (is_true(p,t)-->is_true(q,t))" -by (simp add: is_true_def) + by (simp add: is_true_def) + + +subsubsection {* Logical consequence *} + +text {* + For every valuation, if all elements of @{text H} are true then so + is @{text p}. +*} + +constdefs + logcon :: "[i,i] => o" (infixl "|=" 50) + "H |= p == \t. (\q \ H. is_true(q,t)) --> is_true(p,t)" -(*** Proof theory of propositional logic ***) +text {* + A finite set of hypotheses from @{text t} and the @{text Var}s in + @{text p}. +*} + +consts + hyps :: "[i,i] => i" +primrec + "hyps(Fls, t) = 0" + "hyps(Var(v), t) = (if v \ t then {#v} else {#v=>Fls})" + "hyps(p=>q, t) = hyps(p,t) \ hyps(q,t)" + + + +subsection {* Proof theory of propositional logic *} lemma thms_mono: "G \ H ==> thms(G) \ thms(H)" -apply (unfold thms.defs ) -apply (rule lfp_mono) -apply (rule thms.bnd_mono)+ -apply (assumption | rule univ_mono basic_monos)+ -done + apply (unfold thms.defs) + apply (rule lfp_mono) + apply (rule thms.bnd_mono)+ + apply (assumption | rule univ_mono basic_monos)+ + done lemmas thms_in_pl = thms.dom_subset [THEN subsetD] inductive_cases ImpE: "p=>q \ propn" -(*Stronger Modus Ponens rule: no typechecking!*) lemma thms_MP: "[| H |- p=>q; H |- p |] ==> H |- q" -apply (rule thms.MP) -apply (erule asm_rl thms_in_pl thms_in_pl [THEN ImpE])+ -done + -- {* Stronger Modus Ponens rule: no typechecking! *} + apply (rule thms.MP) + apply (erule asm_rl thms_in_pl thms_in_pl [THEN ImpE])+ + done -(*Rule is called I for Identity Combinator, not for Introduction*) lemma thms_I: "p \ propn ==> H |- p=>p" -apply (rule thms.S [THEN thms_MP, THEN thms_MP]) -apply (rule_tac [5] thms.K) -apply (rule_tac [4] thms.K) -apply (simp_all add: propn.intros ) -done + -- {*Rule is called @{text I} for Identity Combinator, not for Introduction. 
*} + apply (rule thms.S [THEN thms_MP, THEN thms_MP]) + apply (rule_tac [5] thms.K) + apply (rule_tac [4] thms.K) + apply simp_all + done + -(** Weakening, left and right **) +subsubsection {* Weakening, left and right *} -(* [| G \ H; G|-p |] ==> H|-p Order of premises is convenient with RS*) -lemmas weaken_left = thms_mono [THEN subsetD, standard] +lemma weaken_left: "[| G \ H; G|-p |] ==> H|-p" + -- {* Order of premises is convenient with @{text THEN} *} + by (erule thms_mono [THEN subsetD]) -(* H |- p ==> cons(a,H) |- p *) -lemmas weaken_left_cons = subset_consI [THEN weaken_left] +lemma weaken_left_cons: "H |- p ==> cons(a,H) |- p" + by (erule subset_consI [THEN weaken_left]) lemmas weaken_left_Un1 = Un_upper1 [THEN weaken_left] lemmas weaken_left_Un2 = Un_upper2 [THEN weaken_left] lemma weaken_right: "[| H |- q; p \ propn |] ==> H |- p=>q" -by (simp_all add: thms.K [THEN thms_MP] thms_in_pl) + by (simp_all add: thms.K [THEN thms_MP] thms_in_pl) -(*The deduction theorem*) -lemma deduction: "[| cons(p,H) |- q; p \ propn |] ==> H |- p=>q" -apply (erule thms.induct) - apply (blast intro: thms_I thms.H [THEN weaken_right]) - apply (blast intro: thms.K [THEN weaken_right]) - apply (blast intro: thms.S [THEN weaken_right]) - apply (blast intro: thms.DN [THEN weaken_right]) -apply (blast intro: thms.S [THEN thms_MP [THEN thms_MP]]) -done +subsubsection {* The deduction theorem *} + +theorem deduction: "[| cons(p,H) |- q; p \ propn |] ==> H |- p=>q" + apply (erule thms.induct) + apply (blast intro: thms_I thms.H [THEN weaken_right]) + apply (blast intro: thms.K [THEN weaken_right]) + apply (blast intro: thms.S [THEN weaken_right]) + apply (blast intro: thms.DN [THEN weaken_right]) + apply (blast intro: thms.S [THEN thms_MP [THEN thms_MP]]) + done -(*The cut rule*) +subsubsection {* The cut rule *} + lemma cut: "[| H|-p; cons(p,H) |- q |] ==> H |- q" -apply (rule deduction [THEN thms_MP]) -apply (simp_all add: thms_in_pl) -done + apply (rule deduction [THEN thms_MP]) + apply (simp_all add: thms_in_pl) + done lemma thms_FlsE: "[| H |- Fls; p \ propn |] ==> H |- p" -apply (rule thms.DN [THEN thms_MP]) -apply (rule_tac [2] weaken_right) -apply (simp_all add: propn.intros) -done + apply (rule thms.DN [THEN thms_MP]) + apply (rule_tac [2] weaken_right) + apply (simp_all add: propn.intros) + done -(* [| H |- p=>Fls; H |- p; q \ propn |] ==> H |- q *) -lemmas thms_notE = thms_MP [THEN thms_FlsE, standard] +lemma thms_notE: "[| H |- p=>Fls; H |- p; q \ propn |] ==> H |- q" + by (erule thms_MP [THEN thms_FlsE]) + + +subsubsection {* Soundness of the rules wrt truth-table semantics *} -(*Soundness of the rules wrt truth-table semantics*) -lemma soundness: "H |- p ==> H |= p" -apply (unfold logcon_def) -apply (erule thms.induct) -apply auto -done +theorem soundness: "H |- p ==> H |= p" + apply (unfold logcon_def) + apply (erule thms.induct) + apply auto + done -(*** Towards the completeness proof ***) + +subsection {* Completeness *} + +subsubsection {* Towards the completeness proof *} lemma Fls_Imp: "[| H |- p=>Fls; q \ propn |] ==> H |- p=>q" -apply (frule thms_in_pl) -apply (rule deduction) -apply (rule weaken_left_cons [THEN thms_notE]) -apply (blast intro: thms.H elim: ImpE)+ -done + apply (frule thms_in_pl) + apply (rule deduction) + apply (rule weaken_left_cons [THEN thms_notE]) + apply (blast intro: thms.H elim: ImpE)+ + done lemma Imp_Fls: "[| H |- p; H |- q=>Fls |] ==> H |- (p=>q)=>Fls" -apply (frule thms_in_pl) -apply (frule thms_in_pl [of concl: "q=>Fls"]) -apply (rule deduction) -apply 
(erule weaken_left_cons [THEN thms_MP]) -apply (rule consI1 [THEN thms.H, THEN thms_MP]) -apply (blast intro: weaken_left_cons elim: ImpE)+ -done + apply (frule thms_in_pl) + apply (frule thms_in_pl [of concl: "q=>Fls"]) + apply (rule deduction) + apply (erule weaken_left_cons [THEN thms_MP]) + apply (rule consI1 [THEN thms.H, THEN thms_MP]) + apply (blast intro: weaken_left_cons elim: ImpE)+ + done -(*Typical example of strengthening the induction formula*) lemma hyps_thms_if: - "p \ propn ==> hyps(p,t) |- (if is_true(p,t) then p else p=>Fls)" -apply (simp (no_asm)) -apply (induct_tac "p") -apply (simp_all (no_asm_simp) add: thms_I thms.H) -apply (safe elim!: Fls_Imp [THEN weaken_left_Un1] - Fls_Imp [THEN weaken_left_Un2]) -apply (blast intro: weaken_left_Un1 weaken_left_Un2 weaken_right Imp_Fls)+ -done + "p \ propn ==> hyps(p,t) |- (if is_true(p,t) then p else p=>Fls)" + -- {* Typical example of strengthening the induction statement. *} + apply simp + apply (induct_tac p) + apply (simp_all add: thms_I thms.H) + apply (safe elim!: Fls_Imp [THEN weaken_left_Un1] Fls_Imp [THEN weaken_left_Un2]) + apply (blast intro: weaken_left_Un1 weaken_left_Un2 weaken_right Imp_Fls)+ + done + +lemma logcon_thms_p: "[| p \ propn; 0 |= p |] ==> hyps(p,t) |- p" + -- {* Key lemma for completeness; yields a set of assumptions satisfying @{text p} *} + apply (drule hyps_thms_if) + apply (simp add: logcon_def) + done + +text {* + For proving certain theorems in our new propositional logic. +*} -(*Key lemma for completeness; yields a set of assumptions satisfying p*) -lemma logcon_thms_p: "[| p \ propn; 0 |= p |] ==> hyps(p,t) |- p" -apply (unfold logcon_def) -apply (drule hyps_thms_if) -apply simp -done +lemmas propn_SIs = propn.intros deduction + and propn_Is = thms_in_pl thms.H thms.H [THEN thms_MP] + +text {* + The excluded middle in the form of an elimination rule. +*} +lemma thms_excluded_middle: + "[| p \ propn; q \ propn |] ==> H |- (p=>q) => ((p=>Fls)=>q) => q" + apply (rule deduction [THEN deduction]) + apply (rule thms.DN [THEN thms_MP]) + apply (best intro!: propn_SIs intro: propn_Is)+ + done -(*For proving certain theorems in our new propositional logic*) -lemmas propn_SIs = propn.intros deduction -lemmas propn_Is = thms_in_pl thms.H thms.H [THEN thms_MP] +lemma thms_excluded_middle_rule: + "[| cons(p,H) |- q; cons(p=>Fls,H) |- q; p \ propn |] ==> H |- q" + -- {* Hard to prove directly because it requires cuts *} + apply (rule thms_excluded_middle [THEN thms_MP, THEN thms_MP]) + apply (blast intro!: propn_SIs intro: propn_Is)+ + done -(*The excluded middle in the form of an elimination rule*) -lemma thms_excluded_middle: - "[| p \ propn; q \ propn |] ==> H |- (p=>q) => ((p=>Fls)=>q) => q" -apply (rule deduction [THEN deduction]) -apply (rule thms.DN [THEN thms_MP]) -apply (best intro!: propn_SIs intro: propn_Is)+ -done +subsubsection {* Completeness -- lemmas for reducing the set of assumptions *} -(*Hard to prove directly because it requires cuts*) -lemma thms_excluded_middle_rule: - "[| cons(p,H) |- q; cons(p=>Fls,H) |- q; p \ propn |] ==> H |- q" -apply (rule thms_excluded_middle [THEN thms_MP, THEN thms_MP]) -apply (blast intro!: propn_SIs intro: propn_Is)+ -done +text {* + For the case @{prop "hyps(p,t)-cons(#v,Y) |- p"} we also have @{prop + "hyps(p,t)-{#v} \ hyps(p, t-{v})"}. 
+*} +lemma hyps_Diff: + "p \ propn ==> hyps(p, t-{v}) \ cons(#v=>Fls, hyps(p,t)-{#v})" + by (induct_tac p) auto -(*** Completeness -- lemmas for reducing the set of assumptions ***) - -(*For the case hyps(p,t)-cons(#v,Y) |- p - we also have hyps(p,t)-{#v} \ hyps(p, t-{v}) *) -lemma hyps_Diff: - "p \ propn ==> hyps(p, t-{v}) \ cons(#v=>Fls, hyps(p,t)-{#v})" -by (induct_tac "p", auto) +text {* + For the case @{prop "hyps(p,t)-cons(#v => Fls,Y) |- p"} we also have + @{prop "hyps(p,t)-{#v=>Fls} \ hyps(p, cons(v,t))"}. +*} -(*For the case hyps(p,t)-cons(#v => Fls,Y) |- p - we also have hyps(p,t)-{#v=>Fls} \ hyps(p, cons(v,t)) *) lemma hyps_cons: - "p \ propn ==> hyps(p, cons(v,t)) \ cons(#v, hyps(p,t)-{#v=>Fls})" -by (induct_tac "p", auto) + "p \ propn ==> hyps(p, cons(v,t)) \ cons(#v, hyps(p,t)-{#v=>Fls})" + by (induct_tac p) auto -(** Two lemmas for use with weaken_left **) +text {* Two lemmas for use with @{text weaken_left} *} lemma cons_Diff_same: "B-C \ cons(a, B-cons(a,C))" -by blast + by blast lemma cons_Diff_subset2: "cons(a, B-{c}) - D \ cons(a, B-cons(c,D))" -by blast + by blast -(*The set hyps(p,t) is finite, and elements have the form #v or #v=>Fls - could probably prove the stronger hyps(p,t) \ Fin(hyps(p,0) Un hyps(p,nat))*) +text {* + The set @{term "hyps(p,t)"} is finite, and elements have the form + @{term "#v"} or @{term "#v=>Fls"}; could probably prove the stronger + @{prop "hyps(p,t) \ Fin(hyps(p,0) \ hyps(p,nat))"}. +*} + lemma hyps_finite: "p \ propn ==> hyps(p,t) \ Fin(\v \ nat. {#v, #v=>Fls})" -by (induct_tac "p", auto) + by (induct_tac p) auto lemmas Diff_weaken_left = Diff_mono [OF _ subset_refl, THEN weaken_left] -(*Induction on the finite set of assumptions hyps(p,t0). - We may repeatedly subtract assumptions until none are left!*) +text {* + Induction on the finite set of assumptions @{term "hyps(p,t0)"}. We + may repeatedly subtract assumptions until none are left! +*} + lemma completeness_0_lemma [rule_format]: "[| p \ propn; 0 |= p |] ==> \t. 
hyps(p,t) - hyps(p,t0) |- p" -apply (frule hyps_finite) -apply (erule Fin_induct) -apply (simp add: logcon_thms_p Diff_0) -(*inductive step*) -apply safe -(*Case hyps(p,t)-cons(#v,Y) |- p *) - apply (rule thms_excluded_middle_rule) - apply (erule_tac [3] propn.intros) + apply (frule hyps_finite) + apply (erule Fin_induct) + apply (simp add: logcon_thms_p Diff_0) + txt {* inductive step *} + apply safe + txt {* Case @{prop "hyps(p,t)-cons(#v,Y) |- p"} *} + apply (rule thms_excluded_middle_rule) + apply (erule_tac [3] propn.intros) + apply (blast intro: cons_Diff_same [THEN weaken_left]) + apply (blast intro: cons_Diff_subset2 [THEN weaken_left] + hyps_Diff [THEN Diff_weaken_left]) + txt {* Case @{prop "hyps(p,t)-cons(#v => Fls,Y) |- p"} *} + apply (rule thms_excluded_middle_rule) + apply (erule_tac [3] propn.intros) + apply (blast intro: cons_Diff_subset2 [THEN weaken_left] + hyps_cons [THEN Diff_weaken_left]) apply (blast intro: cons_Diff_same [THEN weaken_left]) - apply (blast intro: cons_Diff_subset2 [THEN weaken_left] - hyps_Diff [THEN Diff_weaken_left]) -(*Case hyps(p,t)-cons(#v => Fls,Y) |- p *) -apply (rule thms_excluded_middle_rule) - apply (erule_tac [3] propn.intros) - apply (blast intro: cons_Diff_subset2 [THEN weaken_left] - hyps_cons [THEN Diff_weaken_left]) -apply (blast intro: cons_Diff_same [THEN weaken_left]) -done + done + + +subsubsection {* Completeness theorem *} -(*The base case for completeness*) lemma completeness_0: "[| p \ propn; 0 |= p |] ==> 0 |- p" -apply (rule Diff_cancel [THEN subst]) -apply (blast intro: completeness_0_lemma) -done + -- {* The base case for completeness *} + apply (rule Diff_cancel [THEN subst]) + apply (blast intro: completeness_0_lemma) + done -(*A semantic analogue of the Deduction Theorem*) lemma logcon_Imp: "[| cons(p,H) |= q |] ==> H |= p=>q" -by (simp add: logcon_def) + -- {* A semantic analogue of the Deduction Theorem *} + by (simp add: logcon_def) lemma completeness [rule_format]: "H \ Fin(propn) ==> \p \ propn. 
H |= p --> H |- p" -apply (erule Fin_induct) -apply (safe intro!: completeness_0) -apply (rule weaken_left_cons [THEN thms_MP]) - apply (blast intro!: logcon_Imp propn.intros) -apply (blast intro: propn_Is) -done + apply (erule Fin_induct) + apply (safe intro!: completeness_0) + apply (rule weaken_left_cons [THEN thms_MP]) + apply (blast intro!: logcon_Imp propn.intros) + apply (blast intro: propn_Is) + done -lemma thms_iff: "H \ Fin(propn) ==> H |- p <-> H |= p & p \ propn" -by (blast intro: soundness completeness thms_in_pl) +theorem thms_iff: "H \ Fin(propn) ==> H |- p <-> H |= p \ p \ propn" + by (blast intro: soundness completeness thms_in_pl) end diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Rmap.thy --- a/src/ZF/Induct/Rmap.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Rmap.thy Sat Dec 29 18:36:12 2001 +0100 @@ -1,10 +1,10 @@ -(* Title: ZF/ex/Rmap +(* Title: ZF/Induct/Rmap.thy ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1994 University of Cambridge +*) -Inductive definition of an operator to "map" a relation over a list -*) +header {* An operator to ``map'' a relation over a list *} theory Rmap = Main: @@ -12,60 +12,60 @@ rmap :: "i=>i" inductive - domains "rmap(r)" <= "list(domain(r))*list(range(r))" + domains "rmap(r)" \ "list(domain(r)) \ list(range(r))" intros NilI: " \ rmap(r)" - ConsI: "[| : r; \ rmap(r) |] + ConsI: "[| : r; \ rmap(r) |] ==> \ rmap(r)" type_intros domainI rangeI list.intros lemma rmap_mono: "r \ s ==> rmap(r) \ rmap(s)" -apply (unfold rmap.defs ) -apply (rule lfp_mono) -apply (rule rmap.bnd_mono)+ -apply (assumption | - rule Sigma_mono list_mono domain_mono range_mono basic_monos)+ -done + apply (unfold rmap.defs) + apply (rule lfp_mono) + apply (rule rmap.bnd_mono)+ + apply (assumption | rule Sigma_mono list_mono domain_mono range_mono basic_monos)+ + done -inductive_cases Nil_rmap_case [elim!]: " \ rmap(r)" -inductive_cases Cons_rmap_case [elim!]: " \ rmap(r)" +inductive_cases + Nil_rmap_case [elim!]: " \ rmap(r)" + and Cons_rmap_case [elim!]: " \ rmap(r)" declare rmap.intros [intro] -lemma rmap_rel_type: "r \ A*B ==> rmap(r) \ list(A)*list(B)" -apply (rule rmap.dom_subset [THEN subset_trans]) -apply (assumption | - rule domain_rel_subset range_rel_subset Sigma_mono list_mono)+ -done +lemma rmap_rel_type: "r \ A \ B ==> rmap(r) \ list(A) \ list(B)" + apply (rule rmap.dom_subset [THEN subset_trans]) + apply (assumption | + rule domain_rel_subset range_rel_subset Sigma_mono list_mono)+ + done lemma rmap_total: "A \ domain(r) ==> list(A) \ domain(rmap(r))" -apply (rule subsetI) -apply (erule list.induct) -apply blast+ -done + apply (rule subsetI) + apply (erule list.induct) + apply blast+ + done lemma rmap_functional: "function(r) ==> function(rmap(r))" -apply (unfold function_def) -apply (rule impI [THEN allI, THEN allI]) -apply (erule rmap.induct) -apply blast+ -done + apply (unfold function_def) + apply (rule impI [THEN allI, THEN allI]) + apply (erule rmap.induct) + apply blast+ + done - -(** If f is a function then rmap(f) behaves as expected. **) +text {* + \medskip If @{text f} is a function then @{text "rmap(f)"} behaves + as expected. 
+*} lemma rmap_fun_type: "f \ A->B ==> rmap(f): list(A)->list(B)" -by (simp add: Pi_iff rmap_rel_type rmap_functional rmap_total) + by (simp add: Pi_iff rmap_rel_type rmap_functional rmap_total) lemma rmap_Nil: "rmap(f)`Nil = Nil" -by (unfold apply_def, blast) + by (unfold apply_def) blast -lemma rmap_Cons: "[| f \ A->B; x \ A; xs: list(A) |] +lemma rmap_Cons: "[| f \ A->B; x \ A; xs: list(A) |] ==> rmap(f) ` Cons(x,xs) = Cons(f`x, rmap(f)`xs)" -by (blast intro: apply_equality apply_Pair rmap_fun_type rmap.intros) - + by (blast intro: apply_equality apply_Pair rmap_fun_type rmap.intros) end - diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Term.thy --- a/src/ZF/Induct/Term.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Term.thy Sat Dec 29 18:36:12 2001 +0100 @@ -167,7 +167,8 @@ *} lemma term_map [simp]: - "ts \ list(A) ==> term_map(f, Apply(a, ts)) = Apply(f(a), map(term_map(f), ts))" + "ts \ list(A) ==> + term_map(f, Apply(a, ts)) = Apply(f(a), map(term_map(f), ts))" by (rule term_map_def [THEN def_term_rec]) lemma term_map_type [TC]: @@ -289,7 +290,8 @@ \medskip Theorems about preorder. *} -lemma preorder_term_map: "t \ term(A) ==> preorder(term_map(f,t)) = map(f, preorder(t))" +lemma preorder_term_map: + "t \ term(A) ==> preorder(term_map(f,t)) = map(f, preorder(t))" apply (erule term_induct_eqn) apply (simp add: map_flat) done diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/Tree_Forest.thy --- a/src/ZF/Induct/Tree_Forest.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/Tree_Forest.thy Sat Dec 29 18:36:12 2001 +0100 @@ -54,7 +54,8 @@ done lemma (notes rews = tree_forest.con_defs tree_def forest_def) - tree_forest_unfold: "tree_forest(A) = (A \ forest(A)) + ({0} + tree(A) \ forest(A))" + tree_forest_unfold: "tree_forest(A) = + (A \ forest(A)) + ({0} + tree(A) \ forest(A))" -- {* NOT useful, but interesting \dots *} apply (unfold tree_def forest_def) apply (fast intro!: tree_forest.intros [unfolded rews, THEN PartD1] @@ -206,7 +207,8 @@ !!t f. [| t \ tree(A); f \ forest(A); R(f) |] ==> R(Fcons(t,f)) |] ==> R(f)" -- {* Essentially the same as list induction. 
*} - apply (erule tree_forest.mutual_induct [THEN conjunct2, THEN spec, THEN [2] rev_mp]) + apply (erule tree_forest.mutual_induct + [THEN conjunct2, THEN spec, THEN [2] rev_mp]) apply (rule TrueI) apply simp apply simp diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Induct/document/root.tex --- a/src/ZF/Induct/document/root.tex Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Induct/document/root.tex Sat Dec 29 18:36:12 2001 +0100 @@ -2,11 +2,8 @@ \documentclass[11pt,a4paper]{article} \usepackage{isabelle,isabellesym} \usepackage[latin1]{inputenc} - -% this should be the last package used \usepackage{pdfsetup} -% proper setup for best-style documents \urlstyle{rm} \isabellestyle{it} @@ -22,4 +19,7 @@ \parindent 0pt\parskip 0.5ex \input{session} +\bibliographystyle{abbrv} +\bibliography{root} + \end{document} diff -r fb073a34b537 -r 8b9845807f77 src/ZF/IsaMakefile --- a/src/ZF/IsaMakefile Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/IsaMakefile Sat Dec 29 18:36:12 2001 +0100 @@ -84,8 +84,8 @@ ZF-IMP: ZF $(LOG)/ZF-IMP.gz -$(LOG)/ZF-IMP.gz: $(OUT)/ZF IMP/Com.thy \ - IMP/Denotation.thy IMP/Equiv.thy IMP/ROOT.ML +$(LOG)/ZF-IMP.gz: $(OUT)/ZF IMP/Com.thy IMP/Denotation.thy IMP/Equiv.thy \ + IMP/ROOT.ML IMP/document/root.bib IMP/document/root.tex @$(ISATOOL) usedir $(OUT)/ZF IMP diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Resid/Redex.thy --- a/src/ZF/Resid/Redex.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Resid/Redex.thy Sat Dec 29 18:36:12 2001 +0100 @@ -125,16 +125,17 @@ declare Scomp.intros [intro] -inductive_cases [elim!]: "regular(App(b,f,a))" -inductive_cases [elim!]: "regular(Fun(b))" -inductive_cases [elim!]: "regular(Var(b))" -inductive_cases [elim!]: "Fun(u) ~ Fun(t)" -inductive_cases [elim!]: "u ~ Fun(t)" -inductive_cases [elim!]: "u ~ Var(n)" -inductive_cases [elim!]: "u ~ App(b,t,a)" -inductive_cases [elim!]: "Fun(t) ~ v" -inductive_cases [elim!]: "App(b,f,a) ~ v" -inductive_cases [elim!]: "Var(n) ~ u" +inductive_cases [elim!]: + "regular(App(b,f,a))" + "regular(Fun(b))" + "regular(Var(b))" + "Fun(u) ~ Fun(t)" + "u ~ Fun(t)" + "u ~ Var(n)" + "u ~ App(b,t,a)" + "Fun(t) ~ v" + "App(b,f,a) ~ v" + "Var(n) ~ u" diff -r fb073a34b537 -r 8b9845807f77 src/ZF/Resid/Residuals.thy --- a/src/ZF/Resid/Residuals.thy Sat Dec 29 18:35:27 2001 +0100 +++ b/src/ZF/Resid/Residuals.thy Sat Dec 29 18:36:12 2001 +0100 @@ -42,25 +42,28 @@ declare Sreg.intros [intro] declare subst_type [intro] -inductive_cases [elim!]: "residuals(Var(n),Var(n),v)" -inductive_cases [elim!]: "residuals(Fun(t),Fun(u),v)" -inductive_cases [elim!]: "residuals(App(b, u1, u2), App(0, v1, v2),v)" -inductive_cases [elim!]: "residuals(App(b, u1, u2), App(1, Fun(v1), v2),v)" -inductive_cases [elim!]: "residuals(Var(n),u,v)" -inductive_cases [elim!]: "residuals(Fun(t),u,v)" -inductive_cases [elim!]: "residuals(App(b, u1, u2), w,v)" -inductive_cases [elim!]: "residuals(u,Var(n),v)" -inductive_cases [elim!]: "residuals(u,Fun(t),v)" -inductive_cases [elim!]: "residuals(w,App(b, u1, u2),v)" +inductive_cases [elim!]: + "residuals(Var(n),Var(n),v)" + "residuals(Fun(t),Fun(u),v)" + "residuals(App(b, u1, u2), App(0, v1, v2),v)" + "residuals(App(b, u1, u2), App(1, Fun(v1), v2),v)" + "residuals(Var(n),u,v)" + "residuals(Fun(t),u,v)" + "residuals(App(b, u1, u2), w,v)" + "residuals(u,Var(n),v)" + "residuals(u,Fun(t),v)" + "residuals(w,App(b, u1, u2),v)" -inductive_cases [elim!]: "Var(n) <== u" -inductive_cases [elim!]: "Fun(n) <== u" -inductive_cases [elim!]: "u <== Fun(n)" -inductive_cases [elim!]: "App(1,Fun(t),a) <== u" -inductive_cases [elim!]: 
"App(0,t,a) <== u" +inductive_cases [elim!]: + "Var(n) <== u" + "Fun(n) <== u" + "u <== Fun(n)" + "App(1,Fun(t),a) <== u" + "App(0,t,a) <== u" -inductive_cases [elim!]: "Fun(t):redexes" +inductive_cases [elim!]: + "Fun(t) \ redexes" declare Sres.intros [simp]