# HG changeset patch
# User urbanc
# Date 1199830268 -3600
# Node ID c24395ea4e71e0b99b0b418f2d3d84d476c3dc81
# Parent  263aaf988d444c28711be681ad4c0fcba5811552
tuned proofs

diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/CR_Takahashi.thy
--- a/src/HOL/Nominal/Examples/CR_Takahashi.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/CR_Takahashi.thy Tue Jan 08 23:11:08 2008 +0100
@@ -10,7 +10,7 @@
 (* Church-Rosser Theorem (1995). *)
 
 theory CR_Takahashi
-imports "../Nominal"
+  imports "../Nominal"
 begin
 
 atom_decl name
@@ -63,12 +63,11 @@
 using a
 by (nominal_induct t avoiding: x y s u rule: lam.induct)
    (auto simp add: fresh_fact forget)
-
 lemma subst_rename:
   assumes a: "y\t"
   shows "t[x::=s] = ([(y,x)]\t)[y::=s]"
 using a
 by (nominal_induct t avoiding: x y s rule: lam.induct)
-   (auto simp add: calc_atm fresh_atm abs_fresh)
+  (auto simp add: calc_atm fresh_atm abs_fresh)
 
 section {* Beta-Reduction *}
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/Contexts.thy
--- a/src/HOL/Nominal/Examples/Contexts.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/Contexts.thy Tue Jan 08 23:11:08 2008 +0100
@@ -10,7 +10,7 @@
   reduction relations (based on congruence rules) is
   equivalent to the Felleisen-Hieb-style representation
   (based on contexts), and show cbv-evaluation via a
-  list-machine described by Roshan James.
+  CK-machine described by Roshan James.
 
   The interesting point is that contexts do not contain
   any binders. On the other hand the operation of filling
@@ -20,15 +20,17 @@
 
 atom_decl name
 
-text {* Terms *}
+text {*
+  Lambda-Terms - the Lam-term constructor binds a name
+*}
 nominal_datatype lam =
     Var "name"
   | App "lam" "lam"
   | Lam "\name\lam" ("Lam [_]._" [100,100] 100)
 
 text {*
-  Contexts - the lambda case in contexts does not bind a name
-  even if we introduce the nomtation [_]._ for CLam.
+  Contexts - the lambda case in contexts does not bind a name,
+  even if we introduce the notation [_]._ for CLam.
 *}
 nominal_datatype ctx =
     Hole ("\" 1000)
@@ -36,7 +38,7 @@
   | CAppR "lam" "ctx"
   | CLam "name" "ctx" ("CLam [_]._" [100,100] 100)
 
-text {* Capture-avoiding substitution and two lemmas *}
+text {* Capture-avoiding substitution *}
 
 consts subst :: "lam \ name \ lam \ lam" ("_[_::=_]" [100,100,100] 100)
 
@@ -50,7 +52,10 @@
 apply(fresh_guess)+
 done
 
-text {* Filling is the operation that fills a term into a hole. *}
+text {*
+  Filling is the operation that fills a term into a hole.
+  This operation is possibly capturing.
+*}
 
 consts filling :: "ctx \ lam \ lam" ("_<_>" [100,100] 100)
 
@@ -71,10 +76,10 @@
   and   "(CLam [x].\) = (CLam [y].\)"
 by (auto simp add: alpha ctx.inject lam.inject calc_atm fresh_atm)
 
-text {* The composition of two contexts *}
+text {* The composition of two contexts. *}
 
 consts
-  ctx_replace :: "ctx \ ctx \ ctx" ("_ \ _" [100,100] 100)
+  ctx_compose :: "ctx \ ctx \ ctx" ("_ \ _" [100,100] 100)
 
 nominal_primrec
   "\ \ E' = E'"
@@ -134,15 +139,15 @@
   then show "App (Lam [x].t) t' \x t[x::=t']" by simp
 qed (metis ctx_red_in_ctx filling.simps)+ (* found by SledgeHammer *)
 
-section {* We now use context in a cbv list machine *}
+section {* We now use context in a CBV list machine *}
 
 text {* First the function that composes a list of contexts *}
 
-consts
-  ctx_composes :: "ctx list \ ctx" ("_\" [80] 80)
 primrec
-  "[]\ = \"
-  "(E#Es)\ = (Es\) \ E"
+  ctx_composes :: "ctx list \ ctx" ("_\" [80] 80)
+where
+    "[]\ = \"
+  | "(E#Es)\ = (Es\) \ E"
 
 text {* Values *}
 inductive
   val :: "lam\bool"
where
   v_Lam[intro]: "val (Lam [x].e)"
 | v_Var[intro]: "val (Var x)"
-
-text {* CBV reduction using contexts *}
+text {* CBV-reduction using contexts *}
 
 inductive
   cbv :: "lam\lam\bool" ("_ \cbv _" [80,80] 80)
where
@@ -166,7 +170,7 @@
 | cbvs2[intro]: "e \cbv e' \ e \cbv* e'"
 | cbvs3[intro,trans]: "\e1\cbv* e2; e2 \cbv* e3\ \ e1 \cbv* e3"
-text {* A list machine which builds up a list of contexts *}
+text {* A little CK-machine, which builds up a list of evaluation contexts. *}
 inductive
   machine :: "lam\ctx list\lam\ctx list\bool" ("<_,_> \ <_,_>" [60,60,60,60] 60)
where
@@ -174,13 +178,6 @@
 | m2[intro]: "val v \ e2)#Es> \ )#Es>"
 | m3[intro]: "val v \ )#Es> \ "
 
-inductive
-  "machine_star" :: "lam\ctx list\lam\ctx list\bool" ("<_,_> \* <_,_>" [60,60,60,60] 60)
-where
-  ms1[intro]: " \* "
-| ms2[intro]: " \ \ \* "
-| ms3[intro,trans]: "\ \* ; \* \ \ \* "
-
 lemma machine_red_implies_cbv_reds_aux:
   assumes a: " \ "
   shows "(Es\) \cbv* (Es'\)"
@@ -193,4 +190,10 @@
 using a
 by (auto dest: machine_red_implies_cbv_reds_aux)
 
+text {*
+  One would now like to show something about the opposite
+  direction, but according to Amr Sabry this amounts to
+  showing a standardisation lemma, which is hard.
+*}
+
 end
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/Lambda_mu.thy
--- a/src/HOL/Nominal/Examples/Lambda_mu.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/Lambda_mu.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,7 +1,7 @@
 (* $Id$ *)
 
 theory Lambda_mu
-imports "../Nominal"
+  imports "../Nominal"
 begin
 
 section {* Lambda-Mu according to a paper by Gavin Bierman *}
@@ -12,7 +12,8 @@
     Var "var"
   | Lam "\var\trm" ("Lam [_]._" [100,100] 100)
   | App "trm" "trm"
-  | Pss "mvar" "trm"
-  | Act "\mvar\trm" ("Act [_]._" [100,100] 100)
+  | Pss "mvar" "trm"                              (* passivate *)
+  | Act "\mvar\trm" ("Act [_]._" [100,100] 100)   (* activate *)
+
 end
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/LocalWeakening.thy
--- a/src/HOL/Nominal/Examples/LocalWeakening.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/LocalWeakening.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,19 +1,20 @@
 (* $Id$ *)
 (* Formalisation of weakening using locally nameless *)
-(* terms; the nominal infrastructure can derive *)
+(* terms; the nominal infrastructure can also derive *)
 (* strong induction principles for such representations *)
 (* *)
 (* Authors: Christian Urban and Randy Pollack *)
 
 theory LocalWeakening
-imports "../Nominal"
+  imports "../Nominal"
 begin
 
 atom_decl name
 
-text {* Curry-style lambda terms in locally nameless
-        representation without any binders *}
-
+text {*
+  Curry-style lambda terms in locally nameless
+  representation without any binders
+*}
 nominal_datatype llam =
     lPar "name"
  | lVar "nat"
@@ -78,13 +79,6 @@
   TVar "nat"
 | TArr "ty" "ty" (infix "\" 200)
 
-lemma ty_perm[simp]:
-  fixes pi ::"name prm"
-  and   T ::"ty"
-  shows "pi\T = T"
-by (induct T rule: ty.weak_induct)
-   (simp_all add: perm_nat_def)
-
 lemma ty_fresh[simp]:
   fixes x::"name"
   and   T::"ty"
@@ -107,27 +101,28 @@
 lemma v2_elim:
   assumes a: "valid ((a,T)#\)"
   shows "a\\ \ valid \"
-using valid.cases[OF a]
-by (auto)
+using a by (cases) (auto)
 
 text {* "weak" typing relation *}
 
 inductive
   typing :: "cxt\llam\ty\bool" (" _ \ _ : _ " [60,60,60] 60)
 where
-  t_lPar[intro]: "\valid \; (x,T)\set \\\ \ \ lPar x : T"
-  | t_lApp[intro]: "\\ \ t1 : T1\T2; \ \ t2 : T1\\ \ \ lApp t1 t2 : T2"
-  | t_lLam[intro]: "\x\t; (x,T1)#\ \ freshen t x : T2\\ \ \ lLam t : T1\T2"
+  t_lPar[intro]: "\valid \; (x,T)\set \\\ \ \ lPar x : T"
+| t_lApp[intro]: "\\ \ t1 : T1\T2; \ \ t2 : T1\\ \ \ lApp t1 t2 : T2"
+| t_lLam[intro]: "\x\t; (x,T1)#\ \ freshen t x : T2\\ \ \ lLam t : T1\T2"
 
 equivariance typing
 
 lemma typing_implies_valid:
   assumes a: "\ \ t : T"
   shows "valid \"
-using a
-by (induct) (auto dest: v2_elim)
+using a by (induct) (auto dest: v2_elim)
 
-text {* we explicitly have to say to strengthen over the variable x *}
+text {*
+  we have to explicitly state that nominal_inductive should strengthen
+  over the variable x (since x is not a binder)
+*}
 nominal_inductive typing
   avoids t_lLam: x
   by (auto simp add: fresh_prod dest: v2_elim typing_implies_valid)
@@ -182,7 +177,8 @@
   ultimately have "(x,T1)#\2 \ freshen t x : T2" using ih by simp
   with vc show "\2 \ lLam t : T1\T2" by auto
 next
-  case t_lApp thus ?case by auto
+  case (t_lApp \1 t1 T1 T2 t2 \2)
+  then show "\2 \ lApp t1 t2 : T2" by auto
 qed
 
 end
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/SN.thy
--- a/src/HOL/Nominal/Examples/SN.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/SN.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,7 +1,7 @@
 (* $Id$ *)
 
 theory SN
-imports Lam_Funs
+  imports Lam_Funs
 begin
 
 text {* Strong Normalisation proof from the Proofs and Types book *}
@@ -104,27 +104,12 @@
   TVar "nat"
 | TArr "ty" "ty" (infix "\" 200)
 
-lemma perm_ty:
-  fixes pi ::"name prm"
-  and   \ ::"ty"
-  shows "pi\\ = \"
-by (nominal_induct \ rule: ty.induct)
-   (simp_all add: perm_nat_def)
-
 lemma fresh_ty:
   fixes a ::"name"
  and   \ ::"ty"
  shows "a\\"
-  by (simp add: fresh_def perm_ty supp_def)
-
-(* domain of a typing context *)
-
-fun
-  "dom_ty" :: "(name\ty) list \ (name list)"
-where
-  "dom_ty [] = []"
-| "dom_ty ((x,\)#\) = (x)#(dom_ty \)"
-
+by (nominal_induct \ rule: ty.induct)
+   (auto simp add: fresh_nat)
 
 (* valid contexts *)
@@ -136,8 +121,6 @@
 
 equivariance valid
 
-inductive_cases valid_elim[elim]: "valid ((a,\)#\)"
-
 (* typing judgements *)
 
 lemma fresh_context:
@@ -161,12 +144,7 @@
 nominal_inductive typing
   by (simp_all add: abs_fresh fresh_ty)
 
-abbreviation
-  "sub" :: "(name\ty) list \ (name\ty) list \ bool" ("_ \ _" [60,60] 60)
-where
-  "\1 \ \2 \ \a \. (a,\)\set \1 \ (a,\)\set \2"
-
-subsection {* some facts about beta *}
+subsection {* a fact about beta *}
 
 constdefs
   "NORMAL" :: "lam \ bool"
@@ -177,11 +155,12 @@
 proof -
   { assume "\t'. (Var a) \\<^isub>\ t'"
     then obtain t' where "(Var a) \\<^isub>\ t'" by blast
-    hence False by (cases, auto)
+    hence False by (cases) (auto)
   }
-  thus "NORMAL (Var a)" by (force simp add: NORMAL_def)
+  thus "NORMAL (Var a)" by (auto simp add: NORMAL_def)
 qed
 
+text {* Inductive version of Strong Normalisation *}
 inductive
   SN :: "lam \ bool"
 where
@@ -249,7 +228,7 @@
     "RED (\\\) = {t. \u. (u\RED \ \ (App t u)\RED \)}"
 by (rule TrueI)+
 
-(* neutral terms *)
+text {* neutral terms *}
 constdefs
   NEUT :: "lam \ bool"
   "NEUT t \ (\a. t = Var a) \ (\t1 t2. t = App t1 t2)"
@@ -358,7 +337,7 @@
   ultimately show "App t u \ RED \" using c3 by (simp add: CR3_def)
 qed
 
-(* properties of the candiadates *)
+text {* properties of the candidates *}
 lemma RED_props:
   shows "CR1 \" and "CR2 \" and "CR3 \"
 proof (nominal_induct \ rule: ty.induct)
@@ -374,10 +353,11 @@
   { case 1
     have ih_CR3_\1: "CR3 \1" by fact
     have ih_CR1_\2: "CR1 \2" by fact
-    show "CR1 (\1 \ \2)" unfolding CR1_def
-    proof (simp, intro strip)
+    have "\t. t \ RED (\1 \ \2) \ SN t"
+    proof -
      fix t
-      assume a: "\u. u \ RED \1 \ App t u \ RED \2"
+      assume "t \ RED (\1 \ \2)"
+      then have a: "\u. u \ RED \1 \ App t u \ RED \2" by simp
      from ih_CR3_\1 have "CR4 \1" by (simp add: CR3_implies_CR4)
      moreover
      have "NEUT (Var a)" by (force simp add: NEUT_def)
@@ -386,20 +366,13 @@
      ultimately have "(Var a)\ RED \1" by (simp add: CR4_def)
      with a have "App t (Var a) \ RED \2" by simp
      hence "SN (App t (Var a))" using ih_CR1_\2 by (simp add: CR1_def)
-      thus "SN(t)" by (auto dest: SN_of_FST_of_App)
+      thus "SN t" by (auto dest: SN_of_FST_of_App)
    qed
+    then show "CR1 (\1 \ \2)" unfolding CR1_def by simp
  next
    case 2
-    have ih_CR1_\1: "CR1 \1" by fact
    have ih_CR2_\2: "CR2 \2" by fact
-    show "CR2 (\1 \ \2)" unfolding CR2_def
-    proof (simp, intro strip)
-      fix t1 t2 u
-      assume "(\u. u \ RED \1 \ App t1 u \ RED \2) \ t1 \\<^isub>\ t2"
-      and    "u \ RED \1"
-      hence "t1 \\<^isub>\ t2" and "App t1 u \ RED \2" by simp_all
-      thus "App t2 u \ RED \2" using ih_CR2_\2 by (auto simp add: CR2_def)
-    qed
+    then show "CR2 (\1 \ \2)" unfolding CR2_def by auto
  next
    case 3
    have ih_CR1_\1: "CR1 \1" by fact
@@ -416,7 +389,10 @@
   }
 qed
 
-(* not as simple as on paper, because of the stronger double_SN induction *)
+text {*
+  the next lemma is not as simple as on paper, probably because of
+  the stronger double_SN induction
+*}
 lemma abs_RED:
   assumes asm: "\s\RED \. t[x::=s]\RED \"
  shows "Lam [x].t\RED (\\\)"
@@ -462,14 +438,17 @@
 apply(drule_tac x="t'" in meta_spec)
 apply(simp)
 apply(drule meta_mp)
-apply(auto)
+prefer 2
+apply(auto)[1]
+apply(rule ballI)
 apply(drule_tac x="s" in bspec)
 apply(simp)
-apply(subgoal_tac "CR2 \")
+apply(subgoal_tac "CR2 \")(*A*)
 apply(unfold CR2_def)[1]
 apply(drule_tac x="t[x::=s]" in spec)
 apply(drule_tac x="t'[x::=s]" in spec)
 apply(simp add: beta_subst)
+(*A*)
 apply(simp add: RED_props)
 done
 then have "r\RED \" using a2 by simp
@@ -497,9 +476,12 @@
   then have "r\RED \" using as1 as2 by auto
 }
 ultimately show "r \ RED \"
+(* one wants to use the strong elimination principle; for this one
+   has to know that x\u *)
 apply(cases)
 apply(auto simp add: lam.inject)
 apply(drule beta_abs)
+apply(auto)[1]
 apply(auto simp add: alpha subst_rename)
 done
 qed
@@ -513,7 +495,7 @@
 abbreviation
   mapsto :: "(name\lam) list \ name \ lam \ bool" ("_ maps _ to _" [55,55,55] 55)
 where
-  "\ maps x to e\ (lookup \ x) = e"
+  "\ maps x to e \ (lookup \ x) = e"
 
 abbreviation
   closes :: "(name\lam) list \ (name\ty) list \ bool" ("_ closes _" [55,55] 55)
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/SOS.thy
--- a/src/HOL/Nominal/Examples/SOS.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/SOS.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,15 +1,15 @@
 (* "$Id$" *)
-(* *)
-(* Formalisation of some typical SOS-proofs. *)
-(* *)
-(* This work arose from a challenge suggested by Adam *)
-(* Chlipala on the POPLmark mailing list. *)
-(* *)
-(* We thank Nick Benton for helping us with the *)
-(* termination-proof for evaluation. *)
-(* *)
-(* The formalisation was done by Julien Narboux and *)
-(* Christian Urban. *)
+(* *)
+(* Formalisation of some typical SOS-proofs. *)
+(* *)
+(* This work was inspired by a challenge suggested by Adam *)
+(* Chlipala on the POPLmark mailing list. *)
+(* *)
+(* We thank Nick Benton for helping us with the *)
+(* termination-proof for evaluation. *)
+(* *)
+(* The formalisation was done by Julien Narboux and *)
+(* Christian Urban. *)
 
 theory SOS
   imports "../Nominal"
 begin
@@ -97,7 +97,7 @@
 by (nominal_induct t rule: trm.induct)
    (auto simp add: fresh_list_nil)
 
-(* Single substitution *)
+text {* Single substitution *}
 abbreviation
   subst :: "trm \ name \ trm \ trm" ("_[_::=_]" [100,100,100] 100)
 where
@@ -113,9 +113,9 @@
   fixes z::"name"
   and   t\<^isub>1::"trm"
   and   t2::"trm"
-  assumes "z\t\<^isub>1" and "z\t\<^isub>2"
+  assumes a: "z\t\<^isub>1" "z\t\<^isub>2"
  shows "z\t\<^isub>1[y::=t\<^isub>2]"
-using assms
+using a
 by (nominal_induct t\<^isub>1 avoiding: z y t\<^isub>2 rule: trm.induct)
    (auto simp add: abs_fresh fresh_atm)
@@ -127,21 +127,18 @@
    (auto simp add: fresh_atm abs_fresh fresh_nat)
 
 lemma forget:
-  fixes x::"name"
-  and   L::"trm"
-  assumes "x\e"
+  assumes a: "x\e"
  shows "e[x::=e'] = e"
-  using assms
-  by (nominal_induct e avoiding: x e' rule: trm.induct)
-     (auto simp add: fresh_atm abs_fresh)
+using a
+by (nominal_induct e avoiding: x e' rule: trm.induct)
+   (auto simp add: fresh_atm abs_fresh)
 
 lemma psubst_subst_psubst:
-  assumes h:"x\\"
+  assumes h: "x\\"
  shows "\[x::=e'] = ((x,e')#\)"
 using h
-apply(nominal_induct e avoiding: \ x e' rule: trm.induct)
-apply(auto simp add: fresh_list_cons fresh_atm forget lookup_fresh lookup_fresh')
-done
+by (nominal_induct e avoiding: \ x e' rule: trm.induct)
+   (auto simp add: fresh_list_cons fresh_atm forget lookup_fresh lookup_fresh')
 
 text {* Typing Judgements *}
@@ -186,6 +183,8 @@
   ultimately show ?thesis by auto
 qed
 
+text {* Typing Relation *}
+
 inductive
   typing :: "(name\ty) list\trm\ty\bool" ("_ \ _ : _" [60,60,60] 60)
 where
@@ -205,10 +204,9 @@
 by (induct) (auto)
 
 lemma t_Lam_elim:
-  assumes a1:"\ \ Lam [x].t : T"
-  and     a2: "x\\"
+  assumes a: "\ \ Lam [x].t : T" "x\\"
  obtains T\<^isub>1 and T\<^isub>2 where "(x,T\<^isub>1)#\ \ t : T\<^isub>2" and "T=T\<^isub>1\T\<^isub>2"
-using a1 a2
+using a
 by (cases rule: typing.strong_cases [where x="x"])
    (auto simp add: abs_fresh fresh_ty alpha trm.inject)
@@ -309,7 +307,7 @@
   have a3: "\ \ e' : T'" by fact
   have ih1: "\(x,T')#\ \ e1 : Tn\T; \ \ e' : T'\ \ \ \ e1[x::=e'] : Tn\T" by fact
   have ih2: "\(x,T')#\ \ e2 : Tn; \ \ e' : T'\ \ \ \ e2[x::=e'] : Tn" by fact
-  then show ?case using a1 a2 a3 ih1 ih2 by auto
+  then show "\ \ (App e1 e2)[x::=e'] : T" using a1 a2 a3 ih1 ih2 by auto
 qed
 
 text {* Values *}
@@ -408,17 +406,6 @@
   shows "val t'"
 using h by (induct) (auto)
 
-lemma type_arrow_evaluates_to_lams:
-  assumes "\ \ t : \ \ \" and "t \ t'"
-  obtains x t'' where "t' = Lam [x]. t''"
-proof -
-  have "\ \ t' : \ \ \" using assms subject_reduction by simp
-  moreover
-  have "val t'" using reduces_evaluates_to_values assms by simp
-  ultimately obtain x t'' where "t' = Lam [x]. t''" by (cases, auto simp add:ty.inject)
-  thus ?thesis using prems by auto
-qed
-
 (* Valuation *)
 consts V :: "ty \ trm set"
@@ -451,7 +438,7 @@
 done
 
 lemma V_arrow_elim_weak:
-  assumes h:"u \ (V (T\<^isub>1 \ T\<^isub>2))"
+  assumes h:"u \ V (T\<^isub>1 \ T\<^isub>2)"
  obtains a t where "u = Lam[a].t" and "\ v \ (V T\<^isub>1). \ v'. t[a::=v] \ v' \ v' \ V T\<^isub>2"
 using h by (auto)
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/Support.thy
--- a/src/HOL/Nominal/Examples/Support.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/Support.thy Tue Jan 08 23:11:08 2008 +0100
@@ -86,8 +86,8 @@
 *}
 lemma supp_infinite_coinfinite:
   fixes S::"atom set"
-  assumes a: "infinite S"
-  and     b: "infinite (UNIV-S)"
+  assumes asm1: "infinite S"
+  and     asm2: "infinite (UNIV-S)"
  shows "(supp S) = (UNIV::atom set)"
proof -
  have "\(x::atom). x\(supp S)"
 proof (rule allI)
   fix x::"atom"
   show "x\(supp S)"
   proof (cases "x\S")
     case True
     have "x\S" by fact
     hence "\b\(UNIV-S). [(x,b)]\S\S" by (auto simp add: perm_set_def calc_atm)
-    with b have "infinite {b\(UNIV-S). [(x,b)]\S\S}" by (rule infinite_Collection)
+    with asm2 have "infinite {b\(UNIV-S). [(x,b)]\S\S}" by (rule infinite_Collection)
     hence "infinite {b. [(x,b)]\S\S}" by (rule_tac infinite_super, auto)
     then show "x\(supp S)" by (simp add: supp_def)
   next
     case False
     have "x\S" by fact
     hence "\b\S. [(x,b)]\S\S" by (auto simp add: perm_set_def calc_atm)
-    with a have "infinite {b\S. [(x,b)]\S\S}" by (rule infinite_Collection)
+    with asm1 have "infinite {b\S. [(x,b)]\S\S}" by (rule infinite_Collection)
     hence "infinite {b. [(x,b)]\S\S}" by (rule_tac infinite_super, auto)
     then show "x\(supp S)" by (simp add: supp_def)
   qed
diff -r 263aaf988d44 -r c24395ea4e71 src/HOL/Nominal/Examples/VC_Condition.thy
--- a/src/HOL/Nominal/Examples/VC_Condition.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/VC_Condition.thy Tue Jan 08 23:11:08 2008 +0100
@@ -90,7 +90,7 @@
 text {*
   The next lemma shows by induction on xs that if x is a free
-  variable in t and x does not occur in xs, then x is a free
+  variable in t, and x does not occur in xs, then x is a free
   variable in bind xs t. In the nominal tradition we formulate
   'is a free variable in' as 'is not fresh for'. *}
 lemma free_variable: