--- a/src/HOL/Nominal/Examples/CR_Takahashi.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/CR_Takahashi.thy Tue Jan 08 23:11:08 2008 +0100
@@ -10,7 +10,7 @@
(* Church-Rosser Theorem (1995). *)
theory CR_Takahashi
-imports "../Nominal"
+ imports "../Nominal"
begin
atom_decl name
@@ -63,12 +63,11 @@
using a by (nominal_induct t avoiding: x y s u rule: lam.induct)
(auto simp add: fresh_fact forget)
-
lemma subst_rename:
assumes a: "y\<sharp>t"
shows "t[x::=s] = ([(y,x)]\<bullet>t)[y::=s]"
using a by (nominal_induct t avoiding: x y s rule: lam.induct)
- (auto simp add: calc_atm fresh_atm abs_fresh)
+ (auto simp add: calc_atm fresh_atm abs_fresh)
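+
+text {*
+  A small sanity check, assuming the usual clauses of substitution and
+  swapping on variables: for t = Var x (and x \<noteq> y, so that y\<sharp>t) both
+  sides reduce to s, since ([(y,x)]\<bullet>(Var x))[y::=s] = (Var y)[y::=s] = s.
+  The freshness assumption is essential: for t = Var y the left-hand
+  side is Var y, while ([(y,x)]\<bullet>(Var y))[y::=s] = (Var x)[y::=s] = Var x.
+*}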
section {* Beta-Reduction *}
--- a/src/HOL/Nominal/Examples/Contexts.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/Contexts.thy Tue Jan 08 23:11:08 2008 +0100
@@ -10,7 +10,7 @@
reduction relations (based on congruence rules) is
equivalent to the Felleisen-Hieb-style representation
(based on contexts), and show cbv-evaluation via a
- list-machine described by Roshan James.
+ CK-machine described by Roshan James.
The interesting point is that contexts do not contain
any binders. On the other hand the operation of filling
@@ -20,15 +20,17 @@
atom_decl name
-text {* Terms *}
+text {*
+ Lambda-Terms - the Lam-term constructor binds a name
+*}
nominal_datatype lam =
Var "name"
| App "lam" "lam"
| Lam "\<guillemotleft>name\<guillemotright>lam" ("Lam [_]._" [100,100] 100)
text {*
- Contexts - the lambda case in contexts does not bind a name
- even if we introduce the nomtation [_]._ for CLam.
+ Contexts - the lambda case in contexts does not bind a name,
+  even though we introduce the notation [_]._ for CLam.
*}
nominal_datatype ctx =
Hole ("\<box>" 1000)
@@ -36,7 +38,7 @@
| CAppR "lam" "ctx"
| CLam "name" "ctx" ("CLam [_]._" [100,100] 100)
-text {* Capture-avoiding substitution and two lemmas *}
+text {* Capture-avoiding substitution *}
consts subst :: "lam \<Rightarrow> name \<Rightarrow> lam \<Rightarrow> lam" ("_[_::=_]" [100,100,100] 100)
@@ -50,7 +52,10 @@
apply(fresh_guess)+
done
-text {* Filling is the operation that fills a term into a hole. *}
+text {*
+ Filling is the operation that fills a term into a hole.
+ This operation is possibly capturing.
+*}
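+
+text {*
+  For instance, assuming the usual clause (CLam [x].E)<t> = Lam [x].(E<t>)
+  of the filling operation (its defining equations are unchanged and not
+  shown in this hunk), we have
+
+    (CLam [x].\<box>)<Var x> = Lam [x].(Var x)
+
+  that is, the free occurrence of x in the filled term is captured;
+  the example lemma further below makes exactly this point.
+*}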
consts
filling :: "ctx \<Rightarrow> lam \<Rightarrow> lam" ("_<_>" [100,100] 100)
@@ -71,10 +76,10 @@
and "(CLam [x].\<box>)<Var x> = (CLam [y].\<box>)<Var y>"
by (auto simp add: alpha ctx.inject lam.inject calc_atm fresh_atm)
-text {* The composition of two contexts *}
+text {* The composition of two contexts. *}
consts
- ctx_replace :: "ctx \<Rightarrow> ctx \<Rightarrow> ctx" ("_ \<circ> _" [100,100] 100)
+ ctx_compose :: "ctx \<Rightarrow> ctx \<Rightarrow> ctx" ("_ \<circ> _" [100,100] 100)
nominal_primrec
"\<box> \<circ> E' = E'"
@@ -134,15 +139,15 @@
then show "App (Lam [x].t) t' \<longrightarrow>x t[x::=t']" by simp
qed (metis ctx_red_in_ctx filling.simps)+ (* found by SledgeHammer *)
-section {* We now use context in a cbv list machine *}
+section {* We now use contexts in a CBV CK-machine *}
text {* First the function that composes a list of contexts *}
-consts
- ctx_composes :: "ctx list \<Rightarrow> ctx" ("_\<down>" [80] 80)
primrec
- "[]\<down> = \<box>"
- "(E#Es)\<down> = (Es\<down>) \<circ> E"
+ ctx_composes :: "ctx list \<Rightarrow> ctx" ("_\<down>" [80] 80)
+where
+ "[]\<down> = \<box>"
+ | "(E#Es)\<down> = (Es\<down>) \<circ> E"
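+
+text {*
+  A small unfolding, using only the two equations above together with
+  the clause \<box> \<circ> E' = E' of context composition:
+
+    [E1,E2]\<down> = ([E2]\<down>) \<circ> E1 = ((\<box> \<circ> E2) \<circ> E1) = E2 \<circ> E1
+*}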
text {* Values *}
inductive
@@ -151,8 +156,7 @@
v_Lam[intro]: "val (Lam [x].e)"
| v_Var[intro]: "val (Var x)"
-
-text {* CBV reduction using contexts *}
+text {* CBV-reduction using contexts *}
inductive
cbv :: "lam\<Rightarrow>lam\<Rightarrow>bool" ("_ \<longrightarrow>cbv _" [80,80] 80)
where
@@ -166,7 +170,7 @@
| cbvs2[intro]: "e \<longrightarrow>cbv e' \<Longrightarrow> e \<longrightarrow>cbv* e'"
| cbvs3[intro,trans]: "\<lbrakk>e1\<longrightarrow>cbv* e2; e2 \<longrightarrow>cbv* e3\<rbrakk> \<Longrightarrow> e1 \<longrightarrow>cbv* e3"
-text {* A list machine which builds up a list of contexts *}
+text {* A little CK-machine, which builds up a list of evaluation contexts. *}
inductive
machine :: "lam\<Rightarrow>ctx list\<Rightarrow>lam\<Rightarrow>ctx list\<Rightarrow>bool" ("<_,_> \<mapsto> <_,_>" [60,60,60,60] 60)
where
@@ -174,13 +178,6 @@
| m2[intro]: "val v \<Longrightarrow> <v,(CAppL \<box> e2)#Es> \<mapsto> <e2,(CAppR v \<box>)#Es>"
| m3[intro]: "val v \<Longrightarrow> <v,(CAppR (Lam [x].e) \<box>)#Es> \<mapsto> <e[x::=v],Es>"
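+
+text {*
+  A typical run of the machine on a beta-redex, where v is a value,
+  assuming the usual first rule m1 (not shown in this hunk), which
+  descends into the function position of an application by pushing a
+  CAppL-context:
+
+    <App (Lam [x].e) v, Es>  \<mapsto>  <Lam [x].e, (CAppL \<box> v)#Es>      (m1)
+                             \<mapsto>  <v, (CAppR (Lam [x].e) \<box>)#Es>   (m2)
+                             \<mapsto>  <e[x::=v], Es>                    (m3)
+*}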
-inductive
- "machine_star" :: "lam\<Rightarrow>ctx list\<Rightarrow>lam\<Rightarrow>ctx list\<Rightarrow>bool" ("<_,_> \<mapsto>* <_,_>" [60,60,60,60] 60)
-where
- ms1[intro]: "<e,Es> \<mapsto>* <e,Es>"
-| ms2[intro]: "<e,Es> \<mapsto> <e',Es'> \<Longrightarrow> <e,Es> \<mapsto>* <e',Es'>"
-| ms3[intro,trans]: "\<lbrakk><e1,Es1> \<mapsto>* <e2,Es2>; <e2,Es2> \<mapsto>* <e3,Es3>\<rbrakk> \<Longrightarrow> <e1,Es1> \<mapsto>* <e3,Es3>"
-
lemma machine_red_implies_cbv_reds_aux:
assumes a: "<e,Es> \<mapsto> <e',Es'>"
shows "(Es\<down>)<e> \<longrightarrow>cbv* (Es'\<down>)<e'>"
@@ -193,4 +190,10 @@
using a
by (auto dest: machine_red_implies_cbv_reds_aux)
+text {*
+  One would now also like to show the opposite direction, namely
+  that cbv-reduction can be simulated by the machine; but according
+  to Amr Sabry this amounts to showing a standardisation lemma,
+  which is hard.
+ *}
+
end
--- a/src/HOL/Nominal/Examples/Lambda_mu.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/Lambda_mu.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,7 +1,7 @@
(* $Id$ *)
theory Lambda_mu
-imports "../Nominal"
+ imports "../Nominal"
begin
section {* Lambda-Mu according to a paper by Gavin Bierman *}
@@ -12,7 +12,8 @@
Var "var"
| Lam "\<guillemotleft>var\<guillemotright>trm" ("Lam [_]._" [100,100] 100)
| App "trm" "trm"
- | Pss "mvar" "trm"
- | Act "\<guillemotleft>mvar\<guillemotright>trm" ("Act [_]._" [100,100] 100)
+ | Pss "mvar" "trm" (* passivate *)
+ | Act "\<guillemotleft>mvar\<guillemotright>trm" ("Act [_]._" [100,100] 100) (* activate *)
+
end
--- a/src/HOL/Nominal/Examples/LocalWeakening.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/LocalWeakening.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,19 +1,20 @@
(* $Id$ *)
(* Formalisation of weakening using locally nameless *)
-(* terms; the nominal infrastructure can derive *)
+(* terms; the nominal infrastructure can also derive *)
(* strong induction principles for such representations *)
(* *)
(* Authors: Christian Urban and Randy Pollack *)
theory LocalWeakening
-imports "../Nominal"
+ imports "../Nominal"
begin
atom_decl name
-text {* Curry-style lambda terms in locally nameless
- representation without any binders *}
-
+text {*
+ Curry-style lambda terms in locally nameless
+ representation without any binders
+*}
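+
+text {*
+  For example, under the standard locally-nameless reading (de-Bruijn
+  indices lVar for bound variables, names lPar for free ones), the
+  informal term \<lambda>x. x y with y free would be written as
+  lLam (lApp (lVar 0) (lPar y)); the constructors lApp and lLam appear
+  in the typing rules further below.
+*}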
nominal_datatype llam =
lPar "name"
| lVar "nat"
@@ -78,13 +79,6 @@
TVar "nat"
| TArr "ty" "ty" (infix "\<rightarrow>" 200)
-lemma ty_perm[simp]:
- fixes pi ::"name prm"
- and T ::"ty"
- shows "pi\<bullet>T = T"
-by (induct T rule: ty.weak_induct)
- (simp_all add: perm_nat_def)
-
lemma ty_fresh[simp]:
fixes x::"name"
and T::"ty"
@@ -107,27 +101,28 @@
lemma v2_elim:
assumes a: "valid ((a,T)#\<Gamma>)"
shows "a\<sharp>\<Gamma> \<and> valid \<Gamma>"
-using valid.cases[OF a]
-by (auto)
+using a by (cases) (auto)
text {* "weak" typing relation *}
inductive
typing :: "cxt\<Rightarrow>llam\<Rightarrow>ty\<Rightarrow>bool" (" _ \<turnstile> _ : _ " [60,60,60] 60)
where
- t_lPar[intro]: "\<lbrakk>valid \<Gamma>; (x,T)\<in>set \<Gamma>\<rbrakk>\<Longrightarrow> \<Gamma> \<turnstile> lPar x : T"
- | t_lApp[intro]: "\<lbrakk>\<Gamma> \<turnstile> t1 : T1\<rightarrow>T2; \<Gamma> \<turnstile> t2 : T1\<rbrakk>\<Longrightarrow> \<Gamma> \<turnstile> lApp t1 t2 : T2"
- | t_lLam[intro]: "\<lbrakk>x\<sharp>t; (x,T1)#\<Gamma> \<turnstile> freshen t x : T2\<rbrakk>\<Longrightarrow> \<Gamma> \<turnstile> lLam t : T1\<rightarrow>T2"
+ t_lPar[intro]: "\<lbrakk>valid \<Gamma>; (x,T)\<in>set \<Gamma>\<rbrakk>\<Longrightarrow> \<Gamma> \<turnstile> lPar x : T"
+| t_lApp[intro]: "\<lbrakk>\<Gamma> \<turnstile> t1 : T1\<rightarrow>T2; \<Gamma> \<turnstile> t2 : T1\<rbrakk>\<Longrightarrow> \<Gamma> \<turnstile> lApp t1 t2 : T2"
+| t_lLam[intro]: "\<lbrakk>x\<sharp>t; (x,T1)#\<Gamma> \<turnstile> freshen t x : T2\<rbrakk>\<Longrightarrow> \<Gamma> \<turnstile> lLam t : T1\<rightarrow>T2"
equivariance typing
lemma typing_implies_valid:
assumes a: "\<Gamma> \<turnstile> t : T"
shows "valid \<Gamma>"
-using a
-by (induct) (auto dest: v2_elim)
+using a by (induct) (auto dest: v2_elim)
-text {* we explicitly have to say to strengthen over the variable x *}
+text {*
+ we have to explicitly state that nominal_inductive should strengthen
+ over the variable x (since x is not a binder)
+*}
nominal_inductive typing
avoids t_lLam: x
by (auto simp add: fresh_prod dest: v2_elim typing_implies_valid)
@@ -182,7 +177,8 @@
ultimately have "(x,T1)#\<Gamma>2 \<turnstile> freshen t x : T2" using ih by simp
with vc show "\<Gamma>2 \<turnstile> lLam t : T1\<rightarrow>T2" by auto
next
- case t_lApp thus ?case by auto
+ case (t_lApp \<Gamma>1 t1 T1 T2 t2 \<Gamma>2)
+ then show "\<Gamma>2 \<turnstile> lApp t1 t2 : T2" by auto
qed
end
--- a/src/HOL/Nominal/Examples/SN.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/SN.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,7 +1,7 @@
(* $Id$ *)
theory SN
-imports Lam_Funs
+ imports Lam_Funs
begin
text {* Strong Normalisation proof from the Proofs and Types book *}
@@ -104,27 +104,12 @@
TVar "nat"
| TArr "ty" "ty" (infix "\<rightarrow>" 200)
-lemma perm_ty:
- fixes pi ::"name prm"
- and \<tau> ::"ty"
- shows "pi\<bullet>\<tau> = \<tau>"
-by (nominal_induct \<tau> rule: ty.induct)
- (simp_all add: perm_nat_def)
-
lemma fresh_ty:
fixes a ::"name"
and \<tau> ::"ty"
shows "a\<sharp>\<tau>"
- by (simp add: fresh_def perm_ty supp_def)
-
-(* domain of a typing context *)
-
-fun
- "dom_ty" :: "(name\<times>ty) list \<Rightarrow> (name list)"
-where
- "dom_ty [] = []"
-| "dom_ty ((x,\<tau>)#\<Gamma>) = (x)#(dom_ty \<Gamma>)"
-
+by (nominal_induct \<tau> rule: ty.induct)
+ (auto simp add: fresh_nat)
(* valid contexts *)
@@ -136,8 +121,6 @@
equivariance valid
-inductive_cases valid_elim[elim]: "valid ((a,\<tau>)#\<Gamma>)"
-
(* typing judgements *)
lemma fresh_context:
@@ -161,12 +144,7 @@
nominal_inductive typing
by (simp_all add: abs_fresh fresh_ty)
-abbreviation
- "sub" :: "(name\<times>ty) list \<Rightarrow> (name\<times>ty) list \<Rightarrow> bool" ("_ \<subseteq> _" [60,60] 60)
-where
- "\<Gamma>1 \<subseteq> \<Gamma>2 \<equiv> \<forall>a \<sigma>. (a,\<sigma>)\<in>set \<Gamma>1 \<longrightarrow> (a,\<sigma>)\<in>set \<Gamma>2"
-
-subsection {* some facts about beta *}
+subsection {* a fact about beta *}
constdefs
"NORMAL" :: "lam \<Rightarrow> bool"
@@ -177,11 +155,12 @@
proof -
{ assume "\<exists>t'. (Var a) \<longrightarrow>\<^isub>\<beta> t'"
then obtain t' where "(Var a) \<longrightarrow>\<^isub>\<beta> t'" by blast
- hence False by (cases, auto)
+ hence False by (cases) (auto)
}
- thus "NORMAL (Var a)" by (force simp add: NORMAL_def)
+ thus "NORMAL (Var a)" by (auto simp add: NORMAL_def)
qed
+text {* Inductive version of Strong Normalisation *}
inductive
SN :: "lam \<Rightarrow> bool"
where
@@ -249,7 +228,7 @@
"RED (\<tau>\<rightarrow>\<sigma>) = {t. \<forall>u. (u\<in>RED \<tau> \<longrightarrow> (App t u)\<in>RED \<sigma>)}"
by (rule TrueI)+
-(* neutral terms *)
+text {* neutral terms *}
constdefs
NEUT :: "lam \<Rightarrow> bool"
"NEUT t \<equiv> (\<exists>a. t = Var a) \<or> (\<exists>t1 t2. t = App t1 t2)"
@@ -358,7 +337,7 @@
ultimately show "App t u \<in> RED \<sigma>" using c3 by (simp add: CR3_def)
qed
-(* properties of the candiadates *)
+text {* properties of the candidates *}
lemma RED_props:
shows "CR1 \<tau>" and "CR2 \<tau>" and "CR3 \<tau>"
proof (nominal_induct \<tau> rule: ty.induct)
@@ -374,10 +353,11 @@
{ case 1
have ih_CR3_\<tau>1: "CR3 \<tau>1" by fact
have ih_CR1_\<tau>2: "CR1 \<tau>2" by fact
- show "CR1 (\<tau>1 \<rightarrow> \<tau>2)" unfolding CR1_def
- proof (simp, intro strip)
+ have "\<And>t. t \<in> RED (\<tau>1 \<rightarrow> \<tau>2) \<Longrightarrow> SN t"
+ proof -
fix t
- assume a: "\<forall>u. u \<in> RED \<tau>1 \<longrightarrow> App t u \<in> RED \<tau>2"
+ assume "t \<in> RED (\<tau>1 \<rightarrow> \<tau>2)"
+ then have a: "\<forall>u. u \<in> RED \<tau>1 \<longrightarrow> App t u \<in> RED \<tau>2" by simp
from ih_CR3_\<tau>1 have "CR4 \<tau>1" by (simp add: CR3_implies_CR4)
moreover
have "NEUT (Var a)" by (force simp add: NEUT_def)
@@ -386,20 +366,13 @@
ultimately have "(Var a)\<in> RED \<tau>1" by (simp add: CR4_def)
with a have "App t (Var a) \<in> RED \<tau>2" by simp
hence "SN (App t (Var a))" using ih_CR1_\<tau>2 by (simp add: CR1_def)
- thus "SN(t)" by (auto dest: SN_of_FST_of_App)
+ thus "SN t" by (auto dest: SN_of_FST_of_App)
qed
+ then show "CR1 (\<tau>1 \<rightarrow> \<tau>2)" unfolding CR1_def by simp
next
case 2
- have ih_CR1_\<tau>1: "CR1 \<tau>1" by fact
have ih_CR2_\<tau>2: "CR2 \<tau>2" by fact
- show "CR2 (\<tau>1 \<rightarrow> \<tau>2)" unfolding CR2_def
- proof (simp, intro strip)
- fix t1 t2 u
- assume "(\<forall>u. u \<in> RED \<tau>1 \<longrightarrow> App t1 u \<in> RED \<tau>2) \<and> t1 \<longrightarrow>\<^isub>\<beta> t2"
- and "u \<in> RED \<tau>1"
- hence "t1 \<longrightarrow>\<^isub>\<beta> t2" and "App t1 u \<in> RED \<tau>2" by simp_all
- thus "App t2 u \<in> RED \<tau>2" using ih_CR2_\<tau>2 by (auto simp add: CR2_def)
- qed
+ then show "CR2 (\<tau>1 \<rightarrow> \<tau>2)" unfolding CR2_def by auto
next
case 3
have ih_CR1_\<tau>1: "CR1 \<tau>1" by fact
@@ -416,7 +389,10 @@
}
qed
-(* not as simple as on paper, because of the stronger double_SN induction *)
+text {*
+  the next lemma is not as simple as on paper, probably because of
+ the stronger double_SN induction
+*}
lemma abs_RED:
assumes asm: "\<forall>s\<in>RED \<tau>. t[x::=s]\<in>RED \<sigma>"
shows "Lam [x].t\<in>RED (\<tau>\<rightarrow>\<sigma>)"
@@ -462,14 +438,17 @@
apply(drule_tac x="t'" in meta_spec)
apply(simp)
apply(drule meta_mp)
- apply(auto)
+ prefer 2
+ apply(auto)[1]
+ apply(rule ballI)
apply(drule_tac x="s" in bspec)
apply(simp)
- apply(subgoal_tac "CR2 \<sigma>")
+ apply(subgoal_tac "CR2 \<sigma>")(*A*)
apply(unfold CR2_def)[1]
apply(drule_tac x="t[x::=s]" in spec)
apply(drule_tac x="t'[x::=s]" in spec)
apply(simp add: beta_subst)
+ (*A*)
apply(simp add: RED_props)
done
then have "r\<in>RED \<sigma>" using a2 by simp
@@ -497,9 +476,12 @@
then have "r\<in>RED \<sigma>" using as1 as2 by auto
}
ultimately show "r \<in> RED \<sigma>"
+ (* one wants to use the strong elimination principle; for this one
+ has to know that x\<sharp>u *)
apply(cases)
apply(auto simp add: lam.inject)
apply(drule beta_abs)
+ apply(auto)[1]
apply(auto simp add: alpha subst_rename)
done
qed
@@ -513,7 +495,7 @@
abbreviation
mapsto :: "(name\<times>lam) list \<Rightarrow> name \<Rightarrow> lam \<Rightarrow> bool" ("_ maps _ to _" [55,55,55] 55)
where
- "\<theta> maps x to e\<equiv> (lookup \<theta> x) = e"
+ "\<theta> maps x to e \<equiv> (lookup \<theta> x) = e"
abbreviation
closes :: "(name\<times>lam) list \<Rightarrow> (name\<times>ty) list \<Rightarrow> bool" ("_ closes _" [55,55] 55)
--- a/src/HOL/Nominal/Examples/SOS.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/SOS.thy Tue Jan 08 23:11:08 2008 +0100
@@ -1,15 +1,15 @@
(* "$Id$" *)
-(* *)
-(* Formalisation of some typical SOS-proofs. *)
-(* *)
-(* This work arose from a challenge suggested by Adam *)
-(* Chlipala on the POPLmark mailing list. *)
-(* *)
-(* We thank Nick Benton for helping us with the *)
-(* termination-proof for evaluation. *)
-(* *)
-(* The formalisation was done by Julien Narboux and *)
-(* Christian Urban. *)
+(* *)
+(* Formalisation of some typical SOS-proofs. *)
+(* *)
+(* This work was inspired by a challenge suggested by Adam  *)
+(* Chlipala on the POPLmark mailing list. *)
+(* *)
+(* We thank Nick Benton for helping us with the *)
+(* termination-proof for evaluation. *)
+(* *)
+(* The formalisation was done by Julien Narboux and *)
+(* Christian Urban. *)
theory SOS
imports "../Nominal"
@@ -97,7 +97,7 @@
by (nominal_induct t rule: trm.induct)
(auto simp add: fresh_list_nil)
-(* Single substitution *)
+text {* Single substitution *}
abbreviation
subst :: "trm \<Rightarrow> name \<Rightarrow> trm \<Rightarrow> trm" ("_[_::=_]" [100,100,100] 100)
where
@@ -113,9 +113,9 @@
fixes z::"name"
and t\<^isub>1::"trm"
and t2::"trm"
- assumes "z\<sharp>t\<^isub>1" and "z\<sharp>t\<^isub>2"
+ assumes a: "z\<sharp>t\<^isub>1" "z\<sharp>t\<^isub>2"
shows "z\<sharp>t\<^isub>1[y::=t\<^isub>2]"
-using assms
+using a
by (nominal_induct t\<^isub>1 avoiding: z y t\<^isub>2 rule: trm.induct)
(auto simp add: abs_fresh fresh_atm)
@@ -127,21 +127,18 @@
(auto simp add: fresh_atm abs_fresh fresh_nat)
lemma forget:
- fixes x::"name"
- and L::"trm"
- assumes "x\<sharp>e"
+ assumes a: "x\<sharp>e"
shows "e[x::=e'] = e"
- using assms
- by (nominal_induct e avoiding: x e' rule: trm.induct)
- (auto simp add: fresh_atm abs_fresh)
+using a
+by (nominal_induct e avoiding: x e' rule: trm.induct)
+ (auto simp add: fresh_atm abs_fresh)
lemma psubst_subst_psubst:
- assumes h:"x\<sharp>\<theta>"
+ assumes h: "x\<sharp>\<theta>"
shows "\<theta><e>[x::=e'] = ((x,e')#\<theta>)<e>"
using h
-apply(nominal_induct e avoiding: \<theta> x e' rule: trm.induct)
-apply(auto simp add: fresh_list_cons fresh_atm forget lookup_fresh lookup_fresh')
-done
+by (nominal_induct e avoiding: \<theta> x e' rule: trm.induct)
+ (auto simp add: fresh_list_cons fresh_atm forget lookup_fresh lookup_fresh')
text {* Typing Judgements *}
@@ -186,6 +183,8 @@
ultimately show ?thesis by auto
qed
+text {* Typing Relation *}
+
inductive
typing :: "(name\<times>ty) list\<Rightarrow>trm\<Rightarrow>ty\<Rightarrow>bool" ("_ \<turnstile> _ : _" [60,60,60] 60)
where
@@ -205,10 +204,9 @@
by (induct) (auto)
lemma t_Lam_elim:
- assumes a1:"\<Gamma> \<turnstile> Lam [x].t : T"
- and a2: "x\<sharp>\<Gamma>"
+ assumes a: "\<Gamma> \<turnstile> Lam [x].t : T" "x\<sharp>\<Gamma>"
obtains T\<^isub>1 and T\<^isub>2 where "(x,T\<^isub>1)#\<Gamma> \<turnstile> t : T\<^isub>2" and "T=T\<^isub>1\<rightarrow>T\<^isub>2"
-using a1 a2
+using a
by (cases rule: typing.strong_cases [where x="x"])
(auto simp add: abs_fresh fresh_ty alpha trm.inject)
@@ -309,7 +307,7 @@
have a3: "\<Gamma> \<turnstile> e' : T'" by fact
have ih1: "\<lbrakk>(x,T')#\<Gamma> \<turnstile> e1 : Tn\<rightarrow>T; \<Gamma> \<turnstile> e' : T'\<rbrakk> \<Longrightarrow> \<Gamma> \<turnstile> e1[x::=e'] : Tn\<rightarrow>T" by fact
have ih2: "\<lbrakk>(x,T')#\<Gamma> \<turnstile> e2 : Tn; \<Gamma> \<turnstile> e' : T'\<rbrakk> \<Longrightarrow> \<Gamma> \<turnstile> e2[x::=e'] : Tn" by fact
- then show ?case using a1 a2 a3 ih1 ih2 by auto
+ then show "\<Gamma> \<turnstile> (App e1 e2)[x::=e'] : T" using a1 a2 a3 ih1 ih2 by auto
qed
text {* Values *}
@@ -408,17 +406,6 @@
shows "val t'"
using h by (induct) (auto)
-lemma type_arrow_evaluates_to_lams:
- assumes "\<Gamma> \<turnstile> t : \<sigma> \<rightarrow> \<tau>" and "t \<Down> t'"
- obtains x t'' where "t' = Lam [x]. t''"
-proof -
- have "\<Gamma> \<turnstile> t' : \<sigma> \<rightarrow> \<tau>" using assms subject_reduction by simp
- moreover
- have "val t'" using reduces_evaluates_to_values assms by simp
- ultimately obtain x t'' where "t' = Lam [x]. t''" by (cases, auto simp add:ty.inject)
- thus ?thesis using prems by auto
-qed
-
(* Valuation *)
consts
V :: "ty \<Rightarrow> trm set"
@@ -451,7 +438,7 @@
done
lemma V_arrow_elim_weak:
- assumes h:"u \<in> (V (T\<^isub>1 \<rightarrow> T\<^isub>2))"
+ assumes h:"u \<in> V (T\<^isub>1 \<rightarrow> T\<^isub>2)"
obtains a t where "u = Lam[a].t" and "\<forall> v \<in> (V T\<^isub>1). \<exists> v'. t[a::=v] \<Down> v' \<and> v' \<in> V T\<^isub>2"
using h by (auto)
--- a/src/HOL/Nominal/Examples/Support.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/Support.thy Tue Jan 08 23:11:08 2008 +0100
@@ -86,8 +86,8 @@
*}
lemma supp_infinite_coinfinite:
fixes S::"atom set"
- assumes a: "infinite S"
- and b: "infinite (UNIV-S)"
+ assumes asm1: "infinite S"
+ and asm2: "infinite (UNIV-S)"
shows "(supp S) = (UNIV::atom set)"
proof -
have "\<forall>(x::atom). x\<in>(supp S)"
@@ -98,14 +98,14 @@
case True
have "x\<in>S" by fact
hence "\<forall>b\<in>(UNIV-S). [(x,b)]\<bullet>S\<noteq>S" by (auto simp add: perm_set_def calc_atm)
- with b have "infinite {b\<in>(UNIV-S). [(x,b)]\<bullet>S\<noteq>S}" by (rule infinite_Collection)
+ with asm2 have "infinite {b\<in>(UNIV-S). [(x,b)]\<bullet>S\<noteq>S}" by (rule infinite_Collection)
hence "infinite {b. [(x,b)]\<bullet>S\<noteq>S}" by (rule_tac infinite_super, auto)
then show "x\<in>(supp S)" by (simp add: supp_def)
next
case False
have "x\<notin>S" by fact
hence "\<forall>b\<in>S. [(x,b)]\<bullet>S\<noteq>S" by (auto simp add: perm_set_def calc_atm)
- with a have "infinite {b\<in>S. [(x,b)]\<bullet>S\<noteq>S}" by (rule infinite_Collection)
+ with asm1 have "infinite {b\<in>S. [(x,b)]\<bullet>S\<noteq>S}" by (rule infinite_Collection)
hence "infinite {b. [(x,b)]\<bullet>S\<noteq>S}" by (rule_tac infinite_super, auto)
then show "x\<in>(supp S)" by (simp add: supp_def)
qed
--- a/src/HOL/Nominal/Examples/VC_Condition.thy Tue Jan 08 11:37:37 2008 +0100
+++ b/src/HOL/Nominal/Examples/VC_Condition.thy Tue Jan 08 23:11:08 2008 +0100
@@ -90,7 +90,7 @@
text {*
The next lemma shows by induction on xs that if x is a free
- variable in t and x does not occur in xs, then x is a free
+ variable in t, and x does not occur in xs, then x is a free
variable in bind xs t. In the nominal tradition we formulate
'is a free variable in' as 'is not fresh for'. *}
lemma free_variable: