--- a/src/HOL/Auth/Event.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Auth/Event.thy Wed Aug 26 17:38:18 2009 +0100
@@ -139,9 +139,11 @@
text{*Elimination rules: derive contradictions from old Says events containing
items known to be fresh*}
+lemmas Says_imp_parts_knows_Spy =
+ Says_imp_knows_Spy [THEN parts.Inj, THEN revcut_rl, standard]
+
lemmas knows_Spy_partsEs =
- Says_imp_knows_Spy [THEN parts.Inj, THEN revcut_rl, standard]
- parts.Body [THEN revcut_rl, standard]
+ Says_imp_parts_knows_Spy parts.Body [THEN revcut_rl, standard]
lemmas Says_imp_analz_Spy = Says_imp_knows_Spy [THEN analz.Inj]
--- a/src/HOL/Auth/KerberosV.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Auth/KerberosV.thy Wed Aug 26 17:38:18 2009 +0100
@@ -697,9 +697,7 @@
txt{*K4*}
apply (force dest!: Crypt_imp_keysFor, clarify)
txt{*K6*}
-apply (drule Says_imp_spies [THEN parts.Inj, THEN parts.Fst])
-apply (drule Says_imp_spies [THEN parts.Inj, THEN parts.Snd])
-apply (blast dest!: unique_CryptKey)
+apply (metis Says_imp_spies Says_ticket_parts analz.Fst analz.Inj analz_conj_parts unique_CryptKey)
done
text{*Needs a unicity theorem, hence moved here*}
@@ -841,13 +839,10 @@
apply (erule kerbV.induct, analz_mono_contra)
apply (frule_tac [7] Says_ticket_parts)
apply (frule_tac [5] Says_ticket_parts, simp_all, blast)
-txt{*K4 splits into distinct subcases*}
-apply auto
-txt{*servK can't have been enclosed in two certificates*}
- prefer 2 apply (blast dest: unique_CryptKey)
-txt{*servK is fresh and so could not have been used, by
- @{text new_keys_not_used}*}
-apply (force dest!: Crypt_imp_invKey_keysFor simp add: AKcryptSK_def)
+txt{*K4*}
+apply (metis Auth_fresh_not_AKcryptSK Crypt_imp_invKey_keysFor Says_ticket_analz
+ analz.Fst invKey_K new_keys_not_analzd parts.Fst Says_imp_parts_knows_Spy
+ unique_CryptKey)
done
text{*Long term keys are not issued as servKeys*}
@@ -981,9 +976,7 @@
txt{*K4*}
apply (blast dest!: authK_not_AKcryptSK)
txt{*Oops1*}
-apply clarify
-apply simp
-apply (blast dest!: AKcryptSK_analz_insert)
+apply (metis AKcryptSK_analz_insert insert_Key_singleton)
done
text{* First simplification law for analz: no session keys encrypt
@@ -1039,8 +1032,8 @@
\<in> set evs; authK \<in> symKeys;
Key authK \<in> analz (spies evs); evs \<in> kerbV \<rbrakk>
\<Longrightarrow> Key servK \<in> analz (spies evs)"
-apply (force dest: Says_imp_spies [THEN analz.Inj, THEN analz.Fst, THEN analz.Decrypt, THEN analz.Fst])
-done
+ by (metis Says_imp_analz_Spy analz.Fst analz_Decrypt')
+
text{*lemma @{text servK_notin_authKeysD} not needed in version V*}
@@ -1112,16 +1105,16 @@
apply (frule_tac [5] Says_ticket_analz)
apply (safe del: impI conjI impCE)
apply (simp_all add: less_SucI new_keys_not_analzd Says_Kas_message_form Says_Tgs_message_form analz_insert_eq not_parts_not_analz analz_insert_freshK1 analz_insert_freshK2 analz_insert_freshK3_bis pushes)
-txt{*Fake*}
-apply spy_analz
-txt{*K2*}
-apply (blast intro: parts_insertI less_SucI)
-txt{*K4*}
-apply (blast dest: authTicket_authentic Confidentiality_Kas)
-txt{*Oops1*}
+ txt{*Fake*}
+ apply spy_analz
+ txt{*K2*}
+ apply (blast intro: parts_insertI less_SucI)
+ txt{*K4*}
+ apply (blast dest: authTicket_authentic Confidentiality_Kas)
+ txt{*Oops1*}
apply (blast dest: Says_Kas_message_form Says_Tgs_message_form intro: less_SucI)
txt{*Oops2*}
- apply (blast dest: Says_imp_spies [THEN parts.Inj] Key_unique_SesKey intro: less_SucI)
+apply (metis Suc_le_eq linorder_linear linorder_not_le msg.simps(2) unique_servKeys)
done
@@ -1270,17 +1263,7 @@
Key authK \<notin> analz (spies evs); Key servK \<notin> analz (spies evs);
A \<notin> bad; B \<notin> bad; evs \<in> kerbV \<rbrakk>
\<Longrightarrow> Says B A (Crypt servK (Number T3)) \<in> set evs"
-apply (frule authK_authentic)
-apply assumption+
-apply (frule servK_authentic)
-prefer 2 apply (blast dest: authK_authentic Says_Kas_message_form)
-apply assumption+
-apply clarify
-apply (blast dest: K4_imp_K2 Key_unique_SesKey intro!: Says_K6)
-(*Single command proof: much slower!
-apply (blast dest: authK_authentic servK_authentic Says_Kas_message_form Key_unique_SesKey K4_imp_K2 intro!: Says_K6)
-*)
-done
+ by (metis authK_authentic Oops_range_spies1 Says_K6 servK_authentic u_K4_imp_K2 unique_authKeys)
lemma A_authenticates_B_r:
"\<lbrakk> Crypt servK (Number T3) \<in> parts (spies evs);
@@ -1301,8 +1284,7 @@
apply (erule_tac [9] exE)
apply (frule_tac [9] K4_imp_K2)
apply assumption+
-apply (blast dest: Key_unique_SesKey intro!: Says_K6 dest: Confidentiality_Tgs
-)
+apply (blast dest: Key_unique_SesKey intro!: Says_K6 dest: Confidentiality_Tgs)
done
@@ -1478,7 +1460,7 @@
...expands as follows - including extra exE because of new form of lemmas*)
apply (frule K3_imp_K2, assumption, assumption, erule exE, erule exE)
apply (case_tac "Key authK \<in> analz (spies evs5)")
-apply (drule Says_imp_knows_Spy [THEN analz.Inj, THEN analz.Fst, THEN analz_Decrypt', THEN analz.Fst], assumption, assumption, simp)
+ apply (metis Says_imp_analz_Spy analz.Fst analz_Decrypt')
apply (frule K3_imp_K2, assumption, assumption, erule exE, erule exE)
apply (drule Says_imp_knows_Spy [THEN parts.Inj, THEN parts.Fst])
apply (frule servK_authentic_ter, blast, assumption+)
--- a/src/HOL/Auth/Kerberos_BAN.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Auth/Kerberos_BAN.thy Wed Aug 26 17:38:18 2009 +0100
@@ -288,15 +288,8 @@
on evs)"
apply (unfold before_def)
apply (erule rev_mp)
-apply (erule bankerberos.induct, simp_all)
-txt{*We need this simplification only for Message 2*}
-apply (simp (no_asm) add: takeWhile_tail)
-apply auto
-txt{*Two subcases of Message 2. Subcase: used before*}
-apply (blast dest: used_evs_rev [THEN equalityD2, THEN contra_subsetD]
- used_takeWhile_used)
-txt{*subcase: CT before*}
-apply (fastsimp dest!: set_evs_rev [THEN equalityD2, THEN contra_subsetD, THEN takeWhile_void])
+apply (erule bankerberos.induct, simp_all add: takeWhile_tail)
+apply (metis length_rev set_rev takeWhile_void used_evs_rev)
done
@@ -492,6 +485,7 @@
txt{*BK3*}
apply (blast dest: Kab_authentic unique_session_keys)
done
+
lemma lemma_B [rule_format]:
"\<lbrakk> B \<notin> bad; evs \<in> bankerberos \<rbrakk>
\<Longrightarrow> Key K \<notin> analz (spies evs) \<longrightarrow>
@@ -585,9 +579,8 @@
txt{*BK2*}
apply (blast intro: parts_insertI less_SucI)
txt{*BK3*}
-apply (case_tac "Aa \<in> bad")
- prefer 2 apply (blast dest: Kab_authentic unique_session_keys)
-apply (blast dest: Says_imp_spies [THEN analz.Inj] Crypt_Spy_analz_bad elim!: MPair_analz intro: less_SucI)
+apply (metis Crypt_Spy_analz_bad Kab_authentic Says_imp_analz_Spy
+ Says_imp_parts_knows_Spy analz.Snd less_SucI unique_session_keys)
txt{*Oops: PROOF FAILS if unsafe intro below*}
apply (blast dest: unique_session_keys intro!: less_SucI)
done
--- a/src/HOL/Auth/NS_Shared.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Auth/NS_Shared.thy Wed Aug 26 17:38:18 2009 +0100
@@ -273,11 +273,11 @@
apply (simp_all add: analz_insert_eq analz_insert_freshK pushes split_ifs, spy_analz)
txt{*NS2*}
apply blast
-txt{*NS3, Server sub-case*}
+txt{*NS3*}
apply (blast dest!: Crypt_Spy_analz_bad A_trusts_NS2
dest: Says_imp_knows_Spy analz.Inj unique_session_keys)
-txt{*NS3, Spy sub-case; also Oops*}
-apply (blast dest: unique_session_keys)+
+txt{*Oops*}
+apply (blast dest: unique_session_keys)
done
--- a/src/HOL/Auth/Recur.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Auth/Recur.thy Wed Aug 26 17:38:18 2009 +0100
@@ -419,15 +419,10 @@
apply spy_analz
txt{*RA2*}
apply blast
-txt{*RA3 remains*}
+txt{*RA3*}
apply (simp add: parts_insert_spies)
-txt{*Now we split into two cases. A single blast could do it, but it would take
- a CPU minute.*}
-apply (safe del: impCE)
-txt{*RA3, case 1: use lemma previously proved by induction*}
-apply (blast elim: rev_notE [OF _ respond_Spy_not_see_session_key])
-txt{*RA3, case 2: K is an old key*}
-apply (blast dest: resp_analz_insert dest: Key_in_parts_respond)
+apply (metis Key_in_parts_respond parts.Body parts.Fst resp_analz_insert
+ respond_Spy_not_see_session_key usedI)
txt{*RA4*}
apply blast
done
--- a/src/HOL/HOL.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/HOL.thy Wed Aug 26 17:38:18 2009 +0100
@@ -29,6 +29,7 @@
"~~/src/Tools/induct.ML"
("~~/src/Tools/induct_tacs.ML")
("Tools/recfun_codegen.ML")
+ "~~/src/Tools/more_conv.ML"
begin
setup {* Intuitionistic.method_setup @{binding iprover} *}
--- a/src/HOL/IsaMakefile Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/IsaMakefile Wed Aug 26 17:38:18 2009 +0100
@@ -108,6 +108,7 @@
$(SRC)/Tools/random_word.ML \
$(SRC)/Tools/value.ML \
$(SRC)/Tools/Code_Generator.thy \
+ $(SRC)/Tools/more_conv.ML \
HOL.thy \
Tools/hologic.ML \
Tools/recfun_codegen.ML \
--- a/src/HOL/Library/normarith.ML Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Library/normarith.ML Wed Aug 26 17:38:18 2009 +0100
@@ -15,7 +15,7 @@
structure NormArith : NORM_ARITH =
struct
- open Conv Thm Conv2;
+ open Conv Thm;
val bool_eq = op = : bool *bool -> bool
fun dest_ratconst t = case term_of t of
Const(@{const_name divide}, _)$a$b => Rat.rat_of_quotient(HOLogic.dest_number a |> snd, HOLogic.dest_number b |> snd)
@@ -322,6 +322,7 @@
val ths = map_filter (fn (v,t) => if v =/ Rat.zero then NONE
else SOME(norm_cmul_rule v t))
(v ~~ nubs)
+ fun end_itlist f xs = split_last xs |> uncurry (fold_rev f)
in inequality_canon_rule ctxt (end_itlist norm_add_rule ths)
end
val ges' = map_filter (try compute_ineq) (fold_rev (append o consider) destfuns []) @
@@ -332,9 +333,9 @@
in RealArith.real_linear_prover translator
(map (fn t => instantiate ([(tv_n, ctyp_of_term t)],[]) pth_zero)
zerodests,
- map (fconv_rule (once_depth_conv (norm_canon_conv) then_conv
+ map (fconv_rule (try_conv (More_Conv.top_sweep_conv (K norm_canon_conv) ctxt) then_conv
arg_conv (arg_conv real_poly_conv))) ges',
- map (fconv_rule (once_depth_conv (norm_canon_conv) then_conv
+ map (fconv_rule (try_conv (More_Conv.top_sweep_conv (K norm_canon_conv) ctxt) then_conv
arg_conv (arg_conv real_poly_conv))) gts)
end
in val real_vector_combo_prover = real_vector_combo_prover
@@ -353,6 +354,7 @@
val ntms = fold_rev find_normedterms (map (dest_arg o concl) (ges @ gts)) []
val lctab = vector_lincombs (map snd (filter (not o fst) ntms))
val (fxns, ctxt') = Variable.variant_fixes (replicate (length lctab) "x") ctxt
+ fun instantiate_cterm' ty tms = Drule.cterm_rule (Drule.instantiate' ty tms)
fun mk_norm t = capply (instantiate_cterm' [SOME (ctyp_of_term t)] [] @{cpat "norm :: (?'a :: real_normed_vector) => real"}) t
fun mk_equals l r = capply (capply (instantiate_cterm' [SOME (ctyp_of_term l)] [] @{cpat "op == :: ?'a =>_"}) l) r
val asl = map2 (fn (t,_) => fn n => assume (mk_equals (mk_norm t) (cterm_of (ProofContext.theory_of ctxt') (Free(n,@{typ real}))))) lctab fxns
--- a/src/HOL/Library/positivstellensatz.ML Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Library/positivstellensatz.ML Wed Aug 26 17:38:18 2009 +0100
@@ -81,82 +81,6 @@
structure Ctermfunc = FuncFun(type key = cterm val ord = (fn (s,t) => TermOrd.fast_term_ord(term_of s, term_of t)));
structure Ratfunc = FuncFun(type key = Rat.rat val ord = Rat.ord);
- (* Some conversions-related stuff which has been forbidden entrance into Pure/conv.ML*)
-structure Conv2 =
-struct
- open Conv
-fun instantiate_cterm' ty tms = Drule.cterm_rule (Drule.instantiate' ty tms)
-fun is_comb t = case (term_of t) of _$_ => true | _ => false;
-fun is_abs t = case (term_of t) of Abs _ => true | _ => false;
-
-fun end_itlist f l =
- case l of
- [] => error "end_itlist"
- | [x] => x
- | (h::t) => f h (end_itlist f t);
-
- fun absc cv ct = case term_of ct of
- Abs (v,_, _) =>
- let val (x,t) = Thm.dest_abs (SOME v) ct
- in Thm.abstract_rule ((fst o dest_Free o term_of) x) x (cv t)
- end
- | _ => all_conv ct;
-
-fun cache_conv conv =
- let
- val tab = ref Termtab.empty
- fun cconv t =
- case Termtab.lookup (!tab) (term_of t) of
- SOME th => th
- | NONE => let val th = conv t
- in ((tab := Termtab.insert Thm.eq_thm (term_of t, th) (!tab)); th) end
- in cconv end;
-fun is_binop ct ct' = ct aconvc (Thm.dest_fun (Thm.dest_fun ct'))
- handle CTERM _ => false;
-
-local
- fun thenqc conv1 conv2 tm =
- case try conv1 tm of
- SOME th1 => (case try conv2 (Thm.rhs_of th1) of SOME th2 => Thm.transitive th1 th2 | NONE => th1)
- | NONE => conv2 tm
-
- fun thencqc conv1 conv2 tm =
- let val th1 = conv1 tm
- in (case try conv2 (Thm.rhs_of th1) of SOME th2 => Thm.transitive th1 th2 | NONE => th1)
- end
- fun comb_qconv conv tm =
- let val (l,r) = Thm.dest_comb tm
- in (case try conv l of
- SOME th1 => (case try conv r of SOME th2 => Thm.combination th1 th2
- | NONE => Drule.fun_cong_rule th1 r)
- | NONE => Drule.arg_cong_rule l (conv r))
- end
- fun repeatqc conv tm = thencqc conv (repeatqc conv) tm
- fun sub_qconv conv tm = if is_abs tm then absc conv tm else comb_qconv conv tm
- fun once_depth_qconv conv tm =
- (conv else_conv (sub_qconv (once_depth_qconv conv))) tm
- fun depth_qconv conv tm =
- thenqc (sub_qconv (depth_qconv conv))
- (repeatqc conv) tm
- fun redepth_qconv conv tm =
- thenqc (sub_qconv (redepth_qconv conv))
- (thencqc conv (redepth_qconv conv)) tm
- fun top_depth_qconv conv tm =
- thenqc (repeatqc conv)
- (thencqc (sub_qconv (top_depth_qconv conv))
- (thencqc conv (top_depth_qconv conv))) tm
- fun top_sweep_qconv conv tm =
- thenqc (repeatqc conv)
- (sub_qconv (top_sweep_qconv conv)) tm
-in
-val (once_depth_conv, depth_conv, rdepth_conv, top_depth_conv, top_sweep_conv) =
- (fn c => try_conv (once_depth_qconv c),
- fn c => try_conv (depth_qconv c),
- fn c => try_conv (redepth_qconv c),
- fn c => try_conv (top_depth_qconv c),
- fn c => try_conv (top_sweep_qconv c));
-end;
-end;
(* Some useful derived rules *)
@@ -373,15 +297,6 @@
fun instantiate_cterm' ty tms = Drule.cterm_rule (Drule.instantiate' ty tms)
fun is_comb t = case (term_of t) of _$_ => true | _ => false;
-fun cache_conv conv =
- let
- val tab = ref Termtab.empty
- fun cconv t =
- case Termtab.lookup (!tab) (term_of t) of
- SOME th => th
- | NONE => let val th = conv t
- in ((tab := Termtab.insert Thm.eq_thm (term_of t, th) (!tab)); th) end
- in cconv end;
fun is_binop ct ct' = ct aconvc (Thm.dest_fun (Thm.dest_fun ct'))
handle CTERM _ => false;
@@ -571,7 +486,7 @@
val nnf_norm_conv' =
nnf_conv then_conv
literals_conv [@{term "op &"}, @{term "op |"}] []
- (cache_conv
+ (More_Conv.cache_conv
(first_conv [real_lt_conv, real_le_conv,
real_eq_conv, real_not_lt_conv,
real_not_le_conv, real_not_eq_conv, all_conv]))
--- a/src/HOL/SET-Protocol/Cardholder_Registration.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/SET-Protocol/Cardholder_Registration.thy Wed Aug 26 17:38:18 2009 +0100
@@ -715,6 +715,7 @@
==> P --> (Nonce N \<in> analz (Key`KK Un H)) = (Nonce N \<in> analz H)"
by (blast intro: analz_mono [THEN [2] rev_subsetD])
+
text{*The @{text "(no_asm)"} attribute is essential, since it retains
the quantifier and allows the simprule's condition to itself be simplified.*}
lemma Nonce_compromise [rule_format (no_asm)]:
@@ -741,12 +742,11 @@
apply blast --{*3*}
apply blast --{*5*}
txt{*Message 6*}
-apply (force del: allE ballE impCE simp add: symKey_compromise)
+apply (metis symKey_compromise)
--{*cardSK compromised*}
txt{*Simplify again--necessary because the previous simplification introduces
- some logical connectives*}
-apply (force del: allE ballE impCE
- simp del: image_insert image_Un imp_disjL
+ some logical connectives*}
+apply (force simp del: image_insert image_Un imp_disjL
simp add: analz_image_keys_simps symKey_compromise)
done
--- a/src/HOL/SET-Protocol/Purchase.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/SET-Protocol/Purchase.thy Wed Aug 26 17:38:18 2009 +0100
@@ -1040,9 +1040,8 @@
apply (frule_tac [9] AuthReq_msg_in_parts_spies) --{*AuthReq*}
apply simp_all
apply blast
-apply (force dest!: signed_Hash_imp_used)
-apply (clarify) --{*speeds next step*}
-apply (blast dest: unique_LID_M)
+apply (metis subsetD insert_subset parts.Fst parts_increasing signed_Hash_imp_used)
+apply (metis unique_LID_M)
apply (blast dest!: Notes_Cardholder_self_False)
done
--- a/src/HOL/SetInterval.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/SetInterval.thy Wed Aug 26 17:38:18 2009 +0100
@@ -186,28 +186,70 @@
seems to take forever (more than one hour). *}
end
-subsubsection{* Emptyness and singletons *}
+subsubsection{* Emptiness, singletons, subset *}
context order
begin
-lemma atLeastAtMost_empty [simp]: "n < m ==> {m..n} = {}";
-by (auto simp add: atLeastAtMost_def atMost_def atLeast_def)
+lemma atLeastatMost_empty[simp]:
+ "b < a \<Longrightarrow> {a..b} = {}"
+by(auto simp: atLeastAtMost_def atLeast_def atMost_def)
+
+lemma atLeastatMost_empty_iff[simp]:
+ "{a..b} = {} \<longleftrightarrow> (~ a <= b)"
+by auto (blast intro: order_trans)
+
+lemma atLeastatMost_empty_iff2[simp]:
+ "{} = {a..b} \<longleftrightarrow> (~ a <= b)"
+by auto (blast intro: order_trans)
+
+lemma atLeastLessThan_empty[simp]:
+ "b <= a \<Longrightarrow> {a..<b} = {}"
+by(auto simp: atLeastLessThan_def)
-lemma atLeastLessThan_empty[simp]: "n \<le> m ==> {m..<n} = {}"
-by (auto simp add: atLeastLessThan_def)
+lemma atLeastLessThan_empty_iff[simp]:
+ "{a..<b} = {} \<longleftrightarrow> (~ a < b)"
+by auto (blast intro: le_less_trans)
+
+lemma atLeastLessThan_empty_iff2[simp]:
+ "{} = {a..<b} \<longleftrightarrow> (~ a < b)"
+by auto (blast intro: le_less_trans)
-lemma greaterThanAtMost_empty[simp]:"l \<le> k ==> {k<..l} = {}"
+lemma greaterThanAtMost_empty[simp]: "l \<le> k ==> {k<..l} = {}"
by(auto simp:greaterThanAtMost_def greaterThan_def atMost_def)
+lemma greaterThanAtMost_empty_iff[simp]: "{k<..l} = {} \<longleftrightarrow> ~ k < l"
+by auto (blast intro: less_le_trans)
+
+lemma greaterThanAtMost_empty_iff2[simp]: "{} = {k<..l} \<longleftrightarrow> ~ k < l"
+by auto (blast intro: less_le_trans)
+
lemma greaterThanLessThan_empty[simp]:"l \<le> k ==> {k<..<l} = {}"
by(auto simp:greaterThanLessThan_def greaterThan_def lessThan_def)
lemma atLeastAtMost_singleton [simp]: "{a..a} = {a}"
by (auto simp add: atLeastAtMost_def atMost_def atLeast_def)
+lemma atLeastatMost_subset_iff[simp]:
+ "{a..b} <= {c..d} \<longleftrightarrow> (~ a <= b) | c <= a & b <= d"
+unfolding atLeastAtMost_def atLeast_def atMost_def
+by (blast intro: order_trans)
+
+lemma atLeastatMost_psubset_iff:
+ "{a..b} < {c..d} \<longleftrightarrow>
+ ((~ a <= b) | c <= a & b <= d & (c < a | b < d)) & c <= d"
+by(simp add: psubset_eq expand_set_eq less_le_not_le)(blast intro: order_trans)
+
end
+lemma (in linorder) atLeastLessThan_subset_iff:
+ "{a..<b} <= {c..<d} \<Longrightarrow> b <= a | c<=a & b<=d"
+apply (auto simp:subset_eq Ball_def)
+apply(frule_tac x=a in spec)
+apply(erule_tac x=d in allE)
+apply (simp add: less_imp_le)
+done
+
subsection {* Intervals of natural numbers *}
subsubsection {* The Constant @{term lessThan} *}
--- a/src/HOL/Tools/Function/fundef_core.ML Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Tools/Function/fundef_core.ML Wed Aug 26 17:38:18 2009 +0100
@@ -577,8 +577,6 @@
val acc_subset_induct = @{thm Orderings.predicate1I} RS @{thm accp_subset_induct}
-fun binder_conv cv ctxt = Conv.arg_conv (Conv.abs_conv (K cv) ctxt);
-
fun mk_partial_induct_rule thy globals R complete_thm clauses =
let
val Globals {domT, x, z, a, P, D, ...} = globals
@@ -614,7 +612,7 @@
val case_hyp_conv = K (case_hyp RS eq_reflection)
local open Conv in
val lhs_D = fconv_rule (arg_conv (arg_conv (case_hyp_conv))) x_D
- val sih = fconv_rule (binder_conv (arg1_conv (arg_conv (arg_conv case_hyp_conv))) ctxt) aihyp
+ val sih = fconv_rule (More_Conv.binder_conv (K (arg1_conv (arg_conv (arg_conv case_hyp_conv)))) ctxt) aihyp
end
fun mk_Prec (RCInfo {llRI, RIvs, CCas, rcarg, ...}) =
--- a/src/HOL/Tools/Qelim/cooper.ML Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/Tools/Qelim/cooper.ML Wed Aug 26 17:38:18 2009 +0100
@@ -246,8 +246,10 @@
RS eq_reflection
end;
-fun is_intrel (b$_$_) = domain_type (fastype_of b) = HOLogic.intT
- | is_intrel (@{term "Not"}$(b$_$_)) = domain_type (fastype_of b) = HOLogic.intT
+fun is_intrel_type T = T = @{typ "int => int => bool"};
+
+fun is_intrel (b$_$_) = is_intrel_type (fastype_of b)
+ | is_intrel (@{term "Not"}$(b$_$_)) = is_intrel_type (fastype_of b)
| is_intrel _ = false;
fun linearize_conv ctxt vs ct = case term_of ct of
--- a/src/HOL/ex/Predicate_Compile_ex.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/ex/Predicate_Compile_ex.thy Wed Aug 26 17:38:18 2009 +0100
@@ -99,4 +99,33 @@
values 20 "{(n, m). tranclp succ n m}"
*)
+subsection{* CCS *}
+
+text{* This example formalizes finite CCS processes without communication or
+recursion. For simplicity, labels are natural numbers. *}
+
+datatype proc = nil | pre nat proc | or proc proc | par proc proc
+
+inductive step :: "proc \<Rightarrow> nat \<Rightarrow> proc \<Rightarrow> bool" where
+"step (pre n p) n p" |
+"step p1 a q \<Longrightarrow> step (or p1 p2) a q" |
+"step p2 a q \<Longrightarrow> step (or p1 p2) a q" |
+"step p1 a q \<Longrightarrow> step (par p1 p2) a (par q p2)" |
+"step p2 a q \<Longrightarrow> step (par p1 p2) a (par p1 q)"
+
+code_pred step .
+
+inductive steps where
+"steps p [] p" |
+"step p a q \<Longrightarrow> steps q as r \<Longrightarrow> steps p (a#as) r"
+
+code_pred steps .
+
+values 5
+ "{as . steps (par (or (pre 0 nil) (pre 1 nil)) (pre 2 nil)) as (par nil nil)}"
+
+(* FIXME
+values 3 "{(a,q). step (par nil nil) a q}"
+*)
+
end
\ No newline at end of file
--- a/src/HOL/ex/Termination.thy Wed Aug 26 17:38:02 2009 +0100
+++ b/src/HOL/ex/Termination.thy Wed Aug 26 17:38:18 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/ex/Termination.thy
- ID: $Id$
Author: Lukas Bulwahn, TU Muenchen
Author: Alexander Krauss, TU Muenchen
*)
@@ -10,12 +9,33 @@
imports Main Multiset
begin
+subsection {* Manually giving termination relations using @{text relation} and
+@{term measure} *}
+
+function sum :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+where
+ "sum i N = (if i > N then 0 else i + sum (Suc i) N)"
+by pat_completeness auto
+
+termination by (relation "measure (\<lambda>(i,N). N + 1 - i)") auto
+
+function foo :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+where
+ "foo i N = (if i > N
+ then (if N = 0 then 0 else foo 0 (N - 1))
+ else i + foo (Suc i) N)"
+by pat_completeness auto
+
+termination by (relation "measures [\<lambda>(i, N). N, \<lambda>(i,N). N + 1 - i]") auto
+
+
+subsection {* @{text lexicographic_order}: Trivial examples *}
+
text {*
- The @{text fun} command uses the method @{text lexicographic_order} by default.
+ The @{text fun} command uses the method @{text lexicographic_order} by default,
+ so it is not explicitly invoked.
*}
-subsection {* Trivial examples *}
-
fun identity :: "nat \<Rightarrow> nat"
where
"identity n = n"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/more_conv.ML Wed Aug 26 17:38:18 2009 +0100
@@ -0,0 +1,61 @@
+(* Title: Tools/more_conv.ML
+ Author: Sascha Boehme
+
+Further conversions and conversionals.
+*)
+
+signature MORE_CONV =
+sig
+ val rewrs_conv: thm list -> conv
+
+ val sub_conv: (Proof.context -> conv) -> Proof.context -> conv
+ val bottom_conv: (Proof.context -> conv) -> Proof.context -> conv
+ val top_conv: (Proof.context -> conv) -> Proof.context -> conv
+ val top_sweep_conv: (Proof.context -> conv) -> Proof.context -> conv
+
+ val binder_conv: (Proof.context -> conv) -> Proof.context -> conv
+
+ val cache_conv: conv -> conv
+end
+
+
+
+structure More_Conv : MORE_CONV =
+struct
+
+
+fun rewrs_conv eqs = Conv.first_conv (map Conv.rewr_conv eqs)
+
+
+fun sub_conv conv ctxt =
+ Conv.comb_conv (conv ctxt) else_conv
+ Conv.abs_conv (fn (_, cx) => conv cx) ctxt else_conv
+ Conv.all_conv
+
+fun bottom_conv conv ctxt ct =
+ (sub_conv (bottom_conv conv) ctxt then_conv conv ctxt) ct
+
+fun top_conv conv ctxt ct =
+ (conv ctxt then_conv sub_conv (top_conv conv) ctxt) ct
+
+fun top_sweep_conv conv ctxt ct =
+ (conv ctxt else_conv sub_conv (top_sweep_conv conv) ctxt) ct
+
+
+fun binder_conv cv ctxt =
+ Conv.arg_conv (Conv.abs_conv (fn (_, cx) => cv cx) ctxt)
+
+
+fun cache_conv conv =
+ let
+ val tab = ref Termtab.empty
+ fun add_result t thm =
+ let val _ = change tab (Termtab.insert Thm.eq_thm (t, thm))
+ in thm end
+ fun cconv ct =
+ (case Termtab.lookup (!tab) (Thm.term_of ct) of
+ SOME thm => thm
+ | NONE => add_result (Thm.term_of ct) (conv ct))
+ in cconv end
+
+end
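
A minimal usage sketch of the new More_Conv combinators (not part of the changeset; the fact collection "my_rules" is hypothetical and stands for whatever rewrite rules a caller supplies). It shows how the exported functions compose: rewrs_conv builds a conversion from a rule list, top_sweep_conv applies it once along each top-down path, and cache_conv memoises results by term.

  (* Hedged example, assuming the signature above; "my_rules" is a
     placeholder fact name, not an existing theorem collection. *)
  val my_conv =
    More_Conv.cache_conv
      (More_Conv.top_sweep_conv
        (fn _ => More_Conv.rewrs_conv @{thms my_rules}) @{context});

Such a conversion can then be lifted to a tactic or rule transformer in the usual way, e.g. via Conv.fconv_rule, as done with top_sweep_conv in the normarith.ML hunk above.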