# HG changeset patch # User blanchet # Date 1268917160 -3600 # Node ID e31ec41a551b9e401b67d003e8af19555b287344 # Parent b766aad9136d58c0e6abe018ed9c04861d63b56e# Parent d4c4f88f643289c1e24fa69491b030a9c91c4922 merged diff -r b766aad9136d -r e31ec41a551b src/HOL/ATP_Linkup.thy --- a/src/HOL/ATP_Linkup.thy Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,127 +0,0 @@ -(* Title: HOL/ATP_Linkup.thy - Author: Lawrence C Paulson - Author: Jia Meng, NICTA - Author: Fabian Immler, TUM -*) - -header {* The Isabelle-ATP Linkup *} - -theory ATP_Linkup -imports Plain Hilbert_Choice -uses - "Tools/polyhash.ML" - "Tools/res_clause.ML" - ("Tools/res_axioms.ML") - ("Tools/res_hol_clause.ML") - ("Tools/res_reconstruct.ML") - ("Tools/res_atp.ML") - ("Tools/ATP_Manager/atp_manager.ML") - ("Tools/ATP_Manager/atp_wrapper.ML") - ("Tools/ATP_Manager/atp_minimal.ML") - "~~/src/Tools/Metis/metis.ML" - ("Tools/metis_tools.ML") -begin - -definition COMBI :: "'a => 'a" - where "COMBI P == P" - -definition COMBK :: "'a => 'b => 'a" - where "COMBK P Q == P" - -definition COMBB :: "('b => 'c) => ('a => 'b) => 'a => 'c" - where "COMBB P Q R == P (Q R)" - -definition COMBC :: "('a => 'b => 'c) => 'b => 'a => 'c" - where "COMBC P Q R == P R Q" - -definition COMBS :: "('a => 'b => 'c) => ('a => 'b) => 'a => 'c" - where "COMBS P Q R == P R (Q R)" - -definition fequal :: "'a => 'a => bool" - where "fequal X Y == (X=Y)" - -lemma fequal_imp_equal: "fequal X Y ==> X=Y" - by (simp add: fequal_def) - -lemma equal_imp_fequal: "X=Y ==> fequal X Y" - by (simp add: fequal_def) - -text{*These two represent the equivalence between Boolean equality and iff. -They can't be converted to clauses automatically, as the iff would be -expanded...*} - -lemma iff_positive: "P | Q | P=Q" -by blast - -lemma iff_negative: "~P | ~Q | P=Q" -by blast - -text{*Theorems for translation to combinators*} - -lemma abs_S: "(%x. (f x) (g x)) == COMBS f g" -apply (rule eq_reflection) -apply (rule ext) -apply (simp add: COMBS_def) -done - -lemma abs_I: "(%x. x) == COMBI" -apply (rule eq_reflection) -apply (rule ext) -apply (simp add: COMBI_def) -done - -lemma abs_K: "(%x. y) == COMBK y" -apply (rule eq_reflection) -apply (rule ext) -apply (simp add: COMBK_def) -done - -lemma abs_B: "(%x. a (g x)) == COMBB a g" -apply (rule eq_reflection) -apply (rule ext) -apply (simp add: COMBB_def) -done - -lemma abs_C: "(%x. 
(f x) b) == COMBC f b" -apply (rule eq_reflection) -apply (rule ext) -apply (simp add: COMBC_def) -done - - -subsection {* Setup of external ATPs *} - -use "Tools/res_axioms.ML" setup Res_Axioms.setup -use "Tools/res_hol_clause.ML" -use "Tools/res_reconstruct.ML" setup Res_Reconstruct.setup -use "Tools/res_atp.ML" - -use "Tools/ATP_Manager/atp_wrapper.ML" setup ATP_Wrapper.setup -use "Tools/ATP_Manager/atp_manager.ML" -use "Tools/ATP_Manager/atp_minimal.ML" - -text {* basic provers *} -setup {* ATP_Manager.add_prover ATP_Wrapper.spass *} -setup {* ATP_Manager.add_prover ATP_Wrapper.vampire *} -setup {* ATP_Manager.add_prover ATP_Wrapper.eprover *} - -text {* provers with stuctured output *} -setup {* ATP_Manager.add_prover ATP_Wrapper.vampire_full *} -setup {* ATP_Manager.add_prover ATP_Wrapper.eprover_full *} - -text {* on some problems better results *} -setup {* ATP_Manager.add_prover ATP_Wrapper.spass_no_tc *} - -text {* remote provers via SystemOnTPTP *} -setup {* ATP_Manager.add_prover ATP_Wrapper.remote_vampire *} -setup {* ATP_Manager.add_prover ATP_Wrapper.remote_spass *} -setup {* ATP_Manager.add_prover ATP_Wrapper.remote_eprover *} - - - -subsection {* The Metis prover *} - -use "Tools/metis_tools.ML" -setup MetisTools.setup - -end diff -r b766aad9136d -r e31ec41a551b src/HOL/Big_Operators.thy --- a/src/HOL/Big_Operators.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Big_Operators.thy Thu Mar 18 13:59:20 2010 +0100 @@ -1577,12 +1577,12 @@ from assms show ?thesis by (simp add: Max_def fold1_belowI) qed -lemma Min_ge_iff [simp, noatp]: +lemma Min_ge_iff [simp, no_atp]: assumes "finite A" and "A \ {}" shows "x \ Min A \ (\a\A. x \ a)" using assms by (simp add: Min_def min_max.below_fold1_iff) -lemma Max_le_iff [simp, noatp]: +lemma Max_le_iff [simp, no_atp]: assumes "finite A" and "A \ {}" shows "Max A \ x \ (\a\A. a \ x)" proof - @@ -1591,12 +1591,12 @@ from assms show ?thesis by (simp add: Max_def below_fold1_iff) qed -lemma Min_gr_iff [simp, noatp]: +lemma Min_gr_iff [simp, no_atp]: assumes "finite A" and "A \ {}" shows "x < Min A \ (\a\A. x < a)" using assms by (simp add: Min_def strict_below_fold1_iff) -lemma Max_less_iff [simp, noatp]: +lemma Max_less_iff [simp, no_atp]: assumes "finite A" and "A \ {}" shows "Max A < x \ (\a\A. a < x)" proof - @@ -1606,12 +1606,12 @@ by (simp add: Max_def dual.strict_below_fold1_iff [folded dual.dual_max]) qed -lemma Min_le_iff [noatp]: +lemma Min_le_iff [no_atp]: assumes "finite A" and "A \ {}" shows "Min A \ x \ (\a\A. a \ x)" using assms by (simp add: Min_def fold1_below_iff) -lemma Max_ge_iff [noatp]: +lemma Max_ge_iff [no_atp]: assumes "finite A" and "A \ {}" shows "x \ Max A \ (\a\A. x \ a)" proof - @@ -1621,12 +1621,12 @@ by (simp add: Max_def dual.fold1_below_iff [folded dual.dual_max]) qed -lemma Min_less_iff [noatp]: +lemma Min_less_iff [no_atp]: assumes "finite A" and "A \ {}" shows "Min A < x \ (\a\A. a < x)" using assms by (simp add: Min_def fold1_strict_below_iff) -lemma Max_gr_iff [noatp]: +lemma Max_gr_iff [no_atp]: assumes "finite A" and "A \ {}" shows "x < Max A \ (\a\A. x < a)" proof - diff -r b766aad9136d -r e31ec41a551b src/HOL/Complete_Lattice.thy --- a/src/HOL/Complete_Lattice.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Complete_Lattice.thy Thu Mar 18 13:59:20 2010 +0100 @@ -231,7 +231,7 @@ by (simp add: Sup_fun_def Sup_bool_def) (simp add: mem_def) qed -lemma Union_iff [simp, noatp]: +lemma Union_iff [simp, no_atp]: "A \ \C \ (\X\C. 
A\X)" by (unfold Union_eq) blast @@ -269,10 +269,10 @@ lemma Union_Int_subset: "\(A \ B) \ \A \ \B" by blast -lemma Union_empty_conv [simp,noatp]: "(\A = {}) = (\x\A. x = {})" +lemma Union_empty_conv [simp,no_atp]: "(\A = {}) = (\x\A. x = {})" by blast -lemma empty_Union_conv [simp,noatp]: "({} = \A) = (\x\A. x = {})" +lemma empty_Union_conv [simp,no_atp]: "({} = \A) = (\x\A. x = {})" by blast lemma Union_disjoint: "(\C \ A = {}) = (\B\C. B \ A = {})" @@ -332,7 +332,7 @@ "\S = (\x\S. x)" by (simp add: UNION_eq_Union_image image_def) -lemma UNION_def [noatp]: +lemma UNION_def [no_atp]: "(\x\A. B x) = {y. \x\A. y \ B x}" by (auto simp add: UNION_eq_Union_image Union_eq) @@ -368,13 +368,13 @@ lemma UN_least: "(!!x. x \ A ==> B x \ C) ==> (\x\A. B x) \ C" by (iprover intro: subsetI elim: UN_E dest: subsetD) -lemma Collect_bex_eq [noatp]: "{x. \y\A. P x y} = (\y\A. {x. P x y})" +lemma Collect_bex_eq [no_atp]: "{x. \y\A. P x y} = (\y\A. {x. P x y})" by blast lemma UN_insert_distrib: "u \ A ==> (\x\A. insert a (B x)) = insert a (\x\A. B x)" by blast -lemma UN_empty [simp,noatp]: "(\x\{}. B x) = {}" +lemma UN_empty [simp,no_atp]: "(\x\{}. B x) = {}" by blast lemma UN_empty2 [simp]: "(\x\A. {}) = {}" @@ -412,7 +412,7 @@ "((UN x:A. B x) = {}) = (\x\A. B x = {})" by blast+ -lemma Collect_ex_eq [noatp]: "{x. \y. P x y} = (\y. {x. P x y})" +lemma Collect_ex_eq [no_atp]: "{x. \y. P x y} = (\y. {x. P x y})" by blast lemma ball_UN: "(\z \ UNION A B. P z) = (\x\A. \z \ B x. P z)" @@ -467,7 +467,7 @@ by (simp add: Inf_fun_def Inf_bool_def) (simp add: mem_def) qed -lemma Inter_iff [simp,noatp]: "(A : Inter C) = (ALL X:C. A:X)" +lemma Inter_iff [simp,no_atp]: "(A : Inter C) = (ALL X:C. A:X)" by (unfold Inter_eq) blast lemma InterI [intro!]: "(!!X. X:C ==> A:X) ==> A : Inter C" @@ -515,7 +515,7 @@ lemma Inter_Un_distrib: "\(A \ B) = \A \ \B" by blast -lemma Inter_UNIV_conv [simp,noatp]: +lemma Inter_UNIV_conv [simp,no_atp]: "(\A = UNIV) = (\x\A. x = UNIV)" "(UNIV = \A) = (\x\A. x = UNIV)" by blast+ @@ -724,7 +724,7 @@ "!!A B f. (INT x:f`A. B x) = (INT a:A. B (f a))" by auto -lemma ball_simps [simp,noatp]: +lemma ball_simps [simp,no_atp]: "!!A P Q. (ALL x:A. P x | Q) = ((ALL x:A. P x) | Q)" "!!A P Q. (ALL x:A. P | Q x) = (P | (ALL x:A. Q x))" "!!A P Q. (ALL x:A. P --> Q x) = (P --> (ALL x:A. Q x))" @@ -739,7 +739,7 @@ "!!A P. (~(ALL x:A. P x)) = (EX x:A. ~P x)" by auto -lemma bex_simps [simp,noatp]: +lemma bex_simps [simp,no_atp]: "!!A P Q. (EX x:A. P x & Q) = ((EX x:A. P x) & Q)" "!!A P Q. (EX x:A. P & Q x) = (P & (EX x:A. Q x))" "!!P. (EX x:{}. P x) = False" diff -r b766aad9136d -r e31ec41a551b src/HOL/Decision_Procs/Dense_Linear_Order.thy --- a/src/HOL/Decision_Procs/Dense_Linear_Order.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Decision_Procs/Dense_Linear_Order.thy Thu Mar 18 13:59:20 2010 +0100 @@ -19,9 +19,9 @@ context linorder begin -lemma less_not_permute[noatp]: "\ (x < y \ y < x)" by (simp add: not_less linear) +lemma less_not_permute[no_atp]: "\ (x < y \ y < x)" by (simp add: not_less linear) -lemma gather_simps[noatp]: +lemma gather_simps[no_atp]: shows "(\x. (\y \ L. y < x) \ (\y \ U. x < y) \ x < u \ P x) \ (\x. (\y \ L. y < x) \ (\y \ (insert u U). x < y) \ P x)" and "(\x. (\y \ L. y < x) \ (\y \ U. x < y) \ l < x \ P x) \ (\x. (\y \ (insert l L). y < x) \ (\y \ U. x < y) \ P x)" @@ -29,61 +29,61 @@ and "(\x. (\y \ L. y < x) \ (\y \ U. x < y) \ l < x) \ (\x. (\y \ (insert l L). y < x) \ (\y \ U. x < y))" by auto lemma - gather_start[noatp]: "(\x. P x) \ (\x. (\y \ {}. y < x) \ (\y\ {}. 
x < y) \ P x)" + gather_start[no_atp]: "(\x. P x) \ (\x. (\y \ {}. y < x) \ (\y\ {}. x < y) \ P x)" by simp text{* Theorems for @{text "\z. \x. x < z \ (P x \ P\<^bsub>-\\<^esub>)"}*} -lemma minf_lt[noatp]: "\z . \x. x < z \ (x < t \ True)" by auto -lemma minf_gt[noatp]: "\z . \x. x < z \ (t < x \ False)" +lemma minf_lt[no_atp]: "\z . \x. x < z \ (x < t \ True)" by auto +lemma minf_gt[no_atp]: "\z . \x. x < z \ (t < x \ False)" by (simp add: not_less) (rule exI[where x="t"], auto simp add: less_le) -lemma minf_le[noatp]: "\z. \x. x < z \ (x \ t \ True)" by (auto simp add: less_le) -lemma minf_ge[noatp]: "\z. \x. x < z \ (t \ x \ False)" +lemma minf_le[no_atp]: "\z. \x. x < z \ (x \ t \ True)" by (auto simp add: less_le) +lemma minf_ge[no_atp]: "\z. \x. x < z \ (t \ x \ False)" by (auto simp add: less_le not_less not_le) -lemma minf_eq[noatp]: "\z. \x. x < z \ (x = t \ False)" by auto -lemma minf_neq[noatp]: "\z. \x. x < z \ (x \ t \ True)" by auto -lemma minf_P[noatp]: "\z. \x. x < z \ (P \ P)" by blast +lemma minf_eq[no_atp]: "\z. \x. x < z \ (x = t \ False)" by auto +lemma minf_neq[no_atp]: "\z. \x. x < z \ (x \ t \ True)" by auto +lemma minf_P[no_atp]: "\z. \x. x < z \ (P \ P)" by blast text{* Theorems for @{text "\z. \x. x < z \ (P x \ P\<^bsub>+\\<^esub>)"}*} -lemma pinf_gt[noatp]: "\z . \x. z < x \ (t < x \ True)" by auto -lemma pinf_lt[noatp]: "\z . \x. z < x \ (x < t \ False)" +lemma pinf_gt[no_atp]: "\z . \x. z < x \ (t < x \ True)" by auto +lemma pinf_lt[no_atp]: "\z . \x. z < x \ (x < t \ False)" by (simp add: not_less) (rule exI[where x="t"], auto simp add: less_le) -lemma pinf_ge[noatp]: "\z. \x. z < x \ (t \ x \ True)" by (auto simp add: less_le) -lemma pinf_le[noatp]: "\z. \x. z < x \ (x \ t \ False)" +lemma pinf_ge[no_atp]: "\z. \x. z < x \ (t \ x \ True)" by (auto simp add: less_le) +lemma pinf_le[no_atp]: "\z. \x. z < x \ (x \ t \ False)" by (auto simp add: less_le not_less not_le) -lemma pinf_eq[noatp]: "\z. \x. z < x \ (x = t \ False)" by auto -lemma pinf_neq[noatp]: "\z. \x. z < x \ (x \ t \ True)" by auto -lemma pinf_P[noatp]: "\z. \x. z < x \ (P \ P)" by blast +lemma pinf_eq[no_atp]: "\z. \x. z < x \ (x = t \ False)" by auto +lemma pinf_neq[no_atp]: "\z. \x. z < x \ (x \ t \ True)" by auto +lemma pinf_P[no_atp]: "\z. \x. z < x \ (P \ P)" by blast -lemma nmi_lt[noatp]: "t \ U \ \x. \True \ x < t \ (\ u\ U. u \ x)" by auto -lemma nmi_gt[noatp]: "t \ U \ \x. \False \ t < x \ (\ u\ U. u \ x)" +lemma nmi_lt[no_atp]: "t \ U \ \x. \True \ x < t \ (\ u\ U. u \ x)" by auto +lemma nmi_gt[no_atp]: "t \ U \ \x. \False \ t < x \ (\ u\ U. u \ x)" by (auto simp add: le_less) -lemma nmi_le[noatp]: "t \ U \ \x. \True \ x\ t \ (\ u\ U. u \ x)" by auto -lemma nmi_ge[noatp]: "t \ U \ \x. \False \ t\ x \ (\ u\ U. u \ x)" by auto -lemma nmi_eq[noatp]: "t \ U \ \x. \False \ x = t \ (\ u\ U. u \ x)" by auto -lemma nmi_neq[noatp]: "t \ U \\x. \True \ x \ t \ (\ u\ U. u \ x)" by auto -lemma nmi_P[noatp]: "\ x. ~P \ P \ (\ u\ U. u \ x)" by auto -lemma nmi_conj[noatp]: "\\x. \P1' \ P1 x \ (\ u\ U. u \ x) ; +lemma nmi_le[no_atp]: "t \ U \ \x. \True \ x\ t \ (\ u\ U. u \ x)" by auto +lemma nmi_ge[no_atp]: "t \ U \ \x. \False \ t\ x \ (\ u\ U. u \ x)" by auto +lemma nmi_eq[no_atp]: "t \ U \ \x. \False \ x = t \ (\ u\ U. u \ x)" by auto +lemma nmi_neq[no_atp]: "t \ U \\x. \True \ x \ t \ (\ u\ U. u \ x)" by auto +lemma nmi_P[no_atp]: "\ x. ~P \ P \ (\ u\ U. u \ x)" by auto +lemma nmi_conj[no_atp]: "\\x. \P1' \ P1 x \ (\ u\ U. u \ x) ; \x. \P2' \ P2 x \ (\ u\ U. u \ x)\ \ \x. 
\(P1' \ P2') \ (P1 x \ P2 x) \ (\ u\ U. u \ x)" by auto -lemma nmi_disj[noatp]: "\\x. \P1' \ P1 x \ (\ u\ U. u \ x) ; +lemma nmi_disj[no_atp]: "\\x. \P1' \ P1 x \ (\ u\ U. u \ x) ; \x. \P2' \ P2 x \ (\ u\ U. u \ x)\ \ \x. \(P1' \ P2') \ (P1 x \ P2 x) \ (\ u\ U. u \ x)" by auto -lemma npi_lt[noatp]: "t \ U \ \x. \False \ x < t \ (\ u\ U. x \ u)" by (auto simp add: le_less) -lemma npi_gt[noatp]: "t \ U \ \x. \True \ t < x \ (\ u\ U. x \ u)" by auto -lemma npi_le[noatp]: "t \ U \ \x. \False \ x \ t \ (\ u\ U. x \ u)" by auto -lemma npi_ge[noatp]: "t \ U \ \x. \True \ t \ x \ (\ u\ U. x \ u)" by auto -lemma npi_eq[noatp]: "t \ U \ \x. \False \ x = t \ (\ u\ U. x \ u)" by auto -lemma npi_neq[noatp]: "t \ U \ \x. \True \ x \ t \ (\ u\ U. x \ u )" by auto -lemma npi_P[noatp]: "\ x. ~P \ P \ (\ u\ U. x \ u)" by auto -lemma npi_conj[noatp]: "\\x. \P1' \ P1 x \ (\ u\ U. x \ u) ; \x. \P2' \ P2 x \ (\ u\ U. x \ u)\ +lemma npi_lt[no_atp]: "t \ U \ \x. \False \ x < t \ (\ u\ U. x \ u)" by (auto simp add: le_less) +lemma npi_gt[no_atp]: "t \ U \ \x. \True \ t < x \ (\ u\ U. x \ u)" by auto +lemma npi_le[no_atp]: "t \ U \ \x. \False \ x \ t \ (\ u\ U. x \ u)" by auto +lemma npi_ge[no_atp]: "t \ U \ \x. \True \ t \ x \ (\ u\ U. x \ u)" by auto +lemma npi_eq[no_atp]: "t \ U \ \x. \False \ x = t \ (\ u\ U. x \ u)" by auto +lemma npi_neq[no_atp]: "t \ U \ \x. \True \ x \ t \ (\ u\ U. x \ u )" by auto +lemma npi_P[no_atp]: "\ x. ~P \ P \ (\ u\ U. x \ u)" by auto +lemma npi_conj[no_atp]: "\\x. \P1' \ P1 x \ (\ u\ U. x \ u) ; \x. \P2' \ P2 x \ (\ u\ U. x \ u)\ \ \x. \(P1' \ P2') \ (P1 x \ P2 x) \ (\ u\ U. x \ u)" by auto -lemma npi_disj[noatp]: "\\x. \P1' \ P1 x \ (\ u\ U. x \ u) ; \x. \P2' \ P2 x \ (\ u\ U. x \ u)\ +lemma npi_disj[no_atp]: "\\x. \P1' \ P1 x \ (\ u\ U. x \ u) ; \x. \P2' \ P2 x \ (\ u\ U. x \ u)\ \ \x. \(P1' \ P2') \ (P1 x \ P2 x) \ (\ u\ U. x \ u)" by auto -lemma lin_dense_lt[noatp]: "t \ U \ \x l u. (\ t. l < t \ t < u \ t \ U) \ l< x \ x < u \ x < t \ (\ y. l < y \ y < u \ y < t)" +lemma lin_dense_lt[no_atp]: "t \ U \ \x l u. (\ t. l < t \ t < u \ t \ U) \ l< x \ x < u \ x < t \ (\ y. l < y \ y < u \ y < t)" proof(clarsimp) fix x l u y assume tU: "t \ U" and noU: "\t. l < t \ t < u \ t \ U" and lx: "l < x" and xu: "x U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l < x \ x < u \ t < x \ (\ y. l < y \ y < u \ t < y)" +lemma lin_dense_gt[no_atp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l < x \ x < u \ t < x \ (\ y. l < y \ y < u \ t < y)" proof(clarsimp) fix x l u y assume tU: "t \ U" and noU: "\t. l < t \ t < u \ t \ U" and lx: "l < x" and xu: "x U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ x \ t \ (\ y. l < y \ y < u \ y\ t)" +lemma lin_dense_le[no_atp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ x \ t \ (\ y. l < y \ y < u \ y\ t)" proof(clarsimp) fix x l u y assume tU: "t \ U" and noU: "\t. l < t \ t < u \ t \ U" and lx: "l < x" and xu: "x t < y" by auto thus "y \ t" by (simp add: not_less) qed -lemma lin_dense_ge[noatp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ t \ x \ (\ y. l < y \ y < u \ t \ y)" +lemma lin_dense_ge[no_atp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ t \ x \ (\ y. l < y \ y < u \ t \ y)" proof(clarsimp) fix x l u y assume tU: "t \ U" and noU: "\t. l < t \ t < u \ t \ U" and lx: "l < x" and xu: "x y y" by (simp add: not_less) qed -lemma lin_dense_eq[noatp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ x = t \ (\ y. 
l < y \ y < u \ y= t)" by auto -lemma lin_dense_neq[noatp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ x \ t \ (\ y. l < y \ y < u \ y\ t)" by auto -lemma lin_dense_P[noatp]: "\x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ P \ (\ y. l < y \ y < u \ P)" by auto +lemma lin_dense_eq[no_atp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ x = t \ (\ y. l < y \ y < u \ y= t)" by auto +lemma lin_dense_neq[no_atp]: "t \ U \ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ x \ t \ (\ y. l < y \ y < u \ y\ t)" by auto +lemma lin_dense_P[no_atp]: "\x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ P \ (\ y. l < y \ y < u \ P)" by auto -lemma lin_dense_conj[noatp]: +lemma lin_dense_conj[no_atp]: "\\x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ P1 x \ (\ y. l < y \ y < u \ P1 y) ; \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ P2 x @@ -146,7 +146,7 @@ \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ (P1 x \ P2 x) \ (\ y. l < y \ y < u \ (P1 y \ P2 y))" by blast -lemma lin_dense_disj[noatp]: +lemma lin_dense_disj[no_atp]: "\\x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ P1 x \ (\ y. l < y \ y < u \ P1 y) ; \x l u. (\ t. l < t \ t< u \ t \ U) \ l< x \ x < u \ P2 x @@ -155,11 +155,11 @@ \ (\ y. l < y \ y < u \ (P1 y \ P2 y))" by blast -lemma npmibnd[noatp]: "\\x. \ MP \ P x \ (\ u\ U. u \ x); \x. \PP \ P x \ (\ u\ U. x \ u)\ +lemma npmibnd[no_atp]: "\\x. \ MP \ P x \ (\ u\ U. u \ x); \x. \PP \ P x \ (\ u\ U. x \ u)\ \ \x. \ MP \ \PP \ P x \ (\ u\ U. \ u' \ U. u \ x \ x \ u')" by auto -lemma finite_set_intervals[noatp]: +lemma finite_set_intervals[no_atp]: assumes px: "P x" and lx: "l \ x" and xu: "x \ u" and linS: "l\ S" and uinS: "u \ S" and fS:"finite S" and lS: "\ x\ S. l \ x" and Su: "\ x\ S. x \ u" shows "\ a \ S. \ b \ S. (\ y. a < y \ y < b \ y \ S) \ a \ x \ x \ b \ P x" @@ -191,7 +191,7 @@ from ainS binS noy ax xb px show ?thesis by blast qed -lemma finite_set_intervals2[noatp]: +lemma finite_set_intervals2[no_atp]: assumes px: "P x" and lx: "l \ x" and xu: "x \ u" and linS: "l\ S" and uinS: "u \ S" and fS:"finite S" and lS: "\ x\ S. l \ x" and Su: "\ x\ S. x \ u" shows "(\ s\ S. P s) \ (\ a \ S. \ b \ S. (\ y. a < y \ y < b \ y \ S) \ a < x \ x < b \ P x)" @@ -215,7 +215,7 @@ "{y. x < y \ y < z} = {} \ \ x < z" by (auto dest: dense) -lemma dlo_qe_bnds[noatp]: +lemma dlo_qe_bnds[no_atp]: assumes ne: "L \ {}" and neU: "U \ {}" and fL: "finite L" and fU: "finite U" shows "(\x. (\y \ L. y < x) \ (\y \ U. x < y)) \ (\ l \ L. \u \ U. l < u)" proof (simp only: atomize_eq, rule iffI) @@ -239,7 +239,7 @@ ultimately show "\x. (\y\L. y < x) \ (\y\U. x < y)" by auto qed -lemma dlo_qe_noub[noatp]: +lemma dlo_qe_noub[no_atp]: assumes ne: "L \ {}" and fL: "finite L" shows "(\x. (\y \ L. y < x) \ (\y \ {}. x < y)) \ True" proof(simp add: atomize_eq) @@ -249,7 +249,7 @@ thus "\x. \y\L. y < x" by blast qed -lemma dlo_qe_nolb[noatp]: +lemma dlo_qe_nolb[no_atp]: assumes ne: "U \ {}" and fU: "finite U" shows "(\x. (\y \ {}. y < x) \ (\y \ U. x < y)) \ True" proof(simp add: atomize_eq) @@ -259,14 +259,14 @@ thus "\x. \y\U. x < y" by blast qed -lemma exists_neq[noatp]: "\(x::'a). x \ t" "\(x::'a). t \ x" +lemma exists_neq[no_atp]: "\(x::'a). x \ t" "\(x::'a). 
t \ x" using gt_ex[of t] by auto -lemmas dlo_simps[noatp] = order_refl less_irrefl not_less not_le exists_neq +lemmas dlo_simps[no_atp] = order_refl less_irrefl not_less not_le exists_neq le_less neq_iff linear less_not_permute -lemma axiom[noatp]: "dense_linorder (op \) (op <)" by (rule dense_linorder_axioms) -lemma atoms[noatp]: +lemma axiom[no_atp]: "dense_linorder (op \) (op <)" by (rule dense_linorder_axioms) +lemma atoms[no_atp]: shows "TERM (less :: 'a \ _)" and "TERM (less_eq :: 'a \ _)" and "TERM (op = :: 'a \ _)" . @@ -277,21 +277,21 @@ end (* FIXME: Move to HOL -- together with the conj_aci_rule in langford.ML *) -lemma dnf[noatp]: +lemma dnf[no_atp]: "(P & (Q | R)) = ((P&Q) | (P&R))" "((Q | R) & P) = ((Q&P) | (R&P))" by blast+ -lemmas weak_dnf_simps[noatp] = simp_thms dnf +lemmas weak_dnf_simps[no_atp] = simp_thms dnf -lemma nnf_simps[noatp]: +lemma nnf_simps[no_atp]: "(\(P \ Q)) = (\P \ \Q)" "(\(P \ Q)) = (\P \ \Q)" "(P \ Q) = (\P \ Q)" "(P = Q) = ((P \ Q) \ (\P \ \ Q))" "(\ \(P)) = P" by blast+ -lemma ex_distrib[noatp]: "(\x. P x \ Q x) \ ((\x. P x) \ (\x. Q x))" by blast +lemma ex_distrib[no_atp]: "(\x. P x \ Q x) \ ((\x. P x) \ (\x. Q x))" by blast -lemmas dnf_simps[noatp] = weak_dnf_simps nnf_simps ex_distrib +lemmas dnf_simps[no_atp] = weak_dnf_simps nnf_simps ex_distrib use "~~/src/HOL/Tools/Qelim/langford.ML" method_setup dlo = {* @@ -316,10 +316,10 @@ locale linorder_no_ub = linorder_stupid_syntax + assumes gt_ex: "\y. less x y" begin -lemma ge_ex[noatp]: "\y. x \ y" using gt_ex by auto +lemma ge_ex[no_atp]: "\y. x \ y" using gt_ex by auto text {* Theorems for @{text "\z. \x. z \ x \ (P x \ P\<^bsub>+\\<^esub>)"} *} -lemma pinf_conj[noatp]: +lemma pinf_conj[no_atp]: assumes ex1: "\z1. \x. z1 \ x \ (P1 x \ P1')" and ex2: "\z2. \x. z2 \ x \ (P2 x \ P2')" shows "\z. \x. z \ x \ ((P1 x \ P2 x) \ (P1' \ P2'))" @@ -335,7 +335,7 @@ thus ?thesis by blast qed -lemma pinf_disj[noatp]: +lemma pinf_disj[no_atp]: assumes ex1: "\z1. \x. z1 \ x \ (P1 x \ P1')" and ex2: "\z2. \x. z2 \ x \ (P2 x \ P2')" shows "\z. \x. z \ x \ ((P1 x \ P2 x) \ (P1' \ P2'))" @@ -351,7 +351,7 @@ thus ?thesis by blast qed -lemma pinf_ex[noatp]: assumes ex:"\z. \x. z \ x \ (P x \ P1)" and p1: P1 shows "\ x. P x" +lemma pinf_ex[no_atp]: assumes ex:"\z. \x. z \ x \ (P x \ P1)" and p1: P1 shows "\ x. P x" proof- from ex obtain z where z: "\x. z \ x \ (P x \ P1)" by blast from gt_ex obtain x where x: "z \ x" by blast @@ -365,11 +365,11 @@ locale linorder_no_lb = linorder_stupid_syntax + assumes lt_ex: "\y. less y x" begin -lemma le_ex[noatp]: "\y. y \ x" using lt_ex by auto +lemma le_ex[no_atp]: "\y. y \ x" using lt_ex by auto text {* Theorems for @{text "\z. \x. x \ z \ (P x \ P\<^bsub>-\\<^esub>)"} *} -lemma minf_conj[noatp]: +lemma minf_conj[no_atp]: assumes ex1: "\z1. \x. x \ z1 \ (P1 x \ P1')" and ex2: "\z2. \x. x \ z2 \ (P2 x \ P2')" shows "\z. \x. x \ z \ ((P1 x \ P2 x) \ (P1' \ P2'))" @@ -384,7 +384,7 @@ thus ?thesis by blast qed -lemma minf_disj[noatp]: +lemma minf_disj[no_atp]: assumes ex1: "\z1. \x. x \ z1 \ (P1 x \ P1')" and ex2: "\z2. \x. x \ z2 \ (P2 x \ P2')" shows "\z. \x. x \ z \ ((P1 x \ P2 x) \ (P1' \ P2'))" @@ -399,7 +399,7 @@ thus ?thesis by blast qed -lemma minf_ex[noatp]: assumes ex:"\z. \x. x \ z \ (P x \ P1)" and p1: P1 shows "\ x. P x" +lemma minf_ex[no_atp]: assumes ex:"\z. \x. x \ z \ (P x \ P1)" and p1: P1 shows "\ x. P x" proof- from ex obtain z where z: "\x. 
x \ z \ (P x \ P1)" by blast from lt_ex obtain x where x: "x \ z" by blast @@ -422,7 +422,7 @@ context constr_dense_linorder begin -lemma rinf_U[noatp]: +lemma rinf_U[no_atp]: assumes fU: "finite U" and lin_dense: "\x l u. (\ t. l \ t \ t\ u \ t \ U) \ l\ x \ x \ u \ P x \ (\ y. l \ y \ y \ u \ P y )" @@ -465,7 +465,7 @@ ultimately show ?thesis by blast qed -theorem fr_eq[noatp]: +theorem fr_eq[no_atp]: assumes fU: "finite U" and lin_dense: "\x l u. (\ t. l \ t \ t\ u \ t \ U) \ l\ x \ x \ u \ P x \ (\ y. l \ y \ y \ u \ P y )" @@ -493,16 +493,16 @@ ultimately have "?E = ?D" by blast thus "?E \ ?D" by simp qed -lemmas minf_thms[noatp] = minf_conj minf_disj minf_eq minf_neq minf_lt minf_le minf_gt minf_ge minf_P -lemmas pinf_thms[noatp] = pinf_conj pinf_disj pinf_eq pinf_neq pinf_lt pinf_le pinf_gt pinf_ge pinf_P +lemmas minf_thms[no_atp] = minf_conj minf_disj minf_eq minf_neq minf_lt minf_le minf_gt minf_ge minf_P +lemmas pinf_thms[no_atp] = pinf_conj pinf_disj pinf_eq pinf_neq pinf_lt pinf_le pinf_gt pinf_ge pinf_P -lemmas nmi_thms[noatp] = nmi_conj nmi_disj nmi_eq nmi_neq nmi_lt nmi_le nmi_gt nmi_ge nmi_P -lemmas npi_thms[noatp] = npi_conj npi_disj npi_eq npi_neq npi_lt npi_le npi_gt npi_ge npi_P -lemmas lin_dense_thms[noatp] = lin_dense_conj lin_dense_disj lin_dense_eq lin_dense_neq lin_dense_lt lin_dense_le lin_dense_gt lin_dense_ge lin_dense_P +lemmas nmi_thms[no_atp] = nmi_conj nmi_disj nmi_eq nmi_neq nmi_lt nmi_le nmi_gt nmi_ge nmi_P +lemmas npi_thms[no_atp] = npi_conj npi_disj npi_eq npi_neq npi_lt npi_le npi_gt npi_ge npi_P +lemmas lin_dense_thms[no_atp] = lin_dense_conj lin_dense_disj lin_dense_eq lin_dense_neq lin_dense_lt lin_dense_le lin_dense_gt lin_dense_ge lin_dense_P -lemma ferrack_axiom[noatp]: "constr_dense_linorder less_eq less between" +lemma ferrack_axiom[no_atp]: "constr_dense_linorder less_eq less between" by (rule constr_dense_linorder_axioms) -lemma atoms[noatp]: +lemma atoms[no_atp]: shows "TERM (less :: 'a \ _)" and "TERM (less_eq :: 'a \ _)" and "TERM (op = :: 'a \ _)" . diff -r b766aad9136d -r e31ec41a551b src/HOL/Fields.thy --- a/src/HOL/Fields.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Fields.thy Thu Mar 18 13:59:20 2010 +0100 @@ -65,7 +65,7 @@ ==> inverse a + inverse b = (a + b) * inverse a * inverse b" by (simp add: division_ring_inverse_add mult_ac) -lemma nonzero_mult_divide_mult_cancel_left [simp, noatp]: +lemma nonzero_mult_divide_mult_cancel_left [simp, no_atp]: assumes [simp]: "b\0" and [simp]: "c\0" shows "(c*a)/(c*b) = a/b" proof - have "(c*a)/(c*b) = c * a * (inverse b * inverse c)" @@ -76,7 +76,7 @@ finally show ?thesis by (simp add: divide_inverse) qed -lemma nonzero_mult_divide_mult_cancel_right [simp, noatp]: +lemma nonzero_mult_divide_mult_cancel_right [simp, no_atp]: "\b \ 0; c \ 0\ \ (a * c) / (b * c) = a / b" by (simp add: mult_commute [of _ c]) @@ -90,7 +90,7 @@ by (simp add: divide_inverse mult_ac) text {* These are later declared as simp rules. 
*} -lemmas times_divide_eq [noatp] = times_divide_eq_right times_divide_eq_left +lemmas times_divide_eq [no_atp] = times_divide_eq_right times_divide_eq_left lemma add_frac_eq: assumes "y \ 0" and "z \ 0" @@ -106,27 +106,27 @@ text{*Special Cancellation Simprules for Division*} -lemma nonzero_mult_divide_cancel_right [simp, noatp]: +lemma nonzero_mult_divide_cancel_right [simp, no_atp]: "b \ 0 \ a * b / b = a" using nonzero_mult_divide_mult_cancel_right [of 1 b a] by simp -lemma nonzero_mult_divide_cancel_left [simp, noatp]: +lemma nonzero_mult_divide_cancel_left [simp, no_atp]: "a \ 0 \ a * b / a = b" using nonzero_mult_divide_mult_cancel_left [of 1 a b] by simp -lemma nonzero_divide_mult_cancel_right [simp, noatp]: +lemma nonzero_divide_mult_cancel_right [simp, no_atp]: "\a \ 0; b \ 0\ \ b / (a * b) = 1 / a" using nonzero_mult_divide_mult_cancel_right [of a b 1] by simp -lemma nonzero_divide_mult_cancel_left [simp, noatp]: +lemma nonzero_divide_mult_cancel_left [simp, no_atp]: "\a \ 0; b \ 0\ \ a / (a * b) = 1 / b" using nonzero_mult_divide_mult_cancel_left [of b a 1] by simp -lemma nonzero_mult_divide_mult_cancel_left2 [simp, noatp]: +lemma nonzero_mult_divide_mult_cancel_left2 [simp, no_atp]: "\b \ 0; c \ 0\ \ (c * a) / (b * c) = a / b" using nonzero_mult_divide_mult_cancel_left [of b c a] by (simp add: mult_ac) -lemma nonzero_mult_divide_mult_cancel_right2 [simp, noatp]: +lemma nonzero_mult_divide_mult_cancel_right2 [simp, no_atp]: "\b \ 0; c \ 0\ \ (a * c) / (c * b) = a / b" using nonzero_mult_divide_mult_cancel_right [of b c a] by (simp add: mult_ac) @@ -139,7 +139,7 @@ lemma nonzero_minus_divide_divide: "b \ 0 ==> (-a) / (-b) = a / b" by (simp add: divide_inverse nonzero_inverse_minus_eq) -lemma divide_minus_left [simp, noatp]: "(-a) / b = - (a / b)" +lemma divide_minus_left [simp, no_atp]: "(-a) / b = - (a / b)" by (simp add: divide_inverse) lemma diff_divide_distrib: "(a - b) / c = a / c - b / c" @@ -183,7 +183,7 @@ lemma eq_divide_imp: "c \ 0 \ a * c = b \ a = b / c" by (erule subst, simp) -lemmas field_eq_simps[noatp] = algebra_simps +lemmas field_eq_simps[no_atp] = algebra_simps (* pull / out*) add_divide_eq_iff divide_add_eq_iff diff_divide_eq_iff divide_diff_eq_iff @@ -292,18 +292,18 @@ apply simp_all done -lemma divide_divide_eq_right [simp,noatp]: +lemma divide_divide_eq_right [simp,no_atp]: "a / (b/c) = (a*c) / (b::'a::{field,division_by_zero})" by (simp add: divide_inverse mult_ac) -lemma divide_divide_eq_left [simp,noatp]: +lemma divide_divide_eq_left [simp,no_atp]: "(a / b) / (c::'a::{field,division_by_zero}) = a / (b*c)" by (simp add: divide_inverse mult_assoc) subsubsection{*Special Cancellation Simprules for Division*} -lemma mult_divide_mult_cancel_left_if[simp,noatp]: +lemma mult_divide_mult_cancel_left_if[simp,no_atp]: fixes c :: "'a :: {field,division_by_zero}" shows "(c*a) / (c*b) = (if c=0 then 0 else a/b)" by (simp add: mult_divide_mult_cancel_left) @@ -314,7 +314,7 @@ lemma minus_divide_right: "- (a/b) = a / -(b::'a::{field,division_by_zero})" by (simp add: divide_inverse) -lemma divide_minus_right [simp, noatp]: +lemma divide_minus_right [simp, no_atp]: "a / -(b::'a::{field,division_by_zero}) = -(a / b)" by (simp add: divide_inverse) @@ -440,7 +440,7 @@ done text{*Both premises are essential. 
Consider -1 and 1.*} -lemma inverse_less_iff_less [simp,noatp]: +lemma inverse_less_iff_less [simp,no_atp]: "[|0 < a; 0 < b|] ==> (inverse a < inverse b) = (b < (a::'a::linordered_field))" by (blast intro: less_imp_inverse_less dest: inverse_less_imp_less) @@ -448,7 +448,7 @@ "[|a \ b; 0 < a|] ==> inverse b \ inverse (a::'a::linordered_field)" by (force simp add: order_le_less less_imp_inverse_less) -lemma inverse_le_iff_le [simp,noatp]: +lemma inverse_le_iff_le [simp,no_atp]: "[|0 < a; 0 < b|] ==> (inverse a \ inverse b) = (b \ (a::'a::linordered_field))" by (blast intro: le_imp_inverse_le dest: inverse_le_imp_le) @@ -482,7 +482,7 @@ apply (simp add: order_less_imp_not_eq nonzero_inverse_minus_eq) done -lemma inverse_less_iff_less_neg [simp,noatp]: +lemma inverse_less_iff_less_neg [simp,no_atp]: "[|a < 0; b < 0|] ==> (inverse a < inverse b) = (b < (a::'a::linordered_field))" apply (insert inverse_less_iff_less [of "-b" "-a"]) apply (simp del: inverse_less_iff_less @@ -493,7 +493,7 @@ "[|a \ b; b < 0|] ==> inverse b \ inverse (a::'a::linordered_field)" by (force simp add: order_le_less less_imp_inverse_less_neg) -lemma inverse_le_iff_le_neg [simp,noatp]: +lemma inverse_le_iff_le_neg [simp,no_atp]: "[|a < 0; b < 0|] ==> (inverse a \ inverse b) = (b \ (a::'a::linordered_field))" by (blast intro: le_imp_inverse_le_neg dest: inverse_le_imp_le_neg) @@ -665,7 +665,7 @@ (for inequations). Can be too aggressive and is therefore separate from the more benign @{text algebra_simps}. *} -lemmas field_simps[noatp] = field_eq_simps +lemmas field_simps[no_atp] = field_eq_simps (* multiply ineqn *) pos_divide_less_eq neg_divide_less_eq pos_less_divide_eq neg_less_divide_eq @@ -677,7 +677,7 @@ sign_simps} to @{text field_simps} because the former can lead to case explosions. 
*} -lemmas sign_simps[noatp] = group_simps +lemmas sign_simps[no_atp] = group_simps zero_less_mult_iff mult_less_0_iff (* Only works once linear arithmetic is installed: @@ -716,7 +716,7 @@ (0 \ a & b \ 0 | a \ 0 & 0 \ b)" by (simp add: divide_inverse mult_le_0_iff) -lemma divide_eq_0_iff [simp,noatp]: +lemma divide_eq_0_iff [simp,no_atp]: "(a/b = 0) = (a=0 | b=(0::'a::{field,division_by_zero}))" by (simp add: divide_inverse) @@ -756,13 +756,13 @@ subsection{*Cancellation Laws for Division*} -lemma divide_cancel_right [simp,noatp]: +lemma divide_cancel_right [simp,no_atp]: "(a/c = b/c) = (c = 0 | a = (b::'a::{field,division_by_zero}))" apply (cases "c=0", simp) apply (simp add: divide_inverse) done -lemma divide_cancel_left [simp,noatp]: +lemma divide_cancel_left [simp,no_atp]: "(c/a = c/b) = (c = 0 | a = (b::'a::{field,division_by_zero}))" apply (cases "c=0", simp) apply (simp add: divide_inverse) @@ -772,23 +772,23 @@ subsection {* Division and the Number One *} text{*Simplify expressions equated with 1*} -lemma divide_eq_1_iff [simp,noatp]: +lemma divide_eq_1_iff [simp,no_atp]: "(a/b = 1) = (b \ 0 & a = (b::'a::{field,division_by_zero}))" apply (cases "b=0", simp) apply (simp add: right_inverse_eq) done -lemma one_eq_divide_iff [simp,noatp]: +lemma one_eq_divide_iff [simp,no_atp]: "(1 = a/b) = (b \ 0 & a = (b::'a::{field,division_by_zero}))" by (simp add: eq_commute [of 1]) -lemma zero_eq_1_divide_iff [simp,noatp]: +lemma zero_eq_1_divide_iff [simp,no_atp]: "((0::'a::{linordered_field,division_by_zero}) = 1/a) = (a = 0)" apply (cases "a=0", simp) apply (auto simp add: nonzero_eq_divide_eq) done -lemma one_divide_eq_0_iff [simp,noatp]: +lemma one_divide_eq_0_iff [simp,no_atp]: "(1/a = (0::'a::{linordered_field,division_by_zero})) = (a = 0)" apply (cases "a=0", simp) apply (insert zero_neq_one [THEN not_sym]) @@ -801,10 +801,10 @@ lemmas zero_le_divide_1_iff = zero_le_divide_iff [of 1, simplified] lemmas divide_le_0_1_iff = divide_le_0_iff [of 1, simplified] -declare zero_less_divide_1_iff [simp,noatp] -declare divide_less_0_1_iff [simp,noatp] -declare zero_le_divide_1_iff [simp,noatp] -declare divide_le_0_1_iff [simp,noatp] +declare zero_less_divide_1_iff [simp,no_atp] +declare divide_less_0_1_iff [simp,no_atp] +declare zero_le_divide_1_iff [simp,no_atp] +declare divide_le_0_1_iff [simp,no_atp] subsection {* Ordering Rules for Division *} @@ -853,22 +853,22 @@ text{*Simplify quotients that are compared with the value 1.*} -lemma le_divide_eq_1 [noatp]: +lemma le_divide_eq_1 [no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "(1 \ b / a) = ((0 < a & a \ b) | (a < 0 & b \ a))" by (auto simp add: le_divide_eq) -lemma divide_le_eq_1 [noatp]: +lemma divide_le_eq_1 [no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "(b / a \ 1) = ((0 < a & b \ a) | (a < 0 & a \ b) | a=0)" by (auto simp add: divide_le_eq) -lemma less_divide_eq_1 [noatp]: +lemma less_divide_eq_1 [no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "(1 < b / a) = ((0 < a & a < b) | (a < 0 & b < a))" by (auto simp add: less_divide_eq) -lemma divide_less_eq_1 [noatp]: +lemma divide_less_eq_1 [no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "(b / a < 1) = ((0 < a & b < a) | (a < 0 & a < b) | a=0)" by (auto simp add: divide_less_eq) @@ -876,52 +876,52 @@ subsection{*Conditional Simplification Rules: No Case Splits*} -lemma le_divide_eq_1_pos [simp,noatp]: +lemma le_divide_eq_1_pos [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows 
"0 < a \ (1 \ b/a) = (a \ b)" by (auto simp add: le_divide_eq) -lemma le_divide_eq_1_neg [simp,noatp]: +lemma le_divide_eq_1_neg [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "a < 0 \ (1 \ b/a) = (b \ a)" by (auto simp add: le_divide_eq) -lemma divide_le_eq_1_pos [simp,noatp]: +lemma divide_le_eq_1_pos [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "0 < a \ (b/a \ 1) = (b \ a)" by (auto simp add: divide_le_eq) -lemma divide_le_eq_1_neg [simp,noatp]: +lemma divide_le_eq_1_neg [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "a < 0 \ (b/a \ 1) = (a \ b)" by (auto simp add: divide_le_eq) -lemma less_divide_eq_1_pos [simp,noatp]: +lemma less_divide_eq_1_pos [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "0 < a \ (1 < b/a) = (a < b)" by (auto simp add: less_divide_eq) -lemma less_divide_eq_1_neg [simp,noatp]: +lemma less_divide_eq_1_neg [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "a < 0 \ (1 < b/a) = (b < a)" by (auto simp add: less_divide_eq) -lemma divide_less_eq_1_pos [simp,noatp]: +lemma divide_less_eq_1_pos [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "0 < a \ (b/a < 1) = (b < a)" by (auto simp add: divide_less_eq) -lemma divide_less_eq_1_neg [simp,noatp]: +lemma divide_less_eq_1_neg [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "a < 0 \ b/a < 1 <-> a < b" by (auto simp add: divide_less_eq) -lemma eq_divide_eq_1 [simp,noatp]: +lemma eq_divide_eq_1 [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "(1 = b/a) = ((a \ 0 & a = b))" by (auto simp add: eq_divide_eq) -lemma divide_eq_eq_1 [simp,noatp]: +lemma divide_eq_eq_1 [simp,no_atp]: fixes a :: "'a :: {linordered_field,division_by_zero}" shows "(b/a = 1) = ((a \ 0 & a = b))" by (auto simp add: divide_eq_eq) diff -r b766aad9136d -r e31ec41a551b src/HOL/Finite_Set.thy --- a/src/HOL/Finite_Set.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Finite_Set.thy Thu Mar 18 13:59:20 2010 +0100 @@ -523,13 +523,13 @@ end -lemma UNIV_unit [noatp]: +lemma UNIV_unit [no_atp]: "UNIV = {()}" by auto instance unit :: finite proof qed (simp add: UNIV_unit) -lemma UNIV_bool [noatp]: +lemma UNIV_bool [no_atp]: "UNIV = {False, True}" by auto instance bool :: finite proof @@ -1999,7 +1999,7 @@ "card A > 0 \ finite A" by (rule ccontr) simp -lemma card_0_eq [simp, noatp]: +lemma card_0_eq [simp, no_atp]: "finite A \ card A = 0 \ A = {}" by (auto dest: mk_disjoint_insert) @@ -2329,8 +2329,8 @@ show False by simp (blast dest: Suc_neq_Zero surjD) qed -(* Often leads to bogus ATP proofs because of reduced type information, hence noatp *) -lemma infinite_UNIV_char_0[noatp]: +(* Often leads to bogus ATP proofs because of reduced type information, hence no_atp *) +lemma infinite_UNIV_char_0[no_atp]: "\ finite (UNIV::'a::semiring_char_0 set)" proof assume "finite (UNIV::'a set)" diff -r b766aad9136d -r e31ec41a551b src/HOL/Groups.thy --- a/src/HOL/Groups.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Groups.thy Thu Mar 18 13:59:20 2010 +0100 @@ -437,7 +437,7 @@ (* FIXME: duplicates right_minus_eq from class group_add *) (* but only this one is declared as a simp rule. 
*) -lemma diff_eq_0_iff_eq [simp, noatp]: "a - b = 0 \ a = b" +lemma diff_eq_0_iff_eq [simp, no_atp]: "a - b = 0 \ a = b" by (simp add: algebra_simps) end @@ -772,12 +772,12 @@ by (simp add: algebra_simps) text{*Legacy - use @{text algebra_simps} *} -lemmas group_simps[noatp] = algebra_simps +lemmas group_simps[no_atp] = algebra_simps end text{*Legacy - use @{text algebra_simps} *} -lemmas group_simps[noatp] = algebra_simps +lemmas group_simps[no_atp] = algebra_simps class linordered_ab_semigroup_add = linorder + ordered_ab_semigroup_add @@ -1054,7 +1054,7 @@ lemma abs_zero [simp]: "\0\ = 0" by simp -lemma abs_0_eq [simp, noatp]: "0 = \a\ \ a = 0" +lemma abs_0_eq [simp, no_atp]: "0 = \a\ \ a = 0" proof - have "0 = \a\ \ \a\ = 0" by (simp only: eq_ac) thus ?thesis by simp @@ -1201,7 +1201,7 @@ subsection {* Tools setup *} -lemma add_mono_thms_linordered_semiring [noatp]: +lemma add_mono_thms_linordered_semiring [no_atp]: fixes i j k :: "'a\ordered_ab_semigroup_add" shows "i \ j \ k \ l \ i + k \ j + l" and "i = j \ k \ l \ i + k \ j + l" @@ -1209,7 +1209,7 @@ and "i = j \ k = l \ i + k = j + l" by (rule add_mono, clarify+)+ -lemma add_mono_thms_linordered_field [noatp]: +lemma add_mono_thms_linordered_field [no_atp]: fixes i j k :: "'a\ordered_cancel_ab_semigroup_add" shows "i < j \ k = l \ i + k < j + l" and "i = j \ k < l \ i + k < j + l" @@ -1220,8 +1220,8 @@ add_less_le_mono add_le_less_mono add_strict_mono) text{*Simplification of @{term "x-y < 0"}, etc.*} -lemmas diff_less_0_iff_less [simp, noatp] = less_iff_diff_less_0 [symmetric] -lemmas diff_le_0_iff_le [simp, noatp] = le_iff_diff_le_0 [symmetric] +lemmas diff_less_0_iff_less [simp, no_atp] = less_iff_diff_less_0 [symmetric] +lemmas diff_le_0_iff_le [simp, no_atp] = le_iff_diff_le_0 [symmetric] ML {* structure ab_group_add_cancel = Abel_Cancel diff -r b766aad9136d -r e31ec41a551b src/HOL/HOL.thy --- a/src/HOL/HOL.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/HOL.thy Thu Mar 18 13:59:20 2010 +0100 @@ -23,7 +23,6 @@ "~~/src/Tools/coherent.ML" "~~/src/Tools/eqsubst.ML" "~~/src/Provers/quantifier1.ML" - "Tools/res_blacklist.ML" ("Tools/simpdata.ML") "~~/src/Tools/random_word.ML" "~~/src/Tools/atomize_elim.ML" @@ -35,8 +34,6 @@ setup {* Intuitionistic.method_setup @{binding iprover} *} -setup Res_Blacklist.setup - subsection {* Primitive logic *} @@ -794,6 +791,25 @@ subsection {* Package setup *} +subsubsection {* Sledgehammer setup *} + +text {* +Theorems blacklisted to Sledgehammer. These theorems typically produce clauses +that are prolific (match too many equality or membership literals) and relate to +seldom-used facts. Some duplicate other rules. +*} + +ML {* +structure No_ATPs = Named_Thms +( + val name = "no_atp" + val description = "theorems that should be avoided by Sledgehammer" +) +*} + +setup {* No_ATPs.setup *} + + subsubsection {* Classical Reasoner setup *} lemma imp_elim: "P --> Q ==> (~ R ==> P) ==> (Q ==> R) ==> R" @@ -1041,7 +1057,7 @@ lemma imp_ex: "((? x. P x) --> Q) = (! x. P x --> Q)" by iprover lemma all_not_ex: "(ALL x. P x) = (~ (EX x. ~ P x ))" by blast -declare All_def [noatp] +declare All_def [no_atp] lemma ex_disj_distrib: "(? x. P(x) | Q(x)) = ((? x. P(x)) | (? x. Q(x)))" by iprover lemma all_conj_distrib: "(!x. P(x) & Q(x)) = ((! x. P(x)) & (! x. 
Q(x)))" by iprover @@ -1088,7 +1104,7 @@ lemma split_if_asm: "P (if Q then x else y) = (~((Q & ~P x) | (~Q & ~P y)))" by (simplesubst split_if, blast) -lemmas if_splits [noatp] = split_if split_if_asm +lemmas if_splits [no_atp] = split_if split_if_asm lemma if_cancel: "(if c then x else x) = x" by (simplesubst split_if, blast) diff -r b766aad9136d -r e31ec41a551b src/HOL/Int.thy --- a/src/HOL/Int.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Int.thy Thu Mar 18 13:59:20 2010 +0100 @@ -86,7 +86,7 @@ text{*Reduces equality on abstractions to equality on representatives: @{prop "\x \ Integ; y \ Integ\ \ (Abs_Integ x = Abs_Integ y) = (x=y)"} *} -declare Abs_Integ_inject [simp,noatp] Abs_Integ_inverse [simp,noatp] +declare Abs_Integ_inject [simp,no_atp] Abs_Integ_inverse [simp,no_atp] text{*Case analysis on the representation of an integer as an equivalence class of pairs of naturals.*} @@ -522,7 +522,7 @@ It is proved here because attribute @{text arith_split} is not available in theory @{text Rings}. But is it really better than just rewriting with @{text abs_if}?*} -lemma abs_split [arith_split,noatp]: +lemma abs_split [arith_split,no_atp]: "P(abs(a::'a::linordered_idom)) = ((0 \ a --> P a) & (a < 0 --> P(-a)))" by (force dest: order_less_le_trans simp add: abs_if linorder_not_less) @@ -1875,16 +1875,16 @@ text{*These are actually for fields, like real: but where else to put them?*} -lemmas zero_less_divide_iff_number_of [simp, noatp] = +lemmas zero_less_divide_iff_number_of [simp, no_atp] = zero_less_divide_iff [of "number_of w", standard] -lemmas divide_less_0_iff_number_of [simp, noatp] = +lemmas divide_less_0_iff_number_of [simp, no_atp] = divide_less_0_iff [of "number_of w", standard] -lemmas zero_le_divide_iff_number_of [simp, noatp] = +lemmas zero_le_divide_iff_number_of [simp, no_atp] = zero_le_divide_iff [of "number_of w", standard] -lemmas divide_le_0_iff_number_of [simp, noatp] = +lemmas divide_le_0_iff_number_of [simp, no_atp] = divide_le_0_iff [of "number_of w", standard] @@ -1897,53 +1897,53 @@ text {*These laws simplify inequalities, moving unary minus from a term into the literal.*} -lemmas less_minus_iff_number_of [simp, noatp] = +lemmas less_minus_iff_number_of [simp, no_atp] = less_minus_iff [of "number_of v", standard] -lemmas le_minus_iff_number_of [simp, noatp] = +lemmas le_minus_iff_number_of [simp, no_atp] = le_minus_iff [of "number_of v", standard] -lemmas equation_minus_iff_number_of [simp, noatp] = +lemmas equation_minus_iff_number_of [simp, no_atp] = equation_minus_iff [of "number_of v", standard] -lemmas minus_less_iff_number_of [simp, noatp] = +lemmas minus_less_iff_number_of [simp, no_atp] = minus_less_iff [of _ "number_of v", standard] -lemmas minus_le_iff_number_of [simp, noatp] = +lemmas minus_le_iff_number_of [simp, no_atp] = minus_le_iff [of _ "number_of v", standard] -lemmas minus_equation_iff_number_of [simp, noatp] = +lemmas minus_equation_iff_number_of [simp, no_atp] = minus_equation_iff [of _ "number_of v", standard] text{*To Simplify Inequalities Where One Side is the Constant 1*} -lemma less_minus_iff_1 [simp,noatp]: +lemma less_minus_iff_1 [simp,no_atp]: fixes b::"'b::{linordered_idom,number_ring}" shows "(1 < - b) = (b < -1)" by auto -lemma le_minus_iff_1 [simp,noatp]: +lemma le_minus_iff_1 [simp,no_atp]: fixes b::"'b::{linordered_idom,number_ring}" shows "(1 \ - b) = (b \ -1)" by auto -lemma equation_minus_iff_1 [simp,noatp]: +lemma equation_minus_iff_1 [simp,no_atp]: fixes b::"'b::number_ring" shows "(1 = - b) = (b = -1)" by (subst 
equation_minus_iff, auto) -lemma minus_less_iff_1 [simp,noatp]: +lemma minus_less_iff_1 [simp,no_atp]: fixes a::"'b::{linordered_idom,number_ring}" shows "(- a < 1) = (-1 < a)" by auto -lemma minus_le_iff_1 [simp,noatp]: +lemma minus_le_iff_1 [simp,no_atp]: fixes a::"'b::{linordered_idom,number_ring}" shows "(- a \ 1) = (-1 \ a)" by auto -lemma minus_equation_iff_1 [simp,noatp]: +lemma minus_equation_iff_1 [simp,no_atp]: fixes a::"'b::number_ring" shows "(- a = 1) = (a = -1)" by (subst minus_equation_iff, auto) @@ -1951,16 +1951,16 @@ text {*Cancellation of constant factors in comparisons (@{text "<"} and @{text "\"}) *} -lemmas mult_less_cancel_left_number_of [simp, noatp] = +lemmas mult_less_cancel_left_number_of [simp, no_atp] = mult_less_cancel_left [of "number_of v", standard] -lemmas mult_less_cancel_right_number_of [simp, noatp] = +lemmas mult_less_cancel_right_number_of [simp, no_atp] = mult_less_cancel_right [of _ "number_of v", standard] -lemmas mult_le_cancel_left_number_of [simp, noatp] = +lemmas mult_le_cancel_left_number_of [simp, no_atp] = mult_le_cancel_left [of "number_of v", standard] -lemmas mult_le_cancel_right_number_of [simp, noatp] = +lemmas mult_le_cancel_right_number_of [simp, no_atp] = mult_le_cancel_right [of _ "number_of v", standard] diff -r b766aad9136d -r e31ec41a551b src/HOL/IsaMakefile --- a/src/HOL/IsaMakefile Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/IsaMakefile Thu Mar 18 13:59:20 2010 +0100 @@ -246,7 +246,6 @@ @$(ISABELLE_TOOL) usedir -b -f plain.ML -g true $(OUT)/Pure HOL-Plain MAIN_DEPENDENCIES = $(PLAIN_DEPENDENCIES) \ - ATP_Linkup.thy \ Big_Operators.thy \ Code_Evaluation.thy \ Code_Numeral.thy \ @@ -271,6 +270,7 @@ Random_Sequence.thy \ Recdef.thy \ SetInterval.thy \ + Sledgehammer.thy \ String.thy \ Typerep.thy \ $(SRC)/Provers/Arith/assoc_fold.ML \ @@ -290,7 +290,6 @@ Tools/int_arith.ML \ Tools/list_code.ML \ Tools/meson.ML \ - Tools/metis_tools.ML \ Tools/nat_numeral_simprocs.ML \ Tools/numeral.ML \ Tools/numeral_simprocs.ML \ @@ -315,12 +314,12 @@ Tools/Quotient/quotient_term.ML \ Tools/Quotient/quotient_typ.ML \ Tools/recdef.ML \ - Tools/res_atp.ML \ - Tools/res_axioms.ML \ - Tools/res_blacklist.ML \ - Tools/res_clause.ML \ - Tools/res_hol_clause.ML \ - Tools/res_reconstruct.ML \ + Tools/Sledgehammer/metis_tactics.ML \ + Tools/Sledgehammer/sledgehammer_fact_filter.ML \ + Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML \ + Tools/Sledgehammer/sledgehammer_fol_clause.ML \ + Tools/Sledgehammer/sledgehammer_hol_clause.ML \ + Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML \ Tools/string_code.ML \ Tools/string_syntax.ML \ Tools/transfer.ML \ diff -r b766aad9136d -r e31ec41a551b src/HOL/Library/Lattice_Algebras.thy --- a/src/HOL/Library/Lattice_Algebras.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Library/Lattice_Algebras.thy Thu Mar 18 13:59:20 2010 +0100 @@ -163,16 +163,16 @@ lemma pprt_0[simp]: "pprt 0 = 0" by (simp add: pprt_def) lemma nprt_0[simp]: "nprt 0 = 0" by (simp add: nprt_def) -lemma pprt_eq_id [simp, noatp]: "0 \ x \ pprt x = x" +lemma pprt_eq_id [simp, no_atp]: "0 \ x \ pprt x = x" by (simp add: pprt_def sup_aci sup_absorb1) -lemma nprt_eq_id [simp, noatp]: "x \ 0 \ nprt x = x" +lemma nprt_eq_id [simp, no_atp]: "x \ 0 \ nprt x = x" by (simp add: nprt_def inf_aci inf_absorb1) -lemma pprt_eq_0 [simp, noatp]: "x \ 0 \ pprt x = 0" +lemma pprt_eq_0 [simp, no_atp]: "x \ 0 \ pprt x = 0" by (simp add: pprt_def sup_aci sup_absorb2) -lemma nprt_eq_0 [simp, noatp]: "0 \ x \ nprt x = 0" +lemma nprt_eq_0 [simp, no_atp]: "0 \ x \ 
nprt x = 0" by (simp add: nprt_def inf_aci inf_absorb2) lemma sup_0_imp_0: "sup a (- a) = 0 \ a = 0" @@ -197,10 +197,10 @@ apply (erule sup_0_imp_0) done -lemma inf_0_eq_0 [simp, noatp]: "inf a (- a) = 0 \ a = 0" +lemma inf_0_eq_0 [simp, no_atp]: "inf a (- a) = 0 \ a = 0" by (rule, erule inf_0_imp_0) simp -lemma sup_0_eq_0 [simp, noatp]: "sup a (- a) = 0 \ a = 0" +lemma sup_0_eq_0 [simp, no_atp]: "sup a (- a) = 0 \ a = 0" by (rule, erule sup_0_imp_0) simp lemma zero_le_double_add_iff_zero_le_single_add [simp]: @@ -295,10 +295,10 @@ lemma zero_le_iff_nprt_id: "a \ 0 \ nprt a = a" unfolding le_iff_inf by (simp add: nprt_def inf_commute) -lemma pprt_mono [simp, noatp]: "a \ b \ pprt a \ pprt b" +lemma pprt_mono [simp, no_atp]: "a \ b \ pprt a \ pprt b" unfolding le_iff_sup by (simp add: pprt_def sup_aci sup_assoc [symmetric, of a]) -lemma nprt_mono [simp, noatp]: "a \ b \ nprt a \ nprt b" +lemma nprt_mono [simp, no_atp]: "a \ b \ nprt a \ nprt b" unfolding le_iff_inf by (simp add: nprt_def inf_aci inf_assoc [symmetric, of a]) end diff -r b766aad9136d -r e31ec41a551b src/HOL/List.thy --- a/src/HOL/List.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/List.thy Thu Mar 18 13:59:20 2010 +0100 @@ -5,7 +5,7 @@ header {* The datatype of finite lists *} theory List -imports Plain Presburger ATP_Linkup Recdef +imports Plain Presburger Sledgehammer Recdef uses ("Tools/list_code.ML") begin @@ -582,7 +582,7 @@ lemma self_append_conv [iff]: "(xs = xs @ ys) = (ys = [])" by (induct xs) auto -lemma append_eq_append_conv [simp, noatp]: +lemma append_eq_append_conv [simp, no_atp]: "length xs = length ys \ length us = length vs ==> (xs@us = ys@vs) = (xs=ys \ us=vs)" apply (induct xs arbitrary: ys) @@ -614,7 +614,7 @@ lemma self_append_conv2 [iff]: "(ys = xs @ ys) = (xs = [])" using append_same_eq [of "[]"] by auto -lemma hd_Cons_tl [simp,noatp]: "xs \ [] ==> hd xs # tl xs = xs" +lemma hd_Cons_tl [simp,no_atp]: "xs \ [] ==> hd xs # tl xs = xs" by (induct xs) auto lemma hd_append: "hd (xs @ ys) = (if xs = [] then hd ys else hd xs)" @@ -3928,10 +3928,10 @@ for A :: "'a set" where Nil [intro!]: "[]: lists A" - | Cons [intro!,noatp]: "[| a: A; l: lists A|] ==> a#l : lists A" - -inductive_cases listsE [elim!,noatp]: "x#l : lists A" -inductive_cases listspE [elim!,noatp]: "listsp A (x # l)" + | Cons [intro!,no_atp]: "[| a: A; l: lists A|] ==> a#l : lists A" + +inductive_cases listsE [elim!,no_atp]: "x#l : lists A" +inductive_cases listspE [elim!,no_atp]: "listsp A (x # l)" lemma listsp_mono [mono]: "A \ B ==> listsp A \ listsp B" by (rule predicate1I, erule listsp.induct, (blast dest: predicate1D)+) @@ -3966,15 +3966,15 @@ lemmas in_lists_conv_set = in_listsp_conv_set [to_set] -lemma in_listspD [dest!,noatp]: "listsp A xs ==> \x\set xs. A x" +lemma in_listspD [dest!,no_atp]: "listsp A xs ==> \x\set xs. A x" by (rule in_listsp_conv_set [THEN iffD1]) -lemmas in_listsD [dest!,noatp] = in_listspD [to_set] - -lemma in_listspI [intro!,noatp]: "\x\set xs. A x ==> listsp A xs" +lemmas in_listsD [dest!,no_atp] = in_listspD [to_set] + +lemma in_listspI [intro!,no_atp]: "\x\set xs. 
A x ==> listsp A xs" by (rule in_listsp_conv_set [THEN iffD2]) -lemmas in_listsI [intro!,noatp] = in_listspI [to_set] +lemmas in_listsI [intro!,no_atp] = in_listspI [to_set] lemma lists_UNIV [simp]: "lists UNIV = UNIV" by auto diff -r b766aad9136d -r e31ec41a551b src/HOL/Mirabelle/Tools/mirabelle_metis.ML --- a/src/HOL/Mirabelle/Tools/mirabelle_metis.ML Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Mirabelle/Tools/mirabelle_metis.ML Thu Mar 18 13:59:20 2010 +0100 @@ -18,7 +18,7 @@ val facts = Facts.props (ProofContext.facts_of (Proof.context_of pre)) - fun metis ctxt = MetisTools.metis_tac ctxt (thms @ facts) + fun metis ctxt = Metis_Tactics.metis_tac ctxt (thms @ facts) in (if Mirabelle.can_apply timeout metis pre then "succeeded" else "failed") |> prefix (metis_tag id) diff -r b766aad9136d -r e31ec41a551b src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML --- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Thu Mar 18 13:59:20 2010 +0100 @@ -325,7 +325,7 @@ if success then (message, SH_OK (time_isa, time_atp, theorem_names)) else (message, SH_FAIL(time_isa, time_atp)) end - handle Res_HOL_Clause.TOO_TRIVIAL => ("trivial", SH_OK (0, 0, [])) + handle Sledgehammer_HOL_Clause.TOO_TRIVIAL => ("trivial", SH_OK (0, 0, [])) | ERROR msg => ("error: " ^ msg, SH_ERROR) | TimeLimit.TimeOut => ("timeout", SH_ERROR) @@ -407,8 +407,8 @@ ({pre=st, timeout, log, pos, ...}: Mirabelle.run_args) = let fun metis thms ctxt = - if full then MetisTools.metisFT_tac ctxt thms - else MetisTools.metis_tac ctxt thms + if full then Metis_Tactics.metisFT_tac ctxt thms + else Metis_Tactics.metis_tac ctxt thms fun apply_metis thms = Mirabelle.can_apply timeout (metis thms) st fun with_time (false, t) = "failed (" ^ string_of_int t ^ ")" diff -r b766aad9136d -r e31ec41a551b src/HOL/Nat.thy --- a/src/HOL/Nat.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Nat.thy Thu Mar 18 13:59:20 2010 +0100 @@ -320,7 +320,7 @@ apply auto done -lemma one_eq_mult_iff [simp,noatp]: "(Suc 0 = m * n) = (m = Suc 0 & n = Suc 0)" +lemma one_eq_mult_iff [simp,no_atp]: "(Suc 0 = m * n) = (m = Suc 0 & n = Suc 0)" apply (rule trans) apply (rule_tac [2] mult_eq_1_iff, fastsimp) done @@ -480,7 +480,7 @@ lemma less_Suc0 [iff]: "(n < Suc 0) = (n = 0)" by (simp add: less_Suc_eq) -lemma less_one [iff, noatp]: "(n < (1::nat)) = (n = 0)" +lemma less_one [iff, no_atp]: "(n < (1::nat)) = (n = 0)" unfolding One_nat_def by (rule less_Suc0) lemma Suc_mono: "m < n ==> Suc m < Suc n" @@ -648,7 +648,7 @@ lemma gr0_conv_Suc: "(0 < n) = (\m. n = Suc m)" by (fast intro: not0_implies_Suc) -lemma not_gr0 [iff,noatp]: "!!n::nat. (~ (0 < n)) = (n = 0)" +lemma not_gr0 [iff,no_atp]: "!!n::nat. (~ (0 < n)) = (n = 0)" using neq0_conv by blast lemma Suc_le_D: "(Suc n \ m') ==> (? m. 
m' = Suc m)" @@ -1279,10 +1279,10 @@ text{*Special cases where either operand is zero*} -lemma of_nat_0_eq_iff [simp, noatp]: "0 = of_nat n \ 0 = n" +lemma of_nat_0_eq_iff [simp, no_atp]: "0 = of_nat n \ 0 = n" by (rule of_nat_eq_iff [of 0 n, unfolded of_nat_0]) -lemma of_nat_eq_0_iff [simp, noatp]: "of_nat m = 0 \ m = 0" +lemma of_nat_eq_0_iff [simp, no_atp]: "of_nat m = 0 \ m = 0" by (rule of_nat_eq_iff [of m 0, unfolded of_nat_0]) lemma inj_of_nat: "inj of_nat" @@ -1327,7 +1327,7 @@ lemma of_nat_0_le_iff [simp]: "0 \ of_nat n" by (rule of_nat_le_iff [of 0, simplified]) -lemma of_nat_le_0_iff [simp, noatp]: "of_nat m \ 0 \ m = 0" +lemma of_nat_le_0_iff [simp, no_atp]: "of_nat m \ 0 \ m = 0" by (rule of_nat_le_iff [of _ 0, simplified]) lemma of_nat_0_less_iff [simp]: "0 < of_nat n \ 0 < n" diff -r b766aad9136d -r e31ec41a551b src/HOL/Orderings.thy --- a/src/HOL/Orderings.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Orderings.thy Thu Mar 18 13:59:20 2010 +0100 @@ -293,11 +293,11 @@ "max x y < z \ x < z \ y < z" unfolding max_def le_less using less_linear by (auto intro: less_trans) -lemma split_min [noatp]: +lemma split_min [no_atp]: "P (min i j) \ (i \ j \ P i) \ (\ i \ j \ P j)" by (simp add: min_def) -lemma split_max [noatp]: +lemma split_max [no_atp]: "P (max i j) \ (i \ j \ P j) \ (\ i \ j \ P i)" by (simp add: max_def) diff -r b766aad9136d -r e31ec41a551b src/HOL/Power.thy --- a/src/HOL/Power.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Power.thy Thu Mar 18 13:59:20 2010 +0100 @@ -334,7 +334,7 @@ "abs ((-a) ^ n) = abs (a ^ n)" by (simp add: power_abs) -lemma zero_less_power_abs_iff [simp, noatp]: +lemma zero_less_power_abs_iff [simp, no_atp]: "0 < abs a ^ n \ a \ 0 \ n = 0" proof (induct n) case 0 show ?case by simp diff -r b766aad9136d -r e31ec41a551b src/HOL/Product_Type.thy --- a/src/HOL/Product_Type.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Product_Type.thy Thu Mar 18 13:59:20 2010 +0100 @@ -46,7 +46,7 @@ where "() = Abs_unit True" -lemma unit_eq [noatp]: "u = ()" +lemma unit_eq [no_atp]: "u = ()" by (induct u) (simp add: unit_def Unity_def) text {* @@ -78,7 +78,7 @@ f} rather than by @{term [source] "%u. f ()"}. *} -lemma unit_abs_eta_conv [simp,noatp]: "(%u::unit. f ()) = f" +lemma unit_abs_eta_conv [simp,no_atp]: "(%u::unit. f ()) = f" by (rule ext) simp instantiation unit :: default @@ -497,7 +497,7 @@ lemma split_beta [mono]: "(%(x, y). P x y) z = P (fst z) (snd z)" by (subst surjective_pairing, rule split_conv) -lemma split_split [noatp]: "R(split c p) = (ALL x y. p = (x, y) --> R(c x y))" +lemma split_split [no_atp]: "R(split c p) = (ALL x y. p = (x, y) --> R(c x y))" -- {* For use with @{text split} and the Simplifier. *} by (insert surj_pair [of p], clarify, simp) @@ -508,7 +508,7 @@ current goal contains one of those constants. *} -lemma split_split_asm [noatp]: "R (split c p) = (~(EX x y. p = (x, y) & (~R (c x y))))" +lemma split_split_asm [no_atp]: "R (split c p) = (~(EX x y. p = (x, y) & (~R (c x y))))" by (subst split_split, simp) @@ -582,10 +582,10 @@ Classical.map_cs (fn cs => cs addSbefore ("split_conv_tac", split_conv_tac)) *} -lemma split_eta_SetCompr [simp,noatp]: "(%u. EX x y. u = (x, y) & P (x, y)) = P" +lemma split_eta_SetCompr [simp,no_atp]: "(%u. EX x y. u = (x, y) & P (x, y)) = P" by (rule ext) fast -lemma split_eta_SetCompr2 [simp,noatp]: "(%u. EX x y. u = (x, y) & P x y) = split P" +lemma split_eta_SetCompr2 [simp,no_atp]: "(%u. EX x y. u = (x, y) & P x y) = split P" by (rule ext) fast lemma split_part [simp]: "(%(a,b). 
P & Q a b) = (%ab. P & split Q ab)" @@ -947,11 +947,11 @@ -- {* Suggested by Pierre Chartier *} by blast -lemma split_paired_Ball_Sigma [simp,noatp]: +lemma split_paired_Ball_Sigma [simp,no_atp]: "(ALL z: Sigma A B. P z) = (ALL x:A. ALL y: B x. P(x,y))" by blast -lemma split_paired_Bex_Sigma [simp,noatp]: +lemma split_paired_Bex_Sigma [simp,no_atp]: "(EX z: Sigma A B. P z) = (EX x:A. EX y: B x. P(x,y))" by blast diff -r b766aad9136d -r e31ec41a551b src/HOL/Quotient.thy --- a/src/HOL/Quotient.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Quotient.thy Thu Mar 18 13:59:20 2010 +0100 @@ -5,7 +5,7 @@ header {* Definition of Quotient Types *} theory Quotient -imports Plain ATP_Linkup +imports Plain Sledgehammer uses ("~~/src/HOL/Tools/Quotient/quotient_info.ML") ("~~/src/HOL/Tools/Quotient/quotient_typ.ML") diff -r b766aad9136d -r e31ec41a551b src/HOL/Relation.thy --- a/src/HOL/Relation.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Relation.thy Thu Mar 18 13:59:20 2010 +0100 @@ -121,7 +121,7 @@ lemma Id_on_eqI: "a = b ==> a : A ==> (a, b) : Id_on A" by (simp add: Id_on_def) -lemma Id_onI [intro!,noatp]: "a : A ==> (a, a) : Id_on A" +lemma Id_onI [intro!,no_atp]: "a : A ==> (a, a) : Id_on A" by (rule Id_on_eqI) (rule refl) lemma Id_onE [elim!]: @@ -361,7 +361,7 @@ subsection {* Domain *} -declare Domain_def [noatp] +declare Domain_def [no_atp] lemma Domain_iff: "(a : Domain r) = (EX y. (a, y) : r)" by (unfold Domain_def) blast @@ -484,7 +484,7 @@ subsection {* Image of a set under a relation *} -declare Image_def [noatp] +declare Image_def [no_atp] lemma Image_iff: "(b : r``A) = (EX x:A. (x, b) : r)" by (simp add: Image_def) @@ -495,7 +495,7 @@ lemma Image_singleton_iff [iff]: "(b : r``{a}) = ((a, b) : r)" by (rule Image_iff [THEN trans]) simp -lemma ImageI [intro,noatp]: "(a, b) : r ==> a : A ==> b : r``A" +lemma ImageI [intro,no_atp]: "(a, b) : r ==> a : A ==> b : r``A" by (unfold Image_def) blast lemma ImageE [elim!]: diff -r b766aad9136d -r e31ec41a551b src/HOL/Rings.thy --- a/src/HOL/Rings.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Rings.thy Thu Mar 18 13:59:20 2010 +0100 @@ -127,7 +127,7 @@ then show ?thesis .. 
qed -lemma dvd_0_left_iff [noatp, simp]: "0 dvd a \ a = 0" +lemma dvd_0_left_iff [no_atp, simp]: "0 dvd a \ a = 0" by (auto intro: dvd_refl elim!: dvdE) lemma dvd_0_right [iff]: "a dvd 0" @@ -221,8 +221,8 @@ by (rule minus_unique) (simp add: right_distrib [symmetric]) text{*Extract signs from products*} -lemmas mult_minus_left [simp, noatp] = minus_mult_left [symmetric] -lemmas mult_minus_right [simp,noatp] = minus_mult_right [symmetric] +lemmas mult_minus_left [simp, no_atp] = minus_mult_left [symmetric] +lemmas mult_minus_right [simp,no_atp] = minus_mult_right [symmetric] lemma minus_mult_minus [simp]: "- a * - b = a * b" by simp @@ -236,11 +236,11 @@ lemma left_diff_distrib[algebra_simps]: "(a - b) * c = a * c - b * c" by (simp add: left_distrib diff_minus) -lemmas ring_distribs[noatp] = +lemmas ring_distribs[no_atp] = right_distrib left_distrib left_diff_distrib right_diff_distrib text{*Legacy - use @{text algebra_simps} *} -lemmas ring_simps[noatp] = algebra_simps +lemmas ring_simps[no_atp] = algebra_simps lemma eq_add_iff1: "a * e + c = b * e + d \ (a - b) * e + c = d" @@ -252,7 +252,7 @@ end -lemmas ring_distribs[noatp] = +lemmas ring_distribs[no_atp] = right_distrib left_distrib left_diff_distrib right_diff_distrib class comm_ring = comm_semiring + ab_group_add @@ -319,7 +319,7 @@ qed text{*Cancellation of equalities with a common factor*} -lemma mult_cancel_right [simp, noatp]: +lemma mult_cancel_right [simp, no_atp]: "a * c = b * c \ c = 0 \ a = b" proof - have "(a * c = b * c) = ((a - b) * c = 0)" @@ -327,7 +327,7 @@ thus ?thesis by (simp add: disj_commute) qed -lemma mult_cancel_left [simp, noatp]: +lemma mult_cancel_left [simp, no_atp]: "c * a = c * b \ c = 0 \ a = b" proof - have "(c * a = c * b) = (c * (a - b) = 0)" @@ -743,7 +743,7 @@ subclass ordered_ab_group_add .. text{*Legacy - use @{text algebra_simps} *} -lemmas ring_simps[noatp] = algebra_simps +lemmas ring_simps[no_atp] = algebra_simps lemma less_add_iff1: "a * e + c < b * e + d \ (a - b) * e + c < d" @@ -950,7 +950,7 @@ end text{*Legacy - use @{text algebra_simps} *} -lemmas ring_simps[noatp] = algebra_simps +lemmas ring_simps[no_atp] = algebra_simps lemmas mult_sign_intros = mult_nonneg_nonneg mult_nonneg_nonpos @@ -1099,7 +1099,7 @@ text {* Simprules for comparisons where common factors can be cancelled. *} -lemmas mult_compare_simps[noatp] = +lemmas mult_compare_simps[no_atp] = mult_le_cancel_right mult_le_cancel_left mult_le_cancel_right1 mult_le_cancel_right2 mult_le_cancel_left1 mult_le_cancel_left2 @@ -1180,7 +1180,7 @@ thus ?thesis by (simp add: ac cpos mult_strict_mono) qed -lemmas eq_minus_self_iff[noatp] = equal_neg_zero +lemmas eq_minus_self_iff[no_atp] = equal_neg_zero lemma less_minus_self_iff: "(a < -a) = (a < (0::'a::linordered_idom))" unfolding order_less_le less_eq_neg_nonpos equal_neg_zero .. diff -r b766aad9136d -r e31ec41a551b src/HOL/Set.thy --- a/src/HOL/Set.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Set.thy Thu Mar 18 13:59:20 2010 +0100 @@ -462,7 +462,7 @@ unfolding mem_def by (erule le_funE, erule le_boolE) -- {* Rule in Modus Ponens style. *} -lemma rev_subsetD [noatp,intro?]: "c \ A ==> A \ B ==> c \ B" +lemma rev_subsetD [no_atp,intro?]: "c \ A ==> A \ B ==> c \ B" -- {* The same, with reversed premises for use with @{text erule} -- cf @{text rev_mp}. *} by (rule subsetD) @@ -471,13 +471,13 @@ \medskip Converts @{prop "A \ B"} to @{prop "x \ A ==> x \ B"}. 
*} -lemma subsetCE [noatp,elim]: "A \ B ==> (c \ A ==> P) ==> (c \ B ==> P) ==> P" +lemma subsetCE [no_atp,elim]: "A \ B ==> (c \ A ==> P) ==> (c \ B ==> P) ==> P" -- {* Classical elimination rule. *} unfolding mem_def by (blast dest: le_funE le_boolE) -lemma subset_eq [noatp]: "A \ B = (\x\A. x \ B)" by blast +lemma subset_eq [no_atp]: "A \ B = (\x\A. x \ B)" by blast -lemma contra_subsetD [noatp]: "A \ B ==> c \ B ==> c \ A" +lemma contra_subsetD [no_atp]: "A \ B ==> c \ B ==> c \ A" by blast lemma subset_refl [simp]: "A \ A" @@ -791,11 +791,11 @@ subsubsection {* Singletons, using insert *} -lemma singletonI [intro!,noatp]: "a : {a}" +lemma singletonI [intro!,no_atp]: "a : {a}" -- {* Redundant? But unlike @{text insertCI}, it proves the subgoal immediately! *} by (rule insertI1) -lemma singletonD [dest!,noatp]: "b : {a} ==> b = a" +lemma singletonD [dest!,no_atp]: "b : {a} ==> b = a" by blast lemmas singletonE = singletonD [elim_format] @@ -806,11 +806,11 @@ lemma singleton_inject [dest!]: "{a} = {b} ==> a = b" by blast -lemma singleton_insert_inj_eq [iff,noatp]: +lemma singleton_insert_inj_eq [iff,no_atp]: "({b} = insert a A) = (a = b & A \ {b})" by blast -lemma singleton_insert_inj_eq' [iff,noatp]: +lemma singleton_insert_inj_eq' [iff,no_atp]: "(insert a A = {b}) = (a = b & A \ {b})" by blast @@ -837,7 +837,7 @@ *} definition image :: "('a => 'b) => 'a set => 'b set" (infixr "`" 90) where - image_def [noatp]: "f ` A = {y. EX x:A. y = f(x)}" + image_def [no_atp]: "f ` A = {y. EX x:A. y = f(x)}" abbreviation range :: "('a => 'b) => 'b set" where -- "of function" @@ -942,10 +942,10 @@ subsubsection {* The ``proper subset'' relation *} -lemma psubsetI [intro!,noatp]: "A \ B ==> A \ B ==> A \ B" +lemma psubsetI [intro!,no_atp]: "A \ B ==> A \ B ==> A \ B" by (unfold less_le) blast -lemma psubsetE [elim!,noatp]: +lemma psubsetE [elim!,no_atp]: "[|A \ B; [|A \ B; ~ (B\A)|] ==> R|] ==> R" by (unfold less_le) blast @@ -1102,12 +1102,12 @@ lemma insert_inter_insert[simp]: "insert a A \ insert a B = insert a (A \ B)" by blast -lemma insert_disjoint [simp,noatp]: +lemma insert_disjoint [simp,no_atp]: "(insert a A \ B = {}) = (a \ B \ A \ B = {})" "({} = insert a A \ B) = (a \ B \ {} = A \ B)" by auto -lemma disjoint_insert [simp,noatp]: +lemma disjoint_insert [simp,no_atp]: "(B \ insert a A = {}) = (a \ B \ B \ A = {})" "({} = A \ insert b B) = (b \ A \ {} = A \ B)" by auto @@ -1139,7 +1139,7 @@ by blast -lemma image_Collect [noatp]: "f ` {x. P x} = {f x | x. P x}" +lemma image_Collect [no_atp]: "f ` {x. P x} = {f x | x. P x}" -- {* NOT suitable as a default simprule: the RHS isn't simpler than the LHS, with its implicit quantifier and conjunction. Also image enjoys better equational properties than does the RHS. *} @@ -1156,7 +1156,7 @@ text {* \medskip @{text range}. *} -lemma full_SetCompr_eq [noatp]: "{u. \x. u = f x} = range f" +lemma full_SetCompr_eq [no_atp]: "{u. \x. u = f x} = range f" by auto lemma range_composition: "range (\x. 
f (g x)) = f`range g" @@ -1213,7 +1213,7 @@ lemma Int_Un_distrib2: "(B \ C) \ A = (B \ A) \ (C \ A)" by blast -lemma Int_UNIV [simp,noatp]: "(A \ B = UNIV) = (A = UNIV & B = UNIV)" +lemma Int_UNIV [simp,no_atp]: "(A \ B = UNIV) = (A = UNIV & B = UNIV)" by blast lemma Int_subset_iff [simp]: "(C \ A \ B) = (C \ A & C \ B)" @@ -1376,7 +1376,7 @@ lemma Diff_eq: "A - B = A \ (-B)" by blast -lemma Diff_eq_empty_iff [simp,noatp]: "(A - B = {}) = (A \ B)" +lemma Diff_eq_empty_iff [simp,no_atp]: "(A - B = {}) = (A \ B)" by blast lemma Diff_cancel [simp]: "A - A = {}" @@ -1397,7 +1397,7 @@ lemma Diff_UNIV [simp]: "A - UNIV = {}" by blast -lemma Diff_insert0 [simp,noatp]: "x \ A ==> A - insert x B = A - B" +lemma Diff_insert0 [simp,no_atp]: "x \ A ==> A - insert x B = A - B" by blast lemma Diff_insert: "A - insert a B = A - B - {a}" @@ -1639,7 +1639,7 @@ -- {* monotonicity *} by blast -lemma vimage_image_eq [noatp]: "f -` (f ` A) = {y. EX x:A. f x = f y}" +lemma vimage_image_eq [no_atp]: "f -` (f ` A) = {y. EX x:A. f x = f y}" by (blast intro: sym) lemma image_vimage_subset: "f ` (f -` A) <= A" diff -r b766aad9136d -r e31ec41a551b src/HOL/SetInterval.thy --- a/src/HOL/SetInterval.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/SetInterval.thy Thu Mar 18 13:59:20 2010 +0100 @@ -165,19 +165,19 @@ context ord begin -lemma greaterThanLessThan_iff [simp,noatp]: +lemma greaterThanLessThan_iff [simp,no_atp]: "(i : {l<.. {m.. i | m \ i & j \ (n::'a::linorder))" apply(auto simp:linorder_not_le) apply(rule ccontr) diff -r b766aad9136d -r e31ec41a551b src/HOL/Sledgehammer.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Sledgehammer.thy Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,130 @@ +(* Title: HOL/Sledgehammer.thy + Author: Lawrence C Paulson + Author: Jia Meng, NICTA + Author: Fabian Immler, TUM +*) + +header {* Sledgehammer: Isabelle--ATP Linkup *} + +theory Sledgehammer +imports Plain Hilbert_Choice +uses + "Tools/polyhash.ML" + "Tools/Sledgehammer/sledgehammer_fol_clause.ML" + ("Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML") + ("Tools/Sledgehammer/sledgehammer_hol_clause.ML") + ("Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML") + ("Tools/Sledgehammer/sledgehammer_fact_filter.ML") + ("Tools/ATP_Manager/atp_manager.ML") + ("Tools/ATP_Manager/atp_wrapper.ML") + ("Tools/ATP_Manager/atp_minimal.ML") + "~~/src/Tools/Metis/metis.ML" + ("Tools/Sledgehammer/metis_tactics.ML") +begin + +definition COMBI :: "'a => 'a" + where "COMBI P == P" + +definition COMBK :: "'a => 'b => 'a" + where "COMBK P Q == P" + +definition COMBB :: "('b => 'c) => ('a => 'b) => 'a => 'c" + where "COMBB P Q R == P (Q R)" + +definition COMBC :: "('a => 'b => 'c) => 'b => 'a => 'c" + where "COMBC P Q R == P R Q" + +definition COMBS :: "('a => 'b => 'c) => ('a => 'b) => 'a => 'c" + where "COMBS P Q R == P R (Q R)" + +definition fequal :: "'a => 'a => bool" + where "fequal X Y == (X=Y)" + +lemma fequal_imp_equal: "fequal X Y ==> X=Y" + by (simp add: fequal_def) + +lemma equal_imp_fequal: "X=Y ==> fequal X Y" + by (simp add: fequal_def) + +text{*These two represent the equivalence between Boolean equality and iff. +They can't be converted to clauses automatically, as the iff would be +expanded...*} + +lemma iff_positive: "P | Q | P=Q" +by blast + +lemma iff_negative: "~P | ~Q | P=Q" +by blast + +text{*Theorems for translation to combinators*} + +lemma abs_S: "(%x. (f x) (g x)) == COMBS f g" +apply (rule eq_reflection) +apply (rule ext) +apply (simp add: COMBS_def) +done + +lemma abs_I: "(%x. 
x) == COMBI" +apply (rule eq_reflection) +apply (rule ext) +apply (simp add: COMBI_def) +done + +lemma abs_K: "(%x. y) == COMBK y" +apply (rule eq_reflection) +apply (rule ext) +apply (simp add: COMBK_def) +done + +lemma abs_B: "(%x. a (g x)) == COMBB a g" +apply (rule eq_reflection) +apply (rule ext) +apply (simp add: COMBB_def) +done + +lemma abs_C: "(%x. (f x) b) == COMBC f b" +apply (rule eq_reflection) +apply (rule ext) +apply (simp add: COMBC_def) +done + + +subsection {* Setup of external ATPs *} + +use "Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML" +setup Sledgehammer_Fact_Preprocessor.setup +use "Tools/Sledgehammer/sledgehammer_hol_clause.ML" +use "Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML" +setup Sledgehammer_Proof_Reconstruct.setup +use "Tools/Sledgehammer/sledgehammer_fact_filter.ML" + +use "Tools/ATP_Manager/atp_wrapper.ML" +setup ATP_Wrapper.setup +use "Tools/ATP_Manager/atp_manager.ML" +use "Tools/ATP_Manager/atp_minimal.ML" + +text {* basic provers *} +setup {* ATP_Manager.add_prover ATP_Wrapper.spass *} +setup {* ATP_Manager.add_prover ATP_Wrapper.vampire *} +setup {* ATP_Manager.add_prover ATP_Wrapper.eprover *} + +text {* provers with stuctured output *} +setup {* ATP_Manager.add_prover ATP_Wrapper.vampire_full *} +setup {* ATP_Manager.add_prover ATP_Wrapper.eprover_full *} + +text {* on some problems better results *} +setup {* ATP_Manager.add_prover ATP_Wrapper.spass_no_tc *} + +text {* remote provers via SystemOnTPTP *} +setup {* ATP_Manager.add_prover ATP_Wrapper.remote_vampire *} +setup {* ATP_Manager.add_prover ATP_Wrapper.remote_spass *} +setup {* ATP_Manager.add_prover ATP_Wrapper.remote_eprover *} + + + +subsection {* The Metis prover *} + +use "Tools/Sledgehammer/metis_tactics.ML" +setup Metis_Tactics.setup + +end diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/ATP_Manager/atp_manager.ML --- a/src/HOL/Tools/ATP_Manager/atp_manager.ML Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Tools/ATP_Manager/atp_manager.ML Thu Mar 18 13:59:20 2010 +0100 @@ -263,7 +263,7 @@ val _ = register birth_time death_time (Thread.self (), desc); val problem = ATP_Wrapper.problem_of_goal (! full_types) i (ctxt, (facts, goal)); val message = #message (prover (! timeout) problem) - handle Res_HOL_Clause.TOO_TRIVIAL => (* FIXME !? *) + handle Sledgehammer_HOL_Clause.TOO_TRIVIAL => (* FIXME !? *) "Empty clause: Try this command: " ^ Markup.markup Markup.sendback "apply metis" | ERROR msg => ("Error: " ^ msg); val _ = unregister message (Thread.self ()); diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/ATP_Manager/atp_minimal.ML --- a/src/HOL/Tools/ATP_Manager/atp_minimal.ML Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Tools/ATP_Manager/atp_minimal.ML Thu Mar 18 13:59:20 2010 +0100 @@ -116,7 +116,7 @@ let val _ = priority ("Testing " ^ string_of_int (length name_thms_pairs) ^ " theorems... ") val name_thm_pairs = maps (fn (n, ths) => map (pair n) ths) name_thms_pairs - val axclauses = Res_Axioms.cnf_rules_pairs (Proof.theory_of state) name_thm_pairs + val axclauses = Sledgehammer_Fact_Preprocessor.cnf_rules_pairs (Proof.theory_of state) name_thm_pairs val {context = ctxt, facts, goal} = Proof.goal state val problem = {with_full_types = ! 
ATP_Manager.full_types, @@ -170,7 +170,7 @@ (NONE, "Error in prover: " ^ msg) | (Failure, _) => (NONE, "Failure: No proof with the theorems supplied")) - handle Res_HOL_Clause.TOO_TRIVIAL => + handle Sledgehammer_HOL_Clause.TOO_TRIVIAL => (SOME ([], 0), "Trivial: Try this command: " ^ Markup.markup Markup.sendback "apply metis") | ERROR msg => (NONE, "Error: " ^ msg) end diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/ATP_Manager/atp_wrapper.ML --- a/src/HOL/Tools/ATP_Manager/atp_wrapper.ML Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Tools/ATP_Manager/atp_wrapper.ML Thu Mar 18 13:59:20 2010 +0100 @@ -52,6 +52,9 @@ structure ATP_Wrapper: ATP_WRAPPER = struct +structure SFF = Sledgehammer_Fact_Filter +structure SPR = Sledgehammer_Proof_Reconstruct + (** generic ATP wrapper **) (* external problem files *) @@ -117,8 +120,8 @@ (* get clauses and prepare them for writing *) val (ctxt, (chain_ths, th)) = goal; val thy = ProofContext.theory_of ctxt; - val chain_ths = map (Thm.put_name_hint Res_Reconstruct.chained_hint) chain_ths; - val goal_cls = #1 (Res_Axioms.neg_conjecture_clauses ctxt th subgoal); + val chain_ths = map (Thm.put_name_hint SPR.chained_hint) chain_ths; + val goal_cls = #1 (Sledgehammer_Fact_Preprocessor.neg_conjecture_clauses ctxt th subgoal); val the_filtered_clauses = (case filtered_clauses of NONE => relevance_filter goal goal_cls @@ -181,7 +184,7 @@ with_path cleanup export run_on (prob_pathname subgoal); (* check for success and print out some information on failure *) - val failure = Res_Reconstruct.find_failure proof; + val failure = SPR.find_failure proof; val success = rc = 0 andalso is_none failure; val (message, real_thm_names) = if is_some failure then ("External prover failed.", []) @@ -203,13 +206,13 @@ prover_config; in external_prover - (Res_ATP.get_relevant max_new_clauses insert_theory_const) - (Res_ATP.prepare_clauses false) - Res_HOL_Clause.tptp_write_file + (SFF.get_relevant max_new_clauses insert_theory_const) + (SFF.prepare_clauses false) + Sledgehammer_HOL_Clause.tptp_write_file command (arguments timeout) - (if emit_structured_proof then Res_Reconstruct.structured_proof - else Res_Reconstruct.lemma_list false) + (if emit_structured_proof then SPR.structured_proof + else SPR.lemma_list false) name problem end; @@ -274,12 +277,12 @@ val {max_new_clauses, insert_theory_const, command, arguments, ...} = prover_config; in external_prover - (Res_ATP.get_relevant max_new_clauses insert_theory_const) - (Res_ATP.prepare_clauses true) - Res_HOL_Clause.dfg_write_file + (SFF.get_relevant max_new_clauses insert_theory_const) + (SFF.prepare_clauses true) + Sledgehammer_HOL_Clause.dfg_write_file command (arguments timeout) - (Res_Reconstruct.lemma_list true) + (SPR.lemma_list true) name problem end; @@ -298,7 +301,7 @@ let val (answer, rc) = bash_output ("\"$ISABELLE_ATP_MANAGER/SystemOnTPTP\" -w") in - if rc <> 0 then error ("Failed to get available systems from SystemOnTPTP:\n" ^ answer) + if rc <> 0 then error ("Failed to get available systems at SystemOnTPTP:\n" ^ answer) else split_lines answer end; @@ -310,7 +313,7 @@ fun the_system prefix = (case get_system prefix of - NONE => error ("No system like " ^ quote prefix ^ " at SystemOnTPTP") + NONE => error ("System " ^ quote prefix ^ " not available at SystemOnTPTP") | SOME sys => sys); fun remote_prover_config prover_prefix args max_new insert_tc: prover_config = diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/Sledgehammer/metis_tactics.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ 
b/src/HOL/Tools/Sledgehammer/metis_tactics.ML Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,747 @@ +(* Title: HOL/Tools/Sledgehammer/metis_tactics.ML + Author: Kong W. Susanto and Lawrence C. Paulson, CU Computer Laboratory + Copyright Cambridge University 2007 + +HOL setup for the Metis prover. +*) + +signature METIS_TACTICS = +sig + val trace: bool Unsynchronized.ref + val type_lits: bool Config.T + val metis_tac: Proof.context -> thm list -> int -> tactic + val metisF_tac: Proof.context -> thm list -> int -> tactic + val metisFT_tac: Proof.context -> thm list -> int -> tactic + val setup: theory -> theory +end + +structure Metis_Tactics : METIS_TACTICS = +struct + +structure SFC = Sledgehammer_FOL_Clause +structure SHC = Sledgehammer_HOL_Clause +structure SPR = Sledgehammer_Proof_Reconstruct + +val trace = Unsynchronized.ref false; +fun trace_msg msg = if !trace then tracing (msg ()) else (); + +val (type_lits, type_lits_setup) = Attrib.config_bool "metis_type_lits" true; + +datatype mode = FO | HO | FT (* first-order, higher-order, fully-typed *) + +(* ------------------------------------------------------------------------- *) +(* Useful Theorems *) +(* ------------------------------------------------------------------------- *) +val EXCLUDED_MIDDLE = @{lemma "P ==> ~ P ==> False" by (rule notE)} +val REFL_THM = incr_indexes 2 @{lemma "t ~= t ==> False" by simp} +val subst_em = @{lemma "s = t ==> P s ==> ~ P t ==> False" by simp} +val ssubst_em = @{lemma "s = t ==> P t ==> ~ P s ==> False" by simp} + +(* ------------------------------------------------------------------------- *) +(* Useful Functions *) +(* ------------------------------------------------------------------------- *) + +(* match untyped terms*) +fun untyped_aconv (Const(a,_)) (Const(b,_)) = (a=b) + | untyped_aconv (Free(a,_)) (Free(b,_)) = (a=b) + | untyped_aconv (Var((a,_),_)) (Var((b,_),_)) = (a=b) (*the index is ignored!*) + | untyped_aconv (Bound i) (Bound j) = (i=j) + | untyped_aconv (Abs(a,_,t)) (Abs(b,_,u)) = (a=b) andalso untyped_aconv t u + | untyped_aconv (t1$t2) (u1$u2) = untyped_aconv t1 u1 andalso untyped_aconv t2 u2 + | untyped_aconv _ _ = false; + +(* Finding the relative location of an untyped term within a list of terms *) +fun get_index lit = + let val lit = Envir.eta_contract lit + fun get n [] = raise Empty + | get n (x::xs) = if untyped_aconv lit (Envir.eta_contract (HOLogic.dest_Trueprop x)) + then n else get (n+1) xs + in get 1 end; + +(* ------------------------------------------------------------------------- *) +(* HOL to FOL (Isabelle to Metis) *) +(* ------------------------------------------------------------------------- *) + +fun fn_isa_to_met "equal" = "=" + | fn_isa_to_met x = x; + +fun metis_lit b c args = (b, (c, args)); + +fun hol_type_to_fol (SFC.AtomV x) = Metis.Term.Var x + | hol_type_to_fol (SFC.AtomF x) = Metis.Term.Fn(x,[]) + | hol_type_to_fol (SFC.Comp(tc,tps)) = Metis.Term.Fn(tc, map hol_type_to_fol tps); + +(*These two functions insert type literals before the real literals. 
That is the + opposite order from TPTP linkup, but maybe OK.*) + +fun hol_term_to_fol_FO tm = + case SHC.strip_comb tm of + (SHC.CombConst(c,_,tys), tms) => + let val tyargs = map hol_type_to_fol tys + val args = map hol_term_to_fol_FO tms + in Metis.Term.Fn (c, tyargs @ args) end + | (SHC.CombVar(v,_), []) => Metis.Term.Var v + | _ => error "hol_term_to_fol_FO"; + +fun hol_term_to_fol_HO (SHC.CombVar (a, _)) = Metis.Term.Var a + | hol_term_to_fol_HO (SHC.CombConst (a, _, tylist)) = + Metis.Term.Fn (fn_isa_to_met a, map hol_type_to_fol tylist) + | hol_term_to_fol_HO (SHC.CombApp (tm1, tm2)) = + Metis.Term.Fn (".", map hol_term_to_fol_HO [tm1, tm2]); + +(*The fully-typed translation, to avoid type errors*) +fun wrap_type (tm, ty) = Metis.Term.Fn("ti", [tm, hol_type_to_fol ty]); + +fun hol_term_to_fol_FT (SHC.CombVar(a, ty)) = + wrap_type (Metis.Term.Var a, ty) + | hol_term_to_fol_FT (SHC.CombConst(a, ty, _)) = + wrap_type (Metis.Term.Fn(fn_isa_to_met a, []), ty) + | hol_term_to_fol_FT (tm as SHC.CombApp(tm1,tm2)) = + wrap_type (Metis.Term.Fn(".", map hol_term_to_fol_FT [tm1,tm2]), + SHC.type_of_combterm tm); + +fun hol_literal_to_fol FO (SHC.Literal (pol, tm)) = + let val (SHC.CombConst(p,_,tys), tms) = SHC.strip_comb tm + val tylits = if p = "equal" then [] else map hol_type_to_fol tys + val lits = map hol_term_to_fol_FO tms + in metis_lit pol (fn_isa_to_met p) (tylits @ lits) end + | hol_literal_to_fol HO (SHC.Literal (pol, tm)) = + (case SHC.strip_comb tm of + (SHC.CombConst("equal",_,_), tms) => + metis_lit pol "=" (map hol_term_to_fol_HO tms) + | _ => metis_lit pol "{}" [hol_term_to_fol_HO tm]) (*hBOOL*) + | hol_literal_to_fol FT (SHC.Literal (pol, tm)) = + (case SHC.strip_comb tm of + (SHC.CombConst("equal",_,_), tms) => + metis_lit pol "=" (map hol_term_to_fol_FT tms) + | _ => metis_lit pol "{}" [hol_term_to_fol_FT tm]) (*hBOOL*); + +fun literals_of_hol_thm thy mode t = + let val (lits, types_sorts) = SHC.literals_of_term thy t + in (map (hol_literal_to_fol mode) lits, types_sorts) end; + +(*Sign should be "true" for conjecture type constraints, "false" for type lits in clauses.*) +fun metis_of_typeLit pos (SFC.LTVar (s,x)) = metis_lit pos s [Metis.Term.Var x] + | metis_of_typeLit pos (SFC.LTFree (s,x)) = metis_lit pos s [Metis.Term.Fn(x,[])]; + +fun default_sort _ (TVar _) = false + | default_sort ctxt (TFree (x, s)) = (s = the_default [] (Variable.def_sort ctxt (x, ~1))); + +fun metis_of_tfree tf = + Metis.Thm.axiom (Metis.LiteralSet.singleton (metis_of_typeLit true tf)); + +fun hol_thm_to_fol is_conjecture ctxt mode th = + let val thy = ProofContext.theory_of ctxt + val (mlits, types_sorts) = + (literals_of_hol_thm thy mode o HOLogic.dest_Trueprop o prop_of) th + in + if is_conjecture then + (Metis.Thm.axiom (Metis.LiteralSet.fromList mlits), SFC.add_typs types_sorts) + else + let val tylits = SFC.add_typs + (filter (not o default_sort ctxt) types_sorts) + val mtylits = if Config.get ctxt type_lits + then map (metis_of_typeLit false) tylits else [] + in + (Metis.Thm.axiom (Metis.LiteralSet.fromList(mtylits @ mlits)), []) + end + end; + +(* ARITY CLAUSE *) + +fun m_arity_cls (SFC.TConsLit (c,t,args)) = + metis_lit true (SFC.make_type_class c) [Metis.Term.Fn(t, map Metis.Term.Var args)] + | m_arity_cls (SFC.TVarLit (c,str)) = + metis_lit false (SFC.make_type_class c) [Metis.Term.Var str]; + +(*TrueI is returned as the Isabelle counterpart because there isn't any.*) +fun arity_cls (SFC.ArityClause{conclLit,premLits,...}) = + (TrueI, + Metis.Thm.axiom (Metis.LiteralSet.fromList (map 
m_arity_cls (conclLit :: premLits)))); + +(* CLASSREL CLAUSE *) + +fun m_classrel_cls subclass superclass = + [metis_lit false subclass [Metis.Term.Var "T"], metis_lit true superclass [Metis.Term.Var "T"]]; + +fun classrel_cls (SFC.ClassrelClause {subclass, superclass, ...}) = + (TrueI, Metis.Thm.axiom (Metis.LiteralSet.fromList (m_classrel_cls subclass superclass))); + +(* ------------------------------------------------------------------------- *) +(* FOL to HOL (Metis to Isabelle) *) +(* ------------------------------------------------------------------------- *) + +datatype term_or_type = Term of Term.term | Type of Term.typ; + +fun terms_of [] = [] + | terms_of (Term t :: tts) = t :: terms_of tts + | terms_of (Type _ :: tts) = terms_of tts; + +fun types_of [] = [] + | types_of (Term (Term.Var ((a,idx), _)) :: tts) = + if String.isPrefix "_" a then + (*Variable generated by Metis, which might have been a type variable.*) + TVar (("'" ^ a, idx), HOLogic.typeS) :: types_of tts + else types_of tts + | types_of (Term _ :: tts) = types_of tts + | types_of (Type T :: tts) = T :: types_of tts; + +fun apply_list rator nargs rands = + let val trands = terms_of rands + in if length trands = nargs then Term (list_comb(rator, trands)) + else error + ("apply_list: wrong number of arguments: " ^ Syntax.string_of_term_global Pure.thy rator ^ + " expected " ^ Int.toString nargs ^ + " received " ^ commas (map (Syntax.string_of_term_global Pure.thy) trands)) + end; + +fun infer_types ctxt = + Syntax.check_terms (ProofContext.set_mode ProofContext.mode_pattern ctxt); + +(*We use 1 rather than 0 because variable references in clauses may otherwise conflict + with variable constraints in the goal...at least, type inference often fails otherwise. + SEE ALSO axiom_inf below.*) +fun mk_var (w,T) = Term.Var((w,1), T); + +(*include the default sort, if available*) +fun mk_tfree ctxt w = + let val ww = "'" ^ w + in TFree(ww, the_default HOLogic.typeS (Variable.def_sort ctxt (ww, ~1))) end; + +(*Remove the "apply" operator from an HO term*) +fun strip_happ args (Metis.Term.Fn(".",[t,u])) = strip_happ (u::args) t + | strip_happ args x = (x, args); + +fun fol_type_to_isa _ (Metis.Term.Var v) = + (case SPR.strip_prefix SFC.tvar_prefix v of + SOME w => SPR.make_tvar w + | NONE => SPR.make_tvar v) + | fol_type_to_isa ctxt (Metis.Term.Fn(x, tys)) = + (case SPR.strip_prefix SFC.tconst_prefix x of + SOME tc => Term.Type (SPR.invert_type_const tc, map (fol_type_to_isa ctxt) tys) + | NONE => + case SPR.strip_prefix SFC.tfree_prefix x of + SOME tf => mk_tfree ctxt tf + | NONE => error ("fol_type_to_isa: " ^ x)); + +(*Maps metis terms to isabelle terms*) +fun fol_term_to_hol_RAW ctxt fol_tm = + let val thy = ProofContext.theory_of ctxt + val _ = trace_msg (fn () => "fol_term_to_hol: " ^ Metis.Term.toString fol_tm) + fun tm_to_tt (Metis.Term.Var v) = + (case SPR.strip_prefix SFC.tvar_prefix v of + SOME w => Type (SPR.make_tvar w) + | NONE => + case SPR.strip_prefix SFC.schematic_var_prefix v of + SOME w => Term (mk_var (w, HOLogic.typeT)) + | NONE => Term (mk_var (v, HOLogic.typeT)) ) + (*Var from Metis with a name like _nnn; possibly a type variable*) + | tm_to_tt (Metis.Term.Fn ("{}", [arg])) = tm_to_tt arg (*hBOOL*) + | tm_to_tt (t as Metis.Term.Fn (".",_)) = + let val (rator,rands) = strip_happ [] t + in case rator of + Metis.Term.Fn(fname,ts) => applic_to_tt (fname, ts @ rands) + | _ => case tm_to_tt rator of + Term t => Term (list_comb(t, terms_of (map tm_to_tt rands))) + | _ => error "tm_to_tt: HO application" + end + 
| tm_to_tt (Metis.Term.Fn (fname, args)) = applic_to_tt (fname,args) + and applic_to_tt ("=",ts) = + Term (list_comb(Const ("op =", HOLogic.typeT), terms_of (map tm_to_tt ts))) + | applic_to_tt (a,ts) = + case SPR.strip_prefix SFC.const_prefix a of + SOME b => + let val c = SPR.invert_const b + val ntypes = SPR.num_typargs thy c + val nterms = length ts - ntypes + val tts = map tm_to_tt ts + val tys = types_of (List.take(tts,ntypes)) + val ntyargs = SPR.num_typargs thy c + in if length tys = ntyargs then + apply_list (Const (c, dummyT)) nterms (List.drop(tts,ntypes)) + else error ("Constant " ^ c ^ " expects " ^ Int.toString ntyargs ^ + " but gets " ^ Int.toString (length tys) ^ + " type arguments\n" ^ + cat_lines (map (Syntax.string_of_typ ctxt) tys) ^ + " the terms are \n" ^ + cat_lines (map (Syntax.string_of_term ctxt) (terms_of tts))) + end + | NONE => (*Not a constant. Is it a type constructor?*) + case SPR.strip_prefix SFC.tconst_prefix a of + SOME b => + Type (Term.Type (SPR.invert_type_const b, types_of (map tm_to_tt ts))) + | NONE => (*Maybe a TFree. Should then check that ts=[].*) + case SPR.strip_prefix SFC.tfree_prefix a of + SOME b => Type (mk_tfree ctxt b) + | NONE => (*a fixed variable? They are Skolem functions.*) + case SPR.strip_prefix SFC.fixed_var_prefix a of + SOME b => + let val opr = Term.Free(b, HOLogic.typeT) + in apply_list opr (length ts) (map tm_to_tt ts) end + | NONE => error ("unexpected metis function: " ^ a) + in case tm_to_tt fol_tm of Term t => t | _ => error "fol_tm_to_tt: Term expected" end; + +(*Maps fully-typed metis terms to isabelle terms*) +fun fol_term_to_hol_FT ctxt fol_tm = + let val _ = trace_msg (fn () => "fol_term_to_hol_FT: " ^ Metis.Term.toString fol_tm) + fun cvt (Metis.Term.Fn ("ti", [Metis.Term.Var v, _])) = + (case SPR.strip_prefix SFC.schematic_var_prefix v of + SOME w => mk_var(w, dummyT) + | NONE => mk_var(v, dummyT)) + | cvt (Metis.Term.Fn ("ti", [Metis.Term.Fn ("=",[]), _])) = + Const ("op =", HOLogic.typeT) + | cvt (Metis.Term.Fn ("ti", [Metis.Term.Fn (x,[]), ty])) = + (case SPR.strip_prefix SFC.const_prefix x of + SOME c => Const (SPR.invert_const c, dummyT) + | NONE => (*Not a constant. Is it a fixed variable??*) + case SPR.strip_prefix SFC.fixed_var_prefix x of + SOME v => Free (v, fol_type_to_isa ctxt ty) + | NONE => error ("fol_term_to_hol_FT bad constant: " ^ x)) + | cvt (Metis.Term.Fn ("ti", [Metis.Term.Fn (".",[tm1,tm2]), _])) = + cvt tm1 $ cvt tm2 + | cvt (Metis.Term.Fn (".",[tm1,tm2])) = (*untyped application*) + cvt tm1 $ cvt tm2 + | cvt (Metis.Term.Fn ("{}", [arg])) = cvt arg (*hBOOL*) + | cvt (Metis.Term.Fn ("=", [tm1,tm2])) = + list_comb(Const ("op =", HOLogic.typeT), map cvt [tm1,tm2]) + | cvt (t as Metis.Term.Fn (x, [])) = + (case SPR.strip_prefix SFC.const_prefix x of + SOME c => Const (SPR.invert_const c, dummyT) + | NONE => (*Not a constant. 
Is it a fixed variable??*) + case SPR.strip_prefix SFC.fixed_var_prefix x of + SOME v => Free (v, dummyT) + | NONE => (trace_msg (fn () => "fol_term_to_hol_FT bad const: " ^ x); + fol_term_to_hol_RAW ctxt t)) + | cvt t = (trace_msg (fn () => "fol_term_to_hol_FT bad term: " ^ Metis.Term.toString t); + fol_term_to_hol_RAW ctxt t) + in cvt fol_tm end; + +fun fol_term_to_hol ctxt FO = fol_term_to_hol_RAW ctxt + | fol_term_to_hol ctxt HO = fol_term_to_hol_RAW ctxt + | fol_term_to_hol ctxt FT = fol_term_to_hol_FT ctxt; + +fun fol_terms_to_hol ctxt mode fol_tms = + let val ts = map (fol_term_to_hol ctxt mode) fol_tms + val _ = trace_msg (fn () => " calling type inference:") + val _ = app (fn t => trace_msg (fn () => Syntax.string_of_term ctxt t)) ts + val ts' = infer_types ctxt ts; + val _ = app (fn t => trace_msg + (fn () => " final term: " ^ Syntax.string_of_term ctxt t ^ + " of type " ^ Syntax.string_of_typ ctxt (type_of t))) + ts' + in ts' end; + +fun mk_not (Const ("Not", _) $ b) = b + | mk_not b = HOLogic.mk_not b; + +val metis_eq = Metis.Term.Fn ("=", []); + +(* ------------------------------------------------------------------------- *) +(* FOL step Inference Rules *) +(* ------------------------------------------------------------------------- *) + +(*for debugging only*) +fun print_thpair (fth,th) = + (trace_msg (fn () => "============================================="); + trace_msg (fn () => "Metis: " ^ Metis.Thm.toString fth); + trace_msg (fn () => "Isabelle: " ^ Display.string_of_thm_without_context th)); + +fun lookth thpairs (fth : Metis.Thm.thm) = + the (AList.lookup (uncurry Metis.Thm.equal) thpairs fth) + handle Option => error ("Failed to find a Metis theorem " ^ Metis.Thm.toString fth); + +fun is_TrueI th = Thm.eq_thm(TrueI,th); + +fun cterm_incr_types thy idx = cterm_of thy o (map_types (Logic.incr_tvar idx)); + +fun inst_excluded_middle thy i_atm = + let val th = EXCLUDED_MIDDLE + val [vx] = Term.add_vars (prop_of th) [] + val substs = [(cterm_of thy (Var vx), cterm_of thy i_atm)] + in cterm_instantiate substs th end; + +(* INFERENCE RULE: AXIOM *) +fun axiom_inf thpairs th = incr_indexes 1 (lookth thpairs th); + (*This causes variables to have an index of 1 by default. SEE ALSO mk_var above.*) + +(* INFERENCE RULE: ASSUME *) +fun assume_inf ctxt mode atm = + inst_excluded_middle + (ProofContext.theory_of ctxt) + (singleton (fol_terms_to_hol ctxt mode) (Metis.Term.Fn atm)); + +(* INFERENCE RULE: INSTANTIATE (Subst). Type instantiations are ignored. Trying to reconstruct + them admits new possibilities of errors, e.g. concerning sorts. 
Instead we try to arrange + that new TVars are distinct and that types can be inferred from terms.*) +fun inst_inf ctxt mode thpairs fsubst th = + let val thy = ProofContext.theory_of ctxt + val i_th = lookth thpairs th + val i_th_vars = Term.add_vars (prop_of i_th) [] + fun find_var x = the (List.find (fn ((a,_),_) => a=x) i_th_vars) + fun subst_translation (x,y) = + let val v = find_var x + val t = fol_term_to_hol ctxt mode y (*we call infer_types below*) + in SOME (cterm_of thy (Var v), t) end + handle Option => + (trace_msg (fn() => "List.find failed for the variable " ^ x ^ + " in " ^ Display.string_of_thm ctxt i_th); + NONE) + fun remove_typeinst (a, t) = + case SPR.strip_prefix SFC.schematic_var_prefix a of + SOME b => SOME (b, t) + | NONE => case SPR.strip_prefix SFC.tvar_prefix a of + SOME _ => NONE (*type instantiations are forbidden!*) + | NONE => SOME (a,t) (*internal Metis var?*) + val _ = trace_msg (fn () => " isa th: " ^ Display.string_of_thm ctxt i_th) + val substs = map_filter remove_typeinst (Metis.Subst.toList fsubst) + val (vars,rawtms) = ListPair.unzip (map_filter subst_translation substs) + val tms = infer_types ctxt rawtms; + val ctm_of = cterm_incr_types thy (1 + Thm.maxidx_of i_th) + val substs' = ListPair.zip (vars, map ctm_of tms) + val _ = trace_msg (fn () => + cat_lines ("subst_translations:" :: + (substs' |> map (fn (x, y) => + Syntax.string_of_term ctxt (term_of x) ^ " |-> " ^ + Syntax.string_of_term ctxt (term_of y))))); + in cterm_instantiate substs' i_th + handle THM (msg, _, _) => error ("metis error (inst_inf): " ^ msg) + end; + +(* INFERENCE RULE: RESOLVE *) + +(*Like RSN, but we rename apart only the type variables. Vars here typically have an index + of 1, and the use of RSN would increase this typically to 3. Instantiations of those Vars + could then fail. 
See comment on mk_var.*) +fun resolve_inc_tyvars(tha,i,thb) = + let val tha = Drule.incr_type_indexes (1 + Thm.maxidx_of thb) tha + val ths = Seq.list_of (Thm.bicompose false (false,tha,nprems_of tha) i thb) + in + case distinct Thm.eq_thm ths of + [th] => th + | _ => raise THM ("resolve_inc_tyvars: unique result expected", i, [tha,thb]) + end; + +fun resolve_inf ctxt mode thpairs atm th1 th2 = + let + val i_th1 = lookth thpairs th1 and i_th2 = lookth thpairs th2 + val _ = trace_msg (fn () => " isa th1 (pos): " ^ Display.string_of_thm ctxt i_th1) + val _ = trace_msg (fn () => " isa th2 (neg): " ^ Display.string_of_thm ctxt i_th2) + in + if is_TrueI i_th1 then i_th2 (*Trivial cases where one operand is type info*) + else if is_TrueI i_th2 then i_th1 + else + let + val i_atm = singleton (fol_terms_to_hol ctxt mode) (Metis.Term.Fn atm) + val _ = trace_msg (fn () => " atom: " ^ Syntax.string_of_term ctxt i_atm) + val prems_th1 = prems_of i_th1 + val prems_th2 = prems_of i_th2 + val index_th1 = get_index (mk_not i_atm) prems_th1 + handle Empty => error "Failed to find literal in th1" + val _ = trace_msg (fn () => " index_th1: " ^ Int.toString index_th1) + val index_th2 = get_index i_atm prems_th2 + handle Empty => error "Failed to find literal in th2" + val _ = trace_msg (fn () => " index_th2: " ^ Int.toString index_th2) + in resolve_inc_tyvars (Meson.select_literal index_th1 i_th1, index_th2, i_th2) end + end; + +(* INFERENCE RULE: REFL *) +val refl_x = cterm_of @{theory} (Var (hd (Term.add_vars (prop_of REFL_THM) []))); +val refl_idx = 1 + Thm.maxidx_of REFL_THM; + +fun refl_inf ctxt mode t = + let val thy = ProofContext.theory_of ctxt + val i_t = singleton (fol_terms_to_hol ctxt mode) t + val _ = trace_msg (fn () => " term: " ^ Syntax.string_of_term ctxt i_t) + val c_t = cterm_incr_types thy refl_idx i_t + in cterm_instantiate [(refl_x, c_t)] REFL_THM end; + +fun get_ty_arg_size _ (Const ("op =", _)) = 0 (*equality has no type arguments*) + | get_ty_arg_size thy (Const (c, _)) = (SPR.num_typargs thy c handle TYPE _ => 0) + | get_ty_arg_size _ _ = 0; + +(* INFERENCE RULE: EQUALITY *) +fun equality_inf ctxt mode (pos, atm) fp fr = + let val thy = ProofContext.theory_of ctxt + val m_tm = Metis.Term.Fn atm + val [i_atm,i_tm] = fol_terms_to_hol ctxt mode [m_tm, fr] + val _ = trace_msg (fn () => "sign of the literal: " ^ Bool.toString pos) + fun replace_item_list lx 0 (_::ls) = lx::ls + | replace_item_list lx i (l::ls) = l :: replace_item_list lx (i-1) ls + fun path_finder_FO tm [] = (tm, Term.Bound 0) + | path_finder_FO tm (p::ps) = + let val (tm1,args) = Term.strip_comb tm + val adjustment = get_ty_arg_size thy tm1 + val p' = if adjustment > p then p else p-adjustment + val tm_p = List.nth(args,p') + handle Subscript => error ("equality_inf: " ^ Int.toString p ^ " adj " ^ + Int.toString adjustment ^ " term " ^ Syntax.string_of_term ctxt tm) + val _ = trace_msg (fn () => "path_finder: " ^ Int.toString p ^ + " " ^ Syntax.string_of_term ctxt tm_p) + val (r,t) = path_finder_FO tm_p ps + in + (r, list_comb (tm1, replace_item_list t p' args)) + end + fun path_finder_HO tm [] = (tm, Term.Bound 0) + | path_finder_HO (t$u) (0::ps) = (fn(x,y) => (x, y$u)) (path_finder_HO t ps) + | path_finder_HO (t$u) (_::ps) = (fn(x,y) => (x, t$y)) (path_finder_HO u ps) + fun path_finder_FT tm [] _ = (tm, Term.Bound 0) + | path_finder_FT tm (0::ps) (Metis.Term.Fn ("ti", [t1, _])) = + path_finder_FT tm ps t1 + | path_finder_FT (t$u) (0::ps) (Metis.Term.Fn (".", [t1, _])) = + (fn(x,y) => (x, y$u)) (path_finder_FT t ps t1) 
+ | path_finder_FT (t$u) (1::ps) (Metis.Term.Fn (".", [_, t2])) = + (fn(x,y) => (x, t$y)) (path_finder_FT u ps t2) + | path_finder_FT tm ps t = error ("equality_inf, path_finder_FT: path = " ^ + space_implode " " (map Int.toString ps) ^ + " isa-term: " ^ Syntax.string_of_term ctxt tm ^ + " fol-term: " ^ Metis.Term.toString t) + fun path_finder FO tm ps _ = path_finder_FO tm ps + | path_finder HO (tm as Const("op =",_) $ _ $ _) (p::ps) _ = + (*equality: not curried, as other predicates are*) + if p=0 then path_finder_HO tm (0::1::ps) (*select first operand*) + else path_finder_HO tm (p::ps) (*1 selects second operand*) + | path_finder HO tm (_ :: ps) (Metis.Term.Fn ("{}", [_])) = + path_finder_HO tm ps (*if not equality, ignore head to skip hBOOL*) + | path_finder FT (tm as Const("op =",_) $ _ $ _) (p::ps) + (Metis.Term.Fn ("=", [t1,t2])) = + (*equality: not curried, as other predicates are*) + if p=0 then path_finder_FT tm (0::1::ps) + (Metis.Term.Fn (".", [Metis.Term.Fn (".", [metis_eq,t1]), t2])) + (*select first operand*) + else path_finder_FT tm (p::ps) + (Metis.Term.Fn (".", [metis_eq,t2])) + (*1 selects second operand*) + | path_finder FT tm (_ :: ps) (Metis.Term.Fn ("{}", [t1])) = path_finder_FT tm ps t1 + (*if not equality, ignore head to skip the hBOOL predicate*) + | path_finder FT tm ps t = path_finder_FT tm ps t (*really an error case!*) + fun path_finder_lit ((nt as Term.Const ("Not", _)) $ tm_a) idx = + let val (tm, tm_rslt) = path_finder mode tm_a idx m_tm + in (tm, nt $ tm_rslt) end + | path_finder_lit tm_a idx = path_finder mode tm_a idx m_tm + val (tm_subst, body) = path_finder_lit i_atm fp + val tm_abs = Term.Abs("x", Term.type_of tm_subst, body) + val _ = trace_msg (fn () => "abstraction: " ^ Syntax.string_of_term ctxt tm_abs) + val _ = trace_msg (fn () => "i_tm: " ^ Syntax.string_of_term ctxt i_tm) + val _ = trace_msg (fn () => "located term: " ^ Syntax.string_of_term ctxt tm_subst) + val imax = maxidx_of_term (i_tm $ tm_abs $ tm_subst) (*ill typed but gives right max*) + val subst' = incr_indexes (imax+1) (if pos then subst_em else ssubst_em) + val _ = trace_msg (fn () => "subst' " ^ Display.string_of_thm ctxt subst') + val eq_terms = map (pairself (cterm_of thy)) + (ListPair.zip (OldTerm.term_vars (prop_of subst'), [tm_abs, tm_subst, i_tm])) + in cterm_instantiate eq_terms subst' end; + +val factor = Seq.hd o distinct_subgoals_tac; + +fun step _ _ thpairs (fol_th, Metis.Proof.Axiom _) = factor (axiom_inf thpairs fol_th) + | step ctxt mode _ (_, Metis.Proof.Assume f_atm) = assume_inf ctxt mode f_atm + | step ctxt mode thpairs (_, Metis.Proof.Subst (f_subst, f_th1)) = + factor (inst_inf ctxt mode thpairs f_subst f_th1) + | step ctxt mode thpairs (_, Metis.Proof.Resolve(f_atm, f_th1, f_th2)) = + factor (resolve_inf ctxt mode thpairs f_atm f_th1 f_th2) + | step ctxt mode _ (_, Metis.Proof.Refl f_tm) = refl_inf ctxt mode f_tm + | step ctxt mode _ (_, Metis.Proof.Equality (f_lit, f_p, f_r)) = + equality_inf ctxt mode f_lit f_p f_r; + +fun real_literal (_, (c, _)) = not (String.isPrefix SFC.class_prefix c); + +fun translate _ _ thpairs [] = thpairs + | translate mode ctxt thpairs ((fol_th, inf) :: infpairs) = + let val _ = trace_msg (fn () => "=============================================") + val _ = trace_msg (fn () => "METIS THM: " ^ Metis.Thm.toString fol_th) + val _ = trace_msg (fn () => "INFERENCE: " ^ Metis.Proof.inferenceToString inf) + val th = Meson.flexflex_first_order (step ctxt mode thpairs (fol_th, inf)) + val _ = trace_msg (fn () => "ISABELLE THM: " ^ 
Display.string_of_thm ctxt th) + val _ = trace_msg (fn () => "=============================================") + val n_metis_lits = + length (filter real_literal (Metis.LiteralSet.toList (Metis.Thm.clause fol_th))) + in + if nprems_of th = n_metis_lits then () + else error "Metis: proof reconstruction has gone wrong"; + translate mode ctxt ((fol_th, th) :: thpairs) infpairs + end; + +(*Determining which axiom clauses are actually used*) +fun used_axioms axioms (th, Metis.Proof.Axiom _) = SOME (lookth axioms th) + | used_axioms _ _ = NONE; + +(* ------------------------------------------------------------------------- *) +(* Translation of HO Clauses *) +(* ------------------------------------------------------------------------- *) + +fun cnf_th thy th = hd (Sledgehammer_Fact_Preprocessor.cnf_axiom thy th); + +val equal_imp_fequal' = cnf_th @{theory} @{thm equal_imp_fequal}; +val fequal_imp_equal' = cnf_th @{theory} @{thm fequal_imp_equal}; + +val comb_I = cnf_th @{theory} SHC.comb_I; +val comb_K = cnf_th @{theory} SHC.comb_K; +val comb_B = cnf_th @{theory} SHC.comb_B; +val comb_C = cnf_th @{theory} SHC.comb_C; +val comb_S = cnf_th @{theory} SHC.comb_S; + +fun type_ext thy tms = + let val subs = Sledgehammer_Fact_Filter.tfree_classes_of_terms tms + val supers = Sledgehammer_Fact_Filter.tvar_classes_of_terms tms + and tycons = Sledgehammer_Fact_Filter.type_consts_of_terms thy tms + val (supers', arity_clauses) = SFC.make_arity_clauses thy tycons supers + val classrel_clauses = SFC.make_classrel_clauses thy subs supers' + in map classrel_cls classrel_clauses @ map arity_cls arity_clauses + end; + +(* ------------------------------------------------------------------------- *) +(* Logic maps manage the interface between HOL and first-order logic. *) +(* ------------------------------------------------------------------------- *) + +type logic_map = + {axioms : (Metis.Thm.thm * thm) list, + tfrees : SFC.type_literal list}; + +fun const_in_metis c (pred, tm_list) = + let + fun in_mterm (Metis.Term.Var _) = false + | in_mterm (Metis.Term.Fn (".", tm_list)) = exists in_mterm tm_list + | in_mterm (Metis.Term.Fn (nm, tm_list)) = c=nm orelse exists in_mterm tm_list + in c = pred orelse exists in_mterm tm_list end; + +(*Extract TFree constraints from context to include as conjecture clauses*) +fun init_tfrees ctxt = + let fun add ((a,i),s) Ts = if i = ~1 then TFree(a,s) :: Ts else Ts + in SFC.add_typs (Vartab.fold add (#2 (Variable.constraints_of ctxt)) []) end; + +(*transform isabelle type / arity clause to metis clause *) +fun add_type_thm [] lmap = lmap + | add_type_thm ((ith, mth) :: cls) {axioms, tfrees} = + add_type_thm cls {axioms = (mth, ith) :: axioms, + tfrees = tfrees} + +(*Insert non-logical axioms corresponding to all accumulated TFrees*) +fun add_tfrees {axioms, tfrees} : logic_map = + {axioms = (map (fn tf => (metis_of_tfree tf, TrueI)) (distinct op= tfrees)) @ axioms, + tfrees = tfrees}; + +fun string_of_mode FO = "FO" + | string_of_mode HO = "HO" + | string_of_mode FT = "FT" + +(* Function to generate metis clauses, including comb and type clauses *) +fun build_map mode0 ctxt cls ths = + let val thy = ProofContext.theory_of ctxt + (*The modes FO and FT are sticky. 
HO can be downgraded to FO.*) + fun set_mode FO = FO + | set_mode HO = if forall (Meson.is_fol_term thy o prop_of) (cls@ths) then FO else HO + | set_mode FT = FT + val mode = set_mode mode0 + (*transform isabelle clause to metis clause *) + fun add_thm is_conjecture ith {axioms, tfrees} : logic_map = + let val (mth, tfree_lits) = hol_thm_to_fol is_conjecture ctxt mode ith + in + {axioms = (mth, Meson.make_meta_clause ith) :: axioms, + tfrees = union (op =) tfree_lits tfrees} + end; + val lmap0 = fold (add_thm true) cls {axioms = [], tfrees = init_tfrees ctxt} + val lmap = fold (add_thm false) ths (add_tfrees lmap0) + val clause_lists = map (Metis.Thm.clause o #1) (#axioms lmap) + fun used c = exists (Metis.LiteralSet.exists (const_in_metis c o #2)) clause_lists + (*Now check for the existence of certain combinators*) + val thI = if used "c_COMBI" then [comb_I] else [] + val thK = if used "c_COMBK" then [comb_K] else [] + val thB = if used "c_COMBB" then [comb_B] else [] + val thC = if used "c_COMBC" then [comb_C] else [] + val thS = if used "c_COMBS" then [comb_S] else [] + val thEQ = if used "c_fequal" then [fequal_imp_equal', equal_imp_fequal'] else [] + val lmap' = if mode=FO then lmap + else fold (add_thm false) (thEQ @ thS @ thC @ thB @ thK @ thI) lmap + in + (mode, add_type_thm (type_ext thy (map prop_of (cls @ ths))) lmap') + end; + +fun refute cls = + Metis.Resolution.loop (Metis.Resolution.new Metis.Resolution.default cls); + +fun is_false t = t aconv (HOLogic.mk_Trueprop HOLogic.false_const); + +fun common_thm ths1 ths2 = exists (member Thm.eq_thm ths1) (map Meson.make_meta_clause ths2); + +exception METIS of string; + +(* Main function to start metis prove and reconstruction *) +fun FOL_SOLVE mode ctxt cls ths0 = + let val thy = ProofContext.theory_of ctxt + val th_cls_pairs = + map (fn th => (Thm.get_name_hint th, Sledgehammer_Fact_Preprocessor.cnf_axiom thy th)) ths0 + val ths = maps #2 th_cls_pairs + val _ = trace_msg (fn () => "FOL_SOLVE: CONJECTURE CLAUSES") + val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) cls + val _ = trace_msg (fn () => "THEOREM CLAUSES") + val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) ths + val (mode, {axioms,tfrees}) = build_map mode ctxt cls ths + val _ = if null tfrees then () + else (trace_msg (fn () => "TFREE CLAUSES"); + app (fn tf => trace_msg (fn _ => SFC.tptp_of_typeLit true tf)) tfrees) + val _ = trace_msg (fn () => "CLAUSES GIVEN TO METIS") + val thms = map #1 axioms + val _ = app (fn th => trace_msg (fn () => Metis.Thm.toString th)) thms + val _ = trace_msg (fn () => "mode = " ^ string_of_mode mode) + val _ = trace_msg (fn () => "START METIS PROVE PROCESS") + in + case filter (is_false o prop_of) cls of + false_th::_ => [false_th RS @{thm FalseE}] + | [] => + case refute thms of + Metis.Resolution.Contradiction mth => + let val _ = trace_msg (fn () => "METIS RECONSTRUCTION START: " ^ + Metis.Thm.toString mth) + val ctxt' = fold Variable.declare_constraints (map prop_of cls) ctxt + (*add constraints arising from converting goal to clause form*) + val proof = Metis.Proof.proof mth + val result = translate mode ctxt' axioms proof + and used = map_filter (used_axioms axioms) proof + val _ = trace_msg (fn () => "METIS COMPLETED...clauses actually used:") + val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) used + val unused = th_cls_pairs |> map_filter (fn (name, cls) => + if common_thm used cls then NONE else SOME name) + in + if null unused then () + else warning 
("Metis: unused theorems " ^ commas_quote unused); + case result of + (_,ith)::_ => + (trace_msg (fn () => "success: " ^ Display.string_of_thm ctxt ith); + [ith]) + | _ => (trace_msg (fn () => "Metis: no result"); + []) + end + | Metis.Resolution.Satisfiable _ => + (trace_msg (fn () => "Metis: No first-order proof with the lemmas supplied"); + []) + end; + +fun metis_general_tac mode ctxt ths i st0 = + let val _ = trace_msg (fn () => + "Metis called with theorems " ^ cat_lines (map (Display.string_of_thm ctxt) ths)) + in + if exists_type Sledgehammer_Fact_Preprocessor.type_has_topsort (prop_of st0) + then raise METIS "Metis: Proof state contains the universal sort {}" + else + (Meson.MESON Sledgehammer_Fact_Preprocessor.neg_clausify + (fn cls => resolve_tac (FOL_SOLVE mode ctxt cls ths) 1) ctxt i + THEN Sledgehammer_Fact_Preprocessor.expand_defs_tac st0) st0 + end + handle METIS s => (warning ("Metis: " ^ s); Seq.empty); + +val metis_tac = metis_general_tac HO; +val metisF_tac = metis_general_tac FO; +val metisFT_tac = metis_general_tac FT; + +fun method name mode comment = Method.setup name (Attrib.thms >> (fn ths => fn ctxt => + SIMPLE_METHOD' (CHANGED_PROP o metis_general_tac mode ctxt ths))) comment; + +val setup = + type_lits_setup #> + method @{binding metis} HO "METIS for FOL & HOL problems" #> + method @{binding metisF} FO "METIS for FOL problems" #> + method @{binding metisFT} FT "METIS with fully-typed translation" #> + Method.setup @{binding finish_clausify} + (Scan.succeed (K (SIMPLE_METHOD (Sledgehammer_Fact_Preprocessor.expand_defs_tac refl)))) + "cleanup after conversion to clauses"; + +end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,574 @@ +(* Title: HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML + Author: Jia Meng, Cambridge University Computer Laboratory, NICTA +*) + +signature SLEDGEHAMMER_FACT_FILTER = +sig + type classrelClause = Sledgehammer_FOL_Clause.classrelClause + type arityClause = Sledgehammer_FOL_Clause.arityClause + type axiom_name = Sledgehammer_HOL_Clause.axiom_name + type clause = Sledgehammer_HOL_Clause.clause + type clause_id = Sledgehammer_HOL_Clause.clause_id + datatype mode = Auto | Fol | Hol + val tvar_classes_of_terms : term list -> string list + val tfree_classes_of_terms : term list -> string list + val type_consts_of_terms : theory -> term list -> string list + val get_relevant : int -> bool -> Proof.context * (thm list * 'a) -> thm list -> + (thm * (string * int)) list + val prepare_clauses : bool -> thm list -> thm list -> + (thm * (axiom_name * clause_id)) list -> + (thm * (axiom_name * clause_id)) list -> theory -> + axiom_name vector * + (clause list * clause list * clause list * + clause list * classrelClause list * arityClause list) +end; + +structure Sledgehammer_Fact_Filter : SLEDGEHAMMER_FACT_FILTER = +struct + +structure SFC = Sledgehammer_FOL_Clause +structure SFP = Sledgehammer_Fact_Preprocessor +structure SHC = Sledgehammer_HOL_Clause + +type classrelClause = SFC.classrelClause +type arityClause = SFC.arityClause +type axiom_name = SHC.axiom_name +type clause = SHC.clause +type clause_id = SHC.clause_id + +(********************************************************************) +(* some settings for both background automatic ATP calling procedure*) +(* and also explicit ATP invocation methods *) 
+(********************************************************************) + +(*Translation mode can be auto-detected, or forced to be first-order or higher-order*) +datatype mode = Auto | Fol | Hol; + +val linkup_logic_mode = Auto; + +(*** background linkup ***) +val run_blacklist_filter = true; + +(*** relevance filter parameters ***) +val run_relevance_filter = true; +val pass_mark = 0.5; +val convergence = 3.2; (*Higher numbers allow longer inference chains*) +val follow_defs = false; (*Follow definitions. Makes problems bigger.*) + +(***************************************************************) +(* Relevance Filtering *) +(***************************************************************) + +fun strip_Trueprop (Const("Trueprop",_) $ t) = t + | strip_Trueprop t = t; + +(*A surprising number of theorems contain only a few significant constants. + These include all induction rules, and other general theorems. Filtering + theorems in clause form reveals these complexities in the form of Skolem + functions. If we were instead to filter theorems in their natural form, + some other method of measuring theorem complexity would become necessary.*) + +fun log_weight2 (x:real) = 1.0 + 2.0/Math.ln (x+1.0); + +(*The default seems best in practice. A constant function of one ignores + the constant frequencies.*) +val weight_fn = log_weight2; + + +(*Including equality in this list might be expected to stop rules like subset_antisym from + being chosen, but for some reason filtering works better with them listed. The + logical signs All, Ex, &, and --> are omitted because any remaining occurrrences + must be within comprehensions.*) +val standard_consts = ["Trueprop","==>","all","==","op |","Not","op ="]; + + +(*** constants with types ***) + +(*An abstraction of Isabelle types*) +datatype const_typ = CTVar | CType of string * const_typ list + +(*Is the second type an instance of the first one?*) +fun match_type (CType(con1,args1)) (CType(con2,args2)) = + con1=con2 andalso match_types args1 args2 + | match_type CTVar _ = true + | match_type _ CTVar = false +and match_types [] [] = true + | match_types (a1::as1) (a2::as2) = match_type a1 a2 andalso match_types as1 as2; + +(*Is there a unifiable constant?*) +fun uni_mem gctab (c,c_typ) = + case Symtab.lookup gctab c of + NONE => false + | SOME ctyps_list => exists (match_types c_typ) ctyps_list; + +(*Maps a "real" type to a const_typ*) +fun const_typ_of (Type (c,typs)) = CType (c, map const_typ_of typs) + | const_typ_of (TFree _) = CTVar + | const_typ_of (TVar _) = CTVar + +(*Pairs a constant with the list of its type instantiations (using const_typ)*) +fun const_with_typ thy (c,typ) = + let val tvars = Sign.const_typargs thy (c,typ) + in (c, map const_typ_of tvars) end + handle TYPE _ => (c,[]); (*Variable (locale constant): monomorphic*) + +(*Add a const/type pair to the table, but a [] entry means a standard connective, + which we ignore.*) +fun add_const_typ_table ((c,ctyps), tab) = + Symtab.map_default (c, [ctyps]) (fn [] => [] | ctyps_list => insert (op =) ctyps ctyps_list) + tab; + +(*Free variables are included, as well as constants, to handle locales*) +fun add_term_consts_typs_rm thy (Const(c, typ), tab) = + add_const_typ_table (const_with_typ thy (c,typ), tab) + | add_term_consts_typs_rm thy (Free(c, typ), tab) = + add_const_typ_table (const_with_typ thy (c,typ), tab) + | add_term_consts_typs_rm thy (t $ u, tab) = + add_term_consts_typs_rm thy (t, add_term_consts_typs_rm thy (u, tab)) + | add_term_consts_typs_rm thy (Abs(_,_,t), tab) = 
add_term_consts_typs_rm thy (t, tab) + | add_term_consts_typs_rm _ (_, tab) = tab; + +(*The empty list here indicates that the constant is being ignored*) +fun add_standard_const (s,tab) = Symtab.update (s,[]) tab; + +val null_const_tab : const_typ list list Symtab.table = + List.foldl add_standard_const Symtab.empty standard_consts; + +fun get_goal_consts_typs thy = List.foldl (add_term_consts_typs_rm thy) null_const_tab; + +(*Inserts a dummy "constant" referring to the theory name, so that relevance + takes the given theory into account.*) +fun const_prop_of theory_const th = + if theory_const then + let val name = Context.theory_name (theory_of_thm th) + val t = Const (name ^ ". 1", HOLogic.boolT) + in t $ prop_of th end + else prop_of th; + +(**** Constant / Type Frequencies ****) + +(*A two-dimensional symbol table counts frequencies of constants. It's keyed first by + constant name and second by its list of type instantiations. For the latter, we need + a linear ordering on type const_typ list.*) + +local + +fun cons_nr CTVar = 0 + | cons_nr (CType _) = 1; + +in + +fun const_typ_ord TU = + case TU of + (CType (a, Ts), CType (b, Us)) => + (case fast_string_ord(a,b) of EQUAL => dict_ord const_typ_ord (Ts,Us) | ord => ord) + | (T, U) => int_ord (cons_nr T, cons_nr U); + +end; + +structure CTtab = Table(type key = const_typ list val ord = dict_ord const_typ_ord); + +fun count_axiom_consts theory_const thy ((thm,_), tab) = + let fun count_const (a, T, tab) = + let val (c, cts) = const_with_typ thy (a,T) + in (*Two-dimensional table update. Constant maps to types maps to count.*) + Symtab.map_default (c, CTtab.empty) + (CTtab.map_default (cts,0) (fn n => n+1)) tab + end + fun count_term_consts (Const(a,T), tab) = count_const(a,T,tab) + | count_term_consts (Free(a,T), tab) = count_const(a,T,tab) + | count_term_consts (t $ u, tab) = + count_term_consts (t, count_term_consts (u, tab)) + | count_term_consts (Abs(_,_,t), tab) = count_term_consts (t, tab) + | count_term_consts (_, tab) = tab + in count_term_consts (const_prop_of theory_const thm, tab) end; + + +(**** Actual Filtering Code ****) + +(*The frequency of a constant is the sum of those of all instances of its type.*) +fun const_frequency ctab (c, cts) = + let val pairs = CTtab.dest (the (Symtab.lookup ctab c)) + fun add ((cts',m), n) = if match_types cts cts' then m+n else n + in List.foldl add 0 pairs end; + +(*Add in a constant's weight, as determined by its frequency.*) +fun add_ct_weight ctab ((c,T), w) = + w + weight_fn (real (const_frequency ctab (c,T))); + +(*Relevant constants are weighted according to frequency, + but irrelevant constants are simply counted. 
Otherwise, Skolem functions, + which are rare, would harm a clause's chances of being picked.*) +fun clause_weight ctab gctyps consts_typs = + let val rel = filter (uni_mem gctyps) consts_typs + val rel_weight = List.foldl (add_ct_weight ctab) 0.0 rel + in + rel_weight / (rel_weight + real (length consts_typs - length rel)) + end; + +(*Multiplies out to a list of pairs: 'a * 'b list -> ('a * 'b) list -> ('a * 'b) list*) +fun add_expand_pairs (x,ys) xys = List.foldl (fn (y,acc) => (x,y)::acc) xys ys; + +fun consts_typs_of_term thy t = + let val tab = add_term_consts_typs_rm thy (t, null_const_tab) + in Symtab.fold add_expand_pairs tab [] end; + +fun pair_consts_typs_axiom theory_const thy (thm,name) = + ((thm,name), (consts_typs_of_term thy (const_prop_of theory_const thm))); + +exception ConstFree; +fun dest_ConstFree (Const aT) = aT + | dest_ConstFree (Free aT) = aT + | dest_ConstFree _ = raise ConstFree; + +(*Look for definitions of the form f ?x1 ... ?xn = t, but not reversed.*) +fun defines thy thm gctypes = + let val tm = prop_of thm + fun defs lhs rhs = + let val (rator,args) = strip_comb lhs + val ct = const_with_typ thy (dest_ConstFree rator) + in + forall is_Var args andalso uni_mem gctypes ct andalso + subset (op =) (Term.add_vars rhs [], Term.add_vars lhs []) + end + handle ConstFree => false + in + case tm of Const ("Trueprop",_) $ (Const("op =",_) $ lhs $ rhs) => + defs lhs rhs + | _ => false + end; + +type annotd_cls = (thm * (string * int)) * ((string * const_typ list) list); + +(*For a reverse sort, putting the largest values first.*) +fun compare_pairs ((_,w1),(_,w2)) = Real.compare (w2,w1); + +(*Limit the number of new clauses, to prevent runaway acceptance.*) +fun take_best max_new (newpairs : (annotd_cls*real) list) = + let val nnew = length newpairs + in + if nnew <= max_new then (map #1 newpairs, []) + else + let val cls = sort compare_pairs newpairs + val accepted = List.take (cls, max_new) + in + SFP.trace_msg (fn () => ("Number of candidates, " ^ Int.toString nnew ^ + ", exceeds the limit of " ^ Int.toString (max_new))); + SFP.trace_msg (fn () => ("Effective pass mark: " ^ Real.toString (#2 (List.last accepted)))); + SFP.trace_msg (fn () => "Actually passed: " ^ + space_implode ", " (map (fn (((_,(name,_)),_),_) => name) accepted)); + + (map #1 accepted, map #1 (List.drop (cls, max_new))) + end + end; + +fun relevant_clauses max_new thy ctab p rel_consts = + let fun relevant ([],_) [] = [] : (thm * (string * int)) list (*Nothing added this iteration*) + | relevant (newpairs,rejects) [] = + let val (newrels,more_rejects) = take_best max_new newpairs + val new_consts = maps #2 newrels + val rel_consts' = List.foldl add_const_typ_table rel_consts new_consts + val newp = p + (1.0-p) / convergence + in + SFP.trace_msg (fn () => "relevant this iteration: " ^ Int.toString (length newrels)); + (map #1 newrels) @ + (relevant_clauses max_new thy ctab newp rel_consts' (more_rejects@rejects)) + end + | relevant (newrels,rejects) ((ax as (clsthm as (_,(name,n)),consts_typs)) :: axs) = + let val weight = clause_weight ctab rel_consts consts_typs + in + if p <= weight orelse (follow_defs andalso defines thy (#1 clsthm) rel_consts) + then (SFP.trace_msg (fn () => (name ^ " clause " ^ Int.toString n ^ + " passes: " ^ Real.toString weight)); + relevant ((ax,weight)::newrels, rejects) axs) + else relevant (newrels, ax::rejects) axs + end + in SFP.trace_msg (fn () => ("relevant_clauses, current pass mark = " ^ Real.toString p)); + relevant ([],[]) + end; + +fun relevance_filter 
max_new theory_const thy axioms goals = + if run_relevance_filter andalso pass_mark >= 0.1 + then + let val const_tab = List.foldl (count_axiom_consts theory_const thy) Symtab.empty axioms + val goal_const_tab = get_goal_consts_typs thy goals + val _ = SFP.trace_msg (fn () => ("Initial constants: " ^ + space_implode ", " (Symtab.keys goal_const_tab))); + val rels = relevant_clauses max_new thy const_tab (pass_mark) + goal_const_tab (map (pair_consts_typs_axiom theory_const thy) axioms) + in + SFP.trace_msg (fn () => ("Total relevant: " ^ Int.toString (length rels))); + rels + end + else axioms; + +(***************************************************************) +(* Retrieving and filtering lemmas *) +(***************************************************************) + +(*** retrieve lemmas and filter them ***) + +(*Hashing to detect duplicate and variant clauses, e.g. from the [iff] attribute*) + +fun setinsert (x,s) = Symtab.update (x,()) s; + +(*Reject theorems with names like "List.filter.filter_list_def" or + "Accessible_Part.acc.defs", as these are definitions arising from packages.*) +fun is_package_def a = + let val names = Long_Name.explode a + in + length names > 2 andalso + not (hd names = "local") andalso + String.isSuffix "_def" a orelse String.isSuffix "_defs" a + end; + +(** a hash function from Term.term to int, and also a hash table **) +val xor_words = List.foldl Word.xorb 0w0; + +fun hashw_term ((Const(c,_)), w) = Polyhash.hashw_string (c,w) + | hashw_term ((Free(a,_)), w) = Polyhash.hashw_string (a,w) + | hashw_term ((Var(_,_)), w) = w + | hashw_term ((Bound i), w) = Polyhash.hashw_int(i,w) + | hashw_term ((Abs(_,_,t)), w) = hashw_term (t, w) + | hashw_term ((P$Q), w) = hashw_term (Q, (hashw_term (P, w))); + +fun hash_literal (Const("Not",_)$P) = Word.notb(hashw_term(P,0w0)) + | hash_literal P = hashw_term(P,0w0); + +fun hash_term t = Word.toIntX (xor_words (map hash_literal (HOLogic.disjuncts (strip_Trueprop t)))); + +fun equal_thm (thm1,thm2) = Term.aconv(prop_of thm1, prop_of thm2); + +exception HASH_CLAUSE; + +(*Create a hash table for clauses, of the given size*) +fun mk_clause_table n = + Polyhash.mkTable (hash_term o prop_of, equal_thm) + (n, HASH_CLAUSE); + +(*Use a hash table to eliminate duplicates from xs. Argument is a list of + (thm * (string * int)) tuples. The theorems are hashed into the table. 
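The hash function above deliberately ignores the names of schematic variables and the order of disjuncts, so renamed or reordered variants of a clause receive the same hash value; actual duplicates are then recognized via equal_thm.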
*) +fun make_unique xs = + let val ht = mk_clause_table 7000 + in + SFP.trace_msg (fn () => ("make_unique gets " ^ Int.toString (length xs) ^ " clauses")); + app (ignore o Polyhash.peekInsert ht) xs; + Polyhash.listItems ht + end; + +(*Remove existing axiom clauses from the conjecture clauses, as this can dramatically + boost an ATP's performance (for some reason)*) +fun subtract_cls c_clauses ax_clauses = + let val ht = mk_clause_table 2200 + fun known x = is_some (Polyhash.peek ht x) + in + app (ignore o Polyhash.peekInsert ht) ax_clauses; + filter (not o known) c_clauses + end; + +fun all_valid_thms ctxt = + let + val global_facts = PureThy.facts_of (ProofContext.theory_of ctxt); + val local_facts = ProofContext.facts_of ctxt; + val full_space = + Name_Space.merge (Facts.space_of global_facts, Facts.space_of local_facts); + + fun valid_facts facts = + (facts, []) |-> Facts.fold_static (fn (name, ths0) => + let + fun check_thms a = + (case try (ProofContext.get_thms ctxt) a of + NONE => false + | SOME ths1 => Thm.eq_thms (ths0, ths1)); + + val name1 = Facts.extern facts name; + val name2 = Name_Space.extern full_space name; + val ths = filter_out SFP.bad_for_atp ths0; + in + if Facts.is_concealed facts name orelse null ths orelse + run_blacklist_filter andalso is_package_def name then I + else + (case find_first check_thms [name1, name2, name] of + NONE => I + | SOME a => cons (a, ths)) + end); + in valid_facts global_facts @ valid_facts local_facts end; + +fun multi_name a th (n, pairs) = + (n + 1, (a ^ "(" ^ Int.toString n ^ ")", th) :: pairs); + +fun add_single_names (a, []) pairs = pairs + | add_single_names (a, [th]) pairs = (a, th) :: pairs + | add_single_names (a, ths) pairs = #2 (fold (multi_name a) ths (1, pairs)); + +(*Ignore blacklisted basenames*) +fun add_multi_names (a, ths) pairs = + if (Long_Name.base_name a) mem_string SFP.multi_base_blacklist then pairs + else add_single_names (a, ths) pairs; + +fun is_multi (a, ths) = length ths > 1 orelse String.isSuffix ".axioms" a; + +(*The single theorems go BEFORE the multiple ones. 
Blacklist is applied to all.*) +fun name_thm_pairs ctxt = + let + val (mults, singles) = List.partition is_multi (all_valid_thms ctxt) + val blacklist = + if run_blacklist_filter then No_ATPs.get ctxt |> map Thm.prop_of else [] + fun is_blacklisted (_, th) = member (op =) blacklist (Thm.prop_of th) + in + filter_out is_blacklisted + (fold add_single_names singles (fold add_multi_names mults [])) + end; + +fun check_named ("", th) = + (warning ("No name for theorem " ^ Display.string_of_thm_without_context th); false) + | check_named _ = true; + +fun get_all_lemmas ctxt = + let val included_thms = + tap (fn ths => SFP.trace_msg + (fn () => ("Including all " ^ Int.toString (length ths) ^ " theorems"))) + (name_thm_pairs ctxt) + in + filter check_named included_thms + end; + +(***************************************************************) +(* Type Classes Present in the Axiom or Conjecture Clauses *) +(***************************************************************) + +fun add_classes (sorts, cset) = List.foldl setinsert cset (flat sorts); + +(*Remove this trivial type class*) +fun delete_type cset = Symtab.delete_safe "HOL.type" cset; + +fun tvar_classes_of_terms ts = + let val sorts_list = map (map #2 o OldTerm.term_tvars) ts + in Symtab.keys (delete_type (List.foldl add_classes Symtab.empty sorts_list)) end; + +fun tfree_classes_of_terms ts = + let val sorts_list = map (map #2 o OldTerm.term_tfrees) ts + in Symtab.keys (delete_type (List.foldl add_classes Symtab.empty sorts_list)) end; + +(*fold type constructors*) +fun fold_type_consts f (Type (a, Ts)) x = fold (fold_type_consts f) Ts (f (a,x)) + | fold_type_consts _ _ x = x; + +val add_type_consts_in_type = fold_type_consts setinsert; + +(*Type constructors used to instantiate overloaded constants are the only ones needed.*) +fun add_type_consts_in_term thy = + let val const_typargs = Sign.const_typargs thy + fun add_tcs (Const cT) x = fold add_type_consts_in_type (const_typargs cT) x + | add_tcs (Abs (_, _, u)) x = add_tcs u x + | add_tcs (t $ u) x = add_tcs t (add_tcs u x) + | add_tcs _ x = x + in add_tcs end + +fun type_consts_of_terms thy ts = + Symtab.keys (fold (add_type_consts_in_term thy) ts Symtab.empty); + + +(***************************************************************) +(* ATP invocation methods setup *) +(***************************************************************) + +(*Ensures that no higher-order theorems "leak out"*) +fun restrict_to_logic thy true cls = filter (Meson.is_fol_term thy o prop_of o fst) cls + | restrict_to_logic thy false cls = cls; + +(**** Predicates to detect unwanted clauses (prolific or likely to cause unsoundness) ****) + +(** Too general means, positive equality literal with a variable X as one operand, + when X does not occur properly in the other operand. This rules out clearly + inconsistent clauses such as V=a|V=b, though it by no means guarantees soundness. 
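For example, a literal such as ?X = a counts as too general, since ?X does not occur in a, whereas ?X = insert a ?X does not, because ?X occurs properly on the right-hand side.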
**) + +fun occurs ix = + let fun occ(Var (jx,_)) = (ix=jx) + | occ(t1$t2) = occ t1 orelse occ t2 + | occ(Abs(_,_,t)) = occ t + | occ _ = false + in occ end; + +fun is_recordtype T = not (null (Record.dest_recTs T)); + +(*Unwanted equalities include + (1) those between a variable that does not properly occur in the second operand, + (2) those between a variable and a record, since these seem to be prolific "cases" thms +*) +fun too_general_eqterms (Var (ix,T), t) = not (occurs ix t) orelse is_recordtype T + | too_general_eqterms _ = false; + +fun too_general_equality (Const ("op =", _) $ x $ y) = + too_general_eqterms (x,y) orelse too_general_eqterms(y,x) + | too_general_equality _ = false; + +(* tautologous? *) +fun is_taut (Const ("Trueprop", _) $ Const ("True", _)) = true + | is_taut _ = false; + +fun has_typed_var tycons = exists_subterm + (fn Var (_, Type (a, _)) => member (op =) tycons a | _ => false); + +(*Clauses are forbidden to contain variables of these types. The typical reason is that + they lead to unsoundness. Note that "unit" satisfies numerous equations like ?X=(). + The resulting clause will have no type constraint, yielding false proofs. Even "bool" + leads to many unsound proofs, though (obviously) only for higher-order problems.*) +val unwanted_types = ["Product_Type.unit","bool"]; + +fun unwanted t = + is_taut t orelse has_typed_var unwanted_types t orelse + forall too_general_equality (HOLogic.disjuncts (strip_Trueprop t)); + +(*Clauses containing variables of type "unit" or "bool" are unlikely to be useful and + likely to lead to unsound proofs.*) +fun remove_unwanted_clauses cls = filter (not o unwanted o prop_of o fst) cls; + +fun isFO thy goal_cls = case linkup_logic_mode of + Auto => forall (Meson.is_fol_term thy) (map prop_of goal_cls) + | Fol => true + | Hol => false + +fun get_relevant max_new theory_const (ctxt, (chain_ths, th)) goal_cls = + let + val thy = ProofContext.theory_of ctxt + val isFO = isFO thy goal_cls + val included_cls = get_all_lemmas ctxt + |> SFP.cnf_rules_pairs thy |> make_unique + |> restrict_to_logic thy isFO + |> remove_unwanted_clauses + in + relevance_filter max_new theory_const thy included_cls (map prop_of goal_cls) + end; + +(* prepare for passing to writer, + create additional clauses based on the information from extra_cls *) +fun prepare_clauses dfg goal_cls chain_ths axcls extra_cls thy = + let + (* add chain thms *) + val chain_cls = + SFP.cnf_rules_pairs thy (filter check_named (map SFP.pairname chain_ths)) + val axcls = chain_cls @ axcls + val extra_cls = chain_cls @ extra_cls + val isFO = isFO thy goal_cls + val ccls = subtract_cls goal_cls extra_cls + val _ = app (fn th => SFP.trace_msg (fn _ => Display.string_of_thm_global thy th)) ccls + val ccltms = map prop_of ccls + and axtms = map (prop_of o #1) extra_cls + val subs = tfree_classes_of_terms ccltms + and supers = tvar_classes_of_terms axtms + and tycons = type_consts_of_terms thy (ccltms@axtms) + (*TFrees in conjecture clauses; TVars in axiom clauses*) + val conjectures = SHC.make_conjecture_clauses dfg thy ccls + val (_, extra_clauses) = ListPair.unzip (SHC.make_axiom_clauses dfg thy extra_cls) + val (clnames,axiom_clauses) = ListPair.unzip (SHC.make_axiom_clauses dfg thy axcls) + val helper_clauses = SHC.get_helper_clauses dfg thy isFO (conjectures, extra_cls, []) + val (supers',arity_clauses) = SFC.make_arity_clauses_dfg dfg thy tycons supers + val classrel_clauses = SFC.make_classrel_clauses thy subs supers' + in + (Vector.fromList clnames, + (conjectures, 
axiom_clauses, extra_clauses, helper_clauses, classrel_clauses, arity_clauses)) + end + +end; + diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,547 @@ +(* Title: HOL/Tools/Sledgehammer/sledgehammer_fact_preprocessor.ML + Author: Jia Meng, Cambridge University Computer Laboratory + +Transformation of axiom rules (elim/intro/etc) into CNF forms. +*) + +signature SLEDGEHAMMER_FACT_PREPROCESSOR = +sig + val trace: bool Unsynchronized.ref + val trace_msg: (unit -> string) -> unit + val cnf_axiom: theory -> thm -> thm list + val pairname: thm -> string * thm + val multi_base_blacklist: string list + val bad_for_atp: thm -> bool + val type_has_topsort: typ -> bool + val cnf_rules_pairs: theory -> (string * thm) list -> (thm * (string * int)) list + val neg_clausify: thm list -> thm list + val expand_defs_tac: thm -> tactic + val combinators: thm -> thm + val neg_conjecture_clauses: Proof.context -> thm -> int -> thm list * (string * typ) list + val suppress_endtheory: bool Unsynchronized.ref + (*for emergency use where endtheory causes problems*) + val setup: theory -> theory +end; + +structure Sledgehammer_Fact_Preprocessor : SLEDGEHAMMER_FACT_PREPROCESSOR = +struct + +val trace = Unsynchronized.ref false; +fun trace_msg msg = if ! trace then tracing (msg ()) else (); + +fun freeze_thm th = #1 (Drule.legacy_freeze_thaw th); + +val type_has_topsort = Term.exists_subtype + (fn TFree (_, []) => true + | TVar (_, []) => true + | _ => false); + + +(**** Transformation of Elimination Rules into First-Order Formulas****) + +val cfalse = cterm_of @{theory HOL} HOLogic.false_const; +val ctp_false = cterm_of @{theory HOL} (HOLogic.mk_Trueprop HOLogic.false_const); + +(*Converts an elim-rule into an equivalent theorem that does not have the + predicate variable. Leaves other theorems unchanged. We simply instantiate the + conclusion variable to False.*) +fun transform_elim th = + case concl_of th of (*conclusion variable*) + Const("Trueprop",_) $ (v as Var(_,Type("bool",[]))) => + Thm.instantiate ([], [(cterm_of @{theory HOL} v, cfalse)]) th + | v as Var(_, Type("prop",[])) => + Thm.instantiate ([], [(cterm_of @{theory HOL} v, ctp_false)]) th + | _ => th; + +(*To enforce single-threading*) +exception Clausify_failure of theory; + + +(**** SKOLEMIZATION BY INFERENCE (lcp) ****) + +fun rhs_extra_types lhsT rhs = + let val lhs_vars = Term.add_tfreesT lhsT [] + fun add_new_TFrees (TFree v) = + if member (op =) lhs_vars v then I else insert (op =) (TFree v) + | add_new_TFrees _ = I + val rhs_consts = fold_aterms (fn Const c => insert (op =) c | _ => I) rhs [] + in fold (#2 #> Term.fold_atyps add_new_TFrees) rhs_consts [] end; + +(*Traverse a theorem, declaring Skolem function definitions. String s is the suggested + prefix for the Skolem constant.*) +fun declare_skofuns s th = + let + val nref = Unsynchronized.ref 0 (* FIXME ??? 
*) + fun dec_sko (Const ("Ex",_) $ (xtp as Abs (_, T, p))) (axs, thy) = + (*Existential: declare a Skolem function, then insert into body and continue*) + let + val cname = "sko_" ^ s ^ "_" ^ Int.toString (Unsynchronized.inc nref) + val args0 = OldTerm.term_frees xtp (*get the formal parameter list*) + val Ts = map type_of args0 + val extraTs = rhs_extra_types (Ts ---> T) xtp + val argsx = map (fn T => Free (gensym "vsk", T)) extraTs + val args = argsx @ args0 + val cT = extraTs ---> Ts ---> T + val rhs = list_abs_free (map dest_Free args, HOLogic.choice_const T $ xtp) + (*Forms a lambda-abstraction over the formal parameters*) + val (c, thy') = + Sign.declare_const ((Binding.conceal (Binding.name cname), cT), NoSyn) thy + val cdef = cname ^ "_def" + val thy'' = + Theory.add_defs_i true false [(Binding.name cdef, Logic.mk_equals (c, rhs))] thy' + val ax = Thm.axiom thy'' (Sign.full_bname thy'' cdef) + in dec_sko (subst_bound (list_comb (c, args), p)) (ax :: axs, thy'') end + | dec_sko (Const ("All", _) $ (Abs (a, T, p))) thx = + (*Universal quant: insert a free variable into body and continue*) + let val fname = Name.variant (OldTerm.add_term_names (p, [])) a + in dec_sko (subst_bound (Free (fname, T), p)) thx end + | dec_sko (Const ("op &", _) $ p $ q) thx = dec_sko q (dec_sko p thx) + | dec_sko (Const ("op |", _) $ p $ q) thx = dec_sko q (dec_sko p thx) + | dec_sko (Const ("Trueprop", _) $ p) thx = dec_sko p thx + | dec_sko t thx = thx (*Do nothing otherwise*) + in fn thy => dec_sko (Thm.prop_of th) ([], thy) end; + +(*Traverse a theorem, accumulating Skolem function definitions.*) +fun assume_skofuns s th = + let val sko_count = Unsynchronized.ref 0 (* FIXME ??? *) + fun dec_sko (Const ("Ex",_) $ (xtp as Abs(_,T,p))) defs = + (*Existential: declare a Skolem function, then insert into body and continue*) + let val skos = map (#1 o Logic.dest_equals) defs (*existing sko fns*) + val args = subtract (op =) skos (OldTerm.term_frees xtp) (*the formal parameters*) + val Ts = map type_of args + val cT = Ts ---> T + val id = "sko_" ^ s ^ "_" ^ Int.toString (Unsynchronized.inc sko_count) + val c = Free (id, cT) + val rhs = list_abs_free (map dest_Free args, + HOLogic.choice_const T $ xtp) + (*Forms a lambda-abstraction over the formal parameters*) + val def = Logic.mk_equals (c, rhs) + in dec_sko (subst_bound (list_comb(c,args), p)) + (def :: defs) + end + | dec_sko (Const ("All",_) $ Abs (a, T, p)) defs = + (*Universal quant: insert a free variable into body and continue*) + let val fname = Name.variant (OldTerm.add_term_names (p,[])) a + in dec_sko (subst_bound (Free(fname,T), p)) defs end + | dec_sko (Const ("op &", _) $ p $ q) defs = dec_sko q (dec_sko p defs) + | dec_sko (Const ("op |", _) $ p $ q) defs = dec_sko q (dec_sko p defs) + | dec_sko (Const ("Trueprop", _) $ p) defs = dec_sko p defs + | dec_sko t defs = defs (*Do nothing otherwise*) + in dec_sko (prop_of th) [] end; + + +(**** REPLACING ABSTRACTIONS BY COMBINATORS ****) + +(*Returns the vars of a theorem*) +fun vars_of_thm th = + map (Thm.cterm_of (theory_of_thm th) o Var) (Thm.fold_terms Term.add_vars th []); + +(*Make a version of fun_cong with a given variable name*) +local + val fun_cong' = fun_cong RS asm_rl; (*renumber f, g to prevent clashes with (a,0)*) + val cx = hd (vars_of_thm fun_cong'); + val ty = typ_of (ctyp_of_term cx); + val thy = theory_of_thm fun_cong; + fun mkvar a = cterm_of thy (Var((a,0),ty)); +in +fun xfun_cong x = Thm.instantiate ([], [(cx, mkvar x)]) fun_cong' +end; + +(*Removes the lambdas from an 
equation of the form t = (%x. u). A non-negative n + serves as an upper bound on how many to remove.*) +fun strip_lambdas 0 th = th + | strip_lambdas n th = + case prop_of th of + _ $ (Const ("op =", _) $ _ $ Abs (x,_,_)) => + strip_lambdas (n-1) (freeze_thm (th RS xfun_cong x)) + | _ => th; + +val lambda_free = not o Term.has_abs; + +val [f_B,g_B] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_B})); +val [g_C,f_C] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_C})); +val [f_S,g_S] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_S})); + +(*FIXME: requires more use of cterm constructors*) +fun abstract ct = + let + val thy = theory_of_cterm ct + val Abs(x,_,body) = term_of ct + val Type("fun",[xT,bodyT]) = typ_of (ctyp_of_term ct) + val cxT = ctyp_of thy xT and cbodyT = ctyp_of thy bodyT + fun makeK() = instantiate' [SOME cxT, SOME cbodyT] [SOME (cterm_of thy body)] @{thm abs_K} + in + case body of + Const _ => makeK() + | Free _ => makeK() + | Var _ => makeK() (*though Var isn't expected*) + | Bound 0 => instantiate' [SOME cxT] [] @{thm abs_I} (*identity: I*) + | rator$rand => + if loose_bvar1 (rator,0) then (*C or S*) + if loose_bvar1 (rand,0) then (*S*) + let val crator = cterm_of thy (Abs(x,xT,rator)) + val crand = cterm_of thy (Abs(x,xT,rand)) + val abs_S' = cterm_instantiate [(f_S,crator),(g_S,crand)] @{thm abs_S} + val (_,rhs) = Thm.dest_equals (cprop_of abs_S') + in + Thm.transitive abs_S' (Conv.binop_conv abstract rhs) + end + else (*C*) + let val crator = cterm_of thy (Abs(x,xT,rator)) + val abs_C' = cterm_instantiate [(f_C,crator),(g_C,cterm_of thy rand)] @{thm abs_C} + val (_,rhs) = Thm.dest_equals (cprop_of abs_C') + in + Thm.transitive abs_C' (Conv.fun_conv (Conv.arg_conv abstract) rhs) + end + else if loose_bvar1 (rand,0) then (*B or eta*) + if rand = Bound 0 then eta_conversion ct + else (*B*) + let val crand = cterm_of thy (Abs(x,xT,rand)) + val crator = cterm_of thy rator + val abs_B' = cterm_instantiate [(f_B,crator),(g_B,crand)] @{thm abs_B} + val (_,rhs) = Thm.dest_equals (cprop_of abs_B') + in + Thm.transitive abs_B' (Conv.arg_conv abstract rhs) + end + else makeK() + | _ => error "abstract: Bad term" + end; + +(*Traverse a cterm, replacing lambda-abstractions by combinators and returning the result as an equality theorem.*) +fun combinators_aux ct = + if lambda_free (term_of ct) then reflexive ct + else + case term_of ct of + Abs _ => + let val (cv, cta) = Thm.dest_abs NONE ct + val (v, _) = dest_Free (term_of cv) + val u_th = combinators_aux cta + val cu = Thm.rhs_of u_th + val comb_eq = abstract (Thm.cabs cv cu) + in transitive (abstract_rule v cv u_th) comb_eq end + | _ $ _ => + let val (ct1, ct2) = Thm.dest_comb ct + in combination (combinators_aux ct1) (combinators_aux ct2) end; + +fun combinators th = + if lambda_free (prop_of th) then th + else + let val th = Drule.eta_contraction_rule th + val eqth = combinators_aux (cprop_of th) + in equal_elim eqth th end + handle THM (msg,_,_) => + (warning (cat_lines + ["Error in the combinator translation of " ^ Display.string_of_thm_without_context th, + " Exception message: " ^ msg]); + TrueI); (*A type variable of sort {} will cause the abstraction to fail.*) + +(*cterms are used throughout for efficiency*) +val cTrueprop = Thm.cterm_of @{theory HOL} HOLogic.Trueprop; + +(*cterm version of HOLogic.mk_Trueprop*) +fun c_mkTrueprop A = Thm.capply cTrueprop A; + +(*Given an abstraction over n variables, replace the bound variables by free + ones.
Return the body, along with the list of free variables.*) +fun c_variant_abs_multi (ct0, vars) = + let val (cv,ct) = Thm.dest_abs NONE ct0 + in c_variant_abs_multi (ct, cv::vars) end + handle CTERM _ => (ct0, rev vars); + +(*Given the definition of a Skolem function, return a theorem to replace + an existential formula by a use of that function. + Example: "EX x. x : A & x ~: B ==> sko A B : A & sko A B ~: B" [.] *) +fun skolem_of_def def = + let val (c,rhs) = Thm.dest_equals (cprop_of (freeze_thm def)) + val (ch, frees) = c_variant_abs_multi (rhs, []) + val (chilbert,cabs) = Thm.dest_comb ch + val thy = Thm.theory_of_cterm chilbert + val t = Thm.term_of chilbert + val T = case t of Const ("Hilbert_Choice.Eps", Type("fun",[_,T])) => T + | _ => raise THM ("skolem_of_def: expected Eps", 0, [def]) + val cex = Thm.cterm_of thy (HOLogic.exists_const T) + val ex_tm = c_mkTrueprop (Thm.capply cex cabs) + and conc = c_mkTrueprop (Drule.beta_conv cabs (Drule.list_comb(c,frees))); + fun tacf [prem] = rewrite_goals_tac [def] THEN rtac (prem RS @{thm someI_ex}) 1 + in Goal.prove_internal [ex_tm] conc tacf + |> forall_intr_list frees + |> Thm.forall_elim_vars 0 (*Introduce Vars, but don't discharge defs.*) + |> Thm.varifyT + end; + + +(*Converts an Isabelle theorem (intro, elim or simp format, even higher-order) into NNF.*) +fun to_nnf th ctxt0 = + let val th1 = th |> transform_elim |> zero_var_indexes + val ((_, [th2]), ctxt) = Variable.import true [th1] ctxt0 + val th3 = th2 + |> Conv.fconv_rule Object_Logic.atomize + |> Meson.make_nnf ctxt |> strip_lambdas ~1 + in (th3, ctxt) end; + +(*Generate Skolem functions for a theorem supplied in nnf*) +fun assume_skolem_of_def s th = + map (skolem_of_def o assume o (cterm_of (theory_of_thm th))) (assume_skofuns s th); + + +(*** Blacklisting (duplicated in "Sledgehammer_Fact_Filter"?) 
***) + +val max_lambda_nesting = 3; + +fun excessive_lambdas (f$t, k) = excessive_lambdas (f,k) orelse excessive_lambdas (t,k) + | excessive_lambdas (Abs(_,_,t), k) = k=0 orelse excessive_lambdas (t,k-1) + | excessive_lambdas _ = false; + +fun is_formula_type T = (T = HOLogic.boolT orelse T = propT); + +(*Don't count nested lambdas at the level of formulas, as they are quantifiers*) +fun excessive_lambdas_fm Ts (Abs(_,T,t)) = excessive_lambdas_fm (T::Ts) t + | excessive_lambdas_fm Ts t = + if is_formula_type (fastype_of1 (Ts, t)) + then exists (excessive_lambdas_fm Ts) (#2 (strip_comb t)) + else excessive_lambdas (t, max_lambda_nesting); + +(*The max apply_depth of any metis call in Metis_Examples (on 31-10-2007) was 11.*) +val max_apply_depth = 15; + +fun apply_depth (f$t) = Int.max (apply_depth f, apply_depth t + 1) + | apply_depth (Abs(_,_,t)) = apply_depth t + | apply_depth _ = 0; + +fun too_complex t = + apply_depth t > max_apply_depth orelse + Meson.too_many_clauses NONE t orelse + excessive_lambdas_fm [] t; + +fun is_strange_thm th = + case head_of (concl_of th) of + Const (a, _) => (a <> "Trueprop" andalso a <> "==") + | _ => false; + +fun bad_for_atp th = + too_complex (prop_of th) + orelse exists_type type_has_topsort (prop_of th) + orelse is_strange_thm th; + +val multi_base_blacklist = + ["defs","select_defs","update_defs","induct","inducts","split","splits","split_asm", + "cases","ext_cases"]; (* FIXME put other record thms here, or declare as "no_atp" *) + +(*Keep the full complexity of the original name*) +fun flatten_name s = space_implode "_X" (Long_Name.explode s); + +fun fake_name th = + if Thm.has_name_hint th then flatten_name (Thm.get_name_hint th) + else gensym "unknown_thm_"; + +(*Skolemize a named theorem, with Skolem functions as additional premises.*) +fun skolem_thm (s, th) = + if member (op =) multi_base_blacklist (Long_Name.base_name s) orelse bad_for_atp th then [] + else + let + val ctxt0 = Variable.thm_context th + val (nnfth, ctxt1) = to_nnf th ctxt0 + val (cnfs, ctxt2) = Meson.make_cnf (assume_skolem_of_def s nnfth) nnfth ctxt1 + in cnfs |> map combinators |> Variable.export ctxt2 ctxt0 |> Meson.finish_cnf end + handle THM _ => []; + +(*The cache prevents repeated clausification of a theorem, and also repeated declaration of + Skolem functions.*) +structure ThmCache = Theory_Data +( + type T = thm list Thmtab.table * unit Symtab.table; + val empty = (Thmtab.empty, Symtab.empty); + val extend = I; + fun merge ((cache1, seen1), (cache2, seen2)) : T = + (Thmtab.merge (K true) (cache1, cache2), Symtab.merge (K true) (seen1, seen2)); +); + +val lookup_cache = Thmtab.lookup o #1 o ThmCache.get; +val already_seen = Symtab.defined o #2 o ThmCache.get; + +val update_cache = ThmCache.map o apfst o Thmtab.update; +fun mark_seen name = ThmCache.map (apsnd (Symtab.update (name, ()))); + +(*Exported function to convert Isabelle theorems into axiom clauses*) +fun cnf_axiom thy th0 = + let val th = Thm.transfer thy th0 in + case lookup_cache thy th of + NONE => map Thm.close_derivation (skolem_thm (fake_name th, th)) + | SOME cls => cls + end; + + +(**** Rules from the context ****) + +fun pairname th = (Thm.get_name_hint th, th); + + +(**** Translate a set of theorems into CNF ****) + +fun pair_name_cls k (n, []) = [] + | pair_name_cls k (n, cls::clss) = (cls, (n,k)) :: pair_name_cls (k+1) (n, clss) + +fun cnf_rules_pairs_aux _ pairs [] = pairs + | cnf_rules_pairs_aux thy pairs ((name,th)::ths) = + let val pairs' = (pair_name_cls 0 (name, cnf_axiom thy th)) @ pairs + 
handle THM _ => pairs | + Sledgehammer_FOL_Clause.CLAUSE _ => pairs + in cnf_rules_pairs_aux thy pairs' ths end; + +(*The combination of rev and tail recursion preserves the original order*) +fun cnf_rules_pairs thy l = cnf_rules_pairs_aux thy [] (rev l); + + +(**** Convert all facts of the theory into clauses + (Sledgehammer_FOL_Clause.clause, or Sledgehammer_HOL_Clause.clause) ****) + +local + +fun skolem_def (name, th) thy = + let val ctxt0 = Variable.thm_context th in + (case try (to_nnf th) ctxt0 of + NONE => (NONE, thy) + | SOME (nnfth, ctxt1) => + let val (defs, thy') = declare_skofuns (flatten_name name) nnfth thy + in (SOME (th, ctxt0, ctxt1, nnfth, defs), thy') end) + end; + +fun skolem_cnfs (th, ctxt0, ctxt1, nnfth, defs) = + let + val (cnfs, ctxt2) = Meson.make_cnf (map skolem_of_def defs) nnfth ctxt1; + val cnfs' = cnfs + |> map combinators + |> Variable.export ctxt2 ctxt0 + |> Meson.finish_cnf + |> map Thm.close_derivation; + in (th, cnfs') end; + +in + +fun saturate_skolem_cache thy = + let + val facts = PureThy.facts_of thy; + val new_facts = (facts, []) |-> Facts.fold_static (fn (name, ths) => + if Facts.is_concealed facts name orelse already_seen thy name then I + else cons (name, ths)); + val new_thms = (new_facts, []) |-> fold (fn (name, ths) => + if member (op =) multi_base_blacklist (Long_Name.base_name name) then I + else fold_index (fn (i, th) => + if bad_for_atp th orelse is_some (lookup_cache thy th) then I + else cons (name ^ "_" ^ string_of_int (i + 1), Thm.transfer thy th)) ths); + in + if null new_facts then NONE + else + let + val (defs, thy') = thy + |> fold (mark_seen o #1) new_facts + |> fold_map skolem_def (sort_distinct (Thm.thm_ord o pairself snd) new_thms) + |>> map_filter I; + val cache_entries = Par_List.map skolem_cnfs defs; + in SOME (fold update_cache cache_entries thy') end + end; + +end; + +val suppress_endtheory = Unsynchronized.ref false; + +fun clause_cache_endtheory thy = + if ! suppress_endtheory then NONE + else saturate_skolem_cache thy; + + +(*The cache can be kept smaller by inspecting the prop of each thm. Can ignore all that are + lambda_free, but then the individual theory caches become much bigger.*) + + +(*** meson proof methods ***) + +(*Expand all new definitions of abstraction or Skolem functions in a proof state.*) +fun is_absko (Const ("==", _) $ Free (a,_) $ u) = String.isPrefix "sko_" a + | is_absko _ = false; + +fun is_okdef xs (Const ("==", _) $ t $ u) = (*Definition of Free, not in certain terms*) + is_Free t andalso not (member (op aconv) xs t) + | is_okdef _ _ = false + +(*This function tries to cope with open locales, which introduce hypotheses of the form + Free == t, conjecture clauses, which introduce various hypotheses, and also definitions + of sko_ functions. 
*) +fun expand_defs_tac st0 st = + let val hyps0 = #hyps (rep_thm st0) + val hyps = #hyps (crep_thm st) + val newhyps = filter_out (member (op aconv) hyps0 o Thm.term_of) hyps + val defs = filter (is_absko o Thm.term_of) newhyps + val remaining_hyps = filter_out (member (op aconv) (map Thm.term_of defs)) + (map Thm.term_of hyps) + val fixed = OldTerm.term_frees (concl_of st) @ + fold (union (op aconv)) (map OldTerm.term_frees remaining_hyps) [] + in Seq.of_list [Local_Defs.expand (filter (is_okdef fixed o Thm.term_of) defs) st] end; + + +fun meson_general_tac ctxt ths i st0 = + let + val thy = ProofContext.theory_of ctxt + val ctxt0 = Classical.put_claset HOL_cs ctxt + in (Meson.meson_tac ctxt0 (maps (cnf_axiom thy) ths) i THEN expand_defs_tac st0) st0 end; + +val meson_method_setup = + Method.setup @{binding meson} (Attrib.thms >> (fn ths => fn ctxt => + SIMPLE_METHOD' (CHANGED_PROP o meson_general_tac ctxt ths))) + "MESON resolution proof procedure"; + + +(*** Converting a subgoal into negated conjecture clauses. ***) + +fun neg_skolemize_tac ctxt = + EVERY' [rtac ccontr, Object_Logic.atomize_prems_tac, Meson.skolemize_tac ctxt]; + +val neg_clausify = Meson.make_clauses #> map combinators #> Meson.finish_cnf; + +fun neg_conjecture_clauses ctxt st0 n = + let + val st = Seq.hd (neg_skolemize_tac ctxt n st0) + val ({params, prems, ...}, _) = Subgoal.focus (Variable.set_body false ctxt) n st + in (neg_clausify prems, map (Term.dest_Free o Thm.term_of o #2) params) end; + +(*Conversion of a subgoal to conjecture clauses. Each clause has + leading !!-bound universal variables, to express generality. *) +fun neg_clausify_tac ctxt = + neg_skolemize_tac ctxt THEN' + SUBGOAL (fn (prop, i) => + let val ts = Logic.strip_assums_hyp prop in + EVERY' + [Subgoal.FOCUS + (fn {prems, ...} => + (Method.insert_tac + (map forall_intr_vars (neg_clausify prems)) i)) ctxt, + REPEAT_DETERM_N (length ts) o etac thin_rl] i + end); + +val neg_clausify_setup = + Method.setup @{binding neg_clausify} (Scan.succeed (SIMPLE_METHOD' o neg_clausify_tac)) + "conversion of goal to conjecture clauses"; + + +(** Attribute for converting a theorem into clauses **) + +val clausify_setup = + Attrib.setup @{binding clausify} + (Scan.lift OuterParse.nat >> + (fn i => Thm.rule_attribute (fn context => fn th => + Meson.make_meta_clause (nth (cnf_axiom (Context.theory_of context) th) i)))) + "conversion of theorem to clauses"; + + + +(** setup **) + +val setup = + meson_method_setup #> + neg_clausify_setup #> + clausify_setup #> + perhaps saturate_skolem_cache #> + Theory.at_end clause_cache_endtheory; + +end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/Sledgehammer/sledgehammer_fol_clause.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fol_clause.ML Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,534 @@ +(* Title: HOL/Tools/Sledgehammer/sledgehammer_fol_clause.ML + Author: Jia Meng, Cambridge University Computer Laboratory + +Storing/printing FOL clauses and arity clauses. Typed equality is +treated differently. + +FIXME: combine with res_hol_clause! 
+*) + +signature SLEDGEHAMMER_FOL_CLAUSE = +sig + val schematic_var_prefix: string + val fixed_var_prefix: string + val tvar_prefix: string + val tfree_prefix: string + val clause_prefix: string + val const_prefix: string + val tconst_prefix: string + val class_prefix: string + val union_all: ''a list list -> ''a list + val const_trans_table: string Symtab.table + val type_const_trans_table: string Symtab.table + val ascii_of: string -> string + val undo_ascii_of: string -> string + val paren_pack : string list -> string + val make_schematic_var : string * int -> string + val make_fixed_var : string -> string + val make_schematic_type_var : string * int -> string + val make_fixed_type_var : string -> string + val make_fixed_const : bool -> string -> string + val make_fixed_type_const : bool -> string -> string + val make_type_class : string -> string + datatype kind = Axiom | Conjecture + type axiom_name = string + datatype fol_type = + AtomV of string + | AtomF of string + | Comp of string * fol_type list + val string_of_fol_type : fol_type -> string + datatype type_literal = LTVar of string * string | LTFree of string * string + exception CLAUSE of string * term + val add_typs : typ list -> type_literal list + val get_tvar_strs: typ list -> string list + datatype arLit = + TConsLit of class * string * string list + | TVarLit of class * string + datatype arityClause = ArityClause of + {axiom_name: axiom_name, conclLit: arLit, premLits: arLit list} + datatype classrelClause = ClassrelClause of + {axiom_name: axiom_name, subclass: class, superclass: class} + val make_classrel_clauses: theory -> class list -> class list -> classrelClause list + val make_arity_clauses_dfg: bool -> theory -> string list -> class list -> class list * arityClause list + val make_arity_clauses: theory -> string list -> class list -> class list * arityClause list + val add_type_sort_preds: typ * int Symtab.table -> int Symtab.table + val add_classrelClause_preds : classrelClause * int Symtab.table -> int Symtab.table + val class_of_arityLit: arLit -> class + val add_arityClause_preds: arityClause * int Symtab.table -> int Symtab.table + val add_foltype_funcs: fol_type * int Symtab.table -> int Symtab.table + val add_arityClause_funcs: arityClause * int Symtab.table -> int Symtab.table + val init_functab: int Symtab.table + val dfg_sign: bool -> string -> string + val dfg_of_typeLit: bool -> type_literal -> string + val gen_dfg_cls: int * string * kind * string list * string list * string list -> string + val string_of_preds: (string * Int.int) list -> string + val string_of_funcs: (string * int) list -> string + val string_of_symbols: string -> string -> string + val string_of_start: string -> string + val string_of_descrip : string -> string + val dfg_tfree_clause : string -> string + val dfg_classrelClause: classrelClause -> string + val dfg_arity_clause: arityClause -> string + val tptp_sign: bool -> string -> string + val tptp_of_typeLit : bool -> type_literal -> string + val gen_tptp_cls : int * string * kind * string list * string list -> string + val tptp_tfree_clause : string -> string + val tptp_arity_clause : arityClause -> string + val tptp_classrelClause : classrelClause -> string +end + +structure Sledgehammer_FOL_Clause : SLEDGEHAMMER_FOL_CLAUSE = +struct + +val schematic_var_prefix = "V_"; +val fixed_var_prefix = "v_"; + +val tvar_prefix = "T_"; +val tfree_prefix = "t_"; + +val clause_prefix = "cls_"; +val arclause_prefix = "clsarity_" +val clrelclause_prefix = "clsrel_"; + +val const_prefix = "c_"; 
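+(*For illustration (derived from the prefixes above and the escaping functions below): the free variable x is rendered as "v_x", a schematic variable with index name (x, 1) as "V_x_1", the fixed type variable 'a as "t_a", and a constant such as Nat.Suc as "c_Nat_OSuc", since ascii_of encodes "." as "_O"; the constant "op =" is mapped directly to "equal" with no prefix.*)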
+val tconst_prefix = "tc_"; +val class_prefix = "class_"; + +fun union_all xss = List.foldl (uncurry (union (op =))) [] xss; + +(*Provide readable names for the more common symbolic functions*) +val const_trans_table = + Symtab.make [(@{const_name "op ="}, "equal"), + (@{const_name Orderings.less_eq}, "lessequals"), + (@{const_name "op &"}, "and"), + (@{const_name "op |"}, "or"), + (@{const_name "op -->"}, "implies"), + (@{const_name "op :"}, "in"), + ("Sledgehammer.fequal", "fequal"), + ("Sledgehammer.COMBI", "COMBI"), + ("Sledgehammer.COMBK", "COMBK"), + ("Sledgehammer.COMBB", "COMBB"), + ("Sledgehammer.COMBC", "COMBC"), + ("Sledgehammer.COMBS", "COMBS"), + ("Sledgehammer.COMBB'", "COMBB_e"), + ("Sledgehammer.COMBC'", "COMBC_e"), + ("Sledgehammer.COMBS'", "COMBS_e")]; + +val type_const_trans_table = + Symtab.make [("*", "prod"), + ("+", "sum"), + ("~=>", "map")]; + +(*Escaping of special characters. + Alphanumeric characters are left unchanged. + The character _ goes to __ + Characters in the range ASCII space to / go to _A to _P, respectively. + Other printing characters go to _nnn where nnn is the decimal ASCII code.*) +val A_minus_space = Char.ord #"A" - Char.ord #" "; + +fun stringN_of_int 0 _ = "" + | stringN_of_int k n = stringN_of_int (k-1) (n div 10) ^ Int.toString (n mod 10); + +fun ascii_of_c c = + if Char.isAlphaNum c then String.str c + else if c = #"_" then "__" + else if #" " <= c andalso c <= #"/" + then "_" ^ String.str (Char.chr (Char.ord c + A_minus_space)) + else if Char.isPrint c + then ("_" ^ stringN_of_int 3 (Char.ord c)) (*fixed width, in case more digits follow*) + else "" + +val ascii_of = String.translate ascii_of_c; + +(** Remove ASCII armouring from names in proof files **) + +(*We don't raise error exceptions because this code can run inside the watcher. + Also, the errors are "impossible" (hah!)*) +fun undo_ascii_aux rcs [] = String.implode(rev rcs) + | undo_ascii_aux rcs [#"_"] = undo_ascii_aux (#"_"::rcs) [] (*ERROR*) + (*Three types of _ escapes: __, _A to _P, _nnn*) + | undo_ascii_aux rcs (#"_" :: #"_" :: cs) = undo_ascii_aux (#"_"::rcs) cs + | undo_ascii_aux rcs (#"_" :: c :: cs) = + if #"A" <= c andalso c<= #"P" (*translation of #" " to #"/"*) + then undo_ascii_aux (Char.chr(Char.ord c - A_minus_space) :: rcs) cs + else + let val digits = List.take (c::cs, 3) handle Subscript => [] + in + case Int.fromString (String.implode digits) of + NONE => undo_ascii_aux (c:: #"_"::rcs) cs (*ERROR*) + | SOME n => undo_ascii_aux (Char.chr n :: rcs) (List.drop (cs, 2)) + end + | undo_ascii_aux rcs (c::cs) = undo_ascii_aux (c::rcs) cs; + +val undo_ascii_of = undo_ascii_aux [] o String.explode; + +(* convert a list of strings into one single string; surrounded by brackets *) +fun paren_pack [] = "" (*empty argument list*) + | paren_pack strings = "(" ^ commas strings ^ ")"; + +(*TSTP format uses (...) 
rather than the old [...]*) +fun tptp_pack strings = "(" ^ space_implode " | " strings ^ ")"; + + +(*Remove the initial ' character from a type variable, if it is present*) +fun trim_type_var s = + if s <> "" andalso String.sub(s,0) = #"'" then String.extract(s,1,NONE) + else error ("trim_type: Malformed type variable encountered: " ^ s); + +fun ascii_of_indexname (v,0) = ascii_of v + | ascii_of_indexname (v,i) = ascii_of v ^ "_" ^ Int.toString i; + +fun make_schematic_var v = schematic_var_prefix ^ (ascii_of_indexname v); +fun make_fixed_var x = fixed_var_prefix ^ (ascii_of x); + +fun make_schematic_type_var (x,i) = + tvar_prefix ^ (ascii_of_indexname (trim_type_var x,i)); +fun make_fixed_type_var x = tfree_prefix ^ (ascii_of (trim_type_var x)); + +(*HACK because SPASS truncates identifiers to 63 characters :-(( *) +(*32-bit hash, so we expect no collisions unless there are around 65536 long identifiers...*) +fun controlled_length dfg_format s = + if size s > 60 andalso dfg_format + then Word.toString (Polyhash.hashw_string(s,0w0)) + else s; + +fun lookup_const dfg c = + case Symtab.lookup const_trans_table c of + SOME c' => c' + | NONE => controlled_length dfg (ascii_of c); + +fun lookup_type_const dfg c = + case Symtab.lookup type_const_trans_table c of + SOME c' => c' + | NONE => controlled_length dfg (ascii_of c); + +fun make_fixed_const _ "op =" = "equal" (*MUST BE "equal" because it's built-in to ATPs*) + | make_fixed_const dfg c = const_prefix ^ lookup_const dfg c; + +fun make_fixed_type_const dfg c = tconst_prefix ^ lookup_type_const dfg c; + +fun make_type_class clas = class_prefix ^ ascii_of clas; + + +(***** definitions and functions for FOL clauses, for conversion to TPTP or DFG format. *****) + +datatype kind = Axiom | Conjecture; + +type axiom_name = string; + +(**** Isabelle FOL clauses ****) + +(*FIXME: give the constructors more sensible names*) +datatype fol_type = AtomV of string + | AtomF of string + | Comp of string * fol_type list; + +fun string_of_fol_type (AtomV x) = x + | string_of_fol_type (AtomF x) = x + | string_of_fol_type (Comp(tcon,tps)) = + tcon ^ (paren_pack (map string_of_fol_type tps)); + +(*First string is the type class; the second is a TVar or TFree*) +datatype type_literal = LTVar of string * string | LTFree of string * string; + +exception CLAUSE of string * term; + +fun atomic_type (TFree (a,_)) = AtomF(make_fixed_type_var a) + | atomic_type (TVar (v,_)) = AtomV(make_schematic_type_var v); + +(*Flatten a type to a fol_type while accumulating sort constraints on the TFrees and + TVars it contains.*) +fun type_of dfg (Type (a, Ts)) = + let val (folTyps, ts) = types_of dfg Ts + val t = make_fixed_type_const dfg a + in (Comp(t,folTyps), ts) end + | type_of dfg T = (atomic_type T, [T]) +and types_of dfg Ts = + let val (folTyps,ts) = ListPair.unzip (map (type_of dfg) Ts) + in (folTyps, union_all ts) end; + +(*Make literals for sorted type variables*) +fun sorts_on_typs_aux (_, []) = [] + | sorts_on_typs_aux ((x,i), s::ss) = + let val sorts = sorts_on_typs_aux ((x,i), ss) + in + if s = "HOL.type" then sorts + else if i = ~1 then LTFree(make_type_class s, make_fixed_type_var x) :: sorts + else LTVar(make_type_class s, make_schematic_type_var (x,i)) :: sorts + end; + +fun sorts_on_typs (TFree (a,s)) = sorts_on_typs_aux ((a,~1),s) + | sorts_on_typs (TVar (v,s)) = sorts_on_typs_aux (v,s); + +fun pred_of_sort (LTVar (s,ty)) = (s,1) + | pred_of_sort (LTFree (s,ty)) = (s,1) + +(*Given a list of sorted type variables, return a list of type literals.*) +fun
add_typs Ts = List.foldl (uncurry (union (op =))) [] (map sorts_on_typs Ts); + +(*The correct treatment of TFrees like 'a in lemmas (axiom clauses) is not clear. + * Ignoring them leads to unsound proofs, since we do nothing to ensure that 'a + in a lemma has the same sort as 'a in the conjecture. + * Deleting such clauses will lead to problems with locales in other use of local results + where 'a is fixed. Probably we should delete clauses unless the sorts agree. + * Currently we include a class constraint in the clause, exactly as with TVars. +*) + +(** make axiom and conjecture clauses. **) + +fun get_tvar_strs [] = [] + | get_tvar_strs ((TVar (indx,s))::Ts) = + insert (op =) (make_schematic_type_var indx) (get_tvar_strs Ts) + | get_tvar_strs((TFree _)::Ts) = get_tvar_strs Ts + + + +(**** Isabelle arities ****) + +exception ARCLAUSE of string; + +datatype arLit = TConsLit of class * string * string list + | TVarLit of class * string; + +datatype arityClause = + ArityClause of {axiom_name: axiom_name, + conclLit: arLit, + premLits: arLit list}; + + +fun gen_TVars 0 = [] + | gen_TVars n = ("T_" ^ Int.toString n) :: gen_TVars (n-1); + +fun pack_sort(_,[]) = [] + | pack_sort(tvar, "HOL.type"::srt) = pack_sort(tvar, srt) (*IGNORE sort "type"*) + | pack_sort(tvar, cls::srt) = (cls, tvar) :: pack_sort(tvar, srt); + +(*Arity of type constructor tcon :: (arg1,...,argN)res*) +fun make_axiom_arity_clause dfg (tcons, axiom_name, (cls,args)) = + let val tvars = gen_TVars (length args) + val tvars_srts = ListPair.zip (tvars,args) + in + ArityClause {axiom_name = axiom_name, + conclLit = TConsLit (cls, make_fixed_type_const dfg tcons, tvars), + premLits = map TVarLit (union_all(map pack_sort tvars_srts))} + end; + + +(**** Isabelle class relations ****) + +datatype classrelClause = + ClassrelClause of {axiom_name: axiom_name, + subclass: class, + superclass: class}; + +(*Generate all pairs (sub,super) such that sub is a proper subclass of super in theory thy.*) +fun class_pairs thy [] supers = [] + | class_pairs thy subs supers = + let val class_less = Sorts.class_less(Sign.classes_of thy) + fun add_super sub (super,pairs) = + if class_less (sub,super) then (sub,super)::pairs else pairs + fun add_supers (sub,pairs) = List.foldl (add_super sub) pairs supers + in List.foldl add_supers [] subs end; + +fun make_classrelClause (sub,super) = + ClassrelClause {axiom_name = clrelclause_prefix ^ ascii_of sub ^ "_" ^ ascii_of super, + subclass = make_type_class sub, + superclass = make_type_class super}; + +fun make_classrel_clauses thy subs supers = + map make_classrelClause (class_pairs thy subs supers); + + +(** Isabelle arities **) + +fun arity_clause dfg _ _ (tcons, []) = [] + | arity_clause dfg seen n (tcons, ("HOL.type",_)::ars) = (*ignore*) + arity_clause dfg seen n (tcons,ars) + | arity_clause dfg seen n (tcons, (ar as (class,_)) :: ars) = + if class mem_string seen then (*multiple arities for the same tycon, class pair*) + make_axiom_arity_clause dfg (tcons, lookup_type_const dfg tcons ^ "_" ^ class ^ "_" ^ Int.toString n, ar) :: + arity_clause dfg seen (n+1) (tcons,ars) + else + make_axiom_arity_clause dfg (tcons, lookup_type_const dfg tcons ^ "_" ^ class, ar) :: + arity_clause dfg (class::seen) n (tcons,ars) + +fun multi_arity_clause dfg [] = [] + | multi_arity_clause dfg ((tcons,ars) :: tc_arlists) = + arity_clause dfg [] 1 (tcons, ars) @ multi_arity_clause dfg tc_arlists + +(*Generate all pairs (tycon,class,sorts) such that tycon belongs to class in theory thy + provided its arguments have the 
corresponding sorts.*) +fun type_class_pairs thy tycons classes = + let val alg = Sign.classes_of thy + fun domain_sorts (tycon,class) = Sorts.mg_domain alg tycon [class] + fun add_class tycon (class,pairs) = + (class, domain_sorts(tycon,class))::pairs + handle Sorts.CLASS_ERROR _ => pairs + fun try_classes tycon = (tycon, List.foldl (add_class tycon) [] classes) + in map try_classes tycons end; + +(*Proving one (tycon, class) membership may require proving others, so iterate.*) +fun iter_type_class_pairs thy tycons [] = ([], []) + | iter_type_class_pairs thy tycons classes = + let val cpairs = type_class_pairs thy tycons classes + val newclasses = union_all (union_all (union_all (map (map #2 o #2) cpairs))) + |> subtract (op =) classes |> subtract (op =) HOLogic.typeS + val (classes', cpairs') = iter_type_class_pairs thy tycons newclasses + in (union (op =) classes' classes, union (op =) cpairs' cpairs) end; + +fun make_arity_clauses_dfg dfg thy tycons classes = + let val (classes', cpairs) = iter_type_class_pairs thy tycons classes + in (classes', multi_arity_clause dfg cpairs) end; +val make_arity_clauses = make_arity_clauses_dfg false; + +(**** Find occurrences of predicates in clauses ****) + +(*FIXME: multiple-arity checking doesn't work, as update_new is the wrong + function (it flags repeated declarations of a function, even with the same arity)*) + +fun update_many (tab, keypairs) = List.foldl (uncurry Symtab.update) tab keypairs; + +fun add_type_sort_preds (T, preds) = + update_many (preds, map pred_of_sort (sorts_on_typs T)); + +fun add_classrelClause_preds (ClassrelClause {subclass,superclass,...}, preds) = + Symtab.update (subclass,1) (Symtab.update (superclass,1) preds); + +fun class_of_arityLit (TConsLit (tclass, _, _)) = tclass + | class_of_arityLit (TVarLit (tclass, _)) = tclass; + +fun add_arityClause_preds (ArityClause {conclLit,premLits,...}, preds) = + let val classes = map (make_type_class o class_of_arityLit) (conclLit::premLits) + fun upd (class,preds) = Symtab.update (class,1) preds + in List.foldl upd preds classes end; + +(*** Find occurrences of functions in clauses ***) + +fun add_foltype_funcs (AtomV _, funcs) = funcs + | add_foltype_funcs (AtomF a, funcs) = Symtab.update (a,0) funcs + | add_foltype_funcs (Comp(a,tys), funcs) = + List.foldl add_foltype_funcs (Symtab.update (a, length tys) funcs) tys; + +(*TFrees are recorded as constants*) +fun add_type_sort_funcs (TVar _, funcs) = funcs + | add_type_sort_funcs (TFree (a, _), funcs) = + Symtab.update (make_fixed_type_var a, 0) funcs + +fun add_arityClause_funcs (ArityClause {conclLit,...}, funcs) = + let val TConsLit (_, tcons, tvars) = conclLit + in Symtab.update (tcons, length tvars) funcs end; + +(*This type can be overlooked because it is built-in...*) +val init_functab = Symtab.update ("tc_itself", 1) Symtab.empty; + + +(**** String-oriented operations ****) + +fun string_of_clausename (cls_id,ax_name) = + clause_prefix ^ ascii_of ax_name ^ "_" ^ Int.toString cls_id; + +fun string_of_type_clsname (cls_id,ax_name,idx) = + string_of_clausename (cls_id,ax_name) ^ "_tcs" ^ (Int.toString idx); + + +(**** Producing DFG files ****) + +(*Attach sign in DFG syntax: false means negate.*) +fun dfg_sign true s = s + | dfg_sign false s = "not(" ^ s ^ ")" + +fun dfg_of_typeLit pos (LTVar (s,ty)) = dfg_sign pos (s ^ "(" ^ ty ^ ")") + | dfg_of_typeLit pos (LTFree (s,ty)) = dfg_sign pos (s ^ "(" ^ ty ^ ")"); + +(*Enclose the clause body by quantifiers, if necessary*) +fun dfg_forall [] body = body + | dfg_forall vars body 
= "forall([" ^ commas vars ^ "],\n" ^ body ^ ")" + +fun gen_dfg_cls (cls_id, ax_name, Axiom, lits, tylits, vars) = + "clause( %(axiom)\n" ^ + dfg_forall vars ("or(" ^ commas (tylits@lits) ^ ")") ^ ",\n" ^ + string_of_clausename (cls_id,ax_name) ^ ").\n\n" + | gen_dfg_cls (cls_id, ax_name, Conjecture, lits, _, vars) = + "clause( %(negated_conjecture)\n" ^ + dfg_forall vars ("or(" ^ commas lits ^ ")") ^ ",\n" ^ + string_of_clausename (cls_id,ax_name) ^ ").\n\n"; + +fun string_of_arity (name, num) = "(" ^ name ^ "," ^ Int.toString num ^ ")" + +fun string_of_preds [] = "" + | string_of_preds preds = "predicates[" ^ commas(map string_of_arity preds) ^ "].\n"; + +fun string_of_funcs [] = "" + | string_of_funcs funcs = "functions[" ^ commas(map string_of_arity funcs) ^ "].\n" ; + +fun string_of_symbols predstr funcstr = + "list_of_symbols.\n" ^ predstr ^ funcstr ^ "end_of_list.\n\n"; + +fun string_of_start name = "begin_problem(" ^ name ^ ").\n\n"; + +fun string_of_descrip name = + "list_of_descriptions.\nname({*" ^ name ^ + "*}).\nauthor({*Isabelle*}).\nstatus(unknown).\ndescription({*auto-generated*}).\nend_of_list.\n\n" + +fun dfg_tfree_clause tfree_lit = + "clause( %(negated_conjecture)\n" ^ "or( " ^ tfree_lit ^ "),\n" ^ "tfree_tcs" ^ ").\n\n" + +fun dfg_of_arLit (TConsLit (c,t,args)) = + dfg_sign true (make_type_class c ^ "(" ^ t ^ paren_pack args ^ ")") + | dfg_of_arLit (TVarLit (c,str)) = + dfg_sign false (make_type_class c ^ "(" ^ str ^ ")") + +fun dfg_classrelLits sub sup = "not(" ^ sub ^ "(T)), " ^ sup ^ "(T)"; + +fun dfg_classrelClause (ClassrelClause {axiom_name,subclass,superclass,...}) = + "clause(forall([T],\nor( " ^ dfg_classrelLits subclass superclass ^ ")),\n" ^ + axiom_name ^ ").\n\n"; + +fun string_of_ar axiom_name = arclause_prefix ^ ascii_of axiom_name; + +fun dfg_arity_clause (ArityClause{axiom_name,conclLit,premLits,...}) = + let val TConsLit (_,_,tvars) = conclLit + val lits = map dfg_of_arLit (conclLit :: premLits) + in + "clause( %(axiom)\n" ^ + dfg_forall tvars ("or( " ^ commas lits ^ ")") ^ ",\n" ^ + string_of_ar axiom_name ^ ").\n\n" + end; + + +(**** Produce TPTP files ****) + +(*Attach sign in TPTP syntax: false means negate.*) +fun tptp_sign true s = s + | tptp_sign false s = "~ " ^ s + +fun tptp_of_typeLit pos (LTVar (s,ty)) = tptp_sign pos (s ^ "(" ^ ty ^ ")") + | tptp_of_typeLit pos (LTFree (s,ty)) = tptp_sign pos (s ^ "(" ^ ty ^ ")"); + +fun gen_tptp_cls (cls_id,ax_name,Axiom,lits,tylits) = + "cnf(" ^ string_of_clausename (cls_id,ax_name) ^ ",axiom," ^ + tptp_pack (tylits@lits) ^ ").\n" + | gen_tptp_cls (cls_id,ax_name,Conjecture,lits,_) = + "cnf(" ^ string_of_clausename (cls_id,ax_name) ^ ",negated_conjecture," ^ + tptp_pack lits ^ ").\n"; + +fun tptp_tfree_clause tfree_lit = + "cnf(" ^ "tfree_tcs," ^ "negated_conjecture" ^ "," ^ tptp_pack[tfree_lit] ^ ").\n"; + +fun tptp_of_arLit (TConsLit (c,t,args)) = + tptp_sign true (make_type_class c ^ "(" ^ t ^ paren_pack args ^ ")") + | tptp_of_arLit (TVarLit (c,str)) = + tptp_sign false (make_type_class c ^ "(" ^ str ^ ")") + +fun tptp_arity_clause (ArityClause{axiom_name,conclLit,premLits,...}) = + "cnf(" ^ string_of_ar axiom_name ^ ",axiom," ^ + tptp_pack (map tptp_of_arLit (conclLit :: premLits)) ^ ").\n"; + +fun tptp_classrelLits sub sup = + let val tvar = "(T)" + in tptp_pack [tptp_sign false (sub^tvar), tptp_sign true (sup^tvar)] end; + +fun tptp_classrelClause (ClassrelClause {axiom_name,subclass,superclass,...}) = + "cnf(" ^ axiom_name ^ ",axiom," ^ tptp_classrelLits subclass superclass ^ ").\n" + +end; diff 
-r b766aad9136d -r e31ec41a551b src/HOL/Tools/Sledgehammer/sledgehammer_hol_clause.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_hol_clause.ML Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,533 @@ +(* Title: HOL/Sledgehammer/sledgehammer_hol_clause.ML + Author: Jia Meng, NICTA + +FOL clauses translated from HOL formulae. +*) + +signature SLEDGEHAMMER_HOL_CLAUSE = +sig + val ext: thm + val comb_I: thm + val comb_K: thm + val comb_B: thm + val comb_C: thm + val comb_S: thm + val minimize_applies: bool + type axiom_name = string + type polarity = bool + type clause_id = int + datatype combterm = + CombConst of string * Sledgehammer_FOL_Clause.fol_type * Sledgehammer_FOL_Clause.fol_type list (*Const and Free*) + | CombVar of string * Sledgehammer_FOL_Clause.fol_type + | CombApp of combterm * combterm + datatype literal = Literal of polarity * combterm + datatype clause = Clause of {clause_id: clause_id, axiom_name: axiom_name, th: thm, + kind: Sledgehammer_FOL_Clause.kind,literals: literal list, ctypes_sorts: typ list} + val type_of_combterm: combterm -> Sledgehammer_FOL_Clause.fol_type + val strip_comb: combterm -> combterm * combterm list + val literals_of_term: theory -> term -> literal list * typ list + exception TOO_TRIVIAL + val make_conjecture_clauses: bool -> theory -> thm list -> clause list + val make_axiom_clauses: bool -> + theory -> + (thm * (axiom_name * clause_id)) list -> (axiom_name * clause) list + val get_helper_clauses: bool -> + theory -> + bool -> + clause list * (thm * (axiom_name * clause_id)) list * string list -> + clause list + val tptp_write_file: bool -> Path.T -> + clause list * clause list * clause list * clause list * + Sledgehammer_FOL_Clause.classrelClause list * Sledgehammer_FOL_Clause.arityClause list -> + int * int + val dfg_write_file: bool -> Path.T -> + clause list * clause list * clause list * clause list * + Sledgehammer_FOL_Clause.classrelClause list * Sledgehammer_FOL_Clause.arityClause list -> + int * int +end + +structure Sledgehammer_HOL_Clause : SLEDGEHAMMER_HOL_CLAUSE = +struct + +structure SFC = Sledgehammer_FOL_Clause; + +(* theorems for combinators and function extensionality *) +val ext = thm "HOL.ext"; +val comb_I = thm "Sledgehammer.COMBI_def"; +val comb_K = thm "Sledgehammer.COMBK_def"; +val comb_B = thm "Sledgehammer.COMBB_def"; +val comb_C = thm "Sledgehammer.COMBC_def"; +val comb_S = thm "Sledgehammer.COMBS_def"; +val fequal_imp_equal = thm "Sledgehammer.fequal_imp_equal"; +val equal_imp_fequal = thm "Sledgehammer.equal_imp_fequal"; + + +(* Parameter t_full below indicates that full type information is to be +exported *) + +(*If true, each function will be directly applied to as many arguments as possible, avoiding + use of the "apply" operator. Use of hBOOL is also minimized.*) +val minimize_applies = true; + +fun min_arity_of const_min_arity c = the_default 0 (Symtab.lookup const_min_arity c); + +(*True if the constant ever appears outside of the top-level position in literals. 
+ If false, the constant always receives all of its arguments and is used as a predicate.*) +fun needs_hBOOL const_needs_hBOOL c = + not minimize_applies orelse + the_default false (Symtab.lookup const_needs_hBOOL c); + + +(******************************************************) +(* data types for typed combinator expressions *) +(******************************************************) + +type axiom_name = string; +type polarity = bool; +type clause_id = int; + +datatype combterm = CombConst of string * SFC.fol_type * SFC.fol_type list (*Const and Free*) + | CombVar of string * SFC.fol_type + | CombApp of combterm * combterm + +datatype literal = Literal of polarity * combterm; + +datatype clause = + Clause of {clause_id: clause_id, + axiom_name: axiom_name, + th: thm, + kind: SFC.kind, + literals: literal list, + ctypes_sorts: typ list}; + + +(*********************************************************************) +(* convert a clause with type Term.term to a clause with type clause *) +(*********************************************************************) + +fun isFalse (Literal(pol, CombConst(c,_,_))) = + (pol andalso c = "c_False") orelse + (not pol andalso c = "c_True") + | isFalse _ = false; + +fun isTrue (Literal (pol, CombConst(c,_,_))) = + (pol andalso c = "c_True") orelse + (not pol andalso c = "c_False") + | isTrue _ = false; + +fun isTaut (Clause {literals,...}) = exists isTrue literals; + +fun type_of dfg (Type (a, Ts)) = + let val (folTypes,ts) = types_of dfg Ts + in (SFC.Comp(SFC.make_fixed_type_const dfg a, folTypes), ts) end + | type_of _ (tp as TFree (a, _)) = + (SFC.AtomF (SFC.make_fixed_type_var a), [tp]) + | type_of _ (tp as TVar (v, _)) = + (SFC.AtomV (SFC.make_schematic_type_var v), [tp]) +and types_of dfg Ts = + let val (folTyps,ts) = ListPair.unzip (map (type_of dfg) Ts) + in (folTyps, SFC.union_all ts) end; + +(* same as above, but no gathering of sort information *) +fun simp_type_of dfg (Type (a, Ts)) = + SFC.Comp(SFC.make_fixed_type_const dfg a, map (simp_type_of dfg) Ts) + | simp_type_of _ (TFree (a, _)) = SFC.AtomF(SFC.make_fixed_type_var a) + | simp_type_of _ (TVar (v, _)) = SFC.AtomV(SFC.make_schematic_type_var v); + + +fun const_type_of dfg thy (c,t) = + let val (tp,ts) = type_of dfg t + in (tp, ts, map (simp_type_of dfg) (Sign.const_typargs thy (c,t))) end; + +(* convert a Term.term (with combinators) into a combterm, also accumulate sort info *) +fun combterm_of dfg thy (Const(c,t)) = + let val (tp,ts,tvar_list) = const_type_of dfg thy (c,t) + val c' = CombConst(SFC.make_fixed_const dfg c, tp, tvar_list) + in (c',ts) end + | combterm_of dfg _ (Free(v,t)) = + let val (tp,ts) = type_of dfg t + val v' = CombConst(SFC.make_fixed_var v, tp, []) + in (v',ts) end + | combterm_of dfg _ (Var(v,t)) = + let val (tp,ts) = type_of dfg t + val v' = CombVar(SFC.make_schematic_var v,tp) + in (v',ts) end + | combterm_of dfg thy (P $ Q) = + let val (P',tsP) = combterm_of dfg thy P + val (Q',tsQ) = combterm_of dfg thy Q + in (CombApp(P',Q'), union (op =) tsP tsQ) end + | combterm_of _ _ (t as Abs _) = raise SFC.CLAUSE ("HOL CLAUSE", t); + +fun predicate_of dfg thy ((Const("Not",_) $ P), polarity) = predicate_of dfg thy (P, not polarity) + | predicate_of dfg thy (t,polarity) = (combterm_of dfg thy (Envir.eta_contract t), polarity); + +fun literals_of_term1 dfg thy args (Const("Trueprop",_) $ P) = literals_of_term1 dfg thy args P + | literals_of_term1 dfg thy args (Const("op |",_) $ P $ Q) = + literals_of_term1 dfg thy (literals_of_term1 dfg thy args P) Q + | 
literals_of_term1 dfg thy (lits,ts) P = + let val ((pred,ts'),pol) = predicate_of dfg thy (P,true) + in + (Literal(pol,pred)::lits, union (op =) ts ts') + end; + +fun literals_of_term_dfg dfg thy P = literals_of_term1 dfg thy ([],[]) P; +val literals_of_term = literals_of_term_dfg false; + +(* Problem too trivial for resolution (empty clause) *) +exception TOO_TRIVIAL; + +(* making axiom and conjecture clauses *) +fun make_clause dfg thy (clause_id,axiom_name,kind,th) = + let val (lits,ctypes_sorts) = literals_of_term_dfg dfg thy (prop_of th) + in + if forall isFalse lits + then raise TOO_TRIVIAL + else + Clause {clause_id = clause_id, axiom_name = axiom_name, th = th, kind = kind, + literals = lits, ctypes_sorts = ctypes_sorts} + end; + + +fun add_axiom_clause dfg thy ((th,(name,id)), pairs) = + let val cls = make_clause dfg thy (id, name, SFC.Axiom, th) + in + if isTaut cls then pairs else (name,cls)::pairs + end; + +fun make_axiom_clauses dfg thy = List.foldl (add_axiom_clause dfg thy) []; + +fun make_conjecture_clauses_aux _ _ _ [] = [] + | make_conjecture_clauses_aux dfg thy n (th::ths) = + make_clause dfg thy (n,"conjecture", SFC.Conjecture, th) :: + make_conjecture_clauses_aux dfg thy (n+1) ths; + +fun make_conjecture_clauses dfg thy = make_conjecture_clauses_aux dfg thy 0; + + +(**********************************************************************) +(* convert clause into ATP specific formats: *) +(* TPTP used by Vampire and E *) +(* DFG used by SPASS *) +(**********************************************************************) + +(*Result of a function type; no need to check that the argument type matches.*) +fun result_type (SFC.Comp ("tc_fun", [_, tp2])) = tp2 + | result_type _ = error "result_type" + +fun type_of_combterm (CombConst (_, tp, _)) = tp + | type_of_combterm (CombVar (_, tp)) = tp + | type_of_combterm (CombApp (t1, _)) = result_type (type_of_combterm t1); + +(*gets the head of a combinator application, along with the list of arguments*) +fun strip_comb u = + let fun stripc (CombApp(t,u), ts) = stripc (t, u::ts) + | stripc x = x + in stripc(u,[]) end; + +val type_wrapper = "ti"; + +fun head_needs_hBOOL const_needs_hBOOL (CombConst(c,_,_)) = needs_hBOOL const_needs_hBOOL c + | head_needs_hBOOL _ _ = true; + +fun wrap_type t_full (s, tp) = + if t_full then + type_wrapper ^ SFC.paren_pack [s, SFC.string_of_fol_type tp] + else s; + +fun apply ss = "hAPP" ^ SFC.paren_pack ss; + +fun rev_apply (v, []) = v + | rev_apply (v, arg::args) = apply [rev_apply (v, args), arg]; + +fun string_apply (v, args) = rev_apply (v, rev args); + +(*Apply an operator to the argument strings, using either the "apply" operator or + direct function application.*) +fun string_of_applic t_full cma (CombConst (c, _, tvars), args) = + let val c = if c = "equal" then "c_fequal" else c + val nargs = min_arity_of cma c + val args1 = List.take(args, nargs) + handle Subscript => error ("string_of_applic: " ^ c ^ " has arity " ^ + Int.toString nargs ^ " but is applied to " ^ + space_implode ", " args) + val args2 = List.drop(args, nargs) + val targs = if not t_full then map SFC.string_of_fol_type tvars + else [] + in + string_apply (c ^ SFC.paren_pack (args1@targs), args2) + end + | string_of_applic _ _ (CombVar (v, _), args) = string_apply (v, args) + | string_of_applic _ _ _ = error "string_of_applic"; + +fun wrap_type_if t_full cnh (head, s, tp) = + if head_needs_hBOOL cnh head then wrap_type t_full (s, tp) else s; + +fun string_of_combterm (params as (t_full, cma, cnh)) t = + let val (head, args) = 
strip_comb t + in wrap_type_if t_full cnh (head, + string_of_applic t_full cma (head, map (string_of_combterm (params)) args), + type_of_combterm t) + end; + +(*Boolean-valued terms are here converted to literals.*) +fun boolify params t = + "hBOOL" ^ SFC.paren_pack [string_of_combterm params t]; + +fun string_of_predicate (params as (_,_,cnh)) t = + case t of + (CombApp(CombApp(CombConst("equal",_,_), t1), t2)) => + (*DFG only: new TPTP prefers infix equality*) + ("equal" ^ SFC.paren_pack [string_of_combterm params t1, string_of_combterm params t2]) + | _ => + case #1 (strip_comb t) of + CombConst(c,_,_) => if needs_hBOOL cnh c then boolify params t else string_of_combterm params t + | _ => boolify params t; + + +(*** tptp format ***) + +fun tptp_of_equality params pol (t1,t2) = + let val eqop = if pol then " = " else " != " + in string_of_combterm params t1 ^ eqop ^ string_of_combterm params t2 end; + +fun tptp_literal params (Literal(pol, CombApp(CombApp(CombConst("equal",_,_), t1), t2))) = + tptp_of_equality params pol (t1,t2) + | tptp_literal params (Literal(pol,pred)) = + SFC.tptp_sign pol (string_of_predicate params pred); + +(*Given a clause, returns its literals paired with a list of literals concerning TFrees; + the latter should only occur in conjecture clauses.*) +fun tptp_type_lits params pos (Clause{literals, ctypes_sorts, ...}) = + (map (tptp_literal params) literals, + map (SFC.tptp_of_typeLit pos) (SFC.add_typs ctypes_sorts)); + +fun clause2tptp params (cls as Clause {axiom_name, clause_id, kind, ...}) = + let val (lits,tylits) = tptp_type_lits params (kind = SFC.Conjecture) cls + in + (SFC.gen_tptp_cls(clause_id,axiom_name,kind,lits,tylits), tylits) + end; + + +(*** dfg format ***) + +fun dfg_literal params (Literal(pol,pred)) = SFC.dfg_sign pol (string_of_predicate params pred); + +fun dfg_type_lits params pos (Clause{literals, ctypes_sorts, ...}) = + (map (dfg_literal params) literals, + map (SFC.dfg_of_typeLit pos) (SFC.add_typs ctypes_sorts)); + +fun get_uvars (CombConst _) vars = vars + | get_uvars (CombVar(v,_)) vars = (v::vars) + | get_uvars (CombApp(P,Q)) vars = get_uvars P (get_uvars Q vars); + +fun get_uvars_l (Literal(_,c)) = get_uvars c []; + +fun dfg_vars (Clause {literals,...}) = SFC.union_all (map get_uvars_l literals); + +fun clause2dfg params (cls as Clause{axiom_name,clause_id,kind,ctypes_sorts,...}) = + let val (lits,tylits) = dfg_type_lits params (kind = SFC.Conjecture) cls + val vars = dfg_vars cls + val tvars = SFC.get_tvar_strs ctypes_sorts + in + (SFC.gen_dfg_cls(clause_id, axiom_name, kind, lits, tylits, tvars@vars), tylits) + end; + + +(** For DFG format: accumulate function and predicate declarations **) + +fun addtypes tvars tab = List.foldl SFC.add_foltype_funcs tab tvars; + +fun add_decls (t_full, cma, cnh) (CombConst (c, _, tvars), (funcs, preds)) = + if c = "equal" then (addtypes tvars funcs, preds) + else + let val arity = min_arity_of cma c + val ntys = if not t_full then length tvars else 0 + val addit = Symtab.update(c, arity+ntys) + in + if needs_hBOOL cnh c then (addtypes tvars (addit funcs), preds) + else (addtypes tvars funcs, addit preds) + end + | add_decls _ (CombVar(_,ctp), (funcs,preds)) = + (SFC.add_foltype_funcs (ctp,funcs), preds) + | add_decls params (CombApp(P,Q),decls) = add_decls params (P,add_decls params (Q,decls)); + +fun add_literal_decls params (Literal(_,c), decls) = add_decls params (c,decls); + +fun add_clause_decls params (Clause {literals, ...}, decls) = + List.foldl (add_literal_decls params) decls literals + 
handle Symtab.DUP a => error ("function " ^ a ^ " has multiple arities") + +fun decls_of_clauses params clauses arity_clauses = + let val init_functab = Symtab.update (type_wrapper,2) (Symtab.update ("hAPP",2) SFC.init_functab) + val init_predtab = Symtab.update ("hBOOL",1) Symtab.empty + val (functab,predtab) = (List.foldl (add_clause_decls params) (init_functab, init_predtab) clauses) + in + (Symtab.dest (List.foldl SFC.add_arityClause_funcs functab arity_clauses), + Symtab.dest predtab) + end; + +fun add_clause_preds (Clause {ctypes_sorts, ...}, preds) = + List.foldl SFC.add_type_sort_preds preds ctypes_sorts + handle Symtab.DUP a => error ("predicate " ^ a ^ " has multiple arities") + +(*Higher-order clauses have only the predicates hBOOL and type classes.*) +fun preds_of_clauses clauses clsrel_clauses arity_clauses = + Symtab.dest + (List.foldl SFC.add_classrelClause_preds + (List.foldl SFC.add_arityClause_preds + (List.foldl add_clause_preds Symtab.empty clauses) + arity_clauses) + clsrel_clauses) + + +(**********************************************************************) +(* write clauses to files *) +(**********************************************************************) + +val init_counters = + Symtab.make [("c_COMBI", 0), ("c_COMBK", 0), + ("c_COMBB", 0), ("c_COMBC", 0), + ("c_COMBS", 0)]; + +fun count_combterm (CombConst (c, _, _), ct) = + (case Symtab.lookup ct c of NONE => ct (*no counter*) + | SOME n => Symtab.update (c,n+1) ct) + | count_combterm (CombVar _, ct) = ct + | count_combterm (CombApp(t1,t2), ct) = count_combterm(t1, count_combterm(t2, ct)); + +fun count_literal (Literal(_,t), ct) = count_combterm(t,ct); + +fun count_clause (Clause{literals,...}, ct) = List.foldl count_literal ct literals; + +fun count_user_clause user_lemmas (Clause{axiom_name,literals,...}, ct) = + if axiom_name mem_string user_lemmas then List.foldl count_literal ct literals + else ct; + +fun cnf_helper_thms thy = + Sledgehammer_Fact_Preprocessor.cnf_rules_pairs thy + o map Sledgehammer_Fact_Preprocessor.pairname + +fun get_helper_clauses dfg thy isFO (conjectures, axcls, user_lemmas) = + if isFO then [] (*first-order*) + else + let + val axclauses = map #2 (make_axiom_clauses dfg thy axcls) + val ct0 = List.foldl count_clause init_counters conjectures + val ct = List.foldl (count_user_clause user_lemmas) ct0 axclauses + fun needed c = the (Symtab.lookup ct c) > 0 + val IK = if needed "c_COMBI" orelse needed "c_COMBK" + then cnf_helper_thms thy [comb_I,comb_K] + else [] + val BC = if needed "c_COMBB" orelse needed "c_COMBC" + then cnf_helper_thms thy [comb_B,comb_C] + else [] + val S = if needed "c_COMBS" + then cnf_helper_thms thy [comb_S] + else [] + val other = cnf_helper_thms thy [fequal_imp_equal,equal_imp_fequal] + in + map #2 (make_axiom_clauses dfg thy (other @ IK @ BC @ S)) + end; + +(*Find the minimal arity of each function mentioned in the term. 
Also, note which uses + are not at top level, to see if hBOOL is needed.*) +fun count_constants_term toplev t (const_min_arity, const_needs_hBOOL) = + let val (head, args) = strip_comb t + val n = length args + val (const_min_arity, const_needs_hBOOL) = fold (count_constants_term false) args (const_min_arity, const_needs_hBOOL) + in + case head of + CombConst (a,_,_) => (*predicate or function version of "equal"?*) + let val a = if a="equal" andalso not toplev then "c_fequal" else a + val const_min_arity = Symtab.map_default (a, n) (Integer.min n) const_min_arity + in + if toplev then (const_min_arity, const_needs_hBOOL) + else (const_min_arity, Symtab.update (a,true) (const_needs_hBOOL)) + end + | _ => (const_min_arity, const_needs_hBOOL) + end; + +(*A literal is a top-level term*) +fun count_constants_lit (Literal (_,t)) (const_min_arity, const_needs_hBOOL) = + count_constants_term true t (const_min_arity, const_needs_hBOOL); + +fun count_constants_clause (Clause{literals,...}) (const_min_arity, const_needs_hBOOL) = + fold count_constants_lit literals (const_min_arity, const_needs_hBOOL); + +fun display_arity const_needs_hBOOL (c,n) = + Sledgehammer_Fact_Preprocessor.trace_msg (fn () => "Constant: " ^ c ^ + " arity:\t" ^ Int.toString n ^ + (if needs_hBOOL const_needs_hBOOL c then " needs hBOOL" else "")); + +fun count_constants (conjectures, _, extra_clauses, helper_clauses, _, _) = + if minimize_applies then + let val (const_min_arity, const_needs_hBOOL) = + fold count_constants_clause conjectures (Symtab.empty, Symtab.empty) + |> fold count_constants_clause extra_clauses + |> fold count_constants_clause helper_clauses + val _ = List.app (display_arity const_needs_hBOOL) (Symtab.dest (const_min_arity)) + in (const_min_arity, const_needs_hBOOL) end + else (Symtab.empty, Symtab.empty); + +(* tptp format *) + +fun tptp_write_file t_full file clauses = + let + val (conjectures, axclauses, _, helper_clauses, + classrel_clauses, arity_clauses) = clauses + val (cma, cnh) = count_constants clauses + val params = (t_full, cma, cnh) + val (tptp_clss,tfree_litss) = ListPair.unzip (map (clause2tptp params) conjectures) + val tfree_clss = map SFC.tptp_tfree_clause (List.foldl (uncurry (union (op =))) [] tfree_litss) + val _ = + File.write_list file ( + map (#1 o (clause2tptp params)) axclauses @ + tfree_clss @ + tptp_clss @ + map SFC.tptp_classrelClause classrel_clauses @ + map SFC.tptp_arity_clause arity_clauses @ + map (#1 o (clause2tptp params)) helper_clauses) + in (length axclauses + 1, length tfree_clss + length tptp_clss) + end; + + +(* dfg format *) + +fun dfg_write_file t_full file clauses = + let + val (conjectures, axclauses, _, helper_clauses, + classrel_clauses, arity_clauses) = clauses + val (cma, cnh) = count_constants clauses + val params = (t_full, cma, cnh) + val (dfg_clss, tfree_litss) = ListPair.unzip (map (clause2dfg params) conjectures) + and probname = Path.implode (Path.base file) + val axstrs = map (#1 o (clause2dfg params)) axclauses + val tfree_clss = map SFC.dfg_tfree_clause (SFC.union_all tfree_litss) + val helper_clauses_strs = map (#1 o (clause2dfg params)) helper_clauses + val (funcs,cl_preds) = decls_of_clauses params (helper_clauses @ conjectures @ axclauses) arity_clauses + and ty_preds = preds_of_clauses axclauses classrel_clauses arity_clauses + val _ = + File.write_list file ( + SFC.string_of_start probname :: + SFC.string_of_descrip probname :: + SFC.string_of_symbols (SFC.string_of_funcs funcs) + (SFC.string_of_preds (cl_preds @ ty_preds)) :: + 
"list_of_clauses(axioms,cnf).\n" :: + axstrs @ + map SFC.dfg_classrelClause classrel_clauses @ + map SFC.dfg_arity_clause arity_clauses @ + helper_clauses_strs @ + ["end_of_list.\n\nlist_of_clauses(conjectures,cnf).\n"] @ + tfree_clss @ + dfg_clss @ + ["end_of_list.\n\n", + (*VarWeight=3 helps the HO problems, probably by counteracting the presence of hAPP*) + "list_of_settings(SPASS).\n{*\nset_flag(VarWeight,3).\n*}\nend_of_list.\n\n", + "end_problem.\n"]) + + in (length axclauses + length classrel_clauses + length arity_clauses + + length helper_clauses + 1, length tfree_clss + length dfg_clss) + end; + +end; + diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML Thu Mar 18 13:59:20 2010 +0100 @@ -0,0 +1,588 @@ +(* Title: HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML + Author: Lawrence C Paulson and Claire Quigley, Cambridge University Computer Laboratory + +Transfer of proofs from external provers. +*) + +signature SLEDGEHAMMER_PROOF_RECONSTRUCT = +sig + val chained_hint: string + + val fix_sorts: sort Vartab.table -> term -> term + val invert_const: string -> string + val invert_type_const: string -> string + val num_typargs: theory -> string -> int + val make_tvar: string -> typ + val strip_prefix: string -> string -> string option + val setup: theory -> theory + (* extracting lemma list*) + val find_failure: string -> string option + val lemma_list: bool -> string -> + string * string vector * (int * int) * Proof.context * thm * int -> string * string list + (* structured proofs *) + val structured_proof: string -> + string * string vector * (int * int) * Proof.context * thm * int -> string * string list +end; + +structure Sledgehammer_Proof_Reconstruct : SLEDGEHAMMER_PROOF_RECONSTRUCT = +struct + +structure SFC = Sledgehammer_FOL_Clause + +val trace_path = Path.basic "atp_trace"; + +fun trace s = + if ! Sledgehammer_Fact_Preprocessor.trace then File.append (File.tmp_path trace_path) s + else (); + +fun string_of_thm ctxt = PrintMode.setmp [] (Display.string_of_thm ctxt); + +(*For generating structured proofs: keep every nth proof line*) +val (modulus, modulus_setup) = Attrib.config_int "sledgehammer_modulus" 1; + +(*Indicates whether to include sort information in generated proofs*) +val (recon_sorts, recon_sorts_setup) = Attrib.config_bool "sledgehammer_sorts" true; + +(*Indicated whether to generate full proofs or just lemma lists - now via setup of atps*) +(* val (full_proofs, full_proofs_setup) = Attrib.config_bool "sledgehammer_full" false; *) + +val setup = modulus_setup #> recon_sorts_setup; + +(**** PARSING OF TSTP FORMAT ****) + +(*Syntax trees, either termlist or formulae*) +datatype stree = Int of int | Br of string * stree list; + +fun atom x = Br(x,[]); + +fun scons (x,y) = Br("cons", [x,y]); +val listof = List.foldl scons (atom "nil"); + +(*Strings enclosed in single quotes, e.g. 
filenames*) +val quoted = $$"'" |-- Scan.repeat (~$$"'") --| $$"'" >> implode; + +(*Intended for $true and $false*) +fun tf s = "c_" ^ str (Char.toUpper (String.sub(s,0))) ^ String.extract(s,1,NONE); +val truefalse = $$"$" |-- Symbol.scan_id >> (atom o tf); + +(*Integer constants, typically proof line numbers*) +fun is_digit s = Char.isDigit (String.sub(s,0)); +val integer = Scan.many1 is_digit >> (the o Int.fromString o implode); + +(*Generalized FO terms, which include filenames, numbers, etc.*) +fun termlist x = (term ::: Scan.repeat ($$"," |-- term)) x +and term x = (quoted >> atom || integer>>Int || truefalse || + Symbol.scan_id -- Scan.optional ($$"(" |-- termlist --| $$")") [] >> Br || + $$"(" |-- term --| $$")" || + $$"[" |-- Scan.optional termlist [] --| $$"]" >> listof) x; + +fun negate t = Br("c_Not", [t]); +fun equate (t1,t2) = Br("c_equal", [t1,t2]); + +(*Apply equal or not-equal to a term*) +fun syn_equal (t, NONE) = t + | syn_equal (t1, SOME (NONE, t2)) = equate (t1,t2) + | syn_equal (t1, SOME (SOME _, t2)) = negate (equate (t1,t2)); + +(*Literals can involve negation, = and !=.*) +fun literal x = ($$"~" |-- literal >> negate || + (term -- Scan.option (Scan.option ($$"!") --| $$"=" -- term) >> syn_equal)) x; + +val literals = literal ::: Scan.repeat ($$"|" |-- literal); + +(*Clause: a list of literals separated by the disjunction sign*) +val clause = $$"(" |-- literals --| $$")" || Scan.single literal; + +val annotations = $$"," |-- term -- Scan.option ($$"," |-- termlist); + +(*<cnf_annotated> ::= cnf(<name>,<formula_role>,<cnf_formula><annotations>). + The <name> could be an identifier, but we assume integers.*) +val tstp_line = (Scan.this_string "cnf" -- $$"(") |-- + integer --| $$"," -- Symbol.scan_id --| $$"," -- + clause -- Scan.option annotations --| $$ ")"; + + +(**** INTERPRETATION OF TSTP SYNTAX TREES ****) + +exception STREE of stree; + +(*If string s has the prefix s1, return the result of deleting it.*) +fun strip_prefix s1 s = + if String.isPrefix s1 s + then SOME (SFC.undo_ascii_of (String.extract (s, size s1, NONE))) + else NONE; + +(*Invert the table of translations between Isabelle and ATPs*) +val type_const_trans_table_inv = + Symtab.make (map swap (Symtab.dest SFC.type_const_trans_table)); + +fun invert_type_const c = + case Symtab.lookup type_const_trans_table_inv c of + SOME c' => c' + | NONE => c; + +fun make_tvar b = TVar(("'" ^ b, 0), HOLogic.typeS); +fun make_var (b,T) = Var((b,0),T); + +(*Type variables are given the basic sort, HOL.type. 
Some will later be constrained + by information from type literals, or by type inference.*) +fun type_of_stree t = + case t of + Int _ => raise STREE t + | Br (a,ts) => + let val Ts = map type_of_stree ts + in + case strip_prefix SFC.tconst_prefix a of + SOME b => Type(invert_type_const b, Ts) + | NONE => + if not (null ts) then raise STREE t (*only tconsts have type arguments*) + else + case strip_prefix SFC.tfree_prefix a of + SOME b => TFree("'" ^ b, HOLogic.typeS) + | NONE => + case strip_prefix SFC.tvar_prefix a of + SOME b => make_tvar b + | NONE => make_tvar a (*Variable from the ATP, say X1*) + end; + +(*Invert the table of translations between Isabelle and ATPs*) +val const_trans_table_inv = + Symtab.update ("fequal", "op =") + (Symtab.make (map swap (Symtab.dest SFC.const_trans_table))); + +fun invert_const c = + case Symtab.lookup const_trans_table_inv c of + SOME c' => c' + | NONE => c; + +(*The number of type arguments of a constant, zero if it's monomorphic*) +fun num_typargs thy s = length (Sign.const_typargs thy (s, Sign.the_const_type thy s)); + +(*Generates a constant, given its type arguments*) +fun const_of thy (a,Ts) = Const(a, Sign.const_instance thy (a,Ts)); + +(*First-order translation. No types are known for variables. HOLogic.typeT should allow + them to be inferred.*) +fun term_of_stree args thy t = + case t of + Int _ => raise STREE t + | Br ("hBOOL",[t]) => term_of_stree [] thy t (*ignore hBOOL*) + | Br ("hAPP",[t,u]) => term_of_stree (u::args) thy t + | Br (a,ts) => + case strip_prefix SFC.const_prefix a of + SOME "equal" => + list_comb(Const ("op =", HOLogic.typeT), List.map (term_of_stree [] thy) ts) + | SOME b => + let val c = invert_const b + val nterms = length ts - num_typargs thy c + val us = List.map (term_of_stree [] thy) (List.take(ts,nterms) @ args) + (*Extra args from hAPP come AFTER any arguments given directly to the + constant.*) + val Ts = List.map type_of_stree (List.drop(ts,nterms)) + in list_comb(const_of thy (c, Ts), us) end + | NONE => (*a variable, not a constant*) + let val T = HOLogic.typeT + val opr = (*a Free variable is typically a Skolem function*) + case strip_prefix SFC.fixed_var_prefix a of + SOME b => Free(b,T) + | NONE => + case strip_prefix SFC.schematic_var_prefix a of + SOME b => make_var (b,T) + | NONE => make_var (a,T) (*Variable from the ATP, say X1*) + in list_comb (opr, List.map (term_of_stree [] thy) (ts@args)) end; + +(*Type class literal applied to a type. 
Returns triple of polarity, class, type.*) +fun constraint_of_stree pol (Br("c_Not",[t])) = constraint_of_stree (not pol) t + | constraint_of_stree pol t = case t of + Int _ => raise STREE t + | Br (a,ts) => + (case (strip_prefix SFC.class_prefix a, map type_of_stree ts) of + (SOME b, [T]) => (pol, b, T) + | _ => raise STREE t); + +(** Accumulate type constraints in a clause: negative type literals **) + +fun addix (key,z) = Vartab.map_default (key,[]) (cons z); + +fun add_constraint ((false, cl, TFree(a,_)), vt) = addix ((a,~1),cl) vt + | add_constraint ((false, cl, TVar(ix,_)), vt) = addix (ix,cl) vt + | add_constraint (_, vt) = vt; + +(*False literals (which E includes in its proofs) are deleted*) +val nofalses = filter (not o equal HOLogic.false_const); + +(*Final treatment of the list of "real" literals from a clause.*) +fun finish [] = HOLogic.true_const (*No "real" literals means only type information*) + | finish lits = + case nofalses lits of + [] => HOLogic.false_const (*The empty clause, since we started with real literals*) + | xs => foldr1 HOLogic.mk_disj (rev xs); + +(*Accumulate sort constraints in vt, with "real" literals in lits.*) +fun lits_of_strees _ (vt, lits) [] = (vt, finish lits) + | lits_of_strees ctxt (vt, lits) (t::ts) = + lits_of_strees ctxt (add_constraint (constraint_of_stree true t, vt), lits) ts + handle STREE _ => + lits_of_strees ctxt (vt, term_of_stree [] (ProofContext.theory_of ctxt) t :: lits) ts; + +(*Update TVars/TFrees with detected sort constraints.*) +fun fix_sorts vt = + let fun tysubst (Type (a, Ts)) = Type (a, map tysubst Ts) + | tysubst (TVar (xi, s)) = TVar (xi, the_default s (Vartab.lookup vt xi)) + | tysubst (TFree (x, s)) = TFree (x, the_default s (Vartab.lookup vt (x, ~1))) + fun tmsubst (Const (a, T)) = Const (a, tysubst T) + | tmsubst (Free (a, T)) = Free (a, tysubst T) + | tmsubst (Var (xi, T)) = Var (xi, tysubst T) + | tmsubst (t as Bound _) = t + | tmsubst (Abs (a, T, t)) = Abs (a, tysubst T, tmsubst t) + | tmsubst (t $ u) = tmsubst t $ tmsubst u; + in fn t => if Vartab.is_empty vt then t else tmsubst t end; + +(*Interpret a list of syntax trees as a clause, given by "real" literals and sort constraints. + vt0 holds the initial sort constraints, from the conjecture clauses.*) +fun clause_of_strees ctxt vt0 ts = + let val (vt, dt) = lits_of_strees ctxt (vt0,[]) ts in + singleton (Syntax.check_terms ctxt) (TypeInfer.constrain HOLogic.boolT (fix_sorts vt dt)) + end; + +fun gen_all_vars t = fold_rev Logic.all (OldTerm.term_vars t) t; + +fun ints_of_stree_aux (Int n, ns) = n::ns + | ints_of_stree_aux (Br(_,ts), ns) = List.foldl ints_of_stree_aux ns ts; + +fun ints_of_stree t = ints_of_stree_aux (t, []); + +fun decode_tstp vt0 (name, role, ts, annots) ctxt = + let val deps = case annots of NONE => [] | SOME (source,_) => ints_of_stree source + val cl = clause_of_strees ctxt vt0 ts + in ((name, role, cl, deps), fold Variable.declare_term (OldTerm.term_frees cl) ctxt) end; + +fun dest_tstp ((((name, role), ts), annots), chs) = + case chs of + "."::_ => (name, role, ts, annots) + | _ => error ("TSTP line not terminated by \".\": " ^ implode chs); + + +(** Global sort constraints on TFrees (from tfree_tcs) are positive unit clauses. 
**) + +fun add_tfree_constraint ((true, cl, TFree(a,_)), vt) = addix ((a,~1),cl) vt + | add_tfree_constraint (_, vt) = vt; + +fun tfree_constraints_of_clauses vt [] = vt + | tfree_constraints_of_clauses vt ([lit]::tss) = + (tfree_constraints_of_clauses (add_tfree_constraint (constraint_of_stree true lit, vt)) tss + handle STREE _ => (*not a positive type constraint: ignore*) + tfree_constraints_of_clauses vt tss) + | tfree_constraints_of_clauses vt (_::tss) = tfree_constraints_of_clauses vt tss; + + +(**** Translation of TSTP files to Isar Proofs ****) + +fun decode_tstp_list ctxt tuples = + let val vt0 = tfree_constraints_of_clauses Vartab.empty (map #3 tuples) + in #1 (fold_map (decode_tstp vt0) tuples ctxt) end; + +(** Finding a matching assumption. The literals may be permuted, and variable names + may disagree. We have to try all combinations of literals (quadratic!) and + match up the variable names consistently. **) + +fun strip_alls_aux n (Const("all",_)$Abs(a,T,t)) = + strip_alls_aux (n+1) (subst_bound (Var ((a,n), T), t)) + | strip_alls_aux _ t = t; + +val strip_alls = strip_alls_aux 0; + +exception MATCH_LITERAL; + +(*Ignore types: they are not to be trusted...*) +fun match_literal (t1$u1) (t2$u2) env = + match_literal t1 t2 (match_literal u1 u2 env) + | match_literal (Abs (_,_,t1)) (Abs (_,_,t2)) env = + match_literal t1 t2 env + | match_literal (Bound i1) (Bound i2) env = + if i1=i2 then env else raise MATCH_LITERAL + | match_literal (Const(a1,_)) (Const(a2,_)) env = + if a1=a2 then env else raise MATCH_LITERAL + | match_literal (Free(a1,_)) (Free(a2,_)) env = + if a1=a2 then env else raise MATCH_LITERAL + | match_literal (Var(ix1,_)) (Var(ix2,_)) env = insert (op =) (ix1,ix2) env + | match_literal _ _ _ = raise MATCH_LITERAL; + +(*Checking that all variable associations are unique. The list env contains no + repetitions, but does it contain say (x,y) and (y,y)? *) +fun good env = + let val (xs,ys) = ListPair.unzip env + in not (has_duplicates (op=) xs orelse has_duplicates (op=) ys) end; + +(*Match one list of literals against another, ignoring types and the order of + literals. Sorting is unreliable because we don't have types or variable names.*) +fun matches_aux _ [] [] = true + | matches_aux env (lit::lits) ts = + let fun match1 us [] = false + | match1 us (t::ts) = + let val env' = match_literal lit t env + in (good env' andalso matches_aux env' lits (us@ts)) orelse + match1 (t::us) ts + end + handle MATCH_LITERAL => match1 (t::us) ts + in match1 [] ts end; + +(*Is this length test useful?*) +fun matches (lits1,lits2) = + length lits1 = length lits2 andalso + matches_aux [] (map Envir.eta_contract lits1) (map Envir.eta_contract lits2); + +fun permuted_clause t = + let val lits = HOLogic.disjuncts t + fun perm [] = NONE + | perm (ctm::ctms) = + if matches (lits, HOLogic.disjuncts (HOLogic.dest_Trueprop (strip_alls ctm))) + then SOME ctm else perm ctms + in perm end; + +fun have_or_show "show " _ = "show \"" + | have_or_show have lname = have ^ lname ^ ": \"" + +(*ctms is a list of conjecture clauses as yielded by Isabelle. Those returned by the + ATP may have their literals reordered.*) +fun isar_lines ctxt ctms = + let val string_of = PrintMode.setmp [] (fn term => Syntax.string_of_term ctxt term) + val _ = trace ("\n\nisar_lines: start\n") + fun doline _ (lname, t, []) = (*No deps: it's a conjecture clause, with no proof.*) + (case permuted_clause t ctms of + SOME u => "assume " ^ lname ^ ": \"" ^ string_of u ^ "\"\n" + | NONE => "assume? 
" ^ lname ^ ": \"" ^ string_of t ^ "\"\n") (*no match!!*) + | doline have (lname, t, deps) = + have_or_show have lname ^ string_of (gen_all_vars (HOLogic.mk_Trueprop t)) ^ + "\"\n by (metis " ^ space_implode " " deps ^ ")\n" + fun dolines [(lname, t, deps)] = [doline "show " (lname, t, deps)] + | dolines ((lname, t, deps)::lines) = doline "have " (lname, t, deps) :: dolines lines + in setmp_CRITICAL show_sorts (Config.get ctxt recon_sorts) dolines end; + +fun notequal t (_,t',_) = not (t aconv t'); + +(*No "real" literals means only type information*) +fun eq_types t = t aconv HOLogic.true_const; + +fun replace_dep (old:int, new) dep = if dep=old then new else [dep]; + +fun replace_deps (old:int, new) (lno, t, deps) = + (lno, t, List.foldl (uncurry (union (op =))) [] (map (replace_dep (old, new)) deps)); + +(*Discard axioms; consolidate adjacent lines that prove the same clause, since they differ + only in type information.*) +fun add_prfline ((lno, "axiom", t, []), lines) = (*axioms are not proof lines*) + if eq_types t (*must be clsrel/clsarity: type information, so delete refs to it*) + then map (replace_deps (lno, [])) lines + else + (case take_prefix (notequal t) lines of + (_,[]) => lines (*no repetition of proof line*) + | (pre, (lno', _, _) :: post) => (*repetition: replace later line by earlier one*) + pre @ map (replace_deps (lno', [lno])) post) + | add_prfline ((lno, _, t, []), lines) = (*no deps: conjecture clause*) + (lno, t, []) :: lines + | add_prfline ((lno, _, t, deps), lines) = + if eq_types t then (lno, t, deps) :: lines + (*Type information will be deleted later; skip repetition test.*) + else (*FIXME: Doesn't this code risk conflating proofs involving different types??*) + case take_prefix (notequal t) lines of + (_,[]) => (lno, t, deps) :: lines (*no repetition of proof line*) + | (pre, (lno', t', _) :: post) => + (lno, t', deps) :: (*repetition: replace later line by earlier one*) + (pre @ map (replace_deps (lno', [lno])) post); + +(*Recursively delete empty lines (type information) from the proof.*) +fun add_nonnull_prfline ((lno, t, []), lines) = (*no dependencies, so a conjecture clause*) + if eq_types t (*must be type information, tfree_tcs, clsrel, clsarity: delete refs to it*) + then delete_dep lno lines + else (lno, t, []) :: lines + | add_nonnull_prfline ((lno, t, deps), lines) = (lno, t, deps) :: lines +and delete_dep lno lines = List.foldr add_nonnull_prfline [] (map (replace_deps (lno, [])) lines); + +fun bad_free (Free (a,_)) = String.isPrefix "sko_" a + | bad_free _ = false; + +(*TVars are forbidden in goals. Also, we don't want lines with <2 dependencies. + To further compress proofs, setting modulus:=n deletes every nth line, and nlines + counts the number of proof lines processed so far. + Deleted lines are replaced by their own dependencies. 
Note that the "add_nonnull_prfline" + phase may delete some dependencies, hence this phase comes later.*) +fun add_wanted_prfline ctxt ((lno, t, []), (nlines, lines)) = + (nlines, (lno, t, []) :: lines) (*conjecture clauses must be kept*) + | add_wanted_prfline ctxt ((lno, t, deps), (nlines, lines)) = + if eq_types t orelse not (null (Term.add_tvars t [])) orelse + exists_subterm bad_free t orelse + (not (null lines) andalso (*final line can't be deleted for these reasons*) + (length deps < 2 orelse nlines mod (Config.get ctxt modulus) <> 0)) + then (nlines+1, map (replace_deps (lno, deps)) lines) (*Delete line*) + else (nlines+1, (lno, t, deps) :: lines); + +(*Replace numeric proof lines by strings, either from thm_names or sequential line numbers*) +fun stringify_deps thm_names deps_map [] = [] + | stringify_deps thm_names deps_map ((lno, t, deps) :: lines) = + if lno <= Vector.length thm_names (*axiom*) + then (Vector.sub(thm_names,lno-1), t, []) :: stringify_deps thm_names deps_map lines + else let val lname = Int.toString (length deps_map) + fun fix lno = if lno <= Vector.length thm_names + then SOME(Vector.sub(thm_names,lno-1)) + else AList.lookup op= deps_map lno; + in (lname, t, map_filter fix (distinct (op=) deps)) :: + stringify_deps thm_names ((lno,lname)::deps_map) lines + end; + +val proofstart = "proof (neg_clausify)\n"; + +fun isar_header [] = proofstart + | isar_header ts = proofstart ^ "fix " ^ space_implode " " ts ^ "\n"; + +fun decode_tstp_file cnfs ctxt th sgno thm_names = + let val _ = trace "\ndecode_tstp_file: start\n" + val tuples = map (dest_tstp o tstp_line o explode) cnfs + val _ = trace (Int.toString (length tuples) ^ " tuples extracted\n") + val ctxt = ProofContext.set_mode ProofContext.mode_schematic ctxt + val raw_lines = List.foldr add_prfline [] (decode_tstp_list ctxt tuples) + val _ = trace (Int.toString (length raw_lines) ^ " raw_lines extracted\n") + val nonnull_lines = List.foldr add_nonnull_prfline [] raw_lines + val _ = trace (Int.toString (length nonnull_lines) ^ " nonnull_lines extracted\n") + val (_,lines) = List.foldr (add_wanted_prfline ctxt) (0,[]) nonnull_lines + val _ = trace (Int.toString (length lines) ^ " lines extracted\n") + val (ccls,fixes) = Sledgehammer_Fact_Preprocessor.neg_conjecture_clauses ctxt th sgno + val _ = trace (Int.toString (length ccls) ^ " conjecture clauses\n") + val ccls = map forall_intr_vars ccls + val _ = + if ! 
Sledgehammer_Fact_Preprocessor.trace then + app (fn th => trace ("\nccl: " ^ string_of_thm ctxt th)) ccls + else + () + val ilines = isar_lines ctxt (map prop_of ccls) (stringify_deps thm_names [] lines) + val _ = trace "\ndecode_tstp_file: finishing\n" + in + isar_header (map #1 fixes) ^ implode ilines ^ "qed\n" + end handle STREE _ => error "Could not extract proof (ATP output malformed?)"; + + +(*=== EXTRACTING PROOF-TEXT === *) + +val begin_proof_strings = ["# SZS output start CNFRefutation.", + "=========== Refutation ==========", + "Here is a proof"]; + +val end_proof_strings = ["# SZS output end CNFRefutation", + "======= End of refutation =======", + "Formulae used in the proof"]; + +fun get_proof_extract proof = + let + (*extracts the text between the first begin marker and the first end marker found in the proof*) + val (begin_string, end_string) = + (find_first (fn s => String.isSubstring s proof) begin_proof_strings, + find_first (fn s => String.isSubstring s proof) end_proof_strings) + in + if is_none begin_string orelse is_none end_string + then error "Could not extract proof (no substring indicating a proof)" + else proof |> first_field (the begin_string) |> the |> snd + |> first_field (the end_string) |> the |> fst + end; + +(* ==== CHECK IF PROOF OF E OR VAMPIRE WAS SUCCESSFUL === *) + +val failure_strings_E = ["SZS status: Satisfiable","SZS status Satisfiable", + "SZS status: ResourceOut","SZS status ResourceOut","# Cannot determine problem status"]; +val failure_strings_vampire = ["Satisfiability detected", "Refutation not found", "CANNOT PROVE"]; +val failure_strings_SPASS = ["SPASS beiseite: Completion found.", + "SPASS beiseite: Ran out of time.", "SPASS beiseite: Maximal number of loops exceeded."]; +val failure_strings_remote = ["Remote-script could not extract proof"]; +fun find_failure proof = + let val failures = + map_filter (fn s => if String.isSubstring s proof then SOME s else NONE) + (failure_strings_E @ failure_strings_vampire @ failure_strings_SPASS @ failure_strings_remote) + val correct = null failures andalso + exists (fn s => String.isSubstring s proof) begin_proof_strings andalso + exists (fn s => String.isSubstring s proof) end_proof_strings + in + if correct then NONE + else if null failures then SOME "Output of ATP not in proper format" + else SOME (hd failures) end; + +(* === EXTRACTING LEMMAS === *) +(* lines have the form "cnf(108, axiom, ...", +the number (108) has to be extracted*) +fun get_step_nums false proofextract = + let val toks = String.tokens (not o Char.isAlphaNum) + fun inputno ("cnf"::ntok::"axiom"::_) = Int.fromString ntok + | inputno ("cnf"::ntok::"negated"::"conjecture"::_) = Int.fromString ntok + | inputno _ = NONE + val lines = split_lines proofextract + in map_filter (inputno o toks) lines end +(*String contains multiple lines. We want those of the form + "253[0:Inp] et cetera..." + A list consisting of the first number in each line is returned. 
*) +| get_step_nums true proofextract = + let val toks = String.tokens (not o Char.isAlphaNum) + fun inputno (ntok::"0"::"Inp"::_) = Int.fromString ntok + | inputno _ = NONE + val lines = split_lines proofextract + in map_filter (inputno o toks) lines end + +(*extracting lemmas from tstp-output between the lines from above*) +fun extract_lemmas get_step_nums (proof, thm_names, conj_count, _, _, _) = + let + (* get the names of axioms from their numbers*) + fun get_axiom_names thm_names step_nums = + let + val last_axiom = Vector.length thm_names + fun is_axiom n = n <= last_axiom + fun is_conj n = n >= fst conj_count andalso n < fst conj_count + snd conj_count + fun getname i = Vector.sub(thm_names, i-1) + in + (sort_distinct string_ord (filter (fn x => x <> "??.unknown") + (map getname (filter is_axiom step_nums))), + exists is_conj step_nums) + end + val proofextract = get_proof_extract proof + in + get_axiom_names thm_names (get_step_nums proofextract) + end; + +(*Used to label theorems chained into the sledgehammer call*) +val chained_hint = "CHAINED"; +val nochained = filter_out (fn y => y = chained_hint) + +(* metis-command *) +fun metis_line [] = "apply metis" + | metis_line xs = "apply (metis " ^ space_implode " " xs ^ ")" + +(* atp_minimize [atp=] *) +fun minimize_line _ [] = "" + | minimize_line name lemmas = "For minimizing the number of lemmas try this command:\n" ^ + (Markup.markup Markup.sendback) ("atp_minimize [atp=" ^ name ^ "] " ^ + space_implode " " (nochained lemmas)) + +fun sendback_metis_nochained lemmas = + (Markup.markup Markup.sendback o metis_line) (nochained lemmas) + +fun lemma_list dfg name result = + let val (lemmas, used_conj) = extract_lemmas (get_step_nums dfg) result + in (sendback_metis_nochained lemmas ^ "\n" ^ minimize_line name lemmas ^ + (if used_conj then "" + else "\nWarning: Goal is provable because context is inconsistent."), + nochained lemmas) + end; + +(* === Extracting structured Isar-proof === *) +fun structured_proof name (result as (proof, thm_names, conj_count, ctxt, goal, subgoalno)) = + let + (*Could use split_lines, but it can return blank lines...*) + val lines = String.tokens (equal #"\n"); + val nospaces = String.translate (fn c => if Char.isSpace c then "" else str c) + val proofextract = get_proof_extract proof + val cnfs = filter (String.isPrefix "cnf(") (map nospaces (lines proofextract)) + val (one_line_proof, lemma_names) = lemma_list false name result + val structured = + if chained_hint mem_string (String.tokens (fn c => c = #" ") one_line_proof) then "" + else decode_tstp_file cnfs ctxt goal subgoalno thm_names + in + (one_line_proof ^ "\n\n" ^ Markup.markup Markup.sendback structured, lemma_names) +end + +end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/metis_tools.ML --- a/src/HOL/Tools/metis_tools.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,742 +0,0 @@ -(* Title: HOL/Tools/metis_tools.ML - Author: Kong W. Susanto and Lawrence C. Paulson, CU Computer Laboratory - Copyright Cambridge University 2007 - -HOL setup for the Metis prover. -*) - -signature METIS_TOOLS = -sig - val trace: bool Unsynchronized.ref - val type_lits: bool Config.T - val metis_tac: Proof.context -> thm list -> int -> tactic - val metisF_tac: Proof.context -> thm list -> int -> tactic - val metisFT_tac: Proof.context -> thm list -> int -> tactic - val setup: theory -> theory -end - -structure MetisTools: METIS_TOOLS = -struct - -val trace = Unsynchronized.ref false; -fun trace_msg msg = if ! 
trace then tracing (msg ()) else (); - -val (type_lits, type_lits_setup) = Attrib.config_bool "metis_type_lits" true; - -datatype mode = FO | HO | FT (*first-order, higher-order, fully-typed*) - -(* ------------------------------------------------------------------------- *) -(* Useful Theorems *) -(* ------------------------------------------------------------------------- *) -val EXCLUDED_MIDDLE = @{lemma "P ==> ~ P ==> False" by (rule notE)} -val REFL_THM = incr_indexes 2 @{lemma "t ~= t ==> False" by simp} -val subst_em = @{lemma "s = t ==> P s ==> ~ P t ==> False" by simp} -val ssubst_em = @{lemma "s = t ==> P t ==> ~ P s ==> False" by simp} - -(* ------------------------------------------------------------------------- *) -(* Useful Functions *) -(* ------------------------------------------------------------------------- *) - -(* match untyped terms*) -fun untyped_aconv (Const(a,_)) (Const(b,_)) = (a=b) - | untyped_aconv (Free(a,_)) (Free(b,_)) = (a=b) - | untyped_aconv (Var((a,_),_)) (Var((b,_),_)) = (a=b) (*the index is ignored!*) - | untyped_aconv (Bound i) (Bound j) = (i=j) - | untyped_aconv (Abs(a,_,t)) (Abs(b,_,u)) = (a=b) andalso untyped_aconv t u - | untyped_aconv (t1$t2) (u1$u2) = untyped_aconv t1 u1 andalso untyped_aconv t2 u2 - | untyped_aconv _ _ = false; - -(* Finding the relative location of an untyped term within a list of terms *) -fun get_index lit = - let val lit = Envir.eta_contract lit - fun get n [] = raise Empty - | get n (x::xs) = if untyped_aconv lit (Envir.eta_contract (HOLogic.dest_Trueprop x)) - then n else get (n+1) xs - in get 1 end; - -(* ------------------------------------------------------------------------- *) -(* HOL to FOL (Isabelle to Metis) *) -(* ------------------------------------------------------------------------- *) - -fun fn_isa_to_met "equal" = "=" - | fn_isa_to_met x = x; - -fun metis_lit b c args = (b, (c, args)); - -fun hol_type_to_fol (Res_Clause.AtomV x) = Metis.Term.Var x - | hol_type_to_fol (Res_Clause.AtomF x) = Metis.Term.Fn(x,[]) - | hol_type_to_fol (Res_Clause.Comp(tc,tps)) = Metis.Term.Fn(tc, map hol_type_to_fol tps); - -(*These two functions insert type literals before the real literals. 
That is the - opposite order from TPTP linkup, but maybe OK.*) - -fun hol_term_to_fol_FO tm = - case Res_HOL_Clause.strip_comb tm of - (Res_HOL_Clause.CombConst(c,_,tys), tms) => - let val tyargs = map hol_type_to_fol tys - val args = map hol_term_to_fol_FO tms - in Metis.Term.Fn (c, tyargs @ args) end - | (Res_HOL_Clause.CombVar(v,_), []) => Metis.Term.Var v - | _ => error "hol_term_to_fol_FO"; - -fun hol_term_to_fol_HO (Res_HOL_Clause.CombVar (a, _)) = Metis.Term.Var a - | hol_term_to_fol_HO (Res_HOL_Clause.CombConst (a, _, tylist)) = - Metis.Term.Fn (fn_isa_to_met a, map hol_type_to_fol tylist) - | hol_term_to_fol_HO (Res_HOL_Clause.CombApp (tm1, tm2)) = - Metis.Term.Fn (".", map hol_term_to_fol_HO [tm1, tm2]); - -(*The fully-typed translation, to avoid type errors*) -fun wrap_type (tm, ty) = Metis.Term.Fn("ti", [tm, hol_type_to_fol ty]); - -fun hol_term_to_fol_FT (Res_HOL_Clause.CombVar(a, ty)) = - wrap_type (Metis.Term.Var a, ty) - | hol_term_to_fol_FT (Res_HOL_Clause.CombConst(a, ty, _)) = - wrap_type (Metis.Term.Fn(fn_isa_to_met a, []), ty) - | hol_term_to_fol_FT (tm as Res_HOL_Clause.CombApp(tm1,tm2)) = - wrap_type (Metis.Term.Fn(".", map hol_term_to_fol_FT [tm1,tm2]), - Res_HOL_Clause.type_of_combterm tm); - -fun hol_literal_to_fol FO (Res_HOL_Clause.Literal (pol, tm)) = - let val (Res_HOL_Clause.CombConst(p,_,tys), tms) = Res_HOL_Clause.strip_comb tm - val tylits = if p = "equal" then [] else map hol_type_to_fol tys - val lits = map hol_term_to_fol_FO tms - in metis_lit pol (fn_isa_to_met p) (tylits @ lits) end - | hol_literal_to_fol HO (Res_HOL_Clause.Literal (pol, tm)) = - (case Res_HOL_Clause.strip_comb tm of - (Res_HOL_Clause.CombConst("equal",_,_), tms) => - metis_lit pol "=" (map hol_term_to_fol_HO tms) - | _ => metis_lit pol "{}" [hol_term_to_fol_HO tm]) (*hBOOL*) - | hol_literal_to_fol FT (Res_HOL_Clause.Literal (pol, tm)) = - (case Res_HOL_Clause.strip_comb tm of - (Res_HOL_Clause.CombConst("equal",_,_), tms) => - metis_lit pol "=" (map hol_term_to_fol_FT tms) - | _ => metis_lit pol "{}" [hol_term_to_fol_FT tm]) (*hBOOL*); - -fun literals_of_hol_thm thy mode t = - let val (lits, types_sorts) = Res_HOL_Clause.literals_of_term thy t - in (map (hol_literal_to_fol mode) lits, types_sorts) end; - -(*Sign should be "true" for conjecture type constraints, "false" for type lits in clauses.*) -fun metis_of_typeLit pos (Res_Clause.LTVar (s,x)) = metis_lit pos s [Metis.Term.Var x] - | metis_of_typeLit pos (Res_Clause.LTFree (s,x)) = metis_lit pos s [Metis.Term.Fn(x,[])]; - -fun default_sort _ (TVar _) = false - | default_sort ctxt (TFree (x, s)) = (s = the_default [] (Variable.def_sort ctxt (x, ~1))); - -fun metis_of_tfree tf = - Metis.Thm.axiom (Metis.LiteralSet.singleton (metis_of_typeLit true tf)); - -fun hol_thm_to_fol is_conjecture ctxt mode th = - let val thy = ProofContext.theory_of ctxt - val (mlits, types_sorts) = - (literals_of_hol_thm thy mode o HOLogic.dest_Trueprop o prop_of) th - in - if is_conjecture then - (Metis.Thm.axiom (Metis.LiteralSet.fromList mlits), Res_Clause.add_typs types_sorts) - else - let val tylits = Res_Clause.add_typs - (filter (not o default_sort ctxt) types_sorts) - val mtylits = if Config.get ctxt type_lits - then map (metis_of_typeLit false) tylits else [] - in - (Metis.Thm.axiom (Metis.LiteralSet.fromList(mtylits @ mlits)), []) - end - end; - -(* ARITY CLAUSE *) - -fun m_arity_cls (Res_Clause.TConsLit (c,t,args)) = - metis_lit true (Res_Clause.make_type_class c) [Metis.Term.Fn(t, map Metis.Term.Var args)] - | m_arity_cls (Res_Clause.TVarLit 
(c,str)) = - metis_lit false (Res_Clause.make_type_class c) [Metis.Term.Var str]; - -(*TrueI is returned as the Isabelle counterpart because there isn't any.*) -fun arity_cls (Res_Clause.ArityClause{conclLit,premLits,...}) = - (TrueI, - Metis.Thm.axiom (Metis.LiteralSet.fromList (map m_arity_cls (conclLit :: premLits)))); - -(* CLASSREL CLAUSE *) - -fun m_classrel_cls subclass superclass = - [metis_lit false subclass [Metis.Term.Var "T"], metis_lit true superclass [Metis.Term.Var "T"]]; - -fun classrel_cls (Res_Clause.ClassrelClause {subclass, superclass, ...}) = - (TrueI, Metis.Thm.axiom (Metis.LiteralSet.fromList (m_classrel_cls subclass superclass))); - -(* ------------------------------------------------------------------------- *) -(* FOL to HOL (Metis to Isabelle) *) -(* ------------------------------------------------------------------------- *) - -datatype term_or_type = Term of Term.term | Type of Term.typ; - -fun terms_of [] = [] - | terms_of (Term t :: tts) = t :: terms_of tts - | terms_of (Type _ :: tts) = terms_of tts; - -fun types_of [] = [] - | types_of (Term (Term.Var ((a,idx), _)) :: tts) = - if String.isPrefix "_" a then - (*Variable generated by Metis, which might have been a type variable.*) - TVar (("'" ^ a, idx), HOLogic.typeS) :: types_of tts - else types_of tts - | types_of (Term _ :: tts) = types_of tts - | types_of (Type T :: tts) = T :: types_of tts; - -fun apply_list rator nargs rands = - let val trands = terms_of rands - in if length trands = nargs then Term (list_comb(rator, trands)) - else error - ("apply_list: wrong number of arguments: " ^ Syntax.string_of_term_global Pure.thy rator ^ - " expected " ^ Int.toString nargs ^ - " received " ^ commas (map (Syntax.string_of_term_global Pure.thy) trands)) - end; - -fun infer_types ctxt = - Syntax.check_terms (ProofContext.set_mode ProofContext.mode_pattern ctxt); - -(*We use 1 rather than 0 because variable references in clauses may otherwise conflict - with variable constraints in the goal...at least, type inference often fails otherwise. 
- SEE ALSO axiom_inf below.*) -fun mk_var (w,T) = Term.Var((w,1), T); - -(*include the default sort, if available*) -fun mk_tfree ctxt w = - let val ww = "'" ^ w - in TFree(ww, the_default HOLogic.typeS (Variable.def_sort ctxt (ww, ~1))) end; - -(*Remove the "apply" operator from an HO term*) -fun strip_happ args (Metis.Term.Fn(".",[t,u])) = strip_happ (u::args) t - | strip_happ args x = (x, args); - -fun fol_type_to_isa _ (Metis.Term.Var v) = - (case Res_Reconstruct.strip_prefix Res_Clause.tvar_prefix v of - SOME w => Res_Reconstruct.make_tvar w - | NONE => Res_Reconstruct.make_tvar v) - | fol_type_to_isa ctxt (Metis.Term.Fn(x, tys)) = - (case Res_Reconstruct.strip_prefix Res_Clause.tconst_prefix x of - SOME tc => Term.Type (Res_Reconstruct.invert_type_const tc, map (fol_type_to_isa ctxt) tys) - | NONE => - case Res_Reconstruct.strip_prefix Res_Clause.tfree_prefix x of - SOME tf => mk_tfree ctxt tf - | NONE => error ("fol_type_to_isa: " ^ x)); - -(*Maps metis terms to isabelle terms*) -fun fol_term_to_hol_RAW ctxt fol_tm = - let val thy = ProofContext.theory_of ctxt - val _ = trace_msg (fn () => "fol_term_to_hol: " ^ Metis.Term.toString fol_tm) - fun tm_to_tt (Metis.Term.Var v) = - (case Res_Reconstruct.strip_prefix Res_Clause.tvar_prefix v of - SOME w => Type (Res_Reconstruct.make_tvar w) - | NONE => - case Res_Reconstruct.strip_prefix Res_Clause.schematic_var_prefix v of - SOME w => Term (mk_var (w, HOLogic.typeT)) - | NONE => Term (mk_var (v, HOLogic.typeT)) ) - (*Var from Metis with a name like _nnn; possibly a type variable*) - | tm_to_tt (Metis.Term.Fn ("{}", [arg])) = tm_to_tt arg (*hBOOL*) - | tm_to_tt (t as Metis.Term.Fn (".",_)) = - let val (rator,rands) = strip_happ [] t - in case rator of - Metis.Term.Fn(fname,ts) => applic_to_tt (fname, ts @ rands) - | _ => case tm_to_tt rator of - Term t => Term (list_comb(t, terms_of (map tm_to_tt rands))) - | _ => error "tm_to_tt: HO application" - end - | tm_to_tt (Metis.Term.Fn (fname, args)) = applic_to_tt (fname,args) - and applic_to_tt ("=",ts) = - Term (list_comb(Const ("op =", HOLogic.typeT), terms_of (map tm_to_tt ts))) - | applic_to_tt (a,ts) = - case Res_Reconstruct.strip_prefix Res_Clause.const_prefix a of - SOME b => - let val c = Res_Reconstruct.invert_const b - val ntypes = Res_Reconstruct.num_typargs thy c - val nterms = length ts - ntypes - val tts = map tm_to_tt ts - val tys = types_of (List.take(tts,ntypes)) - val ntyargs = Res_Reconstruct.num_typargs thy c - in if length tys = ntyargs then - apply_list (Const (c, dummyT)) nterms (List.drop(tts,ntypes)) - else error ("Constant " ^ c ^ " expects " ^ Int.toString ntyargs ^ - " but gets " ^ Int.toString (length tys) ^ - " type arguments\n" ^ - cat_lines (map (Syntax.string_of_typ ctxt) tys) ^ - " the terms are \n" ^ - cat_lines (map (Syntax.string_of_term ctxt) (terms_of tts))) - end - | NONE => (*Not a constant. Is it a type constructor?*) - case Res_Reconstruct.strip_prefix Res_Clause.tconst_prefix a of - SOME b => - Type (Term.Type (Res_Reconstruct.invert_type_const b, types_of (map tm_to_tt ts))) - | NONE => (*Maybe a TFree. Should then check that ts=[].*) - case Res_Reconstruct.strip_prefix Res_Clause.tfree_prefix a of - SOME b => Type (mk_tfree ctxt b) - | NONE => (*a fixed variable? 
They are Skolem functions.*) - case Res_Reconstruct.strip_prefix Res_Clause.fixed_var_prefix a of - SOME b => - let val opr = Term.Free(b, HOLogic.typeT) - in apply_list opr (length ts) (map tm_to_tt ts) end - | NONE => error ("unexpected metis function: " ^ a) - in case tm_to_tt fol_tm of Term t => t | _ => error "fol_tm_to_tt: Term expected" end; - -(*Maps fully-typed metis terms to isabelle terms*) -fun fol_term_to_hol_FT ctxt fol_tm = - let val _ = trace_msg (fn () => "fol_term_to_hol_FT: " ^ Metis.Term.toString fol_tm) - fun cvt (Metis.Term.Fn ("ti", [Metis.Term.Var v, _])) = - (case Res_Reconstruct.strip_prefix Res_Clause.schematic_var_prefix v of - SOME w => mk_var(w, dummyT) - | NONE => mk_var(v, dummyT)) - | cvt (Metis.Term.Fn ("ti", [Metis.Term.Fn ("=",[]), _])) = - Const ("op =", HOLogic.typeT) - | cvt (Metis.Term.Fn ("ti", [Metis.Term.Fn (x,[]), ty])) = - (case Res_Reconstruct.strip_prefix Res_Clause.const_prefix x of - SOME c => Const (Res_Reconstruct.invert_const c, dummyT) - | NONE => (*Not a constant. Is it a fixed variable??*) - case Res_Reconstruct.strip_prefix Res_Clause.fixed_var_prefix x of - SOME v => Free (v, fol_type_to_isa ctxt ty) - | NONE => error ("fol_term_to_hol_FT bad constant: " ^ x)) - | cvt (Metis.Term.Fn ("ti", [Metis.Term.Fn (".",[tm1,tm2]), _])) = - cvt tm1 $ cvt tm2 - | cvt (Metis.Term.Fn (".",[tm1,tm2])) = (*untyped application*) - cvt tm1 $ cvt tm2 - | cvt (Metis.Term.Fn ("{}", [arg])) = cvt arg (*hBOOL*) - | cvt (Metis.Term.Fn ("=", [tm1,tm2])) = - list_comb(Const ("op =", HOLogic.typeT), map cvt [tm1,tm2]) - | cvt (t as Metis.Term.Fn (x, [])) = - (case Res_Reconstruct.strip_prefix Res_Clause.const_prefix x of - SOME c => Const (Res_Reconstruct.invert_const c, dummyT) - | NONE => (*Not a constant. Is it a fixed variable??*) - case Res_Reconstruct.strip_prefix Res_Clause.fixed_var_prefix x of - SOME v => Free (v, dummyT) - | NONE => (trace_msg (fn () => "fol_term_to_hol_FT bad const: " ^ x); - fol_term_to_hol_RAW ctxt t)) - | cvt t = (trace_msg (fn () => "fol_term_to_hol_FT bad term: " ^ Metis.Term.toString t); - fol_term_to_hol_RAW ctxt t) - in cvt fol_tm end; - -fun fol_term_to_hol ctxt FO = fol_term_to_hol_RAW ctxt - | fol_term_to_hol ctxt HO = fol_term_to_hol_RAW ctxt - | fol_term_to_hol ctxt FT = fol_term_to_hol_FT ctxt; - -fun fol_terms_to_hol ctxt mode fol_tms = - let val ts = map (fol_term_to_hol ctxt mode) fol_tms - val _ = trace_msg (fn () => " calling type inference:") - val _ = app (fn t => trace_msg (fn () => Syntax.string_of_term ctxt t)) ts - val ts' = infer_types ctxt ts; - val _ = app (fn t => trace_msg - (fn () => " final term: " ^ Syntax.string_of_term ctxt t ^ - " of type " ^ Syntax.string_of_typ ctxt (type_of t))) - ts' - in ts' end; - -fun mk_not (Const ("Not", _) $ b) = b - | mk_not b = HOLogic.mk_not b; - -val metis_eq = Metis.Term.Fn ("=", []); - -(* ------------------------------------------------------------------------- *) -(* FOL step Inference Rules *) -(* ------------------------------------------------------------------------- *) - -(*for debugging only*) -fun print_thpair (fth,th) = - (trace_msg (fn () => "============================================="); - trace_msg (fn () => "Metis: " ^ Metis.Thm.toString fth); - trace_msg (fn () => "Isabelle: " ^ Display.string_of_thm_without_context th)); - -fun lookth thpairs (fth : Metis.Thm.thm) = - the (AList.lookup (uncurry Metis.Thm.equal) thpairs fth) - handle Option => error ("Failed to find a Metis theorem " ^ Metis.Thm.toString fth); - -fun is_TrueI th = 
Thm.eq_thm(TrueI,th); - -fun cterm_incr_types thy idx = cterm_of thy o (map_types (Logic.incr_tvar idx)); - -fun inst_excluded_middle thy i_atm = - let val th = EXCLUDED_MIDDLE - val [vx] = Term.add_vars (prop_of th) [] - val substs = [(cterm_of thy (Var vx), cterm_of thy i_atm)] - in cterm_instantiate substs th end; - -(* INFERENCE RULE: AXIOM *) -fun axiom_inf thpairs th = incr_indexes 1 (lookth thpairs th); - (*This causes variables to have an index of 1 by default. SEE ALSO mk_var above.*) - -(* INFERENCE RULE: ASSUME *) -fun assume_inf ctxt mode atm = - inst_excluded_middle - (ProofContext.theory_of ctxt) - (singleton (fol_terms_to_hol ctxt mode) (Metis.Term.Fn atm)); - -(* INFERENCE RULE: INSTANTIATE (Subst). Type instantiations are ignored. Trying to reconstruct - them admits new possibilities of errors, e.g. concerning sorts. Instead we try to arrange - that new TVars are distinct and that types can be inferred from terms.*) -fun inst_inf ctxt mode thpairs fsubst th = - let val thy = ProofContext.theory_of ctxt - val i_th = lookth thpairs th - val i_th_vars = Term.add_vars (prop_of i_th) [] - fun find_var x = the (List.find (fn ((a,_),_) => a=x) i_th_vars) - fun subst_translation (x,y) = - let val v = find_var x - val t = fol_term_to_hol ctxt mode y (*we call infer_types below*) - in SOME (cterm_of thy (Var v), t) end - handle Option => - (trace_msg (fn() => "List.find failed for the variable " ^ x ^ - " in " ^ Display.string_of_thm ctxt i_th); - NONE) - fun remove_typeinst (a, t) = - case Res_Reconstruct.strip_prefix Res_Clause.schematic_var_prefix a of - SOME b => SOME (b, t) - | NONE => case Res_Reconstruct.strip_prefix Res_Clause.tvar_prefix a of - SOME _ => NONE (*type instantiations are forbidden!*) - | NONE => SOME (a,t) (*internal Metis var?*) - val _ = trace_msg (fn () => " isa th: " ^ Display.string_of_thm ctxt i_th) - val substs = map_filter remove_typeinst (Metis.Subst.toList fsubst) - val (vars,rawtms) = ListPair.unzip (map_filter subst_translation substs) - val tms = infer_types ctxt rawtms; - val ctm_of = cterm_incr_types thy (1 + Thm.maxidx_of i_th) - val substs' = ListPair.zip (vars, map ctm_of tms) - val _ = trace_msg (fn () => - cat_lines ("subst_translations:" :: - (substs' |> map (fn (x, y) => - Syntax.string_of_term ctxt (term_of x) ^ " |-> " ^ - Syntax.string_of_term ctxt (term_of y))))); - in cterm_instantiate substs' i_th - handle THM (msg, _, _) => error ("metis error (inst_inf): " ^ msg) - end; - -(* INFERENCE RULE: RESOLVE *) - -(*Like RSN, but we rename apart only the type variables. Vars here typically have an index - of 1, and the use of RSN would increase this typically to 3. Instantiations of those Vars - could then fail. 
See comment on mk_var.*) -fun resolve_inc_tyvars(tha,i,thb) = - let val tha = Drule.incr_type_indexes (1 + Thm.maxidx_of thb) tha - val ths = Seq.list_of (Thm.bicompose false (false,tha,nprems_of tha) i thb) - in - case distinct Thm.eq_thm ths of - [th] => th - | _ => raise THM ("resolve_inc_tyvars: unique result expected", i, [tha,thb]) - end; - -fun resolve_inf ctxt mode thpairs atm th1 th2 = - let - val i_th1 = lookth thpairs th1 and i_th2 = lookth thpairs th2 - val _ = trace_msg (fn () => " isa th1 (pos): " ^ Display.string_of_thm ctxt i_th1) - val _ = trace_msg (fn () => " isa th2 (neg): " ^ Display.string_of_thm ctxt i_th2) - in - if is_TrueI i_th1 then i_th2 (*Trivial cases where one operand is type info*) - else if is_TrueI i_th2 then i_th1 - else - let - val i_atm = singleton (fol_terms_to_hol ctxt mode) (Metis.Term.Fn atm) - val _ = trace_msg (fn () => " atom: " ^ Syntax.string_of_term ctxt i_atm) - val prems_th1 = prems_of i_th1 - val prems_th2 = prems_of i_th2 - val index_th1 = get_index (mk_not i_atm) prems_th1 - handle Empty => error "Failed to find literal in th1" - val _ = trace_msg (fn () => " index_th1: " ^ Int.toString index_th1) - val index_th2 = get_index i_atm prems_th2 - handle Empty => error "Failed to find literal in th2" - val _ = trace_msg (fn () => " index_th2: " ^ Int.toString index_th2) - in resolve_inc_tyvars (Meson.select_literal index_th1 i_th1, index_th2, i_th2) end - end; - -(* INFERENCE RULE: REFL *) -val refl_x = cterm_of @{theory} (Var (hd (Term.add_vars (prop_of REFL_THM) []))); -val refl_idx = 1 + Thm.maxidx_of REFL_THM; - -fun refl_inf ctxt mode t = - let val thy = ProofContext.theory_of ctxt - val i_t = singleton (fol_terms_to_hol ctxt mode) t - val _ = trace_msg (fn () => " term: " ^ Syntax.string_of_term ctxt i_t) - val c_t = cterm_incr_types thy refl_idx i_t - in cterm_instantiate [(refl_x, c_t)] REFL_THM end; - -fun get_ty_arg_size _ (Const ("op =", _)) = 0 (*equality has no type arguments*) - | get_ty_arg_size thy (Const (c, _)) = (Res_Reconstruct.num_typargs thy c handle TYPE _ => 0) - | get_ty_arg_size _ _ = 0; - -(* INFERENCE RULE: EQUALITY *) -fun equality_inf ctxt mode (pos, atm) fp fr = - let val thy = ProofContext.theory_of ctxt - val m_tm = Metis.Term.Fn atm - val [i_atm,i_tm] = fol_terms_to_hol ctxt mode [m_tm, fr] - val _ = trace_msg (fn () => "sign of the literal: " ^ Bool.toString pos) - fun replace_item_list lx 0 (_::ls) = lx::ls - | replace_item_list lx i (l::ls) = l :: replace_item_list lx (i-1) ls - fun path_finder_FO tm [] = (tm, Term.Bound 0) - | path_finder_FO tm (p::ps) = - let val (tm1,args) = Term.strip_comb tm - val adjustment = get_ty_arg_size thy tm1 - val p' = if adjustment > p then p else p-adjustment - val tm_p = List.nth(args,p') - handle Subscript => error ("equality_inf: " ^ Int.toString p ^ " adj " ^ - Int.toString adjustment ^ " term " ^ Syntax.string_of_term ctxt tm) - val _ = trace_msg (fn () => "path_finder: " ^ Int.toString p ^ - " " ^ Syntax.string_of_term ctxt tm_p) - val (r,t) = path_finder_FO tm_p ps - in - (r, list_comb (tm1, replace_item_list t p' args)) - end - fun path_finder_HO tm [] = (tm, Term.Bound 0) - | path_finder_HO (t$u) (0::ps) = (fn(x,y) => (x, y$u)) (path_finder_HO t ps) - | path_finder_HO (t$u) (_::ps) = (fn(x,y) => (x, t$y)) (path_finder_HO u ps) - fun path_finder_FT tm [] _ = (tm, Term.Bound 0) - | path_finder_FT tm (0::ps) (Metis.Term.Fn ("ti", [t1, _])) = - path_finder_FT tm ps t1 - | path_finder_FT (t$u) (0::ps) (Metis.Term.Fn (".", [t1, _])) = - (fn(x,y) => (x, y$u)) 
(path_finder_FT t ps t1) - | path_finder_FT (t$u) (1::ps) (Metis.Term.Fn (".", [_, t2])) = - (fn(x,y) => (x, t$y)) (path_finder_FT u ps t2) - | path_finder_FT tm ps t = error ("equality_inf, path_finder_FT: path = " ^ - space_implode " " (map Int.toString ps) ^ - " isa-term: " ^ Syntax.string_of_term ctxt tm ^ - " fol-term: " ^ Metis.Term.toString t) - fun path_finder FO tm ps _ = path_finder_FO tm ps - | path_finder HO (tm as Const("op =",_) $ _ $ _) (p::ps) _ = - (*equality: not curried, as other predicates are*) - if p=0 then path_finder_HO tm (0::1::ps) (*select first operand*) - else path_finder_HO tm (p::ps) (*1 selects second operand*) - | path_finder HO tm (_ :: ps) (Metis.Term.Fn ("{}", [_])) = - path_finder_HO tm ps (*if not equality, ignore head to skip hBOOL*) - | path_finder FT (tm as Const("op =",_) $ _ $ _) (p::ps) - (Metis.Term.Fn ("=", [t1,t2])) = - (*equality: not curried, as other predicates are*) - if p=0 then path_finder_FT tm (0::1::ps) - (Metis.Term.Fn (".", [Metis.Term.Fn (".", [metis_eq,t1]), t2])) - (*select first operand*) - else path_finder_FT tm (p::ps) - (Metis.Term.Fn (".", [metis_eq,t2])) - (*1 selects second operand*) - | path_finder FT tm (_ :: ps) (Metis.Term.Fn ("{}", [t1])) = path_finder_FT tm ps t1 - (*if not equality, ignore head to skip the hBOOL predicate*) - | path_finder FT tm ps t = path_finder_FT tm ps t (*really an error case!*) - fun path_finder_lit ((nt as Term.Const ("Not", _)) $ tm_a) idx = - let val (tm, tm_rslt) = path_finder mode tm_a idx m_tm - in (tm, nt $ tm_rslt) end - | path_finder_lit tm_a idx = path_finder mode tm_a idx m_tm - val (tm_subst, body) = path_finder_lit i_atm fp - val tm_abs = Term.Abs("x", Term.type_of tm_subst, body) - val _ = trace_msg (fn () => "abstraction: " ^ Syntax.string_of_term ctxt tm_abs) - val _ = trace_msg (fn () => "i_tm: " ^ Syntax.string_of_term ctxt i_tm) - val _ = trace_msg (fn () => "located term: " ^ Syntax.string_of_term ctxt tm_subst) - val imax = maxidx_of_term (i_tm $ tm_abs $ tm_subst) (*ill typed but gives right max*) - val subst' = incr_indexes (imax+1) (if pos then subst_em else ssubst_em) - val _ = trace_msg (fn () => "subst' " ^ Display.string_of_thm ctxt subst') - val eq_terms = map (pairself (cterm_of thy)) - (ListPair.zip (OldTerm.term_vars (prop_of subst'), [tm_abs, tm_subst, i_tm])) - in cterm_instantiate eq_terms subst' end; - -val factor = Seq.hd o distinct_subgoals_tac; - -fun step _ _ thpairs (fol_th, Metis.Proof.Axiom _) = factor (axiom_inf thpairs fol_th) - | step ctxt mode _ (_, Metis.Proof.Assume f_atm) = assume_inf ctxt mode f_atm - | step ctxt mode thpairs (_, Metis.Proof.Subst (f_subst, f_th1)) = - factor (inst_inf ctxt mode thpairs f_subst f_th1) - | step ctxt mode thpairs (_, Metis.Proof.Resolve(f_atm, f_th1, f_th2)) = - factor (resolve_inf ctxt mode thpairs f_atm f_th1 f_th2) - | step ctxt mode _ (_, Metis.Proof.Refl f_tm) = refl_inf ctxt mode f_tm - | step ctxt mode _ (_, Metis.Proof.Equality (f_lit, f_p, f_r)) = - equality_inf ctxt mode f_lit f_p f_r; - -fun real_literal (_, (c, _)) = not (String.isPrefix Res_Clause.class_prefix c); - -fun translate _ _ thpairs [] = thpairs - | translate mode ctxt thpairs ((fol_th, inf) :: infpairs) = - let val _ = trace_msg (fn () => "=============================================") - val _ = trace_msg (fn () => "METIS THM: " ^ Metis.Thm.toString fol_th) - val _ = trace_msg (fn () => "INFERENCE: " ^ Metis.Proof.inferenceToString inf) - val th = Meson.flexflex_first_order (step ctxt mode thpairs (fol_th, inf)) - val _ = trace_msg (fn 
() => "ISABELLE THM: " ^ Display.string_of_thm ctxt th) - val _ = trace_msg (fn () => "=============================================") - val n_metis_lits = - length (filter real_literal (Metis.LiteralSet.toList (Metis.Thm.clause fol_th))) - in - if nprems_of th = n_metis_lits then () - else error "Metis: proof reconstruction has gone wrong"; - translate mode ctxt ((fol_th, th) :: thpairs) infpairs - end; - -(*Determining which axiom clauses are actually used*) -fun used_axioms axioms (th, Metis.Proof.Axiom _) = SOME (lookth axioms th) - | used_axioms _ _ = NONE; - -(* ------------------------------------------------------------------------- *) -(* Translation of HO Clauses *) -(* ------------------------------------------------------------------------- *) - -fun cnf_th thy th = hd (Res_Axioms.cnf_axiom thy th); - -val equal_imp_fequal' = cnf_th @{theory} @{thm equal_imp_fequal}; -val fequal_imp_equal' = cnf_th @{theory} @{thm fequal_imp_equal}; - -val comb_I = cnf_th @{theory} Res_HOL_Clause.comb_I; -val comb_K = cnf_th @{theory} Res_HOL_Clause.comb_K; -val comb_B = cnf_th @{theory} Res_HOL_Clause.comb_B; -val comb_C = cnf_th @{theory} Res_HOL_Clause.comb_C; -val comb_S = cnf_th @{theory} Res_HOL_Clause.comb_S; - -fun type_ext thy tms = - let val subs = Res_ATP.tfree_classes_of_terms tms - val supers = Res_ATP.tvar_classes_of_terms tms - and tycons = Res_ATP.type_consts_of_terms thy tms - val (supers', arity_clauses) = Res_Clause.make_arity_clauses thy tycons supers - val classrel_clauses = Res_Clause.make_classrel_clauses thy subs supers' - in map classrel_cls classrel_clauses @ map arity_cls arity_clauses - end; - -(* ------------------------------------------------------------------------- *) -(* Logic maps manage the interface between HOL and first-order logic. *) -(* ------------------------------------------------------------------------- *) - -type logic_map = - {axioms : (Metis.Thm.thm * thm) list, - tfrees : Res_Clause.type_literal list}; - -fun const_in_metis c (pred, tm_list) = - let - fun in_mterm (Metis.Term.Var _) = false - | in_mterm (Metis.Term.Fn (".", tm_list)) = exists in_mterm tm_list - | in_mterm (Metis.Term.Fn (nm, tm_list)) = c=nm orelse exists in_mterm tm_list - in c = pred orelse exists in_mterm tm_list end; - -(*Extract TFree constraints from context to include as conjecture clauses*) -fun init_tfrees ctxt = - let fun add ((a,i),s) Ts = if i = ~1 then TFree(a,s) :: Ts else Ts - in Res_Clause.add_typs (Vartab.fold add (#2 (Variable.constraints_of ctxt)) []) end; - -(*transform isabelle type / arity clause to metis clause *) -fun add_type_thm [] lmap = lmap - | add_type_thm ((ith, mth) :: cls) {axioms, tfrees} = - add_type_thm cls {axioms = (mth, ith) :: axioms, - tfrees = tfrees} - -(*Insert non-logical axioms corresponding to all accumulated TFrees*) -fun add_tfrees {axioms, tfrees} : logic_map = - {axioms = (map (fn tf => (metis_of_tfree tf, TrueI)) (distinct op= tfrees)) @ axioms, - tfrees = tfrees}; - -fun string_of_mode FO = "FO" - | string_of_mode HO = "HO" - | string_of_mode FT = "FT" - -(* Function to generate metis clauses, including comb and type clauses *) -fun build_map mode0 ctxt cls ths = - let val thy = ProofContext.theory_of ctxt - (*The modes FO and FT are sticky. 
HO can be downgraded to FO.*) - fun set_mode FO = FO - | set_mode HO = if forall (Meson.is_fol_term thy o prop_of) (cls@ths) then FO else HO - | set_mode FT = FT - val mode = set_mode mode0 - (*transform isabelle clause to metis clause *) - fun add_thm is_conjecture ith {axioms, tfrees} : logic_map = - let val (mth, tfree_lits) = hol_thm_to_fol is_conjecture ctxt mode ith - in - {axioms = (mth, Meson.make_meta_clause ith) :: axioms, - tfrees = union (op =) tfree_lits tfrees} - end; - val lmap0 = fold (add_thm true) cls {axioms = [], tfrees = init_tfrees ctxt} - val lmap = fold (add_thm false) ths (add_tfrees lmap0) - val clause_lists = map (Metis.Thm.clause o #1) (#axioms lmap) - fun used c = exists (Metis.LiteralSet.exists (const_in_metis c o #2)) clause_lists - (*Now check for the existence of certain combinators*) - val thI = if used "c_COMBI" then [comb_I] else [] - val thK = if used "c_COMBK" then [comb_K] else [] - val thB = if used "c_COMBB" then [comb_B] else [] - val thC = if used "c_COMBC" then [comb_C] else [] - val thS = if used "c_COMBS" then [comb_S] else [] - val thEQ = if used "c_fequal" then [fequal_imp_equal', equal_imp_fequal'] else [] - val lmap' = if mode=FO then lmap - else fold (add_thm false) (thEQ @ thS @ thC @ thB @ thK @ thI) lmap - in - (mode, add_type_thm (type_ext thy (map prop_of (cls @ ths))) lmap') - end; - -fun refute cls = - Metis.Resolution.loop (Metis.Resolution.new Metis.Resolution.default cls); - -fun is_false t = t aconv (HOLogic.mk_Trueprop HOLogic.false_const); - -fun common_thm ths1 ths2 = exists (member Thm.eq_thm ths1) (map Meson.make_meta_clause ths2); - -exception METIS of string; - -(* Main function to start metis prove and reconstruction *) -fun FOL_SOLVE mode ctxt cls ths0 = - let val thy = ProofContext.theory_of ctxt - val th_cls_pairs = map (fn th => (Thm.get_name_hint th, Res_Axioms.cnf_axiom thy th)) ths0 - val ths = maps #2 th_cls_pairs - val _ = trace_msg (fn () => "FOL_SOLVE: CONJECTURE CLAUSES") - val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) cls - val _ = trace_msg (fn () => "THEOREM CLAUSES") - val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) ths - val (mode, {axioms,tfrees}) = build_map mode ctxt cls ths - val _ = if null tfrees then () - else (trace_msg (fn () => "TFREE CLAUSES"); - app (fn tf => trace_msg (fn _ => Res_Clause.tptp_of_typeLit true tf)) tfrees) - val _ = trace_msg (fn () => "CLAUSES GIVEN TO METIS") - val thms = map #1 axioms - val _ = app (fn th => trace_msg (fn () => Metis.Thm.toString th)) thms - val _ = trace_msg (fn () => "mode = " ^ string_of_mode mode) - val _ = trace_msg (fn () => "START METIS PROVE PROCESS") - in - case filter (is_false o prop_of) cls of - false_th::_ => [false_th RS @{thm FalseE}] - | [] => - case refute thms of - Metis.Resolution.Contradiction mth => - let val _ = trace_msg (fn () => "METIS RECONSTRUCTION START: " ^ - Metis.Thm.toString mth) - val ctxt' = fold Variable.declare_constraints (map prop_of cls) ctxt - (*add constraints arising from converting goal to clause form*) - val proof = Metis.Proof.proof mth - val result = translate mode ctxt' axioms proof - and used = map_filter (used_axioms axioms) proof - val _ = trace_msg (fn () => "METIS COMPLETED...clauses actually used:") - val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) used - val unused = th_cls_pairs |> map_filter (fn (name, cls) => - if common_thm used cls then NONE else SOME name) - in - if null unused then () - else warning ("Metis: unused 
theorems " ^ commas_quote unused); - case result of - (_,ith)::_ => - (trace_msg (fn () => "success: " ^ Display.string_of_thm ctxt ith); - [ith]) - | _ => (trace_msg (fn () => "Metis: no result"); - []) - end - | Metis.Resolution.Satisfiable _ => - (trace_msg (fn () => "Metis: No first-order proof with the lemmas supplied"); - []) - end; - -fun metis_general_tac mode ctxt ths i st0 = - let val _ = trace_msg (fn () => - "Metis called with theorems " ^ cat_lines (map (Display.string_of_thm ctxt) ths)) - in - if exists_type Res_Axioms.type_has_topsort (prop_of st0) - then raise METIS "Metis: Proof state contains the universal sort {}" - else - (Meson.MESON Res_Axioms.neg_clausify - (fn cls => resolve_tac (FOL_SOLVE mode ctxt cls ths) 1) ctxt i - THEN Res_Axioms.expand_defs_tac st0) st0 - end - handle METIS s => (warning ("Metis: " ^ s); Seq.empty); - -val metis_tac = metis_general_tac HO; -val metisF_tac = metis_general_tac FO; -val metisFT_tac = metis_general_tac FT; - -fun method name mode comment = Method.setup name (Attrib.thms >> (fn ths => fn ctxt => - SIMPLE_METHOD' (CHANGED_PROP o metis_general_tac mode ctxt ths))) comment; - -val setup = - type_lits_setup #> - method @{binding metis} HO "METIS for FOL & HOL problems" #> - method @{binding metisF} FO "METIS for FOL problems" #> - method @{binding metisFT} FT "METIS with fully-typed translation" #> - Method.setup @{binding finish_clausify} - (Scan.succeed (K (SIMPLE_METHOD (Res_Axioms.expand_defs_tac refl)))) - "cleanup after conversion to clauses"; - -end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/res_atp.ML --- a/src/HOL/Tools/res_atp.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,559 +0,0 @@ -(* Title: HOL/Tools/res_atp.ML - Author: Jia Meng, Cambridge University Computer Laboratory, NICTA -*) - -signature RES_ATP = -sig - datatype mode = Auto | Fol | Hol - val tvar_classes_of_terms : term list -> string list - val tfree_classes_of_terms : term list -> string list - val type_consts_of_terms : theory -> term list -> string list - val get_relevant : int -> bool -> Proof.context * (thm list * 'a) -> thm list -> - (thm * (string * int)) list - val prepare_clauses : bool -> thm list -> thm list -> - (thm * (Res_HOL_Clause.axiom_name * Res_HOL_Clause.clause_id)) list -> - (thm * (Res_HOL_Clause.axiom_name * Res_HOL_Clause.clause_id)) list -> theory -> - Res_HOL_Clause.axiom_name vector * - (Res_HOL_Clause.clause list * Res_HOL_Clause.clause list * Res_HOL_Clause.clause list * - Res_HOL_Clause.clause list * Res_Clause.classrelClause list * Res_Clause.arityClause list) -end; - -structure Res_ATP: RES_ATP = -struct - - -(********************************************************************) -(* some settings for both background automatic ATP calling procedure*) -(* and also explicit ATP invocation methods *) -(********************************************************************) - -(*Translation mode can be auto-detected, or forced to be first-order or higher-order*) -datatype mode = Auto | Fol | Hol; - -val linkup_logic_mode = Auto; - -(*** background linkup ***) -val run_blacklist_filter = true; - -(*** relevance filter parameters ***) -val run_relevance_filter = true; -val pass_mark = 0.5; -val convergence = 3.2; (*Higher numbers allow longer inference chains*) -val follow_defs = false; (*Follow definitions. 
Makes problems bigger.*) - -(***************************************************************) -(* Relevance Filtering *) -(***************************************************************) - -fun strip_Trueprop (Const("Trueprop",_) $ t) = t - | strip_Trueprop t = t; - -(*A surprising number of theorems contain only a few significant constants. - These include all induction rules, and other general theorems. Filtering - theorems in clause form reveals these complexities in the form of Skolem - functions. If we were instead to filter theorems in their natural form, - some other method of measuring theorem complexity would become necessary.*) - -fun log_weight2 (x:real) = 1.0 + 2.0/Math.ln (x+1.0); - -(*The default seems best in practice. A constant function of one ignores - the constant frequencies.*) -val weight_fn = log_weight2; - - -(*Including equality in this list might be expected to stop rules like subset_antisym from - being chosen, but for some reason filtering works better with them listed. The - logical signs All, Ex, &, and --> are omitted because any remaining occurrrences - must be within comprehensions.*) -val standard_consts = ["Trueprop","==>","all","==","op |","Not","op ="]; - - -(*** constants with types ***) - -(*An abstraction of Isabelle types*) -datatype const_typ = CTVar | CType of string * const_typ list - -(*Is the second type an instance of the first one?*) -fun match_type (CType(con1,args1)) (CType(con2,args2)) = - con1=con2 andalso match_types args1 args2 - | match_type CTVar _ = true - | match_type _ CTVar = false -and match_types [] [] = true - | match_types (a1::as1) (a2::as2) = match_type a1 a2 andalso match_types as1 as2; - -(*Is there a unifiable constant?*) -fun uni_mem gctab (c,c_typ) = - case Symtab.lookup gctab c of - NONE => false - | SOME ctyps_list => exists (match_types c_typ) ctyps_list; - -(*Maps a "real" type to a const_typ*) -fun const_typ_of (Type (c,typs)) = CType (c, map const_typ_of typs) - | const_typ_of (TFree _) = CTVar - | const_typ_of (TVar _) = CTVar - -(*Pairs a constant with the list of its type instantiations (using const_typ)*) -fun const_with_typ thy (c,typ) = - let val tvars = Sign.const_typargs thy (c,typ) - in (c, map const_typ_of tvars) end - handle TYPE _ => (c,[]); (*Variable (locale constant): monomorphic*) - -(*Add a const/type pair to the table, but a [] entry means a standard connective, - which we ignore.*) -fun add_const_typ_table ((c,ctyps), tab) = - Symtab.map_default (c, [ctyps]) (fn [] => [] | ctyps_list => insert (op =) ctyps ctyps_list) - tab; - -(*Free variables are included, as well as constants, to handle locales*) -fun add_term_consts_typs_rm thy (Const(c, typ), tab) = - add_const_typ_table (const_with_typ thy (c,typ), tab) - | add_term_consts_typs_rm thy (Free(c, typ), tab) = - add_const_typ_table (const_with_typ thy (c,typ), tab) - | add_term_consts_typs_rm thy (t $ u, tab) = - add_term_consts_typs_rm thy (t, add_term_consts_typs_rm thy (u, tab)) - | add_term_consts_typs_rm thy (Abs(_,_,t), tab) = add_term_consts_typs_rm thy (t, tab) - | add_term_consts_typs_rm _ (_, tab) = tab; - -(*The empty list here indicates that the constant is being ignored*) -fun add_standard_const (s,tab) = Symtab.update (s,[]) tab; - -val null_const_tab : const_typ list list Symtab.table = - List.foldl add_standard_const Symtab.empty standard_consts; - -fun get_goal_consts_typs thy = List.foldl (add_term_consts_typs_rm thy) null_const_tab; - -(*Inserts a dummy "constant" referring to the theory name, so that relevance - takes the 
given theory into account.*) -fun const_prop_of theory_const th = - if theory_const then - let val name = Context.theory_name (theory_of_thm th) - val t = Const (name ^ ". 1", HOLogic.boolT) - in t $ prop_of th end - else prop_of th; - -(**** Constant / Type Frequencies ****) - -(*A two-dimensional symbol table counts frequencies of constants. It's keyed first by - constant name and second by its list of type instantiations. For the latter, we need - a linear ordering on type const_typ list.*) - -local - -fun cons_nr CTVar = 0 - | cons_nr (CType _) = 1; - -in - -fun const_typ_ord TU = - case TU of - (CType (a, Ts), CType (b, Us)) => - (case fast_string_ord(a,b) of EQUAL => dict_ord const_typ_ord (Ts,Us) | ord => ord) - | (T, U) => int_ord (cons_nr T, cons_nr U); - -end; - -structure CTtab = Table(type key = const_typ list val ord = dict_ord const_typ_ord); - -fun count_axiom_consts theory_const thy ((thm,_), tab) = - let fun count_const (a, T, tab) = - let val (c, cts) = const_with_typ thy (a,T) - in (*Two-dimensional table update. Constant maps to types maps to count.*) - Symtab.map_default (c, CTtab.empty) - (CTtab.map_default (cts,0) (fn n => n+1)) tab - end - fun count_term_consts (Const(a,T), tab) = count_const(a,T,tab) - | count_term_consts (Free(a,T), tab) = count_const(a,T,tab) - | count_term_consts (t $ u, tab) = - count_term_consts (t, count_term_consts (u, tab)) - | count_term_consts (Abs(_,_,t), tab) = count_term_consts (t, tab) - | count_term_consts (_, tab) = tab - in count_term_consts (const_prop_of theory_const thm, tab) end; - - -(**** Actual Filtering Code ****) - -(*The frequency of a constant is the sum of those of all instances of its type.*) -fun const_frequency ctab (c, cts) = - let val pairs = CTtab.dest (the (Symtab.lookup ctab c)) - fun add ((cts',m), n) = if match_types cts cts' then m+n else n - in List.foldl add 0 pairs end; - -(*Add in a constant's weight, as determined by its frequency.*) -fun add_ct_weight ctab ((c,T), w) = - w + weight_fn (real (const_frequency ctab (c,T))); - -(*Relevant constants are weighted according to frequency, - but irrelevant constants are simply counted. Otherwise, Skolem functions, - which are rare, would harm a clause's chances of being picked.*) -fun clause_weight ctab gctyps consts_typs = - let val rel = filter (uni_mem gctyps) consts_typs - val rel_weight = List.foldl (add_ct_weight ctab) 0.0 rel - in - rel_weight / (rel_weight + real (length consts_typs - length rel)) - end; - -(*Multiplies out to a list of pairs: 'a * 'b list -> ('a * 'b) list -> ('a * 'b) list*) -fun add_expand_pairs (x,ys) xys = List.foldl (fn (y,acc) => (x,y)::acc) xys ys; - -fun consts_typs_of_term thy t = - let val tab = add_term_consts_typs_rm thy (t, null_const_tab) - in Symtab.fold add_expand_pairs tab [] end; - -fun pair_consts_typs_axiom theory_const thy (thm,name) = - ((thm,name), (consts_typs_of_term thy (const_prop_of theory_const thm))); - -exception ConstFree; -fun dest_ConstFree (Const aT) = aT - | dest_ConstFree (Free aT) = aT - | dest_ConstFree _ = raise ConstFree; - -(*Look for definitions of the form f ?x1 ... 
?xn = t, but not reversed.*) -fun defines thy thm gctypes = - let val tm = prop_of thm - fun defs lhs rhs = - let val (rator,args) = strip_comb lhs - val ct = const_with_typ thy (dest_ConstFree rator) - in - forall is_Var args andalso uni_mem gctypes ct andalso - subset (op =) (Term.add_vars rhs [], Term.add_vars lhs []) - end - handle ConstFree => false - in - case tm of Const ("Trueprop",_) $ (Const("op =",_) $ lhs $ rhs) => - defs lhs rhs - | _ => false - end; - -type annotd_cls = (thm * (string * int)) * ((string * const_typ list) list); - -(*For a reverse sort, putting the largest values first.*) -fun compare_pairs ((_,w1),(_,w2)) = Real.compare (w2,w1); - -(*Limit the number of new clauses, to prevent runaway acceptance.*) -fun take_best max_new (newpairs : (annotd_cls*real) list) = - let val nnew = length newpairs - in - if nnew <= max_new then (map #1 newpairs, []) - else - let val cls = sort compare_pairs newpairs - val accepted = List.take (cls, max_new) - in - Res_Axioms.trace_msg (fn () => ("Number of candidates, " ^ Int.toString nnew ^ - ", exceeds the limit of " ^ Int.toString (max_new))); - Res_Axioms.trace_msg (fn () => ("Effective pass mark: " ^ Real.toString (#2 (List.last accepted)))); - Res_Axioms.trace_msg (fn () => "Actually passed: " ^ - space_implode ", " (map (fn (((_,(name,_)),_),_) => name) accepted)); - - (map #1 accepted, map #1 (List.drop (cls, max_new))) - end - end; - -fun relevant_clauses max_new thy ctab p rel_consts = - let fun relevant ([],_) [] = [] : (thm * (string * int)) list (*Nothing added this iteration*) - | relevant (newpairs,rejects) [] = - let val (newrels,more_rejects) = take_best max_new newpairs - val new_consts = maps #2 newrels - val rel_consts' = List.foldl add_const_typ_table rel_consts new_consts - val newp = p + (1.0-p) / convergence - in - Res_Axioms.trace_msg (fn () => "relevant this iteration: " ^ Int.toString (length newrels)); - (map #1 newrels) @ - (relevant_clauses max_new thy ctab newp rel_consts' (more_rejects@rejects)) - end - | relevant (newrels,rejects) ((ax as (clsthm as (_,(name,n)),consts_typs)) :: axs) = - let val weight = clause_weight ctab rel_consts consts_typs - in - if p <= weight orelse (follow_defs andalso defines thy (#1 clsthm) rel_consts) - then (Res_Axioms.trace_msg (fn () => (name ^ " clause " ^ Int.toString n ^ - " passes: " ^ Real.toString weight)); - relevant ((ax,weight)::newrels, rejects) axs) - else relevant (newrels, ax::rejects) axs - end - in Res_Axioms.trace_msg (fn () => ("relevant_clauses, current pass mark = " ^ Real.toString p)); - relevant ([],[]) - end; - -fun relevance_filter max_new theory_const thy axioms goals = - if run_relevance_filter andalso pass_mark >= 0.1 - then - let val const_tab = List.foldl (count_axiom_consts theory_const thy) Symtab.empty axioms - val goal_const_tab = get_goal_consts_typs thy goals - val _ = Res_Axioms.trace_msg (fn () => ("Initial constants: " ^ - space_implode ", " (Symtab.keys goal_const_tab))); - val rels = relevant_clauses max_new thy const_tab (pass_mark) - goal_const_tab (map (pair_consts_typs_axiom theory_const thy) axioms) - in - Res_Axioms.trace_msg (fn () => ("Total relevant: " ^ Int.toString (length rels))); - rels - end - else axioms; - -(***************************************************************) -(* Retrieving and filtering lemmas *) -(***************************************************************) - -(*** retrieve lemmas and filter them ***) - -(*Hashing to detect duplicate and variant clauses, e.g. 
from the [iff] attribute*) - -fun setinsert (x,s) = Symtab.update (x,()) s; - -(*Reject theorems with names like "List.filter.filter_list_def" or - "Accessible_Part.acc.defs", as these are definitions arising from packages.*) -fun is_package_def a = - let val names = Long_Name.explode a - in - length names > 2 andalso - not (hd names = "local") andalso - String.isSuffix "_def" a orelse String.isSuffix "_defs" a - end; - -(** a hash function from Term.term to int, and also a hash table **) -val xor_words = List.foldl Word.xorb 0w0; - -fun hashw_term ((Const(c,_)), w) = Polyhash.hashw_string (c,w) - | hashw_term ((Free(a,_)), w) = Polyhash.hashw_string (a,w) - | hashw_term ((Var(_,_)), w) = w - | hashw_term ((Bound i), w) = Polyhash.hashw_int(i,w) - | hashw_term ((Abs(_,_,t)), w) = hashw_term (t, w) - | hashw_term ((P$Q), w) = hashw_term (Q, (hashw_term (P, w))); - -fun hash_literal (Const("Not",_)$P) = Word.notb(hashw_term(P,0w0)) - | hash_literal P = hashw_term(P,0w0); - -fun hash_term t = Word.toIntX (xor_words (map hash_literal (HOLogic.disjuncts (strip_Trueprop t)))); - -fun equal_thm (thm1,thm2) = Term.aconv(prop_of thm1, prop_of thm2); - -exception HASH_CLAUSE; - -(*Create a hash table for clauses, of the given size*) -fun mk_clause_table n = - Polyhash.mkTable (hash_term o prop_of, equal_thm) - (n, HASH_CLAUSE); - -(*Use a hash table to eliminate duplicates from xs. Argument is a list of - (thm * (string * int)) tuples. The theorems are hashed into the table. *) -fun make_unique xs = - let val ht = mk_clause_table 7000 - in - Res_Axioms.trace_msg (fn () => ("make_unique gets " ^ Int.toString (length xs) ^ " clauses")); - app (ignore o Polyhash.peekInsert ht) xs; - Polyhash.listItems ht - end; - -(*Remove existing axiom clauses from the conjecture clauses, as this can dramatically - boost an ATP's performance (for some reason)*) -fun subtract_cls c_clauses ax_clauses = - let val ht = mk_clause_table 2200 - fun known x = is_some (Polyhash.peek ht x) - in - app (ignore o Polyhash.peekInsert ht) ax_clauses; - filter (not o known) c_clauses - end; - -fun all_valid_thms ctxt = - let - val global_facts = PureThy.facts_of (ProofContext.theory_of ctxt); - val local_facts = ProofContext.facts_of ctxt; - val full_space = - Name_Space.merge (Facts.space_of global_facts, Facts.space_of local_facts); - - fun valid_facts facts = - (facts, []) |-> Facts.fold_static (fn (name, ths0) => - let - fun check_thms a = - (case try (ProofContext.get_thms ctxt) a of - NONE => false - | SOME ths1 => Thm.eq_thms (ths0, ths1)); - - val name1 = Facts.extern facts name; - val name2 = Name_Space.extern full_space name; - val ths = filter_out Res_Axioms.bad_for_atp ths0; - in - if Facts.is_concealed facts name orelse null ths orelse - run_blacklist_filter andalso is_package_def name then I - else - (case find_first check_thms [name1, name2, name] of - NONE => I - | SOME a => cons (a, ths)) - end); - in valid_facts global_facts @ valid_facts local_facts end; - -fun multi_name a th (n, pairs) = - (n + 1, (a ^ "(" ^ Int.toString n ^ ")", th) :: pairs); - -fun add_single_names (a, []) pairs = pairs - | add_single_names (a, [th]) pairs = (a, th) :: pairs - | add_single_names (a, ths) pairs = #2 (fold (multi_name a) ths (1, pairs)); - -(*Ignore blacklisted basenames*) -fun add_multi_names (a, ths) pairs = - if (Long_Name.base_name a) mem_string Res_Axioms.multi_base_blacklist then pairs - else add_single_names (a, ths) pairs; - -fun is_multi (a, ths) = length ths > 1 orelse String.isSuffix ".axioms" a; - -(*The single 
theorems go BEFORE the multiple ones. Blacklist is applied to all.*) -fun name_thm_pairs ctxt = - let - val (mults, singles) = List.partition is_multi (all_valid_thms ctxt) - fun blacklisted (_, th) = - run_blacklist_filter andalso Res_Blacklist.blacklisted ctxt th - in - filter_out blacklisted - (fold add_single_names singles (fold add_multi_names mults [])) - end; - -fun check_named ("", th) = - (warning ("No name for theorem " ^ Display.string_of_thm_without_context th); false) - | check_named _ = true; - -fun get_all_lemmas ctxt = - let val included_thms = - tap (fn ths => Res_Axioms.trace_msg - (fn () => ("Including all " ^ Int.toString (length ths) ^ " theorems"))) - (name_thm_pairs ctxt) - in - filter check_named included_thms - end; - -(***************************************************************) -(* Type Classes Present in the Axiom or Conjecture Clauses *) -(***************************************************************) - -fun add_classes (sorts, cset) = List.foldl setinsert cset (flat sorts); - -(*Remove this trivial type class*) -fun delete_type cset = Symtab.delete_safe "HOL.type" cset; - -fun tvar_classes_of_terms ts = - let val sorts_list = map (map #2 o OldTerm.term_tvars) ts - in Symtab.keys (delete_type (List.foldl add_classes Symtab.empty sorts_list)) end; - -fun tfree_classes_of_terms ts = - let val sorts_list = map (map #2 o OldTerm.term_tfrees) ts - in Symtab.keys (delete_type (List.foldl add_classes Symtab.empty sorts_list)) end; - -(*fold type constructors*) -fun fold_type_consts f (Type (a, Ts)) x = fold (fold_type_consts f) Ts (f (a,x)) - | fold_type_consts _ _ x = x; - -val add_type_consts_in_type = fold_type_consts setinsert; - -(*Type constructors used to instantiate overloaded constants are the only ones needed.*) -fun add_type_consts_in_term thy = - let val const_typargs = Sign.const_typargs thy - fun add_tcs (Const cT) x = fold add_type_consts_in_type (const_typargs cT) x - | add_tcs (Abs (_, _, u)) x = add_tcs u x - | add_tcs (t $ u) x = add_tcs t (add_tcs u x) - | add_tcs _ x = x - in add_tcs end - -fun type_consts_of_terms thy ts = - Symtab.keys (fold (add_type_consts_in_term thy) ts Symtab.empty); - - -(***************************************************************) -(* ATP invocation methods setup *) -(***************************************************************) - -(*Ensures that no higher-order theorems "leak out"*) -fun restrict_to_logic thy true cls = filter (Meson.is_fol_term thy o prop_of o fst) cls - | restrict_to_logic thy false cls = cls; - -(**** Predicates to detect unwanted clauses (prolific or likely to cause unsoundness) ****) - -(** Too general means, positive equality literal with a variable X as one operand, - when X does not occur properly in the other operand. This rules out clearly - inconsistent clauses such as V=a|V=b, though it by no means guarantees soundness. 
**) - -fun occurs ix = - let fun occ(Var (jx,_)) = (ix=jx) - | occ(t1$t2) = occ t1 orelse occ t2 - | occ(Abs(_,_,t)) = occ t - | occ _ = false - in occ end; - -fun is_recordtype T = not (null (Record.dest_recTs T)); - -(*Unwanted equalities include - (1) those between a variable that does not properly occur in the second operand, - (2) those between a variable and a record, since these seem to be prolific "cases" thms -*) -fun too_general_eqterms (Var (ix,T), t) = not (occurs ix t) orelse is_recordtype T - | too_general_eqterms _ = false; - -fun too_general_equality (Const ("op =", _) $ x $ y) = - too_general_eqterms (x,y) orelse too_general_eqterms(y,x) - | too_general_equality _ = false; - -(* tautologous? *) -fun is_taut (Const ("Trueprop", _) $ Const ("True", _)) = true - | is_taut _ = false; - -fun has_typed_var tycons = exists_subterm - (fn Var (_, Type (a, _)) => member (op =) tycons a | _ => false); - -(*Clauses are forbidden to contain variables of these types. The typical reason is that - they lead to unsoundness. Note that "unit" satisfies numerous equations like ?X=(). - The resulting clause will have no type constraint, yielding false proofs. Even "bool" - leads to many unsound proofs, though (obviously) only for higher-order problems.*) -val unwanted_types = ["Product_Type.unit","bool"]; - -fun unwanted t = - is_taut t orelse has_typed_var unwanted_types t orelse - forall too_general_equality (HOLogic.disjuncts (strip_Trueprop t)); - -(*Clauses containing variables of type "unit" or "bool" are unlikely to be useful and - likely to lead to unsound proofs.*) -fun remove_unwanted_clauses cls = filter (not o unwanted o prop_of o fst) cls; - -fun isFO thy goal_cls = case linkup_logic_mode of - Auto => forall (Meson.is_fol_term thy) (map prop_of goal_cls) - | Fol => true - | Hol => false - -fun get_relevant max_new theory_const (ctxt, (chain_ths, th)) goal_cls = - let - val thy = ProofContext.theory_of ctxt - val isFO = isFO thy goal_cls - val included_cls = get_all_lemmas ctxt - |> Res_Axioms.cnf_rules_pairs thy |> make_unique - |> restrict_to_logic thy isFO - |> remove_unwanted_clauses - in - relevance_filter max_new theory_const thy included_cls (map prop_of goal_cls) - end; - -(* prepare for passing to writer, - create additional clauses based on the information from extra_cls *) -fun prepare_clauses dfg goal_cls chain_ths axcls extra_cls thy = - let - (* add chain thms *) - val chain_cls = - Res_Axioms.cnf_rules_pairs thy (filter check_named (map Res_Axioms.pairname chain_ths)) - val axcls = chain_cls @ axcls - val extra_cls = chain_cls @ extra_cls - val isFO = isFO thy goal_cls - val ccls = subtract_cls goal_cls extra_cls - val _ = app (fn th => Res_Axioms.trace_msg (fn _ => Display.string_of_thm_global thy th)) ccls - val ccltms = map prop_of ccls - and axtms = map (prop_of o #1) extra_cls - val subs = tfree_classes_of_terms ccltms - and supers = tvar_classes_of_terms axtms - and tycons = type_consts_of_terms thy (ccltms@axtms) - (*TFrees in conjecture clauses; TVars in axiom clauses*) - val conjectures = Res_HOL_Clause.make_conjecture_clauses dfg thy ccls - val (_, extra_clauses) = ListPair.unzip (Res_HOL_Clause.make_axiom_clauses dfg thy extra_cls) - val (clnames,axiom_clauses) = ListPair.unzip (Res_HOL_Clause.make_axiom_clauses dfg thy axcls) - val helper_clauses = Res_HOL_Clause.get_helper_clauses dfg thy isFO (conjectures, extra_cls, []) - val (supers',arity_clauses) = Res_Clause.make_arity_clauses_dfg dfg thy tycons supers - val classrel_clauses = 
Res_Clause.make_classrel_clauses thy subs supers' - in - (Vector.fromList clnames, - (conjectures, axiom_clauses, extra_clauses, helper_clauses, classrel_clauses, arity_clauses)) - end - -end; - diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/res_axioms.ML --- a/src/HOL/Tools/res_axioms.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,545 +0,0 @@ -(* Title: HOL/Tools/res_axioms.ML - Author: Jia Meng, Cambridge University Computer Laboratory - -Transformation of axiom rules (elim/intro/etc) into CNF forms. -*) - -signature RES_AXIOMS = -sig - val trace: bool Unsynchronized.ref - val trace_msg: (unit -> string) -> unit - val cnf_axiom: theory -> thm -> thm list - val pairname: thm -> string * thm - val multi_base_blacklist: string list - val bad_for_atp: thm -> bool - val type_has_topsort: typ -> bool - val cnf_rules_pairs: theory -> (string * thm) list -> (thm * (string * int)) list - val neg_clausify: thm list -> thm list - val expand_defs_tac: thm -> tactic - val combinators: thm -> thm - val neg_conjecture_clauses: Proof.context -> thm -> int -> thm list * (string * typ) list - val suppress_endtheory: bool Unsynchronized.ref - (*for emergency use where endtheory causes problems*) - val setup: theory -> theory -end; - -structure Res_Axioms: RES_AXIOMS = -struct - -val trace = Unsynchronized.ref false; -fun trace_msg msg = if ! trace then tracing (msg ()) else (); - -fun freeze_thm th = #1 (Drule.legacy_freeze_thaw th); - -val type_has_topsort = Term.exists_subtype - (fn TFree (_, []) => true - | TVar (_, []) => true - | _ => false); - - -(**** Transformation of Elimination Rules into First-Order Formulas****) - -val cfalse = cterm_of @{theory HOL} HOLogic.false_const; -val ctp_false = cterm_of @{theory HOL} (HOLogic.mk_Trueprop HOLogic.false_const); - -(*Converts an elim-rule into an equivalent theorem that does not have the - predicate variable. Leaves other theorems unchanged. We simply instantiate the - conclusion variable to False.*) -fun transform_elim th = - case concl_of th of (*conclusion variable*) - Const("Trueprop",_) $ (v as Var(_,Type("bool",[]))) => - Thm.instantiate ([], [(cterm_of @{theory HOL} v, cfalse)]) th - | v as Var(_, Type("prop",[])) => - Thm.instantiate ([], [(cterm_of @{theory HOL} v, ctp_false)]) th - | _ => th; - -(*To enforce single-threading*) -exception Clausify_failure of theory; - - -(**** SKOLEMIZATION BY INFERENCE (lcp) ****) - -fun rhs_extra_types lhsT rhs = - let val lhs_vars = Term.add_tfreesT lhsT [] - fun add_new_TFrees (TFree v) = - if member (op =) lhs_vars v then I else insert (op =) (TFree v) - | add_new_TFrees _ = I - val rhs_consts = fold_aterms (fn Const c => insert (op =) c | _ => I) rhs [] - in fold (#2 #> Term.fold_atyps add_new_TFrees) rhs_consts [] end; - -(*Traverse a theorem, declaring Skolem function definitions. String s is the suggested - prefix for the Skolem constant.*) -fun declare_skofuns s th = - let - val nref = Unsynchronized.ref 0 (* FIXME ??? 
*) - fun dec_sko (Const ("Ex",_) $ (xtp as Abs (_, T, p))) (axs, thy) = - (*Existential: declare a Skolem function, then insert into body and continue*) - let - val cname = "sko_" ^ s ^ "_" ^ Int.toString (Unsynchronized.inc nref) - val args0 = OldTerm.term_frees xtp (*get the formal parameter list*) - val Ts = map type_of args0 - val extraTs = rhs_extra_types (Ts ---> T) xtp - val argsx = map (fn T => Free (gensym "vsk", T)) extraTs - val args = argsx @ args0 - val cT = extraTs ---> Ts ---> T - val rhs = list_abs_free (map dest_Free args, HOLogic.choice_const T $ xtp) - (*Forms a lambda-abstraction over the formal parameters*) - val (c, thy') = - Sign.declare_const ((Binding.conceal (Binding.name cname), cT), NoSyn) thy - val cdef = cname ^ "_def" - val thy'' = - Theory.add_defs_i true false [(Binding.name cdef, Logic.mk_equals (c, rhs))] thy' - val ax = Thm.axiom thy'' (Sign.full_bname thy'' cdef) - in dec_sko (subst_bound (list_comb (c, args), p)) (ax :: axs, thy'') end - | dec_sko (Const ("All", _) $ (Abs (a, T, p))) thx = - (*Universal quant: insert a free variable into body and continue*) - let val fname = Name.variant (OldTerm.add_term_names (p, [])) a - in dec_sko (subst_bound (Free (fname, T), p)) thx end - | dec_sko (Const ("op &", _) $ p $ q) thx = dec_sko q (dec_sko p thx) - | dec_sko (Const ("op |", _) $ p $ q) thx = dec_sko q (dec_sko p thx) - | dec_sko (Const ("Trueprop", _) $ p) thx = dec_sko p thx - | dec_sko t thx = thx (*Do nothing otherwise*) - in fn thy => dec_sko (Thm.prop_of th) ([], thy) end; - -(*Traverse a theorem, accumulating Skolem function definitions.*) -fun assume_skofuns s th = - let val sko_count = Unsynchronized.ref 0 (* FIXME ??? *) - fun dec_sko (Const ("Ex",_) $ (xtp as Abs(_,T,p))) defs = - (*Existential: declare a Skolem function, then insert into body and continue*) - let val skos = map (#1 o Logic.dest_equals) defs (*existing sko fns*) - val args = subtract (op =) skos (OldTerm.term_frees xtp) (*the formal parameters*) - val Ts = map type_of args - val cT = Ts ---> T - val id = "sko_" ^ s ^ "_" ^ Int.toString (Unsynchronized.inc sko_count) - val c = Free (id, cT) - val rhs = list_abs_free (map dest_Free args, - HOLogic.choice_const T $ xtp) - (*Forms a lambda-abstraction over the formal parameters*) - val def = Logic.mk_equals (c, rhs) - in dec_sko (subst_bound (list_comb(c,args), p)) - (def :: defs) - end - | dec_sko (Const ("All",_) $ Abs (a, T, p)) defs = - (*Universal quant: insert a free variable into body and continue*) - let val fname = Name.variant (OldTerm.add_term_names (p,[])) a - in dec_sko (subst_bound (Free(fname,T), p)) defs end - | dec_sko (Const ("op &", _) $ p $ q) defs = dec_sko q (dec_sko p defs) - | dec_sko (Const ("op |", _) $ p $ q) defs = dec_sko q (dec_sko p defs) - | dec_sko (Const ("Trueprop", _) $ p) defs = dec_sko p defs - | dec_sko t defs = defs (*Do nothing otherwise*) - in dec_sko (prop_of th) [] end; - - -(**** REPLACING ABSTRACTIONS BY COMBINATORS ****) - -(*Returns the vars of a theorem*) -fun vars_of_thm th = - map (Thm.cterm_of (theory_of_thm th) o Var) (Thm.fold_terms Term.add_vars th []); - -(*Make a version of fun_cong with a given variable name*) -local - val fun_cong' = fun_cong RS asm_rl; (*renumber f, g to prevent clashes with (a,0)*) - val cx = hd (vars_of_thm fun_cong'); - val ty = typ_of (ctyp_of_term cx); - val thy = theory_of_thm fun_cong; - fun mkvar a = cterm_of thy (Var((a,0),ty)); -in -fun xfun_cong x = Thm.instantiate ([], [(cx, mkvar x)]) fun_cong' -end; - -(*Removes the lambdas from an 
equation of the form t = (%x. u). A non-negative n, - serves as an upper bound on how many to remove.*) -fun strip_lambdas 0 th = th - | strip_lambdas n th = - case prop_of th of - _ $ (Const ("op =", _) $ _ $ Abs (x,_,_)) => - strip_lambdas (n-1) (freeze_thm (th RS xfun_cong x)) - | _ => th; - -val lambda_free = not o Term.has_abs; - -val [f_B,g_B] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_B})); -val [g_C,f_C] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_C})); -val [f_S,g_S] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_S})); - -(*FIXME: requires more use of cterm constructors*) -fun abstract ct = - let - val thy = theory_of_cterm ct - val Abs(x,_,body) = term_of ct - val Type("fun",[xT,bodyT]) = typ_of (ctyp_of_term ct) - val cxT = ctyp_of thy xT and cbodyT = ctyp_of thy bodyT - fun makeK() = instantiate' [SOME cxT, SOME cbodyT] [SOME (cterm_of thy body)] @{thm abs_K} - in - case body of - Const _ => makeK() - | Free _ => makeK() - | Var _ => makeK() (*though Var isn't expected*) - | Bound 0 => instantiate' [SOME cxT] [] @{thm abs_I} (*identity: I*) - | rator$rand => - if loose_bvar1 (rator,0) then (*C or S*) - if loose_bvar1 (rand,0) then (*S*) - let val crator = cterm_of thy (Abs(x,xT,rator)) - val crand = cterm_of thy (Abs(x,xT,rand)) - val abs_S' = cterm_instantiate [(f_S,crator),(g_S,crand)] @{thm abs_S} - val (_,rhs) = Thm.dest_equals (cprop_of abs_S') - in - Thm.transitive abs_S' (Conv.binop_conv abstract rhs) - end - else (*C*) - let val crator = cterm_of thy (Abs(x,xT,rator)) - val abs_C' = cterm_instantiate [(f_C,crator),(g_C,cterm_of thy rand)] @{thm abs_C} - val (_,rhs) = Thm.dest_equals (cprop_of abs_C') - in - Thm.transitive abs_C' (Conv.fun_conv (Conv.arg_conv abstract) rhs) - end - else if loose_bvar1 (rand,0) then (*B or eta*) - if rand = Bound 0 then eta_conversion ct - else (*B*) - let val crand = cterm_of thy (Abs(x,xT,rand)) - val crator = cterm_of thy rator - val abs_B' = cterm_instantiate [(f_B,crator),(g_B,crand)] @{thm abs_B} - val (_,rhs) = Thm.dest_equals (cprop_of abs_B') - in - Thm.transitive abs_B' (Conv.arg_conv abstract rhs) - end - else makeK() - | _ => error "abstract: Bad term" - end; - -(*Traverse a theorem, declaring abstraction function definitions. String s is the suggested - prefix for the constants.*) -fun combinators_aux ct = - if lambda_free (term_of ct) then reflexive ct - else - case term_of ct of - Abs _ => - let val (cv, cta) = Thm.dest_abs NONE ct - val (v, _) = dest_Free (term_of cv) - val u_th = combinators_aux cta - val cu = Thm.rhs_of u_th - val comb_eq = abstract (Thm.cabs cv cu) - in transitive (abstract_rule v cv u_th) comb_eq end - | _ $ _ => - let val (ct1, ct2) = Thm.dest_comb ct - in combination (combinators_aux ct1) (combinators_aux ct2) end; - -fun combinators th = - if lambda_free (prop_of th) then th - else - let val th = Drule.eta_contraction_rule th - val eqth = combinators_aux (cprop_of th) - in equal_elim eqth th end - handle THM (msg,_,_) => - (warning (cat_lines - ["Error in the combinator translation of " ^ Display.string_of_thm_without_context th, - " Exception message: " ^ msg]); - TrueI); (*A type variable of sort {} will cause make abstraction fail.*) - -(*cterms are used throughout for efficiency*) -val cTrueprop = Thm.cterm_of @{theory HOL} HOLogic.Trueprop; - -(*cterm version of mk_cTrueprop*) -fun c_mkTrueprop A = Thm.capply cTrueprop A; - -(*Given an abstraction over n variables, replace the bound variables by free - ones. 
Return the body, along with the list of free variables.*) -fun c_variant_abs_multi (ct0, vars) = - let val (cv,ct) = Thm.dest_abs NONE ct0 - in c_variant_abs_multi (ct, cv::vars) end - handle CTERM _ => (ct0, rev vars); - -(*Given the definition of a Skolem function, return a theorem to replace - an existential formula by a use of that function. - Example: "EX x. x : A & x ~: B ==> sko A B : A & sko A B ~: B" [.] *) -fun skolem_of_def def = - let val (c,rhs) = Thm.dest_equals (cprop_of (freeze_thm def)) - val (ch, frees) = c_variant_abs_multi (rhs, []) - val (chilbert,cabs) = Thm.dest_comb ch - val thy = Thm.theory_of_cterm chilbert - val t = Thm.term_of chilbert - val T = case t of Const ("Hilbert_Choice.Eps", Type("fun",[_,T])) => T - | _ => raise THM ("skolem_of_def: expected Eps", 0, [def]) - val cex = Thm.cterm_of thy (HOLogic.exists_const T) - val ex_tm = c_mkTrueprop (Thm.capply cex cabs) - and conc = c_mkTrueprop (Drule.beta_conv cabs (Drule.list_comb(c,frees))); - fun tacf [prem] = rewrite_goals_tac [def] THEN rtac (prem RS @{thm someI_ex}) 1 - in Goal.prove_internal [ex_tm] conc tacf - |> forall_intr_list frees - |> Thm.forall_elim_vars 0 (*Introduce Vars, but don't discharge defs.*) - |> Thm.varifyT - end; - - -(*Converts an Isabelle theorem (intro, elim or simp format, even higher-order) into NNF.*) -fun to_nnf th ctxt0 = - let val th1 = th |> transform_elim |> zero_var_indexes - val ((_, [th2]), ctxt) = Variable.import true [th1] ctxt0 - val th3 = th2 - |> Conv.fconv_rule Object_Logic.atomize - |> Meson.make_nnf ctxt |> strip_lambdas ~1 - in (th3, ctxt) end; - -(*Generate Skolem functions for a theorem supplied in nnf*) -fun assume_skolem_of_def s th = - map (skolem_of_def o assume o (cterm_of (theory_of_thm th))) (assume_skofuns s th); - - -(*** Blacklisting (duplicated in Res_ATP?) 
***) - -val max_lambda_nesting = 3; - -fun excessive_lambdas (f$t, k) = excessive_lambdas (f,k) orelse excessive_lambdas (t,k) - | excessive_lambdas (Abs(_,_,t), k) = k=0 orelse excessive_lambdas (t,k-1) - | excessive_lambdas _ = false; - -fun is_formula_type T = (T = HOLogic.boolT orelse T = propT); - -(*Don't count nested lambdas at the level of formulas, as they are quantifiers*) -fun excessive_lambdas_fm Ts (Abs(_,T,t)) = excessive_lambdas_fm (T::Ts) t - | excessive_lambdas_fm Ts t = - if is_formula_type (fastype_of1 (Ts, t)) - then exists (excessive_lambdas_fm Ts) (#2 (strip_comb t)) - else excessive_lambdas (t, max_lambda_nesting); - -(*The max apply_depth of any metis call in Metis_Examples (on 31-10-2007) was 11.*) -val max_apply_depth = 15; - -fun apply_depth (f$t) = Int.max (apply_depth f, apply_depth t + 1) - | apply_depth (Abs(_,_,t)) = apply_depth t - | apply_depth _ = 0; - -fun too_complex t = - apply_depth t > max_apply_depth orelse - Meson.too_many_clauses NONE t orelse - excessive_lambdas_fm [] t; - -fun is_strange_thm th = - case head_of (concl_of th) of - Const (a, _) => (a <> "Trueprop" andalso a <> "==") - | _ => false; - -fun bad_for_atp th = - too_complex (prop_of th) - orelse exists_type type_has_topsort (prop_of th) - orelse is_strange_thm th; - -val multi_base_blacklist = - ["defs","select_defs","update_defs","induct","inducts","split","splits","split_asm", - "cases","ext_cases"]; (* FIXME put other record thms here, or declare as "noatp" *) - -(*Keep the full complexity of the original name*) -fun flatten_name s = space_implode "_X" (Long_Name.explode s); - -fun fake_name th = - if Thm.has_name_hint th then flatten_name (Thm.get_name_hint th) - else gensym "unknown_thm_"; - -(*Skolemize a named theorem, with Skolem functions as additional premises.*) -fun skolem_thm (s, th) = - if member (op =) multi_base_blacklist (Long_Name.base_name s) orelse bad_for_atp th then [] - else - let - val ctxt0 = Variable.thm_context th - val (nnfth, ctxt1) = to_nnf th ctxt0 - val (cnfs, ctxt2) = Meson.make_cnf (assume_skolem_of_def s nnfth) nnfth ctxt1 - in cnfs |> map combinators |> Variable.export ctxt2 ctxt0 |> Meson.finish_cnf end - handle THM _ => []; - -(*The cache prevents repeated clausification of a theorem, and also repeated declaration of - Skolem functions.*) -structure ThmCache = Theory_Data -( - type T = thm list Thmtab.table * unit Symtab.table; - val empty = (Thmtab.empty, Symtab.empty); - val extend = I; - fun merge ((cache1, seen1), (cache2, seen2)) : T = - (Thmtab.merge (K true) (cache1, cache2), Symtab.merge (K true) (seen1, seen2)); -); - -val lookup_cache = Thmtab.lookup o #1 o ThmCache.get; -val already_seen = Symtab.defined o #2 o ThmCache.get; - -val update_cache = ThmCache.map o apfst o Thmtab.update; -fun mark_seen name = ThmCache.map (apsnd (Symtab.update (name, ()))); - -(*Exported function to convert Isabelle theorems into axiom clauses*) -fun cnf_axiom thy th0 = - let val th = Thm.transfer thy th0 in - case lookup_cache thy th of - NONE => map Thm.close_derivation (skolem_thm (fake_name th, th)) - | SOME cls => cls - end; - - -(**** Rules from the context ****) - -fun pairname th = (Thm.get_name_hint th, th); - - -(**** Translate a set of theorems into CNF ****) - -fun pair_name_cls k (n, []) = [] - | pair_name_cls k (n, cls::clss) = (cls, (n,k)) :: pair_name_cls (k+1) (n, clss) - -fun cnf_rules_pairs_aux _ pairs [] = pairs - | cnf_rules_pairs_aux thy pairs ((name,th)::ths) = - let val pairs' = (pair_name_cls 0 (name, cnf_axiom thy th)) @ pairs - 
handle THM _ => pairs | Res_Clause.CLAUSE _ => pairs - in cnf_rules_pairs_aux thy pairs' ths end; - -(*The combination of rev and tail recursion preserves the original order*) -fun cnf_rules_pairs thy l = cnf_rules_pairs_aux thy [] (rev l); - - -(**** Convert all facts of the theory into clauses (Res_Clause.clause, or Res_HOL_Clause.clause) ****) - -local - -fun skolem_def (name, th) thy = - let val ctxt0 = Variable.thm_context th in - (case try (to_nnf th) ctxt0 of - NONE => (NONE, thy) - | SOME (nnfth, ctxt1) => - let val (defs, thy') = declare_skofuns (flatten_name name) nnfth thy - in (SOME (th, ctxt0, ctxt1, nnfth, defs), thy') end) - end; - -fun skolem_cnfs (th, ctxt0, ctxt1, nnfth, defs) = - let - val (cnfs, ctxt2) = Meson.make_cnf (map skolem_of_def defs) nnfth ctxt1; - val cnfs' = cnfs - |> map combinators - |> Variable.export ctxt2 ctxt0 - |> Meson.finish_cnf - |> map Thm.close_derivation; - in (th, cnfs') end; - -in - -fun saturate_skolem_cache thy = - let - val facts = PureThy.facts_of thy; - val new_facts = (facts, []) |-> Facts.fold_static (fn (name, ths) => - if Facts.is_concealed facts name orelse already_seen thy name then I - else cons (name, ths)); - val new_thms = (new_facts, []) |-> fold (fn (name, ths) => - if member (op =) multi_base_blacklist (Long_Name.base_name name) then I - else fold_index (fn (i, th) => - if bad_for_atp th orelse is_some (lookup_cache thy th) then I - else cons (name ^ "_" ^ string_of_int (i + 1), Thm.transfer thy th)) ths); - in - if null new_facts then NONE - else - let - val (defs, thy') = thy - |> fold (mark_seen o #1) new_facts - |> fold_map skolem_def (sort_distinct (Thm.thm_ord o pairself snd) new_thms) - |>> map_filter I; - val cache_entries = Par_List.map skolem_cnfs defs; - in SOME (fold update_cache cache_entries thy') end - end; - -end; - -val suppress_endtheory = Unsynchronized.ref false; - -fun clause_cache_endtheory thy = - if ! suppress_endtheory then NONE - else saturate_skolem_cache thy; - - -(*The cache can be kept smaller by inspecting the prop of each thm. Can ignore all that are - lambda_free, but then the individual theory caches become much bigger.*) - - -(*** meson proof methods ***) - -(*Expand all new definitions of abstraction or Skolem functions in a proof state.*) -fun is_absko (Const ("==", _) $ Free (a,_) $ u) = String.isPrefix "sko_" a - | is_absko _ = false; - -fun is_okdef xs (Const ("==", _) $ t $ u) = (*Definition of Free, not in certain terms*) - is_Free t andalso not (member (op aconv) xs t) - | is_okdef _ _ = false - -(*This function tries to cope with open locales, which introduce hypotheses of the form - Free == t, conjecture clauses, which introduce various hypotheses, and also definitions - of sko_ functions. 
*) -fun expand_defs_tac st0 st = - let val hyps0 = #hyps (rep_thm st0) - val hyps = #hyps (crep_thm st) - val newhyps = filter_out (member (op aconv) hyps0 o Thm.term_of) hyps - val defs = filter (is_absko o Thm.term_of) newhyps - val remaining_hyps = filter_out (member (op aconv) (map Thm.term_of defs)) - (map Thm.term_of hyps) - val fixed = OldTerm.term_frees (concl_of st) @ - fold (union (op aconv)) (map OldTerm.term_frees remaining_hyps) [] - in Seq.of_list [Local_Defs.expand (filter (is_okdef fixed o Thm.term_of) defs) st] end; - - -fun meson_general_tac ctxt ths i st0 = - let - val thy = ProofContext.theory_of ctxt - val ctxt0 = Classical.put_claset HOL_cs ctxt - in (Meson.meson_tac ctxt0 (maps (cnf_axiom thy) ths) i THEN expand_defs_tac st0) st0 end; - -val meson_method_setup = - Method.setup @{binding meson} (Attrib.thms >> (fn ths => fn ctxt => - SIMPLE_METHOD' (CHANGED_PROP o meson_general_tac ctxt ths))) - "MESON resolution proof procedure"; - - -(*** Converting a subgoal into negated conjecture clauses. ***) - -fun neg_skolemize_tac ctxt = - EVERY' [rtac ccontr, Object_Logic.atomize_prems_tac, Meson.skolemize_tac ctxt]; - -val neg_clausify = Meson.make_clauses #> map combinators #> Meson.finish_cnf; - -fun neg_conjecture_clauses ctxt st0 n = - let - val st = Seq.hd (neg_skolemize_tac ctxt n st0) - val ({params, prems, ...}, _) = Subgoal.focus (Variable.set_body false ctxt) n st - in (neg_clausify prems, map (Term.dest_Free o Thm.term_of o #2) params) end; - -(*Conversion of a subgoal to conjecture clauses. Each clause has - leading !!-bound universal variables, to express generality. *) -fun neg_clausify_tac ctxt = - neg_skolemize_tac ctxt THEN' - SUBGOAL (fn (prop, i) => - let val ts = Logic.strip_assums_hyp prop in - EVERY' - [Subgoal.FOCUS - (fn {prems, ...} => - (Method.insert_tac - (map forall_intr_vars (neg_clausify prems)) i)) ctxt, - REPEAT_DETERM_N (length ts) o etac thin_rl] i - end); - -val neg_clausify_setup = - Method.setup @{binding neg_clausify} (Scan.succeed (SIMPLE_METHOD' o neg_clausify_tac)) - "conversion of goal to conjecture clauses"; - - -(** Attribute for converting a theorem into clauses **) - -val clausify_setup = - Attrib.setup @{binding clausify} - (Scan.lift OuterParse.nat >> - (fn i => Thm.rule_attribute (fn context => fn th => - Meson.make_meta_clause (nth (cnf_axiom (Context.theory_of context) th) i)))) - "conversion of theorem to clauses"; - - - -(** setup **) - -val setup = - meson_method_setup #> - neg_clausify_setup #> - clausify_setup #> - perhaps saturate_skolem_cache #> - Theory.at_end clause_cache_endtheory; - -end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/res_blacklist.ML --- a/src/HOL/Tools/res_blacklist.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,43 +0,0 @@ -(* Title: HOL/Tools/res_blacklist.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Makarius - -Theorems blacklisted to sledgehammer. These theorems typically -produce clauses that are prolific (match too many equality or -membership literals) and relate to seldom-used facts. Some duplicate -other rules. 
-*) - -signature RES_BLACKLIST = -sig - val setup: theory -> theory - val blacklisted: Proof.context -> thm -> bool - val add: attribute - val del: attribute -end; - -structure Res_Blacklist: RES_BLACKLIST = -struct - -structure Data = Generic_Data -( - type T = thm Termtab.table; - val empty = Termtab.empty; - val extend = I; - fun merge tabs = Termtab.merge (K true) tabs; -); - -fun blacklisted ctxt th = - Termtab.defined (Data.get (Context.Proof ctxt)) (Thm.full_prop_of th); - -fun add_thm th = Data.map (Termtab.update (Thm.full_prop_of th, th)); -fun del_thm th = Data.map (Termtab.delete_safe (Thm.full_prop_of th)); - -val add = Thm.declaration_attribute add_thm; -val del = Thm.declaration_attribute del_thm; - -val setup = - Attrib.setup @{binding noatp} (Attrib.add_del add del) "sledgehammer blacklisting" #> - PureThy.add_thms_dynamic (@{binding noatp}, map #2 o Termtab.dest o Data.get); - -end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/res_clause.ML --- a/src/HOL/Tools/res_clause.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,534 +0,0 @@ -(* Title: HOL/Tools/res_clause.ML - Author: Jia Meng, Cambridge University Computer Laboratory - -Storing/printing FOL clauses and arity clauses. Typed equality is -treated differently. - -FIXME: combine with res_hol_clause! -*) - -signature RES_CLAUSE = -sig - val schematic_var_prefix: string - val fixed_var_prefix: string - val tvar_prefix: string - val tfree_prefix: string - val clause_prefix: string - val const_prefix: string - val tconst_prefix: string - val class_prefix: string - val union_all: ''a list list -> ''a list - val const_trans_table: string Symtab.table - val type_const_trans_table: string Symtab.table - val ascii_of: string -> string - val undo_ascii_of: string -> string - val paren_pack : string list -> string - val make_schematic_var : string * int -> string - val make_fixed_var : string -> string - val make_schematic_type_var : string * int -> string - val make_fixed_type_var : string -> string - val make_fixed_const : bool -> string -> string - val make_fixed_type_const : bool -> string -> string - val make_type_class : string -> string - datatype kind = Axiom | Conjecture - type axiom_name = string - datatype fol_type = - AtomV of string - | AtomF of string - | Comp of string * fol_type list - val string_of_fol_type : fol_type -> string - datatype type_literal = LTVar of string * string | LTFree of string * string - exception CLAUSE of string * term - val add_typs : typ list -> type_literal list - val get_tvar_strs: typ list -> string list - datatype arLit = - TConsLit of class * string * string list - | TVarLit of class * string - datatype arityClause = ArityClause of - {axiom_name: axiom_name, conclLit: arLit, premLits: arLit list} - datatype classrelClause = ClassrelClause of - {axiom_name: axiom_name, subclass: class, superclass: class} - val make_classrel_clauses: theory -> class list -> class list -> classrelClause list - val make_arity_clauses_dfg: bool -> theory -> string list -> class list -> class list * arityClause list - val make_arity_clauses: theory -> string list -> class list -> class list * arityClause list - val add_type_sort_preds: typ * int Symtab.table -> int Symtab.table - val add_classrelClause_preds : classrelClause * int Symtab.table -> int Symtab.table - val class_of_arityLit: arLit -> class - val add_arityClause_preds: arityClause * int Symtab.table -> int Symtab.table - val add_foltype_funcs: fol_type * int Symtab.table -> int Symtab.table - val 
add_arityClause_funcs: arityClause * int Symtab.table -> int Symtab.table - val init_functab: int Symtab.table - val dfg_sign: bool -> string -> string - val dfg_of_typeLit: bool -> type_literal -> string - val gen_dfg_cls: int * string * kind * string list * string list * string list -> string - val string_of_preds: (string * Int.int) list -> string - val string_of_funcs: (string * int) list -> string - val string_of_symbols: string -> string -> string - val string_of_start: string -> string - val string_of_descrip : string -> string - val dfg_tfree_clause : string -> string - val dfg_classrelClause: classrelClause -> string - val dfg_arity_clause: arityClause -> string - val tptp_sign: bool -> string -> string - val tptp_of_typeLit : bool -> type_literal -> string - val gen_tptp_cls : int * string * kind * string list * string list -> string - val tptp_tfree_clause : string -> string - val tptp_arity_clause : arityClause -> string - val tptp_classrelClause : classrelClause -> string -end - -structure Res_Clause: RES_CLAUSE = -struct - -val schematic_var_prefix = "V_"; -val fixed_var_prefix = "v_"; - -val tvar_prefix = "T_"; -val tfree_prefix = "t_"; - -val clause_prefix = "cls_"; -val arclause_prefix = "clsarity_" -val clrelclause_prefix = "clsrel_"; - -val const_prefix = "c_"; -val tconst_prefix = "tc_"; -val class_prefix = "class_"; - -fun union_all xss = List.foldl (uncurry (union (op =))) [] xss; - -(*Provide readable names for the more common symbolic functions*) -val const_trans_table = - Symtab.make [(@{const_name "op ="}, "equal"), - (@{const_name Orderings.less_eq}, "lessequals"), - (@{const_name "op &"}, "and"), - (@{const_name "op |"}, "or"), - (@{const_name "op -->"}, "implies"), - (@{const_name "op :"}, "in"), - ("ATP_Linkup.fequal", "fequal"), - ("ATP_Linkup.COMBI", "COMBI"), - ("ATP_Linkup.COMBK", "COMBK"), - ("ATP_Linkup.COMBB", "COMBB"), - ("ATP_Linkup.COMBC", "COMBC"), - ("ATP_Linkup.COMBS", "COMBS"), - ("ATP_Linkup.COMBB'", "COMBB_e"), - ("ATP_Linkup.COMBC'", "COMBC_e"), - ("ATP_Linkup.COMBS'", "COMBS_e")]; - -val type_const_trans_table = - Symtab.make [("*", "prod"), - ("+", "sum"), - ("~=>", "map")]; - -(*Escaping of special characters. - Alphanumeric characters are left unchanged. - The character _ goes to __ - Characters in the range ASCII space to / go to _A to _P, respectively. - Other printing characters go to _nnn where nnn is the decimal ASCII code.*) -val A_minus_space = Char.ord #"A" - Char.ord #" "; - -fun stringN_of_int 0 _ = "" - | stringN_of_int k n = stringN_of_int (k-1) (n div 10) ^ Int.toString (n mod 10); - -fun ascii_of_c c = - if Char.isAlphaNum c then String.str c - else if c = #"_" then "__" - else if #" " <= c andalso c <= #"/" - then "_" ^ String.str (Char.chr (Char.ord c + A_minus_space)) - else if Char.isPrint c - then ("_" ^ stringN_of_int 3 (Char.ord c)) (*fixed width, in case more digits follow*) - else "" - -val ascii_of = String.translate ascii_of_c; - -(** Remove ASCII armouring from names in proof files **) - -(*We don't raise error exceptions because this code can run inside the watcher. 
- Also, the errors are "impossible" (hah!)*) -fun undo_ascii_aux rcs [] = String.implode(rev rcs) - | undo_ascii_aux rcs [#"_"] = undo_ascii_aux (#"_"::rcs) [] (*ERROR*) - (*Three types of _ escapes: __, _A to _P, _nnn*) - | undo_ascii_aux rcs (#"_" :: #"_" :: cs) = undo_ascii_aux (#"_"::rcs) cs - | undo_ascii_aux rcs (#"_" :: c :: cs) = - if #"A" <= c andalso c<= #"P" (*translation of #" " to #"/"*) - then undo_ascii_aux (Char.chr(Char.ord c - A_minus_space) :: rcs) cs - else - let val digits = List.take (c::cs, 3) handle Subscript => [] - in - case Int.fromString (String.implode digits) of - NONE => undo_ascii_aux (c:: #"_"::rcs) cs (*ERROR*) - | SOME n => undo_ascii_aux (Char.chr n :: rcs) (List.drop (cs, 2)) - end - | undo_ascii_aux rcs (c::cs) = undo_ascii_aux (c::rcs) cs; - -val undo_ascii_of = undo_ascii_aux [] o String.explode; - -(* convert a list of strings into one single string; surrounded by brackets *) -fun paren_pack [] = "" (*empty argument list*) - | paren_pack strings = "(" ^ commas strings ^ ")"; - -(*TSTP format uses (...) rather than the old [...]*) -fun tptp_pack strings = "(" ^ space_implode " | " strings ^ ")"; - - -(*Remove the initial ' character from a type variable, if it is present*) -fun trim_type_var s = - if s <> "" andalso String.sub(s,0) = #"'" then String.extract(s,1,NONE) - else error ("trim_type: Malformed type variable encountered: " ^ s); - -fun ascii_of_indexname (v,0) = ascii_of v - | ascii_of_indexname (v,i) = ascii_of v ^ "_" ^ Int.toString i; - -fun make_schematic_var v = schematic_var_prefix ^ (ascii_of_indexname v); -fun make_fixed_var x = fixed_var_prefix ^ (ascii_of x); - -fun make_schematic_type_var (x,i) = - tvar_prefix ^ (ascii_of_indexname (trim_type_var x,i)); -fun make_fixed_type_var x = tfree_prefix ^ (ascii_of (trim_type_var x)); - -(*HACK because SPASS truncates identifiers to 63 characters :-(( *) -(*32-bit hash,so we expect no collisions unless there are around 65536 long identifiers...*) -fun controlled_length dfg_format s = - if size s > 60 andalso dfg_format - then Word.toString (Polyhash.hashw_string(s,0w0)) - else s; - -fun lookup_const dfg c = - case Symtab.lookup const_trans_table c of - SOME c' => c' - | NONE => controlled_length dfg (ascii_of c); - -fun lookup_type_const dfg c = - case Symtab.lookup type_const_trans_table c of - SOME c' => c' - | NONE => controlled_length dfg (ascii_of c); - -fun make_fixed_const _ "op =" = "equal" (*MUST BE "equal" because it's built-in to ATPs*) - | make_fixed_const dfg c = const_prefix ^ lookup_const dfg c; - -fun make_fixed_type_const dfg c = tconst_prefix ^ lookup_type_const dfg c; - -fun make_type_class clas = class_prefix ^ ascii_of clas; - - -(***** definitions and functions for FOL clauses, for conversion to TPTP or DFG format. 
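The escaping scheme above can be tried in isolation. Below is a minimal, self-contained sketch of the same character mapping using only the SML Basis library; the constant name "Orderings.less_eq" is merely an illustrative input, and the real code additionally consults const_trans_table and prepends const_prefix.

local
  val A_minus_space = Char.ord #"A" - Char.ord #" "   (* = 33 *)
  (* same case analysis as ascii_of_c above *)
  fun esc c =
    if Char.isAlphaNum c then String.str c
    else if c = #"_" then "__"
    else if #" " <= c andalso c <= #"/" then
      "_" ^ String.str (Char.chr (Char.ord c + A_minus_space))
    else if Char.isPrint c then
      "_" ^ StringCvt.padLeft #"0" 3 (Int.toString (Char.ord c))
    else ""
in
  val demo = String.translate esc "Orderings.less_eq"
  (* demo = "Orderings_Oless__eq":  "." (ASCII 46) maps to "_O",
     "_" doubles to "__", alphanumerics pass through unchanged;
     make_fixed_const would then yield "c_Orderings_Oless__eq". *)
end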
*****) - -datatype kind = Axiom | Conjecture; - -type axiom_name = string; - -(**** Isabelle FOL clauses ****) - -(*FIXME: give the constructors more sensible names*) -datatype fol_type = AtomV of string - | AtomF of string - | Comp of string * fol_type list; - -fun string_of_fol_type (AtomV x) = x - | string_of_fol_type (AtomF x) = x - | string_of_fol_type (Comp(tcon,tps)) = - tcon ^ (paren_pack (map string_of_fol_type tps)); - -(*First string is the type class; the second is a TVar or TFfree*) -datatype type_literal = LTVar of string * string | LTFree of string * string; - -exception CLAUSE of string * term; - -fun atomic_type (TFree (a,_)) = AtomF(make_fixed_type_var a) - | atomic_type (TVar (v,_)) = AtomV(make_schematic_type_var v); - -(*Flatten a type to a fol_type while accumulating sort constraints on the TFrees and - TVars it contains.*) -fun type_of dfg (Type (a, Ts)) = - let val (folTyps, ts) = types_of dfg Ts - val t = make_fixed_type_const dfg a - in (Comp(t,folTyps), ts) end - | type_of dfg T = (atomic_type T, [T]) -and types_of dfg Ts = - let val (folTyps,ts) = ListPair.unzip (map (type_of dfg) Ts) - in (folTyps, union_all ts) end; - -(*Make literals for sorted type variables*) -fun sorts_on_typs_aux (_, []) = [] - | sorts_on_typs_aux ((x,i), s::ss) = - let val sorts = sorts_on_typs_aux ((x,i), ss) - in - if s = "HOL.type" then sorts - else if i = ~1 then LTFree(make_type_class s, make_fixed_type_var x) :: sorts - else LTVar(make_type_class s, make_schematic_type_var (x,i)) :: sorts - end; - -fun sorts_on_typs (TFree (a,s)) = sorts_on_typs_aux ((a,~1),s) - | sorts_on_typs (TVar (v,s)) = sorts_on_typs_aux (v,s); - -fun pred_of_sort (LTVar (s,ty)) = (s,1) - | pred_of_sort (LTFree (s,ty)) = (s,1) - -(*Given a list of sorted type variables, return a list of type literals.*) -fun add_typs Ts = List.foldl (uncurry (union (op =))) [] (map sorts_on_typs Ts); - -(*The correct treatment of TFrees like 'a in lemmas (axiom clauses) is not clear. - * Ignoring them leads to unsound proofs, since we do nothing to ensure that 'a - in a lemma has the same sort as 'a in the conjecture. - * Deleting such clauses will lead to problems with locales in other use of local results - where 'a is fixed. Probably we should delete clauses unless the sorts agree. - * Currently we include a class constraint in the clause, exactly as with TVars. -*) - -(** make axiom and conjecture clauses. 
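To make the flattening performed by type_of concrete, here is an illustrative value built from a throw-away copy of the fol_type datatype (not part of the sources). It assumes the hypothetical Isabelle type 'a list => bool with a free 'a, and the usual mangled constructor names tc_fun, tc_List_Olist and tc_bool produced by make_fixed_type_const.

(* Illustration only: local copy of the datatype, for a worked example. *)
datatype fol_type' =
    AtomV of string                   (* schematic type variable, e.g. "T_a" *)
  | AtomF of string                   (* fixed type variable, e.g. "t_a"     *)
  | Comp of string * fol_type' list   (* applied type constructor            *)

val example_type =
  Comp ("tc_fun", [Comp ("tc_List_Olist", [AtomF "t_a"]), Comp ("tc_bool", [])])
(* string_of_fol_type would render this roughly as
   "tc_fun(tc_List_Olist(t_a), tc_bool)"; sorts_on_typs contributes no type
   literal here, since the default sort HOL.type is ignored. *)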
**) - -fun get_tvar_strs [] = [] - | get_tvar_strs ((TVar (indx,s))::Ts) = - insert (op =) (make_schematic_type_var indx) (get_tvar_strs Ts) - | get_tvar_strs((TFree _)::Ts) = get_tvar_strs Ts - - - -(**** Isabelle arities ****) - -exception ARCLAUSE of string; - -datatype arLit = TConsLit of class * string * string list - | TVarLit of class * string; - -datatype arityClause = - ArityClause of {axiom_name: axiom_name, - conclLit: arLit, - premLits: arLit list}; - - -fun gen_TVars 0 = [] - | gen_TVars n = ("T_" ^ Int.toString n) :: gen_TVars (n-1); - -fun pack_sort(_,[]) = [] - | pack_sort(tvar, "HOL.type"::srt) = pack_sort(tvar, srt) (*IGNORE sort "type"*) - | pack_sort(tvar, cls::srt) = (cls, tvar) :: pack_sort(tvar, srt); - -(*Arity of type constructor tcon :: (arg1,...,argN)res*) -fun make_axiom_arity_clause dfg (tcons, axiom_name, (cls,args)) = - let val tvars = gen_TVars (length args) - val tvars_srts = ListPair.zip (tvars,args) - in - ArityClause {axiom_name = axiom_name, - conclLit = TConsLit (cls, make_fixed_type_const dfg tcons, tvars), - premLits = map TVarLit (union_all(map pack_sort tvars_srts))} - end; - - -(**** Isabelle class relations ****) - -datatype classrelClause = - ClassrelClause of {axiom_name: axiom_name, - subclass: class, - superclass: class}; - -(*Generate all pairs (sub,super) such that sub is a proper subclass of super in theory thy.*) -fun class_pairs thy [] supers = [] - | class_pairs thy subs supers = - let val class_less = Sorts.class_less(Sign.classes_of thy) - fun add_super sub (super,pairs) = - if class_less (sub,super) then (sub,super)::pairs else pairs - fun add_supers (sub,pairs) = List.foldl (add_super sub) pairs supers - in List.foldl add_supers [] subs end; - -fun make_classrelClause (sub,super) = - ClassrelClause {axiom_name = clrelclause_prefix ^ ascii_of sub ^ "_" ^ ascii_of super, - subclass = make_type_class sub, - superclass = make_type_class super}; - -fun make_classrel_clauses thy subs supers = - map make_classrelClause (class_pairs thy subs supers); - - -(** Isabelle arities **) - -fun arity_clause dfg _ _ (tcons, []) = [] - | arity_clause dfg seen n (tcons, ("HOL.type",_)::ars) = (*ignore*) - arity_clause dfg seen n (tcons,ars) - | arity_clause dfg seen n (tcons, (ar as (class,_)) :: ars) = - if class mem_string seen then (*multiple arities for the same tycon, class pair*) - make_axiom_arity_clause dfg (tcons, lookup_type_const dfg tcons ^ "_" ^ class ^ "_" ^ Int.toString n, ar) :: - arity_clause dfg seen (n+1) (tcons,ars) - else - make_axiom_arity_clause dfg (tcons, lookup_type_const dfg tcons ^ "_" ^ class, ar) :: - arity_clause dfg (class::seen) n (tcons,ars) - -fun multi_arity_clause dfg [] = [] - | multi_arity_clause dfg ((tcons,ars) :: tc_arlists) = - arity_clause dfg [] 1 (tcons, ars) @ multi_arity_clause dfg tc_arlists - -(*Generate all pairs (tycon,class,sorts) such that tycon belongs to class in theory thy - provided its arguments have the corresponding sorts.*) -fun type_class_pairs thy tycons classes = - let val alg = Sign.classes_of thy - fun domain_sorts (tycon,class) = Sorts.mg_domain alg tycon [class] - fun add_class tycon (class,pairs) = - (class, domain_sorts(tycon,class))::pairs - handle Sorts.CLASS_ERROR _ => pairs - fun try_classes tycon = (tycon, List.foldl (add_class tycon) [] classes) - in map try_classes tycons end; - -(*Proving one (tycon, class) membership may require proving others, so iterate.*) -fun iter_type_class_pairs thy tycons [] = ([], []) - | iter_type_class_pairs thy tycons classes = - let val cpairs 
= type_class_pairs thy tycons classes - val newclasses = union_all (union_all (union_all (map (map #2 o #2) cpairs))) - |> subtract (op =) classes |> subtract (op =) HOLogic.typeS - val (classes', cpairs') = iter_type_class_pairs thy tycons newclasses - in (union (op =) classes' classes, union (op =) cpairs' cpairs) end; - -fun make_arity_clauses_dfg dfg thy tycons classes = - let val (classes', cpairs) = iter_type_class_pairs thy tycons classes - in (classes', multi_arity_clause dfg cpairs) end; -val make_arity_clauses = make_arity_clauses_dfg false; - -(**** Find occurrences of predicates in clauses ****) - -(*FIXME: multiple-arity checking doesn't work, as update_new is the wrong - function (it flags repeated declarations of a function, even with the same arity)*) - -fun update_many (tab, keypairs) = List.foldl (uncurry Symtab.update) tab keypairs; - -fun add_type_sort_preds (T, preds) = - update_many (preds, map pred_of_sort (sorts_on_typs T)); - -fun add_classrelClause_preds (ClassrelClause {subclass,superclass,...}, preds) = - Symtab.update (subclass,1) (Symtab.update (superclass,1) preds); - -fun class_of_arityLit (TConsLit (tclass, _, _)) = tclass - | class_of_arityLit (TVarLit (tclass, _)) = tclass; - -fun add_arityClause_preds (ArityClause {conclLit,premLits,...}, preds) = - let val classes = map (make_type_class o class_of_arityLit) (conclLit::premLits) - fun upd (class,preds) = Symtab.update (class,1) preds - in List.foldl upd preds classes end; - -(*** Find occurrences of functions in clauses ***) - -fun add_foltype_funcs (AtomV _, funcs) = funcs - | add_foltype_funcs (AtomF a, funcs) = Symtab.update (a,0) funcs - | add_foltype_funcs (Comp(a,tys), funcs) = - List.foldl add_foltype_funcs (Symtab.update (a, length tys) funcs) tys; - -(*TFrees are recorded as constants*) -fun add_type_sort_funcs (TVar _, funcs) = funcs - | add_type_sort_funcs (TFree (a, _), funcs) = - Symtab.update (make_fixed_type_var a, 0) funcs - -fun add_arityClause_funcs (ArityClause {conclLit,...}, funcs) = - let val TConsLit (_, tcons, tvars) = conclLit - in Symtab.update (tcons, length tvars) funcs end; - -(*This type can be overlooked because it is built-in...*) -val init_functab = Symtab.update ("tc_itself", 1) Symtab.empty; - - -(**** String-oriented operations ****) - -fun string_of_clausename (cls_id,ax_name) = - clause_prefix ^ ascii_of ax_name ^ "_" ^ Int.toString cls_id; - -fun string_of_type_clsname (cls_id,ax_name,idx) = - string_of_clausename (cls_id,ax_name) ^ "_tcs" ^ (Int.toString idx); - - -(**** Producing DFG files ****) - -(*Attach sign in DFG syntax: false means negate.*) -fun dfg_sign true s = s - | dfg_sign false s = "not(" ^ s ^ ")" - -fun dfg_of_typeLit pos (LTVar (s,ty)) = dfg_sign pos (s ^ "(" ^ ty ^ ")") - | dfg_of_typeLit pos (LTFree (s,ty)) = dfg_sign pos (s ^ "(" ^ ty ^ ")"); - -(*Enclose the clause body by quantifiers, if necessary*) -fun dfg_forall [] body = body - | dfg_forall vars body = "forall([" ^ commas vars ^ "],\n" ^ body ^ ")" - -fun gen_dfg_cls (cls_id, ax_name, Axiom, lits, tylits, vars) = - "clause( %(axiom)\n" ^ - dfg_forall vars ("or(" ^ commas (tylits@lits) ^ ")") ^ ",\n" ^ - string_of_clausename (cls_id,ax_name) ^ ").\n\n" - | gen_dfg_cls (cls_id, ax_name, Conjecture, lits, _, vars) = - "clause( %(negated_conjecture)\n" ^ - dfg_forall vars ("or(" ^ commas lits ^ ")") ^ ",\n" ^ - string_of_clausename (cls_id,ax_name) ^ ").\n\n"; - -fun string_of_arity (name, num) = "(" ^ name ^ "," ^ Int.toString num ^ ")" - -fun string_of_preds [] = "" - | string_of_preds 
preds = "predicates[" ^ commas(map string_of_arity preds) ^ "].\n"; - -fun string_of_funcs [] = "" - | string_of_funcs funcs = "functions[" ^ commas(map string_of_arity funcs) ^ "].\n" ; - -fun string_of_symbols predstr funcstr = - "list_of_symbols.\n" ^ predstr ^ funcstr ^ "end_of_list.\n\n"; - -fun string_of_start name = "begin_problem(" ^ name ^ ").\n\n"; - -fun string_of_descrip name = - "list_of_descriptions.\nname({*" ^ name ^ - "*}).\nauthor({*Isabelle*}).\nstatus(unknown).\ndescription({*auto-generated*}).\nend_of_list.\n\n" - -fun dfg_tfree_clause tfree_lit = - "clause( %(negated_conjecture)\n" ^ "or( " ^ tfree_lit ^ "),\n" ^ "tfree_tcs" ^ ").\n\n" - -fun dfg_of_arLit (TConsLit (c,t,args)) = - dfg_sign true (make_type_class c ^ "(" ^ t ^ paren_pack args ^ ")") - | dfg_of_arLit (TVarLit (c,str)) = - dfg_sign false (make_type_class c ^ "(" ^ str ^ ")") - -fun dfg_classrelLits sub sup = "not(" ^ sub ^ "(T)), " ^ sup ^ "(T)"; - -fun dfg_classrelClause (ClassrelClause {axiom_name,subclass,superclass,...}) = - "clause(forall([T],\nor( " ^ dfg_classrelLits subclass superclass ^ ")),\n" ^ - axiom_name ^ ").\n\n"; - -fun string_of_ar axiom_name = arclause_prefix ^ ascii_of axiom_name; - -fun dfg_arity_clause (ArityClause{axiom_name,conclLit,premLits,...}) = - let val TConsLit (_,_,tvars) = conclLit - val lits = map dfg_of_arLit (conclLit :: premLits) - in - "clause( %(axiom)\n" ^ - dfg_forall tvars ("or( " ^ commas lits ^ ")") ^ ",\n" ^ - string_of_ar axiom_name ^ ").\n\n" - end; - - -(**** Produce TPTP files ****) - -(*Attach sign in TPTP syntax: false means negate.*) -fun tptp_sign true s = s - | tptp_sign false s = "~ " ^ s - -fun tptp_of_typeLit pos (LTVar (s,ty)) = tptp_sign pos (s ^ "(" ^ ty ^ ")") - | tptp_of_typeLit pos (LTFree (s,ty)) = tptp_sign pos (s ^ "(" ^ ty ^ ")"); - -fun gen_tptp_cls (cls_id,ax_name,Axiom,lits,tylits) = - "cnf(" ^ string_of_clausename (cls_id,ax_name) ^ ",axiom," ^ - tptp_pack (tylits@lits) ^ ").\n" - | gen_tptp_cls (cls_id,ax_name,Conjecture,lits,_) = - "cnf(" ^ string_of_clausename (cls_id,ax_name) ^ ",negated_conjecture," ^ - tptp_pack lits ^ ").\n"; - -fun tptp_tfree_clause tfree_lit = - "cnf(" ^ "tfree_tcs," ^ "negated_conjecture" ^ "," ^ tptp_pack[tfree_lit] ^ ").\n"; - -fun tptp_of_arLit (TConsLit (c,t,args)) = - tptp_sign true (make_type_class c ^ "(" ^ t ^ paren_pack args ^ ")") - | tptp_of_arLit (TVarLit (c,str)) = - tptp_sign false (make_type_class c ^ "(" ^ str ^ ")") - -fun tptp_arity_clause (ArityClause{axiom_name,conclLit,premLits,...}) = - "cnf(" ^ string_of_ar axiom_name ^ ",axiom," ^ - tptp_pack (map tptp_of_arLit (conclLit :: premLits)) ^ ").\n"; - -fun tptp_classrelLits sub sup = - let val tvar = "(T)" - in tptp_pack [tptp_sign false (sub^tvar), tptp_sign true (sup^tvar)] end; - -fun tptp_classrelClause (ClassrelClause {axiom_name,subclass,superclass,...}) = - "cnf(" ^ axiom_name ^ ",axiom," ^ tptp_classrelLits subclass superclass ^ ").\n" - -end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/res_hol_clause.ML --- a/src/HOL/Tools/res_hol_clause.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,531 +0,0 @@ -(* Title: HOL/Tools/res_hol_clause.ML - Author: Jia Meng, NICTA - -FOL clauses translated from HOL formulae. 
-*) - -signature RES_HOL_CLAUSE = -sig - val ext: thm - val comb_I: thm - val comb_K: thm - val comb_B: thm - val comb_C: thm - val comb_S: thm - val minimize_applies: bool - type axiom_name = string - type polarity = bool - type clause_id = int - datatype combterm = - CombConst of string * Res_Clause.fol_type * Res_Clause.fol_type list (*Const and Free*) - | CombVar of string * Res_Clause.fol_type - | CombApp of combterm * combterm - datatype literal = Literal of polarity * combterm - datatype clause = Clause of {clause_id: clause_id, axiom_name: axiom_name, th: thm, - kind: Res_Clause.kind,literals: literal list, ctypes_sorts: typ list} - val type_of_combterm: combterm -> Res_Clause.fol_type - val strip_comb: combterm -> combterm * combterm list - val literals_of_term: theory -> term -> literal list * typ list - exception TOO_TRIVIAL - val make_conjecture_clauses: bool -> theory -> thm list -> clause list - val make_axiom_clauses: bool -> - theory -> - (thm * (axiom_name * clause_id)) list -> (axiom_name * clause) list - val get_helper_clauses: bool -> - theory -> - bool -> - clause list * (thm * (axiom_name * clause_id)) list * string list -> - clause list - val tptp_write_file: bool -> Path.T -> - clause list * clause list * clause list * clause list * - Res_Clause.classrelClause list * Res_Clause.arityClause list -> - int * int - val dfg_write_file: bool -> Path.T -> - clause list * clause list * clause list * clause list * - Res_Clause.classrelClause list * Res_Clause.arityClause list -> - int * int -end - -structure Res_HOL_Clause: RES_HOL_CLAUSE = -struct - -structure RC = Res_Clause; (* FIXME avoid structure alias *) - -(* theorems for combinators and function extensionality *) -val ext = thm "HOL.ext"; -val comb_I = thm "ATP_Linkup.COMBI_def"; -val comb_K = thm "ATP_Linkup.COMBK_def"; -val comb_B = thm "ATP_Linkup.COMBB_def"; -val comb_C = thm "ATP_Linkup.COMBC_def"; -val comb_S = thm "ATP_Linkup.COMBS_def"; -val fequal_imp_equal = thm "ATP_Linkup.fequal_imp_equal"; -val equal_imp_fequal = thm "ATP_Linkup.equal_imp_fequal"; - - -(* Parameter t_full below indicates that full type information is to be -exported *) - -(*If true, each function will be directly applied to as many arguments as possible, avoiding - use of the "apply" operator. Use of hBOOL is also minimized.*) -val minimize_applies = true; - -fun min_arity_of const_min_arity c = the_default 0 (Symtab.lookup const_min_arity c); - -(*True if the constant ever appears outside of the top-level position in literals. 
- If false, the constant always receives all of its arguments and is used as a predicate.*) -fun needs_hBOOL const_needs_hBOOL c = - not minimize_applies orelse - the_default false (Symtab.lookup const_needs_hBOOL c); - - -(******************************************************) -(* data types for typed combinator expressions *) -(******************************************************) - -type axiom_name = string; -type polarity = bool; -type clause_id = int; - -datatype combterm = CombConst of string * RC.fol_type * RC.fol_type list (*Const and Free*) - | CombVar of string * RC.fol_type - | CombApp of combterm * combterm - -datatype literal = Literal of polarity * combterm; - -datatype clause = - Clause of {clause_id: clause_id, - axiom_name: axiom_name, - th: thm, - kind: RC.kind, - literals: literal list, - ctypes_sorts: typ list}; - - -(*********************************************************************) -(* convert a clause with type Term.term to a clause with type clause *) -(*********************************************************************) - -fun isFalse (Literal(pol, CombConst(c,_,_))) = - (pol andalso c = "c_False") orelse - (not pol andalso c = "c_True") - | isFalse _ = false; - -fun isTrue (Literal (pol, CombConst(c,_,_))) = - (pol andalso c = "c_True") orelse - (not pol andalso c = "c_False") - | isTrue _ = false; - -fun isTaut (Clause {literals,...}) = exists isTrue literals; - -fun type_of dfg (Type (a, Ts)) = - let val (folTypes,ts) = types_of dfg Ts - in (RC.Comp(RC.make_fixed_type_const dfg a, folTypes), ts) end - | type_of _ (tp as TFree (a, _)) = - (RC.AtomF (RC.make_fixed_type_var a), [tp]) - | type_of _ (tp as TVar (v, _)) = - (RC.AtomV (RC.make_schematic_type_var v), [tp]) -and types_of dfg Ts = - let val (folTyps,ts) = ListPair.unzip (map (type_of dfg) Ts) - in (folTyps, RC.union_all ts) end; - -(* same as above, but no gathering of sort information *) -fun simp_type_of dfg (Type (a, Ts)) = - RC.Comp(RC.make_fixed_type_const dfg a, map (simp_type_of dfg) Ts) - | simp_type_of _ (TFree (a, _)) = RC.AtomF(RC.make_fixed_type_var a) - | simp_type_of _ (TVar (v, _)) = RC.AtomV(RC.make_schematic_type_var v); - - -fun const_type_of dfg thy (c,t) = - let val (tp,ts) = type_of dfg t - in (tp, ts, map (simp_type_of dfg) (Sign.const_typargs thy (c,t))) end; - -(* convert a Term.term (with combinators) into a combterm, also accummulate sort info *) -fun combterm_of dfg thy (Const(c,t)) = - let val (tp,ts,tvar_list) = const_type_of dfg thy (c,t) - val c' = CombConst(RC.make_fixed_const dfg c, tp, tvar_list) - in (c',ts) end - | combterm_of dfg _ (Free(v,t)) = - let val (tp,ts) = type_of dfg t - val v' = CombConst(RC.make_fixed_var v, tp, []) - in (v',ts) end - | combterm_of dfg _ (Var(v,t)) = - let val (tp,ts) = type_of dfg t - val v' = CombVar(RC.make_schematic_var v,tp) - in (v',ts) end - | combterm_of dfg thy (P $ Q) = - let val (P',tsP) = combterm_of dfg thy P - val (Q',tsQ) = combterm_of dfg thy Q - in (CombApp(P',Q'), union (op =) tsP tsQ) end - | combterm_of _ _ (t as Abs _) = raise RC.CLAUSE ("HOL CLAUSE", t); - -fun predicate_of dfg thy ((Const("Not",_) $ P), polarity) = predicate_of dfg thy (P, not polarity) - | predicate_of dfg thy (t,polarity) = (combterm_of dfg thy (Envir.eta_contract t), polarity); - -fun literals_of_term1 dfg thy args (Const("Trueprop",_) $ P) = literals_of_term1 dfg thy args P - | literals_of_term1 dfg thy args (Const("op |",_) $ P $ Q) = - literals_of_term1 dfg thy (literals_of_term1 dfg thy args P) Q - | literals_of_term1 dfg thy (lits,ts) 
P = - let val ((pred,ts'),pol) = predicate_of dfg thy (P,true) - in - (Literal(pol,pred)::lits, union (op =) ts ts') - end; - -fun literals_of_term_dfg dfg thy P = literals_of_term1 dfg thy ([],[]) P; -val literals_of_term = literals_of_term_dfg false; - -(* Problem too trivial for resolution (empty clause) *) -exception TOO_TRIVIAL; - -(* making axiom and conjecture clauses *) -fun make_clause dfg thy (clause_id,axiom_name,kind,th) = - let val (lits,ctypes_sorts) = literals_of_term_dfg dfg thy (prop_of th) - in - if forall isFalse lits - then raise TOO_TRIVIAL - else - Clause {clause_id = clause_id, axiom_name = axiom_name, th = th, kind = kind, - literals = lits, ctypes_sorts = ctypes_sorts} - end; - - -fun add_axiom_clause dfg thy ((th,(name,id)), pairs) = - let val cls = make_clause dfg thy (id, name, RC.Axiom, th) - in - if isTaut cls then pairs else (name,cls)::pairs - end; - -fun make_axiom_clauses dfg thy = List.foldl (add_axiom_clause dfg thy) []; - -fun make_conjecture_clauses_aux _ _ _ [] = [] - | make_conjecture_clauses_aux dfg thy n (th::ths) = - make_clause dfg thy (n,"conjecture", RC.Conjecture, th) :: - make_conjecture_clauses_aux dfg thy (n+1) ths; - -fun make_conjecture_clauses dfg thy = make_conjecture_clauses_aux dfg thy 0; - - -(**********************************************************************) -(* convert clause into ATP specific formats: *) -(* TPTP used by Vampire and E *) -(* DFG used by SPASS *) -(**********************************************************************) - -(*Result of a function type; no need to check that the argument type matches.*) -fun result_type (RC.Comp ("tc_fun", [_, tp2])) = tp2 - | result_type _ = error "result_type" - -fun type_of_combterm (CombConst (_, tp, _)) = tp - | type_of_combterm (CombVar (_, tp)) = tp - | type_of_combterm (CombApp (t1, _)) = result_type (type_of_combterm t1); - -(*gets the head of a combinator application, along with the list of arguments*) -fun strip_comb u = - let fun stripc (CombApp(t,u), ts) = stripc (t, u::ts) - | stripc x = x - in stripc(u,[]) end; - -val type_wrapper = "ti"; - -fun head_needs_hBOOL const_needs_hBOOL (CombConst(c,_,_)) = needs_hBOOL const_needs_hBOOL c - | head_needs_hBOOL _ _ = true; - -fun wrap_type t_full (s, tp) = - if t_full then - type_wrapper ^ RC.paren_pack [s, RC.string_of_fol_type tp] - else s; - -fun apply ss = "hAPP" ^ RC.paren_pack ss; - -fun rev_apply (v, []) = v - | rev_apply (v, arg::args) = apply [rev_apply (v, args), arg]; - -fun string_apply (v, args) = rev_apply (v, rev args); - -(*Apply an operator to the argument strings, using either the "apply" operator or - direct function application.*) -fun string_of_applic t_full cma (CombConst (c, _, tvars), args) = - let val c = if c = "equal" then "c_fequal" else c - val nargs = min_arity_of cma c - val args1 = List.take(args, nargs) - handle Subscript => error ("string_of_applic: " ^ c ^ " has arity " ^ - Int.toString nargs ^ " but is applied to " ^ - space_implode ", " args) - val args2 = List.drop(args, nargs) - val targs = if not t_full then map RC.string_of_fol_type tvars - else [] - in - string_apply (c ^ RC.paren_pack (args1@targs), args2) - end - | string_of_applic _ _ (CombVar (v, _), args) = string_apply (v, args) - | string_of_applic _ _ _ = error "string_of_applic"; - -fun wrap_type_if t_full cnh (head, s, tp) = - if head_needs_hBOOL cnh head then wrap_type t_full (s, tp) else s; - -fun string_of_combterm (params as (t_full, cma, cnh)) t = - let val (head, args) = strip_comb t - in wrap_type_if t_full cnh 
(head, - string_of_applic t_full cma (head, map (string_of_combterm (params)) args), - type_of_combterm t) - end; - -(*Boolean-valued terms are here converted to literals.*) -fun boolify params t = - "hBOOL" ^ RC.paren_pack [string_of_combterm params t]; - -fun string_of_predicate (params as (_,_,cnh)) t = - case t of - (CombApp(CombApp(CombConst("equal",_,_), t1), t2)) => - (*DFG only: new TPTP prefers infix equality*) - ("equal" ^ RC.paren_pack [string_of_combterm params t1, string_of_combterm params t2]) - | _ => - case #1 (strip_comb t) of - CombConst(c,_,_) => if needs_hBOOL cnh c then boolify params t else string_of_combterm params t - | _ => boolify params t; - - -(*** tptp format ***) - -fun tptp_of_equality params pol (t1,t2) = - let val eqop = if pol then " = " else " != " - in string_of_combterm params t1 ^ eqop ^ string_of_combterm params t2 end; - -fun tptp_literal params (Literal(pol, CombApp(CombApp(CombConst("equal",_,_), t1), t2))) = - tptp_of_equality params pol (t1,t2) - | tptp_literal params (Literal(pol,pred)) = - RC.tptp_sign pol (string_of_predicate params pred); - -(*Given a clause, returns its literals paired with a list of literals concerning TFrees; - the latter should only occur in conjecture clauses.*) -fun tptp_type_lits params pos (Clause{literals, ctypes_sorts, ...}) = - (map (tptp_literal params) literals, - map (RC.tptp_of_typeLit pos) (RC.add_typs ctypes_sorts)); - -fun clause2tptp params (cls as Clause {axiom_name, clause_id, kind, ...}) = - let val (lits,tylits) = tptp_type_lits params (kind = RC.Conjecture) cls - in - (RC.gen_tptp_cls(clause_id,axiom_name,kind,lits,tylits), tylits) - end; - - -(*** dfg format ***) - -fun dfg_literal params (Literal(pol,pred)) = RC.dfg_sign pol (string_of_predicate params pred); - -fun dfg_type_lits params pos (Clause{literals, ctypes_sorts, ...}) = - (map (dfg_literal params) literals, - map (RC.dfg_of_typeLit pos) (RC.add_typs ctypes_sorts)); - -fun get_uvars (CombConst _) vars = vars - | get_uvars (CombVar(v,_)) vars = (v::vars) - | get_uvars (CombApp(P,Q)) vars = get_uvars P (get_uvars Q vars); - -fun get_uvars_l (Literal(_,c)) = get_uvars c []; - -fun dfg_vars (Clause {literals,...}) = RC.union_all (map get_uvars_l literals); - -fun clause2dfg params (cls as Clause{axiom_name,clause_id,kind,ctypes_sorts,...}) = - let val (lits,tylits) = dfg_type_lits params (kind = RC.Conjecture) cls - val vars = dfg_vars cls - val tvars = RC.get_tvar_strs ctypes_sorts - in - (RC.gen_dfg_cls(clause_id, axiom_name, kind, lits, tylits, tvars@vars), tylits) - end; - - -(** For DFG format: accumulate function and predicate declarations **) - -fun addtypes tvars tab = List.foldl RC.add_foltype_funcs tab tvars; - -fun add_decls (t_full, cma, cnh) (CombConst (c, _, tvars), (funcs, preds)) = - if c = "equal" then (addtypes tvars funcs, preds) - else - let val arity = min_arity_of cma c - val ntys = if not t_full then length tvars else 0 - val addit = Symtab.update(c, arity+ntys) - in - if needs_hBOOL cnh c then (addtypes tvars (addit funcs), preds) - else (addtypes tvars funcs, addit preds) - end - | add_decls _ (CombVar(_,ctp), (funcs,preds)) = - (RC.add_foltype_funcs (ctp,funcs), preds) - | add_decls params (CombApp(P,Q),decls) = add_decls params (P,add_decls params (Q,decls)); - -fun add_literal_decls params (Literal(_,c), decls) = add_decls params (c,decls); - -fun add_clause_decls params (Clause {literals, ...}, decls) = - List.foldl (add_literal_decls params) decls literals - handle Symtab.DUP a => error ("function " ^ a ^ " has 
multiple arities") - -fun decls_of_clauses params clauses arity_clauses = - let val init_functab = Symtab.update (type_wrapper,2) (Symtab.update ("hAPP",2) RC.init_functab) - val init_predtab = Symtab.update ("hBOOL",1) Symtab.empty - val (functab,predtab) = (List.foldl (add_clause_decls params) (init_functab, init_predtab) clauses) - in - (Symtab.dest (List.foldl RC.add_arityClause_funcs functab arity_clauses), - Symtab.dest predtab) - end; - -fun add_clause_preds (Clause {ctypes_sorts, ...}, preds) = - List.foldl RC.add_type_sort_preds preds ctypes_sorts - handle Symtab.DUP a => error ("predicate " ^ a ^ " has multiple arities") - -(*Higher-order clauses have only the predicates hBOOL and type classes.*) -fun preds_of_clauses clauses clsrel_clauses arity_clauses = - Symtab.dest - (List.foldl RC.add_classrelClause_preds - (List.foldl RC.add_arityClause_preds - (List.foldl add_clause_preds Symtab.empty clauses) - arity_clauses) - clsrel_clauses) - - -(**********************************************************************) -(* write clauses to files *) -(**********************************************************************) - -val init_counters = - Symtab.make [("c_COMBI", 0), ("c_COMBK", 0), - ("c_COMBB", 0), ("c_COMBC", 0), - ("c_COMBS", 0)]; - -fun count_combterm (CombConst (c, _, _), ct) = - (case Symtab.lookup ct c of NONE => ct (*no counter*) - | SOME n => Symtab.update (c,n+1) ct) - | count_combterm (CombVar _, ct) = ct - | count_combterm (CombApp(t1,t2), ct) = count_combterm(t1, count_combterm(t2, ct)); - -fun count_literal (Literal(_,t), ct) = count_combterm(t,ct); - -fun count_clause (Clause{literals,...}, ct) = List.foldl count_literal ct literals; - -fun count_user_clause user_lemmas (Clause{axiom_name,literals,...}, ct) = - if axiom_name mem_string user_lemmas then List.foldl count_literal ct literals - else ct; - -fun cnf_helper_thms thy = - Res_Axioms.cnf_rules_pairs thy o map Res_Axioms.pairname - -fun get_helper_clauses dfg thy isFO (conjectures, axcls, user_lemmas) = - if isFO then [] (*first-order*) - else - let - val axclauses = map #2 (make_axiom_clauses dfg thy axcls) - val ct0 = List.foldl count_clause init_counters conjectures - val ct = List.foldl (count_user_clause user_lemmas) ct0 axclauses - fun needed c = the (Symtab.lookup ct c) > 0 - val IK = if needed "c_COMBI" orelse needed "c_COMBK" - then cnf_helper_thms thy [comb_I,comb_K] - else [] - val BC = if needed "c_COMBB" orelse needed "c_COMBC" - then cnf_helper_thms thy [comb_B,comb_C] - else [] - val S = if needed "c_COMBS" - then cnf_helper_thms thy [comb_S] - else [] - val other = cnf_helper_thms thy [fequal_imp_equal,equal_imp_fequal] - in - map #2 (make_axiom_clauses dfg thy (other @ IK @ BC @ S)) - end; - -(*Find the minimal arity of each function mentioned in the term. 
Also, note which uses - are not at top level, to see if hBOOL is needed.*) -fun count_constants_term toplev t (const_min_arity, const_needs_hBOOL) = - let val (head, args) = strip_comb t - val n = length args - val (const_min_arity, const_needs_hBOOL) = fold (count_constants_term false) args (const_min_arity, const_needs_hBOOL) - in - case head of - CombConst (a,_,_) => (*predicate or function version of "equal"?*) - let val a = if a="equal" andalso not toplev then "c_fequal" else a - val const_min_arity = Symtab.map_default (a, n) (Integer.min n) const_min_arity - in - if toplev then (const_min_arity, const_needs_hBOOL) - else (const_min_arity, Symtab.update (a,true) (const_needs_hBOOL)) - end - | _ => (const_min_arity, const_needs_hBOOL) - end; - -(*A literal is a top-level term*) -fun count_constants_lit (Literal (_,t)) (const_min_arity, const_needs_hBOOL) = - count_constants_term true t (const_min_arity, const_needs_hBOOL); - -fun count_constants_clause (Clause{literals,...}) (const_min_arity, const_needs_hBOOL) = - fold count_constants_lit literals (const_min_arity, const_needs_hBOOL); - -fun display_arity const_needs_hBOOL (c,n) = - Res_Axioms.trace_msg (fn () => "Constant: " ^ c ^ " arity:\t" ^ Int.toString n ^ - (if needs_hBOOL const_needs_hBOOL c then " needs hBOOL" else "")); - -fun count_constants (conjectures, _, extra_clauses, helper_clauses, _, _) = - if minimize_applies then - let val (const_min_arity, const_needs_hBOOL) = - fold count_constants_clause conjectures (Symtab.empty, Symtab.empty) - |> fold count_constants_clause extra_clauses - |> fold count_constants_clause helper_clauses - val _ = List.app (display_arity const_needs_hBOOL) (Symtab.dest (const_min_arity)) - in (const_min_arity, const_needs_hBOOL) end - else (Symtab.empty, Symtab.empty); - -(* tptp format *) - -fun tptp_write_file t_full file clauses = - let - val (conjectures, axclauses, _, helper_clauses, - classrel_clauses, arity_clauses) = clauses - val (cma, cnh) = count_constants clauses - val params = (t_full, cma, cnh) - val (tptp_clss,tfree_litss) = ListPair.unzip (map (clause2tptp params) conjectures) - val tfree_clss = map RC.tptp_tfree_clause (List.foldl (uncurry (union (op =))) [] tfree_litss) - val _ = - File.write_list file ( - map (#1 o (clause2tptp params)) axclauses @ - tfree_clss @ - tptp_clss @ - map RC.tptp_classrelClause classrel_clauses @ - map RC.tptp_arity_clause arity_clauses @ - map (#1 o (clause2tptp params)) helper_clauses) - in (length axclauses + 1, length tfree_clss + length tptp_clss) - end; - - -(* dfg format *) - -fun dfg_write_file t_full file clauses = - let - val (conjectures, axclauses, _, helper_clauses, - classrel_clauses, arity_clauses) = clauses - val (cma, cnh) = count_constants clauses - val params = (t_full, cma, cnh) - val (dfg_clss, tfree_litss) = ListPair.unzip (map (clause2dfg params) conjectures) - and probname = Path.implode (Path.base file) - val axstrs = map (#1 o (clause2dfg params)) axclauses - val tfree_clss = map RC.dfg_tfree_clause (RC.union_all tfree_litss) - val helper_clauses_strs = map (#1 o (clause2dfg params)) helper_clauses - val (funcs,cl_preds) = decls_of_clauses params (helper_clauses @ conjectures @ axclauses) arity_clauses - and ty_preds = preds_of_clauses axclauses classrel_clauses arity_clauses - val _ = - File.write_list file ( - RC.string_of_start probname :: - RC.string_of_descrip probname :: - RC.string_of_symbols (RC.string_of_funcs funcs) - (RC.string_of_preds (cl_preds @ ty_preds)) :: - "list_of_clauses(axioms,cnf).\n" :: - axstrs @ 
- map RC.dfg_classrelClause classrel_clauses @ - map RC.dfg_arity_clause arity_clauses @ - helper_clauses_strs @ - ["end_of_list.\n\nlist_of_clauses(conjectures,cnf).\n"] @ - tfree_clss @ - dfg_clss @ - ["end_of_list.\n\n", - (*VarWeight=3 helps the HO problems, probably by counteracting the presence of hAPP*) - "list_of_settings(SPASS).\n{*\nset_flag(VarWeight,3).\n*}\nend_of_list.\n\n", - "end_problem.\n"]) - - in (length axclauses + length classrel_clauses + length arity_clauses + - length helper_clauses + 1, length tfree_clss + length dfg_clss) - end; - -end; - diff -r b766aad9136d -r e31ec41a551b src/HOL/Tools/res_reconstruct.ML --- a/src/HOL/Tools/res_reconstruct.ML Thu Mar 18 13:57:00 2010 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,584 +0,0 @@ -(* Title: HOL/Tools/res_reconstruct.ML - Author: Lawrence C Paulson and Claire Quigley, Cambridge University Computer Laboratory - -Transfer of proofs from external provers. -*) - -signature RES_RECONSTRUCT = -sig - val chained_hint: string - - val fix_sorts: sort Vartab.table -> term -> term - val invert_const: string -> string - val invert_type_const: string -> string - val num_typargs: theory -> string -> int - val make_tvar: string -> typ - val strip_prefix: string -> string -> string option - val setup: theory -> theory - (* extracting lemma list*) - val find_failure: string -> string option - val lemma_list: bool -> string -> - string * string vector * (int * int) * Proof.context * thm * int -> string * string list - (* structured proofs *) - val structured_proof: string -> - string * string vector * (int * int) * Proof.context * thm * int -> string * string list -end; - -structure Res_Reconstruct : RES_RECONSTRUCT = -struct - -val trace_path = Path.basic "atp_trace"; - -fun trace s = - if ! Res_Axioms.trace then File.append (File.tmp_path trace_path) s - else (); - -fun string_of_thm ctxt = PrintMode.setmp [] (Display.string_of_thm ctxt); - -(*For generating structured proofs: keep every nth proof line*) -val (modulus, modulus_setup) = Attrib.config_int "sledgehammer_modulus" 1; - -(*Indicates whether to include sort information in generated proofs*) -val (recon_sorts, recon_sorts_setup) = Attrib.config_bool "sledgehammer_sorts" true; - -(*Indicated whether to generate full proofs or just lemma lists - now via setup of atps*) -(* val (full_proofs, full_proofs_setup) = Attrib.config_bool "sledgehammer_full" false; *) - -val setup = modulus_setup #> recon_sorts_setup; - -(**** PARSING OF TSTP FORMAT ****) - -(*Syntax trees, either termlist or formulae*) -datatype stree = Int of int | Br of string * stree list; - -fun atom x = Br(x,[]); - -fun scons (x,y) = Br("cons", [x,y]); -val listof = List.foldl scons (atom "nil"); - -(*Strings enclosed in single quotes, e.g. 
filenames*) -val quoted = $$"'" |-- Scan.repeat (~$$"'") --| $$"'" >> implode; - -(*Intended for $true and $false*) -fun tf s = "c_" ^ str (Char.toUpper (String.sub(s,0))) ^ String.extract(s,1,NONE); -val truefalse = $$"$" |-- Symbol.scan_id >> (atom o tf); - -(*Integer constants, typically proof line numbers*) -fun is_digit s = Char.isDigit (String.sub(s,0)); -val integer = Scan.many1 is_digit >> (the o Int.fromString o implode); - -(*Generalized FO terms, which include filenames, numbers, etc.*) -fun termlist x = (term ::: Scan.repeat ($$"," |-- term)) x -and term x = (quoted >> atom || integer>>Int || truefalse || - Symbol.scan_id -- Scan.optional ($$"(" |-- termlist --| $$")") [] >> Br || - $$"(" |-- term --| $$")" || - $$"[" |-- Scan.optional termlist [] --| $$"]" >> listof) x; - -fun negate t = Br("c_Not", [t]); -fun equate (t1,t2) = Br("c_equal", [t1,t2]); - -(*Apply equal or not-equal to a term*) -fun syn_equal (t, NONE) = t - | syn_equal (t1, SOME (NONE, t2)) = equate (t1,t2) - | syn_equal (t1, SOME (SOME _, t2)) = negate (equate (t1,t2)); - -(*Literals can involve negation, = and !=.*) -fun literal x = ($$"~" |-- literal >> negate || - (term -- Scan.option (Scan.option ($$"!") --| $$"=" -- term) >> syn_equal)) x; - -val literals = literal ::: Scan.repeat ($$"|" |-- literal); - -(*Clause: a list of literals separated by the disjunction sign*) -val clause = $$"(" |-- literals --| $$")" || Scan.single literal; - -val annotations = $$"," |-- term -- Scan.option ($$"," |-- termlist); - -(* ::= cnf(,,). - The could be an identifier, but we assume integers.*) -val tstp_line = (Scan.this_string "cnf" -- $$"(") |-- - integer --| $$"," -- Symbol.scan_id --| $$"," -- - clause -- Scan.option annotations --| $$ ")"; - - -(**** INTERPRETATION OF TSTP SYNTAX TREES ****) - -exception STREE of stree; - -(*If string s has the prefix s1, return the result of deleting it.*) -fun strip_prefix s1 s = - if String.isPrefix s1 s - then SOME (Res_Clause.undo_ascii_of (String.extract (s, size s1, NONE))) - else NONE; - -(*Invert the table of translations between Isabelle and ATPs*) -val type_const_trans_table_inv = - Symtab.make (map swap (Symtab.dest Res_Clause.type_const_trans_table)); - -fun invert_type_const c = - case Symtab.lookup type_const_trans_table_inv c of - SOME c' => c' - | NONE => c; - -fun make_tvar b = TVar(("'" ^ b, 0), HOLogic.typeS); -fun make_var (b,T) = Var((b,0),T); - -(*Type variables are given the basic sort, HOL.type. 
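The parser tstp_line above accepts resolution proof lines of the shape cnf(<name>,<role>,<clause>,<annotations>)., where the <name> is assumed to be an integer. A made-up example of an accepted line, for reference:

(* Illustration only: a hypothetical TSTP proof line of the kind emitted by
   E or Vampire.  tstp_line parses it to the name 42, the role "plain" and
   the clause as syntax trees; decode_tstp later extracts the referenced
   lines 7 and 9 from the inference term via ints_of_stree. *)
val example_tstp =
  "cnf(42,plain,(c_p(X1)|~c_equal(X1,c_a)),inference(spm,[status(thm)],[7,9]))."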
Some will later be constrained - by information from type literals, or by type inference.*) -fun type_of_stree t = - case t of - Int _ => raise STREE t - | Br (a,ts) => - let val Ts = map type_of_stree ts - in - case strip_prefix Res_Clause.tconst_prefix a of - SOME b => Type(invert_type_const b, Ts) - | NONE => - if not (null ts) then raise STREE t (*only tconsts have type arguments*) - else - case strip_prefix Res_Clause.tfree_prefix a of - SOME b => TFree("'" ^ b, HOLogic.typeS) - | NONE => - case strip_prefix Res_Clause.tvar_prefix a of - SOME b => make_tvar b - | NONE => make_tvar a (*Variable from the ATP, say X1*) - end; - -(*Invert the table of translations between Isabelle and ATPs*) -val const_trans_table_inv = - Symtab.update ("fequal", "op =") - (Symtab.make (map swap (Symtab.dest Res_Clause.const_trans_table))); - -fun invert_const c = - case Symtab.lookup const_trans_table_inv c of - SOME c' => c' - | NONE => c; - -(*The number of type arguments of a constant, zero if it's monomorphic*) -fun num_typargs thy s = length (Sign.const_typargs thy (s, Sign.the_const_type thy s)); - -(*Generates a constant, given its type arguments*) -fun const_of thy (a,Ts) = Const(a, Sign.const_instance thy (a,Ts)); - -(*First-order translation. No types are known for variables. HOLogic.typeT should allow - them to be inferred.*) -fun term_of_stree args thy t = - case t of - Int _ => raise STREE t - | Br ("hBOOL",[t]) => term_of_stree [] thy t (*ignore hBOOL*) - | Br ("hAPP",[t,u]) => term_of_stree (u::args) thy t - | Br (a,ts) => - case strip_prefix Res_Clause.const_prefix a of - SOME "equal" => - list_comb(Const ("op =", HOLogic.typeT), List.map (term_of_stree [] thy) ts) - | SOME b => - let val c = invert_const b - val nterms = length ts - num_typargs thy c - val us = List.map (term_of_stree [] thy) (List.take(ts,nterms) @ args) - (*Extra args from hAPP come AFTER any arguments given directly to the - constant.*) - val Ts = List.map type_of_stree (List.drop(ts,nterms)) - in list_comb(const_of thy (c, Ts), us) end - | NONE => (*a variable, not a constant*) - let val T = HOLogic.typeT - val opr = (*a Free variable is typically a Skolem function*) - case strip_prefix Res_Clause.fixed_var_prefix a of - SOME b => Free(b,T) - | NONE => - case strip_prefix Res_Clause.schematic_var_prefix a of - SOME b => make_var (b,T) - | NONE => make_var (a,T) (*Variable from the ATP, say X1*) - in list_comb (opr, List.map (term_of_stree [] thy) (ts@args)) end; - -(*Type class literal applied to a type. 
Returns triple of polarity, class, type.*) -fun constraint_of_stree pol (Br("c_Not",[t])) = constraint_of_stree (not pol) t - | constraint_of_stree pol t = case t of - Int _ => raise STREE t - | Br (a,ts) => - (case (strip_prefix Res_Clause.class_prefix a, map type_of_stree ts) of - (SOME b, [T]) => (pol, b, T) - | _ => raise STREE t); - -(** Accumulate type constraints in a clause: negative type literals **) - -fun addix (key,z) = Vartab.map_default (key,[]) (cons z); - -fun add_constraint ((false, cl, TFree(a,_)), vt) = addix ((a,~1),cl) vt - | add_constraint ((false, cl, TVar(ix,_)), vt) = addix (ix,cl) vt - | add_constraint (_, vt) = vt; - -(*False literals (which E includes in its proofs) are deleted*) -val nofalses = filter (not o equal HOLogic.false_const); - -(*Final treatment of the list of "real" literals from a clause.*) -fun finish [] = HOLogic.true_const (*No "real" literals means only type information*) - | finish lits = - case nofalses lits of - [] => HOLogic.false_const (*The empty clause, since we started with real literals*) - | xs => foldr1 HOLogic.mk_disj (rev xs); - -(*Accumulate sort constraints in vt, with "real" literals in lits.*) -fun lits_of_strees _ (vt, lits) [] = (vt, finish lits) - | lits_of_strees ctxt (vt, lits) (t::ts) = - lits_of_strees ctxt (add_constraint (constraint_of_stree true t, vt), lits) ts - handle STREE _ => - lits_of_strees ctxt (vt, term_of_stree [] (ProofContext.theory_of ctxt) t :: lits) ts; - -(*Update TVars/TFrees with detected sort constraints.*) -fun fix_sorts vt = - let fun tysubst (Type (a, Ts)) = Type (a, map tysubst Ts) - | tysubst (TVar (xi, s)) = TVar (xi, the_default s (Vartab.lookup vt xi)) - | tysubst (TFree (x, s)) = TFree (x, the_default s (Vartab.lookup vt (x, ~1))) - fun tmsubst (Const (a, T)) = Const (a, tysubst T) - | tmsubst (Free (a, T)) = Free (a, tysubst T) - | tmsubst (Var (xi, T)) = Var (xi, tysubst T) - | tmsubst (t as Bound _) = t - | tmsubst (Abs (a, T, t)) = Abs (a, tysubst T, tmsubst t) - | tmsubst (t $ u) = tmsubst t $ tmsubst u; - in fn t => if Vartab.is_empty vt then t else tmsubst t end; - -(*Interpret a list of syntax trees as a clause, given by "real" literals and sort constraints. - vt0 holds the initial sort constraints, from the conjecture clauses.*) -fun clause_of_strees ctxt vt0 ts = - let val (vt, dt) = lits_of_strees ctxt (vt0,[]) ts in - singleton (Syntax.check_terms ctxt) (TypeInfer.constrain HOLogic.boolT (fix_sorts vt dt)) - end; - -fun gen_all_vars t = fold_rev Logic.all (OldTerm.term_vars t) t; - -fun ints_of_stree_aux (Int n, ns) = n::ns - | ints_of_stree_aux (Br(_,ts), ns) = List.foldl ints_of_stree_aux ns ts; - -fun ints_of_stree t = ints_of_stree_aux (t, []); - -fun decode_tstp vt0 (name, role, ts, annots) ctxt = - let val deps = case annots of NONE => [] | SOME (source,_) => ints_of_stree source - val cl = clause_of_strees ctxt vt0 ts - in ((name, role, cl, deps), fold Variable.declare_term (OldTerm.term_frees cl) ctxt) end; - -fun dest_tstp ((((name, role), ts), annots), chs) = - case chs of - "."::_ => (name, role, ts, annots) - | _ => error ("TSTP line not terminated by \".\": " ^ implode chs); - - -(** Global sort constraints on TFrees (from tfree_tcs) are positive unit clauses. 
**) - -fun add_tfree_constraint ((true, cl, TFree(a,_)), vt) = addix ((a,~1),cl) vt - | add_tfree_constraint (_, vt) = vt; - -fun tfree_constraints_of_clauses vt [] = vt - | tfree_constraints_of_clauses vt ([lit]::tss) = - (tfree_constraints_of_clauses (add_tfree_constraint (constraint_of_stree true lit, vt)) tss - handle STREE _ => (*not a positive type constraint: ignore*) - tfree_constraints_of_clauses vt tss) - | tfree_constraints_of_clauses vt (_::tss) = tfree_constraints_of_clauses vt tss; - - -(**** Translation of TSTP files to Isar Proofs ****) - -fun decode_tstp_list ctxt tuples = - let val vt0 = tfree_constraints_of_clauses Vartab.empty (map #3 tuples) - in #1 (fold_map (decode_tstp vt0) tuples ctxt) end; - -(** Finding a matching assumption. The literals may be permuted, and variable names - may disagree. We have to try all combinations of literals (quadratic!) and - match up the variable names consistently. **) - -fun strip_alls_aux n (Const("all",_)$Abs(a,T,t)) = - strip_alls_aux (n+1) (subst_bound (Var ((a,n), T), t)) - | strip_alls_aux _ t = t; - -val strip_alls = strip_alls_aux 0; - -exception MATCH_LITERAL; - -(*Ignore types: they are not to be trusted...*) -fun match_literal (t1$u1) (t2$u2) env = - match_literal t1 t2 (match_literal u1 u2 env) - | match_literal (Abs (_,_,t1)) (Abs (_,_,t2)) env = - match_literal t1 t2 env - | match_literal (Bound i1) (Bound i2) env = - if i1=i2 then env else raise MATCH_LITERAL - | match_literal (Const(a1,_)) (Const(a2,_)) env = - if a1=a2 then env else raise MATCH_LITERAL - | match_literal (Free(a1,_)) (Free(a2,_)) env = - if a1=a2 then env else raise MATCH_LITERAL - | match_literal (Var(ix1,_)) (Var(ix2,_)) env = insert (op =) (ix1,ix2) env - | match_literal _ _ _ = raise MATCH_LITERAL; - -(*Checking that all variable associations are unique. The list env contains no - repetitions, but does it contain say (x,y) and (y,y)? *) -fun good env = - let val (xs,ys) = ListPair.unzip env - in not (has_duplicates (op=) xs orelse has_duplicates (op=) ys) end; - -(*Match one list of literals against another, ignoring types and the order of - literals. Sorting is unreliable because we don't have types or variable names.*) -fun matches_aux _ [] [] = true - | matches_aux env (lit::lits) ts = - let fun match1 us [] = false - | match1 us (t::ts) = - let val env' = match_literal lit t env - in (good env' andalso matches_aux env' lits (us@ts)) orelse - match1 (t::us) ts - end - handle MATCH_LITERAL => match1 (t::us) ts - in match1 [] ts end; - -(*Is this length test useful?*) -fun matches (lits1,lits2) = - length lits1 = length lits2 andalso - matches_aux [] (map Envir.eta_contract lits1) (map Envir.eta_contract lits2); - -fun permuted_clause t = - let val lits = HOLogic.disjuncts t - fun perm [] = NONE - | perm (ctm::ctms) = - if matches (lits, HOLogic.disjuncts (HOLogic.dest_Trueprop (strip_alls ctm))) - then SOME ctm else perm ctms - in perm end; - -fun have_or_show "show " _ = "show \"" - | have_or_show have lname = have ^ lname ^ ": \"" - -(*ctms is a list of conjecture clauses as yielded by Isabelle. Those returned by the - ATP may have their literals reordered.*) -fun isar_lines ctxt ctms = - let val string_of = PrintMode.setmp [] (fn term => Syntax.string_of_term ctxt term) - val _ = trace ("\n\nisar_lines: start\n") - fun doline _ (lname, t, []) = (*No deps: it's a conjecture clause, with no proof.*) - (case permuted_clause t ctms of - SOME u => "assume " ^ lname ^ ": \"" ^ string_of u ^ "\"\n" - | NONE => "assume? 
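(* Editor's note -- a hedged, self-contained sketch of the matching strategy
   described above: literals may be permuted and variable names may disagree,
   so every pairing of literals is tried (quadratic) and the accumulated
   variable association is checked for consistency.  Literals are reduced to
   (predicate name, argument variables); the real code works on eta-contracted
   HOL terms. *)
exception MATCH;

type literal = string * string list;

fun has_dups [] = false
  | has_dups (x :: xs) = List.exists (fn y => y = x) xs orelse has_dups xs;

(* the accumulated association must pair variables bijectively *)
fun good env =
  let val (xs, ys) = ListPair.unzip env
  in not (has_dups xs orelse has_dups ys) end;

fun match_lit ((p1, vs1) : literal) ((p2, vs2) : literal) env =
  if p1 <> p2 orelse length vs1 <> length vs2 then raise MATCH
  else List.foldl (fn (pr, e) => if List.exists (fn q => q = pr) e then e else pr :: e)
                  env (ListPair.zip (vs1, vs2));

(* try each literal against every candidate, backtracking on failure *)
fun matches_aux _ [] [] = true
  | matches_aux env (l :: ls) ts =
      let fun try us [] = false
            | try us (t :: rest) =
                (let val env' = match_lit l t env
                 in (good env' andalso matches_aux env' ls (us @ rest))
                    orelse try (t :: us) rest
                 end handle MATCH => try (t :: us) rest)
      in try [] ts end
  | matches_aux _ _ _ = false;

(* matches_aux [] [("p", ["x"]), ("q", ["x", "y"])]
                  [("q", ["a", "b"]), ("p", ["a"])]   evaluates to true *)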
" ^ lname ^ ": \"" ^ string_of t ^ "\"\n") (*no match!!*) - | doline have (lname, t, deps) = - have_or_show have lname ^ string_of (gen_all_vars (HOLogic.mk_Trueprop t)) ^ - "\"\n by (metis " ^ space_implode " " deps ^ ")\n" - fun dolines [(lname, t, deps)] = [doline "show " (lname, t, deps)] - | dolines ((lname, t, deps)::lines) = doline "have " (lname, t, deps) :: dolines lines - in setmp_CRITICAL show_sorts (Config.get ctxt recon_sorts) dolines end; - -fun notequal t (_,t',_) = not (t aconv t'); - -(*No "real" literals means only type information*) -fun eq_types t = t aconv HOLogic.true_const; - -fun replace_dep (old:int, new) dep = if dep=old then new else [dep]; - -fun replace_deps (old:int, new) (lno, t, deps) = - (lno, t, List.foldl (uncurry (union (op =))) [] (map (replace_dep (old, new)) deps)); - -(*Discard axioms; consolidate adjacent lines that prove the same clause, since they differ - only in type information.*) -fun add_prfline ((lno, "axiom", t, []), lines) = (*axioms are not proof lines*) - if eq_types t (*must be clsrel/clsarity: type information, so delete refs to it*) - then map (replace_deps (lno, [])) lines - else - (case take_prefix (notequal t) lines of - (_,[]) => lines (*no repetition of proof line*) - | (pre, (lno', _, _) :: post) => (*repetition: replace later line by earlier one*) - pre @ map (replace_deps (lno', [lno])) post) - | add_prfline ((lno, _, t, []), lines) = (*no deps: conjecture clause*) - (lno, t, []) :: lines - | add_prfline ((lno, _, t, deps), lines) = - if eq_types t then (lno, t, deps) :: lines - (*Type information will be deleted later; skip repetition test.*) - else (*FIXME: Doesn't this code risk conflating proofs involving different types??*) - case take_prefix (notequal t) lines of - (_,[]) => (lno, t, deps) :: lines (*no repetition of proof line*) - | (pre, (lno', t', _) :: post) => - (lno, t', deps) :: (*repetition: replace later line by earlier one*) - (pre @ map (replace_deps (lno', [lno])) post); - -(*Recursively delete empty lines (type information) from the proof.*) -fun add_nonnull_prfline ((lno, t, []), lines) = (*no dependencies, so a conjecture clause*) - if eq_types t (*must be type information, tfree_tcs, clsrel, clsarity: delete refs to it*) - then delete_dep lno lines - else (lno, t, []) :: lines - | add_nonnull_prfline ((lno, t, deps), lines) = (lno, t, deps) :: lines -and delete_dep lno lines = List.foldr add_nonnull_prfline [] (map (replace_deps (lno, [])) lines); - -fun bad_free (Free (a,_)) = String.isPrefix "sko_" a - | bad_free _ = false; - -(*TVars are forbidden in goals. Also, we don't want lines with <2 dependencies. - To further compress proofs, setting modulus:=n deletes every nth line, and nlines - counts the number of proof lines processed so far. - Deleted lines are replaced by their own dependencies. 
Note that the "add_nonnull_prfline" - phase may delete some dependencies, hence this phase comes later.*) -fun add_wanted_prfline ctxt ((lno, t, []), (nlines, lines)) = - (nlines, (lno, t, []) :: lines) (*conjecture clauses must be kept*) - | add_wanted_prfline ctxt ((lno, t, deps), (nlines, lines)) = - if eq_types t orelse not (null (Term.add_tvars t [])) orelse - exists_subterm bad_free t orelse - (not (null lines) andalso (*final line can't be deleted for these reasons*) - (length deps < 2 orelse nlines mod (Config.get ctxt modulus) <> 0)) - then (nlines+1, map (replace_deps (lno, deps)) lines) (*Delete line*) - else (nlines+1, (lno, t, deps) :: lines); - -(*Replace numeric proof lines by strings, either from thm_names or sequential line numbers*) -fun stringify_deps thm_names deps_map [] = [] - | stringify_deps thm_names deps_map ((lno, t, deps) :: lines) = - if lno <= Vector.length thm_names (*axiom*) - then (Vector.sub(thm_names,lno-1), t, []) :: stringify_deps thm_names deps_map lines - else let val lname = Int.toString (length deps_map) - fun fix lno = if lno <= Vector.length thm_names - then SOME(Vector.sub(thm_names,lno-1)) - else AList.lookup op= deps_map lno; - in (lname, t, map_filter fix (distinct (op=) deps)) :: - stringify_deps thm_names ((lno,lname)::deps_map) lines - end; - -val proofstart = "proof (neg_clausify)\n"; - -fun isar_header [] = proofstart - | isar_header ts = proofstart ^ "fix " ^ space_implode " " ts ^ "\n"; - -fun decode_tstp_file cnfs ctxt th sgno thm_names = - let val _ = trace "\ndecode_tstp_file: start\n" - val tuples = map (dest_tstp o tstp_line o explode) cnfs - val _ = trace (Int.toString (length tuples) ^ " tuples extracted\n") - val ctxt = ProofContext.set_mode ProofContext.mode_schematic ctxt - val raw_lines = List.foldr add_prfline [] (decode_tstp_list ctxt tuples) - val _ = trace (Int.toString (length raw_lines) ^ " raw_lines extracted\n") - val nonnull_lines = List.foldr add_nonnull_prfline [] raw_lines - val _ = trace (Int.toString (length nonnull_lines) ^ " nonnull_lines extracted\n") - val (_,lines) = List.foldr (add_wanted_prfline ctxt) (0,[]) nonnull_lines - val _ = trace (Int.toString (length lines) ^ " lines extracted\n") - val (ccls,fixes) = Res_Axioms.neg_conjecture_clauses ctxt th sgno - val _ = trace (Int.toString (length ccls) ^ " conjecture clauses\n") - val ccls = map forall_intr_vars ccls - val _ = - if ! 
Res_Axioms.trace then app (fn th => trace ("\nccl: " ^ string_of_thm ctxt th)) ccls - else () - val ilines = isar_lines ctxt (map prop_of ccls) (stringify_deps thm_names [] lines) - val _ = trace "\ndecode_tstp_file: finishing\n" - in - isar_header (map #1 fixes) ^ implode ilines ^ "qed\n" - end handle STREE _ => error "Could not extract proof (ATP output malformed?)"; - - -(*=== EXTRACTING PROOF-TEXT === *) - -val begin_proof_strings = ["# SZS output start CNFRefutation.", - "=========== Refutation ==========", - "Here is a proof"]; - -val end_proof_strings = ["# SZS output end CNFRefutation", - "======= End of refutation =======", - "Formulae used in the proof"]; - -fun get_proof_extract proof = - let - (*splits to_split by the first possible of a list of splitters*) - val (begin_string, end_string) = - (find_first (fn s => String.isSubstring s proof) begin_proof_strings, - find_first (fn s => String.isSubstring s proof) end_proof_strings) - in - if is_none begin_string orelse is_none end_string - then error "Could not extract proof (no substring indicating a proof)" - else proof |> first_field (the begin_string) |> the |> snd - |> first_field (the end_string) |> the |> fst - end; - -(* ==== CHECK IF PROOF OF E OR VAMPIRE WAS SUCCESSFUL === *) - -val failure_strings_E = ["SZS status: Satisfiable","SZS status Satisfiable", - "SZS status: ResourceOut","SZS status ResourceOut","# Cannot determine problem status"]; -val failure_strings_vampire = ["Satisfiability detected", "Refutation not found", "CANNOT PROVE"]; -val failure_strings_SPASS = ["SPASS beiseite: Completion found.", - "SPASS beiseite: Ran out of time.", "SPASS beiseite: Maximal number of loops exceeded."]; -val failure_strings_remote = ["Remote-script could not extract proof"]; -fun find_failure proof = - let val failures = - map_filter (fn s => if String.isSubstring s proof then SOME s else NONE) - (failure_strings_E @ failure_strings_vampire @ failure_strings_SPASS @ failure_strings_remote) - val correct = null failures andalso - exists (fn s => String.isSubstring s proof) begin_proof_strings andalso - exists (fn s => String.isSubstring s proof) end_proof_strings - in - if correct then NONE - else if null failures then SOME "Output of ATP not in proper format" - else SOME (hd failures) end; - -(* === EXTRACTING LEMMAS === *) -(* lines have the form "cnf(108, axiom, ...", -the number (108) has to be extracted)*) -fun get_step_nums false proofextract = - let val toks = String.tokens (not o Char.isAlphaNum) - fun inputno ("cnf"::ntok::"axiom"::_) = Int.fromString ntok - | inputno ("cnf"::ntok::"negated"::"conjecture"::_) = Int.fromString ntok - | inputno _ = NONE - val lines = split_lines proofextract - in map_filter (inputno o toks) lines end -(*String contains multiple lines. We want those of the form - "253[0:Inp] et cetera..." - A list consisting of the first number in each line is returned. 
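(* Editor's note -- a hedged, self-contained illustration of the tokenise-and-
   match trick used by get_step_nums above for TSTP output: split the line on
   non-alphanumeric characters and read off the clause number after "cnf". *)
fun step_num_of_tstp_line line =
  case String.tokens (not o Char.isAlphaNum) line of
    "cnf" :: n :: "axiom" :: _ => Int.fromString n
  | "cnf" :: n :: "negated" :: "conjecture" :: _ => Int.fromString n
  | _ => NONE;

(* step_num_of_tstp_line "cnf(108,axiom,(p(X)|q(X)))."  evaluates to  SOME 108 *)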
*) -| get_step_nums true proofextract = - let val toks = String.tokens (not o Char.isAlphaNum) - fun inputno (ntok::"0"::"Inp"::_) = Int.fromString ntok - | inputno _ = NONE - val lines = split_lines proofextract - in map_filter (inputno o toks) lines end - -(*extracting lemmas from tstp-output between the lines from above*) -fun extract_lemmas get_step_nums (proof, thm_names, conj_count, _, _, _) = - let - (* get the names of axioms from their numbers*) - fun get_axiom_names thm_names step_nums = - let - val last_axiom = Vector.length thm_names - fun is_axiom n = n <= last_axiom - fun is_conj n = n >= fst conj_count andalso n < fst conj_count + snd conj_count - fun getname i = Vector.sub(thm_names, i-1) - in - (sort_distinct string_ord (filter (fn x => x <> "??.unknown") - (map getname (filter is_axiom step_nums))), - exists is_conj step_nums) - end - val proofextract = get_proof_extract proof - in - get_axiom_names thm_names (get_step_nums proofextract) - end; - -(*Used to label theorems chained into the sledgehammer call*) -val chained_hint = "CHAINED"; -val nochained = filter_out (fn y => y = chained_hint) - -(* metis-command *) -fun metis_line [] = "apply metis" - | metis_line xs = "apply (metis " ^ space_implode " " xs ^ ")" - -(* atp_minimize [atp=] *) -fun minimize_line _ [] = "" - | minimize_line name lemmas = "For minimizing the number of lemmas try this command:\n" ^ - (Markup.markup Markup.sendback) ("atp_minimize [atp=" ^ name ^ "] " ^ - space_implode " " (nochained lemmas)) - -fun sendback_metis_nochained lemmas = - (Markup.markup Markup.sendback o metis_line) (nochained lemmas) - -fun lemma_list dfg name result = - let val (lemmas, used_conj) = extract_lemmas (get_step_nums dfg) result - in (sendback_metis_nochained lemmas ^ "\n" ^ minimize_line name lemmas ^ - (if used_conj then "" - else "\nWarning: Goal is provable because context is inconsistent."), - nochained lemmas) - end; - -(* === Extracting structured Isar-proof === *) -fun structured_proof name (result as (proof, thm_names, conj_count, ctxt, goal, subgoalno)) = - let - (*Could use split_lines, but it can return blank lines...*) - val lines = String.tokens (equal #"\n"); - val nospaces = String.translate (fn c => if Char.isSpace c then "" else str c) - val proofextract = get_proof_extract proof - val cnfs = filter (String.isPrefix "cnf(") (map nospaces (lines proofextract)) - val (one_line_proof, lemma_names) = lemma_list false name result - val structured = - if chained_hint mem_string (String.tokens (fn c => c = #" ") one_line_proof) then "" - else decode_tstp_file cnfs ctxt goal subgoalno thm_names - in - (one_line_proof ^ "\n\n" ^ Markup.markup Markup.sendback structured, lemma_names) -end - -end; diff -r b766aad9136d -r e31ec41a551b src/HOL/Wellfounded.thy --- a/src/HOL/Wellfounded.thy Thu Mar 18 13:57:00 2010 +0100 +++ b/src/HOL/Wellfounded.thy Thu Mar 18 13:59:20 2010 +0100 @@ -908,7 +908,7 @@ lemma nat_size [simp, code]: "size (n\nat) = n" by (induct n) simp_all -declare "prod.size" [noatp] +declare "prod.size" [no_atp] lemma [code]: "size (P :: 'a Predicate.pred) = 0" by (cases P) simp
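(* Editor's note -- a hedged sketch, placed here at the end of the hunk, of the
   axiom-name lookup inside extract_lemmas above and of the metis one-liner it
   feeds; the axiom-name vector is replaced by a plain list, sorting is
   omitted, and space_implode is replaced by String.concatWith. *)
fun dedup [] = []
  | dedup (x :: xs) = x :: dedup (List.filter (fn y => y <> x) xs);

fun axiom_names_of names step_nums =
  dedup (List.filter (fn s => s <> "??.unknown")
           (map (fn n => List.nth (names, n - 1))
                (List.filter (fn n => n >= 1 andalso n <= length names) step_nums)));

fun metis_line [] = "apply metis"
  | metis_line names = "apply (metis " ^ String.concatWith " " names ^ ")";

(* metis_line (axiom_names_of ["add_assoc", "add_comm"] [2, 5, 1, 2])
   evaluates to  "apply (metis add_comm add_assoc)" *)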