--- a/NEWS Tue Sep 06 20:55:18 2011 +0200
+++ b/NEWS Tue Sep 06 21:11:12 2011 +0200
@@ -276,6 +276,7 @@
real_squared_diff_one_factored ~> square_diff_one_factored
realpow_two_diff ~> square_diff_square_factored
reals_complete2 ~> complete_real
+ real_sum_squared_expand ~> power2_sum
exp_ln_eq ~> ln_unique
expi_add ~> exp_add
expi_zero ~> exp_zero
@@ -301,6 +302,7 @@
LIMSEQ_add_minus ~> tendsto_add [OF _ tendsto_minus]
LIMSEQ_add_const ~> tendsto_add [OF _ tendsto_const]
LIMSEQ_diff_const ~> tendsto_diff [OF _ tendsto_const]
+ LIMSEQ_Complex ~> tendsto_Complex
LIM_ident ~> tendsto_ident_at
LIM_const ~> tendsto_const
LIM_add ~> tendsto_add
--- a/doc-src/Sledgehammer/sledgehammer.tex Tue Sep 06 20:55:18 2011 +0200
+++ b/doc-src/Sledgehammer/sledgehammer.tex Tue Sep 06 21:11:12 2011 +0200
@@ -108,11 +108,11 @@
results are correct by construction.
In this manual, we will explicitly invoke the \textbf{sledgehammer} command.
-Sledgehammer also provides an automatic mode that can be enabled via the
-``Auto Sledgehammer'' option from the ``Isabelle'' menu in Proof General. In
-this mode, Sledgehammer is run on every newly entered theorem. The time limit
-for Auto Sledgehammer and other automatic tools can be set using the ``Auto
-Tools Time Limit'' option.
+Sledgehammer also provides an automatic mode that can be enabled via the ``Auto
+Sledgehammer'' option in Proof General's ``Isabelle'' menu. In this mode,
+Sledgehammer is run on every newly entered theorem. The time limit for Auto
+Sledgehammer and other automatic tools can be set using the ``Auto Tools Time
+Limit'' option.
\newbox\boxA
\setbox\boxA=\hbox{\texttt{nospam}}
@@ -633,8 +633,8 @@
highly-relevant and \qty{facts\/_{\mathrm{2}}} fully irrelevant.
You can instruct Sledgehammer to run automatically on newly entered theorems by
-enabling the ``Auto Sledgehammer'' option from the ``Isabelle'' menu in Proof
-General. For automatic runs, only the first prover set using \textit{provers}
+enabling the ``Auto Sledgehammer'' option in Proof General's ``Isabelle'' menu.
+For automatic runs, only the first prover set using \textit{provers}
(\S\ref{mode-of-operation}) is considered, fewer facts are passed to the prover,
\textit{slicing} (\S\ref{mode-of-operation}) is disabled, \textit{sound}
(\S\ref{problem-encoding}) is enabled, \textit{verbose} (\S\ref{output-format})
@@ -724,7 +724,7 @@
\item[$\bullet$] \textbf{\textit{leo2}:} LEO-II is an automatic
higher-order prover developed by Christoph Benzm\"uller et al.\ \cite{leo2},
-with support for the TPTP higher-order syntax (THF).
+with support for the TPTP many-typed higher-order syntax (THF0).
\item[$\bullet$] \textbf{\textit{metis}:} Although it is much less powerful than
the external provers, Metis itself can be used for proof search.
@@ -737,7 +737,7 @@
\item[$\bullet$] \textbf{\textit{satallax}:} Satallax is an automatic
higher-order prover developed by Chad Brown et al.\ \cite{satallax}, with
-support for the TPTP higher-order syntax (THF).
+support for the TPTP many-typed higher-order syntax (THF0).
\item[$\bullet$] \textbf{\textit{spass}:} SPASS is a first-order resolution
prover developed by Christoph Weidenbach et al.\ \cite{weidenbach-et-al-2009}.
@@ -752,7 +752,7 @@
\texttt{VAMPIRE\_HOME} to the directory that contains the \texttt{vampire}
executable and \texttt{VAMPIRE\_VERSION} to the version number (e.g., ``1.8'').
Sledgehammer has been tested with versions 0.6, 1.0, and 1.8. Vampire 1.8
-supports the TPTP many-typed first-order format (TFF).
+supports the TPTP many-typed first-order format (TFF0).
\item[$\bullet$] \textbf{\textit{yices}:} Yices is an SMT solver developed at
SRI \cite{yices}. To use Yices, set the environment variable
@@ -767,7 +767,7 @@
\item[$\bullet$] \textbf{\textit{z3\_tptp}:} This version of Z3 pretends to be
an ATP, exploiting Z3's support for the TPTP untyped and many-typed first-order
-formats (FOF and TFF). It is included for experimental purposes. It requires
+formats (FOF and TFF0). It is included for experimental purposes. It requires
version 3.0 or above.
\end{enum}
@@ -787,7 +787,7 @@
\item[$\bullet$] \textbf{\textit{remote\_e\_tofof}:} E-ToFoF is a metaprover
developed by Geoff Sutcliffe \cite{tofof} based on E running on his Miami
-servers. This ATP supports the TPTP many-typed first-order format (TFF). The
+servers. This ATP supports the TPTP many-typed first-order format (TFF0). The
remote version of E-ToFoF runs on Geoff Sutcliffe's Miami servers.
\item[$\bullet$] \textbf{\textit{remote\_leo2}:} The remote version of LEO-II
@@ -798,7 +798,7 @@
\item[$\bullet$] \textbf{\textit{remote\_snark}:} SNARK is a first-order
resolution prover developed by Stickel et al.\ \cite{snark}. It supports the
-TPTP many-typed first-order format (TFF). The remote version of SNARK runs on
+TPTP many-typed first-order format (TFF0). The remote version of SNARK runs on
Geoff Sutcliffe's Miami servers.
\item[$\bullet$] \textbf{\textit{remote\_vampire}:} The remote version of
@@ -818,17 +818,16 @@
with TPTP syntax'' runs on Geoff Sutcliffe's Miami servers.
\end{enum}
-By default, Sledgehammer will run E, E-SInE, SPASS, Vampire, Z3 (or whatever
+By default, Sledgehammer runs E, E-SInE, SPASS, Vampire, Z3 (or whatever
the SMT module's \textit{smt\_solver} configuration option is set to), and (if
appropriate) Waldmeister in parallel---either locally or remotely, depending on
the number of processor cores available. For historical reasons, the default
value of this option can be overridden using the option ``Sledgehammer:
-Provers'' from the ``Isabelle'' menu in Proof General.
+Provers'' in Proof General's ``Isabelle'' menu.
-It is a good idea to run several provers in parallel, although it could slow
-down your machine. Running E, SPASS, and Vampire for 5~seconds yields a similar
-success rate to running the most effective of these for 120~seconds
-\cite{boehme-nipkow-2010}.
+It is generally a good idea to run several provers in parallel. Running E,
+SPASS, and Vampire for 5~seconds yields a similar success rate to running the
+most effective of these for 120~seconds \cite{boehme-nipkow-2010}.
For the \textit{min} subcommand, the default prover is \textit{metis}. If
several provers are set, the first one is used.
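As an illustration (this invocation is not part of the changeset), explicitly
selecting the provers to run, given as a space-separated list, might look as
follows:

\prew
\textbf{sledgehammer} [\textit{provers} = \textit{e spass remote\_vampire}]
\postw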
@@ -884,7 +883,13 @@
Specifies the type encoding to use in ATP problems. Some of the type encodings
are unsound, meaning that they can give rise to spurious proofs
(unreconstructible using Metis). The supported type encodings are listed below,
-with an indication of their soundness in parentheses:
+with an indication of their soundness in parentheses.
+%
+All the encodings with \textit{guards} or \textit{tags} in their name are
+available in a ``uniform'' and a ``nonuniform'' variant. The nonuniform variants
+are generally more efficient and are the default; the uniform variants are
+identified by the suffix \textit{\_uniform} (e.g.,
+\textit{mono\_guards\_uniform}{?}).
\begin{enum}
\item[$\bullet$] \textbf{\textit{erased} (very unsound):} No type information is
@@ -926,27 +931,27 @@
$\mathit{type\/}(\tau, t)$ becomes a unary function
$\mathit{type\_}\tau(t)$.
-\item[$\bullet$] \textbf{\textit{simple} (sound):} Exploit simple first-order
-types if the prover supports the TFF or THF syntax; otherwise, fall back on
-\textit{mono\_guards}. The problem is monomorphized.
+\item[$\bullet$] \textbf{\textit{mono\_simple} (sound):} Exploits simple
+first-order types if the prover supports the TFF0 or THF0 syntax; otherwise,
+falls back on \textit{mono\_guards\_uniform}. The problem is monomorphized.
-\item[$\bullet$] \textbf{\textit{simple\_higher} (sound):} Exploit simple
-higher-order types if the prover supports the THF syntax; otherwise, fall back
-on \textit{simple} or \textit{mono\_guards\_uniform}. The problem is
+\item[$\bullet$] \textbf{\textit{mono\_simple\_higher} (sound):} Exploits simple
+higher-order types if the prover supports the THF0 syntax; otherwise, falls back
+on \textit{mono\_simple} or \textit{mono\_guards\_uniform}. The problem is
monomorphized.
\item[$\bullet$]
\textbf{%
\textit{poly\_guards}?, \textit{poly\_tags}?, \textit{raw\_mono\_guards}?, \\
\textit{raw\_mono\_tags}?, \textit{mono\_guards}?, \textit{mono\_tags}?, \\
-\textit{simple}? (quasi-sound):} \\
+\textit{mono\_simple}? (quasi-sound):} \\
The type encodings \textit{poly\_guards}, \textit{poly\_tags},
\textit{raw\_mono\_guards}, \textit{raw\_mono\_tags}, \textit{mono\_guards},
-\textit{mono\_tags}, and \textit{simple} are fully
+\textit{mono\_tags}, and \textit{mono\_simple} are fully
typed and sound. For each of these, Sledgehammer also provides a lighter,
virtually sound variant identified by a question mark (`{?}')\ that detects and
-erases monotonic types, notably infinite types. (For \textit{simple}, the types
-are not actually erased but rather replaced by a shared uniform type of
+erases monotonic types, notably infinite types. (For \textit{mono\_simple}, the
+types are not actually erased but rather replaced by a shared uniform type of
individuals.) As argument to the \textit{metis} proof method, the question mark
is replaced by a \hbox{``\textit{\_query}''} suffix. If the \emph{sound} option
is enabled, these encodings are fully sound.
@@ -954,30 +959,25 @@
\item[$\bullet$]
\textbf{%
\textit{poly\_guards}!, \textit{poly\_tags}!, \textit{raw\_mono\_guards}!, \\
-\textit{raw\_mono\_tags}!, \textit{mono\_guards}!, \textit{mono\_tags}!, \textit{simple}!, \\
-\textit{simple\_higher}! (mildly unsound):} \\
+\textit{raw\_mono\_tags}!, \textit{mono\_guards}!, \textit{mono\_tags}!, \\
+\textit{mono\_simple}!, \textit{mono\_simple\_higher}! (mildly unsound):} \\
The type encodings \textit{poly\_guards}, \textit{poly\_tags},
\textit{raw\_mono\_guards}, \textit{raw\_mono\_tags}, \textit{mono\_guards},
-\textit{mono\_tags}, \textit{simple}, and \textit{simple\_higher} also admit
-a mildly unsound (but very efficient) variant identified by an exclamation mark
-(`{!}') that detects and erases erases all types except those that are clearly
-finite (e.g., \textit{bool}). (For \textit{simple} and \textit{simple\_higher},
-the types are not actually erased but rather replaced by a shared uniform type
-of individuals.) As argument to the \textit{metis} proof method, the exclamation
-mark is replaced by the suffix \hbox{``\textit{\_bang}''}.
+\textit{mono\_tags}, \textit{mono\_simple}, and \textit{mono\_simple\_higher}
+also admit a mildly unsound (but very efficient) variant identified by an
+exclamation mark (`{!}') that detects and erases all types except those
+that are clearly finite (e.g., \textit{bool}). (For \textit{mono\_simple} and
+\textit{mono\_simple\_higher}, the types are not actually erased but rather
+replaced by a shared uniform type of individuals.) As argument to the
+\textit{metis} proof method, the exclamation mark is replaced by the suffix
+\hbox{``\textit{\_bang}''}.
\item[$\bullet$] \textbf{\textit{smart}:} The actual encoding used depends on
the ATP and should be the most efficient virtually sound encoding for that ATP.
\end{enum}
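Purely as an illustration (this invocation is not part of the changeset, and the
option name \textit{type\_enc} is assumed here, following the terminology used
above), selecting one of the listed encodings explicitly might look as follows:

\prew
\textbf{sledgehammer} [\textit{type\_enc} = \textit{mono\_guards\_uniform}]
\postw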
-In addition, all the \textit{guards} and \textit{tags} type encodings are
-available in two variants, a ``uniform'' and a ``nonuniform'' variant. The
-nonuniform variants are generally more efficient and are the default; the
-uniform variants are identified by the suffix \textit{\_uniform} (e.g.,
-\textit{mono\_guards\_uniform}{?}).
-
-For SMT solvers, the type encoding is always \textit{simple}, irrespective of
-the value of this option.
+For SMT solvers, the type encoding is always \textit{mono\_simple}, irrespective
+of the value of this option.
\nopagebreak
{\small See also \textit{max\_new\_mono\_instances} (\S\ref{relevance-filter})
@@ -1091,8 +1091,7 @@
Specifies the maximum number of seconds that the automatic provers should spend
searching for a proof. This excludes problem preparation and is a soft limit.
For historical reasons, the default value of this option can be overridden using
-the option ``Sledgehammer: Time Limit'' from the ``Isabelle'' menu in Proof
-General.
+the option ``Sledgehammer: Time Limit'' in Proof General's ``Isabelle'' menu.
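As an illustration (not part of the changeset), raising the limit for a single
invocation might look as follows:

\prew
\textbf{sledgehammer} [\textit{timeout} = 120]
\postw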
\opdefault{preplay\_timeout}{float\_or\_none}{\upshape 4}
Specifies the maximum number of seconds that Metis should spend trying to
--- a/src/HOL/Complex.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Complex.thy Tue Sep 06 21:11:12 2011 +0200
@@ -321,8 +321,6 @@
lemma complex_mod_triangle_ineq2 [simp]: "cmod(b + a) - cmod b \<le> cmod a"
by (rule ord_le_eq_trans [OF norm_triangle_ineq2], simp)
-lemmas real_sum_squared_expand = power2_sum [where 'a=real]
-
lemma abs_Re_le_cmod: "\<bar>Re x\<bar> \<le> cmod x"
by (cases x) simp
@@ -366,10 +364,6 @@
(simp add: dist_norm real_sqrt_sum_squares_less)
qed
-lemma LIMSEQ_Complex:
- "\<lbrakk>X ----> a; Y ----> b\<rbrakk> \<Longrightarrow> (\<lambda>n. Complex (X n) (Y n)) ----> Complex a b"
- by (rule tendsto_Complex)
-
instance complex :: banach
proof
fix X :: "nat \<Rightarrow> complex"
@@ -379,7 +373,7 @@
from Cauchy_Im [OF X] have 2: "(\<lambda>n. Im (X n)) ----> lim (\<lambda>n. Im (X n))"
by (simp add: Cauchy_convergent_iff convergent_LIMSEQ_iff)
have "X ----> Complex (lim (\<lambda>n. Re (X n))) (lim (\<lambda>n. Im (X n)))"
- using LIMSEQ_Complex [OF 1 2] by simp
+ using tendsto_Complex [OF 1 2] by simp
thus "convergent X"
by (rule convergentI)
qed
--- a/src/HOL/Finite_Set.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Finite_Set.thy Tue Sep 06 21:11:12 2011 +0200
@@ -2054,6 +2054,11 @@
apply(auto intro:ccontr)
done
+lemma card_le_Suc_iff: "finite A \<Longrightarrow>
+ Suc n \<le> card A = (\<exists>a B. A = insert a B \<and> a \<notin> B \<and> n \<le> card B \<and> finite B)"
+by (fastsimp simp: card_Suc_eq less_eq_nat.simps(2) insert_eq_iff
+ dest: subset_singletonD split: nat.splits if_splits)
+
lemma finite_fun_UNIVD2:
assumes fin: "finite (UNIV :: ('a \<Rightarrow> 'b) set)"
shows "finite (UNIV :: 'b set)"
--- a/src/HOL/Fun.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Fun.thy Tue Sep 06 21:11:12 2011 +0200
@@ -612,6 +612,10 @@
lemma fun_upd_comp: "f \<circ> (g(x := y)) = (f \<circ> g)(x := f y)"
by (auto intro: ext)
+lemma UNION_fun_upd:
+ "UNION J (A(i:=B)) = (UNION (J-{i}) A \<union> (if i\<in>J then B else {}))"
+by (auto split: if_splits)
+
subsection {* @{text override_on} *}
--- a/src/HOL/Import/Generate-HOL/GenHOL4Base.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Import/Generate-HOL/GenHOL4Base.thy Tue Sep 06 21:11:12 2011 +0200
@@ -12,17 +12,20 @@
import_theory bool;
+type_maps
+ bool > HOL.bool;
+
const_maps
- T > True
- F > False
- "!" > All
+ T > HOL.True
+ F > HOL.False
+ "!" > HOL.All
"/\\" > HOL.conj
"\\/" > HOL.disj
- "?" > Ex
- "?!" > Ex1
- "~" > Not
+ "?" > HOL.Ex
+ "?!" > HOL.Ex1
+ "~" > HOL.Not
COND > HOL.If
- bool_case > Datatype.bool.bool_case
+ bool_case > Product_Type.bool.bool_case
ONE_ONE > HOL4Setup.ONE_ONE
ONTO > Fun.surj
TYPE_DEFINITION > HOL4Setup.TYPE_DEFINITION
@@ -46,7 +49,7 @@
import_theory sum;
type_maps
- sum > "+";
+ sum > Sum_Type.sum;
const_maps
INL > Sum_Type.Inl
@@ -55,7 +58,7 @@
ISR > HOL4Compat.ISR
OUTL > HOL4Compat.OUTL
OUTR > HOL4Compat.OUTR
- sum_case > Datatype.sum.sum_case;
+ sum_case > Sum_Type.sum.sum_case;
ignore_thms
sum_TY_DEF
@@ -63,7 +66,6 @@
IS_SUM_REP
INL_DEF
INR_DEF
- sum_axiom
sum_Axiom;
end_import;
@@ -125,13 +127,13 @@
prod > Product_Type.prod;
const_maps
- "," > Pair
- FST > fst
- SND > snd
- CURRY > curry
- UNCURRY > split
- "##" > map_pair
- pair_case > split;
+ "," > Product_Type.Pair
+ FST > Product_Type.fst
+ SND > Product_Type.snd
+ CURRY > Product_Type.curry
+ UNCURRY > Product_Type.prod.prod_case
+ "##" > Product_Type.map_pair
+ pair_case > Product_Type.prod.prod_case;
ignore_thms
prod_TY_DEF
@@ -145,11 +147,11 @@
import_theory num;
type_maps
- num > nat;
+ num > Nat.nat;
const_maps
- SUC > Suc
- 0 > 0 :: nat;
+ SUC > Nat.Suc
+ 0 > Groups.zero_class.zero :: nat;
ignore_thms
num_TY_DEF
@@ -165,7 +167,7 @@
import_theory prim_rec;
const_maps
- "<" > Orderings.less :: "[nat,nat]=>bool";
+ "<" > Orderings.ord_class.less :: "nat \<Rightarrow> nat \<Rightarrow> bool";
end_import;
@@ -180,15 +182,15 @@
">" > HOL4Compat.nat_gt
">=" > HOL4Compat.nat_ge
FUNPOW > HOL4Compat.FUNPOW
- "<=" > Orderings.less_eq :: "[nat,nat]=>bool"
- "+" > Groups.plus :: "[nat,nat]=>nat"
- "*" > Groups.times :: "[nat,nat]=>nat"
- "-" > Groups.minus :: "[nat,nat]=>nat"
- MIN > Orderings.min :: "[nat,nat]=>nat"
- MAX > Orderings.max :: "[nat,nat]=>nat"
- DIV > Divides.div :: "[nat,nat]=>nat"
- MOD > Divides.mod :: "[nat,nat]=>nat"
- EXP > Power.power :: "[nat,nat]=>nat";
+ "<=" > Orderings.ord_class.less_eq :: "nat \<Rightarrow> nat \<Rightarrow> bool"
+ "+" > Groups.plus_class.plus :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ "*" > Groups.times_class.times :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ "-" > Groups.minus_class.minus :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ MIN > Orderings.ord_class.min :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ MAX > Orderings.ord_class.max :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ DIV > Divides.div_class.div :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ MOD > Divides.div_class.mod :: "nat \<Rightarrow> nat \<Rightarrow> nat"
+ EXP > Power.power_class.power :: "nat \<Rightarrow> nat \<Rightarrow> nat";
end_import;
@@ -207,7 +209,7 @@
import_theory divides;
const_maps
- divides > Divides.times_class.dvd :: "[nat,nat]=>bool";
+ divides > Rings.dvd_class.dvd :: "nat \<Rightarrow> nat \<Rightarrow> bool";
end_import;
@@ -227,7 +229,7 @@
HD > List.hd
TL > List.tl
MAP > List.map
- MEM > "List.op mem"
+ MEM > HOL4Compat.list_mem
FILTER > List.filter
FOLDL > List.foldl
EVERY > List.list_all
@@ -236,12 +238,12 @@
FRONT > List.butlast
APPEND > List.append
FLAT > List.concat
- LENGTH > Nat.size
+ LENGTH > Nat.size_class.size
REPLICATE > List.replicate
list_size > HOL4Compat.list_size
SUM > HOL4Compat.sum
FOLDR > HOL4Compat.FOLDR
- EXISTS > HOL4Compat.list_exists
+ EXISTS > List.list_ex
MAP2 > HOL4Compat.map2
ZIP > HOL4Compat.ZIP
UNZIP > HOL4Compat.unzip;
--- a/src/HOL/Import/Generate-HOL/GenHOL4Real.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Import/Generate-HOL/GenHOL4Real.thy Tue Sep 06 21:11:12 2011 +0200
@@ -16,13 +16,17 @@
real > RealDef.real;
const_maps
- real_0 > Groups.zero :: real
- real_1 > Groups.one :: real
- real_neg > Groups.uminus :: "real => real"
- inv > Groups.inverse :: "real => real"
- real_add > Groups.plus :: "[real,real] => real"
- real_mul > Groups.times :: "[real,real] => real"
- real_lt > Orderings.less :: "[real,real] => bool";
+ real_0 > Groups.zero_class.zero :: real
+ real_1 > Groups.one_class.one :: real
+ real_neg > Groups.uminus_class.uminus :: "real \<Rightarrow> real"
+ inv > Fields.inverse_class.inverse :: "real \<Rightarrow> real"
+ real_add > Groups.plus_class.plus :: "real \<Rightarrow> real \<Rightarrow> real"
+ real_sub > Groups.minus_class.minus :: "real \<Rightarrow> real \<Rightarrow> real"
+ real_mul > Groups.times_class.times :: "real \<Rightarrow> real \<Rightarrow> real"
+ real_div > Fields.inverse_class.divide :: "real \<Rightarrow> real \<Rightarrow> real"
+ real_lt > Orderings.ord_class.less :: "real \<Rightarrow> real \<Rightarrow> bool"
+ mk_real > HOL.undefined (* Otherwise proof_import_concl fails *)
+ dest_real > HOL.undefined
ignore_thms
real_TY_DEF
@@ -50,11 +54,11 @@
const_maps
real_gt > HOL4Compat.real_gt
real_ge > HOL4Compat.real_ge
- real_lte > Orderings.less_eq :: "[real,real] => bool"
- real_sub > Groups.minus :: "[real,real] => real"
- "/" > Fields.divide :: "[real,real] => real"
- pow > Power.power :: "[real,nat] => real"
- abs > Groups.abs :: "real => real"
+ real_lte > Orderings.ord_class.less_eq :: "real \<Rightarrow> real \<Rightarrow> bool"
+ real_sub > Groups.minus_class.minus :: "real \<Rightarrow> real \<Rightarrow> real"
+ "/" > Fields.inverse_class.divide :: "real \<Rightarrow> real \<Rightarrow> real"
+ pow > Power.power_class.power :: "real \<Rightarrow> nat \<Rightarrow> real"
+ abs > Groups.abs_class.abs :: "real => real"
real_of_num > RealDef.real :: "nat => real";
end_import;
--- a/src/HOL/Import/HOL4Compat.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Import/HOL4Compat.thy Tue Sep 06 21:11:12 2011 +0200
@@ -63,6 +63,14 @@
lemma OUTR: "OUTR (Inr x) = x"
by simp
+lemma sum_axiom: "EX! h. h o Inl = f & h o Inr = g"
+ apply (intro allI ex1I[of _ "sum_case f g"] conjI)
+ apply (simp_all add: o_def fun_eq_iff)
+ apply (rule)
+ apply (induct_tac x)
+ apply simp_all
+ done
+
lemma sum_case_def: "(ALL f g x. sum_case f g (Inl x) = f x) & (ALL f g y. sum_case f g (Inr y) = g y)"
by simp
@@ -491,4 +499,6 @@
lemma real_ge: "ALL x y. (y <= x) = (y <= x)"
by simp
+definition [hol4rew]: "list_mem x xs = List.member xs x"
+
end
--- a/src/HOL/Library/Product_Vector.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Library/Product_Vector.thy Tue Sep 06 21:11:12 2011 +0200
@@ -450,7 +450,7 @@
assumes x: "0 \<le> x" and y: "0 \<le> y"
shows "sqrt (x + y) \<le> sqrt x + sqrt y"
apply (rule power2_le_imp_le)
-apply (simp add: real_sum_squared_expand x y)
+apply (simp add: power2_sum x y)
apply (simp add: mult_nonneg_nonneg x y)
apply (simp add: x y)
done
--- a/src/HOL/Multivariate_Analysis/Linear_Algebra.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Multivariate_Analysis/Linear_Algebra.thy Tue Sep 06 21:11:12 2011 +0200
@@ -415,16 +415,6 @@
from power2_le_imp_le[OF th yz] show ?thesis .
qed
-text {* TODO: move to NthRoot *}
-lemma sqrt_add_le_add_sqrt:
- assumes x: "0 \<le> x" and y: "0 \<le> y"
- shows "sqrt (x + y) \<le> sqrt x + sqrt y"
-apply (rule power2_le_imp_le)
-apply (simp add: real_sum_squared_expand x y)
-apply (simp add: mult_nonneg_nonneg x y)
-apply (simp add: x y)
-done
-
subsection {* A generic notion of "hull" (convex, affine, conic hull and closure). *}
definition hull :: "('a set \<Rightarrow> bool) \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixl "hull" 75) where
--- a/src/HOL/SMT_Examples/SMT_Tests.certs Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/SMT_Examples/SMT_Tests.certs Tue Sep 06 21:11:12 2011 +0200
@@ -60074,3 +60074,483 @@
#110 := [asserted]: #38
[mp #110 #120]: false
unsat
+8021f8e09eb3e47791aed2bed0dafccd5948187d 69 0
+#2 := false
+decl f4 :: (-> S2 S1)
+decl f5 :: S2
+#16 := f5
+#19 := (f4 f5)
+decl f1 :: S1
+#4 := f1
+#66 := (= f1 #19)
+#70 := (not #66)
+#20 := (= #19 f1)
+#21 := (not #20)
+#71 := (iff #21 #70)
+#68 := (iff #20 #66)
+#69 := [rewrite]: #68
+#72 := [monotonicity #69]: #71
+#65 := [asserted]: #21
+#75 := [mp #65 #72]: #70
+decl f3 :: (-> S2 S1)
+#17 := (f3 f5)
+#61 := (= f1 #17)
+#18 := (= #17 f1)
+#63 := (iff #18 #61)
+#64 := [rewrite]: #63
+#60 := [asserted]: #18
+#67 := [mp #60 #64]: #61
+#8 := (:var 0 S2)
+#9 := (f3 #8)
+#10 := (pattern #9)
+#12 := (f4 #8)
+#45 := (= f1 #12)
+#42 := (= f1 #9)
+#51 := (not #42)
+#52 := (or #51 #45)
+#57 := (forall (vars (?v0 S2)) (:pat #10) #52)
+#85 := (~ #57 #57)
+#83 := (~ #52 #52)
+#84 := [refl]: #83
+#86 := [nnf-pos #84]: #85
+#13 := (= #12 f1)
+#11 := (= #9 f1)
+#14 := (implies #11 #13)
+#15 := (forall (vars (?v0 S2)) (:pat #10) #14)
+#58 := (iff #15 #57)
+#55 := (iff #14 #52)
+#48 := (implies #42 #45)
+#53 := (iff #48 #52)
+#54 := [rewrite]: #53
+#49 := (iff #14 #48)
+#46 := (iff #13 #45)
+#47 := [rewrite]: #46
+#43 := (iff #11 #42)
+#44 := [rewrite]: #43
+#50 := [monotonicity #44 #47]: #49
+#56 := [trans #50 #54]: #55
+#59 := [quant-intro #56]: #58
+#41 := [asserted]: #15
+#62 := [mp #41 #59]: #57
+#74 := [mp~ #62 #86]: #57
+#137 := (not #61)
+#139 := (not #57)
+#226 := (or #139 #137 #66)
+#224 := (or #137 #66)
+#217 := (or #139 #224)
+#229 := (iff #217 #226)
+#157 := [rewrite]: #229
+#228 := [quant-inst #16]: #217
+#230 := [mp #228 #157]: #226
+[unit-resolution #230 #74 #67 #75]: false
+unsat
+51102b6663906c70b84f1c6e3a1a2e429b49188d 112 0
+#2 := false
+decl f5 :: (-> S2 S1)
+decl f6 :: S2
+#19 := f6
+#24 := (f5 f6)
+decl f1 :: S1
+#4 := f1
+#82 := (= f1 #24)
+#86 := (not #82)
+#25 := (= #24 f1)
+#26 := (not #25)
+#87 := (iff #26 #86)
+#84 := (iff #25 #82)
+#85 := [rewrite]: #84
+#88 := [monotonicity #85]: #87
+#81 := [asserted]: #26
+#91 := [mp #81 #88]: #86
+decl f4 :: (-> S2 S1)
+#22 := (f4 f6)
+#77 := (= f1 #22)
+#23 := (= #22 f1)
+#79 := (iff #23 #77)
+#80 := [rewrite]: #79
+#76 := [asserted]: #23
+#83 := [mp #76 #80]: #77
+decl f3 :: (-> S2 S1)
+#20 := (f3 f6)
+#72 := (= f1 #20)
+#21 := (= #20 f1)
+#74 := (iff #21 #72)
+#75 := [rewrite]: #74
+#71 := [asserted]: #21
+#78 := [mp #71 #75]: #72
+#8 := (:var 0 S2)
+#10 := (f4 #8)
+#9 := (f3 #8)
+#11 := (pattern #9 #10)
+#15 := (f5 #8)
+#56 := (= f1 #15)
+#50 := (= f1 #10)
+#105 := (not #50)
+#47 := (= f1 #9)
+#92 := (not #47)
+#112 := (or #92 #105 #56)
+#117 := (forall (vars (?v0 S2)) (:pat #11) #112)
+#53 := (and #47 #50)
+#62 := (not #53)
+#63 := (or #62 #56)
+#68 := (forall (vars (?v0 S2)) (:pat #11) #63)
+#118 := (iff #68 #117)
+#115 := (iff #63 #112)
+#93 := (or #92 #105)
+#109 := (or #93 #56)
+#113 := (iff #109 #112)
+#114 := [rewrite]: #113
+#110 := (iff #63 #109)
+#107 := (iff #62 #93)
+#94 := (not #93)
+#97 := (not #94)
+#96 := (iff #97 #93)
+#106 := [rewrite]: #96
+#98 := (iff #62 #97)
+#99 := (iff #53 #94)
+#100 := [rewrite]: #99
+#95 := [monotonicity #100]: #98
+#108 := [trans #95 #106]: #107
+#111 := [monotonicity #108]: #110
+#116 := [trans #111 #114]: #115
+#119 := [quant-intro #116]: #118
+#103 := (~ #68 #68)
+#101 := (~ #63 #63)
+#102 := [refl]: #101
+#104 := [nnf-pos #102]: #103
+#16 := (= #15 f1)
+#13 := (= #10 f1)
+#12 := (= #9 f1)
+#14 := (and #12 #13)
+#17 := (implies #14 #16)
+#18 := (forall (vars (?v0 S2)) (:pat #11) #17)
+#69 := (iff #18 #68)
+#66 := (iff #17 #63)
+#59 := (implies #53 #56)
+#64 := (iff #59 #63)
+#65 := [rewrite]: #64
+#60 := (iff #17 #59)
+#57 := (iff #16 #56)
+#58 := [rewrite]: #57
+#54 := (iff #14 #53)
+#51 := (iff #13 #50)
+#52 := [rewrite]: #51
+#48 := (iff #12 #47)
+#49 := [rewrite]: #48
+#55 := [monotonicity #49 #52]: #54
+#61 := [monotonicity #55 #58]: #60
+#67 := [trans #61 #65]: #66
+#70 := [quant-intro #67]: #69
+#46 := [asserted]: #18
+#73 := [mp #46 #70]: #68
+#90 := [mp~ #73 #104]: #68
+#120 := [mp #90 #119]: #117
+#178 := (not #77)
+#265 := (not #72)
+#267 := (not #117)
+#258 := (or #267 #265 #178 #82)
+#179 := (or #265 #178 #82)
+#269 := (or #267 #179)
+#198 := (iff #269 #258)
+#271 := [rewrite]: #198
+#270 := [quant-inst #19]: #269
+#268 := [mp #270 #271]: #258
+[unit-resolution #268 #120 #78 #83 #91]: false
+unsat
+1191e209015c2f2f439f124434700d861e089600 149 0
+#2 := false
+decl f3 :: (-> S2 S1)
+decl f6 :: S2
+#21 := f6
+#22 := (f3 f6)
+decl f1 :: S1
+#4 := f1
+#84 := (= f1 #22)
+#264 := (not #84)
+decl f5 :: (-> S2 S1)
+#27 := (f5 f6)
+#95 := (= f1 #27)
+#178 := (or #264 #95)
+decl f4 :: (-> S2 S1)
+#24 := (f4 f6)
+#88 := (= f1 #24)
+#176 := (not #88)
+#268 := (or #176 #95)
+#266 := (not #268)
+#265 := (not #178)
+#586 := (or #265 #266)
+#375 := (not #586)
+#579 := [hypothesis]: #586
+#8 := (:var 0 S2)
+#11 := (f4 #8)
+#12 := (pattern #11)
+#9 := (f3 #8)
+#10 := (pattern #9)
+#65 := (= f1 #11)
+#71 := (not #65)
+#14 := (f5 #8)
+#53 := (= f1 #14)
+#72 := (or #53 #71)
+#116 := (not #72)
+#50 := (= f1 #9)
+#59 := (not #50)
+#60 := (or #59 #53)
+#105 := (not #60)
+#106 := (or #105 #116)
+#107 := (not #106)
+#108 := (forall (vars (?v0 S2)) (:pat #10 #12) #107)
+#77 := (and #60 #72)
+#80 := (forall (vars (?v0 S2)) (:pat #10 #12) #77)
+#109 := (iff #80 #108)
+#110 := (iff #77 #107)
+#111 := [rewrite]: #110
+#117 := [quant-intro #111]: #109
+#114 := (~ #80 #80)
+#112 := (~ #77 #77)
+#113 := [refl]: #112
+#115 := [nnf-pos #113]: #114
+#15 := (= #14 f1)
+#17 := (= #11 f1)
+#18 := (implies #17 #15)
+#13 := (= #9 f1)
+#16 := (implies #13 #15)
+#19 := (and #16 #18)
+#20 := (forall (vars (?v0 S2)) (:pat #10 #12) #19)
+#81 := (iff #20 #80)
+#78 := (iff #19 #77)
+#75 := (iff #18 #72)
+#68 := (implies #65 #53)
+#73 := (iff #68 #72)
+#74 := [rewrite]: #73
+#69 := (iff #18 #68)
+#54 := (iff #15 #53)
+#55 := [rewrite]: #54
+#66 := (iff #17 #65)
+#67 := [rewrite]: #66
+#70 := [monotonicity #67 #55]: #69
+#76 := [trans #70 #74]: #75
+#63 := (iff #16 #60)
+#56 := (implies #50 #53)
+#61 := (iff #56 #60)
+#62 := [rewrite]: #61
+#57 := (iff #16 #56)
+#51 := (iff #13 #50)
+#52 := [rewrite]: #51
+#58 := [monotonicity #52 #55]: #57
+#64 := [trans #58 #62]: #63
+#79 := [monotonicity #64 #76]: #78
+#82 := [quant-intro #79]: #81
+#49 := [asserted]: #20
+#85 := [mp #49 #82]: #80
+#103 := [mp~ #85 #115]: #80
+#118 := [mp #103 #117]: #108
+#255 := (not #108)
+#589 := (or #255 #375)
+#263 := (or #95 #176)
+#177 := (not #263)
+#256 := (or #265 #177)
+#267 := (not #256)
+#590 := (or #255 #267)
+#592 := (iff #590 #589)
+#593 := (iff #589 #589)
+#583 := [rewrite]: #593
+#582 := (iff #267 #375)
+#588 := (iff #256 #586)
+#270 := (iff #177 #266)
+#196 := (iff #263 #268)
+#269 := [rewrite]: #196
+#249 := [monotonicity #269]: #270
+#243 := [monotonicity #249]: #588
+#254 := [monotonicity #243]: #582
+#587 := [monotonicity #254]: #592
+#241 := [trans #587 #583]: #592
+#591 := [quant-inst #21]: #590
+#246 := [mp #591 #241]: #589
+#217 := [unit-resolution #246 #118 #579]: false
+#218 := [lemma #217]: #375
+#574 := (or #586 #178)
+#575 := [def-axiom]: #574
+#580 := [unit-resolution #575 #218]: #178
+#578 := (or #265 #264)
+#99 := (not #95)
+#28 := (= #27 f1)
+#29 := (not #28)
+#100 := (iff #29 #99)
+#97 := (iff #28 #95)
+#98 := [rewrite]: #97
+#101 := [monotonicity #98]: #100
+#94 := [asserted]: #29
+#104 := [mp #94 #101]: #99
+#569 := (or #265 #264 #95)
+#230 := [def-axiom]: #569
+#581 := [unit-resolution #230 #104]: #578
+#567 := [unit-resolution #581 #580]: #264
+#570 := (or #586 #268)
+#576 := [def-axiom]: #570
+#568 := [unit-resolution #576 #218]: #268
+#274 := (or #266 #176)
+#572 := (or #266 #176 #95)
+#573 := [def-axiom]: #572
+#290 := [unit-resolution #573 #104]: #274
+#291 := [unit-resolution #290 #568]: #176
+#91 := (or #84 #88)
+#25 := (= #24 f1)
+#23 := (= #22 f1)
+#26 := (or #23 #25)
+#92 := (iff #26 #91)
+#89 := (iff #25 #88)
+#90 := [rewrite]: #89
+#86 := (iff #23 #84)
+#87 := [rewrite]: #86
+#93 := [monotonicity #87 #90]: #92
+#83 := [asserted]: #26
+#96 := [mp #83 #93]: #91
+[unit-resolution #96 #291 #567]: false
+unsat
+45f8ffe676ed981a383aaab6faaf520b9ff236c9 69 0
+#2 := false
+decl f4 :: (-> S2 S1)
+decl f5 :: S2
+#16 := f5
+#19 := (f4 f5)
+decl f1 :: S1
+#4 := f1
+#66 := (= f1 #19)
+#70 := (not #66)
+#20 := (= #19 f1)
+#21 := (not #20)
+#71 := (iff #21 #70)
+#68 := (iff #20 #66)
+#69 := [rewrite]: #68
+#72 := [monotonicity #69]: #71
+#65 := [asserted]: #21
+#75 := [mp #65 #72]: #70
+decl f3 :: (-> S2 S1)
+#17 := (f3 f5)
+#61 := (= f1 #17)
+#18 := (= #17 f1)
+#63 := (iff #18 #61)
+#64 := [rewrite]: #63
+#60 := [asserted]: #18
+#67 := [mp #60 #64]: #61
+#8 := (:var 0 S2)
+#9 := (f3 #8)
+#10 := (pattern #9)
+#12 := (f4 #8)
+#45 := (= f1 #12)
+#42 := (= f1 #9)
+#51 := (not #42)
+#52 := (or #51 #45)
+#57 := (forall (vars (?v0 S2)) (:pat #10) #52)
+#85 := (~ #57 #57)
+#83 := (~ #52 #52)
+#84 := [refl]: #83
+#86 := [nnf-pos #84]: #85
+#13 := (= #12 f1)
+#11 := (= #9 f1)
+#14 := (implies #11 #13)
+#15 := (forall (vars (?v0 S2)) (:pat #10) #14)
+#58 := (iff #15 #57)
+#55 := (iff #14 #52)
+#48 := (implies #42 #45)
+#53 := (iff #48 #52)
+#54 := [rewrite]: #53
+#49 := (iff #14 #48)
+#46 := (iff #13 #45)
+#47 := [rewrite]: #46
+#43 := (iff #11 #42)
+#44 := [rewrite]: #43
+#50 := [monotonicity #44 #47]: #49
+#56 := [trans #50 #54]: #55
+#59 := [quant-intro #56]: #58
+#41 := [asserted]: #15
+#62 := [mp #41 #59]: #57
+#74 := [mp~ #62 #86]: #57
+#137 := (not #61)
+#139 := (not #57)
+#226 := (or #139 #137 #66)
+#224 := (or #137 #66)
+#217 := (or #139 #224)
+#229 := (iff #217 #226)
+#157 := [rewrite]: #229
+#228 := [quant-inst #16]: #217
+#230 := [mp #228 #157]: #226
+[unit-resolution #230 #74 #67 #75]: false
+unsat
+ceabafba9f0db45264556e8d9525b667725281c7 76 0
+#2 := false
+decl f4 :: (-> S2 S1)
+decl f5 :: S2
+#15 := f5
+#18 := (f4 f5)
+decl f1 :: S1
+#4 := f1
+#65 := (= f1 #18)
+#69 := (not #65)
+#19 := (= #18 f1)
+#20 := (not #19)
+#70 := (iff #20 #69)
+#67 := (iff #19 #65)
+#68 := [rewrite]: #67
+#71 := [monotonicity #68]: #70
+#64 := [asserted]: #20
+#74 := [mp #64 #71]: #69
+decl f3 :: (-> S2 S1)
+#16 := (f3 f5)
+#60 := (= f1 #16)
+#17 := (= #16 f1)
+#62 := (iff #17 #60)
+#63 := [rewrite]: #62
+#59 := [asserted]: #17
+#66 := [mp #59 #63]: #60
+#8 := (:var 0 S2)
+#11 := (f4 #8)
+#555 := (pattern #11)
+#9 := (f3 #8)
+#554 := (pattern #9)
+#44 := (= f1 #11)
+#41 := (= f1 #9)
+#50 := (not #41)
+#51 := (or #50 #44)
+#556 := (forall (vars (?v0 S2)) (:pat #554 #555) #51)
+#56 := (forall (vars (?v0 S2)) #51)
+#559 := (iff #56 #556)
+#557 := (iff #51 #51)
+#558 := [refl]: #557
+#560 := [quant-intro #558]: #559
+#84 := (~ #56 #56)
+#82 := (~ #51 #51)
+#83 := [refl]: #82
+#85 := [nnf-pos #83]: #84
+#12 := (= #11 f1)
+#10 := (= #9 f1)
+#13 := (implies #10 #12)
+#14 := (forall (vars (?v0 S2)) #13)
+#57 := (iff #14 #56)
+#54 := (iff #13 #51)
+#47 := (implies #41 #44)
+#52 := (iff #47 #51)
+#53 := [rewrite]: #52
+#48 := (iff #13 #47)
+#45 := (iff #12 #44)
+#46 := [rewrite]: #45
+#42 := (iff #10 #41)
+#43 := [rewrite]: #42
+#49 := [monotonicity #43 #46]: #48
+#55 := [trans #49 #53]: #54
+#58 := [quant-intro #55]: #57
+#40 := [asserted]: #14
+#61 := [mp #40 #58]: #56
+#73 := [mp~ #61 #85]: #56
+#561 := [mp #73 #560]: #556
+#136 := (not #60)
+#138 := (not #556)
+#225 := (or #138 #136 #65)
+#223 := (or #136 #65)
+#216 := (or #138 #223)
+#228 := (iff #216 #225)
+#156 := [rewrite]: #228
+#227 := [quant-inst #15]: #216
+#229 := [mp #227 #156]: #225
+[unit-resolution #229 #561 #66 #74]: false
+unsat
--- a/src/HOL/SMT_Examples/SMT_Tests.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/SMT_Examples/SMT_Tests.thy Tue Sep 06 21:11:12 2011 +0200
@@ -201,6 +201,9 @@
"(\<forall>x y z. f x y = f z y \<longrightarrow> x = z) \<and> a \<noteq> d \<longrightarrow> f a b \<noteq> f d b"
by smt+
+
+section {* Guidance for quantifier heuristics: patterns and weights *}
+
lemma
assumes "\<forall>x. SMT.trigger [[SMT.pat (f x)]] (f x = x)"
shows "f 1 = 1"
@@ -211,6 +214,38 @@
shows "f 1 = g 2"
using assms by smt
+lemma
+ assumes "ALL x. SMT.trigger [[SMT.pat (P x)]] (P x --> Q x)"
+ and "P t"
+ shows "Q t"
+ using assms by smt
+
+lemma
+ assumes "ALL x. SMT.trigger [[SMT.pat (P x), SMT.pat (Q x)]]
+ (P x & Q x --> R x)"
+ and "P t" and "Q t"
+ shows "R t"
+ using assms by smt
+
+lemma
+ assumes "ALL x. SMT.trigger [[SMT.pat (P x)], [SMT.pat (Q x)]]
+ ((P x --> R x) & (Q x --> R x))"
+ and "P t | Q t"
+ shows "R t"
+ using assms by smt
+
+lemma
+ assumes "ALL x. SMT.trigger [[SMT.pat (P x)]] (SMT.weight 2 (P x --> Q x))"
+ and "P t"
+ shows "Q t"
+ using assms by smt
+
+lemma
+ assumes "ALL x. SMT.weight 1 (P x --> Q x)"
+ and "P t"
+ shows "Q t"
+ using assms by smt
+
section {* Meta logical connectives *}
--- a/src/HOL/Set.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Set.thy Tue Sep 06 21:11:12 2011 +0200
@@ -785,6 +785,26 @@
lemma insert_ident: "x ~: A ==> x ~: B ==> (insert x A = insert x B) = (A = B)"
by auto
+lemma insert_eq_iff: assumes "a \<notin> A" "b \<notin> B"
+shows "insert a A = insert b B \<longleftrightarrow>
+ (if a=b then A=B else \<exists>C. A = insert b C \<and> b \<notin> C \<and> B = insert a C \<and> a \<notin> C)"
+ (is "?L \<longleftrightarrow> ?R")
+proof
+ assume ?L
+ show ?R
+ proof cases
+ assume "a=b" with assms `?L` show ?R by (simp add: insert_ident)
+ next
+ assume "a\<noteq>b"
+ let ?C = "A - {b}"
+ have "A = insert b ?C \<and> b \<notin> ?C \<and> B = insert a ?C \<and> a \<notin> ?C"
+ using assms `?L` `a\<noteq>b` by auto
+ thus ?R using `a\<noteq>b` by auto
+ qed
+next
+ assume ?R thus ?L by(auto split: if_splits)
+qed
+
subsubsection {* Singletons, using insert *}
lemma singletonI [intro!,no_atp]: "a : {a}"
--- a/src/HOL/Tools/ATP/atp_problem.ML Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Tools/ATP/atp_problem.ML Tue Sep 06 21:11:12 2011 +0200
@@ -22,15 +22,15 @@
AFun of 'a ho_type * 'a ho_type |
ATyAbs of 'a list * 'a ho_type
- datatype tff_polymorphism = TFF_Monomorphic | TFF_Polymorphic
- datatype tff_explicitness = TFF_Implicit | TFF_Explicit
+ datatype tptp_polymorphism = TPTP_Monomorphic | TPTP_Polymorphic
+ datatype tptp_explicitness = TPTP_Implicit | TPTP_Explicit
datatype thf_flavor = THF_Without_Choice | THF_With_Choice
- datatype format =
+ datatype tptp_format =
CNF |
CNF_UEQ |
FOF |
- TFF of tff_polymorphism * tff_explicitness |
- THF0 of thf_flavor
+ TFF of tptp_polymorphism * tptp_explicitness |
+ THF of tptp_polymorphism * tptp_explicitness * thf_flavor
datatype formula_kind = Axiom | Definition | Lemma | Hypothesis | Conjecture
datatype 'a problem_line =
@@ -92,9 +92,9 @@
bool option -> (bool option -> 'c -> 'd -> 'd) -> ('a, 'b, 'c) formula
-> 'd -> 'd
val formula_map : ('c -> 'd) -> ('a, 'b, 'c) formula -> ('a, 'b, 'd) formula
- val is_format_thf : format -> bool
- val is_format_typed : format -> bool
- val tptp_lines_for_atp_problem : format -> string problem -> string list
+ val is_format_thf : tptp_format -> bool
+ val is_format_typed : tptp_format -> bool
+ val tptp_lines_for_atp_problem : tptp_format -> string problem -> string list
val ensure_cnf_problem :
(string * string) problem -> (string * string) problem
val filter_cnf_ueq_problem :
@@ -130,16 +130,16 @@
AFun of 'a ho_type * 'a ho_type |
ATyAbs of 'a list * 'a ho_type
-datatype tff_polymorphism = TFF_Monomorphic | TFF_Polymorphic
-datatype tff_explicitness = TFF_Implicit | TFF_Explicit
+datatype tptp_polymorphism = TPTP_Monomorphic | TPTP_Polymorphic
+datatype tptp_explicitness = TPTP_Implicit | TPTP_Explicit
datatype thf_flavor = THF_Without_Choice | THF_With_Choice
-datatype format =
+datatype tptp_format =
CNF |
CNF_UEQ |
FOF |
- TFF of tff_polymorphism * tff_explicitness |
- THF0 of thf_flavor
+ TFF of tptp_polymorphism * tptp_explicitness |
+ THF of tptp_polymorphism * tptp_explicitness * thf_flavor
datatype formula_kind = Axiom | Definition | Lemma | Hypothesis | Conjecture
datatype 'a problem_line =
@@ -224,10 +224,10 @@
| formula_map f (AConn (c, phis)) = AConn (c, map (formula_map f) phis)
| formula_map f (AAtom tm) = AAtom (f tm)
-fun is_format_thf (THF0 _) = true
+fun is_format_thf (THF _) = true
| is_format_thf _ = false
fun is_format_typed (TFF _) = true
- | is_format_typed (THF0 _) = true
+ | is_format_typed (THF _) = true
| is_format_typed _ = false
fun string_for_kind Axiom = "axiom"
@@ -263,10 +263,10 @@
| str _ (ATyAbs (ss, ty)) =
tptp_pi_binder ^ "[" ^
commas (map (suffix (" " ^ tptp_has_type ^ " " ^ tptp_type_of_types))
- ss) ^ "] : " ^ str false ty
+ ss) ^ "]: " ^ str false ty
in str true ty end
-fun string_for_type (THF0 _) ty = str_for_type ty
+fun string_for_type (THF _) ty = str_for_type ty
| string_for_type (TFF _) ty = str_for_type (flatten_type ty)
| string_for_type _ _ = raise Fail "unexpected type in untyped format"
@@ -288,6 +288,9 @@
else
"")
+fun is_format_with_choice (THF (_, _, THF_With_Choice)) = true
+ | is_format_with_choice _ = false
+
fun string_for_term _ (ATerm (s, [])) = s
| string_for_term format (ATerm (s, ts)) =
if s = tptp_empty_list then
@@ -298,7 +301,7 @@
|> is_format_thf format ? enclose "(" ")"
else
(case (s = tptp_ho_forall orelse s = tptp_ho_exists,
- s = tptp_choice andalso format = THF0 THF_With_Choice, ts) of
+ s = tptp_choice andalso is_format_with_choice format, ts) of
(true, _, [AAbs ((s', ty), tm)]) =>
(* Use syntactic sugar "!" and "?" instead of "!!" and "??" whenever
possible, to work around LEO-II 1.2.8 parser limitation. *)
@@ -306,9 +309,9 @@
(AQuant (if s = tptp_ho_forall then AForall else AExists,
[(s', SOME ty)], AAtom tm))
| (_, true, [AAbs ((s', ty), tm)]) =>
- (*There is code in ATP_Translate to ensure that Eps is always applied
- to an abstraction*)
- tptp_choice ^ "[" ^ s' ^ " : " ^ string_for_type format ty ^ "] : " ^
+ (* There is code in "ATP_Translate" to ensure that "Eps" is always
+ applied to an abstraction. *)
+ tptp_choice ^ "[" ^ s' ^ " : " ^ string_for_type format ty ^ "]: " ^
string_for_term format tm ^ ""
|> enclose "(" ")"
@@ -319,13 +322,13 @@
else
s ^ "(" ^ commas ss ^ ")"
end)
- | string_for_term (format as THF0 _) (AAbs ((s, ty), tm)) =
- "(^[" ^ s ^ " : " ^ string_for_type format ty ^ "] : " ^
+ | string_for_term (format as THF _) (AAbs ((s, ty), tm)) =
+ "(^[" ^ s ^ " : " ^ string_for_type format ty ^ "]: " ^
string_for_term format tm ^ ")"
| string_for_term _ _ = raise Fail "unexpected term in first-order format"
and string_for_formula format (AQuant (q, xs, phi)) =
string_for_quantifier q ^
- "[" ^ commas (map (string_for_bound_var format) xs) ^ "] : " ^
+ "[" ^ commas (map (string_for_bound_var format) xs) ^ "]: " ^
string_for_formula format phi
|> enclose "(" ")"
| string_for_formula format
@@ -352,7 +355,7 @@
| string_for_format CNF_UEQ = tptp_cnf
| string_for_format FOF = tptp_fof
| string_for_format (TFF _) = tptp_tff
- | string_for_format (THF0 _) = tptp_thf
+ | string_for_format (THF _) = tptp_thf
fun string_for_problem_line format (Decl (ident, sym, ty)) =
string_for_format format ^ "(" ^ ident ^ ", type,\n " ^ sym ^ " : " ^
--- a/src/HOL/Tools/ATP/atp_systems.ML Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Tools/ATP/atp_systems.ML Tue Sep 06 21:11:12 2011 +0200
@@ -7,7 +7,7 @@
signature ATP_SYSTEMS =
sig
- type format = ATP_Problem.format
+ type tptp_format = ATP_Problem.tptp_format
type formula_kind = ATP_Problem.formula_kind
type failure = ATP_Proof.failure
@@ -22,7 +22,8 @@
conj_sym_kind : formula_kind,
prem_kind : formula_kind,
best_slices :
- Proof.context -> (real * (bool * (int * format * string * string))) list}
+ Proof.context
+ -> (real * (bool * (int * tptp_format * string * string))) list}
val force_sos : bool Config.T
val e_smartN : string
@@ -41,6 +42,7 @@
val e_tofofN : string
val leo2N : string
val pffN : string
+ val phfN : string
val satallaxN : string
val snarkN : string
val spassN : string
@@ -51,7 +53,7 @@
val remote_atp :
string -> string -> string list -> (string * string) list
-> (failure * string) list -> formula_kind -> formula_kind
- -> (Proof.context -> int * format * string) -> string * atp_config
+ -> (Proof.context -> int * tptp_format * string) -> string * atp_config
val add_atp : string * atp_config -> theory -> theory
val get_atp : theory -> string -> atp_config
val supported_atps : theory -> string list
@@ -79,7 +81,8 @@
conj_sym_kind : formula_kind,
prem_kind : formula_kind,
best_slices :
- Proof.context -> (real * (bool * (int * format * string * string))) list}
+ Proof.context
+ -> (real * (bool * (int * tptp_format * string * string))) list}
(* "best_slices" must be found empirically, taking a wholistic approach since
the ATPs are run in parallel. The "real" components give the faction of the
@@ -105,6 +108,7 @@
val e_tofofN = "e_tofof"
val leo2N = "leo2"
val pffN = "pff"
+val phfN = "phf"
val satallaxN = "satallax"
val snarkN = "snark"
val spassN = "spass"
@@ -230,6 +234,8 @@
(* LEO-II *)
+val leo2_thf0 = THF (TPTP_Monomorphic, TPTP_Explicit, THF_Without_Choice)
+
val leo2_config : atp_config =
{exec = ("LEO2_HOME", "leo"),
required_execs = [],
@@ -243,10 +249,8 @@
prem_kind = Hypothesis,
best_slices = fn ctxt =>
(* FUDGE *)
- [(0.667, (false, (150, THF0 THF_Without_Choice,
- "mono_simple_higher", sosN))),
- (0.333, (true, (50, THF0 THF_Without_Choice,
- "mono_simple_higher", no_sosN)))]
+ [(0.667, (false, (150, leo2_thf0, "mono_simple_higher", sosN))),
+ (0.333, (true, (50, leo2_thf0, "mono_simple_higher", no_sosN)))]
|> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
else I)}
@@ -255,6 +259,8 @@
(* Satallax *)
+val satallax_thf0 = THF (TPTP_Monomorphic, TPTP_Explicit, THF_With_Choice)
+
val satallax_config : atp_config =
{exec = ("SATALLAX_HOME", "satallax"),
required_execs = [],
@@ -266,8 +272,8 @@
conj_sym_kind = Axiom,
prem_kind = Hypothesis,
best_slices =
- K [(1.0, (true, (100, THF0 THF_With_Choice, "mono_simple_higher", "")))]
- (* FUDGE *)}
+ (* FUDGE *)
+ K [(1.0, (true, (100, satallax_thf0, "mono_simple_higher", "")))]}
val satallax = (satallaxN, satallax_config)
@@ -314,7 +320,7 @@
fun is_old_vampire_version () =
string_ord (getenv "VAMPIRE_VERSION", "1.8") <> GREATER
-val vampire_tff = TFF (TFF_Monomorphic, TFF_Implicit)
+val vampire_tff0 = TFF (TPTP_Monomorphic, TPTP_Implicit)
val vampire_config : atp_config =
{exec = ("VAMPIRE_HOME", "vampire"),
@@ -347,9 +353,9 @@
(0.333, (false, (500, FOF, "mono_tags?", sosN))),
(0.334, (true, (50, FOF, "mono_guards?", no_sosN)))]
else
- [(0.333, (false, (150, vampire_tff, "poly_guards", sosN))),
- (0.333, (false, (500, vampire_tff, "mono_simple", sosN))),
- (0.334, (true, (50, vampire_tff, "mono_simple", no_sosN)))])
+ [(0.333, (false, (150, vampire_tff0, "poly_guards", sosN))),
+ (0.333, (false, (500, vampire_tff0, "mono_simple", sosN))),
+ (0.334, (true, (50, vampire_tff0, "mono_simple", no_sosN)))])
|> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
else I)}
@@ -358,7 +364,7 @@
(* Z3 with TPTP syntax *)
-val z3_tff = TFF (TFF_Monomorphic, TFF_Implicit)
+val z3_tff0 = TFF (TPTP_Monomorphic, TPTP_Implicit)
val z3_tptp_config : atp_config =
{exec = ("Z3_HOME", "z3"),
@@ -377,18 +383,17 @@
prem_kind = Hypothesis,
best_slices =
(* FUDGE *)
- K [(0.5, (false, (250, z3_tff, "mono_simple", ""))),
- (0.25, (false, (125, z3_tff, "mono_simple", ""))),
- (0.125, (false, (62, z3_tff, "mono_simple", ""))),
- (0.125, (false, (31, z3_tff, "mono_simple", "")))]}
+ K [(0.5, (false, (250, z3_tff0, "mono_simple", ""))),
+ (0.25, (false, (125, z3_tff0, "mono_simple", ""))),
+ (0.125, (false, (62, z3_tff0, "mono_simple", ""))),
+ (0.125, (false, (31, z3_tff0, "mono_simple", "")))]}
val z3_tptp = (z3_tptpN, z3_tptp_config)
-(* Not really a prover: Experimental PFF (Polymorphic TFF) output *)
-val poly_tff = TFF (TFF_Polymorphic, TFF_Implicit)
+(* Not really a prover: Experimental Polymorphic TFF and THF output *)
-val pff_config : atp_config =
+fun dummy_config format type_enc : atp_config =
{exec = ("ISABELLE_ATP", "scripts/dummy_atp"),
required_execs = [],
arguments = K (K (K (K (K "")))),
@@ -396,10 +401,16 @@
known_failures = [(GaveUp, "SZS status Unknown")],
conj_sym_kind = Hypothesis,
prem_kind = Hypothesis,
- best_slices = K [(1.0, (false, (200, poly_tff, "poly_simple", "")))]}
+ best_slices = K [(1.0, (false, (200, format, type_enc, "")))]}
+val pff_format = TFF (TPTP_Polymorphic, TPTP_Implicit)
+val pff_config = dummy_config pff_format "poly_simple"
val pff = (pffN, pff_config)
+val phf_format = THF (TPTP_Polymorphic, TPTP_Implicit, THF_With_Choice)
+val phf_config = dummy_config phf_format "poly_simple_higher"
+val phf = (phfN, phf_config)
+
(* Remote ATP invocation via SystemOnTPTP *)
@@ -475,34 +486,33 @@
(remote_prefix ^ name,
remotify_config system_name system_versions best_slice config)
-val dumb_tff = TFF (TFF_Monomorphic, TFF_Explicit)
+val explicit_tff0 = TFF (TPTP_Monomorphic, TPTP_Explicit)
val remote_e =
remotify_atp e "EP" ["1.0", "1.1", "1.2"]
(K (750, FOF, "mono_tags?") (* FUDGE *))
val remote_leo2 =
remotify_atp leo2 "LEO-II" ["1.2.8", "1.2.6"]
- (K (100, THF0 THF_Without_Choice,
- "mono_simple_higher") (* FUDGE *))
+ (K (100, leo2_thf0, "mono_simple_higher") (* FUDGE *))
val remote_satallax =
remotify_atp satallax "Satallax" ["2.1", "2.0", "2"]
- (K (100, THF0 THF_With_Choice,
- "mono_simple_higher") (* FUDGE *))
+ (K (100, satallax_thf0, "mono_simple_higher") (* FUDGE *))
val remote_vampire =
remotify_atp vampire "Vampire" ["1.8"]
(K (250, FOF, "mono_guards?") (* FUDGE *))
val remote_z3_tptp =
- remotify_atp z3_tptp "Z3" ["3.0"] (K (250, z3_tff, "mono_simple") (* FUDGE *))
+ remotify_atp z3_tptp "Z3" ["3.0"]
+ (K (250, z3_tff0, "mono_simple") (* FUDGE *))
val remote_e_sine =
remote_atp e_sineN "SInE" ["0.4"] [] (#known_failures e_config) Axiom
Conjecture (K (500, FOF, "mono_guards?") (* FUDGE *))
val remote_snark =
remote_atp snarkN "SNARK" ["20080805r029", "20080805r024"]
[("refutation.", "end_refutation.")] [] Hypothesis Hypothesis
- (K (100, dumb_tff, "mono_simple") (* FUDGE *))
+ (K (100, explicit_tff0, "mono_simple") (* FUDGE *))
val remote_e_tofof =
remote_atp e_tofofN "ToFoF" ["0.1"] [] (#known_failures e_config) Axiom
- Hypothesis (K (150, dumb_tff, "mono_simple") (* FUDGE *))
+ Hypothesis (K (150, explicit_tff0, "mono_simple") (* FUDGE *))
val remote_waldmeister =
remote_atp waldmeisterN "Waldmeister" ["710"]
[("#START OF PROOF", "Proved Goals:")]
@@ -532,7 +542,7 @@
Synchronized.change systems (fn _ => get_systems ())
val atps =
- [e, leo2, pff, satallax, spass, vampire, z3_tptp, remote_e, remote_leo2,
+ [e, leo2, pff, phf, satallax, spass, vampire, z3_tptp, remote_e, remote_leo2,
remote_satallax, remote_vampire, remote_z3_tptp, remote_e_sine, remote_snark,
remote_e_tofof, remote_waldmeister]
val setup = fold add_atp atps
--- a/src/HOL/Tools/ATP/atp_translate.ML Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Tools/ATP/atp_translate.ML Tue Sep 06 21:11:12 2011 +0200
@@ -11,7 +11,7 @@
type ('a, 'b) ho_term = ('a, 'b) ATP_Problem.ho_term
type connective = ATP_Problem.connective
type ('a, 'b, 'c) formula = ('a, 'b, 'c) ATP_Problem.formula
- type format = ATP_Problem.format
+ type tptp_format = ATP_Problem.tptp_format
type formula_kind = ATP_Problem.formula_kind
type 'a problem = 'a ATP_Problem.problem
@@ -92,7 +92,7 @@
val level_of_type_enc : type_enc -> type_level
val is_type_enc_quasi_sound : type_enc -> bool
val is_type_enc_fairly_sound : type_enc -> bool
- val adjust_type_enc : format -> type_enc -> type_enc
+ val adjust_type_enc : tptp_format -> type_enc -> type_enc
val mk_aconns :
connective -> ('a, 'b, 'c) formula list -> ('a, 'b, 'c) formula
val unmangled_const : string -> string * (string, 'b) ho_term list
@@ -100,7 +100,7 @@
val helper_table : ((string * bool) * thm list) list
val factsN : string
val prepare_atp_problem :
- Proof.context -> format -> formula_kind -> formula_kind -> type_enc
+ Proof.context -> tptp_format -> formula_kind -> formula_kind -> type_enc
-> bool -> string -> bool -> bool -> term list -> term
-> ((string * locality) * term) list
-> string problem * string Symtab.table * int * int
@@ -142,7 +142,7 @@
(* TFF1 is still in development, and it is still unclear whether symbols will be
allowed to have types like "$tType > $o" (i.e., "![A : $tType] : $o"), with
"dummy" type variables. *)
-val exploit_tff1_dummy_type_vars = false
+val avoid_first_order_dummy_type_vars = true
val bound_var_prefix = "B_"
val all_bound_var_prefix = "BA_"
@@ -325,8 +325,8 @@
fun default c = const_prefix ^ lookup_const c
in
fun make_fixed_const _ @{const_name HOL.eq} = tptp_old_equal
- | make_fixed_const (SOME (THF0 THF_With_Choice)) c =
- if c = choice_const then tptp_choice else default c
+ | make_fixed_const (SOME (THF (_, _, THF_With_Choice))) c =
+ if c = choice_const then tptp_choice else default c
| make_fixed_const _ c = default c
end
@@ -579,14 +579,20 @@
|> (fn (poly, (level, (uniformity, core))) =>
case (core, (poly, level, uniformity)) of
("simple", (SOME poly, _, Nonuniform)) =>
- (case poly of
- Raw_Monomorphic => raise Same.SAME
- | _ => Simple_Types (First_Order, poly, level))
+ (case (poly, level) of
+ (Polymorphic, All_Types) =>
+ Simple_Types (First_Order, Polymorphic, All_Types)
+ | (Mangled_Monomorphic, _) =>
+ Simple_Types (First_Order, Mangled_Monomorphic, level)
+ | _ => raise Same.SAME)
| ("simple_higher", (SOME poly, _, Nonuniform)) =>
(case (poly, level) of
- (Raw_Monomorphic, _) => raise Same.SAME
+ (Polymorphic, All_Types) =>
+ Simple_Types (Higher_Order, Polymorphic, All_Types)
| (_, Noninf_Nonmono_Types _) => raise Same.SAME
- | _ => Simple_Types (Higher_Order, poly, level))
+ | (Mangled_Monomorphic, _) =>
+ Simple_Types (Higher_Order, Mangled_Monomorphic, level)
+ | _ => raise Same.SAME)
| ("guards", (SOME poly, _, _)) => Guards (poly, level, uniformity)
| ("tags", (SOME Polymorphic, _, _)) =>
Tags (Polymorphic, level, uniformity)
@@ -627,12 +633,13 @@
| is_type_level_monotonicity_based Fin_Nonmono_Types = true
| is_type_level_monotonicity_based _ = false
-fun adjust_type_enc (THF0 _) (Simple_Types (order, Polymorphic, level)) =
+fun adjust_type_enc (THF (TPTP_Monomorphic, _, _))
+ (Simple_Types (order, _, level)) =
Simple_Types (order, Mangled_Monomorphic, level)
- | adjust_type_enc (THF0 _) type_enc = type_enc
- | adjust_type_enc (TFF (TFF_Monomorphic, _)) (Simple_Types (_, _, level)) =
+ | adjust_type_enc (THF _) type_enc = type_enc
+ | adjust_type_enc (TFF (TPTP_Monomorphic, _)) (Simple_Types (_, _, level)) =
Simple_Types (First_Order, Mangled_Monomorphic, level)
- | adjust_type_enc (TFF (_, _)) (Simple_Types (_, poly, level)) =
+ | adjust_type_enc (TFF _) (Simple_Types (_, poly, level)) =
Simple_Types (First_Order, poly, level)
| adjust_type_enc format (Simple_Types (_, poly, level)) =
adjust_type_enc format (Guards (poly, level, Uniform))
@@ -742,8 +749,9 @@
fun type_class_formula type_enc class arg =
AAtom (ATerm (class, arg ::
(case type_enc of
- Simple_Types (_, Polymorphic, _) =>
- if exploit_tff1_dummy_type_vars then [] else [ATerm (TYPE_name, [arg])]
+ Simple_Types (First_Order, Polymorphic, _) =>
+ if avoid_first_order_dummy_type_vars then [ATerm (TYPE_name, [arg])]
+ else []
| _ => [])))
fun formulas_for_types type_enc add_sorts_on_typ Ts =
[] |> level_of_type_enc type_enc <> No_Types ? fold add_sorts_on_typ Ts
@@ -1369,16 +1377,14 @@
fun filter_type_args _ _ _ [] = []
| filter_type_args thy s arity T_args =
- let val U = robust_const_type thy s in
- case Term.add_tvarsT (U |> chop_fun arity |> snd) [] of
- [] => []
- | res_U_vars =>
- let val U_args = (s, U) |> Sign.const_typargs thy in
- U_args ~~ T_args
- |> map (fn (U, T) =>
- if member (op =) res_U_vars (dest_TVar U) then T
- else dummyT)
- end
+ let
+ val U = robust_const_type thy s
+ val arg_U_vars = fold Term.add_tvarsT (U |> chop_fun arity |> fst) []
+ val U_args = (s, U) |> robust_const_typargs thy
+ in
+ U_args ~~ T_args
+ |> map (fn (U, T) =>
+ if member (op =) arg_U_vars (dest_TVar U) then dummyT else T)
end
handle TYPE _ => T_args
@@ -1394,14 +1400,13 @@
| SOME s'' =>
let
val s'' = invert_const s''
- fun filtered_T_args false = T_args
- | filtered_T_args true = filter_type_args thy s'' arity T_args
+ fun filter_T_args false = T_args
+ | filter_T_args true = filter_type_args thy s'' arity T_args
in
case type_arg_policy type_enc s'' of
- Explicit_Type_Args drop_args =>
- (name, filtered_T_args drop_args)
+ Explicit_Type_Args drop_args => (name, filter_T_args drop_args)
| Mangled_Type_Args drop_args =>
- (mangled_const_name format type_enc (filtered_T_args drop_args)
+ (mangled_const_name format type_enc (filter_T_args drop_args)
name, [])
| No_Type_Args => (name, [])
end)
@@ -1555,9 +1560,8 @@
let
fun add (Const (@{const_name Meson.skolem}, _) $ _) = I
| add (t $ u) = add t #> add u
- | add (Const (x as (s, _))) =
- if String.isPrefix skolem_const_prefix s then I
- else x |> Sign.const_typargs thy |> fold (fold_type_constrs set_insert)
+ | add (Const x) =
+ x |> robust_const_typargs thy |> fold (fold_type_constrs set_insert)
| add (Free (s, T)) =
if String.isPrefix polymorphic_free_prefix s then
T |> fold_type_constrs set_insert
@@ -1779,18 +1783,19 @@
(** Symbol declarations **)
-fun decl_line_for_class s =
+fun decl_line_for_class order s =
let val name as (s, _) = `make_type_class s in
Decl (sym_decl_prefix ^ s, name,
- if exploit_tff1_dummy_type_vars then
- AFun (atype_of_types, bool_atype)
+ if order = First_Order andalso avoid_first_order_dummy_type_vars then
+ ATyAbs ([tvar_a_name], AFun (a_itself_atype, bool_atype))
else
- ATyAbs ([tvar_a_name], AFun (a_itself_atype, bool_atype)))
+ AFun (atype_of_types, bool_atype))
end
fun decl_lines_for_classes type_enc classes =
case type_enc of
- Simple_Types (_, Polymorphic, _) => map decl_line_for_class classes
+ Simple_Types (order, Polymorphic, _) =>
+ map (decl_line_for_class order) classes
| _ => []
fun sym_decl_table_for_facts ctxt format type_enc repaired_sym_tab
@@ -2232,7 +2237,8 @@
CNF => ensure_cnf_problem
| CNF_UEQ => filter_cnf_ueq_problem
| FOF => I
- | TFF (_, TFF_Implicit) => I
+ | TFF (_, TPTP_Implicit) => I
+ | THF (_, TPTP_Implicit, _) => I
| _ => declare_undeclared_syms_in_atp_problem type_decl_prefix
implicit_declsN)
val (problem, pool) = problem |> nice_atp_problem readable_names
--- a/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs Tue Sep 06 21:11:12 2011 +0200
@@ -4,7 +4,7 @@
import Control.Exception;
import System.IO;
import System.Exit;
-import Code;
+import Generated_Code;
type Pos = [Int];
--- a/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs Tue Sep 06 21:11:12 2011 +0200
@@ -8,7 +8,7 @@
import System.Exit
import Maybe
import List (partition, findIndex)
-import Code
+import Generated_Code
type Pos = [Int]
--- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML Tue Sep 06 21:11:12 2011 +0200
@@ -235,17 +235,17 @@
if Config.get ctxt overlord then with_overlord_dir else Isabelle_System.with_tmp_dir
fun run in_path =
let
- val code_file = Path.append in_path (Path.basic "Code.hs")
+ val code_file = Path.append in_path (Path.basic "Generated_Code.hs")
val narrowing_engine_file = Path.append in_path (Path.basic "Narrowing_Engine.hs")
val main_file = Path.append in_path (Path.basic "Main.hs")
val main = "module Main where {\n\n" ^
"import System;\n" ^
"import Narrowing_Engine;\n" ^
- "import Code;\n\n" ^
- "main = getArgs >>= \\[size] -> Narrowing_Engine.depthCheck (read size) (Code.value ())\n\n" ^
+ "import Generated_Code;\n\n" ^
+ "main = getArgs >>= \\[size] -> Narrowing_Engine.depthCheck (read size) (Generated_Code.value ())\n\n" ^
"}\n"
- val code' = prefix "module Code where {\n\ndata Typerep = Typerep String [Typerep];\n"
- (unprefix "module Code where {" code)
+ val code' = prefix "module Generated_Code where {\n\ndata Typerep = Typerep String [Typerep];\n"
+ (unprefix "module Generated_Code where {" code)
val _ = File.write code_file code'
val _ = File.write narrowing_engine_file
(if contains_existentials then pnf_narrowing_engine else narrowing_engine)
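For reference, the Main.hs assembled by the code above after the module rename reads as follows; this is reconstructed verbatim from the string literals in the hunk (the explicit braces and semicolons are there because the module is emitted as a single string):

module Main where {

import System;
import Narrowing_Engine;
import Generated_Code;

main = getArgs >>= \[size] -> Narrowing_Engine.depthCheck (read size) (Generated_Code.value ())

}

The single command-line argument is the size bound handed to Narrowing_Engine.depthCheck.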
--- a/src/HOL/Transcendental.thy Tue Sep 06 20:55:18 2011 +0200
+++ b/src/HOL/Transcendental.thy Tue Sep 06 21:11:12 2011 +0200
@@ -1065,22 +1065,24 @@
using exp_inj_iff [where x=x and y=0] by simp
lemma lemma_exp_total: "1 \<le> y ==> \<exists>x. 0 \<le> x & x \<le> y - 1 & exp(x::real) = y"
-apply (rule IVT)
-apply (auto intro: isCont_exp simp add: le_diff_eq)
-apply (subgoal_tac "1 + (y - 1) \<le> exp (y - 1)")
-apply simp
-apply (rule exp_ge_add_one_self_aux, simp)
-done
+proof (rule IVT)
+ assume "1 \<le> y"
+ hence "0 \<le> y - 1" by simp
+ hence "1 + (y - 1) \<le> exp (y - 1)" by (rule exp_ge_add_one_self_aux)
+ thus "y \<le> exp (y - 1)" by simp
+qed (simp_all add: le_diff_eq)
lemma exp_total: "0 < (y::real) ==> \<exists>x. exp x = y"
-apply (rule_tac x = 1 and y = y in linorder_cases)
-apply (drule order_less_imp_le [THEN lemma_exp_total])
-apply (rule_tac [2] x = 0 in exI)
-apply (frule_tac [3] one_less_inverse)
-apply (drule_tac [4] order_less_imp_le [THEN lemma_exp_total], auto)
-apply (rule_tac x = "-x" in exI)
-apply (simp add: exp_minus)
-done
+proof (rule linorder_le_cases [of 1 y])
+ assume "1 \<le> y" thus "\<exists>x. exp x = y"
+ by (fast dest: lemma_exp_total)
+next
+ assume "0 < y" and "y \<le> 1"
+ hence "1 \<le> inverse y" by (simp add: one_le_inverse_iff)
+ then obtain x where "exp x = inverse y" by (fast dest: lemma_exp_total)
+ hence "exp (- x) = y" by (simp add: exp_minus)
+ thus "\<exists>x. exp x = y" ..
+qed
subsection {* Natural Logarithm *}
@@ -1722,19 +1724,29 @@
lemma sin_ge_zero: "[| 0 \<le> x; x \<le> pi |] ==> 0 \<le> sin x"
by (auto simp add: order_le_less sin_gt_zero_pi)
+text {* FIXME: This proof is almost identical to lemma @{text cos_is_zero}.
+ It should be possible to factor out some of the common parts. *}
+
lemma cos_total: "[| -1 \<le> y; y \<le> 1 |] ==> EX! x. 0 \<le> x & x \<le> pi & (cos x = y)"
-apply (subgoal_tac "\<exists>x. 0 \<le> x & x \<le> pi & cos x = y")
-apply (rule_tac [2] IVT2)
-apply (auto intro: order_less_imp_le DERIV_isCont DERIV_cos)
-apply (cut_tac x = xa and y = y in linorder_less_linear)
-apply (rule ccontr, auto)
-apply (drule_tac f = cos in Rolle)
-apply (drule_tac [5] f = cos in Rolle)
-apply (auto intro: order_less_imp_le DERIV_isCont DERIV_cos
- dest!: DERIV_cos [THEN DERIV_unique]
- simp add: differentiable_def)
-apply (auto dest: sin_gt_zero_pi [OF order_le_less_trans order_less_le_trans])
-done
+proof (rule ex_ex1I)
+ assume y: "-1 \<le> y" "y \<le> 1"
+ show "\<exists>x. 0 \<le> x & x \<le> pi & cos x = y"
+ by (rule IVT2, simp_all add: y)
+next
+ fix a b
+ assume a: "0 \<le> a \<and> a \<le> pi \<and> cos a = y"
+ assume b: "0 \<le> b \<and> b \<le> pi \<and> cos b = y"
+ have [simp]: "\<forall>x. cos differentiable x"
+ unfolding differentiable_def by (auto intro: DERIV_cos)
+ from a b show "a = b"
+ apply (cut_tac less_linear [of a b], auto)
+ apply (drule_tac f = cos in Rolle)
+ apply (drule_tac [5] f = cos in Rolle)
+ apply (auto dest!: DERIV_cos [THEN DERIV_unique])
+ apply (metis order_less_le_trans less_le sin_gt_zero_pi)
+ apply (metis order_less_le_trans less_le sin_gt_zero_pi)
+ done
+qed
lemma sin_total:
"[| -1 \<le> y; y \<le> 1 |] ==> EX! x. -(pi/2) \<le> x & x \<le> pi/2 & (sin x = y)"
@@ -1910,21 +1922,9 @@
thus ?thesis by simp
qed
-lemma tan_half: fixes x :: real assumes "- (pi / 2) < x" and "x < pi / 2"
- shows "tan x = sin (2 * x) / (cos (2 * x) + 1)"
-proof -
- from cos_gt_zero_pi[OF `- (pi / 2) < x` `x < pi / 2`]
- have "cos x \<noteq> 0" by auto
-
- have minus_cos_2x: "\<And>X. X - cos (2*x) = X - (cos x) ^ 2 + (sin x) ^ 2" unfolding cos_double by algebra
-
- have "tan x = (tan x + tan x) / 2" by auto
- also have "\<dots> = sin (x + x) / (cos x * cos x) / 2" unfolding add_tan_eq[OF `cos x \<noteq> 0` `cos x \<noteq> 0`] ..
- also have "\<dots> = sin (2 * x) / ((cos x) ^ 2 + (cos x) ^ 2 + cos (2*x) - cos (2*x))" unfolding divide_divide_eq_left numeral_2_eq_2 by auto
- also have "\<dots> = sin (2 * x) / ((cos x) ^ 2 + cos (2*x) + (sin x)^2)" unfolding minus_cos_2x by auto
- also have "\<dots> = sin (2 * x) / (cos (2*x) + 1)" by auto
- finally show ?thesis .
-qed
+lemma tan_half: "tan x = sin (2 * x) / (cos (2 * x) + 1)"
+ unfolding tan_def sin_double cos_double sin_squared_eq
+ by (simp add: power2_eq_square)
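The simplification behind the new unconditional tan_half can be checked by hand: for $\cos x \neq 0$,

\[
  \tan x \;=\; \frac{\sin x}{\cos x}
         \;=\; \frac{2\sin x\cos x}{2\cos^2 x}
         \;=\; \frac{\sin(2x)}{\cos(2x)+1},
\]

using $\sin(2x) = 2\sin x\cos x$ and $\cos(2x) = 2\cos^2 x - 1$. When $\cos x = 0$ both sides are $0$, since division by zero yields $0$ in Isabelle/HOL, which is why the bounds on $x$ from the old statement can be dropped.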
lemma DERIV_tan [simp]: "cos x \<noteq> 0 \<Longrightarrow> DERIV tan x :> inverse ((cos x)\<twosuperior>)"
unfolding tan_def
@@ -2196,15 +2196,24 @@
lemma arctan_ubound: "arctan y < pi/2"
by (auto simp only: arctan)
+lemma arctan_unique:
+ assumes "-(pi/2) < x" and "x < pi/2" and "tan x = y"
+ shows "arctan y = x"
+ using assms arctan [of y] tan_total [of y] by (fast elim: ex1E)
+
lemma arctan_tan:
"[|-(pi/2) < x; x < pi/2 |] ==> arctan(tan x) = x"
-apply (unfold arctan_def)
-apply (rule the1_equality)
-apply (rule tan_total, auto)
-done
+ by (rule arctan_unique, simp_all)
lemma arctan_zero_zero [simp]: "arctan 0 = 0"
-by (insert arctan_tan [of 0], simp)
+ by (rule arctan_unique, simp_all)
+
+lemma arctan_minus: "arctan (- x) = - arctan x"
+ apply (rule arctan_unique)
+ apply (simp only: neg_less_iff_less arctan_ubound)
+ apply (metis minus_less_iff arctan_lbound)
+ apply simp
+ done
lemma cos_arctan_not_zero [simp]: "cos (arctan x) \<noteq> 0"
by (intro less_imp_neq [symmetric] cos_gt_zero_pi
@@ -2235,6 +2244,30 @@
mult_assoc power_inverse [symmetric])
done
+lemma arctan_less_iff: "arctan x < arctan y \<longleftrightarrow> x < y"
+ by (metis tan_monotone' arctan_lbound arctan_ubound tan_arctan)
+
+lemma arctan_le_iff: "arctan x \<le> arctan y \<longleftrightarrow> x \<le> y"
+ by (simp only: not_less [symmetric] arctan_less_iff)
+
+lemma arctan_eq_iff: "arctan x = arctan y \<longleftrightarrow> x = y"
+ by (simp only: eq_iff [where 'a=real] arctan_le_iff)
+
+lemma zero_less_arctan_iff [simp]: "0 < arctan x \<longleftrightarrow> 0 < x"
+ using arctan_less_iff [of 0 x] by simp
+
+lemma arctan_less_zero_iff [simp]: "arctan x < 0 \<longleftrightarrow> x < 0"
+ using arctan_less_iff [of x 0] by simp
+
+lemma zero_le_arctan_iff [simp]: "0 \<le> arctan x \<longleftrightarrow> 0 \<le> x"
+ using arctan_le_iff [of 0 x] by simp
+
+lemma arctan_le_zero_iff [simp]: "arctan x \<le> 0 \<longleftrightarrow> x \<le> 0"
+ using arctan_le_iff [of x 0] by simp
+
+lemma arctan_eq_zero_iff [simp]: "arctan x = 0 \<longleftrightarrow> x = 0"
+ using arctan_eq_iff [of x 0] by simp
+
lemma isCont_inverse_function2:
fixes f g :: "real \<Rightarrow> real" shows
"\<lbrakk>a < x; x < b;
@@ -2427,98 +2460,34 @@
subsection {* Machin's formula *}
+lemma arctan_one: "arctan 1 = pi / 4"
+ by (rule arctan_unique, simp_all add: tan_45 m2pi_less_pi)
+
lemma tan_total_pi4: assumes "\<bar>x\<bar> < 1"
shows "\<exists> z. - (pi / 4) < z \<and> z < pi / 4 \<and> tan z = x"
-proof -
- obtain z where "- (pi / 2) < z" and "z < pi / 2" and "tan z = x" using tan_total by blast
- have "tan (pi / 4) = 1" and "tan (- (pi / 4)) = - 1" using tan_45 tan_minus by auto
- have "z \<noteq> pi / 4"
- proof (rule ccontr)
- assume "\<not> (z \<noteq> pi / 4)" hence "z = pi / 4" by auto
- have "tan z = 1" unfolding `z = pi / 4` `tan (pi / 4) = 1` ..
- thus False unfolding `tan z = x` using `\<bar>x\<bar> < 1` by auto
- qed
- have "z \<noteq> - (pi / 4)"
- proof (rule ccontr)
- assume "\<not> (z \<noteq> - (pi / 4))" hence "z = - (pi / 4)" by auto
- have "tan z = - 1" unfolding `z = - (pi / 4)` `tan (- (pi / 4)) = - 1` ..
- thus False unfolding `tan z = x` using `\<bar>x\<bar> < 1` by auto
- qed
-
- have "z < pi / 4"
- proof (rule ccontr)
- assume "\<not> (z < pi / 4)" hence "pi / 4 < z" using `z \<noteq> pi / 4` by auto
- have "- (pi / 2) < pi / 4" using m2pi_less_pi by auto
- from tan_monotone[OF this `pi / 4 < z` `z < pi / 2`]
- have "1 < x" unfolding `tan z = x` `tan (pi / 4) = 1` .
- thus False using `\<bar>x\<bar> < 1` by auto
- qed
- moreover
- have "-(pi / 4) < z"
- proof (rule ccontr)
- assume "\<not> (-(pi / 4) < z)" hence "z < - (pi / 4)" using `z \<noteq> - (pi / 4)` by auto
- have "-(pi / 4) < pi / 2" using m2pi_less_pi by auto
- from tan_monotone[OF `-(pi / 2) < z` `z < -(pi / 4)` this]
- have "x < - 1" unfolding `tan z = x` `tan (-(pi / 4)) = - 1` .
- thus False using `\<bar>x\<bar> < 1` by auto
- qed
- ultimately show ?thesis using `tan z = x` by auto
+proof
+ show "- (pi / 4) < arctan x \<and> arctan x < pi / 4 \<and> tan (arctan x) = x"
+ unfolding arctan_one [symmetric] arctan_minus [symmetric]
+ unfolding arctan_less_iff using assms by auto
qed
lemma arctan_add: assumes "\<bar>x\<bar> \<le> 1" and "\<bar>y\<bar> < 1"
shows "arctan x + arctan y = arctan ((x + y) / (1 - x * y))"
-proof -
- obtain y' where "-(pi/4) < y'" and "y' < pi/4" and "tan y' = y" using tan_total_pi4[OF `\<bar>y\<bar> < 1`] by blast
-
- have "pi / 4 < pi / 2" by auto
-
- have "\<exists> x'. -(pi/4) \<le> x' \<and> x' \<le> pi/4 \<and> tan x' = x"
- proof (cases "\<bar>x\<bar> < 1")
- case True from tan_total_pi4[OF this] obtain x' where "-(pi/4) < x'" and "x' < pi/4" and "tan x' = x" by blast
- hence "-(pi/4) \<le> x'" and "x' \<le> pi/4" and "tan x' = x" by auto
- thus ?thesis by auto
- next
- case False
- show ?thesis
- proof (cases "x = 1")
- case True hence "tan (pi/4) = x" using tan_45 by auto
- moreover
- have "- pi \<le> pi" unfolding minus_le_self_iff by auto
- hence "-(pi/4) \<le> pi/4" and "pi/4 \<le> pi/4" by auto
- ultimately show ?thesis by blast
- next
- case False hence "x = -1" using `\<not> \<bar>x\<bar> < 1` and `\<bar>x\<bar> \<le> 1` by auto
- hence "tan (-(pi/4)) = x" using tan_45 tan_minus by auto
- moreover
- have "- pi \<le> pi" unfolding minus_le_self_iff by auto
- hence "-(pi/4) \<le> pi/4" and "-(pi/4) \<le> -(pi/4)" by auto
- ultimately show ?thesis by blast
- qed
- qed
- then obtain x' where "-(pi/4) \<le> x'" and "x' \<le> pi/4" and "tan x' = x" by blast
- hence "-(pi/2) < x'" and "x' < pi/2" using order_le_less_trans[OF `x' \<le> pi/4` `pi / 4 < pi / 2`] by auto
-
- have "cos x' \<noteq> 0" using cos_gt_zero_pi[THEN less_imp_neq] and `-(pi/2) < x'` and `x' < pi/2` by auto
- moreover have "cos y' \<noteq> 0" using cos_gt_zero_pi[THEN less_imp_neq] and `-(pi/4) < y'` and `y' < pi/4` by auto
- ultimately have "cos x' * cos y' \<noteq> 0" by auto
-
- have divide_nonzero_divide: "\<And> A B C :: real. C \<noteq> 0 \<Longrightarrow> (A / C) / (B / C) = A / B" by auto
- have divide_mult_commute: "\<And> A B C D :: real. A * B / (C * D) = (A / C) * (B / D)" by auto
-
- have "tan (x' + y') = sin (x' + y') / (cos x' * cos y' - sin x' * sin y')" unfolding tan_def cos_add ..
- also have "\<dots> = (tan x' + tan y') / ((cos x' * cos y' - sin x' * sin y') / (cos x' * cos y'))" unfolding add_tan_eq[OF `cos x' \<noteq> 0` `cos y' \<noteq> 0`] divide_nonzero_divide[OF `cos x' * cos y' \<noteq> 0`] ..
- also have "\<dots> = (tan x' + tan y') / (1 - tan x' * tan y')" unfolding tan_def diff_divide_distrib divide_self[OF `cos x' * cos y' \<noteq> 0`] unfolding divide_mult_commute ..
- finally have tan_eq: "tan (x' + y') = (x + y) / (1 - x * y)" unfolding `tan y' = y` `tan x' = x` .
-
- have "arctan (tan (x' + y')) = x' + y'" using `-(pi/4) < y'` `-(pi/4) \<le> x'` `y' < pi/4` and `x' \<le> pi/4` by (auto intro!: arctan_tan)
- moreover have "arctan (tan (x')) = x'" using `-(pi/2) < x'` and `x' < pi/2` by (auto intro!: arctan_tan)
- moreover have "arctan (tan (y')) = y'" using `-(pi/4) < y'` and `y' < pi/4` by (auto intro!: arctan_tan)
- ultimately have "arctan x + arctan y = arctan (tan (x' + y'))" unfolding `tan y' = y` [symmetric] `tan x' = x`[symmetric] by auto
- thus "arctan x + arctan y = arctan ((x + y) / (1 - x * y))" unfolding tan_eq .
+proof (rule arctan_unique [symmetric])
+ have "- (pi / 4) \<le> arctan x" and "- (pi / 4) < arctan y"
+ unfolding arctan_one [symmetric] arctan_minus [symmetric]
+ unfolding arctan_le_iff arctan_less_iff using assms by auto
+ from add_le_less_mono [OF this]
+ show 1: "- (pi / 2) < arctan x + arctan y" by simp
+ have "arctan x \<le> pi / 4" and "arctan y < pi / 4"
+ unfolding arctan_one [symmetric]
+ unfolding arctan_le_iff arctan_less_iff using assms by auto
+ from add_le_less_mono [OF this]
+ show 2: "arctan x + arctan y < pi / 2" by simp
+ show "tan (arctan x + arctan y) = (x + y) / (1 - x * y)"
+ using cos_gt_zero_pi [OF 1 2] by (simp add: tan_add)
qed
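A sketch of the argument in the new proof: write $a = \arctan x$ and $b = \arctan y$. The hypotheses $|x| \le 1$ and $|y| < 1$ place $a$ in $[-\pi/4, \pi/4]$ and $b$ in $(-\pi/4, \pi/4)$, so $a + b$ lies strictly between $-\pi/2$ and $\pi/2$, and the tangent addition formula gives

\[
  \tan(a+b) \;=\; \frac{\tan a + \tan b}{1 - \tan a\,\tan b} \;=\; \frac{x+y}{1-x\,y},
\]

after which arctan_unique identifies $a + b$ with $\arctan\bigl((x+y)/(1-xy)\bigr)$.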
-lemma arctan1_eq_pi4: "arctan 1 = pi / 4" unfolding tan_45[symmetric] by (rule arctan_tan, auto simp add: m2pi_less_pi)
-
theorem machin: "pi / 4 = 4 * arctan (1/5) - arctan (1 / 239)"
proof -
have "\<bar>1 / 5\<bar> < (1 :: real)" by auto
@@ -2533,8 +2502,9 @@
from arctan_add[OF this]
have "arctan 1 + arctan (1 / 239) = arctan (120 / 119)" by auto
ultimately have "arctan 1 + arctan (1 / 239) = 4 * arctan (1 / 5)" by auto
- thus ?thesis unfolding arctan1_eq_pi4 by algebra
+ thus ?thesis unfolding arctan_one by algebra
qed
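The arithmetic behind Machin's identity, for checking by hand (the intermediate arctan_add steps of the Isar proof are elided between the hunks above):

\[
  2\arctan\tfrac{1}{5} = \arctan\frac{2/5}{1 - 1/25} = \arctan\tfrac{5}{12}, \qquad
  2\arctan\tfrac{5}{12} = \arctan\frac{5/6}{1 - 25/144} = \arctan\tfrac{120}{119},
\]
\[
  \arctan 1 + \arctan\tfrac{1}{239} = \arctan\frac{1 + 1/239}{1 - 1/239} = \arctan\tfrac{120}{119},
\]

so $4\arctan\tfrac{1}{5} = \arctan 1 + \arctan\tfrac{1}{239}$, and subtracting $\arctan\tfrac{1}{239}$ and unfolding arctan_one gives the theorem.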
+
subsection {* Introducing the arcus tangens power series *}
lemma monoseq_arctan_series: fixes x :: real
@@ -2821,71 +2791,40 @@
have "arctan x = y" using arctan_tan low high y_eq by auto
also have "\<dots> = 2 * (arctan (tan (y/2)))" using arctan_tan[OF low2 high2] by auto
- also have "\<dots> = 2 * (arctan (sin y / (cos y + 1)))" unfolding tan_half[OF low2 high2] by auto
+ also have "\<dots> = 2 * (arctan (sin y / (cos y + 1)))" unfolding tan_half by auto
finally show ?thesis unfolding eq `tan y = x` .
qed
lemma arctan_monotone: assumes "x < y"
shows "arctan x < arctan y"
-proof -
- obtain z where "-(pi / 2) < z" and "z < pi / 2" and "tan z = x" using tan_total by blast
- obtain w where "-(pi / 2) < w" and "w < pi / 2" and "tan w = y" using tan_total by blast
- have "z < w" unfolding tan_monotone'[OF `-(pi / 2) < z` `z < pi / 2` `-(pi / 2) < w` `w < pi / 2`] `tan z = x` `tan w = y` using `x < y` .
- thus ?thesis
- unfolding `tan z = x`[symmetric] arctan_tan[OF `-(pi / 2) < z` `z < pi / 2`]
- unfolding `tan w = y`[symmetric] arctan_tan[OF `-(pi / 2) < w` `w < pi / 2`] .
-qed
+ using assms by (simp only: arctan_less_iff)
lemma arctan_monotone': assumes "x \<le> y" shows "arctan x \<le> arctan y"
-proof (cases "x = y")
- case False hence "x < y" using `x \<le> y` by auto from arctan_monotone[OF this] show ?thesis by auto
-qed auto
-
-lemma arctan_minus: "arctan (- x) = - arctan x"
-proof -
- obtain y where "- (pi / 2) < y" and "y < pi / 2" and "tan y = x" using tan_total by blast
- thus ?thesis unfolding `tan y = x`[symmetric] tan_minus[symmetric] using arctan_tan[of y] arctan_tan[of "-y"] by auto
-qed
-
-lemma arctan_inverse: assumes "x \<noteq> 0" shows "arctan (1 / x) = sgn x * pi / 2 - arctan x"
-proof -
- obtain y where "- (pi / 2) < y" and "y < pi / 2" and "tan y = x" using tan_total by blast
- hence "y = arctan x" unfolding `tan y = x`[symmetric] using arctan_tan by auto
-
- { fix y x :: real assume "0 < y" and "y < pi /2" and "y = arctan x" and "tan y = x" hence "- (pi / 2) < y" by auto
- have "tan y > 0" using tan_monotone'[OF _ _ `- (pi / 2) < y` `y < pi / 2`, of 0] tan_zero `0 < y` by auto
- hence "x > 0" using `tan y = x` by auto
-
- have "- (pi / 2) < pi / 2 - y" using `y > 0` `y < pi / 2` by auto
- moreover have "pi / 2 - y < pi / 2" using `y > 0` `y < pi / 2` by auto
- ultimately have "arctan (1 / x) = pi / 2 - y" unfolding `tan y = x`[symmetric] tan_inverse using arctan_tan by auto
- hence "arctan (1 / x) = sgn x * pi / 2 - arctan x" unfolding `y = arctan x` real_sgn_pos[OF `x > 0`] by auto
- } note pos_y = this
-
- show ?thesis
- proof (cases "y > 0")
- case True from pos_y[OF this `y < pi / 2` `y = arctan x` `tan y = x`] show ?thesis .
- next
- case False hence "y \<le> 0" by auto
- moreover have "y \<noteq> 0"
- proof (rule ccontr)
- assume "\<not> y \<noteq> 0" hence "y = 0" by auto
- have "x = 0" unfolding `tan y = x`[symmetric] `y = 0` tan_zero ..
- thus False using `x \<noteq> 0` by auto
- qed
- ultimately have "y < 0" by auto
- hence "0 < - y" and "-y < pi / 2" using `- (pi / 2) < y` by auto
- moreover have "-y = arctan (-x)" unfolding arctan_minus `y = arctan x` ..
- moreover have "tan (-y) = -x" unfolding tan_minus `tan y = x` ..
- ultimately have "arctan (1 / -x) = sgn (-x) * pi / 2 - arctan (-x)" using pos_y by blast
- hence "arctan (- (1 / x)) = - (sgn x * pi / 2 - arctan x)" unfolding arctan_minus[of x] divide_minus_right sgn_minus by auto
- thus ?thesis unfolding arctan_minus neg_equal_iff_equal .
- qed
+ using assms by (simp only: arctan_le_iff)
+
+lemma arctan_inverse:
+ assumes "x \<noteq> 0" shows "arctan (1 / x) = sgn x * pi / 2 - arctan x"
+proof (rule arctan_unique)
+ show "- (pi / 2) < sgn x * pi / 2 - arctan x"
+ using arctan_bounded [of x] assms
+ unfolding sgn_real_def
+ apply (auto simp add: algebra_simps)
+ apply (drule zero_less_arctan_iff [THEN iffD2])
+ apply arith
+ done
+ show "sgn x * pi / 2 - arctan x < pi / 2"
+ using arctan_bounded [of "- x"] assms
+ unfolding sgn_real_def arctan_minus
+ by auto
+ show "tan (sgn x * pi / 2 - arctan x) = 1 / x"
+ unfolding tan_inverse [of "arctan x", unfolded tan_arctan]
+ unfolding sgn_real_def
+ by (simp add: tan_def cos_arctan sin_arctan sin_diff cos_diff)
qed
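Informally, for $x > 0$ (so $\operatorname{sgn} x = 1$) the identity reads as follows: $\arctan x \in (0, \pi/2)$, hence $\pi/2 - \arctan x$ again lies in $(-\pi/2, \pi/2)$, and

\[
  \tan\!\Bigl(\frac{\pi}{2} - \arctan x\Bigr)
  \;=\; \frac{\cos(\arctan x)}{\sin(\arctan x)}
  \;=\; \frac{1}{\tan(\arctan x)}
  \;=\; \frac{1}{x},
\]

so arctan_unique yields $\arctan(1/x) = \pi/2 - \arctan x$; the case $x < 0$ follows via arctan_minus and $\operatorname{sgn}(-x) = -\operatorname{sgn} x$, and the new proof treats both signs uniformly by unfolding sgn_real_def.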
theorem pi_series: "pi / 4 = (\<Sum> k. (-1)^k * 1 / real (k*2+1))" (is "_ = ?SUM")
proof -
- have "pi / 4 = arctan 1" using arctan1_eq_pi4 by auto
+ have "pi / 4 = arctan 1" using arctan_one by auto
also have "\<dots> = ?SUM" using arctan_series[of 1] by auto
finally show ?thesis by auto
qed
--- a/src/Tools/Code/code_target.ML Tue Sep 06 20:55:18 2011 +0200
+++ b/src/Tools/Code/code_target.ML Tue Sep 06 21:11:12 2011 +0200
@@ -394,7 +394,7 @@
fun check_code_for thy target strict args naming program names_cs =
let
- val module_name = "Code";
+ val module_name = "Generated_Code";
val { env_var, make_destination, make_command } =
(#check o the_fundamental thy) target;
fun ext_check p =
@@ -435,7 +435,7 @@
fun evaluator thy target naming program consts =
let
val (mounted_serializer, prepared_program) = mount_serializer thy
- target NONE "Code" [] naming program consts;
+ target NONE "Generated_Code" [] naming program consts;
in evaluation mounted_serializer prepared_program consts end;
end; (* local *)