--- a/NEWS Thu Feb 09 19:34:23 2012 +0100
+++ b/NEWS Tue Feb 14 11:16:07 2012 +0100
@@ -73,6 +73,8 @@
predicate and what a set. It can be helpful to carry out that step in
Isabelle2011-1 before jumping right into the current release.
+* New type synonym 'a rel = ('a * 'a) set
+
* Consolidated various theorem names relating to Finite_Set.fold
combinator:
@@ -130,6 +132,36 @@
INCOMPATIBILITY.
+* Renamed facts about the power operation on relations, i.e., relpow,
+ to match the constant's name:
+
+ rel_pow_1 ~> relpow_1
+ rel_pow_0_I ~> relpow_0_I
+ rel_pow_Suc_I ~> relpow_Suc_I
+ rel_pow_Suc_I2 ~> relpow_Suc_I2
+ rel_pow_0_E ~> relpow_0_E
+ rel_pow_Suc_E ~> relpow_Suc_E
+ rel_pow_E ~> relpow_E
+ rel_pow_Suc_D2 ~> relpow_Suc_D2
+ rel_pow_Suc_E2 ~> relpow_Suc_E2
+ rel_pow_Suc_D2' ~> relpow_Suc_D2'
+ rel_pow_E2 ~> relpow_E2
+ rel_pow_add ~> relpow_add
+ rel_pow_commute ~> relpow_commute
+ rel_pow_empty ~> relpow_empty
+ rtrancl_imp_UN_rel_pow ~> rtrancl_imp_UN_relpow
+ rel_pow_imp_rtrancl ~> relpow_imp_rtrancl
+ rtrancl_is_UN_rel_pow ~> rtrancl_is_UN_relpow
+ rtrancl_imp_rel_pow ~> rtrancl_imp_relpow
+ rel_pow_fun_conv ~> relpow_fun_conv
+ rel_pow_finite_bounded1 ~> relpow_finite_bounded1
+ rel_pow_finite_bounded ~> relpow_finite_bounded
+ rtrancl_finite_eq_rel_pow ~> rtrancl_finite_eq_relpow
+ trancl_finite_eq_rel_pow ~> trancl_finite_eq_relpow
+ single_valued_rel_pow ~> single_valued_relpow
+
+INCOMPATIBILITY.
+
* New theory HOL/Library/DAList provides an abstract type for association
lists with distinct keys.
@@ -273,12 +305,13 @@
affecting 'rat' and 'real'.
* Sledgehammer:
- - Added "lam_trans" and "minimize" options.
+ - Added "lam_trans", "uncurried_aliases", and "minimize" options.
- Renamed "slicing" ("no_slicing") option to "slice" ("dont_slice").
+ - Renamed "sound" option to "strict".
* Metis:
- Added possibility to specify lambda translations scheme as a
- parenthesized argument (e.g., "by (metis (lam_lifting) ...)").
+ parenthesized argument (e.g., "by (metis (lifting) ...)").
*** FOL ***
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc-src/Ref/defining.tex Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,314 @@
+
+\chapter{Defining Logics} \label{Defining-Logics}
+
+\section{Mixfix declarations} \label{sec:mixfix}
+\index{mixfix declarations|(}
+
+When defining a theory, you declare new constants by giving their names,
+their type, and an optional {\bf mixfix annotation}. Mixfix annotations
+allow you to extend Isabelle's basic $\lambda$-calculus syntax with
+readable notation. They can express any context-free priority grammar.
+Isabelle syntax definitions are inspired by \OBJ~\cite{OBJ}; they are more
+general than the priority declarations of \ML\ and Prolog.
+
+A mixfix annotation defines a production of the priority grammar. It
+describes the concrete syntax, the translation to abstract syntax, and the
+pretty printing. Special case annotations provide a simple means of
+specifying infix operators and binders.
+
+\subsection{The general mixfix form}
+Here is a detailed account of mixfix declarations. Suppose the following
+line occurs within a {\tt consts} or {\tt syntax} section of a {\tt .thy}
+file:
+\begin{center}
+ {\tt $c$ ::\ "$\sigma$" ("$template$" $ps$ $p$)}
+\end{center}
+This constant declaration and mixfix annotation are interpreted as follows:
+\begin{itemize}\index{productions}
+\item The string {\tt $c$} is the name of the constant associated with the
+ production; unless it is a valid identifier, it must be enclosed in
+ quotes. If $c$ is empty (given as~{\tt ""}) then this is a copy
+ production.\index{productions!copy} Otherwise, parsing an instance of the
+ phrase $template$ generates the \AST{} {\tt ("$c$" $a@1$ $\ldots$
+ $a@n$)}, where $a@i$ is the \AST{} generated by parsing the $i$-th
+ argument.
+
+ \item The constant $c$, if non-empty, is declared to have type $\sigma$
+ ({\tt consts} section only).
+
+ \item The string $template$ specifies the right-hand side of
+ the production. It has the form
+ \[ w@0 \;_\; w@1 \;_\; \ldots \;_\; w@n, \]
+ where each occurrence of {\tt_} denotes an argument position and
+ the~$w@i$ do not contain~{\tt _}. (If you want a literal~{\tt _} in
+ the concrete syntax, you must escape it as described below.) The $w@i$
+ may consist of \rmindex{delimiters}, spaces or
+ \rmindex{pretty printing} annotations (see below).
+
+ \item The type $\sigma$ specifies the production's nonterminal symbols
+ (or name tokens). If $template$ is of the form above then $\sigma$
+ must be a function type with at least~$n$ argument positions, say
+ $\sigma = [\tau@1, \dots, \tau@n] \To \tau$. Nonterminal symbols are
+ derived from the types $\tau@1$, \ldots,~$\tau@n$, $\tau$ as described
+ below. Any of these may be function types.
+
+ \item The optional list~$ps$ may contain at most $n$ integers, say {\tt
+ [$p@1$, $\ldots$, $p@m$]}, where $p@i$ is the minimal
+ priority\indexbold{priorities} required of any phrase that may appear
+ as the $i$-th argument. Missing priorities default to~0.
+
+ \item The integer $p$ is the priority of this production. If
+ omitted, it defaults to the maximal priority. Priorities range
+ between 0 and \ttindexbold{max_pri} (= 1000).
+
+\end{itemize}
+%
+The resulting production is \[ A^{(p)}= w@0\, A@1^{(p@1)}\, w@1\,
+A@2^{(p@2)}\, \dots\, A@n^{(p@n)}\, w@n \] where $A$ and the $A@i$ are the
+nonterminals corresponding to the types $\tau$ and $\tau@i$ respectively.
+The nonterminal symbol associated with a type $(\ldots)ty$ is {\tt logic}, if
+this is a logical type (namely one of class {\tt logic} excluding {\tt
+prop}). Otherwise it is $ty$ (note that only the outermost type constructor
+is taken into account). Finally, the nonterminal of a type variable is {\tt
+any}.
+
+\begin{warn}
+ Theories must sometimes declare types for purely syntactic purposes ---
+ merely playing the role of nonterminals. One example is \tydx{type}, the
+ built-in type of types. This is a `type of all types' in the syntactic
+ sense only. Do not declare such types under {\tt arities} as belonging to
+ class {\tt logic}\index{*logic class}, for that would make them useless as
+ separate nonterminal symbols.
+\end{warn}
+
+Associating nonterminals with types allows a constant's type to specify
+syntax as well. We can declare the function~$f$ to have type $[\tau@1,
+\ldots, \tau@n]\To \tau$ and, through a mixfix annotation, specify the layout
+of the function's $n$ arguments. The constant's name, in this case~$f$, will
+also serve as the label in the abstract syntax tree.
+
+You may also declare mixfix syntax without adding constants to the theory's
+signature, by using a {\tt syntax} section instead of {\tt consts}. Thus a
+production need not map directly to a logical function (this typically
+requires additional syntactic translations, see also
+Chapter~\ref{chap:syntax}).
+
+
+\medskip
+As a special case of the general mixfix declaration, the form
+\begin{center}
+ {\tt $c$ ::\ "$\sigma$" ("$template$")}
+\end{center}
+specifies no priorities. The resulting production puts no priority
+constraints on any of its arguments and has maximal priority itself.
+Omitting priorities in this manner is prone to syntactic ambiguities unless
+the production's right-hand side is fully bracketed, as in
+\verb|"if _ then _ else _ fi"|.
+
+Omitting the mixfix annotation completely, as in {\tt $c$ ::\ "$\sigma$"},
+is sensible only if~$c$ is an identifier. Otherwise you will be unable to
+write terms involving~$c$.
+
+
+\subsection{Example: arithmetic expressions}
+\index{examples!of mixfix declarations}
+This theory specification contains a {\tt syntax} section with mixfix
+declarations encoding the priority grammar from
+\S\ref{sec:priority_grammars}:
+\begin{ttbox}
+ExpSyntax = Pure +
+types
+ exp
+syntax
+ "0" :: exp ("0" 9)
+ "+" :: [exp, exp] => exp ("_ + _" [0, 1] 0)
+ "*" :: [exp, exp] => exp ("_ * _" [3, 2] 2)
+ "-" :: exp => exp ("- _" [3] 3)
+end
+\end{ttbox}
+Executing {\tt Syntax.print_gram} reveals the productions derived from the
+above mixfix declarations (lots of additional information deleted):
+\begin{ttbox}
+Syntax.print_gram (syn_of ExpSyntax.thy);
+{\out exp = "0" => "0" (9)}
+{\out exp = exp[0] "+" exp[1] => "+" (0)}
+{\out exp = exp[3] "*" exp[2] => "*" (2)}
+{\out exp = "-" exp[3] => "-" (3)}
+\end{ttbox}
+
+Note that because {\tt exp} is not of class {\tt logic}, it has been
+retained as a separate nonterminal. This also entails that the syntax
+does not provide for identifiers or parenthesized expressions.
+Normally you would also want to add the declaration {\tt arities
+ exp::logic} after {\tt types} and use {\tt consts} instead of {\tt
+ syntax}. Try this as an exercise and study the changes in the
+grammar.
+
+
+\subsection{Infixes}
+\indexbold{infixes}
+
+Infix operators associating to the left or right can be declared using
+{\tt infixl} or {\tt infixr}. Basically, the form {\tt $c$ ::\
+ $\sigma$ (infixl $p$)} abbreviates the mixfix declarations
+\begin{ttbox}
+"op \(c\)" :: \(\sigma\) ("(_ \(c\)/ _)" [\(p\), \(p+1\)] \(p\))
+"op \(c\)" :: \(\sigma\) ("op \(c\)")
+\end{ttbox}
+and {\tt $c$ ::\ $\sigma$ (infixr $p$)} abbreviates the mixfix declarations
+\begin{ttbox}
+"op \(c\)" :: \(\sigma\) ("(_ \(c\)/ _)" [\(p+1\), \(p\)] \(p\))
+"op \(c\)" :: \(\sigma\) ("op \(c\)")
+\end{ttbox}
+The infix operator is declared as a constant with the prefix {\tt op}.
+Thus, prefixing infixes with \sdx{op} makes them behave like ordinary
+function symbols, as in \ML. Special characters occurring in~$c$ must be
+escaped, as in delimiters, using a single quote.
+
+A slightly more general form of infix declarations allows constant
+names to be independent from their concrete syntax, namely \texttt{$c$
+ ::\ $\sigma$\ (infixl "$sy$" $p$)}, the same for \texttt{infixr}. As
+an example consider:
+\begin{ttbox}
+and :: [bool, bool] => bool (infixr "&" 35)
+\end{ttbox}
+The internal constant name will then be just \texttt{and}, without any
+\texttt{op} prefixed.
+
+
+\subsection{Binders}
+\indexbold{binders}
+\begingroup
+\def\Q{{\cal Q}}
+A {\bf binder} is a variable-binding construct such as a quantifier. The
+constant declaration
+\begin{ttbox}
+\(c\) :: \(\sigma\) (binder "\(\Q\)" [\(pb\)] \(p\))
+\end{ttbox}
+introduces a constant~$c$ of type~$\sigma$, which must have the form
+$(\tau@1 \To \tau@2) \To \tau@3$. Its concrete syntax is $\Q~x.P$, where
+$x$ is a bound variable of type~$\tau@1$, the body~$P$ has type $\tau@2$
+and the whole term has type~$\tau@3$. The optional integer $pb$
+specifies the body's priority, by default~$p$. Special characters
+in $\Q$ must be escaped using a single quote.
+
+The declaration is expanded internally to something like
+\begin{ttbox}
+\(c\)\hskip3pt :: (\(\tau@1\) => \(\tau@2\)) => \(\tau@3\)
+"\(\Q\)" :: [idts, \(\tau@2\)] => \(\tau@3\) ("(3\(\Q\)_./ _)" [0, \(pb\)] \(p\))
+\end{ttbox}
+Here \ndx{idts} is the nonterminal symbol for a list of identifiers with
+\index{type constraints}
+optional type constraints (see Fig.\ts\ref{fig:pure_gram}). The
+declaration also installs a parse translation\index{translations!parse}
+for~$\Q$ and a print translation\index{translations!print} for~$c$ to
+translate between the internal and external forms.
+
+A binder of type $(\sigma \To \tau) \To \tau$ can be nested by giving a
+list of variables. The external form $\Q~x@1~x@2 \ldots x@n. P$
+corresponds to the internal form
+\[ c(\lambda x@1. c(\lambda x@2. \ldots c(\lambda x@n. P) \ldots)). \]
+
+\medskip
+For example, let us declare the quantifier~$\forall$:\index{quantifiers}
+\begin{ttbox}
+All :: ('a => o) => o (binder "ALL " 10)
+\end{ttbox}
+This lets us write $\forall x.P$ as either {\tt All(\%$x$.$P$)} or {\tt ALL
+ $x$.$P$}. When printing, Isabelle prefers the latter form, but must fall
+back on ${\tt All}(P)$ if $P$ is not an abstraction. Both $P$ and {\tt ALL
+ $x$.$P$} have type~$o$, the type of formulae, while the bound variable
+can be polymorphic.
+\endgroup
+
+\index{mixfix declarations|)}
+
+
+\section{*Alternative print modes} \label{sec:prmodes}
+\index{print modes|(}
+%
+Isabelle's pretty printer supports alternative output syntaxes. These
+may be used independently or in cooperation. The currently active
+print modes (with precedence from left to right) are determined by a
+reference variable.
+\begin{ttbox}\index{*print_mode}
+print_mode: string list ref
+\end{ttbox}
+Initially this may already contain some print mode identifiers,
+depending on how Isabelle has been invoked (e.g.\ by some user
+interface). So changes should be incremental --- adding or deleting
+modes relative to the current value.
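+For example, the \texttt{xsymbols} mode (one of the reserved names
+mentioned below) could be activated with highest precedence by
+prepending it to the current value:
+\begin{ttbox}
+print_mode := "xsymbols" :: !print_mode;
+\end{ttbox}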
+
+Any \ML{} string is a legal print mode identifier, without any predeclaration
+required. The following names should be considered reserved, though:
+\texttt{""} (the empty string), \texttt{symbols}, \texttt{xsymbols}, and
+\texttt{latex}.
+
+There is a separate table of mixfix productions for pretty printing
+associated with each print mode. The currently active ones are
+conceptually just concatenated from left to right, with the standard
+syntax output table always coming last as default. Thus mixfix
+productions of preceding modes in the list may override those of later
+ones.
+
+\medskip The canonical application of print modes is optional printing
+of mathematical symbols from a special screen font instead of {\sc
+ ascii}. Another example is to re-use Isabelle's advanced
+$\lambda$-term printing mechanisms to generate completely different
+output, say for interfacing external tools like \rmindex{model
+ checkers} (see also \texttt{HOL/Modelcheck}).
+
+\index{print modes|)}
+
+
+\section{Ambiguity of parsed expressions} \label{sec:ambiguity}
+\index{ambiguity!of parsed expressions}
+
+To keep the grammar small and to allow common productions to be shared,
+all logical types (except {\tt prop}) are internally represented by a
+single nonterminal, namely {\tt logic}. This, together with omitted or
+too freely chosen priorities, may lead to ways of parsing an expression
+that were not intended by the theory's author. In most cases Isabelle
+can select the intended one among the multiple parse trees of an
+expression by checking which of them can be typed correctly, but this
+does not work in every case and always slows down parsing.
+The warning and error messages that can be produced during this process
+are as follows:
+
+If an ambiguity can be resolved by type inference, the following
+warning is shown to remind the user that parsing is (unnecessarily)
+slowed down. In cases where the ambiguity cannot easily be eliminated,
+the frequency of the warning can be controlled by changing the value of
+{\tt Syntax.ambiguity_level}, which has type {\tt int ref}. Its default
+value is 1; increasing it raises the number of parse trees required
+before the warning is generated.
+
+\begin{ttbox}
+{\out Ambiguous input "\dots"}
+{\out produces the following parse trees:}
+{\out \dots}
+{\out Fortunately, only one parse tree is type correct.}
+{\out You may still want to disambiguate your grammar or your input.}
+\end{ttbox}
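+
+For example, to see this warning less often one may raise the threshold
+(the value 5 is merely illustrative):
+\begin{ttbox}
+Syntax.ambiguity_level := 5;
+\end{ttbox}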
+
+The following message is normally caused by using the same
+syntax in two different productions:
+
+\begin{ttbox}
+{\out Ambiguous input "..."}
+{\out produces the following parse trees:}
+{\out \dots}
+{\out More than one term is type correct:}
+{\out \dots}
+\end{ttbox}
+
+Ambiguities occurring in syntax translation rules cannot be resolved by
+type inference because these rules are not required to be type correct.
+Therefore Isabelle always generates an error message, and the ambiguity
+should be eliminated by changing the grammar or the rule.
+
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "ref"
+%%% End:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc-src/Ref/tctical.tex Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,544 @@
+
+\chapter{Tacticals}
+\index{tacticals|(}
+Tacticals are operations on tactics. Their implementation makes use of
+functional programming techniques, especially for sequences. Most of the
+time, you may forget about this and regard tacticals as high-level control
+structures.
+
+\section{The basic tacticals}
+\subsection{Joining two tactics}
+\index{tacticals!joining two tactics}
+The tacticals {\tt THEN} and {\tt ORELSE}, which provide sequencing and
+alternation, underlie most of the other control structures in Isabelle.
+{\tt APPEND} and {\tt INTLEAVE} provide more sophisticated forms of
+alternation.
+\begin{ttbox}
+THEN : tactic * tactic -> tactic \hfill{\bf infix 1}
+ORELSE : tactic * tactic -> tactic \hfill{\bf infix}
+APPEND : tactic * tactic -> tactic \hfill{\bf infix}
+INTLEAVE : tactic * tactic -> tactic \hfill{\bf infix}
+\end{ttbox}
+\begin{ttdescription}
+\item[$tac@1$ \ttindexbold{THEN} $tac@2$]
+is the sequential composition of the two tactics. Applied to a proof
+state, it returns all states reachable in two steps by applying $tac@1$
+followed by~$tac@2$. First, it applies $tac@1$ to the proof state, getting a
+sequence of next states; then, it applies $tac@2$ to each of these and
+concatenates the results.
+
+\item[$tac@1$ \ttindexbold{ORELSE} $tac@2$]
+makes a choice between the two tactics. Applied to a state, it
+tries~$tac@1$ and returns the result if non-empty; if $tac@1$ fails then it
+uses~$tac@2$. This is a deterministic choice: if $tac@1$ succeeds then
+$tac@2$ is excluded.
+
+\item[$tac@1$ \ttindexbold{APPEND} $tac@2$]
+concatenates the results of $tac@1$ and~$tac@2$. By not making a commitment
+to either tactic, {\tt APPEND} helps avoid incompleteness during
+search.\index{search}
+
+\item[$tac@1$ \ttindexbold{INTLEAVE} $tac@2$]
+interleaves the results of $tac@1$ and~$tac@2$. Thus, it includes all
+possible next states, even if one of the tactics returns an infinite
+sequence.
+\end{ttdescription}
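+For example, assuming that some list {\tt rules} of introduction rules
+is bound in \ML{} (the name is only illustrative), the following tactic
+first tries to resolve the first subgoal with one of these rules and
+then to close the resulting subgoal by assumption; if that fails, it
+falls back on assumption alone:
+\begin{ttbox}
+(* rules: thm list, assumed to be in scope *)
+val step_tac = (resolve_tac rules 1 THEN assume_tac 1)
+               ORELSE assume_tac 1;
+\end{ttbox}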
+
+
+\subsection{Joining a list of tactics}
+\index{tacticals!joining a list of tactics}
+\begin{ttbox}
+EVERY : tactic list -> tactic
+FIRST : tactic list -> tactic
+\end{ttbox}
+{\tt EVERY} and {\tt FIRST} are block structured versions of {\tt THEN} and
+{\tt ORELSE}\@.
+\begin{ttdescription}
+\item[\ttindexbold{EVERY} {$[tac@1,\ldots,tac@n]$}]
+abbreviates \hbox{\tt$tac@1$ THEN \ldots{} THEN $tac@n$}. It is useful for
+writing a series of tactics to be executed in sequence.
+
+\item[\ttindexbold{FIRST} {$[tac@1,\ldots,tac@n]$}]
+abbreviates \hbox{\tt$tac@1$ ORELSE \ldots{} ORELSE $tac@n$}. It is useful for
+writing a series of tactics to be attempted one after another.
+\end{ttdescription}
+
+
+\subsection{Repetition tacticals}
+\index{tacticals!repetition}
+\begin{ttbox}
+TRY : tactic -> tactic
+REPEAT_DETERM : tactic -> tactic
+REPEAT_DETERM_N : int -> tactic -> tactic
+REPEAT : tactic -> tactic
+REPEAT1 : tactic -> tactic
+DETERM_UNTIL : (thm -> bool) -> tactic -> tactic
+trace_REPEAT : bool ref \hfill{\bf initially false}
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{TRY} {\it tac}]
+applies {\it tac\/} to the proof state and returns the resulting sequence,
+if non-empty; otherwise it returns the original state. Thus, it applies
+{\it tac\/} at most once.
+
+\item[\ttindexbold{REPEAT_DETERM} {\it tac}]
+applies {\it tac\/} to the proof state and, recursively, to the head of the
+resulting sequence. It returns the first state to make {\it tac\/} fail.
+It is deterministic, discarding alternative outcomes.
+
+\item[\ttindexbold{REPEAT_DETERM_N} {\it n} {\it tac}]
+is like \hbox{\tt REPEAT_DETERM {\it tac}} but the number of repetitions
+is bounded by {\it n} (unless {\it n} is negative).
+
+\item[\ttindexbold{REPEAT} {\it tac}]
+applies {\it tac\/} to the proof state and, recursively, to each element of
+the resulting sequence. The resulting sequence consists of those states
+that make {\it tac\/} fail. Thus, it applies {\it tac\/} as many times as
+possible (including zero times), and allows backtracking over each
+invocation of {\it tac}. It is more general than {\tt REPEAT_DETERM}, but
+requires more space.
+
+\item[\ttindexbold{REPEAT1} {\it tac}]
+is like \hbox{\tt REPEAT {\it tac}} but it always applies {\it tac\/} at
+least once, failing if this is impossible.
+
+\item[\ttindexbold{DETERM_UNTIL} {\it p} {\it tac}]
+applies {\it tac\/} to the proof state and, recursively, to the head of the
+resulting sequence, until the predicate {\it p} (applied to the proof state)
+yields {\it true}. It fails if {\it tac\/} fails on any of the intermediate
+states. It is deterministic, discarding alternative outcomes.
+
+\item[set \ttindexbold{trace_REPEAT};]
+enables an interactive tracing mode for the tacticals {\tt REPEAT_DETERM}
+and {\tt REPEAT}. To view the tracing options, type {\tt h} at the prompt.
+\end{ttdescription}
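+For example, assuming {\tt conjI} denotes the usual conjunction
+introduction rule of the object-logic, the following tactic splits all
+outermost conjunctions of the first subgoal by applying that rule as
+often as possible:
+\begin{ttbox}
+(* conjI: thm, the conjunction introduction rule (assumed) *)
+val split_conjs_tac = REPEAT (resolve_tac [conjI] 1);
+\end{ttbox}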
+
+
+\subsection{Identities for tacticals}
+\index{tacticals!identities for}
+\begin{ttbox}
+all_tac : tactic
+no_tac : tactic
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{all_tac}]
+maps any proof state to the one-element sequence containing that state.
+Thus, it succeeds for all states. It is the identity element of the
+tactical \ttindex{THEN}\@.
+
+\item[\ttindexbold{no_tac}]
+maps any proof state to the empty sequence. Thus it succeeds for no state.
+It is the identity element of \ttindex{ORELSE}, \ttindex{APPEND}, and
+\ttindex{INTLEAVE}\@. Also, it is a zero element for \ttindex{THEN}, which means that
+\hbox{\tt$tac$ THEN no_tac} is equivalent to {\tt no_tac}.
+\end{ttdescription}
+These primitive tactics are useful when writing tacticals. For example,
+\ttindexbold{TRY} and \ttindexbold{REPEAT} (ignoring tracing) can be coded
+as follows:
+\begin{ttbox}
+fun TRY tac = tac ORELSE all_tac;
+
+fun REPEAT tac =
+ (fn state => ((tac THEN REPEAT tac) ORELSE all_tac) state);
+\end{ttbox}
+If $tac$ can return multiple outcomes then so can \hbox{\tt REPEAT $tac$}.
+Since {\tt REPEAT} uses \ttindex{ORELSE} and not {\tt APPEND} or {\tt
+INTLEAVE}, it applies $tac$ as many times as possible in each
+outcome.
+
+\begin{warn}
+Note {\tt REPEAT}'s explicit abstraction over the proof state. Recursive
+tacticals must be coded in this awkward fashion to avoid infinite
+recursion. With the following definition, \hbox{\tt REPEAT $tac$} would
+loop due to \ML's eager evaluation strategy:
+\begin{ttbox}
+fun REPEAT tac = (tac THEN REPEAT tac) ORELSE all_tac;
+\end{ttbox}
+\par\noindent
+The built-in {\tt REPEAT} avoids~{\tt THEN}, handling sequences explicitly
+and using tail recursion. This sacrifices clarity, but saves much space by
+discarding intermediate proof states.
+\end{warn}
+
+
+\section{Control and search tacticals}
+\index{search!tacticals|(}
+
+A predicate on theorems, namely a function of type \hbox{\tt thm->bool},
+can test whether a proof state enjoys some desirable property --- such as
+having no subgoals. Tactics that search for satisfactory states are easy
+to express. The main search procedures, depth-first, breadth-first and
+best-first, are provided as tacticals. They generate the search tree by
+repeatedly applying a given tactic.
+
+
+\subsection{Filtering a tactic's results}
+\index{tacticals!for filtering}
+\index{tactics!filtering results of}
+\begin{ttbox}
+FILTER : (thm -> bool) -> tactic -> tactic
+CHANGED : tactic -> tactic
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{FILTER} {\it p} $tac$]
+applies $tac$ to the proof state and returns a sequence consisting of those
+result states that satisfy~$p$.
+
+\item[\ttindexbold{CHANGED} {\it tac}]
+applies {\it tac\/} to the proof state and returns precisely those states
+that differ from the original state. Thus, \hbox{\tt CHANGED {\it tac}}
+always has some effect on the state.
+\end{ttdescription}
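+For example, {\tt CHANGED} can be used to insist that every round of a
+repeated tactic makes progress. The following combinator, shown only as
+a sketch and not part of the library, repeats a given tactic while it
+still changes the proof state:
+\begin{ttbox}
+fun repeat_changed tac = REPEAT (CHANGED tac);
+\end{ttbox}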
+
+
+\subsection{Depth-first search}
+\index{tacticals!searching}
+\index{tracing!of searching tacticals}
+\begin{ttbox}
+DEPTH_FIRST : (thm->bool) -> tactic -> tactic
+DEPTH_SOLVE : tactic -> tactic
+DEPTH_SOLVE_1 : tactic -> tactic
+trace_DEPTH_FIRST: bool ref \hfill{\bf initially false}
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{DEPTH_FIRST} {\it satp} {\it tac}]
+returns the proof state if {\it satp} returns true. Otherwise it applies
+{\it tac}, then recursively searches from each element of the resulting
+sequence. The code uses a stack for efficiency, in effect applying
+\hbox{\tt {\it tac} THEN DEPTH_FIRST {\it satp} {\it tac}} to the state.
+
+\item[\ttindexbold{DEPTH_SOLVE} {\it tac}]
+uses {\tt DEPTH_FIRST} to search for states having no subgoals.
+
+\item[\ttindexbold{DEPTH_SOLVE_1} {\it tac}]
+uses {\tt DEPTH_FIRST} to search for states having fewer subgoals than the
+given state. Thus, it insists upon solving at least one subgoal.
+
+\item[set \ttindexbold{trace_DEPTH_FIRST};]
+enables interactive tracing for {\tt DEPTH_FIRST}. To view the
+tracing options, type {\tt h} at the prompt.
+\end{ttdescription}
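+For instance, ignoring the details of the actual implementation,
+{\tt DEPTH_SOLVE} could be expressed roughly as
+\begin{ttbox}
+(* a sketch, not necessarily the library definition *)
+fun DEPTH_SOLVE tac = DEPTH_FIRST (has_fewer_prems 1) tac;
+\end{ttbox}
+where \hbox{\tt has_fewer_prems 1} (described below) is true exactly for
+states without subgoals.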
+
+
+\subsection{Other search strategies}
+\index{tacticals!searching}
+\index{tracing!of searching tacticals}
+\begin{ttbox}
+BREADTH_FIRST : (thm->bool) -> tactic -> tactic
+BEST_FIRST : (thm->bool)*(thm->int) -> tactic -> tactic
+THEN_BEST_FIRST : tactic * ((thm->bool) * (thm->int) * tactic)
+ -> tactic \hfill{\bf infix 1}
+trace_BEST_FIRST: bool ref \hfill{\bf initially false}
+\end{ttbox}
+These search strategies will find a solution if one exists. However, they
+do not enumerate all solutions; they terminate after the first satisfactory
+result from {\it tac}.
+\begin{ttdescription}
+\item[\ttindexbold{BREADTH_FIRST} {\it satp} {\it tac}]
+uses breadth-first search to find states for which {\it satp\/} is true.
+For most applications, it is too slow.
+
+\item[\ttindexbold{BEST_FIRST} $(satp,distf)$ {\it tac}]
+does a heuristic search, using {\it distf\/} to estimate the distance from
+a satisfactory state. It maintains a list of states ordered by distance.
+It applies $tac$ to the head of this list; if the result contains any
+satisfactory states, then it returns them. Otherwise, {\tt BEST_FIRST}
+adds the new states to the list, and continues.
+
+The distance function is typically \ttindex{size_of_thm}, which computes
+the size of the state. The smaller the state, the fewer and simpler
+subgoals it has.
+
+\item[$tac@0$ \ttindexbold{THEN_BEST_FIRST} $(satp,distf,tac)$]
+is like {\tt BEST_FIRST}, except that the priority queue initially
+contains the result of applying $tac@0$ to the proof state. This tactical
+permits separate tactics for starting the search and continuing the search.
+
+\item[set \ttindexbold{trace_BEST_FIRST};]
+enables an interactive tracing mode for the tactical {\tt BEST_FIRST}. To
+view the tracing options, type {\tt h} at the prompt.
+\end{ttdescription}
+
+
+\subsection{Auxiliary tacticals for searching}
+\index{tacticals!conditional}
+\index{tacticals!deterministic}
+\begin{ttbox}
+COND : (thm->bool) -> tactic -> tactic -> tactic
+IF_UNSOLVED : tactic -> tactic
+SOLVE : tactic -> tactic
+DETERM : tactic -> tactic
+DETERM_UNTIL_SOLVED : tactic -> tactic
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{COND} {\it p} $tac@1$ $tac@2$]
+applies $tac@1$ to the proof state if it satisfies~$p$, and applies $tac@2$
+otherwise. It is a conditional tactical in that only one of $tac@1$ and
+$tac@2$ is applied to a proof state. However, both $tac@1$ and $tac@2$ are
+evaluated because \ML{} uses eager evaluation.
+
+\item[\ttindexbold{IF_UNSOLVED} {\it tac}]
+applies {\it tac\/} to the proof state if it has any subgoals, and simply
+returns the proof state otherwise. Many common tactics, such as {\tt
+resolve_tac}, fail if applied to a proof state that has no subgoals.
+
+\item[\ttindexbold{SOLVE} {\it tac}]
+applies {\it tac\/} to the proof state and then fails iff there are subgoals
+left.
+
+\item[\ttindexbold{DETERM} {\it tac}]
+applies {\it tac\/} to the proof state and returns the head of the
+resulting sequence. {\tt DETERM} limits the search space by making its
+argument deterministic.
+
+\item[\ttindexbold{DETERM_UNTIL_SOLVED} {\it tac}]
+forces repeated deterministic application of {\it tac\/} to the proof state
+until the goal is solved completely.
+\end{ttdescription}
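+As an illustration of how these fit together, {\tt SOLVE} could be
+expressed, roughly and ignoring the actual implementation, as follows,
+where \hbox{\tt has_fewer_prems 1} (see below) is true exactly for
+states without subgoals:
+\begin{ttbox}
+(* a sketch, not necessarily the library definition *)
+fun SOLVE tac = tac THEN COND (has_fewer_prems 1) all_tac no_tac;
+\end{ttbox}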
+
+
+\subsection{Predicates and functions useful for searching}
+\index{theorems!size of}
+\index{theorems!equality of}
+\begin{ttbox}
+has_fewer_prems : int -> thm -> bool
+eq_thm : thm * thm -> bool
+eq_thm_prop : thm * thm -> bool
+size_of_thm : thm -> int
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{has_fewer_prems} $n$ $thm$]
+reports whether $thm$ has fewer than~$n$ premises. By currying,
+\hbox{\tt has_fewer_prems $n$} is a predicate on theorems; it may
+be given to the searching tacticals.
+
+\item[\ttindexbold{eq_thm} ($thm@1$, $thm@2$)] reports whether $thm@1$ and
+ $thm@2$ are equal. Both theorems must have compatible signatures. Both
+ theorems must have the same conclusions, the same hypotheses (in the same
+ order), and the same set of sort hypotheses. Names of bound variables are
+ ignored.
+
+\item[\ttindexbold{eq_thm_prop} ($thm@1$, $thm@2$)] reports whether the
+ propositions of $thm@1$ and $thm@2$ are equal. Names of bound variables are
+ ignored.
+
+\item[\ttindexbold{size_of_thm} $thm$]
+computes the size of $thm$, namely the number of variables, constants and
+abstractions in its conclusion. It may serve as a distance function for
+\ttindex{BEST_FIRST}.
+\end{ttdescription}
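+These functions combine naturally with the search tacticals above. For
+example, given some list {\tt rules} of rules (assumed to be in scope),
+a best-first search for a state without subgoals, driven by resolution,
+can be sketched as
+\begin{ttbox}
+(* rules: thm list, assumed to be in scope *)
+val best_tac = BEST_FIRST (has_fewer_prems 1, size_of_thm)
+                          (resolve_tac rules 1);
+\end{ttbox}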
+
+\index{search!tacticals|)}
+
+
+\section{Tacticals for subgoal numbering}
+When conducting a backward proof, we normally consider one goal at a time.
+A tactic can affect the entire proof state, but many tactics --- such as
+{\tt resolve_tac} and {\tt assume_tac} --- work on a single subgoal.
+Subgoals are designated by a positive integer, so Isabelle provides
+tacticals for combining values of type {\tt int->tactic}.
+
+
+\subsection{Restricting a tactic to one subgoal}
+\index{tactics!restricting to a subgoal}
+\index{tacticals!for restriction to a subgoal}
+\begin{ttbox}
+SELECT_GOAL : tactic -> int -> tactic
+METAHYPS : (thm list -> tactic) -> int -> tactic
+\end{ttbox}
+\begin{ttdescription}
+\item[\ttindexbold{SELECT_GOAL} {\it tac} $i$]
+restricts the effect of {\it tac\/} to subgoal~$i$ of the proof state. It
+fails if there is no subgoal~$i$, or if {\it tac\/} changes the main goal
+(do not use {\tt rewrite_tac}). It applies {\it tac\/} to a dummy proof
+state and uses the result to refine the original proof state at
+subgoal~$i$. If {\it tac\/} returns multiple results then so does
+\hbox{\tt SELECT_GOAL {\it tac} $i$}.
+
+{\tt SELECT_GOAL} works by creating a state of the form $\phi\Imp\phi$,
+with the one subgoal~$\phi$. If subgoal~$i$ has the form $\psi\Imp\theta$
+then $(\psi\Imp\theta)\Imp(\psi\Imp\theta)$ is in fact
+$\List{\psi\Imp\theta;\; \psi}\Imp\theta$, a proof state with two subgoals.
+Such a proof state might cause tactics to go astray. Therefore {\tt
+ SELECT_GOAL} inserts a quantifier to create the state
+\[ (\Forall x.\psi\Imp\theta)\Imp(\Forall x.\psi\Imp\theta). \]
+
+\item[\ttindexbold{METAHYPS} {\it tacf} $i$]\index{meta-assumptions}
+takes subgoal~$i$, of the form
+\[ \Forall x@1 \ldots x@l. \List{\theta@1; \ldots; \theta@k}\Imp\theta, \]
+and creates the list $\theta'@1$, \ldots, $\theta'@k$ of meta-level
+assumptions. In these theorems, the subgoal's parameters ($x@1$,
+\ldots,~$x@l$) become free variables. It supplies the assumptions to
+$tacf$ and applies the resulting tactic to the proof state
+$\theta\Imp\theta$.
+
+If the resulting proof state is $\List{\phi@1; \ldots; \phi@n} \Imp \phi$,
+possibly containing $\theta'@1,\ldots,\theta'@k$ as assumptions, then it is
+lifted back into the original context, yielding $n$ subgoals.
+
+Meta-level assumptions may not contain unknowns. Unknowns in the
+hypotheses $\theta@1,\ldots,\theta@k$ become free variables in $\theta'@1$,
+\ldots, $\theta'@k$, and are restored afterwards; the {\tt METAHYPS} call
+cannot instantiate them. Unknowns in $\theta$ may be instantiated. New
+unknowns in $\phi@1$, \ldots, $\phi@n$ are lifted over the parameters.
+
+Here is a typical application. Calling {\tt hyp_res_tac}~$i$ resolves
+subgoal~$i$ with one of its own assumptions, which may itself have the form
+of an inference rule (these are called {\bf higher-level assumptions}).
+\begin{ttbox}
+val hyp_res_tac = METAHYPS (fn prems => resolve_tac prems 1);
+\end{ttbox}
+The function \ttindex{gethyps} is useful for debugging applications of {\tt
+ METAHYPS}.
+\end{ttdescription}
+
+\begin{warn}
+{\tt METAHYPS} fails if the context or new subgoals contain type unknowns.
+In principle, the tactical could treat these like ordinary unknowns.
+\end{warn}
+
+
+\subsection{Scanning for a subgoal by number}
+\index{tacticals!scanning for subgoals}
+\begin{ttbox}
+ALLGOALS : (int -> tactic) -> tactic
+TRYALL : (int -> tactic) -> tactic
+SOMEGOAL : (int -> tactic) -> tactic
+FIRSTGOAL : (int -> tactic) -> tactic
+REPEAT_SOME : (int -> tactic) -> tactic
+REPEAT_FIRST : (int -> tactic) -> tactic
+trace_goalno_tac : (int -> tactic) -> int -> tactic
+\end{ttbox}
+These apply a tactic function of type {\tt int -> tactic} to all the
+subgoal numbers of a proof state, and join the resulting tactics using
+\ttindex{THEN} or \ttindex{ORELSE}\@. Thus, they apply the tactic to all the
+subgoals, or to one subgoal.
+
+Suppose that the original proof state has $n$ subgoals.
+
+\begin{ttdescription}
+\item[\ttindexbold{ALLGOALS} {\it tacf}]
+is equivalent to
+\hbox{\tt$tacf(n)$ THEN \ldots{} THEN $tacf(1)$}.
+
+It applies {\it tacf} to all the subgoals, counting downwards (to
+avoid problems when subgoals are added or deleted).
+
+\item[\ttindexbold{TRYALL} {\it tacf}]
+is equivalent to
+\hbox{\tt TRY$(tacf(n))$ THEN \ldots{} THEN TRY$(tacf(1))$}.
+
+It attempts to apply {\it tacf} to all the subgoals. For instance,
+the tactic \hbox{\tt TRYALL assume_tac} attempts to solve all the subgoals by
+assumption.
+
+\item[\ttindexbold{SOMEGOAL} {\it tacf}]
+is equivalent to
+\hbox{\tt$tacf(n)$ ORELSE \ldots{} ORELSE $tacf(1)$}.
+
+It applies {\it tacf} to one subgoal, counting downwards. For instance,
+the tactic \hbox{\tt SOMEGOAL assume_tac} solves one subgoal by assumption,
+failing if this is impossible.
+
+\item[\ttindexbold{FIRSTGOAL} {\it tacf}]
+is equivalent to
+\hbox{\tt$tacf(1)$ ORELSE \ldots{} ORELSE $tacf(n)$}.
+
+It applies {\it tacf} to one subgoal, counting upwards.
+
+\item[\ttindexbold{REPEAT_SOME} {\it tacf}]
+applies {\it tacf} once or more to a subgoal, counting downwards.
+
+\item[\ttindexbold{REPEAT_FIRST} {\it tacf}]
+applies {\it tacf} once or more to a subgoal, counting upwards.
+
+\item[\ttindexbold{trace_goalno_tac} {\it tac} {\it i}]
+applies \hbox{\it tac i\/} to the proof state. If the resulting sequence
+is non-empty, then it is returned, with the side-effect of printing {\tt
+Subgoal~$i$ selected}. Otherwise, {\tt trace_goalno_tac} returns the empty
+sequence and prints nothing.
+
+It indicates that `the tactic worked for subgoal~$i$' and is mainly used
+with {\tt SOMEGOAL} and {\tt FIRSTGOAL}.
+\end{ttdescription}
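+For example, given a list {\tt rules} of rules (an assumption made only
+for the sake of the example), the following tactic attempts to close
+every subgoal either by assumption or by resolution with one of the
+rules:
+\begin{ttbox}
+(* rules: thm list, assumed to be in scope *)
+val cleanup_tac =
+  TRYALL (fn i => assume_tac i ORELSE resolve_tac rules i);
+\end{ttbox}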
+
+
+\subsection{Joining tactic functions}
+\index{tacticals!joining tactic functions}
+\begin{ttbox}
+THEN' : ('a -> tactic) * ('a -> tactic) -> 'a -> tactic \hfill{\bf infix 1}
+ORELSE' : ('a -> tactic) * ('a -> tactic) -> 'a -> tactic \hfill{\bf infix}
+APPEND' : ('a -> tactic) * ('a -> tactic) -> 'a -> tactic \hfill{\bf infix}
+INTLEAVE' : ('a -> tactic) * ('a -> tactic) -> 'a -> tactic \hfill{\bf infix}
+EVERY' : ('a -> tactic) list -> 'a -> tactic
+FIRST' : ('a -> tactic) list -> 'a -> tactic
+\end{ttbox}
+These help to express tactics that specify subgoal numbers. The tactic
+\begin{ttbox}
+SOMEGOAL (fn i => resolve_tac rls i ORELSE eresolve_tac erls i)
+\end{ttbox}
+can be simplified to
+\begin{ttbox}
+SOMEGOAL (resolve_tac rls ORELSE' eresolve_tac erls)
+\end{ttbox}
+Note that {\tt TRY'}, {\tt REPEAT'}, {\tt DEPTH_FIRST'}, etc.\ are not
+provided, because function composition accomplishes the same purpose.
+The tactic
+\begin{ttbox}
+ALLGOALS (fn i => REPEAT (etac exE i ORELSE atac i))
+\end{ttbox}
+can be simplified to
+\begin{ttbox}
+ALLGOALS (REPEAT o (etac exE ORELSE' atac))
+\end{ttbox}
+These tacticals are polymorphic; $x$ need not be an integer.
+\begin{center} \tt
+\begin{tabular}{r@{\rm\ \ yields\ \ }l}
+ $(tacf@1$~~THEN'~~$tacf@2)(x)$ \index{*THEN'} &
+ $tacf@1(x)$~~THEN~~$tacf@2(x)$ \\
+
+ $(tacf@1$ ORELSE' $tacf@2)(x)$ \index{*ORELSE'} &
+ $tacf@1(x)$ ORELSE $tacf@2(x)$ \\
+
+ $(tacf@1$ APPEND' $tacf@2)(x)$ \index{*APPEND'} &
+ $tacf@1(x)$ APPEND $tacf@2(x)$ \\
+
+ $(tacf@1$ INTLEAVE' $tacf@2)(x)$ \index{*INTLEAVE'} &
+ $tacf@1(x)$ INTLEAVE $tacf@2(x)$ \\
+
+ EVERY' $[tacf@1,\ldots,tacf@n] \; (x)$ \index{*EVERY'} &
+ EVERY $[tacf@1(x),\ldots,tacf@n(x)]$ \\
+
+ FIRST' $[tacf@1,\ldots,tacf@n] \; (x)$ \index{*FIRST'} &
+ FIRST $[tacf@1(x),\ldots,tacf@n(x)]$
+\end{tabular}
+\end{center}
+
+
+\subsection{Applying a list of tactics to 1}
+\index{tacticals!joining tactic functions}
+\begin{ttbox}
+EVERY1: (int -> tactic) list -> tactic
+FIRST1: (int -> tactic) list -> tactic
+\end{ttbox}
+A common proof style is to treat the subgoals as a stack, always
+restricting attention to the first subgoal. Such proofs contain long lists
+of tactics, each applied to~1. These can be simplified using {\tt EVERY1}
+and {\tt FIRST1}:
+\begin{center} \tt
+\begin{tabular}{r@{\rm\ \ abbreviates\ \ }l}
+ EVERY1 $[tacf@1,\ldots,tacf@n]$ \indexbold{*EVERY1} &
+ EVERY $[tacf@1(1),\ldots,tacf@n(1)]$ \\
+
+ FIRST1 $[tacf@1,\ldots,tacf@n]$ \indexbold{*FIRST1} &
+ FIRST $[tacf@1(1),\ldots,tacf@n(1)]$
+\end{tabular}
+\end{center}
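+
+For instance, using the abbreviations {\tt etac} and {\tt atac} seen
+above, instead of \hbox{\tt EVERY [etac exE 1, atac 1]} one may simply
+write
+\begin{ttbox}
+EVERY1 [etac exE, atac];
+\end{ttbox}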
+
+\index{tacticals|)}
+
+
+%%% Local Variables:
+%%% mode: latex
+%%% TeX-master: "ref"
+%%% End:
--- a/doc-src/Sledgehammer/sledgehammer.tex Thu Feb 09 19:34:23 2012 +0100
+++ b/doc-src/Sledgehammer/sledgehammer.tex Tue Feb 14 11:16:07 2012 +0100
@@ -123,9 +123,9 @@
Limit'' option.
\newbox\boxA
-\setbox\boxA=\hbox{\texttt{nospam}}
+\setbox\boxA=\hbox{\texttt{NOSPAM}}
-\newcommand\authoremail{\texttt{blan{\color{white}nospam}\kern-\wd\boxA{}chette@\allowbreak
+\newcommand\authoremail{\texttt{blan{\color{white}NOSPAM}\kern-\wd\boxA{}chette@\allowbreak
in.\allowbreak tum.\allowbreak de}}
To run Sledgehammer, you must make sure that the theory \textit{Sledgehammer} is
@@ -380,63 +380,10 @@
questions at this stage. And if you have any further questions not listed here,
send them to the author at \authoremail.
-\point{Why does Metis fail to reconstruct the proof?}
-
-There are many reasons. If Metis runs seemingly forever, that is a sign that the
-proof is too difficult for it. Metis's search is complete, so it should
-eventually find it, but that's little consolation. There are several possible
-solutions:
-
-\begin{enum}
-\item[\labelitemi] Try the \textit{isar\_proof} option (\S\ref{output-format}) to
-obtain a step-by-step Isar proof where each step is justified by \textit{metis}.
-Since the steps are fairly small, \textit{metis} is more likely to be able to
-replay them.
-
-\item[\labelitemi] Try the \textit{smt} proof method instead of \textit{metis}. It
-is usually stronger, but you need to either have Z3 available to replay the
-proofs, trust the SMT solver, or use certificates. See the documentation in the
-\emph{SMT} theory (\texttt{\$ISABELLE\_HOME/src/HOL/SMT.thy}) for details.
-
-\item[\labelitemi] Try the \textit{blast} or \textit{auto} proof methods, passing
-the necessary facts via \textbf{unfolding}, \textbf{using}, \textit{intro}{:},
-\textit{elim}{:}, \textit{dest}{:}, or \textit{simp}{:}, as appropriate.
-\end{enum}
-
-In some rare cases, \textit{metis} fails fairly quickly, and you get the error
-message
-
-\prew
-\slshape
-Proof reconstruction failed.
-\postw
-
-This message usually indicates that Sledgehammer found a type-incorrect proof.
-This was a frequent issue with older versions of Sledgehammer, which did not
-supply enough typing information to the ATPs by default. If you notice many
-unsound proofs and are not using \textit{type\_enc} (\S\ref{problem-encoding}),
-contact the author at \authoremail.
-
-\point{How can I tell whether a generated proof is sound?}
-
-First, if \textit{metis} can reconstruct it, the proof is sound (assuming
-Isabelle's inference kernel is sound). If it fails or runs seemingly forever,
-you can try
-
-\prew
-\textbf{apply}~\textbf{--} \\
-\textbf{sledgehammer} [\textit{sound}] (\textit{metis\_facts})
-\postw
-
-where \textit{metis\_facts} is the list of facts appearing in the suggested
-\textit{metis} call. The automatic provers should be able to re-find the proof
-quickly if it is sound, and the \textit{sound} option (\S\ref{problem-encoding})
-ensures that no unsound proofs are found.
-
\point{Which facts are passed to the automatic provers?}
The relevance filter assigns a score to every available fact (lemma, theorem,
-definition, or axiom)\ based upon how many constants that fact shares with the
+definition, or axiom) based upon how many constants that fact shares with the
conjecture. This process iterates to include facts relevant to those just
accepted, but with a decay factor to ensure termination. The constants are
weighted to give unusual ones greater significance. The relevance filter copes
@@ -483,19 +430,84 @@
\textbf{sledgehammer}
\postw
+\point{Why does Metis fail to reconstruct the proof?}
+
+There are many reasons. If Metis runs seemingly forever, that is a sign that the
+proof is too difficult for it. Metis's search is complete, so it should
+eventually find it, but that's little consolation. There are several possible
+solutions:
+
+\begin{enum}
+\item[\labelitemi] Try the \textit{isar\_proof} option (\S\ref{output-format}) to
+obtain a step-by-step Isar proof where each step is justified by \textit{metis}.
+Since the steps are fairly small, \textit{metis} is more likely to be able to
+replay them.
+
+\item[\labelitemi] Try the \textit{smt} proof method instead of \textit{metis}. It
+is usually stronger, but you need to either have Z3 available to replay the
+proofs, trust the SMT solver, or use certificates. See the documentation in the
+\emph{SMT} theory (\texttt{\$ISABELLE\_HOME/src/HOL/SMT.thy}) for details.
+
+\item[\labelitemi] Try the \textit{blast} or \textit{auto} proof methods, passing
+the necessary facts via \textbf{unfolding}, \textbf{using}, \textit{intro}{:},
+\textit{elim}{:}, \textit{dest}{:}, or \textit{simp}{:}, as appropriate.
+\end{enum}
+
+In some rare cases, \textit{metis} fails fairly quickly, and you get the error
+message
+
+\prew
+\slshape
+One-line proof reconstruction failed.
+\postw
+
+This message indicates that Sledgehammer determined that the goal is provable,
+but the proof is, for technical reasons, beyond \textit{metis}'s power. You can
+then try again with the \textit{strict} option (\S\ref{problem-encoding}).
+
+If the goal is actually unprovable and you did not specify an unsound encoding
+using \textit{type\_enc} (\S\ref{problem-encoding}), this is a bug, and you are
+strongly encouraged to report this to the author at \authoremail.
+
\point{Why are the generated Isar proofs so ugly/broken?}
-The current implementation is experimental and explodes exponentially in the
-worst case. Work on a new implementation has begun. There is a large body of
+The current implementation of the Isar proof feature,
+enabled by the \textit{isar\_proof} option (\S\ref{output-format}),
+is highly experimental. Work on a new implementation has begun. There is a large body of
research into transforming resolution proofs into natural deduction proofs (such
as Isar proofs), which we hope to leverage. In the meantime, a workaround is to
set the \textit{isar\_shrink\_factor} option (\S\ref{output-format}) to a larger
value or to try several provers and keep the nicest-looking proof.
-\point{What are the \textit{full\_types} and \textit{no\_types} arguments to
-Metis?}
+\point{How can I tell whether a suggested proof is sound?}
+
+Earlier versions of Sledgehammer often suggested unsound proofs---either proofs
+of nontheorems or simply proofs that rely on type-unsound inferences. This
+is a thing of the past, unless you explicitly specify an unsound encoding
+using \textit{type\_enc} (\S\ref{problem-encoding}).
+%
+Officially, the only form of ``unsoundness'' that lurks in the sound
+encodings is related to missing characteristic theorems of datatypes. For
+example,
-The \textit{metis}~(\textit{full\_types}) proof method is the fully-typed
+\prew
+\textbf{lemma}~``$\exists \mathit{xs}.\; \mathit{xs} \neq []$'' \\
+\textbf{sledgehammer} ()
+\postw
+
+suggests an argumentless \textit{metis} call that fails. However, the conjecture
+does actually hold, and the \textit{metis} call can be repaired by adding
+\textit{list.distinct}.
+%
+We hope to address this problem in a future version of Isabelle. In the
+meantime, you can avoid it by passing the \textit{strict} option
+(\S\ref{problem-encoding}).
+
+\point{What are the \textit{full\_types}, \textit{no\_types}, and
+\textit{mono\_tags} arguments to Metis?}
+
+The \textit{metis}~(\textit{full\_types}) proof method
+(and its cousin \textit{metis}~(\textit{mono\_tags})) is a fully-typed
version of Metis. It is somewhat slower than \textit{metis}, but the proof
search is fully typed, and it also includes more powerful rules such as the
axiom ``$x = \const{True} \mathrel{\lor} x = \const{False}$'' for reasoning in
@@ -504,16 +516,18 @@
generated by Sledgehammer instead of \textit{metis} if the proof obviously
requires type information or if \textit{metis} failed when Sledgehammer
preplayed the proof. (By default, Sledgehammer tries to run \textit{metis} with
-various options for up to 4 seconds to ensure that the generated one-line proofs
-actually work and to display timing information. This can be configured using
-the \textit{preplay\_timeout} option (\S\ref{timeouts}).)
-
+various options for up to 3 seconds each time to ensure that the generated
+one-line proofs actually work and to display timing information. This can be
+configured using the \textit{preplay\_timeout} option (\S\ref{timeouts}).)
+%
At the other end of the soundness spectrum, \textit{metis} (\textit{no\_types})
uses no type information at all during the proof search, which is more efficient
but often fails. Calls to \textit{metis} (\textit{no\_types}) are occasionally
generated by Sledgehammer.
+%
+See the \textit{type\_enc} option (\S\ref{problem-encoding}) for details.
-Incidentally, if you see the warning
+Incidentally, if you ever see warnings such as
\prew
\slshape
@@ -523,6 +537,16 @@
for a successful \textit{metis} proof, you can advantageously pass the
\textit{full\_types} option to \textit{metis} directly.
+\point{And what are the \textit{lifting} and \textit{hide\_lams} arguments
+to Metis?}
+
+Orthogonally to the encoding of types, it is important to choose an appropriate
+translation of $\lambda$-abstractions. Metis supports three translation schemes,
+in decreasing order of power: Curry combinators (the default),
+$\lambda$-lifting, and a ``hiding'' scheme that disables all reasoning under
+$\lambda$-abstractions. The more powerful schemes also give the automatic
+provers more rope to hang themselves. See the \textit{lam\_trans} option (\S\ref{problem-encoding}) for details.
+
\point{Are generated proofs minimal?}
Automatic provers frequently use many more facts than are necessary.
@@ -656,7 +680,7 @@
enabling the ``Auto Sledgehammer'' option in Proof General's ``Isabelle'' menu.
For automatic runs, only the first prover set using \textit{provers}
(\S\ref{mode-of-operation}) is considered, fewer facts are passed to the prover,
-\textit{slice} (\S\ref{mode-of-operation}) is disabled, \textit{sound}
+\textit{slice} (\S\ref{mode-of-operation}) is disabled, \textit{strict}
(\S\ref{problem-encoding}) is enabled, \textit{verbose} (\S\ref{output-format})
and \textit{debug} (\S\ref{output-format}) are disabled, and \textit{timeout}
(\S\ref{timeouts}) is superseded by the ``Auto Tools Time Limit'' in Proof
@@ -678,12 +702,12 @@
(\S\ref{problem-encoding}).
%
The supported $\lambda$ translation schemes are \textit{hide\_lams},
-\textit{lam\_lifting}, and \textit{combinators} (the default).
+\textit{lifting}, and \textit{combs} (the default).
%
All the untyped type encodings listed in \S\ref{problem-encoding} are supported.
For convenience, the following aliases are provided:
\begin{enum}
-\item[\labelitemi] \textbf{\textit{full\_types}:} An appropriate type-sound encoding.
+\item[\labelitemi] \textbf{\textit{full\_types}:} Synonym for \textit{poly\_guards\_query}.
\item[\labelitemi] \textbf{\textit{partial\_types}:} Synonym for \textit{poly\_args}.
\item[\labelitemi] \textbf{\textit{no\_types}:} Synonym for \textit{erased}.
\end{enum}
@@ -698,6 +722,7 @@
\def\optrue#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{bool}$\bigr]$\enskip \defl\textit{true}\defr\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]}
\def\opfalse#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{bool}$\bigr]$\enskip \defl\textit{false}\defr\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]}
\def\opsmart#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{smart\_bool}$\bigr]$\enskip \defl\textit{smart}\defr\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]}
+\def\opsmartx#1#2{\flushitem{\textit{#1} $\bigl[$= \qtybf{smart\_bool}$\bigr]$\enskip \defl\textit{smart}\defr\\\hbox{}\hfill (neg.: \textit{#2})}\nopagebreak\\[\parskip]}
\def\opnodefault#1#2{\flushitem{\textit{#1} = \qtybf{#2}} \nopagebreak\\[\parskip]}
\def\opnodefaultbrk#1#2{\flushitem{$\bigl[$\textit{#1} =$\bigr]$ \qtybf{#2}} \nopagebreak\\[\parskip]}
\def\opdefault#1#2#3{\flushitem{\textit{#1} = \qtybf{#2}\enskip \defl\textit{#3}\defr} \nopagebreak\\[\parskip]}
@@ -738,22 +763,24 @@
\begin{enum}
\opnodefaultbrk{provers}{string}
Specifies the automatic provers to use as a space-separated list (e.g.,
-``\textit{e}~\textit{spass}~\textit{remote\_vampire\/}''). The following local
-provers are supported:
+``\textit{e}~\textit{spass}~\textit{remote\_vampire\/}'').
+Provers can be run locally or remotely; see \S\ref{installation} for
+installation instructions.
+
+The following local provers are supported:
\begin{enum}
\item[\labelitemi] \textbf{\textit{cvc3}:} CVC3 is an SMT solver developed by
Clark Barrett, Cesare Tinelli, and their colleagues \cite{cvc3}. To use CVC3,
set the environment variable \texttt{CVC3\_SOLVER} to the complete path of the
executable, including the file name, or install the prebuilt CVC3 package from
-\download. Sledgehammer has been tested with version 2.2. See
-\S\ref{installation} for details.
+\download. Sledgehammer has been tested with version 2.2.
\item[\labelitemi] \textbf{\textit{e}:} E is a first-order resolution prover
developed by Stephan Schulz \cite{schulz-2002}. To use E, set the environment
variable \texttt{E\_HOME} to the directory that contains the \texttt{eproof}
executable, or install the prebuilt E package from \download. Sledgehammer has
-been tested with versions 1.0 to 1.4. See \S\ref{installation} for details.
+been tested with versions 1.0 to 1.4.
\item[\labelitemi] \textbf{\textit{leo2}:} LEO-II is an automatic
higher-order prover developed by Christoph Benzm\"uller et al.\ \cite{leo2},
@@ -777,8 +804,7 @@
prover developed by Christoph Weidenbach et al.\ \cite{weidenbach-et-al-2009}.
To use SPASS, set the environment variable \texttt{SPASS\_HOME} to the directory
that contains the \texttt{SPASS} executable, or install the prebuilt SPASS
-package from \download. Sledgehammer requires version 3.5 or above. See
-\S\ref{installation} for details.
+package from \download. Sledgehammer requires version 3.5 or above.
\item[\labelitemi] \textbf{\textit{vampire}:} Vampire is a first-order resolution
prover developed by Andrei Voronkov and his colleagues
@@ -807,7 +833,7 @@
executable.
\end{enum}
-In addition, the following remote provers are supported:
+The following remote provers are supported:
\begin{enum}
\item[\labelitemi] \textbf{\textit{remote\_cvc3}:} The remote version of CVC3 runs
@@ -950,21 +976,25 @@
by replacing them by unspecified fresh constants, effectively disabling all
reasoning under $\lambda$-abstractions.
-\item[\labelitemi] \textbf{\textit{lam\_lifting}:} Introduce a new
+\item[\labelitemi] \textbf{\textit{lifting}:} Introduce a new
supercombinator \const{c} for each cluster of $n$~$\lambda$-abstractions,
defined using an equation $\const{c}~x_1~\ldots~x_n = t$ ($\lambda$-lifting).
-\item[\labelitemi] \textbf{\textit{combinators}:} Rewrite lambdas to the Curry
+\item[\labelitemi] \textbf{\textit{combs}:} Rewrite lambdas to the Curry
combinators (\comb{I}, \comb{K}, \comb{S}, \comb{B}, \comb{C}). Combinators
enable the ATPs to synthesize $\lambda$-terms but tend to yield bulkier formulas
than $\lambda$-lifting: The translation is quadratic in the worst case, and the
equational definitions of the combinators are very prolific in the context of
resolution.
-\item[\labelitemi] \textbf{\textit{hybrid\_lams}:} Introduce a new
+\item[\labelitemi] \textbf{\textit{combs\_and\_lifting}:} Introduce a new
supercombinator \const{c} for each cluster of $\lambda$-abstractions and characterize it both using a
lifted equation $\const{c}~x_1~\ldots~x_n = t$ and via Curry combinators.
+\item[\labelitemi] \textbf{\textit{combs\_or\_lifting}:} For each cluster of
+$\lambda$-abstractions, heuristically choose between $\lambda$-lifting and Curry
+combinators.
+
\item[\labelitemi] \textbf{\textit{keep\_lams}:}
Keep the $\lambda$-abstractions in the generated problems. This is available
only with provers that support the THF0 syntax.
@@ -973,26 +1003,33 @@
depends on the ATP and should be the most efficient scheme for that ATP.
\end{enum}
-For SMT solvers, the $\lambda$ translation scheme is always
-\textit{lam\_lifting}, irrespective of the value of this option.
+For SMT solvers, the $\lambda$ translation scheme is always \textit{lifting},
+irrespective of the value of this option.
+
+\opsmartx{uncurried\_aliases}{no\_uncurried\_aliases}
+Specifies whether fresh function symbols should be generated as aliases for
+applications of curried functions in ATP problems.
\opdefault{type\_enc}{string}{smart}
Specifies the type encoding to use in ATP problems. Some of the type encodings
are unsound, meaning that they can give rise to spurious proofs
(unreconstructible using \textit{metis}). The supported type encodings are
-listed below, with an indication of their soundness in parentheses:
+listed below, with an indication of their soundness in parentheses.
+An asterisk (*) means that the encoding is slightly incomplete for
+reconstruction with \textit{metis}, unless the \emph{strict} option (described
+below) is enabled.
\begin{enum}
\item[\labelitemi] \textbf{\textit{erased} (very unsound):} No type information is
-supplied to the ATP. Types are simply erased.
+supplied to the ATP, not even to resolve overloading. Types are simply erased.
\item[\labelitemi] \textbf{\textit{poly\_guards} (sound):} Types are encoded using
-a predicate \textit{has\_\allowbreak type\/}$(\tau, t)$ that guards bound
+a predicate \const{g}$(\tau, t)$ that guards bound
variables. Constants are annotated with their types, supplied as additional
arguments, to resolve overloading.
\item[\labelitemi] \textbf{\textit{poly\_tags} (sound):} Each term and subterm is
-tagged with its type using a function $\const{type\/}(\tau, t)$.
+tagged with its type using a function $\const{t\/}(\tau, t)$.
\item[\labelitemi] \textbf{\textit{poly\_args} (unsound):}
Like for \textit{poly\_guards} constants are annotated with their types to
@@ -1017,47 +1054,47 @@
\textit{raw\_mono\_guards}, \textit{raw\_mono\_tags}, and
\textit{raw\_mono\_args}, respectively but types are mangled in constant names
instead of being supplied as ground term arguments. The binary predicate
-$\const{has\_type\/}(\tau, t)$ becomes a unary predicate
-$\const{has\_type\_}\tau(t)$, and the binary function
-$\const{type\/}(\tau, t)$ becomes a unary function
-$\const{type\_}\tau(t)$.
+$\const{g}(\tau, t)$ becomes a unary predicate
+$\const{g\_}\tau(t)$, and the binary function
+$\const{t}(\tau, t)$ becomes a unary function
+$\const{t\_}\tau(t)$.
-\item[\labelitemi] \textbf{\textit{mono\_simple} (sound):} Exploits simple
+\item[\labelitemi] \textbf{\textit{mono\_native} (sound):} Exploits native
first-order types if the prover supports the TFF0 or THF0 syntax; otherwise,
falls back on \textit{mono\_guards}. The problem is monomorphized.
-\item[\labelitemi] \textbf{\textit{mono\_simple\_higher} (sound):} Exploits simple
-higher-order types if the prover supports the THF0 syntax; otherwise, falls back
-on \textit{mono\_simple} or \textit{mono\_guards}. The problem is monomorphized.
+\item[\labelitemi] \textbf{\textit{mono\_native\_higher} (sound):} Exploits
+native higher-order types if the prover supports the THF0 syntax; otherwise,
+falls back on \textit{mono\_native} or \textit{mono\_guards}. The problem is
+monomorphized.
\item[\labelitemi]
\textbf{%
\textit{poly\_guards}?, \textit{poly\_tags}?, \textit{raw\_mono\_guards}?, \\
\textit{raw\_mono\_tags}?, \textit{mono\_guards}?, \textit{mono\_tags}?, \\
-\textit{mono\_simple}? (quasi-sound):} \\
+\textit{mono\_native}? (sound*):} \\
The type encodings \textit{poly\_guards}, \textit{poly\_tags},
\textit{raw\_mono\_guards}, \textit{raw\_mono\_tags}, \textit{mono\_guards},
-\textit{mono\_tags}, and \textit{mono\_simple} are fully
+\textit{mono\_tags}, and \textit{mono\_native} are fully
typed and sound. For each of these, Sledgehammer also provides a lighter,
virtually sound variant identified by a question mark (`\hbox{?}')\ that detects
-and erases monotonic types, notably infinite types. (For \textit{mono\_simple},
+and erases monotonic types, notably infinite types. (For \textit{mono\_native},
the types are not actually erased but rather replaced by a shared uniform type
of individuals.) As argument to the \textit{metis} proof method, the question
-mark is replaced by a \hbox{``\textit{\_query\/}''} suffix. If the \emph{sound}
-option is enabled, these encodings are fully sound.
+mark is replaced by a \hbox{``\textit{\_query\/}''} suffix.
\item[\labelitemi]
\textbf{%
\textit{poly\_guards}??, \textit{poly\_tags}??, \textit{raw\_mono\_guards}??, \\
\textit{raw\_mono\_tags}??, \textit{mono\_guards}??, \textit{mono\_tags}?? \\
-(quasi-sound):} \\
+(sound*):} \\
Even lighter versions of the `\hbox{?}' encodings. As argument to the
\textit{metis} proof method, the `\hbox{??}' suffix is replaced by
\hbox{``\textit{\_query\_query\/}''}.
\item[\labelitemi]
\textbf{%
-\textit{poly\_guards}@?, \textit{raw\_mono\_guards}@? (quasi-sound):} \\
+\textit{poly\_guards}@?, \textit{raw\_mono\_guards}@? (sound*):} \\
Alternative versions of the `\hbox{??}' encodings. As argument to the
\textit{metis} proof method, the `\hbox{@?}' suffix is replaced by
\hbox{``\textit{\_at\_query\/}''}.
@@ -1066,14 +1103,14 @@
\textbf{%
\textit{poly\_guards}!, \textit{poly\_tags}!, \textit{raw\_mono\_guards}!, \\
\textit{raw\_mono\_tags}!, \textit{mono\_guards}!, \textit{mono\_tags}!, \\
-\textit{mono\_simple}!, \textit{mono\_simple\_higher}! (mildly unsound):} \\
+\textit{mono\_native}!, \textit{mono\_native\_higher}! (mildly unsound):} \\
The type encodings \textit{poly\_guards}, \textit{poly\_tags},
\textit{raw\_mono\_guards}, \textit{raw\_mono\_tags}, \textit{mono\_guards},
-\textit{mono\_tags}, \textit{mono\_simple}, and \textit{mono\_simple\_higher}
+\textit{mono\_tags}, \textit{mono\_native}, and \textit{mono\_native\_higher}
also admit a mildly unsound (but very efficient) variant identified by an
exclamation mark (`\hbox{!}') that detects and erases all types except
-those that are clearly finite (e.g., \textit{bool}). (For \textit{mono\_simple}
-and \textit{mono\_simple\_higher}, the types are not actually erased but rather
+those that are clearly finite (e.g., \textit{bool}). (For \textit{mono\_native}
+and \textit{mono\_native\_higher}, the types are not actually erased but rather
replaced by a shared uniform type of individuals.) As argument to the
\textit{metis} proof method, the exclamation mark is replaced by the suffix
\hbox{``\textit{\_bang\/}''}.
@@ -1098,18 +1135,19 @@
the ATP and should be the most efficient virtually sound encoding for that ATP.
\end{enum}
-For SMT solvers, the type encoding is always \textit{mono\_simple}, irrespective
+For SMT solvers, the type encoding is always \textit{mono\_native}, irrespective
of the value of this option.
\nopagebreak
{\small See also \textit{max\_new\_mono\_instances} (\S\ref{relevance-filter})
and \textit{max\_mono\_iters} (\S\ref{relevance-filter}).}
-\opfalse{sound}{unsound}
-Specifies whether Sledgehammer should run in its fully sound mode. In that mode,
-quasi-sound type encodings (which are the default) are made fully sound, at the
-cost of some clutter in the generated problems. This option is ignored if
-\textit{type\_enc} is set to an unsound encoding.
+\opfalse{strict}{non\_strict}
+Specifies whether Sledgehammer should run in its strict mode. In that mode,
+sound type encodings marked with an asterisk (*) above are made complete
+for reconstruction with \textit{metis}, at the cost of some clutter in the
+generated problems. This option has no effect if \textit{type\_enc} is
+deliberately set to an unsound encoding.
\end{enum}
\subsection{Relevance Filter}
@@ -1188,8 +1226,7 @@
Specifies the expected outcome, which must be one of the following:
\begin{enum}
-\item[\labelitemi] \textbf{\textit{some}:} Sledgehammer found a (potentially
-unsound) proof.
+\item[\labelitemi] \textbf{\textit{some}:} Sledgehammer found a proof.
\item[\labelitemi] \textbf{\textit{none}:} Sledgehammer found no proof.
\item[\labelitemi] \textbf{\textit{timeout}:} Sledgehammer timed out.
\item[\labelitemi] \textbf{\textit{unknown}:} Sledgehammer encountered some
@@ -1215,7 +1252,7 @@
For historical reasons, the default value of this option can be overridden using
the option ``Sledgehammer: Time Limit'' in Proof General's ``Isabelle'' menu.
-\opdefault{preplay\_timeout}{float\_or\_none}{\upshape 4}
+\opdefault{preplay\_timeout}{float\_or\_none}{\upshape 3}
Specifies the maximum number of seconds that \textit{metis} or \textit{smt}
should spend trying to ``preplay'' the found proof. If this option is set to 0,
no preplaying takes place, and no timing information is displayed next to the
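
As a concrete but hypothetical illustration of the renamed options above (type_enc, strict, uncurried_aliases, preplay_timeout), here is a minimal Isabelle sketch; the goal, the prover, and the chosen values are arbitrary examples and not part of this changeset:

  lemma "length (xs @ ys) = length xs + length ys"
    sledgehammer [provers = e, type_enc = mono_native, strict,
                  uncurried_aliases, preplay_timeout = 3]
    (* then paste the one-line proof that Sledgehammer suggests *)
    oops
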
--- a/etc/components Thu Feb 09 19:34:23 2012 +0100
+++ b/etc/components Tue Feb 14 11:16:07 2012 +0100
@@ -13,10 +13,10 @@
src/Tools/Code
src/Tools/jEdit
src/Tools/WWW_Find
+src/HOL/Mirabelle
+src/HOL/Mutabelle
+src/HOL/Library/Sum_of_Squares
src/HOL/Tools/ATP
-src/HOL/Mirabelle
-src/HOL/Library/Sum_of_Squares
+src/HOL/Tools/Predicate_Compile
src/HOL/Tools/SMT
-src/HOL/Tools/Predicate_Compile
-src/HOL/Tools/Nitpick
-src/HOL/Mutabelle
+src/HOL/TPTP
--- a/src/HOL/ATP.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/ATP.thy Tue Feb 14 11:16:07 2012 +0100
@@ -12,9 +12,9 @@
"Tools/ATP/atp_util.ML"
"Tools/ATP/atp_problem.ML"
"Tools/ATP/atp_proof.ML"
- "Tools/ATP/atp_redirect.ML"
- ("Tools/ATP/atp_translate.ML")
- ("Tools/ATP/atp_reconstruct.ML")
+ "Tools/ATP/atp_proof_redirect.ML"
+ ("Tools/ATP/atp_problem_generate.ML")
+ ("Tools/ATP/atp_proof_reconstruct.ML")
("Tools/ATP/atp_systems.ML")
begin
@@ -49,8 +49,8 @@
subsection {* Setup *}
-use "Tools/ATP/atp_translate.ML"
-use "Tools/ATP/atp_reconstruct.ML"
+use "Tools/ATP/atp_problem_generate.ML"
+use "Tools/ATP/atp_proof_reconstruct.ML"
use "Tools/ATP/atp_systems.ML"
setup ATP_Systems.setup
--- a/src/HOL/Enum.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Enum.thy Tue Feb 14 11:16:07 2012 +0100
@@ -742,6 +742,8 @@
end
+hide_const (open) a\<^isub>1 a\<^isub>2 a\<^isub>3 a\<^isub>4 a\<^isub>5
+
subsection {* An executable THE operator on finite types *}
definition
@@ -776,14 +778,187 @@
unfolding enum_the_def by (auto split: list.split)
qed
+code_abort enum_the
+code_const enum_the (Eval "(fn p => raise Match)")
+
+subsection {* Further operations on finite types *}
+
+lemma [code]:
+ "Collect P = set (filter P enum)"
+by (auto simp add: enum_UNIV)
+
+lemma tranclp_unfold [code, no_atp]:
+ "tranclp r a b \<equiv> (a, b) \<in> trancl {(x, y). r x y}"
+by (simp add: trancl_def)
+
+lemma rtranclp_rtrancl_eq[code, no_atp]:
+ "rtranclp r x y = ((x, y) : rtrancl {(x, y). r x y})"
+unfolding rtrancl_def by auto
+
+lemma max_ext_eq[code]:
+ "max_ext R = {(X, Y). finite X & finite Y & Y ~={} & (ALL x. x : X --> (EX xa : Y. (x, xa) : R))}"
+by (auto simp add: max_ext.simps)
+
+lemma max_extp_eq[code]:
+ "max_extp r x y = ((x, y) : max_ext {(x, y). r x y})"
+unfolding max_ext_def by auto
+
+lemma mlex_eq[code]:
+ "f <*mlex*> R = {(x, y). f x < f y \<or> (f x <= f y \<and> (x, y) : R)}"
+unfolding mlex_prod_def by auto
+
+subsection {* Executable accessible part *}
+(* FIXME: should be moved somewhere else !? *)
+
+subsubsection {* Finite monotone eventually stable sequences *}
+
+lemma finite_mono_remains_stable_implies_strict_prefix:
+ fixes f :: "nat \<Rightarrow> 'a::order"
+ assumes S: "finite (range f)" "mono f" and eq: "\<forall>n. f n = f (Suc n) \<longrightarrow> f (Suc n) = f (Suc (Suc n))"
+ shows "\<exists>N. (\<forall>n\<le>N. \<forall>m\<le>N. m < n \<longrightarrow> f m < f n) \<and> (\<forall>n\<ge>N. f N = f n)"
+ using assms
+proof -
+ have "\<exists>n. f n = f (Suc n)"
+ proof (rule ccontr)
+ assume "\<not> ?thesis"
+ then have "\<And>n. f n \<noteq> f (Suc n)" by auto
+ then have "\<And>n. f n < f (Suc n)"
+ using `mono f` by (auto simp: le_less mono_iff_le_Suc)
+ with lift_Suc_mono_less_iff[of f]
+ have "\<And>n m. n < m \<Longrightarrow> f n < f m" by auto
+ then have "inj f"
+ by (auto simp: inj_on_def) (metis linorder_less_linear order_less_imp_not_eq)
+ with `finite (range f)` have "finite (UNIV::nat set)"
+ by (rule finite_imageD)
+ then show False by simp
+ qed
+ then guess n .. note n = this
+ def N \<equiv> "LEAST n. f n = f (Suc n)"
+ have N: "f N = f (Suc N)"
+ unfolding N_def using n by (rule LeastI)
+ show ?thesis
+ proof (intro exI[of _ N] conjI allI impI)
+ fix n assume "N \<le> n"
+ then have "\<And>m. N \<le> m \<Longrightarrow> m \<le> n \<Longrightarrow> f m = f N"
+ proof (induct rule: dec_induct)
+ case (step n) then show ?case
+ using eq[rule_format, of "n - 1"] N
+ by (cases n) (auto simp add: le_Suc_eq)
+ qed simp
+ from this[of n] `N \<le> n` show "f N = f n" by auto
+ next
+ fix n m :: nat assume "m < n" "n \<le> N"
+ then show "f m < f n"
+ proof (induct rule: less_Suc_induct[consumes 1])
+ case (1 i)
+ then have "i < N" by simp
+ then have "f i \<noteq> f (Suc i)"
+ unfolding N_def by (rule not_less_Least)
+ with `mono f` show ?case by (simp add: mono_iff_le_Suc less_le)
+ qed auto
+ qed
+qed
+
+lemma finite_mono_strict_prefix_implies_finite_fixpoint:
+ fixes f :: "nat \<Rightarrow> 'a set"
+ assumes S: "\<And>i. f i \<subseteq> S" "finite S"
+ and inj: "\<exists>N. (\<forall>n\<le>N. \<forall>m\<le>N. m < n \<longrightarrow> f m \<subset> f n) \<and> (\<forall>n\<ge>N. f N = f n)"
+ shows "f (card S) = (\<Union>n. f n)"
+proof -
+ from inj obtain N where inj: "(\<forall>n\<le>N. \<forall>m\<le>N. m < n \<longrightarrow> f m \<subset> f n)" and eq: "(\<forall>n\<ge>N. f N = f n)" by auto
+
+ { fix i have "i \<le> N \<Longrightarrow> i \<le> card (f i)"
+ proof (induct i)
+ case 0 then show ?case by simp
+ next
+ case (Suc i)
+ with inj[rule_format, of "Suc i" i]
+ have "(f i) \<subset> (f (Suc i))" by auto
+ moreover have "finite (f (Suc i))" using S by (rule finite_subset)
+ ultimately have "card (f i) < card (f (Suc i))" by (intro psubset_card_mono)
+ with Suc show ?case using inj by auto
+ qed
+ }
+ then have "N \<le> card (f N)" by simp
+ also have "\<dots> \<le> card S" using S by (intro card_mono)
+ finally have "f (card S) = f N" using eq by auto
+ then show ?thesis using eq inj[rule_format, of N]
+ apply auto
+ apply (case_tac "n < N")
+ apply (auto simp: not_less)
+ done
+qed
+
+subsubsection {* Bounded accessible part *}
+
+fun bacc :: "('a * 'a) set => nat => 'a set"
+where
+ "bacc r 0 = {x. \<forall> y. (y, x) \<notin> r}"
+| "bacc r (Suc n) = (bacc r n Un {x. \<forall> y. (y, x) : r --> y : bacc r n})"
+
+lemma bacc_subseteq_acc:
+ "bacc r n \<subseteq> acc r"
+by (induct n) (auto intro: acc.intros)
+
+lemma bacc_mono:
+ "n <= m ==> bacc r n \<subseteq> bacc r m"
+by (induct rule: dec_induct) auto
+
+lemma bacc_upper_bound:
+ "bacc (r :: ('a * 'a) set) (card (UNIV :: ('a :: enum) set)) = (UN n. bacc r n)"
+proof -
+ have "mono (bacc r)" unfolding mono_def by (simp add: bacc_mono)
+ moreover have "\<forall>n. bacc r n = bacc r (Suc n) \<longrightarrow> bacc r (Suc n) = bacc r (Suc (Suc n))" by auto
+ moreover have "finite (range (bacc r))" by auto
+ ultimately show ?thesis
+ by (intro finite_mono_strict_prefix_implies_finite_fixpoint)
+ (auto intro: finite_mono_remains_stable_implies_strict_prefix simp add: enum_UNIV)
+qed
+
+lemma acc_subseteq_bacc:
+ assumes "finite r"
+ shows "acc r \<subseteq> (UN n. bacc r n)"
+proof
+ fix x
+ assume "x : acc r"
+ from this have "\<exists> n. x : bacc r n"
+ proof (induct x arbitrary: n rule: acc.induct)
+ case (accI x)
+ from accI have "\<forall> y. \<exists> n. (y, x) \<in> r --> y : bacc r n" by simp
+ from choice[OF this] guess n .. note n = this
+ have "\<exists> n. \<forall>y. (y, x) : r --> y : bacc r n"
+ proof (safe intro!: exI[of _ "Max ((%(y,x). n y)`r)"])
+ fix y assume y: "(y, x) : r"
+ with n have "y : bacc r (n y)" by auto
+ moreover have "n y <= Max ((%(y, x). n y) ` r)"
+ using y `finite r` by (auto intro!: Max_ge)
+ note bacc_mono[OF this, of r]
+ ultimately show "y : bacc r (Max ((%(y, x). n y) ` r))" by auto
+ qed
+ from this guess n ..
+ from this show ?case
+ by (auto simp add: Let_def intro!: exI[of _ "Suc n"])
+ qed
+ thus "x : (UN n. bacc r n)" by auto
+qed
+
+lemma acc_bacc_eq: "acc ((set xs) :: (('a :: enum) * 'a) set) = bacc (set xs) (card (UNIV :: 'a set))"
+by (metis acc_subseteq_bacc bacc_subseteq_acc bacc_upper_bound finite_set order_eq_iff)
+
+definition
+ [code del]: "card_UNIV = card UNIV"
+
+lemma [code]:
+ "card_UNIV TYPE('a :: enum) = card (set (Enum.enum :: 'a list))"
+unfolding card_UNIV_def enum_UNIV ..
+
+declare acc_bacc_eq[folded card_UNIV_def, code]
+
+lemma [code_unfold]: "accp r = (%x. x : acc {(x, y). r x y})"
+unfolding acc_def by simp
subsection {* Closing up *}
-code_abort enum_the
-
-hide_const (open) a\<^isub>1 a\<^isub>2 a\<^isub>3 a\<^isub>4 a\<^isub>5
-
-
hide_type (open) finite_1 finite_2 finite_3 finite_4 finite_5
hide_const (open) enum enum_all enum_ex n_lists all_n_lists ex_n_lists product ntrancl
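
The new code equations above make set comprehensions, transitive closures, and the accessible part executable on enum types. A rough usage sketch (hypothetical, not part of the changeset; it assumes the standard code setup for quantifiers and set membership on enum types):

  value "Collect (\<lambda>b::bool. b)"
    (* expected {True}, via the new equation Collect P = set (filter P enum) *)
  value "acc ({} :: (bool \<times> bool) set)"
    (* expected UNIV: over the empty relation every element is accessible *)
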
--- a/src/HOL/Fun.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Fun.thy Tue Feb 14 11:16:07 2012 +0100
@@ -427,27 +427,6 @@
using * by blast
qed
-lemma bij_betw_Disj_Un:
- assumes DISJ: "A \<inter> B = {}" and DISJ': "A' \<inter> B' = {}" and
- B1: "bij_betw f A A'" and B2: "bij_betw f B B'"
- shows "bij_betw f (A \<union> B) (A' \<union> B')"
-proof-
- have 1: "inj_on f A \<and> inj_on f B"
- using B1 B2 by (auto simp add: bij_betw_def)
- have 2: "f`A = A' \<and> f`B = B'"
- using B1 B2 by (auto simp add: bij_betw_def)
- hence "f`(A - B) \<inter> f`(B - A) = {}"
- using DISJ DISJ' by blast
- hence "inj_on f (A \<union> B)"
- using 1 by (auto simp add: inj_on_Un)
- (* *)
- moreover
- have "f`(A \<union> B) = A' \<union> B'"
- using 2 by auto
- ultimately show ?thesis
- unfolding bij_betw_def by auto
-qed
-
lemma bij_betw_subset:
assumes BIJ: "bij_betw f A A'" and
SUB: "B \<le> A" and IM: "f ` B = B'"
--- a/src/HOL/Hoare_Parallel/OG_Hoare.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Hoare_Parallel/OG_Hoare.thy Tue Feb 14 11:16:07 2012 +0100
@@ -102,7 +102,7 @@
"(Basic f, s) -Pn\<rightarrow> (Parallel Ts, t) \<longrightarrow> All_None Ts \<longrightarrow> t = f s"
apply(induct "n")
apply(simp (no_asm))
-apply(fast dest: rel_pow_Suc_D2 Parallel_empty_lemma elim: transition_cases)
+apply(fast dest: relpow_Suc_D2 Parallel_empty_lemma elim: transition_cases)
done
lemma SEM_fwhile: "SEM S (p \<inter> b) \<subseteq> p \<Longrightarrow> SEM (fwhile b S k) p \<subseteq> (p \<inter> -b)"
@@ -112,7 +112,7 @@
apply(rule conjI)
apply (blast dest: L3_5i)
apply(simp add: SEM_def sem_def id_def)
-apply (blast dest: Basic_ntran rtrancl_imp_UN_rel_pow)
+apply (blast dest: Basic_ntran rtrancl_imp_UN_relpow)
done
lemma atom_hoare_sound [rule_format]:
@@ -122,7 +122,7 @@
apply simp_all
--{*Basic*}
apply(simp add: SEM_def sem_def)
- apply(fast dest: rtrancl_imp_UN_rel_pow Basic_ntran)
+ apply(fast dest: rtrancl_imp_UN_relpow Basic_ntran)
--{* Seq *}
apply(rule impI)
apply(rule subset_trans)
@@ -448,7 +448,7 @@
apply(force dest: nth_mem simp add: All_None_def)
--{* Basic *}
apply(simp add: SEM_def sem_def)
- apply(force dest: rtrancl_imp_UN_rel_pow Basic_ntran)
+ apply(force dest: rtrancl_imp_UN_relpow Basic_ntran)
--{* Seq *}
apply(rule subset_trans)
prefer 2 apply assumption
--- a/src/HOL/Hoare_Parallel/OG_Tran.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Hoare_Parallel/OG_Tran.thy Tue Feb 14 11:16:07 2012 +0100
@@ -120,7 +120,7 @@
apply(induct n)
apply(simp (no_asm))
apply clarify
-apply(drule rel_pow_Suc_D2)
+apply(drule relpow_Suc_D2)
apply(force elim:transition_cases)
done
@@ -129,7 +129,7 @@
apply(induct "n")
apply(simp (no_asm))
apply clarify
-apply(drule rel_pow_Suc_D2)
+apply(drule relpow_Suc_D2)
apply clarify
apply(erule transition_cases,simp_all)
apply(force dest:nth_mem simp add:All_None_def)
@@ -138,7 +138,7 @@
lemma Parallel_AllNone: "All_None Ts \<Longrightarrow> (SEM (Parallel Ts) X) = X"
apply (unfold SEM_def sem_def)
apply auto
-apply(drule rtrancl_imp_UN_rel_pow)
+apply(drule rtrancl_imp_UN_relpow)
apply clarify
apply(drule Parallel_AllNone_lemma)
apply auto
@@ -171,18 +171,18 @@
(All_None Rs) \<and> (c2, y) -Pm\<rightarrow> (Parallel Ts, t) \<and> m \<le> n)"
apply(induct "n")
apply(force)
-apply(safe dest!: rel_pow_Suc_D2)
+apply(safe dest!: relpow_Suc_D2)
apply(erule transition_cases,simp_all)
apply (fast intro!: le_SucI)
-apply (fast intro!: le_SucI elim!: rel_pow_imp_rtrancl converse_rtrancl_into_rtrancl)
+apply (fast intro!: le_SucI elim!: relpow_imp_rtrancl converse_rtrancl_into_rtrancl)
done
lemma L3_5ii_lemma3:
"\<lbrakk>(Seq c1 c2,s) -P*\<rightarrow> (Parallel Ts,t); All_None Ts\<rbrakk> \<Longrightarrow>
(\<exists>y Rs. (c1,s) -P*\<rightarrow> (Parallel Rs,y) \<and> All_None Rs
\<and> (c2,y) -P*\<rightarrow> (Parallel Ts,t))"
-apply(drule rtrancl_imp_UN_rel_pow)
-apply(fast dest: L3_5ii_lemma2 rel_pow_imp_rtrancl)
+apply(drule rtrancl_imp_UN_relpow)
+apply(fast dest: L3_5ii_lemma2 relpow_imp_rtrancl)
done
lemma L3_5ii: "SEM (Seq c1 c2) X = SEM c2 (SEM c1 X)"
@@ -212,16 +212,16 @@
apply (unfold UNIV_def)
apply(rule nat_less_induct)
apply safe
-apply(erule rel_pow_E2)
+apply(erule relpow_E2)
apply simp_all
apply(erule transition_cases)
apply simp_all
-apply(erule rel_pow_E2)
+apply(erule relpow_E2)
apply(simp add: Id_def)
apply(erule transition_cases,simp_all)
apply clarify
apply(erule transition_cases,simp_all)
-apply(erule rel_pow_E2,simp)
+apply(erule relpow_E2,simp)
apply clarify
apply(erule transition_cases)
apply simp+
@@ -231,7 +231,7 @@
done
lemma L3_5v_lemma2: "\<lbrakk>(\<Omega>, s) -P*\<rightarrow> (Parallel Ts, t); All_None Ts \<rbrakk> \<Longrightarrow> False"
-apply(fast dest: rtrancl_imp_UN_rel_pow L3_5v_lemma1)
+apply(fast dest: rtrancl_imp_UN_relpow L3_5v_lemma1)
done
lemma L3_5v_lemma3: "SEM (\<Omega>) S = {}"
@@ -244,7 +244,7 @@
(\<exists>k. (fwhile b c k, s) -P*\<rightarrow> (Parallel Ts, t))"
apply(rule nat_less_induct)
apply safe
-apply(erule rel_pow_E2)
+apply(erule relpow_E2)
apply safe
apply(erule transition_cases,simp_all)
apply (rule_tac x = "1" in exI)
@@ -275,9 +275,9 @@
apply(rule converse_rtrancl_into_rtrancl)
apply(fast)
apply(fast elim!: L3_5ii_lemma1 dest: L3_5ii_lemma3)
-apply(drule rtrancl_imp_UN_rel_pow)
+apply(drule rtrancl_imp_UN_relpow)
apply clarify
-apply(erule rel_pow_E2)
+apply(erule relpow_E2)
apply simp_all
apply(erule transition_cases,simp_all)
apply(fast dest: Parallel_empty_lemma)
@@ -287,7 +287,7 @@
apply(rule ext)
apply (simp add: SEM_def sem_def)
apply safe
- apply(drule rtrancl_imp_UN_rel_pow,simp)
+ apply(drule rtrancl_imp_UN_relpow,simp)
apply clarify
apply(fast dest:L3_5v_lemma4)
apply(fast intro: L3_5v_lemma5)
--- a/src/HOL/IMP/Abs_Int0.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int0.thy Tue Feb 14 11:16:07 2012 +0100
@@ -9,7 +9,7 @@
text{* Abstract interpretation over type @{text st} instead of
functions. *}
-context Val_abs
+context Gamma
begin
fun aval' :: "aexp \<Rightarrow> 'av st \<Rightarrow> 'av" where
@@ -18,7 +18,7 @@
"aval' (Plus a1 a2) S = plus' (aval' a1 S) (aval' a2 S)"
lemma aval'_sound: "s : \<gamma>\<^isub>f S \<Longrightarrow> aval a s : \<gamma>(aval' a S)"
-by (induct a) (auto simp: gamma_num' gamma_plus' \<gamma>_st_def lookup_def)
+by (induction a) (auto simp: gamma_num' gamma_plus' \<gamma>_st_def lookup_def)
end
@@ -26,7 +26,7 @@
the name of the type parameter @{typ 'av} which would otherwise be renamed to
@{typ 'a}. *}
-locale Abs_Int = Val_abs \<gamma> for \<gamma> :: "'av::SL_top \<Rightarrow> val set"
+locale Abs_Int = Gamma where \<gamma>=\<gamma> for \<gamma> :: "'av::SL_top \<Rightarrow> val set"
begin
fun step' :: "'av st option \<Rightarrow> 'av st option acom \<Rightarrow> 'av st option acom" where
@@ -58,8 +58,8 @@
function operating on states as functions. *}
lemma step_preserves_le:
- "\<lbrakk> S \<subseteq> \<gamma>\<^isub>o S'; cs \<le> \<gamma>\<^isub>c ca \<rbrakk> \<Longrightarrow> step S cs \<le> \<gamma>\<^isub>c (step' S' ca)"
-proof(induction cs arbitrary: ca S S')
+ "\<lbrakk> S \<subseteq> \<gamma>\<^isub>o S'; c \<le> \<gamma>\<^isub>c c' \<rbrakk> \<Longrightarrow> step S c \<le> \<gamma>\<^isub>c (step' S' c')"
+proof(induction c arbitrary: c' S S')
case SKIP thus ?case by(auto simp:SKIP_le map_acom_SKIP)
next
case Assign thus ?case
@@ -69,24 +69,24 @@
case Semi thus ?case apply (auto simp: Semi_le map_acom_Semi)
by (metis le_post post_map_acom)
next
- case (If b cs1 cs2 P)
- then obtain ca1 ca2 Pa where
- "ca= IF b THEN ca1 ELSE ca2 {Pa}"
- "P \<subseteq> \<gamma>\<^isub>o Pa" "cs1 \<le> \<gamma>\<^isub>c ca1" "cs2 \<le> \<gamma>\<^isub>c ca2"
+ case (If b c1 c2 P)
+ then obtain c1' c2' P' where
+ "c' = IF b THEN c1' ELSE c2' {P'}"
+ "P \<subseteq> \<gamma>\<^isub>o P'" "c1 \<le> \<gamma>\<^isub>c c1'" "c2 \<le> \<gamma>\<^isub>c c2'"
by (fastforce simp: If_le map_acom_If)
- moreover have "post cs1 \<subseteq> \<gamma>\<^isub>o(post ca1 \<squnion> post ca2)"
- by (metis (no_types) `cs1 \<le> \<gamma>\<^isub>c ca1` join_ge1 le_post mono_gamma_o order_trans post_map_acom)
- moreover have "post cs2 \<subseteq> \<gamma>\<^isub>o(post ca1 \<squnion> post ca2)"
- by (metis (no_types) `cs2 \<le> \<gamma>\<^isub>c ca2` join_ge2 le_post mono_gamma_o order_trans post_map_acom)
+ moreover have "post c1 \<subseteq> \<gamma>\<^isub>o(post c1' \<squnion> post c2')"
+ by (metis (no_types) `c1 \<le> \<gamma>\<^isub>c c1'` join_ge1 le_post mono_gamma_o order_trans post_map_acom)
+ moreover have "post c2 \<subseteq> \<gamma>\<^isub>o(post c1' \<squnion> post c2')"
+ by (metis (no_types) `c2 \<le> \<gamma>\<^isub>c c2'` join_ge2 le_post mono_gamma_o order_trans post_map_acom)
ultimately show ?case using `S \<subseteq> \<gamma>\<^isub>o S'` by (simp add: If.IH subset_iff)
next
- case (While I b cs1 P)
- then obtain ca1 Ia Pa where
- "ca = {Ia} WHILE b DO ca1 {Pa}"
- "I \<subseteq> \<gamma>\<^isub>o Ia" "P \<subseteq> \<gamma>\<^isub>o Pa" "cs1 \<le> \<gamma>\<^isub>c ca1"
+ case (While I b c1 P)
+ then obtain c1' I' P' where
+ "c' = {I'} WHILE b DO c1' {P'}"
+ "I \<subseteq> \<gamma>\<^isub>o I'" "P \<subseteq> \<gamma>\<^isub>o P'" "c1 \<le> \<gamma>\<^isub>c c1'"
by (fastforce simp: map_acom_While While_le)
- moreover have "S \<union> post cs1 \<subseteq> \<gamma>\<^isub>o (S' \<squnion> post ca1)"
- using `S \<subseteq> \<gamma>\<^isub>o S'` le_post[OF `cs1 \<le> \<gamma>\<^isub>c ca1`, simplified]
+ moreover have "S \<union> post c1 \<subseteq> \<gamma>\<^isub>o (S' \<squnion> post c1')"
+ using `S \<subseteq> \<gamma>\<^isub>o S'` le_post[OF `c1 \<le> \<gamma>\<^isub>c c1'`, simplified]
by (metis (no_types) join_ge1 join_ge2 le_sup_iff mono_gamma_o order_trans)
ultimately show ?case by (simp add: While.IH subset_iff)
qed
@@ -135,7 +135,7 @@
subsubsection "Ascending Chain Condition"
-hide_const acc
+hide_const (open) acc
abbreviation "strict r == r \<inter> -(r^-1)"
abbreviation "acc r == wf((strict r)^-1)"
@@ -150,7 +150,7 @@
text{* ACC for option type: *}
lemma acc_option: assumes "acc {(x,y::'a::preord). x \<sqsubseteq> y}"
-shows "acc {(x,y::'a option). x \<sqsubseteq> y}"
+shows "acc {(x,y::'a::preord option). x \<sqsubseteq> y}"
proof(auto simp: wf_eq_minimal)
fix xo :: "'a option" and Qo assume "xo : Qo"
let ?Q = "{x. Some x \<in> Qo}"
@@ -195,8 +195,8 @@
qed
lemma measure_st: assumes "(strict{(x,y::'a::SL_top). x \<sqsubseteq> y})^-1 <= measure m"
-and "\<forall>x y::'a. x \<sqsubseteq> y \<and> y \<sqsubseteq> x \<longrightarrow> m x = m y"
-shows "(strict{(S,S'::'a st). S \<sqsubseteq> S'})^-1 \<subseteq>
+and "\<forall>x y::'a::SL_top. x \<sqsubseteq> y \<and> y \<sqsubseteq> x \<longrightarrow> m x = m y"
+shows "(strict{(S,S'::'a::SL_top st). S \<sqsubseteq> S'})^-1 \<subseteq>
measure(%fd. \<Sum>x| x\<in>set(dom fd) \<and> ~ \<top> \<sqsubseteq> fun fd x. m(fun fd x)+1)"
proof-
{ fix S S' :: "'a st" assume "S \<sqsubseteq> S'" "~ S' \<sqsubseteq> S"
@@ -392,5 +392,20 @@
apply(simp add: bot_acom assms(3))
done
+context Abs_Int_mono
+begin
+
+lemma AI_Some_measure:
+assumes "(strict{(x,y::'a). x \<sqsubseteq> y})^-1 <= measure m"
+and "\<forall>x y::'a. x \<sqsubseteq> y \<and> y \<sqsubseteq> x \<longrightarrow> m x = m y"
+shows "\<exists>c'. AI c = Some c'"
+unfolding AI_def
+apply(rule lpfpc_termination)
+apply(rule wf_subset[OF wf_measure measure_st[OF assms]])
+apply(erule mono_step'[OF le_refl])
+apply(rule strip_step')
+done
end
+
+end
--- a/src/HOL/IMP/Abs_Int0_const.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int0_const.thy Tue Feb 14 11:16:07 2012 +0100
@@ -54,7 +54,6 @@
interpretation Val_abs
where \<gamma> = \<gamma>_const and num' = Const and plus' = plus_const
-defines aval'_const is aval'
proof
case goal1 thus ?case
by(cases a, cases b, simp, simp, cases b, simp, simp)
@@ -69,9 +68,48 @@
interpretation Abs_Int
where \<gamma> = \<gamma>_const and num' = Const and plus' = plus_const
-defines AI_const is AI
-and step_const is step'
-proof qed
+defines AI_const is AI and step_const is step' and aval'_const is aval'
+..
+
+
+subsubsection "Tests"
+
+value "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test1_const))"
+value "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test1_const))"
+value "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test1_const))"
+value "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test1_const))"
+value "show_acom_opt (AI_const test1_const)"
+
+value "show_acom_opt (AI_const test2_const)"
+value "show_acom_opt (AI_const test3_const)"
+
+value "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test4_const))"
+value "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test4_const))"
+value "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test4_const))"
+value "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test4_const))"
+value "show_acom_opt (AI_const test4_const)"
+
+value "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test5_const))"
+value "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test5_const))"
+value "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test5_const))"
+value "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test5_const))"
+value "show_acom (((step_const \<top>)^^4) (\<bottom>\<^sub>c test5_const))"
+value "show_acom (((step_const \<top>)^^5) (\<bottom>\<^sub>c test5_const))"
+value "show_acom_opt (AI_const test5_const)"
+
+value "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^4) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^5) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^6) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^7) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^8) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^9) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^10) (\<bottom>\<^sub>c test6_const))"
+value "show_acom (((step_const \<top>)^^11) (\<bottom>\<^sub>c test6_const))"
+value "show_acom_opt (AI_const test6_const)"
text{* Monotonicity: *}
@@ -85,48 +123,17 @@
text{* Termination: *}
+definition "m_const x = (case x of Const _ \<Rightarrow> 1 | Any \<Rightarrow> 0)"
+
lemma measure_const:
- "(strict{(x::const,y). x \<sqsubseteq> y})^-1 \<subseteq>
- measure(%x. case x of Const _ \<Rightarrow> 1 | Any \<Rightarrow> 0)"
-by(auto split: const.splits)
+ "(strict{(x::const,y). x \<sqsubseteq> y})^-1 \<subseteq> measure m_const"
+by(auto simp: m_const_def split: const.splits)
lemma measure_const_eq:
- "\<forall> x y::const. x \<sqsubseteq> y \<and> y \<sqsubseteq> x \<longrightarrow> (%x. case x of Const _ \<Rightarrow> 1 | Any \<Rightarrow> 0) x = (%x. case x of Const _ \<Rightarrow> 1 | Any \<Rightarrow> 0) y"
-by(auto split: const.splits)
-
-lemma acc_const_st: "Abs_Int0.acc{(x::const st,y). x \<sqsubseteq> y}"
-by(rule wf_subset[OF wf_measure measure_st[OF measure_const measure_const_eq]])
+ "\<forall> x y::const. x \<sqsubseteq> y \<and> y \<sqsubseteq> x \<longrightarrow> m_const x = m_const y"
+by(auto simp: m_const_def split: const.splits)
lemma "EX c'. AI_const c = Some c'"
-by(metis AI_def lpfpc_termination[OF acc_const_st, where f = "step_const \<top>",
- OF mono_step'[OF le_refl] strip_step'])
-
-
-subsubsection "Tests"
-
-value [code] "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test1_const))"
-value [code] "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test1_const))"
-value [code] "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test1_const))"
-value [code] "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test1_const))"
-value [code] "show_acom_opt (AI_const test1_const)"
-
-value [code] "show_acom_opt (AI_const test2_const)"
-value [code] "show_acom_opt (AI_const test3_const)"
-
-value [code] "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test4_const))"
-value [code] "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test4_const))"
-value [code] "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test4_const))"
-value [code] "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test4_const))"
-value [code] "show_acom_opt (AI_const test4_const)"
-
-value [code] "show_acom (((step_const \<top>)^^0) (\<bottom>\<^sub>c test5_const))"
-value [code] "show_acom (((step_const \<top>)^^1) (\<bottom>\<^sub>c test5_const))"
-value [code] "show_acom (((step_const \<top>)^^2) (\<bottom>\<^sub>c test5_const))"
-value [code] "show_acom (((step_const \<top>)^^3) (\<bottom>\<^sub>c test5_const))"
-value [code] "show_acom (((step_const \<top>)^^4) (\<bottom>\<^sub>c test5_const))"
-value [code] "show_acom (((step_const \<top>)^^5) (\<bottom>\<^sub>c test5_const))"
-value [code] "show_acom_opt (AI_const test5_const)"
-
-value [code] "show_acom_opt (AI_const test6_const)"
+by(rule AI_Some_measure[OF measure_const measure_const_eq])
end
--- a/src/HOL/IMP/Abs_Int0_fun.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int0_fun.thy Tue Feb 14 11:16:07 2012 +0100
@@ -314,8 +314,8 @@
by(simp add: \<gamma>_fun_def)
lemma step_preserves_le:
- "\<lbrakk> S \<subseteq> \<gamma>\<^isub>o S'; cs \<le> \<gamma>\<^isub>c ca \<rbrakk> \<Longrightarrow> step S cs \<le> \<gamma>\<^isub>c (step' S' ca)"
-proof(induction cs arbitrary: ca S S')
+ "\<lbrakk> S \<subseteq> \<gamma>\<^isub>o S'; c \<le> \<gamma>\<^isub>c c' \<rbrakk> \<Longrightarrow> step S c \<le> \<gamma>\<^isub>c (step' S' c')"
+proof(induction c arbitrary: c' S S')
case SKIP thus ?case by(auto simp:SKIP_le map_acom_SKIP)
next
case Assign thus ?case
@@ -325,24 +325,24 @@
case Semi thus ?case apply (auto simp: Semi_le map_acom_Semi)
by (metis le_post post_map_acom)
next
- case (If b cs1 cs2 P)
- then obtain ca1 ca2 Pa where
- "ca= IF b THEN ca1 ELSE ca2 {Pa}"
- "P \<subseteq> \<gamma>\<^isub>o Pa" "cs1 \<le> \<gamma>\<^isub>c ca1" "cs2 \<le> \<gamma>\<^isub>c ca2"
+ case (If b c1 c2 P)
+ then obtain c1' c2' P' where
+ "c' = IF b THEN c1' ELSE c2' {P'}"
+ "P \<subseteq> \<gamma>\<^isub>o P'" "c1 \<le> \<gamma>\<^isub>c c1'" "c2 \<le> \<gamma>\<^isub>c c2'"
by (fastforce simp: If_le map_acom_If)
- moreover have "post cs1 \<subseteq> \<gamma>\<^isub>o(post ca1 \<squnion> post ca2)"
- by (metis (no_types) `cs1 \<le> \<gamma>\<^isub>c ca1` join_ge1 le_post mono_gamma_o order_trans post_map_acom)
- moreover have "post cs2 \<subseteq> \<gamma>\<^isub>o(post ca1 \<squnion> post ca2)"
- by (metis (no_types) `cs2 \<le> \<gamma>\<^isub>c ca2` join_ge2 le_post mono_gamma_o order_trans post_map_acom)
+ moreover have "post c1 \<subseteq> \<gamma>\<^isub>o(post c1' \<squnion> post c2')"
+ by (metis (no_types) `c1 \<le> \<gamma>\<^isub>c c1'` join_ge1 le_post mono_gamma_o order_trans post_map_acom)
+ moreover have "post c2 \<subseteq> \<gamma>\<^isub>o(post c1' \<squnion> post c2')"
+ by (metis (no_types) `c2 \<le> \<gamma>\<^isub>c c2'` join_ge2 le_post mono_gamma_o order_trans post_map_acom)
ultimately show ?case using `S \<subseteq> \<gamma>\<^isub>o S'` by (simp add: If.IH subset_iff)
next
- case (While I b cs1 P)
- then obtain ca1 Ia Pa where
- "ca = {Ia} WHILE b DO ca1 {Pa}"
- "I \<subseteq> \<gamma>\<^isub>o Ia" "P \<subseteq> \<gamma>\<^isub>o Pa" "cs1 \<le> \<gamma>\<^isub>c ca1"
+ case (While I b c1 P)
+ then obtain c1' I' P' where
+ "c' = {I'} WHILE b DO c1' {P'}"
+ "I \<subseteq> \<gamma>\<^isub>o I'" "P \<subseteq> \<gamma>\<^isub>o P'" "c1 \<le> \<gamma>\<^isub>c c1'"
by (fastforce simp: map_acom_While While_le)
- moreover have "S \<union> post cs1 \<subseteq> \<gamma>\<^isub>o (S' \<squnion> post ca1)"
- using `S \<subseteq> \<gamma>\<^isub>o S'` le_post[OF `cs1 \<le> \<gamma>\<^isub>c ca1`, simplified]
+ moreover have "S \<union> post c1 \<subseteq> \<gamma>\<^isub>o (S' \<squnion> post c1')"
+ using `S \<subseteq> \<gamma>\<^isub>o S'` le_post[OF `c1 \<le> \<gamma>\<^isub>c c1'`, simplified]
by (metis (no_types) join_ge1 join_ge2 le_sup_iff mono_gamma_o order_trans)
ultimately show ?case by (simp add: While.IH subset_iff)
qed
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/IMP/Abs_Int0_parity.thy Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,166 @@
+theory Abs_Int0_parity
+imports Abs_Int0
+begin
+
+subsection "Parity Analysis"
+
+datatype parity = Even | Odd | Either
+
+text{* Instantiation of class @{class preord} with type @{typ parity}: *}
+
+instantiation parity :: preord
+begin
+
+text{* First the definition of the interface function @{text"\<sqsubseteq>"}. Note that
+the header of the definition must refer to the ascii name @{const le} of the
+constant as @{text le_parity} and the definition is named @{text
+le_parity_def}. Inside the definition the symbolic names can be used. *}
+
+definition le_parity where
+"x \<sqsubseteq> y = (y = Either \<or> x=y)"
+
+text{* Now the instance proof, i.e.\ the proof that the definition fulfills
+the axioms (assumptions) of the class. The initial proof-step generates the
+necessary proof obligations. *}
+
+instance
+proof
+ fix x::parity show "x \<sqsubseteq> x" by(auto simp: le_parity_def)
+next
+ fix x y z :: parity assume "x \<sqsubseteq> y" "y \<sqsubseteq> z" thus "x \<sqsubseteq> z"
+ by(auto simp: le_parity_def)
+qed
+
+end
+
+text{* Instantiation of class @{class SL_top} with type @{typ parity}: *}
+
+instantiation parity :: SL_top
+begin
+
+
+definition join_parity where
+"x \<squnion> y = (if x \<sqsubseteq> y then y else if y \<sqsubseteq> x then x else Either)"
+
+definition Top_parity where
+"\<top> = Either"
+
+text{* Now the instance proof. This time we take a lazy shortcut: we do not
+write out the proof obligations but use the @{text goali} primitive to refer
+to the assumptions of subgoal i and @{text "?case"} to refer to the
+conclusion of subgoal i. The class axioms are presented in the same order as
+in the class definition. *}
+
+instance
+proof
+ case goal1 (*join1*) show ?case by(auto simp: le_parity_def join_parity_def)
+next
+ case goal2 (*join2*) show ?case by(auto simp: le_parity_def join_parity_def)
+next
+ case goal3 (*join least*) thus ?case by(auto simp: le_parity_def join_parity_def)
+next
+ case goal4 (*Top*) show ?case by(auto simp: le_parity_def Top_parity_def)
+qed
+
+end
+
+
+text{* Now we define the functions used for instantiating the abstract
+interpretation locales. Note that the Isabelle terminology is
+\emph{interpretation}, not \emph{instantiation} of locales, but we use
+instantiation to avoid confusion with abstract interpretation. *}
+
+fun \<gamma>_parity :: "parity \<Rightarrow> val set" where
+"\<gamma>_parity Even = {i. i mod 2 = 0}" |
+"\<gamma>_parity Odd = {i. i mod 2 = 1}" |
+"\<gamma>_parity Either = UNIV"
+
+fun num_parity :: "val \<Rightarrow> parity" where
+"num_parity i = (if i mod 2 = 0 then Even else Odd)"
+
+fun plus_parity :: "parity \<Rightarrow> parity \<Rightarrow> parity" where
+"plus_parity Even Even = Even" |
+"plus_parity Odd Odd = Even" |
+"plus_parity Even Odd = Odd" |
+"plus_parity Odd Even = Odd" |
+"plus_parity Either y = Either" |
+"plus_parity x Either = Either"
+
+text{* First we instantiate the abstract value interface and prove that the
+functions on type @{typ parity} have all the necessary properties: *}
+
+interpretation Val_abs
+where \<gamma> = \<gamma>_parity and num' = num_parity and plus' = plus_parity
+proof txt{* of the locale axioms *}
+ fix a b :: parity
+ assume "a \<sqsubseteq> b" thus "\<gamma>_parity a \<subseteq> \<gamma>_parity b"
+ by(auto simp: le_parity_def)
+next txt{* The rest in the lazy, implicit way *}
+ case goal2 show ?case by(auto simp: Top_parity_def)
+next
+ case goal3 show ?case by auto
+next
+ txt{* Warning: this subproof refers to the names @{text a1} and @{text a2}
+ from the statement of the axiom. *}
+ case goal4 thus ?case
+ proof(cases a1 a2 rule: parity.exhaust[case_product parity.exhaust])
+ qed (auto simp add:mod_add_eq)
+qed
+
+text{* Instantiating the abstract interpretation locale requires no more
+proofs (they happened in the instantiation above) but delivers the
+instantiated abstract interpreter, which we call AI: *}
+
+interpretation Abs_Int
+where \<gamma> = \<gamma>_parity and num' = num_parity and plus' = plus_parity
+defines aval_parity is aval' and step_parity is step' and AI_parity is AI
+..
+
+
+subsubsection "Tests"
+
+definition "test1_parity =
+ ''x'' ::= N 1;
+ WHILE Less (V ''x'') (N 100) DO ''x'' ::= Plus (V ''x'') (N 2)"
+
+value "show_acom_opt (AI_parity test1_parity)"
+
+definition "test2_parity =
+ ''x'' ::= N 1;
+ WHILE Less (V ''x'') (N 100) DO ''x'' ::= Plus (V ''x'') (N 3)"
+
+value "show_acom ((step_parity \<top> ^^1) (anno None test2_parity))"
+value "show_acom ((step_parity \<top> ^^2) (anno None test2_parity))"
+value "show_acom ((step_parity \<top> ^^3) (anno None test2_parity))"
+value "show_acom ((step_parity \<top> ^^4) (anno None test2_parity))"
+value "show_acom ((step_parity \<top> ^^5) (anno None test2_parity))"
+value "show_acom_opt (AI_parity test2_parity)"
+
+
+subsubsection "Termination"
+
+interpretation Abs_Int_mono
+where \<gamma> = \<gamma>_parity and num' = num_parity and plus' = plus_parity
+proof
+ case goal1 thus ?case
+ proof(cases a1 a2 b1 b2
+ rule: parity.exhaust[case_product parity.exhaust[case_product parity.exhaust[case_product parity.exhaust]]]) (* FIXME - UGLY! *)
+ qed (auto simp add:le_parity_def)
+qed
+
+
+definition m_parity :: "parity \<Rightarrow> nat" where
+"m_parity x = (if x=Either then 0 else 1)"
+
+lemma measure_parity:
+ "(strict{(x::parity,y). x \<sqsubseteq> y})^-1 \<subseteq> measure m_parity"
+by(auto simp add: m_parity_def le_parity_def)
+
+lemma measure_parity_eq:
+ "\<forall>x y::parity. x \<sqsubseteq> y \<and> y \<sqsubseteq> x \<longrightarrow> m_parity x = m_parity y"
+by(auto simp add: m_parity_def le_parity_def)
+
+lemma AI_parity_Some: "\<exists>c'. AI_parity c = Some c'"
+by(rule AI_Some_measure[OF measure_parity measure_parity_eq])
+
+end
--- a/src/HOL/IMP/Abs_Int1.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int1.thy Tue Feb 14 11:16:07 2012 +0100
@@ -61,7 +61,7 @@
end
locale Val_abs1_gamma =
- Val_abs where \<gamma> = \<gamma> for \<gamma> :: "'av::L_top_bot \<Rightarrow> val set" +
+ Gamma where \<gamma> = \<gamma> for \<gamma> :: "'av::L_top_bot \<Rightarrow> val set" +
assumes inter_gamma_subset_gamma_meet:
"\<gamma> a1 \<inter> \<gamma> a2 \<subseteq> \<gamma>(a1 \<sqinter> a2)"
and gamma_Bot[simp]: "\<gamma> \<bottom> = {}"
@@ -77,14 +77,16 @@
locale Val_abs1 =
- Val_abs1_gamma where \<gamma> = \<gamma>
- for \<gamma> :: "'av::L_top_bot \<Rightarrow> val set" +
-fixes filter_plus' :: "'av \<Rightarrow> 'av \<Rightarrow> 'av \<Rightarrow> 'av * 'av"
+ Val_abs1_gamma where \<gamma> = \<gamma>
+ for \<gamma> :: "'av::L_top_bot \<Rightarrow> val set" +
+fixes test_num' :: "val \<Rightarrow> 'av \<Rightarrow> bool"
+and filter_plus' :: "'av \<Rightarrow> 'av \<Rightarrow> 'av \<Rightarrow> 'av * 'av"
and filter_less' :: "bool \<Rightarrow> 'av \<Rightarrow> 'av \<Rightarrow> 'av * 'av"
-assumes filter_plus': "filter_plus' a a1 a2 = (a1',a2') \<Longrightarrow>
- n1 : \<gamma> a1 \<Longrightarrow> n2 : \<gamma> a2 \<Longrightarrow> n1+n2 : \<gamma> a \<Longrightarrow> n1 : \<gamma> a1' \<and> n2 : \<gamma> a2'"
-and filter_less': "filter_less' (n1<n2) a1 a2 = (a1',a2') \<Longrightarrow>
- n1 : \<gamma> a1 \<Longrightarrow> n2 : \<gamma> a2 \<Longrightarrow> n1 : \<gamma> a1' \<and> n2 : \<gamma> a2'"
+assumes test_num': "test_num' n a = (n : \<gamma> a)"
+and filter_plus': "filter_plus' a a1 a2 = (b1,b2) \<Longrightarrow>
+ n1 : \<gamma> a1 \<Longrightarrow> n2 : \<gamma> a2 \<Longrightarrow> n1+n2 : \<gamma> a \<Longrightarrow> n1 : \<gamma> b1 \<and> n2 : \<gamma> b2"
+and filter_less': "filter_less' (n1<n2) a1 a2 = (b1,b2) \<Longrightarrow>
+ n1 : \<gamma> a1 \<Longrightarrow> n2 : \<gamma> a2 \<Longrightarrow> n1 : \<gamma> b1 \<and> n2 : \<gamma> b2"
locale Abs_Int1 =
@@ -104,7 +106,7 @@
subsubsection "Backward analysis"
fun afilter :: "aexp \<Rightarrow> 'av \<Rightarrow> 'av st option \<Rightarrow> 'av st option" where
-"afilter (N n) a S = (if n : \<gamma> a then S else None)" |
+"afilter (N n) a S = (if test_num' n a then S else None)" |
"afilter (V x) a S = (case S of None \<Rightarrow> None | Some S \<Rightarrow>
let a' = lookup S x \<sqinter> a in
if a' \<sqsubseteq> \<bottom> then None else Some(update S x a'))" |
@@ -134,7 +136,7 @@
lemma afilter_sound: "s : \<gamma>\<^isub>o S \<Longrightarrow> aval e s : \<gamma> a \<Longrightarrow> s : \<gamma>\<^isub>o (afilter e a S)"
proof(induction e arbitrary: a S)
- case N thus ?case by simp
+ case N thus ?case by simp (metis test_num')
next
case (V x)
obtain S' where "S = Some S'" and "s : \<gamma>\<^isub>f S'" using `s : \<gamma>\<^isub>o S`
@@ -328,7 +330,7 @@
lemma mono_afilter: "r \<sqsubseteq> r' \<Longrightarrow> S \<sqsubseteq> S' \<Longrightarrow> afilter e r S \<sqsubseteq> afilter e r' S'"
apply(induction e arbitrary: r r' S S')
-apply(auto simp: Let_def split: option.splits prod.splits)
+apply(auto simp: test_num' Let_def split: option.splits prod.splits)
apply (metis mono_gamma subsetD)
apply(drule_tac x = "list" in mono_lookup)
apply (metis mono_meet le_trans)
--- a/src/HOL/IMP/Abs_Int1_ivl.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int1_ivl.thy Tue Feb 14 11:16:07 2012 +0100
@@ -25,15 +25,11 @@
definition "num_ivl n = {n\<dots>n}"
-definition
- [code_abbrev]: "contained_in i k \<longleftrightarrow> k \<in> \<gamma>_ivl i"
-
-lemma contained_in_simps [code]:
- "contained_in (I (Some l) (Some h)) k \<longleftrightarrow> l \<le> k \<and> k \<le> h"
- "contained_in (I (Some l) None) k \<longleftrightarrow> l \<le> k"
- "contained_in (I None (Some h)) k \<longleftrightarrow> k \<le> h"
- "contained_in (I None None) k \<longleftrightarrow> True"
- by (simp_all add: contained_in_def \<gamma>_ivl_def)
+fun in_ivl :: "int \<Rightarrow> ivl \<Rightarrow> bool" where
+"in_ivl k (I (Some l) (Some h)) \<longleftrightarrow> l \<le> k \<and> k \<le> h" |
+"in_ivl k (I (Some l) None) \<longleftrightarrow> l \<le> k" |
+"in_ivl k (I None (Some h)) \<longleftrightarrow> k \<le> h" |
+"in_ivl k (I None None) \<longleftrightarrow> True"
instantiation option :: (plus)plus
begin
@@ -42,7 +38,7 @@
"Some x + Some y = Some(x+y)" |
"_ + _ = None"
-instance proof qed
+instance ..
end
@@ -147,7 +143,7 @@
"Some x - Some y = Some(x-y)" |
"_ - _ = None"
-instance proof qed
+instance ..
end
@@ -158,7 +154,6 @@
"n1 : \<gamma>_ivl i1 \<Longrightarrow> n2 : \<gamma>_ivl i2 \<Longrightarrow> n1-n2 : \<gamma>_ivl(minus_ivl i1 i2)"
by(auto simp add: minus_ivl_def \<gamma>_ivl_def split: ivl.splits option.splits)
-
definition "filter_plus_ivl i i1 i2 = ((*if is_empty i then empty else*)
i1 \<sqinter> minus_ivl i i2, i2 \<sqinter> minus_ivl i i1)"
@@ -172,7 +167,6 @@
interpretation Val_abs
where \<gamma> = \<gamma>_ivl and num' = num_ivl and plus' = plus_ivl
-defines aval_ivl is aval'
proof
case goal1 thus ?case
by(auto simp: \<gamma>_ivl_def le_ivl_def le_option_def split: ivl.split option.split if_splits)
@@ -187,6 +181,7 @@
interpretation Val_abs1_gamma
where \<gamma> = \<gamma>_ivl and num' = num_ivl and plus' = plus_ivl
+defines aval_ivl is aval'
proof
case goal1 thus ?case
by(auto simp add: \<gamma>_ivl_def meet_ivl_def empty_def min_option_def max_option_def split: ivl.split option.split)
@@ -205,32 +200,38 @@
interpretation Val_abs1
where \<gamma> = \<gamma>_ivl and num' = num_ivl and plus' = plus_ivl
+and test_num' = in_ivl
and filter_plus' = filter_plus_ivl and filter_less' = filter_less_ivl
proof
case goal1 thus ?case
+ by (simp add: \<gamma>_ivl_def split: ivl.split option.split)
+next
+ case goal2 thus ?case
by(auto simp add: filter_plus_ivl_def)
(metis gamma_minus_ivl add_diff_cancel add_commute)+
next
- case goal2 thus ?case
+ case goal3 thus ?case
by(cases a1, cases a2,
auto simp: \<gamma>_ivl_def min_option_def max_option_def le_option_def split: if_splits option.splits)
qed
interpretation Abs_Int1
where \<gamma> = \<gamma>_ivl and num' = num_ivl and plus' = plus_ivl
+and test_num' = in_ivl
and filter_plus' = filter_plus_ivl and filter_less' = filter_less_ivl
defines afilter_ivl is afilter
and bfilter_ivl is bfilter
and step_ivl is step'
and AI_ivl is AI
and aval_ivl' is aval''
-proof qed
+..
text{* Monotonicity: *}
interpretation Abs_Int1_mono
where \<gamma> = \<gamma>_ivl and num' = num_ivl and plus' = plus_ivl
+and test_num' = in_ivl
and filter_plus' = filter_plus_ivl and filter_less' = filter_less_ivl
proof
case goal1 thus ?case
@@ -247,38 +248,38 @@
subsubsection "Tests"
-value [code] "show_acom_opt (AI_ivl test1_ivl)"
+value "show_acom_opt (AI_ivl test1_ivl)"
text{* Better than @{text AI_const}: *}
-value [code] "show_acom_opt (AI_ivl test3_const)"
-value [code] "show_acom_opt (AI_ivl test4_const)"
-value [code] "show_acom_opt (AI_ivl test6_const)"
+value "show_acom_opt (AI_ivl test3_const)"
+value "show_acom_opt (AI_ivl test4_const)"
+value "show_acom_opt (AI_ivl test6_const)"
-value [code] "show_acom_opt (AI_ivl test2_ivl)"
-value [code] "show_acom (((step_ivl \<top>)^^0) (\<bottom>\<^sub>c test2_ivl))"
-value [code] "show_acom (((step_ivl \<top>)^^1) (\<bottom>\<^sub>c test2_ivl))"
-value [code] "show_acom (((step_ivl \<top>)^^2) (\<bottom>\<^sub>c test2_ivl))"
+value "show_acom_opt (AI_ivl test2_ivl)"
+value "show_acom (((step_ivl \<top>)^^0) (\<bottom>\<^sub>c test2_ivl))"
+value "show_acom (((step_ivl \<top>)^^1) (\<bottom>\<^sub>c test2_ivl))"
+value "show_acom (((step_ivl \<top>)^^2) (\<bottom>\<^sub>c test2_ivl))"
text{* Fixed point reached in 2 steps. Not so if the start value of x is known: *}
-value [code] "show_acom_opt (AI_ivl test3_ivl)"
-value [code] "show_acom (((step_ivl \<top>)^^0) (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (((step_ivl \<top>)^^1) (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (((step_ivl \<top>)^^2) (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (((step_ivl \<top>)^^3) (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (((step_ivl \<top>)^^4) (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom_opt (AI_ivl test3_ivl)"
+value "show_acom (((step_ivl \<top>)^^0) (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (((step_ivl \<top>)^^1) (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (((step_ivl \<top>)^^2) (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (((step_ivl \<top>)^^3) (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (((step_ivl \<top>)^^4) (\<bottom>\<^sub>c test3_ivl))"
text{* Takes as many iterations as the actual execution. Would diverge if the
loop did not terminate. Worse still, as the following example shows: even if
the actual execution terminates, the analysis may not. The value of y keeps
decreasing as the analysis is iterated, no matter how long: *}
-value [code] "show_acom (((step_ivl \<top>)^^50) (\<bottom>\<^sub>c test4_ivl))"
+value "show_acom (((step_ivl \<top>)^^50) (\<bottom>\<^sub>c test4_ivl))"
text{* Relationships between variables are NOT captured: *}
-value [code] "show_acom_opt (AI_ivl test5_ivl)"
+value "show_acom_opt (AI_ivl test5_ivl)"
text{* Again, the analysis would not terminate: *}
-value [code] "show_acom (((step_ivl \<top>)^^50) (\<bottom>\<^sub>c test6_ivl))"
+value "show_acom (((step_ivl \<top>)^^50) (\<bottom>\<^sub>c test6_ivl))"
end
--- a/src/HOL/IMP/Abs_Int2.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int2.thy Tue Feb 14 11:16:07 2012 +0100
@@ -227,9 +227,10 @@
interpretation Abs_Int2
where \<gamma> = \<gamma>_ivl and num' = num_ivl and plus' = plus_ivl
+and test_num' = in_ivl
and filter_plus' = filter_plus_ivl and filter_less' = filter_less_ivl
defines AI_ivl' is AI_wn
-proof qed
+..
subsubsection "Tests"
@@ -238,23 +239,23 @@
definition "step_down_ivl n = ((\<lambda>c. c \<triangle>\<^sub>c step_ivl \<top> c)^^n)"
text{* For @{const test3_ivl}, @{const AI_ivl} needed as many iterations as
-the loop took to execute. In contrast, @{const AI_ivl} converges in a
+the loop took to execute. In contrast, @{const AI_ivl'} converges in a
constant number of steps: *}
-value [code] "show_acom (step_up_ivl 1 (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (step_up_ivl 2 (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (step_up_ivl 3 (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (step_up_ivl 4 (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl))"
-value [code] "show_acom (step_down_ivl 1 (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl)))"
-value [code] "show_acom (step_down_ivl 2 (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl)))"
-value [code] "show_acom (step_down_ivl 3 (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl)))"
+value "show_acom (step_up_ivl 1 (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (step_up_ivl 2 (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (step_up_ivl 3 (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (step_up_ivl 4 (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl))"
+value "show_acom (step_down_ivl 1 (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl)))"
+value "show_acom (step_down_ivl 2 (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl)))"
+value "show_acom (step_down_ivl 3 (step_up_ivl 5 (\<bottom>\<^sub>c test3_ivl)))"
text{* Now all the analyses terminate: *}
-value [code] "show_acom_opt (AI_ivl' test4_ivl)"
-value [code] "show_acom_opt (AI_ivl' test5_ivl)"
-value [code] "show_acom_opt (AI_ivl' test6_ivl)"
+value "show_acom_opt (AI_ivl' test4_ivl)"
+value "show_acom_opt (AI_ivl' test5_ivl)"
+value "show_acom_opt (AI_ivl' test6_ivl)"
subsubsection "Termination: Intervals"
@@ -619,7 +620,10 @@
show ?thesis using assms(3) by(simp)
qed
-(* step' = step_ivl! mv into locale*)
+
+context Abs_Int2
+begin
+
lemma iter_widen_step'_Com:
"iter_widen (step' \<top>) c = Some c' \<Longrightarrow> vars(strip c) \<subseteq> X \<Longrightarrow> c : Com(X)
\<Longrightarrow> c' : Com(X)"
@@ -631,8 +635,10 @@
apply (auto simp: step'_Com)
done
-theorem step_ivl_termination:
- "EX c. AI_ivl' c0 = Some c"
+end
+
+theorem AI_ivl'_termination:
+ "EX c'. AI_ivl' c = Some c'"
apply(auto simp: AI_wn_def pfp_wn_def iter_winden_step_ivl_termination split: option.split)
apply(rule iter_narrow_step_ivl_termination)
apply (metis bot_acom_Com iter_widen_step'_Com[OF _ subset_refl] strip_iter_widen strip_step')
--- a/src/HOL/IMP/Abs_Int_Tests.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_Int_Tests.thy Tue Feb 14 11:16:07 2012 +0100
@@ -52,9 +52,9 @@
DO ''x'' ::= Plus (V ''x'') (N 1)"
definition "test4_ivl =
- ''x'' ::= N 0; ''y'' ::= N 100; ''z'' ::= Plus (V ''x'') (V ''y'');
+ ''x'' ::= N 0; ''y'' ::= N 0;
WHILE Less (V ''x'') (N 11)
- DO (''x'' ::= Plus (V ''x'') (N 1); ''y'' ::= Plus (V ''y'') (N -1))"
+ DO (''x'' ::= Plus (V ''x'') (N 1); ''y'' ::= Plus (V ''y'') (N 1))"
definition "test5_ivl =
''x'' ::= N 0; ''y'' ::= N 0;
--- a/src/HOL/IMP/Abs_State.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Abs_State.thy Tue Feb 14 11:16:07 2012 +0100
@@ -12,8 +12,8 @@
datatype 'a st = FunDom "vname \<Rightarrow> 'a" "vname list"
-fun "fun" where "fun (FunDom f _) = f"
-fun dom where "dom (FunDom _ A) = A"
+fun "fun" where "fun (FunDom f xs) = f"
+fun dom where "dom (FunDom f xs) = xs"
definition [simp]: "inter_list xs ys = [x\<leftarrow>xs. x \<in> set ys]"
@@ -58,7 +58,7 @@
lemma mono_update: "a \<sqsubseteq> a' \<Longrightarrow> S \<sqsubseteq> S' \<Longrightarrow> update S x a \<sqsubseteq> update S' x a'"
by(auto simp add: le_st_def lookup_def update_def)
-context Val_abs
+locale Gamma = Val_abs where \<gamma>=\<gamma> for \<gamma> :: "'av::SL_top \<Rightarrow> val set"
begin
abbreviation \<gamma>\<^isub>f :: "'av st \<Rightarrow> state set"
--- a/src/HOL/IMP/Collecting.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Collecting.thy Tue Feb 14 11:16:07 2012 +0100
@@ -152,8 +152,8 @@
definition CS :: "com \<Rightarrow> state set acom" where
"CS c = lfp (step UNIV) c"
-lemma mono_step_aux: "x \<le> y \<Longrightarrow> S \<subseteq> T \<Longrightarrow> step S x \<le> step T y"
-proof(induction x y arbitrary: S T rule: less_eq_acom.induct)
+lemma mono2_step: "c1 \<le> c2 \<Longrightarrow> S1 \<subseteq> S2 \<Longrightarrow> step S1 c1 \<le> step S2 c2"
+proof(induction c1 c2 arbitrary: S1 S2 rule: less_eq_acom.induct)
case 2 thus ?case by fastforce
next
case 3 thus ?case by(simp add: le_post)
@@ -164,7 +164,7 @@
qed auto
lemma mono_step: "mono (step S)"
-by(blast intro: monoI mono_step_aux)
+by(blast intro: monoI mono2_step)
lemma strip_step: "strip(step S c) = strip c"
by (induction c arbitrary: S) auto
--- a/src/HOL/IMP/Collecting1.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Collecting1.thy Tue Feb 14 11:16:07 2012 +0100
@@ -11,7 +11,7 @@
lemma step_preserves_le:
"\<lbrakk> step S cs = cs; S' \<subseteq> S; cs' \<le> cs \<rbrakk> \<Longrightarrow>
step S' cs' \<le> cs"
-by (metis mono_step_aux)
+by (metis mono2_step)
lemma steps_empty_preserves_le: assumes "step S cs = cs"
shows "cs' \<le> cs \<Longrightarrow> (step {} ^^ n) cs' \<le> cs"
--- a/src/HOL/IMP/HoareT.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/HoareT.thy Tue Feb 14 11:16:07 2012 +0100
@@ -56,7 +56,7 @@
WHILE Less (V ''y'') (N n)
DO ( ''y'' ::= Plus (V ''y'') (N 1); ''x'' ::= Plus (V ''x'') (V ''y'') )"
-lemma "\<turnstile>\<^sub>t {\<lambda>s. 0 <= n} ''x'' ::= N 0; ''y'' ::= N 0; w n {\<lambda>s. s ''x'' = \<Sum> {1 .. n}}"
+lemma "\<turnstile>\<^sub>t {\<lambda>s. 0 \<le> n} ''x'' ::= N 0; ''y'' ::= N 0; w n {\<lambda>s. s ''x'' = \<Sum>{1..n}}"
apply(rule Semi)
prefer 2
apply(rule While'
--- a/src/HOL/IMP/Live_True.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/Live_True.thy Tue Feb 14 11:16:07 2012 +0100
@@ -146,7 +146,7 @@
lemma while_option_stop2:
"while_option b c s = Some t \<Longrightarrow> EX k. t = (c^^k) s \<and> \<not> b t"
apply(simp add: while_option_def split: if_splits)
-by (metis (lam_lifting) LeastI_ex)
+by (metis (lifting) LeastI_ex)
(* move to While_Combinator *)
lemma while_option_finite_subset_Some: fixes C :: "'a set"
assumes "mono f" and "!!X. X \<subseteq> C \<Longrightarrow> f X \<subseteq> C" and "finite C"
--- a/src/HOL/IMP/ROOT.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IMP/ROOT.ML Tue Feb 14 11:16:07 2012 +0100
@@ -19,9 +19,12 @@
"Hoare_Examples",
"VC",
"HoareT",
- "Abs_Int2",
"Collecting1",
"Collecting_list",
+ "Abs_Int0",
+ "Abs_Int0_parity",
+ "Abs_Int0_const",
+ "Abs_Int2",
"Abs_Int_Den/Abs_Int_den2",
"Procs_Dyn_Vars_Dyn",
"Procs_Stat_Vars_Dyn",
--- a/src/HOL/IsaMakefile Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/IsaMakefile Tue Feb 14 11:16:07 2012 +0100
@@ -204,11 +204,11 @@
Set.thy \
Sum_Type.thy \
Tools/ATP/atp_problem.ML \
+ Tools/ATP/atp_problem_generate.ML \
Tools/ATP/atp_proof.ML \
- Tools/ATP/atp_reconstruct.ML \
- Tools/ATP/atp_redirect.ML \
+ Tools/ATP/atp_proof_reconstruct.ML \
+ Tools/ATP/atp_proof_redirect.ML \
Tools/ATP/atp_systems.ML \
- Tools/ATP/atp_translate.ML \
Tools/ATP/atp_util.ML \
Tools/Datatype/datatype.ML \
Tools/Datatype/datatype_aux.ML \
@@ -241,9 +241,9 @@
Tools/Meson/meson.ML \
Tools/Meson/meson_clausify.ML \
Tools/Meson/meson_tactic.ML \
+ Tools/Metis/metis_generate.ML \
Tools/Metis/metis_reconstruct.ML \
Tools/Metis/metis_tactic.ML \
- Tools/Metis/metis_translate.ML \
Tools/abel_cancel.ML \
Tools/arith_data.ML \
Tools/cnf_funcs.ML \
@@ -335,7 +335,6 @@
Tools/Nitpick/nitpick_scope.ML \
Tools/Nitpick/nitpick_tests.ML \
Tools/Nitpick/nitpick_util.ML \
- Tools/Nitpick/nitrox.ML \
Tools/numeral.ML \
Tools/numeral_simprocs.ML \
Tools/numeral_syntax.ML \
@@ -1052,10 +1051,10 @@
ex/Case_Product.thy ex/Chinese.thy ex/Classical.thy \
ex/Coercion_Examples.thy ex/Coherent.thy \
ex/Dedekind_Real.thy ex/Efficient_Nat_examples.thy \
- ex/Eval_Examples.thy ex/Fundefs.thy ex/Gauge_Integration.thy \
- ex/Groebner_Examples.thy ex/Guess.thy ex/HarmonicSeries.thy \
- ex/Hebrew.thy ex/Hex_Bin_Examples.thy ex/Higher_Order_Logic.thy \
- ex/Iff_Oracle.thy ex/Induction_Schema.thy \
+ ex/Eval_Examples.thy ex/Executable_Relation.thy ex/Fundefs.thy \
+ ex/Gauge_Integration.thy ex/Groebner_Examples.thy ex/Guess.thy \
+ ex/HarmonicSeries.thy ex/Hebrew.thy ex/Hex_Bin_Examples.thy \
+ ex/Higher_Order_Logic.thy ex/Iff_Oracle.thy ex/Induction_Schema.thy \
ex/Interpretation_with_Defs.thy ex/Intuitionistic.thy \
ex/Lagrange.thy ex/List_to_Set_Comprehension_Examples.thy \
ex/LocaleTest2.thy ex/MT.thy ex/MergeSort.thy ex/Meson_Test.thy \
@@ -1169,9 +1168,11 @@
$(LOG)/HOL-TPTP.gz: \
$(OUT)/HOL \
TPTP/ROOT.ML \
- TPTP/ATP_Export.thy \
- TPTP/CASC_Setup.thy \
- TPTP/atp_export.ML
+ TPTP/atp_problem_import.ML \
+ TPTP/ATP_Problem_Import.thy \
+ TPTP/atp_theory_export.ML \
+ TPTP/ATP_Theory_Export.thy \
+ TPTP/CASC_Setup.thy
@$(ISABELLE_TOOL) usedir $(OUT)/HOL TPTP
--- a/src/HOL/Library/List_Cset.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Library/List_Cset.thy Tue Feb 14 11:16:07 2012 +0100
@@ -15,9 +15,9 @@
by (simp_all add: fun_eq_iff List.member_def)
definition (in term_syntax)
- setify :: "'a\<Colon>typerep list \<times> (unit \<Rightarrow> Code_Evaluation.term)
+ csetify :: "'a\<Colon>typerep list \<times> (unit \<Rightarrow> Code_Evaluation.term)
\<Rightarrow> 'a Cset.set \<times> (unit \<Rightarrow> Code_Evaluation.term)" where
- [code_unfold]: "setify xs = Code_Evaluation.valtermify Cset.set {\<cdot>} xs"
+ [code_unfold]: "csetify xs = Code_Evaluation.valtermify Cset.set {\<cdot>} xs"
notation fcomp (infixl "\<circ>>" 60)
notation scomp (infixl "\<circ>\<rightarrow>" 60)
@@ -26,7 +26,7 @@
begin
definition
- "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (setify xs))"
+ "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (csetify xs))"
instance ..
--- a/src/HOL/Library/Multiset.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Library/Multiset.thy Tue Feb 14 11:16:07 2012 +0100
@@ -1175,12 +1175,12 @@
by (simp add: Bag_def count_of_multiset Abs_multiset_inverse)
lemma Mempty_Bag [code]:
- "{#} = Bag (Alist [])"
- by (simp add: multiset_eq_iff alist.Alist_inverse)
+ "{#} = Bag (DAList.empty)"
+ by (simp add: multiset_eq_iff alist.Alist_inverse DAList.empty_def)
lemma single_Bag [code]:
- "{#x#} = Bag (Alist [(x, 1)])"
- by (simp add: multiset_eq_iff alist.Alist_inverse)
+ "{#x#} = Bag (DAList.update x 1 DAList.empty)"
+ by (simp add: multiset_eq_iff alist.Alist_inverse impl_of_update impl_of_empty)
lemma union_Bag [code]:
"Bag xs + Bag ys = Bag (join (\<lambda>x (n1, n2). n1 + n2) xs ys)"
--- a/src/HOL/Library/Transitive_Closure_Table.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Library/Transitive_Closure_Table.thy Tue Feb 14 11:16:07 2012 +0100
@@ -185,6 +185,7 @@
by (auto simp add: rtranclp_eq_rtrancl_path)
qed
+declare rtranclp_rtrancl_eq[code del]
declare rtranclp_eq_rtrancl_tab_nil[THEN iffD2, code_pred_intro]
code_pred rtranclp using rtranclp_eq_rtrancl_tab_nil [THEN iffD1] by fastforce
@@ -214,5 +215,4 @@
hide_type ty
hide_const test A B C
-end
-
+end
\ No newline at end of file
--- a/src/HOL/Library/While_Combinator.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Library/While_Combinator.thy Tue Feb 14 11:16:07 2012 +0100
@@ -55,7 +55,7 @@
lemma while_option_stop2:
"while_option b c s = Some t \<Longrightarrow> EX k. t = (c^^k) s \<and> \<not> b t"
apply(simp add: while_option_def split: if_splits)
-by (metis (lam_lifting) LeastI_ex)
+by (metis (lifting) LeastI_ex)
lemma while_option_stop: "while_option b c s = Some t \<Longrightarrow> ~ b t"
by(metis while_option_stop2)
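The same renaming of the parenthesized metis argument (lam_lifting becomes lifting) shows up in both Live_True.thy and While_Combinator.thy above. A minimal sketch of the new spelling, assuming a context in which While_Combinator is loaded; it merely re-derives while_option_stop from while_option_stop2 and is not part of the changeset:

  lemma "while_option b c s = Some t \<Longrightarrow> \<not> b t"
    (* same first-order goal as while_option_stop; the (lifting) option only
       selects the lambda translation and does not change the proof *)
    by (metis (lifting) while_option_stop2)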
--- a/src/HOL/List.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/List.thy Tue Feb 14 11:16:07 2012 +0100
@@ -206,9 +206,9 @@
length :: "'a list \<Rightarrow> nat" where
"length \<equiv> size"
-definition
- rotate1 :: "'a list \<Rightarrow> 'a list" where
- "rotate1 xs = (case xs of [] \<Rightarrow> [] | x#xs \<Rightarrow> xs @ [x])"
+primrec rotate1 :: "'a list \<Rightarrow> 'a list" where
+ "rotate1 [] = []" |
+ "rotate1 (x # xs) = xs @ [x]"
definition
rotate :: "nat \<Rightarrow> 'a list \<Rightarrow> 'a list" where
@@ -265,8 +265,8 @@
@{lemma "nth [a,b,c,d] 2 = c" by simp}\\
@{lemma "[a,b,c,d][2 := x] = [a,b,x,d]" by simp}\\
@{lemma "sublist [a,b,c,d,e] {0,2,3} = [a,c,d]" by (simp add:sublist_def)}\\
-@{lemma "rotate1 [a,b,c,d] = [b,c,d,a]" by (simp add:rotate1_def)}\\
-@{lemma "rotate 3 [a,b,c,d] = [d,a,b,c]" by (simp add:rotate1_def rotate_def eval_nat_numeral)}\\
+@{lemma "rotate1 [a,b,c,d] = [b,c,d,a]" by simp}\\
+@{lemma "rotate 3 [a,b,c,d] = [d,a,b,c]" by (simp add:rotate_def eval_nat_numeral)}\\
@{lemma "replicate 4 a = [a,a,a,a]" by (simp add:eval_nat_numeral)}\\
@{lemma "[2..<5] = [2,3,4]" by (simp add:eval_nat_numeral)}\\
@{lemma "listsum [1,2,3::nat] = 6" by (simp add: listsum_def foldr_def)}
@@ -934,7 +934,7 @@
lemma singleton_rev_conv [simp]: "([x] = rev xs) = (xs = [x])"
by (cases xs) auto
-lemma rev_is_rev_conv [iff]: "(rev xs = rev ys) = (xs = ys)"
+lemma rev_is_rev_conv [iff, no_atp]: "(rev xs = rev ys) = (xs = ys)"
apply (induct xs arbitrary: ys, force)
apply (case_tac ys, simp, force)
done
@@ -2880,7 +2880,7 @@
by (metis distinct_remdups finite_list set_remdups)
lemma remdups_eq_nil_iff [simp]: "(remdups x = []) = (x = [])"
-by (induct x, auto)
+by (induct x, auto)
lemma remdups_eq_nil_right_iff [simp]: "([] = remdups x) = (x = [])"
by (induct x, auto)
@@ -2967,7 +2967,7 @@
apply (case_tac j, simp)
apply (simp add: set_conv_nth)
apply (case_tac j)
-apply (clarsimp simp add: set_conv_nth, simp)
+apply (clarsimp simp add: set_conv_nth, simp)
apply (rule conjI)
(*TOO SLOW
apply (metis Zero_neq_Suc gr0_conv_Suc in_set_conv_nth lessI less_trans_Suc nth_Cons' nth_Cons_Suc)
@@ -2999,7 +2999,7 @@
case False with Cons show ?thesis by simp
next
case True with Cons.prems
- have "card (set xs) = Suc (length xs)"
+ have "card (set xs) = Suc (length xs)"
by (simp add: card_insert_if split: split_if_asm)
moreover have "card (set xs) \<le> length xs" by (rule card_length)
ultimately have False by simp
@@ -3590,9 +3590,6 @@
subsubsection{*@{text rotate1} and @{text rotate}*}
-lemma rotate_simps[simp]: "rotate1 [] = [] \<and> rotate1 (x#xs) = xs @ [x]"
-by(simp add:rotate1_def)
-
lemma rotate0[simp]: "rotate 0 = id"
by(simp add:rotate_def)
@@ -3619,7 +3616,7 @@
done
lemma rotate1_hd_tl: "xs \<noteq> [] \<Longrightarrow> rotate1 xs = tl xs @ [hd xs]"
-by(simp add:rotate1_def split:list.split)
+by (cases xs) simp_all
lemma rotate_drop_take:
"rotate n xs = drop (n mod length xs) xs @ take (n mod length xs) xs"
@@ -3642,13 +3639,13 @@
by(simp add:rotate_drop_take)
lemma length_rotate1[simp]: "length(rotate1 xs) = length xs"
-by(simp add:rotate1_def split:list.split)
+by (cases xs) simp_all
lemma length_rotate[simp]: "length(rotate n xs) = length xs"
by (induct n arbitrary: xs) (simp_all add:rotate_def)
lemma distinct1_rotate[simp]: "distinct(rotate1 xs) = distinct xs"
-by(simp add:rotate1_def split:list.split) blast
+by (cases xs) auto
lemma distinct_rotate[simp]: "distinct(rotate n xs) = distinct xs"
by (induct n) (simp_all add:rotate_def)
@@ -3657,13 +3654,13 @@
by(simp add:rotate_drop_take take_map drop_map)
lemma set_rotate1[simp]: "set(rotate1 xs) = set xs"
-by (cases xs) (auto simp add:rotate1_def)
+by (cases xs) auto
lemma set_rotate[simp]: "set(rotate n xs) = set xs"
by (induct n) (simp_all add:rotate_def)
lemma rotate1_is_Nil_conv[simp]: "(rotate1 xs = []) = (xs = [])"
-by(simp add:rotate1_def split:list.split)
+by (cases xs) auto
lemma rotate_is_Nil_conv[simp]: "(rotate n xs = []) = (xs = [])"
by (induct n) (simp_all add:rotate_def)
@@ -4556,6 +4553,10 @@
inductive_cases listsE [elim!,no_atp]: "x#l : lists A"
inductive_cases listspE [elim!,no_atp]: "listsp A (x # l)"
+inductive_simps listsp_simps[code]:
+ "listsp A []"
+ "listsp A (x # xs)"
+
lemma listsp_mono [mono]: "A \<le> B ==> listsp A \<le> listsp B"
by (rule predicate1I, erule listsp.induct, (blast dest: predicate1D)+)
@@ -4590,7 +4591,7 @@
-- {* eliminate @{text listsp} in favour of @{text set} *}
by (induct xs) auto
-lemmas in_lists_conv_set = in_listsp_conv_set [to_set]
+lemmas in_lists_conv_set [code_unfold] = in_listsp_conv_set [to_set]
lemma in_listspD [dest!,no_atp]: "listsp A xs ==> \<forall>x\<in>set xs. A x"
by (rule in_listsp_conv_set [THEN iffD1])
@@ -5006,8 +5007,8 @@
subsubsection {* Lifting Relations to Lists: all elements *}
inductive_set
- listrel :: "('a * 'a)set => ('a list * 'a list)set"
- for r :: "('a * 'a)set"
+ listrel :: "('a \<times> 'b) set \<Rightarrow> ('a list \<times> 'b list) set"
+ for r :: "('a \<times> 'b) set"
where
Nil: "([],[]) \<in> listrel r"
| Cons: "[| (x,y) \<in> r; (xs,ys) \<in> listrel r |] ==> (x#xs, y#ys) \<in> listrel r"
@@ -5021,7 +5022,7 @@
lemma listrel_eq_len: "(xs, ys) \<in> listrel r \<Longrightarrow> length xs = length ys"
by(induct rule: listrel.induct) auto
-lemma listrel_iff_zip: "(xs,ys) : listrel r \<longleftrightarrow>
+lemma listrel_iff_zip [code_unfold]: "(xs,ys) : listrel r \<longleftrightarrow>
length xs = length ys & (\<forall>(x,y) \<in> set(zip xs ys). (x,y) \<in> r)" (is "?L \<longleftrightarrow> ?R")
proof
assume ?L thus ?R by induct (auto intro: listrel_eq_len)
@@ -5327,6 +5328,36 @@
"xs = ys \<Longrightarrow> (\<And>x. x \<in> set ys \<Longrightarrow> f x = g x) \<Longrightarrow> list_ex f xs = list_ex g ys"
by (simp add: list_ex_iff)
+text {* Executable checks for relations on sets *}
+
+definition listrel1p :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
+"listrel1p r xs ys = ((xs, ys) \<in> listrel1 {(x, y). r x y})"
+
+lemma [code_unfold]:
+ "(xs, ys) \<in> listrel1 r = listrel1p (\<lambda>x y. (x, y) \<in> r) xs ys"
+unfolding listrel1p_def by auto
+
+lemma [code]:
+ "listrel1p r [] xs = False"
+ "listrel1p r xs [] = False"
+ "listrel1p r (x # xs) (y # ys) \<longleftrightarrow>
+ r x y \<and> xs = ys \<or> x = y \<and> listrel1p r xs ys"
+by (simp add: listrel1p_def)+
+
+definition
+ lexordp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
+ "lexordp r xs ys = ((xs, ys) \<in> lexord {(x, y). r x y})"
+
+lemma [code_unfold]:
+ "(xs, ys) \<in> lexord r = lexordp (\<lambda>x y. (x, y) \<in> r) xs ys"
+unfolding lexordp_def by auto
+
+lemma [code]:
+ "lexordp r xs [] = False"
+ "lexordp r [] (y#ys) = True"
+ "lexordp r (x # xs) (y # ys) = (r x y | (x = y & lexordp r xs ys))"
+unfolding lexordp_def by auto
+
text {* Bounded quantification and summation over nats. *}
lemma atMost_upto [code_unfold]:
@@ -5640,6 +5671,29 @@
"Pow (set (x # xs)) = (let A = Pow (set xs) in A \<union> insert x ` A)"
by (simp_all add: Pow_insert Let_def)
+text {* Further operations on sets *}
+
+(* Minimal refinement of equality on sets *)
+declare subset_eq[code del]
+lemma subset_code [code]:
+ "set xs <= B \<longleftrightarrow> (ALL x : set xs. x : B)"
+ "List.coset xs <= List.coset ys \<longleftrightarrow> set ys <= set xs"
+ "List.coset [] <= set [] \<longleftrightarrow> False"
+by auto
+
+lemma setsum_code [code]:
+ "setsum f (set xs) = listsum (map f (remdups xs))"
+by (simp add: listsum_distinct_conv_setsum_set)
+
+definition map_project :: "('a \<Rightarrow> 'b option) \<Rightarrow> 'a set \<Rightarrow> 'b set" where
+ "map_project f A = {b. \<exists> a \<in> A. f a = Some b}"
+
+lemma [code]:
+ "map_project f (set xs) = set (List.map_filter f xs)"
+unfolding map_project_def map_filter_def
+by auto (metis (lifting, mono_tags) CollectI image_eqI o_apply the.simps)
+
+hide_const (open) map_project
text {* Operations on relations *}
@@ -5651,6 +5705,10 @@
"Id_on (set xs) = set [(x, x). x \<leftarrow> xs]"
by (auto simp add: Id_on_def)
+lemma [code]:
+ "R `` S = List.map_project (%(x, y). if x : S then Some y else None) R"
+unfolding map_project_def by (auto split: prod.split split_if_asm)
+
lemma trancl_set_ntrancl [code]:
"trancl (set xs) = ntrancl (card (set xs) - 1) (set xs)"
by (simp add: finite_trancl_ntranl)
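The executable operations introduced in List.thy above (rotate1 as a primrec, listrel1p, lexordp, and the setsum code equation) can be exercised directly through the code generator. A minimal sketch, assuming Isabelle2012 with HOL's Main; the theory name and the test values are arbitrary and not part of the changeset:

  theory List_Exec_Examples
  imports Main
  begin

  value "rotate1 [1::nat, 2, 3, 4]"
    (* [2, 3, 4, 1], computed from the new primrec equations *)

  value "listrel1p (\<lambda>x y. x < y) [1::nat, 2, 3] [1, 5, 3]"
    (* True: the lists differ in exactly one position, and 2 < 5 there *)

  value "lexordp (\<lambda>x y. x < y) [1::nat, 2] [1, 3]"
    (* True: lexicographic comparison via the new [code] equations *)

  value "setsum (\<lambda>x. x * x) (set [1::nat, 2, 2, 3])"
    (* 14, evaluated as listsum (map f (remdups xs)) *)

  end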
--- a/src/HOL/Metis.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Metis.thy Tue Feb 14 11:16:07 2012 +0100
@@ -9,7 +9,7 @@
theory Metis
imports ATP
uses "~~/src/Tools/Metis/metis.ML"
- ("Tools/Metis/metis_translate.ML")
+ ("Tools/Metis/metis_generate.ML")
("Tools/Metis/metis_reconstruct.ML")
("Tools/Metis/metis_tactic.ML")
("Tools/try_methods.ML")
@@ -40,7 +40,7 @@
subsection {* Metis package *}
-use "Tools/Metis/metis_translate.ML"
+use "Tools/Metis/metis_generate.ML"
use "Tools/Metis/metis_reconstruct.ML"
use "Tools/Metis/metis_tactic.ML"
--- a/src/HOL/Metis_Examples/Abstraction.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Metis_Examples/Abstraction.thy Tue Feb 14 11:16:07 2012 +0100
@@ -48,7 +48,7 @@
by (metis SigmaD1 SigmaD2)
lemma "(a, b) \<in> (SIGMA x:A. {y. x = f y}) \<Longrightarrow> a \<in> A \<and> a = f b"
-by (metis (full_types, lam_lifting) CollectD SigmaD1 SigmaD2)
+by (metis (full_types, lifting) CollectD SigmaD1 SigmaD2)
lemma "(a, b) \<in> (SIGMA x:A. {y. x = f y}) \<Longrightarrow> a \<in> A \<and> a = f b"
proof -
@@ -110,25 +110,25 @@
"(cl, f) \<in> CLF \<Longrightarrow>
CLF \<subseteq> (SIGMA cl: CL. {f. f \<in> pset cl \<inter> cl}) \<Longrightarrow>
f \<in> pset cl \<inter> cl"
-by (metis (lam_lifting) CollectD Sigma_triv subsetD)
+by (metis (lifting) CollectD Sigma_triv subsetD)
lemma
"(cl, f) \<in> CLF \<Longrightarrow>
CLF = (SIGMA cl: CL. {f. f \<in> pset cl \<inter> cl}) \<Longrightarrow>
f \<in> pset cl \<inter> cl"
-by (metis (lam_lifting) CollectD Sigma_triv)
+by (metis (lifting) CollectD Sigma_triv)
lemma
"(cl, f) \<in> CLF \<Longrightarrow>
CLF \<subseteq> (SIGMA cl': CL. {f. f \<in> pset cl' \<rightarrow> pset cl'}) \<Longrightarrow>
f \<in> pset cl \<rightarrow> pset cl"
-by (metis (lam_lifting) CollectD Sigma_triv subsetD)
+by (metis (lifting) CollectD Sigma_triv subsetD)
lemma
"(cl, f) \<in> CLF \<Longrightarrow>
CLF = (SIGMA cl: CL. {f. f \<in> pset cl \<rightarrow> pset cl}) \<Longrightarrow>
f \<in> pset cl \<rightarrow> pset cl"
-by (metis (lam_lifting) CollectD Sigma_triv)
+by (metis (lifting) CollectD Sigma_triv)
lemma
"(cl, f) \<in> CLF \<Longrightarrow>
@@ -157,7 +157,7 @@
by (metis mem_Collect_eq imageI set_rev_mp)
lemma "f \<in> (\<lambda>u v. b \<times> u \<times> v) ` A \<Longrightarrow> \<forall>u v. P (b \<times> u \<times> v) \<Longrightarrow> P(f y)"
-by (metis (lam_lifting) imageE)
+by (metis (lifting) imageE)
lemma image_TimesA: "(\<lambda>(x, y). (f x, g y)) ` (A \<times> B) = (f ` A) \<times> (g ` B)"
by (metis map_pair_def map_pair_surj_on)
--- a/src/HOL/Metis_Examples/Big_O.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Metis_Examples/Big_O.thy Tue Feb 14 11:16:07 2012 +0100
@@ -20,10 +20,10 @@
"O(f\<Colon>('a => 'b)) == {h. \<exists>c. \<forall>x. abs (h x) <= c * abs (f x)}"
lemma bigo_pos_const:
- "(\<exists>(c\<Colon>'a\<Colon>linordered_idom).
- \<forall>x. (abs (h x)) <= (c * (abs (f x))))
- = (\<exists>c. 0 < c & (\<forall>x. (abs(h x)) <= (c * (abs (f x)))))"
-by (metis (hide_lams, no_types) abs_ge_zero
+ "(\<exists>c\<Colon>'a\<Colon>linordered_idom.
+ \<forall>x. abs (h x) \<le> c * abs (f x))
+ \<longleftrightarrow> (\<exists>c. 0 < c \<and> (\<forall>x. abs(h x) \<le> c * abs (f x)))"
+by (metis (no_types) abs_ge_zero
comm_semiring_1_class.normalizing_semiring_rules(7) mult.comm_neutral
mult_nonpos_nonneg not_leE order_trans zero_less_one)
@@ -32,9 +32,9 @@
sledgehammer_params [isar_proof, isar_shrink_factor = 1]
lemma
- "(\<exists>(c\<Colon>'a\<Colon>linordered_idom).
- \<forall>x. (abs (h x)) <= (c * (abs (f x))))
- = (\<exists>c. 0 < c & (\<forall>x. (abs(h x)) <= (c * (abs (f x)))))"
+ "(\<exists>c\<Colon>'a\<Colon>linordered_idom.
+ \<forall>x. abs (h x) \<le> c * abs (f x))
+ \<longleftrightarrow> (\<exists>c. 0 < c \<and> (\<forall>x. abs(h x) \<le> c * abs (f x)))"
apply auto
apply (case_tac "c = 0", simp)
apply (rule_tac x = "1" in exI, simp)
@@ -63,9 +63,9 @@
sledgehammer_params [isar_proof, isar_shrink_factor = 2]
lemma
- "(\<exists>(c\<Colon>'a\<Colon>linordered_idom).
- \<forall>x. (abs (h x)) <= (c * (abs (f x))))
- = (\<exists>c. 0 < c & (\<forall>x. (abs(h x)) <= (c * (abs (f x)))))"
+ "(\<exists>c\<Colon>'a\<Colon>linordered_idom.
+ \<forall>x. abs (h x) \<le> c * abs (f x))
+ \<longleftrightarrow> (\<exists>c. 0 < c \<and> (\<forall>x. abs(h x) \<le> c * abs (f x)))"
apply auto
apply (case_tac "c = 0", simp)
apply (rule_tac x = "1" in exI, simp)
@@ -86,9 +86,9 @@
sledgehammer_params [isar_proof, isar_shrink_factor = 3]
lemma
- "(\<exists>(c\<Colon>'a\<Colon>linordered_idom).
- \<forall>x. (abs (h x)) <= (c * (abs (f x))))
- = (\<exists>c. 0 < c & (\<forall>x. (abs(h x)) <= (c * (abs (f x)))))"
+ "(\<exists>c\<Colon>'a\<Colon>linordered_idom.
+ \<forall>x. abs (h x) \<le> c * abs (f x))
+ \<longleftrightarrow> (\<exists>c. 0 < c \<and> (\<forall>x. abs(h x) \<le> c * abs (f x)))"
apply auto
apply (case_tac "c = 0", simp)
apply (rule_tac x = "1" in exI, simp)
@@ -106,9 +106,9 @@
sledgehammer_params [isar_proof, isar_shrink_factor = 4]
lemma
- "(\<exists>(c\<Colon>'a\<Colon>linordered_idom).
- \<forall>x. (abs (h x)) <= (c * (abs (f x))))
- = (\<exists>c. 0 < c & (\<forall>x. (abs(h x)) <= (c * (abs (f x)))))"
+ "(\<exists>c\<Colon>'a\<Colon>linordered_idom.
+ \<forall>x. abs (h x) \<le> c * abs (f x))
+ \<longleftrightarrow> (\<exists>c. 0 < c \<and> (\<forall>x. abs(h x) \<le> c * abs (f x)))"
apply auto
apply (case_tac "c = 0", simp)
apply (rule_tac x = "1" in exI, simp)
@@ -125,25 +125,17 @@
sledgehammer_params [isar_proof, isar_shrink_factor = 1]
-lemma bigo_alt_def: "O(f) = {h. \<exists>c. (0 < c & (\<forall>x. abs (h x) <= c * abs (f x)))}"
+lemma bigo_alt_def: "O(f) = {h. \<exists>c. (0 < c \<and> (\<forall>x. abs (h x) <= c * abs (f x)))}"
by (auto simp add: bigo_def bigo_pos_const)
-lemma bigo_elt_subset [intro]: "f : O(g) \<Longrightarrow> O(f) <= O(g)"
+lemma bigo_elt_subset [intro]: "f : O(g) \<Longrightarrow> O(f) \<le> O(g)"
apply (auto simp add: bigo_alt_def)
apply (rule_tac x = "ca * c" in exI)
-apply (rule conjI)
- apply (rule mult_pos_pos)
- apply (assumption)+
-(* sledgehammer *)
-apply (rule allI)
-apply (drule_tac x = "xa" in spec)+
-apply (subgoal_tac "ca * abs (f xa) <= ca * (c * abs (g xa))")
- apply (metis comm_semiring_1_class.normalizing_semiring_rules(19)
- comm_semiring_1_class.normalizing_semiring_rules(7) order_trans)
-by (metis mult_le_cancel_left_pos)
+by (metis comm_semiring_1_class.normalizing_semiring_rules(7,19)
+ mult_le_cancel_left_pos order_trans mult_pos_pos)
lemma bigo_refl [intro]: "f : O(f)"
-apply (auto simp add: bigo_def)
+unfolding bigo_def mem_Collect_eq
by (metis mult_1 order_refl)
lemma bigo_zero: "0 : O(g)"
@@ -205,6 +197,7 @@
apply clarify
(* sledgehammer *)
apply (rule_tac x = "max c ca" in exI)
+
apply (rule conjI)
apply (metis less_max_iff_disj)
apply clarify
@@ -218,9 +211,8 @@
defer 1
apply (metis abs_triangle_ineq)
apply (metis add_nonneg_nonneg)
-apply (rule add_mono)
- apply (metis le_maxI2 linorder_linear min_max.sup_absorb1 mult_right_mono xt1(6))
-by (metis le_maxI2 linorder_not_le mult_le_cancel_right order_trans)
+by (metis (no_types) add_mono le_maxI2 linorder_linear min_max.sup_absorb1
+ mult_right_mono xt1(6))
lemma bigo_bounded_alt: "\<forall>x. 0 <= f x \<Longrightarrow> \<forall>x. f x <= c * g x \<Longrightarrow> f : O(g)"
apply (auto simp add: bigo_def)
@@ -242,15 +234,10 @@
lemma bigo_bounded2: "\<forall>x. lb x <= f x \<Longrightarrow> \<forall>x. f x <= lb x + g x \<Longrightarrow> f : lb +o O(g)"
apply (rule set_minus_imp_plus)
apply (rule bigo_bounded)
- apply (auto simp add: diff_minus fun_Compl_def func_plus)
- prefer 2
- apply (drule_tac x = x in spec)+
- apply (metis add_right_mono add_commute diff_add_cancel diff_minus_eq_add le_less order_trans)
-proof -
- fix x :: 'a
- assume "\<forall>x. lb x \<le> f x"
- thus "(0\<Colon>'b) \<le> f x + - lb x" by (metis not_leE diff_minus less_iff_diff_less_0 less_le_not_le)
-qed
+ apply (metis add_le_cancel_left diff_add_cancel diff_self minus_apply
+ comm_semiring_1_class.normalizing_semiring_rules(24))
+by (metis add_le_cancel_left diff_add_cancel func_plus le_fun_def
+ comm_semiring_1_class.normalizing_semiring_rules(24))
lemma bigo_abs: "(\<lambda>x. abs(f x)) =o O(f)"
apply (unfold bigo_def)
@@ -281,10 +268,8 @@
also have "... <= O(\<lambda>x. abs (f x - g x))"
apply (rule bigo_elt_subset)
apply (rule bigo_bounded)
- apply force
- apply (rule allI)
- apply (rule abs_triangle_ineq3)
- done
+ apply (metis abs_ge_zero)
+ by (metis abs_triangle_ineq3)
also have "... <= O(f - g)"
apply (rule bigo_elt_subset)
apply (subst fun_diff_def)
@@ -304,9 +289,7 @@
also have "... <= O(g) \<oplus> O(h)"
by (auto del: subsetI)
also have "... = O(\<lambda>x. abs(g x)) \<oplus> O(\<lambda>x. abs(h x))"
- apply (subst bigo_abs3 [symmetric])+
- apply (rule refl)
- done
+ by (metis bigo_abs3)
also have "... = O((\<lambda>x. abs(g x)) + (\<lambda>x. abs(h x)))"
by (rule bigo_plus_eq [symmetric], auto)
finally have "f : ...".
@@ -318,39 +301,21 @@
by (simp add: bigo_abs3 [symmetric])
qed
-lemma bigo_mult [intro]: "O(f)\<otimes>O(g) <= O(f * g)"
- apply (rule subsetI)
- apply (subst bigo_def)
- apply (auto simp del: abs_mult mult_ac
- simp add: bigo_alt_def set_times_def func_times)
+lemma bigo_mult [intro]: "O(f) \<otimes> O(g) <= O(f * g)"
+apply (rule subsetI)
+apply (subst bigo_def)
+apply (auto simp del: abs_mult mult_ac
+ simp add: bigo_alt_def set_times_def func_times)
(* sledgehammer *)
- apply (rule_tac x = "c * ca" in exI)
- apply(rule allI)
- apply(erule_tac x = x in allE)+
- apply(subgoal_tac "c * ca * abs(f x * g x) =
- (c * abs(f x)) * (ca * abs(g x))")
-prefer 2
-apply (metis mult_assoc mult_left_commute
- abs_of_pos mult_left_commute
- abs_mult mult_pos_pos)
- apply (erule ssubst)
- apply (subst abs_mult)
-(* not quite as hard as BigO__bigo_mult_simpler_1 (a hard problem!) since
- abs_mult has just been done *)
-by (metis abs_ge_zero mult_mono')
+apply (rule_tac x = "c * ca" in exI)
+apply (rule allI)
+apply (erule_tac x = x in allE)+
+apply (subgoal_tac "c * ca * abs (f x * g x) = (c * abs(f x)) * (ca * abs (g x))")
+ apply (metis (no_types) abs_ge_zero abs_mult mult_mono')
+by (metis mult_assoc mult_left_commute abs_of_pos mult_left_commute abs_mult)
lemma bigo_mult2 [intro]: "f *o O(g) <= O(f * g)"
- apply (auto simp add: bigo_def elt_set_times_def func_times abs_mult)
-(* sledgehammer *)
- apply (rule_tac x = c in exI)
- apply clarify
- apply (drule_tac x = x in spec)
-(*sledgehammer [no luck]*)
- apply (subgoal_tac "abs(f x) * abs(b x) <= abs(f x) * (c * abs(g x))")
- apply (simp add: mult_ac)
- apply (rule mult_left_mono, assumption)
- apply (rule abs_ge_zero)
-done
+by (metis bigo_mult bigo_refl set_times_mono3 subset_trans)
lemma bigo_mult3: "f : O(h) \<Longrightarrow> g : O(j) \<Longrightarrow> f * g : O(h * j)"
by (metis bigo_mult set_rev_mp set_times_intro)
@@ -372,157 +337,101 @@
by (rule bigo_mult2)
also have "(\<lambda>x. 1 / f x) * (f * g) = g"
apply (simp add: func_times)
- apply (rule ext)
- apply (simp add: a h nonzero_divide_eq_eq mult_ac)
- done
+ by (metis (lifting, no_types) a ext mult_ac(2) nonzero_divide_eq_eq)
finally have "(\<lambda>x. (1\<Colon>'b) / f x) * h : O(g)".
then have "f * ((\<lambda>x. (1\<Colon>'b) / f x) * h) : f *o O(g)"
by auto
also have "f * ((\<lambda>x. (1\<Colon>'b) / f x) * h) = h"
apply (simp add: func_times)
- apply (rule ext)
- apply (simp add: a h nonzero_divide_eq_eq mult_ac)
- done
+ by (metis (lifting, no_types) a eq_divide_imp ext
+ comm_semiring_1_class.normalizing_semiring_rules(7))
finally show "h : f *o O(g)".
qed
qed
-lemma bigo_mult6: "\<forall>x. f x ~= 0 \<Longrightarrow>
- O(f * g) = (f\<Colon>'a => ('b\<Colon>{linordered_field,number_ring})) *o O(g)"
+lemma bigo_mult6:
+"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) = (f\<Colon>'a \<Rightarrow> ('b\<Colon>{linordered_field,number_ring})) *o O(g)"
by (metis bigo_mult2 bigo_mult5 order_antisym)
(*proof requires relaxing relevance: 2007-01-25*)
declare bigo_mult6 [simp]
-lemma bigo_mult7: "\<forall>x. f x ~= 0 \<Longrightarrow>
- O(f * g) <= O(f\<Colon>'a => ('b\<Colon>{linordered_field,number_ring})) \<otimes> O(g)"
-(* sledgehammer *)
- apply (subst bigo_mult6)
- apply assumption
- apply (rule set_times_mono3)
- apply (rule bigo_refl)
-done
+lemma bigo_mult7:
+"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) \<le> O(f\<Colon>'a \<Rightarrow> ('b\<Colon>{linordered_field,number_ring})) \<otimes> O(g)"
+by (metis bigo_refl bigo_mult6 set_times_mono3)
declare bigo_mult6 [simp del]
declare bigo_mult7 [intro!]
-lemma bigo_mult8: "\<forall>x. f x ~= 0 \<Longrightarrow>
- O(f * g) = O(f\<Colon>'a => ('b\<Colon>{linordered_field,number_ring})) \<otimes> O(g)"
+lemma bigo_mult8:
+"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) = O(f\<Colon>'a \<Rightarrow> ('b\<Colon>{linordered_field,number_ring})) \<otimes> O(g)"
by (metis bigo_mult bigo_mult7 order_antisym_conv)
lemma bigo_minus [intro]: "f : O(g) \<Longrightarrow> - f : O(g)"
- by (auto simp add: bigo_def fun_Compl_def)
+by (auto simp add: bigo_def fun_Compl_def)
lemma bigo_minus2: "f : g +o O(h) \<Longrightarrow> -f : -g +o O(h)"
- apply (rule set_minus_imp_plus)
- apply (drule set_plus_imp_minus)
- apply (drule bigo_minus)
- apply (simp add: diff_minus)
-done
+by (metis (no_types) bigo_elt_subset bigo_minus bigo_mult4 bigo_refl
+ comm_semiring_1_class.normalizing_semiring_rules(11) minus_mult_left
+ set_plus_mono_b)
lemma bigo_minus3: "O(-f) = O(f)"
- by (auto simp add: bigo_def fun_Compl_def abs_minus_cancel)
+by (metis bigo_elt_subset bigo_minus bigo_refl equalityI minus_minus)
-lemma bigo_plus_absorb_lemma1: "f : O(g) \<Longrightarrow> f +o O(g) <= O(g)"
-proof -
- assume a: "f : O(g)"
- show "f +o O(g) <= O(g)"
- proof -
- have "f : O(f)" by auto
- then have "f +o O(g) <= O(f) \<oplus> O(g)"
- by (auto del: subsetI)
- also have "... <= O(g) \<oplus> O(g)"
- proof -
- from a have "O(f) <= O(g)" by (auto del: subsetI)
- thus ?thesis by (auto del: subsetI)
- qed
- also have "... <= O(g)" by (simp add: bigo_plus_idemp)
- finally show ?thesis .
- qed
-qed
+lemma bigo_plus_absorb_lemma1: "f : O(g) \<Longrightarrow> f +o O(g) \<le> O(g)"
+by (metis bigo_plus_idemp set_plus_mono3)
-lemma bigo_plus_absorb_lemma2: "f : O(g) \<Longrightarrow> O(g) <= f +o O(g)"
-proof -
- assume a: "f : O(g)"
- show "O(g) <= f +o O(g)"
- proof -
- from a have "-f : O(g)" by auto
- then have "-f +o O(g) <= O(g)" by (elim bigo_plus_absorb_lemma1)
- then have "f +o (-f +o O(g)) <= f +o O(g)" by auto
- also have "f +o (-f +o O(g)) = O(g)"
- by (simp add: set_plus_rearranges)
- finally show ?thesis .
- qed
-qed
+lemma bigo_plus_absorb_lemma2: "f : O(g) \<Longrightarrow> O(g) \<le> f +o O(g)"
+by (metis (no_types) bigo_minus bigo_plus_absorb_lemma1 right_minus
+ set_plus_mono_b set_plus_rearrange2 set_zero_plus subsetI)
lemma bigo_plus_absorb [simp]: "f : O(g) \<Longrightarrow> f +o O(g) = O(g)"
by (metis bigo_plus_absorb_lemma1 bigo_plus_absorb_lemma2 order_eq_iff)
-lemma bigo_plus_absorb2 [intro]: "f : O(g) \<Longrightarrow> A <= O(g) \<Longrightarrow> f +o A <= O(g)"
- apply (subgoal_tac "f +o A <= f +o O(g)")
- apply force+
-done
+lemma bigo_plus_absorb2 [intro]: "f : O(g) \<Longrightarrow> A <= O(g) \<Longrightarrow> f +o A \<le> O(g)"
+by (metis bigo_plus_absorb set_plus_mono)
lemma bigo_add_commute_imp: "f : g +o O(h) \<Longrightarrow> g : f +o O(h)"
- apply (subst set_minus_plus [symmetric])
- apply (subgoal_tac "g - f = - (f - g)")
- apply (erule ssubst)
- apply (rule bigo_minus)
- apply (subst set_minus_plus)
- apply assumption
- apply (simp add: diff_minus add_ac)
-done
+by (metis bigo_minus minus_diff_eq set_plus_imp_minus set_minus_plus)
lemma bigo_add_commute: "(f : g +o O(h)) = (g : f +o O(h))"
- apply (rule iffI)
- apply (erule bigo_add_commute_imp)+
-done
+by (metis bigo_add_commute_imp)
lemma bigo_const1: "(\<lambda>x. c) : O(\<lambda>x. 1)"
by (auto simp add: bigo_def mult_ac)
-lemma (*bigo_const2 [intro]:*) "O(\<lambda>x. c) <= O(\<lambda>x. 1)"
+lemma bigo_const2 [intro]: "O(\<lambda>x. c) \<le> O(\<lambda>x. 1)"
by (metis bigo_const1 bigo_elt_subset)
-lemma bigo_const2 [intro]: "O(\<lambda>x. c\<Colon>'b\<Colon>{linordered_idom,number_ring}) <= O(\<lambda>x. 1)"
-proof -
- have "\<forall>u. (\<lambda>Q. u) \<in> O(\<lambda>Q. 1)" by (metis bigo_const1)
- thus "O(\<lambda>x. c) \<subseteq> O(\<lambda>x. 1)" by (metis bigo_elt_subset)
-qed
-
lemma bigo_const3: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow> (\<lambda>x. 1) : O(\<lambda>x. c)"
apply (simp add: bigo_def)
by (metis abs_eq_0 left_inverse order_refl)
lemma bigo_const4: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow> O(\<lambda>x. 1) <= O(\<lambda>x. c)"
-by (rule bigo_elt_subset, rule bigo_const3, assumption)
+by (metis bigo_elt_subset bigo_const3)
lemma bigo_const [simp]: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow>
O(\<lambda>x. c) = O(\<lambda>x. 1)"
-by (rule equalityI, rule bigo_const2, rule bigo_const4, assumption)
+by (metis bigo_const2 bigo_const4 equalityI)
lemma bigo_const_mult1: "(\<lambda>x. c * f x) : O(f)"
- apply (simp add: bigo_def abs_mult)
+apply (simp add: bigo_def abs_mult)
by (metis le_less)
-lemma bigo_const_mult2: "O(\<lambda>x. c * f x) <= O(f)"
+lemma bigo_const_mult2: "O(\<lambda>x. c * f x) \<le> O(f)"
by (rule bigo_elt_subset, rule bigo_const_mult1)
lemma bigo_const_mult3: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow> f : O(\<lambda>x. c * f x)"
apply (simp add: bigo_def)
-(* sledgehammer *)
-apply (rule_tac x = "abs(inverse c)" in exI)
-apply (simp only: abs_mult [symmetric] mult_assoc [symmetric])
-apply (subst left_inverse)
-by auto
+by (metis (no_types) abs_mult mult_assoc mult_1 order_refl left_inverse)
-lemma bigo_const_mult4: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow>
- O(f) <= O(\<lambda>x. c * f x)"
-by (rule bigo_elt_subset, rule bigo_const_mult3, assumption)
+lemma bigo_const_mult4:
+"(c\<Colon>'a\<Colon>{linordered_field,number_ring}) \<noteq> 0 \<Longrightarrow> O(f) \<le> O(\<lambda>x. c * f x)"
+by (metis bigo_elt_subset bigo_const_mult3)
lemma bigo_const_mult [simp]: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow>
O(\<lambda>x. c * f x) = O(f)"
-by (rule equalityI, rule bigo_const_mult2, erule bigo_const_mult4)
+by (metis equalityI bigo_const_mult2 bigo_const_mult4)
lemma bigo_const_mult5 [simp]: "(c\<Colon>'a\<Colon>{linordered_field,number_ring}) ~= 0 \<Longrightarrow>
(\<lambda>x. c) *o O(f) = O(f)"
@@ -571,48 +480,36 @@
apply (rule mult_left_mono)
apply (erule spec)
apply simp
- apply(simp add: mult_ac)
+ apply (simp add: mult_ac)
done
lemma bigo_const_mult7 [intro]: "f =o O(g) \<Longrightarrow> (\<lambda>x. c * f x) =o O(g)"
-proof -
- assume "f =o O(g)"
- then have "(\<lambda>x. c) * f =o (\<lambda>x. c) *o O(g)"
- by auto
- also have "(\<lambda>x. c) * f = (\<lambda>x. c * f x)"
- by (simp add: func_times)
- also have "(\<lambda>x. c) *o O(g) <= O(g)"
- by (auto del: subsetI)
- finally show ?thesis .
-qed
+by (metis bigo_const_mult1 bigo_elt_subset order_less_le psubsetD)
lemma bigo_compose1: "f =o O(g) \<Longrightarrow> (\<lambda>x. f(k x)) =o O(\<lambda>x. g(k x))"
by (unfold bigo_def, auto)
-lemma bigo_compose2: "f =o g +o O(h) \<Longrightarrow> (\<lambda>x. f(k x)) =o (\<lambda>x. g(k x)) +o
- O(\<lambda>x. h(k x))"
- apply (simp only: set_minus_plus [symmetric] diff_minus fun_Compl_def
- func_plus)
- apply (erule bigo_compose1)
-done
+lemma bigo_compose2:
+"f =o g +o O(h) \<Longrightarrow> (\<lambda>x. f(k x)) =o (\<lambda>x. g(k x)) +o O(\<lambda>x. h(k x))"
+apply (simp only: set_minus_plus [symmetric] diff_minus fun_Compl_def func_plus)
+by (erule bigo_compose1)
subsection {* Setsum *}
lemma bigo_setsum_main: "\<forall>x. \<forall>y \<in> A x. 0 <= h x y \<Longrightarrow>
\<exists>c. \<forall>x. \<forall>y \<in> A x. abs (f x y) <= c * (h x y) \<Longrightarrow>
(\<lambda>x. SUM y : A x. f x y) =o O(\<lambda>x. SUM y : A x. h x y)"
- apply (auto simp add: bigo_def)
- apply (rule_tac x = "abs c" in exI)
- apply (subst abs_of_nonneg) back back
- apply (rule setsum_nonneg)
- apply force
- apply (subst setsum_right_distrib)
- apply (rule allI)
- apply (rule order_trans)
- apply (rule setsum_abs)
- apply (rule setsum_mono)
-apply (blast intro: order_trans mult_right_mono abs_ge_self)
-done
+apply (auto simp add: bigo_def)
+apply (rule_tac x = "abs c" in exI)
+apply (subst abs_of_nonneg) back back
+ apply (rule setsum_nonneg)
+ apply force
+apply (subst setsum_right_distrib)
+apply (rule allI)
+apply (rule order_trans)
+ apply (rule setsum_abs)
+apply (rule setsum_mono)
+by (metis abs_ge_self abs_mult_pos order_trans)
lemma bigo_setsum1: "\<forall>x y. 0 <= h x y \<Longrightarrow>
\<exists>c. \<forall>x y. abs (f x y) <= c * (h x y) \<Longrightarrow>
@@ -620,9 +517,10 @@
by (metis (no_types) bigo_setsum_main)
lemma bigo_setsum2: "\<forall>y. 0 <= h y \<Longrightarrow>
- \<exists>c. \<forall>y. abs(f y) <= c * (h y) \<Longrightarrow>
+ \<exists>c. \<forall>y. abs (f y) <= c * (h y) \<Longrightarrow>
(\<lambda>x. SUM y : A x. f y) =o O(\<lambda>x. SUM y : A x. h y)"
-by (rule bigo_setsum1, auto)
+apply (rule bigo_setsum1)
+by metis+
lemma bigo_setsum3: "f =o O(h) \<Longrightarrow>
(\<lambda>x. SUM y : A x. (l x y) * f(k x y)) =o
@@ -632,13 +530,7 @@
apply (rule abs_ge_zero)
apply (unfold bigo_def)
apply (auto simp add: abs_mult)
-(* sledgehammer *)
-apply (rule_tac x = c in exI)
-apply (rule allI)+
-apply (subst mult_left_commute)
-apply (rule mult_left_mono)
- apply (erule spec)
-by (rule abs_ge_zero)
+by (metis abs_ge_zero mult_left_commute mult_left_mono)
lemma bigo_setsum4: "f =o g +o O(h) \<Longrightarrow>
(\<lambda>x. SUM y : A x. l x y * f(k x y)) =o
@@ -649,22 +541,19 @@
apply (subst setsum_subtractf [symmetric])
apply (subst right_diff_distrib [symmetric])
apply (rule bigo_setsum3)
-apply (subst fun_diff_def [symmetric])
-by (erule set_plus_imp_minus)
+by (metis (lifting, no_types) fun_diff_def set_plus_imp_minus ext)
lemma bigo_setsum5: "f =o O(h) \<Longrightarrow> \<forall>x y. 0 <= l x y \<Longrightarrow>
\<forall>x. 0 <= h x \<Longrightarrow>
(\<lambda>x. SUM y : A x. (l x y) * f(k x y)) =o
O(\<lambda>x. SUM y : A x. (l x y) * h(k x y))"
- apply (subgoal_tac "(\<lambda>x. SUM y : A x. (l x y) * h(k x y)) =
+apply (subgoal_tac "(\<lambda>x. SUM y : A x. (l x y) * h(k x y)) =
(\<lambda>x. SUM y : A x. abs((l x y) * h(k x y)))")
- apply (erule ssubst)
- apply (erule bigo_setsum3)
- apply (rule ext)
- apply (rule setsum_cong2)
- apply (thin_tac "f \<in> O(h)")
-apply (metis abs_of_nonneg zero_le_mult_iff)
-done
+ apply (erule ssubst)
+ apply (erule bigo_setsum3)
+apply (rule ext)
+apply (rule setsum_cong2)
+by (metis abs_of_nonneg zero_le_mult_iff)
lemma bigo_setsum6: "f =o g +o O(h) \<Longrightarrow> \<forall>x y. 0 <= l x y \<Longrightarrow>
\<forall>x. 0 <= h x \<Longrightarrow>
@@ -764,7 +653,7 @@
apply (unfold lesso_def)
apply (subgoal_tac "(\<lambda>x. max (f x - g x) 0) = 0")
apply (metis bigo_zero)
-by (metis (lam_lifting, no_types) func_zero le_fun_def le_iff_diff_le_0
+by (metis (lifting, no_types) func_zero le_fun_def le_iff_diff_le_0
min_max.sup_absorb2 order_eq_iff)
lemma bigo_lesso2: "f =o g +o O(h) \<Longrightarrow>
--- a/src/HOL/Metis_Examples/Type_Encodings.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Metis_Examples/Type_Encodings.thy Tue Feb 14 11:16:07 2012 +0100
@@ -67,16 +67,17 @@
| tac (type_enc :: type_encs) st =
st (* |> tap (fn _ => tracing (PolyML.makestring type_enc)) *)
|> ((if null type_encs then all_tac else rtac @{thm fork} 1)
- THEN Metis_Tactic.metis_tac [type_enc] "combinators" ctxt ths 1
+ THEN Metis_Tactic.metis_tac [type_enc]
+ ATP_Problem_Generate.combsN ctxt ths 1
THEN COND (has_fewer_prems 2) all_tac no_tac
THEN tac type_encs)
- in tac end
+ in tac type_encs end
*}
method_setup metis_exhaust = {*
Attrib.thms >>
- (fn ths => fn ctxt => SIMPLE_METHOD (metis_exhaust_tac ctxt ths type_encs))
-*} "exhaustively run the new Metis with all type encodings"
+ (fn ths => fn ctxt => SIMPLE_METHOD (metis_exhaust_tac ctxt ths))
+*} "exhaustively run Metis with all type encodings"
text {* Miscellaneous tests *}
--- a/src/HOL/Mirabelle/Tools/mirabelle_metis.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_metis.ML Tue Feb 14 11:16:07 2012 +0100
@@ -19,7 +19,8 @@
val facts = Facts.props (Proof_Context.facts_of (Proof.context_of pre))
fun metis ctxt =
- Metis_Tactic.metis_tac [] ATP_Translate.lam_liftingN ctxt (thms @ facts)
+ Metis_Tactic.metis_tac [] ATP_Problem_Generate.liftingN ctxt
+ (thms @ facts)
in
(if Mirabelle.can_apply timeout metis pre then "succeeded" else "failed")
|> prefix (metis_tag id)
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Tue Feb 14 11:16:07 2012 +0100
@@ -9,9 +9,10 @@
val prover_timeoutK = "prover_timeout"
val keepK = "keep"
val type_encK = "type_enc"
-val soundK = "sound"
+val strictK = "strict"
val sliceK = "slice"
val lam_transK = "lam_trans"
+val uncurried_aliasesK = "uncurried_aliases"
val e_weight_methodK = "e_weight_method"
val force_sosK = "force_sos"
val max_relevantK = "max_relevant"
@@ -336,7 +337,7 @@
| NONE => get_prover (default_prover_name ()))
end
-type locality = ATP_Translate.locality
+type stature = ATP_Problem_Generate.stature
(* hack *)
fun reconstructor_from_msg args msg =
@@ -357,12 +358,13 @@
local
datatype sh_result =
- SH_OK of int * int * (string * locality) list |
+ SH_OK of int * int * (string * stature) list |
SH_FAIL of int * int |
SH_ERROR
-fun run_sh prover_name prover type_enc sound max_relevant slice lam_trans
- e_weight_method force_sos hard_timeout timeout dir pos st =
+fun run_sh prover_name prover type_enc strict max_relevant slice lam_trans
+ uncurried_aliases e_weight_method force_sos hard_timeout timeout dir pos
+ st =
let
val {context = ctxt, facts = chained_ths, goal} = Proof.goal st
val i = 1
@@ -385,9 +387,9 @@
Sledgehammer_Isar.default_params ctxt
[("verbose", "true"),
("type_enc", type_enc),
- ("sound", sound),
+ ("strict", strict),
("lam_trans", lam_trans |> the_default "smart"),
- ("preplay_timeout", preplay_timeout),
+ ("uncurried_aliases", uncurried_aliases |> the_default "smart"),
("max_relevant", max_relevant),
("slice", slice),
("timeout", string_of_int timeout),
@@ -410,7 +412,7 @@
fun failed failure =
({outcome = SOME failure, used_facts = [], run_time = Time.zeroTime,
preplay =
- K (ATP_Reconstruct.Failed_to_Play Sledgehammer_Provers.plain_metis),
+ K (ATP_Proof_Reconstruct.Failed_to_Play Sledgehammer_Provers.plain_metis),
message = K "", message_tail = ""}, ~1)
val ({outcome, used_facts, run_time, preplay, message, message_tail}
: Sledgehammer_Provers.prover_result,
@@ -467,10 +469,11 @@
val _ = if trivial then () else change_data id inc_sh_nontriv_calls
val (prover_name, prover) = get_prover (Proof.context_of st) args
val type_enc = AList.lookup (op =) args type_encK |> the_default "smart"
- val sound = AList.lookup (op =) args soundK |> the_default "false"
+ val strict = AList.lookup (op =) args strictK |> the_default "false"
val max_relevant = AList.lookup (op =) args max_relevantK |> the_default "smart"
val slice = AList.lookup (op =) args sliceK |> the_default "true"
val lam_trans = AList.lookup (op =) args lam_transK
+ val uncurried_aliases = AList.lookup (op =) args uncurried_aliasesK
val e_weight_method = AList.lookup (op =) args e_weight_methodK
val force_sos = AList.lookup (op =) args force_sosK
|> Option.map (curry (op <>) "false")
@@ -480,14 +483,15 @@
minimizer has a chance to do its magic *)
val hard_timeout = SOME (2 * timeout)
val (msg, result) =
- run_sh prover_name prover type_enc sound max_relevant slice lam_trans
- e_weight_method force_sos hard_timeout timeout dir pos st
+ run_sh prover_name prover type_enc strict max_relevant slice lam_trans
+ uncurried_aliases e_weight_method force_sos hard_timeout timeout dir pos
+ st
in
case result of
SH_OK (time_isa, time_prover, names) =>
let
- fun get_thms (name, loc) =
- SOME ((name, loc), thms_of_name (Proof.context_of st) name)
+ fun get_thms (name, stature) =
+ SOME ((name, stature), thms_of_name (Proof.context_of st) name)
in
change_data id inc_sh_success;
if trivial then () else change_data id inc_sh_nontriv_success;
@@ -517,7 +521,7 @@
val n0 = length (these (!named_thms))
val (prover_name, _) = get_prover ctxt args
val type_enc = AList.lookup (op =) args type_encK |> the_default "smart"
- val sound = AList.lookup (op =) args soundK |> the_default "false"
+ val strict = AList.lookup (op =) args strictK |> the_default "false"
val timeout =
AList.lookup (op =) args minimize_timeoutK
|> Option.map (fst o read_int o raw_explode) (* FIXME Symbol.explode (?) *)
@@ -526,7 +530,7 @@
[("provers", prover_name),
("verbose", "true"),
("type_enc", type_enc),
- ("sound", sound),
+ ("strict", strict),
("timeout", string_of_int timeout),
("preplay_timeout", preplay_timeout)]
val minimize =
@@ -554,7 +558,7 @@
[("provers", prover),
("max_relevant", "0"),
("type_enc", type_enc),
- ("sound", "true"),
+ ("strict", "true"),
("slice", "false"),
("timeout", timeout |> Time.toSeconds |> string_of_int)]
@@ -577,16 +581,17 @@
relevance_override
in
if !reconstructor = "sledgehammer_tac" then
- sledge_tac 0.2 ATP_Systems.z3_tptpN "mono_simple"
+ sledge_tac 0.2 ATP_Systems.z3_tptpN "mono_native"
ORELSE' sledge_tac 0.2 ATP_Systems.eN "mono_guards??"
ORELSE' sledge_tac 0.2 ATP_Systems.vampireN "mono_guards??"
ORELSE' sledge_tac 0.2 ATP_Systems.spassN "poly_tags"
- ORELSE' Metis_Tactic.metis_tac [] ATP_Translate.combinatorsN ctxt thms
+ ORELSE' Metis_Tactic.metis_tac [] ATP_Problem_Generate.combsN
+ ctxt thms
else if !reconstructor = "smt" then
SMT_Solver.smt_tac ctxt thms
else if full then
- Metis_Tactic.metis_tac [ATP_Reconstruct.full_typesN]
- ATP_Reconstruct.metis_default_lam_trans ctxt thms
+ Metis_Tactic.metis_tac [ATP_Proof_Reconstruct.full_typesN]
+ ATP_Proof_Reconstruct.metis_default_lam_trans ctxt thms
else if String.isPrefix "metis (" (!reconstructor) then
let
val (type_encs, lam_trans) =
@@ -594,11 +599,11 @@
|> Outer_Syntax.scan Position.start
|> filter Token.is_proper |> tl
|> Metis_Tactic.parse_metis_options |> fst
- |>> the_default [ATP_Reconstruct.partial_typesN]
- ||> the_default ATP_Reconstruct.metis_default_lam_trans
+ |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
+ ||> the_default ATP_Proof_Reconstruct.metis_default_lam_trans
in Metis_Tactic.metis_tac type_encs lam_trans ctxt thms end
else if !reconstructor = "metis" then
- Metis_Tactic.metis_tac [] ATP_Reconstruct.metis_default_lam_trans ctxt
+ Metis_Tactic.metis_tac [] ATP_Proof_Reconstruct.metis_default_lam_trans ctxt
thms
else
K all_tac
@@ -653,7 +658,7 @@
let
val reconstructor = Unsynchronized.ref ""
val named_thms =
- Unsynchronized.ref (NONE : ((string * locality) * thm list) list option)
+ Unsynchronized.ref (NONE : ((string * stature) * thm list) list option)
val minimize = AList.defined (op =) args minimizeK
val metis_ft = AList.defined (op =) args metis_ftK
val trivial =
--- a/src/HOL/Mutabelle/lib/Tools/mutabelle Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Mutabelle/lib/Tools/mutabelle Tue Feb 14 11:16:07 2012 +0100
@@ -16,6 +16,7 @@
echo " -L LOGIC parent logic to use (default $MUTABELLE_LOGIC)"
echo " -T THEORY parent theory to use (default $MUTABELLE_IMPORT_THEORY)"
echo " -O DIR output directory for test data (default $MUTABELLE_OUTPUT_PATH)"
+ echo " -N NUMBER number of lemmas to choose randomly, if not given all lemmas are chosen"
echo
echo " THEORY is the name of the theory of which all theorems should be"
echo " mutated and tested."
@@ -30,7 +31,7 @@
MUTABELLE_IMPORTS=""
-while getopts "L:T:O:" OPT
+while getopts "L:T:O:N:" OPT
do
case "$OPT" in
L)
@@ -42,6 +43,9 @@
O)
MUTABELLE_OUTPUT_PATH="$OPTARG"
;;
+ N)
+ NUMBER_OF_LEMMAS="$OPTARG"
+ ;;
\?)
usage
;;
@@ -66,6 +70,9 @@
export MUTABELLE_OUTPUT_PATH
+if [ "$NUMBER_OF_LEMMAS" != "" ]; then
+ MUTABELLE_FILTER="|> MutabelleExtra.take_random $NUMBER_OF_LEMMAS"
+fi
## main
@@ -104,7 +111,7 @@
ML {*
fun mutation_testing_of thy =
(MutabelleExtra.random_seed := 1.0;
- MutabelleExtra.thms_of false thy
+ MutabelleExtra.thms_of false thy $MUTABELLE_FILTER
|> (fn thms => MutabelleExtra.mutate_theorems_and_write_report
@{theory} mtds thms (\"$MUTABELLE_OUTPUT_PATH/log\")))
*}
--- a/src/HOL/Mutabelle/mutabelle.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Mutabelle/mutabelle.ML Tue Feb 14 11:16:07 2012 +0100
@@ -13,42 +13,7 @@
val mutate_sign : term -> theory -> (string * string) list -> int -> term list
val mutate_mix : term -> theory -> string list ->
(string * string) list -> int -> term list
-(* val qc_test : term list -> (typ * typ) list -> theory ->
- int -> int -> int * Thm.cterm list * int * (Thm.cterm * (string * Thm.cterm) list) list
- val qc_test_file : bool -> term list -> (typ * typ) list
- -> theory -> int -> int -> string -> unit
- val mutqc_file_exc : theory -> string list ->
- int -> Thm.thm -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_file_sign : theory -> (string * string) list ->
- int -> Thm.thm -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_file_mix : theory -> string list -> (string * string) list ->
- int -> Thm.thm -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_file_rec_exc : theory -> string list -> int ->
- Thm.thm list -> (typ * typ) list -> int -> int -> string list -> unit
- val mutqc_file_rec_sign : theory -> (string * string) list -> int ->
- Thm.thm list -> (typ * typ) list -> int -> int -> string list -> unit
- val mutqc_file_rec_mix : theory -> string list -> (string * string) list ->
- int -> Thm.thm list -> (typ * typ) list -> int -> int -> string list -> unit
- val mutqc_thy_exc : theory -> theory ->
- string list -> int -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_thy_sign : theory -> theory -> (string * string) list ->
- int -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_thy_mix : theory -> theory -> string list -> (string * string) list ->
- int -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_file_stat_sign : theory -> (string * string) list ->
- int -> Thm.thm list -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_file_stat_exc : theory -> string list ->
- int -> Thm.thm list -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_file_stat_mix : theory -> string list -> (string * string) list ->
- int -> Thm.thm list -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_thystat_exc : (string -> thm -> bool) -> theory -> theory ->
- string list -> int -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_thystat_sign : (string -> thm -> bool) -> theory -> theory -> (string * string) list ->
- int -> (typ * typ) list -> int -> int -> string -> unit
- val mutqc_thystat_mix : (string -> thm -> bool) -> theory -> theory -> string list ->
- (string * string) list -> int -> (typ * typ) list -> int -> int -> string -> unit
- val canonize_term: term -> string list -> term
-*)
+
val all_unconcealed_thms_of : theory -> (string * thm) list
end;
@@ -163,15 +128,6 @@
end;
-
-
-
-(*tests if the given element ist in the given list*)
-
-fun in_list elem [] = false
- | in_list elem (x::xs) = if (elem = x) then true else in_list elem xs;
-
-
(*Evaluate if the longContext is more special as the shortContext.
If so, a term with shortContext can be substituted in the place of a
term with longContext*)
@@ -338,9 +294,7 @@
let
val Const(name,_) = (getSubTerm origTerm (rev opcomm))
in
- if (in_list name commutatives)
- then true
- else false
+ member (op =) commutatives name
end
else false
end
@@ -469,13 +423,7 @@
fun mutate_mix origTerm tsig commutatives forbidden_funs iter =
mutate 2 origTerm tsig commutatives forbidden_funs iter;
-
-(*helper function in order to prettily print a list of terms*)
-
-fun pretty xs = map (fn (id, t) => (id, (HOLogic.mk_number HOLogic.natT
- (HOLogic.dest_nat t)) handle TERM _ => t)) xs;
-
-
+
(*helper function for the quickcheck invocation. Evaluates the quickcheck_term function on a whole list of terms
and tries to print the exceptions*)
@@ -497,247 +445,11 @@
|> Config.put Quickcheck.size 1
|> Config.put Quickcheck.iterations 1
val test = Quickcheck_Common.test_term
- ("exhaustive", Exhaustive_Generators.compile_generator_expr) ctxt' false
+ ("exhaustive", ((fn _ => raise (Fail "")), Exhaustive_Generators.compile_generator_expr)) ctxt' false
in
case try test (preprocess thy insts (prop_of th), []) of
SOME _ => (Output.urgent_message "executable"; true)
| NONE => (Output.urgent_message ("not executable"); false)
- end;
-(*
-(*create a string of a list of terms*)
-
-fun string_of_ctermlist thy [] acc = acc
- | string_of_ctermlist thy (x::xs) acc =
- string_of_ctermlist thy xs ((Syntax.string_of_term_global thy (term_of x)) ^ "\n" ^ acc);
-
-(*helper function in order to decompose the counter examples generated by quickcheck*)
-
-fun decompose_ce thy [] acc = acc
- | decompose_ce thy ((varname,varce)::xs) acc =
- decompose_ce thy xs ("\t" ^ varname ^ " instanciated to " ^
- (Syntax.string_of_term_global thy (term_of varce)) ^ "\n" ^ acc);
-
-(*helper function in order to decompose a list of counter examples*)
-
-fun decompose_celist thy [] acc = acc
- | decompose_celist thy ((mutTerm,varcelist)::xs) acc = decompose_celist thy xs
- ("mutated term : " ^
- (Syntax.string_of_term_global thy (term_of mutTerm)) ^ "\n" ^
- "counterexample : \n" ^
- (decompose_ce thy (rev varcelist) "") ^ acc);
-
-
-(*quickcheck test the list of mutants mutated by applying the type instantiations
-insts and using the quickcheck-theory usedthy. The results of quickcheck are stored
-in the file with name filename. If app is true, the output will be appended to the file.
-Else it will be overwritten. *)
-
-fun qc_test_file app mutated insts usedthy sz qciter filename =
- let
- val statisticList = qc_test mutated insts usedthy sz qciter
- val passed = (string_of_int (#1 statisticList)) ^
- " terms passed the quickchecktest: \n\n" ^
- (string_of_ctermlist usedthy (rev (#2 statisticList)) "")
- val counterexps = (string_of_int (#3 statisticList)) ^
- " terms produced a counterexample: \n\n" ^
- decompose_celist usedthy (rev (#4 statisticList)) ""
- in
- if (app = false)
- then File.write (Path.explode filename) (passed ^ "\n\n" ^ counterexps)
- else File.append (Path.explode filename) (passed ^ "\n\n" ^ counterexps)
- end;
-
-
-(*mutate sourceThm with the mutate-version given in option and check the resulting mutants.
-The output will be written to the file with name filename*)
-
-fun mutqc_file option usedthy commutatives forbidden_funs iter sourceThm insts sz qciter filename =
- let
- val mutated = mutate option (prop_of sourceThm)
- usedthy commutatives forbidden_funs iter
- in
- qc_test_file false mutated insts usedthy sz qciter filename
- end;
-
-(*exchange version of function mutqc_file*)
-
-fun mutqc_file_exc usedthy commutatives iter sourceThm insts sz qciter filename =
- mutqc_file 0 usedthy commutatives [] iter sourceThm insts sz qciter filename;
-
-
-(*sinature version of function mutqc_file*)
-fun mutqc_file_sign usedthy forbidden_funs iter sourceThm insts sz qciter filename=
- mutqc_file 1 usedthy [] forbidden_funs iter sourceThm insts sz qciter filename;
-
-(*mixed version of function mutqc_file*)
-
-fun mutqc_file_mix usedthy commutatives forbidden_funs iter sourceThm insts sz qciter filename =
- mutqc_file 2 usedthy commutatives forbidden_funs iter sourceThm insts sz qciter filename;
-
-
-
-(*apply function mutqc_file on a list of theorems. The output for each theorem
-will be stored in a seperated file whose filename must be indicated in a list*)
-
-fun mutqc_file_rec option usedthy commutatives forbFuns iter [] insts sz qciter _ = ()
- | mutqc_file_rec option usedthy commutatives forbFuns iter sourceThms insts sz qciter [] =
- raise WrongArg ("Not enough files for the output of qc_test_file_rec!")
- | mutqc_file_rec option usedthy commutatives forbFuns iter (x::xs) insts sz qciter (y::ys) =
- (mutqc_file option usedthy commutatives forbFuns iter x insts sz qciter y ;
- mutqc_file_rec option usedthy commutatives forbFuns iter xs insts sz qciter ys);
-
-
-(*exchange version of function mutqc_file_rec*)
-
-fun mutqc_file_rec_exc usedthy commutatives iter sourceThms insts sz qciter files =
- mutqc_file_rec 0 usedthy commutatives [] iter sourceThms insts sz qciter files;
-
-(*signature version of function mutqc_file_rec*)
-
-fun mutqc_file_rec_sign usedthy forbidden_funs iter sourceThms insts sz qciter files =
- mutqc_file_rec 1 usedthy [] forbidden_funs iter sourceThms insts sz qciter files;
-
-(*mixed version of function mutqc_file_rec*)
-
-fun mutqc_file_rec_mix usedthy commutatives forbidden_funs iter sourceThms insts sz qciter files =
- mutqc_file_rec 2 usedthy commutatives forbidden_funs iter sourceThms insts sz qciter files;
-
-(*create the appropriate number of spaces in order to properly print a table*)
-
-fun create_spaces entry spacenum =
- let
- val diff = spacenum - (size entry)
- in
- if (diff > 0)
- then implode (replicate diff " ")
- else ""
- end;
-
-
-(*create a statistic of the output of a quickcheck test on the passed thmlist*)
-
-fun mutqc_file_stat option usedthy commutatives forbidden_funs iter thmlist insts sz qciter filename =
- let
- fun mutthmrec [] = ()
- | mutthmrec (x::xs) =
- let
- val mutated = mutate option (prop_of x) usedthy
- commutatives forbidden_funs iter
- val (passednum,_,cenum,_) = qc_test mutated insts usedthy sz qciter
- val thmname = "\ntheorem " ^ Thm.get_name_hint x ^ ":"
- val pnumstring = string_of_int passednum
- val cenumstring = string_of_int cenum
- in
- (File.append (Path.explode filename)
- (thmname ^ (create_spaces thmname 50) ^
- pnumstring ^ (create_spaces pnumstring 20) ^
- cenumstring ^ (create_spaces cenumstring 20) ^ "\n");
- mutthmrec xs)
- end;
- in
- (File.write (Path.explode filename)
- ("\n\ntheorem name" ^ (create_spaces "theorem name" 50) ^
- "passed mutants" ^ (create_spaces "passed mutants" 20) ^
- "counter examples\n\n" );
- mutthmrec thmlist)
- end;
-
-(*signature version of function mutqc_file_stat*)
-
-fun mutqc_file_stat_sign usedthy forbidden_funs iter thmlist insts sz qciter filename =
- mutqc_file_stat 1 usedthy [] forbidden_funs iter thmlist insts sz qciter filename;
-
-(*exchange version of function mutqc_file_stat*)
-
-fun mutqc_file_stat_exc usedthy commutatives iter thmlist insts sz qciter filename =
- mutqc_file_stat 0 usedthy commutatives [] iter thmlist insts sz qciter filename;
-
-(*mixed version of function mutqc_file_stat*)
-
-fun mutqc_file_stat_mix usedthy commutatives forbidden_funs iter thmlist insts sz qciter filename =
- mutqc_file_stat 2 usedthy commutatives forbidden_funs iter thmlist insts sz qciter filename;
-
-(*mutate and quickcheck-test all the theorems contained in the passed theory.
-The output will be stored in a single file*)
-
-fun mutqc_thy option mutthy usedthy commutatives forbidden_funs iter insts sz qciter filename =
- let
- val thmlist = filter (is_executable mutthy insts o snd) (thms_of mutthy);
- fun mutthmrec [] = ()
- | mutthmrec ((name,thm)::xs) =
- let
- val mutated = mutate option (prop_of thm)
- usedthy commutatives forbidden_funs iter
- in
- (File.append (Path.explode filename)
- ("--------------------------\n\nQuickchecktest of theorem " ^ name ^ ":\n\n");
- qc_test_file true mutated insts usedthy sz qciter filename;
- mutthmrec xs)
- end;
- in
- mutthmrec thmlist
- end;
-
-(*exchange version of function mutqc_thy*)
-
-fun mutqc_thy_exc mutthy usedthy commutatives iter insts sz qciter filename =
- mutqc_thy 0 mutthy usedthy commutatives [] iter insts sz qciter filename;
-
-(*signature version of function mutqc_thy*)
-
-fun mutqc_thy_sign mutthy usedthy forbidden_funs iter insts sz qciter filename =
- mutqc_thy 1 mutthy usedthy [] forbidden_funs iter insts sz qciter filename;
-
-(*mixed version of function mutqc_thy*)
-
-fun mutqc_thy_mix mutthy usedthy commutatives forbidden_funs iter insts sz qciter filename =
- mutqc_thy 2 mutthy usedthy commutatives forbidden_funs iter insts sz qciter filename;
-
-(*create a statistic representation of the call of function mutqc_thy*)
-
-fun mutqc_thystat option p mutthy usedthy commutatives forbidden_funs iter insts sz qciter filename =
- let
- val thmlist = filter
- (fn (s, th) => not (p s th) andalso (Output.urgent_message s; is_executable mutthy insts th)) (thms_of mutthy)
- fun mutthmrec [] = ()
- | mutthmrec ((name,thm)::xs) =
- let
- val _ = Output.urgent_message ("mutthmrec: " ^ name);
- val mutated = mutate option (prop_of thm) usedthy
- commutatives forbidden_funs iter
- val (passednum,_,cenum,_) = qc_test mutated insts usedthy sz qciter
- val thmname = "\ntheorem " ^ name ^ ":"
- val pnumstring = string_of_int passednum
- val cenumstring = string_of_int cenum
- in
- (File.append (Path.explode filename)
- (thmname ^ (create_spaces thmname 50) ^
- pnumstring ^ (create_spaces pnumstring 20) ^
- cenumstring ^ (create_spaces cenumstring 20) ^ "\n");
- mutthmrec xs)
- end;
- in
- (File.write (Path.explode filename) ("Result of the quickcheck-test of theory " ^
- ":\n\ntheorem name" ^ (create_spaces "theorem name" 50) ^
- "passed mutants" ^ (create_spaces "passed mutants" 20) ^
- "counter examples\n\n" );
- mutthmrec thmlist)
- end;
-
-(*exchange version of function mutqc_thystat*)
-
-fun mutqc_thystat_exc p mutthy usedthy commutatives iter insts sz qciter filename =
- mutqc_thystat 0 p mutthy usedthy commutatives [] iter insts sz qciter filename;
-
-(*signature version of function mutqc_thystat*)
-
-fun mutqc_thystat_sign p mutthy usedthy forbidden_funs iter insts sz qciter filename =
- mutqc_thystat 1 p mutthy usedthy [] forbidden_funs iter insts sz qciter filename;
-
-(*mixed version of function mutqc_thystat*)
-
-fun mutqc_thystat_mix p mutthy usedthy commutatives forbidden_funs iter insts sz qciter filename =
- mutqc_thystat 2 p mutthy usedthy commutatives forbidden_funs iter insts sz qciter filename;
-*)
+ end;
end
--- a/src/HOL/Mutabelle/mutabelle_extra.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Mutabelle/mutabelle_extra.ML Tue Feb 14 11:16:07 2012 +0100
@@ -51,11 +51,11 @@
(* mutation options *)
-(*val max_mutants = 4
-val num_mutations = 1*)
+val max_mutants = 4
+val num_mutations = 1
(* soundness check: *)
-val max_mutants = 10
-val num_mutations = 1
+(*val max_mutants = 10
+val num_mutations = 1*)
(* quickcheck options *)
(*val quickcheck_generator = "SML"*)
@@ -121,7 +121,7 @@
(fn _ =>
let
val ctxt' = change_options (Proof_Context.init_global thy)
- val [result] = case Quickcheck.active_testers ctxt' of
+ val (result :: _) = case Quickcheck.active_testers ctxt' of
[] => error "No active testers for quickcheck"
| [tester] => tester ctxt' false [] [(t, [])]
| _ => error "Multiple active testers for quickcheck"
@@ -241,7 +241,8 @@
"nibble"]
val forbidden_consts =
- [@{const_name nibble_pair_of_char}, @{const_name "TYPE"}]
+ [@{const_name nibble_pair_of_char}, @{const_name "TYPE"},
+ @{const_name Datatype.dsum}, @{const_name Datatype.usum}]
fun is_forbidden_theorem (s, th) =
let val consts = Term.add_const_names (prop_of th) [] in
@@ -258,6 +259,7 @@
["HOL.induct_equal",
"HOL.induct_implies",
"HOL.induct_conj",
+ "HOL.induct_forall",
@{const_name undefined},
@{const_name default},
@{const_name dummy_pattern},
@@ -274,9 +276,13 @@
@{const_name "ord_fun_inst.less_fun"},
@{const_name Meson.skolem},
@{const_name ATP.fequal},
+ @{const_name ATP.fEx},
@{const_name transfer_morphism},
@{const_name enum_prod_inst.enum_all_prod},
- @{const_name enum_prod_inst.enum_ex_prod}
+ @{const_name enum_prod_inst.enum_ex_prod},
+ @{const_name Quickcheck.catch_match},
+ @{const_name Quickcheck_Exhaustive.unknown},
+ @{const_name Int.Bit0}, @{const_name Int.Bit1}
(*@{const_name "==>"}, @{const_name "=="}*)]
val forbidden_mutant_consts =
@@ -298,7 +304,8 @@
(@{const_name "GCD.gcd_class.lcm"}, @{typ "prop => prop => prop"}),
(@{const_name "Orderings.bot_class.bot"}, @{typ "bool => prop"}),
(@{const_name "Groups.one_class.one"}, @{typ "bool => prop"}),
- (@{const_name "Groups.zero_class.zero"},@{typ "bool => prop"})]
+ (@{const_name "Groups.zero_class.zero"},@{typ "bool => prop"}),
+ (@{const_name "equal_class.equal"},@{typ "bool => bool => bool"})]
fun is_forbidden_mutant t =
let
--- a/src/HOL/Nat.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Nat.thy Tue Feb 14 11:16:07 2012 +0100
@@ -1616,6 +1616,17 @@
lemma Suc_diff_Suc: "n < m \<Longrightarrow> Suc (m - Suc n) = m - n"
by simp
+(*The others are
+ i - j - k = i - (j + k),
+ k \<le> j ==> j - k + i = j + i - k,
+ k \<le> j ==> i + (j - k) = i + j - k *)
+lemmas add_diff_assoc = diff_add_assoc [symmetric]
+lemmas add_diff_assoc2 = diff_add_assoc2[symmetric]
+declare diff_diff_left [simp] add_diff_assoc [simp] add_diff_assoc2[simp]
+
+text{*At present we prove no analogue of @{text not_less_Least} or @{text
+Least_Suc}, since there appears to be no need.*}
+
text{*Lemmas for ex/Factorization*}
lemma one_less_mult: "[| Suc 0 < n; Suc 0 < m |] ==> Suc 0 < m*n"
@@ -1669,18 +1680,13 @@
lemma zero_induct: "P k ==> (!!n. P (Suc n) ==> P n) ==> P 0"
using inc_induct[of 0 k P] by blast
-(*The others are
- i - j - k = i - (j + k),
- k \<le> j ==> j - k + i = j + i - k,
- k \<le> j ==> i + (j - k) = i + j - k *)
-lemmas add_diff_assoc = diff_add_assoc [symmetric]
-lemmas add_diff_assoc2 = diff_add_assoc2[symmetric]
-declare diff_diff_left [simp] add_diff_assoc [simp] add_diff_assoc2[simp]
+text {* Further induction rule similar to @{thm inc_induct} *}
-text{*At present we prove no analogue of @{text not_less_Least} or @{text
-Least_Suc}, since there appears to be no need.*}
+lemma dec_induct[consumes 1, case_names base step]:
+ "i \<le> j \<Longrightarrow> P i \<Longrightarrow> (\<And>n. i \<le> n \<Longrightarrow> P n \<Longrightarrow> P (Suc n)) \<Longrightarrow> P j"
+ by (induct j arbitrary: i) (auto simp: le_Suc_eq)
-
+
subsection {* The divides relation on @{typ nat} *}
lemma dvd_1_left [iff]: "Suc 0 dvd k"
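A minimal usage sketch for the new dec_induct rule above (not part of the changeset; the lemma, its proof method and the auxiliary fact le_SucI are only assumed to work in a theory based on Nat):

    lemma
      assumes "i \<le> j" shows "i + k \<le> j + (k::nat)"
      using assms by (induct rule: dec_induct) (simp_all add: le_SucI)

The base case is i + k \<le> i + k; the step case extends the bound from n to Suc n, which le_SucI discharges.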
--- a/src/HOL/Nitpick.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Nitpick.thy Tue Feb 14 11:16:07 2012 +0100
@@ -24,10 +24,8 @@
("Tools/Nitpick/nitpick.ML")
("Tools/Nitpick/nitpick_isar.ML")
("Tools/Nitpick/nitpick_tests.ML")
- ("Tools/Nitpick/nitrox.ML")
begin
-typedecl iota (* for Nitrox *)
typedecl bisim_iterator
axiomatization unknown :: 'a
@@ -224,7 +222,6 @@
use "Tools/Nitpick/nitpick.ML"
use "Tools/Nitpick/nitpick_isar.ML"
use "Tools/Nitpick/nitpick_tests.ML"
-use "Tools/Nitpick/nitrox.ML"
setup {*
Nitpick_Isar.setup #>
@@ -240,8 +237,7 @@
fold_graph' nat_gcd nat_lcm int_gcd int_lcm Frac Abs_Frac Rep_Frac zero_frac
one_frac num denom norm_frac frac plus_frac times_frac uminus_frac
number_of_frac inverse_frac less_frac less_eq_frac of_frac
-hide_type (open) iota bisim_iterator fun_box pair_box unsigned_bit signed_bit
- word
+hide_type (open) bisim_iterator fun_box pair_box unsigned_bit signed_bit word
hide_fact (open) Ex1_unfold rtrancl_unfold rtranclp_unfold tranclp_unfold
prod_def refl'_def wf'_def card'_def setsum'_def
fold_graph'_def The_psimp Eps_psimp unit_case_unfold nat_case_unfold
--- a/src/HOL/Nominal/Examples/Standardization.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Nominal/Examples/Standardization.thy Tue Feb 14 11:16:07 2012 +0100
@@ -424,6 +424,7 @@
declare listrel_mono [mono_set]
lemma listrelp_eqvt [eqvt]:
+ fixes f :: "'a::pt_name \<Rightarrow> 'b::pt_name \<Rightarrow> bool"
assumes xy: "listrelp f (x::'a::pt_name list) y"
shows "listrelp ((pi::name prm) \<bullet> f) (pi \<bullet> x) (pi \<bullet> y)" using xy
by induct (simp_all add: listrelp.intros perm_app [symmetric])
--- a/src/HOL/Quickcheck.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Quickcheck.thy Tue Feb 14 11:16:07 2012 +0100
@@ -129,6 +129,38 @@
"beyond k 0 = 0"
by (simp add: beyond_def)
+
+definition (in term_syntax) [code_unfold]: "valterm_emptyset = Code_Evaluation.valtermify ({} :: ('a :: typerep) set)"
+definition (in term_syntax) [code_unfold]: "valtermify_insert x s = Code_Evaluation.valtermify insert {\<cdot>} (x :: ('a :: typerep * _)) {\<cdot>} s"
+
+instantiation set :: (random) random
+begin
+
+primrec random_aux_set
+where
+ "random_aux_set 0 j = collapse (Random.select_weight [(1, Pair valterm_emptyset)])"
+| "random_aux_set (Suc_code_numeral i) j = collapse (Random.select_weight [(1, Pair valterm_emptyset), (Suc_code_numeral i, random j \<circ>\<rightarrow> (%x. random_aux_set i j \<circ>\<rightarrow> (%s. Pair (valtermify_insert x s))))])"
+
+lemma [code]:
+ "random_aux_set i j = collapse (Random.select_weight [(1, Pair valterm_emptyset), (i, random j \<circ>\<rightarrow> (%x. random_aux_set (i - 1) j \<circ>\<rightarrow> (%s. Pair (valtermify_insert x s))))])"
+proof (induct i rule: code_numeral.induct)

+ case zero
+ show ?case by (subst select_weight_drop_zero[symmetric])
+ (simp add: filter.simps random_aux_set.simps[simplified])
+next
+ case (Suc_code_numeral i)
+ show ?case by (simp only: random_aux_set.simps(2)[of "i"] Suc_code_numeral_minus_one)
+qed
+
+definition random_set
+where
+ "random_set i = random_aux_set i i"
+
+instance ..
+
+end
+
lemma random_aux_rec:
fixes random_aux :: "code_numeral \<Rightarrow> 'a"
assumes "random_aux 0 = rhs 0"
--- a/src/HOL/Quickcheck_Exhaustive.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Quickcheck_Exhaustive.thy Tue Feb 14 11:16:07 2012 +0100
@@ -119,17 +119,13 @@
end
+definition (in term_syntax) [code_unfold]: "valtermify_pair x y = Code_Evaluation.valtermify (Pair :: 'a :: typerep => 'b :: typerep => 'a * 'b) {\<cdot>} x {\<cdot>} y"
+
instantiation prod :: (full_exhaustive, full_exhaustive) full_exhaustive
begin
definition
- "full_exhaustive f d = full_exhaustive (%(x, t1). full_exhaustive (%(y, t2). f ((x, y),
- %u. let T1 = (Typerep.typerep (TYPE('a)));
- T2 = (Typerep.typerep (TYPE('b)))
- in Code_Evaluation.App (Code_Evaluation.App (
- Code_Evaluation.Const (STR ''Product_Type.Pair'')
- (Typerep.Typerep (STR ''fun'') [T1, Typerep.Typerep (STR ''fun'') [T2, Typerep.Typerep (STR ''Product_Type.prod'') [T1, T2]]]))
- (t1 ())) (t2 ()))) d) d"
+ "full_exhaustive f d = full_exhaustive (%x. full_exhaustive (%y. f (valtermify_pair x y)) d) d"
instance ..
@@ -140,15 +136,12 @@
fun exhaustive_set
where
- "exhaustive_set f i = (if i = 0 then None else (f {} orelse exhaustive_set (%A. f A orelse Quickcheck_Exhaustive.exhaustive (%x. if x \<in> A then None else f (insert x A)) (i - 1)) (i - 1)))"
+ "exhaustive_set f i = (if i = 0 then None else (f {} orelse exhaustive_set (%A. f A orelse exhaustive (%x. if x \<in> A then None else f (insert x A)) (i - 1)) (i - 1)))"
instance ..
end
-definition (in term_syntax) "valterm_emptyset = Code_Evaluation.valtermify ({} :: ('a :: typerep) set)"
-definition (in term_syntax) "valtermify_insert x s = Code_Evaluation.valtermify insert {\<cdot>} (x :: ('a :: typerep * _)) {\<cdot>} s"
-
instantiation set :: (full_exhaustive) full_exhaustive
begin
@@ -160,8 +153,6 @@
end
-hide_const valterm_emptyset valtermify_insert
-
instantiation "fun" :: ("{equal, exhaustive}", exhaustive) exhaustive
begin
@@ -180,22 +171,19 @@
end
+definition [code_unfold]: "valtermify_absdummy = (%(v, t). (%_::'a. v, %u::unit. Code_Evaluation.Abs (STR ''x'') (Typerep.typerep TYPE('a::typerep)) (t ())))"
+
+definition (in term_syntax) [code_unfold]: "valtermify_fun_upd g a b = Code_Evaluation.valtermify (fun_upd :: ('a :: typerep => 'b :: typerep) => 'a => 'b => 'a => 'b) {\<cdot>} g {\<cdot>} a {\<cdot>} b"
+
instantiation "fun" :: ("{equal, full_exhaustive}", full_exhaustive) full_exhaustive
begin
fun full_exhaustive_fun' :: "(('a => 'b) * (unit => term) => (bool * term list) option) => code_numeral => code_numeral => (bool * term list) option"
where
- "full_exhaustive_fun' f i d = (full_exhaustive (%(b, t). f (%_. b, %_. Code_Evaluation.Abs (STR ''x'') (Typerep.typerep TYPE('a)) (t ()))) d)
+ "full_exhaustive_fun' f i d = (full_exhaustive (%v. f (valtermify_absdummy v)) d)
orelse (if i > 1 then
- full_exhaustive_fun' (%(g, gt). full_exhaustive (%(a, at). full_exhaustive (%(b, bt).
- f (g(a := b),
- (%_. let A = (Typerep.typerep (TYPE('a)));
- B = (Typerep.typerep (TYPE('b)));
- fun = (%T U. Typerep.Typerep (STR ''fun'') [T, U])
- in
- Code_Evaluation.App (Code_Evaluation.App (Code_Evaluation.App
- (Code_Evaluation.Const (STR ''Fun.fun_upd'') (fun (fun A B) (fun A (fun B (fun A B)))))
- (gt ())) (at ())) (bt ())))) d) d) (i - 1) d else None)"
+ full_exhaustive_fun' (%g. full_exhaustive (%a. full_exhaustive (%b.
+ f (valtermify_fun_upd g a b)) d) d) (i - 1) d else None)"
definition full_exhaustive_fun :: "(('a => 'b) * (unit => term) => (bool * term list) option) => code_numeral => (bool * term list) option"
where
@@ -216,6 +204,8 @@
"check_all_n_lists f n =
(if n = 0 then f ([], (%_. [])) else check_all (%(x, xt). check_all_n_lists (%(xs, xst). f ((x # xs), (%_. (xt () # xst ())))) (n - 1)))"
+definition (in term_syntax) [code_unfold]: "termify_fun_upd g a b = (Code_Evaluation.termify (fun_upd :: ('a :: typerep => 'b :: typerep) => 'a => 'b => 'a => 'b) <\<cdot>> g <\<cdot>> a <\<cdot>> b)"
+
definition mk_map_term :: " (unit \<Rightarrow> typerep) \<Rightarrow> (unit \<Rightarrow> typerep) \<Rightarrow> (unit \<Rightarrow> term list) \<Rightarrow> (unit \<Rightarrow> term list) \<Rightarrow> unit \<Rightarrow> term"
where
"mk_map_term T1 T2 domm rng =
@@ -252,6 +242,33 @@
end
+fun (in term_syntax) check_all_subsets :: "(('a :: typerep) set * (unit => term) => (bool * term list) option) => ('a * (unit => term)) list => (bool * term list) option"
+where
+ "check_all_subsets f [] = f valterm_emptyset"
+| "check_all_subsets f (x # xs) = check_all_subsets (%s. case f s of Some ts => Some ts | None => f (valtermify_insert x s)) xs"
+
+
+definition (in term_syntax) [code_unfold]: "term_emptyset = Code_Evaluation.termify ({} :: ('a :: typerep) set)"
+definition (in term_syntax) [code_unfold]: "termify_insert x s = Code_Evaluation.termify (insert :: ('a::typerep) => 'a set => 'a set) <\<cdot>> x <\<cdot>> s"
+
+definition (in term_syntax) setify :: "('a::typerep) itself => term list => term"
+where
+ "setify T ts = foldr (termify_insert T) ts (term_emptyset T)"
+
+instantiation set :: (check_all) check_all
+begin
+
+definition
+ "check_all_set f =
+ check_all_subsets f (zip (Enum.enum :: 'a list) (map (%a. %u :: unit. a) (Quickcheck_Exhaustive.enum_term_of (TYPE ('a)) ())))"
+
+definition enum_term_of_set :: "'a set itself => unit => term list"
+where
+ "enum_term_of_set _ _ = map (setify (TYPE('a))) (sublists (Quickcheck_Exhaustive.enum_term_of (TYPE('a)) ()))"
+
+instance ..
+
+end
instantiation unit :: check_all
begin
@@ -282,48 +299,31 @@
end
+definition (in term_syntax) [code_unfold]: "termify_pair x y = Code_Evaluation.termify (Pair :: 'a :: typerep => 'b :: typerep => 'a * 'b) <\<cdot>> x <\<cdot>> y"
instantiation prod :: (check_all, check_all) check_all
begin
definition
- "check_all f = check_all (%(x, t1). check_all (%(y, t2). f ((x, y),
- %u. let T1 = (Typerep.typerep (TYPE('a)));
- T2 = (Typerep.typerep (TYPE('b)))
- in Code_Evaluation.App (Code_Evaluation.App (
- Code_Evaluation.Const (STR ''Product_Type.Pair'')
- (Typerep.Typerep (STR ''fun'') [T1, Typerep.Typerep (STR ''fun'') [T2, Typerep.Typerep (STR ''Product_Type.prod'') [T1, T2]]]))
- (t1 ())) (t2 ()))))"
+ "check_all f = check_all (%x. check_all (%y. f (valtermify_pair x y)))"
definition enum_term_of_prod :: "('a * 'b) itself => unit => term list"
where
- "enum_term_of_prod = (%_ _. map (%(x, y).
- let T1 = (Typerep.typerep (TYPE('a)));
- T2 = (Typerep.typerep (TYPE('b)))
- in Code_Evaluation.App (Code_Evaluation.App (
- Code_Evaluation.Const (STR ''Product_Type.Pair'')
- (Typerep.Typerep (STR ''fun'') [T1, Typerep.Typerep (STR ''fun'') [T2, Typerep.Typerep (STR ''Product_Type.prod'') [T1, T2]]])) x) y)
- (Enum.product (enum_term_of (TYPE('a)) ()) (enum_term_of (TYPE('b)) ()))) "
+ "enum_term_of_prod = (%_ _. map (%(x, y). termify_pair TYPE('a) TYPE('b) x y)
+ (Enum.product (enum_term_of (TYPE('a)) ()) (enum_term_of (TYPE('b)) ())))"
instance ..
end
+definition (in term_syntax) [code_unfold]: "valtermify_Inl x = Code_Evaluation.valtermify (Inl :: 'a :: typerep => 'a + 'b :: typerep) {\<cdot>} x"
+definition (in term_syntax) [code_unfold]: "valtermify_Inr x = Code_Evaluation.valtermify (Inr :: 'b :: typerep => 'a ::typerep + 'b) {\<cdot>} x"
instantiation sum :: (check_all, check_all) check_all
begin
definition
- "check_all f = (case check_all (%(a, t). f (Inl a, %_.
- let T1 = (Typerep.typerep (TYPE('a)));
- T2 = (Typerep.typerep (TYPE('b)))
- in Code_Evaluation.App (Code_Evaluation.Const (STR ''Sum_Type.Inl'')
- (Typerep.Typerep (STR ''fun'') [T1, Typerep.Typerep (STR ''Sum_Type.sum'') [T1, T2]])) (t ()))) of Some x' => Some x'
- | None => check_all (%(b, t). f (Inr b, %_. let
- T1 = (Typerep.typerep (TYPE('a)));
- T2 = (Typerep.typerep (TYPE('b)))
- in Code_Evaluation.App (Code_Evaluation.Const (STR ''Sum_Type.Inr'')
- (Typerep.Typerep (STR ''fun'') [T2, Typerep.Typerep (STR ''Sum_Type.sum'') [T1, T2]])) (t ()))))"
+ "check_all f = check_all (%a. f (valtermify_Inl a)) orelse check_all (%b. f (valtermify_Inr b))"
definition enum_term_of_sum :: "('a + 'b) itself => unit => term list"
where
@@ -425,7 +425,8 @@
begin
definition
- "check_all f = (case f (Code_Evaluation.valtermify Enum.finite_2.a\<^isub>1) of Some x' \<Rightarrow> Some x' | None \<Rightarrow> f (Code_Evaluation.valtermify Enum.finite_2.a\<^isub>2))"
+ "check_all f = (f (Code_Evaluation.valtermify Enum.finite_2.a\<^isub>1)
+ orelse f (Code_Evaluation.valtermify Enum.finite_2.a\<^isub>2))"
definition enum_term_of_finite_2 :: "Enum.finite_2 itself => unit => term list"
where
@@ -439,7 +440,9 @@
begin
definition
- "check_all f = (case f (Code_Evaluation.valtermify Enum.finite_3.a\<^isub>1) of Some x' \<Rightarrow> Some x' | None \<Rightarrow> (case f (Code_Evaluation.valtermify Enum.finite_3.a\<^isub>2) of Some x' \<Rightarrow> Some x' | None \<Rightarrow> f (Code_Evaluation.valtermify Enum.finite_3.a\<^isub>3)))"
+ "check_all f = (f (Code_Evaluation.valtermify Enum.finite_3.a\<^isub>1)
+ orelse f (Code_Evaluation.valtermify Enum.finite_3.a\<^isub>2)
+ orelse f (Code_Evaluation.valtermify Enum.finite_3.a\<^isub>3))"
definition enum_term_of_finite_3 :: "Enum.finite_3 itself => unit => term list"
where
@@ -449,6 +452,23 @@
end
+instantiation Enum.finite_4 :: check_all
+begin
+
+definition
+ "check_all f = (f (Code_Evaluation.valtermify Enum.finite_4.a\<^isub>1)
+ orelse f (Code_Evaluation.valtermify Enum.finite_4.a\<^isub>2)
+ orelse f (Code_Evaluation.valtermify Enum.finite_4.a\<^isub>3)
+ orelse f (Code_Evaluation.valtermify Enum.finite_4.a\<^isub>4))"
+
+definition enum_term_of_finite_4 :: "Enum.finite_4 itself => unit => term list"
+where
+ "enum_term_of_finite_4 = (%_ _. map Code_Evaluation.term_of (Enum.enum :: Enum.finite_4 list))"
+
+instance ..
+
+end
+
subsection {* Bounded universal quantifiers *}
class bounded_forall =
@@ -573,11 +593,15 @@
exhaustive'_def
exhaustive_code_numeral'_def
+hide_const valtermify_absdummy valtermify_fun_upd valterm_emptyset valtermify_insert valtermify_pair
+ valtermify_Inl valtermify_Inr
+ termify_fun_upd term_emptyset termify_insert termify_pair setify
+
hide_const (open)
exhaustive full_exhaustive exhaustive' exhaustive_code_numeral' full_exhaustive_code_numeral'
throw_Counterexample catch_Counterexample
check_all enum_term_of
- orelse unknown mk_map_term check_all_n_lists
+ orelse unknown mk_map_term check_all_n_lists check_all_subsets
hide_type (open) cps pos_bound_cps neg_bound_cps unknown three_valued
hide_const (open) cps_empty cps_single cps_bind cps_plus cps_if cps_not
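A similar hedged sketch (not part of the changeset) exercising the exhaustive set generators refined above; bounding the size keeps the enumeration of nat sets small:

    lemma "(A :: nat set) \<subseteq> B \<or> B \<subseteq> A"
      quickcheck[exhaustive, size = 3]
      oops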
--- a/src/HOL/Quickcheck_Narrowing.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Quickcheck_Narrowing.thy Tue Feb 14 11:16:07 2012 +0100
@@ -367,6 +367,16 @@
z = (conv :: _ => _ => unit) in f)"
unfolding Let_def ensure_testable_def ..
+subsection {* Narrowing for sets *}
+
+instantiation set :: (narrowing) narrowing
+begin
+
+definition "narrowing_set = Quickcheck_Narrowing.apply (Quickcheck_Narrowing.cons set) narrowing"
+
+instance ..
+
+end
subsection {* Narrowing for integers *}
--- a/src/HOL/Quotient_Examples/FSet.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Quotient_Examples/FSet.thy Tue Feb 14 11:16:07 2012 +0100
@@ -125,7 +125,7 @@
proof (intro conjI allI)
fix a r s
show "abs_fset (map Abs (map Rep (rep_fset a))) = a"
- by (simp add: abs_o_rep[OF q] Quotient_abs_rep[OF Quotient_fset] map_id)
+ by (simp add: abs_o_rep[OF q] Quotient_abs_rep[OF Quotient_fset] map.id)
have b: "list_all2 R (map Rep (rep_fset a)) (map Rep (rep_fset a))"
by (rule list_all2_refl'[OF e])
have c: "(op \<approx> OO list_all2 R) (map Rep (rep_fset a)) (map Rep (rep_fset a))"
@@ -376,7 +376,7 @@
fix x y z :: "'a fset"
show "x |\<subset>| y \<longleftrightarrow> x |\<subseteq>| y \<and> \<not> y |\<subseteq>| x"
by (unfold less_fset_def, descending) auto
- show "x |\<subseteq>| x" by (descending) (simp)
+ show "x |\<subseteq>| x" by (descending) (simp)
show "{||} |\<subseteq>| x" by (descending) (simp)
show "x |\<subseteq>| x |\<union>| y" by (descending) (simp)
show "y |\<subseteq>| x |\<union>| y" by (descending) (simp)
@@ -484,20 +484,23 @@
apply auto
done
-lemma map_prs [quot_preserve]:
- shows "(abs_fset \<circ> map f) [] = abs_fset []"
+lemma Nil_prs2 [quot_preserve]:
+ assumes "Quotient R Abs Rep"
+ shows "(Abs \<circ> map f) [] = Abs []"
by simp
-lemma insert_fset_rsp [quot_preserve]:
- "(rep_fset ---> (map rep_fset \<circ> rep_fset) ---> (abs_fset \<circ> map abs_fset)) Cons = insert_fset"
- by (simp add: fun_eq_iff Quotient_abs_rep[OF Quotient_fset]
- abs_o_rep[OF Quotient_fset] map_id insert_fset_def)
+lemma Cons_prs2 [quot_preserve]:
+ assumes q: "Quotient R1 Abs1 Rep1"
+ and r: "Quotient R2 Abs2 Rep2"
+ shows "(Rep1 ---> (map Rep1 \<circ> Rep2) ---> (Abs2 \<circ> map Abs1)) (op #) = (id ---> Rep2 ---> Abs2) (op #)"
+ by (auto simp add: fun_eq_iff comp_def Quotient_abs_rep [OF q])
-lemma union_fset_rsp [quot_preserve]:
- "((map rep_fset \<circ> rep_fset) ---> (map rep_fset \<circ> rep_fset) ---> (abs_fset \<circ> map abs_fset))
- append = union_fset"
- by (simp add: fun_eq_iff Quotient_abs_rep[OF Quotient_fset]
- abs_o_rep[OF Quotient_fset] map_id sup_fset_def)
+lemma append_prs2 [quot_preserve]:
+ assumes q: "Quotient R1 Abs1 Rep1"
+ and r: "Quotient R2 Abs2 Rep2"
+ shows "((map Rep1 \<circ> Rep2) ---> (map Rep1 \<circ> Rep2) ---> (Abs2 \<circ> map Abs1)) op @ =
+ (Rep2 ---> Rep2 ---> Abs2) op @"
+ by (simp add: fun_eq_iff abs_o_rep[OF q] map.id)
lemma list_all2_app_l:
assumes a: "reflp R"
@@ -527,26 +530,43 @@
shows "list_all2 op \<approx> (x @ z) (x' @ z')"
using assms by (rule list_all2_appendI)
+lemma compositional_rsp3:
+ assumes "(R1 ===> R2 ===> R3) C C" and "(R4 ===> R5 ===> R6) C C"
+ shows "(R1 OOO R4 ===> R2 OOO R5 ===> R3 OOO R6) C C"
+ by (auto intro!: fun_relI)
+ (metis (full_types) assms fun_relE pred_comp.intros)
+
lemma append_rsp2 [quot_respect]:
"(list_all2 op \<approx> OOO op \<approx> ===> list_all2 op \<approx> OOO op \<approx> ===> list_all2 op \<approx> OOO op \<approx>) append append"
-proof (intro fun_relI, elim pred_compE)
- fix x y z w x' z' y' w' :: "'a list list"
- assume a:"list_all2 op \<approx> x x'"
- and b: "x' \<approx> y'"
- and c: "list_all2 op \<approx> y' y"
- assume aa: "list_all2 op \<approx> z z'"
- and bb: "z' \<approx> w'"
- and cc: "list_all2 op \<approx> w' w"
- have a': "list_all2 op \<approx> (x @ z) (x' @ z')" using a aa append_rsp2_pre by auto
- have b': "x' @ z' \<approx> y' @ w'" using b bb by simp
- have c': "list_all2 op \<approx> (y' @ w') (y @ w)" using c cc append_rsp2_pre by auto
- have d': "(op \<approx> OO list_all2 op \<approx>) (x' @ z') (y @ w)"
- by (rule pred_compI) (rule b', rule c')
- show "(list_all2 op \<approx> OOO op \<approx>) (x @ z) (y @ w)"
- by (rule pred_compI) (rule a', rule d')
+ by (intro compositional_rsp3 append_rsp)
+ (auto intro!: fun_relI simp add: append_rsp2_pre)
+
+lemma map_rsp2 [quot_respect]:
+ "((op \<approx> ===> op \<approx>) ===> list_all2 op \<approx> OOO op \<approx> ===> list_all2 op \<approx> OOO op \<approx>) map map"
+proof (auto intro!: fun_relI)
+ fix f f' :: "'a list \<Rightarrow> 'b list"
+ fix xa ya x y :: "'a list list"
+ assume fs: "(op \<approx> ===> op \<approx>) f f'" and x: "list_all2 op \<approx> xa x" and xy: "set x = set y" and y: "list_all2 op \<approx> y ya"
+ have a: "(list_all2 op \<approx>) (map f xa) (map f x)"
+ using x
+ by (induct xa x rule: list_induct2')
+ (simp_all, metis fs fun_relE list_eq_def)
+ have b: "set (map f x) = set (map f y)"
+ using xy fs
+ by (induct x y rule: list_induct2')
+ (simp_all, metis image_insert)
+ have c: "(list_all2 op \<approx>) (map f y) (map f' ya)"
+ using y fs
+ by (induct y ya rule: list_induct2')
+ (simp_all, metis apply_rsp' list_eq_def)
+ show "(list_all2 op \<approx> OOO op \<approx>) (map f xa) (map f' ya)"
+ by (metis a b c list_eq_def pred_comp.intros)
qed
-
+lemma map_prs2 [quot_preserve]:
+ shows "((abs_fset ---> rep_fset) ---> (map rep_fset \<circ> rep_fset) ---> abs_fset \<circ> map abs_fset) map = (id ---> rep_fset ---> abs_fset) map"
+ by (auto simp add: fun_eq_iff)
+ (simp only: map_map[symmetric] map_prs_aux[OF Quotient_fset Quotient_fset])
section {* Lifted theorems *}
@@ -907,16 +927,19 @@
lemma concat_empty_fset [simp]:
shows "concat_fset {||} = {||}"
- by (lifting concat.simps(1))
+ by descending simp
lemma concat_insert_fset [simp]:
shows "concat_fset (insert_fset x S) = x |\<union>| concat_fset S"
- by (lifting concat.simps(2))
+ by descending simp
-lemma concat_inter_fset [simp]:
+lemma concat_union_fset [simp]:
shows "concat_fset (xs |\<union>| ys) = concat_fset xs |\<union>| concat_fset ys"
- by (lifting concat_append)
+ by descending simp
+lemma map_concat_fset:
+ shows "map_fset f (concat_fset xs) = concat_fset (map_fset (map_fset f) xs)"
+ by (lifting map_concat)
subsection {* filter_fset *}
--- a/src/HOL/Quotient_Examples/List_Quotient_Cset.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Quotient_Examples/List_Quotient_Cset.thy Tue Feb 14 11:16:07 2012 +0100
@@ -33,9 +33,9 @@
by descending (simp add: in_set_member)
definition (in term_syntax)
- setify :: "'a\<Colon>typerep list \<times> (unit \<Rightarrow> Code_Evaluation.term)
+ csetify :: "'a\<Colon>typerep list \<times> (unit \<Rightarrow> Code_Evaluation.term)
\<Rightarrow> 'a Quotient_Cset.set \<times> (unit \<Rightarrow> Code_Evaluation.term)" where
- [code_unfold]: "setify xs = Code_Evaluation.valtermify Quotient_Cset.set {\<cdot>} xs"
+ [code_unfold]: "csetify xs = Code_Evaluation.valtermify Quotient_Cset.set {\<cdot>} xs"
notation fcomp (infixl "\<circ>>" 60)
notation scomp (infixl "\<circ>\<rightarrow>" 60)
@@ -44,7 +44,7 @@
begin
definition
- "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (setify xs))"
+ "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (csetify xs))"
instance ..
--- a/src/HOL/Random.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Random.thy Tue Feb 14 11:16:07 2012 +0100
@@ -114,7 +114,7 @@
"select_weight ((0, x) # xs) = select_weight xs"
by (simp add: select_weight_def)
-lemma select_weigth_drop_zero:
+lemma select_weight_drop_zero:
"select_weight (filter (\<lambda>(k, _). k > 0) xs) = select_weight xs"
proof -
have "listsum (map fst [(k, _)\<leftarrow>xs . 0 < k]) = listsum (map fst xs)"
@@ -122,7 +122,7 @@
then show ?thesis by (simp only: select_weight_def pick_drop_zero)
qed
-lemma select_weigth_select:
+lemma select_weight_select:
assumes "xs \<noteq> []"
shows "select_weight (map (Pair 1) xs) = select xs"
proof -
--- a/src/HOL/Relation.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Relation.thy Tue Feb 14 11:16:07 2012 +0100
@@ -11,6 +11,8 @@
subsection {* Definitions *}
+type_synonym 'a rel = "('a * 'a) set"
+
definition
converse :: "('a * 'b) set => ('b * 'a) set"
("(_^-1)" [1000] 999) where
--- a/src/HOL/TPTP/ATP_Export.thy Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,57 +0,0 @@
-theory ATP_Export
-imports Complex_Main
-uses "atp_export.ML"
-begin
-
-ML {*
-open ATP_Problem;
-open ATP_Export;
-*}
-
-ML {*
-val do_it = false; (* switch to "true" to generate the files *)
-val thy = @{theory Complex_Main};
-val ctxt = @{context}
-*}
-
-ML {*
-if do_it then
- "/tmp/axs_mono_simple.dfg"
- |> generate_tptp_inference_file_for_theory ctxt thy (DFG DFG_Sorted)
- "mono_simple"
-else
- ()
-*}
-
-ML {*
-if do_it then
- "/tmp/infs_poly_guards.tptp"
- |> generate_tptp_inference_file_for_theory ctxt thy FOF "poly_guards"
-else
- ()
-*}
-
-ML {*
-if do_it then
- "/tmp/infs_poly_tags.tptp"
- |> generate_tptp_inference_file_for_theory ctxt thy FOF "poly_tags"
-else
- ()
-*}
-
-ML {*
-if do_it then
- "/tmp/infs_poly_tags_uniform.tptp"
- |> generate_tptp_inference_file_for_theory ctxt thy FOF "poly_tags_uniform"
-else
- ()
-*}
-
-ML {*
-if do_it then
- "/tmp/graph.out" |> generate_tptp_graph_file_for_theory ctxt thy
-else
- ()
-*}
-
-end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/ATP_Problem_Import.thy Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,16 @@
+(* Title: HOL/TPTP/ATP_Problem_Import.thy
+ Author: Jasmin Blanchette, TU Muenchen
+*)
+
+header {* ATP Problem Importer *}
+
+theory ATP_Problem_Import
+imports Complex_Main
+uses ("atp_problem_import.ML")
+begin
+
+typedecl iota (* for TPTP *)
+
+use "atp_problem_import.ML"
+
+end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/ATP_Theory_Export.thy Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,63 @@
+(* Title: HOL/TPTP/ATP_Theory_Export.thy
+ Author: Jasmin Blanchette, TU Muenchen
+*)
+
+header {* ATP Theory Exporter *}
+
+theory ATP_Theory_Export
+imports Complex_Main
+uses "atp_theory_export.ML"
+begin
+
+ML {*
+open ATP_Problem;
+open ATP_Theory_Export;
+*}
+
+ML {*
+val do_it = false; (* switch to "true" to generate the files *)
+val thy = @{theory Complex_Main};
+val ctxt = @{context}
+*}
+
+ML {*
+if do_it then
+ "/tmp/axs_mono_native.dfg"
+ |> generate_tptp_inference_file_for_theory ctxt thy (DFG DFG_Sorted)
+ "mono_native"
+else
+ ()
+*}
+
+ML {*
+if do_it then
+ "/tmp/infs_poly_guards.tptp"
+ |> generate_tptp_inference_file_for_theory ctxt thy FOF "poly_guards"
+else
+ ()
+*}
+
+ML {*
+if do_it then
+ "/tmp/infs_poly_tags.tptp"
+ |> generate_tptp_inference_file_for_theory ctxt thy FOF "poly_tags"
+else
+ ()
+*}
+
+ML {*
+if do_it then
+ "/tmp/infs_poly_tags_uniform.tptp"
+ |> generate_tptp_inference_file_for_theory ctxt thy FOF "poly_tags_uniform"
+else
+ ()
+*}
+
+ML {*
+if do_it then
+ "/tmp/graph.out" |> generate_tptp_graph_file_for_theory ctxt thy
+else
+ ()
+*}
+
+end
--- a/src/HOL/TPTP/CASC_Setup.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/TPTP/CASC_Setup.thy Tue Feb 14 11:16:07 2012 +0100
@@ -129,7 +129,7 @@
Sledgehammer_Filter.no_relevance_override))
ORELSE
SOLVE_TIMEOUT (max_secs div 10) "metis"
- (ALLGOALS (Metis_Tactic.metis_tac [] ATP_Translate.lam_liftingN ctxt []))
+ (ALLGOALS (Metis_Tactic.metis_tac [] ATP_Problem_Generate.liftingN ctxt []))
ORELSE
SOLVE_TIMEOUT (max_secs div 10) "fast" (ALLGOALS (fast_tac ctxt))
ORELSE
--- a/src/HOL/TPTP/ROOT.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/TPTP/ROOT.ML Tue Feb 14 11:16:07 2012 +0100
@@ -7,8 +7,11 @@
*)
use_thys [
- "ATP_Export"
+ "ATP_Theory_Export"
];
Unsynchronized.setmp Proofterm.proofs (!Proofterm.proofs)
- use_thy "CASC_Setup";
+ use_thys [
+ "ATP_Problem_Import",
+ "CASC_Setup"
+ ];
--- a/src/HOL/TPTP/atp_export.ML Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,219 +0,0 @@
-(* Title: HOL/TPTP/atp_export.ML
- Author: Jasmin Blanchette, TU Muenchen
- Copyright 2011
-
-Export Isabelle theories as first-order TPTP inferences, exploiting
-Sledgehammer's translation.
-*)
-
-signature ATP_EXPORT =
-sig
- type atp_format = ATP_Problem.atp_format
-
- val theorems_mentioned_in_proof_term :
- string list option -> thm -> string list
- val generate_tptp_graph_file_for_theory :
- Proof.context -> theory -> string -> unit
- val generate_tptp_inference_file_for_theory :
- Proof.context -> theory -> atp_format -> string -> string -> unit
-end;
-
-structure ATP_Export : ATP_EXPORT =
-struct
-
-open ATP_Problem
-open ATP_Translate
-open ATP_Proof
-open ATP_Systems
-
-val fact_name_of = prefix fact_prefix o ascii_of
-
-fun facts_of thy =
- Sledgehammer_Filter.all_facts (Proof_Context.init_global thy) false
- Symtab.empty true [] []
- |> filter (curry (op =) @{typ bool} o fastype_of
- o Object_Logic.atomize_term thy o prop_of o snd)
-
-(* FIXME: Similar yet different code in "mirabelle.ML". The code here has a few
- fixes that seem to be missing over there; or maybe the two code portions are
- not doing the same? *)
-fun fold_body_thms thm_name all_names f =
- let
- fun app n (PBody {thms, ...}) =
- thms |> fold (fn (_, (name, prop, body)) => fn x =>
- let
- val body' = Future.join body
- val n' =
- n + (if name = "" orelse
- (is_some all_names andalso
- not (member (op =) (the all_names) name)) orelse
- (* uncommon case where the proved theorem occurs twice
- (e.g., "Transitive_Closure.trancl_into_trancl") *)
- n = 1 andalso name = thm_name then
- 0
- else
- 1)
- val x' = x |> n' <= 1 ? app n' body'
- in (x' |> n = 1 ? f (name, prop, body')) end)
- in fold (app 0) end
-
-fun theorems_mentioned_in_proof_term all_names thm =
- let
- fun collect (s, _, _) = if s <> "" then insert (op =) s else I
- val names =
- [] |> fold_body_thms (Thm.get_name_hint thm) all_names collect
- [Thm.proof_body_of thm]
- |> map fact_name_of
- in names end
-
-fun interesting_const_names ctxt =
- let val thy = Proof_Context.theory_of ctxt in
- Sledgehammer_Filter.const_names_in_fact thy
- (Sledgehammer_Provers.is_built_in_const_for_prover ctxt eN)
- end
-
-fun generate_tptp_graph_file_for_theory ctxt thy file_name =
- let
- val path = file_name |> Path.explode
- val _ = File.write path ""
- val axioms = Theory.all_axioms_of thy |> map fst
- fun do_thm thm =
- let
- val name = Thm.get_name_hint thm
- val s =
- "[" ^ Thm.legacy_get_kind thm ^ "] " ^
- (if member (op =) axioms name then "A" else "T") ^ " " ^
- prefix fact_prefix (ascii_of name) ^ ": " ^
- commas (theorems_mentioned_in_proof_term NONE thm) ^ "; " ^
- commas (map (prefix const_prefix o ascii_of)
- (interesting_const_names ctxt (Thm.prop_of thm))) ^ " \n"
- in File.append path s end
- val thms = facts_of thy |> map snd
- val _ = map do_thm thms
- in () end
-
-fun inference_term [] = NONE
- | inference_term ss =
- ATerm ("inference",
- [ATerm ("isabelle", []),
- ATerm (tptp_empty_list, []),
- ATerm (tptp_empty_list, map (fn s => ATerm (s, [])) ss)])
- |> SOME
-fun inference infers ident =
- these (AList.lookup (op =) infers ident) |> inference_term
-fun add_inferences_to_problem_line infers
- (Formula (ident, Axiom, phi, NONE, NONE)) =
- Formula (ident, Lemma, phi, inference infers ident, NONE)
- | add_inferences_to_problem_line _ line = line
-fun add_inferences_to_problem infers =
- map (apsnd (map (add_inferences_to_problem_line infers)))
-
-fun ident_of_problem_line (Decl (ident, _, _)) = ident
- | ident_of_problem_line (Formula (ident, _, _, _, _)) = ident
-
-fun run_some_atp ctxt format problem =
- let
- val thy = Proof_Context.theory_of ctxt
- val prob_file = File.tmp_path (Path.explode "prob.tptp")
- val {exec, arguments, proof_delims, known_failures, ...} =
- get_atp thy (case format of DFG _ => spassN | _ => eN)
- val _ = problem |> lines_for_atp_problem format |> File.write_list prob_file
- val command =
- File.shell_path (Path.explode (getenv (fst exec) ^ "/" ^ snd exec)) ^
- " " ^ arguments ctxt false "" (seconds 1.0) (K []) ^ " " ^
- File.shell_path prob_file
- in
- TimeLimit.timeLimit (seconds 0.3) Isabelle_System.bash_output command
- |> fst
- |> extract_tstplike_proof_and_outcome false true proof_delims
- known_failures
- |> snd
- end
- handle TimeLimit.TimeOut => SOME TimedOut
-
-val likely_tautology_prefixes =
- [@{theory HOL}, @{theory Meson}, @{theory ATP}, @{theory Metis}]
- |> map (fact_name_of o Context.theory_name)
-
-fun is_problem_line_tautology ctxt format (Formula (ident, _, phi, _, _)) =
- exists (fn prefix => String.isPrefix prefix ident)
- likely_tautology_prefixes andalso
- is_none (run_some_atp ctxt format
- [(factsN, [Formula (ident, Conjecture, phi, NONE, NONE)])])
- | is_problem_line_tautology _ _ _ = false
-
-structure String_Graph = Graph(type key = string val ord = string_ord);
-
-fun order_facts ord = sort (ord o pairself ident_of_problem_line)
-fun order_problem_facts _ [] = []
- | order_problem_facts ord ((heading, lines) :: problem) =
- if heading = factsN then (heading, order_facts ord lines) :: problem
- else (heading, lines) :: order_problem_facts ord problem
-
-(* A fairly random selection of types used for monomorphizing. *)
-val ground_types =
- [@{typ nat}, HOLogic.intT, HOLogic.realT, @{typ "nat => bool"}, @{typ bool},
- @{typ unit}]
-
-fun ground_type_for_tvar _ [] tvar =
- raise TYPE ("ground_type_for_sorts", [TVar tvar], [])
- | ground_type_for_tvar thy (T :: Ts) tvar =
- if can (Sign.typ_match thy (TVar tvar, T)) Vartab.empty then T
- else ground_type_for_tvar thy Ts tvar
-
-fun monomorphize_term ctxt t =
- let val thy = Proof_Context.theory_of ctxt in
- t |> map_types (map_type_tvar (ground_type_for_tvar thy ground_types))
- handle TYPE _ => @{prop True}
- end
-
-fun generate_tptp_inference_file_for_theory ctxt thy format type_enc file_name =
- let
- val type_enc = type_enc |> type_enc_from_string Sound
- |> adjust_type_enc format
- val mono = polymorphism_of_type_enc type_enc <> Polymorphic
- val path = file_name |> Path.explode
- val _ = File.write path ""
- val facts = facts_of thy
- val atp_problem =
- facts
- |> map (fn ((_, loc), th) =>
- ((Thm.get_name_hint th, loc),
- th |> prop_of |> mono ? monomorphize_term ctxt))
- |> prepare_atp_problem ctxt format Axiom Axiom type_enc true combinatorsN
- false true [] @{prop False}
- |> #1
- val atp_problem =
- atp_problem
- |> map (apsnd (filter_out (is_problem_line_tautology ctxt format)))
- val all_names = facts |> map (Thm.get_name_hint o snd)
- val infers =
- facts |> map (fn (_, th) =>
- (fact_name_of (Thm.get_name_hint th),
- theorems_mentioned_in_proof_term (SOME all_names) th))
- val all_atp_problem_names =
- atp_problem |> maps (map ident_of_problem_line o snd)
- val infers =
- infers |> filter (member (op =) all_atp_problem_names o fst)
- |> map (apsnd (filter (member (op =) all_atp_problem_names)))
- val ordered_names =
- String_Graph.empty
- |> fold (String_Graph.new_node o rpair ()) all_atp_problem_names
- |> fold (fn (to, froms) =>
- fold (fn from => String_Graph.add_edge (from, to)) froms)
- infers
- |> String_Graph.topological_order
- val order_tab =
- Symtab.empty
- |> fold (Symtab.insert (op =))
- (ordered_names ~~ (1 upto length ordered_names))
- val name_ord = int_ord o pairself (the o Symtab.lookup order_tab)
- val atp_problem =
- atp_problem
- |> (case format of DFG _ => I | _ => add_inferences_to_problem infers)
- |> order_problem_facts name_ord
- val ss = lines_for_atp_problem format atp_problem
- val _ = app (File.append path) ss
- in () end
-
-end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/atp_problem_import.ML Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,183 @@
+(* Title: HOL/TPTP/atp_problem_import.ML
+ Author: Jasmin Blanchette, TU Muenchen
+ Copyright 2012
+
+Import TPTP problems as Isabelle terms or goals.
+*)
+
+signature ATP_PROBLEM_IMPORT =
+sig
+ val isabelle_tptp_file : string -> unit
+ val nitpick_tptp_file : string -> unit
+ val refute_tptp_file : string -> unit
+ val sledgehammer_tptp_file : string -> unit
+ val translate_tptp_file : string -> string -> string -> unit
+end;
+
+structure ATP_Problem_Import : ATP_PROBLEM_IMPORT =
+struct
+
+open ATP_Util
+open ATP_Problem
+open ATP_Proof
+
+
+(** General TPTP parsing **)
+
+exception SYNTAX of string
+
+val tptp_explode = raw_explode o strip_spaces_except_between_idents
+
+fun parse_file_path (c :: ss) =
+ if c = "'" orelse c = "\"" then
+ ss |> chop_while (curry (op <>) c) |>> implode ||> tl
+ else
+ raise SYNTAX "invalid file path"
+ | parse_file_path [] = raise SYNTAX "invalid file path"
+
+fun parse_include x =
+ let
+ val (file_name, rest) =
+ (Scan.this_string "include" |-- $$ "(" |-- parse_file_path --| $$ ")"
+ --| $$ ".") x
+ val path = file_name |> Path.explode
+ val path =
+ path |> not (Path.is_absolute path) ? Path.append (Path.explode "$TPTP")
+ in ((), (path |> File.read |> tptp_explode) @ rest) end
+
+val parse_cnf_or_fof =
+ (Scan.this_string "cnf" || Scan.this_string "fof") |-- $$ "(" |--
+ Scan.many (not_equal ",") |-- $$ "," |--
+ (Scan.this_string "axiom" || Scan.this_string "definition"
+ || Scan.this_string "theorem" || Scan.this_string "lemma"
+ || Scan.this_string "hypothesis" || Scan.this_string "conjecture"
+ || Scan.this_string "negated_conjecture") --| $$ "," -- parse_formula
+ --| $$ ")" --| $$ "."
+ >> (fn ("conjecture", phi) => AConn (ANot, [phi]) | (_, phi) => phi)
+
+val parse_problem =
+ Scan.repeat parse_include
+ |-- Scan.repeat (parse_cnf_or_fof --| Scan.repeat parse_include)
+
+val parse_tptp_problem =
+ Scan.finite Symbol.stopper
+ (Scan.error (!! (fn _ => raise SYNTAX "malformed TPTP input")
+ parse_problem))
+ o tptp_explode
+
+val iota_T = @{typ iota}
+val quant_T = @{typ "(iota => bool) => bool"}
+
+fun is_variable s = Char.isUpper (String.sub (s, 0))
+
+fun hol_term_from_fo_term res_T (ATerm (x, us)) =
+ let val ts = map (hol_term_from_fo_term iota_T) us in
+ list_comb ((case x of
+ "$true" => @{const_name True}
+ | "$false" => @{const_name False}
+ | "=" => @{const_name HOL.eq}
+ | "equal" => @{const_name HOL.eq}
+ | _ => x, map fastype_of ts ---> res_T)
+ |> (if is_variable x then Free else Const), ts)
+ end
+
+fun hol_prop_from_formula phi =
+ case phi of
+ AQuant (_, [], phi') => hol_prop_from_formula phi'
+ | AQuant (q, (x, _) :: xs, phi') =>
+ Const (case q of AForall => @{const_name All} | AExists => @{const_name Ex},
+ quant_T)
+ $ lambda (Free (x, iota_T)) (hol_prop_from_formula (AQuant (q, xs, phi')))
+ | AConn (ANot, [u']) => HOLogic.mk_not (hol_prop_from_formula u')
+ | AConn (c, [u1, u2]) =>
+ pairself hol_prop_from_formula (u1, u2)
+ |> (case c of
+ AAnd => HOLogic.mk_conj
+ | AOr => HOLogic.mk_disj
+ | AImplies => HOLogic.mk_imp
+ | AIff => HOLogic.mk_eq
+ | ANot => raise Fail "binary \"ANot\"")
+ | AConn _ => raise Fail "malformed AConn"
+ | AAtom u => hol_term_from_fo_term @{typ bool} u
+
+fun mk_all x t = Const (@{const_name All}, quant_T) $ lambda x t
+
+fun close_hol_prop t = fold (mk_all o Free) (Term.add_frees t []) t
+
+fun read_tptp_file file_name =
+ case parse_tptp_problem (File.read (Path.explode file_name)) of
+ (_, s :: ss) => raise SYNTAX ("cannot parse " ^ quote (implode (s :: ss)))
+ | (phis, []) =>
+ map (HOLogic.mk_Trueprop o close_hol_prop o hol_prop_from_formula) phis
+
+fun print_szs_from_outcome s =
+ "% SZS status " ^
+ (if s = Nitpick.genuineN then
+ "CounterSatisfiable"
+ else
+ "Unknown")
+ |> writeln
+
+(** Isabelle (combination of provers) **)
+
+fun isabelle_tptp_file file_name = ()
+
+
+(** Nitpick (alias Nitrox) **)
+
+fun nitpick_tptp_file file_name =
+ let
+ val ts = read_tptp_file file_name
+ val state = Proof.init @{context}
+ val params =
+ [("card iota", "1\<emdash>100"),
+ ("card", "1\<emdash>8"),
+ ("box", "false"),
+ ("sat_solver", "smart"),
+ ("max_threads", "1"),
+ ("batch_size", "10"),
+ (* ("debug", "true"), *)
+ ("verbose", "true"),
+ (* ("overlord", "true"), *)
+ ("show_consts", "true"),
+ ("format", "1000"),
+ ("max_potential", "0"),
+ ("timeout", "none"),
+ ("expect", Nitpick.genuineN)]
+ |> Nitpick_Isar.default_params @{theory}
+ val i = 1
+ val n = 1
+ val step = 0
+ val subst = []
+ in
+ Nitpick.pick_nits_in_term state params Nitpick.Normal i n step subst ts
+ @{prop False}
+ |> fst |> print_szs_from_outcome
+ end
+
+
+(** Refute **)
+
+fun refute_tptp_file file_name =
+ let
+ val ts = read_tptp_file file_name
+ val params =
+ [("maxtime", "10000"),
+ ("assms", "true"),
+ ("expect", Nitpick.genuineN)]
+ in
+ Refute.refute_term @{context} params ts @{prop False}
+ |> print_szs_from_outcome
+ end
+
+
+(** Sledgehammer **)
+
+fun sledgehammer_tptp_file file_name = ()
+
+
+(** Translator between TPTP(-like) file formats **)
+
+fun translate_tptp_file format in_file_name out_file_name = ()
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/atp_theory_export.ML Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,220 @@
+(* Title: HOL/TPTP/atp_theory_export.ML
+ Author: Jasmin Blanchette, TU Muenchen
+ Copyright 2011
+
+Export Isabelle theories as first-order TPTP inferences, exploiting
+Sledgehammer's translation.
+*)
+
+signature ATP_THEORY_EXPORT =
+sig
+ type atp_format = ATP_Problem.atp_format
+
+ val theorems_mentioned_in_proof_term :
+ string list option -> thm -> string list
+ val generate_tptp_graph_file_for_theory :
+ Proof.context -> theory -> string -> unit
+ val generate_tptp_inference_file_for_theory :
+ Proof.context -> theory -> atp_format -> string -> string -> unit
+end;
+
+structure ATP_Theory_Export : ATP_THEORY_EXPORT =
+struct
+
+open ATP_Problem
+open ATP_Proof
+open ATP_Problem_Generate
+open ATP_Systems
+
+val fact_name_of = prefix fact_prefix o ascii_of
+
+fun facts_of thy =
+ Sledgehammer_Filter.all_facts (Proof_Context.init_global thy) false
+ Symtab.empty true [] []
+ |> filter (curry (op =) @{typ bool} o fastype_of
+ o Object_Logic.atomize_term thy o prop_of o snd)
+
+(* FIXME: Similar yet different code in "mirabelle.ML". The code here has a few
+ fixes that seem to be missing over there; or maybe the two code portions are
+ not doing the same? *)
+fun fold_body_thms thm_name all_names f =
+ let
+ fun app n (PBody {thms, ...}) =
+ thms |> fold (fn (_, (name, prop, body)) => fn x =>
+ let
+ val body' = Future.join body
+ val n' =
+ n + (if name = "" orelse
+ (is_some all_names andalso
+ not (member (op =) (the all_names) name)) orelse
+ (* uncommon case where the proved theorem occurs twice
+ (e.g., "Transitive_Closure.trancl_into_trancl") *)
+ n = 1 andalso name = thm_name then
+ 0
+ else
+ 1)
+ val x' = x |> n' <= 1 ? app n' body'
+ in (x' |> n = 1 ? f (name, prop, body')) end)
+ in fold (app 0) end
+
+fun theorems_mentioned_in_proof_term all_names thm =
+ let
+ fun collect (s, _, _) = if s <> "" then insert (op =) s else I
+ val names =
+ [] |> fold_body_thms (Thm.get_name_hint thm) all_names collect
+ [Thm.proof_body_of thm]
+ |> map fact_name_of
+ in names end
+
+fun interesting_const_names ctxt =
+ let val thy = Proof_Context.theory_of ctxt in
+ Sledgehammer_Filter.const_names_in_fact thy
+ (Sledgehammer_Provers.is_built_in_const_for_prover ctxt eN)
+ end
+
+fun generate_tptp_graph_file_for_theory ctxt thy file_name =
+ let
+ val path = file_name |> Path.explode
+ val _ = File.write path ""
+ val axioms = Theory.all_axioms_of thy |> map fst
+ fun do_thm thm =
+ let
+ val name = Thm.get_name_hint thm
+ val s =
+ "[" ^ Thm.legacy_get_kind thm ^ "] " ^
+ (if member (op =) axioms name then "A" else "T") ^ " " ^
+ prefix fact_prefix (ascii_of name) ^ ": " ^
+ commas (theorems_mentioned_in_proof_term NONE thm) ^ "; " ^
+ commas (map (prefix const_prefix o ascii_of)
+ (interesting_const_names ctxt (Thm.prop_of thm))) ^ " \n"
+ in File.append path s end
+ val thms = facts_of thy |> map snd
+ val _ = map do_thm thms
+ in () end
+
+fun inference_term [] = NONE
+ | inference_term ss =
+ ATerm ("inference",
+ [ATerm ("isabelle", []),
+ ATerm (tptp_empty_list, []),
+ ATerm (tptp_empty_list, map (fn s => ATerm (s, [])) ss)])
+ |> SOME
+fun inference infers ident =
+ these (AList.lookup (op =) infers ident) |> inference_term
+fun add_inferences_to_problem_line infers
+ (Formula (ident, Axiom, phi, NONE, tms)) =
+ Formula (ident, Lemma, phi, inference infers ident, tms)
+ | add_inferences_to_problem_line _ line = line
+fun add_inferences_to_problem infers =
+ map (apsnd (map (add_inferences_to_problem_line infers)))
+
+fun ident_of_problem_line (Decl (ident, _, _)) = ident
+ | ident_of_problem_line (Formula (ident, _, _, _, _)) = ident
+
+fun run_some_atp ctxt format problem =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val prob_file = File.tmp_path (Path.explode "prob.tptp")
+ val {exec, arguments, proof_delims, known_failures, ...} =
+ get_atp thy (case format of DFG _ => spassN | _ => eN)
+ val _ = problem |> lines_for_atp_problem format (K [])
+ |> File.write_list prob_file
+ val command =
+ File.shell_path (Path.explode (getenv (fst exec) ^ "/" ^ snd exec)) ^
+ " " ^ arguments ctxt false "" (seconds 1.0) (K []) ^ " " ^
+ File.shell_path prob_file
+ in
+ TimeLimit.timeLimit (seconds 0.3) Isabelle_System.bash_output command
+ |> fst
+ |> extract_tstplike_proof_and_outcome false true proof_delims
+ known_failures
+ |> snd
+ end
+ handle TimeLimit.TimeOut => SOME TimedOut
+
+val likely_tautology_prefixes =
+ [@{theory HOL}, @{theory Meson}, @{theory ATP}, @{theory Metis}]
+ |> map (fact_name_of o Context.theory_name)
+
+fun is_problem_line_tautology ctxt format (Formula (ident, _, phi, _, _)) =
+ exists (fn prefix => String.isPrefix prefix ident)
+ likely_tautology_prefixes andalso
+ is_none (run_some_atp ctxt format
+ [(factsN, [Formula (ident, Conjecture, phi, NONE, [])])])
+ | is_problem_line_tautology _ _ _ = false
+
+structure String_Graph = Graph(type key = string val ord = string_ord);
+
+fun order_facts ord = sort (ord o pairself ident_of_problem_line)
+fun order_problem_facts _ [] = []
+ | order_problem_facts ord ((heading, lines) :: problem) =
+ if heading = factsN then (heading, order_facts ord lines) :: problem
+ else (heading, lines) :: order_problem_facts ord problem
+
+(* A fairly random selection of types used for monomorphizing. *)
+val ground_types =
+ [@{typ nat}, HOLogic.intT, HOLogic.realT, @{typ "nat => bool"}, @{typ bool},
+ @{typ unit}]
+
+fun ground_type_for_tvar _ [] tvar =
+ raise TYPE ("ground_type_for_sorts", [TVar tvar], [])
+ | ground_type_for_tvar thy (T :: Ts) tvar =
+ if can (Sign.typ_match thy (TVar tvar, T)) Vartab.empty then T
+ else ground_type_for_tvar thy Ts tvar
+
+fun monomorphize_term ctxt t =
+ let val thy = Proof_Context.theory_of ctxt in
+ t |> map_types (map_type_tvar (ground_type_for_tvar thy ground_types))
+ handle TYPE _ => @{prop True}
+ end
+
+fun generate_tptp_inference_file_for_theory ctxt thy format type_enc file_name =
+ let
+ val type_enc = type_enc |> type_enc_from_string Strict
+ |> adjust_type_enc format
+ val mono = polymorphism_of_type_enc type_enc <> Polymorphic
+ val path = file_name |> Path.explode
+ val _ = File.write path ""
+ val facts = facts_of thy
+ val atp_problem =
+ facts
+ |> map (fn ((_, loc), th) =>
+ ((Thm.get_name_hint th, loc),
+ th |> prop_of |> mono ? monomorphize_term ctxt))
+ |> prepare_atp_problem ctxt format Axiom Axiom type_enc true combsN false
+ false true [] @{prop False}
+ |> #1
+ val atp_problem =
+ atp_problem
+ |> map (apsnd (filter_out (is_problem_line_tautology ctxt format)))
+ val all_names = facts |> map (Thm.get_name_hint o snd)
+ val infers =
+ facts |> map (fn (_, th) =>
+ (fact_name_of (Thm.get_name_hint th),
+ theorems_mentioned_in_proof_term (SOME all_names) th))
+ val all_atp_problem_names =
+ atp_problem |> maps (map ident_of_problem_line o snd)
+ val infers =
+ infers |> filter (member (op =) all_atp_problem_names o fst)
+ |> map (apsnd (filter (member (op =) all_atp_problem_names)))
+ val ordered_names =
+ String_Graph.empty
+ |> fold (String_Graph.new_node o rpair ()) all_atp_problem_names
+ |> fold (fn (to, froms) =>
+ fold (fn from => String_Graph.add_edge (from, to)) froms)
+ infers
+ |> String_Graph.topological_order
+ val order_tab =
+ Symtab.empty
+ |> fold (Symtab.insert (op =))
+ (ordered_names ~~ (1 upto length ordered_names))
+ val name_ord = int_ord o pairself (the o Symtab.lookup order_tab)
+ val atp_problem =
+ atp_problem
+ |> (case format of DFG _ => I | _ => add_inferences_to_problem infers)
+ |> order_problem_facts name_ord
+ val ss = lines_for_atp_problem format (K []) atp_problem
+ val _ = app (File.append path) ss
+ in () end
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/etc/settings Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,5 @@
+# -*- shell-script -*- :mode=shellscript:
+
+TPTP_HOME="$COMPONENT"
+
+ISABELLE_TOOLS="$ISABELLE_TOOLS:$COMPONENT/lib/Tools"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_isabelle Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+# Author: Jasmin Blanchette
+#
+# DESCRIPTION: Isabelle tactics for TPTP
+
+
+PRG="$(basename "$0")"
+
+function usage() {
+ echo
+ echo "Usage: isabelle $PRG FILES"
+ echo
+ echo " Runs a combination of Isabelle tactics on TPTP problems."
+ echo
+ exit 1
+}
+
+[ "$#" -eq 0 -o "$1" = "-?" ] && usage
+
+SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
+
+for FILE in "$@"
+do
+ echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
+ML {* ATP_Problem_Import.isabelle_tptp_file \"$FILE\" *} end;" \
+ > /tmp/$SCRATCH.thy
+ "$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
+done
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_nitpick Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+# Author: Jasmin Blanchette
+#
+# DESCRIPTION: Nitpick for TPTP
+
+
+PRG="$(basename "$0")"
+
+function usage() {
+ echo
+ echo "Usage: isabelle $PRG FILES"
+ echo
+ echo " Runs Nitpick on TPTP problems."
+ echo
+ exit 1
+}
+
+[ "$#" -eq 0 -o "$1" = "-?" ] && usage
+
+SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
+
+for FILE in "$@"
+do
+ echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
+ML {* ATP_Problem_Import.nitpick_tptp_file \"$FILE\" *} end;" \
+ > /tmp/$SCRATCH.thy
+ "$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
+done
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_refute Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+# Author: Jasmin Blanchette
+#
+# DESCRIPTION: Refute for TPTP
+
+
+PRG="$(basename "$0")"
+
+function usage() {
+ echo
+ echo "Usage: isabelle $PRG FILES"
+ echo
+ echo " Runs Refute on TPTP problems."
+ echo
+ exit 1
+}
+
+[ "$#" -eq 0 -o "$1" = "-?" ] && usage
+
+SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
+
+for FILE in "$@"
+do
+ echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
+ML {* ATP_Problem_Import.refute_tptp_file \"$FILE\" *} end;" \
+ > /tmp/$SCRATCH.thy
+ "$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
+done
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_sledgehammer Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+# Author: Jasmin Blanchette
+#
+# DESCRIPTION: Sledgehammer for TPTP
+
+
+PRG="$(basename "$0")"
+
+function usage() {
+ echo
+ echo "Usage: isabelle $PRG FILES"
+ echo
+ echo " Runs Sledgehammer on TPTP problems."
+ echo
+ exit 1
+}
+
+[ "$#" -eq 0 -o "$1" = "-?" ] && usage
+
+SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
+
+for FILE in "$@"
+do
+ echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
+ML {* ATP_Problem_Import.sledgehammer_tptp_file \"$FILE\" *} end;" \
+ > /tmp/$SCRATCH.thy
+ "$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
+done
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_translate Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+#
+# Author: Jasmin Blanchette
+#
+# DESCRIPTION: Translation tool for TPTP formats
+
+
+PRG="$(basename "$0")"
+
+function usage() {
+ echo
+ echo "Usage: isabelle $PRG FORMAT IN_FILE OUT_FILE"
+ echo
+ echo " Translates TPTP input file to specified TPTP format (\"CNF\", \"FOF\", \"TFF0\", \"TFF1\", or \"THF0\")."
+ echo
+ exit 1
+}
+
+[ "$#" -ne 3 -o "$1" = "-?" ] && usage
+
+SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
+
+args=("$@")
+
+echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
+ML {* ATP_Problem_Import.translate_tptp_file \"${args[0]}\" \"${args[1]}\" \"${args[2]}\" *} end;" \
+ > /tmp/$SCRATCH.thy
+"$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
--- a/src/HOL/Tools/ATP/atp_problem.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/ATP/atp_problem.ML Tue Feb 14 11:16:07 2012 +0100
@@ -41,14 +41,9 @@
Formula of string * formula_kind
* ('a, 'a ho_type, ('a, 'a ho_type) ho_term) formula
* (string, string ho_type) ho_term option
- * (string, string ho_type) ho_term option
+ * (string, string ho_type) ho_term list
type 'a problem = (string * 'a problem_line list) list
- val isabelle_info_prefix : string
- val isabelle_info : atp_format -> string -> (string, 'a) ho_term option
- val introN : string
- val elimN : string
- val simpN : string
val tptp_cnf : string
val tptp_fof : string
val tptp_tff : string
@@ -79,6 +74,18 @@
val tptp_false : string
val tptp_true : string
val tptp_empty_list : string
+ val isabelle_info_prefix : string
+ val isabelle_info : atp_format -> string -> int -> (string, 'a) ho_term list
+ val extract_isabelle_status : (string, 'a) ho_term list -> string option
+ val extract_isabelle_rank : (string, 'a) ho_term list -> int
+ val introN : string
+ val elimN : string
+ val simpN : string
+ val spec_eqN : string
+ val rankN : string
+ val minimum_rank : int
+ val default_rank : int
+ val default_kbo_weight : int
val is_tptp_equal : string -> bool
val is_built_in_tptp_symbol : string -> bool
val is_tptp_variable : string -> bool
@@ -100,9 +107,12 @@
bool option -> (bool option -> 'c -> 'd -> 'd) -> ('a, 'b, 'c) formula
-> 'd -> 'd
val formula_map : ('c -> 'd) -> ('a, 'b, 'c) formula -> ('a, 'b, 'd) formula
+ val is_function_type : string ho_type -> bool
+ val is_predicate_type : string ho_type -> bool
val is_format_higher_order : atp_format -> bool
val is_format_typed : atp_format -> bool
- val lines_for_atp_problem : atp_format -> string problem -> string list
+ val lines_for_atp_problem :
+ atp_format -> (unit -> (string * int) list) -> string problem -> string list
val ensure_cnf_problem :
(string * string) problem -> (string * string) problem
val filter_cnf_ueq_problem :
@@ -153,26 +163,12 @@
datatype formula_kind = Axiom | Definition | Lemma | Hypothesis | Conjecture
datatype 'a problem_line =
Decl of string * 'a * 'a ho_type |
- Formula of string * formula_kind * ('a, 'a ho_type, ('a, 'a ho_type) ho_term) formula
- * (string, string ho_type) ho_term option * (string, string ho_type) ho_term option
+ Formula of string * formula_kind
+ * ('a, 'a ho_type, ('a, 'a ho_type) ho_term) formula
+ * (string, string ho_type) ho_term option
+ * (string, string ho_type) ho_term list
type 'a problem = (string * 'a problem_line list) list
-val isabelle_info_prefix = "isabelle_"
-
-(* Currently, only newer versions of SPASS, with sorted DFG format support, can
- process Isabelle metainformation. *)
-fun isabelle_info (DFG DFG_Sorted) s =
- SOME (ATerm ("[]", [ATerm (isabelle_info_prefix ^ s, [])]))
- | isabelle_info _ _ = NONE
-
-val introN = "intro"
-val elimN = "elim"
-val simpN = "simp"
-
-fun is_isabelle_info suffix (SOME (ATerm ("[]", [ATerm (s, [])]))) =
- s = isabelle_info_prefix ^ suffix
- | is_isabelle_info _ _ = false
-
(* official TPTP syntax *)
val tptp_cnf = "cnf"
val tptp_fof = "fof"
@@ -205,6 +201,37 @@
val tptp_true = "$true"
val tptp_empty_list = "[]"
+val isabelle_info_prefix = "isabelle_"
+
+val introN = "intro"
+val elimN = "elim"
+val simpN = "simp"
+val spec_eqN = "spec_eq"
+val rankN = "rank"
+
+val minimum_rank = 0
+val default_rank = 1000
+val default_kbo_weight = 1
+
+(* Currently, only newer versions of SPASS, with sorted DFG format support, can
+ process Isabelle metainformation. *)
+fun isabelle_info (DFG DFG_Sorted) status rank =
+ [] |> rank <> default_rank
+ ? cons (ATerm (isabelle_info_prefix ^ rankN,
+ [ATerm (string_of_int rank, [])]))
+ |> status <> "" ? cons (ATerm (isabelle_info_prefix ^ status, []))
+ | isabelle_info _ _ _ = []
+
+fun extract_isabelle_status (ATerm (s, []) :: _) =
+ try (unprefix isabelle_info_prefix) s
+ | extract_isabelle_status _ = NONE
+
+fun extract_isabelle_rank (tms as _ :: _) =
+ (case List.last tms of
+ ATerm (_, [ATerm (rank, [])]) => the (Int.fromString rank)
+ | _ => default_rank)
+ | extract_isabelle_rank _ = default_rank
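+
+(* Illustrative trace of the functions above (not part of the original code):
+ "isabelle_info (DFG DFG_Sorted) simpN default_rank" yields
+ [ATerm ("isabelle_simp", [])]; on that list, "extract_isabelle_status"
+ returns SOME "simp", and "extract_isabelle_rank" falls back to
+ "default_rank". *)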
+
fun is_tptp_equal s = (s = tptp_equal orelse s = tptp_old_equal)
fun is_built_in_tptp_symbol s =
s = tptp_old_equal orelse not (Char.isAlpha (String.sub (s, 0)))
@@ -253,6 +280,16 @@
| formula_map f (AConn (c, phis)) = AConn (c, map (formula_map f) phis)
| formula_map f (AAtom tm) = AAtom (f tm)
+fun is_function_type (AFun (_, ty)) = is_function_type ty
+ | is_function_type (AType (s, _)) =
+ s <> tptp_type_of_types andalso s <> tptp_bool_type
+ | is_function_type _ = false
+fun is_predicate_type (AFun (_, ty)) = is_predicate_type ty
+ | is_predicate_type (AType (s, _)) = (s = tptp_bool_type)
+ | is_predicate_type _ = false
+fun is_nontrivial_predicate_type (AFun (_, ty)) = is_predicate_type ty
+ | is_nontrivial_predicate_type _ = false
+
fun is_format_higher_order (THF _) = true
| is_format_higher_order _ = false
fun is_format_typed (TFF _) = true
@@ -283,11 +320,13 @@
| flatten_type _ =
raise Fail "unexpected higher-order type in first-order format"
+val dfg_individual_type = "iii" (* cannot clash *)
+
fun str_for_type format ty =
let
val dfg = (format = DFG DFG_Sorted)
fun str _ (AType (s, [])) =
- if dfg andalso s = tptp_individual_type then "Top" else s
+ if dfg andalso s = tptp_individual_type then dfg_individual_type else s
| str _ (AType (s, tys)) =
let val ss = tys |> map (str false) in
if s = tptp_product_type then
@@ -349,7 +388,7 @@
(AQuant (if s = tptp_ho_forall then AForall else AExists,
[(s', SOME ty)], AAtom tm))
| (_, true, [AAbs ((s', ty), tm)]) =>
- (* There is code in "ATP_Translate" to ensure that "Eps" is always
+ (* There is code in "ATP_Problem_Generate" to ensure that "Eps" is always
applied to an abstraction. *)
tptp_choice ^ "[" ^ s' ^ " : " ^ string_for_type format ty ^ "]: " ^
tptp_string_for_term format tm ^ ""
@@ -402,11 +441,12 @@
tptp_string_for_kind kind ^ ",\n (" ^
tptp_string_for_formula format phi ^ ")" ^
(case (source, info) of
- (NONE, NONE) => ""
- | (SOME tm, NONE) => ", " ^ tptp_string_for_term format tm
- | (_, SOME tm) =>
+ (NONE, []) => ""
+ | (SOME tm, []) => ", " ^ tptp_string_for_term format tm
+ | (_, tms) =>
", " ^ tptp_string_for_term format (the_source source) ^
- ", " ^ tptp_string_for_term format tm) ^ ").\n"
+ ", " ^ tptp_string_for_term format (ATerm (tptp_empty_list, tms))) ^
+ ").\n"
fun tptp_lines format =
maps (fn (_, []) => []
@@ -420,19 +460,19 @@
fun binder_atypes (AFun (ty1, ty2)) = ty1 :: binder_atypes ty2
| binder_atypes _ = []
-fun is_function_type (AFun (_, ty)) = is_function_type ty
- | is_function_type (AType (s, _)) =
- s <> tptp_type_of_types andalso s <> tptp_bool_type
- | is_function_type _ = false
-
-fun is_predicate_type (AFun (_, ty)) = is_predicate_type ty
- | is_predicate_type (AType (s, _)) = (s = tptp_bool_type)
- | is_predicate_type _ = false
-
fun dfg_string_for_formula flavor info =
let
- fun str_for_term simp (ATerm (s, tms)) =
- (if is_tptp_equal s then "equal" |> simp ? suffix ":lr"
+ fun suffix_tag top_level s =
+ if top_level then
+ case extract_isabelle_status info of
+ SOME s' => if s' = simpN then s ^ ":lr"
+ else if s' = spec_eqN then s ^ ":lt"
+ else s
+ | NONE => s
+ else
+ s
+ fun str_for_term top_level (ATerm (s, tms)) =
+ (if is_tptp_equal s then "equal" |> suffix_tag top_level
else if s = tptp_true then "true"
else if s = tptp_false then "false"
else s) ^
@@ -445,23 +485,23 @@
| str_for_conn _ AAnd = "and"
| str_for_conn _ AOr = "or"
| str_for_conn _ AImplies = "implies"
- | str_for_conn simp AIff = "equiv" |> simp ? suffix ":lr"
- fun str_for_formula simp (AQuant (q, xs, phi)) =
+ | str_for_conn top_level AIff = "equiv" |> suffix_tag top_level
+ fun str_for_formula top_level (AQuant (q, xs, phi)) =
str_for_quant q ^ "(" ^ "[" ^
commas (map (string_for_bound_var (DFG flavor)) xs) ^ "], " ^
- str_for_formula simp phi ^ ")"
- | str_for_formula simp (AConn (c, phis)) =
- str_for_conn simp c ^ "(" ^
+ str_for_formula top_level phi ^ ")"
+ | str_for_formula top_level (AConn (c, phis)) =
+ str_for_conn top_level c ^ "(" ^
commas (map (str_for_formula false) phis) ^ ")"
- | str_for_formula simp (AAtom tm) = str_for_term simp tm
- in str_for_formula (is_isabelle_info simpN info) end
+ | str_for_formula top_level (AAtom tm) = str_for_term top_level tm
+ in str_for_formula true end
-fun dfg_lines flavor problem =
+fun dfg_lines flavor kbo_weights problem =
let
val sorted = (flavor = DFG_Sorted)
val format = DFG flavor
- fun ary sym ty =
- "(" ^ sym ^ ", " ^ string_of_int (arity_of_type ty) ^ ")"
+ fun spair (sym, k) = "(" ^ sym ^ ", " ^ string_of_int k ^ ")"
+ fun ary sym = curry spair sym o arity_of_type
fun fun_typ sym ty =
"function(" ^ sym ^ ", " ^ string_for_type format ty ^ ")."
fun pred_typ sym ty =
@@ -469,39 +509,51 @@
commas (sym :: map (string_for_type format) (binder_atypes ty)) ^ ")."
fun formula pred (Formula (ident, kind, phi, _, info)) =
if pred kind then
- SOME ("formula(" ^ dfg_string_for_formula flavor info phi ^ ", " ^
- ident ^ ").")
+ let val rank = extract_isabelle_rank info in
+ "formula(" ^ dfg_string_for_formula flavor info phi ^ ", " ^ ident ^
+ (if rank = default_rank then "" else ", " ^ string_of_int rank) ^
+ ")." |> SOME
+ end
else
NONE
| formula _ _ = NONE
- fun filt f = problem |> map (map_filter f o snd) |> flat
+ fun filt f = problem |> map (map_filter f o snd) |> filter_out null
val func_aries =
filt (fn Decl (_, sym, ty) =>
if is_function_type ty then SOME (ary sym ty) else NONE
| _ => NONE)
- |> commas |> enclose "functions [" "]."
+ |> flat |> commas |> enclose "functions [" "]."
val pred_aries =
filt (fn Decl (_, sym, ty) =>
if is_predicate_type ty then SOME (ary sym ty) else NONE
| _ => NONE)
- |> commas |> enclose "predicates [" "]."
+ |> flat |> commas |> enclose "predicates [" "]."
fun sorts () =
filt (fn Decl (_, sym, AType (s, [])) =>
if s = tptp_type_of_types then SOME sym else NONE
- | _ => NONE)
- |> commas |> enclose "sorts [" "]."
- val syms = [func_aries, pred_aries] @ (if sorted then [sorts ()] else [])
+ | _ => NONE) @ [[dfg_individual_type]]
+ |> flat |> commas |> enclose "sorts [" "]."
+ fun do_kbo_weights () =
+ kbo_weights () |> map spair |> commas |> enclose "weights [" "]."
+ val syms =
+ [func_aries] @ (if sorted then [do_kbo_weights ()] else []) @
+ [pred_aries] @ (if sorted then [sorts ()] else [])
fun func_sigs () =
filt (fn Decl (_, sym, ty) =>
if is_function_type ty then SOME (fun_typ sym ty) else NONE
| _ => NONE)
+ |> flat
fun pred_sigs () =
filt (fn Decl (_, sym, ty) =>
- if is_predicate_type ty then SOME (pred_typ sym ty) else NONE
+ if is_nontrivial_predicate_type ty then SOME (pred_typ sym ty)
+ else NONE
| _ => NONE)
+ |> flat
val decls = if sorted then func_sigs () @ pred_sigs () else []
- val axioms = filt (formula (curry (op <>) Conjecture))
- val conjs = filt (formula (curry (op =) Conjecture))
+ val axioms =
+ filt (formula (curry (op <>) Conjecture)) |> separate [""] |> flat
+ val conjs =
+ filt (formula (curry (op =) Conjecture)) |> separate [""] |> flat
fun list_of _ [] = []
| list_of heading ss =
"list_of_" ^ heading ^ ".\n" :: map (suffix "\n") ss @
@@ -518,11 +570,11 @@
["end_problem.\n"]
end
-fun lines_for_atp_problem format problem =
+fun lines_for_atp_problem format kbo_weights problem =
"% This file was generated by Isabelle (most likely Sledgehammer)\n\
\% " ^ timestamp () ^ "\n" ::
(case format of
- DFG flavor => dfg_lines flavor
+ DFG flavor => dfg_lines flavor kbo_weights
| _ => tptp_lines format) problem
@@ -642,7 +694,21 @@
is still necessary). *)
val reserved_nice_names = [tptp_old_equal, "op", "eq"]
-fun readable_name underscore full_name s =
+(* hack to get the same hashing across Mirabelle runs (see "mirabelle.pl") *)
+fun cleanup_mirabelle_name s =
+ let
+ val mirabelle_infix = "_Mirabelle_"
+ val random_suffix_len = 10
+ val (s1, s2) = Substring.position mirabelle_infix (Substring.full s)
+ in
+ if Substring.isEmpty s2 then
+ s
+ else
+ Substring.string s1 ^
+ Substring.string (Substring.triml (size mirabelle_infix + random_suffix_len) s2)
+ end
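+
+(* Illustrative example (names invented for illustration): with a 10-character
+ random suffix, cleanup_mirabelle_name "foo_Mirabelle_0123456789bar" yields
+ "foobar", so hashes computed from fact names agree across Mirabelle runs. *)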
+
+fun readable_name protect full_name s =
(if s = full_name then
s
else
@@ -652,24 +718,24 @@
|> (fn s =>
if size s > max_readable_name_size then
String.substring (s, 0, max_readable_name_size div 2 - 4) ^
- string_of_int (hash_string full_name) ^
+ string_of_int (hash_string (cleanup_mirabelle_name full_name)) ^
String.extract (s, size s - max_readable_name_size div 2 + 4,
NONE)
else
s)
|> (fn s =>
if member (op =) reserved_nice_names s then full_name else s))
- |> underscore ? suffix "_"
+ |> protect
fun nice_name _ (full_name, _) NONE = (full_name, NONE)
- | nice_name underscore (full_name, desired_name) (SOME the_pool) =
+ | nice_name protect (full_name, desired_name) (SOME the_pool) =
if is_built_in_tptp_symbol full_name then
(full_name, SOME the_pool)
else case Symtab.lookup (fst the_pool) full_name of
SOME nice_name => (nice_name, SOME the_pool)
| NONE =>
let
- val nice_prefix = readable_name underscore full_name desired_name
+ val nice_prefix = readable_name protect full_name desired_name
fun add j =
let
val nice_name =
@@ -686,12 +752,22 @@
end
in add 0 |> apsnd SOME end
+fun avoid_clash_with_dfg_keywords s =
+ let val n = String.size s in
+ if n < 2 orelse (n = 2 andalso String.sub (s, 0) = String.sub (s, 1)) orelse
+ String.isSubstring "_" s then
+ s
+ else
+ String.substring (s, 0, n - 1) ^
+ String.str (Char.toUpper (String.sub (s, n - 1)))
+ end
+
fun nice_atp_problem readable_names format problem =
let
val empty_pool =
if readable_names then SOME (Symtab.empty, Symtab.empty) else NONE
- val underscore = case format of DFG _ => true | _ => false
- val nice_name = nice_name underscore
+ val nice_name =
+ nice_name (case format of DFG _ => avoid_clash_with_dfg_keywords | _ => I)
fun nice_type (AType (name, tys)) =
nice_name name ##>> pool_map nice_type tys #>> AType
| nice_type (AFun (ty1, ty2)) = nice_type ty1 ##>> nice_type ty2 #>> AFun
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/ATP/atp_problem_generate.ML Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,2748 @@
+(* Title: HOL/Tools/ATP/atp_problem_generate.ML
+ Author: Fabian Immler, TU Muenchen
+ Author: Makarius
+ Author: Jasmin Blanchette, TU Muenchen
+
+Translation of HOL to FOL for Metis and Sledgehammer.
+*)
+
+signature ATP_PROBLEM_GENERATE =
+sig
+ type ('a, 'b) ho_term = ('a, 'b) ATP_Problem.ho_term
+ type connective = ATP_Problem.connective
+ type ('a, 'b, 'c) formula = ('a, 'b, 'c) ATP_Problem.formula
+ type atp_format = ATP_Problem.atp_format
+ type formula_kind = ATP_Problem.formula_kind
+ type 'a problem = 'a ATP_Problem.problem
+
+ datatype scope = Global | Local | Assum | Chained
+ datatype status = General | Induct | Intro | Elim | Simp | Spec_Eq
+ type stature = scope * status
+
+ datatype polymorphism = Polymorphic | Raw_Monomorphic | Mangled_Monomorphic
+ datatype strictness = Strict | Non_Strict
+ datatype granularity = All_Vars | Positively_Naked_Vars | Ghost_Type_Arg_Vars
+ datatype type_level =
+ All_Types |
+ Noninf_Nonmono_Types of strictness * granularity |
+ Fin_Nonmono_Types of granularity |
+ Const_Arg_Types |
+ No_Types
+ type type_enc
+
+ val type_tag_idempotence : bool Config.T
+ val type_tag_arguments : bool Config.T
+ val no_lamsN : string
+ val hide_lamsN : string
+ val liftingN : string
+ val combsN : string
+ val combs_and_liftingN : string
+ val combs_or_liftingN : string
+ val lam_liftingN : string
+ val keep_lamsN : string
+ val schematic_var_prefix : string
+ val fixed_var_prefix : string
+ val tvar_prefix : string
+ val tfree_prefix : string
+ val const_prefix : string
+ val type_const_prefix : string
+ val class_prefix : string
+ val lam_lifted_prefix : string
+ val lam_lifted_mono_prefix : string
+ val lam_lifted_poly_prefix : string
+ val skolem_const_prefix : string
+ val old_skolem_const_prefix : string
+ val new_skolem_const_prefix : string
+ val combinator_prefix : string
+ val type_decl_prefix : string
+ val sym_decl_prefix : string
+ val guards_sym_formula_prefix : string
+ val tags_sym_formula_prefix : string
+ val fact_prefix : string
+ val conjecture_prefix : string
+ val helper_prefix : string
+ val class_rel_clause_prefix : string
+ val arity_clause_prefix : string
+ val tfree_clause_prefix : string
+ val lam_fact_prefix : string
+ val typed_helper_suffix : string
+ val untyped_helper_suffix : string
+ val type_tag_idempotence_helper_name : string
+ val predicator_name : string
+ val app_op_name : string
+ val type_guard_name : string
+ val type_tag_name : string
+ val native_type_prefix : string
+ val prefixed_predicator_name : string
+ val prefixed_app_op_name : string
+ val prefixed_type_tag_name : string
+ val ascii_of : string -> string
+ val unascii_of : string -> string
+ val unprefix_and_unascii : string -> string -> string option
+ val proxy_table : (string * (string * (thm * (string * string)))) list
+ val proxify_const : string -> (string * string) option
+ val invert_const : string -> string
+ val unproxify_const : string -> string
+ val new_skolem_var_name_from_const : string -> string
+ val atp_irrelevant_consts : string list
+ val atp_schematic_consts_of : term -> typ list Symtab.table
+ val is_type_enc_higher_order : type_enc -> bool
+ val polymorphism_of_type_enc : type_enc -> polymorphism
+ val level_of_type_enc : type_enc -> type_level
+ val is_type_enc_quasi_sound : type_enc -> bool
+ val is_type_enc_fairly_sound : type_enc -> bool
+ val type_enc_from_string : strictness -> string -> type_enc
+ val adjust_type_enc : atp_format -> type_enc -> type_enc
+ val mk_aconns :
+ connective -> ('a, 'b, 'c) formula list -> ('a, 'b, 'c) formula
+ val unmangled_const : string -> string * (string, 'b) ho_term list
+ val unmangled_const_name : string -> string list
+ val helper_table : ((string * bool) * thm list) list
+ val trans_lams_from_string :
+ Proof.context -> type_enc -> string -> term list -> term list * term list
+ val factsN : string
+ val prepare_atp_problem :
+ Proof.context -> atp_format -> formula_kind -> formula_kind -> type_enc
+ -> bool -> string -> bool -> bool -> bool -> term list -> term
+ -> ((string * stature) * term) list
+ -> string problem * string Symtab.table * (string * stature) list vector
+ * (string * term) list * int Symtab.table
+ val atp_problem_relevance_weights : string problem -> (string * real) list
+ val atp_problem_kbo_weights : string problem -> (string * int) list
+end;
+
+structure ATP_Problem_Generate : ATP_PROBLEM_GENERATE =
+struct
+
+open ATP_Util
+open ATP_Problem
+
+type name = string * string
+
+val type_tag_idempotence =
+ Attrib.setup_config_bool @{binding atp_type_tag_idempotence} (K false)
+val type_tag_arguments =
+ Attrib.setup_config_bool @{binding atp_type_tag_arguments} (K false)
+
+val no_lamsN = "no_lams" (* used internally; undocumented *)
+val hide_lamsN = "hide_lams"
+val liftingN = "lifting"
+val combsN = "combs"
+val combs_and_liftingN = "combs_and_lifting"
+val combs_or_liftingN = "combs_or_lifting"
+val keep_lamsN = "keep_lams"
+val lam_liftingN = "lam_lifting" (* legacy *)
+
+(* It's still unclear whether all TFF1 implementations will support type
+ signatures such as "!>[A : $tType] : $o", with ghost type variables. *)
+val avoid_first_order_ghost_type_vars = false
+
+val bound_var_prefix = "B_"
+val all_bound_var_prefix = "BA_"
+val exist_bound_var_prefix = "BE_"
+val schematic_var_prefix = "V_"
+val fixed_var_prefix = "v_"
+val tvar_prefix = "T_"
+val tfree_prefix = "t_"
+val const_prefix = "c_"
+val type_const_prefix = "tc_"
+val native_type_prefix = "n_"
+val class_prefix = "cl_"
+
+(* Freshness almost guaranteed! *)
+val atp_prefix = "ATP" ^ Long_Name.separator
+val atp_weak_prefix = "ATP:"
+
+val lam_lifted_prefix = atp_weak_prefix ^ "Lam"
+val lam_lifted_mono_prefix = lam_lifted_prefix ^ "m"
+val lam_lifted_poly_prefix = lam_lifted_prefix ^ "p"
+
+val skolem_const_prefix = atp_prefix ^ "Sko"
+val old_skolem_const_prefix = skolem_const_prefix ^ "o"
+val new_skolem_const_prefix = skolem_const_prefix ^ "n"
+
+val combinator_prefix = "COMB"
+
+val type_decl_prefix = "ty_"
+val sym_decl_prefix = "sy_"
+val guards_sym_formula_prefix = "gsy_"
+val tags_sym_formula_prefix = "tsy_"
+val uncurried_alias_eq_prefix = "unc_"
+val fact_prefix = "fact_"
+val conjecture_prefix = "conj_"
+val helper_prefix = "help_"
+val class_rel_clause_prefix = "clar_"
+val arity_clause_prefix = "arity_"
+val tfree_clause_prefix = "tfree_"
+
+val lam_fact_prefix = "ATP.lambda_"
+val typed_helper_suffix = "_T"
+val untyped_helper_suffix = "_U"
+val type_tag_idempotence_helper_name = helper_prefix ^ "ti_idem"
+
+val predicator_name = "pp"
+val app_op_name = "aa"
+val type_guard_name = "gg"
+val type_tag_name = "tt"
+
+val prefixed_predicator_name = const_prefix ^ predicator_name
+val prefixed_app_op_name = const_prefix ^ app_op_name
+val prefixed_type_tag_name = const_prefix ^ type_tag_name
+
+(*Escaping of special characters.
+ Alphanumeric characters are left unchanged.
+ The character _ goes to __
+ Characters in the range ASCII space to / go to _A to _P, respectively.
+ Other characters go to _nnn where nnn is the decimal ASCII code.*)
+val upper_a_minus_space = Char.ord #"A" - Char.ord #" "
+
+fun stringN_of_int 0 _ = ""
+ | stringN_of_int k n =
+ stringN_of_int (k - 1) (n div 10) ^ string_of_int (n mod 10)
+
+fun ascii_of_char c =
+ if Char.isAlphaNum c then
+ String.str c
+ else if c = #"_" then
+ "__"
+ else if #" " <= c andalso c <= #"/" then
+ "_" ^ String.str (Char.chr (Char.ord c + upper_a_minus_space))
+ else
+ (* fixed width, in case more digits follow *)
+ "_" ^ stringN_of_int 3 (Char.ord c)
+
+val ascii_of = String.translate ascii_of_char
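+
+(* Illustrative examples of the escaping scheme above (not from the original
+ sources): ascii_of "x_y" = "x__y" and ascii_of "op =" = "op_A_061", since
+ "_" is doubled, " " maps to "_A", and "=" (ASCII 61) maps to "_061". *)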
+
+(** Remove ASCII armoring from names in proof files **)
+
+(* We don't raise error exceptions because this code can run inside a worker
+ thread. Also, the errors are impossible. *)
+val unascii_of =
+ let
+ fun un rcs [] = String.implode(rev rcs)
+ | un rcs [#"_"] = un (#"_" :: rcs) [] (* ERROR *)
+ (* Three types of _ escapes: __, _A to _P, _nnn *)
+ | un rcs (#"_" :: #"_" :: cs) = un (#"_" :: rcs) cs
+ | un rcs (#"_" :: c :: cs) =
+ if #"A" <= c andalso c<= #"P" then
+ (* translation of #" " to #"/" *)
+ un (Char.chr (Char.ord c - upper_a_minus_space) :: rcs) cs
+ else
+ let val digits = List.take (c :: cs, 3) handle General.Subscript => [] in
+ case Int.fromString (String.implode digits) of
+ SOME n => un (Char.chr n :: rcs) (List.drop (cs, 2))
+ | NONE => un (c :: #"_" :: rcs) cs (* ERROR *)
+ end
+ | un rcs (c :: cs) = un (c :: rcs) cs
+ in un [] o String.explode end
+
+(* If string s has the prefix s1, return the result of deleting it,
+ un-ASCII'd. *)
+fun unprefix_and_unascii s1 s =
+ if String.isPrefix s1 s then
+ SOME (unascii_of (String.extract (s, size s1, NONE)))
+ else
+ NONE
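+
+(* For instance (illustrative): unprefix_and_unascii "c_" "c_x__y" = SOME "x_y",
+ undoing the escaping performed by "ascii_of" above. *)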
+
+val proxy_table =
+ [("c_False", (@{const_name False}, (@{thm fFalse_def},
+ ("fFalse", @{const_name ATP.fFalse})))),
+ ("c_True", (@{const_name True}, (@{thm fTrue_def},
+ ("fTrue", @{const_name ATP.fTrue})))),
+ ("c_Not", (@{const_name Not}, (@{thm fNot_def},
+ ("fNot", @{const_name ATP.fNot})))),
+ ("c_conj", (@{const_name conj}, (@{thm fconj_def},
+ ("fconj", @{const_name ATP.fconj})))),
+ ("c_disj", (@{const_name disj}, (@{thm fdisj_def},
+ ("fdisj", @{const_name ATP.fdisj})))),
+ ("c_implies", (@{const_name implies}, (@{thm fimplies_def},
+ ("fimplies", @{const_name ATP.fimplies})))),
+ ("equal", (@{const_name HOL.eq}, (@{thm fequal_def},
+ ("fequal", @{const_name ATP.fequal})))),
+ ("c_All", (@{const_name All}, (@{thm fAll_def},
+ ("fAll", @{const_name ATP.fAll})))),
+ ("c_Ex", (@{const_name Ex}, (@{thm fEx_def},
+ ("fEx", @{const_name ATP.fEx}))))]
+
+val proxify_const = AList.lookup (op =) proxy_table #> Option.map (snd o snd)
+
+(* Readable names for the more common symbolic functions. Do not mess with the
+ table unless you know what you are doing. *)
+val const_trans_table =
+ [(@{type_name Product_Type.prod}, "prod"),
+ (@{type_name Sum_Type.sum}, "sum"),
+ (@{const_name False}, "False"),
+ (@{const_name True}, "True"),
+ (@{const_name Not}, "Not"),
+ (@{const_name conj}, "conj"),
+ (@{const_name disj}, "disj"),
+ (@{const_name implies}, "implies"),
+ (@{const_name HOL.eq}, "equal"),
+ (@{const_name All}, "All"),
+ (@{const_name Ex}, "Ex"),
+ (@{const_name If}, "If"),
+ (@{const_name Set.member}, "member"),
+ (@{const_name Meson.COMBI}, combinator_prefix ^ "I"),
+ (@{const_name Meson.COMBK}, combinator_prefix ^ "K"),
+ (@{const_name Meson.COMBB}, combinator_prefix ^ "B"),
+ (@{const_name Meson.COMBC}, combinator_prefix ^ "C"),
+ (@{const_name Meson.COMBS}, combinator_prefix ^ "S")]
+ |> Symtab.make
+ |> fold (Symtab.update o swap o snd o snd o snd) proxy_table
+
+(* Invert the table of translations between Isabelle and ATPs. *)
+val const_trans_table_inv =
+ const_trans_table |> Symtab.dest |> map swap |> Symtab.make
+val const_trans_table_unprox =
+ Symtab.empty
+ |> fold (fn (_, (isa, (_, (_, atp)))) => Symtab.update (atp, isa)) proxy_table
+
+val invert_const = perhaps (Symtab.lookup const_trans_table_inv)
+val unproxify_const = perhaps (Symtab.lookup const_trans_table_unprox)
+
+fun lookup_const c =
+ case Symtab.lookup const_trans_table c of
+ SOME c' => c'
+ | NONE => ascii_of c
+
+fun ascii_of_indexname (v, 0) = ascii_of v
+ | ascii_of_indexname (v, i) = ascii_of v ^ "_" ^ string_of_int i
+
+fun make_bound_var x = bound_var_prefix ^ ascii_of x
+fun make_all_bound_var x = all_bound_var_prefix ^ ascii_of x
+fun make_exist_bound_var x = exist_bound_var_prefix ^ ascii_of x
+fun make_schematic_var v = schematic_var_prefix ^ ascii_of_indexname v
+fun make_fixed_var x = fixed_var_prefix ^ ascii_of x
+
+fun make_schematic_type_var (x, i) =
+ tvar_prefix ^ (ascii_of_indexname (unprefix "'" x, i))
+fun make_fixed_type_var x = tfree_prefix ^ (ascii_of (unprefix "'" x))
+
+(* "HOL.eq" and choice are mapped to the ATP's equivalents *)
+local
+ val choice_const = (fst o dest_Const o HOLogic.choice_const) Term.dummyT
+ fun default c = const_prefix ^ lookup_const c
+in
+ fun make_fixed_const _ @{const_name HOL.eq} = tptp_old_equal
+ | make_fixed_const (SOME (THF (_, _, THF_With_Choice))) c =
+ if c = choice_const then tptp_choice else default c
+ | make_fixed_const _ c = default c
+end
+
+fun make_fixed_type_const c = type_const_prefix ^ lookup_const c
+
+fun make_type_class clas = class_prefix ^ ascii_of clas
+
+fun new_skolem_var_name_from_const s =
+ let val ss = s |> space_explode Long_Name.separator in
+ nth ss (length ss - 2)
+ end
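+
+(* For example (with a hypothetical constant name): since Long_Name.separator
+ is ".", new_skolem_var_name_from_const "ATP.Skon.x.2" = "x". *)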
+
+(* These are either simplified away by "Meson.presimplify" (most of the time) or
+ handled specially via "fFalse", "fTrue", ..., "fequal". *)
+val atp_irrelevant_consts =
+ [@{const_name False}, @{const_name True}, @{const_name Not},
+ @{const_name conj}, @{const_name disj}, @{const_name implies},
+ @{const_name HOL.eq}, @{const_name If}, @{const_name Let}]
+
+val atp_monomorph_bad_consts =
+ atp_irrelevant_consts @
+ (* These are ignored anyway by the relevance filter (unless they appear in
+ higher-order places) but not by the monomorphizer. *)
+ [@{const_name all}, @{const_name "==>"}, @{const_name "=="},
+ @{const_name Trueprop}, @{const_name All}, @{const_name Ex},
+ @{const_name Ex1}, @{const_name Ball}, @{const_name Bex}]
+
+fun add_schematic_const (x as (_, T)) =
+ Monomorph.typ_has_tvars T ? Symtab.insert_list (op =) x
+val add_schematic_consts_of =
+ Term.fold_aterms (fn Const (x as (s, _)) =>
+ not (member (op =) atp_monomorph_bad_consts s)
+ ? add_schematic_const x
+ | _ => I)
+fun atp_schematic_consts_of t = add_schematic_consts_of t Symtab.empty
+
+(** Definitions and functions for FOL clauses and formulas for TPTP **)
+
+(** Isabelle arities **)
+
+type arity_atom = name * name * name list
+
+val type_class = the_single @{sort type}
+
+type arity_clause =
+ {name : string,
+ prem_atoms : arity_atom list,
+ concl_atom : arity_atom}
+
+fun add_prem_atom tvar =
+ fold (fn s => s <> type_class ? cons (`make_type_class s, `I tvar, []))
+
+(* Arity of type constructor "tcon :: (arg1, ..., argN) res" *)
+fun make_axiom_arity_clause (tcons, name, (cls, args)) =
+ let
+ val tvars = map (prefix tvar_prefix o string_of_int) (1 upto length args)
+ val tvars_srts = ListPair.zip (tvars, args)
+ in
+ {name = name,
+ prem_atoms = [] |> fold (uncurry add_prem_atom) tvars_srts,
+ concl_atom = (`make_type_class cls, `make_fixed_type_const tcons,
+ tvars ~~ tvars)}
+ end
+
+fun arity_clause _ _ (_, []) = []
+ | arity_clause seen n (tcons, ("HOL.type", _) :: ars) = (* ignore *)
+ arity_clause seen n (tcons, ars)
+ | arity_clause seen n (tcons, (ar as (class, _)) :: ars) =
+ if member (op =) seen class then
+ (* multiple arities for the same (tycon, class) pair *)
+ make_axiom_arity_clause (tcons,
+ lookup_const tcons ^ "___" ^ ascii_of class ^ "_" ^ string_of_int n,
+ ar) ::
+ arity_clause seen (n + 1) (tcons, ars)
+ else
+ make_axiom_arity_clause (tcons, lookup_const tcons ^ "___" ^
+ ascii_of class, ar) ::
+ arity_clause (class :: seen) n (tcons, ars)
+
+fun multi_arity_clause [] = []
+ | multi_arity_clause ((tcons, ars) :: tc_arlists) =
+ arity_clause [] 1 (tcons, ars) @ multi_arity_clause tc_arlists
+
+(* Generate all pairs (tycon, class, sorts) such that tycon belongs to class in
+ theory thy provided its arguments have the corresponding sorts. *)
+fun type_class_pairs thy tycons classes =
+ let
+ val alg = Sign.classes_of thy
+ fun domain_sorts tycon = Sorts.mg_domain alg tycon o single
+ fun add_class tycon class =
+ cons (class, domain_sorts tycon class)
+ handle Sorts.CLASS_ERROR _ => I
+ fun try_classes tycon = (tycon, fold (add_class tycon) classes [])
+ in map try_classes tycons end
+
+(*Proving one (tycon, class) membership may require proving others, so iterate.*)
+fun iter_type_class_pairs _ _ [] = ([], [])
+ | iter_type_class_pairs thy tycons classes =
+ let
+ fun maybe_insert_class s =
+ (s <> type_class andalso not (member (op =) classes s))
+ ? insert (op =) s
+ val cpairs = type_class_pairs thy tycons classes
+ val newclasses =
+ [] |> fold (fold (fold (fold maybe_insert_class) o snd) o snd) cpairs
+ val (classes', cpairs') = iter_type_class_pairs thy tycons newclasses
+ in (classes' @ classes, union (op =) cpairs' cpairs) end
+
+fun make_arity_clauses thy tycons =
+ iter_type_class_pairs thy tycons ##> multi_arity_clause
+
+
+(** Isabelle class relations **)
+
+type class_rel_clause =
+ {name : string,
+ subclass : name,
+ superclass : name}
+
+(* Generate all pairs (sub, super) such that sub is a proper subclass of super
+ in theory "thy". *)
+fun class_pairs _ [] _ = []
+ | class_pairs thy subs supers =
+ let
+ val class_less = Sorts.class_less (Sign.classes_of thy)
+ fun add_super sub super = class_less (sub, super) ? cons (sub, super)
+ fun add_supers sub = fold (add_super sub) supers
+ in fold add_supers subs [] end
+
+fun make_class_rel_clause (sub, super) =
+ {name = sub ^ "_" ^ super, subclass = `make_type_class sub,
+ superclass = `make_type_class super}
+
+fun make_class_rel_clauses thy subs supers =
+ map make_class_rel_clause (class_pairs thy subs supers)
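+
+(* Illustrative example with hypothetical class names "c1" and "c2":
+ make_class_rel_clause ("c1", "c2") =
+ {name = "c1_c2", subclass = ("cl_c1", "c1"), superclass = ("cl_c2", "c2")}. *)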
+
+(* intermediate terms *)
+datatype iterm =
+ IConst of name * typ * typ list |
+ IVar of name * typ |
+ IApp of iterm * iterm |
+ IAbs of (name * typ) * iterm
+
+fun ityp_of (IConst (_, T, _)) = T
+ | ityp_of (IVar (_, T)) = T
+ | ityp_of (IApp (t1, _)) = snd (dest_funT (ityp_of t1))
+ | ityp_of (IAbs ((_, T), tm)) = T --> ityp_of tm
+
+(*gets the head of a combinator application, along with the list of arguments*)
+fun strip_iterm_comb u =
+ let
+ fun stripc (IApp (t, u), ts) = stripc (t, u :: ts)
+ | stripc x = x
+ in stripc (u, []) end
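+
+(* For example (illustrative): strip_iterm_comb (IApp (IApp (f, x), y)) =
+ (f, [x, y]) for arbitrary iterms f, x and y. *)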
+
+fun atomic_types_of T = fold_atyps (insert (op =)) T []
+
+val tvar_a_str = "'a"
+val tvar_a = TVar ((tvar_a_str, 0), HOLogic.typeS)
+val tvar_a_name = (make_schematic_type_var (tvar_a_str, 0), tvar_a_str)
+val itself_name = `make_fixed_type_const @{type_name itself}
+val TYPE_name = `(make_fixed_const NONE) @{const_name TYPE}
+val tvar_a_atype = AType (tvar_a_name, [])
+val a_itself_atype = AType (itself_name, [tvar_a_atype])
+
+fun new_skolem_const_name s num_T_args =
+ [new_skolem_const_prefix, s, string_of_int num_T_args]
+ |> space_implode Long_Name.separator
+
+fun robust_const_type thy s =
+ if s = app_op_name then
+ Logic.varifyT_global @{typ "('a => 'b) => 'a => 'b"}
+ else if String.isPrefix lam_lifted_prefix s then
+ Logic.varifyT_global @{typ "'a => 'b"}
+ else
+ (* Old Skolems throw a "TYPE" exception here, which will be caught. *)
+ s |> Sign.the_const_type thy
+
+(* This function only makes sense if "T" is as general as possible. *)
+fun robust_const_typargs thy (s, T) =
+ if s = app_op_name then
+ let val (T1, T2) = T |> domain_type |> dest_funT in [T1, T2] end
+ else if String.isPrefix old_skolem_const_prefix s then
+ [] |> Term.add_tvarsT T |> rev |> map TVar
+ else if String.isPrefix lam_lifted_prefix s then
+ if String.isPrefix lam_lifted_poly_prefix s then
+ let val (T1, T2) = T |> dest_funT in [T1, T2] end
+ else
+ []
+ else
+ (s, T) |> Sign.const_typargs thy
+
+(* Converts an Isabelle/HOL term (with combinators) into an intermediate term.
+ Also accumulates sort information. *)
+fun iterm_from_term thy format bs (P $ Q) =
+ let
+ val (P', P_atomics_Ts) = iterm_from_term thy format bs P
+ val (Q', Q_atomics_Ts) = iterm_from_term thy format bs Q
+ in (IApp (P', Q'), union (op =) P_atomics_Ts Q_atomics_Ts) end
+ | iterm_from_term thy format _ (Const (c, T)) =
+ (IConst (`(make_fixed_const (SOME format)) c, T,
+ robust_const_typargs thy (c, T)),
+ atomic_types_of T)
+ | iterm_from_term _ _ _ (Free (s, T)) =
+ (IConst (`make_fixed_var s, T, []), atomic_types_of T)
+ | iterm_from_term _ format _ (Var (v as (s, _), T)) =
+ (if String.isPrefix Meson_Clausify.new_skolem_var_prefix s then
+ let
+ val Ts = T |> strip_type |> swap |> op ::
+ val s' = new_skolem_const_name s (length Ts)
+ in IConst (`(make_fixed_const (SOME format)) s', T, Ts) end
+ else
+ IVar ((make_schematic_var v, s), T), atomic_types_of T)
+ | iterm_from_term _ _ bs (Bound j) =
+ nth bs j |> (fn (_, (name, T)) => (IConst (name, T, []), atomic_types_of T))
+ | iterm_from_term thy format bs (Abs (s, T, t)) =
+ let
+ fun vary s = s |> AList.defined (op =) bs s ? vary o Symbol.bump_string
+ val s = vary s
+ val name = `make_bound_var s
+ val (tm, atomic_Ts) = iterm_from_term thy format ((s, (name, T)) :: bs) t
+ in (IAbs ((name, T), tm), union (op =) atomic_Ts (atomic_types_of T)) end
+
+datatype scope = Global | Local | Assum | Chained
+datatype status = General | Induct | Intro | Elim | Simp | Spec_Eq
+type stature = scope * status
+
+datatype order = First_Order | Higher_Order
+datatype polymorphism = Polymorphic | Raw_Monomorphic | Mangled_Monomorphic
+datatype strictness = Strict | Non_Strict
+datatype granularity = All_Vars | Positively_Naked_Vars | Ghost_Type_Arg_Vars
+datatype type_level =
+ All_Types |
+ Noninf_Nonmono_Types of strictness * granularity |
+ Fin_Nonmono_Types of granularity |
+ Const_Arg_Types |
+ No_Types
+
+datatype type_enc =
+ Simple_Types of order * polymorphism * type_level |
+ Guards of polymorphism * type_level |
+ Tags of polymorphism * type_level
+
+fun is_type_enc_higher_order (Simple_Types (Higher_Order, _, _)) = true
+ | is_type_enc_higher_order _ = false
+
+fun polymorphism_of_type_enc (Simple_Types (_, poly, _)) = poly
+ | polymorphism_of_type_enc (Guards (poly, _)) = poly
+ | polymorphism_of_type_enc (Tags (poly, _)) = poly
+
+fun level_of_type_enc (Simple_Types (_, _, level)) = level
+ | level_of_type_enc (Guards (_, level)) = level
+ | level_of_type_enc (Tags (_, level)) = level
+
+fun granularity_of_type_level (Noninf_Nonmono_Types (_, grain)) = grain
+ | granularity_of_type_level (Fin_Nonmono_Types grain) = grain
+ | granularity_of_type_level _ = All_Vars
+
+fun is_type_level_quasi_sound All_Types = true
+ | is_type_level_quasi_sound (Noninf_Nonmono_Types _) = true
+ | is_type_level_quasi_sound _ = false
+val is_type_enc_quasi_sound = is_type_level_quasi_sound o level_of_type_enc
+
+fun is_type_level_fairly_sound (Fin_Nonmono_Types _) = true
+ | is_type_level_fairly_sound level = is_type_level_quasi_sound level
+val is_type_enc_fairly_sound = is_type_level_fairly_sound o level_of_type_enc
+
+fun is_type_level_monotonicity_based (Noninf_Nonmono_Types _) = true
+ | is_type_level_monotonicity_based (Fin_Nonmono_Types _) = true
+ | is_type_level_monotonicity_based _ = false
+
+(* "_query", "_bang", and "_at" are for the ASCII-challenged Metis and
+ Mirabelle. *)
+val queries = ["?", "_query"]
+val bangs = ["!", "_bang"]
+val ats = ["@", "_at"]
+
+fun try_unsuffixes ss s =
+ fold (fn s' => fn NONE => try (unsuffix s') s | some => some) ss NONE
+
+fun try_nonmono constr suffixes fallback s =
+ case try_unsuffixes suffixes s of
+ SOME s =>
+ (case try_unsuffixes suffixes s of
+ SOME s => (constr Positively_Naked_Vars, s)
+ | NONE =>
+ case try_unsuffixes ats s of
+ SOME s => (constr Ghost_Type_Arg_Vars, s)
+ | NONE => (constr All_Vars, s))
+ | NONE => fallback s
+
+fun type_enc_from_string strictness s =
+ (case try (unprefix "poly_") s of
+ SOME s => (SOME Polymorphic, s)
+ | NONE =>
+ case try (unprefix "raw_mono_") s of
+ SOME s => (SOME Raw_Monomorphic, s)
+ | NONE =>
+ case try (unprefix "mono_") s of
+ SOME s => (SOME Mangled_Monomorphic, s)
+ | NONE => (NONE, s))
+ ||> (pair All_Types
+ |> try_nonmono Fin_Nonmono_Types bangs
+ |> try_nonmono (curry Noninf_Nonmono_Types strictness) queries)
+ |> (fn (poly, (level, core)) =>
+ case (core, (poly, level)) of
+ ("native", (SOME poly, _)) =>
+ (case (poly, level) of
+ (Polymorphic, All_Types) =>
+ Simple_Types (First_Order, Polymorphic, All_Types)
+ | (Mangled_Monomorphic, _) =>
+ if granularity_of_type_level level = All_Vars then
+ Simple_Types (First_Order, Mangled_Monomorphic, level)
+ else
+ raise Same.SAME
+ | _ => raise Same.SAME)
+ | ("native_higher", (SOME poly, _)) =>
+ (case (poly, level) of
+ (Polymorphic, All_Types) =>
+ Simple_Types (Higher_Order, Polymorphic, All_Types)
+ | (_, Noninf_Nonmono_Types _) => raise Same.SAME
+ | (Mangled_Monomorphic, _) =>
+ if granularity_of_type_level level = All_Vars then
+ Simple_Types (Higher_Order, Mangled_Monomorphic, level)
+ else
+ raise Same.SAME
+ | _ => raise Same.SAME)
+ | ("guards", (SOME poly, _)) =>
+ if poly = Mangled_Monomorphic andalso
+ granularity_of_type_level level = Ghost_Type_Arg_Vars then
+ raise Same.SAME
+ else
+ Guards (poly, level)
+ | ("tags", (SOME poly, _)) =>
+ if granularity_of_type_level level = Ghost_Type_Arg_Vars then
+ raise Same.SAME
+ else
+ Tags (poly, level)
+ | ("args", (SOME poly, All_Types (* naja *))) =>
+ Guards (poly, Const_Arg_Types)
+ | ("erased", (NONE, All_Types (* naja *))) =>
+ Guards (Polymorphic, No_Types)
+ | _ => raise Same.SAME)
+ handle Same.SAME => error ("Unknown type encoding: " ^ quote s ^ ".")
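+
+(* Example traced from the parser above (for illustration only): the string
+ "mono_guards?" is parsed to
+ Guards (Mangled_Monomorphic, Noninf_Nonmono_Types (strictness, All_Vars)). *)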
+
+fun adjust_type_enc (THF (TPTP_Monomorphic, _, _))
+ (Simple_Types (order, _, level)) =
+ Simple_Types (order, Mangled_Monomorphic, level)
+ | adjust_type_enc (THF _) type_enc = type_enc
+ | adjust_type_enc (TFF (TPTP_Monomorphic, _)) (Simple_Types (_, _, level)) =
+ Simple_Types (First_Order, Mangled_Monomorphic, level)
+ | adjust_type_enc (DFG DFG_Sorted) (Simple_Types (_, _, level)) =
+ Simple_Types (First_Order, Mangled_Monomorphic, level)
+ | adjust_type_enc (TFF _) (Simple_Types (_, poly, level)) =
+ Simple_Types (First_Order, poly, level)
+ | adjust_type_enc format (Simple_Types (_, poly, level)) =
+ adjust_type_enc format (Guards (poly, level))
+ | adjust_type_enc CNF_UEQ (type_enc as Guards stuff) =
+ (if is_type_enc_fairly_sound type_enc then Tags else Guards) stuff
+ | adjust_type_enc _ type_enc = type_enc
+
+fun constify_lifted (t $ u) = constify_lifted t $ constify_lifted u
+ | constify_lifted (Abs (s, T, t)) = Abs (s, T, constify_lifted t)
+ | constify_lifted (Free (x as (s, _))) =
+ (if String.isPrefix lam_lifted_prefix s then Const else Free) x
+ | constify_lifted t = t
+
+(* Requires bound variables not to clash with any schematic variables (as should
+ be the case right after lambda-lifting). *)
+fun open_form unprefix (t as Const (@{const_name All}, _) $ Abs (s, T, t')) =
+ (case try unprefix s of
+ SOME s =>
+ let
+ val names = Name.make_context (map fst (Term.add_var_names t' []))
+ val (s, _) = Name.variant s names
+ in open_form unprefix (subst_bound (Var ((s, 0), T), t')) end
+ | NONE => t)
+ | open_form _ t = t
+
+fun lift_lams_part_1 ctxt type_enc =
+ map close_form #> rpair ctxt
+ #-> Lambda_Lifting.lift_lambdas
+ (SOME ((if polymorphism_of_type_enc type_enc = Polymorphic then
+ lam_lifted_poly_prefix
+ else
+ lam_lifted_mono_prefix) ^ "_a"))
+ Lambda_Lifting.is_quantifier
+ #> fst
+fun lift_lams_part_2 (facts, lifted) =
+ (map (open_form (unprefix close_form_prefix) o constify_lifted) facts,
+ map (open_form I o constify_lifted) lifted)
+val lift_lams = lift_lams_part_2 ooo lift_lams_part_1
+
+fun intentionalize_def (Const (@{const_name All}, _) $ Abs (_, _, t)) =
+ intentionalize_def t
+ | intentionalize_def (Const (@{const_name HOL.eq}, _) $ t $ u) =
+ let
+ fun lam T t = Abs (Name.uu, T, t)
+ val (head, args) = strip_comb t ||> rev
+ val head_T = fastype_of head
+ val n = length args
+ val arg_Ts = head_T |> binder_types |> take n |> rev
+ val u = u |> subst_atomic (args ~~ map Bound (0 upto n - 1))
+ in HOLogic.eq_const head_T $ head $ fold lam arg_Ts u end
+ | intentionalize_def t = t
+
+type translated_formula =
+ {name : string,
+ stature : stature,
+ kind : formula_kind,
+ iformula : (name, typ, iterm) formula,
+ atomic_types : typ list}
+
+fun update_iformula f ({name, stature, kind, iformula, atomic_types}
+ : translated_formula) =
+ {name = name, stature = stature, kind = kind, iformula = f iformula,
+ atomic_types = atomic_types} : translated_formula
+
+fun fact_lift f ({iformula, ...} : translated_formula) = f iformula
+
+fun insert_type ctxt get_T x xs =
+ let val T = get_T x in
+ if exists (type_instance ctxt T o get_T) xs then xs
+ else x :: filter_out (type_generalization ctxt T o get_T) xs
+ end
+
+(* The Booleans indicate whether all type arguments should be kept. *)
+datatype type_arg_policy =
+ Explicit_Type_Args of bool (* infer_from_term_args *) |
+ Mangled_Type_Args |
+ No_Type_Args
+
+fun type_arg_policy monom_constrs type_enc s =
+ let val poly = polymorphism_of_type_enc type_enc in
+ if s = type_tag_name then
+ if poly = Mangled_Monomorphic then Mangled_Type_Args
+ else Explicit_Type_Args false
+ else case type_enc of
+ Simple_Types (_, Polymorphic, _) => Explicit_Type_Args false
+ | Tags (_, All_Types) => No_Type_Args
+ | _ =>
+ let val level = level_of_type_enc type_enc in
+ if level = No_Types orelse s = @{const_name HOL.eq} orelse
+ (s = app_op_name andalso level = Const_Arg_Types) then
+ No_Type_Args
+ else if poly = Mangled_Monomorphic then
+ Mangled_Type_Args
+ else if member (op =) monom_constrs s andalso
+ granularity_of_type_level level = Positively_Naked_Vars then
+ No_Type_Args
+ else
+ Explicit_Type_Args
+ (level = All_Types orelse
+ granularity_of_type_level level = Ghost_Type_Arg_Vars)
+ end
+ end
+
+(* Make atoms for sorted type variables. *)
+fun generic_add_sorts_on_type (_, []) = I
+ | generic_add_sorts_on_type ((x, i), s :: ss) =
+ generic_add_sorts_on_type ((x, i), ss)
+ #> (if s = the_single @{sort HOL.type} then
+ I
+ else if i = ~1 then
+ insert (op =) (`make_type_class s, `make_fixed_type_var x)
+ else
+ insert (op =) (`make_type_class s,
+ (make_schematic_type_var (x, i), x)))
+fun add_sorts_on_tfree (TFree (s, S)) = generic_add_sorts_on_type ((s, ~1), S)
+ | add_sorts_on_tfree _ = I
+fun add_sorts_on_tvar (TVar z) = generic_add_sorts_on_type z
+ | add_sorts_on_tvar _ = I
+
+fun type_class_formula type_enc class arg =
+ AAtom (ATerm (class, arg ::
+ (case type_enc of
+ Simple_Types (First_Order, Polymorphic, _) =>
+ if avoid_first_order_ghost_type_vars then [ATerm (TYPE_name, [arg])]
+ else []
+ | _ => [])))
+fun formulas_for_types type_enc add_sorts_on_typ Ts =
+ [] |> level_of_type_enc type_enc <> No_Types ? fold add_sorts_on_typ Ts
+ |> map (fn (class, name) =>
+ type_class_formula type_enc class (ATerm (name, [])))
+
+fun mk_aconns c phis =
+ let val (phis', phi') = split_last phis in
+ fold_rev (mk_aconn c) phis' phi'
+ end
+fun mk_ahorn [] phi = phi
+ | mk_ahorn phis psi = AConn (AImplies, [mk_aconns AAnd phis, psi])
+fun mk_aquant _ [] phi = phi
+ | mk_aquant q xs (phi as AQuant (q', xs', phi')) =
+ if q = q' then AQuant (q, xs @ xs', phi') else AQuant (q, xs, phi)
+ | mk_aquant q xs phi = AQuant (q, xs, phi)
+
+fun close_universally add_term_vars phi =
+ let
+ fun add_formula_vars bounds (AQuant (_, xs, phi)) =
+ add_formula_vars (map fst xs @ bounds) phi
+ | add_formula_vars bounds (AConn (_, phis)) =
+ fold (add_formula_vars bounds) phis
+ | add_formula_vars bounds (AAtom tm) = add_term_vars bounds tm
+ in mk_aquant AForall (add_formula_vars [] phi []) phi end
+
+fun add_term_vars bounds (ATerm (name as (s, _), tms)) =
+ (if is_tptp_variable s andalso
+ not (String.isPrefix tvar_prefix s) andalso
+ not (member (op =) bounds name) then
+ insert (op =) (name, NONE)
+ else
+ I)
+ #> fold (add_term_vars bounds) tms
+ | add_term_vars bounds (AAbs ((name, _), tm)) =
+ add_term_vars (name :: bounds) tm
+fun close_formula_universally phi = close_universally add_term_vars phi
+
+fun add_iterm_vars bounds (IApp (tm1, tm2)) =
+ fold (add_iterm_vars bounds) [tm1, tm2]
+ | add_iterm_vars _ (IConst _) = I
+ | add_iterm_vars bounds (IVar (name, T)) =
+ not (member (op =) bounds name) ? insert (op =) (name, SOME T)
+ | add_iterm_vars bounds (IAbs (_, tm)) = add_iterm_vars bounds tm
+fun close_iformula_universally phi = close_universally add_iterm_vars phi
+
+val fused_infinite_type_name = "ATP.fused_inf" (* shouldn't clash *)
+val fused_infinite_type = Type (fused_infinite_type_name, [])
+
+fun tvar_name (x as (s, _)) = (make_schematic_type_var x, s)
+
+fun ho_term_from_typ format type_enc =
+ let
+ fun term (Type (s, Ts)) =
+ ATerm (case (is_type_enc_higher_order type_enc, s) of
+ (true, @{type_name bool}) => `I tptp_bool_type
+ | (true, @{type_name fun}) => `I tptp_fun_type
+ | _ => if s = fused_infinite_type_name andalso
+ is_format_typed format then
+ `I tptp_individual_type
+ else
+ `make_fixed_type_const s,
+ map term Ts)
+ | term (TFree (s, _)) = ATerm (`make_fixed_type_var s, [])
+ | term (TVar (x, _)) = ATerm (tvar_name x, [])
+ in term end
+
+fun ho_term_for_type_arg format type_enc T =
+ if T = dummyT then NONE else SOME (ho_term_from_typ format type_enc T)
+
+(* This shouldn't clash with anything else. *)
+val uncurried_alias_sep = "\000"
+val mangled_type_sep = "\001"
+
+val ascii_of_uncurried_alias_sep = ascii_of uncurried_alias_sep
+
+fun generic_mangled_type_name f (ATerm (name, [])) = f name
+ | generic_mangled_type_name f (ATerm (name, tys)) =
+ f name ^ "(" ^ space_implode "," (map (generic_mangled_type_name f) tys)
+ ^ ")"
+ | generic_mangled_type_name _ _ = raise Fail "unexpected type abstraction"
+
+fun mangled_type format type_enc =
+ generic_mangled_type_name fst o ho_term_from_typ format type_enc
+
+fun make_native_type s =
+ if s = tptp_bool_type orelse s = tptp_fun_type orelse
+ s = tptp_individual_type then
+ s
+ else
+ native_type_prefix ^ ascii_of s
+
+fun ho_type_from_ho_term type_enc pred_sym ary =
+ let
+ fun to_mangled_atype ty =
+ AType ((make_native_type (generic_mangled_type_name fst ty),
+ generic_mangled_type_name snd ty), [])
+ fun to_poly_atype (ATerm (name, tys)) = AType (name, map to_poly_atype tys)
+ | to_poly_atype _ = raise Fail "unexpected type abstraction"
+ val to_atype =
+ if polymorphism_of_type_enc type_enc = Polymorphic then to_poly_atype
+ else to_mangled_atype
+ fun to_afun f1 f2 tys = AFun (f1 (hd tys), f2 (nth tys 1))
+ fun to_fo 0 ty = if pred_sym then bool_atype else to_atype ty
+ | to_fo ary (ATerm (_, tys)) = to_afun to_atype (to_fo (ary - 1)) tys
+ | to_fo _ _ = raise Fail "unexpected type abstraction"
+ fun to_ho (ty as ATerm ((s, _), tys)) =
+ if s = tptp_fun_type then to_afun to_ho to_ho tys else to_atype ty
+ | to_ho _ = raise Fail "unexpected type abstraction"
+ in if is_type_enc_higher_order type_enc then to_ho else to_fo ary end
+
+fun ho_type_from_typ format type_enc pred_sym ary =
+ ho_type_from_ho_term type_enc pred_sym ary
+ o ho_term_from_typ format type_enc
+
+fun aliased_uncurried ary (s, s') =
+ (s ^ ascii_of_uncurried_alias_sep ^ string_of_int ary, s' ^ string_of_int ary)
+fun unaliased_uncurried (s, s') =
+ case space_explode uncurried_alias_sep s of
+ [_] => (s, s')
+ | [s1, s2] => (s1, unsuffix s2 s')
+ | _ => raise Fail "ill-formed explicit application alias"
+
+fun raw_mangled_const_name type_name ty_args (s, s') =
+ let
+ fun type_suffix f g =
+ fold_rev (curry (op ^) o g o prefix mangled_type_sep o type_name f)
+ ty_args ""
+ in (s ^ type_suffix fst ascii_of, s' ^ type_suffix snd I) end
+fun mangled_const_name format type_enc =
+ map_filter (ho_term_for_type_arg format type_enc)
+ #> raw_mangled_const_name generic_mangled_type_name
+
+val parse_mangled_ident =
+ Scan.many1 (not o member (op =) ["(", ")", ","]) >> implode
+
+fun parse_mangled_type x =
+ (parse_mangled_ident
+ -- Scan.optional ($$ "(" |-- Scan.optional parse_mangled_types [] --| $$ ")")
+ [] >> ATerm) x
+and parse_mangled_types x =
+ (parse_mangled_type ::: Scan.repeat ($$ "," |-- parse_mangled_type)) x
+
+fun unmangled_type s =
+ s |> suffix ")" |> raw_explode
+ |> Scan.finite Symbol.stopper
+ (Scan.error (!! (fn _ => raise Fail ("unrecognized mangled type " ^
+ quote s)) parse_mangled_type))
+ |> fst
+
+fun unmangled_const_name s =
+ (s, s) |> unaliased_uncurried |> fst |> space_explode mangled_type_sep
+fun unmangled_const s =
+ let val ss = unmangled_const_name s in
+ (hd ss, map unmangled_type (tl ss))
+ end
+
+fun introduce_proxies_in_iterm type_enc =
+ let
+ fun tweak_ho_quant ho_quant T [IAbs _] = IConst (`I ho_quant, T, [])
+ | tweak_ho_quant ho_quant (T as Type (_, [p_T as Type (_, [x_T, _]), _]))
+ _ =
+ (* Eta-expand "!!" and "??", to work around LEO-II 1.2.8 parser
+ limitation. This works in conjuction with special code in
+ "ATP_Problem" that uses the syntactic sugar "!" and "?" whenever
+ possible. *)
+ IAbs ((`I "P", p_T),
+ IApp (IConst (`I ho_quant, T, []),
+ IAbs ((`I "X", x_T),
+ IApp (IConst (`I "P", p_T, []),
+ IConst (`I "X", x_T, [])))))
+ | tweak_ho_quant _ _ _ = raise Fail "unexpected type for quantifier"
+ fun intro top_level args (IApp (tm1, tm2)) =
+ IApp (intro top_level (tm2 :: args) tm1, intro false [] tm2)
+ | intro top_level args (IConst (name as (s, _), T, T_args)) =
+ (case proxify_const s of
+ SOME proxy_base =>
+ if top_level orelse is_type_enc_higher_order type_enc then
+ case (top_level, s) of
+ (_, "c_False") => IConst (`I tptp_false, T, [])
+ | (_, "c_True") => IConst (`I tptp_true, T, [])
+ | (false, "c_Not") => IConst (`I tptp_not, T, [])
+ | (false, "c_conj") => IConst (`I tptp_and, T, [])
+ | (false, "c_disj") => IConst (`I tptp_or, T, [])
+ | (false, "c_implies") => IConst (`I tptp_implies, T, [])
+ | (false, "c_All") => tweak_ho_quant tptp_ho_forall T args
+ | (false, "c_Ex") => tweak_ho_quant tptp_ho_exists T args
+ | (false, s) =>
+ if is_tptp_equal s andalso length args = 2 then
+ IConst (`I tptp_equal, T, [])
+ else
+ (* Use a proxy even for partially applied THF0 equality,
+ because the LEO-II and Satallax parsers complain about not
+ being able to infer the type of "=". *)
+ IConst (proxy_base |>> prefix const_prefix, T, T_args)
+ | _ => IConst (name, T, [])
+ else
+ IConst (proxy_base |>> prefix const_prefix, T, T_args)
+ | NONE => if s = tptp_choice then tweak_ho_quant tptp_choice T args
+ else IConst (name, T, T_args))
+ | intro _ _ (IAbs (bound, tm)) = IAbs (bound, intro false [] tm)
+ | intro _ _ tm = tm
+ in intro true [] end
+
+fun mangle_type_args_in_const format type_enc (name as (s, _)) T_args =
+ case unprefix_and_unascii const_prefix s of
+ NONE => (name, T_args)
+ | SOME s'' =>
+ case type_arg_policy [] type_enc (invert_const s'') of
+ Mangled_Type_Args => (mangled_const_name format type_enc T_args name, [])
+ | _ => (name, T_args)
+fun mangle_type_args_in_iterm format type_enc =
+ if polymorphism_of_type_enc type_enc = Mangled_Monomorphic then
+ let
+ fun mangle (IApp (tm1, tm2)) = IApp (mangle tm1, mangle tm2)
+ | mangle (tm as IConst (_, _, [])) = tm
+ | mangle (IConst (name, T, T_args)) =
+ mangle_type_args_in_const format type_enc name T_args
+ |> (fn (name, T_args) => IConst (name, T, T_args))
+ | mangle (IAbs (bound, tm)) = IAbs (bound, mangle tm)
+ | mangle tm = tm
+ in mangle end
+ else
+ I
+
+fun chop_fun 0 T = ([], T)
+ | chop_fun n (Type (@{type_name fun}, [dom_T, ran_T])) =
+ chop_fun (n - 1) ran_T |>> cons dom_T
+ | chop_fun _ T = ([], T)
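+
+(* For example (illustrative): chop_fun 2 @{typ "'a => 'b => 'c"} =
+ ([@{typ 'a}, @{typ 'b}], @{typ 'c}). *)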
+
+fun filter_const_type_args _ _ _ [] = []
+ | filter_const_type_args thy s ary T_args =
+ let
+ val U = robust_const_type thy s
+ val arg_U_vars = fold Term.add_tvarsT (U |> chop_fun ary |> fst) []
+ val U_args = (s, U) |> robust_const_typargs thy
+ in
+ U_args ~~ T_args
+ |> map (fn (U, T) =>
+ if member (op =) arg_U_vars (dest_TVar U) then dummyT else T)
+ end
+ handle TYPE _ => T_args
+
+fun filter_type_args_in_const _ _ _ _ _ [] = []
+ | filter_type_args_in_const thy monom_constrs type_enc ary s T_args =
+ case unprefix_and_unascii const_prefix s of
+ NONE =>
+ if level_of_type_enc type_enc = No_Types orelse s = tptp_choice then []
+ else T_args
+ | SOME s'' =>
+ let
+ val s'' = invert_const s''
+ fun filter_T_args false = T_args
+ | filter_T_args true = filter_const_type_args thy s'' ary T_args
+ in
+ case type_arg_policy monom_constrs type_enc s'' of
+ Explicit_Type_Args infer_from_term_args =>
+ filter_T_args infer_from_term_args
+ | No_Type_Args => []
+ | Mangled_Type_Args => raise Fail "unexpected (un)mangled symbol"
+ end
+fun filter_type_args_in_iterm thy monom_constrs type_enc =
+ let
+ fun filt ary (IApp (tm1, tm2)) = IApp (filt (ary + 1) tm1, filt 0 tm2)
+ | filt ary (IConst (name as (s, _), T, T_args)) =
+ filter_type_args_in_const thy monom_constrs type_enc ary s T_args
+ |> (fn T_args => IConst (name, T, T_args))
+ | filt _ (IAbs (bound, tm)) = IAbs (bound, filt 0 tm)
+ | filt _ tm = tm
+ in filt 0 end
+
+fun iformula_from_prop ctxt format type_enc eq_as_iff =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ fun do_term bs t atomic_Ts =
+ iterm_from_term thy format bs (Envir.eta_contract t)
+ |>> (introduce_proxies_in_iterm type_enc
+ #> mangle_type_args_in_iterm format type_enc #> AAtom)
+ ||> union (op =) atomic_Ts
+ fun do_quant bs q pos s T t' =
+ let
+ val s = singleton (Name.variant_list (map fst bs)) s
+ val universal = Option.map (q = AExists ? not) pos
+ val name =
+ s |> `(case universal of
+ SOME true => make_all_bound_var
+ | SOME false => make_exist_bound_var
+ | NONE => make_bound_var)
+ in
+ do_formula ((s, (name, T)) :: bs) pos t'
+ #>> mk_aquant q [(name, SOME T)]
+ ##> union (op =) (atomic_types_of T)
+ end
+ and do_conn bs c pos1 t1 pos2 t2 =
+ do_formula bs pos1 t1 ##>> do_formula bs pos2 t2 #>> uncurry (mk_aconn c)
+ and do_formula bs pos t =
+ case t of
+ @{const Trueprop} $ t1 => do_formula bs pos t1
+ | @{const Not} $ t1 => do_formula bs (Option.map not pos) t1 #>> mk_anot
+ | Const (@{const_name All}, _) $ Abs (s, T, t') =>
+ do_quant bs AForall pos s T t'
+ | (t0 as Const (@{const_name All}, _)) $ t1 =>
+ do_formula bs pos (t0 $ eta_expand (map (snd o snd) bs) t1 1)
+ | Const (@{const_name Ex}, _) $ Abs (s, T, t') =>
+ do_quant bs AExists pos s T t'
+ | (t0 as Const (@{const_name Ex}, _)) $ t1 =>
+ do_formula bs pos (t0 $ eta_expand (map (snd o snd) bs) t1 1)
+ | @{const HOL.conj} $ t1 $ t2 => do_conn bs AAnd pos t1 pos t2
+ | @{const HOL.disj} $ t1 $ t2 => do_conn bs AOr pos t1 pos t2
+ | @{const HOL.implies} $ t1 $ t2 =>
+ do_conn bs AImplies (Option.map not pos) t1 pos t2
+ | Const (@{const_name HOL.eq}, Type (_, [@{typ bool}, _])) $ t1 $ t2 =>
+ if eq_as_iff then do_conn bs AIff NONE t1 NONE t2 else do_term bs t
+ | _ => do_term bs t
+ in do_formula [] end
+
+fun presimplify_term ctxt t =
+ t |> exists_Const (member (op =) Meson.presimplified_consts o fst) t
+ ? (Skip_Proof.make_thm (Proof_Context.theory_of ctxt)
+ #> Meson.presimplify
+ #> prop_of)
+
+fun concealed_bound_name j = atp_weak_prefix ^ string_of_int j
+fun conceal_bounds Ts t =
+ subst_bounds (map (Free o apfst concealed_bound_name)
+ (0 upto length Ts - 1 ~~ Ts), t)
+fun reveal_bounds Ts =
+ subst_atomic (map (fn (j, T) => (Free (concealed_bound_name j, T), Bound j))
+ (0 upto length Ts - 1 ~~ Ts))
+
+fun is_fun_equality (@{const_name HOL.eq},
+ Type (_, [Type (@{type_name fun}, _), _])) = true
+ | is_fun_equality _ = false
+
+fun extensionalize_term ctxt t =
+ if exists_Const is_fun_equality t then
+ let val thy = Proof_Context.theory_of ctxt in
+ t |> cterm_of thy |> Meson.extensionalize_conv ctxt
+ |> prop_of |> Logic.dest_equals |> snd
+ end
+ else
+ t
+
+fun simple_translate_lambdas do_lambdas ctxt t =
+ let val thy = Proof_Context.theory_of ctxt in
+ if Meson.is_fol_term thy t then
+ t
+ else
+ let
+ fun trans Ts t =
+ case t of
+ @{const Not} $ t1 => @{const Not} $ trans Ts t1
+ | (t0 as Const (@{const_name All}, _)) $ Abs (s, T, t') =>
+ t0 $ Abs (s, T, trans (T :: Ts) t')
+ | (t0 as Const (@{const_name All}, _)) $ t1 =>
+ trans Ts (t0 $ eta_expand Ts t1 1)
+ | (t0 as Const (@{const_name Ex}, _)) $ Abs (s, T, t') =>
+ t0 $ Abs (s, T, trans (T :: Ts) t')
+ | (t0 as Const (@{const_name Ex}, _)) $ t1 =>
+ trans Ts (t0 $ eta_expand Ts t1 1)
+ | (t0 as @{const HOL.conj}) $ t1 $ t2 =>
+ t0 $ trans Ts t1 $ trans Ts t2
+ | (t0 as @{const HOL.disj}) $ t1 $ t2 =>
+ t0 $ trans Ts t1 $ trans Ts t2
+ | (t0 as @{const HOL.implies}) $ t1 $ t2 =>
+ t0 $ trans Ts t1 $ trans Ts t2
+ | (t0 as Const (@{const_name HOL.eq}, Type (_, [@{typ bool}, _])))
+ $ t1 $ t2 =>
+ t0 $ trans Ts t1 $ trans Ts t2
+ | _ =>
+ if not (exists_subterm (fn Abs _ => true | _ => false) t) then t
+ else t |> Envir.eta_contract |> do_lambdas ctxt Ts
+ val (t, ctxt') = Variable.import_terms true [t] ctxt |>> the_single
+ in t |> trans [] |> singleton (Variable.export_terms ctxt' ctxt) end
+ end
+
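+(* Replace each lambda-abstraction by a fresh constant of the appropriate type.
+   This is used as a fallback when introducing combinators fails. *)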
+fun do_cheaply_conceal_lambdas Ts (t1 $ t2) =
+ do_cheaply_conceal_lambdas Ts t1
+ $ do_cheaply_conceal_lambdas Ts t2
+ | do_cheaply_conceal_lambdas Ts (Abs (_, T, t)) =
+ Const (lam_lifted_poly_prefix ^ serial_string (),
+ T --> fastype_of1 (T :: Ts, t))
+ | do_cheaply_conceal_lambdas _ t = t
+
+fun do_introduce_combinators ctxt Ts t =
+ let val thy = Proof_Context.theory_of ctxt in
+ t |> conceal_bounds Ts
+ |> cterm_of thy
+ |> Meson_Clausify.introduce_combinators_in_cterm
+ |> prop_of |> Logic.dest_equals |> snd
+ |> reveal_bounds Ts
+ end
+ (* A type variable of sort "{}" will make abstraction fail. *)
+ handle THM _ => t |> do_cheaply_conceal_lambdas Ts
+val introduce_combinators = simple_translate_lambdas do_introduce_combinators
+
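+(* Apply the lambda translation to the facts' terms and package the generated
+   lambda definitions as additional axioms. *)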
+fun preprocess_abstractions_in_terms trans_lams facts =
+ let
+ val (facts, lambda_ts) =
+ facts |> map (snd o snd) |> trans_lams
+ |>> map2 (fn (name, (kind, _)) => fn t => (name, (kind, t))) facts
+ val lam_facts =
+ map2 (fn t => fn j =>
+ ((lam_fact_prefix ^ Int.toString j, (Global, Spec_Eq)),
+ (Axiom, t)))
+ lambda_ts (1 upto length lambda_ts)
+ in (facts, lam_facts) end
+
+(* Metis's use of "resolve_tac" freezes the schematic variables. We simulate
+   this behavior in Sledgehammer to prevent the discovery of unreplayable
+   proofs. *)
+fun freeze_term t =
+ let
+ fun freeze (t $ u) = freeze t $ freeze u
+ | freeze (Abs (s, T, t)) = Abs (s, T, freeze t)
+ | freeze (Var ((s, i), T)) =
+ Free (atp_weak_prefix ^ s ^ "_" ^ string_of_int i, T)
+ | freeze t = t
+ in t |> exists_subterm is_Var t ? freeze end
+
+fun presimp_prop ctxt role t =
+ (let
+ val thy = Proof_Context.theory_of ctxt
+ val t = t |> Envir.beta_eta_contract
+ |> transform_elim_prop
+ |> Object_Logic.atomize_term thy
+ val need_trueprop = (fastype_of t = @{typ bool})
+ in
+ t |> need_trueprop ? HOLogic.mk_Trueprop
+ |> extensionalize_term ctxt
+ |> presimplify_term ctxt
+ |> HOLogic.dest_Trueprop
+ end
+ handle TERM _ => if role = Conjecture then @{term False} else @{term True})
+ |> pair role
+
+fun make_formula ctxt format type_enc eq_as_iff name stature kind t =
+ let
+ val (iformula, atomic_Ts) =
+ iformula_from_prop ctxt format type_enc eq_as_iff
+ (SOME (kind <> Conjecture)) t []
+ |>> close_iformula_universally
+ in
+ {name = name, stature = stature, kind = kind, iformula = iformula,
+ atomic_types = atomic_Ts}
+ end
+
+fun make_fact ctxt format type_enc eq_as_iff ((name, stature), t) =
+ case t |> make_formula ctxt format type_enc (eq_as_iff andalso format <> CNF)
+ name stature Axiom of
+ formula as {iformula = AAtom (IConst ((s, _), _, _)), ...} =>
+ if s = tptp_true then NONE else SOME formula
+ | formula => SOME formula
+
+fun s_not_trueprop (@{const Trueprop} $ t) = @{const Trueprop} $ s_not t
+ | s_not_trueprop t =
+ if fastype_of t = @{typ bool} then s_not t
+ else @{prop False} (* "t" is too meta *)
+
+fun make_conjecture ctxt format type_enc =
+ map (fn ((name, stature), (kind, t)) =>
+ t |> kind = Conjecture ? s_not_trueprop
+ |> make_formula ctxt format type_enc (format <> CNF) name stature
+ kind)
+
+(** Finite and infinite type inference **)
+
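+(* For each of the first "ary" argument types of the constant "s", the type
+   variables occurring in it. *)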
+fun tvar_footprint thy s ary =
+ (case unprefix_and_unascii const_prefix s of
+ SOME s =>
+ s |> invert_const |> robust_const_type thy |> chop_fun ary |> fst
+ |> map (fn T => Term.add_tvarsT T [] |> map fst)
+ | NONE => [])
+ handle TYPE _ => []
+
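+(* A set of argument positions whose types jointly mention all type variables
+   occurring in the constant's argument types (the "ghost" arguments); for
+   equality, all argument positions are ghosts. *)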
+fun ghost_type_args thy s ary =
+ if is_tptp_equal s then
+ 0 upto ary - 1
+ else
+ let
+ val footprint = tvar_footprint thy s ary
+ val eq = (s = @{const_name HOL.eq})
+ fun ghosts _ [] = []
+ | ghosts seen ((i, tvars) :: args) =
+ ghosts (union (op =) seen tvars) args
+ |> (eq orelse exists (fn tvar => not (member (op =) seen tvar)) tvars)
+ ? cons i
+ in
+ if forall null footprint then
+ []
+ else
+ 0 upto length footprint - 1 ~~ footprint
+ |> sort (rev_order o list_ord Term_Ord.indexname_ord o pairself snd)
+ |> ghosts []
+ end
+
+type monotonicity_info =
+ {maybe_finite_Ts : typ list,
+ surely_finite_Ts : typ list,
+ maybe_infinite_Ts : typ list,
+ surely_infinite_Ts : typ list,
+ maybe_nonmono_Ts : typ list}
+
+(* These types witness that the type classes they belong to allow infinite
+   models and hence that any type of these classes is monotonic. *)
+val known_infinite_types =
+ [@{typ nat}, HOLogic.intT, HOLogic.realT, @{typ "nat => bool"}]
+
+fun is_type_kind_of_surely_infinite ctxt strictness cached_Ts T =
+ strictness <> Strict andalso is_type_surely_infinite ctxt true cached_Ts T
+
+(* Finite types such as "unit", "bool", "bool * bool", and "bool => bool" are
+ dangerous because their "exhaust" properties can easily lead to unsound ATP
+ proofs. On the other hand, all HOL infinite types can be given the same
+ models in first-order logic (via Löwenheim-Skolem). *)
+
+fun should_encode_type _ (_ : monotonicity_info) All_Types _ = true
+ | should_encode_type ctxt {maybe_finite_Ts, surely_infinite_Ts,
+ maybe_nonmono_Ts, ...}
+ (Noninf_Nonmono_Types (strictness, grain)) T =
+ grain = Ghost_Type_Arg_Vars orelse
+ (exists (type_intersect ctxt T) maybe_nonmono_Ts andalso
+ not (exists (type_instance ctxt T) surely_infinite_Ts orelse
+ (not (member (type_equiv ctxt) maybe_finite_Ts T) andalso
+ is_type_kind_of_surely_infinite ctxt strictness surely_infinite_Ts
+ T)))
+ | should_encode_type ctxt {surely_finite_Ts, maybe_infinite_Ts,
+ maybe_nonmono_Ts, ...}
+ (Fin_Nonmono_Types grain) T =
+ grain = Ghost_Type_Arg_Vars orelse
+ (exists (type_intersect ctxt T) maybe_nonmono_Ts andalso
+ (exists (type_generalization ctxt T) surely_finite_Ts orelse
+ (not (member (type_equiv ctxt) maybe_infinite_Ts T) andalso
+ is_type_surely_finite ctxt T)))
+ | should_encode_type _ _ _ _ = false
+
+fun should_guard_type ctxt mono (Guards (_, level)) should_guard_var T =
+ should_guard_var () andalso should_encode_type ctxt mono level T
+ | should_guard_type _ _ _ _ _ = false
+
+fun is_maybe_universal_var (IConst ((s, _), _, _)) =
+ String.isPrefix bound_var_prefix s orelse
+ String.isPrefix all_bound_var_prefix s
+ | is_maybe_universal_var (IVar _) = true
+ | is_maybe_universal_var _ = false
+
+datatype site =
+ Top_Level of bool option |
+ Eq_Arg of bool option |
+ Elsewhere
+
+fun should_tag_with_type _ _ _ (Top_Level _) _ _ = false
+ | should_tag_with_type ctxt mono (Tags (_, level)) site u T =
+ if granularity_of_type_level level = All_Vars then
+ should_encode_type ctxt mono level T
+ else
+ (case (site, is_maybe_universal_var u) of
+ (Eq_Arg _, true) => should_encode_type ctxt mono level T
+ | _ => false)
+ | should_tag_with_type _ _ _ _ _ _ = false
+
+fun fused_type ctxt mono level =
+ let
+ val should_encode = should_encode_type ctxt mono level
+ fun fuse 0 T = if should_encode T then T else fused_infinite_type
+ | fuse ary (Type (@{type_name fun}, [T1, T2])) =
+ fuse 0 T1 --> fuse (ary - 1) T2
+ | fuse _ _ = raise Fail "expected function type"
+ in fuse end
+
+(** predicators and application operators **)
+
+type sym_info =
+ {pred_sym : bool, min_ary : int, max_ary : int, types : typ list,
+ in_conj : bool}
+
+fun default_sym_tab_entries type_enc =
+ (make_fixed_const NONE @{const_name undefined},
+ {pred_sym = false, min_ary = 0, max_ary = 0, types = [],
+ in_conj = false}) ::
+ ([tptp_false, tptp_true]
+ |> map (rpair {pred_sym = true, min_ary = 0, max_ary = 0, types = [],
+ in_conj = false})) @
+ ([tptp_equal, tptp_old_equal]
+ |> map (rpair {pred_sym = true, min_ary = 2, max_ary = 2, types = [],
+ in_conj = false}))
+ |> not (is_type_enc_higher_order type_enc)
+ ? cons (prefixed_predicator_name,
+ {pred_sym = true, min_ary = 1, max_ary = 1, types = [],
+ in_conj = false})
+
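+(* How aggressively explicit application operators are introduced: with
+   "Incomplete_Apply", each symbol keeps its minimum observed arity; with
+   "Sufficient_Apply", arities are additionally lowered where function-typed
+   variables could be applied; with "Full_Apply", all applications are made
+   explicit. *)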
+datatype app_op_level = Incomplete_Apply | Sufficient_Apply | Full_Apply
+
+fun sym_table_for_facts ctxt type_enc app_op_level conjs facts =
+ let
+ fun consider_var_ary const_T var_T max_ary =
+ let
+ fun iter ary T =
+ if ary = max_ary orelse type_instance ctxt var_T T orelse
+ type_instance ctxt T var_T then
+ ary
+ else
+ iter (ary + 1) (range_type T)
+ in iter 0 const_T end
+ fun add_universal_var T (accum as ((bool_vars, fun_var_Ts), sym_tab)) =
+ if app_op_level = Sufficient_Apply andalso
+ (can dest_funT T orelse T = @{typ bool}) then
+ let
+ val bool_vars' = bool_vars orelse body_type T = @{typ bool}
+ fun repair_min_ary {pred_sym, min_ary, max_ary, types, in_conj} =
+ {pred_sym = pred_sym andalso not bool_vars',
+ min_ary = fold (fn T' => consider_var_ary T' T) types min_ary,
+ max_ary = max_ary, types = types, in_conj = in_conj}
+ val fun_var_Ts' =
+ fun_var_Ts |> can dest_funT T ? insert_type ctxt I T
+ in
+ if bool_vars' = bool_vars andalso
+ pointer_eq (fun_var_Ts', fun_var_Ts) then
+ accum
+ else
+ ((bool_vars', fun_var_Ts'), Symtab.map (K repair_min_ary) sym_tab)
+ end
+ else
+ accum
+ fun add_iterm_syms conj_fact top_level tm
+ (accum as ((bool_vars, fun_var_Ts), sym_tab)) =
+ let val (head, args) = strip_iterm_comb tm in
+ (case head of
+ IConst ((s, _), T, _) =>
+ if String.isPrefix bound_var_prefix s orelse
+ String.isPrefix all_bound_var_prefix s then
+ add_universal_var T accum
+ else if String.isPrefix exist_bound_var_prefix s then
+ accum
+ else
+ let val ary = length args in
+ ((bool_vars, fun_var_Ts),
+ case Symtab.lookup sym_tab s of
+ SOME {pred_sym, min_ary, max_ary, types, in_conj} =>
+ let
+ val pred_sym =
+ pred_sym andalso top_level andalso not bool_vars
+ val types' = types |> insert_type ctxt I T
+ val in_conj = in_conj orelse conj_fact
+ val min_ary =
+ if app_op_level = Sufficient_Apply andalso
+ not (pointer_eq (types', types)) then
+ fold (consider_var_ary T) fun_var_Ts min_ary
+ else
+ min_ary
+ in
+ Symtab.update (s, {pred_sym = pred_sym,
+ min_ary = Int.min (ary, min_ary),
+ max_ary = Int.max (ary, max_ary),
+ types = types', in_conj = in_conj})
+ sym_tab
+ end
+ | NONE =>
+ let
+ val pred_sym = top_level andalso not bool_vars
+ val min_ary =
+ case app_op_level of
+ Incomplete_Apply => ary
+ | Sufficient_Apply =>
+ fold (consider_var_ary T) fun_var_Ts ary
+ | Full_Apply => 0
+ in
+ Symtab.update_new (s,
+ {pred_sym = pred_sym, min_ary = min_ary,
+ max_ary = ary, types = [T], in_conj = conj_fact})
+ sym_tab
+ end)
+ end
+ | IVar (_, T) => add_universal_var T accum
+ | IAbs ((_, T), tm) =>
+ accum |> add_universal_var T |> add_iterm_syms conj_fact false tm
+ | _ => accum)
+ |> fold (add_iterm_syms conj_fact false) args
+ end
+ fun add_fact_syms conj_fact =
+ K (add_iterm_syms conj_fact true) |> formula_fold NONE |> fact_lift
+ in
+ ((false, []), Symtab.empty)
+ |> fold (add_fact_syms true) conjs
+ |> fold (add_fact_syms false) facts
+ |> snd
+ |> fold Symtab.update (default_sym_tab_entries type_enc)
+ end
+
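+(* Minimum arity at which the symbol "s" is applied, with sensible defaults for
+   the built-in predicator, application operator, and type guard. *)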
+fun min_ary_of sym_tab s =
+ case Symtab.lookup sym_tab s of
+ SOME ({min_ary, ...} : sym_info) => min_ary
+ | NONE =>
+ case unprefix_and_unascii const_prefix s of
+ SOME s =>
+ let val s = s |> unmangled_const_name |> hd |> invert_const in
+ if s = predicator_name then 1
+ else if s = app_op_name then 2
+ else if s = type_guard_name then 1
+ else 0
+ end
+ | NONE => 0
+
+(* True if the constant only ever appears at the top-level position in
+   literals and always with the same arity (even across different type
+   instantiations). Such a constant receives all of its arguments and can be
+   used directly as a predicate; otherwise it must be wrapped in a
+   predicator. *)
+fun is_pred_sym sym_tab s =
+ case Symtab.lookup sym_tab s of
+ SOME ({pred_sym, min_ary, max_ary, ...} : sym_info) =>
+ pred_sym andalso min_ary = max_ary
+ | NONE => false
+
+val app_op = `(make_fixed_const NONE) app_op_name
+val predicator_combconst =
+ IConst (`(make_fixed_const NONE) predicator_name, @{typ "bool => bool"}, [])
+
+fun list_app head args = fold (curry (IApp o swap)) args head
+fun predicator tm = IApp (predicator_combconst, tm)
+
+fun mk_app_op format type_enc head arg =
+ let
+ val head_T = ityp_of head
+ val (arg_T, res_T) = dest_funT head_T
+ val app =
+ IConst (app_op, head_T --> head_T, [arg_T, res_T])
+ |> mangle_type_args_in_iterm format type_enc
+ in list_app app [head, arg] end
+
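+(* Bring a fact's terms into first-order shape: introduce explicit application
+   operators (or uncurried aliases) and predicators where needed, and filter
+   the constants' type arguments. *)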
+fun firstorderize_fact thy monom_constrs format type_enc sym_tab
+ uncurried_aliases =
+ let
+ fun do_app arg head = mk_app_op format type_enc head arg
+ fun list_app_ops head args = fold do_app args head
+ fun introduce_app_ops tm =
+ let val (head, args) = tm |> strip_iterm_comb ||> map introduce_app_ops in
+ case head of
+ IConst (name as (s, _), T, T_args) =>
+ if uncurried_aliases andalso String.isPrefix const_prefix s then
+ let
+ val ary = length args
+ val name = name |> ary > min_ary_of sym_tab s
+ ? aliased_uncurried ary
+ in list_app (IConst (name, T, T_args)) args end
+ else
+ args |> chop (min_ary_of sym_tab s)
+ |>> list_app head |-> list_app_ops
+ | _ => list_app_ops head args
+ end
+ fun introduce_predicators tm =
+ case strip_iterm_comb tm of
+ (IConst ((s, _), _, _), _) =>
+ if is_pred_sym sym_tab s then tm else predicator tm
+ | _ => predicator tm
+ val do_iterm =
+ not (is_type_enc_higher_order type_enc)
+ ? (introduce_app_ops #> introduce_predicators)
+ #> filter_type_args_in_iterm thy monom_constrs type_enc
+ in update_iformula (formula_map do_iterm) end
+
+(** Helper facts **)
+
+val not_ffalse = @{lemma "~ fFalse" by (unfold fFalse_def) fast}
+val ftrue = @{lemma "fTrue" by (unfold fTrue_def) fast}
+
+(* The Boolean indicates whether a fairly sound type encoding is needed. *)
+val helper_table =
+ [(("COMBI", false), @{thms Meson.COMBI_def}),
+ (("COMBK", false), @{thms Meson.COMBK_def}),
+ (("COMBB", false), @{thms Meson.COMBB_def}),
+ (("COMBC", false), @{thms Meson.COMBC_def}),
+ (("COMBS", false), @{thms Meson.COMBS_def}),
+ ((predicator_name, false), [not_ffalse, ftrue]),
+ (("fFalse", false), [not_ffalse]),
+ (("fFalse", true), @{thms True_or_False}),
+ (("fTrue", false), [ftrue]),
+ (("fTrue", true), @{thms True_or_False}),
+ (("fNot", false),
+ @{thms fNot_def [THEN Meson.iff_to_disjD, THEN conjunct1]
+ fNot_def [THEN Meson.iff_to_disjD, THEN conjunct2]}),
+ (("fconj", false),
+ @{lemma "~ P | ~ Q | fconj P Q" "~ fconj P Q | P" "~ fconj P Q | Q"
+ by (unfold fconj_def) fast+}),
+ (("fdisj", false),
+ @{lemma "~ P | fdisj P Q" "~ Q | fdisj P Q" "~ fdisj P Q | P | Q"
+ by (unfold fdisj_def) fast+}),
+ (("fimplies", false),
+ @{lemma "P | fimplies P Q" "~ Q | fimplies P Q" "~ fimplies P Q | ~ P | Q"
+ by (unfold fimplies_def) fast+}),
+ (("fequal", true),
+ (* This is a lie: Higher-order equality doesn't need a sound type encoding.
+ However, this is done so for backward compatibility: Including the
+ equality helpers by default in Metis breaks a few existing proofs. *)
+ @{thms fequal_def [THEN Meson.iff_to_disjD, THEN conjunct1]
+ fequal_def [THEN Meson.iff_to_disjD, THEN conjunct2]}),
+ (* Partial characterization of "fAll" and "fEx". A complete characterization
+ would require the axiom of choice for replay with Metis. *)
+ (("fAll", false), [@{lemma "~ fAll P | P x" by (auto simp: fAll_def)}]),
+ (("fEx", false), [@{lemma "~ P x | fEx P" by (auto simp: fEx_def)}]),
+ (("If", true), @{thms if_True if_False True_or_False})]
+ |> map (apsnd (map zero_var_indexes))
+
+fun atype_of_type_vars (Simple_Types (_, Polymorphic, _)) = SOME atype_of_types
+ | atype_of_type_vars _ = NONE
+
+fun bound_tvars type_enc sorts Ts =
+ (sorts ? mk_ahorn (formulas_for_types type_enc add_sorts_on_tvar Ts))
+ #> mk_aquant AForall
+ (map_filter (fn TVar (x as (s, _), _) =>
+ SOME ((make_schematic_type_var x, s),
+ atype_of_type_vars type_enc)
+ | _ => NONE) Ts)
+
+fun eq_formula type_enc atomic_Ts bounds pred_sym tm1 tm2 =
+ (if pred_sym then AConn (AIff, [AAtom tm1, AAtom tm2])
+ else AAtom (ATerm (`I tptp_equal, [tm1, tm2])))
+ |> mk_aquant AForall bounds
+ |> close_formula_universally
+ |> bound_tvars type_enc true atomic_Ts
+
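+(* Fact number j of n is given a rank interpolated linearly between "min_rank"
+   and "max_rank"; helper facts keep the default rank. *)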
+val helper_rank = default_rank
+val min_rank = 9 * helper_rank div 10
+val max_rank = 4 * min_rank
+
+fun rank_of_fact_num n j = min_rank + (max_rank - min_rank) * j div n
+
+val type_tag = `(make_fixed_const NONE) type_tag_name
+
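+(* Axiom expressing that the type tag is idempotent: tagging an already tagged
+   term has no further effect. *)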
+fun type_tag_idempotence_fact format type_enc =
+ let
+ fun var s = ATerm (`I s, [])
+ fun tag tm = ATerm (type_tag, [var "A", tm])
+ val tagged_var = tag (var "X")
+ in
+ Formula (type_tag_idempotence_helper_name, Axiom,
+ eq_formula type_enc [] [] false (tag tagged_var) tagged_var,
+ NONE, isabelle_info format spec_eqN helper_rank)
+ end
+
+fun should_specialize_helper type_enc t =
+ polymorphism_of_type_enc type_enc <> Polymorphic andalso
+ level_of_type_enc type_enc <> No_Types andalso
+ not (null (Term.hidden_polymorphism t))
+
+fun helper_facts_for_sym ctxt format type_enc (s, {types, ...} : sym_info) =
+ case unprefix_and_unascii const_prefix s of
+ SOME mangled_s =>
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val unmangled_s = mangled_s |> unmangled_const_name |> hd
+ fun dub needs_fairly_sound j k =
+ unmangled_s ^ "_" ^ string_of_int j ^ "_" ^ string_of_int k ^
+ (if mangled_s = unmangled_s then "" else "_" ^ ascii_of mangled_s) ^
+ (if needs_fairly_sound then typed_helper_suffix
+ else untyped_helper_suffix)
+ fun dub_and_inst needs_fairly_sound (th, j) =
+ let val t = prop_of th in
+ if should_specialize_helper type_enc t then
+ map (fn T => specialize_type thy (invert_const unmangled_s, T) t)
+ types
+ else
+ [t]
+ end
+ |> tag_list 1
+ |> map (fn (k, t) =>
+ ((dub needs_fairly_sound j k, (Global, Spec_Eq)), t))
+ val make_facts = map_filter (make_fact ctxt format type_enc false)
+ val fairly_sound = is_type_enc_fairly_sound type_enc
+ in
+ helper_table
+ |> maps (fn ((helper_s, needs_fairly_sound), ths) =>
+ if helper_s <> unmangled_s orelse
+ (needs_fairly_sound andalso not fairly_sound) then
+ []
+ else
+ ths ~~ (1 upto length ths)
+ |> maps (dub_and_inst needs_fairly_sound)
+ |> make_facts)
+ end
+ | NONE => []
+fun helper_facts_for_sym_table ctxt format type_enc sym_tab =
+ Symtab.fold_rev (append o helper_facts_for_sym ctxt format type_enc) sym_tab
+ []
+
+(***************************************************************)
+(* Type Classes Present in the Axiom or Conjecture Clauses *)
+(***************************************************************)
+
+fun set_insert (x, s) = Symtab.update (x, ()) s
+
+fun add_classes (sorts, cset) = List.foldl set_insert cset (flat sorts)
+
+(* Remove this trivial type class (FIXME: similar code elsewhere) *)
+fun delete_type cset = Symtab.delete_safe (the_single @{sort HOL.type}) cset
+
+fun classes_of_terms get_Ts =
+ map (map snd o get_Ts)
+ #> List.foldl add_classes Symtab.empty
+ #> delete_type #> Symtab.keys
+
+val tfree_classes_of_terms = classes_of_terms Misc_Legacy.term_tfrees
+val tvar_classes_of_terms = classes_of_terms Misc_Legacy.term_tvars
+
+fun fold_type_constrs f (Type (s, Ts)) x =
+ fold (fold_type_constrs f) Ts (f (s, x))
+ | fold_type_constrs _ _ x = x
+
+(* Type constructors used to instantiate overloaded constants are the only ones
+ needed. *)
+fun add_type_constrs_in_term thy =
+ let
+ fun add (Const (@{const_name Meson.skolem}, _) $ _) = I
+ | add (t $ u) = add t #> add u
+ | add (Const x) =
+ x |> robust_const_typargs thy |> fold (fold_type_constrs set_insert)
+ | add (Abs (_, _, u)) = add u
+ | add _ = I
+ in add end
+
+fun type_constrs_of_terms thy ts =
+ Symtab.keys (fold (add_type_constrs_in_term thy) ts Symtab.empty)
+
+fun extract_lambda_def (Const (@{const_name HOL.eq}, _) $ t $ u) =
+ let val (head, args) = strip_comb t in
+ (head |> dest_Const |> fst,
+ fold_rev (fn t as Var ((s, _), T) =>
+ (fn u => Abs (s, T, abstract_over (t, u)))
+ | _ => raise Fail "expected Var") args u)
+ end
+ | extract_lambda_def _ = raise Fail "malformed lifted lambda"
+
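+(* Map the name of a lambda translation scheme to the corresponding translation
+   on lists of terms, paired with any generated lambda definitions. *)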
+fun trans_lams_from_string ctxt type_enc lam_trans =
+ if lam_trans = no_lamsN then
+ rpair []
+ else if lam_trans = hide_lamsN then
+ lift_lams ctxt type_enc ##> K []
+ else if lam_trans = liftingN orelse lam_trans = lam_liftingN then
+ lift_lams ctxt type_enc
+ else if lam_trans = combsN then
+ map (introduce_combinators ctxt) #> rpair []
+ else if lam_trans = combs_and_liftingN then
+ lift_lams_part_1 ctxt type_enc
+ ##> maps (fn t => [t, introduce_combinators ctxt (intentionalize_def t)])
+ #> lift_lams_part_2
+ else if lam_trans = combs_or_liftingN then
+ lift_lams_part_1 ctxt type_enc
+ ##> map (fn t => case head_of (strip_qnt_body @{const_name All} t) of
+ @{term "op =::bool => bool => bool"} => t
+ | _ => introduce_combinators ctxt (intentionalize_def t))
+ #> lift_lams_part_2
+ else if lam_trans = keep_lamsN then
+ map (Envir.eta_contract) #> rpair []
+ else
+ error ("Unknown lambda translation scheme: " ^ quote lam_trans ^ ".")
+
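+(* Translate the hypotheses, conjecture, and facts into the intermediate
+   formula representation, together with class relationship clauses, arity
+   clauses, and the lifted lambda definitions. *)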
+fun translate_formulas ctxt format prem_kind type_enc lam_trans presimp hyp_ts
+ concl_t facts =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val trans_lams = trans_lams_from_string ctxt type_enc lam_trans
+ val fact_ts = facts |> map snd
+ (* Remove existing facts from the conjecture, as this can dramatically
+ boost an ATP's performance (for some reason). *)
+ val hyp_ts =
+ hyp_ts
+ |> map (fn t => if member (op aconv) fact_ts t then @{prop True} else t)
+ val facts = facts |> map (apsnd (pair Axiom))
+ val conjs =
+ map (pair prem_kind) hyp_ts @ [(Conjecture, s_not_trueprop concl_t)]
+ |> map (apsnd freeze_term)
+ |> map2 (pair o rpair (Local, General) o string_of_int)
+ (0 upto length hyp_ts)
+ val ((conjs, facts), lam_facts) =
+ (conjs, facts)
+ |> presimp ? pairself (map (apsnd (uncurry (presimp_prop ctxt))))
+ |> (if lam_trans = no_lamsN then
+ rpair []
+ else
+ op @
+ #> preprocess_abstractions_in_terms trans_lams
+ #>> chop (length conjs))
+ val conjs = conjs |> make_conjecture ctxt format type_enc
+ val (fact_names, facts) =
+ facts
+ |> map_filter (fn (name, (_, t)) =>
+ make_fact ctxt format type_enc true (name, t)
+ |> Option.map (pair name))
+ |> ListPair.unzip
+ val lifted = lam_facts |> map (extract_lambda_def o snd o snd)
+ val lam_facts =
+ lam_facts |> map_filter (make_fact ctxt format type_enc true o apsnd snd)
+ val all_ts = concl_t :: hyp_ts @ fact_ts
+ val subs = tfree_classes_of_terms all_ts
+ val supers = tvar_classes_of_terms all_ts
+ val tycons = type_constrs_of_terms thy all_ts
+ val (supers, arity_clauses) =
+ if level_of_type_enc type_enc = No_Types then ([], [])
+ else make_arity_clauses thy tycons supers
+ val class_rel_clauses = make_class_rel_clauses thy subs supers
+ in
+ (fact_names |> map single, union (op =) subs supers, conjs,
+ facts @ lam_facts, class_rel_clauses, arity_clauses, lifted)
+ end
+
+val type_guard = `(make_fixed_const NONE) type_guard_name
+
+fun type_guard_iterm format type_enc T tm =
+ IApp (IConst (type_guard, T --> @{typ bool}, [T])
+ |> mangle_type_args_in_iterm format type_enc, tm)
+
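+(* Whether the variable occurs as a direct ("naked") argument of an equality in
+   a positive context. *)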
+fun is_var_positively_naked_in_term _ (SOME false) _ accum = accum
+ | is_var_positively_naked_in_term name _ (ATerm ((s, _), tms)) accum =
+ accum orelse (is_tptp_equal s andalso member (op =) tms (ATerm (name, [])))
+ | is_var_positively_naked_in_term _ _ _ _ = true
+
+fun is_var_ghost_type_arg_in_term thy polym_constrs name pos tm accum =
+ is_var_positively_naked_in_term name pos tm accum orelse
+ let
+ val var = ATerm (name, [])
+ fun is_nasty_in_term (ATerm (_, [])) = false
+ | is_nasty_in_term (ATerm ((s, _), tms)) =
+ let
+ val ary = length tms
+ val polym_constr = member (op =) polym_constrs s
+ val ghosts = ghost_type_args thy s ary
+ in
+ exists (fn (j, tm) =>
+ if polym_constr then
+ member (op =) ghosts j andalso
+ (tm = var orelse is_nasty_in_term tm)
+ else
+ tm = var andalso member (op =) ghosts j)
+ (0 upto ary - 1 ~~ tms)
+ orelse (not polym_constr andalso exists is_nasty_in_term tms)
+ end
+ | is_nasty_in_term _ = true
+ in is_nasty_in_term tm end
+
+fun should_guard_var_in_formula thy polym_constrs level pos phi (SOME true)
+ name =
+ (case granularity_of_type_level level of
+ All_Vars => true
+ | Positively_Naked_Vars =>
+ formula_fold pos (is_var_positively_naked_in_term name) phi false
+ | Ghost_Type_Arg_Vars =>
+ formula_fold pos (is_var_ghost_type_arg_in_term thy polym_constrs name)
+ phi false)
+ | should_guard_var_in_formula _ _ _ _ _ _ _ = true
+
+fun always_guard_var_in_formula _ _ _ _ _ _ _ = true
+
+fun should_generate_tag_bound_decl _ _ _ (SOME true) _ = false
+ | should_generate_tag_bound_decl ctxt mono (Tags (_, level)) _ T =
+ granularity_of_type_level level <> All_Vars andalso
+ should_encode_type ctxt mono level T
+ | should_generate_tag_bound_decl _ _ _ _ _ = false
+
+fun mk_aterm format type_enc name T_args args =
+ ATerm (name, map_filter (ho_term_for_type_arg format type_enc) T_args @ args)
+
+fun do_bound_type ctxt format mono type_enc =
+ case type_enc of
+ Simple_Types (_, _, level) =>
+ fused_type ctxt mono level 0
+ #> ho_type_from_typ format type_enc false 0 #> SOME
+ | _ => K NONE
+
+fun tag_with_type ctxt format mono type_enc pos T tm =
+ IConst (type_tag, T --> T, [T])
+ |> mangle_type_args_in_iterm format type_enc
+ |> ho_term_from_iterm ctxt format mono type_enc pos
+ |> (fn ATerm (s, tms) => ATerm (s, tms @ [tm])
+ | _ => raise Fail "unexpected lambda-abstraction")
+and ho_term_from_iterm ctxt format mono type_enc =
+ let
+ fun term site u =
+ let
+ val (head, args) = strip_iterm_comb u
+ val pos =
+ case site of
+ Top_Level pos => pos
+ | Eq_Arg pos => pos
+ | _ => NONE
+ val t =
+ case head of
+ IConst (name as (s, _), _, T_args) =>
+ let
+ val arg_site = if is_tptp_equal s then Eq_Arg pos else Elsewhere
+ in
+ map (term arg_site) args |> mk_aterm format type_enc name T_args
+ end
+ | IVar (name, _) =>
+ map (term Elsewhere) args |> mk_aterm format type_enc name []
+ | IAbs ((name, T), tm) =>
+ AAbs ((name, ho_type_from_typ format type_enc true 0 T),
+ term Elsewhere tm)
+ | IApp _ => raise Fail "impossible \"IApp\""
+ val T = ityp_of u
+ in
+ if should_tag_with_type ctxt mono type_enc site u T then
+ tag_with_type ctxt format mono type_enc pos T t
+ else
+ t
+ end
+ in term o Top_Level end
+and formula_from_iformula ctxt polym_constrs format mono type_enc
+ should_guard_var =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val level = level_of_type_enc type_enc
+ val do_term = ho_term_from_iterm ctxt format mono type_enc
+ fun do_out_of_bound_type pos phi universal (name, T) =
+ if should_guard_type ctxt mono type_enc
+ (fn () => should_guard_var thy polym_constrs level pos phi
+ universal name) T then
+ IVar (name, T)
+ |> type_guard_iterm format type_enc T
+ |> do_term pos |> AAtom |> SOME
+ else if should_generate_tag_bound_decl ctxt mono type_enc universal T then
+ let
+ val var = ATerm (name, [])
+ val tagged_var = tag_with_type ctxt format mono type_enc pos T var
+ in SOME (AAtom (ATerm (`I tptp_equal, [tagged_var, var]))) end
+ else
+ NONE
+ fun do_formula pos (AQuant (q, xs, phi)) =
+ let
+ val phi = phi |> do_formula pos
+ val universal = Option.map (q = AExists ? not) pos
+ val do_bound_type = do_bound_type ctxt format mono type_enc
+ in
+ AQuant (q, xs |> map (apsnd (fn NONE => NONE
+ | SOME T => do_bound_type T)),
+ (if q = AForall then mk_ahorn else fold_rev (mk_aconn AAnd))
+ (map_filter
+ (fn (_, NONE) => NONE
+ | (s, SOME T) =>
+ do_out_of_bound_type pos phi universal (s, T))
+ xs)
+ phi)
+ end
+ | do_formula pos (AConn conn) = aconn_map pos do_formula conn
+ | do_formula pos (AAtom tm) = AAtom (do_term pos tm)
+ in do_formula end
+
+(* Each fact is given a unique fact number to avoid name clashes (e.g., because
+   of monomorphization). The TPTP syntax explicitly forbids name clashes, and
+   some of the remote provers might care. *)
+fun formula_line_for_fact ctxt polym_constrs format prefix encode freshen pos
+ mono type_enc rank (j, {name, stature, kind, iformula, atomic_types}) =
+ (prefix ^ (if freshen then string_of_int j ^ "_" else "") ^ encode name, kind,
+ iformula
+ |> formula_from_iformula ctxt polym_constrs format mono type_enc
+ should_guard_var_in_formula (if pos then SOME true else NONE)
+ |> close_formula_universally
+ |> bound_tvars type_enc true atomic_types,
+ NONE,
+ let val rank = rank j in
+ case snd stature of
+ Intro => isabelle_info format introN rank
+ | Elim => isabelle_info format elimN rank
+ | Simp => isabelle_info format simpN rank
+ | Spec_Eq => isabelle_info format spec_eqN rank
+ | _ => isabelle_info format "" rank
+ end)
+ |> Formula
+
+fun formula_line_for_class_rel_clause format type_enc
+ ({name, subclass, superclass, ...} : class_rel_clause) =
+ let val ty_arg = ATerm (tvar_a_name, []) in
+ Formula (class_rel_clause_prefix ^ ascii_of name, Axiom,
+ AConn (AImplies,
+ [type_class_formula type_enc subclass ty_arg,
+ type_class_formula type_enc superclass ty_arg])
+ |> mk_aquant AForall
+ [(tvar_a_name, atype_of_type_vars type_enc)],
+ NONE, isabelle_info format introN helper_rank)
+ end
+
+fun formula_from_arity_atom type_enc (class, t, args) =
+ ATerm (t, map (fn arg => ATerm (arg, [])) args)
+ |> type_class_formula type_enc class
+
+fun formula_line_for_arity_clause format type_enc
+ ({name, prem_atoms, concl_atom} : arity_clause) =
+ Formula (arity_clause_prefix ^ name, Axiom,
+ mk_ahorn (map (formula_from_arity_atom type_enc) prem_atoms)
+ (formula_from_arity_atom type_enc concl_atom)
+ |> mk_aquant AForall
+ (map (rpair (atype_of_type_vars type_enc)) (#3 concl_atom)),
+ NONE, isabelle_info format introN helper_rank)
+
+fun formula_line_for_conjecture ctxt polym_constrs format mono type_enc
+ ({name, kind, iformula, atomic_types, ...} : translated_formula) =
+ Formula (conjecture_prefix ^ name, kind,
+ iformula
+ |> formula_from_iformula ctxt polym_constrs format mono type_enc
+ should_guard_var_in_formula (SOME false)
+ |> close_formula_universally
+ |> bound_tvars type_enc true atomic_types, NONE, [])
+
+fun formula_line_for_free_type j phi =
+ Formula (tfree_clause_prefix ^ string_of_int j, Hypothesis, phi, NONE, [])
+fun formula_lines_for_free_types type_enc (facts : translated_formula list) =
+ let
+ val phis =
+ fold (union (op =)) (map #atomic_types facts) []
+ |> formulas_for_types type_enc add_sorts_on_tfree
+ in map2 formula_line_for_free_type (0 upto length phis - 1) phis end
+
+(** Symbol declarations **)
+
+fun decl_line_for_class order s =
+ let val name as (s, _) = `make_type_class s in
+ Decl (sym_decl_prefix ^ s, name,
+ if order = First_Order then
+ ATyAbs ([tvar_a_name],
+ if avoid_first_order_ghost_type_vars then
+ AFun (a_itself_atype, bool_atype)
+ else
+ bool_atype)
+ else
+ AFun (atype_of_types, bool_atype))
+ end
+
+fun decl_lines_for_classes type_enc classes =
+ case type_enc of
+ Simple_Types (order, Polymorphic, _) =>
+ map (decl_line_for_class order) classes
+ | _ => []
+
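+(* Collect, for each user symbol occurring in the problem, the type instances
+   at which it is used, as a basis for generating symbol declarations. *)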
+fun sym_decl_table_for_facts ctxt format type_enc sym_tab
+ (conjs, facts, extra_tms) =
+ let
+ fun add_iterm_syms tm =
+ let val (head, args) = strip_iterm_comb tm in
+ (case head of
+ IConst ((s, s'), T, T_args) =>
+ let
+ val (pred_sym, in_conj) =
+ case Symtab.lookup sym_tab s of
+ SOME ({pred_sym, in_conj, ...} : sym_info) =>
+ (pred_sym, in_conj)
+ | NONE => (false, false)
+ val decl_sym =
+ (case type_enc of
+ Guards _ => not pred_sym
+ | _ => true) andalso
+ is_tptp_user_symbol s
+ in
+ if decl_sym then
+ Symtab.map_default (s, [])
+ (insert_type ctxt #3 (s', T_args, T, pred_sym, length args,
+ in_conj))
+ else
+ I
+ end
+ | IAbs (_, tm) => add_iterm_syms tm
+ | _ => I)
+ #> fold add_iterm_syms args
+ end
+ val add_fact_syms = K add_iterm_syms |> formula_fold NONE |> fact_lift
+ fun add_formula_var_types (AQuant (_, xs, phi)) =
+ fold (fn (_, SOME T) => insert_type ctxt I T | _ => I) xs
+ #> add_formula_var_types phi
+ | add_formula_var_types (AConn (_, phis)) =
+ fold add_formula_var_types phis
+ | add_formula_var_types _ = I
+ fun var_types () =
+ if polymorphism_of_type_enc type_enc = Polymorphic then [tvar_a]
+ else fold (fact_lift add_formula_var_types) (conjs @ facts) []
+ fun add_undefined_const T =
+ let
+ val (s, s') =
+ `(make_fixed_const NONE) @{const_name undefined}
+ |> (case type_arg_policy [] type_enc @{const_name undefined} of
+ Mangled_Type_Args => mangled_const_name format type_enc [T]
+ | _ => I)
+ in
+ Symtab.map_default (s, [])
+ (insert_type ctxt #3 (s', [T], T, false, 0, false))
+ end
+ fun add_TYPE_const () =
+ let val (s, s') = TYPE_name in
+ Symtab.map_default (s, [])
+ (insert_type ctxt #3
+ (s', [tvar_a], @{typ "'a itself"}, false, 0, false))
+ end
+ in
+ Symtab.empty
+ |> is_type_enc_fairly_sound type_enc
+ ? (fold (fold add_fact_syms) [conjs, facts]
+ #> fold add_iterm_syms extra_tms
+ #> (case type_enc of
+ Simple_Types (First_Order, Polymorphic, _) =>
+ if avoid_first_order_ghost_type_vars then add_TYPE_const ()
+ else I
+ | Simple_Types _ => I
+ | _ => fold add_undefined_const (var_types ())))
+ end
+
+(* We add "bool" in case the helper "True_or_False" is included later. *)
+fun default_mono level =
+ {maybe_finite_Ts = [@{typ bool}],
+ surely_finite_Ts = [@{typ bool}],
+ maybe_infinite_Ts = known_infinite_types,
+ surely_infinite_Ts =
+ case level of
+ Noninf_Nonmono_Types (Strict, _) => []
+ | _ => known_infinite_types,
+ maybe_nonmono_Ts = [@{typ bool}]}
+
+(* This inference is described in section 2.3 of Claessen et al.'s "Sorting it
+ out with monotonicity" paper presented at CADE 2011. *)
+fun add_iterm_mononotonicity_info _ _ (SOME false) _ mono = mono
+ | add_iterm_mononotonicity_info ctxt level _
+ (IApp (IApp (IConst ((s, _), Type (_, [T, _]), _), tm1), tm2))
+ (mono as {maybe_finite_Ts, surely_finite_Ts, maybe_infinite_Ts,
+ surely_infinite_Ts, maybe_nonmono_Ts}) =
+ if is_tptp_equal s andalso exists is_maybe_universal_var [tm1, tm2] then
+ case level of
+ Noninf_Nonmono_Types (strictness, _) =>
+ if exists (type_instance ctxt T) surely_infinite_Ts orelse
+ member (type_equiv ctxt) maybe_finite_Ts T then
+ mono
+ else if is_type_kind_of_surely_infinite ctxt strictness
+ surely_infinite_Ts T then
+ {maybe_finite_Ts = maybe_finite_Ts,
+ surely_finite_Ts = surely_finite_Ts,
+ maybe_infinite_Ts = maybe_infinite_Ts,
+ surely_infinite_Ts = surely_infinite_Ts |> insert_type ctxt I T,
+ maybe_nonmono_Ts = maybe_nonmono_Ts}
+ else
+ {maybe_finite_Ts = maybe_finite_Ts |> insert (type_equiv ctxt) T,
+ surely_finite_Ts = surely_finite_Ts,
+ maybe_infinite_Ts = maybe_infinite_Ts,
+ surely_infinite_Ts = surely_infinite_Ts,
+ maybe_nonmono_Ts = maybe_nonmono_Ts |> insert_type ctxt I T}
+ | Fin_Nonmono_Types _ =>
+ if exists (type_instance ctxt T) surely_finite_Ts orelse
+ member (type_equiv ctxt) maybe_infinite_Ts T then
+ mono
+ else if is_type_surely_finite ctxt T then
+ {maybe_finite_Ts = maybe_finite_Ts,
+ surely_finite_Ts = surely_finite_Ts |> insert_type ctxt I T,
+ maybe_infinite_Ts = maybe_infinite_Ts,
+ surely_infinite_Ts = surely_infinite_Ts,
+ maybe_nonmono_Ts = maybe_nonmono_Ts |> insert_type ctxt I T}
+ else
+ {maybe_finite_Ts = maybe_finite_Ts,
+ surely_finite_Ts = surely_finite_Ts,
+ maybe_infinite_Ts = maybe_infinite_Ts |> insert (type_equiv ctxt) T,
+ surely_infinite_Ts = surely_infinite_Ts,
+ maybe_nonmono_Ts = maybe_nonmono_Ts}
+ | _ => mono
+ else
+ mono
+ | add_iterm_mononotonicity_info _ _ _ _ mono = mono
+fun add_fact_mononotonicity_info ctxt level
+ ({kind, iformula, ...} : translated_formula) =
+ formula_fold (SOME (kind <> Conjecture))
+ (add_iterm_mononotonicity_info ctxt level) iformula
+fun mononotonicity_info_for_facts ctxt type_enc facts =
+ let val level = level_of_type_enc type_enc in
+ default_mono level
+ |> is_type_level_monotonicity_based level
+ ? fold (add_fact_mononotonicity_info ctxt level) facts
+ end
+
+fun add_iformula_monotonic_types ctxt mono type_enc =
+ let
+ val level = level_of_type_enc type_enc
+ val should_encode = should_encode_type ctxt mono level
+ fun add_type T = not (should_encode T) ? insert_type ctxt I T
+ fun add_args (IApp (tm1, tm2)) = add_args tm1 #> add_term tm2
+ | add_args _ = I
+ and add_term tm = add_type (ityp_of tm) #> add_args tm
+ in formula_fold NONE (K add_term) end
+fun add_fact_monotonic_types ctxt mono type_enc =
+ add_iformula_monotonic_types ctxt mono type_enc |> fact_lift
+fun monotonic_types_for_facts ctxt mono type_enc facts =
+ let val level = level_of_type_enc type_enc in
+ [] |> (polymorphism_of_type_enc type_enc = Polymorphic andalso
+ is_type_level_monotonicity_based level andalso
+ granularity_of_type_level level <> Ghost_Type_Arg_Vars)
+ ? fold (add_fact_monotonic_types ctxt mono type_enc) facts
+ end
+
+fun formula_line_for_guards_mono_type ctxt format mono type_enc T =
+ Formula (guards_sym_formula_prefix ^
+ ascii_of (mangled_type format type_enc T),
+ Axiom,
+ IConst (`make_bound_var "X", T, [])
+ |> type_guard_iterm format type_enc T
+ |> AAtom
+ |> formula_from_iformula ctxt [] format mono type_enc
+ always_guard_var_in_formula (SOME true)
+ |> close_formula_universally
+ |> bound_tvars type_enc true (atomic_types_of T),
+ NONE, isabelle_info format introN helper_rank)
+
+fun formula_line_for_tags_mono_type ctxt format mono type_enc T =
+ let val x_var = ATerm (`make_bound_var "X", []) in
+ Formula (tags_sym_formula_prefix ^
+ ascii_of (mangled_type format type_enc T),
+ Axiom,
+ eq_formula type_enc (atomic_types_of T) [] false
+ (tag_with_type ctxt format mono type_enc NONE T x_var) x_var,
+ NONE, isabelle_info format spec_eqN helper_rank)
+ end
+
+fun problem_lines_for_mono_types ctxt format mono type_enc Ts =
+ case type_enc of
+ Simple_Types _ => []
+ | Guards _ =>
+ map (formula_line_for_guards_mono_type ctxt format mono type_enc) Ts
+ | Tags _ => map (formula_line_for_tags_mono_type ctxt format mono type_enc) Ts
+
+fun decl_line_for_sym ctxt format mono type_enc s
+ (s', T_args, T, pred_sym, ary, _) =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val (T, T_args) =
+ if null T_args then
+ (T, [])
+ else case unprefix_and_unascii const_prefix s of
+ SOME s' =>
+ let
+ val s' = s' |> invert_const
+ val T = s' |> robust_const_type thy
+ in (T, robust_const_typargs thy (s', T)) end
+ | NONE => raise Fail "unexpected type arguments"
+ in
+ Decl (sym_decl_prefix ^ s, (s, s'),
+ T |> fused_type ctxt mono (level_of_type_enc type_enc) ary
+ |> ho_type_from_typ format type_enc pred_sym ary
+ |> not (null T_args)
+ ? curry ATyAbs (map (tvar_name o fst o dest_TVar) T_args))
+ end
+
+fun honor_conj_sym_kind in_conj conj_sym_kind =
+ if in_conj then (conj_sym_kind, conj_sym_kind = Conjecture ? mk_anot)
+ else (Axiom, I)
+
+fun formula_line_for_guards_sym_decl ctxt format conj_sym_kind mono type_enc n s
+ j (s', T_args, T, _, ary, in_conj) =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val (kind, maybe_negate) = honor_conj_sym_kind in_conj conj_sym_kind
+ val (arg_Ts, res_T) = chop_fun ary T
+ val bound_names = 1 upto ary |> map (`I o make_bound_var o string_of_int)
+ val bounds =
+ bound_names ~~ arg_Ts |> map (fn (name, T) => IConst (name, T, []))
+ val bound_Ts =
+ if exists (curry (op =) dummyT) T_args then
+ case level_of_type_enc type_enc of
+ All_Types => map SOME arg_Ts
+ | level =>
+ if granularity_of_type_level level = Ghost_Type_Arg_Vars then
+ let val ghosts = ghost_type_args thy s ary in
+ map2 (fn j => if member (op =) ghosts j then SOME else K NONE)
+ (0 upto ary - 1) arg_Ts
+ end
+ else
+ replicate ary NONE
+ else
+ replicate ary NONE
+ in
+ Formula (guards_sym_formula_prefix ^ s ^
+ (if n > 1 then "_" ^ string_of_int j else ""), kind,
+ IConst ((s, s'), T, T_args)
+ |> fold (curry (IApp o swap)) bounds
+ |> type_guard_iterm format type_enc res_T
+ |> AAtom |> mk_aquant AForall (bound_names ~~ bound_Ts)
+ |> formula_from_iformula ctxt [] format mono type_enc
+ always_guard_var_in_formula (SOME true)
+ |> close_formula_universally
+ |> bound_tvars type_enc (n > 1) (atomic_types_of T)
+ |> maybe_negate,
+ NONE, isabelle_info format introN helper_rank)
+ end
+
+fun formula_lines_for_tags_sym_decl ctxt format conj_sym_kind mono type_enc n s
+ (j, (s', T_args, T, pred_sym, ary, in_conj)) =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val level = level_of_type_enc type_enc
+ val grain = granularity_of_type_level level
+ val ident_base =
+ tags_sym_formula_prefix ^ s ^
+ (if n > 1 then "_" ^ string_of_int j else "")
+ val (kind, maybe_negate) = honor_conj_sym_kind in_conj conj_sym_kind
+ val (arg_Ts, res_T) = chop_fun ary T
+ val bound_names = 1 upto ary |> map (`I o make_bound_var o string_of_int)
+ val bounds = bound_names |> map (fn name => ATerm (name, []))
+ val cst = mk_aterm format type_enc (s, s') T_args
+ val eq = maybe_negate oo eq_formula type_enc (atomic_types_of T) [] pred_sym
+ val should_encode = should_encode_type ctxt mono level
+ val tag_with = tag_with_type ctxt format mono type_enc NONE
+ val add_formula_for_res =
+ if should_encode res_T then
+ let
+ val tagged_bounds =
+ if grain = Ghost_Type_Arg_Vars then
+ let val ghosts = ghost_type_args thy s ary in
+ map2 (fn (j, arg_T) => member (op =) ghosts j ? tag_with arg_T)
+ (0 upto ary - 1 ~~ arg_Ts) bounds
+ end
+ else
+ bounds
+ in
+ cons (Formula (ident_base ^ "_res", kind,
+ eq (tag_with res_T (cst bounds)) (cst tagged_bounds),
+ NONE, isabelle_info format spec_eqN helper_rank))
+ end
+ else
+ I
+ fun add_formula_for_arg k =
+ let val arg_T = nth arg_Ts k in
+ if should_encode arg_T then
+ case chop k bounds of
+ (bounds1, bound :: bounds2) =>
+ cons (Formula (ident_base ^ "_arg" ^ string_of_int (k + 1), kind,
+ eq (cst (bounds1 @ tag_with arg_T bound :: bounds2))
+ (cst bounds),
+ NONE, isabelle_info format spec_eqN helper_rank))
+ | _ => raise Fail "expected nonempty tail"
+ else
+ I
+ end
+ in
+ [] |> not pred_sym ? add_formula_for_res
+ |> (Config.get ctxt type_tag_arguments andalso
+ grain = Positively_Naked_Vars)
+ ? fold add_formula_for_arg (ary - 1 downto 0)
+ end
+
+fun result_type_of_decl (_, _, T, _, ary, _) = chop_fun ary T |> snd
+
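+(* Keep only the first declaration if its result type makes the remaining
+   declarations redundant. *)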
+fun rationalize_decls ctxt (decls as decl :: (decls' as _ :: _)) =
+ let
+ val T = result_type_of_decl decl
+ |> map_type_tvar (fn (z, _) => TVar (z, HOLogic.typeS))
+ in
+ if forall (type_generalization ctxt T o result_type_of_decl) decls' then
+ [decl]
+ else
+ decls
+ end
+ | rationalize_decls _ decls = decls
+
+fun problem_lines_for_sym_decls ctxt format conj_sym_kind mono type_enc
+ (s, decls) =
+ case type_enc of
+ Simple_Types _ => [decl_line_for_sym ctxt format mono type_enc s (hd decls)]
+ | Guards (_, level) =>
+ let
+ val decls = decls |> rationalize_decls ctxt
+ val n = length decls
+ val decls =
+ decls |> filter (should_encode_type ctxt mono level
+ o result_type_of_decl)
+ in
+ (0 upto length decls - 1, decls)
+ |-> map2 (formula_line_for_guards_sym_decl ctxt format conj_sym_kind mono
+ type_enc n s)
+ end
+ | Tags (_, level) =>
+ if granularity_of_type_level level = All_Vars then
+ []
+ else
+ let val n = length decls in
+ (0 upto n - 1 ~~ decls)
+ |> maps (formula_lines_for_tags_sym_decl ctxt format conj_sym_kind mono
+ type_enc n s)
+ end
+
+fun problem_lines_for_sym_decl_table ctxt format conj_sym_kind mono type_enc
+ mono_Ts sym_decl_tab =
+ let
+ val syms = sym_decl_tab |> Symtab.dest |> sort_wrt fst
+ val mono_lines =
+ problem_lines_for_mono_types ctxt format mono type_enc mono_Ts
+ val decl_lines =
+ fold_rev (append o problem_lines_for_sym_decls ctxt format conj_sym_kind
+ mono type_enc)
+ syms []
+ in mono_lines @ decl_lines end
+
+fun pair_append (xs1, xs2) (ys1, ys2) = (xs1 @ ys1, xs2 @ ys2)
+
+fun do_uncurried_alias_lines_for_sym ctxt monom_constrs format conj_sym_kind
+ mono type_enc sym_tab0 sym_tab base_s0 types in_conj =
+ let
+ fun do_alias ary =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val (kind, maybe_negate) = honor_conj_sym_kind in_conj conj_sym_kind
+ val base_name = base_s0 |> `(make_fixed_const (SOME format))
+ val T = case types of [T] => T | _ => robust_const_type thy base_s0
+ val T_args = robust_const_typargs thy (base_s0, T)
+ val (base_name as (base_s, _), T_args) =
+ mangle_type_args_in_const format type_enc base_name T_args
+ val base_ary = min_ary_of sym_tab0 base_s
+ fun do_const name = IConst (name, T, T_args)
+ val filter_ty_args =
+ filter_type_args_in_iterm thy monom_constrs type_enc
+ val ho_term_of =
+ ho_term_from_iterm ctxt format mono type_enc (SOME true)
+ val name1 as (s1, _) =
+ base_name |> ary - 1 > base_ary ? aliased_uncurried (ary - 1)
+ val name2 as (s2, _) = base_name |> aliased_uncurried ary
+ val (arg_Ts, _) = chop_fun ary T
+ val bound_names =
+ 1 upto ary |> map (`I o make_bound_var o string_of_int)
+ val bounds = bound_names ~~ arg_Ts
+ val (first_bounds, last_bound) =
+ bounds |> map (fn (name, T) => IConst (name, T, [])) |> split_last
+ val tm1 =
+ mk_app_op format type_enc (list_app (do_const name1) first_bounds)
+ last_bound
+ |> filter_ty_args
+ val tm2 =
+ list_app (do_const name2) (first_bounds @ [last_bound])
+ |> filter_ty_args
+ val do_bound_type = do_bound_type ctxt format mono type_enc
+ val eq =
+ eq_formula type_enc (atomic_types_of T)
+ (map (apsnd do_bound_type) bounds) false
+ (ho_term_of tm1) (ho_term_of tm2)
+ in
+ ([tm1, tm2],
+ [Formula (uncurried_alias_eq_prefix ^ s2, kind, eq |> maybe_negate,
+ NONE, isabelle_info format spec_eqN helper_rank)])
+ |> (if ary - 1 = base_ary orelse Symtab.defined sym_tab s1 then I
+ else pair_append (do_alias (ary - 1)))
+ end
+ in do_alias end
+fun uncurried_alias_lines_for_sym ctxt monom_constrs format conj_sym_kind mono
+ type_enc sym_tab0 sym_tab
+ (s, {min_ary, types, in_conj, ...} : sym_info) =
+ case unprefix_and_unascii const_prefix s of
+ SOME mangled_s =>
+ if String.isSubstring uncurried_alias_sep mangled_s then
+ let
+ val base_s0 = mangled_s |> unmangled_const_name |> hd |> invert_const
+ in
+ do_uncurried_alias_lines_for_sym ctxt monom_constrs format conj_sym_kind
+ mono type_enc sym_tab0 sym_tab base_s0 types in_conj min_ary
+ end
+ else
+ ([], [])
+ | NONE => ([], [])
+fun uncurried_alias_lines_for_sym_table ctxt monom_constrs format conj_sym_kind
+ mono type_enc uncurried_aliases sym_tab0 sym_tab =
+ ([], [])
+ |> uncurried_aliases
+ ? Symtab.fold_rev
+ (pair_append
+ o uncurried_alias_lines_for_sym ctxt monom_constrs format
+ conj_sym_kind mono type_enc sym_tab0 sym_tab) sym_tab
+
+fun needs_type_tag_idempotence ctxt (Tags (poly, level)) =
+ Config.get ctxt type_tag_idempotence andalso
+ is_type_level_monotonicity_based level andalso
+ poly <> Mangled_Monomorphic
+ | needs_type_tag_idempotence _ _ = false
+
+val implicit_declsN = "Should-be-implicit typings"
+val explicit_declsN = "Explicit typings"
+val uncurried_alias_eqsN = "Uncurried aliases"
+val factsN = "Relevant facts"
+val class_relsN = "Class relationships"
+val aritiesN = "Arities"
+val helpersN = "Helper facts"
+val conjsN = "Conjectures"
+val free_typesN = "Type variables"
+
+(* TFF allows implicit declarations of types, function symbols, and predicate
+ symbols (with "$i" as the type of individuals), but some provers (e.g.,
+ SNARK) require explicit declarations. The situation is similar for THF. *)
+
+fun default_type type_enc pred_sym s =
+ let
+ val ind =
+ case type_enc of
+ Simple_Types _ =>
+ if String.isPrefix type_const_prefix s then atype_of_types
+ else individual_atype
+ | _ => individual_atype
+ fun typ 0 = if pred_sym then bool_atype else ind
+ | typ ary = AFun (ind, typ (ary - 1))
+ in typ end
+
+fun nary_type_constr_type n =
+ funpow n (curry AFun atype_of_types) atype_of_types
+
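+(* Find the user symbols that occur in the problem without an explicit
+   declaration, together with a default type for each. *)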
+fun undeclared_syms_in_problem type_enc problem =
+ let
+ val declared = declared_syms_in_problem problem
+ fun do_sym (name as (s, _)) ty =
+ if is_tptp_user_symbol s andalso not (member (op =) declared name) then
+ AList.default (op =) (name, ty)
+ else
+ I
+ fun do_type (AType (name, tys)) =
+ do_sym name (fn () => nary_type_constr_type (length tys))
+ #> fold do_type tys
+ | do_type (AFun (ty1, ty2)) = do_type ty1 #> do_type ty2
+ | do_type (ATyAbs (_, ty)) = do_type ty
+ fun do_term pred_sym (ATerm (name as (s, _), tms)) =
+ do_sym name (fn _ => default_type type_enc pred_sym s (length tms))
+ #> fold (do_term false) tms
+ | do_term _ (AAbs ((_, ty), tm)) = do_type ty #> do_term false tm
+ fun do_formula (AQuant (_, xs, phi)) =
+ fold do_type (map_filter snd xs) #> do_formula phi
+ | do_formula (AConn (_, phis)) = fold do_formula phis
+ | do_formula (AAtom tm) = do_term true tm
+ fun do_problem_line (Decl (_, _, ty)) = do_type ty
+ | do_problem_line (Formula (_, _, phi, _, _)) = do_formula phi
+ in
+ fold (fold do_problem_line o snd) problem []
+ |> filter_out (is_built_in_tptp_symbol o fst o fst)
+ end
+
+fun declare_undeclared_syms_in_atp_problem type_enc problem =
+ let
+ val decls =
+ problem
+ |> undeclared_syms_in_problem type_enc
+ |> sort_wrt (fst o fst)
+ |> map (fn (x as (s, _), ty) => Decl (type_decl_prefix ^ s, x, ty ()))
+ in (implicit_declsN, decls) :: problem end
+
+fun exists_subdtype P =
+ let
+ fun ex U = P U orelse
+ (case U of Datatype.DtType (_, Us) => exists ex Us | _ => false)
+ in ex end
+
+fun is_poly_constr (_, Us) =
+ exists (exists_subdtype (fn Datatype.DtTFree _ => true | _ => false)) Us
+
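+(* Constructors of polymorphic datatypes, partitioned into those whose argument
+   types mention type variables and those whose argument types do not. *)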
+fun all_constrs_of_polymorphic_datatypes thy =
+ Symtab.fold (snd
+ #> #descr
+ #> maps (snd #> #3)
+ #> (fn cs => exists is_poly_constr cs ? append cs))
+ (Datatype.get_all thy) []
+ |> List.partition is_poly_constr
+ |> pairself (map fst)
+
+val app_op_threshold = 50
+
+fun prepare_atp_problem ctxt format conj_sym_kind prem_kind type_enc exporter
+ lam_trans uncurried_aliases readable_names preproc
+ hyp_ts concl_t facts =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val type_enc = type_enc |> adjust_type_enc format
+ (* Forcing explicit applications is expensive for polymorphic encodings,
+ because it takes only one existential variable ranging over "'a => 'b" to
+ ruin everything. Hence we do it only if there are few facts (which is
+ normally the case for "metis" and the minimizer). *)
+ val app_op_level =
+ if polymorphism_of_type_enc type_enc = Polymorphic andalso
+ length facts >= app_op_threshold then
+ Incomplete_Apply
+ else
+ Sufficient_Apply
+ val lam_trans =
+ if lam_trans = keep_lamsN andalso
+ not (is_type_enc_higher_order type_enc) then
+ error ("Lambda translation scheme incompatible with first-order \
+ \encoding.")
+ else
+ lam_trans
+ val (fact_names, classes, conjs, facts, class_rel_clauses, arity_clauses,
+ lifted) =
+ translate_formulas ctxt format prem_kind type_enc lam_trans preproc hyp_ts
+ concl_t facts
+ val sym_tab0 = sym_table_for_facts ctxt type_enc app_op_level conjs facts
+ val mono = conjs @ facts |> mononotonicity_info_for_facts ctxt type_enc
+ val (polym_constrs, monom_constrs) =
+ all_constrs_of_polymorphic_datatypes thy
+ |>> map (make_fixed_const (SOME format))
+ fun firstorderize in_helper =
+ firstorderize_fact thy monom_constrs format type_enc sym_tab0
+ (uncurried_aliases andalso not in_helper)
+ val (conjs, facts) = (conjs, facts) |> pairself (map (firstorderize false))
+ val sym_tab = sym_table_for_facts ctxt type_enc Incomplete_Apply conjs facts
+ val helpers =
+ sym_tab |> helper_facts_for_sym_table ctxt format type_enc
+ |> map (firstorderize true)
+ val mono_Ts =
+ helpers @ conjs @ facts |> monotonic_types_for_facts ctxt mono type_enc
+ val class_decl_lines = decl_lines_for_classes type_enc classes
+ val (uncurried_alias_eq_tms, uncurried_alias_eq_lines) =
+ uncurried_alias_lines_for_sym_table ctxt monom_constrs format
+ conj_sym_kind mono type_enc uncurried_aliases sym_tab0 sym_tab
+ val sym_decl_lines =
+ (conjs, helpers @ facts, uncurried_alias_eq_tms)
+ |> sym_decl_table_for_facts ctxt format type_enc sym_tab
+ |> problem_lines_for_sym_decl_table ctxt format conj_sym_kind mono
+ type_enc mono_Ts
+ val num_facts = length facts
+ val fact_lines =
+ map (formula_line_for_fact ctxt polym_constrs format fact_prefix
+ ascii_of (not exporter) (not exporter) mono type_enc
+ (rank_of_fact_num num_facts))
+ (0 upto num_facts - 1 ~~ facts)
+ val helper_lines =
+ 0 upto length helpers - 1 ~~ helpers
+ |> map (formula_line_for_fact ctxt polym_constrs format helper_prefix I
+ false true mono type_enc (K default_rank))
+ |> (if needs_type_tag_idempotence ctxt type_enc then
+ cons (type_tag_idempotence_fact format type_enc)
+ else
+ I)
+ (* Reordering these might confuse the proof reconstruction code or the SPASS
+ FLOTTER hack. *)
+ val problem =
+ [(explicit_declsN, class_decl_lines @ sym_decl_lines),
+ (uncurried_alias_eqsN, uncurried_alias_eq_lines),
+ (factsN, fact_lines),
+ (class_relsN,
+ map (formula_line_for_class_rel_clause format type_enc)
+ class_rel_clauses),
+ (aritiesN,
+ map (formula_line_for_arity_clause format type_enc) arity_clauses),
+ (helpersN, helper_lines),
+ (conjsN,
+ map (formula_line_for_conjecture ctxt polym_constrs format mono
+ type_enc) conjs),
+ (free_typesN, formula_lines_for_free_types type_enc (facts @ conjs))]
+ val problem =
+ problem
+ |> (case format of
+ CNF => ensure_cnf_problem
+ | CNF_UEQ => filter_cnf_ueq_problem
+ | FOF => I
+ | TFF (_, TPTP_Implicit) => I
+ | THF (_, TPTP_Implicit, _) => I
+ | _ => declare_undeclared_syms_in_atp_problem type_enc)
+ val (problem, pool) = problem |> nice_atp_problem readable_names format
+ fun add_sym_ary (s, {min_ary, ...} : sym_info) =
+ min_ary > 0 ? Symtab.insert (op =) (s, min_ary)
+ in
+ (problem,
+ case pool of SOME the_pool => snd the_pool | NONE => Symtab.empty,
+ fact_names |> Vector.fromList,
+ lifted,
+ Symtab.empty |> Symtab.fold add_sym_ary sym_tab)
+ end
+
+(* FUDGE *)
+val conj_weight = 0.0
+val hyp_weight = 0.1
+val fact_min_weight = 0.2
+val fact_max_weight = 1.0
+val type_info_default_weight = 0.8
+
+(* Weights are from 0.0 (most important) to 1.0 (least important). *)
+fun atp_problem_relevance_weights problem =
+ let
+ fun add_term_weights weight (ATerm (s, tms)) =
+ is_tptp_user_symbol s ? Symtab.default (s, weight)
+ #> fold (add_term_weights weight) tms
+ | add_term_weights weight (AAbs (_, tm)) = add_term_weights weight tm
+ fun add_line_weights weight (Formula (_, _, phi, _, _)) =
+ formula_fold NONE (K (add_term_weights weight)) phi
+ | add_line_weights _ _ = I
+ fun add_conjectures_weights [] = I
+ | add_conjectures_weights conjs =
+ let val (hyps, conj) = split_last conjs in
+ add_line_weights conj_weight conj
+ #> fold (add_line_weights hyp_weight) hyps
+ end
+ fun add_facts_weights facts =
+ let
+ val num_facts = length facts
+ fun weight_of j =
+ fact_min_weight + (fact_max_weight - fact_min_weight) * Real.fromInt j
+ / Real.fromInt num_facts
+ in
+ map weight_of (0 upto num_facts - 1) ~~ facts
+ |> fold (uncurry add_line_weights)
+ end
+ val get = these o AList.lookup (op =) problem
+ in
+ Symtab.empty
+ |> add_conjectures_weights (get free_typesN @ get conjsN)
+ |> add_facts_weights (get factsN)
+ |> fold (fold (add_line_weights type_info_default_weight) o get)
+ [explicit_declsN, class_relsN, aritiesN]
+ |> Symtab.dest
+ |> sort (prod_ord Real.compare string_ord o pairself swap)
+ end
+
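+(* Strip explicit application operators off the head of a term, collecting the
+   arguments along the way. *)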
+fun have_head_rolling (ATerm (s, args)) =
+ (* ugly hack: may make innocent victims *)
+ if String.isPrefix app_op_name s andalso length args = 2 then
+ have_head_rolling (hd args) ||> append (tl args)
+ else
+ (s, args)
+ | have_head_rolling _ = ("", [])
+
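+(* Derive KBO-style symbol weights from a dependency graph built from the
+   "spec_eq" and "simp" equations: symbols defined in terms of other symbols
+   receive larger weights. *)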
+fun atp_problem_kbo_weights problem =
+ let
+ fun add_term_deps head (ATerm (s, args)) =
+ is_tptp_user_symbol s ? perhaps (try (Graph.add_edge_acyclic (s, head)))
+ #> fold (add_term_deps head) args
+ | add_term_deps head (AAbs (_, tm)) = add_term_deps head tm
+ fun add_eq_deps (ATerm (s, [lhs as _, rhs])) =
+ if is_tptp_equal s then
+ let val (head, rest) = have_head_rolling lhs in
+ fold (add_term_deps head) (rhs :: rest)
+ end
+ else
+ I
+ | add_eq_deps _ = I
+ fun add_line_deps _ (Decl (_, s, ty)) =
+ is_function_type ty ? Graph.default_node (s, ())
+ | add_line_deps status (Formula (_, _, phi, _, info)) =
+ if extract_isabelle_status info = SOME status then
+ formula_fold NONE (K add_eq_deps) phi
+ else
+ I
+ val graph =
+ Graph.empty |> fold (fold (add_line_deps spec_eqN) o snd) problem
+ |> fold (fold (add_line_deps simpN) o snd) problem
+ fun next_weight w = w + w
+ fun add_weights _ [] = I
+ | add_weights weight syms =
+ fold (AList.update (op =) o rpair weight) syms
+ #> add_weights (next_weight weight)
+ (fold (union (op =) o Graph.immediate_succs graph) syms [])
+ in
+ [] |> add_weights 1 (Graph.minimals graph) |> sort (int_ord o pairself snd)
+ end
+
+end;
--- a/src/HOL/Tools/ATP/atp_proof.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/ATP/atp_proof.ML Tue Feb 14 11:16:07 2012 +0100
@@ -115,7 +115,7 @@
"The prover claims the conjecture is a theorem but did not provide a proof."
| string_for_failure ProofIncomplete =
"The prover claims the conjecture is a theorem but provided an incomplete \
- \proof."
+ \(or unparsable) proof."
| string_for_failure (UnsoundProof (false, ss)) =
"The prover found a type-unsound proof " ^ involving ss ^
"(or, less likely, your axioms are inconsistent). Specify a sound type \
@@ -431,21 +431,30 @@
-- Scan.repeat parse_decorated_atom
>> (mk_horn o apfst (op @))) x
-fun resolve_spass_num spass_names num =
- case Int.fromString num of
- SOME j => if j > 0 andalso j <= Vector.length spass_names then
- Vector.sub (spass_names, j - 1)
- else
- []
- | NONE => []
+fun resolve_spass_num (SOME names) _ _ = names
+ | resolve_spass_num NONE spass_names num =
+ case Int.fromString num of
+ SOME j => if j > 0 andalso j <= Vector.length spass_names then
+ Vector.sub (spass_names, j - 1)
+ else
+ []
+ | NONE => []
-(* Syntax: <num>[0:<inference><annotations>] <atoms> || <atoms> -> <atoms>. *)
+val parse_spass_debug =
+ Scan.option ($$ "(" |-- Scan.repeat (scan_general_id --| Scan.option ($$ ","))
+ --| $$ ")")
+
+(* Syntax: <num>[0:<inference><annotations>] <atoms> || <atoms> -> <atoms>.
+ derived from formulae <ident>* *)
fun parse_spass_line spass_names =
- scan_general_id --| $$ "[" --| $$ "0" --| $$ ":" -- Symbol.scan_id
- -- parse_spass_annotations --| $$ "]" -- parse_horn_clause --| $$ "."
- >> (fn (((num, rule), deps), u) =>
- Inference ((num, resolve_spass_num spass_names num), u, rule,
- map (swap o `(resolve_spass_num spass_names)) deps))
+ parse_spass_debug |-- scan_general_id --| $$ "[" --| $$ "0" --| $$ ":"
+ -- Symbol.scan_id -- parse_spass_annotations --| $$ "]"
+ -- parse_horn_clause --| $$ "."
+ -- Scan.option (Scan.this_string "derived from formulae "
+ |-- Scan.repeat scan_general_id)
+ >> (fn ((((num, rule), deps), u), names) =>
+ Inference ((num, resolve_spass_num names spass_names num), u, rule,
+ map (swap o `(resolve_spass_num NONE spass_names)) deps))
(* Syntax: <name> *)
fun parse_satallax_line x =
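
Illustrative aside (hypothetical identifiers; the shape merely follows the
grammar comment above): with the extended parser, a SPASS line may carry an
optional parenthesized debug prefix and a trailing "derived from formulae"
fact list, roughly like this.

  (* purely a shape example, bound as an SML string *)
  val example_spass_line =
    "(2,1) 27[0:Res] p(U) || q(U) -> r(U). derived from formulae fact_7_foo fact_12_bar"
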
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/ATP/atp_proof_reconstruct.ML Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,948 @@
+(* Title: HOL/Tools/ATP/atp_proof_reconstruct.ML
+ Author: Lawrence C. Paulson, Cambridge University Computer Laboratory
+ Author: Claire Quigley, Cambridge University Computer Laboratory
+ Author: Jasmin Blanchette, TU Muenchen
+
+Proof reconstruction from ATP proofs.
+*)
+
+signature ATP_PROOF_RECONSTRUCT =
+sig
+ type ('a, 'b) ho_term = ('a, 'b) ATP_Problem.ho_term
+ type ('a, 'b, 'c) formula = ('a, 'b, 'c) ATP_Problem.formula
+ type 'a proof = 'a ATP_Proof.proof
+ type stature = ATP_Problem_Generate.stature
+
+ datatype reconstructor =
+ Metis of string * string |
+ SMT
+
+ datatype play =
+ Played of reconstructor * Time.time |
+ Trust_Playable of reconstructor * Time.time option |
+ Failed_to_Play of reconstructor
+
+ type minimize_command = string list -> string
+ type one_line_params =
+ play * string * (string * stature) list * minimize_command * int * int
+ type isar_params =
+ bool * int * string Symtab.table * (string * stature) list vector
+ * int Symtab.table * string proof * thm
+
+ val metisN : string
+ val smtN : string
+ val full_typesN : string
+ val partial_typesN : string
+ val no_typesN : string
+ val really_full_type_enc : string
+ val full_type_enc : string
+ val partial_type_enc : string
+ val no_type_enc : string
+ val full_type_encs : string list
+ val partial_type_encs : string list
+ val metis_default_lam_trans : string
+ val metis_call : string -> string -> string
+ val string_for_reconstructor : reconstructor -> string
+ val used_facts_in_atp_proof :
+ Proof.context -> (string * stature) list vector -> string proof
+ -> (string * stature) list
+ val lam_trans_from_atp_proof : string proof -> string -> string
+ val is_typed_helper_used_in_atp_proof : string proof -> bool
+ val used_facts_in_unsound_atp_proof :
+ Proof.context -> (string * stature) list vector -> 'a proof
+ -> string list option
+ val unalias_type_enc : string -> string list
+ val one_line_proof_text : one_line_params -> string
+ val make_tvar : string -> typ
+ val make_tfree : Proof.context -> string -> typ
+ val term_from_atp :
+ Proof.context -> bool -> int Symtab.table -> typ option
+ -> (string, string) ho_term -> term
+ val prop_from_atp :
+ Proof.context -> bool -> int Symtab.table
+ -> (string, string, (string, string) ho_term) formula -> term
+ val isar_proof_text :
+ Proof.context -> bool -> isar_params -> one_line_params -> string
+ val proof_text :
+ Proof.context -> bool -> isar_params -> one_line_params -> string
+end;
+
+structure ATP_Proof_Reconstruct : ATP_PROOF_RECONSTRUCT =
+struct
+
+open ATP_Util
+open ATP_Problem
+open ATP_Proof
+open ATP_Problem_Generate
+
+structure String_Redirect = ATP_Proof_Redirect(
+ type key = step_name
+ val ord = fn ((s, _ : string list), (s', _)) => fast_string_ord (s, s')
+ val string_of = fst)
+
+open String_Redirect
+
+datatype reconstructor =
+ Metis of string * string |
+ SMT
+
+datatype play =
+ Played of reconstructor * Time.time |
+ Trust_Playable of reconstructor * Time.time option |
+ Failed_to_Play of reconstructor
+
+type minimize_command = string list -> string
+type one_line_params =
+ play * string * (string * stature) list * minimize_command * int * int
+type isar_params =
+ bool * int * string Symtab.table * (string * stature) list vector
+ * int Symtab.table * string proof * thm
+
+val metisN = "metis"
+val smtN = "smt"
+
+val full_typesN = "full_types"
+val partial_typesN = "partial_types"
+val no_typesN = "no_types"
+
+val really_full_type_enc = "mono_tags"
+val full_type_enc = "poly_guards_query"
+val partial_type_enc = "poly_args"
+val no_type_enc = "erased"
+
+val full_type_encs = [full_type_enc, really_full_type_enc]
+val partial_type_encs = partial_type_enc :: full_type_encs
+
+val type_enc_aliases =
+ [(full_typesN, full_type_encs),
+ (partial_typesN, partial_type_encs),
+ (no_typesN, [no_type_enc])]
+
+fun unalias_type_enc s =
+ AList.lookup (op =) type_enc_aliases s |> the_default [s]
+
+val metis_default_lam_trans = combsN
+
+fun metis_call type_enc lam_trans =
+ let
+ val type_enc =
+ case AList.find (fn (enc, encs) => enc = hd encs) type_enc_aliases
+ type_enc of
+ [alias] => alias
+ | _ => type_enc
+ val opts = [] |> type_enc <> partial_typesN ? cons type_enc
+ |> lam_trans <> metis_default_lam_trans ? cons lam_trans
+ in metisN ^ (if null opts then "" else " (" ^ commas opts ^ ")") end
+
+fun string_for_reconstructor (Metis (type_enc, lam_trans)) =
+ metis_call type_enc lam_trans
+ | string_for_reconstructor SMT = smtN
+
+fun find_first_in_list_vector vec key =
+ Vector.foldl (fn (ps, NONE) => AList.lookup (op =) ps key
+ | (_, value) => value) NONE vec
+
+val unprefix_fact_number = space_implode "_" o tl o space_explode "_"
+
+fun resolve_one_named_fact fact_names s =
+ case try (unprefix fact_prefix) s of
+ SOME s' =>
+ let val s' = s' |> unprefix_fact_number |> unascii_of in
+ s' |> find_first_in_list_vector fact_names |> Option.map (pair s')
+ end
+ | NONE => NONE
+fun resolve_fact fact_names = map_filter (resolve_one_named_fact fact_names)
+fun is_fact fact_names = not o null o resolve_fact fact_names
+
+fun resolve_one_named_conjecture s =
+ case try (unprefix conjecture_prefix) s of
+ SOME s' => Int.fromString s'
+ | NONE => NONE
+
+val resolve_conjecture = map_filter resolve_one_named_conjecture
+val is_conjecture = not o null o resolve_conjecture
+
+fun is_axiom_used_in_proof pred =
+ exists (fn Inference ((_, ss), _, _, []) => exists pred ss | _ => false)
+
+val is_combinator_def = String.isPrefix (helper_prefix ^ combinator_prefix)
+
+val ascii_of_lam_fact_prefix = ascii_of lam_fact_prefix
+
+(* overapproximation (good enough) *)
+fun is_lam_lifted s =
+ String.isPrefix fact_prefix s andalso
+ String.isSubstring ascii_of_lam_fact_prefix s
+
+fun lam_trans_from_atp_proof atp_proof default =
+ case (is_axiom_used_in_proof is_combinator_def atp_proof,
+ is_axiom_used_in_proof is_lam_lifted atp_proof) of
+ (false, false) => default
+ | (false, true) => liftingN
+(* | (true, true) => combs_and_liftingN -- not supported by "metis" *)
+ | (true, _) => combsN
+
+val is_typed_helper_name =
+ String.isPrefix helper_prefix andf String.isSuffix typed_helper_suffix
+fun is_typed_helper_used_in_atp_proof atp_proof =
+ is_axiom_used_in_proof is_typed_helper_name atp_proof
+
+val leo2_ext = "extcnf_equal_neg"
+val isa_ext = Thm.get_name_hint @{thm ext}
+val isa_short_ext = Long_Name.base_name isa_ext
+
+fun ext_name ctxt =
+ if Thm.eq_thm_prop (@{thm ext},
+ singleton (Attrib.eval_thms ctxt) (Facts.named isa_short_ext, [])) then
+ isa_short_ext
+ else
+ isa_ext
+
+fun add_fact _ fact_names (Inference ((_, ss), _, _, [])) =
+ union (op =) (resolve_fact fact_names ss)
+ | add_fact ctxt _ (Inference (_, _, rule, _)) =
+ if rule = leo2_ext then insert (op =) (ext_name ctxt, (Global, General))
+ else I
+ | add_fact _ _ _ = I
+
+fun used_facts_in_atp_proof ctxt fact_names atp_proof =
+ if null atp_proof then Vector.foldl (uncurry (union (op =))) [] fact_names
+ else fold (add_fact ctxt fact_names) atp_proof []
+
+fun used_facts_in_unsound_atp_proof _ _ [] = NONE
+ | used_facts_in_unsound_atp_proof ctxt fact_names atp_proof =
+ let val used_facts = used_facts_in_atp_proof ctxt fact_names atp_proof in
+ if forall (fn (_, (sc, _)) => sc = Global) used_facts andalso
+ not (is_axiom_used_in_proof (is_conjecture o single) atp_proof) then
+ SOME (map fst used_facts)
+ else
+ NONE
+ end
+
+
+(** Soft-core proof reconstruction: one-liners **)
+
+fun string_for_label (s, num) = s ^ string_of_int num
+
+fun show_time NONE = ""
+ | show_time (SOME ext_time) = " (" ^ string_from_ext_time ext_time ^ ")"
+
+fun apply_on_subgoal _ 1 = "by "
+ | apply_on_subgoal 1 _ = "apply "
+ | apply_on_subgoal i n =
+ "prefer " ^ string_of_int i ^ " " ^ apply_on_subgoal 1 n
+fun command_call name [] =
+ name |> not (Lexicon.is_identifier name) ? enclose "(" ")"
+ | command_call name args = "(" ^ name ^ " " ^ space_implode " " args ^ ")"
+fun try_command_line banner time command =
+ banner ^ ": " ^ Markup.markup Isabelle_Markup.sendback command ^ show_time time ^ "."
+fun using_labels [] = ""
+ | using_labels ls =
+ "using " ^ space_implode " " (map string_for_label ls) ^ " "
+fun reconstructor_command reconstr i n (ls, ss) =
+ using_labels ls ^ apply_on_subgoal i n ^
+ command_call (string_for_reconstructor reconstr) ss
+fun minimize_line _ [] = ""
+ | minimize_line minimize_command ss =
+ case minimize_command ss of
+ "" => ""
+ | command =>
+ "\nTo minimize: " ^ Markup.markup Isabelle_Markup.sendback command ^ "."
+
+fun split_used_facts facts =
+ facts |> List.partition (fn (_, (sc, _)) => sc = Chained)
+ |> pairself (sort_distinct (string_ord o pairself fst))
+
+fun one_line_proof_text (preplay, banner, used_facts, minimize_command,
+ subgoal, subgoal_count) =
+ let
+ val (chained, extra) = split_used_facts used_facts
+ val (failed, reconstr, ext_time) =
+ case preplay of
+ Played (reconstr, time) => (false, reconstr, (SOME (false, time)))
+ | Trust_Playable (reconstr, time) =>
+ (false, reconstr,
+ case time of
+ NONE => NONE
+ | SOME time =>
+ if time = Time.zeroTime then NONE else SOME (true, time))
+ | Failed_to_Play reconstr => (true, reconstr, NONE)
+ val try_line =
+ ([], map fst extra)
+ |> reconstructor_command reconstr subgoal subgoal_count
+ |> (if failed then enclose "One-line proof reconstruction failed: " "."
+ else try_command_line banner ext_time)
+ in try_line ^ minimize_line minimize_command (map fst (extra @ chained)) end
+
+(** Hard-core proof reconstruction: structured Isar proofs **)
+
+fun forall_of v t = HOLogic.all_const (fastype_of v) $ lambda v t
+fun exists_of v t = HOLogic.exists_const (fastype_of v) $ lambda v t
+
+fun make_tvar s = TVar (("'" ^ s, 0), HOLogic.typeS)
+fun make_tfree ctxt w =
+ let val ww = "'" ^ w in
+ TFree (ww, the_default HOLogic.typeS (Variable.def_sort ctxt (ww, ~1)))
+ end
+
+val indent_size = 2
+val no_label = ("", ~1)
+
+val raw_prefix = "x"
+val assum_prefix = "a"
+val have_prefix = "f"
+
+fun raw_label_for_name (num, ss) =
+ case resolve_conjecture ss of
+ [j] => (conjecture_prefix, j)
+ | _ => case Int.fromString num of
+ SOME j => (raw_prefix, j)
+ | NONE => (raw_prefix ^ num, 0)
+
+(**** INTERPRETATION OF TSTP SYNTAX TREES ****)
+
+exception HO_TERM of (string, string) ho_term list
+exception FORMULA of (string, string, (string, string) ho_term) formula list
+exception SAME of unit
+
+(* Type variables are given the basic sort "HOL.type". Some will later be
+ constrained by information from type literals, or by type inference. *)
+fun typ_from_atp ctxt (u as ATerm (a, us)) =
+ let val Ts = map (typ_from_atp ctxt) us in
+ case unprefix_and_unascii type_const_prefix a of
+ SOME b => Type (invert_const b, Ts)
+ | NONE =>
+ if not (null us) then
+ raise HO_TERM [u] (* only "tconst"s have type arguments *)
+ else case unprefix_and_unascii tfree_prefix a of
+ SOME b => make_tfree ctxt b
+ | NONE =>
+ (* Could be an Isabelle variable or a variable from the ATP, say "X1"
+ or "_5018". Sometimes variables from the ATP are indistinguishable
+ from Isabelle variables, which forces us to use a type parameter in
+ all cases. *)
+ (a |> perhaps (unprefix_and_unascii tvar_prefix), HOLogic.typeS)
+ |> Type_Infer.param 0
+ end
+
+(* Type class literal applied to a type. Returns the class and the type. *)
+fun type_constraint_from_term ctxt (u as ATerm (a, us)) =
+ case (unprefix_and_unascii class_prefix a, map (typ_from_atp ctxt) us) of
+ (SOME b, [T]) => (b, T)
+ | _ => raise HO_TERM [u]
+
+(* Accumulate type constraints in a formula: negative type literals. *)
+fun add_var (key, z) = Vartab.map_default (key, []) (cons z)
+fun add_type_constraint false (cl, TFree (a ,_)) = add_var ((a, ~1), cl)
+ | add_type_constraint false (cl, TVar (ix, _)) = add_var (ix, cl)
+ | add_type_constraint _ _ = I
+
+fun repair_variable_name f s =
+ let
+ fun subscript_name s n = s ^ nat_subscript n
+ val s = String.map f s
+ in
+ case space_explode "_" s of
+ [_] => (case take_suffix Char.isDigit (String.explode s) of
+ (cs1 as _ :: _, cs2 as _ :: _) =>
+ subscript_name (String.implode cs1)
+ (the (Int.fromString (String.implode cs2)))
+ | (_, _) => s)
+ | [s1, s2] => (case Int.fromString s2 of
+ SOME n => subscript_name s1 n
+ | NONE => s)
+ | _ => s
+ end
+
+(* The number of type arguments of a constant, zero if it's monomorphic. For
+ (instances of) Skolem pseudoconstants, this information is encoded in the
+ constant name. *)
+fun num_type_args thy s =
+ if String.isPrefix skolem_const_prefix s then
+ s |> space_explode Long_Name.separator |> List.last |> Int.fromString |> the
+ else if String.isPrefix lam_lifted_prefix s then
+ if String.isPrefix lam_lifted_poly_prefix s then 2 else 0
+ else
+ (s, Sign.the_const_type thy s) |> Sign.const_typargs thy |> length
+
+fun slack_fastype_of t = fastype_of t handle TERM _ => HOLogic.typeT
+
+(* First-order translation. No types are known for variables. "HOLogic.typeT"
+ should allow them to be inferred. *)
+fun term_from_atp ctxt textual sym_tab =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ (* For Metis, we use 1 rather than 0 because variable references in clauses
+ may otherwise conflict with variable constraints in the goal. At least,
+ type inference often fails otherwise. See also "axiom_inference" in
+ "Metis_Reconstruct". *)
+ val var_index = if textual then 0 else 1
+ fun do_term extra_ts opt_T u =
+ case u of
+ ATerm (s, us) =>
+ if String.isPrefix native_type_prefix s then
+ @{const True} (* ignore TPTP type information *)
+ else if s = tptp_equal then
+ let val ts = map (do_term [] NONE) us in
+ if textual andalso length ts = 2 andalso
+ hd ts aconv List.last ts then
+ (* Vampire is keen on producing these. *)
+ @{const True}
+ else
+ list_comb (Const (@{const_name HOL.eq}, HOLogic.typeT), ts)
+ end
+ else case unprefix_and_unascii const_prefix s of
+ SOME s' =>
+ let
+ val ((s', s''), mangled_us) =
+ s' |> unmangled_const |>> `invert_const
+ in
+ if s' = type_tag_name then
+ case mangled_us @ us of
+ [typ_u, term_u] =>
+ do_term extra_ts (SOME (typ_from_atp ctxt typ_u)) term_u
+ | _ => raise HO_TERM us
+ else if s' = predicator_name then
+ do_term [] (SOME @{typ bool}) (hd us)
+ else if s' = app_op_name then
+ let val extra_t = do_term [] NONE (List.last us) in
+ do_term (extra_t :: extra_ts)
+ (case opt_T of
+ SOME T => SOME (slack_fastype_of extra_t --> T)
+ | NONE => NONE)
+ (nth us (length us - 2))
+ end
+ else if s' = type_guard_name then
+ @{const True} (* ignore type predicates *)
+ else
+ let
+ val new_skolem = String.isPrefix new_skolem_const_prefix s''
+ val num_ty_args =
+ length us - the_default 0 (Symtab.lookup sym_tab s)
+ val (type_us, term_us) =
+ chop num_ty_args us |>> append mangled_us
+ val term_ts = map (do_term [] NONE) term_us
+ val T =
+ (if not (null type_us) andalso
+ num_type_args thy s' = length type_us then
+ let val Ts = type_us |> map (typ_from_atp ctxt) in
+ if new_skolem then
+ SOME (Type_Infer.paramify_vars (tl Ts ---> hd Ts))
+ else if textual then
+ try (Sign.const_instance thy) (s', Ts)
+ else
+ NONE
+ end
+ else
+ NONE)
+ |> (fn SOME T => T
+ | NONE => map slack_fastype_of term_ts --->
+ (case opt_T of
+ SOME T => T
+ | NONE => HOLogic.typeT))
+ val t =
+ if new_skolem then
+ Var ((new_skolem_var_name_from_const s'', var_index), T)
+ else
+ Const (unproxify_const s', T)
+ in list_comb (t, term_ts @ extra_ts) end
+ end
+ | NONE => (* a free or schematic variable *)
+ let
+ val term_ts = map (do_term [] NONE) us
+ val ts = term_ts @ extra_ts
+ val T =
+ case opt_T of
+ SOME T => map slack_fastype_of term_ts ---> T
+ | NONE => map slack_fastype_of ts ---> HOLogic.typeT
+ val t =
+ case unprefix_and_unascii fixed_var_prefix s of
+ SOME s => Free (s, T)
+ | NONE =>
+ case unprefix_and_unascii schematic_var_prefix s of
+ SOME s => Var ((s, var_index), T)
+ | NONE =>
+ Var ((s |> textual ? repair_variable_name Char.toLower,
+ var_index), T)
+ in list_comb (t, ts) end
+ in do_term [] end
+
+fun term_from_atom ctxt textual sym_tab pos (u as ATerm (s, _)) =
+ if String.isPrefix class_prefix s then
+ add_type_constraint pos (type_constraint_from_term ctxt u)
+ #> pair @{const True}
+ else
+ pair (term_from_atp ctxt textual sym_tab (SOME @{typ bool}) u)
+
+val combinator_table =
+ [(@{const_name Meson.COMBI}, @{thm Meson.COMBI_def_raw}),
+ (@{const_name Meson.COMBK}, @{thm Meson.COMBK_def_raw}),
+ (@{const_name Meson.COMBB}, @{thm Meson.COMBB_def_raw}),
+ (@{const_name Meson.COMBC}, @{thm Meson.COMBC_def_raw}),
+ (@{const_name Meson.COMBS}, @{thm Meson.COMBS_def_raw})]
+
+fun uncombine_term thy =
+ let
+ fun aux (t1 $ t2) = betapply (pairself aux (t1, t2))
+ | aux (Abs (s, T, t')) = Abs (s, T, aux t')
+ | aux (t as Const (x as (s, _))) =
+ (case AList.lookup (op =) combinator_table s of
+ SOME thm => thm |> prop_of |> specialize_type thy x
+ |> Logic.dest_equals |> snd
+ | NONE => t)
+ | aux t = t
+ in aux end
+
+(* Update schematic type variables with detected sort constraints. It's not
+ totally clear whether this code is necessary. *)
+fun repair_tvar_sorts (t, tvar_tab) =
+ let
+ fun do_type (Type (a, Ts)) = Type (a, map do_type Ts)
+ | do_type (TVar (xi, s)) =
+ TVar (xi, the_default s (Vartab.lookup tvar_tab xi))
+ | do_type (TFree z) = TFree z
+ fun do_term (Const (a, T)) = Const (a, do_type T)
+ | do_term (Free (a, T)) = Free (a, do_type T)
+ | do_term (Var (xi, T)) = Var (xi, do_type T)
+ | do_term (t as Bound _) = t
+ | do_term (Abs (a, T, t)) = Abs (a, do_type T, do_term t)
+ | do_term (t1 $ t2) = do_term t1 $ do_term t2
+ in t |> not (Vartab.is_empty tvar_tab) ? do_term end
+
+fun quantify_over_var quant_of var_s t =
+ let
+ val vars = [] |> Term.add_vars t |> filter (fn ((s, _), _) => s = var_s)
+ |> map Var
+ in fold_rev quant_of vars t end
+
+(* Interpret an ATP formula as a HOL term, extracting sort constraints as they
+ appear in the formula. *)
+fun prop_from_atp ctxt textual sym_tab phi =
+ let
+ fun do_formula pos phi =
+ case phi of
+ AQuant (_, [], phi) => do_formula pos phi
+ | AQuant (q, (s, _) :: xs, phi') =>
+ do_formula pos (AQuant (q, xs, phi'))
+ (* FIXME: TFF *)
+ #>> quantify_over_var (case q of
+ AForall => forall_of
+ | AExists => exists_of)
+ (s |> textual ? repair_variable_name Char.toLower)
+ | AConn (ANot, [phi']) => do_formula (not pos) phi' #>> s_not
+ | AConn (c, [phi1, phi2]) =>
+ do_formula (pos |> c = AImplies ? not) phi1
+ ##>> do_formula pos phi2
+ #>> (case c of
+ AAnd => s_conj
+ | AOr => s_disj
+ | AImplies => s_imp
+ | AIff => s_iff
+ | ANot => raise Fail "impossible connective")
+ | AAtom tm => term_from_atom ctxt textual sym_tab pos tm
+ | _ => raise FORMULA [phi]
+ in repair_tvar_sorts (do_formula true phi Vartab.empty) end
+
+fun infer_formula_types ctxt =
+ Type.constraint HOLogic.boolT
+ #> Syntax.check_term
+ (Proof_Context.set_mode Proof_Context.mode_schematic ctxt)
+
+fun uncombined_etc_prop_from_atp ctxt textual sym_tab =
+ let val thy = Proof_Context.theory_of ctxt in
+ prop_from_atp ctxt textual sym_tab
+ #> textual ? uncombine_term thy #> infer_formula_types ctxt
+ end
+
+(**** Translation of TSTP files to Isar proofs ****)
+
+fun unvarify_term (Var ((s, 0), T)) = Free (s, T)
+ | unvarify_term t = raise TERM ("unvarify_term: non-Var", [t])
+
+fun decode_line sym_tab (Definition (name, phi1, phi2)) ctxt =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val t1 = prop_from_atp ctxt true sym_tab phi1
+ val vars = snd (strip_comb t1)
+ val frees = map unvarify_term vars
+ val unvarify_args = subst_atomic (vars ~~ frees)
+ val t2 = prop_from_atp ctxt true sym_tab phi2
+ val (t1, t2) =
+ HOLogic.eq_const HOLogic.typeT $ t1 $ t2
+ |> unvarify_args |> uncombine_term thy |> infer_formula_types ctxt
+ |> HOLogic.dest_eq
+ in
+ (Definition (name, t1, t2),
+ fold Variable.declare_term (maps Misc_Legacy.term_frees [t1, t2]) ctxt)
+ end
+ | decode_line sym_tab (Inference (name, u, rule, deps)) ctxt =
+ let val t = u |> uncombined_etc_prop_from_atp ctxt true sym_tab in
+ (Inference (name, t, rule, deps),
+ fold Variable.declare_term (Misc_Legacy.term_frees t) ctxt)
+ end
+fun decode_lines ctxt sym_tab lines =
+ fst (fold_map (decode_line sym_tab) lines ctxt)
+
+fun is_same_inference _ (Definition _) = false
+ | is_same_inference t (Inference (_, t', _, _)) = t aconv t'
+
+(* No "real" literals means only type information (tfree_tcs, clsrel, or
+ clsarity). *)
+val is_only_type_information = curry (op aconv) @{term True}
+
+fun replace_one_dependency (old, new) dep =
+ if is_same_atp_step dep old then new else [dep]
+fun replace_dependencies_in_line _ (line as Definition _) = line
+ | replace_dependencies_in_line p (Inference (name, t, rule, deps)) =
+ Inference (name, t, rule,
+ fold (union (op =) o replace_one_dependency p) deps [])
+
+(* Discard facts; consolidate adjacent lines that prove the same formula, since
+   they differ only in type information. *)
+fun add_line _ (line as Definition _) lines = line :: lines
+ | add_line fact_names (Inference (name as (_, ss), t, rule, [])) lines =
+ (* No dependencies: fact, conjecture, or (for Vampire) internal facts or
+ definitions. *)
+ if is_fact fact_names ss then
+ (* Facts are not proof lines. *)
+ if is_only_type_information t then
+ map (replace_dependencies_in_line (name, [])) lines
+ (* Is there a repetition? If so, replace later line by earlier one. *)
+ else case take_prefix (not o is_same_inference t) lines of
+ (_, []) => lines (* no repetition of proof line *)
+ | (pre, Inference (name', _, _, _) :: post) =>
+ pre @ map (replace_dependencies_in_line (name', [name])) post
+ | _ => raise Fail "unexpected inference"
+ else if is_conjecture ss then
+ Inference (name, s_not t, rule, []) :: lines
+ else
+ map (replace_dependencies_in_line (name, [])) lines
+ | add_line _ (Inference (name, t, rule, deps)) lines =
+ (* Type information will be deleted later; skip repetition test. *)
+ if is_only_type_information t then
+ Inference (name, t, rule, deps) :: lines
+ (* Is there a repetition? If so, replace later line by earlier one. *)
+ else case take_prefix (not o is_same_inference t) lines of
+ (* FIXME: Doesn't this code risk conflating proofs involving different
+ types? *)
+ (_, []) => Inference (name, t, rule, deps) :: lines
+ | (pre, Inference (name', t', rule, _) :: post) =>
+ Inference (name, t', rule, deps) ::
+ pre @ map (replace_dependencies_in_line (name', [name])) post
+ | _ => raise Fail "unexpected inference"
+
+(* Recursively delete empty lines (type information) from the proof. *)
+fun add_nontrivial_line (line as Inference (name, t, _, [])) lines =
+ if is_only_type_information t then delete_dependency name lines
+ else line :: lines
+ | add_nontrivial_line line lines = line :: lines
+and delete_dependency name lines =
+ fold_rev add_nontrivial_line
+ (map (replace_dependencies_in_line (name, [])) lines) []
+
+(* ATPs sometimes reuse free variable names in the strangest ways. Removing
+ offending lines often does the trick. *)
+fun is_bad_free frees (Free x) = not (member (op =) frees x)
+ | is_bad_free _ _ = false
+
+fun add_desired_line _ _ _ (line as Definition (name, _, _)) (j, lines) =
+ (j, line :: map (replace_dependencies_in_line (name, [])) lines)
+ | add_desired_line isar_shrink_factor fact_names frees
+ (Inference (name as (_, ss), t, rule, deps)) (j, lines) =
+ (j + 1,
+ if is_fact fact_names ss orelse
+ is_conjecture ss orelse
+ (* the last line must be kept *)
+ j = 0 orelse
+ (not (is_only_type_information t) andalso
+ null (Term.add_tvars t []) andalso
+ not (exists_subterm (is_bad_free frees) t) andalso
+ length deps >= 2 andalso j mod isar_shrink_factor = 0 andalso
+ (* kill next to last line, which usually results in a trivial step *)
+ j <> 1) then
+ Inference (name, t, rule, deps) :: lines (* keep line *)
+ else
+ map (replace_dependencies_in_line (name, deps)) lines) (* drop line *)
+
+(** Isar proof construction and manipulation **)
+
+type label = string * int
+type facts = label list * string list
+
+datatype isar_qualifier = Show | Then | Moreover | Ultimately
+
+datatype isar_step =
+ Fix of (string * typ) list |
+ Let of term * term |
+ Assume of label * term |
+ Prove of isar_qualifier list * label * term * byline
+and byline =
+ By_Metis of facts |
+ Case_Split of isar_step list list * facts
+
+fun add_fact_from_dependency fact_names (name as (_, ss)) =
+ if is_fact fact_names ss then
+ apsnd (union (op =) (map fst (resolve_fact fact_names ss)))
+ else
+ apfst (insert (op =) (raw_label_for_name name))
+
+fun repair_name "$true" = "c_True"
+ | repair_name "$false" = "c_False"
+ | repair_name "$$e" = tptp_equal (* seen in Vampire proofs *)
+ | repair_name s =
+ if is_tptp_equal s orelse
+ (* seen in Vampire proofs *)
+ (String.isPrefix "sQ" s andalso String.isSuffix "_eqProxy" s) then
+ tptp_equal
+ else
+ s
+
+(* FIXME: Still needed? Try with SPASS proofs perhaps. *)
+val kill_duplicate_assumptions_in_proof =
+ let
+ fun relabel_facts subst =
+ apfst (map (fn l => AList.lookup (op =) subst l |> the_default l))
+ fun do_step (step as Assume (l, t)) (proof, subst, assums) =
+ (case AList.lookup (op aconv) assums t of
+ SOME l' => (proof, (l, l') :: subst, assums)
+ | NONE => (step :: proof, subst, (t, l) :: assums))
+ | do_step (Prove (qs, l, t, by)) (proof, subst, assums) =
+ (Prove (qs, l, t,
+ case by of
+ By_Metis facts => By_Metis (relabel_facts subst facts)
+ | Case_Split (proofs, facts) =>
+ Case_Split (map do_proof proofs,
+ relabel_facts subst facts)) ::
+ proof, subst, assums)
+ | do_step step (proof, subst, assums) = (step :: proof, subst, assums)
+ and do_proof proof = fold do_step proof ([], [], []) |> #1 |> rev
+ in do_proof end
+
+fun used_labels_of_step (Prove (_, _, _, by)) =
+ (case by of
+ By_Metis (ls, _) => ls
+ | Case_Split (proofs, (ls, _)) =>
+ fold (union (op =) o used_labels_of) proofs ls)
+ | used_labels_of_step _ = []
+and used_labels_of proof = fold (union (op =) o used_labels_of_step) proof []
+
+fun kill_useless_labels_in_proof proof =
+ let
+ val used_ls = used_labels_of proof
+ fun do_label l = if member (op =) used_ls l then l else no_label
+ fun do_step (Assume (l, t)) = Assume (do_label l, t)
+ | do_step (Prove (qs, l, t, by)) =
+ Prove (qs, do_label l, t,
+ case by of
+ Case_Split (proofs, facts) =>
+ Case_Split (map (map do_step) proofs, facts)
+ | _ => by)
+ | do_step step = step
+ in map do_step proof end
+
+fun prefix_for_depth n = replicate_string (n + 1)
+
+val relabel_proof =
+ let
+ fun aux _ _ _ [] = []
+ | aux subst depth (next_assum, next_fact) (Assume (l, t) :: proof) =
+ if l = no_label then
+ Assume (l, t) :: aux subst depth (next_assum, next_fact) proof
+ else
+ let val l' = (prefix_for_depth depth assum_prefix, next_assum) in
+ Assume (l', t) ::
+ aux ((l, l') :: subst) depth (next_assum + 1, next_fact) proof
+ end
+ | aux subst depth (next_assum, next_fact)
+ (Prove (qs, l, t, by) :: proof) =
+ let
+ val (l', subst, next_fact) =
+ if l = no_label then
+ (l, subst, next_fact)
+ else
+ let
+ val l' = (prefix_for_depth depth have_prefix, next_fact)
+ in (l', (l, l') :: subst, next_fact + 1) end
+ val relabel_facts =
+ apfst (maps (the_list o AList.lookup (op =) subst))
+ val by =
+ case by of
+ By_Metis facts => By_Metis (relabel_facts facts)
+ | Case_Split (proofs, facts) =>
+ Case_Split (map (aux subst (depth + 1) (1, 1)) proofs,
+ relabel_facts facts)
+ in
+ Prove (qs, l', t, by) :: aux subst depth (next_assum, next_fact) proof
+ end
+ | aux subst depth nextp (step :: proof) =
+ step :: aux subst depth nextp proof
+ in aux [] 0 (1, 1) end
+
+fun string_for_proof ctxt0 type_enc lam_trans i n =
+ let
+ val ctxt =
+ ctxt0 |> Config.put show_free_types false
+ |> Config.put show_types true
+ |> Config.put show_sorts true
+ fun fix_print_mode f x =
+ Print_Mode.setmp (filter (curry (op =) Symbol.xsymbolsN)
+ (print_mode_value ())) f x
+ fun do_indent ind = replicate_string (ind * indent_size) " "
+ fun do_free (s, T) =
+ maybe_quote s ^ " :: " ^
+ maybe_quote (fix_print_mode (Syntax.string_of_typ ctxt) T)
+ fun do_label l = if l = no_label then "" else string_for_label l ^ ": "
+ fun do_have qs =
+ (if member (op =) qs Moreover then "moreover " else "") ^
+ (if member (op =) qs Ultimately then "ultimately " else "") ^
+ (if member (op =) qs Then then
+ if member (op =) qs Show then "thus" else "hence"
+ else
+ if member (op =) qs Show then "show" else "have")
+ val do_term = maybe_quote o fix_print_mode (Syntax.string_of_term ctxt)
+ val reconstr = Metis (type_enc, lam_trans)
+ fun do_facts (ls, ss) =
+ reconstructor_command reconstr 1 1
+ (ls |> sort_distinct (prod_ord string_ord int_ord),
+ ss |> sort_distinct string_ord)
+ and do_step ind (Fix xs) =
+ do_indent ind ^ "fix " ^ space_implode " and " (map do_free xs) ^ "\n"
+ | do_step ind (Let (t1, t2)) =
+ do_indent ind ^ "let " ^ do_term t1 ^ " = " ^ do_term t2 ^ "\n"
+ | do_step ind (Assume (l, t)) =
+ do_indent ind ^ "assume " ^ do_label l ^ do_term t ^ "\n"
+ | do_step ind (Prove (qs, l, t, By_Metis facts)) =
+ do_indent ind ^ do_have qs ^ " " ^
+ do_label l ^ do_term t ^ " " ^ do_facts facts ^ "\n"
+ | do_step ind (Prove (qs, l, t, Case_Split (proofs, facts))) =
+ implode (map (prefix (do_indent ind ^ "moreover\n") o do_block ind)
+ proofs) ^
+ do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^ " " ^
+ do_facts facts ^ "\n"
+ and do_steps prefix suffix ind steps =
+ let val s = implode (map (do_step ind) steps) in
+ replicate_string (ind * indent_size - size prefix) " " ^ prefix ^
+ String.extract (s, ind * indent_size,
+ SOME (size s - ind * indent_size - 1)) ^
+ suffix ^ "\n"
+ end
+ and do_block ind proof = do_steps "{ " " }" (ind + 1) proof
+ (* One-step proofs are pointless; better use the Metis one-liner
+ directly. *)
+ and do_proof [Prove (_, _, _, By_Metis _)] = ""
+ | do_proof proof =
+ (if i <> 1 then "prefer " ^ string_of_int i ^ "\n" else "") ^
+ do_indent 0 ^ "proof -\n" ^ do_steps "" "" 1 proof ^ do_indent 0 ^
+ (if n <> 1 then "next" else "qed")
+ in do_proof end
+
+fun isar_proof_text ctxt isar_proof_requested
+ (debug, isar_shrink_factor, pool, fact_names, sym_tab, atp_proof, goal)
+ (one_line_params as (_, _, _, _, subgoal, subgoal_count)) =
+ let
+ val isar_shrink_factor =
+ (if isar_proof_requested then 1 else 2) * isar_shrink_factor
+ val (params, hyp_ts, concl_t) = strip_subgoal ctxt goal subgoal
+ val frees = fold Term.add_frees (concl_t :: hyp_ts) []
+ val one_line_proof = one_line_proof_text one_line_params
+ val type_enc =
+ if is_typed_helper_used_in_atp_proof atp_proof then full_typesN
+ else partial_typesN
+ val lam_trans = lam_trans_from_atp_proof atp_proof metis_default_lam_trans
+
+ fun isar_proof_of () =
+ let
+ val atp_proof =
+ atp_proof
+ |> clean_up_atp_proof_dependencies
+ |> nasty_atp_proof pool
+ |> map_term_names_in_atp_proof repair_name
+ |> decode_lines ctxt sym_tab
+ |> rpair [] |-> fold_rev (add_line fact_names)
+ |> rpair [] |-> fold_rev add_nontrivial_line
+ |> rpair (0, [])
+ |-> fold_rev (add_desired_line isar_shrink_factor fact_names frees)
+ |> snd
+ val conj_name = conjecture_prefix ^ string_of_int (length hyp_ts)
+ val conjs =
+ atp_proof
+ |> map_filter (fn Inference (name as (_, ss), _, _, []) =>
+ if member (op =) ss conj_name then SOME name else NONE
+ | _ => NONE)
+ fun dep_of_step (Definition _) = NONE
+ | dep_of_step (Inference (name, _, _, from)) = SOME (from, name)
+ val ref_graph = atp_proof |> map_filter dep_of_step |> make_ref_graph
+ val axioms = axioms_of_ref_graph ref_graph conjs
+ val tainted = tainted_atoms_of_ref_graph ref_graph conjs
+ val props =
+ Symtab.empty
+ |> fold (fn Definition _ => I (* FIXME *)
+ | Inference ((s, _), t, _, _) =>
+ Symtab.update_new (s,
+ t |> member (op = o apsnd fst) tainted s ? s_not))
+ atp_proof
+ (* FIXME: add "fold_rev forall_of (map Var (Term.add_vars t []))"? *)
+ fun prop_of_clause c =
+ fold (curry s_disj) (map_filter (Symtab.lookup props o fst) c)
+ @{term False}
+ fun label_of_clause c = (space_implode "___" (map fst c), 0)
+ fun maybe_show outer c =
+ (outer andalso length c = 1 andalso subset (op =) (c, conjs))
+ ? cons Show
+ fun do_have outer qs (gamma, c) =
+ Prove (maybe_show outer c qs, label_of_clause c, prop_of_clause c,
+ By_Metis (fold (add_fact_from_dependency fact_names
+ o the_single) gamma ([], [])))
+ fun do_inf outer (Have z) = do_have outer [] z
+ | do_inf outer (Hence z) = do_have outer [Then] z
+ | do_inf outer (Cases cases) =
+ let val c = succedent_of_cases cases in
+ Prove (maybe_show outer c [Ultimately], label_of_clause c,
+ prop_of_clause c,
+ Case_Split (map (do_case false) cases, ([], [])))
+ end
+ and do_case outer (c, infs) =
+ Assume (label_of_clause c, prop_of_clause c) ::
+ map (do_inf outer) infs
+ val isar_proof =
+ (if null params then [] else [Fix params]) @
+ (ref_graph
+ |> redirect_graph axioms tainted
+ |> chain_direct_proof
+ |> map (do_inf true)
+ |> kill_duplicate_assumptions_in_proof
+ |> kill_useless_labels_in_proof
+ |> relabel_proof)
+ |> string_for_proof ctxt type_enc lam_trans subgoal subgoal_count
+ in
+ case isar_proof of
+ "" =>
+ if isar_proof_requested then
+ "\nNo structured proof available (proof too short)."
+ else
+ ""
+ | _ =>
+ "\n\n" ^ (if isar_proof_requested then "Structured proof"
+ else "Perhaps this will work") ^
+ ":\n" ^ Markup.markup Isabelle_Markup.sendback isar_proof
+ end
+ val isar_proof =
+ if debug then
+ isar_proof_of ()
+ else case try isar_proof_of () of
+ SOME s => s
+ | NONE => if isar_proof_requested then
+ "\nWarning: The Isar proof construction failed."
+ else
+ ""
+ in one_line_proof ^ isar_proof end
+
+fun proof_text ctxt isar_proof isar_params
+ (one_line_params as (preplay, _, _, _, _, _)) =
+ (if case preplay of Failed_to_Play _ => true | _ => isar_proof then
+ isar_proof_text ctxt isar_proof isar_params
+ else
+ one_line_proof_text) one_line_params
+
+end;
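
Illustrative aside (Isabelle/ML sketch, assuming ATP_Proof_Reconstruct is
loaded): "metis_call" only lists non-default options in the generated call.

  val call1 =
    ATP_Proof_Reconstruct.metis_call "partial_types"
      ATP_Proof_Reconstruct.metis_default_lam_trans
  (* both arguments are the defaults, so this should yield plain "metis" *)
  val call2 = ATP_Proof_Reconstruct.metis_call "full_types" "lifting"
  (* should yield something like "metis (lifting, full_types)" *)
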
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/ATP/atp_proof_redirect.ML Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,223 @@
+(* Title: HOL/Tools/ATP/atp_proof_redirect.ML
+ Author: Jasmin Blanchette, TU Muenchen
+
+Transformation of a proof by contradiction into a direct proof.
+*)
+
+signature ATP_ATOM =
+sig
+ type key
+ val ord : key * key -> order
+ val string_of : key -> string
+end;
+
+signature ATP_PROOF_REDIRECT =
+sig
+ type atom
+
+ structure Atom_Graph : GRAPH
+
+ type ref_sequent = atom list * atom
+ type ref_graph = unit Atom_Graph.T
+
+ type clause = atom list
+ type direct_sequent = atom list * clause
+ type direct_graph = unit Atom_Graph.T
+
+ type rich_sequent = clause list * clause
+
+ datatype direct_inference =
+ Have of rich_sequent |
+ Hence of rich_sequent |
+ Cases of (clause * direct_inference list) list
+
+ type direct_proof = direct_inference list
+
+ val make_ref_graph : (atom list * atom) list -> ref_graph
+ val axioms_of_ref_graph : ref_graph -> atom list -> atom list
+ val tainted_atoms_of_ref_graph : ref_graph -> atom list -> atom list
+ val sequents_of_ref_graph : ref_graph -> ref_sequent list
+ val redirect_sequent : atom list -> atom -> ref_sequent -> direct_sequent
+ val direct_graph : direct_sequent list -> direct_graph
+ val redirect_graph : atom list -> atom list -> ref_graph -> direct_proof
+ val succedent_of_cases : (clause * direct_inference list) list -> clause
+ val chain_direct_proof : direct_proof -> direct_proof
+ val string_of_direct_proof : direct_proof -> string
+end;
+
+functor ATP_Proof_Redirect(Atom : ATP_ATOM): ATP_PROOF_REDIRECT =
+struct
+
+type atom = Atom.key
+
+structure Atom_Graph = Graph(Atom)
+
+type ref_sequent = atom list * atom
+type ref_graph = unit Atom_Graph.T
+
+type clause = atom list
+type direct_sequent = atom list * clause
+type direct_graph = unit Atom_Graph.T
+
+type rich_sequent = clause list * clause
+
+datatype direct_inference =
+ Have of rich_sequent |
+ Hence of rich_sequent |
+ Cases of (clause * direct_inference list) list
+
+type direct_proof = direct_inference list
+
+fun atom_eq p = (Atom.ord p = EQUAL)
+fun clause_eq (c, d) = (length c = length d andalso forall atom_eq (c ~~ d))
+fun direct_sequent_eq ((gamma, c), (delta, d)) =
+ clause_eq (gamma, delta) andalso clause_eq (c, d)
+
+fun make_ref_graph infers =
+ let
+ fun add_edge to from =
+ Atom_Graph.default_node (from, ())
+ #> Atom_Graph.default_node (to, ())
+ #> Atom_Graph.add_edge_acyclic (from, to)
+ fun add_infer (froms, to) = fold (add_edge to) froms
+ in Atom_Graph.empty |> fold add_infer infers end
+
+fun axioms_of_ref_graph ref_graph conjs =
+ subtract atom_eq conjs (Atom_Graph.minimals ref_graph)
+fun tainted_atoms_of_ref_graph ref_graph = Atom_Graph.all_succs ref_graph
+
+fun sequents_of_ref_graph ref_graph =
+ map (`(Atom_Graph.immediate_preds ref_graph))
+ (filter_out (Atom_Graph.is_minimal ref_graph) (Atom_Graph.keys ref_graph))
+
+fun redirect_sequent tainted bot (gamma, c) =
+ if member atom_eq tainted c then
+ gamma |> List.partition (not o member atom_eq tainted)
+ |>> not (atom_eq (c, bot)) ? cons c
+ else
+ (gamma, [c])
+
+fun direct_graph seqs =
+ let
+ fun add_edge from to =
+ Atom_Graph.default_node (from, ())
+ #> Atom_Graph.default_node (to, ())
+ #> Atom_Graph.add_edge_acyclic (from, to)
+ fun add_seq (gamma, c) = fold (fn l => fold (add_edge l) c) gamma
+ in Atom_Graph.empty |> fold add_seq seqs end
+
+fun disj cs = fold (union atom_eq) cs [] |> sort Atom.ord
+
+fun succedent_of_inference (Have (_, c)) = c
+ | succedent_of_inference (Hence (_, c)) = c
+ | succedent_of_inference (Cases cases) = succedent_of_cases cases
+and succedent_of_case (c, []) = c
+ | succedent_of_case (_, infs) = succedent_of_inference (List.last infs)
+and succedent_of_cases cases = disj (map succedent_of_case cases)
+
+fun dest_Have (Have z) = z
+ | dest_Have _ = raise Fail "non-Have"
+
+fun enrich_Have nontrivs trivs (cs, c) =
+ (cs |> map (fn c => if member clause_eq nontrivs c then disj (c :: trivs)
+ else c),
+ disj (c :: trivs))
+ |> Have
+
+fun s_cases cases =
+ case cases |> List.partition (null o snd) of
+ (trivs, nontrivs as [(nontriv0, proof)]) =>
+ if forall (can dest_Have) proof then
+ let val seqs = proof |> map dest_Have in
+ seqs |> map (enrich_Have (nontriv0 :: map snd seqs) (map fst trivs))
+ end
+ else
+ [Cases nontrivs]
+ | (_, nontrivs) => [Cases nontrivs]
+
+fun descendants direct_graph =
+ these o try (Atom_Graph.all_succs direct_graph) o single
+
+fun zones_of 0 _ = []
+ | zones_of n (bs :: bss) =
+ (fold (subtract atom_eq) bss) bs :: zones_of (n - 1) (bss @ [bs])
+
+fun redirect_graph axioms tainted ref_graph =
+ let
+ val [bot] = Atom_Graph.maximals ref_graph
+ val seqs =
+ map (redirect_sequent tainted bot) (sequents_of_ref_graph ref_graph)
+ val direct_graph = direct_graph seqs
+
+ fun redirect c proved seqs =
+ if null seqs then
+ []
+ else if length c < 2 then
+ let
+ val proved = c @ proved
+ val provable =
+ filter (fn (gamma, _) => subset atom_eq (gamma, proved)) seqs
+ val horn_provable = filter (fn (_, [_]) => true | _ => false) provable
+ val seq as (gamma, c) = hd (horn_provable @ provable)
+ in
+ Have (map single gamma, c) ::
+ redirect c proved (filter (curry (not o direct_sequent_eq) seq) seqs)
+ end
+ else
+ let
+ fun subsequents seqs zone =
+ filter (fn (gamma, _) => subset atom_eq (gamma, zone @ proved)) seqs
+ val zones = zones_of (length c) (map (descendants direct_graph) c)
+ val subseqss = map (subsequents seqs) zones
+ val seqs = fold (subtract direct_sequent_eq) subseqss seqs
+ val cases =
+ map2 (fn l => fn subseqs => ([l], redirect [l] proved subseqs))
+ c subseqss
+ in s_cases cases @ redirect (succedent_of_cases cases) proved seqs end
+ in redirect [] axioms seqs end
+
+val chain_direct_proof =
+ let
+ fun chain_inf cl0 (seq as Have (cs, c)) =
+ if member clause_eq cs cl0 then
+ Hence (filter_out (curry clause_eq cl0) cs, c)
+ else
+ seq
+ | chain_inf _ (Cases cases) = Cases (map chain_case cases)
+ and chain_case (c, is) = (c, chain_proof (SOME c) is)
+ and chain_proof _ [] = []
+ | chain_proof (SOME prev) (i :: is) =
+ chain_inf prev i :: chain_proof (SOME (succedent_of_inference i)) is
+ | chain_proof _ (i :: is) =
+ i :: chain_proof (SOME (succedent_of_inference i)) is
+ in chain_proof NONE end
+
+fun indent 0 = ""
+ | indent n = " " ^ indent (n - 1)
+
+fun string_of_clause [] = "\<bottom>"
+ | string_of_clause ls = space_implode " \<or> " (map Atom.string_of ls)
+
+fun string_of_rich_sequent ch ([], c) = ch ^ " " ^ string_of_clause c
+ | string_of_rich_sequent ch (cs, c) =
+ commas (map string_of_clause cs) ^ " " ^ ch ^ " " ^ string_of_clause c
+
+fun string_of_case depth (c, proof) =
+ indent (depth + 1) ^ "[" ^ string_of_clause c ^ "]"
+ |> not (null proof) ? suffix ("\n" ^ string_of_subproof (depth + 1) proof)
+
+and string_of_inference depth (Have seq) =
+ indent depth ^ string_of_rich_sequent "\<triangleright>" seq
+ | string_of_inference depth (Hence seq) =
+ indent depth ^ string_of_rich_sequent "\<guillemotright>" seq
+ | string_of_inference depth (Cases cases) =
+ indent depth ^ "[\n" ^
+ space_implode ("\n" ^ indent depth ^ "|\n")
+ (map (string_of_case depth) cases) ^ "\n" ^
+ indent depth ^ "]"
+
+and string_of_subproof depth = cat_lines o map (string_of_inference depth)
+
+val string_of_direct_proof = string_of_subproof 0
+
+end;
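
Illustrative aside (Isabelle/ML sketch with toy string atoms, analogous to
String_Redirect in atp_proof_reconstruct.ML above): the functor can be
instantiated and exercised on a tiny refutation graph.

  structure Toy_Redirect = ATP_Proof_Redirect(
    type key = string
    val ord = fast_string_ord
    val string_of = I)

  (* toy refutation: "bot" follows from axiom "a" and negated conjecture "c" *)
  val ref_graph = Toy_Redirect.make_ref_graph [(["a", "c"], "bot")]
  val axioms = Toy_Redirect.axioms_of_ref_graph ref_graph ["c"]
  val tainted = Toy_Redirect.tainted_atoms_of_ref_graph ref_graph ["c"]
  val direct_proof = Toy_Redirect.redirect_graph axioms tainted ref_graph
  (* expected: a single direct step; printable with string_of_direct_proof *)
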
--- a/src/HOL/Tools/ATP/atp_reconstruct.ML Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,951 +0,0 @@
-(* Title: HOL/Tools/ATP/atp_reconstruct.ML
- Author: Lawrence C. Paulson, Cambridge University Computer Laboratory
- Author: Claire Quigley, Cambridge University Computer Laboratory
- Author: Jasmin Blanchette, TU Muenchen
-
-Proof reconstruction from ATP proofs.
-*)
-
-signature ATP_RECONSTRUCT =
-sig
- type ('a, 'b) ho_term = ('a, 'b) ATP_Problem.ho_term
- type ('a, 'b, 'c) formula = ('a, 'b, 'c) ATP_Problem.formula
- type 'a proof = 'a ATP_Proof.proof
- type locality = ATP_Translate.locality
-
- datatype reconstructor =
- Metis of string * string |
- SMT
-
- datatype play =
- Played of reconstructor * Time.time |
- Trust_Playable of reconstructor * Time.time option |
- Failed_to_Play of reconstructor
-
- type minimize_command = string list -> string
- type one_line_params =
- play * string * (string * locality) list * minimize_command * int * int
- type isar_params =
- bool * int * string Symtab.table * (string * locality) list vector
- * int Symtab.table * string proof * thm
-
- val metisN : string
- val smtN : string
- val full_typesN : string
- val partial_typesN : string
- val no_typesN : string
- val really_full_type_enc : string
- val full_type_enc : string
- val partial_type_enc : string
- val no_type_enc : string
- val full_type_encs : string list
- val partial_type_encs : string list
- val metis_default_lam_trans : string
- val metis_call : string -> string -> string
- val string_for_reconstructor : reconstructor -> string
- val used_facts_in_atp_proof :
- Proof.context -> (string * locality) list vector -> string proof
- -> (string * locality) list
- val lam_trans_from_atp_proof : string proof -> string -> string
- val is_typed_helper_used_in_atp_proof : string proof -> bool
- val used_facts_in_unsound_atp_proof :
- Proof.context -> (string * locality) list vector -> 'a proof
- -> string list option
- val unalias_type_enc : string -> string list
- val one_line_proof_text : one_line_params -> string
- val make_tvar : string -> typ
- val make_tfree : Proof.context -> string -> typ
- val term_from_atp :
- Proof.context -> bool -> int Symtab.table -> typ option
- -> (string, string) ho_term -> term
- val prop_from_atp :
- Proof.context -> bool -> int Symtab.table
- -> (string, string, (string, string) ho_term) formula -> term
- val isar_proof_text :
- Proof.context -> bool -> isar_params -> one_line_params -> string
- val proof_text :
- Proof.context -> bool -> isar_params -> one_line_params -> string
-end;
-
-structure ATP_Reconstruct : ATP_RECONSTRUCT =
-struct
-
-open ATP_Util
-open ATP_Problem
-open ATP_Proof
-open ATP_Translate
-
-structure String_Redirect = ATP_Redirect(
- type key = step_name
- val ord = fn ((s, _ : string list), (s', _)) => fast_string_ord (s, s')
- val string_of = fst)
-
-open String_Redirect
-
-datatype reconstructor =
- Metis of string * string |
- SMT
-
-datatype play =
- Played of reconstructor * Time.time |
- Trust_Playable of reconstructor * Time.time option |
- Failed_to_Play of reconstructor
-
-type minimize_command = string list -> string
-type one_line_params =
- play * string * (string * locality) list * minimize_command * int * int
-type isar_params =
- bool * int * string Symtab.table * (string * locality) list vector
- * int Symtab.table * string proof * thm
-
-val metisN = "metis"
-val smtN = "smt"
-
-val full_typesN = "full_types"
-val partial_typesN = "partial_types"
-val no_typesN = "no_types"
-
-val really_full_type_enc = "mono_tags"
-val full_type_enc = "poly_guards_query"
-val partial_type_enc = "poly_args"
-val no_type_enc = "erased"
-
-val full_type_encs = [full_type_enc, really_full_type_enc]
-val partial_type_encs = partial_type_enc :: full_type_encs
-
-val type_enc_aliases =
- [(full_typesN, full_type_encs),
- (partial_typesN, partial_type_encs),
- (no_typesN, [no_type_enc])]
-
-fun unalias_type_enc s =
- AList.lookup (op =) type_enc_aliases s |> the_default [s]
-
-val metis_default_lam_trans = combinatorsN
-
-fun metis_call type_enc lam_trans =
- let
- val type_enc =
- case AList.find (fn (enc, encs) => enc = hd encs) type_enc_aliases
- type_enc of
- [alias] => alias
- | _ => type_enc
- val opts = [] |> type_enc <> partial_typesN ? cons type_enc
- |> lam_trans <> metis_default_lam_trans ? cons lam_trans
- in metisN ^ (if null opts then "" else " (" ^ commas opts ^ ")") end
-
-fun string_for_reconstructor (Metis (type_enc, lam_trans)) =
- metis_call type_enc lam_trans
- | string_for_reconstructor SMT = smtN
-
-fun find_first_in_list_vector vec key =
- Vector.foldl (fn (ps, NONE) => AList.lookup (op =) ps key
- | (_, value) => value) NONE vec
-
-val unprefix_fact_number = space_implode "_" o tl o space_explode "_"
-
-fun resolve_one_named_fact fact_names s =
- case try (unprefix fact_prefix) s of
- SOME s' =>
- let val s' = s' |> unprefix_fact_number |> unascii_of in
- s' |> find_first_in_list_vector fact_names |> Option.map (pair s')
- end
- | NONE => NONE
-fun resolve_fact fact_names = map_filter (resolve_one_named_fact fact_names)
-fun is_fact fact_names = not o null o resolve_fact fact_names
-
-fun resolve_one_named_conjecture s =
- case try (unprefix conjecture_prefix) s of
- SOME s' => Int.fromString s'
- | NONE => NONE
-
-val resolve_conjecture = map_filter resolve_one_named_conjecture
-val is_conjecture = not o null o resolve_conjecture
-
-fun is_axiom_used_in_proof pred =
- exists (fn Inference ((_, ss), _, _, []) => exists pred ss | _ => false)
-
-val is_combinator_def = String.isPrefix (helper_prefix ^ combinator_prefix)
-
-val ascii_of_lam_fact_prefix = ascii_of lam_fact_prefix
-
-(* overapproximation (good enough) *)
-fun is_lam_lifted s =
- String.isPrefix fact_prefix s andalso
- String.isSubstring ascii_of_lam_fact_prefix s
-
-fun lam_trans_from_atp_proof atp_proof default =
- if is_axiom_used_in_proof is_combinator_def atp_proof then combinatorsN
- else if is_axiom_used_in_proof is_lam_lifted atp_proof then lam_liftingN
- else default
-
-val is_typed_helper_name =
- String.isPrefix helper_prefix andf String.isSuffix typed_helper_suffix
-fun is_typed_helper_used_in_atp_proof atp_proof =
- is_axiom_used_in_proof is_typed_helper_name atp_proof
-
-val leo2_ext = "extcnf_equal_neg"
-val isa_ext = Thm.get_name_hint @{thm ext}
-val isa_short_ext = Long_Name.base_name isa_ext
-
-fun ext_name ctxt =
- if Thm.eq_thm_prop (@{thm ext},
- singleton (Attrib.eval_thms ctxt) (Facts.named isa_short_ext, [])) then
- isa_short_ext
- else
- isa_ext
-
-fun add_fact _ fact_names (Inference ((_, ss), _, _, [])) =
- union (op =) (resolve_fact fact_names ss)
- | add_fact ctxt _ (Inference (_, _, rule, _)) =
- if rule = leo2_ext then insert (op =) (ext_name ctxt, General) else I
- | add_fact _ _ _ = I
-
-fun used_facts_in_atp_proof ctxt fact_names atp_proof =
- if null atp_proof then Vector.foldl (uncurry (union (op =))) [] fact_names
- else fold (add_fact ctxt fact_names) atp_proof []
-
-(* (quasi-)underapproximation of the truth *)
-fun is_locality_global Local = false
- | is_locality_global Assum = false
- | is_locality_global Chained = false
- | is_locality_global _ = true
-
-fun used_facts_in_unsound_atp_proof _ _ [] = NONE
- | used_facts_in_unsound_atp_proof ctxt fact_names atp_proof =
- let
- val used_facts = used_facts_in_atp_proof ctxt fact_names atp_proof
- in
- if forall (is_locality_global o snd) used_facts andalso
- not (is_axiom_used_in_proof (is_conjecture o single) atp_proof) then
- SOME (map fst used_facts)
- else
- NONE
- end
-
-
-(** Soft-core proof reconstruction: one-liners **)
-
-fun string_for_label (s, num) = s ^ string_of_int num
-
-fun show_time NONE = ""
- | show_time (SOME ext_time) = " (" ^ string_from_ext_time ext_time ^ ")"
-
-fun apply_on_subgoal _ 1 = "by "
- | apply_on_subgoal 1 _ = "apply "
- | apply_on_subgoal i n =
- "prefer " ^ string_of_int i ^ " " ^ apply_on_subgoal 1 n
-fun command_call name [] =
- name |> not (Lexicon.is_identifier name) ? enclose "(" ")"
- | command_call name args = "(" ^ name ^ " " ^ space_implode " " args ^ ")"
-fun try_command_line banner time command =
- banner ^ ": " ^ Markup.markup Isabelle_Markup.sendback command ^ show_time time ^ "."
-fun using_labels [] = ""
- | using_labels ls =
- "using " ^ space_implode " " (map string_for_label ls) ^ " "
-fun reconstructor_command reconstr i n (ls, ss) =
- using_labels ls ^ apply_on_subgoal i n ^
- command_call (string_for_reconstructor reconstr) ss
-fun minimize_line _ [] = ""
- | minimize_line minimize_command ss =
- case minimize_command ss of
- "" => ""
- | command => "\nTo minimize: " ^ Markup.markup Isabelle_Markup.sendback command ^ "."
-
-val split_used_facts =
- List.partition (curry (op =) Chained o snd)
- #> pairself (sort_distinct (string_ord o pairself fst))
-
-fun one_line_proof_text (preplay, banner, used_facts, minimize_command,
- subgoal, subgoal_count) =
- let
- val (chained, extra) = split_used_facts used_facts
- val (failed, reconstr, ext_time) =
- case preplay of
- Played (reconstr, time) => (false, reconstr, (SOME (false, time)))
- | Trust_Playable (reconstr, time) =>
- (false, reconstr,
- case time of
- NONE => NONE
- | SOME time =>
- if time = Time.zeroTime then NONE else SOME (true, time))
- | Failed_to_Play reconstr => (true, reconstr, NONE)
- val try_line =
- ([], map fst extra)
- |> reconstructor_command reconstr subgoal subgoal_count
- |> (if failed then enclose "One-line proof reconstruction failed: " "."
- else try_command_line banner ext_time)
- in try_line ^ minimize_line minimize_command (map fst (extra @ chained)) end
-
-(** Hard-core proof reconstruction: structured Isar proofs **)
-
-fun forall_of v t = HOLogic.all_const (fastype_of v) $ lambda v t
-fun exists_of v t = HOLogic.exists_const (fastype_of v) $ lambda v t
-
-fun make_tvar s = TVar (("'" ^ s, 0), HOLogic.typeS)
-fun make_tfree ctxt w =
- let val ww = "'" ^ w in
- TFree (ww, the_default HOLogic.typeS (Variable.def_sort ctxt (ww, ~1)))
- end
-
-val indent_size = 2
-val no_label = ("", ~1)
-
-val raw_prefix = "x"
-val assum_prefix = "a"
-val have_prefix = "f"
-
-fun raw_label_for_name (num, ss) =
- case resolve_conjecture ss of
- [j] => (conjecture_prefix, j)
- | _ => case Int.fromString num of
- SOME j => (raw_prefix, j)
- | NONE => (raw_prefix ^ num, 0)
-
-(**** INTERPRETATION OF TSTP SYNTAX TREES ****)
-
-exception HO_TERM of (string, string) ho_term list
-exception FORMULA of (string, string, (string, string) ho_term) formula list
-exception SAME of unit
-
-(* Type variables are given the basic sort "HOL.type". Some will later be
- constrained by information from type literals, or by type inference. *)
-fun typ_from_atp ctxt (u as ATerm (a, us)) =
- let val Ts = map (typ_from_atp ctxt) us in
- case unprefix_and_unascii type_const_prefix a of
- SOME b => Type (invert_const b, Ts)
- | NONE =>
- if not (null us) then
- raise HO_TERM [u] (* only "tconst"s have type arguments *)
- else case unprefix_and_unascii tfree_prefix a of
- SOME b => make_tfree ctxt b
- | NONE =>
- (* Could be an Isabelle variable or a variable from the ATP, say "X1"
- or "_5018". Sometimes variables from the ATP are indistinguishable
- from Isabelle variables, which forces us to use a type parameter in
- all cases. *)
- (a |> perhaps (unprefix_and_unascii tvar_prefix), HOLogic.typeS)
- |> Type_Infer.param 0
- end
-
-(* Type class literal applied to a type. Returns triple of polarity, class,
- type. *)
-fun type_constraint_from_term ctxt (u as ATerm (a, us)) =
- case (unprefix_and_unascii class_prefix a, map (typ_from_atp ctxt) us) of
- (SOME b, [T]) => (b, T)
- | _ => raise HO_TERM [u]
-
-(* Accumulate type constraints in a formula: negative type literals. *)
-fun add_var (key, z) = Vartab.map_default (key, []) (cons z)
-fun add_type_constraint false (cl, TFree (a ,_)) = add_var ((a, ~1), cl)
- | add_type_constraint false (cl, TVar (ix, _)) = add_var (ix, cl)
- | add_type_constraint _ _ = I
-
-fun repair_variable_name f s =
- let
- fun subscript_name s n = s ^ nat_subscript n
- val s = String.map f s
- in
- case space_explode "_" s of
- [_] => (case take_suffix Char.isDigit (String.explode s) of
- (cs1 as _ :: _, cs2 as _ :: _) =>
- subscript_name (String.implode cs1)
- (the (Int.fromString (String.implode cs2)))
- | (_, _) => s)
- | [s1, s2] => (case Int.fromString s2 of
- SOME n => subscript_name s1 n
- | NONE => s)
- | _ => s
- end
-
-(* The number of type arguments of a constant, zero if it's monomorphic. For
- (instances of) Skolem pseudoconstants, this information is encoded in the
- constant name. *)
-fun num_type_args thy s =
- if String.isPrefix skolem_const_prefix s then
- s |> space_explode Long_Name.separator |> List.last |> Int.fromString |> the
- else if String.isPrefix lam_lifted_prefix s then
- if String.isPrefix lam_lifted_poly_prefix s then 2 else 0
- else
- (s, Sign.the_const_type thy s) |> Sign.const_typargs thy |> length
-
-fun slack_fastype_of t = fastype_of t handle TERM _ => HOLogic.typeT
-
-(* First-order translation. No types are known for variables. "HOLogic.typeT"
- should allow them to be inferred. *)
-fun term_from_atp ctxt textual sym_tab =
- let
- val thy = Proof_Context.theory_of ctxt
- (* For Metis, we use 1 rather than 0 because variable references in clauses
- may otherwise conflict with variable constraints in the goal. At least,
- type inference often fails otherwise. See also "axiom_inference" in
- "Metis_Reconstruct". *)
- val var_index = if textual then 0 else 1
- fun do_term extra_ts opt_T u =
- case u of
- ATerm (s, us) =>
- if String.isPrefix simple_type_prefix s then
- @{const True} (* ignore TPTP type information *)
- else if s = tptp_equal then
- let val ts = map (do_term [] NONE) us in
- if textual andalso length ts = 2 andalso
- hd ts aconv List.last ts then
- (* Vampire is keen on producing these. *)
- @{const True}
- else
- list_comb (Const (@{const_name HOL.eq}, HOLogic.typeT), ts)
- end
- else case unprefix_and_unascii const_prefix s of
- SOME s' =>
- let
- val ((s', s''), mangled_us) =
- s' |> unmangled_const |>> `invert_const
- in
- if s' = type_tag_name then
- case mangled_us @ us of
- [typ_u, term_u] =>
- do_term extra_ts (SOME (typ_from_atp ctxt typ_u)) term_u
- | _ => raise HO_TERM us
- else if s' = predicator_name then
- do_term [] (SOME @{typ bool}) (hd us)
- else if s' = app_op_name then
- let val extra_t = do_term [] NONE (List.last us) in
- do_term (extra_t :: extra_ts)
- (case opt_T of
- SOME T => SOME (slack_fastype_of extra_t --> T)
- | NONE => NONE)
- (nth us (length us - 2))
- end
- else if s' = type_guard_name then
- @{const True} (* ignore type predicates *)
- else
- let
- val new_skolem = String.isPrefix new_skolem_const_prefix s''
- val num_ty_args =
- length us - the_default 0 (Symtab.lookup sym_tab s)
- val (type_us, term_us) =
- chop num_ty_args us |>> append mangled_us
- val term_ts = map (do_term [] NONE) term_us
- val T =
- (if not (null type_us) andalso
- num_type_args thy s' = length type_us then
- let val Ts = type_us |> map (typ_from_atp ctxt) in
- if new_skolem then
- SOME (Type_Infer.paramify_vars (tl Ts ---> hd Ts))
- else if textual then
- try (Sign.const_instance thy) (s', Ts)
- else
- NONE
- end
- else
- NONE)
- |> (fn SOME T => T
- | NONE => map slack_fastype_of term_ts --->
- (case opt_T of
- SOME T => T
- | NONE => HOLogic.typeT))
- val t =
- if new_skolem then
- Var ((new_skolem_var_name_from_const s'', var_index), T)
- else
- Const (unproxify_const s', T)
- in list_comb (t, term_ts @ extra_ts) end
- end
- | NONE => (* a free or schematic variable *)
- let
- val term_ts = map (do_term [] NONE) us
- val ts = term_ts @ extra_ts
- val T =
- case opt_T of
- SOME T => map slack_fastype_of term_ts ---> T
- | NONE => map slack_fastype_of ts ---> HOLogic.typeT
- val t =
- case unprefix_and_unascii fixed_var_prefix s of
- SOME s => Free (s, T)
- | NONE =>
- case unprefix_and_unascii schematic_var_prefix s of
- SOME s => Var ((s, var_index), T)
- | NONE =>
- Var ((s |> textual ? repair_variable_name Char.toLower,
- var_index), T)
- in list_comb (t, ts) end
- in do_term [] end
-
-fun term_from_atom ctxt textual sym_tab pos (u as ATerm (s, _)) =
- if String.isPrefix class_prefix s then
- add_type_constraint pos (type_constraint_from_term ctxt u)
- #> pair @{const True}
- else
- pair (term_from_atp ctxt textual sym_tab (SOME @{typ bool}) u)
-
-val combinator_table =
- [(@{const_name Meson.COMBI}, @{thm Meson.COMBI_def_raw}),
- (@{const_name Meson.COMBK}, @{thm Meson.COMBK_def_raw}),
- (@{const_name Meson.COMBB}, @{thm Meson.COMBB_def_raw}),
- (@{const_name Meson.COMBC}, @{thm Meson.COMBC_def_raw}),
- (@{const_name Meson.COMBS}, @{thm Meson.COMBS_def_raw})]
-
-fun uncombine_term thy =
- let
- fun aux (t1 $ t2) = betapply (pairself aux (t1, t2))
- | aux (Abs (s, T, t')) = Abs (s, T, aux t')
- | aux (t as Const (x as (s, _))) =
- (case AList.lookup (op =) combinator_table s of
- SOME thm => thm |> prop_of |> specialize_type thy x
- |> Logic.dest_equals |> snd
- | NONE => t)
- | aux t = t
- in aux end
-
-(* Update schematic type variables with detected sort constraints. It's not
- totally clear whether this code is necessary. *)
-fun repair_tvar_sorts (t, tvar_tab) =
- let
- fun do_type (Type (a, Ts)) = Type (a, map do_type Ts)
- | do_type (TVar (xi, s)) =
- TVar (xi, the_default s (Vartab.lookup tvar_tab xi))
- | do_type (TFree z) = TFree z
- fun do_term (Const (a, T)) = Const (a, do_type T)
- | do_term (Free (a, T)) = Free (a, do_type T)
- | do_term (Var (xi, T)) = Var (xi, do_type T)
- | do_term (t as Bound _) = t
- | do_term (Abs (a, T, t)) = Abs (a, do_type T, do_term t)
- | do_term (t1 $ t2) = do_term t1 $ do_term t2
- in t |> not (Vartab.is_empty tvar_tab) ? do_term end
-
-fun quantify_over_var quant_of var_s t =
- let
- val vars = [] |> Term.add_vars t |> filter (fn ((s, _), _) => s = var_s)
- |> map Var
- in fold_rev quant_of vars t end
-
-(* Interpret an ATP formula as a HOL term, extracting sort constraints as they
- appear in the formula. *)
-fun prop_from_atp ctxt textual sym_tab phi =
- let
- fun do_formula pos phi =
- case phi of
- AQuant (_, [], phi) => do_formula pos phi
- | AQuant (q, (s, _) :: xs, phi') =>
- do_formula pos (AQuant (q, xs, phi'))
- (* FIXME: TFF *)
- #>> quantify_over_var (case q of
- AForall => forall_of
- | AExists => exists_of)
- (s |> textual ? repair_variable_name Char.toLower)
- | AConn (ANot, [phi']) => do_formula (not pos) phi' #>> s_not
- | AConn (c, [phi1, phi2]) =>
- do_formula (pos |> c = AImplies ? not) phi1
- ##>> do_formula pos phi2
- #>> (case c of
- AAnd => s_conj
- | AOr => s_disj
- | AImplies => s_imp
- | AIff => s_iff
- | ANot => raise Fail "impossible connective")
- | AAtom tm => term_from_atom ctxt textual sym_tab pos tm
- | _ => raise FORMULA [phi]
- in repair_tvar_sorts (do_formula true phi Vartab.empty) end
-
-fun infer_formula_types ctxt =
- Type.constraint HOLogic.boolT
- #> Syntax.check_term
- (Proof_Context.set_mode Proof_Context.mode_schematic ctxt)
-
-fun uncombined_etc_prop_from_atp ctxt textual sym_tab =
- let val thy = Proof_Context.theory_of ctxt in
- prop_from_atp ctxt textual sym_tab
- #> textual ? uncombine_term thy #> infer_formula_types ctxt
- end
-
-(**** Translation of TSTP files to Isar proofs ****)
-
-fun unvarify_term (Var ((s, 0), T)) = Free (s, T)
- | unvarify_term t = raise TERM ("unvarify_term: non-Var", [t])
-
-fun decode_line sym_tab (Definition (name, phi1, phi2)) ctxt =
- let
- val thy = Proof_Context.theory_of ctxt
- val t1 = prop_from_atp ctxt true sym_tab phi1
- val vars = snd (strip_comb t1)
- val frees = map unvarify_term vars
- val unvarify_args = subst_atomic (vars ~~ frees)
- val t2 = prop_from_atp ctxt true sym_tab phi2
- val (t1, t2) =
- HOLogic.eq_const HOLogic.typeT $ t1 $ t2
- |> unvarify_args |> uncombine_term thy |> infer_formula_types ctxt
- |> HOLogic.dest_eq
- in
- (Definition (name, t1, t2),
- fold Variable.declare_term (maps Misc_Legacy.term_frees [t1, t2]) ctxt)
- end
- | decode_line sym_tab (Inference (name, u, rule, deps)) ctxt =
- let val t = u |> uncombined_etc_prop_from_atp ctxt true sym_tab in
- (Inference (name, t, rule, deps),
- fold Variable.declare_term (Misc_Legacy.term_frees t) ctxt)
- end
-fun decode_lines ctxt sym_tab lines =
- fst (fold_map (decode_line sym_tab) lines ctxt)
-
-fun is_same_inference _ (Definition _) = false
- | is_same_inference t (Inference (_, t', _, _)) = t aconv t'
-
-(* No "real" literals means only type information (tfree_tcs, clsrel, or
- clsarity). *)
-val is_only_type_information = curry (op aconv) @{term True}
-
-fun replace_one_dependency (old, new) dep =
- if is_same_atp_step dep old then new else [dep]
-fun replace_dependencies_in_line _ (line as Definition _) = line
- | replace_dependencies_in_line p (Inference (name, t, rule, deps)) =
- Inference (name, t, rule,
- fold (union (op =) o replace_one_dependency p) deps [])
-
-(* Discard facts; consolidate adjacent lines that prove the same formula, since
-   they differ only in type information. *)
-fun add_line _ (line as Definition _) lines = line :: lines
- | add_line fact_names (Inference (name as (_, ss), t, rule, [])) lines =
- (* No dependencies: fact, conjecture, or (for Vampire) internal facts or
- definitions. *)
- if is_fact fact_names ss then
- (* Facts are not proof lines. *)
- if is_only_type_information t then
- map (replace_dependencies_in_line (name, [])) lines
- (* Is there a repetition? If so, replace later line by earlier one. *)
- else case take_prefix (not o is_same_inference t) lines of
- (_, []) => lines (* no repetition of proof line *)
- | (pre, Inference (name', _, _, _) :: post) =>
- pre @ map (replace_dependencies_in_line (name', [name])) post
- | _ => raise Fail "unexpected inference"
- else if is_conjecture ss then
- Inference (name, s_not t, rule, []) :: lines
- else
- map (replace_dependencies_in_line (name, [])) lines
- | add_line _ (Inference (name, t, rule, deps)) lines =
- (* Type information will be deleted later; skip repetition test. *)
- if is_only_type_information t then
- Inference (name, t, rule, deps) :: lines
- (* Is there a repetition? If so, replace later line by earlier one. *)
- else case take_prefix (not o is_same_inference t) lines of
- (* FIXME: Doesn't this code risk conflating proofs involving different
- types? *)
- (_, []) => Inference (name, t, rule, deps) :: lines
- | (pre, Inference (name', t', rule, _) :: post) =>
- Inference (name, t', rule, deps) ::
- pre @ map (replace_dependencies_in_line (name', [name])) post
- | _ => raise Fail "unexpected inference"
-
-(* Recursively delete empty lines (type information) from the proof. *)
-fun add_nontrivial_line (line as Inference (name, t, _, [])) lines =
- if is_only_type_information t then delete_dependency name lines
- else line :: lines
- | add_nontrivial_line line lines = line :: lines
-and delete_dependency name lines =
- fold_rev add_nontrivial_line
- (map (replace_dependencies_in_line (name, [])) lines) []
-
-(* ATPs sometimes reuse free variable names in the strangest ways. Removing
- offending lines often does the trick. *)
-fun is_bad_free frees (Free x) = not (member (op =) frees x)
- | is_bad_free _ _ = false
-
-fun add_desired_line _ _ _ (line as Definition (name, _, _)) (j, lines) =
- (j, line :: map (replace_dependencies_in_line (name, [])) lines)
- | add_desired_line isar_shrink_factor fact_names frees
- (Inference (name as (_, ss), t, rule, deps)) (j, lines) =
- (j + 1,
- if is_fact fact_names ss orelse
- is_conjecture ss orelse
- (* the last line must be kept *)
- j = 0 orelse
- (not (is_only_type_information t) andalso
- null (Term.add_tvars t []) andalso
- not (exists_subterm (is_bad_free frees) t) andalso
- length deps >= 2 andalso j mod isar_shrink_factor = 0 andalso
- (* kill next to last line, which usually results in a trivial step *)
- j <> 1) then
- Inference (name, t, rule, deps) :: lines (* keep line *)
- else
- map (replace_dependencies_in_line (name, deps)) lines) (* drop line *)
-
-(** Isar proof construction and manipulation **)
-
-type label = string * int
-type facts = label list * string list
-
-datatype isar_qualifier = Show | Then | Moreover | Ultimately
-
-datatype isar_step =
- Fix of (string * typ) list |
- Let of term * term |
- Assume of label * term |
- Prove of isar_qualifier list * label * term * byline
-and byline =
- By_Metis of facts |
- Case_Split of isar_step list list * facts
-
-fun add_fact_from_dependency fact_names (name as (_, ss)) =
- if is_fact fact_names ss then
- apsnd (union (op =) (map fst (resolve_fact fact_names ss)))
- else
- apfst (insert (op =) (raw_label_for_name name))
-
-fun repair_name "$true" = "c_True"
- | repair_name "$false" = "c_False"
- | repair_name "$$e" = tptp_equal (* seen in Vampire proofs *)
- | repair_name s =
- if is_tptp_equal s orelse
- (* seen in Vampire proofs *)
- (String.isPrefix "sQ" s andalso String.isSuffix "_eqProxy" s) then
- tptp_equal
- else
- s
-
-(* FIXME: Still needed? Try with SPASS proofs perhaps. *)
-val kill_duplicate_assumptions_in_proof =
- let
- fun relabel_facts subst =
- apfst (map (fn l => AList.lookup (op =) subst l |> the_default l))
- fun do_step (step as Assume (l, t)) (proof, subst, assums) =
- (case AList.lookup (op aconv) assums t of
- SOME l' => (proof, (l, l') :: subst, assums)
- | NONE => (step :: proof, subst, (t, l) :: assums))
- | do_step (Prove (qs, l, t, by)) (proof, subst, assums) =
- (Prove (qs, l, t,
- case by of
- By_Metis facts => By_Metis (relabel_facts subst facts)
- | Case_Split (proofs, facts) =>
- Case_Split (map do_proof proofs,
- relabel_facts subst facts)) ::
- proof, subst, assums)
- | do_step step (proof, subst, assums) = (step :: proof, subst, assums)
- and do_proof proof = fold do_step proof ([], [], []) |> #1 |> rev
- in do_proof end
-
-fun used_labels_of_step (Prove (_, _, _, by)) =
- (case by of
- By_Metis (ls, _) => ls
- | Case_Split (proofs, (ls, _)) =>
- fold (union (op =) o used_labels_of) proofs ls)
- | used_labels_of_step _ = []
-and used_labels_of proof = fold (union (op =) o used_labels_of_step) proof []
-
-fun kill_useless_labels_in_proof proof =
- let
- val used_ls = used_labels_of proof
- fun do_label l = if member (op =) used_ls l then l else no_label
- fun do_step (Assume (l, t)) = Assume (do_label l, t)
- | do_step (Prove (qs, l, t, by)) =
- Prove (qs, do_label l, t,
- case by of
- Case_Split (proofs, facts) =>
- Case_Split (map (map do_step) proofs, facts)
- | _ => by)
- | do_step step = step
- in map do_step proof end
-
-fun prefix_for_depth n = replicate_string (n + 1)
-
-val relabel_proof =
- let
- fun aux _ _ _ [] = []
- | aux subst depth (next_assum, next_fact) (Assume (l, t) :: proof) =
- if l = no_label then
- Assume (l, t) :: aux subst depth (next_assum, next_fact) proof
- else
- let val l' = (prefix_for_depth depth assum_prefix, next_assum) in
- Assume (l', t) ::
- aux ((l, l') :: subst) depth (next_assum + 1, next_fact) proof
- end
- | aux subst depth (next_assum, next_fact)
- (Prove (qs, l, t, by) :: proof) =
- let
- val (l', subst, next_fact) =
- if l = no_label then
- (l, subst, next_fact)
- else
- let
- val l' = (prefix_for_depth depth have_prefix, next_fact)
- in (l', (l, l') :: subst, next_fact + 1) end
- val relabel_facts =
- apfst (maps (the_list o AList.lookup (op =) subst))
- val by =
- case by of
- By_Metis facts => By_Metis (relabel_facts facts)
- | Case_Split (proofs, facts) =>
- Case_Split (map (aux subst (depth + 1) (1, 1)) proofs,
- relabel_facts facts)
- in
- Prove (qs, l', t, by) :: aux subst depth (next_assum, next_fact) proof
- end
- | aux subst depth nextp (step :: proof) =
- step :: aux subst depth nextp proof
- in aux [] 0 (1, 1) end
-
-fun string_for_proof ctxt0 type_enc lam_trans i n =
- let
- val ctxt =
- ctxt0 |> Config.put show_free_types false
- |> Config.put show_types true
- |> Config.put show_sorts true
- fun fix_print_mode f x =
- Print_Mode.setmp (filter (curry (op =) Symbol.xsymbolsN)
- (print_mode_value ())) f x
- fun do_indent ind = replicate_string (ind * indent_size) " "
- fun do_free (s, T) =
- maybe_quote s ^ " :: " ^
- maybe_quote (fix_print_mode (Syntax.string_of_typ ctxt) T)
- fun do_label l = if l = no_label then "" else string_for_label l ^ ": "
- fun do_have qs =
- (if member (op =) qs Moreover then "moreover " else "") ^
- (if member (op =) qs Ultimately then "ultimately " else "") ^
- (if member (op =) qs Then then
- if member (op =) qs Show then "thus" else "hence"
- else
- if member (op =) qs Show then "show" else "have")
- val do_term = maybe_quote o fix_print_mode (Syntax.string_of_term ctxt)
- val reconstr = Metis (type_enc, lam_trans)
- fun do_facts (ls, ss) =
- reconstructor_command reconstr 1 1
- (ls |> sort_distinct (prod_ord string_ord int_ord),
- ss |> sort_distinct string_ord)
- and do_step ind (Fix xs) =
- do_indent ind ^ "fix " ^ space_implode " and " (map do_free xs) ^ "\n"
- | do_step ind (Let (t1, t2)) =
- do_indent ind ^ "let " ^ do_term t1 ^ " = " ^ do_term t2 ^ "\n"
- | do_step ind (Assume (l, t)) =
- do_indent ind ^ "assume " ^ do_label l ^ do_term t ^ "\n"
- | do_step ind (Prove (qs, l, t, By_Metis facts)) =
- do_indent ind ^ do_have qs ^ " " ^
- do_label l ^ do_term t ^ " " ^ do_facts facts ^ "\n"
- | do_step ind (Prove (qs, l, t, Case_Split (proofs, facts))) =
- implode (map (prefix (do_indent ind ^ "moreover\n") o do_block ind)
- proofs) ^
- do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^ " " ^
- do_facts facts ^ "\n"
- and do_steps prefix suffix ind steps =
- let val s = implode (map (do_step ind) steps) in
- replicate_string (ind * indent_size - size prefix) " " ^ prefix ^
- String.extract (s, ind * indent_size,
- SOME (size s - ind * indent_size - 1)) ^
- suffix ^ "\n"
- end
- and do_block ind proof = do_steps "{ " " }" (ind + 1) proof
- (* One-step proofs are pointless; better use the Metis one-liner
- directly. *)
- and do_proof [Prove (_, _, _, By_Metis _)] = ""
- | do_proof proof =
- (if i <> 1 then "prefer " ^ string_of_int i ^ "\n" else "") ^
- do_indent 0 ^ "proof -\n" ^ do_steps "" "" 1 proof ^ do_indent 0 ^
- (if n <> 1 then "next" else "qed")
- in do_proof end
-
-fun isar_proof_text ctxt isar_proof_requested
- (debug, isar_shrink_factor, pool, fact_names, sym_tab, atp_proof, goal)
- (one_line_params as (_, _, _, _, subgoal, subgoal_count)) =
- let
- val isar_shrink_factor =
- (if isar_proof_requested then 1 else 2) * isar_shrink_factor
- val (params, hyp_ts, concl_t) = strip_subgoal ctxt goal subgoal
- val frees = fold Term.add_frees (concl_t :: hyp_ts) []
- val one_line_proof = one_line_proof_text one_line_params
- val type_enc =
- if is_typed_helper_used_in_atp_proof atp_proof then full_typesN
- else partial_typesN
- val lam_trans = lam_trans_from_atp_proof atp_proof metis_default_lam_trans
-
- fun isar_proof_of () =
- let
- val atp_proof =
- atp_proof
- |> clean_up_atp_proof_dependencies
- |> nasty_atp_proof pool
- |> map_term_names_in_atp_proof repair_name
- |> decode_lines ctxt sym_tab
- |> rpair [] |-> fold_rev (add_line fact_names)
- |> rpair [] |-> fold_rev add_nontrivial_line
- |> rpair (0, [])
- |-> fold_rev (add_desired_line isar_shrink_factor fact_names frees)
- |> snd
- val conj_name = conjecture_prefix ^ string_of_int (length hyp_ts)
- val conjs =
- atp_proof
- |> map_filter (fn Inference (name as (_, ss), _, _, []) =>
- if member (op =) ss conj_name then SOME name else NONE
- | _ => NONE)
- fun dep_of_step (Definition _) = NONE
- | dep_of_step (Inference (name, _, _, from)) = SOME (from, name)
- val ref_graph = atp_proof |> map_filter dep_of_step |> make_ref_graph
- val axioms = axioms_of_ref_graph ref_graph conjs
- val tainted = tainted_atoms_of_ref_graph ref_graph conjs
- val props =
- Symtab.empty
- |> fold (fn Definition _ => I (* FIXME *)
- | Inference ((s, _), t, _, _) =>
- Symtab.update_new (s,
- t |> member (op = o apsnd fst) tainted s ? s_not))
- atp_proof
- (* FIXME: add "fold_rev forall_of (map Var (Term.add_vars t []))"? *)
- fun prop_of_clause c =
- fold (curry s_disj) (map_filter (Symtab.lookup props o fst) c)
- @{term False}
- fun label_of_clause c = (space_implode "___" (map fst c), 0)
- fun maybe_show outer c =
- (outer andalso length c = 1 andalso subset (op =) (c, conjs))
- ? cons Show
- fun do_have outer qs (gamma, c) =
- Prove (maybe_show outer c qs, label_of_clause c, prop_of_clause c,
- By_Metis (fold (add_fact_from_dependency fact_names
- o the_single) gamma ([], [])))
- fun do_inf outer (Have z) = do_have outer [] z
- | do_inf outer (Hence z) = do_have outer [Then] z
- | do_inf outer (Cases cases) =
- let val c = succedent_of_cases cases in
- Prove (maybe_show outer c [Ultimately], label_of_clause c,
- prop_of_clause c,
- Case_Split (map (do_case false) cases, ([], [])))
- end
- and do_case outer (c, infs) =
- Assume (label_of_clause c, prop_of_clause c) ::
- map (do_inf outer) infs
- val isar_proof =
- (if null params then [] else [Fix params]) @
- (ref_graph
- |> redirect_graph axioms tainted
- |> chain_direct_proof
- |> map (do_inf true)
- |> kill_duplicate_assumptions_in_proof
- |> kill_useless_labels_in_proof
- |> relabel_proof)
- |> string_for_proof ctxt type_enc lam_trans subgoal subgoal_count
- in
- case isar_proof of
- "" =>
- if isar_proof_requested then
- "\nNo structured proof available (proof too short)."
- else
- ""
- | _ =>
- "\n\n" ^ (if isar_proof_requested then "Structured proof"
- else "Perhaps this will work") ^
- ":\n" ^ Markup.markup Isabelle_Markup.sendback isar_proof
- end
- val isar_proof =
- if debug then
- isar_proof_of ()
- else case try isar_proof_of () of
- SOME s => s
- | NONE => if isar_proof_requested then
- "\nWarning: The Isar proof construction failed."
- else
- ""
- in one_line_proof ^ isar_proof end
-
-fun proof_text ctxt isar_proof isar_params
- (one_line_params as (preplay, _, _, _, _, _)) =
- (if case preplay of Failed_to_Play _ => true | _ => isar_proof then
- isar_proof_text ctxt isar_proof isar_params
- else
- one_line_proof_text) one_line_params
-
-end;
--- a/src/HOL/Tools/ATP/atp_redirect.ML Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,223 +0,0 @@
-(* Title: HOL/Tools/ATP/atp_redirect.ML
- Author: Jasmin Blanchette, TU Muenchen
-
-Transformation of a proof by contradiction into a direct proof.
-*)
-
-signature ATP_ATOM =
-sig
- type key
- val ord : key * key -> order
- val string_of : key -> string
-end;
-
-signature ATP_REDIRECT =
-sig
- type atom
-
- structure Atom_Graph : GRAPH
-
- type ref_sequent = atom list * atom
- type ref_graph = unit Atom_Graph.T
-
- type clause = atom list
- type direct_sequent = atom list * clause
- type direct_graph = unit Atom_Graph.T
-
- type rich_sequent = clause list * clause
-
- datatype direct_inference =
- Have of rich_sequent |
- Hence of rich_sequent |
- Cases of (clause * direct_inference list) list
-
- type direct_proof = direct_inference list
-
- val make_ref_graph : (atom list * atom) list -> ref_graph
- val axioms_of_ref_graph : ref_graph -> atom list -> atom list
- val tainted_atoms_of_ref_graph : ref_graph -> atom list -> atom list
- val sequents_of_ref_graph : ref_graph -> ref_sequent list
- val redirect_sequent : atom list -> atom -> ref_sequent -> direct_sequent
- val direct_graph : direct_sequent list -> direct_graph
- val redirect_graph : atom list -> atom list -> ref_graph -> direct_proof
- val succedent_of_cases : (clause * direct_inference list) list -> clause
- val chain_direct_proof : direct_proof -> direct_proof
- val string_of_direct_proof : direct_proof -> string
-end;
-
-functor ATP_Redirect(Atom : ATP_ATOM): ATP_REDIRECT =
-struct
-
-type atom = Atom.key
-
-structure Atom_Graph = Graph(Atom)
-
-type ref_sequent = atom list * atom
-type ref_graph = unit Atom_Graph.T
-
-type clause = atom list
-type direct_sequent = atom list * clause
-type direct_graph = unit Atom_Graph.T
-
-type rich_sequent = clause list * clause
-
-datatype direct_inference =
- Have of rich_sequent |
- Hence of rich_sequent |
- Cases of (clause * direct_inference list) list
-
-type direct_proof = direct_inference list
-
-fun atom_eq p = (Atom.ord p = EQUAL)
-fun clause_eq (c, d) = (length c = length d andalso forall atom_eq (c ~~ d))
-fun direct_sequent_eq ((gamma, c), (delta, d)) =
- clause_eq (gamma, delta) andalso clause_eq (c, d)
-
-fun make_ref_graph infers =
- let
- fun add_edge to from =
- Atom_Graph.default_node (from, ())
- #> Atom_Graph.default_node (to, ())
- #> Atom_Graph.add_edge_acyclic (from, to)
- fun add_infer (froms, to) = fold (add_edge to) froms
- in Atom_Graph.empty |> fold add_infer infers end
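-
-(* Small example (illustrative): for atoms a, b, c, d,
-   make_ref_graph [([a, b], c), ([c], d)] yields an acyclic graph with edges
-   a -> c, b -> c, and c -> d; its maximal node d plays the role of the final
-   contradiction, and its minimal nodes a and b are axioms or conjecture
-   atoms. *)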
-
-fun axioms_of_ref_graph ref_graph conjs =
- subtract atom_eq conjs (Atom_Graph.minimals ref_graph)
-fun tainted_atoms_of_ref_graph ref_graph = Atom_Graph.all_succs ref_graph
-
-fun sequents_of_ref_graph ref_graph =
- map (`(Atom_Graph.immediate_preds ref_graph))
- (filter_out (Atom_Graph.is_minimal ref_graph) (Atom_Graph.keys ref_graph))
-
-fun redirect_sequent tainted bot (gamma, c) =
- if member atom_eq tainted c then
- gamma |> List.partition (not o member atom_eq tainted)
- |>> not (atom_eq (c, bot)) ? cons c
- else
- (gamma, [c])
-
-fun direct_graph seqs =
- let
- fun add_edge from to =
- Atom_Graph.default_node (from, ())
- #> Atom_Graph.default_node (to, ())
- #> Atom_Graph.add_edge_acyclic (from, to)
- fun add_seq (gamma, c) = fold (fn l => fold (add_edge l) c) gamma
- in Atom_Graph.empty |> fold add_seq seqs end
-
-fun disj cs = fold (union atom_eq) cs [] |> sort Atom.ord
-
-fun succedent_of_inference (Have (_, c)) = c
- | succedent_of_inference (Hence (_, c)) = c
- | succedent_of_inference (Cases cases) = succedent_of_cases cases
-and succedent_of_case (c, []) = c
- | succedent_of_case (_, infs) = succedent_of_inference (List.last infs)
-and succedent_of_cases cases = disj (map succedent_of_case cases)
-
-fun dest_Have (Have z) = z
- | dest_Have _ = raise Fail "non-Have"
-
-fun enrich_Have nontrivs trivs (cs, c) =
- (cs |> map (fn c => if member clause_eq nontrivs c then disj (c :: trivs)
- else c),
- disj (c :: trivs))
- |> Have
-
-fun s_cases cases =
- case cases |> List.partition (null o snd) of
- (trivs, nontrivs as [(nontriv0, proof)]) =>
- if forall (can dest_Have) proof then
- let val seqs = proof |> map dest_Have in
- seqs |> map (enrich_Have (nontriv0 :: map snd seqs) (map fst trivs))
- end
- else
- [Cases nontrivs]
- | (_, nontrivs) => [Cases nontrivs]
-
-fun descendants direct_graph =
- these o try (Atom_Graph.all_succs direct_graph) o single
-
-fun zones_of 0 _ = []
- | zones_of n (bs :: bss) =
- (fold (subtract atom_eq) bss) bs :: zones_of (n - 1) (bss @ [bs])
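-
-(* Roughly, the zone of each case atom consists of its descendants that are not
-   descendants of any other case atom, e.g. (illustrative)
-   zones_of 2 [[a, b], [b, c]] = [[a], [c]]. *)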
-
-fun redirect_graph axioms tainted ref_graph =
- let
- val [bot] = Atom_Graph.maximals ref_graph
- val seqs =
- map (redirect_sequent tainted bot) (sequents_of_ref_graph ref_graph)
- val direct_graph = direct_graph seqs
-
- fun redirect c proved seqs =
- if null seqs then
- []
- else if length c < 2 then
- let
- val proved = c @ proved
- val provable =
- filter (fn (gamma, _) => subset atom_eq (gamma, proved)) seqs
- val horn_provable = filter (fn (_, [_]) => true | _ => false) provable
- val seq as (gamma, c) = hd (horn_provable @ provable)
- in
- Have (map single gamma, c) ::
- redirect c proved (filter (curry (not o direct_sequent_eq) seq) seqs)
- end
- else
- let
- fun subsequents seqs zone =
- filter (fn (gamma, _) => subset atom_eq (gamma, zone @ proved)) seqs
- val zones = zones_of (length c) (map (descendants direct_graph) c)
- val subseqss = map (subsequents seqs) zones
- val seqs = fold (subtract direct_sequent_eq) subseqss seqs
- val cases =
- map2 (fn l => fn subseqs => ([l], redirect [l] proved subseqs))
- c subseqss
- in s_cases cases @ redirect (succedent_of_cases cases) proved seqs end
- in redirect [] axioms seqs end
-
-val chain_direct_proof =
- let
- fun chain_inf cl0 (seq as Have (cs, c)) =
- if member clause_eq cs cl0 then
- Hence (filter_out (curry clause_eq cl0) cs, c)
- else
- seq
- | chain_inf _ (Cases cases) = Cases (map chain_case cases)
- and chain_case (c, is) = (c, chain_proof (SOME c) is)
- and chain_proof _ [] = []
- | chain_proof (SOME prev) (i :: is) =
- chain_inf prev i :: chain_proof (SOME (succedent_of_inference i)) is
- | chain_proof _ (i :: is) =
- i :: chain_proof (SOME (succedent_of_inference i)) is
- in chain_proof NONE end
-
-fun indent 0 = ""
- | indent n = " " ^ indent (n - 1)
-
-fun string_of_clause [] = "\<bottom>"
- | string_of_clause ls = space_implode " \<or> " (map Atom.string_of ls)
-
-fun string_of_rich_sequent ch ([], c) = ch ^ " " ^ string_of_clause c
- | string_of_rich_sequent ch (cs, c) =
- commas (map string_of_clause cs) ^ " " ^ ch ^ " " ^ string_of_clause c
-
-fun string_of_case depth (c, proof) =
- indent (depth + 1) ^ "[" ^ string_of_clause c ^ "]"
- |> not (null proof) ? suffix ("\n" ^ string_of_subproof (depth + 1) proof)
-
-and string_of_inference depth (Have seq) =
- indent depth ^ string_of_rich_sequent "\<triangleright>" seq
- | string_of_inference depth (Hence seq) =
- indent depth ^ string_of_rich_sequent "\<guillemotright>" seq
- | string_of_inference depth (Cases cases) =
- indent depth ^ "[\n" ^
- space_implode ("\n" ^ indent depth ^ "|\n")
- (map (string_of_case depth) cases) ^ "\n" ^
- indent depth ^ "]"
-
-and string_of_subproof depth = cat_lines o map (string_of_inference depth)
-
-val string_of_direct_proof = string_of_subproof 0
-
-end;
--- a/src/HOL/Tools/ATP/atp_systems.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/ATP/atp_systems.ML Tue Feb 14 11:16:07 2012 +0100
@@ -11,6 +11,7 @@
type formula_kind = ATP_Problem.formula_kind
type failure = ATP_Proof.failure
+ type slice_spec = int * atp_format * string * string * bool
type atp_config =
{exec : string * string,
required_execs : (string * string) list,
@@ -22,8 +23,7 @@
conj_sym_kind : formula_kind,
prem_kind : formula_kind,
best_slices :
- Proof.context
- -> (real * (bool * (int * atp_format * string * string * string))) list}
+ Proof.context -> (real * (bool * (slice_spec * string))) list}
val force_sos : bool Config.T
val e_smartN : string
@@ -56,8 +56,7 @@
val remote_atp :
string -> string -> string list -> (string * string) list
-> (failure * string) list -> formula_kind -> formula_kind
- -> (Proof.context -> int * atp_format * string * string)
- -> string * atp_config
+ -> (Proof.context -> slice_spec) -> string * atp_config
val add_atp : string * atp_config -> theory -> theory
val get_atp : theory -> string -> atp_config
val supported_atps : theory -> string list
@@ -71,10 +70,12 @@
open ATP_Problem
open ATP_Proof
-open ATP_Translate
+open ATP_Problem_Generate
(* ATP configuration *)
+type slice_spec = int * atp_format * string * string * bool
+
type atp_config =
{exec : string * string,
required_execs : (string * string) list,
@@ -85,17 +86,16 @@
known_failures : (failure * string) list,
conj_sym_kind : formula_kind,
prem_kind : formula_kind,
- best_slices :
- Proof.context
- -> (real * (bool * (int * atp_format * string * string * string))) list}
+ best_slices : Proof.context -> (real * (bool * (slice_spec * string))) list}
(* "best_slices" must be found empirically, taking a wholistic approach since
- the ATPs are run in parallel. The "real" components give the faction of the
- time available given to the slice and should add up to 1.0. The "bool"
+ the ATPs are run in parallel. The "real" component gives the faction of the
+ time available given to the slice and should add up to 1.0. The first "bool"
component indicates whether the slice's strategy is complete; the "int", the
preferred number of facts to pass; the first "string", the preferred type
system (which should be sound or quasi-sound); the second "string", the
- preferred lambda translation scheme; the third "string", extra information to
+ preferred lambda translation scheme; the second "bool", whether uncurried
+ aliased should be generated; the third "string", extra information to
the prover (e.g., SOS or no SOS).
The last slice should be the most "normal" one, because it will get all the
@@ -149,7 +149,8 @@
type T = (atp_config * stamp) Symtab.table
val empty = Symtab.empty
val extend = I
- fun merge data : T = Symtab.merge (eq_snd op =) data
+ fun merge data : T =
+ Symtab.merge (eq_snd (op =)) data
handle Symtab.DUP name => error ("Duplicate ATP: " ^ quote name ^ ".")
)
@@ -245,14 +246,14 @@
let val method = effective_e_weight_method ctxt in
(* FUDGE *)
if method = e_smartN then
- [(0.333, (true, (500, FOF, "mono_tags??", combinatorsN,
+ [(0.333, (true, ((500, FOF, "mono_tags??", combsN, false),
e_fun_weightN))),
- (0.334, (true, (50, FOF, "mono_guards??", combinatorsN,
+ (0.334, (true, ((50, FOF, "mono_guards??", combsN, false),
e_fun_weightN))),
- (0.333, (true, (1000, FOF, "mono_tags??", combinatorsN,
+ (0.333, (true, ((1000, FOF, "mono_tags??", combsN, false),
e_sym_offset_weightN)))]
else
- [(1.0, (true, (500, FOF, "mono_tags??", combinatorsN, method)))]
+ [(1.0, (true, ((500, FOF, "mono_tags??", combsN, false), method)))]
end}
val e = (eN, e_config)
@@ -277,9 +278,9 @@
prem_kind = Hypothesis,
best_slices = fn ctxt =>
(* FUDGE *)
- [(0.667, (false, (150, leo2_thf0, "mono_simple_higher", lam_liftingN,
+ [(0.667, (false, ((150, leo2_thf0, "mono_native_higher", liftingN, false),
sosN))),
- (0.333, (true, (50, leo2_thf0, "mono_simple_higher", lam_liftingN,
+ (0.333, (true, ((50, leo2_thf0, "mono_native_higher", liftingN, false),
no_sosN)))]
|> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
else I)}
@@ -304,8 +305,8 @@
prem_kind = Hypothesis,
best_slices =
(* FUDGE *)
- K [(1.0, (true, (100, satallax_thf0, "mono_simple_higher", keep_lamsN,
- "")))]}
+ K [(1.0, (true, ((100, satallax_thf0, "mono_native_higher", keep_lamsN,
+ false), "")))]}
val satallax = (satallaxN, satallax_config)
@@ -335,11 +336,11 @@
prem_kind = Conjecture,
best_slices = fn ctxt =>
(* FUDGE *)
- [(0.333, (false, (150, DFG DFG_Unsorted, "mono_tags??", lam_liftingN,
+ [(0.333, (false, ((150, DFG DFG_Unsorted, "mono_tags??", liftingN, false),
sosN))),
- (0.333, (false, (300, DFG DFG_Unsorted, "poly_tags??", lam_liftingN,
+ (0.333, (false, ((300, DFG DFG_Unsorted, "poly_tags??", liftingN, false),
sosN))),
- (0.334, (false, (50, DFG DFG_Unsorted, "mono_tags??", lam_liftingN,
+ (0.334, (false, ((50, DFG DFG_Unsorted, "mono_tags??", liftingN, false),
no_sosN)))]
|> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
else I)}
@@ -348,23 +349,28 @@
(* Experimental *)
val spass_new_config : atp_config =
- {exec = ("SPASS_HOME", "SPASS"),
+ {exec = ("SPASS_NEW_HOME", "SPASS"),
required_execs = [],
- arguments = #arguments spass_config,
+ arguments = fn _ => fn _ => fn extra_options => fn timeout => fn _ =>
+ (* TODO: add: -FPDFGProof -FPFCR *)
+ ("-Isabelle=1 -TimeLimit=" ^ string_of_int (to_secs 1 timeout))
+ |> extra_options <> "" ? prefix (extra_options ^ " "),
proof_delims = #proof_delims spass_config,
known_failures = #known_failures spass_config,
conj_sym_kind = #conj_sym_kind spass_config,
prem_kind = #prem_kind spass_config,
- best_slices = fn ctxt =>
+ best_slices = fn _ =>
(* FUDGE *)
- [(0.333, (false, (150, DFG DFG_Sorted, "mono_simple", lam_liftingN,
- sosN))) (*,
- (0.333, (false, (300, DFG DFG_Sorted, "poly_tags??", lam_liftingN,
- sosN))),
- (0.334, (true, (50, DFG DFG_Sorted, "mono_simple", lam_liftingN,
- no_sosN)))*)]
- |> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
- else I)}
+ [(0.25, (true, ((150, DFG DFG_Sorted, "mono_native", combsN, true),
+ "-Heuristic=1"))),
+ (0.20, (true, ((500, DFG DFG_Sorted, "mono_native", liftingN, true),
+ "-Heuristic=2 -SOS"))),
+ (0.20, (true, ((50, DFG DFG_Sorted, "mono_native", liftingN, true),
+ "-Heuristic=2"))),
+ (0.20, (true, ((250, DFG DFG_Sorted, "mono_native", combs_and_liftingN,
+ true), "-Heuristic=2"))),
+ (0.15, (true, ((100, DFG DFG_Sorted, "mono_native", combs_and_liftingN,
+ true), "-Heuristic=2")))]}
val spass_new = (spass_newN, spass_new_config)
@@ -404,16 +410,19 @@
best_slices = fn ctxt =>
(* FUDGE *)
(if is_old_vampire_version () then
- [(0.333, (false, (150, FOF, "poly_guards??", hybrid_lamsN, sosN))),
- (0.333, (false, (500, FOF, "mono_tags??", hybrid_lamsN, sosN))),
- (0.334, (true, (50, FOF, "mono_guards??", hybrid_lamsN, no_sosN)))]
+ [(0.333, (false, ((150, FOF, "poly_guards??", combs_or_liftingN, false),
+ sosN))),
+ (0.333, (false, ((500, FOF, "mono_tags??", combs_or_liftingN, false),
+ sosN))),
+ (0.334, (true, ((50, FOF, "mono_guards??", combs_or_liftingN, false),
+ no_sosN)))]
else
- [(0.333, (false, (150, vampire_tff0, "poly_guards??", hybrid_lamsN,
- sosN))),
- (0.333, (false, (500, vampire_tff0, "mono_simple", hybrid_lamsN,
- sosN))),
- (0.334, (true, (50, vampire_tff0, "mono_simple", hybrid_lamsN,
- no_sosN)))])
+ [(0.333, (false, ((150, vampire_tff0, "poly_guards??",
+ combs_or_liftingN, false), sosN))),
+ (0.333, (false, ((500, vampire_tff0, "mono_native", combs_or_liftingN,
+ false), sosN))),
+ (0.334, (true, ((50, vampire_tff0, "mono_native", combs_or_liftingN,
+ false), no_sosN)))])
|> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
else I)}
@@ -435,10 +444,10 @@
prem_kind = Hypothesis,
best_slices =
(* FUDGE *)
- K [(0.5, (false, (250, z3_tff0, "mono_simple", combinatorsN, ""))),
- (0.25, (false, (125, z3_tff0, "mono_simple", combinatorsN, ""))),
- (0.125, (false, (62, z3_tff0, "mono_simple", combinatorsN, ""))),
- (0.125, (false, (31, z3_tff0, "mono_simple", combinatorsN, "")))]}
+ K [(0.5, (false, ((250, z3_tff0, "mono_native", combsN, false), ""))),
+ (0.25, (false, ((125, z3_tff0, "mono_native", combsN, false), ""))),
+ (0.125, (false, ((62, z3_tff0, "mono_native", combsN, false), ""))),
+ (0.125, (false, ((31, z3_tff0, "mono_native", combsN, false), "")))]}
val z3_tptp = (z3_tptpN, z3_tptp_config)
@@ -454,16 +463,16 @@
conj_sym_kind = Hypothesis,
prem_kind = Hypothesis,
best_slices =
- K [(1.0, (false, (200, format, type_enc,
- if is_format_higher_order format then keep_lamsN
- else combinatorsN, "")))]}
+ K [(1.0, (false, ((200, format, type_enc,
+ if is_format_higher_order format then keep_lamsN
+ else combsN, false), "")))]}
val dummy_tff1_format = TFF (TPTP_Polymorphic, TPTP_Explicit)
-val dummy_tff1_config = dummy_config dummy_tff1_format "poly_simple"
+val dummy_tff1_config = dummy_config dummy_tff1_format "poly_native"
val dummy_tff1 = (dummy_tff1N, dummy_tff1_config)
val dummy_thf_format = THF (TPTP_Polymorphic, TPTP_Explicit, THF_With_Choice)
-val dummy_thf_config = dummy_config dummy_thf_format "poly_simple_higher"
+val dummy_thf_config = dummy_config dummy_thf_format "poly_native_higher"
val dummy_thf = (dummy_thfN, dummy_thf_config)
@@ -507,16 +516,13 @@
{exec = ("ISABELLE_ATP", "scripts/remote_atp"),
required_execs = [],
arguments = fn _ => fn _ => fn _ => fn timeout => fn _ =>
- "-t " ^ string_of_int (Int.min (max_remote_secs, to_secs 1 timeout))
- ^ " -s " ^ the_system system_name system_versions,
+ "-t " ^ string_of_int (Int.min (max_remote_secs, to_secs 1 timeout)) ^
+ " -s " ^ the_system system_name system_versions,
proof_delims = union (op =) tstp_proof_delims proof_delims,
known_failures = known_failures @ known_perl_failures @ known_says_failures,
conj_sym_kind = conj_sym_kind,
prem_kind = prem_kind,
- best_slices = fn ctxt =>
- let val (max_relevant, format, type_enc, lam_trans) = best_slice ctxt in
- [(1.0, (false, (max_relevant, format, type_enc, lam_trans, "")))]
- end}
+ best_slices = fn ctxt => [(1.0, (false, (best_slice ctxt, "")))]}
fun remotify_config system_name system_versions best_slice
({proof_delims, known_failures, conj_sym_kind, prem_kind, ...}
@@ -537,43 +543,44 @@
val remote_e =
remotify_atp e "EP" ["1.0", "1.1", "1.2"]
- (K (750, FOF, "mono_tags??", combinatorsN) (* FUDGE *))
+ (K (750, FOF, "mono_tags??", combsN, false) (* FUDGE *))
val remote_leo2 =
remotify_atp leo2 "LEO-II" ["1.2.8", "1.2.6"]
- (K (100, leo2_thf0, "mono_simple_higher", lam_liftingN) (* FUDGE *))
+ (K (100, leo2_thf0, "mono_native_higher", liftingN, false) (* FUDGE *))
val remote_satallax =
remotify_atp satallax "Satallax" ["2.1", "2.0", "2"]
- (K (100, satallax_thf0, "mono_simple_higher", keep_lamsN) (* FUDGE *))
+ (K (100, satallax_thf0, "mono_native_higher", keep_lamsN, false)
+ (* FUDGE *))
val remote_vampire =
remotify_atp vampire "Vampire" ["1.8"]
- (K (250, FOF, "mono_guards??", hybrid_lamsN) (* FUDGE *))
+ (K (250, FOF, "mono_guards??", combs_or_liftingN, false) (* FUDGE *))
val remote_z3_tptp =
remotify_atp z3_tptp "Z3" ["3.0"]
- (K (250, z3_tff0, "mono_simple", combinatorsN) (* FUDGE *))
+ (K (250, z3_tff0, "mono_native", combsN, false) (* FUDGE *))
val remote_e_sine =
remote_atp e_sineN "SInE" ["0.4"] [] (#known_failures e_config) Axiom
- Conjecture (K (500, FOF, "mono_guards??", combinatorsN) (* FUDGE *))
+ Conjecture (K (500, FOF, "mono_guards??", combsN, false) (* FUDGE *))
val remote_iprover =
remote_atp iproverN "iProver" [] [] [] Axiom Conjecture
- (K (150, FOF, "mono_guards??", lam_liftingN) (* FUDGE *))
+ (K (150, FOF, "mono_guards??", liftingN, false) (* FUDGE *))
val remote_iprover_eq =
remote_atp iprover_eqN "iProver-Eq" [] [] [] Axiom Conjecture
- (K (150, FOF, "mono_guards??", lam_liftingN) (* FUDGE *))
+ (K (150, FOF, "mono_guards??", liftingN, false) (* FUDGE *))
val remote_snark =
remote_atp snarkN "SNARK" ["20080805r029", "20080805r024"]
[("refutation.", "end_refutation.")] [] Hypothesis Hypothesis
- (K (100, explicit_tff0, "mono_simple", lam_liftingN) (* FUDGE *))
+ (K (100, explicit_tff0, "mono_native", liftingN, false) (* FUDGE *))
val remote_e_tofof =
remote_atp e_tofofN "ToFoF" ["0.1"] [] (#known_failures e_config) Axiom
Hypothesis
- (K (150, explicit_tff0, "mono_simple", lam_liftingN) (* FUDGE *))
+ (K (150, explicit_tff0, "mono_native", liftingN, false) (* FUDGE *))
val remote_waldmeister =
remote_atp waldmeisterN "Waldmeister" ["710"]
[("#START OF PROOF", "Proved Goals:")]
[(OutOfResources, "Too many function symbols"),
(Crashed, "Unrecoverable Segmentation Fault")]
Hypothesis Hypothesis
- (K (50, CNF_UEQ, "mono_tags??", combinatorsN) (* FUDGE *))
+ (K (50, CNF_UEQ, "mono_tags??", combsN, false) (* FUDGE *))
(* Setup *)
--- a/src/HOL/Tools/ATP/atp_translate.ML Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2557 +0,0 @@
-(* Title: HOL/Tools/ATP/atp_translate.ML
- Author: Fabian Immler, TU Muenchen
- Author: Makarius
- Author: Jasmin Blanchette, TU Muenchen
-
-Translation of HOL to FOL for Metis and Sledgehammer.
-*)
-
-signature ATP_TRANSLATE =
-sig
- type ('a, 'b) ho_term = ('a, 'b) ATP_Problem.ho_term
- type connective = ATP_Problem.connective
- type ('a, 'b, 'c) formula = ('a, 'b, 'c) ATP_Problem.formula
- type atp_format = ATP_Problem.atp_format
- type formula_kind = ATP_Problem.formula_kind
- type 'a problem = 'a ATP_Problem.problem
-
- datatype locality =
- General | Helper | Induction | Intro | Elim | Simp | Local | Assum | Chained
-
- datatype polymorphism = Polymorphic | Raw_Monomorphic | Mangled_Monomorphic
- datatype soundness = Sound_Modulo_Infinity | Sound
- datatype granularity = All_Vars | Positively_Naked_Vars | Ghost_Type_Arg_Vars
- datatype type_level =
- All_Types |
- Noninf_Nonmono_Types of soundness * granularity |
- Fin_Nonmono_Types of granularity |
- Const_Arg_Types |
- No_Types
- type type_enc
-
- val type_tag_idempotence : bool Config.T
- val type_tag_arguments : bool Config.T
- val no_lamsN : string
- val hide_lamsN : string
- val lam_liftingN : string
- val combinatorsN : string
- val hybrid_lamsN : string
- val keep_lamsN : string
- val schematic_var_prefix : string
- val fixed_var_prefix : string
- val tvar_prefix : string
- val tfree_prefix : string
- val const_prefix : string
- val type_const_prefix : string
- val class_prefix : string
- val lam_lifted_prefix : string
- val lam_lifted_mono_prefix : string
- val lam_lifted_poly_prefix : string
- val skolem_const_prefix : string
- val old_skolem_const_prefix : string
- val new_skolem_const_prefix : string
- val combinator_prefix : string
- val type_decl_prefix : string
- val sym_decl_prefix : string
- val guards_sym_formula_prefix : string
- val tags_sym_formula_prefix : string
- val fact_prefix : string
- val conjecture_prefix : string
- val helper_prefix : string
- val class_rel_clause_prefix : string
- val arity_clause_prefix : string
- val tfree_clause_prefix : string
- val lam_fact_prefix : string
- val typed_helper_suffix : string
- val untyped_helper_suffix : string
- val type_tag_idempotence_helper_name : string
- val predicator_name : string
- val app_op_name : string
- val type_guard_name : string
- val type_tag_name : string
- val simple_type_prefix : string
- val prefixed_predicator_name : string
- val prefixed_app_op_name : string
- val prefixed_type_tag_name : string
- val ascii_of : string -> string
- val unascii_of : string -> string
- val unprefix_and_unascii : string -> string -> string option
- val proxy_table : (string * (string * (thm * (string * string)))) list
- val proxify_const : string -> (string * string) option
- val invert_const : string -> string
- val unproxify_const : string -> string
- val new_skolem_var_name_from_const : string -> string
- val atp_irrelevant_consts : string list
- val atp_schematic_consts_of : term -> typ list Symtab.table
- val is_type_enc_higher_order : type_enc -> bool
- val polymorphism_of_type_enc : type_enc -> polymorphism
- val level_of_type_enc : type_enc -> type_level
- val is_type_enc_quasi_sound : type_enc -> bool
- val is_type_enc_fairly_sound : type_enc -> bool
- val type_enc_from_string : soundness -> string -> type_enc
- val adjust_type_enc : atp_format -> type_enc -> type_enc
- val mk_aconns :
- connective -> ('a, 'b, 'c) formula list -> ('a, 'b, 'c) formula
- val unmangled_const : string -> string * (string, 'b) ho_term list
- val unmangled_const_name : string -> string
- val helper_table : ((string * bool) * thm list) list
- val trans_lams_from_string :
- Proof.context -> type_enc -> string -> term list -> term list * term list
- val factsN : string
- val prepare_atp_problem :
- Proof.context -> atp_format -> formula_kind -> formula_kind -> type_enc
- -> bool -> string -> bool -> bool -> term list -> term
- -> ((string * locality) * term) list
- -> string problem * string Symtab.table * (string * locality) list vector
- * (string * term) list * int Symtab.table
- val atp_problem_weights : string problem -> (string * real) list
-end;
-
-structure ATP_Translate : ATP_TRANSLATE =
-struct
-
-open ATP_Util
-open ATP_Problem
-
-type name = string * string
-
-val type_tag_idempotence =
- Attrib.setup_config_bool @{binding atp_type_tag_idempotence} (K false)
-val type_tag_arguments =
- Attrib.setup_config_bool @{binding atp_type_tag_arguments} (K false)
-
-val no_lamsN = "no_lams" (* used internally; undocumented *)
-val hide_lamsN = "hide_lams"
-val lam_liftingN = "lam_lifting"
-val combinatorsN = "combinators"
-val hybrid_lamsN = "hybrid_lams"
-val keep_lamsN = "keep_lams"
-
-(* It's still unclear whether all TFF1 implementations will support type
- signatures such as "!>[A : $tType] : $o", with ghost type variables. *)
-val avoid_first_order_ghost_type_vars = false
-
-val bound_var_prefix = "B_"
-val all_bound_var_prefix = "BA_"
-val exist_bound_var_prefix = "BE_"
-val schematic_var_prefix = "V_"
-val fixed_var_prefix = "v_"
-val tvar_prefix = "T_"
-val tfree_prefix = "t_"
-val const_prefix = "c_"
-val type_const_prefix = "tc_"
-val simple_type_prefix = "s_"
-val class_prefix = "cl_"
-
-(* Freshness almost guaranteed! *)
-val atp_weak_prefix = "ATP:"
-
-val lam_lifted_prefix = atp_weak_prefix ^ "Lam"
-val lam_lifted_mono_prefix = lam_lifted_prefix ^ "m"
-val lam_lifted_poly_prefix = lam_lifted_prefix ^ "p"
-
-val skolem_const_prefix = "ATP" ^ Long_Name.separator ^ "Sko"
-val old_skolem_const_prefix = skolem_const_prefix ^ "o"
-val new_skolem_const_prefix = skolem_const_prefix ^ "n"
-
-val combinator_prefix = "COMB"
-
-val type_decl_prefix = "ty_"
-val sym_decl_prefix = "sy_"
-val guards_sym_formula_prefix = "gsy_"
-val tags_sym_formula_prefix = "tsy_"
-val fact_prefix = "fact_"
-val conjecture_prefix = "conj_"
-val helper_prefix = "help_"
-val class_rel_clause_prefix = "clar_"
-val arity_clause_prefix = "arity_"
-val tfree_clause_prefix = "tfree_"
-
-val lam_fact_prefix = "ATP.lambda_"
-val typed_helper_suffix = "_T"
-val untyped_helper_suffix = "_U"
-val type_tag_idempotence_helper_name = helper_prefix ^ "ti_idem"
-
-val predicator_name = "pp"
-val app_op_name = "aa"
-val type_guard_name = "gg"
-val type_tag_name = "tt"
-
-val prefixed_predicator_name = const_prefix ^ predicator_name
-val prefixed_app_op_name = const_prefix ^ app_op_name
-val prefixed_type_tag_name = const_prefix ^ type_tag_name
-
-(*Escaping of special characters.
- Alphanumeric characters are left unchanged.
- The character _ goes to __
- Characters in the range ASCII space to / go to _A to _P, respectively.
- Other characters go to _nnn where nnn is the decimal ASCII code.*)
-val upper_a_minus_space = Char.ord #"A" - Char.ord #" "
-
-fun stringN_of_int 0 _ = ""
- | stringN_of_int k n =
- stringN_of_int (k - 1) (n div 10) ^ string_of_int (n mod 10)
-
-fun ascii_of_char c =
- if Char.isAlphaNum c then
- String.str c
- else if c = #"_" then
- "__"
- else if #" " <= c andalso c <= #"/" then
- "_" ^ String.str (Char.chr (Char.ord c + upper_a_minus_space))
- else
- (* fixed width, in case more digits follow *)
- "_" ^ stringN_of_int 3 (Char.ord c)
-
-val ascii_of = String.translate ascii_of_char
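-
-(* Worked examples of the escaping scheme described above (illustrative):
-     ascii_of "x_y" = "x__y"    (underscore is doubled)
-     ascii_of "x+y" = "x_Ly"    ("+" lies between " " and "/"; 43 + 33 = 76)
-     ascii_of "x<y" = "x_060y"  ("<" is outside that range; ASCII code 60)
-   "unascii_of" below inverts this encoding. *)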
-
-(** Remove ASCII armoring from names in proof files **)
-
-(* We don't raise error exceptions because this code can run inside a worker
- thread. Also, the errors are impossible. *)
-val unascii_of =
- let
- fun un rcs [] = String.implode(rev rcs)
- | un rcs [#"_"] = un (#"_" :: rcs) [] (* ERROR *)
- (* Three types of _ escapes: __, _A to _P, _nnn *)
- | un rcs (#"_" :: #"_" :: cs) = un (#"_" :: rcs) cs
- | un rcs (#"_" :: c :: cs) =
- if #"A" <= c andalso c<= #"P" then
- (* translation of #" " to #"/" *)
- un (Char.chr (Char.ord c - upper_a_minus_space) :: rcs) cs
- else
- let val digits = List.take (c :: cs, 3) handle General.Subscript => [] in
- case Int.fromString (String.implode digits) of
- SOME n => un (Char.chr n :: rcs) (List.drop (cs, 2))
- | NONE => un (c :: #"_" :: rcs) cs (* ERROR *)
- end
- | un rcs (c :: cs) = un (c :: rcs) cs
- in un [] o String.explode end
-
-(* If string s has the prefix s1, return the result of deleting it,
- un-ASCII'd. *)
-fun unprefix_and_unascii s1 s =
- if String.isPrefix s1 s then
- SOME (unascii_of (String.extract (s, size s1, NONE)))
- else
- NONE
-
-val proxy_table =
- [("c_False", (@{const_name False}, (@{thm fFalse_def},
- ("fFalse", @{const_name ATP.fFalse})))),
- ("c_True", (@{const_name True}, (@{thm fTrue_def},
- ("fTrue", @{const_name ATP.fTrue})))),
- ("c_Not", (@{const_name Not}, (@{thm fNot_def},
- ("fNot", @{const_name ATP.fNot})))),
- ("c_conj", (@{const_name conj}, (@{thm fconj_def},
- ("fconj", @{const_name ATP.fconj})))),
- ("c_disj", (@{const_name disj}, (@{thm fdisj_def},
- ("fdisj", @{const_name ATP.fdisj})))),
- ("c_implies", (@{const_name implies}, (@{thm fimplies_def},
- ("fimplies", @{const_name ATP.fimplies})))),
- ("equal", (@{const_name HOL.eq}, (@{thm fequal_def},
- ("fequal", @{const_name ATP.fequal})))),
- ("c_All", (@{const_name All}, (@{thm fAll_def},
- ("fAll", @{const_name ATP.fAll})))),
- ("c_Ex", (@{const_name Ex}, (@{thm fEx_def},
- ("fEx", @{const_name ATP.fEx}))))]
-
-val proxify_const = AList.lookup (op =) proxy_table #> Option.map (snd o snd)
-
-(* Readable names for the more common symbolic functions. Do not mess with the
- table unless you know what you are doing. *)
-val const_trans_table =
- [(@{type_name Product_Type.prod}, "prod"),
- (@{type_name Sum_Type.sum}, "sum"),
- (@{const_name False}, "False"),
- (@{const_name True}, "True"),
- (@{const_name Not}, "Not"),
- (@{const_name conj}, "conj"),
- (@{const_name disj}, "disj"),
- (@{const_name implies}, "implies"),
- (@{const_name HOL.eq}, "equal"),
- (@{const_name All}, "All"),
- (@{const_name Ex}, "Ex"),
- (@{const_name If}, "If"),
- (@{const_name Set.member}, "member"),
- (@{const_name Meson.COMBI}, combinator_prefix ^ "I"),
- (@{const_name Meson.COMBK}, combinator_prefix ^ "K"),
- (@{const_name Meson.COMBB}, combinator_prefix ^ "B"),
- (@{const_name Meson.COMBC}, combinator_prefix ^ "C"),
- (@{const_name Meson.COMBS}, combinator_prefix ^ "S")]
- |> Symtab.make
- |> fold (Symtab.update o swap o snd o snd o snd) proxy_table
-
-(* Invert the table of translations between Isabelle and ATPs. *)
-val const_trans_table_inv =
- const_trans_table |> Symtab.dest |> map swap |> Symtab.make
-val const_trans_table_unprox =
- Symtab.empty
- |> fold (fn (_, (isa, (_, (_, atp)))) => Symtab.update (atp, isa)) proxy_table
-
-val invert_const = perhaps (Symtab.lookup const_trans_table_inv)
-val unproxify_const = perhaps (Symtab.lookup const_trans_table_unprox)
-
-fun lookup_const c =
- case Symtab.lookup const_trans_table c of
- SOME c' => c'
- | NONE => ascii_of c
-
-fun ascii_of_indexname (v, 0) = ascii_of v
- | ascii_of_indexname (v, i) = ascii_of v ^ "_" ^ string_of_int i
-
-fun make_bound_var x = bound_var_prefix ^ ascii_of x
-fun make_all_bound_var x = all_bound_var_prefix ^ ascii_of x
-fun make_exist_bound_var x = exist_bound_var_prefix ^ ascii_of x
-fun make_schematic_var v = schematic_var_prefix ^ ascii_of_indexname v
-fun make_fixed_var x = fixed_var_prefix ^ ascii_of x
-
-fun make_schematic_type_var (x, i) =
- tvar_prefix ^ (ascii_of_indexname (unprefix "'" x, i))
-fun make_fixed_type_var x = tfree_prefix ^ (ascii_of (unprefix "'" x))
-
-(* "HOL.eq" and choice are mapped to the ATP's equivalents *)
-local
- val choice_const = (fst o dest_Const o HOLogic.choice_const) Term.dummyT
- fun default c = const_prefix ^ lookup_const c
-in
- fun make_fixed_const _ @{const_name HOL.eq} = tptp_old_equal
- | make_fixed_const (SOME (THF (_, _, THF_With_Choice))) c =
- if c = choice_const then tptp_choice else default c
- | make_fixed_const _ c = default c
-end
-
-fun make_fixed_type_const c = type_const_prefix ^ lookup_const c
-
-fun make_type_class clas = class_prefix ^ ascii_of clas
-
-fun new_skolem_var_name_from_const s =
- let val ss = s |> space_explode Long_Name.separator in
- nth ss (length ss - 2)
- end
-
-(* These are either simplified away by "Meson.presimplify" (most of the time) or
- handled specially via "fFalse", "fTrue", ..., "fequal". *)
-val atp_irrelevant_consts =
- [@{const_name False}, @{const_name True}, @{const_name Not},
- @{const_name conj}, @{const_name disj}, @{const_name implies},
- @{const_name HOL.eq}, @{const_name If}, @{const_name Let}]
-
-val atp_monomorph_bad_consts =
- atp_irrelevant_consts @
- (* These are ignored anyway by the relevance filter (unless they appear in
- higher-order places) but not by the monomorphizer. *)
- [@{const_name all}, @{const_name "==>"}, @{const_name "=="},
- @{const_name Trueprop}, @{const_name All}, @{const_name Ex},
- @{const_name Ex1}, @{const_name Ball}, @{const_name Bex}]
-
-fun add_schematic_const (x as (_, T)) =
- Monomorph.typ_has_tvars T ? Symtab.insert_list (op =) x
-val add_schematic_consts_of =
- Term.fold_aterms (fn Const (x as (s, _)) =>
- not (member (op =) atp_monomorph_bad_consts s)
- ? add_schematic_const x
- | _ => I)
-fun atp_schematic_consts_of t = add_schematic_consts_of t Symtab.empty
-
-(** Definitions and functions for FOL clauses and formulas for TPTP **)
-
-(** Isabelle arities **)
-
-type arity_atom = name * name * name list
-
-val type_class = the_single @{sort type}
-
-type arity_clause =
- {name : string,
- prem_atoms : arity_atom list,
- concl_atom : arity_atom}
-
-fun add_prem_atom tvar =
- fold (fn s => s <> type_class ? cons (`make_type_class s, `I tvar, []))
-
-(* Arity of type constructor "tcon :: (arg1, ..., argN) res" *)
-fun make_axiom_arity_clause (tcons, name, (cls, args)) =
- let
- val tvars = map (prefix tvar_prefix o string_of_int) (1 upto length args)
- val tvars_srts = ListPair.zip (tvars, args)
- in
- {name = name,
- prem_atoms = [] |> fold (uncurry add_prem_atom) tvars_srts,
- concl_atom = (`make_type_class cls, `make_fixed_type_const tcons,
- tvars ~~ tvars)}
- end
-
-fun arity_clause _ _ (_, []) = []
- | arity_clause seen n (tcons, ("HOL.type", _) :: ars) = (* ignore *)
- arity_clause seen n (tcons, ars)
- | arity_clause seen n (tcons, (ar as (class, _)) :: ars) =
- if member (op =) seen class then
- (* multiple arities for the same (tycon, class) pair *)
- make_axiom_arity_clause (tcons,
- lookup_const tcons ^ "___" ^ ascii_of class ^ "_" ^ string_of_int n,
- ar) ::
- arity_clause seen (n + 1) (tcons, ars)
- else
- make_axiom_arity_clause (tcons, lookup_const tcons ^ "___" ^
- ascii_of class, ar) ::
- arity_clause (class :: seen) n (tcons, ars)
-
-fun multi_arity_clause [] = []
- | multi_arity_clause ((tcons, ars) :: tc_arlists) =
- arity_clause [] 1 (tcons, ars) @ multi_arity_clause tc_arlists
-
-(* Generate all pairs (tycon, class, sorts) such that tycon belongs to class in
- theory thy provided its arguments have the corresponding sorts. *)
-fun type_class_pairs thy tycons classes =
- let
- val alg = Sign.classes_of thy
- fun domain_sorts tycon = Sorts.mg_domain alg tycon o single
- fun add_class tycon class =
- cons (class, domain_sorts tycon class)
- handle Sorts.CLASS_ERROR _ => I
- fun try_classes tycon = (tycon, fold (add_class tycon) classes [])
- in map try_classes tycons end
-
-(*Proving one (tycon, class) membership may require proving others, so iterate.*)
-fun iter_type_class_pairs _ _ [] = ([], [])
- | iter_type_class_pairs thy tycons classes =
- let
- fun maybe_insert_class s =
- (s <> type_class andalso not (member (op =) classes s))
- ? insert (op =) s
- val cpairs = type_class_pairs thy tycons classes
- val newclasses =
- [] |> fold (fold (fold (fold maybe_insert_class) o snd) o snd) cpairs
- val (classes', cpairs') = iter_type_class_pairs thy tycons newclasses
- in (classes' @ classes, union (op =) cpairs' cpairs) end
-
-fun make_arity_clauses thy tycons =
- iter_type_class_pairs thy tycons ##> multi_arity_clause
-
-
-(** Isabelle class relations **)
-
-type class_rel_clause =
- {name : string,
- subclass : name,
- superclass : name}
-
-(* Generate all pairs (sub, super) such that sub is a proper subclass of super
- in theory "thy". *)
-fun class_pairs _ [] _ = []
- | class_pairs thy subs supers =
- let
- val class_less = Sorts.class_less (Sign.classes_of thy)
- fun add_super sub super = class_less (sub, super) ? cons (sub, super)
- fun add_supers sub = fold (add_super sub) supers
- in fold add_supers subs [] end
-
-fun make_class_rel_clause (sub, super) =
- {name = sub ^ "_" ^ super, subclass = `make_type_class sub,
- superclass = `make_type_class super}
-
-fun make_class_rel_clauses thy subs supers =
- map make_class_rel_clause (class_pairs thy subs supers)
-
-(* intermediate terms *)
-datatype iterm =
- IConst of name * typ * typ list |
- IVar of name * typ |
- IApp of iterm * iterm |
- IAbs of (name * typ) * iterm
-
-fun ityp_of (IConst (_, T, _)) = T
- | ityp_of (IVar (_, T)) = T
- | ityp_of (IApp (t1, _)) = snd (dest_funT (ityp_of t1))
- | ityp_of (IAbs ((_, T), tm)) = T --> ityp_of tm
-
-(* Gets the head of a combinator application, along with the list of
-   arguments. *)
-fun strip_iterm_comb u =
- let
- fun stripc (IApp (t, u), ts) = stripc (t, u :: ts)
- | stripc x = x
- in stripc (u, []) end
-
-fun atomic_types_of T = fold_atyps (insert (op =)) T []
-
-val tvar_a_str = "'a"
-val tvar_a = TVar ((tvar_a_str, 0), HOLogic.typeS)
-val tvar_a_name = (make_schematic_type_var (tvar_a_str, 0), tvar_a_str)
-val itself_name = `make_fixed_type_const @{type_name itself}
-val TYPE_name = `(make_fixed_const NONE) @{const_name TYPE}
-val tvar_a_atype = AType (tvar_a_name, [])
-val a_itself_atype = AType (itself_name, [tvar_a_atype])
-
-fun new_skolem_const_name s num_T_args =
- [new_skolem_const_prefix, s, string_of_int num_T_args]
- |> space_implode Long_Name.separator
-
-fun robust_const_type thy s =
- if s = app_op_name then
- Logic.varifyT_global @{typ "('a => 'b) => 'a => 'b"}
- else if String.isPrefix lam_lifted_prefix s then
- Logic.varifyT_global @{typ "'a => 'b"}
- else
- (* Old Skolems throw a "TYPE" exception here, which will be caught. *)
- s |> Sign.the_const_type thy
-
-(* This function only makes sense if "T" is as general as possible. *)
-fun robust_const_typargs thy (s, T) =
- if s = app_op_name then
- let val (T1, T2) = T |> domain_type |> dest_funT in [T1, T2] end
- else if String.isPrefix old_skolem_const_prefix s then
- [] |> Term.add_tvarsT T |> rev |> map TVar
- else if String.isPrefix lam_lifted_prefix s then
- if String.isPrefix lam_lifted_poly_prefix s then
- let val (T1, T2) = T |> dest_funT in [T1, T2] end
- else
- []
- else
- (s, T) |> Sign.const_typargs thy
-
-(* Converts an Isabelle/HOL term (with combinators) into an intermediate term.
-   Also accumulates sort information. *)
-fun iterm_from_term thy format bs (P $ Q) =
- let
- val (P', P_atomics_Ts) = iterm_from_term thy format bs P
- val (Q', Q_atomics_Ts) = iterm_from_term thy format bs Q
- in (IApp (P', Q'), union (op =) P_atomics_Ts Q_atomics_Ts) end
- | iterm_from_term thy format _ (Const (c, T)) =
- (IConst (`(make_fixed_const (SOME format)) c, T,
- robust_const_typargs thy (c, T)),
- atomic_types_of T)
- | iterm_from_term _ _ _ (Free (s, T)) =
- (IConst (`make_fixed_var s, T, []), atomic_types_of T)
- | iterm_from_term _ format _ (Var (v as (s, _), T)) =
- (if String.isPrefix Meson_Clausify.new_skolem_var_prefix s then
- let
- val Ts = T |> strip_type |> swap |> op ::
- val s' = new_skolem_const_name s (length Ts)
- in IConst (`(make_fixed_const (SOME format)) s', T, Ts) end
- else
- IVar ((make_schematic_var v, s), T), atomic_types_of T)
- | iterm_from_term _ _ bs (Bound j) =
- nth bs j |> (fn (_, (name, T)) => (IConst (name, T, []), atomic_types_of T))
- | iterm_from_term thy format bs (Abs (s, T, t)) =
- let
- fun vary s = s |> AList.defined (op =) bs s ? vary o Symbol.bump_string
- val s = vary s
- val name = `make_bound_var s
- val (tm, atomic_Ts) = iterm_from_term thy format ((s, (name, T)) :: bs) t
- in (IAbs ((name, T), tm), union (op =) atomic_Ts (atomic_types_of T)) end
-
-datatype locality =
- General | Helper | Induction | Intro | Elim | Simp | Local | Assum | Chained
-
-datatype order = First_Order | Higher_Order
-datatype polymorphism = Polymorphic | Raw_Monomorphic | Mangled_Monomorphic
-datatype soundness = Sound_Modulo_Infinity | Sound
-datatype granularity = All_Vars | Positively_Naked_Vars | Ghost_Type_Arg_Vars
-datatype type_level =
- All_Types |
- Noninf_Nonmono_Types of soundness * granularity |
- Fin_Nonmono_Types of granularity |
- Const_Arg_Types |
- No_Types
-
-datatype type_enc =
- Simple_Types of order * polymorphism * type_level |
- Guards of polymorphism * type_level |
- Tags of polymorphism * type_level
-
-fun is_type_enc_higher_order (Simple_Types (Higher_Order, _, _)) = true
- | is_type_enc_higher_order _ = false
-
-fun polymorphism_of_type_enc (Simple_Types (_, poly, _)) = poly
- | polymorphism_of_type_enc (Guards (poly, _)) = poly
- | polymorphism_of_type_enc (Tags (poly, _)) = poly
-
-fun level_of_type_enc (Simple_Types (_, _, level)) = level
- | level_of_type_enc (Guards (_, level)) = level
- | level_of_type_enc (Tags (_, level)) = level
-
-fun granularity_of_type_level (Noninf_Nonmono_Types (_, grain)) = grain
- | granularity_of_type_level (Fin_Nonmono_Types grain) = grain
- | granularity_of_type_level _ = All_Vars
-
-fun is_type_level_quasi_sound All_Types = true
- | is_type_level_quasi_sound (Noninf_Nonmono_Types _) = true
- | is_type_level_quasi_sound _ = false
-val is_type_enc_quasi_sound = is_type_level_quasi_sound o level_of_type_enc
-
-fun is_type_level_fairly_sound (Fin_Nonmono_Types _) = true
- | is_type_level_fairly_sound level = is_type_level_quasi_sound level
-val is_type_enc_fairly_sound = is_type_level_fairly_sound o level_of_type_enc
-
-fun is_type_level_monotonicity_based (Noninf_Nonmono_Types _) = true
- | is_type_level_monotonicity_based (Fin_Nonmono_Types _) = true
- | is_type_level_monotonicity_based _ = false
-
-(* "_query", "_bang", and "_at" are for the ASCII-challenged Metis and
- Mirabelle. *)
-val queries = ["?", "_query"]
-val bangs = ["!", "_bang"]
-val ats = ["@", "_at"]
-
-fun try_unsuffixes ss s =
- fold (fn s' => fn NONE => try (unsuffix s') s | some => some) ss NONE
-
-fun try_nonmono constr suffixes fallback s =
- case try_unsuffixes suffixes s of
- SOME s =>
- (case try_unsuffixes suffixes s of
- SOME s => (constr Positively_Naked_Vars, s)
- | NONE =>
- case try_unsuffixes ats s of
- SOME s => (constr Ghost_Type_Arg_Vars, s)
- | NONE => (constr All_Vars, s))
- | NONE => fallback s
-
-fun type_enc_from_string soundness s =
- (case try (unprefix "poly_") s of
- SOME s => (SOME Polymorphic, s)
- | NONE =>
- case try (unprefix "raw_mono_") s of
- SOME s => (SOME Raw_Monomorphic, s)
- | NONE =>
- case try (unprefix "mono_") s of
- SOME s => (SOME Mangled_Monomorphic, s)
- | NONE => (NONE, s))
- ||> (pair All_Types
- |> try_nonmono Fin_Nonmono_Types bangs
- |> try_nonmono (curry Noninf_Nonmono_Types soundness) queries)
- |> (fn (poly, (level, core)) =>
- case (core, (poly, level)) of
- ("simple", (SOME poly, _)) =>
- (case (poly, level) of
- (Polymorphic, All_Types) =>
- Simple_Types (First_Order, Polymorphic, All_Types)
- | (Mangled_Monomorphic, _) =>
- if granularity_of_type_level level = All_Vars then
- Simple_Types (First_Order, Mangled_Monomorphic, level)
- else
- raise Same.SAME
- | _ => raise Same.SAME)
- | ("simple_higher", (SOME poly, _)) =>
- (case (poly, level) of
- (Polymorphic, All_Types) =>
- Simple_Types (Higher_Order, Polymorphic, All_Types)
- | (_, Noninf_Nonmono_Types _) => raise Same.SAME
- | (Mangled_Monomorphic, _) =>
- if granularity_of_type_level level = All_Vars then
- Simple_Types (Higher_Order, Mangled_Monomorphic, level)
- else
- raise Same.SAME
- | _ => raise Same.SAME)
- | ("guards", (SOME poly, _)) =>
- if poly = Mangled_Monomorphic andalso
- granularity_of_type_level level = Ghost_Type_Arg_Vars then
- raise Same.SAME
- else
- Guards (poly, level)
- | ("tags", (SOME poly, _)) =>
- if granularity_of_type_level level = Ghost_Type_Arg_Vars then
- raise Same.SAME
- else
- Tags (poly, level)
- | ("args", (SOME poly, All_Types (* naja *))) =>
- Guards (poly, Const_Arg_Types)
- | ("erased", (NONE, All_Types (* naja *))) =>
- Guards (Polymorphic, No_Types)
- | _ => raise Same.SAME)
- handle Same.SAME => error ("Unknown type encoding: " ^ quote s ^ ".")
-
-fun adjust_type_enc (THF (TPTP_Monomorphic, _, _))
- (Simple_Types (order, _, level)) =
- Simple_Types (order, Mangled_Monomorphic, level)
- | adjust_type_enc (THF _) type_enc = type_enc
- | adjust_type_enc (TFF (TPTP_Monomorphic, _)) (Simple_Types (_, _, level)) =
- Simple_Types (First_Order, Mangled_Monomorphic, level)
- | adjust_type_enc (DFG DFG_Sorted) (Simple_Types (_, _, level)) =
- Simple_Types (First_Order, Mangled_Monomorphic, level)
- | adjust_type_enc (TFF _) (Simple_Types (_, poly, level)) =
- Simple_Types (First_Order, poly, level)
- | adjust_type_enc format (Simple_Types (_, poly, level)) =
- adjust_type_enc format (Guards (poly, level))
- | adjust_type_enc CNF_UEQ (type_enc as Guards stuff) =
- (if is_type_enc_fairly_sound type_enc then Tags else Guards) stuff
- | adjust_type_enc _ type_enc = type_enc
-
-fun constify_lifted (t $ u) = constify_lifted t $ constify_lifted u
- | constify_lifted (Abs (s, T, t)) = Abs (s, T, constify_lifted t)
- | constify_lifted (Free (x as (s, _))) =
- (if String.isPrefix lam_lifted_prefix s then Const else Free) x
- | constify_lifted t = t
-
-(* Requires bound variables not to clash with any schematic variables (as should
- be the case right after lambda-lifting). *)
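-(* Opens up the outermost universal quantifiers, replacing each bound variable
-   by a schematic variable whose name avoids the existing ones. *)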
-fun open_form (Const (@{const_name All}, _) $ Abs (s, T, t)) =
- let
- val names = Name.make_context (map fst (Term.add_var_names t []))
- val (s, _) = Name.variant s names
- in open_form (subst_bound (Var ((s, 0), T), t)) end
- | open_form t = t
-
-fun lift_lams_part_1 ctxt type_enc =
- map close_form #> rpair ctxt
- #-> Lambda_Lifting.lift_lambdas
- (SOME ((if polymorphism_of_type_enc type_enc = Polymorphic then
- lam_lifted_poly_prefix
- else
- lam_lifted_mono_prefix) ^ "_a"))
- Lambda_Lifting.is_quantifier
- #> fst
-val lift_lams_part_2 = pairself (map (open_form o constify_lifted))
-val lift_lams = lift_lams_part_2 ooo lift_lams_part_1
-
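-(* Turns a (possibly universally quantified) equation "f x1 ... xn = t" into
-   "f = (%x1 ... xn. t)". *)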
-fun intentionalize_def (Const (@{const_name All}, _) $ Abs (_, _, t)) =
- intentionalize_def t
- | intentionalize_def (Const (@{const_name HOL.eq}, _) $ t $ u) =
- let
- fun lam T t = Abs (Name.uu, T, t)
- val (head, args) = strip_comb t ||> rev
- val head_T = fastype_of head
- val n = length args
- val arg_Ts = head_T |> binder_types |> take n |> rev
- val u = u |> subst_atomic (args ~~ map Bound (0 upto n - 1))
- in HOLogic.eq_const head_T $ head $ fold lam arg_Ts u end
- | intentionalize_def t = t
-
-type translated_formula =
- {name : string,
- locality : locality,
- kind : formula_kind,
- iformula : (name, typ, iterm) formula,
- atomic_types : typ list}
-
-fun update_iformula f ({name, locality, kind, iformula, atomic_types}
- : translated_formula) =
- {name = name, locality = locality, kind = kind, iformula = f iformula,
- atomic_types = atomic_types} : translated_formula
-
-fun fact_lift f ({iformula, ...} : translated_formula) = f iformula
-
-fun insert_type ctxt get_T x xs =
- let val T = get_T x in
- if exists (type_instance ctxt T o get_T) xs then xs
- else x :: filter_out (type_generalization ctxt T o get_T) xs
- end
-
-(* The Booleans indicate whether all type arguments should be kept. *)
-datatype type_arg_policy =
- Explicit_Type_Args of bool (* infer_from_term_args *) |
- Mangled_Type_Args |
- No_Type_Args
-
-fun type_arg_policy monom_constrs type_enc s =
- let val poly = polymorphism_of_type_enc type_enc in
- if s = type_tag_name then
- if poly = Mangled_Monomorphic then Mangled_Type_Args
- else Explicit_Type_Args false
- else case type_enc of
- Simple_Types (_, Polymorphic, _) => Explicit_Type_Args false
- | Tags (_, All_Types) => No_Type_Args
- | _ =>
- let val level = level_of_type_enc type_enc in
- if level = No_Types orelse s = @{const_name HOL.eq} orelse
- (s = app_op_name andalso level = Const_Arg_Types) then
- No_Type_Args
- else if poly = Mangled_Monomorphic then
- Mangled_Type_Args
- else if member (op =) monom_constrs s andalso
- granularity_of_type_level level = Positively_Naked_Vars then
- No_Type_Args
- else
- Explicit_Type_Args
- (level = All_Types orelse
- granularity_of_type_level level = Ghost_Type_Arg_Vars)
- end
- end
-
-(* Make atoms for sorted type variables. *)
-fun generic_add_sorts_on_type (_, []) = I
- | generic_add_sorts_on_type ((x, i), s :: ss) =
- generic_add_sorts_on_type ((x, i), ss)
- #> (if s = the_single @{sort HOL.type} then
- I
- else if i = ~1 then
- insert (op =) (`make_type_class s, `make_fixed_type_var x)
- else
- insert (op =) (`make_type_class s,
- (make_schematic_type_var (x, i), x)))
-fun add_sorts_on_tfree (TFree (s, S)) = generic_add_sorts_on_type ((s, ~1), S)
- | add_sorts_on_tfree _ = I
-fun add_sorts_on_tvar (TVar z) = generic_add_sorts_on_type z
- | add_sorts_on_tvar _ = I
-
-fun type_class_formula type_enc class arg =
- AAtom (ATerm (class, arg ::
- (case type_enc of
- Simple_Types (First_Order, Polymorphic, _) =>
- if avoid_first_order_ghost_type_vars then [ATerm (TYPE_name, [arg])]
- else []
- | _ => [])))
-fun formulas_for_types type_enc add_sorts_on_typ Ts =
- [] |> level_of_type_enc type_enc <> No_Types ? fold add_sorts_on_typ Ts
- |> map (fn (class, name) =>
- type_class_formula type_enc class (ATerm (name, [])))
-
-fun mk_aconns c phis =
- let val (phis', phi') = split_last phis in
- fold_rev (mk_aconn c) phis' phi'
- end
-fun mk_ahorn [] phi = phi
- | mk_ahorn phis psi = AConn (AImplies, [mk_aconns AAnd phis, psi])
-fun mk_aquant _ [] phi = phi
- | mk_aquant q xs (phi as AQuant (q', xs', phi')) =
- if q = q' then AQuant (q, xs @ xs', phi') else AQuant (q, xs, phi)
- | mk_aquant q xs phi = AQuant (q, xs, phi)
-
-fun close_universally add_term_vars phi =
- let
- fun add_formula_vars bounds (AQuant (_, xs, phi)) =
- add_formula_vars (map fst xs @ bounds) phi
- | add_formula_vars bounds (AConn (_, phis)) =
- fold (add_formula_vars bounds) phis
- | add_formula_vars bounds (AAtom tm) = add_term_vars bounds tm
- in mk_aquant AForall (add_formula_vars [] phi []) phi end
-
-fun add_term_vars bounds (ATerm (name as (s, _), tms)) =
- (if is_tptp_variable s andalso
- not (String.isPrefix tvar_prefix s) andalso
- not (member (op =) bounds name) then
- insert (op =) (name, NONE)
- else
- I)
- #> fold (add_term_vars bounds) tms
- | add_term_vars bounds (AAbs ((name, _), tm)) =
- add_term_vars (name :: bounds) tm
-fun close_formula_universally phi = close_universally add_term_vars phi
-
-fun add_iterm_vars bounds (IApp (tm1, tm2)) =
- fold (add_iterm_vars bounds) [tm1, tm2]
- | add_iterm_vars _ (IConst _) = I
- | add_iterm_vars bounds (IVar (name, T)) =
- not (member (op =) bounds name) ? insert (op =) (name, SOME T)
- | add_iterm_vars bounds (IAbs (_, tm)) = add_iterm_vars bounds tm
-fun close_iformula_universally phi = close_universally add_iterm_vars phi
-
-val fused_infinite_type_name = @{type_name ind} (* any infinite type *)
-val fused_infinite_type = Type (fused_infinite_type_name, [])
-
-fun tvar_name (x as (s, _)) = (make_schematic_type_var x, s)
-
-fun ho_term_from_typ format type_enc =
- let
- fun term (Type (s, Ts)) =
- ATerm (case (is_type_enc_higher_order type_enc, s) of
- (true, @{type_name bool}) => `I tptp_bool_type
- | (true, @{type_name fun}) => `I tptp_fun_type
- | _ => if s = fused_infinite_type_name andalso
- is_format_typed format then
- `I tptp_individual_type
- else
- `make_fixed_type_const s,
- map term Ts)
- | term (TFree (s, _)) = ATerm (`make_fixed_type_var s, [])
- | term (TVar (x, _)) = ATerm (tvar_name x, [])
- in term end
-
-fun ho_term_for_type_arg format type_enc T =
- if T = dummyT then NONE else SOME (ho_term_from_typ format type_enc T)
-
-(* This shouldn't clash with anything else. *)
-val mangled_type_sep = "\000"
-
-fun generic_mangled_type_name f (ATerm (name, [])) = f name
- | generic_mangled_type_name f (ATerm (name, tys)) =
- f name ^ "(" ^ space_implode "," (map (generic_mangled_type_name f) tys)
- ^ ")"
- | generic_mangled_type_name _ _ = raise Fail "unexpected type abstraction"
-
-fun mangled_type format type_enc =
- generic_mangled_type_name fst o ho_term_from_typ format type_enc
-
-fun make_simple_type s =
- if s = tptp_bool_type orelse s = tptp_fun_type orelse
- s = tptp_individual_type then
- s
- else
- simple_type_prefix ^ ascii_of s
-
-fun ho_type_from_ho_term type_enc pred_sym ary =
- let
- fun to_mangled_atype ty =
- AType ((make_simple_type (generic_mangled_type_name fst ty),
- generic_mangled_type_name snd ty), [])
- fun to_poly_atype (ATerm (name, tys)) = AType (name, map to_poly_atype tys)
- | to_poly_atype _ = raise Fail "unexpected type abstraction"
- val to_atype =
- if polymorphism_of_type_enc type_enc = Polymorphic then to_poly_atype
- else to_mangled_atype
- fun to_afun f1 f2 tys = AFun (f1 (hd tys), f2 (nth tys 1))
- fun to_fo 0 ty = if pred_sym then bool_atype else to_atype ty
- | to_fo ary (ATerm (_, tys)) = to_afun to_atype (to_fo (ary - 1)) tys
- | to_fo _ _ = raise Fail "unexpected type abstraction"
- fun to_ho (ty as ATerm ((s, _), tys)) =
- if s = tptp_fun_type then to_afun to_ho to_ho tys else to_atype ty
- | to_ho _ = raise Fail "unexpected type abstraction"
- in if is_type_enc_higher_order type_enc then to_ho else to_fo ary end
-
-fun ho_type_from_typ format type_enc pred_sym ary =
- ho_type_from_ho_term type_enc pred_sym ary
- o ho_term_from_typ format type_enc
-
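-(* Mangling appends the names of a constant's type arguments to its name,
-   separated by "mangled_type_sep"; "unmangled_const" below undoes this. *)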
-fun mangled_const_name format type_enc T_args (s, s') =
- let
- val ty_args = T_args |> map_filter (ho_term_for_type_arg format type_enc)
- fun type_suffix f g =
- fold_rev (curry (op ^) o g o prefix mangled_type_sep
- o generic_mangled_type_name f) ty_args ""
- in (s ^ type_suffix fst ascii_of, s' ^ type_suffix snd I) end
-
-val parse_mangled_ident =
- Scan.many1 (not o member (op =) ["(", ")", ","]) >> implode
-
-fun parse_mangled_type x =
- (parse_mangled_ident
- -- Scan.optional ($$ "(" |-- Scan.optional parse_mangled_types [] --| $$ ")")
- [] >> ATerm) x
-and parse_mangled_types x =
- (parse_mangled_type ::: Scan.repeat ($$ "," |-- parse_mangled_type)) x
-
-fun unmangled_type s =
- s |> suffix ")" |> raw_explode
- |> Scan.finite Symbol.stopper
- (Scan.error (!! (fn _ => raise Fail ("unrecognized mangled type " ^
- quote s)) parse_mangled_type))
- |> fst
-
-val unmangled_const_name = space_explode mangled_type_sep #> hd
-fun unmangled_const s =
- let val ss = space_explode mangled_type_sep s in
- (hd ss, map unmangled_type (tl ss))
- end
-
-fun introduce_proxies_in_iterm type_enc =
- let
- fun tweak_ho_quant ho_quant T [IAbs _] = IConst (`I ho_quant, T, [])
- | tweak_ho_quant ho_quant (T as Type (_, [p_T as Type (_, [x_T, _]), _]))
- _ =
-        (* Eta-expand "!!" and "??", to work around a LEO-II 1.2.8 parser
-           limitation. This works in conjunction with special code in
- "ATP_Problem" that uses the syntactic sugar "!" and "?" whenever
- possible. *)
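-        (* Concretely, "!!" becomes "%P. !! (%X. P X)", and similarly for
-           "??". *)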
- IAbs ((`I "P", p_T),
- IApp (IConst (`I ho_quant, T, []),
- IAbs ((`I "X", x_T),
- IApp (IConst (`I "P", p_T, []),
- IConst (`I "X", x_T, [])))))
- | tweak_ho_quant _ _ _ = raise Fail "unexpected type for quantifier"
- fun intro top_level args (IApp (tm1, tm2)) =
- IApp (intro top_level (tm2 :: args) tm1, intro false [] tm2)
- | intro top_level args (IConst (name as (s, _), T, T_args)) =
- (case proxify_const s of
- SOME proxy_base =>
- if top_level orelse is_type_enc_higher_order type_enc then
- case (top_level, s) of
- (_, "c_False") => IConst (`I tptp_false, T, [])
- | (_, "c_True") => IConst (`I tptp_true, T, [])
- | (false, "c_Not") => IConst (`I tptp_not, T, [])
- | (false, "c_conj") => IConst (`I tptp_and, T, [])
- | (false, "c_disj") => IConst (`I tptp_or, T, [])
- | (false, "c_implies") => IConst (`I tptp_implies, T, [])
- | (false, "c_All") => tweak_ho_quant tptp_ho_forall T args
- | (false, "c_Ex") => tweak_ho_quant tptp_ho_exists T args
- | (false, s) =>
- if is_tptp_equal s andalso length args = 2 then
- IConst (`I tptp_equal, T, [])
- else
- (* Use a proxy even for partially applied THF0 equality,
- because the LEO-II and Satallax parsers complain about not
- being able to infer the type of "=". *)
- IConst (proxy_base |>> prefix const_prefix, T, T_args)
- | _ => IConst (name, T, [])
- else
- IConst (proxy_base |>> prefix const_prefix, T, T_args)
- | NONE => if s = tptp_choice then tweak_ho_quant tptp_choice T args
- else IConst (name, T, T_args))
- | intro _ _ (IAbs (bound, tm)) = IAbs (bound, intro false [] tm)
- | intro _ _ tm = tm
- in intro true [] end
-
-fun mangle_type_args_in_iterm format type_enc =
- if polymorphism_of_type_enc type_enc = Mangled_Monomorphic then
- let
- fun mangle (IApp (tm1, tm2)) = IApp (mangle tm1, mangle tm2)
- | mangle (tm as IConst (_, _, [])) = tm
- | mangle (tm as IConst (name as (s, _), T, T_args)) =
- (case unprefix_and_unascii const_prefix s of
- NONE => tm
- | SOME s'' =>
- case type_arg_policy [] type_enc (invert_const s'') of
- Mangled_Type_Args =>
- IConst (mangled_const_name format type_enc T_args name, T, [])
- | _ => tm)
- | mangle (IAbs (bound, tm)) = IAbs (bound, mangle tm)
- | mangle tm = tm
- in mangle end
- else
- I
-
-fun chop_fun 0 T = ([], T)
- | chop_fun n (Type (@{type_name fun}, [dom_T, ran_T])) =
- chop_fun (n - 1) ran_T |>> cons dom_T
- | chop_fun _ T = ([], T)
-
-fun filter_const_type_args _ _ _ [] = []
- | filter_const_type_args thy s ary T_args =
- let
- val U = robust_const_type thy s
- val arg_U_vars = fold Term.add_tvarsT (U |> chop_fun ary |> fst) []
- val U_args = (s, U) |> robust_const_typargs thy
- in
- U_args ~~ T_args
- |> map (fn (U, T) =>
- if member (op =) arg_U_vars (dest_TVar U) then dummyT else T)
- end
- handle TYPE _ => T_args
-
-fun filter_type_args_in_iterm thy monom_constrs type_enc =
- let
- fun filt ary (IApp (tm1, tm2)) = IApp (filt (ary + 1) tm1, filt 0 tm2)
- | filt _ (tm as IConst (_, _, [])) = tm
- | filt ary (IConst (name as (s, _), T, T_args)) =
- (case unprefix_and_unascii const_prefix s of
- NONE =>
- (name,
- if level_of_type_enc type_enc = No_Types orelse s = tptp_choice then
- []
- else
- T_args)
- | SOME s'' =>
- let
- val s'' = invert_const s''
- fun filter_T_args false = T_args
- | filter_T_args true = filter_const_type_args thy s'' ary T_args
- in
- case type_arg_policy monom_constrs type_enc s'' of
- Explicit_Type_Args infer_from_term_args =>
- (name, filter_T_args infer_from_term_args)
- | No_Type_Args => (name, [])
- | Mangled_Type_Args => raise Fail "unexpected (un)mangled symbol"
- end)
- |> (fn (name, T_args) => IConst (name, T, T_args))
- | filt _ (IAbs (bound, tm)) = IAbs (bound, filt 0 tm)
- | filt _ tm = tm
- in filt 0 end
-
-fun iformula_from_prop ctxt format type_enc eq_as_iff =
- let
- val thy = Proof_Context.theory_of ctxt
- fun do_term bs t atomic_Ts =
- iterm_from_term thy format bs (Envir.eta_contract t)
- |>> (introduce_proxies_in_iterm type_enc
- #> mangle_type_args_in_iterm format type_enc
- #> AAtom)
- ||> union (op =) atomic_Ts
- fun do_quant bs q pos s T t' =
- let
- val s = singleton (Name.variant_list (map fst bs)) s
- val universal = Option.map (q = AExists ? not) pos
- val name =
- s |> `(case universal of
- SOME true => make_all_bound_var
- | SOME false => make_exist_bound_var
- | NONE => make_bound_var)
- in
- do_formula ((s, (name, T)) :: bs) pos t'
- #>> mk_aquant q [(name, SOME T)]
- ##> union (op =) (atomic_types_of T)
- end
- and do_conn bs c pos1 t1 pos2 t2 =
- do_formula bs pos1 t1 ##>> do_formula bs pos2 t2 #>> uncurry (mk_aconn c)
- and do_formula bs pos t =
- case t of
- @{const Trueprop} $ t1 => do_formula bs pos t1
- | @{const Not} $ t1 => do_formula bs (Option.map not pos) t1 #>> mk_anot
- | Const (@{const_name All}, _) $ Abs (s, T, t') =>
- do_quant bs AForall pos s T t'
- | (t0 as Const (@{const_name All}, _)) $ t1 =>
- do_formula bs pos (t0 $ eta_expand (map (snd o snd) bs) t1 1)
- | Const (@{const_name Ex}, _) $ Abs (s, T, t') =>
- do_quant bs AExists pos s T t'
- | (t0 as Const (@{const_name Ex}, _)) $ t1 =>
- do_formula bs pos (t0 $ eta_expand (map (snd o snd) bs) t1 1)
- | @{const HOL.conj} $ t1 $ t2 => do_conn bs AAnd pos t1 pos t2
- | @{const HOL.disj} $ t1 $ t2 => do_conn bs AOr pos t1 pos t2
- | @{const HOL.implies} $ t1 $ t2 =>
- do_conn bs AImplies (Option.map not pos) t1 pos t2
- | Const (@{const_name HOL.eq}, Type (_, [@{typ bool}, _])) $ t1 $ t2 =>
- if eq_as_iff then do_conn bs AIff NONE t1 NONE t2 else do_term bs t
- | _ => do_term bs t
- in do_formula [] end
-
-fun presimplify_term ctxt t =
- t |> exists_Const (member (op =) Meson.presimplified_consts o fst) t
- ? (Skip_Proof.make_thm (Proof_Context.theory_of ctxt)
- #> Meson.presimplify
- #> prop_of)
-
-fun concealed_bound_name j = atp_weak_prefix ^ string_of_int j
-fun conceal_bounds Ts t =
- subst_bounds (map (Free o apfst concealed_bound_name)
- (0 upto length Ts - 1 ~~ Ts), t)
-fun reveal_bounds Ts =
- subst_atomic (map (fn (j, T) => (Free (concealed_bound_name j, T), Bound j))
- (0 upto length Ts - 1 ~~ Ts))
-
-fun is_fun_equality (@{const_name HOL.eq},
- Type (_, [Type (@{type_name fun}, _), _])) = true
- | is_fun_equality _ = false
-
-fun extensionalize_term ctxt t =
- if exists_Const is_fun_equality t then
- let val thy = Proof_Context.theory_of ctxt in
- t |> cterm_of thy |> Meson.extensionalize_conv ctxt
- |> prop_of |> Logic.dest_equals |> snd
- end
- else
- t
-
-fun simple_translate_lambdas do_lambdas ctxt t =
- let val thy = Proof_Context.theory_of ctxt in
- if Meson.is_fol_term thy t then
- t
- else
- let
- fun trans Ts t =
- case t of
- @{const Not} $ t1 => @{const Not} $ trans Ts t1
- | (t0 as Const (@{const_name All}, _)) $ Abs (s, T, t') =>
- t0 $ Abs (s, T, trans (T :: Ts) t')
- | (t0 as Const (@{const_name All}, _)) $ t1 =>
- trans Ts (t0 $ eta_expand Ts t1 1)
- | (t0 as Const (@{const_name Ex}, _)) $ Abs (s, T, t') =>
- t0 $ Abs (s, T, trans (T :: Ts) t')
- | (t0 as Const (@{const_name Ex}, _)) $ t1 =>
- trans Ts (t0 $ eta_expand Ts t1 1)
- | (t0 as @{const HOL.conj}) $ t1 $ t2 =>
- t0 $ trans Ts t1 $ trans Ts t2
- | (t0 as @{const HOL.disj}) $ t1 $ t2 =>
- t0 $ trans Ts t1 $ trans Ts t2
- | (t0 as @{const HOL.implies}) $ t1 $ t2 =>
- t0 $ trans Ts t1 $ trans Ts t2
- | (t0 as Const (@{const_name HOL.eq}, Type (_, [@{typ bool}, _])))
- $ t1 $ t2 =>
- t0 $ trans Ts t1 $ trans Ts t2
- | _ =>
- if not (exists_subterm (fn Abs _ => true | _ => false) t) then t
- else t |> Envir.eta_contract |> do_lambdas ctxt Ts
- val (t, ctxt') = Variable.import_terms true [t] ctxt |>> the_single
- in t |> trans [] |> singleton (Variable.export_terms ctxt' ctxt) end
- end
-
-fun do_cheaply_conceal_lambdas Ts (t1 $ t2) =
- do_cheaply_conceal_lambdas Ts t1
- $ do_cheaply_conceal_lambdas Ts t2
- | do_cheaply_conceal_lambdas Ts (Abs (_, T, t)) =
- Const (lam_lifted_poly_prefix ^ serial_string (),
- T --> fastype_of1 (T :: Ts, t))
- | do_cheaply_conceal_lambdas _ t = t
-
-fun do_introduce_combinators ctxt Ts t =
- let val thy = Proof_Context.theory_of ctxt in
- t |> conceal_bounds Ts
- |> cterm_of thy
- |> Meson_Clausify.introduce_combinators_in_cterm
- |> prop_of |> Logic.dest_equals |> snd
- |> reveal_bounds Ts
- end
- (* A type variable of sort "{}" will make abstraction fail. *)
- handle THM _ => t |> do_cheaply_conceal_lambdas Ts
-val introduce_combinators = simple_translate_lambdas do_introduce_combinators
-
-fun preprocess_abstractions_in_terms trans_lams facts =
- let
- val (facts, lambda_ts) =
- facts |> map (snd o snd) |> trans_lams
- |>> map2 (fn (name, (kind, _)) => fn t => (name, (kind, t))) facts
- val lam_facts =
- map2 (fn t => fn j =>
- ((lam_fact_prefix ^ Int.toString j, Helper), (Axiom, t)))
- lambda_ts (1 upto length lambda_ts)
- in (facts, lam_facts) end
-
-(* Metis's use of "resolve_tac" freezes the schematic variables. We simulate the
-   same behavior in Sledgehammer to prevent the discovery of unreplayable
-   proofs. *)
-fun freeze_term t =
- let
- fun freeze (t $ u) = freeze t $ freeze u
- | freeze (Abs (s, T, t)) = Abs (s, T, freeze t)
- | freeze (Var ((s, i), T)) =
- Free (atp_weak_prefix ^ s ^ "_" ^ string_of_int i, T)
- | freeze t = t
- in t |> exists_subterm is_Var t ? freeze end
-
-fun presimp_prop ctxt role t =
- (let
- val thy = Proof_Context.theory_of ctxt
- val t = t |> Envir.beta_eta_contract
- |> transform_elim_prop
- |> Object_Logic.atomize_term thy
- val need_trueprop = (fastype_of t = @{typ bool})
- in
- t |> need_trueprop ? HOLogic.mk_Trueprop
- |> extensionalize_term ctxt
- |> presimplify_term ctxt
- |> HOLogic.dest_Trueprop
- end
- handle TERM _ => if role = Conjecture then @{term False} else @{term True})
- |> pair role
-
-fun make_formula ctxt format type_enc eq_as_iff name loc kind t =
- let
- val (iformula, atomic_Ts) =
- iformula_from_prop ctxt format type_enc eq_as_iff
- (SOME (kind <> Conjecture)) t []
- |>> close_iformula_universally
- in
- {name = name, locality = loc, kind = kind, iformula = iformula,
- atomic_types = atomic_Ts}
- end
-
-fun make_fact ctxt format type_enc eq_as_iff ((name, loc), t) =
- case t |> make_formula ctxt format type_enc (eq_as_iff andalso format <> CNF)
- name loc Axiom of
- formula as {iformula = AAtom (IConst ((s, _), _, _)), ...} =>
- if s = tptp_true then NONE else SOME formula
- | formula => SOME formula
-
-fun s_not_trueprop (@{const Trueprop} $ t) = @{const Trueprop} $ s_not t
- | s_not_trueprop t =
- if fastype_of t = @{typ bool} then s_not t else @{prop False} (* too meta *)
-
-fun make_conjecture ctxt format type_enc =
- map (fn ((name, loc), (kind, t)) =>
- t |> kind = Conjecture ? s_not_trueprop
- |> make_formula ctxt format type_enc (format <> CNF) name loc kind)
-
-(** Finite and infinite type inference **)
-
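-(* Type variables occurring in each of the first "ary" argument types of the
-   constant "s". *)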
-fun tvar_footprint thy s ary =
- (case unprefix_and_unascii const_prefix s of
- SOME s =>
- s |> invert_const |> robust_const_type thy |> chop_fun ary |> fst
- |> map (fn T => Term.add_tvarsT T [] |> map fst)
- | NONE => [])
- handle TYPE _ => []
-
-fun ghost_type_args thy s ary =
- if is_tptp_equal s then
- 0 upto ary - 1
- else
- let
- val footprint = tvar_footprint thy s ary
- val eq = (s = @{const_name HOL.eq})
- fun ghosts _ [] = []
- | ghosts seen ((i, tvars) :: args) =
- ghosts (union (op =) seen tvars) args
- |> (eq orelse exists (fn tvar => not (member (op =) seen tvar)) tvars)
- ? cons i
- in
- if forall null footprint then
- []
- else
- 0 upto length footprint - 1 ~~ footprint
- |> sort (rev_order o list_ord Term_Ord.indexname_ord o pairself snd)
- |> ghosts []
- end
-
-type monotonicity_info =
- {maybe_finite_Ts : typ list,
- surely_finite_Ts : typ list,
- maybe_infinite_Ts : typ list,
- surely_infinite_Ts : typ list,
- maybe_nonmono_Ts : typ list}
-
-(* These types witness that the type classes they belong to allow infinite
-   models and hence that any type with these type classes is monotonic. *)
-val known_infinite_types =
- [@{typ nat}, HOLogic.intT, HOLogic.realT, @{typ "nat => bool"}]
-
-fun is_type_kind_of_surely_infinite ctxt soundness cached_Ts T =
- soundness <> Sound andalso is_type_surely_infinite ctxt true cached_Ts T
-
-(* Finite types such as "unit", "bool", "bool * bool", and "bool => bool" are
- dangerous because their "exhaust" properties can easily lead to unsound ATP
- proofs. On the other hand, all HOL infinite types can be given the same
- models in first-order logic (via Löwenheim-Skolem). *)
-
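-(* Roughly: at the "Noninf_Nonmono_Types" level, a possibly nonmonotonic type
-   is encoded unless it can be shown to be infinite; at the
-   "Fin_Nonmono_Types" level, it is encoded only if it can be shown to be
-   finite. *)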
-fun should_encode_type _ (_ : monotonicity_info) All_Types _ = true
- | should_encode_type ctxt {maybe_finite_Ts, surely_infinite_Ts,
- maybe_nonmono_Ts, ...}
- (Noninf_Nonmono_Types (soundness, grain)) T =
- grain = Ghost_Type_Arg_Vars orelse
- (exists (type_intersect ctxt T) maybe_nonmono_Ts andalso
- not (exists (type_instance ctxt T) surely_infinite_Ts orelse
- (not (member (type_equiv ctxt) maybe_finite_Ts T) andalso
- is_type_kind_of_surely_infinite ctxt soundness surely_infinite_Ts
- T)))
- | should_encode_type ctxt {surely_finite_Ts, maybe_infinite_Ts,
- maybe_nonmono_Ts, ...}
- (Fin_Nonmono_Types grain) T =
- grain = Ghost_Type_Arg_Vars orelse
- (exists (type_intersect ctxt T) maybe_nonmono_Ts andalso
- (exists (type_generalization ctxt T) surely_finite_Ts orelse
- (not (member (type_equiv ctxt) maybe_infinite_Ts T) andalso
- is_type_surely_finite ctxt T)))
- | should_encode_type _ _ _ _ = false
-
-fun should_guard_type ctxt mono (Guards (_, level)) should_guard_var T =
- should_guard_var () andalso should_encode_type ctxt mono level T
- | should_guard_type _ _ _ _ _ = false
-
-fun is_maybe_universal_var (IConst ((s, _), _, _)) =
- String.isPrefix bound_var_prefix s orelse
- String.isPrefix all_bound_var_prefix s
- | is_maybe_universal_var (IVar _) = true
- | is_maybe_universal_var _ = false
-
-datatype site =
- Top_Level of bool option |
- Eq_Arg of bool option |
- Elsewhere
-
-fun should_tag_with_type _ _ _ (Top_Level _) _ _ = false
- | should_tag_with_type ctxt mono (Tags (_, level)) site u T =
- if granularity_of_type_level level = All_Vars then
- should_encode_type ctxt mono level T
- else
- (case (site, is_maybe_universal_var u) of
- (Eq_Arg _, true) => should_encode_type ctxt mono level T
- | _ => false)
- | should_tag_with_type _ _ _ _ _ _ = false
-
-fun fused_type ctxt mono level =
- let
- val should_encode = should_encode_type ctxt mono level
- fun fuse 0 T = if should_encode T then T else fused_infinite_type
- | fuse ary (Type (@{type_name fun}, [T1, T2])) =
- fuse 0 T1 --> fuse (ary - 1) T2
- | fuse _ _ = raise Fail "expected function type"
- in fuse end
-
-(** Predicators and application operators **)
-
-type sym_info =
- {pred_sym : bool, min_ary : int, max_ary : int, types : typ list,
- in_conj : bool}
-
-fun default_sym_tab_entries type_enc =
- (make_fixed_const NONE @{const_name undefined},
- {pred_sym = false, min_ary = 0, max_ary = 0, types = [],
- in_conj = false}) ::
- ([tptp_false, tptp_true]
- |> map (rpair {pred_sym = true, min_ary = 0, max_ary = 0, types = [],
- in_conj = false})) @
- ([tptp_equal, tptp_old_equal]
- |> map (rpair {pred_sym = true, min_ary = 2, max_ary = 2, types = [],
- in_conj = false}))
- |> not (is_type_enc_higher_order type_enc)
- ? cons (prefixed_predicator_name,
- {pred_sym = true, min_ary = 1, max_ary = 1, types = [],
- in_conj = false})
-
-fun sym_table_for_facts ctxt type_enc explicit_apply conjs facts =
- let
- fun consider_var_ary const_T var_T max_ary =
- let
- fun iter ary T =
- if ary = max_ary orelse type_instance ctxt var_T T orelse
- type_instance ctxt T var_T then
- ary
- else
- iter (ary + 1) (range_type T)
- in iter 0 const_T end
- fun add_universal_var T (accum as ((bool_vars, fun_var_Ts), sym_tab)) =
- if explicit_apply = NONE andalso
- (can dest_funT T orelse T = @{typ bool}) then
- let
- val bool_vars' = bool_vars orelse body_type T = @{typ bool}
- fun repair_min_ary {pred_sym, min_ary, max_ary, types, in_conj} =
- {pred_sym = pred_sym andalso not bool_vars',
- min_ary = fold (fn T' => consider_var_ary T' T) types min_ary,
- max_ary = max_ary, types = types, in_conj = in_conj}
- val fun_var_Ts' =
- fun_var_Ts |> can dest_funT T ? insert_type ctxt I T
- in
- if bool_vars' = bool_vars andalso
- pointer_eq (fun_var_Ts', fun_var_Ts) then
- accum
- else
- ((bool_vars', fun_var_Ts'), Symtab.map (K repair_min_ary) sym_tab)
- end
- else
- accum
- fun add_fact_syms conj_fact =
- let
- fun add_iterm_syms top_level tm
- (accum as ((bool_vars, fun_var_Ts), sym_tab)) =
- let val (head, args) = strip_iterm_comb tm in
- (case head of
- IConst ((s, _), T, _) =>
- if String.isPrefix bound_var_prefix s orelse
- String.isPrefix all_bound_var_prefix s then
- add_universal_var T accum
- else if String.isPrefix exist_bound_var_prefix s then
- accum
- else
- let val ary = length args in
- ((bool_vars, fun_var_Ts),
- case Symtab.lookup sym_tab s of
- SOME {pred_sym, min_ary, max_ary, types, in_conj} =>
- let
- val pred_sym =
- pred_sym andalso top_level andalso not bool_vars
- val types' = types |> insert_type ctxt I T
- val in_conj = in_conj orelse conj_fact
- val min_ary =
- if is_some explicit_apply orelse
- pointer_eq (types', types) then
- min_ary
- else
- fold (consider_var_ary T) fun_var_Ts min_ary
- in
- Symtab.update (s, {pred_sym = pred_sym,
- min_ary = Int.min (ary, min_ary),
- max_ary = Int.max (ary, max_ary),
- types = types', in_conj = in_conj})
- sym_tab
- end
- | NONE =>
- let
- val pred_sym = top_level andalso not bool_vars
- val min_ary =
- case explicit_apply of
- SOME true => 0
- | SOME false => ary
- | NONE => fold (consider_var_ary T) fun_var_Ts ary
- in
- Symtab.update_new (s,
- {pred_sym = pred_sym, min_ary = min_ary,
- max_ary = ary, types = [T], in_conj = conj_fact})
- sym_tab
- end)
- end
- | IVar (_, T) => add_universal_var T accum
- | IAbs ((_, T), tm) =>
- accum |> add_universal_var T |> add_iterm_syms false tm
- | _ => accum)
- |> fold (add_iterm_syms false) args
- end
- in K (add_iterm_syms true) |> formula_fold NONE |> fact_lift end
- in
- ((false, []), Symtab.empty)
- |> fold (add_fact_syms true) conjs
- |> fold (add_fact_syms false) facts
- |> snd
- |> fold Symtab.update (default_sym_tab_entries type_enc)
- end
-
-fun min_ary_of sym_tab s =
- case Symtab.lookup sym_tab s of
- SOME ({min_ary, ...} : sym_info) => min_ary
- | NONE =>
- case unprefix_and_unascii const_prefix s of
- SOME s =>
- let val s = s |> unmangled_const_name |> invert_const in
- if s = predicator_name then 1
- else if s = app_op_name then 2
- else if s = type_guard_name then 1
- else 0
- end
- | NONE => 0
-
-(* False if the constant ever appears outside of the top-level position in
- literals, or if it appears with different arities (e.g., because of different
-   type instantiations). If true, the constant always receives all of its
- arguments and is used as a predicate. *)
-fun is_pred_sym sym_tab s =
- case Symtab.lookup sym_tab s of
- SOME ({pred_sym, min_ary, max_ary, ...} : sym_info) =>
- pred_sym andalso min_ary = max_ary
- | NONE => false
-
-val app_op = `(make_fixed_const NONE) app_op_name
-val predicator_combconst =
- IConst (`(make_fixed_const NONE) predicator_name, @{typ "bool => bool"}, [])
-
-fun list_app head args = fold (curry (IApp o swap)) args head
-fun predicator tm = IApp (predicator_combconst, tm)
-
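-(* Makes applications and predicates explicit for first-order formats: partial
-   applications beyond a symbol's minimum arity go through the application
-   operator, and Boolean-valued terms that are not predicate symbols are
-   wrapped in the predicator. *)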
-fun firstorderize_fact thy monom_constrs format type_enc sym_tab =
- let
- fun do_app arg head =
- let
- val head_T = ityp_of head
- val (arg_T, res_T) = dest_funT head_T
- val app =
- IConst (app_op, head_T --> head_T, [arg_T, res_T])
- |> mangle_type_args_in_iterm format type_enc
- in list_app app [head, arg] end
- fun list_app_ops head args = fold do_app args head
- fun introduce_app_ops tm =
- case strip_iterm_comb tm of
- (head as IConst ((s, _), _, _), args) =>
- args |> map introduce_app_ops
- |> chop (min_ary_of sym_tab s)
- |>> list_app head
- |-> list_app_ops
- | (head, args) => list_app_ops head (map introduce_app_ops args)
- fun introduce_predicators tm =
- case strip_iterm_comb tm of
- (IConst ((s, _), _, _), _) =>
- if is_pred_sym sym_tab s then tm else predicator tm
- | _ => predicator tm
- val do_iterm =
- not (is_type_enc_higher_order type_enc)
- ? (introduce_app_ops #> introduce_predicators)
- #> filter_type_args_in_iterm thy monom_constrs type_enc
- in update_iformula (formula_map do_iterm) end
-
-(** Helper facts **)
-
-val not_ffalse = @{lemma "~ fFalse" by (unfold fFalse_def) fast}
-val ftrue = @{lemma "fTrue" by (unfold fTrue_def) fast}
-
-(* The Boolean indicates that a fairly sound type encoding is needed. *)
-val helper_table =
- [(("COMBI", false), @{thms Meson.COMBI_def}),
- (("COMBK", false), @{thms Meson.COMBK_def}),
- (("COMBB", false), @{thms Meson.COMBB_def}),
- (("COMBC", false), @{thms Meson.COMBC_def}),
- (("COMBS", false), @{thms Meson.COMBS_def}),
- ((predicator_name, false), [not_ffalse, ftrue]),
- (("fFalse", false), [not_ffalse]),
- (("fFalse", true), @{thms True_or_False}),
- (("fTrue", false), [ftrue]),
- (("fTrue", true), @{thms True_or_False}),
- (("fNot", false),
- @{thms fNot_def [THEN Meson.iff_to_disjD, THEN conjunct1]
- fNot_def [THEN Meson.iff_to_disjD, THEN conjunct2]}),
- (("fconj", false),
- @{lemma "~ P | ~ Q | fconj P Q" "~ fconj P Q | P" "~ fconj P Q | Q"
- by (unfold fconj_def) fast+}),
- (("fdisj", false),
- @{lemma "~ P | fdisj P Q" "~ Q | fdisj P Q" "~ fdisj P Q | P | Q"
- by (unfold fdisj_def) fast+}),
- (("fimplies", false),
- @{lemma "P | fimplies P Q" "~ Q | fimplies P Q" "~ fimplies P Q | ~ P | Q"
- by (unfold fimplies_def) fast+}),
- (("fequal", true),
- (* This is a lie: Higher-order equality doesn't need a sound type encoding.
-        However, this is done for backward compatibility: Including the
- equality helpers by default in Metis breaks a few existing proofs. *)
- @{thms fequal_def [THEN Meson.iff_to_disjD, THEN conjunct1]
- fequal_def [THEN Meson.iff_to_disjD, THEN conjunct2]}),
- (* Partial characterization of "fAll" and "fEx". A complete characterization
- would require the axiom of choice for replay with Metis. *)
- (("fAll", false), [@{lemma "~ fAll P | P x" by (auto simp: fAll_def)}]),
- (("fEx", false), [@{lemma "~ P x | fEx P" by (auto simp: fEx_def)}]),
- (("If", true), @{thms if_True if_False True_or_False})]
- |> map (apsnd (map zero_var_indexes))
-
-fun atype_of_type_vars (Simple_Types (_, Polymorphic, _)) = SOME atype_of_types
- | atype_of_type_vars _ = NONE
-
-fun bound_tvars type_enc sorts Ts =
- (sorts ? mk_ahorn (formulas_for_types type_enc add_sorts_on_tvar Ts))
- #> mk_aquant AForall
- (map_filter (fn TVar (x as (s, _), _) =>
- SOME ((make_schematic_type_var x, s),
- atype_of_type_vars type_enc)
- | _ => NONE) Ts)
-
-fun eq_formula type_enc atomic_Ts pred_sym tm1 tm2 =
- (if pred_sym then AConn (AIff, [AAtom tm1, AAtom tm2])
- else AAtom (ATerm (`I tptp_equal, [tm1, tm2])))
- |> close_formula_universally
- |> bound_tvars type_enc true atomic_Ts
-
-val type_tag = `(make_fixed_const NONE) type_tag_name
-
-fun type_tag_idempotence_fact format type_enc =
- let
- fun var s = ATerm (`I s, [])
- fun tag tm = ATerm (type_tag, [var "A", tm])
- val tagged_var = tag (var "X")
- in
- Formula (type_tag_idempotence_helper_name, Axiom,
- eq_formula type_enc [] false (tag tagged_var) tagged_var,
- isabelle_info format simpN, NONE)
- end
-
-fun should_specialize_helper type_enc t =
- polymorphism_of_type_enc type_enc <> Polymorphic andalso
- level_of_type_enc type_enc <> No_Types andalso
- not (null (Term.hidden_polymorphism t))
-
-fun helper_facts_for_sym ctxt format type_enc (s, {types, ...} : sym_info) =
- case unprefix_and_unascii const_prefix s of
- SOME mangled_s =>
- let
- val thy = Proof_Context.theory_of ctxt
- val unmangled_s = mangled_s |> unmangled_const_name
- fun dub needs_fairly_sound j k =
- (unmangled_s ^ "_" ^ string_of_int j ^ "_" ^ string_of_int k ^
- (if mangled_s = unmangled_s then "" else "_" ^ ascii_of mangled_s) ^
- (if needs_fairly_sound then typed_helper_suffix
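-(* A type encoding name consists of an optional polymorphism prefix ("poly_",
-   "raw_mono_", or "mono_"), a core ("simple", "simple_higher", "guards",
-   "tags", "args", or "erased"), and an optional suffix built from "?", "!",
-   and "@" (or their ASCII spellings above), e.g. "poly_guards", "mono_tags?",
-   "erased". *)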
- else untyped_helper_suffix),
- Helper)
- fun dub_and_inst needs_fairly_sound (th, j) =
- let val t = prop_of th in
- if should_specialize_helper type_enc t then
- map (fn T => specialize_type thy (invert_const unmangled_s, T) t)
- types
- else
- [t]
- end
- |> map (fn (k, t) => (dub needs_fairly_sound j k, t)) o tag_list 1
- val make_facts = map_filter (make_fact ctxt format type_enc false)
- val fairly_sound = is_type_enc_fairly_sound type_enc
- in
- helper_table
- |> maps (fn ((helper_s, needs_fairly_sound), ths) =>
- if helper_s <> unmangled_s orelse
- (needs_fairly_sound andalso not fairly_sound) then
- []
- else
- ths ~~ (1 upto length ths)
- |> maps (dub_and_inst needs_fairly_sound)
- |> make_facts)
- end
- | NONE => []
-fun helper_facts_for_sym_table ctxt format type_enc sym_tab =
- Symtab.fold_rev (append o helper_facts_for_sym ctxt format type_enc) sym_tab
- []
-
-(** Type classes present in the axiom or conjecture clauses **)
-
-fun set_insert (x, s) = Symtab.update (x, ()) s
-
-fun add_classes (sorts, cset) = List.foldl set_insert cset (flat sorts)
-
-(* Remove this trivial type class (FIXME: similar code elsewhere) *)
-fun delete_type cset = Symtab.delete_safe (the_single @{sort HOL.type}) cset
-
-fun classes_of_terms get_Ts =
- map (map snd o get_Ts)
- #> List.foldl add_classes Symtab.empty
- #> delete_type #> Symtab.keys
-
-val tfree_classes_of_terms = classes_of_terms Misc_Legacy.term_tfrees
-val tvar_classes_of_terms = classes_of_terms Misc_Legacy.term_tvars
-
-fun fold_type_constrs f (Type (s, Ts)) x =
- fold (fold_type_constrs f) Ts (f (s, x))
- | fold_type_constrs _ _ x = x
-
-(* Type constructors used to instantiate overloaded constants are the only ones
- needed. *)
-fun add_type_constrs_in_term thy =
- let
- fun add (Const (@{const_name Meson.skolem}, _) $ _) = I
- | add (t $ u) = add t #> add u
- | add (Const x) =
- x |> robust_const_typargs thy |> fold (fold_type_constrs set_insert)
- | add (Abs (_, _, u)) = add u
- | add _ = I
- in add end
-
-fun type_constrs_of_terms thy ts =
- Symtab.keys (fold (add_type_constrs_in_term thy) ts Symtab.empty)
-
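-(* Breaks a lifted-lambda definition "c ?x1 ... ?xn = t" into the pair
-   ("c", "%x1 ... xn. t"). *)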
-fun extract_lambda_def (Const (@{const_name HOL.eq}, _) $ t $ u) =
- let val (head, args) = strip_comb t in
- (head |> dest_Const |> fst,
- fold_rev (fn t as Var ((s, _), T) =>
- (fn u => Abs (s, T, abstract_over (t, u)))
- | _ => raise Fail "expected Var") args u)
- end
- | extract_lambda_def _ = raise Fail "malformed lifted lambda"
-
-fun trans_lams_from_string ctxt type_enc lam_trans =
- if lam_trans = no_lamsN then
- rpair []
- else if lam_trans = hide_lamsN then
- lift_lams ctxt type_enc ##> K []
- else if lam_trans = lam_liftingN then
- lift_lams ctxt type_enc
- else if lam_trans = combinatorsN then
- map (introduce_combinators ctxt) #> rpair []
- else if lam_trans = hybrid_lamsN then
- lift_lams_part_1 ctxt type_enc
- ##> maps (fn t => [t, introduce_combinators ctxt (intentionalize_def t)])
- #> lift_lams_part_2
- else if lam_trans = keep_lamsN then
- map (Envir.eta_contract) #> rpair []
- else
- error ("Unknown lambda translation scheme: " ^ quote lam_trans ^ ".")
-
-fun translate_formulas ctxt format prem_kind type_enc lam_trans presimp hyp_ts
- concl_t facts =
- let
- val thy = Proof_Context.theory_of ctxt
- val trans_lams = trans_lams_from_string ctxt type_enc lam_trans
- val fact_ts = facts |> map snd
- (* Remove existing facts from the conjecture, as this can dramatically
- boost an ATP's performance (for some reason). *)
- val hyp_ts =
- hyp_ts
- |> map (fn t => if member (op aconv) fact_ts t then @{prop True} else t)
- val facts = facts |> map (apsnd (pair Axiom))
- val conjs =
- map (pair prem_kind) hyp_ts @ [(Conjecture, s_not_trueprop concl_t)]
- |> map (apsnd freeze_term)
- |> map2 (pair o rpair Local o string_of_int) (0 upto length hyp_ts)
- val ((conjs, facts), lam_facts) =
- (conjs, facts)
- |> presimp ? pairself (map (apsnd (uncurry (presimp_prop ctxt))))
- |> (if lam_trans = no_lamsN then
- rpair []
- else
- op @
- #> preprocess_abstractions_in_terms trans_lams
- #>> chop (length conjs))
- val conjs = conjs |> make_conjecture ctxt format type_enc
- val (fact_names, facts) =
- facts
- |> map_filter (fn (name, (_, t)) =>
- make_fact ctxt format type_enc true (name, t)
- |> Option.map (pair name))
- |> ListPair.unzip
- val lifted = lam_facts |> map (extract_lambda_def o snd o snd)
- val lam_facts =
- lam_facts |> map_filter (make_fact ctxt format type_enc true o apsnd snd)
- val all_ts = concl_t :: hyp_ts @ fact_ts
- val subs = tfree_classes_of_terms all_ts
- val supers = tvar_classes_of_terms all_ts
- val tycons = type_constrs_of_terms thy all_ts
- val (supers, arity_clauses) =
- if level_of_type_enc type_enc = No_Types then ([], [])
- else make_arity_clauses thy tycons supers
- val class_rel_clauses = make_class_rel_clauses thy subs supers
- in
- (fact_names |> map single, union (op =) subs supers, conjs,
- facts @ lam_facts, class_rel_clauses, arity_clauses, lifted)
- end
-
-val type_guard = `(make_fixed_const NONE) type_guard_name
-
-fun type_guard_iterm format type_enc T tm =
- IApp (IConst (type_guard, T --> @{typ bool}, [T])
- |> mangle_type_args_in_iterm format type_enc, tm)
-
-fun is_var_positively_naked_in_term _ (SOME false) _ accum = accum
- | is_var_positively_naked_in_term name _ (ATerm ((s, _), tms)) accum =
- accum orelse (is_tptp_equal s andalso member (op =) tms (ATerm (name, [])))
- | is_var_positively_naked_in_term _ _ _ _ = true
-
-fun is_var_ghost_type_arg_in_term thy polym_constrs name pos tm accum =
- is_var_positively_naked_in_term name pos tm accum orelse
- let
- val var = ATerm (name, [])
- fun is_nasty_in_term (ATerm (_, [])) = false
- | is_nasty_in_term (ATerm ((s, _), tms)) =
- let
- val ary = length tms
- val polym_constr = member (op =) polym_constrs s
- val ghosts = ghost_type_args thy s ary
- in
- exists (fn (j, tm) =>
- if polym_constr then
- member (op =) ghosts j andalso
- (tm = var orelse is_nasty_in_term tm)
- else
- tm = var andalso member (op =) ghosts j)
- (0 upto ary - 1 ~~ tms)
- orelse (not polym_constr andalso exists is_nasty_in_term tms)
- end
- | is_nasty_in_term _ = true
- in is_nasty_in_term tm end
-
-fun should_guard_var_in_formula thy polym_constrs level pos phi (SOME true)
- name =
- (case granularity_of_type_level level of
- All_Vars => true
- | Positively_Naked_Vars =>
- formula_fold pos (is_var_positively_naked_in_term name) phi false
- | Ghost_Type_Arg_Vars =>
- formula_fold pos (is_var_ghost_type_arg_in_term thy polym_constrs name)
- phi false)
- | should_guard_var_in_formula _ _ _ _ _ _ _ = true
-
-fun always_guard_var_in_formula _ _ _ _ _ _ _ = true
-
-fun should_generate_tag_bound_decl _ _ _ (SOME true) _ = false
- | should_generate_tag_bound_decl ctxt mono (Tags (_, level)) _ T =
- granularity_of_type_level level <> All_Vars andalso
- should_encode_type ctxt mono level T
- | should_generate_tag_bound_decl _ _ _ _ _ = false
-
-fun mk_aterm format type_enc name T_args args =
- ATerm (name, map_filter (ho_term_for_type_arg format type_enc) T_args @ args)
-
-fun tag_with_type ctxt format mono type_enc pos T tm =
- IConst (type_tag, T --> T, [T])
- |> mangle_type_args_in_iterm format type_enc
- |> ho_term_from_iterm ctxt format mono type_enc pos
- |> (fn ATerm (s, tms) => ATerm (s, tms @ [tm])
- | _ => raise Fail "unexpected lambda-abstraction")
-and ho_term_from_iterm ctxt format mono type_enc =
- let
- fun term site u =
- let
- val (head, args) = strip_iterm_comb u
- val pos =
- case site of
- Top_Level pos => pos
- | Eq_Arg pos => pos
- | _ => NONE
- val t =
- case head of
- IConst (name as (s, _), _, T_args) =>
- let
- val arg_site = if is_tptp_equal s then Eq_Arg pos else Elsewhere
- in
- map (term arg_site) args |> mk_aterm format type_enc name T_args
- end
- | IVar (name, _) =>
- map (term Elsewhere) args |> mk_aterm format type_enc name []
- | IAbs ((name, T), tm) =>
- AAbs ((name, ho_type_from_typ format type_enc true 0 T),
- term Elsewhere tm)
- | IApp _ => raise Fail "impossible \"IApp\""
- val T = ityp_of u
- in
- if should_tag_with_type ctxt mono type_enc site u T then
- tag_with_type ctxt format mono type_enc pos T t
- else
- t
- end
- in term o Top_Level end
-and formula_from_iformula ctxt polym_constrs format mono type_enc
- should_guard_var =
- let
- val thy = Proof_Context.theory_of ctxt
- val level = level_of_type_enc type_enc
- val do_term = ho_term_from_iterm ctxt format mono type_enc
- val do_bound_type =
- case type_enc of
- Simple_Types _ => fused_type ctxt mono level 0
- #> ho_type_from_typ format type_enc false 0 #> SOME
- | _ => K NONE
- fun do_out_of_bound_type pos phi universal (name, T) =
- if should_guard_type ctxt mono type_enc
- (fn () => should_guard_var thy polym_constrs level pos phi
- universal name) T then
- IVar (name, T)
- |> type_guard_iterm format type_enc T
- |> do_term pos |> AAtom |> SOME
- else if should_generate_tag_bound_decl ctxt mono type_enc universal T then
- let
- val var = ATerm (name, [])
- val tagged_var = tag_with_type ctxt format mono type_enc pos T var
- in SOME (AAtom (ATerm (`I tptp_equal, [tagged_var, var]))) end
- else
- NONE
- fun do_formula pos (AQuant (q, xs, phi)) =
- let
- val phi = phi |> do_formula pos
- val universal = Option.map (q = AExists ? not) pos
- in
- AQuant (q, xs |> map (apsnd (fn NONE => NONE
- | SOME T => do_bound_type T)),
- (if q = AForall then mk_ahorn else fold_rev (mk_aconn AAnd))
- (map_filter
- (fn (_, NONE) => NONE
- | (s, SOME T) =>
- do_out_of_bound_type pos phi universal (s, T))
- xs)
- phi)
- end
- | do_formula pos (AConn conn) = aconn_map pos do_formula conn
- | do_formula pos (AAtom tm) = AAtom (do_term pos tm)
- in do_formula end
-
-(* Each fact is given a unique fact number to avoid name clashes (e.g., because
-   of monomorphization). The TPTP syntax explicitly forbids name clashes, and
-   some of the remote provers might care. *)
-fun formula_line_for_fact ctxt polym_constrs format prefix encode freshen pos
- mono type_enc (j, {name, locality, kind, iformula, atomic_types}) =
- (prefix ^ (if freshen then string_of_int j ^ "_" else "") ^ encode name, kind,
- iformula
- |> formula_from_iformula ctxt polym_constrs format mono type_enc
- should_guard_var_in_formula (if pos then SOME true else NONE)
- |> close_formula_universally
- |> bound_tvars type_enc true atomic_types,
- NONE,
- case locality of
- Intro => isabelle_info format introN
- | Elim => isabelle_info format elimN
- | Simp => isabelle_info format simpN
- | _ => NONE)
- |> Formula
-
-fun formula_line_for_class_rel_clause format type_enc
- ({name, subclass, superclass, ...} : class_rel_clause) =
- let val ty_arg = ATerm (tvar_a_name, []) in
- Formula (class_rel_clause_prefix ^ ascii_of name, Axiom,
- AConn (AImplies,
- [type_class_formula type_enc subclass ty_arg,
- type_class_formula type_enc superclass ty_arg])
- |> mk_aquant AForall
- [(tvar_a_name, atype_of_type_vars type_enc)],
- isabelle_info format introN, NONE)
- end
-
-fun formula_from_arity_atom type_enc (class, t, args) =
- ATerm (t, map (fn arg => ATerm (arg, [])) args)
- |> type_class_formula type_enc class
-
-fun formula_line_for_arity_clause format type_enc
- ({name, prem_atoms, concl_atom} : arity_clause) =
- Formula (arity_clause_prefix ^ name, Axiom,
- mk_ahorn (map (formula_from_arity_atom type_enc) prem_atoms)
- (formula_from_arity_atom type_enc concl_atom)
- |> mk_aquant AForall
- (map (rpair (atype_of_type_vars type_enc)) (#3 concl_atom)),
- isabelle_info format introN, NONE)
-
-fun formula_line_for_conjecture ctxt polym_constrs format mono type_enc
- ({name, kind, iformula, atomic_types, ...} : translated_formula) =
- Formula (conjecture_prefix ^ name, kind,
- iformula
- |> formula_from_iformula ctxt polym_constrs format mono type_enc
- should_guard_var_in_formula (SOME false)
- |> close_formula_universally
- |> bound_tvars type_enc true atomic_types, NONE, NONE)
-
-fun formula_line_for_free_type j phi =
- Formula (tfree_clause_prefix ^ string_of_int j, Hypothesis, phi, NONE, NONE)
-fun formula_lines_for_free_types type_enc (facts : translated_formula list) =
- let
- val phis =
- fold (union (op =)) (map #atomic_types facts) []
- |> formulas_for_types type_enc add_sorts_on_tfree
- in map2 formula_line_for_free_type (0 upto length phis - 1) phis end
-
-(** Symbol declarations **)
-
-fun decl_line_for_class order s =
- let val name as (s, _) = `make_type_class s in
- Decl (sym_decl_prefix ^ s, name,
- if order = First_Order then
- ATyAbs ([tvar_a_name],
- if avoid_first_order_ghost_type_vars then
- AFun (a_itself_atype, bool_atype)
- else
- bool_atype)
- else
- AFun (atype_of_types, bool_atype))
- end
-
-fun decl_lines_for_classes type_enc classes =
- case type_enc of
- Simple_Types (order, Polymorphic, _) =>
- map (decl_line_for_class order) classes
- | _ => []
-
-fun sym_decl_table_for_facts ctxt format type_enc sym_tab (conjs, facts) =
- let
- fun add_iterm_syms tm =
- let val (head, args) = strip_iterm_comb tm in
- (case head of
- IConst ((s, s'), T, T_args) =>
- let
- val (pred_sym, in_conj) =
- case Symtab.lookup sym_tab s of
- SOME ({pred_sym, in_conj, ...} : sym_info) =>
- (pred_sym, in_conj)
- | NONE => (false, false)
- val decl_sym =
- (case type_enc of
- Guards _ => not pred_sym
- | _ => true) andalso
- is_tptp_user_symbol s
- in
- if decl_sym then
- Symtab.map_default (s, [])
- (insert_type ctxt #3 (s', T_args, T, pred_sym, length args,
- in_conj))
- else
- I
- end
- | IAbs (_, tm) => add_iterm_syms tm
- | _ => I)
- #> fold add_iterm_syms args
- end
- val add_fact_syms = K add_iterm_syms |> formula_fold NONE |> fact_lift
- fun add_formula_var_types (AQuant (_, xs, phi)) =
- fold (fn (_, SOME T) => insert_type ctxt I T | _ => I) xs
- #> add_formula_var_types phi
- | add_formula_var_types (AConn (_, phis)) =
- fold add_formula_var_types phis
- | add_formula_var_types _ = I
- fun var_types () =
- if polymorphism_of_type_enc type_enc = Polymorphic then [tvar_a]
- else fold (fact_lift add_formula_var_types) (conjs @ facts) []
- fun add_undefined_const T =
- let
- val (s, s') =
- `(make_fixed_const NONE) @{const_name undefined}
- |> (case type_arg_policy [] type_enc @{const_name undefined} of
- Mangled_Type_Args => mangled_const_name format type_enc [T]
- | _ => I)
- in
- Symtab.map_default (s, [])
- (insert_type ctxt #3 (s', [T], T, false, 0, false))
- end
- fun add_TYPE_const () =
- let val (s, s') = TYPE_name in
- Symtab.map_default (s, [])
- (insert_type ctxt #3
- (s', [tvar_a], @{typ "'a itself"}, false, 0, false))
- end
- in
- Symtab.empty
- |> is_type_enc_fairly_sound type_enc
- ? (fold (fold add_fact_syms) [conjs, facts]
- #> (case type_enc of
- Simple_Types (First_Order, Polymorphic, _) =>
- if avoid_first_order_ghost_type_vars then add_TYPE_const ()
- else I
- | Simple_Types _ => I
- | _ => fold add_undefined_const (var_types ())))
- end
-
-(* We add "bool" in case the helper "True_or_False" is included later. *)
-fun default_mono level =
- {maybe_finite_Ts = [@{typ bool}],
- surely_finite_Ts = [@{typ bool}],
- maybe_infinite_Ts = known_infinite_types,
- surely_infinite_Ts =
- case level of
- Noninf_Nonmono_Types (Sound, _) => []
- | _ => known_infinite_types,
- maybe_nonmono_Ts = [@{typ bool}]}
-
-(* This inference is described in section 2.3 of Claessen et al.'s "Sorting it
- out with monotonicity" paper presented at CADE 2011. *)
-fun add_iterm_mononotonicity_info _ _ (SOME false) _ mono = mono
- | add_iterm_mononotonicity_info ctxt level _
- (IApp (IApp (IConst ((s, _), Type (_, [T, _]), _), tm1), tm2))
- (mono as {maybe_finite_Ts, surely_finite_Ts, maybe_infinite_Ts,
- surely_infinite_Ts, maybe_nonmono_Ts}) =
- if is_tptp_equal s andalso exists is_maybe_universal_var [tm1, tm2] then
- case level of
- Noninf_Nonmono_Types (soundness, _) =>
- if exists (type_instance ctxt T) surely_infinite_Ts orelse
- member (type_equiv ctxt) maybe_finite_Ts T then
- mono
- else if is_type_kind_of_surely_infinite ctxt soundness
- surely_infinite_Ts T then
- {maybe_finite_Ts = maybe_finite_Ts,
- surely_finite_Ts = surely_finite_Ts,
- maybe_infinite_Ts = maybe_infinite_Ts,
- surely_infinite_Ts = surely_infinite_Ts |> insert_type ctxt I T,
- maybe_nonmono_Ts = maybe_nonmono_Ts}
- else
- {maybe_finite_Ts = maybe_finite_Ts |> insert (type_equiv ctxt) T,
- surely_finite_Ts = surely_finite_Ts,
- maybe_infinite_Ts = maybe_infinite_Ts,
- surely_infinite_Ts = surely_infinite_Ts,
- maybe_nonmono_Ts = maybe_nonmono_Ts |> insert_type ctxt I T}
- | Fin_Nonmono_Types _ =>
- if exists (type_instance ctxt T) surely_finite_Ts orelse
- member (type_equiv ctxt) maybe_infinite_Ts T then
- mono
- else if is_type_surely_finite ctxt T then
- {maybe_finite_Ts = maybe_finite_Ts,
- surely_finite_Ts = surely_finite_Ts |> insert_type ctxt I T,
- maybe_infinite_Ts = maybe_infinite_Ts,
- surely_infinite_Ts = surely_infinite_Ts,
- maybe_nonmono_Ts = maybe_nonmono_Ts |> insert_type ctxt I T}
- else
- {maybe_finite_Ts = maybe_finite_Ts,
- surely_finite_Ts = surely_finite_Ts,
- maybe_infinite_Ts = maybe_infinite_Ts |> insert (type_equiv ctxt) T,
- surely_infinite_Ts = surely_infinite_Ts,
- maybe_nonmono_Ts = maybe_nonmono_Ts}
- | _ => mono
- else
- mono
- | add_iterm_mononotonicity_info _ _ _ _ mono = mono
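(* A minimal standalone sketch (hypothetical names, not part of the patch) of
   the inference above: a type T becomes interesting for the monotonicity
   analysis exactly when equality is applied to something that may be a
   universal variable of type T; roughly, T then lands either in the
   surely-infinite bucket (harmless) or in the possibly-nonmonotonic bucket
   (its occurrences must be encoded). *)
fun classify_eq_type_sketch surely_infinite_Ts T (infinite_Ts, nonmono_Ts) =
  if List.exists (fn T' => T' = T) surely_infinite_Ts then
    (T :: infinite_Ts, nonmono_Ts)
  else
    (infinite_Ts, T :: nonmono_Ts)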
-fun add_fact_mononotonicity_info ctxt level
- ({kind, iformula, ...} : translated_formula) =
- formula_fold (SOME (kind <> Conjecture))
- (add_iterm_mononotonicity_info ctxt level) iformula
-fun mononotonicity_info_for_facts ctxt type_enc facts =
- let val level = level_of_type_enc type_enc in
- default_mono level
- |> is_type_level_monotonicity_based level
- ? fold (add_fact_mononotonicity_info ctxt level) facts
- end
-
-fun add_iformula_monotonic_types ctxt mono type_enc =
- let
- val level = level_of_type_enc type_enc
- val should_encode = should_encode_type ctxt mono level
- fun add_type T = not (should_encode T) ? insert_type ctxt I T
- fun add_args (IApp (tm1, tm2)) = add_args tm1 #> add_term tm2
- | add_args _ = I
- and add_term tm = add_type (ityp_of tm) #> add_args tm
- in formula_fold NONE (K add_term) end
-fun add_fact_monotonic_types ctxt mono type_enc =
- add_iformula_monotonic_types ctxt mono type_enc |> fact_lift
-fun monotonic_types_for_facts ctxt mono type_enc facts =
- let val level = level_of_type_enc type_enc in
- [] |> (polymorphism_of_type_enc type_enc = Polymorphic andalso
- is_type_level_monotonicity_based level andalso
- granularity_of_type_level level <> Ghost_Type_Arg_Vars)
- ? fold (add_fact_monotonic_types ctxt mono type_enc) facts
- end
-
-fun formula_line_for_guards_mono_type ctxt format mono type_enc T =
- Formula (guards_sym_formula_prefix ^
- ascii_of (mangled_type format type_enc T),
- Axiom,
- IConst (`make_bound_var "X", T, [])
- |> type_guard_iterm format type_enc T
- |> AAtom
- |> formula_from_iformula ctxt [] format mono type_enc
- always_guard_var_in_formula (SOME true)
- |> close_formula_universally
- |> bound_tvars type_enc true (atomic_types_of T),
- isabelle_info format introN, NONE)
-
-fun formula_line_for_tags_mono_type ctxt format mono type_enc T =
- let val x_var = ATerm (`make_bound_var "X", []) in
- Formula (tags_sym_formula_prefix ^
- ascii_of (mangled_type format type_enc T),
- Axiom,
- eq_formula type_enc (atomic_types_of T) false
- (tag_with_type ctxt format mono type_enc NONE T x_var) x_var,
- isabelle_info format simpN, NONE)
- end
-
-fun problem_lines_for_mono_types ctxt format mono type_enc Ts =
- case type_enc of
- Simple_Types _ => []
- | Guards _ =>
- map (formula_line_for_guards_mono_type ctxt format mono type_enc) Ts
- | Tags _ => map (formula_line_for_tags_mono_type ctxt format mono type_enc) Ts
-
-fun decl_line_for_sym ctxt format mono type_enc s
- (s', T_args, T, pred_sym, ary, _) =
- let
- val thy = Proof_Context.theory_of ctxt
- val (T, T_args) =
- if null T_args then
- (T, [])
- else case unprefix_and_unascii const_prefix s of
- SOME s' =>
- let
- val s' = s' |> invert_const
- val T = s' |> robust_const_type thy
- in (T, robust_const_typargs thy (s', T)) end
- | NONE => raise Fail "unexpected type arguments"
- in
- Decl (sym_decl_prefix ^ s, (s, s'),
- T |> fused_type ctxt mono (level_of_type_enc type_enc) ary
- |> ho_type_from_typ format type_enc pred_sym ary
- |> not (null T_args)
- ? curry ATyAbs (map (tvar_name o fst o dest_TVar) T_args))
- end
-
-fun formula_line_for_guards_sym_decl ctxt format conj_sym_kind mono type_enc n s
- j (s', T_args, T, _, ary, in_conj) =
- let
- val thy = Proof_Context.theory_of ctxt
- val (kind, maybe_negate) =
- if in_conj then (conj_sym_kind, conj_sym_kind = Conjecture ? mk_anot)
- else (Axiom, I)
- val (arg_Ts, res_T) = chop_fun ary T
- val bound_names = 1 upto ary |> map (`I o make_bound_var o string_of_int)
- val bounds =
- bound_names ~~ arg_Ts |> map (fn (name, T) => IConst (name, T, []))
- val bound_Ts =
- if exists (curry (op =) dummyT) T_args then
- case level_of_type_enc type_enc of
- All_Types => map SOME arg_Ts
- | level =>
- if granularity_of_type_level level = Ghost_Type_Arg_Vars then
- let val ghosts = ghost_type_args thy s ary in
- map2 (fn j => if member (op =) ghosts j then SOME else K NONE)
- (0 upto ary - 1) arg_Ts
- end
- else
- replicate ary NONE
- else
- replicate ary NONE
- in
- Formula (guards_sym_formula_prefix ^ s ^
- (if n > 1 then "_" ^ string_of_int j else ""), kind,
- IConst ((s, s'), T, T_args)
- |> fold (curry (IApp o swap)) bounds
- |> type_guard_iterm format type_enc res_T
- |> AAtom |> mk_aquant AForall (bound_names ~~ bound_Ts)
- |> formula_from_iformula ctxt [] format mono type_enc
- always_guard_var_in_formula (SOME true)
- |> close_formula_universally
- |> bound_tvars type_enc (n > 1) (atomic_types_of T)
- |> maybe_negate,
- isabelle_info format introN, NONE)
- end
-
-fun formula_lines_for_tags_sym_decl ctxt format conj_sym_kind mono type_enc n s
- (j, (s', T_args, T, pred_sym, ary, in_conj)) =
- let
- val thy = Proof_Context.theory_of ctxt
- val level = level_of_type_enc type_enc
- val grain = granularity_of_type_level level
- val ident_base =
- tags_sym_formula_prefix ^ s ^
- (if n > 1 then "_" ^ string_of_int j else "")
- val (kind, maybe_negate) =
- if in_conj then (conj_sym_kind, conj_sym_kind = Conjecture ? mk_anot)
- else (Axiom, I)
- val (arg_Ts, res_T) = chop_fun ary T
- val bound_names = 1 upto ary |> map (`I o make_bound_var o string_of_int)
- val bounds = bound_names |> map (fn name => ATerm (name, []))
- val cst = mk_aterm format type_enc (s, s') T_args
- val eq = maybe_negate oo eq_formula type_enc (atomic_types_of T) pred_sym
- val should_encode = should_encode_type ctxt mono level
- val tag_with = tag_with_type ctxt format mono type_enc NONE
- val add_formula_for_res =
- if should_encode res_T then
- let
- val tagged_bounds =
- if grain = Ghost_Type_Arg_Vars then
- let val ghosts = ghost_type_args thy s ary in
- map2 (fn (j, arg_T) => member (op =) ghosts j ? tag_with arg_T)
- (0 upto ary - 1 ~~ arg_Ts) bounds
- end
- else
- bounds
- in
- cons (Formula (ident_base ^ "_res", kind,
- eq (tag_with res_T (cst bounds)) (cst tagged_bounds),
- isabelle_info format simpN, NONE))
- end
- else
- I
- fun add_formula_for_arg k =
- let val arg_T = nth arg_Ts k in
- if should_encode arg_T then
- case chop k bounds of
- (bounds1, bound :: bounds2) =>
- cons (Formula (ident_base ^ "_arg" ^ string_of_int (k + 1), kind,
- eq (cst (bounds1 @ tag_with arg_T bound :: bounds2))
- (cst bounds),
- isabelle_info format simpN, NONE))
- | _ => raise Fail "expected nonempty tail"
- else
- I
- end
- in
- [] |> not pred_sym ? add_formula_for_res
- |> (Config.get ctxt type_tag_arguments andalso
- grain = Positively_Naked_Vars)
- ? fold add_formula_for_arg (ary - 1 downto 0)
- end
-
-fun result_type_of_decl (_, _, T, _, ary, _) = chop_fun ary T |> snd
-
-fun rationalize_decls ctxt (decls as decl :: (decls' as _ :: _)) =
- let
- val T = result_type_of_decl decl
- |> map_type_tvar (fn (z, _) => TVar (z, HOLogic.typeS))
- in
- if forall (type_generalization ctxt T o result_type_of_decl) decls' then
- [decl]
- else
- decls
- end
- | rationalize_decls _ decls = decls
-
-fun problem_lines_for_sym_decls ctxt format conj_sym_kind mono type_enc
- (s, decls) =
- case type_enc of
- Simple_Types _ => [decl_line_for_sym ctxt format mono type_enc s (hd decls)]
- | Guards (_, level) =>
- let
- val decls = decls |> rationalize_decls ctxt
- val n = length decls
- val decls =
- decls |> filter (should_encode_type ctxt mono level
- o result_type_of_decl)
- in
- (0 upto length decls - 1, decls)
- |-> map2 (formula_line_for_guards_sym_decl ctxt format conj_sym_kind mono
- type_enc n s)
- end
- | Tags (_, level) =>
- if granularity_of_type_level level = All_Vars then
- []
- else
- let val n = length decls in
- (0 upto n - 1 ~~ decls)
- |> maps (formula_lines_for_tags_sym_decl ctxt format conj_sym_kind mono
- type_enc n s)
- end
-
-fun problem_lines_for_sym_decl_table ctxt format conj_sym_kind mono type_enc
- mono_Ts sym_decl_tab =
- let
- val syms = sym_decl_tab |> Symtab.dest |> sort_wrt fst
- val mono_lines =
- problem_lines_for_mono_types ctxt format mono type_enc mono_Ts
- val decl_lines =
- fold_rev (append o problem_lines_for_sym_decls ctxt format conj_sym_kind
- mono type_enc)
- syms []
- in mono_lines @ decl_lines end
-
-fun needs_type_tag_idempotence ctxt (Tags (poly, level)) =
- Config.get ctxt type_tag_idempotence andalso
- is_type_level_monotonicity_based level andalso
- poly <> Mangled_Monomorphic
- | needs_type_tag_idempotence _ _ = false
-
-val implicit_declsN = "Should-be-implicit typings"
-val explicit_declsN = "Explicit typings"
-val factsN = "Relevant facts"
-val class_relsN = "Class relationships"
-val aritiesN = "Arities"
-val helpersN = "Helper facts"
-val conjsN = "Conjectures"
-val free_typesN = "Type variables"
-
-(* TFF allows implicit declarations of types, function symbols, and predicate
- symbols (with "$i" as the type of individuals), but some provers (e.g.,
- SNARK) require explicit declarations. The situation is similar for THF. *)
-
-fun default_type type_enc pred_sym s =
- let
- val ind =
- case type_enc of
- Simple_Types _ =>
- if String.isPrefix type_const_prefix s then atype_of_types
- else individual_atype
- | _ => individual_atype
- fun typ 0 = if pred_sym then bool_atype else ind
- | typ ary = AFun (ind, typ (ary - 1))
- in typ end
-
-fun nary_type_constr_type n =
- funpow n (curry AFun atype_of_types) atype_of_types
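(* Illustration (a self-contained sketch, not part of the patch): both
   "default_type" and "nary_type_constr_type" build right-nested arrow types.
   The mirror datatype below shows the shape produced for a binary type
   constructor, i.e. $tType > $tType > $tType. *)
datatype atype_sketch = AType_Sketch of string | AFun_Sketch of atype_sketch * atype_sketch
fun nary_fun_sketch 0 res = res
  | nary_fun_sketch n res =
    AFun_Sketch (AType_Sketch "$tType", nary_fun_sketch (n - 1) res)
(* nary_fun_sketch 2 (AType_Sketch "$tType") =
   AFun_Sketch (AType_Sketch "$tType",
                AFun_Sketch (AType_Sketch "$tType", AType_Sketch "$tType")) *)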
-
-fun undeclared_syms_in_problem type_enc problem =
- let
- val declared = declared_syms_in_problem problem
- fun do_sym name ty =
- if member (op =) declared name then I else AList.default (op =) (name, ty)
- fun do_type (AType (name as (s, _), tys)) =
- is_tptp_user_symbol s
- ? do_sym name (fn () => nary_type_constr_type (length tys))
- #> fold do_type tys
- | do_type (AFun (ty1, ty2)) = do_type ty1 #> do_type ty2
- | do_type (ATyAbs (_, ty)) = do_type ty
- fun do_term pred_sym (ATerm (name as (s, _), tms)) =
- is_tptp_user_symbol s
- ? do_sym name (fn _ => default_type type_enc pred_sym s (length tms))
- #> fold (do_term false) tms
- | do_term _ (AAbs ((_, ty), tm)) = do_type ty #> do_term false tm
- fun do_formula (AQuant (_, xs, phi)) =
- fold do_type (map_filter snd xs) #> do_formula phi
- | do_formula (AConn (_, phis)) = fold do_formula phis
- | do_formula (AAtom tm) = do_term true tm
- fun do_problem_line (Decl (_, _, ty)) = do_type ty
- | do_problem_line (Formula (_, _, phi, _, _)) = do_formula phi
- in
- fold (fold do_problem_line o snd) problem []
- |> filter_out (is_built_in_tptp_symbol o fst o fst)
- end
-
-fun declare_undeclared_syms_in_atp_problem type_enc problem =
- let
- val decls =
- problem
- |> undeclared_syms_in_problem type_enc
- |> sort_wrt (fst o fst)
- |> map (fn (x as (s, _), ty) => Decl (type_decl_prefix ^ s, x, ty ()))
- in (implicit_declsN, decls) :: problem end
-
-fun exists_subdtype P =
- let
- fun ex U = P U orelse
- (case U of Datatype.DtType (_, Us) => exists ex Us | _ => false)
- in ex end
-
-fun is_poly_constr (_, Us) =
- exists (exists_subdtype (fn Datatype.DtTFree _ => true | _ => false)) Us
-
-fun all_constrs_of_polymorphic_datatypes thy =
- Symtab.fold (snd
- #> #descr
- #> maps (snd #> #3)
- #> (fn cs => exists is_poly_constr cs ? append cs))
- (Datatype.get_all thy) []
- |> List.partition is_poly_constr
- |> pairself (map fst)
-
-(* Forcing explicit applications is expensive for polymorphic encodings, because
- it takes only one existential variable ranging over "'a => 'b" to ruin
- everything. Hence we do it only if there are few facts (as is normally the case
- for "metis" and the minimizer). *)
-val explicit_apply_threshold = 50
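(* Sketch (a hypothetical helper mirroring the "explicit_apply" computation in
   "prepare_atp_problem" below, not part of the patch): for non-polymorphic
   encodings or small fact sets the choice is left to the per-symbol heuristic
   (NONE); past the threshold, explicit application is disabled wholesale
   (SOME false). *)
fun explicit_apply_choice polymorphic num_facts =
  if not polymorphic orelse num_facts <= explicit_apply_threshold then NONE
  else SOME false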
-
-fun prepare_atp_problem ctxt format conj_sym_kind prem_kind type_enc exporter
- lam_trans readable_names preproc hyp_ts concl_t facts =
- let
- val thy = Proof_Context.theory_of ctxt
- val type_enc = type_enc |> adjust_type_enc format
- val explicit_apply =
- if polymorphism_of_type_enc type_enc <> Polymorphic orelse
- length facts <= explicit_apply_threshold then
- NONE
- else
- SOME false
- val lam_trans =
- if lam_trans = keep_lamsN andalso
- not (is_type_enc_higher_order type_enc) then
- error ("Lambda translation scheme incompatible with first-order \
- \encoding.")
- else
- lam_trans
- val (fact_names, classes, conjs, facts, class_rel_clauses, arity_clauses,
- lifted) =
- translate_formulas ctxt format prem_kind type_enc lam_trans preproc hyp_ts
- concl_t facts
- val sym_tab = sym_table_for_facts ctxt type_enc explicit_apply conjs facts
- val mono = conjs @ facts |> mononotonicity_info_for_facts ctxt type_enc
- val (polym_constrs, monom_constrs) =
- all_constrs_of_polymorphic_datatypes thy
- |>> map (make_fixed_const (SOME format))
- val firstorderize =
- firstorderize_fact thy monom_constrs format type_enc sym_tab
- val (conjs, facts) = (conjs, facts) |> pairself (map firstorderize)
- val sym_tab = sym_table_for_facts ctxt type_enc (SOME false) conjs facts
- val helpers =
- sym_tab |> helper_facts_for_sym_table ctxt format type_enc
- |> map firstorderize
- val mono_Ts =
- helpers @ conjs @ facts |> monotonic_types_for_facts ctxt mono type_enc
- val class_decl_lines = decl_lines_for_classes type_enc classes
- val sym_decl_lines =
- (conjs, helpers @ facts)
- |> sym_decl_table_for_facts ctxt format type_enc sym_tab
- |> problem_lines_for_sym_decl_table ctxt format conj_sym_kind mono
- type_enc mono_Ts
- val helper_lines =
- 0 upto length helpers - 1 ~~ helpers
- |> map (formula_line_for_fact ctxt polym_constrs format helper_prefix I
- false true mono type_enc)
- |> (if needs_type_tag_idempotence ctxt type_enc then
- cons (type_tag_idempotence_fact format type_enc)
- else
- I)
- (* Reordering these might confuse the proof reconstruction code or the SPASS
- FLOTTER hack. *)
- val problem =
- [(explicit_declsN, class_decl_lines @ sym_decl_lines),
- (factsN,
- map (formula_line_for_fact ctxt polym_constrs format fact_prefix
- ascii_of (not exporter) (not exporter) mono type_enc)
- (0 upto length facts - 1 ~~ facts)),
- (class_relsN,
- map (formula_line_for_class_rel_clause format type_enc)
- class_rel_clauses),
- (aritiesN,
- map (formula_line_for_arity_clause format type_enc) arity_clauses),
- (helpersN, helper_lines),
- (conjsN,
- map (formula_line_for_conjecture ctxt polym_constrs format mono
- type_enc) conjs),
- (free_typesN, formula_lines_for_free_types type_enc (facts @ conjs))]
- val problem =
- problem
- |> (case format of
- CNF => ensure_cnf_problem
- | CNF_UEQ => filter_cnf_ueq_problem
- | FOF => I
- | TFF (_, TPTP_Implicit) => I
- | THF (_, TPTP_Implicit, _) => I
- | _ => declare_undeclared_syms_in_atp_problem type_enc)
- val (problem, pool) = problem |> nice_atp_problem readable_names format
- fun add_sym_ary (s, {min_ary, ...} : sym_info) =
- min_ary > 0 ? Symtab.insert (op =) (s, min_ary)
- in
- (problem,
- case pool of SOME the_pool => snd the_pool | NONE => Symtab.empty,
- fact_names |> Vector.fromList,
- lifted,
- Symtab.empty |> Symtab.fold add_sym_ary sym_tab)
- end
-
-(* FUDGE *)
-val conj_weight = 0.0
-val hyp_weight = 0.1
-val fact_min_weight = 0.2
-val fact_max_weight = 1.0
-val type_info_default_weight = 0.8
-
-fun add_term_weights weight (ATerm (s, tms)) =
- is_tptp_user_symbol s ? Symtab.default (s, weight)
- #> fold (add_term_weights weight) tms
- | add_term_weights weight (AAbs (_, tm)) = add_term_weights weight tm
-fun add_problem_line_weights weight (Formula (_, _, phi, _, _)) =
- formula_fold NONE (K (add_term_weights weight)) phi
- | add_problem_line_weights _ _ = I
-
-fun add_conjectures_weights [] = I
- | add_conjectures_weights conjs =
- let val (hyps, conj) = split_last conjs in
- add_problem_line_weights conj_weight conj
- #> fold (add_problem_line_weights hyp_weight) hyps
- end
-
-fun add_facts_weights facts =
- let
- val num_facts = length facts
- fun weight_of j =
- fact_min_weight + (fact_max_weight - fact_min_weight) * Real.fromInt j
- / Real.fromInt num_facts
- in
- map weight_of (0 upto num_facts - 1) ~~ facts
- |> fold (uncurry add_problem_line_weights)
- end
-
-(* Weights are from 0.0 (most important) to 1.0 (least important). *)
-fun atp_problem_weights problem =
- let val get = these o AList.lookup (op =) problem in
- Symtab.empty
- |> add_conjectures_weights (get free_typesN @ get conjsN)
- |> add_facts_weights (get factsN)
- |> fold (fold (add_problem_line_weights type_info_default_weight) o get)
- [explicit_declsN, class_relsN, aritiesN]
- |> Symtab.dest
- |> sort (prod_ord Real.compare string_ord o pairself swap)
- end
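(* Worked example (a sketch, not part of the patch): fact weights are
   interpolated linearly from fact_min_weight towards fact_max_weight, so
   earlier (more relevant) facts stay closer to the "important" end, 0.0. *)
fun fact_weight_sketch num_facts j =
  fact_min_weight + (fact_max_weight - fact_min_weight) * real j / real num_facts
(* map (fact_weight_sketch 4) [0, 1, 2, 3] = [0.2, 0.4, 0.6, 0.8] *)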
-
-end;
--- a/src/HOL/Tools/ATP/atp_util.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/ATP/atp_util.ML Tue Feb 14 11:16:07 2012 +0100
@@ -31,6 +31,7 @@
val s_disj : term * term -> term
val s_imp : term * term -> term
val s_iff : term * term -> term
+ val close_form_prefix : string
val close_form : term -> term
val monomorphic_term : Type.tyenv -> term -> term
val eta_expand : typ list -> term -> int -> term
@@ -264,10 +265,13 @@
| s_iff (t1, @{const True}) = t1
| s_iff (t1, t2) = HOLogic.eq_const HOLogic.boolT $ t1 $ t2
+val close_form_prefix = "ATP.close_form."
+
fun close_form t =
fold (fn ((s, i), T) => fn t' =>
HOLogic.all_const T
- $ Abs (s, T, abstract_over (Var ((s, i), T), t')))
+ $ Abs (close_form_prefix ^ s, T,
+ abstract_over (Var ((s, i), T), t')))
(Term.add_vars t []) t
fun monomorphic_term subst =
--- a/src/HOL/Tools/ATP/scripts/spass Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/ATP/scripts/spass Tue Feb 14 11:16:07 2012 +0100
@@ -8,11 +8,11 @@
options=${@:1:$(($#-1))}
name=${@:$(($#)):1}
-"$SPASS_HOME/SPASS" -Flotter $name \
+"$SPASS_HOME/SPASS" -Flotter "$name" \
| sed 's/description({$/description({*/' \
| sed 's/set_ClauseFormulaRelation()\.//' \
> $name.cnf
cat $name.cnf
-"$SPASS_HOME/SPASS" $options $name.cnf \
+"$SPASS_HOME/SPASS" $options "$name.cnf" \
| sed 's/\(Formulae used in the proof :\).*/\1 N\/A/'
-rm -f $name.cnf
+rm -f "$name.cnf"
--- a/src/HOL/Tools/Function/size.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Function/size.ML Tue Feb 14 11:16:07 2012 +0100
@@ -41,7 +41,7 @@
| SOME t => t);
fun is_poly thy (Datatype.DtType (name, dts)) =
- (case Datatype.get_info thy name of
+ (case lookup_size thy name of
NONE => false
| SOME _ => exists (is_poly thy) dts)
| is_poly _ _ = true;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Metis/metis_generate.ML Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,256 @@
+(* Title: HOL/Tools/Metis/metis_generate.ML
+ Author: Jia Meng, Cambridge University Computer Laboratory and NICTA
+ Author: Kong W. Susanto, Cambridge University Computer Laboratory
+ Author: Lawrence C. Paulson, Cambridge University Computer Laboratory
+ Author: Jasmin Blanchette, TU Muenchen
+
+Translation of HOL to FOL for Metis.
+*)
+
+signature METIS_GENERATE =
+sig
+ type type_enc = ATP_Problem_Generate.type_enc
+
+ datatype isa_thm =
+ Isa_Reflexive_or_Trivial |
+ Isa_Lambda_Lifted |
+ Isa_Raw of thm
+
+ val metis_equal : string
+ val metis_predicator : string
+ val metis_app_op : string
+ val metis_systematic_type_tag : string
+ val metis_ad_hoc_type_tag : string
+ val metis_generated_var_prefix : string
+ val trace : bool Config.T
+ val verbose : bool Config.T
+ val trace_msg : Proof.context -> (unit -> string) -> unit
+ val verbose_warning : Proof.context -> string -> unit
+ val metis_name_table : ((string * int) * ((type_enc -> string) * bool)) list
+ val reveal_old_skolem_terms : (string * term) list -> term -> term
+ val reveal_lam_lifted : (string * term) list -> term -> term
+ val prepare_metis_problem :
+ Proof.context -> type_enc -> string -> thm list -> thm list
+ -> int Symtab.table * (Metis_Thm.thm * isa_thm) list
+ * ((string * term) list * (string * term) list)
+end
+
+structure Metis_Generate : METIS_GENERATE =
+struct
+
+open ATP_Problem
+open ATP_Problem_Generate
+
+val metis_equal = "="
+val metis_predicator = "{}"
+val metis_app_op = Metis_Name.toString Metis_Term.appName
+val metis_systematic_type_tag =
+ Metis_Name.toString Metis_Term.hasTypeFunctionName
+val metis_ad_hoc_type_tag = "**"
+val metis_generated_var_prefix = "_"
+
+val trace = Attrib.setup_config_bool @{binding metis_trace} (K false)
+val verbose = Attrib.setup_config_bool @{binding metis_verbose} (K true)
+
+fun trace_msg ctxt msg = if Config.get ctxt trace then tracing (msg ()) else ()
+fun verbose_warning ctxt msg =
+ if Config.get ctxt verbose then warning ("Metis: " ^ msg) else ()
+
+val metis_name_table =
+ [((tptp_equal, 2), (K metis_equal, false)),
+ ((tptp_old_equal, 2), (K metis_equal, false)),
+ ((prefixed_predicator_name, 1), (K metis_predicator, false)),
+ ((prefixed_app_op_name, 2), (K metis_app_op, false)),
+ ((prefixed_type_tag_name, 2),
+ (fn type_enc =>
+ if level_of_type_enc type_enc = All_Types then metis_systematic_type_tag
+ else metis_ad_hoc_type_tag, true))]
+
+fun old_skolem_const_name i j num_T_args =
+ old_skolem_const_prefix ^ Long_Name.separator ^
+ (space_implode Long_Name.separator (map string_of_int [i, j, num_T_args]))
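(* Example (a sketch, not part of the patch; assumes Long_Name.separator is
   "."): the three counters are simply dot-separated onto the prefix, so
   old_skolem_const_name 5 2 1 yields old_skolem_const_prefix ^ ".5.2.1". *)
val example_old_skolem_name =
  old_skolem_const_prefix ^ "." ^ String.concatWith "." (map Int.toString [5, 2, 1])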
+
+fun conceal_old_skolem_terms i old_skolems t =
+ if exists_Const (curry (op =) @{const_name Meson.skolem} o fst) t then
+ let
+ fun aux old_skolems
+ (t as (Const (@{const_name Meson.skolem}, Type (_, [_, T])) $ _)) =
+ let
+ val (old_skolems, s) =
+ if i = ~1 then
+ (old_skolems, @{const_name undefined})
+ else case AList.find (op aconv) old_skolems t of
+ s :: _ => (old_skolems, s)
+ | [] =>
+ let
+ val s = old_skolem_const_name i (length old_skolems)
+ (length (Term.add_tvarsT T []))
+ in ((s, t) :: old_skolems, s) end
+ in (old_skolems, Const (s, T)) end
+ | aux old_skolems (t1 $ t2) =
+ let
+ val (old_skolems, t1) = aux old_skolems t1
+ val (old_skolems, t2) = aux old_skolems t2
+ in (old_skolems, t1 $ t2) end
+ | aux old_skolems (Abs (s, T, t')) =
+ let val (old_skolems, t') = aux old_skolems t' in
+ (old_skolems, Abs (s, T, t'))
+ end
+ | aux old_skolems t = (old_skolems, t)
+ in aux old_skolems t end
+ else
+ (old_skolems, t)
+
+fun reveal_old_skolem_terms old_skolems =
+ map_aterms (fn t as Const (s, _) =>
+ if String.isPrefix old_skolem_const_prefix s then
+ AList.lookup (op =) old_skolems s |> the
+ |> map_types (map_type_tvar (K dummyT))
+ else
+ t
+ | t => t)
+
+fun reveal_lam_lifted lambdas =
+ map_aterms (fn t as Const (s, _) =>
+ if String.isPrefix lam_lifted_prefix s then
+ case AList.lookup (op =) lambdas s of
+ SOME t =>
+ Const (@{const_name Metis.lambda}, dummyT)
+ $ map_types (map_type_tvar (K dummyT))
+ (reveal_lam_lifted lambdas t)
+ | NONE => t
+ else
+ t
+ | t => t)
+
+
+(* ------------------------------------------------------------------------- *)
+(* Logic maps manage the interface between HOL and first-order logic. *)
+(* ------------------------------------------------------------------------- *)
+
+datatype isa_thm =
+ Isa_Reflexive_or_Trivial |
+ Isa_Lambda_Lifted |
+ Isa_Raw of thm
+
+val proxy_defs = map (fst o snd o snd) proxy_table
+val prepare_helper =
+ Meson.make_meta_clause #> rewrite_rule (map safe_mk_meta_eq proxy_defs)
+
+fun metis_term_from_atp type_enc (ATerm (s, tms)) =
+ if is_tptp_variable s then
+ Metis_Term.Var (Metis_Name.fromString s)
+ else
+ (case AList.lookup (op =) metis_name_table (s, length tms) of
+ SOME (f, swap) => (f type_enc, swap)
+ | NONE => (s, false))
+ |> (fn (s, swap) =>
+ Metis_Term.Fn (Metis_Name.fromString s,
+ tms |> map (metis_term_from_atp type_enc)
+ |> swap ? rev))
+fun metis_atom_from_atp type_enc (AAtom tm) =
+ (case metis_term_from_atp type_enc tm of
+ Metis_Term.Fn x => x
+ | _ => raise Fail "not CNF -- expected function")
+ | metis_atom_from_atp _ _ = raise Fail "not CNF -- expected atom"
+fun metis_literal_from_atp type_enc (AConn (ANot, [phi])) =
+ (false, metis_atom_from_atp type_enc phi)
+ | metis_literal_from_atp type_enc phi =
+ (true, metis_atom_from_atp type_enc phi)
+fun metis_literals_from_atp type_enc (AConn (AOr, phis)) =
+ maps (metis_literals_from_atp type_enc) phis
+ | metis_literals_from_atp type_enc phi = [metis_literal_from_atp type_enc phi]
+fun metis_axiom_from_atp type_enc clauses (Formula (ident, _, phi, _, _)) =
+ let
+ fun some isa =
+ SOME (phi |> metis_literals_from_atp type_enc
+ |> Metis_LiteralSet.fromList
+ |> Metis_Thm.axiom, isa)
+ in
+ if ident = type_tag_idempotence_helper_name orelse
+ String.isPrefix tags_sym_formula_prefix ident then
+ Isa_Reflexive_or_Trivial |> some
+ else if String.isPrefix conjecture_prefix ident then
+ NONE
+ else if String.isPrefix helper_prefix ident then
+ case (String.isSuffix typed_helper_suffix ident,
+ space_explode "_" ident) of
+ (needs_fairly_sound, _ :: const :: j :: _) =>
+ nth ((const, needs_fairly_sound)
+ |> AList.lookup (op =) helper_table |> the)
+ (the (Int.fromString j) - 1)
+ |> prepare_helper
+ |> Isa_Raw |> some
+ | _ => raise Fail ("malformed helper identifier " ^ quote ident)
+ else case try (unprefix fact_prefix) ident of
+ SOME s =>
+ let val s = s |> space_explode "_" |> tl |> space_implode "_"
+ in
+ case Int.fromString s of
+ SOME j =>
+ Meson.make_meta_clause (snd (nth clauses j)) |> Isa_Raw |> some
+ | NONE =>
+ if String.isPrefix lam_fact_prefix (unascii_of s) then
+ Isa_Lambda_Lifted |> some
+ else
+ raise Fail ("malformed fact identifier " ^ quote ident)
+ end
+ | NONE => TrueI |> Isa_Raw |> some
+ end
+ | metis_axiom_from_atp _ _ _ = raise Fail "not CNF -- expected formula"
+
+fun eliminate_lam_wrappers (Const (@{const_name Metis.lambda}, _) $ t) =
+ eliminate_lam_wrappers t
+ | eliminate_lam_wrappers (t $ u) =
+ eliminate_lam_wrappers t $ eliminate_lam_wrappers u
+ | eliminate_lam_wrappers (Abs (s, T, t)) =
+ Abs (s, T, eliminate_lam_wrappers t)
+ | eliminate_lam_wrappers t = t
+
+(* Function to generate metis clauses, including comb and type clauses *)
+fun prepare_metis_problem ctxt type_enc lam_trans conj_clauses fact_clauses =
+ let
+ val (conj_clauses, fact_clauses) =
+ if polymorphism_of_type_enc type_enc = Polymorphic then
+ (conj_clauses, fact_clauses)
+ else
+ conj_clauses @ fact_clauses
+ |> map (pair 0)
+ |> rpair (ctxt |> Config.put Monomorph.keep_partial_instances false)
+ |-> Monomorph.monomorph atp_schematic_consts_of
+ |> fst |> chop (length conj_clauses)
+ |> pairself (maps (map (zero_var_indexes o snd)))
+ val num_conjs = length conj_clauses
+ val clauses =
+ map2 (fn j => pair (Int.toString j, (Local, General)))
+ (0 upto num_conjs - 1) conj_clauses @
+ map2 (fn j => pair (Int.toString (num_conjs + j), (Local, General)))
+ (0 upto length fact_clauses - 1) fact_clauses
+ val (old_skolems, props) =
+ fold_rev (fn (name, th) => fn (old_skolems, props) =>
+ th |> prop_of |> Logic.strip_imp_concl
+ |> conceal_old_skolem_terms (length clauses) old_skolems
+ ||> (lam_trans = liftingN orelse lam_trans = lam_liftingN)
+ ? eliminate_lam_wrappers
+ ||> (fn prop => (name, prop) :: props))
+ clauses ([], [])
+ (*
+ val _ =
+ tracing ("PROPS:\n" ^
+ cat_lines (map (Syntax.string_of_term ctxt o snd) props))
+ *)
+ val lam_trans = if lam_trans = combsN then no_lamsN else lam_trans
+ val (atp_problem, _, _, lifted, sym_tab) =
+ prepare_atp_problem ctxt CNF Hypothesis Axiom type_enc false lam_trans
+ false false false [] @{prop False} props
+ (*
+ val _ = tracing ("ATP PROBLEM: " ^
+ cat_lines (lines_for_atp_problem CNF atp_problem))
+ *)
+ (* "rev" is for compatibility with existing proof scripts. *)
+ val axioms =
+ atp_problem
+ |> maps (map_filter (metis_axiom_from_atp type_enc clauses) o snd) |> rev
+ in (sym_tab, axioms, (lifted, old_skolems)) end
+
+end;
--- a/src/HOL/Tools/Metis/metis_reconstruct.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Metis/metis_reconstruct.ML Tue Feb 14 11:16:07 2012 +0100
@@ -9,7 +9,7 @@
signature METIS_RECONSTRUCT =
sig
- type type_enc = ATP_Translate.type_enc
+ type type_enc = ATP_Problem_Generate.type_enc
exception METIS of string * string
@@ -30,9 +30,9 @@
struct
open ATP_Problem
-open ATP_Translate
-open ATP_Reconstruct
-open Metis_Translate
+open ATP_Problem_Generate
+open ATP_Proof_Reconstruct
+open Metis_Generate
exception METIS of string * string
@@ -101,7 +101,7 @@
(* INFERENCE RULE: AXIOM *)
(* This causes variables to have an index of 1 by default. See also
- "term_from_atp" in "ATP_Reconstruct". *)
+ "term_from_atp" in "ATP_Proof_Reconstruct". *)
val axiom_inference = Thm.incr_indexes 1 oo lookth
(* INFERENCE RULE: ASSUME *)
@@ -337,7 +337,7 @@
let
val s = s |> Metis_Name.toString
|> perhaps (try (unprefix_and_unascii const_prefix
- #> the #> unmangled_const_name))
+ #> the #> unmangled_const_name #> hd))
in
if s = metis_predicator orelse s = predicator_name orelse
s = metis_systematic_type_tag orelse s = metis_ad_hoc_type_tag
--- a/src/HOL/Tools/Metis/metis_tactic.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Metis/metis_tactic.ML Tue Feb 14 11:16:07 2012 +0100
@@ -23,9 +23,9 @@
structure Metis_Tactic : METIS_TACTIC =
struct
-open ATP_Translate
-open ATP_Reconstruct
-open Metis_Translate
+open ATP_Problem_Generate
+open ATP_Proof_Reconstruct
+open Metis_Generate
open Metis_Reconstruct
val new_skolemizer =
@@ -125,13 +125,15 @@
let val thy = Proof_Context.theory_of ctxt
val new_skolemizer =
Config.get ctxt new_skolemizer orelse null (Meson.choice_theorems thy)
- val do_lams = lam_trans = lam_liftingN ? introduce_lam_wrappers ctxt
+ val do_lams =
+ (lam_trans = liftingN orelse lam_trans = lam_liftingN)
+ ? introduce_lam_wrappers ctxt
val th_cls_pairs =
map2 (fn j => fn th =>
(Thm.get_name_hint th,
th |> Drule.eta_contraction_rule
|> Meson_Clausify.cnf_axiom ctxt new_skolemizer
- (lam_trans = combinatorsN) j
+ (lam_trans = combsN) j
||> map do_lams))
(0 upto length ths0 - 1) ths0
val ths = maps (snd o snd) th_cls_pairs
@@ -140,7 +142,7 @@
val _ = trace_msg ctxt (fn () => "FOL_SOLVE: CONJECTURE CLAUSES")
val _ = app (fn th => trace_msg ctxt (fn () => Display.string_of_thm ctxt th)) cls
val _ = trace_msg ctxt (fn () => "type_enc = " ^ type_enc)
- val type_enc = type_enc_from_string Sound type_enc
+ val type_enc = type_enc_from_string Strict type_enc
val (sym_tab, axioms, concealed) =
prepare_metis_problem ctxt type_enc lam_trans cls ths
fun get_isa_thm mth Isa_Reflexive_or_Trivial =
@@ -247,7 +249,7 @@
else
();
Meson.MESON (preskolem_tac ctxt)
- (maps (neg_clausify ctxt (lam_trans = combinatorsN))) tac ctxt i st0
+ (maps (neg_clausify ctxt (lam_trans = combsN))) tac ctxt i st0
end
fun metis_tac [] = generic_metis_tac partial_type_encs
@@ -277,7 +279,7 @@
(schem_facts @ ths))
end
-val metis_lam_transs = [hide_lamsN, lam_liftingN, combinatorsN]
+val metis_lam_transs = [hide_lamsN, liftingN, combsN]
fun set_opt _ x NONE = SOME x
| set_opt get x (SOME x0) =
--- a/src/HOL/Tools/Metis/metis_translate.ML Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,256 +0,0 @@
-(* Title: HOL/Tools/Metis/metis_translate.ML
- Author: Jia Meng, Cambridge University Computer Laboratory and NICTA
- Author: Kong W. Susanto, Cambridge University Computer Laboratory
- Author: Lawrence C. Paulson, Cambridge University Computer Laboratory
- Author: Jasmin Blanchette, TU Muenchen
-
-Translation of HOL to FOL for Metis.
-*)
-
-signature METIS_TRANSLATE =
-sig
- type type_enc = ATP_Translate.type_enc
-
- datatype isa_thm =
- Isa_Reflexive_or_Trivial |
- Isa_Lambda_Lifted |
- Isa_Raw of thm
-
- val metis_equal : string
- val metis_predicator : string
- val metis_app_op : string
- val metis_systematic_type_tag : string
- val metis_ad_hoc_type_tag : string
- val metis_generated_var_prefix : string
- val trace : bool Config.T
- val verbose : bool Config.T
- val trace_msg : Proof.context -> (unit -> string) -> unit
- val verbose_warning : Proof.context -> string -> unit
- val metis_name_table : ((string * int) * ((type_enc -> string) * bool)) list
- val reveal_old_skolem_terms : (string * term) list -> term -> term
- val reveal_lam_lifted : (string * term) list -> term -> term
- val prepare_metis_problem :
- Proof.context -> type_enc -> string -> thm list -> thm list
- -> int Symtab.table * (Metis_Thm.thm * isa_thm) list
- * ((string * term) list * (string * term) list)
-end
-
-structure Metis_Translate : METIS_TRANSLATE =
-struct
-
-open ATP_Problem
-open ATP_Translate
-
-val metis_equal = "="
-val metis_predicator = "{}"
-val metis_app_op = Metis_Name.toString Metis_Term.appName
-val metis_systematic_type_tag =
- Metis_Name.toString Metis_Term.hasTypeFunctionName
-val metis_ad_hoc_type_tag = "**"
-val metis_generated_var_prefix = "_"
-
-val trace = Attrib.setup_config_bool @{binding metis_trace} (K false)
-val verbose = Attrib.setup_config_bool @{binding metis_verbose} (K true)
-
-fun trace_msg ctxt msg = if Config.get ctxt trace then tracing (msg ()) else ()
-fun verbose_warning ctxt msg =
- if Config.get ctxt verbose then warning ("Metis: " ^ msg) else ()
-
-val metis_name_table =
- [((tptp_equal, 2), (K metis_equal, false)),
- ((tptp_old_equal, 2), (K metis_equal, false)),
- ((prefixed_predicator_name, 1), (K metis_predicator, false)),
- ((prefixed_app_op_name, 2), (K metis_app_op, false)),
- ((prefixed_type_tag_name, 2),
- (fn type_enc =>
- if level_of_type_enc type_enc = All_Types then metis_systematic_type_tag
- else metis_ad_hoc_type_tag, true))]
-
-fun old_skolem_const_name i j num_T_args =
- old_skolem_const_prefix ^ Long_Name.separator ^
- (space_implode Long_Name.separator (map string_of_int [i, j, num_T_args]))
-
-fun conceal_old_skolem_terms i old_skolems t =
- if exists_Const (curry (op =) @{const_name Meson.skolem} o fst) t then
- let
- fun aux old_skolems
- (t as (Const (@{const_name Meson.skolem}, Type (_, [_, T])) $ _)) =
- let
- val (old_skolems, s) =
- if i = ~1 then
- (old_skolems, @{const_name undefined})
- else case AList.find (op aconv) old_skolems t of
- s :: _ => (old_skolems, s)
- | [] =>
- let
- val s = old_skolem_const_name i (length old_skolems)
- (length (Term.add_tvarsT T []))
- in ((s, t) :: old_skolems, s) end
- in (old_skolems, Const (s, T)) end
- | aux old_skolems (t1 $ t2) =
- let
- val (old_skolems, t1) = aux old_skolems t1
- val (old_skolems, t2) = aux old_skolems t2
- in (old_skolems, t1 $ t2) end
- | aux old_skolems (Abs (s, T, t')) =
- let val (old_skolems, t') = aux old_skolems t' in
- (old_skolems, Abs (s, T, t'))
- end
- | aux old_skolems t = (old_skolems, t)
- in aux old_skolems t end
- else
- (old_skolems, t)
-
-fun reveal_old_skolem_terms old_skolems =
- map_aterms (fn t as Const (s, _) =>
- if String.isPrefix old_skolem_const_prefix s then
- AList.lookup (op =) old_skolems s |> the
- |> map_types (map_type_tvar (K dummyT))
- else
- t
- | t => t)
-
-fun reveal_lam_lifted lambdas =
- map_aterms (fn t as Const (s, _) =>
- if String.isPrefix lam_lifted_prefix s then
- case AList.lookup (op =) lambdas s of
- SOME t =>
- Const (@{const_name Metis.lambda}, dummyT)
- $ map_types (map_type_tvar (K dummyT))
- (reveal_lam_lifted lambdas t)
- | NONE => t
- else
- t
- | t => t)
-
-
-(* ------------------------------------------------------------------------- *)
-(* Logic maps manage the interface between HOL and first-order logic. *)
-(* ------------------------------------------------------------------------- *)
-
-datatype isa_thm =
- Isa_Reflexive_or_Trivial |
- Isa_Lambda_Lifted |
- Isa_Raw of thm
-
-val proxy_defs = map (fst o snd o snd) proxy_table
-val prepare_helper =
- Meson.make_meta_clause #> rewrite_rule (map safe_mk_meta_eq proxy_defs)
-
-fun metis_term_from_atp type_enc (ATerm (s, tms)) =
- if is_tptp_variable s then
- Metis_Term.Var (Metis_Name.fromString s)
- else
- (case AList.lookup (op =) metis_name_table (s, length tms) of
- SOME (f, swap) => (f type_enc, swap)
- | NONE => (s, false))
- |> (fn (s, swap) =>
- Metis_Term.Fn (Metis_Name.fromString s,
- tms |> map (metis_term_from_atp type_enc)
- |> swap ? rev))
-fun metis_atom_from_atp type_enc (AAtom tm) =
- (case metis_term_from_atp type_enc tm of
- Metis_Term.Fn x => x
- | _ => raise Fail "non CNF -- expected function")
- | metis_atom_from_atp _ _ = raise Fail "not CNF -- expected atom"
-fun metis_literal_from_atp type_enc (AConn (ANot, [phi])) =
- (false, metis_atom_from_atp type_enc phi)
- | metis_literal_from_atp type_enc phi =
- (true, metis_atom_from_atp type_enc phi)
-fun metis_literals_from_atp type_enc (AConn (AOr, phis)) =
- maps (metis_literals_from_atp type_enc) phis
- | metis_literals_from_atp type_enc phi = [metis_literal_from_atp type_enc phi]
-fun metis_axiom_from_atp type_enc clauses (Formula (ident, _, phi, _, _)) =
- let
- fun some isa =
- SOME (phi |> metis_literals_from_atp type_enc
- |> Metis_LiteralSet.fromList
- |> Metis_Thm.axiom, isa)
- in
- if ident = type_tag_idempotence_helper_name orelse
- String.isPrefix tags_sym_formula_prefix ident then
- Isa_Reflexive_or_Trivial |> some
- else if String.isPrefix conjecture_prefix ident then
- NONE
- else if String.isPrefix helper_prefix ident then
- case (String.isSuffix typed_helper_suffix ident,
- space_explode "_" ident) of
- (needs_fairly_sound, _ :: const :: j :: _) =>
- nth ((const, needs_fairly_sound)
- |> AList.lookup (op =) helper_table |> the)
- (the (Int.fromString j) - 1)
- |> prepare_helper
- |> Isa_Raw |> some
- | _ => raise Fail ("malformed helper identifier " ^ quote ident)
- else case try (unprefix fact_prefix) ident of
- SOME s =>
- let val s = s |> space_explode "_" |> tl |> space_implode "_"
- in
- case Int.fromString s of
- SOME j =>
- Meson.make_meta_clause (snd (nth clauses j)) |> Isa_Raw |> some
- | NONE =>
- if String.isPrefix lam_fact_prefix (unascii_of s) then
- Isa_Lambda_Lifted |> some
- else
- raise Fail ("malformed fact identifier " ^ quote ident)
- end
- | NONE => TrueI |> Isa_Raw |> some
- end
- | metis_axiom_from_atp _ _ _ = raise Fail "not CNF -- expected formula"
-
-fun eliminate_lam_wrappers (Const (@{const_name Metis.lambda}, _) $ t) =
- eliminate_lam_wrappers t
- | eliminate_lam_wrappers (t $ u) =
- eliminate_lam_wrappers t $ eliminate_lam_wrappers u
- | eliminate_lam_wrappers (Abs (s, T, t)) =
- Abs (s, T, eliminate_lam_wrappers t)
- | eliminate_lam_wrappers t = t
-
-(* Function to generate metis clauses, including comb and type clauses *)
-fun prepare_metis_problem ctxt type_enc lam_trans conj_clauses fact_clauses =
- let
- val (conj_clauses, fact_clauses) =
- if polymorphism_of_type_enc type_enc = Polymorphic then
- (conj_clauses, fact_clauses)
- else
- conj_clauses @ fact_clauses
- |> map (pair 0)
- |> rpair (ctxt |> Config.put Monomorph.keep_partial_instances false)
- |-> Monomorph.monomorph atp_schematic_consts_of
- |> fst |> chop (length conj_clauses)
- |> pairself (maps (map (zero_var_indexes o snd)))
- val num_conjs = length conj_clauses
- val clauses =
- map2 (fn j => pair (Int.toString j, Local))
- (0 upto num_conjs - 1) conj_clauses @
- (* "General" below isn't quite correct; the fact could be local. *)
- map2 (fn j => pair (Int.toString (num_conjs + j), General))
- (0 upto length fact_clauses - 1) fact_clauses
- val (old_skolems, props) =
- fold_rev (fn (name, th) => fn (old_skolems, props) =>
- th |> prop_of |> Logic.strip_imp_concl
- |> conceal_old_skolem_terms (length clauses) old_skolems
- ||> lam_trans = lam_liftingN ? eliminate_lam_wrappers
- ||> (fn prop => (name, prop) :: props))
- clauses ([], [])
- (*
- val _ =
- tracing ("PROPS:\n" ^
- cat_lines (map (Syntax.string_of_term ctxt o snd) props))
- *)
- val lam_trans = if lam_trans = combinatorsN then no_lamsN else lam_trans
- val (atp_problem, _, _, lifted, sym_tab) =
- prepare_atp_problem ctxt CNF Hypothesis Axiom type_enc false lam_trans
- false false [] @{prop False} props
- (*
- val _ = tracing ("ATP PROBLEM: " ^
- cat_lines (lines_for_atp_problem CNF atp_problem))
- *)
- (* "rev" is for compatibility with existing proof scripts. *)
- val axioms =
- atp_problem
- |> maps (map_filter (metis_axiom_from_atp type_enc clauses) o snd) |> rev
- in (sym_tab, axioms, (lifted, old_skolems)) end
-
-end;
--- a/src/HOL/Tools/Nitpick/etc/settings Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-# -*- shell-script -*- :mode=shellscript:
-
-ISABELLE_TOOLS="$ISABELLE_TOOLS:$COMPONENT/lib/Tools"
--- a/src/HOL/Tools/Nitpick/lib/Tools/nitrox Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-#
-# Author: Jasmin Blanchette
-#
-# DESCRIPTION: TPTP FOF version of Nitpick
-
-
-PRG="$(basename "$0")"
-
-function usage() {
- echo
- echo "Usage: isabelle $PRG FILES"
- echo
- echo " Runs Nitrox on a FOF or CNF TPTP problem."
- echo
- exit 1
-}
-
-[ "$#" -eq 0 -o "$1" = "-?" ] && usage
-
-SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
-
-for FILE in "$@"
-do
- echo "theory $SCRATCH imports \"Nitpick\" begin ML {* Nitrox.pick_nits_in_fof_file \"$FILE\" *} end;" \
- > /tmp/$SCRATCH.thy
- "$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
-done
--- a/src/HOL/Tools/Nitpick/nitpick_hol.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML Tue Feb 14 11:16:07 2012 +0100
@@ -1012,7 +1012,7 @@
handle TYPE ("Nitpick_HOL.card_of_type", _, _) =>
default_card)
-(* Similar to "ATP_Translate.tiny_card_of_type". *)
+(* Similar to "ATP_Util.tiny_card_of_type". *)
fun bounded_exact_card_of_type hol_ctxt finitizable_dataTs max default_card
assigns T =
let
--- a/src/HOL/Tools/Nitpick/nitrox.ML Thu Feb 09 19:34:23 2012 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,140 +0,0 @@
-(* Title: HOL/Tools/Nitpick/nitrox.ML
- Author: Jasmin Blanchette, TU Muenchen
- Copyright 2010, 2011
-
-Finite model generation for TPTP first-order formulas (FOF and CNF) via Nitpick.
-*)
-
-signature NITROX =
-sig
- val pick_nits_in_fof_file : string -> string
-end;
-
-structure Nitrox : NITROX =
-struct
-
-open ATP_Util
-open ATP_Problem
-open ATP_Proof
-open Nitpick_Util
-open Nitpick
-open Nitpick_Isar
-
-exception SYNTAX of string
-
-val tptp_explode = raw_explode o strip_spaces_except_between_idents
-
-fun parse_file_path (c :: ss) =
- if c = "'" orelse c = "\"" then
- ss |> chop_while (curry (op <>) c) |>> implode ||> tl
- else
- raise SYNTAX "invalid file path"
- | parse_file_path [] = raise SYNTAX "invalid file path"
-
-fun parse_include x =
- let
- val (file_name, rest) =
- (Scan.this_string "include" |-- $$ "(" |-- parse_file_path --| $$ ")"
- --| $$ ".") x
- val path = file_name |> Path.explode
- val path =
- path |> not (Path.is_absolute path) ? Path.append (Path.explode "$TPTP")
- in ((), (path |> File.read |> tptp_explode) @ rest) end
-
-val parse_fof_or_cnf =
- (Scan.this_string "fof" || Scan.this_string "cnf") |-- $$ "(" |--
- Scan.many (not_equal ",") |-- $$ "," |--
- (Scan.this_string "axiom" || Scan.this_string "definition"
- || Scan.this_string "theorem" || Scan.this_string "lemma"
- || Scan.this_string "hypothesis" || Scan.this_string "conjecture"
- || Scan.this_string "negated_conjecture") --| $$ "," -- parse_formula
- --| $$ ")" --| $$ "."
- >> (fn ("conjecture", phi) => AConn (ANot, [phi]) | (_, phi) => phi)
-
-val parse_problem =
- Scan.repeat parse_include
- |-- Scan.repeat (parse_fof_or_cnf --| Scan.repeat parse_include)
-
-val parse_tptp_problem =
- Scan.finite Symbol.stopper
- (Scan.error (!! (fn _ => raise SYNTAX "malformed TPTP input")
- parse_problem))
- o tptp_explode
-
-val iota_T = @{typ iota}
-val quant_T = (iota_T --> bool_T) --> bool_T
-
-fun is_variable s = Char.isUpper (String.sub (s, 0))
-
-fun hol_term_from_fo_term res_T (ATerm (x, us)) =
- let val ts = map (hol_term_from_fo_term iota_T) us in
- list_comb ((case x of
- "$true" => @{const_name True}
- | "$false" => @{const_name False}
- | "=" => @{const_name HOL.eq}
- | "equal" => @{const_name HOL.eq}
- | _ => x, map fastype_of ts ---> res_T)
- |> (if is_variable x then Free else Const), ts)
- end
-
-fun hol_prop_from_formula phi =
- case phi of
- AQuant (_, [], phi') => hol_prop_from_formula phi'
- | AQuant (q, (x, _) :: xs, phi') =>
- Const (case q of AForall => @{const_name All} | AExists => @{const_name Ex},
- quant_T)
- $ lambda (Free (x, iota_T)) (hol_prop_from_formula (AQuant (q, xs, phi')))
- | AConn (ANot, [u']) => HOLogic.mk_not (hol_prop_from_formula u')
- | AConn (c, [u1, u2]) =>
- pairself hol_prop_from_formula (u1, u2)
- |> (case c of
- AAnd => HOLogic.mk_conj
- | AOr => HOLogic.mk_disj
- | AImplies => HOLogic.mk_imp
- | AIff => HOLogic.mk_eq
- | ANot => raise Fail "binary \"ANot\"")
- | AConn _ => raise Fail "malformed AConn"
- | AAtom u => hol_term_from_fo_term bool_T u
-
-fun mk_all x t = Const (@{const_name All}, quant_T) $ lambda x t
-
-fun close_hol_prop t = fold (mk_all o Free) (Term.add_frees t []) t
-
-fun pick_nits_in_fof_file file_name =
- case parse_tptp_problem (File.read (Path.explode file_name)) of
- (_, s :: ss) => raise SYNTAX ("cannot parse " ^ quote (implode (s :: ss)))
- | (phis, []) =>
- let
- val ts = map (HOLogic.mk_Trueprop o close_hol_prop
- o hol_prop_from_formula) phis
-(*
- val _ = warning (PolyML.makestring phis)
- val _ = app (warning o Syntax.string_of_term @{context}) ts
-*)
- val state = Proof.init @{context}
- val params =
- [("card iota", "1\<emdash>100"),
- ("card", "1\<emdash>8"),
- ("box", "false"),
- ("sat_solver", "smart"),
- ("max_threads", "1"),
- ("batch_size", "10"),
- (* ("debug", "true"), *)
- ("verbose", "true"),
- (* ("overlord", "true"), *)
- ("show_consts", "true"),
- ("format", "1000"),
- ("max_potential", "0"),
- ("timeout", "none"),
- ("expect", genuineN)]
- |> default_params @{theory}
- val i = 1
- val n = 1
- val step = 0
- val subst = []
- in
- pick_nits_in_term state params Normal i n step subst ts @{prop False}
- |> fst
- end
-
-end;
--- a/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs Tue Feb 14 11:16:07 2012 +0100
@@ -1,6 +1,6 @@
module Narrowing_Engine where {
-import Monad;
+import Control.Monad;
import Control.Exception;
import System.IO;
import System.Exit;
--- a/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs Tue Feb 14 11:16:07 2012 +0100
@@ -3,11 +3,12 @@
-}
module Narrowing_Engine where
-import Monad
+import Control.Monad
import Control.Exception
+import System.IO
import System.Exit
-import Maybe
-import List (partition, findIndex)
+import Data.Maybe
+import Data.List (partition, findIndex)
import qualified Generated_Code
@@ -35,7 +36,7 @@
termListOf' i [] = []
termListOf' i (e : es) =
let
- (ts, rs) = List.partition (\e -> head (posOf e) == i) (e : es)
+ (ts, rs) = Data.List.partition (\e -> head (posOf e) == i) (e : es)
t = termOf (pos ++ [i]) (map tailPosEdge ts)
in
(t : termListOf' (i + 1) rs)
--- a/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Tue Feb 14 11:16:07 2012 +0100
@@ -68,7 +68,7 @@
fun mk_none_continuation (x, y) =
let
- val (T as Type(@{type_name "option"}, [T'])) = fastype_of x
+ val (T as Type(@{type_name "option"}, _)) = fastype_of x
in
Const (@{const_name "Quickcheck_Exhaustive.orelse"}, T --> T --> T) $ x $ y
end
@@ -131,12 +131,11 @@
(T, fn t => nth functerms k $ absdummy T t $ size_pred)
end
-fun gen_mk_consexpr test_function functerms simpleT (c, xs) =
+fun gen_mk_consexpr test_function simpleT (c, xs) =
let
val (Ts, fns) = split_list xs
val constr = Const (c, Ts ---> simpleT)
val bounds = map Bound (((length xs) - 1) downto 0)
- val term_bounds = map (fn x => Bound (2 * x)) (((length xs) - 1) downto 0)
val start_term = test_function simpleT $ list_comb (constr, bounds)
in fold_rev (fn f => fn t => f t) fns start_term end
@@ -147,7 +146,7 @@
Const (@{const_name "Quickcheck_Exhaustive.fast_exhaustive_class.fast_exhaustive"},
fast_exhaustiveT T))
val mk_aux_call = gen_mk_aux_call functerms
- val mk_consexpr = gen_mk_consexpr test_function functerms
+ val mk_consexpr = gen_mk_consexpr test_function
fun mk_rhs exprs = @{term "If :: bool => unit => unit => unit"}
$ size_ge_zero $ (foldr1 mk_unit_let exprs) $ @{term "()"}
in
@@ -160,7 +159,7 @@
val mk_call = gen_mk_call (fn T =>
Const (@{const_name "Quickcheck_Exhaustive.exhaustive_class.exhaustive"}, exhaustiveT T))
val mk_aux_call = gen_mk_aux_call functerms
- val mk_consexpr = gen_mk_consexpr test_function functerms
+ val mk_consexpr = gen_mk_consexpr test_function
fun mk_rhs exprs =
mk_if (size_ge_zero, foldr1 mk_none_continuation exprs, Const (@{const_name "None"}, resultT))
in
@@ -174,7 +173,7 @@
Const (@{const_name "Quickcheck_Exhaustive.bounded_forall_class.bounded_forall"},
bounded_forallT T))
val mk_aux_call = gen_mk_aux_call functerms
- val mk_consexpr = gen_mk_consexpr test_function functerms
+ val mk_consexpr = gen_mk_consexpr test_function
fun mk_rhs exprs =
mk_if (size_ge_zero, foldr1 HOLogic.mk_conj exprs, @{term "True"})
in
@@ -207,7 +206,6 @@
val (Ts, fns) = split_list xs
val constr = Const (c, Ts ---> simpleT)
val bounds = map (fn x => Bound (2 * x + 1)) (((length xs) - 1) downto 0)
- val term_bounds = map (fn x => Bound (2 * x)) (((length xs) - 1) downto 0)
val Eval_App = Const ("Code_Evaluation.App", HOLogic.termT --> HOLogic.termT --> HOLogic.termT)
val Eval_Const = Const ("Code_Evaluation.Const", HOLogic.literalT --> @{typ typerep} --> HOLogic.termT)
val term = fold (fn u => fn t => Eval_App $ t $ (u $ @{term "()"}))
@@ -311,12 +309,11 @@
fun mk_fast_generator_expr ctxt (t, eval_terms) =
let
- val thy = Proof_Context.theory_of ctxt
val ctxt' = Variable.auto_fixes t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
fun lookup v = the (AList.lookup (op =) (names ~~ frees) v)
- val ([depth_name], ctxt'') = Variable.variant_fixes ["depth"] ctxt'
+ val ([depth_name], _) = Variable.variant_fixes ["depth"] ctxt'
val depth = Free (depth_name, @{typ code_numeral})
fun return _ = @{term "throw_Counterexample :: term list => unit"} $
(HOLogic.mk_list @{typ "term"}
@@ -340,12 +337,11 @@
fun mk_generator_expr ctxt (t, eval_terms) =
let
- val thy = Proof_Context.theory_of ctxt
val ctxt' = Variable.auto_fixes t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
fun lookup v = the (AList.lookup (op =) (names ~~ frees) v)
- val ([depth_name, genuine_only_name], ctxt'') =
+ val ([depth_name, genuine_only_name], _) =
Variable.variant_fixes ["depth", "genuine_only"] ctxt'
val depth = Free (depth_name, @{typ code_numeral})
val genuine_only = Free (genuine_only_name, @{typ bool})
@@ -367,15 +363,15 @@
val frees = map Free (Term.add_frees t [])
val ([depth_name, genuine_only_name], ctxt'') =
Variable.variant_fixes ["depth", "genuine_only"] ctxt'
- val (term_names, ctxt''') = Variable.variant_fixes (map (prefix "t_") names) ctxt''
+ val (term_names, _) = Variable.variant_fixes (map (prefix "t_") names) ctxt''
val depth = Free (depth_name, @{typ code_numeral})
- val genuine_only = Free (depth_name, @{typ bool})
+ val genuine_only = Free (genuine_only_name, @{typ bool})
val term_vars = map (fn n => Free (n, @{typ "unit => term"})) term_names
fun lookup v = the (AList.lookup (op =) (names ~~ (frees ~~ term_vars)) v)
val return = mk_return (HOLogic.mk_list @{typ term}
(map (fn v => v $ @{term "()"}) term_vars @ map mk_safe_term eval_terms))
fun mk_exhaustive_closure (free as Free (_, T), term_var) t =
- if Sign.of_sort thy (T, @{sort enum}) then
+ if Sign.of_sort thy (T, @{sort check_all}) then
Const (@{const_name "Quickcheck_Exhaustive.check_all_class.check_all"}, check_allT T)
$ (HOLogic.split_const (T, @{typ "unit => term"}, resultT)
$ lambda free (lambda term_var t))
@@ -397,12 +393,11 @@
fun mk_validator_expr ctxt t =
let
fun bounded_forallT T = (T --> @{typ bool}) --> @{typ code_numeral} --> @{typ bool}
- val thy = Proof_Context.theory_of ctxt
val ctxt' = Variable.auto_fixes t ctxt
val names = Term.add_free_names t []
val frees = map Free (Term.add_frees t [])
fun lookup v = the (AList.lookup (op =) (names ~~ frees) v)
- val ([depth_name], ctxt'') = Variable.variant_fixes ["depth"] ctxt'
+ val ([depth_name], _) = Variable.variant_fixes ["depth"] ctxt'
val depth = Free (depth_name, @{typ code_numeral})
fun mk_bounded_forall (Free (s, T)) t =
Const (@{const_name "Quickcheck_Exhaustive.bounded_forall_class.bounded_forall"}, bounded_forallT T)
@@ -500,7 +495,10 @@
thy (SOME target) (K I) (HOLogic.mk_list @{typ "code_numeral => bool"} ts') []
end;
-val test_goals = Quickcheck_Common.generator_test_goal_terms ("exhaustive", compile_generator_expr);
+fun size_matters_for thy Ts = not (forall (fn T => Sign.of_sort thy (T, @{sort check_all})) Ts)
+
+val test_goals =
+ Quickcheck_Common.generator_test_goal_terms ("exhaustive", (size_matters_for, compile_generator_expr));
(* setup *)
--- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML Tue Feb 14 11:16:07 2012 +0100
@@ -10,7 +10,6 @@
val finite_functions : bool Config.T
val overlord : bool Config.T
val active : bool Config.T
- val test_term: Proof.context -> term * term list -> Quickcheck.result
datatype counterexample = Universal_Counterexample of (term * counterexample)
| Existential_Counterexample of (term * counterexample) list
| Empty_Assignment
@@ -240,7 +239,8 @@
val narrowing_engine_file = Path.append in_path (Path.basic "Narrowing_Engine.hs")
val main_file = Path.append in_path (Path.basic "Main.hs")
val main = "module Main where {\n\n" ^
- "import System;\n" ^
+ "import System.IO;\n" ^
+ "import System.Environment;\n" ^
"import Narrowing_Engine;\n" ^
"import Generated_Code;\n\n" ^
"main = getArgs >>= \\[potential, size] -> " ^
@@ -389,7 +389,8 @@
map (swap o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of)
(@{thms all_simps} @ @{thms ex_simps})
@ map (HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of)
- [@{thm iff_conv_conj_imp}, @{thm not_ex}, @{thm not_all}]
+ [@{thm iff_conv_conj_imp}, @{thm not_ex}, @{thm not_all},
+ @{thm meta_eq_to_obj_eq [OF Ex1_def]}]
fun make_pnf_term thy t = Pattern.rewrite_term thy rewrs [] t
@@ -430,7 +431,7 @@
|> map (apsnd post_process)
end
-fun test_term ctxt (t, eval_terms) =
+fun test_term ctxt catch_code_errors (t, eval_terms) =
let
fun dest_result (Quickcheck.Result r) = r
val opts =
@@ -448,15 +449,20 @@
apfst (map2 pair qs1) (f (qs2, t)) end
val finitize = if Config.get ctxt finite_functions then wrap finitize_functions else I
val (qs, prop_t) = finitize (strip_quantifiers pnf_t)
- val (counterexample, result) = dynamic_value_strict (true, opts)
+ val act = if catch_code_errors then try else (fn f => SOME o f)
+ val execute = dynamic_value_strict (true, opts)
((K true, fn _ => error ""), (Existential_Counterexample.get, Existential_Counterexample.put,
"Narrowing_Generators.put_existential_counterexample"))
- thy (apfst o Option.map o map_counterexample) (mk_property qs prop_t)
- in
- Quickcheck.Result
- {counterexample = Option.map (pair true o mk_terms ctxt qs) counterexample,
- evaluation_terms = Option.map (K []) counterexample,
- timings = #timings (dest_result result), reports = #reports (dest_result result)}
+ thy (apfst o Option.map o map_counterexample)
+ in
+ case act execute (mk_property qs prop_t) of
+ SOME (counterexample, result) => Quickcheck.Result
+ {counterexample = Option.map (pair true o mk_terms ctxt qs) counterexample,
+ evaluation_terms = Option.map (K []) counterexample,
+ timings = #timings (dest_result result), reports = #reports (dest_result result)}
+ | NONE =>
+ (Quickcheck.message ctxt ("Conjecture is not executable with Quickcheck-narrowing");
+ Quickcheck.empty_result)
end
else
let
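[Editorial sketch, not part of the changeset] The catch_code_errors plumbing above hinges on the combinator bound to act: when errors are to be caught, a failing evaluation yields NONE and Quickcheck falls back to the "not executable" message instead of aborting. A plain Standard ML approximation (Isabelle/ML's own try is more careful, e.g. about interrupts):

    fun try f x = SOME (f x) handle _ => NONE   (* swallow evaluation failures *)
    fun dont_catch f x = SOME (f x)             (* equivalent to (fn f => SOME o f) *)

    (* test_term selects one of the two: *)
    fun act catch_code_errors = if catch_code_errors then try else dont_catch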
@@ -469,25 +475,33 @@
fun is_genuine (SOME (true, _)) = true
| is_genuine _ = false
val counterexample_of = Option.map (apsnd (curry (op ~~) (map fst frees) o map post_process))
- val (counterexample, result) = dynamic_value_strict (false, opts)
+ val act = if catch_code_errors then try else (fn f => SOME o f)
+ val execute = dynamic_value_strict (false, opts)
((is_genuine, counterexample_of),
(Counterexample.get, Counterexample.put, "Narrowing_Generators.put_counterexample"))
- thy (apfst o Option.map o apsnd o map) (ensure_testable (finitize t'))
+ thy (apfst o Option.map o apsnd o map)
in
- Quickcheck.Result
- {counterexample = counterexample_of counterexample,
- evaluation_terms = Option.map (K []) counterexample,
- timings = #timings (dest_result result), reports = #reports (dest_result result)}
+ case act execute (ensure_testable (finitize t')) of
+ SOME (counterexample, result) =>
+ Quickcheck.Result
+ {counterexample = counterexample_of counterexample,
+ evaluation_terms = Option.map (K []) counterexample,
+ timings = #timings (dest_result result),
+ reports = #reports (dest_result result)}
+ | NONE =>
+ (Quickcheck.message ctxt ("Conjecture is not executable with Quickcheck-narrowing");
+ Quickcheck.empty_result)
end
end;
-fun test_goals ctxt _ insts goals =
+fun test_goals ctxt catch_code_errors insts goals =
if (not (getenv "ISABELLE_GHC" = "")) then
let
val _ = Quickcheck.message ctxt ("Testing conjecture with Quickcheck-narrowing...")
val correct_inst_goals = Quickcheck_Common.instantiate_goals ctxt insts goals
in
- Quickcheck_Common.collect_results (test_term ctxt) (maps (map snd) correct_inst_goals) []
+ Quickcheck_Common.collect_results (test_term ctxt catch_code_errors)
+ (maps (map snd) correct_inst_goals) []
end
else
(if Config.get ctxt Quickcheck.quiet then () else Output.urgent_message
--- a/src/HOL/Tools/Quickcheck/quickcheck_common.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quickcheck/quickcheck_common.ML Tue Feb 14 11:16:07 2012 +0100
@@ -16,11 +16,12 @@
-> (typ option * (term * term list)) list list
val mk_safe_if : term -> term -> term * term * (bool -> term) -> bool -> term
val collect_results : ('a -> Quickcheck.result) -> 'a list -> Quickcheck.result list -> Quickcheck.result list
- type compile_generator =
- Proof.context -> (term * term list) list -> bool -> int list -> (bool * term list) option * Quickcheck.report option
+ type result = (bool * term list) option * Quickcheck.report option
+ type generator = string * ((theory -> typ list -> bool) *
+ (Proof.context -> (term * term list) list -> bool -> int list -> result))
val generator_test_goal_terms :
- string * compile_generator -> Proof.context -> bool -> (string * typ) list
- -> (term * term list) list -> Quickcheck.result list
+ generator -> Proof.context -> bool -> (string * typ) list
+ -> (term * term list) list -> Quickcheck.result list
type instantiation = Datatype.config -> Datatype.descr -> (string * sort) list
-> string list -> string -> string list * string list -> typ list * typ list -> theory -> theory
val ensure_sort :
@@ -36,8 +37,7 @@
-> Proof.context -> (term * term list) list -> term
val mk_fun_upd : typ -> typ -> term * term -> term -> term
val post_process_term : term -> term
- val test_term : string * compile_generator
- -> Proof.context -> bool -> term * term list -> Quickcheck.result
+ val test_term : generator -> Proof.context -> bool -> term * term list -> Quickcheck.result
end;
structure Quickcheck_Common : QUICKCHECK_COMMON =
@@ -58,8 +58,9 @@
(* testing functions: testing with increasing sizes (and cardinalities) *)
-type compile_generator =
- Proof.context -> (term * term list) list -> bool -> int list -> (bool * term list) option * Quickcheck.report option
+type result = (bool * term list) option * Quickcheck.report option
+type generator = string * ((theory -> typ list -> bool) *
+ (Proof.context -> (term * term list) list -> bool -> int list -> result))
fun check_test_term t =
let
@@ -73,7 +74,7 @@
let val ({cpu, ...}, result) = Timing.timing e ()
in (result, (description, Time.toMilliseconds cpu)) end
-fun test_term (name, compile) ctxt catch_code_errors (t, eval_terms) =
+fun test_term (name, (_, compile)) ctxt catch_code_errors (t, eval_terms) =
let
val genuine_only = Config.get ctxt Quickcheck.genuine_only
val _ = check_test_term t
@@ -165,7 +166,7 @@
[comp_time, exec_time])
end
-fun test_term_with_cardinality (name, compile) ctxt catch_code_errors ts =
+fun test_term_with_cardinality (name, (size_matters_for, compile)) ctxt catch_code_errors ts =
let
val genuine_only = Config.get ctxt Quickcheck.genuine_only
val thy = Proof_Context.theory_of ctxt
@@ -189,13 +190,11 @@
Option.map (pair (card, size)) ts
end
val enumeration_card_size =
- if forall (fn T => Sign.of_sort thy (T, ["Enum.enum"])) Ts then
- (* size does not matter *)
- map (rpair 0) (1 upto (length ts))
- else
- (* size does matter *)
+ if size_matters_for thy Ts then
map_product pair (1 upto (length ts)) (1 upto (Config.get ctxt Quickcheck.size))
|> sort (fn ((c1, s1), (c2, s2)) => int_ord ((c1 + s1), (c2 + s2)))
+ else
+ map (rpair 0) (1 upto (length ts))
val act = if catch_code_errors then try else (fn f => SOME o f)
val (test_fun, comp_time) = cpu_time "quickcheck compilation" (fn () => act (compile ctxt) ts)
val _ = Quickcheck.add_timing comp_time current_result
@@ -262,7 +261,8 @@
val thy = Proof_Context.theory_of ctxt
val dest = HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of
val rewrs = map (swap o dest) @{thms all_simps} @
- (map dest [@{thm not_ex}, @{thm not_all}, @{thm imp_conjL}])
+ (map dest [@{thm not_ex}, @{thm not_all}, @{thm imp_conjL}, @{thm fun_eq_iff},
+ @{thm bot_fun_def}, @{thm less_bool_def}])
val t' = Pattern.rewrite_term thy rewrs [] (Object_Logic.atomize_term thy t)
val (vs, body) = strip_all t'
val vs' = Variable.variant_frees ctxt [t'] vs
@@ -324,7 +324,7 @@
collect_results f ts (result :: results)
end
-fun generator_test_goal_terms (name, compile) ctxt catch_code_errors insts goals =
+fun generator_test_goal_terms generator ctxt catch_code_errors insts goals =
let
fun add_eval_term t ts = if is_Free t then ts else ts @ [t]
fun add_equation_eval_terms (t, eval_terms) =
@@ -333,15 +333,15 @@
| NONE => (t, eval_terms)
fun test_term' goal =
case goal of
- [(NONE, t)] => test_term (name, compile) ctxt catch_code_errors t
- | ts => test_term_with_cardinality (name, compile) ctxt catch_code_errors (map snd ts)
+ [(NONE, t)] => test_term generator ctxt catch_code_errors t
+ | ts => test_term_with_cardinality generator ctxt catch_code_errors (map snd ts)
val goals' = instantiate_goals ctxt insts goals
|> map (map (apsnd add_equation_eval_terms))
in
if Config.get ctxt Quickcheck.finite_types then
collect_results test_term' goals' []
else
- collect_results (test_term (name, compile) ctxt catch_code_errors)
+ collect_results (test_term generator ctxt catch_code_errors)
(maps (map snd) goals') []
end;
--- a/src/HOL/Tools/Quickcheck/random_generators.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quickcheck/random_generators.ML Tue Feb 14 11:16:07 2012 +0100
@@ -455,7 +455,11 @@
end
end;
-val test_goals = Quickcheck_Common.generator_test_goal_terms ("random", compile_generator_expr);
+fun size_matters_for _ Ts = not (forall
+ (fn Type (tyco, []) => is_some (try (unprefix "Enum.finite") tyco) | _ => false) Ts)
+
+val test_goals =
+ Quickcheck_Common.generator_test_goal_terms ("random", (size_matters_for, compile_generator_expr));
(** setup **)
--- a/src/HOL/Tools/Quotient/quotient_term.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Quotient/quotient_term.ML Tue Feb 14 11:16:07 2012 +0100
@@ -728,7 +728,7 @@
| matches ((rty, qty)::tail) =
(case try (Sign.typ_match thy (rty, rty')) Vartab.empty of
NONE => matches tail
- | SOME inst => Envir.subst_type inst qty)
+ | SOME inst => subst_typ ctxt ty_subst (Envir.subst_type inst qty))
in
matches ty_subst
end
@@ -749,7 +749,7 @@
| matches ((rconst, qconst)::tail) =
(case try (Pattern.match thy (rconst, rtrm)) (Vartab.empty, Vartab.empty) of
NONE => matches tail
- | SOME inst => Envir.subst_term inst qconst)
+ | SOME inst => subst_trm ctxt ty_subst trm_subst (Envir.subst_term inst qconst))
in
matches trm_subst
end
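[Editorial sketch, not part of the changeset] The two quotient_term.ML fixes above apply subst_typ/subst_trm once more to the instantiated result, so raw types and constants occurring inside that result are translated as well, not just the outermost match. A caricature of that repeated application in plain Standard ML, assuming a terminating rule set and plain equality instead of matching:

    (* keep rewriting until no rule applies to the current term *)
    fun subst rules t =
      (case List.find (fn (lhs, _) => lhs = t) rules of
        NONE => t
      | SOME (_, rhs) => subst rules rhs)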
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_filter.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_filter.ML Tue Feb 14 11:16:07 2012 +0100
@@ -7,7 +7,8 @@
signature SLEDGEHAMMER_FILTER =
sig
- type locality = ATP_Translate.locality
+ type status = ATP_Problem_Generate.status
+ type stature = ATP_Problem_Generate.stature
type relevance_fudge =
{local_const_multiplier : real,
@@ -42,27 +43,28 @@
val const_names_in_fact :
theory -> (string * typ -> term list -> bool * term list) -> term
-> string list
+ val clasimpset_rule_table_of : Proof.context -> status Termtab.table
val fact_from_ref :
- Proof.context -> unit Symtab.table -> thm list
- -> Facts.ref * Attrib.src list -> ((string * locality) * thm) list
+ Proof.context -> unit Symtab.table -> thm list -> status Termtab.table
+ -> Facts.ref * Attrib.src list -> ((string * stature) * thm) list
val all_facts :
Proof.context -> bool -> 'a Symtab.table -> bool -> thm list -> thm list
- -> (((unit -> string) * locality) * thm) list
+ -> (((unit -> string) * stature) * thm) list
val nearly_all_facts :
Proof.context -> bool -> relevance_override -> thm list -> term list -> term
- -> (((unit -> string) * locality) * thm) list
+ -> (((unit -> string) * stature) * thm) list
val relevant_facts :
Proof.context -> real * real -> int
-> (string * typ -> term list -> bool * term list) -> relevance_fudge
-> relevance_override -> thm list -> term list -> term
- -> (((unit -> string) * locality) * thm) list
- -> ((string * locality) * thm) list
+ -> (((unit -> string) * stature) * thm) list
+ -> ((string * stature) * thm) list
end;
structure Sledgehammer_Filter : SLEDGEHAMMER_FILTER =
struct
-open ATP_Translate
+open ATP_Problem_Generate
open Metis_Tactic
open Sledgehammer_Util
@@ -109,6 +111,39 @@
val skolem_prefix = sledgehammer_prefix ^ "sko"
val theory_const_suffix = Long_Name.separator ^ " 1"
+val crude_zero_vars =
+ map_aterms (fn Var ((s, _), T) => Var ((s, 0), T) | t => t)
+ #> map_types (map_type_tvar (fn ((s, _), S) => TVar ((s, 0), S)))
+
+(* unfolding these can yield really huge terms *)
+val risky_spec_eqs = @{thms Bit0_def Bit1_def}
+
+fun clasimpset_rule_table_of ctxt =
+ let
+ val thy = Proof_Context.theory_of ctxt
+ val atomize = HOLogic.mk_Trueprop o Object_Logic.atomize_term thy
+ fun add stature g f =
+ fold (Termtab.update o rpair stature o g o crude_zero_vars o prop_of o f)
+ val {safeIs, safeEs, hazIs, hazEs, ...} =
+ ctxt |> claset_of |> Classical.rep_cs
+ val intros = Item_Net.content safeIs @ Item_Net.content hazIs
+ val elims =
+ Item_Net.content safeEs @ Item_Net.content hazEs
+ |> map Classical.classical_rule
+ val simps = ctxt |> simpset_of |> dest_ss |> #simps
+ val spec_eqs =
+ ctxt |> Spec_Rules.get
+ |> filter (curry (op =) Spec_Rules.Equational o fst)
+ |> maps (snd o snd)
+ |> filter_out (member Thm.eq_thm_prop risky_spec_eqs)
+ in
+ Termtab.empty |> add Simp I snd simps
+ |> add Simp atomize snd simps
+ |> add Spec_Eq I I spec_eqs
+ |> add Elim I I elims
+ |> add Intro I I intros
+ end
+
fun needs_quoting reserved s =
Symtab.defined reserved s orelse
exists (not o Lexicon.is_identifier) (Long_Name.explode s)
@@ -123,7 +158,29 @@
val backquote =
raw_explode #> map (fn "`" => "\\`" | s => s) #> implode #> enclose "`" "`"
-fun fact_from_ref ctxt reserved chained_ths (xthm as (xref, args)) =
+
+fun is_assum assms th = exists (fn ct => prop_of th aconv term_of ct) assms
+fun is_chained chained_ths = member Thm.eq_thm_prop chained_ths
+
+fun scope_of_thm global assms chained_ths th =
+ if is_chained chained_ths th then Chained
+ else if global then Global
+ else if is_assum assms th then Assum
+ else Local
+
+fun status_of_thm css_table name th =
+ (* FIXME: use structured name *)
+ if String.isSubstring ".induct" name orelse
+ String.isSubstring ".inducts" name then
+ Induct
+ else case Termtab.lookup css_table (prop_of th) of
+ SOME status => status
+ | NONE => General
+
+fun stature_of_thm global assms chained_ths css_table name th =
+ (scope_of_thm global assms chained_ths th, status_of_thm css_table name th)
+
+fun fact_from_ref ctxt reserved chained_ths css_table (xthm as (xref, args)) =
let
val ths = Attrib.eval_thms ctxt [xthm]
val bracket =
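[Editorial sketch, not part of the changeset] The locality ~> stature renaming above splits the old flat classification into two independent dimensions, which scope_of_thm and status_of_thm compute separately and stature_of_thm pairs up. Roughly, with the constructor names as they appear in this patch (the actual declarations live in ATP_Problem_Generate):

    datatype scope = Global | Local | Assum | Chained
    datatype status = General | Induct | Intro | Elim | Simp | Spec_Eq
    type stature = scope * status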
@@ -142,10 +199,10 @@
in
(ths, (0, []))
|-> fold (fn th => fn (j, rest) =>
- (j + 1,
- ((nth_name j,
- if member Thm.eq_thm_prop chained_ths th then Chained
- else General), th) :: rest))
+ let val name = nth_name j in
+ (j + 1, ((name, stature_of_thm false [] chained_ths
+ css_table name th), th) :: rest)
+ end)
|> snd
end
@@ -190,7 +247,7 @@
NONE
| _ => NONE
-fun instantiate_induct_rule ctxt concl_prop p_name ((name, loc), th) ind_x =
+fun instantiate_induct_rule ctxt concl_prop p_name ((name, stature), th) ind_x =
let
fun varify_noninducts (t as Free (s, T)) =
if (s, T) = ind_x orelse can dest_funT T then t else Var ((s, 0), T)
@@ -200,8 +257,8 @@
|> lambda (Free ind_x)
|> string_for_term ctxt
in
- ((fn () => name () ^ "[where " ^ p_name ^ " = " ^ quote p_inst ^ "]", loc),
- th |> read_instantiate ctxt [((p_name, 0), p_inst)])
+ ((fn () => name () ^ "[where " ^ p_name ^ " = " ^ quote p_inst ^ "]",
+ stature), th |> read_instantiate ctxt [((p_name, 0), p_inst)])
end
fun type_match thy (T1, T2) =
@@ -467,19 +524,21 @@
chained_const_irrel_weight (irrel_weight_for fudge) swap
const_tab chained_const_tab
-fun locality_bonus ({intro_bonus, ...} : relevance_fudge) Intro = intro_bonus
- | locality_bonus {elim_bonus, ...} Elim = elim_bonus
- | locality_bonus {simp_bonus, ...} Simp = simp_bonus
- | locality_bonus {local_bonus, ...} Local = local_bonus
- | locality_bonus {assum_bonus, ...} Assum = assum_bonus
- | locality_bonus {chained_bonus, ...} Chained = chained_bonus
- | locality_bonus _ _ = 0.0
+fun stature_bonus ({intro_bonus, ...} : relevance_fudge) (_, Intro) =
+ intro_bonus
+ | stature_bonus {elim_bonus, ...} (_, Elim) = elim_bonus
+ | stature_bonus {simp_bonus, ...} (_, Simp) = simp_bonus
+ | stature_bonus {local_bonus, ...} (Local, _) = local_bonus
+ | stature_bonus {assum_bonus, ...} (Assum, _) = assum_bonus
+ | stature_bonus {chained_bonus, ...} (Chained, _) = chained_bonus
+ | stature_bonus _ _ = 0.0
fun is_odd_const_name s =
s = abs_name orelse String.isPrefix skolem_prefix s orelse
String.isSuffix theory_const_suffix s
-fun fact_weight fudge loc const_tab relevant_consts chained_consts fact_consts =
+fun fact_weight fudge stature const_tab relevant_consts chained_consts
+ fact_consts =
case fact_consts |> List.partition (pconst_hyper_mem I relevant_consts)
||> filter_out (pconst_hyper_mem swap relevant_consts) of
([], _) => 0.0
@@ -492,7 +551,7 @@
val rel_weight =
0.0 |> fold (curry (op +) o rel_pconst_weight fudge const_tab) rel
val irrel_weight =
- ~ (locality_bonus fudge loc)
+ ~ (stature_bonus fudge stature)
|> fold (curry (op +)
o irrel_pconst_weight fudge const_tab chained_consts) irrel
val res = rel_weight / (rel_weight + irrel_weight)
@@ -512,7 +571,7 @@
val const_names_in_fact = map fst ooo pconsts_in_fact
type annotated_thm =
- (((unit -> string) * locality) * thm) * (string * ptype) list
+ (((unit -> string) * stature) * thm) * (string * ptype) list
fun take_most_relevant ctxt max_relevant remaining_max
({max_imperfect, max_imperfect_exp, ...} : relevance_fudge)
@@ -537,13 +596,12 @@
(accepts, more_rejects @ rejects)
end
-fun if_empty_replace_with_locality thy is_built_in_const facts loc tab =
+fun if_empty_replace_with_scope thy is_built_in_const facts sc tab =
if Symtab.is_empty tab then
Symtab.empty
|> fold (add_pconsts_in_term thy is_built_in_const false (SOME false))
- (map_filter (fn ((_, loc'), th) =>
- if loc' = loc then SOME (prop_of th) else NONE)
- facts)
+ (map_filter (fn ((_, (sc', _)), th) =>
+ if sc' = sc then SOME (prop_of th) else NONE) facts)
else
tab
@@ -584,7 +642,7 @@
Symtab.empty |> fold (add_pconsts true) hyp_ts
|> add_pconsts false concl_t
|> (fn tab => if Symtab.is_empty tab then chained_const_tab else tab)
- |> fold (if_empty_replace_with_locality thy is_built_in_const facts)
+ |> fold (if_empty_replace_with_scope thy is_built_in_const facts)
[Chained, Assum, Local]
val add_ths = Attrib.eval_thms ctxt add
val del_ths = Attrib.eval_thms ctxt del
@@ -637,13 +695,13 @@
hopeless_rejects hopeful_rejects)
end
| relevant candidates rejects
- (((ax as (((_, loc), _), fact_consts)), cached_weight)
+ (((ax as (((_, stature), _), fact_consts)), cached_weight)
:: hopeful) =
let
val weight =
case cached_weight of
SOME w => w
- | NONE => fact_weight fudge loc const_tab rel_const_tab
+ | NONE => fact_weight fudge stature const_tab rel_const_tab
chained_const_tab fact_consts
in
if weight >= threshold then
@@ -775,30 +833,6 @@
is_that_fact thm
end)
-val crude_zero_vars =
- map_aterms (fn Var ((s, _), T) => Var ((s, 0), T) | t => t)
- #> map_types (map_type_tvar (fn ((s, _), S) => TVar ((s, 0), S)))
-
-fun clasimpset_rule_table_of ctxt =
- let
- val thy = Proof_Context.theory_of ctxt
- val atomize = HOLogic.mk_Trueprop o Object_Logic.atomize_term thy
- fun add loc g f =
- fold (Termtab.update o rpair loc o g o crude_zero_vars o prop_of o f)
- val {safeIs, safeEs, hazIs, hazEs, ...} =
- ctxt |> claset_of |> Classical.rep_cs
- val intros = Item_Net.content safeIs @ Item_Net.content hazIs
- val elims =
- Item_Net.content safeEs @ Item_Net.content hazEs
- |> map Classical.classical_rule
- val simps = ctxt |> simpset_of |> dest_ss |> #simps
- in
- Termtab.empty |> add Intro I I intros
- |> add Elim I I elims
- |> add Simp I snd simps
- |> add Simp atomize snd simps
- end
-
fun all_facts ctxt ho_atp reserved exporter add_ths chained_ths =
let
val thy = Proof_Context.theory_of ctxt
@@ -806,23 +840,7 @@
val local_facts = Proof_Context.facts_of ctxt
val named_locals = local_facts |> Facts.dest_static []
val assms = Assumption.all_assms_of ctxt
- fun is_assum th = exists (fn ct => prop_of th aconv term_of ct) assms
- val is_chained = member Thm.eq_thm_prop chained_ths
- val clasimpset_table = clasimpset_rule_table_of ctxt
- fun locality_of_theorem global name th =
- if String.isSubstring ".induct" name orelse (*FIXME: use structured name*)
- String.isSubstring ".inducts" name then
- Induction
- else if is_chained th then
- Chained
- else if global then
- case Termtab.lookup clasimpset_table (prop_of th) of
- SOME loc => loc
- | NONE => General
- else if is_assum th then
- Assum
- else
- Local
+ val css_table = clasimpset_rule_table_of ctxt
fun is_good_unnamed_local th =
not (Thm.has_name_hint th) andalso
forall (fn (_, ths) => not (member Thm.eq_thm_prop ths th)) named_locals
@@ -872,7 +890,8 @@
|> (fn SOME name =>
make_name reserved multi j name
| NONE => "")),
- locality_of_theorem global name0 th), th)
+ stature_of_thm global assms chained_ths
+ css_table name0 th), th)
in
if multi then (new :: multis, unis)
else (multis, new :: unis)
@@ -900,10 +919,11 @@
Logic.list_implies (hyp_ts |> filter_out (null o external_frees), concl_t)
|> Object_Logic.atomize_term thy
val ind_stmt_xs = external_frees ind_stmt
+ val css_table = clasimpset_rule_table_of ctxt
in
(if only then
- maps (map (fn ((name, loc), th) => ((K name, loc), th))
- o fact_from_ref ctxt reserved chained_ths) add
+ maps (map (fn ((name, stature), th) => ((K name, stature), th))
+ o fact_from_ref ctxt reserved chained_ths css_table) add
else
all_facts ctxt ho_atp reserved false add_ths chained_ths)
|> Config.get ctxt instantiate_inducts
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Tue Feb 14 11:16:07 2012 +0100
@@ -21,8 +21,8 @@
open ATP_Util
open ATP_Systems
-open ATP_Translate
-open ATP_Reconstruct
+open ATP_Problem_Generate
+open ATP_Proof_Reconstruct
open Sledgehammer_Util
open Sledgehammer_Filter
open Sledgehammer_Provers
@@ -87,8 +87,9 @@
("overlord", "false"),
("blocking", "false"),
("type_enc", "smart"),
- ("sound", "false"),
+ ("strict", "false"),
("lam_trans", "smart"),
+ ("uncurried_aliases", "smart"),
("relevance_thresholds", "0.45 0.85"),
("max_relevant", "smart"),
("max_mono_iters", "3"),
@@ -97,7 +98,7 @@
("isar_shrink_factor", "1"),
("slice", "true"),
("minimize", "smart"),
- ("preplay_timeout", "4")]
+ ("preplay_timeout", "3")]
val alias_params =
[("prover", "provers")]
@@ -106,15 +107,16 @@
("quiet", "verbose"),
("no_overlord", "overlord"),
("non_blocking", "blocking"),
- ("unsound", "sound"),
+ ("non_strict", "strict"),
+ ("no_uncurried_aliases", "uncurried_aliases"),
("no_isar_proof", "isar_proof"),
("dont_slice", "slice"),
("dont_minimize", "minimize")]
val params_for_minimize =
- ["debug", "verbose", "overlord", "type_enc", "sound", "lam_trans",
- "max_mono_iters", "max_new_mono_instances", "isar_proof",
- "isar_shrink_factor", "timeout", "preplay_timeout"]
+ ["debug", "verbose", "overlord", "type_enc", "strict", "lam_trans",
+ "uncurried_aliases", "max_mono_iters", "max_new_mono_instances",
+ "isar_proof", "isar_shrink_factor", "timeout", "preplay_timeout"]
val property_dependent_params = ["provers", "timeout"]
@@ -141,7 +143,7 @@
| _ => value)
| NONE => (name, value)
-val any_type_enc = type_enc_from_string Sound "erased"
+val any_type_enc = type_enc_from_string Strict "erased"
(* "provers =", "type_enc =", and "lam_trans" can be omitted. For the last two,
this is a secret feature. *)
@@ -152,7 +154,7 @@
(name, value)
else if is_prover_list ctxt name andalso null value then
("provers", [name])
- else if can (type_enc_from_string Sound) name andalso null value then
+ else if can (type_enc_from_string Strict) name andalso null value then
("type_enc", [name])
else if can (trans_lams_from_string ctxt any_type_enc) name andalso
null value then
@@ -161,7 +163,7 @@
error ("Unknown parameter: " ^ quote name ^ "."))
-(* Ensures that type encodings such as "mono_simple?" and "poly_guards!!" are
+(* Ensures that type encodings such as "mono_native?" and "poly_guards!!" are
read correctly. *)
val implode_param = strip_spaces_except_between_idents o space_implode " "
@@ -210,7 +212,8 @@
(* The first ATP of the list is used by Auto Sledgehammer. Because of the low
timeout, it makes sense to put SPASS first. *)
fun default_provers_param_value ctxt =
- [spassN, eN, vampireN, SMT_Solver.solver_name_of ctxt, e_sineN, waldmeisterN]
+ [spassN, spass_newN, eN, vampireN, SMT_Solver.solver_name_of ctxt, e_sineN,
+ waldmeisterN]
|> map_filter (remotify_prover_if_not_installed ctxt)
|> avoid_too_many_threads ctxt (Multithreading.max_threads_value (),
max_default_remote_threads)
@@ -282,9 +285,10 @@
NONE
else case lookup_string "type_enc" of
"smart" => NONE
- | s => (type_enc_from_string Sound s; SOME s)
- val sound = mode = Auto_Try orelse lookup_bool "sound"
+ | s => (type_enc_from_string Strict s; SOME s)
+ val strict = mode = Auto_Try orelse lookup_bool "strict"
val lam_trans = lookup_option lookup_string "lam_trans"
+ val uncurried_aliases = lookup_option lookup_bool "uncurried_aliases"
val relevance_thresholds = lookup_real_pair "relevance_thresholds"
val max_relevant = lookup_option lookup_int "max_relevant"
val max_mono_iters = lookup_int "max_mono_iters"
@@ -302,9 +306,10 @@
val expect = lookup_string "expect"
in
{debug = debug, verbose = verbose, overlord = overlord, blocking = blocking,
- provers = provers, type_enc = type_enc, sound = sound,
- lam_trans = lam_trans, relevance_thresholds = relevance_thresholds,
- max_relevant = max_relevant, max_mono_iters = max_mono_iters,
+ provers = provers, type_enc = type_enc, strict = strict,
+ lam_trans = lam_trans, uncurried_aliases = uncurried_aliases,
+ relevance_thresholds = relevance_thresholds, max_relevant = max_relevant,
+ max_mono_iters = max_mono_iters,
max_new_mono_instances = max_new_mono_instances, isar_proof = isar_proof,
isar_shrink_factor = isar_shrink_factor, slice = slice,
minimize = minimize, timeout = timeout, preplay_timeout = preplay_timeout,
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_minimize.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_minimize.ML Tue Feb 14 11:16:07 2012 +0100
@@ -7,15 +7,15 @@
signature SLEDGEHAMMER_MINIMIZE =
sig
- type locality = ATP_Translate.locality
- type play = ATP_Reconstruct.play
+ type stature = ATP_Problem_Generate.stature
+ type play = ATP_Proof_Reconstruct.play
type params = Sledgehammer_Provers.params
val binary_min_facts : int Config.T
val minimize_facts :
string -> params -> bool -> int -> int -> Proof.state
- -> ((string * locality) * thm list) list
- -> ((string * locality) * thm list) list option
+ -> ((string * stature) * thm list) list
+ -> ((string * stature) * thm list) list option
* ((unit -> play) * (play -> string) * string)
val run_minimize :
params -> int -> (Facts.ref * Attrib.src list) list -> Proof.state -> unit
@@ -26,8 +26,8 @@
open ATP_Util
open ATP_Proof
-open ATP_Translate
-open ATP_Reconstruct
+open ATP_Problem_Generate
+open ATP_Proof_Reconstruct
open Sledgehammer_Util
open Sledgehammer_Filter
open Sledgehammer_Provers
@@ -47,8 +47,9 @@
fun print silent f = if silent then () else Output.urgent_message (f ())
fun test_facts ({debug, verbose, overlord, provers, max_mono_iters,
- max_new_mono_instances, type_enc, lam_trans, isar_proof,
- isar_shrink_factor, preplay_timeout, ...} : params)
+ max_new_mono_instances, type_enc, strict, lam_trans,
+ uncurried_aliases, isar_proof, isar_shrink_factor,
+ preplay_timeout, ...} : params)
silent (prover : prover) timeout i n state facts =
let
val _ =
@@ -61,9 +62,10 @@
facts |> maps (fn (n, ths) => ths |> map (Untranslated_Fact o pair n))
val params =
{debug = debug, verbose = verbose, overlord = overlord, blocking = true,
- provers = provers, type_enc = type_enc, sound = true,
- lam_trans = lam_trans, relevance_thresholds = (1.01, 1.01),
- max_relevant = SOME (length facts), max_mono_iters = max_mono_iters,
+ provers = provers, type_enc = type_enc, strict = strict,
+ lam_trans = lam_trans, uncurried_aliases = uncurried_aliases,
+ relevance_thresholds = (1.01, 1.01), max_relevant = SOME (length facts),
+ max_mono_iters = max_mono_iters,
max_new_mono_instances = max_new_mono_instances, isar_proof = isar_proof,
isar_shrink_factor = isar_shrink_factor, slice = false,
minimize = SOME false, timeout = timeout,
@@ -192,7 +194,8 @@
min test (new_timeout timeout run_time) result facts
val _ = print silent (fn () => cat_lines
["Minimized to " ^ n_facts (map fst min_facts)] ^
- (case length (filter (curry (op =) Chained o snd o fst) min_facts) of
+ (case min_facts |> filter (fn ((_, (sc, _)), _) => sc = Chained)
+ |> length of
0 => ""
| n => "\n(including " ^ string_of_int n ^ " chained)") ^ ".")
in (SOME min_facts, (preplay, message, message_tail)) end
@@ -215,9 +218,10 @@
val ctxt = Proof.context_of state
val reserved = reserved_isar_keyword_table ()
val chained_ths = normalize_chained_theorems (#facts (Proof.goal state))
+ val css_table = clasimpset_rule_table_of ctxt
val facts =
- refs
- |> maps (map (apsnd single) o fact_from_ref ctxt reserved chained_ths)
+ refs |> maps (map (apsnd single)
+ o fact_from_ref ctxt reserved chained_ths css_table)
in
case subgoal_count state of
0 => Output.urgent_message "No subgoal!"
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_provers.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_provers.ML Tue Feb 14 11:16:07 2012 +0100
@@ -9,11 +9,11 @@
signature SLEDGEHAMMER_PROVERS =
sig
type failure = ATP_Proof.failure
- type locality = ATP_Translate.locality
- type type_enc = ATP_Translate.type_enc
- type reconstructor = ATP_Reconstruct.reconstructor
- type play = ATP_Reconstruct.play
- type minimize_command = ATP_Reconstruct.minimize_command
+ type stature = ATP_Problem_Generate.stature
+ type type_enc = ATP_Problem_Generate.type_enc
+ type reconstructor = ATP_Proof_Reconstruct.reconstructor
+ type play = ATP_Proof_Reconstruct.play
+ type minimize_command = ATP_Proof_Reconstruct.minimize_command
type relevance_fudge = Sledgehammer_Filter.relevance_fudge
datatype mode = Auto_Try | Try | Normal | Auto_Minimize | Minimize
@@ -25,8 +25,9 @@
blocking: bool,
provers: string list,
type_enc: string option,
- sound: bool,
+ strict: bool,
lam_trans: string option,
+ uncurried_aliases: bool option,
relevance_thresholds: real * real,
max_relevant: int option,
max_mono_iters: int,
@@ -40,8 +41,8 @@
expect: string}
datatype prover_fact =
- Untranslated_Fact of (string * locality) * thm |
- SMT_Weighted_Fact of (string * locality) * (int option * thm)
+ Untranslated_Fact of (string * stature) * thm |
+ SMT_Weighted_Fact of (string * stature) * (int option * thm)
type prover_problem =
{state: Proof.state,
@@ -49,11 +50,11 @@
subgoal: int,
subgoal_count: int,
facts: prover_fact list,
- smt_filter: (string * locality) SMT_Solver.smt_filter_data option}
+ smt_filter: (string * stature) SMT_Solver.smt_filter_data option}
type prover_result =
{outcome: failure option,
- used_facts: (string * locality) list,
+ used_facts: (string * stature) list,
run_time: Time.time,
preplay: unit -> play,
message: play -> string,
@@ -98,12 +99,12 @@
val smt_relevance_fudge : relevance_fudge
val relevance_fudge_for_prover : Proof.context -> string -> relevance_fudge
val weight_smt_fact :
- Proof.context -> int -> ((string * locality) * thm) * int
- -> (string * locality) * (int option * thm)
- val untranslated_fact : prover_fact -> (string * locality) * thm
+ Proof.context -> int -> ((string * stature) * thm) * int
+ -> (string * stature) * (int option * thm)
+ val untranslated_fact : prover_fact -> (string * stature) * thm
val smt_weighted_fact :
Proof.context -> int -> prover_fact * int
- -> (string * locality) * (int option * thm)
+ -> (string * stature) * (int option * thm)
val supported_provers : Proof.context -> unit
val kill_provers : unit -> unit
val running_provers : unit -> unit
@@ -119,8 +120,8 @@
open ATP_Problem
open ATP_Proof
open ATP_Systems
-open ATP_Translate
-open ATP_Reconstruct
+open ATP_Problem_Generate
+open ATP_Proof_Reconstruct
open Metis_Tactic
open Sledgehammer_Util
open Sledgehammer_Filter
@@ -134,7 +135,7 @@
val das_tool = "Sledgehammer"
val reconstructor_names = [metisN, smtN]
-val plain_metis = Metis (hd partial_type_encs, combinatorsN)
+val plain_metis = Metis (hd partial_type_encs, combsN)
val is_reconstructor = member (op =) reconstructor_names
val is_atp = member (op =) o supported_atps
@@ -147,7 +148,7 @@
let val thy = Proof_Context.theory_of ctxt in
case try (get_atp thy) name of
SOME {best_slices, ...} =>
- exists (fn (_, (_, (_, format, _, _, _))) => is_format format)
+ exists (fn (_, (_, ((_, format, _, _, _), _))) => is_format format)
(best_slices ctxt)
| NONE => false
end
@@ -174,7 +175,7 @@
if is_reconstructor name then
reconstructor_default_max_relevant
else if is_atp thy name then
- fold (Integer.max o #1 o snd o snd o snd)
+ fold (Integer.max o #1 o fst o snd o snd o snd)
(get_slices slice (#best_slices (get_atp thy name) ctxt)) 0
else (* is_smt_prover ctxt name *)
SMT_Solver.default_max_relevant ctxt name
@@ -289,8 +290,9 @@
blocking: bool,
provers: string list,
type_enc: string option,
- sound: bool,
+ strict: bool,
lam_trans: string option,
+ uncurried_aliases: bool option,
relevance_thresholds: real * real,
max_relevant: int option,
max_mono_iters: int,
@@ -304,8 +306,8 @@
expect: string}
datatype prover_fact =
- Untranslated_Fact of (string * locality) * thm |
- SMT_Weighted_Fact of (string * locality) * (int option * thm)
+ Untranslated_Fact of (string * stature) * thm |
+ SMT_Weighted_Fact of (string * stature) * (int option * thm)
type prover_problem =
{state: Proof.state,
@@ -313,11 +315,11 @@
subgoal: int,
subgoal_count: int,
facts: prover_fact list,
- smt_filter: (string * locality) SMT_Solver.smt_filter_data option}
+ smt_filter: (string * stature) SMT_Solver.smt_filter_data option}
type prover_result =
{outcome: failure option,
- used_facts: (string * locality) list,
+ used_facts: (string * stature) list,
run_time: Time.time,
preplay: unit -> play,
message: play -> string,
@@ -399,9 +401,10 @@
| _ => "Try this"
fun bunch_of_reconstructors needs_full_types lam_trans =
- [(false, Metis (hd partial_type_encs, lam_trans true)),
- (true, Metis (full_typesN, lam_trans false)),
- (false, Metis (no_typesN, lam_trans false)),
+ [(false, Metis (partial_type_enc, lam_trans false)),
+ (true, Metis (full_type_enc, lam_trans false)),
+ (false, Metis (no_typesN, lam_trans true)),
+ (true, Metis (really_full_type_enc, lam_trans true)),
(true, SMT)]
|> map_filter (fn (full_types, reconstr) =>
if needs_full_types andalso not full_types then NONE
@@ -579,10 +582,10 @@
fun run_atp mode name
({exec, required_execs, arguments, proof_delims, known_failures,
conj_sym_kind, prem_kind, best_slices, ...} : atp_config)
- (params as {debug, verbose, overlord, type_enc, sound, lam_trans,
- max_relevant, max_mono_iters, max_new_mono_instances,
- isar_proof, isar_shrink_factor, slice, timeout,
- preplay_timeout, ...})
+ (params as {debug, verbose, overlord, type_enc, strict, lam_trans,
+ uncurried_aliases, max_relevant, max_mono_iters,
+ max_new_mono_instances, isar_proof, isar_shrink_factor,
+ slice, timeout, preplay_timeout, ...})
minimize_command
({state, goal, subgoal, subgoal_count, facts, ...} : prover_problem) =
let
@@ -650,26 +653,19 @@
|> curry ListPair.zip (map fst facts)
|> maps (fn (name, rths) => map (pair name o snd) rths)
end
- fun run_slice (slice, (time_frac, (complete,
- (best_max_relevant, format, best_type_enc,
- best_lam_trans, extra))))
- time_left =
+ fun run_slice time_left (cache_key, cache_value)
+ (slice, (time_frac, (complete,
+ (key as (best_max_relevant, format, best_type_enc,
+ best_lam_trans, best_uncurried_aliases),
+ extra)))) =
let
val num_facts =
length facts |> is_none max_relevant
? Integer.min best_max_relevant
- val soundness = if sound then Sound else Sound_Modulo_Infinity
+ val soundness = if strict then Strict else Non_Strict
val type_enc =
type_enc |> choose_type_enc soundness best_type_enc format
val fairly_sound = is_type_enc_fairly_sound type_enc
- val facts =
- facts |> map untranslated_fact
- |> not fairly_sound
- ? filter_out (is_dangerous_prop ctxt o prop_of o snd)
- |> take num_facts
- |> polymorphism_of_type_enc type_enc <> Polymorphic
- ? monomorphize_facts
- |> map (apsnd prop_of)
val real_ms = Real.fromInt o Time.toMilliseconds
val slice_timeout =
((real_ms time_left
@@ -694,20 +690,38 @@
case lam_trans of
SOME s => s
| NONE => best_lam_trans
- val (atp_problem, pool, fact_names, _, sym_tab) =
- prepare_atp_problem ctxt format conj_sym_kind prem_kind
- type_enc false lam_trans readable_names true hyp_ts
- concl_t facts
- fun weights () = atp_problem_weights atp_problem
+ val uncurried_aliases =
+ case uncurried_aliases of
+ SOME b => b
+ | NONE => best_uncurried_aliases
+ val value as (atp_problem, _, fact_names, _, _) =
+ if cache_key = SOME key then
+ cache_value
+ else
+ facts
+ |> map untranslated_fact
+ |> not fairly_sound
+ ? filter_out (is_dangerous_prop ctxt o prop_of o snd)
+ |> take num_facts
+ |> polymorphism_of_type_enc type_enc <> Polymorphic
+ ? monomorphize_facts
+ |> map (apsnd prop_of)
+ |> prepare_atp_problem ctxt format conj_sym_kind prem_kind
+ type_enc false lam_trans uncurried_aliases
+ readable_names true hyp_ts concl_t
+ fun rel_weights () = atp_problem_relevance_weights atp_problem
+ fun kbo_weights () = atp_problem_kbo_weights atp_problem
val full_proof = debug orelse isar_proof
- val core =
+ val command =
File.shell_path command ^ " " ^
- arguments ctxt full_proof extra slice_timeout weights ^ " " ^
- File.shell_path prob_file
- val command = "TIMEFORMAT='%3R'; { time " ^ core ^ " ; } 2>&1"
- val _ = atp_problem |> lines_for_atp_problem format
- |> cons ("% " ^ command ^ "\n")
- |> File.write_list prob_file
+ arguments ctxt full_proof extra slice_timeout rel_weights ^
+ " " ^ File.shell_path prob_file
+ |> enclose "TIMEFORMAT='%3R'; { time " " ; } 2>&1"
+ val _ =
+ atp_problem
+ |> lines_for_atp_problem format kbo_weights
+ |> cons ("% " ^ command ^ "\n")
+ |> File.write_list prob_file
val ((output, run_time), (atp_proof, outcome)) =
TimeLimit.timeLimit generous_slice_timeout
Isabelle_System.bash_output command
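[Editorial sketch, not part of the changeset] run_slice now threads a (cache_key, cache_value) pair through the slices, so the ATP problem is regenerated only when the slice key (best_max_relevant, format, type_enc, lam_trans, uncurried_aliases settings) differs from the previous slice's. The memoisation pattern in plain Standard ML, with generate standing in for the prepare_atp_problem pipeline:

    fun with_cached_problem generate (cache as (SOME key', value)) key =
          if key = key' then (cache, value)    (* same slice key: reuse the problem *)
          else let val value' = generate key in ((SOME key, value'), value') end
      | with_cached_problem generate (NONE, _) key =
          let val value' = generate key in ((SOME key, value'), value') end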
@@ -734,8 +748,11 @@
SOME facts =>
let
val failure =
- UnsoundProof (is_type_enc_quasi_sound type_enc,
- facts)
+ if null facts then
+ ProofIncomplete
+ else
+ UnsoundProof (is_type_enc_quasi_sound type_enc,
+ facts)
in
if debug then
(warning (string_for_failure failure); NONE)
@@ -744,27 +761,24 @@
end
| NONE => NONE)
| _ => outcome
- in
- ((pool, fact_names, sym_tab),
- (output, run_time, atp_proof, outcome))
- end
+ in ((SOME key, value), (output, run_time, atp_proof, outcome)) end
val timer = Timer.startRealTimer ()
fun maybe_run_slice slice
- (result as (_, (_, run_time0, _, SOME _))) =
+ (result as (cache, (_, run_time0, _, SOME _))) =
let
val time_left = Time.- (timeout, Timer.checkRealTimer timer)
in
if Time.<= (time_left, Time.zeroTime) then
result
else
- run_slice slice time_left
- |> (fn (stuff, (output, run_time, atp_proof, outcome)) =>
- (stuff, (output, Time.+ (run_time0, run_time),
+ run_slice time_left cache slice
+ |> (fn (cache, (output, run_time, atp_proof, outcome)) =>
+ (cache, (output, Time.+ (run_time0, run_time),
atp_proof, outcome)))
end
| maybe_run_slice _ result = result
in
- ((Symtab.empty, Vector.fromList [], Symtab.empty),
+ ((NONE, ([], Symtab.empty, Vector.fromList [], [], Symtab.empty)),
("", Time.zeroTime, [], SOME InternalError))
|> fold maybe_run_slice actual_slices
end
@@ -780,7 +794,8 @@
()
else
File.write (Path.explode (Path.implode prob_file ^ "_proof")) output
- val ((pool, fact_names, sym_tab), (output, run_time, atp_proof, outcome)) =
+ val ((_, (_, pool, fact_names, _, sym_tab)),
+ (output, run_time, atp_proof, outcome)) =
with_path cleanup export run_on problem_path_name
val important_message =
if mode = Normal andalso
@@ -797,8 +812,8 @@
val reconstrs =
bunch_of_reconstructors needs_full_types
(lam_trans_from_atp_proof atp_proof
- o (fn plain =>
- if plain then metis_default_lam_trans else hide_lamsN))
+ o (fn desperate => if desperate then hide_lamsN
+ else metis_default_lam_trans))
in
(used_facts,
fn () =>
@@ -990,8 +1005,7 @@
state subgoal SMT
(bunch_of_reconstructors false
(fn plain =>
- if plain then metis_default_lam_trans
- else lam_liftingN)),
+ if plain then metis_default_lam_trans else liftingN)),
fn preplay =>
let
val one_line_params =
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_run.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_run.ML Tue Feb 14 11:16:07 2012 +0100
@@ -8,7 +8,7 @@
signature SLEDGEHAMMER_RUN =
sig
- type minimize_command = ATP_Reconstruct.minimize_command
+ type minimize_command = ATP_Proof_Reconstruct.minimize_command
type relevance_override = Sledgehammer_Filter.relevance_override
type mode = Sledgehammer_Provers.mode
type params = Sledgehammer_Provers.params
@@ -31,8 +31,8 @@
struct
open ATP_Util
-open ATP_Translate
-open ATP_Reconstruct
+open ATP_Problem_Generate
+open ATP_Proof_Reconstruct
open Sledgehammer_Util
open Sledgehammer_Filter
open Sledgehammer_Provers
@@ -73,10 +73,10 @@
(K 5.0)
fun adjust_reconstructor_params override_params
- ({debug, verbose, overlord, blocking, provers, type_enc, sound,
- lam_trans, relevance_thresholds, max_relevant, max_mono_iters,
- max_new_mono_instances, isar_proof, isar_shrink_factor, slice,
- minimize, timeout, preplay_timeout, expect} : params) =
+ ({debug, verbose, overlord, blocking, provers, type_enc, strict,
+ lam_trans, uncurried_aliases, relevance_thresholds, max_relevant,
+ max_mono_iters, max_new_mono_instances, isar_proof, isar_shrink_factor,
+ slice, minimize, timeout, preplay_timeout, expect} : params) =
let
fun lookup_override name default_value =
case AList.lookup (op =) override_params name of
@@ -88,9 +88,9 @@
val lam_trans = lookup_override "lam_trans" lam_trans
in
{debug = debug, verbose = verbose, overlord = overlord, blocking = blocking,
- provers = provers, type_enc = type_enc, sound = sound,
- lam_trans = lam_trans, max_relevant = max_relevant,
- relevance_thresholds = relevance_thresholds,
+ provers = provers, type_enc = type_enc, strict = strict,
+ lam_trans = lam_trans, uncurried_aliases = uncurried_aliases,
+ max_relevant = max_relevant, relevance_thresholds = relevance_thresholds,
max_mono_iters = max_mono_iters,
max_new_mono_instances = max_new_mono_instances, isar_proof = isar_proof,
isar_shrink_factor = isar_shrink_factor, slice = slice,
@@ -172,7 +172,7 @@
get_prover ctxt mode name params minimize_command problem
|> minimize ctxt mode name params problem
-fun is_induction_fact (Untranslated_Fact ((_, Induction), _)) = true
+fun is_induction_fact (Untranslated_Fact ((_, (_, Induct)), _)) = true
| is_induction_fact _ = false
fun launch_prover (params as {debug, verbose, blocking, max_relevant, slice,
--- a/src/HOL/Tools/list_to_set_comprehension.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Tools/list_to_set_comprehension.ML Tue Feb 14 11:16:07 2012 +0100
@@ -82,7 +82,7 @@
fun check (i, case_t) s =
(case strip_abs_body case_t of
(Const (@{const_name List.Nil}, _)) => s
- | t => (case s of NONE => SOME i | SOME s => NONE))
+ | _ => (case s of NONE => SOME i | SOME _ => NONE))
in
fold_index check cases NONE
end
@@ -132,7 +132,7 @@
in
(* do case distinction *)
Splitter.split_tac [#split info] 1
- THEN EVERY (map_index (fn (i', case_rewrite) =>
+ THEN EVERY (map_index (fn (i', _) =>
(if i' < length (#case_rewrites info) - 1 then rtac @{thm conjI} 1 else all_tac)
THEN REPEAT_DETERM (rtac @{thm allI} 1)
THEN rtac @{thm impI} 1
@@ -207,7 +207,7 @@
val eqs' = map reverse_bounds eqs
val pat_eq' = reverse_bounds pat_eq
val inner_t =
- fold (fn (v, T) => fn t => HOLogic.exists_const T $ absdummy T t)
+ fold (fn (_, T) => fn t => HOLogic.exists_const T $ absdummy T t)
(rev bound_vs) (fold (curry HOLogic.mk_conj) eqs' pat_eq')
val lhs = term_of redex
val rhs = HOLogic.mk_Collect ("x", rT, inner_t)
--- a/src/HOL/Transitive_Closure.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Transitive_Closure.thy Tue Feb 14 11:16:07 2012 +0100
@@ -718,85 +718,97 @@
end
-lemma rel_pow_1 [simp]:
+text {* for code generation *}
+
+definition relpow :: "nat \<Rightarrow> ('a \<times> 'a) set \<Rightarrow> ('a \<times> 'a) set" where
+ relpow_code_def [code_abbrev]: "relpow = compow"
+
+lemma [code]:
+ "relpow (Suc n) R = (relpow n R) O R"
+ "relpow 0 R = Id"
+ by (simp_all add: relpow_code_def)
+
+hide_const (open) relpow
+
+lemma relpow_1 [simp]:
fixes R :: "('a \<times> 'a) set"
shows "R ^^ 1 = R"
by simp
-lemma rel_pow_0_I:
+lemma relpow_0_I:
"(x, x) \<in> R ^^ 0"
by simp
-lemma rel_pow_Suc_I:
+lemma relpow_Suc_I:
"(x, y) \<in> R ^^ n \<Longrightarrow> (y, z) \<in> R \<Longrightarrow> (x, z) \<in> R ^^ Suc n"
by auto
-lemma rel_pow_Suc_I2:
+lemma relpow_Suc_I2:
"(x, y) \<in> R \<Longrightarrow> (y, z) \<in> R ^^ n \<Longrightarrow> (x, z) \<in> R ^^ Suc n"
by (induct n arbitrary: z) (simp, fastforce)
-lemma rel_pow_0_E:
+lemma relpow_0_E:
"(x, y) \<in> R ^^ 0 \<Longrightarrow> (x = y \<Longrightarrow> P) \<Longrightarrow> P"
by simp
-lemma rel_pow_Suc_E:
+lemma relpow_Suc_E:
"(x, z) \<in> R ^^ Suc n \<Longrightarrow> (\<And>y. (x, y) \<in> R ^^ n \<Longrightarrow> (y, z) \<in> R \<Longrightarrow> P) \<Longrightarrow> P"
by auto
-lemma rel_pow_E:
+lemma relpow_E:
"(x, z) \<in> R ^^ n \<Longrightarrow> (n = 0 \<Longrightarrow> x = z \<Longrightarrow> P)
\<Longrightarrow> (\<And>y m. n = Suc m \<Longrightarrow> (x, y) \<in> R ^^ m \<Longrightarrow> (y, z) \<in> R \<Longrightarrow> P)
\<Longrightarrow> P"
by (cases n) auto
-lemma rel_pow_Suc_D2:
+lemma relpow_Suc_D2:
"(x, z) \<in> R ^^ Suc n \<Longrightarrow> (\<exists>y. (x, y) \<in> R \<and> (y, z) \<in> R ^^ n)"
apply (induct n arbitrary: x z)
- apply (blast intro: rel_pow_0_I elim: rel_pow_0_E rel_pow_Suc_E)
- apply (blast intro: rel_pow_Suc_I elim: rel_pow_0_E rel_pow_Suc_E)
+ apply (blast intro: relpow_0_I elim: relpow_0_E relpow_Suc_E)
+ apply (blast intro: relpow_Suc_I elim: relpow_0_E relpow_Suc_E)
done
-lemma rel_pow_Suc_E2:
+lemma relpow_Suc_E2:
"(x, z) \<in> R ^^ Suc n \<Longrightarrow> (\<And>y. (x, y) \<in> R \<Longrightarrow> (y, z) \<in> R ^^ n \<Longrightarrow> P) \<Longrightarrow> P"
- by (blast dest: rel_pow_Suc_D2)
+ by (blast dest: relpow_Suc_D2)
-lemma rel_pow_Suc_D2':
+lemma relpow_Suc_D2':
"\<forall>x y z. (x, y) \<in> R ^^ n \<and> (y, z) \<in> R \<longrightarrow> (\<exists>w. (x, w) \<in> R \<and> (w, z) \<in> R ^^ n)"
by (induct n) (simp_all, blast)
-lemma rel_pow_E2:
+lemma relpow_E2:
"(x, z) \<in> R ^^ n \<Longrightarrow> (n = 0 \<Longrightarrow> x = z \<Longrightarrow> P)
\<Longrightarrow> (\<And>y m. n = Suc m \<Longrightarrow> (x, y) \<in> R \<Longrightarrow> (y, z) \<in> R ^^ m \<Longrightarrow> P)
\<Longrightarrow> P"
apply (cases n, simp)
- apply (cut_tac n=nat and R=R in rel_pow_Suc_D2', simp, blast)
+ apply (cut_tac n=nat and R=R in relpow_Suc_D2', simp, blast)
done
-lemma rel_pow_add: "R ^^ (m+n) = R^^m O R^^n"
+lemma relpow_add: "R ^^ (m+n) = R^^m O R^^n"
by (induct n) auto
-lemma rel_pow_commute: "R O R ^^ n = R ^^ n O R"
+lemma relpow_commute: "R O R ^^ n = R ^^ n O R"
by (induct n) (simp, simp add: O_assoc [symmetric])
-lemma rel_pow_empty:
+lemma relpow_empty:
"0 < n \<Longrightarrow> ({} :: ('a \<times> 'a) set) ^^ n = {}"
by (cases n) auto
-lemma rtrancl_imp_UN_rel_pow:
+lemma rtrancl_imp_UN_relpow:
assumes "p \<in> R^*"
shows "p \<in> (\<Union>n. R ^^ n)"
proof (cases p)
case (Pair x y)
with assms have "(x, y) \<in> R^*" by simp
then have "(x, y) \<in> (\<Union>n. R ^^ n)" proof induct
- case base show ?case by (blast intro: rel_pow_0_I)
+ case base show ?case by (blast intro: relpow_0_I)
next
- case step then show ?case by (blast intro: rel_pow_Suc_I)
+ case step then show ?case by (blast intro: relpow_Suc_I)
qed
with Pair show ?thesis by simp
qed
-lemma rel_pow_imp_rtrancl:
+lemma relpow_imp_rtrancl:
assumes "p \<in> R ^^ n"
shows "p \<in> R^*"
proof (cases p)
@@ -806,18 +818,18 @@
case 0 then show ?case by simp
next
case Suc then show ?case
- by (blast elim: rel_pow_Suc_E intro: rtrancl_into_rtrancl)
+ by (blast elim: relpow_Suc_E intro: rtrancl_into_rtrancl)
qed
with Pair show ?thesis by simp
qed
-lemma rtrancl_is_UN_rel_pow:
+lemma rtrancl_is_UN_relpow:
"R^* = (\<Union>n. R ^^ n)"
- by (blast intro: rtrancl_imp_UN_rel_pow rel_pow_imp_rtrancl)
+ by (blast intro: rtrancl_imp_UN_relpow relpow_imp_rtrancl)
lemma rtrancl_power:
"p \<in> R^* \<longleftrightarrow> (\<exists>n. p \<in> R ^^ n)"
- by (simp add: rtrancl_is_UN_rel_pow)
+ by (simp add: rtrancl_is_UN_relpow)
lemma trancl_power:
"p \<in> R^+ \<longleftrightarrow> (\<exists>n > 0. p \<in> R ^^ n)"
@@ -825,23 +837,23 @@
apply simp
apply (rule iffI)
apply (drule tranclD2)
- apply (clarsimp simp: rtrancl_is_UN_rel_pow)
+ apply (clarsimp simp: rtrancl_is_UN_relpow)
apply (rule_tac x="Suc n" in exI)
apply (clarsimp simp: rel_comp_def)
apply fastforce
apply clarsimp
apply (case_tac n, simp)
apply clarsimp
- apply (drule rel_pow_imp_rtrancl)
+ apply (drule relpow_imp_rtrancl)
apply (drule rtrancl_into_trancl1) apply auto
done
-lemma rtrancl_imp_rel_pow:
+lemma rtrancl_imp_relpow:
"p \<in> R^* \<Longrightarrow> \<exists>n. p \<in> R ^^ n"
- by (auto dest: rtrancl_imp_UN_rel_pow)
+ by (auto dest: rtrancl_imp_UN_relpow)
text{* By Sternagel/Thiemann: *}
-lemma rel_pow_fun_conv:
+lemma relpow_fun_conv:
"((a,b) \<in> R ^^ n) = (\<exists>f. f 0 = a \<and> f n = b \<and> (\<forall>i<n. (f i, f(Suc i)) \<in> R))"
proof (induct n arbitrary: b)
case 0 show ?case by auto
@@ -865,7 +877,7 @@
qed
qed
-lemma rel_pow_finite_bounded1:
+lemma relpow_finite_bounded1:
assumes "finite(R :: ('a*'a)set)" and "k>0"
shows "R^^k \<subseteq> (UN n:{n. 0<n & n <= card R}. R^^n)" (is "_ \<subseteq> ?r")
proof-
@@ -886,7 +898,7 @@
hence ?case using `(a,b): R^^(Suc n)` Suc_leI[OF `n < card R`] by blast
} moreover
{ assume "n = card R"
- from `(a,b) \<in> R ^^ (Suc n)`[unfolded rel_pow_fun_conv]
+ from `(a,b) \<in> R ^^ (Suc n)`[unfolded relpow_fun_conv]
obtain f where "f 0 = a" and "f(Suc n) = b"
and steps: "\<And>i. i <= n \<Longrightarrow> (f i, f (Suc i)) \<in> R" by auto
let ?p = "%i. (f i, f(Suc i))"
@@ -906,7 +918,7 @@
and pij: "?p i = ?p j" by blast
let ?g = "\<lambda> l. if l \<le> i then f l else f (l + (j - i))"
let ?n = "Suc(n - (j - i))"
- have abl: "(a,b) \<in> R ^^ ?n" unfolding rel_pow_fun_conv
+ have abl: "(a,b) \<in> R ^^ ?n" unfolding relpow_fun_conv
proof (rule exI[of _ ?g], intro conjI impI allI)
show "?g ?n = b" using `f(Suc n) = b` j ij by auto
next
@@ -940,21 +952,21 @@
thus ?thesis using gr0_implies_Suc[OF `k>0`] by auto
qed
-lemma rel_pow_finite_bounded:
+lemma relpow_finite_bounded:
assumes "finite(R :: ('a*'a)set)"
shows "R^^k \<subseteq> (UN n:{n. n <= card R}. R^^n)"
apply(cases k)
apply force
-using rel_pow_finite_bounded1[OF assms, of k] by auto
+using relpow_finite_bounded1[OF assms, of k] by auto
-lemma rtrancl_finite_eq_rel_pow:
+lemma rtrancl_finite_eq_relpow:
"finite R \<Longrightarrow> R^* = (UN n : {n. n <= card R}. R^^n)"
-by(fastforce simp: rtrancl_power dest: rel_pow_finite_bounded)
+by(fastforce simp: rtrancl_power dest: relpow_finite_bounded)
-lemma trancl_finite_eq_rel_pow:
+lemma trancl_finite_eq_relpow:
"finite R \<Longrightarrow> R^+ = (UN n : {n. 0 < n & n <= card R}. R^^n)"
apply(auto simp add: trancl_power)
-apply(auto dest: rel_pow_finite_bounded1)
+apply(auto dest: relpow_finite_bounded1)
done
lemma finite_rel_comp[simp,intro]:
@@ -974,13 +986,13 @@
apply(simp_all add: assms)
done
-lemma single_valued_rel_pow:
+lemma single_valued_relpow:
fixes R :: "('a * 'a) set"
shows "single_valued R \<Longrightarrow> single_valued (R ^^ n)"
apply (induct n arbitrary: R)
apply simp_all
apply (rule single_valuedI)
-apply (fast dest: single_valuedD elim: rel_pow_Suc_E)
+apply (fast dest: single_valuedD elim: relpow_Suc_E)
done
@@ -1003,7 +1015,7 @@
unfolding ntrancl_def by auto
qed
-lemma ntrancl_Suc [simp, code]:
+lemma ntrancl_Suc [simp]:
"ntrancl (Suc n) R = ntrancl n R O (Id \<union> R)"
proof
{
@@ -1034,9 +1046,13 @@
unfolding ntrancl_def by fastforce
qed
+lemma [code]:
+ "ntrancl (Suc n) r = (let r' = ntrancl n r in r' Un r' O r)"
+unfolding Let_def by auto
+
lemma finite_trancl_ntranl:
"finite R \<Longrightarrow> trancl R = ntrancl (card R - 1) R"
- by (cases "card R") (auto simp add: trancl_finite_eq_rel_pow rel_pow_empty ntrancl_def)
+ by (cases "card R") (auto simp add: trancl_finite_eq_relpow relpow_empty ntrancl_def)
subsection {* Acyclic relations *}
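[Editorial sketch, not part of the changeset] The new [code] equation for ntrancl binds the recursive result once ("let r' = ntrancl n r") and reuses it for both the union and the composition, and it no longer mentions the identity relation Id from the old right-hand side ntrancl n R O (Id Un R). A plain Standard ML rendering with relations as pair lists (duplicates and negative n ignored; the base case ntrancl 0 r = r is taken from the theory):

    fun relcomp r s =   (* all (a, c) with (a, b) in r and (b, c) in s *)
      List.concat
        (map (fn (a, b) =>
           List.mapPartial (fn (b', c) => if b = b' then SOME (a, c) else NONE) s) r)

    fun ntrancl 0 r = r
      | ntrancl n r = let val r' = ntrancl (n - 1) r in r' @ relcomp r' r end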
--- a/src/HOL/Wellfounded.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Wellfounded.thy Tue Feb 14 11:16:07 2012 +0100
@@ -635,7 +635,7 @@
definition measure :: "('a => nat) => ('a * 'a)set"
where "measure = inv_image less_than"
-lemma in_measure[simp]: "((x,y) : measure f) = (f x < f y)"
+lemma in_measure[simp, code_unfold]: "((x,y) : measure f) = (f x < f y)"
by (simp add:measure_def)
lemma wf_measure [iff]: "wf (measure f)"
--- a/src/HOL/Word/Word.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/Word/Word.thy Tue Feb 14 11:16:07 2012 +0100
@@ -3942,7 +3942,7 @@
rotate1 (map2 f xs ys) = map2 f (rotate1 xs) (rotate1 ys)"
apply (unfold map2_def)
apply (cases xs)
- apply (cases ys, auto simp add : rotate1_def)+
+ apply (cases ys, auto)+
done
lemmas lth = box_equals [OF asm_rl length_rotate [symmetric]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/ex/Executable_Relation.thy Tue Feb 14 11:16:07 2012 +0100
@@ -0,0 +1,79 @@
+theory Executable_Relation
+imports Main
+begin
+
+text {*
+ Current problem: rtrancl is not executable on an infinite type.
+*}
+
+lemma
+ "(x, (y :: nat)) : rtrancl (R Un S) \<Longrightarrow> (x, y) : (rtrancl R) Un (rtrancl S)"
+(* quickcheck[exhaustive] fails! *)
+oops
+
+code_thms rtrancl
+
+hide_const (open) rtrancl trancl
+
+quotient_type 'a rel = "('a * 'a) set" / "(op =)"
+morphisms set_of_rel rel_of_set by (metis identity_equivp)
+
+lemma [simp]:
+ "rel_of_set (set_of_rel S) = S"
+by (rule Quotient_abs_rep[OF Quotient_rel])
+
+lemma [simp]:
+ "set_of_rel (rel_of_set R) = R"
+by (rule Quotient_rep_abs[OF Quotient_rel]) (rule refl)
+
+no_notation
+ Set.member ("(_/ : _)" [50, 51] 50)
+
+quotient_definition member :: "'a * 'a => 'a rel => bool" where
+ "member" is "Set.member :: 'a * 'a => ('a * 'a) set => bool"
+
+notation
+ member ("(_/ : _)" [50, 51] 50)
+
+quotient_definition union :: "'a rel => 'a rel => 'a rel" where
+ "union" is "Set.union :: ('a * 'a) set => ('a * 'a) set => ('a * 'a) set"
+
+quotient_definition rtrancl :: "'a rel => 'a rel" where
+ "rtrancl" is "Transitive_Closure.rtrancl :: ('a * 'a) set => ('a * 'a) set"
+
+definition reflcl_raw
+where "reflcl_raw R = R \<union> Id"
+
+quotient_definition reflcl :: "('a * 'a) set => 'a rel" where
+ "reflcl" is "reflcl_raw :: ('a * 'a) set => ('a * 'a) set"
+
+code_datatype reflcl rel_of_set
+
+lemma member_code[code]:
+ "(x, y) : rel_of_set R = Set.member (x, y) R"
+ "(x, y) : reflcl R = ((x = y) \<or> Set.member (x, y) R)"
+unfolding member_def reflcl_def reflcl_raw_def map_fun_def_raw o_def id_def
+by auto
+
+lemma union_code[code]:
+ "union (rel_of_set R) (rel_of_set S) = rel_of_set (Set.union R S)"
+ "union (reflcl R) (rel_of_set S) = reflcl (Set.union R S)"
+ "union (reflcl R) (reflcl S) = reflcl (Set.union R S)"
+ "union (rel_of_set R) (reflcl S) = reflcl (Set.union R S)"
+unfolding union_def reflcl_def reflcl_raw_def map_fun_def_raw o_def id_def
+by (auto intro: arg_cong[where f=rel_of_set])
+
+lemma rtrancl_code[code]:
+ "rtrancl (rel_of_set R) = reflcl (Transitive_Closure.trancl R)"
+ "rtrancl (reflcl R) = reflcl (Transitive_Closure.trancl R)"
+unfolding rtrancl_def reflcl_def reflcl_raw_def map_fun_def_raw o_def id_def
+by (auto intro: arg_cong[where f=rel_of_set])
+
+quickcheck_generator rel constructors: rel_of_set
+
+lemma
+ "(x, (y :: nat)) : rtrancl (union R S) \<Longrightarrow> (x, y) : (union (rtrancl R) (rtrancl S))"
+quickcheck[exhaustive]
+oops
+
+end
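[Editorial sketch, not part of the changeset] Executable_Relation.thy makes rtrancl executable by restricting code generation to two constructors: rel_of_set (an explicitly listed set of pairs) and reflcl (such a set with the infinite identity relation adjoined), so rtrancl can be computed as reflcl of a finite trancl. The representation idea in plain Standard ML, with pair lists standing in for finite sets:

    datatype 'a rel =
        Rel_of_set of ('a * 'a) list   (* finitely many explicit pairs *)
      | Reflcl of ('a * 'a) list       (* the same pairs, union the identity Id *)

    fun member (x, y) (Rel_of_set ps) = List.exists (fn p => p = (x, y)) ps
      | member (x, y) (Reflcl ps) = x = y orelse List.exists (fn p => p = (x, y)) ps

    (* rtrancl stays computable: rtrancl (Rel_of_set ps) = Reflcl (trancl of ps),
       and likewise for Reflcl, mirroring the rtrancl_code equations above. *)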
--- a/src/HOL/ex/Quickcheck_Examples.thy Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/ex/Quickcheck_Examples.thy Tue Feb 14 11:16:07 2012 +0100
@@ -267,16 +267,59 @@
oops
+subsection {* Examples with sets *}
+
+lemma
+ "{} = A Un - A"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+lemma
+ "[| bij_betw f A B; bij_betw f C D |] ==> bij_betw f (A Un C) (B Un D)"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+
subsection {* Examples with relations *}
lemma
- "acyclic R ==> acyclic S ==> acyclic (R Un S)"
-(* FIXME: missing instance for narrowing -- quickcheck[expect = counterexample] *)
+ "acyclic (R :: ('a * 'a) set) ==> acyclic S ==> acyclic (R Un S)"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+lemma
+ "acyclic (R :: (nat * nat) set) ==> acyclic S ==> acyclic (R Un S)"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+(* FIXME: dramatic performance decrease after changing the code equation of ntrancl *)
+lemma
+ "(x, z) : rtrancl (R Un S) ==> \<exists> y. (x, y) : rtrancl R & (y, z) : rtrancl S"
+(*quickcheck[exhaustive, expect = counterexample]*)
oops
lemma
- "(x, z) : rtrancl (R Un S) ==> \<exists> y. (x, y) : rtrancl R & (y, z) : rtrancl S"
-(* FIXME: missing instance for narrowing -- quickcheck[expect = counterexample] *)
+ "wf (R :: ('a * 'a) set) ==> wf S ==> wf (R Un S)"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+lemma
+ "wf (R :: (nat * nat) set) ==> wf S ==> wf (R Un S)"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+lemma
+ "wf (R :: (int * int) set) ==> wf S ==> wf (R Un S)"
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+
+subsection {* Examples with the descriptive operator *}
+
+lemma
+ "(THE x. x = a) = b"
+quickcheck[random, expect = counterexample]
+quickcheck[exhaustive, expect = counterexample]
oops
subsection {* Examples with Multisets *}
@@ -406,7 +449,7 @@
begin
lemma "False"
-quickcheck[exhaustive, expect = no_counterexample]
+quickcheck[exhaustive, expect = counterexample]
oops
end
@@ -422,6 +465,19 @@
end
+locale antisym =
+ fixes R
+ assumes "R x y --> R y x --> x = y"
+begin
+
+lemma
+ "R x y --> R y z --> R x z"
+quickcheck[exhaustive, finite_type_size = 2, expect = no_counterexample]
+quickcheck[exhaustive, expect = counterexample]
+oops
+
+end
+
subsection {* Examples with HOL quantifiers *}
lemma
--- a/src/HOL/ex/ROOT.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/ex/ROOT.ML Tue Feb 14 11:16:07 2012 +0100
@@ -72,7 +72,8 @@
"List_to_Set_Comprehension_Examples",
"Set_Algebras",
"Seq",
- "Simproc_Tests"
+ "Simproc_Tests",
+ "Executable_Relation"
];
if getenv "ISABELLE_GHC" = "" then ()
--- a/src/HOL/ex/sledgehammer_tactics.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/HOL/ex/sledgehammer_tactics.ML Tue Feb 14 11:16:07 2012 +0100
@@ -71,7 +71,7 @@
fun sledgehammer_with_metis_tac ctxt override_params relevance_override i th =
case run_atp override_params relevance_override i i ctxt th of
SOME facts =>
- Metis_Tactic.metis_tac [] ATP_Translate.combinatorsN ctxt
+ Metis_Tactic.metis_tac [] ATP_Problem_Generate.combsN ctxt
(maps (thms_of_name ctxt) facts) i th
| NONE => Seq.empty
--- a/src/Tools/quickcheck.ML Thu Feb 09 19:34:23 2012 +0100
+++ b/src/Tools/quickcheck.ML Tue Feb 14 11:16:07 2012 +0100
@@ -324,10 +324,20 @@
of NONE => Assumption.all_assms_of lthy
| SOME locale => Assumption.local_assms_of lthy (Locale.init locale thy);
val proto_goal = Logic.list_implies (map Thm.term_of assms, subst_bounds (frees, strip gi));
+ fun assms_of locale = case fst (Locale.intros_of thy locale) of NONE => []
+ | SOME th =>
+ let
+ val t = the_single (Assumption.all_assms_of (Locale.init locale thy))
+ val (tyenv, tenv) =
+ Pattern.match thy (concl_of th, term_of t) (Vartab.empty, Vartab.empty)
+ in
+ map (Envir.subst_term (tyenv, tenv)) (prems_of th)
+ end
val goals = case some_locale
of NONE => [(proto_goal, eval_terms)]
| SOME locale =>
- map (fn (_, phi) => (Morphism.term phi proto_goal, map (Morphism.term phi) eval_terms))
+ (Logic.list_implies (assms_of locale, proto_goal), eval_terms) ::
+ map (fn (_, phi) => (Morphism.term phi proto_goal, map (Morphism.term phi) eval_terms))
(Locale.registrations_of (Context.Theory thy) (*FIXME*) locale);
in
test_terms lthy (time_limit, is_interactive) insts goals