--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/churn Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+ADMIN="$(cd "$(dirname "$0")"; cd "$(pwd -P)"; pwd)"
+cd "$ADMIN"
+hg churn --aliases user-aliases --progress
--- a/Admin/isatest/settings/at-mac-poly-5.1-para Wed Jan 28 16:29:16 2009 +0100
+++ b/Admin/isatest/settings/at-mac-poly-5.1-para Wed Jan 28 16:57:12 2009 +0100
@@ -25,4 +25,4 @@
ISABELLE_USEDIR_OPTIONS="-i false -d false -M 4"
-HOL_USEDIR_OPTIONS="-p 2"
+HOL_USEDIR_OPTIONS="-p 2 -Q false"
--- a/Admin/isatest/settings/at-poly-5.1-para-e Wed Jan 28 16:29:16 2009 +0100
+++ b/Admin/isatest/settings/at-poly-5.1-para-e Wed Jan 28 16:57:12 2009 +0100
@@ -24,4 +24,4 @@
ISABELLE_USEDIR_OPTIONS="-i true -d pdf -v true -M 20"
-HOL_USEDIR_OPTIONS="-p 2"
+HOL_USEDIR_OPTIONS="-p 2 -Q false"
--- a/Admin/isatest/settings/at64-poly-5.1-para Wed Jan 28 16:29:16 2009 +0100
+++ b/Admin/isatest/settings/at64-poly-5.1-para Wed Jan 28 16:57:12 2009 +0100
@@ -24,4 +24,4 @@
ISABELLE_USEDIR_OPTIONS="-i true -d pdf -v true -M 2"
-HOL_USEDIR_OPTIONS="-p 2"
+HOL_USEDIR_OPTIONS="-p 2 -Q false"
--- a/Admin/makedist Wed Jan 28 16:29:16 2009 +0100
+++ b/Admin/makedist Wed Jan 28 16:57:12 2009 +0100
@@ -1,12 +1,10 @@
#!/usr/bin/env bash
#
-# $Id$
-#
# makedist -- make Isabelle source distribution
## global settings
-REPOS="http://isabelle.in.tum.de/repos/isabelle"
+REPOS="https://isabelle.in.tum.de/repos/isabelle"
DISTPREFIX=${DISTPREFIX:-~/tmp/isadist}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/user-aliases Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,4 @@
+lcp paulson
+norbert.schirmer@web.de schirmer
+urbanc@in.tum.de urbanc
+nipkow@lapbroy100.local nipkow
\ No newline at end of file
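Taken together, the two new files above give a one-step way to produce per-author commit statistics: Admin/user-aliases folds the differing author strings found in the history into canonical short names, and Admin/churn feeds that table to Mercurial's churn extension (which has to be enabled in the hgrc). A brief usage sketch; the appended alias line is a hypothetical example, not part of this changeset:

    $ cd Admin
    $ ./churn                                              # runs: hg churn --aliases user-aliases --progress
    $ echo 'someone@example.org someone' >> user-aliases   # hypothetical: fold in another author string
    $ ./churn                                              # the new alias is picked up on the next run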
--- a/NEWS Wed Jan 28 16:29:16 2009 +0100
+++ b/NEWS Wed Jan 28 16:57:12 2009 +0100
@@ -66,13 +66,19 @@
*** Pure ***
-* Type Binding.T gradually replaces formerly used type bstring for names
+* Class declaration: the so-called "base sort" may no longer be given in
+the import list; it is inferred from the specification. Particularly in HOL,
+write
+
+ class foo = ... instead of class foo = type + ...
+
+* Type binding gradually replaces formerly used type bstring for names
to be bound. Name space interface for declarations has been simplified:
NameSpace.declare: NameSpace.naming
- -> Binding.T -> NameSpace.T -> string * NameSpace.T
+ -> binding -> NameSpace.T -> string * NameSpace.T
NameSpace.bind: NameSpace.naming
- -> Binding.T * 'a -> 'a NameSpace.table -> string * 'a NameSpace.table
+ -> binding * 'a -> 'a NameSpace.table -> string * 'a NameSpace.table
(*exception Symtab.DUP*)
See further modules src/Pure/General/binding.ML and
@@ -187,6 +193,10 @@
*** HOL ***
+* Theory "Reflection" now resides in HOL/Library.
+
+* Entry point to Word library now simply named "Word". INCOMPATIBILITY.
+
* Made source layout more coherent with logical distribution
structure:
--- a/contrib/SystemOnTPTP/remote Wed Jan 28 16:29:16 2009 +0100
+++ b/contrib/SystemOnTPTP/remote Wed Jan 28 16:57:12 2009 +0100
@@ -3,93 +3,137 @@
# Wrapper for custom remote provers on SystemOnTPTP
# Author: Fabian Immler, TU Muenchen
#
-# Similar to the vampire wrapper, but compatible provers can be passed in the
-# command line, with %s for the problemfile e.g.
-#
-# ./remote Vampire---9.0 jumpirefix --output_syntax tptp --mode casc -t 3600 %s
-# ./remote Vampire---10.0 drakosha.pl 60 %s
-# ./remote SPASS---3.01 SPASS -Auto -SOS=1 -PGiven=0 -PProblem=0 -Splits=0 -FullRed=0 -DocProof %s
-# ./remote Metis---2.1 metis --show proof --show saturation %s
-# ./remote SNARK---20080805r005 run-snark %s
use warnings;
use strict;
-
use Getopt::Std;
use HTTP::Request::Common;
-use LWP::UserAgent;
+use LWP;
-# address of proof-server
my $SystemOnTPTPFormReplyURL = "http://www.cs.miami.edu/~tptp/cgi-bin/SystemOnTPTPFormReply";
-if(scalar(@ARGV) < 2) {
- print "prover and command missing";
- exit -1;
-}
-my $prover = shift(@ARGV);
-my $command = shift(@ARGV);
-
-# pass arguments
-my $options = "";
-while(scalar(@ARGV)>1){
- $options.=" ".shift(@ARGV);
-}
-# last argument is problemfile to be uploaded
-my $problem = [shift(@ARGV)];
-
-# fill in form
+# default parameters
my %URLParameters = (
"NoHTML" => 1,
"QuietFlag" => "-q01",
"X2TPTP" => "-S",
"SubmitButton" => "RunSelectedSystems",
"ProblemSource" => "UPLOAD",
- "UPLOADProblem" => $problem,
- "System___$prover" => "$prover",
- "Format___$prover" => "tptp",
- "Command___$prover" => "$command $options %s"
-);
+ );
+
+# check connection
+my $TestAgent = LWP::UserAgent->new;
+$TestAgent->timeout(5);
+my $TestRequest = GET($SystemOnTPTPFormReplyURL);
+my $TestResponse = $TestAgent->request($TestRequest);
+if(! $TestResponse->is_success) {
+ print "HTTP-Error: " . $TestResponse->message . "\n";
+ exit(-1);
+}
+
+#----Get format and transform options if specified
+my %Options;
+getopts("hws:t:c:",\%Options);
+
+#----Usage
+if (exists($Options{'h'})) {
+ print("Usage: remote <options> [<File name>]\n");
+ print(" <options> are ...\n");
+ print(" -h - print this help\n");
+ print(" -w - list available ATP systems\n");
+ print(" -s<system> - specified system to use\n");
+ print(" -t<timelimit> - CPU time limit for system\n");
+ print(" -c<command> - custom command for system\n");
+ print(" <File name> - TPTP problem file\n");
+ exit(0);
+}
+
+#----What systems flag
+if (exists($Options{'w'})) {
+ $URLParameters{"SubmitButton"} = "ListSystems";
+ delete($URLParameters{"ProblemSource"});
+}
+#----Selected system
+my $System;
+if (exists($Options{'s'})) {
+ $System = $Options{'s'};
+} else {
+ # use Vampire as default
+ $System = "Vampire---9.0";
+}
+$URLParameters{"System___$System"} = $System;
+
+#----Time limit
+if (exists($Options{'t'})) {
+ $URLParameters{"TimeLimit___$System"} = $Options{'t'};
+}
+#----Custom command
+if (exists($Options{'c'})) {
+ $URLParameters{"Command___$System"} = $Options{'c'};
+}
+
+#----Get single file name
+if (exists($URLParameters{"ProblemSource"})) {
+ if (scalar(@ARGV) >= 1) {
+ $URLParameters{"UPLOADProblem"} = [shift(@ARGV)];
+ } else {
+die("Missing problem file");
+ }
+}
# Query Server
my $Agent = LWP::UserAgent->new;
+if (exists($Options{'t'})) {
+ # give server more time to respond
+ $Agent->timeout($Options{'t'} * 2 + 10);
+}
my $Request = POST($SystemOnTPTPFormReplyURL,
Content_Type => 'form-data',Content => \%URLParameters);
my $Response = $Agent->request($Request);
-
-#catch errors, let isabelle/watcher know
-if($Response->is_success && $Response->content !~ /NO SOLUTION OUTPUT BY SYSTEM/
-&& $Response->content =~ m/%\s*Result\s*:\s*Unsatisfiable.*?\n%\s*Output\s*:\s*.*?Refutation.*?\n/){
- # convert to isabelle-friendly format
- my @lines = split( /%\s*Result\s*:\s*Unsatisfiable.*?\n%\s*Output\s*:\s*.*?Refutation.*?\n/, $Response->content);
- @lines = split( /\n/, $lines[1]); my $extract = "";
- my $inproof = 0 > 1;
- my $ende = 0 > 1;
+
+#catch errors / failure
+if(! $Response->is_success){
+ print "HTTP-Error: " . $Response->message . "\n";
+ exit(-1);
+} elsif (exists($Options{'w'})) {
+ print $Response->content;
+ exit (0);
+} elsif ($Response->content =~ /NO SOLUTION OUTPUT BY SYSTEM/){
+ if ($Response->content =~ /%\s*Result\s*:(.*)\n%\s*Output\s*:(.*)\n%/) {
+ print "No Solution Output\nResult: $1\nOutput: $2\n";
+ } else {
+ print "No Solution Output\n";
+ }
+ exit(-1);
+} elsif ($Response->content =~ /ERROR: Could not form TPTP format derivation/) {
+ print "Could not form TPTP format derivation\n";
+ exit(-1);
+} elsif ($Response->content =~ /WARNING: (\S*) does not exist/) {
+ print "Specified System $1 does not exist\n";
+ exit(-1);
+} elsif ($Response->content =~ /^\s*$/) {
+ print "Empty response (specified bad system? Inappropriate problem file format?)\n";
+ exit(-1);
+} elsif ($Response->content !~ /%\s*Result\s*:(.*)\n%\s*Output\s*:(.*)\n%/) {
+ print "Bad response: \n".$Response->content;
+ exit(-1);
+} else {
+ my @lines = split( /\n/, $Response->content);
+ my $extract = "";
foreach my $line (@lines){
- if(! $ende){
- #ignore comments
- if(! $inproof){
- if ($line !~ /^%/ && !($line eq "")) {
- $extract .= "$line";
- $inproof = 1;
- }
- } else {
- if ($line !~ /^%/) {
- $extract .= "$line";
- } else {
- $ende = 1;
- }
- }
+ #ignore comments
+ if ($line !~ /^%/ && !($line eq "")) {
+ $extract .= "$line";
}
}
- # insert newlines after '.'
+ # insert newlines after ').'
$extract =~ s/\s//g;
$extract =~ s/\)\.cnf/\)\.\ncnf/g;
+
+ # orientation for res_reconstruct.ML
print "# SZS output start CNFRefutation.\n";
print "$extract\n";
print "# SZS output end CNFRefutation.\n";
-} else {
- print "HTTP-Request: " . $Response->message;
- print "\nCANNOT PROVE: \n";
- print $Response->content;
+ exit(0);
}
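The rewritten wrapper thus replaces the old fixed prover/command interface with getopt-style options (-w, -s, -t, -c) plus a trailing TPTP problem file. A usage sketch; the system names come from the example comments deleted above and may or may not still be installed on SystemOnTPTP:

    $ ./remote -w                                    # list the ATP systems currently offered
    $ ./remote problem.tptp                          # run the default system (Vampire---9.0)
    $ ./remote -s SPASS---3.01 -t 60 problem.tptp    # select a system and set a 60s CPU limit
    $ ./remote -s Metis---2.1 -c 'metis --show proof' problem.tptp   # override the server-side command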
--- a/contrib/SystemOnTPTP/vampire Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,60 +0,0 @@
-#!/usr/bin/env perl
-#
-# Vampire Wrapper for SystemOnTPTP
-# Author: Fabian Immler, TU Muenchen
-#
-# - querys a Vampire theorem prover on SystemOnTPTP
-# (http://www.cs.miami.edu/~tptp/cgi-bin/SystemOnTPTP)
-# - behaves like a local Vampire
-# => can be used for Isabelle when Vampire is not available (e.g. on a Mac)
-#
-
-use warnings;
-use strict;
-
-use Getopt::Std;
-use HTTP::Request::Common;
-use LWP::UserAgent;
-
-# address of proof-server
-my $SystemOnTPTPFormReplyURL = "http://www.cs.miami.edu/~tptp/cgi-bin/SystemOnTPTPFormReply";
-
-#name of prover and its executable on the server, e.g.
-# Vampire---9.0
-# jumpirefix
-my $prover = "Vampire---9.0";
-my $command = "jumpirefix";
-
-# pass arguments
-my $options = "";
-while(scalar(@ARGV)>1){
- $options.=" ".shift(@ARGV);
-}
-# last argument is problemfile to be uploaded
-my $problem = [shift(@ARGV)];
-
-# fill in form
-my %URLParameters = (
- "NoHTML" => 1,
- "QuietFlag" => "-q01",
- "SubmitButton" => "RunSelectedSystems",
- "ProblemSource" => "UPLOAD",
- "UPLOADProblem" => $problem,
- "System___$prover" => "$prover",
- "Format___$prover" => "tptp",
- "Command___$prover" => "$command $options %s"
-);
-
-# Query Server
-my $Agent = LWP::UserAgent->new;
-my $Request = POST($SystemOnTPTPFormReplyURL,
- Content_Type => 'form-data',Content => \%URLParameters);
-my $Response = $Agent->request($Request);
-
-#catch errors, let isabelle/watcher know
-if($Response->is_success){
- print $Response->content;
-} else {
- print $Response->message;
- print "\nCANNOT PROVE\n";
-}
--- a/doc-src/IsarAdvanced/Codegen/Thy/Introduction.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarAdvanced/Codegen/Thy/Introduction.thy Wed Jan 28 16:57:12 2009 +0100
@@ -128,7 +128,7 @@
\tikzstyle process=[ellipse, draw, thick, color = green, fill = white];
\tikzstyle process_arrow=[->, semithick, color = green];
\node (HOL) at (0, 4) [style=entity] {@{text "Isabelle/HOL"} theory};
- \node (eqn) at (2, 2) [style=entity] {defining equations};
+ \node (eqn) at (2, 2) [style=entity] {code equations};
\node (iml) at (2, 0) [style=entity] {intermediate language};
\node (seri) at (1, 0) [style=process] {serialisation};
\node (SML) at (0, 3) [style=entity] {@{text SML}};
@@ -153,12 +153,12 @@
The code generator employs a notion of executability
for three foundational executable ingredients known
from functional programming:
- \emph{defining equations}, \emph{datatypes}, and
- \emph{type classes}. A defining equation as a first approximation
+ \emph{code equations}, \emph{datatypes}, and
+ \emph{type classes}. A code equation as a first approximation
is a theorem of the form @{text "f t\<^isub>1 t\<^isub>2 \<dots> t\<^isub>n \<equiv> t"}
(an equation headed by a constant @{text f} with arguments
@{text "t\<^isub>1 t\<^isub>2 \<dots> t\<^isub>n"} and right hand side @{text t}).
- Code generation aims to turn defining equations
+ Code generation aims to turn code equations
into a functional program. This is achieved by three major
components which operate sequentially, i.e. the result of one is
the input
@@ -168,7 +168,7 @@
\item Out of the vast collection of theorems proven in a
\qn{theory}, a reasonable subset modelling
- defining equations is \qn{selected}.
+ code equations is \qn{selected}.
\item On those selected theorems, certain
transformations are carried out
@@ -177,7 +177,7 @@
specifications into equivalent but executable counterparts.
The result is a structured collection of \qn{code theorems}.
- \item Before the selected defining equations are continued with,
+  \item Before the selected code equations are passed on,
they can be \qn{preprocessed}, i.e. subjected to theorem
transformations. This \qn{preprocessor} is an interface which
allows to apply
@@ -185,12 +185,12 @@
to code generation; motivating examples are shown below, see
\secref{sec:preproc}.
The result of the preprocessing step is a structured collection
- of defining equations.
+ of code equations.
- \item These defining equations are \qn{translated} to a program
+ \item These code equations are \qn{translated} to a program
in an abstract intermediate language. Think of it as a kind
of \qt{Mini-Haskell} with four \qn{statements}: @{text data}
- (for datatypes), @{text fun} (stemming from defining equations),
+ (for datatypes), @{text fun} (stemming from code equations),
also @{text class} and @{text inst} (for type classes).
\item Finally, the abstract program is \qn{serialised} into concrete
--- a/doc-src/IsarAdvanced/Codegen/Thy/ML.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarAdvanced/Codegen/Thy/ML.thy Wed Jan 28 16:57:12 2009 +0100
@@ -45,7 +45,7 @@
theorem @{text "thm"} from executable content, if present.
\item @{ML Code.add_eqnl}~@{text "(const, lthms)"}~@{text "thy"} adds
- suspended defining equations @{text lthms} for constant
+ suspended code equations @{text lthms} for constant
@{text const} to executable content.
\item @{ML Code.map_pre}~@{text "f"}~@{text "thy"} changes
@@ -53,11 +53,11 @@
\item @{ML Code.add_functrans}~@{text "(name, f)"}~@{text "thy"} adds
function transformer @{text f} (named @{text name}) to executable content;
- @{text f} is a transformer of the defining equations belonging
+ @{text f} is a transformer of the code equations belonging
to a certain function definition, depending on the
current theory context. Returning @{text NONE} indicates that no
transformation took place; otherwise, the whole process will be iterated
- with the new defining equations.
+ with the new code equations.
\item @{ML Code.del_functrans}~@{text "name"}~@{text "thy"} removes
function transformer named @{text name} from executable content.
@@ -89,12 +89,12 @@
reads a constant as a concrete term expression @{text s}.
\item @{ML Code_Unit.head_eqn}~@{text thy}~@{text thm}
- extracts the constant and its type from a defining equation @{text thm}.
+ extracts the constant and its type from a code equation @{text thm}.
\item @{ML Code_Unit.rewrite_eqn}~@{text ss}~@{text thm}
- rewrites a defining equation @{text thm} with a simpset @{text ss};
+ rewrites a code equation @{text thm} with a simpset @{text ss};
only arguments and right hand side are rewritten,
- not the head of the defining equation.
+ not the head of the code equation.
\end{description}
--- a/doc-src/IsarAdvanced/Codegen/Thy/Program.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarAdvanced/Codegen/Thy/Program.thy Wed Jan 28 16:57:12 2009 +0100
@@ -10,7 +10,7 @@
We have already seen how by default equations stemming from
@{command definition}/@{command primrec}/@{command fun}
statements are used for code generation. This default behaviour
- can be changed, e.g. by providing different defining equations.
+ can be changed, e.g. by providing different code equations.
All kinds of customisation shown in this section is \emph{safe}
in the sense that the user does not have to worry about
correctness -- all programs generatable that way are partially
@@ -21,7 +21,7 @@
text {*
Coming back to our introductory example, we
- could provide an alternative defining equations for @{const dequeue}
+  could provide alternative code equations for @{const dequeue}
explicitly:
*}
@@ -36,7 +36,7 @@
text {*
\noindent The annotation @{text "[code]"} is an @{text Isar}
@{text attribute} which states that the given theorems should be
- considered as defining equations for a @{text fun} statement --
+ considered as code equations for a @{text fun} statement --
the corresponding constant is determined syntactically. The resulting code:
*}
@@ -59,13 +59,13 @@
code_thms %quote dequeue
text {*
- \noindent prints a table with \emph{all} defining equations
+ \noindent prints a table with \emph{all} code equations
for @{const dequeue}, including
- \emph{all} defining equations those equations depend
+ \emph{all} code equations those equations depend
on recursively.
Similarly, the @{command code_deps} command shows a graph
- visualising dependencies between defining equations.
+ visualising dependencies between code equations.
*}
subsection {* @{text class} and @{text instantiation} *}
@@ -155,7 +155,7 @@
The \emph{simpset} allows to employ the full generality of the Isabelle
simplifier. Due to the interpretation of theorems
- as defining equations, rewrites are applied to the right
+ as code equations, rewrites are applied to the right
hand side and the arguments of the left hand side of an
equation, but never to the constant heading the left hand side.
An important special case are \emph{inline theorems} which may be
@@ -207,7 +207,7 @@
the @{command print_codesetup} command.
@{command code_thms} provides a convenient
mechanism to inspect the impact of a preprocessor setup
- on defining equations.
+ on code equations.
\begin{warn}
The attribute \emph{code unfold}
@@ -351,7 +351,7 @@
an explicit class @{class eq} with a corresponding operation
@{const eq_class.eq} such that @{thm eq [no_vars]}.
The preprocessing framework does the rest by propagating the
- @{class eq} constraints through all dependent defining equations.
+ @{class eq} constraints through all dependent code equations.
For datatypes, instances of @{class eq} are implicitly derived
when possible. For other types, you may instantiate @{text eq}
manually like any other type class.
@@ -410,7 +410,7 @@
text %quote {*@{code_stmts "op \<le> \<Colon> _ \<times> _ \<Rightarrow> _ \<times> _ \<Rightarrow> bool" (SML)}*}
text {*
- In some cases, the automatically derived defining equations
+ In some cases, the automatically derived code equations
for equality on a particular type may not be appropriate.
As example, watch the following datatype representing
monomorphic parametric types (where type constructors
@@ -493,7 +493,7 @@
on the right hand side of its first equation the constant
@{const empty_queue} occurs which is unspecified.
- Normally, if constants without any defining equations occur in
+ Normally, if constants without any code equations occur in
a program, the code generator complains (since in most cases
this is not what the user expects). But such constants can also
be thought of as function definitions with no equations which
--- a/doc-src/IsarAdvanced/Codegen/Thy/document/Introduction.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarAdvanced/Codegen/Thy/document/Introduction.tex Wed Jan 28 16:57:12 2009 +0100
@@ -293,7 +293,7 @@
\tikzstyle process=[ellipse, draw, thick, color = green, fill = white];
\tikzstyle process_arrow=[->, semithick, color = green];
\node (HOL) at (0, 4) [style=entity] {\isa{Isabelle{\isacharslash}HOL} theory};
- \node (eqn) at (2, 2) [style=entity] {defining equations};
+ \node (eqn) at (2, 2) [style=entity] {code equations};
\node (iml) at (2, 0) [style=entity] {intermediate language};
\node (seri) at (1, 0) [style=process] {serialisation};
\node (SML) at (0, 3) [style=entity] {\isa{SML}};
@@ -318,12 +318,12 @@
The code generator employs a notion of executability
for three foundational executable ingredients known
from functional programming:
- \emph{defining equations}, \emph{datatypes}, and
- \emph{type classes}. A defining equation as a first approximation
+ \emph{code equations}, \emph{datatypes}, and
+ \emph{type classes}. A code equation as a first approximation
is a theorem of the form \isa{f\ t\isactrlisub {\isadigit{1}}\ t\isactrlisub {\isadigit{2}}\ {\isasymdots}\ t\isactrlisub n\ {\isasymequiv}\ t}
(an equation headed by a constant \isa{f} with arguments
\isa{t\isactrlisub {\isadigit{1}}\ t\isactrlisub {\isadigit{2}}\ {\isasymdots}\ t\isactrlisub n} and right hand side \isa{t}).
- Code generation aims to turn defining equations
+ Code generation aims to turn code equations
into a functional program. This is achieved by three major
components which operate sequentially, i.e. the result of one is
the input
@@ -333,7 +333,7 @@
\item Out of the vast collection of theorems proven in a
\qn{theory}, a reasonable subset modelling
- defining equations is \qn{selected}.
+ code equations is \qn{selected}.
\item On those selected theorems, certain
transformations are carried out
@@ -342,7 +342,7 @@
specifications into equivalent but executable counterparts.
The result is a structured collection of \qn{code theorems}.
- \item Before the selected defining equations are continued with,
+  \item Before the selected code equations are passed on,
they can be \qn{preprocessed}, i.e. subjected to theorem
transformations. This \qn{preprocessor} is an interface which
allows to apply
@@ -350,12 +350,12 @@
to code generation; motivating examples are shown below, see
\secref{sec:preproc}.
The result of the preprocessing step is a structured collection
- of defining equations.
+ of code equations.
- \item These defining equations are \qn{translated} to a program
+ \item These code equations are \qn{translated} to a program
in an abstract intermediate language. Think of it as a kind
of \qt{Mini-Haskell} with four \qn{statements}: \isa{data}
- (for datatypes), \isa{fun} (stemming from defining equations),
+ (for datatypes), \isa{fun} (stemming from code equations),
also \isa{class} and \isa{inst} (for type classes).
\item Finally, the abstract program is \qn{serialised} into concrete
--- a/doc-src/IsarAdvanced/Codegen/Thy/document/ML.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarAdvanced/Codegen/Thy/document/ML.tex Wed Jan 28 16:57:12 2009 +0100
@@ -75,7 +75,7 @@
theorem \isa{thm} from executable content, if present.
\item \verb|Code.add_eqnl|~\isa{{\isacharparenleft}const{\isacharcomma}\ lthms{\isacharparenright}}~\isa{thy} adds
- suspended defining equations \isa{lthms} for constant
+ suspended code equations \isa{lthms} for constant
\isa{const} to executable content.
\item \verb|Code.map_pre|~\isa{f}~\isa{thy} changes
@@ -83,11 +83,11 @@
\item \verb|Code.add_functrans|~\isa{{\isacharparenleft}name{\isacharcomma}\ f{\isacharparenright}}~\isa{thy} adds
function transformer \isa{f} (named \isa{name}) to executable content;
- \isa{f} is a transformer of the defining equations belonging
+ \isa{f} is a transformer of the code equations belonging
to a certain function definition, depending on the
current theory context. Returning \isa{NONE} indicates that no
transformation took place; otherwise, the whole process will be iterated
- with the new defining equations.
+ with the new code equations.
\item \verb|Code.del_functrans|~\isa{name}~\isa{thy} removes
function transformer named \isa{name} from executable content.
@@ -135,12 +135,12 @@
reads a constant as a concrete term expression \isa{s}.
\item \verb|Code_Unit.head_eqn|~\isa{thy}~\isa{thm}
- extracts the constant and its type from a defining equation \isa{thm}.
+ extracts the constant and its type from a code equation \isa{thm}.
\item \verb|Code_Unit.rewrite_eqn|~\isa{ss}~\isa{thm}
- rewrites a defining equation \isa{thm} with a simpset \isa{ss};
+ rewrites a code equation \isa{thm} with a simpset \isa{ss};
only arguments and right hand side are rewritten,
- not the head of the defining equation.
+ not the head of the code equation.
\end{description}%
\end{isamarkuptext}%
--- a/doc-src/IsarAdvanced/Codegen/Thy/document/Program.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarAdvanced/Codegen/Thy/document/Program.tex Wed Jan 28 16:57:12 2009 +0100
@@ -30,7 +30,7 @@
We have already seen how by default equations stemming from
\hyperlink{command.definition}{\mbox{\isa{\isacommand{definition}}}}/\hyperlink{command.primrec}{\mbox{\isa{\isacommand{primrec}}}}/\hyperlink{command.fun}{\mbox{\isa{\isacommand{fun}}}}
statements are used for code generation. This default behaviour
- can be changed, e.g. by providing different defining equations.
+ can be changed, e.g. by providing different code equations.
All kinds of customisation shown in this section is \emph{safe}
in the sense that the user does not have to worry about
correctness -- all programs generatable that way are partially
@@ -44,7 +44,7 @@
%
\begin{isamarkuptext}%
Coming back to our introductory example, we
- could provide an alternative defining equations for \isa{dequeue}
+  could provide alternative code equations for \isa{dequeue}
explicitly:%
\end{isamarkuptext}%
\isamarkuptrue%
@@ -73,7 +73,7 @@
\begin{isamarkuptext}%
\noindent The annotation \isa{{\isacharbrackleft}code{\isacharbrackright}} is an \isa{Isar}
\isa{attribute} which states that the given theorems should be
- considered as defining equations for a \isa{fun} statement --
+ considered as code equations for a \isa{fun} statement --
the corresponding constant is determined syntactically. The resulting code:%
\end{isamarkuptext}%
\isamarkuptrue%
@@ -132,13 +132,13 @@
\endisadelimquote
%
\begin{isamarkuptext}%
-\noindent prints a table with \emph{all} defining equations
+\noindent prints a table with \emph{all} code equations
for \isa{dequeue}, including
- \emph{all} defining equations those equations depend
+ \emph{all} code equations those equations depend
on recursively.
Similarly, the \hyperlink{command.code-deps}{\mbox{\isa{\isacommand{code{\isacharunderscore}deps}}}} command shows a graph
- visualising dependencies between defining equations.%
+ visualising dependencies between code equations.%
\end{isamarkuptext}%
\isamarkuptrue%
%
@@ -398,7 +398,7 @@
The \emph{simpset} allows to employ the full generality of the Isabelle
simplifier. Due to the interpretation of theorems
- as defining equations, rewrites are applied to the right
+ as code equations, rewrites are applied to the right
hand side and the arguments of the left hand side of an
equation, but never to the constant heading the left hand side.
An important special case are \emph{inline theorems} which may be
@@ -489,7 +489,7 @@
the \hyperlink{command.print-codesetup}{\mbox{\isa{\isacommand{print{\isacharunderscore}codesetup}}}} command.
\hyperlink{command.code-thms}{\mbox{\isa{\isacommand{code{\isacharunderscore}thms}}}} provides a convenient
mechanism to inspect the impact of a preprocessor setup
- on defining equations.
+ on code equations.
\begin{warn}
The attribute \emph{code unfold}
@@ -811,7 +811,7 @@
an explicit class \isa{eq} with a corresponding operation
\isa{eq{\isacharunderscore}class{\isachardot}eq} such that \isa{eq{\isacharunderscore}class{\isachardot}eq\ {\isacharequal}\ op\ {\isacharequal}}.
The preprocessing framework does the rest by propagating the
- \isa{eq} constraints through all dependent defining equations.
+ \isa{eq} constraints through all dependent code equations.
For datatypes, instances of \isa{eq} are implicitly derived
when possible. For other types, you may instantiate \isa{eq}
manually like any other type class.
@@ -951,7 +951,7 @@
\endisadelimquote
%
\begin{isamarkuptext}%
-In some cases, the automatically derived defining equations
+In some cases, the automatically derived code equations
for equality on a particular type may not be appropriate.
As example, watch the following datatype representing
monomorphic parametric types (where type constructors
@@ -1165,7 +1165,7 @@
on the right hand side of its first equation the constant
\isa{empty{\isacharunderscore}queue} occurs which is unspecified.
- Normally, if constants without any defining equations occur in
+ Normally, if constants without any code equations occur in
a program, the code generator complains (since in most cases
this is not what the user expects). But such constants can also
be thought of as function definitions with no equations which
--- a/doc-src/IsarImplementation/Thy/ML.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/ML.thy Wed Jan 28 16:57:12 2009 +0100
@@ -291,7 +291,7 @@
in particular files \emph{Pure/library.ML} and \emph{Pure/General/*.ML}.
*}
-section {* Linear transformations *}
+section {* Linear transformations \label{sec:ML-linear-trans} *}
text %mlref {*
\begin{mldecls}
@@ -317,9 +317,9 @@
a theory by constant declararion and primitive definitions:
\smallskip\begin{mldecls}
- @{ML "Sign.declare_const: Properties.T -> (Binding.T * typ) * mixfix
+ @{ML "Sign.declare_const: Properties.T -> (binding * typ) * mixfix
-> theory -> term * theory"} \\
- @{ML "Thm.add_def: bool -> bool -> bstring * term -> theory -> thm * theory"}
+ @{ML "Thm.add_def: bool -> bool -> binding * term -> theory -> thm * theory"}
\end{mldecls}
\noindent Written with naive application, an addition of constant
@@ -328,7 +328,7 @@
\smallskip\begin{mldecls}
@{ML "(fn (t, thy) => Thm.add_def false false
- (\"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})) thy)
+ (Binding.name \"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})) thy)
(Sign.declare_const []
((Binding.name \"bar\", @{typ \"foo => foo\"}), NoSyn) thy)"}
\end{mldecls}
@@ -347,7 +347,7 @@
|> Sign.declare_const [] ((Binding.name \"bar\", @{typ \"foo => foo\"}), NoSyn)
|> (fn (t, thy) => thy
|> Thm.add_def false false
- (\"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})))"}
+ (Binding.name \"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})))"}
\end{mldecls}
*}
@@ -370,7 +370,7 @@
@{ML "thy
|> Sign.declare_const [] ((Binding.name \"bar\", @{typ \"foo => foo\"}), NoSyn)
|-> (fn t => Thm.add_def false false
- (\"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})))
+ (Binding.name \"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})))
"}
\end{mldecls}
@@ -380,7 +380,7 @@
@{ML "thy
|> Sign.declare_const [] ((Binding.name \"bar\", @{typ \"foo => foo\"}), NoSyn)
|>> (fn t => Logic.mk_equals (t, @{term \"%x. x\"}))
-|-> (fn def => Thm.add_def false false (\"bar_def\", def))
+|-> (fn def => Thm.add_def false false (Binding.name \"bar_def\", def))
"}
\end{mldecls}
@@ -392,7 +392,7 @@
|> Sign.declare_const [] ((Binding.name \"bar\", @{typ \"foo => foo\"}), NoSyn)
||> Sign.add_path \"foobar\"
|-> (fn t => Thm.add_def false false
- (\"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})))
+ (Binding.name \"bar_def\", Logic.mk_equals (t, @{term \"%x. x\"})))
||> Sign.restore_naming thy
"}
\end{mldecls}
@@ -404,7 +404,7 @@
|> Sign.declare_const [] ((Binding.name \"bar\", @{typ \"foo => foo\"}), NoSyn)
||>> Sign.declare_const [] ((Binding.name \"foobar\", @{typ \"foo => foo\"}), NoSyn)
|-> (fn (t1, t2) => Thm.add_def false false
- (\"bar_def\", Logic.mk_equals (t1, t2)))
+ (Binding.name \"bar_def\", Logic.mk_equals (t1, t2)))
"}
\end{mldecls}
*}
@@ -451,7 +451,7 @@
((Binding.name const, @{typ \"foo => foo\"}), NoSyn)) consts
|>> map (fn t => Logic.mk_equals (t, @{term \"%x. x\"}))
|-> (fn defs => fold_map (fn def =>
- Thm.add_def false false (\"\", def)) defs)
+ Thm.add_def false false (Binding.empty, def)) defs)
end"}
\end{mldecls}
*}
--- a/doc-src/IsarImplementation/Thy/document/ML.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/document/ML.tex Wed Jan 28 16:57:12 2009 +0100
@@ -319,7 +319,7 @@
\end{isamarkuptext}%
\isamarkuptrue%
%
-\isamarkupsection{Linear transformations%
+\isamarkupsection{Linear transformations \label{sec:ML-linear-trans}%
}
\isamarkuptrue%
%
@@ -366,9 +366,9 @@
a theory by constant declararion and primitive definitions:
\smallskip\begin{mldecls}
- \verb|Sign.declare_const: Properties.T -> (Binding.T * typ) * mixfix|\isasep\isanewline%
+ \verb|Sign.declare_const: Properties.T -> (binding * typ) * mixfix|\isasep\isanewline%
\verb| -> theory -> term * theory| \\
- \verb|Thm.add_def: bool -> bool -> bstring * term -> theory -> thm * theory|
+ \verb|Thm.add_def: bool -> bool -> binding * term -> theory -> thm * theory|
\end{mldecls}
\noindent Written with naive application, an addition of constant
@@ -377,7 +377,7 @@
\smallskip\begin{mldecls}
\verb|(fn (t, thy) => Thm.add_def false false|\isasep\isanewline%
-\verb| ("bar_def", Logic.mk_equals (t, @{term "%x. x"})) thy)|\isasep\isanewline%
+\verb| (Binding.name "bar_def", Logic.mk_equals (t, @{term "%x. x"})) thy)|\isasep\isanewline%
\verb| (Sign.declare_const []|\isasep\isanewline%
\verb| ((Binding.name "bar", @{typ "foo => foo"}), NoSyn) thy)|
\end{mldecls}
@@ -397,7 +397,7 @@
\verb||\verb,|,\verb|> Sign.declare_const [] ((Binding.name "bar", @{typ "foo => foo"}), NoSyn)|\isasep\isanewline%
\verb||\verb,|,\verb|> (fn (t, thy) => thy|\isasep\isanewline%
\verb||\verb,|,\verb|> Thm.add_def false false|\isasep\isanewline%
-\verb| ("bar_def", Logic.mk_equals (t, @{term "%x. x"})))|
+\verb| (Binding.name "bar_def", Logic.mk_equals (t, @{term "%x. x"})))|
\end{mldecls}%
\end{isamarkuptext}%
\isamarkuptrue%
@@ -435,7 +435,7 @@
\verb|thy|\isasep\isanewline%
\verb||\verb,|,\verb|> Sign.declare_const [] ((Binding.name "bar", @{typ "foo => foo"}), NoSyn)|\isasep\isanewline%
\verb||\verb,|,\verb|-> (fn t => Thm.add_def false false|\isasep\isanewline%
-\verb| ("bar_def", Logic.mk_equals (t, @{term "%x. x"})))|\isasep\isanewline%
+\verb| (Binding.name "bar_def", Logic.mk_equals (t, @{term "%x. x"})))|\isasep\isanewline%
\end{mldecls}
@@ -445,7 +445,7 @@
\verb|thy|\isasep\isanewline%
\verb||\verb,|,\verb|> Sign.declare_const [] ((Binding.name "bar", @{typ "foo => foo"}), NoSyn)|\isasep\isanewline%
\verb||\verb,|,\verb|>> (fn t => Logic.mk_equals (t, @{term "%x. x"}))|\isasep\isanewline%
-\verb||\verb,|,\verb|-> (fn def => Thm.add_def false false ("bar_def", def))|\isasep\isanewline%
+\verb||\verb,|,\verb|-> (fn def => Thm.add_def false false (Binding.name "bar_def", def))|\isasep\isanewline%
\end{mldecls}
@@ -457,7 +457,7 @@
\verb||\verb,|,\verb|> Sign.declare_const [] ((Binding.name "bar", @{typ "foo => foo"}), NoSyn)|\isasep\isanewline%
\verb||\verb,|,\verb||\verb,|,\verb|> Sign.add_path "foobar"|\isasep\isanewline%
\verb||\verb,|,\verb|-> (fn t => Thm.add_def false false|\isasep\isanewline%
-\verb| ("bar_def", Logic.mk_equals (t, @{term "%x. x"})))|\isasep\isanewline%
+\verb| (Binding.name "bar_def", Logic.mk_equals (t, @{term "%x. x"})))|\isasep\isanewline%
\verb||\verb,|,\verb||\verb,|,\verb|> Sign.restore_naming thy|\isasep\isanewline%
\end{mldecls}
@@ -469,7 +469,7 @@
\verb||\verb,|,\verb|> Sign.declare_const [] ((Binding.name "bar", @{typ "foo => foo"}), NoSyn)|\isasep\isanewline%
\verb||\verb,|,\verb||\verb,|,\verb|>> Sign.declare_const [] ((Binding.name "foobar", @{typ "foo => foo"}), NoSyn)|\isasep\isanewline%
\verb||\verb,|,\verb|-> (fn (t1, t2) => Thm.add_def false false|\isasep\isanewline%
-\verb| ("bar_def", Logic.mk_equals (t1, t2)))|\isasep\isanewline%
+\verb| (Binding.name "bar_def", Logic.mk_equals (t1, t2)))|\isasep\isanewline%
\end{mldecls}%
\end{isamarkuptext}%
@@ -531,7 +531,7 @@
\verb| ((Binding.name const, @{typ "foo => foo"}), NoSyn)) consts|\isasep\isanewline%
\verb| |\verb,|,\verb|>> map (fn t => Logic.mk_equals (t, @{term "%x. x"}))|\isasep\isanewline%
\verb| |\verb,|,\verb|-> (fn defs => fold_map (fn def =>|\isasep\isanewline%
-\verb| Thm.add_def false false ("", def)) defs)|\isasep\isanewline%
+\verb| Thm.add_def false false (Binding.empty, def)) defs)|\isasep\isanewline%
\verb|end|
\end{mldecls}%
\end{isamarkuptext}%
--- a/doc-src/IsarImplementation/Thy/document/logic.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/document/logic.tex Wed Jan 28 16:57:12 2009 +0100
@@ -325,9 +325,9 @@
\indexml{fastype\_of}\verb|fastype_of: term -> typ| \\
\indexml{lambda}\verb|lambda: term -> term -> term| \\
\indexml{betapply}\verb|betapply: term * term -> term| \\
- \indexml{Sign.declare\_const}\verb|Sign.declare_const: Properties.T -> (Binding.T * typ) * mixfix ->|\isasep\isanewline%
+ \indexml{Sign.declare\_const}\verb|Sign.declare_const: Properties.T -> (binding * typ) * mixfix ->|\isasep\isanewline%
\verb| theory -> term * theory| \\
- \indexml{Sign.add\_abbrev}\verb|Sign.add_abbrev: string -> Properties.T -> Binding.T * term ->|\isasep\isanewline%
+ \indexml{Sign.add\_abbrev}\verb|Sign.add_abbrev: string -> Properties.T -> binding * term ->|\isasep\isanewline%
\verb| theory -> (term * term) * theory| \\
\indexml{Sign.const\_typargs}\verb|Sign.const_typargs: theory -> string * typ -> typ list| \\
\indexml{Sign.const\_instance}\verb|Sign.const_instance: theory -> string * typ list -> typ| \\
@@ -594,9 +594,9 @@
\verb| -> (string * ('a -> thm)) * theory| \\
\end{mldecls}
\begin{mldecls}
- \indexml{Theory.add\_axioms\_i}\verb|Theory.add_axioms_i: (string * term) list -> theory -> theory| \\
+ \indexml{Theory.add\_axioms\_i}\verb|Theory.add_axioms_i: (binding * term) list -> theory -> theory| \\
\indexml{Theory.add\_deps}\verb|Theory.add_deps: string -> string * typ -> (string * typ) list -> theory -> theory| \\
- \indexml{Theory.add\_defs\_i}\verb|Theory.add_defs_i: bool -> bool -> (bstring * term) list -> theory -> theory| \\
+ \indexml{Theory.add\_defs\_i}\verb|Theory.add_defs_i: bool -> bool -> (binding * term) list -> theory -> theory| \\
\end{mldecls}
\begin{description}
--- a/doc-src/IsarImplementation/Thy/document/prelim.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/document/prelim.tex Wed Jan 28 16:57:12 2009 +0100
@@ -816,13 +816,13 @@
\indexmltype{NameSpace.naming}\verb|type NameSpace.naming| \\
\indexml{NameSpace.default\_naming}\verb|NameSpace.default_naming: NameSpace.naming| \\
\indexml{NameSpace.add\_path}\verb|NameSpace.add_path: string -> NameSpace.naming -> NameSpace.naming| \\
- \indexml{NameSpace.full\_name}\verb|NameSpace.full_name: NameSpace.naming -> Binding.T -> string| \\
+ \indexml{NameSpace.full\_name}\verb|NameSpace.full_name: NameSpace.naming -> binding -> string| \\
\end{mldecls}
\begin{mldecls}
\indexmltype{NameSpace.T}\verb|type NameSpace.T| \\
\indexml{NameSpace.empty}\verb|NameSpace.empty: NameSpace.T| \\
\indexml{NameSpace.merge}\verb|NameSpace.merge: NameSpace.T * NameSpace.T -> NameSpace.T| \\
- \indexml{NameSpace.declare}\verb|NameSpace.declare: NameSpace.naming -> Binding.T -> NameSpace.T -> string * NameSpace.T| \\
+ \indexml{NameSpace.declare}\verb|NameSpace.declare: NameSpace.naming -> binding -> NameSpace.T -> string * NameSpace.T| \\
\indexml{NameSpace.intern}\verb|NameSpace.intern: NameSpace.T -> string -> string| \\
\indexml{NameSpace.extern}\verb|NameSpace.extern: NameSpace.T -> string -> string| \\
\end{mldecls}
--- a/doc-src/IsarImplementation/Thy/logic.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/logic.thy Wed Jan 28 16:57:12 2009 +0100
@@ -323,9 +323,9 @@
@{index_ML fastype_of: "term -> typ"} \\
@{index_ML lambda: "term -> term -> term"} \\
@{index_ML betapply: "term * term -> term"} \\
- @{index_ML Sign.declare_const: "Properties.T -> (Binding.T * typ) * mixfix ->
+ @{index_ML Sign.declare_const: "Properties.T -> (binding * typ) * mixfix ->
theory -> term * theory"} \\
- @{index_ML Sign.add_abbrev: "string -> Properties.T -> Binding.T * term ->
+ @{index_ML Sign.add_abbrev: "string -> Properties.T -> binding * term ->
theory -> (term * term) * theory"} \\
@{index_ML Sign.const_typargs: "theory -> string * typ -> typ list"} \\
@{index_ML Sign.const_instance: "theory -> string * typ list -> typ"} \\
@@ -596,9 +596,9 @@
-> (string * ('a -> thm)) * theory"} \\
\end{mldecls}
\begin{mldecls}
- @{index_ML Theory.add_axioms_i: "(string * term) list -> theory -> theory"} \\
+ @{index_ML Theory.add_axioms_i: "(binding * term) list -> theory -> theory"} \\
@{index_ML Theory.add_deps: "string -> string * typ -> (string * typ) list -> theory -> theory"} \\
- @{index_ML Theory.add_defs_i: "bool -> bool -> (bstring * term) list -> theory -> theory"} \\
+ @{index_ML Theory.add_defs_i: "bool -> bool -> (binding * term) list -> theory -> theory"} \\
\end{mldecls}
\begin{description}
--- a/doc-src/IsarImplementation/Thy/prelim.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/prelim.thy Wed Jan 28 16:57:12 2009 +0100
@@ -707,13 +707,13 @@
@{index_ML_type NameSpace.naming} \\
@{index_ML NameSpace.default_naming: NameSpace.naming} \\
@{index_ML NameSpace.add_path: "string -> NameSpace.naming -> NameSpace.naming"} \\
- @{index_ML NameSpace.full_name: "NameSpace.naming -> Binding.T -> string"} \\
+ @{index_ML NameSpace.full_name: "NameSpace.naming -> binding -> string"} \\
\end{mldecls}
\begin{mldecls}
@{index_ML_type NameSpace.T} \\
@{index_ML NameSpace.empty: NameSpace.T} \\
@{index_ML NameSpace.merge: "NameSpace.T * NameSpace.T -> NameSpace.T"} \\
- @{index_ML NameSpace.declare: "NameSpace.naming -> Binding.T -> NameSpace.T -> string * NameSpace.T"} \\
+ @{index_ML NameSpace.declare: "NameSpace.naming -> binding -> NameSpace.T -> string * NameSpace.T"} \\
@{index_ML NameSpace.intern: "NameSpace.T -> string -> string"} \\
@{index_ML NameSpace.extern: "NameSpace.T -> string -> string"} \\
\end{mldecls}
--- a/doc-src/IsarRef/Thy/HOL_Specific.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarRef/Thy/HOL_Specific.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
theory HOL_Specific
imports Main
begin
@@ -1163,21 +1161,21 @@
module name onto another.
\item @{command (HOL) "code_abort"} declares constants which are not
- required to have a definition by means of defining equations; if
+ required to have a definition by means of code equations; if
needed these are implemented by program abort instead.
\item @{attribute (HOL) code} explicitly selects (or with option
- ``@{text "del"}'' deselects) a defining equation for code
- generation. Usually packages introducing defining equations provide
+ ``@{text "del"}'' deselects) a code equation for code
+ generation. Usually packages introducing code equations provide
a reasonable default setup for selection.
\item @{attribute (HOL) code}~@{text inline} declares (or with
option ``@{text "del"}'' removes) inlining theorems which are
- applied as rewrite rules to any defining equation during
+ applied as rewrite rules to any code equation during
preprocessing.
\item @{command (HOL) "print_codesetup"} gives an overview on
- selected defining equations, code generator datatypes and
+ selected code equations, code generator datatypes and
preprocessor setup.
\end{description}
--- a/doc-src/IsarRef/Thy/Spec.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarRef/Thy/Spec.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
theory Spec
imports Main
begin
@@ -438,7 +436,6 @@
\begin{matharray}{rcl}
@{command_def "interpretation"} & : & @{text "theory \<rightarrow> proof(prove)"} \\
@{command_def "interpret"} & : & @{text "proof(state) | proof(chain \<rightarrow> proof(prove)"} \\
- @{command_def "print_interps"}@{text "\<^sup>*"} & : & @{text "context \<rightarrow>"} \\
\end{matharray}
\indexouternonterm{interp}
@@ -447,8 +444,6 @@
;
'interpret' interp
;
- 'print\_interps' '!'? name
- ;
instantiation: ('[' (inst+) ']')?
;
interp: (name ':')? \\ (contextexpr instantiation |
@@ -533,13 +528,6 @@
interprets @{text expr} in the proof context and is otherwise
similar to interpretation in theories.
- \item @{command "print_interps"}~@{text loc} prints the
- interpretations of a particular locale @{text loc} that are active
- in the current context, either theory or proof context. The
- exclamation point argument triggers printing of \emph{witness}
- theorems justifying interpretations. These are normally omitted
- from the output.
-
\end{description}
\begin{warn}
--- a/doc-src/IsarRef/Thy/document/HOL_Specific.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/HOL_Specific.tex Wed Jan 28 16:57:12 2009 +0100
@@ -3,8 +3,6 @@
\def\isabellecontext{HOL{\isacharunderscore}Specific}%
%
\isadelimtheory
-\isanewline
-\isanewline
%
\endisadelimtheory
%
@@ -1166,21 +1164,21 @@
module name onto another.
\item \hyperlink{command.HOL.code-abort}{\mbox{\isa{\isacommand{code{\isacharunderscore}abort}}}} declares constants which are not
- required to have a definition by means of defining equations; if
+ required to have a definition by means of code equations; if
needed these are implemented by program abort instead.
\item \hyperlink{attribute.HOL.code}{\mbox{\isa{code}}} explicitly selects (or with option
- ``\isa{{\isachardoublequote}del{\isachardoublequote}}'' deselects) a defining equation for code
- generation. Usually packages introducing defining equations provide
+ ``\isa{{\isachardoublequote}del{\isachardoublequote}}'' deselects) a code equation for code
+ generation. Usually packages introducing code equations provide
a reasonable default setup for selection.
\item \hyperlink{attribute.HOL.code}{\mbox{\isa{code}}}~\isa{inline} declares (or with
option ``\isa{{\isachardoublequote}del{\isachardoublequote}}'' removes) inlining theorems which are
- applied as rewrite rules to any defining equation during
+ applied as rewrite rules to any code equation during
preprocessing.
\item \hyperlink{command.HOL.print-codesetup}{\mbox{\isa{\isacommand{print{\isacharunderscore}codesetup}}}} gives an overview on
- selected defining equations, code generator datatypes and
+ selected code equations, code generator datatypes and
preprocessor setup.
\end{description}%
--- a/doc-src/IsarRef/Thy/document/Spec.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Spec.tex Wed Jan 28 16:57:12 2009 +0100
@@ -3,8 +3,6 @@
\def\isabellecontext{Spec}%
%
\isadelimtheory
-\isanewline
-\isanewline
%
\endisadelimtheory
%
@@ -455,7 +453,6 @@
\begin{matharray}{rcl}
\indexdef{}{command}{interpretation}\hypertarget{command.interpretation}{\hyperlink{command.interpretation}{\mbox{\isa{\isacommand{interpretation}}}}} & : & \isa{{\isachardoublequote}theory\ {\isasymrightarrow}\ proof{\isacharparenleft}prove{\isacharparenright}{\isachardoublequote}} \\
\indexdef{}{command}{interpret}\hypertarget{command.interpret}{\hyperlink{command.interpret}{\mbox{\isa{\isacommand{interpret}}}}} & : & \isa{{\isachardoublequote}proof{\isacharparenleft}state{\isacharparenright}\ {\isacharbar}\ proof{\isacharparenleft}chain\ {\isasymrightarrow}\ proof{\isacharparenleft}prove{\isacharparenright}{\isachardoublequote}} \\
- \indexdef{}{command}{print\_interps}\hypertarget{command.print-interps}{\hyperlink{command.print-interps}{\mbox{\isa{\isacommand{print{\isacharunderscore}interps}}}}}\isa{{\isachardoublequote}\isactrlsup {\isacharasterisk}{\isachardoublequote}} & : & \isa{{\isachardoublequote}context\ {\isasymrightarrow}{\isachardoublequote}} \\
\end{matharray}
\indexouternonterm{interp}
@@ -464,8 +461,6 @@
;
'interpret' interp
;
- 'print\_interps' '!'? name
- ;
instantiation: ('[' (inst+) ']')?
;
interp: (name ':')? \\ (contextexpr instantiation |
@@ -545,13 +540,6 @@
interprets \isa{expr} in the proof context and is otherwise
similar to interpretation in theories.
- \item \hyperlink{command.print-interps}{\mbox{\isa{\isacommand{print{\isacharunderscore}interps}}}}~\isa{loc} prints the
- interpretations of a particular locale \isa{loc} that are active
- in the current context, either theory or proof context. The
- exclamation point argument triggers printing of \emph{witness}
- theorems justifying interpretations. These are normally omitted
- from the output.
-
\end{description}
\begin{warn}
--- a/doc-src/Locales/Locales/Examples.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/Locales/Locales/Examples.thy Wed Jan 28 16:57:12 2009 +0100
@@ -608,7 +608,7 @@
and @{text lattice} be placed between @{text partial_order}
and @{text total_order}, as shown in Figure~\ref{fig:lattices}(b).
Changes to the locale hierarchy may be declared
- with the \isakeyword{interpretation} command. *}
+ with the \isakeyword{sublocale} command. *}
sublocale %visible total_order \<subseteq> lattice
--- a/doc-src/Locales/Locales/Examples3.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/Locales/Locales/Examples3.thy Wed Jan 28 16:57:12 2009 +0100
@@ -178,8 +178,6 @@
nat_dvd_join_eq} are named since they are handy in the proof of
the subsequent interpretation. *}
-ML %invisible {* set quick_and_dirty *}
-
(*
definition
is_lcm :: "nat \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> bool" where
@@ -200,8 +198,6 @@
lemma %invisible gcd_lcm_distr:
"gcd x (lcm y z) = lcm (gcd x y) (gcd x z)" sorry
-ML %invisible {* reset quick_and_dirty *}
-
interpretation %visible nat_dvd:
distrib_lattice "op dvd :: nat \<Rightarrow> nat \<Rightarrow> bool"
apply unfold_locales
@@ -262,7 +258,7 @@
preserving maps can be declared in the following way. *}
locale order_preserving =
- partial_order + po': partial_order le' for le' (infixl "\<preceq>" 50) +
+ le: partial_order + le': partial_order le' for le' (infixl "\<preceq>" 50) +
fixes \<phi> :: "'a \<Rightarrow> 'b"
assumes hom_le: "x \<sqsubseteq> y \<Longrightarrow> \<phi> x \<preceq> \<phi> y"
@@ -288,8 +284,7 @@
obtained by appending the conclusions of the left locale and of the
right locale. *}
-text {* % FIXME needs update
- The locale @{text order_preserving} contains theorems for both
+text {* The locale @{text order_preserving} contains theorems for both
orders @{text \<sqsubseteq>} and @{text \<preceq>}. How can one refer to a theorem for
a particular order, @{text \<sqsubseteq>} or @{text \<preceq>}? Names in locales are
qualified by the locale parameters. More precisely, a name is
@@ -298,8 +293,8 @@
context %invisible order_preserving begin
-text {* % FIXME needs update?
- @{thm [source] less_le_trans}: @{thm less_le_trans}
+text {*
+ @{thm [source] le.less_le_trans}: @{thm le.less_le_trans}
@{thm [source] hom_le}: @{thm hom_le}
*}
@@ -307,12 +302,11 @@
text {* When renaming a locale, the morphism is also applied
to the qualifiers. Hence theorems for the partial order @{text \<preceq>}
are qualified by @{text le'}. For example, @{thm [source]
- po'.less_le_trans}: @{thm [display, indent=2] po'.less_le_trans} *}
+ le'.less_le_trans}: @{thm [display, indent=2] le'.less_le_trans} *}
end %invisible
-text {* % FIXME needs update?
- This example reveals that there is no infix syntax for the strict
+text {* This example reveals that there is no infix syntax for the strict
version of @{text \<preceq>}! This can, of course, not be introduced
automatically, but it can be declared manually through an abbreviation.
*}
@@ -321,7 +315,7 @@
less' (infixl "\<prec>" 50) where "less' \<equiv> partial_order.less le'"
text {* Now the theorem is displayed nicely as
- @{thm [locale=order_preserving] po'.less_le_trans}. *}
+ @{thm [locale=order_preserving] le'.less_le_trans}. *}
text {* Not only names of theorems are qualified. In fact, all names
are qualified, in particular names introduced by definitions and
@@ -333,7 +327,7 @@
text {* Two more locales illustrate working with locale expressions.
A map @{text \<phi>} is a lattice homomorphism if it preserves meet and join. *}
- locale lattice_hom = lattice + lat'!: lattice le' for le' (infixl "\<preceq>" 50) +
+ locale lattice_hom = le: lattice + le': lattice le' for le' (infixl "\<preceq>" 50) +
fixes \<phi>
assumes hom_meet:
"\<phi> (lattice.meet le x y) = lattice.meet le' (\<phi> x) (\<phi> y)"
@@ -341,9 +335,9 @@
"\<phi> (lattice.join le x y) = lattice.join le' (\<phi> x) (\<phi> y)"
abbreviation (in lattice_hom)
- meet' (infixl "\<sqinter>''" 50) where "meet' \<equiv> lat'.meet"
+ meet' (infixl "\<sqinter>''" 50) where "meet' \<equiv> le'.meet"
abbreviation (in lattice_hom)
- join' (infixl "\<squnion>''" 50) where "join' \<equiv> lat'.join"
+ join' (infixl "\<squnion>''" 50) where "join' \<equiv> le'.join"
text {* A homomorphism is an endomorphism if both orders coincide. *}
@@ -400,17 +394,17 @@
sublocale lattice_hom \<subseteq> order_preserving proof unfold_locales
fix x y
assume "x \<sqsubseteq> y"
- then have "y = (x \<squnion> y)" by (simp add: join_connection)
+ then have "y = (x \<squnion> y)" by (simp add: le.join_connection)
then have "\<phi> y = (\<phi> x \<squnion>' \<phi> y)" by (simp add: hom_join [symmetric])
- then show "\<phi> x \<preceq> \<phi> y" by (simp add: lat'.join_connection)
+ then show "\<phi> x \<preceq> \<phi> y" by (simp add: le'.join_connection)
qed
text {* Theorems and other declarations --- syntax, in particular ---
from the locale @{text order_preserving} are now active in @{text
lattice_hom}, for example
- @{thm [locale=lattice_hom, source] lat'.less_le_trans}:
- @{thm [locale=lattice_hom] lat'.less_le_trans}
+ @{thm [locale=lattice_hom, source] le'.less_le_trans}:
+ @{thm [locale=lattice_hom] le'.less_le_trans}
*}
@@ -450,7 +444,9 @@
\textit{attr-name} & ::=
& \textit{name} $|$ \textit{attribute} $|$
- \textit{name} \textit{attribute} \\[2ex]
+ \textit{name} \textit{attribute} \\
+ \textit{qualifier} & ::=
+ & \textit{name} [``\textbf{!}''] \\[2ex]
\multicolumn{3}{l}{Context Elements} \\
@@ -490,19 +486,23 @@
\multicolumn{3}{l}{Locale Expressions} \\
- \textit{rename} & ::=
- & \textit{name} [ \textit{mixfix} ] $|$ ``\textbf{\_}'' \\
- \textit{expr} & ::=
- & \textit{renamed-expr} ( ``\textbf{+}'' \textit{renamed-expr} )$^*$ \\
- \textit{renamed-expr} & ::=
- & ( \textit{qualified-name} $|$
- ``\textbf{(}'' \textit{expr} ``\textbf{)}'' ) \textit{rename}$^*$ \\[2ex]
+ \textit{pos-insts} & ::=
+ & ( \textit{term} $|$ ``\textbf{\_}'' )$^*$ \\
+ \textit{named-insts} & ::=
+ & \textbf{where} \textit{name} ``\textbf{=}'' \textit{term}
+ ( \textbf{and} \textit{name} ``\textbf{=}'' \textit{term} )$^*$ \\
+ \textit{instance} & ::=
+ & [ \textit{qualifier} \textbf{:} ]
+    \textit{qualified-name} ( \textit{pos-insts} $|$ \textit{named-insts} ) \\
+ \textit{expression} & ::=
+ & \textit{instance} ( ``\textbf{+}'' \textit{instance} )$^*$
+ [ \textbf{for} \textit{fixes} ( \textbf{and} \textit{fixes} )$^*$ ] \\[2ex]
\multicolumn{3}{l}{Declaration of Locales} \\
\textit{locale} & ::=
& \textit{element}$^+$ \\
- & | & \textit{locale-expr} [ ``\textbf{+}'' \textit{element}$^+$ ] \\
+ & | & \textit{expression} [ ``\textbf{+}'' \textit{element}$^+$ ] \\
\textit{toplevel} & ::=
& \textbf{locale} \textit{name} [ ``\textbf{=}''
\textit{locale} ] \\[2ex]
@@ -511,19 +511,17 @@
\textit{equation} & ::= & [ \textit{attr-name} ``\textbf{:}'' ]
\textit{prop} \\
- \textit{insts} & ::= & [ ``\textbf{[}'' \textit{term}$^+$
- ``\textbf{]}'' ] \\
- & & [ \textbf{where} \textit{equation} ( \textbf{and}
- \textit{equation} )$^*$ ] \\
+ \textit{equations} & ::= & \textbf{where} \textit{equation} ( \textbf{and}
+ \textit{equation} )$^*$ \\
\textit{toplevel} & ::=
- & \textbf{interpretation} \textit{name} ( ``$<$'' $|$
- ``$\subseteq$'' ) \textit{expr} \textit{proof} \\
+ & \textbf{sublocale} \textit{name} ( ``$<$'' $|$
+ ``$\subseteq$'' ) \textit{expression} \textit{proof} \\
& |
- & \textbf{interpretation} [ \textit{attr-name} ``\textbf{:}'' ]
- \textit{expr} \textit{insts} \textit{proof} \\
+ & \textbf{interpretation}
+ \textit{expression} [ \textit{equations} ] \textit{proof} \\
& |
- & \textbf{interpret} [ \textit{attr-name} ``\textbf{:}'' ]
- \textit{expr} \textit{insts} \textit{proof} \\[2ex]
+ & \textbf{interpret}
+ \textit{expression} \textit{proof} \\[2ex]
\multicolumn{3}{l}{Diagnostics} \\
@@ -533,7 +531,7 @@
\end{tabular}
\end{center}
\hrule
-\caption{Syntax of Locale Commands.}
+\caption{Syntax of Locale Commands (abridged).}
\label{tab:commands}
\end{table}
*}
--- a/doc-src/Locales/Locales/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/Locales/Locales/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,4 +1,4 @@
no_document use_thy "GCD";
use_thy "Examples1";
use_thy "Examples2";
-use_thy "Examples3";
+setmp_noncritical quick_and_dirty true use_thy "Examples3";
--- a/doc-src/Locales/Locales/document/Examples.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/Locales/Locales/document/Examples.tex Wed Jan 28 16:57:12 2009 +0100
@@ -1213,7 +1213,7 @@
and \isa{lattice} be placed between \isa{partial{\isacharunderscore}order}
and \isa{total{\isacharunderscore}order}, as shown in Figure~\ref{fig:lattices}(b).
Changes to the locale hierarchy may be declared
- with the \isakeyword{interpretation} command.%
+ with the \isakeyword{sublocale} command.%
\end{isamarkuptext}%
\isamarkuptrue%
%
--- a/doc-src/Locales/Locales/document/Examples3.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/Locales/Locales/document/Examples3.tex Wed Jan 28 16:57:12 2009 +0100
@@ -362,18 +362,10 @@
\endisadeliminvisible
%
\isataginvisible
-\isacommand{ML}\isamarkupfalse%
-\ {\isacharverbatimopen}\ set\ quick{\isacharunderscore}and{\isacharunderscore}dirty\ {\isacharverbatimclose}\isanewline
-\isanewline
-\isanewline
-\isanewline
\isacommand{lemma}\isamarkupfalse%
\ gcd{\isacharunderscore}lcm{\isacharunderscore}distr{\isacharcolon}\isanewline
\ \ {\isachardoublequoteopen}gcd\ x\ {\isacharparenleft}lcm\ y\ z{\isacharparenright}\ {\isacharequal}\ lcm\ {\isacharparenleft}gcd\ x\ y{\isacharparenright}\ {\isacharparenleft}gcd\ x\ z{\isacharparenright}{\isachardoublequoteclose}\ \isacommand{sorry}\isamarkupfalse%
-\isanewline
-\isanewline
-\isacommand{ML}\isamarkupfalse%
-\ {\isacharverbatimopen}\ reset\ quick{\isacharunderscore}and{\isacharunderscore}dirty\ {\isacharverbatimclose}%
+%
\endisataginvisible
{\isafoldinvisible}%
%
@@ -383,7 +375,7 @@
\isanewline
%
\isadelimvisible
-\ \ \isanewline
+\isanewline
%
\endisadelimvisible
%
@@ -476,7 +468,7 @@
\isamarkuptrue%
\ \ \isacommand{locale}\isamarkupfalse%
\ order{\isacharunderscore}preserving\ {\isacharequal}\isanewline
-\ \ \ \ partial{\isacharunderscore}order\ {\isacharplus}\ po{\isacharprime}{\isacharcolon}\ partial{\isacharunderscore}order\ le{\isacharprime}\ \isakeyword{for}\ le{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasympreceq}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ {\isacharplus}\isanewline
+\ \ \ \ le{\isacharcolon}\ partial{\isacharunderscore}order\ {\isacharplus}\ le{\isacharprime}{\isacharcolon}\ partial{\isacharunderscore}order\ le{\isacharprime}\ \isakeyword{for}\ le{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasympreceq}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ {\isacharplus}\isanewline
\ \ \ \ \isakeyword{fixes}\ {\isasymphi}\ {\isacharcolon}{\isacharcolon}\ {\isachardoublequoteopen}{\isacharprime}a\ {\isasymRightarrow}\ {\isacharprime}b{\isachardoublequoteclose}\isanewline
\ \ \ \ \isakeyword{assumes}\ hom{\isacharunderscore}le{\isacharcolon}\ {\isachardoublequoteopen}x\ {\isasymsqsubseteq}\ y\ {\isasymLongrightarrow}\ {\isasymphi}\ x\ {\isasympreceq}\ {\isasymphi}\ y{\isachardoublequoteclose}%
\begin{isamarkuptext}%
@@ -505,8 +497,7 @@
\isamarkuptrue%
%
\begin{isamarkuptext}%
-% FIXME needs update
- The locale \isa{order{\isacharunderscore}preserving} contains theorems for both
+The locale \isa{order{\isacharunderscore}preserving} contains theorems for both
orders \isa{{\isasymsqsubseteq}} and \isa{{\isasympreceq}}. How can one refer to a theorem for
a particular order, \isa{{\isasymsqsubseteq}} or \isa{{\isasympreceq}}? Names in locales are
qualified by the locale parameters. More precisely, a name is
@@ -530,8 +521,7 @@
\endisadeliminvisible
%
\begin{isamarkuptext}%
-% FIXME needs update?
- \isa{less{\isacharunderscore}le{\isacharunderscore}trans}: \isa{{\isasymlbrakk}partial{\isacharunderscore}order{\isachardot}less\ op\ {\isasympreceq}\ {\isacharquery}x\ {\isacharquery}y{\isacharsemicolon}\ {\isacharquery}y\ {\isasympreceq}\ {\isacharquery}z{\isasymrbrakk}\ {\isasymLongrightarrow}\ partial{\isacharunderscore}order{\isachardot}less\ op\ {\isasympreceq}\ {\isacharquery}x\ {\isacharquery}z}
+\isa{le{\isachardot}less{\isacharunderscore}le{\isacharunderscore}trans}: \isa{{\isasymlbrakk}{\isacharquery}x\ {\isasymsqsubset}\ {\isacharquery}y{\isacharsemicolon}\ {\isacharquery}y\ {\isasymsqsubseteq}\ {\isacharquery}z{\isasymrbrakk}\ {\isasymLongrightarrow}\ {\isacharquery}x\ {\isasymsqsubset}\ {\isacharquery}z}
\isa{hom{\isacharunderscore}le}: \isa{{\isacharquery}x\ {\isasymsqsubseteq}\ {\isacharquery}y\ {\isasymLongrightarrow}\ {\isasymphi}\ {\isacharquery}x\ {\isasympreceq}\ {\isasymphi}\ {\isacharquery}y}%
\end{isamarkuptext}%
@@ -540,7 +530,7 @@
\begin{isamarkuptext}%
When renaming a locale, the morphism is also applied
to the qualifiers. Hence theorems for the partial order \isa{{\isasympreceq}}
- are qualified by \isa{le{\isacharprime}}. For example, \isa{po{\isacharprime}{\isachardot}less{\isacharunderscore}le{\isacharunderscore}trans}: \begin{isabelle}%
+ are qualified by \isa{le{\isacharprime}}. For example, \isa{le{\isacharprime}{\isachardot}less{\isacharunderscore}le{\isacharunderscore}trans}: \begin{isabelle}%
\ \ {\isasymlbrakk}partial{\isacharunderscore}order{\isachardot}less\ op\ {\isasympreceq}\ {\isacharquery}x\ {\isacharquery}y{\isacharsemicolon}\ {\isacharquery}y\ {\isasympreceq}\ {\isacharquery}z{\isasymrbrakk}\isanewline
\isaindent{\ \ }{\isasymLongrightarrow}\ partial{\isacharunderscore}order{\isachardot}less\ op\ {\isasympreceq}\ {\isacharquery}x\ {\isacharquery}z%
\end{isabelle}%
@@ -562,8 +552,7 @@
\endisadeliminvisible
%
\begin{isamarkuptext}%
-% FIXME needs update?
- This example reveals that there is no infix syntax for the strict
+This example reveals that there is no infix syntax for the strict
version of \isa{{\isasympreceq}}! This can, of course, not be introduced
automatically, but it can be declared manually through an abbreviation.%
\end{isamarkuptext}%
@@ -592,7 +581,7 @@
\end{isamarkuptext}%
\isamarkuptrue%
\ \ \isacommand{locale}\isamarkupfalse%
-\ lattice{\isacharunderscore}hom\ {\isacharequal}\ lattice\ {\isacharplus}\ lat{\isacharprime}{\isacharbang}{\isacharcolon}\ lattice\ le{\isacharprime}\ \isakeyword{for}\ le{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasympreceq}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ {\isacharplus}\isanewline
+\ lattice{\isacharunderscore}hom\ {\isacharequal}\ le{\isacharcolon}\ lattice\ {\isacharplus}\ le{\isacharprime}{\isacharcolon}\ lattice\ le{\isacharprime}\ \isakeyword{for}\ le{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasympreceq}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ {\isacharplus}\isanewline
\ \ \ \ \isakeyword{fixes}\ {\isasymphi}\isanewline
\ \ \ \ \isakeyword{assumes}\ hom{\isacharunderscore}meet{\isacharcolon}\isanewline
\ \ \ \ \ \ \ \ {\isachardoublequoteopen}{\isasymphi}\ {\isacharparenleft}lattice{\isachardot}meet\ le\ x\ y{\isacharparenright}\ {\isacharequal}\ lattice{\isachardot}meet\ le{\isacharprime}\ {\isacharparenleft}{\isasymphi}\ x{\isacharparenright}\ {\isacharparenleft}{\isasymphi}\ y{\isacharparenright}{\isachardoublequoteclose}\isanewline
@@ -601,10 +590,10 @@
\isanewline
\ \ \isacommand{abbreviation}\isamarkupfalse%
\ {\isacharparenleft}\isakeyword{in}\ lattice{\isacharunderscore}hom{\isacharparenright}\isanewline
-\ \ \ \ meet{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasymsqinter}{\isacharprime}{\isacharprime}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ \isakeyword{where}\ {\isachardoublequoteopen}meet{\isacharprime}\ {\isasymequiv}\ lat{\isacharprime}{\isachardot}meet{\isachardoublequoteclose}\isanewline
+\ \ \ \ meet{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasymsqinter}{\isacharprime}{\isacharprime}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ \isakeyword{where}\ {\isachardoublequoteopen}meet{\isacharprime}\ {\isasymequiv}\ le{\isacharprime}{\isachardot}meet{\isachardoublequoteclose}\isanewline
\ \ \isacommand{abbreviation}\isamarkupfalse%
\ {\isacharparenleft}\isakeyword{in}\ lattice{\isacharunderscore}hom{\isacharparenright}\isanewline
-\ \ \ \ join{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasymsqunion}{\isacharprime}{\isacharprime}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ \isakeyword{where}\ {\isachardoublequoteopen}join{\isacharprime}\ {\isasymequiv}\ lat{\isacharprime}{\isachardot}join{\isachardoublequoteclose}%
+\ \ \ \ join{\isacharprime}\ {\isacharparenleft}\isakeyword{infixl}\ {\isachardoublequoteopen}{\isasymsqunion}{\isacharprime}{\isacharprime}{\isachardoublequoteclose}\ {\isadigit{5}}{\isadigit{0}}{\isacharparenright}\ \isakeyword{where}\ {\isachardoublequoteopen}join{\isacharprime}\ {\isasymequiv}\ le{\isacharprime}{\isachardot}join{\isachardoublequoteclose}%
\begin{isamarkuptext}%
A homomorphism is an endomorphism if both orders coincide.%
\end{isamarkuptext}%
@@ -678,7 +667,7 @@
\ \ \ \ \isacommand{then}\isamarkupfalse%
\ \isacommand{have}\isamarkupfalse%
\ {\isachardoublequoteopen}y\ {\isacharequal}\ {\isacharparenleft}x\ {\isasymsqunion}\ y{\isacharparenright}{\isachardoublequoteclose}\ \isacommand{by}\isamarkupfalse%
-\ {\isacharparenleft}simp\ add{\isacharcolon}\ join{\isacharunderscore}connection{\isacharparenright}\isanewline
+\ {\isacharparenleft}simp\ add{\isacharcolon}\ le{\isachardot}join{\isacharunderscore}connection{\isacharparenright}\isanewline
\ \ \ \ \isacommand{then}\isamarkupfalse%
\ \isacommand{have}\isamarkupfalse%
\ {\isachardoublequoteopen}{\isasymphi}\ y\ {\isacharequal}\ {\isacharparenleft}{\isasymphi}\ x\ {\isasymsqunion}{\isacharprime}\ {\isasymphi}\ y{\isacharparenright}{\isachardoublequoteclose}\ \isacommand{by}\isamarkupfalse%
@@ -686,7 +675,7 @@
\ \ \ \ \isacommand{then}\isamarkupfalse%
\ \isacommand{show}\isamarkupfalse%
\ {\isachardoublequoteopen}{\isasymphi}\ x\ {\isasympreceq}\ {\isasymphi}\ y{\isachardoublequoteclose}\ \isacommand{by}\isamarkupfalse%
-\ {\isacharparenleft}simp\ add{\isacharcolon}\ lat{\isacharprime}{\isachardot}join{\isacharunderscore}connection{\isacharparenright}\isanewline
+\ {\isacharparenleft}simp\ add{\isacharcolon}\ le{\isacharprime}{\isachardot}join{\isacharunderscore}connection{\isacharparenright}\isanewline
\ \ \isacommand{qed}\isamarkupfalse%
%
\endisatagproof
@@ -700,7 +689,7 @@
Theorems and other declarations --- syntax, in particular ---
from the locale \isa{order{\isacharunderscore}preserving} are now active in \isa{lattice{\isacharunderscore}hom}, for example
- \isa{lat{\isacharprime}{\isachardot}less{\isacharunderscore}le{\isacharunderscore}trans}:
+ \isa{le{\isacharprime}{\isachardot}less{\isacharunderscore}le{\isacharunderscore}trans}:
\isa{{\isasymlbrakk}{\isacharquery}x\ {\isasymprec}\ {\isacharquery}y{\isacharsemicolon}\ {\isacharquery}y\ {\isasympreceq}\ {\isacharquery}z{\isasymrbrakk}\ {\isasymLongrightarrow}\ {\isacharquery}x\ {\isasymprec}\ {\isacharquery}z}%
\end{isamarkuptext}%
\isamarkuptrue%
@@ -744,7 +733,9 @@
\textit{attr-name} & ::=
& \textit{name} $|$ \textit{attribute} $|$
- \textit{name} \textit{attribute} \\[2ex]
+ \textit{name} \textit{attribute} \\
+ \textit{qualifier} & ::=
+ & \textit{name} [``\textbf{!}''] \\[2ex]
\multicolumn{3}{l}{Context Elements} \\
@@ -784,19 +775,23 @@
\multicolumn{3}{l}{Locale Expressions} \\
- \textit{rename} & ::=
- & \textit{name} [ \textit{mixfix} ] $|$ ``\textbf{\_}'' \\
- \textit{expr} & ::=
- & \textit{renamed-expr} ( ``\textbf{+}'' \textit{renamed-expr} )$^*$ \\
- \textit{renamed-expr} & ::=
- & ( \textit{qualified-name} $|$
- ``\textbf{(}'' \textit{expr} ``\textbf{)}'' ) \textit{rename}$^*$ \\[2ex]
+ \textit{pos-insts} & ::=
+ & ( \textit{term} $|$ ``\textbf{\_}'' )$^*$ \\
+ \textit{named-insts} & ::=
+ & \textbf{where} \textit{name} ``\textbf{=}'' \textit{term}
+ ( \textbf{and} \textit{name} ``\textbf{=}'' \textit{term} )$^*$ \\
+ \textit{instance} & ::=
+ & [ \textit{qualifier} \textbf{:} ]
+ \textit{qualified-name} ( \textit{pos-insts} $|$ \textit{named-insts} ) \\
+ \textit{expression} & ::=
+ & \textit{instance} ( ``\textbf{+}'' \textit{instance} )$^*$
+ [ \textbf{for} \textit{fixes} ( \textbf{and} \textit{fixes} )$^*$ ] \\[2ex]
\multicolumn{3}{l}{Declaration of Locales} \\
\textit{locale} & ::=
& \textit{element}$^+$ \\
- & | & \textit{locale-expr} [ ``\textbf{+}'' \textit{element}$^+$ ] \\
+ & | & \textit{expression} [ ``\textbf{+}'' \textit{element}$^+$ ] \\
\textit{toplevel} & ::=
& \textbf{locale} \textit{name} [ ``\textbf{=}''
\textit{locale} ] \\[2ex]
@@ -805,19 +800,17 @@
\textit{equation} & ::= & [ \textit{attr-name} ``\textbf{:}'' ]
\textit{prop} \\
- \textit{insts} & ::= & [ ``\textbf{[}'' \textit{term}$^+$
- ``\textbf{]}'' ] \\
- & & [ \textbf{where} \textit{equation} ( \textbf{and}
- \textit{equation} )$^*$ ] \\
+ \textit{equations} & ::= & \textbf{where} \textit{equation} ( \textbf{and}
+ \textit{equation} )$^*$ \\
\textit{toplevel} & ::=
- & \textbf{interpretation} \textit{name} ( ``$<$'' $|$
- ``$\subseteq$'' ) \textit{expr} \textit{proof} \\
+ & \textbf{sublocale} \textit{name} ( ``$<$'' $|$
+ ``$\subseteq$'' ) \textit{expression} \textit{proof} \\
& |
- & \textbf{interpretation} [ \textit{attr-name} ``\textbf{:}'' ]
- \textit{expr} \textit{insts} \textit{proof} \\
+ & \textbf{interpretation}
+ \textit{expression} [ \textit{equations} ] \textit{proof} \\
& |
- & \textbf{interpret} [ \textit{attr-name} ``\textbf{:}'' ]
- \textit{expr} \textit{insts} \textit{proof} \\[2ex]
+ & \textbf{interpret}
+ \textit{expression} \textit{proof} \\[2ex]
\multicolumn{3}{l}{Diagnostics} \\
@@ -827,7 +820,7 @@
\end{tabular}
\end{center}
\hrule
-\caption{Syntax of Locale Commands.}
+\caption{Syntax of Locale Commands (abridged).}
\label{tab:commands}
\end{table}%
\end{isamarkuptext}%
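The updated tutorial text above explains that theorems of the first partial_order instance are now reached under the "le" qualifier (le.less_le_trans) while those of the renamed copy live under "le'". A minimal sketch of using such a qualified fact inside the locale; the lemma itself is illustrative and not part of the changeset:

  lemma (in order_preserving)
    "x \<sqsubset> y \<Longrightarrow> y \<sqsubseteq> z \<Longrightarrow> x \<sqsubset> z"
    by (rule le.less_le_trans)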
--- a/doc-src/Locales/Locales/document/root.tex Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/Locales/Locales/document/root.tex Wed Jan 28 16:57:12 2009 +0100
@@ -22,14 +22,17 @@
\begin{document}
-\title{Tutorial to Locales and Locale Interpretation}
+\title{Tutorial to Locales and Locale Interpretation \\[1ex]
+ \large 2nd revision, for Isabelle 2009}
\author{Clemens Ballarin}
\date{}
\maketitle
+%\thispagestyle{myheadings}
+%\markright{Technical Report TUM-I0723, Technische Universit\"at M\"unchen, 2007}
\thispagestyle{myheadings}
-\markright{Technical Report TUM-I0723, Technische Universit\"at M\"unchen, 2007}
+\markright{This tutorial is currently not consistent.}
\begin{abstract}
Locales are Isabelle's mechanism to deal with parametric theories.
@@ -40,6 +43,10 @@
This tutorial is intended for locale novices; familiarity with
Isabelle and Isar is presumed.
+ The 2nd revision accommodates changes introduced by the locales
+ reimplementation for Isabelle 2009. Most notably, in complex
+ specifications (\emph{locale expressions}) renaming has been
+ generalised to instantiation.
\end{abstract}
\parindent 0pt\parskip 0.5ex
--- a/doc-src/more_antiquote.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/doc-src/more_antiquote.ML Wed Jan 28 16:57:12 2009 +0100
@@ -113,13 +113,13 @@
val parse_const_terms = Scan.repeat1 Args.term
>> (fn ts => fn thy => map (Code_Unit.check_const thy) ts);
val parse_consts = Scan.lift (Args.parens (Args.$$$ "consts")) |-- parse_const_terms
- >> (fn mk_cs => fn thy => fn naming => map (the o Code_Thingol.lookup_const naming) (mk_cs thy));
+ >> (fn mk_cs => fn thy => fn naming => map_filter (Code_Thingol.lookup_const naming) (mk_cs thy));
val parse_types = Scan.lift (Args.parens (Args.$$$ "types") |-- Scan.repeat1 Args.name)
- >> (fn tycos => fn thy => fn naming => map (the o Code_Thingol.lookup_tyco naming o Sign.intern_type thy) tycos);
+ >> (fn tycos => fn thy => fn naming => map_filter (Code_Thingol.lookup_tyco naming o Sign.intern_type thy) tycos);
val parse_classes = Scan.lift (Args.parens (Args.$$$ "classes") |-- Scan.repeat1 Args.name)
- >> (fn classes => fn thy => fn naming => map (the o Code_Thingol.lookup_class naming o Sign.intern_class thy) classes);
+ >> (fn classes => fn thy => fn naming => map_filter (Code_Thingol.lookup_class naming o Sign.intern_class thy) classes);
val parse_instances = Scan.lift (Args.parens (Args.$$$ "instances") |-- Scan.repeat1 (Args.name --| Args.$$$ "::" -- Args.name))
- >> (fn insts => fn thy => fn naming => map (the o Code_Thingol.lookup_instance naming o apsnd (Sign.intern_type thy) o apfst (Sign.intern_class thy) o swap) insts);
+ >> (fn insts => fn thy => fn naming => map_filter (Code_Thingol.lookup_instance naming o apsnd (Sign.intern_type thy) o apfst (Sign.intern_class thy) o swap) insts);
val parse_names = parse_consts || parse_types || parse_classes || parse_instances;
fun code_stmts src ctxt ((mk_cs, mk_stmtss), target) =
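The antiquotation parsers above now use map_filter, so names that cannot be resolved against the code naming are silently dropped instead of raising exception Option via "the". A minimal Isabelle/ML sketch of the difference, with a hypothetical lookup function of type string -> int option:

  ML {*
    fun lookup "known" = SOME 1
      | lookup _ = NONE;

    (* map (the o lookup) ["known", "unknown"]  would raise exception Option *)
    val ok = map_filter lookup ["known", "unknown"];  (* = [1]: unknown keys are dropped *)
  *}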
--- a/etc/isar-keywords-ZF.el Wed Jan 28 16:29:16 2009 +0100
+++ b/etc/isar-keywords-ZF.el Wed Jan 28 16:57:12 2009 +0100
@@ -3,14 +3,16 @@
;; Generated from Pure + Pure-ProofGeneral + FOL + ZF.
;; *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT ***
;;
-;; $Id$
-;;
(defconst isar-keywords-major
'("\\."
"\\.\\."
"Isabelle\\.command"
+ "Isar\\.begin_document"
"Isar\\.command"
+ "Isar\\.define_command"
+ "Isar\\.edit_document"
+ "Isar\\.end_document"
"Isar\\.insert"
"Isar\\.remove"
"ML"
@@ -89,7 +91,6 @@
"instantiation"
"interpret"
"interpretation"
- "invoke"
"judgment"
"kill"
"kill_thy"
@@ -135,7 +136,6 @@
"print_drafts"
"print_facts"
"print_induct_rules"
- "print_interps"
"print_locale"
"print_locales"
"print_methods"
@@ -249,7 +249,11 @@
(defconst isar-keywords-control
'("Isabelle\\.command"
+ "Isar\\.begin_document"
"Isar\\.command"
+ "Isar\\.define_command"
+ "Isar\\.edit_document"
+ "Isar\\.end_document"
"Isar\\.insert"
"Isar\\.remove"
"ProofGeneral\\.inform_file_processed"
@@ -298,7 +302,6 @@
"print_drafts"
"print_facts"
"print_induct_rules"
- "print_interps"
"print_locale"
"print_locales"
"print_methods"
@@ -438,8 +441,7 @@
(defconst isar-keywords-proof-goal
'("have"
"hence"
- "interpret"
- "invoke"))
+ "interpret"))
(defconst isar-keywords-proof-block
'("next"
--- a/etc/isar-keywords.el Wed Jan 28 16:29:16 2009 +0100
+++ b/etc/isar-keywords.el Wed Jan 28 16:57:12 2009 +0100
@@ -3,14 +3,16 @@
;; Generated from Pure + Pure-ProofGeneral + HOL + HOLCF + IOA + HOL-Nominal + HOL-Statespace.
;; *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT ***
;;
-;; $Id$
-;;
(defconst isar-keywords-major
'("\\."
"\\.\\."
"Isabelle\\.command"
+ "Isar\\.begin_document"
"Isar\\.command"
+ "Isar\\.define_command"
+ "Isar\\.edit_document"
+ "Isar\\.end_document"
"Isar\\.insert"
"Isar\\.remove"
"ML"
@@ -46,9 +48,6 @@
"chapter"
"class"
"class_deps"
- "class_interpret"
- "class_interpretation"
- "class_locale"
"classes"
"classrel"
"code_abort"
@@ -119,7 +118,6 @@
"instantiation"
"interpret"
"interpretation"
- "invoke"
"judgment"
"kill"
"kill_thy"
@@ -172,7 +170,6 @@
"print_drafts"
"print_facts"
"print_induct_rules"
- "print_interps"
"print_locale"
"print_locales"
"print_methods"
@@ -312,7 +309,11 @@
(defconst isar-keywords-control
'("Isabelle\\.command"
+ "Isar\\.begin_document"
"Isar\\.command"
+ "Isar\\.define_command"
+ "Isar\\.edit_document"
+ "Isar\\.end_document"
"Isar\\.insert"
"Isar\\.remove"
"ProofGeneral\\.inform_file_processed"
@@ -369,7 +370,6 @@
"print_drafts"
"print_facts"
"print_induct_rules"
- "print_interps"
"print_locale"
"print_locales"
"print_methods"
@@ -423,7 +423,6 @@
"axiomatization"
"axioms"
"class"
- "class_locale"
"classes"
"classrel"
"code_abort"
@@ -507,7 +506,6 @@
(defconst isar-keywords-theory-goal
'("ax_specification"
- "class_interpretation"
"corollary"
"cpodef"
"function"
@@ -546,11 +544,9 @@
"subsubsect"))
(defconst isar-keywords-proof-goal
- '("class_interpret"
- "have"
+ '("have"
"hence"
- "interpret"
- "invoke"))
+ "interpret"))
(defconst isar-keywords-proof-block
'("next"
--- a/etc/settings Wed Jan 28 16:29:16 2009 +0100
+++ b/etc/settings Wed Jan 28 16:57:12 2009 +0100
@@ -242,7 +242,6 @@
"$ISABELLE_HOME/contrib/vampire/$ML_PLATFORM" \
"$ISABELLE_HOME/../vampire/$ML_PLATFORM" \
"/usr/local/Vampire" \
- "$ISABELLE_HOME/contrib/SystemOnTPTP" \
"")
SPASS_HOME=$(choosefrom \
"$ISABELLE_HOME/contrib/spass/$ML_PLATFORM/bin" \
--- a/lib/jedit/isabelle.xml Wed Jan 28 16:29:16 2009 +0100
+++ b/lib/jedit/isabelle.xml Wed Jan 28 16:57:12 2009 +0100
@@ -2,7 +2,6 @@
<!DOCTYPE MODE SYSTEM "xmode.dtd">
<!-- Generated from Pure + HOL + HOLCF + IOA + HOL-Nominal + HOL-Statespace + FOL + ZF. -->
<!-- *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT *** -->
-<!-- $Id$ -->
<MODE>
<PROPS>
<PROPERTY NAME="commentStart" VALUE="(*"/>
@@ -36,7 +35,11 @@
<OPERATOR>.</OPERATOR>
<OPERATOR>..</OPERATOR>
<INVALID>Isabelle.command</INVALID>
+ <INVALID>Isar.begin_document</INVALID>
<INVALID>Isar.command</INVALID>
+ <INVALID>Isar.define_command</INVALID>
+ <INVALID>Isar.edit_document</INVALID>
+ <INVALID>Isar.end_document</INVALID>
<INVALID>Isar.insert</INVALID>
<INVALID>Isar.remove</INVALID>
<OPERATOR>ML</OPERATOR>
@@ -171,7 +174,6 @@
<OPERATOR>interpret</OPERATOR>
<OPERATOR>interpretation</OPERATOR>
<KEYWORD4>intros</KEYWORD4>
- <OPERATOR>invoke</OPERATOR>
<KEYWORD4>is</KEYWORD4>
<OPERATOR>judgment</OPERATOR>
<INVALID>kill</INVALID>
@@ -239,7 +241,6 @@
<LABEL>print_drafts</LABEL>
<LABEL>print_facts</LABEL>
<LABEL>print_induct_rules</LABEL>
- <LABEL>print_interps</LABEL>
<LABEL>print_locale</LABEL>
<LABEL>print_locales</LABEL>
<LABEL>print_methods</LABEL>
--- a/src/HOL/ATP_Linkup.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/ATP_Linkup.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/ATP_Linkup.thy
- ID: $Id$
Author: Lawrence C Paulson
Author: Jia Meng, NICTA
Author: Fabian Immler, TUM
@@ -8,7 +7,7 @@
header {* The Isabelle-ATP Linkup *}
theory ATP_Linkup
-imports Record Hilbert_Choice
+imports Divides Record Hilbert_Choice Plain
uses
"Tools/polyhash.ML"
"Tools/res_clause.ML"
@@ -112,10 +111,13 @@
setup {* AtpManager.add_prover "spass_no_tc" (AtpWrapper.spass_opts 40 false) *}
text {* remote provers via SystemOnTPTP *}
-setup {* AtpManager.add_prover "remote_vamp9"
- (AtpWrapper.remote_prover "Vampire---9.0" "jumpirefix --output_syntax tptp --mode casc -t 3600") *}
-setup {* AtpManager.add_prover "remote_vamp10"
- (AtpWrapper.remote_prover "Vampire---10.0" "drakosha.pl 60") *}
+setup {* AtpManager.add_prover "remote_vampire"
+ (AtpWrapper.remote_prover "-s Vampire---9.0") *}
+setup {* AtpManager.add_prover "remote_spass"
+ (AtpWrapper.remote_prover "-s SPASS---3.01") *}
+setup {* AtpManager.add_prover "remote_e"
+ (AtpWrapper.remote_prover "-s EP---1.0") *}
+
subsection {* The Metis prover *}
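The remote prover setup now hands a single "-s <System>" option string to the SystemOnTPTP wrapper instead of spelling out a full command line per prover. Registering a further remote system would, by analogy, look as follows; the prover name below is hypothetical and not part of this changeset:

  setup {* AtpManager.add_prover "remote_vampire10"
    (AtpWrapper.remote_prover "-s Vampire---10.0") *}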
--- a/src/HOL/Code_Eval.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Code_Eval.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Code_Eval.thy
- ID: $Id$
Author: Florian Haftmann, TU Muenchen
*)
@@ -24,7 +23,7 @@
code_datatype Const App
class term_of = typerep +
- fixes term_of :: "'a \<Rightarrow> term"
+ fixes term_of :: "'a::{} \<Rightarrow> term"
lemma term_of_anything: "term_of x \<equiv> t"
by (rule eq_reflection) (cases "term_of x", cases t, simp)
--- a/src/HOL/Datatype.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Datatype.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Datatype.thy
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Author: Stefan Berghofer and Markus Wenzel, TU Muenchen
@@ -9,7 +8,7 @@
header {* Analogues of the Cartesian Product and Disjoint Sum for Datatypes *}
theory Datatype
-imports Nat Relation
+imports Nat Product_Type
begin
typedef (Node)
@@ -510,15 +509,6 @@
lemmas dsum_subset_Sigma = subset_trans [OF dsum_mono dsum_Sigma, standard]
-(*** Domain ***)
-
-lemma Domain_dprod [simp]: "Domain (dprod r s) = uprod (Domain r) (Domain s)"
-by auto
-
-lemma Domain_dsum [simp]: "Domain (dsum r s) = usum (Domain r) (Domain s)"
-by auto
-
-
text {* hides popular names *}
hide (open) type node item
hide (open) const Push Node Atom Leaf Numb Lim Split Case
--- a/src/HOL/Dense_Linear_Order.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Dense_Linear_Order.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,10 +1,12 @@
-(* Author: Amine Chaieb, TU Muenchen *)
+(* Title : HOL/Dense_Linear_Order.thy
+ Author : Amine Chaieb, TU Muenchen
+*)
header {* Dense linear order without endpoints
and a quantifier elimination procedure in Ferrante and Rackoff style *}
theory Dense_Linear_Order
-imports Plain Groebner_Basis
+imports Plain Groebner_Basis Main
uses
"~~/src/HOL/Tools/Qelim/langford_data.ML"
"~~/src/HOL/Tools/Qelim/ferrante_rackoff_data.ML"
--- a/src/HOL/Equiv_Relations.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Equiv_Relations.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,12 +1,11 @@
-(* ID: $Id$
- Authors: Lawrence C Paulson, Cambridge University Computer Laboratory
+(* Authors: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1996 University of Cambridge
*)
header {* Equivalence Relations in Higher-Order Set Theory *}
theory Equiv_Relations
-imports Finite_Set Relation
+imports Finite_Set Relation Plain
begin
subsection {* Equivalence relations *}
--- a/src/HOL/Finite_Set.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Finite_Set.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Finite_Set.thy
- ID: $Id$
Author: Tobias Nipkow, Lawrence C Paulson and Markus Wenzel
with contributions by Jeremy Avigad
*)
@@ -7,7 +6,7 @@
header {* Finite sets *}
theory Finite_Set
-imports Datatype Divides Transitive_Closure
+imports Nat Product_Type Power
begin
subsection {* Definition and basic properties *}
@@ -381,46 +380,6 @@
by(blast intro: finite_subset[OF subset_Pow_Union])
-lemma finite_converse [iff]: "finite (r^-1) = finite r"
- apply (subgoal_tac "r^-1 = (%(x,y). (y,x))`r")
- apply simp
- apply (rule iffI)
- apply (erule finite_imageD [unfolded inj_on_def])
- apply (simp split add: split_split)
- apply (erule finite_imageI)
- apply (simp add: converse_def image_def, auto)
- apply (rule bexI)
- prefer 2 apply assumption
- apply simp
- done
-
-
-text {* \paragraph{Finiteness of transitive closure} (Thanks to Sidi
-Ehmety) *}
-
-lemma finite_Field: "finite r ==> finite (Field r)"
- -- {* A finite relation has a finite field (@{text "= domain \<union> range"}. *}
- apply (induct set: finite)
- apply (auto simp add: Field_def Domain_insert Range_insert)
- done
-
-lemma trancl_subset_Field2: "r^+ <= Field r \<times> Field r"
- apply clarify
- apply (erule trancl_induct)
- apply (auto simp add: Field_def)
- done
-
-lemma finite_trancl: "finite (r^+) = finite r"
- apply auto
- prefer 2
- apply (rule trancl_subset_Field2 [THEN finite_subset])
- apply (rule finite_SigmaI)
- prefer 3
- apply (blast intro: r_into_trancl' finite_subset)
- apply (auto simp add: finite_Field)
- done
-
-
subsection {* Class @{text finite} *}
setup {* Sign.add_path "finite" *} -- {*FIXME: name tweaking*}
@@ -472,9 +431,6 @@
instance "+" :: (finite, finite) finite
by default (simp only: UNIV_Plus_UNIV [symmetric] finite_Plus finite)
-instance option :: (finite) finite
- by default (simp add: insert_None_conv_UNIV [symmetric])
-
subsection {* A fold functional for finite sets *}
--- a/src/HOL/FunDef.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/FunDef.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/FunDef.thy
- ID: $Id$
Author: Alexander Krauss, TU Muenchen
*)
--- a/src/HOL/GCD.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/GCD.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/GCD.thy
- ID: $Id$
Author: Christophe Tabacznyj and Lawrence C Paulson
Copyright 1996 University of Cambridge
*)
@@ -7,7 +6,7 @@
header {* The Greatest Common Divisor *}
theory GCD
-imports Plain Presburger
+imports Plain Presburger Main
begin
text {*
--- a/src/HOL/HOL.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/HOL.thy Wed Jan 28 16:57:12 2009 +0100
@@ -208,34 +208,34 @@
subsubsection {* Generic classes and algebraic operations *}
-class default = type +
+class default =
fixes default :: 'a
-class zero = type +
+class zero =
fixes zero :: 'a ("0")
-class one = type +
+class one =
fixes one :: 'a ("1")
hide (open) const zero one
-class plus = type +
+class plus =
fixes plus :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "+" 65)
-class minus = type +
+class minus =
fixes minus :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "-" 65)
-class uminus = type +
+class uminus =
fixes uminus :: "'a \<Rightarrow> 'a" ("- _" [81] 80)
-class times = type +
+class times =
fixes times :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "*" 70)
-class inverse = type +
+class inverse =
fixes inverse :: "'a \<Rightarrow> 'a"
and divide :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "'/" 70)
-class abs = type +
+class abs =
fixes abs :: "'a \<Rightarrow> 'a"
begin
@@ -247,10 +247,10 @@
end
-class sgn = type +
+class sgn =
fixes sgn :: "'a \<Rightarrow> 'a"
-class ord = type +
+class ord =
fixes less_eq :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
and less :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
begin
@@ -1675,7 +1675,7 @@
text {* Equality *}
-class eq = type +
+class eq =
fixes eq :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
assumes eq_equals: "eq x y \<longleftrightarrow> x = y"
begin
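All of the class declarations above drop the explicit "type +" import and keep only the class body of fixes and assumes. A hedged sketch of a declaration in the new form; the class name and its content are made up for illustration:

  class my_ord =
    fixes le :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
    assumes le_refl: "le x x"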
--- a/src/HOL/Hilbert_Choice.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Hilbert_Choice.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Hilbert_Choice.thy
- ID: $Id$
Author: Lawrence C Paulson
Copyright 2001 University of Cambridge
*)
@@ -7,7 +6,7 @@
header {* Hilbert's Epsilon-Operator and the Axiom of Choice *}
theory Hilbert_Choice
-imports Nat Wellfounded
+imports Nat Wellfounded Plain
uses ("Tools/meson.ML") ("Tools/specification_package.ML")
begin
--- a/src/HOL/Import/hol4rews.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Import/hol4rews.ML Wed Jan 28 16:57:12 2009 +0100
@@ -390,7 +390,7 @@
val thm2 = standard thm1;
in
thy
- |> PureThy.store_thm (bthm, thm2)
+ |> PureThy.store_thm (Binding.name bthm, thm2)
|> snd
|> add_hol4_theorem bthy bthm hth
end;
--- a/src/HOL/Import/proof_kernel.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Import/proof_kernel.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1928,7 +1928,7 @@
Replaying _ => thy
| _ => (ImportRecorder.add_consts [(constname, ctype, csyn)]; Sign.add_consts_i [(constname,ctype,csyn)] thy)
val eq = mk_defeq constname rhs' thy1
- val (thms, thy2) = PureThy.add_defs false [((thmname,eq),[])] thy1
+ val (thms, thy2) = PureThy.add_defs false [((Binding.name thmname,eq),[])] thy1
val _ = ImportRecorder.add_defs thmname eq
val def_thm = hd thms
val thm' = def_thm RS meta_eq_to_obj_eq_thm
--- a/src/HOL/Import/replay.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Import/replay.ML Wed Jan 28 16:57:12 2009 +0100
@@ -340,7 +340,7 @@
| delta (Hol_move (fullname, moved_thmname)) thy =
add_hol4_move fullname moved_thmname thy
| delta (Defs (thmname, eq)) thy =
- snd (PureThy.add_defs false [((thmname, eq), [])] thy)
+ snd (PureThy.add_defs false [((Binding.name thmname, eq), [])] thy)
| delta (Hol_theorem (thyname, thmname, th)) thy =
add_hol4_theorem thyname thmname ([], th_of thy th) thy
| delta (Typedef (thmname, typ, c, repabs, th)) thy =
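The three Import adjustments above are the same mechanical change: the PureThy interface now expects a binding where a bare theorem name string used to suffice, so raw strings are wrapped via Binding.name. A minimal sketch following the same pattern; the helper name is a placeholder:

  ML {*
    (* store a theorem under a raw string name via the binding interface *)
    fun store_as name thm thy =
      thy |> PureThy.store_thm (Binding.name name, thm) |> snd;
  *}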
--- a/src/HOL/Int.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Int.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Int.thy
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Tobias Nipkow, Florian Haftmann, TU Muenchen
Copyright 1994 University of Cambridge
@@ -598,7 +597,7 @@
Bit1 :: "int \<Rightarrow> int" where
[code del]: "Bit1 k = 1 + k + k"
-class number = type + -- {* for numeric types: nat, int, real, \dots *}
+class number = -- {* for numeric types: nat, int, real, \dots *}
fixes number_of :: "int \<Rightarrow> 'a"
use "Tools/numeral.ML"
--- a/src/HOL/IntDiv.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/IntDiv.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,82 +1,69 @@
(* Title: HOL/IntDiv.thy
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1999 University of Cambridge
*)
-header{*The Division Operators div and mod; the Divides Relation dvd*}
+header{* The Division Operators div and mod *}
theory IntDiv
imports Int Divides FunDef
begin
-constdefs
- quorem :: "(int*int) * (int*int) => bool"
+definition divmod_rel :: "int \<Rightarrow> int \<Rightarrow> int \<times> int \<Rightarrow> bool" where
--{*definition of quotient and remainder*}
- [code]: "quorem == %((a,b), (q,r)).
- a = b*q + r &
- (if 0 < b then 0\<le>r & r<b else b<r & r \<le> 0)"
+ [code]: "divmod_rel a b = (\<lambda>(q, r). a = b * q + r \<and>
+ (if 0 < b then 0 \<le> r \<and> r < b else b < r \<and> r \<le> 0))"
- adjust :: "[int, int*int] => int*int"
+definition adjust :: "int \<Rightarrow> int \<times> int \<Rightarrow> int \<times> int" where
--{*for the division algorithm*}
- [code]: "adjust b == %(q,r). if 0 \<le> r-b then (2*q + 1, r-b)
- else (2*q, r)"
+ [code]: "adjust b = (\<lambda>(q, r). if 0 \<le> r - b then (2 * q + 1, r - b)
+ else (2 * q, r))"
text{*algorithm for the case @{text "a\<ge>0, b>0"}*}
-function
- posDivAlg :: "int \<Rightarrow> int \<Rightarrow> int \<times> int"
-where
- "posDivAlg a b =
- (if (a<b | b\<le>0) then (0,a)
- else adjust b (posDivAlg a (2*b)))"
+function posDivAlg :: "int \<Rightarrow> int \<Rightarrow> int \<times> int" where
+ "posDivAlg a b = (if a < b \<or> b \<le> 0 then (0, a)
+ else adjust b (posDivAlg a (2 * b)))"
by auto
-termination by (relation "measure (%(a,b). nat(a - b + 1))") auto
+termination by (relation "measure (\<lambda>(a, b). nat (a - b + 1))") auto
text{*algorithm for the case @{text "a<0, b>0"}*}
-function
- negDivAlg :: "int \<Rightarrow> int \<Rightarrow> int \<times> int"
-where
- "negDivAlg a b =
- (if (0\<le>a+b | b\<le>0) then (-1,a+b)
- else adjust b (negDivAlg a (2*b)))"
+function negDivAlg :: "int \<Rightarrow> int \<Rightarrow> int \<times> int" where
+ "negDivAlg a b = (if 0 \<le>a + b \<or> b \<le> 0 then (-1, a + b)
+ else adjust b (negDivAlg a (2 * b)))"
by auto
-termination by (relation "measure (%(a,b). nat(- a - b))") auto
+termination by (relation "measure (\<lambda>(a, b). nat (- a - b))") auto
text{*algorithm for the general case @{term "b\<noteq>0"}*}
-constdefs
- negateSnd :: "int*int => int*int"
- [code]: "negateSnd == %(q,r). (q,-r)"
+definition negateSnd :: "int \<times> int \<Rightarrow> int \<times> int" where
+ [code inline]: "negateSnd = apsnd uminus"
-definition
- divAlg :: "int \<times> int \<Rightarrow> int \<times> int"
+definition divmod :: "int \<Rightarrow> int \<Rightarrow> int \<times> int" where
--{*The full division algorithm considers all possible signs for a, b
including the special case @{text "a=0, b<0"} because
@{term negDivAlg} requires @{term "a<0"}.*}
-where
- "divAlg = (\<lambda>(a, b). (if 0\<le>a then
- if 0\<le>b then posDivAlg a b
- else if a=0 then (0, 0)
+ "divmod a b = (if 0 \<le> a then if 0 \<le> b then posDivAlg a b
+ else if a = 0 then (0, 0)
else negateSnd (negDivAlg (-a) (-b))
else
- if 0<b then negDivAlg a b
- else negateSnd (posDivAlg (-a) (-b))))"
+ if 0 < b then negDivAlg a b
+ else negateSnd (posDivAlg (-a) (-b)))"
instantiation int :: Divides.div
begin
definition
- div_def: "a div b = fst (divAlg (a, b))"
+ div_def: "a div b = fst (divmod a b)"
definition
- mod_def: "a mod b = snd (divAlg (a, b))"
+ mod_def: "a mod b = snd (divmod a b)"
instance ..
end
-lemma divAlg_mod_div:
- "divAlg (p, q) = (p div q, p mod q)"
+lemma divmod_mod_div:
+ "divmod p q = (p div q, p mod q)"
by (auto simp add: div_def mod_def)
text{*
@@ -97,7 +84,7 @@
fun negateSnd (q,r:int) = (q,~r);
- fun divAlg (a,b) = if 0\<le>a then
+ fun divmod (a,b) = if 0\<le>a then
if b>0 then posDivAlg (a,b)
else if a=0 then (0,0)
else negateSnd (negDivAlg (~a,~b))
@@ -131,9 +118,9 @@
auto)
lemma unique_quotient:
- "[| quorem ((a,b), (q,r)); quorem ((a,b), (q',r')); b \<noteq> 0 |]
+ "[| divmod_rel a b (q, r); divmod_rel a b (q', r'); b \<noteq> 0 |]
==> q = q'"
-apply (simp add: quorem_def linorder_neq_iff split: split_if_asm)
+apply (simp add: divmod_rel_def linorder_neq_iff split: split_if_asm)
apply (blast intro: order_antisym
dest: order_eq_refl [THEN unique_quotient_lemma]
order_eq_refl [THEN unique_quotient_lemma_neg] sym)+
@@ -141,10 +128,10 @@
lemma unique_remainder:
- "[| quorem ((a,b), (q,r)); quorem ((a,b), (q',r')); b \<noteq> 0 |]
+ "[| divmod_rel a b (q, r); divmod_rel a b (q', r'); b \<noteq> 0 |]
==> r = r'"
apply (subgoal_tac "q = q'")
- apply (simp add: quorem_def)
+ apply (simp add: divmod_rel_def)
apply (blast intro: unique_quotient)
done
@@ -171,10 +158,10 @@
text{*Correctness of @{term posDivAlg}: it computes quotients correctly*}
theorem posDivAlg_correct:
assumes "0 \<le> a" and "0 < b"
- shows "quorem ((a, b), posDivAlg a b)"
+ shows "divmod_rel a b (posDivAlg a b)"
using prems apply (induct a b rule: posDivAlg.induct)
apply auto
-apply (simp add: quorem_def)
+apply (simp add: divmod_rel_def)
apply (subst posDivAlg_eqn, simp add: right_distrib)
apply (case_tac "a < b")
apply simp_all
@@ -200,10 +187,10 @@
It doesn't work if a=0 because the 0/b equals 0, not -1*)
lemma negDivAlg_correct:
assumes "a < 0" and "b > 0"
- shows "quorem ((a, b), negDivAlg a b)"
+ shows "divmod_rel a b (negDivAlg a b)"
using prems apply (induct a b rule: negDivAlg.induct)
apply (auto simp add: linorder_not_le)
-apply (simp add: quorem_def)
+apply (simp add: divmod_rel_def)
apply (subst negDivAlg_eqn, assumption)
apply (case_tac "a + b < (0\<Colon>int)")
apply simp_all
@@ -215,8 +202,8 @@
subsection{*Existence Shown by Proving the Division Algorithm to be Correct*}
(*the case a=0*)
-lemma quorem_0: "b \<noteq> 0 ==> quorem ((0,b), (0,0))"
-by (auto simp add: quorem_def linorder_neq_iff)
+lemma divmod_rel_0: "b \<noteq> 0 ==> divmod_rel 0 b (0, 0)"
+by (auto simp add: divmod_rel_def linorder_neq_iff)
lemma posDivAlg_0 [simp]: "posDivAlg 0 b = (0, 0)"
by (subst posDivAlg.simps, auto)
@@ -227,26 +214,26 @@
lemma negateSnd_eq [simp]: "negateSnd(q,r) = (q,-r)"
by (simp add: negateSnd_def)
-lemma quorem_neg: "quorem ((-a,-b), qr) ==> quorem ((a,b), negateSnd qr)"
-by (auto simp add: split_ifs quorem_def)
+lemma divmod_rel_neg: "divmod_rel (-a) (-b) qr ==> divmod_rel a b (negateSnd qr)"
+by (auto simp add: split_ifs divmod_rel_def)
-lemma divAlg_correct: "b \<noteq> 0 ==> quorem ((a,b), divAlg (a, b))"
-by (force simp add: linorder_neq_iff quorem_0 divAlg_def quorem_neg
+lemma divmod_correct: "b \<noteq> 0 ==> divmod_rel a b (divmod a b)"
+by (force simp add: linorder_neq_iff divmod_rel_0 divmod_def divmod_rel_neg
posDivAlg_correct negDivAlg_correct)
text{*Arbitrary definitions for division by zero. Useful to simplify
certain equations.*}
lemma DIVISION_BY_ZERO [simp]: "a div (0::int) = 0 & a mod (0::int) = a"
-by (simp add: div_def mod_def divAlg_def posDivAlg.simps)
+by (simp add: div_def mod_def divmod_def posDivAlg.simps)
text{*Basic laws about division and remainder*}
lemma zmod_zdiv_equality: "(a::int) = b * (a div b) + (a mod b)"
apply (case_tac "b = 0", simp)
-apply (cut_tac a = a and b = b in divAlg_correct)
-apply (auto simp add: quorem_def div_def mod_def)
+apply (cut_tac a = a and b = b in divmod_correct)
+apply (auto simp add: divmod_rel_def div_def mod_def)
done
lemma zdiv_zmod_equality: "(b * (a div b) + (a mod b)) + k = (a::int)+k"
@@ -288,16 +275,16 @@
*}
lemma pos_mod_conj : "(0::int) < b ==> 0 \<le> a mod b & a mod b < b"
-apply (cut_tac a = a and b = b in divAlg_correct)
-apply (auto simp add: quorem_def mod_def)
+apply (cut_tac a = a and b = b in divmod_correct)
+apply (auto simp add: divmod_rel_def mod_def)
done
lemmas pos_mod_sign [simp] = pos_mod_conj [THEN conjunct1, standard]
and pos_mod_bound [simp] = pos_mod_conj [THEN conjunct2, standard]
lemma neg_mod_conj : "b < (0::int) ==> a mod b \<le> 0 & b < a mod b"
-apply (cut_tac a = a and b = b in divAlg_correct)
-apply (auto simp add: quorem_def div_def mod_def)
+apply (cut_tac a = a and b = b in divmod_correct)
+apply (auto simp add: divmod_rel_def div_def mod_def)
done
lemmas neg_mod_sign [simp] = neg_mod_conj [THEN conjunct1, standard]
@@ -307,47 +294,47 @@
subsection{*General Properties of div and mod*}
-lemma quorem_div_mod: "b \<noteq> 0 ==> quorem ((a, b), (a div b, a mod b))"
+lemma divmod_rel_div_mod: "b \<noteq> 0 ==> divmod_rel a b (a div b, a mod b)"
apply (cut_tac a = a and b = b in zmod_zdiv_equality)
-apply (force simp add: quorem_def linorder_neq_iff)
+apply (force simp add: divmod_rel_def linorder_neq_iff)
done
-lemma quorem_div: "[| quorem((a,b),(q,r)); b \<noteq> 0 |] ==> a div b = q"
-by (simp add: quorem_div_mod [THEN unique_quotient])
+lemma divmod_rel_div: "[| divmod_rel a b (q, r); b \<noteq> 0 |] ==> a div b = q"
+by (simp add: divmod_rel_div_mod [THEN unique_quotient])
-lemma quorem_mod: "[| quorem((a,b),(q,r)); b \<noteq> 0 |] ==> a mod b = r"
-by (simp add: quorem_div_mod [THEN unique_remainder])
+lemma divmod_rel_mod: "[| divmod_rel a b (q, r); b \<noteq> 0 |] ==> a mod b = r"
+by (simp add: divmod_rel_div_mod [THEN unique_remainder])
lemma div_pos_pos_trivial: "[| (0::int) \<le> a; a < b |] ==> a div b = 0"
-apply (rule quorem_div)
-apply (auto simp add: quorem_def)
+apply (rule divmod_rel_div)
+apply (auto simp add: divmod_rel_def)
done
lemma div_neg_neg_trivial: "[| a \<le> (0::int); b < a |] ==> a div b = 0"
-apply (rule quorem_div)
-apply (auto simp add: quorem_def)
+apply (rule divmod_rel_div)
+apply (auto simp add: divmod_rel_def)
done
lemma div_pos_neg_trivial: "[| (0::int) < a; a+b \<le> 0 |] ==> a div b = -1"
-apply (rule quorem_div)
-apply (auto simp add: quorem_def)
+apply (rule divmod_rel_div)
+apply (auto simp add: divmod_rel_def)
done
(*There is no div_neg_pos_trivial because 0 div b = 0 would supersede it*)
lemma mod_pos_pos_trivial: "[| (0::int) \<le> a; a < b |] ==> a mod b = a"
-apply (rule_tac q = 0 in quorem_mod)
-apply (auto simp add: quorem_def)
+apply (rule_tac q = 0 in divmod_rel_mod)
+apply (auto simp add: divmod_rel_def)
done
lemma mod_neg_neg_trivial: "[| a \<le> (0::int); b < a |] ==> a mod b = a"
-apply (rule_tac q = 0 in quorem_mod)
-apply (auto simp add: quorem_def)
+apply (rule_tac q = 0 in divmod_rel_mod)
+apply (auto simp add: divmod_rel_def)
done
lemma mod_pos_neg_trivial: "[| (0::int) < a; a+b \<le> 0 |] ==> a mod b = a+b"
-apply (rule_tac q = "-1" in quorem_mod)
-apply (auto simp add: quorem_def)
+apply (rule_tac q = "-1" in divmod_rel_mod)
+apply (auto simp add: divmod_rel_def)
done
text{*There is no @{text mod_neg_pos_trivial}.*}
@@ -356,15 +343,15 @@
(*Simpler laws such as -a div b = -(a div b) FAIL, but see just below*)
lemma zdiv_zminus_zminus [simp]: "(-a) div (-b) = a div (b::int)"
apply (case_tac "b = 0", simp)
-apply (simp add: quorem_div_mod [THEN quorem_neg, simplified,
- THEN quorem_div, THEN sym])
+apply (simp add: divmod_rel_div_mod [THEN divmod_rel_neg, simplified,
+ THEN divmod_rel_div, THEN sym])
done
(*Simpler laws such as -a mod b = -(a mod b) FAIL, but see just below*)
lemma zmod_zminus_zminus [simp]: "(-a) mod (-b) = - (a mod (b::int))"
apply (case_tac "b = 0", simp)
-apply (subst quorem_div_mod [THEN quorem_neg, simplified, THEN quorem_mod],
+apply (subst divmod_rel_div_mod [THEN divmod_rel_neg, simplified, THEN divmod_rel_mod],
auto)
done
@@ -372,22 +359,22 @@
subsection{*Laws for div and mod with Unary Minus*}
lemma zminus1_lemma:
- "quorem((a,b),(q,r))
- ==> quorem ((-a,b), (if r=0 then -q else -q - 1),
- (if r=0 then 0 else b-r))"
-by (force simp add: split_ifs quorem_def linorder_neq_iff right_diff_distrib)
+ "divmod_rel a b (q, r)
+ ==> divmod_rel (-a) b (if r=0 then -q else -q - 1,
+ if r=0 then 0 else b-r)"
+by (force simp add: split_ifs divmod_rel_def linorder_neq_iff right_diff_distrib)
lemma zdiv_zminus1_eq_if:
"b \<noteq> (0::int)
==> (-a) div b =
(if a mod b = 0 then - (a div b) else - (a div b) - 1)"
-by (blast intro: quorem_div_mod [THEN zminus1_lemma, THEN quorem_div])
+by (blast intro: divmod_rel_div_mod [THEN zminus1_lemma, THEN divmod_rel_div])
lemma zmod_zminus1_eq_if:
"(-a::int) mod b = (if a mod b = 0 then 0 else b - (a mod b))"
apply (case_tac "b = 0", simp)
-apply (blast intro: quorem_div_mod [THEN zminus1_lemma, THEN quorem_mod])
+apply (blast intro: divmod_rel_div_mod [THEN zminus1_lemma, THEN divmod_rel_mod])
done
lemma zdiv_zminus2: "a div (-b) = (-a::int) div b"
@@ -420,91 +407,91 @@
apply (simp add: right_diff_distrib)
done
-lemma self_quotient: "[| quorem((a,a),(q,r)); a \<noteq> (0::int) |] ==> q = 1"
-apply (simp add: split_ifs quorem_def linorder_neq_iff)
+lemma self_quotient: "[| divmod_rel a a (q, r); a \<noteq> (0::int) |] ==> q = 1"
+apply (simp add: split_ifs divmod_rel_def linorder_neq_iff)
apply (rule order_antisym, safe, simp_all)
apply (rule_tac [3] a = "-a" and r = "-r" in self_quotient_aux1)
apply (rule_tac a = "-a" and r = "-r" in self_quotient_aux2)
apply (force intro: self_quotient_aux1 self_quotient_aux2 simp add: add_commute)+
done
-lemma self_remainder: "[| quorem((a,a),(q,r)); a \<noteq> (0::int) |] ==> r = 0"
+lemma self_remainder: "[| divmod_rel a a (q, r); a \<noteq> (0::int) |] ==> r = 0"
apply (frule self_quotient, assumption)
-apply (simp add: quorem_def)
+apply (simp add: divmod_rel_def)
done
lemma zdiv_self [simp]: "a \<noteq> 0 ==> a div a = (1::int)"
-by (simp add: quorem_div_mod [THEN self_quotient])
+by (simp add: divmod_rel_div_mod [THEN self_quotient])
(*Here we have 0 mod 0 = 0, also assumed by Knuth (who puts m mod 0 = 0) *)
lemma zmod_self [simp]: "a mod a = (0::int)"
apply (case_tac "a = 0", simp)
-apply (simp add: quorem_div_mod [THEN self_remainder])
+apply (simp add: divmod_rel_div_mod [THEN self_remainder])
done
subsection{*Computation of Division and Remainder*}
lemma zdiv_zero [simp]: "(0::int) div b = 0"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma div_eq_minus1: "(0::int) < b ==> -1 div b = -1"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma zmod_zero [simp]: "(0::int) mod b = 0"
-by (simp add: mod_def divAlg_def)
+by (simp add: mod_def divmod_def)
lemma zdiv_minus1: "(0::int) < b ==> -1 div b = -1"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma zmod_minus1: "(0::int) < b ==> -1 mod b = b - 1"
-by (simp add: mod_def divAlg_def)
+by (simp add: mod_def divmod_def)
text{*a positive, b positive *}
lemma div_pos_pos: "[| 0 < a; 0 \<le> b |] ==> a div b = fst (posDivAlg a b)"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma mod_pos_pos: "[| 0 < a; 0 \<le> b |] ==> a mod b = snd (posDivAlg a b)"
-by (simp add: mod_def divAlg_def)
+by (simp add: mod_def divmod_def)
text{*a negative, b positive *}
lemma div_neg_pos: "[| a < 0; 0 < b |] ==> a div b = fst (negDivAlg a b)"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma mod_neg_pos: "[| a < 0; 0 < b |] ==> a mod b = snd (negDivAlg a b)"
-by (simp add: mod_def divAlg_def)
+by (simp add: mod_def divmod_def)
text{*a positive, b negative *}
lemma div_pos_neg:
"[| 0 < a; b < 0 |] ==> a div b = fst (negateSnd (negDivAlg (-a) (-b)))"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma mod_pos_neg:
"[| 0 < a; b < 0 |] ==> a mod b = snd (negateSnd (negDivAlg (-a) (-b)))"
-by (simp add: mod_def divAlg_def)
+by (simp add: mod_def divmod_def)
text{*a negative, b negative *}
lemma div_neg_neg:
"[| a < 0; b \<le> 0 |] ==> a div b = fst (negateSnd (posDivAlg (-a) (-b)))"
-by (simp add: div_def divAlg_def)
+by (simp add: div_def divmod_def)
lemma mod_neg_neg:
"[| a < 0; b \<le> 0 |] ==> a mod b = snd (negateSnd (posDivAlg (-a) (-b)))"
-by (simp add: mod_def divAlg_def)
+by (simp add: mod_def divmod_def)
text {*Simplify expressions in which div and mod combine numerical constants*}
-lemma quoremI:
+lemma divmod_relI:
"\<lbrakk>a == b * q + r; if 0 < b then 0 \<le> r \<and> r < b else b < r \<and> r \<le> 0\<rbrakk>
- \<Longrightarrow> quorem ((a, b), (q, r))"
- unfolding quorem_def by simp
+ \<Longrightarrow> divmod_rel a b (q, r)"
+ unfolding divmod_rel_def by simp
-lemmas quorem_div_eq = quoremI [THEN quorem_div, THEN eq_reflection]
-lemmas quorem_mod_eq = quoremI [THEN quorem_mod, THEN eq_reflection]
+lemmas divmod_rel_div_eq = divmod_relI [THEN divmod_rel_div, THEN eq_reflection]
+lemmas divmod_rel_mod_eq = divmod_relI [THEN divmod_rel_mod, THEN eq_reflection]
lemmas arithmetic_simps =
arith_simps
add_special
@@ -548,10 +535,10 @@
*}
simproc_setup binary_int_div ("number_of m div number_of n :: int") =
- {* K (divmod_proc (@{thm quorem_div_eq})) *}
+ {* K (divmod_proc (@{thm divmod_rel_div_eq})) *}
simproc_setup binary_int_mod ("number_of m mod number_of n :: int") =
- {* K (divmod_proc (@{thm quorem_mod_eq})) *}
+ {* K (divmod_proc (@{thm divmod_rel_mod_eq})) *}
(* The following 8 lemmas are made unnecessary by the above simprocs: *)
@@ -718,18 +705,18 @@
text{*proving (a*b) div c = a*(b div c) + a*(b mod c) div c *}
lemma zmult1_lemma:
- "[| quorem((b,c),(q,r)); c \<noteq> 0 |]
- ==> quorem ((a*b, c), (a*q + a*r div c, a*r mod c))"
-by (force simp add: split_ifs quorem_def linorder_neq_iff right_distrib)
+ "[| divmod_rel b c (q, r); c \<noteq> 0 |]
+ ==> divmod_rel (a * b) c (a*q + a*r div c, a*r mod c)"
+by (force simp add: split_ifs divmod_rel_def linorder_neq_iff right_distrib)
lemma zdiv_zmult1_eq: "(a*b) div c = a*(b div c) + a*(b mod c) div (c::int)"
apply (case_tac "c = 0", simp)
-apply (blast intro: quorem_div_mod [THEN zmult1_lemma, THEN quorem_div])
+apply (blast intro: divmod_rel_div_mod [THEN zmult1_lemma, THEN divmod_rel_div])
done
lemma zmod_zmult1_eq: "(a*b) mod c = a*(b mod c) mod (c::int)"
apply (case_tac "c = 0", simp)
-apply (blast intro: quorem_div_mod [THEN zmult1_lemma, THEN quorem_mod])
+apply (blast intro: divmod_rel_div_mod [THEN zmult1_lemma, THEN divmod_rel_mod])
done
lemma zmod_zmult1_eq': "(a*b) mod (c::int) = ((a mod c) * b) mod c"
@@ -760,20 +747,20 @@
text{*proving (a+b) div c = a div c + b div c + ((a mod c + b mod c) div c) *}
lemma zadd1_lemma:
- "[| quorem((a,c),(aq,ar)); quorem((b,c),(bq,br)); c \<noteq> 0 |]
- ==> quorem ((a+b, c), (aq + bq + (ar+br) div c, (ar+br) mod c))"
-by (force simp add: split_ifs quorem_def linorder_neq_iff right_distrib)
+ "[| divmod_rel a c (aq, ar); divmod_rel b c (bq, br); c \<noteq> 0 |]
+ ==> divmod_rel (a+b) c (aq + bq + (ar+br) div c, (ar+br) mod c)"
+by (force simp add: split_ifs divmod_rel_def linorder_neq_iff right_distrib)
(*NOT suitable for rewriting: the RHS has an instance of the LHS*)
lemma zdiv_zadd1_eq:
"(a+b) div (c::int) = a div c + b div c + ((a mod c + b mod c) div c)"
apply (case_tac "c = 0", simp)
-apply (blast intro: zadd1_lemma [OF quorem_div_mod quorem_div_mod] quorem_div)
+apply (blast intro: zadd1_lemma [OF divmod_rel_div_mod divmod_rel_div_mod] divmod_rel_div)
done
lemma zmod_zadd1_eq: "(a+b) mod (c::int) = (a mod c + b mod c) mod c"
apply (case_tac "c = 0", simp)
-apply (blast intro: zadd1_lemma [OF quorem_div_mod quorem_div_mod] quorem_mod)
+apply (blast intro: zadd1_lemma [OF divmod_rel_div_mod divmod_rel_div_mod] divmod_rel_mod)
done
instance int :: ring_div
@@ -785,6 +772,33 @@
by (simp add: zmod_zmult1_eq zmod_zdiv_trivial)
qed auto
+lemma posDivAlg_div_mod:
+ assumes "k \<ge> 0"
+ and "l \<ge> 0"
+ shows "posDivAlg k l = (k div l, k mod l)"
+proof (cases "l = 0")
+ case True then show ?thesis by (simp add: posDivAlg.simps)
+next
+ case False with assms posDivAlg_correct
+ have "divmod_rel k l (fst (posDivAlg k l), snd (posDivAlg k l))"
+ by simp
+ from divmod_rel_div [OF this `l \<noteq> 0`] divmod_rel_mod [OF this `l \<noteq> 0`]
+ show ?thesis by simp
+qed
+
+lemma negDivAlg_div_mod:
+ assumes "k < 0"
+ and "l > 0"
+ shows "negDivAlg k l = (k div l, k mod l)"
+proof -
+ from assms have "l \<noteq> 0" by simp
+ from assms negDivAlg_correct
+ have "divmod_rel k l (fst (negDivAlg k l), snd (negDivAlg k l))"
+ by simp
+ from divmod_rel_div [OF this `l \<noteq> 0`] divmod_rel_mod [OF this `l \<noteq> 0`]
+ show ?thesis by simp
+qed
+
lemma zdiv_zadd_self1: "a \<noteq> (0::int) ==> (a+b) div a = b div a + 1"
by (rule div_add_self1) (* already declared [simp] *)
@@ -864,21 +878,21 @@
apply simp
done
-lemma zmult2_lemma: "[| quorem ((a,b), (q,r)); b \<noteq> 0; 0 < c |]
- ==> quorem ((a, b*c), (q div c, b*(q mod c) + r))"
-by (auto simp add: mult_ac quorem_def linorder_neq_iff
+lemma zmult2_lemma: "[| divmod_rel a b (q, r); b \<noteq> 0; 0 < c |]
+ ==> divmod_rel a (b * c) (q div c, b*(q mod c) + r)"
+by (auto simp add: mult_ac divmod_rel_def linorder_neq_iff
zero_less_mult_iff right_distrib [symmetric]
zmult2_lemma_aux1 zmult2_lemma_aux2 zmult2_lemma_aux3 zmult2_lemma_aux4)
lemma zdiv_zmult2_eq: "(0::int) < c ==> a div (b*c) = (a div b) div c"
apply (case_tac "b = 0", simp)
-apply (force simp add: quorem_div_mod [THEN zmult2_lemma, THEN quorem_div])
+apply (force simp add: divmod_rel_div_mod [THEN zmult2_lemma, THEN divmod_rel_div])
done
lemma zmod_zmult2_eq:
"(0::int) < c ==> a mod (b*c) = b*(a div b mod c) + a mod b"
apply (case_tac "b = 0", simp)
-apply (force simp add: quorem_div_mod [THEN zmult2_lemma, THEN quorem_mod])
+apply (force simp add: divmod_rel_div_mod [THEN zmult2_lemma, THEN divmod_rel_mod])
done
@@ -1362,7 +1376,7 @@
apply (subst split_div, auto)
apply (subst split_zdiv, auto)
apply (rule_tac a="int (b * i) + int j" and b="int b" and r="int j" and r'=ja in IntDiv.unique_quotient)
-apply (auto simp add: IntDiv.quorem_def of_nat_mult)
+apply (auto simp add: IntDiv.divmod_rel_def of_nat_mult)
done
lemma zmod_int: "int (a mod b) = (int a) mod (int b)"
@@ -1370,7 +1384,7 @@
apply (subst split_zmod, auto)
apply (rule_tac a="int (b * i) + int j" and b="int b" and q="int i" and q'=ia
in unique_remainder)
-apply (auto simp add: IntDiv.quorem_def of_nat_mult)
+apply (auto simp add: IntDiv.divmod_rel_def of_nat_mult)
done
text{*Suggested by Matthias Daum*}
@@ -1431,7 +1445,7 @@
lemma of_int_num [code]:
"of_int k = (if k = 0 then 0 else if k < 0 then
- of_int (- k) else let
- (l, m) = divAlg (k, 2);
+ (l, m) = divmod k 2;
l' = of_int l
in if m = 0 then l' + l' else l' + l' + 1)"
proof -
@@ -1459,7 +1473,7 @@
show "x * of_int 2 = x + x"
unfolding int2 of_int_add right_distrib by simp
qed
- from aux1 show ?thesis by (auto simp add: divAlg_mod_div Let_def aux2 aux3)
+ from aux1 show ?thesis by (auto simp add: divmod_mod_div Let_def aux2 aux3)
qed
end
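The renamed divmod_rel now takes dividend and divisor as curried arguments and relates them to a (quotient, remainder) pair, while divmod packages posDivAlg and negDivAlg into a single function. Two small sanity checks, stated here only as a hedged sketch; they are not part of the changeset:

  (* the defining relation on a concrete instance: 7 = 3 * 2 + 1 with 0 <= 1 < 3 *)
  lemma "divmod_rel 7 (3::int) (2, 1)"
    by (simp add: divmod_rel_def)

  (* divmod computes quotient and remainder together *)
  lemma "divmod 7 (3::int) = (7 div 3, 7 mod 3)"
    by (rule divmod_mod_div)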
--- a/src/HOL/IsaMakefile Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/IsaMakefile Wed Jan 28 16:57:12 2009 +0100
@@ -331,10 +331,11 @@
Library/Binomial.thy Library/Eval_Witness.thy \
Library/Code_Index.thy Library/Code_Char.thy \
Library/Code_Char_chr.thy Library/Code_Integer.thy \
- Library/Numeral_Type.thy \
+ Library/Numeral_Type.thy Library/Reflection.thy \
Library/Boolean_Algebra.thy Library/Countable.thy \
Library/RBT.thy Library/Univ_Poly.thy \
- Library/Enum.thy Library/Float.thy $(SRC)/Tools/float.ML $(SRC)/HOL/Tools/float_arith.ML
+ Library/Enum.thy Library/Float.thy $(SRC)/Tools/float.ML $(SRC)/HOL/Tools/float_arith.ML \
+ Library/reify_data.ML Library/reflection.ML
@cd Library; $(ISABELLE_TOOL) usedir $(OUT)/HOL Library
@@ -809,14 +810,14 @@
ex/Lagrange.thy ex/LocaleTest2.thy ex/MT.thy \
ex/MergeSort.thy ex/MonoidGroup.thy ex/Multiquote.thy ex/NatSum.thy \
ex/Numeral.thy ex/PER.thy ex/PresburgerEx.thy ex/Primrec.thy \
- ex/Quickcheck_Examples.thy ex/Reflection.thy ex/reflection_data.ML \
+ ex/Quickcheck_Examples.thy \
ex/ReflectionEx.thy ex/ROOT.ML ex/Recdefs.thy ex/Records.thy \
ex/Reflected_Presburger.thy ex/coopertac.ML \
ex/Refute_Examples.thy ex/SAT_Examples.thy ex/SVC_Oracle.thy \
ex/Subarray.thy ex/Sublist.thy \
ex/Sudoku.thy ex/Tarski.thy ex/Termination.thy ex/Term_Of_Syntax.thy \
ex/Unification.thy ex/document/root.bib \
- ex/document/root.tex ex/Meson_Test.thy ex/reflection.ML ex/set.thy \
+ ex/document/root.tex ex/Meson_Test.thy ex/set.thy \
ex/svc_funcs.ML ex/svc_test.thy \
ex/ImperativeQuicksort.thy \
ex/BigO_Complex.thy \
@@ -968,13 +969,13 @@
HOL-Word: HOL $(OUT)/HOL-Word
-$(OUT)/HOL-Word: $(OUT)/HOL Word/ROOT.ML Library/Infinite_Set.thy \
+$(OUT)/HOL-Word: $(OUT)/HOL Word/ROOT.ML \
Library/Boolean_Algebra.thy \
Library/Numeral_Type.thy Word/Num_Lemmas.thy Word/TdThs.thy \
Word/Size.thy Word/BinGeneral.thy Word/BinOperations.thy \
Word/BinBoolList.thy Word/BitSyntax.thy Word/WordDefinition.thy \
Word/WordArith.thy Word/WordBitwise.thy Word/WordShift.thy \
- Word/WordGenLib.thy Word/WordMain.thy Word/document/root.tex \
+ Word/WordGenLib.thy Word/Word.thy Word/document/root.tex \
Word/document/root.bib
@cd Word; $(ISABELLE_TOOL) usedir -b -g true $(OUT)/HOL HOL-Word
--- a/src/HOL/Lattices.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Lattices.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Lattices.thy
- ID: $Id$
Author: Tobias Nipkow
*)
--- a/src/HOL/Library/Boolean_Algebra.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Boolean_Algebra.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,8 +1,5 @@
-(*
- ID: $Id$
- Author: Brian Huffman
-
- Boolean algebras as locales.
+(* Title: HOL/Library/Boolean_Algebra.thy
+ Author: Brian Huffman
*)
header {* Boolean Algebras *}
--- a/src/HOL/Library/Efficient_Nat.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Efficient_Nat.thy Wed Jan 28 16:57:12 2009 +0100
@@ -56,10 +56,10 @@
and @{term "op mod \<Colon> nat \<Rightarrow> nat \<Rightarrow> nat"} operations. *}
definition divmod_aux :: "nat \<Rightarrow> nat \<Rightarrow> nat \<times> nat" where
- [code del]: "divmod_aux = divmod"
+ [code del]: "divmod_aux = Divides.divmod"
lemma [code]:
- "divmod n m = (if m = 0 then (0, n) else divmod_aux n m)"
+ "Divides.divmod n m = (if m = 0 then (0, n) else divmod_aux n m)"
unfolding divmod_aux_def divmod_div_mod by simp
lemma divmod_aux_code [code]:
--- a/src/HOL/Library/Eval_Witness.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Eval_Witness.thy Wed Jan 28 16:57:12 2009 +0100
@@ -32,7 +32,7 @@
with the oracle.
*}
-class ml_equiv = type
+class ml_equiv
text {*
Instances of @{text "ml_equiv"} should only be declared for those types,
--- a/src/HOL/Library/Library.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Library.thy Wed Jan 28 16:57:12 2009 +0100
@@ -35,6 +35,7 @@
Quicksort
Quotient
Ramsey
+ Reflection
RBT
State_Monad
Univ_Poly
--- a/src/HOL/Library/Nat_Infinity.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Nat_Infinity.thy Wed Jan 28 16:57:12 2009 +0100
@@ -8,17 +8,6 @@
imports Plain "~~/src/HOL/Presburger"
begin
-text {* FIXME: move to Nat.thy *}
-
-instantiation nat :: bot
-begin
-
-definition bot_nat :: nat where
- "bot_nat = 0"
-
-instance proof
-qed (simp add: bot_nat_def)
-
subsection {* Type definition *}
text {*
@@ -26,8 +15,6 @@
infinity.
*}
-end
-
datatype inat = Fin nat | Infty
notation (xsymbols)
--- a/src/HOL/Library/Numeral_Type.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Numeral_Type.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,11 +1,8 @@
-(*
- ID: $Id$
- Author: Brian Huffman
-
- Numeral Syntax for Types
+(* Title: HOL/Library/Numeral_Type.thy
+ Author: Brian Huffman
*)
-header "Numeral Syntax for Types"
+header {* Numeral Syntax for Types *}
theory Numeral_Type
imports Plain "~~/src/HOL/Presburger"
--- a/src/HOL/Library/Quotient.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Library/Quotient.thy Wed Jan 28 16:57:12 2009 +0100
@@ -21,7 +21,7 @@
"\<sim> :: 'a => 'a => bool"}.
*}
-class eqv = type +
+class eqv =
fixes eqv :: "'a \<Rightarrow> 'a \<Rightarrow> bool" (infixl "\<sim>" 50)
class equiv = eqv +
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/Reflection.thy Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,45 @@
+(* Title: HOL/Library/Reflection.thy
+ Author: Amine Chaieb, TU Muenchen
+*)
+
+header {* Generic reflection and reification *}
+
+theory Reflection
+imports Main
+uses "reify_data.ML" ("reflection.ML")
+begin
+
+setup {* Reify_Data.setup *}
+
+lemma ext2: "(\<forall>x. f x = g x) \<Longrightarrow> f = g"
+ by (blast intro: ext)
+
+use "reflection.ML"
+
+method_setup reify = {* fn src =>
+ Method.syntax (Attrib.thms --
+ Scan.option (Scan.lift (Args.$$$ "(") |-- Args.term --| Scan.lift (Args.$$$ ")") )) src #>
+ (fn ((eqs, to), ctxt) => Method.SIMPLE_METHOD' (Reflection.genreify_tac ctxt (eqs @ (fst (Reify_Data.get ctxt))) to))
+*} "partial automatic reification"
+
+method_setup reflection = {*
+let
+ fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
+ val onlyN = "only";
+ val rulesN = "rules";
+ val any_keyword = keyword onlyN || keyword rulesN;
+ val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
+ val terms = thms >> map (term_of o Drule.dest_term);
+ fun optional scan = Scan.optional scan [];
+in fn src =>
+ Method.syntax (thms -- optional (keyword rulesN |-- thms) -- Scan.option (keyword onlyN |-- Args.term)) src #>
+ (fn (((eqs,ths),to), ctxt) =>
+ let
+ val (ceqs,cths) = Reify_Data.get ctxt
+ val corr_thms = ths@cths
+ val raw_eqs = eqs@ceqs
+ in Method.SIMPLE_METHOD' (Reflection.reflection_tac ctxt corr_thms raw_eqs to)
+ end) end
+*} "reflection method"
+
+end
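For orientation, a minimal sketch of how the two methods set up above could be invoked from a theory; the interpretation equations (eval.simps), the correctness rule (eval_correct) and the goal are hypothetical placeholders, not names provided by this theory:

    lemma "P (t :: 'a)"
      apply (reify eval.simps ("t"))    (* reify only the indicated subterm *)
      apply (reflection eval.simps rules: eval_correct only: "t")
      (* ... continue on the reified/reflected goal ... *)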
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/reflection.ML Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,327 @@
+(* Title: HOL/Library/reflection.ML
+ Author: Amine Chaieb, TU Muenchen
+
+An attempt at automatic reification.
+*)
+
+signature REFLECTION =
+sig
+ val genreify_tac: Proof.context -> thm list -> term option -> int -> tactic
+ val reflection_tac: Proof.context -> thm list -> thm list -> term option -> int -> tactic
+ val gen_reflection_tac: Proof.context -> (cterm -> thm)
+ -> thm list -> thm list -> term option -> int -> tactic
+end;
+
+structure Reflection : REFLECTION =
+struct
+
+val ext2 = @{thm ext2};
+val nth_Cons_0 = @{thm nth_Cons_0};
+val nth_Cons_Suc = @{thm nth_Cons_Suc};
+
+ (* Make a congruence rule out of a defining equation for the interpretation *)
+ (* th is one defining equation of f, i.e.
+ th is "f (Cp ?t1 ... ?tn) = P(f ?t1, .., f ?tn)" *)
+ (* Cp is a constructor pattern and P is a pattern *)
+
+ (* The result is:
+ [|?A1 = f ?t1 ; .. ; ?An= f ?tn |] ==> P (?A1, .., ?An) = f (Cp ?t1 .. ?tn) *)
+ (* plus a list of the names A1 .. An, which are fresh in the ctxt *)
+
+
+fun mk_congeq ctxt fs th =
+ let
+ val (f as Const(fN,fT)) = th |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq
+ |> fst |> strip_comb |> fst
+ val thy = ProofContext.theory_of ctxt
+ val cert = Thm.cterm_of thy
+ val (((_,_),[th']), ctxt') = Variable.import_thms true [th] ctxt
+ val (lhs, rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop (Thm.prop_of th'))
+ fun add_fterms (t as t1 $ t2) =
+ if exists (fn f => Term.could_unify (t |> strip_comb |> fst, f)) fs then insert (op aconv) t
+ else add_fterms t1 #> add_fterms t2
+ | add_fterms (t as Abs(xn,xT,t')) =
+ if exists_Const (fn (c, _) => c = fN) t then (fn _ => [t]) else (fn _ => [])
+ | add_fterms _ = I
+ val fterms = add_fterms rhs []
+ val (xs, ctxt'') = Variable.variant_fixes (replicate (length fterms) "x") ctxt'
+ val tys = map fastype_of fterms
+ val vs = map Free (xs ~~ tys)
+ val env = fterms ~~ vs
+ (* FIXME!!!!*)
+ fun replace_fterms (t as t1 $ t2) =
+ (case AList.lookup (op aconv) env t of
+ SOME v => v
+ | NONE => replace_fterms t1 $ replace_fterms t2)
+ | replace_fterms t = (case AList.lookup (op aconv) env t of
+ SOME v => v
+ | NONE => t)
+
+ fun mk_def (Abs(x,xT,t),v) = HOLogic.mk_Trueprop ((HOLogic.all_const xT)$ Abs(x,xT,HOLogic.mk_eq(v$(Bound 0), t)))
+ | mk_def (t, v) = HOLogic.mk_Trueprop (HOLogic.mk_eq (v, t))
+ fun tryext x = (x RS ext2 handle THM _ => x)
+ val cong = (Goal.prove ctxt'' [] (map mk_def env)
+ (HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, replace_fterms rhs)))
+ (fn x => LocalDefs.unfold_tac (#context x) (map tryext (#prems x))
+ THEN rtac th' 1)) RS sym
+
+ val (cong' :: vars') =
+ Variable.export ctxt'' ctxt (cong :: map (Drule.mk_term o cert) vs)
+ val vs' = map (fst o fst o Term.dest_Var o Thm.term_of o Drule.dest_term) vars'
+
+ in (vs', cong') end;
+ (* congs is a list of pairs (P,th) where th is a theorem for *)
+ (* [| f p1 = A1; ...; f pn = An|] ==> f (C p1 .. pn) = P *)
+val FWD = curry (op OF);
+
+ (* da is the decomposition for atoms, i.e. it returns ([],g) where g
+ returns the right instance f (AtC n) = t, where AtC is the atom
+ constructor and n is the index of the atom corresponding to t *)
+
+(* Generic decomp for reification: matches the actual term with the
+rhs of one cong rule. The result of the matching guides the
+proof synthesis: the matches of the introduced variables A1 .. An are
+processed recursively; the rest is instantiated in the cong rule,
+i.e. no reification is needed. *)
+
+exception REIF of string;
+
+val bds = ref ([]: (typ * ((term list) * (term list))) list);
+
+fun index_of t =
+ let
+ val tt = HOLogic.listT (fastype_of t)
+ in
+ (case AList.lookup Type.could_unify (!bds) tt of
+ NONE => error "index_of: type not found in environments!"
+ | SOME (tbs,tats) =>
+ let
+ val i = find_index_eq t tats
+ val j = find_index_eq t tbs
+ in (if j= ~1 then
+ if i= ~1
+ then (bds := AList.update Type.could_unify (tt,(tbs,tats@[t])) (!bds) ;
+ length tbs + length tats)
+ else i else j)
+ end)
+ end;
+
+fun dest_listT (Type ("List.list", [T])) = T;
+
+fun decomp_genreif da cgns (t,ctxt) =
+ let
+ val thy = ProofContext.theory_of ctxt
+ val cert = cterm_of thy
+ fun tryabsdecomp (s,ctxt) =
+ (case s of
+ Abs(xn,xT,ta) =>
+ (let
+ val ([xn],ctxt') = Variable.variant_fixes ["x"] ctxt
+ val (xn,ta) = variant_abs (xn,xT,ta)
+ val x = Free(xn,xT)
+ val _ = (case AList.lookup Type.could_unify (!bds) (HOLogic.listT xT)
+ of NONE => error "tryabsdecomp: Type not found in the environment"
+ | SOME (bsT,atsT) =>
+ (bds := AList.update Type.could_unify (HOLogic.listT xT, ((x::bsT), atsT)) (!bds)))
+ in ([(ta, ctxt')] ,
+ fn [th] => ((let val (bsT,asT) = the(AList.lookup Type.could_unify (!bds) (HOLogic.listT xT))
+ in (bds := AList.update Type.could_unify (HOLogic.listT xT,(tl bsT,asT)) (!bds))
+ end) ;
+ hd (Variable.export ctxt' ctxt [(forall_intr (cert x) th) COMP allI])))
+ end)
+ | _ => da (s,ctxt))
+ in
+ (case cgns of
+ [] => tryabsdecomp (t,ctxt)
+ | ((vns,cong)::congs) => ((let
+ val cert = cterm_of thy
+ val certy = ctyp_of thy
+ val (tyenv, tmenv) =
+ Pattern.match thy
+ ((fst o HOLogic.dest_eq o HOLogic.dest_Trueprop) (concl_of cong), t)
+ (Envir.type_env (Envir.empty 0), Vartab.empty)
+ val (fnvs,invs) = List.partition (fn ((vn,_),_) => vn mem vns) (Vartab.dest tmenv)
+ val (fts,its) =
+ (map (snd o snd) fnvs,
+ map (fn ((vn,vi),(tT,t)) => (cert(Var ((vn,vi),tT)), cert t)) invs)
+ val ctyenv = map (fn ((vn,vi),(s,ty)) => (certy (TVar((vn,vi),s)), certy ty)) (Vartab.dest tyenv)
+ in (fts ~~ (replicate (length fts) ctxt), FWD (instantiate (ctyenv, its) cong))
+ end)
+ handle MATCH => decomp_genreif da congs (t,ctxt)))
+ end;
+
+ (* looks for the atoms equation and instantiates it with the right number *)
+
+
+fun mk_decompatom eqs (t,ctxt) =
+let
+ val tT = fastype_of t
+ fun isat eq =
+ let
+ val rhs = eq |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> snd
+ in exists_Const
+ (fn (n,ty) => n="List.nth"
+ andalso
+ AList.defined Type.could_unify (!bds) (domain_type ty)) rhs
+ andalso Type.could_unify (fastype_of rhs, tT)
+ end
+ fun get_nths t acc =
+ case t of
+ Const("List.nth",_)$vs$n => insert (fn ((a,_),(b,_)) => a aconv b) (t,(vs,n)) acc
+ | t1$t2 => get_nths t1 (get_nths t2 acc)
+ | Abs(_,_,t') => get_nths t' acc
+ | _ => acc
+
+ fun
+ tryeqs [] = error "Cannot find the atoms equation"
+ | tryeqs (eq::eqs) = ((
+ let
+ val rhs = eq |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> snd
+ val nths = get_nths rhs []
+ val (vss,ns) = fold_rev (fn (_,(vs,n)) => fn (vss,ns) =>
+ (insert (op aconv) vs vss, insert (op aconv) n ns)) nths ([],[])
+ val (vsns, ctxt') = Variable.variant_fixes (replicate (length vss) "vs") ctxt
+ val (xns, ctxt'') = Variable.variant_fixes (replicate (length nths) "x") ctxt'
+ val thy = ProofContext.theory_of ctxt''
+ val cert = cterm_of thy
+ val certT = ctyp_of thy
+ val vsns_map = vss ~~ vsns
+ val xns_map = (fst (split_list nths)) ~~ xns
+ val subst = map (fn (nt, xn) => (nt, Var ((xn,0), fastype_of nt))) xns_map
+ val rhs_P = subst_free subst rhs
+ val (tyenv, tmenv) = Pattern.match
+ thy (rhs_P, t)
+ (Envir.type_env (Envir.empty 0), Vartab.empty)
+ val sbst = Envir.subst_vars (tyenv, tmenv)
+ val sbsT = Envir.typ_subst_TVars tyenv
+ val subst_ty = map (fn (n,(s,t)) => (certT (TVar (n, s)), certT t))
+ (Vartab.dest tyenv)
+ val tml = Vartab.dest tmenv
+ val t's = map (fn xn => snd (valOf (AList.lookup (op =) tml (xn,0)))) xns (* FIXME : Express with sbst*)
+ val subst_ns = map (fn (Const _ $ vs $ n, Var (xn0,T)) =>
+ (cert n, snd (valOf (AList.lookup (op =) tml xn0))
+ |> (index_of #> HOLogic.mk_nat #> cert)))
+ subst
+ val subst_vs =
+ let
+ fun ty (Const _ $ (vs as Var (vsn,lT)) $ n, Var (xn0,T)) = (certT T, certT (sbsT T))
+ fun h (Const _ $ (vs as Var (vsn,lT)) $ n, Var (xn0,T)) =
+ let
+ val cns = sbst (Const("List.list.Cons", T --> lT --> lT))
+ val lT' = sbsT lT
+ val (bsT,asT) = the (AList.lookup Type.could_unify (!bds) lT)
+ val vsn = valOf (AList.lookup (op =) vsns_map vs)
+ val cvs = cert (fold_rev (fn x => fn xs => cns$x$xs) bsT (Free (vsn, lT')))
+ in (cert vs, cvs) end
+ in map h subst end
+ val cts = map (fn ((vn,vi),(tT,t)) => (cert(Var ((vn,vi),tT)), cert t))
+ (fold (AList.delete (fn (((a: string),_),(b,_)) => a = b))
+ (map (fn n => (n,0)) xns) tml)
+ val substt =
+ let val ih = Drule.cterm_rule (Thm.instantiate (subst_ty,[]))
+ in map (fn (v,t) => (ih v, ih t)) (subst_ns@subst_vs@cts) end
+ val th = (instantiate (subst_ty, substt) eq) RS sym
+ in hd (Variable.export ctxt'' ctxt [th]) end)
+ handle MATCH => tryeqs eqs)
+in ([], fn _ => tryeqs (filter isat eqs))
+end;
+
+ (* Generic reification procedure: *)
+ (* creates all needed cong rules and then just uses the theorem synthesis *)
+
+ fun mk_congs ctxt raw_eqs =
+ let
+ val fs = fold_rev (fn eq =>
+ insert (op =) (eq |> prop_of |> HOLogic.dest_Trueprop
+ |> HOLogic.dest_eq |> fst |> strip_comb
+ |> fst)) raw_eqs []
+ val tys = fold_rev (fn f => fn ts => (f |> fastype_of |> binder_types |> tl)
+ union ts) fs []
+ val _ = bds := AList.make (fn _ => ([],[])) tys
+ val (vs, ctxt') = Variable.variant_fixes (replicate (length tys) "vs") ctxt
+ val thy = ProofContext.theory_of ctxt'
+ val cert = cterm_of thy
+ val vstys = map (fn (t,v) => (t,SOME (cert (Free(v,t)))))
+ (tys ~~ vs)
+ val is_Var = can dest_Var
+ fun insteq eq vs =
+ let
+ val subst = map (fn (v as Var(n,t)) => (cert v, (valOf o valOf) (AList.lookup (op =) vstys t)))
+ (filter is_Var vs)
+ in Thm.instantiate ([],subst) eq
+ end
+ val eqs = map (fn eq => eq |> prop_of |> HOLogic.dest_Trueprop
+ |> HOLogic.dest_eq |> fst |> strip_comb |> snd |> tl
+ |> (insteq eq)) raw_eqs
+ val (ps,congs) = split_list (map (mk_congeq ctxt' fs) eqs)
+in ps ~~ (Variable.export ctxt' ctxt congs)
+end
+
+fun partition P [] = ([],[])
+ | partition P (x::xs) =
+ let val (yes,no) = partition P xs
+ in if P x then (x::yes,no) else (yes, x::no) end
+
+fun rearrange congs =
+let
+ fun P (_, th) =
+ let val @{term "Trueprop"}$(Const ("op =",_) $l$_) = concl_of th
+ in can dest_Var l end
+ val (yes,no) = partition P congs
+ in no @ yes end
+
+
+
+fun genreif ctxt raw_eqs t =
+ let
+ val congs = rearrange (mk_congs ctxt raw_eqs)
+ val th = divide_and_conquer (decomp_genreif (mk_decompatom raw_eqs) congs) (t,ctxt)
+ fun is_listVar (Var (_,t)) = can dest_listT t
+ | is_listVar _ = false
+ val vars = th |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> snd
+ |> strip_comb |> snd |> filter is_listVar
+ val cert = cterm_of (ProofContext.theory_of ctxt)
+ val cvs = map (fn (v as Var(n,t)) => (cert v, the (AList.lookup Type.could_unify (!bds) t) |> snd |> HOLogic.mk_list (dest_listT t) |> cert)) vars
+ val th' = instantiate ([], cvs) th
+ val t' = (fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o prop_of) th'
+ val th'' = Goal.prove ctxt [] [] (HOLogic.mk_Trueprop (HOLogic.mk_eq (t, t')))
+ (fn _ => simp_tac (local_simpset_of ctxt) 1)
+ val _ = bds := []
+in FWD trans [th'',th']
+end
+
+
+fun genreflect ctxt conv corr_thms raw_eqs t =
+let
+ val reifth = genreif ctxt raw_eqs t
+ fun trytrans [] = error "No suitable correctness theorem found"
+ | trytrans (th::ths) =
+ (FWD trans [reifth, th RS sym] handle THM _ => trytrans ths)
+ val th = trytrans corr_thms
+ val ft = (Thm.dest_arg1 o Thm.dest_arg o Thm.dest_arg o cprop_of) th
+ val rth = conv ft
+in simplify (HOL_basic_ss addsimps raw_eqs addsimps [nth_Cons_0, nth_Cons_Suc])
+ (simplify (HOL_basic_ss addsimps [rth]) th)
+end
+
+fun genreify_tac ctxt eqs to i = (fn st =>
+ let
+ val P = HOLogic.dest_Trueprop (List.nth (prems_of st, i - 1))
+ val t = (case to of NONE => P | SOME x => x)
+ val th = (genreif ctxt eqs t) RS ssubst
+ in rtac th i st
+ end);
+
+ (* Reflection calls reification and uses the correctness *)
+ (* theorem assumed to be the head of the list *)
+fun gen_reflection_tac ctxt conv corr_thms raw_eqs to i = (fn st =>
+ let
+ val P = HOLogic.dest_Trueprop (nth (prems_of st) (i - 1));
+ val t = the_default P to;
+ val th = genreflect ctxt conv corr_thms raw_eqs t
+ RS ssubst;
+ in (rtac th i THEN TRY(rtac TrueI i)) st end);
+
+fun reflection_tac ctxt = gen_reflection_tac ctxt Codegen.evaluation_conv;
+
+end
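At the ML level the same functionality is reachable through the REFLECTION signature above; a hedged sketch of direct use, assuming ctxt is bound to a Proof.context and eval.simps / eval_correct are placeholder fact names:

    (* reify subgoal 1, reifying the whole conclusion (no target term given) *)
    val reify_tac = Reflection.genreify_tac ctxt @{thms eval.simps} NONE 1;

    (* reflect subgoal 1 using a correctness theorem, then evaluate via the code generator *)
    val reflect_tac = Reflection.reflection_tac ctxt @{thms eval_correct} @{thms eval.simps} NONE 1;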
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/reify_data.ML Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,39 @@
+(* Title: HOL/Library/reify_data.ML
+ Author: Amine Chaieb, TU Muenchen
+
+Data for the reification and reflection methods.
+*)
+
+signature REIFY_DATA =
+sig
+ val get: Proof.context -> thm list * thm list
+ val add: attribute
+ val del: attribute
+ val radd: attribute
+ val rdel: attribute
+ val setup: theory -> theory
+end;
+
+structure Reify_Data : REIFY_DATA =
+struct
+
+structure Data = GenericDataFun
+(
+ type T = thm list * thm list;
+ val empty = ([], []);
+ val extend = I;
+ fun merge _ = pairself Thm.merge_thms;
+);
+
+val get = Data.get o Context.Proof;
+
+val add = Thm.declaration_attribute (Data.map o apfst o Thm.add_thm);
+val del = Thm.declaration_attribute (Data.map o apfst o Thm.del_thm);
+val radd = Thm.declaration_attribute (Data.map o apsnd o Thm.add_thm);
+val rdel = Thm.declaration_attribute (Data.map o apsnd o Thm.del_thm);
+
+val setup = Attrib.add_attributes
+ [("reify", Attrib.add_del_args add del, "Reify data"),
+ ("reflection", Attrib.add_del_args radd rdel, "Reflection data")];
+
+end;
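The attribute setup above registers "reify" and "reflection" as add/del attributes, so default reification equations and correctness rules can be declared once and are then merged with whatever the reify/reflection methods are given explicitly; an illustrative declaration with placeholder fact names:

    declare eval.simps [reify]          (* default interpretation equations *)
    declare eval_correct [reflection]   (* default correctness rules *)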
--- a/src/HOL/List.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/List.thy Wed Jan 28 16:57:12 2009 +0100
@@ -2887,6 +2887,35 @@
apply (auto simp: sorted_distinct_set_unique)
done
+lemma sorted_take:
+ "sorted xs \<Longrightarrow> sorted (take n xs)"
+proof (induct xs arbitrary: n rule: sorted.induct)
+ case 1 show ?case by simp
+next
+ case 2 show ?case by (cases n) simp_all
+next
+ case (3 x y xs)
+ then have "x \<le> y" by simp
+ show ?case proof (cases n)
+ case 0 then show ?thesis by simp
+ next
+ case (Suc m)
+ with 3 have "sorted (take m (y # xs))" by simp
+ with Suc `x \<le> y` show ?thesis by (cases m) simp_all
+ qed
+qed
+
+lemma sorted_drop:
+ "sorted xs \<Longrightarrow> sorted (drop n xs)"
+proof (induct xs arbitrary: n rule: sorted.induct)
+ case 1 show ?case by simp
+next
+ case 2 show ?case by (cases n) simp_all
+next
+ case 3 then show ?case by (cases n) simp_all
+qed
+
+
end
lemma sorted_upt[simp]: "sorted[i..<j]"
--- a/src/HOL/Lubs.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Lubs.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title : Lubs.thy
- ID : $Id$
Author : Jacques D. Fleuriot
Copyright : 1998 University of Cambridge
*)
@@ -7,7 +6,7 @@
header{*Definitions of Upper Bounds and Least Upper Bounds*}
theory Lubs
-imports Plain
+imports Plain Main
begin
text{*Thanks to suggestions by James Margetson*}
--- a/src/HOL/Map.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Map.thy Wed Jan 28 16:57:12 2009 +0100
@@ -503,6 +503,15 @@
lemma map_add_comm: "dom m1 \<inter> dom m2 = {} \<Longrightarrow> m1++m2 = m2++m1"
by (rule ext) (force simp: map_add_def dom_def split: option.split)
+lemma dom_const [simp]:
+ "dom (\<lambda>x. Some y) = UNIV"
+ by auto
+
+lemma dom_if:
+ "dom (\<lambda>x. if P x then f x else g x) = dom f \<inter> {x. P x} \<union> dom g \<inter> {x. \<not> P x}"
+ by (auto split: if_splits)
+
+
(* Due to John Matthews - could be rephrased with dom *)
lemma finite_map_freshness:
"finite (dom (f :: 'a \<rightharpoonup> 'b)) \<Longrightarrow> \<not> finite (UNIV :: 'a set) \<Longrightarrow>
--- a/src/HOL/Nat.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nat.thy Wed Jan 28 16:57:12 2009 +0100
@@ -425,6 +425,17 @@
end
+instantiation nat :: bot
+begin
+
+definition bot_nat :: nat where
+ "bot_nat = 0"
+
+instance proof
+qed (simp add: bot_nat_def)
+
+end
+
subsubsection {* Introduction properties *}
lemma lessI [iff]: "n < Suc n"
@@ -1515,7 +1526,7 @@
subsection {* size of a datatype value *}
-class size = type +
+class size =
fixes size :: "'a \<Rightarrow> nat" -- {* see further theory @{text Wellfounded} *}
end
--- a/src/HOL/Nominal/Examples/W.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/Examples/W.thy Wed Jan 28 16:57:12 2009 +0100
@@ -49,7 +49,7 @@
text {* free type variables *}
-class ftv = type +
+class ftv =
fixes ftv :: "'a \<Rightarrow> tvar list"
instantiation * :: (ftv, ftv) ftv
--- a/src/HOL/Nominal/nominal_atoms.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_atoms.ML Wed Jan 28 16:57:12 2009 +0100
@@ -90,6 +90,9 @@
let val T = fastype_of x
in Const ("List.list.Cons", T --> HOLogic.listT T --> HOLogic.listT T) $ x $ xs end;
+fun add_thms_string args = PureThy.add_thms ((map o apfst o apfst) Binding.name args);
+fun add_thmss_string args = PureThy.add_thmss ((map o apfst o apfst) Binding.name args);
+
(* this function sets up all matters related to atom- *)
(* kinds; the user specifies a list of atom-kind names *)
(* atom_decl <ak1> ... <akn> *)
@@ -121,7 +124,7 @@
atac 1]
val (inj_thm,thy2) =
- PureThy.add_thms [((ak^"_inj",Goal.prove_global thy1 [] [] stmnt1 proof1), [])] thy1
+ add_thms_string [((ak^"_inj",Goal.prove_global thy1 [] [] stmnt1 proof1), [])] thy1
(* second statement *)
val y = Free ("y",ak_type)
@@ -136,7 +139,7 @@
(* third statement *)
val (inject_thm,thy3) =
- PureThy.add_thms [((ak^"_injection",Goal.prove_global thy2 [] [] stmnt2 proof2), [])] thy2
+ add_thms_string [((ak^"_injection",Goal.prove_global thy2 [] [] stmnt2 proof2), [])] thy2
val stmnt3 = HOLogic.mk_Trueprop
(HOLogic.mk_not
@@ -152,7 +155,7 @@
simp_tac (HOL_basic_ss addsimps simp3) 1]
val (inf_thm,thy4) =
- PureThy.add_thms [((ak^"_infinite",Goal.prove_global thy3 [] [] stmnt3 proof3), [])] thy3
+ add_thms_string [((ak^"_infinite",Goal.prove_global thy3 [] [] stmnt3 proof3), [])] thy3
in
((inj_thm,inject_thm,inf_thm),thy4)
end) ak_names thy
@@ -186,7 +189,7 @@
val def2 = Logic.mk_equals (cswap $ ab $ c, cswap_akname $ ab $ c)
in
thy |> Sign.add_consts_i [("swap_" ^ ak_name, swapT, NoSyn)]
- |> PureThy.add_defs_unchecked true [((name, def2),[])]
+ |> PureThy.add_defs_unchecked true [((Binding.name name, def2),[])]
|> snd
|> OldPrimrecPackage.add_primrec_unchecked_i "" [(("", def1),[])]
end) ak_names_types thy1;
@@ -241,14 +244,14 @@
val def = Logic.mk_equals
(cperm $ pi $ a, if ak_name = ak_name' then cperm_def $ pi $ a else a)
in
- PureThy.add_defs_unchecked true [((name, def),[])] thy'
+ PureThy.add_defs_unchecked true [((Binding.name name, def),[])] thy'
end) ak_names_types thy) ak_names_types thy4;
(* proves that every atom-kind is an instance of at *)
(* lemma at_<ak>_inst: *)
(* at TYPE(<ak>) *)
val (prm_cons_thms,thy6) =
- thy5 |> PureThy.add_thms (map (fn (ak_name, T) =>
+ thy5 |> add_thms_string (map (fn (ak_name, T) =>
let
val ak_name_qu = Sign.full_bname thy5 (ak_name);
val i_type = Type(ak_name_qu,[]);
@@ -309,7 +312,7 @@
(* lemma pt_<ak>_inst: *)
(* pt TYPE('x::pt_<ak>) TYPE(<ak>) *)
val (prm_inst_thms,thy8) =
- thy7 |> PureThy.add_thms (map (fn (ak_name, T) =>
+ thy7 |> add_thms_string (map (fn (ak_name, T) =>
let
val ak_name_qu = Sign.full_bname thy7 ak_name;
val pt_name_qu = Sign.full_bname thy7 ("pt_"^ak_name);
@@ -355,7 +358,7 @@
(* lemma abst_<ak>_inst: *)
(* fs TYPE('x::pt_<ak>) TYPE (<ak>) *)
val (fs_inst_thms,thy12) =
- thy11 |> PureThy.add_thms (map (fn (ak_name, T) =>
+ thy11 |> add_thms_string (map (fn (ak_name, T) =>
let
val ak_name_qu = Sign.full_bname thy11 ak_name;
val fs_name_qu = Sign.full_bname thy11 ("fs_"^ak_name);
@@ -428,7 +431,7 @@
rtac allI 1, rtac allI 1, rtac allI 1,
rtac cp1 1];
in
- yield_singleton PureThy.add_thms ((name,
+ yield_singleton add_thms_string ((name,
Goal.prove_global thy' [] [] statement proof), []) thy'
end)
ak_names_types thy) ak_names_types thy12b;
@@ -460,7 +463,7 @@
val proof = fn _ => simp_tac simp_s 1;
in
- PureThy.add_thms [((name, Goal.prove_global thy' [] [] statement proof), [])] thy'
+ add_thms_string [((name, Goal.prove_global thy' [] [] statement proof), [])] thy'
end
else
([],thy'))) (* do nothing branch, if ak_name = ak_name' *)
@@ -870,114 +873,114 @@
fun inst_pt_pt_at_cp_dj thms = inst_zip djs (inst_pt_pt_at_cp thms);
in
thy32
- |> PureThy.add_thmss [(("alpha", inst_pt_at [abs_fun_eq]),[])]
- ||>> PureThy.add_thmss [(("alpha'", inst_pt_at [abs_fun_eq']),[])]
- ||>> PureThy.add_thmss [(("alpha_fresh", inst_pt_at [abs_fun_fresh]),[])]
- ||>> PureThy.add_thmss [(("alpha_fresh'", inst_pt_at [abs_fun_fresh']),[])]
- ||>> PureThy.add_thmss [(("perm_swap", inst_pt_at [pt_swap_bij] @ inst_pt_at [pt_swap_bij']),[])]
- ||>> PureThy.add_thmss
+ |> add_thmss_string [(("alpha", inst_pt_at [abs_fun_eq]),[])]
+ ||>> add_thmss_string [(("alpha'", inst_pt_at [abs_fun_eq']),[])]
+ ||>> add_thmss_string [(("alpha_fresh", inst_pt_at [abs_fun_fresh]),[])]
+ ||>> add_thmss_string [(("alpha_fresh'", inst_pt_at [abs_fun_fresh']),[])]
+ ||>> add_thmss_string [(("perm_swap", inst_pt_at [pt_swap_bij] @ inst_pt_at [pt_swap_bij']),[])]
+ ||>> add_thmss_string
let val thms1 = inst_at at_swap_simps
and thms2 = inst_dj [dj_perm_forget]
in [(("swap_simps", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [pt_pi_rev];
val thms2 = inst_pt_at [pt_rev_pi];
in [(("perm_pi_simp",thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss [(("perm_fresh_fresh", inst_pt_at [pt_fresh_fresh]),[])]
- ||>> PureThy.add_thmss [(("perm_bij", inst_pt_at [pt_bij]),[])]
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string [(("perm_fresh_fresh", inst_pt_at [pt_fresh_fresh]),[])]
+ ||>> add_thmss_string [(("perm_bij", inst_pt_at [pt_bij]),[])]
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [pt_perm_compose];
val thms2 = instR cp1 (Library.flat cps');
in [(("perm_compose",thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss [(("perm_compose'",inst_pt_at [pt_perm_compose']),[])]
- ||>> PureThy.add_thmss [(("perm_app", inst_pt_at [perm_app]),[])]
- ||>> PureThy.add_thmss [(("supp_atm", (inst_at [at_supp]) @ (inst_dj [dj_supp])),[])]
- ||>> PureThy.add_thmss [(("exists_fresh", inst_at [at_exists_fresh]),[])]
- ||>> PureThy.add_thmss [(("exists_fresh'", inst_at [at_exists_fresh']),[])]
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string [(("perm_compose'",inst_pt_at [pt_perm_compose']),[])]
+ ||>> add_thmss_string [(("perm_app", inst_pt_at [perm_app]),[])]
+ ||>> add_thmss_string [(("supp_atm", (inst_at [at_supp]) @ (inst_dj [dj_supp])),[])]
+ ||>> add_thmss_string [(("exists_fresh", inst_at [at_exists_fresh]),[])]
+ ||>> add_thmss_string [(("exists_fresh'", inst_at [at_exists_fresh']),[])]
+ ||>> add_thmss_string
let
val thms1 = inst_pt_at [all_eqvt];
val thms2 = map (fold_rule [inductive_forall_def]) thms1
in
[(("all_eqvt", thms1 @ thms2), [NominalThmDecls.eqvt_force_add])]
end
- ||>> PureThy.add_thmss [(("ex_eqvt", inst_pt_at [ex_eqvt]),[NominalThmDecls.eqvt_force_add])]
- ||>> PureThy.add_thmss [(("ex1_eqvt", inst_pt_at [ex1_eqvt]),[NominalThmDecls.eqvt_force_add])]
- ||>> PureThy.add_thmss [(("the_eqvt", inst_pt_at [the_eqvt]),[NominalThmDecls.eqvt_force_add])]
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string [(("ex_eqvt", inst_pt_at [ex_eqvt]),[NominalThmDecls.eqvt_force_add])]
+ ||>> add_thmss_string [(("ex1_eqvt", inst_pt_at [ex1_eqvt]),[NominalThmDecls.eqvt_force_add])]
+ ||>> add_thmss_string [(("the_eqvt", inst_pt_at [the_eqvt]),[NominalThmDecls.eqvt_force_add])]
+ ||>> add_thmss_string
let val thms1 = inst_at [at_fresh]
val thms2 = inst_dj [at_fresh_ineq]
in [(("fresh_atm", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_at at_calc
and thms2 = inst_dj [dj_perm_forget]
in [(("calc_atm", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [abs_fun_pi]
and thms2 = inst_pt_pt_at_cp [abs_fun_pi_ineq]
in [(("abs_perm", thms1 @ thms2),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_dj [dj_perm_forget]
and thms2 = inst_dj [dj_pp_forget]
in [(("perm_dj", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at_fs [fresh_iff]
and thms2 = inst_pt_at [fresh_iff]
and thms3 = inst_pt_pt_at_cp_dj [fresh_iff_ineq]
in [(("abs_fresh", thms1 @ thms2 @ thms3),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [abs_fun_supp]
and thms2 = inst_pt_at_fs [abs_fun_supp]
and thms3 = inst_pt_pt_at_cp_dj [abs_fun_supp_ineq]
in [(("abs_supp", thms1 @ thms2 @ thms3),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [fresh_left]
and thms2 = inst_pt_pt_at_cp [fresh_left_ineq]
in [(("fresh_left", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [fresh_right]
and thms2 = inst_pt_pt_at_cp [fresh_right_ineq]
in [(("fresh_right", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [fresh_bij]
and thms2 = inst_pt_pt_at_cp [fresh_bij_ineq]
in [(("fresh_bij", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at fresh_star_bij
and thms2 = flat (map (fn ti => inst_pt_pt_at_cp [ti]) fresh_star_bij_ineq);
in [(("fresh_star_bij", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [fresh_eqvt]
and thms2 = inst_pt_pt_at_cp_dj [fresh_eqvt_ineq]
in [(("fresh_eqvt", thms1 @ thms2),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [in_eqvt]
in [(("in_eqvt", thms1),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [eq_eqvt]
in [(("eq_eqvt", thms1),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [set_diff_eqvt]
in [(("set_diff_eqvt", thms1),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [subseteq_eqvt]
in [(("subseteq_eqvt", thms1),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss [(("insert_eqvt", inst_pt_at [insert_eqvt]), [NominalThmDecls.eqvt_add])]
- ||>> PureThy.add_thmss [(("set_eqvt", inst_pt_at [set_eqvt]), [NominalThmDecls.eqvt_add])]
- ||>> PureThy.add_thmss [(("perm_set_eq", inst_pt_at [perm_set_eq]), [])]
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string [(("insert_eqvt", inst_pt_at [insert_eqvt]), [NominalThmDecls.eqvt_add])]
+ ||>> add_thmss_string [(("set_eqvt", inst_pt_at [set_eqvt]), [NominalThmDecls.eqvt_add])]
+ ||>> add_thmss_string [(("perm_set_eq", inst_pt_at [perm_set_eq]), [])]
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [fresh_aux]
and thms2 = inst_pt_pt_at_cp_dj [fresh_perm_app_ineq]
in [(("fresh_aux", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [fresh_perm_app]
and thms2 = inst_pt_pt_at_cp_dj [fresh_perm_app_ineq]
in [(("fresh_perm_app", thms1 @ thms2),[])] end
- ||>> PureThy.add_thmss
+ ||>> add_thmss_string
let val thms1 = inst_pt_at [pt_perm_supp]
and thms2 = inst_pt_pt_at_cp [pt_perm_supp_ineq]
in [(("supp_eqvt", thms1 @ thms2),[NominalThmDecls.eqvt_add])] end
- ||>> PureThy.add_thmss [(("fin_supp",fs_axs),[])]
+ ||>> add_thmss_string [(("fin_supp",fs_axs),[])]
end;
in
--- a/src/HOL/Nominal/nominal_induct.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_induct.ML Wed Jan 28 16:57:12 2009 +0100
@@ -6,7 +6,7 @@
structure NominalInduct:
sig
- val nominal_induct_tac: Proof.context -> (Binding.T option * term) option list list ->
+ val nominal_induct_tac: Proof.context -> (binding option * term) option list list ->
(string * typ) list -> (string * typ) list list -> thm list ->
thm list -> int -> RuleCases.cases_tactic
val nominal_induct_method: Method.src -> Proof.context -> Method.method
--- a/src/HOL/Nominal/nominal_inductive.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_inductive.ML Wed Jan 28 16:57:12 2009 +0100
@@ -562,17 +562,17 @@
[ind_case_names, RuleCases.consumes 1]);
val ([strong_induct'], thy') = thy |>
Sign.add_path rec_name |>
- PureThy.add_thms [(("strong_induct", #1 strong_induct), #2 strong_induct)];
+ PureThy.add_thms [((Binding.name "strong_induct", #1 strong_induct), #2 strong_induct)];
val strong_inducts =
ProjectRule.projects ctxt (1 upto length names) strong_induct'
in
thy' |>
- PureThy.add_thmss [(("strong_inducts", strong_inducts),
+ PureThy.add_thmss [((Binding.name "strong_inducts", strong_inducts),
[ind_case_names, RuleCases.consumes 1])] |> snd |>
Sign.parent_path |>
fold (fn ((name, elim), (_, cases)) =>
Sign.add_path (Sign.base_name name) #>
- PureThy.add_thms [(("strong_cases", elim),
+ PureThy.add_thms [((Binding.name "strong_cases", elim),
[RuleCases.case_names (map snd cases),
RuleCases.consumes 1])] #> snd #>
Sign.parent_path) (strong_cases ~~ induct_cases')
@@ -653,7 +653,7 @@
in
fold (fn (name, ths) =>
Sign.add_path (Sign.base_name name) #>
- PureThy.add_thmss [(("eqvt", ths), [NominalThmDecls.eqvt_add])] #> snd #>
+ PureThy.add_thmss [((Binding.name "eqvt", ths), [NominalThmDecls.eqvt_add])] #> snd #>
Sign.parent_path) (names ~~ transp thss) thy
end;
--- a/src/HOL/Nominal/nominal_inductive2.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_inductive2.ML Wed Jan 28 16:57:12 2009 +0100
@@ -458,12 +458,12 @@
[ind_case_names, RuleCases.consumes 1]);
val ([strong_induct'], thy') = thy |>
Sign.add_path rec_name |>
- PureThy.add_thms [(("strong_induct", #1 strong_induct), #2 strong_induct)];
+ PureThy.add_thms [((Binding.name "strong_induct", #1 strong_induct), #2 strong_induct)];
val strong_inducts =
ProjectRule.projects ctxt (1 upto length names) strong_induct'
in
thy' |>
- PureThy.add_thmss [(("strong_inducts", strong_inducts),
+ PureThy.add_thmss [((Binding.name "strong_inducts", strong_inducts),
[ind_case_names, RuleCases.consumes 1])] |> snd |>
Sign.parent_path
end))
--- a/src/HOL/Nominal/nominal_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -490,13 +490,13 @@
(full_new_type_names' ~~ tyvars) thy
end) atoms |>
PureThy.add_thmss
- [((space_implode "_" new_type_names ^ "_unfolded_perm_eq",
+ [((Binding.name (space_implode "_" new_type_names ^ "_unfolded_perm_eq"),
unfolded_perm_eq_thms), [Simplifier.simp_add]),
- ((space_implode "_" new_type_names ^ "_perm_empty",
+ ((Binding.name (space_implode "_" new_type_names ^ "_perm_empty"),
perm_empty_thms), [Simplifier.simp_add]),
- ((space_implode "_" new_type_names ^ "_perm_append",
+ ((Binding.name (space_implode "_" new_type_names ^ "_perm_append"),
perm_append_thms), [Simplifier.simp_add]),
- ((space_implode "_" new_type_names ^ "_perm_eq",
+ ((Binding.name (space_implode "_" new_type_names ^ "_perm_eq"),
perm_eq_thms), [Simplifier.simp_add])];
(**** Define representing sets ****)
@@ -627,7 +627,7 @@
val pi = Free ("pi", permT);
val T = Type (Sign.intern_type thy name, map TFree tvs);
in apfst (pair r o hd)
- (PureThy.add_defs_unchecked true [(("prm_" ^ name ^ "_def", Logic.mk_equals
+ (PureThy.add_defs_unchecked true [((Binding.name ("prm_" ^ name ^ "_def"), Logic.mk_equals
(Const ("Nominal.perm", permT --> T --> T) $ pi $ Free ("x", T),
Const (Sign.intern_const thy ("Abs_" ^ name), U --> T) $
(Const ("Nominal.perm", permT --> U --> U) $ pi $
@@ -801,7 +801,7 @@
val def_name = (Sign.base_name cname) ^ "_def";
val ([def_thm], thy') = thy |>
Sign.add_consts_i [(cname', constrT, mx)] |>
- (PureThy.add_defs false o map Thm.no_attributes) [(def_name, def)]
+ (PureThy.add_defs false o map Thm.no_attributes) [(Binding.name def_name, def)]
in (thy', defs @ [def_thm], eqns @ [eqn]) end;
fun dt_constr_defs ((thy, defs, eqns, dist_lemmas), ((((((_, (_, _, constrs)),
@@ -1114,8 +1114,8 @@
val (_, thy9) = thy8 |>
Sign.add_path big_name |>
- PureThy.add_thms [(("induct", dt_induct), [case_names_induct])] ||>>
- PureThy.add_thmss [(("inducts", projections dt_induct), [case_names_induct])] ||>
+ PureThy.add_thms [((Binding.name "induct", dt_induct), [case_names_induct])] ||>>
+ PureThy.add_thmss [((Binding.name "inducts", projections dt_induct), [case_names_induct])] ||>
Sign.parent_path ||>>
DatatypeAux.store_thmss_atts "distinct" new_type_names simp_atts distinct_thms ||>>
DatatypeAux.store_thmss "constr_rep" new_type_names constr_rep_thmss ||>>
@@ -1405,9 +1405,9 @@
val (_, thy10) = thy9 |>
Sign.add_path big_name |>
- PureThy.add_thms [(("strong_induct'", induct_aux), [])] ||>>
- PureThy.add_thms [(("strong_induct", induct), [case_names_induct])] ||>>
- PureThy.add_thmss [(("strong_inducts", projections induct), [case_names_induct])];
+ PureThy.add_thms [((Binding.name "strong_induct'", induct_aux), [])] ||>>
+ PureThy.add_thms [((Binding.name "strong_induct", induct), [case_names_induct])] ||>>
+ PureThy.add_thmss [((Binding.name "strong_inducts", projections induct), [case_names_induct])];
(**** recursion combinator ****)
@@ -2015,7 +2015,7 @@
(Sign.base_name name, rec_fn_Ts @ [T] ---> T', NoSyn))
(reccomb_names ~~ recTs ~~ rec_result_Ts))
|> (PureThy.add_defs false o map Thm.no_attributes) (map (fn ((((name, comb), set), T), T') =>
- ((Sign.base_name name) ^ "_def", Logic.mk_equals (comb, absfree ("x", T,
+ (Binding.name (Sign.base_name name ^ "_def"), Logic.mk_equals (comb, absfree ("x", T,
Const ("The", (T' --> HOLogic.boolT) --> T') $ absfree ("y", T',
set $ Free ("x", T) $ Free ("y", T'))))))
(reccomb_names ~~ reccombs ~~ rec_sets ~~ recTs ~~ rec_result_Ts));
@@ -2052,12 +2052,12 @@
(* FIXME: theorems are stored in database for testing only *)
val (_, thy13) = thy12 |>
PureThy.add_thmss
- [(("rec_equiv", List.concat rec_equiv_thms), []),
- (("rec_equiv'", List.concat rec_equiv_thms'), []),
- (("rec_fin_supp", List.concat rec_fin_supp_thms), []),
- (("rec_fresh", List.concat rec_fresh_thms), []),
- (("rec_unique", map standard rec_unique_thms), []),
- (("recs", rec_thms), [])] ||>
+ [((Binding.name "rec_equiv", List.concat rec_equiv_thms), []),
+ ((Binding.name "rec_equiv'", List.concat rec_equiv_thms'), []),
+ ((Binding.name "rec_fin_supp", List.concat rec_fin_supp_thms), []),
+ ((Binding.name "rec_fresh", List.concat rec_fresh_thms), []),
+ ((Binding.name "rec_unique", map standard rec_unique_thms), []),
+ ((Binding.name "recs", rec_thms), [])] ||>
Sign.parent_path ||>
map_nominal_datatypes (fold Symtab.update dt_infos);
--- a/src/HOL/Nominal/nominal_primrec.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_primrec.ML Wed Jan 28 16:57:12 2009 +0100
@@ -9,8 +9,8 @@
signature NOMINAL_PRIMREC =
sig
val add_primrec: term list option -> term option ->
- (Binding.T * typ option * mixfix) list ->
- (Binding.T * typ option * mixfix) list ->
+ (binding * typ option * mixfix) list ->
+ (binding * typ option * mixfix) list ->
(Attrib.binding * term) list -> local_theory -> Proof.state
end;
--- a/src/HOL/Nominal/nominal_thmdecls.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Nominal/nominal_thmdecls.ML Wed Jan 28 16:57:12 2009 +0100
@@ -187,8 +187,8 @@
"equivariance theorem declaration (without checking the form of the lemma)"),
("fresh", Attrib.add_del_args fresh_add fresh_del, "freshness theorem declaration"),
("bij", Attrib.add_del_args bij_add bij_del, "bijection theorem declaration")] #>
- PureThy.add_thms_dynamic ("eqvts", #eqvts o Data.get) #>
- PureThy.add_thms_dynamic ("freshs", #freshs o Data.get) #>
- PureThy.add_thms_dynamic ("bijs", #bijs o Data.get);
+ PureThy.add_thms_dynamic (Binding.name "eqvts", #eqvts o Data.get) #>
+ PureThy.add_thms_dynamic (Binding.name "freshs", #freshs o Data.get) #>
+ PureThy.add_thms_dynamic (Binding.name "bijs", #bijs o Data.get);
end;
--- a/src/HOL/OrderedGroup.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/OrderedGroup.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1193,7 +1193,7 @@
qed
subclass pordered_ab_group_add_abs
-proof -
+proof
have abs_ge_zero [simp]: "\<And>a. 0 \<le> \<bar>a\<bar>"
proof -
fix a b
@@ -1202,6 +1202,28 @@
qed
have abs_leI: "\<And>a b. a \<le> b \<Longrightarrow> - a \<le> b \<Longrightarrow> \<bar>a\<bar> \<le> b"
by (simp add: abs_lattice le_supI)
+<<<<<<< local
+ fix a b
+ show "0 \<le> \<bar>a\<bar>" by simp
+ show "a \<le> \<bar>a\<bar>"
+ by (auto simp add: abs_lattice)
+ show "\<bar>-a\<bar> = \<bar>a\<bar>"
+ by (simp add: abs_lattice sup_commute)
+ show "a \<le> b \<Longrightarrow> - a \<le> b \<Longrightarrow> \<bar>a\<bar> \<le> b" by (fact abs_leI)
+ show "\<bar>a + b\<bar> \<le> \<bar>a\<bar> + \<bar>b\<bar>"
+ proof -
+ have g:"abs a + abs b = sup (a+b) (sup (-a-b) (sup (-a+b) (a + (-b))))" (is "_=sup ?m ?n")
+ by (simp add: abs_lattice add_sup_inf_distribs sup_ACI diff_minus)
+ have a:"a+b <= sup ?m ?n" by (simp)
+ have b:"-a-b <= ?n" by (simp)
+ have c:"?n <= sup ?m ?n" by (simp)
+ from b c have d: "-a-b <= sup ?m ?n" by(rule order_trans)
+ have e:"-a-b = -(a+b)" by (simp add: diff_minus)
+ from a d e have "abs(a+b) <= sup ?m ?n"
+ by (drule_tac abs_leI, auto)
+ with g[symmetric] show ?thesis by simp
+ qed
+=======
show ?thesis
proof
fix a
@@ -1230,6 +1252,7 @@
with g[symmetric] show ?thesis by simp
qed
qed auto
+>>>>>>> other
qed
end
--- a/src/HOL/Orderings.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Orderings.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Orderings.thy
- ID: $Id$
Author: Tobias Nipkow, Markus Wenzel, and Larry Paulson
*)
--- a/src/HOL/Parity.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Parity.thy Wed Jan 28 16:57:12 2009 +0100
@@ -5,10 +5,10 @@
header {* Even and Odd for int and nat *}
theory Parity
-imports Plain Presburger
+imports Plain Presburger Main
begin
-class even_odd = type +
+class even_odd =
fixes even :: "'a \<Rightarrow> bool"
abbreviation
--- a/src/HOL/Plain.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Plain.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,7 +1,7 @@
header {* Plain HOL *}
theory Plain
-imports Datatype FunDef Record SAT Extraction
+imports Datatype FunDef Record SAT Extraction Divides
begin
text {*
@@ -9,6 +9,9 @@
include @{text Hilbert_Choice}.
*}
+instance option :: (finite) finite
+ by default (simp add: insert_None_conv_UNIV [symmetric])
+
ML {* path_add "~~/src/HOL/Library" *}
end
--- a/src/HOL/Polynomial.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Polynomial.thy Wed Jan 28 16:57:12 2009 +0100
@@ -6,7 +6,7 @@
header {* Univariate Polynomials *}
theory Polynomial
-imports Plain SetInterval
+imports Plain SetInterval Main
begin
subsection {* Definition of type @{text poly} *}
@@ -475,6 +475,16 @@
lemma smult_monom: "smult a (monom b n) = monom (a * b) n"
by (induct n, simp add: monom_0, simp add: monom_Suc)
+lemma degree_smult_eq [simp]:
+ fixes a :: "'a::idom"
+ shows "degree (smult a p) = (if a = 0 then 0 else degree p)"
+ by (cases "a = 0", simp, simp add: degree_def)
+
+lemma smult_eq_0_iff [simp]:
+ fixes a :: "'a::idom"
+ shows "smult a p = 0 \<longleftrightarrow> a = 0 \<or> p = 0"
+ by (simp add: expand_poly_eq)
+
subsection {* Multiplication of polynomials *}
@@ -777,6 +787,12 @@
qed
qed
+lemma pdivmod_rel_0_iff: "pdivmod_rel 0 y q r \<longleftrightarrow> q = 0 \<and> r = 0"
+by (auto dest: pdivmod_rel_unique intro: pdivmod_rel_0)
+
+lemma pdivmod_rel_by_0_iff: "pdivmod_rel x 0 q r \<longleftrightarrow> q = 0 \<and> r = x"
+by (auto dest: pdivmod_rel_unique intro: pdivmod_rel_by_0)
+
lemmas pdivmod_rel_unique_div =
pdivmod_rel_unique [THEN conjunct1, standard]
@@ -861,6 +877,54 @@
thus "x mod y = x" by (rule mod_poly_eq)
qed
+lemma pdivmod_rel_smult_left:
+ "pdivmod_rel x y q r
+ \<Longrightarrow> pdivmod_rel (smult a x) y (smult a q) (smult a r)"
+ unfolding pdivmod_rel_def by (simp add: smult_add_right)
+
+lemma div_smult_left: "(smult a x) div y = smult a (x div y)"
+ by (rule div_poly_eq, rule pdivmod_rel_smult_left, rule pdivmod_rel)
+
+lemma mod_smult_left: "(smult a x) mod y = smult a (x mod y)"
+ by (rule mod_poly_eq, rule pdivmod_rel_smult_left, rule pdivmod_rel)
+
+lemma pdivmod_rel_smult_right:
+ "\<lbrakk>a \<noteq> 0; pdivmod_rel x y q r\<rbrakk>
+ \<Longrightarrow> pdivmod_rel x (smult a y) (smult (inverse a) q) r"
+ unfolding pdivmod_rel_def by simp
+
+lemma div_smult_right:
+ "a \<noteq> 0 \<Longrightarrow> x div (smult a y) = smult (inverse a) (x div y)"
+ by (rule div_poly_eq, erule pdivmod_rel_smult_right, rule pdivmod_rel)
+
+lemma mod_smult_right: "a \<noteq> 0 \<Longrightarrow> x mod (smult a y) = x mod y"
+ by (rule mod_poly_eq, erule pdivmod_rel_smult_right, rule pdivmod_rel)
+
+lemma pdivmod_rel_mult:
+ "\<lbrakk>pdivmod_rel x y q r; pdivmod_rel q z q' r'\<rbrakk>
+ \<Longrightarrow> pdivmod_rel x (y * z) q' (y * r' + r)"
+apply (cases "z = 0", simp add: pdivmod_rel_def)
+apply (cases "y = 0", simp add: pdivmod_rel_by_0_iff pdivmod_rel_0_iff)
+apply (cases "r = 0")
+apply (cases "r' = 0")
+apply (simp add: pdivmod_rel_def)
+apply (simp add: pdivmod_rel_def ring_simps degree_mult_eq)
+apply (cases "r' = 0")
+apply (simp add: pdivmod_rel_def degree_mult_eq)
+apply (simp add: pdivmod_rel_def ring_simps)
+apply (simp add: degree_mult_eq degree_add_less)
+done
+
+lemma poly_div_mult_right:
+ fixes x y z :: "'a::field poly"
+ shows "x div (y * z) = (x div y) div z"
+ by (rule div_poly_eq, rule pdivmod_rel_mult, (rule pdivmod_rel)+)
+
+lemma poly_mod_mult_right:
+ fixes x y z :: "'a::field poly"
+ shows "x mod (y * z) = y * (x div y mod z) + x mod y"
+ by (rule mod_poly_eq, rule pdivmod_rel_mult, (rule pdivmod_rel)+)
+
lemma mod_pCons:
fixes a and x
assumes y: "y \<noteq> 0"
--- a/src/HOL/Power.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Power.thy Wed Jan 28 16:57:12 2009 +0100
@@ -11,7 +11,7 @@
imports Nat
begin
-class power = type +
+class power =
fixes power :: "'a \<Rightarrow> nat \<Rightarrow> 'a" (infixr "^" 80)
subsection{*Powers for Arbitrary Monoids*}
--- a/src/HOL/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,6 +1,7 @@
(* Classical Higher-order Logic -- batteries included *)
use_thy "Main";
+share_common_data ();
use_thy "Complex_Main";
val HOL_proofs = ! Proofterm.proofs;
--- a/src/HOL/RealVector.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/RealVector.thy Wed Jan 28 16:57:12 2009 +0100
@@ -124,7 +124,7 @@
subsection {* Real vector spaces *}
-class scaleR = type +
+class scaleR =
fixes scaleR :: "real \<Rightarrow> 'a \<Rightarrow> 'a" (infixr "*\<^sub>R" 75)
begin
@@ -418,7 +418,7 @@
subsection {* Real normed vector spaces *}
-class norm = type +
+class norm =
fixes norm :: "'a \<Rightarrow> real"
instantiation real :: norm
--- a/src/HOL/Recdef.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Recdef.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,12 +1,11 @@
(* Title: HOL/Recdef.thy
- ID: $Id$
Author: Konrad Slind and Markus Wenzel, TU Muenchen
*)
header {* TFL: recursive function definitions *}
theory Recdef
-imports FunDef
+imports FunDef Plain
uses
("Tools/TFL/casesplit.ML")
("Tools/TFL/utils.ML")
--- a/src/HOL/Relation.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Relation.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Relation.thy
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1996 University of Cambridge
*)
@@ -7,7 +6,7 @@
header {* Relations *}
theory Relation
-imports Product_Type
+imports Datatype Finite_Set
begin
subsection {* Definitions *}
@@ -379,6 +378,12 @@
lemma fst_eq_Domain: "fst ` R = Domain R";
by (auto intro!:image_eqI)
+lemma Domain_dprod [simp]: "Domain (dprod r s) = uprod (Domain r) (Domain s)"
+by auto
+
+lemma Domain_dsum [simp]: "Domain (dsum r s) = usum (Domain r) (Domain s)"
+by auto
+
subsection {* Range *}
@@ -566,6 +571,31 @@
done
+subsection {* Finiteness *}
+
+lemma finite_converse [iff]: "finite (r^-1) = finite r"
+ apply (subgoal_tac "r^-1 = (%(x,y). (y,x))`r")
+ apply simp
+ apply (rule iffI)
+ apply (erule finite_imageD [unfolded inj_on_def])
+ apply (simp split add: split_split)
+ apply (erule finite_imageI)
+ apply (simp add: converse_def image_def, auto)
+ apply (rule bexI)
+ prefer 2 apply assumption
+ apply simp
+ done
+
+text {* \paragraph{Finiteness of transitive closure} (Thanks to Sidi
+Ehmety) *}
+
+lemma finite_Field: "finite r ==> finite (Field r)"
+ -- {* A finite relation has a finite field (@{text "= domain \<union> range"}). *}
+ apply (induct set: finite)
+ apply (auto simp add: Field_def Domain_insert Range_insert)
+ done
+
+
subsection {* Version of @{text lfp_induct} for binary relations *}
lemmas lfp_induct2 =
--- a/src/HOL/Relation_Power.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Relation_Power.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Relation_Power.thy
- ID: $Id$
Author: Tobias Nipkow
Copyright 1996 TU Muenchen
*)
@@ -7,7 +6,7 @@
header{*Powers of Relations and Functions*}
theory Relation_Power
-imports Power Transitive_Closure
+imports Power Transitive_Closure Plain
begin
instance
--- a/src/HOL/Ring_and_Field.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Ring_and_Field.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1071,6 +1071,9 @@
"sgn (a * b) = sgn a * sgn b"
by (auto simp add: sgn_if zero_less_mult_iff)
+lemma abs_sgn: "abs k = k * sgn k"
+ unfolding sgn_if abs_if by auto
+
end
class ordered_field = field + ordered_idom
--- a/src/HOL/SizeChange/Kleene_Algebras.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/SizeChange/Kleene_Algebras.thy Wed Jan 28 16:57:12 2009 +0100
@@ -11,7 +11,7 @@
text {* A type class of kleene algebras *}
-class star = type +
+class star =
fixes star :: "'a \<Rightarrow> 'a"
class idem_add = ab_semigroup_add +
--- a/src/HOL/Tools/ComputeNumeral.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/ComputeNumeral.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,5 @@
theory ComputeNumeral
-imports ComputeHOL "~~/src/HOL/Real/Float"
+imports ComputeHOL Float
begin
(* normalization of bit strings *)
@@ -151,18 +151,18 @@
by (auto simp only: adjust_def)
lemma negateSnd: "negateSnd (q, r) = (q, -r)"
- by (auto simp only: negateSnd_def)
+ by (simp add: negateSnd_def)
-lemma divAlg: "divAlg (a, b) = (if 0\<le>a then
+lemma divmod: "IntDiv.divmod a b = (if 0\<le>a then
if 0\<le>b then posDivAlg a b
else if a=0 then (0, 0)
else negateSnd (negDivAlg (-a) (-b))
else
if 0<b then negDivAlg a b
else negateSnd (posDivAlg (-a) (-b)))"
- by (auto simp only: divAlg_def)
+ by (auto simp only: IntDiv.divmod_def)
-lemmas compute_div_mod = div_def mod_def divAlg adjust negateSnd posDivAlg.simps negDivAlg.simps
+lemmas compute_div_mod = div_def mod_def divmod adjust negateSnd posDivAlg.simps negDivAlg.simps
--- a/src/HOL/Tools/TFL/tfl.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/TFL/tfl.ML Wed Jan 28 16:57:12 2009 +0100
@@ -390,7 +390,7 @@
(wfrec $ map_types poly_tvars R)
$ functional
val def_term = mk_const_def thy (x, Ty, wfrec_R_M)
- val ([def], thy') = PureThy.add_defs false [Thm.no_attributes (def_name, def_term)] thy
+ val ([def], thy') = PureThy.add_defs false [Thm.no_attributes (Binding.name def_name, def_term)] thy
in (thy', def) end;
end;
@@ -549,7 +549,7 @@
val ([def0], theory) =
thy
|> PureThy.add_defs false
- [Thm.no_attributes (fid ^ "_def", defn)]
+ [Thm.no_attributes (Binding.name (fid ^ "_def"), defn)]
val def = Thm.freezeT def0;
val dummy = if !trace then writeln ("DEF = " ^ Display.string_of_thm def)
else ()
--- a/src/HOL/Tools/atp_manager.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/atp_manager.ML Wed Jan 28 16:57:12 2009 +0100
@@ -19,7 +19,7 @@
val kill: unit -> unit
val info: unit -> unit
val messages: int option -> unit
- type prover = int -> Proof.state -> bool * string
+ type prover = int -> int -> Proof.state -> bool * string
val add_prover: string -> prover -> theory -> theory
val print_provers: theory -> unit
val sledgehammer: string list -> Proof.state -> unit
@@ -35,9 +35,9 @@
local
-val atps = ref "e";
+val atps = ref "e remote_vampire";
val max_atps = ref 5; (* ~1 means infinite number of atps *)
-val timeout = ref 60;
+val timeout = ref 100;
in
@@ -89,13 +89,14 @@
oldest_heap: ThreadHeap.T,
active: (Thread.thread * (Time.time * Time.time * string)) list,
cancelling: (Thread.thread * (Time.time * Time.time * string)) list,
- messages: string list};
+ messages: string list,
+ store: string list};
-fun make_state timeout_heap oldest_heap active cancelling messages =
+fun make_state timeout_heap oldest_heap active cancelling messages store =
State {timeout_heap = timeout_heap, oldest_heap = oldest_heap,
- active = active, cancelling = cancelling, messages = messages};
+ active = active, cancelling = cancelling, messages = messages, store = store};
-val state = Synchronized.var "atp_manager" (make_state ThreadHeap.empty ThreadHeap.empty [] [] []);
+val state = Synchronized.var "atp_manager" (make_state ThreadHeap.empty ThreadHeap.empty [] [] [] []);
(* the managing thread *)
@@ -106,29 +107,27 @@
(* unregister thread *)
-fun unregister (success, message) thread = Synchronized.change_result state
- (fn state as State {timeout_heap, oldest_heap, active, cancelling, messages} =>
+fun unregister (success, message) thread = Synchronized.change state
+ (fn state as State {timeout_heap, oldest_heap, active, cancelling, messages, store} =>
(case lookup_thread active thread of
SOME (birthtime, _, description) =>
let
val (group, active') =
if success then List.partition (fn (_, (tb, _, _)) => tb = birthtime) active
else List.partition (fn (th, _) => Thread.equal (th, thread)) active
- val others = delete_thread thread group
val now = Time.now ()
val cancelling' =
- fold (fn (th, (tb, _, desc)) => update_thread (th, (tb, now, desc))) others cancelling
+ fold (fn (th, (tb, _, desc)) => update_thread (th, (tb, now, desc))) group cancelling
- val msg = description ^ "\n" ^ message
- val message' = "Sledgehammer: " ^ msg ^
- (if null others then ""
- else "\nInterrupted " ^ string_of_int (length others) ^ " other group members")
- val messages' = msg ::
- (if length messages <= message_store_limit then messages
- else #1 (chop message_store_limit messages))
- in (message', make_state timeout_heap oldest_heap active' cancelling' messages') end
- | NONE => ("", state)));
+ val message' = description ^ "\n" ^ message ^
+ (if length group <= 1 then ""
+ else "\nInterrupted " ^ string_of_int (length group - 1) ^ " other group members")
+ val store' = message' ::
+ (if length store <= message_store_limit then store
+ else #1 (chop message_store_limit store))
+ in make_state timeout_heap oldest_heap active' cancelling' (message' :: messages) store' end
+ | NONE => state));
(* kill excessive atp threads *)
@@ -142,13 +141,13 @@
fun kill_oldest () =
let exception Unchanged in
Synchronized.change_result state
- (fn State {timeout_heap, oldest_heap, active, cancelling, messages} =>
+ (fn State {timeout_heap, oldest_heap, active, cancelling, messages, store} =>
if ThreadHeap.is_empty oldest_heap orelse not (excessive_atps active)
then raise Unchanged
else
let val ((_, oldest_thread), oldest_heap') = ThreadHeap.min_elem oldest_heap
- in (oldest_thread, make_state timeout_heap oldest_heap' active cancelling messages) end)
- |> (priority o unregister (false, "Interrupted (maximum number of ATPs exceeded)"))
+ in (oldest_thread, make_state timeout_heap oldest_heap' active cancelling messages store) end)
+ |> unregister (false, "Interrupted (maximum number of ATPs exceeded)")
handle Unchanged => ()
end;
@@ -160,6 +159,13 @@
end;
+fun print_new_messages () =
+ let val to_print = Synchronized.change_result state
+ (fn State {timeout_heap, oldest_heap, active, cancelling, messages, store} =>
+ (messages, make_state timeout_heap oldest_heap active cancelling [] store))
+ in if null to_print then ()
+ else priority ("Sledgehammer: " ^ (space_implode "\n\n" to_print)) end;
+
(* start a watching thread which runs forever -- only one may exist *)
@@ -176,8 +182,8 @@
NONE => SOME (Time.+ (Time.now (), max_wait_time))
| SOME (time, _) => SOME time)
- (* action: cancel find threads whose timeout is reached, and interrupt cancelling threads *)
- fun action (State {timeout_heap, oldest_heap, active, cancelling, messages}) =
+ (* action: find threads whose timeout is reached, and interrupt cancelling threads *)
+ fun action (State {timeout_heap, oldest_heap, active, cancelling, messages, store}) =
let val (timeout_threads, timeout_heap') =
ThreadHeap.upto (Time.now (), Thread.self ()) timeout_heap
in
@@ -187,15 +193,16 @@
let
val _ = List.app (SimpleThread.interrupt o #1) cancelling
val cancelling' = filter (Thread.isActive o #1) cancelling
- val state' = make_state timeout_heap' oldest_heap active cancelling' messages
+ val state' = make_state timeout_heap' oldest_heap active cancelling' messages store
in SOME (map #2 timeout_threads, state') end
end
in
while true do
(Synchronized.timed_access state time_limit action
|> these
- |> List.app (priority o unregister (false, "Interrupted (reached timeout)"));
+ |> List.app (unregister (false, "Interrupted (reached timeout)"));
kill_excessive ();
+ print_new_messages ();
(*give threads time to respond to interrupt*)
OS.Process.sleep min_wait_time)
end)));
@@ -206,12 +213,12 @@
fun register birthtime deadtime (thread, desc) =
(check_thread_manager ();
Synchronized.change state
- (fn State {timeout_heap, oldest_heap, active, cancelling, messages} =>
+ (fn State {timeout_heap, oldest_heap, active, cancelling, messages, store} =>
let
val timeout_heap' = ThreadHeap.insert (deadtime, thread) timeout_heap
val oldest_heap' = ThreadHeap.insert (birthtime, thread) oldest_heap
val active' = update_thread (thread, (birthtime, deadtime, desc)) active
- in make_state timeout_heap' oldest_heap' active' cancelling messages end));
+ in make_state timeout_heap' oldest_heap' active' cancelling messages store end));
@@ -220,9 +227,9 @@
(* kill: move all threads to cancelling *)
fun kill () = Synchronized.change state
- (fn State {timeout_heap, oldest_heap, active, cancelling, messages} =>
+ (fn State {timeout_heap, oldest_heap, active, cancelling, messages, store} =>
let val formerly_active = map (fn (th, (tb, _, desc)) => (th, (tb, Time.now (), desc))) active
- in make_state timeout_heap oldest_heap [] (formerly_active @ cancelling) messages end);
+ in make_state timeout_heap oldest_heap [] (formerly_active @ cancelling) messages store end);
(* ATP info *)
@@ -253,7 +260,7 @@
fun messages opt_limit =
let
val limit = the_default message_display_limit opt_limit;
- val State {messages = msgs, ...} = Synchronized.value state
+ val State {store = msgs, ...} = Synchronized.value state
val header = "Recent ATP messages" ^
(if length msgs <= limit then ":" else " (" ^ string_of_int limit ^ " displayed):");
in writeln (space_implode "\n\n" (header :: #1 (chop limit msgs))) end;
@@ -264,7 +271,7 @@
(* named provers *)
-type prover = int -> Proof.state -> bool * string;
+type prover = int -> int -> Proof.state -> bool * string;
fun err_dup_prover name = error ("Duplicate prover: " ^ quote name);
@@ -300,12 +307,12 @@
val _ = SimpleThread.fork true (fn () =>
let
val _ = register birthtime deadtime (Thread.self (), desc)
- val result = prover i proof_state
+ val result = prover (get_timeout ()) i proof_state
handle ResHolClause.TOO_TRIVIAL
=> (true, "Empty clause: Try this command: " ^ Markup.markup Markup.sendback "apply metis")
| ERROR msg
=> (false, "Error: " ^ msg)
- val _ = priority (unregister result (Thread.self ()))
+ val _ = unregister result (Thread.self ())
in () end handle Interrupt => ())
in () end);
--- a/src/HOL/Tools/atp_wrapper.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/atp_wrapper.ML Wed Jan 28 16:57:12 2009 +0100
@@ -12,7 +12,7 @@
val external_prover:
((int -> Path.T) -> Proof.context * thm list * thm -> string list * ResHolClause.axiom_name Vector.vector list) ->
Path.T * string ->
- (string * int -> bool) ->
+ (string -> string option) ->
(string * string vector * Proof.context * thm * int -> string) ->
AtpManager.prover
val tptp_prover_opts_full: int -> bool -> bool -> Path.T * string -> AtpManager.prover
@@ -30,8 +30,8 @@
val eprover_full: AtpManager.prover
val spass_opts: int -> bool -> AtpManager.prover
val spass: AtpManager.prover
- val remote_prover_opts: int -> bool -> string -> string -> AtpManager.prover
- val remote_prover: string -> string -> AtpManager.prover
+ val remote_prover_opts: int -> bool -> string -> AtpManager.prover
+ val remote_prover: string -> AtpManager.prover
end;
structure AtpWrapper: ATP_WRAPPER =
@@ -47,7 +47,7 @@
(* basic template *)
-fun external_prover write_problem_files (cmd, args) check_success produce_answer subgoalno state =
+fun external_prover write_problem_files (cmd, args) find_failure produce_answer timeout subgoalno state =
let
(* path to unique problem file *)
val destdir' = ! destdir
@@ -70,19 +70,18 @@
if File.exists cmd then File.shell_path cmd ^ " " ^ args
else error ("Bad executable: " ^ Path.implode cmd)
val (proof, rc) = system_out (cmdline ^ " " ^ nth filenames (subgoalno - 1))
- val _ =
- if rc <> 0 then
- warning ("Sledgehammer prover exited with return code " ^ string_of_int rc ^ "\n" ^ cmdline)
- else ()
(* remove *temporary* files *)
val _ = if destdir' = "" then List.app OS.FileSys.remove filenames else ()
-
- val success = check_success (proof, rc)
+
+ (* check for success and print out some information on failure *)
+ val failure = find_failure proof
+ val success = rc = 0 andalso is_none failure
val message =
- if success
- then "Try this command: " ^ produce_answer (proof, thm_names, ctxt, goal, subgoalno)
- else "Failed."
+ if isSome failure then "Could not prove: " ^ the failure
+ else if rc <> 0
+ then "Exited with return code " ^ string_of_int rc ^ ": " ^ proof
+ else "Try this command: " ^ produce_answer (proof, thm_names, ctxt, goal, subgoalno)
in (success, message) end;
@@ -95,7 +94,7 @@
external_prover
(ResAtp.write_problem_files ResHolClause.tptp_write_file max_new theory_const)
command
- ResReconstruct.check_success_e_vamp_spass
+ ResReconstruct.find_failure_e_vamp_spass
(if full then ResReconstruct.structured_proof else ResReconstruct.lemma_list_tstp);
(*arbitrary ATP with TPTP input/output and problemfile as last argument*)
@@ -115,15 +114,19 @@
(*NB: Vampire does not work without explicit timelimit*)
-fun vampire_opts max_new theory_const = tptp_prover_opts
+fun vampire_opts max_new theory_const timeout = tptp_prover_opts
max_new theory_const
- (Path.explode "$VAMPIRE_HOME/vampire", "--output_syntax tptp --mode casc -t 3600");
+ (Path.explode "$VAMPIRE_HOME/vampire",
+ ("--output_syntax tptp --mode casc -t " ^ string_of_int timeout))
+ timeout;
val vampire = vampire_opts 60 false;
-fun vampire_opts_full max_new theory_const = full_prover_opts
+fun vampire_opts_full max_new theory_const timeout = full_prover_opts
max_new theory_const
- (Path.explode "$VAMPIRE_HOME/vampire", "--output_syntax tptp --mode casc -t 3600");
+ (Path.explode "$VAMPIRE_HOME/vampire",
+ ("--output_syntax tptp --mode casc -t " ^ string_of_int timeout))
+ timeout;
val vampire_full = vampire_opts 60 false;
@@ -148,7 +151,7 @@
fun spass_opts max_new theory_const = external_prover
(ResAtp.write_problem_files ResHolClause.dfg_write_file max_new theory_const)
(Path.explode "$SPASS_HOME/SPASS", "-Auto -SOS=1 -PGiven=0 -PProblem=0 -Splits=0 -FullRed=0 -DocProof")
- ResReconstruct.check_success_e_vamp_spass
+ ResReconstruct.find_failure_e_vamp_spass
ResReconstruct.lemma_list_dfg;
val spass = spass_opts 40 true;
@@ -156,9 +159,10 @@
(* remote prover invocation via SystemOnTPTP *)
-fun remote_prover_opts max_new theory_const name command =
+fun remote_prover_opts max_new theory_const args timeout =
tptp_prover_opts max_new theory_const
- (Path.explode "$ISABELLE_HOME/contrib/SystemOnTPTP/remote", name ^ " " ^ command);
+ (Path.explode "$ISABELLE_HOME/contrib/SystemOnTPTP/remote", args ^ " -t " ^ string_of_int timeout)
+ timeout;
val remote_prover = remote_prover_opts 60 false;
--- a/src/HOL/Tools/datatype_abs_proofs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/datatype_abs_proofs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -238,7 +238,7 @@
(Sign.base_name name, reccomb_fn_Ts @ [T] ---> T', NoSyn))
(reccomb_names ~~ recTs ~~ rec_result_Ts))
|> (PureThy.add_defs false o map Thm.no_attributes) (map (fn ((((name, comb), set), T), T') =>
- ((Sign.base_name name) ^ "_def", Logic.mk_equals (comb, absfree ("x", T,
+ (Binding.name (Sign.base_name name ^ "_def"), Logic.mk_equals (comb, absfree ("x", T,
Const ("The", (T' --> HOLogic.boolT) --> T') $ absfree ("y", T',
set $ Free ("x", T) $ Free ("y", T'))))))
(reccomb_names ~~ reccombs ~~ rec_sets ~~ recTs ~~ rec_result_Ts))
@@ -262,7 +262,7 @@
in
thy2
|> Sign.add_path (space_implode "_" new_type_names)
- |> PureThy.add_thmss [(("recs", rec_thms), [])]
+ |> PureThy.add_thmss [((Binding.name "recs", rec_thms), [])]
||> Sign.parent_path
||> Theory.checkpoint
|-> (fn thms => pair (reccomb_names, Library.flat thms))
@@ -316,7 +316,7 @@
fns2 @ (List.concat (Library.drop (i + 1, case_dummy_fns)));
val reccomb = Const (recname, (map fastype_of fns) @ [T] ---> T');
val decl = ((Binding.name (Sign.base_name name), caseT), NoSyn);
- val def = ((Sign.base_name name) ^ "_def",
+ val def = (Binding.name (Sign.base_name name ^ "_def"),
Logic.mk_equals (list_comb (Const (name, caseT), fns1),
list_comb (reccomb, (List.concat (Library.take (i, case_dummy_fns))) @
fns2 @ (List.concat (Library.drop (i + 1, case_dummy_fns))) )));
--- a/src/HOL/Tools/datatype_aux.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/datatype_aux.ML Wed Jan 28 16:57:12 2009 +0100
@@ -76,7 +76,7 @@
fun store_thmss_atts label tnames attss thmss =
fold_map (fn ((tname, atts), thms) =>
Sign.add_path tname
- #> PureThy.add_thmss [((label, thms), atts)]
+ #> PureThy.add_thmss [((Binding.name label, thms), atts)]
#-> (fn thm::_ => Sign.parent_path #> pair thm)) (tnames ~~ attss ~~ thmss)
##> Theory.checkpoint;
@@ -85,7 +85,7 @@
fun store_thms_atts label tnames attss thmss =
fold_map (fn ((tname, atts), thms) =>
Sign.add_path tname
- #> PureThy.add_thms [((label, thms), atts)]
+ #> PureThy.add_thms [((Binding.name label, thms), atts)]
#-> (fn thm::_ => Sign.parent_path #> pair thm)) (tnames ~~ attss ~~ thmss)
##> Theory.checkpoint;
--- a/src/HOL/Tools/datatype_case.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/datatype_case.ML Wed Jan 28 16:57:12 2009 +0100
@@ -419,21 +419,21 @@
(* destruct nested patterns *)
-fun strip_case' dest (pat, rhs) =
+fun strip_case'' dest (pat, rhs) =
case dest (Term.add_free_names pat []) rhs of
SOME (exp as Free _, clauses) =>
if member op aconv (OldTerm.term_frees pat) exp andalso
not (exists (fn (_, rhs') =>
member op aconv (OldTerm.term_frees rhs') exp) clauses)
then
- maps (strip_case' dest) (map (fn (pat', rhs') =>
+ maps (strip_case'' dest) (map (fn (pat', rhs') =>
(subst_free [(exp, pat')] pat, rhs')) clauses)
else [(pat, rhs)]
| _ => [(pat, rhs)];
fun gen_strip_case dest t = case dest [] t of
SOME (x, clauses) =>
- SOME (x, maps (strip_case' dest) clauses)
+ SOME (x, maps (strip_case'' dest) clauses)
| NONE => NONE;
val strip_case = gen_strip_case oo dest_case;
--- a/src/HOL/Tools/datatype_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/datatype_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -196,13 +196,13 @@
fun add_rules simps case_thms rec_thms inject distinct
weak_case_congs cong_att =
- PureThy.add_thmss [(("simps", simps), []),
- (("", flat case_thms @
+ PureThy.add_thmss [((Binding.name "simps", simps), []),
+ ((Binding.empty, flat case_thms @
flat distinct @ rec_thms), [Simplifier.simp_add]),
- (("", rec_thms), [Code.add_default_eqn_attribute]),
- (("", flat inject), [iff_add]),
- (("", map (fn th => th RS notE) (flat distinct)), [Classical.safe_elim NONE]),
- (("", weak_case_congs), [cong_att])]
+ ((Binding.empty, rec_thms), [Code.add_default_eqn_attribute]),
+ ((Binding.empty, flat inject), [iff_add]),
+ ((Binding.empty, map (fn th => th RS notE) (flat distinct)), [Classical.safe_elim NONE]),
+ ((Binding.empty, weak_case_congs), [cong_att])]
#> snd;
@@ -213,15 +213,15 @@
val inducts = ProjectRule.projections (ProofContext.init thy) induction;
fun named_rules (name, {index, exhaustion, ...}: datatype_info) =
- [(("", nth inducts index), [Induct.induct_type name]),
- (("", exhaustion), [Induct.cases_type name])];
+ [((Binding.empty, nth inducts index), [Induct.induct_type name]),
+ ((Binding.empty, exhaustion), [Induct.cases_type name])];
fun unnamed_rule i =
- (("", nth inducts i), [Thm.kind_internal, Induct.induct_type ""]);
+ ((Binding.empty, nth inducts i), [Thm.kind_internal, Induct.induct_type ""]);
in
thy |> PureThy.add_thms
(maps named_rules infos @
map unnamed_rule (length infos upto length inducts - 1)) |> snd
- |> PureThy.add_thmss [(("inducts", inducts), [])] |> snd
+ |> PureThy.add_thmss [((Binding.name "inducts", inducts), [])] |> snd
end;
@@ -451,7 +451,7 @@
|> store_thmss "inject" new_type_names inject
||>> store_thmss "distinct" new_type_names distinct
||> Sign.add_path (space_implode "_" new_type_names)
- ||>> PureThy.add_thms [(("induct", induct), [case_names_induct])];
+ ||>> PureThy.add_thms [((Binding.name "induct", induct), [case_names_induct])];
val dt_infos = map (make_dt_info alt_names descr sorts induct' reccomb_names rec_thms)
((0 upto length descr - 1) ~~ descr ~~ case_names ~~ case_thms ~~ casedist_thms ~~
--- a/src/HOL/Tools/datatype_realizer.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/datatype_realizer.ML Wed Jan 28 16:57:12 2009 +0100
@@ -130,7 +130,7 @@
val vs = map (fn i => List.nth (pnames, i)) is;
val (thm', thy') = thy
|> Sign.absolute_path
- |> PureThy.store_thm (space_implode "_" (ind_name :: vs @ ["correctness"]), thm)
+ |> PureThy.store_thm (Binding.name (space_implode "_" (ind_name :: vs @ ["correctness"])), thm)
||> Sign.restore_naming thy;
val ivs = rev (Term.add_vars (Logic.varify (DatatypeProp.make_ind [descr] sorts)) []);
@@ -196,7 +196,7 @@
val exh_name = Thm.get_name exhaustion;
val (thm', thy') = thy
|> Sign.absolute_path
- |> PureThy.store_thm (exh_name ^ "_P_correctness", thm)
+ |> PureThy.store_thm (Binding.name (exh_name ^ "_P_correctness"), thm)
||> Sign.restore_naming thy;
val P = Var (("P", 0), rT' --> HOLogic.boolT);
--- a/src/HOL/Tools/datatype_rep_proofs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/datatype_rep_proofs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -242,7 +242,7 @@
val ([def_thm], thy') =
thy
|> Sign.add_consts_i [(cname', constrT, mx)]
- |> (PureThy.add_defs false o map Thm.no_attributes) [(def_name, def)];
+ |> (PureThy.add_defs false o map Thm.no_attributes) [(Binding.name def_name, def)];
in (thy', defs @ [def_thm], eqns @ [eqn], i + 1) end;
@@ -343,7 +343,7 @@
val (fs, eqns, isos) = Library.foldl process_dt (([], [], []), ds);
val fTs = map fastype_of fs;
- val defs = map (fn (rec_name, (T, iso_name)) => ((Sign.base_name iso_name) ^ "_def",
+ val defs = map (fn (rec_name, (T, iso_name)) => (Binding.name (Sign.base_name iso_name ^ "_def"),
Logic.mk_equals (Const (iso_name, T --> Univ_elT),
list_comb (Const (rec_name, fTs @ [T] ---> Univ_elT), fs)))) (rec_names ~~ isos);
val (def_thms, thy') =
@@ -631,7 +631,7 @@
val ([dt_induct'], thy7) =
thy6
|> Sign.add_path big_name
- |> PureThy.add_thms [(("induct", dt_induct), [case_names_induct])]
+ |> PureThy.add_thms [((Binding.name "induct", dt_induct), [case_names_induct])]
||> Sign.parent_path
||> Theory.checkpoint;
--- a/src/HOL/Tools/function_package/fundef_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/function_package/fundef_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -9,14 +9,14 @@
signature FUNDEF_PACKAGE =
sig
- val add_fundef : (Binding.T * string option * mixfix) list
+ val add_fundef : (binding * string option * mixfix) list
-> (Attrib.binding * string) list
-> FundefCommon.fundef_config
-> bool list
-> local_theory
-> Proof.state
- val add_fundef_i: (Binding.T * typ option * mixfix) list
+ val add_fundef_i: (binding * typ option * mixfix) list
-> (Attrib.binding * term) list
-> FundefCommon.fundef_config
-> bool list
--- a/src/HOL/Tools/function_package/size.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/function_package/size.ML Wed Jan 28 16:57:12 2009 +0100
@@ -144,7 +144,7 @@
(size_names ~~ recTs1))
|> PureThy.add_defs false
(map (Thm.no_attributes o apsnd (Logic.mk_equals o apsnd (app fs)))
- (def_names ~~ (size_fns ~~ rec_combs1)))
+ (map Binding.name def_names ~~ (size_fns ~~ rec_combs1)))
||> TheoryTarget.instantiation
(map (#1 o snd) descr', map dest_TFree paramTs, [HOLogic.class_size])
||>> fold_map define_overloaded
@@ -208,7 +208,7 @@
prove_size_eqs is_rec_type overloaded_size_fns (K NONE) simpset3;
val ([size_thms], thy'') = PureThy.add_thmss
- [(("size", size_eqns),
+ [((Binding.name "size", size_eqns),
[Simplifier.simp_add, Thm.declaration_attribute
(fn thm => Context.mapping (Code.add_default_eqn thm) I)])] thy'
--- a/src/HOL/Tools/inductive_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/inductive_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -38,17 +38,17 @@
thm list list * local_theory
type inductive_flags
val add_inductive_i:
- inductive_flags -> ((Binding.T * typ) * mixfix) list ->
+ inductive_flags -> ((binding * typ) * mixfix) list ->
(string * typ) list -> (Attrib.binding * term) list -> thm list -> local_theory ->
inductive_result * local_theory
val add_inductive: bool -> bool ->
- (Binding.T * string option * mixfix) list ->
- (Binding.T * string option * mixfix) list ->
+ (binding * string option * mixfix) list ->
+ (binding * string option * mixfix) list ->
(Attrib.binding * string) list ->
(Facts.ref * Attrib.src list) list ->
bool -> local_theory -> inductive_result * local_theory
val add_inductive_global: string -> inductive_flags ->
- ((Binding.T * typ) * mixfix) list -> (string * typ) list -> (Attrib.binding * term) list ->
+ ((binding * typ) * mixfix) list -> (string * typ) list -> (Attrib.binding * term) list ->
thm list -> theory -> inductive_result * theory
val arities_of: thm -> (string * int) list
val params_of: thm -> term list
@@ -63,16 +63,16 @@
sig
include BASIC_INDUCTIVE_PACKAGE
type add_ind_def
- val declare_rules: string -> Binding.T -> bool -> bool -> string list ->
- thm list -> Binding.T list -> Attrib.src list list -> (thm * string list) list ->
+ val declare_rules: string -> binding -> bool -> bool -> string list ->
+ thm list -> binding list -> Attrib.src list list -> (thm * string list) list ->
thm -> local_theory -> thm list * thm list * thm * local_theory
val add_ind_def: add_ind_def
val gen_add_inductive_i: add_ind_def -> inductive_flags ->
- ((Binding.T * typ) * mixfix) list -> (string * typ) list -> (Attrib.binding * term) list ->
+ ((binding * typ) * mixfix) list -> (string * typ) list -> (Attrib.binding * term) list ->
thm list -> local_theory -> inductive_result * local_theory
val gen_add_inductive: add_ind_def -> bool -> bool ->
- (Binding.T * string option * mixfix) list ->
- (Binding.T * string option * mixfix) list ->
+ (binding * string option * mixfix) list ->
+ (binding * string option * mixfix) list ->
(Attrib.binding * string) list -> (Facts.ref * Attrib.src list) list ->
bool -> local_theory -> inductive_result * local_theory
val gen_ind_decl: add_ind_def -> bool ->
@@ -720,13 +720,13 @@
in (intrs', elims', induct', ctxt3) end;
type inductive_flags =
- {quiet_mode: bool, verbose: bool, kind: string, alt_name: Binding.T,
+ {quiet_mode: bool, verbose: bool, kind: string, alt_name: binding,
coind: bool, no_elim: bool, no_ind: bool, skip_mono: bool, fork_mono: bool}
type add_ind_def =
inductive_flags ->
term list -> (Attrib.binding * term) list -> thm list ->
- term list -> (Binding.T * mixfix) list ->
+ term list -> (binding * mixfix) list ->
local_theory -> inductive_result * local_theory
fun add_ind_def {quiet_mode, verbose, kind, alt_name, coind, no_elim, no_ind, skip_mono, fork_mono}
--- a/src/HOL/Tools/inductive_realizer.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/inductive_realizer.ML Wed Jan 28 16:57:12 2009 +0100
@@ -391,14 +391,14 @@
REPEAT ((resolve_tac prems THEN_ALL_NEW EVERY'
[K (rewrite_goals_tac rews), ObjectLogic.atomize_prems_tac,
DEPTH_SOLVE_1 o FIRST' [atac, etac allE, etac impE]]) 1)]);
- val (thm', thy') = PureThy.store_thm (space_implode "_"
- (NameSpace.qualified qualifier "induct" :: vs' @ Ps @ ["correctness"]), thm) thy;
+ val (thm', thy') = PureThy.store_thm (Binding.name (space_implode "_"
+ (NameSpace.qualified qualifier "induct" :: vs' @ Ps @ ["correctness"])), thm) thy;
val thms = map (fn th => zero_var_indexes (rotate_prems ~1 (th RS mp)))
(DatatypeAux.split_conj_thm thm');
val ([thms'], thy'') = PureThy.add_thmss
- [((space_implode "_"
+ [((Binding.name (space_implode "_"
(NameSpace.qualified qualifier "inducts" :: vs' @ Ps @
- ["correctness"]), thms), [])] thy';
+ ["correctness"])), thms), [])] thy';
val realizers = inducts ~~ thms' ~~ rlzs ~~ rs;
in
Extraction.add_realizers_i
@@ -451,8 +451,8 @@
rewrite_goals_tac rews,
REPEAT ((resolve_tac prems THEN_ALL_NEW (ObjectLogic.atomize_prems_tac THEN'
DEPTH_SOLVE_1 o FIRST' [atac, etac allE, etac impE])) 1)]);
- val (thm', thy') = PureThy.store_thm (space_implode "_"
- (name_of_thm elim :: vs @ Ps @ ["correctness"]), thm) thy
+ val (thm', thy') = PureThy.store_thm (Binding.name (space_implode "_"
+ (name_of_thm elim :: vs @ Ps @ ["correctness"])), thm) thy
in
Extraction.add_realizers_i
[mk_realizer thy' (vs @ Ps) (name_of_thm elim, elim, thm', rlz, r)] thy'
--- a/src/HOL/Tools/inductive_set_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/inductive_set_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -12,13 +12,13 @@
val pred_set_conv_att: attribute
val add_inductive_i:
InductivePackage.inductive_flags ->
- ((Binding.T * typ) * mixfix) list ->
+ ((binding * typ) * mixfix) list ->
(string * typ) list ->
(Attrib.binding * term) list -> thm list ->
local_theory -> InductivePackage.inductive_result * local_theory
val add_inductive: bool -> bool ->
- (Binding.T * string option * mixfix) list ->
- (Binding.T * string option * mixfix) list ->
+ (binding * string option * mixfix) list ->
+ (binding * string option * mixfix) list ->
(Attrib.binding * string) list -> (Facts.ref * Attrib.src list) list ->
bool -> local_theory -> InductivePackage.inductive_result * local_theory
val codegen_preproc: theory -> thm list -> thm list
--- a/src/HOL/Tools/old_primrec_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/old_primrec_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -305,11 +305,11 @@
end;
fun thy_note ((name, atts), thms) =
- PureThy.add_thmss [((name, thms), atts)] #-> (fn [thms] => pair (name, thms));
+ PureThy.add_thmss [((Binding.name name, thms), atts)] #-> (fn [thms] => pair (name, thms));
fun thy_def false ((name, atts), t) =
- PureThy.add_defs false [((name, t), atts)] #-> (fn [thm] => pair (name, thm))
+ PureThy.add_defs false [((Binding.name name, t), atts)] #-> (fn [thm] => pair (name, thm))
| thy_def true ((name, atts), t) =
- PureThy.add_defs_unchecked false [((name, t), atts)] #-> (fn [thm] => pair (name, thm));
+ PureThy.add_defs_unchecked false [((Binding.name name, t), atts)] #-> (fn [thm] => pair (name, thm));
in
--- a/src/HOL/Tools/primrec_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/primrec_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -7,12 +7,12 @@
signature PRIMREC_PACKAGE =
sig
- val add_primrec: (Binding.T * typ option * mixfix) list ->
+ val add_primrec: (binding * typ option * mixfix) list ->
(Attrib.binding * term) list -> local_theory -> thm list * local_theory
- val add_primrec_global: (Binding.T * typ option * mixfix) list ->
+ val add_primrec_global: (binding * typ option * mixfix) list ->
(Attrib.binding * term) list -> theory -> thm list * theory
val add_primrec_overloaded: (string * (string * typ) * bool) list ->
- (Binding.T * typ option * mixfix) list ->
+ (binding * typ option * mixfix) list ->
(Attrib.binding * term) list -> theory -> thm list * theory
end;
--- a/src/HOL/Tools/recdef_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/recdef_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Tools/recdef_package.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Wrapper module for Konrad Slind's TFL package.
@@ -16,10 +15,10 @@
val cong_del: attribute
val wf_add: attribute
val wf_del: attribute
- val add_recdef: bool -> xstring -> string -> ((bstring * string) * Attrib.src list) list ->
+ val add_recdef: bool -> xstring -> string -> ((binding * string) * Attrib.src list) list ->
Attrib.src option -> theory -> theory
* {simps: thm list, rules: thm list list, induct: thm, tcs: term list}
- val add_recdef_i: bool -> xstring -> term -> ((bstring * term) * attribute list) list ->
+ val add_recdef_i: bool -> xstring -> term -> ((binding * term) * attribute list) list ->
theory -> theory * {simps: thm list, rules: thm list list, induct: thm, tcs: term list}
val defer_recdef: xstring -> string list -> (Facts.ref * Attrib.src list) list
-> theory -> theory * {induct_rules: thm}
@@ -214,8 +213,8 @@
thy
|> Sign.add_path bname
|> PureThy.add_thmss
- ((("simps", List.concat rules), simp_att) :: ((eq_names ~~ rules) ~~ eq_atts))
- ||>> PureThy.add_thms [(("induct", induct), [])];
+ (((Binding.name "simps", List.concat rules), simp_att) :: ((eq_names ~~ rules) ~~ eq_atts))
+ ||>> PureThy.add_thms [((Binding.name "induct", induct), [])];
val result = {simps = simps', rules = rules', induct = induct', tcs = tcs};
val thy =
thy
@@ -243,7 +242,7 @@
val ([induct_rules'], thy3) =
thy2
|> Sign.add_path bname
- |> PureThy.add_thms [(("induct_rules", induct_rules), [])]
+ |> PureThy.add_thms [((Binding.name "induct_rules", induct_rules), [])]
||> Sign.parent_path;
in (thy3, {induct_rules = induct_rules'}) end;
@@ -299,7 +298,7 @@
val recdef_decl =
Scan.optional (P.$$$ "(" -- P.!!! (P.$$$ "permissive" -- P.$$$ ")") >> K false) true --
- P.name -- P.term -- Scan.repeat1 ((SpecParse.opt_thm_name ":" >> apfst Binding.base_name) -- P.prop)
+ P.name -- P.term -- Scan.repeat1 (SpecParse.opt_thm_name ":" -- P.prop)
-- Scan.option hints
>> (fn ((((p, f), R), eqs), src) => #1 o add_recdef p f R (map P.triple_swap eqs) src);
--- a/src/HOL/Tools/record_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/record_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1534,8 +1534,10 @@
|> extension_typedef name repT (alphas@[zeta])
||> Sign.add_consts_i
(map Syntax.no_syn ((apfst base ext_decl)::dest_decls@upd_decls))
- ||>> PureThy.add_defs false (map Thm.no_attributes (ext_spec::dest_specs))
- ||>> PureThy.add_defs false (map Thm.no_attributes upd_specs)
+ ||>> PureThy.add_defs false
+ (map (Thm.no_attributes o apfst Binding.name) (ext_spec :: dest_specs))
+ ||>> PureThy.add_defs false
+ (map (Thm.no_attributes o apfst Binding.name) upd_specs)
|-> (fn args as ((_, dest_defs), upd_defs) =>
fold Code.add_default_eqn dest_defs
#> fold Code.add_default_eqn upd_defs
@@ -1693,14 +1695,14 @@
[dest_convs',upd_convs']),
thm_thy) =
defs_thy
- |> (PureThy.add_thms o map Thm.no_attributes)
+ |> (PureThy.add_thms o map (Thm.no_attributes o apfst Binding.name))
[("ext_inject", inject),
("ext_induct", induct),
("ext_cases", cases),
("ext_surjective", surjective),
("ext_split", split_meta)]
- ||>> (PureThy.add_thmss o map Thm.no_attributes)
- [("dest_convs",dest_convs_standard),("upd_convs",upd_convs)]
+ ||>> (PureThy.add_thmss o map (Thm.no_attributes o apfst Binding.name))
+ [("dest_convs", dest_convs_standard), ("upd_convs", upd_convs)]
in (thm_thy,extT,induct',inject',dest_convs',split_meta',upd_convs')
end;
@@ -1938,9 +1940,9 @@
(map2 (fn (x, T) => fn mx => (x, T, mx)) sel_decls (field_syntax @ [Syntax.NoSyn]))
|> (Sign.add_consts_i o map Syntax.no_syn)
(upd_decls @ [make_decl, fields_decl, extend_decl, truncate_decl])
- |> ((PureThy.add_defs false o map Thm.no_attributes) sel_specs)
- ||>> ((PureThy.add_defs false o map Thm.no_attributes) upd_specs)
- ||>> ((PureThy.add_defs false o map Thm.no_attributes)
+ |> ((PureThy.add_defs false o map (Thm.no_attributes o apfst Binding.name)) sel_specs)
+ ||>> ((PureThy.add_defs false o map (Thm.no_attributes o apfst Binding.name)) upd_specs)
+ ||>> ((PureThy.add_defs false o map (Thm.no_attributes o apfst Binding.name))
[make_spec, fields_spec, extend_spec, truncate_spec])
|-> (fn defs as ((sel_defs, upd_defs), derived_defs) =>
fold Code.add_default_eqn sel_defs
@@ -2164,17 +2166,17 @@
val ((([sel_convs',upd_convs',sel_defs',upd_defs',[split_meta',split_object',split_ex'],derived_defs'],
[surjective',equality']),[induct_scheme',induct',cases_scheme',cases']), thms_thy) =
defs_thy
- |> (PureThy.add_thmss o map Thm.no_attributes)
+ |> (PureThy.add_thmss o map (Thm.no_attributes o apfst Binding.name))
[("select_convs", sel_convs_standard),
("update_convs", upd_convs),
("select_defs", sel_defs),
("update_defs", upd_defs),
("splits", [split_meta_standard,split_object,split_ex]),
("defs", derived_defs)]
- ||>> (PureThy.add_thms o map Thm.no_attributes)
+ ||>> (PureThy.add_thms o map (Thm.no_attributes o apfst Binding.name))
[("surjective", surjective),
("equality", equality)]
- ||>> PureThy.add_thms
+ ||>> (PureThy.add_thms o (map o apfst o apfst) Binding.name)
[(("induct_scheme", induct_scheme), induct_type_global (suffix schemeN name)),
(("induct", induct), induct_type_global name),
(("cases_scheme", cases_scheme), cases_type_global (suffix schemeN name)),
@@ -2186,8 +2188,8 @@
val final_thy =
thms_thy
|> (snd oo PureThy.add_thmss)
- [(("simps", sel_upd_simps), [Simplifier.simp_add]),
- (("iffs",iffs), [iff_add])]
+ [((Binding.name "simps", sel_upd_simps), [Simplifier.simp_add]),
+ ((Binding.name "iffs", iffs), [iff_add])]
|> put_record name (make_record_info args parent fields extension induct_scheme')
|> put_sel_upd (names @ [full_moreN]) sel_upd_simps
|> add_record_equalities extension_id equality'
--- a/src/HOL/Tools/res_axioms.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/res_axioms.ML Wed Jan 28 16:57:12 2009 +0100
@@ -84,7 +84,7 @@
val (c, thy') =
Sign.declare_const [Markup.property_internal] ((Binding.name cname, cT), NoSyn) thy
val cdef = cname ^ "_def"
- val thy'' = Theory.add_defs_i true false [(cdef, Logic.mk_equals (c, rhs))] thy'
+ val thy'' = Theory.add_defs_i true false [(Binding.name cdef, Logic.mk_equals (c, rhs))] thy'
val ax = Thm.axiom thy'' (Sign.full_bname thy'' cdef)
in dec_sko (subst_bound (list_comb (c, args), p)) (ax :: axs, thy'') end
| dec_sko (Const ("All", _) $ (xtp as Abs (a, T, p))) thx =
--- a/src/HOL/Tools/res_reconstruct.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/res_reconstruct.ML Wed Jan 28 16:57:12 2009 +0100
@@ -15,7 +15,7 @@
val strip_prefix: string -> string -> string option
val setup: Context.theory -> Context.theory
(* extracting lemma list*)
- val check_success_e_vamp_spass: string * int -> bool
+ val find_failure_e_vamp_spass: string -> string option
val lemma_list_dfg: string * string vector * Proof.context * Thm.thm * int -> string
val lemma_list_tstp: string * string vector * Proof.context * Thm.thm * int -> string
(* structured proofs *)
@@ -463,11 +463,12 @@
val failure_strings_vampire = ["Satisfiability detected", "Refutation not found", "CANNOT PROVE"];
val failure_strings_SPASS = ["SPASS beiseite: Completion found.","SPASS beiseite: Ran out of time.",
"SPASS beiseite: Maximal number of loops exceeded."];
- fun check_success_e_vamp_spass (proof, rc) =
- not (exists (fn s => String.isSubstring s proof)
- (failure_strings_E @ failure_strings_vampire @ failure_strings_SPASS))
- andalso (rc = 0);
-
+ fun find_failure_e_vamp_spass proof =
+ let val failures =
+ map_filter (fn s => if String.isSubstring s proof then SOME s else NONE)
+ (failure_strings_E @ failure_strings_vampire @ failure_strings_SPASS)
+ in if null failures then NONE else SOME (hd failures) end;
+
(*=== EXTRACTING PROOF-TEXT === *)
val begin_proof_strings = ["# SZS output start CNFRefutation.",
--- a/src/HOL/Tools/specification_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/specification_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -28,7 +28,7 @@
else thname
val def_eq = Logic.mk_equals (Const(cname_full,ctype),
HOLogic.choice_const ctype $ P)
- val (thms, thy') = PureThy.add_defs covld [((cdefname,def_eq),[])] thy
+ val (thms, thy') = PureThy.add_defs covld [((Binding.name cdefname, def_eq),[])] thy
val thm' = [thm,hd thms] MRS @{thm exE_some}
in
mk_definitional cos (thy',thm')
@@ -39,7 +39,7 @@
let
fun process [] (thy,tm) =
let
- val (thms, thy') = PureThy.add_axioms [((axname,HOLogic.mk_Trueprop tm),[])] thy
+ val (thms, thy') = PureThy.add_axioms [((Binding.name axname, HOLogic.mk_Trueprop tm),[])] thy
in
(thy',hd thms)
end
@@ -184,7 +184,7 @@
if name = ""
then arg |> Library.swap
else (writeln (" " ^ name ^ ": " ^ (Display.string_of_thm thm));
- PureThy.store_thm (name, thm) thy)
+ PureThy.store_thm (Binding.name name, thm) thy)
in
args |> apsnd (remove_alls frees)
|> apsnd undo_imps
--- a/src/HOL/Tools/typedef_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Tools/typedef_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -112,7 +112,8 @@
if def then
theory
|> Sign.add_consts_i [(name, setT', NoSyn)]
- |> PureThy.add_defs false [Thm.no_attributes ((PrimitiveDefs.mk_defpair (setC, set)))]
+ |> PureThy.add_defs false [Thm.no_attributes (apfst (Binding.name)
+ (PrimitiveDefs.mk_defpair (setC, set)))]
|-> (fn [th] => pair (SOME th))
else (NONE, theory);
fun contract_def NONE th = th
@@ -130,7 +131,7 @@
(Abs_name, oldT --> newT, NoSyn)]
#> add_def
#-> (fn set_def =>
- PureThy.add_axioms [((typedef_name, typedef_prop),
+ PureThy.add_axioms [((Binding.name typedef_name, typedef_prop),
[Thm.rule_attribute (K (fn cond_axm => contract_def set_def inhabited RS cond_axm))])]
##>> pair set_def)
##> Theory.add_deps "" (dest_Const RepC) typedef_deps
@@ -143,7 +144,7 @@
thy1
|> Sign.add_path name
|> PureThy.add_thms
- ([((Rep_name, make @{thm type_definition.Rep}), []),
+ ((map o apfst o apfst) Binding.name [((Rep_name, make @{thm type_definition.Rep}), []),
((Rep_name ^ "_inverse", make @{thm type_definition.Rep_inverse}), []),
((Abs_name ^ "_inverse", make @{thm type_definition.Abs_inverse}), []),
((Rep_name ^ "_inject", make @{thm type_definition.Rep_inject}), []),
--- a/src/HOL/Transitive_Closure.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Transitive_Closure.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Transitive_Closure.thy
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1992 University of Cambridge
*)
@@ -568,6 +567,22 @@
apply auto
done
+lemma trancl_subset_Field2: "r^+ <= Field r \<times> Field r"
+ apply clarify
+ apply (erule trancl_induct)
+ apply (auto simp add: Field_def)
+ done
+
+lemma finite_trancl: "finite (r^+) = finite r"
+ apply auto
+ prefer 2
+ apply (rule trancl_subset_Field2 [THEN finite_subset])
+ apply (rule finite_SigmaI)
+ prefer 3
+ apply (blast intro: r_into_trancl' finite_subset)
+ apply (auto simp add: finite_Field)
+ done
+
text {* More about converse @{text rtrancl} and @{text trancl}, should
be merged with main body. *}
--- a/src/HOL/Typedef.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Typedef.thy Wed Jan 28 16:57:12 2009 +0100
@@ -123,7 +123,7 @@
text {* This class is just a workaround for classes without parameters;
it shall disappear as soon as possible. *}
-class itself = type +
+class itself =
fixes itself :: "'a itself"
setup {*
--- a/src/HOL/Typerep.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Typerep.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
-(* Title: HOL/Library/RType.thy
- ID: $Id$
+(* Title: HOL/Typerep.thy
Author: Florian Haftmann, TU Muenchen
*)
@@ -15,9 +14,7 @@
fixes typerep :: "'a\<Colon>{} itself \<Rightarrow> typerep"
begin
-definition
- typerep_of :: "'a \<Rightarrow> typerep"
-where
+definition typerep_of :: "'a \<Rightarrow> typerep" where
[simp]: "typerep_of x = typerep TYPE('a)"
end
--- a/src/HOL/Wellfounded.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Wellfounded.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
-(* ID: $Id$
- Author: Tobias Nipkow
+(* Author: Tobias Nipkow
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Author: Konrad Slind, Alexander Krauss
Copyright 1992-2008 University of Cambridge and TU Muenchen
@@ -8,7 +7,7 @@
header {*Well-founded Recursion*}
theory Wellfounded
-imports Finite_Set Nat
+imports Finite_Set Transitive_Closure Nat
uses ("Tools/function_package/size.ML")
begin
--- a/src/HOL/Word/BinBoolList.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/BinBoolList.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson, NICTA
contains theorems to do with integers, expressed using Pls, Min, BIT,
--- a/src/HOL/Word/BinGeneral.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/BinGeneral.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson, NICTA
contains basic definition to do with integers
--- a/src/HOL/Word/BinOperations.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/BinOperations.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson and Gerwin Klein, NICTA
definition and basic theorems for bit-wise logical operations
--- a/src/HOL/Word/BitSyntax.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/BitSyntax.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Brian Huffman, PSU and Gerwin Klein, NICTA
Syntactic class for bitwise operations.
@@ -12,7 +11,7 @@
imports BinGeneral
begin
-class bit = type +
+class bit =
fixes bitNOT :: "'a \<Rightarrow> 'a" ("NOT _" [70] 71)
and bitAND :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixr "AND" 64)
and bitOR :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixr "OR" 59)
--- a/src/HOL/Word/Examples/WordExamples.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/Examples/WordExamples.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Gerwin Klein, NICTA
Examples demonstrating and testing various word operations.
@@ -7,9 +6,14 @@
header "Examples of word operations"
-theory WordExamples imports WordMain
+theory WordExamples
+imports Word
begin
+types word32 = "32 word"
+types word8 = "8 word"
+types byte = word8
+
-- "modulus"
lemma "(27 :: 4 word) = -5" by simp
--- a/src/HOL/Word/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,2 +1,1 @@
-no_document use_thys ["Infinite_Set"];
-use_thy "WordMain";
+use_thy "Word";
--- a/src/HOL/Word/Size.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/Size.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: John Matthews, Galois Connections, Inc., copyright 2006
A typeclass for parameterizing types by size.
@@ -18,7 +17,7 @@
some duplication with the definitions in @{text "Numeral_Type"}.
*}
-class len0 = type +
+class len0 =
fixes len_of :: "'a itself \<Rightarrow> nat"
text {*
--- a/src/HOL/Word/TdThs.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/TdThs.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson and Gerwin Klein, NICTA
consequences of type definition theorems,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Word/Word.thy Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,13 @@
+(* Title: HOL/Word/Word.thy
+ Author: Gerwin Klein, NICTA
+*)
+
+header {* Word Library interface *}
+
+theory Word
+imports WordGenLib
+begin
+
+text {* see @{text "Examples/WordExamples.thy"} for examples *}
+
+end
--- a/src/HOL/Word/WordArith.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/WordArith.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson and Gerwin Klein, NICTA
contains arithmetic theorems for word, instantiations to
--- a/src/HOL/Word/WordBitwise.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/WordBitwise.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson and Gerwin Klein, NICTA
contains theorems to do with bit-wise (logical) operations on words
--- a/src/HOL/Word/WordDefinition.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/WordDefinition.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson and Gerwin Klein, NICTA
Basic definition of word type and basic theorems following from
@@ -12,8 +11,50 @@
imports Size BinBoolList TdThs
begin
-typedef (open word) 'a word
- = "{(0::int) ..< 2^len_of TYPE('a::len0)}" by auto
+subsection {* Type definition *}
+
+typedef (open word) 'a word = "{(0::int) ..< 2^len_of TYPE('a::len0)}"
+ morphisms uint Abs_word by auto
+
+definition word_of_int :: "int \<Rightarrow> 'a\<Colon>len0 word" where
+ -- {* representation of words using unsigned or signed bins,
+ only difference in these is the type class *}
+ [code del]: "word_of_int w = Abs_word (bintrunc (len_of TYPE ('a)) w)"
+
+lemma uint_word_of_int [code]: "uint (word_of_int w \<Colon> 'a\<Colon>len0 word) = w mod 2 ^ len_of TYPE('a)"
+ by (auto simp add: word_of_int_def bintrunc_mod2p intro: Abs_word_inverse)
+
+code_datatype word_of_int
+
+
+subsection {* Type conversions and casting *}
+
+definition sint :: "'a :: len word => int" where
+ -- {* treats the most-significant-bit as a sign bit *}
+ sint_uint: "sint w = sbintrunc (len_of TYPE ('a) - 1) (uint w)"
+
+definition unat :: "'a :: len0 word => nat" where
+ "unat w = nat (uint w)"
+
+definition uints :: "nat => int set" where
+ -- "the sets of integers representing the words"
+ "uints n = range (bintrunc n)"
+
+definition sints :: "nat => int set" where
+ "sints n = range (sbintrunc (n - 1))"
+
+definition unats :: "nat => nat set" where
+ "unats n = {i. i < 2 ^ n}"
+
+definition norm_sint :: "nat => int => int" where
+ "norm_sint n w = (w + 2 ^ (n - 1)) mod 2 ^ n - 2 ^ (n - 1)"
+
+definition scast :: "'a :: len word => 'b :: len word" where
+ -- "cast a word to a different length"
+ "scast w = word_of_int (sint w)"
+
+definition ucast :: "'a :: len0 word => 'b :: len0 word" where
+ "ucast w = word_of_int (uint w)"
instantiation word :: (len0) size
begin
@@ -25,83 +66,39 @@
end
-definition
- -- {* representation of words using unsigned or signed bins,
- only difference in these is the type class *}
- word_of_int :: "int \<Rightarrow> 'a\<Colon>len0 word"
-where
- [code del]: "word_of_int w = Abs_word (bintrunc (len_of TYPE ('a)) w)"
-
-code_datatype word_of_int
-
+definition source_size :: "('a :: len0 word => 'b) => nat" where
+ -- "whether a cast (or other) function is to a longer or shorter length"
+ "source_size c = (let arb = undefined ; x = c arb in size arb)"
-subsection "Type conversions and casting"
+definition target_size :: "('a => 'b :: len0 word) => nat" where
+ "target_size c = size (c undefined)"
-constdefs
- -- {* uint and sint cast a word to an integer,
- uint treats the word as unsigned,
- sint treats the most-significant-bit as a sign bit *}
- uint :: "'a :: len0 word => int"
- "uint w == Rep_word w"
- sint :: "'a :: len word => int"
- sint_uint: "sint w == sbintrunc (len_of TYPE ('a) - 1) (uint w)"
- unat :: "'a :: len0 word => nat"
- "unat w == nat (uint w)"
+definition is_up :: "('a :: len0 word => 'b :: len0 word) => bool" where
+ "is_up c \<longleftrightarrow> source_size c <= target_size c"
- -- "the sets of integers representing the words"
- uints :: "nat => int set"
- "uints n == range (bintrunc n)"
- sints :: "nat => int set"
- "sints n == range (sbintrunc (n - 1))"
- unats :: "nat => nat set"
- "unats n == {i. i < 2 ^ n}"
- norm_sint :: "nat => int => int"
- "norm_sint n w == (w + 2 ^ (n - 1)) mod 2 ^ n - 2 ^ (n - 1)"
+definition is_down :: "('a :: len0 word => 'b :: len0 word) => bool" where
+ "is_down c \<longleftrightarrow> target_size c <= source_size c"
- -- "cast a word to a different length"
- scast :: "'a :: len word => 'b :: len word"
- "scast w == word_of_int (sint w)"
- ucast :: "'a :: len0 word => 'b :: len0 word"
- "ucast w == word_of_int (uint w)"
+definition of_bl :: "bool list => 'a :: len0 word" where
+ "of_bl bl = word_of_int (bl_to_bin bl)"
- -- "whether a cast (or other) function is to a longer or shorter length"
- source_size :: "('a :: len0 word => 'b) => nat"
- "source_size c == let arb = undefined ; x = c arb in size arb"
- target_size :: "('a => 'b :: len0 word) => nat"
- "target_size c == size (c undefined)"
- is_up :: "('a :: len0 word => 'b :: len0 word) => bool"
- "is_up c == source_size c <= target_size c"
- is_down :: "('a :: len0 word => 'b :: len0 word) => bool"
- "is_down c == target_size c <= source_size c"
+definition to_bl :: "'a :: len0 word => bool list" where
+ "to_bl w = bin_to_bl (len_of TYPE ('a)) (uint w)"
-constdefs
- of_bl :: "bool list => 'a :: len0 word"
- "of_bl bl == word_of_int (bl_to_bin bl)"
- to_bl :: "'a :: len0 word => bool list"
- "to_bl w ==
- bin_to_bl (len_of TYPE ('a)) (uint w)"
+definition word_reverse :: "'a :: len0 word => 'a word" where
+ "word_reverse w = of_bl (rev (to_bl w))"
- word_reverse :: "'a :: len0 word => 'a word"
- "word_reverse w == of_bl (rev (to_bl w))"
-
-constdefs
- word_int_case :: "(int => 'b) => ('a :: len0 word) => 'b"
- "word_int_case f w == f (uint w)"
+definition word_int_case :: "(int => 'b) => ('a :: len0 word) => 'b" where
+ "word_int_case f w = f (uint w)"
syntax
of_int :: "int => 'a"
translations
- "case x of of_int y => b" == "word_int_case (%y. b) x"
+ "case x of of_int y => b" == "CONST word_int_case (%y. b) x"
subsection "Arithmetic operations"
-declare uint_def [code del]
-
-lemma [code]: "uint (word_of_int w \<Colon> 'a\<Colon>len0 word) = bintrunc (len_of TYPE('a)) w"
- by (auto simp add: uint_def word_of_int_def intro!: Abs_word_inverse)
- (insert range_bintrunc, auto)
-
instantiation word :: (len0) "{number, uminus, minus, plus, one, zero, times, Divides.div, power, ord, bit}"
begin
@@ -186,8 +183,6 @@
subsection "Bit-wise operations"
-
-
instantiation word :: (len0) bits
begin
@@ -337,21 +332,21 @@
unfolding atLeastLessThan_alt by auto
lemma
- Rep_word_0:"0 <= Rep_word x" and
- Rep_word_lt: "Rep_word (x::'a::len0 word) < 2 ^ len_of TYPE('a)"
- by (auto simp: Rep_word [simplified])
+ uint_0:"0 <= uint x" and
+ uint_lt: "uint (x::'a::len0 word) < 2 ^ len_of TYPE('a)"
+ by (auto simp: uint [simplified])
-lemma Rep_word_mod_same:
- "Rep_word x mod 2 ^ len_of TYPE('a) = Rep_word (x::'a::len0 word)"
- by (simp add: int_mod_eq Rep_word_lt Rep_word_0)
+lemma uint_mod_same:
+ "uint x mod 2 ^ len_of TYPE('a) = uint (x::'a::len0 word)"
+ by (simp add: int_mod_eq uint_lt uint_0)
lemma td_ext_uint:
"td_ext (uint :: 'a word => int) word_of_int (uints (len_of TYPE('a::len0)))
(%w::int. w mod 2 ^ len_of TYPE('a))"
apply (unfold td_ext_def')
- apply (simp add: uints_num uint_def word_of_int_def bintrunc_mod2p)
- apply (simp add: Rep_word_mod_same Rep_word_0 Rep_word_lt
- word.Rep_word_inverse word.Abs_word_inverse int_mod_lem)
+ apply (simp add: uints_num word_of_int_def bintrunc_mod2p)
+ apply (simp add: uint_mod_same uint_0 uint_lt
+ word.uint_inverse word.Abs_word_inverse int_mod_lem)
done
lemmas int_word_uint = td_ext_uint [THEN td_ext.eq_norm, standard]
@@ -793,10 +788,7 @@
lemmas is_down = is_down_def [unfolded source_size target_size]
lemmas is_up = is_up_def [unfolded source_size target_size]
-lemmas is_up_down =
- trans [OF is_up [THEN meta_eq_to_obj_eq]
- is_down [THEN meta_eq_to_obj_eq, symmetric],
- standard]
+lemmas is_up_down = trans [OF is_up is_down [symmetric], standard]
lemma down_cast_same': "uc = ucast ==> is_down uc ==> uc = scast"
apply (unfold is_down)
@@ -950,4 +942,17 @@
lemmas word_log_defs = word_and_def word_or_def word_xor_def word_not_def
lemmas word_log_bin_defs = word_log_defs
+text {* Executable equality *}
+
+instantiation word :: ("{len0}") eq
+begin
+
+definition eq_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> bool" where
+ "eq_word k l \<longleftrightarrow> HOL.eq (uint k) (uint l)"
+
+instance proof
+qed (simp add: eq eq_word_def)
+
end
+
+end
--- a/src/HOL/Word/WordGenLib.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/WordGenLib.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Author: Gerwin Klein, Jeremy Dawson
- $Id$
Miscellaneous additional library definitions and lemmas for
the word type. Instantiation to boolean algebras, definition
@@ -452,4 +451,13 @@
"1 + n \<noteq> (0::'a::len word) \<Longrightarrow> unat (1 + n) = Suc (unat n)"
by unat_arith
+
+lemmas word_no_1 [simp] = word_1_no [symmetric, unfolded BIT_simps]
+lemmas word_no_0 [simp] = word_0_no [symmetric]
+
+declare word_0_bl [simp]
+declare bin_to_bl_def [simp]
+declare to_bl_0 [simp]
+declare of_bl_True [simp]
+
end
--- a/src/HOL/Word/WordMain.thy Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-(*
- ID: $Id$
- Author: Gerwin Klein, NICTA
-
- The main interface of the word library to other theories.
-*)
-
-header {* Main Word Library *}
-
-theory WordMain
-imports WordGenLib
-begin
-
-lemmas word_no_1 [simp] = word_1_no [symmetric, unfolded BIT_simps]
-lemmas word_no_0 [simp] = word_0_no [symmetric]
-
-declare word_0_bl [simp]
-declare bin_to_bl_def [simp]
-declare to_bl_0 [simp]
-declare of_bl_True [simp]
-
-text "Examples"
-
-types word32 = "32 word"
-types word8 = "8 word"
-types byte = word8
-
-text {* for more see WordExamples.thy *}
-
-end
--- a/src/HOL/Word/WordShift.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/Word/WordShift.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(*
- ID: $Id$
Author: Jeremy Dawson and Gerwin Klein, NICTA
contains theorems to do with shifting, rotating, splitting words
--- a/src/HOL/ex/LocaleTest2.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/ex/LocaleTest2.thy Wed Jan 28 16:57:12 2009 +0100
@@ -625,9 +625,6 @@
lemma "gcd x y dvd x"
apply (rule nat_dvd.meet_left) done
-print_interps dpo
-print_interps dlat
-
subsection {* Group example with defined operations @{text inv} and @{text unit} *}
--- a/src/HOL/ex/Quickcheck.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/ex/Quickcheck.thy Wed Jan 28 16:57:12 2009 +0100
@@ -200,7 +200,7 @@
in
lthy
|> LocalTheory.theory (Code.del_eqns c
- #> PureThy.add_thm ((fst (dest_Free random') ^ "_code", thm), [Thm.kind_internal])
+ #> PureThy.add_thm ((Binding.name (fst (dest_Free random') ^ "_code"), thm), [Thm.kind_internal])
#-> Code.add_eqn)
end;
in
--- a/src/HOL/ex/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/ex/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -63,7 +63,6 @@
"Dense_Linear_Order_Ex",
"PresburgerEx",
"Reflected_Presburger",
- "Reflection",
"ReflectionEx",
"BinEx",
"Sqrt",
--- a/src/HOL/ex/Reflection.thy Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,48 +0,0 @@
-(*
- ID: $Id$
- Author: Amine Chaieb, TU Muenchen
-*)
-
-header {* Generic reflection and reification *}
-
-theory Reflection
-imports Main
- uses "reflection_data.ML" ("reflection.ML")
-begin
-
-setup {* Reify_Data.setup*}
-
-
-lemma ext2: "(\<forall>x. f x = g x) \<Longrightarrow> f = g"
- by (blast intro: ext)
-
-use "reflection.ML"
-
-method_setup reify = {*
- fn src =>
- Method.syntax (Attrib.thms --
- Scan.option (Scan.lift (Args.$$$ "(") |-- Args.term --| Scan.lift (Args.$$$ ")") )) src #>
- (fn ((eqs, to), ctxt) => Method.SIMPLE_METHOD' (Reflection.genreify_tac ctxt (eqs @ (fst (Reify_Data.get ctxt))) to))
-*} "partial automatic reification"
-
-method_setup reflection = {*
-let
-fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
-val onlyN = "only";
-val rulesN = "rules";
-val any_keyword = keyword onlyN || keyword rulesN;
-val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
-val terms = thms >> map (term_of o Drule.dest_term);
-fun optional scan = Scan.optional scan [];
-in
-fn src =>
- Method.syntax (thms -- optional (keyword rulesN |-- thms) -- Scan.option (keyword onlyN |-- Args.term)) src #>
- (fn (((eqs,ths),to), ctxt) =>
- let
- val (ceqs,cths) = Reify_Data.get ctxt
- val corr_thms = ths@cths
- val raw_eqs = eqs@ceqs
- in Method.SIMPLE_METHOD' (Reflection.reflection_tac ctxt corr_thms raw_eqs to)
- end) end
-*} "reflection method"
-end
--- a/src/HOL/ex/ReflectionEx.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOL/ex/ReflectionEx.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,9 +1,9 @@
-(*
- ID: $Id$
+(* Title: HOL/ex/ReflectionEx.thy
Author: Amine Chaieb, TU Muenchen
*)
header {* Examples for generic reflection and reification *}
+
theory ReflectionEx
imports Reflection
begin
--- a/src/HOL/ex/reflection.ML Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,325 +0,0 @@
-(* Author: Amine Chaieb, TU Muenchen
-
-A trial for automatical reification.
-*)
-
-signature REFLECTION =
-sig
- val genreify_tac: Proof.context -> thm list -> term option -> int -> tactic
- val reflection_tac: Proof.context -> thm list -> thm list -> term option -> int -> tactic
- val gen_reflection_tac: Proof.context -> (cterm -> thm)
- -> thm list -> thm list -> term option -> int -> tactic
-end;
-
-structure Reflection : REFLECTION =
-struct
-
-val ext2 = thm "ext2";
-val nth_Cons_0 = thm "nth_Cons_0";
-val nth_Cons_Suc = thm "nth_Cons_Suc";
-
- (* Make a congruence rule out of a defining equation for the interpretation *)
- (* th is one defining equation of f, i.e.
- th is "f (Cp ?t1 ... ?tn) = P(f ?t1, .., f ?tn)" *)
- (* Cp is a constructor pattern and P is a pattern *)
-
- (* The result is:
- [|?A1 = f ?t1 ; .. ; ?An= f ?tn |] ==> P (?A1, .., ?An) = f (Cp ?t1 .. ?tn) *)
- (* + the a list of names of the A1 .. An, Those are fresh in the ctxt*)
-
-
-fun mk_congeq ctxt fs th =
- let
- val (f as Const(fN,fT)) = th |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq
- |> fst |> strip_comb |> fst
- val thy = ProofContext.theory_of ctxt
- val cert = Thm.cterm_of thy
- val (((_,_),[th']), ctxt') = Variable.import_thms true [th] ctxt
- val (lhs, rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop (Thm.prop_of th'))
- fun add_fterms (t as t1 $ t2) =
- if exists (fn f => Term.could_unify (t |> strip_comb |> fst, f)) fs then insert (op aconv) t
- else add_fterms t1 #> add_fterms t2
- | add_fterms (t as Abs(xn,xT,t')) =
- if exists_Const (fn (c, _) => c = fN) t then (fn _ => [t]) else (fn _ => [])
- | add_fterms _ = I
- val fterms = add_fterms rhs []
- val (xs, ctxt'') = Variable.variant_fixes (replicate (length fterms) "x") ctxt'
- val tys = map fastype_of fterms
- val vs = map Free (xs ~~ tys)
- val env = fterms ~~ vs
- (* FIXME!!!!*)
- fun replace_fterms (t as t1 $ t2) =
- (case AList.lookup (op aconv) env t of
- SOME v => v
- | NONE => replace_fterms t1 $ replace_fterms t2)
- | replace_fterms t = (case AList.lookup (op aconv) env t of
- SOME v => v
- | NONE => t)
-
- fun mk_def (Abs(x,xT,t),v) = HOLogic.mk_Trueprop ((HOLogic.all_const xT)$ Abs(x,xT,HOLogic.mk_eq(v$(Bound 0), t)))
- | mk_def (t, v) = HOLogic.mk_Trueprop (HOLogic.mk_eq (v, t))
- fun tryext x = (x RS ext2 handle THM _ => x)
- val cong = (Goal.prove ctxt'' [] (map mk_def env)
- (HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, replace_fterms rhs)))
- (fn x => LocalDefs.unfold_tac (#context x) (map tryext (#prems x))
- THEN rtac th' 1)) RS sym
-
- val (cong' :: vars') =
- Variable.export ctxt'' ctxt (cong :: map (Drule.mk_term o cert) vs)
- val vs' = map (fst o fst o Term.dest_Var o Thm.term_of o Drule.dest_term) vars'
-
- in (vs', cong') end;
- (* congs is a list of pairs (P,th) where th is a theorem for *)
- (* [| f p1 = A1; ...; f pn = An|] ==> f (C p1 .. pn) = P *)
-val FWD = curry (op OF);
-
- (* da is the decomposition for atoms, ie. it returns ([],g) where g
- returns the right instance f (AtC n) = t , where AtC is the Atoms
- constructor and n is the number of the atom corresponding to t *)
-
-(* Generic decomp for reification : matches the actual term with the
-rhs of one cong rule. The result of the matching guides the
-proof synthesis: The matches of the introduced Variables A1 .. An are
-processed recursively
- The rest is instantiated in the cong rule,i.e. no reification is needed *)
-
-exception REIF of string;
-
-val bds = ref ([]: (typ * ((term list) * (term list))) list);
-
-fun index_of t =
- let
- val tt = HOLogic.listT (fastype_of t)
- in
- (case AList.lookup Type.could_unify (!bds) tt of
- NONE => error "index_of : type not found in environements!"
- | SOME (tbs,tats) =>
- let
- val i = find_index_eq t tats
- val j = find_index_eq t tbs
- in (if j= ~1 then
- if i= ~1
- then (bds := AList.update Type.could_unify (tt,(tbs,tats@[t])) (!bds) ;
- length tbs + length tats)
- else i else j)
- end)
- end;
-
-fun dest_listT (Type ("List.list", [T])) = T;
-
-fun decomp_genreif da cgns (t,ctxt) =
- let
- val thy = ProofContext.theory_of ctxt
- val cert = cterm_of thy
- fun tryabsdecomp (s,ctxt) =
- (case s of
- Abs(xn,xT,ta) =>
- (let
- val ([xn],ctxt') = Variable.variant_fixes ["x"] ctxt
- val (xn,ta) = variant_abs (xn,xT,ta)
- val x = Free(xn,xT)
- val _ = (case AList.lookup Type.could_unify (!bds) (HOLogic.listT xT)
- of NONE => error "tryabsdecomp: Type not found in the Environement"
- | SOME (bsT,atsT) =>
- (bds := AList.update Type.could_unify (HOLogic.listT xT, ((x::bsT), atsT)) (!bds)))
- in ([(ta, ctxt')] ,
- fn [th] => ((let val (bsT,asT) = the(AList.lookup Type.could_unify (!bds) (HOLogic.listT xT))
- in (bds := AList.update Type.could_unify (HOLogic.listT xT,(tl bsT,asT)) (!bds))
- end) ;
- hd (Variable.export ctxt' ctxt [(forall_intr (cert x) th) COMP allI])))
- end)
- | _ => da (s,ctxt))
- in
- (case cgns of
- [] => tryabsdecomp (t,ctxt)
- | ((vns,cong)::congs) => ((let
- val cert = cterm_of thy
- val certy = ctyp_of thy
- val (tyenv, tmenv) =
- Pattern.match thy
- ((fst o HOLogic.dest_eq o HOLogic.dest_Trueprop) (concl_of cong), t)
- (Envir.type_env (Envir.empty 0), Vartab.empty)
- val (fnvs,invs) = List.partition (fn ((vn,_),_) => vn mem vns) (Vartab.dest tmenv)
- val (fts,its) =
- (map (snd o snd) fnvs,
- map (fn ((vn,vi),(tT,t)) => (cert(Var ((vn,vi),tT)), cert t)) invs)
- val ctyenv = map (fn ((vn,vi),(s,ty)) => (certy (TVar((vn,vi),s)), certy ty)) (Vartab.dest tyenv)
- in (fts ~~ (replicate (length fts) ctxt), FWD (instantiate (ctyenv, its) cong))
- end)
- handle MATCH => decomp_genreif da congs (t,ctxt)))
- end;
-
- (* looks for the atoms equation and instantiates it with the right number *)
-
-
-fun mk_decompatom eqs (t,ctxt) =
-let
- val tT = fastype_of t
- fun isat eq =
- let
- val rhs = eq |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> snd
- in exists_Const
- (fn (n,ty) => n="List.nth"
- andalso
- AList.defined Type.could_unify (!bds) (domain_type ty)) rhs
- andalso Type.could_unify (fastype_of rhs, tT)
- end
- fun get_nths t acc =
- case t of
- Const("List.nth",_)$vs$n => insert (fn ((a,_),(b,_)) => a aconv b) (t,(vs,n)) acc
- | t1$t2 => get_nths t1 (get_nths t2 acc)
- | Abs(_,_,t') => get_nths t' acc
- | _ => acc
-
- fun
- tryeqs [] = error "Can not find the atoms equation"
- | tryeqs (eq::eqs) = ((
- let
- val rhs = eq |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> snd
- val nths = get_nths rhs []
- val (vss,ns) = fold_rev (fn (_,(vs,n)) => fn (vss,ns) =>
- (insert (op aconv) vs vss, insert (op aconv) n ns)) nths ([],[])
- val (vsns, ctxt') = Variable.variant_fixes (replicate (length vss) "vs") ctxt
- val (xns, ctxt'') = Variable.variant_fixes (replicate (length nths) "x") ctxt'
- val thy = ProofContext.theory_of ctxt''
- val cert = cterm_of thy
- val certT = ctyp_of thy
- val vsns_map = vss ~~ vsns
- val xns_map = (fst (split_list nths)) ~~ xns
- val subst = map (fn (nt, xn) => (nt, Var ((xn,0), fastype_of nt))) xns_map
- val rhs_P = subst_free subst rhs
- val (tyenv, tmenv) = Pattern.match
- thy (rhs_P, t)
- (Envir.type_env (Envir.empty 0), Vartab.empty)
- val sbst = Envir.subst_vars (tyenv, tmenv)
- val sbsT = Envir.typ_subst_TVars tyenv
- val subst_ty = map (fn (n,(s,t)) => (certT (TVar (n, s)), certT t))
- (Vartab.dest tyenv)
- val tml = Vartab.dest tmenv
- val t's = map (fn xn => snd (valOf (AList.lookup (op =) tml (xn,0)))) xns (* FIXME : Express with sbst*)
- val subst_ns = map (fn (Const _ $ vs $ n, Var (xn0,T)) =>
- (cert n, snd (valOf (AList.lookup (op =) tml xn0))
- |> (index_of #> HOLogic.mk_nat #> cert)))
- subst
- val subst_vs =
- let
- fun ty (Const _ $ (vs as Var (vsn,lT)) $ n, Var (xn0,T)) = (certT T, certT (sbsT T))
- fun h (Const _ $ (vs as Var (vsn,lT)) $ n, Var (xn0,T)) =
- let
- val cns = sbst (Const("List.list.Cons", T --> lT --> lT))
- val lT' = sbsT lT
- val (bsT,asT) = the (AList.lookup Type.could_unify (!bds) lT)
- val vsn = valOf (AList.lookup (op =) vsns_map vs)
- val cvs = cert (fold_rev (fn x => fn xs => cns$x$xs) bsT (Free (vsn, lT')))
- in (cert vs, cvs) end
- in map h subst end
- val cts = map (fn ((vn,vi),(tT,t)) => (cert(Var ((vn,vi),tT)), cert t))
- (fold (AList.delete (fn (((a: string),_),(b,_)) => a = b))
- (map (fn n => (n,0)) xns) tml)
- val substt =
- let val ih = Drule.cterm_rule (Thm.instantiate (subst_ty,[]))
- in map (fn (v,t) => (ih v, ih t)) (subst_ns@subst_vs@cts) end
- val th = (instantiate (subst_ty, substt) eq) RS sym
- in hd (Variable.export ctxt'' ctxt [th]) end)
- handle MATCH => tryeqs eqs)
-in ([], fn _ => tryeqs (filter isat eqs))
-end;
-
- (* Generic reification procedure: *)
- (* creates all needed cong rules and then just uses the theorem synthesis *)
-
- fun mk_congs ctxt raw_eqs =
- let
- val fs = fold_rev (fn eq =>
- insert (op =) (eq |> prop_of |> HOLogic.dest_Trueprop
- |> HOLogic.dest_eq |> fst |> strip_comb
- |> fst)) raw_eqs []
- val tys = fold_rev (fn f => fn ts => (f |> fastype_of |> binder_types |> tl)
- union ts) fs []
- val _ = bds := AList.make (fn _ => ([],[])) tys
- val (vs, ctxt') = Variable.variant_fixes (replicate (length tys) "vs") ctxt
- val thy = ProofContext.theory_of ctxt'
- val cert = cterm_of thy
- val vstys = map (fn (t,v) => (t,SOME (cert (Free(v,t)))))
- (tys ~~ vs)
- val is_Var = can dest_Var
- fun insteq eq vs =
- let
- val subst = map (fn (v as Var(n,t)) => (cert v, (valOf o valOf) (AList.lookup (op =) vstys t)))
- (filter is_Var vs)
- in Thm.instantiate ([],subst) eq
- end
- val eqs = map (fn eq => eq |> prop_of |> HOLogic.dest_Trueprop
- |> HOLogic.dest_eq |> fst |> strip_comb |> snd |> tl
- |> (insteq eq)) raw_eqs
- val (ps,congs) = split_list (map (mk_congeq ctxt' fs) eqs)
-in ps ~~ (Variable.export ctxt' ctxt congs)
-end
-
-fun partition P [] = ([],[])
- | partition P (x::xs) =
- let val (yes,no) = partition P xs
- in if P x then (x::yes,no) else (yes, x::no) end
-
-fun rearrange congs =
-let
- fun P (_, th) =
- let val @{term "Trueprop"}$(Const ("op =",_) $l$_) = concl_of th
- in can dest_Var l end
- val (yes,no) = partition P congs
- in no @ yes end
-
-
-
-fun genreif ctxt raw_eqs t =
- let
- val congs = rearrange (mk_congs ctxt raw_eqs)
- val th = divide_and_conquer (decomp_genreif (mk_decompatom raw_eqs) congs) (t,ctxt)
- fun is_listVar (Var (_,t)) = can dest_listT t
- | is_listVar _ = false
- val vars = th |> prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> snd
- |> strip_comb |> snd |> filter is_listVar
- val cert = cterm_of (ProofContext.theory_of ctxt)
- val cvs = map (fn (v as Var(n,t)) => (cert v, the (AList.lookup Type.could_unify (!bds) t) |> snd |> HOLogic.mk_list (dest_listT t) |> cert)) vars
- val th' = instantiate ([], cvs) th
- val t' = (fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o prop_of) th'
- val th'' = Goal.prove ctxt [] [] (HOLogic.mk_Trueprop (HOLogic.mk_eq (t, t')))
- (fn _ => simp_tac (local_simpset_of ctxt) 1)
- val _ = bds := []
-in FWD trans [th'',th']
-end
-
-
-fun genreflect ctxt conv corr_thms raw_eqs t =
-let
- val reifth = genreif ctxt raw_eqs t
- fun trytrans [] = error "No suitable correctness theorem found"
- | trytrans (th::ths) =
- (FWD trans [reifth, th RS sym] handle THM _ => trytrans ths)
- val th = trytrans corr_thms
- val ft = (Thm.dest_arg1 o Thm.dest_arg o Thm.dest_arg o cprop_of) th
- val rth = conv ft
-in simplify (HOL_basic_ss addsimps raw_eqs addsimps [nth_Cons_0, nth_Cons_Suc])
- (simplify (HOL_basic_ss addsimps [rth]) th)
-end
-
-fun genreify_tac ctxt eqs to i = (fn st =>
- let
- val P = HOLogic.dest_Trueprop (List.nth (prems_of st, i - 1))
- val t = (case to of NONE => P | SOME x => x)
- val th = (genreif ctxt eqs t) RS ssubst
- in rtac th i st
- end);
-
- (* Reflection calls reification and uses the correctness *)
-    (* theorem assumed to be the head of the list *)
-fun gen_reflection_tac ctxt conv corr_thms raw_eqs to i = (fn st =>
- let
- val P = HOLogic.dest_Trueprop (nth (prems_of st) (i - 1));
- val t = the_default P to;
- val th = genreflect ctxt conv corr_thms raw_eqs t
- RS ssubst;
- in (rtac th i THEN TRY(rtac TrueI i)) st end);
-
-fun reflection_tac ctxt = gen_reflection_tac ctxt Codegen.evaluation_conv;
-end
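
For orientation, the entry points deleted above were plain tactics; a minimal sketch of how they were typically invoked, where ctxt, eqs and corr are placeholders (a proof context, the user-supplied reification equations and a correctness theorem, not part of this changeset):

  (* sketch only: reify subgoal 1 against the equations eqs *)
  val reify1 = genreify_tac ctxt eqs NONE 1;

  (* sketch only: reflect subgoal 1, rewriting via the correctness theorem corr
     and evaluating the reified term with the code generator *)
  val reflect1 = reflection_tac ctxt [corr] eqs NONE 1;
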
--- a/src/HOL/ex/reflection_data.ML Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-(* Title: HOL/ex/reflection_data.ML
- ID: $Id$
- Author: Amine Chaieb, TU Muenchen
-
-Data for the reification and reflection methods.
-*)
-
-signature REIFY_DATA =
-sig
- type entry = thm list * thm list
- val get: Proof.context -> entry
- val del: attribute
- val add: attribute
- val setup: theory -> theory
-end;
-
-structure Reify_Data : REIFY_DATA =
-struct
-
-type entry = thm list * thm list;
-
-structure Data = GenericDataFun
-(
- type T = entry
- val empty = ([], [])
- val extend = I
- fun merge _ = pairself Thm.merge_thms
-);
-
-val get = Data.get o Context.Proof;
-
-val add = Thm.declaration_attribute (fn th => fn context =>
- context |> Data.map (apfst (Thm.add_thm th)))
-
-val del = Thm.declaration_attribute (fn th => fn context =>
- context |> Data.map (apfst (Thm.del_thm th)))
-
-val radd = Thm.declaration_attribute (fn th => fn context =>
- context |> Data.map (apsnd (Thm.add_thm th)))
-
-val rdel = Thm.declaration_attribute (fn th => fn context =>
- context |> Data.map (apsnd (Thm.del_thm th)))
-
-(* concrete syntax *)
-(*
-local
-fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
-
-val constsN = "consts";
-val addN = "add";
-val delN = "del";
-val any_keyword = keyword constsN || keyword addN || keyword delN;
-val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
-val terms = thms >> map (term_of o Drule.dest_term);
-
-fun optional scan = Scan.optional scan [];
-
-in
-val att_syntax = Attrib.syntax
- ((Scan.lift (Args.$$$ "del") |-- thms) >> del ||
- optional (keyword addN |-- thms) >> add)
-end;
-*)
-
-(* theory setup *)
- val setup =
- Attrib.add_attributes [("reify", Attrib.add_del_args add del, "Reify data"),
- ("reflection", Attrib.add_del_args radd rdel, "Reflection data")];
-end;
--- a/src/HOLCF/Bifinite.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Bifinite.thy Wed Jan 28 16:57:12 2009 +0100
@@ -10,7 +10,7 @@
subsection {* Omega-profinite and bifinite domains *}
-class profinite = cpo +
+class profinite =
fixes approx :: "nat \<Rightarrow> 'a \<rightarrow> 'a"
assumes chain_approx [simp]: "chain approx"
assumes lub_approx_app [simp]: "(\<Squnion>i. approx i\<cdot>x) = x"
--- a/src/HOLCF/Pcpo.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Pcpo.thy Wed Jan 28 16:57:12 2009 +0100
@@ -12,9 +12,9 @@
text {* The class cpo of chain complete partial orders *}
-axclass cpo < po
+class cpo = po +
-- {* class axiom: *}
- cpo: "chain S \<Longrightarrow> \<exists>x. range S <<| x"
+ assumes cpo: "chain S \<Longrightarrow> \<exists>x :: 'a::po. range S <<| x"
text {* in cpos everything equal to THE lub has lub properties for every chain *}
@@ -170,8 +170,8 @@
text {* The class pcpo of pointed cpos *}
-axclass pcpo < cpo
- least: "\<exists>x. \<forall>y. x \<sqsubseteq> y"
+class pcpo = cpo +
+ assumes least: "\<exists>x. \<forall>y. x \<sqsubseteq> y"
definition
UU :: "'a::pcpo" where
@@ -254,13 +254,13 @@
text {* further useful classes for HOLCF domains *}
-axclass finite_po < finite, po
+class finite_po = finite + po
-axclass chfin < po
- chfin: "chain Y \<Longrightarrow> \<exists>n. max_in_chain n Y"
+class chfin = po +
+ assumes chfin: "chain Y \<Longrightarrow> \<exists>n. max_in_chain n (Y :: nat => 'a::po)"
-axclass flat < pcpo
- ax_flat: "x \<sqsubseteq> y \<Longrightarrow> (x = \<bottom>) \<or> (x = y)"
+class flat = pcpo +
+ assumes ax_flat: "(x :: 'a::pcpo) \<sqsubseteq> y \<Longrightarrow> x = \<bottom> \<or> x = y"
text {* finite partial orders are chain-finite *}
@@ -310,11 +310,11 @@
text {* Discrete cpos *}
-axclass discrete_cpo < sq_ord
- discrete_cpo [simp]: "x \<sqsubseteq> y \<longleftrightarrow> x = y"
+class discrete_cpo = sq_ord +
+ assumes discrete_cpo [simp]: "x \<sqsubseteq> y \<longleftrightarrow> x = y"
-instance discrete_cpo < po
-by (intro_classes, simp_all)
+subclass (in discrete_cpo) po
+proof qed simp_all
text {* In a discrete cpo, every chain is constant *}
--- a/src/HOLCF/Porder.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Porder.thy Wed Jan 28 16:57:12 2009 +0100
@@ -10,7 +10,7 @@
subsection {* Type class for partial orders *}
-class sq_ord = type +
+class sq_ord =
fixes sq_le :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
notation
--- a/src/HOLCF/Tools/cont_proc.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Tools/cont_proc.ML Wed Jan 28 16:57:12 2009 +0100
@@ -7,7 +7,7 @@
val is_lcf_term: term -> bool
val cont_thms: term -> thm list
val all_cont_thms: term -> thm list
- val cont_tac: thm list ref -> int -> tactic
+ val cont_tac: int -> tactic
val cont_proc: theory -> simproc
val setup: theory -> theory
end;
@@ -15,15 +15,6 @@
structure ContProc: CONT_PROC =
struct
-structure ContProcData = TheoryDataFun
-(
- type T = thm list ref;
- val empty = ref [];
- val copy = I;
- val extend = I;
- fun merge _ _ = ref [];
-)
-
(** theory context references **)
val cont_K = @{thm cont_const};
@@ -107,26 +98,21 @@
conditional rewrite rule with the unsolved subgoals as premises.
*)
-fun cont_tac prev_cont_thms =
+val cont_tac =
let
val rules = [cont_K, cont_I, cont_R, cont_A, cont_L];
- fun old_cont_tac i thm =
- case !prev_cont_thms of
- [] => no_tac thm
- | (c::cs) => (prev_cont_thms := cs; rtac c i thm);
-
- fun new_cont_tac f' i thm =
+ fun new_cont_tac f' i =
case all_cont_thms f' of
- [] => no_tac thm
- | (c::cs) => (prev_cont_thms := cs; rtac c i thm);
+ [] => no_tac
+ | (c::cs) => rtac c i;
fun cont_tac_of_term (Const (@{const_name cont}, _) $ f) =
let
val f' = Const (@{const_name Abs_CFun}, dummyT) $ f;
in
if is_lcf_term f'
- then old_cont_tac ORELSE' new_cont_tac f'
+ then new_cont_tac f'
else REPEAT_ALL_NEW (resolve_tac rules)
end
| cont_tac_of_term _ = K no_tac;
@@ -139,8 +125,7 @@
fun solve_cont thy _ t =
let
val tr = instantiate' [] [SOME (cterm_of thy t)] Eq_TrueI;
- val prev_thms = ContProcData.get thy
- in Option.map fst (Seq.pull (cont_tac prev_thms 1 tr)) end
+ in Option.map fst (Seq.pull (cont_tac 1 tr)) end
in
fun cont_proc thy =
Simplifier.simproc thy "cont_proc" ["cont f"] solve_cont;
--- a/src/HOLCF/Tools/domain/domain_axioms.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Tools/domain/domain_axioms.ML Wed Jan 28 16:57:12 2009 +0100
@@ -111,10 +111,10 @@
fun infer_props thy = map (apsnd (FixrecPackage.legacy_infer_prop thy));
-fun add_axioms_i x = snd o PureThy.add_axioms (map Thm.no_attributes x);
+fun add_axioms_i x = snd o PureThy.add_axioms (map (Thm.no_attributes o apfst Binding.name) x);
fun add_axioms_infer axms thy = add_axioms_i (infer_props thy axms) thy;
-fun add_defs_i x = snd o (PureThy.add_defs false) (map Thm.no_attributes x);
+fun add_defs_i x = snd o (PureThy.add_defs false) (map (Thm.no_attributes o apfst Binding.name) x);
fun add_defs_infer defs thy = add_defs_i (infer_props thy defs) thy;
in (* local *)
--- a/src/HOLCF/Tools/domain/domain_extender.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Tools/domain/domain_extender.ML Wed Jan 28 16:57:12 2009 +0100
@@ -134,7 +134,7 @@
in
theorems_thy
|> Sign.add_path (Sign.base_name comp_dnam)
- |> (snd o (PureThy.add_thmss [(("rews", List.concat rewss @ take_rews), [])]))
+ |> (snd o (PureThy.add_thmss [((Binding.name "rews", List.concat rewss @ take_rews), [])]))
|> Sign.parent_path
end;
--- a/src/HOLCF/Tools/domain/domain_theorems.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Tools/domain/domain_theorems.ML Wed Jan 28 16:57:12 2009 +0100
@@ -607,7 +607,7 @@
in
thy
|> Sign.add_path (Sign.base_name dname)
- |> (snd o (PureThy.add_thmss (map Thm.no_attributes [
+ |> (snd o (PureThy.add_thmss (map (Thm.no_attributes o apfst Binding.name) [
("iso_rews" , iso_rews ),
("exhaust" , [exhaust] ),
("casedist" , [casedist]),
@@ -623,7 +623,7 @@
("injects" , injects ),
("copy_rews", copy_rews)])))
|> (snd o PureThy.add_thmss
- [(("match_rews", mat_rews), [Simplifier.simp_add])])
+ [((Binding.name "match_rews", mat_rews), [Simplifier.simp_add])])
|> Sign.parent_path
|> pair (iso_rews @ when_rews @ con_rews @ sel_rews @ dis_rews @
pat_rews @ dist_les @ dist_eqs @ copy_rews)
@@ -1000,7 +1000,7 @@
end; (* local *)
in thy |> Sign.add_path comp_dnam
- |> (snd o (PureThy.add_thmss (map Thm.no_attributes [
+ |> (snd o (PureThy.add_thmss (map (Thm.no_attributes o apfst Binding.name) [
("take_rews" , take_rews ),
("take_lemmas", take_lemmas),
("finites" , finites ),
--- a/src/HOLCF/Tools/fixrec_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Tools/fixrec_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -9,9 +9,9 @@
val legacy_infer_term: theory -> term -> term
val legacy_infer_prop: theory -> term -> term
val add_fixrec: bool -> (Attrib.binding * string) list list -> theory -> theory
- val add_fixrec_i: bool -> ((Binding.T * attribute list) * term) list list -> theory -> theory
+ val add_fixrec_i: bool -> ((binding * attribute list) * term) list list -> theory -> theory
val add_fixpat: Attrib.binding * string list -> theory -> theory
- val add_fixpat_i: (Binding.T * attribute list) * term list -> theory -> theory
+ val add_fixpat_i: (binding * attribute list) * term list -> theory -> theory
end;
structure FixrecPackage: FIXREC_PACKAGE =
@@ -96,7 +96,7 @@
val fixdefs = map (apsnd (legacy_infer_prop thy)) pre_fixdefs;
val (fixdef_thms, thy') =
- PureThy.add_defs false (map Thm.no_attributes fixdefs) thy;
+ PureThy.add_defs false (map (Thm.no_attributes o apfst Binding.name) fixdefs) thy;
val ctuple_fixdef_thm = foldr1 (fn (x,y) => @{thm cpair_equalI} OF [x,y]) fixdef_thms;
val ctuple_unfold = legacy_infer_term thy' (mk_trp (mk_ctuple lhss === mk_ctuple rhss));
@@ -114,7 +114,7 @@
in (n^"_unfold", thmL) :: unfolds ns thmR end;
val unfold_thms = unfolds names ctuple_unfold_thm;
val thms = ctuple_induct_thm :: unfold_thms;
- val (_, thy'') = PureThy.add_thms (map Thm.no_attributes thms) thy';
+ val (_, thy'') = PureThy.add_thms (map (Thm.no_attributes o apfst Binding.name) thms) thy';
in
(thy'', names, fixdef_thms, map snd unfold_thms)
end;
@@ -241,14 +241,14 @@
in
if strict then let (* only prove simp rules if strict = true *)
val eqn_blocks = unconcat lengths ((names ~~ eqn_ts') ~~ atts);
- val simps = List.concat (map (make_simps thy') (unfold_thms ~~ eqn_blocks));
- val (simp_thms, thy'') = PureThy.add_thms simps thy';
+ val simps = maps (make_simps thy') (unfold_thms ~~ eqn_blocks);
+ val (simp_thms, thy'') = PureThy.add_thms ((map o apfst o apfst) Binding.name simps) thy';
val simp_names = map (fn name => name^"_simps") cnames;
val simp_attribute = rpair [Simplifier.simp_add];
val simps' = map simp_attribute (simp_names ~~ unconcat lengths simp_thms);
in
- (snd o PureThy.add_thmss simps') thy''
+ (snd o PureThy.add_thmss ((map o apfst o apfst) Binding.name simps')) thy''
end
else thy'
end;
@@ -278,7 +278,7 @@
val ts = map (prep_term thy) strings;
val simps = map (fix_pat thy) ts;
in
- (snd o PureThy.add_thmss [((Binding.base_name name, simps), atts)]) thy
+ (snd o PureThy.add_thmss [((name, simps), atts)]) thy
end;
val add_fixpat = gen_add_fixpat Syntax.read_term_global Attrib.attribute;
--- a/src/HOLCF/Tools/pcpodef_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/HOLCF/Tools/pcpodef_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -97,12 +97,12 @@
theory'
|> Sign.add_path name
|> PureThy.add_thms
- ([(("adm_" ^ name, admissible'), []),
- (("cont_" ^ Rep_name, @{thm typedef_cont_Rep} OF cpo_thms'), []),
- (("cont_" ^ Abs_name, @{thm typedef_cont_Abs} OF cpo_thms'), []),
- (("lub_" ^ name, @{thm typedef_lub} OF cpo_thms'), []),
- (("thelub_" ^ name, @{thm typedef_thelub} OF cpo_thms'), []),
- (("compact_" ^ name, @{thm typedef_compact} OF cpo_thms'), [])])
+ ([((Binding.name ("adm_" ^ name), admissible'), []),
+ ((Binding.name ("cont_" ^ Rep_name), @{thm typedef_cont_Rep} OF cpo_thms'), []),
+ ((Binding.name ("cont_" ^ Abs_name), @{thm typedef_cont_Abs} OF cpo_thms'), []),
+ ((Binding.name ("lub_" ^ name), @{thm typedef_lub} OF cpo_thms'), []),
+ ((Binding.name ("thelub_" ^ name), @{thm typedef_thelub} OF cpo_thms'), []),
+ ((Binding.name ("compact_" ^ name), @{thm typedef_compact} OF cpo_thms'), [])])
|> snd
|> Sign.parent_path
end;
@@ -119,12 +119,12 @@
theory'
|> Sign.add_path name
|> PureThy.add_thms
- ([((Rep_name ^ "_strict", @{thm typedef_Rep_strict} OF pcpo_thms'), []),
- ((Abs_name ^ "_strict", @{thm typedef_Abs_strict} OF pcpo_thms'), []),
- ((Rep_name ^ "_strict_iff", @{thm typedef_Rep_strict_iff} OF pcpo_thms'), []),
- ((Abs_name ^ "_strict_iff", @{thm typedef_Abs_strict_iff} OF pcpo_thms'), []),
- ((Rep_name ^ "_defined", @{thm typedef_Rep_defined} OF pcpo_thms'), []),
- ((Abs_name ^ "_defined", @{thm typedef_Abs_defined} OF pcpo_thms'), [])
+ ([((Binding.name (Rep_name ^ "_strict"), @{thm typedef_Rep_strict} OF pcpo_thms'), []),
+ ((Binding.name (Abs_name ^ "_strict"), @{thm typedef_Abs_strict} OF pcpo_thms'), []),
+ ((Binding.name (Rep_name ^ "_strict_iff"), @{thm typedef_Rep_strict_iff} OF pcpo_thms'), []),
+ ((Binding.name (Abs_name ^ "_strict_iff"), @{thm typedef_Abs_strict_iff} OF pcpo_thms'), []),
+ ((Binding.name (Rep_name ^ "_defined"), @{thm typedef_Rep_defined} OF pcpo_thms'), []),
+ ((Binding.name (Abs_name ^ "_defined"), @{thm typedef_Abs_defined} OF pcpo_thms'), [])
])
|> snd
|> Sign.parent_path
--- a/src/Pure/Concurrent/future.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Concurrent/future.ML Wed Jan 28 16:57:12 2009 +0100
@@ -270,8 +270,24 @@
(* join *)
+local
+
fun get_result x = the_default (Exn.Exn (SYS_ERROR "unfinished future")) (peek x);
+fun join_next pending = (*requires SYNCHRONIZED*)
+ if forall is_finished pending then NONE
+ else
+ (case change_result queue Task_Queue.dequeue of
+ NONE => (worker_wait (); join_next pending)
+ | some => some);
+
+fun join_loop name pending =
+ (case SYNCHRONIZED name (fn () => join_next pending) of
+ NONE => ()
+ | SOME work => (execute name work; join_loop name pending));
+
+in
+
fun join_results xs =
if forall is_finished xs then map get_result xs
else uninterruptible (fn _ => fn () =>
@@ -280,12 +296,13 @@
val _ = Multithreading.self_critical () andalso
error "Cannot join future values within critical section";
- fun join_loop _ [] = ()
- | join_loop name deps =
+ fun join_deps _ [] = ()
+ | join_deps name deps =
(case SYNCHRONIZED name (fn () =>
change_result queue (Task_Queue.dequeue_towards deps)) of
NONE => ()
- | SOME (work, deps') => (execute name work; join_loop name deps'));
+ | SOME (work, deps') => (execute name work; join_deps name deps'));
+
val _ =
(case thread_data () of
NONE =>
@@ -299,14 +316,14 @@
val deps = map task_of pending;
val _ = SYNCHRONIZED "join" (fn () =>
(change queue (Task_Queue.depend deps task); notify_all ()));
- val _ = join_loop ("join_loop: " ^ name) deps;
- val _ =
- while not (forall is_finished pending)
- do SYNCHRONIZED "join_task" (fn () => worker_wait ());
+ val _ = join_deps ("join_deps: " ^ name) deps;
+ val _ = join_loop ("join_loop: " ^ name) (filter_out is_finished pending);
in () end);
in map get_result xs end) ();
+end;
+
fun join_result x = singleton join_results x;
fun join x = Exn.release (join_result x);
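
The join reorganisation above lets a joining thread keep executing queued tasks (join_loop) once its dependencies are drained, instead of sleeping in worker_wait; the caller-side interface is unchanged. A minimal sketch, assuming the module's forking primitive Future.fork and with compute_a/compute_b as placeholder computations:

  (* sketch only: fork two evaluations; the thread calling join_results
     now helps execute still-pending work rather than merely waiting *)
  val x = Future.fork (fn () => compute_a ());
  val y = Future.fork (fn () => compute_b ());
  val results = Future.join_results [x, y];
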
--- a/src/Pure/Concurrent/mailbox.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Concurrent/mailbox.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Concurrent/mailbox.ML
- ID: $Id$
Author: Makarius
Message exchange via mailbox, with non-blocking send (due to unbounded
--- a/src/Pure/Concurrent/simple_thread.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Concurrent/simple_thread.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Concurrent/simple_thread.ML
- ID: $Id$
Author: Makarius
Simplified thread operations.
--- a/src/Pure/Concurrent/synchronized.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Concurrent/synchronized.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Concurrent/synchronized.ML
- ID: $Id$
Author: Fabian Immler and Makarius
State variables with synchronized access.
--- a/src/Pure/General/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/ROOT.ML
- ID: $Id$
Library of general tools.
*)
--- a/src/Pure/General/alist.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/alist.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/alist.ML
- ID: $Id$
Author: Florian Haftmann, TU Muenchen
Association lists -- lists of (key, value) pairs.
--- a/src/Pure/General/balanced_tree.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/balanced_tree.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/balanced_tree.ML
- ID: $Id$
Author: Lawrence C Paulson and Makarius
Balanced binary trees.
--- a/src/Pure/General/basics.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/basics.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/basics.ML
- ID: $Id$
Author: Florian Haftmann and Makarius, TU Muenchen
Fundamental concepts.
--- a/src/Pure/General/binding.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/binding.ML Wed Jan 28 16:57:12 2009 +0100
@@ -6,6 +6,7 @@
signature BASIC_BINDING =
sig
+ type binding
val long_names: bool ref
val short_names: bool ref
val unique_names: bool ref
@@ -14,22 +15,21 @@
signature BINDING =
sig
include BASIC_BINDING
- type T
- val name_pos: string * Position.T -> T
- val name: string -> T
- val empty: T
- val map_base: (string -> string) -> T -> T
- val qualify: string -> T -> T
- val add_prefix: bool -> string -> T -> T
- val map_prefix: ((string * bool) list -> T -> T) -> T -> T
- val is_empty: T -> bool
- val base_name: T -> string
- val pos_of: T -> Position.T
- val dest: T -> (string * bool) list * string
+ val name_pos: string * Position.T -> binding
+ val name: string -> binding
+ val empty: binding
+ val map_base: (string -> string) -> binding -> binding
+ val qualify: string -> binding -> binding
+ val add_prefix: bool -> string -> binding -> binding
+ val map_prefix: ((string * bool) list -> binding -> binding) -> binding -> binding
+ val is_empty: binding -> bool
+ val base_name: binding -> string
+ val pos_of: binding -> Position.T
+ val dest: binding -> (string * bool) list * string
val separator: string
val is_qualified: string -> bool
- val display: T -> string
-end
+ val display: binding -> string
+end;
structure Binding : BINDING =
struct
@@ -54,7 +54,7 @@
(** binding representation **)
-datatype T = Binding of ((string * bool) list * string) * Position.T;
+datatype binding = Binding of ((string * bool) list * string) * Position.T;
(* (prefix components (with mandatory flag), base name, position) *)
fun name_pos (name, pos) = Binding (([], name), pos);
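
Since the representation type is now named binding throughout, client code refers to it unqualified; a minimal illustration using only operations from the signature above:

  (* sketch only: construct a binding and recover its base name *)
  val b : binding = Binding.name "my_thm";
  val s = Binding.base_name b;    (* "my_thm" *)
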
--- a/src/Pure/General/file.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/file.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/file.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
File system operations.
--- a/src/Pure/General/graph.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/graph.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/graph.ML
- ID: $Id$
Author: Markus Wenzel and Stefan Berghofer, TU Muenchen
Directed graphs.
--- a/src/Pure/General/heap.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/heap.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,6 +1,5 @@
(* Title: Pure/General/heap.ML
- ID: $Id$
- Author: Markus Wenzel, TU Muenchen
+ Author: Lawrence C Paulson and Markus Wenzel
Heaps over linearly ordered types. See also Chris Okasaki: "Purely
Functional Data Structures" (Chapter 3), Cambridge University Press,
--- a/src/Pure/General/integer.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/integer.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/integer.ML
- ID: $Id$
Author: Florian Haftmann, TU Muenchen
Unbounded integers.
--- a/src/Pure/General/name_space.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/name_space.ML Wed Jan 28 16:57:12 2009 +0100
@@ -3,9 +3,10 @@
Generic name spaces with declared and hidden entries. Unknown names
are considered global; no support for absolute addressing.
+Cf. Pure/General/binding.ML
*)
-type bstring = string; (*names to be bound*)
+type bstring = string; (*simple names to be bound -- legacy*)
type xstring = string; (*external names*)
signature NAME_SPACE =
@@ -31,8 +32,8 @@
val merge: T * T -> T
type naming
val default_naming: naming
- val declare: naming -> Binding.T -> T -> string * T
- val full_name: naming -> Binding.T -> string
+ val declare: naming -> binding -> T -> string * T
+ val full_name: naming -> binding -> string
val external_names: naming -> string -> string list
val path_of: naming -> string
val add_path: string -> naming -> naming
@@ -41,7 +42,7 @@
val sticky_prefix: string -> naming -> naming
type 'a table = T * 'a Symtab.table
val empty_table: 'a table
- val bind: naming -> Binding.T * 'a
+ val bind: naming -> binding * 'a
-> 'a table -> string * 'a table (*exception Symtab.DUP*)
val merge_tables: ('a * 'a -> bool) -> 'a table * 'a table -> 'a table
val join_tables: (string -> 'a * 'a -> 'a)
--- a/src/Pure/General/ord_list.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/ord_list.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/ord_list.ML
- ID: $Id$
Author: Makarius
Ordered lists without duplicates -- a light-weight representation of
--- a/src/Pure/General/output.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/output.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/output.ML
- ID: $Id$
Author: Makarius, Hagia Maria Sion Abbey (Jerusalem)
Output channels and timing messages.
--- a/src/Pure/General/path.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/path.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/path.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Abstract algebra of file paths (external encoding in Unix style).
--- a/src/Pure/General/pretty.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/pretty.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/pretty.ML
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Author: Markus Wenzel, TU Munich
--- a/src/Pure/General/print_mode.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/print_mode.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/print_mode.ML
- ID: $Id$
Author: Makarius
Generic print mode as thread-local value derived from global template;
--- a/src/Pure/General/properties.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/properties.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/properties.ML
- ID: $Id$
Author: Makarius
Property lists.
--- a/src/Pure/General/queue.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/queue.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/queue.ML
- ID: $Id$
Author: Makarius
Efficient queues.
--- a/src/Pure/General/scan.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/scan.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/scan.ML
- ID: $Id$
Author: Markus Wenzel and Tobias Nipkow, TU Muenchen
Generic scanners (for potentially infinite input).
--- a/src/Pure/General/secure.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/secure.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/secure.ML
- ID: $Id$
Author: Makarius
Secure critical operations.
--- a/src/Pure/General/seq.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/seq.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/seq.ML
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Author: Markus Wenzel, TU Munich
--- a/src/Pure/General/source.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/source.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/source.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Coalgebraic data sources -- efficient purely functional input streams.
--- a/src/Pure/General/stack.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/stack.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/stack.ML
- ID: $Id$
Author: Makarius
Non-empty stacks.
--- a/src/Pure/General/swing.scala Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/swing.scala Wed Jan 28 16:57:12 2009 +0100
@@ -10,9 +10,13 @@
object Swing
{
- def now(body: => Unit) =
- SwingUtilities.invokeAndWait(new Runnable { def run = body })
+ def now(body: => Unit) {
+ if (SwingUtilities.isEventDispatchThread) body
+ else SwingUtilities.invokeAndWait(new Runnable { def run = body })
+ }
- def later(body: => Unit) =
- SwingUtilities.invokeLater(new Runnable { def run = body })
+ def later(body: => Unit) {
+ if (SwingUtilities.isEventDispatchThread) body
+ else SwingUtilities.invokeLater(new Runnable { def run = body })
+ }
}
--- a/src/Pure/General/symbol.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/symbol.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/symbol.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Generalized characters with infinitely many named symbols.
--- a/src/Pure/General/symbol.scala Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/symbol.scala Wed Jan 28 16:57:12 2009 +0100
@@ -78,9 +78,9 @@
/** Symbol interpretation **/
- class Interpretation(isabelle_system: IsabelleSystem) {
-
- private var symbols = new HashMap[String, HashMap[String, String]]
+ class Interpretation(symbol_decls: Iterator[String])
+ {
+ private val symbols = new HashMap[String, HashMap[String, String]]
private var decoder: Recoder = null
private var encoder: Recoder = null
@@ -94,10 +94,11 @@
private val blank_pattern = compile(""" \s+ """)
private val key_pattern = compile(""" (.+): """)
- private def read_line(line: String) = {
- def err() = error("Bad symbol specification (line " + line + ")")
+ private def read_decl(decl: String) = {
+ def err() = error("Bad symbol declaration: " + decl)
- def read_props(props: List[String], tab: HashMap[String, String]): Unit = {
+ def read_props(props: List[String], tab: HashMap[String, String])
+ {
props match {
case Nil => ()
case _ :: Nil => err()
@@ -112,8 +113,8 @@
}
}
- if (!empty_pattern.matcher(line).matches) {
- blank_pattern.split(line).toList match {
+ if (!empty_pattern.matcher(decl).matches) {
+ blank_pattern.split(decl).toList match {
case Nil => err()
case symbol :: props => {
val tab = new HashMap[String, String]
@@ -124,13 +125,6 @@
}
}
- private def read_symbols(path: String) = {
- val file = new File(isabelle_system.platform_path(path))
- if (file.canRead) {
- for (line <- Source.fromFile(file).getLines) read_line(line)
- }
- }
-
/* init tables */
@@ -154,9 +148,7 @@
/* constructor */
- read_symbols("$ISABELLE_HOME/etc/symbols")
- read_symbols("$ISABELLE_HOME_USER/etc/symbols")
+ symbol_decls.foreach(read_decl)
init_recoders()
}
-
}
--- a/src/Pure/General/symbol_pos.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/symbol_pos.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/symbol_pos.ML
- ID: $Id$
Author: Makarius
Symbols with explicit position information.
--- a/src/Pure/General/table.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/table.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/table.ML
- ID: $Id$
Author: Markus Wenzel and Stefan Berghofer, TU Muenchen
Generic tables. Efficient purely functional implementation using
--- a/src/Pure/General/url.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/url.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/url.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Basic URLs, see RFC 1738 and RFC 2396.
--- a/src/Pure/General/xml.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/xml.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/xml.ML
- ID: $Id$
Author: David Aspinall, Stefan Berghofer and Markus Wenzel
Basic support for XML.
--- a/src/Pure/General/yxml.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/yxml.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/General/yxml.ML
- ID: $Id$
Author: Makarius
Efficient text representation of XML trees using extra characters X
--- a/src/Pure/General/yxml.scala Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/General/yxml.scala Wed Jan 28 16:57:12 2009 +0100
@@ -59,7 +59,7 @@
val s = source.toString
val i = s.indexOf('=')
if (i <= 0) err_attribute()
- (s.substring(0, i), s.substring(i + 1))
+ (s.substring(0, i).intern, s.substring(i + 1))
}
@@ -91,7 +91,7 @@
if (chunk == Y_string) pop()
else {
chunks(Y, chunk).toList match {
- case "" :: name :: atts => push(name.toString, atts.map(parse_attrib))
+ case "" :: name :: atts => push(name.toString.intern, atts.map(parse_attrib))
case txts => for (txt <- txts) add(XML.Text(txt.toString))
}
}
--- a/src/Pure/IsaMakefile Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/IsaMakefile Wed Jan 28 16:57:12 2009 +0100
@@ -41,7 +41,7 @@
Isar/expression.ML Isar/find_theorems.ML Isar/isar.ML \
Isar/isar_document.ML Isar/isar_cmd.ML Isar/isar_syn.ML \
Isar/local_defs.ML Isar/local_syntax.ML Isar/local_theory.ML \
- Isar/locale.ML Isar/method.ML Isar/net_rules.ML Isar/old_locale.ML \
+ Isar/locale.ML Isar/method.ML Isar/net_rules.ML \
Isar/object_logic.ML Isar/obtain.ML Isar/outer_keyword.ML \
Isar/outer_lex.ML Isar/outer_parse.ML Isar/outer_syntax.ML \
Isar/overloading.ML Isar/proof.ML Isar/proof_context.ML \
@@ -75,7 +75,7 @@
Syntax/syn_trans.ML Syntax/syntax.ML Syntax/type_ext.ML Thy/html.ML \
Thy/latex.ML Thy/present.ML Thy/term_style.ML Thy/thm_deps.ML \
Thy/thy_header.ML Thy/thy_info.ML Thy/thy_load.ML Thy/thy_output.ML \
- Thy/thy_syntax.ML Tools/ROOT.ML Tools/invoke.ML \
+ Thy/thy_syntax.ML Tools/ROOT.ML \
Tools/isabelle_process.ML Tools/named_thms.ML Tools/xml_syntax.ML \
assumption.ML axclass.ML codegen.ML config.ML conjunction.ML \
consts.ML context.ML context_position.ML conv.ML defs.ML display.ML \
@@ -126,9 +126,9 @@
SCALA_FILES = General/event_bus.scala General/markup.scala \
General/position.scala General/swing.scala General/symbol.scala \
General/xml.scala General/yxml.scala Isar/isar.scala \
- Isar/outer_keyword.scala Thy/thy_header.scala \
- Tools/isabelle_process.scala Tools/isabelle_syntax.scala \
- Tools/isabelle_system.scala
+ Isar/isar_document.scala Isar/outer_keyword.scala \
+ Thy/thy_header.scala Tools/isabelle_process.scala \
+ Tools/isabelle_syntax.scala Tools/isabelle_system.scala
SCALA_TARGET = $(ISABELLE_HOME)/lib/classes/Pure.jar
--- a/src/Pure/Isar/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -53,7 +53,6 @@
(*local theories and targets*)
use "local_theory.ML";
use "overloading.ML";
-use "old_locale.ML";
use "locale.ML";
use "class_target.ML";
use "theory_target.ML";
--- a/src/Pure/Isar/antiquote.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/antiquote.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/antiquote.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Text with antiquotations of inner items (terms, types etc.).
--- a/src/Pure/Isar/args.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/args.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/args.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Parsing with implicit value assignment. Concrete argument syntax of
@@ -35,7 +34,7 @@
val name_source: T list -> string * T list
val name_source_position: T list -> (SymbolPos.text * Position.T) * T list
val name: T list -> string * T list
- val binding: T list -> Binding.T * T list
+ val binding: T list -> binding * T list
val alt_name: T list -> string * T list
val symbol: T list -> string * T list
val liberal_name: T list -> string * T list
@@ -66,8 +65,8 @@
val parse1: (string -> bool) -> OuterLex.token list -> T list * OuterLex.token list
val attribs: (string -> string) -> T list -> src list * T list
val opt_attribs: (string -> string) -> T list -> src list * T list
- val thm_name: (string -> string) -> string -> T list -> (Binding.T * src list) * T list
- val opt_thm_name: (string -> string) -> string -> T list -> (Binding.T * src list) * T list
+ val thm_name: (string -> string) -> string -> T list -> (binding * src list) * T list
+ val opt_thm_name: (string -> string) -> string -> T list -> (binding * src list) * T list
val syntax: string -> ('b * T list -> 'a * ('b * T list)) -> src -> 'b -> 'a * 'b
val context_syntax: string -> (Context.generic * T list -> 'a * (Context.generic * T list)) ->
src -> Proof.context -> 'a * Proof.context
--- a/src/Pure/Isar/attrib.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/attrib.ML Wed Jan 28 16:57:12 2009 +0100
@@ -7,7 +7,7 @@
signature ATTRIB =
sig
type src = Args.src
- type binding = Binding.T * src list
+ type binding = binding * src list
val empty_binding: binding
val print_attributes: theory -> unit
val intern: theory -> xstring -> string
@@ -54,7 +54,7 @@
type src = Args.src;
-type binding = Binding.T * src list;
+type binding = binding * src list;
val empty_binding: binding = (Binding.empty, []);
--- a/src/Pure/Isar/auto_bind.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/auto_bind.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/auto_bind.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Automatic bindings of Isar text elements.
--- a/src/Pure/Isar/calculation.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/calculation.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/calculation.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Generic calculational proofs.
@@ -98,8 +97,8 @@
("sym", sym_att, "declaration of symmetry rule"),
("symmetric", Attrib.no_args symmetric, "resolution with symmetry rule")] #>
PureThy.add_thms
- [(("", transitive_thm), [trans_add]),
- (("", symmetric_thm), [sym_add])] #> snd));
+ [((Binding.empty, transitive_thm), [trans_add]),
+ ((Binding.empty, symmetric_thm), [sym_add])] #> snd));
--- a/src/Pure/Isar/class.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/class.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,7 +1,7 @@
(* Title: Pure/Isar/class.ML
Author: Florian Haftmann, TU Muenchen
-Type classes derived from primitive axclasses and locales - interfaces
+Type classes derived from primitive axclasses and locales - interfaces.
*)
signature CLASS =
@@ -33,11 +33,15 @@
val empty_ctxt = ProofContext.init thy;
(* instantiation of canonical interpretation *)
- (*FIXME inst_morph should be calculated manually and not instantiate constraint*)
- val aT = TFree ("'a", base_sort);
+ val aT = TFree (Name.aT, base_sort);
+ val param_map_const = (map o apsnd) Const param_map;
+ val param_map_inst = (map o apsnd)
+ (Const o apsnd (map_atyps (K aT))) param_map;
+ val const_morph = Element.inst_morphism thy
+ (Symtab.empty, Symtab.make param_map_inst);
val (([props], [(_, inst_morph)], export_morph), _) = empty_ctxt
|> Expression.cert_goal_expression ([(class, (("", false),
- Expression.Named ((map o apsnd) Const param_map)))], []);
+ Expression.Named param_map_const))], []);
(* witness for canonical interpretation *)
val prop = try the_single props;
@@ -63,20 +67,18 @@
(* assm_intro *)
fun prove_assm_intro thm =
let
- val prop = thm |> Thm.prop_of |> Logic.unvarify
- |> Morphism.term (inst_morph $> eq_morph)
- |> (map_types o map_atyps) (K aT);
- fun tac ctxt = LocalDefs.unfold_tac ctxt (map Thm.symmetric defs) (*FIXME*)
- THEN ALLGOALS (ProofContext.fact_tac [thm]);
- in Goal.prove_global thy [] [] prop (tac o #context) end;
+ val ((_, [thm']), _) = Variable.import_thms true [thm] empty_ctxt;
+ val thm'' = Morphism.thm (const_morph $> eq_morph) thm';
+ val tac = ALLGOALS (ProofContext.fact_tac [thm'']);
+ in Goal.prove_global thy [] [] (Thm.prop_of thm'') (K tac) end;
val assm_intro = Option.map prove_assm_intro
(fst (Locale.intros_of thy class));
(* of_class *)
val of_class_prop_concl = Logic.mk_inclass (aT, class);
val of_class_prop = case prop of NONE => of_class_prop_concl
- | SOME prop => Logic.mk_implies (Morphism.term inst_morph prop,
- of_class_prop_concl) |> (map_types o map_atyps) (K aT)
+ | SOME prop => Logic.mk_implies (Morphism.term const_morph
+ ((map_types o map_atyps) (K aT) prop), of_class_prop_concl);
val sup_of_classes = map (snd o rules thy) sups;
val loc_axiom_intros = map Drule.standard' (Locale.axioms_of thy class);
val axclass_intro = #intro (AxClass.get_info thy class);
@@ -89,44 +91,99 @@
in (base_morph, morph, export_morph, axiom, assm_intro, of_class) end;
-fun gen_class_spec prep_class process_decl thy raw_supclasses raw_elems =
+val reject_bcd_etc = (map o map_atyps) (fn T as TFree (v, sort) =>
+ if v = Name.aT then T
+ else error ("No type variable other than " ^ Name.aT ^ " allowed in class specification")
+ | T => T);
+
+fun singleton_fixate thy algebra Ts =
let
- (*FIXME 2009 simplify*)
- val supclasses = map (prep_class thy) raw_supclasses;
- val supsort = Sign.minimize_sort thy supclasses;
- val (sups, bases) = List.partition (is_class thy) supsort;
- val base_sort = if null sups then supsort else
- Library.foldr (Sorts.inter_sort (Sign.classes_of thy))
- (map (base_sort thy) sups, bases);
- val supparams = (map o apsnd) (snd o snd) (these_params thy sups);
+ fun extract f = (fold o fold_atyps) f Ts [];
+ val tfrees = extract
+ (fn TFree (v, sort) => insert (op =) (v, sort) | _ => I);
+ val inferred_sort = extract
+ (fn TVar (_, sort) => curry (Sorts.inter_sort algebra) sort | _ => I);
+ val fixate_sort = if null tfrees then inferred_sort
+ else let val a_sort = (snd o the_single) tfrees
+ in if Sorts.sort_le algebra (a_sort, inferred_sort)
+ then Sorts.inter_sort algebra (a_sort, inferred_sort)
+ else error ("Type inference imposes additional sort constraint "
+ ^ Syntax.string_of_sort_global thy inferred_sort
+ ^ " of type parameter " ^ Name.aT ^ " of sort "
+ ^ Syntax.string_of_sort_global thy a_sort)
+ end
+ in (map o map_atyps) (K (TFree (Name.aT, fixate_sort))) Ts end;
+
+fun add_typ_check level name f = Context.proof_map (Syntax.add_typ_check level name (fn xs => fn ctxt =>
+ let val xs' = f xs in if eq_list (op =) (xs, xs') then NONE else SOME (xs', ctxt) end));
+
+fun add_tfrees_of_element (Element.Fixes fxs) = fold (fn (_, SOME T, _) => Term.add_tfreesT T
+ | _ => I) fxs
+ | add_tfrees_of_element (Element.Constrains cnstrs) = fold (Term.add_tfreesT o snd) cnstrs
+ | add_tfrees_of_element (Element.Assumes assms) = fold (fold (fn (t, ts) =>
+ Term.add_tfrees t #> fold Term.add_tfrees ts) o snd) assms
+ | add_tfrees_of_element _ = I;
+
+fun fork_syn (Element.Fixes xs) =
+ fold_map (fn (c, ty, syn) => cons (Binding.base_name c, syn) #> pair (c, ty, NoSyn)) xs
+ #>> Element.Fixes
+ | fork_syn x = pair x;
+
+fun prep_class_spec prep_class prep_decl thy raw_supclasses raw_elems =
+ let
+ (* prepare import *)
+ val inter_sort = curry (Sorts.inter_sort (Sign.classes_of thy));
+ val sups = map (prep_class thy) raw_supclasses
+ |> Sign.minimize_sort thy;
+ val _ = case filter_out (is_class thy) sups
+ of [] => ()
+ | no_classes => error ("These are not classes: " ^ commas (map quote no_classes));
+ val supparams = (map o apsnd) (snd o snd) (these_params thy sups);
val supparam_names = map fst supparams;
val _ = if has_duplicates (op =) supparam_names
then error ("Duplicate parameter(s) in superclasses: "
^ (commas o map quote o duplicates (op =)) supparam_names)
else ();
-
val supexpr = (map (fn sup => (sup, (("", false), Expression.Positional [])))
sups, []);
+ val given_basesort = fold inter_sort (map (base_sort thy) sups) [];
+
+ (* infer types and base sort *)
+ val base_constraints = (map o apsnd)
+ (map_type_tfree (K (TVar ((Name.aT, 0), given_basesort))) o fst o snd)
+ (these_operations thy sups);
+ val ((_, _, inferred_elems), _) = ProofContext.init thy
+ |> fold (ProofContext.add_const_constraint o apsnd SOME) base_constraints
+ |> redeclare_operations thy sups
+ |> add_typ_check 10 "reject_bcd_etc" reject_bcd_etc
+ |> add_typ_check ~10 "singleton_fixate" (singleton_fixate thy (Sign.classes_of thy))
+ |> prep_decl supexpr raw_elems;
+ (*FIXME check for *all* side conditions here, extra check function for elements,
+ less side-condition checks in check phase*)
+ val base_sort = if null inferred_elems then given_basesort else
+ case fold add_tfrees_of_element inferred_elems []
+ of [] => error "No type variable in class specification"
+ | [(_, sort)] => sort
+ | _ => error "Multiple type variables in class specification"
+ val sup_sort = inter_sort base_sort sups
+
+ (* process elements as class specification *)
+ val begin_ctxt = begin sups base_sort
+ #> fold (Variable.declare_constraints o Free) ((map o apsnd o map_atyps)
+ (K (TFree (Name.aT, base_sort))) supparams)
+ (*FIXME should constraints be issued in begin?*)
+ val ((_, _, syntax_elems), _) = ProofContext.init thy
+ |> begin_ctxt
+ |> Expression.cert_declaration supexpr inferred_elems;
+ val (elems, global_syntax) = fold_map fork_syn syntax_elems [];
val constrain = Element.Constrains ((map o apsnd o map_atyps)
(K (TFree (Name.aT, base_sort))) supparams);
(*FIXME 2009 perhaps better: control type variable by explicit
parameter instantiation of import expression*)
- val begin_ctxt = begin sups base_sort
- #> fold (Variable.declare_constraints o Free) ((map o apsnd o map_atyps)
- (K (TFree (Name.aT, base_sort))) supparams) (*FIXME
- should constraints be issued in begin?*)
- val ((_, _, syntax_elems), _) = ProofContext.init thy
- |> begin_ctxt
- |> process_decl supexpr raw_elems;
- fun fork_syn (Element.Fixes xs) =
- fold_map (fn (c, ty, syn) => cons (Binding.base_name c, syn) #> pair (c, ty, NoSyn)) xs
- #>> Element.Fixes
- | fork_syn x = pair x;
- val (elems, global_syntax) = fold_map fork_syn syntax_elems [];
- in (((sups, supparam_names), (supsort, base_sort, supexpr)), (constrain :: elems, global_syntax)) end;
+ in (((sups, supparam_names), (sup_sort, base_sort, supexpr)), (constrain :: elems, global_syntax)) end;
-val cert_class_spec = gen_class_spec (K I) Expression.cert_declaration;
-val read_class_spec = gen_class_spec Sign.intern_class Expression.cert_read_declaration;
+val cert_class_spec = prep_class_spec (K I) Expression.cert_declaration;
+val read_class_spec = prep_class_spec Sign.intern_class Expression.cert_read_declaration;
fun add_consts bname class base_sort sups supparams global_syntax thy =
let
@@ -218,51 +275,30 @@
fun gen_subclass prep_class do_proof raw_sup lthy =
let
val thy = ProofContext.theory_of lthy;
- val sup = prep_class thy raw_sup;
- val sub = case TheoryTarget.peek lthy
- of {is_class = false, ...} => error "Not a class context"
+ val proto_sup = prep_class thy raw_sup;
+ val proto_sub = case TheoryTarget.peek lthy
+ of {is_class = false, ...} => error "Not in a class context"
| {target, ...} => target;
-
- val _ = if Sign.subsort thy ([sup], [sub])
- then error ("Class " ^ Syntax.string_of_sort lthy [sup]
- ^ " is subclass of class " ^ Syntax.string_of_sort lthy [sub])
- else ();
- val sub_params = map fst (these_params thy [sub]);
- val sup_params = map fst (these_params thy [sup]);
- val err_params = subtract (op =) sub_params sup_params;
- val _ = if null err_params then [] else
- error ("Class " ^ Syntax.string_of_sort lthy [sub] ^ " lacks parameter(s) " ^
- commas_quote err_params ^ " of " ^ Syntax.string_of_sort lthy [sup]);
+ val (sub, sup) = AxClass.cert_classrel thy (proto_sub, proto_sup)
val expr = ([(sup, (("", false), Expression.Positional []))], []);
- val (([props], _, _), goal_ctxt) =
+ val (([props], deps, export), goal_ctxt) =
Expression.cert_goal_expression expr lthy;
val some_prop = try the_single props;
+ val some_dep_morph = try the_single (map snd deps);
+ fun after_qed some_wit =
+ ProofContext.theory (register_subclass (sub, sup)
+ some_dep_morph some_wit export)
+ #> ProofContext.theory_of #> TheoryTarget.init (SOME sub);
+ in do_proof after_qed some_prop goal_ctxt end;
- fun tac some_thm = ALLGOALS (ProofContext.fact_tac (the_list some_thm));
- fun prove_sublocale some_thm =
- Expression.sublocale sub expr
- #> Proof.global_terminal_proof
- (Method.Basic (K (Method.SIMPLE_METHOD (tac some_thm)), Position.none), NONE)
- #> ProofContext.theory_of;
- fun after_qed some_thm =
- LocalTheory.theory (register_subclass (sub, sup) some_thm)
- #> is_some some_thm ? LocalTheory.theory (prove_sublocale some_thm)
- (*FIXME should also go to register_subclass*)
- #> ProofContext.theory_of
- #> TheoryTarget.init (SOME sub);
- in do_proof after_qed some_prop lthy end;
+fun user_proof after_qed some_prop =
+ Element.witness_proof (after_qed o try the_single o the_single)
+ [the_list some_prop];
-fun user_proof after_qed NONE =
- Proof.theorem_i NONE (K (after_qed NONE)) [[]]
- | user_proof after_qed (SOME prop) =
- Proof.theorem_i NONE (after_qed o try the_single o the_single) [[(prop, [])]];
-
-fun tactic_proof tac after_qed NONE lthy =
- after_qed NONE lthy
- | tactic_proof tac after_qed (SOME prop) lthy =
- after_qed (SOME (Goal.prove (LocalTheory.target_of lthy) [] [] prop
- (K tac))) lthy;
+fun tactic_proof tac after_qed some_prop ctxt =
+ after_qed (Option.map
+ (fn prop => Element.prove_witness ctxt prop tac) some_prop) ctxt;
in
--- a/src/Pure/Isar/class_target.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/class_target.ML Wed Jan 28 16:57:12 2009 +0100
@@ -10,14 +10,15 @@
val register: class -> class list -> ((string * typ) * (string * typ)) list
-> sort -> morphism -> thm option -> thm option -> thm
-> theory -> theory
- val register_subclass: class * class -> thm option
- -> theory -> theory
+ val register_subclass: class * class -> morphism option -> Element.witness option
+ -> morphism -> theory -> theory
val is_class: theory -> class -> bool
val base_sort: theory -> class -> sort
val rules: theory -> class -> thm option * thm
val these_params: theory -> sort -> (string * (class * (string * typ))) list
val these_defs: theory -> sort -> thm list
+ val these_operations: theory -> sort -> (string * (class * (typ * term))) list
val print_classes: theory -> unit
val begin: class list -> sort -> Proof.context -> Proof.context
@@ -28,6 +29,7 @@
-> (string * mixfix) * term -> theory -> theory
val class_prefix: string -> string
val refresh_syntax: class -> Proof.context -> Proof.context
+ val redeclare_operations: theory -> sort -> Proof.context -> Proof.context
(*instances*)
val init_instantiation: string list * (string * sort) list * sort
@@ -54,8 +56,6 @@
-> (Attrib.binding * string list) list
-> theory -> class * theory
val classrel_cmd: xstring * xstring -> theory -> Proof.state
-
- (*old instance layer*)
val instance_arity: (theory -> theory) -> arity -> theory -> Proof.state
val instance_arity_cmd: bstring * xstring list * xstring -> theory -> Proof.state
end;
@@ -253,7 +253,6 @@
in fold amend (heritage thy [class]) thy end;
fun register_operation class (c, (t, some_def)) thy =
- (*FIXME 2009 does this still also work for abbrevs?*)
let
val base_sort = base_sort thy class;
val prep_typ = map_type_tfree
@@ -270,16 +269,14 @@
|> is_some some_def ? activate_defs class (the_list some_def)
end;
-fun register_subclass (sub, sup) thms thy =
- (*FIXME should also add subclass interpretation*)
+fun register_subclass (sub, sup) some_dep_morph some_wit export thy =
let
- val of_class = (snd o rules thy) sup;
- val intro = case thms
- of SOME thm => Drule.standard' (of_class OF [Drule.standard' thm])
- | NONE => Thm.instantiate ([pairself (Thm.ctyp_of thy o TVar o pair (Name.aT, 0))
- ([], [sub])], []) of_class;
- val classrel = (intro OF (the_list o fst o rules thy) sub)
- |> Thm.close_derivation;
+ val intros = (snd o rules thy) sup :: map_filter I
+ [Option.map (Drule.standard' o Element.conclude_witness) some_wit,
+ (fst o rules thy) sub];
+ val tac = EVERY (map (TRYALL o Tactic.rtac) intros);
+ val classrel = Goal.prove_global thy [] [] (Logic.mk_classrel (sub, sup))
+ (K tac);
val diff_sort = Sign.complete_sort thy [sup]
|> subtract (op =) (Sign.complete_sort thy [sub]);
in
@@ -287,6 +284,11 @@
|> AxClass.add_classrel classrel
|> ClassData.map (Graph.add_edge (sub, sup))
|> activate_defs sub (these_defs thy diff_sort)
+ |> fold (fn dep_morph => Locale.add_dependency sub (sup,
+ dep_morph $> Element.satisfy_morphism (the_list some_wit) $> export))
+ (the_list some_dep_morph)
+ |> (fn thy => fold_rev Locale.activate_global_facts
+ (Locale.get_registrations thy) thy)
end;
@@ -294,19 +296,23 @@
(* class context syntax *)
-fun synchronize_class_syntax sups base_sort ctxt =
+fun these_unchecks thy =
+ map (fn (c, (_, (ty, t))) => (t, Const (c, ty))) o these_operations thy;
+
+fun redeclare_const thy c =
+ let val b = Sign.base_name c
+ in Sign.intern_const thy b = c ? Variable.declare_const (b, c) end;
+
+fun synchronize_class_syntax sort base_sort ctxt =
let
val thy = ProofContext.theory_of ctxt;
val algebra = Sign.classes_of thy;
- val operations = these_operations thy sups;
+ val operations = these_operations thy sort;
fun subst_class_typ sort = map_type_tfree (K (TVar ((Name.aT, 0), sort)));
val primary_constraints =
(map o apsnd) (subst_class_typ base_sort o fst o snd) operations;
val secondary_constraints =
(map o apsnd) (fn (class, (ty, _)) => subst_class_typ [class] ty) operations;
- fun declare_const (c, _) =
- let val b = Sign.base_name c
- in Sign.intern_const thy b = c ? Variable.declare_const (b, c) end;
fun improve (c, ty) = (case AList.lookup (op =) primary_constraints c
of SOME ty' => (case try (Type.raw_match (ty', ty)) Vartab.empty
of SOME tyenv => (case Vartab.lookup tyenv (Name.aT, 0)
@@ -319,10 +325,10 @@
| NONE => NONE)
| NONE => NONE)
fun subst (c, ty) = Option.map snd (AList.lookup (op =) operations c);
- val unchecks = map (fn (c, (_, (ty, t))) => (t, Const (c, ty))) operations;
+ val unchecks = these_unchecks thy sort;
in
ctxt
- |> fold declare_const primary_constraints
+ |> fold (redeclare_const thy o fst) primary_constraints
|> Overloading.map_improvable_syntax (K (((primary_constraints, secondary_constraints),
(((improve, subst), true), unchecks)), false))
|> Overloading.set_primary_constraints
@@ -334,11 +340,14 @@
val base_sort = base_sort thy class;
in synchronize_class_syntax [class] base_sort ctxt end;
-fun begin sups base_sort ctxt =
+fun redeclare_operations thy sort =
+ fold (redeclare_const thy o fst) (these_operations thy sort);
+
+fun begin sort base_sort ctxt =
ctxt
|> Variable.declare_term
(Logic.mk_type (TFree (Name.aT, base_sort)))
- |> synchronize_class_syntax sups base_sort
+ |> synchronize_class_syntax sort base_sort
|> Overloading.add_improvable_syntax;
fun init class thy =
@@ -353,52 +362,42 @@
fun declare class pos ((c, mx), dict) thy =
let
- val prfx = class_prefix class;
- val thy' = thy |> Sign.add_path prfx;
- (*FIXME 2009 use proper name morphism*)
- val morph = morphism thy' class;
- val params = map (apsnd fst o snd) (these_params thy' [class]);
-
- val c' = Sign.full_bname thy' c;
+ val morph = morphism thy class;
+ val b = Morphism.binding morph (Binding.name c);
+ val b_def = Morphism.binding morph (Binding.name (c ^ "_dict"));
+ val c' = Sign.full_name thy b;
val dict' = Morphism.term morph dict;
val ty' = Term.fastype_of dict';
- val ty'' = Type.strip_sorts ty';
- (*FIXME 2009 the tinkering with theorems here is a mess*)
- val def_eq = Logic.mk_equals (Const (c', ty'), dict');
- fun get_axiom thy = ((Thm.varifyT o Thm.axiom thy) c', thy);
+ val def_eq = Logic.mk_equals (Const (c', ty'), dict')
+ |> map_types Type.strip_sorts;
in
- thy'
- |> Sign.declare_const pos ((Binding.name c, ty''), mx) |> snd
- |> Thm.add_def false false (c, def_eq) (*FIXME 2009 name of theorem*)
- (*FIXME 2009 add_def should accept binding*)
- |>> Thm.symmetric
- ||>> get_axiom
- |-> (fn (def, def') => register_operation class (c', (dict', SOME (Thm.symmetric def')))
- #> PureThy.store_thm (c ^ "_raw", def') (*FIXME 2009 name of theorem*)
- (*FIXME 2009 store_thm etc. should accept binding*)
- #> snd)
- |> Sign.restore_naming thy
+ thy
+ |> Sign.declare_const pos ((b, Type.strip_sorts ty'), mx)
+ |> snd
+ |> Thm.add_def false false (b_def, def_eq)
+ |>> Thm.varifyT
+ |-> (fn def_thm => PureThy.store_thm (b_def, def_thm)
+ #> snd
+ #> register_operation class (c', (dict', SOME (Thm.symmetric def_thm))))
|> Sign.add_const_constraint (c', SOME ty')
end;
fun abbrev class prmode pos ((c, mx), rhs) thy =
let
- val prfx = class_prefix class;
- val thy' = thy |> Sign.add_path prfx;
-
- val unchecks = map (fn (c, (_, (ty, t))) => (t, Const (c, ty)))
- (these_operations thy [class]);
- val c' = Sign.full_bname thy' c;
+ val morph = morphism thy class;
+ val unchecks = these_unchecks thy [class];
+ val b = Morphism.binding morph (Binding.name c);
+ val c' = Sign.full_name thy b;
val rhs' = Pattern.rewrite_term thy unchecks [] rhs;
- val rhs'' = map_types Logic.varifyT rhs';
val ty' = Term.fastype_of rhs';
+ val rhs'' = map_types ((*Type.strip_sorts o *)Logic.varifyT) rhs';
in
- thy'
- |> Sign.add_abbrev (#1 prmode) pos (Binding.name c, map_types Type.strip_sorts rhs'') |> snd
+ thy
+ |> Sign.add_abbrev (#1 prmode) pos (b, rhs'')
+ |> snd
|> Sign.add_const_constraint (c', SOME ty')
|> Sign.notation true prmode [(Const (c', ty'), mx)]
|> not (#1 prmode = PrintMode.input) ? register_operation class (c', (rhs', NONE))
- |> Sign.restore_naming thy
end;
@@ -495,7 +494,8 @@
let
val _ = if null tycos then error "At least one arity must be given" else ();
val params = these_params thy sort;
- fun get_param tyco (param, (_, (c, ty))) = if can (AxClass.param_of_inst thy) (c, tyco)
+ fun get_param tyco (param, (_, (c, ty))) =
+ if can (AxClass.param_of_inst thy) (c, tyco)
then NONE else SOME ((c, tyco),
(param ^ "_" ^ type_name tyco, map_atyps (K (Type (tyco, map TFree vs))) ty));
val inst_params = map_product get_param tycos params |> map_filter I;
@@ -607,8 +607,7 @@
end;
fun default_intro_tac ctxt [] =
- intro_classes_tac [] ORELSE Old_Locale.intro_locales_tac true ctxt [] ORELSE
- Locale.intro_locales_tac true ctxt []
+ intro_classes_tac [] ORELSE Locale.intro_locales_tac true ctxt []
| default_intro_tac _ _ = no_tac;
fun default_tac rules ctxt facts =
--- a/src/Pure/Isar/constdefs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/constdefs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -8,12 +8,12 @@
signature CONSTDEFS =
sig
- val add_constdefs: (Binding.T * string option) list *
- ((Binding.T * string option * mixfix) option *
+ val add_constdefs: (binding * string option) list *
+ ((binding * string option * mixfix) option *
(Attrib.binding * string)) list -> theory -> theory
- val add_constdefs_i: (Binding.T * typ option) list *
- ((Binding.T * typ option * mixfix) option *
- ((Binding.T * attribute list) * term)) list -> theory -> theory
+ val add_constdefs_i: (binding * typ option) list *
+ ((binding * typ option * mixfix) option *
+ ((binding * attribute list) * term)) list -> theory -> theory
end;
structure Constdefs: CONSTDEFS =
@@ -52,7 +52,7 @@
val thy' =
thy
|> Sign.add_consts_i [(c, T, mx)]
- |> PureThy.add_defs false [((name, def), atts)]
+ |> PureThy.add_defs false [((Binding.name name, def), atts)]
|-> (fn [thm] => Code.add_default_eqn thm);
in ((c, T), thy') end;
--- a/src/Pure/Isar/context_rules.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/context_rules.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/context_rules.ML
- ID: $Id$
Author: Stefan Berghofer and Markus Wenzel, TU Muenchen
Declarations of intro/elim/dest rules in Pure (see also
@@ -199,7 +198,7 @@
val dest_query = rule_add elim_queryK Tactic.make_elim;
val _ = Context.>> (Context.map_theory
- (snd o PureThy.add_thms [(("", Drule.equal_intr_rule), [intro_query NONE])]));
+ (snd o PureThy.add_thms [((Binding.empty, Drule.equal_intr_rule), [intro_query NONE])]));
(* concrete syntax *)
--- a/src/Pure/Isar/element.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/element.ML Wed Jan 28 16:57:12 2009 +0100
@@ -9,11 +9,11 @@
sig
datatype ('typ, 'term) stmt =
Shows of (Attrib.binding * ('term * 'term list) list) list |
- Obtains of (Binding.T * ((Binding.T * 'typ option) list * 'term list)) list
+ Obtains of (binding * ((binding * 'typ option) list * 'term list)) list
type statement = (string, string) stmt
type statement_i = (typ, term) stmt
datatype ('typ, 'term, 'fact) ctxt =
- Fixes of (Binding.T * 'typ option * mixfix) list |
+ Fixes of (binding * 'typ option * mixfix) list |
Constrains of (string * 'typ) list |
Assumes of (Attrib.binding * ('term * 'term list) list) list |
Defines of (Attrib.binding * ('term * 'term list)) list |
@@ -23,46 +23,26 @@
val facts_map: (('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt) ->
(Attrib.binding * ('fact * Attrib.src list) list) list ->
(Attrib.binding * ('c * Attrib.src list) list) list
- val map_ctxt': {binding: Binding.T -> Binding.T,
- var: Binding.T * mixfix -> Binding.T * mixfix,
- typ: 'typ -> 'a, term: 'term -> 'b, pat: 'term -> 'b, fact: 'fact -> 'c,
- attrib: Attrib.src -> Attrib.src} -> ('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt
- val map_ctxt: {binding: Binding.T -> Binding.T,
- var: Binding.T * mixfix -> Binding.T * mixfix,
- typ: 'typ -> 'a, term: 'term -> 'b, fact: 'fact -> 'c,
- attrib: Attrib.src -> Attrib.src} -> ('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt
+ val map_ctxt: {binding: binding -> binding, typ: 'typ -> 'a, term: 'term -> 'b,
+ pattern: 'term -> 'b, fact: 'fact -> 'c, attrib: Attrib.src -> Attrib.src} ->
+ ('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt
val map_ctxt_attrib: (Attrib.src -> Attrib.src) ->
('typ, 'term, 'fact) ctxt -> ('typ, 'term, 'fact) ctxt
val morph_ctxt: morphism -> context_i -> context_i
- val params_of: context_i -> (string * typ) list
- val prems_of: context_i -> term list
- val facts_of: theory -> context_i -> (Attrib.binding * (thm list * Attrib.src list) list) list
val pretty_stmt: Proof.context -> statement_i -> Pretty.T list
val pretty_ctxt: Proof.context -> context_i -> Pretty.T list
val pretty_statement: Proof.context -> string -> thm -> Pretty.T
type witness
- val map_witness: (term * thm -> term * thm) -> witness -> witness
+ val prove_witness: Proof.context -> term -> tactic -> witness
+ val witness_proof: (witness list list -> Proof.context -> Proof.context) ->
+ term list list -> Proof.context -> Proof.state
+ val witness_proof_eqs: (witness list list -> thm list -> Proof.context -> Proof.context) ->
+ term list list -> term list -> Proof.context -> Proof.state
+ val witness_local_proof: (witness list list -> Proof.state -> Proof.state) ->
+ string -> term list list -> Proof.context -> bool -> Proof.state -> Proof.state
val morph_witness: morphism -> witness -> witness
- val witness_prop: witness -> term
- val witness_hyps: witness -> term list
- val assume_witness: theory -> term -> witness
- val prove_witness: Proof.context -> term -> tactic -> witness
- val close_witness: witness -> witness
val conclude_witness: witness -> thm
- val mark_witness: term -> term
- val make_witness: term -> thm -> witness
- val dest_witness: witness -> term * thm
- val transfer_witness: theory -> witness -> witness
- val refine_witness: Proof.state -> Proof.state Seq.seq
val pretty_witness: Proof.context -> witness -> Pretty.T
- val rename: (string * (string * mixfix option)) list -> string -> string
- val rename_var_name: (string * (string * mixfix option)) list ->
- string * mixfix -> string * mixfix
- val rename_var: (string * (string * mixfix option)) list ->
- Binding.T * mixfix -> Binding.T * mixfix
- val rename_term: (string * (string * mixfix option)) list -> term -> term
- val rename_thm: (string * (string * mixfix option)) list -> thm -> thm
- val rename_morphism: (string * (string * mixfix option)) list -> morphism
val instT_type: typ Symtab.table -> typ -> typ
val instT_term: typ Symtab.table -> term -> term
val instT_thm: theory -> typ Symtab.table -> thm -> thm
@@ -93,7 +73,7 @@
datatype ('typ, 'term) stmt =
Shows of (Attrib.binding * ('term * 'term list) list) list |
- Obtains of (Binding.T * ((Binding.T * 'typ option) list * 'term list)) list;
+ Obtains of (binding * ((binding * 'typ option) list * 'term list)) list;
type statement = (string, string) stmt;
type statement_i = (typ, term) stmt;
@@ -102,7 +82,7 @@
(* context *)
datatype ('typ, 'term, 'fact) ctxt =
- Fixes of (Binding.T * 'typ option * mixfix) list |
+ Fixes of (binding * 'typ option * mixfix) list |
Constrains of (string * 'typ) list |
Assumes of (Attrib.binding * ('term * 'term list) list) list |
Defines of (Attrib.binding * ('term * 'term list)) list |
@@ -113,53 +93,29 @@
fun facts_map f facts = Notes ("", facts) |> f |> (fn Notes (_, facts') => facts');
-fun map_ctxt' {binding, var, typ, term, pat, fact, attrib} =
- fn Fixes fixes => Fixes (fixes |> map (fn (x, T, mx) =>
- let val (x', mx') = var (x, mx) in (x', Option.map typ T, mx') end))
+fun map_ctxt {binding, typ, term, pattern, fact, attrib} =
+ fn Fixes fixes => Fixes (fixes |> map (fn (x, T, mx) => (binding x, Option.map typ T, mx)))
| Constrains xs => Constrains (xs |> map (fn (x, T) =>
- let val x' = Binding.base_name (#1 (var (Binding.name x, NoSyn))) in (x', typ T) end))
+ (Binding.base_name (binding (Binding.name x)), typ T)))
| Assumes asms => Assumes (asms |> map (fn ((a, atts), propps) =>
- ((binding a, map attrib atts), propps |> map (fn (t, ps) => (term t, map pat ps)))))
+ ((binding a, map attrib atts), propps |> map (fn (t, ps) => (term t, map pattern ps)))))
| Defines defs => Defines (defs |> map (fn ((a, atts), (t, ps)) =>
- ((binding a, map attrib atts), (term t, map pat ps))))
+ ((binding a, map attrib atts), (term t, map pattern ps))))
| Notes (kind, facts) => Notes (kind, facts |> map (fn ((a, atts), bs) =>
((binding a, map attrib atts), bs |> map (fn (ths, btts) => (fact ths, map attrib btts)))));
-fun map_ctxt {binding, var, typ, term, fact, attrib} =
- map_ctxt' {binding = binding, var = var, typ = typ, term = term, pat = term,
- fact = fact, attrib = attrib}
-
fun map_ctxt_attrib attrib =
- map_ctxt {binding = I, var = I, typ = I, term = I, fact = I, attrib = attrib};
+ map_ctxt {binding = I, typ = I, term = I, pattern = I, fact = I, attrib = attrib};
fun morph_ctxt phi = map_ctxt
{binding = Morphism.binding phi,
- var = Morphism.var phi,
typ = Morphism.typ phi,
term = Morphism.term phi,
+ pattern = Morphism.term phi,
fact = Morphism.fact phi,
attrib = Args.morph_values phi};
-(* logical content *)
-
-fun params_of (Fixes fixes) = fixes |> map
- (fn (x, SOME T, _) => (Binding.base_name x, T)
- | (x, _, _) => raise TERM ("Untyped context element parameter " ^ quote (Binding.display x), []))
- | params_of _ = [];
-
-fun prems_of (Assumes asms) = maps (map fst o snd) asms
- | prems_of (Defines defs) = map (fst o snd) defs
- | prems_of _ = [];
-
-fun assume thy t = Assumption.assume (Thm.cterm_of thy t);
-
-fun facts_of thy (Assumes asms) = map (apsnd (map (fn (t, _) => ([assume thy t], [])))) asms
- | facts_of thy (Defines defs) = map (apsnd (fn (t, _) => [([assume thy t], [])])) defs
- | facts_of _ (Notes (_, facts)) = facts
- | facts_of _ _ = [];
-
-
(** pretty printing **)
@@ -169,9 +125,8 @@
map (fn y => Pretty.block [Pretty.str " ", Pretty.keyword sep, Pretty.brk 1, y]) ys;
fun pretty_name_atts ctxt (b, atts) sep =
- let
- val name = Binding.display b;
- in if name = "" andalso null atts then []
+ let val name = Binding.display b in
+ if name = "" andalso null atts then []
else [Pretty.block
(Pretty.breaks (Pretty.str name :: Attrib.pretty_attribs ctxt atts @ [Pretty.str sep]))]
end;
@@ -300,24 +255,52 @@
datatype witness = Witness of term * thm;
+val mark_witness = Logic.protect;
+fun witness_prop (Witness (t, _)) = t;
+fun witness_hyps (Witness (_, th)) = #hyps (Thm.rep_thm th);
fun map_witness f (Witness witn) = Witness (f witn);
fun morph_witness phi = map_witness (fn (t, th) => (Morphism.term phi t, Morphism.thm phi th));
-fun witness_prop (Witness (t, _)) = t;
-fun witness_hyps (Witness (_, th)) = #hyps (Thm.rep_thm th);
-
-fun assume_witness thy t =
- Witness (t, Goal.protect (Thm.assume (Thm.cterm_of thy t)));
-
fun prove_witness ctxt t tac =
- Witness (t, Thm.close_derivation (Goal.prove ctxt [] [] (Logic.protect t) (fn _ =>
+ Witness (t, Thm.close_derivation (Goal.prove ctxt [] [] (mark_witness t) (fn _ =>
Tactic.rtac Drule.protectI 1 THEN tac)));
-val close_witness = map_witness (fn (t, th) => (t, Thm.close_derivation th));
+
+local
+
+val refine_witness =
+ Proof.refine (Method.Basic (K (Method.RAW_METHOD
+ (K (ALLGOALS
+ (CONJUNCTS (ALLGOALS
+ (CONJUNCTS (TRYALL (Tactic.rtac Drule.protectI)))))))), Position.none));
+
+fun gen_witness_proof proof after_qed wit_propss eq_props =
+ let
+ val propss = (map o map) (fn prop => (mark_witness prop, [])) wit_propss
+ @ [map (rpair []) eq_props];
+ fun after_qed' thmss =
+ let val (wits, eqs) = split_last ((map o map) Thm.close_derivation thmss);
+ in after_qed ((map2 o map2) (curry Witness) wit_propss wits) eqs end;
+ in proof after_qed' propss #> refine_witness #> Seq.hd end;
-fun conclude_witness (Witness (_, th)) =
- Thm.close_derivation (MetaSimplifier.norm_hhf_protect (Goal.conclude th));
+in
+
+fun witness_proof after_qed wit_propss =
+ gen_witness_proof (Proof.theorem_i NONE) (fn wits => fn _ => after_qed wits)
+ wit_propss [];
+
+val witness_proof_eqs = gen_witness_proof (Proof.theorem_i NONE);
+
+fun witness_local_proof after_qed cmd wit_propss goal_ctxt int =
+ gen_witness_proof (fn after_qed' => fn propss =>
+ Proof.map_context (K goal_ctxt)
+ #> Proof.local_goal (ProofDisplay.print_results int) (K I) ProofContext.bind_propp_i
+ cmd NONE after_qed' (map (pair (Binding.empty, [])) propss))
+ (fn wits => fn _ => after_qed wits) wit_propss [];
+
+end;
+
fun compose_witness (Witness (_, th)) r =
let
@@ -330,18 +313,8 @@
(Thm.instantiate (Thm.match (Thm.cprop_of th', A)) th'))
end;
-val mark_witness = Logic.protect;
-
-fun make_witness t th = Witness (t, th);
-fun dest_witness (Witness w) = w;
-
-fun transfer_witness thy (Witness (t, th)) = Witness (t, Thm.transfer thy th);
-
-val refine_witness =
- Proof.refine (Method.Basic (K (Method.RAW_METHOD
- (K (ALLGOALS
- (CONJUNCTS (ALLGOALS
- (CONJUNCTS (TRYALL (Tactic.rtac Drule.protectI)))))))), Position.none));
+fun conclude_witness (Witness (_, th)) =
+ Thm.close_derivation (MetaSimplifier.norm_hhf_protect (Goal.conclude th));
fun pretty_witness ctxt witn =
let val prt_term = Pretty.quote o Syntax.pretty_term ctxt in
@@ -385,50 +358,6 @@
end;
-(* rename *)
-
-fun rename ren x =
- (case AList.lookup (op =) ren (x: string) of
- NONE => x
- | SOME (x', _) => x');
-
-fun rename_var_name ren (x, mx) =
- (case (AList.lookup (op =) ren x, mx) of
- (NONE, _) => (x, mx)
- | (SOME (x', NONE), Structure) => (x', mx)
- | (SOME (x', SOME _), Structure) =>
- error ("Attempt to change syntax of structure parameter " ^ quote x)
- | (SOME (x', NONE), _) => (x', NoSyn)
- | (SOME (x', SOME mx'), _) => (x', mx'));
-
-fun rename_var ren (b, mx) =
- let
- val x = Binding.base_name b;
- val (x', mx') = rename_var_name ren (x, mx);
- in (Binding.name x', mx') end;
-
-fun rename_term ren (Free (x, T)) = Free (rename ren x, T)
- | rename_term ren (t $ u) = rename_term ren t $ rename_term ren u
- | rename_term ren (Abs (x, T, t)) = Abs (x, T, rename_term ren t)
- | rename_term _ a = a;
-
-fun rename_thm ren th =
- let
- val thy = Thm.theory_of_thm th;
- val subst = (Thm.fold_terms o Term.fold_aterms)
- (fn Free (x, T) =>
- let val x' = rename ren x
- in if x = x' then I else insert (eq_fst (op =)) ((x, T), Free (x', T)) end
- | _ => I) th [];
- in
- if null subst then th
- else th |> hyps_rule (instantiate_frees thy subst)
- end;
-
-fun rename_morphism ren = Morphism.morphism
- {binding = I, var = rename_var ren, typ = I, term = rename_term ren, fact = map (rename_thm ren)};
-
-
(* instantiate types *)
fun instT_type env =
@@ -454,7 +383,7 @@
fun instT_morphism thy env =
let val thy_ref = Theory.check_thy thy in
Morphism.morphism
- {binding = I, var = I,
+ {binding = I,
typ = instT_type env,
term = instT_term env,
fact = map (fn th => instT_thm (Theory.deref thy_ref) env th)}
@@ -503,7 +432,7 @@
fun inst_morphism thy envs =
let val thy_ref = Theory.check_thy thy in
Morphism.morphism
- {binding = I, var = I,
+ {binding = I,
typ = instT_type (#1 envs),
term = inst_term envs,
fact = map (fn th => inst_thm (Theory.deref thy_ref) envs th)}
@@ -517,15 +446,15 @@
NONE => I
| SOME w => Thm.implies_intr hyp #> compose_witness w)) (#hyps (Thm.crep_thm thm));
-fun satisfy_morphism witns = Morphism.thm_morphism (satisfy_thm witns);
-
-fun satisfy_facts witns = facts_map (morph_ctxt (satisfy_morphism witns));
+val satisfy_morphism = Morphism.thm_morphism o satisfy_thm;
+val satisfy_facts = facts_map o morph_ctxt o satisfy_morphism;
(* rewriting with equalities *)
fun eq_morphism thy thms = Morphism.morphism
- {binding = I, var = I, typ = I,
+ {binding = I,
+ typ = I,
term = MetaSimplifier.rewrite_term thy thms [],
fact = map (MetaSimplifier.rewrite_rule thms)};
@@ -542,18 +471,16 @@
val exp_fact = map (Thm.adjust_maxidx_thm maxidx) #> Variable.export inner outer;
val exp_term = Drule.term_rule thy (singleton exp_fact);
val exp_typ = Logic.type_map exp_term;
- val morphism =
- Morphism.morphism {binding = I, var = I, typ = exp_typ, term = exp_term, fact = exp_fact};
+ val morphism = Morphism.morphism {binding = I, typ = exp_typ, term = exp_term, fact = exp_fact};
in facts_map (morph_ctxt morphism) facts end;
(* transfer to theory using closure *)
fun transfer_morphism thy =
- let val thy_ref = Theory.check_thy thy in
- Morphism.morphism {binding = I, var = I, typ = I, term = I,
- fact = map (fn th => transfer (Theory.deref thy_ref) th)}
- end;
+ let val thy_ref = Theory.check_thy thy
+ in Morphism.thm_morphism (fn th => transfer (Theory.deref thy_ref) th) end;
+
(** activate in context, return elements and facts **)
@@ -600,11 +527,14 @@
if NameSpace.is_qualified name then error ("Illegal qualified name: " ^ quote name)
else name;
-fun prep_facts prep_name get intern ctxt elem = elem |> map_ctxt
- {var = I, typ = I, term = I,
- binding = Binding.map_base prep_name,
- fact = get ctxt,
- attrib = intern (ProofContext.theory_of ctxt)};
+fun prep_facts prep_name get intern ctxt =
+ map_ctxt
+ {binding = Binding.map_base prep_name,
+ typ = I,
+ term = I,
+ pattern = I,
+ fact = get ctxt,
+ attrib = intern (ProofContext.theory_of ctxt)};
in
--- a/src/Pure/Isar/expression.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/expression.ML Wed Jan 28 16:57:12 2009 +0100
@@ -9,8 +9,8 @@
(* Locale expressions *)
datatype 'term map = Positional of 'term option list | Named of (string * 'term) list
type 'term expr = (string * ((string * bool) * 'term map)) list
- type expression_i = term expr * (Binding.T * typ option * mixfix) list
- type expression = string expr * (Binding.T * string option * mixfix) list
+ type expression_i = term expr * (binding * typ option * mixfix) list
+ type expression = string expr * (binding * string option * mixfix) list
(* Processing of context statements *)
val cert_statement: Element.context_i list -> (term * term list) list list ->
@@ -20,14 +20,14 @@
(* Declaring locales *)
val cert_declaration: expression_i -> Element.context_i list -> Proof.context ->
- ((Binding.T * typ option * mixfix) list * (string * morphism) list
+ ((binding * typ option * mixfix) list * (string * morphism) list
* Element.context_i list) * ((string * typ) list * Proof.context)
val cert_read_declaration: expression_i -> Element.context list -> Proof.context ->
- ((Binding.T * typ option * mixfix) list * (string * morphism) list
+ ((binding * typ option * mixfix) list * (string * morphism) list
* Element.context_i list) * ((string * typ) list * Proof.context)
(*FIXME*)
val read_declaration: expression -> Element.context list -> Proof.context ->
- ((Binding.T * typ option * mixfix) list * (string * morphism) list
+ ((binding * typ option * mixfix) list * (string * morphism) list
* Element.context_i list) * ((string * typ) list * Proof.context)
val add_locale: bstring -> bstring -> expression_i -> Element.context_i list ->
theory -> string * local_theory
@@ -64,8 +64,8 @@
type 'term expr = (string * ((string * bool) * 'term map)) list;
-type expression = string expr * (Binding.T * string option * mixfix) list;
-type expression_i = term expr * (Binding.T * typ option * mixfix) list;
+type expression = string expr * (binding * string option * mixfix) list;
+type expression_i = term expr * (binding * typ option * mixfix) list;
(** Internalise locale names in expr **)
@@ -198,11 +198,14 @@
(** Parsing **)
-fun parse_elem prep_typ prep_term ctxt elem =
- Element.map_ctxt' {binding = I, var = I, typ = prep_typ ctxt,
- term = prep_term (ProofContext.set_mode ProofContext.mode_schematic ctxt), (* FIXME ?? *)
- pat = prep_term (ProofContext.set_mode ProofContext.mode_pattern ctxt),
- fact = I, attrib = I} elem;
+fun parse_elem prep_typ prep_term ctxt =
+ Element.map_ctxt
+ {binding = I,
+ typ = prep_typ ctxt,
+ term = prep_term (ProofContext.set_mode ProofContext.mode_schematic ctxt),
+ pattern = prep_term (ProofContext.set_mode ProofContext.mode_pattern ctxt),
+ fact = I,
+ attrib = I};
fun parse_concl prep_term ctxt concl =
(map o map) (fn (t, ps) =>
@@ -490,8 +493,7 @@
val exp_fact = Drule.zero_var_indexes_list o map Thm.strip_shyps o Morphism.fact export;
val exp_term = TermSubst.zero_var_indexes o Morphism.term export;
val exp_typ = Logic.type_map exp_term;
- val export' =
- Morphism.morphism {binding = I, var = I, typ = exp_typ, term = exp_term, fact = exp_fact};
+ val export' = Morphism.morphism {binding = I, typ = exp_typ, term = exp_term, fact = exp_fact};
in ((propss, deps, export'), goal_ctxt) end;
in
@@ -640,7 +642,7 @@
|> bodyT = propT ? Sign.add_advanced_trfuns ([], [], [aprop_tr' (length args) name], [])
|> Sign.declare_const [] ((Binding.name bname, predT), NoSyn) |> snd
|> PureThy.add_defs false
- [((Thm.def_name bname, Logic.mk_equals (head, body)), [Thm.kind_internal])];
+ [((Binding.name (Thm.def_name bname), Logic.mk_equals (head, body)), [Thm.kind_internal])];
val defs_ctxt = ProofContext.init defs_thy |> Variable.declare_term head;
val cert = Thm.cterm_of defs_thy;
@@ -786,41 +788,23 @@
(*** Interpretation ***)
-(** Witnesses and goals **)
-
-fun prep_propp propss = propss |> map (map (rpair [] o Element.mark_witness));
-
-val prep_result = map2 (fn props => fn thms =>
- map2 Element.make_witness props (map Thm.close_derivation thms));
-
-
(** Interpretation between locales: declaring sublocale relationships **)
local
-fun gen_sublocale prep_expr intern
- raw_target expression thy =
+fun gen_sublocale prep_expr intern raw_target expression thy =
let
val target = intern thy raw_target;
val target_ctxt = Locale.init target thy;
val ((propss, deps, export), goal_ctxt) = prep_expr expression target_ctxt;
- fun store_dep ((name, morph), thms) =
- Locale.add_dependency target (name, morph $> Element.satisfy_morphism thms $> export);
-
- fun after_qed results =
- ProofContext.theory (
- (* store dependencies *)
- fold store_dep (deps ~~ prep_result propss results) #>
- (* propagate registrations *)
- (fn thy => fold_rev (fn reg => Locale.activate_global_facts reg)
+ fun after_qed witss = ProofContext.theory (
+ fold2 (fn (name, morph) => fn wits => Locale.add_dependency target
+ (name, morph $> Element.satisfy_morphism wits $> export)) deps witss #>
+ (fn thy => fold_rev Locale.activate_global_facts
(Locale.get_registrations thy) thy));
- in
- goal_ctxt |>
- Proof.theorem_i NONE after_qed (prep_propp propss) |>
- Element.refine_witness |> Seq.hd
- end;
+ in Element.witness_proof after_qed propss goal_ctxt end;
in
@@ -845,10 +829,10 @@
val goal_ctxt = fold Variable.auto_fixes eqns expr_ctxt;
val export' = Variable.export_morphism goal_ctxt expr_ctxt;
- fun store_reg ((name, morph), thms) thy =
+ fun store_reg ((name, morph), wits) thy =
let
- val thms' = map (Element.morph_witness export') thms;
- val morph' = morph $> Element.satisfy_morphism thms';
+ val wits' = map (Element.morph_witness export') wits;
+ val morph' = morph $> Element.satisfy_morphism wits';
in
thy
|> Locale.add_registration (name, (morph', export))
@@ -859,35 +843,26 @@
thy
|> fold (fn (name, morph) =>
Locale.activate_global_facts (name, morph $> export)) regs
- | store_eqns_activate regs thms thy =
+ | store_eqns_activate regs eqs thy =
let
- val thms' = thms |> map (Element.conclude_witness #>
- Morphism.thm (export' $> export) #>
+ val eqs' = eqs |> map (Morphism.thm (export' $> export) #>
LocalDefs.meta_rewrite_rule (ProofContext.init thy) #>
Drule.abs_def);
- val eq_morph = Element.eq_morphism thy thms';
+ val eq_morph = Element.eq_morphism thy eqs';
val eq_attns' = map ((apsnd o map) (Attrib.attribute_i thy)) eq_attns;
in
thy
|> fold (fn (name, morph) =>
Locale.amend_registration eq_morph (name, morph) #>
Locale.activate_global_facts (name, morph $> eq_morph $> export)) regs
- |> PureThy.note_thmss Thm.lemmaK (eq_attns' ~~ map (fn th => [([th], [])]) thms')
+ |> PureThy.note_thmss Thm.lemmaK (eq_attns' ~~ map (fn eq => [([eq], [])]) eqs')
|> snd
end;
- fun after_qed results =
- let
- val (wits_reg, wits_eq) = split_last (prep_result (propss @ [eqns]) results);
- in ProofContext.theory (fold_map store_reg (regs ~~ wits_reg)
- #-> (fn regs => store_eqns_activate regs wits_eq))
- end;
+ fun after_qed wits eqs = ProofContext.theory (fold_map store_reg (regs ~~ wits)
+ #-> (fn regs => store_eqns_activate regs eqs));
- in
- goal_ctxt |>
- Proof.theorem_i NONE after_qed (prep_propp (propss @ [eqns])) |>
- Element.refine_witness |> Seq.hd
- end;
+ in Element.witness_proof_eqs after_qed propss eqns goal_ctxt end;
in
@@ -910,20 +885,16 @@
val ((propss, regs, export), goal_ctxt) = prep_expr expression ctxt;
- fun store_reg ((name, morph), thms) =
+ fun store_reg (name, morph) thms =
let
val morph' = morph $> Element.satisfy_morphism thms $> export;
- in
- Locale.activate_local_facts (name, morph')
- end;
+ in Locale.activate_local_facts (name, morph') end;
- fun after_qed results =
- Proof.map_context (fold store_reg (regs ~~ prep_result propss results));
+ fun after_qed wits =
+ Proof.map_context (fold2 store_reg regs wits);
in
- state |> Proof.map_context (K goal_ctxt) |>
- Proof.local_goal (ProofDisplay.print_results int) (K I) ProofContext.bind_propp_i
- "interpret" NONE after_qed (map (pair (Binding.empty, [])) (prep_propp propss)) |>
- Element.refine_witness |> Seq.hd
+ state
+ |> Element.witness_local_proof after_qed "interpret" propss goal_ctxt int
end;
in
--- a/src/Pure/Isar/isar_cmd.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/isar_cmd.ML Wed Jan 28 16:57:12 2009 +0100
@@ -13,8 +13,8 @@
val typed_print_translation: bool * (string * Position.T) -> theory -> theory
val print_ast_translation: bool * (string * Position.T) -> theory -> theory
val oracle: bstring -> SymbolPos.text * Position.T -> theory -> theory
- val add_axioms: ((Binding.T * string) * Attrib.src list) list -> theory -> theory
- val add_defs: (bool * bool) * ((Binding.T * string) * Attrib.src list) list -> theory -> theory
+ val add_axioms: ((binding * string) * Attrib.src list) list -> theory -> theory
+ val add_defs: (bool * bool) * ((binding * string) * Attrib.src list) list -> theory -> theory
val declaration: string * Position.T -> local_theory -> local_theory
val simproc_setup: string -> string list -> string * Position.T -> string list ->
local_theory -> local_theory
@@ -53,7 +53,6 @@
val print_theorems: Toplevel.transition -> Toplevel.transition
val print_locales: Toplevel.transition -> Toplevel.transition
val print_locale: bool * xstring -> Toplevel.transition -> Toplevel.transition
- val print_registrations: bool -> string -> Toplevel.transition -> Toplevel.transition
val print_attributes: Toplevel.transition -> Toplevel.transition
val print_simpset: Toplevel.transition -> Toplevel.transition
val print_rules: Toplevel.transition -> Toplevel.transition
@@ -359,12 +358,6 @@
Toplevel.keep (fn state =>
Locale.print_locale (Toplevel.theory_of state) show_facts name);
-fun print_registrations show_wits name = Toplevel.unknown_context o
- Toplevel.keep (Toplevel.node_case
- (Context.cases (Old_Locale.print_registrations show_wits name o ProofContext.init)
- (Old_Locale.print_registrations show_wits name))
- (Old_Locale.print_registrations show_wits name o Proof.context_of));
-
val print_attributes = Toplevel.unknown_theory o
Toplevel.keep (Attrib.print_attributes o Toplevel.theory_of);
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Isar/isar_document.scala Wed Jan 28 16:57:12 2009 +0100
@@ -0,0 +1,64 @@
+/* Title: Pure/Isar/isar_document.scala
+ Author: Makarius
+
+Interactive Isar documents.
+*/
+
+package isabelle
+
+object IsarDocument {
+ /* unique identifiers */
+
+ type State_ID = String
+ type Command_ID = String
+ type Document_ID = String
+}
+
+trait IsarDocument extends IsabelleProcess
+{
+ import IsarDocument._
+
+
+ /* commands */
+
+ def define_command(id: Command_ID, text: String) {
+ output_sync("Isar.define_command " + IsabelleSyntax.encode_string(id) + " " +
+ IsabelleSyntax.encode_string(text))
+ }
+
+
+ /* documents */
+
+ def begin_document(id: Document_ID, path: String) {
+ output_sync("Isar.begin_document " + IsabelleSyntax.encode_string(id) + " " +
+ IsabelleSyntax.encode_string(path))
+ }
+
+ def end_document(id: Document_ID) {
+ output_sync("Isar.end_document " + IsabelleSyntax.encode_string(id))
+ }
+
+ def edit_document(old_id: Document_ID, new_id: Document_ID,
+ edits: List[(Command_ID, Option[Command_ID])])
+ {
+ def append_edit(edit: (Command_ID, Option[Command_ID]), result: StringBuilder)
+ {
+ edit match {
+ case (id, None) => IsabelleSyntax.append_string(id, result)
+ case (id, Some(id2)) =>
+ IsabelleSyntax.append_string(id, result)
+ result.append(" ")
+ IsabelleSyntax.append_string(id2, result)
+ }
+ }
+
+ val buf = new StringBuilder(40)
+ buf.append("Isar.edit_document ")
+ IsabelleSyntax.append_string(old_id, buf)
+ buf.append(" ")
+ IsabelleSyntax.append_string(new_id, buf)
+ buf.append(" ")
+ IsabelleSyntax.append_list(append_edit, edits, buf)
+ output_sync(buf.toString)
+ }
+}
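
A minimal usage sketch of the IsarDocument trait added above (not part of the changeset): it assumes some IsabelleProcess instance with IsarDocument mixed in is already at hand, and the ids "cmd1", "doc0", "doc1" are illustrative only. Each call simply emits the corresponding Isar.* protocol command; the interpretation of the (Command_ID, Option[Command_ID]) pairs passed to edit_document is determined by the ML side of the protocol.

    def demo(proc: IsarDocument) {
      // register the command text under a client-chosen id
      proc.define_command("cmd1", "lemma \"x = x\" by (rule refl)")
      // open a document version for the given theory file
      proc.begin_document("doc0", "Scratch.thy")
      // derive a new document version by sending one edit
      proc.edit_document("doc0", "doc1", List(("cmd1", None)))
      // close the final version
      proc.end_document("doc1")
    }
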
--- a/src/Pure/Isar/isar_syn.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/isar_syn.ML Wed Jan 28 16:57:12 2009 +0100
@@ -418,45 +418,6 @@
>> (fn expr => Toplevel.print o
Toplevel.proof' (fn int => Expression.interpret_cmd expr int)));
-local
-
-val opt_prefix = Scan.optional (P.binding --| P.$$$ ":") Binding.empty;
-
-in
-
-val locale_val =
- SpecParse.locale_expr --
- Scan.optional (P.$$$ "+" |-- P.!!! (Scan.repeat1 SpecParse.context_element)) [] ||
- Scan.repeat1 SpecParse.context_element >> pair Old_Locale.empty;
-
-val _ =
- OuterSyntax.command "class_locale" "define named proof context based on classes" K.thy_decl
- (P.name -- Scan.optional (P.$$$ "=" |-- P.!!! locale_val) (Old_Locale.empty, []) -- P.opt_begin
- >> (fn ((name, (expr, elems)), begin) =>
- (begin ? Toplevel.print) o Toplevel.begin_local_theory begin
- (Old_Locale.add_locale_cmd name expr elems #-> TheoryTarget.begin)));
-
-val _ =
- OuterSyntax.command "class_interpretation"
- "prove and register interpretation of locale expression in theory or locale" K.thy_goal
- (P.xname --| (P.$$$ "\\<subseteq>" || P.$$$ "<") -- P.!!! SpecParse.locale_expr
- >> (Toplevel.print oo (Toplevel.theory_to_proof o Old_Locale.interpretation_in_locale I)) ||
- opt_prefix -- SpecParse.locale_expr -- SpecParse.locale_insts
- >> (fn ((name, expr), insts) => Toplevel.print o
- Toplevel.theory_to_proof
- (Old_Locale.interpretation_cmd (Binding.base_name name) expr insts)));
-
-val _ =
- OuterSyntax.command "class_interpret"
- "prove and register interpretation of locale expression in proof context"
- (K.tag_proof K.prf_goal)
- (opt_prefix -- SpecParse.locale_expr -- SpecParse.locale_insts
- >> (fn ((name, expr), insts) => Toplevel.print o
- Toplevel.proof'
- (fn int => Old_Locale.interpret_cmd (Binding.base_name name) expr insts int)));
-
-end;
-
(* classes *)
@@ -467,7 +428,7 @@
val _ =
OuterSyntax.command "class" "define type class" K.thy_decl
- (P.name -- (P.$$$ "=" |-- class_val) -- P.opt_begin
+ (P.name -- Scan.optional (P.$$$ "=" |-- class_val) ([], []) -- P.opt_begin
>> (fn ((name, (supclasses, elems)), begin) =>
(begin ? Toplevel.print) o Toplevel.begin_local_theory begin
(Class.class_cmd name supclasses elems #> snd)));
@@ -857,12 +818,6 @@
(opt_bang -- P.xname >> (Toplevel.no_timing oo IsarCmd.print_locale));
val _ =
- OuterSyntax.improper_command "print_interps"
- "print interpretations of named locale" K.diag
- (Scan.optional (P.$$$ "!" >> K true) false -- P.xname
- >> (Toplevel.no_timing oo uncurry IsarCmd.print_registrations));
-
-val _ =
OuterSyntax.improper_command "print_attributes" "print attributes of this theory" K.diag
(Scan.succeed (Toplevel.no_timing o IsarCmd.print_attributes));
--- a/src/Pure/Isar/local_defs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/local_defs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -11,10 +11,10 @@
val mk_def: Proof.context -> (string * term) list -> term list
val expand: cterm list -> thm -> thm
val def_export: Assumption.export
- val add_defs: ((Binding.T * mixfix) * ((Binding.T * attribute list) * term)) list ->
+ val add_defs: ((binding * mixfix) * ((binding * attribute list) * term)) list ->
Proof.context -> (term * (string * thm)) list * Proof.context
- val add_def: (Binding.T * mixfix) * term -> Proof.context -> (term * thm) * Proof.context
- val fixed_abbrev: (Binding.T * mixfix) * term -> Proof.context ->
+ val add_def: (binding * mixfix) * term -> Proof.context -> (term * thm) * Proof.context
+ val fixed_abbrev: (binding * mixfix) * term -> Proof.context ->
(term * term) * Proof.context
val export: Proof.context -> Proof.context -> thm -> thm list * thm
val export_cterm: Proof.context -> Proof.context -> cterm -> cterm * thm
--- a/src/Pure/Isar/local_syntax.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/local_syntax.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/local_syntax.ML
- ID: $Id$
Author: Makarius
Local syntax depending on theory syntax.
--- a/src/Pure/Isar/local_theory.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/local_theory.ML Wed Jan 28 16:57:12 2009 +0100
@@ -18,16 +18,16 @@
val raw_theory: (theory -> theory) -> local_theory -> local_theory
val checkpoint: local_theory -> local_theory
val full_naming: local_theory -> NameSpace.naming
- val full_name: local_theory -> Binding.T -> string
+ val full_name: local_theory -> binding -> string
val theory_result: (theory -> 'a * theory) -> local_theory -> 'a * local_theory
val theory: (theory -> theory) -> local_theory -> local_theory
val target_result: (Proof.context -> 'a * Proof.context) -> local_theory -> 'a * local_theory
val target: (Proof.context -> Proof.context) -> local_theory -> local_theory
val affirm: local_theory -> local_theory
val pretty: local_theory -> Pretty.T list
- val abbrev: Syntax.mode -> (Binding.T * mixfix) * term -> local_theory ->
+ val abbrev: Syntax.mode -> (binding * mixfix) * term -> local_theory ->
(term * term) * local_theory
- val define: string -> (Binding.T * mixfix) * (Attrib.binding * term) -> local_theory ->
+ val define: string -> (binding * mixfix) * (Attrib.binding * term) -> local_theory ->
(term * (string * thm)) * local_theory
val note: string -> Attrib.binding * thm list -> local_theory -> (string * thm list) * local_theory
val notes: string -> (Attrib.binding * (thm list * Attrib.src list) list) list ->
@@ -55,10 +55,10 @@
type operations =
{pretty: local_theory -> Pretty.T list,
- abbrev: Syntax.mode -> (Binding.T * mixfix) * term -> local_theory ->
+ abbrev: Syntax.mode -> (binding * mixfix) * term -> local_theory ->
(term * term) * local_theory,
define: string ->
- (Binding.T * mixfix) * (Attrib.binding * term) -> local_theory ->
+ (binding * mixfix) * (Attrib.binding * term) -> local_theory ->
(term * (string * thm)) * local_theory,
notes: string ->
(Attrib.binding * (thm list * Attrib.src list) list) list ->
--- a/src/Pure/Isar/locale.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/locale.ML Wed Jan 28 16:57:12 2009 +0100
@@ -29,23 +29,18 @@
signature LOCALE =
sig
- type locale
-
+ (* Locale specification *)
val register_locale: bstring ->
- (string * sort) list * (Binding.T * typ option * mixfix) list ->
+ (string * sort) list * (binding * typ option * mixfix) list ->
term option * term list ->
thm option * thm option -> thm list ->
(declaration * stamp) list * (declaration * stamp) list ->
((string * (Attrib.binding * (thm list * Attrib.src list) list) list) * stamp) list ->
((string * morphism) * stamp) list -> theory -> theory
-
- (* Locale name space *)
val intern: theory -> xstring -> string
val extern: theory -> string -> xstring
-
- (* Specification *)
val defined: theory -> string -> bool
- val params_of: theory -> string -> (Binding.T * typ option * mixfix) list
+ val params_of: theory -> string -> (binding * typ option * mixfix) list
val intros_of: theory -> string -> thm option * thm option
val axioms_of: theory -> string -> thm list
val instance_of: theory -> string -> morphism -> term list
@@ -112,13 +107,25 @@
datatype ctxt = datatype Element.ctxt;
+fun global_note_qualified kind facts thy = (*FIXME*)
+ thy
+ |> Sign.qualified_names
+ |> PureThy.note_thmss kind facts
+ ||> Sign.restore_naming thy;
+
+fun local_note_qualified kind facts ctxt = (*FIXME*)
+ ctxt
+ |> ProofContext.qualified_names
+ |> ProofContext.note_thmss_i kind facts
+ ||> ProofContext.restore_naming ctxt;
+
(*** Theory data ***)
datatype locale = Loc of {
(** static part **)
- parameters: (string * sort) list * (Binding.T * typ option * mixfix) list,
+ parameters: (string * sort) list * (binding * typ option * mixfix) list,
(* type and term parameters *)
spec: term option * term list,
(* assumptions (as a single predicate expression) and defines *)
@@ -330,7 +337,7 @@
fun init_global_elem (Notes (kind, facts)) thy =
let
val facts' = Attrib.map_facts (Attrib.attribute_i thy) facts
- in Old_Locale.global_note_qualified kind facts' thy |> snd end
+ in global_note_qualified kind facts' thy |> snd end
fun init_local_elem (Fixes fixes) ctxt = ctxt |>
ProofContext.add_fixes_i fixes |> snd
@@ -352,7 +359,7 @@
| init_local_elem (Notes (kind, facts)) ctxt =
let
val facts' = Attrib.map_facts (Attrib.attribute_i (ProofContext.theory_of ctxt)) facts
- in Old_Locale.local_note_qualified kind facts' ctxt |> snd end
+ in local_note_qualified kind facts' ctxt |> snd end
fun cons_elem false (Notes notes) elems = elems
| cons_elem _ elem elems = elem :: elems
@@ -445,7 +452,7 @@
let
val args'' = snd args' |> Element.facts_map (Element.morph_ctxt morph) |>
Attrib.map_facts (Attrib.attribute_i thy)
- in Old_Locale.global_note_qualified kind args'' #> snd end)
+ in global_note_qualified kind args'' #> snd end)
(get_registrations thy |> filter (fn (name, _) => name = loc)) thy))
in ctxt'' end;
@@ -496,12 +503,10 @@
val _ = Context.>> (Context.map_theory
(Method.add_methods
[("intro_locales",
- Method.ctxt_args (fn ctxt => Method.METHOD (intro_locales_tac false ctxt ORELSE'
- Old_Locale.intro_locales_tac false ctxt)),
+ Method.ctxt_args (fn ctxt => Method.METHOD (intro_locales_tac false ctxt)),
"back-chain introduction rules of locales without unfolding predicates"),
("unfold_locales",
- Method.ctxt_args (fn ctxt => Method.METHOD (intro_locales_tac true ctxt ORELSE'
- Old_Locale.intro_locales_tac true ctxt)),
+ Method.ctxt_args (fn ctxt => Method.METHOD (intro_locales_tac true ctxt)),
"back-chain all introduction rules of locales")]));
end;
--- a/src/Pure/Isar/net_rules.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/net_rules.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/net_rules.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Efficient storage of rules: preserves order, prefers later entries.
--- a/src/Pure/Isar/object_logic.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/object_logic.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/object_logic.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Specifics about common object-logics.
--- a/src/Pure/Isar/obtain.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/obtain.ML Wed Jan 28 16:57:12 2009 +0100
@@ -39,16 +39,16 @@
signature OBTAIN =
sig
val thatN: string
- val obtain: string -> (Binding.T * string option * mixfix) list ->
+ val obtain: string -> (binding * string option * mixfix) list ->
(Attrib.binding * (string * string list) list) list ->
bool -> Proof.state -> Proof.state
- val obtain_i: string -> (Binding.T * typ option * mixfix) list ->
- ((Binding.T * attribute list) * (term * term list) list) list ->
+ val obtain_i: string -> (binding * typ option * mixfix) list ->
+ ((binding * attribute list) * (term * term list) list) list ->
bool -> Proof.state -> Proof.state
val result: (Proof.context -> tactic) -> thm list -> Proof.context ->
(cterm list * thm list) * Proof.context
- val guess: (Binding.T * string option * mixfix) list -> bool -> Proof.state -> Proof.state
- val guess_i: (Binding.T * typ option * mixfix) list -> bool -> Proof.state -> Proof.state
+ val guess: (binding * string option * mixfix) list -> bool -> Proof.state -> Proof.state
+ val guess_i: (binding * typ option * mixfix) list -> bool -> Proof.state -> Proof.state
end;
structure Obtain: OBTAIN =
--- a/src/Pure/Isar/old_locale.ML Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2485 +0,0 @@
-(* Title: Pure/Isar/locale.ML
- Author: Clemens Ballarin, TU Muenchen
- Author: Markus Wenzel, LMU/TU Muenchen
-
-Locales -- Isar proof contexts as meta-level predicates, with local
-syntax and implicit structures.
-
-Draws basic ideas from Florian Kammueller's original version of
-locales, but uses the richer infrastructure of Isar instead of the raw
-meta-logic. Furthermore, structured import of contexts (with merge
-and rename operations) are provided, as well as type-inference of the
-signature parts, and predicate definitions of the specification text.
-
-Interpretation enables the reuse of theorems of locales in other
-contexts, namely those defined by theories, structured proofs and
-locales themselves.
-
-See also:
-
-[1] Clemens Ballarin. Locales and Locale Expressions in Isabelle/Isar.
- In Stefano Berardi et al., Types for Proofs and Programs: International
- Workshop, TYPES 2003, Torino, Italy, LNCS 3085, pages 34-50, 2004.
-[2] Clemens Ballarin. Interpretation of Locales in Isabelle: Managing
- Dependencies between Locales. Technical Report TUM-I0607, Technische
- Universitaet Muenchen, 2006.
-[3] Clemens Ballarin. Interpretation of Locales in Isabelle: Theories and
- Proof Contexts. In J.M. Borwein and W.M. Farmer, MKM 2006, LNAI 4108,
- pages 31-43, 2006.
-*)
-
-(* TODO:
-- beta-eta normalisation of interpretation parameters
-- dangling type frees in locales
-- test subsumption of interpretations when merging theories
-*)
-
-signature OLD_LOCALE =
-sig
- datatype expr =
- Locale of string |
- Rename of expr * (string * mixfix option) option list |
- Merge of expr list
- val empty: expr
-
- val intern: theory -> xstring -> string
- val intern_expr: theory -> expr -> expr
- val extern: theory -> string -> xstring
- val init: string -> theory -> Proof.context
-
- (* The specification of a locale *)
- val parameters_of: theory -> string -> ((string * typ) * mixfix) list
- val parameters_of_expr: theory -> expr -> ((string * typ) * mixfix) list
- val local_asms_of: theory -> string -> (Attrib.binding * term list) list
- val global_asms_of: theory -> string -> (Attrib.binding * term list) list
-
- (* Theorems *)
- val intros: theory -> string -> thm list * thm list
- val dests: theory -> string -> thm list
- (* Not part of the official interface. DO NOT USE *)
- val facts_of: theory -> string -> (Attrib.binding * (thm list * Attrib.src list) list) list list
-
- (* Not part of the official interface. DO NOT USE *)
- val declarations_of: theory -> string -> declaration list * declaration list;
-
- (* Processing of locale statements *)
- val read_context_statement: string option -> Element.context list ->
- (string * string list) list list -> Proof.context ->
- string option * Proof.context * Proof.context * (term * term list) list list
- val read_context_statement_cmd: xstring option -> Element.context list ->
- (string * string list) list list -> Proof.context ->
- string option * Proof.context * Proof.context * (term * term list) list list
- val cert_context_statement: string option -> Element.context_i list ->
- (term * term list) list list -> Proof.context ->
- string option * Proof.context * Proof.context * (term * term list) list list
- val read_expr: expr -> Element.context list -> Proof.context ->
- Element.context_i list * Proof.context
- val cert_expr: expr -> Element.context_i list -> Proof.context ->
- Element.context_i list * Proof.context
-
- (* Diagnostic functions *)
- val print_locales: theory -> unit
- val print_locale: theory -> bool -> expr -> Element.context list -> unit
- val print_registrations: bool -> string -> Proof.context -> unit
-
- val add_locale: string -> bstring -> expr -> Element.context_i list -> theory
- -> string * Proof.context
- val add_locale_cmd: bstring -> expr -> Element.context list -> theory
- -> string * Proof.context
-
- (* Tactics *)
- val intro_locales_tac: bool -> Proof.context -> thm list -> tactic
-
- (* Storing results *)
- val global_note_qualified: string ->
- ((Binding.T * attribute list) * (thm list * attribute list) list) list ->
- theory -> (string * thm list) list * theory
- val local_note_qualified: string ->
- ((Binding.T * attribute list) * (thm list * attribute list) list) list ->
- Proof.context -> (string * thm list) list * Proof.context
- val add_thmss: string -> string -> (Attrib.binding * (thm list * Attrib.src list) list) list ->
- Proof.context -> Proof.context
- val add_type_syntax: string -> declaration -> Proof.context -> Proof.context
- val add_term_syntax: string -> declaration -> Proof.context -> Proof.context
- val add_declaration: string -> declaration -> Proof.context -> Proof.context
-
- (* Interpretation *)
- val get_interpret_morph: theory -> (Binding.T -> Binding.T) -> string * string ->
- (Morphism.morphism * ((typ Vartab.table * typ list) * (term Vartab.table * term list))) ->
- string -> term list -> Morphism.morphism
- val interpretation: (Proof.context -> Proof.context) ->
- (Binding.T -> Binding.T) -> expr ->
- term option list * (Attrib.binding * term) list ->
- theory ->
- (Morphism.morphism * ((typ Vartab.table * typ list) * (term Vartab.table * term list))) * Proof.state
- val interpretation_cmd: string -> expr -> string option list * (Attrib.binding * string) list ->
- theory -> Proof.state
- val interpretation_in_locale: (Proof.context -> Proof.context) ->
- xstring * expr -> theory -> Proof.state
- val interpret: (Proof.state -> Proof.state) ->
- (Binding.T -> Binding.T) -> expr ->
- term option list * (Attrib.binding * term) list ->
- bool -> Proof.state ->
- (Morphism.morphism * ((typ Vartab.table * typ list) * (term Vartab.table * term list))) * Proof.state
- val interpret_cmd: string -> expr -> string option list * (Attrib.binding * string) list ->
- bool -> Proof.state -> Proof.state
-end;
-
-structure Old_Locale: OLD_LOCALE =
-struct
-
-(* legacy operations *)
-
-fun merge_lists _ xs [] = xs
- | merge_lists _ [] ys = ys
- | merge_lists eq xs ys = xs @ filter_out (member eq xs) ys;
-
-fun merge_alists eq xs = merge_lists (eq_fst eq) xs;
-
-
-(* auxiliary: noting name bindings with qualified base names *)
-
-fun global_note_qualified kind facts thy =
- thy
- |> Sign.qualified_names
- |> PureThy.note_thmss kind facts
- ||> Sign.restore_naming thy;
-
-fun local_note_qualified kind facts ctxt =
- ctxt
- |> ProofContext.qualified_names
- |> ProofContext.note_thmss_i kind facts
- ||> ProofContext.restore_naming ctxt;
-
-
-(** locale elements and expressions **)
-
-datatype ctxt = datatype Element.ctxt;
-
-datatype expr =
- Locale of string |
- Rename of expr * (string * mixfix option) option list |
- Merge of expr list;
-
-val empty = Merge [];
-
-datatype 'a element =
- Elem of 'a | Expr of expr;
-
-fun map_elem f (Elem e) = Elem (f e)
- | map_elem _ (Expr e) = Expr e;
-
-type decl = declaration * stamp;
-
-type locale =
- {axiom: Element.witness list,
- (* For locales that define predicates this is [A [A]], where A is the locale
- specification. Otherwise [].
- Only required to generate the right witnesses for locales with predicates. *)
- elems: (Element.context_i * stamp) list,
- (* Static content, neither Fixes nor Constrains elements *)
- params: ((string * typ) * mixfix) list, (*all term params*)
- decls: decl list * decl list, (*type/term_syntax declarations*)
- regs: ((string * string list) * Element.witness list) list,
- (* Registrations: indentifiers and witnesses of locales interpreted in the locale. *)
- intros: thm list * thm list,
- (* Introduction rules: of delta predicate and locale predicate. *)
- dests: thm list}
- (* Destruction rules: projections from locale predicate to predicates of fragments. *)
-
-(* CB: an internal (Int) locale element was either imported or included,
- an external (Ext) element appears directly in the locale text. *)
-
-datatype ('a, 'b) int_ext = Int of 'a | Ext of 'b;
-
-
-
-(** substitutions on Vars -- clone from element.ML **)
-
-(* instantiate types *)
-
-fun var_instT_type env =
- if Vartab.is_empty env then I
- else Term.map_type_tvar (fn (x, S) => the_default (TVar (x, S)) (Vartab.lookup env x));
-
-fun var_instT_term env =
- if Vartab.is_empty env then I
- else Term.map_types (var_instT_type env);
-
-fun var_inst_term (envT, env) =
- if Vartab.is_empty env then var_instT_term envT
- else
- let
- val instT = var_instT_type envT;
- fun inst (Const (x, T)) = Const (x, instT T)
- | inst (Free (x, T)) = Free(x, instT T)
- | inst (Var (xi, T)) =
- (case Vartab.lookup env xi of
- NONE => Var (xi, instT T)
- | SOME t => t)
- | inst (b as Bound _) = b
- | inst (Abs (x, T, t)) = Abs (x, instT T, inst t)
- | inst (t $ u) = inst t $ inst u;
- in Envir.beta_norm o inst end;
-
-
-(** management of registrations in theories and proof contexts **)
-
-type registration =
- {prfx: (Binding.T -> Binding.T) * (string * string),
- (* first component: interpretation name morphism;
- second component: parameter prefix *)
- exp: Morphism.morphism,
- (* maps content to its originating context *)
- imp: (typ Vartab.table * typ list) * (term Vartab.table * term list),
- (* inverse of exp *)
- wits: Element.witness list,
- (* witnesses of the registration *)
- eqns: thm Termtab.table,
- (* theorems (equations) interpreting derived concepts and indexed by lhs *)
- morph: unit
- (* interpreting morphism *)
- }
-
-structure Registrations :
- sig
- type T
- val empty: T
- val join: T * T -> T
- val dest: theory -> T ->
- (term list *
- (((Binding.T -> Binding.T) * (string * string)) *
- (Morphism.morphism * ((typ Vartab.table * typ list) * (term Vartab.table * term list))) *
- Element.witness list *
- thm Termtab.table)) list
- val test: theory -> T * term list -> bool
- val lookup: theory ->
- T * (term list * ((typ Vartab.table * typ list) * (term Vartab.table * term list))) ->
- (((Binding.T -> Binding.T) * (string * string)) * Element.witness list * thm Termtab.table) option
- val insert: theory -> term list -> ((Binding.T -> Binding.T) * (string * string)) ->
- (Morphism.morphism * ((typ Vartab.table * typ list) * (term Vartab.table * term list))) ->
- T ->
- T * (term list * (((Binding.T -> Binding.T) * (string * string)) * Element.witness list)) list
- val add_witness: term list -> Element.witness -> T -> T
- val add_equation: term list -> thm -> T -> T
-(*
- val update_morph: term list -> Morphism.morphism -> T -> T
- val get_morph: theory -> T ->
- term list * ((typ Vartab.table * typ list) * (term Vartab.table * term list)) ->
- Morphism.morphism
-*)
- end =
-struct
- (* A registration is indexed by parameter instantiation.
- NB: index is exported whereas content is internalised. *)
- type T = registration Termtab.table;
-
- fun mk_reg prfx exp imp wits eqns morph =
- {prfx = prfx, exp = exp, imp = imp, wits = wits, eqns = eqns, morph = morph};
-
- fun map_reg f reg =
- let
- val {prfx, exp, imp, wits, eqns, morph} = reg;
- val (prfx', exp', imp', wits', eqns', morph') = f (prfx, exp, imp, wits, eqns, morph);
- in mk_reg prfx' exp' imp' wits' eqns' morph' end;
-
- val empty = Termtab.empty;
-
- (* term list represented as single term, for simultaneous matching *)
- fun termify ts =
- Term.list_comb (Const ("", map fastype_of ts ---> propT), ts);
- fun untermify t =
- let fun ut (Const _) ts = ts
- | ut (s $ t) ts = ut s (t::ts)
- in ut t [] end;
-
- (* joining of registrations:
- - prefix and morphisms of right theory;
- - witnesses are equal, no attempt to subsumption testing;
- - union of equalities, if conflicting (i.e. two eqns with equal lhs)
- eqn of right theory takes precedence *)
- fun join (r1, r2) = Termtab.join (fn _ => fn ({eqns = e1, ...}, {prfx = n, exp, imp, wits = w, eqns = e2, morph = m}) =>
- mk_reg n exp imp w (Termtab.join (fn _ => fn (_, e) => e) (e1, e2)) m) (r1, r2);
-
- fun dest_transfer thy regs =
- Termtab.dest regs |> map (apsnd (map_reg (fn (n, e, i, ws, es, m) =>
- (n, e, i, map (Element.transfer_witness thy) ws, Termtab.map (transfer thy) es, m))));
-
- fun dest thy regs = dest_transfer thy regs |> map (apfst untermify) |>
- map (apsnd (fn {prfx, exp, imp, wits, eqns, ...} => (prfx, (exp, imp), wits, eqns)));
-
- (* registrations that subsume t *)
- fun subsumers thy t regs =
- filter (fn (t', _) => Pattern.matches thy (t', t)) (dest_transfer thy regs);
-
- (* test if registration that subsumes the query is present *)
- fun test thy (regs, ts) =
- not (null (subsumers thy (termify ts) regs));
-
- (* look up registration, pick one that subsumes the query *)
- fun lookup thy (regs, (ts, ((impT, _), (imp, _)))) =
- let
- val t = termify ts;
- val subs = subsumers thy t regs;
- in
- (case subs of
- [] => NONE
- | ((t', {prfx, exp = exp', imp = ((impT', domT'), (imp', dom')), wits, eqns, morph}) :: _) =>
- let
- val (tinst, inst) = Pattern.match thy (t', t) (Vartab.empty, Vartab.empty);
- val tinst' = domT' |> map (fn (T as TFree (x, _)) =>
- (x, T |> Morphism.typ exp' |> Envir.typ_subst_TVars tinst
- |> var_instT_type impT)) |> Symtab.make;
- val inst' = dom' |> map (fn (t as Free (x, _)) =>
- (x, t |> Morphism.term exp' |> Envir.subst_vars (tinst, inst)
- |> var_inst_term (impT, imp))) |> Symtab.make;
- val inst'_morph = Element.inst_morphism thy (tinst', inst');
- in SOME (prfx,
- map (Element.morph_witness inst'_morph) wits,
- Termtab.map (Morphism.thm inst'_morph) eqns)
- end)
- end;
-
- (* add registration if not subsumed by ones already present,
- additionally returns registrations that are strictly subsumed *)
- fun insert thy ts prfx (exp, imp) regs =
- let
- val t = termify ts;
- val subs = subsumers thy t regs ;
- in (case subs of
- [] => let
- val sups =
- filter (fn (t', _) => Pattern.matches thy (t, t')) (dest_transfer thy regs);
- val sups' = map (apfst untermify) sups |> map (fn (ts, {prfx, wits, ...}) => (ts, (prfx, wits)))
- in (Termtab.update (t, mk_reg prfx exp imp [] Termtab.empty ()) regs, sups') end
- | _ => (regs, []))
- end;
-
- fun gen_add f ts regs =
- let
- val t = termify ts;
- in
- Termtab.update (t, map_reg f (the (Termtab.lookup regs t))) regs
- end;
-
- (* add witness theorem to registration,
- only if instantiation is exact, otherwise exception Option raised *)
- fun add_witness ts wit regs =
- gen_add (fn (x, e, i, wits, eqns, m) => (x, e, i, Element.close_witness wit :: wits, eqns, m))
- ts regs;
-
- (* add equation to registration, replaces previous equation with same lhs;
- only if instantiation is exact, otherwise exception Option raised;
- exception TERM raised if not a meta equality *)
- fun add_equation ts thm regs =
- gen_add (fn (x, e, i, thms, eqns, m) =>
- (x, e, i, thms, Termtab.update (thm |> prop_of |> Logic.dest_equals |> fst, Thm.close_derivation thm) eqns, m))
- ts regs;
-
-end;
-
-
-(** theory data : locales **)
-
-structure LocalesData = TheoryDataFun
-(
- type T = NameSpace.T * locale Symtab.table;
- (* 1st entry: locale namespace,
- 2nd entry: locales of the theory *)
-
- val empty = NameSpace.empty_table;
- val copy = I;
- val extend = I;
-
- fun join_locales _
- ({axiom, elems, params, decls = (decls1, decls2), regs, intros, dests}: locale,
- {elems = elems', decls = (decls1', decls2'), regs = regs', ...}: locale) =
- {axiom = axiom,
- elems = merge_lists (eq_snd (op =)) elems elems',
- params = params,
- decls =
- (Library.merge (eq_snd (op =)) (decls1, decls1'),
- Library.merge (eq_snd (op =)) (decls2, decls2')),
- regs = merge_alists (op =) regs regs',
- intros = intros,
- dests = dests};
- fun merge _ = NameSpace.join_tables join_locales;
-);
-
-
-
-(** context data : registrations **)
-
-structure RegistrationsData = GenericDataFun
-(
- type T = Registrations.T Symtab.table; (*registrations, indexed by locale name*)
- val empty = Symtab.empty;
- val extend = I;
- fun merge _ = Symtab.join (K Registrations.join);
-);
-
-
-(** access locales **)
-
-val intern = NameSpace.intern o #1 o LocalesData.get;
-val extern = NameSpace.extern o #1 o LocalesData.get;
-
-fun get_locale thy name = Symtab.lookup (#2 (LocalesData.get thy)) name;
-
-fun the_locale thy name = case get_locale thy name
- of SOME loc => loc
- | NONE => error ("Unknown locale " ^ quote name);
-
-fun register_locale bname loc thy =
- thy |> LocalesData.map (NameSpace.bind (Sign.naming_of thy)
- (Binding.name bname, loc) #> snd);
-
-fun change_locale name f thy =
- let
- val {axiom, elems, params, decls, regs, intros, dests} =
- the_locale thy name;
- val (axiom', elems', params', decls', regs', intros', dests') =
- f (axiom, elems, params, decls, regs, intros, dests);
- in
- thy
- |> (LocalesData.map o apsnd) (Symtab.update (name, {axiom = axiom',
- elems = elems', params = params',
- decls = decls', regs = regs', intros = intros', dests = dests'}))
- end;
-
-fun print_locales thy =
- let val (space, locs) = LocalesData.get thy in
- Pretty.strs ("locales:" :: map #1 (NameSpace.extern_table (space, locs)))
- |> Pretty.writeln
- end;
-
-
-(* access registrations *)
-
-(* retrieve registration from theory or context *)
-
-fun get_registrations ctxt name =
- case Symtab.lookup (RegistrationsData.get ctxt) name of
- NONE => []
- | SOME reg => Registrations.dest (Context.theory_of ctxt) reg;
-
-fun get_global_registrations thy = get_registrations (Context.Theory thy);
-fun get_local_registrations ctxt = get_registrations (Context.Proof ctxt);
-
-
-fun get_registration ctxt imprt (name, ps) =
- case Symtab.lookup (RegistrationsData.get ctxt) name of
- NONE => NONE
- | SOME reg => Registrations.lookup (Context.theory_of ctxt) (reg, (ps, imprt));
-
-fun get_global_registration thy = get_registration (Context.Theory thy);
-fun get_local_registration ctxt = get_registration (Context.Proof ctxt);
-
-
-fun test_registration ctxt (name, ps) =
- case Symtab.lookup (RegistrationsData.get ctxt) name of
- NONE => false
- | SOME reg => Registrations.test (Context.theory_of ctxt) (reg, ps);
-
-fun test_global_registration thy = test_registration (Context.Theory thy);
-fun test_local_registration ctxt = test_registration (Context.Proof ctxt);
-
-
-(* add registration to theory or context, ignored if subsumed *)
-
-fun put_registration (name, ps) prfx morphs ctxt =
- RegistrationsData.map (fn regs =>
- let
- val thy = Context.theory_of ctxt;
- val reg = the_default Registrations.empty (Symtab.lookup regs name);
- val (reg', sups) = Registrations.insert thy ps prfx morphs reg;
- val _ = if not (null sups) then warning
- ("Subsumed interpretation(s) of locale " ^
- quote (extern thy name) ^
- "\nwith the following prefix(es):" ^
- commas_quote (map (fn (_, ((_, (_, s)), _)) => s) sups))
- else ();
- in Symtab.update (name, reg') regs end) ctxt;
-
-fun put_global_registration id prfx morphs =
- Context.theory_map (put_registration id prfx morphs);
-fun put_local_registration id prfx morphs =
- Context.proof_map (put_registration id prfx morphs);
-
-fun put_registration_in_locale name id =
- change_locale name (fn (axiom, elems, params, decls, regs, intros, dests) =>
- (axiom, elems, params, decls, regs @ [(id, [])], intros, dests));
-
-
-(* add witness theorem to registration, ignored if registration not present *)
-
-fun add_witness (name, ps) thm =
- RegistrationsData.map (Symtab.map_entry name (Registrations.add_witness ps thm));
-
-fun add_global_witness id thm = Context.theory_map (add_witness id thm);
-fun add_local_witness id thm = Context.proof_map (add_witness id thm);
-
-
-fun add_witness_in_locale name id thm =
- change_locale name (fn (axiom, elems, params, decls, regs, intros, dests) =>
- let
- fun add (id', thms) =
- if id = id' then (id', thm :: thms) else (id', thms);
- in (axiom, elems, params, decls, map add regs, intros, dests) end);
-
-
-(* add equation to registration, ignored if registration not present *)
-
-fun add_equation (name, ps) thm =
- RegistrationsData.map (Symtab.map_entry name (Registrations.add_equation ps thm));
-
-fun add_global_equation id thm = Context.theory_map (add_equation id thm);
-fun add_local_equation id thm = Context.proof_map (add_equation id thm);
-
-(*
-(* update morphism of registration, ignored if registration not present *)
-
-fun update_morph (name, ps) morph =
- RegistrationsData.map (Symtab.map_entry name (Registrations.update_morph ps morph));
-
-fun update_global_morph id morph = Context.theory_map (update_morph id morph);
-fun update_local_morph id morph = Context.proof_map (update_morph id morph);
-*)
-
-
-(* printing of registrations *)
-
-fun print_registrations show_wits loc ctxt =
- let
- val thy = ProofContext.theory_of ctxt;
- val prt_term = Pretty.quote o Syntax.pretty_term ctxt;
- fun prt_term' t = if !show_types
- then Pretty.block [prt_term t, Pretty.brk 1, Pretty.str "::",
- Pretty.brk 1, (Pretty.quote o Syntax.pretty_typ ctxt) (type_of t)]
- else prt_term t;
- val prt_thm = prt_term o prop_of;
- fun prt_inst ts =
- Pretty.enclose "(" ")" (Pretty.breaks (map prt_term' ts));
- fun prt_prfx ((false, prfx), param_prfx) = [Pretty.str prfx, Pretty.brk 1, Pretty.str "(optional)", Pretty.brk 1, Pretty.str param_prfx]
- | prt_prfx ((true, prfx), param_prfx) = [Pretty.str prfx, Pretty.brk 1, Pretty.str param_prfx];
- fun prt_eqns [] = Pretty.str "no equations."
- | prt_eqns eqns = Pretty.block (Pretty.str "equations:" :: Pretty.brk 1 ::
- Pretty.breaks (map prt_thm eqns));
- fun prt_core ts eqns =
- [prt_inst ts, Pretty.fbrk, prt_eqns (Termtab.dest eqns |> map snd)];
- fun prt_witns [] = Pretty.str "no witnesses."
- | prt_witns witns = Pretty.block (Pretty.str "witnesses:" :: Pretty.brk 1 ::
- Pretty.breaks (map (Element.pretty_witness ctxt) witns))
- fun prt_reg (ts, (_, _, witns, eqns)) =
- if show_wits
- then Pretty.block (prt_core ts eqns @ [Pretty.fbrk, prt_witns witns])
- else Pretty.block (prt_core ts eqns)
-
- val loc_int = intern thy loc;
- val regs = RegistrationsData.get (Context.Proof ctxt);
- val loc_regs = Symtab.lookup regs loc_int;
- in
- (case loc_regs of
- NONE => Pretty.str ("no interpretations")
- | SOME r => let
- val r' = Registrations.dest thy r;
- val r'' = Library.sort_wrt (fn (_, ((_, (_, prfx)), _, _, _)) => prfx) r';
- in Pretty.big_list ("interpretations:") (map prt_reg r'') end)
- |> Pretty.writeln
- end;
-
-
-(* diagnostics *)
-
-fun err_in_locale ctxt msg ids =
- let
- val thy = ProofContext.theory_of ctxt;
- fun prt_id (name, parms) =
- [Pretty.block (Pretty.breaks (map Pretty.str (extern thy name :: parms)))];
- val prt_ids = flat (separate [Pretty.str " +", Pretty.brk 1] (map prt_id ids));
- val err_msg =
- if forall (fn (s, _) => s = "") ids then msg
- else msg ^ "\n" ^ Pretty.string_of (Pretty.block
- (Pretty.str "The error(s) above occurred in locale:" :: Pretty.brk 1 :: prt_ids));
- in error err_msg end;
-
-fun err_in_locale' ctxt msg ids' = err_in_locale ctxt msg (map fst ids');
-
-
-fun pretty_ren NONE = Pretty.str "_"
- | pretty_ren (SOME (x, NONE)) = Pretty.str x
- | pretty_ren (SOME (x, SOME syn)) =
- Pretty.block [Pretty.str x, Pretty.brk 1, Syntax.pretty_mixfix syn];
-
-fun pretty_expr thy (Locale name) = Pretty.str (extern thy name)
- | pretty_expr thy (Rename (expr, xs)) =
- Pretty.block [pretty_expr thy expr, Pretty.brk 1, Pretty.block (map pretty_ren xs |> Pretty.breaks)]
- | pretty_expr thy (Merge es) =
- Pretty.separate "+" (map (pretty_expr thy) es) |> Pretty.block;
-
-fun err_in_expr _ msg (Merge []) = error msg
- | err_in_expr ctxt msg expr =
- error (msg ^ "\n" ^ Pretty.string_of (Pretty.block
-        [Pretty.str "The error(s) above occurred in locale expression:", Pretty.brk 1,
- pretty_expr (ProofContext.theory_of ctxt) expr]));
-
-
-(** structured contexts: rename + merge + implicit type instantiation **)
-
-(* parameter types *)
-
-fun frozen_tvars ctxt Ts =
- #1 (Variable.importT_inst (map Logic.mk_type Ts) ctxt)
- |> map (fn ((xi, S), T) => (xi, (S, T)));
-
-fun unify_frozen ctxt maxidx Ts Us =
- let
- fun paramify NONE i = (NONE, i)
- | paramify (SOME T) i = apfst SOME (TypeInfer.paramify_dummies T i);
-
- val (Ts', maxidx') = fold_map paramify Ts maxidx;
- val (Us', maxidx'') = fold_map paramify Us maxidx';
- val thy = ProofContext.theory_of ctxt;
-
- fun unify (SOME T, SOME U) env = (Sign.typ_unify thy (U, T) env
- handle Type.TUNIFY => raise TYPE ("unify_frozen: failed to unify types", [U, T], []))
- | unify _ env = env;
- val (unifier, _) = fold unify (Ts' ~~ Us') (Vartab.empty, maxidx'');
- val Vs = map (Option.map (Envir.norm_type unifier)) Us';
- val unifier' = fold Vartab.update_new (frozen_tvars ctxt (map_filter I Vs)) unifier;
- in map (Option.map (Envir.norm_type unifier')) Vs end;
-
-fun params_of elemss =
- distinct (eq_fst (op = : string * string -> bool)) (maps (snd o fst) elemss);
-
-fun params_of' elemss =
- distinct (eq_fst (op = : string * string -> bool)) (maps (snd o fst o fst) elemss);
-
-fun param_prefix locale_name params = (NameSpace.base locale_name ^ "_locale", space_implode "_" params);
-
-
-(* CB: param_types has the following type:
- ('a * 'b option) list -> ('a * 'b) list *)
-fun param_types ps = map_filter (fn (_, NONE) => NONE | (x, SOME T) => SOME (x, T)) ps;
-
-
-fun merge_syntax ctxt ids ss = Symtab.merge (op = : mixfix * mixfix -> bool) ss
- handle Symtab.DUP x => err_in_locale ctxt
- ("Conflicting syntax for parameter: " ^ quote x) (map fst ids);
-
-
-(* Distinction of assumed vs. derived identifiers.
- The former may have axioms relating assumptions of the context to
- assumptions of the specification fragment (for locales with
- predicates). The latter have witnesses relating assumptions of the
- specification fragment to assumptions of other (assumed) specification
- fragments. *)
-
-datatype 'a mode = Assumed of 'a | Derived of 'a;
-
-fun map_mode f (Assumed x) = Assumed (f x)
- | map_mode f (Derived x) = Derived (f x);
-
-
-(* flatten expressions *)
-
-local
-
-fun unify_parms ctxt fixed_parms raw_parmss =
- let
- val thy = ProofContext.theory_of ctxt;
- val maxidx = length raw_parmss;
- val idx_parmss = (0 upto maxidx - 1) ~~ raw_parmss;
-
- fun varify i = Term.map_type_tfree (fn (a, S) => TVar ((a, i), S));
- fun varify_parms (i, ps) = map (apsnd (varify i)) (param_types ps);
- val parms = fixed_parms @ maps varify_parms idx_parmss;
-
- fun unify T U envir = Sign.typ_unify thy (U, T) envir
- handle Type.TUNIFY =>
- let
- val T' = Envir.norm_type (fst envir) T;
- val U' = Envir.norm_type (fst envir) U;
- val prt = Syntax.string_of_typ ctxt;
- in
- raise TYPE ("unify_parms: failed to unify types " ^
- prt U' ^ " and " ^ prt T', [U', T'], [])
- end;
- fun unify_list (T :: Us) = fold (unify T) Us
- | unify_list [] = I;
- val (unifier, _) = fold unify_list (map #2 (Symtab.dest (Symtab.make_list parms)))
- (Vartab.empty, maxidx);
-
- val parms' = map (apsnd (Envir.norm_type unifier)) (distinct (eq_fst (op =)) parms);
- val unifier' = fold Vartab.update_new (frozen_tvars ctxt (map #2 parms')) unifier;
-
- fun inst_parms (i, ps) =
- List.foldr OldTerm.add_typ_tfrees [] (map_filter snd ps)
- |> map_filter (fn (a, S) =>
- let val T = Envir.norm_type unifier' (TVar ((a, i), S))
- in if T = TFree (a, S) then NONE else SOME (a, T) end)
- |> Symtab.make;
- in map inst_parms idx_parmss end;
-
-in
-
-fun unify_elemss _ _ [] = []
- | unify_elemss _ [] [elems] = [elems]
- | unify_elemss ctxt fixed_parms elemss =
- let
- val thy = ProofContext.theory_of ctxt;
- val phis = unify_parms ctxt fixed_parms (map (snd o fst o fst) elemss)
- |> map (Element.instT_morphism thy);
- fun inst ((((name, ps), mode), elems), phi) =
- (((name, map (apsnd (Option.map (Morphism.typ phi))) ps),
- map_mode (map (Element.morph_witness phi)) mode),
- map (Element.morph_ctxt phi) elems);
- in map inst (elemss ~~ phis) end;
-
-
-fun renaming xs parms = zip_options parms xs
- handle Library.UnequalLengths =>
- error ("Too many arguments in renaming: " ^
- commas (map (fn NONE => "_" | SOME x => quote (fst x)) xs));
-
-
-(* params_of_expr:
- Compute parameters (with types and syntax) of locale expression.
-*)
-
-fun params_of_expr ctxt fixed_params expr (prev_parms, prev_types, prev_syn) =
- let
- val thy = ProofContext.theory_of ctxt;
-
- fun merge_tenvs fixed tenv1 tenv2 =
- let
- val [env1, env2] = unify_parms ctxt fixed
- [tenv1 |> Symtab.dest |> map (apsnd SOME),
- tenv2 |> Symtab.dest |> map (apsnd SOME)]
- in
- Symtab.merge (op =) (Symtab.map (Element.instT_type env1) tenv1,
- Symtab.map (Element.instT_type env2) tenv2)
- end;
-
- fun merge_syn expr syn1 syn2 =
- Symtab.merge (op = : mixfix * mixfix -> bool) (syn1, syn2)
- handle Symtab.DUP x => err_in_expr ctxt
- ("Conflicting syntax for parameter: " ^ quote x) expr;
-
- fun params_of (expr as Locale name) =
- let
- val {params, ...} = the_locale thy name;
- in (map (fst o fst) params, params |> map fst |> Symtab.make,
- params |> map (apfst fst) |> Symtab.make) end
- | params_of (expr as Rename (e, xs)) =
- let
- val (parms', types', syn') = params_of e;
- val ren = renaming xs parms';
- (* renaming may reduce number of parameters *)
- val new_parms = map (Element.rename ren) parms' |> distinct (op =);
- val ren_syn = syn' |> Symtab.dest |> map (Element.rename_var_name ren);
- val new_syn = fold (Symtab.insert (op =)) ren_syn Symtab.empty
- handle Symtab.DUP x =>
- err_in_expr ctxt ("Conflicting syntax for parameter: " ^ quote x) expr;
- val syn_types = map (apsnd (fn mx =>
- SOME (Type.freeze_type (#1 (TypeInfer.paramify_dummies (Syntax.mixfixT mx) 0)))))
- (Symtab.dest new_syn);
- val ren_types = types' |> Symtab.dest |> map (apfst (Element.rename ren));
- val (env :: _) = unify_parms ctxt []
- ((ren_types |> map (apsnd SOME)) :: map single syn_types);
- val new_types = fold (Symtab.insert (op =))
- (map (apsnd (Element.instT_type env)) ren_types) Symtab.empty;
- in (new_parms, new_types, new_syn) end
- | params_of (Merge es) =
- fold (fn e => fn (parms, types, syn) =>
- let
- val (parms', types', syn') = params_of e
- in
- (merge_lists (op =) parms parms', merge_tenvs [] types types',
- merge_syn e syn syn')
- end)
- es ([], Symtab.empty, Symtab.empty)
-
- val (parms, types, syn) = params_of expr;
- in
- (merge_lists (op =) prev_parms parms, merge_tenvs fixed_params prev_types types,
- merge_syn expr prev_syn syn)
- end;
-
-fun make_params_ids params = [(("", params), ([], Assumed []))];
-fun make_raw_params_elemss (params, tenv, syn) =
- [((("", map (fn p => (p, Symtab.lookup tenv p)) params), Assumed []),
- Int [Fixes (map (fn p =>
- (Binding.name p, Symtab.lookup tenv p, Symtab.lookup syn p |> the)) params)])];
-
-
-(* flatten_expr:
- Extend list of identifiers by those new in locale expression expr.
- Compute corresponding list of lists of locale elements (one entry per
- identifier).
-
- Identifiers represent locale fragments and are in an extended form:
- ((name, ps), (ax_ps, axs))
- (name, ps) is the locale name with all its parameters.
- (ax_ps, axs) is the locale axioms with its parameters;
- axs are always taken from the top level of the locale hierarchy,
- hence axioms may contain additional parameters from later fragments:
- ps subset of ax_ps. axs is either singleton or empty.
-
- Elements are enriched by identifier-like information:
- (((name, ax_ps), axs), elems)
- The parameters in ax_ps are the axiom parameters, but enriched by type
- info: now each entry is a pair of string and typ option. Axioms are
- type-instantiated.
-
-*)
-
-fun flatten_expr ctxt ((prev_idents, prev_syntax), expr) =
- let
- val thy = ProofContext.theory_of ctxt;
-
- fun rename_parms top ren ((name, ps), (parms, mode)) =
- ((name, map (Element.rename ren) ps),
- if top
- then (map (Element.rename ren) parms,
- map_mode (map (Element.morph_witness (Element.rename_morphism ren))) mode)
- else (parms, mode));
-
- (* add (name, pTs) and its registrations, recursively; adjust hyps of witnesses *)
-
- fun add_with_regs ((name, pTs), mode) (wits, ids, visited) =
- if member (fn (a, (b, _)) => a = b) visited (name, map #1 pTs)
- then (wits, ids, visited)
- else
- let
- val {params, regs, ...} = the_locale thy name;
- val pTs' = map #1 params;
- val ren = map #1 pTs' ~~ map (fn (x, _) => (x, NONE)) pTs;
- (* dummy syntax, since required by rename *)
- val pTs'' = map (fn ((p, _), (_, T)) => (p, T)) (pTs ~~ pTs');
- val [env] = unify_parms ctxt pTs [map (apsnd SOME) pTs''];
- (* propagate parameter types, to keep them consistent *)
- val regs' = map (fn ((name, ps), wits) =>
- ((name, map (Element.rename ren) ps),
- map (Element.transfer_witness thy) wits)) regs;
- val new_regs = regs';
- val new_ids = map fst new_regs;
- val new_idTs =
- map (apsnd (map (fn p => (p, (the o AList.lookup (op =) pTs) p)))) new_ids;
-
- val new_wits = new_regs |> map (#2 #> map
- (Element.morph_witness
- (Element.instT_morphism thy env $>
- Element.rename_morphism ren $>
- Element.satisfy_morphism wits)
- #> Element.close_witness));
- val new_ids' = map (fn (id, wits) =>
- (id, ([], Derived wits))) (new_ids ~~ new_wits);
- val new_idTs' = map (fn ((n, pTs), (_, ([], mode))) =>
- ((n, pTs), mode)) (new_idTs ~~ new_ids');
- val new_id = ((name, map #1 pTs), ([], mode));
- val (wits', ids', visited') = fold add_with_regs new_idTs'
- (wits @ flat new_wits, ids, visited @ [new_id]);
- in
- (wits', ids' @ [new_id], visited')
- end;
-
- (* distribute top-level axioms over assumed ids *)
-
- fun axiomify all_ps ((name, parms), (_, Assumed _)) axioms =
- let
- val {elems, ...} = the_locale thy name;
- val ts = maps
- (fn (Assumes asms, _) => maps (map #1 o #2) asms
- | _ => [])
- elems;
- val (axs1, axs2) = chop (length ts) axioms;
- in (((name, parms), (all_ps, Assumed axs1)), axs2) end
- | axiomify all_ps (id, (_, Derived ths)) axioms =
- ((id, (all_ps, Derived ths)), axioms);
-
- (* identifiers of an expression *)
-
- fun identify top (Locale name) =
-      (* CB: ids_ax is a list of tuples of the form ((name, ps), axs),
-       where name is a locale name, ps a list of parameter names and axs
-       a list of axioms relating to the identifier; axs is empty unless
-       identify is called at top level (top = true);
-       parms is the accumulated list of parameters *)
- let
- val {axiom, params, ...} = the_locale thy name;
- val ps = map (#1 o #1) params;
- val (_, ids'', _) = add_with_regs ((name, map #1 params), Assumed []) ([], [], []);
- val ids_ax = if top then fst (fold_map (axiomify ps) ids'' axiom) else ids'';
- in (ids_ax, ps) end
- | identify top (Rename (e, xs)) =
- let
- val (ids', parms') = identify top e;
- val ren = renaming xs parms'
- handle ERROR msg => err_in_locale' ctxt msg ids';
-
- val ids'' = distinct (eq_fst (op =)) (map (rename_parms top ren) ids');
- val parms'' = distinct (op =) (maps (#2 o #1) ids'');
- in (ids'', parms'') end
- | identify top (Merge es) =
- fold (fn e => fn (ids, parms) =>
- let
- val (ids', parms') = identify top e
- in
- (merge_alists (op =) ids ids', merge_lists (op =) parms parms')
- end)
- es ([], []);
-
- fun inst_wit all_params (t, th) = let
- val {hyps, prop, ...} = Thm.rep_thm th;
- val ps = map (apsnd SOME) (fold Term.add_frees (prop :: hyps) []);
- val [env] = unify_parms ctxt all_params [ps];
- val t' = Element.instT_term env t;
- val th' = Element.instT_thm thy env th;
- in (t', th') end;
-
- fun eval all_params tenv syn ((name, params), (locale_params, mode)) =
- let
- val {params = ps_mx, elems = elems_stamped, ...} = the_locale thy name;
- val elems = map fst elems_stamped;
- val ps = map fst ps_mx;
- fun lookup_syn x = (case Symtab.lookup syn x of SOME Structure => NONE | opt => opt);
- val locale_params' = map (fn p => (p, Symtab.lookup tenv p |> the)) locale_params;
- val mode' = map_mode (map (Element.map_witness (inst_wit all_params))) mode;
- val ren = map fst ps ~~ map (fn p => (p, lookup_syn p)) params;
- val [env] = unify_parms ctxt all_params [map (apfst (Element.rename ren) o apsnd SOME) ps];
- val (lprfx, pprfx) = param_prefix name params;
-        val add_prefixes = pprfx <> "" ? Binding.add_prefix false pprfx
-          #> Binding.add_prefix false lprfx;
-        val elem_morphism =
-          Element.rename_morphism ren $>
-          Morphism.binding_morphism add_prefixes $>
- Element.instT_morphism thy env;
- val elems' = map (Element.morph_ctxt elem_morphism) elems;
- in (((name, map (apsnd SOME) locale_params'), mode'), elems') end;
-
- (* parameters, their types and syntax *)
- val (all_params', tenv, syn) = params_of_expr ctxt [] expr ([], Symtab.empty, Symtab.empty);
- val all_params = map (fn p => (p, Symtab.lookup tenv p |> the)) all_params';
- (* compute identifiers and syntax, merge with previous ones *)
- val (ids, _) = identify true expr;
- val idents = subtract (eq_fst (op =)) prev_idents ids;
- val syntax = merge_syntax ctxt ids (syn, prev_syntax);
- (* type-instantiate elements *)
- val final_elemss = map (eval all_params tenv syntax) idents;
- in ((prev_idents @ idents, syntax), final_elemss) end;
-
-end;
-
-
-(* activate elements *)
-
-local
-
-fun axioms_export axs _ As =
- (Element.satisfy_thm axs #> Drule.implies_intr_list (Library.drop (length axs, As)), fn t => t);
-
-
-(* NB: derived ids contain only facts at this stage *)
-
-fun activate_elem _ _ (Fixes fixes) (ctxt, mode) =
- ([], (ctxt |> ProofContext.add_fixes_i fixes |> snd, mode))
- | activate_elem _ _ (Constrains _) (ctxt, mode) =
- ([], (ctxt, mode))
- | activate_elem ax_in_ctxt _ (Assumes asms) (ctxt, Assumed axs) =
- let
- val asms' = Attrib.map_specs (Attrib.attribute_i (ProofContext.theory_of ctxt)) asms;
- val ts = maps (map #1 o #2) asms';
- val (ps, qs) = chop (length ts) axs;
- val (_, ctxt') =
- ctxt |> fold Variable.auto_fixes ts
- |> ProofContext.add_assms_i (axioms_export (if ax_in_ctxt then ps else [])) asms';
- in ([], (ctxt', Assumed qs)) end
- | activate_elem _ _ (Assumes asms) (ctxt, Derived ths) =
- ([], (ctxt, Derived ths))
- | activate_elem _ _ (Defines defs) (ctxt, Assumed axs) =
- let
- val defs' = Attrib.map_specs (Attrib.attribute_i (ProofContext.theory_of ctxt)) defs;
- val asms = defs' |> map (fn ((name, atts), (t, ps)) =>
- let val ((c, _), t') = LocalDefs.cert_def ctxt t
- in (t', ((Binding.map_base (Thm.def_name_optional c) name, atts), [(t', ps)])) end);
- val (_, ctxt') =
- ctxt |> fold (Variable.auto_fixes o #1) asms
- |> ProofContext.add_assms_i LocalDefs.def_export (map #2 asms);
- in ([], (ctxt', Assumed axs)) end
- | activate_elem _ _ (Defines defs) (ctxt, Derived ths) =
- ([], (ctxt, Derived ths))
- | activate_elem _ is_ext (Notes (kind, facts)) (ctxt, mode) =
- let
- val facts' = Attrib.map_facts (Attrib.attribute_i (ProofContext.theory_of ctxt)) facts;
- val (res, ctxt') = ctxt |> local_note_qualified kind facts';
- in (if is_ext then (map (#1 o #1) facts' ~~ map #2 res) else [], (ctxt', mode)) end;
-
-fun activate_elems ax_in_ctxt (((name, ps), mode), elems) ctxt =
- let
- val thy = ProofContext.theory_of ctxt;
- val (res, (ctxt', _)) = fold_map (activate_elem ax_in_ctxt (name = ""))
- elems (ProofContext.qualified_names ctxt, mode)
- handle ERROR msg => err_in_locale ctxt msg [(name, map fst ps)];
- val ctxt'' = if name = "" then ctxt'
- else let
- val ps' = map (fn (n, SOME T) => Free (n, T)) ps;
- in if test_local_registration ctxt' (name, ps') then ctxt'
- else let
- val ctxt'' = put_local_registration (name, ps') (I, (NameSpace.base name, ""))
- (Morphism.identity, ((Vartab.empty, []), (Vartab.empty, []) )) ctxt'
- in case mode of
- Assumed axs =>
- fold (add_local_witness (name, ps') o
- Element.assume_witness thy o Element.witness_prop) axs ctxt''
- | Derived ths =>
- fold (add_local_witness (name, ps')) ths ctxt''
- end
- end
- in (ProofContext.restore_naming ctxt ctxt'', res) end;
-
-fun activate_elemss ax_in_ctxt prep_facts =
- fold_map (fn (((name, ps), mode), raw_elems) => fn ctxt =>
- let
- val elems = map (prep_facts ctxt) raw_elems;
- val (ctxt', res) = apsnd flat
- (activate_elems ax_in_ctxt (((name, ps), mode), elems) ctxt);
- val elems' = elems |> map (Element.map_ctxt_attrib Args.closure);
- in (((((name, ps), mode), elems'), res), ctxt') end);
-
-in
-
-(* CB: activate_facts prep_facts elemss ctxt,
- where elemss is a list of pairs consisting of identifiers and
- context elements, extends ctxt by the context elements yielding
- ctxt' and returns ((elemss', facts), ctxt').
- Identifiers in the argument are of the form ((name, ps), axs) and
- assumptions use the axioms in the identifiers to set up exporters
- in ctxt'. elemss' does not contain identifiers and is obtained
- from elemss and the intermediate context with prep_facts.
- If read_facts or cert_facts is used for prep_facts, these also remove
- the internal/external markers from elemss. *)
-
-fun activate_facts ax_in_ctxt prep_facts args =
- activate_elemss ax_in_ctxt prep_facts args
- #>> (apsnd flat o split_list);
-
-end;
-
-
-
-(** prepare locale elements **)
-
-(* expressions *)
-
-fun intern_expr thy (Locale xname) = Locale (intern thy xname)
- | intern_expr thy (Merge exprs) = Merge (map (intern_expr thy) exprs)
- | intern_expr thy (Rename (expr, xs)) = Rename (intern_expr thy expr, xs);
-
-
-(* propositions and bindings *)
-
-(* flatten (ctxt, prep_expr) ((ids, syn), expr)
- normalises expr (which is either a locale
-   expression or a single context element) wrt.
-   the list ids of already accumulated identifiers.
- It returns ((ids', syn'), elemss) where ids' is an extension of ids
- with identifiers generated for expr, and elemss is the list of
- context elements generated from expr.
- syn and syn' are symtabs mapping parameter names to their syntax. syn'
- is an extension of syn.
- For details, see flatten_expr.
-
- Additionally, for a locale expression, the elems are grouped into a single
- Int; individual context elements are marked Ext. In this case, the
- identifier-like information of the element is as follows:
- - for Fixes: (("", ps), []) where the ps have type info NONE
- - for other elements: (("", []), []).
- The implementation of activate_facts relies on identifier names being
- empty strings for external elements.
-*)
-
-fun flatten (ctxt, _) ((ids, syn), Elem (Fixes fixes)) = let
- val ids' = ids @ [(("", map (Binding.base_name o #1) fixes), ([], Assumed []))]
- in
- ((ids',
- merge_syntax ctxt ids'
- (syn, Symtab.make (map (fn fx => (Binding.base_name (#1 fx), #3 fx)) fixes))
- handle Symtab.DUP x => err_in_locale ctxt
- ("Conflicting syntax for parameter: " ^ quote x)
- (map #1 ids')),
- [((("", map (rpair NONE o Binding.base_name o #1) fixes), Assumed []), Ext (Fixes fixes))])
- end
- | flatten _ ((ids, syn), Elem elem) =
- ((ids @ [(("", []), ([], Assumed []))], syn), [((("", []), Assumed []), Ext elem)])
- | flatten (ctxt, prep_expr) ((ids, syn), Expr expr) =
- apsnd (map (apsnd Int)) (flatten_expr ctxt ((ids, syn), prep_expr expr));
-
-local
-
-local
-
-fun declare_int_elem (Fixes fixes) ctxt =
- ([], ctxt |> ProofContext.add_fixes_i (map (fn (x, T, mx) =>
- (x, Option.map (Term.map_type_tfree (TypeInfer.param 0)) T, mx)) fixes) |> snd)
- | declare_int_elem _ ctxt = ([], ctxt);
-
-fun declare_ext_elem prep_vars (Fixes fixes) ctxt =
- let val (vars, _) = prep_vars fixes ctxt
- in ([], ctxt |> ProofContext.add_fixes_i vars |> snd) end
- | declare_ext_elem prep_vars (Constrains csts) ctxt =
- let val (_, ctxt') = prep_vars (map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) csts) ctxt
- in ([], ctxt') end
- | declare_ext_elem _ (Assumes asms) ctxt = (map #2 asms, ctxt)
- | declare_ext_elem _ (Defines defs) ctxt = (map (fn (_, (t, ps)) => [(t, ps)]) defs, ctxt)
- | declare_ext_elem _ (Notes _) ctxt = ([], ctxt);
-
-fun declare_elems prep_vars (((name, ps), Assumed _), elems) ctxt = ((case elems
- of Int es => fold_map declare_int_elem es ctxt
- | Ext e => declare_ext_elem prep_vars e ctxt |>> single)
- handle ERROR msg => err_in_locale ctxt msg [(name, map fst ps)])
- | declare_elems _ ((_, Derived _), elems) ctxt = ([], ctxt);
-
-in
-
-fun declare_elemss prep_vars fixed_params raw_elemss ctxt =
- let
-    (* CB: fix for a type bug affecting goals in targets with context elements.
-       Parameters new in context elements must receive types that are
-       distinct from the types of parameters in the target (fixed_params). *)
- val ctxt_with_fixed =
- fold Variable.declare_term (map Free fixed_params) ctxt;
- val int_elemss =
- raw_elemss
- |> map_filter (fn (id, Int es) => SOME (id, es) | _ => NONE)
- |> unify_elemss ctxt_with_fixed fixed_params;
- val (raw_elemss', _) =
- fold_map (curry (fn ((id, Int _), (_, es) :: elemss) => ((id, Int es), elemss) | x => x))
- raw_elemss int_elemss;
- in fold_map (declare_elems prep_vars) raw_elemss' ctxt end;
-
-end;
-
-local
-
-val norm_term = Envir.beta_norm oo Term.subst_atomic;
-
-fun abstract_thm thy eq =
- Thm.assume (Thm.cterm_of thy eq) |> Drule.gen_all |> Drule.abs_def;
-
-fun bind_def ctxt (name, ps) eq (xs, env, ths) =
- let
- val ((y, T), b) = LocalDefs.abs_def eq;
- val b' = norm_term env b;
- val th = abstract_thm (ProofContext.theory_of ctxt) eq;
- fun err msg = err_in_locale ctxt (msg ^ ": " ^ quote y) [(name, map fst ps)];
- in
- exists (fn (x, _) => x = y) xs andalso
- err "Attempt to define previously specified variable";
- exists (fn (Free (y', _), _) => y = y' | _ => false) env andalso
- err "Attempt to redefine variable";
- (Term.add_frees b' xs, (Free (y, T), b') :: env, th :: ths)
- end;
-
-
-(* CB: for finish_elems (Int and Ext),
- extracts specification, only of assumed elements *)
-
-fun eval_text _ _ _ (Fixes _) text = text
- | eval_text _ _ _ (Constrains _) text = text
- | eval_text _ (_, Assumed _) is_ext (Assumes asms)
- (((exts, exts'), (ints, ints')), (xs, env, defs)) =
- let
- val ts = maps (map #1 o #2) asms;
- val ts' = map (norm_term env) ts;
- val spec' =
- if is_ext then ((exts @ ts, exts' @ ts'), (ints, ints'))
- else ((exts, exts'), (ints @ ts, ints' @ ts'));
- in (spec', (fold Term.add_frees ts' xs, env, defs)) end
- | eval_text _ (_, Derived _) _ (Assumes _) text = text
- | eval_text ctxt (id, Assumed _) _ (Defines defs) (spec, binds) =
- (spec, fold (bind_def ctxt id o #1 o #2) defs binds)
- | eval_text _ (_, Derived _) _ (Defines _) text = text
- | eval_text _ _ _ (Notes _) text = text;
-
-
-(* for finish_elems (Int),
- remove redundant elements of derived identifiers,
- turn assumptions and definitions into facts,
- satisfy hypotheses of facts *)
-
-fun finish_derived _ _ (Assumed _) (Fixes fixes) = SOME (Fixes fixes)
- | finish_derived _ _ (Assumed _) (Constrains csts) = SOME (Constrains csts)
- | finish_derived _ _ (Assumed _) (Assumes asms) = SOME (Assumes asms)
- | finish_derived _ _ (Assumed _) (Defines defs) = SOME (Defines defs)
-
- | finish_derived _ _ (Derived _) (Fixes _) = NONE
- | finish_derived _ _ (Derived _) (Constrains _) = NONE
- | finish_derived sign satisfy (Derived _) (Assumes asms) = asms
- |> map (apsnd (map (fn (a, _) => ([Thm.assume (cterm_of sign a)], []))))
- |> pair Thm.assumptionK |> Notes
- |> Element.morph_ctxt satisfy |> SOME
- | finish_derived sign satisfy (Derived _) (Defines defs) = defs
- |> map (apsnd (fn (d, _) => [([Thm.assume (cterm_of sign d)], [])]))
- |> pair Thm.definitionK |> Notes
- |> Element.morph_ctxt satisfy |> SOME
-
- | finish_derived _ satisfy _ (Notes facts) = Notes facts
- |> Element.morph_ctxt satisfy |> SOME;
-
-(* CB: for finish_elems (Ext) *)
-
-fun closeup _ false elem = elem
- | closeup ctxt true elem =
- let
- fun close_frees t =
- let
- val rev_frees =
- Term.fold_aterms (fn Free (x, T) =>
- if Variable.is_fixed ctxt x then I else insert (op =) (x, T) | _ => I) t [];
- in Term.list_all_free (rev rev_frees, t) end;
-
- fun no_binds [] = []
- | no_binds _ = error "Illegal term bindings in locale element";
- in
- (case elem of
- Assumes asms => Assumes (asms |> map (fn (a, propps) =>
- (a, map (fn (t, ps) => (close_frees t, no_binds ps)) propps)))
- | Defines defs => Defines (defs |> map (fn (a, (t, ps)) =>
- (a, (close_frees (#2 (LocalDefs.cert_def ctxt t)), no_binds ps))))
- | e => e)
- end;
-
-
-fun finish_ext_elem parms _ (Fixes fixes, _) = Fixes (map (fn (b, _, mx) =>
- let val x = Binding.base_name b
- in (b, AList.lookup (op =) parms x, mx) end) fixes)
- | finish_ext_elem parms _ (Constrains _, _) = Constrains []
- | finish_ext_elem _ close (Assumes asms, propp) =
- close (Assumes (map #1 asms ~~ propp))
- | finish_ext_elem _ close (Defines defs, propp) =
- close (Defines (map #1 defs ~~ map (fn [(t, ps)] => (t, ps)) propp))
- | finish_ext_elem _ _ (Notes facts, _) = Notes facts;
-
-
-(* CB: finish_parms introduces type info from parms to identifiers *)
-(* CB: only needed for types that have been NONE so far???
- If so, which are these??? *)
-
-fun finish_parms parms (((name, ps), mode), elems) =
- (((name, map (fn (x, _) => (x, AList.lookup (op = : string * string -> bool) parms x)) ps), mode), elems);
-
-fun finish_elems ctxt parms _ ((text, wits), ((id, Int e), _)) =
- let
- val [(id' as (_, mode), es)] = unify_elemss ctxt parms [(id, e)];
- val wits' = case mode of Assumed _ => wits | Derived ths => wits @ ths;
- val text' = fold (eval_text ctxt id' false) es text;
- val es' = map_filter
- (finish_derived (ProofContext.theory_of ctxt) (Element.satisfy_morphism wits') mode) es;
- in ((text', wits'), (id', map Int es')) end
- | finish_elems ctxt parms do_close ((text, wits), ((id, Ext e), [propp])) =
- let
- val e' = finish_ext_elem parms (closeup ctxt do_close) (e, propp);
- val text' = eval_text ctxt id true e' text;
- in ((text', wits), (id, [Ext e'])) end
-
-in
-
-(* CB: only called by prep_elemss *)
-
-fun finish_elemss ctxt parms do_close =
- foldl_map (apsnd (finish_parms parms) o finish_elems ctxt parms do_close);
-
-end;
-
-
-(* Remove duplicate Defines elements: temporary workaround to fix Afp/Category. *)
-
-fun defs_ord (defs1, defs2) =
- list_ord (fn ((_, (d1, _)), (_, (d2, _))) =>
- TermOrd.fast_term_ord (d1, d2)) (defs1, defs2);
-structure Defstab =
- TableFun(type key = (Attrib.binding * (term * term list)) list val ord = defs_ord);
-
-fun rem_dup_defs es ds =
- fold_map (fn e as (Defines defs) => (fn ds =>
- if Defstab.defined ds defs
- then (Defines [], ds)
- else (e, Defstab.update (defs, ()) ds))
- | e => (fn ds => (e, ds))) es ds;
-fun rem_dup_elemss (Int es) ds = apfst Int (rem_dup_defs es ds)
- | rem_dup_elemss (Ext e) ds = (Ext e, ds);
-fun rem_dup_defines raw_elemss =
- fold_map (fn (id as (_, (Assumed _)), es) => (fn ds =>
- apfst (pair id) (rem_dup_elemss es ds))
- | (id as (_, (Derived _)), es) => (fn ds =>
- ((id, es), ds))) raw_elemss Defstab.empty |> #1;
-
-(* CB: type inference and consistency checks for locales.
-
- Works by building a context (through declare_elemss), extracting the
- required information and adjusting the context elements (finish_elemss).
- Can also universally close free vars in assms and defs. This is only
- needed for Ext elements and controlled by parameter do_close.
-
- Only elements of assumed identifiers are considered.
-*)
-
-fun prep_elemss prep_vars prepp do_close context fixed_params raw_elemss raw_concl =
- let
- (* CB: contexts computed in the course of this function are discarded.
- They are used for type inference and consistency checks only. *)
- (* CB: fixed_params are the parameters (with types) of the target locale,
- empty list if there is no target. *)
- (* CB: raw_elemss are list of pairs consisting of identifiers and
- context elements, the latter marked as internal or external. *)
- val raw_elemss = rem_dup_defines raw_elemss;
- val (raw_proppss, raw_ctxt) = declare_elemss prep_vars fixed_params raw_elemss context;
- (* CB: raw_ctxt is context with additional fixed variables derived from
- the fixes elements in raw_elemss,
- raw_proppss contains assumptions and definitions from the
- external elements in raw_elemss. *)
- fun prep_prop raw_propp (raw_ctxt, raw_concl) =
- let
- (* CB: add type information from fixed_params to context (declare_term) *)
- (* CB: process patterns (conclusion and external elements only) *)
- val (ctxt, all_propp) =
- prepp (fold Variable.declare_term (map Free fixed_params) raw_ctxt, raw_concl @ raw_propp);
- (* CB: add type information from conclusion and external elements to context *)
- val ctxt = fold Variable.declare_term (maps (map fst) all_propp) ctxt;
- (* CB: resolve schematic variables (patterns) in conclusion and external elements. *)
- val all_propp' = map2 (curry (op ~~))
- (#1 (#2 (ProofContext.bind_propp_schematic_i (ctxt, all_propp)))) (map (map snd) all_propp);
- val (concl, propp) = chop (length raw_concl) all_propp';
- in (propp, (ctxt, concl)) end
-
- val (proppss, (ctxt, concl)) =
- (fold_burrow o fold_burrow) prep_prop raw_proppss (raw_ctxt, raw_concl);
-
- (* CB: obtain all parameters from identifier part of raw_elemss *)
- val xs = map #1 (params_of' raw_elemss);
- val typing = unify_frozen ctxt 0
- (map (Variable.default_type raw_ctxt) xs)
- (map (Variable.default_type ctxt) xs);
- val parms = param_types (xs ~~ typing);
- (* CB: parms are the parameters from raw_elemss, with correct typing. *)
-
- (* CB: extract information from assumes and defines elements
- (fixes, constrains and notes in raw_elemss don't have an effect on
- text and elemss), compute final form of context elements. *)
- val ((text, _), elemss) = finish_elemss ctxt parms do_close
- ((((([], []), ([], [])), ([], [], [])), []), raw_elemss ~~ proppss);
- (* CB: text has the following structure:
- (((exts, exts'), (ints, ints')), (xs, env, defs))
- where
- exts: external assumptions (terms in external assumes elements)
-       exts': ditto, normalised wrt. env
-       ints: internal assumptions (terms in internal assumes elements)
-       ints': ditto, normalised wrt. env
- xs: the free variables in exts' and ints' and rhss of definitions,
- this includes parameters except defined parameters
- env: list of term pairs encoding substitutions, where the first term
- is a free variable; substitutions represent defines elements and
- the rhs is normalised wrt. the previous env
- defs: theorems representing the substitutions from defines elements
- (thms are normalised wrt. env).
- elemss is an updated version of raw_elemss:
- - type info added to Fixes and modified in Constrains
- - axiom and definition statement replaced by corresponding one
- from proppss in Assumes and Defines
- - Facts unchanged
- *)
- in ((parms, elemss, concl), text) end;
-
-in
-
-fun read_elemss x = prep_elemss ProofContext.read_vars ProofContext.read_propp_schematic x;
-fun cert_elemss x = prep_elemss ProofContext.cert_vars ProofContext.cert_propp_schematic x;
-
-end;
-
-
-(* facts and attributes *)
-
-local
-
-fun check_name name =
- if NameSpace.is_qualified name then error ("Illegal qualified name: " ^ quote name)
- else name;
-
-fun prep_facts _ _ _ ctxt (Int elem) = elem
- |> Element.morph_ctxt (Morphism.thm_morphism (Thm.transfer (ProofContext.theory_of ctxt)))
- | prep_facts prep_name get intern ctxt (Ext elem) = elem |> Element.map_ctxt
- {var = I, typ = I, term = I,
- binding = Binding.map_base prep_name,
- fact = get ctxt,
- attrib = Args.assignable o intern (ProofContext.theory_of ctxt)};
-
-in
-
-fun read_facts x = prep_facts check_name ProofContext.get_fact Attrib.intern_src x;
-fun cert_facts x = prep_facts I (K I) (K I) x;
-
-end;
-
-
-(* Get the specification of a locale *)
-
-(*The global specification is made from the parameters and global
- assumptions, the local specification from the parameters and the
- local assumptions.*)
-
-local
-
-fun gen_asms_of get thy name =
- let
- val ctxt = ProofContext.init thy;
- val (_, raw_elemss) = flatten (ctxt, I) (([], Symtab.empty), Expr (Locale name));
- val ((_, elemss, _), _) = read_elemss false ctxt [] raw_elemss [];
- in
- elemss |> get
- |> maps (fn (_, es) => map (fn Int e => e) es)
- |> maps (fn Assumes asms => asms | _ => [])
- |> map (apsnd (map fst))
- end;
-
-in
-
-fun parameters_of thy = #params o the_locale thy;
-
-fun intros thy = #intros o the_locale thy;
- (*returns introduction rule for delta predicate and locale predicate
- as a pair of singleton lists*)
-
-fun dests thy = #dests o the_locale thy;
-
-fun facts_of thy = map_filter (fn (Element.Notes (_, facts), _) => SOME facts
- | _ => NONE) o #elems o the_locale thy;
-
-fun parameters_of_expr thy expr =
- let
- val ctxt = ProofContext.init thy;
- val pts = params_of_expr ctxt [] (intern_expr thy expr)
- ([], Symtab.empty, Symtab.empty);
- val raw_params_elemss = make_raw_params_elemss pts;
- val ((_, syn), raw_elemss) = flatten (ctxt, intern_expr thy)
- (([], Symtab.empty), Expr expr);
- val ((parms, _, _), _) =
- read_elemss false ctxt [] (raw_params_elemss @ raw_elemss) [];
- in map (fn p as (n, _) => (p, Symtab.lookup syn n |> the)) parms end;
-
-fun local_asms_of thy name =
- gen_asms_of (single o Library.last_elem) thy name;
-
-fun global_asms_of thy name =
- gen_asms_of I thy name;
-
-end;
-
-
-(* full context statements: imports + elements + conclusion *)
-
-local
-
-fun prep_context_statement prep_expr prep_elemss prep_facts
- do_close fixed_params imports elements raw_concl context =
- let
- val thy = ProofContext.theory_of context;
-
- val (import_params, import_tenv, import_syn) =
- params_of_expr context fixed_params (prep_expr thy imports)
- ([], Symtab.empty, Symtab.empty);
- val includes = map_filter (fn Expr e => SOME e | Elem _ => NONE) elements;
- val (incl_params, incl_tenv, incl_syn) = fold (params_of_expr context fixed_params)
- (map (prep_expr thy) includes) (import_params, import_tenv, import_syn);
-
- val ((import_ids, _), raw_import_elemss) =
- flatten (context, prep_expr thy) (([], Symtab.empty), Expr imports);
- (* CB: normalise "includes" among elements *)
- val ((ids, syn), raw_elemsss) = foldl_map (flatten (context, prep_expr thy))
- ((import_ids, incl_syn), elements);
-
- val raw_elemss = flat raw_elemsss;
- (* CB: raw_import_elemss @ raw_elemss is the normalised list of
- context elements obtained from import and elements. *)
- (* Now additional elements for parameters are inserted. *)
- val import_params_ids = make_params_ids import_params;
- val incl_params_ids =
- make_params_ids (incl_params \\ import_params);
- val raw_import_params_elemss =
- make_raw_params_elemss (import_params, incl_tenv, incl_syn);
- val raw_incl_params_elemss =
- make_raw_params_elemss (incl_params \\ import_params, incl_tenv, incl_syn);
- val ((parms, all_elemss, concl), (spec, (_, _, defs))) = prep_elemss do_close
- context fixed_params
- (raw_import_params_elemss @ raw_import_elemss @ raw_incl_params_elemss @ raw_elemss) raw_concl;
-
- (* replace extended ids (for axioms) by ids *)
- val (import_ids', incl_ids) = chop (length import_ids) ids;
- val all_ids = import_params_ids @ import_ids' @ incl_params_ids @ incl_ids;
- val all_elemss' = map (fn (((_, ps), _), (((n, ps'), mode), elems)) =>
- (((n, map (fn p => (p, (the o AList.lookup (op =) ps') p)) ps), mode), elems))
- (all_ids ~~ all_elemss);
- (* CB: all_elemss and parms contain the correct parameter types *)
-
- val (ps, qs) = chop (length raw_import_params_elemss + length raw_import_elemss) all_elemss';
- val ((import_elemss, _), import_ctxt) =
- activate_facts false prep_facts ps context;
-
- val ((elemss, _), ctxt) =
- activate_facts false prep_facts qs (ProofContext.set_stmt true import_ctxt);
- in
- ((((import_ctxt, import_elemss), (ctxt, elemss, syn)),
- (parms, spec, defs)), concl)
- end;
-
-fun prep_statement prep_locale prep_ctxt raw_locale elems concl ctxt =
- let
- val thy = ProofContext.theory_of ctxt;
- val locale = Option.map (prep_locale thy) raw_locale;
- val (fixed_params, imports) =
- (case locale of
- NONE => ([], empty)
- | SOME name =>
- let val {params = ps, ...} = the_locale thy name
- in (map fst ps, Locale name) end);
- val ((((locale_ctxt, _), (elems_ctxt, _, _)), _), concl') =
- prep_ctxt false fixed_params imports (map Elem elems) concl ctxt;
- in (locale, locale_ctxt, elems_ctxt, concl') end;
-
-fun prep_expr prep imports body ctxt =
- let
- val (((_, import_elemss), (ctxt', elemss, _)), _) = prep imports body ctxt;
- val all_elems = maps snd (import_elemss @ elemss);
- in (all_elems, ctxt') end;
-
-in
-
-val read_ctxt = prep_context_statement intern_expr read_elemss read_facts;
-val cert_ctxt = prep_context_statement (K I) cert_elemss cert_facts;
-
-fun read_context imports body ctxt = #1 (read_ctxt true [] imports (map Elem body) [] ctxt);
-fun cert_context imports body ctxt = #1 (cert_ctxt true [] imports (map Elem body) [] ctxt);
-
-val read_expr = prep_expr read_context;
-val cert_expr = prep_expr cert_context;
-
-fun read_context_statement loc = prep_statement (K I) read_ctxt loc;
-fun read_context_statement_cmd loc = prep_statement intern read_ctxt loc;
-fun cert_context_statement loc = prep_statement (K I) cert_ctxt loc;
-
-end;
-
-
-(* init *)
-
-fun init loc =
- ProofContext.init
- #> #2 o cert_context_statement (SOME loc) [] [];
-
-
-(* print locale *)
-
-fun print_locale thy show_facts imports body =
- let val (all_elems, ctxt) = read_expr imports body (ProofContext.init thy) in
- Pretty.big_list "locale elements:" (all_elems
- |> (if show_facts then I else filter (fn Notes _ => false | _ => true))
- |> map (Element.pretty_ctxt ctxt) |> filter_out null
- |> map Pretty.chunks)
- |> Pretty.writeln
- end;
-
-
-
-(** store results **)
-
-(* join equations of an id with already accumulated ones *)
-
-fun join_eqns get_reg id eqns =
- let
- val eqns' = case get_reg id
- of NONE => eqns
- | SOME (_, _, eqns') => Termtab.join (fn _ => fn (_, e) => e) (eqns, eqns')
- (* prefer equations from eqns' *)
- in ((id, eqns'), eqns') end;
-
-
-(* collect witnesses and equations up to a particular target for a
- registration; requires parameters and flattened list of identifiers
- instead of recomputing it from the target *)
-
-fun collect_witnesses ctxt (imprt as ((impT, _), (imp, _))) parms ids ext_ts = let
-
- val thy = ProofContext.theory_of ctxt;
-
- val ts = map (var_inst_term (impT, imp)) ext_ts;
- val (parms, parmTs) = split_list parms;
- val parmvTs = map Logic.varifyT parmTs;
- val vtinst = fold (Sign.typ_match thy) (parmvTs ~~ map Term.fastype_of ts) Vartab.empty;
- val tinst = Vartab.dest vtinst |> map (fn ((x, 0), (_, T)) => (x, T))
- |> Symtab.make;
- val inst = Symtab.make (parms ~~ ts);
-
- (* instantiate parameter names in ids *)
- val ext_inst = Symtab.make (parms ~~ ext_ts);
- fun ext_inst_names ps = map (the o Symtab.lookup ext_inst) ps;
- val inst_ids = map (apfst (apsnd ext_inst_names)) ids;
- val assumed_ids = map_filter (fn (id, (_, Assumed _)) => SOME id | _ => NONE) inst_ids;
- val wits = maps (#2 o the o get_local_registration ctxt imprt) assumed_ids;
- val eqns =
- fold_map (join_eqns (get_local_registration ctxt imprt))
- (map fst inst_ids) Termtab.empty |> snd |> Termtab.dest |> map snd;
- in ((tinst, inst), wits, eqns) end;
-
-
-(* compute and apply morphism *)
-
-fun name_morph phi_name (lprfx, pprfx) b =
- b
- |> (if not (Binding.is_empty b) andalso pprfx <> ""
- then Binding.add_prefix false pprfx else I)
- |> (if not (Binding.is_empty b)
- then Binding.add_prefix false lprfx else I)
- |> phi_name;
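name_morph applies the parameter prefix first and the locale prefix second, so the locale prefix ends up outermost, and empty bindings or empty prefixes are left untouched. A minimal sketch of that qualification order over plain strings (Binding.T is replaced by a string purely for illustration; all names below are assumptions):

  (* skip empty prefixes and empty names, otherwise prepend "prefix." *)
  fun qualify prefix name =
    if prefix = "" orelse name = "" then name else prefix ^ "." ^ name

  (* parameter prefix is applied first, locale prefix second (outermost) *)
  fun name_morph_sketch (lprfx, pprfx) name = qualify lprfx (qualify pprfx name)

  (* name_morph_sketch ("order_locale", "le") "refl" = "order_locale.le.refl"
     (hypothetical prefixes and name) *)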
-
-fun inst_morph thy phi_name param_prfx insts prems eqns export =
- let
- (* standardise export morphism *)
- val exp_fact = Drule.zero_var_indexes_list o map Thm.strip_shyps o Morphism.fact export;
- val exp_term = TermSubst.zero_var_indexes o Morphism.term export;
- (* FIXME sync with exp_fact *)
- val exp_typ = Logic.type_map exp_term;
- val export' =
- Morphism.morphism {binding = I, var = I, typ = exp_typ, term = exp_term, fact = exp_fact};
- in
- Morphism.binding_morphism (name_morph phi_name param_prfx) $>
- Element.inst_morphism thy insts $>
- Element.satisfy_morphism prems $>
- Morphism.term_morphism (MetaSimplifier.rewrite_term thy eqns []) $>
- Morphism.thm_morphism (MetaSimplifier.rewrite_rule eqns) $>
- export'
- end;
-
-fun activate_note thy phi_name param_prfx attrib insts prems eqns exp =
- (Element.facts_map o Element.morph_ctxt)
- (inst_morph thy phi_name param_prfx insts prems eqns exp)
- #> Attrib.map_facts attrib;
-
-
-(* public interface to interpretation morphism *)
-
-fun get_interpret_morph thy phi_name param_prfx (exp, imp) target ext_ts =
- let
- val parms = the_locale thy target |> #params |> map fst;
- val ids = flatten (ProofContext.init thy, intern_expr thy)
- (([], Symtab.empty), Expr (Locale target)) |> fst |> fst;
- val (insts, prems, eqns) = collect_witnesses (ProofContext.init thy) imp parms ids ext_ts;
- in
- inst_morph thy phi_name param_prfx insts prems eqns exp
- end;
-
-(* store instantiations of args for all registered interpretations
- of the theory *)
-
-fun note_thmss_registrations target (kind, args) thy =
- let
- val parms = the_locale thy target |> #params |> map fst;
- val ids = flatten (ProofContext.init thy, intern_expr thy)
- (([], Symtab.empty), Expr (Locale target)) |> fst |> fst;
-
- val regs = get_global_registrations thy target;
- (* add args to thy for all registrations *)
-
- fun activate (ext_ts, ((phi_name, param_prfx), (exp, imp), _, _)) thy =
- let
- val (insts, prems, eqns) = collect_witnesses (ProofContext.init thy) imp parms ids ext_ts;
- val args' = args
- |> activate_note thy phi_name param_prfx
- (Attrib.attribute_i thy) insts prems eqns exp;
- in
- thy
- |> global_note_qualified kind args'
- |> snd
- end;
- in fold activate regs thy end;
-
-
-(* locale results *)
-
-fun add_thmss loc kind args ctxt =
- let
- val (([(_, [Notes args'])], _), ctxt') =
- activate_facts true cert_facts
- [((("", []), Assumed []), [Ext (Notes (kind, args))])] ctxt;
- val ctxt'' = ctxt' |> ProofContext.theory
- (change_locale loc
- (fn (axiom, elems, params, decls, regs, intros, dests) =>
- (axiom, elems @ [(Notes args', stamp ())],
- params, decls, regs, intros, dests))
- #> note_thmss_registrations loc args');
- in ctxt'' end;
-
-
-(* declarations *)
-
-local
-
-fun decl_attrib decl phi = Thm.declaration_attribute (K (decl phi));
-
-fun add_decls add loc decl =
- ProofContext.theory (change_locale loc
- (fn (axiom, elems, params, decls, regs, intros, dests) =>
- (axiom, elems, params, add (decl, stamp ()) decls, regs, intros, dests))) #>
- add_thmss loc Thm.internalK
- [((Binding.empty, [Attrib.internal (decl_attrib decl)]), [([Drule.dummy_thm], [])])];
-
-in
-
-val add_type_syntax = add_decls (apfst o cons);
-val add_term_syntax = add_decls (apsnd o cons);
-val add_declaration = add_decls (K I);
-
-fun declarations_of thy loc =
- the_locale thy loc |> #decls |> apfst (map fst) |> apsnd (map fst);
-
-end;
-
-
-
-(** define locales **)
-
-(* predicate text *)
-(* CB: generate locale predicates and delta predicates *)
-
-local
-
-(* introN: name of theorems for introduction rules of locale and
- delta predicates;
- axiomsN: name of theorem set with destruct rules for locale predicates,
- also name suffix of delta predicates. *)
-
-val introN = "intro";
-val axiomsN = "axioms";
-
-fun atomize_spec thy ts =
- let
- val t = Logic.mk_conjunction_balanced ts;
- val body = ObjectLogic.atomize_term thy t;
- val bodyT = Term.fastype_of body;
- in
- if bodyT = propT then (t, propT, Thm.reflexive (Thm.cterm_of thy t))
- else (body, bodyT, ObjectLogic.atomize (Thm.cterm_of thy t))
- end;
-
-fun aprop_tr' n c = (Syntax.constN ^ c, fn ctxt => fn args =>
- if length args = n then
- Syntax.const "_aprop" $
- Term.list_comb (Syntax.free (Consts.extern (ProofContext.consts_of ctxt) c), args)
- else raise Match);
-
-(* CB: define one predicate including its intro rule and axioms
- - bname: predicate name
- - parms: locale parameters
- - defs: thms representing substitutions from defines elements
- - ts: terms representing locale assumptions (not normalised wrt. defs)
- - norm_ts: terms representing locale assumptions (normalised wrt. defs)
- - thy: the theory
-*)
-
-fun def_pred bname parms defs ts norm_ts thy =
- let
- val name = Sign.full_bname thy bname;
-
- val (body, bodyT, body_eq) = atomize_spec thy norm_ts;
- val env = Term.add_free_names body [];
- val xs = filter (member (op =) env o #1) parms;
- val Ts = map #2 xs;
- val extraTs =
- (Term.add_tfrees body [] \\ fold Term.add_tfreesT Ts [])
- |> Library.sort_wrt #1 |> map TFree;
- val predT = map Term.itselfT extraTs ---> Ts ---> bodyT;
-
- val args = map Logic.mk_type extraTs @ map Free xs;
- val head = Term.list_comb (Const (name, predT), args);
- val statement = ObjectLogic.ensure_propT thy head;
-
- val ([pred_def], defs_thy) =
- thy
- |> bodyT = propT ? Sign.add_advanced_trfuns ([], [], [aprop_tr' (length args) name], [])
- |> Sign.declare_const [] ((Binding.name bname, predT), NoSyn) |> snd
- |> PureThy.add_defs false
- [((Thm.def_name bname, Logic.mk_equals (head, body)), [Thm.kind_internal])];
- val defs_ctxt = ProofContext.init defs_thy |> Variable.declare_term head;
-
- val cert = Thm.cterm_of defs_thy;
-
- val intro = Goal.prove_global defs_thy [] norm_ts statement (fn _ =>
- MetaSimplifier.rewrite_goals_tac [pred_def] THEN
- Tactic.compose_tac (false, body_eq RS Drule.equal_elim_rule1, 1) 1 THEN
- Tactic.compose_tac (false,
- Conjunction.intr_balanced (map (Thm.assume o cert) norm_ts), 0) 1);
-
- val conjuncts =
- (Drule.equal_elim_rule2 OF [body_eq,
- MetaSimplifier.rewrite_rule [pred_def] (Thm.assume (cert statement))])
- |> Conjunction.elim_balanced (length ts);
- val axioms = ts ~~ conjuncts |> map (fn (t, ax) =>
- Element.prove_witness defs_ctxt t
- (MetaSimplifier.rewrite_goals_tac defs THEN
- Tactic.compose_tac (false, ax, 0) 1));
- in ((statement, intro, axioms), defs_thy) end;
-
-fun assumes_to_notes (Assumes asms) axms =
- fold_map (fn (a, spec) => fn axs =>
- let val (ps, qs) = chop (length spec) axs
- in ((a, [(ps, [])]), qs) end) asms axms
- |> apfst (curry Notes Thm.assumptionK)
- | assumes_to_notes e axms = (e, axms);
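assumes_to_notes consumes, for each assumption group, exactly as many theorems from the shared axiom list as the group has statements (via chop), threading the remainder on to the next group. A self-contained sketch of that "consume a prefix per group" pattern (function and variable names are illustrative, not part of the original code):

  (* give each group as many items from the shared list as it has slots,
     returning the leftovers for subsequent groups *)
  fun consume_per_group [] items = ([], items)
    | consume_per_group (g :: gs) items =
        let
          val n = length g
          val (here, rest) = (List.take (items, n), List.drop (items, n))
          val (more, rest') = consume_per_group gs rest
        in ((g, here) :: more, rest') end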
-
-(* CB: the following two change only "new" elems; these have identifier ("", _). *)
-
-(* turn Assumes into Notes elements *)
-
-fun change_assumes_elemss axioms elemss =
- let
- val satisfy = Element.morph_ctxt (Element.satisfy_morphism axioms);
- fun change (id as ("", _), es) =
- fold_map assumes_to_notes (map satisfy es)
- #-> (fn es' => pair (id, es'))
- | change e = pair e;
- in
- fst (fold_map change elemss (map Element.conclude_witness axioms))
- end;
-
-(* adjust hyps of Notes elements *)
-
-fun change_elemss_hyps axioms elemss =
- let
- val satisfy = Element.morph_ctxt (Element.satisfy_morphism axioms);
- fun change (id as ("", _), es) = (id, map (fn e as Notes _ => satisfy e | e => e) es)
- | change e = e;
- in map change elemss end;
-
-in
-
-(* CB: main predicate definition function *)
-
-fun define_preds pname (parms, ((exts, exts'), (ints, ints')), defs) elemss thy =
- let
- val ((elemss', more_ts), a_elem, a_intro, thy'') =
- if null exts then ((elemss, []), [], [], thy)
- else
- let
- val aname = if null ints then pname else pname ^ "_" ^ axiomsN;
- val ((statement, intro, axioms), thy') =
- thy
- |> def_pred aname parms defs exts exts';
- val elemss' = change_assumes_elemss axioms elemss;
- val a_elem = [(("", []),
- [Assumes [((Binding.name (pname ^ "_" ^ axiomsN), []), [(statement, [])])]])];
- val (_, thy'') =
- thy'
- |> Sign.add_path aname
- |> Sign.no_base_names
- |> PureThy.note_thmss Thm.internalK [((Binding.name introN, []), [([intro], [])])]
- ||> Sign.restore_naming thy';
- in ((elemss', [statement]), a_elem, [intro], thy'') end;
- val (predicate, stmt', elemss'', b_intro, thy'''') =
- if null ints then (([], []), more_ts, elemss' @ a_elem, [], thy'')
- else
- let
- val ((statement, intro, axioms), thy''') =
- thy''
- |> def_pred pname parms defs (ints @ more_ts) (ints' @ more_ts);
- val cstatement = Thm.cterm_of thy''' statement;
- val elemss'' = change_elemss_hyps axioms elemss';
- val b_elem = [(("", []),
- [Assumes [((Binding.name (pname ^ "_" ^ axiomsN), []), [(statement, [])])]])];
- val (_, thy'''') =
- thy'''
- |> Sign.add_path pname
- |> Sign.no_base_names
- |> PureThy.note_thmss Thm.internalK
- [((Binding.name introN, []), [([intro], [])]),
- ((Binding.name axiomsN, []),
- [(map (Drule.standard o Element.conclude_witness) axioms, [])])]
- ||> Sign.restore_naming thy''';
- in (([cstatement], axioms), [statement], elemss'' @ b_elem, [intro], thy'''') end;
- in (((elemss'', predicate, stmt'), (a_intro, b_intro)), thy'''') end;
-
-end;
-
-
-(* add_locale(_i) *)
-
-local
-
-(* turn Defines into Notes elements, accumulate definition terms *)
-
-fun defines_to_notes is_ext thy (Defines defs) defns =
- let
- val defs' = map (fn (_, (def, _)) => (Attrib.empty_binding, (def, []))) defs
- val notes = map (fn (a, (def, _)) =>
- (a, [([assume (cterm_of thy def)], [])])) defs
- in
- (if is_ext then SOME (Notes (Thm.definitionK, notes)) else NONE, defns @ [Defines defs'])
- end
- | defines_to_notes _ _ e defns = (SOME e, defns);
-
-fun change_defines_elemss thy elemss defns =
- let
- fun change (id as (n, _), es) defns =
- let
- val (es', defns') = fold_map (defines_to_notes (n="") thy) es defns
- in ((id, map_filter I es'), defns') end
- in fold_map change elemss defns end;
-
-fun gen_add_locale prep_ctxt prep_expr
- predicate_name bname raw_imports raw_body thy =
- (* predicate_name: "" - locale with predicate named as locale
- "name" - locale with predicate named "name" *)
- let
- val thy_ctxt = ProofContext.init thy;
- val name = Sign.full_bname thy bname;
- val _ = is_some (get_locale thy name) andalso
- error ("Duplicate definition of locale " ^ quote name);
-
- val (((import_ctxt, import_elemss), (body_ctxt, body_elemss, syn)),
- text as (parms, ((_, exts'), _), defs)) =
- prep_ctxt raw_imports raw_body thy_ctxt;
- val elemss = import_elemss @ body_elemss |>
- map_filter (fn ((id, Assumed axs), elems) => SOME (id, elems) | _ => NONE);
-
- val extraTs = List.foldr OldTerm.add_term_tfrees [] exts' \\
- List.foldr OldTerm.add_typ_tfrees [] (map snd parms);
- val _ = if null extraTs then ()
- else warning ("Additional type variable(s) in locale specification " ^ quote bname);
-
- val predicate_name' = case predicate_name of "" => bname | _ => predicate_name;
- val (elemss', defns) = change_defines_elemss thy elemss [];
- val elemss'' = elemss' @ [(("", []), defns)];
- val (((elemss''', predicate as (pred_statement, pred_axioms), stmt'), intros), thy') =
- define_preds predicate_name' text elemss'' thy;
- val regs = pred_axioms
- |> fold_map (fn (id, elems) => fn wts => let
- val ts = flat (map_filter (fn (Assumes asms) =>
- SOME (maps (map #1 o #2) asms) | _ => NONE) elems);
- val (wts1, wts2) = chop (length ts) wts;
- in ((apsnd (map fst) id, wts1), wts2) end) elemss'''
- |> fst
- |> map_filter (fn (("", _), _) => NONE | e => SOME e);
- fun axiomify axioms elemss =
- (axioms, elemss) |> foldl_map (fn (axs, (id, elems)) => let
- val ts = flat (map_filter (fn (Assumes asms) =>
- SOME (maps (map #1 o #2) asms) | _ => NONE) elems);
- val (axs1, axs2) = chop (length ts) axs;
- in (axs2, ((id, Assumed axs1), elems)) end)
- |> snd;
- val ((_, facts), ctxt) = activate_facts true (K I)
- (axiomify pred_axioms elemss''') (ProofContext.init thy');
- val view_ctxt = Assumption.add_view thy_ctxt pred_statement ctxt;
- val export = Thm.close_derivation o Goal.norm_result o
- singleton (ProofContext.export view_ctxt thy_ctxt);
- val facts' = facts |> map (fn (a, ths) => ((a, []), [(map export ths, [])]));
- val elems' = maps #2 (filter (fn ((s, _), _) => s = "") elemss''');
- val elems'' = map_filter (fn (Fixes _) => NONE | e => SOME e) elems';
- val axs' = map (Element.assume_witness thy') stmt';
- val loc_ctxt = thy'
- |> Sign.add_path bname
- |> Sign.no_base_names
- |> PureThy.note_thmss Thm.assumptionK facts' |> snd
- |> Sign.restore_naming thy'
- |> register_locale bname {axiom = axs',
- elems = map (fn e => (e, stamp ())) elems'',
- params = params_of elemss''' |> map (fn (x, SOME T) => ((x, T), the (Symtab.lookup syn x))),
- decls = ([], []),
- regs = regs,
- intros = intros,
- dests = map Element.conclude_witness pred_axioms}
- |> init name;
- in (name, loc_ctxt) end;
-
-in
-
-val add_locale = gen_add_locale cert_context (K I);
-val add_locale_cmd = gen_add_locale read_context intern_expr "";
-
-end;
-
-val _ = Context.>> (Context.map_theory
- (add_locale "" "var" empty [Fixes [(Binding.name (Name.internal "x"), NONE, NoSyn)]] #>
- snd #> ProofContext.theory_of #>
- add_locale "" "struct" empty [Fixes [(Binding.name (Name.internal "S"), NONE, Structure)]] #>
- snd #> ProofContext.theory_of));
-
-
-
-
-(** Normalisation of locale statements ---
- discharges goals implied by interpretations **)
-
-local
-
-fun locale_assm_intros thy =
- Symtab.fold (fn (_, {intros = (a, _), ...}) => fn intros => (a @ intros))
- (#2 (LocalesData.get thy)) [];
-fun locale_base_intros thy =
- Symtab.fold (fn (_, {intros = (_, b), ...}) => fn intros => (b @ intros))
- (#2 (LocalesData.get thy)) [];
-
-fun all_witnesses ctxt =
- let
- val thy = ProofContext.theory_of ctxt;
- fun get registrations = Symtab.fold (fn (_, regs) => fn thms =>
- (Registrations.dest thy regs |> map (fn (_, (_, (exp, _), wits, _)) =>
- map (Element.conclude_witness #> Morphism.thm exp) wits) |> flat) @ thms)
- registrations [];
- in get (RegistrationsData.get (Context.Proof ctxt)) end;
-
-in
-
-fun intro_locales_tac eager ctxt facts st =
- let
- val wits = all_witnesses ctxt;
- val thy = ProofContext.theory_of ctxt;
- val intros = locale_base_intros thy @ (if eager then locale_assm_intros thy else []);
- in
- Method.intros_tac (wits @ intros) facts st
- end;
-
-end;
-
-
-(** Interpretation commands **)
-
-local
-
-(* extract proof obligations (assms and defs) from elements *)
-
-fun extract_asms_elems ((id, Assumed _), elems) = (id, maps Element.prems_of elems)
- | extract_asms_elems ((id, Derived _), _) = (id, []);
-
-
-(* activate instantiated facts in theory or context *)
-
-fun gen_activate_facts_elemss mk_ctxt note attrib put_reg add_wit add_eqn
- phi_name all_elemss pss propss eq_attns (exp, imp) thmss thy_ctxt =
- let
- val ctxt = mk_ctxt thy_ctxt;
- fun get_reg thy_ctxt = get_local_registration (mk_ctxt thy_ctxt);
- fun test_reg thy_ctxt = test_local_registration (mk_ctxt thy_ctxt);
-
- val (all_propss, eq_props) = chop (length all_elemss) propss;
- val (all_thmss, eq_thms) = chop (length all_elemss) thmss;
-
- (* Filter out fragments already registered. *)
-
- val (new_elemss, xs) = split_list (filter_out (fn (((id, _), _), _) =>
- test_reg thy_ctxt id) (all_elemss ~~ (pss ~~ (all_propss ~~ all_thmss))));
- val (new_pss, ys) = split_list xs;
- val (new_propss, new_thmss) = split_list ys;
-
- val thy_ctxt' = thy_ctxt
- (* add registrations *)
- |> fold2 (fn ((id as (loc, _), _), _) => fn ps => put_reg id (phi_name, param_prefix loc ps) (exp, imp))
- new_elemss new_pss
- (* add witnesses of Assumed elements (only those generate proof obligations) *)
- |> fold2 (fn (id, _) => fold (add_wit id)) new_propss new_thmss
- (* add equations *)
- |> fold2 (fn (id, _) => fold (add_eqn id)) eq_props
- ((map o map) (Drule.abs_def o LocalDefs.meta_rewrite_rule ctxt o
- Element.conclude_witness) eq_thms);
-
- val prems = flat (map_filter
- (fn ((id, Assumed _), _) => Option.map #2 (get_reg thy_ctxt' imp id)
- | ((_, Derived _), _) => NONE) all_elemss);
-
- val thy_ctxt'' = thy_ctxt'
- (* add witnesses of Derived elements *)
- |> fold (fn (id, thms) => fold
- (add_wit id o Element.morph_witness (Element.satisfy_morphism prems)) thms)
- (map_filter (fn ((_, Assumed _), _) => NONE
- | ((id, Derived thms), _) => SOME (id, thms)) new_elemss)
-
- fun activate_elem phi_name param_prfx insts prems eqns exp (Notes (kind, facts)) thy_ctxt =
- let
- val ctxt = mk_ctxt thy_ctxt;
- val thy = ProofContext.theory_of ctxt;
- val facts' = facts
- |> activate_note thy phi_name param_prfx
- (attrib thy_ctxt) insts prems eqns exp;
- in
- thy_ctxt
- |> note kind facts'
- |> snd
- end
- | activate_elem _ _ _ _ _ _ _ thy_ctxt = thy_ctxt;
-
- fun activate_elems (((loc, ext_ts), _), _) ps thy_ctxt =
- let
- val ctxt = mk_ctxt thy_ctxt;
- val thy = ProofContext.theory_of ctxt;
- val {params, elems, ...} = the_locale thy loc;
- val parms = map fst params;
- val param_prfx = param_prefix loc ps;
- val ids = flatten (ProofContext.init thy, intern_expr thy)
- (([], Symtab.empty), Expr (Locale loc)) |> fst |> fst;
- val (insts, prems, eqns) = collect_witnesses ctxt imp parms ids ext_ts;
- in
- thy_ctxt
- |> fold (activate_elem phi_name param_prfx insts prems eqns exp o fst) elems
- end;
-
- in
- thy_ctxt''
- (* add equations as lemmas to context *)
- |> (fold2 o fold2) (fn attn => fn thm => snd o yield_singleton (note Thm.lemmaK)
- ((apsnd o map) (attrib thy_ctxt'') attn, [([Element.conclude_witness thm], [])]))
- (unflat eq_thms eq_attns) eq_thms
- (* add interpreted facts *)
- |> fold2 activate_elems new_elemss new_pss
- end;
-
-fun global_activate_facts_elemss x = gen_activate_facts_elemss
- ProofContext.init
- global_note_qualified
- Attrib.attribute_i
- put_global_registration
- add_global_witness
- add_global_equation
- x;
-
-fun local_activate_facts_elemss x = gen_activate_facts_elemss
- I
- local_note_qualified
- (Attrib.attribute_i o ProofContext.theory_of)
- put_local_registration
- add_local_witness
- add_local_equation
- x;
-
-fun prep_instantiations parse_term parse_prop ctxt parms (insts, eqns) =
- let
- (* parameters *)
- val (parm_names, parm_types) = parms |> split_list
- ||> map (TypeInfer.paramify_vars o Logic.varifyT);
- val type_parms = fold Term.add_tvarsT parm_types [] |> map (Logic.mk_type o TVar);
- val type_parm_names = fold Term.add_tfreesT (map snd parms) [] |> map fst;
-
- (* parameter instantiations *)
- val d = length parms - length insts;
- val insts =
- if d < 0 then error "More arguments than parameters in instantiation."
- else insts @ replicate d NONE;
- val (given_ps, given_insts) =
- ((parm_names ~~ parm_types) ~~ insts) |> map_filter
- (fn (_, NONE) => NONE
- | ((n, T), SOME inst) => SOME ((n, T), inst))
- |> split_list;
- val (given_parm_names, given_parm_types) = given_ps |> split_list;
-
- (* parse insts / eqns *)
- val given_insts' = map (parse_term ctxt) given_insts;
- val eqns' = map (parse_prop ctxt) eqns;
-
- (* type inference and contexts *)
- val arg = type_parms @ map2 TypeInfer.constrain given_parm_types given_insts' @ eqns';
- val res = Syntax.check_terms ctxt arg;
- val ctxt' = ctxt |> fold Variable.auto_fixes res;
-
- (* instantiation *)
- val (type_parms'', res') = chop (length type_parms) res;
- val (given_insts'', eqns'') = chop (length given_insts) res';
- val instT = Symtab.make (type_parm_names ~~ map Logic.dest_type type_parms'');
- val inst = Symtab.make (given_parm_names ~~ given_insts'');
-
- (* export from eigencontext *)
- val export = Variable.export_morphism ctxt' ctxt;
-
- (* import, its inverse *)
- val domT = fold Term.add_tfrees res [] |> map TFree;
- val importT = domT |> map (fn x => (Morphism.typ export x, x))
- |> map_filter (fn (TFree _, _) => NONE (* fixed point of export *)
- | (TVar y, x) => SOME (fst y, x)
- | _ => error "internal: illegal export in interpretation")
- |> Vartab.make;
- val dom = fold Term.add_frees res [] |> map Free;
- val imprt = dom |> map (fn x => (Morphism.term export x, x))
- |> map_filter (fn (Free _, _) => NONE (* fixed point of export *)
- | (Var y, x) => SOME (fst y, x)
- | _ => error "internal: illegal export in interpretation")
- |> Vartab.make;
- in (((instT, inst), eqns''), (export, ((importT, domT), (imprt, dom)))) end;
-
-val read_instantiations = prep_instantiations Syntax.parse_term Syntax.parse_prop;
-val check_instantiations = prep_instantiations (K I) (K I);
-
-fun gen_prep_registration mk_ctxt test_reg activate
- prep_attr prep_expr prep_insts
- thy_ctxt phi_name raw_expr raw_insts =
- let
- val ctxt = mk_ctxt thy_ctxt;
- val thy = ProofContext.theory_of ctxt;
- val ctxt' = ProofContext.init thy;
- fun prep_attn attn = (apsnd o map)
- (Attrib.crude_closure ctxt o Args.assignable o prep_attr thy) attn;
-
- val expr = prep_expr thy raw_expr;
-
- val pts = params_of_expr ctxt' [] expr ([], Symtab.empty, Symtab.empty);
- val params_ids = make_params_ids (#1 pts);
- val raw_params_elemss = make_raw_params_elemss pts;
- val ((ids, _), raw_elemss) = flatten (ctxt', I) (([], Symtab.empty), Expr expr);
- val ((parms, all_elemss, _), (_, (_, defs, _))) =
- read_elemss false ctxt' [] (raw_params_elemss @ raw_elemss) [];
-
- (** compute instantiation **)
-
- (* consistency check: equations need to be stored in a particular locale,
- therefore if equations are present locale expression must be a name *)
-
- val _ = case (expr, snd raw_insts) of
- (Locale _, _) => () | (_, []) => ()
- | (_, _) => error "Interpretations with `where' only permitted if locale expression is a name.";
-
- (* read or certify instantiation *)
- val (raw_insts', raw_eqns) = raw_insts;
- val (raw_eq_attns, raw_eqns') = split_list raw_eqns;
- val (((instT, inst1), eqns), morphs) = prep_insts ctxt parms (raw_insts', raw_eqns');
- val eq_attns = map prep_attn raw_eq_attns;
-
- (* defined params without given instantiation *)
- val not_given = filter_out (Symtab.defined inst1 o fst) parms;
- fun add_def (p, pT) inst =
- let
- val (t, T) = case find_first (fn (Free (a, _), _) => a = p) defs of
- NONE => error ("Instance missing for parameter " ^ quote p)
- | SOME (Free (_, T), t) => (t, T);
- val d = Element.inst_term (instT, inst) t;
- in Symtab.update_new (p, d) inst end;
- val inst2 = fold add_def not_given inst1;
- val inst_morphism = Element.inst_morphism thy (instT, inst2);
- (* Note: insts contain no vars. *)
-
- (** compute proof obligations **)
-
- (* restore "small" ids *)
- val ids' = map (fn ((n, ps), (_, mode)) =>
- ((n, map (fn p => Free (p, (the o AList.lookup (op =) parms) p)) ps), mode))
- ids;
- val (_, all_elemss') = chop (length raw_params_elemss) all_elemss
- (* instantiate ids and elements *)
- val inst_elemss = (ids' ~~ all_elemss') |> map (fn (((n, ps), _), ((_, mode), elems)) =>
- ((n, map (Morphism.term (inst_morphism $> fst morphs)) ps),
- map (fn Int e => Element.morph_ctxt inst_morphism e) elems)
- |> apfst (fn id => (id, map_mode (map (Element.morph_witness inst_morphism)) mode)));
-
- (* equations *)
- val eqn_elems = if null eqns then []
- else [(Library.last_elem inst_elemss |> fst |> fst, eqns)];
-
- val propss = map extract_asms_elems inst_elemss @ eqn_elems;
-
- in
- (propss, activate phi_name inst_elemss (map (snd o fst) ids) propss eq_attns morphs, morphs)
- end;
-
-fun gen_prep_global_registration mk_ctxt = gen_prep_registration ProofContext.init
- test_global_registration
- global_activate_facts_elemss mk_ctxt;
-
-fun gen_prep_local_registration mk_ctxt = gen_prep_registration I
- test_local_registration
- local_activate_facts_elemss mk_ctxt;
-
-val prep_global_registration = gen_prep_global_registration
- (K I) (K I) check_instantiations;
-val prep_global_registration_cmd = gen_prep_global_registration
- Attrib.intern_src intern_expr read_instantiations;
-
-val prep_local_registration = gen_prep_local_registration
- (K I) (K I) check_instantiations;
-val prep_local_registration_cmd = gen_prep_local_registration
- Attrib.intern_src intern_expr read_instantiations;
-
-fun prep_registration_in_locale target expr thy =
- (* target already in internal form *)
- let
- val ctxt = ProofContext.init thy;
- val ((raw_target_ids, target_syn), _) = flatten (ctxt, I)
- (([], Symtab.empty), Expr (Locale target));
- val fixed = the_locale thy target |> #params |> map #1;
- val ((all_ids, syn), raw_elemss) = flatten (ctxt, intern_expr thy)
- ((raw_target_ids, target_syn), Expr expr);
- val (target_ids, ids) = chop (length raw_target_ids) all_ids;
- val ((parms, elemss, _), _) = read_elemss false ctxt fixed raw_elemss [];
-
- (** compute proof obligations **)
-
- (* restore "small" ids, with mode *)
- val ids' = map (apsnd snd) ids;
- (* remove Int markers *)
- val elemss' = map (fn (_, es) =>
- map (fn Int e => e) es) elemss
- (* extract assumptions and defs *)
- val ids_elemss = ids' ~~ elemss';
- val propss = map extract_asms_elems ids_elemss;
-
- (** activation function:
- - add registrations to the target locale
- - add induced registrations for all global registrations of
- the target, unless already present
- - add facts of induced registrations to theory **)
-
- fun activate thmss thy =
- let
- val satisfy = Element.satisfy_thm (flat thmss);
- val ids_elemss_thmss = ids_elemss ~~ thmss;
- val regs = get_global_registrations thy target;
-
- fun activate_id (((id, Assumed _), _), thms) thy =
- thy |> put_registration_in_locale target id
- |> fold (add_witness_in_locale target id) thms
- | activate_id _ thy = thy;
-
- fun activate_reg (ext_ts, ((phi_name, param_prfx), (exp, imp), _, _)) thy =
- let
- val (insts, wits, _) = collect_witnesses (ProofContext.init thy) imp fixed target_ids ext_ts;
- val inst_parms = map (the o AList.lookup (op =) (map #1 fixed ~~ ext_ts));
- val disch = Element.satisfy_thm wits;
- val new_elemss = filter (fn (((name, ps), _), _) =>
- not (test_global_registration thy (name, inst_parms ps))) (ids_elemss);
- fun activate_assumed_id (((_, Derived _), _), _) thy = thy
- | activate_assumed_id ((((name, ps), Assumed _), _), thms) thy = let
- val ps' = inst_parms ps;
- in
- if test_global_registration thy (name, ps')
- then thy
- else thy
- |> put_global_registration (name, ps') (phi_name, param_prefix name ps) (exp, imp)
- |> fold (fn witn => fn thy => add_global_witness (name, ps')
- (Element.morph_witness (Element.inst_morphism thy insts) witn) thy) thms
- end;
-
- fun activate_derived_id ((_, Assumed _), _) thy = thy
- | activate_derived_id (((name, ps), Derived ths), _) thy = let
- val ps' = inst_parms ps;
- in
- if test_global_registration thy (name, ps')
- then thy
- else thy
- |> put_global_registration (name, ps') (phi_name, param_prefix name ps) (exp, imp)
- |> fold (fn witn => fn thy => add_global_witness (name, ps')
- (witn |> Element.map_witness (fn (t, th) => (* FIXME *)
- (Element.inst_term insts t,
- disch (Element.inst_thm thy insts (satisfy th))))) thy) ths
- end;
-
- fun activate_elem (loc, ps) (Notes (kind, facts)) thy =
- let
- val att_morphism =
- Morphism.binding_morphism (name_morph phi_name param_prfx) $>
- Morphism.thm_morphism satisfy $>
- Element.inst_morphism thy insts $>
- Morphism.thm_morphism disch;
- val facts' = facts
- |> Attrib.map_facts (Attrib.attribute_i thy o Args.morph_values att_morphism)
- |> (map o apsnd o map o apfst o map) (disch o Element.inst_thm thy insts o satisfy)
- |> (map o apfst o apfst) (name_morph phi_name param_prfx);
- in
- thy
- |> global_note_qualified kind facts'
- |> snd
- end
- | activate_elem _ _ thy = thy;
-
- fun activate_elems ((id, _), elems) thy = fold (activate_elem id) elems thy;
-
- in thy |> fold activate_assumed_id ids_elemss_thmss
- |> fold activate_derived_id ids_elemss
- |> fold activate_elems new_elemss end;
- in
- thy |> fold activate_id ids_elemss_thmss
- |> fold activate_reg regs
- end;
-
- in (propss, activate) end;
-
-fun prep_propp propss = propss |> map (fn (_, props) =>
- map (rpair [] o Element.mark_witness) props);
-
-fun prep_result propps thmss =
- ListPair.map (fn ((_, props), thms) => map2 Element.make_witness props thms) (propps, thmss);
-
-fun gen_interpretation prep_registration after_qed prfx raw_expr raw_insts thy =
- let
- val (propss, activate, morphs) = prep_registration thy prfx raw_expr raw_insts;
- fun after_qed' results =
- ProofContext.theory (activate (prep_result propss results))
- #> after_qed;
- in
- thy
- |> ProofContext.init
- |> Proof.theorem_i NONE after_qed' (prep_propp propss)
- |> Element.refine_witness
- |> Seq.hd
- |> pair morphs
- end;
-
-fun gen_interpret prep_registration after_qed name_morph expr insts int state =
- let
- val _ = Proof.assert_forward_or_chain state;
- val ctxt = Proof.context_of state;
- val (propss, activate, morphs) = prep_registration ctxt name_morph expr insts;
- fun after_qed' results =
- Proof.map_context (K (ctxt |> activate (prep_result propss results)))
- #> Proof.put_facts NONE
- #> after_qed;
- in
- state
- |> Proof.local_goal (ProofDisplay.print_results int) (K I) ProofContext.bind_propp_i
- "interpret" NONE after_qed' (map (pair (Binding.empty, [])) (prep_propp propss))
- |> Element.refine_witness |> Seq.hd
- |> pair morphs
- end;
-
-fun standard_name_morph interp_prfx b =
- if Binding.is_empty b then b
- else Binding.map_prefix (fn ((lprfx, _) :: pprfx) =>
- fold (Binding.add_prefix false o fst) pprfx
- #> interp_prfx <> "" ? Binding.add_prefix true interp_prfx
- #> Binding.add_prefix false lprfx
- ) b;
-
-in
-
-val interpretation = gen_interpretation prep_global_registration;
-fun interpretation_cmd interp_prfx = snd ooo gen_interpretation prep_global_registration_cmd
- I (standard_name_morph interp_prfx);
-
-fun interpretation_in_locale after_qed (raw_target, expr) thy =
- let
- val target = intern thy raw_target;
- val (propss, activate) = prep_registration_in_locale target expr thy;
- val raw_propp = prep_propp propss;
-
- val (_, _, goal_ctxt, propp) = thy
- |> ProofContext.init
- |> cert_context_statement (SOME target) [] raw_propp;
-
- fun after_qed' results =
- ProofContext.theory (activate (prep_result propss results))
- #> after_qed;
- in
- goal_ctxt
- |> Proof.theorem_i NONE after_qed' propp
- |> Element.refine_witness |> Seq.hd
- end;
-
-val interpret = gen_interpret prep_local_registration;
-fun interpret_cmd interp_prfx = snd oooo gen_interpret prep_local_registration_cmd
- I (standard_name_morph interp_prfx);
-
-end;
-
-end;
--- a/src/Pure/Isar/outer_lex.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/outer_lex.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/outer_lex.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Outer lexical syntax for Isabelle/Isar.
--- a/src/Pure/Isar/outer_parse.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/outer_parse.ML Wed Jan 28 16:57:12 2009 +0100
@@ -61,12 +61,12 @@
val list: 'a parser -> 'a list parser
val list1: 'a parser -> 'a list parser
val name: bstring parser
- val binding: Binding.T parser
+ val binding: binding parser
val xname: xstring parser
val text: string parser
val path: Path.T parser
val parname: string parser
- val parbinding: Binding.T parser
+ val parbinding: binding parser
val sort: string parser
val arity: (string * string list * string) parser
val multi_arity: (string list * string list * string) parser
@@ -81,11 +81,11 @@
val opt_mixfix': mixfix parser
val where_: string parser
val const: (string * string * mixfix) parser
- val params: (Binding.T * string option) list parser
- val simple_fixes: (Binding.T * string option) list parser
- val fixes: (Binding.T * string option * mixfix) list parser
- val for_fixes: (Binding.T * string option * mixfix) list parser
- val for_simple_fixes: (Binding.T * string option) list parser
+ val params: (binding * string option) list parser
+ val simple_fixes: (binding * string option) list parser
+ val fixes: (binding * string option * mixfix) list parser
+ val for_fixes: (binding * string option * mixfix) list parser
+ val for_simple_fixes: (binding * string option) list parser
val ML_source: (SymbolPos.text * Position.T) parser
val doc_source: (SymbolPos.text * Position.T) parser
val term_group: string parser
--- a/src/Pure/Isar/overloading.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/overloading.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/overloading.ML
- ID: $Id$
Author: Florian Haftmann, TU Muenchen
Overloaded definitions without any discipline.
@@ -134,8 +133,8 @@
fun declare c_ty = pair (Const c_ty);
-fun define checked name (c, t) =
- Thm.add_def (not checked) true (name, Logic.mk_equals (Const (c, Term.fastype_of t), t));
+fun define checked name (c, t) = Thm.add_def (not checked) true (Binding.name name,
+ Logic.mk_equals (Const (c, Term.fastype_of t), t));
(* target *)
--- a/src/Pure/Isar/proof.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/proof.ML Wed Jan 28 16:57:12 2009 +0100
@@ -43,27 +43,27 @@
val match_bind_i: (term list * term) list -> state -> state
val let_bind: (string list * string) list -> state -> state
val let_bind_i: (term list * term) list -> state -> state
- val fix: (Binding.T * string option * mixfix) list -> state -> state
- val fix_i: (Binding.T * typ option * mixfix) list -> state -> state
+ val fix: (binding * string option * mixfix) list -> state -> state
+ val fix_i: (binding * typ option * mixfix) list -> state -> state
val assm: Assumption.export ->
(Attrib.binding * (string * string list) list) list -> state -> state
val assm_i: Assumption.export ->
- ((Binding.T * attribute list) * (term * term list) list) list -> state -> state
+ ((binding * attribute list) * (term * term list) list) list -> state -> state
val assume: (Attrib.binding * (string * string list) list) list -> state -> state
- val assume_i: ((Binding.T * attribute list) * (term * term list) list) list ->
+ val assume_i: ((binding * attribute list) * (term * term list) list) list ->
state -> state
val presume: (Attrib.binding * (string * string list) list) list -> state -> state
- val presume_i: ((Binding.T * attribute list) * (term * term list) list) list ->
+ val presume_i: ((binding * attribute list) * (term * term list) list) list ->
state -> state
- val def: (Attrib.binding * ((Binding.T * mixfix) * (string * string list))) list ->
+ val def: (Attrib.binding * ((binding * mixfix) * (string * string list))) list ->
state -> state
- val def_i: ((Binding.T * attribute list) *
- ((Binding.T * mixfix) * (term * term list))) list -> state -> state
+ val def_i: ((binding * attribute list) *
+ ((binding * mixfix) * (term * term list))) list -> state -> state
val chain: state -> state
val chain_facts: thm list -> state -> state
val get_thmss: state -> (Facts.ref * Attrib.src list) list -> thm list
val note_thmss: (Attrib.binding * (Facts.ref * Attrib.src list) list) list -> state -> state
- val note_thmss_i: ((Binding.T * attribute list) *
+ val note_thmss_i: ((binding * attribute list) *
(thm list * attribute list) list) list -> state -> state
val from_thmss: ((Facts.ref * Attrib.src list) list) list -> state -> state
val from_thmss_i: ((thm list * attribute list) list) list -> state -> state
@@ -87,7 +87,7 @@
(theory -> 'a -> attribute) ->
(context * 'b list -> context * (term list list * (context -> context))) ->
string -> Method.text option -> (thm list list -> state -> state) ->
- ((Binding.T * 'a list) * 'b) list -> state -> state
+ ((binding * 'a list) * 'b) list -> state -> state
val local_qed: Method.text option * bool -> state -> state
val theorem: Method.text option -> (thm list list -> context -> context) ->
(string * string list) list list -> context -> state
@@ -107,11 +107,11 @@
val have: Method.text option -> (thm list list -> state -> state) ->
(Attrib.binding * (string * string list) list) list -> bool -> state -> state
val have_i: Method.text option -> (thm list list -> state -> state) ->
- ((Binding.T * attribute list) * (term * term list) list) list -> bool -> state -> state
+ ((binding * attribute list) * (term * term list) list) list -> bool -> state -> state
val show: Method.text option -> (thm list list -> state -> state) ->
(Attrib.binding * (string * string list) list) list -> bool -> state -> state
val show_i: Method.text option -> (thm list list -> state -> state) ->
- ((Binding.T * attribute list) * (term * term list) list) list -> bool -> state -> state
+ ((binding * attribute list) * (term * term list) list) list -> bool -> state -> state
val schematic_goal: state -> bool
val is_relevant: state -> bool
val local_future_proof: (state -> ('a * state) Future.future) ->
--- a/src/Pure/Isar/proof_context.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/proof_context.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/proof_context.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
The key concept of Isar proof contexts: elevates primitive local
@@ -23,7 +22,7 @@
val abbrev_mode: Proof.context -> bool
val set_stmt: bool -> Proof.context -> Proof.context
val naming_of: Proof.context -> NameSpace.naming
- val full_name: Proof.context -> Binding.T -> string
+ val full_name: Proof.context -> binding -> string
val full_bname: Proof.context -> bstring -> string
val consts_of: Proof.context -> Consts.T
val const_syntax_name: Proof.context -> string -> string
@@ -105,27 +104,27 @@
val restore_naming: Proof.context -> Proof.context -> Proof.context
val reset_naming: Proof.context -> Proof.context
val note_thmss: string ->
- ((Binding.T * attribute list) * (Facts.ref * attribute list) list) list ->
+ ((binding * attribute list) * (Facts.ref * attribute list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val note_thmss_i: string ->
- ((Binding.T * attribute list) * (thm list * attribute list) list) list ->
+ ((binding * attribute list) * (thm list * attribute list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val put_thms: bool -> string * thm list option -> Proof.context -> Proof.context
- val read_vars: (Binding.T * string option * mixfix) list -> Proof.context ->
- (Binding.T * typ option * mixfix) list * Proof.context
- val cert_vars: (Binding.T * typ option * mixfix) list -> Proof.context ->
- (Binding.T * typ option * mixfix) list * Proof.context
- val add_fixes: (Binding.T * string option * mixfix) list ->
+ val read_vars: (binding * string option * mixfix) list -> Proof.context ->
+ (binding * typ option * mixfix) list * Proof.context
+ val cert_vars: (binding * typ option * mixfix) list -> Proof.context ->
+ (binding * typ option * mixfix) list * Proof.context
+ val add_fixes: (binding * string option * mixfix) list ->
Proof.context -> string list * Proof.context
- val add_fixes_i: (Binding.T * typ option * mixfix) list ->
+ val add_fixes_i: (binding * typ option * mixfix) list ->
Proof.context -> string list * Proof.context
val auto_fixes: Proof.context * (term list list * 'a) -> Proof.context * (term list list * 'a)
val bind_fixes: string list -> Proof.context -> (term -> term) * Proof.context
val add_assms: Assumption.export ->
- ((Binding.T * attribute list) * (string * string list) list) list ->
+ ((binding * attribute list) * (string * string list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val add_assms_i: Assumption.export ->
- ((Binding.T * attribute list) * (term * term list) list) list ->
+ ((binding * attribute list) * (term * term list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val add_cases: bool -> (string * RuleCases.T option) list -> Proof.context -> Proof.context
val apply_case: RuleCases.T -> Proof.context -> (string * term list) list * Proof.context
@@ -135,7 +134,7 @@
Context.generic -> Context.generic
val add_const_constraint: string * typ option -> Proof.context -> Proof.context
val add_abbrev: string -> Properties.T ->
- Binding.T * term -> Proof.context -> (term * term) * Proof.context
+ binding * term -> Proof.context -> (term * term) * Proof.context
val revert_abbrev: string -> string -> Proof.context -> Proof.context
val verbose: bool ref
val setmp_verbose: ('a -> 'b) -> 'a -> 'b
--- a/src/Pure/Isar/proof_display.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/proof_display.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/proof_display.ML
- ID: $Id$
Author: Makarius
Printing of theorems, goals, results etc.
--- a/src/Pure/Isar/proof_node.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/proof_node.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/proof_node.ML
- ID: $Id$
Author: Makarius
Proof nodes with linear position and backtracking.
--- a/src/Pure/Isar/rule_insts.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/rule_insts.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/rule_insts.ML
- ID: $Id$
Author: Makarius
Rule instantiations -- operations within a rule/subgoal context.
--- a/src/Pure/Isar/skip_proof.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/skip_proof.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/skip_proof.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Skipping proofs -- quick_and_dirty mode.
--- a/src/Pure/Isar/spec_parse.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/spec_parse.ML Wed Jan 28 16:57:12 2009 +0100
@@ -15,16 +15,15 @@
val opt_thm_name: string -> Attrib.binding parser
val spec: (Attrib.binding * string list) parser
val named_spec: (Attrib.binding * string list) parser
- val spec_name: ((Binding.T * string) * Attrib.src list) parser
- val spec_opt_name: ((Binding.T * string) * Attrib.src list) parser
+ val spec_name: ((binding * string) * Attrib.src list) parser
+ val spec_opt_name: ((binding * string) * Attrib.src list) parser
val xthm: (Facts.ref * Attrib.src list) parser
val xthms1: (Facts.ref * Attrib.src list) list parser
val name_facts: (Attrib.binding * (Facts.ref * Attrib.src list) list) list parser
val locale_mixfix: mixfix parser
- val locale_fixes: (Binding.T * string option * mixfix) list parser
+ val locale_fixes: (binding * string option * mixfix) list parser
val locale_insts: (string option list * (Attrib.binding * string) list) parser
val class_expr: string list parser
- val locale_expr: Old_Locale.expr parser
val locale_expression: Expression.expression parser
val locale_keyword: string parser
val context_element: Element.context parser
@@ -32,7 +31,7 @@
val general_statement: (Element.context list * Element.statement) parser
val statement_keyword: string parser
val specification:
- (Binding.T * ((Attrib.binding * string list) list * (Binding.T * string option) list)) list parser
+ (binding * ((Attrib.binding * string list) list * (binding * string option) list)) list parser
end;
structure SpecParse: SPEC_PARSE =
@@ -115,13 +114,6 @@
val class_expr = plus1_unless locale_keyword P.xname;
-val locale_expr =
- let
- fun expr2 x = (P.xname >> Old_Locale.Locale || P.$$$ "(" |-- P.!!! (expr0 --| P.$$$ ")")) x
- and expr1 x = (expr2 -- Scan.repeat1 (P.maybe rename) >> Old_Locale.Rename || expr2) x
- and expr0 x = (plus1_unless locale_keyword expr1 >> (fn [e] => e | es => Old_Locale.Merge es)) x;
- in expr0 end;
-
val locale_expression =
let
fun expr2 x = P.xname x;
--- a/src/Pure/Isar/specification.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/specification.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Isar/specification.ML
- ID: $Id$
Author: Makarius
Derived local theory specifications --- with type-inference and
@@ -9,33 +8,33 @@
signature SPECIFICATION =
sig
val print_consts: local_theory -> (string * typ -> bool) -> (string * typ) list -> unit
- val check_specification: (Binding.T * typ option * mixfix) list ->
+ val check_specification: (binding * typ option * mixfix) list ->
(Attrib.binding * term list) list list -> Proof.context ->
- (((Binding.T * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
- val read_specification: (Binding.T * string option * mixfix) list ->
+ (((binding * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
+ val read_specification: (binding * string option * mixfix) list ->
(Attrib.binding * string list) list list -> Proof.context ->
- (((Binding.T * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
- val check_free_specification: (Binding.T * typ option * mixfix) list ->
+ (((binding * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
+ val check_free_specification: (binding * typ option * mixfix) list ->
(Attrib.binding * term list) list -> Proof.context ->
- (((Binding.T * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
- val read_free_specification: (Binding.T * string option * mixfix) list ->
+ (((binding * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
+ val read_free_specification: (binding * string option * mixfix) list ->
(Attrib.binding * string list) list -> Proof.context ->
- (((Binding.T * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
- val axiomatization: (Binding.T * typ option * mixfix) list ->
+ (((binding * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context
+ val axiomatization: (binding * typ option * mixfix) list ->
(Attrib.binding * term list) list -> theory ->
(term list * (string * thm list) list) * theory
- val axiomatization_cmd: (Binding.T * string option * mixfix) list ->
+ val axiomatization_cmd: (binding * string option * mixfix) list ->
(Attrib.binding * string list) list -> theory ->
(term list * (string * thm list) list) * theory
val definition:
- (Binding.T * typ option * mixfix) option * (Attrib.binding * term) ->
+ (binding * typ option * mixfix) option * (Attrib.binding * term) ->
local_theory -> (term * (string * thm)) * local_theory
val definition_cmd:
- (Binding.T * string option * mixfix) option * (Attrib.binding * string) ->
+ (binding * string option * mixfix) option * (Attrib.binding * string) ->
local_theory -> (term * (string * thm)) * local_theory
- val abbreviation: Syntax.mode -> (Binding.T * typ option * mixfix) option * term ->
+ val abbreviation: Syntax.mode -> (binding * typ option * mixfix) option * term ->
local_theory -> local_theory
- val abbreviation_cmd: Syntax.mode -> (Binding.T * string option * mixfix) option * string ->
+ val abbreviation_cmd: Syntax.mode -> (binding * string option * mixfix) option * string ->
local_theory -> local_theory
val notation: bool -> Syntax.mode -> (term * mixfix) list -> local_theory -> local_theory
val notation_cmd: bool -> Syntax.mode -> (string * mixfix) list -> local_theory -> local_theory
@@ -149,7 +148,8 @@
(*axioms*)
val (axioms, axioms_thy) = consts_thy |> fold_map (fn ((b, atts), props) =>
- fold_map Thm.add_axiom (PureThy.name_multi (Binding.base_name b) (map subst props))
+ fold_map Thm.add_axiom
+ ((map o apfst) Binding.name (PureThy.name_multi (Binding.base_name b) (map subst props)))
#>> (fn ths => ((b, atts), [(map Drule.standard' ths, [])]))) specs;
val (facts, thy') = axioms_thy |> PureThy.note_thmss Thm.axiomK
(Attrib.map_facts (Attrib.attribute_i axioms_thy) axioms);
--- a/src/Pure/Isar/theory_target.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Isar/theory_target.ML Wed Jan 28 16:57:12 2009 +0100
@@ -6,7 +6,7 @@
signature THEORY_TARGET =
sig
- val peek: local_theory -> {target: string, new_locale: bool, is_locale: bool,
+ val peek: local_theory -> {target: string, is_locale: bool,
is_class: bool, instantiation: string list * (string * sort) list * sort,
overloading: (string * (string * typ) * bool) list}
val init: string option -> theory -> local_theory
@@ -21,34 +21,17 @@
structure TheoryTarget: THEORY_TARGET =
struct
-(* new locales *)
-
-fun locale_extern new_locale x =
- if new_locale then Locale.extern x else Old_Locale.extern x;
-fun locale_add_type_syntax new_locale x =
- if new_locale then Locale.add_type_syntax x else Old_Locale.add_type_syntax x;
-fun locale_add_term_syntax new_locale x =
- if new_locale then Locale.add_term_syntax x else Old_Locale.add_term_syntax x;
-fun locale_add_declaration new_locale x =
- if new_locale then Locale.add_declaration x else Old_Locale.add_declaration x;
-fun locale_add_thmss new_locale x =
- if new_locale then Locale.add_thmss x else Old_Locale.add_thmss x;
-fun locale_init new_locale x =
- if new_locale then Locale.init x else Old_Locale.init x;
-fun locale_intern new_locale x =
- if new_locale then Locale.intern x else Old_Locale.intern x;
-
(* context data *)
-datatype target = Target of {target: string, new_locale: bool, is_locale: bool,
+datatype target = Target of {target: string, is_locale: bool,
is_class: bool, instantiation: string list * (string * sort) list * sort,
overloading: (string * (string * typ) * bool) list};
-fun make_target target new_locale is_locale is_class instantiation overloading =
- Target {target = target, new_locale = new_locale, is_locale = is_locale,
+fun make_target target is_locale is_class instantiation overloading =
+ Target {target = target, is_locale = is_locale,
is_class = is_class, instantiation = instantiation, overloading = overloading};
-val global_target = make_target "" false false false ([], [], []) [];
+val global_target = make_target "" false false ([], [], []) [];
structure Data = ProofDataFun
(
@@ -64,7 +47,7 @@
fun pretty_thy ctxt target is_locale is_class =
let
val thy = ProofContext.theory_of ctxt;
- val target_name = (if is_class then "class " else "locale ") ^ locale_extern is_class thy target;
+ val target_name = (if is_class then "class " else "locale ") ^ Locale.extern thy target;
val fixes = map (fn (x, T) => (Binding.name x, SOME T, NoSyn))
(#1 (ProofContext.inferred_fixes ctxt));
val assumes = map (fn A => (Attrib.empty_binding, [(Thm.term_of A, [])]))
@@ -89,7 +72,7 @@
(* target declarations *)
-fun target_decl add (Target {target, new_locale, ...}) d lthy =
+fun target_decl add (Target {target, ...}) d lthy =
let
val d' = Morphism.transform (LocalTheory.target_morphism lthy) d;
val d0 = Morphism.form d';
@@ -100,12 +83,12 @@
|> LocalTheory.target (Context.proof_map d0)
else
lthy
- |> LocalTheory.target (add new_locale target d')
+ |> LocalTheory.target (add target d')
end;
-val type_syntax = target_decl locale_add_type_syntax;
-val term_syntax = target_decl locale_add_term_syntax;
-val declaration = target_decl locale_add_declaration;
+val type_syntax = target_decl Locale.add_type_syntax;
+val term_syntax = target_decl Locale.add_term_syntax;
+val declaration = target_decl Locale.add_declaration;
fun class_target (Target {target, ...}) f =
LocalTheory.raw_theory f #>
@@ -166,7 +149,7 @@
|> ProofContext.note_thmss_i kind facts
||> ProofContext.restore_naming ctxt;
-fun notes (Target {target, is_locale, new_locale, ...}) kind facts lthy =
+fun notes (Target {target, is_locale, ...}) kind facts lthy =
let
val thy = ProofContext.theory_of lthy;
val facts' = facts
@@ -185,7 +168,7 @@
#> PureThy.note_thmss_grouped kind (LocalTheory.group_of lthy) global_facts #> snd
#> Sign.restore_naming thy)
|> not is_locale ? LocalTheory.target (note_local kind global_facts #> snd)
- |> is_locale ? LocalTheory.target (locale_add_thmss new_locale target kind target_facts)
+ |> is_locale ? LocalTheory.target (Locale.add_thmss target kind target_facts)
|> note_local kind local_facts
end;
@@ -313,7 +296,7 @@
(fn name => fn (Const (c, _), rhs) => Overloading.define checked name (c, rhs))
| NONE =>
if is_none (Class_Target.instantiation_param lthy c)
- then (fn name => fn eq => Thm.add_def false false (name, Logic.mk_equals eq))
+ then (fn name => fn eq => Thm.add_def false false (Binding.name name, Logic.mk_equals eq))
else (fn name => fn (Const (c, _), rhs) => AxClass.define_overloaded name (c, rhs)));
val (global_def, lthy3) = lthy2
|> LocalTheory.theory_result (define_const (Binding.base_name name') (lhs', rhs'));
@@ -335,13 +318,13 @@
fun init_target _ NONE = global_target
| init_target thy (SOME target) =
make_target target (Locale.defined thy (Locale.intern thy target))
- true (Class_Target.is_class thy target) ([], [], []) [];
+ (Class_Target.is_class thy target) ([], [], []) [];
-fun init_ctxt (Target {target, new_locale, is_locale, is_class, instantiation, overloading}) =
+fun init_ctxt (Target {target, is_locale, is_class, instantiation, overloading}) =
if not (null (#1 instantiation)) then Class_Target.init_instantiation instantiation
else if not (null overloading) then Overloading.init overloading
else if not is_locale then ProofContext.init
- else if not is_class then locale_init new_locale target
+ else if not is_class then Locale.init target
else Class_Target.init target;
fun init_lthy (ta as Target {target, instantiation, overloading, ...}) =
@@ -375,7 +358,7 @@
val ctxt = ProofContext.init thy;
val ops = raw_ops |> map (fn (name, const, checked) =>
(name, Term.dest_Const (prep_const ctxt const), checked));
- in thy |> init_lthy_ctxt (make_target "" false false false ([], [], []) ops) end;
+ in thy |> init_lthy_ctxt (make_target "" false false ([], [], []) ops) end;
in
@@ -383,10 +366,9 @@
fun begin target ctxt = init_lthy (init_target (ProofContext.theory_of ctxt) (SOME target)) ctxt;
fun context "-" thy = init NONE thy
- | context target thy = init (SOME (locale_intern
- (Locale.defined thy (Locale.intern thy target)) thy target)) thy;
+ | context target thy = init (SOME (Locale.intern thy target)) thy;
-fun instantiation arities = init_lthy_ctxt (make_target "" false false false arities []);
+fun instantiation arities = init_lthy_ctxt (make_target "" false false arities []);
fun instantiation_cmd raw_arities thy =
instantiation (read_multi_arity thy raw_arities) thy;
--- a/src/Pure/ML-Systems/alice.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/alice.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/alice.ML
- ID: $Id$
Compatibility file for Alice 1.4.
--- a/src/Pure/ML-Systems/exn.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/exn.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/exn.ML
- ID: $Id$
Author: Makarius
Extra support for exceptions.
--- a/src/Pure/ML-Systems/install_pp_polyml.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/install_pp_polyml.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/install_pp_polyml.ML
- ID: $Id$
Extra toplevel pretty-printing for Poly/ML.
*)
--- a/src/Pure/ML-Systems/ml_name_space.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/ml_name_space.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/ml_name_space.ML
- ID: $Id$
Author: Makarius
ML name space -- dummy version of Poly/ML 5.2 facility.
--- a/src/Pure/ML-Systems/multithreading.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/multithreading.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/multithreading.ML
- ID: $Id$
Author: Makarius
Dummy implementation of multithreading setup.
--- a/src/Pure/ML-Systems/multithreading_polyml.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/multithreading_polyml.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/multithreading_polyml.ML
- ID: $Id$
Author: Makarius
Multithreading in Poly/ML 5.2 or later (cf. polyml/basis/Thread.sml).
@@ -77,12 +76,12 @@
fun with_attributes new_atts f x =
let
- val orig_atts = Thread.getAttributes ();
+ val orig_atts = safe_interrupts (Thread.getAttributes ());
fun restore () = Thread.setAttributes orig_atts;
val result =
(Thread.setAttributes (safe_interrupts new_atts);
Exn.Result (f orig_atts x) before restore ())
- handle e => Exn.Exn e before restore ()
+ handle e => Exn.Exn e before restore ();
in Exn.release result end;
fun interruptible f = with_attributes regular_interrupts (fn _ => f);
--- a/src/Pure/ML-Systems/overloading_smlnj.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/overloading_smlnj.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/overloading_smlnj.ML
- ID: $Id$
Author: Makarius
Overloading in SML/NJ (cf. smlnj/base/system/smlnj/init/pervasive.sml).
--- a/src/Pure/ML-Systems/polyml-4.1.3.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml-4.1.3.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml-4.1.3.ML
- ID: $Id$
Compatibility wrapper for Poly/ML 4.1.3.
*)
--- a/src/Pure/ML-Systems/polyml-4.1.4.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml-4.1.4.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml-4.1.4.ML
- ID: $Id$
Compatibility wrapper for Poly/ML 4.1.4.
*)
--- a/src/Pure/ML-Systems/polyml-4.2.0.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml-4.2.0.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml-4.2.0.ML
- ID: $Id$
Compatibility wrapper for Poly/ML 4.2.0.
*)
--- a/src/Pure/ML-Systems/polyml-5.0.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml-5.0.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml-5.0.ML
- ID: $Id$
Compatibility wrapper for Poly/ML 5.0.
*)
@@ -11,3 +10,5 @@
val pointer_eq = PolyML.pointerEq;
+fun share_common_data () = PolyML.shareCommonData PolyML.rootFunction;
+
--- a/src/Pure/ML-Systems/polyml-5.1.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml-5.1.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml-5.1.ML
- ID: $Id$
Compatibility wrapper for Poly/ML 5.1.
*)
@@ -9,3 +8,6 @@
use "ML-Systems/polyml_old_compiler5.ML";
val pointer_eq = PolyML.pointerEq;
+
+fun share_common_data () = PolyML.shareCommonData PolyML.rootFunction;
+
--- a/src/Pure/ML-Systems/polyml.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml.ML
- ID: $Id$
Compatibility wrapper for Poly/ML 5.2 or later.
*)
@@ -13,6 +12,8 @@
val pointer_eq = PolyML.pointerEq;
+fun share_common_data () = PolyML.shareCommonData PolyML.rootFunction;
+
(* runtime compilation *)
--- a/src/Pure/ML-Systems/polyml_common.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml_common.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml_common.ML
- ID: $Id$
Compatibility file for Poly/ML -- common part for 4.x and 5.x.
*)
--- a/src/Pure/ML-Systems/polyml_old_basis.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml_old_basis.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml_old_basis.ML
- ID: $Id$
Fixes for the old SML basis library (before Poly/ML 4.2.0).
*)
--- a/src/Pure/ML-Systems/polyml_old_compiler4.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml_old_compiler4.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml_old_compiler4.ML
- ID: $Id$
Runtime compilation -- for old PolyML.compiler (version 4.x).
*)
--- a/src/Pure/ML-Systems/polyml_old_compiler5.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/polyml_old_compiler5.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/polyml_old_compiler5.ML
- ID: $Id$
Runtime compilation -- for old PolyML.compilerEx (version 5.0, 5.1).
*)
--- a/src/Pure/ML-Systems/proper_int.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/proper_int.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/proper_int.ML
- ID: $Id$
Author: Makarius
SML basis with type int representing proper integers, not machine
--- a/src/Pure/ML-Systems/smlnj.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/smlnj.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/smlnj.ML
- ID: $Id$
Compatibility file for Standard ML of New Jersey 110 or later.
*)
--- a/src/Pure/ML-Systems/system_shell.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/system_shell.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/system_shell.ML
- ID: $Id$
Author: Makarius
Generic system shell processes (no provisions to propagate interrupts;
--- a/src/Pure/ML-Systems/thread_dummy.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/thread_dummy.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/thread_dummy.ML
- ID: $Id$
Author: Makarius
Default (mostly dummy) implementation of thread structures
--- a/src/Pure/ML-Systems/time_limit.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/time_limit.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/time_limit.ML
- ID: $Id$
Author: Makarius
Dummy implementation of NJ's TimeLimit structure.
--- a/src/Pure/ML-Systems/universal.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML-Systems/universal.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML-Systems/universal.ML
- ID: $Id$
Author: Makarius
Universal values via tagged union. Emulates structure Universal
--- a/src/Pure/ML/ml_antiquote.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML/ml_antiquote.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML/ml_antiquote.ML
- ID: $Id$
Author: Makarius
Common ML antiquotations.
--- a/src/Pure/ML/ml_context.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML/ml_context.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML/ml_context.ML
- ID: $Id$
Author: Makarius
ML context and antiquotations.
@@ -126,7 +125,8 @@
fun ml_store sel (name, ths) =
let
- val ths' = Context.>>> (Context.map_theory_result (PureThy.store_thms (name, ths)));
+ val ths' = Context.>>> (Context.map_theory_result
+ (PureThy.store_thms (Binding.name name, ths)));
val _ =
if name = "" then ()
else if not (ML_Syntax.is_identifier name) then
--- a/src/Pure/ML/ml_lex.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML/ml_lex.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML/ml_lex.ML
- ID: $Id$
Author: Makarius
Lexical syntax for SML.
--- a/src/Pure/ML/ml_parse.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML/ml_parse.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML/ml_parse.ML
- ID: $Id$
Author: Makarius
Minimal parsing for SML -- fixing integer numerals.
--- a/src/Pure/ML/ml_syntax.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML/ml_syntax.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML/ml_syntax.ML
- ID: $Id$
Author: Makarius
Basic ML syntax operations.
--- a/src/Pure/ML/ml_thms.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ML/ml_thms.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ML/ml_thms.ML
- ID: $Id$
Author: Makarius
Isar theorem values within ML.
--- a/src/Pure/Proof/extraction.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Proof/extraction.ML Wed Jan 28 16:57:12 2009 +0100
@@ -546,7 +546,7 @@
| corr d defs vs ts Ts hs (prf0 as PThm (_, ((name, prop, SOME Ts'), body))) _ _ =
let
- val prf = force_proof body;
+ val prf = join_proof body;
val (vs', tye) = find_inst prop Ts ts vs;
val tye' = (map fst (OldTerm.term_tvars prop) ~~ Ts') @ tye;
val T = etype_of thy' vs' [] prop;
@@ -570,7 +570,7 @@
(proof_combt
(PThm (serial (),
((corr_name name vs', corr_prop, SOME (map TVar (OldTerm.term_tvars corr_prop))),
- Lazy.value (make_proof_body corr_prf))), vfs_of corr_prop))
+ Future.value (make_proof_body corr_prf))), vfs_of corr_prop))
(map (get_var_type corr_prop) (vfs_of prop))
in
((name, (vs', ((nullt, nullt), (corr_prf, corr_prf')))) :: defs'',
@@ -636,7 +636,7 @@
| extr d defs vs ts Ts hs (prf0 as PThm (_, ((s, prop, SOME Ts'), body))) =
let
- val prf = force_proof body;
+ val prf = join_proof body;
val (vs', tye) = find_inst prop Ts ts vs;
val tye' = (map fst (OldTerm.term_tvars prop) ~~ Ts') @ tye
in
@@ -681,7 +681,7 @@
(proof_combt
(PThm (serial (),
((corr_name s vs', corr_prop, SOME (map TVar (OldTerm.term_tvars corr_prop))),
- Lazy.value (make_proof_body corr_prf'))), vfs_of corr_prop))
+ Future.value (make_proof_body corr_prf'))), vfs_of corr_prop))
(map (get_var_type corr_prop) (vfs_of prop));
in
((s, (vs', ((t', u), (corr_prf', corr_prf'')))) :: defs'',
@@ -733,11 +733,11 @@
val (def_thms, thy') = if t = nullt then ([], thy) else
thy
|> Sign.add_consts_i [(extr_name s vs, fastype_of ft, NoSyn)]
- |> PureThy.add_defs false [((extr_name s vs ^ "_def",
+ |> PureThy.add_defs false [((Binding.name (extr_name s vs ^ "_def"),
Logic.mk_equals (head_of (strip_abs_body fu), ft)), [])]
in
thy'
- |> PureThy.store_thm (corr_name s vs,
+ |> PureThy.store_thm (Binding.name (corr_name s vs),
Thm.varifyT (funpow (length (OldTerm.term_vars corr_prop))
(Thm.forall_elim_var 0) (forall_intr_frees
(ProofChecker.thm_of_proof thy'
--- a/src/Pure/Proof/proof_syntax.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Proof/proof_syntax.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Proof/proof_syntax.ML
- ID: $Id$
Author: Stefan Berghofer, TU Muenchen
Function for parsing and printing proof terms.
@@ -229,7 +228,7 @@
val prop = Thm.full_prop_of thm;
val prf = Thm.proof_of thm;
val prf' = (case strip_combt (fst (strip_combP prf)) of
- (PThm (_, ((_, prop', _), body)), _) => if prop = prop' then force_proof body else prf
+ (PThm (_, ((_, prop', _), body)), _) => if prop = prop' then join_proof body else prf
| _ => prf)
in if full then Reconstruct.reconstruct_proof thy prop prf' else prf' end;
--- a/src/Pure/Proof/reconstruct.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Proof/reconstruct.ML Wed Jan 28 16:57:12 2009 +0100
@@ -358,7 +358,7 @@
val _ = message ("Reconstructing proof of " ^ a);
val _ = message (Syntax.string_of_term_global thy prop);
val prf' = forall_intr_vfs_prf prop
- (reconstruct_proof thy prop (force_proof body));
+ (reconstruct_proof thy prop (join_proof body));
val (maxidx', prfs', prf) = expand
(maxidx_proof prf' ~1) prfs prf'
in (maxidx' + maxidx + 1, inc (maxidx + 1) prf,
--- a/src/Pure/ProofGeneral/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/ROOT.ML
- ID: $Id$
Author: David Aspinall
Proof General interface for Isabelle, both the traditional Emacs version,
--- a/src/Pure/ProofGeneral/pgip.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip.ML
- ID: $Id$
Author: David Aspinall
Prover-side PGIP abstraction.
--- a/src/Pure/ProofGeneral/pgip_input.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_input.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_input.ML
- ID: $Id$
Author: David Aspinall
PGIP abstraction: input commands.
--- a/src/Pure/ProofGeneral/pgip_isabelle.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_isabelle.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_isabelle.ML
- ID: $Id$
Author: David Aspinall
Prover-side PGIP abstraction: Isabelle configuration and mapping to Isabelle types.
--- a/src/Pure/ProofGeneral/pgip_markup.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_markup.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_markup.ML
- ID: $Id$
Author: David Aspinall
PGIP abstraction: document markup for proof scripts (in progress).
--- a/src/Pure/ProofGeneral/pgip_output.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_output.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_output.ML
- ID: $Id$
Author: David Aspinall
PGIP abstraction: output commands.
--- a/src/Pure/ProofGeneral/pgip_parser.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_parser.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_parser.ML
- ID: $Id$
Author: David Aspinall and Makarius
Parsing theory sources without execution (via keyword classification).
--- a/src/Pure/ProofGeneral/pgip_tests.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_tests.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_tests.ML
- ID: $Id$
Author: David Aspinall
A test suite for the PGIP abstraction code (in progress).
--- a/src/Pure/ProofGeneral/pgip_types.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_types.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgip_types.ML
- ID: $Id$
Author: David Aspinall
PGIP abstraction: types and conversions.
--- a/src/Pure/ProofGeneral/pgml.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/pgml.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/pgml.ML
- ID: $Id$
Author: David Aspinall
PGIP abstraction: PGML
--- a/src/Pure/ProofGeneral/proof_general_keywords.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/proof_general_keywords.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/proof_general_keywords.ML
- ID: $Id$
Author: Makarius
Dummy session with outer syntax keyword initialization.
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/ProofGeneral/proof_general_pgip.ML
- ID: $Id$
Author: David Aspinall and Markus Wenzel
Isabelle configuration for Proof General using PGIP protocol.
@@ -257,7 +256,7 @@
(case Thm.proof_body_of th of
PBody {proof = PThm (_, ((name, _, _), body)), ...} =>
if Thm.has_name_hint th andalso Thm.get_name_hint th = name
- then add_proof_body (Lazy.force body)
+ then add_proof_body (Future.join body)
else I
| body => add_proof_body body);
--- a/src/Pure/Pure.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Pure.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,6 +1,3 @@
-(* Title: Pure/Pure.thy
- ID: $Id$
-*)
section {* Further content for the Pure theory *}
--- a/src/Pure/Syntax/ast.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/ast.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/ast.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Abstract syntax trees, translation rules, matching and normalization of asts.
--- a/src/Pure/Syntax/lexicon.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/lexicon.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/lexicon.ML
- ID: $Id$
Author: Tobias Nipkow and Markus Wenzel, TU Muenchen
Lexer for the inner Isabelle syntax (terms and types).
--- a/src/Pure/Syntax/mixfix.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/mixfix.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/mixfix.ML
- ID: $Id$
Author: Tobias Nipkow and Markus Wenzel, TU Muenchen
Mixfix declarations, infixes, binders.
--- a/src/Pure/Syntax/parser.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/parser.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/parser.ML
- ID: $Id$
Author: Carsten Clasohm, Sonia Mahjoub, and Markus Wenzel, TU Muenchen
General context-free parser for the inner syntax of terms, types, etc.
--- a/src/Pure/Syntax/printer.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/printer.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/printer.ML
- ID: $Id$
Author: Tobias Nipkow and Markus Wenzel, TU Muenchen
Pretty printing of asts, terms, types and print (ast) translation.
--- a/src/Pure/Syntax/simple_syntax.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/simple_syntax.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/simple_syntax.ML
- ID: $Id$
Author: Makarius
Simple syntax for types and terms --- for bootstrapping Pure.
--- a/src/Pure/Syntax/syn_ext.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/syn_ext.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/syn_ext.ML
- ID: $Id$
Author: Markus Wenzel and Carsten Clasohm, TU Muenchen
Syntax extension (internal interface).
--- a/src/Pure/Syntax/type_ext.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Syntax/type_ext.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Syntax/type_ext.ML
- ID: $Id$
Author: Tobias Nipkow and Markus Wenzel, TU Muenchen
Utilities for input and output of types. Also the concrete syntax of
--- a/src/Pure/Thy/html.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/html.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/html.ML
- ID: $Id$
Author: Markus Wenzel and Stefan Berghofer, TU Muenchen
HTML presentation elements.
--- a/src/Pure/Thy/latex.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/latex.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/latex.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
LaTeX presentation elements -- based on outer lexical syntax.
--- a/src/Pure/Thy/present.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/present.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/present.ML
- ID: $Id$
Author: Markus Wenzel and Stefan Berghofer, TU Muenchen
Theory presentation: HTML, graph files, (PDF)LaTeX documents.
--- a/src/Pure/Thy/term_style.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/term_style.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/term_style.ML
- ID: $Id$
Author: Florian Haftmann, TU Muenchen
Styles for terms, to use with the "term_style" and "thm_style"
--- a/src/Pure/Thy/thm_deps.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/thm_deps.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/thm_deps.ML
- ID: $Id$
Author: Stefan Berghofer, TU Muenchen
Visualize dependencies of theorems.
--- a/src/Pure/Thy/thy_header.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/thy_header.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/thy_header.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Theory headers -- independent of outer syntax.
--- a/src/Pure/Thy/thy_output.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Thy/thy_output.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Thy/thy_output.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Theory document output.
--- a/src/Pure/Tools/ROOT.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Tools/ROOT.ML Wed Jan 28 16:57:12 2009 +0100
@@ -9,8 +9,5 @@
(*basic XML support*)
use "xml_syntax.ML";
-(*derived theory and proof elements*)
-use "invoke.ML";
-
(*quickcheck needed here because of pg preferences*)
use "../../Tools/quickcheck.ML"
--- a/src/Pure/Tools/invoke.ML Wed Jan 28 16:29:16 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,127 +0,0 @@
-(* Title: Pure/Tools/invoke.ML
- Author: Makarius
-
-Schematic invocation of locale expression in proof context.
-*)
-
-signature INVOKE =
-sig
- val invoke: string * Attrib.src list -> Old_Locale.expr -> string option list ->
- (Binding.T * string option * mixfix) list -> bool -> Proof.state -> Proof.state
- val invoke_i: string * attribute list -> Old_Locale.expr -> term option list ->
- (Binding.T * typ option * mixfix) list -> bool -> Proof.state -> Proof.state
-end;
-
-structure Invoke: INVOKE =
-struct
-
-
-(* invoke *)
-
-local
-
-fun gen_invoke prep_att prep_expr parse_term add_fixes
- (prfx, raw_atts) raw_expr raw_insts fixes int state =
- let
- val thy = Proof.theory_of state;
- val _ = Proof.assert_forward_or_chain state;
- val chain_facts = if can Proof.assert_chain state then Proof.the_facts state else [];
-
- val more_atts = map (prep_att thy) raw_atts;
- val (elems, _) = prep_expr raw_expr [] (ProofContext.init thy);
-
- val prems = maps Element.prems_of elems;
- val params = maps Element.params_of elems;
- val types = rev (fold Term.add_tfrees prems (fold (Term.add_tfreesT o #2) params []));
-
- val prems' = map Logic.varify prems;
- val params' = map (Logic.varify o Free) params;
- val types' = map (Logic.varifyT o TFree) types;
-
- val state' = state
- |> Proof.enter_forward
- |> Proof.begin_block
- |> Proof.map_context (snd o add_fixes fixes);
- val ctxt' = Proof.context_of state';
-
- val raw_insts' = zip_options params' raw_insts
- handle Library.UnequalLengths => error "Too many instantiations";
-
- fun prep_inst (t, u) =
- TypeInfer.constrain (TypeInfer.paramify_vars (Term.fastype_of t)) (parse_term ctxt' u);
- val insts = map #1 raw_insts' ~~
- Variable.polymorphic ctxt' (Syntax.check_terms ctxt' (map prep_inst raw_insts'));
- val inst_rules =
- replicate (length types') Drule.termI @
- map (fn t =>
- (case AList.lookup (op =) insts t of
- SOME u => Drule.mk_term (Thm.cterm_of thy u)
- | NONE => Drule.termI)) params';
-
- val propp =
- [((Binding.empty, []), map (rpair [] o Logic.mk_term o Logic.mk_type) types'),
- ((Binding.empty, []), map (rpair [] o Logic.mk_term) params'),
- ((Binding.empty, []), map (rpair [] o Element.mark_witness) prems')];
- fun after_qed results =
- Proof.end_block #>
- Proof.map_context (fn ctxt =>
- let
- val ([res_types, res_params, res_prems], ctxt'') =
- fold_burrow (apfst snd oo Variable.import_thms false) results ctxt';
-
- val types'' = map (Logic.dest_type o Thm.term_of o Drule.dest_term) res_types;
- val params'' = map (Thm.term_of o Drule.dest_term) res_params;
- val inst = Element.morph_ctxt (Element.inst_morphism thy
- (Symtab.make (map #1 types ~~ types''), Symtab.make (map #1 params ~~ params'')));
- val elems' = map inst elems;
- val prems'' = map2 Element.make_witness (maps Element.prems_of elems') res_prems;
- val notes =
- maps (Element.facts_of thy) elems'
- |> Element.satisfy_facts prems''
- |> Element.generalize_facts ctxt'' ctxt
- |> Attrib.map_facts (Attrib.attribute_i thy)
- |> map (fn ((a, atts), bs) => ((a, atts @ more_atts), bs));
- in
- ctxt
- |> ProofContext.sticky_prefix prfx
- |> ProofContext.qualified_names
- |> (snd o ProofContext.note_thmss_i "" notes)
- |> ProofContext.restore_naming ctxt
- end) #>
- Proof.put_facts NONE;
- in
- state'
- |> Proof.chain_facts chain_facts
- |> Proof.local_goal (K (K ())) (K I) ProofContext.bind_propp_schematic_i
- "invoke" NONE after_qed propp
- |> Element.refine_witness
- |> Seq.hd
- |> Proof.refine (Method.Basic (K (Method.METHOD (K (HEADGOAL (RANGE (map rtac inst_rules))))),
- Position.none))
- |> Seq.hd
- end;
-
-in
-
-fun invoke x =
- gen_invoke Attrib.attribute Old_Locale.read_expr Syntax.parse_term ProofContext.add_fixes x;
-fun invoke_i x = gen_invoke (K I) Old_Locale.cert_expr (K I) ProofContext.add_fixes_i x;
-
-end;
-
-
-(* concrete syntax *)
-
-local structure P = OuterParse and K = OuterKeyword in
-
-val _ =
- OuterSyntax.command "invoke"
- "schematic invocation of locale expression in proof context"
- (K.tag_proof K.prf_goal)
- (SpecParse.opt_thm_name ":" -- SpecParse.locale_expr -- SpecParse.locale_insts -- P.for_fixes
- >> (fn ((((name, atts), expr), (insts, _)), fixes) =>
- Toplevel.print o Toplevel.proof' (invoke (Binding.base_name name, atts) expr insts fixes)));
-
-end;
-
-end;
--- a/src/Pure/Tools/isabelle_process.scala Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Tools/isabelle_process.scala Wed Jan 28 16:57:12 2009 +0100
@@ -7,7 +7,6 @@
package isabelle
-import java.util.Properties
import java.util.concurrent.LinkedBlockingQueue
import java.io.{BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
InputStream, OutputStream, IOException}
@@ -80,23 +79,27 @@
kind == STATUS
}
- class Result(val kind: Kind.Value, val props: Properties, val result: String) {
+ class Result(val kind: Kind.Value, val props: List[(String, String)], val result: String) {
override def toString = {
val trees = YXML.parse_body_failsafe(result)
val res =
if (kind == Kind.STATUS) trees.map(_.toString).mkString
else trees.flatMap(XML.content(_).mkString).mkString
- if (props == null) kind.toString + " [[" + res + "]]"
- else kind.toString + " " + props.toString + " [[" + res + "]]"
+ if (props.isEmpty)
+ kind.toString + " [[" + res + "]]"
+ else
+ kind.toString + " " +
+ (for ((x, y) <- props) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]"
}
def is_raw = Kind.is_raw(kind)
def is_control = Kind.is_control(kind)
def is_system = Kind.is_system(kind)
}
- def parse_message(kind: Kind.Value, result: String) = {
- XML.Elem(Markup.MESSAGE, List((Markup.CLASS, Kind.markup(kind))),
- YXML.parse_body_failsafe(result))
+ def parse_message(isabelle_system: IsabelleSystem, result: Result) =
+ {
+ XML.Elem(Markup.MESSAGE, (Markup.CLASS, Kind.markup(result.kind)) :: result.props,
+ YXML.parse_body_failsafe(isabelle_system.symbols.decode(result.result)))
}
}
@@ -115,21 +118,26 @@
/* process information */
- private var proc: Process = null
- private var closing = false
- private var pid: String = null
- private var the_session: String = null
- def session() = the_session
+ @volatile private var proc: Process = null
+ @volatile private var closing = false
+ @volatile private var pid: String = null
+ @volatile private var the_session: String = null
+ def session = the_session
/* results */
+ def parse_message(result: Result): XML.Tree =
+ IsabelleProcess.parse_message(isabelle_system, result)
+
private val result_queue = new LinkedBlockingQueue[Result]
- private def put_result(kind: Kind.Value, props: Properties, result: String) {
- if (kind == Kind.INIT && props != null) {
- pid = props.getProperty(Markup.PID)
- the_session = props.getProperty(Markup.SESSION)
+ private def put_result(kind: Kind.Value, props: List[(String, String)], result: String)
+ {
+ if (kind == Kind.INIT) {
+ val map = Map(props: _*)
+ if (map.isDefinedAt(Markup.PID)) pid = map(Markup.PID)
+ if (map.isDefinedAt(Markup.SESSION)) the_session = map(Markup.SESSION)
}
result_queue.put(new Result(kind, props, result))
}
@@ -143,7 +151,7 @@
catch { case _: NullPointerException => null }
if (result != null) {
- results.event(result) // FIXME try/catch (!??)
+ results.event(result)
if (result.kind == Kind.EXIT) finished = true
}
else finished = true
@@ -156,13 +164,13 @@
def interrupt() = synchronized {
if (proc == null) error("Cannot interrupt Isabelle: no process")
- if (pid == null) put_result(Kind.SYSTEM, null, "Cannot interrupt: unknown pid")
+ if (pid == null) put_result(Kind.SYSTEM, Nil, "Cannot interrupt: unknown pid")
else {
try {
if (isabelle_system.execute(true, "kill", "-INT", pid).waitFor == 0)
- put_result(Kind.SIGNAL, null, "INT")
+ put_result(Kind.SIGNAL, Nil, "INT")
else
- put_result(Kind.SYSTEM, null, "Cannot interrupt: kill command failed")
+ put_result(Kind.SYSTEM, Nil, "Cannot interrupt: kill command failed")
}
catch { case e: IOException => error("Cannot interrupt Isabelle: " + e.getMessage) }
}
@@ -173,7 +181,7 @@
else {
try_close()
Thread.sleep(500)
- put_result(Kind.SIGNAL, null, "KILL")
+ put_result(Kind.SIGNAL, Nil, "KILL")
proc.destroy
proc = null
pid = null
@@ -198,7 +206,7 @@
def command(text: String) =
output_sync("Isabelle.command " + IsabelleSyntax.encode_string(text))
- def command(props: Properties, text: String) =
+ def command(props: List[(String, String)], text: String) =
output_sync("Isabelle.command " + IsabelleSyntax.encode_properties(props) + " " +
IsabelleSyntax.encode_string(text))
@@ -233,17 +241,17 @@
finished = true
}
else {
- put_result(Kind.STDIN, null, s)
+ put_result(Kind.STDIN, Nil, s)
writer.write(s)
writer.flush
}
//}}}
}
catch {
- case e: IOException => put_result(Kind.SYSTEM, null, "Stdin thread: " + e.getMessage)
+ case e: IOException => put_result(Kind.SYSTEM, Nil, "Stdin thread: " + e.getMessage)
}
}
- put_result(Kind.SYSTEM, null, "Stdin thread terminated")
+ put_result(Kind.SYSTEM, Nil, "Stdin thread terminated")
}
}
@@ -267,7 +275,7 @@
else done = true
}
if (result.length > 0) {
- put_result(Kind.STDOUT, null, result.toString)
+ put_result(Kind.STDOUT, Nil, result.toString)
result.length = 0
}
else {
@@ -278,10 +286,10 @@
//}}}
}
catch {
- case e: IOException => put_result(Kind.SYSTEM, null, "Stdout thread: " + e.getMessage)
+ case e: IOException => put_result(Kind.SYSTEM, Nil, "Stdout thread: " + e.getMessage)
}
}
- put_result(Kind.SYSTEM, null, "Stdout thread terminated")
+ put_result(Kind.SYSTEM, Nil, "Stdout thread terminated")
}
}
@@ -292,7 +300,7 @@
override def run() = {
val reader = isabelle_system.fifo_reader(fifo)
var kind: Kind.Value = null
- var props: Properties = null
+ var props: List[(String, String)] = Nil
var result = new StringBuilder
var finished = false
@@ -307,7 +315,7 @@
} while (c >= 0 && c != 2)
if (result.length > 0) {
- put_result(Kind.SYSTEM, null, "Malformed message:\n" + result.toString)
+ put_result(Kind.SYSTEM, Nil, "Malformed message:\n" + result.toString)
result.length = 0
}
if (c < 0) {
@@ -319,7 +327,6 @@
c = reader.read
if (Kind.code.isDefinedAt(c)) kind = Kind.code(c)
else kind = null
- props = null
}
//}}}
}
@@ -339,16 +346,16 @@
if (i > 0) {
val name = line.substring(0, i)
val value = line.substring(i + 1, len - 2)
- if (props == null) props = new Properties
- if (!props.containsKey(name)) props.setProperty(name, value)
+ props = (name, value) :: props
}
}
// last text line
else if (line.endsWith("\u0002.")) {
result.append(line.substring(0, len - 2))
- put_result(kind, props, result.toString)
+ put_result(kind, props.reverse, result.toString)
+ kind = null
+ props = Nil
result.length = 0
- kind = null
}
// text line
else {
@@ -360,10 +367,10 @@
}
}
catch {
- case e: IOException => put_result(Kind.SYSTEM, null, "Message thread: " + e.getMessage)
+ case e: IOException => put_result(Kind.SYSTEM, Nil, "Message thread: " + e.getMessage)
}
}
- put_result(Kind.SYSTEM, null, "Message thread terminated")
+ put_result(Kind.SYSTEM, Nil, "Message thread terminated")
}
}
@@ -377,7 +384,7 @@
{
val (msg, rc) = isabelle_system.isabelle_tool("version")
if (rc != 0) error("Version check failed -- bad Isabelle installation:\n" + msg)
- put_result(Kind.SYSTEM, null, msg)
+ put_result(Kind.SYSTEM, Nil, msg)
}
@@ -418,8 +425,8 @@
override def run() = {
val rc = proc.waitFor()
Thread.sleep(300)
- put_result(Kind.SYSTEM, null, "Exit thread terminated")
- put_result(Kind.EXIT, null, Integer.toString(rc))
+ put_result(Kind.SYSTEM, Nil, "Exit thread terminated")
+ put_result(Kind.EXIT, Nil, Integer.toString(rc))
rm_fifo()
}
}.start
--- a/src/Pure/Tools/isabelle_syntax.scala Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Tools/isabelle_syntax.scala Wed Jan 28 16:57:12 2009 +0100
@@ -6,14 +6,13 @@
package isabelle
-import java.util.{Properties, Enumeration}
-
object IsabelleSyntax {
/* string token */
- def append_string(str: String, result: StringBuilder) = {
+ def append_string(str: String, result: StringBuilder)
+ {
result.append("\"")
for (c <- str) {
if (c < 32 || c == '\\' || c == '\"') {
@@ -27,30 +26,49 @@
result.append("\"")
}
- def encode_string(str: String) = {
- val result = new StringBuilder(str.length + 20)
+ def encode_string(str: String) =
+ {
+ val result = new StringBuilder(str.length + 10)
append_string(str, result)
result.toString
}
+ /* list */
+
+ def append_list[A](append_elem: (A, StringBuilder) => Unit, body: Iterable[A],
+ result: StringBuilder)
+ {
+ result.append("(")
+ val elems = body.elements
+ if (elems.hasNext) append_elem(elems.next, result)
+ while (elems.hasNext) {
+ result.append(", ")
+ append_elem(elems.next, result)
+ }
+ result.append(")")
+ }
+
+ def encode_list[A](append_elem: (A, StringBuilder) => Unit, elems: Iterable[A]) =
+ {
+ val result = new StringBuilder
+ append_list(append_elem, elems, result)
+ result.toString
+ }
+
+
/* properties */
- def append_properties(props: Properties, result: StringBuilder) = {
- result.append("(")
- val names = props.propertyNames.asInstanceOf[Enumeration[String]]
- while (names.hasMoreElements) {
- val name = names.nextElement; val value = props.getProperty(name)
- append_string(name, result); result.append(" = "); append_string(value, result)
- if (names.hasMoreElements) result.append(", ")
- }
- result.append(")")
+ def append_properties(props: List[(String, String)], result: StringBuilder)
+ {
+ append_list((p: (String, String), res) =>
+ { append_string(p._1, res); res.append(" = "); append_string(p._2, res) }, props, result)
}
- def encode_properties(props: Properties) = {
- val result = new StringBuilder(40)
+ def encode_properties(props: List[(String, String)]) =
+ {
+ val result = new StringBuilder
append_properties(props, result)
result.toString
}
-
}
--- a/src/Pure/Tools/isabelle_system.scala Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Tools/isabelle_system.scala Wed Jan 28 16:57:12 2009 +0100
@@ -143,4 +143,16 @@
}
logics.toList.sort(_ < _)
}
+
+
+ /* symbols */
+
+ private def read_symbols(path: String) = {
+ val file = new File(platform_path(path))
+ if (file.canRead) Source.fromFile(file).getLines
+ else Iterator.empty
+ }
+ val symbols = new Symbol.Interpretation(
+ read_symbols("$ISABELLE_HOME/etc/symbols") ++
+ read_symbols("$ISABELLE_HOME_USER/etc/symbols"))
}
--- a/src/Pure/Tools/named_thms.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Tools/named_thms.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Tools/named_thms.ML
- ID: $Id$
Author: Makarius
Named collections of theorems in canonical order.
@@ -36,6 +35,6 @@
val setup =
Attrib.add_attributes [(name, Attrib.add_del_args add del, "declaration of " ^ description)] #>
- PureThy.add_thms_dynamic (name, Data.get);
+ PureThy.add_thms_dynamic (Binding.name name, Data.get);
end;
--- a/src/Pure/Tools/xml_syntax.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/Tools/xml_syntax.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/Tools/xml_syntax.ML
- ID: $Id$
Author: Stefan Berghofer, TU Muenchen
Input and output of types, terms, and proofs in XML format.
@@ -159,7 +158,7 @@
| proof_of_xml (XML.Elem ("PThm", [("name", s)], term :: opttypes)) =
(* FIXME? *)
PThm (serial (), ((s, term_of_xml term, opttypes_of_xml opttypes),
- Lazy.value (Proofterm.make_proof_body MinProof)))
+ Future.value (Proofterm.make_proof_body MinProof)))
| proof_of_xml (XML.Elem ("PAxm", [("name", s)], term :: opttypes)) =
PAxm (s, term_of_xml term, opttypes_of_xml opttypes)
| proof_of_xml (XML.Elem ("Oracle", [("name", s)], term :: opttypes)) =
--- a/src/Pure/assumption.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/assumption.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/assumption.ML
- ID: $Id$
Author: Makarius
Local assumptions, parameterized by export rules.
@@ -79,7 +78,7 @@
(* named prems -- legacy feature *)
val _ = Context.>>
- (Context.map_theory (PureThy.add_thms_dynamic ("prems",
+ (Context.map_theory (PureThy.add_thms_dynamic (Binding.name "prems",
fn Context.Theory _ => [] | Context.Proof ctxt => prems_of ctxt)));
@@ -120,6 +119,6 @@
val thm = export false inner outer;
val term = export_term inner outer;
val typ = Logic.type_map term;
- in Morphism.morphism {binding = I, var = I, typ = typ, term = term, fact = map thm} end;
+ in Morphism.morphism {binding = I, typ = typ, term = term, fact = map thm} end;
end;
--- a/src/Pure/axclass.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/axclass.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/axclass.ML
- ID: $Id$
Author: Markus Wenzel, TU Muenchen
Type classes defined as predicates, associated with a record of
@@ -9,7 +8,7 @@
signature AX_CLASS =
sig
val define_class: bstring * class list -> string list ->
- ((Binding.T * attribute list) * term list) list -> theory -> class * theory
+ ((binding * attribute list) * term list) list -> theory -> class * theory
val add_classrel: thm -> theory -> theory
val add_arity: thm -> theory -> theory
val prove_classrel: class * class -> tactic -> theory -> theory
@@ -329,7 +328,8 @@
quote (Syntax.string_of_classrel ctxt [c1, c2]));
in
thy
- |> PureThy.add_thms [((prefix classrel_prefix (Logic.name_classrel (c1, c2)), th), [])]
+ |> PureThy.add_thms [((Binding.name
+ (prefix classrel_prefix (Logic.name_classrel (c1, c2))), th), [])]
|-> (fn [th'] => add_classrel th')
end;
@@ -345,7 +345,7 @@
quote (Syntax.string_of_arity ctxt arity));
in
thy
- |> PureThy.add_thms (map (rpair []) (names ~~ ths))
+ |> PureThy.add_thms (map (rpair []) (map Binding.name names ~~ ths))
|-> fold add_arity
end;
@@ -372,10 +372,10 @@
|> Sign.no_base_names
|> Sign.declare_const [] ((Binding.name c', T'), NoSyn)
|-> (fn const' as Const (c'', _) => Thm.add_def false true
- (Thm.def_name c', Logic.mk_equals (Const (c, T'), const'))
+ (Binding.name (Thm.def_name c'), Logic.mk_equals (Const (c, T'), const'))
#>> Thm.varifyT
#-> (fn thm => add_inst_param (c, tyco) (c'', thm)
- #> PureThy.add_thm ((c', thm), [Thm.kind_internal])
+ #> PureThy.add_thm ((Binding.name c', thm), [Thm.kind_internal])
#> snd
#> Sign.restore_naming thy
#> pair (Const (c, T))))
@@ -391,7 +391,7 @@
(NameSpace.base c ^ "_" ^ NameSpace.base tyco) name;
in
thy
- |> Thm.add_def false false (name', prop)
+ |> Thm.add_def false false (Binding.name name', prop)
|>> (fn thm => Drule.transitive_thm OF [eq, thm])
end;
@@ -469,7 +469,7 @@
val ([def], def_thy) =
thy
|> Sign.primitive_class (bclass, super)
- |> PureThy.add_defs false [((Thm.def_name bconst, class_eq), [])];
+ |> PureThy.add_defs false [((Binding.name (Thm.def_name bconst), class_eq), [])];
val (raw_intro, (raw_classrel, raw_axioms)) =
split_defined (length conjs) def ||> chop (length super);
@@ -515,7 +515,11 @@
val args = prep thy raw_args;
val specs = mk args;
val names = name args;
- in thy |> PureThy.add_axioms (map (rpair []) (names ~~ specs)) |-> fold add end;
+ in
+ thy
+ |> PureThy.add_axioms (map (rpair []) (map Binding.name names ~~ specs))
+ |-> fold add
+ end;
fun ax_classrel prep =
axiomatize (map o prep) (map Logic.mk_classrel)
--- a/src/Pure/config.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/config.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/config.ML
- ID: $Id$
Author: Makarius
Configuration options as values within the local context.
--- a/src/Pure/conjunction.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/conjunction.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/conjunction.ML
- ID: $Id$
Author: Makarius
Meta-level conjunction.
--- a/src/Pure/consts.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/consts.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/consts.ML
- ID: $Id$
Author: Makarius
Polymorphic constants: declarations, abbreviations, additional type
@@ -30,10 +29,10 @@
val certify: Pretty.pp -> Type.tsig -> bool -> T -> term -> term (*exception TYPE*)
val typargs: T -> string * typ -> typ list
val instance: T -> string * typ list -> typ
- val declare: bool -> NameSpace.naming -> Properties.T -> (Binding.T * typ) -> T -> T
+ val declare: bool -> NameSpace.naming -> Properties.T -> (binding * typ) -> T -> T
val constrain: string * typ option -> T -> T
val abbreviate: Pretty.pp -> Type.tsig -> NameSpace.naming -> string -> Properties.T ->
- Binding.T * term -> T -> (term * term) * T
+ binding * term -> T -> (term * term) * T
val revert_abbrev: string -> string -> T -> T
val hide: bool -> string -> T -> T
val empty: T
--- a/src/Pure/context_position.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/context_position.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/context_position.ML
- ID: $Id$
Author: Makarius
Context position visibility flag.
--- a/src/Pure/conv.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/conv.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/conv.ML
- ID: $Id$
Author: Amine Chaieb and Makarius
Conversions: primitive equality reasoning.
--- a/src/Pure/defs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/defs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/defs.ML
- ID: $Id$
Author: Makarius
Global well-formedness checks for constant definitions. Covers plain
--- a/src/Pure/display.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/display.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/display.ML
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1993 University of Cambridge
--- a/src/Pure/drule.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/drule.ML Wed Jan 28 16:57:12 2009 +0100
@@ -460,10 +460,10 @@
val read_prop = certify o SimpleSyntax.read_prop;
fun store_thm name th =
- Context.>>> (Context.map_theory_result (PureThy.store_thm (name, th)));
+ Context.>>> (Context.map_theory_result (PureThy.store_thm (Binding.name name, th)));
fun store_thm_open name th =
- Context.>>> (Context.map_theory_result (PureThy.store_thm_open (name, th)));
+ Context.>>> (Context.map_theory_result (PureThy.store_thm_open (Binding.name name, th)));
fun store_standard_thm name th = store_thm name (standard th);
fun store_standard_thm_open name thm = store_thm_open name (standard' thm);
--- a/src/Pure/facts.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/facts.ML Wed Jan 28 16:57:12 2009 +0100
@@ -30,9 +30,9 @@
val props: T -> thm list
val could_unify: T -> term -> thm list
val merge: T * T -> T
- val add_global: NameSpace.naming -> Binding.T * thm list -> T -> string * T
- val add_local: bool -> NameSpace.naming -> Binding.T * thm list -> T -> string * T
- val add_dynamic: NameSpace.naming -> Binding.T * (Context.generic -> thm list) -> T -> string * T
+ val add_global: NameSpace.naming -> binding * thm list -> T -> string * T
+ val add_local: bool -> NameSpace.naming -> binding * thm list -> T -> string * T
+ val add_dynamic: NameSpace.naming -> binding * (Context.generic -> thm list) -> T -> string * T
val del: string -> T -> T
val hide: bool -> string -> T -> T
end;
--- a/src/Pure/interpretation.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/interpretation.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/interpretation.ML
- ID: $Id$
Author: Florian Haftmann and Makarius
Generic interpretation of theory data.
--- a/src/Pure/more_thm.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/more_thm.ML Wed Jan 28 16:57:12 2009 +0100
@@ -38,8 +38,8 @@
val forall_elim_vars: int -> thm -> thm
val unvarify: thm -> thm
val close_derivation: thm -> thm
- val add_axiom: bstring * term -> theory -> thm * theory
- val add_def: bool -> bool -> bstring * term -> theory -> thm * theory
+ val add_axiom: binding * term -> theory -> thm * theory
+ val add_def: bool -> bool -> binding * term -> theory -> thm * theory
val rule_attribute: (Context.generic -> thm -> thm) -> attribute
val declaration_attribute: (thm -> Context.generic -> Context.generic) -> attribute
val theory_attributes: attribute list -> theory * thm -> theory * thm
@@ -276,14 +276,15 @@
(** specification primitives **)
-fun add_axiom (name, prop) thy =
+fun add_axiom (b, prop) thy =
let
- val name' = if name = "" then "axiom_" ^ serial_string () else name;
- val thy' = thy |> Theory.add_axioms_i [(name', prop)];
- val axm = unvarify (Thm.axiom thy' (Sign.full_bname thy' name'));
+ val b' = if Binding.is_empty b
+ then Binding.name ("axiom_" ^ serial_string ()) else b;
+ val thy' = thy |> Theory.add_axioms_i [(b', prop)];
+ val axm = unvarify (Thm.axiom thy' (Sign.full_name thy' b'));
in (axm, thy') end;
-fun add_def unchecked overloaded (name, prop) thy =
+fun add_def unchecked overloaded (b, prop) thy =
let
val tfrees = rev (map TFree (Term.add_tfrees prop []));
val tfrees' = map (fn a => TFree (a, [])) (Name.invents Name.context Name.aT (length tfrees));
@@ -291,8 +292,8 @@
val recover_sorts = map (pairself (Thm.ctyp_of thy o Logic.varifyT)) (tfrees' ~~ tfrees);
val prop' = Term.map_types (Term.map_atyps (perhaps (AList.lookup (op =) strip_sorts))) prop;
- val thy' = Theory.add_defs_i unchecked overloaded [(name, prop')] thy;
- val axm' = Thm.axiom thy' (Sign.full_bname thy' name);
+ val thy' = Theory.add_defs_i unchecked overloaded [(b, prop')] thy;
+ val axm' = Thm.axiom thy' (Sign.full_name thy' b);
val thm = unvarify (Thm.instantiate (recover_sorts, []) axm');
in (thm, thy') end;
--- a/src/Pure/morphism.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/morphism.ML Wed Jan 28 16:57:12 2009 +0100
@@ -16,21 +16,18 @@
signature MORPHISM =
sig
include BASIC_MORPHISM
- val var: morphism -> Binding.T * mixfix -> Binding.T * mixfix
- val binding: morphism -> Binding.T -> Binding.T
+ val binding: morphism -> binding -> binding
val typ: morphism -> typ -> typ
val term: morphism -> term -> term
val fact: morphism -> thm list -> thm list
val thm: morphism -> thm -> thm
val cterm: morphism -> cterm -> cterm
val morphism:
- {binding: Binding.T -> Binding.T,
- var: Binding.T * mixfix -> Binding.T * mixfix,
+ {binding: binding -> binding,
typ: typ -> typ,
term: term -> term,
fact: thm list -> thm list} -> morphism
- val binding_morphism: (Binding.T -> Binding.T) -> morphism
- val var_morphism: (Binding.T * mixfix -> Binding.T * mixfix) -> morphism
+ val binding_morphism: (binding -> binding) -> morphism
val typ_morphism: (typ -> typ) -> morphism
val term_morphism: (term -> term) -> morphism
val fact_morphism: (thm list -> thm list) -> morphism
@@ -45,8 +42,7 @@
struct
datatype morphism = Morphism of
- {binding: Binding.T -> Binding.T,
- var: Binding.T * mixfix -> Binding.T * mixfix,
+ {binding: binding -> binding,
typ: typ -> typ,
term: term -> term,
fact: thm list -> thm list};
@@ -54,7 +50,6 @@
type declaration = morphism -> Context.generic -> Context.generic;
fun binding (Morphism {binding, ...}) = binding;
-fun var (Morphism {var, ...}) = var;
fun typ (Morphism {typ, ...}) = typ;
fun term (Morphism {term, ...}) = term;
fun fact (Morphism {fact, ...}) = fact;
@@ -63,20 +58,19 @@
val morphism = Morphism;
-fun binding_morphism binding = morphism {binding = binding, var = I, typ = I, term = I, fact = I};
-fun var_morphism var = morphism {binding = I, var = var, typ = I, term = I, fact = I};
-fun typ_morphism typ = morphism {binding = I, var = I, typ = typ, term = I, fact = I};
-fun term_morphism term = morphism {binding = I, var = I, typ = I, term = term, fact = I};
-fun fact_morphism fact = morphism {binding = I, var = I, typ = I, term = I, fact = fact};
-fun thm_morphism thm = morphism {binding = I, var = I, typ = I, term = I, fact = map thm};
+fun binding_morphism binding = morphism {binding = binding, typ = I, term = I, fact = I};
+fun typ_morphism typ = morphism {binding = I, typ = typ, term = I, fact = I};
+fun term_morphism term = morphism {binding = I, typ = I, term = term, fact = I};
+fun fact_morphism fact = morphism {binding = I, typ = I, term = I, fact = fact};
+fun thm_morphism thm = morphism {binding = I, typ = I, term = I, fact = map thm};
-val identity = morphism {binding = I, var = I, typ = I, term = I, fact = I};
+val identity = morphism {binding = I, typ = I, term = I, fact = I};
fun compose
- (Morphism {binding = binding1, var = var1, typ = typ1, term = term1, fact = fact1})
- (Morphism {binding = binding2, var = var2, typ = typ2, term = term2, fact = fact2}) =
- morphism {binding = binding1 o binding2, var = var1 o var2,
- typ = typ1 o typ2, term = term1 o term2, fact = fact1 o fact2};
+ (Morphism {binding = binding1, typ = typ1, term = term1, fact = fact1})
+ (Morphism {binding = binding2, typ = typ2, term = term2, fact = fact2}) =
+ morphism {binding = binding1 o binding2, typ = typ1 o typ2,
+ term = term1 o term2, fact = fact1 o fact2};
fun phi1 $> phi2 = compose phi2 phi1;
--- a/src/Pure/net.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/net.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/net.ML
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1993 University of Cambridge
--- a/src/Pure/old_goals.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/old_goals.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/old_goals.ML
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1993 University of Cambridge
--- a/src/Pure/primitive_defs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/primitive_defs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/primitive_defs.ML
- ID: $Id$
Author: Makarius
Primitive definition forms.
--- a/src/Pure/proofterm.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/proofterm.ML Wed Jan 28 16:57:12 2009 +0100
@@ -21,10 +21,10 @@
| PAxm of string * term * typ list option
| Oracle of string * term * typ list option
| Promise of serial * term * typ list
- | PThm of serial * ((string * term * typ list option) * proof_body lazy)
+ | PThm of serial * ((string * term * typ list option) * proof_body future)
and proof_body = PBody of
{oracles: (string * term) OrdList.T,
- thms: (serial * (string * term * proof_body lazy)) OrdList.T,
+ thms: (serial * (string * term * proof_body future)) OrdList.T,
proof: proof}
val %> : proof * term -> proof
@@ -35,10 +35,10 @@
include BASIC_PROOFTERM
type oracle = string * term
- type pthm = serial * (string * term * proof_body lazy)
- val force_body: proof_body lazy ->
+ type pthm = serial * (string * term * proof_body future)
+ val join_body: proof_body future ->
{oracles: oracle OrdList.T, thms: pthm OrdList.T, proof: proof}
- val force_proof: proof_body lazy -> proof
+ val join_proof: proof_body future -> proof
val proof_of: proof_body -> proof
val fold_body_thms: (string * term * proof_body -> 'a -> 'a) -> proof_body list -> 'a -> 'a
val fold_proof_atoms: bool -> (proof -> 'a -> 'a) -> proof list -> 'a -> 'a
@@ -110,7 +110,7 @@
val promise_proof: theory -> serial -> term -> proof
val fulfill_proof: theory -> (serial * proof) list -> proof_body -> proof_body
val thm_proof: theory -> string -> term list -> term ->
- (serial * proof) list lazy -> proof_body -> pthm * proof
+ (serial * proof future) list -> proof_body -> pthm * proof
val get_name: term list -> term -> proof -> string
(** rewriting on proof terms **)
@@ -142,17 +142,17 @@
| PAxm of string * term * typ list option
| Oracle of string * term * typ list option
| Promise of serial * term * typ list
- | PThm of serial * ((string * term * typ list option) * proof_body lazy)
+ | PThm of serial * ((string * term * typ list option) * proof_body future)
and proof_body = PBody of
{oracles: (string * term) OrdList.T,
- thms: (serial * (string * term * proof_body lazy)) OrdList.T,
+ thms: (serial * (string * term * proof_body future)) OrdList.T,
proof: proof};
type oracle = string * term;
-type pthm = serial * (string * term * proof_body lazy);
+type pthm = serial * (string * term * proof_body future);
-val force_body = Lazy.force #> (fn PBody args => args);
-val force_proof = #proof o force_body;
+val join_body = Future.join #> (fn PBody args => args);
+val join_proof = #proof o join_body;
fun proof_of (PBody {proof, ...}) = proof;
@@ -165,7 +165,7 @@
if Inttab.defined seen i then (x, seen)
else
let
- val body' = Lazy.force body;
+ val body' = Future.join body;
val (x', seen') = app body' (x, Inttab.update (i, ()) seen);
in (f (name, prop, body') x', seen') end);
in fn bodies => fn x => #1 (fold app bodies (x, Inttab.empty)) end;
@@ -180,7 +180,7 @@
if Inttab.defined seen i then (x, seen)
else
let val (x', seen') =
- (if all then app (force_proof body) else I) (x, Inttab.update (i, ()) seen)
+ (if all then app (join_proof body) else I) (x, Inttab.update (i, ()) seen)
in (f prf x', seen') end)
| app prf = (fn (x, seen) => (f prf x, seen));
in fn prfs => fn x => #1 (fold app prfs (x, Inttab.empty)) end;
@@ -233,7 +233,7 @@
fun strip_thm (body as PBody {proof, ...}) =
(case strip_combt (fst (strip_combP proof)) of
- (PThm (_, (_, body')), _) => Lazy.force body'
+ (PThm (_, (_, body')), _) => Future.join body'
| _ => body);
val mk_Abst = fold_rev (fn (s, T:typ) => fn prf => Abst (s, NONE, prf));
@@ -1227,6 +1227,11 @@
val proof = rewrite_prf fst (rules, K fill :: procs) proof0;
in PBody {oracles = oracles, thms = thms, proof = proof} end;
+fun fulfill_proof_future _ [] body = Future.value body
+ | fulfill_proof_future thy promises body =
+ Future.fork_deps (map snd promises) (fn () =>
+ fulfill_proof thy (map (apsnd Future.join) promises) body);
+
(***** theorems *****)
@@ -1243,11 +1248,9 @@
if ! proofs = 2 then
#4 (shrink_proof thy [] 0 (rew_proof thy (fold_rev implies_intr_proof hyps prf)))
else MinProof;
+ val body0 = PBody {oracles = oracles0, thms = thms0, proof = proof0};
- fun new_prf () = (serial (), name, prop,
- Lazy.lazy (fn () => fulfill_proof thy (Lazy.force promises)
- (PBody {oracles = oracles0, thms = thms0, proof = proof0})));
-
+ fun new_prf () = (serial (), name, prop, fulfill_proof_future thy promises body0);
val (i, name, prop, body') =
(case strip_combt (fst (strip_combP prf)) of
(PThm (i, ((old_name, prop', NONE), body')), args') =>
--- a/src/Pure/pure_setup.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/pure_setup.ML Wed Jan 28 16:57:12 2009 +0100
@@ -33,7 +33,7 @@
map (fn (x, y) => Pretty.str (x ^ "=" ^ y)) o Position.properties_of));
install_pp (make_pp ["Thm", "thm"] ProofDisplay.pprint_thm);
install_pp (make_pp ["Thm", "cterm"] ProofDisplay.pprint_cterm);
-install_pp (make_pp ["Binding", "T"] (Pretty.pprint o Pretty.str o Binding.display));
+install_pp (make_pp ["Binding", "binding"] (Pretty.pprint o Pretty.str o Binding.display));
install_pp (make_pp ["Thm", "ctyp"] ProofDisplay.pprint_ctyp);
install_pp (make_pp ["Context", "theory"] Context.pprint_thy);
install_pp (make_pp ["Context", "theory_ref"] Context.pprint_thy_ref);
--- a/src/Pure/pure_thy.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/pure_thy.ML Wed Jan 28 16:57:12 2009 +0100
@@ -24,27 +24,27 @@
val name_thm: bool -> bool -> Position.T -> string -> thm -> thm
val name_thms: bool -> bool -> Position.T -> string -> thm list -> thm list
val name_thmss: bool -> Position.T -> string -> (thm list * 'a) list -> (thm list * 'a) list
- val store_thms: bstring * thm list -> theory -> thm list * theory
- val store_thm: bstring * thm -> theory -> thm * theory
- val store_thm_open: bstring * thm -> theory -> thm * theory
- val add_thms: ((bstring * thm) * attribute list) list -> theory -> thm list * theory
- val add_thm: (bstring * thm) * attribute list -> theory -> thm * theory
- val add_thmss: ((bstring * thm list) * attribute list) list -> theory -> thm list list * theory
- val add_thms_dynamic: bstring * (Context.generic -> thm list) -> theory -> theory
- val note_thmss: string -> ((Binding.T * attribute list) *
+ val store_thms: binding * thm list -> theory -> thm list * theory
+ val store_thm: binding * thm -> theory -> thm * theory
+ val store_thm_open: binding * thm -> theory -> thm * theory
+ val add_thms: ((binding * thm) * attribute list) list -> theory -> thm list * theory
+ val add_thm: (binding * thm) * attribute list -> theory -> thm * theory
+ val add_thmss: ((binding * thm list) * attribute list) list -> theory -> thm list list * theory
+ val add_thms_dynamic: binding * (Context.generic -> thm list) -> theory -> theory
+ val note_thmss: string -> ((binding * attribute list) *
(thm list * attribute list) list) list -> theory -> (string * thm list) list * theory
- val note_thmss_grouped: string -> string -> ((Binding.T * attribute list) *
+ val note_thmss_grouped: string -> string -> ((binding * attribute list) *
(thm list * attribute list) list) list -> theory -> (string * thm list) list * theory
- val add_axioms: ((bstring * term) * attribute list) list -> theory -> thm list * theory
+ val add_axioms: ((binding * term) * attribute list) list -> theory -> thm list * theory
val add_axioms_cmd: ((bstring * string) * attribute list) list -> theory -> thm list * theory
- val add_defs: bool -> ((bstring * term) * attribute list) list ->
+ val add_defs: bool -> ((binding * term) * attribute list) list ->
theory -> thm list * theory
- val add_defs_unchecked: bool -> ((bstring * term) * attribute list) list ->
+ val add_defs_unchecked: bool -> ((binding * term) * attribute list) list ->
+ theory -> thm list * theory
+ val add_defs_cmd: bool -> ((bstring * string) * attribute list) list ->
theory -> thm list * theory
val add_defs_unchecked_cmd: bool -> ((bstring * string) * attribute list) list ->
theory -> thm list * theory
- val add_defs_cmd: bool -> ((bstring * string) * attribute list) list ->
- theory -> thm list * theory
val old_appl_syntax: theory -> bool
val old_appl_syntax_setup: theory -> theory
end;
@@ -163,21 +163,21 @@
(* store_thm(s) *)
-fun store_thms (bname, thms) = enter_thms (name_thms true true Position.none)
- (name_thms false true Position.none) I (Binding.name bname, thms);
+fun store_thms (b, thms) = enter_thms (name_thms true true Position.none)
+ (name_thms false true Position.none) I (b, thms);
-fun store_thm (bname, th) = store_thms (bname, [th]) #>> the_single;
+fun store_thm (b, th) = store_thms (b, [th]) #>> the_single;
-fun store_thm_open (bname, th) =
+fun store_thm_open (b, th) =
enter_thms (name_thms true false Position.none) (name_thms false false Position.none) I
- (Binding.name bname, [th]) #>> the_single;
+ (b, [th]) #>> the_single;
(* add_thms(s) *)
-fun add_thms_atts pre_name ((bname, thms), atts) =
+fun add_thms_atts pre_name ((b, thms), atts) =
enter_thms pre_name (name_thms false true Position.none)
- (foldl_map (Thm.theory_attributes atts)) (Binding.name bname, thms);
+ (foldl_map (Thm.theory_attributes atts)) (b, thms);
fun gen_add_thmss pre_name =
fold_map (add_thms_atts pre_name);
@@ -192,9 +192,9 @@
(* add_thms_dynamic *)
-fun add_thms_dynamic (bname, f) thy = thy
+fun add_thms_dynamic (b, f) thy = thy
|> (FactsData.map o apfst)
- (Facts.add_dynamic (Sign.naming_of thy) (Binding.name bname, f) #> snd);
+ (Facts.add_dynamic (Sign.naming_of thy) (b, f) #> snd);
(* note_thmss *)
@@ -224,21 +224,21 @@
(* store axioms as theorems *)
local
- fun get_ax thy (name, _) = Thm.axiom thy (Sign.full_bname thy name);
+ fun get_ax thy (b, _) = Thm.axiom thy (Sign.full_name thy b);
fun get_axs thy named_axs = map (Thm.forall_elim_vars 0 o get_ax thy) named_axs;
- fun add_axm add = fold_map (fn ((name, ax), atts) => fn thy =>
+ fun add_axm prep_b add = fold_map (fn ((b, ax), atts) => fn thy =>
let
- val named_ax = [(name, ax)];
+ val named_ax = [(b, ax)];
val thy' = add named_ax thy;
- val thm = hd (get_axs thy' named_ax);
- in apfst hd (gen_add_thms (K I) [((name, thm), atts)] thy') end);
+ val thm = hd (get_axs thy' ((map o apfst) prep_b named_ax));
+ in apfst hd (gen_add_thms (K I) [((prep_b b, thm), atts)] thy') end);
in
- val add_defs = add_axm o Theory.add_defs_i false;
- val add_defs_unchecked = add_axm o Theory.add_defs_i true;
- val add_axioms = add_axm Theory.add_axioms_i;
- val add_defs_cmd = add_axm o Theory.add_defs false;
- val add_defs_unchecked_cmd = add_axm o Theory.add_defs true;
- val add_axioms_cmd = add_axm Theory.add_axioms;
+ val add_defs = add_axm I o Theory.add_defs_i false;
+ val add_defs_unchecked = add_axm I o Theory.add_defs_i true;
+ val add_axioms = add_axm I Theory.add_axioms_i;
+ val add_defs_cmd = add_axm Binding.name o Theory.add_defs false;
+ val add_defs_unchecked_cmd = add_axm Binding.name o Theory.add_defs true;
+ val add_axioms_cmd = add_axm Binding.name Theory.add_axioms;
end;
@@ -378,16 +378,16 @@
("sort_constraint", typ "'a itself => prop", NoSyn),
("conjunction", typ "prop => prop => prop", NoSyn)]
#> (add_defs false o map Thm.no_attributes)
- [("prop_def", prop "(CONST prop :: prop => prop) (A::prop) == A::prop"),
- ("term_def", prop "(CONST Pure.term :: 'a => prop) (x::'a) == (!!A::prop. A ==> A)"),
- ("sort_constraint_def",
+ [(Binding.name "prop_def", prop "(CONST prop :: prop => prop) (A::prop) == A::prop"),
+ (Binding.name "term_def", prop "(CONST Pure.term :: 'a => prop) (x::'a) == (!!A::prop. A ==> A)"),
+ (Binding.name "sort_constraint_def",
prop "(CONST Pure.sort_constraint :: 'a itself => prop) (CONST TYPE :: 'a itself) ==\
\ (CONST Pure.term :: 'a itself => prop) (CONST TYPE :: 'a itself)"),
- ("conjunction_def", prop "(A &&& B) == (!!C::prop. (A ==> B ==> C) ==> C)")] #> snd
+ (Binding.name "conjunction_def", prop "(A &&& B) == (!!C::prop. (A ==> B ==> C) ==> C)")] #> snd
#> Sign.hide_const false "Pure.term"
#> Sign.hide_const false "Pure.sort_constraint"
#> Sign.hide_const false "Pure.conjunction"
- #> add_thmss [(("nothing", []), [])] #> snd
- #> Theory.add_axioms_i Proofterm.equality_axms));
+ #> add_thmss [((Binding.name "nothing", []), [])] #> snd
+ #> Theory.add_axioms_i ((map o apfst) Binding.name Proofterm.equality_axms)));
end;
--- a/src/Pure/sign.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/sign.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/sign.ML
- ID: $Id$
Author: Lawrence C Paulson and Markus Wenzel
Logical signature content: naming conventions, concrete syntax, type
@@ -14,7 +13,7 @@
tsig: Type.tsig,
consts: Consts.T}
val naming_of: theory -> NameSpace.naming
- val full_name: theory -> Binding.T -> string
+ val full_name: theory -> binding -> string
val base_name: string -> bstring
val full_bname: theory -> bstring -> string
val full_bname_path: theory -> string -> bstring -> string
@@ -91,10 +90,10 @@
val del_modesyntax: Syntax.mode -> (bstring * string * mixfix) list -> theory -> theory
val del_modesyntax_i: Syntax.mode -> (bstring * typ * mixfix) list -> theory -> theory
val notation: bool -> Syntax.mode -> (term * mixfix) list -> theory -> theory
- val declare_const: Properties.T -> (Binding.T * typ) * mixfix -> theory -> term * theory
+ val declare_const: Properties.T -> (binding * typ) * mixfix -> theory -> term * theory
val add_consts: (bstring * string * mixfix) list -> theory -> theory
val add_consts_i: (bstring * typ * mixfix) list -> theory -> theory
- val add_abbrev: string -> Properties.T -> Binding.T * term -> theory -> (term * term) * theory
+ val add_abbrev: string -> Properties.T -> binding * term -> theory -> (term * term) * theory
val revert_abbrev: string -> string -> theory -> theory
val add_const_constraint: string * typ option -> theory -> theory
val primitive_class: string * class list -> theory -> theory
--- a/src/Pure/simplifier.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/simplifier.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/simplifier.ML
- ID: $Id$
Author: Tobias Nipkow and Markus Wenzel, TU Muenchen
Generic simplifier, suitable for most logics (see also
--- a/src/Pure/subgoal.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/subgoal.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/subgoal.ML
- ID: $Id$
Author: Makarius
Tactical operations depending on local subgoal structure.
--- a/src/Pure/theory.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/theory.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/theory.ML
- ID: $Id$
Author: Lawrence C Paulson and Markus Wenzel
Logical theory content: axioms, definitions, and begin/end wrappers.
@@ -29,14 +28,14 @@
val at_end: (theory -> theory option) -> theory -> theory
val begin_theory: string -> theory list -> theory
val end_theory: theory -> theory
+ val add_axioms_i: (binding * term) list -> theory -> theory
val add_axioms: (bstring * string) list -> theory -> theory
- val add_axioms_i: (bstring * term) list -> theory -> theory
val add_deps: string -> string * typ -> (string * typ) list -> theory -> theory
+ val add_defs_i: bool -> bool -> (binding * term) list -> theory -> theory
val add_defs: bool -> bool -> (bstring * string) list -> theory -> theory
- val add_defs_i: bool -> bool -> (bstring * term) list -> theory -> theory
+ val add_finals_i: bool -> term list -> theory -> theory
val add_finals: bool -> string list -> theory -> theory
- val add_finals_i: bool -> term list -> theory -> theory
- val specify_const: Properties.T -> (Binding.T * typ) * mixfix -> theory -> term * theory
+ val specify_const: Properties.T -> (binding * typ) * mixfix -> theory -> term * theory
end
structure Theory: THEORY =
@@ -157,19 +156,19 @@
fun err_in_axm msg name =
cat_error msg ("The error(s) above occurred in axiom " ^ quote name);
-fun cert_axm thy (name, raw_tm) =
+fun cert_axm thy (b, raw_tm) =
let
val (t, T, _) = Sign.certify_prop thy raw_tm
handle TYPE (msg, _, _) => error msg
| TERM (msg, _) => error msg;
in
Term.no_dummy_patterns t handle TERM (msg, _) => error msg;
- (name, Sign.no_vars (Syntax.pp_global thy) t)
+ (b, Sign.no_vars (Syntax.pp_global thy) t)
end;
-fun read_axm thy (name, str) =
- cert_axm thy (name, Syntax.read_prop_global thy str)
- handle ERROR msg => err_in_axm msg name;
+fun read_axm thy (bname, str) =
+ cert_axm thy (Binding.name bname, Syntax.read_prop_global thy str)
+ handle ERROR msg => err_in_axm msg bname;
(* add_axioms(_i) *)
@@ -178,15 +177,15 @@
fun gen_add_axioms prep_axm raw_axms thy = thy |> map_axioms (fn axioms =>
let
- val axms = map (apfst Binding.name o apsnd Logic.varify o prep_axm thy) raw_axms;
+ val axms = map (apsnd Logic.varify o prep_axm thy) raw_axms;
val axioms' = fold (snd oo NameSpace.bind (Sign.naming_of thy)) axms axioms
handle Symtab.DUP dup => err_dup_axm dup;
in axioms' end);
in
+val add_axioms_i = gen_add_axioms cert_axm;
val add_axioms = gen_add_axioms read_axm;
-val add_axioms_i = gen_add_axioms cert_axm;
end;
@@ -250,16 +249,16 @@
(* check_def *)
-fun check_def thy unchecked overloaded (bname, tm) defs =
+fun check_def thy unchecked overloaded (b, tm) defs =
let
val ctxt = ProofContext.init thy;
- val name = Sign.full_bname thy bname;
+ val name = Sign.full_name thy b;
val (lhs_const, rhs) = Sign.cert_def ctxt tm;
val rhs_consts = fold_aterms (fn Const const => insert (op =) const | _ => I) rhs [];
val _ = check_overloading thy overloaded lhs_const;
in defs |> dependencies thy unchecked true name lhs_const rhs_consts end
handle ERROR msg => cat_error msg (Pretty.string_of (Pretty.block
- [Pretty.str ("The error(s) above occurred in definition " ^ quote bname ^ ":"),
+ [Pretty.str ("The error(s) above occurred in definition " ^ quote (Binding.display b) ^ ":"),
Pretty.fbrk, Pretty.quote (Syntax.pretty_term_global thy tm)]));
@@ -298,8 +297,8 @@
in
+val add_finals_i = gen_add_finals (K I);
val add_finals = gen_add_finals Syntax.read_term_global;
-val add_finals_i = gen_add_finals (K I);
end;
--- a/src/Pure/thm.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/thm.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1658,7 +1658,7 @@
val {thy_ref, hyps, prop, tpairs, ...} = args;
val _ = null tpairs orelse raise THM ("put_name: unsolved flex-flex constraints", 0, [thm]);
- val ps = Lazy.lazy (fn () => map (apsnd (proof_of o Future.join)) promises);
+ val ps = map (apsnd (Future.map proof_of)) promises;
val thy = Theory.deref thy_ref;
val (pthm, proof) = Pt.thm_proof thy name hyps prop ps body;
--- a/src/Pure/type_infer.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/type_infer.ML Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: Pure/type_infer.ML
- ID: $Id$
Author: Stefan Berghofer and Markus Wenzel, TU Muenchen
Simple type inference.
--- a/src/Pure/variable.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Pure/variable.ML Wed Jan 28 16:57:12 2009 +0100
@@ -397,7 +397,7 @@
val fact = export inner outer;
val term = singleton (export_terms inner outer);
val typ = Logic.type_map term;
- in Morphism.morphism {binding = I, var = I, typ = typ, term = term, fact = fact} end;
+ in Morphism.morphism {binding = I, typ = typ, term = term, fact = fact} end;
--- a/src/Tools/induct.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/Tools/induct.ML Wed Jan 28 16:57:12 2009 +0100
@@ -50,7 +50,7 @@
val setN: string
(*proof methods*)
val fix_tac: Proof.context -> int -> (string * typ) list -> int -> tactic
- val add_defs: (Binding.T option * term) option list -> Proof.context ->
+ val add_defs: (binding option * term) option list -> Proof.context ->
(term option list * thm list) * Proof.context
val atomize_term: theory -> term -> term
val atomize_tac: int -> tactic
@@ -62,7 +62,7 @@
val cases_tac: Proof.context -> term option list list -> thm option ->
thm list -> int -> cases_tactic
val get_inductT: Proof.context -> term option list list -> thm list list
- val induct_tac: Proof.context -> (Binding.T option * term) option list list ->
+ val induct_tac: Proof.context -> (binding option * term) option list list ->
(string * typ) list list -> term option list -> thm list option ->
thm list -> int -> cases_tactic
val coinduct_tac: Proof.context -> term option list -> term option list -> thm option ->
--- a/src/ZF/Inductive_ZF.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/ZF/Inductive_ZF.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,4 @@
(* Title: ZF/Inductive_ZF.thy
- ID: $Id$
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1993 University of Cambridge
--- a/src/ZF/Main_ZF.thy Wed Jan 28 16:29:16 2009 +0100
+++ b/src/ZF/Main_ZF.thy Wed Jan 28 16:57:12 2009 +0100
@@ -1,5 +1,3 @@
-(*$Id$*)
-
header{*Theory Main: Everything Except AC*}
theory Main_ZF imports List_ZF IntDiv_ZF CardinalArith begin
--- a/src/ZF/Tools/datatype_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/ZF/Tools/datatype_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -247,7 +247,7 @@
if need_recursor then
thy |> Sign.add_consts_i
[(recursor_base_name, recursor_typ, NoSyn)]
- |> (snd o PureThy.add_defs false [Thm.no_attributes recursor_def])
+ |> (snd o PureThy.add_defs false [(Thm.no_attributes o apfst Binding.name) recursor_def])
else thy;
val (con_defs, thy0) = thy_path
@@ -255,7 +255,7 @@
((case_base_name, case_typ, NoSyn) ::
map #1 (List.concat con_ty_lists))
|> PureThy.add_defs false
- (map Thm.no_attributes
+ (map (Thm.no_attributes o apfst Binding.name)
(case_def ::
List.concat (ListPair.map mk_con_defs
(1 upto npart, con_ty_lists))))
@@ -383,13 +383,13 @@
(*Updating theory components: simprules and datatype info*)
(thy1 |> Sign.add_path big_rec_base_name
|> PureThy.add_thmss
- [(("simps", simps), [Simplifier.simp_add]),
- (("", intrs), [Classical.safe_intro NONE]),
- (("con_defs", con_defs), []),
- (("case_eqns", case_eqns), []),
- (("recursor_eqns", recursor_eqns), []),
- (("free_iffs", free_iffs), []),
- (("free_elims", free_SEs), [])] |> snd
+ [((Binding.name "simps", simps), [Simplifier.simp_add]),
+ ((Binding.empty, intrs), [Classical.safe_intro NONE]),
+ ((Binding.name "con_defs", con_defs), []),
+ ((Binding.name "case_eqns", case_eqns), []),
+ ((Binding.name "recursor_eqns", recursor_eqns), []),
+ ((Binding.name "free_iffs", free_iffs), []),
+ ((Binding.name "free_elims", free_SEs), [])] |> snd
|> DatatypesData.map (Symtab.update (big_rec_name, dt_info))
|> ConstructorsData.map (fold Symtab.update con_pairs)
|> Sign.parent_path,
--- a/src/ZF/Tools/induct_tacs.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/ZF/Tools/induct_tacs.ML Wed Jan 28 16:57:12 2009 +0100
@@ -158,7 +158,7 @@
in
thy
|> Sign.add_path (Sign.base_name big_rec_name)
- |> PureThy.add_thmss [(("simps", simps), [Simplifier.simp_add])] |> snd
+ |> PureThy.add_thmss [((Binding.name "simps", simps), [Simplifier.simp_add])] |> snd
|> DatatypesData.put (Symtab.update (big_rec_name, dt_info) (DatatypesData.get thy))
|> ConstructorsData.put (fold_rev Symtab.update con_pairs (ConstructorsData.get thy))
|> Sign.parent_path
--- a/src/ZF/Tools/inductive_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/ZF/Tools/inductive_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -27,10 +27,10 @@
(*Insert definitions for the recursive sets, which
must *already* be declared as constants in parent theory!*)
val add_inductive_i: bool -> term list * term ->
- ((Binding.T * term) * attribute list) list ->
+ ((binding * term) * attribute list) list ->
thm list * thm list * thm list * thm list -> theory -> theory * inductive_result
val add_inductive: string list * string ->
- ((Binding.T * string) * Attrib.src list) list ->
+ ((binding * string) * Attrib.src list) list ->
(Facts.ref * Attrib.src list) list * (Facts.ref * Attrib.src list) list *
(Facts.ref * Attrib.src list) list * (Facts.ref * Attrib.src list) list ->
theory -> theory * inductive_result
@@ -173,7 +173,7 @@
val (_, thy1) =
thy
|> Sign.add_path big_rec_base_name
- |> PureThy.add_defs false (map Thm.no_attributes axpairs);
+ |> PureThy.add_defs false (map (Thm.no_attributes o apfst Binding.name) axpairs);
val ctxt1 = ProofContext.init thy1;
@@ -510,9 +510,9 @@
val ([induct', mutual_induct'], thy') =
thy
- |> PureThy.add_thms [((co_prefix ^ "induct", induct),
+ |> PureThy.add_thms [((Binding.name (co_prefix ^ "induct"), induct),
[case_names, Induct.induct_pred big_rec_name]),
- (("mutual_induct", mutual_induct), [case_names])];
+ ((Binding.name "mutual_induct", mutual_induct), [case_names])];
in ((thy', induct'), mutual_induct')
end; (*of induction_rules*)
@@ -522,7 +522,7 @@
if not coind then induction_rules raw_induct thy1
else
(thy1
- |> PureThy.add_thms [((co_prefix ^ "induct", raw_induct), [])]
+ |> PureThy.add_thms [((Binding.name (co_prefix ^ "induct"), raw_induct), [])]
|> apfst hd |> Library.swap, TrueI)
and defs = big_rec_def :: part_rec_defs
@@ -531,15 +531,15 @@
thy2
|> IndCases.declare big_rec_name make_cases
|> PureThy.add_thms
- [(("bnd_mono", bnd_mono), []),
- (("dom_subset", dom_subset), []),
- (("cases", elim), [case_names, Induct.cases_pred big_rec_name])]
+ [((Binding.name "bnd_mono", bnd_mono), []),
+ ((Binding.name "dom_subset", dom_subset), []),
+ ((Binding.name "cases", elim), [case_names, Induct.cases_pred big_rec_name])]
||>> (PureThy.add_thmss o map Thm.no_attributes)
- [("defs", defs),
- ("intros", intrs)];
+ [(Binding.name "defs", defs),
+ (Binding.name "intros", intrs)];
val (intrs'', thy4) =
thy3
- |> PureThy.add_thms ((intr_names ~~ intrs') ~~ map #2 intr_specs)
+ |> PureThy.add_thms ((map Binding.name intr_names ~~ intrs') ~~ map #2 intr_specs)
||> Sign.parent_path;
in
(thy4,
--- a/src/ZF/Tools/primrec_package.ML Wed Jan 28 16:29:16 2009 +0100
+++ b/src/ZF/Tools/primrec_package.ML Wed Jan 28 16:57:12 2009 +0100
@@ -8,8 +8,8 @@
signature PRIMREC_PACKAGE =
sig
- val add_primrec: ((Binding.T * string) * Attrib.src list) list -> theory -> theory * thm list
- val add_primrec_i: ((Binding.T * term) * attribute list) list -> theory -> theory * thm list
+ val add_primrec: ((binding * string) * Attrib.src list) list -> theory -> theory * thm list
+ val add_primrec_i: ((binding * term) * attribute list) list -> theory -> theory * thm list
end;
structure PrimrecPackage : PRIMREC_PACKAGE =
@@ -169,7 +169,7 @@
val ([def_thm], thy1) = thy
|> Sign.add_path (Sign.base_name fname)
- |> (PureThy.add_defs false o map Thm.no_attributes) [def];
+ |> PureThy.add_defs false [Thm.no_attributes (apfst Binding.name def)];
val rewrites = def_thm :: map mk_meta_eq (#rec_rewrites con_info)
val eqn_thms =
@@ -179,10 +179,10 @@
val (eqn_thms', thy2) =
thy1
- |> PureThy.add_thms ((map Binding.base_name eqn_names ~~ eqn_thms) ~~ eqn_atts);
+ |> PureThy.add_thms ((eqn_names ~~ eqn_thms) ~~ eqn_atts);
val (_, thy3) =
thy2
- |> PureThy.add_thmss [(("simps", eqn_thms'), [Simplifier.simp_add])]
+ |> PureThy.add_thmss [((Binding.name "simps", eqn_thms'), [Simplifier.simp_add])]
||> Sign.parent_path;
in (thy3, eqn_thms') end;