--- a/NEWS Fri Feb 15 09:41:25 2013 +0100
+++ b/NEWS Fri Feb 15 09:59:46 2013 +0100
@@ -19,6 +19,11 @@
INCOMPATIBILITY.
+* Sledgehammer:
+
+ - Renamed option:
+ isar_shrink ~> isar_compress
+
New in Isabelle2013 (February 2013)
-----------------------------------
--- a/src/Doc/Sledgehammer/document/root.tex Fri Feb 15 09:41:25 2013 +0100
+++ b/src/Doc/Sledgehammer/document/root.tex Fri Feb 15 09:59:46 2013 +0100
@@ -369,7 +369,7 @@
\item[\labelitemi] \textbf{\textit{isar\_proofs}} (\S\ref{output-format}) specifies
that Isar proofs should be generated, instead of one-liner \textit{metis} or
\textit{smt} proofs. The length of the Isar proofs can be controlled by setting
-\textit{isar\_shrink} (\S\ref{output-format}).
+\textit{isar\_compress} (\S\ref{output-format}).
\item[\labelitemi] \textbf{\textit{timeout}} (\S\ref{timeouts}) controls the
provers' time limit. It is set to 30 seconds, but since Sledgehammer runs
@@ -508,7 +508,7 @@
is highly experimental. Work on a new implementation has begun. There is a large body of
research into transforming resolution proofs into natural deduction proofs (such
as Isar proofs), which we hope to leverage. In the meantime, a workaround is to
-set the \textit{isar\_shrink} option (\S\ref{output-format}) to a larger
+set the \textit{isar\_compress} option (\S\ref{output-format}) to a larger
value or to try several provers and keep the nicest-looking proof.
\point{How can I tell whether a suggested proof is sound?}
@@ -1302,7 +1302,7 @@
fails; however, they are usually faster and sometimes more robust than
\textit{metis} proofs.
-\opdefault{isar\_shrink}{int}{\upshape 10}
+\opdefault{isar\_compress}{int}{\upshape 10}
Specifies the granularity of the generated Isar proofs if \textit{isar\_proofs}
is enabled. A value of $n$ indicates that each Isar proof step should correspond
to a group of up to $n$ consecutive proof steps in the ATP proof.
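
Illustration (a sketch grounded in the Metis_Examples updates later in this changeset, not itself part of the patch): with the rename in place, Isar proof granularity is controlled through the new option name, globally via

  sledgehammer_params [isar_proofs, isar_compress = 2]   (* at most 2 ATP steps per Isar step *)

or per invocation via sledgehammer [isar_proofs, isar_compress = 2].
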
--- a/src/HOL/Code_Evaluation.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Code_Evaluation.thy Fri Feb 15 09:59:46 2013 +0100
@@ -5,7 +5,7 @@
header {* Term evaluation using the generic code generator *}
theory Code_Evaluation
-imports Typerep New_DSequence
+imports Typerep Limited_Sequence
begin
subsection {* Term representation *}
--- a/src/HOL/DSequence.thy Fri Feb 15 09:41:25 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,113 +0,0 @@
-
-(* Author: Lukas Bulwahn, TU Muenchen *)
-
-header {* Depth-Limited Sequences with failure element *}
-
-theory DSequence
-imports Lazy_Sequence
-begin
-
-type_synonym 'a dseq = "code_numeral => bool => 'a Lazy_Sequence.lazy_sequence option"
-
-definition empty :: "'a dseq"
-where
- "empty = (%i pol. Some Lazy_Sequence.empty)"
-
-definition single :: "'a => 'a dseq"
-where
- "single x = (%i pol. Some (Lazy_Sequence.single x))"
-
-fun eval :: "'a dseq => code_numeral => bool => 'a Lazy_Sequence.lazy_sequence option"
-where
- "eval f i pol = f i pol"
-
-definition yield :: "'a dseq => code_numeral => bool => ('a * 'a dseq) option"
-where
- "yield dseq i pol = (case eval dseq i pol of
- None => None
- | Some s => Option.map (apsnd (%r i pol. Some r)) (Lazy_Sequence.yield s))"
-
-definition yieldn :: "code_numeral \<Rightarrow> 'a dseq \<Rightarrow> code_numeral \<Rightarrow> bool \<Rightarrow> 'a list \<times> 'a dseq"
-where
- "yieldn n dseq i pol = (case dseq i pol of
- None \<Rightarrow> ([], \<lambda>i pol. None)
- | Some s \<Rightarrow> let (xs, s') = Lazy_Sequence.yieldn n s in (xs, \<lambda>i pol. Some s'))"
-
-fun map_seq :: "('a => 'b dseq) => 'a Lazy_Sequence.lazy_sequence => 'b dseq"
-where
- "map_seq f xq i pol = (case Lazy_Sequence.yield xq of
- None => Some Lazy_Sequence.empty
- | Some (x, xq') => (case eval (f x) i pol of
- None => None
- | Some yq => (case map_seq f xq' i pol of
- None => None
- | Some zq => Some (Lazy_Sequence.append yq zq))))"
-
-definition bind :: "'a dseq => ('a => 'b dseq) => 'b dseq"
-where
- "bind x f = (%i pol.
- if i = 0 then
- (if pol then Some Lazy_Sequence.empty else None)
- else
- (case x (i - 1) pol of
- None => None
- | Some xq => map_seq f xq i pol))"
-
-definition union :: "'a dseq => 'a dseq => 'a dseq"
-where
- "union x y = (%i pol. case (x i pol, y i pol) of
- (Some xq, Some yq) => Some (Lazy_Sequence.append xq yq)
- | _ => None)"
-
-definition if_seq :: "bool => unit dseq"
-where
- "if_seq b = (if b then single () else empty)"
-
-definition not_seq :: "unit dseq => unit dseq"
-where
- "not_seq x = (%i pol. case x i (\<not>pol) of
- None => Some Lazy_Sequence.empty
- | Some xq => (case Lazy_Sequence.yield xq of
- None => Some (Lazy_Sequence.single ())
- | Some _ => Some (Lazy_Sequence.empty)))"
-
-definition map :: "('a => 'b) => 'a dseq => 'b dseq"
-where
- "map f g = (%i pol. case g i pol of
- None => None
- | Some xq => Some (Lazy_Sequence.map f xq))"
-
-ML {*
-signature DSEQUENCE =
-sig
- type 'a dseq = int -> bool -> 'a Lazy_Sequence.lazy_sequence option
- val yield : 'a dseq -> int -> bool -> ('a * 'a dseq) option
- val yieldn : int -> 'a dseq -> int -> bool -> 'a list * 'a dseq
- val map : ('a -> 'b) -> 'a dseq -> 'b dseq
-end;
-
-structure DSequence : DSEQUENCE =
-struct
-
-type 'a dseq = int -> bool -> 'a Lazy_Sequence.lazy_sequence option
-
-val yield = @{code yield}
-val yieldn = @{code yieldn}
-val map = @{code map}
-
-end;
-*}
-
-code_reserved Eval DSequence
-(*
-hide_type Sequence.seq
-
-hide_const Sequence.Seq Sequence.yield Sequence.yieldn Sequence.empty Sequence.single
- Sequence.append Sequence.flat Sequence.map Sequence.bind Sequence.ifpred Sequence.not_seq
-*)
-hide_const (open) empty single eval map_seq bind union if_seq not_seq map
-hide_fact (open) empty_def single_def eval.simps map_seq.simps bind_def union_def
- if_seq_def not_seq_def map_def
-
-end
-
--- a/src/HOL/Lazy_Sequence.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Lazy_Sequence.thy Fri Feb 15 09:59:46 2013 +0100
@@ -7,158 +7,230 @@
imports Predicate
begin
-datatype 'a lazy_sequence = Empty | Insert 'a "'a lazy_sequence"
+subsection {* Type of lazy sequences *}
-definition Lazy_Sequence :: "(unit => ('a * 'a lazy_sequence) option) => 'a lazy_sequence"
-where
- "Lazy_Sequence f = (case f () of None => Empty | Some (x, xq) => Insert x xq)"
+datatype 'a lazy_sequence = lazy_sequence_of_list "'a list"
-code_datatype Lazy_Sequence
-
-primrec yield :: "'a lazy_sequence => ('a * 'a lazy_sequence) option"
+primrec list_of_lazy_sequence :: "'a lazy_sequence \<Rightarrow> 'a list"
where
- "yield Empty = None"
-| "yield (Insert x xq) = Some (x, xq)"
+ "list_of_lazy_sequence (lazy_sequence_of_list xs) = xs"
+
+lemma lazy_sequence_of_list_of_lazy_sequence [simp]:
+ "lazy_sequence_of_list (list_of_lazy_sequence xq) = xq"
+ by (cases xq) simp_all
+
+lemma lazy_sequence_eqI:
+ "list_of_lazy_sequence xq = list_of_lazy_sequence yq \<Longrightarrow> xq = yq"
+ by (cases xq, cases yq) simp
+
+lemma lazy_sequence_eq_iff:
+ "xq = yq \<longleftrightarrow> list_of_lazy_sequence xq = list_of_lazy_sequence yq"
+ by (auto intro: lazy_sequence_eqI)
-lemma [simp]: "yield xq = Some (x, xq') ==> size xq' < size xq"
-by (cases xq) auto
+lemma lazy_sequence_size_eq:
+ "lazy_sequence_size f xq = Suc (list_size f (list_of_lazy_sequence xq))"
+ by (cases xq) simp
+
+lemma size_lazy_sequence_eq [code]:
+ "size (xq :: 'a lazy_sequence) = 0"
+ by (cases xq) simp
+
+lemma lazy_sequence_case [simp]:
+ "lazy_sequence_case f xq = f (list_of_lazy_sequence xq)"
+ by (cases xq) auto
+
+lemma lazy_sequence_rec [simp]:
+ "lazy_sequence_rec f xq = f (list_of_lazy_sequence xq)"
+ by (cases xq) auto
-lemma yield_Seq [code]:
- "yield (Lazy_Sequence f) = f ()"
-unfolding Lazy_Sequence_def by (cases "f ()") auto
+definition Lazy_Sequence :: "(unit \<Rightarrow> ('a \<times> 'a lazy_sequence) option) \<Rightarrow> 'a lazy_sequence"
+where
+ "Lazy_Sequence f = lazy_sequence_of_list (case f () of
+ None \<Rightarrow> []
+ | Some (x, xq) \<Rightarrow> x # list_of_lazy_sequence xq)"
+
+code_datatype Lazy_Sequence
+
+declare list_of_lazy_sequence.simps [code del]
+declare lazy_sequence.cases [code del]
+declare lazy_sequence.recs [code del]
-lemma Seq_yield:
- "Lazy_Sequence (%u. yield f) = f"
-unfolding Lazy_Sequence_def by (cases f) auto
+lemma list_of_Lazy_Sequence [simp]:
+ "list_of_lazy_sequence (Lazy_Sequence f) = (case f () of
+ None \<Rightarrow> []
+ | Some (x, xq) \<Rightarrow> x # list_of_lazy_sequence xq)"
+ by (simp add: Lazy_Sequence_def)
+
+definition yield :: "'a lazy_sequence \<Rightarrow> ('a \<times> 'a lazy_sequence) option"
+where
+ "yield xq = (case list_of_lazy_sequence xq of
+ [] \<Rightarrow> None
+ | x # xs \<Rightarrow> Some (x, lazy_sequence_of_list xs))"
+
+lemma yield_Seq [simp, code]:
+ "yield (Lazy_Sequence f) = f ()"
+ by (cases "f ()") (simp_all add: yield_def split_def)
+
+lemma case_yield_eq [simp]: "option_case g h (yield xq) =
+ list_case g (\<lambda>x. curry h x \<circ> lazy_sequence_of_list) (list_of_lazy_sequence xq)"
+ by (cases "list_of_lazy_sequence xq") (simp_all add: yield_def)
lemma lazy_sequence_size_code [code]:
- "lazy_sequence_size s xq = (case yield xq of None => 0 | Some (x, xq') => s x + lazy_sequence_size s xq' + 1)"
-by (cases xq) auto
+ "lazy_sequence_size s xq = (case yield xq of
+ None \<Rightarrow> 1
+ | Some (x, xq') \<Rightarrow> Suc (s x + lazy_sequence_size s xq'))"
+ by (cases "list_of_lazy_sequence xq") (simp_all add: lazy_sequence_size_eq)
-lemma size_code [code]:
- "size xq = (case yield xq of None => 0 | Some (x, xq') => size xq' + 1)"
-by (cases xq) auto
-
-lemma [code]: "HOL.equal xq yq = (case (yield xq, yield yq) of
- (None, None) => True | (Some (x, xq'), Some (y, yq')) => (HOL.equal x y) \<and> (HOL.equal xq yq) | _ => False)"
-apply (cases xq) apply (cases yq) apply (auto simp add: equal_eq)
-apply (cases yq) apply (auto simp add: equal_eq) done
+lemma equal_lazy_sequence_code [code]:
+ "HOL.equal xq yq = (case (yield xq, yield yq) of
+ (None, None) \<Rightarrow> True
+ | (Some (x, xq'), Some (y, yq')) \<Rightarrow> HOL.equal x y \<and> HOL.equal xq yq
+ | _ \<Rightarrow> False)"
+ by (simp_all add: lazy_sequence_eq_iff equal_eq split: list.splits)
lemma [code nbe]:
"HOL.equal (x :: 'a lazy_sequence) x \<longleftrightarrow> True"
by (fact equal_refl)
-lemma seq_case [code]:
- "lazy_sequence_case f g xq = (case (yield xq) of None => f | Some (x, xq') => g x xq')"
-by (cases xq) auto
-
-lemma [code]: "lazy_sequence_rec f g xq = (case (yield xq) of None => f | Some (x, xq') => g x xq' (lazy_sequence_rec f g xq'))"
-by (cases xq) auto
-
definition empty :: "'a lazy_sequence"
where
- [code]: "empty = Lazy_Sequence (%u. None)"
+ "empty = lazy_sequence_of_list []"
-definition single :: "'a => 'a lazy_sequence"
-where
- [code]: "single x = Lazy_Sequence (%u. Some (x, empty))"
+lemma list_of_lazy_sequence_empty [simp]:
+ "list_of_lazy_sequence empty = []"
+ by (simp add: empty_def)
-primrec append :: "'a lazy_sequence => 'a lazy_sequence => 'a lazy_sequence"
-where
- "append Empty yq = yq"
-| "append (Insert x xq) yq = Insert x (append xq yq)"
+lemma empty_code [code]:
+ "empty = Lazy_Sequence (\<lambda>_. None)"
+ by (simp add: lazy_sequence_eq_iff)
-lemma [code]:
- "append xq yq = Lazy_Sequence (%u. case yield xq of
- None => yield yq
- | Some (x, xq') => Some (x, append xq' yq))"
-unfolding Lazy_Sequence_def
-apply (cases "xq")
-apply auto
-apply (cases "yq")
-apply auto
-done
+definition single :: "'a \<Rightarrow> 'a lazy_sequence"
+where
+ "single x = lazy_sequence_of_list [x]"
-primrec flat :: "'a lazy_sequence lazy_sequence => 'a lazy_sequence"
+lemma list_of_lazy_sequence_single [simp]:
+ "list_of_lazy_sequence (single x) = [x]"
+ by (simp add: single_def)
+
+lemma single_code [code]:
+ "single x = Lazy_Sequence (\<lambda>_. Some (x, empty))"
+ by (simp add: lazy_sequence_eq_iff)
+
+definition append :: "'a lazy_sequence \<Rightarrow> 'a lazy_sequence \<Rightarrow> 'a lazy_sequence"
where
- "flat Empty = Empty"
-| "flat (Insert xq xqq) = append xq (flat xqq)"
-
-lemma [code]:
- "flat xqq = Lazy_Sequence (%u. case yield xqq of
- None => None
- | Some (xq, xqq') => yield (append xq (flat xqq')))"
-apply (cases "xqq")
-apply (auto simp add: Seq_yield)
-unfolding Lazy_Sequence_def
-by auto
+ "append xq yq = lazy_sequence_of_list (list_of_lazy_sequence xq @ list_of_lazy_sequence yq)"
+
+lemma list_of_lazy_sequence_append [simp]:
+ "list_of_lazy_sequence (append xq yq) = list_of_lazy_sequence xq @ list_of_lazy_sequence yq"
+ by (simp add: append_def)
-primrec map :: "('a => 'b) => 'a lazy_sequence => 'b lazy_sequence"
+lemma append_code [code]:
+ "append xq yq = Lazy_Sequence (\<lambda>_. case yield xq of
+ None \<Rightarrow> yield yq
+ | Some (x, xq') \<Rightarrow> Some (x, append xq' yq))"
+ by (simp_all add: lazy_sequence_eq_iff split: list.splits)
+
+definition map :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a lazy_sequence \<Rightarrow> 'b lazy_sequence"
where
- "map f Empty = Empty"
-| "map f (Insert x xq) = Insert (f x) (map f xq)"
+ "map f xq = lazy_sequence_of_list (List.map f (list_of_lazy_sequence xq))"
+
+lemma list_of_lazy_sequence_map [simp]:
+ "list_of_lazy_sequence (map f xq) = List.map f (list_of_lazy_sequence xq)"
+ by (simp add: map_def)
+
+lemma map_code [code]:
+ "map f xq =
+ Lazy_Sequence (\<lambda>_. Option.map (\<lambda>(x, xq'). (f x, map f xq')) (yield xq))"
+ by (simp_all add: lazy_sequence_eq_iff split: list.splits)
+
+definition flat :: "'a lazy_sequence lazy_sequence \<Rightarrow> 'a lazy_sequence"
+where
+ "flat xqq = lazy_sequence_of_list (concat (List.map list_of_lazy_sequence (list_of_lazy_sequence xqq)))"
-lemma [code]:
- "map f xq = Lazy_Sequence (%u. Option.map (%(x, xq'). (f x, map f xq')) (yield xq))"
-apply (cases xq)
-apply (auto simp add: Seq_yield)
-unfolding Lazy_Sequence_def
-apply auto
-done
+lemma list_of_lazy_sequence_flat [simp]:
+ "list_of_lazy_sequence (flat xqq) = concat (List.map list_of_lazy_sequence (list_of_lazy_sequence xqq))"
+ by (simp add: flat_def)
-definition bind :: "'a lazy_sequence => ('a => 'b lazy_sequence) => 'b lazy_sequence"
+lemma flat_code [code]:
+ "flat xqq = Lazy_Sequence (\<lambda>_. case yield xqq of
+ None \<Rightarrow> None
+ | Some (xq, xqq') \<Rightarrow> yield (append xq (flat xqq')))"
+ by (simp add: lazy_sequence_eq_iff split: list.splits)
+
+definition bind :: "'a lazy_sequence \<Rightarrow> ('a \<Rightarrow> 'b lazy_sequence) \<Rightarrow> 'b lazy_sequence"
where
- [code]: "bind xq f = flat (map f xq)"
+ "bind xq f = flat (map f xq)"
-definition if_seq :: "bool => unit lazy_sequence"
+definition if_seq :: "bool \<Rightarrow> unit lazy_sequence"
where
"if_seq b = (if b then single () else empty)"
-function iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a Lazy_Sequence.lazy_sequence"
+definition those :: "'a option lazy_sequence \<Rightarrow> 'a lazy_sequence option"
where
- "iterate_upto f n m = Lazy_Sequence.Lazy_Sequence (%u. if n > m then None else Some (f n, iterate_upto f (n + 1) m))"
-by pat_completeness auto
+ "those xq = Option.map lazy_sequence_of_list (List.those (list_of_lazy_sequence xq))"
+
+function iterate_upto :: "(code_numeral \<Rightarrow> 'a) \<Rightarrow> code_numeral \<Rightarrow> code_numeral \<Rightarrow> 'a lazy_sequence"
+where
+ "iterate_upto f n m =
+ Lazy_Sequence (\<lambda>_. if n > m then None else Some (f n, iterate_upto f (n + 1) m))"
+ by pat_completeness auto
termination by (relation "measure (%(f, n, m). Code_Numeral.nat_of (m + 1 - n))") auto
-definition not_seq :: "unit lazy_sequence => unit lazy_sequence"
+definition not_seq :: "unit lazy_sequence \<Rightarrow> unit lazy_sequence"
where
- "not_seq xq = (case yield xq of None => single () | Some ((), xq) => empty)"
-
-subsection {* Code setup *}
+ "not_seq xq = (case yield xq of
+ None \<Rightarrow> single ()
+ | Some ((), xq) \<Rightarrow> empty)"
-fun anamorph :: "('a \<Rightarrow> ('b \<times> 'a) option) \<Rightarrow> code_numeral \<Rightarrow> 'a \<Rightarrow> 'b list \<times> 'a" where
- "anamorph f k x = (if k = 0 then ([], x)
- else case f x of None \<Rightarrow> ([], x) | Some (v, y) \<Rightarrow>
- let (vs, z) = anamorph f (k - 1) y
- in (v # vs, z))"
-
-definition yieldn :: "code_numeral \<Rightarrow> 'a lazy_sequence \<Rightarrow> 'a list \<times> 'a lazy_sequence" where
- "yieldn = anamorph yield"
+
+subsection {* Code setup *}
code_reflect Lazy_Sequence
datatypes lazy_sequence = Lazy_Sequence
- functions map yield yieldn
+
+ML {*
+signature LAZY_SEQUENCE =
+sig
+ datatype 'a lazy_sequence = Lazy_Sequence of (unit -> ('a * 'a Lazy_Sequence.lazy_sequence) option)
+ val map: ('a -> 'b) -> 'a lazy_sequence -> 'b lazy_sequence
+ val yield: 'a lazy_sequence -> ('a * 'a lazy_sequence) option
+ val yieldn: int -> 'a lazy_sequence -> 'a list * 'a lazy_sequence
+end;
+
+structure Lazy_Sequence : LAZY_SEQUENCE =
+struct
+
+datatype lazy_sequence = datatype Lazy_Sequence.lazy_sequence;
+
+fun map f = @{code Lazy_Sequence.map} f;
+
+fun yield P = @{code Lazy_Sequence.yield} P;
+
+fun yieldn k = Predicate.anamorph yield k;
+
+end;
+*}
+
subsection {* Generator Sequences *}
-
subsubsection {* General lazy sequence operation *}
-definition product :: "'a Lazy_Sequence.lazy_sequence \<Rightarrow> 'b Lazy_Sequence.lazy_sequence \<Rightarrow> ('a * 'b) Lazy_Sequence.lazy_sequence"
+definition product :: "'a lazy_sequence \<Rightarrow> 'b lazy_sequence \<Rightarrow> ('a \<times> 'b) lazy_sequence"
where
- "product s1 s2 = Lazy_Sequence.bind s1 (%a. Lazy_Sequence.bind s2 (%b. Lazy_Sequence.single (a, b)))"
+ "product s1 s2 = bind s1 (\<lambda>a. bind s2 (\<lambda>b. single (a, b)))"
subsubsection {* Small lazy typeclasses *}
class small_lazy =
- fixes small_lazy :: "code_numeral \<Rightarrow> 'a Lazy_Sequence.lazy_sequence"
+ fixes small_lazy :: "code_numeral \<Rightarrow> 'a lazy_sequence"
instantiation unit :: small_lazy
begin
-definition "small_lazy d = Lazy_Sequence.single ()"
+definition "small_lazy d = single ()"
instance ..
@@ -170,15 +242,17 @@
text {* maybe optimise this expression -> append (single x) xs == cons x xs
Performance difference? *}
-function small_lazy' :: "int => int => int Lazy_Sequence.lazy_sequence"
-where "small_lazy' d i = (if d < i then Lazy_Sequence.empty else
- Lazy_Sequence.append (Lazy_Sequence.single i) (small_lazy' d (i + 1)))"
-by pat_completeness auto
+function small_lazy' :: "int \<Rightarrow> int \<Rightarrow> int lazy_sequence"
+where
+ "small_lazy' d i = (if d < i then empty
+ else append (single i) (small_lazy' d (i + 1)))"
+ by pat_completeness auto
termination
by (relation "measure (%(d, i). nat (d + 1 - i))") auto
-definition "small_lazy d = small_lazy' (Code_Numeral.int_of d) (- (Code_Numeral.int_of d))"
+definition
+ "small_lazy d = small_lazy' (Code_Numeral.int_of d) (- (Code_Numeral.int_of d))"
instance ..
@@ -197,9 +271,11 @@
instantiation list :: (small_lazy) small_lazy
begin
-fun small_lazy_list :: "code_numeral => 'a list Lazy_Sequence.lazy_sequence"
+fun small_lazy_list :: "code_numeral \<Rightarrow> 'a list lazy_sequence"
where
- "small_lazy_list d = Lazy_Sequence.append (Lazy_Sequence.single []) (if d > 0 then Lazy_Sequence.bind (product (small_lazy (d - 1)) (small_lazy (d - 1))) (%(x, xs). Lazy_Sequence.single (x # xs)) else Lazy_Sequence.empty)"
+ "small_lazy_list d = append (single [])
+ (if d > 0 then bind (product (small_lazy (d - 1))
+ (small_lazy (d - 1))) (\<lambda>(x, xs). single (x # xs)) else empty)"
instance ..
@@ -212,56 +288,61 @@
definition hit_bound :: "'a hit_bound_lazy_sequence"
where
- [code]: "hit_bound = Lazy_Sequence (%u. Some (None, empty))"
+ "hit_bound = Lazy_Sequence (\<lambda>_. Some (None, empty))"
-definition hb_single :: "'a => 'a hit_bound_lazy_sequence"
+lemma list_of_lazy_sequence_hit_bound [simp]:
+ "list_of_lazy_sequence hit_bound = [None]"
+ by (simp add: hit_bound_def)
+
+definition hb_single :: "'a \<Rightarrow> 'a hit_bound_lazy_sequence"
where
- [code]: "hb_single x = Lazy_Sequence (%u. Some (Some x, empty))"
+ "hb_single x = Lazy_Sequence (\<lambda>_. Some (Some x, empty))"
-primrec hb_flat :: "'a hit_bound_lazy_sequence hit_bound_lazy_sequence => 'a hit_bound_lazy_sequence"
+definition hb_map :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a hit_bound_lazy_sequence \<Rightarrow> 'b hit_bound_lazy_sequence"
where
- "hb_flat Empty = Empty"
-| "hb_flat (Insert xq xqq) = append (case xq of None => hit_bound | Some xq => xq) (hb_flat xqq)"
+ "hb_map f xq = map (Option.map f) xq"
+
+lemma hb_map_code [code]:
+ "hb_map f xq =
+ Lazy_Sequence (\<lambda>_. Option.map (\<lambda>(x, xq'). (Option.map f x, hb_map f xq')) (yield xq))"
+ using map_code [of "Option.map f" xq] by (simp add: hb_map_def)
-lemma [code]:
- "hb_flat xqq = Lazy_Sequence (%u. case yield xqq of
- None => None
- | Some (xq, xqq') => yield (append (case xq of None => hit_bound | Some xq => xq) (hb_flat xqq')))"
-apply (cases "xqq")
-apply (auto simp add: Seq_yield)
-unfolding Lazy_Sequence_def
-by auto
+definition hb_flat :: "'a hit_bound_lazy_sequence hit_bound_lazy_sequence \<Rightarrow> 'a hit_bound_lazy_sequence"
+where
+ "hb_flat xqq = lazy_sequence_of_list (concat
+ (List.map ((\<lambda>x. case x of None \<Rightarrow> [None] | Some xs \<Rightarrow> xs) \<circ> Option.map list_of_lazy_sequence) (list_of_lazy_sequence xqq)))"
-primrec hb_map :: "('a => 'b) => 'a hit_bound_lazy_sequence => 'b hit_bound_lazy_sequence"
-where
- "hb_map f Empty = Empty"
-| "hb_map f (Insert x xq) = Insert (Option.map f x) (hb_map f xq)"
+lemma list_of_lazy_sequence_hb_flat [simp]:
+ "list_of_lazy_sequence (hb_flat xqq) =
+ concat (List.map ((\<lambda>x. case x of None \<Rightarrow> [None] | Some xs \<Rightarrow> xs) \<circ> Option.map list_of_lazy_sequence) (list_of_lazy_sequence xqq))"
+ by (simp add: hb_flat_def)
-lemma [code]:
- "hb_map f xq = Lazy_Sequence (%u. Option.map (%(x, xq'). (Option.map f x, hb_map f xq')) (yield xq))"
-apply (cases xq)
-apply (auto simp add: Seq_yield)
-unfolding Lazy_Sequence_def
-apply auto
-done
+lemma hb_flat_code [code]:
+ "hb_flat xqq = Lazy_Sequence (\<lambda>_. case yield xqq of
+ None \<Rightarrow> None
+ | Some (xq, xqq') \<Rightarrow> yield
+ (append (case xq of None \<Rightarrow> hit_bound | Some xq \<Rightarrow> xq) (hb_flat xqq')))"
+ by (simp add: lazy_sequence_eq_iff split: list.splits option.splits)
-definition hb_bind :: "'a hit_bound_lazy_sequence => ('a => 'b hit_bound_lazy_sequence) => 'b hit_bound_lazy_sequence"
+definition hb_bind :: "'a hit_bound_lazy_sequence \<Rightarrow> ('a \<Rightarrow> 'b hit_bound_lazy_sequence) \<Rightarrow> 'b hit_bound_lazy_sequence"
where
- [code]: "hb_bind xq f = hb_flat (hb_map f xq)"
+ "hb_bind xq f = hb_flat (hb_map f xq)"
-definition hb_if_seq :: "bool => unit hit_bound_lazy_sequence"
+definition hb_if_seq :: "bool \<Rightarrow> unit hit_bound_lazy_sequence"
where
"hb_if_seq b = (if b then hb_single () else empty)"
-definition hb_not_seq :: "unit hit_bound_lazy_sequence => unit lazy_sequence"
+definition hb_not_seq :: "unit hit_bound_lazy_sequence \<Rightarrow> unit lazy_sequence"
where
- "hb_not_seq xq = (case yield xq of None => single () | Some (x, xq) => empty)"
+ "hb_not_seq xq = (case yield xq of
+ None \<Rightarrow> single ()
+ | Some (x, xq) \<Rightarrow> empty)"
-hide_type (open) lazy_sequence
-hide_const (open) Empty Insert Lazy_Sequence yield empty single append flat map bind if_seq
- iterate_upto not_seq product
-
-hide_fact yield.simps empty_def single_def append.simps flat.simps map.simps bind_def
- iterate_upto.simps product_def if_seq_def not_seq_def
+hide_const (open) yield empty single append flat map bind
+ if_seq those iterate_upto not_seq product
+
+hide_fact (open) yield_def empty_def single_def append_def flat_def map_def bind_def
+ if_seq_def those_def not_seq_def product_def
end
+
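
Illustration (a sketch, not part of the changeset, assuming a theory context that imports the revised Lazy_Sequence): the [simp] view lemmas added above reduce equations on lazy sequences to equations on lists, so a goal like the following is discharged directly.

  lemma "list_of_lazy_sequence
      (Lazy_Sequence.append (Lazy_Sequence.single a) (Lazy_Sequence.single b)) = [a, b]"
    by simp  (* via list_of_lazy_sequence_append and list_of_lazy_sequence_single *)
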
--- a/src/HOL/Library/DAList.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Library/DAList.thy Fri Feb 15 09:59:46 2013 +0100
@@ -117,7 +117,7 @@
fun (in term_syntax) random_aux_alist
where
- "random_aux_alist i j = (if i = 0 then Pair valterm_empty else Quickcheck.collapse (Random.select_weight [(i, Quickcheck.random j \<circ>\<rightarrow> (%k. Quickcheck.random j \<circ>\<rightarrow> (%v. random_aux_alist (i - 1) j \<circ>\<rightarrow> (%a. Pair (valterm_update k v a))))), (1, Pair valterm_empty)]))"
+ "random_aux_alist i j = (if i = 0 then Pair valterm_empty else Quickcheck_Random.collapse (Random.select_weight [(i, Quickcheck_Random.random j \<circ>\<rightarrow> (%k. Quickcheck_Random.random j \<circ>\<rightarrow> (%v. random_aux_alist (i - 1) j \<circ>\<rightarrow> (%a. Pair (valterm_update k v a))))), (1, Pair valterm_empty)]))"
instantiation alist :: (random, random) random
begin
--- a/src/HOL/Library/Multiset.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Library/Multiset.thy Fri Feb 15 09:59:46 2013 +0100
@@ -1314,7 +1314,7 @@
begin
definition
- "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (bagify xs))"
+ "Quickcheck_Random.random i = Quickcheck_Random.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (bagify xs))"
instance ..
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Limited_Sequence.thy Fri Feb 15 09:59:46 2013 +0100
@@ -0,0 +1,216 @@
+
+(* Author: Lukas Bulwahn, TU Muenchen *)
+
+header {* Depth-Limited Sequences with failure element *}
+
+theory Limited_Sequence
+imports Lazy_Sequence
+begin
+
+subsection {* Depth-Limited Sequence *}
+
+type_synonym 'a dseq = "code_numeral \<Rightarrow> bool \<Rightarrow> 'a lazy_sequence option"
+
+definition empty :: "'a dseq"
+where
+ "empty = (\<lambda>_ _. Some Lazy_Sequence.empty)"
+
+definition single :: "'a \<Rightarrow> 'a dseq"
+where
+ "single x = (\<lambda>_ _. Some (Lazy_Sequence.single x))"
+
+definition eval :: "'a dseq \<Rightarrow> code_numeral \<Rightarrow> bool \<Rightarrow> 'a lazy_sequence option"
+where
+ [simp]: "eval f i pol = f i pol"
+
+definition yield :: "'a dseq \<Rightarrow> code_numeral \<Rightarrow> bool \<Rightarrow> ('a \<times> 'a dseq) option"
+where
+ "yield f i pol = (case eval f i pol of
+ None \<Rightarrow> None
+ | Some s \<Rightarrow> (Option.map \<circ> apsnd) (\<lambda>r _ _. Some r) (Lazy_Sequence.yield s))"
+
+definition map_seq :: "('a \<Rightarrow> 'b dseq) \<Rightarrow> 'a lazy_sequence \<Rightarrow> 'b dseq"
+where
+ "map_seq f xq i pol = Option.map Lazy_Sequence.flat
+ (Lazy_Sequence.those (Lazy_Sequence.map (\<lambda>x. f x i pol) xq))"
+
+lemma map_seq_code [code]:
+ "map_seq f xq i pol = (case Lazy_Sequence.yield xq of
+ None \<Rightarrow> Some Lazy_Sequence.empty
+ | Some (x, xq') \<Rightarrow> (case eval (f x) i pol of
+ None \<Rightarrow> None
+ | Some yq \<Rightarrow> (case map_seq f xq' i pol of
+ None \<Rightarrow> None
+ | Some zq \<Rightarrow> Some (Lazy_Sequence.append yq zq))))"
+ by (cases xq)
+ (auto simp add: map_seq_def Lazy_Sequence.those_def lazy_sequence_eq_iff split: list.splits option.splits)
+
+definition bind :: "'a dseq \<Rightarrow> ('a \<Rightarrow> 'b dseq) \<Rightarrow> 'b dseq"
+where
+ "bind x f = (\<lambda>i pol.
+ if i = 0 then
+ (if pol then Some Lazy_Sequence.empty else None)
+ else
+ (case x (i - 1) pol of
+ None \<Rightarrow> None
+ | Some xq \<Rightarrow> map_seq f xq i pol))"
+
+definition union :: "'a dseq \<Rightarrow> 'a dseq \<Rightarrow> 'a dseq"
+where
+ "union x y = (\<lambda>i pol. case (x i pol, y i pol) of
+ (Some xq, Some yq) \<Rightarrow> Some (Lazy_Sequence.append xq yq)
+ | _ \<Rightarrow> None)"
+
+definition if_seq :: "bool \<Rightarrow> unit dseq"
+where
+ "if_seq b = (if b then single () else empty)"
+
+definition not_seq :: "unit dseq \<Rightarrow> unit dseq"
+where
+ "not_seq x = (\<lambda>i pol. case x i (\<not> pol) of
+ None \<Rightarrow> Some Lazy_Sequence.empty
+ | Some xq \<Rightarrow> (case Lazy_Sequence.yield xq of
+ None \<Rightarrow> Some (Lazy_Sequence.single ())
+ | Some _ \<Rightarrow> Some (Lazy_Sequence.empty)))"
+
+definition map :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a dseq \<Rightarrow> 'b dseq"
+where
+ "map f g = (\<lambda>i pol. case g i pol of
+ None \<Rightarrow> None
+ | Some xq \<Rightarrow> Some (Lazy_Sequence.map f xq))"
+
+
+subsection {* Positive Depth-Limited Sequence *}
+
+type_synonym 'a pos_dseq = "code_numeral \<Rightarrow> 'a Lazy_Sequence.lazy_sequence"
+
+definition pos_empty :: "'a pos_dseq"
+where
+ "pos_empty = (\<lambda>i. Lazy_Sequence.empty)"
+
+definition pos_single :: "'a \<Rightarrow> 'a pos_dseq"
+where
+ "pos_single x = (\<lambda>i. Lazy_Sequence.single x)"
+
+definition pos_bind :: "'a pos_dseq \<Rightarrow> ('a \<Rightarrow> 'b pos_dseq) \<Rightarrow> 'b pos_dseq"
+where
+ "pos_bind x f = (\<lambda>i. Lazy_Sequence.bind (x i) (\<lambda>a. f a i))"
+
+definition pos_decr_bind :: "'a pos_dseq \<Rightarrow> ('a \<Rightarrow> 'b pos_dseq) \<Rightarrow> 'b pos_dseq"
+where
+ "pos_decr_bind x f = (\<lambda>i.
+ if i = 0 then
+ Lazy_Sequence.empty
+ else
+ Lazy_Sequence.bind (x (i - 1)) (\<lambda>a. f a i))"
+
+definition pos_union :: "'a pos_dseq \<Rightarrow> 'a pos_dseq \<Rightarrow> 'a pos_dseq"
+where
+ "pos_union xq yq = (\<lambda>i. Lazy_Sequence.append (xq i) (yq i))"
+
+definition pos_if_seq :: "bool \<Rightarrow> unit pos_dseq"
+where
+ "pos_if_seq b = (if b then pos_single () else pos_empty)"
+
+definition pos_iterate_upto :: "(code_numeral \<Rightarrow> 'a) \<Rightarrow> code_numeral \<Rightarrow> code_numeral \<Rightarrow> 'a pos_dseq"
+where
+ "pos_iterate_upto f n m = (\<lambda>i. Lazy_Sequence.iterate_upto f n m)"
+
+definition pos_map :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a pos_dseq \<Rightarrow> 'b pos_dseq"
+where
+ "pos_map f xq = (\<lambda>i. Lazy_Sequence.map f (xq i))"
+
+
+subsection {* Negative Depth-Limited Sequence *}
+
+type_synonym 'a neg_dseq = "code_numeral \<Rightarrow> 'a Lazy_Sequence.hit_bound_lazy_sequence"
+
+definition neg_empty :: "'a neg_dseq"
+where
+ "neg_empty = (\<lambda>i. Lazy_Sequence.empty)"
+
+definition neg_single :: "'a \<Rightarrow> 'a neg_dseq"
+where
+ "neg_single x = (\<lambda>i. Lazy_Sequence.hb_single x)"
+
+definition neg_bind :: "'a neg_dseq \<Rightarrow> ('a \<Rightarrow> 'b neg_dseq) \<Rightarrow> 'b neg_dseq"
+where
+ "neg_bind x f = (\<lambda>i. hb_bind (x i) (\<lambda>a. f a i))"
+
+definition neg_decr_bind :: "'a neg_dseq \<Rightarrow> ('a \<Rightarrow> 'b neg_dseq) \<Rightarrow> 'b neg_dseq"
+where
+ "neg_decr_bind x f = (\<lambda>i.
+ if i = 0 then
+ Lazy_Sequence.hit_bound
+ else
+ hb_bind (x (i - 1)) (\<lambda>a. f a i))"
+
+definition neg_union :: "'a neg_dseq \<Rightarrow> 'a neg_dseq \<Rightarrow> 'a neg_dseq"
+where
+ "neg_union x y = (\<lambda>i. Lazy_Sequence.append (x i) (y i))"
+
+definition neg_if_seq :: "bool \<Rightarrow> unit neg_dseq"
+where
+ "neg_if_seq b = (if b then neg_single () else neg_empty)"
+
+definition neg_iterate_upto
+where
+ "neg_iterate_upto f n m = (\<lambda>i. Lazy_Sequence.iterate_upto (\<lambda>i. Some (f i)) n m)"
+
+definition neg_map :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a neg_dseq \<Rightarrow> 'b neg_dseq"
+where
+ "neg_map f xq = (\<lambda>i. Lazy_Sequence.hb_map f (xq i))"
+
+
+subsection {* Negation *}
+
+definition pos_not_seq :: "unit neg_dseq \<Rightarrow> unit pos_dseq"
+where
+ "pos_not_seq xq = (\<lambda>i. Lazy_Sequence.hb_not_seq (xq (3 * i)))"
+
+definition neg_not_seq :: "unit pos_dseq \<Rightarrow> unit neg_dseq"
+where
+ "neg_not_seq x = (\<lambda>i. case Lazy_Sequence.yield (x i) of
+ None => Lazy_Sequence.hb_single ()
+ | Some ((), xq) => Lazy_Sequence.empty)"
+
+
+ML {*
+signature LIMITED_SEQUENCE =
+sig
+ type 'a dseq = int -> bool -> 'a Lazy_Sequence.lazy_sequence option
+ val map : ('a -> 'b) -> 'a dseq -> 'b dseq
+ val yield : 'a dseq -> int -> bool -> ('a * 'a dseq) option
+ val yieldn : int -> 'a dseq -> int -> bool -> 'a list * 'a dseq
+end;
+
+structure Limited_Sequence : LIMITED_SEQUENCE =
+struct
+
+type 'a dseq = int -> bool -> 'a Lazy_Sequence.lazy_sequence option
+
+fun map f = @{code Limited_Sequence.map} f;
+
+fun yield f = @{code Limited_Sequence.yield} f;
+
+fun yieldn n f i pol = (case f i pol of
+ NONE => ([], fn _ => fn _ => NONE)
+ | SOME s => let val (xs, s') = Lazy_Sequence.yieldn n s in (xs, fn _ => fn _ => SOME s') end);
+
+end;
+*}
+
+code_reserved Eval Limited_Sequence
+
+
+hide_const (open) yield empty single eval map_seq bind union if_seq not_seq map
+ pos_empty pos_single pos_bind pos_decr_bind pos_union pos_if_seq pos_iterate_upto pos_not_seq pos_map
+ neg_empty neg_single neg_bind neg_decr_bind neg_union neg_if_seq neg_iterate_upto neg_not_seq neg_map
+
+hide_fact (open) yield_def empty_def single_def eval_def map_seq_def bind_def union_def
+ if_seq_def not_seq_def map_def
+ pos_empty_def pos_single_def pos_bind_def pos_union_def pos_if_seq_def pos_iterate_upto_def pos_not_seq_def pos_map_def
+ neg_empty_def neg_single_def neg_bind_def neg_union_def neg_if_seq_def neg_iterate_upto_def neg_not_seq_def neg_map_def
+
+end
+
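
Illustration (a sketch, not part of the changeset, assuming a theory context that imports Limited_Sequence): the "failure element" of the dseq type shows up at depth 0, where bind returns an empty but defined sequence under positive polarity and None under negative polarity.

  lemma
    "Limited_Sequence.bind x f 0 True = Some Lazy_Sequence.empty"
    "Limited_Sequence.bind x f 0 False = None"
    by (simp_all add: Limited_Sequence.bind_def)
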
--- a/src/HOL/Metis_Examples/Big_O.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Metis_Examples/Big_O.thy Fri Feb 15 09:59:46 2013 +0100
@@ -29,7 +29,7 @@
(*** Now various versions with an increasing shrink factor ***)
-sledgehammer_params [isar_proofs, isar_shrink = 1]
+sledgehammer_params [isar_proofs, isar_compress = 1]
lemma
"(\<exists>c\<Colon>'a\<Colon>linordered_idom.
@@ -60,7 +60,7 @@
thus "\<bar>h x\<bar> \<le> \<bar>c\<bar> * \<bar>f x\<bar>" by (metis F4)
qed
-sledgehammer_params [isar_proofs, isar_shrink = 2]
+sledgehammer_params [isar_proofs, isar_compress = 2]
lemma
"(\<exists>c\<Colon>'a\<Colon>linordered_idom.
@@ -83,7 +83,7 @@
thus "\<bar>h x\<bar> \<le> \<bar>c\<bar> * \<bar>f x\<bar>" by (metis F2)
qed
-sledgehammer_params [isar_proofs, isar_shrink = 3]
+sledgehammer_params [isar_proofs, isar_compress = 3]
lemma
"(\<exists>c\<Colon>'a\<Colon>linordered_idom.
@@ -103,7 +103,7 @@
thus "\<bar>h x\<bar> \<le> \<bar>c\<bar> * \<bar>f x\<bar>" by (metis A1 abs_ge_zero)
qed
-sledgehammer_params [isar_proofs, isar_shrink = 4]
+sledgehammer_params [isar_proofs, isar_compress = 4]
lemma
"(\<exists>c\<Colon>'a\<Colon>linordered_idom.
@@ -123,7 +123,7 @@
thus "\<bar>h x\<bar> \<le> \<bar>c\<bar> * \<bar>f x\<bar>" by (metis abs_mult)
qed
-sledgehammer_params [isar_proofs, isar_shrink = 1]
+sledgehammer_params [isar_proofs, isar_compress = 1]
lemma bigo_alt_def: "O(f) = {h. \<exists>c. (0 < c \<and> (\<forall>x. abs (h x) <= c * abs (f x)))}"
by (auto simp add: bigo_def bigo_pos_const)
--- a/src/HOL/Metis_Examples/Sets.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Metis_Examples/Sets.thy Fri Feb 15 09:59:46 2013 +0100
@@ -21,7 +21,7 @@
lemma "P(n::nat) ==> ~P(0) ==> n ~= 0"
by metis
-sledgehammer_params [isar_proofs, isar_shrink = 1]
+sledgehammer_params [isar_proofs, isar_compress = 1]
(*multiple versions of this example*)
lemma (*equal_union: *)
@@ -70,7 +70,7 @@
ultimately show "(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V\<Colon>'a set. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))" by metis
qed
-sledgehammer_params [isar_proofs, isar_shrink = 2]
+sledgehammer_params [isar_proofs, isar_compress = 2]
lemma (*equal_union: *)
"(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))"
@@ -107,7 +107,7 @@
ultimately show "(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V\<Colon>'a set. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))" by metis
qed
-sledgehammer_params [isar_proofs, isar_shrink = 3]
+sledgehammer_params [isar_proofs, isar_compress = 3]
lemma (*equal_union: *)
"(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))"
@@ -124,7 +124,7 @@
ultimately show "(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V\<Colon>'a set. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))" by (metis Un_commute Un_upper2)
qed
-sledgehammer_params [isar_proofs, isar_shrink = 4]
+sledgehammer_params [isar_proofs, isar_compress = 4]
lemma (*equal_union: *)
"(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))"
@@ -140,7 +140,7 @@
ultimately show "(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V\<Colon>'a set. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))" by (metis Un_subset_iff Un_upper2)
qed
-sledgehammer_params [isar_proofs, isar_shrink = 1]
+sledgehammer_params [isar_proofs, isar_compress = 1]
lemma (*equal_union: *)
"(X = Y \<union> Z) = (Y \<subseteq> X \<and> Z \<subseteq> X \<and> (\<forall>V. Y \<subseteq> V \<and> Z \<subseteq> V \<longrightarrow> X \<subseteq> V))"
--- a/src/HOL/Metis_Examples/Trans_Closure.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Metis_Examples/Trans_Closure.thy Fri Feb 15 09:59:46 2013 +0100
@@ -44,7 +44,7 @@
lemma "\<lbrakk>f c = Intg x; \<forall>y. f b = Intg y \<longrightarrow> y \<noteq> x; (a, b) \<in> R\<^sup>*; (b,c) \<in> R\<^sup>*\<rbrakk>
\<Longrightarrow> \<exists>c. (b, c) \<in> R \<and> (a, c) \<in> R\<^sup>*"
-(* sledgehammer [isar_proofs, isar_shrink = 2] *)
+(* sledgehammer [isar_proofs, isar_compress = 2] *)
proof -
assume A1: "f c = Intg x"
assume A2: "\<forall>y. f b = Intg y \<longrightarrow> y \<noteq> x"
--- a/src/HOL/Mutabelle/mutabelle_extra.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Mutabelle/mutabelle_extra.ML Fri Feb 15 09:59:46 2013 +0100
@@ -269,7 +269,7 @@
@{const_name transfer_morphism},
@{const_name enum_prod_inst.enum_all_prod},
@{const_name enum_prod_inst.enum_ex_prod},
- @{const_name Quickcheck.catch_match},
+ @{const_name Quickcheck_Random.catch_match},
@{const_name Quickcheck_Exhaustive.unknown},
@{const_name Num.Bit0}, @{const_name Num.Bit1}
(*@{const_name "==>"}, @{const_name "=="}*)]
--- a/src/HOL/New_DSequence.thy Fri Feb 15 09:41:25 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,111 +0,0 @@
-
-(* Author: Lukas Bulwahn, TU Muenchen *)
-
-header {* Depth-Limited Sequences with failure element *}
-
-theory New_DSequence
-imports DSequence
-begin
-
-subsection {* Positive Depth-Limited Sequence *}
-
-type_synonym 'a pos_dseq = "code_numeral => 'a Lazy_Sequence.lazy_sequence"
-
-definition pos_empty :: "'a pos_dseq"
-where
- "pos_empty = (%i. Lazy_Sequence.empty)"
-
-definition pos_single :: "'a => 'a pos_dseq"
-where
- "pos_single x = (%i. Lazy_Sequence.single x)"
-
-definition pos_bind :: "'a pos_dseq => ('a => 'b pos_dseq) => 'b pos_dseq"
-where
- "pos_bind x f = (%i. Lazy_Sequence.bind (x i) (%a. f a i))"
-
-definition pos_decr_bind :: "'a pos_dseq => ('a => 'b pos_dseq) => 'b pos_dseq"
-where
- "pos_decr_bind x f = (%i.
- if i = 0 then
- Lazy_Sequence.empty
- else
- Lazy_Sequence.bind (x (i - 1)) (%a. f a i))"
-
-definition pos_union :: "'a pos_dseq => 'a pos_dseq => 'a pos_dseq"
-where
- "pos_union xq yq = (%i. Lazy_Sequence.append (xq i) (yq i))"
-
-definition pos_if_seq :: "bool => unit pos_dseq"
-where
- "pos_if_seq b = (if b then pos_single () else pos_empty)"
-
-definition pos_iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a pos_dseq"
-where
- "pos_iterate_upto f n m = (%i. Lazy_Sequence.iterate_upto f n m)"
-
-definition pos_map :: "('a => 'b) => 'a pos_dseq => 'b pos_dseq"
-where
- "pos_map f xq = (%i. Lazy_Sequence.map f (xq i))"
-
-subsection {* Negative Depth-Limited Sequence *}
-
-type_synonym 'a neg_dseq = "code_numeral => 'a Lazy_Sequence.hit_bound_lazy_sequence"
-
-definition neg_empty :: "'a neg_dseq"
-where
- "neg_empty = (%i. Lazy_Sequence.empty)"
-
-definition neg_single :: "'a => 'a neg_dseq"
-where
- "neg_single x = (%i. Lazy_Sequence.hb_single x)"
-
-definition neg_bind :: "'a neg_dseq => ('a => 'b neg_dseq) => 'b neg_dseq"
-where
- "neg_bind x f = (%i. hb_bind (x i) (%a. f a i))"
-
-definition neg_decr_bind :: "'a neg_dseq => ('a => 'b neg_dseq) => 'b neg_dseq"
-where
- "neg_decr_bind x f = (%i.
- if i = 0 then
- Lazy_Sequence.hit_bound
- else
- hb_bind (x (i - 1)) (%a. f a i))"
-
-definition neg_union :: "'a neg_dseq => 'a neg_dseq => 'a neg_dseq"
-where
- "neg_union x y = (%i. Lazy_Sequence.append (x i) (y i))"
-
-definition neg_if_seq :: "bool => unit neg_dseq"
-where
- "neg_if_seq b = (if b then neg_single () else neg_empty)"
-
-definition neg_iterate_upto
-where
- "neg_iterate_upto f n m = (%i. Lazy_Sequence.iterate_upto (%i. Some (f i)) n m)"
-
-definition neg_map :: "('a => 'b) => 'a neg_dseq => 'b neg_dseq"
-where
- "neg_map f xq = (%i. Lazy_Sequence.hb_map f (xq i))"
-
-subsection {* Negation *}
-
-definition pos_not_seq :: "unit neg_dseq => unit pos_dseq"
-where
- "pos_not_seq xq = (%i. Lazy_Sequence.hb_not_seq (xq (3 * i)))"
-
-definition neg_not_seq :: "unit pos_dseq => unit neg_dseq"
-where
- "neg_not_seq x = (%i. case Lazy_Sequence.yield (x i) of
- None => Lazy_Sequence.hb_single ()
- | Some ((), xq) => Lazy_Sequence.empty)"
-
-hide_type (open) pos_dseq neg_dseq
-
-hide_const (open)
- pos_empty pos_single pos_bind pos_decr_bind pos_union pos_if_seq pos_iterate_upto pos_not_seq pos_map
- neg_empty neg_single neg_bind neg_decr_bind neg_union neg_if_seq neg_iterate_upto neg_not_seq neg_map
-hide_fact (open)
- pos_empty_def pos_single_def pos_bind_def pos_union_def pos_if_seq_def pos_iterate_upto_def pos_not_seq_def pos_map_def
- neg_empty_def neg_single_def neg_bind_def neg_union_def neg_if_seq_def neg_iterate_upto_def neg_not_seq_def neg_map_def
-
-end
--- a/src/HOL/New_Random_Sequence.thy Fri Feb 15 09:41:25 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,114 +0,0 @@
-
-(* Author: Lukas Bulwahn, TU Muenchen *)
-
-theory New_Random_Sequence
-imports Random_Sequence
-begin
-
-type_synonym 'a pos_random_dseq = "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> 'a New_DSequence.pos_dseq"
-type_synonym 'a neg_random_dseq = "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> 'a New_DSequence.neg_dseq"
-
-definition pos_empty :: "'a pos_random_dseq"
-where
- "pos_empty = (%nrandom size seed. New_DSequence.pos_empty)"
-
-definition pos_single :: "'a => 'a pos_random_dseq"
-where
- "pos_single x = (%nrandom size seed. New_DSequence.pos_single x)"
-
-definition pos_bind :: "'a pos_random_dseq => ('a \<Rightarrow> 'b pos_random_dseq) \<Rightarrow> 'b pos_random_dseq"
-where
- "pos_bind R f = (\<lambda>nrandom size seed. New_DSequence.pos_bind (R nrandom size seed) (%a. f a nrandom size seed))"
-
-definition pos_decr_bind :: "'a pos_random_dseq => ('a \<Rightarrow> 'b pos_random_dseq) \<Rightarrow> 'b pos_random_dseq"
-where
- "pos_decr_bind R f = (\<lambda>nrandom size seed. New_DSequence.pos_decr_bind (R nrandom size seed) (%a. f a nrandom size seed))"
-
-definition pos_union :: "'a pos_random_dseq => 'a pos_random_dseq => 'a pos_random_dseq"
-where
- "pos_union R1 R2 = (\<lambda>nrandom size seed. New_DSequence.pos_union (R1 nrandom size seed) (R2 nrandom size seed))"
-
-definition pos_if_random_dseq :: "bool => unit pos_random_dseq"
-where
- "pos_if_random_dseq b = (if b then pos_single () else pos_empty)"
-
-definition pos_iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a pos_random_dseq"
-where
- "pos_iterate_upto f n m = (\<lambda>nrandom size seed. New_DSequence.pos_iterate_upto f n m)"
-
-definition pos_not_random_dseq :: "unit neg_random_dseq => unit pos_random_dseq"
-where
- "pos_not_random_dseq R = (\<lambda>nrandom size seed. New_DSequence.pos_not_seq (R nrandom size seed))"
-
-fun iter :: "(code_numeral * code_numeral => ('a * (unit => term)) * code_numeral * code_numeral) => code_numeral => code_numeral * code_numeral => 'a Lazy_Sequence.lazy_sequence"
-where
- "iter random nrandom seed =
- (if nrandom = 0 then Lazy_Sequence.empty else Lazy_Sequence.Lazy_Sequence (%u. let ((x, _), seed') = random seed in Some (x, iter random (nrandom - 1) seed')))"
-
-definition Random :: "(code_numeral \<Rightarrow> Random.seed \<Rightarrow> (('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed)) \<Rightarrow> 'a pos_random_dseq"
-where
- "Random g = (%nrandom size seed depth. iter (g size) nrandom seed)"
-
-definition pos_map :: "('a => 'b) => 'a pos_random_dseq => 'b pos_random_dseq"
-where
- "pos_map f P = pos_bind P (pos_single o f)"
-
-
-definition neg_empty :: "'a neg_random_dseq"
-where
- "neg_empty = (%nrandom size seed. New_DSequence.neg_empty)"
-
-definition neg_single :: "'a => 'a neg_random_dseq"
-where
- "neg_single x = (%nrandom size seed. New_DSequence.neg_single x)"
-
-definition neg_bind :: "'a neg_random_dseq => ('a \<Rightarrow> 'b neg_random_dseq) \<Rightarrow> 'b neg_random_dseq"
-where
- "neg_bind R f = (\<lambda>nrandom size seed. New_DSequence.neg_bind (R nrandom size seed) (%a. f a nrandom size seed))"
-
-definition neg_decr_bind :: "'a neg_random_dseq => ('a \<Rightarrow> 'b neg_random_dseq) \<Rightarrow> 'b neg_random_dseq"
-where
- "neg_decr_bind R f = (\<lambda>nrandom size seed. New_DSequence.neg_decr_bind (R nrandom size seed) (%a. f a nrandom size seed))"
-
-definition neg_union :: "'a neg_random_dseq => 'a neg_random_dseq => 'a neg_random_dseq"
-where
- "neg_union R1 R2 = (\<lambda>nrandom size seed. New_DSequence.neg_union (R1 nrandom size seed) (R2 nrandom size seed))"
-
-definition neg_if_random_dseq :: "bool => unit neg_random_dseq"
-where
- "neg_if_random_dseq b = (if b then neg_single () else neg_empty)"
-
-definition neg_iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a neg_random_dseq"
-where
- "neg_iterate_upto f n m = (\<lambda>nrandom size seed. New_DSequence.neg_iterate_upto f n m)"
-
-definition neg_not_random_dseq :: "unit pos_random_dseq => unit neg_random_dseq"
-where
- "neg_not_random_dseq R = (\<lambda>nrandom size seed. New_DSequence.neg_not_seq (R nrandom size seed))"
-(*
-fun iter :: "(code_numeral * code_numeral => ('a * (unit => term)) * code_numeral * code_numeral) => code_numeral => code_numeral * code_numeral => 'a Lazy_Sequence.lazy_sequence"
-where
- "iter random nrandom seed =
- (if nrandom = 0 then Lazy_Sequence.empty else Lazy_Sequence.Lazy_Sequence (%u. let ((x, _), seed') = random seed in Some (x, iter random (nrandom - 1) seed')))"
-
-definition Random :: "(code_numeral \<Rightarrow> Random.seed \<Rightarrow> (('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed)) \<Rightarrow> 'a pos_random_dseq"
-where
- "Random g = (%nrandom size seed depth. iter (g size) nrandom seed)"
-*)
-definition neg_map :: "('a => 'b) => 'a neg_random_dseq => 'b neg_random_dseq"
-where
- "neg_map f P = neg_bind P (neg_single o f)"
-(*
-hide_const DSequence.empty DSequence.single DSequence.eval
- DSequence.map_seq DSequence.bind DSequence.union DSequence.if_seq DSequence.not_seq
- DSequence.map
-*)
-
-hide_const (open)
- pos_empty pos_single pos_bind pos_decr_bind pos_union pos_if_random_dseq pos_iterate_upto pos_not_random_dseq iter Random pos_map
- neg_empty neg_single neg_bind neg_decr_bind neg_union neg_if_random_dseq neg_iterate_upto neg_not_random_dseq neg_map
-hide_type New_DSequence.pos_dseq New_DSequence.neg_dseq pos_random_dseq neg_random_dseq
-(* FIXME: hide facts *)
-(*hide_fact (open) empty_def single_def bind_def union_def if_random_dseq_def not_random_dseq_def Random.simps map_def*)
-
-end
\ No newline at end of file
--- a/src/HOL/Predicate.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Predicate.thy Fri Feb 15 09:59:46 2013 +0100
@@ -600,25 +600,34 @@
code_reflect Predicate
datatypes pred = Seq and seq = Empty | Insert | Join
- functions map
ML {*
signature PREDICATE =
sig
+ val anamorph: ('a -> ('b * 'a) option) -> int -> 'a -> 'b list * 'a
datatype 'a pred = Seq of (unit -> 'a seq)
and 'a seq = Empty | Insert of 'a * 'a pred | Join of 'a pred * 'a seq
+ val map: ('a -> 'b) -> 'a pred -> 'b pred
val yield: 'a pred -> ('a * 'a pred) option
val yieldn: int -> 'a pred -> 'a list * 'a pred
- val map: ('a -> 'b) -> 'a pred -> 'b pred
end;
structure Predicate : PREDICATE =
struct
+fun anamorph f k x =
+ (if k = 0 then ([], x)
+ else case f x
+ of NONE => ([], x)
+ | SOME (v, y) => let
+ val k' = k - 1;
+ val (vs, z) = anamorph f k' y
+ in (v :: vs, z) end);
+
datatype pred = datatype Predicate.pred
datatype seq = datatype Predicate.seq
-fun map f = Predicate.map f;
+fun map f = @{code Predicate.map} f;
fun yield (Seq f) = next (f ())
and next Empty = NONE
@@ -627,14 +636,7 @@
of NONE => next xq
| SOME (x, Q) => SOME (x, Seq (fn _ => Join (Q, xq))));
-fun anamorph f k x = (if k = 0 then ([], x)
- else case f x
- of NONE => ([], x)
- | SOME (v, y) => let
- val (vs, z) = anamorph f (k - 1) y
- in (v :: vs, z) end);
-
-fun yieldn P = anamorph yield P;
+fun yieldn k = anamorph yield k;
end;
*}
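
Usage sketch (not part of the changeset; the predicate p below is a placeholder value) for the regrouped ML interface, in which anamorph is now exported and yieldn is defined through it:

  ML {*
    (* a one-element predicate, built directly from the reflected constructors *)
    val p = Predicate.Seq (fn () => Predicate.Insert (1, Predicate.Seq (fn () => Predicate.Empty)));
    val (xs, rest) = Predicate.yieldn 3 p;   (* xs = [1]; rest enumerates the (empty) remainder *)
  *}
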
--- a/src/HOL/Predicate_Compile.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Predicate_Compile.thy Fri Feb 15 09:59:46 2013 +0100
@@ -5,7 +5,7 @@
header {* A compiler for predicates defined by introduction rules *}
theory Predicate_Compile
-imports New_Random_Sequence Quickcheck_Exhaustive
+imports Random_Sequence Quickcheck_Exhaustive
keywords "code_pred" :: thy_goal and "values" :: diag
begin
--- a/src/HOL/Quickcheck.thy Fri Feb 15 09:41:25 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,284 +0,0 @@
-(* Author: Florian Haftmann & Lukas Bulwahn, TU Muenchen *)
-
-header {* A simple counterexample generator performing random testing *}
-
-theory Quickcheck
-imports Random Code_Evaluation Enum
-begin
-
-notation fcomp (infixl "\<circ>>" 60)
-notation scomp (infixl "\<circ>\<rightarrow>" 60)
-
-setup {* Code_Target.extend_target ("Quickcheck", (Code_Runtime.target, K I)) *}
-
-subsection {* Catching Match exceptions *}
-
-axiomatization catch_match :: "'a => 'a => 'a"
-
-code_const catch_match
- (Quickcheck "((_) handle Match => _)")
-
-subsection {* The @{text random} class *}
-
-class random = typerep +
- fixes random :: "code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
-
-
-subsection {* Fundamental and numeric types*}
-
-instantiation bool :: random
-begin
-
-definition
- "random i = Random.range 2 \<circ>\<rightarrow>
- (\<lambda>k. Pair (if k = 0 then Code_Evaluation.valtermify False else Code_Evaluation.valtermify True))"
-
-instance ..
-
-end
-
-instantiation itself :: (typerep) random
-begin
-
-definition
- random_itself :: "code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a itself \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
-where "random_itself _ = Pair (Code_Evaluation.valtermify TYPE('a))"
-
-instance ..
-
-end
-
-instantiation char :: random
-begin
-
-definition
- "random _ = Random.select (Enum.enum :: char list) \<circ>\<rightarrow> (\<lambda>c. Pair (c, \<lambda>u. Code_Evaluation.term_of c))"
-
-instance ..
-
-end
-
-instantiation String.literal :: random
-begin
-
-definition
- "random _ = Pair (STR '''', \<lambda>u. Code_Evaluation.term_of (STR ''''))"
-
-instance ..
-
-end
-
-instantiation nat :: random
-begin
-
-definition random_nat :: "code_numeral \<Rightarrow> Random.seed
- \<Rightarrow> (nat \<times> (unit \<Rightarrow> Code_Evaluation.term)) \<times> Random.seed"
-where
- "random_nat i = Random.range (i + 1) \<circ>\<rightarrow> (\<lambda>k. Pair (
- let n = Code_Numeral.nat_of k
- in (n, \<lambda>_. Code_Evaluation.term_of n)))"
-
-instance ..
-
-end
-
-instantiation int :: random
-begin
-
-definition
- "random i = Random.range (2 * i + 1) \<circ>\<rightarrow> (\<lambda>k. Pair (
- let j = (if k \<ge> i then Code_Numeral.int_of (k - i) else - Code_Numeral.int_of (i - k))
- in (j, \<lambda>_. Code_Evaluation.term_of j)))"
-
-instance ..
-
-end
-
-
-subsection {* Complex generators *}
-
-text {* Towards @{typ "'a \<Rightarrow> 'b"} *}
-
-axiomatization random_fun_aux :: "typerep \<Rightarrow> typerep \<Rightarrow> ('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> term)
- \<Rightarrow> (Random.seed \<Rightarrow> ('b \<times> (unit \<Rightarrow> term)) \<times> Random.seed)
- \<Rightarrow> (Random.seed \<Rightarrow> Random.seed \<times> Random.seed)
- \<Rightarrow> Random.seed \<Rightarrow> (('a \<Rightarrow> 'b) \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
-
-definition random_fun_lift :: "(Random.seed \<Rightarrow> ('b \<times> (unit \<Rightarrow> term)) \<times> Random.seed)
- \<Rightarrow> Random.seed \<Rightarrow> (('a\<Colon>term_of \<Rightarrow> 'b\<Colon>typerep) \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
-where
- "random_fun_lift f =
- random_fun_aux TYPEREP('a) TYPEREP('b) (op =) Code_Evaluation.term_of f Random.split_seed"
-
-instantiation "fun" :: ("{equal, term_of}", random) random
-begin
-
-definition
- random_fun :: "code_numeral \<Rightarrow> Random.seed \<Rightarrow> (('a \<Rightarrow> 'b) \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
- where "random i = random_fun_lift (random i)"
-
-instance ..
-
-end
-
-text {* Towards type copies and datatypes *}
-
-definition collapse :: "('a \<Rightarrow> ('a \<Rightarrow> 'b \<times> 'a) \<times> 'a) \<Rightarrow> 'a \<Rightarrow> 'b \<times> 'a"
- where "collapse f = (f \<circ>\<rightarrow> id)"
-
-definition beyond :: "code_numeral \<Rightarrow> code_numeral \<Rightarrow> code_numeral"
- where "beyond k l = (if l > k then l else 0)"
-
-lemma beyond_zero: "beyond k 0 = 0"
- by (simp add: beyond_def)
-
-
-definition (in term_syntax) [code_unfold]:
- "valterm_emptyset = Code_Evaluation.valtermify ({} :: ('a :: typerep) set)"
-
-definition (in term_syntax) [code_unfold]:
- "valtermify_insert x s = Code_Evaluation.valtermify insert {\<cdot>} (x :: ('a :: typerep * _)) {\<cdot>} s"
-
-instantiation set :: (random) random
-begin
-
-primrec random_aux_set
-where
- "random_aux_set 0 j = collapse (Random.select_weight [(1, Pair valterm_emptyset)])"
-| "random_aux_set (Code_Numeral.Suc i) j =
- collapse (Random.select_weight
- [(1, Pair valterm_emptyset),
- (Code_Numeral.Suc i,
- random j \<circ>\<rightarrow> (%x. random_aux_set i j \<circ>\<rightarrow> (%s. Pair (valtermify_insert x s))))])"
-
-lemma [code]:
- "random_aux_set i j =
- collapse (Random.select_weight [(1, Pair valterm_emptyset),
- (i, random j \<circ>\<rightarrow> (%x. random_aux_set (i - 1) j \<circ>\<rightarrow> (%s. Pair (valtermify_insert x s))))])"
-proof (induct i rule: code_numeral.induct)
- case zero
- show ?case by (subst select_weight_drop_zero [symmetric])
- (simp add: random_aux_set.simps [simplified])
-next
- case (Suc i)
- show ?case by (simp only: random_aux_set.simps(2) [of "i"] Suc_code_numeral_minus_one)
-qed
-
-definition "random_set i = random_aux_set i i"
-
-instance ..
-
-end
-
-lemma random_aux_rec:
- fixes random_aux :: "code_numeral \<Rightarrow> 'a"
- assumes "random_aux 0 = rhs 0"
- and "\<And>k. random_aux (Code_Numeral.Suc k) = rhs (Code_Numeral.Suc k)"
- shows "random_aux k = rhs k"
- using assms by (rule code_numeral.induct)
-
-subsection {* Deriving random generators for datatypes *}
-
-ML_file "Tools/Quickcheck/quickcheck_common.ML"
-ML_file "Tools/Quickcheck/random_generators.ML"
-setup Random_Generators.setup
-
-
-subsection {* Code setup *}
-
-code_const random_fun_aux (Quickcheck "Random'_Generators.random'_fun")
- -- {* With enough criminal energy this can be abused to derive @{prop False};
- for this reason we use a distinguished target @{text Quickcheck}
- not spoiling the regular trusted code generation *}
-
-code_reserved Quickcheck Random_Generators
-
-no_notation fcomp (infixl "\<circ>>" 60)
-no_notation scomp (infixl "\<circ>\<rightarrow>" 60)
-
-subsection {* The Random-Predicate Monad *}
-
-fun iter' ::
- "'a itself => code_numeral => code_numeral => code_numeral * code_numeral
- => ('a::random) Predicate.pred"
-where
- "iter' T nrandom sz seed = (if nrandom = 0 then bot_class.bot else
- let ((x, _), seed') = random sz seed
- in Predicate.Seq (%u. Predicate.Insert x (iter' T (nrandom - 1) sz seed')))"
-
-definition iter :: "code_numeral => code_numeral => code_numeral * code_numeral
- => ('a::random) Predicate.pred"
-where
- "iter nrandom sz seed = iter' (TYPE('a)) nrandom sz seed"
-
-lemma [code]:
- "iter nrandom sz seed = (if nrandom = 0 then bot_class.bot else
- let ((x, _), seed') = random sz seed
- in Predicate.Seq (%u. Predicate.Insert x (iter (nrandom - 1) sz seed')))"
-unfolding iter_def iter'.simps[of _ nrandom] ..
-
-type_synonym 'a randompred = "Random.seed \<Rightarrow> ('a Predicate.pred \<times> Random.seed)"
-
-definition empty :: "'a randompred"
- where "empty = Pair (bot_class.bot)"
-
-definition single :: "'a => 'a randompred"
- where "single x = Pair (Predicate.single x)"
-
-definition bind :: "'a randompred \<Rightarrow> ('a \<Rightarrow> 'b randompred) \<Rightarrow> 'b randompred"
- where
- "bind R f = (\<lambda>s. let
- (P, s') = R s;
- (s1, s2) = Random.split_seed s'
- in (Predicate.bind P (%a. fst (f a s1)), s2))"
-
-definition union :: "'a randompred \<Rightarrow> 'a randompred \<Rightarrow> 'a randompred"
-where
- "union R1 R2 = (\<lambda>s. let
- (P1, s') = R1 s; (P2, s'') = R2 s'
- in (sup_class.sup P1 P2, s''))"
-
-definition if_randompred :: "bool \<Rightarrow> unit randompred"
-where
- "if_randompred b = (if b then single () else empty)"
-
-definition iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a randompred"
-where
- "iterate_upto f n m = Pair (Predicate.iterate_upto f n m)"
-
-definition not_randompred :: "unit randompred \<Rightarrow> unit randompred"
-where
- "not_randompred P = (\<lambda>s. let
- (P', s') = P s
- in if Predicate.eval P' () then (Orderings.bot, s') else (Predicate.single (), s'))"
-
-definition Random :: "(Random.seed \<Rightarrow> ('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed) \<Rightarrow> 'a randompred"
- where "Random g = scomp g (Pair o (Predicate.single o fst))"
-
-definition map :: "('a \<Rightarrow> 'b) \<Rightarrow> ('a randompred \<Rightarrow> 'b randompred)"
- where "map f P = bind P (single o f)"
-
-hide_fact
- random_bool_def
- random_itself_def
- random_char_def
- random_literal_def
- random_nat_def
- random_int_def
- random_fun_lift_def
- random_fun_def
- collapse_def
- beyond_def
- beyond_zero
- random_aux_rec
-
-hide_const (open) catch_match random collapse beyond random_fun_aux random_fun_lift
-
-hide_fact (open) iter'.simps iter_def empty_def single_def bind_def union_def
- if_randompred_def iterate_upto_def not_randompred_def Random_def map_def
-hide_type (open) randompred
-hide_const (open) iter' iter empty single bind union if_randompred
- iterate_upto not_randompred Random map
-
-end
-
--- a/src/HOL/Quickcheck_Exhaustive.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Quickcheck_Exhaustive.thy Fri Feb 15 09:59:46 2013 +0100
@@ -3,7 +3,7 @@
header {* A simple counterexample generator performing exhaustive testing *}
theory Quickcheck_Exhaustive
-imports Quickcheck
+imports Quickcheck_Random
keywords "quickcheck_generator" :: thy_decl
begin
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Quickcheck_Random.thy Fri Feb 15 09:59:46 2013 +0100
@@ -0,0 +1,204 @@
+(* Author: Florian Haftmann & Lukas Bulwahn, TU Muenchen *)
+
+header {* A simple counterexample generator performing random testing *}
+
+theory Quickcheck_Random
+imports Random Code_Evaluation Enum
+begin
+
+notation fcomp (infixl "\<circ>>" 60)
+notation scomp (infixl "\<circ>\<rightarrow>" 60)
+
+setup {* Code_Target.extend_target ("Quickcheck", (Code_Runtime.target, K I)) *}
+
+subsection {* Catching Match exceptions *}
+
+axiomatization catch_match :: "'a => 'a => 'a"
+
+code_const catch_match
+ (Quickcheck "((_) handle Match => _)")
+
+subsection {* The @{text random} class *}
+
+class random = typerep +
+ fixes random :: "code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
+
+
+subsection {* Fundamental and numeric types *}
+
+instantiation bool :: random
+begin
+
+definition
+ "random i = Random.range 2 \<circ>\<rightarrow>
+ (\<lambda>k. Pair (if k = 0 then Code_Evaluation.valtermify False else Code_Evaluation.valtermify True))"
+
+instance ..
+
+end
+
+instantiation itself :: (typerep) random
+begin
+
+definition
+ random_itself :: "code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a itself \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
+where "random_itself _ = Pair (Code_Evaluation.valtermify TYPE('a))"
+
+instance ..
+
+end
+
+instantiation char :: random
+begin
+
+definition
+ "random _ = Random.select (Enum.enum :: char list) \<circ>\<rightarrow> (\<lambda>c. Pair (c, \<lambda>u. Code_Evaluation.term_of c))"
+
+instance ..
+
+end
+
+instantiation String.literal :: random
+begin
+
+definition
+ "random _ = Pair (STR '''', \<lambda>u. Code_Evaluation.term_of (STR ''''))"
+
+instance ..
+
+end
+
+instantiation nat :: random
+begin
+
+definition random_nat :: "code_numeral \<Rightarrow> Random.seed
+ \<Rightarrow> (nat \<times> (unit \<Rightarrow> Code_Evaluation.term)) \<times> Random.seed"
+where
+ "random_nat i = Random.range (i + 1) \<circ>\<rightarrow> (\<lambda>k. Pair (
+ let n = Code_Numeral.nat_of k
+ in (n, \<lambda>_. Code_Evaluation.term_of n)))"
+
+instance ..
+
+end
+
+instantiation int :: random
+begin
+
+definition
+ "random i = Random.range (2 * i + 1) \<circ>\<rightarrow> (\<lambda>k. Pair (
+ let j = (if k \<ge> i then Code_Numeral.int_of (k - i) else - Code_Numeral.int_of (i - k))
+ in (j, \<lambda>_. Code_Evaluation.term_of j)))"
+
+instance ..
+
+end
+
+
+subsection {* Complex generators *}
+
+text {* Towards @{typ "'a \<Rightarrow> 'b"} *}
+
+axiomatization random_fun_aux :: "typerep \<Rightarrow> typerep \<Rightarrow> ('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> term)
+ \<Rightarrow> (Random.seed \<Rightarrow> ('b \<times> (unit \<Rightarrow> term)) \<times> Random.seed)
+ \<Rightarrow> (Random.seed \<Rightarrow> Random.seed \<times> Random.seed)
+ \<Rightarrow> Random.seed \<Rightarrow> (('a \<Rightarrow> 'b) \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
+
+definition random_fun_lift :: "(Random.seed \<Rightarrow> ('b \<times> (unit \<Rightarrow> term)) \<times> Random.seed)
+ \<Rightarrow> Random.seed \<Rightarrow> (('a\<Colon>term_of \<Rightarrow> 'b\<Colon>typerep) \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
+where
+ "random_fun_lift f =
+ random_fun_aux TYPEREP('a) TYPEREP('b) (op =) Code_Evaluation.term_of f Random.split_seed"
+
+instantiation "fun" :: ("{equal, term_of}", random) random
+begin
+
+definition
+ random_fun :: "code_numeral \<Rightarrow> Random.seed \<Rightarrow> (('a \<Rightarrow> 'b) \<times> (unit \<Rightarrow> term)) \<times> Random.seed"
+ where "random i = random_fun_lift (random i)"
+
+instance ..
+
+end
+
+text {* Towards type copies and datatypes *}
+
+definition collapse :: "('a \<Rightarrow> ('a \<Rightarrow> 'b \<times> 'a) \<times> 'a) \<Rightarrow> 'a \<Rightarrow> 'b \<times> 'a"
+ where "collapse f = (f \<circ>\<rightarrow> id)"
+
+definition beyond :: "code_numeral \<Rightarrow> code_numeral \<Rightarrow> code_numeral"
+ where "beyond k l = (if l > k then l else 0)"
+
+lemma beyond_zero: "beyond k 0 = 0"
+ by (simp add: beyond_def)
+
+
+definition (in term_syntax) [code_unfold]:
+ "valterm_emptyset = Code_Evaluation.valtermify ({} :: ('a :: typerep) set)"
+
+definition (in term_syntax) [code_unfold]:
+ "valtermify_insert x s = Code_Evaluation.valtermify insert {\<cdot>} (x :: ('a :: typerep * _)) {\<cdot>} s"
+
+instantiation set :: (random) random
+begin
+
+primrec random_aux_set
+where
+ "random_aux_set 0 j = collapse (Random.select_weight [(1, Pair valterm_emptyset)])"
+| "random_aux_set (Code_Numeral.Suc i) j =
+ collapse (Random.select_weight
+ [(1, Pair valterm_emptyset),
+ (Code_Numeral.Suc i,
+ random j \<circ>\<rightarrow> (%x. random_aux_set i j \<circ>\<rightarrow> (%s. Pair (valtermify_insert x s))))])"
+
+lemma [code]:
+ "random_aux_set i j =
+ collapse (Random.select_weight [(1, Pair valterm_emptyset),
+ (i, random j \<circ>\<rightarrow> (%x. random_aux_set (i - 1) j \<circ>\<rightarrow> (%s. Pair (valtermify_insert x s))))])"
+proof (induct i rule: code_numeral.induct)
+ case zero
+ show ?case by (subst select_weight_drop_zero [symmetric])
+ (simp add: random_aux_set.simps [simplified])
+next
+ case (Suc i)
+ show ?case by (simp only: random_aux_set.simps(2) [of "i"] Suc_code_numeral_minus_one)
+qed
+
+definition "random_set i = random_aux_set i i"
+
+instance ..
+
+end
+
+lemma random_aux_rec:
+ fixes random_aux :: "code_numeral \<Rightarrow> 'a"
+ assumes "random_aux 0 = rhs 0"
+ and "\<And>k. random_aux (Code_Numeral.Suc k) = rhs (Code_Numeral.Suc k)"
+ shows "random_aux k = rhs k"
+ using assms by (rule code_numeral.induct)
+
+subsection {* Deriving random generators for datatypes *}
+
+ML_file "Tools/Quickcheck/quickcheck_common.ML"
+ML_file "Tools/Quickcheck/random_generators.ML"
+setup Random_Generators.setup
+
+
+subsection {* Code setup *}
+
+code_const random_fun_aux (Quickcheck "Random'_Generators.random'_fun")
+ -- {* With enough criminal energy this can be abused to derive @{prop False};
+ for this reason we use a distinguished target @{text Quickcheck}
+ not spoiling the regular trusted code generation *}
+
+code_reserved Quickcheck Random_Generators
+
+no_notation fcomp (infixl "\<circ>>" 60)
+no_notation scomp (infixl "\<circ>\<rightarrow>" 60)
+
+hide_const (open) catch_match random collapse beyond random_fun_aux random_fun_lift
+
+hide_fact (open) collapse_def beyond_def random_fun_lift_def
+
+end
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Random_Pred.thy Fri Feb 15 09:59:46 2013 +0100
@@ -0,0 +1,76 @@
+
+(* Author: Lukas Bulwahn, TU Muenchen *)
+
+header {* The Random-Predicate Monad *}
+
+theory Random_Pred
+imports Quickcheck_Random
+begin
+
+fun iter' :: "'a itself \<Rightarrow> code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a::random) Predicate.pred"
+where
+ "iter' T nrandom sz seed = (if nrandom = 0 then bot_class.bot else
+ let ((x, _), seed') = Quickcheck_Random.random sz seed
+ in Predicate.Seq (%u. Predicate.Insert x (iter' T (nrandom - 1) sz seed')))"
+
+definition iter :: "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a::random) Predicate.pred"
+where
+ "iter nrandom sz seed = iter' (TYPE('a)) nrandom sz seed"
+
+lemma [code]:
+ "iter nrandom sz seed = (if nrandom = 0 then bot_class.bot else
+ let ((x, _), seed') = Quickcheck_Random.random sz seed
+ in Predicate.Seq (%u. Predicate.Insert x (iter (nrandom - 1) sz seed')))"
+ unfolding iter_def iter'.simps [of _ nrandom] ..
+
+type_synonym 'a random_pred = "Random.seed \<Rightarrow> ('a Predicate.pred \<times> Random.seed)"
+
+definition empty :: "'a random_pred"
+ where "empty = Pair bot"
+
+definition single :: "'a => 'a random_pred"
+ where "single x = Pair (Predicate.single x)"
+
+definition bind :: "'a random_pred \<Rightarrow> ('a \<Rightarrow> 'b random_pred) \<Rightarrow> 'b random_pred"
+ where
+ "bind R f = (\<lambda>s. let
+ (P, s') = R s;
+ (s1, s2) = Random.split_seed s'
+ in (Predicate.bind P (%a. fst (f a s1)), s2))"
+
+definition union :: "'a random_pred \<Rightarrow> 'a random_pred \<Rightarrow> 'a random_pred"
+where
+ "union R1 R2 = (\<lambda>s. let
+ (P1, s') = R1 s; (P2, s'') = R2 s'
+ in (sup_class.sup P1 P2, s''))"
+
+definition if_randompred :: "bool \<Rightarrow> unit random_pred"
+where
+ "if_randompred b = (if b then single () else empty)"
+
+definition iterate_upto :: "(code_numeral \<Rightarrow> 'a) => code_numeral \<Rightarrow> code_numeral \<Rightarrow> 'a random_pred"
+where
+ "iterate_upto f n m = Pair (Predicate.iterate_upto f n m)"
+
+definition not_randompred :: "unit random_pred \<Rightarrow> unit random_pred"
+where
+ "not_randompred P = (\<lambda>s. let
+ (P', s') = P s
+ in if Predicate.eval P' () then (Orderings.bot, s') else (Predicate.single (), s'))"
+
+definition Random :: "(Random.seed \<Rightarrow> ('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed) \<Rightarrow> 'a random_pred"
+ where "Random g = scomp g (Pair o (Predicate.single o fst))"
+
+definition map :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a random_pred \<Rightarrow> 'b random_pred"
+ where "map f P = bind P (single o f)"
+
+hide_const (open) iter' iter empty single bind union if_randompred
+ iterate_upto not_randompred Random map
+
+hide_fact iter'.simps
+
+hide_fact (open) iter_def empty_def single_def bind_def union_def
+ if_randompred_def iterate_upto_def not_randompred_def Random_def map_def
+
+end
+
--- a/src/HOL/Random_Sequence.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Random_Sequence.thy Fri Feb 15 09:59:46 2013 +0100
@@ -1,32 +1,34 @@
(* Author: Lukas Bulwahn, TU Muenchen *)
+header {* Various kinds of sequences inside the random monad *}
+
theory Random_Sequence
-imports Quickcheck
+imports Random_Pred
begin
-type_synonym 'a random_dseq = "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a DSequence.dseq \<times> Random.seed)"
+type_synonym 'a random_dseq = "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a Limited_Sequence.dseq \<times> Random.seed)"
definition empty :: "'a random_dseq"
where
- "empty = (%nrandom size. Pair (DSequence.empty))"
+ "empty = (%nrandom size. Pair (Limited_Sequence.empty))"
definition single :: "'a => 'a random_dseq"
where
- "single x = (%nrandom size. Pair (DSequence.single x))"
+ "single x = (%nrandom size. Pair (Limited_Sequence.single x))"
definition bind :: "'a random_dseq => ('a \<Rightarrow> 'b random_dseq) \<Rightarrow> 'b random_dseq"
where
"bind R f = (\<lambda>nrandom size s. let
(P, s') = R nrandom size s;
(s1, s2) = Random.split_seed s'
- in (DSequence.bind P (%a. fst (f a nrandom size s1)), s2))"
+ in (Limited_Sequence.bind P (%a. fst (f a nrandom size s1)), s2))"
definition union :: "'a random_dseq => 'a random_dseq => 'a random_dseq"
where
"union R1 R2 = (\<lambda>nrandom size s. let
(S1, s') = R1 nrandom size s; (S2, s'') = R2 nrandom size s'
- in (DSequence.union S1 S2, s''))"
+ in (Limited_Sequence.union S1 S2, s''))"
definition if_random_dseq :: "bool => unit random_dseq"
where
@@ -36,26 +38,120 @@
where
"not_random_dseq R = (\<lambda>nrandom size s. let
(S, s') = R nrandom size s
- in (DSequence.not_seq S, s'))"
-
-fun Random :: "(code_numeral \<Rightarrow> Random.seed \<Rightarrow> (('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed)) \<Rightarrow> 'a random_dseq"
-where
- "Random g nrandom = (%size. if nrandom <= 0 then (Pair DSequence.empty) else
- (scomp (g size) (%r. scomp (Random g (nrandom - 1) size) (%rs. Pair (DSequence.union (DSequence.single (fst r)) rs)))))"
+ in (Limited_Sequence.not_seq S, s'))"
definition map :: "('a => 'b) => 'a random_dseq => 'b random_dseq"
where
"map f P = bind P (single o f)"
-(*
-hide_const DSequence.empty DSequence.single DSequence.eval
- DSequence.map_seq DSequence.bind DSequence.union DSequence.if_seq DSequence.not_seq
- DSequence.map
-*)
+fun Random :: "(code_numeral \<Rightarrow> Random.seed \<Rightarrow> (('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed)) \<Rightarrow> 'a random_dseq"
+where
+ "Random g nrandom = (%size. if nrandom <= 0 then (Pair Limited_Sequence.empty) else
+ (scomp (g size) (%r. scomp (Random g (nrandom - 1) size) (%rs. Pair (Limited_Sequence.union (Limited_Sequence.single (fst r)) rs)))))"
+
+
+type_synonym 'a pos_random_dseq = "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> 'a Limited_Sequence.pos_dseq"
+
+definition pos_empty :: "'a pos_random_dseq"
+where
+ "pos_empty = (%nrandom size seed. Limited_Sequence.pos_empty)"
+
+definition pos_single :: "'a => 'a pos_random_dseq"
+where
+ "pos_single x = (%nrandom size seed. Limited_Sequence.pos_single x)"
+
+definition pos_bind :: "'a pos_random_dseq => ('a \<Rightarrow> 'b pos_random_dseq) \<Rightarrow> 'b pos_random_dseq"
+where
+ "pos_bind R f = (\<lambda>nrandom size seed. Limited_Sequence.pos_bind (R nrandom size seed) (%a. f a nrandom size seed))"
+
+definition pos_decr_bind :: "'a pos_random_dseq => ('a \<Rightarrow> 'b pos_random_dseq) \<Rightarrow> 'b pos_random_dseq"
+where
+ "pos_decr_bind R f = (\<lambda>nrandom size seed. Limited_Sequence.pos_decr_bind (R nrandom size seed) (%a. f a nrandom size seed))"
+
+definition pos_union :: "'a pos_random_dseq => 'a pos_random_dseq => 'a pos_random_dseq"
+where
+ "pos_union R1 R2 = (\<lambda>nrandom size seed. Limited_Sequence.pos_union (R1 nrandom size seed) (R2 nrandom size seed))"
+
+definition pos_if_random_dseq :: "bool => unit pos_random_dseq"
+where
+ "pos_if_random_dseq b = (if b then pos_single () else pos_empty)"
+
+definition pos_iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a pos_random_dseq"
+where
+ "pos_iterate_upto f n m = (\<lambda>nrandom size seed. Limited_Sequence.pos_iterate_upto f n m)"
+
+definition pos_map :: "('a => 'b) => 'a pos_random_dseq => 'b pos_random_dseq"
+where
+ "pos_map f P = pos_bind P (pos_single o f)"
+
+fun iter :: "(Random.seed \<Rightarrow> ('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed)
+ \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> 'a Lazy_Sequence.lazy_sequence"
+where
+ "iter random nrandom seed =
+ (if nrandom = 0 then Lazy_Sequence.empty else Lazy_Sequence.Lazy_Sequence (%u. let ((x, _), seed') = random seed in Some (x, iter random (nrandom - 1) seed')))"
+
+definition pos_Random :: "(code_numeral \<Rightarrow> Random.seed \<Rightarrow> ('a \<times> (unit \<Rightarrow> term)) \<times> Random.seed)
+ \<Rightarrow> 'a pos_random_dseq"
+where
+ "pos_Random g = (%nrandom size seed depth. iter (g size) nrandom seed)"
+
+
+type_synonym 'a neg_random_dseq = "code_numeral \<Rightarrow> code_numeral \<Rightarrow> Random.seed \<Rightarrow> 'a Limited_Sequence.neg_dseq"
-hide_const (open) empty single bind union if_random_dseq not_random_dseq Random map
+definition neg_empty :: "'a neg_random_dseq"
+where
+ "neg_empty = (%nrandom size seed. Limited_Sequence.neg_empty)"
+
+definition neg_single :: "'a => 'a neg_random_dseq"
+where
+ "neg_single x = (%nrandom size seed. Limited_Sequence.neg_single x)"
+
+definition neg_bind :: "'a neg_random_dseq => ('a \<Rightarrow> 'b neg_random_dseq) \<Rightarrow> 'b neg_random_dseq"
+where
+ "neg_bind R f = (\<lambda>nrandom size seed. Limited_Sequence.neg_bind (R nrandom size seed) (%a. f a nrandom size seed))"
+
+definition neg_decr_bind :: "'a neg_random_dseq => ('a \<Rightarrow> 'b neg_random_dseq) \<Rightarrow> 'b neg_random_dseq"
+where
+ "neg_decr_bind R f = (\<lambda>nrandom size seed. Limited_Sequence.neg_decr_bind (R nrandom size seed) (%a. f a nrandom size seed))"
+
+definition neg_union :: "'a neg_random_dseq => 'a neg_random_dseq => 'a neg_random_dseq"
+where
+ "neg_union R1 R2 = (\<lambda>nrandom size seed. Limited_Sequence.neg_union (R1 nrandom size seed) (R2 nrandom size seed))"
+
+definition neg_if_random_dseq :: "bool => unit neg_random_dseq"
+where
+ "neg_if_random_dseq b = (if b then neg_single () else neg_empty)"
+
+definition neg_iterate_upto :: "(code_numeral => 'a) => code_numeral => code_numeral => 'a neg_random_dseq"
+where
+ "neg_iterate_upto f n m = (\<lambda>nrandom size seed. Limited_Sequence.neg_iterate_upto f n m)"
-hide_type DSequence.dseq random_dseq
-hide_fact (open) empty_def single_def bind_def union_def if_random_dseq_def not_random_dseq_def Random.simps map_def
+definition neg_not_random_dseq :: "unit pos_random_dseq => unit neg_random_dseq"
+where
+ "neg_not_random_dseq R = (\<lambda>nrandom size seed. Limited_Sequence.neg_not_seq (R nrandom size seed))"
+
+definition neg_map :: "('a => 'b) => 'a neg_random_dseq => 'b neg_random_dseq"
+where
+ "neg_map f P = neg_bind P (neg_single o f)"
+
+definition pos_not_random_dseq :: "unit neg_random_dseq => unit pos_random_dseq"
+where
+ "pos_not_random_dseq R = (\<lambda>nrandom size seed. Limited_Sequence.pos_not_seq (R nrandom size seed))"
+
-end
\ No newline at end of file
+hide_const (open)
+ empty single bind union if_random_dseq not_random_dseq map Random
+ pos_empty pos_single pos_bind pos_decr_bind pos_union pos_if_random_dseq pos_iterate_upto
+ pos_not_random_dseq pos_map iter pos_Random
+ neg_empty neg_single neg_bind neg_decr_bind neg_union neg_if_random_dseq neg_iterate_upto
+ neg_not_random_dseq neg_map
+
+hide_fact (open) empty_def single_def bind_def union_def if_random_dseq_def not_random_dseq_def
+ map_def Random.simps
+ pos_empty_def pos_single_def pos_bind_def pos_decr_bind_def pos_union_def pos_if_random_dseq_def
+ pos_iterate_upto_def pos_not_random_dseq_def pos_map_def iter.simps pos_Random_def
+ neg_empty_def neg_single_def neg_bind_def neg_decr_bind_def neg_union_def neg_if_random_dseq_def
+ neg_iterate_upto_def neg_not_random_dseq_def neg_map_def
+
+end
+
--- a/src/HOL/Rat.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Rat.thy Fri Feb 15 09:59:46 2013 +0100
@@ -1030,7 +1030,7 @@
begin
definition
- "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>num. Random.range i \<circ>\<rightarrow> (\<lambda>denom. Pair (
+ "Quickcheck_Random.random i = Quickcheck_Random.random i \<circ>\<rightarrow> (\<lambda>num. Random.range i \<circ>\<rightarrow> (\<lambda>denom. Pair (
let j = Code_Numeral.int_of (denom + 1)
in valterm_fract num (j, \<lambda>u. Code_Evaluation.term_of j))))"
--- a/src/HOL/RealDef.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/RealDef.thy Fri Feb 15 09:59:46 2013 +0100
@@ -1641,7 +1641,7 @@
begin
definition
- "Quickcheck.random i = Quickcheck.random i \<circ>\<rightarrow> (\<lambda>r. Pair (valterm_ratreal r))"
+ "Quickcheck_Random.random i = Quickcheck_Random.random i \<circ>\<rightarrow> (\<lambda>r. Pair (valterm_ratreal r))"
instance ..
--- a/src/HOL/Sledgehammer.thy Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Sledgehammer.thy Fri Feb 15 09:59:46 2013 +0100
@@ -17,7 +17,7 @@
ML_file "Tools/Sledgehammer/sledgehammer_proof.ML"
ML_file "Tools/Sledgehammer/sledgehammer_annotate.ML"
ML_file "Tools/Sledgehammer/sledgehammer_preplay.ML"
-ML_file "Tools/Sledgehammer/sledgehammer_shrink.ML"
+ML_file "Tools/Sledgehammer/sledgehammer_compress.ML"
ML_file "Tools/Sledgehammer/sledgehammer_reconstruct.ML"
ML_file "Tools/Sledgehammer/sledgehammer_provers.ML"
ML_file "Tools/Sledgehammer/sledgehammer_minimize.ML"
--- a/src/HOL/TPTP/mash_eval.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/TPTP/mash_eval.ML Fri Feb 15 09:59:46 2013 +0100
@@ -110,8 +110,10 @@
val goal = goal_of_thm (Proof_Context.theory_of ctxt) th
val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal ctxt goal 1
val isar_deps = isar_dependencies_of name_tabs th
- val facts = facts |> filter (fn (_, th') => thm_ord (th', th) = LESS)
- val find_suggs = find_suggested_facts facts #> map fact_of_raw_fact
+ val facts =
+ facts |> filter (fn (_, th') => crude_thm_ord (th', th) = LESS)
+ val find_suggs =
+ find_suggested_facts ctxt facts #> map fact_of_raw_fact
fun get_facts [] compute = compute facts
| get_facts suggs _ = find_suggs suggs
val mepo_facts =
@@ -121,7 +123,7 @@
|> weight_mepo_facts
fun mash_of suggs =
get_facts suggs (fn _ =>
- find_mash_suggestions slack_max_facts suggs facts [] []
+ find_mash_suggestions ctxt slack_max_facts suggs facts [] []
|> fst |> map fact_of_raw_fact)
|> weight_mash_facts
val mash_isar_facts = mash_of mash_isar_suggs
--- a/src/HOL/TPTP/mash_export.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/TPTP/mash_export.ML Fri Feb 15 09:59:46 2013 +0100
@@ -48,7 +48,7 @@
fun all_facts ctxt =
let val css = Sledgehammer_Fact.clasimpset_rule_table_of ctxt in
Sledgehammer_Fact.all_facts ctxt true false Symtab.empty [] [] css
- |> sort (thm_ord o pairself snd)
+ |> sort (crude_thm_ord o pairself snd)
end
fun generate_accessibility ctxt thys include_thys file_name =
--- a/src/HOL/Tools/Predicate_Compile/code_prolog.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Predicate_Compile/code_prolog.ML Fri Feb 15 09:59:46 2013 +0100
@@ -956,7 +956,7 @@
val tss = run (#timeout system_config, #prolog_system system_config)
p (translate_const constant_table' name, args') output_names soln
val _ = tracing "Restoring terms..."
- val empty = Const("Orderings.bot_class.bot", fastype_of t_compr)
+ val empty = Const(@{const_name bot}, fastype_of t_compr)
fun mk_insert x S =
Const (@{const_name "Set.insert"}, fastype_of x --> fastype_of S --> fastype_of S) $ x $ S
fun mk_set_compr in_insert [] xs =
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_compilations.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_compilations.ML Fri Feb 15 09:59:46 2013 +0100
@@ -225,41 +225,41 @@
@{typ Random.seed} --> HOLogic.mk_prodT (Predicate_Comp_Funs.mk_monadT T, @{typ Random.seed})
fun dest_randompredT (Type ("fun", [@{typ Random.seed}, Type (@{type_name Product_Type.prod},
- [Type (@{type_name "Predicate.pred"}, [T]), @{typ Random.seed}])])) = T
+ [Type (@{type_name Predicate.pred}, [T]), @{typ Random.seed}])])) = T
| dest_randompredT T = raise TYPE ("dest_randompredT", [T], []);
-fun mk_empty T = Const(@{const_name Quickcheck.empty}, mk_randompredT T)
+fun mk_empty T = Const(@{const_name Random_Pred.empty}, mk_randompredT T)
fun mk_single t =
let
val T = fastype_of t
in
- Const (@{const_name Quickcheck.single}, T --> mk_randompredT T) $ t
+ Const (@{const_name Random_Pred.single}, T --> mk_randompredT T) $ t
end;
fun mk_bind (x, f) =
let
val T as (Type ("fun", [_, U])) = fastype_of f
in
- Const (@{const_name Quickcheck.bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Random_Pred.bind}, fastype_of x --> T --> U) $ x $ f
end
-val mk_plus = HOLogic.mk_binop @{const_name Quickcheck.union}
+val mk_plus = HOLogic.mk_binop @{const_name Random_Pred.union}
-fun mk_if cond = Const (@{const_name Quickcheck.if_randompred},
+fun mk_if cond = Const (@{const_name Random_Pred.if_randompred},
HOLogic.boolT --> mk_randompredT HOLogic.unitT) $ cond;
fun mk_iterate_upto T (f, from, to) =
- list_comb (Const (@{const_name Quickcheck.iterate_upto},
+ list_comb (Const (@{const_name Random_Pred.iterate_upto},
[@{typ code_numeral} --> T, @{typ code_numeral}, @{typ code_numeral}] ---> mk_randompredT T),
[f, from, to])
fun mk_not t =
let
val T = mk_randompredT HOLogic.unitT
- in Const (@{const_name Quickcheck.not_randompred}, T --> T) $ t end
+ in Const (@{const_name Random_Pred.not_randompred}, T --> T) $ t end
-fun mk_map T1 T2 tf tp = Const (@{const_name Quickcheck.map},
+fun mk_map T1 T2 tf tp = Const (@{const_name Random_Pred.map},
(T1 --> T2) --> mk_randompredT T1 --> mk_randompredT T2) $ tf $ tp
val compfuns = Predicate_Compile_Aux.CompilationFuns
@@ -279,29 +279,29 @@
Type (@{type_name Option.option}, [Type (@{type_name Lazy_Sequence.lazy_sequence}, [T])])])])) = T
| dest_dseqT T = raise TYPE ("dest_dseqT", [T], []);
-fun mk_empty T = Const (@{const_name DSequence.empty}, mk_dseqT T);
+fun mk_empty T = Const (@{const_name Limited_Sequence.empty}, mk_dseqT T);
fun mk_single t =
let val T = fastype_of t
- in Const(@{const_name DSequence.single}, T --> mk_dseqT T) $ t end;
+ in Const(@{const_name Limited_Sequence.single}, T --> mk_dseqT T) $ t end;
fun mk_bind (x, f) =
let val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name DSequence.bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Limited_Sequence.bind}, fastype_of x --> T --> U) $ x $ f
end;
-val mk_plus = HOLogic.mk_binop @{const_name DSequence.union};
+val mk_plus = HOLogic.mk_binop @{const_name Limited_Sequence.union};
-fun mk_if cond = Const (@{const_name DSequence.if_seq},
+fun mk_if cond = Const (@{const_name Limited_Sequence.if_seq},
HOLogic.boolT --> mk_dseqT HOLogic.unitT) $ cond;
fun mk_iterate_upto _ _ = raise Fail "No iterate_upto compilation"
fun mk_not t = let val T = mk_dseqT HOLogic.unitT
- in Const (@{const_name DSequence.not_seq}, T --> T) $ t end
+ in Const (@{const_name Limited_Sequence.not_seq}, T --> T) $ t end
-fun mk_map T1 T2 tf tp = Const (@{const_name DSequence.map},
+fun mk_map T1 T2 tf tp = Const (@{const_name Limited_Sequence.map},
(T1 --> T2) --> mk_dseqT T1 --> mk_dseqT T2) $ tf $ tp
val compfuns = Predicate_Compile_Aux.CompilationFuns
@@ -321,30 +321,30 @@
Type (@{type_name Lazy_Sequence.lazy_sequence}, [T])])) = T
| dest_pos_dseqT T = raise TYPE ("dest_pos_dseqT", [T], []);
-fun mk_empty T = Const (@{const_name New_DSequence.pos_empty}, mk_pos_dseqT T);
+fun mk_empty T = Const (@{const_name Limited_Sequence.pos_empty}, mk_pos_dseqT T);
fun mk_single t =
let
val T = fastype_of t
- in Const(@{const_name New_DSequence.pos_single}, T --> mk_pos_dseqT T) $ t end;
+ in Const(@{const_name Limited_Sequence.pos_single}, T --> mk_pos_dseqT T) $ t end;
fun mk_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_DSequence.pos_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Limited_Sequence.pos_bind}, fastype_of x --> T --> U) $ x $ f
end;
fun mk_decr_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_DSequence.pos_decr_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Limited_Sequence.pos_decr_bind}, fastype_of x --> T --> U) $ x $ f
end;
-val mk_plus = HOLogic.mk_binop @{const_name New_DSequence.pos_union};
+val mk_plus = HOLogic.mk_binop @{const_name Limited_Sequence.pos_union};
-fun mk_if cond = Const (@{const_name New_DSequence.pos_if_seq},
+fun mk_if cond = Const (@{const_name Limited_Sequence.pos_if_seq},
HOLogic.boolT --> mk_pos_dseqT HOLogic.unitT) $ cond;
fun mk_iterate_upto _ _ = raise Fail "No iterate_upto compilation"
@@ -355,9 +355,9 @@
val nT =
@{typ code_numeral} --> Type (@{type_name Lazy_Sequence.lazy_sequence},
[Type (@{type_name Option.option}, [@{typ unit}])])
- in Const (@{const_name New_DSequence.pos_not_seq}, nT --> pT) $ t end
+ in Const (@{const_name Limited_Sequence.pos_not_seq}, nT --> pT) $ t end
-fun mk_map T1 T2 tf tp = Const (@{const_name New_DSequence.pos_map},
+fun mk_map T1 T2 tf tp = Const (@{const_name Limited_Sequence.pos_map},
(T1 --> T2) --> mk_pos_dseqT T1 --> mk_pos_dseqT T2) $ tf $ tp
val depth_limited_compfuns = Predicate_Compile_Aux.CompilationFuns
@@ -382,30 +382,30 @@
Type (@{type_name Lazy_Sequence.lazy_sequence}, [Type (@{type_name Option.option}, [T])])])) = T
| dest_neg_dseqT T = raise TYPE ("dest_neg_dseqT", [T], []);
-fun mk_empty T = Const (@{const_name New_DSequence.neg_empty}, mk_neg_dseqT T);
+fun mk_empty T = Const (@{const_name Limited_Sequence.neg_empty}, mk_neg_dseqT T);
fun mk_single t =
let
val T = fastype_of t
- in Const(@{const_name New_DSequence.neg_single}, T --> mk_neg_dseqT T) $ t end;
+ in Const(@{const_name Limited_Sequence.neg_single}, T --> mk_neg_dseqT T) $ t end;
fun mk_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_DSequence.neg_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Limited_Sequence.neg_bind}, fastype_of x --> T --> U) $ x $ f
end;
fun mk_decr_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_DSequence.neg_decr_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Limited_Sequence.neg_decr_bind}, fastype_of x --> T --> U) $ x $ f
end;
-val mk_plus = HOLogic.mk_binop @{const_name New_DSequence.neg_union};
+val mk_plus = HOLogic.mk_binop @{const_name Limited_Sequence.neg_union};
-fun mk_if cond = Const (@{const_name New_DSequence.neg_if_seq},
+fun mk_if cond = Const (@{const_name Limited_Sequence.neg_if_seq},
HOLogic.boolT --> mk_neg_dseqT HOLogic.unitT) $ cond;
fun mk_iterate_upto _ _ = raise Fail "No iterate_upto compilation"
@@ -416,9 +416,9 @@
val pT =
@{typ code_numeral} --> Type (@{type_name Lazy_Sequence.lazy_sequence},
[@{typ unit}])
- in Const (@{const_name New_DSequence.neg_not_seq}, pT --> nT) $ t end
+ in Const (@{const_name Limited_Sequence.neg_not_seq}, pT --> nT) $ t end
-fun mk_map T1 T2 tf tp = Const (@{const_name New_DSequence.neg_map},
+fun mk_map T1 T2 tf tp = Const (@{const_name Limited_Sequence.neg_map},
(T1 --> T2) --> mk_neg_dseqT T1 --> mk_neg_dseqT T2) $ tf $ tp
val depth_limited_compfuns = Predicate_Compile_Aux.CompilationFuns
@@ -445,34 +445,34 @@
Type (@{type_name Lazy_Sequence.lazy_sequence}, [T])])])])])) = T
| dest_pos_random_dseqT T = raise TYPE ("dest_random_dseqT", [T], []);
-fun mk_empty T = Const (@{const_name New_Random_Sequence.pos_empty}, mk_pos_random_dseqT T);
+fun mk_empty T = Const (@{const_name Random_Sequence.pos_empty}, mk_pos_random_dseqT T);
fun mk_single t =
let
val T = fastype_of t
- in Const(@{const_name New_Random_Sequence.pos_single}, T --> mk_pos_random_dseqT T) $ t end;
+ in Const(@{const_name Random_Sequence.pos_single}, T --> mk_pos_random_dseqT T) $ t end;
fun mk_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_Random_Sequence.pos_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Random_Sequence.pos_bind}, fastype_of x --> T --> U) $ x $ f
end;
fun mk_decr_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_Random_Sequence.pos_decr_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Random_Sequence.pos_decr_bind}, fastype_of x --> T --> U) $ x $ f
end;
-val mk_plus = HOLogic.mk_binop @{const_name New_Random_Sequence.pos_union};
+val mk_plus = HOLogic.mk_binop @{const_name Random_Sequence.pos_union};
-fun mk_if cond = Const (@{const_name New_Random_Sequence.pos_if_random_dseq},
+fun mk_if cond = Const (@{const_name Random_Sequence.pos_if_random_dseq},
HOLogic.boolT --> mk_pos_random_dseqT HOLogic.unitT) $ cond;
fun mk_iterate_upto T (f, from, to) =
- list_comb (Const (@{const_name New_Random_Sequence.pos_iterate_upto},
+ list_comb (Const (@{const_name Random_Sequence.pos_iterate_upto},
[@{typ code_numeral} --> T, @{typ code_numeral}, @{typ code_numeral}]
---> mk_pos_random_dseqT T),
[f, from, to])
@@ -484,9 +484,9 @@
@{typ code_numeral} --> Type (@{type_name Lazy_Sequence.lazy_sequence},
[Type (@{type_name Option.option}, [@{typ unit}])])
- in Const (@{const_name New_Random_Sequence.pos_not_random_dseq}, nT --> pT) $ t end
+ in Const (@{const_name Random_Sequence.pos_not_random_dseq}, nT --> pT) $ t end
-fun mk_map T1 T2 tf tp = Const (@{const_name New_Random_Sequence.pos_map},
+fun mk_map T1 T2 tf tp = Const (@{const_name Random_Sequence.pos_map},
(T1 --> T2) --> mk_pos_random_dseqT T1 --> mk_pos_random_dseqT T2) $ tf $ tp
val depth_limited_compfuns = Predicate_Compile_Aux.CompilationFuns
@@ -514,34 +514,34 @@
[Type (@{type_name Option.option}, [T])])])])])])) = T
| dest_neg_random_dseqT T = raise TYPE ("dest_random_dseqT", [T], []);
-fun mk_empty T = Const (@{const_name New_Random_Sequence.neg_empty}, mk_neg_random_dseqT T);
+fun mk_empty T = Const (@{const_name Random_Sequence.neg_empty}, mk_neg_random_dseqT T);
fun mk_single t =
let
val T = fastype_of t
- in Const(@{const_name New_Random_Sequence.neg_single}, T --> mk_neg_random_dseqT T) $ t end;
+ in Const(@{const_name Random_Sequence.neg_single}, T --> mk_neg_random_dseqT T) $ t end;
fun mk_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_Random_Sequence.neg_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Random_Sequence.neg_bind}, fastype_of x --> T --> U) $ x $ f
end;
fun mk_decr_bind (x, f) =
let
val T as Type ("fun", [_, U]) = fastype_of f
in
- Const (@{const_name New_Random_Sequence.neg_decr_bind}, fastype_of x --> T --> U) $ x $ f
+ Const (@{const_name Random_Sequence.neg_decr_bind}, fastype_of x --> T --> U) $ x $ f
end;
-val mk_plus = HOLogic.mk_binop @{const_name New_Random_Sequence.neg_union};
+val mk_plus = HOLogic.mk_binop @{const_name Random_Sequence.neg_union};
-fun mk_if cond = Const (@{const_name New_Random_Sequence.neg_if_random_dseq},
+fun mk_if cond = Const (@{const_name Random_Sequence.neg_if_random_dseq},
HOLogic.boolT --> mk_neg_random_dseqT HOLogic.unitT) $ cond;
fun mk_iterate_upto T (f, from, to) =
- list_comb (Const (@{const_name New_Random_Sequence.neg_iterate_upto},
+ list_comb (Const (@{const_name Random_Sequence.neg_iterate_upto},
[@{typ code_numeral} --> T, @{typ code_numeral}, @{typ code_numeral}]
---> mk_neg_random_dseqT T),
[f, from, to])
@@ -551,9 +551,9 @@
val nT = mk_neg_random_dseqT HOLogic.unitT
val pT = @{typ code_numeral} --> @{typ code_numeral} --> @{typ Random.seed} -->
@{typ code_numeral} --> Type (@{type_name Lazy_Sequence.lazy_sequence}, [@{typ unit}])
- in Const (@{const_name New_Random_Sequence.neg_not_random_dseq}, pT --> nT) $ t end
+ in Const (@{const_name Random_Sequence.neg_not_random_dseq}, pT --> nT) $ t end
-fun mk_map T1 T2 tf tp = Const (@{const_name New_Random_Sequence.neg_map},
+fun mk_map T1 T2 tf tp = Const (@{const_name Random_Sequence.neg_map},
(T1 --> T2) --> mk_neg_random_dseqT T1 --> mk_neg_random_dseqT T2) $ tf $ tp
val depth_limited_compfuns = Predicate_Compile_Aux.CompilationFuns
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML Fri Feb 15 09:59:46 2013 +0100
@@ -26,8 +26,8 @@
val put_pred_result : (unit -> term Predicate.pred) -> Proof.context -> Proof.context
val put_pred_random_result : (unit -> seed -> term Predicate.pred * seed) ->
Proof.context -> Proof.context
- val put_dseq_result : (unit -> term DSequence.dseq) -> Proof.context -> Proof.context
- val put_dseq_random_result : (unit -> int -> int -> seed -> term DSequence.dseq * seed) ->
+ val put_dseq_result : (unit -> term Limited_Sequence.dseq) -> Proof.context -> Proof.context
+ val put_dseq_random_result : (unit -> int -> int -> seed -> term Limited_Sequence.dseq * seed) ->
Proof.context -> Proof.context
val put_new_dseq_result : (unit -> int -> term Lazy_Sequence.lazy_sequence) ->
Proof.context -> Proof.context
@@ -325,20 +325,20 @@
function_name_prefix = "random_",
compfuns = Predicate_Comp_Funs.compfuns,
mk_random = (fn T => fn additional_arguments =>
- list_comb (Const(@{const_name Quickcheck.iter},
+ list_comb (Const(@{const_name Random_Pred.iter},
[@{typ code_numeral}, @{typ code_numeral}, @{typ Random.seed}] --->
Predicate_Comp_Funs.mk_monadT T), additional_arguments)),
modify_funT = (fn T =>
let
val (Ts, U) = strip_type T
- val Ts' = [@{typ code_numeral}, @{typ code_numeral}, @{typ "code_numeral * code_numeral"}]
+ val Ts' = [@{typ code_numeral}, @{typ code_numeral}, @{typ Random.seed}]
in (Ts @ Ts') ---> U end),
additional_arguments = (fn names =>
let
val [nrandom, size, seed] = Name.variant_list names ["nrandom", "size", "seed"]
in
[Free (nrandom, @{typ code_numeral}), Free (size, @{typ code_numeral}),
- Free (seed, @{typ "code_numeral * code_numeral"})]
+ Free (seed, @{typ Random.seed})]
end),
wrap_compilation = K (K (K (K (K I))))
: (compilation_funs -> string -> typ -> mode -> term list -> term -> term),
@@ -351,21 +351,21 @@
function_name_prefix = "depth_limited_random_",
compfuns = Predicate_Comp_Funs.compfuns,
mk_random = (fn T => fn additional_arguments =>
- list_comb (Const(@{const_name Quickcheck.iter},
+ list_comb (Const(@{const_name Random_Pred.iter},
[@{typ code_numeral}, @{typ code_numeral}, @{typ Random.seed}] --->
Predicate_Comp_Funs.mk_monadT T), tl additional_arguments)),
modify_funT = (fn T =>
let
val (Ts, U) = strip_type T
val Ts' = [@{typ code_numeral}, @{typ code_numeral}, @{typ code_numeral},
- @{typ "code_numeral * code_numeral"}]
+ @{typ Random.seed}]
in (Ts @ Ts') ---> U end),
additional_arguments = (fn names =>
let
val [depth, nrandom, size, seed] = Name.variant_list names ["depth", "nrandom", "size", "seed"]
in
[Free (depth, @{typ code_numeral}), Free (nrandom, @{typ code_numeral}),
- Free (size, @{typ code_numeral}), Free (seed, @{typ "code_numeral * code_numeral"})]
+ Free (size, @{typ code_numeral}), Free (seed, @{typ Random.seed})]
end),
wrap_compilation =
fn compfuns => fn _ => fn T => fn mode => fn additional_arguments => fn compilation =>
@@ -423,11 +423,11 @@
compfuns = Random_Sequence_CompFuns.compfuns,
mk_random = (fn T => fn _ =>
let
- val random = Const ("Quickcheck.random_class.random",
+ val random = Const (@{const_name Quickcheck_Random.random},
@{typ code_numeral} --> @{typ Random.seed} -->
HOLogic.mk_prodT (HOLogic.mk_prodT (T, @{typ "unit => term"}), @{typ Random.seed}))
in
- Const ("Random_Sequence.Random", (@{typ code_numeral} --> @{typ Random.seed} -->
+ Const (@{const_name Random_Sequence.Random}, (@{typ code_numeral} --> @{typ Random.seed} -->
HOLogic.mk_prodT (HOLogic.mk_prodT (T, @{typ "unit => term"}), @{typ Random.seed})) -->
Random_Sequence_CompFuns.mk_random_dseqT T) $ random
end),
@@ -460,11 +460,11 @@
compfuns = New_Pos_Random_Sequence_CompFuns.depth_limited_compfuns,
mk_random = (fn T => fn _ =>
let
- val random = Const ("Quickcheck.random_class.random",
+ val random = Const (@{const_name Quickcheck_Random.random},
@{typ code_numeral} --> @{typ Random.seed} -->
HOLogic.mk_prodT (HOLogic.mk_prodT (T, @{typ "unit => term"}), @{typ Random.seed}))
in
- Const ("New_Random_Sequence.Random", (@{typ code_numeral} --> @{typ Random.seed} -->
+ Const (@{const_name Random_Sequence.pos_Random}, (@{typ code_numeral} --> @{typ Random.seed} -->
HOLogic.mk_prodT (HOLogic.mk_prodT (T, @{typ "unit => term"}), @{typ Random.seed})) -->
New_Pos_Random_Sequence_CompFuns.mk_pos_random_dseqT T) $ random
end),
@@ -1647,7 +1647,7 @@
structure Dseq_Result = Proof_Data
(
- type T = unit -> term DSequence.dseq
+ type T = unit -> term Limited_Sequence.dseq
(* FIXME avoid user error with non-user text *)
fun init _ () = error "Dseq_Result"
);
@@ -1655,7 +1655,7 @@
structure Dseq_Random_Result = Proof_Data
(
- type T = unit -> int -> int -> seed -> term DSequence.dseq * seed
+ type T = unit -> int -> int -> seed -> term Limited_Sequence.dseq * seed
(* FIXME avoid user error with non-user text *)
fun init _ () = error "Dseq_Random_Result"
);
@@ -1843,24 +1843,24 @@
let
val [nrandom, size, depth] = arguments
in
- rpair NONE (TimeLimit.timeLimit time_limit (fn () => fst (DSequence.yieldn k
+ rpair NONE (TimeLimit.timeLimit time_limit (fn () => fst (Limited_Sequence.yieldn k
(Code_Runtime.dynamic_value_strict (Dseq_Random_Result.get, put_dseq_random_result, "Predicate_Compile_Core.put_dseq_random_result")
- thy NONE (fn proc => fn g => fn nrandom => fn size => fn s => g nrandom size s |>> DSequence.map proc)
+ thy NONE (fn proc => fn g => fn nrandom => fn size => fn s => g nrandom size s |>> Limited_Sequence.map proc)
t' [] nrandom size
|> Random_Engine.run)
depth true)) ())
end
| DSeq =>
- rpair NONE (TimeLimit.timeLimit time_limit (fn () => fst (DSequence.yieldn k
+ rpair NONE (TimeLimit.timeLimit time_limit (fn () => fst (Limited_Sequence.yieldn k
(Code_Runtime.dynamic_value_strict (Dseq_Result.get, put_dseq_result, "Predicate_Compile_Core.put_dseq_result")
- thy NONE DSequence.map t' []) (the_single arguments) true)) ())
+ thy NONE Limited_Sequence.map t' []) (the_single arguments) true)) ())
| Pos_Generator_DSeq =>
let
val depth = (the_single arguments)
in
rpair NONE (TimeLimit.timeLimit time_limit (fn () => fst (Lazy_Sequence.yieldn k
(Code_Runtime.dynamic_value_strict (New_Dseq_Result.get, put_new_dseq_result, "Predicate_Compile_Core.put_new_dseq_result")
- thy NONE (fn proc => fn g => fn depth => g depth |> Lazy_Sequence.mapa proc)
+ thy NONE (fn proc => fn g => fn depth => g depth |> Lazy_Sequence.map proc)
t' [] depth))) ())
end
| New_Pos_Random_DSeq =>
@@ -1875,7 +1875,7 @@
(Lseq_Random_Stats_Result.get, put_lseq_random_stats_result, "Predicate_Compile_Core.put_lseq_random_stats_result")
thy NONE
(fn proc => fn g => fn nrandom => fn size => fn s => fn depth => g nrandom size s depth
- |> Lazy_Sequence.mapa (apfst proc))
+ |> Lazy_Sequence.map (apfst proc))
t' [] nrandom size seed depth))) ()))
else rpair NONE
(TimeLimit.timeLimit time_limit (fn () => fst (Lazy_Sequence.yieldn k
@@ -1883,7 +1883,7 @@
(Lseq_Random_Result.get, put_lseq_random_result, "Predicate_Compile_Core.put_lseq_random_result")
thy NONE
(fn proc => fn g => fn nrandom => fn size => fn s => fn depth => g nrandom size s depth
- |> Lazy_Sequence.mapa proc)
+ |> Lazy_Sequence.map proc)
t' [] nrandom size seed depth))) ())
end
| _ =>
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_quickcheck.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_quickcheck.ML Fri Feb 15 09:59:46 2013 +0100
@@ -12,7 +12,7 @@
(unit -> int -> int -> int -> seed -> term list Predicate.pred) ->
Proof.context -> Proof.context;
val put_dseq_result :
- (unit -> int -> int -> seed -> term list DSequence.dseq * seed) ->
+ (unit -> int -> int -> seed -> term list Limited_Sequence.dseq * seed) ->
Proof.context -> Proof.context;
val put_lseq_result :
(unit -> int -> int -> seed -> int -> term list Lazy_Sequence.lazy_sequence) ->
@@ -49,7 +49,7 @@
structure Dseq_Result = Proof_Data
(
- type T = unit -> int -> int -> seed -> term list DSequence.dseq * seed
+ type T = unit -> int -> int -> seed -> term list Limited_Sequence.dseq * seed
(* FIXME avoid user error with non-user text *)
fun init _ () = error "Dseq_Result"
);
@@ -297,11 +297,11 @@
val compiled_term =
Code_Runtime.dynamic_value_strict (Dseq_Result.get, put_dseq_result, "Predicate_Compile_Quickcheck.put_dseq_result")
thy4 (SOME target)
- (fn proc => fn g => fn n => fn size => fn s => g n size s |>> (DSequence.map o map) proc)
+ (fn proc => fn g => fn n => fn size => fn s => g n size s |>> (Limited_Sequence.map o map) proc)
qc_term []
in
(fn size => fn nrandom => fn depth =>
- Option.map fst (DSequence.yield
+ Option.map fst (Limited_Sequence.yield
(compiled_term nrandom size |> Random_Engine.run) depth true))
end
| New_Pos_Random_DSeq =>
@@ -311,7 +311,7 @@
(Lseq_Result.get, put_lseq_result, "Predicate_Compile_Quickcheck.put_lseq_result")
thy4 (SOME target)
(fn proc => fn g => fn nrandom => fn size => fn s => fn depth =>
- g nrandom size s depth |> (Lazy_Sequence.mapa o map) proc)
+ g nrandom size s depth |> (Lazy_Sequence.map o map) proc)
qc_term []
in
fn size => fn nrandom => fn depth => Option.map fst (Lazy_Sequence.yield
@@ -326,7 +326,7 @@
Code_Runtime.dynamic_value_strict
(New_Dseq_Result.get, put_new_dseq_result, "Predicate_Compile_Quickcheck.put_new_dseq_result")
thy4 (SOME target)
- (fn proc => fn g => fn depth => g depth |> (Lazy_Sequence.mapa o map) proc)
+ (fn proc => fn g => fn depth => g depth |> (Lazy_Sequence.map o map) proc)
qc_term []
in
fn size => fn nrandom => fn depth => Option.map fst (Lazy_Sequence.yield (compiled_term depth))
--- a/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Quickcheck/exhaustive_generators.ML Fri Feb 15 09:59:46 2013 +0100
@@ -243,7 +243,7 @@
val (T1, T2) = (fastype_of x, fastype_of (e genuine))
val if_t = Const (@{const_name "If"}, @{typ bool} --> T2 --> T2 --> T2)
in
- Const (@{const_name "Quickcheck.catch_match"}, T2 --> T2 --> T2) $
+ Const (@{const_name "Quickcheck_Random.catch_match"}, T2 --> T2 --> T2) $
(Const (@{const_name Let}, T1 --> (T1 --> T2) --> T2) $ t $ lambda x (e genuine)) $
(if_t $ genuine_only $ none $ safe false)
end
@@ -337,9 +337,9 @@
val mk_test_term = mk_test_term lookup mk_exhaustive_closure mk_safe_if mk_let none_t return ctxt
in lambda depth (@{term "catch_Counterexample :: unit => term list option"} $ mk_test_term t) end
-fun mk_unknown_term T = HOLogic.reflect_term (Const ("Quickcheck_Exhaustive.unknown", T))
+fun mk_unknown_term T = HOLogic.reflect_term (Const (@{const_name Quickcheck_Exhaustive.unknown}, T))
-fun mk_safe_term t = @{term "Quickcheck.catch_match :: term => term => term"}
+fun mk_safe_term t = @{term "Quickcheck_Random.catch_match :: term => term => term"}
$ (HOLogic.mk_term_of (fastype_of t) t) $ mk_unknown_term (fastype_of t)
fun mk_return t genuine = @{term "Some :: bool * term list => (bool * term list) option"} $
--- a/src/HOL/Tools/Quickcheck/quickcheck_common.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Quickcheck/quickcheck_common.ML Fri Feb 15 09:59:46 2013 +0100
@@ -309,7 +309,7 @@
val T = fastype_of then_t
val if_t = Const (@{const_name "If"}, @{typ bool} --> T --> T --> T)
in
- Const (@{const_name "Quickcheck.catch_match"}, T --> T --> T) $
+ Const (@{const_name "Quickcheck_Random.catch_match"}, T --> T --> T) $
(if_t $ cond $ then_t $ else_t genuine) $
(if_t $ genuine_only $ none $ else_t false)
end
--- a/src/HOL/Tools/Quickcheck/random_generators.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Quickcheck/random_generators.ML Fri Feb 15 09:59:46 2013 +0100
@@ -222,7 +222,7 @@
tc @{typ Random.seed} (SOME T, @{typ Random.seed});
val tk = if is_rec
then if k = 0 then size
- else @{term "Quickcheck.beyond :: code_numeral \<Rightarrow> code_numeral \<Rightarrow> code_numeral"}
+ else @{term "Quickcheck_Random.beyond :: code_numeral \<Rightarrow> code_numeral \<Rightarrow> code_numeral"}
$ HOLogic.mk_number @{typ code_numeral} k $ size
else @{term "1::code_numeral"}
in (is_rec, HOLogic.mk_prod (tk, t)) end;
@@ -233,7 +233,7 @@
|> (map o apfst) Type
|> map (fn (T, cs) => (T, (sort_rec o map (mk_consexpr T)) cs));
fun mk_select (rT, xs) =
- mk_const @{const_name Quickcheck.collapse} [@{typ "Random.seed"}, termifyT rT]
+ mk_const @{const_name Quickcheck_Random.collapse} [@{typ Random.seed}, termifyT rT]
$ (mk_const @{const_name Random.select_weight} [random_resultT rT]
$ HOLogic.mk_list (HOLogic.mk_prodT (@{typ code_numeral}, random_resultT rT)) xs)
$ seed;
@@ -315,7 +315,7 @@
mk_scomp (mk_termtyp T) resultT @{typ Random.seed} t
(mk_split T $ Abs ("", T, Abs ("", @{typ "unit => term"}, t')));
fun mk_bindclause (_, _, i, T) = mk_scomp_split T
- (Sign.mk_const thy (@{const_name Quickcheck.random}, [T]) $ Bound i);
+ (Sign.mk_const thy (@{const_name Quickcheck_Random.random}, [T]) $ Bound i);
in
lambda genuine_only
(Abs ("n", @{typ code_numeral}, fold_rev mk_bindclause bounds (return $ check true)))
@@ -333,7 +333,7 @@
val prop' = betapplys (prop, map (fn (i, _, _, _) => Bound i) bounds);
val terms = HOLogic.mk_list @{typ term} (map (fn (_, i, _, _) => Bound i $ @{term "()"}) bounds)
val (assms, concl) = Quickcheck_Common.strip_imp prop'
- val return = HOLogic.pair_const resultT @{typ "Random.seed"};
+ val return = HOLogic.pair_const resultT @{typ Random.seed};
fun mk_assms_report i =
HOLogic.mk_prod (@{term "None :: (bool * term list) option"},
HOLogic.mk_prod (HOLogic.mk_list HOLogic.boolT
@@ -361,7 +361,7 @@
mk_scomp (mk_termtyp T) resultT @{typ Random.seed} t
(mk_split T $ Abs ("", T, Abs ("", @{typ "unit => term"}, t')));
fun mk_bindclause (_, _, i, T) = mk_scomp_split T
- (Sign.mk_const thy (@{const_name Quickcheck.random}, [T]) $ Bound i);
+ (Sign.mk_const thy (@{const_name Quickcheck_Random.random}, [T]) $ Bound i);
in
lambda genuine_only
(Abs ("n", @{typ code_numeral}, fold_rev mk_bindclause bounds (return $ check true)))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_compress.ML Fri Feb 15 09:59:46 2013 +0100
@@ -0,0 +1,252 @@
+(* Title: HOL/Tools/Sledgehammer/sledgehammer_compress.ML
+ Author: Jasmin Blanchette, TU Muenchen
+ Author: Steffen Juilf Smolka, TU Muenchen
+
+Compression of reconstructed Isar proofs.
+*)
+
+signature SLEDGEHAMMER_COMPRESS =
+sig
+ type isar_step = Sledgehammer_Proof.isar_step
+ type preplay_time = Sledgehammer_Preplay.preplay_time
+ val compress_proof :
+ bool -> Proof.context -> string -> string -> bool -> Time.time option
+ -> real -> isar_step list -> isar_step list * (bool * preplay_time)
+end
+
+structure Sledgehammer_Compress : SLEDGEHAMMER_COMPRESS =
+struct
+
+open Sledgehammer_Util
+open Sledgehammer_Proof
+open Sledgehammer_Preplay
+
+(* Parameters *)
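+(* when preplay is enabled, a merged step is kept only if it replays within
+ merge_timeout_slack times the combined preplay time of the steps it replaces *)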
+val merge_timeout_slack = 1.2
+
+(* Data structures, orders *)
+val label_ord = prod_ord int_ord fast_string_ord o pairself swap
+structure Label_Table = Table(
+ type key = label
+ val ord = label_ord)
+
+(* Clean vector interface *)
+fun get i v = Vector.sub (v, i)
+fun replace x i v = Vector.update (v, i, x)
+fun update f i v = replace (get i v |> f) i v
+fun v_map_index f v = Vector.foldr (op::) nil v |> map_index f |> Vector.fromList
+fun v_fold_index f v s =
+ Vector.foldl (fn (x, (i, s)) => (i+1, f (i, x) s)) (0, s) v |> snd
+
+(* Queue interface to table *)
+fun pop tab key =
+ let val v = hd (Inttab.lookup_list tab key) in
+ (v, Inttab.remove_list (op =) (key, v) tab)
+ end
+fun pop_max tab = pop tab (the (Inttab.max_key tab))
+fun add_list tab xs = fold (Inttab.insert_list (op =)) xs tab
+
+(* Main function for compressing proofs *)
+fun compress_proof debug ctxt type_enc lam_trans preplay preplay_timeout
+ isar_compress proof =
+ let
+ (* 60 seconds seems like a good interpretation of "no timeout" *)
+ val preplay_timeout = preplay_timeout |> the_default (seconds 60.0)
+
+ (* handle metis preplay failure: once preplay has failed, the remaining lazy timings are not forced *)
+ local
+ open Unsynchronized
+ val metis_fail = ref false
+ in
+ fun handle_metis_fail try_metis () =
+ try_metis () handle exn =>
+ (if Exn.is_interrupt exn orelse debug then reraise exn
+ else metis_fail := true; some_preplay_time)
+ fun get_time lazy_time =
+ if !metis_fail andalso not (Lazy.is_finished lazy_time)
+ then some_preplay_time
+ else Lazy.force lazy_time
+ val metis_fail = fn () => !metis_fail
+ end
+
+ (* compress the proof at the top level - case splits are not compressed here *)
+ fun compress_top_level on_top_level ctxt proof =
+ let
+ (* proof vector *)
+ val proof_vect = proof |> map SOME |> Vector.fromList
+ val n = Vector.length proof_vect
+ val n_metis = metis_steps_top_level proof
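+ (* the isar_compress factor sets the target number of metis steps to remain *)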
+ val target_n_metis = Real.fromInt n_metis / isar_compress |> Real.round
+
+ (* table mapping each top-level label to its proof position *)
+ fun update_table (i, Assume (l, _)) = Label_Table.update_new (l, i)
+ | update_table (i, Obtain (_, _, l, _, _)) = Label_Table.update_new (l, i)
+ | update_table (i, Prove (_, l, _, _)) = Label_Table.update_new (l, i)
+ | update_table _ = I
+ val label_index_table = fold_index update_table proof Label_Table.empty
+ val lookup_indices = map_filter (Label_Table.lookup label_index_table)
+
+ (* proof references *)
+ fun refs (Obtain (_, _, _, _, By_Metis (lfs, _))) = lookup_indices lfs
+ | refs (Prove (_, _, _, By_Metis (lfs, _))) = lookup_indices lfs
+ | refs (Prove (_, _, _, Case_Split (cases, (lfs, _)))) =
+ lookup_indices lfs @ maps (maps refs) cases
+ | refs (Prove (_, _, _, Subblock proof)) = maps refs proof
+ | refs _ = []
+ val refed_by_vect =
+ Vector.tabulate (n, (fn _ => []))
+ |> fold_index (fn (i, step) => fold (update (cons i)) (refs step)) proof
+ |> Vector.map rev (* after rev, indices are sorted in ascending order *)
+
+ (* candidates for elimination; the table acts as a priority queue keyed by
+ the size of the candidate's statement (greedy algorithm) *)
+ fun add_if_cand proof_vect (i, [j]) =
+ (case (the (get i proof_vect), the (get j proof_vect)) of
+ (Prove (_, _, t, By_Metis _), Prove (_, _, _, By_Metis _)) =>
+ cons (Term.size_of_term t, i)
+ | (Prove (_, _, t, By_Metis _), Obtain (_, _, _, _, By_Metis _)) =>
+ cons (Term.size_of_term t, i)
+ | _ => I)
+ | add_if_cand _ _ = I
+ val cand_tab =
+ v_fold_index (add_if_cand proof_vect) refed_by_vect []
+ |> Inttab.make_list
+
+ (* cache metis preplay times in lazy time vector *)
+ val metis_time =
+ v_map_index
+ (if not preplay then K (zero_preplay_time) #> Lazy.value
+ else
+ apsnd the (* step *)
+ #> apfst (fn i => try (get (i-1) #> the) proof_vect) (* succedent *)
+ #> try_metis debug type_enc lam_trans ctxt preplay_timeout
+ #> handle_metis_fail
+ #> Lazy.lazy)
+ proof_vect
+
+ fun sum_up_time lazy_time_vector =
+ Vector.foldl
+ (apfst get_time #> uncurry add_preplay_time)
+ zero_preplay_time lazy_time_vector
+
+ (* Merging *)
+ fun merge (Prove (_, label1, _, By_Metis (lfs1, gfs1))) step2 =
+ let
+ val (step_constructor, lfs2, gfs2) =
+ (case step2 of
+ (Prove (qs2, label2, t, By_Metis (lfs2, gfs2))) =>
+ (fn by => Prove (qs2, label2, t, by), lfs2, gfs2)
+ | (Obtain (qs2, xs, label2, t, By_Metis (lfs2, gfs2))) =>
+ (fn by => Obtain (qs2, xs, label2, t, by), lfs2, gfs2)
+ | _ => error "sledgehammer_compress: unmergeable Isar steps")
+ val lfs = remove (op =) label1 lfs2 |> union (op =) lfs1
+ val gfs = union (op =) gfs1 gfs2
+ in step_constructor (By_Metis (lfs, gfs)) end
+ | merge _ _ = error "sledgehammer_compress: unmergeable Isar steps"
+
+ fun try_merge metis_time (s1, i) (s2, j) =
+ if not preplay then (merge s1 s2 |> SOME, metis_time)
+ else
+ (case get i metis_time |> Lazy.force of
+ (true, _) => (NONE, metis_time)
+ | (_, t1) =>
+ (case get j metis_time |> Lazy.force of
+ (true, _) => (NONE, metis_time)
+ | (_, t2) =>
+ let
+ val s12 = merge s1 s2
+ val timeout = time_mult merge_timeout_slack (Time.+(t1, t2))
+ in
+ case try_metis_quietly debug type_enc lam_trans ctxt timeout
+ (NONE, s12) () of
+ (true, _) => (NONE, metis_time)
+ | exact_time =>
+ (SOME s12, metis_time
+ |> replace (zero_preplay_time |> Lazy.value) i
+ |> replace (Lazy.value exact_time) j)
+
+ end))
+
+ fun merge_steps metis_time proof_vect refed_by cand_tab n' n_metis' =
+ if Inttab.is_empty cand_tab
+ orelse n_metis' <= target_n_metis
+ orelse (on_top_level andalso n'<3)
+ then
+ (Vector.foldr
+ (fn (NONE, proof) => proof | (SOME s, proof) => s :: proof)
+ [] proof_vect,
+ sum_up_time metis_time)
+ else
+ let
+ val (i, cand_tab) = pop_max cand_tab
+ val j = get i refed_by |> the_single
+ val s1 = get i proof_vect |> the
+ val s2 = get j proof_vect |> the
+ in
+ case try_merge metis_time (s1, i) (s2, j) of
+ (NONE, metis_time) =>
+ merge_steps metis_time proof_vect refed_by cand_tab n' n_metis'
+ | (s, metis_time) =>
+ let
+ val refs = refs s1
+ val refed_by = refed_by |> fold
+ (update (Ord_List.remove int_ord i #> Ord_List.insert int_ord j)) refs
+ val new_candidates =
+ fold (add_if_cand proof_vect)
+ (map (fn i => (i, get i refed_by)) refs) []
+ val cand_tab = add_list cand_tab new_candidates
+ val proof_vect = proof_vect |> replace NONE i |> replace s j
+ in
+ merge_steps metis_time proof_vect refed_by cand_tab (n' - 1)
+ (n_metis' - 1)
+ end
+ end
+ in
+ merge_steps metis_time proof_vect refed_by_vect cand_tab n n_metis
+ end
+
+ fun do_proof on_top_level ctxt proof =
+ let
+ (* Enrich context with top-level facts *)
+ val thy = Proof_Context.theory_of ctxt
+ (* TODO: add Skolem variables to context? *)
+ fun enrich_with_fact l t =
+ Proof_Context.put_thms false
+ (string_for_label l, SOME [Skip_Proof.make_thm thy t])
+ fun enrich_with_step (Assume (l, t)) = enrich_with_fact l t
+ | enrich_with_step (Obtain (_, _, l, t, _)) = enrich_with_fact l t
+ | enrich_with_step (Prove (_, l, t, _)) = enrich_with_fact l t
+ | enrich_with_step _ = I
+ val rich_ctxt = fold enrich_with_step proof ctxt
+
+ (* compress subproofs (case splits and subblocks) and the top level *)
+ val ((proof, top_level_time), lower_level_time) =
+ proof |> do_subproof rich_ctxt
+ |>> compress_top_level on_top_level rich_ctxt
+ in
+ (proof, add_preplay_time lower_level_time top_level_time)
+ end
+
+ and do_subproof ctxt proof =
+ let
+ fun compress_each_and_collect_time compress candidates =
+ let fun f_m cand time = compress cand ||> add_preplay_time time
+ in fold_map f_m candidates zero_preplay_time end
+ val compress_subproof =
+ compress_each_and_collect_time (do_proof false ctxt)
+ fun compress (Prove (qs, l, t, Case_Split (cases, facts))) =
+ let val (cases, time) = compress_subproof cases
+ in (Prove (qs, l, t, Case_Split (cases, facts)), time) end
+ | compress (Prove (qs, l, t, Subblock proof)) =
+ let val ([proof], time) = compress_subproof [proof]
+ in (Prove (qs, l, t, Subblock proof), time) end
+ | compress step = (step, zero_preplay_time)
+ in
+ compress_each_and_collect_time compress proof
+ end
+ in
+ do_proof true ctxt proof
+ |> apsnd (pair (metis_fail ()))
+ end
+
+end
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Fri Feb 15 09:59:46 2013 +0100
@@ -193,9 +193,9 @@
(* FIXME: Ad hoc list *)
val technical_prefixes =
- ["ATP", "Code_Evaluation", "Datatype", "DSequence", "Enum", "Lazy_Sequence",
- "Meson", "Metis", "Nitpick", "New_DSequence", "New_Random_Sequence",
- "Quickcheck", "Quickcheck_Exhaustive", "Quickcheck_Narrowing",
+ ["ATP", "Code_Evaluation", "Datatype", "Enum", "Lazy_Sequence",
+ "Limited_Sequence", "Meson", "Metis", "Nitpick",
+ "Quickcheck_Random", "Quickcheck_Exhaustive", "Quickcheck_Narrowing",
"Random_Sequence", "Sledgehammer", "SMT"]
|> map (suffix Long_Name.separator)
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Fri Feb 15 09:59:46 2013 +0100
@@ -95,14 +95,13 @@
("max_mono_iters", "smart"),
("max_new_mono_instances", "smart"),
("isar_proofs", "false"),
- ("isar_shrink", "10"),
+ ("isar_compress", "10"),
("slice", "true"),
("minimize", "smart"),
("preplay_timeout", "3")]
val alias_params =
- [("prover", ("provers", [])), (* legacy *)
- ("max_relevant", ("max_facts", [])), (* legacy *)
+ [("prover", ("provers", [])), (* undocumented *)
("dont_preplay", ("preplay_timeout", ["0"]))]
val negated_alias_params =
[("no_debug", "debug"),
@@ -119,7 +118,7 @@
val params_for_minimize =
["debug", "verbose", "overlord", "type_enc", "strict", "lam_trans",
"uncurried_aliases", "max_mono_iters", "max_new_mono_instances",
- "learn", "isar_proofs", "isar_shrink", "timeout", "preplay_timeout"]
+ "learn", "isar_proofs", "isar_compress", "timeout", "preplay_timeout"]
val property_dependent_params = ["provers", "timeout"]
@@ -314,7 +313,7 @@
val max_new_mono_instances =
lookup_option lookup_int "max_new_mono_instances"
val isar_proofs = lookup_bool "isar_proofs"
- val isar_shrink = Real.max (1.0, lookup_real "isar_shrink")
+ val isar_compress = Real.max (1.0, lookup_real "isar_compress")
val slice = mode <> Auto_Try andalso lookup_bool "slice"
val minimize =
if mode = Auto_Try then NONE else lookup_option lookup_bool "minimize"
@@ -330,7 +329,7 @@
learn = learn, fact_filter = fact_filter, max_facts = max_facts,
fact_thresholds = fact_thresholds, max_mono_iters = max_mono_iters,
max_new_mono_instances = max_new_mono_instances, isar_proofs = isar_proofs,
- isar_shrink = isar_shrink, slice = slice, minimize = minimize,
+ isar_compress = isar_compress, slice = slice, minimize = minimize,
timeout = timeout, preplay_timeout = preplay_timeout, expect = expect}
end
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML Fri Feb 15 09:59:46 2013 +0100
@@ -50,12 +50,12 @@
val mash_unlearn : Proof.context -> unit
val nickname_of_thm : thm -> string
- val find_suggested_facts : ('b * thm) list -> string list -> ('b * thm) list
+ val find_suggested_facts :
+ Proof.context -> ('b * thm) list -> string list -> ('b * thm) list
val mesh_facts :
('a * 'a -> bool) -> int -> (real * (('a * real) list * 'a list)) list
-> 'a list
- val theory_ord : theory * theory -> order
- val thm_ord : thm * thm -> order
+ val crude_thm_ord : thm * thm -> order
val goal_of_thm : theory -> thm -> thm
val run_prover_for_mash :
Proof.context -> params -> string -> fact list -> thm -> prover_result
@@ -72,8 +72,8 @@
val weight_mepo_facts : 'a list -> ('a * real) list
val weight_mash_facts : 'a list -> ('a * real) list
val find_mash_suggestions :
- int -> string list -> ('b * thm) list -> ('b * thm) list -> ('b * thm) list
- -> ('b * thm) list * ('b * thm) list
+ Proof.context -> int -> string list -> ('b * thm) list -> ('b * thm) list
+ -> ('b * thm) list -> ('b * thm) list * ('b * thm) list
val mash_suggested_facts :
Proof.context -> params -> string -> int -> term list -> term
-> raw_fact list -> fact list * fact list
@@ -450,11 +450,15 @@
else
elided_backquote_thm 200 th
-fun find_suggested_facts facts =
+fun find_suggested_facts ctxt facts =
let
- fun add_fact (fact as (_, th)) = Symtab.default (nickname_of_thm th, fact)
- val tab = fold add_fact facts Symtab.empty
- in map_filter (Symtab.lookup tab) end
+ fun add (fact as (_, th)) = Symtab.default (nickname_of_thm th, fact)
+ val tab = fold add facts Symtab.empty
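+ (* trace suggested fact names that cannot be resolved; they are dropped *)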
+ fun lookup nick =
+ Symtab.lookup tab nick
+ |> tap (fn NONE => trace_msg ctxt (fn () => "Cannot find " ^ quote nick)
+ | _ => ())
+ in map_filter lookup end
fun scaled_avg [] = 0
| scaled_avg xs =
@@ -505,7 +509,11 @@
val lams_feature = ("lams", 2.0 (* FUDGE *))
val skos_feature = ("skos", 2.0 (* FUDGE *))
-fun theory_ord p =
+(* The following "crude" functions should be progressively phased out, since
+ they create visibility edges that do not exist in Isabelle, resulting in
+ failed lookups later on. *)
+
+fun crude_theory_ord p =
if Theory.subthy p then
if Theory.eq_thy p then EQUAL else LESS
else if Theory.subthy (swap p) then
@@ -514,8 +522,8 @@
EQUAL => string_ord (pairself Context.theory_name p)
| order => order
-fun thm_ord p =
- case theory_ord (pairself theory_of_thm p) of
+fun crude_thm_ord p =
+ case crude_theory_ord (pairself theory_of_thm p) of
EQUAL =>
let val q = pairself nickname_of_thm p in
(* Hack to put "xxx_def" before "xxxI" and "xxxE" *)
@@ -525,6 +533,9 @@
end
| ord => ord
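+(* comparison based solely on theory inclusion, avoiding the artificial
+   edges introduced by the "crude" orders above *)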
+val thm_less_eq = Theory.subthy o pairself theory_of_thm
+fun thm_less p = thm_less_eq p andalso not (thm_less_eq (swap p))
+
val freezeT = Type.legacy_freeze_type
fun freeze (t $ u) = freeze t $ freeze u
@@ -636,7 +647,7 @@
isn't much to learn from such proofs. *)
val max_dependencies = 20
-val prover_dependency_default_max_facts = 50
+val prover_default_max_facts = 50
(* "type_definition_xxx" facts are characterized by their use of "CollectI". *)
val typedef_dep = nickname_of_thm @{thm CollectI}
@@ -692,7 +703,7 @@
val thy = Proof_Context.theory_of ctxt
val goal = goal_of_thm thy th
val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal ctxt goal 1
- val facts = facts |> filter (fn (_, th') => thm_ord (th', th) = LESS)
+ val facts = facts |> filter (fn (_, th') => thm_less (th', th))
fun nickify ((_, stature), th) = ((nickname_of_thm th, stature), th)
fun is_dep dep (_, th) = nickname_of_thm th = dep
fun add_isar_dep facts dep accum =
@@ -704,8 +715,8 @@
val facts =
facts
|> mepo_suggested_facts ctxt params prover
- (max_facts |> the_default prover_dependency_default_max_facts)
- NONE hyp_ts concl_t
+ (max_facts |> the_default prover_default_max_facts) NONE hyp_ts
+ concl_t
|> fold (add_isar_dep facts) isar_deps
|> map nickify
in
@@ -789,14 +800,14 @@
val max_proximity_facts = 100
-fun find_mash_suggestions _ [] _ _ raw_unknown = ([], raw_unknown)
- | find_mash_suggestions max_facts suggs facts chained raw_unknown =
+fun find_mash_suggestions _ _ [] _ _ raw_unknown = ([], raw_unknown)
+ | find_mash_suggestions ctxt max_facts suggs facts chained raw_unknown =
let
- val raw_mash = find_suggested_facts facts suggs
+ val raw_mash = find_suggested_facts ctxt facts suggs
val unknown_chained =
inter (Thm.eq_thm_prop o pairself snd) chained raw_unknown
val proximity =
- facts |> sort (thm_ord o pairself snd o swap)
+ facts |> sort (crude_thm_ord o pairself snd o swap)
|> take max_proximity_facts
val mess =
[(0.90 (* FUDGE *), (map (rpair 1.0) unknown_chained, [])),
@@ -831,7 +842,7 @@
end)
val unknown = facts |> filter_out (is_fact_in_graph access_G snd)
in
- find_mash_suggestions max_facts suggs facts chained unknown
+ find_mash_suggestions ctxt max_facts suggs facts chained unknown
|> pairself (map fact_of_raw_fact)
end
@@ -899,7 +910,7 @@
fun next_commit_time () =
Time.+ (Timer.checkRealTimer timer, commit_timeout)
val {access_G, ...} = peek_state ctxt I
- val facts = facts |> sort (thm_ord o pairself snd)
+ val facts = facts |> sort (crude_thm_ord o pairself snd)
val (old_facts, new_facts) =
facts |> List.partition (is_fact_in_graph access_G snd)
in
@@ -991,7 +1002,7 @@
(* crude approximation *)
val ancestors =
old_facts
- |> filter (fn (_, th) => thm_ord (th, last_th) <> GREATER)
+ |> filter (fn (_, th) => crude_thm_ord (th, last_th) <> GREATER)
val parents = maximal_in_graph access_G ancestors
val (learns, (_, n, _, _)) =
([], (parents, 0, next_commit_time (), false))
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_minimize.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_minimize.ML Fri Feb 15 09:59:46 2013 +0100
@@ -54,7 +54,7 @@
fun test_facts ({debug, verbose, overlord, provers, max_mono_iters,
max_new_mono_instances, type_enc, strict, lam_trans,
- uncurried_aliases, isar_proofs, isar_shrink,
+ uncurried_aliases, isar_proofs, isar_compress,
preplay_timeout, ...} : params)
silent (prover : prover) timeout i n state facts =
let
@@ -76,7 +76,7 @@
learn = false, fact_filter = NONE, max_facts = SOME (length facts),
fact_thresholds = (1.01, 1.01), max_mono_iters = max_mono_iters,
max_new_mono_instances = max_new_mono_instances,
- isar_proofs = isar_proofs, isar_shrink = isar_shrink,
+ isar_proofs = isar_proofs, isar_compress = isar_compress,
slice = false, minimize = SOME false, timeout = timeout,
preplay_timeout = preplay_timeout, expect = ""}
val problem =
@@ -243,7 +243,7 @@
({debug, verbose, overlord, blocking, provers, type_enc, strict,
lam_trans, uncurried_aliases, learn, fact_filter, max_facts,
fact_thresholds, max_mono_iters, max_new_mono_instances, isar_proofs,
- isar_shrink, slice, minimize, timeout, preplay_timeout, expect}
+ isar_compress, slice, minimize, timeout, preplay_timeout, expect}
: params) =
let
fun lookup_override name default_value =
@@ -261,7 +261,7 @@
learn = learn, fact_filter = fact_filter, max_facts = max_facts,
fact_thresholds = fact_thresholds, max_mono_iters = max_mono_iters,
max_new_mono_instances = max_new_mono_instances, isar_proofs = isar_proofs,
- isar_shrink = isar_shrink, slice = slice, minimize = minimize,
+ isar_compress = isar_compress, slice = slice, minimize = minimize,
timeout = timeout, preplay_timeout = preplay_timeout, expect = expect}
end
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_preplay.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_preplay.ML Fri Feb 15 09:59:46 2013 +0100
@@ -1,4 +1,4 @@
-(* Title: HOL/Tools/Sledgehammer/sledgehammer_shrink.ML
+(* Title: HOL/Tools/Sledgehammer/sledgehammer_preplay.ML
Author: Jasmin Blanchette, TU Muenchen
Author: Steffen Juilf Smolka, TU Muenchen
@@ -26,11 +26,13 @@
open Sledgehammer_Proof
(* The boolean flag encodes whether the time is exact (false) or a lower bound
- (true) *)
+ (true):
+ (t, false) = "t ms"
+ (t, true) = "> t ms" *)
type preplay_time = bool * Time.time
-val zero_preplay_time = (false, Time.zeroTime)
-val some_preplay_time = (true, Time.zeroTime)
+val zero_preplay_time = (false, Time.zeroTime) (* 0 ms *)
+val some_preplay_time = (true, Time.zeroTime) (* > 0 ms *)
fun add_preplay_time (b1, t1) (b2, t2) = (b1 orelse b2, Time.+(t1,t2))
@@ -92,7 +94,25 @@
:: map (hd #> (fn Assume (_, a) => Logic.mk_implies (a, t)
| _ => error "preplay error: malformed case split")
#> make_thm)
- cases)
+ cases
+ | Subblock proof =>
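+ (* a subblock contributes its conclusion, with the variables fixed inside
+ the block generalized to schematic variables *)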
+ let
+ val (steps, last_step) = split_last proof
+ (* TODO: assuming Fix may only occur at the beginning of a subblock,
+ this can be optimized *)
+ val fixed_frees =
+ fold (fn Fix xs => append (map Free xs) | _ => I) steps []
+ (* TODO: make sure the var indexnames are actually fresh *)
+ fun var_of_free (Free (x, T)) = Var((x, 1), T)
+ | var_of_free _ = error "preplay error: not a free variable"
+ val substitutions = map (`var_of_free #> swap) fixed_frees
+ val concl =
+ (case last_step of
+ Prove (_, _, t, _) => t
+ | _ => error "preplay error: unexpected conclusion of subblock")
+ in
+ concl |> subst_free substitutions |> make_thm |> single
+ end)
val ctxt = ctxt |> Config.put Metis_Tactic.verbose debug
|> obtain ? Config.put Metis_Tactic.new_skolem true
val goal =
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_proof.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_proof.ML Fri Feb 15 09:59:46 2013 +0100
@@ -22,7 +22,8 @@
Prove of isar_qualifier list * label * term * byline
and byline =
By_Metis of facts |
- Case_Split of isar_step list list * facts
+ Case_Split of isar_step list list * facts |
+ Subblock of isar_step list
val string_for_label : label -> string
val metis_steps_top_level : isar_step list -> int
@@ -45,7 +46,8 @@
Prove of isar_qualifier list * label * term * byline
and byline =
By_Metis of facts |
- Case_Split of isar_step list list * facts
+ Case_Split of isar_step list list * facts |
+ Subblock of isar_step list
fun string_for_label (s, num) = s ^ string_of_int num
@@ -57,6 +59,8 @@
| Prove (_, _, _, By_Metis _) => Integer.add 1
| Prove (_, _, _, Case_Split (cases, _)) =>
Integer.add (fold (Integer.add o metis_steps_total) cases 1)
+ | Prove (_, _, _, Subblock subproof) =>
+ Integer.add (metis_steps_total subproof + 1)
| _ => I) proof 0
end
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_provers.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_provers.ML Fri Feb 15 09:59:46 2013 +0100
@@ -35,7 +35,7 @@
max_mono_iters : int option,
max_new_mono_instances : int option,
isar_proofs : bool,
- isar_shrink : real,
+ isar_compress : real,
slice : bool,
minimize : bool option,
timeout : Time.time option,
@@ -322,7 +322,7 @@
max_mono_iters : int option,
max_new_mono_instances : int option,
isar_proofs : bool,
- isar_shrink : real,
+ isar_compress : real,
slice : bool,
minimize : bool option,
timeout : Time.time option,
@@ -635,7 +635,7 @@
best_max_mono_iters, best_max_new_mono_instances, ...} : atp_config)
(params as {debug, verbose, overlord, type_enc, strict, lam_trans,
uncurried_aliases, fact_filter, max_facts, max_mono_iters,
- max_new_mono_instances, isar_proofs, isar_shrink,
+ max_new_mono_instances, isar_proofs, isar_compress,
slice, timeout, preplay_timeout, ...})
minimize_command
({state, goal, subgoal, subgoal_count, factss, ...} : prover_problem) =
@@ -898,7 +898,7 @@
else
()
val isar_params =
- (debug, verbose, preplay_timeout, isar_shrink,
+ (debug, verbose, preplay_timeout, isar_compress,
pool, fact_names, sym_tab, atp_proof, goal)
val one_line_params =
(preplay, proof_banner mode name, used_facts,
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_reconstruct.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_reconstruct.ML Fri Feb 15 09:59:46 2013 +0100
@@ -54,7 +54,7 @@
open Sledgehammer_Util
open Sledgehammer_Proof
open Sledgehammer_Annotate
-open Sledgehammer_Shrink
+open Sledgehammer_Compress
structure String_Redirect = ATP_Proof_Redirect(
type key = step_name
@@ -499,14 +499,14 @@
(E and Vampire -- but see also "atp_proof_reconstruct.ML" regarding
Vampire). *)
do_metis ind "using [[metis_new_skolem]] " facts ^ "\n"
- | do_step ind (Prove (qs, l, t, By_Metis facts)) =
- do_indent ind ^ do_have qs ^ " " ^
- do_label l ^ do_term t ^ do_metis ind "" facts ^ "\n"
+ | do_step ind (Prove (qs, l, t, By_Metis facts)) =
+ do_prove ind qs l t facts
| do_step ind (Prove (qs, l, t, Case_Split (proofs, facts))) =
implode (map (prefix (do_indent ind ^ "moreover\n") o do_block ind)
proofs) ^
- do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^
- do_metis ind "" facts ^ "\n"
+ do_prove ind qs l t facts
+ | do_step ind (Prove (qs, l, t, Subblock proof)) =
+ do_block ind proof ^ do_prove ind qs l t ([], [])
and do_steps prefix suffix ind steps =
let val s = implode (map (do_step ind) steps) in
replicate_string (ind * indent_size - size prefix) " " ^ prefix ^
@@ -515,6 +515,9 @@
suffix ^ "\n"
end
and do_block ind proof = do_steps "{ " " }" (ind + 1) proof
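+ (* shared layout of a have/show step justified by a metis call *)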
+ and do_prove ind qs l t facts =
+ do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^
+ do_metis ind "" facts ^ "\n"
(* One-step proofs are pointless; better use the Metis one-liner
directly. *)
and do_proof [Prove (_, _, _, By_Metis _)] = ""
@@ -529,7 +532,8 @@
(case by of
By_Metis (ls, _) => ls
| Case_Split (proofs, (ls, _)) =>
- fold (union (op =) o used_labels_of) proofs ls)
+ fold (union (op =) o used_labels_of) proofs ls
+ | Subblock proof => used_labels_of proof)
| used_labels_of_step _ = []
and used_labels_of proof = fold (union (op =) o used_labels_of_step) proof []
@@ -543,10 +547,12 @@
Prove (qs, do_label l, t,
case by of
Case_Split (proofs, facts) =>
- Case_Split (map (map do_step) proofs, facts)
+ Case_Split (map do_proof proofs, facts)
+ | Subblock proof => Subblock (do_proof proof)
| _ => by)
| do_step step = step
- in map do_step proof end
+ and do_proof proof = map do_step proof
+ in do_proof proof end
fun prefix_for_depth n = replicate_string (n + 1)
@@ -561,12 +567,13 @@
end
fun do_facts subst =
apfst (maps (the_list o AList.lookup (op =) subst))
- fun do_byline subst depth by =
+ fun do_byline subst depth nexts by =
case by of
By_Metis facts => By_Metis (do_facts subst facts)
| Case_Split (proofs, facts) =>
Case_Split (map (do_proof subst (depth + 1) (1, 1)) proofs,
do_facts subst facts)
+ | Subblock proof => Subblock (do_proof subst depth nexts proof)
and do_proof _ _ _ [] = []
| do_proof subst depth (next_assum, next_have) (Assume (l, t) :: proof) =
if l = no_label then
@@ -576,26 +583,26 @@
Assume (l', t) ::
do_proof ((l, l') :: subst) depth (next_assum + 1, next_have) proof
end
- | do_proof subst depth (next_assum, next_have)
+ | do_proof subst depth (nexts as (next_assum, next_have))
(Obtain (qs, xs, l, t, by) :: proof) =
let
val (l, subst, next_have) = (l, subst, next_have) |> fresh_label depth
- val by = by |> do_byline subst depth
+ val by = by |> do_byline subst depth nexts
in
Obtain (qs, xs, l, t, by) ::
do_proof subst depth (next_assum, next_have) proof
end
- | do_proof subst depth (next_assum, next_have)
+ | do_proof subst depth (nexts as (next_assum, next_have))
(Prove (qs, l, t, by) :: proof) =
let
val (l, subst, next_have) = (l, subst, next_have) |> fresh_label depth
- val by = by |> do_byline subst depth
+ val by = by |> do_byline subst depth nexts
in
Prove (qs, l, t, by) ::
do_proof subst depth (next_assum, next_have) proof
end
- | do_proof subst depth nextp (step :: proof) =
- step :: do_proof subst depth nextp proof
+ | do_proof subst depth nexts (step :: proof) =
+ step :: do_proof subst depth nexts proof
in do_proof [] 0 (1, 1) end
val chain_direct_proof =
@@ -617,7 +624,9 @@
else
step
| chain_step _ (Prove (qs, l, t, Case_Split (proofs, facts))) =
- Prove (qs, l, t, Case_Split ((map (chain_proof NONE) proofs), facts))
+ Prove (qs, l, t, Case_Split (map (chain_proof NONE) proofs, facts))
+ | chain_step _ (Prove (qs, l, t, Subblock proof)) =
+ Prove (qs, l, t, Subblock (chain_proof NONE proof))
| chain_step _ step = step
and chain_proof _ [] = []
| chain_proof (prev as SOME _) (i :: is) =
@@ -630,7 +639,7 @@
* (string * stature) list vector * int Symtab.table * string proof * thm
fun isar_proof_text ctxt isar_proofs
- (debug, verbose, preplay_timeout, isar_shrink, pool, fact_names, sym_tab,
+ (debug, verbose, preplay_timeout, isar_compress, pool, fact_names, sym_tab,
atp_proof, goal)
(one_line_params as (_, _, _, _, subgoal, subgoal_count)) =
let
@@ -690,7 +699,8 @@
else
I))))
atp_proof
- fun is_clause_skolemize_rule [(s, _)] =
+ fun is_clause_skolemize_rule [atom as (s, _)] =
+ not (member (op =) tainted atom) andalso
Option.map (is_skolemize_rule o fst) (Symtab.lookup steps s) =
SOME true
| is_clause_skolemize_rule _ = false
@@ -747,12 +757,12 @@
|> redirect_graph axioms tainted bot
|> map (isar_step_of_direct_inf true)
|> append assms
- |> (if not preplay andalso isar_shrink <= 1.0 then
+ |> (if not preplay andalso isar_compress <= 1.0 then
rpair (false, (true, seconds 0.0))
else
- shrink_proof debug ctxt type_enc lam_trans preplay
+ compress_proof debug ctxt type_enc lam_trans preplay
preplay_timeout
- (if isar_proofs then isar_shrink else 1000.0))
+ (if isar_proofs then isar_compress else 1000.0))
(* |>> reorder_proof_to_minimize_jumps (* ? *) *)
|>> chain_direct_proof
|>> kill_useless_labels_in_proof
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_shrink.ML Fri Feb 15 09:41:25 2013 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,248 +0,0 @@
-(* Title: HOL/Tools/Sledgehammer/sledgehammer_shrink.ML
- Author: Jasmin Blanchette, TU Muenchen
- Author: Steffen Juilf Smolka, TU Muenchen
-
-Shrinking of reconstructed isar proofs.
-*)
-
-signature SLEDGEHAMMER_SHRINK =
-sig
- type isar_step = Sledgehammer_Proof.isar_step
- type preplay_time = Sledgehammer_Preplay.preplay_time
- val shrink_proof :
- bool -> Proof.context -> string -> string -> bool -> Time.time option
- -> real -> isar_step list -> isar_step list * (bool * preplay_time)
-end
-
-structure Sledgehammer_Shrink : SLEDGEHAMMER_SHRINK =
-struct
-
-open Sledgehammer_Util
-open Sledgehammer_Proof
-open Sledgehammer_Preplay
-
-(* Parameters *)
-val merge_timeout_slack = 1.2
-
-(* Data structures, orders *)
-val label_ord = prod_ord int_ord fast_string_ord o pairself swap
-structure Label_Table = Table(
- type key = label
- val ord = label_ord)
-
-(* clean vector interface *)
-fun get i v = Vector.sub (v, i)
-fun replace x i v = Vector.update (v, i, x)
-fun update f i v = replace (get i v |> f) i v
-fun v_map_index f v = Vector.foldr (op::) nil v |> map_index f |> Vector.fromList
-fun v_fold_index f v s =
- Vector.foldl (fn (x, (i, s)) => (i+1, f (i, x) s)) (0, s) v |> snd
-
-(* Queue interface to table *)
-fun pop tab key =
- let val v = hd (Inttab.lookup_list tab key) in
- (v, Inttab.remove_list (op =) (key, v) tab)
- end
-fun pop_max tab = pop tab (the (Inttab.max_key tab))
-fun add_list tab xs = fold (Inttab.insert_list (op =)) xs tab
-
-(* Main function for shrinking proofs *)
-fun shrink_proof debug ctxt type_enc lam_trans preplay preplay_timeout
- isar_shrink proof =
- let
- (* 60 seconds seems like a good interpreation of "no timeout" *)
- val preplay_timeout = preplay_timeout |> the_default (seconds 60.0)
-
- (* handle metis preplay fail *)
- local
- open Unsynchronized
- val metis_fail = ref false
- in
- fun handle_metis_fail try_metis () =
- try_metis () handle exn =>
- (if Exn.is_interrupt exn orelse debug then reraise exn
- else metis_fail := true; some_preplay_time)
- fun get_time lazy_time =
- if !metis_fail andalso not (Lazy.is_finished lazy_time)
- then some_preplay_time
- else Lazy.force lazy_time
- val metis_fail = fn () => !metis_fail
- end
-
- (* Shrink proof on top level - do not shrink case splits *)
- fun shrink_top_level on_top_level ctxt proof =
- let
- (* proof vector *)
- val proof_vect = proof |> map SOME |> Vector.fromList
- val n = Vector.length proof_vect
- val n_metis = metis_steps_top_level proof
- val target_n_metis = Real.fromInt n_metis / isar_shrink |> Real.round
-
- (* table for mapping from (top-level-)label to proof position *)
- fun update_table (i, Assume (l, _)) = Label_Table.update_new (l, i)
- | update_table (i, Obtain (_, _, l, _, _)) = Label_Table.update_new (l, i)
- | update_table (i, Prove (_, l, _, _)) = Label_Table.update_new (l, i)
- | update_table _ = I
- val label_index_table = fold_index update_table proof Label_Table.empty
- val lookup_indices = map_filter (Label_Table.lookup label_index_table)
-
- (* proof references *)
- fun refs (Obtain (_, _, _, _, By_Metis (lfs, _))) = lookup_indices lfs
- | refs (Prove (_, _, _, By_Metis (lfs, _))) = lookup_indices lfs
- | refs (Prove (_, _, _, Case_Split (cases, (lfs, _)))) =
- lookup_indices lfs @ maps (maps refs) cases
- | refs _ = []
- val refed_by_vect =
- Vector.tabulate (n, (fn _ => []))
- |> fold_index (fn (i, step) => fold (update (cons i)) (refs step)) proof
- |> Vector.map rev (* after rev, indices are sorted in ascending order *)
-
- (* candidates for elimination, use table as priority queue (greedy
- algorithm) *)
- fun add_if_cand proof_vect (i, [j]) =
- (case (the (get i proof_vect), the (get j proof_vect)) of
- (Prove (_, _, t, By_Metis _), Prove (_, _, _, By_Metis _)) =>
- cons (Term.size_of_term t, i)
- | (Prove (_, _, t, By_Metis _), Obtain (_, _, _, _, By_Metis _)) =>
- cons (Term.size_of_term t, i)
- | _ => I)
- | add_if_cand _ _ = I
- val cand_tab =
- v_fold_index (add_if_cand proof_vect) refed_by_vect []
- |> Inttab.make_list
-
- (* cache metis preplay times in lazy time vector *)
- val metis_time =
- v_map_index
- (if not preplay then K (zero_preplay_time) #> Lazy.value
- else
- apsnd the (* step *)
- #> apfst (fn i => try (get (i-1) #> the) proof_vect) (* succedent *)
- #> try_metis debug type_enc lam_trans ctxt preplay_timeout
- #> handle_metis_fail
- #> Lazy.lazy)
- proof_vect
-
- fun sum_up_time lazy_time_vector =
- Vector.foldl
- (apfst get_time #> uncurry add_preplay_time)
- zero_preplay_time lazy_time_vector
-
- (* Merging *)
- fun merge (Prove (_, label1, _, By_Metis (lfs1, gfs1))) step2 =
- let
- val (step_constructor, lfs2, gfs2) =
- (case step2 of
- (Prove (qs2, label2, t, By_Metis (lfs2, gfs2))) =>
- (fn by => Prove (qs2, label2, t, by), lfs2, gfs2)
- | (Obtain (qs2, xs, label2, t, By_Metis (lfs2, gfs2))) =>
- (fn by => Obtain (qs2, xs, label2, t, by), lfs2, gfs2)
- | _ => error "sledgehammer_shrink: unmergeable Isar steps" )
- val lfs = remove (op =) label1 lfs2 |> union (op =) lfs1
- val gfs = union (op =) gfs1 gfs2
- in step_constructor (By_Metis (lfs, gfs)) end
- | merge _ _ = error "sledgehammer_shrink: unmergeable Isar steps"
-
- fun try_merge metis_time (s1, i) (s2, j) =
- if not preplay then (merge s1 s2 |> SOME, metis_time)
- else
- (case get i metis_time |> Lazy.force of
- (true, _) => (NONE, metis_time)
- | (_, t1) =>
- (case get j metis_time |> Lazy.force of
- (true, _) => (NONE, metis_time)
- | (_, t2) =>
- let
- val s12 = merge s1 s2
- val timeout = time_mult merge_timeout_slack (Time.+(t1, t2))
- in
- case try_metis_quietly debug type_enc lam_trans ctxt timeout
- (NONE, s12) () of
- (true, _) => (NONE, metis_time)
- | exact_time =>
- (SOME s12, metis_time
- |> replace (zero_preplay_time |> Lazy.value) i
- |> replace (Lazy.value exact_time) j)
-
- end))
-
- fun merge_steps metis_time proof_vect refed_by cand_tab n' n_metis' =
- if Inttab.is_empty cand_tab
- orelse n_metis' <= target_n_metis
- orelse (on_top_level andalso n'<3)
- then
- (Vector.foldr
- (fn (NONE, proof) => proof | (SOME s, proof) => s :: proof)
- [] proof_vect,
- sum_up_time metis_time)
- else
- let
- val (i, cand_tab) = pop_max cand_tab
- val j = get i refed_by |> the_single
- val s1 = get i proof_vect |> the
- val s2 = get j proof_vect |> the
- in
- case try_merge metis_time (s1, i) (s2, j) of
- (NONE, metis_time) =>
- merge_steps metis_time proof_vect refed_by cand_tab n' n_metis'
- | (s, metis_time) =>
- let
- val refs = refs s1
- val refed_by = refed_by |> fold
- (update (Ord_List.remove int_ord i #> Ord_List.insert int_ord j)) refs
- val new_candidates =
- fold (add_if_cand proof_vect)
- (map (fn i => (i, get i refed_by)) refs) []
- val cand_tab = add_list cand_tab new_candidates
- val proof_vect = proof_vect |> replace NONE i |> replace s j
- in
- merge_steps metis_time proof_vect refed_by cand_tab (n' - 1)
- (n_metis' - 1)
- end
- end
- in
- merge_steps metis_time proof_vect refed_by_vect cand_tab n n_metis
- end
-
- fun do_proof on_top_level ctxt proof =
- let
- (* Enrich context with top-level facts *)
- val thy = Proof_Context.theory_of ctxt
- (* TODO: add Skolem variables to context? *)
- fun enrich_with_fact l t =
- Proof_Context.put_thms false
- (string_for_label l, SOME [Skip_Proof.make_thm thy t])
- fun enrich_with_step (Assume (l, t)) = enrich_with_fact l t
- | enrich_with_step (Obtain (_, _, l, t, _)) = enrich_with_fact l t
- | enrich_with_step (Prove (_, l, t, _)) = enrich_with_fact l t
- | enrich_with_step _ = I
- val rich_ctxt = fold enrich_with_step proof ctxt
-
- (* Shrink case_splits and top-levl *)
- val ((proof, top_level_time), lower_level_time) =
- proof |> do_case_splits rich_ctxt
- |>> shrink_top_level on_top_level rich_ctxt
- in
- (proof, add_preplay_time lower_level_time top_level_time)
- end
-
- and do_case_splits ctxt proof =
- let
- fun shrink_each_and_collect_time shrink candidates =
- let fun f_m cand time = shrink cand ||> add_preplay_time time
- in fold_map f_m candidates zero_preplay_time end
- val shrink_case_split =
- shrink_each_and_collect_time (do_proof false ctxt)
- fun shrink (Prove (qs, l, t, Case_Split (cases, facts))) =
- let val (cases, time) = shrink_case_split cases
- in (Prove (qs, l, t, Case_Split (cases, facts)), time) end
- | shrink step = (step, zero_preplay_time)
- in
- shrink_each_and_collect_time shrink proof
- end
- in
- do_proof true ctxt proof
- |> apsnd (pair (metis_fail ()))
- end
-
-end
--- a/src/HOL/Tools/hologic.ML Fri Feb 15 09:41:25 2013 +0100
+++ b/src/HOL/Tools/hologic.ML Fri Feb 15 09:59:46 2013 +0100
@@ -686,7 +686,7 @@
val random_seedT = mk_prodT (code_numeralT, code_numeralT);
-fun mk_random T t = Const ("Quickcheck.random_class.random", code_numeralT
+fun mk_random T t = Const ("Quickcheck_Random.random_class.random", code_numeralT
--> random_seedT --> mk_prodT (mk_prodT (T, unitT --> termT), random_seedT)) $ t;
end;
--- a/src/Pure/General/graphics_file.scala Fri Feb 15 09:41:25 2013 +0100
+++ b/src/Pure/General/graphics_file.scala Fri Feb 15 09:59:46 2013 +0100
@@ -8,8 +8,11 @@
import java.awt.Graphics2D
+import java.awt.geom.Rectangle2D
import java.io.{FileOutputStream, BufferedOutputStream, File => JFile}
+import org.jfree.chart.JFreeChart
+
object Graphics_File
{
@@ -44,5 +47,14 @@
def write_pdf(path: Path, paint: Graphics2D => Unit, width: Int, height: Int): Unit =
write_pdf(path.file, paint, width, height)
+
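+ /* render a JFreeChart into a PDF of the given size */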
+ def write_pdf(file: JFile, chart: JFreeChart, width: Int, height: Int)
+ {
+ def paint(gfx: Graphics2D) = chart.draw(gfx, new Rectangle2D.Double(0, 0, width, height))
+ write_pdf(file, paint _, width, height)
+ }
+
+ def write_pdf(path: Path, chart: JFreeChart, width: Int, height: Int): Unit =
+ write_pdf(path.file, chart, width, height)
}