merged
author Andreas Lochbihler
Fri, 19 Sep 2014 08:26:03 +0200
changeset 58384 00aaaa7bd752
parent 58382 2ee61d28c667 (diff)
parent 58383 09a2c3e08ec2 (current diff)
child 58385 9cbef70cff8e
merged
src/HOL/Library/Phantom_Type.thy
--- a/NEWS	Thu Sep 18 15:23:23 2014 +0200
+++ b/NEWS	Fri Sep 19 08:26:03 2014 +0200
@@ -33,9 +33,11 @@
 Minor INCOMPATIBILITY.
 
 * New (co)datatype package:
-  - The 'datatype_new' command has been renamed 'datatype'. The old command of
-    that name is now called 'old_datatype'. See 'isabelle doc datatypes' for
-    information on porting.
+  - The 'datatype_new' command has been renamed 'datatype'. The old
+    command of that name is now called 'old_datatype' and is provided
+    by "~~/src/HOL/Library/Old_Datatype.thy". See
+    'isabelle doc datatypes' for information on porting.
+    INCOMPATIBILITY.
   - Renamed theorems:
       disc_corec ~> corec_disc
       disc_corec_iff ~> corec_disc_iff
@@ -67,11 +69,18 @@
 
 * Old datatype package:
   - The old 'datatype' command has been renamed 'old_datatype', and
-    'rep_datatype' has been renamed 'old_rep_datatype'. See
+    'rep_datatype' has been renamed 'old_rep_datatype'. They are
+    provided by "~~/src/HOL/Library/Old_Datatype.thy". See
     'isabelle doc datatypes' for information on porting.
+    INCOMPATIBILITY.
   - Renamed theorems:
       weak_case_cong ~> case_cong_weak
     INCOMPATIBILITY.
+  - Renamed theory:
+      ~~/src/HOL/Datatype.thy ~> ~~/src/HOL/Library/Old_Datatype.thy
+    INCOMPATIBILITY.
+
+* Product over lists via constant "listprod".
 
 * Sledgehammer:
   - Minimization is now always enabled by default.
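The NEWS entries above on the renamed (co)datatype commands amount to the following porting pattern. A minimal sketch, assuming a hypothetical theory name and type names that are not part of this changeset:

    theory Porting_Example
    imports "~~/src/HOL/Library/Old_Datatype"
    begin

    (* New package: what used to be 'datatype_new' is now plain 'datatype'. *)
    datatype 'a bintree = Tip | Branch 'a "'a bintree" "'a bintree"

    (* Old package: the former 'datatype' command survives as 'old_datatype',
       available only after importing ~~/src/HOL/Library/Old_Datatype.
       The type and constructor names here are hypothetical. *)
    old_datatype 'a old_bintree = OldTip | OldBranch 'a "'a old_bintree" "'a old_bintree"

    end
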
--- a/src/Doc/Datatypes/Datatypes.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/Doc/Datatypes/Datatypes.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -2877,22 +2877,22 @@
 \item[@{text "t."}\hthm{rel_eq_onp}\rm:] ~ \\
 @{thm list.rel_eq_onp[no_vars]}
 
-\item[@{text "t."}\hthm{left_total_rel} @{text "[transfer]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{left_total_rel} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.left_total_rel[no_vars]}
 
-\item[@{text "t."}\hthm{left_unique_rel} @{text "[transfer]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{left_unique_rel} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.left_unique_rel[no_vars]}
 
-\item[@{text "t."}\hthm{right_total_rel} @{text "[transfer]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{right_total_rel} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.right_total_rel[no_vars]}
 
-\item[@{text "t."}\hthm{right_unique_rel} @{text "[transfer]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{right_unique_rel} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.right_unique_rel[no_vars]}
 
-\item[@{text "t."}\hthm{bi_total_rel} @{text "[transfer]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{bi_total_rel} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.bi_total_rel[no_vars]}
 
-\item[@{text "t."}\hthm{bi_unique_rel} @{text "[transfer]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{bi_unique_rel} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.bi_unique_rel[no_vars]}
 
 \end{description}
--- a/src/Doc/Isar_Ref/HOL_Specific.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/Doc/Isar_Ref/HOL_Specific.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -1,5 +1,6 @@
 theory HOL_Specific
-imports Base Main "~~/src/HOL/Library/Old_Recdef" "~~/src/Tools/Adhoc_Overloading"
+imports Base "~~/src/HOL/Library/Old_Datatype" "~~/src/HOL/Library/Old_Recdef"
+  "~~/src/Tools/Adhoc_Overloading"
 begin
 
 chapter {* Higher-Order Logic *}
--- a/src/Doc/Tutorial/ToyList/ToyList.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/Doc/Tutorial/ToyList/ToyList.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -1,14 +1,14 @@
 theory ToyList
-imports Old_Datatype
+imports BNF_Least_Fixpoint
 begin
 
 text{*\noindent
 HOL already has a predefined theory of lists called @{text List} ---
 @{text ToyList} is merely a small fragment of it chosen as an example. In
 contrast to what is recommended in \S\ref{sec:Basic:Theories},
-@{text ToyList} is not based on @{text Main} but on @{text Datatype}, a
-theory that contains pretty much everything but lists, thus avoiding
-ambiguities caused by defining lists twice.
+@{text ToyList} is not based on @{text Main} but on
+@{text BNF_Least_Fixpoint}, a theory that contains pretty much everything
+but lists, thus avoiding ambiguities caused by defining lists twice.
 *}
 
 datatype 'a list = Nil                          ("[]")
--- a/src/Doc/Tutorial/ToyList/ToyList1.txt	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/Doc/Tutorial/ToyList/ToyList1.txt	Fri Sep 19 08:26:03 2014 +0200
@@ -1,5 +1,5 @@
 theory ToyList
-imports Old_Datatype
+imports BNF_Least_Fixpoint
 begin
 
 datatype 'a list = Nil                          ("[]")
--- a/src/Doc/Tutorial/ToyList/ToyList_Test.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/Doc/Tutorial/ToyList/ToyList_Test.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -1,5 +1,5 @@
 theory ToyList_Test
-imports Old_Datatype
+imports BNF_Least_Fixpoint
 begin
 
 ML {*
--- a/src/HOL/BNF_Least_Fixpoint.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/BNF_Least_Fixpoint.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -231,8 +231,6 @@
 ML_file "Tools/BNF/bnf_lfp_compat.ML"
 ML_file "Tools/BNF/bnf_lfp_rec_sugar_more.ML"
 ML_file "Tools/BNF/bnf_lfp_size.ML"
-ML_file "Tools/Function/old_size.ML"
-ML_file "Tools/datatype_realizer.ML"
 
 hide_fact (open) id_transfer
 
--- a/src/HOL/Bali/Basis.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Bali/Basis.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -145,9 +145,7 @@
 
 section "sums"
 
-hide_const In0 In1
-
-notation case_sum  (infixr "'(+')"80)
+notation case_sum  (infixr "'(+')" 80)
 
 primrec the_Inl :: "'a + 'b \<Rightarrow> 'a"
   where "the_Inl (Inl a) = a"
--- a/src/HOL/Basic_BNF_Least_Fixpoints.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Basic_BNF_Least_Fixpoints.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -9,32 +9,6 @@
 imports BNF_Least_Fixpoint
 begin
 
-subsection {* Size setup (TODO: Merge with rest of file) *}
-
-lemma size_bool[code]: "size (b\<Colon>bool) = 0"
-  by (cases b) auto
-
-lemma size_nat[simp, code]: "size (n\<Colon>nat) = n"
-  by (induct n) simp_all
-
-declare prod.size[no_atp]
-
-lemma size_sum_o_map: "size_sum g1 g2 \<circ> map_sum f1 f2 = size_sum (g1 \<circ> f1) (g2 \<circ> f2)"
-  by (rule ext) (case_tac x, auto)
-
-lemma size_prod_o_map: "size_prod g1 g2 \<circ> map_prod f1 f2 = size_prod (g1 \<circ> f1) (g2 \<circ> f2)"
-  by (rule ext) auto
-
-setup {*
-BNF_LFP_Size.register_size_global @{type_name sum} @{const_name size_sum} @{thms sum.size}
-  @{thms size_sum_o_map}
-#> BNF_LFP_Size.register_size_global @{type_name prod} @{const_name size_prod} @{thms prod.size}
-  @{thms size_prod_o_map}
-*}
-
-
-subsection {* FP sugar setup *}
-
 definition xtor :: "'a \<Rightarrow> 'a" where
   "xtor x = x"
 
@@ -55,15 +29,6 @@
 
 lemmas xtor_inject = xtor_rel[of "op ="]
 
-definition ctor_rec :: "'a \<Rightarrow> 'a" where
-  "ctor_rec x = x"
-
-lemma ctor_rec: "g = id \<Longrightarrow> ctor_rec f (xtor x) = f ((id_bnf \<circ> g \<circ> id_bnf) x)"
-  unfolding ctor_rec_def id_bnf_def xtor_def comp_def id_def by hypsubst (rule refl)
-
-lemma ctor_rec_o_map: "ctor_rec f \<circ> g = ctor_rec (f \<circ> (id_bnf \<circ> g \<circ> id_bnf))"
-  unfolding ctor_rec_def id_bnf_def comp_def by (rule refl)
-
 lemma xtor_rel_induct: "(\<And>x y. vimage2p id_bnf id_bnf R x y \<Longrightarrow> IR (xtor x) (xtor y)) \<Longrightarrow> R \<le> IR"
   unfolding xtor_def vimage2p_def id_bnf_def by default
 
@@ -76,12 +41,30 @@
 lemma Pair_def_alt: "Pair \<equiv> (\<lambda>a b. xtor (id_bnf (a, b)))"
   unfolding xtor_def id_bnf_def by (rule reflexive)
 
+definition ctor_rec :: "'a \<Rightarrow> 'a" where
+  "ctor_rec x = x"
+
+lemma ctor_rec: "g = id \<Longrightarrow> ctor_rec f (xtor x) = f ((id_bnf \<circ> g \<circ> id_bnf) x)"
+  unfolding ctor_rec_def id_bnf_def xtor_def comp_def id_def by hypsubst (rule refl)
+
+lemma ctor_rec_def_alt: "f = ctor_rec (f \<circ> id_bnf)"
+  unfolding ctor_rec_def id_bnf_def comp_def by (rule refl)
+
+lemma ctor_rec_o_map: "ctor_rec f \<circ> g = ctor_rec (f \<circ> (id_bnf \<circ> g \<circ> id_bnf))"
+  unfolding ctor_rec_def id_bnf_def comp_def by (rule refl)
+
 ML_file "Tools/BNF/bnf_lfp_basic_sugar.ML"
 
+thm sum.rec_o_map
+thm sum.size_o_map
+
+thm prod.rec_o_map
+thm prod.size_o_map
+
 hide_const (open) xtor ctor_rec
 
 hide_fact (open)
   xtor_def xtor_map xtor_set xtor_rel xtor_induct xtor_xtor xtor_inject ctor_rec_def ctor_rec
-  ctor_rec_o_map xtor_rel_induct Inl_def_alt Inr_def_alt Pair_def_alt
+  ctor_rec_def_alt ctor_rec_o_map xtor_rel_induct Inl_def_alt Inr_def_alt Pair_def_alt
 
 end
--- a/src/HOL/Code_Numeral.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Code_Numeral.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -809,32 +809,24 @@
   shows P
   using assms by transfer blast
 
-lemma [simp, code]:
-  "size_natural = nat_of_natural"
-proof (rule ext)
-  fix n
-  show "size_natural n = nat_of_natural n"
-    by (induct n) simp_all
-qed
+instantiation natural :: size
+begin
 
-lemma [simp, code]:
-  "size = nat_of_natural"
-proof (rule ext)
-  fix n
-  show "size n = nat_of_natural n"
-    by (induct n) simp_all
-qed
+definition size_natural :: "natural \<Rightarrow> nat" where
+  [simp, code]: "size_natural = nat_of_natural"
+
+instance ..
+
+end
 
 lemma natural_decr [termination_simp]:
   "n \<noteq> 0 \<Longrightarrow> nat_of_natural n - Nat.Suc 0 < nat_of_natural n"
   by transfer simp
 
-lemma natural_zero_minus_one:
-  "(0::natural) - 1 = 0"
-  by simp
+lemma natural_zero_minus_one: "(0::natural) - 1 = 0"
+  by (rule zero_diff)
 
-lemma Suc_natural_minus_one:
-  "Suc n - 1 = n"
+lemma Suc_natural_minus_one: "Suc n - 1 = n"
   by transfer simp
 
 hide_const (open) Suc
@@ -914,16 +906,13 @@
   "HOL.equal m n \<longleftrightarrow> HOL.equal (integer_of_natural m) (integer_of_natural n)"
   by transfer (simp add: equal)
 
-lemma [code nbe]:
-  "HOL.equal n (n::natural) \<longleftrightarrow> True"
-  by (simp add: equal)
+lemma [code nbe]: "HOL.equal n (n::natural) \<longleftrightarrow> True"
+  by (rule equal_class.equal_refl)
 
-lemma [code]:
-  "m \<le> n \<longleftrightarrow> integer_of_natural m \<le> integer_of_natural n"
+lemma [code]: "m \<le> n \<longleftrightarrow> integer_of_natural m \<le> integer_of_natural n"
   by transfer simp
 
-lemma [code]:
-  "m < n \<longleftrightarrow> integer_of_natural m < integer_of_natural n"
+lemma [code]: "m < n \<longleftrightarrow> integer_of_natural m < integer_of_natural n"
   by transfer simp
 
 hide_const (open) Nat
@@ -939,4 +928,3 @@
   functions integer_of_natural natural_of_integer
 
 end
-
--- a/src/HOL/Datatype_Examples/Compat.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Datatype_Examples/Compat.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -8,7 +8,7 @@
 header {* Tests for Compatibility with the Old Datatype Package *}
 
 theory Compat
-imports Main
+imports "~~/src/HOL/Library/Old_Datatype"
 begin
 
 subsection {* Viewing and Registering New-Style Datatypes as Old-Style Ones *}
--- a/src/HOL/Fun_Def.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Fun_Def.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -5,7 +5,7 @@
 header {* Function Definitions and Termination Proofs *}
 
 theory Fun_Def
-imports Partial_Function SAT
+imports Basic_BNF_Least_Fixpoints Partial_Function SAT
 keywords "function" "termination" :: thy_goal and "fun" "fun_cases" :: thy_decl
 begin
 
--- a/src/HOL/Groups_List.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Groups_List.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -1,7 +1,7 @@
 
 (* Author: Tobias Nipkow, TU Muenchen *)
 
-header {* Sum over lists *}
+header {* Sum and product over lists *}
 
 theory Groups_List
 imports List
@@ -289,4 +289,63 @@
 
 end
 
+
+subsection {* List product *}
+
+context monoid_mult
+begin
+
+definition listprod :: "'a list \<Rightarrow> 'a"
+where
+  "listprod  = monoid_list.F times 1"
+
+sublocale listprod!: monoid_list times 1
+where
+  "monoid_list.F times 1 = listprod"
+proof -
+  show "monoid_list times 1" ..
+  then interpret listprod!: monoid_list times 1 .
+  from listprod_def show "monoid_list.F times 1 = listprod" by rule
+qed
+
 end
+
+context comm_monoid_mult
+begin
+
+sublocale listprod!: comm_monoid_list times 1
+where
+  "monoid_list.F times 1 = listprod"
+proof -
+  show "comm_monoid_list times 1" ..
+  then interpret listprod!: comm_monoid_list times 1 .
+  from listprod_def show "monoid_list.F times 1 = listprod" by rule
+qed
+
+sublocale setprod!: comm_monoid_list_set times 1
+where
+  "monoid_list.F times 1 = listprod"
+  and "comm_monoid_set.F times 1 = setprod"
+proof -
+  show "comm_monoid_list_set times 1" ..
+  then interpret setprod!: comm_monoid_list_set times 1 .
+  from listprod_def show "monoid_list.F times 1 = listprod" by rule
+  from setprod_def show "comm_monoid_set.F times 1 = setprod" by rule
+qed
+
+end
+
+text {* Some syntactic sugar: *}
+
+syntax
+  "_listprod" :: "pttrn => 'a list => 'b => 'b"    ("(3PROD _<-_. _)" [0, 51, 10] 10)
+syntax (xsymbols)
+  "_listprod" :: "pttrn => 'a list => 'b => 'b"    ("(3\<Prod>_\<leftarrow>_. _)" [0, 51, 10] 10)
+syntax (HTML output)
+  "_listprod" :: "pttrn => 'a list => 'b => 'b"    ("(3\<Prod>_\<leftarrow>_. _)" [0, 51, 10] 10)
+
+translations -- {* Beware of argument permutation! *}
+  "PROD x<-xs. b" == "CONST listprod (CONST map (%x. b) xs)"
+  "\<Prod>x\<leftarrow>xs. b" == "CONST listprod (CONST map (%x. b) xs)"
+
+end
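The new "listprod" constant and its PROD syntax introduced above are intended to be used as follows. A hedged sketch, not part of the changeset, assuming the sublocale interpretation exports listprod.Nil and listprod.Cons as simp rules (as the analogous listsum setup does):

    (* Illustrative lemmas only; they rely on the assumption stated above. *)
    lemma
      fixes a b c :: "'a :: comm_monoid_mult"
      shows "listprod [a, b, c] = a * b * c"
      by (simp add: ac_simps)

    lemma "(\<Prod>x\<leftarrow>[a, b, c]. x * x) = listprod [a * a, b * b, c * c]"
      by simp
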
--- a/src/HOL/Induct/Sexp.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Induct/Sexp.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -7,7 +7,7 @@
 *)
 
 theory Sexp
-imports Main
+imports "~~/src/HOL/Library/Old_Datatype"
 begin
 
 type_synonym 'a item = "'a Old_Datatype.item"
--- a/src/HOL/Library/Countable.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Library/Countable.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -7,7 +7,7 @@
 header {* Encoding (almost) everything into natural numbers *}
 
 theory Countable
-imports Main Rat Nat_Bijection
+imports Old_Datatype Rat Nat_Bijection
 begin
 
 subsection {* The class of countable types *}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/Old_Datatype.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -0,0 +1,531 @@
+(*  Title:      HOL/Library/Old_Datatype.thy
+    Author:     Lawrence C Paulson, Cambridge University Computer Laboratory
+    Author:     Stefan Berghofer and Markus Wenzel, TU Muenchen
+*)
+
+header {* Old Datatype package: constructing datatypes from Cartesian Products and Disjoint Sums *}
+
+theory Old_Datatype
+imports "../Main"
+keywords "old_datatype" :: thy_decl
+begin
+
+ML_file "~~/src/HOL/Tools/Old_Datatype/old_size.ML"
+ML_file "~~/src/HOL/Tools/datatype_realizer.ML"
+
+
+subsection {* The datatype universe *}
+
+definition "Node = {p. EX f x k. p = (f :: nat => 'b + nat, x ::'a + nat) & f k = Inr 0}"
+
+typedef ('a, 'b) node = "Node :: ((nat => 'b + nat) * ('a + nat)) set"
+  morphisms Rep_Node Abs_Node
+  unfolding Node_def by auto
+
+text{*Datatypes will be represented by sets of type @{text node}*}
+
+type_synonym 'a item        = "('a, unit) node set"
+type_synonym ('a, 'b) dtree = "('a, 'b) node set"
+
+consts
+  Push      :: "[('b + nat), nat => ('b + nat)] => (nat => ('b + nat))"
+
+  Push_Node :: "[('b + nat), ('a, 'b) node] => ('a, 'b) node"
+  ndepth    :: "('a, 'b) node => nat"
+
+  Atom      :: "('a + nat) => ('a, 'b) dtree"
+  Leaf      :: "'a => ('a, 'b) dtree"
+  Numb      :: "nat => ('a, 'b) dtree"
+  Scons     :: "[('a, 'b) dtree, ('a, 'b) dtree] => ('a, 'b) dtree"
+  In0       :: "('a, 'b) dtree => ('a, 'b) dtree"
+  In1       :: "('a, 'b) dtree => ('a, 'b) dtree"
+  Lim       :: "('b => ('a, 'b) dtree) => ('a, 'b) dtree"
+
+  ntrunc    :: "[nat, ('a, 'b) dtree] => ('a, 'b) dtree"
+
+  uprod     :: "[('a, 'b) dtree set, ('a, 'b) dtree set]=> ('a, 'b) dtree set"
+  usum      :: "[('a, 'b) dtree set, ('a, 'b) dtree set]=> ('a, 'b) dtree set"
+
+  Split     :: "[[('a, 'b) dtree, ('a, 'b) dtree]=>'c, ('a, 'b) dtree] => 'c"
+  Case      :: "[[('a, 'b) dtree]=>'c, [('a, 'b) dtree]=>'c, ('a, 'b) dtree] => 'c"
+
+  dprod     :: "[(('a, 'b) dtree * ('a, 'b) dtree)set, (('a, 'b) dtree * ('a, 'b) dtree)set]
+                => (('a, 'b) dtree * ('a, 'b) dtree)set"
+  dsum      :: "[(('a, 'b) dtree * ('a, 'b) dtree)set, (('a, 'b) dtree * ('a, 'b) dtree)set]
+                => (('a, 'b) dtree * ('a, 'b) dtree)set"
+
+
+defs
+
+  Push_Node_def:  "Push_Node == (%n x. Abs_Node (apfst (Push n) (Rep_Node x)))"
+
+  (*crude "lists" of nats -- needed for the constructions*)
+  Push_def:   "Push == (%b h. case_nat b h)"
+
+  (** operations on S-expressions -- sets of nodes **)
+
+  (*S-expression constructors*)
+  Atom_def:   "Atom == (%x. {Abs_Node((%k. Inr 0, x))})"
+  Scons_def:  "Scons M N == (Push_Node (Inr 1) ` M) Un (Push_Node (Inr (Suc 1)) ` N)"
+
+  (*Leaf nodes, with arbitrary or nat labels*)
+  Leaf_def:   "Leaf == Atom o Inl"
+  Numb_def:   "Numb == Atom o Inr"
+
+  (*Injections of the "disjoint sum"*)
+  In0_def:    "In0(M) == Scons (Numb 0) M"
+  In1_def:    "In1(M) == Scons (Numb 1) M"
+
+  (*Function spaces*)
+  Lim_def: "Lim f == Union {z. ? x. z = Push_Node (Inl x) ` (f x)}"
+
+  (*the set of nodes with depth less than k*)
+  ndepth_def: "ndepth(n) == (%(f,x). LEAST k. f k = Inr 0) (Rep_Node n)"
+  ntrunc_def: "ntrunc k N == {n. n:N & ndepth(n)<k}"
+
+  (*products and sums for the "universe"*)
+  uprod_def:  "uprod A B == UN x:A. UN y:B. { Scons x y }"
+  usum_def:   "usum A B == In0`A Un In1`B"
+
+  (*the corresponding eliminators*)
+  Split_def:  "Split c M == THE u. EX x y. M = Scons x y & u = c x y"
+
+  Case_def:   "Case c d M == THE u.  (EX x . M = In0(x) & u = c(x))
+                                  | (EX y . M = In1(y) & u = d(y))"
+
+
+  (** equality for the "universe" **)
+
+  dprod_def:  "dprod r s == UN (x,x'):r. UN (y,y'):s. {(Scons x y, Scons x' y')}"
+
+  dsum_def:   "dsum r s == (UN (x,x'):r. {(In0(x),In0(x'))}) Un
+                          (UN (y,y'):s. {(In1(y),In1(y'))})"
+
+
+
+lemma apfst_convE: 
+    "[| q = apfst f p;  !!x y. [| p = (x,y);  q = (f(x),y) |] ==> R  
+     |] ==> R"
+by (force simp add: apfst_def)
+
+(** Push -- an injection, analogous to Cons on lists **)
+
+lemma Push_inject1: "Push i f = Push j g  ==> i=j"
+apply (simp add: Push_def fun_eq_iff) 
+apply (drule_tac x=0 in spec, simp) 
+done
+
+lemma Push_inject2: "Push i f = Push j g  ==> f=g"
+apply (auto simp add: Push_def fun_eq_iff) 
+apply (drule_tac x="Suc x" in spec, simp) 
+done
+
+lemma Push_inject:
+    "[| Push i f =Push j g;  [| i=j;  f=g |] ==> P |] ==> P"
+by (blast dest: Push_inject1 Push_inject2) 
+
+lemma Push_neq_K0: "Push (Inr (Suc k)) f = (%z. Inr 0) ==> P"
+by (auto simp add: Push_def fun_eq_iff split: nat.split_asm)
+
+lemmas Abs_Node_inj = Abs_Node_inject [THEN [2] rev_iffD1]
+
+
+(*** Introduction rules for Node ***)
+
+lemma Node_K0_I: "(%k. Inr 0, a) : Node"
+by (simp add: Node_def)
+
+lemma Node_Push_I: "p: Node ==> apfst (Push i) p : Node"
+apply (simp add: Node_def Push_def) 
+apply (fast intro!: apfst_conv nat.case(2)[THEN trans])
+done
+
+
+subsection{*Freeness: Distinctness of Constructors*}
+
+(** Scons vs Atom **)
+
+lemma Scons_not_Atom [iff]: "Scons M N \<noteq> Atom(a)"
+unfolding Atom_def Scons_def Push_Node_def One_nat_def
+by (blast intro: Node_K0_I Rep_Node [THEN Node_Push_I] 
+         dest!: Abs_Node_inj 
+         elim!: apfst_convE sym [THEN Push_neq_K0])  
+
+lemmas Atom_not_Scons [iff] = Scons_not_Atom [THEN not_sym]
+
+
+(*** Injectiveness ***)
+
+(** Atomic nodes **)
+
+lemma inj_Atom: "inj(Atom)"
+apply (simp add: Atom_def)
+apply (blast intro!: inj_onI Node_K0_I dest!: Abs_Node_inj)
+done
+lemmas Atom_inject = inj_Atom [THEN injD]
+
+lemma Atom_Atom_eq [iff]: "(Atom(a)=Atom(b)) = (a=b)"
+by (blast dest!: Atom_inject)
+
+lemma inj_Leaf: "inj(Leaf)"
+apply (simp add: Leaf_def o_def)
+apply (rule inj_onI)
+apply (erule Atom_inject [THEN Inl_inject])
+done
+
+lemmas Leaf_inject [dest!] = inj_Leaf [THEN injD]
+
+lemma inj_Numb: "inj(Numb)"
+apply (simp add: Numb_def o_def)
+apply (rule inj_onI)
+apply (erule Atom_inject [THEN Inr_inject])
+done
+
+lemmas Numb_inject [dest!] = inj_Numb [THEN injD]
+
+
+(** Injectiveness of Push_Node **)
+
+lemma Push_Node_inject:
+    "[| Push_Node i m =Push_Node j n;  [| i=j;  m=n |] ==> P  
+     |] ==> P"
+apply (simp add: Push_Node_def)
+apply (erule Abs_Node_inj [THEN apfst_convE])
+apply (rule Rep_Node [THEN Node_Push_I])+
+apply (erule sym [THEN apfst_convE]) 
+apply (blast intro: Rep_Node_inject [THEN iffD1] trans sym elim!: Push_inject)
+done
+
+
+(** Injectiveness of Scons **)
+
+lemma Scons_inject_lemma1: "Scons M N <= Scons M' N' ==> M<=M'"
+unfolding Scons_def One_nat_def
+by (blast dest!: Push_Node_inject)
+
+lemma Scons_inject_lemma2: "Scons M N <= Scons M' N' ==> N<=N'"
+unfolding Scons_def One_nat_def
+by (blast dest!: Push_Node_inject)
+
+lemma Scons_inject1: "Scons M N = Scons M' N' ==> M=M'"
+apply (erule equalityE)
+apply (iprover intro: equalityI Scons_inject_lemma1)
+done
+
+lemma Scons_inject2: "Scons M N = Scons M' N' ==> N=N'"
+apply (erule equalityE)
+apply (iprover intro: equalityI Scons_inject_lemma2)
+done
+
+lemma Scons_inject:
+    "[| Scons M N = Scons M' N';  [| M=M';  N=N' |] ==> P |] ==> P"
+by (iprover dest: Scons_inject1 Scons_inject2)
+
+lemma Scons_Scons_eq [iff]: "(Scons M N = Scons M' N') = (M=M' & N=N')"
+by (blast elim!: Scons_inject)
+
+(*** Distinctness involving Leaf and Numb ***)
+
+(** Scons vs Leaf **)
+
+lemma Scons_not_Leaf [iff]: "Scons M N \<noteq> Leaf(a)"
+unfolding Leaf_def o_def by (rule Scons_not_Atom)
+
+lemmas Leaf_not_Scons  [iff] = Scons_not_Leaf [THEN not_sym]
+
+(** Scons vs Numb **)
+
+lemma Scons_not_Numb [iff]: "Scons M N \<noteq> Numb(k)"
+unfolding Numb_def o_def by (rule Scons_not_Atom)
+
+lemmas Numb_not_Scons [iff] = Scons_not_Numb [THEN not_sym]
+
+
+(** Leaf vs Numb **)
+
+lemma Leaf_not_Numb [iff]: "Leaf(a) \<noteq> Numb(k)"
+by (simp add: Leaf_def Numb_def)
+
+lemmas Numb_not_Leaf [iff] = Leaf_not_Numb [THEN not_sym]
+
+
+(*** ndepth -- the depth of a node ***)
+
+lemma ndepth_K0: "ndepth (Abs_Node(%k. Inr 0, x)) = 0"
+by (simp add: ndepth_def  Node_K0_I [THEN Abs_Node_inverse] Least_equality)
+
+lemma ndepth_Push_Node_aux:
+     "case_nat (Inr (Suc i)) f k = Inr 0 --> Suc(LEAST x. f x = Inr 0) <= k"
+apply (induct_tac "k", auto)
+apply (erule Least_le)
+done
+
+lemma ndepth_Push_Node: 
+    "ndepth (Push_Node (Inr (Suc i)) n) = Suc(ndepth(n))"
+apply (insert Rep_Node [of n, unfolded Node_def])
+apply (auto simp add: ndepth_def Push_Node_def
+                 Rep_Node [THEN Node_Push_I, THEN Abs_Node_inverse])
+apply (rule Least_equality)
+apply (auto simp add: Push_def ndepth_Push_Node_aux)
+apply (erule LeastI)
+done
+
+
+(*** ntrunc applied to the various node sets ***)
+
+lemma ntrunc_0 [simp]: "ntrunc 0 M = {}"
+by (simp add: ntrunc_def)
+
+lemma ntrunc_Atom [simp]: "ntrunc (Suc k) (Atom a) = Atom(a)"
+by (auto simp add: Atom_def ntrunc_def ndepth_K0)
+
+lemma ntrunc_Leaf [simp]: "ntrunc (Suc k) (Leaf a) = Leaf(a)"
+unfolding Leaf_def o_def by (rule ntrunc_Atom)
+
+lemma ntrunc_Numb [simp]: "ntrunc (Suc k) (Numb i) = Numb(i)"
+unfolding Numb_def o_def by (rule ntrunc_Atom)
+
+lemma ntrunc_Scons [simp]: 
+    "ntrunc (Suc k) (Scons M N) = Scons (ntrunc k M) (ntrunc k N)"
+unfolding Scons_def ntrunc_def One_nat_def
+by (auto simp add: ndepth_Push_Node)
+
+
+
+(** Injection nodes **)
+
+lemma ntrunc_one_In0 [simp]: "ntrunc (Suc 0) (In0 M) = {}"
+apply (simp add: In0_def)
+apply (simp add: Scons_def)
+done
+
+lemma ntrunc_In0 [simp]: "ntrunc (Suc(Suc k)) (In0 M) = In0 (ntrunc (Suc k) M)"
+by (simp add: In0_def)
+
+lemma ntrunc_one_In1 [simp]: "ntrunc (Suc 0) (In1 M) = {}"
+apply (simp add: In1_def)
+apply (simp add: Scons_def)
+done
+
+lemma ntrunc_In1 [simp]: "ntrunc (Suc(Suc k)) (In1 M) = In1 (ntrunc (Suc k) M)"
+by (simp add: In1_def)
+
+
+subsection{*Set Constructions*}
+
+
+(*** Cartesian Product ***)
+
+lemma uprodI [intro!]: "[| M:A;  N:B |] ==> Scons M N : uprod A B"
+by (simp add: uprod_def)
+
+(*The general elimination rule*)
+lemma uprodE [elim!]:
+    "[| c : uprod A B;   
+        !!x y. [| x:A;  y:B;  c = Scons x y |] ==> P  
+     |] ==> P"
+by (auto simp add: uprod_def) 
+
+
+(*Elimination of a pair -- introduces no eigenvariables*)
+lemma uprodE2: "[| Scons M N : uprod A B;  [| M:A;  N:B |] ==> P |] ==> P"
+by (auto simp add: uprod_def)
+
+
+(*** Disjoint Sum ***)
+
+lemma usum_In0I [intro]: "M:A ==> In0(M) : usum A B"
+by (simp add: usum_def)
+
+lemma usum_In1I [intro]: "N:B ==> In1(N) : usum A B"
+by (simp add: usum_def)
+
+lemma usumE [elim!]: 
+    "[| u : usum A B;   
+        !!x. [| x:A;  u=In0(x) |] ==> P;  
+        !!y. [| y:B;  u=In1(y) |] ==> P  
+     |] ==> P"
+by (auto simp add: usum_def)
+
+
+(** Injection **)
+
+lemma In0_not_In1 [iff]: "In0(M) \<noteq> In1(N)"
+unfolding In0_def In1_def One_nat_def by auto
+
+lemmas In1_not_In0 [iff] = In0_not_In1 [THEN not_sym]
+
+lemma In0_inject: "In0(M) = In0(N) ==>  M=N"
+by (simp add: In0_def)
+
+lemma In1_inject: "In1(M) = In1(N) ==>  M=N"
+by (simp add: In1_def)
+
+lemma In0_eq [iff]: "(In0 M = In0 N) = (M=N)"
+by (blast dest!: In0_inject)
+
+lemma In1_eq [iff]: "(In1 M = In1 N) = (M=N)"
+by (blast dest!: In1_inject)
+
+lemma inj_In0: "inj In0"
+by (blast intro!: inj_onI)
+
+lemma inj_In1: "inj In1"
+by (blast intro!: inj_onI)
+
+
+(*** Function spaces ***)
+
+lemma Lim_inject: "Lim f = Lim g ==> f = g"
+apply (simp add: Lim_def)
+apply (rule ext)
+apply (blast elim!: Push_Node_inject)
+done
+
+
+(*** proving equality of sets and functions using ntrunc ***)
+
+lemma ntrunc_subsetI: "ntrunc k M <= M"
+by (auto simp add: ntrunc_def)
+
+lemma ntrunc_subsetD: "(!!k. ntrunc k M <= N) ==> M<=N"
+by (auto simp add: ntrunc_def)
+
+(*A generalized form of the take-lemma*)
+lemma ntrunc_equality: "(!!k. ntrunc k M = ntrunc k N) ==> M=N"
+apply (rule equalityI)
+apply (rule_tac [!] ntrunc_subsetD)
+apply (rule_tac [!] ntrunc_subsetI [THEN [2] subset_trans], auto) 
+done
+
+lemma ntrunc_o_equality: 
+    "[| !!k. (ntrunc(k) o h1) = (ntrunc(k) o h2) |] ==> h1=h2"
+apply (rule ntrunc_equality [THEN ext])
+apply (simp add: fun_eq_iff) 
+done
+
+
+(*** Monotonicity ***)
+
+lemma uprod_mono: "[| A<=A';  B<=B' |] ==> uprod A B <= uprod A' B'"
+by (simp add: uprod_def, blast)
+
+lemma usum_mono: "[| A<=A';  B<=B' |] ==> usum A B <= usum A' B'"
+by (simp add: usum_def, blast)
+
+lemma Scons_mono: "[| M<=M';  N<=N' |] ==> Scons M N <= Scons M' N'"
+by (simp add: Scons_def, blast)
+
+lemma In0_mono: "M<=N ==> In0(M) <= In0(N)"
+by (simp add: In0_def Scons_mono)
+
+lemma In1_mono: "M<=N ==> In1(M) <= In1(N)"
+by (simp add: In1_def Scons_mono)
+
+
+(*** Split and Case ***)
+
+lemma Split [simp]: "Split c (Scons M N) = c M N"
+by (simp add: Split_def)
+
+lemma Case_In0 [simp]: "Case c d (In0 M) = c(M)"
+by (simp add: Case_def)
+
+lemma Case_In1 [simp]: "Case c d (In1 N) = d(N)"
+by (simp add: Case_def)
+
+
+
+(**** UN x. B(x) rules ****)
+
+lemma ntrunc_UN1: "ntrunc k (UN x. f(x)) = (UN x. ntrunc k (f x))"
+by (simp add: ntrunc_def, blast)
+
+lemma Scons_UN1_x: "Scons (UN x. f x) M = (UN x. Scons (f x) M)"
+by (simp add: Scons_def, blast)
+
+lemma Scons_UN1_y: "Scons M (UN x. f x) = (UN x. Scons M (f x))"
+by (simp add: Scons_def, blast)
+
+lemma In0_UN1: "In0(UN x. f(x)) = (UN x. In0(f(x)))"
+by (simp add: In0_def Scons_UN1_y)
+
+lemma In1_UN1: "In1(UN x. f(x)) = (UN x. In1(f(x)))"
+by (simp add: In1_def Scons_UN1_y)
+
+
+(*** Equality for Cartesian Product ***)
+
+lemma dprodI [intro!]: 
+    "[| (M,M'):r;  (N,N'):s |] ==> (Scons M N, Scons M' N') : dprod r s"
+by (auto simp add: dprod_def)
+
+(*The general elimination rule*)
+lemma dprodE [elim!]: 
+    "[| c : dprod r s;   
+        !!x y x' y'. [| (x,x') : r;  (y,y') : s;  
+                        c = (Scons x y, Scons x' y') |] ==> P  
+     |] ==> P"
+by (auto simp add: dprod_def)
+
+
+(*** Equality for Disjoint Sum ***)
+
+lemma dsum_In0I [intro]: "(M,M'):r ==> (In0(M), In0(M')) : dsum r s"
+by (auto simp add: dsum_def)
+
+lemma dsum_In1I [intro]: "(N,N'):s ==> (In1(N), In1(N')) : dsum r s"
+by (auto simp add: dsum_def)
+
+lemma dsumE [elim!]: 
+    "[| w : dsum r s;   
+        !!x x'. [| (x,x') : r;  w = (In0(x), In0(x')) |] ==> P;  
+        !!y y'. [| (y,y') : s;  w = (In1(y), In1(y')) |] ==> P  
+     |] ==> P"
+by (auto simp add: dsum_def)
+
+
+(*** Monotonicity ***)
+
+lemma dprod_mono: "[| r<=r';  s<=s' |] ==> dprod r s <= dprod r' s'"
+by blast
+
+lemma dsum_mono: "[| r<=r';  s<=s' |] ==> dsum r s <= dsum r' s'"
+by blast
+
+
+(*** Bounding theorems ***)
+
+lemma dprod_Sigma: "(dprod (A <*> B) (C <*> D)) <= (uprod A C) <*> (uprod B D)"
+by blast
+
+lemmas dprod_subset_Sigma = subset_trans [OF dprod_mono dprod_Sigma]
+
+(*Dependent version*)
+lemma dprod_subset_Sigma2:
+    "(dprod (Sigma A B) (Sigma C D)) <= Sigma (uprod A C) (Split (%x y. uprod (B x) (D y)))"
+by auto
+
+lemma dsum_Sigma: "(dsum (A <*> B) (C <*> D)) <= (usum A C) <*> (usum B D)"
+by blast
+
+lemmas dsum_subset_Sigma = subset_trans [OF dsum_mono dsum_Sigma]
+
+
+(*** Domain theorems ***)
+
+lemma Domain_dprod [simp]: "Domain (dprod r s) = uprod (Domain r) (Domain s)"
+  by auto
+
+lemma Domain_dsum [simp]: "Domain (dsum r s) = usum (Domain r) (Domain s)"
+  by auto
+
+
+text {* hides popular names *}
+hide_type (open) node item
+hide_const (open) Push Node Atom Leaf Numb Lim Split Case
+
+ML_file "~~/src/HOL/Tools/Old_Datatype/old_datatype.ML"
+ML_file "~~/src/HOL/Tools/inductive_realizer.ML"
+
+end
--- a/src/HOL/Library/Phantom_Type.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Library/Phantom_Type.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -8,13 +8,7 @@
 imports Main
 begin
 
-datatype ('a, 'b) phantom = phantom 'b
-
-primrec of_phantom :: "('a, 'b) phantom \<Rightarrow> 'b" 
-where "of_phantom (phantom x) = x"
-
-lemma of_phantom_phantom [simp]: "phantom (of_phantom x) = x"
-by(cases x) simp
+datatype ('a, 'b) phantom = phantom (of_phantom: 'b)
 
 lemma type_definition_phantom': "type_definition of_phantom phantom UNIV"
 by(unfold_locales) simp_all
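The rewritten phantom datatype above uses the new package's selector syntax, so the hand-written primrec of_phantom and the lemma of_phantom_phantom are generated automatically. A hedged sketch of what should now be available, assuming the usual generated fact names (phantom.sel, phantom.exhaust):

    (* Illustrative only; fact names are the expected package defaults, not part of this changeset. *)
    lemma "of_phantom (phantom x) = x"
      by (rule phantom.sel)

    lemma "phantom (of_phantom y) = y"
      by (cases y) simp
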
--- a/src/HOL/Library/RBT_Set.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Library/RBT_Set.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -88,6 +88,9 @@
   "setsum = setsum" ..
 
 lemma [code, code del]:
+  "setprod = setprod" ..
+
+lemma [code, code del]:
   "Product_Type.product = Product_Type.product"  ..
 
 lemma [code, code del]:
--- a/src/HOL/Main.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Main.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -2,7 +2,7 @@
 
 theory Main
 imports Predicate_Compile Quickcheck_Narrowing Extraction Lifting_Sum Coinduction Nitpick
-  Basic_BNF_Least_Fixpoints BNF_Greatest_Fixpoint Old_Datatype
+  BNF_Greatest_Fixpoint
 begin
 
 text {*
--- a/src/HOL/Nat.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Nat.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -1185,7 +1185,7 @@
   by (fact Let_def)
 
 
-subsubsection {* Monotonicity of Multiplication *}
+subsubsection {* Monotonicity of multiplication *}
 
 lemma mult_le_mono1: "i \<le> (j::nat) ==> i * k \<le> j * k"
 by (simp add: mult_right_mono)
@@ -1390,7 +1390,7 @@
 qed
 
 
-subsection {* Embedding of the Naturals into any @{text semiring_1}: @{term of_nat} *}
+subsection {* Embedding of the naturals into any @{text semiring_1}: @{term of_nat} *}
 
 context semiring_1
 begin
@@ -1512,7 +1512,7 @@
   by (auto simp add: fun_eq_iff)
 
 
-subsection {* The Set of Natural Numbers *}
+subsection {* The set of natural numbers *}
 
 context semiring_1
 begin
@@ -1567,7 +1567,7 @@
 end
 
 
-subsection {* Further Arithmetic Facts Concerning the Natural Numbers *}
+subsection {* Further arithmetic facts concerning the natural numbers *}
 
 lemma subst_equals:
   assumes 1: "t = s" and 2: "u = t"
@@ -1825,6 +1825,7 @@
   "i \<le> j \<Longrightarrow> P i \<Longrightarrow> (\<And>n. i \<le> n \<Longrightarrow> n < j \<Longrightarrow> P n \<Longrightarrow> P (Suc n)) \<Longrightarrow> P j"
   by (induct j arbitrary: i) (auto simp: le_Suc_eq)
  
+
 subsection {* The divides relation on @{typ nat} *}
 
 lemma dvd_1_left [iff]: "Suc 0 dvd k"
@@ -1962,7 +1963,7 @@
 qed
 
 
-subsection {* aliases *}
+subsection {* Aliases *}
 
 lemma nat_mult_1: "(1::nat) * n = n"
   by (rule mult_1_left)
@@ -1971,13 +1972,23 @@
   by (rule mult_1_right)
 
 
-subsection {* size of a datatype value *}
+subsection {* Size of a datatype value *}
 
 class size =
   fixes size :: "'a \<Rightarrow> nat" -- {* see further theory @{text Wellfounded} *}
 
-
-subsection {* code module namespace *}
+instantiation nat :: size
+begin
+
+definition size_nat where
+  [simp, code]: "size (n \<Colon> nat) = n"
+
+instance ..
+
+end
+
+
+subsection {* Code module namespace *}
 
 code_identifier
   code_module Nat \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
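The instantiation above replaces the former free-standing simp rule "size (n::nat) = n" (removed from Basic_BNF_Least_Fixpoints.thy earlier in this changeset) by a proper instance of the size class. The same idiom applies to any other type; a minimal sketch using a hypothetical type that is not part of this changeset:

    (* "my_token" and its trivial size are purely illustrative. *)
    typedecl my_token

    instantiation my_token :: size
    begin

    definition size_my_token :: "my_token \<Rightarrow> nat" where
      [simp]: "size_my_token x = 0"

    instance ..

    end
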
--- a/src/HOL/Nominal/Nominal.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Nominal/Nominal.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -1,5 +1,5 @@
 theory Nominal 
-imports Main "~~/src/HOL/Library/Infinite_Set"
+imports "~~/src/HOL/Library/Infinite_Set" "~~/src/HOL/Library/Old_Datatype"
 keywords
   "atom_decl" "nominal_datatype" "equivariance" :: thy_decl and
   "nominal_primrec" "nominal_inductive" "nominal_inductive2" :: thy_goal and
--- a/src/HOL/Nominal/nominal_datatype.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Nominal/nominal_datatype.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -752,7 +752,7 @@
                   Old_Datatype_Aux.DtRec k => if k < length new_type_names then
                       Const (nth rep_names k, Old_Datatype_Aux.typ_of_dtyp descr'' dt -->
                         Old_Datatype_Aux.typ_of_dtyp descr dt) $ x
-                    else error "nested recursion not (yet) supported"
+                    else error "nested recursion not supported"
                 | _ => x) :: r_args)
           end
 
--- a/src/HOL/Old_Datatype.thy	Thu Sep 18 15:23:23 2014 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,529 +0,0 @@
-(*  Title:      HOL/Old_Datatype.thy
-    Author:     Lawrence C Paulson, Cambridge University Computer Laboratory
-    Author:     Stefan Berghofer and Markus Wenzel, TU Muenchen
-*)
-
-header {* Old Datatype package: constructing datatypes from Cartesian Products and Disjoint Sums *}
-
-theory Old_Datatype
-imports Power
-keywords "old_datatype" :: thy_decl
-begin
-
-subsection {* The datatype universe *}
-
-definition "Node = {p. EX f x k. p = (f :: nat => 'b + nat, x ::'a + nat) & f k = Inr 0}"
-
-typedef ('a, 'b) node = "Node :: ((nat => 'b + nat) * ('a + nat)) set"
-  morphisms Rep_Node Abs_Node
-  unfolding Node_def by auto
-
-text{*Datatypes will be represented by sets of type @{text node}*}
-
-type_synonym 'a item        = "('a, unit) node set"
-type_synonym ('a, 'b) dtree = "('a, 'b) node set"
-
-consts
-  Push      :: "[('b + nat), nat => ('b + nat)] => (nat => ('b + nat))"
-
-  Push_Node :: "[('b + nat), ('a, 'b) node] => ('a, 'b) node"
-  ndepth    :: "('a, 'b) node => nat"
-
-  Atom      :: "('a + nat) => ('a, 'b) dtree"
-  Leaf      :: "'a => ('a, 'b) dtree"
-  Numb      :: "nat => ('a, 'b) dtree"
-  Scons     :: "[('a, 'b) dtree, ('a, 'b) dtree] => ('a, 'b) dtree"
-  In0       :: "('a, 'b) dtree => ('a, 'b) dtree"
-  In1       :: "('a, 'b) dtree => ('a, 'b) dtree"
-  Lim       :: "('b => ('a, 'b) dtree) => ('a, 'b) dtree"
-
-  ntrunc    :: "[nat, ('a, 'b) dtree] => ('a, 'b) dtree"
-
-  uprod     :: "[('a, 'b) dtree set, ('a, 'b) dtree set]=> ('a, 'b) dtree set"
-  usum      :: "[('a, 'b) dtree set, ('a, 'b) dtree set]=> ('a, 'b) dtree set"
-
-  Split     :: "[[('a, 'b) dtree, ('a, 'b) dtree]=>'c, ('a, 'b) dtree] => 'c"
-  Case      :: "[[('a, 'b) dtree]=>'c, [('a, 'b) dtree]=>'c, ('a, 'b) dtree] => 'c"
-
-  dprod     :: "[(('a, 'b) dtree * ('a, 'b) dtree)set, (('a, 'b) dtree * ('a, 'b) dtree)set]
-                => (('a, 'b) dtree * ('a, 'b) dtree)set"
-  dsum      :: "[(('a, 'b) dtree * ('a, 'b) dtree)set, (('a, 'b) dtree * ('a, 'b) dtree)set]
-                => (('a, 'b) dtree * ('a, 'b) dtree)set"
-
-
-defs
-
-  Push_Node_def:  "Push_Node == (%n x. Abs_Node (apfst (Push n) (Rep_Node x)))"
-
-  (*crude "lists" of nats -- needed for the constructions*)
-  Push_def:   "Push == (%b h. case_nat b h)"
-
-  (** operations on S-expressions -- sets of nodes **)
-
-  (*S-expression constructors*)
-  Atom_def:   "Atom == (%x. {Abs_Node((%k. Inr 0, x))})"
-  Scons_def:  "Scons M N == (Push_Node (Inr 1) ` M) Un (Push_Node (Inr (Suc 1)) ` N)"
-
-  (*Leaf nodes, with arbitrary or nat labels*)
-  Leaf_def:   "Leaf == Atom o Inl"
-  Numb_def:   "Numb == Atom o Inr"
-
-  (*Injections of the "disjoint sum"*)
-  In0_def:    "In0(M) == Scons (Numb 0) M"
-  In1_def:    "In1(M) == Scons (Numb 1) M"
-
-  (*Function spaces*)
-  Lim_def: "Lim f == Union {z. ? x. z = Push_Node (Inl x) ` (f x)}"
-
-  (*the set of nodes with depth less than k*)
-  ndepth_def: "ndepth(n) == (%(f,x). LEAST k. f k = Inr 0) (Rep_Node n)"
-  ntrunc_def: "ntrunc k N == {n. n:N & ndepth(n)<k}"
-
-  (*products and sums for the "universe"*)
-  uprod_def:  "uprod A B == UN x:A. UN y:B. { Scons x y }"
-  usum_def:   "usum A B == In0`A Un In1`B"
-
-  (*the corresponding eliminators*)
-  Split_def:  "Split c M == THE u. EX x y. M = Scons x y & u = c x y"
-
-  Case_def:   "Case c d M == THE u.  (EX x . M = In0(x) & u = c(x))
-                                  | (EX y . M = In1(y) & u = d(y))"
-
-
-  (** equality for the "universe" **)
-
-  dprod_def:  "dprod r s == UN (x,x'):r. UN (y,y'):s. {(Scons x y, Scons x' y')}"
-
-  dsum_def:   "dsum r s == (UN (x,x'):r. {(In0(x),In0(x'))}) Un
-                          (UN (y,y'):s. {(In1(y),In1(y'))})"
-
-
-
-lemma apfst_convE: 
-    "[| q = apfst f p;  !!x y. [| p = (x,y);  q = (f(x),y) |] ==> R  
-     |] ==> R"
-by (force simp add: apfst_def)
-
-(** Push -- an injection, analogous to Cons on lists **)
-
-lemma Push_inject1: "Push i f = Push j g  ==> i=j"
-apply (simp add: Push_def fun_eq_iff) 
-apply (drule_tac x=0 in spec, simp) 
-done
-
-lemma Push_inject2: "Push i f = Push j g  ==> f=g"
-apply (auto simp add: Push_def fun_eq_iff) 
-apply (drule_tac x="Suc x" in spec, simp) 
-done
-
-lemma Push_inject:
-    "[| Push i f =Push j g;  [| i=j;  f=g |] ==> P |] ==> P"
-by (blast dest: Push_inject1 Push_inject2) 
-
-lemma Push_neq_K0: "Push (Inr (Suc k)) f = (%z. Inr 0) ==> P"
-by (auto simp add: Push_def fun_eq_iff split: nat.split_asm)
-
-lemmas Abs_Node_inj = Abs_Node_inject [THEN [2] rev_iffD1]
-
-
-(*** Introduction rules for Node ***)
-
-lemma Node_K0_I: "(%k. Inr 0, a) : Node"
-by (simp add: Node_def)
-
-lemma Node_Push_I: "p: Node ==> apfst (Push i) p : Node"
-apply (simp add: Node_def Push_def) 
-apply (fast intro!: apfst_conv nat.case(2)[THEN trans])
-done
-
-
-subsection{*Freeness: Distinctness of Constructors*}
-
-(** Scons vs Atom **)
-
-lemma Scons_not_Atom [iff]: "Scons M N \<noteq> Atom(a)"
-unfolding Atom_def Scons_def Push_Node_def One_nat_def
-by (blast intro: Node_K0_I Rep_Node [THEN Node_Push_I] 
-         dest!: Abs_Node_inj 
-         elim!: apfst_convE sym [THEN Push_neq_K0])  
-
-lemmas Atom_not_Scons [iff] = Scons_not_Atom [THEN not_sym]
-
-
-(*** Injectiveness ***)
-
-(** Atomic nodes **)
-
-lemma inj_Atom: "inj(Atom)"
-apply (simp add: Atom_def)
-apply (blast intro!: inj_onI Node_K0_I dest!: Abs_Node_inj)
-done
-lemmas Atom_inject = inj_Atom [THEN injD]
-
-lemma Atom_Atom_eq [iff]: "(Atom(a)=Atom(b)) = (a=b)"
-by (blast dest!: Atom_inject)
-
-lemma inj_Leaf: "inj(Leaf)"
-apply (simp add: Leaf_def o_def)
-apply (rule inj_onI)
-apply (erule Atom_inject [THEN Inl_inject])
-done
-
-lemmas Leaf_inject [dest!] = inj_Leaf [THEN injD]
-
-lemma inj_Numb: "inj(Numb)"
-apply (simp add: Numb_def o_def)
-apply (rule inj_onI)
-apply (erule Atom_inject [THEN Inr_inject])
-done
-
-lemmas Numb_inject [dest!] = inj_Numb [THEN injD]
-
-
-(** Injectiveness of Push_Node **)
-
-lemma Push_Node_inject:
-    "[| Push_Node i m =Push_Node j n;  [| i=j;  m=n |] ==> P  
-     |] ==> P"
-apply (simp add: Push_Node_def)
-apply (erule Abs_Node_inj [THEN apfst_convE])
-apply (rule Rep_Node [THEN Node_Push_I])+
-apply (erule sym [THEN apfst_convE]) 
-apply (blast intro: Rep_Node_inject [THEN iffD1] trans sym elim!: Push_inject)
-done
-
-
-(** Injectiveness of Scons **)
-
-lemma Scons_inject_lemma1: "Scons M N <= Scons M' N' ==> M<=M'"
-unfolding Scons_def One_nat_def
-by (blast dest!: Push_Node_inject)
-
-lemma Scons_inject_lemma2: "Scons M N <= Scons M' N' ==> N<=N'"
-unfolding Scons_def One_nat_def
-by (blast dest!: Push_Node_inject)
-
-lemma Scons_inject1: "Scons M N = Scons M' N' ==> M=M'"
-apply (erule equalityE)
-apply (iprover intro: equalityI Scons_inject_lemma1)
-done
-
-lemma Scons_inject2: "Scons M N = Scons M' N' ==> N=N'"
-apply (erule equalityE)
-apply (iprover intro: equalityI Scons_inject_lemma2)
-done
-
-lemma Scons_inject:
-    "[| Scons M N = Scons M' N';  [| M=M';  N=N' |] ==> P |] ==> P"
-by (iprover dest: Scons_inject1 Scons_inject2)
-
-lemma Scons_Scons_eq [iff]: "(Scons M N = Scons M' N') = (M=M' & N=N')"
-by (blast elim!: Scons_inject)
-
-(*** Distinctness involving Leaf and Numb ***)
-
-(** Scons vs Leaf **)
-
-lemma Scons_not_Leaf [iff]: "Scons M N \<noteq> Leaf(a)"
-unfolding Leaf_def o_def by (rule Scons_not_Atom)
-
-lemmas Leaf_not_Scons  [iff] = Scons_not_Leaf [THEN not_sym]
-
-(** Scons vs Numb **)
-
-lemma Scons_not_Numb [iff]: "Scons M N \<noteq> Numb(k)"
-unfolding Numb_def o_def by (rule Scons_not_Atom)
-
-lemmas Numb_not_Scons [iff] = Scons_not_Numb [THEN not_sym]
-
-
-(** Leaf vs Numb **)
-
-lemma Leaf_not_Numb [iff]: "Leaf(a) \<noteq> Numb(k)"
-by (simp add: Leaf_def Numb_def)
-
-lemmas Numb_not_Leaf [iff] = Leaf_not_Numb [THEN not_sym]
-
-
-(*** ndepth -- the depth of a node ***)
-
-lemma ndepth_K0: "ndepth (Abs_Node(%k. Inr 0, x)) = 0"
-by (simp add: ndepth_def  Node_K0_I [THEN Abs_Node_inverse] Least_equality)
-
-lemma ndepth_Push_Node_aux:
-     "case_nat (Inr (Suc i)) f k = Inr 0 --> Suc(LEAST x. f x = Inr 0) <= k"
-apply (induct_tac "k", auto)
-apply (erule Least_le)
-done
-
-lemma ndepth_Push_Node: 
-    "ndepth (Push_Node (Inr (Suc i)) n) = Suc(ndepth(n))"
-apply (insert Rep_Node [of n, unfolded Node_def])
-apply (auto simp add: ndepth_def Push_Node_def
-                 Rep_Node [THEN Node_Push_I, THEN Abs_Node_inverse])
-apply (rule Least_equality)
-apply (auto simp add: Push_def ndepth_Push_Node_aux)
-apply (erule LeastI)
-done
-
-
-(*** ntrunc applied to the various node sets ***)
-
-lemma ntrunc_0 [simp]: "ntrunc 0 M = {}"
-by (simp add: ntrunc_def)
-
-lemma ntrunc_Atom [simp]: "ntrunc (Suc k) (Atom a) = Atom(a)"
-by (auto simp add: Atom_def ntrunc_def ndepth_K0)
-
-lemma ntrunc_Leaf [simp]: "ntrunc (Suc k) (Leaf a) = Leaf(a)"
-unfolding Leaf_def o_def by (rule ntrunc_Atom)
-
-lemma ntrunc_Numb [simp]: "ntrunc (Suc k) (Numb i) = Numb(i)"
-unfolding Numb_def o_def by (rule ntrunc_Atom)
-
-lemma ntrunc_Scons [simp]: 
-    "ntrunc (Suc k) (Scons M N) = Scons (ntrunc k M) (ntrunc k N)"
-unfolding Scons_def ntrunc_def One_nat_def
-by (auto simp add: ndepth_Push_Node)
-
-
-
-(** Injection nodes **)
-
-lemma ntrunc_one_In0 [simp]: "ntrunc (Suc 0) (In0 M) = {}"
-apply (simp add: In0_def)
-apply (simp add: Scons_def)
-done
-
-lemma ntrunc_In0 [simp]: "ntrunc (Suc(Suc k)) (In0 M) = In0 (ntrunc (Suc k) M)"
-by (simp add: In0_def)
-
-lemma ntrunc_one_In1 [simp]: "ntrunc (Suc 0) (In1 M) = {}"
-apply (simp add: In1_def)
-apply (simp add: Scons_def)
-done
-
-lemma ntrunc_In1 [simp]: "ntrunc (Suc(Suc k)) (In1 M) = In1 (ntrunc (Suc k) M)"
-by (simp add: In1_def)
-
-
-subsection{*Set Constructions*}
-
-
-(*** Cartesian Product ***)
-
-lemma uprodI [intro!]: "[| M:A;  N:B |] ==> Scons M N : uprod A B"
-by (simp add: uprod_def)
-
-(*The general elimination rule*)
-lemma uprodE [elim!]:
-    "[| c : uprod A B;   
-        !!x y. [| x:A;  y:B;  c = Scons x y |] ==> P  
-     |] ==> P"
-by (auto simp add: uprod_def) 
-
-
-(*Elimination of a pair -- introduces no eigenvariables*)
-lemma uprodE2: "[| Scons M N : uprod A B;  [| M:A;  N:B |] ==> P |] ==> P"
-by (auto simp add: uprod_def)
-
-
-(*** Disjoint Sum ***)
-
-lemma usum_In0I [intro]: "M:A ==> In0(M) : usum A B"
-by (simp add: usum_def)
-
-lemma usum_In1I [intro]: "N:B ==> In1(N) : usum A B"
-by (simp add: usum_def)
-
-lemma usumE [elim!]: 
-    "[| u : usum A B;   
-        !!x. [| x:A;  u=In0(x) |] ==> P;  
-        !!y. [| y:B;  u=In1(y) |] ==> P  
-     |] ==> P"
-by (auto simp add: usum_def)
-
-
-(** Injection **)
-
-lemma In0_not_In1 [iff]: "In0(M) \<noteq> In1(N)"
-unfolding In0_def In1_def One_nat_def by auto
-
-lemmas In1_not_In0 [iff] = In0_not_In1 [THEN not_sym]
-
-lemma In0_inject: "In0(M) = In0(N) ==>  M=N"
-by (simp add: In0_def)
-
-lemma In1_inject: "In1(M) = In1(N) ==>  M=N"
-by (simp add: In1_def)
-
-lemma In0_eq [iff]: "(In0 M = In0 N) = (M=N)"
-by (blast dest!: In0_inject)
-
-lemma In1_eq [iff]: "(In1 M = In1 N) = (M=N)"
-by (blast dest!: In1_inject)
-
-lemma inj_In0: "inj In0"
-by (blast intro!: inj_onI)
-
-lemma inj_In1: "inj In1"
-by (blast intro!: inj_onI)
-
-
-(*** Function spaces ***)
-
-lemma Lim_inject: "Lim f = Lim g ==> f = g"
-apply (simp add: Lim_def)
-apply (rule ext)
-apply (blast elim!: Push_Node_inject)
-done
-
-
-(*** proving equality of sets and functions using ntrunc ***)
-
-lemma ntrunc_subsetI: "ntrunc k M <= M"
-by (auto simp add: ntrunc_def)
-
-lemma ntrunc_subsetD: "(!!k. ntrunc k M <= N) ==> M<=N"
-by (auto simp add: ntrunc_def)
-
-(*A generalized form of the take-lemma*)
-lemma ntrunc_equality: "(!!k. ntrunc k M = ntrunc k N) ==> M=N"
-apply (rule equalityI)
-apply (rule_tac [!] ntrunc_subsetD)
-apply (rule_tac [!] ntrunc_subsetI [THEN [2] subset_trans], auto) 
-done
-
-lemma ntrunc_o_equality: 
-    "[| !!k. (ntrunc(k) o h1) = (ntrunc(k) o h2) |] ==> h1=h2"
-apply (rule ntrunc_equality [THEN ext])
-apply (simp add: fun_eq_iff) 
-done
-
-
-(*** Monotonicity ***)
-
-lemma uprod_mono: "[| A<=A';  B<=B' |] ==> uprod A B <= uprod A' B'"
-by (simp add: uprod_def, blast)
-
-lemma usum_mono: "[| A<=A';  B<=B' |] ==> usum A B <= usum A' B'"
-by (simp add: usum_def, blast)
-
-lemma Scons_mono: "[| M<=M';  N<=N' |] ==> Scons M N <= Scons M' N'"
-by (simp add: Scons_def, blast)
-
-lemma In0_mono: "M<=N ==> In0(M) <= In0(N)"
-by (simp add: In0_def Scons_mono)
-
-lemma In1_mono: "M<=N ==> In1(M) <= In1(N)"
-by (simp add: In1_def Scons_mono)
-
-
-(*** Split and Case ***)
-
-lemma Split [simp]: "Split c (Scons M N) = c M N"
-by (simp add: Split_def)
-
-lemma Case_In0 [simp]: "Case c d (In0 M) = c(M)"
-by (simp add: Case_def)
-
-lemma Case_In1 [simp]: "Case c d (In1 N) = d(N)"
-by (simp add: Case_def)
-
-
-
-(**** UN x. B(x) rules ****)
-
-lemma ntrunc_UN1: "ntrunc k (UN x. f(x)) = (UN x. ntrunc k (f x))"
-by (simp add: ntrunc_def, blast)
-
-lemma Scons_UN1_x: "Scons (UN x. f x) M = (UN x. Scons (f x) M)"
-by (simp add: Scons_def, blast)
-
-lemma Scons_UN1_y: "Scons M (UN x. f x) = (UN x. Scons M (f x))"
-by (simp add: Scons_def, blast)
-
-lemma In0_UN1: "In0(UN x. f(x)) = (UN x. In0(f(x)))"
-by (simp add: In0_def Scons_UN1_y)
-
-lemma In1_UN1: "In1(UN x. f(x)) = (UN x. In1(f(x)))"
-by (simp add: In1_def Scons_UN1_y)
-
-
-(*** Equality for Cartesian Product ***)
-
-lemma dprodI [intro!]: 
-    "[| (M,M'):r;  (N,N'):s |] ==> (Scons M N, Scons M' N') : dprod r s"
-by (auto simp add: dprod_def)
-
-(*The general elimination rule*)
-lemma dprodE [elim!]: 
-    "[| c : dprod r s;   
-        !!x y x' y'. [| (x,x') : r;  (y,y') : s;  
-                        c = (Scons x y, Scons x' y') |] ==> P  
-     |] ==> P"
-by (auto simp add: dprod_def)
-
-
-(*** Equality for Disjoint Sum ***)
-
-lemma dsum_In0I [intro]: "(M,M'):r ==> (In0(M), In0(M')) : dsum r s"
-by (auto simp add: dsum_def)
-
-lemma dsum_In1I [intro]: "(N,N'):s ==> (In1(N), In1(N')) : dsum r s"
-by (auto simp add: dsum_def)
-
-lemma dsumE [elim!]: 
-    "[| w : dsum r s;   
-        !!x x'. [| (x,x') : r;  w = (In0(x), In0(x')) |] ==> P;  
-        !!y y'. [| (y,y') : s;  w = (In1(y), In1(y')) |] ==> P  
-     |] ==> P"
-by (auto simp add: dsum_def)
-
-
-(*** Monotonicity ***)
-
-lemma dprod_mono: "[| r<=r';  s<=s' |] ==> dprod r s <= dprod r' s'"
-by blast
-
-lemma dsum_mono: "[| r<=r';  s<=s' |] ==> dsum r s <= dsum r' s'"
-by blast
-
-
-(*** Bounding theorems ***)
-
-lemma dprod_Sigma: "(dprod (A <*> B) (C <*> D)) <= (uprod A C) <*> (uprod B D)"
-by blast
-
-lemmas dprod_subset_Sigma = subset_trans [OF dprod_mono dprod_Sigma]
-
-(*Dependent version*)
-lemma dprod_subset_Sigma2:
-    "(dprod (Sigma A B) (Sigma C D)) <= Sigma (uprod A C) (Split (%x y. uprod (B x) (D y)))"
-by auto
-
-lemma dsum_Sigma: "(dsum (A <*> B) (C <*> D)) <= (usum A C) <*> (usum B D)"
-by blast
-
-lemmas dsum_subset_Sigma = subset_trans [OF dsum_mono dsum_Sigma]
-
-
-(*** Domain theorems ***)
-
-lemma Domain_dprod [simp]: "Domain (dprod r s) = uprod (Domain r) (Domain s)"
-  by auto
-
-lemma Domain_dsum [simp]: "Domain (dsum r s) = usum (Domain r) (Domain s)"
-  by auto
-
-
-text {* hides popular names *}
-hide_type (open) node item
-hide_const (open) Push Node Atom Leaf Numb Lim Split Case
-
-ML_file "Tools/Old_Datatype/old_datatype.ML"
-
-ML_file "Tools/inductive_realizer.ML"
-setup InductiveRealizer.setup
-
-end
--- a/src/HOL/Proofs/Extraction/Higman.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Extraction/Higman.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -6,7 +6,7 @@
 header {* Higman's lemma *}
 
 theory Higman
-imports Main
+imports Old_Datatype
 begin
 
 text {*
--- a/src/HOL/Proofs/Extraction/Util.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Extraction/Util.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -5,7 +5,7 @@
 header {* Auxiliary lemmas used in program extraction examples *}
 
 theory Util
-imports Main
+imports Old_Datatype
 begin
 
 text {*
--- a/src/HOL/Proofs/Extraction/Warshall.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Extraction/Warshall.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -5,7 +5,7 @@
 header {* Warshall's algorithm *}
 
 theory Warshall
-imports Main
+imports Old_Datatype
 begin
 
 text {*
--- a/src/HOL/Proofs/Lambda/Commutation.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Lambda/Commutation.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -5,7 +5,9 @@
 
 header {* Abstract commutation and confluence notions *}
 
-theory Commutation imports Main begin
+theory Commutation
+imports Main
+begin
 
 declare [[syntax_ambiguity_warning = false]]
 
--- a/src/HOL/Proofs/Lambda/Lambda.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Lambda/Lambda.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -5,7 +5,9 @@
 
 header {* Basic definitions of Lambda-calculus *}
 
-theory Lambda imports Main begin
+theory Lambda
+imports Main
+begin
 
 declare [[syntax_ambiguity_warning = false]]
 
--- a/src/HOL/Proofs/Lambda/ListOrder.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Lambda/ListOrder.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -5,7 +5,9 @@
 
 header {* Lifting an order to lists of elements *}
 
-theory ListOrder imports Main begin
+theory ListOrder
+imports Main
+begin
 
 declare [[syntax_ambiguity_warning = false]]
 
--- a/src/HOL/Proofs/Lambda/WeakNorm.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/Lambda/WeakNorm.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -6,7 +6,8 @@
 header {* Weak normalization for simply-typed lambda calculus *}
 
 theory WeakNorm
-imports LambdaType NormalForm "~~/src/HOL/Library/Code_Target_Int"
+imports LambdaType NormalForm "~~/src/HOL/Library/Old_Datatype"
+  "~~/src/HOL/Library/Code_Target_Int"
 begin
 
 text {*
--- a/src/HOL/Proofs/ex/XML_Data.thy	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Proofs/ex/XML_Data.thy	Fri Sep 19 08:26:03 2014 +0200
@@ -63,4 +63,3 @@
 *}
 
 end
-
--- a/src/HOL/ROOT	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/ROOT	Fri Sep 19 08:26:03 2014 +0200
@@ -19,9 +19,9 @@
   description {*
     HOL-Main with explicit proof terms.
   *}
-  options [document = false]
+  options [timeout = 5400, document = false]
   theories Proofs (*sequential change of global flag!*)
-  theories Main
+  theories "~~/src/HOL/Library/Old_Datatype"
   files
     "Tools/Quickcheck/Narrowing_Engine.hs"
     "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
@@ -51,6 +51,7 @@
     RBT_Set
     (*legacy tools*)
     Refute
+    Old_Datatype
     Old_Recdef
     Old_SMT
   theories [condition = ISABELLE_FULL_TEST]
@@ -91,11 +92,13 @@
 
     PropLog proves the completeness of a formalization of propositional logic
     (see
-    HREF="http://www.cl.cam.ac.uk/Research/Reports/TR312-lcp-set-II.ps.gz).
+    http://www.cl.cam.ac.uk/Research/Reports/TR312-lcp-set-II.ps.gz).
 
     Exp demonstrates the use of iterated inductive definitions to reason about
     mutually recursive relations.
   *}
+  theories [document = false]
+    "~~/src/HOL/Library/Old_Datatype"
   theories [quick_and_dirty]
     Common_Patterns
   theories
@@ -741,6 +744,7 @@
   *}
   options [document = false]
   theories
+    "~~/src/HOL/Library/Old_Datatype"
     Compat
     Lambda_Term
     Process
--- a/src/HOL/Tools/BNF/bnf_fp_n2m.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/BNF/bnf_fp_n2m.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -182,12 +182,13 @@
     val castAs = map2 (curry HOLogic.mk_comp) absAs fp_repAs;
     val castBs = map2 (curry HOLogic.mk_comp) absBs fp_repBs;
 
-    val rel_eqs = no_refl (map rel_eq_of_bnf fp_or_nesting_bnfs);
+    val fp_or_nesting_rel_eqs = no_refl (map rel_eq_of_bnf fp_or_nesting_bnfs);
+    val fp_or_nesting_rel_monos = map rel_mono_of_bnf fp_or_nesting_bnfs;
 
     val rel_xtor_co_induct_thm =
       mk_rel_xtor_co_induct_thm fp (map3 cast castAs castBs pre_rels) pre_phis rels phis xs ys
         xtors xtor's (mk_rel_xtor_co_induct_tactic fp abs_inverses rel_xtor_co_inducts rel_defs
-          rel_monos rel_eqs)
+          rel_monos fp_or_nesting_rel_eqs fp_or_nesting_rel_monos)
         lthy;
 
     val map_id0s = no_refl (map map_id0_of_bnf bnfs);
@@ -209,7 +210,8 @@
           in
             cterm_instantiate_pos cts rel_xtor_co_induct_thm
             |> singleton (Proof_Context.export names_lthy lthy)
-            |> unfold_thms lthy (@{thms eq_le_Grp_id_iff all_simps(1,2)[symmetric]} @ rel_eqs)
+            |> unfold_thms lthy (@{thms eq_le_Grp_id_iff all_simps(1,2)[symmetric]} @
+                fp_or_nesting_rel_eqs)
             |> funpow n (fn thm => thm RS spec)
             |> unfold_thms lthy (@{thm eq_alt} :: map rel_Grp_of_bnf bnfs @ map_id0s)
             |> unfold_thms lthy (@{thms vimage2p_id vimage2p_comp comp_apply comp_id
@@ -224,7 +226,8 @@
             val cts = NONE :: map (SOME o certify lthy) (map HOLogic.eq_const As);
           in
             cterm_instantiate_pos cts rel_xtor_co_induct_thm
-            |> unfold_thms lthy (@{thms le_fun_def le_bool_def all_simps(1,2)[symmetric]} @ rel_eqs)
+            |> unfold_thms lthy (@{thms le_fun_def le_bool_def all_simps(1,2)[symmetric]} @
+                fp_or_nesting_rel_eqs)
             |> funpow (2 * n) (fn thm => thm RS spec)
             |> Conv.fconv_rule (Object_Logic.atomize lthy)
             |> funpow n (fn thm => thm RS mp)
--- a/src/HOL/Tools/BNF/bnf_fp_n2m_tactics.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/BNF/bnf_fp_n2m_tactics.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -8,7 +8,7 @@
 signature BNF_FP_N2M_TACTICS =
 sig
   val mk_rel_xtor_co_induct_tactic: BNF_Util.fp_kind -> thm list -> thm list -> thm list ->
-    thm list -> thm list -> {prems: thm list, context: Proof.context} -> tactic
+    thm list -> thm list -> thm list -> {prems: thm list, context: Proof.context} -> tactic
 end;
 
 structure BNF_FP_N2M_Tactics : BNF_FP_N2M_TACTICS =
@@ -20,11 +20,13 @@
 
 val vimage2p_unfolds = o_apply :: @{thms vimage2p_def};
 
-fun mk_rel_xtor_co_induct_tactic fp abs_inverses co_inducts0 rel_defs rel_monos nesting_rel_eqs
-  {context = ctxt, prems = raw_C_IHs} =
+fun mk_rel_xtor_co_induct_tactic fp abs_inverses co_inducts0 rel_defs rel_monos nesting_rel_eqs0
+  nesting_rel_monos0 {context = ctxt, prems = raw_C_IHs} =
   let
-    val co_inducts = map (unfold_thms ctxt
-      (vimage2p_unfolds @ @{thms prod.rel_eq sum.rel_eq} @ nesting_rel_eqs)) co_inducts0;
+    val nesting_rel_eqs = @{thms prod.rel_eq sum.rel_eq} @ nesting_rel_eqs0;
+    val nesting_rel_monos = map (fn thm => rotate_prems ~1 (thm RS @{thm predicate2D}))
+      (@{thms prod.rel_mono sum.rel_mono} @ nesting_rel_monos0);
+    val co_inducts = map (unfold_thms ctxt (vimage2p_unfolds @ nesting_rel_eqs)) co_inducts0;
     val unfolds = map (fn def =>
       unfold_thms ctxt (id_apply :: vimage2p_unfolds @ abs_inverses @ no_reflexive [def])) rel_defs;
     val folded_C_IHs = map (fn thm => thm RS @{thm spec2} RS mp) raw_C_IHs;
@@ -41,7 +43,9 @@
     HEADGOAL (CONJ_WRAP_GEN' (rtac @{thm context_conjI})
       (fn thm => rtac thm THEN_ALL_NEW (rotate_tac ~1 THEN'
          REPEAT_ALL_NEW (FIRST' [eresolve_tac C_IHs, eresolve_tac C_IH_monos,
-           rtac @{thm order_refl}, atac, resolve_tac co_inducts])))
+           SELECT_GOAL (unfold_thms_tac ctxt nesting_rel_eqs) THEN' rtac @{thm order_refl},
+           atac, resolve_tac co_inducts,
+           resolve_tac C_IH_monos THEN' REPEAT_ALL_NEW (eresolve_tac nesting_rel_monos)])))
     co_inducts)
   end;
 
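
For orientation: the new nesting_rel_monos handling turns each relator monotonicity rule into a destruction rule (via predicate2D, with the relator-membership premise rotated to the front by rotate_prems ~1) so that eresolve_tac can apply it to the goal's assumptions. A minimal Isabelle sketch of the derived rule's shape, not part of this changeset (theory name is ad hoc); it assumes only the standard prod.rel_mono and predicate2D lemmas from Main:

    theory Rel_Mono_Example
    imports Main
    begin

    (* prod.rel_mono: R1 <= S1 ==> R2 <= S2 ==> rel_prod R1 R2 <= rel_prod S1 S2;
       chaining it with predicate2D makes it applicable to a concrete pair x, y
       (the ML code above additionally moves the last premise to the front) *)
    lemma "R1 <= S1 ==> R2 <= S2 ==> rel_prod R1 R2 x y ==> rel_prod S1 S2 x y"
      by (fact prod.rel_mono[THEN predicate2D])

    end
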
--- a/src/HOL/Tools/BNF/bnf_lfp_basic_sugar.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/BNF/bnf_lfp_basic_sugar.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -15,7 +15,6 @@
 open BNF_FP_Rec_Sugar_Util
 open BNF_FP_Util
 open BNF_FP_Def_Sugar
-open BNF_LFP_Size
 
 fun trivial_absT_info_of fpT =
   {absT = fpT,
@@ -38,10 +37,10 @@
    dtors = [Const (@{const_name xtor}, fpT --> fpT)],
    xtor_co_recs = [Const (@{const_name ctor_rec}, (fpT --> C) --> (fpT --> C))],
    xtor_co_induct = @{thm xtor_induct},
-   dtor_ctors = [@{thm xtor_xtor}],
-   ctor_dtors = [@{thm xtor_xtor}],
-   ctor_injects = [@{thm xtor_inject}],
-   dtor_injects = [@{thm xtor_inject}],
+   dtor_ctors = @{thms xtor_xtor},
+   ctor_dtors = @{thms xtor_xtor},
+   ctor_injects = @{thms xtor_inject},
+   dtor_injects = @{thms xtor_inject},
    xtor_map_thms = [xtor_map],
    xtor_set_thmss = [xtor_sets],
    xtor_rel_thms = [xtor_rel],
@@ -80,10 +79,10 @@
      ctr_defs = @{thms Inl_def_alt Inr_def_alt},
      ctr_sugar = the_frozen_ctr_sugar_of ctxt fpT_name,
      co_rec = Const (@{const_name case_sum}, map (fn Ts => (Ts ---> C)) ctr_Tss ---> fpT --> C),
-     co_rec_def = @{thm case_sum_def},
+     co_rec_def = @{thm ctor_rec_def_alt[of "case_sum f1 f2" for f1 f2]},
      maps = @{thms map_sum.simps},
-     common_co_inducts = [@{thm sum.induct}],
-     co_inducts = [@{thm sum.induct}],
+     common_co_inducts = @{thms sum.induct},
+     co_inducts = @{thms sum.induct},
      co_rec_thms = @{thms sum.case},
      co_rec_discs = [],
      co_rec_selss = [],
@@ -118,22 +117,22 @@
      fp_nesting_bnfs = [],
      live_nesting_bnfs = [],
      ctrXs_Tss = [ctr_Ts],
-     ctr_defs = [@{thm Pair_def_alt}],
+     ctr_defs = @{thms Pair_def_alt},
      ctr_sugar = the_frozen_ctr_sugar_of ctxt fpT_name,
      co_rec = Const (@{const_name case_prod}, (ctr_Ts ---> C) --> fpT --> C),
-     co_rec_def = @{thm case_prod_def},
-     maps = [@{thm map_prod_simp}],
-     common_co_inducts = [@{thm prod.induct}],
-     co_inducts = [@{thm prod.induct}],
-     co_rec_thms = [@{thm prod.case}],
+     co_rec_def = @{thm ctor_rec_def_alt[of "case_prod f" for f]},
+     maps = @{thms map_prod_simp},
+     common_co_inducts = @{thms prod.induct},
+     co_inducts = @{thms prod.induct},
+     co_rec_thms = @{thms prod.case},
      co_rec_discs = [],
      co_rec_selss = [],
-     rel_injects = [@{thm rel_prod_apply}],
+     rel_injects = @{thms rel_prod_apply},
      rel_distincts = []}
   end;
 
 val _ = Theory.setup (map_local_theory (fn lthy =>
-  fold (BNF_FP_Def_Sugar.register_fp_sugars (fn s => s <> size_plugin) o single o (fn f => f lthy))
+  fold (BNF_FP_Def_Sugar.register_fp_sugars (K true) o single o (fn f => f lthy))
     [fp_sugar_of_sum, fp_sugar_of_prod] lthy));
 
 end;
--- a/src/HOL/Tools/BNF/bnf_lfp_size.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/BNF/bnf_lfp_size.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -67,8 +67,7 @@
 
 fun mk_rec_o_map_tac ctxt rec_def pre_map_defs live_nesting_map_ident0s abs_inverses
     ctor_rec_o_map =
-  unfold_thms_tac ctxt [rec_def] THEN
-  HEADGOAL (rtac (ctor_rec_o_map RS trans) THEN'
+  HEADGOAL (subst_tac ctxt (SOME [1, 2]) [rec_def] THEN' rtac (ctor_rec_o_map RS trans) THEN'
     CONVERSION Thm.eta_long_conversion THEN'
     asm_simp_tac (ss_only (pre_map_defs @
         distinct Thm.eq_thm_prop (live_nesting_map_ident0s @ abs_inverses) @ rec_o_map_simps)
--- a/src/HOL/Tools/BNF/bnf_tactics.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/BNF/bnf_tactics.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -50,15 +50,13 @@
 
 (*transforms f (g x) = h (k x) into f o g = h o k using first order matches for f, g, h, and k*)
 fun mk_pointfree ctxt thm = thm
-  |> Drule.zero_var_indexes
-  |> Thm.prop_of
-  |> Logic.unvarify_global
-  |> HOLogic.dest_Trueprop |> HOLogic.dest_eq
+  |> Thm.prop_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq
   |> pairself (dest_comb #> apsnd (dest_comb #> fst) #> HOLogic.mk_comp)
   |> mk_Trueprop_eq
   |> (fn goal => Goal.prove_sorry ctxt [] [] goal
-    (K (rtac ext 1 THEN rtac @{thm comp_apply_eq} 1 THEN rtac thm 1)))
-  |> Drule.export_without_context
+    (K (rtac @{thm ext} 1 THEN
+        unfold_thms_tac ctxt [o_apply, unfold_thms ctxt [o_apply] (mk_sym thm)] THEN
+        rtac refl 1)))
   |> Thm.close_derivation;
 
 
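
As its header comment says, mk_pointfree turns a pointwise equation f (g x) = h (k x) into its composition form f o g = h o k; the new proof applies ext and unfolds o_apply against the symmetric input theorem instead of going through comp_apply_eq. The two forms for a concrete instance, as a minimal Isabelle sketch, not part of this changeset (theory and lemma names are ad hoc); it uses only the rev_map fact from Main:

    theory Pointfree_Example
    imports Main
    begin

    (* pointwise form, the kind of equation handed to mk_pointfree *)
    lemma map_rev_pointwise: "map f (rev xs) = rev (map f xs)"
      by (simp add: rev_map)

    (* composition ("pointfree") form, the kind of theorem it produces *)
    lemma map_rev_pointfree: "map f o rev = rev o map f"
      by (rule ext) (simp add: rev_map)

    end
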
--- a/src/HOL/Tools/Function/old_size.ML	Thu Sep 18 15:23:23 2014 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,228 +0,0 @@
-(*  Title:      HOL/Tools/Function/old_size.ML
-    Author:     Stefan Berghofer, Florian Haftmann, TU Muenchen
-
-Size functions for old-style datatypes.
-*)
-
-structure Old_Size: sig end =
-struct
-
-fun plus (t1, t2) = Const (@{const_name Groups.plus},
-  HOLogic.natT --> HOLogic.natT --> HOLogic.natT) $ t1 $ t2;
-
-fun size_of_type f g h (T as Type (s, Ts)) =
-      (case f s of
-         SOME t => SOME t
-       | NONE => (case g s of
-           SOME size_name =>
-             SOME (list_comb (Const (size_name,
-               map (fn U => U --> HOLogic.natT) Ts @ [T] ---> HOLogic.natT),
-                 map (size_of_type' f g h) Ts))
-         | NONE => NONE))
-  | size_of_type _ _ h (TFree (s, _)) = h s
-and size_of_type' f g h T = (case size_of_type f g h T of
-      NONE => Abs ("x", T, HOLogic.zero)
-    | SOME t => t);
-
-fun is_poly thy (Old_Datatype_Aux.DtType (name, dts)) =
-      is_some (BNF_LFP_Size.size_of_global thy name) andalso exists (is_poly thy) dts
-  | is_poly _ _ = true;
-
-fun constrs_of thy name =
-  let
-    val {descr, index, ...} = Old_Datatype_Data.the_info thy name
-    val SOME (_, _, constrs) = AList.lookup op = descr index
-  in constrs end;
-
-val app = curry (list_comb o swap);
-
-fun prove_size_thms (info : Old_Datatype_Aux.info) new_type_names thy =
-  let
-    val {descr, rec_names, rec_rewrites, induct, ...} = info;
-    val l = length new_type_names;
-    val descr' = List.take (descr, l);
-    val tycos = map (#1 o snd) descr';
-  in
-    if forall (fn tyco => can (Sign.arity_sorts thy tyco) [HOLogic.class_size]) tycos then
-      (* nothing to do -- the "size" function is already defined *)
-      thy
-    else
-      let
-        val recTs = Old_Datatype_Aux.get_rec_types descr;
-        val (recTs1, recTs2) = chop l recTs;
-        val (_, (_, paramdts, _)) :: _ = descr;
-        val paramTs = map (Old_Datatype_Aux.typ_of_dtyp descr) paramdts;
-        val ((param_size_fs, param_size_fTs), f_names) = paramTs |>
-          map (fn T as TFree (s, _) =>
-            let
-              val name = "f" ^ unprefix "'" s;
-              val U = T --> HOLogic.natT
-            in
-              (((s, Free (name, U)), U), name)
-            end) |> split_list |>> split_list;
-        val param_size = AList.lookup op = param_size_fs;
-
-        val extra_rewrites = descr |> map (#1 o snd) |> distinct op = |>
-          map_filter (Option.map (fst o snd) o BNF_LFP_Size.size_of_global thy) |> flat;
-        val extra_size = Option.map fst o BNF_LFP_Size.size_of_global thy;
-
-        val (((size_names, size_fns), def_names), def_names') =
-          recTs1 |> map (fn T as Type (s, _) =>
-            let
-              val s' = "size_" ^ Long_Name.base_name s;
-              val s'' = Sign.full_bname thy s';
-            in
-              (s'',
-               (list_comb (Const (s'', param_size_fTs @ [T] ---> HOLogic.natT),
-                  map snd param_size_fs),
-                (s' ^ "_def", s' ^ "_overloaded_def")))
-            end) |> split_list ||>> split_list ||>> split_list;
-        val overloaded_size_fns = map HOLogic.size_const recTs1;
-
-        (* instantiation for primrec combinator *)
-        fun size_of_constr b size_ofp ((_, cargs), (_, cargs')) =
-          let
-            val Ts = map (Old_Datatype_Aux.typ_of_dtyp descr) cargs;
-            val k = length (filter Old_Datatype_Aux.is_rec_type cargs);
-            val (ts, _, _) = fold_rev (fn ((dt, dt'), T) => fn (us, i, j) =>
-              if Old_Datatype_Aux.is_rec_type dt then (Bound i :: us, i + 1, j + 1)
-              else
-                (if b andalso is_poly thy dt' then
-                   case size_of_type (K NONE) extra_size size_ofp T of
-                     NONE => us | SOME sz => sz $ Bound j :: us
-                 else us, i, j + 1))
-                  (cargs ~~ cargs' ~~ Ts) ([], 0, k);
-            val t =
-              if null ts andalso (not b orelse not (exists (is_poly thy) cargs'))
-              then HOLogic.zero
-              else foldl1 plus (ts @ [HOLogic.Suc_zero])
-          in
-            fold_rev (fn T => fn t' => Abs ("x", T, t')) (Ts @ replicate k HOLogic.natT) t
-          end;
-
-        val fs = maps (fn (_, (name, _, constrs)) =>
-          map (size_of_constr true param_size) (constrs ~~ constrs_of thy name)) descr;
-        val fs' = maps (fn (n, (name, _, constrs)) =>
-          map (size_of_constr (l <= n) (K NONE)) (constrs ~~ constrs_of thy name)) descr;
-        val fTs = map fastype_of fs;
-
-        val (rec_combs1, rec_combs2) = chop l (map (fn (T, rec_name) =>
-          Const (rec_name, fTs @ [T] ---> HOLogic.natT))
-            (recTs ~~ rec_names));
-
-        fun define_overloaded (def_name, eq) lthy =
-          let
-            val (Free (c, _), rhs) = (Logic.dest_equals o Syntax.check_term lthy) eq;
-            val (thm, lthy') = lthy
-              |> Local_Theory.define ((Binding.name c, NoSyn), ((Binding.name def_name, []), rhs))
-              |-> (fn (t, (_, thm)) => Spec_Rules.add Spec_Rules.Equational ([t], [thm]) #> pair thm);
-            val ctxt_thy = Proof_Context.init_global (Proof_Context.theory_of lthy');
-            val thm' = singleton (Proof_Context.export lthy' ctxt_thy) thm;
-          in (thm', lthy') end;
-
-        val ((size_def_thms, size_def_thms'), thy') =
-          thy
-          |> Sign.add_consts (map (fn (s, T) => (Binding.name (Long_Name.base_name s),
-              param_size_fTs @ [T] ---> HOLogic.natT, NoSyn))
-            (size_names ~~ recTs1))
-          |> Global_Theory.add_defs false
-            (map (Thm.no_attributes o apsnd (Logic.mk_equals o apsnd (app fs)))
-               (map Binding.name def_names ~~ (size_fns ~~ rec_combs1)))
-          ||> Class.instantiation (tycos, map dest_TFree paramTs, [HOLogic.class_size])
-          ||>> fold_map define_overloaded
-            (def_names' ~~ map Logic.mk_equals (overloaded_size_fns ~~ map (app fs') rec_combs1))
-          ||> Class.prove_instantiation_instance (K (Class.intro_classes_tac []))
-          ||> Local_Theory.exit_global;
-
-        val ctxt = Proof_Context.init_global thy';
-
-        val simpset1 =
-          put_simpset HOL_basic_ss ctxt addsimps @{thm Nat.add_0} :: @{thm Nat.add_0_right} ::
-            size_def_thms @ size_def_thms' @ rec_rewrites @ extra_rewrites;
-        val xs = map (fn i => "x" ^ string_of_int i) (1 upto length recTs2);
-
-        fun mk_unfolded_size_eq tab size_ofp fs (p as (_, T), r) =
-          HOLogic.mk_eq (app fs r $ Free p,
-            the (size_of_type tab extra_size size_ofp T) $ Free p);
-
-        fun prove_unfolded_size_eqs size_ofp fs =
-          if null recTs2 then []
-          else Old_Datatype_Aux.split_conj_thm (Goal.prove_sorry ctxt xs []
-            (HOLogic.mk_Trueprop (Old_Datatype_Aux.mk_conj (replicate l @{term True} @
-               map (mk_unfolded_size_eq (AList.lookup op =
-                   (new_type_names ~~ map (app fs) rec_combs1)) size_ofp fs)
-                 (xs ~~ recTs2 ~~ rec_combs2))))
-            (fn _ => (Old_Datatype_Aux.ind_tac induct xs THEN_ALL_NEW asm_simp_tac simpset1) 1));
-
-        val unfolded_size_eqs1 = prove_unfolded_size_eqs param_size fs;
-        val unfolded_size_eqs2 = prove_unfolded_size_eqs (K NONE) fs';
-
-        (* characteristic equations for size functions *)
-        fun gen_mk_size_eq p size_of size_ofp size_const T (cname, cargs) =
-          let
-            val Ts = map (Old_Datatype_Aux.typ_of_dtyp descr) cargs;
-            val tnames = Name.variant_list f_names (Old_Datatype_Prop.make_tnames Ts);
-            val ts = map_filter (fn (sT as (_, T), dt) =>
-              Option.map (fn sz => sz $ Free sT)
-                (if p dt then size_of_type size_of extra_size size_ofp T
-                 else NONE)) (tnames ~~ Ts ~~ cargs)
-          in
-            HOLogic.mk_Trueprop (HOLogic.mk_eq
-              (size_const $ list_comb (Const (cname, Ts ---> T),
-                 map2 (curry Free) tnames Ts),
-               if null ts then HOLogic.zero
-               else foldl1 plus (ts @ [HOLogic.Suc_zero])))
-          end;
-
-        val simpset2 =
-          put_simpset HOL_basic_ss ctxt
-            addsimps (rec_rewrites @ size_def_thms @ unfolded_size_eqs1);
-        val simpset3 =
-          put_simpset HOL_basic_ss ctxt
-            addsimps (rec_rewrites @ size_def_thms' @ unfolded_size_eqs2);
-
-        fun prove_size_eqs p size_fns size_ofp simpset =
-          maps (fn (((_, (_, _, constrs)), size_const), T) =>
-            map (fn constr => Drule.export_without_context (Goal.prove_sorry ctxt [] []
-              (gen_mk_size_eq p (AList.lookup op = (new_type_names ~~ size_fns))
-                 size_ofp size_const T constr)
-              (fn _ => simp_tac simpset 1))) constrs)
-            (descr' ~~ size_fns ~~ recTs1);
-
-        val size_eqns = prove_size_eqs (is_poly thy') size_fns param_size simpset2 @
-          prove_size_eqs Old_Datatype_Aux.is_rec_type overloaded_size_fns (K NONE) simpset3;
-
-        val ([(_, size_thms)], thy'') = thy'
-          |> Global_Theory.note_thmss ""
-            [((Binding.name "size",
-                [Simplifier.simp_add, Named_Theorems.add @{named_theorems nitpick_simp},
-                 Thm.declaration_attribute (fn thm =>
-                   Context.mapping (Code.add_default_eqn thm) I)]),
-              [(size_eqns, [])])];
-
-      in
-        fold2 (fn new_type_name => fn size_name =>
-            BNF_LFP_Size.register_size_global new_type_name size_name size_thms [])
-          new_type_names size_names thy''
-      end
-  end;
-
-fun add_size_thms _ (new_type_names as name :: _) thy =
-  let
-    val info as {descr, ...} = Old_Datatype_Data.the_info thy name;
-    val prefix = space_implode "_" (map Long_Name.base_name new_type_names);
-    val no_size = exists (fn (_, (_, _, constrs)) => exists (fn (_, cargs) => exists (fn dt =>
-      Old_Datatype_Aux.is_rec_type dt andalso
-        not (null (fst (Old_Datatype_Aux.strip_dtyp dt)))) cargs) constrs) descr
-  in
-    if no_size then thy
-    else
-      thy
-      |> Sign.add_path prefix
-      |> prove_size_thms info new_type_names
-      |> Sign.restore_naming thy
-  end;
-
-val _ = Context.>> (Context.map_theory (Old_Datatype_Data.interpretation add_size_thms));
-
-end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Old_Datatype/old_size.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -0,0 +1,228 @@
+(*  Title:      HOL/Tools/Old_Datatype/old_size.ML
+    Author:     Stefan Berghofer, Florian Haftmann, TU Muenchen
+
+Size functions for old-style datatypes.
+*)
+
+structure Old_Size: sig end =
+struct
+
+fun plus (t1, t2) = Const (@{const_name Groups.plus},
+  HOLogic.natT --> HOLogic.natT --> HOLogic.natT) $ t1 $ t2;
+
+fun size_of_type f g h (T as Type (s, Ts)) =
+      (case f s of
+         SOME t => SOME t
+       | NONE => (case g s of
+           SOME size_name =>
+             SOME (list_comb (Const (size_name,
+               map (fn U => U --> HOLogic.natT) Ts @ [T] ---> HOLogic.natT),
+                 map (size_of_type' f g h) Ts))
+         | NONE => NONE))
+  | size_of_type _ _ h (TFree (s, _)) = h s
+and size_of_type' f g h T = (case size_of_type f g h T of
+      NONE => Abs ("x", T, HOLogic.zero)
+    | SOME t => t);
+
+fun is_poly thy (Old_Datatype_Aux.DtType (name, dts)) =
+      is_some (BNF_LFP_Size.size_of_global thy name) andalso exists (is_poly thy) dts
+  | is_poly _ _ = true;
+
+fun constrs_of thy name =
+  let
+    val {descr, index, ...} = Old_Datatype_Data.the_info thy name
+    val SOME (_, _, constrs) = AList.lookup op = descr index
+  in constrs end;
+
+val app = curry (list_comb o swap);
+
+fun prove_size_thms (info : Old_Datatype_Aux.info) new_type_names thy =
+  let
+    val {descr, rec_names, rec_rewrites, induct, ...} = info;
+    val l = length new_type_names;
+    val descr' = List.take (descr, l);
+    val tycos = map (#1 o snd) descr';
+  in
+    if forall (fn tyco => can (Sign.arity_sorts thy tyco) [HOLogic.class_size]) tycos then
+      (* nothing to do -- the "size" function is already defined *)
+      thy
+    else
+      let
+        val recTs = Old_Datatype_Aux.get_rec_types descr;
+        val (recTs1, recTs2) = chop l recTs;
+        val (_, (_, paramdts, _)) :: _ = descr;
+        val paramTs = map (Old_Datatype_Aux.typ_of_dtyp descr) paramdts;
+        val ((param_size_fs, param_size_fTs), f_names) = paramTs |>
+          map (fn T as TFree (s, _) =>
+            let
+              val name = "f" ^ unprefix "'" s;
+              val U = T --> HOLogic.natT
+            in
+              (((s, Free (name, U)), U), name)
+            end) |> split_list |>> split_list;
+        val param_size = AList.lookup op = param_size_fs;
+
+        val extra_rewrites = descr |> map (#1 o snd) |> distinct op = |>
+          map_filter (Option.map (fst o snd) o BNF_LFP_Size.size_of_global thy) |> flat;
+        val extra_size = Option.map fst o BNF_LFP_Size.size_of_global thy;
+
+        val (((size_names, size_fns), def_names), def_names') =
+          recTs1 |> map (fn T as Type (s, _) =>
+            let
+              val s' = "size_" ^ Long_Name.base_name s;
+              val s'' = Sign.full_bname thy s';
+            in
+              (s'',
+               (list_comb (Const (s'', param_size_fTs @ [T] ---> HOLogic.natT),
+                  map snd param_size_fs),
+                (s' ^ "_def", s' ^ "_overloaded_def")))
+            end) |> split_list ||>> split_list ||>> split_list;
+        val overloaded_size_fns = map HOLogic.size_const recTs1;
+
+        (* instantiation for primrec combinator *)
+        fun size_of_constr b size_ofp ((_, cargs), (_, cargs')) =
+          let
+            val Ts = map (Old_Datatype_Aux.typ_of_dtyp descr) cargs;
+            val k = length (filter Old_Datatype_Aux.is_rec_type cargs);
+            val (ts, _, _) = fold_rev (fn ((dt, dt'), T) => fn (us, i, j) =>
+              if Old_Datatype_Aux.is_rec_type dt then (Bound i :: us, i + 1, j + 1)
+              else
+                (if b andalso is_poly thy dt' then
+                   case size_of_type (K NONE) extra_size size_ofp T of
+                     NONE => us | SOME sz => sz $ Bound j :: us
+                 else us, i, j + 1))
+                  (cargs ~~ cargs' ~~ Ts) ([], 0, k);
+            val t =
+              if null ts andalso (not b orelse not (exists (is_poly thy) cargs'))
+              then HOLogic.zero
+              else foldl1 plus (ts @ [HOLogic.Suc_zero])
+          in
+            fold_rev (fn T => fn t' => Abs ("x", T, t')) (Ts @ replicate k HOLogic.natT) t
+          end;
+
+        val fs = maps (fn (_, (name, _, constrs)) =>
+          map (size_of_constr true param_size) (constrs ~~ constrs_of thy name)) descr;
+        val fs' = maps (fn (n, (name, _, constrs)) =>
+          map (size_of_constr (l <= n) (K NONE)) (constrs ~~ constrs_of thy name)) descr;
+        val fTs = map fastype_of fs;
+
+        val (rec_combs1, rec_combs2) = chop l (map (fn (T, rec_name) =>
+          Const (rec_name, fTs @ [T] ---> HOLogic.natT))
+            (recTs ~~ rec_names));
+
+        fun define_overloaded (def_name, eq) lthy =
+          let
+            val (Free (c, _), rhs) = (Logic.dest_equals o Syntax.check_term lthy) eq;
+            val (thm, lthy') = lthy
+              |> Local_Theory.define ((Binding.name c, NoSyn), ((Binding.name def_name, []), rhs))
+              |-> (fn (t, (_, thm)) => Spec_Rules.add Spec_Rules.Equational ([t], [thm]) #> pair thm);
+            val ctxt_thy = Proof_Context.init_global (Proof_Context.theory_of lthy');
+            val thm' = singleton (Proof_Context.export lthy' ctxt_thy) thm;
+          in (thm', lthy') end;
+
+        val ((size_def_thms, size_def_thms'), thy') =
+          thy
+          |> Sign.add_consts (map (fn (s, T) => (Binding.name (Long_Name.base_name s),
+              param_size_fTs @ [T] ---> HOLogic.natT, NoSyn))
+            (size_names ~~ recTs1))
+          |> Global_Theory.add_defs false
+            (map (Thm.no_attributes o apsnd (Logic.mk_equals o apsnd (app fs)))
+               (map Binding.name def_names ~~ (size_fns ~~ rec_combs1)))
+          ||> Class.instantiation (tycos, map dest_TFree paramTs, [HOLogic.class_size])
+          ||>> fold_map define_overloaded
+            (def_names' ~~ map Logic.mk_equals (overloaded_size_fns ~~ map (app fs') rec_combs1))
+          ||> Class.prove_instantiation_instance (K (Class.intro_classes_tac []))
+          ||> Local_Theory.exit_global;
+
+        val ctxt = Proof_Context.init_global thy';
+
+        val simpset1 =
+          put_simpset HOL_basic_ss ctxt addsimps @{thm Nat.add_0} :: @{thm Nat.add_0_right} ::
+            size_def_thms @ size_def_thms' @ rec_rewrites @ extra_rewrites;
+        val xs = map (fn i => "x" ^ string_of_int i) (1 upto length recTs2);
+
+        fun mk_unfolded_size_eq tab size_ofp fs (p as (_, T), r) =
+          HOLogic.mk_eq (app fs r $ Free p,
+            the (size_of_type tab extra_size size_ofp T) $ Free p);
+
+        fun prove_unfolded_size_eqs size_ofp fs =
+          if null recTs2 then []
+          else Old_Datatype_Aux.split_conj_thm (Goal.prove_sorry ctxt xs []
+            (HOLogic.mk_Trueprop (Old_Datatype_Aux.mk_conj (replicate l @{term True} @
+               map (mk_unfolded_size_eq (AList.lookup op =
+                   (new_type_names ~~ map (app fs) rec_combs1)) size_ofp fs)
+                 (xs ~~ recTs2 ~~ rec_combs2))))
+            (fn _ => (Old_Datatype_Aux.ind_tac induct xs THEN_ALL_NEW asm_simp_tac simpset1) 1));
+
+        val unfolded_size_eqs1 = prove_unfolded_size_eqs param_size fs;
+        val unfolded_size_eqs2 = prove_unfolded_size_eqs (K NONE) fs';
+
+        (* characteristic equations for size functions *)
+        fun gen_mk_size_eq p size_of size_ofp size_const T (cname, cargs) =
+          let
+            val Ts = map (Old_Datatype_Aux.typ_of_dtyp descr) cargs;
+            val tnames = Name.variant_list f_names (Old_Datatype_Prop.make_tnames Ts);
+            val ts = map_filter (fn (sT as (_, T), dt) =>
+              Option.map (fn sz => sz $ Free sT)
+                (if p dt then size_of_type size_of extra_size size_ofp T
+                 else NONE)) (tnames ~~ Ts ~~ cargs)
+          in
+            HOLogic.mk_Trueprop (HOLogic.mk_eq
+              (size_const $ list_comb (Const (cname, Ts ---> T),
+                 map2 (curry Free) tnames Ts),
+               if null ts then HOLogic.zero
+               else foldl1 plus (ts @ [HOLogic.Suc_zero])))
+          end;
+
+        val simpset2 =
+          put_simpset HOL_basic_ss ctxt
+            addsimps (rec_rewrites @ size_def_thms @ unfolded_size_eqs1);
+        val simpset3 =
+          put_simpset HOL_basic_ss ctxt
+            addsimps (rec_rewrites @ size_def_thms' @ unfolded_size_eqs2);
+
+        fun prove_size_eqs p size_fns size_ofp simpset =
+          maps (fn (((_, (_, _, constrs)), size_const), T) =>
+            map (fn constr => Drule.export_without_context (Goal.prove_sorry ctxt [] []
+              (gen_mk_size_eq p (AList.lookup op = (new_type_names ~~ size_fns))
+                 size_ofp size_const T constr)
+              (fn _ => simp_tac simpset 1))) constrs)
+            (descr' ~~ size_fns ~~ recTs1);
+
+        val size_eqns = prove_size_eqs (is_poly thy') size_fns param_size simpset2 @
+          prove_size_eqs Old_Datatype_Aux.is_rec_type overloaded_size_fns (K NONE) simpset3;
+
+        val ([(_, size_thms)], thy'') = thy'
+          |> Global_Theory.note_thmss ""
+            [((Binding.name "size",
+                [Simplifier.simp_add, Named_Theorems.add @{named_theorems nitpick_simp},
+                 Thm.declaration_attribute (fn thm =>
+                   Context.mapping (Code.add_default_eqn thm) I)]),
+              [(size_eqns, [])])];
+
+      in
+        fold2 (fn new_type_name => fn size_name =>
+            BNF_LFP_Size.register_size_global new_type_name size_name size_thms [])
+          new_type_names size_names thy''
+      end
+  end;
+
+fun add_size_thms _ (new_type_names as name :: _) thy =
+  let
+    val info as {descr, ...} = Old_Datatype_Data.the_info thy name;
+    val prefix = space_implode "_" (map Long_Name.base_name new_type_names);
+    val no_size = exists (fn (_, (_, _, constrs)) => exists (fn (_, cargs) => exists (fn dt =>
+      Old_Datatype_Aux.is_rec_type dt andalso
+        not (null (fst (Old_Datatype_Aux.strip_dtyp dt)))) cargs) constrs) descr
+  in
+    if no_size then thy
+    else
+      thy
+      |> Sign.add_path prefix
+      |> prove_size_thms info new_type_names
+      |> Sign.restore_naming thy
+  end;
+
+val _ = Theory.setup (Old_Datatype_Data.interpretation add_size_thms);
+
+end;
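
The relocated old_size.ML derives characteristic equations for size functions on each old-style datatype; for the overloaded size, a non-recursive constructor gets size 0, and otherwise the sizes of the recursive arguments are summed with an extra Suc 0 for the constructor itself, the results being registered as simp rules. The same shape can be seen on lists; a minimal Isabelle sketch, not part of this changeset (theory name is ad hoc), using the size rules already available from Main purely to illustrate the shape:

    theory Size_Shape_Example
    imports Main
    begin

    (* non-recursive constructor: size 0;
       recursive constructor: sum of the recursive arguments' sizes plus Suc 0 *)
    lemma "size ([] :: 'a list) = 0" and "size (x # xs) = size xs + Suc 0"
      by simp_all

    end
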
--- a/src/HOL/Tools/SMT/cvc4_interface.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/SMT/cvc4_interface.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -1,5 +1,5 @@
 (*  Title:      HOL/Tools/SMT/cvc4_interface.ML
-    Author:     Sascha Boehme, TU Muenchen
+    Author:     Jasmin Blanchette, TU Muenchen
 
 Interface to CVC4 based on an extended version of SMT-LIB.
 *)
--- a/src/HOL/Tools/inductive_realizer.ML	Thu Sep 18 15:23:23 2014 +0200
+++ b/src/HOL/Tools/inductive_realizer.ML	Fri Sep 19 08:26:03 2014 +0200
@@ -8,7 +8,6 @@
 signature INDUCTIVE_REALIZER =
 sig
   val add_ind_realizers: string -> string list -> theory -> theory
-  val setup: theory -> theory
 end;
 
 structure InductiveRealizer : INDUCTIVE_REALIZER =
@@ -513,11 +512,10 @@
       | SOME (SOME sets') => subtract (op =) sets' sets)
   end I);
 
-val setup =
-  Attrib.setup @{binding ind_realizer}
-    ((Scan.option (Scan.lift (Args.$$$ "irrelevant") |--
-      Scan.option (Scan.lift (Args.colon) |--
-        Scan.repeat1 (Args.const {proper = true, strict = true})))) >> rlz_attrib)
-    "add realizers for inductive set";
+val _ = Theory.setup (Attrib.setup @{binding ind_realizer}
+  ((Scan.option (Scan.lift (Args.$$$ "irrelevant") |--
+    Scan.option (Scan.lift (Args.colon) |--
+      Scan.repeat1 (Args.const {proper = true, strict = true})))) >> rlz_attrib)
+  "add realizers for inductive set");
 
 end;