changeset 59295 bab968955925
author wenzelm
date Mon, 05 Jan 2015 23:33:39 +0100
parent 59284 d418ac9727f2 (diff)
parent 59294 126293918a37 (current diff)
child 59296 002d817b4c37

merged
--- a/src/Doc/Datatypes/Datatypes.thy	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/Doc/Datatypes/Datatypes.thy	Mon Jan 05 23:33:39 2015 +0100
@@ -124,9 +124,9 @@
 %\item Section \ref{sec:using-the-standard-ml-interface}, ``Using the Standard
 ML Interface,'' %describes the package's programmatic interface.
 
-\item Section \ref{sec:controlling-plugins}, ``Controlling Plugins,''
-is concerned with the package's interoperability with other Isabelle packages
-and tools, such as the code generator, Transfer, Lifting, and Quickcheck.
+\item Section \ref{sec:selecting-plugins}, ``Selecting Plugins,'' is concerned
+with the package's interoperability with other Isabelle packages and tools, such
+as the code generator, Transfer, Lifting, and Quickcheck.
 
 \item Section \ref{sec:known-bugs-and-limitations}, ``Known Bugs and
 Limitations,'' concludes with known open issues at the time of writing.
@@ -184,7 +184,7 @@
 
 text {*
 \noindent
-Here, @{const Truue}, @{const Faalse}, and @{const Perhaaps} have the type @{typ trool}.
+@{const Truue}, @{const Faalse}, and @{const Perhaaps} have the type @{typ trool}.
 
 Polymorphic types are possible, such as the following option type, modeled after
 its homologue from the @{theory Option} theory:
@@ -198,8 +198,8 @@
 
 text {*
 \noindent
-The constructors are @{text "None :: 'a option"} and
-@{text "Some :: 'a \<Rightarrow> 'a option"}.
+The constructors are @{text "None \<Colon> 'a option"} and
+@{text "Some \<Colon> 'a \<Rightarrow> 'a option"}.
 
 The next example has three type parameters:
 *}
@@ -209,7 +209,7 @@
 text {*
 \noindent
 The constructor is
-@{text "Triple :: 'a \<Rightarrow> 'b \<Rightarrow> 'c \<Rightarrow> ('a, 'b, 'c) triple"}.
+@{text "Triple \<Colon> 'a \<Rightarrow> 'b \<Rightarrow> 'c \<Rightarrow> ('a, 'b, 'c) triple"}.
 Unlike in Standard ML, curried constructors are supported. The uncurried variant
 is also possible:
 *}
@@ -342,15 +342,15 @@
 *}
 
 
-subsubsection {* Auxiliary Constants and Properties
-  \label{sssec:datatype-auxiliary-constants-and-properties} *}
+subsubsection {* Auxiliary Constants
+  \label{sssec:datatype-auxiliary-constants} *}
 
 text {*
 The @{command datatype} command introduces various constants in addition to
 the constructors. With each datatype are associated set functions, a map
 function, a relator, discriminators, and selectors, all of which can be given
 custom names. In the example below, the familiar names @{text null}, @{text hd},
-@{text tl}, @{text set}, @{text map}, and @{text list_all2}, override the
+@{text tl}, @{text set}, @{text map}, and @{text list_all2} override the
 default names @{text is_Nil}, @{text un_Cons1}, @{text un_Cons2},
 @{text set_list}, @{text map_list}, and @{text rel_list}:
 *}
@@ -367,7 +367,8 @@
     hide_type list
     hide_const Nil Cons case_list hd tl set map list_all2 rec_list size_list pred_list
 
-    context early begin
+    context early
+    begin
 (*>*)
     datatype (set: 'a) list =
       null: Nil
@@ -417,8 +418,8 @@
 
 The \keyw{where} clause at the end of the command specifies a default value for
 selectors applied to constructors on which they are not a priori specified.
-Here, it is used to ensure that the tail of the empty list is itself (instead of
-being left unspecified).
+In the example, it is used to ensure that the tail of the empty list is itself
+(instead of being left unspecified).
 
 Because @{const Nil} is nullary, it is also possible to use
 @{term "\<lambda>xs. xs = Nil"} as a discriminator. This is the default behavior
@@ -471,13 +472,14 @@
 \end{matharray}
 
 @{rail \<open>
-  @@{command datatype} target? @{syntax dt_options}? \<newline>
-    (@{syntax dt_name} '=' (@{syntax dt_ctor} + '|') \<newline>
-     @{syntax map_rel}? (@'where' (prop + '|'))? + @'and')
+  @@{command datatype} target? @{syntax dt_options}? @{syntax dt_spec}
+  ;
+  @{syntax_def dt_options}: '(' ((@{syntax plugins} | 'discs_sels') + ',') ')'
   ;
   @{syntax_def plugins}: 'plugins' ('only' | 'del') ':' (name +)
   ;
-  @{syntax_def dt_options}: '(' ((@{syntax plugins} | 'discs_sels') + ',') ')'
+  @{syntax_def dt_spec}: (@{syntax dt_name} '=' (@{syntax dt_ctor} + '|') \<newline>
+     @{syntax map_rel}? (@'where' (prop + '|'))? + @'and')
   ;
   @{syntax_def map_rel}: @'for' ((('map' | 'rel') ':' name) +)
 \<close>}
@@ -499,11 +501,8 @@
 \setlength{\itemsep}{0pt}
 
 \item
-The @{text "plugins"} option indicates which plugins should be enabled
-(@{text only}) or disabled (@{text del}). Some plugin names are defined
-as indirection to a group of sub-plugins (notably @{text "quickcheck"}
-based on @{text quickcheck_random}, @{text quickcheck_exhaustive}, \dots).
-By default, all plugins are enabled.
+The @{text plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
 
 \item
 The @{text "discs_sels"} option indicates that discriminators and selectors
@@ -626,10 +625,9 @@
   \label{ssec:datatype-generated-constants} *}
 
 text {*
-Given a datatype @{text "('a\<^sub>1, \<dots>, 'a\<^sub>m) t"}
-with $m > 0$ live type variables and $n$ constructors
-@{text "t.C\<^sub>1"}, \ldots, @{text "t.C\<^sub>n"}, the
-following auxiliary constants are introduced:
+Given a datatype @{text "('a\<^sub>1, \<dots>, 'a\<^sub>m) t"} with $m > 0$ live type variables
+and $n$ constructors @{text "t.C\<^sub>1"}, \ldots, @{text "t.C\<^sub>n"}, the following
+auxiliary constants are introduced:
 
 \medskip
 
@@ -656,7 +654,7 @@
 
 \noindent
 In addition, some of the plugins introduce their own constants
-(Section~\ref{sec:controlling-plugins}). The case combinator, discriminators,
+(Section~\ref{sec:selecting-plugins}). The case combinator, discriminators,
 and selectors are collectively called \emph{destructors}. The prefix
 ``@{text "t."}'' is an optional component of the names and is normally hidden.
 *}
@@ -689,7 +687,7 @@
 The full list of named theorems can be obtained as usual by entering the
 command \keyw{print_theorems} immediately after the datatype definition.
 This list includes theorems produced by plugins
-(Section~\ref{sec:controlling-plugins}), but normally excludes low-level
+(Section~\ref{sec:selecting-plugins}), but normally excludes low-level
 theorems that reveal internal constructions. To make these accessible, add
 the line
 *}
@@ -758,8 +756,8 @@
 \item[@{text "t."}\hthm{case} @{text "[simp, code]"}\rm:] ~ \\
 @{thm list.case(1)[no_vars]} \\
 @{thm list.case(2)[no_vars]} \\
-(The @{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 \item[@{text "t."}\hthm{case_cong} @{text "[fundef_cong]"}\rm:] ~ \\
 @{thm list.case_cong[no_vars]}
@@ -767,6 +765,9 @@
 \item[@{text "t."}\hthm{case_cong_weak} @{text "[cong]"}\rm:] ~ \\
 @{thm list.case_cong_weak[no_vars]}
 
+\item[@{text "t."}\hthm{case_distrib}\rm:] ~ \\
+@{thm list.case_distrib[no_vars]}
+
 \item[@{text "t."}\hthm{split}\rm:] ~ \\
 @{thm list.split[no_vars]}
 
@@ -795,19 +796,19 @@
 \item[@{text "t."}\hthm{sel} @{text "[simp, code]"}\rm:] ~ \\
 @{thm list.sel(1)[no_vars]} \\
 @{thm list.sel(2)[no_vars]} \\
-(The @{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 \item[@{text "t."}\hthm{collapse} @{text "[simp]"}\rm:] ~ \\
 @{thm list.collapse(1)[no_vars]} \\
 @{thm list.collapse(2)[no_vars]} \\
-(The @{text "[simp]"} attribute is exceptionally omitted for datatypes equipped
+The @{text "[simp]"} attribute is exceptionally omitted for datatypes equipped
 with a single nullary constructor, because a property of the form
-@{prop "x = C"} is not suitable as a simplification rule.)
+@{prop "x = C"} is not suitable as a simplification rule.
 
 \item[@{text "t."}\hthm{distinct_disc} @{text "[dest]"}\rm:] ~ \\
 These properties are missing for @{typ "'a list"} because there is only one
-proper discriminator. Had the datatype been introduced with a second
+proper discriminator. If the datatype had been introduced with a second
+discriminator called @{const nonnull}, they would have read as follows: \\[\jot]
 @{prop "null list \<Longrightarrow> \<not> nonnull list"} \\
 @{prop "nonnull list \<Longrightarrow> \<not> null list"}
@@ -832,13 +833,17 @@
 \item[@{text "t."}\hthm{case_eq_if}\rm:] ~ \\
 @{thm list.case_eq_if[no_vars]}
 
+\item[@{text "t."}\hthm{disc_eq_case}\rm:] ~ \\
+@{thm list.disc_eq_case(1)[no_vars]} \\
+@{thm list.disc_eq_case(2)[no_vars]}
+
 \end{description}
 \end{indentblock}
 
 \noindent
 In addition, equational versions of @{text t.disc} are registered with the
-@{text "[code]"} attribute. (The @{text "[code]"} attribute is set by the
-@{text code} plugin, Section~\ref{ssec:code-generator}.)
+@{text "[code]"} attribute. The @{text "[code]"} attribute is set by the
+@{text code} plugin (Section~\ref{ssec:code-generator}).
 *}
 
 
@@ -855,32 +860,32 @@
 
 \item[@{text "t."}\hthm{case_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.case_transfer[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \item[@{text "t."}\hthm{sel_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 This property is missing for @{typ "'a list"} because there is no common
 selector to all constructors. \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \item[@{text "t."}\hthm{ctr_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.ctr_transfer(1)[no_vars]} \\
 @{thm list.ctr_transfer(2)[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \item[@{text "t."}\hthm{disc_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.disc_transfer(1)[no_vars]} \\
 @{thm list.disc_transfer(2)[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \item[@{text "t."}\hthm{set} @{text "[simp, code]"}\rm:] ~ \\
 @{thm list.set(1)[no_vars]} \\
 @{thm list.set(2)[no_vars]} \\
-(The @{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 \item[@{text "t."}\hthm{set_cases} @{text "[consumes 1, cases set: set\<^sub>i_t]"}\rm:] ~ \\
 @{thm list.set_cases[no_vars]}
@@ -895,8 +900,8 @@
 \item[@{text "t."}\hthm{map} @{text "[simp, code]"}\rm:] ~ \\
 @{thm list.map(1)[no_vars]} \\
 @{thm list.map(2)[no_vars]} \\
-(The @{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 \item[@{text "t."}\hthm{map_disc_iff} @{text "[simp]"}\rm:] ~ \\
 @{thm list.map_disc_iff[no_vars]}
@@ -927,9 +932,9 @@
 
 \noindent
 In addition, equational versions of @{text t.rel_inject} and @{text
-rel_distinct} are registered with the @{text "[code]"} attribute. (The
-@{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+t.rel_distinct} are registered with the @{text "[code]"} attribute. The
+@{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 The second subgroup consists of more abstract properties of the set functions,
 the map function, and the relator:
@@ -948,8 +953,8 @@
 
 \item[@{text "t."}\hthm{set_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.set_transfer[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \item[@{text "t."}\hthm{map_cong0}\rm:] ~ \\
 @{thm list.map_cong0[no_vars]}
@@ -971,13 +976,13 @@
 
 \item[@{text "t."}\hthm{map_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.map_transfer[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \item[@{text "t."}\hthm{rel_compp} @{text "[relator_distr]"}\rm:] ~ \\
 @{thm list.rel_compp[no_vars]} \\
-(The @{text "[relator_distr]"} attribute is set by the @{text lifting} plugin,
-Section~\ref{ssec:lifting}.)
+The @{text "[relator_distr]"} attribute is set by the @{text lifting} plugin
+(Section~\ref{ssec:lifting}).
 
 \item[@{text "t."}\hthm{rel_conversep}\rm:] ~ \\
 @{thm list.rel_conversep[no_vars]}
@@ -994,13 +999,13 @@
 
 \item[@{text "t."}\hthm{rel_mono} @{text "[mono, relator_mono]"}\rm:] ~ \\
 @{thm list.rel_mono[no_vars]} \\
-(The @{text "[relator_mono]"} attribute is set by the @{text lifting} plugin,
-Section~\ref{ssec:lifting}.)
+The @{text "[relator_mono]"} attribute is set by the @{text lifting} plugin
+(Section~\ref{ssec:lifting}).
 
 \item[@{text "t."}\hthm{rel_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.rel_transfer[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \end{description}
 \end{indentblock}
@@ -1032,16 +1037,16 @@
 \item[@{text "t."}\hthm{rec} @{text "[simp, code]"}\rm:] ~ \\
 @{thm list.rec(1)[no_vars]} \\
 @{thm list.rec(2)[no_vars]} \\
-(The @{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 \item[@{text "t."}\hthm{rec_o_map}\rm:] ~ \\
 @{thm list.rec_o_map[no_vars]}
 
 \item[@{text "t."}\hthm{rec_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm list.rec_transfer[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \end{description}
 \end{indentblock}
@@ -1052,7 +1057,7 @@
 \begin{indentblock}
 \begin{description}
 
-\item[@{text "t."}\hthm{simps} = @{text t.inject} @{text t.distinct} @{text t.case} @{text t.rec} @{text t.map} @{text t.rel_inject}] ~ \\
+\item[@{text "t."}\hthm{simps}] = @{text t.inject} @{text t.distinct} @{text t.case} @{text t.rec} @{text t.map} @{text t.rel_inject} \\
 @{text t.rel_distinct} @{text t.set}
 
 \end{description}
@@ -1100,7 +1105,7 @@
 @{const size} in terms of the generic function @{text "t.size_t"}.
 Moreover, the new function considers nested occurrences of a value, in the nested
 recursive case. The old behavior can be obtained by disabling the @{text size}
-plugin (Section~\ref{sec:controlling-plugins}) and instantiating the
+plugin (Section~\ref{sec:selecting-plugins}) and instantiating the
 @{text size} type class manually.
 
 \item \emph{The internal constructions are completely different.} Proof texts
@@ -1145,7 +1150,7 @@
 text {*
 Recursive functions over datatypes can be specified using the @{command primrec}
 command, which supports primitive recursion, or using the more general
-\keyw{fun} and \keyw{function} commands. Here, the focus is on
+\keyw{fun} and \keyw{function} commands. In this tutorial, the focus is on
 @{command primrec}; the other two commands are described in a separate
 tutorial @{cite "isabelle-function"}.
 
@@ -1223,7 +1228,7 @@
 
 text {* \blankline *}
 
-    primrec (*<*)(in early) (*>*)tfold :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a, 'b) tlist \<Rightarrow> 'b" where
+    primrec (*<*)(in early) (transfer) (*>*)tfold :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a, 'b) tlist \<Rightarrow> 'b" where
       "tfold _ (TNil y) = y" |
       "tfold f (TCons x xs) = f x (tfold f xs)"
 
@@ -1393,7 +1398,8 @@
   \label{sssec:primrec-nested-as-mutual-recursion} *}
 
 (*<*)
-    locale n2m begin
+    locale n2m
+    begin
 (*>*)
 
 text {*
@@ -1482,10 +1488,10 @@
 \end{matharray}
 
 @{rail \<open>
-  @@{command primrec} target? @{syntax pr_option}? fixes \<newline>
+  @@{command primrec} target? @{syntax pr_options}? fixes \<newline>
   @'where' (@{syntax pr_equation} + '|')
   ;
-  @{syntax_def pr_option}: '(' 'nonexhaustive' ')'
+  @{syntax_def pr_options}: '(' ((@{syntax plugins} | 'nonexhaustive' | 'transfer') + ',') ')'
   ;
   @{syntax_def pr_equation}: thmdecl? prop
 \<close>}
@@ -1501,15 +1507,25 @@
 \synt{thmdecl} denotes an optional name for the formula that follows, and
 \synt{prop} denotes a HOL proposition @{cite "isabelle-isar-ref"}.
 
-The optional target is optionally followed by the following option:
+The optional target is optionally followed by a combination of the following
+options:
 
 \begin{itemize}
 \setlength{\itemsep}{0pt}
 
 \item
-The @{text "nonexhaustive"} option indicates that the functions are not
+The @{text plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
+
+\item
+The @{text nonexhaustive} option indicates that the functions are not
 necessarily specified for all constructors. It can be used to suppress the
 warning that is normally emitted when some constructors are missing (see the
 example below).
+
+\item
+The @{text transfer} option indicates that an unconditional transfer rule
+should be generated and proved @{text "by transfer_prover"}. The
+@{text "[transfer_rule]"} attribute is set on the generated theorem.
 \end{itemize}
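+
+\noindent
+The following definition, introduced purely as an example, relies on
+@{text nonexhaustive} to leave the @{const Perhaaps} case of @{typ trool}
+unspecified without triggering a warning:
+*}
+
+    primrec (nonexhaustive) to_bool :: "trool \<Rightarrow> bool" where
+      "to_bool Truue = True" |
+      "to_bool Faalse = False"
+
+text {*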
 
 %%% TODO: elaborate on format of the equations
@@ -1517,21 +1533,50 @@
 *}
 
 
-(*
 subsection {* Generated Theorems
   \label{ssec:primrec-generated-theorems} *}
 
+(*<*)
+    context early
+    begin
+(*>*)
+
 text {*
-%  * synthesized nonrecursive definition
-%  * user specification is rederived from it, exactly as entered
-%
-%  * induct
-%    * mutualized
-%    * without some needless induction hypotheses if not used
-%  * rec
-%    * mutualized
+The @{command primrec} command generates the following properties (listed
+for @{const tfold}):
+
+\begin{indentblock}
+\begin{description}
+
+\item[@{text "f."}\hthm{simps} @{text "[simp, code]"}\rm:] ~ \\
+@{thm tfold.simps(1)[no_vars]} \\
+@{thm tfold.simps(2)[no_vars]} \\
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
+
+\item[@{text "f."}\hthm{transfer} @{text "[transfer_rule]"}\rm:] ~ \\
+@{thm tfold.transfer[no_vars]} \\
+This theorem is generated by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}) for functions declared with the @{text transfer}
+option enabled.
+
+\item[@{text "f."}\hthm{induct} @{text "[case_names C\<^sub>1 \<dots> C\<^sub>n]"}\rm:] ~ \\
+This induction rule is generated for nested-as-mutual recursive functions
+(Section~\ref{sssec:primrec-nested-as-mutual-recursion}).
+
+\item[@{text "f\<^sub>1_\<dots>_f\<^sub>m."}\hthm{induct} @{text "[case_names C\<^sub>1 \<dots> C\<^sub>n]"}\rm:] ~ \\
+This induction rule is generated for nested-as-mutual recursive functions
+(Section~\ref{sssec:primrec-nested-as-mutual-recursion}). Given $m > 1$ mutually
+recursive functions, this rule can be used to prove $m$ properties
+simultaneously.
+
+\end{description}
+\end{indentblock}
 *}
-*)
+
+(*<*)
+    end
+(*>*)
 
 
 subsection {* Recursive Default Values for Selectors
@@ -1744,8 +1789,7 @@
 \end{matharray}
 
 @{rail \<open>
-  @@{command codatatype} target? \<newline>
-    (@{syntax dt_name} '=' (@{syntax dt_ctor} + '|') + @'and')
+  @@{command codatatype} target? @{syntax dt_options}? @{syntax dt_spec}
 \<close>}
 
 \medskip
@@ -1856,8 +1900,8 @@
 
 \item[@{text "t."}\hthm{corec_code} @{text "[code]"}\rm:] ~ \\
 @{thm llist.corec_code[no_vars]} \\
-(The @{text "[code]"} attribute is set by the @{text code} plugin,
-Section~\ref{ssec:code-generator}.)
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
 
 \item[@{text "t."}\hthm{corec_disc}\rm:] ~ \\
 @{thm llist.corec_disc(1)[no_vars]} \\
@@ -1876,8 +1920,8 @@
 
 \item[@{text "t."}\hthm{corec_transfer} @{text "[transfer_rule]"}\rm:] ~ \\
 @{thm llist.corec_transfer[no_vars]} \\
-(The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin,
-Section~\ref{ssec:transfer}.)
+The @{text "[transfer_rule]"} attribute is set by the @{text transfer} plugin
+(Section~\ref{ssec:transfer}).
 
 \end{description}
 \end{indentblock}
@@ -1888,7 +1932,7 @@
 \begin{indentblock}
 \begin{description}
 
-\item[@{text "t."}\hthm{simps} = @{text t.inject} @{text t.distinct} @{text t.case} @{text t.corec_disc_iff}] @{text t.corec_sel} ~ \\
+\item[@{text "t."}\hthm{simps}] = @{text t.inject} @{text t.distinct} @{text t.case} @{text t.corec_disc_iff} @{text t.corec_sel} \\
 @{text t.map} @{text t.rel_inject} @{text t.rel_distinct} @{text t.set}
 
 \end{description}
@@ -1902,9 +1946,9 @@
 text {*
 Corecursive functions can be specified using the @{command primcorec} and
 \keyw{prim\-corec\-ursive} commands, which support primitive corecursion, or
-using the more general \keyw{partial_function} command. Here, the focus is on
-the first two. More examples can be found in the directory
-\verb|~~/src/HOL/Datatype_Examples|.
+using the more general \keyw{partial_function} command. In this tutorial, the
+focus is on the first two. More examples can be found in the directory
+\verb|~~/src/HOL/Datatype_|\allowbreak\verb|Examples|.
 
 Whereas recursive functions consume datatypes one constructor at a time,
 corecursive functions construct codatatypes one constructor at a time.
@@ -1964,7 +2008,7 @@
 the right of the equal sign or in a conditional expression:
 *}
 
-    primcorec literate :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a llist" where
+    primcorec (*<*)(transfer) (*>*)literate :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a llist" where
       "literate g x = LCons x (literate g (g x))"
 
 text {* \blankline *}
@@ -2282,7 +2326,7 @@
 without having first to discharge @{term "n mod (4\<Colon>int) \<noteq> 0"},
 @{term "n mod (4\<Colon>int) \<noteq> 1"}, and
 @{term "n mod (4\<Colon>int) \<noteq> 2"}.
-The price to pay for this elegance is that we must discharge exclusivity proof
+The price to pay for this elegance is that we must discharge exclusiveness proof
 obligations, one for each pair of conditions
 @{term "(n mod (4\<Colon>int) = i, n mod (4\<Colon>int) = j)"}
 with @{term "i < j"}. If we prefer not to discharge any obligations, we can
@@ -2423,9 +2467,9 @@
 
 @{rail \<open>
   (@@{command primcorec} | @@{command primcorecursive}) target? \<newline>
-    @{syntax pcr_option}? fixes @'where' (@{syntax pcr_formula} + '|')
+    @{syntax pcr_options}? fixes @'where' (@{syntax pcr_formula} + '|')
   ;
-  @{syntax_def pcr_option}: '(' ('sequential' | 'exhaustive') ')'
+  @{syntax_def pcr_options}: '(' ((@{syntax plugins} | 'sequential' | 'exhaustive' | 'transfer') + ',') ')'
   ;
   @{syntax_def pcr_formula}: thmdecl? prop (@'of' (term * ))?
 \<close>}
@@ -2441,20 +2485,30 @@
 \synt{thmdecl} denotes an optional name for the formula that follows, and
 \synt{prop} denotes a HOL proposition @{cite "isabelle-isar-ref"}.
 
-The optional target is optionally followed by one or both of the following
+The optional target is optionally followed by a combination of the following
 options:
 
 \begin{itemize}
 \setlength{\itemsep}{0pt}
 
 \item
-The @{text "sequential"} option indicates that the conditions in specifications
+The @{text plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
+
+\item
+The @{text sequential} option indicates that the conditions in specifications
 expressed using the constructor or destructor view are to be interpreted
 sequentially.
 
 \item
-The @{text "exhaustive"} option indicates that the conditions in specifications
+The @{text exhaustive} option indicates that the conditions in specifications
 expressed using the constructor or destructor view cover all possible cases.
+This generally gives rise to an additional proof obligation.
+
+\item
+The @{text transfer} option indicates that an unconditional transfer rule
+should be generated and proved @{text "by transfer_prover"}. The
+@{text "[transfer_rule]"} attribute is set on the generated theorem.
 \end{itemize}
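+
+\noindent
+As a contrived example (the constant @{text lconst} is not part of any
+library), the @{text transfer} option can be specified as follows:
+*}
+
+    primcorec (transfer) lconst :: "'a \<Rightarrow> 'a llist" where
+      "lconst a = LCons a (lconst a)"
+
+text {*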
 
 The @{command primcorec} command is an abbreviation for @{command
@@ -2466,10 +2520,101 @@
 *}
 
 
-(*
 subsection {* Generated Theorems
   \label{ssec:primcorec-generated-theorems} *}
-*)
+
+text {*
+The @{command primcorec} and @{command primcorecursive} commands generate the
+following properties (listed for @{const literate}):
+
+\begin{indentblock}
+\begin{description}
+
+\item[@{text "f."}\hthm{code} @{text "[code]"}\rm:] ~ \\
+@{thm literate.code[no_vars]} \\
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
+
+\item[@{text "f."}\hthm{ctr}\rm:] ~ \\
+@{thm literate.ctr[no_vars]}
+
+\item[@{text "f."}\hthm{disc} @{text "[simp, code]"}\rm:] ~ \\
+@{thm literate.disc[no_vars]} \\
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}). The @{text "[simp]"} attribute is set only
+for functions for which @{text f.disc_iff} is not available.
+
+\item[@{text "f."}\hthm{disc_iff} @{text "[simp]"}\rm:] ~ \\
+@{thm literate.disc_iff[no_vars]} \\
+This property is generated only for functions declared with the
+@{text exhaustive} option or whose conditions are trivially exhaustive.
+
+\item[@{text "f."}\hthm{sel} @{text "[simp, code]"}\rm:] ~ \\
+@{thm literate.sel[no_vars]} \\
+The @{text "[code]"} attribute is set by the @{text code} plugin
+(Section~\ref{ssec:code-generator}).
+
+\item[@{text "f."}\hthm{exclude}\rm:] ~ \\
+These properties are missing for @{const literate} because no exclusiveness
+proof obligations arose. In general, the properties correspond to the
+discharged proof obligations.
+
+\item[@{text "f."}\hthm{exhaust}\rm:] ~ \\
+This property is missing for @{const literate} because no exhaustiveness
+proof obligation arose. In general, the property corresponds to the discharged
+proof obligation.
+
+\item[\begin{tabular}{@ {}l@ {}}
+  @{text "f."}\hthm{coinduct} @{text "[consumes m, case_names t\<^sub>1 \<dots> t\<^sub>m,"} \\
+  \phantom{@{text "f."}\hthm{coinduct} @{text "["}}@{text "case_conclusion D\<^sub>1 \<dots>
+  D\<^sub>n]"}\rm:
+\end{tabular}] ~ \\
+This coinduction rule is generated for nested-as-mutual corecursive functions
+(Section~\ref{sssec:primcorec-nested-as-mutual-corecursion}).
+
+\item[\begin{tabular}{@ {}l@ {}}
+  @{text "f."}\hthm{coinduct_strong} @{text "[consumes m, case_names t\<^sub>1 \<dots> t\<^sub>m,"} \\
+  \phantom{@{text "f."}\hthm{coinduct_strong} @{text "["}}@{text "case_conclusion D\<^sub>1 \<dots>
+  D\<^sub>n]"}\rm:
+\end{tabular}] ~ \\
+This coinduction rule is generated for nested-as-mutual corecursive functions
+(Section~\ref{sssec:primcorec-nested-as-mutual-corecursion}).
+
+\item[\begin{tabular}{@ {}l@ {}}
+  @{text "f\<^sub>1_\<dots>_f\<^sub>m."}\hthm{coinduct} @{text "[case_names t\<^sub>1 \<dots> t\<^sub>m,"} \\
+  \phantom{@{text "f."}\hthm{coinduct} @{text "["}}@{text "case_conclusion D\<^sub>1 \<dots>
+  D\<^sub>n]"}\rm:
+\end{tabular}] ~ \\
+This coinduction rule is generated for nested-as-mutual corecursive functions
+(Section~\ref{sssec:primcorec-nested-as-mutual-corecursion}). Given $m > 1$
+mutually corecursive functions, this rule can be used to prove $m$ properties
+simultaneously.
+
+\item[\begin{tabular}{@ {}l@ {}}
+  @{text "f\<^sub>1_\<dots>_f\<^sub>m."}\hthm{coinduct_strong} @{text "[case_names t\<^sub>1 \<dots> t\<^sub>m,"} \\
+  \phantom{@{text "f."}\hthm{coinduct_strong} @{text "["}}@{text "case_conclusion D\<^sub>1 \<dots>
+  D\<^sub>n]"}\rm:
+\end{tabular}] ~ \\
+This coinduction rule is generated for nested-as-mutual corecursive functions
+(Section~\ref{sssec:primcorec-nested-as-mutual-corecursion}). Given $m > 1$
+mutually corecursive functions, this rule can be used to prove $m$ properties
+simultaneously.
+
+\end{description}
+\end{indentblock}
+
+\noindent
+For convenience, @{command primcorec} and @{command primcorecursive} also
+provide the following collection:
+
+\begin{indentblock}
+\begin{description}
+
+\item[@{text "f."}\hthm{simps}] = @{text f.disc_iff} (or @{text f.disc}) @{text t.sel}
+
+\end{description}
+\end{indentblock}
+*}
 
 
 (*
@@ -2629,10 +2774,9 @@
 reasoning abstractly about an arbitrary BNF. The @{command bnf_axiomatization}
 command below introduces a type @{text "('a, 'b, 'c) F"}, three set constants,
 a map function, a relator, and a nonemptiness witness that depends only on
-@{typ 'a}. (The type @{text "'a \<Rightarrow> ('a, 'b, 'c) F"} of
-the witness can be read as an implication: If we have a witness for @{typ 'a},
-we can construct a witness for @{text "('a, 'b, 'c) F"}.) The BNF
-properties are postulated as axioms.
+@{typ 'a}. The type @{text "'a \<Rightarrow> ('a, 'b, 'c) F"} of the witness can be read
+as an implication: Given a witness for @{typ 'a}, we can construct a witness for
+@{text "('a, 'b, 'c) F"}. The BNF properties are postulated as axioms.
 *}
 
     bnf_axiomatization (setA: 'a, setB: 'b, setC: 'c) F
@@ -2674,6 +2818,9 @@
 \synt{type} denotes a HOL type, and \synt{term} denotes a HOL term
 @{cite "isabelle-isar-ref"}.
 
+The @{syntax plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
+
 %%% TODO: elaborate on proof obligations
 *}
 
@@ -2687,7 +2834,7 @@
 \end{matharray}
 
 @{rail \<open>
-  @@{command bnf_axiomatization} target? @{syntax plugins}? \<newline>
+  @@{command bnf_axiomatization} target? ('(' @{syntax plugins} ')')? \<newline>
     @{syntax tyargs}? name @{syntax wit_types}? \<newline>
     mixfix? @{syntax map_rel}?
   ;
@@ -2706,6 +2853,9 @@
 (@{typ 'a}, @{typ 'b}, \ldots), and \synt{mixfix} denotes the usual
 parenthesized mixfix notation @{cite "isabelle-isar-ref"}.
 
+The @{syntax plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
+
 Type arguments are live by default; they can be marked as dead by entering
 @{text dead} in front of the type variable (e.g., @{text "(dead 'a)"})
 instead of an identifier for the corresponding set function. Witnesses can be
@@ -2811,8 +2961,8 @@
 *)
 
 
-section {* Controlling Plugins
-  \label{sec:controlling-plugins} *}
+section {* Selecting Plugins
+  \label{sec:selecting-plugins} *}
 
 text {*
 Plugins extend the (co)datatype package to interoperate with other Isabelle
@@ -2943,6 +3093,10 @@
 
 \end{description}
 \end{indentblock}
+
+For @{command primrec}, @{command primcorec}, and @{command primcorecursive},
+the \hthm{transfer} plugin generates the @{text "f.transfer"} property whenever
+the @{text transfer} option is enabled.
 *}
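+
+text {*
+\noindent
+The following declaration, in which the type @{text wrap} serves no purpose
+other than illustration, switches the plugin off for a single datatype:
+*}
+
+    datatype (plugins del: transfer) 'a wrap = Wrap 'a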
 
 
@@ -2976,8 +3130,10 @@
   \label{ssec:quickcheck} *}
 
 text {*
-The integration of datatypes with Quickcheck is accomplished by a number of
-plugins that instantiate specific type classes. The plugins are listed below:
+The integration of datatypes with Quickcheck is accomplished by the
+\hthm{quick\-check} plugin. It combines a number of subplugins that instantiate
+specific type classes. The subplugins can be enabled or disabled individually.
+They are listed below:
 
 \begin{indentblock}
 \hthm{quickcheck_random} \\
--- a/src/Doc/Datatypes/document/root.tex	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/Doc/Datatypes/document/root.tex	Mon Jan 05 23:33:39 2015 +0100
@@ -59,14 +59,11 @@
 
 \title{%\includegraphics[scale=0.5]{isabelle_hol} \\[4ex]
 Defining (Co)datatypes in Isabelle/HOL}
-\author{\hbox{} \\
-Jasmin Christian Blanchette,
+\author{Jasmin Christian Blanchette,
 Martin Desharnais, \\
 Lorenz Panny,
 Andrei Popescu, and
-Dmitriy Traytel \\
-{\normalsize Fakult\"at f\"ur Informatik, Technische Universit\"at M\"unchen} \\
-\hbox{}}
+Dmitriy Traytel}
 
 \urlstyle{tt}
 
--- a/src/HOL/Tools/BNF/bnf_def.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_def.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -829,7 +829,7 @@
 
     val bnf_T = Morphism.typ phi T_rhs;
     val bad_args = Term.add_tfreesT bnf_T [];
-    val _ = if null bad_args then () else error ("Locally fixed type arguments " ^
+    val _ = null bad_args orelse error ("Locally fixed type arguments " ^
       commas_quote (map (Syntax.string_of_typ no_defs_lthy o TFree) bad_args));
 
     val bnf_sets =
--- a/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -64,6 +64,9 @@
      fp_bnf_sugar: fp_bnf_sugar,
      fp_co_induct_sugar: fp_co_induct_sugar};
 
+  val co_induct_of: 'a list -> 'a
+  val strong_co_induct_of: 'a list -> 'a
+
   val morph_fp_sugar: morphism -> fp_sugar -> fp_sugar
   val transfer_fp_sugar: theory -> fp_sugar -> fp_sugar
   val fp_sugar_of: Proof.context -> string -> fp_sugar option
@@ -76,8 +79,7 @@
   val register_fp_sugars_raw: fp_sugar list -> local_theory -> local_theory
   val register_fp_sugars: (string -> bool) -> fp_sugar list -> local_theory -> local_theory
 
-  val co_induct_of: 'a list -> 'a
-  val strong_co_induct_of: 'a list -> 'a
+  val mk_parametricity_goal: Proof.context -> term list -> term -> term -> term
 
   val flat_corec_preds_predsss_gettersss: 'a list -> 'a list list list -> 'a list list list ->
     'a list
@@ -241,6 +243,9 @@
    fp_bnf_sugar: fp_bnf_sugar,
    fp_co_induct_sugar: fp_co_induct_sugar};
 
+fun co_induct_of (i :: _) = i;
+fun strong_co_induct_of [_, s] = s;
+
 fun morph_fp_bnf_sugar phi ({map_thms, map_disc_iffs, map_selss, rel_injects, rel_distincts,
     rel_sels, rel_intros, rel_cases, set_thms, set_selssss, set_introssss,
     set_cases} : fp_bnf_sugar) =
@@ -326,9 +331,6 @@
 val fp_sugars_of = fp_sugars_of_generic o Context.Proof;
 val fp_sugars_of_global = fp_sugars_of_generic o Context.Theory;
 
-fun co_induct_of (i :: _) = i;
-fun strong_co_induct_of [_, s] = s;
-
 structure FP_Sugar_Plugin = Plugin(type T = fp_sugar list);
 
 fun fp_sugars_interpretation name f =
@@ -1018,7 +1020,7 @@
                |> map Thm.close_derivation)
         end;
 
-      val code_attrs = if plugins @{plugin code} then [Code.add_default_eqn_attrib] else [];
+      val code_attrs = if plugins code_plugin then [Code.add_default_eqn_attrib] else [];
 
       val anonymous_notes =
         [(rel_code_thms, code_attrs @ nitpicksimp_attrs)]
@@ -1453,7 +1455,7 @@
 
     val rec_thmss = mk_rec_thmss (the rec_args_typess) recs rec_defs ctor_rec_thms;
 
-    val code_attrs = if plugins @{plugin code} then [Code.add_default_eqn_attrib] else [];
+    val code_attrs = if plugins code_plugin then [Code.add_default_eqn_attrib] else [];
   in
     ((induct_thms, induct_thm, mk_induct_attrs ctrss),
      (rec_thmss, code_attrs @ nitpicksimp_attrs @ simp_attrs))
@@ -2079,7 +2081,7 @@
     val fpTs = map (domain_type o fastype_of) dtors;
     val fpBTs = map B_ify_T fpTs;
 
-    val code_attrs = if plugins @{plugin code} then [Code.add_default_eqn_attrib] else [];
+    val code_attrs = if plugins code_plugin then [Code.add_default_eqn_attrib] else [];
 
     val ctr_Tsss = map (map (map (Term.typ_subst_atomic (Xs ~~ fpTs)))) ctrXs_Tsss;
     val ns = map length ctr_Tsss;
--- a/src/HOL/Tools/BNF/bnf_fp_def_sugar_tactics.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_fp_def_sugar_tactics.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -467,10 +467,9 @@
   HEADGOAL (rtac (cterm_instantiate_pos [SOME ct1] exhaust) THEN_ALL_NEW
     rtac (cterm_instantiate_pos [SOME ct2] exhaust) THEN_ALL_NEW
       hyp_subst_tac ctxt) THEN
-  unfold_thms_tac ctxt (sels @ rel_injects @ rel_eqs @ @{thms simp_thms(6,7,8,11,12,15,16,21,22,24)} @
-    ((discs @ distincts) RL @{thms iffD2[OF eq_True] iffD2[OF eq_False]}) @
-    (rel_injects RL @{thms iffD2[OF eq_True]}) @
-    (rel_distincts RL @{thms iffD2[OF eq_False]})) THEN
+  unfold_thms_tac ctxt (sels @ rel_injects @ rel_eqs @
+    @{thms simp_thms(6,7,8,11,12,15,16,21,22,24)} @ ((discs @ distincts) RL [eqTrueI, eqFalseI]) @
+    (rel_injects RL [eqTrueI]) @ (rel_distincts RL [eqFalseI])) THEN
   TRYALL (resolve_tac [TrueI, refl]);
 
 fun mk_sel_transfer_tac ctxt n sel_defs case_transfer =
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/BNF/bnf_fp_rec_sugar_transfer.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -0,0 +1,172 @@
+(*  Title:      HOL/Tools/BNF/bnf_fp_rec_sugar_transfer.ML
+    Author:     Martin Desharnais, TU Muenchen
+    Copyright   2014
+
+Parametricity of primitively (co)recursive functions.
+*)
+
+signature BNF_FP_REC_SUGAR_TRANSFER =
+sig
+  val lfp_rec_sugar_transfer_interpretation: BNF_FP_Rec_Sugar_Util.fp_rec_sugar -> Proof.context ->
+    Proof.context
+  val gfp_rec_sugar_transfer_interpretation: BNF_FP_Rec_Sugar_Util.fp_rec_sugar -> Proof.context ->
+    Proof.context
+end;
+
+structure BNF_FP_Rec_Sugar_Transfer : BNF_FP_REC_SUGAR_TRANSFER =
+struct
+
+open Ctr_Sugar_Util
+open Ctr_Sugar_Tactics
+open BNF_Def
+open BNF_FP_Util
+open BNF_FP_Def_Sugar
+open BNF_FP_Rec_Sugar_Util
+open BNF_LFP_Rec_Sugar
+
+val transferN = "transfer";
+
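+(* Parametricity of a "primrec"-defined function: after unfolding the
+   definition, the goal can be discharged directly by "transfer_prover". *)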
+fun mk_lfp_rec_sugar_transfer_tac ctxt def =
+  Ctr_Sugar_Tactics.unfold_thms_tac ctxt [def] THEN
+  HEADGOAL (Transfer.transfer_prover_tac ctxt);
+
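+(* Parametricity of a "primcorec"-defined function: unfold the definition and
+   the corecursor, rewrite discriminators to case expressions, and simplify
+   case and "if" expressions before (optionally) calling "transfer_prover". *)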
+fun mk_gfp_rec_sugar_transfer_tac apply_transfer ctxt f_def corec_def type_definitions
+    dtor_corec_transfers rel_pre_defs disc_eq_cases cases case_distribs case_congs =
+  let
+    fun instantiate_with_lambda thm =
+      let
+        val prop = Thm.prop_of thm;
+        val @{const Trueprop} $ (Const (@{const_name HOL.eq}, _) $ (Var (_, fT) $ _) $ _) = prop;
+        val T = range_type fT;
+        val j = Term.maxidx_of_term prop + 1;
+        val cond = Var (("x", j), HOLogic.boolT);
+        val then_branch = Var (("t", j), T);
+        val else_branch = Var (("e", j), T);
+        val lambda = Term.lambda cond (mk_If cond then_branch else_branch);
+      in
+        cterm_instantiate_pos [SOME (certify ctxt lambda)] thm
+      end;
+
+    val transfer_rules =
+      @{thm Abs_transfer[OF BNF_Composition.type_definition_id_bnf_UNIV
+        BNF_Composition.type_definition_id_bnf_UNIV]} ::
+      map (fn thm => @{thm Abs_transfer} OF [thm, thm]) type_definitions @
+      map (Local_Defs.unfold ctxt rel_pre_defs) dtor_corec_transfers;
+    val add_transfer_rule = Thm.attribute_declaration Transfer.transfer_add;
+    val ctxt' = Context.proof_map (fold add_transfer_rule transfer_rules) ctxt;
+
+    val case_distribs = map instantiate_with_lambda case_distribs;
+    val simps = case_distribs @ disc_eq_cases @ cases @ @{thms if_True if_False};
+    val simp_ctxt = put_simpset (simpset_of (ss_only simps ctxt)) ctxt';
+  in
+    unfold_thms_tac ctxt ([f_def, corec_def] @ @{thms split_beta if_conn}) THEN
+    HEADGOAL (simp_tac (fold Simplifier.add_cong case_congs simp_ctxt)) THEN
+    (if apply_transfer then HEADGOAL (Transfer.transfer_prover_tac ctxt') else all_tac)
+  end;
+
+fun massage_simple_notes base =
+  filter_out (null o #2)
+  #> map (fn (thmN, thms, f_attrs) =>
+    ((Binding.qualify true base (Binding.name thmN), []),
+     map_index (fn (i, thm) => ([thm], f_attrs i)) thms));
+
+fun fp_sugar_of_bnf ctxt = fp_sugar_of ctxt o (fn Type (s, _) => s) o T_of_bnf;
+
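+(* Fold "f" over all BNFs occurring in a type, descending depth first into the
+   type arguments of each BNF. *)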
+fun bnf_depth_first_traverse ctxt f T z =
+  (case T of
+    Type (s, innerTs) =>
+    (case bnf_of ctxt s of
+      NONE => z
+    | SOME bnf => let val z' = f bnf z in
+        fold (bnf_depth_first_traverse ctxt f) innerTs z'
+      end)
+  | _ => z)
+
+fun if_all_bnfs ctxt Ts f g =
+  let val bnfs = map_filter (fn Type (s, _) => BNF_Def.bnf_of ctxt s | _ => NONE) Ts in
+    if length bnfs = length Ts then f bnfs else g
+  end;
+
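+(* Build the parametricity goal "rel_fun R ... f f" for a term "f", with a
+   fresh relation variable for each schematic type variable of "f". *)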
+fun mk_goal lthy f =
+  let
+    val schematic_Ts = Term.add_tvarsT (fastype_of f) [];
+
+    val ((As, Bs), names_lthy) = lthy
+      |> Ctr_Sugar_Util.mk_TFrees' (map snd skematic_Ts)
+      ||>> Ctr_Sugar_Util.mk_TFrees' (map snd skematic_Ts);
+
+    val (Rs, names_lthy) =
+      Ctr_Sugar_Util.mk_Frees "R" (map2 BNF_Util.mk_pred2T As Bs) names_lthy;
+
+    val fA = Term.subst_TVars (map fst schematic_Ts ~~ As) f;
+    val fB = Term.subst_TVars (map fst schematic_Ts ~~ Bs) f;
+  in
+    (BNF_FP_Def_Sugar.mk_parametricity_goal lthy Rs fA fB, names_lthy)
+  end;
+
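+(* Prove parametricity of each function whose "transfer" option is set,
+   provided that all corresponding (co)datatypes are registered as BNFs. *)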
+fun prove_parametricity_if_bnf prove {transfers, fun_names, funs, fun_defs, fpTs} lthy =
+  fold_index (fn (n, (((transfer, f_names), f), def)) => fn lthy =>
+      if not transfer then
+        lthy
+      else
+        if_all_bnfs lthy fpTs
+          (fn bnfs => fn () => prove n bnfs f_names f def lthy)
+          (fn () => error "Function is not parametric" (*FIXME: wording*)) ())
+    (transfers ~~ fun_names ~~ funs ~~ fun_defs) lthy;
+
+fun fp_rec_sugar_transfer_interpretation prove =
+  prove_parametricity_if_bnf (fn n => fn bnfs => fn f_name => fn f => fn def => fn lthy =>
+    (case try (prove n bnfs f def) lthy of
+      NONE => error "Failed to prove parametricity"
+    | SOME thm =>
+      let
+        val notes = [(transferN, [thm], K @{attributes [transfer_rule]})]
+          |> massage_simple_notes f_name;
+      in
+        snd (Local_Theory.notes notes lthy)
+      end));
+
+val lfp_rec_sugar_transfer_interpretation = fp_rec_sugar_transfer_interpretation
+  (fn _ => fn _ => fn f => fn def => fn lthy =>
+     let val (goal, names_lthy) = mk_goal lthy f in
+       Goal.prove lthy [] [] goal (fn {context = ctxt, prems = _} =>
+         mk_lfp_rec_sugar_transfer_tac ctxt def)
+       |> singleton (Proof_Context.export names_lthy lthy)
+       |> Thm.close_derivation
+     end);
+
+val gfp_rec_sugar_transfer_interpretation = fp_rec_sugar_transfer_interpretation
+  (fn n => fn bnfs => fn f => fn def => fn lthy =>
+     let
+       val fp_sugars = map (the o fp_sugar_of_bnf lthy) bnfs;
+       val (goal, names_lthy) = mk_goal lthy f;
+       val (disc_eq_cases, case_thms, case_distribs, case_congs) =
+         bnf_depth_first_traverse lthy (fn bnf => fn quad =>
+           let
+             fun add_thms (disc_eq_cases0, case_thms0, case_distribs0, case_congs0)
+                {fp_ctr_sugar = {ctr_sugar = {disc_eq_cases, case_thms, case_distribs, case_cong,
+                   ...}, ...}, ...} =
+               (union Thm.eq_thm disc_eq_cases disc_eq_cases0,
+                union Thm.eq_thm case_thms case_thms0,
+                union Thm.eq_thm case_distribs case_distribs0,
+                insert Thm.eq_thm case_cong case_congs0);
+           in
+             Option.map (add_thms quad) (fp_sugar_of_bnf lthy bnf)
+             |> the_default quad
+           end) (fastype_of f) ([], [], [], []);
+     in
+       Goal.prove lthy [] [] goal (fn {context = ctxt, prems = _} =>
+         mk_gfp_rec_sugar_transfer_tac true ctxt def
+         (#co_rec_def (#fp_co_induct_sugar (nth fp_sugars n)))
+         (map (#type_definition o #absT_info) fp_sugars)
+         (maps (#xtor_co_rec_transfers o #fp_res) fp_sugars)
+         (map (rel_def_of_bnf o #pre_bnf) fp_sugars)
+         disc_eq_cases case_thms case_distribs case_congs)
+       |> singleton (Proof_Context.export names_lthy lthy)
+       |> Thm.close_derivation
+     end);
+
+val _ = Theory.setup (lfp_rec_sugar_interpretation Transfer_BNF.transfer_plugin
+  lfp_rec_sugar_transfer_interpretation);
+
+end;
--- a/src/HOL/Tools/BNF/bnf_fp_rec_sugar_util.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_fp_rec_sugar_util.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -8,10 +8,21 @@
 
 signature BNF_FP_REC_SUGAR_UTIL =
 sig
+
   datatype fp_kind = Least_FP | Greatest_FP
 
   val case_fp: fp_kind -> 'a -> 'a -> 'a
 
+  type fp_rec_sugar =
+    {transfers: bool list,
+     fun_names: string list,
+     funs: term list,
+     fun_defs: thm list,
+     fpTs: typ list}
+
+  val morph_fp_rec_sugar: morphism -> fp_rec_sugar -> fp_rec_sugar
+  val transfer_fp_rec_sugar: theory -> fp_rec_sugar -> fp_rec_sugar
+
   val flat_rec_arg_args: 'a list list -> 'a list
 
   val indexed: 'a list -> int -> int list * int
@@ -51,9 +62,25 @@
 fun case_fp Least_FP l _ = l
   | case_fp Greatest_FP _ g = g;
 
+type fp_rec_sugar =
+  {transfers: bool list,
+   fun_names: string list,
+   funs: term list,
+   fun_defs: thm list,
+   fpTs: typ list};
+
+fun morph_fp_rec_sugar phi {transfers, fun_names, funs, fun_defs, fpTs} =
+  {transfers = transfers,
+   fun_names = fun_names,
+   funs = map (Morphism.term phi) funs,
+   fun_defs = map (Morphism.thm phi) fun_defs,
+   fpTs = map (Morphism.typ phi) fpTs};
+
+val transfer_fp_rec_sugar = morph_fp_rec_sugar o Morphism.transfer_morphism;
+
 fun flat_rec_arg_args xss =
-  (* FIXME (once the old datatype package is phased out): The first line below gives the preferred
-     order. The second line is for compatibility with the old datatype package. *)
+  (* FIXME (once the old datatype package is completely phased out): The first line below gives the
+     preferred order. The second line is for compatibility with the old datatype package. *)
   (* flat xss *)
   map hd xss @ maps tl xss;
 
--- a/src/HOL/Tools/BNF/bnf_gfp_rec_sugar.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_gfp_rec_sugar.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -8,7 +8,11 @@
 
 signature BNF_GFP_REC_SUGAR =
 sig
-  datatype primcorec_option = Sequential_Option | Exhaustive_Option
+  datatype corec_option =
+    Plugins_Option of Proof.context -> Plugin_Name.filter |
+    Sequential_Option |
+    Exhaustive_Option |
+    Transfer_Option
 
   datatype corec_call =
     Dummy_No_Corec of int |
@@ -31,7 +35,8 @@
      corec_sels: thm list}
 
   type corec_spec =
-    {corec: term,
+    {T: typ,
+     corec: term,
      exhaust_discs: thm list,
      sel_defs: thm list,
      fp_nesting_maps: thm list,
@@ -43,10 +48,14 @@
     (term * term list list) list list -> local_theory ->
     corec_spec list * typ list * thm * thm * thm list * thm list * (Token.src list * Token.src list)
     * bool * local_theory
-  val add_primcorecursive_cmd: primcorec_option list ->
+
+  val gfp_rec_sugar_interpretation: string ->
+    (BNF_FP_Rec_Sugar_Util.fp_rec_sugar -> local_theory -> local_theory) -> theory -> theory
+
+  val add_primcorecursive_cmd: corec_option list ->
     (binding * string option * mixfix) list * ((Attrib.binding * string) * string option) list ->
     Proof.context -> Proof.state
-  val add_primcorec_cmd: primcorec_option list ->
+  val add_primcorec_cmd: corec_option list ->
     (binding * string option * mixfix) list * ((Attrib.binding * string) * string option) list ->
     local_theory -> local_theory
 end;
@@ -62,6 +71,7 @@
 open BNF_FP_Def_Sugar
 open BNF_FP_N2M_Sugar
 open BNF_FP_Rec_Sugar_Util
+open BNF_FP_Rec_Sugar_Transfer
 open BNF_GFP_Rec_Sugar_Tactics
 
 val codeN = "code";
@@ -73,7 +83,6 @@
 
 val nitpicksimp_attrs = @{attributes [nitpick_simp]};
 val simp_attrs = @{attributes [simp]};
-val code_nitpicksimp_attrs = Code.add_default_eqn_attrib :: nitpicksimp_attrs;
 
 exception PRIMCOREC of string * term list;
 
@@ -81,7 +90,11 @@
 fun primcorec_error_eqn str eqn = raise PRIMCOREC (str, [eqn]);
 fun primcorec_error_eqns str eqns = raise PRIMCOREC (str, eqns);
 
-datatype primcorec_option = Sequential_Option | Exhaustive_Option;
+datatype corec_option =
+  Plugins_Option of Proof.context -> Plugin_Name.filter |
+  Sequential_Option |
+  Exhaustive_Option |
+  Transfer_Option;
 
 datatype corec_call =
   Dummy_No_Corec of int |
@@ -109,7 +122,8 @@
    corec_sels: thm list};
 
 type corec_spec =
-  {corec: term,
+  {T: typ,
+   corec: term,
    exhaust_discs: thm list,
    sel_defs: thm list,
    fp_nesting_maps: thm list,
@@ -403,6 +417,14 @@
     (case fp_sugar_of ctxt s of SOME {fp_bnf_sugar = {map_thms, ...}, ...} => map_thms | NONE => [])
   | map_thms_of_typ _ _ = [];
 
+structure GFP_Rec_Sugar_Plugin = Plugin(type T = fp_rec_sugar);
+
+fun gfp_rec_sugar_interpretation name f =
+  GFP_Rec_Sugar_Plugin.interpretation name (fn fp_rec_sugar => fn lthy =>
+    f (transfer_fp_rec_sugar (Proof_Context.theory_of lthy) fp_rec_sugar) lthy);
+
+val interpret_gfp_rec_sugar = GFP_Rec_Sugar_Plugin.data;
+
 fun corec_specs_of bs arg_Ts res_Ts callers callssss0 lthy0 =
   let
     val thy = Proof_Context.theory_of lthy0;
@@ -501,7 +523,7 @@
     fun mk_spec ({T, fp_ctr_sugar = {ctr_sugar as {exhaust_discs, sel_defs, ...}, ...},
         fp_co_induct_sugar = {co_rec = corec, co_rec_thms = corec_thms, co_rec_discs = corec_discs,
         co_rec_selss = corec_selss, ...}, ...} : fp_sugar) p_is q_isss f_isss f_Tsss =
-      {corec = mk_co_rec thy Greatest_FP perm_Cs' (substAT T) corec, exhaust_discs = exhaust_discs,
+      {T = T, corec = mk_co_rec thy Greatest_FP perm_Cs' (substAT T) corec, exhaust_discs = exhaust_discs,
        sel_defs = sel_defs,
        fp_nesting_maps = maps (map_thms_of_typ lthy o T_of_bnf) fp_nesting_bnfs,
        fp_nesting_map_ident0s = map map_ident0_of_bnf fp_nesting_bnfs,
@@ -934,7 +956,7 @@
         val prems = maps (s_not_conj o #prems) disc_eqns;
         val ctr_rhs_opt = Option.map #ctr_rhs_opt sel_eqn_opt |> the_default NONE;
         val code_rhs_opt = Option.map #code_rhs_opt sel_eqn_opt |> the_default NONE;
-        val eqn_pos = Option.map (curry (op +) 1 o #eqn_pos) sel_eqn_opt |> the_default 100000 (* FIXME *);
+        val eqn_pos = Option.map (curry (op +) 1 o #eqn_pos) sel_eqn_opt |> the_default 100000; (* FIXME *)
 
         val extra_disc_eqn =
           {fun_name = fun_name, fun_T = fun_T, fun_args = fun_args, ctr = ctr, ctr_no = n,
@@ -974,8 +996,11 @@
 
     val actual_nn = length bs;
 
-    val sequentials = replicate actual_nn (member (op =) opts Sequential_Option);
-    val exhaustives = replicate actual_nn (member (op =) opts Exhaustive_Option);
+    val plugins = get_first (fn Plugins_Option f => SOME (f lthy) | _ => NONE) (rev opts)
+      |> the_default Plugin_Name.default_filter;
+    val sequentials = replicate actual_nn (exists (can (fn Sequential_Option => ())) opts);
+    val exhaustives = replicate actual_nn (exists (can (fn Exhaustive_Option => ())) opts);
+    val transfers = replicate actual_nn (exists (can (fn Transfer_Option => ())) opts);
 
     val fun_names = map Binding.name_of bs;
     val basic_ctr_specss = map (basic_corec_specs_of lthy) res_Ts;
@@ -990,7 +1015,7 @@
       let
         val missing = fun_names
           |> filter (map (fn Disc x => #fun_name x | Sel x => #fun_name x) eqns_data
-            |> not oo member (op =))
+            |> not oo member (op =));
       in
         null missing
           orelse primcorec_error_eqns ("missing equations for function(s): " ^ commas missing) []
@@ -1107,6 +1132,7 @@
     fun prove thmss'' def_infos lthy =
       let
         val def_thms = map (snd o snd) def_infos;
+        val ts = map fst def_infos;
 
         val (nchotomy_thmss, exclude_thmss) =
           (map2 append (take actual_nn thmss'') nchotomy_taut_thmss, drop actual_nn thmss'');
@@ -1386,6 +1412,8 @@
 
         val common_name = mk_common_name fun_names;
 
+        val code_attrs = if plugins code_plugin then [Code.add_default_eqn_attrib] else [];
+
         val anonymous_notes =
           [(flat disc_iff_or_disc_thmss, simp_attrs)]
           |> map (fn (thms, attrs) => ((Binding.empty, attrs), [(thms, [])]));
@@ -1401,7 +1429,7 @@
           [(coinductN, map (if n2m then single else K []) coinduct_thms, coinduct_attrs),
            (coinduct_strongN, map (if n2m then single else K []) coinduct_strong_thms,
             coinduct_attrs),
-           (codeN, code_thmss, code_nitpicksimp_attrs),
+           (codeN, code_thmss, code_attrs @ nitpicksimp_attrs),
            (ctrN, ctr_thmss, []),
            (discN, disc_thmss, []),
            (disc_iffN, disc_iff_thmss, []),
@@ -1421,6 +1449,16 @@
         |> Spec_Rules.add Spec_Rules.Equational (map fst def_infos, flat code_thmss)
         |> Local_Theory.notes (anonymous_notes @ common_notes @ notes)
         |> snd
+        |> (fn lthy =>
+          let
+            val phi = Local_Theory.target_morphism lthy;
+            val Ts = take actual_nn (map #T corec_specs);
+            val fp_rec_sugar =
+              {transfers = transfers, fun_names = fun_names, funs = map (Morphism.term phi) ts,
+               fun_defs = Morphism.fact phi def_thms, fpTs = Ts};
+          in
+            interpret_gfp_rec_sugar plugins fp_rec_sugar lthy
+          end)
       end;
 
     fun after_qed thmss' = fold_map Local_Theory.define defs #-> prove thmss';
@@ -1456,9 +1494,11 @@
       | _ => error "\"auto\" failed -- use \"primcorecursive\" instead of \"primcorec\"")
     goalss)) ooo add_primcorec_ursive_cmd true;
 
-val primcorec_option_parser = Parse.group (fn () => "option")
-  (Parse.reserved "sequential" >> K Sequential_Option
-  || Parse.reserved "exhaustive" >> K Exhaustive_Option)
+val corec_option_parser = Parse.group (K "option")
+  (Plugin_Name.parse_filter >> Plugins_Option
+   || Parse.reserved "sequential" >> K Sequential_Option
+   || Parse.reserved "exhaustive" >> K Exhaustive_Option
+   || Parse.reserved "transfer" >> K Transfer_Option);
 
 (* FIXME: should use "Parse_Spec.spec" instead of "Parse.prop" and honor binding *)
 val where_alt_specs_of_parser = Parse.where_ |-- Parse.!!! (Parse.enum1 "|"
@@ -1467,13 +1507,16 @@
 val _ = Outer_Syntax.local_theory_to_proof @{command_spec "primcorecursive"}
   "define primitive corecursive functions"
   ((Scan.optional (@{keyword "("} |--
-      Parse.!!! (Parse.list1 primcorec_option_parser) --| @{keyword ")"}) []) --
+      Parse.!!! (Parse.list1 corec_option_parser) --| @{keyword ")"}) []) --
     (Parse.fixes -- where_alt_specs_of_parser) >> uncurry add_primcorecursive_cmd);
 
 val _ = Outer_Syntax.local_theory @{command_spec "primcorec"}
   "define primitive corecursive functions"
-  ((Scan.optional (@{keyword "("} |--
-      Parse.!!! (Parse.list1 primcorec_option_parser) --| @{keyword ")"}) []) --
+  ((Scan.optional (@{keyword "("} |-- Parse.!!! (Parse.list1 corec_option_parser)
+      --| @{keyword ")"}) []) --
     (Parse.fixes -- where_alt_specs_of_parser) >> uncurry add_primcorec_cmd);
 
+val _ = Theory.setup (gfp_rec_sugar_interpretation Transfer_BNF.transfer_plugin
+  gfp_rec_sugar_transfer_interpretation);
+
 end;
--- a/src/HOL/Tools/BNF/bnf_gfp_rec_sugar_tactics.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_gfp_rec_sugar_tactics.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -80,9 +80,9 @@
     etac notE THEN' atac ORELSE'
     etac disjE))));
 
-val ss_fst_snd_conv = simpset_of (ss_only @{thms fst_conv snd_conv} @{context});
+fun ss_fst_snd_conv ctxt = simpset_of (ss_only @{thms fst_conv snd_conv} ctxt);
 
-fun case_atac ctxt = simp_tac (put_simpset ss_fst_snd_conv ctxt);
+fun case_atac ctxt = simp_tac (put_simpset (ss_fst_snd_conv ctxt) ctxt);
 
 fun same_case_tac ctxt m =
   HEADGOAL (if m = 0 then rtac TrueI
--- a/src/HOL/Tools/BNF/bnf_lfp_rec_sugar.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_lfp_rec_sugar.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -8,7 +8,10 @@
 
 signature BNF_LFP_REC_SUGAR =
 sig
-  datatype primrec_option = Nonexhaustive_Option
+  datatype rec_option =
+    Plugins_Option of Proof.context -> Plugin_Name.filter |
+    Nonexhaustive_Option |
+    Transfer_Option
 
   datatype rec_call =
     No_Rec of int * typ |
@@ -55,11 +58,14 @@
     * local_theory
   val rec_specs_of: binding list -> typ list -> typ list -> term list ->
     (term * term list list) list list -> local_theory ->
-    (bool * rec_spec list * typ list * thm * thm list * Token.src list) * local_theory
+    (bool * rec_spec list * typ list * thm * thm list * Token.src list * typ list) * local_theory
+
+  val lfp_rec_sugar_interpretation: string ->
+    (BNF_FP_Rec_Sugar_Util.fp_rec_sugar -> local_theory -> local_theory) -> theory -> theory
 
   val add_primrec: (binding * typ option * mixfix) list ->
     (Attrib.binding * term) list -> local_theory -> (term list * thm list list) * local_theory
-  val add_primrec_cmd: primrec_option list -> (binding * string option * mixfix) list ->
+  val add_primrec_cmd: rec_option list -> (binding * string option * mixfix) list ->
     (Attrib.binding * string) list -> local_theory -> (term list * thm list list) * local_theory
   val add_primrec_global: (binding * typ option * mixfix) list ->
     (Attrib.binding * term) list -> theory -> (term list * thm list list) * theory
@@ -83,12 +89,15 @@
 
 val nitpicksimp_attrs = @{attributes [nitpick_simp]};
 val simp_attrs = @{attributes [simp]};
-val code_nitpicksimp_simp_attrs = Code.add_default_eqn_attrib :: nitpicksimp_attrs @ simp_attrs;
+val nitpicksimp_simp_attrs = nitpicksimp_attrs @ simp_attrs;
 
 exception OLD_PRIMREC of unit;
 exception PRIMREC of string * term list;
 
-datatype primrec_option = Nonexhaustive_Option;
+datatype rec_option =
+  Plugins_Option of Proof.context -> Plugin_Name.filter |
+  Nonexhaustive_Option |
+  Transfer_Option;
 
 datatype rec_call =
   No_Rec of int * typ |
@@ -174,6 +183,14 @@
     SOME {rewrite_nested_rec_call = SOME f, ...} => f ctxt
   | _ => error "Unsupported nested recursion");
 
+structure LFP_Rec_Sugar_Plugin = Plugin(type T = fp_rec_sugar);
+
+fun lfp_rec_sugar_interpretation name f =
+  LFP_Rec_Sugar_Plugin.interpretation name (fn fp_rec_sugar => fn lthy =>
+    f (transfer_fp_rec_sugar (Proof_Context.theory_of lthy) fp_rec_sugar) lthy);
+
+val interpret_lfp_rec_sugar = LFP_Rec_Sugar_Plugin.data;
+
 fun rec_specs_of bs arg_Ts res_Ts callers callssss0 lthy0 =
   let
     val thy = Proof_Context.theory_of lthy0;
@@ -242,7 +259,7 @@
        ctr_specs = mk_ctr_specs fp_res_index ctr_offset ctrs rec_thms};
   in
     ((n2m, map2 mk_spec ctr_offsets basic_lfp_sugars, missing_arg_Ts, common_induct, inducts,
-      induct_attrs), lthy)
+      induct_attrs, map #T basic_lfp_sugars), lthy)
   end;
 
 val undef_const = Const (@{const_name undefined}, dummyT);
@@ -401,23 +418,28 @@
       |> fold_rev lambda (args @ left_args @ right_args)
     end);
 
-fun build_defs ctxt nonexhaustive bs mxs (funs_data : eqn_data list list)
+fun build_defs ctxt nonexhaustives bs mxs (funs_data : eqn_data list list)
     (rec_specs : rec_spec list) has_call =
   let
     val n_funs = length funs_data;
 
     val ctr_spec_eqn_data_list' =
-      map #ctr_specs (take n_funs rec_specs) ~~ funs_data
-      |> maps (uncurry (finds (fn (x, y) => #ctr x = #ctr y))
-        ##> (fn x => null x orelse
-          raise PRIMREC ("excess equations in definition", map #rhs_term x)) #> fst);
-    val _ = ctr_spec_eqn_data_list' |> map (fn ({ctr, ...}, x) =>
+      maps (fn ((xs, ys), z) =>
+        let
+          val zs = replicate (length xs) z
+          val (b, c) = finds (fn ((x, _), y) => #ctr x = #ctr y) (xs ~~ zs) ys
+          val (_ : bool) = (fn x => null x orelse
+            raise PRIMREC ("excess equations in definition", map #rhs_term x)) c
+        in b end) (map #ctr_specs (take n_funs rec_specs) ~~ funs_data ~~ nonexhaustives);
+
+    val (_ : unit list) = ctr_spec_eqn_data_list' |> map (fn (({ctr, ...}, nonexhaustive), x) =>
       if length x > 1 then raise PRIMREC ("multiple equations for constructor", map #user_eqn x)
       else if length x = 1 orelse nonexhaustive then ()
       else warning ("no equation for constructor " ^ Syntax.string_of_term ctxt ctr));
 
     val ctr_spec_eqn_data_list =
-      ctr_spec_eqn_data_list' @ (drop n_funs rec_specs |> maps #ctr_specs |> map (rpair []));
+      map (apfst fst) ctr_spec_eqn_data_list' @
+        (drop n_funs rec_specs |> maps #ctr_specs |> map (rpair []));
 
     val recs = take n_funs rec_specs |> map #recx;
     val rec_args = ctr_spec_eqn_data_list
@@ -472,7 +494,7 @@
   unfold_thms_tac ctxt (nested_simps ctxt @ map_ident0s @ map_comps) THEN
   HEADGOAL (rtac refl);
 
-fun prepare_primrec nonexhaustive fixes specs lthy0 =
+fun prepare_primrec plugins nonexhaustives transfers fixes specs lthy0 =
   let
     val thy = Proof_Context.theory_of lthy0;
 
@@ -502,7 +524,7 @@
         [] => ()
       | (b, _) :: _ => raise PRIMREC ("type of " ^ Binding.print b ^ " contains top sort", []));
 
-    val ((n2m, rec_specs, _, common_induct, inducts, induct_attrs), lthy) =
+    val ((n2m, rec_specs, _, common_induct, inducts, induct_attrs, Ts), lthy) =
       rec_specs_of bs arg_Ts res_Ts frees callssss lthy0;
 
     val actual_nn = length funs_data;
@@ -513,10 +535,10 @@
         raise PRIMREC ("argument " ^ quote (Syntax.string_of_term lthy ctr) ^
           " is not a constructor in left-hand side", [user_eqn])) eqns_data;
 
-    val defs = build_defs lthy nonexhaustive bs mxs funs_data rec_specs has_call;
+    val defs = build_defs lthy nonexhaustives bs mxs funs_data rec_specs has_call;
 
-    fun prove lthy' def_thms' ({ctr_specs, fp_nesting_map_ident0s, fp_nesting_map_comps, ...}
-        : rec_spec) (fun_data : eqn_data list) =
+    fun prove def_thms' ({ctr_specs, fp_nesting_map_ident0s, fp_nesting_map_comps, ...}
+        : rec_spec) (fun_data : eqn_data list) lthy' =
       let
         val js =
           find_indices (op = o apply2 (fn {fun_name, ctr, ...} => (fun_name, ctr)))
@@ -526,15 +548,14 @@
         val simp_thms = finds (fn (x, y) => #ctr x = #ctr y) fun_data ctr_specs
           |> fst
           |> map_filter (try (fn (x, [y]) =>
-            (#fun_name x, #user_eqn x, length (#left_args x) + length (#right_args x), #rec_thm y)))
-          |> map2 (fn j => fn (fun_name, user_eqn, num_extra_args, rec_thm) =>
+            (#user_eqn x, length (#left_args x) + length (#right_args x), #rec_thm y)))
+          |> map (fn (user_eqn, num_extra_args, rec_thm) =>
               mk_primrec_tac lthy' num_extra_args fp_nesting_map_ident0s fp_nesting_map_comps
                 def_thms rec_thm
               |> K |> Goal.prove_sorry lthy' [] [] user_eqn
-              |> Thm.close_derivation)
-            js;
+              |> Thm.close_derivation);
       in
-        (js, simp_thms)
+        ((js, simp_thms), lthy')
       end;
 
     val notes =
@@ -555,19 +576,36 @@
   in
     (((fun_names, defs),
       fn lthy => fn defs =>
-        split_list (map2 (prove lthy defs) (take actual_nn rec_specs) funs_data)),
+        let
+          val def_thms = map (snd o snd) defs;
+          val ts = map fst defs;
+          val phi = Local_Theory.target_morphism lthy;
+          val fp_rec_sugar =
+            {transfers = transfers, fun_names = fun_names, funs = map (Morphism.term phi) ts,
+             fun_defs = Morphism.fact phi def_thms, fpTs = take actual_nn Ts};
+        in
+          map_prod split_list (interpret_lfp_rec_sugar plugins fp_rec_sugar)
+            (@{fold_map 2} (prove defs) (take actual_nn rec_specs) funs_data lthy)
+        end),
       lthy |> Local_Theory.notes (notes @ common_notes) |> snd)
   end;
 
-fun add_primrec_simple' opts fixes ts lthy =
+fun add_primrec_simple0 plugins nonexhaustive transfer fixes ts lthy =
   let
-    val nonexhaustive = member (op =) opts Nonexhaustive_Option;
-    val (((names, defs), prove), lthy') = prepare_primrec nonexhaustive fixes ts lthy
+    val actual_nn = length fixes;
+
+    val nonexhaustives = replicate actual_nn nonexhaustive;
+    val transfers = replicate actual_nn transfer;
+
+    val (((names, defs), prove), lthy') =
+      prepare_primrec plugins nonexhaustives transfers fixes ts lthy
       handle ERROR str => raise PRIMREC (str, []);
   in
     lthy'
     |> fold_map Local_Theory.define defs
-    |-> (fn defs => `(fn lthy => (names, (map fst defs, prove lthy defs))))
+    |-> (fn defs => fn lthy =>
+      let val (thms, lthy) = prove lthy defs;
+      in ((names, (map fst defs, thms)), lthy) end)
   end
   handle PRIMREC (str, eqns) =>
          if null eqns then
@@ -577,7 +615,7 @@
              space_implode "\n  " (map (quote o Syntax.string_of_term lthy) eqns));
 
 fun add_primrec_simple fixes ts lthy =
-  add_primrec_simple' [] fixes ts lthy
+  add_primrec_simple0 Plugin_Name.default_filter false false fixes ts lthy
   handle OLD_PRIMREC () =>
     Old_Primrec.add_primrec_simple fixes ts lthy
     |>> apsnd (apsnd (pair [] o single)) o apfst single;
@@ -585,9 +623,16 @@
 fun gen_primrec old_primrec prep_spec opts
     (raw_fixes : (binding * 'a option * mixfix) list) raw_specs lthy =
   let
-    val d = duplicates (op =) (map (Binding.name_of o #1) raw_fixes)
+    val d = duplicates (op =) (map (Binding.name_of o #1) raw_fixes);
     val _ = null d orelse raise PRIMREC ("duplicate function name(s): " ^ commas d, []);
 
+    val plugins = get_first (fn Plugins_Option f => SOME (f lthy) | _ => NONE) (rev opts)
+      |> the_default Plugin_Name.default_filter;
+    val nonexhaustive = exists (can (fn Nonexhaustive_Option => ())) opts;
+    val transfer = exists (can (fn Transfer_Option => ())) opts;
+
+    val code_attrs = if plugins code_plugin then [Code.add_default_eqn_attrib] else [];
+
     val (fixes, specs) = fst (prep_spec raw_fixes raw_specs lthy);
 
     val mk_notes =
@@ -596,7 +641,7 @@
           val (bs, attrss) = map_split (fst o nth specs) js;
           val notes =
             @{map 3} (fn b => fn attrs => fn thm =>
-                ((Binding.qualify false prefix b, code_nitpicksimp_simp_attrs @ attrs),
+                ((Binding.qualify false prefix b, code_attrs @ nitpicksimp_simp_attrs @ attrs),
                  [([thm], [])]))
               bs attrss thms;
         in
@@ -604,7 +649,7 @@
         end);
   in
     lthy
-    |> add_primrec_simple' opts fixes (map snd specs)
+    |> add_primrec_simple0 plugins nonexhaustive transfer fixes (map snd specs)
     |-> (fn (names, (ts, (jss, simpss))) =>
       Spec_Rules.add Spec_Rules.Equational (ts, flat simpss)
       #> Local_Theory.notes (mk_notes jss names simpss)
@@ -625,13 +670,15 @@
   #> add_primrec fixes specs
   ##> Local_Theory.exit_global;
 
-val primrec_option_parser = Parse.group (fn () => "option")
-  (Parse.reserved "nonexhaustive" >> K Nonexhaustive_Option)
+val rec_option_parser = Parse.group (K "option")
+  (Plugin_Name.parse_filter >> Plugins_Option
+   || Parse.reserved "nonexhaustive" >> K Nonexhaustive_Option
+   || Parse.reserved "transfer" >> K Transfer_Option);
 
 val _ = Outer_Syntax.local_theory @{command_spec "primrec"}
   "define primitive recursive functions"
-  ((Scan.optional (@{keyword "("} |--
-      Parse.!!! (Parse.list1 primrec_option_parser) --| @{keyword ")"}) []) --
+  ((Scan.optional (@{keyword "("} |-- Parse.!!! (Parse.list1 rec_option_parser)
+      --| @{keyword ")"}) []) --
     (Parse.fixes -- Parse_Spec.where_alt_specs)
     >> (fn (opts, (fixes, specs)) => snd o add_primrec_cmd opts fixes specs));
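
The recursive side mirrors this: "primrec" accepts a plugin filter plus the "nonexhaustive" and "transfer" flags, and the default code-equation attribute is now attached to the generated simp rules only when the code plugin is enabled. A usage sketch, with an invented function and the "plugins del:" notation assumed to follow the other BNF commands:

    (* hypothetical example: drop the code equations and allow a missing Nil case *)
    primrec (plugins del: code, nonexhaustive) hd_of :: "'a list \<Rightarrow> 'a" where
      "hd_of (x # xs) = x"
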
 
--- a/src/HOL/Tools/BNF/bnf_util.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/BNF/bnf_util.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -66,8 +66,6 @@
   val mk_nthN: int -> term -> int -> term
 
   (*parameterized thms*)
-  val eqTrueI: thm
-  val eqFalseI: thm
   val prod_injectD: thm
   val prod_injectI: thm
   val ctrans: thm
@@ -345,8 +343,6 @@
 fun mk_sym thm = thm RS sym;
 
 (*TODO: antiquote heavily used theorems once*)
-val eqTrueI = @{thm iffD2[OF eq_True]};
-val eqFalseI =  @{thm iffD2[OF eq_False]};
 val prod_injectD = @{thm iffD1[OF prod.inject]};
 val prod_injectI = @{thm iffD2[OF prod.inject]};
 val ctrans = @{thm ordLeq_transitive};
--- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -1,5 +1,6 @@
 (*  Title:      HOL/Tools/Ctr_Sugar/ctr_sugar.ML
     Author:     Jasmin Blanchette, TU Muenchen
+    Author:     Martin Desharnais, TU Muenchen
     Copyright   2012, 2013
 
 Wrapping existing freely generated type's constructors.
@@ -23,11 +24,13 @@
      case_thms: thm list,
      case_cong: thm,
      case_cong_weak: thm,
+     case_distribs: thm list,
      split: thm,
      split_asm: thm,
      disc_defs: thm list,
      disc_thmss: thm list list,
      discIs: thm list,
+     disc_eq_cases: thm list,
      sel_defs: thm list,
      sel_thmss: thm list list,
      distinct_discsss: thm list list list,
@@ -72,6 +75,8 @@
   val ctr_of_ctr_spec: ('c, 'a) ctr_spec -> 'c
   val args_of_ctr_spec: ('c, 'a) ctr_spec -> 'a list
 
+  val code_plugin: string
+
   type ctr_options = (string -> bool) * bool
   type ctr_options_cmd = (Proof.context -> string -> bool) * bool
 
@@ -111,11 +116,13 @@
    case_thms: thm list,
    case_cong: thm,
    case_cong_weak: thm,
+   case_distribs: thm list,
    split: thm,
    split_asm: thm,
    disc_defs: thm list,
    disc_thmss: thm list list,
    discIs: thm list,
+   disc_eq_cases: thm list,
    sel_defs: thm list,
    sel_thmss: thm list list,
    distinct_discsss: thm list list list,
@@ -128,9 +135,9 @@
    case_eq_ifs: thm list};
 
 fun morph_ctr_sugar phi ({kind, T, ctrs, casex, discs, selss, exhaust, nchotomy, injects, distincts,
-    case_thms, case_cong, case_cong_weak, split, split_asm, disc_defs, disc_thmss, discIs, sel_defs,
-    sel_thmss, distinct_discsss, exhaust_discs, exhaust_sels, collapses, expands, split_sels,
-    split_sel_asms, case_eq_ifs} : ctr_sugar) =
+    case_thms, case_cong, case_cong_weak, case_distribs, split, split_asm, disc_defs, disc_thmss,
+    discIs, disc_eq_cases, sel_defs, sel_thmss, distinct_discsss, exhaust_discs, exhaust_sels,
+    collapses, expands, split_sels, split_sel_asms, case_eq_ifs} : ctr_sugar) =
   {kind = kind,
    T = Morphism.typ phi T,
    ctrs = map (Morphism.term phi) ctrs,
@@ -144,11 +151,13 @@
    case_thms = map (Morphism.thm phi) case_thms,
    case_cong = Morphism.thm phi case_cong,
    case_cong_weak = Morphism.thm phi case_cong_weak,
+   case_distribs = map (Morphism.thm phi) case_distribs,
    split = Morphism.thm phi split,
    split_asm = Morphism.thm phi split_asm,
    disc_defs = map (Morphism.thm phi) disc_defs,
    disc_thmss = map (map (Morphism.thm phi)) disc_thmss,
    discIs = map (Morphism.thm phi) discIs,
+   disc_eq_cases = map (Morphism.thm phi) disc_eq_cases,
    sel_defs = map (Morphism.thm phi) sel_defs,
    sel_thmss = map (map (Morphism.thm phi)) sel_thmss,
    distinct_discsss = map (map (map (Morphism.thm phi))) distinct_discsss,
@@ -227,6 +236,7 @@
 val case_eq_ifN = "case_eq_if";
 val collapseN = "collapse";
 val discN = "disc";
+val disc_eq_caseN = "disc_eq_case";
 val discIN = "discI";
 val distinctN = "distinct";
 val distinct_discN = "distinct_disc";
@@ -244,6 +254,7 @@
 val splitsN = "splits";
 val split_selsN = "split_sels";
 val case_cong_weak_thmsN = "case_cong_weak";
+val case_distribN = "case_distrib";
 
 val cong_attrs = @{attributes [cong]};
 val dest_attrs = @{attributes [dest]};
@@ -369,12 +380,10 @@
 val code_plugin = Plugin_Name.declare_setup @{binding code};
 
 fun prepare_free_constructors kind prep_plugins prep_term
-    ((((raw_plugins, discs_sels), raw_case_binding), ctr_specs),
-      sel_default_eqs) no_defs_lthy =
+    ((((raw_plugins, discs_sels), raw_case_binding), ctr_specs), sel_default_eqs) no_defs_lthy =
   let
     val plugins = prep_plugins no_defs_lthy raw_plugins;
 
-
     (* TODO: sanity checks on arguments *)
 
     val raw_ctrs = map ctr_of_ctr_spec ctr_specs;
@@ -384,7 +393,7 @@
     val n = length raw_ctrs;
     val ks = 1 upto n;
 
-    val _ = if n > 0 then () else error "No constructors specified";
+    val _ = n > 0 orelse error "No constructors specified";
 
     val ctrs0 = map (prep_term no_defs_lthy) raw_ctrs;
 
@@ -394,10 +403,10 @@
 
     fun qualify mandatory = Binding.qualify mandatory fc_b_name;
 
-    val (unsorted_As, B) =
+    val (unsorted_As, [B, C]) =
       no_defs_lthy
       |> variant_tfrees (map (fst o dest_TFree_or_TVar) As0)
-      ||> the_single o fst o mk_TFrees 1;
+      ||> fst o mk_TFrees 2;
 
     val As = map2 (resort_tfree_or_tvar o snd o dest_TFree_or_TVar) As0 unsorted_As;
 
@@ -445,13 +454,15 @@
 
     val case_Ts = map (fn Ts => Ts ---> B) ctr_Tss;
 
-    val (((((((([exh_y], (xss, xss')), yss), fs), gs), [u', v']), [w]), (p, p')), names_lthy) =
+    val ((((((((([exh_y], (xss, xss')), yss), fs), gs), [h]), [u', v']), [w]), (p, p')),
+        names_lthy) =
       no_defs_lthy
       |> mk_Frees "y" [fcT] (* for compatibility with "datatype_realizer.ML" *)
       ||>> mk_Freess' "x" ctr_Tss
       ||>> mk_Freess "y" ctr_Tss
       ||>> mk_Frees "f" case_Ts
       ||>> mk_Frees "g" case_Ts
+      ||>> mk_Frees "h" [B --> C]
       ||>> (apfst (map (rpair fcT)) oo Variable.variant_fixes) [fc_b_name, fc_b_name ^ "'"]
       ||>> mk_Frees "z" [B]
       ||>> yield_singleton (apfst (op ~~) oo mk_Frees' "P") HOLogic.boolT;
@@ -497,6 +508,8 @@
 
     val case0 = Morphism.term phi raw_case;
     val casex = mk_case As B case0;
+    val casexC = mk_case As C case0;
+    val casexBool = mk_case As HOLogic.boolT case0;
 
     val fcase = Term.list_comb (casex, fs);
 
@@ -662,7 +675,7 @@
 
     val goalss = [exhaust_goal] :: inject_goalss @ half_distinct_goalss;
 
-    fun after_qed ([exhaust_thm] :: thmss) lthy =
+    fun after_qed (thmss0 as [exhaust_thm] :: thmss) lthy =
       let
         val ((inject_thms, inject_thmss), half_distinct_thmss) = chop n thmss |>> `flat;
 
@@ -762,9 +775,9 @@
         val (sel_defs, all_sel_thms, sel_thmss, disc_defs, disc_thmss, nontriv_disc_thmss,
              discI_thms, nontriv_discI_thms, distinct_disc_thms, distinct_disc_thmsss,
              exhaust_disc_thms, exhaust_sel_thms, all_collapse_thms, safe_collapse_thms,
-             expand_thms, split_sel_thms, split_sel_asm_thms, case_eq_if_thms) =
+             expand_thms, split_sel_thms, split_sel_asm_thms, case_eq_if_thms, disc_eq_case_thms) =
           if no_discs_sels then
-            ([], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [])
+            ([], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [])
           else
             let
               val udiscs = map (rapp u) discs;
@@ -974,13 +987,44 @@
                   |> singleton (Proof_Context.export names_lthy lthy)
                   |> Thm.close_derivation
                 end;
+
+              val disc_eq_case_thms =
+                let
+                  fun const_of_bool b = if b then @{const True} else @{const False};
+                  fun mk_case_args n = map_index (fn (k, argTs) =>
+                    fold_rev Term.absdummy argTs (const_of_bool (n = k))) ctr_Tss;
+                  val goals = map_index (fn (n, udisc) =>
+                    mk_Trueprop_eq (udisc, list_comb (casexBool, mk_case_args n) $ u)) udiscs;
+                in
+                  Goal.prove_sorry lthy [] [] (Logic.mk_conjunction_balanced goals)
+                    (fn {context = ctxt, ...} => mk_disc_eq_case_tac ctxt (certify ctxt u)
+                       exhaust_thm (flat nontriv_disc_thmss) distinct_thms case_thms)
+                  |> Conjunction.elim_balanced (length goals)
+                  |> Proof_Context.export names_lthy lthy
+                  |> map Thm.close_derivation
+                end;
             in
               (sel_defs, all_sel_thms, sel_thmss, disc_defs, disc_thmss, nontriv_disc_thmss,
                discI_thms, nontriv_discI_thms, distinct_disc_thms, distinct_disc_thmsss,
                [exhaust_disc_thm], [exhaust_sel_thm], all_collapse_thms, safe_collapse_thms,
-               [expand_thm], [split_sel_thm], [split_sel_asm_thm], [case_eq_if_thm])
+               [expand_thm], [split_sel_thm], [split_sel_asm_thm], [case_eq_if_thm],
+               disc_eq_case_thms)
             end;
 
+        val case_distrib_thm =
+          let
+            val args = @{map 2} (fn f => fn argTs =>
+              let val (args, _) = mk_Frees "x" argTs lthy in
+                fold_rev Term.lambda args (h $ list_comb (f, args))
+              end) fs ctr_Tss;
+            val goal = mk_Trueprop_eq (h $ ufcase, list_comb (casexC, args) $ u);
+          in
+            Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, ...} =>
+              mk_case_distrib_tac ctxt (certify ctxt u) exhaust_thm case_thms)
+            |> singleton (Proof_Context.export names_lthy lthy)
+            |> Thm.close_derivation
+          end;
+
         val exhaust_case_names_attr = Attrib.internal (K (Rule_Cases.case_names exhaust_cases));
         val cases_type_attr = Attrib.internal (K (Induct.cases_type fcT_name));
 
@@ -999,9 +1043,11 @@
           [(caseN, case_thms, code_attrs @ nitpicksimp_attrs @ simp_attrs),
            (case_congN, [case_cong_thm], []),
            (case_cong_weak_thmsN, [case_cong_weak_thm], cong_attrs),
+           (case_distribN, [case_distrib_thm], []),
            (case_eq_ifN, case_eq_if_thms, []),
            (collapseN, safe_collapse_thms, if ms = [0] then [] else simp_attrs),
            (discN, flat nontriv_disc_thmss, simp_attrs),
+           (disc_eq_caseN, disc_eq_case_thms, []),
            (discIN, nontriv_discI_thms, []),
            (distinctN, distinct_thms, simp_attrs @ inductsimp_attrs),
            (distinct_discN, distinct_disc_thms, dest_attrs),
@@ -1048,9 +1094,10 @@
           {kind = kind, T = fcT, ctrs = ctrs, casex = casex, discs = discs, selss = selss,
            exhaust = exhaust_thm, nchotomy = nchotomy_thm, injects = inject_thms,
            distincts = distinct_thms, case_thms = case_thms, case_cong = case_cong_thm,
-           case_cong_weak = case_cong_weak_thm, split = split_thm, split_asm = split_asm_thm,
-           disc_defs = disc_defs, disc_thmss = disc_thmss, discIs = discI_thms, sel_defs = sel_defs,
-           sel_thmss = sel_thmss, distinct_discsss = distinct_disc_thmsss,
+           case_cong_weak = case_cong_weak_thm, case_distribs = [case_distrib_thm],
+           split = split_thm, split_asm = split_asm_thm, disc_defs = disc_defs,
+           disc_thmss = disc_thmss, discIs = discI_thms, disc_eq_cases = disc_eq_case_thms,
+           sel_defs = sel_defs, sel_thmss = sel_thmss, distinct_discsss = distinct_disc_thmsss,
            exhaust_discs = exhaust_disc_thms, exhaust_sels = exhaust_sel_thms,
            collapses = all_collapse_thms, expands = expand_thms, split_sels = split_sel_thms,
            split_sel_asms = split_sel_asm_thms, case_eq_ifs = case_eq_if_thms}
@@ -1080,8 +1127,8 @@
 
 val parse_ctr_options =
   Scan.optional (@{keyword "("} |-- Parse.list1
-        (Plugin_Name.parse_filter >> (apfst o K) ||
-         Parse.reserved "discs_sels" >> (apsnd o K o K true)) --|
+        (Plugin_Name.parse_filter >> (apfst o K)
+         || Parse.reserved "discs_sels" >> (apsnd o K o K true)) --|
       @{keyword ")"}
       >> (fn fs => fold I fs default_ctr_options_cmd))
     default_ctr_options_cmd;
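
The two theorem collections added to ctr_sugar here are ordinary case-analysis facts. For a freely generated type such as 'a list with constructors Nil and Cons, case combinator case_list, and discriminator is_Nil, they take roughly the following shape ("case_distrib" is generated once per type, "disc_eq_case" once per discriminator); this is a sketch for illustration, not the literal generated output:

    case_distrib:  "h (case_list a b xs) = case_list (h a) (\<lambda>y ys. h (b y ys)) xs"
    disc_eq_case:  "is_Nil xs = case_list True (\<lambda>y ys. False) xs"
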
--- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar_tactics.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar_tactics.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -19,9 +19,12 @@
   val mk_alternate_disc_def_tac: Proof.context -> int -> thm -> thm -> thm -> tactic
   val mk_case_tac: Proof.context -> int -> int -> thm -> thm list -> thm list list -> tactic
   val mk_case_cong_tac: Proof.context -> thm -> thm list -> tactic
+  val mk_case_distrib_tac: Proof.context -> cterm -> thm -> thm list -> tactic
   val mk_case_eq_if_tac: Proof.context -> int -> thm -> thm list -> thm list list ->
     thm list list -> tactic
   val mk_collapse_tac: Proof.context -> int -> thm -> thm list -> tactic
+  val mk_disc_eq_case_tac: Proof.context -> cterm -> thm -> thm list -> thm list -> thm list ->
+    tactic
   val mk_exhaust_disc_tac: int -> thm -> thm list -> tactic
   val mk_exhaust_sel_tac: int -> thm -> thm list -> tactic
   val mk_expand_tac: Proof.context -> int -> int list -> thm -> thm -> thm list ->
@@ -92,6 +95,14 @@
        REPEAT_DETERM_N m o etac exE THEN' hyp_subst_tac ctxt THEN'
        SELECT_GOAL (unfold_thms_tac ctxt sels) THEN' rtac refl));
 
+fun mk_disc_eq_case_tac ctxt ct exhaust discs distincts cases =
+  HEADGOAL Goal.conjunction_tac THEN
+  ALLGOALS (rtac (cterm_instantiate_pos [SOME ct] exhaust) THEN_ALL_NEW
+    (hyp_subst_tac ctxt THEN'
+     SELECT_GOAL (unfold_thms_tac ctxt (@{thms not_True_eq_False not_False_eq_True} @ cases @
+       ((refl :: discs @ distincts) RL [eqTrueI, eqFalseI]))) THEN'
+     resolve_tac @{thms TrueI True_not_False False_not_True}));
+
 fun mk_expand_tac ctxt n ms uexhaust_disc vexhaust_disc uncollapses distinct_discsss
     distinct_discsss' =
   if ms = [0] then
@@ -144,6 +155,10 @@
          else mk_case_distinct_ctrs_tac ctxt distincts)) ks distinctss))
   end;
 
+fun mk_case_distrib_tac ctxt ct exhaust cases =
+  HEADGOAL (rtac (cterm_instantiate_pos [SOME ct] exhaust)) THEN
+  ALLGOALS (hyp_subst_tac ctxt THEN' SELECT_GOAL (unfold_thms_tac ctxt cases) THEN' rtac refl);
+
 fun mk_case_cong_tac ctxt uexhaust cases =
   HEADGOAL (rtac uexhaust THEN'
     EVERY' (maps (fn casex => [dtac sym, asm_simp_tac (ss_only [casex] ctxt)]) cases));
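
The eqTrueI and eqFalseI rules used by mk_disc_eq_case_tac above (and moved from bnf_util.ML into ctr_sugar_util.ML below) are the two iffD2 instances

    eqTrueI  = iffD2[OF eq_True]:   "P \<Longrightarrow> P = True"
    eqFalseI = iffD2[OF eq_False]:  "\<not> P \<Longrightarrow> P = False"

Resolving refl, the discriminator theorems, and the distinctness theorems against them (the RL step in the tactic) yields rewrite rules of Boolean shape, which unfold_thms_tac then applies together with not_True_eq_False and not_False_eq_True.
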
--- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar_util.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar_util.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -74,6 +74,10 @@
 
   val ss_only: thm list -> Proof.context -> Proof.context
 
+  (*parameterized thms*)
+  val eqTrueI: thm
+  val eqFalseI: thm
+
   val WRAP: ('a -> tactic) -> ('a -> tactic) -> 'a list -> tactic -> tactic
   val WRAP': ('a -> int -> tactic) -> ('a -> int -> tactic) -> 'a list -> (int -> tactic) -> int ->
     tactic
@@ -251,6 +255,9 @@
 
 fun ss_only thms ctxt = clear_simpset (put_simpset HOL_basic_ss ctxt) addsimps thms;
 
+val eqTrueI = @{thm iffD2[OF eq_True]};
+val eqFalseI = @{thm iffD2[OF eq_False]};
+
 (*Tactical WRAP surrounds a static given tactic (core) with two deterministic chains of tactics*)
 fun WRAP gen_before gen_after xs core_tac =
   fold_rev (fn x => fn tac => gen_before x THEN tac THEN gen_after x) xs core_tac;
--- a/src/HOL/Tools/Old_Datatype/old_datatype_data.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/Old_Datatype/old_datatype_data.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -110,11 +110,13 @@
      case_thms = case_rewrites,
      case_cong = case_cong,
      case_cong_weak = case_cong_weak,
+     case_distribs = [],
      split = split,
      split_asm = split_asm,
      disc_defs = [],
      disc_thmss = [],
      discIs = [],
+     disc_eq_cases = [],
      sel_defs = [],
      sel_thmss = [],
      distinct_discsss = [],
--- a/src/HOL/Tools/Transfer/transfer_bnf.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/Transfer/transfer_bnf.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -6,6 +6,7 @@
 
 signature TRANSFER_BNF =
 sig
+  val transfer_plugin: string
   val base_name_of_bnf: BNF_Def.bnf -> binding
   val type_name_of_bnf: BNF_Def.bnf -> string
   val lookup_defined_pred_data: Proof.context -> string -> Transfer.pred_data
@@ -20,6 +21,8 @@
 open BNF_FP_Util
 open BNF_FP_Def_Sugar
 
+val transfer_plugin = Plugin_Name.declare_setup @{binding transfer}
+
 (* util functions *)
 
 fun base_name_of_bnf bnf = Binding.name (Binding.name_of (name_of_bnf bnf))
@@ -293,8 +296,6 @@
 
 (* BNF interpretation *)
 
-val transfer_plugin = Plugin_Name.declare_setup @{binding transfer}
-
 fun transfer_bnf_interpretation bnf lthy =
   let
     val dead = dead_of_bnf bnf
--- a/src/HOL/Tools/record.ML	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Tools/record.ML	Mon Jan 05 23:33:39 2015 +0100
@@ -1786,10 +1786,10 @@
     {kind = Ctr_Sugar.Record, T = body_type (fastype_of ctr), ctrs = [ctr], casex = Term.dummy,
      discs = [], selss = [], exhaust = exhaust, nchotomy = Drule.dummy_thm, injects = [inject],
      distincts = [], case_thms = [], case_cong = Drule.dummy_thm, case_cong_weak = Drule.dummy_thm,
-     split = Drule.dummy_thm, split_asm = Drule.dummy_thm, disc_defs = [], disc_thmss = [],
-     discIs = [], sel_defs = [], sel_thmss = [sel_thms], distinct_discsss = [], exhaust_discs = [],
-     exhaust_sels = [], collapses = [], expands = [], split_sels = [], split_sel_asms = [],
-     case_eq_ifs = []};
+     case_distribs = [], split = Drule.dummy_thm, split_asm = Drule.dummy_thm, disc_defs = [],
+     disc_thmss = [], discIs = [], disc_eq_cases = [], sel_defs = [], sel_thmss = [sel_thms],
+     distinct_discsss = [], exhaust_discs = [], exhaust_sels = [], collapses = [], expands = [],
+     split_sels = [], split_sel_asms = [], case_eq_ifs = []};
 
 
 (* definition *)
--- a/src/HOL/Transfer.thy	Mon Jan 05 23:29:38 2015 +0100
+++ b/src/HOL/Transfer.thy	Mon Jan 05 23:33:39 2015 +0100
@@ -6,7 +6,7 @@
 section {* Generic theorem transfer using relations *}
 
 theory Transfer
-imports Hilbert_Choice Metis Basic_BNF_LFPs
+imports Basic_BNF_LFPs Hilbert_Choice Metis
 begin
 
 subsection {* Relator for function space *}
@@ -361,7 +361,15 @@
 
 end
 
+lemma if_conn:
+  "(if P \<and> Q then t else e) = (if P then if Q then t else e else e)"
+  "(if P \<or> Q then t else e) = (if P then t else if Q then t else e)"
+  "(if P \<longrightarrow> Q then t else e) = (if P then if Q then t else e else t)"
+  "(if \<not> P then t else e) = (if P then e else t)"
+by auto
+
 ML_file "Tools/Transfer/transfer_bnf.ML"
+ML_file "Tools/BNF/bnf_fp_rec_sugar_transfer.ML"
 
 declare pred_fun_def [simp]
 declare rel_fun_eq [relator_eq]