changeset:   62755:7fde2461f9ef
parent:      62747:f65ef4723aca (diff)
parent:      62754:c35012b86e6f (current diff)
child:       62756:d4b7d128ec5a
child:       62757:e5828ed9a576
author:      wenzelm
date:        Tue, 29 Mar 2016 23:45:28 +0200
summary:     merged
--- a/doc/Contents	Tue Mar 29 23:41:28 2016 +0200
+++ b/doc/Contents	Tue Mar 29 23:45:28 2016 +0200
@@ -4,6 +4,7 @@
   classes         Tutorial on Type Classes
   datatypes       Tutorial on (Co)datatype Definitions
   functions       Tutorial on Function Definitions
+  corec           Tutorial on Nonprimitively Corecursive Definitions
   codegen         Tutorial on Code Generation
   nitpick         User's Guide to Nitpick
   sledgehammer    User's Guide to Sledgehammer
@@ -22,4 +23,3 @@
   intro           Old Introduction to Isabelle
   logics          Isabelle's Logics: HOL and misc logics
   logics-ZF       Isabelle's Logics: FOL and ZF
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Doc/Corec/Corec.thy	Tue Mar 29 23:45:28 2016 +0200
@@ -0,0 +1,703 @@
+(*  Title:      Doc/Corec/Corec.thy
+    Author:     Jasmin Blanchette, Inria, LORIA, MPII
+    Author:     Aymeric Bouzy, Ecole polytechnique
+    Author:     Andreas Lochbihler, ETH Zuerich
+    Author:     Andrei Popescu, Middlesex University
+    Author:     Dmitriy Traytel, ETH Zuerich
+
+Tutorial for nonprimitively corecursive definitions.
+*)
+
+theory Corec
+imports
+  GCD
+  "../Datatypes/Setup"
+  "~~/src/HOL/Library/BNF_Corec"
+  "~~/src/HOL/Library/FSet"
+begin
+
+section \<open>Introduction
+  \label{sec:introduction}\<close>
+
+text \<open>
+...
+@{cite "isabelle-datatypes"}
+
+* friend
+* up to
+
+* versioning
+
+BNF
+
+link to papers
+
+* plugins (code)
+\<close>
+
+
+section \<open>Introductory Examples
+  \label{sec:introductory-examples}\<close>
+
+text \<open>
+\<close>
+
+
+subsection \<open>Simple Corecursion
+  \label{ssec:simple-corecursion}\<close>
+
+text \<open>
+The case studies by Rutten~@{cite rutten05} and Hinze~@{cite hinze10} on stream
+calculi serve as our starting point. Streams can be defined as follows
+(cf. @{file "~~/src/HOL/Library/Stream.thy"}):
+\<close>
+
+    codatatype 'a stream =
+      SCons (shd: 'a) (stl: "'a stream")
+
+text \<open>
+The @{command corec} command makes it possible to define functions where the
+corecursive call occurs under two or more constructors:
+\<close>
+
+    corec oneTwos :: "nat stream" where
+      "oneTwos = SCons 1 (SCons 2 oneTwos)"
+
+text \<open>
+\noindent
+This is already beyond the syntactic fragment supported by \keyw{primcorec}.
+
+The following definition of pointwise sum can be performed with either
+\keyw{primcorec} or @{command corec}:
+\<close>
+
+    primcorec ssum :: "('a :: plus) stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream" where
+      "ssum xs ys = SCons (shd xs + shd ys) (ssum (stl xs) (stl ys))"
+
+text \<open>
+\noindent
+Pointwise sum meets the friendliness criterion. We register it as a friend using
+the @{command friend_of_corec} command. The command requires us to give a
+specification of @{const ssum} where a constructor (@{const SCons}) occurs at
+the outermost position on the right-hand side. Here, we can simply reuse the
+\keyw{primcorec} specification above:
+\<close>
+
+    friend_of_corec ssum :: "('a :: plus) stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream" where
+      "ssum xs ys = SCons (shd xs + shd ys) (ssum (stl xs) (stl ys))"
+       apply (rule ssum.code)
+      by transfer_prover
+
+text \<open>
+\noindent
+The command emits two subgoals. The first one corresponds to the equation we
+specified and is trivial to discharge. The second one is a parametricity subgoal
+and can usually be discharged using the @{text transfer_prover} proof method.
+
+After registering @{const ssum} as a friend, we can use it in the corecursive
+call context, either inside or outside the constructor guard:
+\<close>
+
+    corec fibA :: "nat stream" where
+      "fibA = SCons 0 (ssum (SCons 1 fibA) fibA)"
+
+text \<open>\blankline\<close>
+
+    corec fibB :: "nat stream" where
+      "fibB = ssum (SCons 0 (SCons 1 fibB)) (SCons 0 fibB)"
+
+text \<open>
+Using the @{text "friend"} option, we can simultaneously define a function and
+register it as a friend:
+\<close>
+
+    corec (friend)
+      sprod :: "('a :: {plus,times}) stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream"
+    where
+      "sprod xs ys =
+       SCons (shd xs * shd ys) (ssum (sprod xs (stl ys)) (sprod (stl xs) ys))"
+
+text \<open>\blankline\<close>
+
+    corec (friend) sexp :: "nat stream \<Rightarrow> nat stream" where
+      "sexp xs = SCons (2 ^^ shd xs) (sprod (stl xs) (sexp xs))"
+
+text \<open>
+\noindent
+The parametricity subgoal is given to @{text transfer_prover}.
+
+The @{const sprod} and @{const sexp} functions provide shuffle product and
+exponentiation on streams. We can use them to define the stream of factorial
+numbers in two different ways:
+\<close>
+
+    corec factA :: "nat stream" where
+      "factA = (let zs = SCons 1 factA in sprod zs zs)"
+
+    corec factB :: "nat stream" where
+      "factB = sexp (SCons 0 factB)"
+
+text \<open>
+The arguments of friendly operations can be of complex types involving the
+target codatatype. The following example defines the supremum of a finite set of
+streams by primitive corecursion and registers it as friendly:
+\<close>
+
+    corec (friend) sfsup :: "nat stream fset \<Rightarrow> nat stream" where
+      "sfsup X = SCons (Sup (fset (fimage shd X))) (sfsup (fimage stl X))"
+
+text \<open>
+\noindent
+In general, the arguments may be any BNF, with the restriction that the target
+codatatype (@{typ "nat stream"}) may occur only in a live position of the BNF.
+For this reason, the following operation, on unbounded sets, cannot be
+registered as a friend:
+\<close>
+
+    corec ssup :: "nat stream set \<Rightarrow> nat stream" where
+      "ssup X = SCons (Sup (image shd X)) (ssup (image stl X))"
+
+
+subsection \<open>Nested Corecursion
+  \label{ssec:nested-corecursion}\<close>
+
+text \<open>
+The package generally supports arbitrary codatatypes with multiple constructors
+and nesting through other type constructors (BNFs). Consider the following type
+of finitely branching Rose trees of potentially infinite depth:
+\<close>
+
+    codatatype 'a tree =
+      Node (lab: 'a) (sub: "'a tree list")
+
+text \<open>
+We first define the pointwise sum of two trees analogously to @{const ssum}:
+\<close>
+
+    corec (friend) tplus :: "('a :: plus) tree \<Rightarrow> 'a tree \<Rightarrow> 'a tree" where
+      "tplus t u =
+       Node (lab t + lab u) (map (\<lambda>(t', u'). tplus t' u') (zip (sub t) (sub u)))"
+
+text \<open>
+\noindent
+Here, @{const map} is the standard map function on lists, and @{const zip}
+converts two parallel lists into a list of pairs. The @{const tplus} function is
+primitively corecursive. Instead of @{text "corec (friend)"}, we could also have
+used \keyw{primcorec} and @{command friend_of_corec}, as we did for
+@{const ssum}.
+
+Once @{const tplus} is registered as friendly, we can use it in the corecursive
+call context:
+\<close>
+
+    corec (friend) ttimes :: "('a :: {plus,times}) tree \<Rightarrow> 'a tree \<Rightarrow> 'a tree" where
+      "ttimes t u = Node (lab t * lab u)
+         (map (\<lambda>(t', u'). tplus (ttimes t u') (ttimes t' u)) (zip (sub t) (sub u)))"
+
+text \<open>
+All the syntactic convenience provided by \keyw{primcorec} is also supported by
+@{command corec}, @{command corecursive}, and @{command friend_of_corec}. In
+particular, nesting through the function type can be expressed using
+@{text \<lambda>}-abstractions and function applications rather than through composition
+(@{term "op \<circ>"}, the map function for @{text \<Rightarrow>}). For example:
+\<close>
+
+    codatatype 'a language =
+      Lang (\<oo>: bool) (\<dd>: "'a \<Rightarrow> 'a language")
+
+text \<open>\blankline\<close>
+
+    corec (friend) Plus :: "'a language \<Rightarrow> 'a language \<Rightarrow> 'a language" where
+      "Plus r s = Lang (\<oo> r \<or> \<oo> s) (\<lambda>a. Plus (\<dd> r a) (\<dd> s a))"
+
+text \<open>\blankline\<close>
+
+    corec (friend) Times :: "'a language \<Rightarrow> 'a language \<Rightarrow> 'a language" where
+      "Times r s = Lang (\<oo> r \<and> \<oo> s)
+         (\<lambda>a. if \<oo> r then Plus (Times (\<dd> r a) s) (\<dd> s a) else Times (\<dd> r a) s)"
+
+text \<open>\blankline\<close>
+
+    corec (friend) Star :: "'a language \<Rightarrow> 'a language" where
+      "Star r = Lang True (\<lambda>a. Times (\<dd> r a) (Star r))"
+
+text \<open>\blankline\<close>
+
+    corec (friend) Inter :: "'a language \<Rightarrow> 'a language \<Rightarrow> 'a language" where
+      "Inter r s = Lang (\<oo> r \<and> \<oo> s) (\<lambda>a. Inter (\<dd> r a) (\<dd> s a))"
+
+text \<open>\blankline\<close>
+
+    corec (friend) PLUS :: "'a language list \<Rightarrow> 'a language" where
+      "PLUS xs = Lang (\<exists>x \<in> set xs. \<oo> x) (\<lambda>a. PLUS (map (\<lambda>r. \<dd> r a) xs))"
+
+
+subsection \<open>Mixed Recursion--Corecursion
+  \label{ssec:mixed-recursion-corecursion}\<close>
+
+text \<open>
+It is often convenient to let a corecursive function perform some finite
+computation before producing a constructor. With mixed recursion--corecursion, a
+finite number of unguarded recursive calls perform this calculation before
+reaching a guarded corecursive call.
+
+Intuitively, the unguarded recursive call can be unfolded to arbitrary finite
+depth, ultimately yielding a purely corecursive definition. An example is the
+@{term primes} function from Di Gianantonio and Miculan
+@{cite "di-gianantonio-miculan-2003"}:
+\<close>
+
+    corecursive primes :: "nat \<Rightarrow> nat \<Rightarrow> nat stream" where
+      "primes m n =
+       (if (m = 0 \<and> n > 1) \<or> coprime m n then
+          SCons n (primes (m * n) (n + 1))
+        else
+          primes m (n + 1))"
+      apply (relation "measure (\<lambda>(m, n).
+        if n = 0 then 1 else if coprime m n then 0 else m - n mod m)")
+       apply (auto simp: mod_Suc intro: Suc_lessI)
+       apply (metis One_nat_def coprime_Suc_nat gcd.commute gcd_red_nat)
+      by (metis diff_less_mono2 lessI mod_less_divisor)
+
+text \<open>
+\noindent
+The @{command corecursive} command is a variant of @{command corec} that allows
+us to specify a termination argument for any unguarded self-call.
+
+When called with @{term "m = 1"} and @{term "n = 2"}, the @{const primes}
+function computes the stream of prime numbers. The unguarded call in the
+@{text else} branch increments @{term n} until it is coprime to the first
+argument @{term m} (i.e., the greatest common divisor of @{term m} and
+@{term n} is @{term 1}).
+
+For any positive integers @{term m} and @{term n}, the numbers @{term m} and
+@{term "m * n + 1"} are coprime, yielding an upper bound on the number of times
+@{term n} is increased. Hence, the function will take the @{text else} branch at
+most finitely often before taking the @{text then} branch and producing one
+constructor.
+There is a slight complication when @{term "m = 0 \<and> n > 1"}: Without the first
+disjunct in the @{text "if"} condition, the function could stall. (This corner
+case was overlooked in the original example
+@{cite "di-gianantonio-miculan-2003"}.)
+
+In the following examples, termination is discharged automatically by
+@{command corec} by invoking @{method lexicographic_order}:
+\<close>
+
+    corec catalan :: "nat \<Rightarrow> nat stream" where
+      "catalan n =
+       (if n > 0 then ssum (catalan (n - 1)) (SCons 0 (catalan (n + 1)))
+        else SCons 1 (catalan 1))"
+
+    corec collatz :: "nat \<Rightarrow> nat stream" where
+      "collatz n = (if even n \<and> n > 0 then collatz (n div 2)
+         else SCons n (collatz (3 * n + 1)))"
+
+text \<open>
+A more elaborate case study, revolving around the filter function on lazy lists,
+is presented in @{file "~~/src/HOL/Corec_Examples/LFilter.thy"}.
+\<close>
+
+
+subsection \<open>Self-Friendship
+  \label{ssec:self-friendship}\<close>
+
+text \<open>
+Paradoxically, the package allows us to simultaneously define a function and use
+it as its own friend, as in the following definition of a ``skewed product'':
+\<close>
+
+    corec (friend)
+      sskew :: "('a :: {plus,times}) stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream"
+    where
+      "sskew xs ys =
+       SCons (shd xs * shd ys) (sskew (sskew xs (stl ys)) (sskew (stl xs) ys))"
+
+text \<open>
+\noindent
+Such definitions, with nested self-calls on the right-hand side, cannot be
+separated into a @{command corec} part and a @{command friend_of_corec} part.
+\<close>
+
+
+subsection \<open>Coinduction
+  \label{ssec:coinduction}\<close>
+
+text \<open>
+Once a corecursive specification has been accepted, we normally want to reason
+about it. The @{command codatatype} command generates a structural coinduction
+principle that matches primitively corecursive functions. For nonprimitive
+specifications, our package provides the more advanced proof principle of
+\emph{coinduction up to congruence}---or simply \emph{coinduction up-to}.
+
+The structural coinduction principle for @{typ "'a stream"}, called
+@{thm [source] stream.coinduct}, is as follows:
+%
+\begin{indentblock}
+@{thm stream.coinduct[no_vars]}
+\end{indentblock}
+%
+Coinduction allows us to prove an equality @{text "l = r"} on streams by
+providing a relation @{text R} that relates @{text l} and @{text r} (first
+premise) and that constitutes a bisimulation (second premise). Streams that are
+related by a bisimulation cannot be distinguished by taking observations (via
+the selectors @{const shd} and @{const stl}); hence they must be equal.
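+
+For example, a commutativity property of @{const ssum} can be proved by
+structural coinduction. The following lemma and its one-line proof are only a
+sketch for illustration: they are not part of the package's output, they
+restrict the statement to @{typ "nat stream"} (so that @{text "+"} is
+commutative), and they assume the selector equations generated for
+@{const ssum}; the script may require adjustment:
+\<close>
+
+    (* sketch for illustration only; relies on the generated ssum.sel rules *)
+    lemma ssum_commute: "ssum (xs :: nat stream) ys = ssum ys xs"
+      by (coinduction arbitrary: xs ys) (auto simp: ssum.sel add.commute)
+
+text \<open>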
+
+The coinduction up-to principle after registering @{const sskew} as friendly is
+available as @{thm [source] sskew.coinduct} and as one of the components of
+the collection @{thm [source] stream.coinduct_upto}:
+%
+\begin{indentblock}
+@{thm sskew.coinduct[no_vars]}
+\end{indentblock}
+%
+This rule is almost identical to structural coinduction, except that the
+corecursive application of @{term R} is replaced by
+@{term "stream.v5.congclp R"}. The @{const stream.v5.congclp} predicate is
+equipped with the following introduction rules:
+
+\begin{indentblock}
+\begin{description}
+
+\item[@{thm [source] sskew.cong_base}\rm:] ~ \\
+@{thm sskew.cong_base[no_vars]}
+
+\item[@{thm [source] sskew.cong_refl}\rm:] ~ \\
+@{thm sskew.cong_refl[no_vars]}
+
+\item[@{thm [source] sskew.cong_sym}\rm:] ~ \\
+@{thm sskew.cong_sym[no_vars]}
+
+\item[@{thm [source] sskew.cong_trans}\rm:] ~ \\
+@{thm sskew.cong_trans[no_vars]}
+
+\item[@{thm [source] sskew.cong_SCons}\rm:] ~ \\
+@{thm sskew.cong_SCons[no_vars]}
+
+\item[@{thm [source] sskew.cong_ssum}\rm:] ~ \\
+@{thm sskew.cong_ssum[no_vars]}
+
+\item[@{thm [source] sskew.cong_sprod}\rm:] ~ \\
+@{thm sskew.cong_sprod[no_vars]}
+
+\item[@{thm [source] sskew.cong_sskew}\rm:] ~ \\
+@{thm sskew.cong_sskew[no_vars]}
+
+\end{description}
+\end{indentblock}
+%
+The introduction rules are also available as
+@{thm [source] sskew.cong_intros}.
+
+Notice that there is no introduction rule corresponding to @{const sexp},
+because @{const sexp} has a more restrictive result type than @{const sskew}
+(@{typ "nat stream"} vs. @{typ "('a :: {plus,times}) stream"}.
+
+Since the package maintains a set of incomparable corecursors, there is also a
+set of associated coinduction principles and a set of sets of introduction
+rules. A technically subtle point is to make Isabelle choose the right rules in
+most situations. For this purpose, the package maintains the collection
+@{thm [source] stream.coinduct_upto} of coinduction principles ordered by
+increasing generality, which works well with Isabelle's philosophy of applying
+the first rule that matches. For example, after registering @{const ssum} as a
+friend, proving the equality @{term "l = r"} on @{typ "nat stream"} might
+require the coinduction principle for @{typ "nat stream"} up to
+@{const ssum}.
+
+The collection @{thm [source] stream.coinduct_upto} is guaranteed to be complete
+and up to date with respect to the type instances of definitions considered so
+far, but occasionally it may be necessary to take the union of two incomparable
+coinduction principles. This can be done using the @{command coinduction_upto}
+command. Consider the following definitions:
+\<close>
+
+    codatatype ('a, 'b) tllist =
+      TNil (terminal: 'b)
+    | TCons (thd: 'a) (ttl: "('a, 'b) tllist")
+
+    corec (friend) square_elems :: "(nat, 'b) tllist \<Rightarrow> (nat, 'b) tllist" where
+      "square_elems xs =
+       (case xs of
+         TNil z \<Rightarrow> TNil z
+       | TCons y ys \<Rightarrow> TCons (y ^ 2) (square_elems ys))"
+
+    corec (friend) square_terminal :: "('a, int) tllist \<Rightarrow> ('a, int) tllist" where
+      "square_terminal xs =
+       (case xs of
+         TNil z \<Rightarrow> TNil (z ^ 2)
+       | TCons y ys \<Rightarrow> TCons y (square_terminal ys))"
+
+text \<open>
+At this point, @{thm [source] tllist.coinduct_upto} contains three variants of the
+coinduction principle:
+%
+\begin{itemize}
+\item @{typ "('a, int) tllist"} up to @{const TNil}, @{const TCons}, and
+  @{const square_terminal};
+\item @{typ "(nat, 'b) tllist"} up to @{const TNil}, @{const TCons}, and
+  @{const square_elems};
+\item @{typ "('a, 'b) tllist"} up to @{const TNil} and @{const TCons}.
+\end{itemize}
+%
+The following variant is missing:
+%
+\begin{itemize}
+\item @{typ "(nat, int) tllist"} up to @{const TNil}, @{const TCons},
+  @{const square_elems}, and @{const square_terminal}.
+\end{itemize}
+%
+To generate it, without having to define a new function with @{command corec},
+we can use the following command:
+\<close>
+
+    coinduction_upto nat_int_tllist: "(nat, int) tllist"
+
+text \<open>
+\noindent
+This produces the theorems @{thm [source] nat_int_tllist.coinduct_upto} and
+@{thm [source] nat_int_tllist.cong_intros} (as well as the individually named
+introduction rules) and extends @{thm [source] tllist.coinduct_upto} and
+@{thm [source] tllist.cong_intros}.
+\<close>
+
+
+subsection \<open>Uniqueness Reasoning
+  \label{ssec:uniqueness-reasoning}\<close>
+
+text \<open>
+It is sometimes possible to achieve better automation by using a more
+specialized proof method than coinduction. Uniqueness principles maintain a good
+balance between expressiveness and automation. They exploit the property that a
+corecursive specification is the unique solution to a fixpoint equation.
+
+The @{command corec}, @{command corecursive}, and @{command friend_of_corec}
+commands generate a property @{text f.unique} about the function of interest
+@{term f} that can be used to prove that any function that satisfies
+@{term f}'s corecursive specification must be equal to @{term f}. For example:
+\[@{thm ssum.unique[no_vars]}\]
+
+The uniqueness principles are not restricted to functions defined using
+@{command corec} or @{command corecursive} or registered with
+@{command friend_of_corec}. Suppose @{term "t x"} is an arbitrary term
+depending on @{term x}. The @{method corec_unique} proof method, provided by our
+tool, transforms subgoals of the form
+\[@{term "(\<forall>x. f x = H x f) \<Longrightarrow> f x = t x"}\]
+into
+\[@{term "\<forall>x. t x = H x t"}\]
+The higher-order functional @{term H} must be such that the equation
+@{term "f x = H x f"} would be a valid @{command corec} specification---but
+without nested self-calls or unguarded calls. Thus, @{method corec_unique}
+proves uniqueness of @{term t} with respect to the given fixpoint equation
+regardless of how @{term t} was defined. For example:
+\<close>
+
+    lemma
+      fixes f :: "nat stream \<Rightarrow> nat stream \<Rightarrow> nat stream"
+      assumes "\<forall>xs ys. f xs ys =
+        SCons (shd ys * shd xs) (ssum (f xs (stl ys)) (f (stl xs) ys))"
+      shows "f = sprod"
+        using assms
+      proof corec_unique
+        show "sprod = (\<lambda>xs ys :: nat stream.
+            SCons (shd ys * shd xs) (ssum (sprod xs (stl ys)) (sprod (stl xs) ys)))"
+          apply (rule ext)+
+          apply (subst sprod.code)
+          by simp
+      qed
+
+
+section \<open>Command Syntax
+  \label{sec:command-syntax}\<close>
+
+subsection \<open>\keyw{corec} and \keyw{corecursive}
+  \label{ssec:corec-and-corecursive}\<close>
+
+text \<open>
+\begin{matharray}{rcl}
+  @{command_def "corec"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+  @{command_def "corecursive"} & : & @{text "local_theory \<rightarrow> proof(prove)"}
+\end{matharray}
+
+@{rail \<open>
+  (@@{command corec} | @@{command corecursive}) target? \<newline>
+    @{syntax cr_options}? fix @'where' prop
+  ;
+  @{syntax_def cr_options}: '(' ((@{syntax plugins} | 'friend' | 'transfer') + ',') ')'
+\<close>}
+
+\medskip
+
+\noindent
+The @{command corec} and @{command corecursive} commands introduce a corecursive
+function over a codatatype.
+
+The syntactic entity \synt{target} can be used to specify a local context,
+\synt{fix} denotes name with an optional type signature, and \synt{prop} denotes
+a HOL proposition @{cite "isabelle-isar-ref"}.
+
+The optional target is optionally followed by a combination of the following
+options:
+
+\begin{itemize}
+\setlength{\itemsep}{0pt}
+
+\item
+The @{text plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
+
+\item
+The @{text friend} option indicates that the defined function should be
+registered as a friend. This gives rise to additional proof obligations.
+
+\item
+The @{text transfer} option indicates that an unconditional transfer rule
+should be generated and proved @{text "by transfer_prover"}. The
+@{text "[transfer_rule]"} attribute is set on the generated theorem.
+\end{itemize}
+
+The @{command corec} command is an abbreviation for @{command corecursive} with
+@{text "apply transfer_prover"} or @{text "apply lexicographic_order"} to
+discharge any emerging proof obligations.
+\<close>
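+
+text \<open>
+For illustration, here is a hypothetical variant of @{const catalan} defined
+with @{command corecursive}; the termination obligation that @{command corec}
+discharges automatically is instead closed by an explicit invocation of
+@{method lexicographic_order}. The constant @{text catalan'} is introduced here
+only to illustrate this relationship:
+\<close>
+
+    (* illustration only: duplicates catalan with an explicit termination proof *)
+    corecursive catalan' :: "nat \<Rightarrow> nat stream" where
+      "catalan' n =
+       (if n > 0 then ssum (catalan' (n - 1)) (SCons 0 (catalan' (n + 1)))
+        else SCons 1 (catalan' 1))"
+      by lexicographic_order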
+
+
+subsection \<open>\keyw{friend_of_corec}
+  \label{ssec:friend-of-corec}\<close>
+
+text \<open>
+\begin{matharray}{rcl}
+  @{command_def "friend_of_corec"} & : & @{text "local_theory \<rightarrow> proof(prove)"}
+\end{matharray}
+
+@{rail \<open>
+  @@{command friend_of_corec} target? \<newline>
+    @{syntax foc_options}? fix @'where' prop
+  ;
+  @{syntax_def foc_options}: '(' ((@{syntax plugins} | 'transfer') + ',') ')'
+\<close>}
+
+\medskip
+
+\noindent
+The @{command friend_of_corec} command registers a corecursive function as
+friendly.
+
+The syntactic entity \synt{target} can be used to specify a local context,
+\synt{fix} denotes a name with an optional type signature, and \synt{prop} denotes
+a HOL proposition @{cite "isabelle-isar-ref"}.
+
+The optional target is optionally followed by a combination of the following
+options:
+
+\begin{itemize}
+\setlength{\itemsep}{0pt}
+
+\item
+The @{text plugins} option indicates which plugins should be enabled
+(@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
+
+\item
+The @{text transfer} option indicates that an unconditional transfer rule
+should be generated and proved @{text "by transfer_prover"}. The
+@{text "[transfer_rule]"} attribute is set on the generated theorem.
+\end{itemize}
+\<close>
+
+
+subsection \<open>\keyw{coinduction_upto}
+  \label{ssec:coinduction-upto}\<close>
+
+text \<open>
+\begin{matharray}{rcl}
+  @{command_def "coinduction_upto"} & : & @{text "local_theory \<rightarrow> local_theory"}
+\end{matharray}
+
+@{rail \<open>
+  @@{command coinduction_upto} target? name ':' type
+\<close>}
+
+\medskip
+
+\noindent
+The @{command coinduction_upto} command generates a coinduction up-to rule for a
+given instance of a (possibly polymorphic) codatatype and notes the result with
+the specified prefix.
+
+The syntactic entity \synt{name} denotes an identifier and \synt{type} denotes a
+type @{cite "isabelle-isar-ref"}.
+\<close>
+
+
+section \<open>Generated Theorems
+  \label{sec:generated-theorems}\<close>
+
+text \<open>
+...
+\<close>
+
+
+subsection \<open>\keyw{corec} and \keyw{corecursive}
+  \label{ssec:corec-and-corecursive}\<close>
+
+text \<open>
+...
+\<close>
+
+
+subsection \<open>\keyw{friend_of_corec}
+  \label{ssec:friend-of-corec}\<close>
+
+text \<open>
+...
+\<close>
+
+
+subsection \<open>\keyw{coinduction_upto}
+  \label{ssec:coinduction-upto}\<close>
+
+text \<open>
+...
+\<close>
+
+
+section \<open>Proof Method
+  \label{sec:proof-method}\<close>
+
+text \<open>
+...
+\<close>
+
+
+subsection \<open>\textit{corec_unique}
+  \label{ssec:corec-unique}\<close>
+
+text \<open>
+...
+\<close>
+
+
+section \<open>Known Bugs and Limitations
+  \label{sec:known-bugs-and-limitations}\<close>
+
+text \<open>
+This section lists the known bugs and limitations of the corecursion package at
+the time of this writing.
+
+\begin{enumerate}
+\setlength{\itemsep}{0pt}
+
+\item
+\emph{TODO.} TODO.
+
+  * no mutual types
+  * limitation on type of friend
+  * unfinished tactics
+  * polymorphism of corecUU_transfer
+  * alternative views
+  * plugins
+
+\end{enumerate}
+\<close>
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Doc/Corec/document/build	Tue Mar 29 23:45:28 2016 +0200
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+set -e
+
+FORMAT="$1"
+VARIANT="$2"
+
+"$ISABELLE_HOME/src/Doc/prepare_document" "$FORMAT"
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Doc/Corec/document/root.tex	Tue Mar 29 23:45:28 2016 +0200
@@ -0,0 +1,93 @@
+\documentclass[12pt,a4paper]{article} % fleqn
+\usepackage{lmodern}
+\usepackage[T1]{fontenc}
+\usepackage{amsfonts}
+\usepackage{amsmath}
+\usepackage{cite}
+\usepackage{enumitem}
+\usepackage{footmisc}
+\usepackage{latexsym}
+\usepackage{graphicx}
+\usepackage{iman}
+\usepackage{extra}
+\usepackage{isar}
+\usepackage{isabelle}
+\usepackage{isabellesym}
+\usepackage{style}
+\usepackage{pdfsetup}
+\usepackage{railsetup}
+\usepackage{framed}
+\usepackage{regexpatch}
+
+\makeatletter
+\xpatchcmd{\chaptermark}{\MakeUppercase}{}{}{}%
+\xpatchcmd{\sectionmark}{\MakeUppercase}{}{}{}%
+\xpatchcmd*{\tableofcontents}{\MakeUppercase}{}{}{}%
+\makeatother
+
+\setcounter{secnumdepth}{3}
+\setcounter{tocdepth}{3}
+
+\renewcommand\_{\hbox{\textunderscore\kern-.05ex}}
+
+\newbox\boxA
+\setbox\boxA=\hbox{\ }
+\parindent=4\wd\boxA
+
+\newcommand\blankline{\vskip-.25\baselineskip}
+
+\newenvironment{indentblock}{\list{}{\setlength{\leftmargin}{\parindent}}\item[]}{\endlist}
+
+\newcommand{\keyw}[1]{\textbf{#1}}
+\newcommand{\synt}[1]{\textit{#1}}
+\newcommand{\hthm}[1]{\textbf{\textit{#1}}}
+
+%\renewcommand{\isactrlsub}[1]{\/$\sb{\mathrm{#1}}$}
+\renewcommand\isactrlsub[1]{\/$\sb{#1}$}
+\renewcommand\isadigit[1]{\/\ensuremath{\mathrm{#1}}}
+\renewcommand\isacharprime{\isamath{{'}\mskip-2mu}}
+\renewcommand\isacharunderscore{\mbox{\_}}
+\renewcommand\isacharunderscorekeyword{\mbox{\_}}
+\renewcommand\isachardoublequote{\mbox{\upshape{``}}}
+\renewcommand\isachardoublequoteopen{\mbox{\upshape{``}\kern.1ex}}
+\renewcommand\isachardoublequoteclose{\/\kern.15ex\mbox{\upshape{''}}}
+\renewcommand\isacharverbatimopen{\isacharbraceleft\isacharasterisk}
+\renewcommand\isacharverbatimclose{\isacharasterisk\isacharbraceright}
+\renewcommand{\isasyminverse}{\isamath{{}^{-}}}
+
+\hyphenation{isa-belle}
+
+\isadroptag{theory}
+
+\title{%\includegraphics[scale=0.5]{isabelle_hol} \\[4ex]
+Defining Nonprimitively (Co)recursive Functions in Isabelle/HOL}
+\author{Jasmin Christian Blanchette, Aymeric Bouzy, \\
+Andreas Lochbihler, Andrei Popescu, and \\
+Dmitriy Traytel}
+
+\urlstyle{tt}
+
+\begin{document}
+
+\maketitle
+
+\begin{sloppy}
+\begin{abstract}
+\noindent
+This tutorial describes the definitional package for nonprimitively corecursive functions
+in Isabelle/HOL. The following commands are provided:
+\keyw{corec}, \keyw{corecursive}, \keyw{friend\_of\_corec}, and \keyw{coinduction\_\allowbreak upto}.
+They supplement \keyw{codatatype}, \keyw{primcorec}, and \keyw{primcorecursive}, which
+define codatatypes and primitively corecursive functions.
+\end{abstract}
+\end{sloppy}
+
+\tableofcontents
+
+\input{Corec.tex}
+
+\let\em=\sl
+\bibliography{manual}{}
+\bibliographystyle{abbrv}
+
+\end{document}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Doc/Corec/document/style.sty	Tue Mar 29 23:45:28 2016 +0200
@@ -0,0 +1,46 @@
+
+%% toc
+\newcommand{\tocentry}[1]{\cleardoublepage\phantomsection\addcontentsline{toc}{chapter}{#1}
+\@mkboth{\MakeUppercase{#1}}{\MakeUppercase{#1}}}
+
+%% paragraphs
+\setlength{\parindent}{1em}
+
+%% references
+\newcommand{\secref}[1]{\S\ref{#1}}
+\newcommand{\figref}[1]{figure~\ref{#1}}
+
+%% logical markup
+\newcommand{\strong}[1]{{\bfseries {#1}}}
+\newcommand{\qn}[1]{\emph{#1}}
+
+%% typographic conventions
+\newcommand{\qt}[1]{``{#1}''}
+\newcommand{\ditem}[1]{\item[\isastyletext #1]}
+
+%% quote environment
+\isakeeptag{quote}
+\renewenvironment{quote}
+  {\list{}{\leftmargin2em\rightmargin0pt}\parindent0pt\parskip0pt\item\relax}
+  {\endlist}
+\renewcommand{\isatagquote}{\begin{quote}}
+\renewcommand{\endisatagquote}{\end{quote}}
+\newcommand{\quotebreak}{\\[1.2ex]}
+
+%% presentation
+\setcounter{secnumdepth}{2} \setcounter{tocdepth}{2}
+
+%% character detail
+\renewcommand{\isadigit}[1]{\isamath{#1}}
+\binperiod
+\underscoreoff
+
+%% format
+\pagestyle{headings}
+\isabellestyle{it}
+
+
+%%% Local Variables: 
+%%% mode: latex
+%%% TeX-master: "implementation"
+%%% End: 
--- a/src/Doc/Datatypes/Datatypes.thy	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/Doc/Datatypes/Datatypes.thy	Tue Mar 29 23:45:28 2016 +0200
@@ -460,7 +460,7 @@
   \label{ssec:datatype-command-syntax}\<close>
 
 subsubsection \<open>\keyw{datatype}
-  \label{sssec:datatype-new}\<close>
+  \label{sssec:datatype}\<close>
 
 text \<open>
 \begin{matharray}{rcl}
@@ -1569,7 +1569,7 @@
   \label{ssec:primrec-command-syntax}\<close>
 
 subsubsection \<open>\keyw{primrec}
-  \label{sssec:primrec-new}\<close>
+  \label{sssec:primrec}\<close>
 
 text \<open>
 \begin{matharray}{rcl}
@@ -3094,8 +3094,9 @@
 
 The syntactic entity \synt{target} can be used to specify a local context,
 \synt{name} denotes an identifier, \synt{typefree} denotes fixed type variable
-(@{typ 'a}, @{typ 'b}, \ldots), and \synt{mixfix} denotes the usual
-parenthesized mixfix notation @{cite "isabelle-isar-ref"}.
+(@{typ 'a}, @{typ 'b}, \ldots), \synt{mixfix} denotes the usual parenthesized
+mixfix notation, and \synt{types} denotes a space-separated list of types
+@{cite "isabelle-isar-ref"}.
 
 The @{syntax plugins} option indicates which plugins should be enabled
 (@{text only}) or disabled (@{text del}). By default, all plugins are enabled.
--- a/src/Doc/Datatypes/document/root.tex	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/Doc/Datatypes/document/root.tex	Tue Mar 29 23:45:28 2016 +0200
@@ -1,6 +1,7 @@
 \documentclass[12pt,a4paper]{article} % fleqn
 \usepackage{lmodern}
 \usepackage[T1]{fontenc}
+\usepackage{amsfonts}
 \usepackage{amsmath}
 \usepackage{cite}
 \usepackage{enumitem}
--- a/src/Doc/ROOT	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/Doc/ROOT	Tue Mar 29 23:45:28 2016 +0200
@@ -43,6 +43,22 @@
     "root.tex"
     "style.sty"
 
+session Corec (doc) in "Corec" = HOL +
+  options [document_variants = "corec"]
+  theories [document = false] "../Datatypes/Setup"
+  theories Corec
+  document_files (in "..")
+    "prepare_document"
+    "pdfsetup.sty"
+    "iman.sty"
+    "extra.sty"
+    "isar.sty"
+    "manual.bib"
+  document_files
+    "build"
+    "root.tex"
+    "style.sty"
+
 session Datatypes (doc) in "Datatypes" = HOL +
   options [document_variants = "datatypes"]
   theories [document = false] Setup
--- a/src/Doc/manual.bib	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/Doc/manual.bib	Tue Mar 29 23:45:28 2016 +0200
@@ -575,6 +575,20 @@
   publisher	= {Springer},
   year		= 1979}
 
+@inproceedings{di-gianantonio-miculan-2003,
+  author    = {Di Gianantonio, Pietro and
+               Marino Miculan},
+  title     = {A Unifying Approach to Recursive and Co-recursive Definitions},
+  booktitle = {TYPES 2002},
+  year      = {2003},
+  pages     = {148--161},
+  editor    = {Herman Geuvers and
+               Freek Wiedijk},
+  publisher = {Springer},
+  series    = {LNCS},
+  volume    = {2646}
+}
+
 @book{dummett,
   author	= {Michael Dummett},
   title		= {Elements of Intuitionism},
@@ -834,6 +848,28 @@
   pages		= {265--270},
   year		= 1997}
 
+@article{hinze10,
+  author  = {Hinze, Ralf},
+  title   = {Concrete stream calculus---{A}n extended study},
+  journal = {J. Funct. Program.},
+  volume  = {20},
+  issue   = {Special Issue 5-6},
+  year    = {2010},
+  issn    = {1469-7653},
+  pages   = {463--535}
+}
+
+@inproceedings{sine,
+  author = "Kry\v{s}tof Hoder and Andrei Voronkov",
+  title = "Sine Qua Non for Large Theory Reasoning",
+  booktitle = {Automated Deduction --- CADE-23},
+  publisher = Springer,
+  series = LNCS,
+  volume = 6803,
+  pages = "299--314",
+  editor = "Nikolaj Bj{\o}rner and Viorica Sofronie-Stokkermans",
+  year = 2011}
+
 @book{HopcroftUllman,author={John E. Hopcroft and Jeffrey D. Ullman},
 title={Introduction to Automata Theory, Languages, and Computation.},
 publisher={Addison-Wesley},year=1979}
@@ -858,17 +894,6 @@
   number	= 5,
   month		= May}
 
-@inproceedings{sine,
-  author = "Kry\v{s}tof Hoder and Andrei Voronkov",
-  title = "Sine Qua Non for Large Theory Reasoning",
-  booktitle = {Automated Deduction --- CADE-23},
-  publisher = Springer,
-  series = LNCS,
-  volume = 6803,
-  pages = "299--314",
-  editor = "Nikolaj Bj{\o}rner and Viorica Sofronie-Stokkermans",
-  year = 2011}
-
 @book{Hudak-Haskell,author={Paul Hudak},
 title={The Haskell School of Expression},publisher=CUP,year=2000}
 
@@ -1725,6 +1750,16 @@
   publisher =	 {Bastad}
 }
 
+@article{rutten05,
+  author    = {Jan J. M. M. Rutten},
+  title     = {A coinductive calculus of streams},
+  journal   = {Math. Struct. Comp. Sci.},
+  volume    = 15,
+  number    = 1,
+  year      = 2005,
+  pages     = {93--147},
+}
+
 %S
 
 @inproceedings{saaltink-fme,
--- a/src/HOL/Tools/BNF/bnf_gfp_grec.ML	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/HOL/Tools/BNF/bnf_gfp_grec.ML	Tue Mar 29 23:45:28 2016 +0200
@@ -355,7 +355,7 @@
    corecUU_unique: thm,
    corecUU_transfer: thm,
    buffer: buffer,
-   all_dead_k_bnfs: BNF_Def.bnf list,
+   all_dead_k_bnfs: bnf list,
    Retr: term,
    equivp_Retr: thm,
    Retr_coinduct: thm,
@@ -2144,7 +2144,6 @@
     val ssig_map = mk_mapN lthy live_AsBs ssig_T ssig_bnf;
     val ssig_rel = mk_relN lthy live_AsBs ssig_T ssig_bnf;
     val dead_ssig_map = mk_map 1 As Bs (map_of_bnf dead_ssig_bnf);
-    val dead_ssig_rel = mk_rel 1 As Bs (rel_of_bnf dead_ssig_bnf);
 
     val ((Lam, Lam_def), lthy) = lthy
       |> define_Lam_base fp_b version Y Z preT ssig_T dead_pre_map Sig unsig dead_sig_map Oper
@@ -2384,7 +2383,6 @@
     val ssig_map = mk_mapN lthy live_AsBs ssig_T ssig_bnf;
     val ssig_rel = mk_relN lthy live_AsBs ssig_T ssig_bnf;
     val dead_ssig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_ssig_bnf);
-    val dead_ssig_rel = mk_rel 1 res_As res_Bs (rel_of_bnf dead_ssig_bnf);
     val old_Lam = enforce_type lthy domain_type old_Lam_domT old_Lam0;
     val old_proto_sctr = enforce_type lthy domain_type preT old_proto_sctr0;
     val old_flat = enforce_type lthy range_type old_ssig_T old_flat0;
@@ -2409,7 +2407,6 @@
     val dead_pre_map_comp0 = map_comp0_of_bnf dead_pre_bnf;
     val dead_pre_map_comp = map_comp_of_bnf dead_pre_bnf;
     val fp_map_id = map_id_of_bnf fp_bnf;
-    val fp_rel_eq = rel_eq_of_bnf fp_bnf;
     val [ctor_dtor] = #ctor_dtors fp_res;
     val [dtor_inject] = #dtor_injects fp_res;
     val [dtor_unfold_thm] = #xtor_un_fold_thms_legacy fp_res;
--- a/src/HOL/Tools/BNF/bnf_gfp_grec_sugar.ML	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/HOL/Tools/BNF/bnf_gfp_grec_sugar.ML	Tue Mar 29 23:45:28 2016 +0200
@@ -20,8 +20,8 @@
   val build_corecUU_arg_and_goals: bool -> term -> term list * term -> local_theory ->
     (((thm list * thm list * thm list) * term list) * term) * local_theory
   val derive_eq_corecUU: Proof.context -> BNF_GFP_Grec.corec_info -> term -> term -> thm -> thm
-  val derive_unique: Proof.context -> morphism -> term -> BNF_GFP_Grec.corec_info -> typ -> thm ->
-    thm
+  val derive_unique: Proof.context -> morphism -> term -> BNF_GFP_Grec.corec_info -> string ->
+    thm -> thm
 
   val corec_cmd: corec_option list -> (binding * string option * mixfix) list * string ->
     local_theory -> local_theory
@@ -54,7 +54,6 @@
 val symN = "sym";
 val transN = "trans";
 val cong_introsN = prefix cong_N "intros";
-val cong_intros_friendN = "cong_intros_friend";
 val codeN = "code";
 val coinductN = "coinduct";
 val coinduct_uptoN = "coinduct_upto";
@@ -120,11 +119,11 @@
 type coinduct_extra =
   {coinduct: thm,
    coinduct_attrs: Token.src list,
-   cong_intro_tab: thm list Symtab.table};
+   cong_intro_pairs: (string * thm) list};
 
-fun morph_coinduct_extra phi ({coinduct, coinduct_attrs, cong_intro_tab} : coinduct_extra) =
+fun morph_coinduct_extra phi ({coinduct, coinduct_attrs, cong_intro_pairs} : coinduct_extra) =
   {coinduct = Morphism.thm phi coinduct, coinduct_attrs = coinduct_attrs,
-   cong_intro_tab = Symtab.map (K (Morphism.fact phi)) cong_intro_tab};
+   cong_intro_pairs = map (apsnd (Morphism.thm phi)) cong_intro_pairs};
 
 val transfer_coinduct_extra = morph_coinduct_extra o Morphism.transfer_morphism;
 
@@ -189,10 +188,10 @@
 fun get_coinduct_uptos fpT_name context =
   coinduct_extras_of_generic context fpT_name |> map #coinduct;
 fun get_cong_all_intros fpT_name context =
-  coinduct_extras_of_generic context fpT_name |> maps (#cong_intro_tab #> Symtab.dest #> maps snd);
+  coinduct_extras_of_generic context fpT_name |> maps (#cong_intro_pairs #> map snd);
 fun get_cong_intros fpT_name name context =
   coinduct_extras_of_generic context fpT_name
-  |> maps (#cong_intro_tab #> (fn tab => Symtab.lookup_list tab name));
+  |> map_filter (#cong_intro_pairs #> (fn ps => AList.lookup (op =) ps name));
 
 fun ctr_names_of_fp_name lthy fpT_name =
   fpT_name |> fp_sugar_of lthy |> the |> #fp_ctr_sugar |> #ctr_sugar |> #ctrs
@@ -263,7 +262,7 @@
   let
     val SOME {casex, exhaust, case_thms, ...} = ctr_sugar_of ctxt fpT_name;
 
-    val fpT0 as Type (_, As0) = domain_type (body_fun_type (fastype_of casex));
+    val Type (_, As0) = domain_type (body_fun_type (fastype_of casex));
 
     val (As, _) = ctxt
       |> mk_TFrees' (map Type.sort_of_atyp As0);
@@ -322,7 +321,7 @@
   maps (#algrho_eqs o snd) (all_friend_extras_of ctxt);
 
 fun derive_code ctxt inner_fp_simps goal
-    {sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs, corecUU_thm, ...} res_T fun_t
+    {sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs, corecUU_thm, ...} fun_t
     fun_def =
   let
     val fun_T = fastype_of fun_t;
@@ -367,8 +366,8 @@
   end;
 
 fun derive_unique ctxt phi code_goal
-    {sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs, corecUU_unique, ...}
-    (res_T as Type (fpT_name, _)) eq_corecUU =
+    {sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs, corecUU_unique, ...} fpT_name
+    eq_corecUU =
   let
     val SOME {pre_bnf, fp_bnf, absT_info, fp_nesting_bnfs, live_nesting_bnfs, fp_ctr_sugar, ...} =
       fp_sugar_of ctxt fpT_name;
@@ -591,8 +590,8 @@
       ||>> mk_Frees "y" xy_Ts;
 
     fun mk_prem xy_T x y =
-      BNF_Def.build_rel [] ctxt [fpT]
-        (fn (T, _) => if T = fpT then Rcong else HOLogic.eq_const T) (xy_T, xy_T) $ x $ y;
+      build_rel [] ctxt [fpT] (fn (T, _) => if T = fpT then Rcong else HOLogic.eq_const T)
+        (xy_T, xy_T) $ x $ y;
 
     val prems = @{map 3} mk_prem xy_Ts xs ys;
     val concl = Rcong $ list_comb (alg, xs) $ list_comb (alg, ys);
@@ -659,7 +658,7 @@
       derive_cong_ctr_intros lthy cong_ctor_intro @
       map (derive_cong_friend_intro lthy) cong_algrho_intros;
   in
-    Symtab.make_list (names ~~ thms)
+    names ~~ thms
   end;
 
 fun derive_coinduct ctxt (fpT as Type (fpT_name, fpT_args)) dtor_coinduct =
@@ -1056,7 +1055,7 @@
           |> singleton (Type_Infer.fixate lthy)
           |> type_of
           |> dest_funT
-          |-> BNF_GFP_Grec_Sugar_Util.generalize_types 1
+          |-> generalize_types 1
           |> funT_of_tupleT m;
 
         val j = maxidx_of_typ deadfixed_T + 1;
@@ -1947,13 +1946,13 @@
       let
         val ctr_names = ctr_names_of_fp_name lthy fpT_name;
         val friend_names = friend_names0 |> map Long_Name.base_name |> rev;
-        val cong_intro_tab = derive_cong_intros lthy ctr_names friend_names dtor_coinduct_info;
+        val cong_intro_pairs = derive_cong_intros lthy ctr_names friend_names dtor_coinduct_info;
         val (coinduct, coinduct_attrs) = derive_coinduct lthy fpT0 dtor_coinduct;
         val ((_, [coinduct]), lthy) = (* TODO check: only if most_general?*)
           Local_Theory.note ((Binding.empty, coinduct_attrs), [coinduct]) lthy;
 
         val extra = {coinduct = coinduct, coinduct_attrs = coinduct_attrs,
-          cong_intro_tab = cong_intro_tab};
+          cong_intro_pairs = cong_intro_pairs};
       in
         ((most_general, extra), lthy |> most_general ? register_coinduct_extra corecUU_name extra)
       end)
@@ -2111,7 +2110,7 @@
 
         val lthy = lthy |> friend ? derive_and_note_friend_extra_theorems;
 
-        val code_thm = derive_code lthy inner_fp_simps code_goal corec_info res_T fun_t fun_def;
+        val code_thm = derive_code lthy inner_fp_simps code_goal corec_info fun_t fun_def;
 (* TODO:
         val ctr_thmss = map mk_thm (#2 views);
         val disc_thmss = map mk_thm (#3 views);
@@ -2120,8 +2119,10 @@
 *)
 
         val uniques =
-          if null inner_fp_simps then [derive_unique lthy phi (#1 views0) corec_info res_T fun_def]
-          else [];
+          if null inner_fp_simps then
+            [derive_unique lthy phi (#1 views0) corec_info fpT_name fun_def]
+          else
+            [];
 
 (* TODO:
         val disc_iff_or_disc_thmss =
@@ -2129,9 +2130,9 @@
         val simp_thmss = map2 append disc_iff_or_disc_thmss sel_thmss;
 *)
 
-        val ((_, [{cong_intro_tab, coinduct, coinduct_attrs}]), lthy) = lthy
+        val ((_, [{cong_intro_pairs, coinduct, coinduct_attrs}]), lthy) = lthy
           |> derive_and_update_coinduct_cong_intross [corec_info];
-        val cong_intros_pairs = Symtab.dest cong_intro_tab;
+        val cong_intros_pairs = AList.group (op =) cong_intro_pairs;
 
         val code_attrs = if plugins code_plugin then [Code.add_default_eqn_attrib] else [];
 
@@ -2294,18 +2295,18 @@
         val (eq_algrho, algrho_eq) = derive_eq_algrho lthy corec_info friend_info fun_t k_T
           code_goal const_transfers rho_def eq_corecUU;
 
-        val ((_, [{cong_intro_tab, coinduct, coinduct_attrs}]), lthy) = lthy
+        val ((_, [{cong_intro_pairs, coinduct, coinduct_attrs}]), lthy) = lthy
           |> register_friend_extra fun_name eq_algrho algrho_eq
           |> register_coinduct_dynamic_friend fpT_name fun_name
           |> derive_and_update_coinduct_cong_intross [corec_info];
-        val cong_intros_pairs = Symtab.dest cong_intro_tab;
+        val cong_intros_pairs = AList.group (op =) cong_intro_pairs;
 
-        val unique = derive_unique lthy Morphism.identity code_goal corec_info res_T eq_corecUU;
+        val unique = derive_unique lthy Morphism.identity code_goal corec_info fpT_name eq_corecUU;
 
         val notes =
-          [(cong_intros_friendN, maps snd cong_intros_pairs, []),
-           (codeN, [code_thm], []),
+          [(codeN, [code_thm], []),
            (coinductN, [coinduct], coinduct_attrs),
+           (cong_introsN, maps snd cong_intros_pairs, []),
            (uniqueN, [unique], [])] @
            map (fn (thmN, thms) => (thmN, thms, [])) cong_intros_pairs @
           (if Config.get lthy bnf_internals then
@@ -2344,10 +2345,10 @@
       |> corec_info_of fpT;
     val lthy = lthy |> no_base ? setup_base fpT_name;
 
-    val ((changed, [{cong_intro_tab, coinduct, coinduct_attrs}]), lthy) = lthy
+    val ((changed, [{cong_intro_pairs, coinduct, coinduct_attrs}]), lthy) = lthy
       |> derive_and_update_coinduct_cong_intross [corec_info];
     val lthy = lthy |> (changed orelse no_base) ? update_coinduct_cong_intross_dynamic fpT_name;
-    val cong_intros_pairs = Symtab.dest cong_intro_tab;
+    val cong_intros_pairs = AList.group (op =) cong_intro_pairs;
 
     val notes =
       [(cong_introsN, maps snd cong_intros_pairs, []),
--- a/src/HOL/Tools/BNF/bnf_gfp_grec_sugar_tactics.ML	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/HOL/Tools/BNF/bnf_gfp_grec_sugar_tactics.ML	Tue Mar 29 23:45:28 2016 +0200
@@ -67,8 +67,8 @@
     rtac ctxt cong_alg_intro) THEN
   unfold_thms_tac ctxt (extra_simps @ sumprod_thms_rel @
     @{thms vimage2p_def prod.rel_eq sum.rel_eq}) THEN
-  REPEAT_DETERM (HEADGOAL (etac ctxt subst ORELSE' rtac ctxt conjI ORELSE' assume_tac ctxt ORELSE'
-    rtac ctxt refl));
+  REPEAT_DETERM (HEADGOAL (rtac ctxt conjI ORELSE' assume_tac ctxt ORELSE' rtac ctxt refl ORELSE'
+    etac ctxt subst));
 
 val shared_simps =
   @{thms map_prod_simp map_sum.simps sum.case prod.case_eq_if split_beta' prod.sel
--- a/src/HOL/Tools/BNF/bnf_gfp_grec_unique_sugar.ML	Tue Mar 29 23:41:28 2016 +0200
+++ b/src/HOL/Tools/BNF/bnf_gfp_grec_unique_sugar.ML	Tue Mar 29 23:45:28 2016 +0200
@@ -55,6 +55,8 @@
         | NONE => error ("No corecursor for " ^ quote (Syntax.string_of_typ ctxt res_T) ^
           " (use " ^ quote (#1 @{command_keyword coinduction_upto}) ^ " to derive it)"));
 
+      val Type (fpT_name, _) = res_T;
+
       val parsed_eq = parse_corec_equation ctxt [fun_t] code_goal;
       val explored_eq =
         explore_corec_equation ctxt false false "" fun_t corec_parse_info res_T parsed_eq;
@@ -62,7 +64,7 @@
       val ((_, corecUU_arg), _) = build_corecUU_arg_and_goals false fun_t explored_eq ctxt;
       val eq_corecUU = derive_eq_corecUU ctxt corec_info fun_t corecUU_arg code_thm;
 
-      val unique' = derive_unique ctxt Morphism.identity code_goal corec_info res_T eq_corecUU
+      val unique' = derive_unique ctxt Morphism.identity code_goal corec_info fpT_name eq_corecUU
         |> funpow num_args_in_concl (fn thm => thm RS fun_cong);
     in
       HEADGOAL ((K all_tac APPEND' rtac ctxt sym) THEN' rtac ctxt unique' THEN'