merged, resolving obvious conflicts in NEWS and src/Pure/System/isabelle_process.ML;
authorwenzelm
Thu, 05 Dec 2013 17:58:03 +0100
changeset 54671 d64a4ef26edb
parent 54670 cfb21e03fe2a (current diff)
parent 54635 30666a281ae3 (diff)
child 54672 748778ac0ab8
merged, resolving obvious conflicts in NEWS and src/Pure/System/isabelle_process.ML;
Admin/MacOS/App1/README
Admin/MacOS/App1/build
Admin/MacOS/App1/script
Admin/MacOS/App2/Isabelle.app/Contents/Info.plist
Admin/MacOS/App2/Isabelle.app/Contents/MacOS/Isabelle
Admin/MacOS/App2/README
Admin/MacOS/App2/mk
Admin/MacOS/App3/Info.plist-part1
Admin/MacOS/App3/Info.plist-part2
Admin/MacOS/App3/README
Admin/MacOS/App3/Resources/en.lproj/Localizable.strings
Admin/MacOS/isabelle.icns
Admin/MacOS/theory.icns
Admin/Windows/launch4j/README
Admin/Windows/launch4j/isabelle.ico
Admin/Windows/launch4j/isabelle.xml
Admin/lib/Tools/makedist_bundle
NEWS
src/Doc/JEdit/JEdit.thy
src/HOL/BNF/Coinduction.thy
src/HOL/BNF/Countable_Type.thy
src/HOL/BNF/Ctr_Sugar.thy
src/HOL/BNF/Tools/bnf_fp_rec_sugar.ML
src/HOL/BNF/Tools/bnf_fp_rec_sugar_tactics.ML
src/HOL/BNF/Tools/coinduction.ML
src/HOL/BNF/Tools/ctr_sugar.ML
src/HOL/BNF/Tools/ctr_sugar_tactics.ML
src/HOL/BNF/Tools/ctr_sugar_util.ML
src/HOL/Cardinals/Cardinal_Order_Relation_Base.thy
src/HOL/Cardinals/Constructions_on_Wellorders_Base.thy
src/HOL/Cardinals/Fun_More_Base.thy
src/HOL/Cardinals/Order_Relation_More_Base.thy
src/HOL/Cardinals/Wellfounded_More_Base.thy
src/HOL/Cardinals/Wellorder_Embedding_Base.thy
src/HOL/Cardinals/Wellorder_Relation_Base.thy
src/HOL/Library/Abstract_Rat.thy
src/HOL/Library/Glbs.thy
src/HOL/Library/Order_Relation.thy
src/HOL/Library/Order_Union.thy
src/HOL/Library/Univ_Poly.thy
src/HOL/Lubs.thy
src/Pure/Concurrent/future.ML
src/Pure/PIDE/command.ML
src/Pure/System/isabelle_process.ML
src/Pure/build-jars
src/Tools/jEdit/src/theories_dockable.scala
--- a/.hgignore	Thu Dec 05 17:52:12 2013 +0100
+++ b/.hgignore	Thu Dec 05 17:58:03 2013 +0100
@@ -5,7 +5,9 @@
 *.jar
 *.orig
 *.rej
+*.pyc
 .DS_Store
+.swp
 
 
 syntax: regexp
--- a/Admin/MacOS/App1/README	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-Isabelle application bundle for MacOS
-=====================================
-
-Requirements:
-
-* CocoaDialog 2.1.1 http://cocoadialog.sourceforge.net/
-
-* Platypus 4.7 http://www.sveinbjorn.org/platypus
-  Preferences: Install command line tool
-
-* final packaging:
-
-  hdiutil create -srcfolder DIR DMG
-
--- a/Admin/MacOS/App1/build	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-#
-# Make Isabelle application bundle
-
-THIS="$(cd "$(dirname "$0")"; pwd)"
-
-COCOADIALOG_APP="/Applications/CocoaDialog.app"
-
-/usr/local/bin/platypus \
-  -a Isabelle -u Isabelle \
-  -I "de.tum.in.isabelle" \
-  -i "$THIS/../isabelle.icns" \
-  -D -X thy \
-  -Q "$THIS/../theory.icns" \
-  -p /bin/bash \
-  -R \
-  -o None \
-  -f "$COCOADIALOG_APP" \
-  "$THIS/script" \
-  "$PWD/Isabelle.app"
-
-rm -f Contents/Resources/Isabelle
-ln -s Contents/Resources/Isabelle Isabelle.app/Isabelle
\ No newline at end of file
--- a/Admin/MacOS/App1/script	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,82 +0,0 @@
-#!/usr/bin/env bash
-#
-# Author: Makarius
-#
-# Isabelle application wrapper
-
-THIS="$(cd "$(dirname "$0")"; pwd)"
-THIS_APP="$(cd "$THIS/../.."; pwd)"
-SUPER_APP="$(cd "$THIS/../../.."; pwd)"
-
-
-# global defaults
-
-ISABELLE_TOOL="$THIS/Isabelle/bin/isabelle"
-PROOFGENERAL_EMACS="$THIS/Aquamacs.app/Contents/MacOS/Aquamacs"
-
-
-# environment
-
-cd "$HOME"
-if [ -x /usr/libexec/path_helper ]; then
-  eval $(/usr/libexec/path_helper -s)
-fi
-
-[ -z "$LANG" ] && export LANG=en_US.UTF-8
-
-
-# run interface with error feedback
-
-ISABELLE_INTERFACE_CHOICE="$("$ISABELLE_TOOL" getenv -b ISABELLE_INTERFACE_CHOICE)"
-if [ "$ISABELLE_INTERFACE_CHOICE" != emacs -a "$ISABELLE_INTERFACE_CHOICE" != jedit ]
-then
-  declare -a CHOICE
-  CHOICE=($("$THIS/CocoaDialog.app/Contents/MacOS/CocoaDialog" dropdown \
-    --title Isabelle \
-    --text "Which Isabelle interface?" \
-    --items "Isabelle/jEdit PIDE" "Emacs / Proof General" \
-    --button2 "OK, do not ask again" --button1 "OK"))
-  if [ "${CHOICE[1]}" = 0 ]; then
-    ISABELLE_INTERFACE_CHOICE=jedit
-  else
-    ISABELLE_INTERFACE_CHOICE=emacs
-  fi
-  if [ "${CHOICE[0]}" = 2 ]; then
-    ISABELLE_HOME_USER="$("$ISABELLE_TOOL" getenv -b ISABELLE_HOME_USER)"
-    mkdir -p "$ISABELLE_HOME_USER/etc"
-    ( echo; echo "ISABELLE_INTERFACE_CHOICE=$ISABELLE_INTERFACE_CHOICE"; ) \
-      >> "$ISABELLE_HOME_USER/etc/settings"
-    "$THIS/CocoaDialog.app/Contents/MacOS/CocoaDialog" ok-msgbox \
-      --title Isabelle \
-      --text Note \
-      --informative-text "ISABELLE_INTERFACE_CHOICE stored in $ISABELLE_HOME_USER/etc/settings" \
-      --no-cancel
-  fi
-fi
-
-OUTPUT="/tmp/isabelle$$.out"
-
-if [ "$ISABELLE_INTERFACE_CHOICE" = emacs ]; then
-  ( "$ISABELLE_TOOL" emacs -p "$PROOFGENERAL_EMACS" "$@" ) > "$OUTPUT" 2>&1
-  RC=$?
-else
-  ( "$ISABELLE_TOOL" jedit -s "$@" ) > "$OUTPUT" 2>&1
-  RC=$?
-fi
-
-if [ "$RC" != 0 ]; then
-  echo >> "$OUTPUT"
-  echo "Return code: $RC" >> "$OUTPUT"
-fi
-
-if [ $(stat -f "%z" "$OUTPUT") != 0 ]; then
-  "$THIS/CocoaDialog.app/Contents/MacOS/CocoaDialog" textbox \
-    --title "Isabelle" \
-    --informative-text "Isabelle output" \
-    --text-from-file "$OUTPUT" \
-    --button1 "OK"
-fi
-
-rm -f "$OUTPUT"
-
-exit "$RC"
--- a/Admin/MacOS/App2/Isabelle.app/Contents/Info.plist	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,48 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>Isabelle</string>
-	<key>CFBundleGetInfoString</key>
-	<string>Isabelle</string>
-	<key>CFBundleIconFile</key>
-	<string>isabelle.icns</string>
-	<key>CFBundleIdentifier</key>
-	<string>de.tum.in.isabelle</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>Isabelle</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>????</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>????</string>
-	<key>Java</key>
-	<dict>
-		<key>JVMVersion</key>
-		<string>1.6</string>
-		<key>VMOptions</key>
-		<string>-Xms128m -Xmx512m -Xss2m</string>
-		<key>ClassPath</key>
-		<string>$JAVAROOT/isabelle-scala.jar</string>
-		<key>MainClass</key>
-		<string>isabelle.GUI_Setup</string>
-		<key>Properties</key>
-		<dict>
-			<key>isabelle.home</key>
-			<string>$APP_PACKAGE/Contents/Resources/Isabelle</string>
-			<key>apple.laf.useScreenMenuBar</key>
-			<string>true</string>
-			<key>com.apple.mrj.application.apple.menu.about.name</key>
-			<string>Isabelle</string>
-		</dict>
-	</dict>
-</dict>
-</plist>
--- a/Admin/MacOS/App2/Isabelle.app/Contents/MacOS/Isabelle	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-/System/Library/Frameworks/JavaVM.framework/Resources/MacOS/JavaApplicationStub
\ No newline at end of file
--- a/Admin/MacOS/App2/README	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-Isabelle/JVM application bundle for MacOS
-=========================================
-
-* http://developer.apple.com/documentation/Java/Conceptual/Java14Development/03-JavaDeployment/JavaDeployment.html
-
-* http://developer.apple.com/documentation/Java/Reference/Java_InfoplistRef/Articles/JavaDictionaryInfo.plistKeys.html#//apple_ref/doc/uid/TP40001969
-
--- a/Admin/MacOS/App2/mk	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-#
-# Make Isabelle/JVM application bundle
-
-THIS="$(cd "$(dirname "$0")"; pwd)"
-
-APP="$THIS/Isabelle.app"
-
-mkdir -p "$APP/Contents/Resources/Java"
-cp "$THIS/../../../lib/classes/isabelle-scala.jar" "$APP/Contents/Resources/Java"
-cp "$THIS/../isabelle.icns" "$APP/Contents/Resources"
-
--- a/Admin/MacOS/App3/Info.plist-part1	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-<?xml version="1.0" ?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-<key>CFBundleDevelopmentRegion</key>
-<string>English</string>
-<key>CFBundleExecutable</key>
-<string>JavaAppLauncher</string>
-<key>CFBundleIconFile</key>
-<string>isabelle.icns</string>
-<key>CFBundleIdentifier</key>
-<string>de.tum.in.isabelle</string>
-<key>CFBundleDisplayName</key>
-<string>{ISABELLE_NAME}</string>
-<key>CFBundleInfoDictionaryVersion</key>
-<string>6.0</string>
-<key>CFBundleName</key>
-<string>{ISABELLE_NAME}</string>
-<key>CFBundlePackageType</key>
-<string>APPL</string>
-<key>CFBundleShortVersionString</key>
-<string>1.0</string>
-<key>CFBundleSignature</key>
-<string>????</string>
-<key>CFBundleVersion</key>
-<string>1</string>
-<key>NSHumanReadableCopyright</key>
-<string></string>
-<key>LSApplicationCategoryType</key>
-<string>public.app-category.developer-tools</string>
-<key>JVMRuntime</key>
-<string>jdk</string>
-<key>JVMMainClassName</key>
-<string>isabelle.Main</string>
-<key>JVMOptions</key>
-<array>
--- a/Admin/MacOS/App3/Info.plist-part2	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-<string>-Disabelle.home=$APP_ROOT/Contents/Resources/{ISABELLE_NAME}</string>
-</array>
-<key>JVMArguments</key>
-<array>
-</array>
-</dict>
-</plist>
--- a/Admin/MacOS/App3/README	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-Isabelle/JVM application bundle for Mac OS X
-============================================
-
-* http://java.net/projects/appbundler
-
-  see appbundler-1.0.jar
-  see com/oracle/appbundler/JavaAppLauncher
-
--- a/Admin/MacOS/App3/Resources/en.lproj/Localizable.strings	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-"JRELoadError" = "Unable to load Java Runtime Environment.";
-"MainClassNameRequired" = "Main class name is required.";
-"JavaDirectoryNotFound" = "Unable to enumerate Java directory contents.";
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/MacOS/Info.plist-part1	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,36 @@
+<?xml version="1.0" ?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+<key>CFBundleDevelopmentRegion</key>
+<string>English</string>
+<key>CFBundleExecutable</key>
+<string>JavaAppLauncher</string>
+<key>CFBundleIconFile</key>
+<string>isabelle.icns</string>
+<key>CFBundleIdentifier</key>
+<string>de.tum.in.isabelle</string>
+<key>CFBundleDisplayName</key>
+<string>{ISABELLE_NAME}</string>
+<key>CFBundleInfoDictionaryVersion</key>
+<string>6.0</string>
+<key>CFBundleName</key>
+<string>{ISABELLE_NAME}</string>
+<key>CFBundlePackageType</key>
+<string>APPL</string>
+<key>CFBundleShortVersionString</key>
+<string>1.0</string>
+<key>CFBundleSignature</key>
+<string>????</string>
+<key>CFBundleVersion</key>
+<string>1</string>
+<key>NSHumanReadableCopyright</key>
+<string></string>
+<key>LSApplicationCategoryType</key>
+<string>public.app-category.developer-tools</string>
+<key>JVMRuntime</key>
+<string>jdk</string>
+<key>JVMMainClassName</key>
+<string>isabelle.Main</string>
+<key>JVMOptions</key>
+<array>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/MacOS/Info.plist-part2	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,7 @@
+<string>-Disabelle.home=$APP_ROOT/Contents/Resources/{ISABELLE_NAME}</string>
+</array>
+<key>JVMArguments</key>
+<array>
+</array>
+</dict>
+</plist>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/MacOS/README	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,8 @@
+Isabelle/JVM application bundle for Mac OS X
+============================================
+
+* http://java.net/projects/appbundler
+
+  see appbundler-1.0.jar
+  see com/oracle/appbundler/JavaAppLauncher
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/MacOS/Resources/en.lproj/Localizable.strings	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,3 @@
+"JRELoadError" = "Unable to load Java Runtime Environment.";
+"MainClassNameRequired" = "Main class name is required.";
+"JavaDirectoryNotFound" = "Unable to enumerate Java directory contents.";
Binary file Admin/MacOS/Resources/isabelle.icns has changed
Binary file Admin/MacOS/Resources/theory.icns has changed
Binary file Admin/MacOS/isabelle.icns has changed
Binary file Admin/MacOS/theory.icns has changed
--- a/Admin/Windows/launch4j/README	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,4 +0,0 @@
-Java application wrapper for Windows
-====================================
-
-* http://launch4j.sourceforge.net
Binary file Admin/Windows/launch4j/isabelle.ico has changed
--- a/Admin/Windows/launch4j/isabelle.xml	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-<launch4jConfig>
-  <dontWrapJar>true</dontWrapJar>
-  <headerType>gui</headerType>
-  <jar></jar>
-  <outfile>Isabelle.exe</outfile>
-  <errTitle></errTitle>
-  <cmdLine></cmdLine>
-  <chdir></chdir>
-  <priority>normal</priority>
-  <downloadUrl></downloadUrl>
-  <supportUrl></supportUrl>
-  <customProcName>false</customProcName>
-  <stayAlive>true</stayAlive>
-  <manifest></manifest>
-  <icon>isabelle.ico</icon>
-  <classPath>
-    <mainClass>isabelle.Main</mainClass>
-    <cp>%EXEDIR%\lib\classes\ext\Pure.jar</cp>
-    <cp>%EXEDIR%\lib\classes\ext\scala-compiler.jar</cp>
-    <cp>%EXEDIR%\lib\classes\ext\scala-library.jar</cp>
-    <cp>%EXEDIR%\lib\classes\ext\scala-swing.jar</cp>
-    <cp>%EXEDIR%\lib\classes\ext\scala-actors.jar</cp>
-    <cp>%EXEDIR%\lib\classes\ext\scala-reflect.jar</cp>
-    <cp>%EXEDIR%\src\Tools\jEdit\dist\jedit.jar</cp>
-  </classPath>
-  <jre>
-    <path>%EXEDIR%\contrib\jdk\x86-cygwin</path>
-    <minVersion></minVersion>
-    <maxVersion></maxVersion>
-    <jdkPreference>jdkOnly</jdkPreference>
-    <opt>-Dfile.encoding=UTF-8 -server -Xms128m -Xmx1024m -Xss2m -Dactors.corePoolSize=4 -Dactors.enableForkJoin=false -Disabelle.home=&quot;%EXEDIR%&quot;</opt>
-  </jre>
-  <splash>
-    <file>isabelle.bmp</file>
-    <waitForWindow>false</waitForWindow>
-    <timeout>10</timeout>
-    <timeoutErr>false</timeoutErr>
-  </splash>
-</launch4jConfig>
\ No newline at end of file
--- a/Admin/isatest/isatest-stats	Thu Dec 05 17:52:12 2013 +0100
+++ b/Admin/isatest/isatest-stats	Thu Dec 05 17:58:03 2013 +0100
@@ -14,11 +14,9 @@
   HOL-Auth
   HOL-BNF
   HOL-BNF-Examples
+  HOL-BNF-LFP
   HOL-BNF-Nitpick_Examples
-  HOL-BNF-LFP
   HOL-Bali
-  HOL-Boogie
-  HOL-Boogie-Examples
   HOL-Cardinals
   HOL-Cardinals-Base
   HOL-Codegenerator_Test
--- a/Admin/lib/Tools/makedist_bundle	Thu Dec 05 17:52:12 2013 +0100
+++ b/Admin/lib/Tools/makedist_bundle	Thu Dec 05 17:58:03 2013 +0100
@@ -261,7 +261,7 @@
       (
         cd "$TMP"
 
-        APP_TEMPLATE="$ISABELLE_HOME/Admin/MacOS/App3"
+        APP_TEMPLATE="$ISABELLE_HOME/Admin/MacOS"
         APP="${ISABELLE_NAME}.app"
 
         for NAME in Java MacOS PlugIns Resources
@@ -289,7 +289,6 @@
         done
 
         cp -R "$APP_TEMPLATE/Resources/." "$APP/Contents/Resources/."
-        cp "$APP_TEMPLATE/../isabelle.icns" "$APP/Contents/Resources/."
 
         ln -sf "../Resources/${ISABELLE_NAME}/contrib/jdk/x86_64-darwin" \
           "$APP/Contents/PlugIns/jdk"
--- a/CONTRIBUTORS	Thu Dec 05 17:52:12 2013 +0100
+++ b/CONTRIBUTORS	Thu Dec 05 17:58:03 2013 +0100
@@ -3,6 +3,10 @@
 who is listed as an author in one of the source files of this Isabelle
 distribution.
 
+Contributions to this Isabelle version
+--------------------------------------
+
+
 Contributions to Isabelle2013-1
 -------------------------------
 
--- a/NEWS	Thu Dec 05 17:52:12 2013 +0100
+++ b/NEWS	Thu Dec 05 17:58:03 2013 +0100
@@ -1,6 +1,98 @@
 Isabelle NEWS -- history user-relevant changes
 ==============================================
 
+New in this Isabelle version
+----------------------------
+
+*** Prover IDE -- Isabelle/Scala/jEdit ***
+
+* Auxiliary files ('ML_file' etc.) are managed by the Prover IDE.
+Open text buffers take precedence over copies within the file-system.
+
+
+*** HOL ***
+
+* Qualified constant names Wellfounded.acc, Wellfounded.accp.
+INCOMPATIBILITY.
+
+* Fact generalization and consolidation:
+    neq_one_mod_two, mod_2_not_eq_zero_eq_one_int ~> not_mod_2_eq_0_eq_1
+INCOMPATIBILITY.
+
+* Purely algebraic definition of even.  Fact generalization and consolidation:
+    nat_even_iff_2_dvd, int_even_iff_2_dvd ~> even_iff_2_dvd
+    even_zero_(nat|int) ~> even_zero
+INCOMPATIBILITY.
+
+* Abolished neg_numeral.
+  * Canonical representation for minus one is "- 1".
+  * Canonical representation for other negative numbers is "- (numeral _)".
+  * When devising rule sets for number calculation, consider the
+    following canonical cases: 0, 1, numeral _, - 1, - numeral _.
+  * HOLogic.dest_number also recognizes numerals in non-canonical forms
+    like "numeral One", "- numeral One", "- 0" and even "- … - _".
+  * Syntax for negative numerals is mere input syntax.
+INCOMPATIBILITY.
+
+* Elimination of fact duplicates:
+    equals_zero_I ~> minus_unique
+    diff_eq_0_iff_eq ~> right_minus_eq
+    nat_infinite ~> infinite_UNIV_nat
+    int_infinite ~> infinite_UNIV_int
+INCOMPATIBILITY.
+
+* Fact name consolidation:
+    diff_def, diff_minus, ab_diff_minus ~> diff_conv_add_uminus
+    minus_le_self_iff ~> neg_less_eq_nonneg
+    le_minus_self_iff ~> less_eq_neg_nonpos
+    neg_less_nonneg ~> neg_less_pos
+    less_minus_self_iff ~> less_neg_neg [simp]
+INCOMPATIBILITY.
+
+* More simplification rules on unary and binary minus:
+add_diff_cancel, add_diff_cancel_left, add_le_same_cancel1,
+add_le_same_cancel2, add_less_same_cancel1, add_less_same_cancel2,
+add_minus_cancel, diff_add_cancel, le_add_same_cancel1,
+le_add_same_cancel2, less_add_same_cancel1, less_add_same_cancel2,
+minus_add_cancel, uminus_add_conv_diff.  These correspondingly
+have been taken away from fact collections algebra_simps and
+field_simps.  INCOMPATIBILITY.
+
+To restore proofs, the following patterns are helpful:
+
+a) Arbitrary failing proof not involving "diff_def":
+Consider simplification with algebra_simps or field_simps.
+
+b) Lifting rules from addition to subtraction:
+Try with "using <rule for addition> of [… "- _" …] by simp".
+
+c) Simplification with "diff_def": just drop "diff_def".
+Consider simplification with algebra_simps or field_simps;
+or the brute way with
+"simp add: diff_conv_add_uminus del: add_uminus_conv_diff".
+
+* SUP and INF generalized to conditionally_complete_lattice
+
+* Theory Lubs moved from the HOL image to HOL-Library. It is replaced by
+Conditionally_Complete_Lattices.   INCOMPATIBILITY.
+
+* Introduce bdd_above and bdd_below in Conditionally_Complete_Lattices, use them
+instead of explicitly stating boundedness of sets.
+
+* ccpo.admissible quantifies only over non-empty chains to allow
+more syntax-directed proof rules; the case of the empty chain
+shows up as additional case in fixpoint induction proofs.
+INCOMPATIBILITY.
+
+*** ML ***
+
+* Toplevel function "use" refers to raw ML bootstrap environment,
+without Isar context nor antiquotations.  Potential INCOMPATIBILITY.
+Note that 'ML_file' is the canonical command to load ML files into the
+formal context.
+
+
+
 New in Isabelle2013-2 (December 2013)
 -------------------------------------
 
@@ -457,6 +549,10 @@
     sets ~> set
 IMCOMPATIBILITY.
 
+* Nitpick:
+  - Fixed soundness bug whereby mutually recursive datatypes could take
+    infinite values.
+
 
 *** ML ***
 
--- a/etc/isar-keywords.el	Thu Dec 05 17:52:12 2013 +0100
+++ b/etc/isar-keywords.el	Thu Dec 05 17:58:03 2013 +0100
@@ -1,6 +1,6 @@
 ;;
 ;; Keyword classification tables for Isabelle/Isar.
-;; Generated from HOL + HOL-Auth + HOL-BNF + HOL-BNF-LFP + HOL-Bali + HOL-Decision_Procs + HOL-IMP + HOL-Imperative_HOL + HOL-Import + HOL-Library + HOL-Mutabelle + HOL-Nominal + HOL-Proofs + HOL-Proofs-Extraction + HOL-SPARK + HOL-Statespace + HOL-TPTP + HOL-ex + HOLCF + Pure.
+;; Generated from HOL + HOL-Auth + HOL-BNF + HOL-BNF-LFP + HOL-Bali + HOL-Decision_Procs + HOL-IMP + HOL-Imperative_HOL + HOL-Import + HOL-Library + HOL-Mutabelle + HOL-Nominal + HOL-Proofs + HOL-Proofs-Extraction + HOL-SPARK + HOL-Statespace + HOL-TPTP + HOL-Word-SMT_Examples + HOL-ex + HOLCF + Pure.
 ;; *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT ***
 ;;
 
@@ -33,6 +33,7 @@
     "axiomatization"
     "back"
     "bnf"
+    "boogie_file"
     "bundle"
     "by"
     "cannot_undo"
@@ -343,7 +344,6 @@
     "module_name"
     "monos"
     "morphisms"
-    "no_discs_sels"
     "notes"
     "obtains"
     "open"
@@ -352,7 +352,6 @@
     "parametric"
     "permissive"
     "pervasive"
-    "rep_compat"
     "shows"
     "structure"
     "type_class"
@@ -487,6 +486,7 @@
     "atom_decl"
     "attribute_setup"
     "axiomatization"
+    "boogie_file"
     "bundle"
     "case_of_simps"
     "class"
--- a/src/Doc/Datatypes/Datatypes.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Datatypes/Datatypes.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -8,22 +8,9 @@
 *)
 
 theory Datatypes
-imports Setup
-keywords
-  "primcorec_notyet" :: thy_decl
+imports Setup "~~/src/HOL/Library/Simps_Case_Conv"
 begin
 
-(*<*)
-(* FIXME: Temporary setup until "primcorec" and "primcorecursive" are fully implemented. *)
-ML_command {*
-fun add_dummy_cmd _ _ lthy = lthy;
-
-val _ = Outer_Syntax.local_theory @{command_spec "primcorec_notyet"} ""
-  (Parse.fixes -- Parse_Spec.where_alt_specs >> uncurry add_dummy_cmd);
-*}
-(*>*)
-
-
 section {* Introduction
   \label{sec:introduction} *}
 
@@ -54,17 +41,19 @@
 
 text {*
 \noindent
-The package also provides some convenience, notably automatically generated
-discriminators and selectors.
-
-In addition to plain inductive datatypes, the new package supports coinductive
-datatypes, or \emph{codatatypes}, which may have infinite values. For example,
-the following command introduces the type of lazy lists, which comprises both
-finite and infinite values:
+Furthermore, the package provides a lot of convenience, including automatically
+generated discriminators, selectors, and relators as well as a wealth of
+properties about them.
+
+In addition to inductive datatypes, the new package supports coinductive
+datatypes, or \emph{codatatypes}, which allow infinite values. For example, the
+following command introduces the type of lazy lists, which comprises both finite
+and infinite values:
 *}
 
 (*<*)
     locale early
+    locale late
 (*>*)
     codatatype (*<*)(in early) (*>*)'a llist = LNil | LCons 'a "'a llist"
 
@@ -80,10 +69,10 @@
     codatatype (*<*)(in early) (*>*)'a tree\<^sub>i\<^sub>i = Node\<^sub>i\<^sub>i 'a "'a tree\<^sub>i\<^sub>i llist"
 
 text {*
-The first two tree types allow only finite branches, whereas the last two allow
-branches of infinite length. Orthogonally, the nodes in the first and third
-types have finite branching, whereas those of the second and fourth may have
-infinitely many direct subtrees.
+The first two tree types allow only paths of finite length, whereas the last two
+allow infinite paths. Orthogonally, the nodes in the first and third types have
+finitely many direct subtrees, whereas those of the second and fourth may have
+infinite branching.
 
 To use the package, it is necessary to import the @{theory BNF} theory, which
 can be precompiled into the \texttt{HOL-BNF} image. The following commands show
@@ -152,15 +141,15 @@
 
 
 \newbox\boxA
-\setbox\boxA=\hbox{\texttt{nospam}}
-
-\newcommand\authoremaili{\texttt{blan{\color{white}nospam}\kern-\wd\boxA{}chette@\allowbreak
+\setbox\boxA=\hbox{\texttt{NOSPAM}}
+
+\newcommand\authoremaili{\texttt{blan{\color{white}NOSPAM}\kern-\wd\boxA{}chette@\allowbreak
 in.\allowbreak tum.\allowbreak de}}
-\newcommand\authoremailii{\texttt{lore{\color{white}nospam}\kern-\wd\boxA{}nz.panny@\allowbreak
+\newcommand\authoremailii{\texttt{lore{\color{white}NOSPAM}\kern-\wd\boxA{}nz.panny@\allowbreak
 \allowbreak tum.\allowbreak de}}
-\newcommand\authoremailiii{\texttt{pope{\color{white}nospam}\kern-\wd\boxA{}scua@\allowbreak
+\newcommand\authoremailiii{\texttt{pope{\color{white}NOSPAM}\kern-\wd\boxA{}scua@\allowbreak
 in.\allowbreak tum.\allowbreak de}}
-\newcommand\authoremailiv{\texttt{tray{\color{white}nospam}\kern-\wd\boxA{}tel@\allowbreak
+\newcommand\authoremailiv{\texttt{tray{\color{white}NOSPAM}\kern-\wd\boxA{}tel@\allowbreak
 in.\allowbreak tum.\allowbreak de}}
 
 The commands @{command datatype_new} and @{command primrec_new} are expected to
@@ -171,13 +160,6 @@
 Comments and bug reports concerning either the tool or this tutorial should be
 directed to the authors at \authoremaili, \authoremailii, \authoremailiii,
 and \authoremailiv.
-
-\begin{framed}
-\noindent
-\textbf{Warning:}\enskip This tutorial and the package it describes are under
-construction. Please forgive their appearance. Should you have suggestions
-or comments regarding either, please let the authors know.
-\end{framed}
 *}
 
 
@@ -195,7 +177,7 @@
 text {*
 Datatypes are illustrated through concrete examples featuring different flavors
 of recursion. More examples can be found in the directory
-\verb|~~/src/HOL/BNF/Examples|.
+\verb|~~/src/HOL/|\allowbreak\verb|BNF/Examples|.
 *}
 
 subsubsection {* Nonrecursive Types
@@ -260,7 +242,8 @@
 
 text {*
 \noindent
-Lists were shown in the introduction. Terminated lists are a variant:
+Lists were shown in the introduction. Terminated lists are a variant that
+stores a value of type @{typ 'b} at the very end:
 *}
 
     datatype_new (*<*)(in early) (*>*)('a, 'b) tlist = TNil 'b | TCons 'a "('a, 'b) tlist"
@@ -310,7 +293,7 @@
 Not all nestings are admissible. For example, this command will fail:
 *}
 
-    datatype_new 'a wrong = Wrong (*<*)'a
+    datatype_new 'a wrong = W1 | W2 (*<*)'a
     typ (*>*)"'a wrong \<Rightarrow> 'a"
 
 text {*
@@ -321,7 +304,7 @@
 *}
 
     datatype_new ('a, 'b) fn = Fn "'a \<Rightarrow> 'b"
-    datatype_new 'a also_wrong = Also_Wrong (*<*)'a
+    datatype_new 'a also_wrong = W1 | W2 (*<*)'a
     typ (*>*)"('a also_wrong, 'a) fn"
 
 text {*
@@ -344,20 +327,30 @@
 datatype_new} and @{command codatatype} commands.
 Section~\ref{sec:registering-bounded-natural-functors} explains how to register
 arbitrary type constructors as BNFs.
+
+Here is another example that fails:
 *}
 
-
-subsubsection {* Custom Names and Syntaxes
-  \label{sssec:datatype-custom-names-and-syntaxes} *}
+    datatype_new 'a pow_list = PNil 'a (*<*)'a
+    datatype_new 'a pow_list' = PNil' 'a (*>*)| PCons "('a * 'a) pow_list"
+
+text {*
+\noindent
+This one features a different flavor of nesting, where the recursive call in the
+type specification occurs around (rather than inside) another type constructor.
+*}
+
+subsubsection {* Auxiliary Constants and Properties
+  \label{sssec:datatype-auxiliary-constants-and-properties} *}
 
 text {*
 The @{command datatype_new} command introduces various constants in addition to
 the constructors. With each datatype are associated set functions, a map
 function, a relator, discriminators, and selectors, all of which can be given
-custom names. In the example below, the traditional names
-@{text set}, @{text map}, @{text list_all2}, @{text null}, @{text hd}, and
-@{text tl} override the default names @{text list_set}, @{text list_map}, @{text
-list_rel}, @{text is_Nil}, @{text un_Cons1}, and @{text un_Cons2}:
+custom names. In the example below, the familiar names @{text null}, @{text hd},
+@{text tl}, @{text set}, @{text map}, and @{text list_all2}, override the
+default names @{text is_Nil}, @{text un_Cons1}, @{text un_Cons2},
+@{text set_list}, @{text map_list}, and @{text rel_list}:
 *}
 
 (*<*)
@@ -370,7 +363,7 @@
       Cons (infixr "#" 65)
 
     hide_type list
-    hide_const Nil Cons hd tl set map list_all2 list_case list_rec
+    hide_const Nil Cons hd tl set map list_all2
 
     context early begin
 (*>*)
@@ -380,14 +373,34 @@
 
 text {*
 \noindent
-The command introduces a discriminator @{const null} and a pair of selectors
-@{const hd} and @{const tl} characterized as follows:
+
+\begin{tabular}{@ {}ll@ {}}
+Constructors: &
+  @{text "Nil \<Colon> 'a list"} \\
+&
+  @{text "Cons \<Colon> 'a \<Rightarrow> 'a list \<Rightarrow> 'a list"} \\
+Discriminator: &
+  @{text "null \<Colon> 'a list \<Rightarrow> bool"} \\
+Selectors: &
+  @{text "hd \<Colon> 'a list \<Rightarrow> 'a"} \\
+&
+  @{text "tl \<Colon> 'a list \<Rightarrow> 'a list"} \\
+Set function: &
+  @{text "set \<Colon> 'a list \<Rightarrow> 'a set"} \\
+Map function: &
+  @{text "map \<Colon> ('a \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b list"} \\
+Relator: &
+  @{text "list_all2 \<Colon> ('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'b list \<Rightarrow> bool"}
+\end{tabular}
+
+The discriminator @{const null} and the selectors @{const hd} and @{const tl}
+are characterized as follows:
 %
 \[@{thm list.collapse(1)[of xs, no_vars]}
   \qquad @{thm list.collapse(2)[of xs, no_vars]}\]
 %
-For two-constructor datatypes, a single discriminator constant suffices. The
-discriminator associated with @{const Cons} is simply
+For two-constructor datatypes, a single discriminator constant is sufficient.
+The discriminator associated with @{const Cons} is simply
 @{term "\<lambda>xs. \<not> null xs"}.
 
 The @{text defaults} clause following the @{const Nil} constructor specifies a
@@ -447,7 +460,7 @@
   @@{command datatype_new} target? @{syntax dt_options}? \\
     (@{syntax dt_name} '=' (@{syntax ctor} + '|') + @'and')
   ;
-  @{syntax_def dt_options}: '(' (('no_discs_sels' | 'rep_compat') + ',') ')'
+  @{syntax_def dt_options}: '(' (('no_discs_sels' | 'no_code' | 'rep_compat') + ',') ')'
 "}
 
 The syntactic entity \synt{target} can be used to specify a local
@@ -464,6 +477,10 @@
 should be generated.
 
 \item
+The @{text "no_code"} option indicates that the datatype should not be
+registered for code generation.
+
+\item
 The @{text "rep_compat"} option indicates that the generated names should
 contain optional (and normally not displayed) ``@{text "new."}'' components to
 prevent clashes with a later call to \keyw{rep\_datatype}. See
@@ -488,7 +505,7 @@
 reference manual \cite{isabelle-isar-ref}.
 
 The optional names preceding the type variables allow to override the default
-names of the set functions (@{text t_set1}, \ldots, @{text t_setM}).
+names of the set functions (@{text set1_t}, \ldots, @{text setM_t}).
 Inside a mutually recursive specification, all defined datatypes must
 mention exactly the same type variables in the same order.
 
@@ -589,6 +606,10 @@
 or the function type. In principle, it should be possible to support old-style
 datatypes as well, but the command does not support this yet (and there is
 currently no way to register old-style datatypes as new-style datatypes).
+
+\item The recursor produced for types that recurse through functions has a
+different signature than with the old package. This makes it impossible to use
+the old \keyw{primrec} command.
 \end{itemize}
 
 An alternative to @{command datatype_new_compat} is to use the old package's
@@ -609,7 +630,7 @@
 \begin{itemize}
 \setlength{\itemsep}{0pt}
 
-\item \relax{Case combinator}: @{text t_case} (rendered using the familiar
+\item \relax{Case combinator}: @{text t.case_t} (rendered using the familiar
 @{text case}--@{text of} syntax)
 
 \item \relax{Discriminators}: @{text "t.is_C\<^sub>1"}, \ldots,
@@ -621,22 +642,22 @@
 \phantom{\relax{Selectors:}} @{text t.un_C\<^sub>n1}$, \ldots, @{text t.un_C\<^sub>nk\<^sub>n}.
 
 \item \relax{Set functions} (or \relax{natural transformations}):
-@{text t_set1}, \ldots, @{text t_setm}
-
-\item \relax{Map function} (or \relax{functorial action}): @{text t_map}
-
-\item \relax{Relator}: @{text t_rel}
-
-\item \relax{Iterator}: @{text t_fold}
-
-\item \relax{Recursor}: @{text t_rec}
+@{text t.set1_t}, \ldots, @{text t.setm_t}
+
+\item \relax{Map function} (or \relax{functorial action}): @{text t.map_t}
+
+\item \relax{Relator}: @{text t.rel_t}
+
+\item \relax{Iterator}: @{text t.fold_t}
+
+\item \relax{Recursor}: @{text t.rec_t}
 
 \end{itemize}
 
 \noindent
 The case combinator, discriminators, and selectors are collectively called
 \emph{destructors}. The prefix ``@{text "t."}'' is an optional component of the
-name and is normally hidden. 
+names and is normally hidden.
 *}
 
 
@@ -687,8 +708,9 @@
 (*>*)
 
 text {*
-The first subgroup of properties is concerned with the constructors.
-They are listed below for @{typ "'a list"}:
+The free constructor theorems are partitioned into three subgroups. The first
+subgroup of properties is concerned with the constructors. They are listed below
+for @{typ "'a list"}:
 
 \begin{indentblock}
 \begin{description}
@@ -715,7 +737,7 @@
 \begin{indentblock}
 \begin{description}
 
-\item[@{text "t."}\hthm{list.distinct {\upshape[}THEN notE}@{text ", elim!"}\hthm{\upshape]}\rm:] ~ \\
+\item[@{text "t."}\hthm{distinct {\upshape[}THEN notE}@{text ", elim!"}\hthm{\upshape]}\rm:] ~ \\
 @{thm list.distinct(1)[THEN notE, elim!, no_vars]} \\
 @{thm list.distinct(2)[THEN notE, elim!, no_vars]}
 
@@ -750,7 +772,7 @@
 \end{indentblock}
 
 \noindent
-The third and last subgroup revolves around discriminators and selectors:
+The third subgroup revolves around discriminators and selectors:
 
 \begin{indentblock}
 \begin{description}
@@ -793,11 +815,15 @@
 \item[@{text "t."}\hthm{sel\_split\_asm}\rm:] ~ \\
 @{thm list.sel_split_asm[no_vars]}
 
-\item[@{text "t."}\hthm{case\_conv\_if}\rm:] ~ \\
-@{thm list.case_conv_if[no_vars]}
+\item[@{text "t."}\hthm{case\_eq\_if}\rm:] ~ \\
+@{thm list.case_eq_if[no_vars]}
 
 \end{description}
 \end{indentblock}
+
+\noindent
+In addition, equational versions of @{text t.disc} are registered with the @{text "[code]"}
+attribute.
 *}
 
 
@@ -805,7 +831,9 @@
   \label{sssec:functorial-theorems} *}
 
 text {*
-The BNF-related theorem are as follows:
+The functorial theorems are partitioned into two subgroups. The first subgroup
+consists of properties involving the constructors and either a set function, the
+map function, or the relator:
 
 \begin{indentblock}
 \begin{description}
@@ -818,16 +846,56 @@
 @{thm list.map(1)[no_vars]} \\
 @{thm list.map(2)[no_vars]}
 
-\item[@{text "t."}\hthm{rel\_inject} @{text "[simp, code]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{rel\_inject} @{text "[simp]"}\rm:] ~ \\
 @{thm list.rel_inject(1)[no_vars]} \\
 @{thm list.rel_inject(2)[no_vars]}
 
-\item[@{text "t."}\hthm{rel\_distinct} @{text "[simp, code]"}\rm:] ~ \\
+\item[@{text "t."}\hthm{rel\_distinct} @{text "[simp]"}\rm:] ~ \\
 @{thm list.rel_distinct(1)[no_vars]} \\
 @{thm list.rel_distinct(2)[no_vars]}
 
 \end{description}
 \end{indentblock}
+
+\noindent
+In addition, equational versions of @{text t.rel_inject} and @{text
+t.rel_distinct} are registered with the @{text "[code]"} attribute.
+
+The second subgroup consists of more abstract properties of the set functions,
+the map function, and the relator:
+
+\begin{indentblock}
+\begin{description}
+
+\item[@{text "t."}\hthm{map\_cong0}\rm:] ~ \\
+@{thm list.map_cong0[no_vars]}
+
+\item[@{text "t."}\hthm{map\_cong} @{text "[fundef_cong]"}\rm:] ~ \\
+@{thm list.map_cong[no_vars]}
+
+\item[@{text "t."}\hthm{map\_id}\rm:] ~ \\
+@{thm list.map_id[no_vars]}
+
+\item[@{text "t."}\hthm{rel\_compp}\rm:] ~ \\
+@{thm list.rel_compp[no_vars]}
+
+\item[@{text "t."}\hthm{rel\_conversep}\rm:] ~ \\
+@{thm list.rel_conversep[no_vars]}
+
+\item[@{text "t."}\hthm{rel\_eq}\rm:] ~ \\
+@{thm list.rel_eq[no_vars]}
+
+\item[@{text "t."}\hthm{rel\_flip}\rm:] ~ \\
+@{thm list.rel_flip[no_vars]}
+
+\item[@{text "t."}\hthm{rel\_mono}\rm:] ~ \\
+@{thm list.rel_mono[no_vars]}
+
+\item[@{text "t."}\hthm{set\_map}\rm:] ~ \\
+@{thm list.set_map[no_vars]}
+
+\end{description}
+\end{indentblock}
 *}
 
 
@@ -889,18 +957,22 @@
 is recommended to use @{command datatype_new_compat} or \keyw{rep\_datatype}
 to register new-style datatypes as old-style datatypes.
 
-\item \emph{The recursor @{text "t_rec"} has a different signature for nested
-recursive datatypes.} In the old package, nested recursion was internally
-reduced to mutual recursion. This reduction was visible in the type of the
-recursor, used by \keyw{primrec}. In the new package, nested recursion is
-handled in a more modular fashion. The old-style recursor can be generated on
-demand using @{command primrec_new}, as explained in
+\item \emph{The constants @{text t_case} and @{text t_rec} are now called
+@{text case_t} and @{text rec_t}.}
+
+\item \emph{The recursor @{text rec_t} has a different signature for nested
+recursive datatypes.} In the old package, nested recursion through non-functions
+was internally reduced to mutual recursion. This reduction was visible in the
+type of the recursor, used by \keyw{primrec}. Recursion through functions was
+handled specially. In the new package, nested recursion (for functions and
+non-functions) is handled in a more modular fashion. The old-style recursor can
+be generated on demand using @{command primrec_new}, as explained in
 Section~\ref{sssec:primrec-nested-as-mutual-recursion}, if the recursion is via
 new-style datatypes.
 
-\item \emph{Accordingly, the induction principle is different for nested
-recursive datatypes.} Again, the old-style induction principle can be generated
-on demand using @{command primrec_new}, as explained in
+\item \emph{Accordingly, the induction rule is different for nested recursive
+datatypes.} Again, the old-style induction rule can be generated on demand using
+@{command primrec_new}, as explained in
 Section~\ref{sssec:primrec-nested-as-mutual-recursion}, if the recursion is via
 new-style datatypes.
 
@@ -940,9 +1012,9 @@
   \label{sec:defining-recursive-functions} *}
 
 text {*
-Recursive functions over datatypes can be specified using @{command
-primrec_new}, which supports primitive recursion, or using the more general
-\keyw{fun} and \keyw{function} commands. Here, the focus is on @{command
+Recursive functions over datatypes can be specified using the @{command
+primrec_new} command, which supports primitive recursion, or using the more
+general \keyw{fun} and \keyw{function} commands. Here, the focus is on @{command
 primrec_new}; the other two commands are described in a separate tutorial
 \cite{isabelle-function}.
 
@@ -1026,9 +1098,24 @@
 
 text {*
 \noindent
-The next example is not primitive recursive, but it can be defined easily using
-\keyw{fun}. The @{command datatype_new_compat} command is needed to register
-new-style datatypes for use with \keyw{fun} and \keyw{function}
+Pattern matching is only available for the argument on which the recursion takes
+place. Fortunately, it is easy to generate pattern-maching equations using the
+\keyw{simps\_of\_case} command provided by the theory
+\verb|~~/src/HOL/Library/Simps_Case_Conv|.
+*}
+
+    simps_of_case at_simps: at.simps
+
+text {*
+This generates the lemma collection @{thm [source] at_simps}:
+%
+\[@{thm at_simps(1)[no_vars]}
+  \qquad @{thm at_simps(2)[no_vars]}\]
+%
+The next example is defined using \keyw{fun} to escape the syntactic
+restrictions imposed on primitive recursive functions. The
+@{command datatype_new_compat} command is needed to register new-style datatypes
+for use with \keyw{fun} and \keyw{function}
 (Section~\ref{sssec:datatype-new-compat}):
 *}
 
@@ -1109,13 +1196,13 @@
 \noindent
 The next example features recursion through the @{text option} type. Although
 @{text option} is not a new-style datatype, it is registered as a BNF with the
-map function @{const option_map}:
+map function @{const map_option}:
 *}
 
     primrec_new (*<*)(in early) (*>*)sum_btree :: "('a\<Colon>{zero,plus}) btree \<Rightarrow> 'a" where
       "sum_btree (BNode a lt rt) =
-         a + the_default 0 (option_map sum_btree lt) +
-           the_default 0 (option_map sum_btree rt)"
+         a + the_default 0 (map_option sum_btree lt) +
+           the_default 0 (map_option sum_btree rt)"
 
 text {*
 \noindent
@@ -1124,28 +1211,51 @@
 (@{text \<Rightarrow>}) is simply composition (@{text "op \<circ>"}):
 *}
 
-    primrec_new (*<*)(in early) (*>*)ftree_map :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a ftree \<Rightarrow> 'a ftree" where
-      "ftree_map f (FTLeaf x) = FTLeaf (f x)" |
-      "ftree_map f (FTNode g) = FTNode (ftree_map f \<circ> g)"
+    primrec_new (*<*)(in early) (*>*)relabel_ft :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a ftree \<Rightarrow> 'a ftree" where
+      "relabel_ft f (FTLeaf x) = FTLeaf (f x)" |
+      "relabel_ft f (FTNode g) = FTNode (relabel_ft f \<circ> g)"
+
+text {*
+\noindent
+For convenience, recursion through functions can also be expressed using
+$\lambda$-abstractions and function application rather than through composition.
+For example:
+*}
+
+    primrec_new relabel_ft :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a ftree \<Rightarrow> 'a ftree" where
+      "relabel_ft f (FTLeaf x) = FTLeaf (f x)" |
+      "relabel_ft f (FTNode g) = FTNode (\<lambda>x. relabel_ft f (g x))"
+
+text {* \blankline *}
+
+    primrec_new subtree_ft :: "'a \<Rightarrow> 'a ftree \<Rightarrow> 'a ftree" where
+      "subtree_ft x (FTNode g) = g x"
 
 text {*
 \noindent
-(No such map function is defined by the package because the type
-variable @{typ 'a} is dead in @{typ "'a ftree"}.)
-
-Using \keyw{fun} or \keyw{function}, recursion through functions can be
-expressed using $\lambda$-expressions and function application rather
-than through composition. For example:
+For recursion through curried $n$-ary functions, $n$ applications of
+@{term "op \<circ>"} are necessary. The examples below illustrate the case where
+$n = 2$:
 *}
 
-    datatype_new_compat ftree
+    datatype_new 'a ftree2 = FTLeaf2 'a | FTNode2 "'a \<Rightarrow> 'a \<Rightarrow> 'a ftree2"
 
 text {* \blankline *}
 
-    function ftree_map :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a ftree \<Rightarrow> 'a ftree" where
-      "ftree_map f (FTLeaf x) = FTLeaf (f x)" |
-      "ftree_map f (FTNode g) = FTNode (\<lambda>x. ftree_map f (g x))"
-    by auto (metis ftree.exhaust)
+    primrec_new (*<*)(in early) (*>*)relabel_ft2 :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a ftree2 \<Rightarrow> 'a ftree2" where
+      "relabel_ft2 f (FTLeaf2 x) = FTLeaf2 (f x)" |
+      "relabel_ft2 f (FTNode2 g) = FTNode2 (op \<circ> (op \<circ> (relabel_ft2 f)) g)"
+
+text {* \blankline *}
+
+    primrec_new relabel_ft2 :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a ftree2 \<Rightarrow> 'a ftree2" where
+      "relabel_ft2 f (FTLeaf2 x) = FTLeaf2 (f x)" |
+      "relabel_ft2 f (FTNode2 g) = FTNode2 (\<lambda>x y. relabel_ft2 f (g x y))"
+
+text {* \blankline *}
+
+    primrec_new subtree_ft2 :: "'a \<Rightarrow> 'a \<Rightarrow> 'a ftree2 \<Rightarrow> 'a ftree2" where
+      "subtree_ft2 x y (FTNode2 g) = g x y"
 
 
 subsubsection {* Nested-as-Mutual Recursion
@@ -1177,12 +1287,12 @@
 
 text {*
 \noindent
-Appropriate induction principles are generated under the names
+Appropriate induction rules are generated as
 @{thm [source] at\<^sub>f\<^sub>f.induct},
 @{thm [source] ats\<^sub>f\<^sub>f.induct}, and
-@{thm [source] at\<^sub>f\<^sub>f_ats\<^sub>f\<^sub>f.induct}.
-
-%%% TODO: Add recursors.
+@{thm [source] at\<^sub>f\<^sub>f_ats\<^sub>f\<^sub>f.induct}. The
+induction rules and the underlying recursors are generated on a per-need basis
+and are kept in a cache to speed up subsequent definitions.
 
 Here is a second example:
 *}
@@ -1340,7 +1450,7 @@
 \begin{itemize}
 \setlength{\itemsep}{0pt}
 
-\item \emph{Theorems sometimes have different names.}
+\item \emph{Some theorems have different names.}
 For $m > 1$ mutually recursive functions,
 @{text "f\<^sub>1_\<dots>_f\<^sub>m.simps"} has been broken down into separate
 subcollections @{text "f\<^sub>i.simps"}.
@@ -1415,7 +1525,7 @@
 text {*
 \noindent
 Notice that the @{const cont} selector is associated with both @{const Skip}
-and @{const Choice}.
+and @{const Action}.
 *}
 
 
@@ -1488,9 +1598,9 @@
 \begin{itemize}
 \setlength{\itemsep}{0pt}
 
-\item \relax{Coiterator}: @{text t_unfold}
-
-\item \relax{Corecursor}: @{text t_corec}
+\item \relax{Coiterator}: @{text unfold_t}
+
+\item \relax{Corecursor}: @{text corec_t}
 
 \end{itemize}
 *}
@@ -1606,10 +1716,10 @@
   \label{sec:defining-corecursive-functions} *}
 
 text {*
-Corecursive functions can be specified using @{command primcorec} and
-@{command primcorecursive}, which support primitive corecursion, or using the
-more general \keyw{partial\_function} command. Here, the focus is on
-the former two. More examples can be found in the directory
+Corecursive functions can be specified using the @{command primcorec} and
+\keyw{prim\-corec\-ursive} commands, which support primitive corecursion, or
+using the more general \keyw{partial\_function} command. Here, the focus is on
+the first two. More examples can be found in the directory
 \verb|~~/src/HOL/BNF/Examples|.
 
 Whereas recursive functions consume datatypes one constructor at a time,
@@ -1630,7 +1740,7 @@
 This style is popular in the coalgebraic literature.
 
 \item The \emph{constructor view} specifies $f$ by equations of the form
-\[@{text "\<dots> \<Longrightarrow> f x\<^sub>1 \<dots> x\<^sub>n = C \<dots>"}\]
+\[@{text "\<dots> \<Longrightarrow> f x\<^sub>1 \<dots> x\<^sub>n = C\<^sub>j \<dots>"}\]
 This style is often more concise than the previous one.
 
 \item The \emph{code view} specifies $f$ by a single equation of the form
@@ -1643,14 +1753,6 @@
 All three styles are available as input syntax. Whichever syntax is chosen,
 characteristic theorems for all three styles are generated.
 
-\begin{framed}
-\noindent
-\textbf{Warning:}\enskip The @{command primcorec} and @{command primcorecursive}
-commands are under development. Some of the functionality described here is
-vaporware. An alternative is to define corecursive functions directly using the
-generated @{text t_unfold} or @{text t_corec} combinators.
-\end{framed}
-
 %%% TODO: partial_function? E.g. for defining tail recursive function on lazy
 %%% lists (cf. terminal0 in TLList.thy)
 *}
@@ -1668,11 +1770,6 @@
 present the same examples expressed using the constructor and destructor views.
 *}
 
-(*<*)
-    locale code_view
-    begin
-(*>*)
-
 subsubsection {* Simple Corecursion
   \label{sssec:primcorec-simple-corecursion} *}
 
@@ -1683,19 +1780,19 @@
 *}
 
     primcorec literate :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a llist" where
-      "literate f x = LCons x (literate f (f x))"
+      "literate g x = LCons x (literate g (g x))"
 
 text {* \blankline *}
 
     primcorec siterate :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a stream" where
-      "siterate f x = SCons x (siterate f (f x))"
+      "siterate g x = SCons x (siterate g (g x))"
 
 text {*
 \noindent
 The constructor ensures that progress is made---i.e., the function is
 \emph{productive}. The above functions compute the infinite lazy list or stream
-@{text "[x, f x, f (f x), \<dots>]"}. Productivity guarantees that prefixes
-@{text "[x, f x, f (f x), \<dots>, (f ^^ k) x]"} of arbitrary finite length
+@{text "[x, g x, g (g x), \<dots>]"}. Productivity guarantees that prefixes
+@{text "[x, g x, g (g x), \<dots>, (g ^^ k) x]"} of arbitrary finite length
 @{text k} can be computed by unfolding the code equation a finite number of
 times.
 
@@ -1714,7 +1811,7 @@
 appear around constructors that guard corecursive calls:
 *}
 
-    primcorec_notyet lappend :: "'a llist \<Rightarrow> 'a llist \<Rightarrow> 'a llist" where
+    primcorec lappend :: "'a llist \<Rightarrow> 'a llist \<Rightarrow> 'a llist" where
       "lappend xs ys =
          (case xs of
             LNil \<Rightarrow> ys
@@ -1722,6 +1819,19 @@
 
 text {*
 \noindent
+Pattern matching is not supported by @{command primcorec}. Fortunately, it is
+easy to generate pattern-maching equations using the \keyw{simps\_of\_case}
+command provided by the theory \verb|~~/src/HOL/Library/Simps_Case_Conv|.
+*}
+
+    simps_of_case lappend_simps: lappend.code
+
+text {*
+This generates the lemma collection @{thm [source] lappend_simps}:
+%
+\[@{thm lappend_simps(1)[no_vars]}
+  \qquad @{thm lappend_simps(2)[no_vars]}\]
+%
 Corecursion is useful to specify not only functions but also infinite objects:
 *}
 
@@ -1735,7 +1845,7 @@
 pseudorandom seed (@{text n}):
 *}
 
-    primcorec_notyet
+    primcorec
       random_process :: "'a stream \<Rightarrow> (int \<Rightarrow> int) \<Rightarrow> int \<Rightarrow> 'a process"
     where
       "random_process s f n =
@@ -1780,43 +1890,71 @@
 The next pair of examples generalize the @{const literate} and @{const siterate}
 functions (Section~\ref{sssec:primcorec-nested-corecursion}) to possibly
 infinite trees in which subnodes are organized either as a lazy list (@{text
-tree\<^sub>i\<^sub>i}) or as a finite set (@{text tree\<^sub>i\<^sub>s}):
+tree\<^sub>i\<^sub>i}) or as a finite set (@{text tree\<^sub>i\<^sub>s}). They rely on the map functions of
+the nesting type constructors to lift the corecursive calls:
 *}
 
     primcorec iterate\<^sub>i\<^sub>i :: "('a \<Rightarrow> 'a llist) \<Rightarrow> 'a \<Rightarrow> 'a tree\<^sub>i\<^sub>i" where
-      "iterate\<^sub>i\<^sub>i f x = Node\<^sub>i\<^sub>i x (lmap (iterate\<^sub>i\<^sub>i f) (f x))"
+      "iterate\<^sub>i\<^sub>i g x = Node\<^sub>i\<^sub>i x (lmap (iterate\<^sub>i\<^sub>i g) (g x))"
 
 text {* \blankline *}
 
     primcorec iterate\<^sub>i\<^sub>s :: "('a \<Rightarrow> 'a fset) \<Rightarrow> 'a \<Rightarrow> 'a tree\<^sub>i\<^sub>s" where
-      "iterate\<^sub>i\<^sub>s f x = Node\<^sub>i\<^sub>s x (fimage (iterate\<^sub>i\<^sub>s f) (f x))"
+      "iterate\<^sub>i\<^sub>s g x = Node\<^sub>i\<^sub>s x (fimage (iterate\<^sub>i\<^sub>s g) (g x))"
 
 text {*
 \noindent
-Deterministic finite automata (DFAs) are traditionally defined as 5-tuples
-@{text "(Q, \<Sigma>, \<delta>, q\<^sub>0, F)"}, where @{text Q} is a finite set of states,
+Both examples follow the usual format for constructor arguments associated
+with nested recursive occurrences of the datatype. Consider
+@{const iterate\<^sub>i\<^sub>i}. The term @{term "g x"} constructs an @{typ "'a llist"}
+value, which is turned into an @{typ "'a tree\<^sub>i\<^sub>i llist"} value using
+@{const lmap}.
+
+This format may sometimes feel artificial. The following function constructs
+a tree with a single, infinite branch from a stream:
+*}
+
+    primcorec tree\<^sub>i\<^sub>i_of_stream :: "'a stream \<Rightarrow> 'a tree\<^sub>i\<^sub>i" where
+      "tree\<^sub>i\<^sub>i_of_stream s =
+         Node\<^sub>i\<^sub>i (shd s) (lmap tree\<^sub>i\<^sub>i_of_stream (LCons (stl s) LNil))"
+
+text {*
+\noindent
+Fortunately, it is easy to prove the following lemma, where the corecursive call
+is moved inside the lazy list constructor, thereby eliminating the need for
+@{const lmap}:
+*}
+
+    lemma tree\<^sub>i\<^sub>i_of_stream_alt:
+      "tree\<^sub>i\<^sub>i_of_stream s = Node\<^sub>i\<^sub>i (shd s) (LCons (tree\<^sub>i\<^sub>i_of_stream (stl s)) LNil)"
+    by (subst tree\<^sub>i\<^sub>i_of_stream.code) simp
+
+text {*
+The next example illustrates corecursion through functions, which is a bit
+special. Deterministic finite automata (DFAs) are traditionally defined as
+5-tuples @{text "(Q, \<Sigma>, \<delta>, q\<^sub>0, F)"}, where @{text Q} is a finite set of states,
 @{text \<Sigma>} is a finite alphabet, @{text \<delta>} is a transition function, @{text q\<^sub>0}
 is an initial state, and @{text F} is a set of final states. The following
 function translates a DFA into a @{type state_machine}:
 *}
 
-    primcorec (*<*)(in early) (*>*)
-      sm_of_dfa :: "('q \<Rightarrow> 'a \<Rightarrow> 'q) \<Rightarrow> 'q set \<Rightarrow> 'q \<Rightarrow> 'a state_machine"
+    primcorec
+      (*<*)(in early) (*>*)sm_of_dfa :: "('q \<Rightarrow> 'a \<Rightarrow> 'q) \<Rightarrow> 'q set \<Rightarrow> 'q \<Rightarrow> 'a state_machine"
     where
-      "sm_of_dfa \<delta> F q = State_Machine (q \<in> F) (sm_of_dfa \<delta> F o \<delta> q)"
+      "sm_of_dfa \<delta> F q = State_Machine (q \<in> F) (sm_of_dfa \<delta> F \<circ> \<delta> q)"
 
 text {*
 \noindent
 The map function for the function type (@{text \<Rightarrow>}) is composition
-(@{text "op \<circ>"}). For convenience, corecursion through functions can be
-expressed using $\lambda$-expressions and function application rather
+(@{text "op \<circ>"}). For convenience, corecursion through functions can
+also be expressed using $\lambda$-abstractions and function application rather
 than through composition. For example:
 *}
 
     primcorec
       sm_of_dfa :: "('q \<Rightarrow> 'a \<Rightarrow> 'q) \<Rightarrow> 'q set \<Rightarrow> 'q \<Rightarrow> 'a state_machine"
     where
-      "sm_of_dfa \<delta> F q = State_Machine (q \<in> F) (sm_of_dfa \<delta> F o \<delta> q)"
+      "sm_of_dfa \<delta> F q = State_Machine (q \<in> F) (\<lambda>a. sm_of_dfa \<delta> F (\<delta> q a))"
 
 text {* \blankline *}
 
@@ -1833,9 +1971,32 @@
     primcorec
       or_sm :: "'a state_machine \<Rightarrow> 'a state_machine \<Rightarrow> 'a state_machine"
     where
-      "or_sm M N =
-         State_Machine (accept M \<or> accept N)
-           (\<lambda>a. or_sm (trans M a) (trans N a))"
+      "or_sm M N = State_Machine (accept M \<or> accept N)
+         (\<lambda>a. or_sm (trans M a) (trans N a))"
+
+text {*
+\noindent
+For recursion through curried $n$-ary functions, $n$ applications of
+@{term "op \<circ>"} are necessary. The examples below illustrate the case where
+$n = 2$:
+*}
+
+    codatatype ('a, 'b) state_machine2 =
+      State_Machine2 (accept2: bool) (trans2: "'a \<Rightarrow> 'b \<Rightarrow> ('a, 'b) state_machine2")
+
+text {* \blankline *}
+
+    primcorec
+      (*<*)(in early) (*>*)sm2_of_dfa :: "('q \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> 'q) \<Rightarrow> 'q set \<Rightarrow> 'q \<Rightarrow> ('a, 'b) state_machine2"
+    where
+      "sm2_of_dfa \<delta> F q = State_Machine2 (q \<in> F) (op \<circ> (op \<circ> (sm2_of_dfa \<delta> F)) (\<delta> q))"
+
+text {* \blankline *}
+
+    primcorec
+      sm2_of_dfa :: "('q \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> 'q) \<Rightarrow> 'q set \<Rightarrow> 'q \<Rightarrow> ('a, 'b) state_machine2"
+    where
+      "sm2_of_dfa \<delta> F q = State_Machine2 (q \<in> F) (\<lambda>a b. sm2_of_dfa \<delta> F (\<delta> q a b))"
 
 
 subsubsection {* Nested-as-Mutual Corecursion
@@ -1848,15 +2009,31 @@
 pretend that nested codatatypes are mutually corecursive. For example:
 *}
 
-    primcorec_notyet
+(*<*)
+    context late
+    begin
+(*>*)
+    primcorec
       iterate\<^sub>i\<^sub>i :: "('a \<Rightarrow> 'a llist) \<Rightarrow> 'a \<Rightarrow> 'a tree\<^sub>i\<^sub>i" and
       iterates\<^sub>i\<^sub>i :: "('a \<Rightarrow> 'a llist) \<Rightarrow> 'a llist \<Rightarrow> 'a tree\<^sub>i\<^sub>i llist"
     where
-      "iterate\<^sub>i\<^sub>i f x = Node\<^sub>i\<^sub>i x (iterates\<^sub>i\<^sub>i f (f x))" |
-      "iterates\<^sub>i\<^sub>i f xs =
+      "iterate\<^sub>i\<^sub>i g x = Node\<^sub>i\<^sub>i x (iterates\<^sub>i\<^sub>i g (g x))" |
+      "iterates\<^sub>i\<^sub>i g xs =
          (case xs of
             LNil \<Rightarrow> LNil
-          | LCons x xs' \<Rightarrow> LCons (iterate\<^sub>i\<^sub>i f x) (iterates\<^sub>i\<^sub>i f xs'))"
+          | LCons x xs' \<Rightarrow> LCons (iterate\<^sub>i\<^sub>i g x) (iterates\<^sub>i\<^sub>i g xs'))"
+
+text {*
+\noindent
+Coinduction rules are generated as
+@{thm [source] iterate\<^sub>i\<^sub>i.coinduct},
+@{thm [source] iterates\<^sub>i\<^sub>i.coinduct}, and
+@{thm [source] iterate\<^sub>i\<^sub>i_iterates\<^sub>i\<^sub>i.coinduct}
+and analogously for @{text strong_coinduct}. These rules and the
+underlying corecursors are generated on a per-need basis and are kept in a cache
+to speed up subsequent definitions.
+*}
+
 (*<*)
     end
 (*>*)
@@ -1866,7 +2043,7 @@
   \label{ssec:primrec-constructor-view} *}
 
 (*<*)
-    locale ctr_view = code_view
+    locale ctr_view
     begin
 (*>*)
 
@@ -1937,7 +2114,7 @@
   \label{ssec:primrec-destructor-view} *}
 
 (*<*)
-    locale dest_view
+    locale dtr_view
     begin
 (*>*)
 
@@ -1951,13 +2128,13 @@
     primcorec literate :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a llist" where
       "\<not> lnull (literate _ x)" |
       "lhd (literate _ x) = x" |
-      "ltl (literate f x) = literate f (f x)"
+      "ltl (literate g x) = literate g (g x)"
 
 text {* \blankline *}
 
     primcorec siterate :: "('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a stream" where
       "shd (siterate _ x) = x" |
-      "stl (siterate f x) = siterate f (f x)"
+      "stl (siterate g x) = siterate g (g x)"
 
 text {* \blankline *}
 
@@ -1993,6 +2170,9 @@
 (*<*)
     end
 
+    locale dtr_view2
+    begin
+
     primcorec lappend :: "'a llist \<Rightarrow> 'a llist \<Rightarrow> 'a llist" where
       "lnull xs \<Longrightarrow> lnull ys \<Longrightarrow> lnull (lappend xs ys)" |
 (*>*)
@@ -2000,8 +2180,6 @@
 (*<*) |
       "lhd (lappend xs ys) = lhd (if lnull xs then ys else xs)" |
       "ltl (lappend xs ys) = (if xs = LNil then ltl ys else lappend (ltl xs) ys)"
-
-    context dest_view begin
 (*>*)
 
 text {*
@@ -2044,8 +2222,8 @@
 text {* \blankline *}
 
     primcorec iterate\<^sub>i\<^sub>i :: "('a \<Rightarrow> 'a llist) \<Rightarrow> 'a \<Rightarrow> 'a tree\<^sub>i\<^sub>i" where
-      "lbl\<^sub>i\<^sub>i (iterate\<^sub>i\<^sub>i f x) = x" |
-      "sub\<^sub>i\<^sub>i (iterate\<^sub>i\<^sub>i f x) = lmap (iterate\<^sub>i\<^sub>i f) (f x)"
+      "lbl\<^sub>i\<^sub>i (iterate\<^sub>i\<^sub>i g x) = x" |
+      "sub\<^sub>i\<^sub>i (iterate\<^sub>i\<^sub>i g x) = lmap (iterate\<^sub>i\<^sub>i g) (g x)"
 (*<*)
     end
 (*>*)
@@ -2148,11 +2326,39 @@
 \end{matharray}
 
 @{rail "
-  @@{command bnf} target? (name ':')? term \\
-    term_list term term_list term?
+  @@{command bnf} target? (name ':')? typ \\
+    'map:' term ('sets:' (term +))? 'bd:' term \\
+    ('wits:' (term +))? ('rel:' term)?
+"}
+*}
+
+
+subsubsection {* \keyw{bnf\_decl}
+  \label{sssec:bnf-decl} *}
+
+text {*
+%%% TODO: use command_def once the command is available
+\begin{matharray}{rcl}
+  @{text "bnf_decl"} & : & @{text "local_theory \<rightarrow> local_theory"}
+\end{matharray}
+
+@{rail "
+  @@{command bnf_decl} target? @{syntax dt_name}
   ;
-  X_list: '[' (X + ',') ']'
+  @{syntax_def dt_name}: @{syntax tyargs}? name @{syntax map_rel}? mixfix?
+  ;
+  @{syntax_def tyargs}: typefree | '(' (((name | '-') ':')? typefree + ',') ')'
+  ;
+  @{syntax_def map_rel}: '(' ((('map' | 'rel') ':' name) +) ')'
 "}
+
+Declares a fresh type and fresh constants (map, set, relator, cardinal bound)
+and asserts the BNF properties for these constants as axioms. Additionally,
+type arguments may be marked as dead (by using @{syntax "-"} instead of a name for the
+set function)---this is the only difference between @{syntax dt_name} here and
+the syntax used by the @{command datatype_new}/@{command codatatype} commands.
+
+The axioms are sound, since there exists a BNF of any given arity.
 *}
 
 
@@ -2185,8 +2391,10 @@
 %    old \keyw{datatype}
 %
 %  * @{command wrap_free_constructors}
-%    * @{text "no_discs_sels"}, @{text "rep_compat"}
+%    * @{text "no_discs_sels"}, @{text "no_code"}, @{text "rep_compat"}
 %    * hack to have both co and nonco view via locale (cf. ext nats)
+%  * code generator
+%     * eq, refl, simps
 *}
 
 
@@ -2215,11 +2423,11 @@
   @{syntax_def wfc_discs_sels}: name_list (name_list_list name_term_list_list? )?
   ;
   @{syntax_def name_term}: (name ':' term)
+  ;
+  X_list: '[' (X + ',') ']'
 "}
 
-% options: no_discs_sels rep_compat
-
-% X_list is as for BNF
+% options: no_discs_sels no_code rep_compat
 
 \noindent
 Section~\ref{ssec:datatype-generated-theorems} lists the generated theorems.
@@ -2307,8 +2515,9 @@
 suggested major simplifications to the internal constructions, much of which has
 yet to be implemented. Florian Haftmann and Christian Urban provided general
 advice on Isabelle and package writing. Stefan Milius and Lutz Schr\"oder
-found an elegant proof to eliminate one of the BNF assumptions. Christian
-Sternagel suggested many textual improvements to this tutorial.
+found an elegant proof to eliminate one of the BNF assumptions. Andreas
+Lochbihler and Christian Sternagel suggested many textual improvements to this
+tutorial.
 *}
 
 end
--- a/src/Doc/Datatypes/document/root.tex	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Datatypes/document/root.tex	Thu Dec 05 17:58:03 2013 +0100
@@ -58,10 +58,10 @@
 
 \begin{abstract}
 \noindent
-This tutorial describes how to use the new package for defining datatypes and
-codatatypes in Isabelle/HOL. The package provides five main commands:
+This tutorial describes the new package for defining datatypes and codatatypes
+in Isabelle/HOL. The package provides four main commands:
 \keyw{datatype\_new}, \keyw{codatatype}, \keyw{primrec\_new},
-\keyw{primcorecursive}, and \keyw{primcorec}. The commands suffixed by
+and \keyw{primcorec}. The commands suffixed by
 \keyw{\_new} are intended to subsume, and eventually replace, the corresponding
 commands from the old datatype package.
 \end{abstract}
--- a/src/Doc/Functions/Functions.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Functions/Functions.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -1003,13 +1003,13 @@
   recursive calls. In general, there is one introduction rule for each
   recursive call.
 
-  The predicate @{term "accp findzero_rel"} is the accessible part of
+  The predicate @{term "Wellfounded.accp findzero_rel"} is the accessible part of
   that relation. An argument belongs to the accessible part, if it can
   be reached in a finite number of steps (cf.~its definition in @{text
   "Wellfounded.thy"}).
 
   Since the domain predicate is just an abbreviation, you can use
-  lemmas for @{const accp} and @{const findzero_rel} directly. Some
+  lemmas for @{const Wellfounded.accp} and @{const findzero_rel} directly. Some
   lemmas which are occasionally useful are @{thm [source] accpI}, @{thm [source]
   accp_downward}, and of course the introduction and elimination rules
   for the recursion relation @{thm [source] "findzero_rel.intros"} and @{thm
--- a/src/Doc/IsarImplementation/ML.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/IsarImplementation/ML.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -1033,7 +1033,7 @@
   without any message output.
 
   \begin{warn}
-  The actual error channel is accessed via @{ML Output.error_msg}, but
+  The actual error channel is accessed via @{ML Output.error_message}, but
   the old interaction protocol of Proof~General \emph{crashes} if that
   function is used in regular ML code: error output and toplevel
   command failure always need to coincide in classic TTY interaction.
--- a/src/Doc/JEdit/JEdit.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/JEdit/JEdit.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -1068,12 +1068,6 @@
 text {*
   \begin{itemize}
 
-  \item \textbf{Problem:} Lack of dependency management for auxiliary files
-  that contribute to a theory (e.g.\ @{command ML_file}).
-
-  \textbf{Workaround:} Re-load files manually within the prover, by
-  editing corresponding command in the text.
-
   \item \textbf{Problem:} Odd behavior of some diagnostic commands with
   global side-effects, like writing a physical file.
 
--- a/src/Doc/Nitpick/document/root.tex	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Nitpick/document/root.tex	Thu Dec 05 17:58:03 2013 +0100
@@ -1965,6 +1965,8 @@
 \texttt{.kki}, \texttt{.cnf}, \texttt{.out}, and
 \texttt{.err}; you may safely remove them after Nitpick has run.
 
+\textbf{Warning:} This option is not thread-safe. Use at your own risk.
+
 \nopagebreak
 {\small See also \textit{debug} (\S\ref{output-format}).}
 \end{enum}
@@ -2382,6 +2384,14 @@
 \cite{kodkod-2009}. Unlike the standard version of MiniSat, the JNI version can
 be used incrementally.
 
+\item[\labelitemi] \textbf{\textit{Riss3g}:} Riss3g is an efficient solver written in
+\cpp{}. To use Riss3g, set the environment variable \texttt{RISS3G\_HOME} to the
+directory that contains the \texttt{riss3g} executable.%
+\footref{cygwin-paths}
+The \cpp{} sources for Riss3g are available at
+\url{http://tools.computational-logic.org/content/riss3g.php}.
+Nitpick has been tested with the SAT Competition 2013 version.
+
 \item[\labelitemi] \textbf{\textit{zChaff}:} zChaff is an older solver written
 in \cpp{}. To use zChaff, set the environment variable \texttt{ZCHAFF\_HOME} to
 the directory that contains the \texttt{zchaff} executable.%
@@ -2794,11 +2804,12 @@
 \subsection{Registering Coinductive Datatypes}
 \label{registering-coinductive-datatypes}
 
+Coinductive datatypes defined using the \textbf{codatatype} command that do not
+involve nested recursion through non-codatatypes are supported by Nitpick.
 If you have defined a custom coinductive datatype, you can tell Nitpick about
-it, so that it can use an efficient Kodkod axiomatization similar to the one it
-uses for lazy lists. The interface for registering and unregistering coinductive
-datatypes consists of the following pair of functions defined in the
-\textit{Nitpick\_HOL} structure:
+it, so that it can use an efficient Kodkod axiomatization. The interface for
+registering and unregistering coinductive datatypes consists of the following
+pair of functions defined in the \textit{Nitpick\_HOL} structure:
 
 \prew
 $\textbf{val}\,~\textit{register\_codatatype\/} : {}$ \\
@@ -2886,6 +2897,12 @@
 \item[\labelitemi] Nitpick produces spurious counterexamples when invoked after a
 \textbf{guess} command in a structured proof.
 
+\item[\labelitemi] Datatypes defined using \textbf{datatype\_new} are not
+supported.
+
+\item[\labelitemi] Codatatypes defined using \textbf{codatatype} that
+involve nested recursion through non-codatatypes are not supported.
+
 \item[\labelitemi] The \textit{nitpick\_xxx} attributes and the
 \textit{Nitpick\_xxx.register\_yyy} functions can cause havoc if used
 improperly.
--- a/src/Doc/ProgProve/Basics.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/ProgProve/Basics.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -22,8 +22,8 @@
 \item[type constructors,]
  in particular @{text list}, the type of
 lists, and @{text set}, the type of sets. Type constructors are written
-postfix, e.g.\ @{typ "nat list"} is the type of lists whose elements are
-natural numbers.
+postfix, i.e., after their arguments. For example,
+@{typ "nat list"} is the type of lists whose elements are natural numbers.
 \item[function types,]
 denoted by @{text"\<Rightarrow>"}.
 \item[type variables,]
@@ -41,8 +41,8 @@
 \begin{warn}
 There are many predefined infix symbols like @{text "+"} and @{text"\<le>"}.
 The name of the corresponding binary function is @{term"op +"},
-not just @{text"+"}. That is, @{term"x + y"} is syntactic sugar for
-\noquotes{@{term[source]"op + x y"}}.
+not just @{text"+"}. That is, @{term"x + y"} is nice surface syntax
+(``syntactic sugar'') for \noquotes{@{term[source]"op + x y"}}.
 \end{warn}
 
 HOL also supports some basic constructs from functional programming:
--- a/src/Doc/ProgProve/Bool_nat_list.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/ProgProve/Bool_nat_list.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -99,10 +99,10 @@
   For example, given the goal @{text"x + 0 = x"}, there is nothing to indicate
   that you are talking about natural numbers. Hence Isabelle can only infer
   that @{term x} is of some arbitrary type where @{text 0} and @{text"+"}
-  exist. As a consequence, you will be unable to prove the
-  goal. To alert you to such pitfalls, Isabelle flags numerals without a
-  fixed type in its output: @{prop"x+0 = x"}.  In this particular example,
-  you need to include
+  exist. As a consequence, you will be unable to prove the goal.
+%  To alert you to such pitfalls, Isabelle flags numerals without a
+%  fixed type in its output: @ {prop"x+0 = x"}.
+  In this particular example, you need to include
   an explicit type constraint, for example @{text"x+0 = (x::nat)"}. If there
   is enough contextual information this may not be necessary: @{prop"Suc x =
   x"} automatically implies @{text"x::nat"} because @{term Suc} is not
@@ -372,10 +372,10 @@
 ys zs)"}. It appears almost mysterious because we suddenly complicate the
 term by appending @{text Nil} on the left. What is really going on is this:
 when proving some equality \mbox{@{prop"s = t"}}, both @{text s} and @{text t} are
-simplified to some common term @{text u}.  This heuristic for equality proofs
+simplified until they ``meet in the middle''. This heuristic for equality proofs
 works well for a functional programming context like ours. In the base case
-@{text s} is @{term"app (app Nil ys) zs"}, @{text t} is @{term"app Nil (app
-ys zs)"}, and @{text u} is @{term"app ys zs"}.
+both @{term"app (app Nil ys) zs"} and @{term"app Nil (app
+ys zs)"} are simplified to @{term"app ys zs"}, the term in the middle.
 
 \subsection{Predefined Lists}
 \label{sec:predeflists}
@@ -419,13 +419,19 @@
 From now on lists are always the predefined lists.
 
 
-\subsection{Exercises}
+\subsection*{Exercises}
+
+\begin{exercise}
+Use the \isacom{value} command to evaluate the following expressions:
+@{term[source] "1 + (2::nat)"}, @{term[source] "1 + (2::int)"},
+@{term[source] "1 - (2::nat)"} and @{term[source] "1 - (2::int)"}.
+\end{exercise}
 
 \begin{exercise}
 Start from the definition of @{const add} given above.
-Prove it is associative (@{prop"add (add m n) p = add m (add n p)"})
-and commutative (@{prop"add m n = add n m"}). Define a recursive function
-@{text double} @{text"::"} @{typ"nat \<Rightarrow> nat"} and prove that @{prop"double m = add m m"}.
+Prove that @{const add} is associative and commutative.
+Define a recursive function @{text double} @{text"::"} @{typ"nat \<Rightarrow> nat"}
+and prove @{prop"double m = add m m"}.
 \end{exercise}
 
 \begin{exercise}
@@ -436,11 +442,15 @@
 
 \begin{exercise}
 Define a recursive function @{text "snoc ::"} @{typ"'a list \<Rightarrow> 'a \<Rightarrow> 'a list"}
-that appends an element to the end of a list. Do not use the predefined append
-operator @{text"@"}. With the help of @{text snoc} define a recursive function
-@{text "reverse ::"} @{typ"'a list \<Rightarrow> 'a list"} that reverses a list. Do not
-use the predefined function @{const rev}.
-Prove @{prop"reverse(reverse xs) = xs"}.
+that appends an element to the end of a list. With the help of @{text snoc}
+define a recursive function @{text "reverse ::"} @{typ"'a list \<Rightarrow> 'a list"}
+that reverses a list. Prove @{prop"reverse(reverse xs) = xs"}.
+\end{exercise}
+
+\begin{exercise}
+Define a recursive function @{text "sum ::"} @{typ"nat \<Rightarrow> nat"} such that
+\mbox{@{text"sum n"}} @{text"="} @{text"0 + ... + n"} and prove
+@{prop" sum(n::nat) = n * (n+1) div 2"}.
 \end{exercise}
 *}
 (*<*)
--- a/src/Doc/ProgProve/Isar.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/ProgProve/Isar.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -590,15 +590,15 @@
 the fact just proved, in this case the preceding block. In general,
 \isacom{note} introduces a new name for one or more facts.
 
-\subsection{Exercises}
+\subsection*{Exercises}
 
 \exercise
 Give a readable, structured proof of the following lemma:
 *}
-lemma assumes T: "\<forall> x y. T x y \<or> T y x"
-  and A: "\<forall> x y. A x y \<and> A y x \<longrightarrow> x = y"
-  and TA: "\<forall> x y. T x y \<longrightarrow> A x y" and "A x y"
-shows "T x y"
+lemma assumes T: "\<forall>x y. T x y \<or> T y x"
+  and A: "\<forall>x y. A x y \<and> A y x \<longrightarrow> x = y"
+  and TA: "\<forall>x y. T x y \<longrightarrow> A x y" and "A x y"
+  shows "T x y"
 (*<*)oops(*>*)
 text{*
 \endexercise
@@ -612,10 +612,11 @@
 text{*
 Hint: There are predefined functions @{const_typ take} and @{const_typ drop}
 such that @{text"take k [x\<^sub>1,\<dots>] = [x\<^sub>1,\<dots>,x\<^sub>k]"} and
-@{text"drop k [x\<^sub>1,\<dots>] = [x\<^bsub>k+1\<^esub>,\<dots>]"}. Let @{text simp} and especially
-sledgehammer find and apply the relevant @{const take} and @{const drop} lemmas for you.
+@{text"drop k [x\<^sub>1,\<dots>] = [x\<^bsub>k+1\<^esub>,\<dots>]"}. Let sledgehammer find and apply
+the relevant @{const take} and @{const drop} lemmas for you.
 \endexercise
 
+
 \section{Case Analysis and Induction}
 
 \subsection{Datatype Case Analysis}
@@ -1018,7 +1019,7 @@
 \isacom{lemma} @{text[source]"I r s t \<Longrightarrow> \<dots>"}
 \end{isabelle}
 Applying the standard form of
-rule induction in such a situation will lead to strange and typically unproveable goals.
+rule induction in such a situation will lead to strange and typically unprovable goals.
 We can easily reduce this situation to the standard one by introducing
 new variables @{text x}, @{text y}, @{text z} and reformulating the goal like this:
 \begin{isabelle}
@@ -1040,7 +1041,7 @@
 proof(induction "Suc m" arbitrary: m rule: ev.induct)
   fix n assume IH: "\<And>m. n = Suc m \<Longrightarrow> \<not> ev m"
   show "\<not> ev (Suc n)"
-  proof --"contradition"
+  proof --"contradiction"
     assume "ev(Suc n)"
     thus False
     proof cases --"rule inversion"
@@ -1075,45 +1076,38 @@
 @{text induct} method.
 \end{warn}
 
-\subsection{Exercises}
+
+\subsection*{Exercises}
+
+
+\exercise
+Give a structured proof by rule inversion:
+*}
+
+lemma assumes a: "ev(Suc(Suc n))" shows "ev n"
+(*<*)oops(*>*)
+
+text{*
+\endexercise
+
+\begin{exercise}
+Give a structured proof of @{prop "\<not> ev(Suc(Suc(Suc 0)))"}
+by rule inversions. If there are no cases to be proved you can close
+a proof immediately with \isacom{qed}.
+\end{exercise}
+
+\begin{exercise}
+Recall predicate @{text star} from \autoref{sec:star} and @{text iter}
+from Exercise~\ref{exe:iter}. Prove @{prop "iter r n x y \<Longrightarrow> star r x y"}
+in a structured style, do not just sledgehammer each case of the
+required induction.
+\end{exercise}
 
 \begin{exercise}
 Define a recursive function @{text "elems ::"} @{typ"'a list \<Rightarrow> 'a set"}
 and prove @{prop "x : elems xs \<Longrightarrow> \<exists>ys zs. xs = ys @ x # zs \<and> x \<notin> elems ys"}.
 \end{exercise}
-
-\begin{exercise}
-A context-free grammar can be seen as an inductive definition where each
-nonterminal $A$ is an inductively defined predicate on lists of terminal
-symbols: $A(w)$ mans
-that $w$ is in the language generated by $A$. For example, the production $S
-\to a S b$ can be viewed as the implication @{prop"S w \<Longrightarrow> S (a # w @ [b])"}
-where @{text a} and @{text b} are constructors of some datatype of terminal
-symbols: \isacom{datatype} @{text"tsymbs = a | b | \<dots>"}
-
-Define the two grammars
-\[
-\begin{array}{r@ {\quad}c@ {\quad}l}
-S &\to& \varepsilon \quad\mid\quad a~S~b \quad\mid\quad S~S \\
-T &\to& \varepsilon \quad\mid\quad T~a~T~b
-\end{array}
-\]
-($\varepsilon$ is the empty word)
-as two inductive predicates and prove @{prop"S w \<longleftrightarrow> T w"}.
-\end{exercise}
-
 *}
-(*
-lemma "\<not> ev(Suc(Suc(Suc 0)))"
-proof
-  assume "ev(Suc(Suc(Suc 0)))"
-  then show False
-  proof cases
-    case evSS
-    from `ev(Suc 0)` show False by cases
-  qed
-qed
-*)
 
 (*<*)
 end
--- a/src/Doc/ProgProve/Logic.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/ProgProve/Logic.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -141,6 +141,28 @@
 See \cite{Nipkow-Main} for the wealth of further predefined functions in theory
 @{theory Main}.
 
+
+\subsection*{Exercises}
+
+\exercise
+Start from the data type of binary trees defined earlier:
+*}
+
+datatype 'a tree = Tip | Node "'a tree" 'a "'a tree"
+
+text{*
+Define a function @{text "set ::"} @{typ "'a tree \<Rightarrow> 'a set"}
+that returns the elements in a tree and a function
+@{text "ord ::"} @{typ "int tree \<Rightarrow> bool"}
+that tests if an @{typ "int tree"} is ordered.
+
+Define a function @{text ins} that inserts an element into an ordered @{typ "int tree"}
+while maintaining the order of the tree. If the element is already in the tree, the
+same tree should be returned. Prove correctness of @{text ins}:
+@{prop "set(ins x t) = {x} \<union> set t"} and @{prop "ord t \<Longrightarrow> ord(ins i t)"}.
+\endexercise
+
+
 \section{Proof Automation}
 
 So far we have only seen @{text simp} and @{text auto}: Both perform
@@ -459,12 +481,12 @@
 text{* In this particular example we could have backchained with
 @{thm[source] Suc_leD}, too, but because the premise is more complicated than the conclusion this can easily lead to nontermination.
 
-\subsection{Finding Theorems}
-
-Command \isacom{find{\isacharunderscorekeyword}theorems} searches for specific theorems in the current
-theory. Search criteria include pattern matching on terms and on names.
-For details see the Isabelle/Isar Reference Manual~\cite{IsarRef}.
-\bigskip
+%\subsection{Finding Theorems}
+%
+%Command \isacom{find{\isacharunderscorekeyword}theorems} searches for specific theorems in the current
+%theory. Search criteria include pattern matching on terms and on names.
+%For details see the Isabelle/Isar Reference Manual~\cite{IsarRef}.
+%\bigskip
 
 \begin{warn}
 To ease readability we will drop the question marks
@@ -708,8 +730,8 @@
 apply(rename_tac u x y)
 defer
 (*>*)
-txt{* The induction is over @{prop"star r x y"} and we try to prove
-\mbox{@{prop"star r y z \<Longrightarrow> star r x z"}},
+txt{* The induction is over @{prop"star r x y"} (the first matching assumption)
+and we try to prove \mbox{@{prop"star r y z \<Longrightarrow> star r x z"}},
 which we abbreviate by @{prop"P x y"}. These are our two subgoals:
 @{subgoals[display,indent=0]}
 The first one is @{prop"P x x"}, the result of case @{thm[source]refl},
@@ -764,6 +786,95 @@
 conditions}. In rule inductions, these side-conditions appear as additional
 assumptions. The \isacom{for} clause seen in the definition of the reflexive
 transitive closure merely simplifies the form of the induction rule.
+
+
+\subsection*{Exercises}
+
+\begin{exercise}
+Formalise the following definition of palindromes
+\begin{itemize}
+\item The empty list and a singleton list are palindromes.
+\item If @{text xs} is a palindrome, so is @{term "a # xs @ [a]"}.
+\end{itemize}
+as an inductive predicate @{text "palindrome ::"} @{typ "'a list \<Rightarrow> bool"}
+and prove that @{prop "rev xs = xs"} if @{text xs} is a palindrome.
+\end{exercise}
+
+\exercise
+We could also have defined @{const star} as follows:
+*}
+
+inductive star' :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'a \<Rightarrow> bool" for r where
+refl': "star' r x x" |
+step': "star' r x y \<Longrightarrow> r y z \<Longrightarrow> star' r x z"
+
+text{*
+The single @{text r} step is performed after rather than before the @{text star'}
+steps. Prove @{prop "star' r x y \<Longrightarrow> star r x y"} and
+@{prop "star r x y \<Longrightarrow> star' r x y"}. You may need lemmas.
+Note that rule induction fails
+if the assumption about the inductive predicate is not the first assumption.
+\endexercise
+
+\begin{exercise}\label{exe:iter}
+Analogous to @{const star}, give an inductive definition of the @{text n}-fold iteration
+of a relation @{text r}: @{term "iter r n x y"} should hold if there are @{text x\<^sub>0}, \dots, @{text x\<^sub>n}
+such that @{prop"x = x\<^sub>0"}, @{prop"x\<^sub>n = y"} and @{text"r x\<^bsub>i\<^esub> x\<^bsub>i+1\<^esub>"} for
+all @{prop"i < n"}. Correct and prove the following claim:
+@{prop"star r x y \<Longrightarrow> iter r n x y"}.
+\end{exercise}
+
+\begin{exercise}
+A context-free grammar can be seen as an inductive definition where each
+nonterminal $A$ is an inductively defined predicate on lists of terminal
+symbols: $A(w)$ means that $w$ is in the language generated by $A$.
+For example, the production $S \to a S b$ can be viewed as the implication
+@{prop"S w \<Longrightarrow> S (a # w @ [b])"} where @{text a} and @{text b} are terminal symbols,
+i.e., elements of some alphabet. The alphabet can be defined like this:
+\isacom{datatype} @{text"alpha = a | b | \<dots>"}
+
+Define the two grammars (where $\varepsilon$ is the empty word)
+\[
+\begin{array}{r@ {\quad}c@ {\quad}l}
+S &\to& \varepsilon \quad\mid\quad aSb \quad\mid\quad SS \\
+T &\to& \varepsilon \quad\mid\quad TaTb
+\end{array}
+\]
+as two inductive predicates.
+If you think of @{text a} and @{text b} as ``@{text "("}'' and ``@{text ")"}'',
+the grammars define strings of balanced parentheses.
+Prove @{prop"T w \<Longrightarrow> S w"} and @{prop "S w \<Longrightarrow> T w"} separately and conclude
+@{prop "S w = T w"}.
+\end{exercise}
+
+\ifsem
+\begin{exercise}
+In \autoref{sec:AExp} we defined a recursive evaluation function
+@{text "aval :: aexp \<Rightarrow> state \<Rightarrow> val"}.
+Define an inductive evaluation predicate
+@{text "aval_rel :: aexp \<Rightarrow> state \<Rightarrow> val \<Rightarrow> bool"}
+and prove that it agrees with the recursive function:
+@{prop "aval_rel a s v \<Longrightarrow> aval a s = v"}, 
+@{prop "aval a s = v \<Longrightarrow> aval_rel a s v"} and thus
+\noquotes{@{prop [source] "aval_rel a s v \<longleftrightarrow> aval a s = v"}}.
+\end{exercise}
+
+\begin{exercise}
+Consider the stack machine from Chapter~3
+and recall the concept of \concept{stack underflow}
+from Exercise~\ref{exe:stack-underflow}.
+Define an inductive predicate
+@{text "ok :: nat \<Rightarrow> instr list \<Rightarrow> nat \<Rightarrow> bool"}
+such that @{text "ok n is n'"} means that with any initial stack of length
+@{text n} the instructions @{text "is"} can be executed
+without stack underflow and that the final stack has length @{text n'}.
+Prove that @{text ok} correctly computes the final stack size
+@{prop[display] "\<lbrakk>ok n is n'; length stk = n\<rbrakk> \<Longrightarrow> length (exec is s stk) = n'"}
+and that instruction sequences generated by @{text comp}
+cannot cause stack underflow: \ @{text "ok n (comp a) ?"} \ for
+some suitable value of @{text "?"}.
+\end{exercise}
+\fi
 *}
 (*<*)
 end
--- a/src/Doc/ProgProve/Types_and_funs.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/ProgProve/Types_and_funs.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -156,7 +156,7 @@
 
 fun div2 :: "nat \<Rightarrow> nat" where
 "div2 0 = 0" |
-"div2 (Suc 0) = Suc 0" |
+"div2 (Suc 0) = 0" |
 "div2 (Suc(Suc n)) = Suc(div2 n)"
 
 text{* does not just define @{const div2} but also proves a
@@ -171,16 +171,25 @@
 This customized induction rule can simplify inductive proofs. For example,
 *}
 
-lemma "div2(n+n) = n"
+lemma "div2(n) = n div 2"
 apply(induction n rule: div2.induct)
 
-txt{* yields the 3 subgoals
+txt{* (where the infix @{text div} is the predefined division operation)
+yields the 3 subgoals
 @{subgoals[display,margin=65]}
 An application of @{text auto} finishes the proof.
 Had we used ordinary structural induction on @{text n},
 the proof would have needed an additional
 case analysis in the induction step.
 
+This example leads to the following induction heuristic:
+\begin{quote}
+\emph{Let @{text f} be a recursive function.
+If the definition of @{text f} is more complicated
+than having one equation for each constructor of some datatype,
+then properties of @{text f} are best proved via @{text "f.induct"}.}
+\end{quote}
+
 The general case is often called \concept{computation induction},
 because the induction follows the (terminating!) computation.
 For every defining equation
@@ -200,6 +209,35 @@
 But note that the induction rule does not mention @{text f} at all,
 except in its name, and is applicable independently of @{text f}.
 
+
+\subsection*{Exercises}
+
+\begin{exercise}
+Starting from the type @{text "'a tree"} defined in the text, define
+a function @{text "contents ::"} @{typ "'a tree \<Rightarrow> 'a list"}
+that collects all values in a tree in a list, in any order,
+without removing duplicates.
+Then define a function @{text "treesum ::"} @{typ "nat tree \<Rightarrow> nat"}
+that sums up all values in a tree of natural numbers
+and prove @{prop "treesum t = listsum(contents t)"}.
+\end{exercise}
+
+\begin{exercise}
+Define a new type @{text "'a tree2"} of binary trees where values are also
+stored in the leaves of the tree.  Also reformulate the
+@{const mirror} function accordingly. Define two functions
+@{text "pre_order"} and @{text "post_order"} of type @{text "'a tree2 \<Rightarrow> 'a list"}
+that traverse a tree and collect all stored values in the respective order in
+a list. Prove @{prop "pre_order (mirror t) = rev (post_order t)"}.
+\end{exercise}
+
+\begin{exercise}
+Define a function @{text "intersperse ::"} @{typ "'a \<Rightarrow> 'a list \<Rightarrow> 'a list"}
+such that @{text "intersperse a [x\<^sub>1, ..., x\<^sub>n] = [x\<^sub>1, a, x\<^sub>2, a, ..., a, x\<^sub>n]"}.
+Now prove that @{prop "map f (intersperse a xs) = intersperse (f a) (map f xs)"}.
+\end{exercise}
+
+
 \section{Induction Heuristics}
 
 We have already noted that theorems about recursive functions are proved by
@@ -307,6 +345,18 @@
 matters in some cases. The variables that need to be quantified are typically
 those that change in recursive calls.
 
+
+\subsection*{Exercises}
+
+\begin{exercise}
+Write a tail-recursive variant of the @{text add} function on @{typ nat}:
+@{term "itadd :: nat \<Rightarrow> nat \<Rightarrow> nat"}.
+Tail-recursive means that in the recursive case, @{text itadd} needs to call
+itself directly: \mbox{@{term"itadd (Suc m) n"}} @{text"= itadd \<dots>"}.
+Prove @{prop "itadd m n = add m n"}.
+\end{exercise}
+
+
 \section{Simplification}
 
 So far we have talked a lot about simplifying terms without explaining the concept. \concept{Simplification} means
@@ -481,9 +531,37 @@
 splits all case-expressions over natural numbers. For an arbitrary
 datatype @{text t} it is @{text "t.split"} instead of @{thm[source] nat.split}.
 Method @{text auto} can be modified in exactly the same way.
+The modifier @{text "split:"} can be followed by multiple names.
+Splitting if or case-expressions in the assumptions requires 
+@{text "split: if_splits"} or @{text "split: t.splits"}.
 
 
-\subsection{Exercises}
+\subsection*{Exercises}
+
+\exercise\label{exe:tree0}
+Define a datatype @{text tree0} of binary tree skeletons which do not store
+any information, neither in the inner nodes nor in the leaves.
+Define a function @{text "nodes :: tree0 \<Rightarrow> nat"} that counts the number of
+all nodes (inner nodes and leaves) in such a tree.
+Consider the following recursive function:
+*}
+(*<*)
+datatype tree0 = Tip | Node tree0 tree0
+(*>*)
+fun explode :: "nat \<Rightarrow> tree0 \<Rightarrow> tree0" where
+"explode 0 t = t" |
+"explode (Suc n) t = explode n (Node t t)"
+
+text {*
+Find an equation expressing the size of a tree after exploding it
+(\noquotes{@{term [source] "nodes (explode n t)"}}) as a function
+of @{term "nodes t"} and @{text n}. Prove your equation.
+You may use the usual arithmetic operators including the exponentiation
+operator ``@{text"^"}''. For example, \noquotes{@{prop [source] "2 ^ 2 = 4"}}.
+
+Hint: simplifying with the list of theorems @{thm[source] algebra_simps}
+takes care of common algebraic properties of the arithmetic operators.
+\endexercise
 
 \exercise
 Define arithmetic expressions in one variable over integers (type @{typ int})
@@ -506,8 +584,7 @@
 that transforms an expression into a polynomial. This may require auxiliary
 functions. Prove that @{text coeffs} preserves the value of the expression:
 \mbox{@{prop"evalp (coeffs e) x = eval e x"}.}
-Hint: simplifying with @{thm[source] algebra_simps} takes care of
-common algebraic properties of @{text "+"} and @{text "*"}.
+Hint: consider the hint in Exercise~\ref{exe:tree0}.
 \endexercise
 *}
 (*<*)
--- a/src/Doc/ProgProve/document/intro-isabelle.tex	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/ProgProve/document/intro-isabelle.tex	Thu Dec 05 17:58:03 2013 +0100
@@ -16,7 +16,7 @@
 of recursive functions.
 \ifsem
 \autoref{sec:CaseStudyExp} contains a
-little case study: arithmetic and boolean expressions, their evaluation,
+small case study: arithmetic and boolean expressions, their evaluation,
 optimization and compilation.
 \fi
 \autoref{ch:Logic} introduces the rest of HOL: the
@@ -35,8 +35,8 @@
 % in the intersection of computation and logic.
 
 This introduction to the core of Isabelle is intentionally concrete and
-example-based: we concentrate on examples that illustrate the typical cases;
-we do not explain the general case if it can be inferred from the examples.
+example-based: we concentrate on examples that illustrate the typical cases
+without explaining the general case if it can be inferred from the examples.
 We cover the essentials (from a functional programming point of view) as
 quickly and compactly as possible.
 \ifsem
@@ -46,7 +46,7 @@
 For a comprehensive treatment of all things Isabelle we recommend the
 \emph{Isabelle/Isar Reference Manual}~\cite{IsarRef}, which comes with the
 Isabelle distribution.
-The tutorial by Nipkow, Paulson and Wenzel~\cite{LNCS2283} (in its updated version that comes with the Isabelle distribution) is still recommended for the wealth of examples and material, but its proof style is outdated. In particular it fails to cover the structured proof language Isar.
+The tutorial by Nipkow, Paulson and Wenzel~\cite{LNCS2283} (in its updated version that comes with the Isabelle distribution) is still recommended for the wealth of examples and material, but its proof style is outdated. In particular it does not cover the structured proof language Isar.
 
 %This introduction to Isabelle has grown out of many years of teaching
 %Isabelle courses. 
@@ -88,7 +88,7 @@
 
 \ifsem\else
 \paragraph{Acknowledgements}
-I wish to thank the following people for their comments
-on this document:
-Florian Haftmann, Ren\'{e} Thiemann and Christian Sternagel.
+I wish to thank the following people for their comments on this document:
+Florian Haftmann, Ren\'{e} Thiemann, Sean Seefried, Christian Sternagel
+and Carl Witty.
 \fi
\ No newline at end of file
--- a/src/Doc/Sledgehammer/document/root.tex	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Sledgehammer/document/root.tex	Thu Dec 05 17:58:03 2013 +0100
@@ -121,8 +121,8 @@
 
 For Isabelle/jEdit users, Sledgehammer provides an automatic mode that can be
 enabled via the ``Auto Sledgehammer'' option under ``Plugins > Plugin Options >
-Isabelle > General.'' In this mode, Sledgehammer is run on every newly entered
-theorem.
+Isabelle > General.'' In this mode, a reduced version of Sledgehammer is run on
+every newly entered theorem for a few seconds.
 
 \newbox\boxA
 \setbox\boxA=\hbox{\texttt{NOSPAM}}
@@ -719,12 +719,16 @@
 If you use Isabelle/jEdit, Sledgehammer also provides an automatic mode that can
 be enabled via the ``Auto Sledgehammer'' option under ``Plugins > Plugin Options
 > Isabelle > General.'' For automatic runs, only the first prover set using
-\textit{provers} (\S\ref{mode-of-operation}) is considered, fewer facts are
-passed to the prover, \textit{slice} (\S\ref{mode-of-operation}) is disabled,
-\textit{strict} (\S\ref{problem-encoding}) is enabled, \textit{verbose}
-(\S\ref{output-format}) and \textit{debug} (\S\ref{output-format}) are disabled,
-and \textit{timeout} (\S\ref{timeouts}) is superseded by the ``Auto Time Limit''
-option in jEdit. Sledgehammer's output is also more concise.
+\textit{provers} (\S\ref{mode-of-operation}) is considered (typically E),
+\textit{slice} (\S\ref{mode-of-operation}) is disabled,
+\textit{minimize} (\S\ref{mode-of-operation}) is disabled, fewer facts are
+passed to the prover, \textit{fact\_filter} (\S\ref{relevance-filter}) is set to
+\textit{mepo}, \textit{strict} (\S\ref{problem-encoding}) is enabled,
+\textit{verbose} (\S\ref{output-format}) and \textit{debug}
+(\S\ref{output-format}) are disabled, \textit{preplay\_timeout}
+(\S\ref{timeouts}) is set to 0, and \textit{timeout} (\S\ref{timeouts}) is
+superseded by the ``Auto Time Limit'' option in jEdit. Sledgehammer's output is
+also more concise.
 
 \subsection{Metis}
 
@@ -999,8 +1003,7 @@
 number of facts. For SMT solvers, several slices are tried with the same options
 each time but fewer and fewer facts. According to benchmarks with a timeout of
 30 seconds, slicing is a valuable optimization, and you should probably leave it
-enabled unless you are conducting experiments. This option is implicitly
-disabled for (short) automatic runs.
+enabled unless you are conducting experiments.
 
 \nopagebreak
 {\small See also \textit{verbose} (\S\ref{output-format}).}
@@ -1035,6 +1038,8 @@
 simultaneously. The files are identified by the prefixes \texttt{prob\_} and
 \texttt{mash\_}; you may safely remove them after Sledgehammer has run.
 
+\textbf{Warning:} This option is not thread-safe. Use at your own risk.
+
 \nopagebreak
 {\small See also \textit{debug} (\S\ref{output-format}).}
 \end{enum}
@@ -1282,14 +1287,12 @@
 
 \opfalse{verbose}{quiet}
 Specifies whether the \textbf{sledgehammer} command should explain what it does.
-This option is implicitly disabled for automatic runs.
 
 \opfalse{debug}{no\_debug}
 Specifies whether Sledgehammer should display additional debugging information
 beyond what \textit{verbose} already displays. Enabling \textit{debug} also
 enables \textit{verbose} and \textit{blocking} (\S\ref{mode-of-operation})
-behind the scenes. The \textit{debug} option is implicitly disabled for
-automatic runs.
+behind the scenes.
 
 \nopagebreak
 {\small See also \textit{spy} (\S\ref{mode-of-operation}) and
@@ -1349,8 +1352,6 @@
 \opdefault{timeout}{float\_or\_none}{\upshape 30}
 Specifies the maximum number of seconds that the automatic provers should spend
 searching for a proof. This excludes problem preparation and is a soft limit.
-For automatic runs, the ``Auto Time Limit'' option under ``Plugins > Plugin
-Options > Isabelle > General'' is used instead.
 
 \opdefault{preplay\_timeout}{float\_or\_none}{\upshape 3}
 Specifies the maximum number of seconds that \textit{metis} or \textit{smt}
--- a/src/Doc/System/Sessions.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/System/Sessions.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -399,7 +399,7 @@
   \smallskip Build some session images with cleanup of their
   descendants, while retaining their ancestry:
 \begin{ttbox}
-isabelle build -b -c HOL-Boogie HOL-SPARK
+isabelle build -b -c HOL-Algebra HOL-Word
 \end{ttbox}
 
   \smallskip Clean all sessions without building anything:
--- a/src/Doc/Tutorial/document/rules.tex	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Tutorial/document/rules.tex	Thu Dec 05 17:58:03 2013 +0100
@@ -1,4 +1,4 @@
-%!TEX root = ../tutorial.tex
+%!TEX root = root.tex
 \chapter{The Rules of the Game}
 \label{chap:rules}
  
@@ -33,6 +33,8 @@
 one symbol only.  For predicate logic this can be 
 done, but when users define their own concepts they typically 
 have to refer to other symbols as well.  It is best not to be dogmatic.
+Our system is not based on pure natural deduction, but includes elements from the sequent calculus 
+and free-variable tableaux.
 
 Natural deduction generally deserves its name.  It is easy to use.  Each
 proof step consists of identifying the outermost symbol of a formula and
@@ -240,13 +242,14 @@
 of a conjunction.  Rules of this sort (where the conclusion is a subformula of a
 premise) are called \textbf{destruction} rules because they take apart and destroy
 a premise.%
-\footnote{This Isabelle terminology has no counterpart in standard logic texts, 
+\footnote{This Isabelle terminology is not used in standard logic texts, 
 although the distinction between the two forms of elimination rule is well known. 
 Girard \cite[page 74]{girard89},\index{Girard, Jean-Yves|fnote}
 for example, writes ``The elimination rules 
 [for $\disj$ and $\exists$] are very
 bad.  What is catastrophic about them is the parasitic presence of a formula [$R$]
-which has no structural link with the formula which is eliminated.''}
+which has no structural link with the formula which is eliminated.''
+These Isabelle rules are inspired by the sequent calculus.}
 
 The first proof step applies conjunction introduction, leaving 
 two subgoals: 
--- a/src/Doc/Tutorial/document/sets.tex	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/Tutorial/document/sets.tex	Thu Dec 05 17:58:03 2013 +0100
@@ -660,8 +660,8 @@
 \textbf{Composition} of relations (the infix \sdx{O}) is also
 available: 
 \begin{isabelle}
-r\ O\ s\ \isasymequiv\ \isacharbraceleft(x,z).\ \isasymexists y.\ (x,y)\ \isasymin\ s\ \isasymand\ (y,z)\ \isasymin\ r\isacharbraceright
-\rulenamedx{rel_comp_def}
+r\ O\ s\ = \isacharbraceleft(x,z).\ \isasymexists y.\ (x,y)\ \isasymin\ s\ \isasymand\ (y,z)\ \isasymin\ r\isacharbraceright
+\rulenamedx{relcomp_unfold}
 \end{isabelle}
 %
 This is one of the many lemmas proved about these concepts: 
@@ -677,7 +677,7 @@
 \isasymlbrakk r\isacharprime\ \isasymsubseteq\ r;\ s\isacharprime\
 \isasymsubseteq\ s\isasymrbrakk\ \isasymLongrightarrow\ r\isacharprime\ O\
 s\isacharprime\ \isasymsubseteq\ r\ O\ s%
-\rulename{rel_comp_mono}
+\rulename{relcomp_mono}
 \end{isabelle}
 
 \indexbold{converse!of a relation}%
@@ -695,7 +695,7 @@
 Here is a typical law proved about converse and composition: 
 \begin{isabelle}
 (r\ O\ s)\isasyminverse\ =\ s\isasyminverse\ O\ r\isasyminverse
-\rulename{converse_rel_comp}
+\rulename{converse_relcomp}
 \end{isabelle}
 
 \indexbold{image!under a relation}%
--- a/src/Doc/manual.bib	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/Doc/manual.bib	Thu Dec 05 17:58:03 2013 +0100
@@ -194,7 +194,7 @@
 @incollection{basin91,
   author	= {David Basin and Matt Kaufmann},
   title		= {The {Boyer-Moore} Prover and {Nuprl}: An Experimental
-		   Comparison}, 
+		   Comparison},
   crossref	= {huet-plotkin91},
   pages		= {89-119}}
 
@@ -472,7 +472,7 @@
 @book{constable86,
   author	= {R. L. Constable and others},
   title		= {Implementing Mathematics with the Nuprl Proof
-		 Development System}, 
+		 Development System},
   publisher	= Prentice,
   year		= 1986}
 
@@ -505,7 +505,7 @@
 @incollection{dybjer91,
   author	= {Peter Dybjer},
   title		= {Inductive Sets and Families in {Martin-L{\"o}f's} Type
-		  Theory and Their Set-Theoretic Semantics}, 
+		  Theory and Their Set-Theoretic Semantics},
   crossref	= {huet-plotkin91},
   pages		= {280-306}}
 
@@ -533,7 +533,7 @@
 @InProceedings{felty91a,
   Author	= {Amy Felty},
   Title		= {A Logic Program for Transforming Sequent Proofs to Natural
-		  Deduction Proofs}, 
+		  Deduction Proofs},
   crossref	= {extensions91},
   pages		= {157-178}}
 
@@ -566,9 +566,9 @@
 
 @inproceedings{OBJ,
   author	= {K. Futatsugi and J.A. Goguen and Jean-Pierre Jouannaud
-		 and J. Meseguer}, 
+		 and J. Meseguer},
   title		= {Principles of {OBJ2}},
-  booktitle	= POPL, 
+  booktitle	= POPL,
   year		= 1985,
   pages		= {52-66}}
 
@@ -576,7 +576,7 @@
 
 @book{gallier86,
   author	= {J. H. Gallier},
-  title		= {Logic for Computer Science: 
+  title		= {Logic for Computer Science:
 		Foundations of Automatic Theorem Proving},
   year		= 1986,
   publisher	= {Harper \& Row}}
@@ -605,8 +605,8 @@
   author	= {Jean-Yves Girard},
   title		= {Proofs and Types},
   year		= 1989,
-  publisher	= CUP, 
-  note		= {Translated by Yves LaFont and Paul Taylor}}
+  publisher	= CUP,
+  note		= {Translated by Yves Lafont and Paul Taylor}}
 
 @Book{mgordon-hol,
   editor	= {M. J. C. Gordon and T. F. Melham},
@@ -777,21 +777,21 @@
 
 @article{huet78,
   author	= {G. P. Huet and B. Lang},
-  title		= {Proving and Applying Program Transformations Expressed with 
+  title		= {Proving and Applying Program Transformations Expressed with
 			Second-Order Patterns},
   journal	= acta,
   volume	= 11,
-  year		= 1978, 
+  year		= 1978,
   pages		= {31-55}}
 
 @inproceedings{huet88,
   author	= {G{\'e}rard Huet},
   title		= {Induction Principles Formalized in the {Calculus of
-		 Constructions}}, 
+		 Constructions}},
   booktitle	= {Programming of Future Generation Computers},
   editor	= {K. Fuchi and M. Nivat},
   year		= 1988,
-  pages		= {205-216}, 
+  pages		= {205-216},
   publisher	= {Elsevier}}
 
 @inproceedings{Huffman-Kuncar:2013:lifting_transfer,
@@ -843,7 +843,7 @@
 %K
 
 @InProceedings{kammueller-locales,
-  author = 	 {Florian Kamm{\"u}ller and Markus Wenzel and 
+  author = 	 {Florian Kamm{\"u}ller and Markus Wenzel and
                   Lawrence C. Paulson},
   title = 	 {Locales: A Sectioning Concept for {Isabelle}},
   crossref =	 {tphols99}}
@@ -926,7 +926,7 @@
   note = "\url{https://github.com/frelindb/agsyHOL}"}
 
 @incollection{lochbihler-2010,
-  title = "Coinduction",
+  title = "Coinductive",
   author = "Andreas Lochbihler",
   booktitle = "The Archive of Formal Proofs",
   editor = "Gerwin Klein and Tobias Nipkow and Lawrence C. Paulson",
@@ -944,7 +944,7 @@
   author	= {Gavin Lowe},
   title		= {Breaking and Fixing the {Needham}-{Schroeder} Public-Key
 		  Protocol using {CSP} and {FDR}},
-  booktitle = 	 {Tools and Algorithms for the Construction and Analysis 
+  booktitle = 	 {Tools and Algorithms for the Construction and Analysis
                   of Systems:  second international workshop, TACAS '96},
   editor =	 {T. Margaria and B. Steffen},
   series =	 {LNCS 1055},
@@ -978,7 +978,7 @@
 @incollection{melham89,
   author	= {Thomas F. Melham},
   title		= {Automating Recursive Type Definitions in Higher Order
-		 Logic}, 
+		 Logic},
   pages		= {341-386},
   crossref	= {birtwistle89}}
 
@@ -1057,7 +1057,7 @@
 
 @InProceedings{NaraschewskiW-TPHOLs98,
   author	= {Wolfgang Naraschewski and Markus Wenzel},
-  title		= 
+  title		=
 {Object-Oriented Verification based on Record Subtyping in
                   Higher-Order Logic},
   crossref      = {tphols98}}
@@ -1190,8 +1190,8 @@
 @book{nordstrom90,
   author	= {Bengt {Nordstr{\"o}m} and Kent Petersson and Jan Smith},
   title		= {Programming in {Martin-L{\"o}f}'s Type Theory.  An
-		 Introduction}, 
-  publisher	= {Oxford University Press}, 
+		 Introduction},
+  publisher	= {Oxford University Press},
   year		= 1990}
 
 %O
@@ -1251,7 +1251,7 @@
 @InProceedings{paulson-COLOG,
   author	= {Lawrence C. Paulson},
   title		= {A Formulation of the Simple Theory of Types (for
-		 {Isabelle})}, 
+		 {Isabelle})},
   pages		= {246-274},
   crossref	= {colog88},
   url		= {\url{http://www.cl.cam.ac.uk/Research/Reports/TR175-lcp-simple.dvi.gz}}}
@@ -1304,7 +1304,7 @@
 %replaces paulson-final
 @Article{paulson-mscs,
   author	= {Lawrence C. Paulson},
-  title = 	 {Final Coalgebras as Greatest Fixed Points 
+  title = 	 {Final Coalgebras as Greatest Fixed Points
                   in {ZF} Set Theory},
   journal	= {Mathematical Structures in Computer Science},
   year		= 1999,
@@ -1337,9 +1337,9 @@
   crossref	= {milner-fest}}
 
 @book{milner-fest,
-  title		= {Proof, Language, and Interaction: 
+  title		= {Proof, Language, and Interaction:
                    Essays in Honor of {Robin Milner}},
-  booktitle	= {Proof, Language, and Interaction: 
+  booktitle	= {Proof, Language, and Interaction:
                    Essays in Honor of {Robin Milner}},
   publisher	= MIT,
   year		= 2000,
@@ -1427,7 +1427,7 @@
 @book{paulson87,
   author	= {Lawrence C. Paulson},
   title		= {Logic and Computation: Interactive proof with Cambridge
-		 LCF}, 
+		 LCF},
   year		= 1987,
   publisher	= CUP}
 
@@ -1470,7 +1470,7 @@
 @article{pelletier86,
   author	= {F. J. Pelletier},
   title		= {Seventy-five Problems for Testing Automatic Theorem
-		 Provers}, 
+		 Provers},
   journal	= JAR,
   volume	= 2,
   pages		= {191-216},
@@ -1486,13 +1486,13 @@
   publisher	= CUP,
   year		= 1993}
 
-@Article{pitts94,  
+@Article{pitts94,
   author	= {Andrew M. Pitts},
   title		= {A Co-induction Principle for Recursively Defined Domains},
   journal	= TCS,
-  volume	= 124, 
+  volume	= 124,
   pages		= {195-219},
-  year		= 1994} 
+  year		= 1994}
 
 @Article{plaisted90,
   author	= {David A. Plaisted},
@@ -1561,7 +1561,7 @@
 @inproceedings{saaltink-fme,
   author	= {Mark Saaltink and Sentot Kromodimoeljo and Bill Pase and
 		 Dan Craigen and Irwin Meisels},
-  title		= {An {EVES} Data Abstraction Example}, 
+  title		= {An {EVES} Data Abstraction Example},
   pages		= {578-596},
   crossref	= {fme93}}
 
@@ -1897,7 +1897,7 @@
   author	= {A. N. Whitehead and B. Russell},
   title		= {Principia Mathematica},
   year		= 1962,
-  publisher	= CUP, 
+  publisher	= CUP,
   note		= {Paperback edition to *56,
   abridged from the 2nd edition (1927)}}
 
@@ -1982,9 +1982,9 @@
 @book{birtwistle89,
   editor	= {Graham Birtwistle and P. A. Subrahmanyam},
   title		= {Current Trends in Hardware Verification and Automated
-		 Theorem Proving}, 
+		 Theorem Proving},
   booktitle	= {Current Trends in Hardware Verification and Automated
-		 Theorem Proving}, 
+		 Theorem Proving},
   publisher	= {Springer},
   year		= 1989}
 
@@ -1997,9 +1997,9 @@
 
 @Proceedings{cade12,
   editor	= {Alan Bundy},
-  title		= {Automated Deduction --- {CADE}-12 
+  title		= {Automated Deduction --- {CADE}-12
 		  International Conference},
-  booktitle	= {Automated Deduction --- {CADE}-12 
+  booktitle	= {Automated Deduction --- {CADE}-12
 		  International Conference},
   year		= 1994,
   series	= {LNAI 814},
@@ -2059,7 +2059,7 @@
   title		= {Extensions of Logic Programming},
   booktitle	= {Extensions of Logic Programming},
   year		= 1991,
-  series	= {LNAI 475}, 
+  series	= {LNAI 475},
   publisher	= {Springer}}
 
 @proceedings{cade10,
@@ -2078,9 +2078,9 @@
   year		= 1993}
 
 @book{wos-fest,
-  title		= {Automated Reasoning and its Applications: 
+  title		= {Automated Reasoning and its Applications:
 			Essays in Honor of {Larry Wos}},
-  booktitle	= {Automated Reasoning and its Applications: 
+  booktitle	= {Automated Reasoning and its Applications:
 			Essays in Honor of {Larry Wos}},
   publisher	= MIT,
   year		= 1997,
--- a/src/HOL/ATP.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/ATP.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -18,34 +18,34 @@
 
 subsection {* Higher-order reasoning helpers *}
 
-definition fFalse :: bool where [no_atp]:
+definition fFalse :: bool where
 "fFalse \<longleftrightarrow> False"
 
-definition fTrue :: bool where [no_atp]:
+definition fTrue :: bool where
 "fTrue \<longleftrightarrow> True"
 
-definition fNot :: "bool \<Rightarrow> bool" where [no_atp]:
+definition fNot :: "bool \<Rightarrow> bool" where
 "fNot P \<longleftrightarrow> \<not> P"
 
-definition fComp :: "('a \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> bool" where [no_atp]:
+definition fComp :: "('a \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> bool" where
 "fComp P = (\<lambda>x. \<not> P x)"
 
-definition fconj :: "bool \<Rightarrow> bool \<Rightarrow> bool" where [no_atp]:
+definition fconj :: "bool \<Rightarrow> bool \<Rightarrow> bool" where
 "fconj P Q \<longleftrightarrow> P \<and> Q"
 
-definition fdisj :: "bool \<Rightarrow> bool \<Rightarrow> bool" where [no_atp]:
+definition fdisj :: "bool \<Rightarrow> bool \<Rightarrow> bool" where
 "fdisj P Q \<longleftrightarrow> P \<or> Q"
 
-definition fimplies :: "bool \<Rightarrow> bool \<Rightarrow> bool" where [no_atp]:
+definition fimplies :: "bool \<Rightarrow> bool \<Rightarrow> bool" where
 "fimplies P Q \<longleftrightarrow> (P \<longrightarrow> Q)"
 
-definition fequal :: "'a \<Rightarrow> 'a \<Rightarrow> bool" where [no_atp]:
+definition fequal :: "'a \<Rightarrow> 'a \<Rightarrow> bool" where
 "fequal x y \<longleftrightarrow> (x = y)"
 
-definition fAll :: "('a \<Rightarrow> bool) \<Rightarrow> bool" where [no_atp]:
+definition fAll :: "('a \<Rightarrow> bool) \<Rightarrow> bool" where
 "fAll P \<longleftrightarrow> All P"
 
-definition fEx :: "('a \<Rightarrow> bool) \<Rightarrow> bool" where [no_atp]:
+definition fEx :: "('a \<Rightarrow> bool) \<Rightarrow> bool" where
 "fEx P \<longleftrightarrow> Ex P"
 
 lemma fTrue_ne_fFalse: "fFalse \<noteq> fTrue"
--- a/src/HOL/Archimedean_Field.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/Archimedean_Field.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -129,12 +129,8 @@
   fix y z assume
     "of_int y \<le> x \<and> x < of_int (y + 1)"
     "of_int z \<le> x \<and> x < of_int (z + 1)"
-  then have
-    "of_int y \<le> x" "x < of_int (y + 1)"
-    "of_int z \<le> x" "x < of_int (z + 1)"
-    by simp_all
-  from le_less_trans [OF `of_int y \<le> x` `x < of_int (z + 1)`]
-       le_less_trans [OF `of_int z \<le> x` `x < of_int (y + 1)`]
+  with le_less_trans [of "of_int y" "x" "of_int (z + 1)"]
+       le_less_trans [of "of_int z" "x" "of_int (y + 1)"]
   show "y = z" by (simp del: of_int_add)
 qed
 
@@ -208,8 +204,8 @@
 lemma floor_numeral [simp]: "floor (numeral v) = numeral v"
   using floor_of_int [of "numeral v"] by simp
 
-lemma floor_neg_numeral [simp]: "floor (neg_numeral v) = neg_numeral v"
-  using floor_of_int [of "neg_numeral v"] by simp
+lemma floor_neg_numeral [simp]: "floor (- numeral v) = - numeral v"
+  using floor_of_int [of "- numeral v"] by simp
 
 lemma zero_le_floor [simp]: "0 \<le> floor x \<longleftrightarrow> 0 \<le> x"
   by (simp add: le_floor_iff)
@@ -222,7 +218,7 @@
   by (simp add: le_floor_iff)
 
 lemma neg_numeral_le_floor [simp]:
-  "neg_numeral v \<le> floor x \<longleftrightarrow> neg_numeral v \<le> x"
+  "- numeral v \<le> floor x \<longleftrightarrow> - numeral v \<le> x"
   by (simp add: le_floor_iff)
 
 lemma zero_less_floor [simp]: "0 < floor x \<longleftrightarrow> 1 \<le> x"
@@ -236,7 +232,7 @@
   by (simp add: less_floor_iff)
 
 lemma neg_numeral_less_floor [simp]:
-  "neg_numeral v < floor x \<longleftrightarrow> neg_numeral v + 1 \<le> x"
+  "- numeral v < floor x \<longleftrightarrow> - numeral v + 1 \<le> x"
   by (simp add: less_floor_iff)
 
 lemma floor_le_zero [simp]: "floor x \<le> 0 \<longleftrightarrow> x < 1"
@@ -250,7 +246,7 @@
   by (simp add: floor_le_iff)
 
 lemma floor_le_neg_numeral [simp]:
-  "floor x \<le> neg_numeral v \<longleftrightarrow> x < neg_numeral v + 1"
+  "floor x \<le> - numeral v \<longleftrightarrow> x < - numeral v + 1"
   by (simp add: floor_le_iff)
 
 lemma floor_less_zero [simp]: "floor x < 0 \<longleftrightarrow> x < 0"
@@ -264,7 +260,7 @@
   by (simp add: floor_less_iff)
 
 lemma floor_less_neg_numeral [simp]:
-  "floor x < neg_numeral v \<longleftrightarrow> x < neg_numeral v"
+  "floor x < - numeral v \<longleftrightarrow> x < - numeral v"
   by (simp add: floor_less_iff)
 
 text {* Addition and subtraction of integers *}
@@ -276,10 +272,6 @@
     "floor (x + numeral v) = floor x + numeral v"
   using floor_add_of_int [of x "numeral v"] by simp
 
-lemma floor_add_neg_numeral [simp]:
-    "floor (x + neg_numeral v) = floor x + neg_numeral v"
-  using floor_add_of_int [of x "neg_numeral v"] by simp
-
 lemma floor_add_one [simp]: "floor (x + 1) = floor x + 1"
   using floor_add_of_int [of x 1] by simp
 
@@ -290,10 +282,6 @@
   "floor (x - numeral v) = floor x - numeral v"
   using floor_diff_of_int [of x "numeral v"] by simp
 
-lemma floor_diff_neg_numeral [simp]:
-  "floor (x - neg_numeral v) = floor x - neg_numeral v"
-  using floor_diff_of_int [of x "neg_numeral v"] by simp
-
 lemma floor_diff_one [simp]: "floor (x - 1) = floor x - 1"
   using floor_diff_of_int [of x 1] by simp
 
@@ -357,8 +345,8 @@
 lemma ceiling_numeral [simp]: "ceiling (numeral v) = numeral v"
   using ceiling_of_int [of "numeral v"] by simp
 
-lemma ceiling_neg_numeral [simp]: "ceiling (neg_numeral v) = neg_numeral v"
-  using ceiling_of_int [of "neg_numeral v"] by simp
+lemma ceiling_neg_numeral [simp]: "ceiling (- numeral v) = - numeral v"
+  using ceiling_of_int [of "- numeral v"] by simp
 
 lemma ceiling_le_zero [simp]: "ceiling x \<le> 0 \<longleftrightarrow> x \<le> 0"
   by (simp add: ceiling_le_iff)
@@ -371,7 +359,7 @@
   by (simp add: ceiling_le_iff)
 
 lemma ceiling_le_neg_numeral [simp]:
-  "ceiling x \<le> neg_numeral v \<longleftrightarrow> x \<le> neg_numeral v"
+  "ceiling x \<le> - numeral v \<longleftrightarrow> x \<le> - numeral v"
   by (simp add: ceiling_le_iff)
 
 lemma ceiling_less_zero [simp]: "ceiling x < 0 \<longleftrightarrow> x \<le> -1"
@@ -385,7 +373,7 @@
   by (simp add: ceiling_less_iff)
 
 lemma ceiling_less_neg_numeral [simp]:
-  "ceiling x < neg_numeral v \<longleftrightarrow> x \<le> neg_numeral v - 1"
+  "ceiling x < - numeral v \<longleftrightarrow> x \<le> - numeral v - 1"
   by (simp add: ceiling_less_iff)
 
 lemma zero_le_ceiling [simp]: "0 \<le> ceiling x \<longleftrightarrow> -1 < x"
@@ -399,7 +387,7 @@
   by (simp add: le_ceiling_iff)
 
 lemma neg_numeral_le_ceiling [simp]:
-  "neg_numeral v \<le> ceiling x \<longleftrightarrow> neg_numeral v - 1 < x"
+  "- numeral v \<le> ceiling x \<longleftrightarrow> - numeral v - 1 < x"
   by (simp add: le_ceiling_iff)
 
 lemma zero_less_ceiling [simp]: "0 < ceiling x \<longleftrightarrow> 0 < x"
@@ -413,7 +401,7 @@
   by (simp add: less_ceiling_iff)
 
 lemma neg_numeral_less_ceiling [simp]:
-  "neg_numeral v < ceiling x \<longleftrightarrow> neg_numeral v < x"
+  "- numeral v < ceiling x \<longleftrightarrow> - numeral v < x"
   by (simp add: less_ceiling_iff)
 
 text {* Addition and subtraction of integers *}
@@ -425,10 +413,6 @@
     "ceiling (x + numeral v) = ceiling x + numeral v"
   using ceiling_add_of_int [of x "numeral v"] by simp
 
-lemma ceiling_add_neg_numeral [simp]:
-    "ceiling (x + neg_numeral v) = ceiling x + neg_numeral v"
-  using ceiling_add_of_int [of x "neg_numeral v"] by simp
-
 lemma ceiling_add_one [simp]: "ceiling (x + 1) = ceiling x + 1"
   using ceiling_add_of_int [of x 1] by simp
 
@@ -439,10 +423,6 @@
   "ceiling (x - numeral v) = ceiling x - numeral v"
   using ceiling_diff_of_int [of x "numeral v"] by simp
 
-lemma ceiling_diff_neg_numeral [simp]:
-  "ceiling (x - neg_numeral v) = ceiling x - neg_numeral v"
-  using ceiling_diff_of_int [of x "neg_numeral v"] by simp
-
 lemma ceiling_diff_one [simp]: "ceiling (x - 1) = ceiling x - 1"
   using ceiling_diff_of_int [of x 1] by simp
 
--- a/src/HOL/BNF/BNF.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -10,7 +10,7 @@
 header {* Bounded Natural Functors for (Co)datatypes *}
 
 theory BNF
-imports More_BNFs BNF_LFP BNF_GFP Coinduction
+imports Countable_Set_Type BNF_LFP BNF_GFP BNF_Decl
 begin
 
 hide_const (open) image2 image2p vimage2p Gr Grp collect fsts snds setl setr 
--- a/src/HOL/BNF/BNF_Comp.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF_Comp.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -11,6 +11,9 @@
 imports Basic_BNFs
 begin
 
+lemma wpull_id: "wpull UNIV B1 B2 id id id id"
+unfolding wpull_def by simp
+
 lemma empty_natural: "(\<lambda>_. {}) o f = image g o (\<lambda>_. {})"
 by (rule ext) simp
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/BNF/BNF_Decl.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,18 @@
+(*  Title:      HOL/BNF/BNF_Decl.thy
+    Author:     Dmitriy Traytel, TU Muenchen
+    Copyright   2013
+
+Axiomatic declaration of bounded natural functors.
+*)
+
+header {* Axiomatic declaration of Bounded Natural Functors *}
+
+theory BNF_Decl
+imports BNF_Def
+keywords
+  "bnf_decl" :: thy_decl
+begin
+
+ML_file "Tools/bnf_decl.ML"
+
+end
--- a/src/HOL/BNF/BNF_Def.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF_Def.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -9,6 +9,8 @@
 
 theory BNF_Def
 imports BNF_Util
+   (*FIXME: register fundef_cong attribute in an interpretation to remove this dependency*)
+  FunDef
 keywords
   "print_bnfs" :: diag and
   "bnf" :: thy_goal
@@ -190,17 +192,17 @@
 lemma vimage2pI: "R (f x) (g y) \<Longrightarrow> vimage2p f g R x y"
   unfolding vimage2p_def by -
 
-lemma vimage2pD: "vimage2p f g R x y \<Longrightarrow> R (f x) (g y)"
-  unfolding vimage2p_def by -
-
 lemma fun_rel_iff_leq_vimage2p: "(fun_rel R S) f g = (R \<le> vimage2p f g S)"
   unfolding fun_rel_def vimage2p_def by auto
 
 lemma convol_image_vimage2p: "<f o fst, g o snd> ` Collect (split (vimage2p f g R)) \<subseteq> Collect (split R)"
   unfolding vimage2p_def convol_def by auto
 
+(*FIXME: duplicates lemma from Record.thy*)
+lemma o_eq_dest_lhs: "a o b = c \<Longrightarrow> a (b v) = c v"
+  by clarsimp
+
 ML_file "Tools/bnf_def_tactics.ML"
 ML_file "Tools/bnf_def.ML"
 
-
 end
--- a/src/HOL/BNF/BNF_FP_Base.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF_FP_Base.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -13,12 +13,6 @@
 imports BNF_Comp Ctr_Sugar
 begin
 
-lemma not_TrueE: "\<not> True \<Longrightarrow> P"
-by (erule notE, rule TrueI)
-
-lemma neq_eq_eq_contradict: "\<lbrakk>t \<noteq> u; s = t; s = u\<rbrakk> \<Longrightarrow> P"
-by fast
-
 lemma mp_conj: "(P \<longrightarrow> Q) \<and> R \<Longrightarrow> P \<Longrightarrow> R \<and> Q"
 by auto
 
@@ -172,7 +166,5 @@
 ML_file "Tools/bnf_fp_n2m.ML"
 ML_file "Tools/bnf_fp_n2m_sugar.ML"
 ML_file "Tools/bnf_fp_rec_sugar_util.ML"
-ML_file "Tools/bnf_fp_rec_sugar_tactics.ML"
-ML_file "Tools/bnf_fp_rec_sugar.ML"
 
 end
--- a/src/HOL/BNF/BNF_GFP.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF_GFP.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -8,21 +8,29 @@
 header {* Greatest Fixed Point Operation on Bounded Natural Functors *}
 
 theory BNF_GFP
-imports BNF_FP_Base Equiv_Relations_More "~~/src/HOL/Library/Sublist"
+imports BNF_FP_Base Equiv_Relations_More List_Prefix
 keywords
   "codatatype" :: thy_decl and
   "primcorecursive" :: thy_goal and
   "primcorec" :: thy_decl
 begin
 
+lemma not_TrueE: "\<not> True \<Longrightarrow> P"
+by (erule notE, rule TrueI)
+
+lemma neq_eq_eq_contradict: "\<lbrakk>t \<noteq> u; s = t; s = u\<rbrakk> \<Longrightarrow> P"
+by fast
+
 lemma sum_case_expand_Inr: "f o Inl = g \<Longrightarrow> f x = sum_case g (f o Inr) x"
 by (auto split: sum.splits)
 
 lemma sum_case_expand_Inr': "f o Inl = g \<Longrightarrow> h = f o Inr \<longleftrightarrow> sum_case g h = f"
-by (metis sum_case_o_inj(1,2) surjective_sum)
+apply rule
+ apply (rule ext, force split: sum.split)
+by (rule ext, metis sum_case_o_inj(2))
 
 lemma converse_Times: "(A \<times> B) ^-1 = B \<times> A"
-by auto
+by fast
 
 lemma equiv_proj:
   assumes e: "equiv A R" and "z \<in> R"
@@ -37,7 +45,6 @@
 (* Operators: *)
 definition image2 where "image2 A f g = {(f a, g a) | a. a \<in> A}"
 
-
 lemma Id_onD: "(a, b) \<in> Id_on A \<Longrightarrow> a = b"
 unfolding Id_on_def by simp
 
@@ -56,9 +63,6 @@
 lemma Id_on_Gr: "Id_on A = Gr A id"
 unfolding Id_on_def Gr_def by auto
 
-lemma Id_on_UNIV_I: "x = y \<Longrightarrow> (x, y) \<in> Id_on UNIV"
-unfolding Id_on_def by auto
-
 lemma image2_eqI: "\<lbrakk>b = f x; c = g x; x \<in> A\<rbrakk> \<Longrightarrow> (b, c) \<in> image2 A f g"
 unfolding image2_def by auto
 
@@ -77,6 +81,12 @@
 lemma Gr_incl: "Gr A f \<subseteq> A <*> B \<longleftrightarrow> f ` A \<subseteq> B"
 unfolding Gr_def by auto
 
+lemma subset_Collect_iff: "B \<subseteq> A \<Longrightarrow> (B \<subseteq> {x \<in> A. P x}) = (\<forall>x \<in> B. P x)"
+by blast
+
+lemma subset_CollectI: "B \<subseteq> A \<Longrightarrow> (\<And>x. x \<in> B \<Longrightarrow> Q x \<Longrightarrow> P x) \<Longrightarrow> ({x \<in> B. Q x} \<subseteq> {x \<in> A. P x})"
+by blast
+
 lemma in_rel_Collect_split_eq: "in_rel (Collect (split X)) = X"
 unfolding fun_eq_iff by auto
 
@@ -130,9 +140,6 @@
 "R \<subseteq> relInvImage UNIV (relImage R f) f"
 unfolding relInvImage_def relImage_def by auto
 
-lemma equiv_Image: "equiv A R \<Longrightarrow> (\<And>a b. (a, b) \<in> R \<Longrightarrow> a \<in> A \<and> b \<in> A \<and> R `` {a} = R `` {b})"
-unfolding equiv_def refl_on_def Image_def by (auto intro: transD symD)
-
 lemma relImage_proj:
 assumes "equiv A R"
 shows "relImage R (proj R) \<subseteq> Id_on (A//R)"
@@ -143,7 +150,7 @@
 lemma relImage_relInvImage:
 assumes "R \<subseteq> f ` A <*> f ` A"
 shows "relImage (relInvImage A R f) f = R"
-using assms unfolding relImage_def relInvImage_def by fastforce
+using assms unfolding relImage_def relInvImage_def by fast
 
 lemma subst_Pair: "P x y \<Longrightarrow> a = (x, y) \<Longrightarrow> P (fst a) (snd a)"
 by simp
@@ -159,6 +166,8 @@
 
 (*Extended Sublist*)
 
+definition clists where "clists r = |lists (Field r)|"
+
 definition prefCl where
   "prefCl Kl = (\<forall> kl1 kl2. prefixeq kl1 kl2 \<and> kl2 \<in> Kl \<longrightarrow> kl1 \<in> Kl)"
 definition PrefCl where
@@ -255,13 +264,18 @@
 shows "\<exists> a. a \<in> A \<and> p1 a = b1 \<and> p2 a = b2"
 using assms unfolding wpull_def by blast
 
-lemma pickWP:
+lemma pickWP_raw:
 assumes "wpull A B1 B2 f1 f2 p1 p2" and
 "b1 \<in> B1" and "b2 \<in> B2" and "f1 b1 = f2 b2"
-shows "pickWP A p1 p2 b1 b2 \<in> A"
-      "p1 (pickWP A p1 p2 b1 b2) = b1"
-      "p2 (pickWP A p1 p2 b1 b2) = b2"
-unfolding pickWP_def using assms someI_ex[OF pickWP_pred] by fastforce+
+shows "pickWP A p1 p2 b1 b2 \<in> A
+       \<and> p1 (pickWP A p1 p2 b1 b2) = b1
+       \<and> p2 (pickWP A p1 p2 b1 b2) = b2"
+unfolding pickWP_def using assms someI_ex[OF pickWP_pred] by fastforce
+
+lemmas pickWP =
+  pickWP_raw[THEN conjunct1]
+  pickWP_raw[THEN conjunct2, THEN conjunct1]
+  pickWP_raw[THEN conjunct2, THEN conjunct2]
 
 lemma Inl_Field_csum: "a \<in> Field r \<Longrightarrow> Inl a \<in> Field (r +c s)"
 unfolding Field_card_of csum_def by auto
@@ -293,21 +307,17 @@
 lemma image2pI: "R x y \<Longrightarrow> (image2p f g R) (f x) (g y)"
   unfolding image2p_def by blast
 
-lemma image2p_eqI: "\<lbrakk>fx = f x; gy = g y; R x y\<rbrakk> \<Longrightarrow> (image2p f g R) fx gy"
-  unfolding image2p_def by blast
-
 lemma image2pE: "\<lbrakk>(image2p f g R) fx gy; (\<And>x y. fx = f x \<Longrightarrow> gy = g y \<Longrightarrow> R x y \<Longrightarrow> P)\<rbrakk> \<Longrightarrow> P"
   unfolding image2p_def by blast
 
 lemma fun_rel_iff_geq_image2p: "(fun_rel R S) f g = (image2p f g R \<le> S)"
   unfolding fun_rel_def image2p_def by auto
 
-lemma convol_image_image2p: "<f o fst, g o snd> ` Collect (split R) \<subseteq> Collect (split (image2p f g R))"
-  unfolding convol_def image2p_def by fastforce
-
 lemma fun_rel_image2p: "(fun_rel R (image2p f g R)) f g"
   unfolding fun_rel_def image2p_def by auto
 
+ML_file "Tools/bnf_gfp_rec_sugar_tactics.ML"
+ML_file "Tools/bnf_gfp_rec_sugar.ML"
 ML_file "Tools/bnf_gfp_util.ML"
 ML_file "Tools/bnf_gfp_tactics.ML"
 ML_file "Tools/bnf_gfp.ML"
--- a/src/HOL/BNF/BNF_LFP.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF_LFP.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -230,6 +230,7 @@
 lemma predicate2D_vimage2p: "\<lbrakk>R \<le> vimage2p f g S; R x y\<rbrakk> \<Longrightarrow> S (f x) (g y)"
   unfolding vimage2p_def by auto
 
+ML_file "Tools/bnf_lfp_rec_sugar.ML"
 ML_file "Tools/bnf_lfp_util.ML"
 ML_file "Tools/bnf_lfp_tactics.ML"
 ML_file "Tools/bnf_lfp.ML"
--- a/src/HOL/BNF/BNF_Util.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/BNF_Util.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -9,15 +9,11 @@
 header {* Library for Bounded Natural Functors *}
 
 theory BNF_Util
-imports Ctr_Sugar "../Cardinals/Cardinal_Arithmetic"
+imports "../Cardinals/Cardinal_Arithmetic_FP"
+   (*FIXME: define fun_rel here, reuse in Transfer once this theory is in HOL*)
+  Transfer
 begin
 
-lemma subset_Collect_iff: "B \<subseteq> A \<Longrightarrow> (B \<subseteq> {x \<in> A. P x}) = (\<forall>x \<in> B. P x)"
-by blast
-
-lemma subset_CollectI: "B \<subseteq> A \<Longrightarrow> (\<And>x. x \<in> B \<Longrightarrow> Q x \<Longrightarrow> P x) \<Longrightarrow> ({x \<in> B. Q x} \<subseteq> {x \<in> A. P x})"
-by blast
-
 definition collect where
 "collect F x = (\<Union>f \<in> F. f x)"
 
@@ -32,12 +28,6 @@
  (\<forall> b1 b2. b1 \<in> B1 \<and> b2 \<in> B2 \<and> f1 b1 = f2 b2 \<longrightarrow>
            (\<exists> a \<in> A. e1 (p1 a) = e1 b1 \<and> e2 (p2 a) = e2 b2))"
 
-lemma fst_snd: "\<lbrakk>snd x = (y, z)\<rbrakk> \<Longrightarrow> fst (snd x) = y"
-by simp
-
-lemma snd_snd: "\<lbrakk>snd x = (y, z)\<rbrakk> \<Longrightarrow> snd (snd x) = z"
-by simp
-
 lemma fstI: "x = (y, z) \<Longrightarrow> fst x = y"
 by simp
 
@@ -47,9 +37,6 @@
 lemma bijI: "\<lbrakk>\<And>x y. (f x = f y) = (x = y); \<And>y. \<exists>x. y = f x\<rbrakk> \<Longrightarrow> bij f"
 unfolding bij_def inj_on_def by auto blast
 
-lemma Collect_pair_mem_eq: "{(x, y). (x, y) \<in> R} = R"
-by simp
-
 (* Operator: *)
 definition "Gr A f = {(a, f a) | a. a \<in> A}"
 
--- a/src/HOL/BNF/Basic_BNFs.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Basic_BNFs.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -11,31 +11,29 @@
 
 theory Basic_BNFs
 imports BNF_Def
+   (*FIXME: define relators here, reuse in Lifting_* once this theory is in HOL*)
+  Lifting_Sum
+  Lifting_Product
+  Main
 begin
 
-lemma wpull_id: "wpull UNIV B1 B2 id id id id"
-unfolding wpull_def by simp
-
-lemmas natLeq_card_order = natLeq_Card_order[unfolded Field_natLeq]
-
-lemma ctwo_card_order: "card_order ctwo"
-using Card_order_ctwo by (unfold ctwo_def Field_card_of)
-
-lemma natLeq_cinfinite: "cinfinite natLeq"
-unfolding cinfinite_def Field_natLeq by (rule nat_infinite)
-
 lemma wpull_Grp_def: "wpull A B1 B2 f1 f2 p1 p2 \<longleftrightarrow> Grp B1 f1 OO (Grp B2 f2)\<inverse>\<inverse> \<le> (Grp A p1)\<inverse>\<inverse> OO Grp A p2"
   unfolding wpull_def Grp_def by auto
 
-bnf ID: "id :: ('a \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b" ["\<lambda>x. {x}"] "\<lambda>_:: 'a. natLeq" ["id :: 'a \<Rightarrow> 'a"]
-  "id :: ('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
+bnf ID: 'a
+  map: "id :: ('a \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b"
+  sets: "\<lambda>x. {x}"
+  bd: natLeq
+  rel: "id :: ('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
 apply (auto simp: Grp_def fun_eq_iff relcompp.simps natLeq_card_order natLeq_cinfinite)
 apply (rule ordLess_imp_ordLeq[OF finite_ordLess_infinite[OF _ natLeq_Well_order]])
 apply (auto simp add: Field_card_of Field_natLeq card_of_well_order_on)[3]
 done
 
-bnf DEADID: "id :: 'a \<Rightarrow> 'a" [] "\<lambda>_:: 'a. natLeq +c |UNIV :: 'a set|" ["SOME x :: 'a. True"]
-  "op = :: 'a \<Rightarrow> 'a \<Rightarrow> bool"
+bnf DEADID: 'a
+  map: "id :: 'a \<Rightarrow> 'a"
+  bd: "natLeq +c |UNIV :: 'a set|"
+  rel: "op = :: 'a \<Rightarrow> 'a \<Rightarrow> bool"
 by (auto simp add: wpull_Grp_def Grp_def
   card_order_csum natLeq_card_order card_of_card_order_on
   cinfinite_csum natLeq_cinfinite)
@@ -48,15 +46,20 @@
 
 lemmas sum_set_defs = setl_def[abs_def] setr_def[abs_def]
 
-bnf sum_map [setl, setr] "\<lambda>_::'a + 'b. natLeq" [Inl, Inr] sum_rel
+bnf "'a + 'b"
+  map: sum_map
+  sets: setl setr
+  bd: natLeq
+  wits: Inl Inr
+  rel: sum_rel
 proof -
   show "sum_map id id = id" by (rule sum_map.id)
 next
-  fix f1 f2 g1 g2
+  fix f1 :: "'o \<Rightarrow> 's" and f2 :: "'p \<Rightarrow> 't" and g1 :: "'s \<Rightarrow> 'q" and g2 :: "'t \<Rightarrow> 'r"
   show "sum_map (g1 o f1) (g2 o f2) = sum_map g1 g2 o sum_map f1 f2"
     by (rule sum_map.comp[symmetric])
 next
-  fix x f1 f2 g1 g2
+  fix x and f1 :: "'o \<Rightarrow> 'q" and f2 :: "'p \<Rightarrow> 'r" and g1 g2
   assume a1: "\<And>z. z \<in> setl x \<Longrightarrow> f1 z = g1 z" and
          a2: "\<And>z. z \<in> setr x \<Longrightarrow> f2 z = g2 z"
   thus "sum_map f1 f2 x = sum_map g1 g2 x"
@@ -66,11 +69,11 @@
     case Inr thus ?thesis using a2 by (clarsimp simp: setr_def)
   qed
 next
-  fix f1 f2
+  fix f1 :: "'o \<Rightarrow> 'q" and f2 :: "'p \<Rightarrow> 'r"
   show "setl o sum_map f1 f2 = image f1 o setl"
     by (rule ext, unfold o_apply) (simp add: setl_def split: sum.split)
 next
-  fix f1 f2
+  fix f1 :: "'o \<Rightarrow> 'q" and f2 :: "'p \<Rightarrow> 'r"
   show "setr o sum_map f1 f2 = image f2 o setr"
     by (rule ext, unfold o_apply) (simp add: setr_def split: sum.split)
 next
@@ -78,13 +81,13 @@
 next
   show "cinfinite natLeq" by (rule natLeq_cinfinite)
 next
-  fix x
+  fix x :: "'o + 'p"
   show "|setl x| \<le>o natLeq"
     apply (rule ordLess_imp_ordLeq)
     apply (rule finite_iff_ordLess_natLeq[THEN iffD1])
     by (simp add: setl_def split: sum.split)
 next
-  fix x
+  fix x :: "'o + 'p"
   show "|setr x| \<le>o natLeq"
     apply (rule ordLess_imp_ordLeq)
     apply (rule finite_iff_ordLess_natLeq[THEN iffD1])
@@ -148,7 +151,11 @@
 
 lemmas prod_set_defs = fsts_def[abs_def] snds_def[abs_def]
 
-bnf map_pair [fsts, snds] "\<lambda>_::'a \<times> 'b. natLeq" [Pair] prod_rel
+bnf "'a \<times> 'b"
+  map: map_pair
+  sets: fsts snds
+  bd: natLeq
+  rel: prod_rel
 proof (unfold prod_set_defs)
   show "map_pair id id = id" by (rule map_pair.id)
 next
@@ -193,7 +200,7 @@
         Grp {x. {fst x} \<subseteq> Collect (split R) \<and> {snd x} \<subseteq> Collect (split S)} (map_pair snd snd)"
   unfolding prod_set_defs prod_rel_def Grp_def relcompp.simps conversep.simps fun_eq_iff
   by auto
-qed simp+
+qed
 
 (* Categorical version of pullback: *)
 lemma wpull_cat:
@@ -215,24 +222,11 @@
   thus ?thesis using that by fastforce
 qed
 
-lemma card_of_bounded_range:
-  "|{f :: 'd \<Rightarrow> 'a. range f \<subseteq> B}| \<le>o |Func (UNIV :: 'd set) B|" (is "|?LHS| \<le>o |?RHS|")
-proof -
-  let ?f = "\<lambda>f. %x. if f x \<in> B then f x else undefined"
-  have "inj_on ?f ?LHS" unfolding inj_on_def
-  proof (unfold fun_eq_iff, safe)
-    fix g :: "'d \<Rightarrow> 'a" and f :: "'d \<Rightarrow> 'a" and x
-    assume "range f \<subseteq> B" "range g \<subseteq> B" and eq: "\<forall>x. ?f f x = ?f g x"
-    hence "f x \<in> B" "g x \<in> B" by auto
-    with eq have "Some (f x) = Some (g x)" by metis
-    thus "f x = g x" by simp
-  qed
-  moreover have "?f ` ?LHS \<subseteq> ?RHS" unfolding Func_def by fastforce
-  ultimately show ?thesis using card_of_ordLeq by fast
-qed
-
-bnf "op \<circ>" [range] "\<lambda>_:: 'a \<Rightarrow> 'b. natLeq +c |UNIV :: 'a set|" ["%c x::'b::type. c::'a::type"]
-  "fun_rel op ="
+bnf "'a \<Rightarrow> 'b"
+  map: "op \<circ>"
+  sets: range
+  bd: "natLeq +c |UNIV :: 'a set|"
+  rel: "fun_rel op ="
 proof
   fix f show "id \<circ> f = id f" by simp
 next
@@ -258,7 +252,7 @@
 next
   fix f :: "'d => 'a"
   have "|range f| \<le>o | (UNIV::'d set) |" (is "_ \<le>o ?U") by (rule card_of_image)
-  also have "?U \<le>o natLeq +c ?U"  by (rule ordLeq_csum2) (rule card_of_Card_order)
+  also have "?U \<le>o natLeq +c ?U" by (rule ordLeq_csum2) (rule card_of_Card_order)
   finally show "|range f| \<le>o natLeq +c ?U" .
 next
   fix A B1 B2 f1 f2 p1 p2 assume p: "wpull A B1 B2 f1 f2 p1 p2"
@@ -277,7 +271,7 @@
         (Grp {x. range x \<subseteq> Collect (split R)} (op \<circ> fst))\<inverse>\<inverse> OO
          Grp {x. range x \<subseteq> Collect (split R)} (op \<circ> snd)"
   unfolding fun_rel_def Grp_def fun_eq_iff relcompp.simps conversep.simps  subset_iff image_iff
-  by auto (force, metis pair_collapse)
-qed auto
+  by auto (force, metis (no_types) pair_collapse)
+qed
 
 end
--- a/src/HOL/BNF/Coinduction.thy	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-(*  Title:      HOL/BNF/Coinduction.thy
-    Author:     Johannes Hölzl, TU Muenchen
-    Author:     Dmitriy Traytel, TU Muenchen
-    Copyright   2013
-
-Coinduction method that avoids some boilerplate compared to coinduct.
-*)
-
-header {* Coinduction Method *}
-
-theory Coinduction
-imports BNF_Util
-begin
-
-ML_file "Tools/coinduction.ML"
-
-setup Coinduction.setup
-
-end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/BNF/Countable_Set_Type.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,212 @@
+(*  Title:      HOL/BNF/Countable_Set_Type.thy
+    Author:     Andrei Popescu, TU Muenchen
+    Copyright   2012
+
+Type of (at most) countable sets.
+*)
+
+header {* Type of (at Most) Countable Sets *}
+
+theory Countable_Set_Type
+imports
+  More_BNFs
+  "~~/src/HOL/Cardinals/Cardinals"
+  "~~/src/HOL/Library/Countable_Set"
+begin
+
+subsection{* Cardinal stuff *}
+
+lemma countable_card_of_nat: "countable A \<longleftrightarrow> |A| \<le>o |UNIV::nat set|"
+  unfolding countable_def card_of_ordLeq[symmetric] by auto
+
+lemma countable_card_le_natLeq: "countable A \<longleftrightarrow> |A| \<le>o natLeq"
+  unfolding countable_card_of_nat using card_of_nat ordLeq_ordIso_trans ordIso_symmetric by blast
+
+lemma countable_or_card_of:
+assumes "countable A"
+shows "(finite A \<and> |A| <o |UNIV::nat set| ) \<or>
+       (infinite A  \<and> |A| =o |UNIV::nat set| )"
+proof (cases "finite A")
+  case True thus ?thesis by (metis finite_iff_cardOf_nat)
+next
+  case False with assms show ?thesis
+    by (metis countable_card_of_nat infinite_iff_card_of_nat ordIso_iff_ordLeq)
+qed
+
+lemma countable_cases_card_of[elim]:
+  assumes "countable A"
+  obtains (Fin) "finite A" "|A| <o |UNIV::nat set|"
+        | (Inf) "infinite A" "|A| =o |UNIV::nat set|"
+  using assms countable_or_card_of by blast
+
+lemma countable_or:
+  "countable A \<Longrightarrow> (\<exists> f::'a\<Rightarrow>nat. finite A \<and> inj_on f A) \<or> (\<exists> f::'a\<Rightarrow>nat. infinite A \<and> bij_betw f A UNIV)"
+  by (elim countable_enum_cases) fastforce+
+
+lemma countable_cases[elim]:
+  assumes "countable A"
+  obtains (Fin) f :: "'a\<Rightarrow>nat" where "finite A" "inj_on f A"
+        | (Inf) f :: "'a\<Rightarrow>nat" where "infinite A" "bij_betw f A UNIV"
+  using assms countable_or by metis
+
+lemma countable_ordLeq:
+assumes "|A| \<le>o |B|" and "countable B"
+shows "countable A"
+using assms unfolding countable_card_of_nat by(rule ordLeq_transitive)
+
+lemma countable_ordLess:
+assumes AB: "|A| <o |B|" and B: "countable B"
+shows "countable A"
+using countable_ordLeq[OF ordLess_imp_ordLeq[OF AB] B] .
+
+subsection {* The type of countable sets *}
+
+typedef 'a cset = "{A :: 'a set. countable A}" morphisms rcset acset
+  by (rule exI[of _ "{}"]) simp
+
+setup_lifting type_definition_cset
+
+declare
+  rcset_inverse[simp]
+  acset_inverse[Transfer.transferred, unfolded mem_Collect_eq, simp]
+  acset_inject[Transfer.transferred, unfolded mem_Collect_eq, simp]
+  rcset[Transfer.transferred, unfolded mem_Collect_eq, simp]
+
+lift_definition cin :: "'a \<Rightarrow> 'a cset \<Rightarrow> bool" is "op \<in>" parametric member_transfer
+  ..
+lift_definition cempty :: "'a cset" is "{}" parametric empty_transfer
+  by (rule countable_empty)
+lift_definition cinsert :: "'a \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is insert parametric Lifting_Set.insert_transfer
+  by (rule countable_insert)
+lift_definition csingle :: "'a \<Rightarrow> 'a cset" is "\<lambda>x. {x}"
+  by (rule countable_insert[OF countable_empty])
+lift_definition cUn :: "'a cset \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is "op \<union>" parametric union_transfer
+  by (rule countable_Un)
+lift_definition cInt :: "'a cset \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is "op \<inter>" parametric inter_transfer
+  by (rule countable_Int1)
+lift_definition cDiff :: "'a cset \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is "op -" parametric Diff_transfer
+  by (rule countable_Diff)
+lift_definition cimage :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a cset \<Rightarrow> 'b cset" is "op `" parametric image_transfer
+  by (rule countable_image)
+
+subsection {* Registration as BNF *}
+
+lemma card_of_countable_sets_range:
+fixes A :: "'a set"
+shows "|{X. X \<subseteq> A \<and> countable X \<and> X \<noteq> {}}| \<le>o |{f::nat \<Rightarrow> 'a. range f \<subseteq> A}|"
+apply(rule card_of_ordLeqI[of from_nat_into]) using inj_on_from_nat_into
+unfolding inj_on_def by auto
+
+lemma card_of_countable_sets_Func:
+"|{X. X \<subseteq> A \<and> countable X \<and> X \<noteq> {}}| \<le>o |A| ^c natLeq"
+using card_of_countable_sets_range card_of_Func_UNIV[THEN ordIso_symmetric]
+unfolding cexp_def Field_natLeq Field_card_of
+by (rule ordLeq_ordIso_trans)
+
+lemma ordLeq_countable_subsets:
+"|A| \<le>o |{X. X \<subseteq> A \<and> countable X}|"
+apply (rule card_of_ordLeqI[of "\<lambda> a. {a}"]) unfolding inj_on_def by auto
+
+lemma finite_countable_subset:
+"finite {X. X \<subseteq> A \<and> countable X} \<longleftrightarrow> finite A"
+apply default
+ apply (erule contrapos_pp)
+ apply (rule card_of_ordLeq_infinite)
+ apply (rule ordLeq_countable_subsets)
+ apply assumption
+apply (rule finite_Collect_conjI)
+apply (rule disjI1)
+by (erule finite_Collect_subsets)
+
+lemma rcset_to_rcset: "countable A \<Longrightarrow> rcset (the_inv rcset A) = A"
+  apply (rule f_the_inv_into_f[unfolded inj_on_def image_iff])
+   apply transfer' apply simp
+  apply transfer' apply simp
+  done
+
+lemma Collect_Int_Times:
+"{(x, y). R x y} \<inter> A \<times> B = {(x, y). R x y \<and> x \<in> A \<and> y \<in> B}"
+by auto
+
+definition cset_rel :: "('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a cset \<Rightarrow> 'b cset \<Rightarrow> bool" where
+"cset_rel R a b \<longleftrightarrow>
+ (\<forall>t \<in> rcset a. \<exists>u \<in> rcset b. R t u) \<and>
+ (\<forall>t \<in> rcset b. \<exists>u \<in> rcset a. R u t)"
+
+lemma cset_rel_aux:
+"(\<forall>t \<in> rcset a. \<exists>u \<in> rcset b. R t u) \<and> (\<forall>t \<in> rcset b. \<exists>u \<in> rcset a. R u t) \<longleftrightarrow>
+ ((Grp {x. rcset x \<subseteq> {(a, b). R a b}} (cimage fst))\<inverse>\<inverse> OO
+          Grp {x. rcset x \<subseteq> {(a, b). R a b}} (cimage snd)) a b" (is "?L = ?R")
+proof
+  assume ?L
+  def R' \<equiv> "the_inv rcset (Collect (split R) \<inter> (rcset a \<times> rcset b))"
+  (is "the_inv rcset ?L'")
+  have L: "countable ?L'" by auto
+  hence *: "rcset R' = ?L'" unfolding R'_def using fset_to_fset by (intro rcset_to_rcset)
+  thus ?R unfolding Grp_def relcompp.simps conversep.simps
+  proof (intro CollectI prod_caseI exI[of _ a] exI[of _ b] exI[of _ R'] conjI refl)
+    from * `?L` show "a = cimage fst R'" by transfer (auto simp: image_def Collect_Int_Times)
+  next
+    from * `?L` show "b = cimage snd R'" by transfer (auto simp: image_def Collect_Int_Times)
+  qed simp_all
+next
+  assume ?R thus ?L unfolding Grp_def relcompp.simps conversep.simps
+    by transfer force
+qed
+
+bnf "'a cset"
+  map: cimage
+  sets: rcset
+  bd: natLeq
+  wits: "cempty"
+  rel: cset_rel
+proof -
+  show "cimage id = id" by transfer' simp
+next
+  fix f g show "cimage (g \<circ> f) = cimage g \<circ> cimage f" by transfer' fastforce
+next
+  fix C f g assume eq: "\<And>a. a \<in> rcset C \<Longrightarrow> f a = g a"
+  thus "cimage f C = cimage g C" by transfer force
+next
+  fix f show "rcset \<circ> cimage f = op ` f \<circ> rcset" by transfer' fastforce
+next
+  show "card_order natLeq" by (rule natLeq_card_order)
+next
+  show "cinfinite natLeq" by (rule natLeq_cinfinite)
+next
+  fix C show "|rcset C| \<le>o natLeq" by transfer (unfold countable_card_le_natLeq)
+next
+  fix A B1 B2 f1 f2 p1 p2
+  assume wp: "wpull A B1 B2 f1 f2 p1 p2"
+  show "wpull {x. rcset x \<subseteq> A} {x. rcset x \<subseteq> B1} {x. rcset x \<subseteq> B2}
+              (cimage f1) (cimage f2) (cimage p1) (cimage p2)"
+  unfolding wpull_def proof safe
+    fix y1 y2
+    assume Y1: "rcset y1 \<subseteq> B1" and Y2: "rcset y2 \<subseteq> B2"
+    assume "cimage f1 y1 = cimage f2 y2"
+    hence EQ: "f1 ` (rcset y1) = f2 ` (rcset y2)" by transfer
+    with Y1 Y2 obtain X where X: "X \<subseteq> A"
+    and Y1: "p1 ` X = rcset y1" and Y2: "p2 ` X = rcset y2"
+    using wpull_image[OF wp] unfolding wpull_def Pow_def Bex_def mem_Collect_eq
+      by (auto elim!: allE[of _ "rcset y1"] allE[of _ "rcset y2"])
+    have "\<forall> y1' \<in> rcset y1. \<exists> x. x \<in> X \<and> y1' = p1 x" using Y1 by auto
+    then obtain q1 where q1: "\<forall> y1' \<in> rcset y1. q1 y1' \<in> X \<and> y1' = p1 (q1 y1')" by metis
+    have "\<forall> y2' \<in> rcset y2. \<exists> x. x \<in> X \<and> y2' = p2 x" using Y2 by auto
+    then obtain q2 where q2: "\<forall> y2' \<in> rcset y2. q2 y2' \<in> X \<and> y2' = p2 (q2 y2')" by metis
+    def X' \<equiv> "q1 ` (rcset y1) \<union> q2 ` (rcset y2)"
+    have X': "X' \<subseteq> A" and Y1: "p1 ` X' = rcset y1" and Y2: "p2 ` X' = rcset y2"
+    using X Y1 Y2 q1 q2 unfolding X'_def by fast+
+    have fX': "countable X'" unfolding X'_def by simp
+    then obtain x where X'eq: "X' = rcset x" by transfer blast
+    show "\<exists>x\<in>{x. rcset x \<subseteq> A}. cimage p1 x = y1 \<and> cimage p2 x = y2"
+      using X' Y1 Y2 unfolding X'eq by (intro bexI[of _ "x"]) (transfer, auto)
+  qed
+next
+  fix R
+  show "cset_rel R =
+        (Grp {x. rcset x \<subseteq> Collect (split R)} (cimage fst))\<inverse>\<inverse> OO
+         Grp {x. rcset x \<subseteq> Collect (split R)} (cimage snd)"
+  unfolding cset_rel_def[abs_def] cset_rel_aux by simp
+qed (transfer, simp)
+
+end
--- a/src/HOL/BNF/Countable_Type.thy	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,91 +0,0 @@
-(*  Title:      HOL/BNF/Countable_Type.thy
-    Author:     Andrei Popescu, TU Muenchen
-    Copyright   2012
-
-(At most) countable sets.
-*)
-
-header {* (At Most) Countable Sets *}
-
-theory Countable_Type
-imports
-  "~~/src/HOL/Cardinals/Cardinals"
-  "~~/src/HOL/Library/Countable_Set"
-begin
-
-subsection{* Cardinal stuff *}
-
-lemma countable_card_of_nat: "countable A \<longleftrightarrow> |A| \<le>o |UNIV::nat set|"
-  unfolding countable_def card_of_ordLeq[symmetric] by auto
-
-lemma countable_card_le_natLeq: "countable A \<longleftrightarrow> |A| \<le>o natLeq"
-  unfolding countable_card_of_nat using card_of_nat ordLeq_ordIso_trans ordIso_symmetric by blast
-
-lemma countable_or_card_of:
-assumes "countable A"
-shows "(finite A \<and> |A| <o |UNIV::nat set| ) \<or>
-       (infinite A  \<and> |A| =o |UNIV::nat set| )"
-proof (cases "finite A")
-  case True thus ?thesis by (metis finite_iff_cardOf_nat)
-next
-  case False with assms show ?thesis
-    by (metis countable_card_of_nat infinite_iff_card_of_nat ordIso_iff_ordLeq)
-qed
-
-lemma countable_cases_card_of[elim]:
-  assumes "countable A"
-  obtains (Fin) "finite A" "|A| <o |UNIV::nat set|"
-        | (Inf) "infinite A" "|A| =o |UNIV::nat set|"
-  using assms countable_or_card_of by blast
-
-lemma countable_or:
-  "countable A \<Longrightarrow> (\<exists> f::'a\<Rightarrow>nat. finite A \<and> inj_on f A) \<or> (\<exists> f::'a\<Rightarrow>nat. infinite A \<and> bij_betw f A UNIV)"
-  by (elim countable_enum_cases) fastforce+
-
-lemma countable_cases[elim]:
-  assumes "countable A"
-  obtains (Fin) f :: "'a\<Rightarrow>nat" where "finite A" "inj_on f A"
-        | (Inf) f :: "'a\<Rightarrow>nat" where "infinite A" "bij_betw f A UNIV"
-  using assms countable_or by metis
-
-lemma countable_ordLeq:
-assumes "|A| \<le>o |B|" and "countable B"
-shows "countable A"
-using assms unfolding countable_card_of_nat by(rule ordLeq_transitive)
-
-lemma countable_ordLess:
-assumes AB: "|A| <o |B|" and B: "countable B"
-shows "countable A"
-using countable_ordLeq[OF ordLess_imp_ordLeq[OF AB] B] .
-
-subsection{*  The type of countable sets *}
-
-typedef 'a cset = "{A :: 'a set. countable A}" morphisms rcset acset
-  by (rule exI[of _ "{}"]) simp
-
-setup_lifting type_definition_cset
-
-declare
-  rcset_inverse[simp]
-  acset_inverse[Transfer.transferred, unfolded mem_Collect_eq, simp]
-  acset_inject[Transfer.transferred, unfolded mem_Collect_eq, simp]
-  rcset[Transfer.transferred, unfolded mem_Collect_eq, simp]
-
-lift_definition cin :: "'a \<Rightarrow> 'a cset \<Rightarrow> bool" is "op \<in>" parametric member_transfer
-  ..
-lift_definition cempty :: "'a cset" is "{}" parametric empty_transfer
-  by (rule countable_empty)
-lift_definition cinsert :: "'a \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is insert parametric Lifting_Set.insert_transfer
-  by (rule countable_insert)
-lift_definition csingle :: "'a \<Rightarrow> 'a cset" is "\<lambda>x. {x}"
-  by (rule countable_insert[OF countable_empty])
-lift_definition cUn :: "'a cset \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is "op \<union>" parametric union_transfer
-  by (rule countable_Un)
-lift_definition cInt :: "'a cset \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is "op \<inter>" parametric inter_transfer
-  by (rule countable_Int1)
-lift_definition cDiff :: "'a cset \<Rightarrow> 'a cset \<Rightarrow> 'a cset" is "op -" parametric Diff_transfer
-  by (rule countable_Diff)
-lift_definition cimage :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a cset \<Rightarrow> 'b cset" is "op `" parametric image_transfer
-  by (rule countable_image)
-
-end
--- a/src/HOL/BNF/Ctr_Sugar.thy	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-(*  Title:      HOL/BNF/Ctr_Sugar.thy
-    Author:     Jasmin Blanchette, TU Muenchen
-    Copyright   2012
-
-Wrapping existing freely generated type's constructors.
-*)
-
-header {* Wrapping Existing Freely Generated Type's Constructors *}
-
-theory Ctr_Sugar
-imports Main
-keywords
-  "wrap_free_constructors" :: thy_goal and
-  "no_discs_sels" and
-  "rep_compat"
-begin
-
-lemma iffI_np: "\<lbrakk>x \<Longrightarrow> \<not> y; \<not> x \<Longrightarrow> y\<rbrakk> \<Longrightarrow> \<not> x \<longleftrightarrow> y"
-by (erule iffI) (erule contrapos_pn)
-
-lemma iff_contradict:
-"\<not> P \<Longrightarrow> P \<longleftrightarrow> Q \<Longrightarrow> Q \<Longrightarrow> R"
-"\<not> Q \<Longrightarrow> P \<longleftrightarrow> Q \<Longrightarrow> P \<Longrightarrow> R"
-by blast+
-
-ML_file "Tools/ctr_sugar_util.ML"
-ML_file "Tools/ctr_sugar_tactics.ML"
-ML_file "Tools/ctr_sugar.ML"
-
-end
--- a/src/HOL/BNF/Equiv_Relations_More.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Equiv_Relations_More.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -59,7 +59,7 @@
 
 lemma in_quotient_imp_in_rel:
 "\<lbrakk>equiv A r; X \<in> A//r; {x,y} \<subseteq> X\<rbrakk> \<Longrightarrow> (x,y) \<in> r"
-using quotient_eq_iff by fastforce
+using quotient_eq_iff[THEN iffD1] by fastforce
 
 lemma in_quotient_imp_closed:
 "\<lbrakk>equiv A r; X \<in> A//r; x \<in> X; (x,y) \<in> r\<rbrakk> \<Longrightarrow> y \<in> X"
--- a/src/HOL/BNF/Examples/Derivation_Trees/DTree.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Derivation_Trees/DTree.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -11,8 +11,6 @@
 imports Prelim
 begin
 
-hide_fact (open) Lifting_Product.prod_rel_def
-
 typedecl N
 typedecl T
 
@@ -22,8 +20,8 @@
 
 definition "Node n as \<equiv> NNode n (the_inv fset as)"
 definition "cont \<equiv> fset o ccont"
-definition "unfold rt ct \<equiv> dtree_unfold rt (the_inv fset o ct)"
-definition "corec rt ct \<equiv> dtree_corec rt (the_inv fset o ct)"
+definition "unfold rt ct \<equiv> unfold_dtree rt (the_inv fset o ct)"
+definition "corec rt ct \<equiv> corec_dtree rt (the_inv fset o ct)"
 
 lemma finite_cont[simp]: "finite (cont tr)"
   unfolding cont_def o_apply by (cases tr, clarsimp)
--- a/src/HOL/BNF/Examples/Derivation_Trees/Parallel.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Derivation_Trees/Parallel.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -12,7 +12,6 @@
 begin
 
 no_notation plus_class.plus (infixl "+" 65)
-no_notation Sublist.parallel (infixl "\<parallel>" 50)
 
 consts Nplus :: "N \<Rightarrow> N \<Rightarrow> N" (infixl "+" 60)
 
@@ -145,4 +144,4 @@
   thus ?thesis by blast
 qed
 
-end
\ No newline at end of file
+end
--- a/src/HOL/BNF/Examples/Koenig.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Koenig.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -12,44 +12,33 @@
 imports TreeFI Stream
 begin
 
-(* selectors for streams *)
-lemma shd_def': "shd as = fst (stream_dtor as)"
-apply (case_tac as)
-apply (auto simp add: shd_def)
-by (simp add: Stream_def stream.dtor_ctor)
-
-lemma stl_def': "stl as = snd (stream_dtor as)"
-apply (case_tac as)
-apply (auto simp add: stl_def)
-by (simp add: Stream_def stream.dtor_ctor)
-
 (* infinite trees: *)
 coinductive infiniteTr where
-"\<lbrakk>tr' \<in> listF_set (sub tr); infiniteTr tr'\<rbrakk> \<Longrightarrow> infiniteTr tr"
+"\<lbrakk>tr' \<in> set_listF (sub tr); infiniteTr tr'\<rbrakk> \<Longrightarrow> infiniteTr tr"
 
 lemma infiniteTr_strong_coind[consumes 1, case_names sub]:
 assumes *: "phi tr" and
-**: "\<And> tr. phi tr \<Longrightarrow> \<exists> tr' \<in> listF_set (sub tr). phi tr' \<or> infiniteTr tr'"
+**: "\<And> tr. phi tr \<Longrightarrow> \<exists> tr' \<in> set_listF (sub tr). phi tr' \<or> infiniteTr tr'"
 shows "infiniteTr tr"
 using assms by (elim infiniteTr.coinduct) blast
 
 lemma infiniteTr_coind[consumes 1, case_names sub, induct pred: infiniteTr]:
 assumes *: "phi tr" and
-**: "\<And> tr. phi tr \<Longrightarrow> \<exists> tr' \<in> listF_set (sub tr). phi tr'"
+**: "\<And> tr. phi tr \<Longrightarrow> \<exists> tr' \<in> set_listF (sub tr). phi tr'"
 shows "infiniteTr tr"
 using assms by (elim infiniteTr.coinduct) blast
 
 lemma infiniteTr_sub[simp]:
-"infiniteTr tr \<Longrightarrow> (\<exists> tr' \<in> listF_set (sub tr). infiniteTr tr')"
+"infiniteTr tr \<Longrightarrow> (\<exists> tr' \<in> set_listF (sub tr). infiniteTr tr')"
 by (erule infiniteTr.cases) blast
 
 primcorec konigPath where
   "shd (konigPath t) = lab t"
-| "stl (konigPath t) = konigPath (SOME tr. tr \<in> listF_set (sub t) \<and> infiniteTr tr)"
+| "stl (konigPath t) = konigPath (SOME tr. tr \<in> set_listF (sub t) \<and> infiniteTr tr)"
 
 (* proper paths in trees: *)
 coinductive properPath where
-"\<lbrakk>shd as = lab tr; tr' \<in> listF_set (sub tr); properPath (stl as) tr'\<rbrakk> \<Longrightarrow>
+"\<lbrakk>shd as = lab tr; tr' \<in> set_listF (sub tr); properPath (stl as) tr'\<rbrakk> \<Longrightarrow>
  properPath as tr"
 
 lemma properPath_strong_coind[consumes 1, case_names shd_lab sub]:
@@ -57,7 +46,7 @@
 **: "\<And> as tr. phi as tr \<Longrightarrow> shd as = lab tr" and
 ***: "\<And> as tr.
          phi as tr \<Longrightarrow>
-         \<exists> tr' \<in> listF_set (sub tr). phi (stl as) tr' \<or> properPath (stl as) tr'"
+         \<exists> tr' \<in> set_listF (sub tr). phi (stl as) tr' \<or> properPath (stl as) tr'"
 shows "properPath as tr"
 using assms by (elim properPath.coinduct) blast
 
@@ -66,7 +55,7 @@
 **: "\<And> as tr. phi as tr \<Longrightarrow> shd as = lab tr" and
 ***: "\<And> as tr.
          phi as tr \<Longrightarrow>
-         \<exists> tr' \<in> listF_set (sub tr). phi (stl as) tr'"
+         \<exists> tr' \<in> set_listF (sub tr). phi (stl as) tr'"
 shows "properPath as tr"
 using properPath_strong_coind[of phi, OF * **] *** by blast
 
@@ -76,7 +65,7 @@
 
 lemma properPath_sub:
 "properPath as tr \<Longrightarrow>
- \<exists> tr' \<in> listF_set (sub tr). phi (stl as) tr' \<or> properPath (stl as) tr'"
+ \<exists> tr' \<in> set_listF (sub tr). phi (stl as) tr' \<or> properPath (stl as) tr'"
 by (erule properPath.cases) blast
 
 (* prove the following by coinduction *)
@@ -88,10 +77,10 @@
    assume "infiniteTr tr \<and> as = konigPath tr" hence "properPath as tr"
    proof (coinduction arbitrary: tr as rule: properPath_coind)
      case (sub tr as)
-     let ?t = "SOME t'. t' \<in> listF_set (sub tr) \<and> infiniteTr t'"
-     from sub have "\<exists>t' \<in> listF_set (sub tr). infiniteTr t'" by simp
-     then have "\<exists>t'. t' \<in> listF_set (sub tr) \<and> infiniteTr t'" by blast
-     then have "?t \<in> listF_set (sub tr) \<and> infiniteTr ?t" by (rule someI_ex)
+     let ?t = "SOME t'. t' \<in> set_listF (sub tr) \<and> infiniteTr t'"
+     from sub have "\<exists>t' \<in> set_listF (sub tr). infiniteTr t'" by simp
+     then have "\<exists>t'. t' \<in> set_listF (sub tr) \<and> infiniteTr t'" by blast
+     then have "?t \<in> set_listF (sub tr) \<and> infiniteTr ?t" by (rule someI_ex)
      moreover have "stl (konigPath tr) = konigPath ?t" by simp
      ultimately show ?case using sub by blast
    qed simp
--- a/src/HOL/BNF/Examples/ListF.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/ListF.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -62,7 +62,7 @@
   "i < lengthh xs \<Longrightarrow> nthh (mapF f xs) i = f (nthh xs i)"
   by (induct rule: nthh.induct) auto
 
-lemma nthh_listF_set[simp]: "i < lengthh xs \<Longrightarrow> nthh xs i \<in> listF_set xs"
+lemma nthh_listF_set[simp]: "i < lengthh xs \<Longrightarrow> nthh xs i \<in> set_listF xs"
   by (induct rule: nthh.induct) auto
 
 lemma NilF_iff[iff]: "(lengthh xs = 0) = (xs = NilF)"
@@ -105,7 +105,7 @@
 qed simp
 
 lemma list_set_nthh[simp]:
-  "(x \<in> listF_set xs) \<Longrightarrow> (\<exists>i < lengthh xs. nthh xs i = x)"
+  "(x \<in> set_listF xs) \<Longrightarrow> (\<exists>i < lengthh xs. nthh xs i = x)"
   by (induct xs) (auto, induct rule: nthh.induct, auto)
 
 end
--- a/src/HOL/BNF/Examples/Misc_Codatatype.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Misc_Codatatype.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -19,9 +19,9 @@
 
 codatatype simple'' = X1'' nat int | X2''
 
-codatatype 'a stream = Stream 'a "'a stream"
+codatatype 'a stream = Stream (shd: 'a) (stl: "'a stream")
 
-codatatype 'a mylist = MyNil | MyCons 'a "'a mylist"
+codatatype 'a mylist = MyNil | MyCons (myhd: 'a) (mytl: "'a mylist")
 
 codatatype ('b, 'c, 'd, 'e) some_passive =
   SP1 "('b, 'c, 'd, 'e) some_passive" | SP2 'b | SP3 'c | SP4 'd | SP5 'e
--- a/src/HOL/BNF/Examples/Misc_Datatype.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Misc_Datatype.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -19,7 +19,7 @@
 
 datatype_new simple'' = X1'' nat int | X2''
 
-datatype_new 'a mylist = MyNil | MyCons 'a "'a mylist"
+datatype_new 'a mylist = MyNil | MyCons (myhd: 'a) (mytl: "'a mylist")
 
 datatype_new ('b, 'c, 'd, 'e) some_passive =
   SP1 "('b, 'c, 'd, 'e) some_passive" | SP2 'b | SP3 'c | SP4 'd | SP5 'e
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/BNF/Examples/Misc_Primcorec.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,112 @@
+(*  Title:      HOL/BNF/Examples/Misc_Primcorec.thy
+    Author:     Jasmin Blanchette, TU Muenchen
+    Copyright   2013
+
+Miscellaneous primitive corecursive function definitions.
+*)
+
+header {* Miscellaneous Primitive Corecursive Function Definitions *}
+
+theory Misc_Primcorec
+imports Misc_Codatatype
+begin
+
+primcorec simple_of_bools :: "bool \<Rightarrow> bool \<Rightarrow> simple" where
+  "simple_of_bools b b' = (if b then if b' then X1 else X2 else if b' then X3 else X4)"
+
+primcorec simple'_of_bools :: "bool \<Rightarrow> bool \<Rightarrow> simple'" where
+  "simple'_of_bools b b' =
+     (if b then if b' then X1' () else X2' () else if b' then X3' () else X4' ())"
+
+primcorec inc_simple'' :: "nat \<Rightarrow> simple'' \<Rightarrow> simple''" where
+  "inc_simple'' k s = (case s of X1'' n i \<Rightarrow> X1'' (n + k) (i + int k) | X2'' \<Rightarrow> X2'')"
+
+primcorec sinterleave :: "'a stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream" where
+  "sinterleave s s' = Stream (shd s) (sinterleave s' (stl s))"
+
+primcorec myapp :: "'a mylist \<Rightarrow> 'a mylist \<Rightarrow> 'a mylist" where
+  "myapp xs ys =
+     (if xs = MyNil then ys
+      else if ys = MyNil then xs
+      else MyCons (myhd xs) (myapp (mytl xs) ys))"
+
+primcorec shuffle_sp :: "('a, 'b, 'c, 'd) some_passive \<Rightarrow> ('d, 'a, 'b, 'c) some_passive" where
+  "shuffle_sp sp =
+     (case sp of
+       SP1 sp' \<Rightarrow> SP1 (shuffle_sp sp')
+     | SP2 a \<Rightarrow> SP3 a
+     | SP3 b \<Rightarrow> SP4 b
+     | SP4 c \<Rightarrow> SP5 c
+     | SP5 d \<Rightarrow> SP2 d)"
+
+primcorec rename_lam :: "(string \<Rightarrow> string) \<Rightarrow> lambda \<Rightarrow> lambda" where
+  "rename_lam f l =
+     (case l of
+       Var s \<Rightarrow> Var (f s)
+     | App l l' \<Rightarrow> App (rename_lam f l) (rename_lam f l')
+     | Abs s l \<Rightarrow> Abs (f s) (rename_lam f l)
+     | Let SL l \<Rightarrow> Let (fimage (map_pair f (rename_lam f)) SL) (rename_lam f l))"
+
+primcorec
+  j1_sum :: "('a\<Colon>{zero,one,plus}) \<Rightarrow> 'a J1" and
+  j2_sum :: "'a \<Rightarrow> 'a J2"
+where
+  "n = 0 \<Longrightarrow> is_J11 (j1_sum n)" |
+  "un_J111 (j1_sum _) = 0" |
+  "un_J112 (j1_sum _) = j1_sum 0" |
+  "un_J121 (j1_sum n) = n + 1" |
+  "un_J122 (j1_sum n) = j2_sum (n + 1)" |
+  "n = 0 \<Longrightarrow> is_J21 (j2_sum n)" |
+  "un_J221 (j2_sum n) = j1_sum (n + 1)" |
+  "un_J222 (j2_sum n) = j2_sum (n + 1)"
+
+primcorec forest_of_mylist :: "'a tree mylist \<Rightarrow> 'a forest" where
+  "forest_of_mylist ts =
+     (case ts of
+       MyNil \<Rightarrow> FNil
+     | MyCons t ts \<Rightarrow> FCons t (forest_of_mylist ts))"
+
+primcorec mylist_of_forest :: "'a forest \<Rightarrow> 'a tree mylist" where
+  "mylist_of_forest f =
+     (case f of
+       FNil \<Rightarrow> MyNil
+     | FCons t ts \<Rightarrow> MyCons t (mylist_of_forest ts))"
+
+primcorec semi_stream :: "'a stream \<Rightarrow> 'a stream" where
+  "semi_stream s = Stream (shd s) (semi_stream (stl (stl s)))"
+
+primcorec
+  tree'_of_stream :: "'a stream \<Rightarrow> 'a tree'" and
+  branch_of_stream :: "'a stream \<Rightarrow> 'a branch"
+where
+  "tree'_of_stream s =
+     TNode' (branch_of_stream (semi_stream s)) (branch_of_stream (semi_stream (stl s)))" |
+  "branch_of_stream s = (case s of Stream h t \<Rightarrow> Branch h (tree'_of_stream t))"
+
+primcorec
+  freeze_exp :: "('b \<Rightarrow> 'a) \<Rightarrow> ('a, 'b) exp \<Rightarrow> ('a, 'b) exp" and
+  freeze_trm :: "('b \<Rightarrow> 'a) \<Rightarrow> ('a, 'b) trm \<Rightarrow> ('a, 'b) trm" and
+  freeze_factor :: "('b \<Rightarrow> 'a) \<Rightarrow> ('a, 'b) factor \<Rightarrow> ('a, 'b) factor"
+where
+  "freeze_exp g e =
+     (case e of
+       Term t \<Rightarrow> Term (freeze_trm g t)
+     | Sum t e \<Rightarrow> Sum (freeze_trm g t) (freeze_exp g e))" |
+  "freeze_trm g t =
+     (case t of
+       Factor f \<Rightarrow> Factor (freeze_factor g f)
+     | Prod f t \<Rightarrow> Prod (freeze_factor g f) (freeze_trm g t))" |
+  "freeze_factor g f =
+     (case f of
+       C a \<Rightarrow> C a
+     | V b \<Rightarrow> C (g b)
+     | Paren e \<Rightarrow> Paren (freeze_exp g e))"
+
+primcorec poly_unity :: "'a poly_unit" where
+  "poly_unity = U (\<lambda>_. poly_unity)"
+
+primcorec build_cps :: "('a \<Rightarrow> 'a) \<Rightarrow> ('a \<Rightarrow> bool stream) \<Rightarrow> 'a \<Rightarrow> bool stream \<Rightarrow> 'a cps" where
+  "shd b \<Longrightarrow> build_cps f g a b = CPS1 a" |
+  "_ \<Longrightarrow> build_cps f g a b = CPS2 (\<lambda>a. build_cps f g (f a) (g a))"
+
+end
--- a/src/HOL/BNF/Examples/Misc_Primrec.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Misc_Primrec.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -14,7 +14,7 @@
 primrec_new nat_of_simple :: "simple \<Rightarrow> nat" where
   "nat_of_simple X1 = 1" |
   "nat_of_simple X2 = 2" |
-  "nat_of_simple X3 = 2" |
+  "nat_of_simple X3 = 3" |
   "nat_of_simple X4 = 4"
 
 primrec_new simple_of_simple' :: "simple' \<Rightarrow> simple" where
--- a/src/HOL/BNF/Examples/Process.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Process.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -22,7 +22,7 @@
 subsection {* Basic properties *}
 
 declare
-  pre_process_rel_def[simp]
+  rel_pre_process_def[simp]
   sum_rel_def[simp]
   prod_rel_def[simp]
 
@@ -81,24 +81,17 @@
 
 datatype x_y_ax = x | y | ax
 
-definition "isA \<equiv> \<lambda> K. case K of x \<Rightarrow> False     |y \<Rightarrow> True  |ax \<Rightarrow> True"
-definition "pr  \<equiv> \<lambda> K. case K of x \<Rightarrow> undefined |y \<Rightarrow> ''b'' |ax \<Rightarrow> ''a''"
-definition "co  \<equiv> \<lambda> K. case K of x \<Rightarrow> undefined |y \<Rightarrow> x    |ax \<Rightarrow> x"
-lemmas Action_defs = isA_def pr_def co_def
+primcorec F :: "x_y_ax \<Rightarrow> char list process" where
+  "xyax = x \<Longrightarrow> isChoice (F xyax)"
+| "ch1Of (F xyax) = F ax"
+| "ch2Of (F xyax) = F y"
+| "prefOf (F xyax) = (if xyax = y then ''b'' else ''a'')"
+| "contOf (F xyax) = F x"
 
-definition "c1  \<equiv> \<lambda> K. case K of x \<Rightarrow> ax   |y \<Rightarrow> undefined |ax \<Rightarrow> undefined"
-definition "c2  \<equiv> \<lambda> K. case K of x \<Rightarrow> y    |y \<Rightarrow> undefined |ax \<Rightarrow> undefined"
-lemmas Choice_defs = c1_def c2_def
-
-definition "F \<equiv> process_unfold isA pr co c1 c2"
 definition "X = F x"  definition "Y = F y"  definition "AX = F ax"
 
 lemma X_Y_AX: "X = Choice AX Y"  "Y = Action ''b'' X"  "AX = Action ''a'' X"
-unfolding X_def Y_def AX_def F_def
-using process.unfold(2)[of isA x "pr" co c1 c2]
-      process.unfold(1)[of isA y "pr" co c1 c2]
-      process.unfold(1)[of isA ax "pr" co c1 c2]
-unfolding Action_defs Choice_defs by simp_all
+unfolding X_def Y_def AX_def by (subst F.code, simp)+
 
 (* end product: *)
 lemma X_AX:
--- a/src/HOL/BNF/Examples/Stream.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Examples/Stream.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -18,7 +18,7 @@
 code_datatype Stream
 
 lemma stream_case_cert:
-  assumes "CASE \<equiv> stream_case c"
+  assumes "CASE \<equiv> case_stream c"
   shows "CASE (a ## s) \<equiv> c a s"
   using assms by simp_all
 
@@ -87,10 +87,10 @@
   by (induct xs) auto
 
 
-subsection {* set of streams with elements in some fixes set *}
+subsection {* set of streams with elements in some fixed set *}
 
 coinductive_set
-  streams :: "'a set => 'a stream set"
+  streams :: "'a set \<Rightarrow> 'a stream set"
   for A :: "'a set"
 where
   Stream[intro!, simp, no_atp]: "\<lbrakk>a \<in> A; s \<in> streams A\<rbrakk> \<Longrightarrow> a ## s \<in> streams A"
@@ -98,6 +98,15 @@
 lemma shift_streams: "\<lbrakk>w \<in> lists A; s \<in> streams A\<rbrakk> \<Longrightarrow> w @- s \<in> streams A"
   by (induct w) auto
 
+lemma streams_Stream: "x ## s \<in> streams A \<longleftrightarrow> x \<in> A \<and> s \<in> streams A"
+  by (auto elim: streams.cases)
+
+lemma streams_stl: "s \<in> streams A \<Longrightarrow> stl s \<in> streams A"
+  by (cases s) (auto simp: streams_Stream)
+
+lemma streams_shd: "s \<in> streams A \<Longrightarrow> shd s \<in> A"
+  by (cases s) (auto simp: streams_Stream)
+
 lemma sset_streams:
   assumes "sset s \<subseteq> A"
   shows "s \<in> streams A"
@@ -105,6 +114,28 @@
   case streams then show ?case by (cases s) simp
 qed
 
+lemma streams_sset:
+  assumes "s \<in> streams A"
+  shows "sset s \<subseteq> A"
+proof
+  fix x assume "x \<in> sset s" from this `s \<in> streams A` show "x \<in> A"
+    by (induct s) (auto intro: streams_shd streams_stl)
+qed
+
+lemma streams_iff_sset: "s \<in> streams A \<longleftrightarrow> sset s \<subseteq> A"
+  by (metis sset_streams streams_sset)
+
+lemma streams_mono:  "s \<in> streams A \<Longrightarrow> A \<subseteq> B \<Longrightarrow> s \<in> streams B"
+  unfolding streams_iff_sset by auto
+
+lemma smap_streams: "s \<in> streams A \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> f x \<in> B) \<Longrightarrow> smap f s \<in> streams B"
+  unfolding streams_iff_sset stream.set_map by auto
+
+lemma streams_empty: "streams {} = {}"
+  by (auto elim: streams.cases)
+
+lemma streams_UNIV[simp]: "streams UNIV = UNIV"
+  by (auto simp: streams_iff_sset)
 
 subsection {* nth, take, drop for streams *}
 
@@ -234,6 +265,9 @@
 lemma stream_all_shift[simp]: "stream_all P (xs @- s) = (list_all P xs \<and> stream_all P s)"
   unfolding stream_all_iff list_all_iff by auto
 
+lemma stream_all_Stream: "stream_all P (x ## X) \<longleftrightarrow> P x \<and> stream_all P X"
+  by simp
+
 
 subsection {* recurring stream out of a list *}
 
@@ -285,59 +319,60 @@
   by (induct n arbitrary: u) (auto simp: rotate1_rotate_swap rotate1_hd_tl rotate_conv_mod[symmetric])
 
 
+subsection {* iterated application of a function *}
+
+primcorec siterate where
+  "shd (siterate f x) = x"
+| "stl (siterate f x) = siterate f (f x)"
+
+lemma stake_Suc: "stake (Suc n) s = stake n s @ [s !! n]"
+  by (induct n arbitrary: s) auto
+
+lemma snth_siterate[simp]: "siterate f x !! n = (f^^n) x"
+  by (induct n arbitrary: x) (auto simp: funpow_swap1)
+
+lemma sdrop_siterate[simp]: "sdrop n (siterate f x) = siterate f ((f^^n) x)"
+  by (induct n arbitrary: x) (auto simp: funpow_swap1)
+
+lemma stake_siterate[simp]: "stake n (siterate f x) = map (\<lambda>n. (f^^n) x) [0 ..< n]"
+  by (induct n arbitrary: x) (auto simp del: stake.simps(2) simp: stake_Suc)
+
+lemma sset_siterate: "sset (siterate f x) = {(f^^n) x | n. True}"
+  by (auto simp: sset_range)
+
+lemma smap_siterate: "smap f (siterate f x) = siterate f (f x)"
+  by (coinduction arbitrary: x) auto
+
+
 subsection {* stream repeating a single element *}
 
-primcorec same where
-  "shd (same x) = x"
-| "stl (same x) = same x"
+abbreviation "sconst \<equiv> siterate id"
 
-lemma snth_same[simp]: "same x !! n = x"
-  unfolding same_def by (induct n) auto
+lemma shift_replicate_sconst[simp]: "replicate n x @- sconst x = sconst x"
+  by (subst (3) stake_sdrop[symmetric]) (simp add: map_replicate_trivial)
 
-lemma stake_same[simp]: "stake n (same x) = replicate n x"
-  unfolding same_def by (induct n) (auto simp: upt_rec)
+lemma stream_all_same[simp]: "sset (sconst x) = {x}"
+  by (simp add: sset_siterate)
 
-lemma sdrop_same[simp]: "sdrop n (same x) = same x"
-  unfolding same_def by (induct n) auto
-
-lemma shift_replicate_same[simp]: "replicate n x @- same x = same x"
-  by (metis sdrop_same stake_same stake_sdrop)
+lemma same_cycle: "sconst x = cycle [x]"
+  by coinduction auto
 
-lemma stream_all_same[simp]: "stream_all P (same x) \<longleftrightarrow> P x"
-  unfolding stream_all_def by auto
+lemma smap_sconst: "smap f (sconst x) = sconst (f x)"
+  by coinduction auto
 
-lemma same_cycle: "same x = cycle [x]"
-  by coinduction auto
+lemma sconst_streams: "x \<in> A \<Longrightarrow> sconst x \<in> streams A"
+  by (simp add: streams_iff_sset)
 
 
 subsection {* stream of natural numbers *}
 
-primcorec fromN :: "nat \<Rightarrow> nat stream" where
-  "fromN n = n ## fromN (n + 1)"
-
-lemma snth_fromN[simp]: "fromN n !! m = n + m"
-  unfolding fromN_def by (induct m arbitrary: n) auto
-
-lemma stake_fromN[simp]: "stake m (fromN n) = [n ..< m + n]"
-  unfolding fromN_def by (induct m arbitrary: n) (auto simp: upt_rec)
-
-lemma sdrop_fromN[simp]: "sdrop m (fromN n) = fromN (n + m)"
-  unfolding fromN_def by (induct m arbitrary: n) auto
-
-lemma sset_fromN[simp]: "sset (fromN n) = {n ..}" (is "?L = ?R")
-proof safe
-  fix m assume "m \<in> ?L"
-  moreover
-  { fix s assume "m \<in> sset s" "\<exists>n'\<ge>n. s = fromN n'"
-    hence "n \<le> m"  by (induct arbitrary: n rule: sset_induct1) fastforce+
-  }
-  ultimately show "n \<le> m" by auto
-next
-  fix m assume "n \<le> m" thus "m \<in> ?L" by (metis le_iff_add snth_fromN snth_sset)
-qed
+abbreviation "fromN \<equiv> siterate Suc"
 
 abbreviation "nats \<equiv> fromN 0"
 
+lemma sset_fromN[simp]: "sset (fromN n) = {n ..}"
+  by (auto simp add: sset_siterate) arith
+
 
 subsection {* flatten a stream of lists *}
 
@@ -498,26 +533,4 @@
   "smap2 f s1 s2 = smap (split f) (szip s1 s2)"
   by (coinduction arbitrary: s1 s2) auto
 
-
-subsection {* iterated application of a function *}
-
-primcorec siterate where
-  "shd (siterate f x) = x"
-| "stl (siterate f x) = siterate f (f x)"
-
-lemma stake_Suc: "stake (Suc n) s = stake n s @ [s !! n]"
-  by (induct n arbitrary: s) auto
-
-lemma snth_siterate[simp]: "siterate f x !! n = (f^^n) x"
-  by (induct n arbitrary: x) (auto simp: funpow_swap1)
-
-lemma sdrop_siterate[simp]: "sdrop n (siterate f x) = siterate f ((f^^n) x)"
-  by (induct n arbitrary: x) (auto simp: funpow_swap1)
-
-lemma stake_siterate[simp]: "stake n (siterate f x) = map (\<lambda>n. (f^^n) x) [0 ..< n]"
-  by (induct n arbitrary: x) (auto simp del: stake.simps(2) simp: stake_Suc)
-
-lemma sset_siterate: "sset (siterate f x) = {(f^^n) x | n. True}"
-  by (auto simp: sset_range)
-
 end
--- a/src/HOL/BNF/More_BNFs.thy	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/More_BNFs.thy	Thu Dec 05 17:58:03 2013 +0100
@@ -15,13 +15,17 @@
   Basic_BNFs
   "~~/src/HOL/Library/FSet"
   "~~/src/HOL/Library/Multiset"
-  Countable_Type
 begin
 
 lemma option_rec_conv_option_case: "option_rec = option_case"
 by (simp add: fun_eq_iff split: option.split)
 
-bnf Option.map [Option.set] "\<lambda>_::'a option. natLeq" ["None"] option_rel
+bnf "'a option"
+  map: Option.map
+  sets: Option.set
+  bd: natLeq 
+  wits: None
+  rel: option_rel
 proof -
   show "Option.map id = id" by (simp add: fun_eq_iff Option.map_def split: option.split)
 next
@@ -94,7 +98,12 @@
     (\<exists>zs \<in> ?A. map p1 zs = as \<and> map p2 zs = bs)" by blast
 qed
 
-bnf map [set] "\<lambda>_::'a list. natLeq" ["[]"]
+bnf "'a list"
+  map: map
+  sets: set
+  bd: natLeq
+  wits: Nil
+  rel: list_all2
 proof -
   show "map id = id" by (rule List.map.id)
 next
@@ -115,8 +124,16 @@
   fix x
   show "|set x| \<le>o natLeq"
     by (metis List.finite_set finite_iff_ordLess_natLeq ordLess_imp_ordLeq)
+next
+  fix R
+  show "list_all2 R =
+         (Grp {x. set x \<subseteq> {(x, y). R x y}} (map fst))\<inverse>\<inverse> OO
+         Grp {x. set x \<subseteq> {(x, y). R x y}} (map snd)"
+    unfolding list_all2_def[abs_def] Grp_def fun_eq_iff relcompp.simps conversep.simps
+    by (force simp: zip_map_fst_snd)
 qed (simp add: wpull_map)+
 
+
 (* Finite sets *)
 
 lemma wpull_image:
@@ -189,7 +206,7 @@
   by (transfer, clarsimp, metis fst_conv)
 qed
 
-lemma wpull_fmap:
+lemma wpull_fimage:
   assumes "wpull A B1 B2 f1 f2 p1 p2"
   shows "wpull {x. fset x \<subseteq> A} {x. fset x \<subseteq> B1} {x. fset x \<subseteq> B2}
               (fimage f1) (fimage f2) (fimage p1) (fimage p2)"
@@ -214,7 +231,12 @@
      using X' Y1 Y2 by (auto simp: X'eq intro!: exI[of _ "x"]) (transfer, blast)+
 qed
 
-bnf fimage [fset] "\<lambda>_::'a fset. natLeq" ["{||}"] fset_rel
+bnf "'a fset"
+  map: fimage
+  sets: fset 
+  bd: natLeq
+  wits: "{||}"
+  rel: fset_rel
 apply -
           apply transfer' apply simp
          apply transfer' apply force
@@ -223,7 +245,7 @@
       apply (rule natLeq_card_order)
      apply (rule natLeq_cinfinite)
     apply transfer apply (metis ordLess_imp_ordLeq finite_iff_ordLess_natLeq)
-  apply (erule wpull_fmap)
+  apply (erule wpull_fimage)
  apply (simp add: Grp_def relcompp.simps conversep.simps fun_eq_iff fset_rel_alt fset_rel_aux) 
 apply transfer apply simp
 done
@@ -235,121 +257,6 @@
 
 lemmas [simp] = fset.map_comp fset.map_id fset.set_map
 
-(* Countable sets *)
-
-lemma card_of_countable_sets_range:
-fixes A :: "'a set"
-shows "|{X. X \<subseteq> A \<and> countable X \<and> X \<noteq> {}}| \<le>o |{f::nat \<Rightarrow> 'a. range f \<subseteq> A}|"
-apply(rule card_of_ordLeqI[of from_nat_into]) using inj_on_from_nat_into
-unfolding inj_on_def by auto
-
-lemma card_of_countable_sets_Func:
-"|{X. X \<subseteq> A \<and> countable X \<and> X \<noteq> {}}| \<le>o |A| ^c natLeq"
-using card_of_countable_sets_range card_of_Func_UNIV[THEN ordIso_symmetric]
-unfolding cexp_def Field_natLeq Field_card_of
-by (rule ordLeq_ordIso_trans)
-
-lemma ordLeq_countable_subsets:
-"|A| \<le>o |{X. X \<subseteq> A \<and> countable X}|"
-apply (rule card_of_ordLeqI[of "\<lambda> a. {a}"]) unfolding inj_on_def by auto
-
-lemma finite_countable_subset:
-"finite {X. X \<subseteq> A \<and> countable X} \<longleftrightarrow> finite A"
-apply default
- apply (erule contrapos_pp)
- apply (rule card_of_ordLeq_infinite)
- apply (rule ordLeq_countable_subsets)
- apply assumption
-apply (rule finite_Collect_conjI)
-apply (rule disjI1)
-by (erule finite_Collect_subsets)
-
-lemma rcset_to_rcset: "countable A \<Longrightarrow> rcset (the_inv rcset A) = A"
-  apply (rule f_the_inv_into_f[unfolded inj_on_def image_iff])
-   apply transfer' apply simp
-  apply transfer' apply simp
-  done
-
-lemma Collect_Int_Times:
-"{(x, y). R x y} \<inter> A \<times> B = {(x, y). R x y \<and> x \<in> A \<and> y \<in> B}"
-by auto
-
-definition cset_rel :: "('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a cset \<Rightarrow> 'b cset \<Rightarrow> bool" where
-"cset_rel R a b \<longleftrightarrow>
- (\<forall>t \<in> rcset a. \<exists>u \<in> rcset b. R t u) \<and>
- (\<forall>t \<in> rcset b. \<exists>u \<in> rcset a. R u t)"
-
-lemma cset_rel_aux:
-"(\<forall>t \<in> rcset a. \<exists>u \<in> rcset b. R t u) \<and> (\<forall>t \<in> rcset b. \<exists>u \<in> rcset a. R u t) \<longleftrightarrow>
- ((Grp {x. rcset x \<subseteq> {(a, b). R a b}} (cimage fst))\<inverse>\<inverse> OO
-          Grp {x. rcset x \<subseteq> {(a, b). R a b}} (cimage snd)) a b" (is "?L = ?R")
-proof
-  assume ?L
-  def R' \<equiv> "the_inv rcset (Collect (split R) \<inter> (rcset a \<times> rcset b))"
-  (is "the_inv rcset ?L'")
-  have L: "countable ?L'" by auto
-  hence *: "rcset R' = ?L'" unfolding R'_def using fset_to_fset by (intro rcset_to_rcset)
-  thus ?R unfolding Grp_def relcompp.simps conversep.simps
-  proof (intro CollectI prod_caseI exI[of _ a] exI[of _ b] exI[of _ R'] conjI refl)
-    from * `?L` show "a = cimage fst R'" by transfer (auto simp: image_def Collect_Int_Times)
-  next
-    from * `?L` show "b = cimage snd R'" by transfer (auto simp: image_def Collect_Int_Times)
-  qed simp_all
-next
-  assume ?R thus ?L unfolding Grp_def relcompp.simps conversep.simps
-    by transfer force
-qed
-
-bnf cimage [rcset] "\<lambda>_::'a cset. natLeq" ["cempty"] cset_rel
-proof -
-  show "cimage id = id" by transfer' simp
-next
-  fix f g show "cimage (g \<circ> f) = cimage g \<circ> cimage f" by transfer' fastforce
-next
-  fix C f g assume eq: "\<And>a. a \<in> rcset C \<Longrightarrow> f a = g a"
-  thus "cimage f C = cimage g C" by transfer force
-next
-  fix f show "rcset \<circ> cimage f = op ` f \<circ> rcset" by transfer' fastforce
-next
-  show "card_order natLeq" by (rule natLeq_card_order)
-next
-  show "cinfinite natLeq" by (rule natLeq_cinfinite)
-next
-  fix C show "|rcset C| \<le>o natLeq" by transfer (unfold countable_card_le_natLeq)
-next
-  fix A B1 B2 f1 f2 p1 p2
-  assume wp: "wpull A B1 B2 f1 f2 p1 p2"
-  show "wpull {x. rcset x \<subseteq> A} {x. rcset x \<subseteq> B1} {x. rcset x \<subseteq> B2}
-              (cimage f1) (cimage f2) (cimage p1) (cimage p2)"
-  unfolding wpull_def proof safe
-    fix y1 y2
-    assume Y1: "rcset y1 \<subseteq> B1" and Y2: "rcset y2 \<subseteq> B2"
-    assume "cimage f1 y1 = cimage f2 y2"
-    hence EQ: "f1 ` (rcset y1) = f2 ` (rcset y2)" by transfer
-    with Y1 Y2 obtain X where X: "X \<subseteq> A"
-    and Y1: "p1 ` X = rcset y1" and Y2: "p2 ` X = rcset y2"
-    using wpull_image[OF wp] unfolding wpull_def Pow_def Bex_def mem_Collect_eq
-      by (auto elim!: allE[of _ "rcset y1"] allE[of _ "rcset y2"])
-    have "\<forall> y1' \<in> rcset y1. \<exists> x. x \<in> X \<and> y1' = p1 x" using Y1 by auto
-    then obtain q1 where q1: "\<forall> y1' \<in> rcset y1. q1 y1' \<in> X \<and> y1' = p1 (q1 y1')" by metis
-    have "\<forall> y2' \<in> rcset y2. \<exists> x. x \<in> X \<and> y2' = p2 x" using Y2 by auto
-    then obtain q2 where q2: "\<forall> y2' \<in> rcset y2. q2 y2' \<in> X \<and> y2' = p2 (q2 y2')" by metis
-    def X' \<equiv> "q1 ` (rcset y1) \<union> q2 ` (rcset y2)"
-    have X': "X' \<subseteq> A" and Y1: "p1 ` X' = rcset y1" and Y2: "p2 ` X' = rcset y2"
-    using X Y1 Y2 q1 q2 unfolding X'_def by fast+
-    have fX': "countable X'" unfolding X'_def by simp
-    then obtain x where X'eq: "X' = rcset x" by transfer blast
-    show "\<exists>x\<in>{x. rcset x \<subseteq> A}. cimage p1 x = y1 \<and> cimage p2 x = y2"
-      using X' Y1 Y2 unfolding X'eq by (intro bexI[of _ "x"]) (transfer, auto)
-  qed
-next
-  fix R
-  show "cset_rel R =
-        (Grp {x. rcset x \<subseteq> Collect (split R)} (cimage fst))\<inverse>\<inverse> OO
-         Grp {x. rcset x \<subseteq> Collect (split R)} (cimage snd)"
-  unfolding cset_rel_def[abs_def] cset_rel_aux by simp
-qed (transfer, simp)
-
 
 (* Multisets *)
 
@@ -874,22 +781,26 @@
   by transfer
     (auto intro!: ordLess_imp_ordLeq simp: finite_iff_ordLess_natLeq[symmetric] multiset_def)
 
-bnf mmap [set_of] "\<lambda>_::'a multiset. natLeq" ["{#}"]
+bnf "'a multiset"
+  map: mmap
+  sets: set_of 
+  bd: natLeq
+  wits: "{#}"
 by (auto simp add: mmap_id0 mmap_comp set_of_mmap natLeq_card_order natLeq_cinfinite set_of_bd
   intro: mmap_cong wpull_mmap)
 
-inductive multiset_rel' where
-Zero: "multiset_rel' R {#} {#}"
+inductive rel_multiset' where
+Zero: "rel_multiset' R {#} {#}"
 |
-Plus: "\<lbrakk>R a b; multiset_rel' R M N\<rbrakk> \<Longrightarrow> multiset_rel' R (M + {#a#}) (N + {#b#})"
+Plus: "\<lbrakk>R a b; rel_multiset' R M N\<rbrakk> \<Longrightarrow> rel_multiset' R (M + {#a#}) (N + {#b#})"
 
-lemma multiset_map_Zero_iff[simp]: "mmap f M = {#} \<longleftrightarrow> M = {#}"
+lemma map_multiset_Zero_iff[simp]: "mmap f M = {#} \<longleftrightarrow> M = {#}"
 by (metis image_is_empty multiset.set_map set_of_eq_empty_iff)
 
-lemma multiset_map_Zero[simp]: "mmap f {#} = {#}" by simp
+lemma map_multiset_Zero[simp]: "mmap f {#} = {#}" by simp
 
-lemma multiset_rel_Zero: "multiset_rel R {#} {#}"
-unfolding multiset_rel_def Grp_def by auto
+lemma rel_multiset_Zero: "rel_multiset R {#} {#}"
+unfolding rel_multiset_def Grp_def by auto
 
 declare multiset.count[simp]
 declare Abs_multiset_inverse[simp]
@@ -897,7 +808,7 @@
 declare union_preserves_multiset[simp]
 
 
-lemma multiset_map_Plus[simp]: "mmap f (M1 + M2) = mmap f M1 + mmap f M2"
+lemma map_multiset_Plus[simp]: "mmap f (M1 + M2) = mmap f M1 + mmap f M2"
 proof (intro multiset_eqI, transfer fixing: f)
   fix x :: 'a and M1 M2 :: "'b \<Rightarrow> nat"
   assume "M1 \<in> multiset" "M2 \<in> multiset"
@@ -910,12 +821,12 @@
     by (auto simp: setsum.distrib[symmetric])
 qed
 
-lemma multiset_map_singl[simp]: "mmap f {#a#} = {#f a#}"
+lemma map_multiset_singl[simp]: "mmap f {#a#} = {#f a#}"
   by transfer auto
 
-lemma multiset_rel_Plus:
-assumes ab: "R a b" and MN: "multiset_rel R M N"
-shows "multiset_rel R (M + {#a#}) (N + {#b#})"
+lemma rel_multiset_Plus:
+assumes ab: "R a b" and MN: "rel_multiset R M N"
+shows "rel_multiset R (M + {#a#}) (N + {#b#})"
 proof-
   {fix y assume "R a b" and "set_of y \<subseteq> {(x, y). R x y}"
    hence "\<exists>ya. mmap fst y + {#a#} = mmap fst ya \<and>
@@ -925,13 +836,13 @@
   }
   thus ?thesis
   using assms
-  unfolding multiset_rel_def Grp_def by force
+  unfolding rel_multiset_def Grp_def by force
 qed
 
-lemma multiset_rel'_imp_multiset_rel:
-"multiset_rel' R M N \<Longrightarrow> multiset_rel R M N"
-apply(induct rule: multiset_rel'.induct)
-using multiset_rel_Zero multiset_rel_Plus by auto
+lemma rel_multiset'_imp_rel_multiset:
+"rel_multiset' R M N \<Longrightarrow> rel_multiset R M N"
+apply(induct rule: rel_multiset'.induct)
+using rel_multiset_Zero rel_multiset_Plus by auto
 
 lemma mcard_mmap[simp]: "mcard (mmap f M) = mcard M"
 proof -
@@ -942,8 +853,7 @@
   using finite_Collect_mem .
   ultimately have fin: "finite {b. \<exists>a. f a = b \<and> a \<in># M}" by(rule finite_subset)
   have i: "inj_on A ?B" unfolding inj_on_def A_def apply clarsimp
-  by (metis (lifting, mono_tags) mem_Collect_eq rel_simps(54)
-                                 setsum_gt_0_iff setsum_infinite)
+    by (metis (lifting, full_types) mem_Collect_eq neq0_conv setsum.neutral)
   have 0: "\<And> b. 0 < setsum (count M) (A b) \<longleftrightarrow> (\<exists> a \<in> A b. count M a > 0)"
   apply safe
     apply (metis less_not_refl setsum_gt_0_iff setsum_infinite)
@@ -964,10 +874,10 @@
   then show ?thesis unfolding mcard_unfold_setsum A_def by transfer
 qed
 
-lemma multiset_rel_mcard:
-assumes "multiset_rel R M N"
+lemma rel_multiset_mcard:
+assumes "rel_multiset R M N"
 shows "mcard M = mcard N"
-using assms unfolding multiset_rel_def Grp_def by auto
+using assms unfolding rel_multiset_def Grp_def by auto
 
 lemma multiset_induct2[case_names empty addL addR]:
 assumes empty: "P {#} {#}"
@@ -1022,68 +932,67 @@
 qed
 
 lemma msed_rel_invL:
-assumes "multiset_rel R (M + {#a#}) N"
-shows "\<exists> N1 b. N = N1 + {#b#} \<and> R a b \<and> multiset_rel R M N1"
+assumes "rel_multiset R (M + {#a#}) N"
+shows "\<exists> N1 b. N = N1 + {#b#} \<and> R a b \<and> rel_multiset R M N1"
 proof-
   obtain K where KM: "mmap fst K = M + {#a#}"
   and KN: "mmap snd K = N" and sK: "set_of K \<subseteq> {(a, b). R a b}"
   using assms
-  unfolding multiset_rel_def Grp_def by auto
+  unfolding rel_multiset_def Grp_def by auto
   obtain K1 ab where K: "K = K1 + {#ab#}" and a: "fst ab = a"
   and K1M: "mmap fst K1 = M" using msed_map_invR[OF KM] by auto
   obtain N1 where N: "N = N1 + {#snd ab#}" and K1N1: "mmap snd K1 = N1"
   using msed_map_invL[OF KN[unfolded K]] by auto
   have Rab: "R a (snd ab)" using sK a unfolding K by auto
-  have "multiset_rel R M N1" using sK K1M K1N1
-  unfolding K multiset_rel_def Grp_def by auto
+  have "rel_multiset R M N1" using sK K1M K1N1
+  unfolding K rel_multiset_def Grp_def by auto
   thus ?thesis using N Rab by auto
 qed
 
 lemma msed_rel_invR:
-assumes "multiset_rel R M (N + {#b#})"
-shows "\<exists> M1 a. M = M1 + {#a#} \<and> R a b \<and> multiset_rel R M1 N"
+assumes "rel_multiset R M (N + {#b#})"
+shows "\<exists> M1 a. M = M1 + {#a#} \<and> R a b \<and> rel_multiset R M1 N"
 proof-
   obtain K where KN: "mmap snd K = N + {#b#}"
   and KM: "mmap fst K = M" and sK: "set_of K \<subseteq> {(a, b). R a b}"
   using assms
-  unfolding multiset_rel_def Grp_def by auto
+  unfolding rel_multiset_def Grp_def by auto
   obtain K1 ab where K: "K = K1 + {#ab#}" and b: "snd ab = b"
   and K1N: "mmap snd K1 = N" using msed_map_invR[OF KN] by auto
   obtain M1 where M: "M = M1 + {#fst ab#}" and K1M1: "mmap fst K1 = M1"
   using msed_map_invL[OF KM[unfolded K]] by auto
   have Rab: "R (fst ab) b" using sK b unfolding K by auto
-  have "multiset_rel R M1 N" using sK K1N K1M1
-  unfolding K multiset_rel_def Grp_def by auto
+  have "rel_multiset R M1 N" using sK K1N K1M1
+  unfolding K rel_multiset_def Grp_def by auto
   thus ?thesis using M Rab by auto
 qed
 
-lemma multiset_rel_imp_multiset_rel':
-assumes "multiset_rel R M N"
-shows "multiset_rel' R M N"
+lemma rel_multiset_imp_rel_multiset':
+assumes "rel_multiset R M N"
+shows "rel_multiset' R M N"
 using assms proof(induct M arbitrary: N rule: measure_induct_rule[of mcard])
   case (less M)
-  have c: "mcard M = mcard N" using multiset_rel_mcard[OF less.prems] .
+  have c: "mcard M = mcard N" using rel_multiset_mcard[OF less.prems] .
   show ?case
   proof(cases "M = {#}")
     case True hence "N = {#}" using c by simp
-    thus ?thesis using True multiset_rel'.Zero by auto
+    thus ?thesis using True rel_multiset'.Zero by auto
   next
     case False then obtain M1 a where M: "M = M1 + {#a#}" by (metis multi_nonempty_split)
-    obtain N1 b where N: "N = N1 + {#b#}" and R: "R a b" and ms: "multiset_rel R M1 N1"
+    obtain N1 b where N: "N = N1 + {#b#}" and R: "R a b" and ms: "rel_multiset R M1 N1"
     using msed_rel_invL[OF less.prems[unfolded M]] by auto
-    have "multiset_rel' R M1 N1" using less.hyps[of M1 N1] ms unfolding M by simp
-    thus ?thesis using multiset_rel'.Plus[of R a b, OF R] unfolding M N by simp
+    have "rel_multiset' R M1 N1" using less.hyps[of M1 N1] ms unfolding M by simp
+    thus ?thesis using rel_multiset'.Plus[of R a b, OF R] unfolding M N by simp
   qed
 qed
 
-lemma multiset_rel_multiset_rel':
-"multiset_rel R M N = multiset_rel' R M N"
-using  multiset_rel_imp_multiset_rel' multiset_rel'_imp_multiset_rel by auto
+lemma rel_multiset_rel_multiset':
+"rel_multiset R M N = rel_multiset' R M N"
+using  rel_multiset_imp_rel_multiset' rel_multiset'_imp_rel_multiset by auto
 
-(* The main end product for multiset_rel: inductive characterization *)
-theorems multiset_rel_induct[case_names empty add, induct pred: multiset_rel] =
-         multiset_rel'.induct[unfolded multiset_rel_multiset_rel'[symmetric]]
-
+(* The main end product for rel_multiset: inductive characterization *)
+theorems rel_multiset_induct[case_names empty add, induct pred: rel_multiset] =
+         rel_multiset'.induct[unfolded rel_multiset_rel_multiset'[symmetric]]
 
 
 (* Advanced relator customization *)
@@ -1153,5 +1062,4 @@
   qed
 qed
 
-
 end
--- a/src/HOL/BNF/README.html	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/README.html	Thu Dec 05 17:58:03 2013 +0100
@@ -20,7 +20,8 @@
 possibly infinite depth. The framework draws heavily from category theory.
 
 <p>
-The package is described in the following paper:
+The package is described in <tt>isabelle doc datatypes</tt> and in the following
+paper:
 
 <ul>
   <li><a href="http://www21.in.tum.de/~traytel/papers/lics12-codatatypes/index.html">Foundational, Compositional (Co)datatypes for Higher-Order Logic&mdash;Category Theory Applied to Theorem Proving</a>, <br>
@@ -37,17 +38,10 @@
 The key notion underlying the package is that of a <i>bounded natural functor</i>
 (<i>BNF</i>)&mdash;an enriched type constructor satisfying specific properties
 preserved by interesting categorical operations (composition, least fixed point,
-and greatest fixed point). The <tt>Basic_BNFs.thy</tt> and <tt>More_BNFs.thy</tt>
-files register various basic types, notably for sums, products, function spaces,
-finite sets, multisets, and countable sets. Custom BNFs can be registered as well.
-
-<p>
-<b>Warning:</b> The package is under development. Please contact any nonempty
-subset of
-<a href="mailto:traytel@in.tum.de">the</a>
-<a href="mailto:popescua@in.tum.de">above</a>
-<a href="mailto:blanchette@in.tum.de">authors</a>
-if you have questions or comments.
+and greatest fixed point). The <tt>Basic_BNFs.thy</tt>, <tt>More_BNFs.thy</tt>,
+and <tt>Countable_Set_Type.thy</tt> files register various basic types, notably
+for sums, products, function spaces, finite sets, multisets, and countable sets.
+Custom BNFs can be registered as well.
 
 </body>
 
--- a/src/HOL/BNF/Tools/bnf_comp.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_comp.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -147,7 +147,7 @@
     val (sets, sets_alt) = map_split mk_set (0 upto ilive - 1);
 
     (*(inner_1.bd +c ... +c inner_m.bd) *c outer.bd*)
-    val bd = Term.absdummy CCA (mk_cprod (Library.foldr1 (uncurry mk_csum) inner_bds) outer_bd);
+    val bd = mk_cprod (Library.foldr1 (uncurry mk_csum) inner_bds) outer_bd;
 
     fun map_id0_tac _ =
       mk_comp_map_id0_tac (map_id0_of_bnf outer) (map_cong0_of_bnf outer)
@@ -257,7 +257,7 @@
 
     val (bnf', lthy') =
       bnf_def const_policy (K Dont_Note) qualify tacs wit_tac (SOME (oDs @ flat Dss)) Binding.empty
-        Binding.empty [] (((((b, mapx), sets), bd), wits), SOME rel) lthy;
+        Binding.empty [] ((((((b, CCA), mapx), sets), bd), wits), SOME rel) lthy;
   in
     (bnf', (add_bnf_to_unfolds bnf' unfold_set, lthy'))
   end;
@@ -351,7 +351,7 @@
 
     val (bnf', lthy') =
       bnf_def Smart_Inline (K Dont_Note) qualify tacs wit_tac (SOME (killedAs @ Ds)) Binding.empty
-        Binding.empty [] (((((b, mapx), sets), Term.absdummy T bd), wits), SOME rel) lthy;
+        Binding.empty [] ((((((b, T), mapx), sets), bd), wits), SOME rel) lthy;
   in
     (bnf', (add_bnf_to_unfolds bnf' unfold_set, lthy'))
   end;
@@ -433,7 +433,7 @@
 
     val (bnf', lthy') =
       bnf_def Smart_Inline (K Dont_Note) qualify tacs wit_tac (SOME Ds) Binding.empty Binding.empty
-        [] (((((b, mapx), sets), Term.absdummy T bd), wits), SOME rel) lthy;
+        [] ((((((b, T), mapx), sets), bd), wits), SOME rel) lthy;
   in
     (bnf', (add_bnf_to_unfolds bnf' unfold_set, lthy'))
   end;
@@ -506,7 +506,7 @@
 
     val (bnf', lthy') =
       bnf_def Smart_Inline (K Dont_Note) qualify tacs wit_tac (SOME Ds) Binding.empty Binding.empty
-        [] (((((b, mapx), sets), Term.absdummy T bd), wits), SOME rel) lthy;
+        [] ((((((b, T), mapx), sets), bd), wits), SOME rel) lthy;
   in
     (bnf', (add_bnf_to_unfolds bnf' unfold_set, lthy'))
   end;
@@ -643,7 +643,7 @@
     val (bnf', lthy') =
       bnf_def Hardly_Inline (user_policy Dont_Note) qualify tacs wit_tac (SOME deads)
         Binding.empty Binding.empty []
-        (((((b, bnf_map), bnf_sets), Term.absdummy T bnf_bd'), bnf_wits), SOME bnf_rel) lthy;
+        ((((((b, T), bnf_map), bnf_sets), bnf_bd'), bnf_wits), SOME bnf_rel) lthy;
   in
     ((bnf', deads), lthy')
   end;
--- a/src/HOL/BNF/Tools/bnf_comp_tactics.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_comp_tactics.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -164,10 +164,9 @@
 fun mk_comp_wit_tac ctxt Gwit_thms collect_set_map Fwit_thms =
   ALLGOALS (dtac @{thm in_Union_o_assoc}) THEN
   unfold_thms_tac ctxt (collect_set_map :: comp_wit_thms) THEN
-  REPEAT_DETERM (
-    atac 1 ORELSE
-    REPEAT_DETERM (eresolve_tac @{thms UnionE UnE imageE} 1) THEN
-    (TRY o dresolve_tac Gwit_thms THEN'
+  REPEAT_DETERM ((atac ORELSE'
+    REPEAT_DETERM o eresolve_tac @{thms UnionE UnE} THEN'
+    etac imageE THEN' TRY o dresolve_tac Gwit_thms THEN'
     (etac FalseE ORELSE'
     hyp_subst_tac ctxt THEN'
     dresolve_tac Fwit_thms THEN'
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/BNF/Tools/bnf_decl.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,96 @@
+(*  Title:      HOL/BNF/Tools/bnf_decl.ML
+    Author:     Dmitriy Traytel, TU Muenchen
+    Copyright   2013
+
+Axiomatic declaration of bounded natural functors.
+*)
+
+signature BNF_DECL =
+sig
+  val bnf_decl: (binding option * (typ * sort)) list -> binding -> mixfix -> binding -> binding ->
+    local_theory -> BNF_Def.bnf * local_theory
+end
+
+structure BNF_Decl : BNF_DECL =
+struct
+
+open BNF_Util
+open BNF_Def
+
+fun prepare_decl prepare_constraint prepare_typ raw_vars b mx user_mapb user_relb lthy =
+  let
+   fun prepare_type_arg (set_opt, (ty, c)) =
+      let val s = fst (dest_TFree (prepare_typ lthy ty)) in
+        (set_opt, (s, prepare_constraint lthy c))
+      end;
+    val ((user_setbs, vars), raw_vars') =
+      map prepare_type_arg raw_vars
+      |> `split_list
+      |>> apfst (map_filter I);
+    val deads = map_filter (fn (NONE, x) => SOME x | _ => NONE) raw_vars';
+
+    fun mk_b name user_b =
+      (if Binding.is_empty user_b then Binding.prefix_name (name ^ "_") b else user_b)
+      |> Binding.qualify false (Binding.name_of b);
+    val (Tname, lthy) = Typedecl.basic_typedecl (b, length vars, mx) lthy;
+    val (bd_type_Tname, lthy) =
+      Typedecl.basic_typedecl (mk_b "bd_type" Binding.empty, length deads, NoSyn) lthy;
+    val T = Type (Tname, map TFree vars);
+    val bd_type_T = Type (bd_type_Tname, map TFree deads);
+    val lives = map TFree (filter_out (member (op =) deads) vars);
+    val live = length lives;
+    val _ = "Trying to declare a BNF with no live variables" |> null lives ? error;
+    val (lives', _) = BNF_Util.mk_TFrees (length lives)
+      (fold Variable.declare_typ (map TFree vars) lthy);
+    val T' = Term.typ_subst_atomic (lives ~~ lives') T;
+    val mapT = map2 (curry op -->) lives lives' ---> T --> T';
+    val setTs = map (fn U => T --> HOLogic.mk_setT U) lives;
+    val bdT = BNF_Util.mk_relT (bd_type_T, bd_type_T);
+    val mapb = mk_b BNF_Def.mapN user_mapb;
+    val bdb = mk_b "bd" Binding.empty;
+    val setbs = map2 (fn b => fn i => mk_b (BNF_Def.mk_setN i) b) user_setbs
+      (if live = 1 then [0] else 1 upto live);
+    val lthy = Local_Theory.background_theory
+      (Sign.add_consts_i ((mapb, mapT, NoSyn) :: (bdb, bdT, NoSyn) ::
+        map2 (fn b => fn T => (b, T, NoSyn)) setbs setTs))
+      lthy;
+    val Fmap = Const (Local_Theory.full_name lthy mapb, mapT);
+    val Fsets = map2 (fn setb => fn setT =>
+      Const (Local_Theory.full_name lthy setb, setT)) setbs setTs;
+    val Fbd = Const (Local_Theory.full_name lthy bdb, bdT);
+    val (key, goals, (triv_tac_opt, wit_goalss), after_qed, lthy, _) =
+      prepare_def Do_Inline (user_policy Note_Some) I (K I) (K I) (SOME (map TFree deads))
+      user_mapb user_relb user_setbs ((((((Binding.empty, T), Fmap), Fsets), Fbd), []), NONE) lthy;
+
+    fun mk_wits_tac set_maps = K (TRYALL Goal.conjunction_tac) THEN' the triv_tac_opt set_maps;
+    val wit_goals = map Logic.mk_conjunction_balanced wit_goalss;
+    fun mk_wit_thms set_maps =
+      Goal.prove_sorry lthy [] [] (Logic.mk_conjunction_balanced wit_goals) (mk_wits_tac set_maps)
+        |> Conjunction.elim_balanced (length wit_goals)
+        |> map2 (Conjunction.elim_balanced o length) wit_goalss
+        |> map (map (Thm.close_derivation o Thm.forall_elim_vars 0));
+
+    val ((_, [thms]), (lthy_old, lthy)) = Local_Theory.background_theory_result
+      (Specification.axiomatization [] [((mk_b "axioms" Binding.empty, []), goals)]) lthy
+      ||> `Local_Theory.restore;
+    val phi = Proof_Context.export_morphism lthy_old lthy;
+  in
+    BNF_Def.register_bnf key (after_qed mk_wit_thms (map single  (Morphism.fact phi thms)) lthy)
+  end;
+
+val bnf_decl = prepare_decl (K I) (K I);
+
+fun read_constraint _ NONE = HOLogic.typeS
+  | read_constraint ctxt (SOME s) = Syntax.read_sort ctxt s;
+
+val bnf_decl_cmd = prepare_decl read_constraint Syntax.parse_typ;
+
+val parse_bnf_decl =
+  parse_type_args_named_constrained -- parse_binding -- parse_map_rel_bindings -- Parse.opt_mixfix;
+
+val _ =
+  Outer_Syntax.local_theory @{command_spec "bnf_decl"} "bnf declaration"
+    (parse_bnf_decl >> 
+      (fn (((bsTs, b), (mapb, relb)), mx) => bnf_decl_cmd bsTs b mx mapb relb #> snd));
+
+end;
--- a/src/HOL/BNF/Tools/bnf_def.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_def.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -77,14 +77,20 @@
   val wit_thms_of_bnf: bnf -> thm list
   val wit_thmss_of_bnf: bnf -> thm list list
 
+  val mk_map: int -> typ list -> typ list -> term -> term
+  val mk_rel: int -> typ list -> typ list -> term -> term
+  val build_map: Proof.context -> (typ * typ -> term) -> typ * typ -> term
+  val build_rel: Proof.context -> (typ * typ -> term) -> typ * typ -> term
+  val flatten_type_args_of_bnf: bnf -> 'a -> 'a list -> 'a list
+  val map_flattened_map_args: Proof.context -> string -> (term list -> 'a list) -> term list ->
+    'a list
+
   val mk_witness: int list * term -> thm list -> nonemptiness_witness
   val minimize_wits: (''a list * 'b) list -> (''a list * 'b) list
   val wits_of_bnf: bnf -> nonemptiness_witness list
 
   val zip_axioms: 'a -> 'a -> 'a -> 'a list -> 'a -> 'a -> 'a list -> 'a -> 'a -> 'a list
 
-  val flatten_type_args_of_bnf: bnf -> 'a -> 'a list -> 'a list
-
   datatype const_policy = Dont_Inline | Hardly_Inline | Smart_Inline | Do_Inline
   datatype fact_policy = Dont_Note | Note_Some | Note_All
 
@@ -95,11 +101,20 @@
     Proof.context
 
   val print_bnfs: Proof.context -> unit
+  val prepare_def: const_policy -> (Proof.context -> fact_policy) -> (binding -> binding) ->
+    (Proof.context -> 'a -> typ) -> (Proof.context -> 'b -> term) -> typ list option ->
+    binding -> binding -> binding list ->
+    (((((binding * 'a) * 'b) * 'b list) * 'b) * 'b list) * 'b option -> Proof.context ->
+    string * term list *
+    ((thm list -> {context: Proof.context, prems: thm list} -> tactic) option * term list list) *
+    ((thm list -> thm list list) -> thm list list -> Proof.context -> bnf * local_theory) *
+    local_theory * thm list
+
   val bnf_def: const_policy -> (Proof.context -> fact_policy) -> (binding -> binding) ->
     ({prems: thm list, context: Proof.context} -> tactic) list ->
     ({prems: thm list, context: Proof.context} -> tactic) -> typ list option -> binding ->
     binding -> binding list ->
-    ((((binding * term) * term list) * term) * term list) * term option ->
+    (((((binding * typ) * term) * term list) * term) * term list) * term option ->
     local_theory -> bnf * local_theory
 end;
 
@@ -110,7 +125,7 @@
 open BNF_Tactics
 open BNF_Def_Tactics
 
-val fundef_cong_attrs = @{attributes [fundef_cong]};
+val fundefcong_attrs = @{attributes [fundef_cong]};
 
 type axioms = {
   map_id0: thm,
@@ -447,7 +462,6 @@
   #> Option.map (morph_bnf (Morphism.thm_morphism (Thm.transfer (Proof_Context.theory_of ctxt))));
 
 
-
 (* Utilities *)
 
 fun normalize_set insts instA set =
@@ -487,6 +501,46 @@
        else minimize ((I, wit) :: done) todo;
  in minimize [] wits end;
 
+fun mk_map live Ts Us t =
+  let val (Type (_, Ts0), Type (_, Us0)) = strip_typeN (live + 1) (fastype_of t) |>> List.last in
+    Term.subst_atomic_types (Ts0 @ Us0 ~~ Ts @ Us) t
+  end;
+
+fun mk_rel live Ts Us t =
+  let val [Type (_, Ts0), Type (_, Us0)] = binder_types (snd (strip_typeN live (fastype_of t))) in
+    Term.subst_atomic_types (Ts0 @ Us0 ~~ Ts @ Us) t
+  end;
+
+fun build_map_or_rel mk const of_bnf dest ctxt build_simple =
+  let
+    fun build (TU as (T, U)) =
+      if T = U then
+        const T
+      else
+        (case TU of
+          (Type (s, Ts), Type (s', Us)) =>
+          if s = s' then
+            let
+              val bnf = the (bnf_of ctxt s);
+              val live = live_of_bnf bnf;
+              val mapx = mk live Ts Us (of_bnf bnf);
+              val TUs' = map dest (fst (strip_typeN live (fastype_of mapx)));
+            in Term.list_comb (mapx, map build TUs') end
+          else
+            build_simple TU
+        | _ => build_simple TU);
+  in build end;
+
+val build_map = build_map_or_rel mk_map HOLogic.id_const map_of_bnf dest_funT;
+val build_rel = build_map_or_rel mk_rel HOLogic.eq_const rel_of_bnf dest_pred2T;
+
+fun map_flattened_map_args ctxt s map_args fs =
+  let
+    val flat_fs = flatten_type_args_of_bnf (the (bnf_of ctxt s)) Term.dummy fs;
+    val flat_fs' = map_args flat_fs;
+  in
+    permute_like (op aconv) flat_fs fs flat_fs'
+  end;
 
 
 (* Names *)
@@ -525,8 +579,8 @@
 val rel_conversepN = "rel_conversep";
 val rel_monoN = "rel_mono"
 val rel_mono_strongN = "rel_mono_strong"
-val rel_OON = "rel_compp";
-val rel_OO_GrpN = "rel_compp_Grp";
+val rel_comppN = "rel_compp";
+val rel_compp_GrpN = "rel_compp_Grp";
 
 datatype const_policy = Dont_Inline | Hardly_Inline | Smart_Inline | Do_Inline;
 
@@ -582,16 +636,16 @@
           val notes =
             [(map_compN, [Lazy.force (#map_comp facts)], []),
             (map_cong0N, [#map_cong0 axioms], []),
-            (map_congN, [Lazy.force (#map_cong facts)], fundef_cong_attrs),
+            (map_congN, [Lazy.force (#map_cong facts)], fundefcong_attrs),
             (map_idN, [Lazy.force (#map_id facts)], []),
+            (rel_comppN, [Lazy.force (#rel_OO facts)], []),
+            (rel_compp_GrpN, no_refl [#rel_OO_Grp axioms], []),
+            (rel_conversepN, [Lazy.force (#rel_conversep facts)], []),
             (rel_eqN, [Lazy.force (#rel_eq facts)], []),
             (rel_flipN, [Lazy.force (#rel_flip facts)], []),
-            (set_mapN, map Lazy.force (#set_map facts), []),
-            (rel_OO_GrpN, no_refl [#rel_OO_Grp axioms], []),
             (rel_GrpN, [Lazy.force (#rel_Grp facts)], []),
-            (rel_conversepN, [Lazy.force (#rel_conversep facts)], []),
             (rel_monoN, [Lazy.force (#rel_mono facts)], []),
-            (rel_OON, [Lazy.force (#rel_OO facts)], [])]
+            (set_mapN, map Lazy.force (#set_map facts), [])]
             |> filter_out (null o #2)
             |> map (fn (thmN, thms, attrs) =>
               ((qualify (Binding.qualify true (Binding.name_of bnf_b) (Binding.name thmN)),
@@ -606,20 +660,18 @@
 
 (* Define new BNFs *)
 
-fun prepare_def const_policy mk_fact_policy qualify prep_term Ds_opt map_b rel_b set_bs
-  (((((raw_bnf_b, raw_map), raw_sets), raw_bd_Abs), raw_wits), raw_rel_opt) no_defs_lthy =
+fun prepare_def const_policy mk_fact_policy qualify prep_typ prep_term Ds_opt map_b rel_b set_bs
+  ((((((raw_bnf_b, raw_bnf_T), raw_map), raw_sets), raw_bd), raw_wits), raw_rel_opt)
+  no_defs_lthy =
   let
     val fact_policy = mk_fact_policy no_defs_lthy;
     val bnf_b = qualify raw_bnf_b;
     val live = length raw_sets;
-    val nwits = length raw_wits;
 
+    val T_rhs = prep_typ no_defs_lthy raw_bnf_T;
     val map_rhs = prep_term no_defs_lthy raw_map;
     val set_rhss = map (prep_term no_defs_lthy) raw_sets;
-    val (bd_rhsT, bd_rhs) = (case prep_term no_defs_lthy raw_bd_Abs of
-      Abs (_, T, t) => (T, t)
-    | _ => error "Bad bound constant");
-    val wit_rhss = map (prep_term no_defs_lthy) raw_wits;
+    val bd_rhs = prep_term no_defs_lthy raw_bd;
 
     fun err T =
       error ("Trying to register the type " ^ quote (Syntax.string_of_typ no_defs_lthy T) ^
@@ -627,15 +679,15 @@
 
     val (bnf_b, key) =
       if Binding.eq_name (bnf_b, Binding.empty) then
-        (case bd_rhsT of
+        (case T_rhs of
           Type (C, Ts) => if forall (can dest_TFree) Ts
-            then (Binding.qualified_name C, C) else err bd_rhsT
+            then (Binding.qualified_name C, C) else err T_rhs
         | T => err T)
       else (bnf_b, Local_Theory.full_name no_defs_lthy bnf_b);
 
-    val def_qualify = Binding.qualify false (Binding.name_of bnf_b);
+    val def_qualify = Binding.conceal o Binding.qualify false (Binding.name_of bnf_b);
 
-    fun mk_suffix_binding suf = Binding.suffix_name ("_" ^ suf) bnf_b;
+    fun mk_prefix_binding pre = Binding.prefix_name (pre ^ "_") bnf_b;
 
     fun maybe_define user_specified (b, rhs) lthy =
       let
@@ -660,7 +712,7 @@
       lthy |> not (pointer_eq (lthy_old, lthy)) ? Local_Theory.restore;
 
     val map_bind_def =
-      (fn () => def_qualify (if Binding.is_empty map_b then mk_suffix_binding mapN else map_b),
+      (fn () => def_qualify (if Binding.is_empty map_b then mk_prefix_binding mapN else map_b),
          map_rhs);
     val set_binds_defs =
       let
@@ -668,25 +720,18 @@
           (case try (nth set_bs) (i - 1) of
             SOME b => if Binding.is_empty b then get_b else K b
           | NONE => get_b) #> def_qualify;
-        val bs = if live = 1 then [set_name 1 (fn () => mk_suffix_binding setN)]
-          else map (fn i => set_name i (fn () => mk_suffix_binding (mk_setN i))) (1 upto live);
+        val bs = if live = 1 then [set_name 1 (fn () => mk_prefix_binding setN)]
+          else map (fn i => set_name i (fn () => mk_prefix_binding (mk_setN i))) (1 upto live);
       in bs ~~ set_rhss end;
-    val bd_bind_def = (fn () => def_qualify (mk_suffix_binding bdN), bd_rhs);
-    val wit_binds_defs =
-      let
-        val bs = if nwits = 1 then [fn () => def_qualify (mk_suffix_binding witN)]
-          else map (fn i => fn () => def_qualify (mk_suffix_binding (mk_witN i))) (1 upto nwits);
-      in bs ~~ wit_rhss end;
+    val bd_bind_def = (fn () => def_qualify (mk_prefix_binding bdN), bd_rhs);
 
-    val (((((bnf_map_term, raw_map_def),
+    val ((((bnf_map_term, raw_map_def),
       (bnf_set_terms, raw_set_defs)),
-      (bnf_bd_term, raw_bd_def)),
-      (bnf_wit_terms, raw_wit_defs)), (lthy, lthy_old)) =
+      (bnf_bd_term, raw_bd_def)), (lthy, lthy_old)) =
         no_defs_lthy
         |> maybe_define true map_bind_def
         ||>> apfst split_list o fold_map (maybe_define true) set_binds_defs
         ||>> maybe_define true bd_bind_def
-        ||>> apfst split_list o fold_map (maybe_define true) wit_binds_defs
         ||> `(maybe_restore no_defs_lthy);
 
     val phi = Proof_Context.export_morphism lthy_old lthy;
@@ -694,7 +739,6 @@
     val bnf_map_def = Morphism.thm phi raw_map_def;
     val bnf_set_defs = map (Morphism.thm phi) raw_set_defs;
     val bnf_bd_def = Morphism.thm phi raw_bd_def;
-    val bnf_wit_defs = map (Morphism.thm phi) raw_wit_defs;
 
     val bnf_map = Morphism.term phi bnf_map_term;
 
@@ -709,11 +753,14 @@
 
     val CA_params = map TVar (Term.add_tvarsT CA []);
 
+    val bnf_T = Morphism.typ phi T_rhs;
+    val bad_args = Term.add_tfreesT bnf_T [];
+    val _ = if null bad_args then () else error ("Locally fixed type arguments " ^
+      commas_quote (map (Syntax.string_of_typ no_defs_lthy o TFree) bad_args));
+
     val bnf_sets = map2 (normalize_set CA_params) alphas (map (Morphism.term phi) bnf_set_terms);
-    val bdT = Morphism.typ phi bd_rhsT;
     val bnf_bd =
-      Term.subst_TVars (Term.add_tvar_namesT bdT [] ~~ CA_params) (Morphism.term phi bnf_bd_term);
-    val bnf_wits = map (normalize_wit CA_params CA alphas o Morphism.term phi) bnf_wit_terms;
+      Term.subst_TVars (Term.add_tvar_namesT bnf_T [] ~~ CA_params) (Morphism.term phi bnf_bd_term);
 
     (*TODO: assert Ds = (TVars of bnf_map) \ (alphas @ betas) as sets*)
     val deads = (case Ds_opt of
@@ -770,7 +817,6 @@
     val bnf_sets_As = map (mk_bnf_t As') bnf_sets;
     val bnf_sets_Bs = map (mk_bnf_t Bs') bnf_sets;
     val bnf_bd_As = mk_bnf_t As' bnf_bd;
-    val bnf_wit_As = map (apsnd (mk_bnf_t As')) bnf_wits;
 
     val pre_names_lthy = lthy;
     val ((((((((((((((((((((((((fs, gs), hs), x), y), zs), ys), As),
@@ -824,12 +870,26 @@
       | SOME raw_rel => prep_term no_defs_lthy raw_rel);
 
     val rel_bind_def =
-      (fn () => def_qualify (if Binding.is_empty rel_b then mk_suffix_binding relN else rel_b),
+      (fn () => def_qualify (if Binding.is_empty rel_b then mk_prefix_binding relN else rel_b),
          rel_rhs);
 
-    val ((bnf_rel_term, raw_rel_def), (lthy, lthy_old)) =
+    val wit_rhss =
+      if null raw_wits then
+        [fold_rev Term.absdummy As' (Term.list_comb (bnf_map_AsAs,
+          map2 (fn T => fn i => Term.absdummy T (Bound i)) As' (live downto 1)) $
+          Const (@{const_name undefined}, CA'))]
+      else map (prep_term no_defs_lthy) raw_wits;
+    val nwits = length wit_rhss;
+    val wit_binds_defs =
+      let
+        val bs = if nwits = 1 then [fn () => def_qualify (mk_prefix_binding witN)]
+          else map (fn i => fn () => def_qualify (mk_prefix_binding (mk_witN i))) (1 upto nwits);
+      in bs ~~ wit_rhss end;
+
+    val (((bnf_rel_term, raw_rel_def), (bnf_wit_terms, raw_wit_defs)), (lthy, lthy_old)) =
       lthy
       |> maybe_define (is_some raw_rel_opt) rel_bind_def
+      ||>> apfst split_list o fold_map (maybe_define (not (null raw_wits))) wit_binds_defs
       ||> `(maybe_restore lthy);
 
     val phi = Proof_Context.export_morphism lthy_old lthy;
@@ -841,11 +901,9 @@
     val rel = mk_bnf_rel pred2RTs CA' CB';
     val relAsAs = mk_bnf_rel self_pred2RTs CA' CA';
 
-    val _ = case no_reflexive (raw_map_def :: raw_set_defs @ [raw_bd_def] @
-        raw_wit_defs @ [raw_rel_def]) of
-        [] => ()
-      | defs => Proof_Display.print_consts true lthy_old (K false)
-          (map (dest_Free o fst o Logic.dest_equals o prop_of) defs);
+    val bnf_wit_defs = map (Morphism.thm phi) raw_wit_defs;
+    val bnf_wits = map (normalize_wit CA_params CA alphas o Morphism.term phi) bnf_wit_terms;
+    val bnf_wit_As = map (apsnd (mk_bnf_t As')) bnf_wits;
 
     val map_id0_goal =
       let val bnf_map_app_id = Term.list_comb (bnf_map_AsAs, map HOLogic.id_const As') in
@@ -945,11 +1003,14 @@
         map wit_goal (0 upto live - 1)
       end;
 
-    val wit_goalss = map mk_wit_goals bnf_wit_As;
+    val trivial_wit_tac = mk_trivial_wit_tac bnf_wit_defs;
 
-    fun after_qed thms lthy =
+    val wit_goalss =
+      (if null raw_wits then SOME trivial_wit_tac else NONE, map mk_wit_goals bnf_wit_As);
+
+    fun after_qed mk_wit_thms thms lthy =
       let
-        val (axioms, wit_thms) = apfst (mk_axioms live) (chop (length goals) thms);
+        val (axioms, nontriv_wit_thms) = apfst (mk_axioms live) (chop (length goals) thms);
 
         val bd_Card_order = #bd_card_order axioms RS @{thm conjunct2[OF card_order_on_Card_order]};
         val bd_Cinfinite = @{thm conjI} OF [#bd_cinfinite axioms, bd_Card_order];
@@ -1022,6 +1083,9 @@
 
         val set_map = map (fn thm => Lazy.lazy (fn () => mk_set_map thm)) (#set_map0 axioms);
 
+        val wit_thms =
+          if null nontriv_wit_thms then mk_wit_thms (map Lazy.force set_map) else nontriv_wit_thms;
+
         fun mk_in_bd () =
           let
             val bdT = fst (dest_relT (fastype_of bnf_bd_As));
@@ -1265,35 +1329,45 @@
   (bnf, Local_Theory.declaration {syntax = false, pervasive = true}
     (fn phi => Data.map (Symtab.default (key, morph_bnf phi bnf))) lthy);
 
-(* TODO: Once the invariant "nwits > 0" holds, remove "mk_conjunction_balanced'" and "rtac TrueI"
-   below *)
-fun mk_conjunction_balanced' [] = @{prop True}
-  | mk_conjunction_balanced' ts = Logic.mk_conjunction_balanced ts;
-
 fun bnf_def const_policy fact_policy qualify tacs wit_tac Ds map_b rel_b set_bs =
-  (fn (_, goals, wit_goalss, after_qed, lthy, one_step_defs) =>
+  (fn (_, goals, (triv_tac_opt, wit_goalss), after_qed, lthy, one_step_defs) =>
   let
-    val wits_tac =
-      K (TRYALL Goal.conjunction_tac) THEN' K (TRYALL (rtac TrueI)) THEN'
-      mk_unfold_thms_then_tac lthy one_step_defs wit_tac;
-    val wit_goals = map mk_conjunction_balanced' wit_goalss;
-    val wit_thms =
-      Goal.prove_sorry lthy [] [] (mk_conjunction_balanced' wit_goals) wits_tac
-      |> Conjunction.elim_balanced (length wit_goals)
-      |> map2 (Conjunction.elim_balanced o length) wit_goalss
-      |> map (map (Thm.close_derivation o Thm.forall_elim_vars 0));
+    fun mk_wits_tac set_maps =
+      K (TRYALL Goal.conjunction_tac) THEN'
+      (case triv_tac_opt of
+        SOME tac => tac set_maps
+      | NONE => mk_unfold_thms_then_tac lthy one_step_defs wit_tac);
+    val wit_goals = map Logic.mk_conjunction_balanced wit_goalss;
+    fun mk_wit_thms set_maps =
+      Goal.prove_sorry lthy [] [] (Logic.mk_conjunction_balanced wit_goals) (mk_wits_tac set_maps)
+        |> Conjunction.elim_balanced (length wit_goals)
+        |> map2 (Conjunction.elim_balanced o length) wit_goalss
+        |> map (map (Thm.close_derivation o Thm.forall_elim_vars 0));
   in
     map2 (Thm.close_derivation oo Goal.prove_sorry lthy [] [])
       goals (map (mk_unfold_thms_then_tac lthy one_step_defs) tacs)
-    |> (fn thms => after_qed (map single thms @ wit_thms) lthy)
-  end) oo prepare_def const_policy fact_policy qualify (K I) Ds map_b rel_b set_bs;
+    |> (fn thms => after_qed mk_wit_thms (map single thms) lthy)
+  end) oo prepare_def const_policy fact_policy qualify (K I) (K I) Ds map_b rel_b set_bs;
 
-val bnf_cmd = (fn (key, goals, wit_goals, after_qed, lthy, defs) =>
-  Proof.unfolding ([[(defs, [])]])
-    (Proof.theorem NONE (snd o register_bnf key oo after_qed)
-      (map (single o rpair []) goals @ map (map (rpair [])) wit_goals) lthy)) oo
-  prepare_def Do_Inline (user_policy Note_Some) I Syntax.read_term NONE Binding.empty Binding.empty
-    [];
+val bnf_cmd = (fn (key, goals, (triv_tac_opt, wit_goalss), after_qed, lthy, defs) =>
+  let
+    val wit_goals = map Logic.mk_conjunction_balanced wit_goalss;
+    fun mk_triv_wit_thms tac set_maps =
+      Goal.prove_sorry lthy [] [] (Logic.mk_conjunction_balanced wit_goals)
+        (K (TRYALL Goal.conjunction_tac) THEN' tac set_maps)
+        |> Conjunction.elim_balanced (length wit_goals)
+        |> map2 (Conjunction.elim_balanced o length) wit_goalss
+        |> map (map (Thm.close_derivation o Thm.forall_elim_vars 0));
+    val (mk_wit_thms, nontriv_wit_goals) = 
+      (case triv_tac_opt of
+        NONE => (fn _ => [], map (map (rpair [])) wit_goalss)
+      | SOME tac => (mk_triv_wit_thms tac, []));
+  in
+    Proof.unfolding ([[(defs, [])]])
+      (Proof.theorem NONE (snd o register_bnf key oo after_qed mk_wit_thms)
+        (map (single o rpair []) goals @ nontriv_wit_goals) lthy)
+  end) oo prepare_def Do_Inline (user_policy Note_Some) I Syntax.read_typ Syntax.read_term NONE
+    Binding.empty Binding.empty [];
 
 fun print_bnfs ctxt =
   let
@@ -1328,9 +1402,14 @@
 val _ =
   Outer_Syntax.local_theory_to_proof @{command_spec "bnf"}
     "register a type as a bounded natural functor"
-    ((parse_opt_binding_colon -- Parse.term --
-       (@{keyword "["} |-- Parse.list Parse.term --| @{keyword "]"}) -- Parse.term --
-       (@{keyword "["} |-- Parse.list Parse.term --| @{keyword "]"}) -- Scan.option Parse.term)
+    (parse_opt_binding_colon -- Parse.typ --|
+       (Parse.reserved "map" -- @{keyword ":"}) -- Parse.term --
+       (Scan.option ((Parse.reserved "sets" -- @{keyword ":"}) |--
+         Scan.repeat1 (Scan.unless (Parse.reserved "bd") Parse.term)) >> the_default []) --|
+       (Parse.reserved "bd" -- @{keyword ":"}) -- Parse.term --
+       (Scan.option ((Parse.reserved "wits" -- @{keyword ":"}) |--
+         Scan.repeat1 (Scan.unless (Parse.reserved "rel") Parse.term)) >> the_default []) --
+       Scan.option ((Parse.reserved "rel" -- @{keyword ":"}) |-- Parse.term)
        >> bnf_cmd);
 
 end;
--- a/src/HOL/BNF/Tools/bnf_def_tactics.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_def_tactics.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -31,7 +31,10 @@
     {prems: thm list, context: Proof.context} -> tactic
 
   val mk_in_bd_tac: int -> thm -> thm -> thm -> thm -> thm list -> thm list -> thm -> thm -> thm ->
-    thm -> {prems: 'a, context: Proof.context} -> tactic
+    thm -> {prems: thm list, context: Proof.context} -> tactic
+
+  val mk_trivial_wit_tac: thm list -> thm list -> {prems: thm list, context: Proof.context} ->
+    tactic
 end;
 
 structure BNF_Def_Tactics : BNF_DEF_TACTICS =
@@ -302,4 +305,8 @@
            map_comp RS sym, map_id])] 1
   end;
 
+fun mk_trivial_wit_tac wit_defs set_maps {context = ctxt, prems = _} =
+  unfold_thms_tac ctxt wit_defs THEN HEADGOAL (EVERY' (map (fn thm =>
+    dtac (thm RS equalityD1 RS set_mp) THEN' etac imageE THEN' atac) set_maps)) THEN ALLGOALS atac;
+
 end;
--- a/src/HOL/BNF/Tools/bnf_fp_def_sugar.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_fp_def_sugar.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -25,7 +25,9 @@
      sel_co_iterssss: thm list list list list};
 
   val of_fp_sugar: (fp_sugar -> 'a list) -> fp_sugar -> 'a
+  val eq_fp_sugar: fp_sugar * fp_sugar -> bool
   val morph_fp_sugar: morphism -> fp_sugar -> fp_sugar
+  val transfer_fp_sugar: Proof.context -> fp_sugar -> fp_sugar
   val fp_sugar_of: Proof.context -> string -> fp_sugar option
   val fp_sugars_of: Proof.context -> fp_sugar list
 
@@ -39,17 +41,14 @@
     'a list
   val mk_co_iter: theory -> BNF_FP_Util.fp_kind -> typ -> typ list -> term -> term
   val nesty_bnfs: Proof.context -> typ list list list -> typ list -> BNF_Def.bnf list
-  val mk_map: int -> typ list -> typ list -> term -> term
-  val mk_rel: int -> typ list -> typ list -> term -> term
-  val build_map: local_theory -> (typ * typ -> term) -> typ * typ -> term
-  val build_rel: local_theory -> (typ * typ -> term) -> typ * typ -> term
-  val dest_map: Proof.context -> string -> term -> term * term list
-  val dest_ctr: Proof.context -> string -> term -> term * term list
 
   type lfp_sugar_thms =
     (thm list * thm * Args.src list)
     * (thm list list * thm list list * Args.src list)
 
+  val morph_lfp_sugar_thms: morphism -> lfp_sugar_thms -> lfp_sugar_thms
+  val transfer_lfp_sugar_thms: Proof.context -> lfp_sugar_thms -> lfp_sugar_thms
+
   type gfp_sugar_thms =
     ((thm list * thm) list * Args.src list)
     * (thm list list * thm list list * Args.src list)
@@ -57,6 +56,9 @@
     * (thm list list * thm list list * Args.src list)
     * (thm list list list * thm list list list * Args.src list)
 
+  val morph_gfp_sugar_thms: morphism -> gfp_sugar_thms -> gfp_sugar_thms
+  val transfer_gfp_sugar_thms: Proof.context -> gfp_sugar_thms -> gfp_sugar_thms
+
   val mk_co_iters_prelims: BNF_FP_Util.fp_kind -> typ list list list -> typ list -> typ list ->
     int list -> int list list -> term list list -> Proof.context ->
     (term list list
@@ -87,13 +89,14 @@
     string * term list * term list list * ((term list list * term list list list)
       * (typ list * typ list list)) list ->
     thm -> thm list -> thm list -> thm list list -> BNF_Def.bnf list -> typ list -> typ list ->
-    int list list -> int list list -> int list -> thm list list -> Ctr_Sugar.ctr_sugar list ->
-    term list list -> thm list list -> (thm list -> thm list) -> local_theory -> gfp_sugar_thms
+    typ list -> typ list list list -> int list list -> int list list -> int list -> thm list list ->
+    Ctr_Sugar.ctr_sugar list -> term list list -> thm list list -> (thm list -> thm list) ->
+    local_theory -> gfp_sugar_thms
   val co_datatypes: BNF_FP_Util.fp_kind -> (mixfix list -> binding list -> binding list ->
       binding list list -> binding list -> (string * sort) list -> typ list * typ list list ->
       BNF_Def.bnf list -> local_theory -> BNF_FP_Util.fp_result * local_theory) ->
-    (bool * bool) * (((((binding * (typ * sort)) list * binding) * (binding * binding)) * mixfix) *
-      ((((binding * binding) * (binding * typ) list) * (binding * term) list) *
+    (bool * (bool * bool)) * (((((binding * (typ * sort)) list * binding) * (binding * binding))
+      * mixfix) * ((((binding * binding) * (binding * typ) list) * (binding * term) list) *
         mixfix) list) list ->
     local_theory -> local_theory
   val parse_co_datatype_cmd: BNF_FP_Util.fp_kind -> (mixfix list -> binding list -> binding list ->
@@ -207,8 +210,8 @@
 val id_def = @{thm id_def};
 val mp_conj = @{thm mp_conj};
 
-val nitpick_attrs = @{attributes [nitpick_simp]};
-val code_nitpick_simp_attrs = Code.add_default_eqn_attrib :: nitpick_attrs;
+val nitpicksimp_attrs = @{attributes [nitpick_simp]};
+val code_nitpicksimp_attrs = Code.add_default_eqn_attrib :: nitpicksimp_attrs;
 val simp_attrs = @{attributes [simp]};
 
 fun tvar_subst thy Ts Us =
@@ -232,7 +235,9 @@
   | flat_corec_preds_predsss_gettersss (p :: ps) (qss :: qsss) (fss :: fsss) =
     p :: flat_corec_predss_getterss qss fss @ flat_corec_preds_predsss_gettersss ps qsss fsss;
 
-fun mk_tupled_fun x f xs = HOLogic.tupled_lambda x (Term.list_comb (f, xs));
+fun mk_tupled_fun x f xs =
+  if xs = [x] then f else HOLogic.tupled_lambda x (Term.list_comb (f, xs));
+
 fun mk_uncurried2_fun f xss =
   mk_tupled_fun (HOLogic.mk_tuple (map HOLogic.mk_tuple xss)) f (flat_rec_arg_args xss);
 
@@ -287,66 +292,6 @@
   | unzip_corecT _ (Type (@{type_name sum}, Ts)) = Ts
   | unzip_corecT _ T = [T];
 
-fun mk_map live Ts Us t =
-  let val (Type (_, Ts0), Type (_, Us0)) = strip_typeN (live + 1) (fastype_of t) |>> List.last in
-    Term.subst_atomic_types (Ts0 @ Us0 ~~ Ts @ Us) t
-  end;
-
-fun mk_rel live Ts Us t =
-  let val [Type (_, Ts0), Type (_, Us0)] = binder_types (snd (strip_typeN live (fastype_of t))) in
-    Term.subst_atomic_types (Ts0 @ Us0 ~~ Ts @ Us) t
-  end;
-
-local
-
-fun build_map_or_rel mk const of_bnf dest lthy build_simple =
-  let
-    fun build (TU as (T, U)) =
-      if T = U then
-        const T
-      else
-        (case TU of
-          (Type (s, Ts), Type (s', Us)) =>
-          if s = s' then
-            let
-              val bnf = the (bnf_of lthy s);
-              val live = live_of_bnf bnf;
-              val mapx = mk live Ts Us (of_bnf bnf);
-              val TUs' = map dest (fst (strip_typeN live (fastype_of mapx)));
-            in Term.list_comb (mapx, map build TUs') end
-          else
-            build_simple TU
-        | _ => build_simple TU);
-  in build end;
-
-in
-
-val build_map = build_map_or_rel mk_map HOLogic.id_const map_of_bnf dest_funT;
-val build_rel = build_map_or_rel mk_rel HOLogic.eq_const rel_of_bnf dest_pred2T;
-
-end;
-
-val dummy_var_name = "?f"
-
-fun mk_map_pattern ctxt s =
-  let
-    val bnf = the (bnf_of ctxt s);
-    val mapx = map_of_bnf bnf;
-    val live = live_of_bnf bnf;
-    val (f_Ts, _) = strip_typeN live (fastype_of mapx);
-    val fs = map_index (fn (i, T) => Var ((dummy_var_name, i), T)) f_Ts;
-  in
-    (mapx, betapplys (mapx, fs))
-  end;
-
-fun dest_map ctxt s call =
-  let
-    val (map0, pat) = mk_map_pattern ctxt s;
-    val (_, tenv) = fo_match ctxt call pat;
-  in
-    (map0, Vartab.fold_rev (fn (_, (_, f)) => cons f) tenv [])
-  end;
-
 fun liveness_of_fp_bnf n bnf =
   (case T_of_bnf bnf of
     Type (_, Ts) => map (not o member (op =) (deads_of_bnf bnf)) Ts
@@ -388,12 +333,19 @@
 fun nesty_bnfs ctxt ctr_Tsss Us =
   map_filter (bnf_of ctxt) (fold (fold (fold (add_nesty_bnf_names Us))) ctr_Tsss []);
 
-fun indexify proj xs f p = f (find_index (curry op = (proj p)) xs) p;
+fun indexify proj xs f p = f (find_index (curry (op =) (proj p)) xs) p;
 
 type lfp_sugar_thms =
   (thm list * thm * Args.src list)
   * (thm list list * thm list list * Args.src list)
 
+fun morph_lfp_sugar_thms phi ((inducts, induct, induct_attrs), (foldss, recss, iter_attrs)) =
+  ((map (Morphism.thm phi) inducts, Morphism.thm phi induct, induct_attrs),
+   (map (map (Morphism.thm phi)) foldss, map (map (Morphism.thm phi)) recss, iter_attrs));
+
+val transfer_lfp_sugar_thms =
+  morph_lfp_sugar_thms o Morphism.thm_morphism o Thm.transfer o Proof_Context.theory_of;
+
 type gfp_sugar_thms =
   ((thm list * thm) list * Args.src list)
   * (thm list list * thm list list * Args.src list)
@@ -401,6 +353,23 @@
   * (thm list list * thm list list * Args.src list)
   * (thm list list list * thm list list list * Args.src list);
 
+fun morph_gfp_sugar_thms phi ((coinducts_pairs, coinduct_attrs),
+    (unfoldss, corecss, coiter_attrs), (disc_unfoldss, disc_corecss, disc_iter_attrs),
+    (disc_unfold_iffss, disc_corec_iffss, disc_iter_iff_attrs),
+    (sel_unfoldsss, sel_corecsss, sel_iter_attrs)) =
+  ((map (apfst (map (Morphism.thm phi)) o apsnd (Morphism.thm phi)) coinducts_pairs,
+    coinduct_attrs),
+   (map (map (Morphism.thm phi)) unfoldss, map (map (Morphism.thm phi)) corecss, coiter_attrs),
+   (map (map (Morphism.thm phi)) disc_unfoldss, map (map (Morphism.thm phi)) disc_corecss,
+    disc_iter_attrs),
+   (map (map (Morphism.thm phi)) disc_unfold_iffss, map (map (Morphism.thm phi)) disc_corec_iffss,
+    disc_iter_iff_attrs),
+   (map (map (map (Morphism.thm phi))) sel_unfoldsss,
+    map (map (map (Morphism.thm phi))) sel_corecsss, sel_iter_attrs));
+
+val transfer_gfp_sugar_thms =
+  morph_gfp_sugar_thms o Morphism.thm_morphism o Thm.transfer o Proof_Context.theory_of;
+
 fun mk_iter_fun_arg_types0 n ms = map2 dest_tupleT ms o dest_sumTN_balanced n o domain_type;
 
 fun mk_iter_fun_arg_types ctr_Tsss ns mss =
@@ -430,7 +399,7 @@
         ns mss ctr_Tsss ctor_iter_fun_Tss;
 
     val z_Tsss' = map (map flat_rec_arg_args) z_Tssss;
-    val h_Tss = map2 (map2 (curry op --->)) z_Tsss' Css;
+    val h_Tss = map2 (map2 (curry (op --->))) z_Tsss' Css;
 
     val hss = map2 (map2 retype_free) h_Tss gss;
     val zssss_hd = map2 (map2 (map2 (retype_free o hd))) z_Tssss ysss;
@@ -452,7 +421,7 @@
     val f_sum_prod_Ts = map range_type fun_Ts;
     val f_prod_Tss = map2 dest_sumTN_balanced ns f_sum_prod_Ts;
     val f_Tsss = map2 (map2 (dest_tupleT o length)) ctr_Tsss' f_prod_Tss;
-    val f_Tssss = map3 (fn C => map2 (map2 (map (curry op --> C) oo unzip_corecT)))
+    val f_Tssss = map3 (fn C => map2 (map2 (map (curry (op -->) C) oo unzip_corecT)))
       Cs ctr_Tsss' f_Tsss;
     val q_Tssss = map (map (map (fn [_] => [] | [_, T] => [mk_pred1T (domain_type T)]))) f_Tssss;
   in
@@ -536,18 +505,12 @@
     ((xtor_co_iterss, iters_args_types, coiters_args_types), lthy')
   end;
 
-fun mk_iter_body ctor_iter fss xssss =
-  Term.list_comb (ctor_iter, map2 (mk_sum_caseN_balanced oo map2 mk_uncurried2_fun) fss xssss);
-
 fun mk_preds_getterss_join c cps sum_prod_T cqfss =
   let val n = length cqfss in
     Term.lambda c (mk_IfN sum_prod_T cps
       (map2 (mk_InN_balanced sum_prod_T n) (map HOLogic.mk_tuple cqfss) (1 upto n)))
   end;
 
-fun mk_coiter_body cs cpss f_sum_prod_Ts cqfsss dtor_coiter =
-  Term.list_comb (dtor_coiter, map4 mk_preds_getterss_join cs cpss f_sum_prod_Ts cqfsss);
-
 fun define_co_iters fp fpT Cs binding_specs lthy0 =
   let
     val thy = Proof_Context.theory_of lthy0;
@@ -556,8 +519,8 @@
       #> Config.get lthy0 bnf_note_all = false ? Binding.conceal;
 
     val ((csts, defs), (lthy', lthy)) = lthy0
-      |> apfst split_list o fold_map (fn (b, spec) =>
-        Specification.definition (SOME (b, NONE, NoSyn), ((maybe_conceal_def_binding b, []), spec))
+      |> apfst split_list o fold_map (fn (b, rhs) =>
+        Local_Theory.define ((b, NoSyn), ((maybe_conceal_def_binding b, []), rhs))
         #>> apsnd snd) binding_specs
       ||> `Local_Theory.restore;
 
@@ -575,14 +538,10 @@
 
     val fpT_to_C as Type (_, [fpT, _]) = snd (strip_typeN nn (fastype_of (hd ctor_iters)));
 
-    fun generate_iter suf (f_Tss, _, fss, xssss) ctor_iter =
-      let
-        val res_T = fold_rev (curry op --->) f_Tss fpT_to_C;
-        val b = mk_binding suf;
-        val spec =
-          mk_Trueprop_eq (lists_bmoc fss (Free (Binding.name_of b, res_T)),
-            mk_iter_body ctor_iter fss xssss);
-      in (b, spec) end;
+    fun generate_iter pre (_, _, fss, xssss) ctor_iter =
+      (mk_binding pre,
+       fold_rev (fold_rev Term.lambda) fss (Term.list_comb (ctor_iter,
+         map2 (mk_sum_caseN_balanced oo map2 mk_uncurried2_fun) fss xssss)));
   in
     define_co_iters Least_FP fpT Cs (map3 generate_iter iterNs iter_args_typess' ctor_iters) lthy
   end;
@@ -594,14 +553,10 @@
 
     val C_to_fpT as Type (_, [_, fpT]) = snd (strip_typeN nn (fastype_of (hd dtor_coiters)));
 
-    fun generate_coiter suf ((pfss, cqfsss), (f_sum_prod_Ts, pf_Tss)) dtor_coiter =
-      let
-        val res_T = fold_rev (curry op --->) pf_Tss C_to_fpT;
-        val b = mk_binding suf;
-        val spec =
-          mk_Trueprop_eq (lists_bmoc pfss (Free (Binding.name_of b, res_T)),
-            mk_coiter_body cs cpss f_sum_prod_Ts cqfsss dtor_coiter);
-      in (b, spec) end;
+    fun generate_coiter pre ((pfss, cqfsss), (f_sum_prod_Ts, pf_Tss)) dtor_coiter =
+      (mk_binding pre,
+       fold_rev (fold_rev Term.lambda) pfss (Term.list_comb (dtor_coiter,
+         map4 mk_preds_getterss_join cs cpss f_sum_prod_Ts cqfsss)));
   in
     define_co_iters Greatest_FP fpT Cs
       (map3 generate_coiter coiterNs coiter_args_typess' dtor_coiters) lthy
@@ -645,7 +600,7 @@
         val lives = lives_of_bnf bnf;
         val sets = sets_of_bnf bnf;
         fun mk_set U =
-          (case find_index (curry op = U) lives of
+          (case find_index (curry (op =) U) lives of
             ~1 => Term.dummy
           | i => nth sets i);
       in
@@ -662,7 +617,7 @@
           end;
 
         fun mk_raw_prem_prems _ (x as Free (_, Type _)) (X as TFree _) =
-            [([], (find_index (curry op = X) Xs + 1, x))]
+            [([], (find_index (curry (op =) X) Xs + 1, x))]
           | mk_raw_prem_prems names_lthy (x as Free (s, Type (T_name, Ts0))) (Type (_, Xs_Ts0)) =
             (case AList.lookup (op =) setss_nested T_name of
               NONE => []
@@ -702,7 +657,7 @@
 
         val goal =
           Library.foldr (Logic.list_implies o apfst (map mk_prem)) (raw_premss,
-            HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj (map2 (curry op $) ps us)));
+            HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj (map2 (curry (op $)) ps us)));
 
         val kksss = map (map (map (fst o snd) o #2)) raw_premss;
 
@@ -763,13 +718,13 @@
     val rec_thmss = mk_iter_thmss rec_args_types recs rec_defs (map co_rec_of ctor_iter_thmss);
   in
     ((induct_thms, induct_thm, [induct_case_names_attr]),
-     (fold_thmss, rec_thmss, code_nitpick_simp_attrs @ simp_attrs))
+     (fold_thmss, rec_thmss, code_nitpicksimp_attrs @ simp_attrs))
   end;
 
 fun derive_coinduct_coiters_thms_for_types pre_bnfs (z, cs, cpss,
       coiters_args_types as [((pgss, crgsss), _), ((phss, cshsss), _)])
-    dtor_coinduct dtor_injects dtor_ctors dtor_coiter_thmss nesting_bnfs fpTs Cs kss mss ns
-    ctr_defss (ctr_sugars : ctr_sugar list) coiterss coiter_defss export_args lthy =
+    dtor_coinduct dtor_injects dtor_ctors dtor_coiter_thmss nesting_bnfs fpTs Cs Xs ctrXs_Tsss kss
+    mss ns ctr_defss (ctr_sugars : ctr_sugar list) coiterss coiter_defss export_args lthy =
   let
     fun mk_ctor_dtor_coiter_thm dtor_inject dtor_ctor coiter =
       iffD1 OF [dtor_inject, trans OF [coiter, dtor_ctor RS sym]];
@@ -821,40 +776,29 @@
           map4 (fn u => fn v => fn uvr => fn uv_eq =>
             fold_rev Term.lambda [u, v] (HOLogic.mk_disj (uvr, uv_eq))) us vs uvrs uv_eqs;
 
-        (* TODO: generalize (cf. "build_map") *)
-        fun build_rel rs' T =
-          (case find_index (curry op = T) fpTs of
-            ~1 =>
-            if exists_subtype_in fpTs T then
-              let
-                val Type (s, Ts) = T
-                val bnf = the (bnf_of lthy s);
-                val live = live_of_bnf bnf;
-                val rel = mk_rel live Ts Ts (rel_of_bnf bnf);
-                val Ts' = map domain_type (fst (strip_typeN live (fastype_of rel)));
-              in Term.list_comb (rel, map (build_rel rs') Ts') end
-            else
-              HOLogic.eq_const T
-          | kk => nth rs' kk);
+        fun build_the_rel rs' T Xs_T =
+          build_rel lthy (fn (_, X) => nth rs' (find_index (curry (op =) X) Xs)) (T, Xs_T)
+          |> Term.subst_atomic_types (Xs ~~ fpTs);
 
-        fun build_rel_app rs' usel vsel = fold rapp [usel, vsel] (build_rel rs' (fastype_of usel));
+        fun build_rel_app rs' usel vsel Xs_T =
+          fold rapp [usel, vsel] (build_the_rel rs' (fastype_of usel) Xs_T);
 
-        fun mk_prem_ctr_concls rs' n k udisc usels vdisc vsels =
+        fun mk_prem_ctr_concls rs' n k udisc usels vdisc vsels ctrXs_Ts =
           (if k = n then [] else [HOLogic.mk_eq (udisc, vdisc)]) @
           (if null usels then
              []
            else
              [Library.foldr HOLogic.mk_imp (if n = 1 then [] else [udisc, vdisc],
-                Library.foldr1 HOLogic.mk_conj (map2 (build_rel_app rs') usels vsels))]);
+                Library.foldr1 HOLogic.mk_conj (map3 (build_rel_app rs') usels vsels ctrXs_Ts))]);
 
-        fun mk_prem_concl rs' n udiscs uselss vdiscs vselss =
-          Library.foldr1 HOLogic.mk_conj
-            (flat (map5 (mk_prem_ctr_concls rs' n) (1 upto n) udiscs uselss vdiscs vselss))
+        fun mk_prem_concl rs' n udiscs uselss vdiscs vselss ctrXs_Tss =
+          Library.foldr1 HOLogic.mk_conj (flat (map6 (mk_prem_ctr_concls rs' n)
+            (1 upto n) udiscs uselss vdiscs vselss ctrXs_Tss))
           handle List.Empty => @{term True};
 
-        fun mk_prem rs' uvr u v n udiscs uselss vdiscs vselss =
+        fun mk_prem rs' uvr u v n udiscs uselss vdiscs vselss ctrXs_Tss =
           fold_rev Logic.all [u, v] (Logic.mk_implies (HOLogic.mk_Trueprop uvr,
-            HOLogic.mk_Trueprop (mk_prem_concl rs' n udiscs uselss vdiscs vselss)));
+            HOLogic.mk_Trueprop (mk_prem_concl rs' n udiscs uselss vdiscs vselss ctrXs_Tss)));
 
         val concl =
           HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj
@@ -862,8 +806,8 @@
                uvrs us vs));
 
         fun mk_goal rs' =
-          Logic.list_implies (map8 (mk_prem rs') uvrs us vs ns udiscss uselsss vdiscss vselsss,
-            concl);
+          Logic.list_implies (map9 (mk_prem rs') uvrs us vs ns udiscss uselsss vdiscss vselsss
+            ctrXs_Tsss, concl);
 
         val goals = map mk_goal [rs, strong_rs];
 
@@ -1024,14 +968,14 @@
       coinduct_consumes_attr :: coinduct_case_names_attr :: coinduct_case_concl_attrs;
   in
     ((coinduct_thms_pairs, coinduct_case_attrs),
-     (unfold_thmss, corec_thmss, code_nitpick_simp_attrs),
+     (unfold_thmss, corec_thmss, code_nitpicksimp_attrs),
      (disc_unfold_thmss, disc_corec_thmss, []),
      (disc_unfold_iff_thmss, disc_corec_iff_thmss, simp_attrs),
      (sel_unfold_thmsss, sel_corec_thmsss, simp_attrs))
   end;
 
 fun define_co_datatypes prepare_constraint prepare_typ prepare_term fp construct_fp
-    (wrap_opts as (no_discs_sels, rep_compat), specs) no_defs_lthy0 =
+    (wrap_opts as (no_discs_sels, (_, rep_compat)), specs) no_defs_lthy0 =
   let
     (* TODO: sanity checks on arguments *)
 
@@ -1074,7 +1018,7 @@
 
     val qsoty = quote o Syntax.string_of_typ fake_lthy;
 
-    val _ = (case duplicates (op =) unsorted_As of [] => ()
+    val _ = (case Library.duplicates (op =) unsorted_As of [] => ()
       | A :: _ => error ("Duplicate type parameter " ^ qsoty A ^ " in " ^ co_prefix fp ^
           "datatype specification"));
 
@@ -1087,7 +1031,7 @@
 
     val mixfixes = map mixfix_of specs;
 
-    val _ = (case duplicates Binding.eq_name fp_bs of [] => ()
+    val _ = (case Library.duplicates Binding.eq_name fp_bs of [] => ()
       | b :: _ => error ("Duplicate type name declaration " ^ quote (Binding.name_of b)));
 
     val ctr_specss = map ctr_specs_of specs;
@@ -1380,18 +1324,25 @@
               val (rel_distinct_thms, _) =
                 join_halves n half_rel_distinct_thmss other_half_rel_distinct_thmss;
 
+              val anonymous_notes =
+                [(map (fn th => th RS @{thm eq_False[THEN iffD2]}) rel_distinct_thms,
+                  code_nitpicksimp_attrs),
+                 (map2 (fn th => fn 0 => th RS @{thm eq_True[THEN iffD2]} | _ => th)
+                    rel_inject_thms ms, code_nitpicksimp_attrs)]
+                |> map (fn (thms, attrs) => ((Binding.empty, attrs), [(thms, [])]));
+
               val notes =
-                [(mapN, map_thms, code_nitpick_simp_attrs @ simp_attrs),
-                 (rel_distinctN, rel_distinct_thms, code_nitpick_simp_attrs @ simp_attrs),
-                 (rel_injectN, rel_inject_thms, code_nitpick_simp_attrs @ simp_attrs),
-                 (setN, flat set_thmss, code_nitpick_simp_attrs @ simp_attrs)]
+                [(mapN, map_thms, code_nitpicksimp_attrs @ simp_attrs),
+                 (rel_distinctN, rel_distinct_thms, simp_attrs),
+                 (rel_injectN, rel_inject_thms, simp_attrs),
+                 (setN, flat set_thmss, code_nitpicksimp_attrs @ simp_attrs)]
                 |> massage_simple_notes fp_b_name;
             in
               (((map_thms, rel_inject_thms, rel_distinct_thms, set_thmss), ctr_sugar),
-               lthy |> Local_Theory.notes notes |> snd)
+               lthy |> Local_Theory.notes (anonymous_notes @ notes) |> snd)
             end;
 
-        fun mk_binding suf = qualify false fp_b_name (Binding.suffix_name ("_" ^ suf) fp_b);
+        fun mk_binding pre = qualify false fp_b_name (Binding.prefix_name (pre ^ "_") fp_b);
 
         fun massage_res (((maps_sets_rels, ctr_sugar), co_iter_res), lthy) =
           (((maps_sets_rels, (ctrs, xss, ctr_defs, ctr_sugar)), co_iter_res), lthy);
@@ -1457,8 +1408,9 @@
              (disc_unfold_iff_thmss, disc_corec_iff_thmss, disc_coiter_iff_attrs),
              (sel_unfold_thmsss, sel_corec_thmsss, sel_coiter_attrs)) =
           derive_coinduct_coiters_thms_for_types pre_bnfs (the coiters_args_types) xtor_co_induct
-            dtor_injects dtor_ctors xtor_co_iter_thmss nesting_bnfs fpTs Cs kss mss ns ctr_defss
-            ctr_sugars coiterss coiter_defss (Proof_Context.export lthy' no_defs_lthy) lthy;
+            dtor_injects dtor_ctors xtor_co_iter_thmss nesting_bnfs fpTs Cs Xs ctrXs_Tsss kss mss ns
+            ctr_defss ctr_sugars coiterss coiter_defss (Proof_Context.export lthy' no_defs_lthy)
+            lthy;
 
         val sel_unfold_thmss = map flat sel_unfold_thmsss;
         val sel_corec_thmss = map flat sel_corec_thmsss;
@@ -1496,6 +1448,12 @@
            (unfoldN, unfold_thmss, K coiter_attrs)]
           |> massage_multi_notes;
 
+        fun is_codatatype (Type (s, _)) =
+            (case fp_sugar_of lthy s of SOME {fp = Greatest_FP, ...} => true | _ => false)
+          | is_codatatype _ = false;
+
+        val nitpick_supported = forall (is_codatatype o T_of_bnf) nested_bnfs;
+
         fun register_nitpick fpT ({ctrs, casex, ...} : ctr_sugar) =
           Nitpick_HOL.register_codatatype fpT (fst (dest_Const casex))
             (map (dest_Const o mk_ctr As) ctrs)
@@ -1507,7 +1465,7 @@
           ctr_sugars coiterss mapss [coinduct_thm, strong_coinduct_thm]
           (transpose [unfold_thmss, corec_thmss]) (transpose [disc_unfold_thmss, disc_corec_thmss])
           (transpose [sel_unfold_thmsss, sel_corec_thmsss])
-        |> fold2 register_nitpick fpTs ctr_sugars
+        |> nitpick_supported ? fold2 register_nitpick fpTs ctr_sugars
       end;
 
     val lthy'' = lthy'
@@ -1543,24 +1501,13 @@
 
 val parse_type_arg_named_constrained = parse_opt_binding_colon -- parse_type_arg_constrained;
 
+(*FIXME: use parse_type_args_named_constrained from BNF_Util and thus 
+  allow users to kill certain arguments of a (co)datatype*)
 val parse_type_args_named_constrained =
   parse_type_arg_constrained >> (single o pair Binding.empty) ||
   @{keyword "("} |-- Parse.!!! (Parse.list1 parse_type_arg_named_constrained --| @{keyword ")"}) ||
   Scan.succeed [];
 
-val parse_map_rel_binding = Parse.short_ident --| @{keyword ":"} -- parse_binding;
-
-val no_map_rel = (Binding.empty, Binding.empty);
-
-fun extract_map_rel ("map", b) = apfst (K b)
-  | extract_map_rel ("rel", b) = apsnd (K b)
-  | extract_map_rel (s, _) = error ("Unknown label " ^ quote s ^ " (expected \"map\" or \"rel\")");
-
-val parse_map_rel_bindings =
-  @{keyword "("} |-- Scan.repeat parse_map_rel_binding --| @{keyword ")"}
-    >> (fn ps => fold extract_map_rel ps no_map_rel) ||
-  Scan.succeed no_map_rel;
-
 val parse_ctr_spec =
   parse_opt_binding_colon -- parse_binding -- Scan.repeat parse_ctr_arg --
   Scan.optional parse_defaults [] -- Parse.opt_mixfix;
--- a/src/HOL/BNF/Tools/bnf_fp_def_sugar_tactics.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_fp_def_sugar_tactics.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -151,12 +151,17 @@
   (atac ORELSE' REPEAT o etac conjE THEN'
      full_simp_tac
        (ss_only (@{thm prod.inject} :: no_refl discs @ rel_eqs @ more_simp_thms) ctxt) THEN'
-     REPEAT o etac conjE THEN_MAYBE' REPEAT o hyp_subst_tac ctxt THEN' REPEAT o rtac conjI THEN'
-     REPEAT o (rtac refl ORELSE' atac));
+     REPEAT o etac conjE THEN_MAYBE' REPEAT o hyp_subst_tac ctxt THEN'
+     REPEAT o (resolve_tac [refl, conjI] ORELSE' atac));
 
 fun mk_coinduct_distinct_ctrs_tac ctxt discs discs' =
-  hyp_subst_tac ctxt THEN' REPEAT o etac conjE THEN'
-  full_simp_tac (ss_only (refl :: no_refl (union Thm.eq_thm discs discs') @ basic_simp_thms) ctxt);
+  let
+    val discs'' = map (perhaps (try (fn th => th RS @{thm notnotD}))) (discs @ discs')
+      |> distinct Thm.eq_thm_prop;
+  in
+    hyp_subst_tac ctxt THEN' REPEAT o etac conjE THEN'
+    full_simp_tac (ss_only (refl :: no_refl discs'' @ basic_simp_thms) ctxt)
+  end;
 
 fun mk_coinduct_discharge_prem_tac ctxt rel_eqs' nn kk n pre_rel_def dtor_ctor exhaust ctr_defs
     discss selss =
--- a/src/HOL/BNF/Tools/bnf_fp_n2m.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_fp_n2m.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -23,7 +23,7 @@
 open BNF_FP_N2M_Tactics
 
 fun force_typ ctxt T =
-  map_types Type_Infer.paramify_vars 
+  map_types Type_Infer.paramify_vars
   #> Type.constraint T
   #> Syntax.check_term ctxt
   #> singleton (Variable.polymorphic ctxt);
@@ -99,10 +99,6 @@
     val fp_nesty_bnfss = fp_bnfs :: nesty_bnfss;
     val fp_nesty_bnfs = distinct eq_bnf (flat fp_nesty_bnfss);
 
-    fun abstract t =
-      let val Ts = Term.add_frees t [];
-      in fold_rev Term.absfree (filter (member op = Ts) phis') t end;
-
     val rels =
       let
         fun find_rel T As Bs = fp_nesty_bnfss
@@ -121,10 +117,11 @@
               in
                 Term.list_comb (rel, rels)
               end
-          | mk_rel (T as TFree _) _ = nth phis (find_index (curry op = T) As)
+          | mk_rel (T as TFree _) _ = (nth phis (find_index (curry op = T) As)
+              handle General.Subscript => HOLogic.eq_const T)
           | mk_rel _ _ = raise Fail "fpTs contains schematic type variables";
       in
-        map2 (abstract oo mk_rel) fpTs fpTs'
+        map2 (fold_rev Term.absfree phis' oo mk_rel) fpTs fpTs'
       end;
 
     val pre_rels = map2 (fn Ds => mk_rel_of_bnf Ds (As @ fpTs) (Bs @ fpTs')) Dss bnfs;
@@ -224,7 +221,7 @@
         fun mk_s TU' =
           let
             val i = find_index (fn T => co_alg_argT TU' = T) Xs;
-            val sF = co_alg_funT TU'; 
+            val sF = co_alg_funT TU';
             val F = nth iter_preTs i;
             val s = nth iter_strs i;
           in
@@ -238,7 +235,7 @@
                   |> force_typ names_lthy smapT
                   |> hidden_to_unit;
                 val smap_argTs = strip_typeN live (fastype_of smap) |> fst;
-                fun mk_smap_arg TU =              
+                fun mk_smap_arg TU =
                   (if domain_type TU = range_type TU then
                     HOLogic.id_const (domain_type TU)
                   else if is_rec then
@@ -265,7 +262,7 @@
       in
         (case b_opt of
           NONE => ((t, Drule.dummy_thm), lthy)
-        | SOME b => Local_Theory.define ((b, NoSyn), ((Thm.def_binding b, []), 
+        | SOME b => Local_Theory.define ((b, NoSyn), ((Binding.conceal (Thm.def_binding b), []),
             fold_rev Term.absfree (if is_rec then rec_strs' else fold_strs') t)) lthy |>> apsnd snd)
       end;
 
@@ -376,6 +373,6 @@
        |> morph_fp_result (Morphism.term_morphism (singleton (Variable.polymorphic lthy))));
   in
     (fp_res, lthy)
-  end
+  end;
 
 end;
--- a/src/HOL/BNF/Tools/bnf_fp_n2m_sugar.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_fp_n2m_sugar.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -7,14 +7,16 @@
 
 signature BNF_FP_N2M_SUGAR =
 sig
-  val mutualize_fp_sugars: bool -> BNF_FP_Util.fp_kind -> binding list -> typ list ->
-    (term -> int list) -> term list list list list -> BNF_FP_Def_Sugar.fp_sugar list ->
-    local_theory ->
+  val unfold_let: term -> term
+  val dest_map: Proof.context -> string -> term -> term * term list
+
+  val mutualize_fp_sugars: BNF_FP_Util.fp_kind -> binding list -> typ list -> (term -> int list) ->
+    term list list list list -> BNF_FP_Def_Sugar.fp_sugar list -> local_theory ->
     (BNF_FP_Def_Sugar.fp_sugar list
      * (BNF_FP_Def_Sugar.lfp_sugar_thms option * BNF_FP_Def_Sugar.gfp_sugar_thms option))
     * local_theory
-  val pad_and_indexify_calls: BNF_FP_Def_Sugar.fp_sugar list -> int ->
-    (term * term list list) list list -> term list list list list
+  val indexify_callsss: BNF_FP_Def_Sugar.fp_sugar -> (term * term list list) list ->
+    term list list list
   val nested_to_mutual_fps: BNF_FP_Util.fp_kind -> binding list -> typ list -> (term -> int list) ->
     (term * term list list) list list -> local_theory ->
     (typ list * int list * BNF_FP_Def_Sugar.fp_sugar list
@@ -34,171 +36,245 @@
 
 val n2mN = "n2m_"
 
-(* TODO: test with sort constraints on As *)
-(* TODO: use right sorting order for "fp_sort" w.r.t. original BNFs (?) -- treat new variables
-   as deads? *)
-fun mutualize_fp_sugars mutualize fp bs fpTs get_indices callssss fp_sugars0 no_defs_lthy0 =
-  if mutualize orelse has_duplicates (op =) fpTs then
-    let
-      val thy = Proof_Context.theory_of no_defs_lthy0;
+type n2m_sugar = fp_sugar list * (lfp_sugar_thms option * gfp_sugar_thms option);
+
+structure Data = Generic_Data
+(
+  type T = n2m_sugar Typtab.table;
+  val empty = Typtab.empty;
+  val extend = I;
+  val merge = Typtab.merge (eq_fst (eq_list eq_fp_sugar));
+);
 
-      val qsotm = quote o Syntax.string_of_term no_defs_lthy0;
+fun morph_n2m_sugar phi (fp_sugars, (lfp_sugar_thms_opt, gfp_sugar_thms_opt)) =
+  (map (morph_fp_sugar phi) fp_sugars,
+   (Option.map (morph_lfp_sugar_thms phi) lfp_sugar_thms_opt,
+    Option.map (morph_gfp_sugar_thms phi) gfp_sugar_thms_opt));
+
+val transfer_n2m_sugar =
+  morph_n2m_sugar o Morphism.thm_morphism o Thm.transfer o Proof_Context.theory_of;
 
-      fun heterogeneous_call t = error ("Heterogeneous recursive call: " ^ qsotm t);
-      fun incompatible_calls t1 t2 =
-        error ("Incompatible recursive calls: " ^ qsotm t1 ^ " vs. " ^ qsotm t2);
+fun n2m_sugar_of ctxt =
+  Typtab.lookup (Data.get (Context.Proof ctxt))
+  #> Option.map (transfer_n2m_sugar ctxt);
 
-      val b_names = map Binding.name_of bs;
-      val fp_b_names = map base_name_of_typ fpTs;
+fun register_n2m_sugar key n2m_sugar =
+  Local_Theory.declaration {syntax = false, pervasive = false}
+    (fn phi => Data.map (Typtab.default (key, morph_n2m_sugar phi n2m_sugar)));
 
-      val nn = length fpTs;
+fun unfold_let (Const (@{const_name Let}, _) $ arg1 $ arg2) = unfold_let (betapply (arg2, arg1))
+  | unfold_let (Const (@{const_name prod_case}, _) $ t) =
+    (case unfold_let t of
+      t' as Abs (s1, T1, Abs (s2, T2, _)) =>
+      let
+        val x = (s1 ^ s2, Term.maxidx_of_term t + 1);
+        val v = Var (x, HOLogic.mk_prodT (T1, T2));
+      in
+        lambda v (unfold_let (betapplys (t', [HOLogic.mk_fst v, HOLogic.mk_snd v])))
+      end
+    | _ => t)
+  | unfold_let (t $ u) = betapply (unfold_let t, unfold_let u)
+  | unfold_let (Abs (s, T, t)) = Abs (s, T, unfold_let t)
+  | unfold_let t = t;
 
-      fun target_ctr_sugar_of_fp_sugar fpT ({T, index, ctr_sugars, ...} : fp_sugar) =
-        let
-          val rho = Vartab.fold (cons o apsnd snd) (Sign.typ_match thy (T, fpT) Vartab.empty) [];
-          val phi = Morphism.term_morphism (Term.subst_TVars rho);
-        in
-          morph_ctr_sugar phi (nth ctr_sugars index)
-        end;
-
-      val ctr_defss = map (of_fp_sugar #ctr_defss) fp_sugars0;
-      val mapss = map (of_fp_sugar #mapss) fp_sugars0;
-      val ctr_sugars0 = map2 target_ctr_sugar_of_fp_sugar fpTs fp_sugars0;
-
-      val ctrss = map #ctrs ctr_sugars0;
-      val ctr_Tss = map (map fastype_of) ctrss;
+fun mk_map_pattern ctxt s =
+  let
+    val bnf = the (bnf_of ctxt s);
+    val mapx = map_of_bnf bnf;
+    val live = live_of_bnf bnf;
+    val (f_Ts, _) = strip_typeN live (fastype_of mapx);
+    val fs = map_index (fn (i, T) => Var (("?f", i), T)) f_Ts;
+  in
+    (mapx, betapplys (mapx, fs))
+  end;
 
-      val As' = fold (fold Term.add_tfreesT) ctr_Tss [];
-      val As = map TFree As';
+fun dest_map ctxt s call =
+  let
+    val (map0, pat) = mk_map_pattern ctxt s;
+    val (_, tenv) = fo_match ctxt call pat;
+  in
+    (map0, Vartab.fold_rev (fn (_, (_, f)) => cons f) tenv [])
+  end;
+
+fun dest_abs_or_applied_map _ _ (Abs (_, _, t)) = (Term.dummy, [t])
+  | dest_abs_or_applied_map ctxt s (t1 $ _) = dest_map ctxt s t1;
 
-      val ((Cs, Xs), no_defs_lthy) =
-        no_defs_lthy0
-        |> fold Variable.declare_typ As
-        |> mk_TFrees nn
-        ||>> variant_tfrees fp_b_names;
+fun map_partition f xs =
+  fold_rev (fn x => fn (ys, (good, bad)) =>
+      case f x of SOME y => (y :: ys, (x :: good, bad)) | NONE => (ys, (good, x :: bad)))
+    xs ([], ([], []));
 
-      fun freeze_fp_default (T as Type (s, Ts)) =
-          (case find_index (curry (op =) T) fpTs of
-            ~1 => Type (s, map freeze_fp_default Ts)
-          | kk => nth Xs kk)
-        | freeze_fp_default T = T;
+fun key_of_fp_eqs fp fpTs fp_eqs =
+  Type (fp_case fp "l" "g", fpTs @ maps (fn (x, T) => [TFree x, T]) fp_eqs);
+
+(* TODO: test with sort constraints on As *)
+fun mutualize_fp_sugars fp bs fpTs get_indices callssss fp_sugars0 no_defs_lthy0 =
+  let
+    val thy = Proof_Context.theory_of no_defs_lthy0;
+
+    val qsotm = quote o Syntax.string_of_term no_defs_lthy0;
 
-      fun get_indices_checked call =
-        (case get_indices call of
-          _ :: _ :: _ => heterogeneous_call call
-        | kks => kks);
+    fun incompatible_calls t1 t2 =
+      error ("Incompatible " ^ co_prefix fp ^ "recursive calls: " ^ qsotm t1 ^ " vs. " ^ qsotm t2);
+    fun nested_self_call t =
+      error ("Unsupported nested self-call " ^ qsotm t);
+
+    val b_names = map Binding.name_of bs;
+    val fp_b_names = map base_name_of_typ fpTs;
+
+    val nn = length fpTs;
 
-      fun freeze_fp calls (T as Type (s, Ts)) =
-          (case map_filter (try (snd o dest_map no_defs_lthy s)) calls of
-            [] =>
-            (case union (op = o pairself fst)
-                (maps (fn call => map (rpair call) (get_indices_checked call)) calls) [] of
-              [] => freeze_fp_default T
-            | [(kk, _)] => nth Xs kk
-            | (_, call1) :: (_, call2) :: _ => incompatible_calls call1 call2)
-          | callss =>
-            Type (s, map2 freeze_fp (flatten_type_args_of_bnf (the (bnf_of no_defs_lthy s)) []
-              (transpose callss)) Ts))
-        | freeze_fp _ T = T;
+    fun target_ctr_sugar_of_fp_sugar fpT ({T, index, ctr_sugars, ...} : fp_sugar) =
+      let
+        val rho = Vartab.fold (cons o apsnd snd) (Sign.typ_match thy (T, fpT) Vartab.empty) [];
+        val phi = Morphism.term_morphism (Term.subst_TVars rho);
+      in
+        morph_ctr_sugar phi (nth ctr_sugars index)
+      end;
 
-      val ctr_Tsss = map (map binder_types) ctr_Tss;
-      val ctrXs_Tsss = map2 (map2 (map2 freeze_fp)) callssss ctr_Tsss;
-      val ctrXs_sum_prod_Ts = map (mk_sumTN_balanced o map HOLogic.mk_tupleT) ctrXs_Tsss;
-      val Ts = map (body_type o hd) ctr_Tss;
+    val ctr_defss = map (of_fp_sugar #ctr_defss) fp_sugars0;
+    val mapss = map (of_fp_sugar #mapss) fp_sugars0;
+    val ctr_sugars = map2 target_ctr_sugar_of_fp_sugar fpTs fp_sugars0;
+
+    val ctrss = map #ctrs ctr_sugars;
+    val ctr_Tss = map (map fastype_of) ctrss;
+
+    val As' = fold (fold Term.add_tfreesT) ctr_Tss [];
+    val As = map TFree As';
 
-      val ns = map length ctr_Tsss;
-      val kss = map (fn n => 1 upto n) ns;
-      val mss = map (map length) ctr_Tsss;
-
-      val fp_eqs = map dest_TFree Xs ~~ ctrXs_sum_prod_Ts;
+    val ((Cs, Xs), no_defs_lthy) =
+      no_defs_lthy0
+      |> fold Variable.declare_typ As
+      |> mk_TFrees nn
+      ||>> variant_tfrees fp_b_names;
 
-      val base_fp_names = Name.variant_list [] fp_b_names;
-      val fp_bs = map2 (fn b_name => fn base_fp_name =>
-          Binding.qualify true b_name (Binding.name (n2mN ^ base_fp_name)))
-        b_names base_fp_names;
+    fun check_call_dead live_call call =
+      if null (get_indices call) then () else incompatible_calls live_call call;
 
-      val (pre_bnfs, (fp_res as {xtor_co_iterss = xtor_co_iterss0, xtor_co_induct,
-             dtor_injects, dtor_ctors, xtor_co_iter_thmss, ...}, lthy)) =
-        fp_bnf (construct_mutualized_fp fp fpTs fp_sugars0) fp_bs As' fp_eqs no_defs_lthy;
-
-      val nesting_bnfs = nesty_bnfs lthy ctrXs_Tsss As;
-      val nested_bnfs = nesty_bnfs lthy ctrXs_Tsss Xs;
+    fun freeze_fpTs_simple (T as Type (s, Ts)) =
+        (case find_index (curry (op =) T) fpTs of
+          ~1 => Type (s, map freeze_fpTs_simple Ts)
+        | kk => nth Xs kk)
+      | freeze_fpTs_simple T = T;
 
-      val ((xtor_co_iterss, iters_args_types, coiters_args_types), _) =
-        mk_co_iters_prelims fp ctr_Tsss fpTs Cs ns mss xtor_co_iterss0 lthy;
-
-      fun mk_binding b suf = Binding.suffix_name ("_" ^ suf) b;
+    fun freeze_fpTs_map (fpT as Type (_, Ts')) (callss, (live_call :: _, dead_calls))
+        (T as Type (s, Ts)) =
+      if Ts' = Ts then
+        nested_self_call live_call
+      else
+        (List.app (check_call_dead live_call) dead_calls;
+         Type (s, map2 (freeze_fpTs fpT) (flatten_type_args_of_bnf (the (bnf_of no_defs_lthy s)) []
+           (transpose callss)) Ts))
+    and freeze_fpTs fpT calls (T as Type (s, _)) =
+        (case map_partition (try (snd o dest_map no_defs_lthy s)) calls of
+          ([], _) =>
+          (case map_partition (try (snd o dest_abs_or_applied_map no_defs_lthy s)) calls of
+            ([], _) => freeze_fpTs_simple T
+          | callsp => freeze_fpTs_map fpT callsp T)
+        | callsp => freeze_fpTs_map fpT callsp T)
+      | freeze_fpTs _ _ T = T;
 
-      val ((co_iterss, co_iter_defss), lthy) =
-        fold_map2 (fn b =>
-          (if fp = Least_FP then define_iters [foldN, recN] (the iters_args_types)
-           else define_coiters [unfoldN, corecN] (the coiters_args_types))
-            (mk_binding b) fpTs Cs) fp_bs xtor_co_iterss lthy
-        |>> split_list;
+    val ctr_Tsss = map (map binder_types) ctr_Tss;
+    val ctrXs_Tsss = map3 (map2 o map2 o freeze_fpTs) fpTs callssss ctr_Tsss;
+    val ctrXs_sum_prod_Ts = map (mk_sumTN_balanced o map HOLogic.mk_tupleT) ctrXs_Tsss;
+    val ctr_Ts = map (body_type o hd) ctr_Tss;
+
+    val ns = map length ctr_Tsss;
+    val kss = map (fn n => 1 upto n) ns;
+    val mss = map (map length) ctr_Tsss;
 
-      val rho = tvar_subst thy Ts fpTs;
-      val ctr_sugar_phi =
-        Morphism.compose (Morphism.typ_morphism (Term.typ_subst_TVars rho))
-          (Morphism.term_morphism (Term.subst_TVars rho));
-      val inst_ctr_sugar = morph_ctr_sugar ctr_sugar_phi;
+    val fp_eqs = map dest_TFree Xs ~~ ctrXs_sum_prod_Ts;
+    val key = key_of_fp_eqs fp fpTs fp_eqs;
+  in
+    (case n2m_sugar_of no_defs_lthy key of
+      SOME n2m_sugar => (n2m_sugar, no_defs_lthy)
+    | NONE =>
+      let
+        val base_fp_names = Name.variant_list [] fp_b_names;
+        val fp_bs = map2 (fn b_name => fn base_fp_name =>
+            Binding.qualify true b_name (Binding.name (n2mN ^ base_fp_name)))
+          b_names base_fp_names;
 
-      val ctr_sugars = map inst_ctr_sugar ctr_sugars0;
+        val (pre_bnfs, (fp_res as {xtor_co_iterss = xtor_co_iterss0, xtor_co_induct, dtor_injects,
+               dtor_ctors, xtor_co_iter_thmss, ...}, lthy)) =
+          fp_bnf (construct_mutualized_fp fp fpTs fp_sugars0) fp_bs As' fp_eqs no_defs_lthy;
+
+        val nesting_bnfs = nesty_bnfs lthy ctrXs_Tsss As;
+        val nested_bnfs = nesty_bnfs lthy ctrXs_Tsss Xs;
+
+        val ((xtor_co_iterss, iters_args_types, coiters_args_types), _) =
+          mk_co_iters_prelims fp ctr_Tsss fpTs Cs ns mss xtor_co_iterss0 lthy;
+
+        fun mk_binding b suf = Binding.suffix_name ("_" ^ suf) b;
+
+        val ((co_iterss, co_iter_defss), lthy) =
+          fold_map2 (fn b =>
+            (if fp = Least_FP then define_iters [foldN, recN] (the iters_args_types)
+             else define_coiters [unfoldN, corecN] (the coiters_args_types))
+              (mk_binding b) fpTs Cs) fp_bs xtor_co_iterss lthy
+          |>> split_list;
 
-      val ((co_inducts, un_fold_thmss, co_rec_thmss, disc_unfold_thmss, disc_corec_thmss,
-            sel_unfold_thmsss, sel_corec_thmsss), fp_sugar_thms) =
-        if fp = Least_FP then
-          derive_induct_iters_thms_for_types pre_bnfs (the iters_args_types) xtor_co_induct
-            xtor_co_iter_thmss nesting_bnfs nested_bnfs fpTs Cs Xs ctrXs_Tsss ctrss ctr_defss
-            co_iterss co_iter_defss lthy
-          |> `(fn ((_, induct, _), (fold_thmss, rec_thmss, _)) =>
-            ([induct], fold_thmss, rec_thmss, [], [], [], []))
-          ||> (fn info => (SOME info, NONE))
-        else
-          derive_coinduct_coiters_thms_for_types pre_bnfs (the coiters_args_types) xtor_co_induct
-            dtor_injects dtor_ctors xtor_co_iter_thmss nesting_bnfs fpTs Cs kss mss ns ctr_defss
-            ctr_sugars co_iterss co_iter_defss (Proof_Context.export lthy no_defs_lthy) lthy
-          |> `(fn ((coinduct_thms_pairs, _), (unfold_thmss, corec_thmss, _),
-                  (disc_unfold_thmss, disc_corec_thmss, _), _,
-                  (sel_unfold_thmsss, sel_corec_thmsss, _)) =>
-            (map snd coinduct_thms_pairs, unfold_thmss, corec_thmss, disc_unfold_thmss,
-             disc_corec_thmss, sel_unfold_thmsss, sel_corec_thmsss))
-          ||> (fn info => (NONE, SOME info));
+        val ((co_inducts, un_fold_thmss, co_rec_thmss, disc_unfold_thmss, disc_corec_thmss,
+              sel_unfold_thmsss, sel_corec_thmsss), fp_sugar_thms) =
+          if fp = Least_FP then
+            derive_induct_iters_thms_for_types pre_bnfs (the iters_args_types) xtor_co_induct
+              xtor_co_iter_thmss nesting_bnfs nested_bnfs fpTs Cs Xs ctrXs_Tsss ctrss ctr_defss
+              co_iterss co_iter_defss lthy
+            |> `(fn ((_, induct, _), (fold_thmss, rec_thmss, _)) =>
+              ([induct], fold_thmss, rec_thmss, [], [], [], []))
+            ||> (fn info => (SOME info, NONE))
+          else
+            derive_coinduct_coiters_thms_for_types pre_bnfs (the coiters_args_types) xtor_co_induct
+              dtor_injects dtor_ctors xtor_co_iter_thmss nesting_bnfs fpTs Cs Xs ctrXs_Tsss kss mss
+              ns ctr_defss ctr_sugars co_iterss co_iter_defss
+              (Proof_Context.export lthy no_defs_lthy) lthy
+            |> `(fn ((coinduct_thms_pairs, _), (unfold_thmss, corec_thmss, _),
+                    (disc_unfold_thmss, disc_corec_thmss, _), _,
+                    (sel_unfold_thmsss, sel_corec_thmsss, _)) =>
+              (map snd coinduct_thms_pairs, unfold_thmss, corec_thmss, disc_unfold_thmss,
+               disc_corec_thmss, sel_unfold_thmsss, sel_corec_thmsss))
+            ||> (fn info => (NONE, SOME info));
 
-      val phi = Proof_Context.export_morphism no_defs_lthy no_defs_lthy0;
+        val phi = Proof_Context.export_morphism no_defs_lthy no_defs_lthy0;
 
-      fun mk_target_fp_sugar (kk, T) =
-        {T = T, fp = fp, index = kk, pre_bnfs = pre_bnfs, nested_bnfs = nested_bnfs,
-         nesting_bnfs = nesting_bnfs, fp_res = fp_res, ctr_defss = ctr_defss,
-         ctr_sugars = ctr_sugars, co_iterss = co_iterss, mapss = mapss, co_inducts = co_inducts,
-         co_iter_thmsss = transpose [un_fold_thmss, co_rec_thmss],
-         disc_co_itersss = transpose [disc_unfold_thmss, disc_corec_thmss],
-         sel_co_iterssss = transpose [sel_unfold_thmsss, sel_corec_thmsss]}
-        |> morph_fp_sugar phi;
-    in
-      ((map_index mk_target_fp_sugar fpTs, fp_sugar_thms), lthy)
-    end
-  else
-    (* TODO: reorder hypotheses and predicates in (co)induction rules? *)
-    ((fp_sugars0, (NONE, NONE)), no_defs_lthy0);
+        fun mk_target_fp_sugar (kk, T) =
+          {T = T, fp = fp, index = kk, pre_bnfs = pre_bnfs, nested_bnfs = nested_bnfs,
+           nesting_bnfs = nesting_bnfs, fp_res = fp_res, ctr_defss = ctr_defss,
+           ctr_sugars = ctr_sugars, co_iterss = co_iterss, mapss = mapss, co_inducts = co_inducts,
+           co_iter_thmsss = transpose [un_fold_thmss, co_rec_thmss],
+           disc_co_itersss = transpose [disc_unfold_thmss, disc_corec_thmss],
+           sel_co_iterssss = transpose [sel_unfold_thmsss, sel_corec_thmsss]}
+          |> morph_fp_sugar phi;
+
+        val n2m_sugar = (map_index mk_target_fp_sugar fpTs, fp_sugar_thms);
+      in
+        (n2m_sugar, lthy |> register_n2m_sugar key n2m_sugar)
+      end)
+  end;
 
 fun indexify_callsss fp_sugar callsss =
   let
     val {ctrs, ...} = of_fp_sugar #ctr_sugars fp_sugar;
-    fun do_ctr ctr =
+    fun indexify_ctr ctr =
       (case AList.lookup Term.aconv_untyped callsss ctr of
         NONE => replicate (num_binder_types (fastype_of ctr)) []
-      | SOME callss => map (map Envir.beta_eta_contract) callss);
+      | SOME callss => map (map (Envir.beta_eta_contract o unfold_let)) callss);
   in
-    map do_ctr ctrs
+    map indexify_ctr ctrs
   end;
 
-fun pad_and_indexify_calls fp_sugars0 = map2 indexify_callsss fp_sugars0 oo pad_list [];
+fun retypargs tyargs (Type (s, _)) = Type (s, tyargs);
+
+fun fold_subtype_pairs f (T as Type (s, Ts), U as Type (s', Us)) =
+    f (T, U) #> (if s = s' then fold (fold_subtype_pairs f) (Ts ~~ Us) else I)
+  | fold_subtype_pairs f TU = f TU;
 
 fun nested_to_mutual_fps fp actual_bs actual_Ts get_indices actual_callssss0 lthy =
   let
     val qsoty = quote o Syntax.string_of_typ lthy;
     val qsotys = space_implode " or " o map qsoty;
 
+    fun duplicate_datatype T = error (qsoty T ^ " is not mutually recursive with itself");
     fun not_co_datatype0 T = error (qsoty T ^ " is not a " ^ co_prefix fp ^ "datatype");
     fun not_co_datatype (T as Type (s, _)) =
         if fp = Least_FP andalso
@@ -208,32 +284,80 @@
           not_co_datatype0 T
       | not_co_datatype T = not_co_datatype0 T;
     fun not_mutually_nested_rec Ts1 Ts2 =
-      error (qsotys Ts1 ^ " is neither mutually recursive with nor nested recursive via " ^
-        qsotys Ts2);
+      error (qsotys Ts1 ^ " is neither mutually recursive with " ^ qsotys Ts2 ^
+        " nor nested recursive via " ^ qsotys Ts2);
+
+    val _ = (case Library.duplicates (op =) actual_Ts of [] => () | T :: _ => duplicate_datatype T);
 
-    val perm_actual_Ts as Type (_, ty_args0) :: _ =
-      sort (int_ord o pairself Term.size_of_typ) actual_Ts;
+    val perm_actual_Ts =
+      sort (prod_ord int_ord Term_Ord.typ_ord o pairself (`Term.size_of_typ)) actual_Ts;
+
+    fun the_ctrs_of (Type (s, Ts)) = map (mk_ctr Ts) (#ctrs (the (ctr_sugar_of lthy s)));
+
+    fun the_fp_sugar_of (T as Type (T_name, _)) =
+      (case fp_sugar_of lthy T_name of
+        SOME (fp_sugar as {fp = fp', ...}) => if fp = fp' then fp_sugar else not_co_datatype T
+      | NONE => not_co_datatype T);
 
-    fun check_enrich_with_mutuals _ [] = []
-      | check_enrich_with_mutuals seen ((T as Type (T_name, ty_args)) :: Ts) =
-        (case fp_sugar_of lthy T_name of
-          SOME ({fp = fp', fp_res = {Ts = Ts', ...}, ...}) =>
-          if fp = fp' then
+    fun gen_rhss_in gen_Ts rho subTs =
+      let
+        fun maybe_insert (T, Type (_, gen_tyargs)) =
+            if member (op =) subTs T then insert (op =) gen_tyargs else I
+          | maybe_insert _ = I;
+
+        val ctrs = maps the_ctrs_of gen_Ts;
+        val gen_ctr_Ts = maps (binder_types o fastype_of) ctrs;
+        val ctr_Ts = map (Term.typ_subst_atomic rho) gen_ctr_Ts;
+      in
+        fold (fold_subtype_pairs maybe_insert) (ctr_Ts ~~ gen_ctr_Ts) []
+      end;
+
+    fun gather_types _ _ num_groups seen gen_seen [] = (num_groups, seen, gen_seen)
+      | gather_types lthy rho num_groups seen gen_seen ((T as Type (_, tyargs)) :: Ts) =
+        let
+          val {fp_res = {Ts = mutual_Ts0, ...}, ...} = the_fp_sugar_of T;
+          val mutual_Ts = map (retypargs tyargs) mutual_Ts0;
+
+          val _ = seen = [] orelse exists (exists_subtype_in seen) mutual_Ts orelse
+            not_mutually_nested_rec mutual_Ts seen;
+
+          fun fresh_tyargs () =
             let
-              val mutual_Ts = map (fn Type (s, _) => Type (s, ty_args)) Ts';
-              val _ =
-                seen = [] orelse exists (exists_subtype_in seen) mutual_Ts orelse
-                not_mutually_nested_rec mutual_Ts seen;
-              val (seen', Ts') = List.partition (member (op =) mutual_Ts) Ts;
+              (* The name "'z" is unlikely to clash with the context, yielding more cache hits. *)
+              val (gen_tyargs, lthy') =
+                variant_tfrees (replicate (length tyargs) "z") lthy
+                |>> map Logic.varifyT_global;
+              val rho' = (gen_tyargs ~~ tyargs) @ rho;
             in
-              mutual_Ts @ check_enrich_with_mutuals (seen @ T :: seen') Ts'
-            end
-          else
-            not_co_datatype T
-        | NONE => not_co_datatype T)
-      | check_enrich_with_mutuals _ (T :: _) = not_co_datatype T;
+              (rho', gen_tyargs, gen_seen, lthy')
+            end;
 
-    val perm_Ts = check_enrich_with_mutuals [] perm_actual_Ts;
+          val (rho', gen_tyargs, gen_seen', lthy') =
+            if exists (exists_subtype_in seen) mutual_Ts then
+              (case gen_rhss_in gen_seen rho mutual_Ts of
+                [] => fresh_tyargs ()
+              | gen_tyargss as gen_tyargs :: gen_tyargss_tl =>
+                let
+                  val unify_pairs = split_list (maps (curry (op ~~) gen_tyargs) gen_tyargss_tl);
+                  val mgu = Type.raw_unifys unify_pairs Vartab.empty;
+                  val gen_tyargs' = map (Envir.subst_type mgu) gen_tyargs;
+                  val gen_seen' = map (Envir.subst_type mgu) gen_seen;
+                in
+                  (rho, gen_tyargs', gen_seen', lthy)
+                end)
+            else
+              fresh_tyargs ();
+
+          val gen_mutual_Ts = map (retypargs gen_tyargs) mutual_Ts0;
+          val Ts' = filter_out (member (op =) mutual_Ts) Ts;
+        in
+          gather_types lthy' rho' (num_groups + 1) (seen @ mutual_Ts) (gen_seen' @ gen_mutual_Ts)
+            Ts'
+        end
+      | gather_types _ _ _ _ _ (T :: _) = not_co_datatype T;
+
+    val (num_groups, perm_Ts, perm_gen_Ts) = gather_types lthy [] 0 [] [] perm_actual_Ts;
+    val perm_frozen_gen_Ts = map Logic.unvarifyT_global perm_gen_Ts;
 
     val missing_Ts = perm_Ts |> subtract (op =) actual_Ts;
     val Ts = actual_Ts @ missing_Ts;
@@ -241,6 +365,8 @@
     val nn = length Ts;
     val kks = 0 upto nn - 1;
 
+    val callssss0 = pad_list [] nn actual_callssss0;
+
     val common_name = mk_common_name (map Binding.name_of actual_bs);
     val bs = pad_list (Binding.name common_name) nn actual_bs;
 
@@ -249,16 +375,19 @@
 
     val perm_bs = permute bs;
     val perm_kks = permute kks;
+    val perm_callssss0 = permute callssss0;
     val perm_fp_sugars0 = map (the o fp_sugar_of lthy o fst o dest_Type) perm_Ts;
 
-    val mutualize = exists (fn Type (_, ty_args) => ty_args <> ty_args0) Ts;
-    val perm_callssss = pad_and_indexify_calls perm_fp_sugars0 nn actual_callssss0;
+    val perm_callssss = map2 indexify_callsss perm_fp_sugars0 perm_callssss0;
 
     val get_perm_indices = map (fn kk => find_index (curry (op =) kk) perm_kks) o get_indices;
 
     val ((perm_fp_sugars, fp_sugar_thms), lthy) =
-      mutualize_fp_sugars mutualize fp perm_bs perm_Ts get_perm_indices perm_callssss
-        perm_fp_sugars0 lthy;
+      if num_groups > 1 then
+        mutualize_fp_sugars fp perm_bs perm_frozen_gen_Ts get_perm_indices perm_callssss
+          perm_fp_sugars0 lthy
+      else
+        ((perm_fp_sugars0, (NONE, NONE)), lthy);
 
     val fp_sugars = unpermute perm_fp_sugars;
   in
--- a/src/HOL/BNF/Tools/bnf_fp_rec_sugar.ML	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,986 +0,0 @@
-(*  Title:      HOL/BNF/Tools/bnf_fp_rec_sugar.ML
-    Author:     Lorenz Panny, TU Muenchen
-    Copyright   2013
-
-Recursor and corecursor sugar.
-*)
-
-signature BNF_FP_REC_SUGAR =
-sig
-  val add_primrec: (binding * typ option * mixfix) list ->
-    (Attrib.binding * term) list -> local_theory -> (term list * thm list list) * local_theory
-  val add_primrec_cmd: (binding * string option * mixfix) list ->
-    (Attrib.binding * string) list -> local_theory -> (term list * thm list list) * local_theory
-  val add_primrec_global: (binding * typ option * mixfix) list ->
-    (Attrib.binding * term) list -> theory -> (term list * thm list list) * theory
-  val add_primrec_overloaded: (string * (string * typ) * bool) list ->
-    (binding * typ option * mixfix) list ->
-    (Attrib.binding * term) list -> theory -> (term list * thm list list) * theory
-  val add_primrec_simple: ((binding * typ) * mixfix) list -> term list ->
-    local_theory -> (string list * (term list * (int list list * thm list list))) * local_theory
-  val add_primcorecursive_cmd: bool ->
-    (binding * string option * mixfix) list * ((Attrib.binding * string) * string option) list ->
-    Proof.context -> Proof.state
-  val add_primcorec_cmd: bool ->
-    (binding * string option * mixfix) list * ((Attrib.binding * string) * string option) list ->
-    local_theory -> local_theory
-end;
-
-structure BNF_FP_Rec_Sugar : BNF_FP_REC_SUGAR =
-struct
-
-open BNF_Util
-open BNF_FP_Util
-open BNF_FP_Rec_Sugar_Util
-open BNF_FP_Rec_Sugar_Tactics
-
-val codeN = "code"
-val ctrN = "ctr"
-val discN = "disc"
-val selN = "sel"
-
-val nitpick_attrs = @{attributes [nitpick_simp]};
-val simp_attrs = @{attributes [simp]};
-val code_nitpick_attrs = Code.add_default_eqn_attrib :: nitpick_attrs;
-val code_nitpick_simp_attrs = Code.add_default_eqn_attrib :: nitpick_attrs @ simp_attrs;
-
-exception Primrec_Error of string * term list;
-
-fun primrec_error str = raise Primrec_Error (str, []);
-fun primrec_error_eqn str eqn = raise Primrec_Error (str, [eqn]);
-fun primrec_error_eqns str eqns = raise Primrec_Error (str, eqns);
-
-fun finds eq = fold_map (fn x => List.partition (curry eq x) #>> pair x);
-
-val free_name = try (fn Free (v, _) => v);
-val const_name = try (fn Const (v, _) => v);
-val undef_const = Const (@{const_name undefined}, dummyT);
-
-fun permute_args n t = list_comb (t, map Bound (0 :: (n downto 1)))
-  |> fold (K (Term.abs (Name.uu, dummyT))) (0 upto n);
-val abs_tuple = HOLogic.tupled_lambda o HOLogic.mk_tuple;
-fun drop_All t = subst_bounds (strip_qnt_vars @{const_name all} t |> map Free |> rev,
-  strip_qnt_body @{const_name all} t)
-fun abstract vs =
-  let fun a n (t $ u) = a n t $ a n u
-        | a n (Abs (v, T, b)) = Abs (v, T, a (n + 1) b)
-        | a n t = let val idx = find_index (equal t) vs in
-            if idx < 0 then t else Bound (n + idx) end
-  in a 0 end;
-fun mk_prod1 Ts (t, u) = HOLogic.pair_const (fastype_of1 (Ts, t)) (fastype_of1 (Ts, u)) $ t $ u;
-fun mk_tuple1 Ts = the_default HOLogic.unit o try (foldr1 (mk_prod1 Ts));
-
-fun get_indices fixes t = map (fst #>> Binding.name_of #> Free) fixes
-  |> map_index (fn (i, v) => if exists_subterm (equal v) t then SOME i else NONE)
-  |> map_filter I;
-
-
-(* Primrec *)
-
-type eqn_data = {
-  fun_name: string,
-  rec_type: typ,
-  ctr: term,
-  ctr_args: term list,
-  left_args: term list,
-  right_args: term list,
-  res_type: typ,
-  rhs_term: term,
-  user_eqn: term
-};
-
-fun dissect_eqn lthy fun_names eqn' =
-  let
-    val eqn = drop_All eqn' |> HOLogic.dest_Trueprop
-      handle TERM _ =>
-        primrec_error_eqn "malformed function equation (expected \"lhs = rhs\")" eqn';
-    val (lhs, rhs) = HOLogic.dest_eq eqn
-        handle TERM _ =>
-          primrec_error_eqn "malformed function equation (expected \"lhs = rhs\")" eqn';
-    val (fun_name, args) = strip_comb lhs
-      |>> (fn x => if is_Free x then fst (dest_Free x)
-          else primrec_error_eqn "malformed function equation (does not start with free)" eqn);
-    val (left_args, rest) = take_prefix is_Free args;
-    val (nonfrees, right_args) = take_suffix is_Free rest;
-    val num_nonfrees = length nonfrees;
-    val _ = num_nonfrees = 1 orelse if num_nonfrees = 0 then
-      primrec_error_eqn "constructor pattern missing in left-hand side" eqn else
-      primrec_error_eqn "more than one non-variable argument in left-hand side" eqn;
-    val _ = member (op =) fun_names fun_name orelse
-      primrec_error_eqn "malformed function equation (does not start with function name)" eqn
-
-    val (ctr, ctr_args) = strip_comb (the_single nonfrees);
-    val _ = try (num_binder_types o fastype_of) ctr = SOME (length ctr_args) orelse
-      primrec_error_eqn "partially applied constructor in pattern" eqn;
-    val _ = let val d = duplicates (op =) (left_args @ ctr_args @ right_args) in null d orelse
-      primrec_error_eqn ("duplicate variable \"" ^ Syntax.string_of_term lthy (hd d) ^
-        "\" in left-hand side") eqn end;
-    val _ = forall is_Free ctr_args orelse
-      primrec_error_eqn "non-primitive pattern in left-hand side" eqn;
-    val _ =
-      let val b = fold_aterms (fn x as Free (v, _) =>
-        if (not (member (op =) (left_args @ ctr_args @ right_args) x) andalso
-        not (member (op =) fun_names v) andalso
-        not (Variable.is_fixed lthy v)) then cons x else I | _ => I) rhs []
-      in
-        null b orelse
-        primrec_error_eqn ("extra variable(s) in right-hand side: " ^
-          commas (map (Syntax.string_of_term lthy) b)) eqn
-      end;
-  in
-    {fun_name = fun_name,
-     rec_type = body_type (type_of ctr),
-     ctr = ctr,
-     ctr_args = ctr_args,
-     left_args = left_args,
-     right_args = right_args,
-     res_type = map fastype_of (left_args @ right_args) ---> fastype_of rhs,
-     rhs_term = rhs,
-     user_eqn = eqn'}
-  end;
-
-fun rewrite_map_arg get_ctr_pos rec_type res_type =
-  let
-    val pT = HOLogic.mk_prodT (rec_type, res_type);
-
-    val maybe_suc = Option.map (fn x => x + 1);
-    fun subst d (t as Bound d') = t |> d = SOME d' ? curry (op $) (fst_const pT)
-      | subst d (Abs (v, T, b)) = Abs (v, if d = SOME ~1 then pT else T, subst (maybe_suc d) b)
-      | subst d t =
-        let
-          val (u, vs) = strip_comb t;
-          val ctr_pos = try (get_ctr_pos o the) (free_name u) |> the_default ~1;
-        in
-          if ctr_pos >= 0 then
-            if d = SOME ~1 andalso length vs = ctr_pos then
-              list_comb (permute_args ctr_pos (snd_const pT), vs)
-            else if length vs > ctr_pos andalso is_some d
-                andalso d = try (fn Bound n => n) (nth vs ctr_pos) then
-              list_comb (snd_const pT $ nth vs ctr_pos, map (subst d) (nth_drop ctr_pos vs))
-            else
-              primrec_error_eqn ("recursive call not directly applied to constructor argument") t
-          else if d = SOME ~1 andalso const_name u = SOME @{const_name comp} then
-            list_comb (map_types (K dummyT) u, map2 subst [NONE, d] vs)
-          else
-            list_comb (u, map (subst (d |> d = SOME ~1 ? K NONE)) vs)
-        end
-  in
-    subst (SOME ~1)
-  end;
-
-fun subst_rec_calls lthy get_ctr_pos has_call ctr_args direct_calls indirect_calls t =
-  let
-    fun subst bound_Ts (Abs (v, T, b)) = Abs (v, T, subst (T :: bound_Ts) b)
-      | subst bound_Ts (t as g' $ y) =
-        let
-          val maybe_direct_y' = AList.lookup (op =) direct_calls y;
-          val maybe_indirect_y' = AList.lookup (op =) indirect_calls y;
-          val (g, g_args) = strip_comb g';
-          val ctr_pos = try (get_ctr_pos o the) (free_name g) |> the_default ~1;
-          val _ = ctr_pos < 0 orelse length g_args >= ctr_pos orelse
-            primrec_error_eqn "too few arguments in recursive call" t;
-        in
-          if not (member (op =) ctr_args y) then
-            pairself (subst bound_Ts) (g', y) |> (op $)
-          else if ctr_pos >= 0 then
-            list_comb (the maybe_direct_y', g_args)
-          else if is_some maybe_indirect_y' then
-            (if has_call g' then t else y)
-            |> massage_indirect_rec_call lthy has_call
-              (rewrite_map_arg get_ctr_pos) bound_Ts y (the maybe_indirect_y')
-            |> (if has_call g' then I else curry (op $) g')
-          else
-            t
-        end
-      | subst _ t = t
-  in
-    subst [] t
-    |> tap (fn u => has_call u andalso (* FIXME detect this case earlier *)
-      primrec_error_eqn "recursive call not directly applied to constructor argument" t)
-  end;
-
-fun build_rec_arg lthy (funs_data : eqn_data list list) has_call (ctr_spec : rec_ctr_spec)
-    (maybe_eqn_data : eqn_data option) =
-  if is_none maybe_eqn_data then undef_const else
-    let
-      val eqn_data = the maybe_eqn_data;
-      val t = #rhs_term eqn_data;
-      val ctr_args = #ctr_args eqn_data;
-
-      val calls = #calls ctr_spec;
-      val n_args = fold (curry (op +) o (fn Direct_Rec _ => 2 | _ => 1)) calls 0;
-
-      val no_calls' = tag_list 0 calls
-        |> map_filter (try (apsnd (fn No_Rec n => n | Direct_Rec (n, _) => n)));
-      val direct_calls' = tag_list 0 calls
-        |> map_filter (try (apsnd (fn Direct_Rec (_, n) => n)));
-      val indirect_calls' = tag_list 0 calls
-        |> map_filter (try (apsnd (fn Indirect_Rec n => n)));
-
-      fun make_direct_type _ = dummyT; (* FIXME? *)
-
-      val rec_res_type_list = map (fn (x :: _) => (#rec_type x, #res_type x)) funs_data;
-
-      fun make_indirect_type (Type (Tname, Ts)) = Type (Tname, Ts |> map (fn T =>
-        let val maybe_res_type = AList.lookup (op =) rec_res_type_list T in
-          if is_some maybe_res_type
-          then HOLogic.mk_prodT (T, the maybe_res_type)
-          else make_indirect_type T end))
-        | make_indirect_type T = T;
-
-      val args = replicate n_args ("", dummyT)
-        |> Term.rename_wrt_term t
-        |> map Free
-        |> fold (fn (ctr_arg_idx, arg_idx) =>
-            nth_map arg_idx (K (nth ctr_args ctr_arg_idx)))
-          no_calls'
-        |> fold (fn (ctr_arg_idx, arg_idx) =>
-            nth_map arg_idx (K (nth ctr_args ctr_arg_idx |> map_types make_direct_type)))
-          direct_calls'
-        |> fold (fn (ctr_arg_idx, arg_idx) =>
-            nth_map arg_idx (K (nth ctr_args ctr_arg_idx |> map_types make_indirect_type)))
-          indirect_calls';
-
-      val fun_name_ctr_pos_list =
-        map (fn (x :: _) => (#fun_name x, length (#left_args x))) funs_data;
-      val get_ctr_pos = try (the o AList.lookup (op =) fun_name_ctr_pos_list) #> the_default ~1;
-      val direct_calls = map (apfst (nth ctr_args) o apsnd (nth args)) direct_calls';
-      val indirect_calls = map (apfst (nth ctr_args) o apsnd (nth args)) indirect_calls';
-
-      val abstractions = args @ #left_args eqn_data @ #right_args eqn_data;
-    in
-      t
-      |> subst_rec_calls lthy get_ctr_pos has_call ctr_args direct_calls indirect_calls
-      |> fold_rev lambda abstractions
-    end;
-
-fun build_defs lthy bs mxs (funs_data : eqn_data list list) (rec_specs : rec_spec list) has_call =
-  let
-    val n_funs = length funs_data;
-
-    val ctr_spec_eqn_data_list' =
-      (take n_funs rec_specs |> map #ctr_specs) ~~ funs_data
-      |> maps (uncurry (finds (fn (x, y) => #ctr x = #ctr y))
-          ##> (fn x => null x orelse
-            primrec_error_eqns "excess equations in definition" (map #rhs_term x)) #> fst);
-    val _ = ctr_spec_eqn_data_list' |> map (fn (_, x) => length x <= 1 orelse
-      primrec_error_eqns ("multiple equations for constructor") (map #user_eqn x));
-
-    val ctr_spec_eqn_data_list =
-      ctr_spec_eqn_data_list' @ (drop n_funs rec_specs |> maps #ctr_specs |> map (rpair []));
-
-    val recs = take n_funs rec_specs |> map #recx;
-    val rec_args = ctr_spec_eqn_data_list
-      |> sort ((op <) o pairself (#offset o fst) |> make_ord)
-      |> map (uncurry (build_rec_arg lthy funs_data has_call) o apsnd (try the_single));
-    val ctr_poss = map (fn x =>
-      if length (distinct ((op =) o pairself (length o #left_args)) x) <> 1 then
-        primrec_error ("inconstant constructor pattern position for function " ^
-          quote (#fun_name (hd x)))
-      else
-        hd x |> #left_args |> length) funs_data;
-  in
-    (recs, ctr_poss)
-    |-> map2 (fn recx => fn ctr_pos => list_comb (recx, rec_args) |> permute_args ctr_pos)
-    |> Syntax.check_terms lthy
-    |> map3 (fn b => fn mx => fn t => ((b, mx), ((Binding.map_name Thm.def_name b, []), t))) bs mxs
-  end;
-
-fun find_rec_calls has_call (eqn_data : eqn_data) =
-  let
-    fun find (Abs (_, _, b)) ctr_arg = find b ctr_arg
-      | find (t as _ $ _) ctr_arg =
-        let
-          val (f', args') = strip_comb t;
-          val n = find_index (equal ctr_arg) args';
-        in
-          if n < 0 then
-            find f' ctr_arg @ maps (fn x => find x ctr_arg) args'
-          else
-            let val (f, args) = chop n args' |>> curry list_comb f' in
-              if has_call f then
-                f :: maps (fn x => find x ctr_arg) args
-              else
-                find f ctr_arg @ maps (fn x => find x ctr_arg) args
-            end
-        end
-      | find _ _ = [];
-  in
-    map (find (#rhs_term eqn_data)) (#ctr_args eqn_data)
-    |> (fn [] => NONE | callss => SOME (#ctr eqn_data, callss))
-  end;
-
-fun prepare_primrec fixes specs lthy =
-  let
-    val (bs, mxs) = map_split (apfst fst) fixes;
-    val fun_names = map Binding.name_of bs;
-    val eqns_data = map (dissect_eqn lthy fun_names) specs;
-    val funs_data = eqns_data
-      |> partition_eq ((op =) o pairself #fun_name)
-      |> finds (fn (x, y) => x = #fun_name (hd y)) fun_names |> fst
-      |> map (fn (x, y) => the_single y handle List.Empty =>
-          primrec_error ("missing equations for function " ^ quote x));
-
-    val has_call = exists_subterm (map (fst #>> Binding.name_of #> Free) fixes |> member (op =));
-    val arg_Ts = map (#rec_type o hd) funs_data;
-    val res_Ts = map (#res_type o hd) funs_data;
-    val callssss = funs_data
-      |> map (partition_eq ((op =) o pairself #ctr))
-      |> map (maps (map_filter (find_rec_calls has_call)));
-
-    val ((n2m, rec_specs, _, induct_thm, induct_thms), lthy') =
-      rec_specs_of bs arg_Ts res_Ts (get_indices fixes) callssss lthy;
-
-    val actual_nn = length funs_data;
-
-    val _ = let val ctrs = (maps (map #ctr o #ctr_specs) rec_specs) in
-      map (fn {ctr, user_eqn, ...} => member (op =) ctrs ctr orelse
-        primrec_error_eqn ("argument " ^ quote (Syntax.string_of_term lthy' ctr) ^
-          " is not a constructor in left-hand side") user_eqn) eqns_data end;
-
-    val defs = build_defs lthy' bs mxs funs_data rec_specs has_call;
-
-    fun prove lthy def_thms' ({ctr_specs, nested_map_idents, nested_map_comps, ...} : rec_spec)
-        (fun_data : eqn_data list) =
-      let
-        val def_thms = map (snd o snd) def_thms';
-        val simp_thmss = finds (fn (x, y) => #ctr x = #ctr y) fun_data ctr_specs
-          |> fst
-          |> map_filter (try (fn (x, [y]) =>
-            (#user_eqn x, length (#left_args x) + length (#right_args x), #rec_thm y)))
-          |> map (fn (user_eqn, num_extra_args, rec_thm) =>
-            mk_primrec_tac lthy num_extra_args nested_map_idents nested_map_comps def_thms rec_thm
-            |> K |> Goal.prove lthy [] [] user_eqn);
-        val poss = find_indices (fn (x, y) => #ctr x = #ctr y) fun_data eqns_data;
-      in
-        (poss, simp_thmss)
-      end;
-
-    val notes =
-      (if n2m then map2 (fn name => fn thm =>
-        (name, inductN, [thm], [])) fun_names (take actual_nn induct_thms) else [])
-      |> map (fn (prefix, thmN, thms, attrs) =>
-        ((Binding.qualify true prefix (Binding.name thmN), attrs), [(thms, [])]));
-
-    val common_name = mk_common_name fun_names;
-
-    val common_notes =
-      (if n2m then [(inductN, [induct_thm], [])] else [])
-      |> map (fn (thmN, thms, attrs) =>
-        ((Binding.qualify true common_name (Binding.name thmN), attrs), [(thms, [])]));
-  in
-    (((fun_names, defs),
-      fn lthy => fn defs =>
-        split_list (map2 (prove lthy defs) (take actual_nn rec_specs) funs_data)),
-      lthy' |> Local_Theory.notes (notes @ common_notes) |> snd)
-  end;
-
-(* primrec definition *)
-
-fun add_primrec_simple fixes ts lthy =
-  let
-    val (((names, defs), prove), lthy) = prepare_primrec fixes ts lthy
-      handle ERROR str => primrec_error str;
-  in
-    lthy
-    |> fold_map Local_Theory.define defs
-    |-> (fn defs => `(fn lthy => (names, (map fst defs, prove lthy defs))))
-  end
-  handle Primrec_Error (str, eqns) =>
-    if null eqns
-    then error ("primrec_new error:\n  " ^ str)
-    else error ("primrec_new error:\n  " ^ str ^ "\nin\n  " ^
-      space_implode "\n  " (map (quote o Syntax.string_of_term lthy) eqns));
-
-local
-
-fun gen_primrec prep_spec (raw_fixes : (binding * 'a option * mixfix) list) raw_spec lthy =
-  let
-    val d = duplicates (op =) (map (Binding.name_of o #1) raw_fixes)
-    val _ = null d orelse primrec_error ("duplicate function name(s): " ^ commas d);
-
-    val (fixes, specs) = fst (prep_spec raw_fixes raw_spec lthy);
-
-    val mk_notes =
-      flat ooo map3 (fn poss => fn prefix => fn thms =>
-        let
-          val (bs, attrss) = map_split (fst o nth specs) poss;
-          val notes =
-            map3 (fn b => fn attrs => fn thm =>
-              ((Binding.qualify false prefix b, code_nitpick_simp_attrs @ attrs), [([thm], [])]))
-            bs attrss thms;
-        in
-          ((Binding.qualify true prefix (Binding.name simpsN), []), [(thms, [])]) :: notes
-        end);
-  in
-    lthy
-    |> add_primrec_simple fixes (map snd specs)
-    |-> (fn (names, (ts, (posss, simpss))) =>
-      Spec_Rules.add Spec_Rules.Equational (ts, flat simpss)
-      #> Local_Theory.notes (mk_notes posss names simpss)
-      #>> pair ts o map snd)
-  end;
-
-in
-
-val add_primrec = gen_primrec Specification.check_spec;
-val add_primrec_cmd = gen_primrec Specification.read_spec;
-
-end;
-
-fun add_primrec_global fixes specs thy =
-  let
-    val lthy = Named_Target.theory_init thy;
-    val ((ts, simps), lthy') = add_primrec fixes specs lthy;
-    val simps' = burrow (Proof_Context.export lthy' lthy) simps;
-  in ((ts, simps'), Local_Theory.exit_global lthy') end;
-
-fun add_primrec_overloaded ops fixes specs thy =
-  let
-    val lthy = Overloading.overloading ops thy;
-    val ((ts, simps), lthy') = add_primrec fixes specs lthy;
-    val simps' = burrow (Proof_Context.export lthy' lthy) simps;
-  in ((ts, simps'), Local_Theory.exit_global lthy') end;
-
-
-
-(* Primcorec *)
-
-type co_eqn_data_disc = {
-  fun_name: string,
-  fun_T: typ,
-  fun_args: term list,
-  ctr: term,
-  ctr_no: int, (*###*)
-  disc: term,
-  prems: term list,
-  auto_gen: bool,
-  user_eqn: term
-};
-
-type co_eqn_data_sel = {
-  fun_name: string,
-  fun_T: typ,
-  fun_args: term list,
-  ctr: term,
-  sel: term,
-  rhs_term: term,
-  user_eqn: term
-};
-
-datatype co_eqn_data =
-  Disc of co_eqn_data_disc |
-  Sel of co_eqn_data_sel;
-
-fun co_dissect_eqn_disc sequential fun_names (corec_specs : corec_spec list) prems' concl
-    matchedsss =
-  let
-    fun find_subterm p = let (* FIXME \<exists>? *)
-      fun f (t as u $ v) = if p t then SOME t else merge_options (f u, f v)
-        | f t = if p t then SOME t else NONE
-      in f end;
-
-    val applied_fun = concl
-      |> find_subterm (member ((op =) o apsnd SOME) fun_names o try (fst o dest_Free o head_of))
-      |> the
-      handle Option.Option => primrec_error_eqn "malformed discriminator equation" concl;
-    val ((fun_name, fun_T), fun_args) = strip_comb applied_fun |>> dest_Free;
-    val {ctr_specs, ...} = the (AList.lookup (op =) (fun_names ~~ corec_specs) fun_name);
-
-    val discs = map #disc ctr_specs;
-    val ctrs = map #ctr ctr_specs;
-    val not_disc = head_of concl = @{term Not};
-    val _ = not_disc andalso length ctrs <> 2 andalso
-      primrec_error_eqn "\<not>ed discriminator for a type with \<noteq> 2 constructors" concl;
-    val disc = find_subterm (member (op =) discs o head_of) concl;
-    val eq_ctr0 = concl |> perhaps (try (HOLogic.dest_not)) |> try (HOLogic.dest_eq #> snd)
-        |> (fn SOME t => let val n = find_index (equal t) ctrs in
-          if n >= 0 then SOME n else NONE end | _ => NONE);
-    val _ = is_some disc orelse is_some eq_ctr0 orelse
-      primrec_error_eqn "no discriminator in equation" concl;
-    val ctr_no' =
-      if is_none disc then the eq_ctr0 else find_index (equal (head_of (the disc))) discs;
-    val ctr_no = if not_disc then 1 - ctr_no' else ctr_no';
-    val ctr = #ctr (nth ctr_specs ctr_no);
-
-    val catch_all = try (fst o dest_Free o the_single) prems' = SOME Name.uu_;
-    val matchedss = AList.lookup (op =) matchedsss fun_name |> the_default [];
-    val prems = map (abstract (List.rev fun_args)) prems';
-    val real_prems =
-      (if catch_all orelse sequential then maps negate_disj matchedss else []) @
-      (if catch_all then [] else prems);
-
-    val matchedsss' = AList.delete (op =) fun_name matchedsss
-      |> cons (fun_name, if sequential then matchedss @ [prems] else matchedss @ [real_prems]);
-
-    val user_eqn =
-      (real_prems, betapply (#disc (nth ctr_specs ctr_no), applied_fun))
-      |>> map HOLogic.mk_Trueprop ||> HOLogic.mk_Trueprop
-      |> Logic.list_implies;
-  in
-    (Disc {
-      fun_name = fun_name,
-      fun_T = fun_T,
-      fun_args = fun_args,
-      ctr = ctr,
-      ctr_no = ctr_no,
-      disc = #disc (nth ctr_specs ctr_no),
-      prems = real_prems,
-      auto_gen = catch_all,
-      user_eqn = user_eqn
-    }, matchedsss')
-  end;
-
-fun co_dissect_eqn_sel fun_names (corec_specs : corec_spec list) eqn' of_spec eqn =
-  let
-    val (lhs, rhs) = HOLogic.dest_eq eqn
-      handle TERM _ =>
-        primrec_error_eqn "malformed function equation (expected \"lhs = rhs\")" eqn;
-    val sel = head_of lhs;
-    val ((fun_name, fun_T), fun_args) = dest_comb lhs |> snd |> strip_comb |> apfst dest_Free
-      handle TERM _ =>
-        primrec_error_eqn "malformed selector argument in left-hand side" eqn;
-    val corec_spec = the (AList.lookup (op =) (fun_names ~~ corec_specs) fun_name)
-      handle Option.Option => primrec_error_eqn "malformed selector argument in left-hand side" eqn;
-    val ctr_spec =
-      if is_some of_spec
-      then the (find_first (equal (the of_spec) o #ctr) (#ctr_specs corec_spec))
-      else #ctr_specs corec_spec |> filter (exists (equal sel) o #sels) |> the_single
-        handle List.Empty => primrec_error_eqn "ambiguous selector - use \"of\"" eqn;
-    val user_eqn = drop_All eqn';
-  in
-    Sel {
-      fun_name = fun_name,
-      fun_T = fun_T,
-      fun_args = fun_args,
-      ctr = #ctr ctr_spec,
-      sel = sel,
-      rhs_term = rhs,
-      user_eqn = user_eqn
-    }
-  end;
-
-fun co_dissect_eqn_ctr sequential fun_names (corec_specs : corec_spec list) eqn' imp_prems imp_rhs
-    matchedsss =
-  let
-    val (lhs, rhs) = HOLogic.dest_eq imp_rhs;
-    val fun_name = head_of lhs |> fst o dest_Free;
-    val {ctr_specs, ...} = the (AList.lookup (op =) (fun_names ~~ corec_specs) fun_name);
-    val (ctr, ctr_args) = strip_comb rhs;
-    val {disc, sels, ...} = the (find_first (equal ctr o #ctr) ctr_specs)
-      handle Option.Option => primrec_error_eqn "not a constructor" ctr;
-
-    val disc_imp_rhs = betapply (disc, lhs);
-    val (maybe_eqn_data_disc, matchedsss') = if length ctr_specs = 1
-      then (NONE, matchedsss)
-      else apfst SOME (co_dissect_eqn_disc
-          sequential fun_names corec_specs imp_prems disc_imp_rhs matchedsss);
-
-    val sel_imp_rhss = (sels ~~ ctr_args)
-      |> map (fn (sel, ctr_arg) => HOLogic.mk_eq (betapply (sel, lhs), ctr_arg));
-
-(*
-val _ = tracing ("reduced\n    " ^ Syntax.string_of_term @{context} imp_rhs ^ "\nto\n    \<cdot> " ^
- (is_some maybe_eqn_data_disc ? K (Syntax.string_of_term @{context} disc_imp_rhs ^ "\n    \<cdot> ")) "" ^
- space_implode "\n    \<cdot> " (map (Syntax.string_of_term @{context}) sel_imp_rhss));
-*)
-
-    val eqns_data_sel =
-      map (co_dissect_eqn_sel fun_names corec_specs eqn' (SOME ctr)) sel_imp_rhss;
-  in
-    (the_list maybe_eqn_data_disc @ eqns_data_sel, matchedsss')
-  end;
-
-fun co_dissect_eqn sequential fun_names (corec_specs : corec_spec list) eqn' of_spec matchedsss =
-  let
-    val eqn = drop_All eqn'
-      handle TERM _ => primrec_error_eqn "malformed function equation" eqn';
-    val (imp_prems, imp_rhs) = Logic.strip_horn eqn
-      |> apfst (map HOLogic.dest_Trueprop) o apsnd HOLogic.dest_Trueprop;
-
-    val head = imp_rhs
-      |> perhaps (try HOLogic.dest_not) |> perhaps (try (fst o HOLogic.dest_eq))
-      |> head_of;
-
-    val maybe_rhs = imp_rhs |> perhaps (try (HOLogic.dest_not)) |> try (snd o HOLogic.dest_eq);
-
-    val discs = maps #ctr_specs corec_specs |> map #disc;
-    val sels = maps #ctr_specs corec_specs |> maps #sels;
-    val ctrs = maps #ctr_specs corec_specs |> map #ctr;
-  in
-    if member (op =) discs head orelse
-      is_some maybe_rhs andalso
-        member (op =) (filter (null o binder_types o fastype_of) ctrs) (the maybe_rhs) then
-      co_dissect_eqn_disc sequential fun_names corec_specs imp_prems imp_rhs matchedsss
-      |>> single
-    else if member (op =) sels head then
-      ([co_dissect_eqn_sel fun_names corec_specs eqn' of_spec imp_rhs], matchedsss)
-    else if is_Free head andalso member (op =) fun_names (fst (dest_Free head)) then
-      co_dissect_eqn_ctr sequential fun_names corec_specs eqn' imp_prems imp_rhs matchedsss
-    else
-      primrec_error_eqn "malformed function equation" eqn
-  end;
-
-fun build_corec_arg_disc (ctr_specs : corec_ctr_spec list)
-    ({fun_args, ctr_no, prems, ...} : co_eqn_data_disc) =
-  if is_none (#pred (nth ctr_specs ctr_no)) then I else
-    mk_conjs prems
-    |> curry subst_bounds (List.rev fun_args)
-    |> HOLogic.tupled_lambda (HOLogic.mk_tuple fun_args)
-    |> K |> nth_map (the (#pred (nth ctr_specs ctr_no)));
-
-fun build_corec_arg_no_call (sel_eqns : co_eqn_data_sel list) sel =
-  find_first (equal sel o #sel) sel_eqns
-  |> try (fn SOME {fun_args, rhs_term, ...} => abs_tuple fun_args rhs_term)
-  |> the_default undef_const
-  |> K;
-
-fun build_corec_args_direct_call lthy has_call (sel_eqns : co_eqn_data_sel list) sel =
-  let
-    val maybe_sel_eqn = find_first (equal sel o #sel) sel_eqns;
-  in
-    if is_none maybe_sel_eqn then (I, I, I) else
-    let
-      val {fun_args, rhs_term, ... } = the maybe_sel_eqn;
-      fun rewrite_q _ t = if has_call t then @{term False} else @{term True};
-      fun rewrite_g _ t = if has_call t then undef_const else t;
-      fun rewrite_h bound_Ts t =
-        if has_call t then mk_tuple1 bound_Ts (snd (strip_comb t)) else undef_const;
-      fun massage f t = massage_direct_corec_call lthy has_call f [] rhs_term |> abs_tuple fun_args;
-    in
-      (massage rewrite_q,
-       massage rewrite_g,
-       massage rewrite_h)
-    end
-  end;
-
-fun build_corec_arg_indirect_call lthy has_call (sel_eqns : co_eqn_data_sel list) sel =
-  let
-    val maybe_sel_eqn = find_first (equal sel o #sel) sel_eqns;
-  in
-    if is_none maybe_sel_eqn then I else
-    let
-      val {fun_args, rhs_term, ...} = the maybe_sel_eqn;
-      fun rewrite bound_Ts U T (Abs (v, V, b)) = Abs (v, V, rewrite (V :: bound_Ts) U T b)
-        | rewrite bound_Ts U T (t as _ $ _) =
-          let val (u, vs) = strip_comb t in
-            if is_Free u andalso has_call u then
-              Inr_const U T $ mk_tuple1 bound_Ts vs
-            else if try (fst o dest_Const) u = SOME @{const_name prod_case} then
-              map (rewrite bound_Ts U T) vs |> chop 1 |>> HOLogic.mk_split o the_single |> list_comb
-            else
-              list_comb (rewrite bound_Ts U T u, map (rewrite bound_Ts U T) vs)
-          end
-        | rewrite _ U T t =
-          if is_Free t andalso has_call t then Inr_const U T $ HOLogic.unit else t;
-      fun massage t =
-        massage_indirect_corec_call lthy has_call rewrite [] (range_type (fastype_of t)) rhs_term
-        |> abs_tuple fun_args;
-    in
-      massage
-    end
-  end;
-
-fun build_corec_args_sel lthy has_call (all_sel_eqns : co_eqn_data_sel list)
-    (ctr_spec : corec_ctr_spec) =
-  let val sel_eqns = filter (equal (#ctr ctr_spec) o #ctr) all_sel_eqns in
-    if null sel_eqns then I else
-      let
-        val sel_call_list = #sels ctr_spec ~~ #calls ctr_spec;
-
-        val no_calls' = map_filter (try (apsnd (fn No_Corec n => n))) sel_call_list;
-        val direct_calls' = map_filter (try (apsnd (fn Direct_Corec n => n))) sel_call_list;
-        val indirect_calls' = map_filter (try (apsnd (fn Indirect_Corec n => n))) sel_call_list;
-      in
-        I
-        #> fold (fn (sel, n) => nth_map n (build_corec_arg_no_call sel_eqns sel)) no_calls'
-        #> fold (fn (sel, (q, g, h)) =>
-          let val (fq, fg, fh) = build_corec_args_direct_call lthy has_call sel_eqns sel in
-            nth_map q fq o nth_map g fg o nth_map h fh end) direct_calls'
-        #> fold (fn (sel, n) => nth_map n
-          (build_corec_arg_indirect_call lthy has_call sel_eqns sel)) indirect_calls'
-      end
-  end;
-
-fun co_build_defs lthy bs mxs has_call arg_Tss (corec_specs : corec_spec list)
-    (disc_eqnss : co_eqn_data_disc list list) (sel_eqnss : co_eqn_data_sel list list) =
-  let
-    val corec_specs' = take (length bs) corec_specs;
-    val corecs = map #corec corec_specs';
-    val ctr_specss = map #ctr_specs corec_specs';
-    val corec_args = hd corecs
-      |> fst o split_last o binder_types o fastype_of
-      |> map (Const o pair @{const_name undefined})
-      |> fold2 (fold o build_corec_arg_disc) ctr_specss disc_eqnss
-      |> fold2 (fold o build_corec_args_sel lthy has_call) sel_eqnss ctr_specss;
-    fun currys [] t = t
-      | currys Ts t = t $ mk_tuple1 (List.rev Ts) (map Bound (length Ts - 1 downto 0))
-          |> fold_rev (Term.abs o pair Name.uu) Ts;
-
-(*
-val _ = tracing ("corecursor arguments:\n    \<cdot> " ^
- space_implode "\n    \<cdot> " (map (Syntax.string_of_term lthy) corec_args));
-*)
-
-    val exclss' =
-      disc_eqnss
-      |> map (map (fn x => (#fun_args x, #ctr_no x, #prems x, #auto_gen x))
-        #> fst o (fn xs => fold_map (fn x => fn ys => ((x, ys), ys @ [x])) xs [])
-        #> maps (uncurry (map o pair)
-          #> map (fn ((fun_args, c, x, a), (_, c', y, a')) =>
-              ((c, c', a orelse a'), (x, s_not (mk_conjs y)))
-            ||> apfst (map HOLogic.mk_Trueprop) o apsnd HOLogic.mk_Trueprop
-            ||> Logic.list_implies
-            ||> curry Logic.list_all (map dest_Free fun_args))))
-  in
-    map (list_comb o rpair corec_args) corecs
-    |> map2 (fn Ts => fn t => if length Ts = 0 then t $ HOLogic.unit else t) arg_Tss
-    |> map2 currys arg_Tss
-    |> Syntax.check_terms lthy
-    |> map3 (fn b => fn mx => fn t => ((b, mx), ((Binding.map_name Thm.def_name b, []), t))) bs mxs
-    |> rpair exclss'
-  end;
-
-fun mk_real_disc_eqns fun_binding arg_Ts ({ctr_specs, ...} : corec_spec)
-    (sel_eqns : co_eqn_data_sel list) (disc_eqns : co_eqn_data_disc list) =
-  if length disc_eqns <> length ctr_specs - 1 then disc_eqns else
-    let
-      val n = 0 upto length ctr_specs
-        |> the o find_first (fn idx => not (exists (equal idx o #ctr_no) disc_eqns));
-      val fun_args = (try (#fun_args o hd) disc_eqns, try (#fun_args o hd) sel_eqns)
-        |> the_default (map (curry Free Name.uu) arg_Ts) o merge_options;
-      val extra_disc_eqn = {
-        fun_name = Binding.name_of fun_binding,
-        fun_T = arg_Ts ---> body_type (fastype_of (#ctr (hd ctr_specs))),
-        fun_args = fun_args,
-        ctr = #ctr (nth ctr_specs n),
-        ctr_no = n,
-        disc = #disc (nth ctr_specs n),
-        prems = maps (negate_conj o #prems) disc_eqns,
-        auto_gen = true,
-        user_eqn = undef_const};
-    in
-      chop n disc_eqns ||> cons extra_disc_eqn |> (op @)
-    end;
-
-fun add_primcorec simple sequential fixes specs of_specs lthy =
-  let
-    val (bs, mxs) = map_split (apfst fst) fixes;
-    val (arg_Ts, res_Ts) = map (strip_type o snd o fst #>> HOLogic.mk_tupleT) fixes |> split_list;
-
-    val callssss = []; (* FIXME *)
-
-    val ((n2m, corec_specs', _, coinduct_thm, strong_coinduct_thm, coinduct_thms,
-          strong_coinduct_thms), lthy') =
-      corec_specs_of bs arg_Ts res_Ts (get_indices fixes) callssss lthy;
-
-    val actual_nn = length bs;
-    val fun_names = map Binding.name_of bs;
-    val corec_specs = take actual_nn corec_specs'; (*###*)
-
-    val eqns_data =
-      fold_map2 (co_dissect_eqn sequential fun_names corec_specs) (map snd specs) of_specs []
-      |> flat o fst;
-
-    val disc_eqnss' = map_filter (try (fn Disc x => x)) eqns_data
-      |> partition_eq ((op =) o pairself #fun_name)
-      |> fst o finds (fn (x, ({fun_name, ...} :: _)) => x = fun_name) fun_names
-      |> map (sort ((op <) o pairself #ctr_no |> make_ord) o flat o snd);
-    val _ = disc_eqnss' |> map (fn x =>
-      let val d = duplicates ((op =) o pairself #ctr_no) x in null d orelse
-        primrec_error_eqns "excess discriminator equations in definition"
-          (maps (fn t => filter (equal (#ctr_no t) o #ctr_no) x) d |> map #user_eqn) end);
-
-    val sel_eqnss = map_filter (try (fn Sel x => x)) eqns_data
-      |> partition_eq ((op =) o pairself #fun_name)
-      |> fst o finds (fn (x, ({fun_name, ...} :: _)) => x = fun_name) fun_names
-      |> map (flat o snd);
-
-    val has_call = exists_subterm (map (fst #>> Binding.name_of #> Free) fixes |> member (op =));
-    val arg_Tss = map (binder_types o snd o fst) fixes;
-    val disc_eqnss = map5 mk_real_disc_eqns bs arg_Tss corec_specs sel_eqnss disc_eqnss';
-    val (defs, exclss') =
-      co_build_defs lthy' bs mxs has_call arg_Tss corec_specs disc_eqnss sel_eqnss;
-
-    fun excl_tac (c, c', a) =
-      if a orelse c = c' orelse sequential then
-        SOME (K (HEADGOAL (mk_primcorec_assumption_tac lthy [])))
-      else if simple then
-        SOME (K (auto_tac lthy))
-      else
-        NONE;
-
-(*
-val _ = tracing ("exclusiveness properties:\n    \<cdot> " ^
- space_implode "\n    \<cdot> " (maps (map (Syntax.string_of_term lthy o snd)) exclss'));
-*)
-
-    val exclss'' = exclss' |> map (map (fn (idx, t) =>
-      (idx, (Option.map (Goal.prove lthy [] [] t) (excl_tac idx), t))));
-    val taut_thmss = map (map (apsnd (the o fst)) o filter (is_some o fst o snd)) exclss'';
-    val (obligation_idxss, obligationss) = exclss''
-      |> map (map (apsnd (rpair [] o snd)) o filter (is_none o fst o snd))
-      |> split_list o map split_list;
-
-    fun prove thmss' def_thms' lthy =
-      let
-        val def_thms = map (snd o snd) def_thms';
-
-        val exclss' = map (op ~~) (obligation_idxss ~~ thmss');
-        fun mk_exclsss excls n =
-          (excls, map (fn k => replicate k [TrueI] @ replicate (n - k) []) (0 upto n - 1))
-          |-> fold (fn ((c, c', _), thm) => nth_map c (nth_map c' (K [thm])));
-        val exclssss = (exclss' ~~ taut_thmss |> map (op @), fun_names ~~ corec_specs)
-          |-> map2 (fn excls => fn (_, {ctr_specs, ...}) => mk_exclsss excls (length ctr_specs));
-
-        fun prove_disc ({ctr_specs, ...} : corec_spec) exclsss
-            ({fun_name, fun_T, fun_args, ctr_no, prems, ...} : co_eqn_data_disc) =
-          if Term.aconv_untyped (#disc (nth ctr_specs ctr_no), @{term "\<lambda>x. x = x"}) then [] else
-            let
-              val {disc_corec, ...} = nth ctr_specs ctr_no;
-              val k = 1 + ctr_no;
-              val m = length prems;
-              val t =
-                list_comb (Free (fun_name, fun_T), map Bound (length fun_args - 1 downto 0))
-                |> curry betapply (#disc (nth ctr_specs ctr_no)) (*###*)
-                |> HOLogic.mk_Trueprop
-                |> curry Logic.list_implies (map HOLogic.mk_Trueprop prems)
-                |> curry Logic.list_all (map dest_Free fun_args);
-            in
-              mk_primcorec_disc_tac lthy def_thms disc_corec k m exclsss
-              |> K |> Goal.prove lthy [] [] t
-              |> pair (#disc (nth ctr_specs ctr_no))
-              |> single
-            end;
-
-        fun prove_sel ({nested_maps, nested_map_idents, nested_map_comps, ctr_specs, ...}
-            : corec_spec) (disc_eqns : co_eqn_data_disc list) exclsss
-            ({fun_name, fun_T, fun_args, ctr, sel, rhs_term, ...} : co_eqn_data_sel) =
-          let
-            val SOME ctr_spec = find_first (equal ctr o #ctr) ctr_specs;
-            val ctr_no = find_index (equal ctr o #ctr) ctr_specs;
-            val prems = the_default (maps (negate_conj o #prems) disc_eqns)
-                (find_first (equal ctr_no o #ctr_no) disc_eqns |> Option.map #prems);
-            val sel_corec = find_index (equal sel) (#sels ctr_spec)
-              |> nth (#sel_corecs ctr_spec);
-            val k = 1 + ctr_no;
-            val m = length prems;
-            val t =
-              list_comb (Free (fun_name, fun_T), map Bound (length fun_args - 1 downto 0))
-              |> curry betapply sel
-              |> rpair (abstract (List.rev fun_args) rhs_term)
-              |> HOLogic.mk_Trueprop o HOLogic.mk_eq
-              |> curry Logic.list_implies (map HOLogic.mk_Trueprop prems)
-              |> curry Logic.list_all (map dest_Free fun_args);
-            val (distincts, _, sel_splits, sel_split_asms) = case_thms_of_term lthy [] rhs_term;
-          in
-            mk_primcorec_sel_tac lthy def_thms distincts sel_splits sel_split_asms nested_maps
-              nested_map_idents nested_map_comps sel_corec k m exclsss
-            |> K |> Goal.prove lthy [] [] t
-            |> pair sel
-          end;
-
-        fun prove_ctr disc_alist sel_alist (disc_eqns : co_eqn_data_disc list)
-            (sel_eqns : co_eqn_data_sel list) ({ctr, disc, sels, collapse, ...} : corec_ctr_spec) =
-          if not (exists (equal ctr o #ctr) disc_eqns)
-              andalso not (exists (equal ctr o #ctr) sel_eqns)
-            orelse (* don't try to prove theorems when some sel_eqns are missing *)
-              filter (equal ctr o #ctr) sel_eqns
-              |> fst o finds ((op =) o apsnd #sel) sels
-              |> exists (null o snd)
-          then [] else
-            let
-              val (fun_name, fun_T, fun_args, prems) =
-                (find_first (equal ctr o #ctr) disc_eqns, find_first (equal ctr o #ctr) sel_eqns)
-                |>> Option.map (fn x => (#fun_name x, #fun_T x, #fun_args x, #prems x))
-                ||> Option.map (fn x => (#fun_name x, #fun_T x, #fun_args x, []))
-                |> the o merge_options;
-              val m = length prems;
-              val t = filter (equal ctr o #ctr) sel_eqns
-                |> fst o finds ((op =) o apsnd #sel) sels
-                |> map (snd #> (fn [x] => (List.rev (#fun_args x), #rhs_term x)) #-> abstract)
-                |> curry list_comb ctr
-                |> curry HOLogic.mk_eq (list_comb (Free (fun_name, fun_T),
-                  map Bound (length fun_args - 1 downto 0)))
-                |> HOLogic.mk_Trueprop
-                |> curry Logic.list_implies (map HOLogic.mk_Trueprop prems)
-                |> curry Logic.list_all (map dest_Free fun_args);
-              val maybe_disc_thm = AList.lookup (op =) disc_alist disc;
-              val sel_thms = map snd (filter (member (op =) sels o fst) sel_alist);
-            in
-              mk_primcorec_ctr_of_dtr_tac lthy m collapse maybe_disc_thm sel_thms
-              |> K |> Goal.prove lthy [] [] t
-              |> single
-            end;
-
-        val disc_alists = map3 (maps oo prove_disc) corec_specs exclssss disc_eqnss;
-        val sel_alists = map4 (map ooo prove_sel) corec_specs disc_eqnss exclssss sel_eqnss;
-
-        val disc_thmss = map (map snd) disc_alists;
-        val sel_thmss = map (map snd) sel_alists;
-        val ctr_thmss = map5 (maps oooo prove_ctr) disc_alists sel_alists disc_eqnss sel_eqnss
-          (map #ctr_specs corec_specs);
-
-        val simp_thmss = map2 append disc_thmss sel_thmss
-
-        val common_name = mk_common_name fun_names;
-
-        val notes =
-          [(coinductN, map (if n2m then single else K []) coinduct_thms, []),
-           (codeN, ctr_thmss(*FIXME*), code_nitpick_attrs),
-           (ctrN, ctr_thmss, []),
-           (discN, disc_thmss, simp_attrs),
-           (selN, sel_thmss, simp_attrs),
-           (simpsN, simp_thmss, []),
-           (strong_coinductN, map (if n2m then single else K []) strong_coinduct_thms, [])]
-          |> maps (fn (thmN, thmss, attrs) =>
-            map2 (fn fun_name => fn thms =>
-                ((Binding.qualify true fun_name (Binding.name thmN), attrs), [(thms, [])]))
-              fun_names (take actual_nn thmss))
-          |> filter_out (null o fst o hd o snd);
-
-        val common_notes =
-          [(coinductN, if n2m then [coinduct_thm] else [], []),
-           (strong_coinductN, if n2m then [strong_coinduct_thm] else [], [])]
-          |> filter_out (null o #2)
-          |> map (fn (thmN, thms, attrs) =>
-            ((Binding.qualify true common_name (Binding.name thmN), attrs), [(thms, [])]));
-      in
-        lthy |> Local_Theory.notes (notes @ common_notes) |> snd
-      end;
-
-    fun after_qed thmss' = fold_map Local_Theory.define defs #-> prove thmss';
-
-    val _ = if not simple orelse forall null obligationss then () else
-      primrec_error "need exclusiveness proofs - use primcorecursive instead of primcorec";
-  in
-    if simple then
-      lthy'
-      |> after_qed (map (fn [] => []) obligationss)
-      |> pair NONE o SOME
-    else
-      lthy'
-      |> Proof.theorem NONE after_qed obligationss
-      |> Proof.refine (Method.primitive_text I)
-      |> Seq.hd
-      |> rpair NONE o SOME
-  end;
-
-fun add_primcorec_ursive_cmd simple seq (raw_fixes, raw_specs') lthy =
-  let
-    val (raw_specs, of_specs) = split_list raw_specs' ||> map (Option.map (Syntax.read_term lthy));
-    val ((fixes, specs), _) = Specification.read_spec raw_fixes raw_specs lthy;
-  in
-    add_primcorec simple seq fixes specs of_specs lthy
-    handle ERROR str => primrec_error str
-  end
-  handle Primrec_Error (str, eqns) =>
-    if null eqns
-    then error ("primcorec error:\n  " ^ str)
-    else error ("primcorec error:\n  " ^ str ^ "\nin\n  " ^
-      space_implode "\n  " (map (quote o Syntax.string_of_term lthy) eqns));
-
-val add_primcorecursive_cmd = (the o fst) ooo add_primcorec_ursive_cmd false;
-val add_primcorec_cmd = (the o snd) ooo add_primcorec_ursive_cmd true;
-
-end;
--- a/src/HOL/BNF/Tools/bnf_fp_rec_sugar_tactics.ML	Thu Dec 05 17:52:12 2013 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,116 +0,0 @@
-(*  Title:      HOL/BNF/Tools/bnf_fp_rec_sugar_tactics.ML
-    Author:     Jasmin Blanchette, TU Muenchen
-    Copyright   2013
-
-Tactics for recursor and corecursor sugar.
-*)
-
-signature BNF_FP_REC_SUGAR_TACTICS =
-sig
-  val mk_primcorec_assumption_tac: Proof.context -> thm list -> int -> tactic
-  val mk_primcorec_code_of_raw_code_tac: thm list -> thm -> tactic
-  val mk_primcorec_ctr_of_dtr_tac: Proof.context -> int -> thm -> thm option -> thm list -> tactic
-  val mk_primcorec_disc_tac: Proof.context -> thm list -> thm -> int -> int -> thm list list list ->
-    tactic
-  val mk_primcorec_raw_code_of_ctr_tac: Proof.context -> thm list -> thm list -> thm list ->
-    thm list -> int list -> thm list -> tactic
-  val mk_primcorec_sel_tac: Proof.context -> thm list -> thm list -> thm list -> thm list ->
-    thm list -> thm list -> thm list -> thm -> int -> int -> thm list list list -> tactic
-  val mk_primrec_tac: Proof.context -> int -> thm list -> thm list -> thm list -> thm -> tactic
-end;
-
-structure BNF_FP_Rec_Sugar_Tactics : BNF_FP_REC_SUGAR_TACTICS =
-struct
-
-open BNF_Util
-open BNF_Tactics
-
-val falseEs = @{thms not_TrueE FalseE};
-val neq_eq_eq_contradict = @{thm neq_eq_eq_contradict};
-val split_if = @{thm split_if};
-val split_if_asm = @{thm split_if_asm};
-val split_connectI = @{thms allI impI conjI};
-
-fun mk_primrec_tac ctxt num_extra_args map_idents map_comps fun_defs recx =
-  unfold_thms_tac ctxt fun_defs THEN
-  HEADGOAL (rtac (funpow num_extra_args (fn thm => thm RS fun_cong) recx RS trans)) THEN
-  unfold_thms_tac ctxt (@{thms id_def split o_def fst_conv snd_conv} @ map_comps @ map_idents) THEN
-  HEADGOAL (rtac refl);
-
-fun mk_primcorec_assumption_tac ctxt discIs =
-  SELECT_GOAL (unfold_thms_tac ctxt
-      @{thms not_not not_False_eq_True de_Morgan_conj de_Morgan_disj} THEN
-    SOLVE (HEADGOAL (REPEAT o (rtac refl ORELSE' atac ORELSE' etac conjE ORELSE'
-    resolve_tac @{thms TrueI conjI disjI1 disjI2} ORELSE'
-    dresolve_tac discIs THEN' atac ORELSE'
-    etac notE THEN' atac ORELSE'
-    etac disjE))));
-
-fun mk_primcorec_same_case_tac m =
-  HEADGOAL (if m = 0 then rtac TrueI
-    else REPEAT_DETERM_N (m - 1) o (rtac conjI THEN' atac) THEN' atac);
-
-fun mk_primcorec_different_case_tac ctxt excl =
-  unfold_thms_tac ctxt @{thms not_not not_False_eq_True not_True_eq_False} THEN
-  HEADGOAL (rtac excl THEN_ALL_NEW mk_primcorec_assumption_tac ctxt []);
-
-fun mk_primcorec_cases_tac ctxt k m exclsss =
-  let val n = length exclsss in
-    EVERY (map (fn [] => if k = n then all_tac else mk_primcorec_same_case_tac m
-        | [excl] => mk_primcorec_different_case_tac ctxt excl)
-      (take k (nth exclsss (k - 1))))
-  end;
-
-fun mk_primcorec_prelude ctxt defs thm =
-  unfold_thms_tac ctxt defs THEN HEADGOAL (rtac thm) THEN
-  unfold_thms_tac ctxt @{thms Let_def split};
-
-fun mk_primcorec_disc_tac ctxt defs disc_corec k m exclsss =
-  mk_primcorec_prelude ctxt defs disc_corec THEN mk_primcorec_cases_tac ctxt k m exclsss;
-
-fun mk_primcorec_sel_tac ctxt defs distincts splits split_asms maps map_idents map_comps f_sel k m
-    exclsss =
-  mk_primcorec_prelude ctxt defs (f_sel RS trans) THEN
-  mk_primcorec_cases_tac ctxt k m exclsss THEN
-  HEADGOAL (REPEAT_DETERM o (rtac refl ORELSE' rtac ext ORELSE'
-    eresolve_tac falseEs ORELSE'
-    resolve_tac split_connectI ORELSE'
-    Splitter.split_asm_tac (split_if_asm :: split_asms) ORELSE'
-    Splitter.split_tac (split_if :: splits) ORELSE'
-    eresolve_tac (map (fn thm => thm RS neq_eq_eq_contradict) distincts) THEN' atac ORELSE'
-    etac notE THEN' atac ORELSE'
-    (CHANGED o SELECT_GOAL (unfold_thms_tac ctxt
-      (@{thms id_apply o_def split_def sum.cases} @ maps @ map_comps @ map_idents)))));
-
-fun mk_primcorec_ctr_of_dtr_tac ctxt m collapse maybe_disc_f sel_fs =
-  HEADGOAL (rtac ((if null sel_fs then collapse else collapse RS sym) RS trans) THEN'
-    (the_default (K all_tac) (Option.map rtac maybe_disc_f)) THEN' REPEAT_DETERM_N m o atac) THEN
-  unfold_thms_tac ctxt sel_fs THEN HEADGOAL (rtac refl);
-
-(* TODO: reduce code duplication with selector tactic above *)
-fun mk_primcorec_raw_code_of_ctr_single_tac ctxt distincts discIs splits split_asms m f_ctr =
-  HEADGOAL (REPEAT o (resolve_tac split_connectI ORELSE' split_tac (split_if :: splits))) THEN
-  mk_primcorec_prelude ctxt [] (f_ctr RS trans) THEN
-  HEADGOAL ((REPEAT_DETERM_N m o mk_primcorec_assumption_tac ctxt discIs) THEN'
-    SELECT_GOAL (SOLVE (HEADGOAL (REPEAT_DETERM o
-    (rtac refl ORELSE' atac ORELSE'
-     resolve_tac split_connectI ORELSE'
-     Splitter.split_asm_tac (split_if_asm :: split_asms) ORELSE'
-     Splitter.split_tac (split_if :: splits) ORELSE'
-     mk_primcorec_assumption_tac ctxt discIs ORELSE'
-     eresolve_tac (map (fn thm => thm RS neq_eq_eq_contradict) distincts) THEN' atac ORELSE'
-     (TRY o dresolve_tac discIs) THEN' etac notE THEN' atac)))));
-
-fun mk_primcorec_raw_code_of_ctr_tac ctxt distincts discIs splits split_asms ms ctr_thms =
-  EVERY (map2 (mk_primcorec_raw_code_of_ctr_single_tac ctxt distincts discIs splits split_asms)
-    ms ctr_thms);
-
-fun mk_primcorec_code_of_raw_code_tac splits raw =
-  HEADGOAL (rtac raw ORELSE' rtac (raw RS trans) THEN' REPEAT_DETERM o
-    (rtac refl ORELSE'
-     (TRY o rtac sym) THEN' atac ORELSE'
-     resolve_tac split_connectI ORELSE'
-     Splitter.split_tac (split_if :: splits) ORELSE'
-     etac notE THEN' atac));
-
-end;
--- a/src/HOL/BNF/Tools/bnf_fp_rec_sugar_util.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_fp_rec_sugar_util.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -8,616 +8,60 @@
 
 signature BNF_FP_REC_SUGAR_UTIL =
 sig
-  datatype rec_call =
-    No_Rec of int |
-    Direct_Rec of int (*before*) * int (*after*) |
-    Indirect_Rec of int
-
-  datatype corec_call =
-    Dummy_No_Corec of int |
-    No_Corec of int |
-    Direct_Corec of int (*stop?*) * int (*end*) * int (*continue*) |
-    Indirect_Corec of int
-
-  type rec_ctr_spec =
-    {ctr: term,
-     offset: int,
-     calls: rec_call list,
-     rec_thm: thm}
-
-  type corec_ctr_spec =
-    {ctr: term,
-     disc: term,
-     sels: term list,
-     pred: int option,
-     calls: corec_call list,
-     discI: thm,
-     sel_thms: thm list,
-     collapse: thm,
-     corec_thm: thm,
-     disc_corec: thm,
-     sel_corecs: thm list}
+  val indexed: 'a list -> int -> int list * int
+  val indexedd: 'a list list -> int -> int list list * int
+  val indexeddd: 'a list list list -> int -> int list list list * int
+  val indexedddd: 'a list list list list -> int -> int list list list list * int
+  val find_index_eq: ''a list -> ''a -> int
+  val finds: ('a * 'b -> bool) -> 'a list -> 'b list -> ('a * 'b list) list * 'b list
 
-  type rec_spec =
-    {recx: term,
-     nested_map_idents: thm list,
-     nested_map_comps: thm list,
-     ctr_specs: rec_ctr_spec list}
-
-  type corec_spec =
-    {corec: term,
-     nested_maps: thm list,
-     nested_map_idents: thm list,
-     nested_map_comps: thm list,
-     ctr_specs: corec_ctr_spec list}
-
-  val s_not: term -> term
-  val mk_conjs: term list -> term
-  val mk_disjs: term list -> term
-  val s_not_disj: term -> term list
-  val negate_conj: term list -> term list
-  val negate_disj: term list -> term list
+  val drop_All: term -> term
 
-  val massage_indirect_rec_call: Proof.context -> (term -> bool) -> (typ -> typ -> term -> term) ->
-    typ list -> term -> term -> term -> term
-  val massage_direct_corec_call: Proof.context -> (term -> bool) -> (typ list -> term -> term) ->
-    typ list -> term -> term
-  val massage_indirect_corec_call: Proof.context -> (term -> bool) ->
-    (typ list -> typ -> typ -> term -> term) -> typ list -> typ -> term -> term
-  val expand_corec_code_rhs: Proof.context -> (term -> bool) -> typ list -> term -> term
-  val massage_corec_code_rhs: Proof.context -> (typ list -> term -> term list -> term) ->
-    typ list -> term -> term
-  val fold_rev_corec_code_rhs: Proof.context -> (term list -> term -> term list -> 'a -> 'a) ->
-    typ list -> term -> 'a -> 'a
-  val case_thms_of_term: Proof.context -> typ list -> term ->
-    thm list * thm list * thm list * thm list
+  val mk_partial_compN: int -> typ -> term -> term
+  val mk_partial_comp: typ -> typ -> term -> term
+  val mk_compN: int -> typ list -> term * term -> term
+  val mk_comp: typ list -> term * term -> term
 
-  val rec_specs_of: binding list -> typ list -> typ list -> (term -> int list) ->
-    ((term * term list list) list) list -> local_theory ->
-    (bool * rec_spec list * typ list * thm * thm list) * local_theory
-  val corec_specs_of: binding list -> typ list -> typ list -> (term -> int list) ->
-    ((term * term list list) list) list -> local_theory ->
-    (bool * corec_spec list * typ list * thm * thm * thm list * thm list) * local_theory
+  val get_indices: ((binding * typ) * 'a) list -> term -> int list
 end;
 
 structure BNF_FP_Rec_Sugar_Util : BNF_FP_REC_SUGAR_UTIL =
 struct
 
-open Ctr_Sugar
-open BNF_Util
-open BNF_Def
-open BNF_FP_Util
-open BNF_FP_Def_Sugar
-open BNF_FP_N2M_Sugar
-
-datatype rec_call =
-  No_Rec of int |
-  Direct_Rec of int * int |
-  Indirect_Rec of int;
-
-datatype corec_call =
-  Dummy_No_Corec of int |
-  No_Corec of int |
-  Direct_Corec of int * int * int |
-  Indirect_Corec of int;
-
-type rec_ctr_spec =
-  {ctr: term,
-   offset: int,
-   calls: rec_call list,
-   rec_thm: thm};
-
-type corec_ctr_spec =
-  {ctr: term,
-   disc: term,
-   sels: term list,
-   pred: int option,
-   calls: corec_call list,
-   discI: thm,
-   sel_thms: thm list,
-   collapse: thm,
-   corec_thm: thm,
-   disc_corec: thm,
-   sel_corecs: thm list};
-
-type rec_spec =
-  {recx: term,
-   nested_map_idents: thm list,
-   nested_map_comps: thm list,
-   ctr_specs: rec_ctr_spec list};
-
-type corec_spec =
-  {corec: term,
-   nested_maps: thm list,
-   nested_map_idents: thm list,
-   nested_map_comps: thm list,
-   ctr_specs: corec_ctr_spec list};
-
-val id_def = @{thm id_def};
-
-exception AINT_NO_MAP of term;
-
-fun ill_formed_rec_call ctxt t =
-  error ("Ill-formed recursive call: " ^ quote (Syntax.string_of_term ctxt t));
-fun ill_formed_corec_call ctxt t =
-  error ("Ill-formed corecursive call: " ^ quote (Syntax.string_of_term ctxt t));
-fun invalid_map ctxt t =
-  error ("Invalid map function in " ^ quote (Syntax.string_of_term ctxt t));
-fun unexpected_rec_call ctxt t =
-  error ("Unexpected recursive call: " ^ quote (Syntax.string_of_term ctxt t));
-fun unexpected_corec_call ctxt t =
-  error ("Unexpected corecursive call: " ^ quote (Syntax.string_of_term ctxt t));
-
-fun s_not @{const True} = @{const False}
-  | s_not @{const False} = @{const True}
-  | s_not (@{const Not} $ t) = t
-  | s_not t = HOLogic.mk_not t
-
-val mk_conjs = try (foldr1 HOLogic.mk_conj) #> the_default @{const True};
-val mk_disjs = try (foldr1 HOLogic.mk_disj) #> the_default @{const False};
-
-val s_not_disj = map s_not o HOLogic.disjuncts;
-
-fun negate_conj [t] = s_not_disj t
-  | negate_conj ts = [mk_disjs (map s_not ts)];
-
-fun negate_disj [t] = s_not_disj t
-  | negate_disj ts = [mk_disjs (map (mk_conjs o s_not_disj) ts)];
-
-fun factor_out_types ctxt massage destU U T =
-  (case try destU U of
-    SOME (U1, U2) => if U1 = T then massage T U2 else invalid_map ctxt
-  | NONE => invalid_map ctxt);
-
-fun map_flattened_map_args ctxt s map_args fs =
-  let
-    val flat_fs = flatten_type_args_of_bnf (the (bnf_of ctxt s)) Term.dummy fs;
-    val flat_fs' = map_args flat_fs;
-  in
-    permute_like (op aconv) flat_fs fs flat_fs'
-  end;
-
-fun massage_indirect_rec_call ctxt has_call raw_massage_fun bound_Ts y y' =
-  let
-    val typof = curry fastype_of1 bound_Ts;
-    val build_map_fst = build_map ctxt (fst_const o fst);
-
-    val yT = typof y;
-    val yU = typof y';
-
-    fun y_of_y' () = build_map_fst (yU, yT) $ y';
-    val elim_y = Term.map_aterms (fn t => if t = y then y_of_y' () else t);
-
-    fun massage_direct_fun U T t =
-      if has_call t then factor_out_types ctxt raw_massage_fun HOLogic.dest_prodT U T t
-      else HOLogic.mk_comp (t, build_map_fst (U, T));
-
-    fun massage_map (Type (_, Us)) (Type (s, Ts)) t =
-        (case try (dest_map ctxt s) t of
-          SOME (map0, fs) =>
-          let
-            val Type (_, ran_Ts) = range_type (typof t);
-            val map' = mk_map (length fs) Us ran_Ts map0;
-            val fs' = map_flattened_map_args ctxt s (map3 massage_map_or_map_arg Us Ts) fs;
-          in
-            Term.list_comb (map', fs')
-          end
-        | NONE => raise AINT_NO_MAP t)
-      | massage_map _ _ t = raise AINT_NO_MAP t
-    and massage_map_or_map_arg U T t =
-      if T = U then
-        if has_call t then unexpected_rec_call ctxt t else t
-      else
-        massage_map U T t
-        handle AINT_NO_MAP _ => massage_direct_fun U T t;
-
-    fun massage_call (t as t1 $ t2) =
-        if t2 = y then
-          massage_map yU yT (elim_y t1) $ y'
-          handle AINT_NO_MAP t' => invalid_map ctxt t'
-        else
-          ill_formed_rec_call ctxt t
-      | massage_call t = if t = y then y_of_y' () else ill_formed_rec_call ctxt t;
-  in
-    massage_call
-  end;
-
-fun fold_rev_let_if_case ctxt f bound_Ts t =
-  let
-    val thy = Proof_Context.theory_of ctxt;
-
-    fun fld conds t =
-      (case Term.strip_comb t of
-        (Const (@{const_name Let}, _), [arg1, arg2]) => fld conds (betapply (arg2, arg1))
-      | (Const (@{const_name If}, _), [cond, then_branch, else_branch]) =>
-        fld (conds @ HOLogic.conjuncts cond) then_branch
-        o fld (conds @ s_not_disj cond) else_branch
-      | (Const (c, _), args as _ :: _ :: _) =>
-        let val n = num_binder_types (Sign.the_const_type thy c) - 1 in
-          if n >= 0 andalso n < length args then
-            (case fastype_of1 (bound_Ts, nth args n) of
-              Type (s, Ts) =>
-              (case dest_case ctxt s Ts t of
-                NONE => apsnd (f conds t)
-              | SOME (conds', branches) =>
-                apfst (cons s) o fold_rev (uncurry fld)
-                  (map (append conds o HOLogic.conjuncts) conds' ~~ branches))
-            | _ => apsnd (f conds t))
-          else
-            apsnd (f conds t)
-        end
-      | _ => apsnd (f conds t))
-  in
-    fld [] t o pair []
-  end;
-
-fun case_of ctxt = ctr_sugar_of ctxt #> Option.map (fst o dest_Const o #casex);
-
-fun massage_let_if_case ctxt has_call massage_leaf =
-  let
-    val thy = Proof_Context.theory_of ctxt;
-
-    fun check_no_call t = if has_call t then unexpected_corec_call ctxt t else ();
-
-    fun massage_abs bound_Ts (Abs (s, T, t)) = Abs (s, T, massage_abs (T :: bound_Ts) t)
-      | massage_abs bound_Ts t = massage_rec bound_Ts t
-    and massage_rec bound_Ts t =
-      let val typof = curry fastype_of1 bound_Ts in
-        (case Term.strip_comb t of
-          (Const (@{const_name Let}, _), [arg1, arg2]) =>
-          massage_rec bound_Ts (betapply (arg2, arg1))
-        | (Const (@{const_name If}, _), obj :: (branches as [_, _])) =>
-          let val branches' = map (massage_rec bound_Ts) branches in
-            Term.list_comb (If_const (typof (hd branches')) $ tap check_no_call obj, branches')
-          end
-        | (Const (c, _), args as _ :: _ :: _) =>
-          let
-            val gen_T = Sign.the_const_type thy c;
-            val (gen_branch_Ts, gen_body_fun_T) = strip_fun_type gen_T;
-            val n = length gen_branch_Ts;
-          in
-            if n < length args then
-              (case gen_body_fun_T of
-                Type (_, [Type (T_name, _), _]) =>
-                if case_of ctxt T_name = SOME c then
-                  let
-                    val (branches, obj_leftovers) = chop n args;
-                    val branches' = map (massage_abs bound_Ts o Envir.eta_long bound_Ts) branches;
-                    val branch_Ts' = map typof branches';
-                    val casex' = Const (c, branch_Ts' ---> map typof obj_leftovers --->
-                      snd (strip_typeN (num_binder_types (hd gen_branch_Ts)) (hd branch_Ts')));
-                  in
-                    Term.list_comb (casex', branches' @ tap (List.app check_no_call) obj_leftovers)
-                  end
-                else
-                  massage_leaf bound_Ts t
-              | _ => massage_leaf bound_Ts t)
-            else
-              massage_leaf bound_Ts t
-          end
-        | _ => massage_leaf bound_Ts t)
-      end
-  in
-    massage_rec
-  end;
-
-val massage_direct_corec_call = massage_let_if_case;
-
-fun curried_type (Type (@{type_name fun}, [Type (@{type_name prod}, Ts), T])) = Ts ---> T;
-
-fun massage_indirect_corec_call ctxt has_call raw_massage_call bound_Ts U t =
-  let
-    val build_map_Inl = build_map ctxt (uncurry Inl_const o dest_sumT o snd)
-
-    fun massage_direct_call bound_Ts U T t =
-      if has_call t then factor_out_types ctxt (raw_massage_call bound_Ts) dest_sumT U T t
-      else build_map_Inl (T, U) $ t;
-
-    fun massage_direct_fun bound_Ts U T t =
-      let
-        val var = Var ((Name.uu, Term.maxidx_of_term t + 1),
-          domain_type (fastype_of1 (bound_Ts, t)));
-      in
-        Term.lambda var (massage_direct_call bound_Ts U T (t $ var))
-      end;
-
-    fun massage_map bound_Ts (Type (_, Us)) (Type (s, Ts)) t =
-        (case try (dest_map ctxt s) t of
-          SOME (map0, fs) =>
-          let
-            val Type (_, dom_Ts) = domain_type (fastype_of1 (bound_Ts, t));
-            val map' = mk_map (length fs) dom_Ts Us map0;
-            val fs' =
-              map_flattened_map_args ctxt s (map3 (massage_map_or_map_arg bound_Ts) Us Ts) fs;
-          in
-            Term.list_comb (map', fs')
-          end
-        | NONE => raise AINT_NO_MAP t)
-      | massage_map _ _ _ t = raise AINT_NO_MAP t
-    and massage_map_or_map_arg bound_Ts U T t =
-      if T = U then
-        if has_call t then unexpected_corec_call ctxt t else t
-      else
-        massage_map bound_Ts U T t
-        handle AINT_NO_MAP _ => massage_direct_fun bound_Ts U T t;
-
-    fun massage_call bound_Ts U T =
-      massage_let_if_case ctxt has_call (fn bound_Ts => fn t =>
-        if has_call t then
-          (case U of
-            Type (s, Us) =>
-            (case try (dest_ctr ctxt s) t of
-              SOME (f, args) =>
-              let
-                val typof = curry fastype_of1 bound_Ts;
-                val f' = mk_ctr Us f
-                val f'_T = typof f';
-                val arg_Ts = map typof args;
-              in
-                Term.list_comb (f', map3 (massage_call bound_Ts) (binder_types f'_T) arg_Ts args)
-              end
-            | NONE =>
-              (case t of
-                Const (@{const_name prod_case}, _) $ t' =>
-                let
-                  val U' = curried_type U;
-                  val T' = curried_type T;
-                in
-                  Const (@{const_name prod_case}, U' --> U) $ massage_call bound_Ts U' T' t'
-                end
-              | t1 $ t2 =>
-                (if has_call t2 then
-                  massage_direct_call bound_Ts U T t
-                else
-                  massage_map bound_Ts U T t1 $ t2
-                  handle AINT_NO_MAP _ => massage_direct_call bound_Ts U T t)
-              | Abs (s, T', t') =>
-                Abs (s, T', massage_call (T' :: bound_Ts) (range_type U) (range_type T) t')
-              | _ => massage_direct_call bound_Ts U T t))
-          | _ => ill_formed_corec_call ctxt t)
-        else
-          build_map_Inl (T, U) $ t) bound_Ts;
-
-    val T = fastype_of1 (bound_Ts, t);
-  in
-    if has_call t then massage_call bound_Ts U T t else build_map_Inl (T, U) $ t
-  end;
-
-fun expand_ctr_term ctxt s Ts t =
-  (case ctr_sugar_of ctxt s of
-    SOME {ctrs, casex, ...} =>
-    Term.list_comb (mk_case Ts (Type (s, Ts)) casex, map (mk_ctr Ts) ctrs) $ t
-  | NONE => raise Fail "expand_ctr_term");
-
-fun expand_corec_code_rhs ctxt has_call bound_Ts t =
-  (case fastype_of1 (bound_Ts, t) of
-    Type (s, Ts) =>
-    massage_let_if_case ctxt has_call (fn _ => fn t =>
-      if can (dest_ctr ctxt s) t then t else expand_ctr_term ctxt s Ts t) bound_Ts t
-  | _ => raise Fail "expand_corec_code_rhs");
-
-fun massage_corec_code_rhs ctxt massage_ctr =
-  massage_let_if_case ctxt (K false)
-    (fn bound_Ts => uncurry (massage_ctr bound_Ts) o Term.strip_comb);
-
-fun fold_rev_corec_code_rhs ctxt f =
-  snd ooo fold_rev_let_if_case ctxt (fn conds => uncurry (f conds) o Term.strip_comb);
-
-fun case_thms_of_term ctxt bound_Ts t =
-  let
-    val (caseT_names, _) = fold_rev_let_if_case ctxt (K (K I)) bound_Ts t ();
-    val ctr_sugars = map (the o ctr_sugar_of ctxt) caseT_names;
-  in
-    (maps #distincts ctr_sugars, maps #discIs ctr_sugars, maps #sel_splits ctr_sugars,
-     maps #sel_split_asms ctr_sugars)
-  end;
-
-fun indexed xs h = let val h' = h + length xs in (h upto h' - 1, h') end;
+fun indexe _ h = (h, h + 1);
+fun indexed xs = fold_map indexe xs;
 fun indexedd xss = fold_map indexed xss;
 fun indexeddd xsss = fold_map indexedd xsss;
 fun indexedddd xssss = fold_map indexeddd xssss;
 
 fun find_index_eq hs h = find_index (curry (op =) h) hs;
 
-(*FIXME: remove special cases for products and sum once they are registered as datatypes*)
-fun map_thms_of_typ ctxt (Type (s, _)) =
-    if s = @{type_name prod} then
-      @{thms map_pair_simp}
-    else if s = @{type_name sum} then
-      @{thms sum_map.simps}
-    else
-      (case fp_sugar_of ctxt s of
-        SOME {index, mapss, ...} => nth mapss index
-      | NONE => [])
-  | map_thms_of_typ _ _ = [];
-
-fun rec_specs_of bs arg_Ts res_Ts get_indices callssss0 lthy =
-  let
-    val thy = Proof_Context.theory_of lthy;
-
-    val ((missing_arg_Ts, perm0_kks,
-          fp_sugars as {nested_bnfs, fp_res = {xtor_co_iterss = ctor_iters1 :: _, ...},
-            co_inducts = [induct_thm], ...} :: _, (lfp_sugar_thms, _)), lthy') =
-      nested_to_mutual_fps Least_FP bs arg_Ts get_indices callssss0 lthy;
-
-    val perm_fp_sugars = sort (int_ord o pairself #index) fp_sugars;
-
-    val indices = map #index fp_sugars;
-    val perm_indices = map #index perm_fp_sugars;
-
-    val perm_ctrss = map (#ctrs o of_fp_sugar #ctr_sugars) perm_fp_sugars;
-    val perm_ctr_Tsss = map (map (binder_types o fastype_of)) perm_ctrss;
-    val perm_fpTs = map (body_type o fastype_of o hd) perm_ctrss;
-
-    val nn0 = length arg_Ts;
-    val nn = length perm_fpTs;
-    val kks = 0 upto nn - 1;
-    val perm_ns = map length perm_ctr_Tsss;
-    val perm_mss = map (map length) perm_ctr_Tsss;
-
-    val perm_Cs = map (body_type o fastype_of o co_rec_of o of_fp_sugar (#xtor_co_iterss o #fp_res))
-      perm_fp_sugars;
-    val perm_fun_arg_Tssss =
-      mk_iter_fun_arg_types perm_ctr_Tsss perm_ns perm_mss (co_rec_of ctor_iters1);
-
-    fun unpermute0 perm0_xs = permute_like (op =) perm0_kks kks perm0_xs;
-    fun unpermute perm_xs = permute_like (op =) perm_indices indices perm_xs;
-
-    val induct_thms = unpermute0 (conj_dests nn induct_thm);
+fun finds eq = fold_map (fn x => List.partition (curry eq x) #>> pair x);
 
-    val fpTs = unpermute perm_fpTs;
-    val Cs = unpermute perm_Cs;
-
-    val As_rho = tvar_subst thy (take nn0 fpTs) arg_Ts;
-    val Cs_rho = map (fst o dest_TVar) Cs ~~ pad_list HOLogic.unitT nn res_Ts;
-
-    val substA = Term.subst_TVars As_rho;
-    val substAT = Term.typ_subst_TVars As_rho;
-    val substCT = Term.typ_subst_TVars Cs_rho;
-
-    val perm_Cs' = map substCT perm_Cs;
-
-    fun offset_of_ctr 0 _ = 0
-      | offset_of_ctr n (({ctrs, ...} : ctr_sugar) :: ctr_sugars) =
-        length ctrs + offset_of_ctr (n - 1) ctr_sugars;
-
-    fun call_of [i] [T] = (if exists_subtype_in Cs T then Indirect_Rec else No_Rec) i
-      | call_of [i, i'] _ = Direct_Rec (i, i');
+fun drop_All t =
+  subst_bounds (strip_qnt_vars @{const_name all} t |> map Free |> rev,
+    strip_qnt_body @{const_name all} t);
 
-    fun mk_ctr_spec ctr offset fun_arg_Tss rec_thm =
-      let
-        val (fun_arg_hss, _) = indexedd fun_arg_Tss 0;
-        val fun_arg_hs = flat_rec_arg_args fun_arg_hss;
-        val fun_arg_iss = map (map (find_index_eq fun_arg_hs)) fun_arg_hss;
-      in
-        {ctr = substA ctr, offset = offset, calls = map2 call_of fun_arg_iss fun_arg_Tss,
-         rec_thm = rec_thm}
-      end;
-
-    fun mk_ctr_specs index (ctr_sugars : ctr_sugar list) iter_thmsss =
-      let
-        val ctrs = #ctrs (nth ctr_sugars index);
-        val rec_thmss = co_rec_of (nth iter_thmsss index);
-        val k = offset_of_ctr index ctr_sugars;
-        val n = length ctrs;
-      in
-        map4 mk_ctr_spec ctrs (k upto k + n - 1) (nth perm_fun_arg_Tssss index) rec_thmss
-      end;
-
-    fun mk_spec ({T, index, ctr_sugars, co_iterss = iterss, co_iter_thmsss = iter_thmsss, ...}
-      : fp_sugar) =
-      {recx = mk_co_iter thy Least_FP (substAT T) perm_Cs' (co_rec_of (nth iterss index)),
-       nested_map_idents = map (unfold_thms lthy [id_def] o map_id0_of_bnf) nested_bnfs,
-       nested_map_comps = map map_comp_of_bnf nested_bnfs,
-       ctr_specs = mk_ctr_specs index ctr_sugars iter_thmsss};
-  in
-    ((is_some lfp_sugar_thms, map mk_spec fp_sugars, missing_arg_Ts, induct_thm, induct_thms),
-     lthy')
+fun mk_partial_comp gT fT g =
+  let val T = domain_type fT --> range_type gT in
+    Const (@{const_name Fun.comp}, gT --> fT --> T) $ g
   end;
 
-fun corec_specs_of bs arg_Ts res_Ts get_indices callssss0 lthy =
-  let
-    val thy = Proof_Context.theory_of lthy;
-
-    val ((missing_res_Ts, perm0_kks,
-          fp_sugars as {nested_bnfs, fp_res = {xtor_co_iterss = dtor_coiters1 :: _, ...},
-            co_inducts = coinduct_thms, ...} :: _, (_, gfp_sugar_thms)), lthy') =
-      nested_to_mutual_fps Greatest_FP bs res_Ts get_indices callssss0 lthy;
-
-    val perm_fp_sugars = sort (int_ord o pairself #index) fp_sugars;
-
-    val indices = map #index fp_sugars;
-    val perm_indices = map #index perm_fp_sugars;
-
-    val perm_ctrss = map (#ctrs o of_fp_sugar #ctr_sugars) perm_fp_sugars;
-    val perm_ctr_Tsss = map (map (binder_types o fastype_of)) perm_ctrss;
-    val perm_fpTs = map (body_type o fastype_of o hd) perm_ctrss;
-
-    val nn0 = length res_Ts;
-    val nn = length perm_fpTs;
-    val kks = 0 upto nn - 1;
-    val perm_ns = map length perm_ctr_Tsss;
-
-    val perm_Cs = map (domain_type o body_fun_type o fastype_of o co_rec_of o
-      of_fp_sugar (#xtor_co_iterss o #fp_res)) perm_fp_sugars;
-    val (perm_p_Tss, (perm_q_Tssss, _, perm_f_Tssss, _)) =
-      mk_coiter_fun_arg_types perm_ctr_Tsss perm_Cs perm_ns (co_rec_of dtor_coiters1);
-
-    val (perm_p_hss, h) = indexedd perm_p_Tss 0;
-    val (perm_q_hssss, h') = indexedddd perm_q_Tssss h;
-    val (perm_f_hssss, _) = indexedddd perm_f_Tssss h';
-
-    val fun_arg_hs =
-      flat (map3 flat_corec_preds_predsss_gettersss perm_p_hss perm_q_hssss perm_f_hssss);
-
-    fun unpermute0 perm0_xs = permute_like (op =) perm0_kks kks perm0_xs;
-    fun unpermute perm_xs = permute_like (op =) perm_indices indices perm_xs;
-
-    val coinduct_thmss = map (unpermute0 o conj_dests nn) coinduct_thms;
-
-    val p_iss = map (map (find_index_eq fun_arg_hs)) (unpermute perm_p_hss);
-    val q_issss = map (map (map (map (find_index_eq fun_arg_hs)))) (unpermute perm_q_hssss);
-    val f_issss = map (map (map (map (find_index_eq fun_arg_hs)))) (unpermute perm_f_hssss);
-
-    val f_Tssss = unpermute perm_f_Tssss;
-    val fpTs = unpermute perm_fpTs;
-    val Cs = unpermute perm_Cs;
-
-    val As_rho = tvar_subst thy (take nn0 fpTs) res_Ts;
-    val Cs_rho = map (fst o dest_TVar) Cs ~~ pad_list HOLogic.unitT nn arg_Ts;
+fun mk_partial_compN 0 _ g = g
+  | mk_partial_compN n fT g =
+    let val g' = mk_partial_compN (n - 1) (range_type fT) g in
+      mk_partial_comp (fastype_of g') fT g'
+    end;
 
-    val substA = Term.subst_TVars As_rho;
-    val substAT = Term.typ_subst_TVars As_rho;
-    val substCT = Term.typ_subst_TVars Cs_rho;
-
-    val perm_Cs' = map substCT perm_Cs;
-
-    fun call_of nullary [] [g_i] [Type (@{type_name fun}, [_, T])] =
-        (if exists_subtype_in Cs T then Indirect_Corec
-         else if nullary then Dummy_No_Corec
-         else No_Corec) g_i
-      | call_of _ [q_i] [g_i, g_i'] _ = Direct_Corec (q_i, g_i, g_i');
-
-    fun mk_ctr_spec ctr disc sels p_ho q_iss f_iss f_Tss discI sel_thms collapse corec_thm
-        disc_corec sel_corecs =
-      let val nullary = not (can dest_funT (fastype_of ctr)) in
-        {ctr = substA ctr, disc = substA disc, sels = map substA sels, pred = p_ho,
-         calls = map3 (call_of nullary) q_iss f_iss f_Tss, discI = discI, sel_thms = sel_thms,
-         collapse = collapse, corec_thm = corec_thm, disc_corec = disc_corec,
-         sel_corecs = sel_corecs}
-      end;
-
-    fun mk_ctr_specs index (ctr_sugars : ctr_sugar list) p_is q_isss f_isss f_Tsss
-        coiter_thmsss disc_coitersss sel_coiterssss =
-      let
-        val ctrs = #ctrs (nth ctr_sugars index);
-        val discs = #discs (nth ctr_sugars index);
-        val selss = #selss (nth ctr_sugars index);
-        val p_ios = map SOME p_is @ [NONE];
-        val discIs = #discIs (nth ctr_sugars index);
-        val sel_thmss = #sel_thmss (nth ctr_sugars index);
-        val collapses = #collapses (nth ctr_sugars index);
-        val corec_thms = co_rec_of (nth coiter_thmsss index);
-        val disc_corecs = co_rec_of (nth disc_coitersss index);
-        val sel_corecss = co_rec_of (nth sel_coiterssss index);
-      in
-        map13 mk_ctr_spec ctrs discs selss p_ios q_isss f_isss f_Tsss discIs sel_thmss collapses
-          corec_thms disc_corecs sel_corecss
-      end;
-
-    fun mk_spec ({T, index, ctr_sugars, co_iterss = coiterss, co_iter_thmsss = coiter_thmsss,
-          disc_co_itersss = disc_coitersss, sel_co_iterssss = sel_coiterssss, ...} : fp_sugar)
-        p_is q_isss f_isss f_Tsss =
-      {corec = mk_co_iter thy Greatest_FP (substAT T) perm_Cs' (co_rec_of (nth coiterss index)),
-       nested_maps = maps (map_thms_of_typ lthy o T_of_bnf) nested_bnfs,
-       nested_map_idents = map (unfold_thms lthy [id_def] o map_id0_of_bnf) nested_bnfs,
-       nested_map_comps = map map_comp_of_bnf nested_bnfs,
-       ctr_specs = mk_ctr_specs index ctr_sugars p_is q_isss f_isss f_Tsss coiter_thmsss
-         disc_coitersss sel_coiterssss};
-  in
-    ((is_some gfp_sugar_thms, map5 mk_spec fp_sugars p_iss q_issss f_issss f_Tssss, missing_res_Ts,
-      co_induct_of coinduct_thms, strong_co_induct_of coinduct_thms, co_induct_of coinduct_thmss,
-      strong_co_induct_of coinduct_thmss), lthy')
+fun mk_compN n bound_Ts (g, f) =
+  let val typof = curry fastype_of1 bound_Ts in
+    mk_partial_compN n (typof f) g $ f
   end;
 
+val mk_comp = mk_compN 1;
+
+fun get_indices fixes t = map (fst #>> Binding.name_of #> Free) fixes
+  |> map_index (fn (i, v) => if exists_subterm (equal v) t then SOME i else NONE)
+  |> map_filter I;
+
 end;
--- a/src/HOL/BNF/Tools/bnf_gfp.ML	Thu Dec 05 17:52:12 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_gfp.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -23,7 +23,7 @@
 open BNF_Comp
 open BNF_FP_Util
 open BNF_FP_Def_Sugar
-open BNF_FP_Rec_Sugar
+open BNF_GFP_Rec_Sugar
 open BNF_GFP_Util
 open BNF_GFP_Tactics
 
@@ -56,7 +56,7 @@
      ((i, I), nth (nth lwitss i) nwit) :: maps (tree_to_coind_wits lwitss) subtrees;
 
 (*all BNFs have the same lives*)
-fun construct_gfp mixfixes map_bs rel_bs set_bss bs resBs (resDs, Dss) bnfs lthy =
+fun construct_gfp mixfixes map_bs rel_bs set_bss0 bs resBs (resDs, Dss) bnfs lthy =
   let
     val time = time lthy;
     val timer = time (Timer.startRealTimer ());
@@ -74,7 +74,7 @@
     val mk_internal_b = Binding.name #> Binding.prefix true b_name #> Binding.conceal;
     fun mk_internal_bs name =
       map (fn b =>
-        Binding.prefix true b_name (Binding.suffix_name ("_" ^ name) b) |> Binding.conceal) bs;
+        Binding.prefix true b_name (Binding.prefix_name (name ^ "_") b) |> Binding.conceal) bs;
     val external_bs = map2 (Binding.prefix false) b_names bs
       |> note_all = false ? map Binding.conceal;
 
@@ -1695,7 +1695,7 @@
       ||>> mk_Frees "s" corec_sTs
       ||>> mk_Frees "P" (map2 mk_pred2T Ts Ts);
 
-    fun dtor_bind i = nth external_bs (i - 1) |> Binding.suffix_name ("_" ^ dtorN);
+    fun dtor_bind i = nth external_bs (i - 1) |> Binding.prefix_name (dtorN ^ "_");
     val dtor_name = Binding.name_of o dtor_bind;
     val dtor_def_bind = rpair [] o Binding.conceal o Thm.def_binding o dtor_bind;
 
@@ -1747,7 +1747,7 @@
 
     val timer = time (timer "dtor definitions & thms");
 
-    fun unfold_bind i = nth external_bs (i - 1) |> Binding.suffix_name ("_" ^ dtor_unfoldN);
+    fun unfold_bind i = nth external_bs (i - 1) |> Binding.prefix_name (dtor_unfoldN ^ "_");
     val unfold_name = Binding.name_of o unfold_bind;
     val unfold_def_bind = rpair [] o Binding.conceal o Thm.def_binding o unfold_bind;
 
@@ -1868,7 +1868,7 @@
       Term.list_comb (mk_map_of_bnf Ds (passiveAs @ Ts) (passiveAs @ FTs) bnf,
         map HOLogic.id_const passiveAs @ dtors)) Dss bnfs;
 
-    fun ctor_bind i = nth external_bs (i - 1) |> Binding.suffix_name ("_" ^ ctorN);
+    fun ctor_bind i = nth external_bs (i - 1) |> Binding.prefix_name (ctorN ^ "_");
     val ctor_name = Binding.name_of o ctor_bind;
     val ctor_def_bind = rpair [] o Binding.conceal o Thm.def_binding o ctor_bind;
 
@@ -1939,7 +1939,7 @@
           trans OF [mor RS unique, unfold_dtor]) unfold_unique_mor_thms unfold_dtor_thms
       end;
 
-    fun corec_bind i = nth external_bs (i - 1) |> Binding.suffix_name ("_" ^ dtor_corecN);
+    fun corec_bind i = nth external_bs (i - 1) |> Binding.prefix_name (dtor_corecN ^ "_");
     val corec_name = Binding.name_of o corec_bind;
     val corec_def_bind = rpair [] o Binding.conceal o Thm.def_binding o corec_bind;
 
@@ -2673,12 +2673,16 @@
 
         val wit_tac = mk_wit_tac n dtor_ctor_thms (flat dtor_set_thmss) (maps wit_thms_of_bnf bnfs);
 
+        val set_bss =
+          map (flat o map2 (fn B => fn b =>
+            if member (op =) resDs (TFree B) then [] else [b]) resBs) set_bss0;
+
         val (Jbnfs, lthy) =
           fold_map9 (fn tacs => fn b => fn map_b => fn rel_b => fn set_bs => fn mapx => fn sets =>
               fn T => fn (thms, wits) => fn lthy =>
             bnf_def Dont_Inline (user_policy Note_Some) I tacs (wit_tac thms) (SOME deads) map_b
               rel_b set_bs
-              (((((b, fold_rev Term.absfree fs' mapx), sets), absdummy T bd), wits), NONE) lthy
+              ((((((b, T), fold_rev Term.absfree fs' mapx), sets), bd), wits), NONE) lthy
             |> register_bnf (Local_Theory.full_name lthy b))
           tacss bs map_bs rel_bs set_bss fs_maps setss_by_bnf Ts all_witss lthy;
 
@@ -2744,8 +2748,8 @@
               ((Binding.qualify true (Binding.name_of b) (Binding.name thmN), []), [(thms, [])]))
             bs thmss)
       in
-       (timer, Jbnfs, (folded_dtor_map_o_thms, folded_dtor_map_thms), folded_dtor_set_thmss',
-         dtor_set_induct_thms, dtor_Jrel_thms, Jbnf_common_notes @ Jbnf_notes, lthy)
+        (timer, Jbnfs, (folded_dtor_map_o_thms, folded_dtor_map_thms), folded_dtor_set_thmss',
+          dtor_set_induct_thms, dtor_Jrel_thms, Jbnf_common_notes @ Jbnf_notes, lthy)
       end;
 
       val dtor_unfold_o_map_thms = mk_xtor_un_fold_o_map_thms Greatest_FP false m
@@ -2917,19 +2921,23 @@
   Outer_Syntax.local_theory @{command_spec "codatatype"} "define coinductive datatypes"
     (parse_co_datatype_cmd Greatest_FP construct_gfp);
 
-val option_parser = Parse.group (fn () => "option") (Parse.reserved "sequential" >> K true);
+val option_parser = Parse.group (fn () => "option")
+  ((Parse.reserved "sequential" >> K Option_Sequential)
+  || (Parse.reserved "exhaustive" >> K Option_Exhaustive))
 
 val where_alt_specs_of_parser = Parse.where_ |-- Parse.!!! (Parse.enum1 "|"
   (Parse_Spec.spec -- Scan.option (Parse.reserved "of" |-- Parse.const)));
 
 val _ = Outer_Syntax.local_theory_to_proof @{command_spec "primcorecursive"}
   "define primitive corecursive functions"
-  ((Scan.optional (@{keyword "("} |-- Parse.!!! option_parser --| @{keyword ")"}) false) --
+  ((Scan.optional (@{keyword "("} |--
+      Parse.!!! (Parse.list1 option_parser) --| @{keyword ")"}) []) --
     (Parse.fixes -- where_alt_specs_of_parser) >> uncurry add_primcorecursive_cmd);
 
 val _ = Outer_Syntax.local_theory @{command_spec "primcorec"}
   "define primitive corecursive functions"
-  ((Scan.optional (@{keyword "("} |-- Parse.!!! option_parser --| @{keyword ")"}) false) --
+  ((Scan.optional (@{keyword "("} |--
+      Parse.!!! (Parse.list1 option_parser) --| @{keyword ")"}) []) --
     (Parse.fixes -- where_alt_specs_of_parser) >> uncurry add_primcorec_cmd);
 
 end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/BNF/Tools/bnf_gfp_rec_sugar.ML	Thu Dec 05 17:58:03 2013 +0100
@@ -0,0 +1,1184 @@
+(*  Title:      HOL/BNF/Tools/bnf_gfp_rec_sugar.ML
+    Author:     Lorenz Panny, TU Muenchen
+    Author:     Jasmin Blanchette, TU Muenchen
+    Copyright   2013
+
+Corecursor sugar.
+*)
+
+signature BNF_GFP_REC_SUGAR =
+sig
+  datatype primcorec_option =
+    Option_Sequential |
+    Option_Exhaustive
+  val add_primcorecursive_cmd: primcorec_option list ->
+    (binding * string option * mixfix) list * ((Attrib.binding * string) * string option) list ->
+    Proof.context -> Proof.state
+  val add_primcorec_cmd: primcorec_option list ->
+    (binding * string option * mixfix) list * ((Attrib.binding * string) * string option) list ->
+    local_theory -> local_theory
+end;
+
+structure BNF_GFP_Rec_Sugar : BNF_GFP_REC_SUGAR =
+struct
+
+open Ctr_Sugar
+open BNF_Util
+open BNF_Def
+open BNF_FP_Util
+open BNF_FP_Def_Sugar
+open BNF_FP_N2M_Sugar
+open BNF_FP_Rec_Sugar_Util
+open BNF_GFP_Rec_Sugar_Tactics
+
+val codeN = "code"
+val ctrN = "ctr"
+val discN = "disc"
+val selN = "sel"
+
+val nitpicksimp_attrs = @{attributes [nitpick_simp]};
+val simp_attrs = @{attributes [simp]};
+val code_nitpicksimp_attrs = Code.add_default_eqn_attrib :: nitpicksimp_attrs;
+
+exception Primcorec_Error of string * term list;
+
+fun primcorec_error str = raise Primcorec_Error (str, []);
+fun primcorec_error_eqn str eqn = raise Primcorec_Error (str, [eqn]);
+fun primcorec_error_eqns str eqns = raise Primcorec_Error (str, eqns);
+
+datatype primcorec_option =
+  Option_Sequential |
+  Option_Exhaustive
+
+datatype corec_call =
+  Dummy_No_Corec of int |
+  No_Corec of int |
+  Mutual_Corec of int * int * int |
+  Nested_Corec of int;
+
+type basic_corec_ctr_spec =
+  {ctr: term,
+   disc: term,
+   sels: term list};
+
+type corec_ctr_spec =
+  {ctr: term,
+   disc: term,
+   sels: term list,
+   pred: int option,
+   calls: corec_call list,
+   discI: thm,
+   sel_thms: thm list,
+   collapse: thm,
+   corec_thm: thm,
+   disc_corec: thm,
+   sel_corecs: thm list};
+
+type corec_spec =
+  {corec: term,
+   nested_map_idents: thm list,
+   nested_map_comps: thm list,
+   ctr_specs: corec_ctr_spec list};
+
+exception AINT_NO_MAP of term;
+
+fun not_codatatype ctxt T =
+  error ("Not a codatatype: " ^ Syntax.string_of_typ ctxt T);
+fun ill_formed_corec_call ctxt t =
+  error ("Ill-formed corecursive call: " ^ quote (Syntax.string_of_term ctxt t));
+fun invalid_map ctxt t =
+  error ("Invalid map function in " ^ quote (Syntax.string_of_term ctxt t));
+fun unexpected_corec_call ctxt t =
+  error ("Unexpected corecursive call: " ^ quote (Syntax.string_of_term ctxt t));
+
+val mk_conjs = try (foldr1 HOLogic.mk_conj) #> the_default @{const True};
+val mk_disjs = try (foldr1 HOLogic.mk_disj) #> the_default @{const False};
+
+val conjuncts_s = filter_out (curry (op =) @{const True}) o HOLogic.conjuncts;
+
+fun s_not @{const True} = @{const False}
+  | s_not @{const False} = @{const True}
+  | s_not (@{const Not} $ t) = t
+  | s_not (@{const conj} $ t $ u) = @{const disj} $ s_not t $ s_not u
+  | s_not (@{const disj} $ t $ u) = @{const conj} $ s_not t $ s_not u
+  | s_not t = @{const Not} $ t;
+
+val s_not_conj = conjuncts_s o s_not o mk_conjs;
+
+fun propagate_unit_pos u cs = if member (op aconv) cs u then [@{const False}] else cs;
+
+fun propagate_unit_neg not_u cs = remove (op aconv) not_u cs;
+
+fun propagate_units css =
+  (case List.partition (can the_single) css of
+     ([], _) => css
+   | ([u] :: uss, css') =>
+     [u] :: propagate_units (map (propagate_unit_neg (s_not u))
+       (map (propagate_unit_pos u) (uss @ css'))));
+
+fun s_conjs cs =
+  if member (op aconv) cs @{const False} then @{const False}
+  else mk_conjs (remove (op aconv) @{const True} cs);
+
+fun s_disjs ds =
+  if member (op aconv) ds @{const True} then @{const True}
+  else mk_disjs (remove (op aconv) @{const False} ds);
+
+fun s_dnf css0 =
+  let val css = propagate_units css0 in
+    if null css then
+      [@{const False}]
+    else if exists null css then
+      []
+    else
+      map (fn c :: cs => (c, cs)) css
+      |> AList.coalesce (op =)
+      |> map (fn (c, css) => c :: s_dnf css)
+      |> (fn [cs] => cs | css => [s_disjs (map s_conjs css)])
+  end;
+
+fun fold_rev_let_if_case ctxt f bound_Ts t =
+  let
+    val thy = Proof_Context.theory_of ctxt;
+
+    fun fld conds t =
+      (case Term.strip_comb t of
+        (Const (@{const_name Let}, _), [_, _]) => fld conds (unfold_let t)
+      | (Const (@{const_name If}, _), [cond, then_branch, else_branch]) =>
+        fld (conds @ conjuncts_s cond) then_branch o fld (conds @ s_not_conj [cond]) else_branch
+      | (Const (c, _), args as _ :: _ :: _) =>
+        let val n = num_binder_types (Sign.the_const_type thy c) - 1 in
+          if n >= 0 andalso n < length args then
+            (case fastype_of1 (bound_Ts, nth args n) of
+              Type (s, Ts) =>
+              (case dest_case ctxt s Ts t of
+                NONE => apsnd (f conds t)
+              | SOME (conds', branches) =>
+                apfst (cons s) o fold_rev (uncurry fld)
+                  (map (append conds o conjuncts_s) conds' ~~ branches))
+            | _ => apsnd (f conds t))
+          else
+            apsnd (f conds t)
+        end
+      | _ => apsnd (f conds t))
+  in
+    fld [] t o pair []
+  end;
+
+fun case_of ctxt = ctr_sugar_of ctxt #> Option.map (fst o dest_Const o #casex);
+
+fun massage_let_if_case ctxt has_call massage_leaf =
+  let
+    val thy = Proof_Context.theory_of ctxt;
+
+    fun check_no_call t = if has_call t then unexpected_corec_call ctxt t else ();
+
+    fun massage_abs bound_Ts 0 t = massage_rec bound_Ts t
+      | massage_abs bound_Ts m (Abs (s, T, t)) = Abs (s, T, massage_abs (T :: bound_Ts) (m - 1) t)
+      | massage_abs bound_Ts m t =
+        let val T = domain_type (fastype_of1 (bound_Ts, t)) in
+          Abs (Name.uu, T, massage_abs (T :: bound_Ts) (m - 1) (incr_boundvars 1 t $ Bound 0))
+        end
+    and massage_rec bound_Ts t =
+      let val typof = curry fastype_of1 bound_Ts in
+        (case Term.strip_comb t of
+          (Const (@{const_name Let}, _), [_, _]) => massage_rec bound_Ts (unfold_let t)
+        | (Const (@{const_name If}, _), obj :: (branches as [_, _])) =>
+          let val branches' = map (massage_rec bound_Ts) branches in
+            Term.list_comb (If_const (typof (hd branches')) $ tap check_no_call obj, branches')
+          end
+        | (Const (c, _), args as _ :: _ :: _) =>
+          (case try strip_fun_type (Sign.the_const_type thy c) of
+            SOME (gen_branch_Ts, gen_body_fun_T) =>
+            let
+              val gen_branch_ms = map num_binder_types gen_branch_Ts;
+              val n = length gen_branch_ms;
+            in
+              if n < length args then
+                (case gen_body_fun_T of
+                  Type (_, [Type (T_name, _), _]) =>
+                  if case_of ctxt T_name = SOME c then
+                    let
+                      val (branches, obj_leftovers) = chop n args;
+                      val branches' = map2 (massage_abs bound_Ts) gen_branch_ms branches;
+                      val branch_Ts' = map typof branches';
+                      val body_T' = snd (strip_typeN (hd gen_branch_ms) (hd branch_Ts'));
+                      val casex' = Const (c, branch_Ts' ---> map typof obj_leftovers ---> body_T');
+                    in
+                      Term.list_comb (casex',
+                        branches' @ tap (List.app check_no_call) obj_leftovers)
+                    end
+                  else
+                    massage_leaf bound_Ts t
+                | _ => massage_leaf bound_Ts t)
+              else
+                massage_leaf bound_Ts t
+            end
+          | NONE => massage_leaf bound_Ts t)
+        | _ => massage_leaf bound_Ts t)
+      end
+  in
+    massage_rec
+  end;
+
+val massage_mutual_corec_call = massage_let_if_case;
+
+fun curried_type (Type (@{type_name fun}, [Type (@{type_name prod}, Ts), T])) = Ts ---> T;
+
+(* Massage a right-hand side "t" of type T, containing nested corecursive calls
+   (detected by has_call), into a term of type U:
+   - call-free subterms are injected via the Inl-building map (build_map_Inl);
+   - mutual calls are delegated to raw_massage_call (massage_mutual_call);
+   - map combinators, composition ("o"), prod_case, applications, and abstractions
+     are traversed structurally (massage_map / massage_mutual_fun / massage_call),
+     descending through lets/ifs/cases (massage_let_if_case).
+   Errors are reported via unexpected_corec_call / invalid_map; AINT_NO_MAP is used
+   internally to fall back from map rewriting to mutual-function rewriting. *)
+fun massage_nested_corec_call ctxt has_call raw_massage_call bound_Ts U t =
+  let
+    fun check_no_call t = if has_call t then unexpected_corec_call ctxt t else ();
+
+    val build_map_Inl = build_map ctxt (uncurry Inl_const o dest_sumT o snd);
+
+    fun massage_mutual_call bound_Ts U T t =
+      if has_call t then
+        (case try dest_sumT U of
+          SOME (U1, U2) => if U1 = T then raw_massage_call bound_Ts T U2 t else invalid_map ctxt t
+        | NONE => invalid_map ctxt t)
+      else
+        build_map_Inl (T, U) $ t;
+
+    fun massage_mutual_fun bound_Ts U T t =
+      (case t of
+        Const (@{const_name comp}, _) $ t1 $ t2 =>
+        mk_comp bound_Ts (massage_mutual_fun bound_Ts U T t1, tap check_no_call t2)
+      | _ =>
+        let
+          val var = Var ((Name.uu, Term.maxidx_of_term t + 1),
+            domain_type (fastype_of1 (bound_Ts, t)));
+        in
+          Term.lambda var (massage_mutual_call bound_Ts U T (t $ var))
+        end);
+
+    fun massage_map bound_Ts (Type (_, Us)) (Type (s, Ts)) t =
+        (case try (dest_map ctxt s) t of
+          SOME (map0, fs) =>
+          let
+            val Type (_, dom_Ts) = domain_type (fastype_of1 (bound_Ts, t));
+            val map' = mk_map (length fs) dom_Ts Us map0;
+            val fs' =
+              map_flattened_map_args ctxt s (map3 (massage_map_or_map_arg bound_Ts) Us Ts) fs;
+          in
+            Term.list_comb (map', fs')
+          end
+        | NONE => raise AINT_NO_MAP t)
+      | massage_map _ _ _ t = raise AINT_NO_MAP t
+    and massage_map_or_map_arg bound_Ts U T t =
+      if T = U then
+        tap check_no_call t
+      else
+        massage_map bound_Ts U T t
+        handle AINT_NO_MAP _ => massage_mutual_fun bound_Ts U T t;
+
+    fun massage_call bound_Ts U T =
+      massage_let_if_case ctxt has_call (fn bound_Ts => fn t =>
+        if has_call t then
+          (case t of
+            Const (@{const_name prod_case}, _) $ t' =>
+            let
+              val U' = curried_type U;
+              val T' = curried_type T;
+            in
+              Const (@{const_name prod_case}, U' --> U) $ massage_call bound_Ts U' T' t'
+            end
+          | t1 $ t2 =>
+            (if has_call t2 then
+              massage_mutual_call bound_Ts U T t
+            else
+              massage_map bound_Ts U T t1 $ t2
+              handle AINT_NO_MAP _ => massage_mutual_call bound_Ts U T t)
+          | Abs (s, T', t') =>
+            Abs (s, T', massage_call (T' :: bound_Ts) (range_type U) (range_type T) t')
+          | _ => massage_mutual_call bound_Ts U T t)
+        else
+          build_map_Inl (T, U) $ t) bound_Ts;
+
+    val T = fastype_of1 (bound_Ts, t);
+  in
+    if has_call t then massage_call bound_Ts U T t else build_map_Inl (T, U) $ t
+  end;
+
+(* Folding over corecursive calls is folding over let/if/case structure -- alias. *)
+val fold_rev_corec_call = fold_rev_let_if_case;
+
+(* Wrap t, of type "Type (s, Ts)", in the case combinator of s with the constructors
+   of s themselves as the branches.  Fails if no ctr_sugar is registered for s. *)
+fun expand_to_ctr_term ctxt s Ts t =
+  (case ctr_sugar_of ctxt s of
+    NONE => raise Fail "expand_to_ctr_term"
+  | SOME {casex, ctrs, ...} =>
+    Term.list_comb (mk_case Ts (Type (s, Ts)) casex, map (mk_ctr Ts) ctrs) $ t);
+
+(* Bring a code-equation right-hand side into constructor form: descending through
+   lets/ifs/cases, any branch that is not already a constructor application of the
+   result type (dest_ctr fails) is expanded via expand_to_ctr_term.  Raises Fail if
+   the type of t is not of the form "Type (s, Ts)". *)
+fun expand_corec_code_rhs ctxt has_call bound_Ts t =
+  (case fastype_of1 (bound_Ts, t) of
+    Type (s, Ts) =>
+    massage_let_if_case ctxt has_call (fn _ => fn t =>
+      if can (dest_ctr ctxt s) t then t else expand_to_ctr_term ctxt s Ts t) bound_Ts t
+  | _ => raise Fail "expand_corec_code_rhs");
+
+(* Apply massage_ctr to the head and argument list (via strip_comb) of each branch
+   occurring under lets/ifs/cases in a code-equation right-hand side. *)
+fun massage_corec_code_rhs ctxt massage_ctr =
+  massage_let_if_case ctxt (K false)
+    (fn bound_Ts => uncurry (massage_ctr bound_Ts) o Term.strip_comb);
+
+(* Fold f over each branch of a code-equation right-hand side, passing the branch
+   conditions ("conds") and the branch's head and arguments; returns only the
+   accumulator (snd), discarding the collected case-type names. *)
+fun fold_rev_corec_code_rhs ctxt f =
+  snd ooo fold_rev_let_if_case ctxt (fn conds => uncurry (f conds) o Term.strip_comb);
+
+(* Collect the ctr_sugar theorems of every case combinator occurring in t:
+   (distincts, discIs, sel_splits, sel_split_asms), each flattened over all
+   case types found by fold_rev_let_if_case. *)
+fun case_thms_of_term ctxt bound_Ts t =
+  let
+    val (caseT_names, _) = fold_rev_let_if_case ctxt (K (K I)) bound_Ts t ();
+    val ctr_sugars = map (the o ctr_sugar_of ctxt) caseT_names;
+  in
+    (maps #distincts ctr_sugars, maps #discIs ctr_sugars, maps #sel_splits ctr_sugars,
+     maps #sel_split_asms ctr_sugars)
+  end;
+
+(* For a codatatype result type res_T, look up its ctr_sugar and return one
+   {ctr, disc, sels} record per constructor, with type variables instantiated to
+   match res_T (via tvar_subst on the body type of the first constructor).
+   Reports via not_codatatype if res_T is not an equipped codatatype. *)
+fun basic_corec_specs_of ctxt res_T =
+  (case res_T of
+    Type (T_name, _) =>
+    (case Ctr_Sugar.ctr_sugar_of ctxt T_name of
+      NONE => not_codatatype ctxt res_T
+    | SOME {ctrs, discs, selss, ...} =>
+      let
+        val thy = Proof_Context.theory_of ctxt;
+
+        val gfpT = body_type (fastype_of (hd ctrs));
+        val As_rho = tvar_subst thy [gfpT] [res_T];
+        val substA = Term.subst_TVars As_rho;
+
+        fun mk_spec ctr disc sels = {ctr = substA ctr, disc = substA disc, sels = map substA sels};
+      in
+        map3 mk_spec ctrs discs selss
+      end)
+  | _ => not_codatatype ctxt res_T);
+
+(* Assemble full corecursion specifications for the result types res_Ts: derive the
+   (possibly nested-to-mutual) greatest-fixpoint sugar, permute its components back to
+   the user's order (unpermute0/unpermute), instantiate type variables (As_rho, Cs_rho),
+   and build one spec per type comprising the instantiated corecursor, map theorems of
+   the nested BNFs, and per-constructor specs (discriminator, selectors, call kinds,
+   and associated theorems).  Also returns whether nested-to-mutual reduction took
+   place (from gfp_sugar_thms) and the (strong) coinduction rules. *)
+fun corec_specs_of bs arg_Ts res_Ts get_indices callssss0 lthy =
+  let
+    val thy = Proof_Context.theory_of lthy;
+
+    val ((missing_res_Ts, perm0_kks,
+          fp_sugars as {nested_bnfs, fp_res = {xtor_co_iterss = dtor_coiters1 :: _, ...},
+            co_inducts = coinduct_thms, ...} :: _, (_, gfp_sugar_thms)), lthy') =
+      nested_to_mutual_fps Greatest_FP bs res_Ts get_indices callssss0 lthy;
+
+    val perm_fp_sugars = sort (int_ord o pairself #index) fp_sugars;
+
+    val indices = map #index fp_sugars;
+    val perm_indices = map #index perm_fp_sugars;
+
+    val perm_ctrss = map (#ctrs o of_fp_sugar #ctr_sugars) perm_fp_sugars;
+    val perm_ctr_Tsss = map (map (binder_types o fastype_of)) perm_ctrss;
+    val perm_gfpTs = map (body_type o fastype_of o hd) perm_ctrss;
+
+    val nn0 = length res_Ts;
+    val nn = length perm_gfpTs;
+    val kks = 0 upto nn - 1;
+    val perm_ns = map length perm_ctr_Tsss;
+
+    val perm_Cs = map (domain_type o body_fun_type o fastype_of o co_rec_of o
+      of_fp_sugar (#xtor_co_iterss o #fp_res)) perm_fp_sugars;
+    val (perm_p_Tss, (perm_q_Tssss, _, perm_f_Tssss, _)) =
+      mk_coiter_fun_arg_types perm_ctr_Tsss perm_Cs perm_ns (co_rec_of dtor_coiters1);
+
+    (* Assign consecutive indices to all predicate/getter argument positions. *)
+    val (perm_p_hss, h) = indexedd perm_p_Tss 0;
+    val (perm_q_hssss, h') = indexedddd perm_q_Tssss h;
+    val (perm_f_hssss, _) = indexedddd perm_f_Tssss h';
+
+    val fun_arg_hs =
+      flat (map3 flat_corec_preds_predsss_gettersss perm_p_hss perm_q_hssss perm_f_hssss);
+
+    fun unpermute0 perm0_xs = permute_like (op =) perm0_kks kks perm0_xs;
+    fun unpermute perm_xs = permute_like (op =) perm_indices indices perm_xs;
+
+    val coinduct_thmss = map (unpermute0 o conj_dests nn) coinduct_thms;
+
+    val p_iss = map (map (find_index_eq fun_arg_hs)) (unpermute perm_p_hss);
+    val q_issss = map (map (map (map (find_index_eq fun_arg_hs)))) (unpermute perm_q_hssss);
+    val f_issss = map (map (map (map (find_index_eq fun_arg_hs)))) (unpermute perm_f_hssss);
+
+    val f_Tssss = unpermute perm_f_Tssss;
+    val gfpTs = unpermute perm_gfpTs;
+    val Cs = unpermute perm_Cs;
+
+    val As_rho = tvar_subst thy (take nn0 gfpTs) res_Ts;
+    val Cs_rho = map (fst o dest_TVar) Cs ~~ pad_list HOLogic.unitT nn arg_Ts;
+
+    val substA = Term.subst_TVars As_rho;
+    val substAT = Term.typ_subst_TVars As_rho;
+    val substCT = Term.typ_subst_TVars Cs_rho;
+
+    val perm_Cs' = map substCT perm_Cs;
+
+    (* Classify a corecursive call position: nested (the type mentions some C),
+       dummy (nullary constructor), plain no-call, or mutual (q/g/g' triple). *)
+    fun call_of nullary [] [g_i] [Type (@{type_name fun}, [_, T])] =
+        (if exists_subtype_in Cs T then Nested_Corec
+         else if nullary then Dummy_No_Corec
+         else No_Corec) g_i
+      | call_of _ [q_i] [g_i, g_i'] _ = Mutual_Corec (q_i, g_i, g_i');
+
+    fun mk_ctr_spec ctr disc sels p_ho q_iss f_iss f_Tss discI sel_thms collapse corec_thm
+        disc_corec sel_corecs =
+      let val nullary = not (can dest_funT (fastype_of ctr)) in
+        {ctr = substA ctr, disc = substA disc, sels = map substA sels, pred = p_ho,
+         calls = map3 (call_of nullary) q_iss f_iss f_Tss, discI = discI, sel_thms = sel_thms,
+         collapse = collapse, corec_thm = corec_thm, disc_corec = disc_corec,
+         sel_corecs = sel_corecs}
+      end;
+
+    fun mk_ctr_specs index (ctr_sugars : ctr_sugar list) p_is q_isss f_isss f_Tsss coiter_thmsss
+        disc_coitersss sel_coiterssss =
+      let
+        val ctrs = #ctrs (nth ctr_sugars index);
+        val discs = #discs (nth ctr_sugars index);
+        val selss = #selss (nth ctr_sugars index);
+        val p_ios = map SOME p_is @ [NONE];
+        val discIs = #discIs (nth ctr_sugars index);
+        val sel_thmss = #sel_thmss (nth ctr_sugars index);
+        val collapses = #collapses (nth ctr_sugars index);
+        val corec_thms = co_rec_of (nth coiter_thmsss index);
+        val disc_corecs = co_rec_of (nth disc_coitersss index);
+        val sel_corecss = co_rec_of (nth sel_coiterssss index);
+      in
+        map13 mk_ctr_spec ctrs discs selss p_ios q_isss f_isss f_Tsss discIs sel_thmss collapses
+          corec_thms disc_corecs sel_corecss
+      end;
+
+    fun mk_spec ({T, index, ctr_sugars, co_iterss = coiterss, co_iter_thmsss = coiter_thmsss,
+          disc_co_itersss = disc_coitersss, sel_co_iterssss = sel_coiterssss, ...} : fp_sugar)
+        p_is q_isss f_isss f_Tsss =
+      {corec = mk_co_iter thy Greatest_FP (substAT T) perm_Cs' (co_rec_of (nth coiterss index)),
+       nested_map_idents = map (unfold_thms lthy @{thms id_def} o map_id0_of_bnf) nested_bnfs,
+       nested_map_comps = map map_comp_of_bnf nested_bnfs,
+       ctr_specs = mk_ctr_specs index ctr_sugars p_is q_isss f_isss f_Tsss coiter_thmsss
+         disc_coitersss sel_coiterssss};
+  in
+    ((is_some gfp_sugar_thms, map5 mk_spec fp_sugars p_iss q_issss f_issss f_Tssss, missing_res_Ts,
+      co_induct_of coinduct_thms, strong_co_induct_of coinduct_thms, co_induct_of coinduct_thmss,
+      strong_co_induct_of coinduct_thmss), lthy')
+  end;
+
+(* Placeholder right-hand side: "undefined" at dummy type (resolved by type checking). *)
+val undef_const = Const (@{const_name undefined}, dummyT);
+
+(* abs_tuple [x1, ..., xn] t = "%(x1, ..., xn). t", a lambda over one tupled argument. *)
+val abs_tuple = HOLogic.tupled_lambda o HOLogic.mk_tuple;
+(* Replace in a term each atomic occurrence of an element of vs by the corresponding
+   loose bound variable: the i-th element of vs becomes "Bound i" at the top level,
+   shifted by the number of abstractions traversed.  Other subterms are kept. *)
+fun abstract vs =
+  let
+    fun abs lev (Abs (x, T, b)) = Abs (x, T, abs (lev + 1) b)
+      | abs lev (t $ u) = abs lev t $ abs lev u
+      | abs lev t =
+          (case find_index (equal t) vs of
+            ~1 => t
+          | i => Bound (lev + i));
+  in
+    abs 0
+  end;
+
+(* Pair constructor whose component types are computed w.r.t. bound_Ts, so that the
+   arguments may contain loose bound variables. *)
+fun mk_prod1 bound_Ts (t, u) =
+  HOLogic.pair_const (fastype_of1 (bound_Ts, t)) (fastype_of1 (bound_Ts, u)) $ t $ u;
+(* Right-nested tuple of the given terms, or the unit value for the empty list. *)
+fun mk_tuple1 bound_Ts = the_default HOLogic.unit o try (foldr1 (mk_prod1 bound_Ts));
+
+(* Parsed discriminator formula of a primcorec specification. *)
+type coeqn_data_disc = {
+  fun_name: string,            (* name of the function being defined *)
+  fun_T: typ,                  (* its type *)
+  fun_args: term list,         (* arguments of the function application on the lhs *)
+  ctr: term,                   (* constructor selected by the formula *)
+  ctr_no: int, (*###*)         (* its position among the type's constructors *)
+  disc: term,                  (* the corresponding discriminator *)
+  prems: term list,            (* effective premises (incl. negations of earlier ones) *)
+  auto_gen: bool,              (* catch-all / automatically completed equation *)
+  maybe_ctr_rhs: term option,  (* originating constructor-form rhs, if derived *)
+  maybe_code_rhs: term option, (* originating code-form rhs, if derived *)
+  user_eqn: term               (* reconstructed user-level equation *)
+};
+
+(* Parsed selector equation "sel (f xs) = rhs" of a primcorec specification. *)
+type coeqn_data_sel = {
+  fun_name: string,            (* name of the function being defined *)
+  fun_T: typ,                  (* its type *)
+  fun_args: term list,         (* arguments of the function application on the lhs *)
+  ctr: term,                   (* constructor the selector belongs to *)
+  sel: term,                   (* the selector itself *)
+  rhs_term: term,              (* right-hand side of the equation *)
+  user_eqn: term               (* the equation as given by the user *)
+};
+
+(* A dissected equation is either a discriminator formula or a selector equation. *)
+datatype coeqn_data =
+  Disc of coeqn_data_disc |
+  Sel of coeqn_data_sel;
+
+(* Parse one discriminator formula (a disc applied to "f xs", possibly negated, or an
+   equation with a nullary constructor, with premises prems') into a Disc record.
+   With "seq" (sequential mode) the negations of previously matched premises
+   (matchedss) are conjoined to the equation's own premises.  matchedsss threads,
+   per function name, the premise lists already matched. *)
+fun dissect_coeqn_disc seq fun_names (basic_ctr_specss : basic_corec_ctr_spec list list)
+    maybe_ctr_rhs maybe_code_rhs prems' concl matchedsss =
+  let
+    fun find_subterm p =
+      let (* FIXME \<exists>? *)
+        fun find (t as u $ v) = if p t then SOME t else merge_options (find u, find v)
+          | find t = if p t then SOME t else NONE;
+      in find end;
+
+    val applied_fun = concl
+      |> find_subterm (member ((op =) o apsnd SOME) fun_names o try (fst o dest_Free o head_of))
+      |> the
+      handle Option.Option => primcorec_error_eqn "malformed discriminator formula" concl;
+    val ((fun_name, fun_T), fun_args) = strip_comb applied_fun |>> dest_Free;
+    val SOME basic_ctr_specs = AList.lookup (op =) (fun_names ~~ basic_ctr_specss) fun_name;
+
+    val discs = map #disc basic_ctr_specs;
+    val ctrs = map #ctr basic_ctr_specs;
+    (* "~ disc t" is only meaningful for two-constructor types (1 - ctr_no below). *)
+    val not_disc = head_of concl = @{term Not};
+    val _ = not_disc andalso length ctrs <> 2 andalso
+      primcorec_error_eqn "negated discriminator for a type with \<noteq> 2 constructors" concl;
+    val disc' = find_subterm (member (op =) discs o head_of) concl;
+    val eq_ctr0 = concl |> perhaps (try HOLogic.dest_not) |> try (HOLogic.dest_eq #> snd)
+        |> (fn SOME t => let val n = find_index (equal t) ctrs in
+          if n >= 0 then SOME n else NONE end | _ => NONE);
+    val _ = is_some disc' orelse is_some eq_ctr0 orelse
+      primcorec_error_eqn "no discriminator in equation" concl;
+    val ctr_no' =
+      if is_none disc' then the eq_ctr0 else find_index (equal (head_of (the disc'))) discs;
+    val ctr_no = if not_disc then 1 - ctr_no' else ctr_no';
+    val {ctr, disc, ...} = nth basic_ctr_specs ctr_no;
+
+    (* A single premise that is the fresh dummy variable "uu_" marks a catch-all case. *)
+    val catch_all = try (fst o dest_Free o the_single) prems' = SOME Name.uu_;
+    val matchedss = AList.lookup (op =) matchedsss fun_name |> the_default [];
+    val prems = map (abstract (List.rev fun_args)) prems';
+    val real_prems =
+      (if catch_all orelse seq then maps s_not_conj matchedss else []) @
+      (if catch_all then [] else prems);
+
+    val matchedsss' = AList.delete (op =) fun_name matchedsss
+      |> cons (fun_name, if seq then matchedss @ [prems] else matchedss @ [real_prems]);
+
+    val user_eqn =
+      (real_prems, concl)
+      |>> map HOLogic.mk_Trueprop ||> HOLogic.mk_Trueprop o abstract (List.rev fun_args)
+      |> curry Logic.list_all (map dest_Free fun_args) o Logic.list_implies;
+  in
+    (Disc {
+      fun_name = fun_name,
+      fun_T = fun_T,
+      fun_args = fun_args,
+      ctr = ctr,
+      ctr_no = ctr_no,
+      disc = disc,
+      prems = real_prems,
+      auto_gen = catch_all,
+      maybe_ctr_rhs = maybe_ctr_rhs,
+      maybe_code_rhs = maybe_code_rhs,
+      user_eqn = user_eqn
+    }, matchedsss')
+  end;
+
+(* Parse one selector equation "sel (f xs) = rhs" into a Sel record.  maybe_of_spec
+   optionally names the constructor (the "of" syntax), needed to disambiguate a
+   selector shared by several constructors. *)
+fun dissect_coeqn_sel fun_names (basic_ctr_specss : basic_corec_ctr_spec list list) eqn'
+    maybe_of_spec eqn =
+  let
+    val (lhs, rhs) = HOLogic.dest_eq eqn
+      handle TERM _ =>
+        primcorec_error_eqn "malformed function equation (expected \"lhs = rhs\")" eqn;
+    val sel = head_of lhs;
+    val ((fun_name, fun_T), fun_args) = dest_comb lhs |> snd |> strip_comb |> apfst dest_Free
+      handle TERM _ =>
+        primcorec_error_eqn "malformed selector argument in left-hand side" eqn;
+    val basic_ctr_specs = the (AList.lookup (op =) (fun_names ~~ basic_ctr_specss) fun_name)
+      handle Option.Option =>
+        primcorec_error_eqn "malformed selector argument in left-hand side" eqn;
+    val {ctr, ...} =
+      (case maybe_of_spec of
+        SOME of_spec => the (find_first (equal of_spec o #ctr) basic_ctr_specs)
+      | NONE => filter (exists (equal sel) o #sels) basic_ctr_specs |> the_single
+          handle List.Empty => primcorec_error_eqn "ambiguous selector - use \"of\"" eqn);
+    val user_eqn = drop_All eqn';
+  in
+    Sel {
+      fun_name = fun_name,
+      fun_T = fun_T,
+      fun_args = fun_args,
+      ctr = ctr,
+      sel = sel,
+      rhs_term = rhs,
+      user_eqn = user_eqn
+    }
+  end;
+
+(* Parse one constructor equation "f xs = Ctr t1 ... tn" by reducing it to an optional
+   discriminator formula (omitted for single-constructor types) plus one selector
+   equation "sel_i (f xs) = t_i" per constructor argument. *)
+fun dissect_coeqn_ctr seq fun_names (basic_ctr_specss : basic_corec_ctr_spec list list) eqn'
+    maybe_code_rhs prems concl matchedsss =
+  let
+    val (lhs, rhs) = HOLogic.dest_eq concl;
+    val (fun_name, fun_args) = strip_comb lhs |>> fst o dest_Free;
+    val SOME basic_ctr_specs = AList.lookup (op =) (fun_names ~~ basic_ctr_specss) fun_name;
+    val (ctr, ctr_args) = strip_comb (unfold_let rhs);
+    val {disc, sels, ...} = the (find_first (equal ctr o #ctr) basic_ctr_specs)
+      handle Option.Option => primcorec_error_eqn "not a constructor" ctr;
+
+    val disc_concl = betapply (disc, lhs);
+    val (maybe_eqn_data_disc, matchedsss') = if length basic_ctr_specs = 1
+      then (NONE, matchedsss)
+      else apfst SOME (dissect_coeqn_disc seq fun_names basic_ctr_specss
+          (SOME (abstract (List.rev fun_args) rhs)) maybe_code_rhs prems disc_concl matchedsss);
+
+    val sel_concls = sels ~~ ctr_args
+      |> map (fn (sel, ctr_arg) => HOLogic.mk_eq (betapply (sel, lhs), ctr_arg));
+
+(*
+val _ = tracing ("reduced\n    " ^ Syntax.string_of_term @{context} concl ^ "\nto\n    \<cdot> " ^
+ (is_some maybe_eqn_data_disc ? K (Syntax.string_of_term @{context} disc_concl ^ "\n    \<cdot> ")) "" ^
+ space_implode "\n    \<cdot> " (map (Syntax.string_of_term @{context}) sel_concls) ^
+ "\nfor premise(s)\n    \<cdot> " ^
+ space_implode "\n    \<cdot> " (map (Syntax.string_of_term @{context}) prems));
+*)
+
+    val eqns_data_sel =
+      map (dissect_coeqn_sel fun_names basic_ctr_specss eqn' (SOME ctr)) sel_concls;
+  in
+    (the_list maybe_eqn_data_disc @ eqns_data_sel, matchedsss')
+  end;
+
+(* Parse one code-style equation: expand its right-hand side into constructor form
+   (expand_corec_code_rhs), collect the conditions under which each constructor is
+   produced, synthesize one conditional constructor equation per constructor (with
+   "undefined" at argument positions a branch does not determine), and dissect each
+   via dissect_coeqn_ctr. *)
+fun dissect_coeqn_code lthy has_call fun_names basic_ctr_specss eqn' concl matchedsss =
+  let
+    val (lhs, (rhs', rhs)) = HOLogic.dest_eq concl ||> `(expand_corec_code_rhs lthy has_call []);
+    val (fun_name, fun_args) = strip_comb lhs |>> fst o dest_Free;
+    val SOME basic_ctr_specs = AList.lookup (op =) (fun_names ~~ basic_ctr_specss) fun_name;
+
+    val cond_ctrs = fold_rev_corec_code_rhs lthy (fn cs => fn ctr => fn _ =>
+        if member ((op =) o apsnd #ctr) basic_ctr_specs ctr
+        then cons (ctr, cs)
+        else primcorec_error_eqn "not a constructor" ctr) [] rhs' []
+      |> AList.group (op =);
+
+    val ctr_premss = (case cond_ctrs of [_] => [[]] | _ => map (s_dnf o snd) cond_ctrs);
+    val ctr_concls = cond_ctrs |> map (fn (ctr, _) =>
+        binder_types (fastype_of ctr)
+        |> map_index (fn (n, T) => massage_corec_code_rhs lthy (fn _ => fn ctr' => fn args =>
+          if ctr' = ctr then nth args n else Const (@{const_name undefined}, T)) [] rhs')
+        |> curry list_comb ctr
+        |> curry HOLogic.mk_eq lhs);
+  in
+    fold_map2 (dissect_coeqn_ctr false fun_names basic_ctr_specss eqn'
+        (SOME (abstract (List.rev fun_args) rhs)))
+      ctr_premss ctr_concls matchedsss
+  end;
+
+(* Dispatch one user equation on its shape: a discriminator formula (head is a disc,
+   or rhs is a nullary constructor), a selector equation (head is a selector), a
+   constructor equation (head is a defined function and the rhs head a constructor),
+   or a premise-free code equation; anything else is an error. *)
+fun dissect_coeqn lthy seq has_call fun_names (basic_ctr_specss : basic_corec_ctr_spec list list)
+    eqn' maybe_of_spec matchedsss =
+  let
+    val eqn = drop_All eqn'
+      handle TERM _ => primcorec_error_eqn "malformed function equation" eqn';
+    val (prems, concl) = Logic.strip_horn eqn
+      |> apfst (map HOLogic.dest_Trueprop) o apsnd HOLogic.dest_Trueprop;
+
+    val head = concl
+      |> perhaps (try HOLogic.dest_not) |> perhaps (try (fst o HOLogic.dest_eq))
+      |> head_of;
+
+    val maybe_rhs = concl |> perhaps (try HOLogic.dest_not) |> try (snd o HOLogic.dest_eq);
+
+    val discs = maps (map #disc) basic_ctr_specss;
+    val sels = maps (maps #sels) basic_ctr_specss;
+    val ctrs = maps (map #ctr) basic_ctr_specss;
+  in
+    if member (op =) discs head orelse
+      is_some maybe_rhs andalso
+        member (op =) (filter (null o binder_types o fastype_of) ctrs) (the maybe_rhs) then
+      dissect_coeqn_disc seq fun_names basic_ctr_specss NONE NONE prems concl matchedsss
+      |>> single
+    else if member (op =) sels head then
+      ([dissect_coeqn_sel fun_names basic_ctr_specss eqn' maybe_of_spec concl], matchedsss)
+    else if is_Free head andalso member (op =) fun_names (fst (dest_Free head)) andalso
+      member (op =) ctrs (head_of (unfold_let (the maybe_rhs))) then
+      dissect_coeqn_ctr seq fun_names basic_ctr_specss eqn' NONE prems concl matchedsss
+    else if is_Free head andalso member (op =) fun_names (fst (dest_Free head)) andalso
+      null prems then
+      dissect_coeqn_code lthy has_call fun_names basic_ctr_specss eqn' concl matchedsss
+      |>> flat
+    else
+      primcorec_error_eqn "malformed function equation" eqn
+  end;
+
+(* Install the conjoined premises of one discriminator equation, as a tupled lambda
+   over the function arguments, at the constructor's predicate position in the
+   corecursor argument list (no-op if the constructor has no predicate). *)
+fun build_corec_arg_disc (ctr_specs : corec_ctr_spec list)
+    ({fun_args, ctr_no, prems, ...} : coeqn_data_disc) =
+  if is_none (#pred (nth ctr_specs ctr_no)) then I else
+    s_conjs prems
+    |> curry subst_bounds (List.rev fun_args)
+    |> HOLogic.tupled_lambda (HOLogic.mk_tuple fun_args)
+    |> K |> nth_map (the (#pred (nth ctr_specs ctr_no)));
+
+(* Corecursor argument for a selector without corecursive call: the tupled abstraction
+   of the equation's right-hand side, or "undefined" if no equation was given. *)
+fun build_corec_arg_no_call (sel_eqns : coeqn_data_sel list) sel =
+  find_first (equal sel o #sel) sel_eqns
+  |> try (fn SOME {fun_args, rhs_term, ...} => abs_tuple fun_args rhs_term)
+  |> the_default undef_const
+  |> K;
+
+(* Corecursor arguments for a mutual corecursive call through selector sel: a triple
+   of updates for the predicate position (rhs rewritten to False where a call occurs,
+   True elsewhere), the end position (calls replaced by "undefined"), and the
+   continuation position (each call replaced by the tuple of its arguments). *)
+fun build_corec_args_mutual_call lthy has_call (sel_eqns : coeqn_data_sel list) sel =
+  (case find_first (equal sel o #sel) sel_eqns of
+    NONE => (I, I, I)
+  | SOME {fun_args, rhs_term, ... } =>
+    let
+      val bound_Ts = List.rev (map fastype_of fun_args);
+      fun rewrite_stop _ t = if has_call t then @{term False} else @{term True};
+      fun rewrite_end _ t = if has_call t then undef_const else t;
+      fun rewrite_cont bound_Ts t =
+        if has_call t then mk_tuple1 bound_Ts (snd (strip_comb t)) else undef_const;
+      fun massage f _ = massage_mutual_corec_call lthy has_call f bound_Ts rhs_term
+        |> abs_tuple fun_args;
+    in
+      (massage rewrite_stop, massage rewrite_end, massage rewrite_cont)
+    end);
+
+fun build_corec_arg_nested_call lthy has_call (sel_eqns : coeqn_data_sel list) sel =
+  (case find_first (equal sel o #sel) sel_eqns of
+    NONE => I
+  | SOME {fun_args, rhs_term, ...} =>
+    let
+      val bound_Ts = List.rev (map fastype_of fun_args);
+      fun rewrite bound_Ts U T (Abs (v, V, b)) = Abs (v, V, rewrite (V :: bound_Ts) U T b)
+        | rewrite bound_Ts U T (t as _ $ _) =
+          let val (u, vs) = strip_comb t in
+            if is_Free u andalso has_call u then
+              Inr_const U T $ mk_tuple1 bound_Ts vs
+            else if try (fst o dest_Const) u = SOME @{const_name prod_case} then
+              map (rewrite bound_Ts U T) vs |> chop 1 |>> HOLogic.mk_split o the_single |> list_comb
+            else
+              list_comb (rewrite bound_Ts U T u, map (rewrite bound_Ts U T) vs)
+          end
+        | rewrite _ U T t =
+          if is_Free t andalso has_call t then Inr_const U T $ HOLogic.unit else t;
+      fun massage t =
+        rhs_term
+        |> massage_nested_corec_call lthy has_call rewrite bound_Ts (range_type (fastype_of t))
+        |> abs_tuple fun_args;
+    in
+      massage
+    end);
+
+(* Fold, over one constructor's selectors, the corecursor-argument updates: selectors
+   without calls, with mutual calls (q/g/h positions), and with nested calls are
+   dispatched to the corresponding builders above; no-op if the constructor has no
+   selector equations. *)
+fun build_corec_args_sel lthy has_call (all_sel_eqns : coeqn_data_sel list)
+    (ctr_spec : corec_ctr_spec) =
+  (case filter (equal (#ctr ctr_spec) o #ctr) all_sel_eqns of
+    [] => I
+  | sel_eqns =>
+    let
+      val sel_call_list = #sels ctr_spec ~~ #calls ctr_spec;
+      val no_calls' = map_filter (try (apsnd (fn No_Corec n => n))) sel_call_list;
+      val mutual_calls' = map_filter (try (apsnd (fn Mutual_Corec n => n))) sel_call_list;
+      val nested_calls' = map_filter (try (apsnd (fn Nested_Corec n => n))) sel_call_list;
+    in
+      I
+      #> fold (fn (sel, n) => nth_map n (build_corec_arg_no_call sel_eqns sel)) no_calls'
+      #> fold (fn (sel, (q, g, h)) =>
+        let val (fq, fg, fh) = build_corec_args_mutual_call lthy has_call sel_eqns sel in
+          nth_map q fq o nth_map g fg o nth_map h fh end) mutual_calls'
+      #> fold (fn (sel, n) => nth_map n
+        (build_corec_arg_nested_call lthy has_call sel_eqns sel)) nested_calls'
+    end);
+
+(* Build the definitions of the specified functions: start from the corecursor with
+   every argument "undefined", install the parsed discriminator and selector data,
+   tuple-currify the function arguments (unit argument for nullary functions), check
+   the resulting terms, and return them paired with the pairwise exclusiveness proof
+   obligations (exclss') between the premises of the discriminator equations. *)
+fun build_codefs lthy bs mxs has_call arg_Tss (corec_specs : corec_spec list)
+    (disc_eqnss : coeqn_data_disc list list) (sel_eqnss : coeqn_data_sel list list) =
+  let
+    val corecs = map #corec corec_specs;
+    val ctr_specss = map #ctr_specs corec_specs;
+    val corec_args = hd corecs
+      |> fst o split_last o binder_types o fastype_of
+      |> map (Const o pair @{const_name undefined})
+      |> fold2 (fold o build_corec_arg_disc) ctr_specss disc_eqnss
+      |> fold2 (fold o build_corec_args_sel lthy has_call) sel_eqnss ctr_specss;
+    (* currys [T1, ..., Tn] t = "%x1 ... xn. t (x1, ..., xn)". *)
+    fun currys [] t = t
+      | currys Ts t = t $ mk_tuple1 (List.rev Ts) (map Bound (length Ts - 1 downto 0))
+          |> fold_rev (Term.abs o pair Name.uu) Ts;
+
+(*
+val _ = tracing ("corecursor arguments:\n    \<cdot> " ^
+ space_implode "\n    \<cdot> " (map (Syntax.string_of_term lthy) corec_args));
+*)
+
+    val exclss' =
+      disc_eqnss
+      |> map (map (fn x => (#fun_args x, #ctr_no x, #prems x, #auto_gen x))
+        #> fst o (fn xs => fold_map (fn x => fn ys => ((x, ys), ys @ [x])) xs [])
+        #> maps (uncurry (map o pair)
+          #> map (fn ((fun_args, c, x, a), (_, c', y, a')) =>
+              ((c, c', a orelse a'), (x, s_not (s_conjs y)))
+            ||> apfst (map HOLogic.mk_Trueprop) o apsnd HOLogic.mk_Trueprop
+            ||> Logic.list_implies
+            ||> curry Logic.list_all (map dest_Free fun_args))))
+  in
+    map (list_comb o rpair corec_args) corecs
+    |> map2 (fn Ts => fn t => if length Ts = 0 then t $ HOLogic.unit else t) arg_Tss
+    |> map2 currys arg_Tss
+    |> Syntax.check_terms lthy
+    |> map3 (fn b => fn mx => fn t => ((b, mx), ((Binding.conceal (Thm.def_binding b), []), t)))
+      bs mxs
+    |> rpair exclss'
+  end;
+
+(* If exactly one discriminator equation is missing (n - 1 of n constructors covered),
+   synthesize the remaining one, with the negations of all given equations' premises
+   as its premises and auto_gen set; otherwise return disc_eqns unchanged. *)
+fun mk_real_disc_eqns fun_binding arg_Ts ({ctr_specs, ...} : corec_spec)
+    (sel_eqns : coeqn_data_sel list) (disc_eqns : coeqn_data_disc list) =
+  if length disc_eqns <> length ctr_specs - 1 then disc_eqns else
+    let
+      val n = 0 upto length ctr_specs
+        |> the o find_first (fn idx => not (exists (equal idx o #ctr_no) disc_eqns));
+      (* Reuse the function arguments of any given equation; invent fresh ones else. *)
+      val fun_args = (try (#fun_args o hd) disc_eqns, try (#fun_args o hd) sel_eqns)
+        |> the_default (map (curry Free Name.uu) arg_Ts) o merge_options;
+      val extra_disc_eqn = {
+        fun_name = Binding.name_of fun_binding,
+        fun_T = arg_Ts ---> body_type (fastype_of (#ctr (hd ctr_specs))),
+        fun_args = fun_args,
+        ctr = #ctr (nth ctr_specs n),
+        ctr_no = n,
+        disc = #disc (nth ctr_specs n),
+        prems = maps (s_not_conj o #prems) disc_eqns,
+        auto_gen = true,
+        maybe_ctr_rhs = NONE,
+        maybe_code_rhs = NONE,
+        user_eqn = undef_const};
+    in
+      chop n disc_eqns ||> cons extra_disc_eqn |> (op @)
+    end;
+
+(* Record the corecursive calls occurring in a selector equation's right-hand side:
+   update, in a callssss-style structure (an alist from constructor to per-selector
+   call lists), the entry for the equation's constructor at this selector's position. *)
+fun find_corec_calls ctxt has_call basic_ctr_specs ({ctr, sel, rhs_term, ...} : coeqn_data_sel) =
+  let
+    val sel_no = find_first (equal ctr o #ctr) basic_ctr_specs
+      |> find_index (equal sel) o #sels o the;
+    fun find t = if has_call t then snd (fold_rev_corec_call ctxt (K cons) [] t []) else [];
+  in
+    find rhs_term
+    |> K |> nth_map sel_no |> AList.map_entry (op =) ctr
+  end;
+
+fun add_primcorec_ursive maybe_tac opts fixes specs maybe_of_specs lthy =
+  let
+    val thy = Proof_Context.theory_of lthy;
+
+    val (bs, mxs) = map_split (apfst fst) fixes;
+    val (arg_Ts, res_Ts) = map (strip_type o snd o fst #>> HOLogic.mk_tupleT) fixes |> split_list;
+
+    val _ = (case filter_out (fn (_, T) => Sign.of_sort thy (T, HOLogic.typeS)) (bs ~~ arg_Ts) of
+        [] => ()
+      | (b, _) :: _ => primcorec_error ("type of " ^ Binding.print b ^ " contains top sort"));
+
+    val seq = member (op =) opts Option_Sequential;
+    val exhaustive = member (op =) opts Option_Exhaustive;
+
+    val fun_names = map Binding.name_of bs;
+    val basic_ctr_specss = map (basic_corec_specs_of lthy) res_Ts;
+    val has_call = exists_subterm (map (fst #>> Binding.name_of #> Free) fixes |> member (op =));
+    val eqns_data =
+      fold_map2 (dissect_coeqn lthy seq has_call fun_names basic_ctr_specss) (map snd specs)
+        maybe_of_specs []
+      |> flat o fst;
+
+    val callssss =
+      map_filter (try (fn Sel x => x)) eqns_data
+      |> partition_eq ((op =) o pairself #fun_name)
+      |> fst o finds (fn (x, ({fun_name, ...} :: _)) => x = fun_name) fun_names
+      |> map (flat o snd)
+      |> map2 (fold o find_corec_calls lthy has_call) basic_ctr_specss
+      |> map2 (curry (op |>)) (map (map (fn {ctr, sels, ...} =>
+        (ctr, map (K []) sels))) basic_ctr_specss);
+
+(*
+val _ = tracing ("callssss = " ^ @{make_string} callssss);
+*)
+
+    val ((n2m, corec_specs', _, coinduct_thm, strong_coinduct_thm, coinduct_thms,
+          strong_coinduct_thms), lthy') =
+      corec_specs_of bs arg_Ts res_Ts (get_indices fixes) callssss lthy;
+    val actual_nn = length bs;
+    val corec_specs = take actual_nn corec_specs'; (*###*)
+    val ctr_specss = map #ctr_specs corec_specs;
+
+    val disc_eqnss' = map_filter (try (fn Disc x => x)) eqns_data
+      |> partition_eq ((op =) o pairself #fun_name)
+      |> fst o finds (fn (x, ({fun_name, ...} :: _)) => x = fun_name) fun_names
+      |> map (sort ((op <) o pairself #ctr_no |> make_ord) o flat o snd);
+    val _ = disc_eqnss' |> map (fn x =>
+      let val d = duplicates ((op =) o pairself #ctr_no) x in null d orelse
+        primcorec_error_eqns "excess discriminator formula in definition"
+          (maps (fn t => filter (equal (#ctr_no t) o #ctr_no) x) d |> map #user_eqn) end);
+
+    val sel_eqnss = map_filter (try (fn Sel x => x)) eqns_data
+      |> partition_eq ((op =) o pairself #fun_name)
+      |> fst o finds (fn (x, ({fun_name, ...} :: _)) => x = fun_name) fun_names
+      |> map (flat o snd);
+
+    val arg_Tss = map (binder_types o snd o fst) fixes;
+    val disc_eqnss = map5 mk_real_disc_eqns bs arg_Tss corec_specs sel_eqnss disc_eqnss';
+    val (defs, exclss') =
+      build_codefs lthy' bs mxs has_call arg_Tss corec_specs disc_eqnss sel_eqnss;
+
+    fun excl_tac (c, c', a) =
+      if a orelse c = c' orelse seq then SOME (K (HEADGOAL (mk_primcorec_assumption_tac lthy [])))
+      else maybe_tac;
+
+(*
+val _ = tracing ("exclusiveness properties:\n    \<cdot> " ^
+ space_implode "\n    \<cdot> " (maps (map (Syntax.string_of_term lthy o snd)) exclss'));
+*)
+
+    val exclss'' = exclss' |> map (map (fn (idx, t) =>
+      (idx, (Option.map (Goal.prove lthy [] [] t #> Thm.close_derivation) (excl_tac idx), t))));
+    val taut_thmss = map (map (apsnd (the o fst)) o filter (is_some o fst o snd)) exclss'';
+    val (goal_idxss, goalss') = exclss''
+      |> map (map (apsnd (rpair [] o snd)) o filter (is_none o fst o snd))
+      |> split_list o map split_list;
+
+    val exh_props = if not exhaustive then [] else
+      map (HOLogic.mk_Trueprop o mk_disjs o map (mk_conjs o #prems)) disc_eqnss
+      |> map2 ((fn {fun_args, ...} =>
+        curry Logic.list_all (map dest_Free fun_args)) o hd) disc_eqnss;
+    val exh_taut_thms = if exhaustive andalso is_some maybe_tac then
+        map (fn t => Goal.prove lthy [] [] t (the maybe_tac) |> Thm.close_derivation) exh_props
+      else [];
+    val goalss = if exhaustive andalso is_none maybe_tac then
+      map (rpair []) exh_props :: goalss' else goalss';
+
+    fun prove thmss'' def_thms' lthy =
+      let
+        val def_thms = map (snd o snd) def_thms';
+
+        val maybe_exh_thms = if exhaustive andalso is_none maybe_tac then
+          map SOME (hd thmss'') else map (K NONE) def_thms;
+        val thmss' = if exhaustive andalso is_none maybe_tac then tl thmss'' else thmss'';
+
+        val exclss' = map (op ~~) (goal_idxss ~~ thmss');
+        fun mk_exclsss excls n =
+          (excls, map (fn k => replicate k [TrueI] @ replicate (n - k) []) (0 upto n - 1))
+          |-> fold (fn ((c, c', _), thm) => nth_map c (nth