merged
authornipkow
Mon, 16 Apr 2012 19:01:57 +0200
changeset 47495 573ca05db948
parent 47494 8c8f27864ed1 (current diff)
parent 47493 2feb0aab030f (diff)
child 47496 a43f207f216f
merged
NEWS
doc-src/IsarOverview/IsaMakefile
doc-src/IsarOverview/Isar/Induction.thy
doc-src/IsarOverview/Isar/Logic.thy
doc-src/IsarOverview/Isar/ROOT.ML
doc-src/IsarOverview/Isar/document/Induction.tex
doc-src/IsarOverview/Isar/document/Logic.tex
doc-src/IsarOverview/Isar/document/Makefile
doc-src/IsarOverview/Isar/document/intro.tex
doc-src/IsarOverview/Isar/document/llncs.cls
doc-src/IsarOverview/Isar/document/root.bib
doc-src/IsarOverview/Isar/document/root.tex
doc-src/IsarOverview/Isar/makeDemo
doc-src/IsarOverview/Makefile
src/HOL/Mirabelle/Tools/mirabelle.ML
src/HOL/Mirabelle/Tools/mirabelle_arith.ML
src/HOL/Mirabelle/Tools/mirabelle_metis.ML
src/HOL/Mirabelle/Tools/mirabelle_quickcheck.ML
src/HOL/Mirabelle/Tools/mirabelle_refute.ML
src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML
src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML
src/HOL/ex/Set_Algebras.thy
--- a/ANNOUNCE	Wed Apr 04 09:59:49 2012 +0200
+++ b/ANNOUNCE	Mon Apr 16 19:01:57 2012 +0200
@@ -1,29 +1,15 @@
-Subject: Announcing Isabelle2011-1
+Subject: Announcing Isabelle2012
 To: isabelle-users@cl.cam.ac.uk
 
-Isabelle2011-1 is now available.
+Isabelle2012 is now available.
 
-This version significantly improves upon Isabelle2011, see the NEWS
+This version significantly improves upon Isabelle2011-1, see the NEWS
 file in the distribution for more details.  Some notable changes are:
 
-* Significantly improved Isabelle/jEdit Prover IDE (PIDE).
-
-* Improved system integration with Isabelle/Scala: YXML data encoding.
-
-* Improved parallel performance and scalability.
-
-* Improved document preparation: embedded rail-road diagrams.
-
-* HOL tool enhancements: Quickcheck, Nitpick, Sledgehammer, SMT/Z3
-  integration.
-
-* Numerous HOL library improvements: main HOL, HOLCF, HOL-Library,
-  Multivariate_Analysis, Probability.
-
-* Updated and extended Isabelle/Isar reference manual.
+* FIXME
 
 
-You may get Isabelle2011-1 from the following mirror sites:
+You may get Isabelle2012 from the following mirror sites:
 
   Cambridge (UK)       http://www.cl.cam.ac.uk/research/hvg/Isabelle/
   Munich (Germany)     http://isabelle.in.tum.de/
--- a/Admin/CHECKLIST	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/CHECKLIST	Mon Apr 16 19:01:57 2012 +0200
@@ -34,6 +34,9 @@
     etc/components
     lib/html/library_index_content.template
 
+- test separate compilation of Isabelle/Scala PIDE sources:
+    Admin/build jars_test
+
 - test contrib components:
     x86_64-linux without 32bit C/C++ libraries
     Mac OS X Leopard
--- a/Admin/build	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/build	Mon Apr 16 19:01:57 2012 +0200
@@ -28,6 +28,7 @@
     doc             documentation (requires latex and rail)
     doc-src         documentation sources from Isabelle theories
     jars            Isabelle/Scala layer (requires \$ISABELLE_JDK_HOME and \$SCALA_HOME)
+    jars_test       test separate build of jars
     jars_fresh      fresh build of jars
 
 EOF
@@ -121,6 +122,7 @@
     doc-src) build_doc-src;;
     jars) build_jars;;
     jars_fresh) build_jars -f;;
+    jars_test) build_jars -t;;
     *) fail "Bad module $MODULE"
   esac
 done
--- a/Admin/isatest/isatest-makedist	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/isatest/isatest-makedist	Mon Apr 16 19:01:57 2012 +0200
@@ -59,7 +59,7 @@
 
 echo "### building distribution"  >> $DISTLOG 2>&1
 mkdir -p $DISTPREFIX
-$MAKEDIST -j "/home/isabelle/contrib_devel/jedit_build-20120313" >> $DISTLOG 2>&1
+$MAKEDIST -j "/home/isabelle/contrib_devel/jedit_build-20120414" >> $DISTLOG 2>&1
 
 if [ $? -ne 0 ]
 then
--- a/Admin/isatest/settings/at-poly-test	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/isatest/settings/at-poly-test	Mon Apr 16 19:01:57 2012 +0200
@@ -4,7 +4,7 @@
   ML_SYSTEM="polyml-5.4.2"
   ML_PLATFORM="x86-linux"
   ML_HOME="$POLYML_HOME/$ML_PLATFORM"
-  ML_OPTIONS="-H 500"
+  ML_OPTIONS="-H 500 --gcthreads 1"
 
 ISABELLE_HOME_USER=~/isabelle-at-poly-test
 
--- a/Admin/isatest/settings/mac-poly-M4	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/isatest/settings/mac-poly-M4	Mon Apr 16 19:01:57 2012 +0200
@@ -1,10 +1,10 @@
 # -*- shell-script -*- :mode=shellscript:
 
-  POLYML_HOME="/home/polyml/polyml-5.4.1"
-  ML_SYSTEM="polyml-5.4.1"
+  POLYML_HOME="/home/polyml/polyml-svn"
+  ML_SYSTEM="polyml-5.4.2"
   ML_PLATFORM="x86-darwin"
   ML_HOME="$POLYML_HOME/$ML_PLATFORM"
-  ML_OPTIONS="-H 1000"
+  ML_OPTIONS="-H 500 --gcthreads 4 --gcshare 0"
 
 
 ISABELLE_HOME_USER=~/isabelle-mac-poly-M4
--- a/Admin/isatest/settings/mac-poly-M8	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/isatest/settings/mac-poly-M8	Mon Apr 16 19:01:57 2012 +0200
@@ -1,10 +1,10 @@
 # -*- shell-script -*- :mode=shellscript:
 
-  POLYML_HOME="/home/polyml/polyml-5.4.1"
-  ML_SYSTEM="polyml-5.4.1"
+  POLYML_HOME="/home/polyml/polyml-svn"
+  ML_SYSTEM="polyml-5.4.2"
   ML_PLATFORM="x86-darwin"
   ML_HOME="$POLYML_HOME/$ML_PLATFORM"
-  ML_OPTIONS="-H 1000"
+  ML_OPTIONS="-H 500 --gcthreads 8 --gcshare 0"
 
 
 ISABELLE_HOME_USER=~/isabelle-mac-poly-M8
--- a/Admin/isatest/settings/mac-poly64-M4	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/isatest/settings/mac-poly64-M4	Mon Apr 16 19:01:57 2012 +0200
@@ -1,10 +1,10 @@
 # -*- shell-script -*- :mode=shellscript:
 
-  POLYML_HOME="/home/polyml/polyml-5.4.1"
-  ML_SYSTEM="polyml-5.4.1"
+  POLYML_HOME="/home/polyml/polyml-svn"
+  ML_SYSTEM="polyml-5.4.2"
   ML_PLATFORM="x86_64-darwin"
   ML_HOME="$POLYML_HOME/$ML_PLATFORM"
-  ML_OPTIONS="-H 2000 --gcthreads 4"
+  ML_OPTIONS="-H 1000 --gcthreads 4 --gcshare 0"
 
 
 ISABELLE_HOME_USER=~/isabelle-mac-poly64-M4
--- a/Admin/isatest/settings/mac-poly64-M8	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/isatest/settings/mac-poly64-M8	Mon Apr 16 19:01:57 2012 +0200
@@ -1,10 +1,10 @@
 # -*- shell-script -*- :mode=shellscript:
 
-  POLYML_HOME="/home/polyml/polyml-5.4.1"
-  ML_SYSTEM="polyml-5.4.1"
+  POLYML_HOME="/home/polyml/polyml-svn"
+  ML_SYSTEM="polyml-5.4.2"
   ML_PLATFORM="x86_64-darwin"
   ML_HOME="$POLYML_HOME/$ML_PLATFORM"
-  ML_OPTIONS="-H 2000 --gcthreads 8"
+  ML_OPTIONS="-H 1000 --gcthreads 8 --gcshare 0"
 
 
 ISABELLE_HOME_USER=~/isabelle-mac-poly64-M8
--- a/Admin/java/README	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/java/README	Mon Apr 16 19:01:57 2012 +0200
@@ -1,3 +1,3 @@
-This is JDK 1.7.0_03 for Linux and Linux x86 from
+This is JDK 1.7.0_03 for Linux from
 http://www.oracle.com/technetwork/java/javase/downloads/index.html
 
--- a/Admin/java/etc/settings	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/java/etc/settings	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
 # -*- shell-script -*- :mode=shellscript:
 
-ISABELLE_JDK_HOME="$COMPONENT/${ISABELLE_PLATFORM64:-$ISABELLE_PLATFORM}/jdk1.7.0_03"
+ISABELLE_JDK_HOME="$COMPONENT/jdk1.7.0_03"
 
--- a/Admin/polyml/build	Wed Apr 04 09:59:49 2012 +0200
+++ b/Admin/polyml/build	Mon Apr 16 19:01:57 2012 +0200
@@ -63,6 +63,9 @@
   x86-cygwin)
     OPTIONS=()
     ;;
+  x86-mingw)
+    OPTIONS=()
+    ;;
   ppc-darwin | sparc-solaris | x86-solaris | x86-bsd)
     OPTIONS=()
     ;;
--- a/CONTRIBUTORS	Wed Apr 04 09:59:49 2012 +0200
+++ b/CONTRIBUTORS	Mon Apr 16 19:01:57 2012 +0200
@@ -3,15 +3,26 @@
 who is listed as an author in one of the source files of this Isabelle
 distribution.
 
-Contributions to this Isabelle version
---------------------------------------
+Contributions to Isabelle2012
+-----------------------------
+
+* March 2012: Christian Sternagel, Japan Advanced Institute of Science
+  and Technology
+  Consolidated theory of relation composition.
+
+* March 2012: Nik Sultana, University of Cambridge
+  HOL/TPTP parser and import facilities.
+
+* March 2012: Cezary Kaliszyk, University of Innsbruck and
+  Alexander Krauss, QAware GmbH
+  Faster and more scalable Import mechanism for HOL Light proofs.
 
 * January 2012: Florian Haftmann, TUM, et al.
   (Re-)Introduction of the "set" type constructor.
 
-* March 2012: Cezary Kaliszyk, University of Innsbruck and
-  Alexander Krauss, QAware GmbH
-  Faster and more scalable Import mechanism for HOL Light proofs.
+* 2011/2012: Makarius Wenzel, Université Paris-Sud / LRI
+  Various refinements of local theory infrastructure.
+  Improvements of Isabelle/Scala layer and Isabelle/jEdit Prover IDE.
 
 
 Contributions to Isabelle2011-1
--- a/COPYRIGHT	Wed Apr 04 09:59:49 2012 +0200
+++ b/COPYRIGHT	Mon Apr 16 19:01:57 2012 +0200
@@ -1,6 +1,6 @@
 ISABELLE COPYRIGHT NOTICE, LICENCE AND DISCLAIMER.
 
-Copyright (c) 2011,
+Copyright (c) 2012,
   University of Cambridge,
   Technische Universitaet Muenchen,
   and contributors.
--- a/NEWS	Wed Apr 04 09:59:49 2012 +0200
+++ b/NEWS	Mon Apr 16 19:01:57 2012 +0200
@@ -1,8 +1,8 @@
 Isabelle NEWS -- history of user-relevant changes
 ==============================================
 
-New in this Isabelle version
-----------------------------
+New in Isabelle2012 (May 2012)
+------------------------------
 
 *** General ***
 
@@ -47,12 +47,46 @@
 header -- minor INCOMPATIBILITY for user-defined commands.  Allow new
 commands to be used in the same theory where defined.
 
-* ISABELLE_JDK_HOME settings variable points to JDK with javac and jar
-(not just JRE).
-
 
 *** Pure ***
 
+* Auxiliary contexts indicate block structure for specifications with
+additional parameters and assumptions.  Such unnamed contexts may be
+nested within other targets, like 'theory', 'locale', 'class',
+'instantiation' etc.  Results from the local context are generalized
+accordingly and applied to the enclosing target context.  Example:
+
+  context
+    fixes x y z :: 'a
+    assumes xy: "x = y" and yz: "y = z"
+  begin
+
+  lemma my_trans: "x = z" using xy yz by simp
+
+  end
+
+  thm my_trans
+
+The most basic application is to factor-out context elements of
+several fixes/assumes/shows theorem statements, e.g. see
+~~/src/HOL/Isar_Examples/Group_Context.thy
+
+Any other local theory specification element works within the "context
+... begin ... end" block as well.
+
+* Bundled declarations associate attributed fact expressions with a
+given name in the context.  These may later be included in other
+contexts.  This allows context extensions to be managed casually,
+without the logical dependencies of locales and locale interpretation.
+
+See commands 'bundle', 'include', 'including' etc. in the isar-ref
+manual.
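+
+For example, a minimal sketch (using the existing "algebra_simps"
+collection; the bundle name and the concrete lemma are only for
+illustration):
+
+  bundle my_simps = algebra_simps [simp]
+
+  lemma "(a::int) * (b + 1) = a * b + a"
+    including my_simps by simp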
+
+* Rule composition via attribute "OF" (or ML functions OF/MRS) is more
+tolerant of multiple unifiers, as long as the final result is unique.
+(As before, rules are composed in canonical right-to-left order to
+accommodate newly introduced premises.)
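+
+For example, a minimal sketch building on the exported "my_trans" from
+the context example above (facts ab and bc are hypothetical, with
+placeholder proofs; this only illustrates plain "OF" composition, not
+the multiple-unifier aspect):
+
+  lemma ab: "(a::'a) = b" and bc: "b = c" sorry
+  thm my_trans [OF ab bc]
+
+which should yield "a = c" (with schematic variables).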
+
 * Command 'definition' no longer exports the foundational "raw_def"
 into the user context.  Minor INCOMPATIBILITY, may use the regular
 "def" result with attribute "abs_def" to imitate the old version.
@@ -63,21 +97,21 @@
 undocumented feature.)
 
 * Discontinued old "prems" fact, which used to refer to the accidental
-collection of foundational premises in the context (marked as legacy
-since Isabelle2011).
+collection of foundational premises in the context (already marked as
+legacy since Isabelle2011).
 
 * Obsolete command 'types' has been discontinued.  Use 'type_synonym'
 instead.  INCOMPATIBILITY.
 
-* Ancient code generator for SML and its commands 'code_module',
+* Old code generator for SML and its commands 'code_module',
 'code_library', 'consts_code', 'types_code' have been discontinued.
 Use commands of the generic code generator instead.  INCOMPATIBILITY.
 
-* Redundant attribute 'code_inline' has been discontinued. Use
-'code_unfold' instead.  INCOMPATIBILITY.
-
-* Dropped attribute 'code_unfold_post' in favor of the its dual
-'code_abbrev', which yields a common pattern in definitions like
+* Redundant attribute "code_inline" has been discontinued. Use
+"code_unfold" instead.  INCOMPATIBILITY.
+
+* Dropped attribute "code_unfold_post" in favor of its dual
+"code_abbrev", which yields a common pattern in definitions like
 
   definition [code_abbrev]: "f = t"
 
@@ -90,17 +124,20 @@
   lemma "P (x::'a::foo)" and "Q (y::'a::bar)"  -- "now illegal"
 
   lemma "P (x::'a)" and "Q (y::'a::bar)"
-    -- "now uniform 'a::bar instead of default sort for first occurence (!)"
+    -- "now uniform 'a::bar instead of default sort for first occurrence (!)"
 
 
 *** HOL ***
 
-* New tutorial "Programming and Proving in Isabelle/HOL".
+
+* New tutorial "Programming and Proving in Isabelle/HOL" ("prog-prove").
 It completely supersedes "A Tutorial Introduction to Structured Isar Proofs",
 which has been removed. It supersedes "Isabelle/HOL, A Proof Assistant
 for Higher-Order Logic" as the recommended beginners tutorial
 but does not cover all of the material of that old tutorial.
 
+* Discontinued the old tutorial on Isar ("isar-overview").
+
 * The representation of numerals has changed. We now have a datatype
 "num" representing strictly positive binary numerals, along with
 functions "numeral :: num => 'a" and "neg_numeral :: num => 'a" to
@@ -131,19 +168,44 @@
 sets separate, it is often sufficient to rephrase sets S accidentally
 used as predicates by "%x. x : S" and predicates P accidentally used
 as sets by "{x. P x}".  Corresponding proofs in a first step should be
-pruned from any tinkering with former theorems mem_def and
-Collect_def as far as possible.
-For developments which deliberately mixed predicates and
-sets, a planning step is necessary to determine what should become a
-predicate and what a set.  It can be helpful to carry out that step in
+pruned from any tinkering with former theorems mem_def and Collect_def
+as far as possible.
+
+For developments which deliberately mixed predicates and sets, a
+planning step is necessary to determine what should become a predicate
+and what a set.  It can be helpful to carry out that step in
 Isabelle2011-1 before jumping right into the current release.
 
+* The representation of numerals has changed.  Datatype "num"
+represents strictly positive binary numerals, along with functions
+"numeral :: num => 'a" and "neg_numeral :: num => 'a" to represent
+positive and negated numeric literals, respectively. (See definitions
+in ~~/src/HOL/Num.thy.) Potential INCOMPATIBILITY, some user theories
+may require adaptations as follows:
+
+  - Theorems with number_ring or number_semiring constraints: These
+    classes are gone; use comm_ring_1 or comm_semiring_1 instead.
+
+  - Theories defining numeric types: Remove number, number_semiring,
+    and number_ring instances. Defer all theorems about numerals until
+    after classes one and semigroup_add have been instantiated.
+
+  - Numeral-only simp rules: Replace each rule having a "number_of v"
+    pattern with two copies, one for numeral and one for neg_numeral.
+
+  - Theorems about subclasses of semiring_1 or ring_1: These classes
+    automatically support numerals now, so more simp rules and
+    simprocs may now apply within the proof.
+
+  - Definitions and theorems using old constructors Pls/Min/Bit0/Bit1:
+    Redefine using other integer operations.
+
 * Code generation by default implements sets as container type rather
 than predicates.  INCOMPATIBILITY.
 
 * New proof import from HOL Light: Faster, simpler, and more scalable.
 Requires a proof bundle, which is available as an external component.
-Removed old (and mostly dead) Importer for HOL4 and HOL Light.
+Discontinued old (and mostly dead) Importer for HOL4 and HOL Light.
 INCOMPATIBILITY.
 
 * New type synonym 'a rel = ('a * 'a) set
@@ -177,8 +239,8 @@
 generic mult_2 and mult_2_right instead. INCOMPATIBILITY.
 
 * More default pred/set conversions on a couple of relation operations
-and predicates.  Added powers of predicate relations.
-Consolidation of some relation theorems:
+and predicates.  Added powers of predicate relations.  Consolidation
+of some relation theorems:
 
   converse_def ~> converse_unfold
   rel_comp_def ~> rel_comp_unfold
@@ -189,8 +251,8 @@
 
 Generalized theorems INF_INT_eq, INF_INT_eq2, SUP_UN_eq, SUP_UN_eq2.
 
-See theory "Relation" for examples for making use of pred/set conversions
-by means of attributes "to_set" and "to_pred".
+See theory "Relation" for examples for making use of pred/set
+conversions by means of attributes "to_set" and "to_pred".
 
 INCOMPATIBILITY.
 
@@ -212,10 +274,11 @@
   SUPR_set_fold ~> SUP_set_fold
   INF_code ~> INF_set_foldr
   SUP_code ~> SUP_set_foldr
-  foldr.simps ~> foldr_Nil foldr_Cons (in point-free formulation)
-  foldl.simps ~> foldl_Nil foldl_Cons
-  foldr_fold_rev ~> foldr_def
-  foldl_fold ~> foldl_def
+  foldr.simps ~> foldr.simps (in point-free formulation)
+  foldr_fold_rev ~> foldr_conv_fold
+  foldl_fold ~> foldl_conv_fold
+  foldr_foldr ~> foldr_conv_foldl
+  foldl_foldr ~> foldl_conv_foldr
 
 INCOMPATIBILITY.
 
@@ -224,11 +287,19 @@
 rev_foldl_cons, fold_set_remdups, fold_set, fold_set1,
 concat_conv_foldl, foldl_weak_invariant, foldl_invariant,
 foldr_invariant, foldl_absorb0, foldl_foldr1_lemma, foldl_foldr1,
-listsum_conv_fold, listsum_foldl, sort_foldl_insort.  INCOMPATIBILITY.
-Prefer "List.fold" with canonical argument order, or boil down
-"List.foldr" and "List.foldl" to "List.fold" by unfolding "foldr_def"
-and "foldl_def".  For the common phrases "%xs. List.foldr plus xs 0"
-and "List.foldl plus 0", prefer "List.listsum".
+listsum_conv_fold, listsum_foldl, sort_foldl_insort, foldl_assoc,
+foldr_conv_foldl, start_le_sum, elem_le_sum, sum_eq_0_conv.
+INCOMPATIBILITY.  For the common phrases "%xs. List.foldr plus xs 0"
+and "List.foldl plus 0", prefer "List.listsum".  Otherwise it can be
+useful to boil down "List.foldr" and "List.foldl" to "List.fold" by
+unfolding "foldr_conv_fold" and "foldl_conv_fold".
+
+* Dropped lemmas minus_set_foldr, union_set_foldr, union_coset_foldr,
+inter_coset_foldr, Inf_fin_set_foldr, Sup_fin_set_foldr,
+Min_fin_set_foldr, Max_fin_set_foldr, Inf_set_foldr, Sup_set_foldr,
+INF_set_foldr, SUP_set_foldr.  INCOMPATIBILITY.  Prefer corresponding
+lemmas over fold rather than foldr, or make use of lemmas
+fold_conv_foldr and fold_rev.
 
 * Congruence rules Option.map_cong and Option.bind_cong for recursion
 through option types.
@@ -255,8 +326,8 @@
 INCOMPATIBILITY.
 
 * Renamed facts about the power operation on relations, i.e., relpow
-  to match the constant's name:
-  
+to match the constant's name:
+
   rel_pow_1 ~> relpow_1
   rel_pow_0_I ~> relpow_0_I
   rel_pow_Suc_I ~> relpow_Suc_I
@@ -265,7 +336,7 @@
   rel_pow_Suc_E ~> relpow_Suc_E
   rel_pow_E ~> relpow_E
   rel_pow_Suc_D2 ~> relpow_Suc_D2
-  rel_pow_Suc_E2 ~> relpow_Suc_E2 
+  rel_pow_Suc_E2 ~> relpow_Suc_E2
   rel_pow_Suc_D2' ~> relpow_Suc_D2'
   rel_pow_E2 ~> relpow_E2
   rel_pow_add ~> relpow_add
@@ -281,17 +352,167 @@
   rtrancl_finite_eq_rel_pow ~> rtrancl_finite_eq_relpow
   trancl_finite_eq_rel_pow ~> trancl_finite_eq_relpow
   single_valued_rel_pow ~> single_valued_relpow
- 
+
 INCOMPATIBILITY.
 
-* New theory HOL/Library/DAList provides an abstract type for association
-  lists with distinct keys.
+* Theory Relation: Consolidated constant name for relation composition
+and corresponding theorem names:
+
+  - Renamed constant rel_comp to relcomp
+
+  - Dropped abbreviation pred_comp. Use relcompp instead.
+
+  - Renamed theorems:
+
+    rel_compI ~> relcompI
+    rel_compEpair ~> relcompEpair
+    rel_compE ~> relcompE
+    pred_comp_rel_comp_eq ~> relcompp_relcomp_eq
+    rel_comp_empty1 ~> relcomp_empty1
+    rel_comp_mono ~> relcomp_mono
+    rel_comp_subset_Sigma ~> relcomp_subset_Sigma
+    rel_comp_distrib ~> relcomp_distrib
+    rel_comp_distrib2 ~> relcomp_distrib2
+    rel_comp_UNION_distrib ~> relcomp_UNION_distrib
+    rel_comp_UNION_distrib2 ~> relcomp_UNION_distrib2
+    single_valued_rel_comp ~> single_valued_relcomp
+    rel_comp_unfold ~> relcomp_unfold
+    converse_rel_comp ~> converse_relcomp
+    pred_compI ~> relcomppI
+    pred_compE ~> relcomppE
+    pred_comp_bot1 ~> relcompp_bot1
+    pred_comp_bot2 ~> relcompp_bot2
+    transp_pred_comp_less_eq ~> transp_relcompp_less_eq
+    pred_comp_mono ~> relcompp_mono
+    pred_comp_distrib ~> relcompp_distrib
+    pred_comp_distrib2 ~> relcompp_distrib2
+    converse_pred_comp ~> converse_relcompp
+
+    finite_rel_comp ~> finite_relcomp
+
+    set_rel_comp ~> set_relcomp
+
+INCOMPATIBILITY.
+
+* New theory HOL/Library/DAList provides an abstract type for
+association lists with distinct keys.
 
 * 'datatype' specifications allow explicit sort constraints.
 
 * Theory HOL/Library/Diagonalize has been removed. INCOMPATIBILITY,
 use theory HOL/Library/Nat_Bijection instead.
 
+* Theory HOL/Library/RBT_Impl: Backing implementation of red-black
+trees is now inside a type class context.  Names of affected
+operations and lemmas have been prefixed by rbt_.  INCOMPATIBILITY for
+theories working directly with raw red-black trees, adapt the names as
+follows:
+
+  Operations:
+  bulkload -> rbt_bulkload
+  del_from_left -> rbt_del_from_left
+  del_from_right -> rbt_del_from_right
+  del -> rbt_del
+  delete -> rbt_delete
+  ins -> rbt_ins
+  insert -> rbt_insert
+  insertw -> rbt_insert_with
+  insert_with_key -> rbt_insert_with_key
+  map_entry -> rbt_map_entry
+  lookup -> rbt_lookup
+  sorted -> rbt_sorted
+  tree_greater -> rbt_greater
+  tree_less -> rbt_less
+  tree_less_symbol -> rbt_less_symbol
+  union -> rbt_union
+  union_with -> rbt_union_with
+  union_with_key -> rbt_union_with_key
+
+  Lemmas:
+  balance_left_sorted -> balance_left_rbt_sorted
+  balance_left_tree_greater -> balance_left_rbt_greater
+  balance_left_tree_less -> balance_left_rbt_less
+  balance_right_sorted -> balance_right_rbt_sorted
+  balance_right_tree_greater -> balance_right_rbt_greater
+  balance_right_tree_less -> balance_right_rbt_less
+  balance_sorted -> balance_rbt_sorted
+  balance_tree_greater -> balance_rbt_greater
+  balance_tree_less -> balance_rbt_less
+  bulkload_is_rbt -> rbt_bulkload_is_rbt
+  combine_sorted -> combine_rbt_sorted
+  combine_tree_greater -> combine_rbt_greater
+  combine_tree_less -> combine_rbt_less
+  delete_in_tree -> rbt_delete_in_tree
+  delete_is_rbt -> rbt_delete_is_rbt
+  del_from_left_tree_greater -> rbt_del_from_left_rbt_greater
+  del_from_left_tree_less -> rbt_del_from_left_rbt_less
+  del_from_right_tree_greater -> rbt_del_from_right_rbt_greater
+  del_from_right_tree_less -> rbt_del_from_right_rbt_less
+  del_in_tree -> rbt_del_in_tree
+  del_inv1_inv2 -> rbt_del_inv1_inv2
+  del_sorted -> rbt_del_rbt_sorted
+  del_tree_greater -> rbt_del_rbt_greater
+  del_tree_less -> rbt_del_rbt_less
+  dom_lookup_Branch -> dom_rbt_lookup_Branch
+  entries_lookup -> entries_rbt_lookup
+  finite_dom_lookup -> finite_dom_rbt_lookup
+  insert_sorted -> rbt_insert_rbt_sorted
+  insertw_is_rbt -> rbt_insertw_is_rbt
+  insertwk_is_rbt -> rbt_insertwk_is_rbt
+  insertwk_sorted -> rbt_insertwk_rbt_sorted
+  insertw_sorted -> rbt_insertw_rbt_sorted
+  ins_sorted -> ins_rbt_sorted
+  ins_tree_greater -> ins_rbt_greater
+  ins_tree_less -> ins_rbt_less
+  is_rbt_sorted -> is_rbt_rbt_sorted
+  lookup_balance -> rbt_lookup_balance
+  lookup_bulkload -> rbt_lookup_rbt_bulkload
+  lookup_delete -> rbt_lookup_rbt_delete
+  lookup_Empty -> rbt_lookup_Empty
+  lookup_from_in_tree -> rbt_lookup_from_in_tree
+  lookup_in_tree -> rbt_lookup_in_tree
+  lookup_ins -> rbt_lookup_ins
+  lookup_insert -> rbt_lookup_rbt_insert
+  lookup_insertw -> rbt_lookup_rbt_insertw
+  lookup_insertwk -> rbt_lookup_rbt_insertwk
+  lookup_keys -> rbt_lookup_keys
+  lookup_map -> rbt_lookup_map
+  lookup_map_entry -> rbt_lookup_rbt_map_entry
+  lookup_tree_greater -> rbt_lookup_rbt_greater
+  lookup_tree_less -> rbt_lookup_rbt_less
+  lookup_union -> rbt_lookup_rbt_union
+  map_entry_color_of -> rbt_map_entry_color_of
+  map_entry_inv1 -> rbt_map_entry_inv1
+  map_entry_inv2 -> rbt_map_entry_inv2
+  map_entry_is_rbt -> rbt_map_entry_is_rbt
+  map_entry_sorted -> rbt_map_entry_rbt_sorted
+  map_entry_tree_greater -> rbt_map_entry_rbt_greater
+  map_entry_tree_less -> rbt_map_entry_rbt_less
+  map_tree_greater -> map_rbt_greater
+  map_tree_less -> map_rbt_less
+  map_sorted -> map_rbt_sorted
+  paint_sorted -> paint_rbt_sorted
+  paint_lookup -> paint_rbt_lookup
+  paint_tree_greater -> paint_rbt_greater
+  paint_tree_less -> paint_rbt_less
+  sorted_entries -> rbt_sorted_entries
+  tree_greater_eq_trans -> rbt_greater_eq_trans
+  tree_greater_nit -> rbt_greater_nit
+  tree_greater_prop -> rbt_greater_prop
+  tree_greater_simps -> rbt_greater_simps
+  tree_greater_trans -> rbt_greater_trans
+  tree_less_eq_trans -> rbt_less_eq_trans
+  tree_less_nit -> rbt_less_nit
+  tree_less_prop -> rbt_less_prop
+  tree_less_simps -> rbt_less_simps
+  tree_less_trans -> rbt_less_trans
+  tree_ord_props -> rbt_ord_props
+  union_Branch -> rbt_union_Branch
+  union_is_rbt -> rbt_union_is_rbt
+  unionw_is_rbt -> rbt_unionw_is_rbt
+  unionwk_is_rbt -> rbt_unionwk_is_rbt
+  unionwk_sorted -> rbt_unionwk_rbt_sorted
+
 * Session HOL-Word: Discontinued many redundant theorems specific to
 type 'a word. INCOMPATIBILITY, use the corresponding generic theorems
 instead.
@@ -398,36 +619,45 @@
 produces a rule which can be used to perform case distinction on both
 a list and a nat.
 
-* Improved code generation of multisets.
-
-* New diagnostic command find_unused_assms to find potentially superfluous
-  assumptions in theorems using Quickcheck.
+* Theory Library/Multiset: Improved code generation of multisets.
+
+* New diagnostic command 'find_unused_assms' to find potentially
+superfluous assumptions in theorems using Quickcheck.
 
 * Quickcheck:
+
   - Quickcheck returns variable assignments as counterexamples, which
     makes it possible to reveal the underspecification of functions
     under test.
     For example, refuting "hd xs = x", it presents the variable
     assignment xs = [] and x = a1 as a counterexample, assuming that
     any property is false whenever "hd []" occurs in it.
+
     These counterexamples are marked as potentially spurious, as
     Quickcheck also returns "xs = []" as a counterexample to the
     obvious theorem "hd xs = hd xs".
+
     After finding a potentially spurious counterexample, Quickcheck
     continues searching for genuine ones.
+
     By default, Quickcheck shows potentially spurious and genuine
-    counterexamples. The option "genuine_only" sets quickcheck to
-    only show genuine counterexamples.
+    counterexamples. The option "genuine_only" sets quickcheck to only
+    show genuine counterexamples.
 
   - The command 'quickcheck_generator' creates random and exhaustive
     value generators for a given type and operations.
+
     It generates values by using the operations as if they were
-    constructors of that type. 
+    constructors of that type.
 
   - Support for multisets.
 
   - Added "use_subtype" options.
- 
+
+  - Added "quickcheck_locale" configuration to specify how to process
+    conjectures in a locale context.
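+
+  For example, the invocation from the first item above can be
+  sketched as follows (a minimal goal plus the plain 'quickcheck'
+  command):
+
+    lemma "hd xs = x"
+      quickcheck
+      oops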
+
 * Nitpick:
+
   - Fixed infinite loop caused by the 'peephole_optim' option and
     affecting 'rat' and 'real'.
 
@@ -442,12 +672,15 @@
 
 * SMT:
   - renamed "smt_fixed" option to "smt_read_only_certificates".
- 
+
 * Command 'try0':
   - Renamed from 'try_methods'. INCOMPATIBILITY.
 
 * New "eventually_elim" method as a generalized variant of the
-  eventually_elim* rules. Supports structured proofs.
+eventually_elim* rules. Supports structured proofs.
+
+* HOL/TPTP: support for parsing and importing TPTP problems (all
+languages) into Isabelle/HOL.
 
 
 *** FOL ***
@@ -455,6 +688,16 @@
 * New "case_product" attribute (see HOL).
 
 
+*** ZF ***
+
+* Greater support for structured proofs involving induction or case
+analysis.
+
+* Much greater use of mathematical symbols.
+
+* Removal of many ML theorem bindings.  INCOMPATIBILITY.
+
+
 *** ML ***
 
 * Antiquotation @{keyword "name"} produces a parser for outer syntax
@@ -503,6 +746,15 @@
   delsplits     ~> Splitter.del_split
 
 
+*** System ***
+
+* ISABELLE_JDK_HOME settings variable points to JDK with javac and jar
+(not just JRE).
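+
+For example, a minimal sketch in $ISABELLE_HOME_USER/etc/settings
+(hypothetical installation path):
+
+  ISABELLE_JDK_HOME="/usr/lib/jvm/java-7-oracle"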
+
+* ISABELLE_HOME_WINDOWS refers to ISABELLE_HOME in Windows file name
+notation, which is useful for the jEdit file browser, for example.
+
+
 
 New in Isabelle2011-1 (October 2011)
 ------------------------------------
@@ -1811,13 +2063,6 @@
 * All constant names are now qualified internally and use proper
 identifiers, e.g. "IFOL.eq" instead of "op =".  INCOMPATIBILITY.
 
-*** ZF ***
-
-* Greater support for structured proofs involving induction or case analysis.
-
-* Much greater use of special symbols.
-
-* Removal of many ML theorem bindings. INCOMPATIBILITY.
 
 *** ML ***
 
@@ -4909,7 +5154,7 @@
 * HOL/Nat: neq0_conv no longer declared as iff.  INCOMPATIBILITY.
 
 * HOL-Word: New extensive library and type for generic, fixed size
-machine words, with arithmetic, bit-wise, shifting and rotating
+machine words, with arithmetic, bit-wise, shifting and rotating
 operations, reflection into int, nat, and bool lists, automation for
 linear arithmetic (by automatic reflection into nat or int), including
 lemmas on overflow and monotonicity.  Instantiated to all appropriate
--- a/README	Wed Apr 04 09:59:49 2012 +0200
+++ b/README	Mon Apr 16 19:01:57 2012 +0200
@@ -16,8 +16,8 @@
      * The Poly/ML compiler and runtime system (version 5.2.1 or later).
      * The GNU bash shell (version 3.x or 2.x).
      * Perl (version 5.x).
-     * Java 1.6.x from Oracle or Apple -- for Scala and jEdit.
-     * GNU Emacs (version 23) -- for the Proof General 4.x interface.
+     * Java 1.6.x or 1.7.x from Oracle or Apple -- for Scala and jEdit.
+     * GNU Emacs (version 23 or 24) -- for the Proof General 4.x interface.
      * A complete LaTeX installation -- for document preparation.
 
 Installation
--- a/README_REPOSITORY	Wed Apr 04 09:59:49 2012 +0200
+++ b/README_REPOSITORY	Mon Apr 16 19:01:57 2012 +0200
@@ -32,10 +32,9 @@
 
 Mercurial provides nice web presentation of incoming changes with a
 digest of log entries; this also includes RSS/Atom news feeds.  There
-are add-on browsers, notably hgtk that is part of the TortoiseHg
-distribution and works for generic Python/GTk platforms.  The
-alternative "view" utility helps to inspect the semantic content of
-merge nodes.
+are add-on history browsers such as "hg view" and TortoiseHg.  Unlike
+the default web view, some of these tools help to inspect the semantic
+content of non-trivial merge nodes.
 
 
 Initial configuration
@@ -107,27 +106,27 @@
 time of writing and many years later.
 
 Push access to the Isabelle repository requires an account at TUM,
-with properly configured ssh to the local machines (e.g. macbroy20,
-atbroy100).  You also need to be a member of the "isabelle" Unix
+with properly configured ssh to the local machines (e.g. macbroy20 ..
+macbroy29).  You also need to be a member of the "isabelle" Unix
 group.
 
 Sharing a locally modified clone then works as follows, using your
 user name instead of "wenzelm":
 
-  hg out ssh://wenzelm@atbroy100//home/isabelle-repository/repos/isabelle
+  hg out ssh://wenzelm@macbroy20//home/isabelle-repository/repos/isabelle
 
 In fact, the "out" or "outgoing" command performs only a dry run: it
 displays the changesets that would get published.  An actual "push",
 with a lasting effect on the Isabelle repository, works like this:
 
-  hg push ssh://wenzelm@atbroy100//home/isabelle-repository/repos/isabelle
+  hg push ssh://wenzelm@macbroy20//home/isabelle-repository/repos/isabelle
 
 
 Default paths for push and pull can be configured in
 isabelle/.hg/hgrc, for example:
 
   [paths]
-  default = ssh://wenzelm@atbroy100//home/isabelle-repository/repos/isabelle
+  default = ssh://wenzelm@macbroy20//home/isabelle-repository/repos/isabelle
 
 Now "hg pull" or "hg push" will use that shared file space, unless a
 different URL is specified explicitly.
@@ -136,7 +135,7 @@
 source URL.  So we could have cloned via that ssh URL in the first
 place, to get exactly to the same point:
 
-  hg clone ssh://wenzelm@atbroy100//home/isabelle-repository/repos/isabelle
+  hg clone ssh://wenzelm@macbroy20//home/isabelle-repository/repos/isabelle
 
 
 Simple merges
--- a/doc-src/Dirs	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/Dirs	Mon Apr 16 19:01:57 2012 +0200
@@ -1,1 +1,1 @@
-Intro Ref System Logics HOL ZF Inductive TutorialI IsarOverview IsarRef IsarImplementation Locales LaTeXsugar Classes Codegen Functions Nitpick Main Sledgehammer ProgProve
+Intro Ref System Logics HOL ZF Inductive TutorialI IsarRef IsarImplementation Locales LaTeXsugar Classes Codegen Functions Nitpick Main Sledgehammer ProgProve
--- a/doc-src/IsarOverview/IsaMakefile	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-## targets
-
-default: Isar
-
-## global settings
-
-SRC = $(ISABELLE_HOME)/src
-OUT = $(ISABELLE_OUTPUT)
-LOG = $(OUT)/log
-USEDIR = $(ISABELLE_TOOL) usedir -i false -g false -d false -D document -v true
-
-
-## Isar
-
-Isar: $(LOG)/HOL-Isar.gz
-
-$(LOG)/HOL-Isar.gz: Isar/ROOT.ML Isar/document/intro.tex \
-  Isar/document/root.tex Isar/document/root.bib Isar/*.thy
-	@$(USEDIR) HOL Isar
-	@rm -f Isar/document/isabelle.sty Isar/document/isabellesym.sty \
-	 Isar/document/pdfsetup.sty Isar/document/session.tex
-
-
-## clean
-
-clean:
-	@rm -f $(LOG)/HOL-Isar.gz
-
--- a/doc-src/IsarOverview/Isar/Induction.thy	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,357 +0,0 @@
-(*<*)theory Induction imports Main begin
-fun itrev where
-"itrev [] ys = ys" |
-"itrev (x#xs) ys = itrev xs (x#ys)"
-(*>*)
-
-section{*Case distinction and induction \label{sec:Induct}*}
-
-text{* Computer science applications abound with inductively defined
-structures, which is why we treat them in more detail. HOL already
-comes with a datatype of lists with the two constructors @{text Nil}
-and @{text Cons}. @{text Nil} is written @{term"[]"} and @{text"Cons x
-xs"} is written @{term"x # xs"}.  *}
-
-subsection{*Case distinction\label{sec:CaseDistinction}*}
-
-text{* We have already met the @{text cases} method for performing
-binary case splits. Here is another example: *}
-lemma "\<not> A \<or> A"
-proof cases
-  assume "A" thus ?thesis ..
-next
-  assume "\<not> A" thus ?thesis ..
-qed
-
-text{*\noindent The two cases must come in this order because @{text
-cases} merely abbreviates @{text"(rule case_split)"} where
-@{thm[source] case_split} is @{thm case_split}. If we reverse
-the order of the two cases in the proof, the first case would prove
-@{prop"\<not> A \<Longrightarrow> \<not> A \<or> A"} which would solve the first premise of
-@{thm[source] case_split}, instantiating @{text ?P} with @{term "\<not>
-A"}, thus making the second premise @{prop"\<not> \<not> A \<Longrightarrow> \<not> A \<or> A"}.
-Therefore the order of subgoals is not always completely arbitrary.
-
-The above proof is appropriate if @{term A} is textually small.
-However, if @{term A} is large, we do not want to repeat it. This can
-be avoided by the following idiom *}
-
-lemma "\<not> A \<or> A"
-proof (cases "A")
-  case True thus ?thesis ..
-next
-  case False thus ?thesis ..
-qed
-
-text{*\noindent which is like the previous proof but instantiates
-@{text ?P} right away with @{term A}. Thus we could prove the two
-cases in any order. The phrase \isakeyword{case}~@{text True}
-abbreviates \isakeyword{assume}~@{text"True: A"} and analogously for
-@{text"False"} and @{prop"\<not>A"}.
-
-The same game can be played with other datatypes, for example lists,
-where @{term tl} is the tail of a list, and @{text length} returns a
-natural number (remember: $0-1=0$):
-*}
-(*<*)declare length_tl[simp del](*>*)
-lemma "length(tl xs) = length xs - 1"
-proof (cases xs)
-  case Nil thus ?thesis by simp
-next
-  case Cons thus ?thesis by simp
-qed
-text{*\noindent Here \isakeyword{case}~@{text Nil} abbreviates
-\isakeyword{assume}~@{text"Nil:"}~@{prop"xs = []"} and
-\isakeyword{case}~@{text Cons} abbreviates \isakeyword{fix}~@{text"? ??"}
-\isakeyword{assume}~@{text"Cons:"}~@{text"xs = ? # ??"},
-where @{text"?"} and @{text"??"}
-stand for variable names that have been chosen by the system.
-Therefore we cannot refer to them.
-Luckily, this proof is simple enough we do not need to refer to them.
-However, sometimes one may have to. Hence Isar offers a simple scheme for
-naming those variables: replace the anonymous @{text Cons} by
-@{text"(Cons y ys)"}, which abbreviates \isakeyword{fix}~@{text"y ys"}
-\isakeyword{assume}~@{text"Cons:"}~@{text"xs = y # ys"}.
-In each \isakeyword{case} the assumption can be
-referred to inside the proof by the name of the constructor. In
-Section~\ref{sec:full-Ind} below we will come across an example
-of this.
-
-\subsection{Structural induction}
-
-We start with an inductive proof where both cases are proved automatically: *}
-lemma "2 * (\<Sum>i::nat\<le>n. i) = n*(n+1)"
-by (induct n) simp_all
-
-text{*\noindent The constraint @{text"::nat"} is needed because all of
-the operations involved are overloaded.
-This proof also demonstrates that \isakeyword{by} can take two arguments,
-one to start and one to finish the proof --- the latter is optional.
-
-If we want to expose more of the structure of the
-proof, we can use pattern matching to avoid having to repeat the goal
-statement: *}
-lemma "2 * (\<Sum>i::nat\<le>n. i) = n*(n+1)" (is "?P n")
-proof (induct n)
-  show "?P 0" by simp
-next
-  fix n assume "?P n"
-  thus "?P(Suc n)" by simp
-qed
-
-text{* \noindent We could refine this further to show more of the equational
-proof. Instead we explore the same avenue as for case distinctions:
-introducing context via the \isakeyword{case} command: *}
-lemma "2 * (\<Sum>i::nat \<le> n. i) = n*(n+1)"
-proof (induct n)
-  case 0 show ?case by simp
-next
-  case Suc thus ?case by simp
-qed
-
-text{* \noindent The implicitly defined @{text ?case} refers to the
-corresponding case to be proved, i.e.\ @{text"?P 0"} in the first case and
-@{text"?P(Suc n)"} in the second case. Context \isakeyword{case}~@{text 0} is
-empty whereas \isakeyword{case}~@{text Suc} assumes @{text"?P n"}. Again we
-have the same problem as with case distinctions: we cannot refer to an anonymous @{term n}
-in the induction step because it has not been introduced via \isakeyword{fix}
-(in contrast to the previous proof). The solution is the one outlined for
-@{text Cons} above: replace @{term Suc} by @{text"(Suc i)"}: *}
-lemma fixes n::nat shows "n < n*n + 1"
-proof (induct n)
-  case 0 show ?case by simp
-next
-  case (Suc i) thus "Suc i < Suc i * Suc i + 1" by simp
-qed
-
-text{* \noindent Of course we could again have written
-\isakeyword{thus}~@{text ?case} instead of giving the term explicitly
-but we wanted to use @{term i} somewhere.
-
-\subsection{Generalization via @{text arbitrary}}
-
-It is frequently necessary to generalize a claim before it becomes
-provable by induction. The tutorial~\cite{LNCS2283} demonstrates this
-with @{prop"itrev xs ys = rev xs @ ys"}, where @{text ys}
-needs to be universally quantified before induction succeeds.\footnote{@{thm rev.simps(1)},\quad @{thm rev.simps(2)[no_vars]},\\ @{thm itrev.simps(1)[no_vars]},\quad @{thm itrev.simps(2)[no_vars]}} But
-strictly speaking, this quantification step is already part of the
-proof and the quantifiers should not clutter the original claim. This
-is how the quantification step can be combined with induction: *}
-lemma "itrev xs ys = rev xs @ ys"
-by (induct xs arbitrary: ys) simp_all
-text{*\noindent The annotation @{text"arbitrary:"}~\emph{vars}
-universally quantifies all \emph{vars} before the induction.  Hence
-they can be replaced by \emph{arbitrary} values in the proof.
-
-Generalization via @{text"arbitrary"} is particularly convenient
-if the induction step is a structured proof as opposed to the automatic
-example above. Then the claim is available in unquantified form but
-with the generalized variables replaced by @{text"?"}-variables, ready
-for instantiation. In the above example, in the @{const[source] Cons} case the
-induction hypothesis is @{text"itrev xs ?ys = rev xs @ ?ys"} (available
-under the name @{const[source] Cons}).
-
-
-\subsection{Inductive proofs of conditional formulae}
-\label{sec:full-Ind}
-
-Induction also copes well with formulae involving @{text"\<Longrightarrow>"}, for example
-*}
-
-lemma "xs \<noteq> [] \<Longrightarrow> hd(rev xs) = last xs"
-by (induct xs) simp_all
-
-text{*\noindent This is an improvement over that style the
-tutorial~\cite{LNCS2283} advises, which requires @{text"\<longrightarrow>"}.
-A further improvement is shown in the following proof:
-*}
-
-lemma  "map f xs = map f ys \<Longrightarrow> length xs = length ys"
-proof (induct ys arbitrary: xs)
-  case Nil thus ?case by simp
-next
-  case (Cons y ys)  note Asm = Cons
-  show ?case
-  proof (cases xs)
-    case Nil
-    hence False using Asm(2) by simp
-    thus ?thesis ..
-  next
-    case (Cons x xs')
-    with Asm(2) have "map f xs' = map f ys" by simp
-    from Asm(1)[OF this] `xs = x#xs'` show ?thesis by simp
-  qed
-qed
-
-text{*\noindent
-The base case is trivial. In the step case Isar assumes
-(under the name @{text Cons}) two propositions:
-\begin{center}
-\begin{tabular}{l}
-@{text"map f ?xs = map f ys \<Longrightarrow> length ?xs = length ys"}\\
-@{prop"map f xs = map f (y # ys)"}
-\end{tabular}
-\end{center}
-The first is the induction hypothesis, the second, and this is new,
-is the premise of the induction step. The actual goal at this point is merely
-@{prop"length xs = length (y#ys)"}. The assumptions are given the new name
-@{text Asm} to avoid a name clash further down. The proof procedes with a case distinction on @{text xs}. In the case @{prop"xs = []"}, the second of our two
-assumptions (@{text"Asm(2)"}) implies the contradiction @{text"0 = Suc(\<dots>)"}.
- In the case @{prop"xs = x#xs'"}, we first obtain
-@{prop"map f xs' = map f ys"}, from which a forward step with the first assumption (@{text"Asm(1)[OF this]"}) yields @{prop"length xs' = length ys"}. Together
-with @{prop"xs = x#xs"} this yields the goal
-@{prop"length xs = length (y#ys)"}.
-
-
-\subsection{Induction formulae involving @{text"\<And>"} or @{text"\<Longrightarrow>"}}
-
-Let us now consider abstractly the situation where the goal to be proved
-contains both @{text"\<And>"} and @{text"\<Longrightarrow>"}, say @{prop"\<And>x. P x \<Longrightarrow> Q x"}.
-This means that in each case of the induction,
-@{text ?case} would be of the form @{prop"\<And>x. P' x \<Longrightarrow> Q' x"}.  Thus the
-first proof steps will be the canonical ones, fixing @{text x} and assuming
-@{prop"P' x"}. To avoid this tedium, induction performs the canonical steps
-automatically: in each step case, the assumptions contain both the
-usual induction hypothesis and @{prop"P' x"}, whereas @{text ?case} is only
-@{prop"Q' x"}.
-
-\subsection{Rule induction}
-
-HOL also supports inductively defined sets. See \cite{LNCS2283}
-for details. As an example we define our own version of the reflexive
-transitive closure of a relation --- HOL provides a predefined one as well.*}
-inductive_set
-  rtc :: "('a \<times> 'a)set \<Rightarrow> ('a \<times> 'a)set"   ("_*" [1000] 999)
-  for r :: "('a \<times> 'a)set"
-where
-  refl:  "(x,x) \<in> r*"
-| step:  "\<lbrakk> (x,y) \<in> r; (y,z) \<in> r* \<rbrakk> \<Longrightarrow> (x,z) \<in> r*"
-
-text{* \noindent
-First the constant is declared as a function on binary
-relations (with concrete syntax @{term"r*"} instead of @{text"rtc
-r"}), then the defining clauses are given. We will now prove that
-@{term"r*"} is indeed transitive: *}
-
-lemma assumes A: "(x,y) \<in> r*" shows "(y,z) \<in> r* \<Longrightarrow> (x,z) \<in> r*"
-using A
-proof induct
-  case refl thus ?case .
-next
-  case step thus ?case by(blast intro: rtc.step)
-qed
-text{*\noindent Rule induction is triggered by a fact $(x_1,\dots,x_n)
-\in R$ piped into the proof, here \isakeyword{using}~@{text A}. The
-proof itself follows the inductive definition very
-closely: there is one case for each rule, and it has the same name as
-the rule, analogous to structural induction.
-
-However, this proof is rather terse. Here is a more readable version:
-*}
-
-lemma assumes "(x,y) \<in> r*" and "(y,z) \<in> r*" shows "(x,z) \<in> r*"
-using assms
-proof induct
-  fix x assume "(x,z) \<in> r*"  -- {*@{text B}[@{text y} := @{text x}]*}
-  thus "(x,z) \<in> r*" .
-next
-  fix x' x y
-  assume 1: "(x',x) \<in> r" and
-         IH: "(y,z) \<in> r* \<Longrightarrow> (x,z) \<in> r*" and
-         B:  "(y,z) \<in> r*"
-  from 1 IH[OF B] show "(x',z) \<in> r*" by(rule rtc.step)
-qed
-text{*\noindent
-This time, merely for a change, we start the proof with by feeding both
-assumptions into the inductive proof. Only the first assumption is
-``consumed'' by the induction.
-Since the second one is left over we don't just prove @{text ?thesis} but
-@{text"(y,z) \<in> r* \<Longrightarrow> ?thesis"}, just as in the previous proof.
-The base case is trivial. In the assumptions for the induction step we can
-see very clearly how things fit together and permit ourselves the
-obvious forward step @{text"IH[OF B]"}.
-
-The notation \isakeyword{case}~\isa{(}\emph{constructor} \emph{vars}\isa{)}
-is also supported for inductive definitions. The \emph{constructor} is the
-name of the rule and the \emph{vars} fix the free variables in the
-rule; the order of the \emph{vars} must correspond to the
-left-to-right order of the variables as they appear in the rule.
-For example, we could start the above detailed proof of the induction
-with \isakeyword{case}~\isa{(step x' x y)}. In that case we don't need
-to spell out the assumptions but can refer to them by @{text"step(.)"},
-although the resulting text will be quite cryptic.
-
-\subsection{More induction}
-
-We close the section by demonstrating how arbitrary induction
-rules are applied. As a simple example we have chosen recursion
-induction, i.e.\ induction based on a recursive function
-definition. However, most of what we show works for induction in
-general.
-
-The example is an unusual definition of rotation: *}
-
-fun rot :: "'a list \<Rightarrow> 'a list" where
-"rot [] = []" |
-"rot [x] = [x]" |
-"rot (x#y#zs) = y # rot(x#zs)"
-text{*\noindent This yields, among other things, the induction rule
-@{thm[source]rot.induct}: @{thm[display]rot.induct[no_vars]}
-The following proof relies on a default naming scheme for cases: they are
-called 1, 2, etc, unless they have been named explicitly. The latter happens
-only with datatypes and inductively defined sets, but (usually)
-not with recursive functions. *}
-
-lemma "xs \<noteq> [] \<Longrightarrow> rot xs = tl xs @ [hd xs]"
-proof (induct xs rule: rot.induct)
-  case 1 thus ?case by simp
-next
-  case 2 show ?case by simp
-next
-  case (3 a b cs)
-  have "rot (a # b # cs) = b # rot(a # cs)" by simp
-  also have "\<dots> = b # tl(a # cs) @ [hd(a # cs)]" by(simp add:3)
-  also have "\<dots> = tl (a # b # cs) @ [hd (a # b # cs)]" by simp
-  finally show ?case .
-qed
-
-text{*\noindent
-The third case is only shown in gory detail (see \cite{BauerW-TPHOLs01}
-for how to reason with chains of equations) to demonstrate that the
-\isakeyword{case}~\isa{(}\emph{constructor} \emph{vars}\isa{)} notation also
-works for arbitrary induction theorems with numbered cases. The order
-of the \emph{vars} corresponds to the order of the
-@{text"\<And>"}-quantified variables in each case of the induction
-theorem. For induction theorems produced by \isakeyword{fun} it is
-the order in which the variables appear on the left-hand side of the
-equation.
-
-The proof is so simple that it can be condensed to
-*}
-
-(*<*)lemma "xs \<noteq> [] \<Longrightarrow> rot xs = tl xs @ [hd xs]"(*>*)
-by (induct xs rule: rot.induct) simp_all
-
-(*<*)end(*>*)
-(*
-lemma assumes A: "(\<And>n. (\<And>m. m < n \<Longrightarrow> P m) \<Longrightarrow> P n)"
-  shows "P(n::nat)"
-proof (rule A)
-  show "\<And>m. m < n \<Longrightarrow> P m"
-  proof (induct n)
-    case 0 thus ?case by simp
-  next
-    case (Suc n)   -- {*\isakeyword{fix} @{term m} \isakeyword{assume} @{text Suc}: @{text[source]"?m < n \<Longrightarrow> P ?m"} @{prop[source]"m < Suc n"}*}
-    show ?case    -- {*@{term ?case}*}
-    proof cases
-      assume eq: "m = n"
-      from Suc and A have "P n" by blast
-      with eq show "P m" by simp
-    next
-      assume "m \<noteq> n"
-      with Suc have "m < n" by arith
-      thus "P m" by(rule Suc)
-    qed
-  qed
-qed
-*)
\ No newline at end of file
--- a/doc-src/IsarOverview/Isar/Logic.thy	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,714 +0,0 @@
-(*<*)theory Logic imports Main begin(*>*)
-
-section{*Logic \label{sec:Logic}*}
-
-subsection{*Propositional logic*}
-
-subsubsection{*Introduction rules*}
-
-text{* We start with a really trivial toy proof to introduce the basic
-features of structured proofs. *}
-lemma "A \<longrightarrow> A"
-proof (rule impI)
-  assume a: "A"
-  show "A" by(rule a)
-qed
-text{*\noindent
-The operational reading: the \isakeyword{assume}-\isakeyword{show}
-block proves @{prop"A \<Longrightarrow> A"} (@{term a} is a degenerate rule (no
-assumptions) that proves @{term A} outright), which rule
-@{thm[source]impI} (@{thm impI}) turns into the desired @{prop"A \<longrightarrow>
-A"}.  However, this text is much too detailed for comfort. Therefore
-Isar implements the following principle: \begin{quote}\em Command
-\isakeyword{proof} automatically tries to select an introduction rule
-based on the goal and a predefined list of rules.  \end{quote} Here
-@{thm[source]impI} is applied automatically: *}
-
-lemma "A \<longrightarrow> A"
-proof
-  assume a: A
-  show A by(rule a)
-qed
-
-text{*\noindent As you see above, single-identifier formulae such as @{term A}
-need not be enclosed in double quotes. However, we will continue to do so for
-uniformity.
-
-Instead of applying fact @{text a} via the @{text rule} method, we can
-also push it directly onto our goal.  The proof is then immediate,
-which is formally written as ``.'' in Isar: *}
-lemma "A \<longrightarrow> A"
-proof
-  assume a: "A"
-  from a show "A" .
-qed
-
-text{* We can also push several facts towards a goal, and put another
-rule in between to establish some result that is one step further
-removed.  We illustrate this by introducing a trivial conjunction: *}
-lemma "A \<longrightarrow> A \<and> A"
-proof
-  assume a: "A"
-  from a and a show "A \<and> A" by(rule conjI)
-qed
-text{*\noindent Rule @{thm[source]conjI} is of course @{thm conjI}.
-
-Proofs of the form \isakeyword{by}@{text"(rule"}~\emph{name}@{text")"}
-can be abbreviated to ``..'' if \emph{name} refers to one of the
-predefined introduction rules (or elimination rules, see below): *}
-
-lemma "A \<longrightarrow> A \<and> A"
-proof
-  assume a: "A"
-  from a and a show "A \<and> A" ..
-qed
-text{*\noindent
-This is what happens: first the matching introduction rule @{thm[source]conjI}
-is applied (first ``.''), the remaining problem is solved immediately (second ``.''). *}
-
-subsubsection{*Elimination rules*}
-
-text{*A typical elimination rule is @{thm[source]conjE}, $\land$-elimination:
-@{thm[display,indent=5]conjE}  In the following proof it is applied
-by hand, after its first (\emph{major}) premise has been eliminated via
-@{text"[OF ab]"}: *}
-lemma "A \<and> B \<longrightarrow> B \<and> A"
-proof
-  assume ab: "A \<and> B"
-  show "B \<and> A"
-  proof (rule conjE[OF ab])  -- {*@{text"conjE[OF ab]"}: @{thm conjE[OF ab]} *}
-    assume a: "A" and b: "B"
-    from b and a show ?thesis ..
-  qed
-qed
-text{*\noindent Note that the term @{text"?thesis"} always stands for the
-``current goal'', i.e.\ the enclosing \isakeyword{show} (or
-\isakeyword{have}) statement.
-
-This is too much proof text. Elimination rules should be selected
-automatically based on their major premise, the formula or rather connective
-to be eliminated. In Isar they are triggered by facts being fed
-\emph{into} a proof. Syntax:
-\begin{center}
-\isakeyword{from} \emph{fact} \isakeyword{show} \emph{proposition} \emph{proof}
-\end{center}
-where \emph{fact} stands for the name of a previously proved
-proposition, e.g.\ an assumption, an intermediate result or some global
-theorem, which may also be modified with @{text OF} etc.
-The \emph{fact} is ``piped'' into the \emph{proof}, which can deal with it
-how it chooses. If the \emph{proof} starts with a plain \isakeyword{proof},
-an elimination rule (from a predefined list) is applied
-whose first premise is solved by the \emph{fact}. Thus the proof above
-is equivalent to the following one: *}
-
-lemma "A \<and> B \<longrightarrow> B \<and> A"
-proof
-  assume ab: "A \<and> B"
-  from ab show "B \<and> A"
-  proof
-    assume a: "A" and b: "B"
-    from b and a show ?thesis ..
-  qed
-qed
-
-text{* Now we come to a second important principle:
-\begin{quote}\em
-Try to arrange the sequence of propositions in a UNIX-like pipe,
-such that the proof of each proposition builds on the previous proposition.
-\end{quote}
-The previous proposition can be referred to via the fact @{text this}.
-This greatly reduces the need for explicit naming of propositions.  We also
-rearrange the additional inner assumptions into proper order for immediate use:
-*}
-lemma "A \<and> B \<longrightarrow> B \<and> A"
-proof
-  assume "A \<and> B"
-  from this show "B \<and> A"
-  proof
-    assume "B" "A"
-    from this show ?thesis ..
-  qed
-qed
-
-text{*\noindent Because of the frequency of \isakeyword{from}~@{text
-this}, Isar provides two abbreviations:
-\begin{center}
-\begin{tabular}{r@ {\quad=\quad}l}
-\isakeyword{then} & \isakeyword{from} @{text this} \\
-\isakeyword{thus} & \isakeyword{then} \isakeyword{show}
-\end{tabular}
-\end{center}
-
-Here is an alternative proof that operates purely by forward reasoning: *}
-lemma "A \<and> B \<longrightarrow> B \<and> A"
-proof
-  assume ab: "A \<and> B"
-  from ab have a: "A" ..
-  from ab have b: "B" ..
-  from b a show "B \<and> A" ..
-qed
-
-text{*\noindent It is worth examining this text in detail because it
-exhibits a number of new concepts.  For a start, it is the first time
-we have proved intermediate propositions (\isakeyword{have}) on the
-way to the final \isakeyword{show}. This is the norm in nontrivial
-proofs where one cannot bridge the gap between the assumptions and the
-conclusion in one step. To understand how the proof works we need to
-explain more Isar details:
-\begin{itemize}
-\item
-Method @{text rule} can be given a list of rules, in which case
-@{text"(rule"}~\textit{rules}@{text")"} applies the first matching
-rule in the list \textit{rules}.
-\item Command \isakeyword{from} can be
-followed by any number of facts.  Given \isakeyword{from}~@{text
-f}$_1$~\dots~@{text f}$_n$, the proof step
-@{text"(rule"}~\textit{rules}@{text")"} following a \isakeyword{have}
-or \isakeyword{show} searches \textit{rules} for a rule whose first
-$n$ premises can be proved by @{text f}$_1$~\dots~@{text f}$_n$ in the
-given order.
-\item ``..'' is short for
-@{text"by(rule"}~\textit{elim-rules intro-rules}@{text")"}\footnote{or
-merely @{text"(rule"}~\textit{intro-rules}@{text")"} if there are no facts
-fed into the proof}, where \textit{elim-rules} and \textit{intro-rules}
-are the predefined elimination and introduction rule. Thus
-elimination rules are tried first (if there are incoming facts).
-\end{itemize}
-Hence in the above proof both \isakeyword{have}s are proved via
-@{thm[source]conjE} triggered by \isakeyword{from}~@{text ab} whereas
-in the \isakeyword{show} step no elimination rule is applicable and
-the proof succeeds with @{thm[source]conjI}. The latter would fail had
-we written \isakeyword{from}~@{text"a b"} instead of
-\isakeyword{from}~@{text"b a"}.
-
-A plain \isakeyword{proof} with no argument is short for
-\isakeyword{proof}~@{text"(rule"}~\textit{elim-rules intro-rules}@{text")"}\footnotemark[1].
-This means that the matching rule is selected by the incoming facts and the goal exactly as just explained.
-
-Although we have only seen a few introduction and elimination rules so
-far, Isar's predefined rules include all the usual natural deduction
-rules. We conclude our exposition of propositional logic with an extended
-example --- which rules are used implicitly where? *}
-lemma "\<not> (A \<and> B) \<longrightarrow> \<not> A \<or> \<not> B"
-proof
-  assume n: "\<not> (A \<and> B)"
-  show "\<not> A \<or> \<not> B"
-  proof (rule ccontr)
-    assume nn: "\<not> (\<not> A \<or> \<not> B)"
-    have "\<not> A"
-    proof
-      assume a: "A"
-      have "\<not> B"
-      proof
-        assume b: "B"
-        from a and b have "A \<and> B" ..
-        with n show False ..
-      qed
-      hence "\<not> A \<or> \<not> B" ..
-      with nn show False ..
-    qed
-    hence "\<not> A \<or> \<not> B" ..
-    with nn show False ..
-  qed
-qed
-text{*\noindent
-Rule @{thm[source]ccontr} (``classical contradiction'') is
-@{thm ccontr[no_vars]}.
-Apart from demonstrating the strangeness of classical
-arguments by contradiction, this example also introduces two new
-abbreviations:
-\begin{center}
-\begin{tabular}{l@ {\quad=\quad}l}
-\isakeyword{hence} & \isakeyword{then} \isakeyword{have} \\
-\isakeyword{with}~\emph{facts} &
-\isakeyword{from}~\emph{facts} @{text this}
-\end{tabular}
-\end{center}
-*}
-
-
-subsection{*Avoiding duplication*}
-
-text{* So far our examples have been a bit unnatural: normally we want to
-prove rules expressed with @{text"\<Longrightarrow>"}, not @{text"\<longrightarrow>"}. Here is an example:
-*}
-lemma "A \<and> B \<Longrightarrow> B \<and> A"
-proof
-  assume "A \<and> B" thus "B" ..
-next
-  assume "A \<and> B" thus "A" ..
-qed
-text{*\noindent The \isakeyword{proof} always works on the conclusion,
-@{prop"B \<and> A"} in our case, thus selecting $\land$-introduction. Hence
-we must show @{prop B} and @{prop A}; both are proved by
-$\land$-elimination and the proofs are separated by \isakeyword{next}:
-\begin{description}
-\item[\isakeyword{next}] deals with multiple subgoals. For example,
-when showing @{term"A \<and> B"} we need to show both @{term A} and @{term
-B}.  Each subgoal is proved separately, in \emph{any} order. The
-individual proofs are separated by \isakeyword{next}.  \footnote{Each
-\isakeyword{show} must prove one of the pending subgoals.  If a
-\isakeyword{show} matches multiple subgoals, e.g.\ if the subgoals
-contain ?-variables, the first one is proved. Thus the order in which
-the subgoals are proved can matter --- see
-\S\ref{sec:CaseDistinction} for an example.}
-
-Strictly speaking \isakeyword{next} is only required if the subgoals
-are proved in different assumption contexts which need to be
-separated, which is not the case above. For clarity we
-have employed \isakeyword{next} anyway and will continue to do so.
-\end{description}
-
-This is all very well as long as formulae are small. Let us now look at some
-devices to avoid repeating (possibly large) formulae. A very general method
-is pattern matching: *}
-
-lemma "large_A \<and> large_B \<Longrightarrow> large_B \<and> large_A"
-      (is "?AB \<Longrightarrow> ?B \<and> ?A")
-proof
-  assume "?AB" thus "?B" ..
-next
-  assume "?AB" thus "?A" ..
-qed
-text{*\noindent Any formula may be followed by
-@{text"("}\isakeyword{is}~\emph{pattern}@{text")"} which causes the pattern
-to be matched against the formula, instantiating the @{text"?"}-variables in
-the pattern. Subsequent uses of these variables in other terms cause
-them to be replaced by the terms they stand for.
-
-We can simplify things even more by stating the theorem by means of the
-\isakeyword{assumes} and \isakeyword{shows} elements which allow direct
-naming of assumptions: *}
-
-lemma assumes ab: "large_A \<and> large_B"
-  shows "large_B \<and> large_A" (is "?B \<and> ?A")
-proof
-  from ab show "?B" ..
-next
-  from ab show "?A" ..
-qed
-text{*\noindent Note the difference between @{text ?AB}, a term, and
-@{text ab}, a fact.
-
-Finally we want to start the proof with $\land$-elimination so we
-don't have to perform it twice, as above. Here is a slick way to
-achieve this: *}
-
-lemma assumes ab: "large_A \<and> large_B"
-  shows "large_B \<and> large_A" (is "?B \<and> ?A")
-using ab
-proof
-  assume "?B" "?A" thus ?thesis ..
-qed
-text{*\noindent Command \isakeyword{using} can appear before a proof
-and adds further facts to those piped into the proof. Here @{text ab}
-is the only such fact and it triggers $\land$-elimination. Another
-frequent idiom is as follows:
-\begin{center}
-\isakeyword{from} \emph{major-facts}~
-\isakeyword{show} \emph{proposition}~
-\isakeyword{using} \emph{minor-facts}~
-\emph{proof}
-\end{center}
-
-Sometimes it is necessary to suppress the implicit application of rules in a
-\isakeyword{proof}. For example \isakeyword{show(s)}~@{prop[source]"P \<or> Q"}
-would trigger $\lor$-introduction, requiring us to prove @{prop P}, which may
-not be what we had in mind.
-A simple ``@{text"-"}'' prevents this \emph{faux pas}: *}
-
-lemma assumes ab: "A \<or> B" shows "B \<or> A"
-proof -
-  from ab show ?thesis
-  proof
-    assume A thus ?thesis ..
-  next
-    assume B thus ?thesis ..
-  qed
-qed
-text{*\noindent Alternatively one can feed @{prop"A \<or> B"} directly
-into the proof, thus triggering the elimination rule: *}
-lemma assumes ab: "A \<or> B" shows "B \<or> A"
-using ab
-proof
-  assume A thus ?thesis ..
-next
-  assume B thus ?thesis ..
-qed
-text{* \noindent Remember that eliminations have priority over
-introductions.
-
-\subsection{Avoiding names}
-
-Too many names can easily clutter a proof.  We already learned
-about @{text this} as a means of avoiding explicit names. Another
-handy device is to refer to a fact not by name but by contents: for
-example, writing @{text "`A \<or> B`"} (enclosing the formula in back quotes)
-refers to the fact @{text"A \<or> B"}
-without the need to name it. Here is a simple example, a revised version
-of the previous proof *}
-
-lemma assumes "A \<or> B" shows "B \<or> A"
-using `A \<or> B`
-(*<*)oops(*>*)
-text{*\noindent which continues as before.
-
-Clearly, this device of quoting facts by contents is only advisable
-for small formulae. In such cases it is superior to naming because the
-reader immediately sees what the fact is without needing to search for
-it in the preceding proof text.
-
-The assumptions of a lemma can also be referred to via their
-predefined name @{text assms}. Hence the @{text"`A \<or> B`"} in the
-previous proof can also be replaced by @{text assms}. Note that @{text
-assms} refers to the list of \emph{all} assumptions. To pick out a
-specific one, say the second, write @{text"assms(2)"}.
-
-This indexing notation $name(.)$ works for any $name$ that stands for
-a list of facts, for example $f$@{text".simps"}, the equations of the
-recursively defined function $f$. You may also select sublists by writing
-$name(2-3)$.
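-
-For instance, replacing the back-quoted formula in the previous lemma by
-@{text assms} and spelling out the rest of the proof yields this variant:
-*}
-lemma assumes "A \<or> B" shows "B \<or> A"
-using assms
-proof
-  assume A thus ?thesis ..
-next
-  assume B thus ?thesis ..
-qed
-
-text{*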
-
-Above we recommended the UNIX-pipe model (i.e. @{text this}) to avoid
-the need to name propositions. But frequently we needed to feed more
-than one previously derived fact into a proof step. Then the UNIX-pipe
-model appears to break down and we need to name the different facts to
-refer to them. But this can be avoided: *}
-lemma assumes "A \<and> B" shows "B \<and> A"
-proof -
-  from `A \<and> B` have "B" ..
-  moreover
-  from `A \<and> B` have "A" ..
-  ultimately show "B \<and> A" ..
-qed
-text{*\noindent You can combine any number of facts @{term A1} \dots\ @{term
-An} into a sequence by separating their proofs with
-\isakeyword{moreover}. After the final fact, \isakeyword{ultimately} stands
-for \isakeyword{from}~@{term A1}~\dots~@{term An}.  This avoids having to
-introduce names for all of the sequence elements.
-
-
-\subsection{Predicate calculus}
-
-Command \isakeyword{fix} introduces new local variables into a
-proof. The pair \isakeyword{fix}-\isakeyword{show} corresponds to @{text"\<And>"}
-(the universal quantifier at the
-meta-level) just like \isakeyword{assume}-\isakeyword{show} corresponds to
-@{text"\<Longrightarrow>"}. Here is a sample proof, annotated with the rules that are
-applied implicitly: *}
-lemma assumes P: "\<forall>x. P x" shows "\<forall>x. P(f x)"
-proof                       --"@{thm[source]allI}: @{thm allI}"
-  fix a
-  from P show "P(f a)" ..  --"@{thm[source]allE}: @{thm allE}"
-qed
-text{*\noindent Note that in the proof we have chosen to call the bound
-variable @{term a} instead of @{term x} merely to show that the choice of
-local names is irrelevant.
-
-Next we look at @{text"\<exists>"} which is a bit more tricky.
-*}
-
-lemma assumes Pf: "\<exists>x. P(f x)" shows "\<exists>y. P y"
-proof -
-  from Pf show ?thesis
-  proof              -- "@{thm[source]exE}: @{thm exE}"
-    fix x
-    assume "P(f x)"
-    thus ?thesis ..  -- "@{thm[source]exI}: @{thm exI}"
-  qed
-qed
-text{*\noindent Explicit $\exists$-elimination as seen above can become
-cumbersome in practice.  The derived Isar language element
-\isakeyword{obtain} provides a more appealing form of generalised
-existence reasoning: *}
-
-lemma assumes Pf: "\<exists>x. P(f x)" shows "\<exists>y. P y"
-proof -
-  from Pf obtain x where "P(f x)" ..
-  thus "\<exists>y. P y" ..
-qed
-text{*\noindent Note how the proof text follows the usual mathematical style
-of concluding $P(x)$ from $\exists x. P(x)$, while carefully introducing $x$
-as a new local variable.  Technically, \isakeyword{obtain} is similar to
-\isakeyword{fix} and \isakeyword{assume} together with a soundness proof of
-the elimination involved.
-
-Here is a proof of a well-known tautology.
-Which rule is used where?  *}
-
-lemma assumes ex: "\<exists>x. \<forall>y. P x y" shows "\<forall>y. \<exists>x. P x y"
-proof
-  fix y
-  from ex obtain x where "\<forall>y. P x y" ..
-  hence "P x y" ..
-  thus "\<exists>x. P x y" ..
-qed
-
-subsection{*Making bigger steps*}
-
-text{* So far we have confined ourselves to single-step proofs. Of course
-powerful automatic methods can be used just as well. Here is an example,
-Cantor's theorem that there is no surjective function from a set to its
-powerset: *}
-theorem "\<exists>S. S \<notin> range (f :: 'a \<Rightarrow> 'a set)"
-proof
-  let ?S = "{x. x \<notin> f x}"
-  show "?S \<notin> range f"
-  proof
-    assume "?S \<in> range f"
-    then obtain y where "?S = f y" ..
-    show False
-    proof cases
-      assume "y \<in> ?S"
-      with `?S = f y` show False by blast
-    next
-      assume "y \<notin> ?S"
-      with `?S = f y` show False by blast
-    qed
-  qed
-qed
-text{*\noindent
-For a start, the example demonstrates two new constructs:
-\begin{itemize}
-\item \isakeyword{let} introduces an abbreviation for a term, in our case
-the witness for the claim.
-\item Proof by @{text"cases"} starts a proof by cases. Note that it remains
-implicit what the two cases are: it is merely expected that the two subproofs
-prove @{text"P \<Longrightarrow> ?thesis"} and @{text"\<not>P \<Longrightarrow> ?thesis"} (in that order)
-for some @{term P}.
-\end{itemize}
-If you wonder how to \isakeyword{obtain} @{term y}:
-via the predefined elimination rule @{thm rangeE[no_vars]}.
-
-Method @{text blast} is used because the contradiction does not follow easily
-by just a single rule. If you find the proof too cryptic for human
-consumption, here is a more detailed version; the beginning up to
-\isakeyword{obtain} stays unchanged. *}
-
-theorem "\<exists>S. S \<notin> range (f :: 'a \<Rightarrow> 'a set)"
-proof
-  let ?S = "{x. x \<notin> f x}"
-  show "?S \<notin> range f"
-  proof
-    assume "?S \<in> range f"
-    then obtain y where "?S = f y" ..
-    show False
-    proof cases
-      assume "y \<in> ?S"
-      hence "y \<notin> f y"   by simp
-      hence "y \<notin> ?S"    by(simp add: `?S = f y`)
-      with `y \<in> ?S` show False by contradiction
-    next
-      assume "y \<notin> ?S"
-      hence "y \<in> f y"   by simp
-      hence "y \<in> ?S"    by(simp add: `?S = f y`)
-      with `y \<notin> ?S` show False by contradiction
-    qed
-  qed
-qed
-text{*\noindent Method @{text contradiction} succeeds if both $P$ and
-$\neg P$ are among the assumptions and the facts fed into that step, in any order.
-
-As it happens, Cantor's theorem can be proved automatically by best-first
-search. Depth-first search would diverge, but best-first search successfully
-navigates through the large search space:
-*}
-
-theorem "\<exists>S. S \<notin> range (f :: 'a \<Rightarrow> 'a set)"
-by best
-(* Of course this only works in the context of HOL's carefully
-constructed introduction and elimination rules for set theory.*)
-
-subsection{*Raw proof blocks*}
-
-text{* Although we have shown how to employ powerful automatic methods like
-@{text blast} to achieve bigger proof steps, there may still be the
-tendency to use the default introduction and elimination rules to
-decompose goals and facts. This can lead to very tedious proofs:
-*}
-lemma "\<forall>x y. A x y \<and> B x y \<longrightarrow> C x y"
-proof
-  fix x show "\<forall>y. A x y \<and> B x y \<longrightarrow> C x y"
-  proof
-    fix y show "A x y \<and> B x y \<longrightarrow> C x y"
-    proof
-      assume "A x y \<and> B x y"
-      show "C x y" sorry
-    qed
-  qed
-qed
-text{*\noindent Since we are only interested in the decomposition and not the
-actual proof, the latter has been replaced by
-\isakeyword{sorry}. Command \isakeyword{sorry} proves anything but is
-only allowed in quick and dirty mode, the default interactive mode. It
-is very convenient for top-down proof development.
-
-Luckily we can avoid this step-by-step decomposition very easily: *}
-
-lemma "\<forall>x y. A x y \<and> B x y \<longrightarrow> C x y"
-proof -
-  have "\<And>x y. \<lbrakk> A x y; B x y \<rbrakk> \<Longrightarrow> C x y"
-  proof -
-    fix x y assume "A x y" "B x y"
-    show "C x y" sorry
-  qed
-  thus ?thesis by blast
-qed
-
-text{*\noindent
-This can be simplified further by \emph{raw proof blocks}, i.e.\
-proofs enclosed in braces: *}
-
-lemma "\<forall>x y. A x y \<and> B x y \<longrightarrow> C x y"
-proof -
-  { fix x y assume "A x y" "B x y"
-    have "C x y" sorry }
-  thus ?thesis by blast
-qed
-
-text{*\noindent The result of the raw proof block is the same theorem
-as above, namely @{prop"\<And>x y. \<lbrakk> A x y; B x y \<rbrakk> \<Longrightarrow> C x y"}.  Raw
-proof blocks are like ordinary proofs except that they do not prove
-an explicitly stated property; instead, the property emerges directly
-out of the \isakeyword{fix}es, \isakeyword{assume}s and
-\isakeyword{have} in the block. Thus they again serve to avoid
-duplication. Note that the conclusion of a raw proof block is stated with
-\isakeyword{have} rather than \isakeyword{show} because it is not the
-conclusion of some pending goal but some independent claim.
-
-The general idea demonstrated in this subsection is very
-important in Isar and distinguishes it from \isa{apply}-style proofs:
-\begin{quote}\em
-Do not manipulate the proof state into a particular form by applying
-proof methods but state the desired form explicitly and let the proof
-methods verify that from this form the original goal follows.
-\end{quote}
-This yields more readable and also more robust proofs.
-
-\subsubsection{General case distinctions}
-
-As an important application of raw proof blocks we show how to deal
-with general case distinctions --- more specific kinds are treated in
-\S\ref{sec:CaseDistinction}. Imagine that you would like to prove some
-goal by distinguishing $n$ cases $P_1$, \dots, $P_n$. You show that
-the $n$ cases are exhaustive (i.e.\ $P_1 \lor \dots \lor P_n$) and
-that each case $P_i$ implies the goal. Taken together, this proves the
-goal. The corresponding Isar proof pattern (for $n = 3$) is very handy:
-*}
-text_raw{*\renewcommand{\isamarkupcmt}[1]{#1}*}
-(*<*)lemma "XXX"(*>*)
-proof -
-  have "P\<^isub>1 \<or> P\<^isub>2 \<or> P\<^isub>3" (*<*)sorry(*>*) -- {*\dots*}
-  moreover
-  { assume P\<^isub>1
-    -- {*\dots*}
-    have ?thesis (*<*)sorry(*>*) -- {*\dots*} }
-  moreover
-  { assume P\<^isub>2
-    -- {*\dots*}
-    have ?thesis (*<*)sorry(*>*) -- {*\dots*} }
-  moreover
-  { assume P\<^isub>3
-    -- {*\dots*}
-    have ?thesis (*<*)sorry(*>*) -- {*\dots*} }
-  ultimately show ?thesis by blast
-qed
-text_raw{*\renewcommand{\isamarkupcmt}[1]{{\isastylecmt--- #1}}*}
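-
-text{* For illustration, here is one way this pattern might be instantiated,
-with the exhaustiveness fact and the two individual cases simply supplied as
-assumptions; in a real proof each of these steps would of course carry its
-own subproof: *}
-
-lemma assumes "P\<^isub>1 \<or> P\<^isub>2" and "P\<^isub>1 \<Longrightarrow> Q" and "P\<^isub>2 \<Longrightarrow> Q"
-  shows "Q"
-proof -
-  have "P\<^isub>1 \<or> P\<^isub>2" by(rule assms(1))
-  moreover
-  { assume P\<^isub>1 hence "Q" by(rule assms(2)) }
-  moreover
-  { assume P\<^isub>2 hence "Q" by(rule assms(3)) }
-  ultimately show "Q" by blast
-qed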
-
-
-subsection{*Further refinements*}
-
-text{* This subsection discusses some further tricks that can make
-life easier although they are not essential. *}
-
-subsubsection{*\isakeyword{and}*}
-
-text{* Propositions (following \isakeyword{assume} etc) may but need not be
-separated by \isakeyword{and}. This is not just for readability
-(\isakeyword{from} \isa{A} \isakeyword{and} \isa{B} looks nicer than
-\isakeyword{from} \isa{A} \isa{B}) but for structuring lists of propositions
-into possibly named blocks. In
-\begin{center}
-\isakeyword{assume} \isa{A:} $A_1$ $A_2$ \isakeyword{and} \isa{B:} $A_3$
-\isakeyword{and} $A_4$
-\end{center}
-label \isa{A} refers to the list of propositions $A_1$ $A_2$ and
-label \isa{B} to $A_3$. *}
-
-subsubsection{*\isakeyword{note}*}
-text{* If you want to remember intermediate fact(s) that cannot be
-named directly, use \isakeyword{note}. For example, the result of a raw
-proof block can be named by following it with
-\isakeyword{note}~@{text"some_name = this"}.  As a side effect,
-@{text this} is set to the list of facts on the right-hand side. You
-can also say @{text"note some_fact"}, which simply sets @{text this},
-i.e.\ recalls @{text"some_fact"}, e.g.\ in a \isakeyword{moreover} sequence. *}
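-
-text{* For example, the result of the raw proof block from above could be
-given an explicit name like this (@{text AB} is an arbitrary choice): *}
-
-lemma "\<forall>x y. A x y \<and> B x y \<longrightarrow> C x y"
-proof -
-  { fix x y assume "A x y" "B x y"
-    have "C x y" sorry }
-  note AB = this
-  from AB show ?thesis by blast
-qed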
-
-
-subsubsection{*\isakeyword{fixes}*}
-
-text{* Sometimes it is necessary to decorate a proposition with type
-constraints, as in Cantor's theorem above. These type constraints tend
-to make the theorem less readable. The situation can be improved a
-little by combining the type constraint with an outer @{text"\<And>"}: *}
-
-theorem "\<And>f :: 'a \<Rightarrow> 'a set. \<exists>S. S \<notin> range f"
-(*<*)oops(*>*)
-text{*\noindent However, now @{term f} is bound and we need a
-\isakeyword{fix}~\isa{f} in the proof before we can refer to @{term f}.
-This is avoided by \isakeyword{fixes}: *}
-
-theorem fixes f :: "'a \<Rightarrow> 'a set" shows "\<exists>S. S \<notin> range f"
-(*<*)oops(*>*)
-text{* \noindent
-Even better, \isakeyword{fixes} allows us to introduce concrete syntax locally:*}
-
-lemma comm_mono:
-  fixes r :: "'a \<Rightarrow> 'a \<Rightarrow> bool" (infix ">" 60) and
-       f :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"   (infixl "++" 70)
-  assumes comm: "\<And>x y::'a. x ++ y = y ++ x" and
-          mono: "\<And>x y z::'a. x > y \<Longrightarrow> x ++ z > y ++ z"
-  shows "x > y \<Longrightarrow> z ++ x > z ++ y"
-by(simp add: comm mono)
-
-text{*\noindent The concrete syntax is dropped at the end of the proof and the
-theorem becomes @{thm[display,margin=60]comm_mono}
-\tweakskip *}
-
-subsubsection{*\isakeyword{obtain}*}
-
-text{* The \isakeyword{obtain} construct can introduce multiple
-witnesses and propositions as in the following proof fragment:*}
-
-lemma assumes A: "\<exists>x y. P x y \<and> Q x y" shows "R"
-proof -
-  from A obtain x y where P: "P x y" and Q: "Q x y"  by blast
-(*<*)oops(*>*)
-text{* Remember also that one does not even need to start with a formula
-containing @{text"\<exists>"} as we saw in the proof of Cantor's theorem.
-*}
-
-subsubsection{*Combining proof styles*}
-
-text{* Finally, whole \isa{apply}-scripts may appear in the leaves of the
-proof tree, although this is best avoided.  Here is a contrived example: *}
-
-lemma "A \<longrightarrow> (A \<longrightarrow> B) \<longrightarrow> B"
-proof
-  assume a: "A"
-  show "(A \<longrightarrow>B) \<longrightarrow> B"
-    apply(rule impI)
-    apply(erule impE)
-    apply(rule a)
-    apply assumption
-    done
-qed
-
-text{*\noindent You may need to resort to this technique if an
-automatic step fails to prove the desired proposition.
-
-When converting a proof from \isa{apply}-style into Isar you can proceed
-in a top-down manner: parts of the proof can be left in script form
-while the outer structure is already expressed in Isar. *}
-
-(*<*)end(*>*)
--- a/doc-src/IsarOverview/Isar/ROOT.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,3 +0,0 @@
-quick_and_dirty := true;
-
-use_thys ["Logic", "Induction"];
--- a/doc-src/IsarOverview/Isar/document/Induction.tex	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,718 +0,0 @@
-%
-\begin{isabellebody}%
-\def\isabellecontext{Induction}%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-%
-\isatagtheory
-%
-\endisatagtheory
-{\isafoldtheory}%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-%
-\isamarkupsection{Case distinction and induction \label{sec:Induct}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Computer science applications abound with inductively defined
-structures, which is why we treat them in more detail. HOL already
-comes with a datatype of lists with the two constructors \isa{Nil}
-and \isa{Cons}. \isa{Nil} is written \isa{{\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}} and \isa{Cons\ x\ xs} is written \isa{x\ {\isaliteral{23}{\isacharhash}}\ xs}.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsection{Case distinction\label{sec:CaseDistinction}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-We have already met the \isa{cases} method for performing
-binary case splits. Here is another example:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ cases\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The two cases must come in this order because \isa{cases} merely abbreviates \isa{{\isaliteral{28}{\isacharparenleft}}rule\ case{\isaliteral{5F}{\isacharunderscore}}split{\isaliteral{29}{\isacharparenright}}} where
-\isa{case{\isaliteral{5F}{\isacharunderscore}}split} is \isa{{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ {\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q}. If we reverse
-the order of the two cases in the proof, the first case would prove
-\isa{{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A} which would solve the first premise of
-\isa{case{\isaliteral{5F}{\isacharunderscore}}split}, instantiating \isa{{\isaliteral{3F}{\isacharquery}}P} with \isa{{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A}, thus making the second premise \isa{{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A}.
-Therefore the order of subgoals is not always completely arbitrary.
-
-The above proof is appropriate if \isa{A} is textually small.
-However, if \isa{A} is large, we do not want to repeat it. This can
-be avoided by the following idiom%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}cases\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ True\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ False\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent which is like the previous proof but instantiates
-\isa{{\isaliteral{3F}{\isacharquery}}P} right away with \isa{A}. Thus we could prove the two
-cases in any order. The phrase \isakeyword{case}~\isa{True}
-abbreviates \isakeyword{assume}~\isa{True{\isaliteral{3A}{\isacharcolon}}\ A} and analogously for
-\isa{False} and \isa{{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A}.
-
-The same game can be played with other datatypes, for example lists,
-where \isa{tl} is the tail of a list, and \isa{length} returns a
-natural number (remember: $0-1=0$):%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}length{\isaliteral{28}{\isacharparenleft}}tl\ xs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ length\ xs\ {\isaliteral{2D}{\isacharminus}}\ {\isadigit{1}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}cases\ xs{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ Nil\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ Cons\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Here \isakeyword{case}~\isa{Nil} abbreviates
-\isakeyword{assume}~\isa{Nil{\isaliteral{3A}{\isacharcolon}}}~\isa{xs\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}} and
-\isakeyword{case}~\isa{Cons} abbreviates \isakeyword{fix}~\isa{{\isaliteral{3F}{\isacharquery}}\ {\isaliteral{3F}{\isacharquery}}{\isaliteral{3F}{\isacharquery}}}
-\isakeyword{assume}~\isa{Cons{\isaliteral{3A}{\isacharcolon}}}~\isa{xs\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{3F}{\isacharquery}}\ {\isaliteral{23}{\isacharhash}}\ {\isaliteral{3F}{\isacharquery}}{\isaliteral{3F}{\isacharquery}}},
-where \isa{{\isaliteral{3F}{\isacharquery}}} and \isa{{\isaliteral{3F}{\isacharquery}}{\isaliteral{3F}{\isacharquery}}}
-stand for variable names that have been chosen by the system.
-Therefore we cannot refer to them.
-Luckily, this proof is simple enough that we do not need to refer to them.
-However, sometimes one may have to. Hence Isar offers a simple scheme for
-naming those variables: replace the anonymous \isa{Cons} by
-\isa{{\isaliteral{28}{\isacharparenleft}}Cons\ y\ ys{\isaliteral{29}{\isacharparenright}}}, which abbreviates \isakeyword{fix}~\isa{y\ ys}
-\isakeyword{assume}~\isa{Cons{\isaliteral{3A}{\isacharcolon}}}~\isa{xs\ {\isaliteral{3D}{\isacharequal}}\ y\ {\isaliteral{23}{\isacharhash}}\ ys}.
-In each \isakeyword{case} the assumption can be
-referred to inside the proof by the name of the constructor. In
-Section~\ref{sec:full-Ind} below we will come across an example
-of this.
-
-\subsection{Structural induction}
-
-We start with an inductive proof where both cases are proved automatically:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isadigit{2}}\ {\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C53756D3E}{\isasymSum}}i{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}nat{\isaliteral{5C3C6C653E}{\isasymle}}n{\isaliteral{2E}{\isachardot}}\ i{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ n{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{28}{\isacharparenleft}}n{\isaliteral{2B}{\isacharplus}}{\isadigit{1}}{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{by}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ n{\isaliteral{29}{\isacharparenright}}\ simp{\isaliteral{5F}{\isacharunderscore}}all%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The constraint \isa{{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}nat} is needed because all of
-the operations involved are overloaded.
-This proof also demonstrates that \isakeyword{by} can take two arguments,
-one to start and one to finish the proof --- the latter is optional.
-
-If we want to expose more of the structure of the
-proof, we can use pattern matching to avoid having to repeat the goal
-statement:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isadigit{2}}\ {\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C53756D3E}{\isasymSum}}i{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}nat{\isaliteral{5C3C6C653E}{\isasymle}}n{\isaliteral{2E}{\isachardot}}\ i{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ n{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{28}{\isacharparenleft}}n{\isaliteral{2B}{\isacharplus}}{\isadigit{1}}{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{28}{\isacharparenleft}}\isakeyword{is}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}P\ n{\isaliteral{22}{\isachardoublequoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ n{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}P\ {\isadigit{0}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{fix}\isamarkupfalse%
-\ n\ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}P\ n{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}P{\isaliteral{28}{\isacharparenleft}}Suc\ n{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent We could refine this further to show more of the equational
-proof. Instead we explore the same avenue as for case distinctions:
-introducing context via the \isakeyword{case} command:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isadigit{2}}\ {\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C53756D3E}{\isasymSum}}i{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}nat\ {\isaliteral{5C3C6C653E}{\isasymle}}\ n{\isaliteral{2E}{\isachardot}}\ i{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ n{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{28}{\isacharparenleft}}n{\isaliteral{2B}{\isacharplus}}{\isadigit{1}}{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ n{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isadigit{0}}\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ Suc\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The implicitly defined \isa{{\isaliteral{3F}{\isacharquery}}case} refers to the
-corresponding case to be proved, i.e.\ \isa{{\isaliteral{3F}{\isacharquery}}P\ {\isadigit{0}}} in the first case and
-\isa{{\isaliteral{3F}{\isacharquery}}P{\isaliteral{28}{\isacharparenleft}}Suc\ n{\isaliteral{29}{\isacharparenright}}} in the second case. Context \isakeyword{case}~\isa{{\isadigit{0}}} is
-empty whereas \isakeyword{case}~\isa{Suc} assumes \isa{{\isaliteral{3F}{\isacharquery}}P\ n}. Again we
-have the same problem as with case distinctions: we cannot refer to an anonymous \isa{n}
-in the induction step because it has not been introduced via \isakeyword{fix}
-(in contrast to the previous proof). The solution is the one outlined for
-\isa{Cons} above: replace \isa{Suc} by \isa{{\isaliteral{28}{\isacharparenleft}}Suc\ i{\isaliteral{29}{\isacharparenright}}}:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{fixes}\ n{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}nat\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}n\ {\isaliteral{3C}{\isacharless}}\ n{\isaliteral{2A}{\isacharasterisk}}n\ {\isaliteral{2B}{\isacharplus}}\ {\isadigit{1}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ n{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isadigit{0}}\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}Suc\ i{\isaliteral{29}{\isacharparenright}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}Suc\ i\ {\isaliteral{3C}{\isacharless}}\ Suc\ i\ {\isaliteral{2A}{\isacharasterisk}}\ Suc\ i\ {\isaliteral{2B}{\isacharplus}}\ {\isadigit{1}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Of course we could again have written
-\isakeyword{thus}~\isa{{\isaliteral{3F}{\isacharquery}}case} instead of giving the term explicitly
-but we wanted to use \isa{i} somewhere.
-
-\subsection{Generalization via \isa{arbitrary}}
-
-It is frequently necessary to generalize a claim before it becomes
-provable by induction. The tutorial~\cite{LNCS2283} demonstrates this
-with \isa{itrev\ xs\ ys\ {\isaliteral{3D}{\isacharequal}}\ rev\ xs\ {\isaliteral{40}{\isacharat}}\ ys}, where \isa{ys}
-needs to be universally quantified before induction succeeds.\footnote{\isa{rev\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}},\quad \isa{rev\ {\isaliteral{28}{\isacharparenleft}}x\ {\isaliteral{23}{\isacharhash}}\ xs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ rev\ xs\ {\isaliteral{40}{\isacharat}}\ {\isaliteral{5B}{\isacharbrackleft}}x{\isaliteral{5D}{\isacharbrackright}}},\\ \isa{itrev\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}\ ys\ {\isaliteral{3D}{\isacharequal}}\ ys},\quad \isa{itrev\ {\isaliteral{28}{\isacharparenleft}}x\ {\isaliteral{23}{\isacharhash}}\ xs{\isaliteral{29}{\isacharparenright}}\ ys\ {\isaliteral{3D}{\isacharequal}}\ itrev\ xs\ {\isaliteral{28}{\isacharparenleft}}x\ {\isaliteral{23}{\isacharhash}}\ ys{\isaliteral{29}{\isacharparenright}}}} But
-strictly speaking, this quantification step is already part of the
-proof and the quantifiers should not clutter the original claim. This
-is how the quantification step can be combined with induction:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}itrev\ xs\ ys\ {\isaliteral{3D}{\isacharequal}}\ rev\ xs\ {\isaliteral{40}{\isacharat}}\ ys{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{by}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ xs\ arbitrary{\isaliteral{3A}{\isacharcolon}}\ ys{\isaliteral{29}{\isacharparenright}}\ simp{\isaliteral{5F}{\isacharunderscore}}all%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The annotation \isa{arbitrary{\isaliteral{3A}{\isacharcolon}}}~\emph{vars}
-universally quantifies all \emph{vars} before the induction.  Hence
-they can be replaced by \emph{arbitrary} values in the proof.
-
-Generalization via \isa{arbitrary} is particularly convenient
-if the induction step is a structured proof as opposed to the automatic
-example above. Then the claim is available in unquantified form but
-with the generalized variables replaced by \isa{{\isaliteral{3F}{\isacharquery}}}-variables, ready
-for instantiation. In the above example, in the \isa{Cons} case the
-induction hypothesis is \isa{itrev\ xs\ {\isaliteral{3F}{\isacharquery}}ys\ {\isaliteral{3D}{\isacharequal}}\ rev\ xs\ {\isaliteral{40}{\isacharat}}\ {\isaliteral{3F}{\isacharquery}}ys} (available
-under the name \isa{Cons}).
-
-
-\subsection{Inductive proofs of conditional formulae}
-\label{sec:full-Ind}
-
-Induction also copes well with formulae involving \isa{{\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}}, for example%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}xs\ {\isaliteral{5C3C6E6F7465713E}{\isasymnoteq}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ hd{\isaliteral{28}{\isacharparenleft}}rev\ xs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ last\ xs{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{by}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ xs{\isaliteral{29}{\isacharparenright}}\ simp{\isaliteral{5F}{\isacharunderscore}}all%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent This is an improvement over the style that the
-tutorial~\cite{LNCS2283} advises, which requires \isa{{\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}}.
-A further improvement is shown in the following proof:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \ {\isaliteral{22}{\isachardoublequoteopen}}map\ f\ xs\ {\isaliteral{3D}{\isacharequal}}\ map\ f\ ys\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ length\ xs\ {\isaliteral{3D}{\isacharequal}}\ length\ ys{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ ys\ arbitrary{\isaliteral{3A}{\isacharcolon}}\ xs{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ Nil\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}Cons\ y\ ys{\isaliteral{29}{\isacharparenright}}\ \ \isacommand{note}\isamarkupfalse%
-\ Asm\ {\isaliteral{3D}{\isacharequal}}\ Cons\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}cases\ xs{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \isacommand{case}\isamarkupfalse%
-\ Nil\isanewline
-\ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ False\ \isacommand{using}\isamarkupfalse%
-\ Asm{\isaliteral{28}{\isacharparenleft}}{\isadigit{2}}{\isaliteral{29}{\isacharparenright}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{case}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}Cons\ x\ xs{\isaliteral{27}{\isacharprime}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \isacommand{with}\isamarkupfalse%
-\ Asm{\isaliteral{28}{\isacharparenleft}}{\isadigit{2}}{\isaliteral{29}{\isacharparenright}}\ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}map\ f\ xs{\isaliteral{27}{\isacharprime}}\ {\isaliteral{3D}{\isacharequal}}\ map\ f\ ys{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \ \ \isacommand{from}\isamarkupfalse%
-\ Asm{\isaliteral{28}{\isacharparenleft}}{\isadigit{1}}{\isaliteral{29}{\isacharparenright}}{\isaliteral{5B}{\isacharbrackleft}}OF\ this{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{60}{\isacharbackquoteopen}}xs\ {\isaliteral{3D}{\isacharequal}}\ x{\isaliteral{23}{\isacharhash}}xs{\isaliteral{27}{\isacharprime}}{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-The base case is trivial. In the step case Isar assumes
-(under the name \isa{Cons}) two propositions:
-\begin{center}
-\begin{tabular}{l}
-\isa{map\ f\ {\isaliteral{3F}{\isacharquery}}xs\ {\isaliteral{3D}{\isacharequal}}\ map\ f\ ys\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ length\ {\isaliteral{3F}{\isacharquery}}xs\ {\isaliteral{3D}{\isacharequal}}\ length\ ys}\\
-\isa{map\ f\ xs\ {\isaliteral{3D}{\isacharequal}}\ map\ f\ {\isaliteral{28}{\isacharparenleft}}y\ {\isaliteral{23}{\isacharhash}}\ ys{\isaliteral{29}{\isacharparenright}}}
-\end{tabular}
-\end{center}
-The first is the induction hypothesis; the second, and this is new,
-is the premise of the induction step. The actual goal at this point is merely
-\isa{length\ xs\ {\isaliteral{3D}{\isacharequal}}\ length\ {\isaliteral{28}{\isacharparenleft}}y\ {\isaliteral{23}{\isacharhash}}\ ys{\isaliteral{29}{\isacharparenright}}}. The assumptions are given the new name
-\isa{Asm} to avoid a name clash further down. The proof proceeds with a case distinction on \isa{xs}. In the case \isa{xs\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}}, the second of our two
-assumptions (\isa{Asm{\isaliteral{28}{\isacharparenleft}}{\isadigit{2}}{\isaliteral{29}{\isacharparenright}}}) implies the contradiction \isa{{\isadigit{0}}\ {\isaliteral{3D}{\isacharequal}}\ Suc{\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C646F74733E}{\isasymdots}}{\isaliteral{29}{\isacharparenright}}}.
- In the case \isa{xs\ {\isaliteral{3D}{\isacharequal}}\ x\ {\isaliteral{23}{\isacharhash}}\ xs{\isaliteral{27}{\isacharprime}}}, we first obtain
-\isa{map\ f\ xs{\isaliteral{27}{\isacharprime}}\ {\isaliteral{3D}{\isacharequal}}\ map\ f\ ys}, from which a forward step with the first assumption (\isa{Asm{\isaliteral{28}{\isacharparenleft}}{\isadigit{1}}{\isaliteral{29}{\isacharparenright}}{\isaliteral{5B}{\isacharbrackleft}}OF\ this{\isaliteral{5D}{\isacharbrackright}}}) yields \isa{length\ xs{\isaliteral{27}{\isacharprime}}\ {\isaliteral{3D}{\isacharequal}}\ length\ ys}. Together
-with \isa{xs\ {\isaliteral{3D}{\isacharequal}}\ x\ {\isaliteral{23}{\isacharhash}}\ xs{\isaliteral{27}{\isacharprime}}} this yields the goal
-\isa{length\ xs\ {\isaliteral{3D}{\isacharequal}}\ length\ {\isaliteral{28}{\isacharparenleft}}y\ {\isaliteral{23}{\isacharhash}}\ ys{\isaliteral{29}{\isacharparenright}}}.
-
-
-\subsection{Induction formulae involving \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}} or \isa{{\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}}}
-
-Let us now consider abstractly the situation where the goal to be proved
-contains both \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}} and \isa{{\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}}, say \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}x{\isaliteral{2E}{\isachardot}}\ P\ x\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ Q\ x}.
-This means that in each case of the induction,
-\isa{{\isaliteral{3F}{\isacharquery}}case} would be of the form \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}x{\isaliteral{2E}{\isachardot}}\ P{\isaliteral{27}{\isacharprime}}\ x\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ Q{\isaliteral{27}{\isacharprime}}\ x}.  Thus the
-first proof steps will be the canonical ones, fixing \isa{x} and assuming
-\isa{P{\isaliteral{27}{\isacharprime}}\ x}. To avoid this tedium, induction performs the canonical steps
-automatically: in each step case, the assumptions contain both the
-usual induction hypothesis and \isa{P{\isaliteral{27}{\isacharprime}}\ x}, whereas \isa{{\isaliteral{3F}{\isacharquery}}case} is only
-\isa{Q{\isaliteral{27}{\isacharprime}}\ x}.
-
-\subsection{Rule induction}
-
-HOL also supports inductively defined sets. See \cite{LNCS2283}
-for details. As an example we define our own version of the reflexive
-transitive closure of a relation --- HOL provides a predefined one as well.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{inductive{\isaliteral{5F}{\isacharunderscore}}set}\isamarkupfalse%
-\isanewline
-\ \ rtc\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}{\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C74696D65733E}{\isasymtimes}}\ {\isaliteral{27}{\isacharprime}}a{\isaliteral{29}{\isacharparenright}}set\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C74696D65733E}{\isasymtimes}}\ {\isaliteral{27}{\isacharprime}}a{\isaliteral{29}{\isacharparenright}}set{\isaliteral{22}{\isachardoublequoteclose}}\ \ \ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5F}{\isacharunderscore}}{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isadigit{1}}{\isadigit{0}}{\isadigit{0}}{\isadigit{0}}{\isaliteral{5D}{\isacharbrackright}}\ {\isadigit{9}}{\isadigit{9}}{\isadigit{9}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isakeyword{for}\ r\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}{\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C74696D65733E}{\isasymtimes}}\ {\isaliteral{27}{\isacharprime}}a{\isaliteral{29}{\isacharparenright}}set{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\isakeyword{where}\isanewline
-\ \ refl{\isaliteral{3A}{\isacharcolon}}\ \ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}x{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-{\isaliteral{7C}{\isacharbar}}\ step{\isaliteral{3A}{\isacharcolon}}\ \ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}\ {\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}y{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{28}{\isacharparenleft}}y{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}%
-\begin{isamarkuptext}%
-\noindent
-First the constant is declared as a function on binary
-relations (with concrete syntax \isa{r{\isaliteral{2A}{\isacharasterisk}}} instead of \isa{rtc\ r}), then the defining clauses are given. We will now prove that
-\isa{r{\isaliteral{2A}{\isacharasterisk}}} is indeed transitive:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ A{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}y{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}y{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{using}\isamarkupfalse%
-\ A\isanewline
-\isacommand{proof}\isamarkupfalse%
-\ induct\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ refl\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ step\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}blast\ intro{\isaliteral{3A}{\isacharcolon}}\ rtc{\isaliteral{2E}{\isachardot}}step{\isaliteral{29}{\isacharparenright}}\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Rule induction is triggered by a fact $(x_1,\dots,x_n)
-\in R$ piped into the proof, here \isakeyword{using}~\isa{A}. The
-proof itself follows the inductive definition very
-closely: there is one case for each rule, and it has the same name as
-the rule, analogous to structural induction.
-
-However, this proof is rather terse. Here is a more readable version:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}y{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}y{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{using}\isamarkupfalse%
-\ assms\isanewline
-\isacommand{proof}\isamarkupfalse%
-\ induct\isanewline
-\ \ \isacommand{fix}\isamarkupfalse%
-\ x\ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \ %
-\isamarkupcmt{\isa{B}[\isa{y} := \isa{x}]%
-}
-\isanewline
-\ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{fix}\isamarkupfalse%
-\ x{\isaliteral{27}{\isacharprime}}\ x\ y\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isadigit{1}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{27}{\isacharprime}}{\isaliteral{2C}{\isacharcomma}}x{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\isanewline
-\ \ \ \ \ \ \ \ \ IH{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}y{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{28}{\isacharparenleft}}x{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\isanewline
-\ \ \ \ \ \ \ \ \ B{\isaliteral{3A}{\isacharcolon}}\ \ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}y{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ {\isadigit{1}}\ IH{\isaliteral{5B}{\isacharbrackleft}}OF\ B{\isaliteral{5D}{\isacharbrackright}}\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{27}{\isacharprime}}{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}rule\ rtc{\isaliteral{2E}{\isachardot}}step{\isaliteral{29}{\isacharparenright}}\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-This time, merely for a change, we start the proof by feeding both
-assumptions into the inductive proof. Only the first assumption is
-``consumed'' by the induction.
-Since the second one is left over we don't just prove \isa{{\isaliteral{3F}{\isacharquery}}thesis} but
-\isa{{\isaliteral{28}{\isacharparenleft}}y{\isaliteral{2C}{\isacharcomma}}z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C696E3E}{\isasymin}}\ r{\isaliteral{2A}{\isacharasterisk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}thesis}, just as in the previous proof.
-The base case is trivial. In the assumptions for the induction step we can
-see very clearly how things fit together and permit ourselves the
-obvious forward step \isa{IH{\isaliteral{5B}{\isacharbrackleft}}OF\ B{\isaliteral{5D}{\isacharbrackright}}}.
-
-The notation \isakeyword{case}~\isa{(}\emph{constructor} \emph{vars}\isa{)}
-is also supported for inductive definitions. The \emph{constructor} is the
-name of the rule and the \emph{vars} fix the free variables in the
-rule; the order of the \emph{vars} must correspond to the
-left-to-right order of the variables as they appear in the rule.
-For example, we could start the above detailed proof of the induction
-with \isakeyword{case}~\isa{(step x' x y)}. In that case we don't need
-to spell out the assumptions but can refer to them by \isa{step{\isaliteral{28}{\isacharparenleft}}{\isaliteral{2E}{\isachardot}}{\isaliteral{29}{\isacharparenright}}},
-although the resulting text will be quite cryptic.
-
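-As an illustrative sketch (not part of the original theory text), the
-detailed proof above could be restated in this notation roughly as follows,
-assuming the base introduction rule of the reflexive transitive closure is
-named \emph{refl} (adjust the case name to the actual rule name):
-\begin{verbatim}
-lemma assumes "(x,y) \<in> r*" and "(y,z) \<in> r*" shows "(x,z) \<in> r*"
-using assms
-proof induct
-  case refl             (* rule name assumed; its facts are refl(1) etc. *)
-  thus ?case .
-next
-  case (step x' x y)    (* assumptions available as step(1) .. step(3) *)
-  from step(1) step(2)[OF step(3)] show ?case by (rule rtc.step)
-qed
-\end{verbatim}
-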
-\subsection{More induction}
-
-We close the section by demonstrating how arbitrary induction
-rules are applied. As a simple example we have chosen recursion
-induction, i.e.\ induction based on a recursive function
-definition. However, most of what we show works for induction in
-general.
-
-The example is an unusual definition of rotation:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{fun}\isamarkupfalse%
-\ rot\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{27}{\isacharprime}}a\ list\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ list{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{where}\isanewline
-{\isaliteral{22}{\isachardoublequoteopen}}rot\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{7C}{\isacharbar}}\isanewline
-{\isaliteral{22}{\isachardoublequoteopen}}rot\ {\isaliteral{5B}{\isacharbrackleft}}x{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{5B}{\isacharbrackleft}}x{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{7C}{\isacharbar}}\isanewline
-{\isaliteral{22}{\isachardoublequoteopen}}rot\ {\isaliteral{28}{\isacharparenleft}}x{\isaliteral{23}{\isacharhash}}y{\isaliteral{23}{\isacharhash}}zs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ y\ {\isaliteral{23}{\isacharhash}}\ rot{\isaliteral{28}{\isacharparenleft}}x{\isaliteral{23}{\isacharhash}}zs{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}%
-\begin{isamarkuptext}%
-\noindent This yields, among other things, the induction rule
-\isa{rot{\isaliteral{2E}{\isachardot}}induct}: \begin{isabelle}%
-{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}P\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{5C3C416E643E}{\isasymAnd}}x{\isaliteral{2E}{\isachardot}}\ P\ {\isaliteral{5B}{\isacharbrackleft}}x{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y\ zs{\isaliteral{2E}{\isachardot}}\ P\ {\isaliteral{28}{\isacharparenleft}}x\ {\isaliteral{23}{\isacharhash}}\ zs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ P\ {\isaliteral{28}{\isacharparenleft}}x\ {\isaliteral{23}{\isacharhash}}\ y\ {\isaliteral{23}{\isacharhash}}\ zs{\isaliteral{29}{\isacharparenright}}{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ P\ a{\isadigit{0}}%
-\end{isabelle}
-The following proof relies on a default naming scheme for cases: they are
-called 1, 2, etc., unless they have been named explicitly. The latter happens
-only with datatypes and inductively defined sets, but (usually)
-not with recursive functions.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}xs\ {\isaliteral{5C3C6E6F7465713E}{\isasymnoteq}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ rot\ xs\ {\isaliteral{3D}{\isacharequal}}\ tl\ xs\ {\isaliteral{40}{\isacharat}}\ {\isaliteral{5B}{\isacharbrackleft}}hd\ xs{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ xs\ rule{\isaliteral{3A}{\isacharcolon}}\ rot{\isaliteral{2E}{\isachardot}}induct{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isadigit{1}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isadigit{2}}\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{case}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}{\isadigit{3}}\ a\ b\ cs{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}rot\ {\isaliteral{28}{\isacharparenleft}}a\ {\isaliteral{23}{\isacharhash}}\ b\ {\isaliteral{23}{\isacharhash}}\ cs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{3D}{\isacharequal}}\ b\ {\isaliteral{23}{\isacharhash}}\ rot{\isaliteral{28}{\isacharparenleft}}a\ {\isaliteral{23}{\isacharhash}}\ cs{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \isacommand{also}\isamarkupfalse%
-\ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C646F74733E}{\isasymdots}}\ {\isaliteral{3D}{\isacharequal}}\ b\ {\isaliteral{23}{\isacharhash}}\ tl{\isaliteral{28}{\isacharparenleft}}a\ {\isaliteral{23}{\isacharhash}}\ cs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{40}{\isacharat}}\ {\isaliteral{5B}{\isacharbrackleft}}hd{\isaliteral{28}{\isacharparenleft}}a\ {\isaliteral{23}{\isacharhash}}\ cs{\isaliteral{29}{\isacharparenright}}{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}simp\ add{\isaliteral{3A}{\isacharcolon}}{\isadigit{3}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{also}\isamarkupfalse%
-\ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C646F74733E}{\isasymdots}}\ {\isaliteral{3D}{\isacharequal}}\ tl\ {\isaliteral{28}{\isacharparenleft}}a\ {\isaliteral{23}{\isacharhash}}\ b\ {\isaliteral{23}{\isacharhash}}\ cs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{40}{\isacharat}}\ {\isaliteral{5B}{\isacharbrackleft}}hd\ {\isaliteral{28}{\isacharparenleft}}a\ {\isaliteral{23}{\isacharhash}}\ b\ {\isaliteral{23}{\isacharhash}}\ cs{\isaliteral{29}{\isacharparenright}}{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \isacommand{finally}\isamarkupfalse%
-\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}case\ \isacommand{{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-Only the third case is shown in gory detail (see \cite{BauerW-TPHOLs01}
-for how to reason with chains of equations) to demonstrate that the
-\isakeyword{case}~\isa{(}\emph{constructor} \emph{vars}\isa{)} notation also
-works for arbitrary induction theorems with numbered cases. The order
-of the \emph{vars} corresponds to the order of the
-\isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}}-quantified variables in each case of the induction
-theorem. For induction theorems produced by \isakeyword{fun} it is
-the order in which the variables appear on the left-hand side of the
-equation.
-
-The proof is so simple that it can be condensed to%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{by}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}induct\ xs\ rule{\isaliteral{3A}{\isacharcolon}}\ rot{\isaliteral{2E}{\isachardot}}induct{\isaliteral{29}{\isacharparenright}}\ simp{\isaliteral{5F}{\isacharunderscore}}all\isanewline
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isadelimtheory
-%
-\endisadelimtheory
-%
-\isatagtheory
-%
-\endisatagtheory
-{\isafoldtheory}%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-\end{isabellebody}%
-%%% Local Variables:
-%%% mode: latex
-%%% TeX-master: "root"
-%%% End:
--- a/doc-src/IsarOverview/Isar/document/Logic.tex	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1688 +0,0 @@
-%
-\begin{isabellebody}%
-\def\isabellecontext{Logic}%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-%
-\isatagtheory
-%
-\endisatagtheory
-{\isafoldtheory}%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-%
-\isamarkupsection{Logic \label{sec:Logic}%
-}
-\isamarkuptrue%
-%
-\isamarkupsubsection{Propositional logic%
-}
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{Introduction rules%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-We start with a really trivial toy proof to introduce the basic
-features of structured proofs.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}rule\ impI{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}rule\ a{\isaliteral{29}{\isacharparenright}}\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-The operational reading: the \isakeyword{assume}-\isakeyword{show}
-block proves \isa{A\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ A} (\isa{a} is a degenerate rule (no
-assumptions) that proves \isa{A} outright), which rule
-\isa{impI} (\isa{{\isaliteral{28}{\isacharparenleft}}{\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q}) turns into the desired \isa{A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ A}.  However, this text is much too detailed for comfort. Therefore
-Isar implements the following principle: \begin{quote}\em Command
-\isakeyword{proof} automatically tries to select an introduction rule
-based on the goal and a predefined list of rules.  \end{quote} Here
-\isa{impI} is applied automatically:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ A\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ A\ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}rule\ a{\isaliteral{29}{\isacharparenright}}\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent As you see above, single-identifier formulae such as \isa{A}
-need not be enclosed in double quotes. However, we will continue to do so for
-uniformity.
-
-Instead of applying fact \isa{a} via the \isa{rule} method, we can
-also push it directly onto our goal.  The proof is then immediate,
-which is formally written as ``.'' in Isar:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ a\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-We can also push several facts towards a goal, and put another
-rule in between to establish some result that is one step further
-removed.  We illustrate this by introducing a trivial conjunction:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ a\ \isakeyword{and}\ a\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}rule\ conjI{\isaliteral{29}{\isacharparenright}}\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Rule \isa{conjI} is of course \isa{{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{3F}{\isacharquery}}P{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{5C3C616E643E}{\isasymand}}\ {\isaliteral{3F}{\isacharquery}}Q}.
-
-Proofs of the form \isakeyword{by}\isa{{\isaliteral{28}{\isacharparenleft}}rule}~\emph{name}\isa{{\isaliteral{29}{\isacharparenright}}}
-can be abbreviated to ``..'' if \emph{name} refers to one of the
-predefined introduction rules (or elimination rules, see below):%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ a\ \isakeyword{and}\ a\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-This is what happens: first the matching introduction rule \isa{conjI}
-is applied (first ``.''), then the remaining problem is solved immediately (second ``.'').%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{Elimination rules%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-A typical elimination rule is \isa{conjE}, $\land$-elimination:
-\begin{isabelle}%
-\ \ \ \ \ {\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{5C3C616E643E}{\isasymand}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{3F}{\isacharquery}}P{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}R{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}R%
-\end{isabelle}  In the following proof it is applied
-by hand, after its first (\emph{major}) premise has been eliminated via
-\isa{{\isaliteral{5B}{\isacharbrackleft}}OF\ ab{\isaliteral{5D}{\isacharbrackright}}}:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}rule\ conjE{\isaliteral{5B}{\isacharbrackleft}}OF\ ab{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{29}{\isacharparenright}}\ \ %
-\isamarkupcmt{\isa{conjE{\isaliteral{5B}{\isacharbrackleft}}OF\ ab{\isaliteral{5D}{\isacharbrackright}}}: \isa{{\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}A{\isaliteral{3B}{\isacharsemicolon}}\ B{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}R{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}R}%
-}
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\ b{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{from}\isamarkupfalse%
-\ b\ \isakeyword{and}\ a\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Note that the term \isa{{\isaliteral{3F}{\isacharquery}}thesis} always stands for the
-``current goal'', i.e.\ the enclosing \isakeyword{show} (or
-\isakeyword{have}) statement.
-
-This is too much proof text. Elimination rules should be selected
-automatically based on their major premise, the formula or rather connective
-to be eliminated. In Isar they are triggered by facts being fed
-\emph{into} a proof. Syntax:
-\begin{center}
-\isakeyword{from} \emph{fact} \isakeyword{show} \emph{proposition} \emph{proof}
-\end{center}
-where \emph{fact} stands for the name of a previously proved
-proposition, e.g.\ an assumption, an intermediate result or some global
-theorem, which may also be modified with \isa{OF} etc.
-The \emph{fact} is ``piped'' into the \emph{proof}, which can deal with it
-as it chooses. If the \emph{proof} starts with a plain \isakeyword{proof},
-an elimination rule (from a predefined list) is applied
-whose first premise is solved by the \emph{fact}. Thus the proof above
-is equivalent to the following one:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ab\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\ b{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{from}\isamarkupfalse%
-\ b\ \isakeyword{and}\ a\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-Now we come to a second important principle:
-\begin{quote}\em
-Try to arrange the sequence of propositions in a UNIX-like pipe,
-such that the proof of each proposition builds on the previous proposition.
-\end{quote}
-The previous proposition can be referred to via the fact \isa{this}.
-This greatly reduces the need for explicit naming of propositions.  We also
-rearrange the additional inner assumptions into proper order for immediate use:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ this\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{from}\isamarkupfalse%
-\ this\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Because of the frequency of \isakeyword{from}~\isa{this}, Isar provides two abbreviations:
-\begin{center}
-\begin{tabular}{r@ {\quad=\quad}l}
-\isakeyword{then} & \isakeyword{from} \isa{this} \\
-\isakeyword{thus} & \isakeyword{then} \isakeyword{show}
-\end{tabular}
-\end{center}
-
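-As an immediate illustration (an editorial rewriting, not part of the
-original text), the preceding proof becomes:
-\begin{verbatim}
-lemma "A \<and> B \<longrightarrow> B \<and> A"
-proof
-  assume "A \<and> B"
-  thus "B \<and> A"       (* = from this show "B \<and> A" *)
-  proof
-    assume "B" "A"
-    thus ?thesis ..     (* = from this show ?thesis .. *)
-  qed
-qed
-\end{verbatim}
-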
-Here is an alternative proof that operates purely by forward reasoning:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ab\ \isacommand{have}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ab\ \isacommand{have}\isamarkupfalse%
-\ b{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ b\ a\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent It is worth examining this text in detail because it
-exhibits a number of new concepts.  For a start, it is the first time
-we have proved intermediate propositions (\isakeyword{have}) on the
-way to the final \isakeyword{show}. This is the norm in nontrivial
-proofs where one cannot bridge the gap between the assumptions and the
-conclusion in one step. To understand how the proof works we need to
-explain more Isar details:
-\begin{itemize}
-\item
-Method \isa{rule} can be given a list of rules, in which case
-\isa{{\isaliteral{28}{\isacharparenleft}}rule}~\textit{rules}\isa{{\isaliteral{29}{\isacharparenright}}} applies the first matching
-rule in the list \textit{rules}.
-\item Command \isakeyword{from} can be
-followed by any number of facts.  Given \isakeyword{from}~\isa{f}$_1$~\dots~\isa{f}$_n$, the proof step
-\isa{{\isaliteral{28}{\isacharparenleft}}rule}~\textit{rules}\isa{{\isaliteral{29}{\isacharparenright}}} following a \isakeyword{have}
-or \isakeyword{show} searches \textit{rules} for a rule whose first
-$n$ premises can be proved by \isa{f}$_1$~\dots~\isa{f}$_n$ in the
-given order.
-\item ``..'' is short for
-\isa{by{\isaliteral{28}{\isacharparenleft}}rule}~\textit{elim-rules intro-rules}\isa{{\isaliteral{29}{\isacharparenright}}}\footnote{or
-merely \isa{{\isaliteral{28}{\isacharparenleft}}rule}~\textit{intro-rules}\isa{{\isaliteral{29}{\isacharparenright}}} if there are no facts
-fed into the proof}, where \textit{elim-rules} and \textit{intro-rules}
-are the predefined elimination and introduction rules. Thus
-elimination rules are tried first (if there are incoming facts).
-\end{itemize}
-Hence in the above proof both \isakeyword{have}s are proved via
-\isa{conjE} triggered by \isakeyword{from}~\isa{ab} whereas
-in the \isakeyword{show} step no elimination rule is applicable and
-the proof succeeds with \isa{conjI}. The latter would fail had
-we written \isakeyword{from}~\isa{a\ b} instead of
-\isakeyword{from}~\isa{b\ a}.
-
-A plain \isakeyword{proof} with no argument is short for
-\isakeyword{proof}~\isa{{\isaliteral{28}{\isacharparenleft}}rule}~\textit{elim-rules intro-rules}\isa{{\isaliteral{29}{\isacharparenright}}}\footnotemark[1].
-This means that the matching rule is selected by the incoming facts and the goal exactly as just explained.
-
-Although we have only seen a few introduction and elimination rules so
-far, Isar's predefined rules include all the usual natural deduction
-rules. We conclude our exposition of propositional logic with an extended
-example --- which rules are used implicitly where?%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ {\isaliteral{28}{\isacharparenleft}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ n{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ {\isaliteral{28}{\isacharparenleft}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{28}{\isacharparenleft}}rule\ ccontr{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ nn{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ B{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ b{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \ \ \isacommand{from}\isamarkupfalse%
-\ a\ \isakeyword{and}\ b\ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \ \ \isacommand{with}\isamarkupfalse%
-\ n\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{with}\isamarkupfalse%
-\ nn\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ {\isaliteral{5C3C6E6F743E}{\isasymnot}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{with}\isamarkupfalse%
-\ nn\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-Rule \isa{ccontr} (``classical contradiction'') is
-\isa{{\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C6E6F743E}{\isasymnot}}\ P\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ False{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ P}.
-Apart from demonstrating the strangeness of classical
-arguments by contradiction, this example also introduces two new
-abbreviations:
-\begin{center}
-\begin{tabular}{l@ {\quad=\quad}l}
-\isakeyword{hence} & \isakeyword{then} \isakeyword{have} \\
-\isakeyword{with}~\emph{facts} &
-\isakeyword{from}~\emph{facts} \isa{this}
-\end{tabular}
-\end{center}%
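-
-As an illustrative desugaring (not part of the original text), two steps of
-the proof above unfold like this:
-\begin{verbatim}
-with n show False ..      (* = from n this show False .. *)
-hence "\<not> A \<or> \<not> B" ..    (* = from this have "\<not> A \<or> \<not> B" .. *)
-\end{verbatim}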
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsection{Avoiding duplication%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-So far our examples have been a bit unnatural: normally we want to
-prove rules expressed with \isa{{\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}}, not \isa{{\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}}. Here is an example:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The \isakeyword{proof} always works on the conclusion,
-\isa{B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A} in our case, thus selecting $\land$-introduction. Hence
-we must show \isa{B} and \isa{A}; both are proved by
-$\land$-elimination and the proofs are separated by \isakeyword{next}:
-\begin{description}
-\item[\isakeyword{next}] deals with multiple subgoals. For example,
-when showing \isa{A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B} we need to show both \isa{A} and \isa{B}.  Each subgoal is proved separately, in \emph{any} order. The
-individual proofs are separated by \isakeyword{next}.\footnote{Each
-\isakeyword{show} must prove one of the pending subgoals.  If a
-\isakeyword{show} matches multiple subgoals, e.g.\ if the subgoals
-contain ?-variables, the first one is proved. Thus the order in which
-the subgoals are proved can matter --- see
-\S\ref{sec:CaseDistinction} for an example.}
-
-Strictly speaking \isakeyword{next} is only required if the subgoals
-are proved in different assumption contexts which need to be
-separated, which is not the case above. For clarity we
-have employed \isakeyword{next} anyway and will continue to do so.
-\end{description}
-
-This is all very well as long as formulae are small. Let us now look at some
-devices to avoid repeating (possibly large) formulae. A very general method
-is pattern matching:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}large{\isaliteral{5F}{\isacharunderscore}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ large{\isaliteral{5F}{\isacharunderscore}}B\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ large{\isaliteral{5F}{\isacharunderscore}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ large{\isaliteral{5F}{\isacharunderscore}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ {\isaliteral{28}{\isacharparenleft}}\isakeyword{is}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}AB\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ {\isaliteral{3F}{\isacharquery}}A{\isaliteral{22}{\isachardoublequoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}AB{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}AB{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Any formula may be followed by
-\isa{{\isaliteral{28}{\isacharparenleft}}}\isakeyword{is}~\emph{pattern}\isa{{\isaliteral{29}{\isacharparenright}}} which causes the pattern
-to be matched against the formula, instantiating the \isa{{\isaliteral{3F}{\isacharquery}}}-variables in
-the pattern. Subsequent uses of these variables in other terms cause
-them to be replaced by the terms they stand for.
-
-We can simplify things even more by stating the theorem by means of the
-\isakeyword{assumes} and \isakeyword{shows} elements which allow direct
-naming of assumptions:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}large{\isaliteral{5F}{\isacharunderscore}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ large{\isaliteral{5F}{\isacharunderscore}}B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}large{\isaliteral{5F}{\isacharunderscore}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ large{\isaliteral{5F}{\isacharunderscore}}A{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{28}{\isacharparenleft}}\isakeyword{is}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ {\isaliteral{3F}{\isacharquery}}A{\isaliteral{22}{\isachardoublequoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ab\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ab\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Note the difference between \isa{{\isaliteral{3F}{\isacharquery}}AB}, a term, and
-\isa{ab}, a fact.
-
-Finally we want to start the proof with $\land$-elimination so we
-don't have to perform it twice, as above. Here is a slick way to
-achieve this:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}large{\isaliteral{5F}{\isacharunderscore}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ large{\isaliteral{5F}{\isacharunderscore}}B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}large{\isaliteral{5F}{\isacharunderscore}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ large{\isaliteral{5F}{\isacharunderscore}}A{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{28}{\isacharparenleft}}\isakeyword{is}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ {\isaliteral{3F}{\isacharquery}}A{\isaliteral{22}{\isachardoublequoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{using}\isamarkupfalse%
-\ ab\isanewline
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}B{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Command \isakeyword{using} can appear before a proof
-and adds further facts to those piped into the proof. Here \isa{ab}
-is the only such fact and it triggers $\land$-elimination. Another
-frequent idiom is as follows:
-\begin{center}
-\isakeyword{from} \emph{major-facts}~
-\isakeyword{show} \emph{proposition}~
-\isakeyword{using} \emph{minor-facts}~
-\emph{proof}
-\end{center}
-
-Sometimes it is necessary to suppress the implicit application of rules in a
-\isakeyword{proof}. For example \isakeyword{show(s)}~\isa{{\isaliteral{22}{\isachardoublequote}}P\ {\isaliteral{5C3C6F723E}{\isasymor}}\ Q{\isaliteral{22}{\isachardoublequote}}}
-would trigger $\lor$-introduction, requiring us to prove \isa{P}, which may
-not be what we had in mind.
-A simple ``\isa{{\isaliteral{2D}{\isacharminus}}}'' prevents this \emph{faux pas}:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ab\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ A\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ B\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Alternatively one can feed \isa{A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B} directly
-into the proof, thus triggering the elimination rule:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ ab{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{using}\isamarkupfalse%
-\ ab\isanewline
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ A\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ B\ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Remember that eliminations have priority over
-introductions.
-
-\subsection{Avoiding names}
-
-Too many names can easily clutter a proof.  We already learned
-about \isa{this} as a means of avoiding explicit names. Another
-handy device is to refer to a fact not by name but by contents: for
-example, writing \isa{{\isaliteral{60}{\isacharbackquote}}A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B{\isaliteral{60}{\isacharbackquote}}} (enclosing the formula in back quotes)
-refers to the fact \isa{A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B}
-without the need to name it. Here is a simple example, a revised version
-of the previous proof%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C6F723E}{\isasymor}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{using}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B{\isaliteral{60}{\isacharbackquoteclose}}%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent which continues as before.
-
-Clearly, this device of quoting facts by contents is only advisable
-for small formulae. In such cases it is superior to naming because the
-reader immediately sees what the fact is without needing to search for
-it in the preceding proof text.
-
-The assumptions of a lemma can also be referred to via their
-predefined name \isa{assms}. Hence the \isa{{\isaliteral{60}{\isacharbackquote}}A\ {\isaliteral{5C3C6F723E}{\isasymor}}\ B{\isaliteral{60}{\isacharbackquote}}} in the
-previous proof can also be replaced by \isa{assms}. Note that \isa{assms} refers to the list of \emph{all} assumptions. To pick out a
-specific one, say the second, write \isa{assms{\isaliteral{28}{\isacharparenleft}}{\isadigit{2}}{\isaliteral{29}{\isacharparenright}}}.
-
-This indexing notation $name(.)$ works for any $name$ that stands for
-a list of facts, for example $f$\isa{{\isaliteral{2E}{\isachardot}}simps}, the equations of the
-recursively defined function $f$. You may also select sublists by writing
-$name(2-3)$.
-
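-As an illustrative sketch (not part of the original text), here is a small
-lemma whose assumptions are picked out by position:
-\begin{verbatim}
-lemma assumes "A" and "B" and "C" shows "C \<and> A"
-proof                      (* conjI is selected automatically *)
-  from assms(3) show "C" .
-next
-  from assms(1) show "A" .
-qed
-\end{verbatim}
-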
-Above we recommended the UNIX-pipe model (i.e.\ \isa{this}) to avoid
-the need to name propositions. But frequently we needed to feed more
-than one previously derived fact into a proof step. Then the UNIX-pipe
-model appears to break down and we need to name the different facts to
-refer to them. But this can be avoided:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{moreover}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}A\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{ultimately}\isamarkupfalse%
-\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}B\ {\isaliteral{5C3C616E643E}{\isasymand}}\ A{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent You can combine any number of facts \isa{A{\isadigit{1}}} \dots\ \isa{An} into a sequence by separating their proofs with
-\isakeyword{moreover}. After the final fact, \isakeyword{ultimately} stands
-for \isakeyword{from}~\isa{A{\isadigit{1}}}~\dots~\isa{An}.  This avoids having to
-introduce names for all of the sequence elements.
-
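-As an illustrative sketch (not part of the original text), the same idea
-with three facts:
-\begin{verbatim}
-lemma assumes a: "A" and b: "B" and c: "C" shows "C \<and> B \<and> A"
-proof -
-  from c have "C" .
-  moreover
-  from b have "B" .
-  moreover
-  from a have "A" .
-  ultimately show ?thesis by simp   (* = from the three facts above *)
-qed
-\end{verbatim}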
-
-\subsection{Predicate calculus}
-
-Command \isakeyword{fix} introduces new local variables into a
-proof. The pair \isakeyword{fix}-\isakeyword{show} corresponds to \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}}
-(the universal quantifier at the
-meta-level) just like \isakeyword{assume}-\isakeyword{show} corresponds to
-\isa{{\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}}. Here is a sample proof, annotated with the rules that are
-applied implicitly:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ P{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x{\isaliteral{2E}{\isachardot}}\ P\ x{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x{\isaliteral{2E}{\isachardot}}\ P{\isaliteral{28}{\isacharparenleft}}f\ x{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ %
-\isamarkupcmt{\isa{allI}: \isa{{\isaliteral{28}{\isacharparenleft}}{\isaliteral{5C3C416E643E}{\isasymAnd}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}P\ x{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}P\ x}%
-}
-\isanewline
-\ \ \isacommand{fix}\isamarkupfalse%
-\ a\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ P\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}P{\isaliteral{28}{\isacharparenleft}}f\ a{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\ \ %
-\isamarkupcmt{\isa{allE}: \isa{{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}P\ x{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{3F}{\isacharquery}}x\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}R{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}R}%
-}
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Note that in the proof we have chosen to call the bound
-variable \isa{a} instead of \isa{x} merely to show that the choice of
-local names is irrelevant.
-
-Next we look at \isa{{\isaliteral{5C3C6578697374733E}{\isasymexists}}} which is a bit more tricky.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ Pf{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ P{\isaliteral{28}{\isacharparenleft}}f\ x{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}y{\isaliteral{2E}{\isachardot}}\ P\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ Pf\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\ \ \ \ \ \ \ \ \ \ \ \ \ \ %
-\isamarkupcmt{\isa{exE}: \isa{{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}P\ x{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{5C3C416E643E}{\isasymAnd}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}P\ x\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}Q}%
-}
-\isanewline
-\ \ \ \ \isacommand{fix}\isamarkupfalse%
-\ x\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}P{\isaliteral{28}{\isacharparenleft}}f\ x{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\ \ %
-\isamarkupcmt{\isa{exI}: \isa{{\isaliteral{3F}{\isacharquery}}P\ {\isaliteral{3F}{\isacharquery}}x\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}P\ x}%
-}
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Explicit $\exists$-elimination as seen above can become
-cumbersome in practice.  The derived Isar language element
-\isakeyword{obtain} provides a more appealing form of generalised
-existence reasoning:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ Pf{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ P{\isaliteral{28}{\isacharparenleft}}f\ x{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}y{\isaliteral{2E}{\isachardot}}\ P\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ Pf\ \isacommand{obtain}\isamarkupfalse%
-\ x\ \isakeyword{where}\ {\isaliteral{22}{\isachardoublequoteopen}}P{\isaliteral{28}{\isacharparenleft}}f\ x{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}y{\isaliteral{2E}{\isachardot}}\ P\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Note how the proof text follows the usual mathematical style
-of concluding $P(x)$ from $\exists x. P(x)$, while carefully introducing $x$
-as a new local variable.  Technically, \isakeyword{obtain} is similar to
-\isakeyword{fix} and \isakeyword{assume} together with a soundness proof of
-the elimination involved.
-
-Here is a proof of a well known tautology.
-Which rule is used where?%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ ex{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ {\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}y{\isaliteral{2E}{\isachardot}}\ P\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}y{\isaliteral{2E}{\isachardot}}\ {\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ P\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{fix}\isamarkupfalse%
-\ y\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ ex\ \isacommand{obtain}\isamarkupfalse%
-\ x\ \isakeyword{where}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}y{\isaliteral{2E}{\isachardot}}\ P\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}P\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}x{\isaliteral{2E}{\isachardot}}\ P\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isamarkupsubsection{Making bigger steps%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-So far we have confined ourselves to single step proofs. Of course
-powerful automatic methods can be used just as well. Here is an example,
-Cantor's theorem that there is no surjective function from a set to its
-powerset:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{theorem}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}S{\isaliteral{2E}{\isachardot}}\ S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ {\isaliteral{28}{\isacharparenleft}}f\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ set{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{let}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{7B}{\isacharbraceleft}}x{\isaliteral{2E}{\isachardot}}\ x\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ f\ x{\isaliteral{7D}{\isacharbraceright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ f{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{5C3C696E3E}{\isasymin}}\ range\ f{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{then}\isamarkupfalse%
-\ \isacommand{obtain}\isamarkupfalse%
-\ y\ \isakeyword{where}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ f\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{show}\isamarkupfalse%
-\ False\isanewline
-\ \ \ \ \isacommand{proof}\isamarkupfalse%
-\ cases\isanewline
-\ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C696E3E}{\isasymin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{with}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ f\ y{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{by}\isamarkupfalse%
-\ blast\isanewline
-\ \ \ \ \isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{with}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ f\ y{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{by}\isamarkupfalse%
-\ blast\isanewline
-\ \ \ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-For a start, the example demonstrates two new constructs:
-\begin{itemize}
-\item \isakeyword{let} introduces an abbreviation for a term, in our case
-the witness for the claim.
-\item Proof by \isa{cases} starts a proof by cases. Note that it remains
-implicit what the two cases are: it is merely expected that the two subproofs
-prove \isa{P\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}thesis} and \isa{{\isaliteral{5C3C6E6F743E}{\isasymnot}}P\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}thesis} (in that order)
-for some \isa{P}.
-\end{itemize}
-If you wonder how to \isakeyword{obtain} \isa{y}:
-via the predefined elimination rule \isa{{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}b\ {\isaliteral{5C3C696E3E}{\isasymin}}\ range\ f{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{5C3C416E643E}{\isasymAnd}}x{\isaliteral{2E}{\isachardot}}\ b\ {\isaliteral{3D}{\isacharequal}}\ f\ x\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ P{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ P}.
-
-Method \isa{blast} is used because the contradiction does not follow easily
-by just a single rule. If you find the proof too cryptic for human
-consumption, here is a more detailed version; the beginning up to
-\isakeyword{obtain} stays unchanged.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{theorem}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}S{\isaliteral{2E}{\isachardot}}\ S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ {\isaliteral{28}{\isacharparenleft}}f\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ set{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{let}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{7B}{\isacharbraceleft}}x{\isaliteral{2E}{\isachardot}}\ x\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ f\ x{\isaliteral{7D}{\isacharbraceright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ f{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{5C3C696E3E}{\isasymin}}\ range\ f{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{then}\isamarkupfalse%
-\ \isacommand{obtain}\isamarkupfalse%
-\ y\ \isakeyword{where}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ f\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{{\isaliteral{2E}{\isachardot}}{\isaliteral{2E}{\isachardot}}}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{show}\isamarkupfalse%
-\ False\isanewline
-\ \ \ \ \isacommand{proof}\isamarkupfalse%
-\ cases\isanewline
-\ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C696E3E}{\isasymin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ f\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \ \ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{22}{\isachardoublequoteclose}}\ \ \ \ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}simp\ add{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{60}{\isacharbackquoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ f\ y{\isaliteral{60}{\isacharbackquoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \ \ \isacommand{with}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}y\ {\isaliteral{5C3C696E3E}{\isasymin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{by}\isamarkupfalse%
-\ contradiction\isanewline
-\ \ \ \ \isacommand{next}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C696E3E}{\isasymin}}\ f\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \ \ \isacommand{by}\isamarkupfalse%
-\ simp\isanewline
-\ \ \ \ \ \ \isacommand{hence}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}y\ {\isaliteral{5C3C696E3E}{\isasymin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{22}{\isachardoublequoteclose}}\ \ \ \ \isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}simp\ add{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{60}{\isacharbackquoteopen}}{\isaliteral{3F}{\isacharquery}}S\ {\isaliteral{3D}{\isacharequal}}\ f\ y{\isaliteral{60}{\isacharbackquoteclose}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \ \ \isacommand{with}\isamarkupfalse%
-\ {\isaliteral{60}{\isacharbackquoteopen}}y\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ {\isaliteral{3F}{\isacharquery}}S{\isaliteral{60}{\isacharbackquoteclose}}\ \isacommand{show}\isamarkupfalse%
-\ False\ \isacommand{by}\isamarkupfalse%
-\ contradiction\isanewline
-\ \ \ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Method \isa{contradiction} succeeds if both $P$ and
-$\neg P$ are among the assumptions and the facts fed into that step, in any order.
-
-As it happens, Cantor's theorem can be proved automatically by best-first
-search. Depth-first search would diverge, but best-first search successfully
-navigates through the large search space:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{theorem}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}S{\isaliteral{2E}{\isachardot}}\ S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ {\isaliteral{28}{\isacharparenleft}}f\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ set{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{by}\isamarkupfalse%
-\ best%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isamarkupsubsection{Raw proof blocks%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Although we have shown how to employ powerful automatic methods like
-\isa{blast} to achieve bigger proof steps, there may still be the
-tendency to use the default introduction and elimination rules to
-decompose goals and facts. This can lead to very tedious proofs:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x\ y{\isaliteral{2E}{\isachardot}}\ A\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ x\ y\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{fix}\isamarkupfalse%
-\ x\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}y{\isaliteral{2E}{\isachardot}}\ A\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ x\ y\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{fix}\isamarkupfalse%
-\ y\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ x\ y\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \ \ \ \ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{sorry}\isamarkupfalse%
-\isanewline
-\ \ \ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent Since we are only interested in the decomposition and not the
-actual proof, the latter has been replaced by
-\isakeyword{sorry}. Command \isakeyword{sorry} proves anything but is
-only allowed in quick and dirty mode, the default interactive mode. It
-is very convenient for top-down proof development.
-
-Luckily we can avoid this step by step decomposition very easily:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x\ y{\isaliteral{2E}{\isachardot}}\ A\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ x\ y\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y{\isaliteral{2E}{\isachardot}}\ {\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}\ A\ x\ y{\isaliteral{3B}{\isacharsemicolon}}\ B\ x\ y\ {\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \ \ \isacommand{fix}\isamarkupfalse%
-\ x\ y\ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{22}{\isachardoublequoteopen}}B\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{sorry}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{qed}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{by}\isamarkupfalse%
-\ blast\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-This can be simplified further by \emph{raw proof blocks}, i.e.\
-proofs enclosed in braces:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C666F72616C6C3E}{\isasymforall}}x\ y{\isaliteral{2E}{\isachardot}}\ A\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ B\ x\ y\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{{\isaliteral{7B}{\isacharbraceleft}}}\isamarkupfalse%
-\ \isacommand{fix}\isamarkupfalse%
-\ x\ y\ \isacommand{assume}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{22}{\isachardoublequoteopen}}B\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}C\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isacommand{sorry}\isamarkupfalse%
-\ \isacommand{{\isaliteral{7D}{\isacharbraceright}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{thus}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{by}\isamarkupfalse%
-\ blast\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The result of the raw proof block is the same theorem
-as above, namely \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y{\isaliteral{2E}{\isachardot}}\ {\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}A\ x\ y{\isaliteral{3B}{\isacharsemicolon}}\ B\ x\ y{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ C\ x\ y}.  Raw
-proof blocks are like ordinary proofs except that they do not prove an
-explicitly stated property; instead, the property emerges directly
-from the \isakeyword{fixe}s, \isakeyword{assume}s and
-\isakeyword{have} in the block. Thus they again serve to avoid
-duplication. Note that the conclusion of a raw proof block is stated with
-\isakeyword{have} rather than \isakeyword{show} because it is not the
-conclusion of some pending goal but some independent claim.
-
-The general idea demonstrated in this subsection is very
-important in Isar and distinguishes it from \isa{apply}-style proofs:
-\begin{quote}\em
-Do not manipulate the proof state into a particular form by applying
-proof methods but state the desired form explicitly and let the proof
-methods verify that from this form the original goal follows.
-\end{quote}
-This yields more readable and also more robust proofs.
-
-\subsubsection{General case distinctions}
-
-As an important application of raw proof blocks we show how to deal
-with general case distinctions --- more specific kinds are treated in
-\S\ref{sec:CaseDistinction}. Imagine that you would like to prove some
-goal by distinguishing $n$ cases $P_1$, \dots, $P_n$. You show that
-the $n$ cases are exhaustive (i.e.\ $P_1 \lor \dots \lor P_n$) and
-that each case $P_i$ implies the goal. Taken together, this proves the
-goal. The corresponding Isar proof pattern (for $n = 3$) is very handy:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\renewcommand{\isamarkupcmt}[1]{#1}
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}P\isaliteral{5C3C5E697375623E}{}\isactrlisub {\isadigit{1}}\ {\isaliteral{5C3C6F723E}{\isasymor}}\ P\isaliteral{5C3C5E697375623E}{}\isactrlisub {\isadigit{2}}\ {\isaliteral{5C3C6F723E}{\isasymor}}\ P\isaliteral{5C3C5E697375623E}{}\isactrlisub {\isadigit{3}}{\isaliteral{22}{\isachardoublequoteclose}}\ \ %
-\isamarkupcmt{\dots%
-}
-\isanewline
-\ \ \isacommand{moreover}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{{\isaliteral{7B}{\isacharbraceleft}}}\isamarkupfalse%
-\ \isacommand{assume}\isamarkupfalse%
-\ P\isaliteral{5C3C5E697375623E}{}\isactrlisub {\isadigit{1}}\isanewline
-\ \ \ \ %
-\isamarkupcmt{\dots%
-}
-\isanewline
-\ \ \ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \ %
-\isamarkupcmt{\dots%
-}
-\ \isacommand{{\isaliteral{7D}{\isacharbraceright}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{moreover}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{{\isaliteral{7B}{\isacharbraceleft}}}\isamarkupfalse%
-\ \isacommand{assume}\isamarkupfalse%
-\ P\isaliteral{5C3C5E697375623E}{}\isactrlisub {\isadigit{2}}\isanewline
-\ \ \ \ %
-\isamarkupcmt{\dots%
-}
-\isanewline
-\ \ \ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \ %
-\isamarkupcmt{\dots%
-}
-\ \isacommand{{\isaliteral{7D}{\isacharbraceright}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{moreover}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{{\isaliteral{7B}{\isacharbraceleft}}}\isamarkupfalse%
-\ \isacommand{assume}\isamarkupfalse%
-\ P\isaliteral{5C3C5E697375623E}{}\isactrlisub {\isadigit{3}}\isanewline
-\ \ \ \ %
-\isamarkupcmt{\dots%
-}
-\isanewline
-\ \ \ \ \isacommand{have}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \ %
-\isamarkupcmt{\dots%
-}
-\ \isacommand{{\isaliteral{7D}{\isacharbraceright}}}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{ultimately}\isamarkupfalse%
-\ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{3F}{\isacharquery}}thesis\ \isacommand{by}\isamarkupfalse%
-\ blast\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\renewcommand{\isamarkupcmt}[1]{{\isastylecmt--- #1}}
-%
-\isamarkupsubsection{Further refinements%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-This subsection discusses some further tricks that can make
-life easier although they are not essential.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{\isakeyword{and}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Propositions (following \isakeyword{assume} etc) may but need not be
-separated by \isakeyword{and}. This is not just for readability
-(\isakeyword{from} \isa{A} \isakeyword{and} \isa{B} looks nicer than
-\isakeyword{from} \isa{A} \isa{B}) but also for structuring lists of propositions
-into possibly named blocks. In
-\begin{center}
-\isakeyword{assume} \isa{A:} $A_1$ $A_2$ \isakeyword{and} \isa{B:} $A_3$
-\isakeyword{and} $A_4$
-\end{center}
-label \isa{A} refers to the list of propositions $A_1$ $A_2$ and
-label \isa{B} to $A_3$.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{\isakeyword{note}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-If you want to remember intermediate fact(s) that cannot be
-named directly, use \isakeyword{note}. For example, the result of a raw
-proof block can be named by following it with
-\isakeyword{note}~\isa{some{\isaliteral{5F}{\isacharunderscore}}name\ {\isaliteral{3D}{\isacharequal}}\ this}.  As a side effect,
-\isa{this} is set to the list of facts on the right-hand side. You
-can also say \isa{note\ some{\isaliteral{5F}{\isacharunderscore}}fact}, which simply sets \isa{this},
-i.e.\ recalls \isa{some{\isaliteral{5F}{\isacharunderscore}}fact}, e.g.\ in a \isakeyword{moreover} sequence.%
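-
-For instance (using \isa{lem} as a purely illustrative name), the raw proof
-block shown earlier can be recorded like this:
-\begin{quote}
-\isakeyword{\{} \isakeyword{fix} \isa{x y} \isakeyword{assume} \isa{"A x y"} \isa{"B x y"}\\
-\hspace*{2ex}\isakeyword{have} \isa{"C x y"} \isakeyword{sorry} \isakeyword{\}}\\
-\isakeyword{note} \isa{lem = this}
-\end{quote}
-From then on \isa{lem} refers to the exported fact
-$\bigwedge x\ y.\ [\![ A\ x\ y;\ B\ x\ y ]\!] \Longrightarrow C\ x\ y$.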
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{\isakeyword{fixes}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Sometimes it is necessary to decorate a proposition with type
-constraints, as in Cantor's theorem above. These type constraints tend
-to make the theorem less readable. The situation can be improved a
-little by combining the type constraint with an outer \isa{{\isaliteral{5C3C416E643E}{\isasymAnd}}}:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{theorem}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C416E643E}{\isasymAnd}}f\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ set{\isaliteral{2E}{\isachardot}}\ {\isaliteral{5C3C6578697374733E}{\isasymexists}}S{\isaliteral{2E}{\isachardot}}\ S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ f{\isaliteral{22}{\isachardoublequoteclose}}%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent However, now \isa{f} is bound and we need a
-\isakeyword{fix}~\isa{f} in the proof before we can refer to \isa{f}.
-This is avoided by \isakeyword{fixes}:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{theorem}\isamarkupfalse%
-\ \isakeyword{fixes}\ f\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ set{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}S{\isaliteral{2E}{\isachardot}}\ S\ {\isaliteral{5C3C6E6F74696E3E}{\isasymnotin}}\ range\ f{\isaliteral{22}{\isachardoublequoteclose}}%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent
-Even better, \isakeyword{fixes} allows you to introduce concrete syntax locally:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ comm{\isaliteral{5F}{\isacharunderscore}}mono{\isaliteral{3A}{\isacharcolon}}\isanewline
-\ \ \isakeyword{fixes}\ r\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ bool{\isaliteral{22}{\isachardoublequoteclose}}\ {\isaliteral{28}{\isacharparenleft}}\isakeyword{infix}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{3E}{\isachargreater}}{\isaliteral{22}{\isachardoublequoteclose}}\ {\isadigit{6}}{\isadigit{0}}{\isaliteral{29}{\isacharparenright}}\ \isakeyword{and}\isanewline
-\ \ \ \ \ \ \ f\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a\ {\isaliteral{5C3C52696768746172726F773E}{\isasymRightarrow}}\ {\isaliteral{27}{\isacharprime}}a{\isaliteral{22}{\isachardoublequoteclose}}\ \ \ {\isaliteral{28}{\isacharparenleft}}\isakeyword{infixl}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}{\isaliteral{22}{\isachardoublequoteclose}}\ {\isadigit{7}}{\isadigit{0}}{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \isakeyword{assumes}\ comm{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}{\isaliteral{27}{\isacharprime}}a{\isaliteral{2E}{\isachardot}}\ x\ {\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}\ y\ {\isaliteral{3D}{\isacharequal}}\ y\ {\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}\ x{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\isanewline
-\ \ \ \ \ \ \ \ \ \ mono{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y\ z{\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}{\isaliteral{27}{\isacharprime}}a{\isaliteral{2E}{\isachardot}}\ x\ {\isaliteral{3E}{\isachargreater}}\ y\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ x\ {\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}\ z\ {\isaliteral{3E}{\isachargreater}}\ y\ {\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}\ z{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}x\ {\isaliteral{3E}{\isachargreater}}\ y\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ z\ {\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}\ x\ {\isaliteral{3E}{\isachargreater}}\ z\ {\isaliteral{2B}{\isacharplus}}{\isaliteral{2B}{\isacharplus}}\ y{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{by}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}simp\ add{\isaliteral{3A}{\isacharcolon}}\ comm\ mono{\isaliteral{29}{\isacharparenright}}%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent The concrete syntax is dropped at the end of the proof and the
-theorem becomes \begin{isabelle}%
-{\isaliteral{5C3C6C6272616B6B3E}{\isasymlbrakk}}{\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}f\ x\ y\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{3F}{\isacharquery}}f\ y\ x{\isaliteral{3B}{\isacharsemicolon}}\isanewline
-\isaindent{\ }{\isaliteral{5C3C416E643E}{\isasymAnd}}x\ y\ z{\isaliteral{2E}{\isachardot}}\ {\isaliteral{3F}{\isacharquery}}r\ x\ y\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}r\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{3F}{\isacharquery}}f\ x\ z{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{3F}{\isacharquery}}f\ y\ z{\isaliteral{29}{\isacharparenright}}{\isaliteral{3B}{\isacharsemicolon}}\ {\isaliteral{3F}{\isacharquery}}r\ {\isaliteral{3F}{\isacharquery}}x\ {\isaliteral{3F}{\isacharquery}}y{\isaliteral{5C3C726272616B6B3E}{\isasymrbrakk}}\isanewline
-{\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ {\isaliteral{3F}{\isacharquery}}r\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{3F}{\isacharquery}}f\ {\isaliteral{3F}{\isacharquery}}z\ {\isaliteral{3F}{\isacharquery}}x{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{28}{\isacharparenleft}}{\isaliteral{3F}{\isacharquery}}f\ {\isaliteral{3F}{\isacharquery}}z\ {\isaliteral{3F}{\isacharquery}}y{\isaliteral{29}{\isacharparenright}}%
-\end{isabelle}
-\tweakskip%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{\isakeyword{obtain}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-The \isakeyword{obtain} construct can introduce multiple
-witnesses and propositions as in the following proof fragment:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ \isakeyword{assumes}\ A{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{5C3C6578697374733E}{\isasymexists}}x\ y{\isaliteral{2E}{\isachardot}}\ P\ x\ y\ {\isaliteral{5C3C616E643E}{\isasymand}}\ Q\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{shows}\ {\isaliteral{22}{\isachardoublequoteopen}}R{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\ {\isaliteral{2D}{\isacharminus}}\isanewline
-\ \ \isacommand{from}\isamarkupfalse%
-\ A\ \isacommand{obtain}\isamarkupfalse%
-\ x\ y\ \isakeyword{where}\ P{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}P\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \isakeyword{and}\ Q{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}Q\ x\ y{\isaliteral{22}{\isachardoublequoteclose}}\ \ \isacommand{by}\isamarkupfalse%
-\ blast%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-Remember also that one does not even need to start with a formula
-containing \isa{{\isaliteral{5C3C6578697374733E}{\isasymexists}}} as we saw in the proof of Cantor's theorem.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{Combining proof styles%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Finally, whole \isa{apply}-scripts may appear in the leaves of the
-proof tree, although this is best avoided.  Here is a contrived example:%
-\end{isamarkuptext}%
-\isamarkuptrue%
-\isacommand{lemma}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ {\isaliteral{28}{\isacharparenleft}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\isatagproof
-\isacommand{proof}\isamarkupfalse%
-\isanewline
-\ \ \isacommand{assume}\isamarkupfalse%
-\ a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{22}{\isachardoublequoteopen}}A{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \isacommand{show}\isamarkupfalse%
-\ {\isaliteral{22}{\isachardoublequoteopen}}{\isaliteral{28}{\isacharparenleft}}A\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}B{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C6C6F6E6772696768746172726F773E}{\isasymlongrightarrow}}\ B{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
-\ \ \ \ \isacommand{apply}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}rule\ impI{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \isacommand{apply}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}erule\ impE{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \isacommand{apply}\isamarkupfalse%
-{\isaliteral{28}{\isacharparenleft}}rule\ a{\isaliteral{29}{\isacharparenright}}\isanewline
-\ \ \ \ \isacommand{apply}\isamarkupfalse%
-\ assumption\isanewline
-\ \ \ \ \isacommand{done}\isamarkupfalse%
-\isanewline
-\isacommand{qed}\isamarkupfalse%
-%
-\endisatagproof
-{\isafoldproof}%
-%
-\isadelimproof
-%
-\endisadelimproof
-%
-\begin{isamarkuptext}%
-\noindent You may need to resort to this technique if an
-automatic step fails to prove the desired proposition.
-
-When converting a proof from \isa{apply}-style into Isar you can proceed
-in a top-down manner: parts of the proof can be left in script form
-while the outer structure is already expressed in Isar.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-%
-\isatagtheory
-%
-\endisatagtheory
-{\isafoldtheory}%
-%
-\isadelimtheory
-%
-\endisadelimtheory
-\end{isabellebody}%
-%%% Local Variables:
-%%% mode: latex
-%%% TeX-master: "root"
-%%% End:
--- a/doc-src/IsarOverview/Isar/document/Makefile	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,26 +0,0 @@
-
-## targets
-
-default: dvi
-
-## dependencies
-
-include ../../../Makefile.in
-
-dvi: ../../isar-overview.dvi
-
-../../isar-overview.dvi: *.tex *.bib
-	$(LATEX) root
-	$(BIBTEX) root
-	$(LATEX) root
-	$(LATEX) root
-	mv root.dvi ../../isar-overview.dvi
-
-pdf: ../../isar-overview.pdf
-
-../../isar-overview.pdf: *.tex *.bib
-	$(PDFLATEX) root
-	$(BIBTEX) root
-	$(PDFLATEX) root
-	$(PDFLATEX) root
-	mv root.pdf ../../isar-overview.pdf
\ No newline at end of file
--- a/doc-src/IsarOverview/Isar/document/intro.tex	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,127 +0,0 @@
-\section{Introduction}
-
-This is a tutorial introduction to structured proofs in Isabelle/HOL.
-It introduces the core of the proof language Isar by example. Isar is
-an extension of the \isa{apply}-style proofs introduced in the
-Isabelle/HOL tutorial~\cite{LNCS2283} with structured proofs in a
-stylised language of mathematics.  These proofs are readable for both
-human and machine.
-
-\subsection{A first glimpse of Isar}
-Below you find a simplified grammar for Isar proofs.
-Parentheses are used for grouping and $^?$ indicates an optional item:
-\begin{center}
-\begin{tabular}{lrl}
-\emph{proof} & ::= & \isakeyword{proof} \emph{method}$^?$
-                     \emph{statement}*
-                     \isakeyword{qed} \\
-                 &$\mid$& \isakeyword{by} \emph{method}\\[1ex]
-\emph{statement} &::= & \isakeyword{fix} \emph{variables} \\
-             &$\mid$& \isakeyword{assume} \emph{propositions} \\
-             &$\mid$& (\isakeyword{from} \emph{fact}*)$^?$ 
-                    (\isakeyword{show} $\mid$ \isakeyword{have})
-                      \emph{propositions} \emph{proof} \\[1ex]
-\emph{proposition} &::=& (\emph{label}{\bf:})$^?$ \emph{string} \\[1ex]
-\emph{fact} &::=& \emph{label}
-\end{tabular}
-\end{center}
-A proof can be either compound (\isakeyword{proof} --
-\isakeyword{qed}) or atomic (\isakeyword{by}). A \emph{method} is a
-proof method.
-
-This is a typical proof skeleton:
-\begin{center}
-\begin{tabular}{@{}ll}
-\isakeyword{proof}\\
-\hspace*{3ex}\isakeyword{assume} \isa{"}\emph{the-assm}\isa{"}\\
-\hspace*{3ex}\isakeyword{have} \isa{"}\dots\isa{"} & --- intermediate result\\
-\hspace*{3ex}\vdots\\
-\hspace*{3ex}\isakeyword{have} \isa{"}\dots\isa{"} & --- intermediate result\\
-\hspace*{3ex}\isakeyword{show} \isa{"}\emph{the-concl}\isa{"}\\
-\isakeyword{qed}
-\end{tabular}
-\end{center}
-It proves \emph{the-assm}~$\Longrightarrow$~\emph{the-concl}. Text starting with
-``---'' is a comment. The intermediate \isakeyword{have}s are only
-there to bridge the gap between the assumption and the conclusion and
-do not contribute to the theorem being proved. In contrast,
-\isakeyword{show} establishes the conclusion of the theorem.
-
-\subsection{Background}
-
-Interactive theorem proving has been dominated by a model of proof
-that goes back to the LCF system~\cite{LCF}: a proof is a more or less
-structured sequence of commands that manipulate an implicit proof
-state. Thus the proof text is only suitable for the machine; for a
-human, the proof only comes alive when he can see the state changes
-caused by the stepwise execution of the commands. Such proofs are like
-uncommented assembly language programs. Their Isabelle incarnation is a
-sequence of \isa{apply}-commands.
-
-In contrast there is the model of a mathematics-like proof language
-pioneered in the Mizar system~\cite{Rudnicki92} and followed by
-Isar~\cite{WenzelW-JAR}.
-The most important arguments in favour of this style are
-\emph{communication} and \emph{maintenance}: structured proofs are
-immensely more readable and maintainable than \isa{apply}-scripts.
-
-For reading this tutorial you should be familiar with natural
-deduction and the basics of Isabelle/HOL~\cite{LNCS2283} although we
-summarize the most important aspects of Isabelle below.  The
-definitive Isar reference is its manual~\cite{Isar-Ref-Man}. For an
-example-based account of Isar's support for reasoning by chains of
-(in)equations see~\cite{BauerW-TPHOLs01}.
-
-
-\subsection{Bits of Isabelle}
-
-Isabelle's meta-logic comes with a type of \emph{propositions} with
-implication $\Longrightarrow$ and a universal quantifier $\bigwedge$ for expressing
-inference rules and generality.  Iterated implications $A_1 \Longrightarrow \dots
-A_n \Longrightarrow A$ may be abbreviated to $[\![ A_1; \dots; A_n ]\!] \Longrightarrow A$.
-Applying a theorem $A \Longrightarrow B$ (named \isa{T}) to a theorem $A$ (named
-\isa{U}) is written \isa{T[OF U]} and yields theorem $B$.
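-For instance, if \isa{T} is the theorem $[\![ A_1; A_2 ]\!] \Longrightarrow A$ and \isa{U} proves
-$A_1$, then \isa{T[OF U]} denotes the theorem $A_2 \Longrightarrow A$: premises that are not
-resolved simply remain.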
-
-Isabelle terms are simply typed. Function types are
-written $\tau_1 \Rightarrow \tau_2$.
-
-Free variables that may be instantiated (``logical variables'' in Prolog
-parlance) are prefixed with a \isa{?}. Typically, theorems are stated with
-ordinary free variables but after the proof those are automatically replaced
-by \isa{?}-variables. Thus the theorem can be used with arbitrary instances
-of its free variables.
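-For example, a lemma proved in the form \isa{rev(rev xs) = xs} is stored as
-\isa{rev(rev ?xs) = ?xs} and can later be instantiated with any concrete list for
-\isa{?xs}.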
-
-Isabelle/HOL offers all the usual logical symbols like $\longrightarrow$, $\land$,
-$\forall$ etc. HOL formulae are propositions, e.g.\ $\forall$ can appear below
-$\Longrightarrow$, but not the other way around. Beware that $\longrightarrow$ binds more
-tightly than $\Longrightarrow$: in $\forall x. P \longrightarrow Q$ the $\forall x$ covers $P \longrightarrow Q$, whereas
-in $\forall x. P \Longrightarrow Q$ it covers only $P$.
-
-Proof methods include \isa{rule} (which performs a backwards
-step with a given rule, unifying the conclusion of the rule with the
-current subgoal and replacing the subgoal by the premises of the
-rule), \isa{simp} (for simplification) and \isa{blast} (for predicate
-calculus reasoning).
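-For instance, \isa{(rule conjI)} turns a goal $A \land B$ into the two subgoals $A$ and
-$B$, \isa{simp} proves \isa{2 + 2 = (4::nat)}, and \isa{blast} proves
-$(\forall x.\ P\ x) \longrightarrow P\ a$.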
-
-\subsection{Advice}
-
-A word of warning for potential writers of Isar proofs.  It
-is easier to write obscure rather than readable texts.  Similarly,
-\isa{apply}-style proofs are sometimes easier to write than readable
-ones: structure does not emerge automatically but needs to be
-understood and imposed. If the precise structure of the proof is
-unclear at the beginning, it can be useful to start with \isa{apply} for
-exploratory purposes until one has found a proof which can be
-converted into a structured text in a second step. Top-down conversion
-is possible because Isar allows \isa{apply}-style proofs as components
-of structured ones.
-
-Finally, do not be misled by the simplicity of the formulae being proved,
-especially in the beginning. Isar has been used very successfully in
-large applications, for example the formalisation of Java
-dialects~\cite{KleinN-TOPLAS}.
-\medskip
-
-The rest of this tutorial is divided into two parts.
-Section~\ref{sec:Logic} introduces proofs in pure logic based on
-natural deduction. Section~\ref{sec:Induct} is dedicated to induction.
--- a/doc-src/IsarOverview/Isar/document/llncs.cls	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1189 +0,0 @@
-% LLNCS DOCUMENT CLASS -- version 2.13 (28-Jan-2002)
-% Springer Verlag LaTeX2e support for Lecture Notes in Computer Science
-%
-%%
-%% \CharacterTable
-%%  {Upper-case    \A\B\C\D\E\F\G\H\I\J\K\L\M\N\O\P\Q\R\S\T\U\V\W\X\Y\Z
-%%   Lower-case    \a\b\c\d\e\f\g\h\i\j\k\l\m\n\o\p\q\r\s\t\u\v\w\x\y\z
-%%   Digits        \0\1\2\3\4\5\6\7\8\9
-%%   Exclamation   \!     Double quote  \"     Hash (number) \#
-%%   Dollar        \$     Percent       \%     Ampersand     \&
-%%   Acute accent  \'     Left paren    \(     Right paren   \)
-%%   Asterisk      \*     Plus          \+     Comma         \,
-%%   Minus         \-     Point         \.     Solidus       \/
-%%   Colon         \:     Semicolon     \;     Less than     \<
-%%   Equals        \=     Greater than  \>     Question mark \?
-%%   Commercial at \@     Left bracket  \[     Backslash     \\
-%%   Right bracket \]     Circumflex    \^     Underscore    \_
-%%   Grave accent  \`     Left brace    \{     Vertical bar  \|
-%%   Right brace   \}     Tilde         \~}
-%%
-\NeedsTeXFormat{LaTeX2e}[1995/12/01]
-\ProvidesClass{llncs}[2002/01/28 v2.13
-^^J LaTeX document class for Lecture Notes in Computer Science]
-% Options
-\let\if@envcntreset\iffalse
-\DeclareOption{envcountreset}{\let\if@envcntreset\iftrue}
-\DeclareOption{citeauthoryear}{\let\citeauthoryear=Y}
-\DeclareOption{oribibl}{\let\oribibl=Y}
-\let\if@custvec\iftrue
-\DeclareOption{orivec}{\let\if@custvec\iffalse}
-\let\if@envcntsame\iffalse
-\DeclareOption{envcountsame}{\let\if@envcntsame\iftrue}
-\let\if@envcntsect\iffalse
-\DeclareOption{envcountsect}{\let\if@envcntsect\iftrue}
-\let\if@runhead\iffalse
-\DeclareOption{runningheads}{\let\if@runhead\iftrue}
-
-\let\if@openbib\iffalse
-\DeclareOption{openbib}{\let\if@openbib\iftrue}
-
-% languages
-\let\switcht@@therlang\relax
-\def\ds@deutsch{\def\switcht@@therlang{\switcht@deutsch}}
-\def\ds@francais{\def\switcht@@therlang{\switcht@francais}}
-
-\DeclareOption*{\PassOptionsToClass{\CurrentOption}{article}}
-
-\ProcessOptions
-
-\LoadClass[twoside]{article}
-\RequirePackage{multicol} % needed for the list of participants, index
-
-\setlength{\textwidth}{12.2cm}
-\setlength{\textheight}{19.3cm}
-\renewcommand\@pnumwidth{2em}
-\renewcommand\@tocrmarg{3.5em}
-%
-\def\@dottedtocline#1#2#3#4#5{%
-  \ifnum #1>\c@tocdepth \else
-    \vskip \z@ \@plus.2\p@
-    {\leftskip #2\relax \rightskip \@tocrmarg \advance\rightskip by 0pt plus 2cm
-               \parfillskip -\rightskip \pretolerance=10000
-     \parindent #2\relax\@afterindenttrue
-     \interlinepenalty\@M
-     \leavevmode
-     \@tempdima #3\relax
-     \advance\leftskip \@tempdima \null\nobreak\hskip -\leftskip
-     {#4}\nobreak
-     \leaders\hbox{$\m@th
-        \mkern \@dotsep mu\hbox{.}\mkern \@dotsep
-        mu$}\hfill
-     \nobreak
-     \hb@xt@\@pnumwidth{\hfil\normalfont \normalcolor #5}%
-     \par}%
-  \fi}
-%
-\def\switcht@albion{%
-\def\abstractname{Abstract.}
-\def\ackname{Acknowledgement.}
-\def\andname{and}
-\def\lastandname{\unskip, and}
-\def\appendixname{Appendix}
-\def\chaptername{Chapter}
-\def\claimname{Claim}
-\def\conjecturename{Conjecture}
-\def\contentsname{Table of Contents}
-\def\corollaryname{Corollary}
-\def\definitionname{Definition}
-\def\examplename{Example}
-\def\exercisename{Exercise}
-\def\figurename{Fig.}
-\def\keywordname{{\bf Key words:}}
-\def\indexname{Index}
-\def\lemmaname{Lemma}
-\def\contriblistname{List of Contributors}
-\def\listfigurename{List of Figures}
-\def\listtablename{List of Tables}
-\def\mailname{{\it Correspondence to\/}:}
-\def\noteaddname{Note added in proof}
-\def\notename{Note}
-\def\partname{Part}
-\def\problemname{Problem}
-\def\proofname{Proof}
-\def\propertyname{Property}
-\def\propositionname{Proposition}
-\def\questionname{Question}
-\def\remarkname{Remark}
-\def\seename{see}
-\def\solutionname{Solution}
-\def\subclassname{{\it Subject Classifications\/}:}
-\def\tablename{Table}
-\def\theoremname{Theorem}}
-\switcht@albion
-% Names of theorem like environments are already defined
-% but must be translated if another language is chosen
-%
-% French section
-\def\switcht@francais{%\typeout{On parle francais.}%
- \def\abstractname{R\'esum\'e.}%
- \def\ackname{Remerciements.}%
- \def\andname{et}%
- \def\lastandname{ et}%
- \def\appendixname{Appendice}
- \def\chaptername{Chapitre}%
- \def\claimname{Pr\'etention}%
- \def\conjecturename{Hypoth\`ese}%
- \def\contentsname{Table des mati\`eres}%
- \def\corollaryname{Corollaire}%
- \def\definitionname{D\'efinition}%
- \def\examplename{Exemple}%
- \def\exercisename{Exercice}%
- \def\figurename{Fig.}%
- \def\keywordname{{\bf Mots-cl\'e:}}
- \def\indexname{Index}
- \def\lemmaname{Lemme}%
- \def\contriblistname{Liste des contributeurs}
- \def\listfigurename{Liste des figures}%
- \def\listtablename{Liste des tables}%
- \def\mailname{{\it Correspondence to\/}:}
- \def\noteaddname{Note ajout\'ee \`a l'\'epreuve}%
- \def\notename{Remarque}%
- \def\partname{Partie}%
- \def\problemname{Probl\`eme}%
- \def\proofname{Preuve}%
- \def\propertyname{Caract\'eristique}%
-%\def\propositionname{Proposition}%
- \def\questionname{Question}%
- \def\remarkname{Remarque}%
- \def\seename{voir}
- \def\solutionname{Solution}%
- \def\subclassname{{\it Subject Classifications\/}:}
- \def\tablename{Tableau}%
- \def\theoremname{Th\'eor\`eme}%
-}
-%
-% German section
-\def\switcht@deutsch{%\typeout{Man spricht deutsch.}%
- \def\abstractname{Zusammenfassung.}%
- \def\ackname{Danksagung.}%
- \def\andname{und}%
- \def\lastandname{ und}%
- \def\appendixname{Anhang}%
- \def\chaptername{Kapitel}%
- \def\claimname{Behauptung}%
- \def\conjecturename{Hypothese}%
- \def\contentsname{Inhaltsverzeichnis}%
- \def\corollaryname{Korollar}%
-%\def\definitionname{Definition}%
- \def\examplename{Beispiel}%
- \def\exercisename{\"Ubung}%
- \def\figurename{Abb.}%
- \def\keywordname{{\bf Schl\"usselw\"orter:}}
- \def\indexname{Index}
-%\def\lemmaname{Lemma}%
- \def\contriblistname{Mitarbeiter}
- \def\listfigurename{Abbildungsverzeichnis}%
- \def\listtablename{Tabellenverzeichnis}%
- \def\mailname{{\it Correspondence to\/}:}
- \def\noteaddname{Nachtrag}%
- \def\notename{Anmerkung}%
- \def\partname{Teil}%
-%\def\problemname{Problem}%
- \def\proofname{Beweis}%
- \def\propertyname{Eigenschaft}%
-%\def\propositionname{Proposition}%
- \def\questionname{Frage}%
- \def\remarkname{Anmerkung}%
- \def\seename{siehe}
- \def\solutionname{L\"osung}%
- \def\subclassname{{\it Subject Classifications\/}:}
- \def\tablename{Tabelle}%
-%\def\theoremname{Theorem}%
-}
-
-% Ragged bottom for the actual page
-\def\thisbottomragged{\def\@textbottom{\vskip\z@ plus.0001fil
-\global\let\@textbottom\relax}}
-
-\renewcommand\small{%
-   \@setfontsize\small\@ixpt{11}%
-   \abovedisplayskip 8.5\p@ \@plus3\p@ \@minus4\p@
-   \abovedisplayshortskip \z@ \@plus2\p@
-   \belowdisplayshortskip 4\p@ \@plus2\p@ \@minus2\p@
-   \def\@listi{\leftmargin\leftmargini
-               \parsep 0\p@ \@plus1\p@ \@minus\p@
-               \topsep 8\p@ \@plus2\p@ \@minus4\p@
-               \itemsep0\p@}%
-   \belowdisplayskip \abovedisplayskip
-}
-
-\frenchspacing
-\widowpenalty=10000
-\clubpenalty=10000
-
-\setlength\oddsidemargin   {63\p@}
-\setlength\evensidemargin  {63\p@}
-\setlength\marginparwidth  {90\p@}
-
-\setlength\headsep   {16\p@}
-
-\setlength\footnotesep{7.7\p@}
-\setlength\textfloatsep{8mm\@plus 2\p@ \@minus 4\p@}
-\setlength\intextsep   {8mm\@plus 2\p@ \@minus 2\p@}
-
-\setcounter{secnumdepth}{2}
-
-\newcounter {chapter}
-\renewcommand\thechapter      {\@arabic\c@chapter}
-
-\newif\if@mainmatter \@mainmattertrue
-\newcommand\frontmatter{\cleardoublepage
-            \@mainmatterfalse\pagenumbering{Roman}}
-\newcommand\mainmatter{\cleardoublepage
-       \@mainmattertrue\pagenumbering{arabic}}
-\newcommand\backmatter{\if@openright\cleardoublepage\else\clearpage\fi
-      \@mainmatterfalse}
-
-\renewcommand\part{\cleardoublepage
-                 \thispagestyle{empty}%
-                 \if@twocolumn
-                     \onecolumn
-                     \@tempswatrue
-                   \else
-                     \@tempswafalse
-                 \fi
-                 \null\vfil
-                 \secdef\@part\@spart}
-
-\def\@part[#1]#2{%
-    \ifnum \c@secnumdepth >-2\relax
-      \refstepcounter{part}%
-      \addcontentsline{toc}{part}{\thepart\hspace{1em}#1}%
-    \else
-      \addcontentsline{toc}{part}{#1}%
-    \fi
-    \markboth{}{}%
-    {\centering
-     \interlinepenalty \@M
-     \normalfont
-     \ifnum \c@secnumdepth >-2\relax
-       \huge\bfseries \partname~\thepart
-       \par
-       \vskip 20\p@
-     \fi
-     \Huge \bfseries #2\par}%
-    \@endpart}
-\def\@spart#1{%
-    {\centering
-     \interlinepenalty \@M
-     \normalfont
-     \Huge \bfseries #1\par}%
-    \@endpart}
-\def\@endpart{\vfil\newpage
-              \if@twoside
-                \null
-                \thispagestyle{empty}%
-                \newpage
-              \fi
-              \if@tempswa
-                \twocolumn
-              \fi}
-
-\newcommand\chapter{\clearpage
-                    \thispagestyle{empty}%
-                    \global\@topnum\z@
-                    \@afterindentfalse
-                    \secdef\@chapter\@schapter}
-\def\@chapter[#1]#2{\ifnum \c@secnumdepth >\m@ne
-                       \if@mainmatter
-                         \refstepcounter{chapter}%
-                         \typeout{\@chapapp\space\thechapter.}%
-                         \addcontentsline{toc}{chapter}%
-                                  {\protect\numberline{\thechapter}#1}%
-                       \else
-                         \addcontentsline{toc}{chapter}{#1}%
-                       \fi
-                    \else
-                      \addcontentsline{toc}{chapter}{#1}%
-                    \fi
-                    \chaptermark{#1}%
-                    \addtocontents{lof}{\protect\addvspace{10\p@}}%
-                    \addtocontents{lot}{\protect\addvspace{10\p@}}%
-                    \if@twocolumn
-                      \@topnewpage[\@makechapterhead{#2}]%
-                    \else
-                      \@makechapterhead{#2}%
-                      \@afterheading
-                    \fi}
-\def\@makechapterhead#1{%
-% \vspace*{50\p@}%
-  {\centering
-    \ifnum \c@secnumdepth >\m@ne
-      \if@mainmatter
-        \large\bfseries \@chapapp{} \thechapter
-        \par\nobreak
-        \vskip 20\p@
-      \fi
-    \fi
-    \interlinepenalty\@M
-    \Large \bfseries #1\par\nobreak
-    \vskip 40\p@
-  }}
-\def\@schapter#1{\if@twocolumn
-                   \@topnewpage[\@makeschapterhead{#1}]%
-                 \else
-                   \@makeschapterhead{#1}%
-                   \@afterheading
-                 \fi}
-\def\@makeschapterhead#1{%
-% \vspace*{50\p@}%
-  {\centering
-    \normalfont
-    \interlinepenalty\@M
-    \Large \bfseries  #1\par\nobreak
-    \vskip 40\p@
-  }}
-
-\renewcommand\section{\@startsection{section}{1}{\z@}%
-                       {-18\p@ \@plus -4\p@ \@minus -4\p@}%
-                       {12\p@ \@plus 4\p@ \@minus 4\p@}%
-                       {\normalfont\large\bfseries\boldmath
-                        \rightskip=\z@ \@plus 8em\pretolerance=10000 }}
-\renewcommand\subsection{\@startsection{subsection}{2}{\z@}%
-                       {-18\p@ \@plus -4\p@ \@minus -4\p@}%
-                       {8\p@ \@plus 4\p@ \@minus 4\p@}%
-                       {\normalfont\normalsize\bfseries\boldmath
-                        \rightskip=\z@ \@plus 8em\pretolerance=10000 }}
-\renewcommand\subsubsection{\@startsection{subsubsection}{3}{\z@}%
-                       {-18\p@ \@plus -4\p@ \@minus -4\p@}%
-                       {-0.5em \@plus -0.22em \@minus -0.1em}%
-                       {\normalfont\normalsize\bfseries\boldmath}}
-\renewcommand\paragraph{\@startsection{paragraph}{4}{\z@}%
-                       {-12\p@ \@plus -4\p@ \@minus -4\p@}%
-                       {-0.5em \@plus -0.22em \@minus -0.1em}%
-                       {\normalfont\normalsize\itshape}}
-\renewcommand\subparagraph[1]{\typeout{LLNCS warning: You should not use
-                  \string\subparagraph\space with this class}\vskip0.5cm
-You should not use \verb|\subparagraph| with this class.\vskip0.5cm}
-
-\DeclareMathSymbol{\Gamma}{\mathalpha}{letters}{"00}
-\DeclareMathSymbol{\Delta}{\mathalpha}{letters}{"01}
-\DeclareMathSymbol{\Theta}{\mathalpha}{letters}{"02}
-\DeclareMathSymbol{\Lambda}{\mathalpha}{letters}{"03}
-\DeclareMathSymbol{\Xi}{\mathalpha}{letters}{"04}
-\DeclareMathSymbol{\Pi}{\mathalpha}{letters}{"05}
-\DeclareMathSymbol{\Sigma}{\mathalpha}{letters}{"06}
-\DeclareMathSymbol{\Upsilon}{\mathalpha}{letters}{"07}
-\DeclareMathSymbol{\Phi}{\mathalpha}{letters}{"08}
-\DeclareMathSymbol{\Psi}{\mathalpha}{letters}{"09}
-\DeclareMathSymbol{\Omega}{\mathalpha}{letters}{"0A}
-
-\let\footnotesize\small
-
-\if@custvec
-\def\vec#1{\mathchoice{\mbox{\boldmath$\displaystyle#1$}}
-{\mbox{\boldmath$\textstyle#1$}}
-{\mbox{\boldmath$\scriptstyle#1$}}
-{\mbox{\boldmath$\scriptscriptstyle#1$}}}
-\fi
-
-\def\squareforqed{\hbox{\rlap{$\sqcap$}$\sqcup$}}
-\def\qed{\ifmmode\squareforqed\else{\unskip\nobreak\hfil
-\penalty50\hskip1em\null\nobreak\hfil\squareforqed
-\parfillskip=0pt\finalhyphendemerits=0\endgraf}\fi}
-
-\def\getsto{\mathrel{\mathchoice {\vcenter{\offinterlineskip
-\halign{\hfil
-$\displaystyle##$\hfil\cr\gets\cr\to\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr\gets
-\cr\to\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr\gets
-\cr\to\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
-\gets\cr\to\cr}}}}}
-\def\lid{\mathrel{\mathchoice {\vcenter{\offinterlineskip\halign{\hfil
-$\displaystyle##$\hfil\cr<\cr\noalign{\vskip1.2pt}=\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr<\cr
-\noalign{\vskip1.2pt}=\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr<\cr
-\noalign{\vskip1pt}=\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
-<\cr
-\noalign{\vskip0.9pt}=\cr}}}}}
-\def\gid{\mathrel{\mathchoice {\vcenter{\offinterlineskip\halign{\hfil
-$\displaystyle##$\hfil\cr>\cr\noalign{\vskip1.2pt}=\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr>\cr
-\noalign{\vskip1.2pt}=\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr>\cr
-\noalign{\vskip1pt}=\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
->\cr
-\noalign{\vskip0.9pt}=\cr}}}}}
-\def\grole{\mathrel{\mathchoice {\vcenter{\offinterlineskip
-\halign{\hfil
-$\displaystyle##$\hfil\cr>\cr\noalign{\vskip-1pt}<\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr
->\cr\noalign{\vskip-1pt}<\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr
->\cr\noalign{\vskip-0.8pt}<\cr}}}
-{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr
->\cr\noalign{\vskip-0.3pt}<\cr}}}}}
-\def\bbbr{{\rm I\!R}} %real numbers
-\def\bbbm{{\rm I\!M}}
-\def\bbbn{{\rm I\!N}} %natural numbers
-\def\bbbf{{\rm I\!F}}
-\def\bbbh{{\rm I\!H}}
-\def\bbbk{{\rm I\!K}}
-\def\bbbp{{\rm I\!P}}
-\def\bbbone{{\mathchoice {\rm 1\mskip-4mu l} {\rm 1\mskip-4mu l}
-{\rm 1\mskip-4.5mu l} {\rm 1\mskip-5mu l}}}
-\def\bbbc{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm C$}\hbox{\hbox
-to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}
-{\setbox0=\hbox{$\textstyle\rm C$}\hbox{\hbox
-to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptstyle\rm C$}\hbox{\hbox
-to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptscriptstyle\rm C$}\hbox{\hbox
-to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}}}
-\def\bbbq{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm
-Q$}\hbox{\raise
-0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.8\ht0\hss}\box0}}
-{\setbox0=\hbox{$\textstyle\rm Q$}\hbox{\raise
-0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.8\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptstyle\rm Q$}\hbox{\raise
-0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.7\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptscriptstyle\rm Q$}\hbox{\raise
-0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.7\ht0\hss}\box0}}}}
-\def\bbbt{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm
-T$}\hbox{\hbox to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}
-{\setbox0=\hbox{$\textstyle\rm T$}\hbox{\hbox
-to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptstyle\rm T$}\hbox{\hbox
-to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptscriptstyle\rm T$}\hbox{\hbox
-to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}}}
-\def\bbbs{{\mathchoice
-{\setbox0=\hbox{$\displaystyle     \rm S$}\hbox{\raise0.5\ht0\hbox
-to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\hbox
-to0pt{\kern0.55\wd0\vrule height0.5\ht0\hss}\box0}}
-{\setbox0=\hbox{$\textstyle        \rm S$}\hbox{\raise0.5\ht0\hbox
-to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\hbox
-to0pt{\kern0.55\wd0\vrule height0.5\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptstyle      \rm S$}\hbox{\raise0.5\ht0\hbox
-to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\raise0.05\ht0\hbox
-to0pt{\kern0.5\wd0\vrule height0.45\ht0\hss}\box0}}
-{\setbox0=\hbox{$\scriptscriptstyle\rm S$}\hbox{\raise0.5\ht0\hbox
-to0pt{\kern0.4\wd0\vrule height0.45\ht0\hss}\raise0.05\ht0\hbox
-to0pt{\kern0.55\wd0\vrule height0.45\ht0\hss}\box0}}}}
-\def\bbbz{{\mathchoice {\hbox{$\mathsf\textstyle Z\kern-0.4em Z$}}
-{\hbox{$\mathsf\textstyle Z\kern-0.4em Z$}}
-{\hbox{$\mathsf\scriptstyle Z\kern-0.3em Z$}}
-{\hbox{$\mathsf\scriptscriptstyle Z\kern-0.2em Z$}}}}
-
-\let\ts\,
-
-\setlength\leftmargini  {17\p@}
-\setlength\leftmargin    {\leftmargini}
-\setlength\leftmarginii  {\leftmargini}
-\setlength\leftmarginiii {\leftmargini}
-\setlength\leftmarginiv  {\leftmargini}
-\setlength  \labelsep  {.5em}
-\setlength  \labelwidth{\leftmargini}
-\addtolength\labelwidth{-\labelsep}
-
-\def\@listI{\leftmargin\leftmargini
-            \parsep 0\p@ \@plus1\p@ \@minus\p@
-            \topsep 8\p@ \@plus2\p@ \@minus4\p@
-            \itemsep0\p@}
-\let\@listi\@listI
-\@listi
-\def\@listii {\leftmargin\leftmarginii
-              \labelwidth\leftmarginii
-              \advance\labelwidth-\labelsep
-              \topsep    0\p@ \@plus2\p@ \@minus\p@}
-\def\@listiii{\leftmargin\leftmarginiii
-              \labelwidth\leftmarginiii
-              \advance\labelwidth-\labelsep
-              \topsep    0\p@ \@plus\p@\@minus\p@
-              \parsep    \z@
-              \partopsep \p@ \@plus\z@ \@minus\p@}
-
-\renewcommand\labelitemi{\normalfont\bfseries --}
-\renewcommand\labelitemii{$\m@th\bullet$}
-
-\setlength\arraycolsep{1.4\p@}
-\setlength\tabcolsep{1.4\p@}
-
-\def\tableofcontents{\chapter*{\contentsname\@mkboth{{\contentsname}}%
-                                                    {{\contentsname}}}
- \def\authcount##1{\setcounter{auco}{##1}\setcounter{@auth}{1}}
- \def\lastand{\ifnum\value{auco}=2\relax
-                 \unskip{} \andname\
-              \else
-                 \unskip \lastandname\
-              \fi}%
- \def\and{\stepcounter{@auth}\relax
-          \ifnum\value{@auth}=\value{auco}%
-             \lastand
-          \else
-             \unskip,
-          \fi}%
- \@starttoc{toc}\if@restonecol\twocolumn\fi}
-
-\def\l@part#1#2{\addpenalty{\@secpenalty}%
-   \addvspace{2em plus\p@}%  % space above part line
-   \begingroup
-     \parindent \z@
-     \rightskip \z@ plus 5em
-     \hrule\vskip5pt
-     \large               % same size as for a contribution heading
-     \bfseries\boldmath   % set line in boldface
-     \leavevmode          % TeX command to enter horizontal mode.
-     #1\par
-     \vskip5pt
-     \hrule
-     \vskip1pt
-     \nobreak             % Never break after part entry
-   \endgroup}
-
-\def\@dotsep{2}
-
-\def\hyperhrefextend{\ifx\hyper@anchor\@undefined\else
-{chapter.\thechapter}\fi}
-
-\def\addnumcontentsmark#1#2#3{%
-\addtocontents{#1}{\protect\contentsline{#2}{\protect\numberline
-                     {\thechapter}#3}{\thepage}\hyperhrefextend}}
-\def\addcontentsmark#1#2#3{%
-\addtocontents{#1}{\protect\contentsline{#2}{#3}{\thepage}\hyperhrefextend}}
-\def\addcontentsmarkwop#1#2#3{%
-\addtocontents{#1}{\protect\contentsline{#2}{#3}{0}\hyperhrefextend}}
-
-\def\@adcmk[#1]{\ifcase #1 \or
-\def\@gtempa{\addnumcontentsmark}%
-  \or    \def\@gtempa{\addcontentsmark}%
-  \or    \def\@gtempa{\addcontentsmarkwop}%
-  \fi\@gtempa{toc}{chapter}}
-\def\addtocmark{\@ifnextchar[{\@adcmk}{\@adcmk[3]}}
-
-\def\l@chapter#1#2{\addpenalty{-\@highpenalty}
- \vskip 1.0em plus 1pt \@tempdima 1.5em \begingroup
- \parindent \z@ \rightskip \@tocrmarg
- \advance\rightskip by 0pt plus 2cm
- \parfillskip -\rightskip \pretolerance=10000
- \leavevmode \advance\leftskip\@tempdima \hskip -\leftskip
- {\large\bfseries\boldmath#1}\ifx0#2\hfil\null
- \else
-      \nobreak
-      \leaders\hbox{$\m@th \mkern \@dotsep mu.\mkern
-      \@dotsep mu$}\hfill
-      \nobreak\hbox to\@pnumwidth{\hss #2}%
- \fi\par
- \penalty\@highpenalty \endgroup}
-
-\def\l@title#1#2{\addpenalty{-\@highpenalty}
- \addvspace{8pt plus 1pt}
- \@tempdima \z@
- \begingroup
- \parindent \z@ \rightskip \@tocrmarg
- \advance\rightskip by 0pt plus 2cm
- \parfillskip -\rightskip \pretolerance=10000
- \leavevmode \advance\leftskip\@tempdima \hskip -\leftskip
- #1\nobreak
- \leaders\hbox{$\m@th \mkern \@dotsep mu.\mkern
- \@dotsep mu$}\hfill
- \nobreak\hbox to\@pnumwidth{\hss #2}\par
- \penalty\@highpenalty \endgroup}
-
-\def\l@author#1#2{\addpenalty{\@highpenalty}
- \@tempdima=\z@ %15\p@
- \begingroup
- \parindent \z@ \rightskip \@tocrmarg
- \advance\rightskip by 0pt plus 2cm
- \pretolerance=10000
- \leavevmode \advance\leftskip\@tempdima %\hskip -\leftskip
- \textit{#1}\par
- \penalty\@highpenalty \endgroup}
-
-\setcounter{tocdepth}{0}
-\newdimen\tocchpnum
-\newdimen\tocsecnum
-\newdimen\tocsectotal
-\newdimen\tocsubsecnum
-\newdimen\tocsubsectotal
-\newdimen\tocsubsubsecnum
-\newdimen\tocsubsubsectotal
-\newdimen\tocparanum
-\newdimen\tocparatotal
-\newdimen\tocsubparanum
-\tocchpnum=\z@            % no chapter numbers
-\tocsecnum=15\p@          % section 88. plus 2.222pt
-\tocsubsecnum=23\p@       % subsection 88.8 plus 2.222pt
-\tocsubsubsecnum=27\p@    % subsubsection 88.8.8 plus 1.444pt
-\tocparanum=35\p@         % paragraph 88.8.8.8 plus 1.666pt
-\tocsubparanum=43\p@      % subparagraph 88.8.8.8.8 plus 1.888pt
-\def\calctocindent{%
-\tocsectotal=\tocchpnum
-\advance\tocsectotal by\tocsecnum
-\tocsubsectotal=\tocsectotal
-\advance\tocsubsectotal by\tocsubsecnum
-\tocsubsubsectotal=\tocsubsectotal
-\advance\tocsubsubsectotal by\tocsubsubsecnum
-\tocparatotal=\tocsubsubsectotal
-\advance\tocparatotal by\tocparanum}
-\calctocindent
-
-\def\l@section{\@dottedtocline{1}{\tocchpnum}{\tocsecnum}}
-\def\l@subsection{\@dottedtocline{2}{\tocsectotal}{\tocsubsecnum}}
-\def\l@subsubsection{\@dottedtocline{3}{\tocsubsectotal}{\tocsubsubsecnum}}
-\def\l@paragraph{\@dottedtocline{4}{\tocsubsubsectotal}{\tocparanum}}
-\def\l@subparagraph{\@dottedtocline{5}{\tocparatotal}{\tocsubparanum}}
-
-\def\listoffigures{\@restonecolfalse\if@twocolumn\@restonecoltrue\onecolumn
- \fi\section*{\listfigurename\@mkboth{{\listfigurename}}{{\listfigurename}}}
- \@starttoc{lof}\if@restonecol\twocolumn\fi}
-\def\l@figure{\@dottedtocline{1}{0em}{1.5em}}
-
-\def\listoftables{\@restonecolfalse\if@twocolumn\@restonecoltrue\onecolumn
- \fi\section*{\listtablename\@mkboth{{\listtablename}}{{\listtablename}}}
- \@starttoc{lot}\if@restonecol\twocolumn\fi}
-\let\l@table\l@figure
-
-\renewcommand\listoffigures{%
-    \section*{\listfigurename
-      \@mkboth{\listfigurename}{\listfigurename}}%
-    \@starttoc{lof}%
-    }
-
-\renewcommand\listoftables{%
-    \section*{\listtablename
-      \@mkboth{\listtablename}{\listtablename}}%
-    \@starttoc{lot}%
-    }
-
-\ifx\oribibl\undefined
-\ifx\citeauthoryear\undefined
-\renewenvironment{thebibliography}[1]
-     {\section*{\refname}
-      \def\@biblabel##1{##1.}
-      \small
-      \list{\@biblabel{\@arabic\c@enumiv}}%
-           {\settowidth\labelwidth{\@biblabel{#1}}%
-            \leftmargin\labelwidth
-            \advance\leftmargin\labelsep
-            \if@openbib
-              \advance\leftmargin\bibindent
-              \itemindent -\bibindent
-              \listparindent \itemindent
-              \parsep \z@
-            \fi
-            \usecounter{enumiv}%
-            \let\p@enumiv\@empty
-            \renewcommand\theenumiv{\@arabic\c@enumiv}}%
-      \if@openbib
-        \renewcommand\newblock{\par}%
-      \else
-        \renewcommand\newblock{\hskip .11em \@plus.33em \@minus.07em}%
-      \fi
-      \sloppy\clubpenalty4000\widowpenalty4000%
-      \sfcode`\.=\@m}
-     {\def\@noitemerr
-       {\@latex@warning{Empty `thebibliography' environment}}%
-      \endlist}
-\def\@lbibitem[#1]#2{\item[{[#1]}\hfill]\if@filesw
-     {\let\protect\noexpand\immediate
-     \write\@auxout{\string\bibcite{#2}{#1}}}\fi\ignorespaces}
-\newcount\@tempcntc
-\def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi
-  \@tempcnta\z@\@tempcntb\m@ne\def\@citea{}\@cite{\@for\@citeb:=#2\do
-    {\@ifundefined
-       {b@\@citeb}{\@citeo\@tempcntb\m@ne\@citea\def\@citea{,}{\bfseries
-        ?}\@warning
-       {Citation `\@citeb' on page \thepage \space undefined}}%
-    {\setbox\z@\hbox{\global\@tempcntc0\csname b@\@citeb\endcsname\relax}%
-     \ifnum\@tempcntc=\z@ \@citeo\@tempcntb\m@ne
-       \@citea\def\@citea{,}\hbox{\csname b@\@citeb\endcsname}%
-     \else
-      \advance\@tempcntb\@ne
-      \ifnum\@tempcntb=\@tempcntc
-      \else\advance\@tempcntb\m@ne\@citeo
-      \@tempcnta\@tempcntc\@tempcntb\@tempcntc\fi\fi}}\@citeo}{#1}}
-\def\@citeo{\ifnum\@tempcnta>\@tempcntb\else
-               \@citea\def\@citea{,\,\hskip\z@skip}%
-               \ifnum\@tempcnta=\@tempcntb\the\@tempcnta\else
-               {\advance\@tempcnta\@ne\ifnum\@tempcnta=\@tempcntb \else
-                \def\@citea{--}\fi
-      \advance\@tempcnta\m@ne\the\@tempcnta\@citea\the\@tempcntb}\fi\fi}
-\else
-\renewenvironment{thebibliography}[1]
-     {\section*{\refname}
-      \small
-      \list{}%
-           {\settowidth\labelwidth{}%
-            \leftmargin\parindent
-            \itemindent=-\parindent
-            \labelsep=\z@
-            \if@openbib
-              \advance\leftmargin\bibindent
-              \itemindent -\bibindent
-              \listparindent \itemindent
-              \parsep \z@
-            \fi
-            \usecounter{enumiv}%
-            \let\p@enumiv\@empty
-            \renewcommand\theenumiv{}}%
-      \if@openbib
-        \renewcommand\newblock{\par}%
-      \else
-        \renewcommand\newblock{\hskip .11em \@plus.33em \@minus.07em}%
-      \fi
-      \sloppy\clubpenalty4000\widowpenalty4000%
-      \sfcode`\.=\@m}
-     {\def\@noitemerr
-       {\@latex@warning{Empty `thebibliography' environment}}%
-      \endlist}
-      \def\@cite#1{#1}%
-      \def\@lbibitem[#1]#2{\item[]\if@filesw
-        {\def\protect##1{\string ##1\space}\immediate
-      \write\@auxout{\string\bibcite{#2}{#1}}}\fi\ignorespaces}
-   \fi
-\else
-\@cons\@openbib@code{\noexpand\small}
-\fi
-
-\def\idxquad{\hskip 10\p@}% space that divides entry from number
-
-\def\@idxitem{\par\hangindent 10\p@}
-
-\def\subitem{\par\setbox0=\hbox{--\enspace}% second order
-                \noindent\hangindent\wd0\box0}% index entry
-
-\def\subsubitem{\par\setbox0=\hbox{--\,--\enspace}% third
-                \noindent\hangindent\wd0\box0}% order index entry
-
-\def\indexspace{\par \vskip 10\p@ plus5\p@ minus3\p@\relax}
-
-\renewenvironment{theindex}
-               {\@mkboth{\indexname}{\indexname}%
-                \thispagestyle{empty}\parindent\z@
-                \parskip\z@ \@plus .3\p@\relax
-                \let\item\par
-                \def\,{\relax\ifmmode\mskip\thinmuskip
-                             \else\hskip0.2em\ignorespaces\fi}%
-                \normalfont\small
-                \begin{multicols}{2}[\@makeschapterhead{\indexname}]%
-                }
-                {\end{multicols}}
-
-\renewcommand\footnoterule{%
-  \kern-3\p@
-  \hrule\@width 2truecm
-  \kern2.6\p@}
-  \newdimen\fnindent
-  \fnindent1em
-\long\def\@makefntext#1{%
-    \parindent \fnindent%
-    \leftskip \fnindent%
-    \noindent
-    \llap{\hb@xt@1em{\hss\@makefnmark\ }}\ignorespaces#1}
-
-\long\def\@makecaption#1#2{%
-  \vskip\abovecaptionskip
-  \sbox\@tempboxa{{\bfseries #1.} #2}%
-  \ifdim \wd\@tempboxa >\hsize
-    {\bfseries #1.} #2\par
-  \else
-    \global \@minipagefalse
-    \hb@xt@\hsize{\hfil\box\@tempboxa\hfil}%
-  \fi
-  \vskip\belowcaptionskip}
-
-\def\fps@figure{htbp}
-\def\fnum@figure{\figurename\thinspace\thefigure}
-\def \@floatboxreset {%
-        \reset@font
-        \small
-        \@setnobreak
-        \@setminipage
-}
-\def\fps@table{htbp}
-\def\fnum@table{\tablename~\thetable}
-\renewenvironment{table}
-               {\setlength\abovecaptionskip{0\p@}%
-                \setlength\belowcaptionskip{10\p@}%
-                \@float{table}}
-               {\end@float}
-\renewenvironment{table*}
-               {\setlength\abovecaptionskip{0\p@}%
-                \setlength\belowcaptionskip{10\p@}%
-                \@dblfloat{table}}
-               {\end@dblfloat}
-
-\long\def\@caption#1[#2]#3{\par\addcontentsline{\csname
-  ext@#1\endcsname}{#1}{\protect\numberline{\csname
-  the#1\endcsname}{\ignorespaces #2}}\begingroup
-    \@parboxrestore
-    \@makecaption{\csname fnum@#1\endcsname}{\ignorespaces #3}\par
-  \endgroup}
-
-% LaTeX does not provide a command to enter the authors' institute
-% addresses. The \institute command is defined here.
-
-\newcounter{@inst}
-\newcounter{@auth}
-\newcounter{auco}
-\newdimen\instindent
-\newbox\authrun
-\newtoks\authorrunning
-\newtoks\tocauthor
-\newbox\titrun
-\newtoks\titlerunning
-\newtoks\toctitle
-
-\def\clearheadinfo{\gdef\@author{No Author Given}%
-                   \gdef\@title{No Title Given}%
-                   \gdef\@subtitle{}%
-                   \gdef\@institute{No Institute Given}%
-                   \gdef\@thanks{}%
-                   \global\titlerunning={}\global\authorrunning={}%
-                   \global\toctitle={}\global\tocauthor={}}
-
-\def\institute#1{\gdef\@institute{#1}}
-
-\def\institutename{\par
- \begingroup
- \parskip=\z@
- \parindent=\z@
- \setcounter{@inst}{1}%
- \def\and{\par\stepcounter{@inst}%
- \noindent$^{\the@inst}$\enspace\ignorespaces}%
- \setbox0=\vbox{\def\thanks##1{}\@institute}%
- \ifnum\c@@inst=1\relax
-   \gdef\fnnstart{0}%
- \else
-   \xdef\fnnstart{\c@@inst}%
-   \setcounter{@inst}{1}%
-   \noindent$^{\the@inst}$\enspace
- \fi
- \ignorespaces
- \@institute\par
- \endgroup}
-
-\def\@fnsymbol#1{\ensuremath{\ifcase#1\or\star\or{\star\star}\or
-   {\star\star\star}\or \dagger\or \ddagger\or
-   \mathchar "278\or \mathchar "27B\or \|\or **\or \dagger\dagger
-   \or \ddagger\ddagger \else\@ctrerr\fi}}
-
-\def\inst#1{\unskip$^{#1}$}
-\def\fnmsep{\unskip$^,$}
-\def\email#1{{\tt#1}}
-\AtBeginDocument{\@ifundefined{url}{\def\url#1{#1}}{}%
-\@ifpackageloaded{babel}{%
-\@ifundefined{extrasenglish}{}{\addto\extrasenglish{\switcht@albion}}%
-\@ifundefined{extrasfrenchb}{}{\addto\extrasfrenchb{\switcht@francais}}%
-\@ifundefined{extrasgerman}{}{\addto\extrasgerman{\switcht@deutsch}}%
-}{\switcht@@therlang}%
-}
-\def\homedir{\~{ }}
-
-\def\subtitle#1{\gdef\@subtitle{#1}}
-\clearheadinfo
-
-\renewcommand\maketitle{\newpage
-  \refstepcounter{chapter}%
-  \stepcounter{section}%
-  \setcounter{section}{0}%
-  \setcounter{subsection}{0}%
-  \setcounter{figure}{0}
-  \setcounter{table}{0}
-  \setcounter{equation}{0}
-  \setcounter{footnote}{0}%
-  \begingroup
-    \parindent=\z@
-    \renewcommand\thefootnote{\@fnsymbol\c@footnote}%
-    \if@twocolumn
-      \ifnum \col@number=\@ne
-        \@maketitle
-      \else
-        \twocolumn[\@maketitle]%
-      \fi
-    \else
-      \newpage
-      \global\@topnum\z@   % Prevents figures from going at top of page.
-      \@maketitle
-    \fi
-    \thispagestyle{empty}\@thanks
-%
-    \def\\{\unskip\ \ignorespaces}\def\inst##1{\unskip{}}%
-    \def\thanks##1{\unskip{}}\def\fnmsep{\unskip}%
-    \instindent=\hsize
-    \advance\instindent by-\headlineindent
-    \if!\the\toctitle!\addcontentsline{toc}{title}{\@title}\else
-       \addcontentsline{toc}{title}{\the\toctitle}\fi
-    \if@runhead
-       \if!\the\titlerunning!\else
-         \edef\@title{\the\titlerunning}%
-       \fi
-       \global\setbox\titrun=\hbox{\small\rm\unboldmath\ignorespaces\@title}%
-       \ifdim\wd\titrun>\instindent
-          \typeout{Title too long for running head. Please supply}%
-          \typeout{a shorter form with \string\titlerunning\space prior to
-                   \string\maketitle}%
-          \global\setbox\titrun=\hbox{\small\rm
-          Title Suppressed Due to Excessive Length}%
-       \fi
-       \xdef\@title{\copy\titrun}%
-    \fi
-%
-    \if!\the\tocauthor!\relax
-      {\def\and{\noexpand\protect\noexpand\and}%
-      \protected@xdef\toc@uthor{\@author}}%
-    \else
-      \def\\{\noexpand\protect\noexpand\newline}%
-      \protected@xdef\scratch{\the\tocauthor}%
-      \protected@xdef\toc@uthor{\scratch}%
-    \fi
-    \addcontentsline{toc}{author}{\toc@uthor}%
-    \if@runhead
-       \if!\the\authorrunning!
-         \value{@inst}=\value{@auth}%
-         \setcounter{@auth}{1}%
-       \else
-         \edef\@author{\the\authorrunning}%
-       \fi
-       \global\setbox\authrun=\hbox{\small\unboldmath\@author\unskip}%
-       \ifdim\wd\authrun>\instindent
-          \typeout{Names of authors too long for running head. Please supply}%
-          \typeout{a shorter form with \string\authorrunning\space prior to
-                   \string\maketitle}%
-          \global\setbox\authrun=\hbox{\small\rm
-          Authors Suppressed Due to Excessive Length}%
-       \fi
-       \xdef\@author{\copy\authrun}%
-       \markboth{\@author}{\@title}%
-     \fi
-  \endgroup
-  \setcounter{footnote}{\fnnstart}%
-  \clearheadinfo}
-%
-\def\@maketitle{\newpage
- \markboth{}{}%
- \def\lastand{\ifnum\value{@inst}=2\relax
-                 \unskip{} \andname\
-              \else
-                 \unskip \lastandname\
-              \fi}%
- \def\and{\stepcounter{@auth}\relax
-          \ifnum\value{@auth}=\value{@inst}%
-             \lastand
-          \else
-             \unskip,
-          \fi}%
- \begin{center}%
- \let\newline\\
- {\Large \bfseries\boldmath
-  \pretolerance=10000
-  \@title \par}\vskip .8cm
-\if!\@subtitle!\else {\large \bfseries\boldmath
-  \vskip -.65cm
-  \pretolerance=10000
-  \@subtitle \par}\vskip .8cm\fi
- \setbox0=\vbox{\setcounter{@auth}{1}\def\and{\stepcounter{@auth}}%
- \def\thanks##1{}\@author}%
- \global\value{@inst}=\value{@auth}%
- \global\value{auco}=\value{@auth}%
- \setcounter{@auth}{1}%
-{\lineskip .5em
-\noindent\ignorespaces
-\@author\vskip.35cm}
- {\small\institutename}
- \end{center}%
- }
-
-% definition of the "\spnewtheorem" command.
-%
-% Usage:
-%
-%     \spnewtheorem{env_nam}{caption}[within]{cap_font}{body_font}
-% or  \spnewtheorem{env_nam}[numbered_like]{caption}{cap_font}{body_font}
-% or  \spnewtheorem*{env_nam}{caption}{cap_font}{body_font}
-%
-% New are "cap_font" and "body_font".  They specify the font
-% of the caption and of the body text, respectively.
-%
-% "\spnewtheorem*" gives a theorem without a number.
-%
-% A defined spnewtheorem environment is used as described
-% by Lamport.
-%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-\def\@thmcountersep{}
-\def\@thmcounterend{.}
-
-\def\spnewtheorem{\@ifstar{\@sthm}{\@Sthm}}
-
-% definition of \spnewtheorem with number
-
-\def\@spnthm#1#2{%
-  \@ifnextchar[{\@spxnthm{#1}{#2}}{\@spynthm{#1}{#2}}}
-\def\@Sthm#1{\@ifnextchar[{\@spothm{#1}}{\@spnthm{#1}}}
-
-\def\@spxnthm#1#2[#3]#4#5{\expandafter\@ifdefinable\csname #1\endcsname
-   {\@definecounter{#1}\@addtoreset{#1}{#3}%
-   \expandafter\xdef\csname the#1\endcsname{\expandafter\noexpand
-     \csname the#3\endcsname \noexpand\@thmcountersep \@thmcounter{#1}}%
-   \expandafter\xdef\csname #1name\endcsname{#2}%
-   \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#4}{#5}}%
-                              \global\@namedef{end#1}{\@endtheorem}}}
-
-\def\@spynthm#1#2#3#4{\expandafter\@ifdefinable\csname #1\endcsname
-   {\@definecounter{#1}%
-   \expandafter\xdef\csname the#1\endcsname{\@thmcounter{#1}}%
-   \expandafter\xdef\csname #1name\endcsname{#2}%
-   \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#3}{#4}}%
-                               \global\@namedef{end#1}{\@endtheorem}}}
-
-\def\@spothm#1[#2]#3#4#5{%
-  \@ifundefined{c@#2}{\@latexerr{No theorem environment `#2' defined}\@eha}%
-  {\expandafter\@ifdefinable\csname #1\endcsname
-  {\global\@namedef{the#1}{\@nameuse{the#2}}%
-  \expandafter\xdef\csname #1name\endcsname{#3}%
-  \global\@namedef{#1}{\@spthm{#2}{\csname #1name\endcsname}{#4}{#5}}%
-  \global\@namedef{end#1}{\@endtheorem}}}}
-
-\def\@spthm#1#2#3#4{\topsep 7\p@ \@plus2\p@ \@minus4\p@
-\refstepcounter{#1}%
-\@ifnextchar[{\@spythm{#1}{#2}{#3}{#4}}{\@spxthm{#1}{#2}{#3}{#4}}}
-
-\def\@spxthm#1#2#3#4{\@spbegintheorem{#2}{\csname the#1\endcsname}{#3}{#4}%
-                    \ignorespaces}
-
-\def\@spythm#1#2#3#4[#5]{\@spopargbegintheorem{#2}{\csname
-       the#1\endcsname}{#5}{#3}{#4}\ignorespaces}
-
-\def\@spbegintheorem#1#2#3#4{\trivlist
-                 \item[\hskip\labelsep{#3#1\ #2\@thmcounterend}]#4}
-
-\def\@spopargbegintheorem#1#2#3#4#5{\trivlist
-      \item[\hskip\labelsep{#4#1\ #2}]{#4(#3)\@thmcounterend\ }#5}
-
-% definition of \spnewtheorem* without number
-
-\def\@sthm#1#2{\@Ynthm{#1}{#2}}
-
-\def\@Ynthm#1#2#3#4{\expandafter\@ifdefinable\csname #1\endcsname
-   {\global\@namedef{#1}{\@Thm{\csname #1name\endcsname}{#3}{#4}}%
-    \expandafter\xdef\csname #1name\endcsname{#2}%
-    \global\@namedef{end#1}{\@endtheorem}}}
-
-\def\@Thm#1#2#3{\topsep 7\p@ \@plus2\p@ \@minus4\p@
-\@ifnextchar[{\@Ythm{#1}{#2}{#3}}{\@Xthm{#1}{#2}{#3}}}
-
-\def\@Xthm#1#2#3{\@Begintheorem{#1}{#2}{#3}\ignorespaces}
-
-\def\@Ythm#1#2#3[#4]{\@Opargbegintheorem{#1}
-       {#4}{#2}{#3}\ignorespaces}
-
-\def\@Begintheorem#1#2#3{#3\trivlist
-                           \item[\hskip\labelsep{#2#1\@thmcounterend}]}
-
-\def\@Opargbegintheorem#1#2#3#4{#4\trivlist
-      \item[\hskip\labelsep{#3#1}]{#3(#2)\@thmcounterend\ }}
-
-\if@envcntsect
-   \def\@thmcountersep{.}
-   \spnewtheorem{theorem}{Theorem}[section]{\bfseries}{\itshape}
-\else
-   \spnewtheorem{theorem}{Theorem}{\bfseries}{\itshape}
-   \if@envcntreset
-      \@addtoreset{theorem}{section}
-   \else
-      \@addtoreset{theorem}{chapter}
-   \fi
-\fi
-
-%definition of various theorem environments
-\spnewtheorem*{claim}{Claim}{\itshape}{\rmfamily}
-\spnewtheorem*{proof}{Proof}{\itshape}{\rmfamily}
-\if@envcntsame % all environments numbered like theorem
-   \def\spn@wtheorem#1#2#3#4{\@spothm{#1}[theorem]{#2}{#3}{#4}}
-\else % each environment has its own counter
-   \if@envcntsect % numbered by section
-      \def\spn@wtheorem#1#2#3#4{\@spxnthm{#1}{#2}[section]{#3}{#4}}
-   \else % not numbered by section
-      \if@envcntreset
-         \def\spn@wtheorem#1#2#3#4{\@spynthm{#1}{#2}{#3}{#4}
-                                   \@addtoreset{#1}{section}}
-      \else
-         \def\spn@wtheorem#1#2#3#4{\@spynthm{#1}{#2}{#3}{#4}
-                                   \@addtoreset{#1}{chapter}}%
-      \fi
-   \fi
-\fi
-\spn@wtheorem{case}{Case}{\itshape}{\rmfamily}
-\spn@wtheorem{conjecture}{Conjecture}{\itshape}{\rmfamily}
-\spn@wtheorem{corollary}{Corollary}{\bfseries}{\itshape}
-\spn@wtheorem{definition}{Definition}{\bfseries}{\itshape}
-\spn@wtheorem{example}{Example}{\itshape}{\rmfamily}
-\spn@wtheorem{exercise}{Exercise}{\itshape}{\rmfamily}
-\spn@wtheorem{lemma}{Lemma}{\bfseries}{\itshape}
-\spn@wtheorem{note}{Note}{\itshape}{\rmfamily}
-\spn@wtheorem{problem}{Problem}{\itshape}{\rmfamily}
-\spn@wtheorem{property}{Property}{\itshape}{\rmfamily}
-\spn@wtheorem{proposition}{Proposition}{\bfseries}{\itshape}
-\spn@wtheorem{question}{Question}{\itshape}{\rmfamily}
-\spn@wtheorem{solution}{Solution}{\itshape}{\rmfamily}
-\spn@wtheorem{remark}{Remark}{\itshape}{\rmfamily}
-
-\def\@takefromreset#1#2{%
-    \def\@tempa{#1}%
-    \let\@tempd\@elt
-    \def\@elt##1{%
-        \def\@tempb{##1}%
-        \ifx\@tempa\@tempb\else
-            \@addtoreset{##1}{#2}%
-        \fi}%
-    \expandafter\expandafter\let\expandafter\@tempc\csname cl@#2\endcsname
-    \expandafter\def\csname cl@#2\endcsname{}%
-    \@tempc
-    \let\@elt\@tempd}
-
-\def\theopargself{\def\@spopargbegintheorem##1##2##3##4##5{\trivlist
-      \item[\hskip\labelsep{##4##1\ ##2}]{##4##3\@thmcounterend\ }##5}
-                  \def\@Opargbegintheorem##1##2##3##4{##4\trivlist
-      \item[\hskip\labelsep{##3##1}]{##3##2\@thmcounterend\ }}
-      }
-
-\renewenvironment{abstract}{%
-      \list{}{\advance\topsep by0.35cm\relax\small
-      \leftmargin=1cm
-      \labelwidth=\z@
-      \listparindent=\z@
-      \itemindent\listparindent
-      \rightmargin\leftmargin}\item[\hskip\labelsep
-                                    \bfseries\abstractname]}
-    {\endlist}
-
-\newdimen\headlineindent             % dimension for space between
-\headlineindent=1.166cm              % number and text of headings.
-
-\def\ps@headings{\let\@mkboth\@gobbletwo
-   \let\@oddfoot\@empty\let\@evenfoot\@empty
-   \def\@evenhead{\normalfont\small\rlap{\thepage}\hspace{\headlineindent}%
-                  \leftmark\hfil}
-   \def\@oddhead{\normalfont\small\hfil\rightmark\hspace{\headlineindent}%
-                 \llap{\thepage}}
-   \def\chaptermark##1{}%
-   \def\sectionmark##1{}%
-   \def\subsectionmark##1{}}
-
-\def\ps@titlepage{\let\@mkboth\@gobbletwo
-   \let\@oddfoot\@empty\let\@evenfoot\@empty
-   \def\@evenhead{\normalfont\small\rlap{\thepage}\hspace{\headlineindent}%
-                  \hfil}
-   \def\@oddhead{\normalfont\small\hfil\hspace{\headlineindent}%
-                 \llap{\thepage}}
-   \def\chaptermark##1{}%
-   \def\sectionmark##1{}%
-   \def\subsectionmark##1{}}
-
-\if@runhead\ps@headings\else
-\ps@empty\fi
-
-\setlength\arraycolsep{1.4\p@}
-\setlength\tabcolsep{1.4\p@}
-
-\endinput
-%end of file llncs.cls
--- a/doc-src/IsarOverview/Isar/document/root.bib	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-@string{LNCS="Lect.\ Notes in Comp.\ Sci."}
-@string{Springer="Springer-Verlag"}
-@string{JAR="J. Automated Reasoning"}
-
-@InProceedings{BauerW-TPHOLs01,
-author={Gertrud Bauer and Markus Wenzel},
-title={Calculational Reasoning Revisited --- an {Isabelle/Isar} Experience},
-booktitle={Theorem Proving in Higher Order Logics, TPHOLs 2001},
-editor={R. Boulton and P. Jackson},
-year=2001,publisher=Springer,series=LNCS,volume=2152,pages="75--90"}
-
-@book{LCF,author="M.C.J. Gordon and Robin Milner and C.P. Wadsworth",
-title="Edinburgh {LCF}: a Mechanised Logic of Computation",
-publisher=Springer,series=LNCS,volume=78,year=1979}
-
-@book{LNCS2283,author={Tobias Nipkow and Lawrence Paulson and Markus Wenzel},
-title="Isabelle/HOL --- A Proof Assistant for Higher-Order Logic",
-publisher=Springer,series=LNCS,volume=2283,year=2002,
-note={\url{http://www.in.tum.de/~nipkow/LNCS2283/}}}
-
-@article{KleinN-TOPLAS,author={Gerwin Klein and Tobias Nipkow},
-title={A Machine-Checked Model for a {Java}-Like Language, Virtual Machine and Compiler},
-journal=TOPLAS,volume = {28}, number = {4}, year = {2006}, pages = {619--695},
-doi = {http://doi.acm.org/10.1145/1146809.1146811}}
-
-@InProceedings{Rudnicki92,author={P. Rudnicki},
-title={An Overview of the {Mizar} Project},
-booktitle={Workshop on Types for Proofs and Programs},
-year=1992,organization={Chalmers University of Technology}}
-
-@manual{Isar-Ref-Man,author="Markus Wenzel",
-title="The Isabelle/Isar Reference Manual",
-organization={Technische Universit{\"a}t M{\"u}nchen},year=2002,
-note={\url{http://isabelle.in.tum.de/dist/Isabelle2002/doc/isar-ref.pdf}}}
-
-@article{WenzelW-JAR,author={Markus Wenzel and Freek Wiedijk},
-title={A comparison of the mathematical proof languages {Mizar} and {Isar}},
-journal=JAR,year=2002,pages={389--411}}
-
--- a/doc-src/IsarOverview/Isar/document/root.tex	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,31 +0,0 @@
-\documentclass[envcountsame]{llncs}
-%\documentclass[11pt,a4paper]{article}
-\usepackage{../../../../lib/texinputs/isabelle,../../../../lib/texinputs/isabellesym,../../../pdfsetup}
-
-%for best-style documents ...
-\urlstyle{rm}
-%\isabellestyle{it}
-
-\newcommand{\tweakskip}{\vspace{-\medskipamount}}
-
-\pagestyle{plain}
-
-\begin{document}
-
-\title{A Tutorial Introduction to Structured Isar Proofs}
-\author{Tobias Nipkow}
-\institute{Institut f{\"u}r Informatik, TU M{\"u}nchen\\
- {\small\url{http://www.in.tum.de/~nipkow/}}}
-\date{}
-\maketitle
-
-\input{intro.tex}
-\input{Logic.tex}
-\input{Induction.tex}
-
-\begingroup
-\bibliographystyle{plain} \small\raggedright\frenchspacing
-\bibliography{root}
-\endgroup
-
-\end{document}
--- a/doc-src/IsarOverview/Isar/makeDemo	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,36 +0,0 @@
-#!/usr/bin/perl -w
-
-sub doit {
-    my ($file) = @_;
-
-    open (FILE, $file) || die $!;
-    undef $/; $text = <FILE>; $/ = "\n";
-    close FILE || die $!;
-
-    $_ = $text;
-
-    s/text_raw\{\*([^*]|\*[^}])*\*\}//sg;       # actual work done here
-    s/text\{\*([^*]|\*[^}])*\*\}//sg;       # actual work done here
-    s/\(\*<\*\)//sg;
-    s/\(\*>\*\)//sg;
-    s/--(\ )*"([^"])*"//sg;
-    s/--(\ )*\{\*([^*]|\*[^}])*\*\}//sg;
-
-    $result = $_;
-
-    if ($text ne $result) {
-        print STDERR "fixing $file\n";
-#        if (! -f "$file~~") {
-#            rename $file, "$file~~" || die $!;
-#        }
-        open (FILE, "> Demo/$file") || die $!;
-        print FILE $result;
-        close FILE || die $!;
-    }
-}
-
-
-foreach $file (@ARGV) {
-  eval { &doit($file); };
-  if ($@) { print STDERR "*** doit $file: ", $@, "\n"; }
-}
--- a/doc-src/IsarOverview/Makefile	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,19 +0,0 @@
-
-## targets
-
-default: dvi
-
-## dependencies
-
-dvi:
-	cd Isar/document; make dvi
-
-pdf:
-	cd Isar/document; make pdf
-
-clean:
-	cd Isar/document; make clean
-
-mrproper:
-	rm -f *.pdf *.dvi
-	cd Isar/document; make mrproper
--- a/doc-src/IsarRef/Thy/HOL_Specific.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/IsarRef/Thy/HOL_Specific.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1382,9 +1382,9 @@
     is a quotient extension theorem. Quotient extension theorems
     allow for quotienting inside container types. Given a polymorphic
     type that serves as a container, a map function defined for this
-    container  using @{command (HOL) "enriched_type"} and a relation
+    container using @{command (HOL) "enriched_type"} and a relation
    map defined for the container type, the quotient extension
-    theorem should be @{term "Quotient R Abs Rep \<Longrightarrow> Quotient
+    theorem should be @{term "Quotient3 R Abs Rep \<Longrightarrow> Quotient3
     (rel_map R) (map Abs) (map Rep)"}. Quotient extension theorems
    are stored in a database and are used in all the steps of lifting
     theorems.
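+    For example, for the list container type the map function is
+    @{term map} and the relation map is @{term list_all2}, so the
+    corresponding quotient extension theorem would read
+    @{term "Quotient3 R Abs Rep \<Longrightarrow> Quotient3 (list_all2 R) (map Abs) (map Rep)"}.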
@@ -1740,6 +1740,15 @@
     \item[@{text no_assms}] specifies whether assumptions in
     structured proofs should be ignored.
 
+    \item[@{text locale}] specifies how to process conjectures in
+    a locale context, i.e., they can either be interpreted or expanded.
+    The option is a whitespace-separated list of the two words
+    @{text interpret} and @{text expand}; the list determines the
+    order in which they are employed.  The default setting is to use
+    interpretations first and then to test the expanded conjecture.
+    The option is only provided as an attribute declaration, but not
+    as a parameter to the command.
+
     \item[@{text timeout}] sets the time limit in seconds.
 
     \item[@{text default_type}] sets the type(s) generally used to
--- a/doc-src/IsarRef/Thy/Proof.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/IsarRef/Thy/Proof.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -294,7 +294,7 @@
 *}
 
 
-subsection {* Facts and forward chaining *}
+subsection {* Facts and forward chaining \label{sec:proof-facts} *}
 
 text {*
   \begin{matharray}{rcl}
@@ -440,7 +440,7 @@
   
     goal: (@{syntax props} + @'and')
     ;
-    longgoal: @{syntax thmdecl}? (@{syntax context_elem} * ) conclusion
+    longgoal: @{syntax thmdecl}? (@{syntax_ref \"includes\"}?) (@{syntax context_elem} * ) conclusion
     ;
     conclusion: @'shows' goal | @'obtains' (@{syntax parname}? case + '|')
     ;
@@ -454,8 +454,8 @@
   \<phi>"} to be put back into the target context.  An additional @{syntax
   context} specification may build up an initial proof context for the
   subsequent claim; this includes local definitions and syntax as
-  well, see the definition of @{syntax context_elem} in
-  \secref{sec:locale}.
+  well, see also @{syntax "includes"} in \secref{sec:bundle} and
+  @{syntax context_elem} in \secref{sec:locale}.
   
   \item @{command "theorem"}~@{text "a: \<phi>"} and @{command
   "corollary"}~@{text "a: \<phi>"} are essentially the same as @{command
--- a/doc-src/IsarRef/Thy/Spec.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/IsarRef/Thy/Spec.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -105,38 +105,54 @@
 section {* Local theory targets \label{sec:target} *}
 
 text {*
-  A local theory target is a context managed separately within the
-  enclosing theory.  Contexts may introduce parameters (fixed
-  variables) and assumptions (hypotheses).  Definitions and theorems
-  depending on the context may be added incrementally later on.  Named
-  contexts refer to locales (cf.\ \secref{sec:locale}) or type classes
-  (cf.\ \secref{sec:class}); the name ``@{text "-"}'' signifies the
-  global theory context.
-
   \begin{matharray}{rcll}
     @{command_def "context"} & : & @{text "theory \<rightarrow> local_theory"} \\
     @{command_def (local) "end"} & : & @{text "local_theory \<rightarrow> theory"} \\
   \end{matharray}
 
+  A local theory target is a context managed separately within the
+  enclosing theory.  Contexts may introduce parameters (fixed
+  variables) and assumptions (hypotheses).  Definitions and theorems
+  depending on the context may be added incrementally later on.
+
+  \emph{Named contexts} refer to locales (cf.\ \secref{sec:locale}) or
+  type classes (cf.\ \secref{sec:class}); the name ``@{text "-"}''
+  signifies the global theory context.
+
+  \emph{Unnamed contexts} may introduce additional parameters and
+  assumptions, and results produced in the context are generalized
+  accordingly.  Such auxiliary contexts may be nested within other
+  targets, like @{command "locale"}, @{command "class"}, @{command
+  "instantiation"}, @{command "overloading"}.
+
   @{rail "
     @@{command context} @{syntax nameref} @'begin'
     ;
-
+    @@{command context} @{syntax_ref \"includes\"}? (@{syntax context_elem} * ) @'begin'
+    ;
     @{syntax_def target}: '(' @'in' @{syntax nameref} ')'
   "}
 
   \begin{description}
   
-  \item @{command "context"}~@{text "c \<BEGIN>"} recommences an
-  existing locale or class context @{text c}.  Note that locale and
-  class definitions allow to include the @{keyword "begin"} keyword as
-  well, in order to continue the local theory immediately after the
-  initial specification.
+  \item @{command "context"}~@{text "c \<BEGIN>"} opens a named
+  context, by recommencing an existing locale or class @{text c}.
+  Note that locale and class definitions also allow including the
+  @{keyword "begin"} keyword, in order to continue the local
+  theory immediately after the initial specification.
+
+  \item @{command "context"}~@{text "bundles elements \<BEGIN>"} opens
+  an unnamed context, by extending the enclosing global or local
+  theory target by the given declaration bundles (\secref{sec:bundle})
+  and context elements (@{text "\<FIXES>"}, @{text "\<ASSUMES>"}
+  etc.).  This means any results stemming from definitions and proofs
+  in the extended context will be exported into the enclosing target
+  by lifting over extra parameters and premises.
   
-  \item @{command (local) "end"} concludes the current local theory
-  and continues the enclosing global theory.  Note that a global
-  @{command (global) "end"} has a different meaning: it concludes the
-  theory itself (\secref{sec:begin-thy}).
+  \item @{command (local) "end"} concludes the current local theory,
+  according to the nesting of contexts.  Note that a global @{command
+  (global) "end"} has a different meaning: it concludes the theory
+  itself (\secref{sec:begin-thy}).
   
   \item @{text "("}@{keyword_def "in"}~@{text "c)"} given after any
   local theory command specifies an immediate target, e.g.\
@@ -166,7 +182,83 @@
   generalizing the parameters of the context.  For example, @{text "a:
   B[x]"} becomes @{text "c.a: A[?x] \<Longrightarrow> B[?x]"}, again for arbitrary
   @{text "?x"}.
-*}
+
+  \medskip The Isabelle/HOL library contains numerous applications of
+  locales and classes, e.g.\ see @{file "~~/src/HOL/Algebra"}.  An
+  example of an unnamed auxiliary context is given in @{file
+  "~~/src/HOL/Isar_Examples/Group_Context.thy"}.  *}
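+
+text {* As a minimal sketch (purely illustrative, not part of the
+  library), an unnamed context with parameters and assumptions may be
+  used as follows: *}
+
+(* illustrative sketch only *)
+context
+  fixes a b :: nat
+  assumes ab: "a \<le> b"
+begin
+
+lemma a_le_Suc_b: "a \<le> Suc b"
+  using ab by arith
+
+end
+
+text {* After the @{command (local) "end"}, the exported result
+  @{text a_le_Suc_b} reads @{text "a \<le> b \<Longrightarrow> a \<le> Suc b"},
+  with the parameters @{text a} and @{text b} generalized. *}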
+
+
+section {* Bundled declarations \label{sec:bundle} *}
+
+text {*
+  \begin{matharray}{rcl}
+    @{command_def "bundle"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+    @{command_def "print_bundles"}@{text "\<^sup>*"} & : & @{text "context \<rightarrow> "} \\
+    @{command_def "include"} & : & @{text "proof(state) \<rightarrow> proof(state)"} \\
+    @{command_def "including"} & : & @{text "proof(prove) \<rightarrow> proof(prove)"} \\
+    @{keyword_def "includes"} & : & syntax \\
+  \end{matharray}
+
+  The outer syntax of fact expressions (\secref{sec:syn-att}) involves
+  theorems and attributes, which are evaluated in the context and
+  applied to it.  Attributes may declare theorems to the context, as
+  in @{text "this_rule [intro] that_rule [elim]"} for example.
+  Configuration options (\secref{sec:config}) are special declaration
+  attributes that operate on the context without a theorem, as in
+  @{text "[[show_types = false]]"} for example.
+
+  Expressions of this form may be defined as \emph{bundled
+  declarations} in the context, and included in other situations later
+  on.  Including declaration bundles augments a local context in an
+  ad-hoc manner, without logical dependencies; this is in contrast to
+  locales and
+  locale interpretation (\secref{sec:locale}).
+
+  @{rail "
+    @@{command bundle} @{syntax target}? \\
+    @{syntax name} '=' @{syntax thmrefs} (@'for' (@{syntax vars} + @'and'))?
+    ;
+    (@@{command include} | @@{command including}) (@{syntax nameref}+)
+    ;
+    @{syntax_def \"includes\"}: @'includes' (@{syntax nameref}+)
+  "}
+
+  \begin{description}
+
+  \item @{command bundle}~@{text "b = decls"} defines a bundle of
+  declarations in the current context.  The RHS is similar to the one
+  of the @{command declare} command.  Bundles defined in local theory
+  targets are subject to transformations via morphisms, when moved
+  into different application contexts; this works analogously to any
+  other local theory specification.
+
+  \item @{command print_bundles} prints the named bundles that are
+  available in the current context.
+
+  \item @{command include}~@{text "b\<^sub>1 \<dots> b\<^sub>n"} includes the declarations
+  from the given bundles into the current proof body context.  This is
+  analogous to @{command "note"} (\secref{sec:proof-facts}) with the
+  expanded bundles.
+
+  \item @{command including} is similar to @{command include}, but
+  works in proof refinement (backward mode).  This is analogous to
+  @{command "using"} (\secref{sec:proof-facts}) with the expanded
+  bundles.
+
+  \item @{keyword "includes"}~@{text "b\<^sub>1 \<dots> b\<^sub>n"} is similar to
+  @{command include}, but works in situations where a specification
+  context is constructed, notably for @{command context} and long
+  statements of @{command theorem} etc.
+
+  \end{description}
+
+  Here is an artificial example of bundling various configuration
+  options: *}
+
+bundle trace = [[simp_trace, blast_trace, linarith_trace, metis_trace, smt_trace]]
+
+lemma "x = x"
+  including trace by metis
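+
+text {* For illustration (continuing the artificial example above), the
+  same bundle can also be activated inside a structured proof body via
+  @{command include}, or in a long theorem statement via the
+  @{keyword "includes"} element: *}
+
+lemma "x = x"
+proof -
+  include trace
+  show ?thesis by metis
+qed
+
+lemma includes trace shows "x = x" by metis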
 
 
 section {* Basic specification elements *}
@@ -258,6 +350,12 @@
 section {* Generic declarations *}
 
 text {*
+  \begin{matharray}{rcl}
+    @{command_def "declaration"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+    @{command_def "syntax_declaration"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+    @{command_def "declare"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+  \end{matharray}
+
   Arbitrary operations on the background context may be wrapped-up as
   generic declaration elements.  Since the underlying concept of local
   theories may be subject to later re-interpretation, there is an
@@ -267,12 +365,6 @@
   case: it consists of a theorem which is applied to the context by
   means of an attribute.
 
-  \begin{matharray}{rcl}
-    @{command_def "declaration"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
-    @{command_def "syntax_declaration"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
-    @{command_def "declare"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
-  \end{matharray}
-
   @{rail "
     (@@{command declaration} | @@{command syntax_declaration})
       ('(' @'pervasive' ')')? \\ @{syntax target}? @{syntax text}
@@ -499,13 +591,6 @@
 subsection {* Locale interpretations *}
 
 text {*
-  Locale expressions may be instantiated, and the instantiated facts
-  added to the current context.  This requires a proof of the
-  instantiated specification and is called \emph{locale
-  interpretation}.  Interpretation is possible in locales @{command
-  "sublocale"}, theories (command @{command "interpretation"}) and
-  also within a proof body (command @{command "interpret"}).
-
   \begin{matharray}{rcl}
     @{command_def "interpretation"} & : & @{text "theory \<rightarrow> proof(prove)"} \\
     @{command_def "interpret"} & : & @{text "proof(state) | proof(chain) \<rightarrow> proof(prove)"} \\
@@ -514,6 +599,13 @@
     @{command_def "print_interps"}@{text "\<^sup>*"} & : & @{text "context \<rightarrow>"} \\
   \end{matharray}
 
+  Locale expressions may be instantiated, and the instantiated facts
+  added to the current context.  This requires a proof of the
+  instantiated specification and is called \emph{locale
+  interpretation}.  Interpretation is possible in locales
+  (command @{command "sublocale"}), in theories (command
+  @{command "interpretation"}), and
+  also within a proof body (command @{command "interpret"}).
+
   @{rail "
     @@{command interpretation} @{syntax locale_expr} equations?
     ;
@@ -635,15 +727,6 @@
 section {* Classes \label{sec:class} *}
 
 text {*
-  A class is a particular locale with \emph{exactly one} type variable
-  @{text \<alpha>}.  Beyond the underlying locale, a corresponding type class
-  is established which is interpreted logically as axiomatic type
-  class \cite{Wenzel:1997:TPHOL} whose logical content are the
-  assumptions of the locale.  Thus, classes provide the full
-  generality of locales combined with the commodity of type classes
-  (notably type-inference).  See \cite{isabelle-classes} for a short
-  tutorial.
-
   \begin{matharray}{rcl}
     @{command_def "class"} & : & @{text "theory \<rightarrow> local_theory"} \\
     @{command_def "instantiation"} & : & @{text "theory \<rightarrow> local_theory"} \\
@@ -655,6 +738,15 @@
     @{method_def intro_classes} & : & @{text method} \\
   \end{matharray}
 
+  A class is a particular locale with \emph{exactly one} type variable
+  @{text \<alpha>}.  Beyond the underlying locale, a corresponding type class
+  is established, which is interpreted logically as an axiomatic type
+  class \cite{Wenzel:1997:TPHOL} whose logical content consists of the
+  assumptions of the locale.  Thus, classes provide the full
+  generality of locales combined with the convenience of type classes
+  (notably type-inference).  See \cite{isabelle-classes} for a short
+  tutorial.
+
   @{rail "
     @@{command class} class_spec @'begin'?
     ;
@@ -811,6 +903,10 @@
 section {* Unrestricted overloading *}
 
 text {*
+  \begin{matharray}{rcl}
+    @{command_def "overloading"} & : & @{text "theory \<rightarrow> local_theory"} \\
+  \end{matharray}
+
   Isabelle/Pure's definitional schemes support certain forms of
   overloading (see \secref{sec:consts}).  Overloading means that a
   constant being declared as @{text "c :: \<alpha> decl"} may be
@@ -823,10 +919,6 @@
   The @{command "overloading"} target provides a convenient view for
   end-users.
 
-  \begin{matharray}{rcl}
-    @{command_def "overloading"} & : & @{text "theory \<rightarrow> local_theory"} \\
-  \end{matharray}
-
   @{rail "
     @@{command overloading} ( spec + ) @'begin'
     ;
@@ -1042,6 +1134,11 @@
 subsection {* Constants and definitions \label{sec:consts} *}
 
 text {*
+  \begin{matharray}{rcl}
+    @{command_def "consts"} & : & @{text "theory \<rightarrow> theory"} \\
+    @{command_def "defs"} & : & @{text "theory \<rightarrow> theory"} \\
+  \end{matharray}
+
   Definitions essentially express abbreviations within the logic.  The
   simplest form of a definition is @{text "c :: \<sigma> \<equiv> t"}, where @{text
   c} is a newly declared constant.  Isabelle also allows derived forms
@@ -1079,11 +1176,6 @@
   corresponding occurrences on some right-hand side need to be an
   instance of this, general @{text "d :: \<alpha> \<times> \<beta>"} will be disallowed.
 
-  \begin{matharray}{rcl}
-    @{command_def "consts"} & : & @{text "theory \<rightarrow> theory"} \\
-    @{command_def "defs"} & : & @{text "theory \<rightarrow> theory"} \\
-  \end{matharray}
-
   @{rail "
     @@{command consts} ((@{syntax name} '::' @{syntax type} @{syntax mixfix}?) +)
     ;
@@ -1159,10 +1251,15 @@
 
 section {* Oracles *}
 
-text {* Oracles allow Isabelle to take advantage of external reasoners
-  such as arithmetic decision procedures, model checkers, fast
-  tautology checkers or computer algebra systems.  Invoked as an
-  oracle, an external reasoner can create arbitrary Isabelle theorems.
+text {*
+  \begin{matharray}{rcll}
+    @{command_def "oracle"} & : & @{text "theory \<rightarrow> theory"} & (axiomatic!) \\
+  \end{matharray}
+
+  Oracles allow Isabelle to take advantage of external reasoners such
+  as arithmetic decision procedures, model checkers, fast tautology
+  checkers or computer algebra systems.  Invoked as an oracle, an
+  external reasoner can create arbitrary Isabelle theorems.
 
   It is the responsibility of the user to ensure that the external
   reasoner is as trustworthy as the application requires.  Another
@@ -1174,10 +1271,6 @@
   asserted, and records within the internal derivation object how
   presumed theorems depend on unproven suppositions.
 
-  \begin{matharray}{rcll}
-    @{command_def "oracle"} & : & @{text "theory \<rightarrow> theory"} & (axiomatic!) \\
-  \end{matharray}
-
   @{rail "
     @@{command oracle} @{syntax name} '=' @{syntax text}
   "}
--- a/doc-src/IsarRef/Thy/document/HOL_Specific.tex	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/IsarRef/Thy/document/HOL_Specific.tex	Mon Apr 16 19:01:57 2012 +0200
@@ -1959,9 +1959,9 @@
     is a quotient extension theorem. Quotient extension theorems
     allow for quotienting inside container types. Given a polymorphic
     type that serves as a container, a map function defined for this
-    container  using \hyperlink{command.HOL.enriched-type}{\mbox{\isa{\isacommand{enriched{\isaliteral{5F}{\isacharunderscore}}type}}}} and a relation
+    container using \hyperlink{command.HOL.enriched-type}{\mbox{\isa{\isacommand{enriched{\isaliteral{5F}{\isacharunderscore}}type}}}} and a relation
    map defined for the container type, the quotient extension
-    theorem should be \isa{{\isaliteral{22}{\isachardoublequote}}Quotient\ R\ Abs\ Rep\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ Quotient\ {\isaliteral{28}{\isacharparenleft}}rel{\isaliteral{5F}{\isacharunderscore}}map\ R{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{28}{\isacharparenleft}}map\ Abs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{28}{\isacharparenleft}}map\ Rep{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}}. Quotient extension theorems
+    theorem should be \isa{{\isaliteral{22}{\isachardoublequote}}Quotient{\isadigit{3}}\ R\ Abs\ Rep\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ Quotient{\isadigit{3}}\ {\isaliteral{28}{\isacharparenleft}}rel{\isaliteral{5F}{\isacharunderscore}}map\ R{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{28}{\isacharparenleft}}map\ Abs{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{28}{\isacharparenleft}}map\ Rep{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}}. Quotient extension theorems
     are stored in a database and are used in all the steps of lifting
     theorems.
 
@@ -2504,6 +2504,15 @@
     \item[\isa{no{\isaliteral{5F}{\isacharunderscore}}assms}] specifies whether assumptions in
     structured proofs should be ignored.
 
+    \item[\isa{locale}] specifies how to process conjectures in
+    a locale context, i.e., whether they are interpreted or expanded.
+    The option is a whitespace-separated list of the two words
+    \isa{interpret} and \isa{expand}; the list determines the order
+    in which they are employed.  The default setting is to use
+    interpretations first and then to test the expanded conjecture
+    (see the sketch below).  The option is only available as an
+    attribute declaration, not as a parameter to the command.
+
     \item[\isa{timeout}] sets the time limit in seconds.
 
     \item[\isa{default{\isaliteral{5F}{\isacharunderscore}}type}] sets the type(s) generally used to
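A sketch of such an attribute declaration, assuming the underlying configuration option is exposed under the name quickcheck_locale (that name is an assumption, not confirmed by this changeset):

    declare [[quickcheck_locale = "interpret expand"]]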
--- a/doc-src/IsarRef/Thy/document/Proof.tex	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/IsarRef/Thy/document/Proof.tex	Mon Apr 16 19:01:57 2012 +0200
@@ -373,7 +373,7 @@
 \end{isamarkuptext}%
 \isamarkuptrue%
 %
-\isamarkupsubsection{Facts and forward chaining%
+\isamarkupsubsection{Facts and forward chaining \label{sec:proof-facts}%
 }
 \isamarkuptrue%
 %
@@ -590,6 +590,10 @@
 \rail@nextbar{1}
 \rail@nont{\hyperlink{syntax.thmdecl}{\mbox{\isa{thmdecl}}}}[]
 \rail@endbar
+\rail@bar
+\rail@nextbar{1}
+\rail@nont{\indexref{}{syntax}{includes}\hyperlink{syntax.includes}{\mbox{\isa{includes}}}}[]
+\rail@endbar
 \rail@plus
 \rail@nextplus{1}
 \rail@cnont{\hyperlink{syntax.context-elem}{\mbox{\isa{context{\isaliteral{5F}{\isacharunderscore}}elem}}}}[]
@@ -634,8 +638,8 @@
   \item \hyperlink{command.lemma}{\mbox{\isa{\isacommand{lemma}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C7068693E}{\isasymphi}}{\isaliteral{22}{\isachardoublequote}}} enters proof mode with
   \isa{{\isaliteral{5C3C7068693E}{\isasymphi}}} as main goal, eventually resulting in some fact \isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{5C3C7475726E7374696C653E}{\isasymturnstile}}\ {\isaliteral{5C3C7068693E}{\isasymphi}}{\isaliteral{22}{\isachardoublequote}}} to be put back into the target context.  An additional \hyperlink{syntax.context}{\mbox{\isa{context}}} specification may build up an initial proof context for the
   subsequent claim; this includes local definitions and syntax as
-  well, see the definition of \hyperlink{syntax.context-elem}{\mbox{\isa{context{\isaliteral{5F}{\isacharunderscore}}elem}}} in
-  \secref{sec:locale}.
+  well, see also \hyperlink{syntax.includes}{\mbox{\isa{includes}}} in \secref{sec:bundle} and
+  \hyperlink{syntax.context-elem}{\mbox{\isa{context{\isaliteral{5F}{\isacharunderscore}}elem}}} in \secref{sec:locale}.
   
   \item \hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C7068693E}{\isasymphi}}{\isaliteral{22}{\isachardoublequote}}} and \hyperlink{command.corollary}{\mbox{\isa{\isacommand{corollary}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C7068693E}{\isasymphi}}{\isaliteral{22}{\isachardoublequote}}} are essentially the same as \hyperlink{command.lemma}{\mbox{\isa{\isacommand{lemma}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}a{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C7068693E}{\isasymphi}}{\isaliteral{22}{\isachardoublequote}}}, but the facts are internally marked as
   being of a different kind.  This discrimination acts like a formal
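A brief sketch of an includes specification in a long goal statement, reusing the artificial trace bundle from the bundle example of the Spec chapter (illustrative only):

    lemma
      includes trace
      shows "x = x"
      by simp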
--- a/doc-src/IsarRef/Thy/document/Spec.tex	Wed Apr 04 09:59:49 2012 +0200
+++ b/doc-src/IsarRef/Thy/document/Spec.tex	Mon Apr 16 19:01:57 2012 +0200
@@ -167,25 +167,43 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-A local theory target is a context managed separately within the
-  enclosing theory.  Contexts may introduce parameters (fixed
-  variables) and assumptions (hypotheses).  Definitions and theorems
-  depending on the context may be added incrementally later on.  Named
-  contexts refer to locales (cf.\ \secref{sec:locale}) or type classes
-  (cf.\ \secref{sec:class}); the name ``\isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{2D}{\isacharminus}}{\isaliteral{22}{\isachardoublequote}}}'' signifies the
-  global theory context.
-
-  \begin{matharray}{rcll}
+\begin{matharray}{rcll}
     \indexdef{}{command}{context}\hypertarget{command.context}{\hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
     \indexdef{local}{command}{end}\hypertarget{command.local.end}{\hyperlink{command.local.end}{\mbox{\isa{\isacommand{end}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} \\
   \end{matharray}
 
+  A local theory target is a context managed separately within the
+  enclosing theory.  Contexts may introduce parameters (fixed
+  variables) and assumptions (hypotheses).  Definitions and theorems
+  depending on the context may be added incrementally later on.
+
+  \emph{Named contexts} refer to locales (cf.\ \secref{sec:locale}) or
+  type classes (cf.\ \secref{sec:class}); the name ``\isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{2D}{\isacharminus}}{\isaliteral{22}{\isachardoublequote}}}''
+  signifies the global theory context.
+
+  \emph{Unnamed contexts} may introduce additional parameters and
+  assumptions, and results produced in the context are generalized
+  accordingly.  Such auxiliary contexts may be nested within other
+  targets, like \hyperlink{command.locale}{\mbox{\isa{\isacommand{locale}}}}, \hyperlink{command.class}{\mbox{\isa{\isacommand{class}}}}, \hyperlink{command.instantiation}{\mbox{\isa{\isacommand{instantiation}}}}, \hyperlink{command.overloading}{\mbox{\isa{\isacommand{overloading}}}}.
+
   \begin{railoutput}
 \rail@begin{1}{}
 \rail@term{\hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}}}[]
 \rail@nont{\hyperlink{syntax.nameref}{\mbox{\isa{nameref}}}}[]
 \rail@term{\isa{\isakeyword{begin}}}[]
 \rail@end
+\rail@begin{2}{}
+\rail@term{\hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}}}[]
+\rail@bar
+\rail@nextbar{1}
+\rail@nont{\indexref{}{syntax}{includes}\hyperlink{syntax.includes}{\mbox{\isa{includes}}}}[]
+\rail@endbar
+\rail@plus
+\rail@nextplus{1}
+\rail@cnont{\hyperlink{syntax.context-elem}{\mbox{\isa{context{\isaliteral{5F}{\isacharunderscore}}elem}}}}[]
+\rail@endplus
+\rail@term{\isa{\isakeyword{begin}}}[]
+\rail@end
 \rail@begin{1}{\indexdef{}{syntax}{target}\hypertarget{syntax.target}{\hyperlink{syntax.target}{\mbox{\isa{target}}}}}
 \rail@term{\isa{{\isaliteral{28}{\isacharparenleft}}}}[]
 \rail@term{\isa{\isakeyword{in}}}[]
@@ -197,16 +215,23 @@
 
   \begin{description}
   
-  \item \hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}c\ {\isaliteral{5C3C424547494E3E}{\isasymBEGIN}}{\isaliteral{22}{\isachardoublequote}}} recommences an
-  existing locale or class context \isa{c}.  Note that locale and
-  class definitions allow to include the \hyperlink{keyword.begin}{\mbox{\isa{\isakeyword{begin}}}} keyword as
-  well, in order to continue the local theory immediately after the
-  initial specification.
+  \item \hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}c\ {\isaliteral{5C3C424547494E3E}{\isasymBEGIN}}{\isaliteral{22}{\isachardoublequote}}} opens a named
+  context, by recommencing an existing locale or class \isa{c}.
+  Note that locale and class definitions also allow the
+  \hyperlink{keyword.begin}{\mbox{\isa{\isakeyword{begin}}}} keyword to be included, in order to continue
+  the local theory immediately after the initial specification.
+
+  \item \hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}bundles\ elements\ {\isaliteral{5C3C424547494E3E}{\isasymBEGIN}}{\isaliteral{22}{\isachardoublequote}}} opens
+  an unnamed context, by extending the enclosing global or local
+  theory target by the given declaration bundles (\secref{sec:bundle})
+  and context elements (\isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{5C3C46495845533E}{\isasymFIXES}}{\isaliteral{22}{\isachardoublequote}}}, \isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{5C3C415353554D45533E}{\isasymASSUMES}}{\isaliteral{22}{\isachardoublequote}}}
+  etc.).  This means any results stemming from definitions and proofs
+  in the extended context will be exported into the enclosing target
+  by lifting over extra parameters and premises.
   
-  \item \hyperlink{command.local.end}{\mbox{\isa{\isacommand{end}}}} concludes the current local theory
-  and continues the enclosing global theory.  Note that a global
-  \hyperlink{command.global.end}{\mbox{\isa{\isacommand{end}}}} has a different meaning: it concludes the
-  theory itself (\secref{sec:begin-thy}).
+  \item \hyperlink{command.local.end}{\mbox{\isa{\isacommand{end}}}} concludes the current local theory,
+  according to the nesting of contexts.  Note that a global \hyperlink{command.global.end}{\mbox{\isa{\isacommand{end}}}} has a different meaning: it concludes the theory
+  itself (\secref{sec:begin-thy}).
   
   \item \isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{28}{\isacharparenleft}}{\isaliteral{22}{\isachardoublequote}}}\indexdef{}{keyword}{in}\hypertarget{keyword.in}{\hyperlink{keyword.in}{\mbox{\isa{\isakeyword{in}}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}c{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}} given after any
   local theory command specifies an immediate target, e.g.\
@@ -233,10 +258,137 @@
 
   Theorems are exported by discharging the assumptions and
   generalizing the parameters of the context.  For example, \isa{{\isaliteral{22}{\isachardoublequote}}a{\isaliteral{3A}{\isacharcolon}}\ B{\isaliteral{5B}{\isacharbrackleft}}x{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequote}}} becomes \isa{{\isaliteral{22}{\isachardoublequote}}c{\isaliteral{2E}{\isachardot}}a{\isaliteral{3A}{\isacharcolon}}\ A{\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{3F}{\isacharquery}}x{\isaliteral{5D}{\isacharbrackright}}\ {\isaliteral{5C3C4C6F6E6772696768746172726F773E}{\isasymLongrightarrow}}\ B{\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{3F}{\isacharquery}}x{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequote}}}, again for arbitrary
-  \isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{3F}{\isacharquery}}x{\isaliteral{22}{\isachardoublequote}}}.%
+  \isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{3F}{\isacharquery}}x{\isaliteral{22}{\isachardoublequote}}}.
+
+  \medskip The Isabelle/HOL library contains numerous applications of
+  locales and classes, e.g.\ see \verb|~~/src/HOL/Algebra|.  An
+  example of an unnamed auxiliary context is given in \verb|~~/src/HOL/Isar_Examples/Group_Context.thy|.%
 \end{isamarkuptext}%
 \isamarkuptrue%
 %
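In the spirit of that example, here is a compact sketch of an unnamed auxiliary context (all names are illustrative only):

    context
      fixes f :: "'a \<Rightarrow> 'a"
      assumes idem: "\<And>x. f (f x) = f x"
    begin

    lemma idem3: "f (f (f x)) = f x"
      by (simp add: idem)

    end

After end, the exported result carries the discharged assumption as a premise and generalizes the fixed parameter f, as described above.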
+\isamarkupsection{Bundled declarations \label{sec:bundle}%
+}
+\isamarkuptrue%
+%
+\begin{isamarkuptext}%
+\begin{matharray}{rcl}
+    \indexdef{}{command}{bundle}\hypertarget{command.bundle}{\hyperlink{command.bundle}{\mbox{\isa{\isacommand{bundle}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{command}{print\_bundles}\hypertarget{command.print-bundles}{\hyperlink{command.print-bundles}{\mbox{\isa{\isacommand{print{\isaliteral{5F}{\isacharunderscore}}bundles}}}}}\isa{{\isaliteral{22}{\isachardoublequote}}\isaliteral{5C3C5E7375703E}{}\isactrlsup {\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequote}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}context\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ {\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{command}{include}\hypertarget{command.include}{\hyperlink{command.include}{\mbox{\isa{\isacommand{include}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}proof{\isaliteral{28}{\isacharparenleft}}state{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ proof{\isaliteral{28}{\isacharparenleft}}state{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{command}{including}\hypertarget{command.including}{\hyperlink{command.including}{\mbox{\isa{\isacommand{including}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}proof{\isaliteral{28}{\isacharparenleft}}prove{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ proof{\isaliteral{28}{\isacharparenleft}}prove{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{keyword}{includes}\hypertarget{keyword.includes}{\hyperlink{keyword.includes}{\mbox{\isa{\isakeyword{includes}}}}} & : & syntax \\
+  \end{matharray}
+
+  The outer syntax of fact expressions (\secref{sec:syn-att}) involves
+  theorems and attributes, which are evaluated in the context and
+  applied to it.  Attributes may declare theorems to the context, as
+  in \isa{{\isaliteral{22}{\isachardoublequote}}this{\isaliteral{5F}{\isacharunderscore}}rule\ {\isaliteral{5B}{\isacharbrackleft}}intro{\isaliteral{5D}{\isacharbrackright}}\ that{\isaliteral{5F}{\isacharunderscore}}rule\ {\isaliteral{5B}{\isacharbrackleft}}elim{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequote}}} for example.
+  Configuration options (\secref{sec:config}) are special declaration
+  attributes that operate on the context without a theorem, as in
+  \isa{{\isaliteral{22}{\isachardoublequote}}{\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5B}{\isacharbrackleft}}show{\isaliteral{5F}{\isacharunderscore}}types\ {\isaliteral{3D}{\isacharequal}}\ false{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{22}{\isachardoublequote}}} for example.
+
+  Expressions of this form may be defined as \emph{bundled
+  declarations} in the context, and included in other situations later
+  on.  Including declaration bundles augments a local context casually
+  without logical dependencies, which is in contrast to locales and
+  locale interpretation (\secref{sec:locale}).
+
+  \begin{railoutput}
+\rail@begin{6}{}
+\rail@term{\hyperlink{command.bundle}{\mbox{\isa{\isacommand{bundle}}}}}[]
+\rail@bar
+\rail@nextbar{1}
+\rail@nont{\hyperlink{syntax.target}{\mbox{\isa{target}}}}[]
+\rail@endbar
+\rail@cr{3}
+\rail@nont{\hyperlink{syntax.name}{\mbox{\isa{name}}}}[]
+\rail@term{\isa{{\isaliteral{3D}{\isacharequal}}}}[]
+\rail@nont{\hyperlink{syntax.thmrefs}{\mbox{\isa{thmrefs}}}}[]
+\rail@bar
+\rail@nextbar{4}
+\rail@term{\isa{\isakeyword{for}}}[]
+\rail@plus
+\rail@nont{\hyperlink{syntax.vars}{\mbox{\isa{vars}}}}[]
+\rail@nextplus{5}
+\rail@cterm{\isa{\isakeyword{and}}}[]
+\rail@endplus
+\rail@endbar
+\rail@end
+\rail@begin{2}{}
+\rail@bar
+\rail@term{\hyperlink{command.include}{\mbox{\isa{\isacommand{include}}}}}[]
+\rail@nextbar{1}
+\rail@term{\hyperlink{command.including}{\mbox{\isa{\isacommand{including}}}}}[]
+\rail@endbar
+\rail@plus
+\rail@nont{\hyperlink{syntax.nameref}{\mbox{\isa{nameref}}}}[]
+\rail@nextplus{1}
+\rail@endplus
+\rail@end
+\rail@begin{2}{\indexdef{}{syntax}{includes}\hypertarget{syntax.includes}{\hyperlink{syntax.includes}{\mbox{\isa{includes}}}}}
+\rail@term{\isa{\isakeyword{includes}}}[]
+\rail@plus
+\rail@nont{\hyperlink{syntax.nameref}{\mbox{\isa{nameref}}}}[]
+\rail@nextplus{1}
+\rail@endplus
+\rail@end
+\end{railoutput}
+
+
+  \begin{description}
+
+  \item \hyperlink{command.bundle}{\mbox{\isa{\isacommand{bundle}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}b\ {\isaliteral{3D}{\isacharequal}}\ decls{\isaliteral{22}{\isachardoublequote}}} defines a bundle of
+  declarations in the current context.  The RHS is similar to that
+  of the \hyperlink{command.declare}{\mbox{\isa{\isacommand{declare}}}} command.  Bundles defined in local theory
+  targets are subject to transformations via morphisms, when moved
+  into different application contexts; this works analogously to any
+  other local theory specification.
+
+  \item \hyperlink{command.print-bundles}{\mbox{\isa{\isacommand{print{\isaliteral{5F}{\isacharunderscore}}bundles}}}} prints the named bundles that are
+  available in the current context.
+
+  \item \hyperlink{command.include}{\mbox{\isa{\isacommand{include}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}b\isaliteral{5C3C5E7375623E}{}\isactrlsub {\isadigit{1}}\ {\isaliteral{5C3C646F74733E}{\isasymdots}}\ b\isaliteral{5C3C5E7375623E}{}\isactrlsub n{\isaliteral{22}{\isachardoublequote}}} includes the declarations
+  from the given bundles into the current proof body context.  This is
+  analogous to \hyperlink{command.note}{\mbox{\isa{\isacommand{note}}}} (\secref{sec:proof-facts}) with the
+  expanded bundles.
+
+  \item \hyperlink{command.including}{\mbox{\isa{\isacommand{including}}}} is similar to \hyperlink{command.include}{\mbox{\isa{\isacommand{include}}}}, but
+  works in proof refinement (backward mode).  This is analogous to
+  \hyperlink{command.using}{\mbox{\isa{\isacommand{using}}}} (\secref{sec:proof-facts}) with the expanded
+  bundles.
+
+  \item \hyperlink{keyword.includes}{\mbox{\isa{\isakeyword{includes}}}}~\isa{{\isaliteral{22}{\isachardoublequote}}b\isaliteral{5C3C5E7375623E}{}\isactrlsub {\isadigit{1}}\ {\isaliteral{5C3C646F74733E}{\isasymdots}}\ b\isaliteral{5C3C5E7375623E}{}\isactrlsub n{\isaliteral{22}{\isachardoublequote}}} is similar to
+  \hyperlink{command.include}{\mbox{\isa{\isacommand{include}}}}, but works in situations where a specification
+  context is constructed, notably for \hyperlink{command.context}{\mbox{\isa{\isacommand{context}}}} and long
+  statements of \hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}} etc.
+
+  \end{description}
+
+  Here is an artificial example of bundling various configuration
+  options:%
+\end{isamarkuptext}%
+\isamarkuptrue%
+\isacommand{bundle}\isamarkupfalse%
+\ trace\ {\isaliteral{3D}{\isacharequal}}\ {\isaliteral{5B}{\isacharbrackleft}}{\isaliteral{5B}{\isacharbrackleft}}simp{\isaliteral{5F}{\isacharunderscore}}trace{\isaliteral{2C}{\isacharcomma}}\ blast{\isaliteral{5F}{\isacharunderscore}}trace{\isaliteral{2C}{\isacharcomma}}\ linarith{\isaliteral{5F}{\isacharunderscore}}trace{\isaliteral{2C}{\isacharcomma}}\ metis{\isaliteral{5F}{\isacharunderscore}}trace{\isaliteral{2C}{\isacharcomma}}\ smt{\isaliteral{5F}{\isacharunderscore}}trace{\isaliteral{5D}{\isacharbrackright}}{\isaliteral{5D}{\isacharbrackright}}\isanewline
+\isanewline
+\isacommand{lemma}\isamarkupfalse%
+\ {\isaliteral{22}{\isachardoublequoteopen}}x\ {\isaliteral{3D}{\isacharequal}}\ x{\isaliteral{22}{\isachardoublequoteclose}}\isanewline
+\ \ \isacommand{including}\isamarkupfalse%
+\ trace%
+\isadelimproof
+\ %
+\endisadelimproof
+%
+\isatagproof
+\isacommand{by}\isamarkupfalse%
+\ metis%
+\endisatagproof
+{\isafoldproof}%
+%
+\isadelimproof
+%
+\endisadelimproof
+%
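For comparison, a sketch of the same artificial bundle activated in forward mode within a proof body, using include:

    lemma "x = x"
    proof -
      include trace
      show ?thesis by metis
    qed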
 \isamarkupsection{Basic specification elements%
 }
 \isamarkuptrue%
@@ -405,7 +557,13 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-Arbitrary operations on the background context may be wrapped-up as
+\begin{matharray}{rcl}
+    \indexdef{}{command}{declaration}\hypertarget{command.declaration}{\hyperlink{command.declaration}{\mbox{\isa{\isacommand{declaration}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{command}{syntax\_declaration}\hypertarget{command.syntax-declaration}{\hyperlink{command.syntax-declaration}{\mbox{\isa{\isacommand{syntax{\isaliteral{5F}{\isacharunderscore}}declaration}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{command}{declare}\hypertarget{command.declare}{\hyperlink{command.declare}{\mbox{\isa{\isacommand{declare}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
+  \end{matharray}
+
+  Arbitrary operations on the background context may be wrapped-up as
   generic declaration elements.  Since the underlying concept of local
   theories may be subject to later re-interpretation, there is an
   additional dependency on a morphism that tells the difference of the
@@ -414,12 +572,6 @@
   case: it consists of a theorem which is applied to the context by
   means of an attribute.
 
-  \begin{matharray}{rcl}
-    \indexdef{}{command}{declaration}\hypertarget{command.declaration}{\hyperlink{command.declaration}{\mbox{\isa{\isacommand{declaration}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
-    \indexdef{}{command}{syntax\_declaration}\hypertarget{command.syntax-declaration}{\hyperlink{command.syntax-declaration}{\mbox{\isa{\isacommand{syntax{\isaliteral{5F}{\isacharunderscore}}declaration}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
-    \indexdef{}{command}{declare}\hypertarget{command.declare}{\hyperlink{command.declare}{\mbox{\isa{\isacommand{declare}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
-  \end{matharray}
-
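Two minimal sketches of these commands (the first is a vacuous declaration that simply ignores the morphism; the second declares two standard HOL rules):

    declaration {* fn _ => I *}

    declare conjI [intro] disjCI [intro!]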
   \begin{railoutput}
 \rail@begin{5}{}
 \rail@bar
@@ -807,13 +959,7 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-Locale expressions may be instantiated, and the instantiated facts
-  added to the current context.  This requires a proof of the
-  instantiated specification and is called \emph{locale
-  interpretation}.  Interpretation is possible in locales \hyperlink{command.sublocale}{\mbox{\isa{\isacommand{sublocale}}}}, theories (command \hyperlink{command.interpretation}{\mbox{\isa{\isacommand{interpretation}}}}) and
-  also within a proof body (command \hyperlink{command.interpret}{\mbox{\isa{\isacommand{interpret}}}}).
-
-  \begin{matharray}{rcl}
+\begin{matharray}{rcl}
     \indexdef{}{command}{interpretation}\hypertarget{command.interpretation}{\hyperlink{command.interpretation}{\mbox{\isa{\isacommand{interpretation}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ proof{\isaliteral{28}{\isacharparenleft}}prove{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}} \\
     \indexdef{}{command}{interpret}\hypertarget{command.interpret}{\hyperlink{command.interpret}{\mbox{\isa{\isacommand{interpret}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}proof{\isaliteral{28}{\isacharparenleft}}state{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{7C}{\isacharbar}}\ proof{\isaliteral{28}{\isacharparenleft}}chain{\isaliteral{29}{\isacharparenright}}\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ proof{\isaliteral{28}{\isacharparenleft}}prove{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}} \\
     \indexdef{}{command}{sublocale}\hypertarget{command.sublocale}{\hyperlink{command.sublocale}{\mbox{\isa{\isacommand{sublocale}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ proof{\isaliteral{28}{\isacharparenleft}}prove{\isaliteral{29}{\isacharparenright}}{\isaliteral{22}{\isachardoublequote}}} \\
@@ -821,6 +967,12 @@
     \indexdef{}{command}{print\_interps}\hypertarget{command.print-interps}{\hyperlink{command.print-interps}{\mbox{\isa{\isacommand{print{\isaliteral{5F}{\isacharunderscore}}interps}}}}}\isa{{\isaliteral{22}{\isachardoublequote}}\isaliteral{5C3C5E7375703E}{}\isactrlsup {\isaliteral{2A}{\isacharasterisk}}{\isaliteral{22}{\isachardoublequote}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}context\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}{\isaliteral{22}{\isachardoublequote}}} \\
   \end{matharray}
 
+  Locale expressions may be instantiated, and the instantiated facts
+  added to the current context.  This requires a proof of the
+  instantiated specification and is called \emph{locale
+  interpretation}.  Interpretation is possible in locales (command \hyperlink{command.sublocale}{\mbox{\isa{\isacommand{sublocale}}}}), theories (command \hyperlink{command.interpretation}{\mbox{\isa{\isacommand{interpretation}}}}) and
+  also within a proof body (command \hyperlink{command.interpret}{\mbox{\isa{\isacommand{interpret}}}}).
+
   \begin{railoutput}
 \rail@begin{2}{}
 \rail@term{\hyperlink{command.interpretation}{\mbox{\isa{\isacommand{interpretation}}}}}[]
@@ -984,16 +1136,7 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-A class is a particular locale with \emph{exactly one} type variable
-  \isa{{\isaliteral{5C3C616C7068613E}{\isasymalpha}}}.  Beyond the underlying locale, a corresponding type class
-  is established which is interpreted logically as axiomatic type
-  class \cite{Wenzel:1997:TPHOL} whose logical content are the
-  assumptions of the locale.  Thus, classes provide the full
-  generality of locales combined with the commodity of type classes
-  (notably type-inference).  See \cite{isabelle-classes} for a short
-  tutorial.
-
-  \begin{matharray}{rcl}
+\begin{matharray}{rcl}
     \indexdef{}{command}{class}\hypertarget{command.class}{\hyperlink{command.class}{\mbox{\isa{\isacommand{class}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
     \indexdef{}{command}{instantiation}\hypertarget{command.instantiation}{\hyperlink{command.instantiation}{\mbox{\isa{\isacommand{instantiation}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
     \indexdef{}{command}{instance}\hypertarget{command.instance}{\hyperlink{command.instance}{\mbox{\isa{\isacommand{instance}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}local{\isaliteral{5F}{\isacharunderscore}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
@@ -1004,6 +1147,15 @@
     \indexdef{}{method}{intro\_classes}\hypertarget{method.intro-classes}{\hyperlink{method.intro-classes}{\mbox{\isa{intro{\isaliteral{5F}{\isacharunderscore}}classes}}}} & : & \isa{method} \\
   \end{matharray}
 
+  A class is a particular locale with \emph{exactly one} type variable
+  \isa{{\isaliteral{5C3C616C7068613E}{\isasymalpha}}}.  Beyond the underlying locale, a corresponding type class
+  is established which is interpreted logically as axiomatic type
+  class \cite{Wenzel:1997:TPHOL} whose logical content are the
+  assumptions of the locale.  Thus, classes provide the full
+  generality of locales combined with the commodity of type classes
+  (notably type-inference).  See \cite{isabelle-classes} for a short
+  tutorial.
+
   \begin{railoutput}
 \rail@begin{2}{}
 \rail@term{\hyperlink{command.class}{\mbox{\isa{\isacommand{class}}}}}[]
@@ -1214,7 +1366,11 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-Isabelle/Pure's definitional schemes support certain forms of
+\begin{matharray}{rcl}
+    \indexdef{}{command}{overloading}\hypertarget{command.overloading}{\hyperlink{command.overloading}{\mbox{\isa{\isacommand{overloading}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
+  \end{matharray}
+
+  Isabelle/Pure's definitional schemes support certain forms of
   overloading (see \secref{sec:consts}).  Overloading means that a
   constant being declared as \isa{{\isaliteral{22}{\isachardoublequote}}c\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C616C7068613E}{\isasymalpha}}\ decl{\isaliteral{22}{\isachardoublequote}}} may be
   defined separately on type instances
@@ -1226,10 +1382,6 @@
   The \hyperlink{command.overloading}{\mbox{\isa{\isacommand{overloading}}}} target provides a convenient view for
   end-users.
 
-  \begin{matharray}{rcl}
-    \indexdef{}{command}{overloading}\hypertarget{command.overloading}{\hyperlink{command.overloading}{\mbox{\isa{\isacommand{overloading}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ local{\isaliteral{5F}{\isacharunderscore}}theory{\isaliteral{22}{\isachardoublequote}}} \\
-  \end{matharray}
-
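A small sketch of the overloading target (the constant approx and its nat instance are invented for illustration):

    consts approx :: "'a \<Rightarrow> nat"

    overloading
      approx_nat \<equiv> "approx :: nat \<Rightarrow> nat"
    begin

    definition "approx_nat n = n"

    end

The local name approx_nat stands for the overloaded constant at the given type instance; its definition is checked against the usual rules for overloaded specifications mentioned above.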
   \begin{railoutput}
 \rail@begin{2}{}
 \rail@term{\hyperlink{command.overloading}{\mbox{\isa{\isacommand{overloading}}}}}[]
@@ -1553,7 +1705,12 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-Definitions essentially express abbreviations within the logic.  The
+\begin{matharray}{rcl}
+    \indexdef{}{command}{consts}\hypertarget{command.consts}{\hyperlink{command.consts}{\mbox{\isa{\isacommand{consts}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} \\
+    \indexdef{}{command}{defs}\hypertarget{command.defs}{\hyperlink{command.defs}{\mbox{\isa{\isacommand{defs}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} \\
+  \end{matharray}
+
+  Definitions essentially express abbreviations within the logic.  The
   simplest form of a definition is \isa{{\isaliteral{22}{\isachardoublequote}}c\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C7369676D613E}{\isasymsigma}}\ {\isaliteral{5C3C65717569763E}{\isasymequiv}}\ t{\isaliteral{22}{\isachardoublequote}}}, where \isa{c} is a newly declared constant.  Isabelle also allows derived forms
   where the arguments of \isa{c} appear on the left, abbreviating a
   prefix of \isa{{\isaliteral{5C3C6C616D6264613E}{\isasymlambda}}}-abstractions, e.g.\ \isa{{\isaliteral{22}{\isachardoublequote}}c\ {\isaliteral{5C3C65717569763E}{\isasymequiv}}\ {\isaliteral{5C3C6C616D6264613E}{\isasymlambda}}x\ y{\isaliteral{2E}{\isachardot}}\ t{\isaliteral{22}{\isachardoublequote}}} may be
@@ -1588,11 +1745,6 @@
   corresponding occurrences on some right-hand side need to be an
   instance of this, general \isa{{\isaliteral{22}{\isachardoublequote}}d\ {\isaliteral{3A}{\isacharcolon}}{\isaliteral{3A}{\isacharcolon}}\ {\isaliteral{5C3C616C7068613E}{\isasymalpha}}\ {\isaliteral{5C3C74696D65733E}{\isasymtimes}}\ {\isaliteral{5C3C626574613E}{\isasymbeta}}{\isaliteral{22}{\isachardoublequote}}} will be disallowed.
 
-  \begin{matharray}{rcl}
-    \indexdef{}{command}{consts}\hypertarget{command.consts}{\hyperlink{command.consts}{\mbox{\isa{\isacommand{consts}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} \\
-    \indexdef{}{command}{defs}\hypertarget{command.defs}{\hyperlink{command.defs}{\mbox{\isa{\isacommand{defs}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} \\
-  \end{matharray}
-
   \begin{railoutput}
 \rail@begin{3}{}
 \rail@term{\hyperlink{command.consts}{\mbox{\isa{\isacommand{consts}}}}}[]
@@ -1738,10 +1890,14 @@
 \isamarkuptrue%
 %
 \begin{isamarkuptext}%
-Oracles allow Isabelle to take advantage of external reasoners
-  such as arithmetic decision procedures, model checkers, fast
-  tautology checkers or computer algebra systems.  Invoked as an
-  oracle, an external reasoner can create arbitrary Isabelle theorems.
+\begin{matharray}{rcll}
+    \indexdef{}{command}{oracle}\hypertarget{command.oracle}{\hyperlink{command.oracle}{\mbox{\isa{\isacommand{oracle}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} & (axiomatic!) \\
+  \end{matharray}
+
+  Oracles allow Isabelle to take advantage of external reasoners such
+  as arithmetic decision procedures, model checkers, fast tautology
+  checkers or computer algebra systems.  Invoked as an oracle, an
+  external reasoner can create arbitrary Isabelle theorems.
 
   It is the responsibility of the user to ensure that the external
   reasoner is as trustworthy as the application requires.  Another
@@ -1753,10 +1909,6 @@
   asserted, and records within the internal derivation object how
   presumed theorems depend on unproven suppositions.
 
-  \begin{matharray}{rcll}
-    \indexdef{}{command}{oracle}\hypertarget{command.oracle}{\hyperlink{command.oracle}{\mbox{\isa{\isacommand{oracle}}}}} & : & \isa{{\isaliteral{22}{\isachardoublequote}}theory\ {\isaliteral{5C3C72696768746172726F773E}{\isasymrightarrow}}\ theory{\isaliteral{22}{\isachardoublequote}}} & (axiomatic!) \\
-  \end{matharray}
-
   \begin{railoutput}
 \rail@begin{1}{}
 \rail@term{\hyperlink{command.oracle}{\mbox{\isa{\isacommand{oracle}}}}}[]
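As a minimal sketch of the command, the following hypothetical oracle certifies any proposition handed to it as a theorem without proof, which is precisely why the command is flagged as axiomatic (the name skip_oracle is invented for illustration; a real oracle would first validate its input with an external reasoner):

    oracle skip_oracle = {* fn (thy, t) => Thm.cterm_of thy t *}

The command yields an ML function skip_oracle that maps a pair of a theory and a term to a theorem, and the derivation of every such theorem records its dependence on this oracle.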
--- a/etc/isar-keywords.el	Wed Apr 04 09:59:49 2012 +0200
+++ b/etc/isar-keywords.el	Mon Apr 16 19:01:57 2012 +0200
@@ -129,6 +129,7 @@
     "lemma"
     "lemmas"
     "let"
+    "lift_definition"
     "linear_undo"
     "local_setup"
     "locale"
@@ -570,6 +571,7 @@
     "instance"
     "interpretation"
     "lemma"
+    "lift_definition"
     "nominal_inductive"
     "nominal_inductive2"
     "nominal_primrec"
--- a/lib/Tools/java	Wed Apr 04 09:59:49 2012 +0200
+++ b/lib/Tools/java	Mon Apr 16 19:01:57 2012 +0200
@@ -12,6 +12,6 @@
   SERVER=""
 fi
 
-exec "$ISABELLE_JDK_HOME/bin/java" -Dfile.encoding=UTF-8 $SERVER \
+isabelle_jdk java -Dfile.encoding=UTF-8 $SERVER \
   "-Djava.ext.dirs=$("$ISABELLE_HOME/src/Tools/JVM/java_ext_dirs")" "$@"
 
--- a/lib/Tools/scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/lib/Tools/scala	Mon Apr 16 19:01:57 2012 +0200
@@ -6,6 +6,8 @@
 
 [ -e "$ISABELLE_HOME/Admin/build" ] && { "$ISABELLE_HOME/Admin/build" jars || exit $?; }
 
+export JAVA_HOME="$ISABELLE_JDK_HOME"
+
 CLASSPATH="$(jvmpath "$CLASSPATH")"
 isabelle_scala scala -Dfile.encoding=UTF-8 \
   "-Djava.ext.dirs=$("$ISABELLE_HOME/src/Tools/JVM/java_ext_dirs")" "$@"
--- a/lib/Tools/scalac	Wed Apr 04 09:59:49 2012 +0200
+++ b/lib/Tools/scalac	Mon Apr 16 19:01:57 2012 +0200
@@ -6,6 +6,8 @@
 
 [ -e "$ISABELLE_HOME/Admin/build" ] && { "$ISABELLE_HOME/Admin/build" jars || exit $?; }
 
+export JAVA_HOME="$ISABELLE_JDK_HOME"
+
 CLASSPATH="$(jvmpath "$CLASSPATH")"
 isabelle_scala scalac -Dfile.encoding=UTF-8 \
   "-Djava.ext.dirs=$("$ISABELLE_HOME/src/Tools/JVM/java_ext_dirs")" "$@"
--- a/lib/scripts/getsettings	Wed Apr 04 09:59:49 2012 +0200
+++ b/lib/scripts/getsettings	Mon Apr 16 19:01:57 2012 +0200
@@ -11,6 +11,21 @@
 
 ISABELLE_SETTINGS_PRESENT=true
 
+#JVM path wrapper
+if [ "$OSTYPE" = cygwin ]
+then
+  ISABELLE_HOME_WINDOWS="$(cygpath -d "$(dirname "$ISABELLE_HOME")")\\$(basename "$ISABELLE_HOME")"
+  ISABELLE_HOME="$(cygpath -u "$ISABELLE_HOME_WINDOWS")"
+
+  CLASSPATH="$(cygpath -i -u -p "$CLASSPATH")"
+  function jvmpath() { cygpath -i -C UTF8 -w -p "$@"; }
+  THIS_CYGWIN="$(jvmpath "/")"
+else
+  function jvmpath() { echo "$@"; }
+  CLASSPATH="$CLASSPATH"
+fi
+HOME_JVM="$HOME"
+
 export ISABELLE_HOME
 if { echo -n "$ISABELLE_HOME" | fgrep " " >/dev/null; }
 then
@@ -53,17 +68,6 @@
   echo "$RESULT"
 }
 
-#JVM path wrapper
-if [ "$OSTYPE" = cygwin ]; then
-  CLASSPATH="$(cygpath -i -u -p "$CLASSPATH")"
-  function jvmpath() { cygpath -i -C UTF8 -w -p "$@"; }
-  THIS_CYGWIN="$(jvmpath "/")"
-else
-  function jvmpath() { echo "$@"; }
-  CLASSPATH="$CLASSPATH"
-fi
-HOME_JVM="$HOME"
-
 #shared library convenience
 function librarypath () {
   for X in "$@"
@@ -91,18 +95,27 @@
 
 #robust invocation via ISABELLE_JDK_HOME
 function isabelle_jdk () {
-  [ -z "$ISABELLE_JDK_HOME" ] && \
-    { echo "Unknown ISABELLE_JDK_HOME -- Java tools unavailable" >&2; return 2; }
-  local PRG="$1"; shift
-  "$ISABELLE_JDK_HOME/bin/$PRG" "$@"
+  if [ -z "$ISABELLE_JDK_HOME" ]; then
+    echo "Unknown ISABELLE_JDK_HOME -- Java tools unavailable" >&2
+    return 127
+  else
+    local PRG="$1"; shift
+    "$ISABELLE_JDK_HOME/bin/$PRG" "$@"
+  fi
 }
 
 #robust invocation via SCALA_HOME
 function isabelle_scala () {
-  [ -z "$SCALA_HOME" ] && \
-    { echo "Unknown SCALA_HOME -- Scala unavailable" >&2; return 2; }
-  local PRG="$1"; shift
-  "$SCALA_HOME/bin/$PRG" "$@"
+  if [ -z "$ISABELLE_JDK_HOME" ]; then
+    echo "Unknown ISABELLE_JDK_HOME -- Java tools unavailable" >&2
+    return 127
+  elif [ -z "$SCALA_HOME" ]; then
+    echo "Unknown SCALA_HOME -- Scala unavailable" >&2
+    return 127
+  else
+    local PRG="$1"; shift
+    "$SCALA_HOME/bin/$PRG" "$@"
+  fi
 }
 
 #CLASSPATH convenience
--- a/src/CCL/Wfd.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/CCL/Wfd.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -493,26 +493,19 @@
 subsection {* Evaluation *}
 
 ML {*
-
-local
-  structure Data = Named_Thms(val name = @{binding eval} val description = "evaluation rules");
-in
+structure Eval_Rules =
+  Named_Thms(val name = @{binding eval} val description = "evaluation rules");
 
 fun eval_tac ths =
   Subgoal.FOCUS_PREMS (fn {context, prems, ...} =>
-    DEPTH_SOLVE_1 (resolve_tac (ths @ prems @ Data.get context) 1));
+    DEPTH_SOLVE_1 (resolve_tac (ths @ prems @ Eval_Rules.get context) 1));
+*}
+setup Eval_Rules.setup
 
-val eval_setup =
-  Data.setup #>
-  Method.setup @{binding eval}
-    (Attrib.thms >> (fn ths => fn ctxt => SIMPLE_METHOD' (CHANGED o eval_tac ths ctxt)))
-    "evaluation";
-
-end;
-
+method_setup eval = {*
+  Attrib.thms >> (fn ths => fn ctxt => SIMPLE_METHOD' (CHANGED o eval_tac ths ctxt))
 *}
 
-setup eval_setup
 
 lemmas eval_rls [eval] = trueV falseV pairV lamV caseVtrue caseVfalse caseVpair caseVlam
 
--- a/src/HOL/Algebra/Ideal.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Algebra/Ideal.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -60,7 +60,7 @@
 lemma principalidealI:
   fixes R (structure)
   assumes "ideal I R"
-  assumes generate: "\<exists>i \<in> carrier R. I = Idl {i}"
+    and generate: "\<exists>i \<in> carrier R. I = Idl {i}"
   shows "principalideal I R"
 proof -
   interpret ideal I R by fact
@@ -82,7 +82,7 @@
 lemma maximalidealI:
   fixes R
   assumes "ideal I R"
-  assumes I_notcarr: "carrier R \<noteq> I"
+    and I_notcarr: "carrier R \<noteq> I"
     and I_maximal: "\<And>J. \<lbrakk>ideal J R; I \<subseteq> J; J \<subseteq> carrier R\<rbrakk> \<Longrightarrow> J = I \<or> J = carrier R"
   shows "maximalideal I R"
 proof -
@@ -105,8 +105,8 @@
 lemma primeidealI:
   fixes R (structure)
   assumes "ideal I R"
-  assumes "cring R"
-  assumes I_notcarr: "carrier R \<noteq> I"
+    and "cring R"
+    and I_notcarr: "carrier R \<noteq> I"
     and I_prime: "\<And>a b. \<lbrakk>a \<in> carrier R; b \<in> carrier R; a \<otimes> b \<in> I\<rbrakk> \<Longrightarrow> a \<in> I \<or> b \<in> I"
   shows "primeideal I R"
 proof -
@@ -120,8 +120,8 @@
 lemma primeidealI2:
   fixes R (structure)
   assumes "additive_subgroup I R"
-  assumes "cring R"
-  assumes I_l_closed: "\<And>a x. \<lbrakk>a \<in> I; x \<in> carrier R\<rbrakk> \<Longrightarrow> x \<otimes> a \<in> I"
+    and "cring R"
+    and I_l_closed: "\<And>a x. \<lbrakk>a \<in> I; x \<in> carrier R\<rbrakk> \<Longrightarrow> x \<otimes> a \<in> I"
     and I_r_closed: "\<And>a x. \<lbrakk>a \<in> I; x \<in> carrier R\<rbrakk> \<Longrightarrow> a \<otimes> x \<in> I"
     and I_notcarr: "carrier R \<noteq> I"
     and I_prime: "\<And>a b. \<lbrakk>a \<in> carrier R; b \<in> carrier R; a \<otimes> b \<in> I\<rbrakk> \<Longrightarrow> a \<in> I \<or> b \<in> I"
@@ -378,8 +378,8 @@
   with a show "H \<subseteq> I" by simp
 next
   fix x
-  assume HI: "H \<subseteq> I"
-  from Iideal and HI have "I \<in> {I. ideal I R \<and> H \<subseteq> I}" by fast
+  assume "H \<subseteq> I"
+  with Iideal have "I \<in> {I. ideal I R \<and> H \<subseteq> I}" by fast
   then show "Idl H \<subseteq> I" unfolding genideal_def by fast
 qed
 
--- a/src/HOL/Algebra/Ring.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Algebra/Ring.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -423,13 +423,10 @@
 proof (rule, rule)
   fix x
   assume xcarr: "x \<in> carrier R"
-  from xcarr
-      have "x = x \<otimes> \<one>" by simp
-  from this and onezero
-      have "x = x \<otimes> \<zero>" by simp
-  from this and xcarr
-      have "x = \<zero>" by simp
-  thus "x \<in> {\<zero>}" by fast
+  from xcarr have "x = x \<otimes> \<one>" by simp
+  with onezero have "x = x \<otimes> \<zero>" by simp
+  with xcarr have "x = \<zero>" by simp
+  then show "x \<in> {\<zero>}" by fast
 qed fast
 
 lemma one_zeroI:
@@ -550,11 +547,9 @@
   assumes field_Units: "Units R = carrier R - {\<zero>}"
   shows "field R"
 proof
-  from field_Units
-  have a: "\<zero> \<notin> Units R" by fast
-  have "\<one> \<in> Units R" by fast
-  from this and a
-  show "\<one> \<noteq> \<zero>" by force
+  from field_Units have "\<zero> \<notin> Units R" by fast
+  moreover have "\<one> \<in> Units R" by fast
+  ultimately show "\<one> \<noteq> \<zero>" by force
 next
   fix a b
   assume acarr: "a \<in> carrier R"
@@ -563,21 +558,15 @@
   show "a = \<zero> \<or> b = \<zero>"
   proof (cases "a = \<zero>", simp)
     assume "a \<noteq> \<zero>"
-    from this and field_Units and acarr
-    have aUnit: "a \<in> Units R" by fast
-    from bcarr
-    have "b = \<one> \<otimes> b" by algebra
-    also from aUnit acarr
-    have "... = (inv a \<otimes> a) \<otimes> b" by simp
+    with field_Units and acarr have aUnit: "a \<in> Units R" by fast
+    from bcarr have "b = \<one> \<otimes> b" by algebra
+    also from aUnit acarr have "... = (inv a \<otimes> a) \<otimes> b" by simp
     also from acarr bcarr aUnit[THEN Units_inv_closed]
     have "... = (inv a) \<otimes> (a \<otimes> b)" by algebra
-    also from ab and acarr bcarr aUnit
-    have "... = (inv a) \<otimes> \<zero>" by simp
-    also from aUnit[THEN Units_inv_closed]
-    have "... = \<zero>" by algebra
-    finally
-    have "b = \<zero>" .
-    thus "a = \<zero> \<or> b = \<zero>" by simp
+    also from ab and acarr bcarr aUnit have "... = (inv a) \<otimes> \<zero>" by simp
+    also from aUnit[THEN Units_inv_closed] have "... = \<zero>" by algebra
+    finally have "b = \<zero>" .
+    then show "a = \<zero> \<or> b = \<zero>" by simp
   qed
 qed (rule field_Units)
 
@@ -593,16 +582,10 @@
   fix x
   assume xcarr: "x \<in> carrier R"
     and "x \<noteq> \<zero>"
-  from this
-  have "\<exists>y\<in>carrier R. x \<otimes> y = \<one>" by (rule invex)
-  from this
-  obtain y
-    where ycarr: "y \<in> carrier R"
-    and xy: "x \<otimes> y = \<one>"
-    by fast
+  then have "\<exists>y\<in>carrier R. x \<otimes> y = \<one>" by (rule invex)
+  then obtain y where ycarr: "y \<in> carrier R" and xy: "x \<otimes> y = \<one>" by fast
   from xy xcarr ycarr have "y \<otimes> x = \<one>" by (simp add: m_comm)
-  from ycarr and this and xy
-  show "\<exists>y\<in>carrier R. y \<otimes> x = \<one> \<and> x \<otimes> y = \<one>" by fast
+  with ycarr and xy show "\<exists>y\<in>carrier R. y \<otimes> x = \<one> \<and> x \<otimes> y = \<one>" by fast
 qed
 
 
--- a/src/HOL/Auth/Smartcard/ShoupRubin.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Auth/Smartcard/ShoupRubin.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -762,7 +762,7 @@
 
 method_setup prepare = {*
     Scan.succeed (K (SIMPLE_METHOD ShoupRubin.prepare_tac)) *}
-  "to launch a few simple facts that'll help the simplifier"
+  "to launch a few simple facts that will help the simplifier"
 
 method_setup parts_prepare = {*
     Scan.succeed (fn ctxt => SIMPLE_METHOD (ShoupRubin.parts_prepare_tac ctxt)) *}
--- a/src/HOL/Auth/Smartcard/ShoupRubinBella.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Auth/Smartcard/ShoupRubinBella.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -771,7 +771,7 @@
 
 method_setup prepare = {*
     Scan.succeed (fn ctxt => SIMPLE_METHOD (ShoupRubinBella.prepare_tac ctxt)) *}
-  "to launch a few simple facts that'll help the simplifier"
+  "to launch a few simple facts that will help the simplifier"
 
 method_setup parts_prepare = {*
     Scan.succeed (fn ctxt => SIMPLE_METHOD (ShoupRubinBella.parts_prepare_tac ctxt)) *}
--- a/src/HOL/Boogie/Tools/boogie_tactics.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Boogie/Tools/boogie_tactics.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -45,12 +45,12 @@
     in tac ctxt (thms @ facts) end))
 
 val setup_boogie = Method.setup @{binding boogie} (boogie_method false)
-  ("Applies an SMT solver to the current goal " ^
-   "using the current set of Boogie background axioms")
+  "apply an SMT solver to the current goal \
+  \using the current set of Boogie background axioms"
 
 val setup_boogie_all = Method.setup @{binding boogie_all} (boogie_method true)
-  ("Applies an SMT solver to all goals " ^
-   "using the current set of Boogie background axioms")
+  "apply an SMT solver to all goals \
+  \using the current set of Boogie background axioms"
 
 
 local
@@ -96,7 +96,7 @@
 in
 val setup_boogie_cases = Method.setup @{binding boogie_cases}
   (Scan.succeed boogie_cases)
-  "Prepares a set of Boogie assertions for case-based proofs"
+  "prepare a set of Boogie assertions for case-based proofs"
 end
 
 
--- a/src/HOL/Datatype.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Datatype.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -14,12 +14,6 @@
   ("Tools/Datatype/datatype_realizer.ML")
 begin
 
-subsection {* Prelude: lifting over function space *}
-
-enriched_type map_fun: map_fun
-  by (simp_all add: fun_eq_iff)
-
-
 subsection {* The datatype universe *}
 
 definition "Node = {p. EX f x k. p = (f :: nat => 'b + nat, x ::'a + nat) & f k = Inr 0}"
@@ -532,3 +526,4 @@
 setup Datatype_Realizer.setup
 
 end
+
--- a/src/HOL/Decision_Procs/Commutative_Ring.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/Commutative_Ring.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -314,6 +314,9 @@
 
 
 use "commutative_ring_tac.ML"
-setup Commutative_Ring_Tac.setup
+
+method_setup comm_ring = {*
+  Scan.succeed (SIMPLE_METHOD' o Commutative_Ring_Tac.tac)
+*} "reflective decision procedure for equalities over commutative rings"
 
 end
--- a/src/HOL/Decision_Procs/Cooper.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/Cooper.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -2005,7 +2005,12 @@
 *}
 
 use "cooper_tac.ML"
-setup "Cooper_Tac.setup"
+
+method_setup cooper = {*
+  Args.mode "no_quantify" >>
+    (fn q => fn ctxt => SIMPLE_METHOD' (Cooper_Tac.linz_tac ctxt (not q)))
+*} "decision procedure for linear integer arithmetic"
+
 
 text {* Tests *}
 
--- a/src/HOL/Decision_Procs/Ferrack.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/Ferrack.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -2004,7 +2004,12 @@
 *}
 
 use "ferrack_tac.ML"
-setup Ferrack_Tac.setup
+
+method_setup rferrack = {*
+  Args.mode "no_quantify" >>
+    (fn q => fn ctxt => SIMPLE_METHOD' (Ferrack_Tac.linr_tac ctxt (not q)))
+*} "decision procedure for linear real arithmetic"
+
 
 lemma
   fixes x :: real
--- a/src/HOL/Decision_Procs/MIR.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/MIR.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -5635,7 +5635,12 @@
 *}
 
 use "mir_tac.ML"
-setup "Mir_Tac.setup"
+
+method_setup mir = {*
+  Args.mode "no_quantify" >>
+    (fn q => fn ctxt => SIMPLE_METHOD' (Mir_Tac.mir_tac ctxt (not q)))
+*} "decision procedure for MIR arithmetic"
+
 
 lemma "ALL (x::real). (\<lfloor>x\<rfloor> = \<lceil>x\<rceil> = (x = real \<lfloor>x\<rfloor>))"
   by mir
--- a/src/HOL/Decision_Procs/commutative_ring_tac.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/commutative_ring_tac.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -7,7 +7,6 @@
 signature COMMUTATIVE_RING_TAC =
 sig
   val tac: Proof.context -> int -> tactic
-  val setup: theory -> theory
 end
 
 structure Commutative_Ring_Tac: COMMUTATIVE_RING_TAC =
@@ -98,8 +97,4 @@
     THEN (simp_tac cring_ss i)
   end);
 
-val setup =
-  Method.setup @{binding comm_ring} (Scan.succeed (SIMPLE_METHOD' o tac))
-    "reflective decision procedure for equalities over commutative rings";
-
 end;
--- a/src/HOL/Decision_Procs/cooper_tac.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/cooper_tac.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -6,7 +6,6 @@
 sig
   val trace: bool Unsynchronized.ref
   val linz_tac: Proof.context -> bool -> int -> tactic
-  val setup: theory -> theory
 end
 
 structure Cooper_Tac: COOPER_TAC =
@@ -115,15 +114,4 @@
       | _ => (pre_thm, assm_tac i)
   in rtac (((mp_step nh) o (spec_step np)) th) i THEN tac end);
 
-val setup =
-  Method.setup @{binding cooper}
-    let
-      val parse_flag = Args.$$$ "no_quantify" >> K (K false)
-    in
-      Scan.lift (Scan.optional (Args.$$$ "(" |-- Scan.repeat1 parse_flag --| Args.$$$ ")") [] >>
-        curry (Library.foldl op |>) true) >>
-      (fn q => fn ctxt => SIMPLE_METHOD' (linz_tac ctxt q))
-    end
-    "decision procedure for linear integer arithmetic";
-
 end
--- a/src/HOL/Decision_Procs/ferrack_tac.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/ferrack_tac.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -6,7 +6,6 @@
 sig
   val trace: bool Unsynchronized.ref
   val linr_tac: Proof.context -> bool -> int -> tactic
-  val setup: theory -> theory
 end
 
 structure Ferrack_Tac =
@@ -96,10 +95,4 @@
       | _ => (pre_thm, assm_tac i)
   in rtac ((mp_step nh o spec_step np) th) i THEN tac end);
 
-val setup =
-  Method.setup @{binding rferrack}
-    (Args.mode "no_quantify" >> (fn q => fn ctxt =>
-      SIMPLE_METHOD' (linr_tac ctxt (not q))))
-    "decision procedure for linear real arithmetic";
-
 end
--- a/src/HOL/Decision_Procs/mir_tac.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Decision_Procs/mir_tac.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -6,7 +6,6 @@
 sig
   val trace: bool Unsynchronized.ref
   val mir_tac: Proof.context -> bool -> int -> tactic
-  val setup: theory -> theory
 end
 
 structure Mir_Tac =
@@ -143,15 +142,4 @@
       | _ => (pre_thm, assm_tac i)
   in rtac (((mp_step nh) o (spec_step np)) th) i THEN tac end);
 
-val setup =
-  Method.setup @{binding mir}
-    let
-      val parse_flag = Args.$$$ "no_quantify" >> K (K false)
-    in
-      Scan.lift (Scan.optional (Args.$$$ "(" |-- Scan.repeat1 parse_flag --| Args.$$$ ")") [] >>
-        curry (Library.foldl op |>) true) >>
-      (fn q => fn ctxt => SIMPLE_METHOD' (mir_tac ctxt q))
-    end
-    "decision procedure for MIR arithmetic";
-
 end
--- a/src/HOL/Fun.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Fun.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -803,4 +803,11 @@
 
 use "Tools/enriched_type.ML"
 
+enriched_type map_fun: map_fun
+  by (simp_all add: fun_eq_iff)
+
+enriched_type vimage
+  by (simp_all add: fun_eq_iff vimage_compose)
+
 end
+
--- a/src/HOL/FunDef.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/FunDef.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -110,14 +110,21 @@
 use "Tools/Function/relation.ML"
 use "Tools/Function/function.ML"
 use "Tools/Function/pat_completeness.ML"
+
+method_setup pat_completeness = {*
+  Scan.succeed (SIMPLE_METHOD' o Pat_Completeness.pat_completeness_tac)
+*} "prove completeness of datatype patterns"
+
 use "Tools/Function/fun.ML"
 use "Tools/Function/induction_schema.ML"
 
+method_setup induction_schema = {*
+  Scan.succeed (RAW_METHOD o Induction_Schema.induction_schema_tac)
+*} "prove an induction principle"
+
 setup {* 
   Function.setup
-  #> Pat_Completeness.setup
   #> Function_Fun.setup
-  #> Induction_Schema.setup
 *}
 
 subsection {* Measure Functions *}
@@ -137,6 +144,12 @@
 by (rule is_measure_trivial)
 
 use "Tools/Function/lexicographic_order.ML"
+
+method_setup lexicographic_order = {*
+  Method.sections clasimp_modifiers >>
+  (K (SIMPLE_METHOD o Lexicographic_Order.lexicographic_order_tac false))
+*} "termination prover for lexicographic orderings"
+
 setup Lexicographic_Order.setup 
 
 
--- a/src/HOL/Groebner_Basis.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Groebner_Basis.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -43,8 +43,20 @@
 
 use "Tools/groebner.ML"
 
-method_setup algebra = Groebner.algebra_method
-  "solve polynomial equations over (semi)rings and ideal membership problems using Groebner bases"
+method_setup algebra = {*
+  let
+    fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ()
+    val addN = "add"
+    val delN = "del"
+    val any_keyword = keyword addN || keyword delN
+    val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
+  in
+    Scan.optional (keyword addN |-- thms) [] --
+     Scan.optional (keyword delN |-- thms) [] >>
+    (fn (add_ths, del_ths) => fn ctxt =>
+      SIMPLE_METHOD' (Groebner.algebra_tac add_ths del_ths ctxt))
+  end
+*} "solve polynomial equations over (semi)rings and ideal membership problems using Groebner bases"
 
 declare dvd_def[algebra]
 declare dvd_eq_mod_eq_0[symmetric, algebra]
--- a/src/HOL/HOLCF/Fixrec.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/HOLCF/Fixrec.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -230,7 +230,9 @@
 use "Tools/holcf_library.ML"
 use "Tools/fixrec.ML"
 
-setup {* Fixrec.setup *}
+method_setup fixrec_simp = {*
+  Scan.succeed (SIMPLE_METHOD' o Fixrec.fixrec_simp_tac)
+*} "pattern prover for fixrec constants"
 
 setup {*
   Fixrec.add_matchers
--- a/src/HOL/HOLCF/Tools/fixrec.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/HOLCF/Tools/fixrec.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -12,7 +12,6 @@
     -> (bool * (Attrib.binding * string)) list -> local_theory -> local_theory
   val add_matchers: (string * string) list -> theory -> theory
   val fixrec_simp_tac: Proof.context -> int -> tactic
-  val setup: theory -> theory
 end
 
 structure Fixrec : FIXREC =
@@ -406,9 +405,4 @@
     (Parse.fixes -- (Parse.where_ |-- Parse.!!! alt_specs')
       >> (fn (fixes, specs) => add_fixrec_cmd fixes specs))
 
-val setup =
-  Method.setup @{binding fixrec_simp}
-    (Scan.succeed (SIMPLE_METHOD' o fixrec_simp_tac))
-    "pattern prover for fixrec constants"
-
 end
--- a/src/HOL/Hahn_Banach/Hahn_Banach.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Hahn_Banach/Hahn_Banach.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -151,12 +151,12 @@
         qed
       qed
 
-      def H' \<equiv> "H \<oplus> lin x'"
+      def H' \<equiv> "H + lin x'"
         -- {* Define @{text H'} as the direct sum of @{text H} and the linear closure of @{text x'}. \skp *}
       have HH': "H \<unlhd> H'"
       proof (unfold H'_def)
         from x'E have "vectorspace (lin x')" ..
-        with H show "H \<unlhd> H \<oplus> lin x'" ..
+        with H show "H \<unlhd> H + lin x'" ..
       qed
 
       obtain xi where
--- a/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Hahn_Banach/Hahn_Banach_Ext_Lemmas.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -90,7 +90,7 @@
 lemma h'_lf:
   assumes h'_def: "h' \<equiv> \<lambda>x. let (y, a) =
       SOME (y, a). x = y + a \<cdot> x0 \<and> y \<in> H in h y + a * xi"
-    and H'_def: "H' \<equiv> H \<oplus> lin x0"
+    and H'_def: "H' \<equiv> H + lin x0"
     and HE: "H \<unlhd> E"
   assumes "linearform H h"
   assumes x0: "x0 \<notin> H"  "x0 \<in> E"  "x0 \<noteq> 0"
@@ -106,7 +106,7 @@
     proof (unfold H'_def)
       from `x0 \<in> E`
       have "lin x0 \<unlhd> E" ..
-      with HE show "vectorspace (H \<oplus> lin x0)" using E ..
+      with HE show "vectorspace (H + lin x0)" using E ..
     qed
     {
       fix x1 x2 assume x1: "x1 \<in> H'" and x2: "x2 \<in> H'"
@@ -194,7 +194,7 @@
 lemma h'_norm_pres:
   assumes h'_def: "h' \<equiv> \<lambda>x. let (y, a) =
       SOME (y, a). x = y + a \<cdot> x0 \<and> y \<in> H in h y + a * xi"
-    and H'_def: "H' \<equiv> H \<oplus> lin x0"
+    and H'_def: "H' \<equiv> H + lin x0"
     and x0: "x0 \<notin> H"  "x0 \<in> E"  "x0 \<noteq> 0"
   assumes E: "vectorspace E" and HE: "subspace H E"
     and "seminorm E p" and "linearform H h"
--- a/src/HOL/Hahn_Banach/Subspace.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Hahn_Banach/Subspace.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -228,38 +228,38 @@
   set of all sums of elements from @{text U} and @{text V}.
 *}
 
-lemma sum_def: "U \<oplus> V = {u + v | u v. u \<in> U \<and> v \<in> V}"
+lemma sum_def: "U + V = {u + v | u v. u \<in> U \<and> v \<in> V}"
   unfolding set_plus_def by auto
 
 lemma sumE [elim]:
-    "x \<in> U \<oplus> V \<Longrightarrow> (\<And>u v. x = u + v \<Longrightarrow> u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> C) \<Longrightarrow> C"
+    "x \<in> U + V \<Longrightarrow> (\<And>u v. x = u + v \<Longrightarrow> u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> C) \<Longrightarrow> C"
   unfolding sum_def by blast
 
 lemma sumI [intro]:
-    "u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> x = u + v \<Longrightarrow> x \<in> U \<oplus> V"
+    "u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> x = u + v \<Longrightarrow> x \<in> U + V"
   unfolding sum_def by blast
 
 lemma sumI' [intro]:
-    "u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> u + v \<in> U \<oplus> V"
+    "u \<in> U \<Longrightarrow> v \<in> V \<Longrightarrow> u + v \<in> U + V"
   unfolding sum_def by blast
 
-text {* @{text U} is a subspace of @{text "U \<oplus> V"}. *}
+text {* @{text U} is a subspace of @{text "U + V"}. *}
 
 lemma subspace_sum1 [iff]:
   assumes "vectorspace U" "vectorspace V"
-  shows "U \<unlhd> U \<oplus> V"
+  shows "U \<unlhd> U + V"
 proof -
   interpret vectorspace U by fact
   interpret vectorspace V by fact
   show ?thesis
   proof
     show "U \<noteq> {}" ..
-    show "U \<subseteq> U \<oplus> V"
+    show "U \<subseteq> U + V"
     proof
       fix x assume x: "x \<in> U"
       moreover have "0 \<in> V" ..
-      ultimately have "x + 0 \<in> U \<oplus> V" ..
-      with x show "x \<in> U \<oplus> V" by simp
+      ultimately have "x + 0 \<in> U + V" ..
+      with x show "x \<in> U + V" by simp
     qed
     fix x y assume x: "x \<in> U" and "y \<in> U"
     then show "x + y \<in> U" by simp
@@ -271,30 +271,30 @@
 
 lemma sum_subspace [intro?]:
   assumes "subspace U E" "vectorspace E" "subspace V E"
-  shows "U \<oplus> V \<unlhd> E"
+  shows "U + V \<unlhd> E"
 proof -
   interpret subspace U E by fact
   interpret vectorspace E by fact
   interpret subspace V E by fact
   show ?thesis
   proof
-    have "0 \<in> U \<oplus> V"
+    have "0 \<in> U + V"
     proof
       show "0 \<in> U" using `vectorspace E` ..
       show "0 \<in> V" using `vectorspace E` ..
       show "(0::'a) = 0 + 0" by simp
     qed
-    then show "U \<oplus> V \<noteq> {}" by blast
-    show "U \<oplus> V \<subseteq> E"
+    then show "U + V \<noteq> {}" by blast
+    show "U + V \<subseteq> E"
     proof
-      fix x assume "x \<in> U \<oplus> V"
+      fix x assume "x \<in> U + V"
       then obtain u v where "x = u + v" and
         "u \<in> U" and "v \<in> V" ..
       then show "x \<in> E" by simp
     qed
   next
-    fix x y assume x: "x \<in> U \<oplus> V" and y: "y \<in> U \<oplus> V"
-    show "x + y \<in> U \<oplus> V"
+    fix x y assume x: "x \<in> U + V" and y: "y \<in> U + V"
+    show "x + y \<in> U + V"
     proof -
       from x obtain ux vx where "x = ux + vx" and "ux \<in> U" and "vx \<in> V" ..
       moreover
@@ -306,7 +306,7 @@
         using x y by (simp_all add: add_ac)
       then show ?thesis ..
     qed
-    fix a show "a \<cdot> x \<in> U \<oplus> V"
+    fix a show "a \<cdot> x \<in> U + V"
     proof -
       from x obtain u v where "x = u + v" and "u \<in> U" and "v \<in> V" ..
       then have "a \<cdot> u \<in> U" and "a \<cdot> v \<in> V"
@@ -319,7 +319,7 @@
 text{* The sum of two subspaces is a vectorspace. *}
 
 lemma sum_vs [intro?]:
-    "U \<unlhd> E \<Longrightarrow> V \<unlhd> E \<Longrightarrow> vectorspace E \<Longrightarrow> vectorspace (U \<oplus> V)"
+    "U \<unlhd> E \<Longrightarrow> V \<unlhd> E \<Longrightarrow> vectorspace E \<Longrightarrow> vectorspace (U + V)"
   by (rule subspace.vectorspace) (rule sum_subspace)
 
 
@@ -481,7 +481,7 @@
 proof -
   interpret vectorspace E by fact
   interpret subspace H E by fact
-  from x y x' have "x \<in> H \<oplus> lin x'" by auto
+  from x y x' have "x \<in> H + lin x'" by auto
   have "\<exists>!p. (\<lambda>(y, a). x = y + a \<cdot> x' \<and> y \<in> H) p" (is "\<exists>!p. ?P p")
   proof (rule ex_ex1I)
     from x y show "\<exists>p. ?P p" by blast
--- a/src/HOL/Imperative_HOL/ex/SatChecker.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Imperative_HOL/ex/SatChecker.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -675,21 +675,21 @@
 primrec tres_thm :: "(ClauseId, Clause) RBT_Impl.rbt \<Rightarrow> Lit \<times> ClauseId \<Rightarrow> Clause \<Rightarrow> Clause Heap"
 where
   "tres_thm t (l, j) cli =
-  (case (RBT_Impl.lookup t j) of 
+  (case (rbt_lookup t j) of 
      None \<Rightarrow> raise (''MiniSatChecked.res_thm: No resolvant clause in thms array for Conflict step.'')
    | Some clj \<Rightarrow> res_thm' l cli clj)"
 
 fun tdoProofStep :: " ProofStep \<Rightarrow> ((ClauseId, Clause) RBT_Impl.rbt * Clause list) \<Rightarrow> ((ClauseId, Clause) RBT_Impl.rbt * Clause list) Heap"
 where
   "tdoProofStep (Conflict saveTo (i, rs)) (t, rcl) =
-     (case (RBT_Impl.lookup t i) of
+     (case (rbt_lookup t i) of
        None \<Rightarrow> raise (''MiniSatChecked.doProofStep: No starting clause in thms array for Conflict step.'')
      | Some cli \<Rightarrow> do {
                       result \<leftarrow> foldM (tres_thm t) rs cli;
-                      return ((RBT_Impl.insert saveTo result t), rcl)
+                      return ((rbt_insert saveTo result t), rcl)
                    })"
-| "tdoProofStep (Delete cid) (t, rcl) = return ((RBT_Impl.delete cid t), rcl)"
-| "tdoProofStep (Root cid clause) (t, rcl) = return (RBT_Impl.insert cid (sort clause) t, (remdups(sort clause)) # rcl)"
+| "tdoProofStep (Delete cid) (t, rcl) = return ((rbt_delete cid t), rcl)"
+| "tdoProofStep (Root cid clause) (t, rcl) = return (rbt_insert cid (sort clause) t, (remdups(sort clause)) # rcl)"
 | "tdoProofStep (Xstep cid1 cid2) (t, rcl) = raise ''MiniSatChecked.doProofStep: Xstep constructor found.''"
 | "tdoProofStep (ProofDone b) (t, rcl) = raise ''MiniSatChecked.doProofStep: ProofDone constructor found.''"
 
@@ -698,7 +698,7 @@
   "tchecker n p i =
   do {
      rcs \<leftarrow> foldM (tdoProofStep) p (RBT_Impl.Empty, []);
-     (if (RBT_Impl.lookup (fst rcs) i) = Some [] then return (snd rcs) 
+     (if (rbt_lookup (fst rcs) i) = Some [] then return (snd rcs) 
                 else raise(''No empty clause''))
   }"
 
--- a/src/HOL/Import/HOL_Light_Maps.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Import/HOL_Light_Maps.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -93,9 +93,9 @@
   "snd = (\<lambda>p\<Colon>'A \<times> 'B. SOME y\<Colon>'B. \<exists>x\<Colon>'A. p = (x, y))"
   by auto
 
-(*lemma [import_const CURRY]:
+lemma CURRY_DEF[import_const CURRY]:
   "curry = (\<lambda>(f\<Colon>'A \<times> 'B \<Rightarrow> 'C) x y. f (x, y))"
-  using curry_def .*)
+  using curry_def .
 
 lemma [import_const ONE_ONE : Fun.inj]:
   "inj = (\<lambda>(f\<Colon>'A \<Rightarrow> 'B). \<forall>x1 x2. f x1 = f x2 \<longrightarrow> x1 = x2)"
@@ -200,16 +200,16 @@
   "\<forall>m n\<Colon>nat. if n = id 0 then m div n = id 0 \<and> m mod n = m else m = m div n * n + m mod n \<and> m mod n < n"
   by simp
 
-lemmas [import_type sum "_dest_sum" "_mk_sum"] = type_definition_sum
+lemmas [import_type sum "_dest_sum" "_mk_sum"] = type_definition_sum[where 'a="'A" and 'b="'B"]
 import_const_map INL : Sum_Type.Inl
 import_const_map INR : Sum_Type.Inr
 
 lemma sum_INDUCT:
-  "\<forall>P. (\<forall>a. P (Inl a)) \<and> (\<forall>a. P (Inr a)) \<longrightarrow> (\<forall>x. P x)"
+  "\<forall>P. (\<forall>a :: 'A. P (Inl a)) \<and> (\<forall>a :: 'B. P (Inr a)) \<longrightarrow> (\<forall>x. P x)"
   by (auto intro: sum.induct)
 
 lemma sum_RECURSION:
-  "\<forall>Inl' Inr'. \<exists>fn. (\<forall>a. fn (Inl a) = Inl' a) \<and> (\<forall>a. fn (Inr a) = Inr' a)"
+  "\<forall>Inl' Inr'. \<exists>fn. (\<forall>a :: 'A. fn (Inl a) = (Inl' a :: 'Z)) \<and> (\<forall>a :: 'B. fn (Inr a) = Inr' a)"
   by (intro allI, rule_tac x="sum_case Inl' Inr'" in exI) auto
 
 lemma OUTL[import_const "OUTL" : "Sum_Type.Projl"]:
@@ -249,7 +249,7 @@
   by simp
 
 lemma LENGTH[import_const LENGTH : List.length]:
-  "length [] = id 0 \<and> (\<forall>(h\<Colon>'A) t. length (h # t) = Suc (length t))"
+  "length ([] :: 'A list) = id 0 \<and> (\<forall>(h\<Colon>'A) t. length (h # t) = Suc (length t))"
   by simp
 
 lemma MAP[import_const MAP : List.map]:
@@ -307,7 +307,7 @@
   by simp
 
 lemma WF[import_const WF : Wellfounded.wfP]:
-  "wfP u \<longleftrightarrow> (\<forall>P. (\<exists>x :: 'A. P x) \<longrightarrow> (\<exists>x. P x \<and> (\<forall>y. u y x \<longrightarrow> \<not> P y)))"
+  "\<forall>u. wfP u \<longleftrightarrow> (\<forall>P. (\<exists>x :: 'A. P x) \<longrightarrow> (\<exists>x. P x \<and> (\<forall>y. u y x \<longrightarrow> \<not> P y)))"
 proof (intro allI iffI impI wfI_min[to_pred], elim exE wfE_min[to_pred])
   fix x :: "'a \<Rightarrow> 'a \<Rightarrow> bool" and xa :: "'a" and Q
   assume a: "xa \<in> Q"
--- a/src/HOL/Import/import_rule.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Import/import_rule.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -235,7 +235,7 @@
           [SOME rept, SOME abst, SOME cP, SOME (cterm_of thy' (Free ("a", aty))),
              SOME (cterm_of thy' (Free ("r", typ_of ctT)))]
           @{thm typedef_hol2hollight}
-    val th4 = meta_mp typedef_th (#type_definition (#2 typedef_info))
+    val th4 = typedef_th OF [#type_definition (#2 typedef_info)]
   in
     (th4, thy')
   end
@@ -339,6 +339,14 @@
     snd (Global_Theory.add_thm ((binding, thm'), []) thy)
   end
 
+fun log_timestamp () =
+  let
+    val time = Time.now ()
+    val millis = nth (space_explode "." (Time.fmt 3 time)) 1
+  in
+    Date.fmt "%d.%m.%Y %H:%M:%S." (Date.fromTimeLocal time) ^ millis
+  end
+
 fun process_line str tstate =
   let
     fun process tstate (#"R", [t]) = gettm t tstate |>> refl |-> setth
@@ -416,7 +424,7 @@
           gettm t1 tstate ||>> gettm t2 |>> (fn (t1, t2) => Thm.lambda t1 t2) |-> settm
       | process (thy, state) (#"+", [s]) =
           let
-            val _ = tracing ("Noting: " ^ s)
+            val _ = tracing ("NOTING " ^ log_timestamp () ^ ": " ^ s)
           in
             (store_thm (Binding.name (transl_dot s)) (last_thm state) thy, state)
           end
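
Two independent tweaks in this file: the typedef theorem is now discharged with the
generic OF combinator rather than meta_mp, and the tracing output gains a timestamp.
As a generic illustration of OF, not specific to the import tool (the bound name th is
illustrative):

  ML {*
    (* OF resolves the premises of a rule against the given facts, in order *)
    val th = @{thm conjI} OF [@{thm TrueI}];
    (* th now has the form "?Q ==> True \<and> ?Q" *)
  *}
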
--- a/src/HOL/IsaMakefile	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/IsaMakefile	Mon Apr 16 19:01:57 2012 +0200
@@ -1027,7 +1027,7 @@
   ex/Quicksort.thy ex/ROOT.ML						\
   ex/Records.thy ex/ReflectionEx.thy ex/Refute_Examples.thy		\
   ex/SAT_Examples.thy ex/Serbian.thy ex/Set_Theory.thy			\
-  ex/Set_Algebras.thy ex/Simproc_Tests.thy ex/SVC_Oracle.thy		\
+  ex/Simproc_Tests.thy ex/SVC_Oracle.thy		\
   ex/sledgehammer_tactics.ML ex/Seq.thy ex/Sqrt.thy ex/Sqrt_Script.thy 	\
   ex/Sudoku.thy ex/Tarski.thy ex/Termination.thy ex/Transfer_Ex.thy	\
   ex/Tree23.thy	ex/Unification.thy ex/While_Combinator_Example.thy	\
@@ -1319,16 +1319,16 @@
 
 HOL-Mirabelle: HOL $(LOG)/HOL-Mirabelle.gz
 
-$(LOG)/HOL-Mirabelle.gz: $(OUT)/HOL Mirabelle/Mirabelle_Test.thy	\
-  Mirabelle/Mirabelle.thy Mirabelle/Tools/mirabelle.ML			\
-  Mirabelle/ROOT.ML Mirabelle/Tools/mirabelle_arith.ML			\
-  Mirabelle/Tools/mirabelle_metis.ML					\
-  Mirabelle/Tools/mirabelle_quickcheck.ML				\
-  Mirabelle/Tools/mirabelle_refute.ML					\
-  Mirabelle/Tools/mirabelle_sledgehammer.ML 				\
-  Mirabelle/Tools/mirabelle_sledgehammer_filter.ML			\
-  ex/sledgehammer_tactics.ML Mirabelle/lib/Tools/mirabelle		\
-  Mirabelle/lib/scripts/mirabelle.pl Library/FrechetDeriv.thy		\
+$(LOG)/HOL-Mirabelle.gz: $(OUT)/HOL Mirabelle/Mirabelle_Test.thy \
+  Mirabelle/Mirabelle.thy Mirabelle/Actions/mirabelle.ML \
+  Mirabelle/ROOT.ML Mirabelle/Actions/mirabelle_arith.ML \
+  Mirabelle/Actions/mirabelle_metis.ML \
+  Mirabelle/Actions/mirabelle_quickcheck.ML \
+  Mirabelle/Actions/mirabelle_refute.ML	\
+  Mirabelle/Actions/mirabelle_sledgehammer.ML \
+  Mirabelle/Actions/mirabelle_sledgehammer_filter.ML \
+  ex/sledgehammer_tactics.ML Mirabelle/lib/Tools/mirabelle \
+  Mirabelle/lib/scripts/mirabelle.pl Library/FrechetDeriv.thy \
   Library/Inner_Product.thy
 	@$(ISABELLE_TOOL) usedir $(OUT)/HOL Mirabelle
 	@cd Library; $(ISABELLE_TOOL) mirabelle -q arith Inner_Product.thy # some arbitrary small test case
--- a/src/HOL/Library/AList.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/AList.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -97,7 +97,7 @@
   have "map_of \<circ> fold (prod_case update) (zip ks vs) =
     fold (\<lambda>(k, v) f. f(k \<mapsto> v)) (zip ks vs) \<circ> map_of"
     by (rule fold_commute) (auto simp add: fun_eq_iff update_conv')
-  then show ?thesis by (auto simp add: updates_def fun_eq_iff map_upds_fold_map_upd foldl_def split_def)
+  then show ?thesis by (auto simp add: updates_def fun_eq_iff map_upds_fold_map_upd foldl_conv_fold split_def)
 qed
 
 lemma updates_conv: "map_of (updates ks vs al) k = ((map_of al)(ks[\<mapsto>]vs)) k"
@@ -427,7 +427,7 @@
 
 lemma merge_updates:
   "merge qs ps = updates (rev (map fst ps)) (rev (map snd ps)) qs"
-  by (simp add: merge_def updates_def foldr_def zip_rev zip_map_fst_snd)
+  by (simp add: merge_def updates_def foldr_conv_fold zip_rev zip_map_fst_snd)
 
 lemma dom_merge: "fst ` set (merge xs ys) = fst ` set xs \<union> fst ` set ys"
   by (induct ys arbitrary: xs) (auto simp add: dom_update)
@@ -448,7 +448,7 @@
     fold (\<lambda>(k, v) m. m(k \<mapsto> v)) (rev ys) \<circ> map_of"
     by (rule fold_commute) (simp add: update_conv' prod_case_beta split_def fun_eq_iff)
   then show ?thesis
-    by (simp add: merge_def map_add_map_of_foldr foldr_def fun_eq_iff)
+    by (simp add: merge_def map_add_map_of_foldr foldr_conv_fold fun_eq_iff)
 qed
 
 corollary merge_conv:
--- a/src/HOL/Library/BigO.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/BigO.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -92,7 +92,7 @@
   by (auto simp add: bigo_def) 
 
 lemma bigo_plus_self_subset [intro]: 
-  "O(f) \<oplus> O(f) <= O(f)"
+  "O(f) + O(f) <= O(f)"
   apply (auto simp add: bigo_alt_def set_plus_def)
   apply (rule_tac x = "c + ca" in exI)
   apply auto
@@ -104,14 +104,14 @@
   apply force
 done
 
-lemma bigo_plus_idemp [simp]: "O(f) \<oplus> O(f) = O(f)"
+lemma bigo_plus_idemp [simp]: "O(f) + O(f) = O(f)"
   apply (rule equalityI)
   apply (rule bigo_plus_self_subset)
   apply (rule set_zero_plus2) 
   apply (rule bigo_zero)
   done
 
-lemma bigo_plus_subset [intro]: "O(f + g) <= O(f) \<oplus> O(g)"
+lemma bigo_plus_subset [intro]: "O(f + g) <= O(f) + O(g)"
   apply (rule subsetI)
   apply (auto simp add: bigo_def bigo_pos_const func_plus set_plus_def)
   apply (subst bigo_pos_const [symmetric])+
@@ -153,15 +153,15 @@
   apply simp
   done
 
-lemma bigo_plus_subset2 [intro]: "A <= O(f) ==> B <= O(f) ==> A \<oplus> B <= O(f)"
-  apply (subgoal_tac "A \<oplus> B <= O(f) \<oplus> O(f)")
+lemma bigo_plus_subset2 [intro]: "A <= O(f) ==> B <= O(f) ==> A + B <= O(f)"
+  apply (subgoal_tac "A + B <= O(f) + O(f)")
   apply (erule order_trans)
   apply simp
   apply (auto del: subsetI simp del: bigo_plus_idemp)
   done
 
 lemma bigo_plus_eq: "ALL x. 0 <= f x ==> ALL x. 0 <= g x ==> 
-    O(f + g) = O(f) \<oplus> O(g)"
+    O(f + g) = O(f) + O(g)"
   apply (rule equalityI)
   apply (rule bigo_plus_subset)
   apply (simp add: bigo_alt_def set_plus_def func_plus)
@@ -273,12 +273,12 @@
 lemma bigo_abs5: "f =o O(g) ==> (%x. abs(f x)) =o O(g)" 
   by (unfold bigo_def, auto)
 
-lemma bigo_elt_subset2 [intro]: "f : g +o O(h) ==> O(f) <= O(g) \<oplus> O(h)"
+lemma bigo_elt_subset2 [intro]: "f : g +o O(h) ==> O(f) <= O(g) + O(h)"
 proof -
   assume "f : g +o O(h)"
-  also have "... <= O(g) \<oplus> O(h)"
+  also have "... <= O(g) + O(h)"
     by (auto del: subsetI)
-  also have "... = O(%x. abs(g x)) \<oplus> O(%x. abs(h x))"
+  also have "... = O(%x. abs(g x)) + O(%x. abs(h x))"
     apply (subst bigo_abs3 [symmetric])+
     apply (rule refl)
     done
@@ -287,13 +287,13 @@
   finally have "f : ...".
   then have "O(f) <= ..."
     by (elim bigo_elt_subset)
-  also have "... = O(%x. abs(g x)) \<oplus> O(%x. abs(h x))"
+  also have "... = O(%x. abs(g x)) + O(%x. abs(h x))"
     by (rule bigo_plus_eq, auto)
   finally show ?thesis
     by (simp add: bigo_abs3 [symmetric])
 qed
 
-lemma bigo_mult [intro]: "O(f)\<otimes>O(g) <= O(f * g)"
+lemma bigo_mult [intro]: "O(f)*O(g) <= O(f * g)"
   apply (rule subsetI)
   apply (subst bigo_def)
   apply (auto simp add: bigo_alt_def set_times_def func_times)
@@ -369,7 +369,7 @@
   done
 
 lemma bigo_mult7: "ALL x. f x ~= 0 ==>
-    O(f * g) <= O(f::'a => ('b::linordered_field)) \<otimes> O(g)"
+    O(f * g) <= O(f::'a => ('b::linordered_field)) * O(g)"
   apply (subst bigo_mult6)
   apply assumption
   apply (rule set_times_mono3)
@@ -377,7 +377,7 @@
   done
 
 lemma bigo_mult8: "ALL x. f x ~= 0 ==>
-    O(f * g) = O(f::'a => ('b::linordered_field)) \<otimes> O(g)"
+    O(f * g) = O(f::'a => ('b::linordered_field)) * O(g)"
   apply (rule equalityI)
   apply (erule bigo_mult7)
   apply (rule bigo_mult)
@@ -402,9 +402,9 @@
   show "f +o O(g) <= O(g)"
   proof -
     have "f : O(f)" by auto
-    then have "f +o O(g) <= O(f) \<oplus> O(g)"
+    then have "f +o O(g) <= O(f) + O(g)"
       by (auto del: subsetI)
-    also have "... <= O(g) \<oplus> O(g)"
+    also have "... <= O(g) + O(g)"
     proof -
       from a have "O(f) <= O(g)" by (auto del: subsetI)
       thus ?thesis by (auto del: subsetI)
@@ -656,7 +656,7 @@
 subsection {* Misc useful stuff *}
 
 lemma bigo_useful_intro: "A <= O(f) ==> B <= O(f) ==>
-  A \<oplus> B <= O(f)"
+  A + B <= O(f)"
   apply (subst bigo_plus_idemp [symmetric])
   apply (rule set_plus_mono2)
   apply assumption+
--- a/src/HOL/Library/Countable.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Countable.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -269,8 +269,7 @@
     by - (rule exI)
 qed
 
-method_setup countable_datatype = {*
-let
+ML {*
   fun countable_tac ctxt =
     SUBGOAL (fn (goal, i) =>
       let
@@ -297,9 +296,10 @@
            etac induct_thm i,
            REPEAT (resolve_tac rules i ORELSE atac i)]) 1
       end)
-in
+*}
+
+method_setup countable_datatype = {*
   Scan.succeed (fn ctxt => SIMPLE_METHOD' (countable_tac ctxt))
-end
 *} "prove countable class instances for datatypes"
 
 hide_const (open) finite_item nth_item
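
With the tactic moved into a plain ML block, the method_setup line merely wraps it as a
proof method. A usage sketch for the resulting method, assuming a theory that imports
Countable (the datatype is illustrative):

  datatype 'a tree = Leaf | Node 'a "'a tree" "'a tree"

  instance tree :: (countable) countable
    by countable_datatype
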
--- a/src/HOL/Library/Eval_Witness.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Eval_Witness.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -77,8 +77,8 @@
 
 method_setup eval_witness = {*
   Scan.lift (Scan.repeat Args.name) >>
-  (fn ws => K (SIMPLE_METHOD'
-    (CSUBGOAL (fn (ct, i) => rtac (eval_witness_oracle (ct, ws)) i))))
+    (fn ws => K (SIMPLE_METHOD'
+      (CSUBGOAL (fn (ct, i) => rtac (eval_witness_oracle (ct, ws)) i))))
 *} "evaluation with ML witnesses"
 
 
--- a/src/HOL/Library/Multiset.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Multiset.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -19,7 +19,7 @@
   show "(\<lambda>x. 0::nat) \<in> {f. finite {x. f x > 0}}" by simp
 qed
 
-lemmas multiset_typedef = Abs_multiset_inverse count_inverse count
+setup_lifting type_definition_multiset
 
 abbreviation Melem :: "'a => 'a multiset => bool"  ("(_/ :# _)" [50, 51] 50) where
   "a :# M == 0 < count M a"
@@ -82,21 +82,21 @@
 instantiation multiset :: (type) "{zero, plus}"
 begin
 
-definition Mempty_def:
-  "0 = Abs_multiset (\<lambda>a. 0)"
+lift_definition zero_multiset :: "'a multiset" is "\<lambda>a. 0"
+by (rule const0_in_multiset)
 
 abbreviation Mempty :: "'a multiset" ("{#}") where
   "Mempty \<equiv> 0"
 
-definition union_def:
-  "M + N = Abs_multiset (\<lambda>a. count M a + count N a)"
+lift_definition plus_multiset :: "'a multiset => 'a multiset => 'a multiset" is "\<lambda>M N. (\<lambda>a. M a + N a)"
+by (rule union_preserves_multiset)
 
 instance ..
 
 end
 
-definition single :: "'a => 'a multiset" where
-  "single a = Abs_multiset (\<lambda>b. if b = a then 1 else 0)"
+lift_definition single :: "'a => 'a multiset" is "\<lambda>a b. if b = a then 1 else 0"
+by (rule only1_in_multiset)
 
 syntax
   "_multiset" :: "args => 'a multiset"    ("{#(_)#}")
@@ -105,10 +105,10 @@
   "{#x#}" == "CONST single x"
 
 lemma count_empty [simp]: "count {#} a = 0"
-  by (simp add: Mempty_def in_multiset multiset_typedef)
+  by (simp add: zero_multiset.rep_eq)
 
 lemma count_single [simp]: "count {#b#} a = (if b = a then 1 else 0)"
-  by (simp add: single_def in_multiset multiset_typedef)
+  by (simp add: single.rep_eq)
 
 
 subsection {* Basic operations *}
@@ -116,7 +116,7 @@
 subsubsection {* Union *}
 
 lemma count_union [simp]: "count (M + N) a = count M a + count N a"
-  by (simp add: union_def in_multiset multiset_typedef)
+  by (simp add: plus_multiset.rep_eq)
 
 instance multiset :: (type) cancel_comm_monoid_add
   by default (simp_all add: multiset_eq_iff)
@@ -127,15 +127,15 @@
 instantiation multiset :: (type) minus
 begin
 
-definition diff_def:
-  "M - N = Abs_multiset (\<lambda>a. count M a - count N a)"
-
+lift_definition minus_multiset :: "'a multiset => 'a multiset => 'a multiset" is "\<lambda> M N. \<lambda>a. M a - N a"
+by (rule diff_preserves_multiset)
+ 
 instance ..
 
 end
 
 lemma count_diff [simp]: "count (M - N) a = count M a - count N a"
-  by (simp add: diff_def in_multiset multiset_typedef)
+  by (simp add: minus_multiset.rep_eq)
 
 lemma diff_empty [simp]: "M - {#} = M \<and> {#} - M = {#}"
 by(simp add: multiset_eq_iff)
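
The lift_definition versions above replace the explicit Abs_multiset definitions; the
generated .rep_eq rules keep the familiar count equations available as simp rules, so
elementary facts still go through unchanged. A small sanity-check sketch (illustrative
only):

  lemma "count ({#a#} + {#a#} - {#a#}) a = 1"
    by simp   (* via count_single, count_union and count_diff above *)
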
@@ -264,8 +264,9 @@
 instantiation multiset :: (type) ordered_ab_semigroup_add_imp_le
 begin
 
-definition less_eq_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
-  mset_le_def: "A \<le> B \<longleftrightarrow> (\<forall>a. count A a \<le> count B a)"
+lift_definition less_eq_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" is "\<lambda> A B. (\<forall>a. A a \<le> B a)"
+by simp
+lemmas mset_le_def = less_eq_multiset_def
 
 definition less_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
   mset_less_def: "(A::'a multiset) < B \<longleftrightarrow> A \<le> B \<and> A \<noteq> B"
@@ -391,7 +392,7 @@
 
 lemma multiset_inter_count [simp]:
   "count (A #\<inter> B) x = min (count A x) (count B x)"
-  by (simp add: multiset_inter_def multiset_typedef)
+  by (simp add: multiset_inter_def)
 
 lemma multiset_inter_single: "a \<noteq> b \<Longrightarrow> {#a#} #\<inter> {#b#} = {#}"
   by (rule multiset_eqI) auto
@@ -414,14 +415,14 @@
 
 text {* Multiset comprehension *}
 
-definition filter :: "('a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset" where
-  "filter P M = Abs_multiset (\<lambda>x. if P x then count M x else 0)"
+lift_definition filter :: "('a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset" is "\<lambda>P M. \<lambda>x. if P x then M x else 0"
+by (rule filter_preserves_multiset)
 
 hide_const (open) filter
 
 lemma count_filter [simp]:
   "count (Multiset.filter P M) a = (if P a then count M a else 0)"
-  by (simp add: filter_def in_multiset multiset_typedef)
+  by (simp add: filter.rep_eq)
 
 lemma filter_empty [simp]:
   "Multiset.filter P {#} = {#}"
@@ -593,7 +594,7 @@
   and add: "!!M x. P M ==> P (M + {#x#})"
 shows "P M"
 proof -
-  note defns = union_def single_def Mempty_def
+  note defns = plus_multiset_def single_def zero_multiset_def
   note add' = add [unfolded defns, simplified]
   have aux: "\<And>a::'a. count (Abs_multiset (\<lambda>b. if b = a then 1 else 0)) =
     (\<lambda>b. if b = a then 1 else 0)" by (simp add: Abs_multiset_inverse in_multiset) 
@@ -1073,7 +1074,8 @@
 
 lemma map_of_join_raw:
   assumes "distinct (map fst ys)"
-  shows "map_of (join_raw f xs ys) x = (case map_of xs x of None => map_of ys x | Some v => (case map_of ys x of None => Some v | Some v' => Some (f x (v, v'))))"
+  shows "map_of (join_raw f xs ys) x = (case map_of xs x of None => map_of ys x | Some v =>
+    (case map_of ys x of None => Some v | Some v' => Some (f x (v, v'))))"
 using assms
 apply (induct ys)
 apply (auto simp add: map_of_map_default split: option.split)
@@ -1093,8 +1095,10 @@
   "subtract_entries_raw xs ys = foldr (%(k, v). AList.map_entry k (%v'. v' - v)) ys xs"
 
 lemma map_of_subtract_entries_raw:
-  "distinct (map fst ys) ==> map_of (subtract_entries_raw xs ys) x = (case map_of xs x of None => None | Some v => (case map_of ys x of None => Some v | Some v' => Some (v - v')))"
-unfolding subtract_entries_raw_def
+  assumes "distinct (map fst ys)"
+  shows "map_of (subtract_entries_raw xs ys) x = (case map_of xs x of None => None | Some v =>
+    (case map_of ys x of None => Some v | Some v' => Some (v - v')))"
+using assms unfolding subtract_entries_raw_def
 apply (induct ys)
 apply auto
 apply (simp split: option.split)
@@ -1197,7 +1201,7 @@
 
 lemma filter_Bag [code]:
   "Multiset.filter P (Bag xs) = Bag (DAList.filter (P \<circ> fst) xs)"
-by (rule multiset_eqI) (simp add: count_of_filter filter.rep_eq)
+by (rule multiset_eqI) (simp add: count_of_filter DAList.filter.rep_eq)
 
 lemma mset_less_eq_Bag [code]:
   "Bag xs \<le> A \<longleftrightarrow> (\<forall>(x, n) \<in> set (DAList.impl_of xs). count_of (DAList.impl_of xs) x \<le> count A x)"
--- a/src/HOL/Library/Polynomial.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Polynomial.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -492,7 +492,7 @@
 
 subsection {* Multiplication of polynomials *}
 
-text {* TODO: move to Set_Interval.thy *}
+(* TODO: move to Set_Interval.thy *)
 lemma setsum_atMost_Suc_shift:
   fixes f :: "nat \<Rightarrow> 'a::comm_monoid_add"
   shows "(\<Sum>i\<le>Suc n. f i) = f 0 + (\<Sum>i\<le>n. f (Suc i))"
--- a/src/HOL/Library/Quotient_List.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Quotient_List.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Library/Quotient3_List.thy
+(*  Title:      HOL/Library/Quotient_List.thy
     Author:     Cezary Kaliszyk and Christian Urban
 *)
 
--- a/src/HOL/Library/Quotient_Option.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Quotient_Option.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Library/Quotient3_Option.thy
+(*  Title:      HOL/Library/Quotient_Option.thy
     Author:     Cezary Kaliszyk and Christian Urban
 *)
 
--- a/src/HOL/Library/Quotient_Product.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Quotient_Product.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Library/Quotient3_Product.thy
+(*  Title:      HOL/Library/Quotient_Product.thy
     Author:     Cezary Kaliszyk and Christian Urban
 *)
 
--- a/src/HOL/Library/Quotient_Set.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Quotient_Set.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Library/Quotient3_Set.thy
+(*  Title:      HOL/Library/Quotient_Set.thy
     Author:     Cezary Kaliszyk and Christian Urban
 *)
 
--- a/src/HOL/Library/Quotient_Sum.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Quotient_Sum.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Library/Quotient3_Sum.thy
+(*  Title:      HOL/Library/Quotient_Sum.thy
     Author:     Cezary Kaliszyk and Christian Urban
 *)
 
--- a/src/HOL/Library/RBT.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/RBT.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -35,7 +35,7 @@
 subsection {* Primitive operations *}
 
 definition lookup :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> 'a \<rightharpoonup> 'b" where
-  [code]: "lookup t = RBT_Impl.lookup (impl_of t)"
+  [code]: "lookup t = rbt_lookup (impl_of t)"
 
 definition empty :: "('a\<Colon>linorder, 'b) rbt" where
   "empty = RBT RBT_Impl.Empty"
@@ -45,17 +45,17 @@
   by (simp add: empty_def RBT_inverse)
 
 definition insert :: "'a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" where
-  "insert k v t = RBT (RBT_Impl.insert k v (impl_of t))"
+  "insert k v t = RBT (rbt_insert k v (impl_of t))"
 
 lemma impl_of_insert [code abstract]:
-  "impl_of (insert k v t) = RBT_Impl.insert k v (impl_of t)"
+  "impl_of (insert k v t) = rbt_insert k v (impl_of t)"
   by (simp add: insert_def RBT_inverse)
 
 definition delete :: "'a\<Colon>linorder \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" where
-  "delete k t = RBT (RBT_Impl.delete k (impl_of t))"
+  "delete k t = RBT (rbt_delete k (impl_of t))"
 
 lemma impl_of_delete [code abstract]:
-  "impl_of (delete k t) = RBT_Impl.delete k (impl_of t)"
+  "impl_of (delete k t) = rbt_delete k (impl_of t)"
   by (simp add: delete_def RBT_inverse)
 
 definition entries :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a \<times> 'b) list" where
@@ -65,17 +65,17 @@
   [code]: "keys t = RBT_Impl.keys (impl_of t)"
 
 definition bulkload :: "('a\<Colon>linorder \<times> 'b) list \<Rightarrow> ('a, 'b) rbt" where
-  "bulkload xs = RBT (RBT_Impl.bulkload xs)"
+  "bulkload xs = RBT (rbt_bulkload xs)"
 
 lemma impl_of_bulkload [code abstract]:
-  "impl_of (bulkload xs) = RBT_Impl.bulkload xs"
+  "impl_of (bulkload xs) = rbt_bulkload xs"
   by (simp add: bulkload_def RBT_inverse)
 
 definition map_entry :: "'a \<Rightarrow> ('b \<Rightarrow> 'b) \<Rightarrow> ('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a, 'b) rbt" where
-  "map_entry k f t = RBT (RBT_Impl.map_entry k f (impl_of t))"
+  "map_entry k f t = RBT (rbt_map_entry k f (impl_of t))"
 
 lemma impl_of_map_entry [code abstract]:
-  "impl_of (map_entry k f t) = RBT_Impl.map_entry k f (impl_of t)"
+  "impl_of (map_entry k f t) = rbt_map_entry k f (impl_of t)"
   by (simp add: map_entry_def RBT_inverse)
 
 definition map :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a, 'b) rbt" where
@@ -98,11 +98,11 @@
 subsection {* Abstract lookup properties *}
 
 lemma lookup_RBT:
-  "is_rbt t \<Longrightarrow> lookup (RBT t) = RBT_Impl.lookup t"
+  "is_rbt t \<Longrightarrow> lookup (RBT t) = rbt_lookup t"
   by (simp add: lookup_def RBT_inverse)
 
 lemma lookup_impl_of:
-  "RBT_Impl.lookup (impl_of t) = lookup t"
+  "rbt_lookup (impl_of t) = lookup t"
   by (simp add: lookup_def)
 
 lemma entries_impl_of:
@@ -119,11 +119,11 @@
 
 lemma lookup_insert [simp]:
   "lookup (insert k v t) = (lookup t)(k \<mapsto> v)"
-  by (simp add: insert_def lookup_RBT lookup_insert lookup_impl_of)
+  by (simp add: insert_def lookup_RBT rbt_lookup_rbt_insert lookup_impl_of)
 
 lemma lookup_delete [simp]:
   "lookup (delete k t) = (lookup t)(k := None)"
-  by (simp add: delete_def lookup_RBT RBT_Impl.lookup_delete lookup_impl_of restrict_complement_singleton_eq)
+  by (simp add: delete_def lookup_RBT rbt_lookup_rbt_delete lookup_impl_of restrict_complement_singleton_eq)
 
 lemma map_of_entries [simp]:
   "map_of (entries t) = lookup t"
@@ -131,19 +131,19 @@
 
 lemma entries_lookup:
   "entries t1 = entries t2 \<longleftrightarrow> lookup t1 = lookup t2"
-  by (simp add: entries_def lookup_def entries_lookup)
+  by (simp add: entries_def lookup_def entries_rbt_lookup)
 
 lemma lookup_bulkload [simp]:
   "lookup (bulkload xs) = map_of xs"
-  by (simp add: bulkload_def lookup_RBT RBT_Impl.lookup_bulkload)
+  by (simp add: bulkload_def lookup_RBT rbt_lookup_rbt_bulkload)
 
 lemma lookup_map_entry [simp]:
   "lookup (map_entry k f t) = (lookup t)(k := Option.map f (lookup t k))"
-  by (simp add: map_entry_def lookup_RBT RBT_Impl.lookup_map_entry lookup_impl_of)
+  by (simp add: map_entry_def lookup_RBT rbt_lookup_rbt_map_entry lookup_impl_of)
 
 lemma lookup_map [simp]:
   "lookup (map f t) k = Option.map (f k) (lookup t k)"
-  by (simp add: map_def lookup_RBT RBT_Impl.lookup_map lookup_impl_of)
+  by (simp add: map_def lookup_RBT rbt_lookup_map lookup_impl_of)
 
 lemma fold_fold:
   "fold f t = List.fold (prod_case f) (entries t)"
@@ -154,7 +154,7 @@
   by (simp add: rbt_eq_iff is_empty_def impl_of_empty split: rbt.split)
 
 lemma RBT_lookup_empty [simp]: (*FIXME*)
-  "RBT_Impl.lookup t = Map.empty \<longleftrightarrow> t = RBT_Impl.Empty"
+  "rbt_lookup t = Map.empty \<longleftrightarrow> t = RBT_Impl.Empty"
   by (cases t) (auto simp add: fun_eq_iff)
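
After the renaming, the abstract-level simp rules of this theory (lookup_insert,
lookup_delete, lookup_bulkload, ...) are unchanged, so client proofs should be
unaffected. A small sketch, assuming a theory that imports ~~/src/HOL/Library/RBT
(qualified names are used to avoid any clash with Set.insert):

  lemma "RBT.lookup (RBT.insert k v t) k = Some v"
    by simp   (* lookup_insert above, together with fun_upd_same *)
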
 
 lemma lookup_empty_empty [simp]:
@@ -163,7 +163,7 @@
 
 lemma sorted_keys [iff]:
   "sorted (keys t)"
-  by (simp add: keys_def RBT_Impl.keys_def sorted_entries)
+  by (simp add: keys_def RBT_Impl.keys_def rbt_sorted_entries)
 
 lemma distinct_keys [iff]:
   "distinct (keys t)"
--- a/src/HOL/Library/RBT_Impl.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/RBT_Impl.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -65,202 +65,221 @@
 
 subsubsection {* Search tree properties *}
 
-definition tree_less :: "'a\<Colon>order \<Rightarrow> ('a, 'b) rbt \<Rightarrow> bool"
-where
-  tree_less_prop: "tree_less k t \<longleftrightarrow> (\<forall>x\<in>set (keys t). x < k)"
+context ord begin
 
-abbreviation tree_less_symbol (infix "|\<guillemotleft>" 50)
-where "t |\<guillemotleft> x \<equiv> tree_less x t"
+definition rbt_less :: "'a \<Rightarrow> ('a, 'b) rbt \<Rightarrow> bool"
+where
+  rbt_less_prop: "rbt_less k t \<longleftrightarrow> (\<forall>x\<in>set (keys t). x < k)"
 
-definition tree_greater :: "'a\<Colon>order \<Rightarrow> ('a, 'b) rbt \<Rightarrow> bool" (infix "\<guillemotleft>|" 50) 
+abbreviation rbt_less_symbol (infix "|\<guillemotleft>" 50)
+where "t |\<guillemotleft> x \<equiv> rbt_less x t"
+
+definition rbt_greater :: "'a \<Rightarrow> ('a, 'b) rbt \<Rightarrow> bool" (infix "\<guillemotleft>|" 50) 
 where
-  tree_greater_prop: "tree_greater k t = (\<forall>x\<in>set (keys t). k < x)"
+  rbt_greater_prop: "rbt_greater k t = (\<forall>x\<in>set (keys t). k < x)"
 
-lemma tree_less_simps [simp]:
-  "tree_less k Empty = True"
-  "tree_less k (Branch c lt kt v rt) \<longleftrightarrow> kt < k \<and> tree_less k lt \<and> tree_less k rt"
-  by (auto simp add: tree_less_prop)
+lemma rbt_less_simps [simp]:
+  "Empty |\<guillemotleft> k = True"
+  "Branch c lt kt v rt |\<guillemotleft> k \<longleftrightarrow> kt < k \<and> lt |\<guillemotleft> k \<and> rt |\<guillemotleft> k"
+  by (auto simp add: rbt_less_prop)
 
-lemma tree_greater_simps [simp]:
-  "tree_greater k Empty = True"
-  "tree_greater k (Branch c lt kt v rt) \<longleftrightarrow> k < kt \<and> tree_greater k lt \<and> tree_greater k rt"
-  by (auto simp add: tree_greater_prop)
+lemma rbt_greater_simps [simp]:
+  "k \<guillemotleft>| Empty = True"
+  "k \<guillemotleft>| (Branch c lt kt v rt) \<longleftrightarrow> k < kt \<and> k \<guillemotleft>| lt \<and> k \<guillemotleft>| rt"
+  by (auto simp add: rbt_greater_prop)
 
-lemmas tree_ord_props = tree_less_prop tree_greater_prop
+lemmas rbt_ord_props = rbt_less_prop rbt_greater_prop
+
+lemmas rbt_greater_nit = rbt_greater_prop entry_in_tree_keys
+lemmas rbt_less_nit = rbt_less_prop entry_in_tree_keys
 
-lemmas tree_greater_nit = tree_greater_prop entry_in_tree_keys
-lemmas tree_less_nit = tree_less_prop entry_in_tree_keys
+lemma (in order)
+  shows rbt_less_eq_trans: "l |\<guillemotleft> u \<Longrightarrow> u \<le> v \<Longrightarrow> l |\<guillemotleft> v"
+  and rbt_less_trans: "t |\<guillemotleft> x \<Longrightarrow> x < y \<Longrightarrow> t |\<guillemotleft> y"
+  and rbt_greater_eq_trans: "u \<le> v \<Longrightarrow> v \<guillemotleft>| r \<Longrightarrow> u \<guillemotleft>| r"
+  and rbt_greater_trans: "x < y \<Longrightarrow> y \<guillemotleft>| t \<Longrightarrow> x \<guillemotleft>| t"
+  by (auto simp: rbt_ord_props)
 
-lemma tree_less_eq_trans: "l |\<guillemotleft> u \<Longrightarrow> u \<le> v \<Longrightarrow> l |\<guillemotleft> v"
-  and tree_less_trans: "t |\<guillemotleft> x \<Longrightarrow> x < y \<Longrightarrow> t |\<guillemotleft> y"
-  and tree_greater_eq_trans: "u \<le> v \<Longrightarrow> v \<guillemotleft>| r \<Longrightarrow> u \<guillemotleft>| r"
-  and tree_greater_trans: "x < y \<Longrightarrow> y \<guillemotleft>| t \<Longrightarrow> x \<guillemotleft>| t"
-  by (auto simp: tree_ord_props)
-
-primrec sorted :: "('a::linorder, 'b) rbt \<Rightarrow> bool"
+primrec rbt_sorted :: "('a, 'b) rbt \<Rightarrow> bool"
 where
-  "sorted Empty = True"
-| "sorted (Branch c l k v r) = (l |\<guillemotleft> k \<and> k \<guillemotleft>| r \<and> sorted l \<and> sorted r)"
+  "rbt_sorted Empty = True"
+| "rbt_sorted (Branch c l k v r) = (l |\<guillemotleft> k \<and> k \<guillemotleft>| r \<and> rbt_sorted l \<and> rbt_sorted r)"
+
+end
 
-lemma sorted_entries:
-  "sorted t \<Longrightarrow> List.sorted (List.map fst (entries t))"
+context linorder begin
+
+lemma rbt_sorted_entries:
+  "rbt_sorted t \<Longrightarrow> List.sorted (List.map fst (entries t))"
 by (induct t) 
-  (force simp: sorted_append sorted_Cons tree_ord_props 
+  (force simp: sorted_append sorted_Cons rbt_ord_props 
       dest!: entry_in_tree_keys)+
 
 lemma distinct_entries:
-  "sorted t \<Longrightarrow> distinct (List.map fst (entries t))"
+  "rbt_sorted t \<Longrightarrow> distinct (List.map fst (entries t))"
 by (induct t) 
-  (force simp: sorted_append sorted_Cons tree_ord_props 
+  (force simp: sorted_append sorted_Cons rbt_ord_props 
       dest!: entry_in_tree_keys)+
 
-
 subsubsection {* Tree lookup *}
 
-primrec lookup :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> 'a \<rightharpoonup> 'b"
+primrec (in ord) rbt_lookup :: "('a, 'b) rbt \<Rightarrow> 'a \<rightharpoonup> 'b"
 where
-  "lookup Empty k = None"
-| "lookup (Branch _ l x y r) k = (if k < x then lookup l k else if x < k then lookup r k else Some y)"
+  "rbt_lookup Empty k = None"
+| "rbt_lookup (Branch _ l x y r) k = 
+   (if k < x then rbt_lookup l k else if x < k then rbt_lookup r k else Some y)"
 
-lemma lookup_keys: "sorted t \<Longrightarrow> dom (lookup t) = set (keys t)"
-  by (induct t) (auto simp: dom_def tree_greater_prop tree_less_prop)
+lemma rbt_lookup_keys: "rbt_sorted t \<Longrightarrow> dom (rbt_lookup t) = set (keys t)"
+  by (induct t) (auto simp: dom_def rbt_greater_prop rbt_less_prop)
 
-lemma dom_lookup_Branch: 
-  "sorted (Branch c t1 k v t2) \<Longrightarrow> 
-    dom (lookup (Branch c t1 k v t2)) 
-    = Set.insert k (dom (lookup t1) \<union> dom (lookup t2))"
+lemma dom_rbt_lookup_Branch: 
+  "rbt_sorted (Branch c t1 k v t2) \<Longrightarrow> 
+    dom (rbt_lookup (Branch c t1 k v t2)) 
+    = Set.insert k (dom (rbt_lookup t1) \<union> dom (rbt_lookup t2))"
 proof -
-  assume "sorted (Branch c t1 k v t2)"
-  moreover from this have "sorted t1" "sorted t2" by simp_all
-  ultimately show ?thesis by (simp add: lookup_keys)
+  assume "rbt_sorted (Branch c t1 k v t2)"
+  moreover from this have "rbt_sorted t1" "rbt_sorted t2" by simp_all
+  ultimately show ?thesis by (simp add: rbt_lookup_keys)
 qed
 
-lemma finite_dom_lookup [simp, intro!]: "finite (dom (lookup t))"
+lemma finite_dom_rbt_lookup [simp, intro!]: "finite (dom (rbt_lookup t))"
 proof (induct t)
   case Empty then show ?case by simp
 next
   case (Branch color t1 a b t2)
-  let ?A = "Set.insert a (dom (lookup t1) \<union> dom (lookup t2))"
-  have "dom (lookup (Branch color t1 a b t2)) \<subseteq> ?A" by (auto split: split_if_asm)
-  moreover from Branch have "finite (insert a (dom (lookup t1) \<union> dom (lookup t2)))" by simp
+  let ?A = "Set.insert a (dom (rbt_lookup t1) \<union> dom (rbt_lookup t2))"
+  have "dom (rbt_lookup (Branch color t1 a b t2)) \<subseteq> ?A" by (auto split: split_if_asm)
+  moreover from Branch have "finite (insert a (dom (rbt_lookup t1) \<union> dom (rbt_lookup t2)))" by simp
   ultimately show ?case by (rule finite_subset)
 qed 
 
-lemma lookup_tree_less[simp]: "t |\<guillemotleft> k \<Longrightarrow> lookup t k = None" 
+end
+
+context ord begin
+
+lemma rbt_lookup_rbt_less[simp]: "t |\<guillemotleft> k \<Longrightarrow> rbt_lookup t k = None" 
 by (induct t) auto
 
-lemma lookup_tree_greater[simp]: "k \<guillemotleft>| t \<Longrightarrow> lookup t k = None"
+lemma rbt_lookup_rbt_greater[simp]: "k \<guillemotleft>| t \<Longrightarrow> rbt_lookup t k = None"
 by (induct t) auto
 
-lemma lookup_Empty: "lookup Empty = empty"
+lemma rbt_lookup_Empty: "rbt_lookup Empty = empty"
 by (rule ext) simp
 
+end
+
+context linorder begin
+
 lemma map_of_entries:
-  "sorted t \<Longrightarrow> map_of (entries t) = lookup t"
+  "rbt_sorted t \<Longrightarrow> map_of (entries t) = rbt_lookup t"
 proof (induct t)
-  case Empty thus ?case by (simp add: lookup_Empty)
+  case Empty thus ?case by (simp add: rbt_lookup_Empty)
 next
   case (Branch c t1 k v t2)
-  have "lookup (Branch c t1 k v t2) = lookup t2 ++ [k\<mapsto>v] ++ lookup t1"
+  have "rbt_lookup (Branch c t1 k v t2) = rbt_lookup t2 ++ [k\<mapsto>v] ++ rbt_lookup t1"
   proof (rule ext)
     fix x
-    from Branch have SORTED: "sorted (Branch c t1 k v t2)" by simp
-    let ?thesis = "lookup (Branch c t1 k v t2) x = (lookup t2 ++ [k \<mapsto> v] ++ lookup t1) x"
+    from Branch have RBT_SORTED: "rbt_sorted (Branch c t1 k v t2)" by simp
+    let ?thesis = "rbt_lookup (Branch c t1 k v t2) x = (rbt_lookup t2 ++ [k \<mapsto> v] ++ rbt_lookup t1) x"
 
-    have DOM_T1: "!!k'. k'\<in>dom (lookup t1) \<Longrightarrow> k>k'"
+    have DOM_T1: "!!k'. k'\<in>dom (rbt_lookup t1) \<Longrightarrow> k>k'"
     proof -
       fix k'
-      from SORTED have "t1 |\<guillemotleft> k" by simp
-      with tree_less_prop have "\<forall>k'\<in>set (keys t1). k>k'" by auto
-      moreover assume "k'\<in>dom (lookup t1)"
-      ultimately show "k>k'" using lookup_keys SORTED by auto
+      from RBT_SORTED have "t1 |\<guillemotleft> k" by simp
+      with rbt_less_prop have "\<forall>k'\<in>set (keys t1). k>k'" by auto
+      moreover assume "k'\<in>dom (rbt_lookup t1)"
+      ultimately show "k>k'" using rbt_lookup_keys RBT_SORTED by auto
     qed
     
-    have DOM_T2: "!!k'. k'\<in>dom (lookup t2) \<Longrightarrow> k<k'"
+    have DOM_T2: "!!k'. k'\<in>dom (rbt_lookup t2) \<Longrightarrow> k<k'"
     proof -
       fix k'
-      from SORTED have "k \<guillemotleft>| t2" by simp
-      with tree_greater_prop have "\<forall>k'\<in>set (keys t2). k<k'" by auto
-      moreover assume "k'\<in>dom (lookup t2)"
-      ultimately show "k<k'" using lookup_keys SORTED by auto
+      from RBT_SORTED have "k \<guillemotleft>| t2" by simp
+      with rbt_greater_prop have "\<forall>k'\<in>set (keys t2). k<k'" by auto
+      moreover assume "k'\<in>dom (rbt_lookup t2)"
+      ultimately show "k<k'" using rbt_lookup_keys RBT_SORTED by auto
     qed
     
     {
       assume C: "x<k"
-      hence "lookup (Branch c t1 k v t2) x = lookup t1 x" by simp
+      hence "rbt_lookup (Branch c t1 k v t2) x = rbt_lookup t1 x" by simp
       moreover from C have "x\<notin>dom [k\<mapsto>v]" by simp
-      moreover have "x\<notin>dom (lookup t2)" proof
-        assume "x\<in>dom (lookup t2)"
+      moreover have "x \<notin> dom (rbt_lookup t2)"
+      proof
+        assume "x \<in> dom (rbt_lookup t2)"
         with DOM_T2 have "k<x" by blast
         with C show False by simp
       qed
       ultimately have ?thesis by (simp add: map_add_upd_left map_add_dom_app_simps)
     } moreover {
       assume [simp]: "x=k"
-      hence "lookup (Branch c t1 k v t2) x = [k \<mapsto> v] x" by simp
-      moreover have "x\<notin>dom (lookup t1)" proof
-        assume "x\<in>dom (lookup t1)"
+      hence "rbt_lookup (Branch c t1 k v t2) x = [k \<mapsto> v] x" by simp
+      moreover have "x \<notin> dom (rbt_lookup t1)" 
+      proof
+        assume "x \<in> dom (rbt_lookup t1)"
         with DOM_T1 have "k>x" by blast
         thus False by simp
       qed
       ultimately have ?thesis by (simp add: map_add_upd_left map_add_dom_app_simps)
     } moreover {
       assume C: "x>k"
-      hence "lookup (Branch c t1 k v t2) x = lookup t2 x" by (simp add: less_not_sym[of k x])
+      hence "rbt_lookup (Branch c t1 k v t2) x = rbt_lookup t2 x" by (simp add: less_not_sym[of k x])
       moreover from C have "x\<notin>dom [k\<mapsto>v]" by simp
-      moreover have "x\<notin>dom (lookup t1)" proof
-        assume "x\<in>dom (lookup t1)"
+      moreover have "x\<notin>dom (rbt_lookup t1)" proof
+        assume "x\<in>dom (rbt_lookup t1)"
         with DOM_T1 have "k>x" by simp
         with C show False by simp
       qed
       ultimately have ?thesis by (simp add: map_add_upd_left map_add_dom_app_simps)
     } ultimately show ?thesis using less_linear by blast
   qed
-  also from Branch have "lookup t2 ++ [k \<mapsto> v] ++ lookup t1 = map_of (entries (Branch c t1 k v t2))" by simp
+  also from Branch 
+  have "rbt_lookup t2 ++ [k \<mapsto> v] ++ rbt_lookup t1 = map_of (entries (Branch c t1 k v t2))" by simp
   finally show ?case by simp
 qed
 
-lemma lookup_in_tree: "sorted t \<Longrightarrow> lookup t k = Some v \<longleftrightarrow> (k, v) \<in> set (entries t)"
+lemma rbt_lookup_in_tree: "rbt_sorted t \<Longrightarrow> rbt_lookup t k = Some v \<longleftrightarrow> (k, v) \<in> set (entries t)"
   by (simp add: map_of_entries [symmetric] distinct_entries)
 
 lemma set_entries_inject:
-  assumes sorted: "sorted t1" "sorted t2" 
+  assumes rbt_sorted: "rbt_sorted t1" "rbt_sorted t2" 
   shows "set (entries t1) = set (entries t2) \<longleftrightarrow> entries t1 = entries t2"
 proof -
-  from sorted have "distinct (map fst (entries t1))"
+  from rbt_sorted have "distinct (map fst (entries t1))"
     "distinct (map fst (entries t2))"
     by (auto intro: distinct_entries)
-  with sorted show ?thesis
-    by (auto intro: map_sorted_distinct_set_unique sorted_entries simp add: distinct_map)
+  with rbt_sorted show ?thesis
+    by (auto intro: map_sorted_distinct_set_unique rbt_sorted_entries simp add: distinct_map)
 qed
 
 lemma entries_eqI:
-  assumes sorted: "sorted t1" "sorted t2" 
-  assumes lookup: "lookup t1 = lookup t2"
+  assumes rbt_sorted: "rbt_sorted t1" "rbt_sorted t2" 
+  assumes rbt_lookup: "rbt_lookup t1 = rbt_lookup t2"
   shows "entries t1 = entries t2"
 proof -
-  from sorted lookup have "map_of (entries t1) = map_of (entries t2)"
+  from rbt_sorted rbt_lookup have "map_of (entries t1) = map_of (entries t2)"
     by (simp add: map_of_entries)
-  with sorted have "set (entries t1) = set (entries t2)"
+  with rbt_sorted have "set (entries t1) = set (entries t2)"
     by (simp add: map_of_inject_set distinct_entries)
-  with sorted show ?thesis by (simp add: set_entries_inject)
+  with rbt_sorted show ?thesis by (simp add: set_entries_inject)
 qed
 
-lemma entries_lookup:
-  assumes "sorted t1" "sorted t2" 
-  shows "entries t1 = entries t2 \<longleftrightarrow> lookup t1 = lookup t2"
+lemma entries_rbt_lookup:
+  assumes "rbt_sorted t1" "rbt_sorted t2" 
+  shows "entries t1 = entries t2 \<longleftrightarrow> rbt_lookup t1 = rbt_lookup t2"
   using assms by (auto intro: entries_eqI simp add: map_of_entries [symmetric])
 
-lemma lookup_from_in_tree: 
-  assumes "sorted t1" "sorted t2" 
-  and "\<And>v. (k\<Colon>'a\<Colon>linorder, v) \<in> set (entries t1) \<longleftrightarrow> (k, v) \<in> set (entries t2)" 
-  shows "lookup t1 k = lookup t2 k"
+lemma rbt_lookup_from_in_tree: 
+  assumes "rbt_sorted t1" "rbt_sorted t2" 
+  and "\<And>v. (k, v) \<in> set (entries t1) \<longleftrightarrow> (k, v) \<in> set (entries t2)" 
+  shows "rbt_lookup t1 k = rbt_lookup t2 k"
 proof -
-  from assms have "k \<in> dom (lookup t1) \<longleftrightarrow> k \<in> dom (lookup t2)"
-    by (simp add: keys_entries lookup_keys)
-  with assms show ?thesis by (auto simp add: lookup_in_tree [symmetric])
+  from assms have "k \<in> dom (rbt_lookup t1) \<longleftrightarrow> k \<in> dom (rbt_lookup t2)"
+    by (simp add: keys_entries rbt_lookup_keys)
+  with assms show ?thesis by (auto simp add: rbt_lookup_in_tree [symmetric])
 qed
 
+end
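
The recurring pattern in this file is to move definitions and lemmas that were fixed to
the 'a\<Colon>linorder sort into context ord / context linorder blocks, so that they are
parameterized by the class operations instead. A minimal sketch of the pattern itself
(weak_less is a hypothetical example, not part of the changeset):

  context ord
  begin

  definition weak_less :: "'a \<Rightarrow> 'a \<Rightarrow> bool" where
    "weak_less x y \<longleftrightarrow> x < y \<or> x = y"   (* may only use the ord operations *)

  end

  (* facts that additionally need linearity would then be proved in a separate
     "context linorder begin ... end" block, as in the hunks above *)
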
 
 subsubsection {* Red-black properties *}
 
@@ -290,15 +309,18 @@
   "inv2 Empty = True"
 | "inv2 (Branch c lt k v rt) = (inv2 lt \<and> inv2 rt \<and> bheight lt = bheight rt)"
 
-definition is_rbt :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> bool" where
-  "is_rbt t \<longleftrightarrow> inv1 t \<and> inv2 t \<and> color_of t = B \<and> sorted t"
+context ord begin
 
-lemma is_rbt_sorted [simp]:
-  "is_rbt t \<Longrightarrow> sorted t" by (simp add: is_rbt_def)
+definition is_rbt :: "('a, 'b) rbt \<Rightarrow> bool" where
+  "is_rbt t \<longleftrightarrow> inv1 t \<and> inv2 t \<and> color_of t = B \<and> rbt_sorted t"
+
+lemma is_rbt_rbt_sorted [simp]:
+  "is_rbt t \<Longrightarrow> rbt_sorted t" by (simp add: is_rbt_def)
 
 theorem Empty_is_rbt [simp]:
   "is_rbt Empty" by (simp add: is_rbt_def)
 
+end
 
 subsection {* Insertion *}
 
@@ -324,61 +346,65 @@
   using assms
   by (induct l k v r rule: balance.induct) auto
 
-lemma balance_tree_greater[simp]: "(v \<guillemotleft>| balance a k x b) = (v \<guillemotleft>| a \<and> v \<guillemotleft>| b \<and> v < k)" 
+context ord begin
+
+lemma balance_rbt_greater[simp]: "(v \<guillemotleft>| balance a k x b) = (v \<guillemotleft>| a \<and> v \<guillemotleft>| b \<and> v < k)" 
   by (induct a k x b rule: balance.induct) auto
 
-lemma balance_tree_less[simp]: "(balance a k x b |\<guillemotleft> v) = (a |\<guillemotleft> v \<and> b |\<guillemotleft> v \<and> k < v)"
+lemma balance_rbt_less[simp]: "(balance a k x b |\<guillemotleft> v) = (a |\<guillemotleft> v \<and> b |\<guillemotleft> v \<and> k < v)"
   by (induct a k x b rule: balance.induct) auto
 
-lemma balance_sorted: 
-  fixes k :: "'a::linorder"
-  assumes "sorted l" "sorted r" "l |\<guillemotleft> k" "k \<guillemotleft>| r"
-  shows "sorted (balance l k v r)"
+end
+
+lemma (in linorder) balance_rbt_sorted: 
+  fixes k :: "'a"
+  assumes "rbt_sorted l" "rbt_sorted r" "l |\<guillemotleft> k" "k \<guillemotleft>| r"
+  shows "rbt_sorted (balance l k v r)"
 using assms proof (induct l k v r rule: balance.induct)
   case ("2_2" a x w b y t c z s va vb vd vc)
   hence "y < z \<and> z \<guillemotleft>| Branch B va vb vd vc" 
-    by (auto simp add: tree_ord_props)
-  hence "tree_greater y (Branch B va vb vd vc)" by (blast dest: tree_greater_trans)
+    by (auto simp add: rbt_ord_props)
+  hence "y \<guillemotleft>| (Branch B va vb vd vc)" by (blast dest: rbt_greater_trans)
   with "2_2" show ?case by simp
 next
   case ("3_2" va vb vd vc x w b y s c z)
-  from "3_2" have "x < y \<and> tree_less x (Branch B va vb vd vc)" 
+  from "3_2" have "x < y \<and> Branch B va vb vd vc |\<guillemotleft> x" 
     by simp
-  hence "tree_less y (Branch B va vb vd vc)" by (blast dest: tree_less_trans)
+  hence "Branch B va vb vd vc |\<guillemotleft> y" by (blast dest: rbt_less_trans)
   with "3_2" show ?case by simp
 next
   case ("3_3" x w b y s c z t va vb vd vc)
-  from "3_3" have "y < z \<and> tree_greater z (Branch B va vb vd vc)" by simp
-  hence "tree_greater y (Branch B va vb vd vc)" by (blast dest: tree_greater_trans)
+  from "3_3" have "y < z \<and> z \<guillemotleft>| Branch B va vb vd vc" by simp
+  hence "y \<guillemotleft>| Branch B va vb vd vc" by (blast dest: rbt_greater_trans)
   with "3_3" show ?case by simp
 next
   case ("3_4" vd ve vg vf x w b y s c z t va vb vii vc)
-  hence "x < y \<and> tree_less x (Branch B vd ve vg vf)" by simp
-  hence 1: "tree_less y (Branch B vd ve vg vf)" by (blast dest: tree_less_trans)
-  from "3_4" have "y < z \<and> tree_greater z (Branch B va vb vii vc)" by simp
-  hence "tree_greater y (Branch B va vb vii vc)" by (blast dest: tree_greater_trans)
+  hence "x < y \<and> Branch B vd ve vg vf |\<guillemotleft> x" by simp
+  hence 1: "Branch B vd ve vg vf |\<guillemotleft> y" by (blast dest: rbt_less_trans)
+  from "3_4" have "y < z \<and> z \<guillemotleft>| Branch B va vb vii vc" by simp
+  hence "y \<guillemotleft>| Branch B va vb vii vc" by (blast dest: rbt_greater_trans)
   with 1 "3_4" show ?case by simp
 next
   case ("4_2" va vb vd vc x w b y s c z t dd)
-  hence "x < y \<and> tree_less x (Branch B va vb vd vc)" by simp
-  hence "tree_less y (Branch B va vb vd vc)" by (blast dest: tree_less_trans)
+  hence "x < y \<and> Branch B va vb vd vc |\<guillemotleft> x" by simp
+  hence "Branch B va vb vd vc |\<guillemotleft> y" by (blast dest: rbt_less_trans)
   with "4_2" show ?case by simp
 next
   case ("5_2" x w b y s c z t va vb vd vc)
-  hence "y < z \<and> tree_greater z (Branch B va vb vd vc)" by simp
-  hence "tree_greater y (Branch B va vb vd vc)" by (blast dest: tree_greater_trans)
+  hence "y < z \<and> z \<guillemotleft>| Branch B va vb vd vc" by simp
+  hence "y \<guillemotleft>| Branch B va vb vd vc" by (blast dest: rbt_greater_trans)
   with "5_2" show ?case by simp
 next
   case ("5_3" va vb vd vc x w b y s c z t)
-  hence "x < y \<and> tree_less x (Branch B va vb vd vc)" by simp
-  hence "tree_less y (Branch B va vb vd vc)" by (blast dest: tree_less_trans)
+  hence "x < y \<and> Branch B va vb vd vc |\<guillemotleft> x" by simp
+  hence "Branch B va vb vd vc |\<guillemotleft> y" by (blast dest: rbt_less_trans)
   with "5_3" show ?case by simp
 next
   case ("5_4" va vb vg vc x w b y s c z t vd ve vii vf)
-  hence "x < y \<and> tree_less x (Branch B va vb vg vc)" by simp
-  hence 1: "tree_less y (Branch B va vb vg vc)" by (blast dest: tree_less_trans)
-  from "5_4" have "y < z \<and> tree_greater z (Branch B vd ve vii vf)" by simp
-  hence "tree_greater y (Branch B vd ve vii vf)" by (blast dest: tree_greater_trans)
+  hence "x < y \<and> Branch B va vb vg vc |\<guillemotleft> x" by simp
+  hence 1: "Branch B va vb vg vc |\<guillemotleft> y" by (blast dest: rbt_less_trans)
+  from "5_4" have "y < z \<and> z \<guillemotleft>| Branch B vd ve vii vf" by simp
+  hence "y \<guillemotleft>| Branch B vd ve vii vf" by (blast dest: rbt_greater_trans)
   with 1 "5_4" show ?case by simp
 qed simp+
 
@@ -394,11 +420,11 @@
   "entry_in_tree k x (balance l v y r) \<longleftrightarrow> entry_in_tree k x l \<or> k = v \<and> x = y \<or> entry_in_tree k x r"
   by (auto simp add: keys_def)
 
-lemma lookup_balance[simp]: 
-fixes k :: "'a::linorder"
-assumes "sorted l" "sorted r" "l |\<guillemotleft> k" "k \<guillemotleft>| r"
-shows "lookup (balance l k v r) x = lookup (Branch B l k v r) x"
-by (rule lookup_from_in_tree) (auto simp:assms balance_in_tree balance_sorted)
+lemma (in linorder) rbt_lookup_balance[simp]: 
+fixes k :: "'a"
+assumes "rbt_sorted l" "rbt_sorted r" "l |\<guillemotleft> k" "k \<guillemotleft>| r"
+shows "rbt_lookup (balance l k v r) x = rbt_lookup (Branch B l k v r) x"
+by (rule rbt_lookup_from_in_tree) (auto simp:assms balance_in_tree balance_rbt_sorted)
 
 primrec paint :: "color \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
 where
@@ -409,95 +435,112 @@
 lemma paint_inv1[simp]: "inv1l t \<Longrightarrow> inv1 (paint B t)" by (cases t) auto
 lemma paint_inv2[simp]: "inv2 t \<Longrightarrow> inv2 (paint c t)" by (cases t) auto
 lemma paint_color_of[simp]: "color_of (paint B t) = B" by (cases t) auto
-lemma paint_sorted[simp]: "sorted t \<Longrightarrow> sorted (paint c t)" by (cases t) auto
 lemma paint_in_tree[simp]: "entry_in_tree k x (paint c t) = entry_in_tree k x t" by (cases t) auto
-lemma paint_lookup[simp]: "lookup (paint c t) = lookup t" by (rule ext) (cases t, auto)
-lemma paint_tree_greater[simp]: "(v \<guillemotleft>| paint c t) = (v \<guillemotleft>| t)" by (cases t) auto
-lemma paint_tree_less[simp]: "(paint c t |\<guillemotleft> v) = (t |\<guillemotleft> v)" by (cases t) auto
+
+context ord begin
+
+lemma paint_rbt_sorted[simp]: "rbt_sorted t \<Longrightarrow> rbt_sorted (paint c t)" by (cases t) auto
+lemma paint_rbt_lookup[simp]: "rbt_lookup (paint c t) = rbt_lookup t" by (rule ext) (cases t, auto)
+lemma paint_rbt_greater[simp]: "(v \<guillemotleft>| paint c t) = (v \<guillemotleft>| t)" by (cases t) auto
+lemma paint_rbt_less[simp]: "(paint c t |\<guillemotleft> v) = (t |\<guillemotleft> v)" by (cases t) auto
 
 fun
-  ins :: "('a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
+  rbt_ins :: "('a \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
 where
-  "ins f k v Empty = Branch R Empty k v Empty" |
-  "ins f k v (Branch B l x y r) = (if k < x then balance (ins f k v l) x y r
-                               else if k > x then balance l x y (ins f k v r)
-                               else Branch B l x (f k y v) r)" |
-  "ins f k v (Branch R l x y r) = (if k < x then Branch R (ins f k v l) x y r
-                               else if k > x then Branch R l x y (ins f k v r)
-                               else Branch R l x (f k y v) r)"
+  "rbt_ins f k v Empty = Branch R Empty k v Empty" |
+  "rbt_ins f k v (Branch B l x y r) = (if k < x then balance (rbt_ins f k v l) x y r
+                                       else if k > x then balance l x y (rbt_ins f k v r)
+                                       else Branch B l x (f k y v) r)" |
+  "rbt_ins f k v (Branch R l x y r) = (if k < x then Branch R (rbt_ins f k v l) x y r
+                                       else if k > x then Branch R l x y (rbt_ins f k v r)
+                                       else Branch R l x (f k y v) r)"
 
 lemma ins_inv1_inv2: 
   assumes "inv1 t" "inv2 t"
-  shows "inv2 (ins f k x t)" "bheight (ins f k x t) = bheight t" 
-  "color_of t = B \<Longrightarrow> inv1 (ins f k x t)" "inv1l (ins f k x t)"
+  shows "inv2 (rbt_ins f k x t)" "bheight (rbt_ins f k x t) = bheight t" 
+  "color_of t = B \<Longrightarrow> inv1 (rbt_ins f k x t)" "inv1l (rbt_ins f k x t)"
   using assms
-  by (induct f k x t rule: ins.induct) (auto simp: balance_inv1 balance_inv2 balance_bheight)
+  by (induct f k x t rule: rbt_ins.induct) (auto simp: balance_inv1 balance_inv2 balance_bheight)
+
+end
+
+context linorder begin
 
-lemma ins_tree_greater[simp]: "(v \<guillemotleft>| ins f k x t) = (v \<guillemotleft>| t \<and> k > v)"
-  by (induct f k x t rule: ins.induct) auto
-lemma ins_tree_less[simp]: "(ins f k x t |\<guillemotleft> v) = (t |\<guillemotleft> v \<and> k < v)"
-  by (induct f k x t rule: ins.induct) auto
-lemma ins_sorted[simp]: "sorted t \<Longrightarrow> sorted (ins f k x t)"
-  by (induct f k x t rule: ins.induct) (auto simp: balance_sorted)
+lemma ins_rbt_greater[simp]: "(v \<guillemotleft>| rbt_ins f (k :: 'a) x t) = (v \<guillemotleft>| t \<and> k > v)"
+  by (induct f k x t rule: rbt_ins.induct) auto
+lemma ins_rbt_less[simp]: "(rbt_ins f k x t |\<guillemotleft> v) = (t |\<guillemotleft> v \<and> k < v)"
+  by (induct f k x t rule: rbt_ins.induct) auto
+lemma ins_rbt_sorted[simp]: "rbt_sorted t \<Longrightarrow> rbt_sorted (rbt_ins f k x t)"
+  by (induct f k x t rule: rbt_ins.induct) (auto simp: balance_rbt_sorted)
 
-lemma keys_ins: "set (keys (ins f k v t)) = { k } \<union> set (keys t)"
-  by (induct f k v t rule: ins.induct) auto
+lemma keys_ins: "set (keys (rbt_ins f k v t)) = { k } \<union> set (keys t)"
+  by (induct f k v t rule: rbt_ins.induct) auto
 
-lemma lookup_ins: 
-  fixes k :: "'a::linorder"
-  assumes "sorted t"
-  shows "lookup (ins f k v t) x = ((lookup t)(k |-> case lookup t k of None \<Rightarrow> v 
-                                                       | Some w \<Rightarrow> f k w v)) x"
-using assms by (induct f k v t rule: ins.induct) auto
+lemma rbt_lookup_ins: 
+  fixes k :: "'a"
+  assumes "rbt_sorted t"
+  shows "rbt_lookup (rbt_ins f k v t) x = ((rbt_lookup t)(k |-> case rbt_lookup t k of None \<Rightarrow> v 
+                                                                | Some w \<Rightarrow> f k w v)) x"
+using assms by (induct f k v t rule: rbt_ins.induct) auto
+
+end
+
+context ord begin
+
+definition rbt_insert_with_key :: "('a \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
+where "rbt_insert_with_key f k v t = paint B (rbt_ins f k v t)"
+
+definition rbt_insertw_def: "rbt_insert_with f = rbt_insert_with_key (\<lambda>_. f)"
 
-definition
-  insert_with_key :: "('a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
-where
-  "insert_with_key f k v t = paint B (ins f k v t)"
+definition rbt_insert :: "'a \<Rightarrow> 'b \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" where
+  "rbt_insert = rbt_insert_with_key (\<lambda>_ _ nv. nv)"
+
+end
+
+context linorder begin
 
-lemma insertwk_sorted: "sorted t \<Longrightarrow> sorted (insert_with_key f k x t)"
-  by (auto simp: insert_with_key_def)
+lemma rbt_insertwk_rbt_sorted: "rbt_sorted t \<Longrightarrow> rbt_sorted (rbt_insert_with_key f (k :: 'a) x t)"
+  by (auto simp: rbt_insert_with_key_def)
 
-theorem insertwk_is_rbt: 
+theorem rbt_insertwk_is_rbt: 
   assumes inv: "is_rbt t" 
-  shows "is_rbt (insert_with_key f k x t)"
+  shows "is_rbt (rbt_insert_with_key f k x t)"
 using assms
-unfolding insert_with_key_def is_rbt_def
+unfolding rbt_insert_with_key_def is_rbt_def
 by (auto simp: ins_inv1_inv2)
 
-lemma lookup_insertwk: 
-  assumes "sorted t"
-  shows "lookup (insert_with_key f k v t) x = ((lookup t)(k |-> case lookup t k of None \<Rightarrow> v 
+lemma rbt_lookup_rbt_insertwk: 
+  assumes "rbt_sorted t"
+  shows "rbt_lookup (rbt_insert_with_key f k v t) x = ((rbt_lookup t)(k |-> case rbt_lookup t k of None \<Rightarrow> v 
                                                        | Some w \<Rightarrow> f k w v)) x"
-unfolding insert_with_key_def using assms
-by (simp add:lookup_ins)
+unfolding rbt_insert_with_key_def using assms
+by (simp add:rbt_lookup_ins)
 
-definition
-  insertw_def: "insert_with f = insert_with_key (\<lambda>_. f)"
+lemma rbt_insertw_rbt_sorted: "rbt_sorted t \<Longrightarrow> rbt_sorted (rbt_insert_with f k v t)" 
+  by (simp add: rbt_insertwk_rbt_sorted rbt_insertw_def)
+theorem rbt_insertw_is_rbt: "is_rbt t \<Longrightarrow> is_rbt (rbt_insert_with f k v t)"
+  by (simp add: rbt_insertwk_is_rbt rbt_insertw_def)
 
-lemma insertw_sorted: "sorted t \<Longrightarrow> sorted (insert_with f k v t)" by (simp add: insertwk_sorted insertw_def)
-theorem insertw_is_rbt: "is_rbt t \<Longrightarrow> is_rbt (insert_with f k v t)" by (simp add: insertwk_is_rbt insertw_def)
-
-lemma lookup_insertw:
+lemma rbt_lookup_rbt_insertw:
   assumes "is_rbt t"
-  shows "lookup (insert_with f k v t) = (lookup t)(k \<mapsto> (if k:dom (lookup t) then f (the (lookup t k)) v else v))"
+  shows "rbt_lookup (rbt_insert_with f k v t) = (rbt_lookup t)(k \<mapsto> (if k:dom (rbt_lookup t) then f (the (rbt_lookup t k)) v else v))"
 using assms
-unfolding insertw_def
-by (rule_tac ext) (cases "lookup t k", auto simp:lookup_insertwk dom_def)
+unfolding rbt_insertw_def
+by (rule_tac ext) (cases "rbt_lookup t k", auto simp:rbt_lookup_rbt_insertwk dom_def)
 
-definition insert :: "'a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" where
-  "insert = insert_with_key (\<lambda>_ _ nv. nv)"
+lemma rbt_insert_rbt_sorted: "rbt_sorted t \<Longrightarrow> rbt_sorted (rbt_insert k v t)"
+  by (simp add: rbt_insertwk_rbt_sorted rbt_insert_def)
+theorem rbt_insert_is_rbt [simp]: "is_rbt t \<Longrightarrow> is_rbt (rbt_insert k v t)"
+  by (simp add: rbt_insertwk_is_rbt rbt_insert_def)
 
-lemma insert_sorted: "sorted t \<Longrightarrow> sorted (insert k v t)" by (simp add: insertwk_sorted insert_def)
-theorem insert_is_rbt [simp]: "is_rbt t \<Longrightarrow> is_rbt (insert k v t)" by (simp add: insertwk_is_rbt insert_def)
-
-lemma lookup_insert: 
+lemma rbt_lookup_rbt_insert: 
   assumes "is_rbt t"
-  shows "lookup (insert k v t) = (lookup t)(k\<mapsto>v)"
-unfolding insert_def
+  shows "rbt_lookup (rbt_insert k v t) = (rbt_lookup t)(k\<mapsto>v)"
+unfolding rbt_insert_def
 using assms
-by (rule_tac ext) (simp add: lookup_insertwk split:option.split)
+by (rule_tac ext) (simp add: rbt_lookup_rbt_insertwk split:option.split)
 
+end
 
 subsection {* Deletion *}
 
@@ -532,26 +575,31 @@
 lemma balance_left_inv1l: "\<lbrakk> inv1l lt; inv1 rt \<rbrakk> \<Longrightarrow> inv1l (balance_left lt k x rt)"
 by (induct lt k x rt rule: balance_left.induct) (auto simp: balance_inv1)
 
-lemma balance_left_sorted: "\<lbrakk> sorted l; sorted r; tree_less k l; tree_greater k r \<rbrakk> \<Longrightarrow> sorted (balance_left l k v r)"
+lemma (in linorder) balance_left_rbt_sorted: 
+  "\<lbrakk> rbt_sorted l; rbt_sorted r; rbt_less k l; k \<guillemotleft>| r \<rbrakk> \<Longrightarrow> rbt_sorted (balance_left l k v r)"
 apply (induct l k v r rule: balance_left.induct)
-apply (auto simp: balance_sorted)
-apply (unfold tree_greater_prop tree_less_prop)
+apply (auto simp: balance_rbt_sorted)
+apply (unfold rbt_greater_prop rbt_less_prop)
 by force+
 
-lemma balance_left_tree_greater: 
-  fixes k :: "'a::order"
+context order begin
+
+lemma balance_left_rbt_greater: 
+  fixes k :: "'a"
   assumes "k \<guillemotleft>| a" "k \<guillemotleft>| b" "k < x" 
   shows "k \<guillemotleft>| balance_left a x t b"
 using assms 
 by (induct a x t b rule: balance_left.induct) auto
 
-lemma balance_left_tree_less: 
-  fixes k :: "'a::order"
+lemma balance_left_rbt_less: 
+  fixes k :: "'a"
   assumes "a |\<guillemotleft> k" "b |\<guillemotleft> k" "x < k" 
   shows "balance_left a x t b |\<guillemotleft> k"
 using assms
 by (induct a x t b rule: balance_left.induct) auto
 
+end
+
 lemma balance_left_in_tree: 
   assumes "inv1l l" "inv1 r" "bheight l + 1 = bheight r"
   shows "entry_in_tree k v (balance_left l a b r) = (entry_in_tree k v l \<or> k = a \<and> v = b \<or> entry_in_tree k v r)"
@@ -578,24 +626,29 @@
 lemma balance_right_inv1l: "\<lbrakk> inv1 lt; inv1l rt \<rbrakk> \<Longrightarrow>inv1l (balance_right lt k x rt)"
 by (induct lt k x rt rule: balance_right.induct) (auto simp: balance_inv1)
 
-lemma balance_right_sorted: "\<lbrakk> sorted l; sorted r; tree_less k l; tree_greater k r \<rbrakk> \<Longrightarrow> sorted (balance_right l k v r)"
+lemma (in linorder) balance_right_rbt_sorted:
+  "\<lbrakk> rbt_sorted l; rbt_sorted r; rbt_less k l; k \<guillemotleft>| r \<rbrakk> \<Longrightarrow> rbt_sorted (balance_right l k v r)"
 apply (induct l k v r rule: balance_right.induct)
-apply (auto simp:balance_sorted)
-apply (unfold tree_less_prop tree_greater_prop)
+apply (auto simp:balance_rbt_sorted)
+apply (unfold rbt_less_prop rbt_greater_prop)
 by force+
 
-lemma balance_right_tree_greater: 
-  fixes k :: "'a::order"
+context order begin
+
+lemma balance_right_rbt_greater: 
+  fixes k :: "'a"
   assumes "k \<guillemotleft>| a" "k \<guillemotleft>| b" "k < x" 
   shows "k \<guillemotleft>| balance_right a x t b"
 using assms by (induct a x t b rule: balance_right.induct) auto
 
-lemma balance_right_tree_less: 
-  fixes k :: "'a::order"
+lemma balance_right_rbt_less: 
+  fixes k :: "'a"
   assumes "a |\<guillemotleft> k" "b |\<guillemotleft> k" "x < k" 
   shows "balance_right a x t b |\<guillemotleft> k"
 using assms by (induct a x t b rule: balance_right.induct) auto
 
+end
+
 lemma balance_right_in_tree:
   assumes "inv1 l" "inv1l r" "bheight l = bheight r + 1" "inv2 l" "inv2 r"
   shows "entry_in_tree x y (balance_right l k v r) = (entry_in_tree x y l \<or> x = k \<and> y = v \<or> entry_in_tree x y r)"
@@ -607,11 +660,11 @@
   "combine Empty x = x" 
 | "combine x Empty = x" 
 | "combine (Branch R a k x b) (Branch R c s y d) = (case (combine b c) of
-                                      Branch R b2 t z c2 \<Rightarrow> (Branch R (Branch R a k x b2) t z (Branch R c2 s y d)) |
-                                      bc \<Rightarrow> Branch R a k x (Branch R bc s y d))" 
+                                    Branch R b2 t z c2 \<Rightarrow> (Branch R (Branch R a k x b2) t z (Branch R c2 s y d)) |
+                                    bc \<Rightarrow> Branch R a k x (Branch R bc s y d))" 
 | "combine (Branch B a k x b) (Branch B c s y d) = (case (combine b c) of
-                                      Branch R b2 t z c2 \<Rightarrow> Branch R (Branch B a k x b2) t z (Branch B c2 s y d) |
-                                      bc \<Rightarrow> balance_left a k x (Branch B bc s y d))" 
+                                    Branch R b2 t z c2 \<Rightarrow> Branch R (Branch B a k x b2) t z (Branch B c2 s y d) |
+                                    bc \<Rightarrow> balance_left a k x (Branch B bc s y d))" 
 | "combine a (Branch R b k x c) = Branch R (combine a b) k x c" 
 | "combine (Branch R a k x b) c = Branch R a k x (combine b c)" 
 
@@ -630,26 +683,28 @@
 by (induct lt rt rule: combine.induct)
    (auto simp: balance_left_inv1 split: rbt.splits color.splits)
 
-lemma combine_tree_greater[simp]: 
-  fixes k :: "'a::linorder"
+context linorder begin
+
+lemma combine_rbt_greater[simp]: 
+  fixes k :: "'a"
   assumes "k \<guillemotleft>| l" "k \<guillemotleft>| r" 
   shows "k \<guillemotleft>| combine l r"
 using assms 
 by (induct l r rule: combine.induct)
-   (auto simp: balance_left_tree_greater split:rbt.splits color.splits)
+   (auto simp: balance_left_rbt_greater split:rbt.splits color.splits)
 
-lemma combine_tree_less[simp]: 
-  fixes k :: "'a::linorder"
+lemma combine_rbt_less[simp]: 
+  fixes k :: "'a"
   assumes "l |\<guillemotleft> k" "r |\<guillemotleft> k" 
   shows "combine l r |\<guillemotleft> k"
 using assms 
 by (induct l r rule: combine.induct)
-   (auto simp: balance_left_tree_less split:rbt.splits color.splits)
+   (auto simp: balance_left_rbt_less split:rbt.splits color.splits)
 
-lemma combine_sorted: 
-  fixes k :: "'a::linorder"
-  assumes "sorted l" "sorted r" "l |\<guillemotleft> k" "k \<guillemotleft>| r"
-  shows "sorted (combine l r)"
+lemma combine_rbt_sorted: 
+  fixes k :: "'a"
+  assumes "rbt_sorted l" "rbt_sorted r" "l |\<guillemotleft> k" "k \<guillemotleft>| r"
+  shows "rbt_sorted (combine l r)"
 using assms proof (induct l r rule: combine.induct)
   case (3 a x v b c y w d)
   hence ineqs: "a |\<guillemotleft> x" "x \<guillemotleft>| b" "b |\<guillemotleft> k" "k \<guillemotleft>| c" "c |\<guillemotleft> y" "y \<guillemotleft>| d"
@@ -657,48 +712,52 @@
   with 3
   show ?case
     by (cases "combine b c" rule: rbt_cases)
-      (auto, (metis combine_tree_greater combine_tree_less ineqs ineqs tree_less_simps(2) tree_greater_simps(2) tree_greater_trans tree_less_trans)+)
+      (auto, (metis combine_rbt_greater combine_rbt_less ineqs ineqs rbt_less_simps(2) rbt_greater_simps(2) rbt_greater_trans rbt_less_trans)+)
 next
   case (4 a x v b c y w d)
-  hence "x < k \<and> tree_greater k c" by simp
-  hence "tree_greater x c" by (blast dest: tree_greater_trans)
-  with 4 have 2: "tree_greater x (combine b c)" by (simp add: combine_tree_greater)
-  from 4 have "k < y \<and> tree_less k b" by simp
-  hence "tree_less y b" by (blast dest: tree_less_trans)
-  with 4 have 3: "tree_less y (combine b c)" by (simp add: combine_tree_less)
+  hence "x < k \<and> rbt_greater k c" by simp
+  hence "rbt_greater x c" by (blast dest: rbt_greater_trans)
+  with 4 have 2: "rbt_greater x (combine b c)" by (simp add: combine_rbt_greater)
+  from 4 have "k < y \<and> rbt_less k b" by simp
+  hence "rbt_less y b" by (blast dest: rbt_less_trans)
+  with 4 have 3: "rbt_less y (combine b c)" by (simp add: combine_rbt_less)
   show ?case
   proof (cases "combine b c" rule: rbt_cases)
     case Empty
-    from 4 have "x < y \<and> tree_greater y d" by auto
-    hence "tree_greater x d" by (blast dest: tree_greater_trans)
-    with 4 Empty have "sorted a" and "sorted (Branch B Empty y w d)" and "tree_less x a" and "tree_greater x (Branch B Empty y w d)" by auto
-    with Empty show ?thesis by (simp add: balance_left_sorted)
+    from 4 have "x < y \<and> rbt_greater y d" by auto
+    hence "rbt_greater x d" by (blast dest: rbt_greater_trans)
+    with 4 Empty have "rbt_sorted a" and "rbt_sorted (Branch B Empty y w d)"
+      and "rbt_less x a" and "rbt_greater x (Branch B Empty y w d)" by auto
+    with Empty show ?thesis by (simp add: balance_left_rbt_sorted)
   next
     case (Red lta va ka rta)
-    with 2 4 have "x < va \<and> tree_less x a" by simp
-    hence 5: "tree_less va a" by (blast dest: tree_less_trans)
-    from Red 3 4 have "va < y \<and> tree_greater y d" by simp
-    hence "tree_greater va d" by (blast dest: tree_greater_trans)
+    with 2 4 have "x < va \<and> rbt_less x a" by simp
+    hence 5: "rbt_less va a" by (blast dest: rbt_less_trans)
+    from Red 3 4 have "va < y \<and> rbt_greater y d" by simp
+    hence "rbt_greater va d" by (blast dest: rbt_greater_trans)
     with Red 2 3 4 5 show ?thesis by simp
   next
     case (Black lta va ka rta)
-    from 4 have "x < y \<and> tree_greater y d" by auto
-    hence "tree_greater x d" by (blast dest: tree_greater_trans)
-    with Black 2 3 4 have "sorted a" and "sorted (Branch B (combine b c) y w d)" and "tree_less x a" and "tree_greater x (Branch B (combine b c) y w d)" by auto
-    with Black show ?thesis by (simp add: balance_left_sorted)
+    from 4 have "x < y \<and> rbt_greater y d" by auto
+    hence "rbt_greater x d" by (blast dest: rbt_greater_trans)
+    with Black 2 3 4 have "rbt_sorted a" and "rbt_sorted (Branch B (combine b c) y w d)" 
+      and "rbt_less x a" and "rbt_greater x (Branch B (combine b c) y w d)" by auto
+    with Black show ?thesis by (simp add: balance_left_rbt_sorted)
   qed
 next
   case (5 va vb vd vc b x w c)
-  hence "k < x \<and> tree_less k (Branch B va vb vd vc)" by simp
-  hence "tree_less x (Branch B va vb vd vc)" by (blast dest: tree_less_trans)
-  with 5 show ?case by (simp add: combine_tree_less)
+  hence "k < x \<and> rbt_less k (Branch B va vb vd vc)" by simp
+  hence "rbt_less x (Branch B va vb vd vc)" by (blast dest: rbt_less_trans)
+  with 5 show ?case by (simp add: combine_rbt_less)
 next
   case (6 a x v b va vb vd vc)
-  hence "x < k \<and> tree_greater k (Branch B va vb vd vc)" by simp
-  hence "tree_greater x (Branch B va vb vd vc)" by (blast dest: tree_greater_trans)
-  with 6 show ?case by (simp add: combine_tree_greater)
+  hence "x < k \<and> rbt_greater k (Branch B va vb vd vc)" by simp
+  hence "rbt_greater x (Branch B va vb vd vc)" by (blast dest: rbt_greater_trans)
+  with 6 show ?case by (simp add: combine_rbt_greater)
 qed simp+
 
+end
+
 lemma combine_in_tree: 
   assumes "inv2 l" "inv2 r" "bheight l = bheight r" "inv1 l" "inv1 r"
   shows "entry_in_tree k v (combine l r) = (entry_in_tree k v l \<or> entry_in_tree k v r)"
@@ -721,29 +780,43 @@
   qed 
 qed (auto split: rbt.splits color.splits)
 
+context ord begin
+
 fun
-  del_from_left :: "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt" and
-  del_from_right :: "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt" and
-  del :: "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
+  rbt_del_from_left :: "'a \<Rightarrow> ('a,'b) rbt \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt" and
+  rbt_del_from_right :: "'a \<Rightarrow> ('a,'b) rbt \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt" and
+  rbt_del :: "'a \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
 where
-  "del x Empty = Empty" |
-  "del x (Branch c a y s b) = (if x < y then del_from_left x a y s b else (if x > y then del_from_right x a y s b else combine a b))" |
-  "del_from_left x (Branch B lt z v rt) y s b = balance_left (del x (Branch B lt z v rt)) y s b" |
-  "del_from_left x a y s b = Branch R (del x a) y s b" |
-  "del_from_right x a y s (Branch B lt z v rt) = balance_right a y s (del x (Branch B lt z v rt))" | 
-  "del_from_right x a y s b = Branch R a y s (del x b)"
+  "rbt_del x Empty = Empty" |
+  "rbt_del x (Branch c a y s b) = 
+   (if x < y then rbt_del_from_left x a y s b 
+    else (if x > y then rbt_del_from_right x a y s b else combine a b))" |
+  "rbt_del_from_left x (Branch B lt z v rt) y s b = balance_left (rbt_del x (Branch B lt z v rt)) y s b" |
+  "rbt_del_from_left x a y s b = Branch R (rbt_del x a) y s b" |
+  "rbt_del_from_right x a y s (Branch B lt z v rt) = balance_right a y s (rbt_del x (Branch B lt z v rt))" | 
+  "rbt_del_from_right x a y s b = Branch R a y s (rbt_del x b)"
+
+end
+
+context linorder begin
 
 lemma 
   assumes "inv2 lt" "inv1 lt"
   shows
   "\<lbrakk>inv2 rt; bheight lt = bheight rt; inv1 rt\<rbrakk> \<Longrightarrow>
-  inv2 (del_from_left x lt k v rt) \<and> bheight (del_from_left x lt k v rt) = bheight lt \<and> (color_of lt = B \<and> color_of rt = B \<and> inv1 (del_from_left x lt k v rt) \<or> (color_of lt \<noteq> B \<or> color_of rt \<noteq> B) \<and> inv1l (del_from_left x lt k v rt))"
+   inv2 (rbt_del_from_left x lt k v rt) \<and> 
+   bheight (rbt_del_from_left x lt k v rt) = bheight lt \<and> 
+   (color_of lt = B \<and> color_of rt = B \<and> inv1 (rbt_del_from_left x lt k v rt) \<or> 
+    (color_of lt \<noteq> B \<or> color_of rt \<noteq> B) \<and> inv1l (rbt_del_from_left x lt k v rt))"
   and "\<lbrakk>inv2 rt; bheight lt = bheight rt; inv1 rt\<rbrakk> \<Longrightarrow>
-  inv2 (del_from_right x lt k v rt) \<and> bheight (del_from_right x lt k v rt) = bheight lt \<and> (color_of lt = B \<and> color_of rt = B \<and> inv1 (del_from_right x lt k v rt) \<or> (color_of lt \<noteq> B \<or> color_of rt \<noteq> B) \<and> inv1l (del_from_right x lt k v rt))"
-  and del_inv1_inv2: "inv2 (del x lt) \<and> (color_of lt = R \<and> bheight (del x lt) = bheight lt \<and> inv1 (del x lt) 
-  \<or> color_of lt = B \<and> bheight (del x lt) = bheight lt - 1 \<and> inv1l (del x lt))"
+  inv2 (rbt_del_from_right x lt k v rt) \<and> 
+  bheight (rbt_del_from_right x lt k v rt) = bheight lt \<and> 
+  (color_of lt = B \<and> color_of rt = B \<and> inv1 (rbt_del_from_right x lt k v rt) \<or> 
+   (color_of lt \<noteq> B \<or> color_of rt \<noteq> B) \<and> inv1l (rbt_del_from_right x lt k v rt))"
+  and rbt_del_inv1_inv2: "inv2 (rbt_del x lt) \<and> (color_of lt = R \<and> bheight (rbt_del x lt) = bheight lt \<and> inv1 (rbt_del x lt) 
+  \<or> color_of lt = B \<and> bheight (rbt_del x lt) = bheight lt - 1 \<and> inv1l (rbt_del x lt))"
 using assms
-proof (induct x lt k v rt and x lt k v rt and x lt rule: del_from_left_del_from_right_del.induct)
+proof (induct x lt k v rt and x lt k v rt and x lt rule: rbt_del_from_left_rbt_del_from_right_rbt_del.induct)
 case (2 y c _ y')
   have "y = y' \<or> y < y' \<or> y > y'" by auto
   thus ?case proof (elim disjE)
@@ -767,55 +840,55 @@
 qed auto
 
 lemma 
-  del_from_left_tree_less: "\<lbrakk>tree_less v lt; tree_less v rt; k < v\<rbrakk> \<Longrightarrow> tree_less v (del_from_left x lt k y rt)"
-  and del_from_right_tree_less: "\<lbrakk>tree_less v lt; tree_less v rt; k < v\<rbrakk> \<Longrightarrow> tree_less v (del_from_right x lt k y rt)"
-  and del_tree_less: "tree_less v lt \<Longrightarrow> tree_less v (del x lt)"
-by (induct x lt k y rt and x lt k y rt and x lt rule: del_from_left_del_from_right_del.induct) 
-   (auto simp: balance_left_tree_less balance_right_tree_less)
+  rbt_del_from_left_rbt_less: "\<lbrakk> lt |\<guillemotleft> v; rt |\<guillemotleft> v; k < v\<rbrakk> \<Longrightarrow> rbt_del_from_left x lt k y rt |\<guillemotleft> v"
+  and rbt_del_from_right_rbt_less: "\<lbrakk>lt |\<guillemotleft> v; rt |\<guillemotleft> v; k < v\<rbrakk> \<Longrightarrow> rbt_del_from_right x lt k y rt |\<guillemotleft> v"
+  and rbt_del_rbt_less: "lt |\<guillemotleft> v \<Longrightarrow> rbt_del x lt |\<guillemotleft> v"
+by (induct x lt k y rt and x lt k y rt and x lt rule: rbt_del_from_left_rbt_del_from_right_rbt_del.induct) 
+   (auto simp: balance_left_rbt_less balance_right_rbt_less)
 
-lemma del_from_left_tree_greater: "\<lbrakk>tree_greater v lt; tree_greater v rt; k > v\<rbrakk> \<Longrightarrow> tree_greater v (del_from_left x lt k y rt)"
-  and del_from_right_tree_greater: "\<lbrakk>tree_greater v lt; tree_greater v rt; k > v\<rbrakk> \<Longrightarrow> tree_greater v (del_from_right x lt k y rt)"
-  and del_tree_greater: "tree_greater v lt \<Longrightarrow> tree_greater v (del x lt)"
-by (induct x lt k y rt and x lt k y rt and x lt rule: del_from_left_del_from_right_del.induct)
-   (auto simp: balance_left_tree_greater balance_right_tree_greater)
+lemma rbt_del_from_left_rbt_greater: "\<lbrakk>v \<guillemotleft>| lt; v \<guillemotleft>| rt; k > v\<rbrakk> \<Longrightarrow> v \<guillemotleft>| rbt_del_from_left x lt k y rt"
+  and rbt_del_from_right_rbt_greater: "\<lbrakk>v \<guillemotleft>| lt; v \<guillemotleft>| rt; k > v\<rbrakk> \<Longrightarrow> v \<guillemotleft>| rbt_del_from_right x lt k y rt"
+  and rbt_del_rbt_greater: "v \<guillemotleft>| lt \<Longrightarrow> v \<guillemotleft>| rbt_del x lt"
+by (induct x lt k y rt and x lt k y rt and x lt rule: rbt_del_from_left_rbt_del_from_right_rbt_del.induct)
+   (auto simp: balance_left_rbt_greater balance_right_rbt_greater)
 
-lemma "\<lbrakk>sorted lt; sorted rt; tree_less k lt; tree_greater k rt\<rbrakk> \<Longrightarrow> sorted (del_from_left x lt k y rt)"
-  and "\<lbrakk>sorted lt; sorted rt; tree_less k lt; tree_greater k rt\<rbrakk> \<Longrightarrow> sorted (del_from_right x lt k y rt)"
-  and del_sorted: "sorted lt \<Longrightarrow> sorted (del x lt)"
-proof (induct x lt k y rt and x lt k y rt and x lt rule: del_from_left_del_from_right_del.induct)
+lemma "\<lbrakk>rbt_sorted lt; rbt_sorted rt; lt |\<guillemotleft> k; k \<guillemotleft>| rt\<rbrakk> \<Longrightarrow> rbt_sorted (rbt_del_from_left x lt k y rt)"
+  and "\<lbrakk>rbt_sorted lt; rbt_sorted rt; lt |\<guillemotleft> k; k \<guillemotleft>| rt\<rbrakk> \<Longrightarrow> rbt_sorted (rbt_del_from_right x lt k y rt)"
+  and rbt_del_rbt_sorted: "rbt_sorted lt \<Longrightarrow> rbt_sorted (rbt_del x lt)"
+proof (induct x lt k y rt and x lt k y rt and x lt rule: rbt_del_from_left_rbt_del_from_right_rbt_del.induct)
   case (3 x lta zz v rta yy ss bb)
-  from 3 have "tree_less yy (Branch B lta zz v rta)" by simp
-  hence "tree_less yy (del x (Branch B lta zz v rta))" by (rule del_tree_less)
-  with 3 show ?case by (simp add: balance_left_sorted)
+  from 3 have "Branch B lta zz v rta |\<guillemotleft> yy" by simp
+  hence "rbt_del x (Branch B lta zz v rta) |\<guillemotleft> yy" by (rule rbt_del_rbt_less)
+  with 3 show ?case by (simp add: balance_left_rbt_sorted)
 next
   case ("4_2" x vaa vbb vdd vc yy ss bb)
-  hence "tree_less yy (Branch R vaa vbb vdd vc)" by simp
-  hence "tree_less yy (del x (Branch R vaa vbb vdd vc))" by (rule del_tree_less)
+  hence "Branch R vaa vbb vdd vc |\<guillemotleft> yy" by simp
+  hence "rbt_del x (Branch R vaa vbb vdd vc) |\<guillemotleft> yy" by (rule rbt_del_rbt_less)
   with "4_2" show ?case by simp
 next
   case (5 x aa yy ss lta zz v rta) 
-  hence "tree_greater yy (Branch B lta zz v rta)" by simp
-  hence "tree_greater yy (del x (Branch B lta zz v rta))" by (rule del_tree_greater)
-  with 5 show ?case by (simp add: balance_right_sorted)
+  hence "yy \<guillemotleft>| Branch B lta zz v rta" by simp
+  hence "yy \<guillemotleft>| rbt_del x (Branch B lta zz v rta)" by (rule rbt_del_rbt_greater)
+  with 5 show ?case by (simp add: balance_right_rbt_sorted)
 next
   case ("6_2" x aa yy ss vaa vbb vdd vc)
-  hence "tree_greater yy (Branch R vaa vbb vdd vc)" by simp
-  hence "tree_greater yy (del x (Branch R vaa vbb vdd vc))" by (rule del_tree_greater)
+  hence "yy \<guillemotleft>| Branch R vaa vbb vdd vc" by simp
+  hence "yy \<guillemotleft>| rbt_del x (Branch R vaa vbb vdd vc)" by (rule rbt_del_rbt_greater)
   with "6_2" show ?case by simp
-qed (auto simp: combine_sorted)
+qed (auto simp: combine_rbt_sorted)
 
-lemma "\<lbrakk>sorted lt; sorted rt; tree_less kt lt; tree_greater kt rt; inv1 lt; inv1 rt; inv2 lt; inv2 rt; bheight lt = bheight rt; x < kt\<rbrakk> \<Longrightarrow> entry_in_tree k v (del_from_left x lt kt y rt) = (False \<or> (x \<noteq> k \<and> entry_in_tree k v (Branch c lt kt y rt)))"
-  and "\<lbrakk>sorted lt; sorted rt; tree_less kt lt; tree_greater kt rt; inv1 lt; inv1 rt; inv2 lt; inv2 rt; bheight lt = bheight rt; x > kt\<rbrakk> \<Longrightarrow> entry_in_tree k v (del_from_right x lt kt y rt) = (False \<or> (x \<noteq> k \<and> entry_in_tree k v (Branch c lt kt y rt)))"
-  and del_in_tree: "\<lbrakk>sorted t; inv1 t; inv2 t\<rbrakk> \<Longrightarrow> entry_in_tree k v (del x t) = (False \<or> (x \<noteq> k \<and> entry_in_tree k v t))"
-proof (induct x lt kt y rt and x lt kt y rt and x t rule: del_from_left_del_from_right_del.induct)
+lemma "\<lbrakk>rbt_sorted lt; rbt_sorted rt; lt |\<guillemotleft> kt; kt \<guillemotleft>| rt; inv1 lt; inv1 rt; inv2 lt; inv2 rt; bheight lt = bheight rt; x < kt\<rbrakk> \<Longrightarrow> entry_in_tree k v (rbt_del_from_left x lt kt y rt) = (False \<or> (x \<noteq> k \<and> entry_in_tree k v (Branch c lt kt y rt)))"
+  and "\<lbrakk>rbt_sorted lt; rbt_sorted rt; lt |\<guillemotleft> kt; kt \<guillemotleft>| rt; inv1 lt; inv1 rt; inv2 lt; inv2 rt; bheight lt = bheight rt; x > kt\<rbrakk> \<Longrightarrow> entry_in_tree k v (rbt_del_from_right x lt kt y rt) = (False \<or> (x \<noteq> k \<and> entry_in_tree k v (Branch c lt kt y rt)))"
+  and rbt_del_in_tree: "\<lbrakk>rbt_sorted t; inv1 t; inv2 t\<rbrakk> \<Longrightarrow> entry_in_tree k v (rbt_del x t) = (False \<or> (x \<noteq> k \<and> entry_in_tree k v t))"
+proof (induct x lt kt y rt and x lt kt y rt and x t rule: rbt_del_from_left_rbt_del_from_right_rbt_del.induct)
   case (2 xx c aa yy ss bb)
   have "xx = yy \<or> xx < yy \<or> xx > yy" by auto
   from this 2 show ?case proof (elim disjE)
     assume "xx = yy"
     with 2 show ?thesis proof (cases "xx = k")
       case True
-      from 2 `xx = yy` `xx = k` have "sorted (Branch c aa yy ss bb) \<and> k = yy" by simp
-      hence "\<not> entry_in_tree k v aa" "\<not> entry_in_tree k v bb" by (auto simp: tree_less_nit tree_greater_prop)
+      from 2 `xx = yy` `xx = k` have "rbt_sorted (Branch c aa yy ss bb) \<and> k = yy" by simp
+      hence "\<not> entry_in_tree k v aa" "\<not> entry_in_tree k v bb" by (auto simp: rbt_less_nit rbt_greater_prop)
       with `xx = yy` 2 `xx = k` show ?thesis by (simp add: combine_in_tree)
     qed (simp add: combine_in_tree)
   qed simp+
@@ -823,143 +896,147 @@
   case (3 xx lta zz vv rta yy ss bb)
   def mt[simp]: mt == "Branch B lta zz vv rta"
   from 3 have "inv2 mt \<and> inv1 mt" by simp
-  hence "inv2 (del xx mt) \<and> (color_of mt = R \<and> bheight (del xx mt) = bheight mt \<and> inv1 (del xx mt) \<or> color_of mt = B \<and> bheight (del xx mt) = bheight mt - 1 \<and> inv1l (del xx mt))" by (blast dest: del_inv1_inv2)
-  with 3 have 4: "entry_in_tree k v (del_from_left xx mt yy ss bb) = (False \<or> xx \<noteq> k \<and> entry_in_tree k v mt \<or> (k = yy \<and> v = ss) \<or> entry_in_tree k v bb)" by (simp add: balance_left_in_tree)
+  hence "inv2 (rbt_del xx mt) \<and> (color_of mt = R \<and> bheight (rbt_del xx mt) = bheight mt \<and> inv1 (rbt_del xx mt) \<or> color_of mt = B \<and> bheight (rbt_del xx mt) = bheight mt - 1 \<and> inv1l (rbt_del xx mt))" by (blast dest: rbt_del_inv1_inv2)
+  with 3 have 4: "entry_in_tree k v (rbt_del_from_left xx mt yy ss bb) = (False \<or> xx \<noteq> k \<and> entry_in_tree k v mt \<or> (k = yy \<and> v = ss) \<or> entry_in_tree k v bb)" by (simp add: balance_left_in_tree)
   thus ?case proof (cases "xx = k")
     case True
-    from 3 True have "tree_greater yy bb \<and> yy > k" by simp
-    hence "tree_greater k bb" by (blast dest: tree_greater_trans)
-    with 3 4 True show ?thesis by (auto simp: tree_greater_nit)
+    from 3 True have "yy \<guillemotleft>| bb \<and> yy > k" by simp
+    hence "k \<guillemotleft>| bb" by (blast dest: rbt_greater_trans)
+    with 3 4 True show ?thesis by (auto simp: rbt_greater_nit)
   qed auto
 next
   case ("4_1" xx yy ss bb)
   show ?case proof (cases "xx = k")
     case True
-    with "4_1" have "tree_greater yy bb \<and> k < yy" by simp
-    hence "tree_greater k bb" by (blast dest: tree_greater_trans)
+    with "4_1" have "yy \<guillemotleft>| bb \<and> k < yy" by simp
+    hence "k \<guillemotleft>| bb" by (blast dest: rbt_greater_trans)
     with "4_1" `xx = k` 
-   have "entry_in_tree k v (Branch R Empty yy ss bb) = entry_in_tree k v Empty" by (auto simp: tree_greater_nit)
+   have "entry_in_tree k v (Branch R Empty yy ss bb) = entry_in_tree k v Empty" by (auto simp: rbt_greater_nit)
     thus ?thesis by auto
   qed simp+
 next
   case ("4_2" xx vaa vbb vdd vc yy ss bb)
   thus ?case proof (cases "xx = k")
     case True
-    with "4_2" have "k < yy \<and> tree_greater yy bb" by simp
-    hence "tree_greater k bb" by (blast dest: tree_greater_trans)
-    with True "4_2" show ?thesis by (auto simp: tree_greater_nit)
+    with "4_2" have "k < yy \<and> yy \<guillemotleft>| bb" by simp
+    hence "k \<guillemotleft>| bb" by (blast dest: rbt_greater_trans)
+    with True "4_2" show ?thesis by (auto simp: rbt_greater_nit)
   qed auto
 next
   case (5 xx aa yy ss lta zz vv rta)
   def mt[simp]: mt == "Branch B lta zz vv rta"
   from 5 have "inv2 mt \<and> inv1 mt" by simp
-  hence "inv2 (del xx mt) \<and> (color_of mt = R \<and> bheight (del xx mt) = bheight mt \<and> inv1 (del xx mt) \<or> color_of mt = B \<and> bheight (del xx mt) = bheight mt - 1 \<and> inv1l (del xx mt))" by (blast dest: del_inv1_inv2)
-  with 5 have 3: "entry_in_tree k v (del_from_right xx aa yy ss mt) = (entry_in_tree k v aa \<or> (k = yy \<and> v = ss) \<or> False \<or> xx \<noteq> k \<and> entry_in_tree k v mt)" by (simp add: balance_right_in_tree)
+  hence "inv2 (rbt_del xx mt) \<and> (color_of mt = R \<and> bheight (rbt_del xx mt) = bheight mt \<and> inv1 (rbt_del xx mt) \<or> color_of mt = B \<and> bheight (rbt_del xx mt) = bheight mt - 1 \<and> inv1l (rbt_del xx mt))" by (blast dest: rbt_del_inv1_inv2)
+  with 5 have 3: "entry_in_tree k v (rbt_del_from_right xx aa yy ss mt) = (entry_in_tree k v aa \<or> (k = yy \<and> v = ss) \<or> False \<or> xx \<noteq> k \<and> entry_in_tree k v mt)" by (simp add: balance_right_in_tree)
   thus ?case proof (cases "xx = k")
     case True
-    from 5 True have "tree_less yy aa \<and> yy < k" by simp
-    hence "tree_less k aa" by (blast dest: tree_less_trans)
-    with 3 5 True show ?thesis by (auto simp: tree_less_nit)
+    from 5 True have "aa |\<guillemotleft> yy \<and> yy < k" by simp
+    hence "aa |\<guillemotleft> k" by (blast dest: rbt_less_trans)
+    with 3 5 True show ?thesis by (auto simp: rbt_less_nit)
   qed auto
 next
   case ("6_1" xx aa yy ss)
   show ?case proof (cases "xx = k")
     case True
-    with "6_1" have "tree_less yy aa \<and> k > yy" by simp
-    hence "tree_less k aa" by (blast dest: tree_less_trans)
-    with "6_1" `xx = k` show ?thesis by (auto simp: tree_less_nit)
+    with "6_1" have "aa |\<guillemotleft> yy \<and> k > yy" by simp
+    hence "aa |\<guillemotleft> k" by (blast dest: rbt_less_trans)
+    with "6_1" `xx = k` show ?thesis by (auto simp: rbt_less_nit)
   qed simp
 next
   case ("6_2" xx aa yy ss vaa vbb vdd vc)
   thus ?case proof (cases "xx = k")
     case True
-    with "6_2" have "k > yy \<and> tree_less yy aa" by simp
-    hence "tree_less k aa" by (blast dest: tree_less_trans)
-    with True "6_2" show ?thesis by (auto simp: tree_less_nit)
+    with "6_2" have "k > yy \<and> aa |\<guillemotleft> yy" by simp
+    hence "aa |\<guillemotleft> k" by (blast dest: rbt_less_trans)
+    with True "6_2" show ?thesis by (auto simp: rbt_less_nit)
   qed auto
 qed simp
 
+definition (in ord) rbt_delete where
+  "rbt_delete k t = paint B (rbt_del k t)"
 
-definition delete where
-  delete_def: "delete k t = paint B (del k t)"
-
-theorem delete_is_rbt [simp]: assumes "is_rbt t" shows "is_rbt (delete k t)"
+theorem rbt_delete_is_rbt [simp]: assumes "is_rbt t" shows "is_rbt (rbt_delete k t)"
 proof -
   from assms have "inv2 t" and "inv1 t" unfolding is_rbt_def by auto 
-  hence "inv2 (del k t) \<and> (color_of t = R \<and> bheight (del k t) = bheight t \<and> inv1 (del k t) \<or> color_of t = B \<and> bheight (del k t) = bheight t - 1 \<and> inv1l (del k t))" by (rule del_inv1_inv2)
-  hence "inv2 (del k t) \<and> inv1l (del k t)" by (cases "color_of t") auto
+  hence "inv2 (rbt_del k t) \<and> (color_of t = R \<and> bheight (rbt_del k t) = bheight t \<and> inv1 (rbt_del k t) \<or> color_of t = B \<and> bheight (rbt_del k t) = bheight t - 1 \<and> inv1l (rbt_del k t))" by (rule rbt_del_inv1_inv2)
+  hence "inv2 (rbt_del k t) \<and> inv1l (rbt_del k t)" by (cases "color_of t") auto
   with assms show ?thesis
-    unfolding is_rbt_def delete_def
-    by (auto intro: paint_sorted del_sorted)
+    unfolding is_rbt_def rbt_delete_def
+    by (auto intro: paint_rbt_sorted rbt_del_rbt_sorted)
 qed
 
-lemma delete_in_tree: 
+lemma rbt_delete_in_tree: 
   assumes "is_rbt t" 
-  shows "entry_in_tree k v (delete x t) = (x \<noteq> k \<and> entry_in_tree k v t)"
-  using assms unfolding is_rbt_def delete_def
-  by (auto simp: del_in_tree)
+  shows "entry_in_tree k v (rbt_delete x t) = (x \<noteq> k \<and> entry_in_tree k v t)"
+  using assms unfolding is_rbt_def rbt_delete_def
+  by (auto simp: rbt_del_in_tree)
 
-lemma lookup_delete:
+lemma rbt_lookup_rbt_delete:
   assumes is_rbt: "is_rbt t"
-  shows "lookup (delete k t) = (lookup t)|`(-{k})"
+  shows "rbt_lookup (rbt_delete k t) = (rbt_lookup t)|`(-{k})"
 proof
   fix x
-  show "lookup (delete k t) x = (lookup t |` (-{k})) x" 
+  show "rbt_lookup (rbt_delete k t) x = (rbt_lookup t |` (-{k})) x" 
   proof (cases "x = k")
     assume "x = k" 
     with is_rbt show ?thesis
-      by (cases "lookup (delete k t) k") (auto simp: lookup_in_tree delete_in_tree)
+      by (cases "rbt_lookup (rbt_delete k t) k") (auto simp: rbt_lookup_in_tree rbt_delete_in_tree)
   next
     assume "x \<noteq> k"
     thus ?thesis
-      by auto (metis is_rbt delete_is_rbt delete_in_tree is_rbt_sorted lookup_from_in_tree)
+      by auto (metis is_rbt rbt_delete_is_rbt rbt_delete_in_tree is_rbt_rbt_sorted rbt_lookup_from_in_tree)
   qed
 qed
 
+end
 
 subsection {* Union *}
 
-primrec
-  union_with_key :: "('a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
+context ord begin
+
+primrec rbt_union_with_key :: "('a \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"
 where
-  "union_with_key f t Empty = t"
-| "union_with_key f t (Branch c lt k v rt) = union_with_key f (union_with_key f (insert_with_key f k v t) lt) rt"
+  "rbt_union_with_key f t Empty = t"
+| "rbt_union_with_key f t (Branch c lt k v rt) = rbt_union_with_key f (rbt_union_with_key f (rbt_insert_with_key f k v t) lt) rt"
 
-lemma unionwk_sorted: "sorted lt \<Longrightarrow> sorted (union_with_key f lt rt)" 
-  by (induct rt arbitrary: lt) (auto simp: insertwk_sorted)
-theorem unionwk_is_rbt[simp]: "is_rbt lt \<Longrightarrow> is_rbt (union_with_key f lt rt)" 
-  by (induct rt arbitrary: lt) (simp add: insertwk_is_rbt)+
+definition rbt_union_with where
+  "rbt_union_with f = rbt_union_with_key (\<lambda>_. f)"
+
+definition rbt_union where
+  "rbt_union = rbt_union_with_key (%_ _ rv. rv)"
+
+end
 
-definition
-  union_with where
-  "union_with f = union_with_key (\<lambda>_. f)"
+context linorder begin
 
-theorem unionw_is_rbt: "is_rbt lt \<Longrightarrow> is_rbt (union_with f lt rt)" unfolding union_with_def by simp
+lemma rbt_unionwk_rbt_sorted: "rbt_sorted lt \<Longrightarrow> rbt_sorted (rbt_union_with_key f lt rt)" 
+  by (induct rt arbitrary: lt) (auto simp: rbt_insertwk_rbt_sorted)
+theorem rbt_unionwk_is_rbt[simp]: "is_rbt lt \<Longrightarrow> is_rbt (rbt_union_with_key f lt rt)" 
+  by (induct rt arbitrary: lt) (simp add: rbt_insertwk_is_rbt)+
 
-definition union where
-  "union = union_with_key (%_ _ rv. rv)"
+theorem rbt_unionw_is_rbt: "is_rbt lt \<Longrightarrow> is_rbt (rbt_union_with f lt rt)" unfolding rbt_union_with_def by simp
+
+theorem rbt_union_is_rbt: "is_rbt lt \<Longrightarrow> is_rbt (rbt_union lt rt)" unfolding rbt_union_def by simp
 
-theorem union_is_rbt: "is_rbt lt \<Longrightarrow> is_rbt (union lt rt)" unfolding union_def by simp
-
-lemma union_Branch[simp]:
-  "union t (Branch c lt k v rt) = union (union (insert k v t) lt) rt"
-  unfolding union_def insert_def
+lemma (in ord) rbt_union_Branch[simp]:
+  "rbt_union t (Branch c lt k v rt) = rbt_union (rbt_union (rbt_insert k v t) lt) rt"
+  unfolding rbt_union_def rbt_insert_def
   by simp
 
-lemma lookup_union:
-  assumes "is_rbt s" "sorted t"
-  shows "lookup (union s t) = lookup s ++ lookup t"
+lemma rbt_lookup_rbt_union:
+  assumes "is_rbt s" "rbt_sorted t"
+  shows "rbt_lookup (rbt_union s t) = rbt_lookup s ++ rbt_lookup t"
 using assms
 proof (induct t arbitrary: s)
-  case Empty thus ?case by (auto simp: union_def)
+  case Empty thus ?case by (auto simp: rbt_union_def)
 next
   case (Branch c l k v r s)
-  then have "sorted r" "sorted l" "l |\<guillemotleft> k" "k \<guillemotleft>| r" by auto
+  then have "rbt_sorted r" "rbt_sorted l" "l |\<guillemotleft> k" "k \<guillemotleft>| r" by auto
 
-  have meq: "lookup s(k \<mapsto> v) ++ lookup l ++ lookup r =
-    lookup s ++
-    (\<lambda>a. if a < k then lookup l a
-    else if k < a then lookup r a else Some v)" (is "?m1 = ?m2")
+  have meq: "rbt_lookup s(k \<mapsto> v) ++ rbt_lookup l ++ rbt_lookup r =
+    rbt_lookup s ++
+    (\<lambda>a. if a < k then rbt_lookup l a
+    else if k < a then rbt_lookup r a else Some v)" (is "?m1 = ?m2")
   proof (rule ext)
     fix a
 
@@ -967,7 +1044,7 @@
     thus "?m1 a = ?m2 a"
     proof (elim disjE)
       assume "k < a"
-      with `l |\<guillemotleft> k` have "l |\<guillemotleft> a" by (rule tree_less_trans)
+      with `l |\<guillemotleft> k` have "l |\<guillemotleft> a" by (rule rbt_less_trans)
       with `k < a` show ?thesis
         by (auto simp: map_add_def split: option.splits)
     next
@@ -976,52 +1053,57 @@
       show ?thesis by (auto simp: map_add_def)
     next
       assume "a < k"
-      from this `k \<guillemotleft>| r` have "a \<guillemotleft>| r" by (rule tree_greater_trans)
+      from this `k \<guillemotleft>| r` have "a \<guillemotleft>| r" by (rule rbt_greater_trans)
       with `a < k` show ?thesis
         by (auto simp: map_add_def split: option.splits)
     qed
   qed
 
-  from Branch have is_rbt: "is_rbt (RBT_Impl.union (RBT_Impl.insert k v s) l)"
-    by (auto intro: union_is_rbt insert_is_rbt)
+  from Branch have is_rbt: "is_rbt (RBT_Impl.rbt_union (RBT_Impl.rbt_insert k v s) l)"
+    by (auto intro: rbt_union_is_rbt rbt_insert_is_rbt)
   with Branch have IHs:
-    "lookup (union (union (insert k v s) l) r) = lookup (union (insert k v s) l) ++ lookup r"
-    "lookup (union (insert k v s) l) = lookup (insert k v s) ++ lookup l"
+    "rbt_lookup (rbt_union (rbt_union (rbt_insert k v s) l) r) = rbt_lookup (rbt_union (rbt_insert k v s) l) ++ rbt_lookup r"
+    "rbt_lookup (rbt_union (rbt_insert k v s) l) = rbt_lookup (rbt_insert k v s) ++ rbt_lookup l"
     by auto
   
   with meq show ?case
-    by (auto simp: lookup_insert[OF Branch(3)])
+    by (auto simp: rbt_lookup_rbt_insert[OF Branch(3)])
 
 qed
 
+end
 
 subsection {* Modifying existing entries *}
 
+context ord begin
+
 primrec
-  map_entry :: "'a\<Colon>linorder \<Rightarrow> ('b \<Rightarrow> 'b) \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt"
+  rbt_map_entry :: "'a \<Rightarrow> ('b \<Rightarrow> 'b) \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt"
 where
-  "map_entry k f Empty = Empty"
-| "map_entry k f (Branch c lt x v rt) =
-    (if k < x then Branch c (map_entry k f lt) x v rt
-    else if k > x then (Branch c lt x v (map_entry k f rt))
+  "rbt_map_entry k f Empty = Empty"
+| "rbt_map_entry k f (Branch c lt x v rt) =
+    (if k < x then Branch c (rbt_map_entry k f lt) x v rt
+    else if k > x then (Branch c lt x v (rbt_map_entry k f rt))
     else Branch c lt x (f v) rt)"
 
-lemma map_entry_color_of: "color_of (map_entry k f t) = color_of t" by (induct t) simp+
-lemma map_entry_inv1: "inv1 (map_entry k f t) = inv1 t" by (induct t) (simp add: map_entry_color_of)+
-lemma map_entry_inv2: "inv2 (map_entry k f t) = inv2 t" "bheight (map_entry k f t) = bheight t" by (induct t) simp+
-lemma map_entry_tree_greater: "tree_greater a (map_entry k f t) = tree_greater a t" by (induct t) simp+
-lemma map_entry_tree_less: "tree_less a (map_entry k f t) = tree_less a t" by (induct t) simp+
-lemma map_entry_sorted: "sorted (map_entry k f t) = sorted t"
-  by (induct t) (simp_all add: map_entry_tree_less map_entry_tree_greater)
+
+lemma rbt_map_entry_color_of: "color_of (rbt_map_entry k f t) = color_of t" by (induct t) simp+
+lemma rbt_map_entry_inv1: "inv1 (rbt_map_entry k f t) = inv1 t" by (induct t) (simp add: rbt_map_entry_color_of)+
+lemma rbt_map_entry_inv2: "inv2 (rbt_map_entry k f t) = inv2 t" "bheight (rbt_map_entry k f t) = bheight t" by (induct t) simp+
+lemma rbt_map_entry_rbt_greater: "rbt_greater a (rbt_map_entry k f t) = rbt_greater a t" by (induct t) simp+
+lemma rbt_map_entry_rbt_less: "rbt_less a (rbt_map_entry k f t) = rbt_less a t" by (induct t) simp+
+lemma rbt_map_entry_rbt_sorted: "rbt_sorted (rbt_map_entry k f t) = rbt_sorted t"
+  by (induct t) (simp_all add: rbt_map_entry_rbt_less rbt_map_entry_rbt_greater)
 
-theorem map_entry_is_rbt [simp]: "is_rbt (map_entry k f t) = is_rbt t" 
-unfolding is_rbt_def by (simp add: map_entry_inv2 map_entry_color_of map_entry_sorted map_entry_inv1 )
+theorem rbt_map_entry_is_rbt [simp]: "is_rbt (rbt_map_entry k f t) = is_rbt t" 
+unfolding is_rbt_def by (simp add: rbt_map_entry_inv2 rbt_map_entry_color_of rbt_map_entry_rbt_sorted rbt_map_entry_inv1)
 
-theorem lookup_map_entry:
-  "lookup (map_entry k f t) = (lookup t)(k := Option.map f (lookup t k))"
+end
+
+theorem (in linorder) rbt_lookup_rbt_map_entry:
+  "rbt_lookup (rbt_map_entry k f t) = (rbt_lookup t)(k := Option.map f (rbt_lookup t k))"
   by (induct t) (auto split: option.splits simp add: fun_eq_iff)
 
-
 subsection {* Mapping all entries *}
 
 primrec
@@ -1033,18 +1115,28 @@
 lemma map_entries [simp]: "entries (map f t) = List.map (\<lambda>(k, v). (k, f k v)) (entries t)"
   by (induct t) auto
 lemma map_keys [simp]: "keys (map f t) = keys t" by (simp add: keys_def split_def)
-lemma map_tree_greater: "tree_greater k (map f t) = tree_greater k t" by (induct t) simp+
-lemma map_tree_less: "tree_less k (map f t) = tree_less k t" by (induct t) simp+
-lemma map_sorted: "sorted (map f t) = sorted t"  by (induct t) (simp add: map_tree_less map_tree_greater)+
 lemma map_color_of: "color_of (map f t) = color_of t" by (induct t) simp+
 lemma map_inv1: "inv1 (map f t) = inv1 t" by (induct t) (simp add: map_color_of)+
 lemma map_inv2: "inv2 (map f t) = inv2 t" "bheight (map f t) = bheight t" by (induct t) simp+
+
+context ord begin
+
+lemma map_rbt_greater: "rbt_greater k (map f t) = rbt_greater k t" by (induct t) simp+
+lemma map_rbt_less: "rbt_less k (map f t) = rbt_less k t" by (induct t) simp+
+lemma map_rbt_sorted: "rbt_sorted (map f t) = rbt_sorted t"  by (induct t) (simp add: map_rbt_less map_rbt_greater)+
 theorem map_is_rbt [simp]: "is_rbt (map f t) = is_rbt t" 
-unfolding is_rbt_def by (simp add: map_inv1 map_inv2 map_sorted map_color_of)
+unfolding is_rbt_def by (simp add: map_inv1 map_inv2 map_rbt_sorted map_color_of)
 
-theorem lookup_map: "lookup (map f t) x = Option.map (f x) (lookup t x)"
-  by (induct t) auto
+end
 
+theorem (in linorder) rbt_lookup_map: "rbt_lookup (map f t) x = Option.map (f x) (rbt_lookup t x)"
+  apply(induct t)
+  apply auto
+  apply(subgoal_tac "x = a")
+  apply auto
+  done
+ (* FIXME: simproc "antisym less" does not work for linorder context, only for linorder type class
+    by (induct t) auto *)
 
 subsection {* Folding over entries *}
 
@@ -1059,26 +1151,73 @@
 
 subsection {* Bulkloading a tree *}
 
-definition bulkload :: "('a \<times> 'b) list \<Rightarrow> ('a\<Colon>linorder, 'b) rbt" where
-  "bulkload xs = foldr (\<lambda>(k, v). insert k v) xs Empty"
+definition (in ord) rbt_bulkload :: "('a \<times> 'b) list \<Rightarrow> ('a, 'b) rbt" where
+  "rbt_bulkload xs = foldr (\<lambda>(k, v). rbt_insert k v) xs Empty"
+
+context linorder begin
 
-lemma bulkload_is_rbt [simp, intro]:
-  "is_rbt (bulkload xs)"
-  unfolding bulkload_def by (induct xs) auto
+lemma rbt_bulkload_is_rbt [simp, intro]:
+  "is_rbt (rbt_bulkload xs)"
+  unfolding rbt_bulkload_def by (induct xs) auto
 
-lemma lookup_bulkload:
-  "lookup (bulkload xs) = map_of xs"
+lemma rbt_lookup_rbt_bulkload:
+  "rbt_lookup (rbt_bulkload xs) = map_of xs"
 proof -
   obtain ys where "ys = rev xs" by simp
   have "\<And>t. is_rbt t \<Longrightarrow>
-    lookup (List.fold (prod_case insert) ys t) = lookup t ++ map_of (rev ys)"
-      by (induct ys) (simp_all add: bulkload_def lookup_insert prod_case_beta)
+    rbt_lookup (List.fold (prod_case rbt_insert) ys t) = rbt_lookup t ++ map_of (rev ys)"
+      by (induct ys) (simp_all add: rbt_bulkload_def rbt_lookup_rbt_insert prod_case_beta)
   from this Empty_is_rbt have
-    "lookup (List.fold (prod_case insert) (rev xs) Empty) = lookup Empty ++ map_of xs"
+    "rbt_lookup (List.fold (prod_case rbt_insert) (rev xs) Empty) = rbt_lookup Empty ++ map_of xs"
      by (simp add: `ys = rev xs`)
-  then show ?thesis by (simp add: bulkload_def lookup_Empty foldr_def)
+  then show ?thesis by (simp add: rbt_bulkload_def rbt_lookup_Empty foldr_conv_fold)
 qed
 
-hide_const (open) R B Empty insert delete entries keys bulkload lookup map_entry map fold union sorted
+end
+
+lemmas [code] =
+  ord.rbt_less_prop
+  ord.rbt_greater_prop
+  ord.rbt_sorted.simps
+  ord.rbt_lookup.simps
+  ord.is_rbt_def
+  ord.rbt_ins.simps
+  ord.rbt_insert_with_key_def
+  ord.rbt_insertw_def
+  ord.rbt_insert_def
+  ord.rbt_del_from_left.simps
+  ord.rbt_del_from_right.simps
+  ord.rbt_del.simps
+  ord.rbt_delete_def
+  ord.rbt_union_with_key.simps
+  ord.rbt_union_with_def
+  ord.rbt_union_def
+  ord.rbt_map_entry.simps
+  ord.rbt_bulkload_def
+
+text {* Restore original type constraints for constants *}
+setup {*
+  fold Sign.add_const_constraint
+    [(@{const_name rbt_less}, SOME @{typ "('a :: order) \<Rightarrow> ('a, 'b) rbt \<Rightarrow> bool"}),
+     (@{const_name rbt_greater}, SOME @{typ "('a :: order) \<Rightarrow> ('a, 'b) rbt \<Rightarrow> bool"}),
+     (@{const_name rbt_sorted}, SOME @{typ "('a :: linorder, 'b) rbt \<Rightarrow> bool"}),
+     (@{const_name rbt_lookup}, SOME @{typ "('a :: linorder, 'b) rbt \<Rightarrow> 'a \<rightharpoonup> 'b"}),
+     (@{const_name is_rbt}, SOME @{typ "('a :: linorder, 'b) rbt \<Rightarrow> bool"}),
+     (@{const_name rbt_ins}, SOME @{typ "('a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_insert_with_key}, SOME @{typ "('a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_insert_with}, SOME @{typ "('b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a :: linorder) \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_insert}, SOME @{typ "('a :: linorder) \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_del_from_left}, SOME @{typ "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_del_from_right}, SOME @{typ "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_del}, SOME @{typ "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_delete}, SOME @{typ "('a\<Colon>linorder) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_union_with_key}, SOME @{typ "('a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_union_with}, SOME @{typ "('b \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a\<Colon>linorder,'b) rbt \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_union}, SOME @{typ "('a\<Colon>linorder,'b) rbt \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_map_entry}, SOME @{typ "'a\<Colon>linorder \<Rightarrow> ('b \<Rightarrow> 'b) \<Rightarrow> ('a,'b) rbt \<Rightarrow> ('a,'b) rbt"}),
+     (@{const_name rbt_bulkload}, SOME @{typ "('a \<times> 'b) list \<Rightarrow> ('a\<Colon>linorder,'b) rbt"})]
+*}
+
+hide_const (open) R B Empty entries keys map fold
 
 end
--- a/src/HOL/Library/RBT_Mapping.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/RBT_Mapping.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -40,7 +40,7 @@
 
 lemma keys_Mapping [code]:
   "Mapping.keys (Mapping t) = set (keys t)"
-  by (simp add: RBT.keys_def Mapping_def Mapping.keys_def lookup_def lookup_keys)
+  by (simp add: RBT.keys_def Mapping_def Mapping.keys_def lookup_def rbt_lookup_keys)
 
 lemma ordered_keys_Mapping [code]:
   "Mapping.ordered_keys (Mapping t) = keys t"
@@ -144,22 +144,22 @@
   @{thm Empty_is_rbt}\hfill(@{text "Empty_is_rbt"})
 
   \noindent
-  @{thm insert_is_rbt}\hfill(@{text "insert_is_rbt"})
+  @{thm rbt_insert_is_rbt}\hfill(@{text "rbt_insert_is_rbt"})
 
   \noindent
-  @{thm delete_is_rbt}\hfill(@{text "delete_is_rbt"})
+  @{thm rbt_delete_is_rbt}\hfill(@{text "rbt_delete_is_rbt"})
 
   \noindent
-  @{thm bulkload_is_rbt}\hfill(@{text "bulkload_is_rbt"})
+  @{thm rbt_bulkload_is_rbt}\hfill(@{text "rbt_bulkload_is_rbt"})
 
   \noindent
-  @{thm map_entry_is_rbt}\hfill(@{text "map_entry_is_rbt"})
+  @{thm rbt_map_entry_is_rbt}\hfill(@{text "rbt_map_entry_is_rbt"})
 
   \noindent
   @{thm map_is_rbt}\hfill(@{text "map_is_rbt"})
 
   \noindent
-  @{thm union_is_rbt}\hfill(@{text "union_is_rbt"})
+  @{thm rbt_union_is_rbt}\hfill(@{text "rbt_union_is_rbt"})
 *}
 
 
--- a/src/HOL/Library/Set_Algebras.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Set_Algebras.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -14,11 +14,45 @@
   comments at the top of theory @{text BigO}.
 *}
 
-definition set_plus :: "'a::plus set \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "\<oplus>" 65) where
-  "A \<oplus> B = {c. \<exists>a\<in>A. \<exists>b\<in>B. c = a + b}"
+instantiation set :: (plus) plus
+begin
+
+definition plus_set :: "'a::plus set \<Rightarrow> 'a set \<Rightarrow> 'a set" where
+  set_plus_def: "A + B = {c. \<exists>a\<in>A. \<exists>b\<in>B. c = a + b}"
+
+instance ..
+
+end
+
+instantiation set :: (times) times
+begin
+
+definition times_set :: "'a::times set \<Rightarrow> 'a set \<Rightarrow> 'a set" where
+  set_times_def: "A * B = {c. \<exists>a\<in>A. \<exists>b\<in>B. c = a * b}"
+
+instance ..
+
+end
 
-definition set_times :: "'a::times set \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "\<otimes>" 70) where
-  "A \<otimes> B = {c. \<exists>a\<in>A. \<exists>b\<in>B. c = a * b}"
+instantiation set :: (zero) zero
+begin
+
+definition
+  set_zero[simp]: "0::('a::zero)set == {0}"
+
+instance ..
+
+end
+ 
+instantiation set :: (one) one
+begin
+
+definition
+  set_one[simp]: "1::('a::one)set == {1}"
+
+instance ..
+
+end
 
 definition elt_set_plus :: "'a::plus \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "+o" 70) where
   "a +o B = {c. \<exists>b\<in>B. c = a + b}"
@@ -29,105 +63,38 @@
 abbreviation (input) elt_set_eq :: "'a \<Rightarrow> 'a set \<Rightarrow> bool"  (infix "=o" 50) where
   "x =o A \<equiv> x \<in> A"
 
-interpretation set_add!: semigroup "set_plus :: 'a::semigroup_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" proof
-qed (force simp add: set_plus_def add.assoc)
-
-interpretation set_add!: abel_semigroup "set_plus :: 'a::ab_semigroup_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" proof
-qed (force simp add: set_plus_def add.commute)
+instance set :: (semigroup_add) semigroup_add
+by default (force simp add: set_plus_def add.assoc)
 
-interpretation set_add!: monoid "set_plus :: 'a::monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" proof
-qed (simp_all add: set_plus_def)
-
-interpretation set_add!: comm_monoid "set_plus :: 'a::comm_monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" proof
-qed (simp add: set_plus_def)
-
-definition listsum_set :: "('a::monoid_add set) list \<Rightarrow> 'a set" where
-  "listsum_set = monoid_add.listsum set_plus {0}"
+instance set :: (ab_semigroup_add) ab_semigroup_add
+by default (force simp add: set_plus_def add.commute)
 
-interpretation set_add!: monoid_add "set_plus :: 'a::monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" where
-  "monoid_add.listsum set_plus {0::'a} = listsum_set"
-proof -
-  show "class.monoid_add set_plus {0::'a}" proof
-  qed (simp_all add: set_add.assoc)
-  then interpret set_add!: monoid_add "set_plus :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" .
-  show "monoid_add.listsum set_plus {0::'a} = listsum_set"
-    by (simp only: listsum_set_def)
-qed
-
-definition setsum_set :: "('b \<Rightarrow> ('a::comm_monoid_add) set) \<Rightarrow> 'b set \<Rightarrow> 'a set" where
-  "setsum_set f A = (if finite A then fold_image set_plus f {0} A else {0})"
+instance set :: (monoid_add) monoid_add
+by default (simp_all add: set_plus_def)
 
-interpretation set_add!:
-  comm_monoid_big "set_plus :: 'a::comm_monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" setsum_set 
-proof
-qed (fact setsum_set_def)
-
-interpretation set_add!: comm_monoid_add "set_plus :: 'a::comm_monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" where
-  "monoid_add.listsum set_plus {0::'a} = listsum_set"
-  and "comm_monoid_add.setsum set_plus {0::'a} = setsum_set"
-proof -
-  show "class.comm_monoid_add (set_plus :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set) {0}" proof
-  qed (simp_all add: set_add.commute)
-  then interpret set_add!: comm_monoid_add "set_plus :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" .
-  show "monoid_add.listsum set_plus {0::'a} = listsum_set"
-    by (simp only: listsum_set_def)
-  show "comm_monoid_add.setsum set_plus {0::'a} = setsum_set"
-    by (simp add: set_add.setsum_def setsum_set_def fun_eq_iff)
-qed
+instance set :: (comm_monoid_add) comm_monoid_add
+by default (simp_all add: set_plus_def)
 
-interpretation set_mult!: semigroup "set_times :: 'a::semigroup_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" proof
-qed (force simp add: set_times_def mult.assoc)
-
-interpretation set_mult!: abel_semigroup "set_times :: 'a::ab_semigroup_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" proof
-qed (force simp add: set_times_def mult.commute)
+instance set :: (semigroup_mult) semigroup_mult
+by default (force simp add: set_times_def mult.assoc)
 
-interpretation set_mult!: monoid "set_times :: 'a::monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}" proof
-qed (simp_all add: set_times_def)
-
-interpretation set_mult!: comm_monoid "set_times :: 'a::comm_monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}" proof
-qed (simp add: set_times_def)
-
-definition power_set :: "nat \<Rightarrow> ('a::monoid_mult set) \<Rightarrow> 'a set" where
-  "power_set n A = power.power {1} set_times A n"
+instance set :: (ab_semigroup_mult) ab_semigroup_mult
+by default (force simp add: set_times_def mult.commute)
 
-interpretation set_mult!: monoid_mult "{1}" "set_times :: 'a::monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" where
-  "power.power {1} set_times = (\<lambda>A n. power_set n A)"
-proof -
-  show "class.monoid_mult {1} (set_times :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set)" proof
-  qed (simp_all add: set_mult.assoc)
-  show "power.power {1} set_times = (\<lambda>A n. power_set n A)"
-    by (simp add: power_set_def)
-qed
-
-definition setprod_set :: "('b \<Rightarrow> ('a::comm_monoid_mult) set) \<Rightarrow> 'b set \<Rightarrow> 'a set" where
-  "setprod_set f A = (if finite A then fold_image set_times f {1} A else {1})"
+instance set :: (monoid_mult) monoid_mult
+by default (simp_all add: set_times_def)
 
-interpretation set_mult!:
-  comm_monoid_big "set_times :: 'a::comm_monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}" setprod_set 
-proof
-qed (fact setprod_set_def)
+instance set :: (comm_monoid_mult) comm_monoid_mult
+by default (simp_all add: set_times_def)
 
-interpretation set_mult!: comm_monoid_mult "set_times :: 'a::comm_monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}" where
-  "power.power {1} set_times = (\<lambda>A n. power_set n A)"
-  and "comm_monoid_mult.setprod set_times {1::'a} = setprod_set"
-proof -
-  show "class.comm_monoid_mult (set_times :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set) {1}" proof
-  qed (simp_all add: set_mult.commute)
-  then interpret set_mult!: comm_monoid_mult "set_times :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}" .
-  show "power.power {1} set_times = (\<lambda>A n. power_set n A)"
-    by (simp add: power_set_def)
-  show "comm_monoid_mult.setprod set_times {1::'a} = setprod_set"
-    by (simp add: set_mult.setprod_def setprod_set_def fun_eq_iff)
-qed
-
-lemma set_plus_intro [intro]: "a : C ==> b : D ==> a + b : C \<oplus> D"
+lemma set_plus_intro [intro]: "a : C ==> b : D ==> a + b : C + D"
   by (auto simp add: set_plus_def)
 
 lemma set_plus_intro2 [intro]: "b : C ==> a + b : a +o C"
   by (auto simp add: elt_set_plus_def)
 
-lemma set_plus_rearrange: "((a::'a::comm_monoid_add) +o C) \<oplus>
-    (b +o D) = (a + b) +o (C \<oplus> D)"
+lemma set_plus_rearrange: "((a::'a::comm_monoid_add) +o C) +
+    (b +o D) = (a + b) +o (C + D)"
   apply (auto simp add: elt_set_plus_def set_plus_def add_ac)
    apply (rule_tac x = "ba + bb" in exI)
   apply (auto simp add: add_ac)
@@ -139,8 +106,8 @@
     (a + b) +o C"
   by (auto simp add: elt_set_plus_def add_assoc)
 
-lemma set_plus_rearrange3: "((a::'a::semigroup_add) +o B) \<oplus> C =
-    a +o (B \<oplus> C)"
+lemma set_plus_rearrange3: "((a::'a::semigroup_add) +o B) + C =
+    a +o (B + C)"
   apply (auto simp add: elt_set_plus_def set_plus_def)
    apply (blast intro: add_ac)
   apply (rule_tac x = "a + aa" in exI)
@@ -151,8 +118,8 @@
    apply (auto simp add: add_ac)
   done
 
-theorem set_plus_rearrange4: "C \<oplus> ((a::'a::comm_monoid_add) +o D) =
-    a +o (C \<oplus> D)"
+theorem set_plus_rearrange4: "C + ((a::'a::comm_monoid_add) +o D) =
+    a +o (C + D)"
   apply (auto simp add: elt_set_plus_def set_plus_def add_ac)
    apply (rule_tac x = "aa + ba" in exI)
    apply (auto simp add: add_ac)
@@ -165,17 +132,17 @@
   by (auto simp add: elt_set_plus_def)
 
 lemma set_plus_mono2 [intro]: "(C::('a::plus) set) <= D ==> E <= F ==>
-    C \<oplus> E <= D \<oplus> F"
+    C + E <= D + F"
   by (auto simp add: set_plus_def)
 
-lemma set_plus_mono3 [intro]: "a : C ==> a +o D <= C \<oplus> D"
+lemma set_plus_mono3 [intro]: "a : C ==> a +o D <= C + D"
   by (auto simp add: elt_set_plus_def set_plus_def)
 
 lemma set_plus_mono4 [intro]: "(a::'a::comm_monoid_add) : C ==>
-    a +o D <= D \<oplus> C"
+    a +o D <= D + C"
   by (auto simp add: elt_set_plus_def set_plus_def add_ac)
 
-lemma set_plus_mono5: "a:C ==> B <= D ==> a +o B <= C \<oplus> D"
+lemma set_plus_mono5: "a:C ==> B <= D ==> a +o B <= C + D"
   apply (subgoal_tac "a +o B <= a +o D")
    apply (erule order_trans)
    apply (erule set_plus_mono3)
@@ -188,21 +155,21 @@
   apply auto
   done
 
-lemma set_plus_mono2_b: "C <= D ==> E <= F ==> x : C \<oplus> E ==>
-    x : D \<oplus> F"
+lemma set_plus_mono2_b: "C <= D ==> E <= F ==> x : C + E ==>
+    x : D + F"
   apply (frule set_plus_mono2)
    prefer 2
    apply force
   apply assumption
   done
 
-lemma set_plus_mono3_b: "a : C ==> x : a +o D ==> x : C \<oplus> D"
+lemma set_plus_mono3_b: "a : C ==> x : a +o D ==> x : C + D"
   apply (frule set_plus_mono3)
   apply auto
   done
 
 lemma set_plus_mono4_b: "(a::'a::comm_monoid_add) : C ==>
-    x : a +o D ==> x : D \<oplus> C"
+    x : a +o D ==> x : D + C"
   apply (frule set_plus_mono4)
   apply auto
   done
@@ -210,7 +177,7 @@
 lemma set_zero_plus [simp]: "(0::'a::comm_monoid_add) +o C = C"
   by (auto simp add: elt_set_plus_def)
 
-lemma set_zero_plus2: "(0::'a::comm_monoid_add) : A ==> B <= A \<oplus> B"
+lemma set_zero_plus2: "(0::'a::comm_monoid_add) : A ==> B <= A + B"
   apply (auto simp add: set_plus_def)
   apply (rule_tac x = 0 in bexI)
    apply (rule_tac x = x in bexI)
@@ -231,14 +198,14 @@
   by (rule iffI, rule set_minus_imp_plus, assumption, rule set_plus_imp_minus,
     assumption)
 
-lemma set_times_intro [intro]: "a : C ==> b : D ==> a * b : C \<otimes> D"
+lemma set_times_intro [intro]: "a : C ==> b : D ==> a * b : C * D"
   by (auto simp add: set_times_def)
 
 lemma set_times_intro2 [intro!]: "b : C ==> a * b : a *o C"
   by (auto simp add: elt_set_times_def)
 
-lemma set_times_rearrange: "((a::'a::comm_monoid_mult) *o C) \<otimes>
-    (b *o D) = (a * b) *o (C \<otimes> D)"
+lemma set_times_rearrange: "((a::'a::comm_monoid_mult) *o C) *
+    (b *o D) = (a * b) *o (C * D)"
   apply (auto simp add: elt_set_times_def set_times_def)
    apply (rule_tac x = "ba * bb" in exI)
    apply (auto simp add: mult_ac)
@@ -250,8 +217,8 @@
     (a * b) *o C"
   by (auto simp add: elt_set_times_def mult_assoc)
 
-lemma set_times_rearrange3: "((a::'a::semigroup_mult) *o B) \<otimes> C =
-    a *o (B \<otimes> C)"
+lemma set_times_rearrange3: "((a::'a::semigroup_mult) *o B) * C =
+    a *o (B * C)"
   apply (auto simp add: elt_set_times_def set_times_def)
    apply (blast intro: mult_ac)
   apply (rule_tac x = "a * aa" in exI)
@@ -262,8 +229,8 @@
    apply (auto simp add: mult_ac)
   done
 
-theorem set_times_rearrange4: "C \<otimes> ((a::'a::comm_monoid_mult) *o D) =
-    a *o (C \<otimes> D)"
+theorem set_times_rearrange4: "C * ((a::'a::comm_monoid_mult) *o D) =
+    a *o (C * D)"
   apply (auto simp add: elt_set_times_def set_times_def
     mult_ac)
    apply (rule_tac x = "aa * ba" in exI)
@@ -277,17 +244,17 @@
   by (auto simp add: elt_set_times_def)
 
 lemma set_times_mono2 [intro]: "(C::('a::times) set) <= D ==> E <= F ==>
-    C \<otimes> E <= D \<otimes> F"
+    C * E <= D * F"
   by (auto simp add: set_times_def)
 
-lemma set_times_mono3 [intro]: "a : C ==> a *o D <= C \<otimes> D"
+lemma set_times_mono3 [intro]: "a : C ==> a *o D <= C * D"
   by (auto simp add: elt_set_times_def set_times_def)
 
 lemma set_times_mono4 [intro]: "(a::'a::comm_monoid_mult) : C ==>
-    a *o D <= D \<otimes> C"
+    a *o D <= D * C"
   by (auto simp add: elt_set_times_def set_times_def mult_ac)
 
-lemma set_times_mono5: "a:C ==> B <= D ==> a *o B <= C \<otimes> D"
+lemma set_times_mono5: "a:C ==> B <= D ==> a *o B <= C * D"
   apply (subgoal_tac "a *o B <= a *o D")
    apply (erule order_trans)
    apply (erule set_times_mono3)
@@ -300,21 +267,21 @@
   apply auto
   done
 
-lemma set_times_mono2_b: "C <= D ==> E <= F ==> x : C \<otimes> E ==>
-    x : D \<otimes> F"
+lemma set_times_mono2_b: "C <= D ==> E <= F ==> x : C * E ==>
+    x : D * F"
   apply (frule set_times_mono2)
    prefer 2
    apply force
   apply assumption
   done
 
-lemma set_times_mono3_b: "a : C ==> x : a *o D ==> x : C \<otimes> D"
+lemma set_times_mono3_b: "a : C ==> x : a *o D ==> x : C * D"
   apply (frule set_times_mono3)
   apply auto
   done
 
 lemma set_times_mono4_b: "(a::'a::comm_monoid_mult) : C ==>
-    x : a *o D ==> x : D \<otimes> C"
+    x : a *o D ==> x : D * C"
   apply (frule set_times_mono4)
   apply auto
   done
@@ -326,16 +293,16 @@
     (a * b) +o (a *o C)"
   by (auto simp add: elt_set_plus_def elt_set_times_def ring_distribs)
 
-lemma set_times_plus_distrib2: "(a::'a::semiring) *o (B \<oplus> C) =
-    (a *o B) \<oplus> (a *o C)"
+lemma set_times_plus_distrib2: "(a::'a::semiring) *o (B + C) =
+    (a *o B) + (a *o C)"
   apply (auto simp add: set_plus_def elt_set_times_def ring_distribs)
    apply blast
   apply (rule_tac x = "b + bb" in exI)
   apply (auto simp add: ring_distribs)
   done
 
-lemma set_times_plus_distrib3: "((a::'a::semiring) +o C) \<otimes> D <=
-    a *o D \<oplus> C \<otimes> D"
+lemma set_times_plus_distrib3: "((a::'a::semiring) +o C) * D <=
+    a *o D + C * D"
   apply (auto simp add:
     elt_set_plus_def elt_set_times_def set_times_def
     set_plus_def ring_distribs)
@@ -355,16 +322,16 @@
   by (auto simp add: elt_set_times_def)
 
 lemma set_plus_image:
-  fixes S T :: "'n::semigroup_add set" shows "S \<oplus> T = (\<lambda>(x, y). x + y) ` (S \<times> T)"
+  fixes S T :: "'n::semigroup_add set" shows "S + T = (\<lambda>(x, y). x + y) ` (S \<times> T)"
   unfolding set_plus_def by (fastforce simp: image_iff)
 
 lemma set_setsum_alt:
   assumes fin: "finite I"
-  shows "setsum_set S I = {setsum s I |s. \<forall>i\<in>I. s i \<in> S i}"
+  shows "setsum S I = {setsum s I |s. \<forall>i\<in>I. s i \<in> S i}"
     (is "_ = ?setsum I")
 using fin proof induct
   case (insert x F)
-  have "setsum_set S (insert x F) = S x \<oplus> ?setsum F"
+  have "setsum S (insert x F) = S x + ?setsum F"
     using insert.hyps by auto
   also have "...= {s x + setsum s F |s. \<forall> i\<in>insert x F. s i \<in> S i}"
     unfolding set_plus_def
@@ -380,15 +347,15 @@
 
 lemma setsum_set_cond_linear:
   fixes f :: "('a::comm_monoid_add) set \<Rightarrow> ('b::comm_monoid_add) set"
-  assumes [intro!]: "\<And>A B. P A  \<Longrightarrow> P B  \<Longrightarrow> P (A \<oplus> B)" "P {0}"
-    and f: "\<And>A B. P A  \<Longrightarrow> P B \<Longrightarrow> f (A \<oplus> B) = f A \<oplus> f B" "f {0} = {0}"
+  assumes [intro!]: "\<And>A B. P A  \<Longrightarrow> P B  \<Longrightarrow> P (A + B)" "P {0}"
+    and f: "\<And>A B. P A  \<Longrightarrow> P B \<Longrightarrow> f (A + B) = f A + f B" "f {0} = {0}"
   assumes all: "\<And>i. i \<in> I \<Longrightarrow> P (S i)"
-  shows "f (setsum_set S I) = setsum_set (f \<circ> S) I"
+  shows "f (setsum S I) = setsum (f \<circ> S) I"
 proof cases
   assume "finite I" from this all show ?thesis
   proof induct
     case (insert x F)
-    from `finite F` `\<And>i. i \<in> insert x F \<Longrightarrow> P (S i)` have "P (setsum_set S F)"
+    from `finite F` `\<And>i. i \<in> insert x F \<Longrightarrow> P (S i)` have "P (setsum S F)"
       by induct auto
     with insert show ?case
       by (simp, subst f) auto
@@ -397,8 +364,18 @@
 
 lemma setsum_set_linear:
   fixes f :: "('a::comm_monoid_add) set => ('b::comm_monoid_add) set"
-  assumes "\<And>A B. f(A) \<oplus> f(B) = f(A \<oplus> B)" "f {0} = {0}"
-  shows "f (setsum_set S I) = setsum_set (f \<circ> S) I"
+  assumes "\<And>A B. f(A) + f(B) = f(A + B)" "f {0} = {0}"
+  shows "f (setsum S I) = setsum (f \<circ> S) I"
   using setsum_set_cond_linear[of "\<lambda>x. True" f I S] assms by auto
 
+lemma set_times_Un_distrib:
+  "A * (B \<union> C) = A * B \<union> A * C"
+  "(A \<union> B) * C = A * C \<union> B * C"
+by (auto simp: set_times_def)
+
+lemma set_times_UNION_distrib:
+  "A * UNION I M = UNION I (%i. A * M i)"
+  "UNION I M * A = UNION I (%i. M i * A)"
+by (auto simp: set_times_def)
+
 end
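The instantiations above let plain + and * on sets denote the elementwise operations, A + B = {a + b | a \<in> A, b \<in> B} and analogously for *, in place of the dedicated \<oplus>/\<otimes> infixes. A minimal Haskell sketch of the same idea (illustration only; setPlus/setTimes are invented names, not the HOL constants):

  import qualified Data.Set as S

  -- Elementwise set operations in the sense of set_plus_def / set_times_def:
  --   A + B = { a + b | a in A, b in B },  A * B = { a * b | a in A, b in B }
  setPlus, setTimes :: (Ord a, Num a) => S.Set a -> S.Set a -> S.Set a
  setPlus  a b = S.fromList [ x + y | x <- S.toList a, y <- S.toList b ]
  setTimes a b = S.fromList [ x * y | x <- S.toList a, y <- S.toList b ]

  main :: IO ()
  main = do
    print (setPlus  (S.fromList [1, 2]) (S.fromList [10, 20 :: Int]))  -- fromList [11,12,21,22]
    print (setTimes (S.fromList [1, 2]) (S.fromList [10, 20 :: Int]))  -- fromList [10,20,40]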
--- a/src/HOL/Library/Target_Numeral.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Library/Target_Numeral.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -163,6 +163,10 @@
   "int_of (max k l) = max (int_of k) (int_of l)"
   by (simp add: max_def less_eq_int_def)
 
+lemma of_nat_nat_of [simp]:
+  "of_nat (nat_of k) = max 0 k"
+  by (simp add: nat_of_def Target_Numeral.int_eq_iff less_eq_int_def max_def)
+
 
 subsection {* Code theorems for target language numerals *}
 
@@ -620,18 +624,25 @@
 
 subsection {* Implementation for @{typ nat} *}
 
+definition Nat :: "Target_Numeral.int \<Rightarrow> nat" where
+  "Nat = Target_Numeral.nat_of"
+
 definition of_nat :: "nat \<Rightarrow> Target_Numeral.int" where
   [code_abbrev]: "of_nat = Nat.of_nat"
 
-hide_const (open) of_nat
+hide_const (open) of_nat Nat
 
 lemma int_of_nat [simp]:
   "Target_Numeral.int_of (Target_Numeral.of_nat n) = of_nat n"
   by (simp add: of_nat_def)
 
 lemma [code abstype]:
-  "Target_Numeral.nat_of (Target_Numeral.of_nat n) = n"
-  by (simp add: nat_of_def)
+  "Target_Numeral.Nat (Target_Numeral.of_nat n) = n"
+  by (simp add: Nat_def nat_of_def)
+
+lemma [code abstract]:
+  "Target_Numeral.of_nat (Target_Numeral.nat_of k) = max 0 k"
+  by (simp add: of_nat_def)
 
 lemma [code_abbrev]:
   "nat (Int.Pos k) = nat_of_num k"
--- a/src/HOL/Lifting.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Lifting.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -199,19 +199,19 @@
     apply safe
     apply (drule Abs1, simp)
     apply (erule Abs2)
-    apply (rule pred_compI)
+    apply (rule relcomppI)
     apply (rule Rep1)
     apply (rule Rep2)
-    apply (rule pred_compI, assumption)
+    apply (rule relcomppI, assumption)
     apply (drule Abs1, simp)
     apply (clarsimp simp add: R2)
-    apply (rule pred_compI, assumption)
+    apply (rule relcomppI, assumption)
     apply (drule Abs1, simp)+
     apply (clarsimp simp add: R2)
     apply (drule Abs1, simp)+
     apply (clarsimp simp add: R2)
-    apply (rule pred_compI, assumption)
-    apply (rule pred_compI [rotated])
+    apply (rule relcomppI, assumption)
+    apply (rule relcomppI [rotated])
     apply (erule conversepI)
     apply (drule Abs1, simp)+
     apply (simp add: R2)
@@ -236,32 +236,56 @@
   shows "invariant P x x \<equiv> P x"
 using assms by (auto simp add: invariant_def)
 
-lemma copy_type_to_Quotient:
+lemma UNIV_typedef_to_Quotient:
   assumes "type_definition Rep Abs UNIV"
-  and T_def: "T \<equiv> (\<lambda>x y. Abs x = y)"
+  and T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
   shows "Quotient (op =) Abs Rep T"
 proof -
   interpret type_definition Rep Abs UNIV by fact
-  from Abs_inject Rep_inverse T_def show ?thesis by (auto intro!: QuotientI)
+  from Abs_inject Rep_inverse Abs_inverse T_def show ?thesis 
+    by (fastforce intro!: QuotientI fun_eq_iff)
 qed
 
-lemma copy_type_to_equivp:
+lemma UNIV_typedef_to_equivp:
   fixes Abs :: "'a \<Rightarrow> 'b"
   and Rep :: "'b \<Rightarrow> 'a"
   assumes "type_definition Rep Abs (UNIV::'a set)"
   shows "equivp (op=::'a\<Rightarrow>'a\<Rightarrow>bool)"
 by (rule identity_equivp)
 
-lemma invariant_type_to_Quotient:
+lemma typedef_to_Quotient:
+  assumes "type_definition Rep Abs S"
+  and T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "Quotient (Lifting.invariant (\<lambda>x. x \<in> S)) Abs Rep T"
+proof -
+  interpret type_definition Rep Abs S by fact
+  from Rep Abs_inject Rep_inverse Abs_inverse T_def show ?thesis
+    by (auto intro!: QuotientI simp: invariant_def fun_eq_iff)
+qed
+
+lemma typedef_to_part_equivp:
+  assumes "type_definition Rep Abs S"
+  shows "part_equivp (Lifting.invariant (\<lambda>x. x \<in> S))"
+proof (intro part_equivpI)
+  interpret type_definition Rep Abs S by fact
+  show "\<exists>x. Lifting.invariant (\<lambda>x. x \<in> S) x x" using Rep by (auto simp: invariant_def)
+next
+  show "symp (Lifting.invariant (\<lambda>x. x \<in> S))" by (auto intro: sympI simp: invariant_def)
+next
+  show "transp (Lifting.invariant (\<lambda>x. x \<in> S))" by (auto intro: transpI simp: invariant_def)
+qed
+
+lemma open_typedef_to_Quotient:
   assumes "type_definition Rep Abs {x. P x}"
-  and T_def: "T \<equiv> (\<lambda>x y. (invariant P) x x \<and> Abs x = y)"
+  and T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
   shows "Quotient (invariant P) Abs Rep T"
 proof -
   interpret type_definition Rep Abs "{x. P x}" by fact
-  from Rep Abs_inject Rep_inverse T_def show ?thesis by (auto intro!: QuotientI simp: invariant_def)
+  from Rep Abs_inject Rep_inverse Abs_inverse T_def show ?thesis
+    by (auto intro!: QuotientI simp: invariant_def fun_eq_iff)
 qed
 
-lemma invariant_type_to_part_equivp:
+lemma open_typedef_to_part_equivp:
   assumes "type_definition Rep Abs {x. P x}"
   shows "part_equivp (invariant P)"
 proof (intro part_equivpI)
@@ -273,6 +297,84 @@
   show "transp (invariant P)" by (auto intro: transpI simp: invariant_def)
 qed
 
+text {* Generating transfer rules for quotients. *}
+
+lemma Quotient_right_unique:
+  assumes "Quotient R Abs Rep T" shows "right_unique T"
+  using assms unfolding Quotient_alt_def right_unique_def by metis
+
+lemma Quotient_right_total:
+  assumes "Quotient R Abs Rep T" shows "right_total T"
+  using assms unfolding Quotient_alt_def right_total_def by metis
+
+lemma Quotient_rel_eq_transfer:
+  assumes "Quotient R Abs Rep T"
+  shows "(T ===> T ===> op =) R (op =)"
+  using assms unfolding Quotient_alt_def fun_rel_def by simp
+
+lemma Quotient_bi_total:
+  assumes "Quotient R Abs Rep T" and "reflp R"
+  shows "bi_total T"
+  using assms unfolding Quotient_alt_def bi_total_def reflp_def by auto
+
+lemma Quotient_id_abs_transfer:
+  assumes "Quotient R Abs Rep T" and "reflp R"
+  shows "(op = ===> T) (\<lambda>x. x) Abs"
+  using assms unfolding Quotient_alt_def reflp_def fun_rel_def by simp
+
+text {* Generating transfer rules for a type defined with @{text "typedef"}. *}
+
+lemma typedef_bi_unique:
+  assumes type: "type_definition Rep Abs A"
+  assumes T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "bi_unique T"
+  unfolding bi_unique_def T_def
+  by (simp add: type_definition.Rep_inject [OF type])
+
+lemma typedef_right_total:
+  assumes T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "right_total T"
+  unfolding right_total_def T_def by simp
+
+lemma copy_type_bi_total:
+  assumes type: "type_definition Rep Abs UNIV"
+  assumes T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "bi_total T"
+  unfolding bi_total_def T_def
+  by (metis type_definition.Abs_inverse [OF type] UNIV_I)
+
+lemma typedef_transfer_All:
+  assumes type: "type_definition Rep Abs A"
+  assumes T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "((T ===> op =) ===> op =) (Ball A) All"
+  unfolding T_def fun_rel_def
+  by (metis type_definition.Rep [OF type]
+    type_definition.Abs_inverse [OF type])
+
+lemma typedef_transfer_Ex:
+  assumes type: "type_definition Rep Abs A"
+  assumes T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "((T ===> op =) ===> op =) (Bex A) Ex"
+  unfolding T_def fun_rel_def
+  by (metis type_definition.Rep [OF type]
+    type_definition.Abs_inverse [OF type])
+
+lemma typedef_transfer_bforall:
+  assumes type: "type_definition Rep Abs A"
+  assumes T_def: "T \<equiv> (\<lambda>x y. x = Rep y)"
+  shows "((T ===> op =) ===> op =)
+    (transfer_bforall (\<lambda>x. x \<in> A)) transfer_forall"
+  unfolding transfer_bforall_def transfer_forall_def Ball_def [symmetric]
+  by (rule typedef_transfer_All [OF assms])
+
+text {* Generating the correspondence rule for a constant defined with
+  @{text "lift_definition"}. *}
+
+lemma Quotient_to_transfer:
+  assumes "Quotient R Abs Rep T" and "R c c" and "c' \<equiv> Abs c"
+  shows "T c c'"
+  using assms by (auto dest: Quotient_cr_rel)
+
 subsection {* ML setup *}
 
 text {* Auxiliary data for the lifting package *}
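All of the reworked and newly added lemmas above use the Rep-based transfer relation T x y \<equiv> x = Rep y, for full typedefs as well as for subsets {x. P x}. For intuition only, a small Haskell sketch of such a situation (Pos, rep, abs' and t are invented names; this is not the lifting package itself):

  -- A "typedef" over the subset {n. n > 0} of the integers.
  newtype Pos = Pos Integer deriving (Eq, Show)   -- invariant: wrapped value > 0

  rep :: Pos -> Integer
  rep (Pos n) = n

  abs' :: Integer -> Maybe Pos                    -- Abs is only meaningful on the subset
  abs' n = if n > 0 then Just (Pos n) else Nothing

  t :: Integer -> Pos -> Bool                     -- the relation  T x y  <->  x = Rep y
  t x y = x == rep y

  main :: IO ()
  main = print (t 3 (Pos 3), t 4 (Pos 3), abs' (-1))   -- (True,False,Nothing)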
--- a/src/HOL/Limits.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Limits.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -112,7 +112,8 @@
 qed
 
 ML {*
-  fun ev_elim_tac ctxt thms thm = let
+  fun eventually_elim_tac ctxt thms thm =
+    let
       val thy = Proof_Context.theory_of ctxt
       val mp_thms = thms RL [@{thm eventually_rev_mp}]
       val raw_elim_thm =
@@ -124,13 +125,11 @@
     in
       CASES cases (rtac raw_elim_thm 1) thm
     end
-
-  fun eventually_elim_setup name =
-    Method.setup name (Scan.succeed (fn ctxt => METHOD_CASES (ev_elim_tac ctxt)))
-      "elimination of eventually quantifiers"
 *}
 
-setup {* eventually_elim_setup @{binding "eventually_elim"} *}
+method_setup eventually_elim = {*
+  Scan.succeed (fn ctxt => METHOD_CASES (eventually_elim_tac ctxt))
+*} "elimination of eventually quantifiers"
 
 
 subsection {* Finer-than relation *}
--- a/src/HOL/List.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/List.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -85,18 +85,20 @@
 syntax (HTML output)
   "_filter" :: "[pttrn, 'a list, bool] => 'a list"("(1[_\<leftarrow>_ ./ _])")
 
-primrec -- {* canonical argument order *}
-  fold :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b" where
-    "fold f [] = id"
-  | "fold f (x # xs) = fold f xs \<circ> f x"
-
-definition 
-  foldr :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b" where
-  [code_abbrev]: "foldr f xs = fold f (rev xs)"
-
-definition
-  foldl :: "('b \<Rightarrow> 'a \<Rightarrow> 'b) \<Rightarrow> 'b \<Rightarrow> 'a list \<Rightarrow> 'b" where
-  "foldl f s xs = fold (\<lambda>x s. f s x)  xs s"
+primrec fold :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b"
+where
+  fold_Nil:  "fold f [] = id"
+| fold_Cons: "fold f (x # xs) = fold f xs \<circ> f x" -- {* natural argument order *}
+
+primrec foldr :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b"
+where
+  foldr_Nil:  "foldr f [] = id"
+| foldr_Cons: "foldr f (x # xs) = f x \<circ> foldr f xs" -- {* natural argument order *}
+
+primrec foldl :: "('b \<Rightarrow> 'a \<Rightarrow> 'b) \<Rightarrow> 'b \<Rightarrow> 'a list \<Rightarrow> 'b"
+where
+  foldl_Nil:  "foldl f a [] = a"
+| foldl_Cons: "foldl f a (x # xs) = foldl f (f a x) xs"
 
 primrec
   concat:: "'a list list \<Rightarrow> 'a list" where
@@ -250,8 +252,8 @@
 @{lemma[source] "filter (\<lambda>n::nat. n<2) [0,2,1] = [0,1]" by simp}\\
 @{lemma "concat [[a,b],[c,d,e],[],[f]] = [a,b,c,d,e,f]" by simp}\\
 @{lemma "fold f [a,b,c] x = f c (f b (f a x))" by simp}\\
-@{lemma "foldr f [a,b,c] x = f a (f b (f c x))" by (simp add: foldr_def)}\\
-@{lemma "foldl f x [a,b,c] = f (f (f x a) b) c" by (simp add: foldl_def)}\\
+@{lemma "foldr f [a,b,c] x = f a (f b (f c x))" by simp}\\
+@{lemma "foldl f x [a,b,c] = f (f (f x a) b) c" by simp}\\
 @{lemma "zip [a,b,c] [x,y,z] = [(a,x),(b,y),(c,z)]" by simp}\\
 @{lemma "zip [a,b] [x,y,z] = [(a,x),(b,y)]" by simp}\\
 @{lemma "splice [a,b,c] [x,y,z] = [a,x,b,y,c,z]" by simp}\\
@@ -277,7 +279,7 @@
 @{lemma "rotate 3 [a,b,c,d] = [d,a,b,c]" by (simp add:rotate_def eval_nat_numeral)}\\
 @{lemma "replicate 4 a = [a,a,a,a]" by (simp add:eval_nat_numeral)}\\
 @{lemma "[2..<5] = [2,3,4]" by (simp add:eval_nat_numeral)}\\
-@{lemma "listsum [1,2,3::nat] = 6" by (simp add: listsum_def foldr_def)}
+@{lemma "listsum [1,2,3::nat] = 6" by (simp add: listsum_def)}
 \end{tabular}}
 \caption{Characteristic examples}
 \label{fig:Characteristic}
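With the new primrec definitions, fold uses the natural argument order (fold f (x # xs) = fold f xs \<circ> f x) and foldr/foldl are primitive as well, related to fold by foldr_conv_fold and foldl_conv_fold further below. A small Haskell sketch of the same correspondences (the HOL-style fold is written out by hand; foldr and foldl are the Prelude functions):

  -- HOL-style fold with natural argument order:
  --   fold f []       = id
  --   fold f (x : xs) = fold f xs . f x
  fold :: (a -> b -> b) -> [a] -> b -> b
  fold _ []       = id
  fold f (x : xs) = fold f xs . f x

  main :: IO ()
  main = do
    print (fold (:) [1, 2, 3 :: Int] [])          -- [3,2,1]
    print (foldr (:) [] [1, 2, 3 :: Int])         -- [1,2,3], and equals fold (:) (reverse [1,2,3]) []
    print (foldl (flip (:)) [] [1, 2, 3 :: Int])  -- [3,2,1], matching foldl_conv_fold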
@@ -2387,7 +2389,7 @@
 by(auto simp add: set_zip list_all2_eq list_all2_conv_all_nth cong: conj_cong)
 
 
-subsubsection {* @{const fold} with canonical argument order *}
+subsubsection {* @{const fold} with natural argument order *}
 
 lemma fold_remove1_split:
   assumes f: "\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> f x \<circ> f y = f y \<circ> f x"
@@ -2477,7 +2479,7 @@
   qed
 qed
 
-lemma union_set_fold:
+lemma union_set_fold [code]:
   "set xs \<union> A = fold Set.insert xs A"
 proof -
   interpret comp_fun_idem Set.insert
@@ -2485,7 +2487,11 @@
   show ?thesis by (simp add: union_fold_insert fold_set_fold)
 qed
 
-lemma minus_set_fold:
+lemma union_coset_filter [code]:
+  "List.coset xs \<union> A = List.coset (List.filter (\<lambda>x. x \<notin> A) xs)"
+  by auto
+
+lemma minus_set_fold [code]:
   "A - set xs = fold Set.remove xs A"
 proof -
   interpret comp_fun_idem Set.remove
@@ -2494,6 +2500,18 @@
     by (simp add: minus_fold_remove [of _ A] fold_set_fold)
 qed
 
+lemma minus_coset_filter [code]:
+  "A - List.coset xs = set (List.filter (\<lambda>x. x \<in> A) xs)"
+  by auto
+
+lemma inter_set_filter [code]:
+  "A \<inter> set xs = set (List.filter (\<lambda>x. x \<in> A) xs)"
+  by auto
+
+lemma inter_coset_fold [code]:
+  "A \<inter> List.coset xs = fold Set.remove xs A"
+  by (simp add: Diff_eq [symmetric] minus_set_fold)
+
 lemma (in lattice) Inf_fin_set_fold:
   "Inf_fin (set (x # xs)) = fold inf xs x"
 proof -
@@ -2503,6 +2521,8 @@
     by (simp add: Inf_fin_def fold1_set_fold del: set.simps)
 qed
 
+declare Inf_fin_set_fold [code]
+
 lemma (in lattice) Sup_fin_set_fold:
   "Sup_fin (set (x # xs)) = fold sup xs x"
 proof -
@@ -2512,6 +2532,8 @@
     by (simp add: Sup_fin_def fold1_set_fold del: set.simps)
 qed
 
+declare Sup_fin_set_fold [code]
+
 lemma (in linorder) Min_fin_set_fold:
   "Min (set (x # xs)) = fold min xs x"
 proof -
@@ -2521,6 +2543,8 @@
     by (simp add: Min_def fold1_set_fold del: set.simps)
 qed
 
+declare Min_fin_set_fold [code]
+
 lemma (in linorder) Max_fin_set_fold:
   "Max (set (x # xs)) = fold max xs x"
 proof -
@@ -2530,6 +2554,8 @@
     by (simp add: Max_def fold1_set_fold del: set.simps)
 qed
 
+declare Max_fin_set_fold [code]
+
 lemma (in complete_lattice) Inf_set_fold:
   "Inf (set xs) = fold inf xs top"
 proof -
@@ -2538,6 +2564,8 @@
   show ?thesis by (simp add: Inf_fold_inf fold_set_fold inf_commute)
 qed
 
+declare Inf_set_fold [where 'a = "'a set", code]
+
 lemma (in complete_lattice) Sup_set_fold:
   "Sup (set xs) = fold sup xs bot"
 proof -
@@ -2546,137 +2574,72 @@
   show ?thesis by (simp add: Sup_fold_sup fold_set_fold sup_commute)
 qed
 
+declare Sup_set_fold [where 'a = "'a set", code]
+
 lemma (in complete_lattice) INF_set_fold:
   "INFI (set xs) f = fold (inf \<circ> f) xs top"
   unfolding INF_def set_map [symmetric] Inf_set_fold fold_map ..
 
+declare INF_set_fold [code]
+
 lemma (in complete_lattice) SUP_set_fold:
   "SUPR (set xs) f = fold (sup \<circ> f) xs bot"
   unfolding SUP_def set_map [symmetric] Sup_set_fold fold_map ..
 
+declare SUP_set_fold [code]
 
 subsubsection {* Fold variants: @{const foldr} and @{const foldl} *}
 
 text {* Correspondence *}
 
-lemma foldr_foldl: -- {* The ``Third Duality Theorem'' in Bird \& Wadler: *}
+lemma foldr_conv_fold [code_abbrev]:
+  "foldr f xs = fold f (rev xs)"
+  by (induct xs) simp_all
+
+lemma foldl_conv_fold:
+  "foldl f s xs = fold (\<lambda>x s. f s x) xs s"
+  by (induct xs arbitrary: s) simp_all
+
+lemma foldr_conv_foldl: -- {* The ``Third Duality Theorem'' in Bird \& Wadler: *}
   "foldr f xs a = foldl (\<lambda>x y. f y x) a (rev xs)"
-  by (simp add: foldr_def foldl_def)
-
-lemma foldl_foldr:
+  by (simp add: foldr_conv_fold foldl_conv_fold)
+
+lemma foldl_conv_foldr:
   "foldl f a xs = foldr (\<lambda>x y. f y x) (rev xs) a"
-  by (simp add: foldr_def foldl_def)
+  by (simp add: foldr_conv_fold foldl_conv_fold)
 
 lemma foldr_fold:
   assumes "\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> f y \<circ> f x = f x \<circ> f y"
   shows "foldr f xs = fold f xs"
-  using assms unfolding foldr_def by (rule fold_rev)
-
-lemma
-  foldr_Nil [code, simp]: "foldr f [] = id"
-  and foldr_Cons [code, simp]: "foldr f (x # xs) = f x \<circ> foldr f xs"
-  by (simp_all add: foldr_def)
-
-lemma
-  foldl_Nil [simp]: "foldl f a [] = a"
-  and foldl_Cons [simp]: "foldl f a (x # xs) = foldl f (f a x) xs"
-  by (simp_all add: foldl_def)
+  using assms unfolding foldr_conv_fold by (rule fold_rev)
 
 lemma foldr_cong [fundef_cong]:
   "a = b \<Longrightarrow> l = k \<Longrightarrow> (\<And>a x. x \<in> set l \<Longrightarrow> f x a = g x a) \<Longrightarrow> foldr f l a = foldr g k b"
-  by (auto simp add: foldr_def intro!: fold_cong)
+  by (auto simp add: foldr_conv_fold intro!: fold_cong)
 
 lemma foldl_cong [fundef_cong]:
   "a = b \<Longrightarrow> l = k \<Longrightarrow> (\<And>a x. x \<in> set l \<Longrightarrow> f a x = g a x) \<Longrightarrow> foldl f a l = foldl g b k"
-  by (auto simp add: foldl_def intro!: fold_cong)
+  by (auto simp add: foldl_conv_fold intro!: fold_cong)
 
 lemma foldr_append [simp]:
   "foldr f (xs @ ys) a = foldr f xs (foldr f ys a)"
-  by (simp add: foldr_def)
+  by (simp add: foldr_conv_fold)
 
 lemma foldl_append [simp]:
   "foldl f a (xs @ ys) = foldl f (foldl f a xs) ys"
-  by (simp add: foldl_def)
+  by (simp add: foldl_conv_fold)
 
 lemma foldr_map [code_unfold]:
   "foldr g (map f xs) a = foldr (g o f) xs a"
-  by (simp add: foldr_def fold_map rev_map)
+  by (simp add: foldr_conv_fold fold_map rev_map)
 
 lemma foldl_map [code_unfold]:
   "foldl g a (map f xs) = foldl (\<lambda>a x. g a (f x)) a xs"
-  by (simp add: foldl_def fold_map comp_def)
-
-text {* Executing operations in terms of @{const foldr} -- tail-recursive! *}
+  by (simp add: foldl_conv_fold fold_map comp_def)
 
 lemma concat_conv_foldr [code]:
   "concat xss = foldr append xss []"
-  by (simp add: fold_append_concat_rev foldr_def)
-
-lemma minus_set_foldr [code]:
-  "A - set xs = foldr Set.remove xs A"
-proof -
-  have "\<And>x y :: 'a. Set.remove y \<circ> Set.remove x = Set.remove x \<circ> Set.remove y"
-    by (auto simp add: remove_def)
-  then show ?thesis by (simp add: minus_set_fold foldr_fold)
-qed
-
-lemma subtract_coset_filter [code]:
-  "A - List.coset xs = set (List.filter (\<lambda>x. x \<in> A) xs)"
-  by auto
-
-lemma union_set_foldr [code]:
-  "set xs \<union> A = foldr Set.insert xs A"
-proof -
-  have "\<And>x y :: 'a. insert y \<circ> insert x = insert x \<circ> insert y"
-    by auto
-  then show ?thesis by (simp add: union_set_fold foldr_fold)
-qed
-
-lemma union_coset_foldr [code]:
-  "List.coset xs \<union> A = List.coset (List.filter (\<lambda>x. x \<notin> A) xs)"
-  by auto
-
-lemma inter_set_filer [code]:
-  "A \<inter> set xs = set (List.filter (\<lambda>x. x \<in> A) xs)"
-  by auto
-
-lemma inter_coset_foldr [code]:
-  "A \<inter> List.coset xs = foldr Set.remove xs A"
-  by (simp add: Diff_eq [symmetric] minus_set_foldr)
-
-lemma (in lattice) Inf_fin_set_foldr [code]:
-  "Inf_fin (set (x # xs)) = foldr inf xs x"
-  by (simp add: Inf_fin_set_fold ac_simps foldr_fold fun_eq_iff del: set.simps)
-
-lemma (in lattice) Sup_fin_set_foldr [code]:
-  "Sup_fin (set (x # xs)) = foldr sup xs x"
-  by (simp add: Sup_fin_set_fold ac_simps foldr_fold fun_eq_iff del: set.simps)
-
-lemma (in linorder) Min_fin_set_foldr [code]:
-  "Min (set (x # xs)) = foldr min xs x"
-  by (simp add: Min_fin_set_fold ac_simps foldr_fold fun_eq_iff del: set.simps)
-
-lemma (in linorder) Max_fin_set_foldr [code]:
-  "Max (set (x # xs)) = foldr max xs x"
-  by (simp add: Max_fin_set_fold ac_simps foldr_fold fun_eq_iff del: set.simps)
-
-lemma (in complete_lattice) Inf_set_foldr:
-  "Inf (set xs) = foldr inf xs top"
-  by (simp add: Inf_set_fold ac_simps foldr_fold fun_eq_iff)
-
-lemma (in complete_lattice) Sup_set_foldr:
-  "Sup (set xs) = foldr sup xs bot"
-  by (simp add: Sup_set_fold ac_simps foldr_fold fun_eq_iff)
-
-declare Inf_set_foldr [where 'a = "'a set", code] Sup_set_foldr [where 'a = "'a set", code]
-
-lemma (in complete_lattice) INF_set_foldr [code]:
-  "INFI (set xs) f = foldr (inf \<circ> f) xs top"
-  by (simp only: INF_def Inf_set_foldr foldr_map set_map [symmetric])
-
-lemma (in complete_lattice) SUP_set_foldr [code]:
-  "SUPR (set xs) f = foldr (sup \<circ> f) xs bot"
-  by (simp only: SUP_def Sup_set_foldr foldr_map set_map [symmetric])
+  by (simp add: fold_append_concat_rev foldr_conv_fold)
 
 
 subsubsection {* @{text upt} *}
@@ -3108,7 +3071,7 @@
 
 lemma (in comm_monoid_add) listsum_rev [simp]:
   "listsum (rev xs) = listsum xs"
-  by (simp add: listsum_def foldr_def fold_rev fun_eq_iff add_ac)
+  by (simp add: listsum_def foldr_fold fold_rev fun_eq_iff add_ac)
 
 lemma (in monoid_add) fold_plus_listsum_rev:
   "fold plus xs = plus (listsum (rev xs))"
@@ -3116,40 +3079,12 @@
   fix x
   have "fold plus xs x = fold plus xs (x + 0)" by simp
   also have "\<dots> = fold plus (x # xs) 0" by simp
-  also have "\<dots> = foldr plus (rev xs @ [x]) 0" by (simp add: foldr_def)
+  also have "\<dots> = foldr plus (rev xs @ [x]) 0" by (simp add: foldr_conv_fold)
   also have "\<dots> = listsum (rev xs @ [x])" by (simp add: listsum_def)
   also have "\<dots> = listsum (rev xs) + listsum [x]" by simp
   finally show "fold plus xs x = listsum (rev xs) + x" by simp
 qed
 
-lemma (in semigroup_add) foldl_assoc:
-  "foldl plus (x + y) zs = x + foldl plus y zs"
-  by (simp add: foldl_def fold_commute_apply [symmetric] fun_eq_iff add_assoc)
-
-lemma (in ab_semigroup_add) foldr_conv_foldl:
-  "foldr plus xs a = foldl plus a xs"
-  by (simp add: foldl_def foldr_fold fun_eq_iff add_ac)
-
-text {*
-  Note: @{text "n \<le> foldl (op +) n ns"} looks simpler, but is more
-  difficult to use because it requires an additional transitivity step.
-*}
-
-lemma start_le_sum:
-  fixes m n :: nat
-  shows "m \<le> n \<Longrightarrow> m \<le> foldl plus n ns"
-  by (simp add: foldl_def add_commute fold_plus_listsum_rev)
-
-lemma elem_le_sum:
-  fixes m n :: nat 
-  shows "n \<in> set ns \<Longrightarrow> n \<le> foldl plus 0 ns"
-  by (force intro: start_le_sum simp add: in_set_conv_decomp)
-
-lemma sum_eq_0_conv [iff]:
-  fixes m :: nat
-  shows "foldl plus m ns = 0 \<longleftrightarrow> m = 0 \<and> (\<forall>n \<in> set ns. n = 0)"
-  by (induct ns arbitrary: m) auto
-
 text{* Some syntactic sugar for summing a function over a list: *}
 
 syntax
@@ -3186,17 +3121,18 @@
 
 lemma listsum_eq_0_nat_iff_nat [simp]:
   "listsum ns = (0::nat) \<longleftrightarrow> (\<forall>n \<in> set ns. n = 0)"
-  by (simp add: listsum_def foldr_conv_foldl)
+  by (induct ns) simp_all
+
+lemma member_le_listsum_nat:
+  "(n :: nat) \<in> set ns \<Longrightarrow> n \<le> listsum ns"
+  by (induct ns) auto
 
 lemma elem_le_listsum_nat:
   "k < size ns \<Longrightarrow> ns ! k \<le> listsum (ns::nat list)"
-apply(induct ns arbitrary: k)
- apply simp
-apply(fastforce simp add:nth_Cons split: nat.split)
-done
+  by (rule member_le_listsum_nat) simp
 
 lemma listsum_update_nat:
-  "k<size ns \<Longrightarrow> listsum (ns[k := (n::nat)]) = listsum ns + n - ns ! k"
+  "k < size ns \<Longrightarrow> listsum (ns[k := (n::nat)]) = listsum ns + n - ns ! k"
 apply(induct ns arbitrary:k)
  apply (auto split:nat.split)
 apply(drule elem_le_listsum_nat)
@@ -4053,7 +3989,7 @@
     show "(insort_key f y \<circ> insort_key f x) zs = (insort_key f x \<circ> insort_key f y) zs"
       by (induct zs) (auto intro: * simp add: **)
   qed
-  then show ?thesis by (simp add: sort_key_def foldr_def)
+  then show ?thesis by (simp add: sort_key_def foldr_conv_fold)
 qed
 
 lemma (in linorder) sort_conv_fold:
@@ -4601,7 +4537,7 @@
 lemma listsp_inf_eq [simp]: "listsp (inf A B) = inf (listsp A) (listsp B)"
 proof (rule mono_inf [where f=listsp, THEN order_antisym])
   show "mono listsp" by (simp add: mono_def listsp_mono)
-  show "inf (listsp A) (listsp B) \<le> listsp (inf A B)" by (blast intro!: listsp_infI predicate1I)
+  show "inf (listsp A) (listsp B) \<le> listsp (inf A B)" by (blast intro!: listsp_infI)
 qed
 
 lemmas listsp_conj_eq [simp] = listsp_inf_eq [simplified inf_fun_def inf_bool_def]
@@ -5631,8 +5567,6 @@
 
 subsubsection {* Implementation of sets by lists *}
 
-text {* Basic operations *}
-
 lemma is_empty_set [code]:
   "Set.is_empty (set xs) \<longleftrightarrow> List.null xs"
   by (simp add: Set.is_empty_def null_def)
@@ -5676,6 +5610,17 @@
   "image f (set xs) = set (map f xs)"
   by simp
 
+lemma subset_code [code]:
+  "set xs \<le> B \<longleftrightarrow> (\<forall>x\<in>set xs. x \<in> B)"
+  "A \<le> List.coset ys \<longleftrightarrow> (\<forall>y\<in>set ys. y \<notin> A)"
+  "List.coset [] \<le> set [] \<longleftrightarrow> False"
+  by auto
+
+text {* A frequent case -- avoid intermediate sets *}
+lemma [code_unfold]:
+  "set xs \<subseteq> set ys \<longleftrightarrow> list_all (\<lambda>x. x \<in> set ys) xs"
+  by (auto simp: list_all_iff)
+
 lemma Ball_set [code]:
   "Ball (set xs) P \<longleftrightarrow> list_all P xs"
   by (simp add: list_all_iff)
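The new subset_code and code_unfold equations above execute \<subseteq> on explicit lists as a per-element membership test. The same idea as a tiny Haskell sketch (subsetViaListAll is an invented name, not the generated code):

  -- Subset test on list-represented sets: every element of xs must occur in ys,
  -- as in  set xs <= set ys  <->  list_all (%x. x : set ys) xs.
  subsetViaListAll :: Eq a => [a] -> [a] -> Bool
  subsetViaListAll xs ys = all (`elem` ys) xs

  main :: IO ()
  main = print ( subsetViaListAll [1, 2] [3, 2, 1 :: Int]
               , subsetViaListAll [4]    [3, 2, 1 :: Int] )   -- (True,False)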
@@ -5701,20 +5646,6 @@
   "Pow (set (x # xs)) = (let A = Pow (set xs) in A \<union> insert x ` A)"
   by (simp_all add: Pow_insert Let_def)
 
-text {* Further operations on sets *}
-
-(* Minimal refinement of equality on sets *)
-declare subset_eq[code del]
-lemma subset_code [code]:
-  "set xs <= B \<longleftrightarrow> (ALL x : set xs. x : B)"
-  "List.coset xs <= List.coset ys \<longleftrightarrow> set ys <= set xs"
-  "List.coset [] <= set [] \<longleftrightarrow> False"
-by auto
-
-text{* Optimising a frequent case: *}
-lemma [code_unfold]: "set xs \<subseteq> set ys \<longleftrightarrow> list_all (\<lambda>x. x \<in> set ys) xs"
-by (auto simp: list_all_iff)
-
 lemma setsum_code [code]:
   "setsum f (set xs) = listsum (map f (remdups xs))"
 by (simp add: listsum_distinct_conv_setsum_set)
@@ -5724,8 +5655,7 @@
 
 lemma [code]:
   "map_project f (set xs) = set (List.map_filter f xs)"
-unfolding map_project_def map_filter_def
-by auto (metis (lifting, mono_tags) CollectI image_eqI o_apply the.simps)
+  by (auto simp add: map_project_def map_filter_def image_def)
 
 hide_const (open) map_project
 
@@ -5747,7 +5677,7 @@
   "trancl (set xs) = ntrancl (card (set xs) - 1) (set xs)"
   by (simp add: finite_trancl_ntranl)
 
-lemma set_rel_comp [code]:
+lemma set_relcomp [code]:
   "set xys O set yzs = set ([(fst xy, snd yz). xy \<leftarrow> xys, yz \<leftarrow> yzs, snd xy = fst yz])"
   by (auto simp add: Bex_def)
 
@@ -5756,3 +5686,4 @@
   by (simp add: wf_iff_acyclic_if_finite)
 
 end
+
--- a/src/HOL/Matrix_LP/ComputeFloat.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/ComputeFloat.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/ComputeFloat.thy
+(*  Title:      HOL/Matrix_LP/ComputeFloat.thy
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Compute_Oracle/Compute_Oracle.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/Compute_Oracle.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/Compute_Oracle.thy
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/Compute_Oracle.thy
     Author:     Steven Obua, TU Munich
 
 Steven Obua's evaluator.
--- a/src/HOL/Matrix_LP/Compute_Oracle/am_compiler.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/am_compiler.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/am_compiler.ML
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/am_compiler.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Compute_Oracle/am_ghc.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/am_ghc.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/am_ghc.ML
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/am_ghc.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Compute_Oracle/am_interpreter.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/am_interpreter.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/am_interpreter.ML
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/am_interpreter.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Compute_Oracle/am_sml.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/am_sml.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/am_sml.ML
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/am_sml.ML
     Author:     Steven Obua
 
 TODO: "parameterless rewrite cannot be used in pattern": In a lot of
--- a/src/HOL/Matrix_LP/Compute_Oracle/compute.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/compute.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/compute.ML
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/compute.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Compute_Oracle/linker.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/linker.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Compute_Oracle/linker.ML
+(*  Title:      HOL/Matrix_LP/Compute_Oracle/linker.ML
     Author:     Steven Obua
 
 This module solves the problem that the computing oracle does not
--- a/src/HOL/Matrix_LP/Cplex.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Cplex.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Cplex.thy
+(*  Title:      HOL/Matrix_LP/Cplex.thy
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/CplexMatrixConverter.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/CplexMatrixConverter.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/CplexMatrixConverter.ML
+(*  Title:      HOL/Matrix_LP/CplexMatrixConverter.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Cplex_tools.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Cplex_tools.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Cplex_tools.ML
+(*  Title:      HOL/Matrix_LP/Cplex_tools.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/FloatSparseMatrixBuilder.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/FloatSparseMatrixBuilder.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/FloatSparseMatrixBuilder.ML
+(*  Title:      HOL/Matrix_LP/FloatSparseMatrixBuilder.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/LP.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/LP.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/LP.thy
+(*  Title:      HOL/Matrix_LP/LP.thy
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/Matrix.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/Matrix.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/Matrix.thy
+(*  Title:      HOL/Matrix_LP/Matrix.thy
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/SparseMatrix.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/SparseMatrix.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/SparseMatrix.thy
+(*  Title:      HOL/Matrix_LP/SparseMatrix.thy
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/fspmlp.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/fspmlp.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/fspmlp.ML
+(*  Title:      HOL/Matrix_LP/fspmlp.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Matrix_LP/matrixlp.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Matrix_LP/matrixlp.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Matrix/matrixlp.ML
+(*  Title:      HOL/Matrix_LP/matrixlp.ML
     Author:     Steven Obua
 *)
 
--- a/src/HOL/Metis_Examples/Big_O.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Metis_Examples/Big_O.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -146,17 +146,17 @@
 by (auto simp add: bigo_def)
 
 lemma bigo_plus_self_subset [intro]:
-  "O(f) \<oplus> O(f) <= O(f)"
+  "O(f) + O(f) <= O(f)"
 apply (auto simp add: bigo_alt_def set_plus_def)
 apply (rule_tac x = "c + ca" in exI)
 apply auto
 apply (simp add: ring_distribs func_plus)
 by (metis order_trans abs_triangle_ineq add_mono)
 
-lemma bigo_plus_idemp [simp]: "O(f) \<oplus> O(f) = O(f)"
+lemma bigo_plus_idemp [simp]: "O(f) + O(f) = O(f)"
 by (metis bigo_plus_self_subset bigo_zero set_eq_subset set_zero_plus2)
 
-lemma bigo_plus_subset [intro]: "O(f + g) <= O(f) \<oplus> O(g)"
+lemma bigo_plus_subset [intro]: "O(f + g) <= O(f) + O(g)"
 apply (rule subsetI)
 apply (auto simp add: bigo_def bigo_pos_const func_plus set_plus_def)
 apply (subst bigo_pos_const [symmetric])+
@@ -187,10 +187,10 @@
  apply (metis abs_triangle_ineq mult_le_cancel_left_pos)
 by (metis abs_ge_zero abs_of_pos zero_le_mult_iff)
 
-lemma bigo_plus_subset2 [intro]: "A <= O(f) \<Longrightarrow> B <= O(f) \<Longrightarrow> A \<oplus> B <= O(f)"
+lemma bigo_plus_subset2 [intro]: "A <= O(f) \<Longrightarrow> B <= O(f) \<Longrightarrow> A + B <= O(f)"
 by (metis bigo_plus_idemp set_plus_mono2)
 
-lemma bigo_plus_eq: "\<forall>x. 0 <= f x \<Longrightarrow> \<forall>x. 0 <= g x \<Longrightarrow> O(f + g) = O(f) \<oplus> O(g)"
+lemma bigo_plus_eq: "\<forall>x. 0 <= f x \<Longrightarrow> \<forall>x. 0 <= g x \<Longrightarrow> O(f + g) = O(f) + O(g)"
 apply (rule equalityI)
 apply (rule bigo_plus_subset)
 apply (simp add: bigo_alt_def set_plus_def func_plus)
@@ -284,25 +284,25 @@
 lemma bigo_abs5: "f =o O(g) \<Longrightarrow> (\<lambda>x. abs(f x)) =o O(g)"
 by (unfold bigo_def, auto)
 
-lemma bigo_elt_subset2 [intro]: "f : g +o O(h) \<Longrightarrow> O(f) <= O(g) \<oplus> O(h)"
+lemma bigo_elt_subset2 [intro]: "f : g +o O(h) \<Longrightarrow> O(f) <= O(g) + O(h)"
 proof -
   assume "f : g +o O(h)"
-  also have "... <= O(g) \<oplus> O(h)"
+  also have "... <= O(g) + O(h)"
     by (auto del: subsetI)
-  also have "... = O(\<lambda>x. abs(g x)) \<oplus> O(\<lambda>x. abs(h x))"
+  also have "... = O(\<lambda>x. abs(g x)) + O(\<lambda>x. abs(h x))"
     by (metis bigo_abs3)
   also have "... = O((\<lambda>x. abs(g x)) + (\<lambda>x. abs(h x)))"
     by (rule bigo_plus_eq [symmetric], auto)
   finally have "f : ...".
   then have "O(f) <= ..."
     by (elim bigo_elt_subset)
-  also have "... = O(\<lambda>x. abs(g x)) \<oplus> O(\<lambda>x. abs(h x))"
+  also have "... = O(\<lambda>x. abs(g x)) + O(\<lambda>x. abs(h x))"
     by (rule bigo_plus_eq, auto)
   finally show ?thesis
     by (simp add: bigo_abs3 [symmetric])
 qed
 
-lemma bigo_mult [intro]: "O(f) \<otimes> O(g) <= O(f * g)"
+lemma bigo_mult [intro]: "O(f) * O(g) <= O(f * g)"
 apply (rule subsetI)
 apply (subst bigo_def)
 apply (auto simp del: abs_mult mult_ac
@@ -358,14 +358,14 @@
 declare bigo_mult6 [simp]
 
 lemma bigo_mult7:
-"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) \<le> O(f\<Colon>'a \<Rightarrow> ('b\<Colon>linordered_field)) \<otimes> O(g)"
+"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) \<le> O(f\<Colon>'a \<Rightarrow> ('b\<Colon>linordered_field)) * O(g)"
 by (metis bigo_refl bigo_mult6 set_times_mono3)
 
 declare bigo_mult6 [simp del]
 declare bigo_mult7 [intro!]
 
 lemma bigo_mult8:
-"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) = O(f\<Colon>'a \<Rightarrow> ('b\<Colon>linordered_field)) \<otimes> O(g)"
+"\<forall>x. f x \<noteq> 0 \<Longrightarrow> O(f * g) = O(f\<Colon>'a \<Rightarrow> ('b\<Colon>linordered_field)) * O(g)"
 by (metis bigo_mult bigo_mult7 order_antisym_conv)
 
 lemma bigo_minus [intro]: "f : O(g) \<Longrightarrow> - f : O(g)"
@@ -575,7 +575,7 @@
 subsection {* Misc useful stuff *}
 
 lemma bigo_useful_intro: "A <= O(f) \<Longrightarrow> B <= O(f) \<Longrightarrow>
-  A \<oplus> B <= O(f)"
+  A + B <= O(f)"
   apply (subst bigo_plus_idemp [symmetric])
   apply (rule set_plus_mono2)
   apply assumption+
--- a/src/HOL/MicroJava/BV/BVExample.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/MicroJava/BV/BVExample.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -408,7 +408,7 @@
 
 lemma [code]:
   "unstables r step ss = 
-   foldr (\<lambda>p A. if \<not>stable r step ss p then insert p A else A) [0..<size ss] {}"
+   fold (\<lambda>p A. if \<not>stable r step ss p then insert p A else A) [0..<size ss] {}"
 proof -
   have "unstables r step ss = (UN p:{..<size ss}. if \<not>stable r step ss p then {p} else {})"
     apply (unfold unstables_def)
@@ -425,7 +425,7 @@
     apply simp+
     done
   also have "\<And>f. (UN p:{..<size ss}. f p) = Union (set (map f [0..<size ss]))" by auto
-  also note Sup_set_foldr also note foldr_map
+  also note Sup_set_fold also note fold_map
   also have "op \<union> \<circ> (\<lambda>p. if \<not> stable r step ss p then {p} else {}) = 
             (\<lambda>p A. if \<not>stable r step ss p then insert p A else A)"
     by(auto simp add: fun_eq_iff)
@@ -486,3 +486,4 @@
 *}
 
 end
+
--- a/src/HOL/MicroJava/J/State.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/MicroJava/J/State.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -190,9 +190,12 @@
 apply simp
 done
 
-instantiation loc' :: equal begin
+instantiation loc' :: equal
+begin
+
 definition "HOL.equal (l :: loc') l' \<longleftrightarrow> l = l'"
-instance proof qed(simp add: equal_loc'_def)
+instance by default (simp add: equal_loc'_def)
+
 end
 
 end
--- a/src/HOL/MicroJava/J/Type.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/MicroJava/J/Type.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -8,23 +8,40 @@
 theory Type imports JBasis begin
 
 typedecl cnam 
-instantiation cnam :: equal begin
+
+instantiation cnam :: equal
+begin
+
 definition "HOL.equal (cn :: cnam) cn' \<longleftrightarrow> cn = cn'"
-instance proof qed(simp add: equal_cnam_def)
+instance by default (simp add: equal_cnam_def)
+
 end
+
 text {* These instantiations only ensure that the merge in theory @{text "MicroJava"} succeeds. FIXME *}
-instantiation cnam :: typerep begin
+
+instantiation cnam :: typerep
+begin
+
 definition "typerep_class.typerep \<equiv> \<lambda>_ :: cnam itself. Typerep.Typerep (STR ''Type.cnam'') []"
-instance proof qed
+instance ..
+
 end
-instantiation cnam :: term_of begin
+
+instantiation cnam :: term_of
+begin
+
 definition "term_of_class.term_of (C :: cnam) \<equiv> 
   Code_Evaluation.Const (STR ''dummy_cnam'') (Typerep.Typerep (STR ''Type.cnam'') [])"
-instance proof qed
+instance ..
+
 end
-instantiation cnam :: partial_term_of begin
+
+instantiation cnam :: partial_term_of
+begin
+
 definition "partial_term_of_class.partial_term_of (C :: cnam itself) n = undefined"
-instance proof qed
+instance ..
+
 end
 
  -- "exceptions"
@@ -41,41 +58,73 @@
   | Cname cnam 
 
 typedecl vnam   -- "variable or field name"
-instantiation vnam :: equal begin
+
+instantiation vnam :: equal
+begin
+
 definition "HOL.equal (vn :: vnam) vn' \<longleftrightarrow> vn = vn'"
-instance proof qed(simp add: equal_vnam_def)
+instance by default (simp add: equal_vnam_def)
+
 end
-instantiation vnam :: typerep begin
+
+instantiation vnam :: typerep
+begin
+
 definition "typerep_class.typerep \<equiv> \<lambda>_ :: vnam itself. Typerep.Typerep (STR ''Type.vnam'') []"
-instance proof qed
+instance ..
+
 end
-instantiation vnam :: term_of begin
+
+instantiation vnam :: term_of
+begin
+
 definition "term_of_class.term_of (V :: vnam) \<equiv> 
   Code_Evaluation.Const (STR ''dummy_vnam'') (Typerep.Typerep (STR ''Type.vnam'') [])"
-instance proof qed
+instance ..
+
 end
-instantiation vnam :: partial_term_of begin
+
+instantiation vnam :: partial_term_of
+begin
+
 definition "partial_term_of_class.partial_term_of (V :: vnam itself) n = undefined"
-instance proof qed
+instance ..
+
 end
 
 typedecl mname  -- "method name"
-instantiation mname :: equal begin
+
+instantiation mname :: equal
+begin
+
 definition "HOL.equal (M :: mname) M' \<longleftrightarrow> M = M'"
-instance proof qed(simp add: equal_mname_def)
+instance by default (simp add: equal_mname_def)
+
 end
-instantiation mname :: typerep begin
+
+instantiation mname :: typerep
+begin
+
 definition "typerep_class.typerep \<equiv> \<lambda>_ :: mname itself. Typerep.Typerep (STR ''Type.mname'') []"
-instance proof qed
+instance ..
+
 end
-instantiation mname :: term_of begin
+
+instantiation mname :: term_of
+begin
+
 definition "term_of_class.term_of (M :: mname) \<equiv> 
   Code_Evaluation.Const (STR ''dummy_mname'') (Typerep.Typerep (STR ''Type.mname'') [])"
-instance proof qed
+instance ..
+
 end
-instantiation mname :: partial_term_of begin
+
+instantiation mname :: partial_term_of
+begin
+
 definition "partial_term_of_class.partial_term_of (M :: mname itself) n = undefined"
-instance proof qed
+instance ..
+
 end
 
 -- "names for @{text This} pointer and local/field variables"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,213 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle.ML
+    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
+*)
+
+signature MIRABELLE =
+sig
+  (*configuration*)
+  val logfile : string Config.T
+  val timeout : int Config.T
+  val start_line : int Config.T
+  val end_line : int Config.T
+
+  (*core*)
+  type init_action = int -> theory -> theory
+  type done_args = {last: Toplevel.state, log: string -> unit}
+  type done_action = int -> done_args -> unit
+  type run_args = {pre: Proof.state, post: Toplevel.state option,
+    timeout: Time.time, log: string -> unit, pos: Position.T, name: string}
+  type run_action = int -> run_args -> unit
+  type action = init_action * run_action * done_action
+  val catch : (int -> string) -> run_action -> run_action
+  val catch_result : (int -> string) -> 'a -> (int -> run_args -> 'a) ->
+    int -> run_args -> 'a
+  val register : action -> theory -> theory
+  val step_hook : Toplevel.transition -> Toplevel.state -> Toplevel.state ->
+    unit
+
+  (*utility functions*)
+  val can_apply : Time.time -> (Proof.context -> int -> tactic) ->
+    Proof.state -> bool
+  val theorems_in_proof_term : thm -> thm list
+  val theorems_of_sucessful_proof : Toplevel.state option -> thm list
+  val get_setting : (string * string) list -> string * string -> string
+  val get_int_setting : (string * string) list -> string * int -> int
+  val cpu_time : ('a -> 'b) -> 'a -> 'b * int
+end
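+
+(*An action is a triple (init, run, done): "init" is applied to the theory
+  when the action is registered, "run" is invoked by "step_hook" for every
+  whitelisted proof command (apply, by, proof), and "done" is called once at
+  the end of the theory.  A minimal hypothetical action, for illustration
+  only (see mirabelle_arith.ML for a real one):
+
+    fun hello_tag id = "#" ^ string_of_int id ^ " hello: "
+    fun hello_run id ({log, ...}: Mirabelle.run_args) =
+      log (hello_tag id ^ "goal seen")
+    val hello_action : Mirabelle.action =
+      (fn _ => I, Mirabelle.catch hello_tag hello_run, fn _ => fn _ => ())
+*)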
+
+
+
+structure Mirabelle : MIRABELLE =
+struct
+
+(* Mirabelle configuration *)
+
+val logfile = Attrib.setup_config_string @{binding mirabelle_logfile} (K "")
+val timeout = Attrib.setup_config_int @{binding mirabelle_timeout} (K 30)
+val start_line = Attrib.setup_config_int @{binding mirabelle_start_line} (K 0)
+val end_line = Attrib.setup_config_int @{binding mirabelle_end_line} (K ~1)
+
+
+(* Mirabelle core *)
+
+type init_action = int -> theory -> theory
+type done_args = {last: Toplevel.state, log: string -> unit}
+type done_action = int -> done_args -> unit
+type run_args = {pre: Proof.state, post: Toplevel.state option,
+  timeout: Time.time, log: string -> unit, pos: Position.T, name: string}
+type run_action = int -> run_args -> unit
+type action = init_action * run_action * done_action
+
+structure Actions = Theory_Data
+(
+  type T = (int * run_action * done_action) list
+  val empty = []
+  val extend = I
+  fun merge data = Library.merge (K true) data  (* FIXME potential data loss because of (K true) *)
+)
+
+
+fun log_exn log tag id e = log (tag id ^ "exception:\n" ^ General.exnMessage e)
+
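+(*"catch" turns a run_action into one that logs exceptions (interrupts are
+  re-raised) instead of propagating them; "catch_result" does the same for
+  result-returning functions, returning the supplied default on failure.*)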
+fun catch tag f id (st as {log, ...}: run_args) = (f id st; ())
+  handle exn =>
+    if Exn.is_interrupt exn then reraise exn else (log_exn log tag id exn; ())
+
+fun catch_result tag d f id (st as {log, ...}: run_args) = f id st
+  handle exn =>
+    if Exn.is_interrupt exn then reraise exn else (log_exn log tag id exn; d)
+
+fun register (init, run, done) thy =
+  let val id = length (Actions.get thy) + 1
+  in
+    thy
+    |> init id
+    |> Actions.map (cons (id, run, done))
+  end
+
+local
+
+fun log thy s =
+  let fun append_to n = if n = "" then K () else File.append (Path.explode n)
+  in append_to (Config.get_global thy logfile) (s ^ "\n") end
+  (* FIXME: with multithreading and parallel proofs enabled, we might need to
+     encapsulate this inside a critical section *)
+
+fun log_sep thy = log thy "------------------"
+
+fun apply_actions thy pos name info (pre, post, time) actions =
+  let
+    fun apply f = f {pre=pre, post=post, timeout=time, log=log thy, pos=pos, name=name}
+    fun run (id, run, _) = (apply (run id); log_sep thy)
+  in (log thy info; log_sep thy; List.app run actions) end
+
+fun in_range _ _ NONE = true
+  | in_range l r (SOME i) = (l <= i andalso (r < 0 orelse i <= r))
+
+fun only_within_range thy pos f x =
+  let val l = Config.get_global thy start_line and r = Config.get_global thy end_line
+  in if in_range l r (Position.line_of pos) then f x else () end
+
+in
+
+fun run_actions tr pre post =
+  let
+    val thy = Proof.theory_of pre
+    val pos = Toplevel.pos_of tr
+    val name = Toplevel.name_of tr
+    val st = (pre, post, Time.fromSeconds (Config.get_global thy timeout))
+
+    val str0 = string_of_int o the_default 0
+    val loc = str0 (Position.line_of pos) ^ ":" ^ str0 (Position.offset_of pos)
+    val info = "\n\nat " ^ loc ^ " (" ^ name ^ "):"
+  in
+    only_within_range thy pos (apply_actions thy pos name info st) (Actions.get thy)
+  end
+
+fun done_actions st =
+  let
+    val thy = Toplevel.theory_of st
+    val _ = log thy "\n\n";
+  in
+    thy
+    |> Actions.get
+    |> List.app (fn (id, _, done) => done id {last=st, log=log thy})
+  end
+
+end
+
+val whitelist = ["apply", "by", "proof"]
+
+fun step_hook tr pre post =
+ (* FIXME: might require wrapping into "interruptible" *)
+  if can (Proof.assert_backward o Toplevel.proof_of) pre andalso
+     member (op =) whitelist (Toplevel.name_of tr)
+  then run_actions tr (Toplevel.proof_of pre) (SOME post)
+  else if not (Toplevel.is_toplevel pre) andalso Toplevel.is_toplevel post
+  then done_actions pre
+  else ()   (* FIXME: add theory_hook here *)
+
+
+
+(* Mirabelle utility functions *)
+
+fun can_apply time tac st =
+  let
+    val {context = ctxt, facts, goal} = Proof.goal st
+    val full_tac = HEADGOAL (Method.insert_tac facts THEN' tac ctxt)
+  in
+    (case try (TimeLimit.timeLimit time (Seq.pull o full_tac)) goal of
+      SOME (SOME _) => true
+    | _ => false)
+  end
+
+local
+
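+(*Fold f over the theorems occurring in the given proof bodies.  Each proof
+  node is visited at most once (the Inttab records serials already seen), and
+  f is applied only to theorems reached without passing through another named
+  theorem, i.e. to the lemmas used directly by the proof rather than to their
+  own dependencies.*)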
+fun fold_body_thms f =
+  let
+    fun app n (PBody {thms, ...}) = thms |> fold (fn (i, (name, prop, body)) =>
+      fn (x, seen) =>
+        if Inttab.defined seen i then (x, seen)
+        else
+          let
+            val body' = Future.join body
+            val (x', seen') = app (n + (if name = "" then 0 else 1)) body'
+              (x, Inttab.update (i, ()) seen)
+        in (x' |> n = 0 ? f (name, prop, body'), seen') end)
+  in fn bodies => fn x => #1 (fold (app 0) bodies (x, Inttab.empty)) end
+
+in
+
+fun theorems_in_proof_term thm =
+  let
+    val all_thms = Global_Theory.all_thms_of (Thm.theory_of_thm thm)
+    fun collect (s, _, _) = if s <> "" then insert (op =) s else I
+    fun member_of xs (x, y) = if member (op =) xs x then SOME y else NONE
+    fun resolve_thms names = map_filter (member_of names) all_thms
+  in
+    resolve_thms (fold_body_thms collect [Thm.proof_body_of thm] [])
+  end
+
+end
+
+fun theorems_of_successful_proof state =
+  (case state of
+    NONE => []
+  | SOME st =>
+      if not (Toplevel.is_proof st) then []
+      else theorems_in_proof_term (#goal (Proof.goal (Toplevel.proof_of st))))
+
+fun get_setting settings (key, default) =
+  the_default default (AList.lookup (op =) settings key)
+
+fun get_int_setting settings (key, default) =
+  (case Option.map Int.fromString (AList.lookup (op =) settings key) of
+    SOME (SOME i) => i
+  | SOME NONE => error ("bad option: " ^ key)
+  | NONE => default)
+
+fun cpu_time f x =
+  let val ({cpu, ...}, y) = Timing.timing f x
+  in (y, Time.toMilliseconds cpu) end
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle_arith.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,21 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle_arith.ML
+    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
+*)
+
+structure Mirabelle_Arith : MIRABELLE_ACTION =
+struct
+
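+(*The simplest Mirabelle action: on every intercepted proof step, try
+  Arith_Data.arith_tac within the given timeout and log successes; timeouts
+  are logged by the TimeLimit handler below.*)
+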
+fun arith_tag id = "#" ^ string_of_int id ^ " arith: "
+
+fun init _ = I
+fun done _ _ = ()
+
+fun run id ({pre, timeout, log, ...}: Mirabelle.run_args) =
+  if Mirabelle.can_apply timeout Arith_Data.arith_tac pre
+  then log (arith_tag id ^ "succeeded")
+  else ()
+  handle TimeLimit.TimeOut => log (arith_tag id ^ "timeout")
+
+fun invoke _ = Mirabelle.register (init, Mirabelle.catch arith_tag run, done)
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle_metis.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,35 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle_metis.ML
+    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
+*)
+
+structure Mirabelle_Metis : MIRABELLE_ACTION =
+struct
+
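+(*Try to re-prove each goal with metis, using the theorems referenced by the
+  original proof (extracted from its proof term) together with the facts of
+  the local proof context.*)
+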
+fun metis_tag id = "#" ^ string_of_int id ^ " metis: "
+
+fun init _ = I
+fun done _ _ = ()
+
+fun run id ({pre, post, timeout, log, ...}: Mirabelle.run_args) =
+  let
+    val thms = Mirabelle.theorems_of_successful_proof post
+    val names = map Thm.get_name_hint thms
+    val add_info = if null names then I else suffix (":\n" ^ commas names)
+
+    val facts = Facts.props (Proof_Context.facts_of (Proof.context_of pre))
+
+    fun metis ctxt =
+      Metis_Tactic.metis_tac [] ATP_Problem_Generate.liftingN ctxt
+                             (thms @ facts)
+  in
+    (if Mirabelle.can_apply timeout metis pre then "succeeded" else "failed")
+    |> prefix (metis_tag id)
+    |> add_info
+    |> log
+  end
+  handle TimeLimit.TimeOut => log (metis_tag id ^ "timeout")
+       | ERROR msg => log (metis_tag id ^ "error: " ^ msg)
+
+fun invoke _ = Mirabelle.register (init, Mirabelle.catch metis_tag run, done)
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle_quickcheck.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,28 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle_quickcheck.ML
+    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
+*)
+
+structure Mirabelle_Quickcheck : MIRABELLE_ACTION =
+struct
+
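+(*Run Quickcheck on every goal, passing through the user-supplied
+  "iterations", "size" and "generator" settings, and log whether a
+  counterexample was found.*)
+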
+fun qc_tag id = "#" ^ string_of_int id ^ " quickcheck: "
+
+fun init _ = I
+fun done _ _ = ()
+
+fun run args id ({pre, timeout, log, ...}: Mirabelle.run_args) =
+  let
+    val has_valid_key = member (op =) ["iterations", "size", "generator"] o fst
+    val quickcheck = Quickcheck.quickcheck (map (apsnd single) (filter has_valid_key args)) 1
+  in
+    (case TimeLimit.timeLimit timeout quickcheck pre of
+      NONE => log (qc_tag id ^ "no counterexample")
+    | SOME _ => log (qc_tag id ^ "counterexample found"))
+  end
+  handle TimeLimit.TimeOut => log (qc_tag id ^ "timeout")
+       | ERROR msg => log (qc_tag id ^ "error: " ^ msg)
+
+fun invoke args =
+  Mirabelle.register (init, Mirabelle.catch qc_tag (run args), done)
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle_refute.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,39 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle_refute.ML
+    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
+*)
+
+structure Mirabelle_Refute : MIRABELLE_ACTION =
+struct
+
+
+(* FIXME:
+fun refute_action args timeout {pre=st, ...} = 
+  let
+    val subgoal = 1
+    val thy = Proof.theory_of st
+    val thm = #goal (Proof.raw_goal st)
+
+    val refute = Refute.refute_goal thy args thm
+    val _ = TimeLimit.timeLimit timeout refute subgoal
+  in
+    val writ_log = Substring.full (the (Symtab.lookup tab "writeln"))
+    val warn_log = Substring.full (the (Symtab.lookup tab "warning"))
+
+    val r =
+      if Substring.isSubstring "model found" writ_log
+      then
+        if Substring.isSubstring "spurious" warn_log
+        then SOME "potential counterexample"
+        else SOME "real counterexample (bug?)"
+      else
+        if Substring.isSubstring "time limit" writ_log
+        then SOME "no counterexample (timeout)"
+        else if Substring.isSubstring "Search terminated" writ_log
+        then SOME "no counterexample (normal termination)"
+        else SOME "no counterexample (unknown)"
+  in r end
+*)
+
+fun invoke args = I (*Mirabelle.register ("refute", refute_action args)*)
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle_sledgehammer.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,767 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle_sledgehammer.ML
+    Author:     Jasmin Blanchette and Sascha Boehme and Tobias Nipkow, TU Munich
+*)
+
+structure Mirabelle_Sledgehammer : MIRABELLE_ACTION =
+struct
+
+(*To facilitate synching the description of Mirabelle Sledgehammer parameters
+ (in ../lib/Tools/mirabelle) with the parameters actually used by this
+ interface, the former extracts PARAMETER and DESCRIPTION from code below which
+ has this pattern (provided it appears in a single line):
+   val .*K = "PARAMETER" (*DESCRIPTION*)
+*)
+(*NOTE: descriptions mention parameters (particularly NAME) without a defined range.*)
+val proverK = "prover" (*=NAME: name of the external prover to call*)
+val prover_timeoutK = "prover_timeout" (*=TIME: timeout for invoked ATP (seconds of process time)*)
+val keepK = "keep" (*=PATH: path where to keep temporary files created by sledgehammer*)
+val minimizeK = "minimize" (*: enable minimization of theorem set found by sledgehammer*)
+                           (*refers to minimization attempted by Mirabelle*)
+val minimize_timeoutK = "minimize_timeout" (*=TIME: timeout for each minimization step (seconds of process time)*)
+
+val reconstructorK = "reconstructor" (*=NAME: how to reconstruct proofs (i.e. using metis/smt)*)
+val metis_ftK = "metis_ft" (*: apply metis with fully-typed encoding to the theorems found by sledgehammer*)
+
+val max_relevantK = "max_relevant" (*=NUM: max. relevant clauses to use*)
+val max_callsK = "max_calls" (*=NUM: max. no. of calls to sledgehammer*)
+val preplay_timeoutK = "preplay_timeout" (*=TIME: timeout for finding reconstructed proof*)
+val sh_minimizeK = "sh_minimize" (*: instruct sledgehammer to run its minimizer*)
+
+val check_trivialK = "check_trivial" (*: check if goals are "trivial" (false by default)*)
+val type_encK = "type_enc" (*=STRING: type encoding scheme*)
+val lam_transK = "lam_trans" (*=STRING: lambda translation scheme*)
+val strictK = "strict" (*=BOOL: run in strict mode*)
+val sliceK = "slice" (*=BOOL: allow sledgehammer-level strategy-scheduling*)
+val uncurried_aliasesK = "uncurried_aliases" (*=SMART_BOOL: use fresh function names to alias curried applications*)
+val e_selection_heuristicK = "e_selection_heuristic" (*: FIXME*)
+val term_orderK = "term_order" (*: FIXME*)
+val force_sosK = "force_sos" (*: use SOS*)
+val max_new_mono_instancesK = "max_new_mono_instances" (*=NUM: max. new monomorphic instances*)
+val max_mono_itersK = "max_mono_iters" (*=NUM: max. iterations of monomorphiser*)
+
+fun sh_tag id = "#" ^ string_of_int id ^ " sledgehammer: "
+fun minimize_tag id = "#" ^ string_of_int id ^ " minimize (sledgehammer): "
+fun reconstructor_tag reconstructor id =
+  "#" ^ string_of_int id ^ " " ^ (!reconstructor) ^ " (sledgehammer): "
+
+val separator = "-----"
+
+(*FIXME sensible to have Mirabelle-level Sledgehammer defaults?*)
+(*defaults used in this Mirabelle action*)
+val preplay_timeout_default = "4"
+val lam_trans_default = "smart"
+val uncurried_aliases_default = "smart"
+val type_enc_default = "smart"
+val strict_default = "false"
+val max_relevant_default = "smart"
+val slice_default = "true"
+val max_calls_default = "10000000"
+val trivial_default = "false"
+val minimize_timeout_default = 5
+
+(*If a key is present in args then augment a list with its pair*)
+(*This is used to avoid fixing default values at the Mirabelle level, and
+  instead use the default values of the tool (Sledgehammer in this case).*)
+fun available_parameter args key label list =
+  let
+    val value = AList.lookup (op =) args key
+  in if is_some value then (label, the value) :: list else list end
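+(*For example, available_parameter args sh_minimizeK "minimize" [] yields
+  [("minimize", v)] if args maps "sh_minimize" to some v, and [] otherwise.*)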
+
+
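+(*Per-action statistics.  All times are in milliseconds (cf. Mirabelle.cpu_time
+  and Time.toMilliseconds below); the "nontriv" counters only count goals that
+  were not already discharged by the "check_trivial" pre-check.*)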
+datatype sh_data = ShData of {
+  calls: int,
+  success: int,
+  nontriv_calls: int,
+  nontriv_success: int,
+  lemmas: int,
+  max_lems: int,
+  time_isa: int,
+  time_prover: int,
+  time_prover_fail: int}
+
+datatype re_data = ReData of {
+  calls: int,
+  success: int,
+  nontriv_calls: int,
+  nontriv_success: int,
+  proofs: int,
+  time: int,
+  timeout: int,
+  lemmas: int * int * int,
+  posns: (Position.T * bool) list
+  }
+
+datatype min_data = MinData of {
+  succs: int,
+  ab_ratios: int
+  }
+
+fun make_sh_data
+      (calls,success,nontriv_calls,nontriv_success,lemmas,max_lems,time_isa,
+       time_prover,time_prover_fail) =
+  ShData{calls=calls, success=success, nontriv_calls=nontriv_calls,
+         nontriv_success=nontriv_success, lemmas=lemmas, max_lems=max_lems,
+         time_isa=time_isa, time_prover=time_prover,
+         time_prover_fail=time_prover_fail}
+
+fun make_min_data (succs, ab_ratios) =
+  MinData{succs=succs, ab_ratios=ab_ratios}
+
+fun make_re_data (calls,success,nontriv_calls,nontriv_success,proofs,time,
+                  timeout,lemmas,posns) =
+  ReData{calls=calls, success=success, nontriv_calls=nontriv_calls,
+         nontriv_success=nontriv_success, proofs=proofs, time=time,
+         timeout=timeout, lemmas=lemmas, posns=posns}
+
+val empty_sh_data = make_sh_data (0, 0, 0, 0, 0, 0, 0, 0, 0)
+val empty_min_data = make_min_data (0, 0)
+val empty_re_data = make_re_data (0, 0, 0, 0, 0, 0, 0, (0,0,0), [])
+
+fun tuple_of_sh_data (ShData {calls, success, nontriv_calls, nontriv_success,
+                              lemmas, max_lems, time_isa,
+  time_prover, time_prover_fail}) = (calls, success, nontriv_calls,
+  nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail)
+
+fun tuple_of_min_data (MinData {succs, ab_ratios}) = (succs, ab_ratios)
+
+fun tuple_of_re_data (ReData {calls, success, nontriv_calls, nontriv_success,
+  proofs, time, timeout, lemmas, posns}) = (calls, success, nontriv_calls,
+  nontriv_success, proofs, time, timeout, lemmas, posns)
+
+
+datatype reconstructor_mode =
+  Unminimized | Minimized | UnminimizedFT | MinimizedFT
+
+datatype data = Data of {
+  sh: sh_data,
+  min: min_data,
+  re_u: re_data, (* reconstructor with unminimized set of lemmas *)
+  re_m: re_data, (* reconstructor with minimized set of lemmas *)
+  re_uft: re_data, (* reconstructor with unminimized set of lemmas and fully-typed *)
+  re_mft: re_data, (* reconstructor with minimized set of lemmas and fully-typed *)
+  mini: bool   (* with minimization *)
+  }
+
+fun make_data (sh, min, re_u, re_m, re_uft, re_mft, mini) =
+  Data {sh=sh, min=min, re_u=re_u, re_m=re_m, re_uft=re_uft, re_mft=re_mft,
+    mini=mini}
+
+val empty_data = make_data (empty_sh_data, empty_min_data,
+  empty_re_data, empty_re_data, empty_re_data, empty_re_data, false)
+
+fun map_sh_data f (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
+  let val sh' = make_sh_data (f (tuple_of_sh_data sh))
+  in make_data (sh', min, re_u, re_m, re_uft, re_mft, mini) end
+
+fun map_min_data f (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
+  let val min' = make_min_data (f (tuple_of_min_data min))
+  in make_data (sh, min', re_u, re_m, re_uft, re_mft, mini) end
+
+fun map_re_data f m (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
+  let
+    fun map_me g Unminimized   (u, m, uft, mft) = (g u, m, uft, mft)
+      | map_me g Minimized     (u, m, uft, mft) = (u, g m, uft, mft)
+      | map_me g UnminimizedFT (u, m, uft, mft) = (u, m, g uft, mft)
+      | map_me g MinimizedFT   (u, m, uft, mft) = (u, m, uft, g mft)
+
+    val f' = make_re_data o f o tuple_of_re_data
+
+    val (re_u', re_m', re_uft', re_mft') =
+      map_me f' m (re_u, re_m, re_uft, re_mft)
+  in make_data (sh, min, re_u', re_m', re_uft', re_mft', mini) end
+
+fun set_mini mini (Data {sh, min, re_u, re_m, re_uft, re_mft, ...}) =
+  make_data (sh, min, re_u, re_m, re_uft, re_mft, mini)
+
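+(*The "lemmas" field of re_data is a triple (sum, sum of squares, maximum) of
+  the number of lemmas used per successful reconstructor call.*)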
+fun inc_max (n:int) (s,sos,m) = (s+n, sos + n*n, Int.max(m,n));
+
+val inc_sh_calls =  map_sh_data
+  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
+    => (calls + 1, success, nontriv_calls, nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail))
+
+val inc_sh_success = map_sh_data
+  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
+    => (calls, success + 1, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail))
+
+val inc_sh_nontriv_calls =  map_sh_data
+  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
+    => (calls, success, nontriv_calls + 1, nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail))
+
+val inc_sh_nontriv_success = map_sh_data
+  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
+    => (calls, success, nontriv_calls, nontriv_success + 1, lemmas,max_lems, time_isa, time_prover, time_prover_fail))
+
+fun inc_sh_lemmas n = map_sh_data
+  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
+    => (calls,success,nontriv_calls, nontriv_success, lemmas+n,max_lems,time_isa,time_prover,time_prover_fail))
+
+fun inc_sh_max_lems n = map_sh_data
+  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
+    => (calls,success,nontriv_calls, nontriv_success, lemmas,Int.max(max_lems,n),time_isa,time_prover,time_prover_fail))
+
+fun inc_sh_time_isa t = map_sh_data
+  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
+    => (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa + t,time_prover,time_prover_fail))
+
+fun inc_sh_time_prover t = map_sh_data
+  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
+    => (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover + t,time_prover_fail))
+
+fun inc_sh_time_prover_fail t = map_sh_data
+  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
+    => (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail + t))
+
+val inc_min_succs = map_min_data
+  (fn (succs,ab_ratios) => (succs+1, ab_ratios))
+
+fun inc_min_ab_ratios r = map_min_data
+  (fn (succs, ab_ratios) => (succs, ab_ratios+r))
+
+val inc_reconstructor_calls = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls + 1, success, nontriv_calls, nontriv_success, proofs, time, timeout, lemmas,posns))
+
+val inc_reconstructor_success = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success + 1, nontriv_calls, nontriv_success, proofs, time, timeout, lemmas,posns))
+
+val inc_reconstructor_nontriv_calls = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success, nontriv_calls + 1, nontriv_success, proofs, time, timeout, lemmas,posns))
+
+val inc_reconstructor_nontriv_success = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success, nontriv_calls, nontriv_success + 1, proofs, time, timeout, lemmas,posns))
+
+val inc_reconstructor_proofs = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success, nontriv_calls, nontriv_success, proofs + 1, time, timeout, lemmas,posns))
+
+fun inc_reconstructor_time m t = map_re_data
+ (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+  => (calls, success, nontriv_calls, nontriv_success, proofs, time + t, timeout, lemmas,posns)) m
+
+val inc_reconstructor_timeout = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success, nontriv_calls, nontriv_success, proofs, time, timeout + 1, lemmas,posns))
+
+fun inc_reconstructor_lemmas m n = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success, nontriv_calls, nontriv_success, proofs, time, timeout, inc_max n lemmas, posns)) m
+
+fun inc_reconstructor_posns m pos = map_re_data
+  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
+    => (calls, success, nontriv_calls, nontriv_success, proofs, time, timeout, lemmas, pos::posns)) m
+
+val str0 = string_of_int o the_default 0
+
+local
+
+val str = string_of_int
+val str3 = Real.fmt (StringCvt.FIX (SOME 3))
+fun percentage a b = string_of_int (a * 100 div b)
+fun time t = Real.fromInt t / 1000.0
+fun avg_time t n =
+  if n > 0 then (Real.fromInt t / 1000.0) / Real.fromInt n else 0.0
+
+fun log_sh_data log
+    (calls, success, nontriv_calls, nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail) =
+ (log ("Total number of sledgehammer calls: " ^ str calls);
+  log ("Number of successful sledgehammer calls: " ^ str success);
+  log ("Number of sledgehammer lemmas: " ^ str lemmas);
+  log ("Max number of sledgehammer lemmas: " ^ str max_lems);
+  log ("Success rate: " ^ percentage success calls ^ "%");
+  log ("Total number of nontrivial sledgehammer calls: " ^ str nontriv_calls);
+  log ("Number of successful nontrivial sledgehammer calls: " ^ str nontriv_success);
+  log ("Total time for sledgehammer calls (Isabelle): " ^ str3 (time time_isa));
+  log ("Total time for successful sledgehammer calls (ATP): " ^ str3 (time time_prover));
+  log ("Total time for failed sledgehammer calls (ATP): " ^ str3 (time time_prover_fail));
+  log ("Average time for sledgehammer calls (Isabelle): " ^
+    str3 (avg_time time_isa calls));
+  log ("Average time for successful sledgehammer calls (ATP): " ^
+    str3 (avg_time time_prover success));
+  log ("Average time for failed sledgehammer calls (ATP): " ^
+    str3 (avg_time time_prover_fail (calls - success)))
+  )
+
+fun str_of_pos (pos, triv) =
+  str0 (Position.line_of pos) (* ^ ":" ^ str0 (Position.offset_of pos) *) ^
+  (if triv then "[T]" else "")
+
+fun log_re_data log tag sh_calls (re_calls, re_success, re_nontriv_calls,
+     re_nontriv_success, re_proofs, re_time, re_timeout,
+    (lemmas, lems_sos, lems_max), re_posns) =
+ (log ("Total number of " ^ tag ^ "reconstructor calls: " ^ str re_calls);
+  log ("Number of successful " ^ tag ^ "reconstructor calls: " ^ str re_success ^
+    " (proof: " ^ str re_proofs ^ ")");
+  log ("Number of " ^ tag ^ "reconstructor timeouts: " ^ str re_timeout);
+  log ("Success rate: " ^ percentage re_success sh_calls ^ "%");
+  log ("Total number of nontrivial " ^ tag ^ "reconstructor calls: " ^ str re_nontriv_calls);
+  log ("Number of successful nontrivial " ^ tag ^ "reconstructor calls: " ^ str re_nontriv_success ^
+    " (proof: " ^ str re_proofs ^ ")");
+  log ("Number of successful " ^ tag ^ "reconstructor lemmas: " ^ str lemmas);
+  log ("SOS of successful " ^ tag ^ "reconstructor lemmas: " ^ str lems_sos);
+  log ("Max number of successful " ^ tag ^ "reconstructor lemmas: " ^ str lems_max);
+  log ("Total time for successful " ^ tag ^ "reconstructor calls: " ^ str3 (time re_time));
+  log ("Average time for successful " ^ tag ^ "reconstructor calls: " ^
+    str3 (avg_time re_time re_success));
+  if tag=""
+  then log ("Proved: " ^ space_implode " " (map str_of_pos re_posns))
+  else ()
+ )
+
+fun log_min_data log (succs, ab_ratios) =
+  (log ("Number of successful minimizations: " ^ string_of_int succs);
+   log ("After/before ratios: " ^ string_of_int ab_ratios)
+  )
+
+in
+
+fun log_data id log (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
+  let
+    val ShData {calls=sh_calls, ...} = sh
+
+    fun app_if (ReData {calls, ...}) f = if calls > 0 then f () else ()
+    fun log_re tag m =
+      log_re_data log tag sh_calls (tuple_of_re_data m)
+    fun log_reconstructor (tag1, m1) (tag2, m2) = app_if m1 (fn () =>
+      (log_re tag1 m1; log ""; app_if m2 (fn () => log_re tag2 m2)))
+  in
+    if sh_calls > 0
+    then
+     (log ("\n\n\nReport #" ^ string_of_int id ^ ":\n");
+      log_sh_data log (tuple_of_sh_data sh);
+      log "";
+      if not mini
+      then log_reconstructor ("", re_u) ("fully-typed ", re_uft)
+      else
+        app_if re_u (fn () =>
+         (log_reconstructor ("unminimized ", re_u) ("unminimized fully-typed ", re_uft);
+          log "";
+          app_if re_m (fn () =>
+            (log_min_data log (tuple_of_min_data min); log "";
+             log_reconstructor ("", re_m) ("fully-typed ", re_mft))))))
+    else ()
+  end
+
+end
+
+
+(* Warning: we implicitly assume single-threaded execution here! *)
+val data = Unsynchronized.ref ([] : (int * data) list)
+
+fun init id thy = (Unsynchronized.change data (cons (id, empty_data)); thy)
+fun done id ({log, ...}: Mirabelle.done_args) =
+  AList.lookup (op =) (!data) id
+  |> Option.map (log_data id log)
+  |> K ()
+
+fun change_data id f = (Unsynchronized.change data (AList.map_entry (op =) id f); ())
+
+
+fun get_prover ctxt args =
+  let
+    fun default_prover_name () =
+      hd (#provers (Sledgehammer_Isar.default_params ctxt []))
+      handle List.Empty => error "No ATP available."
+    fun get_prover name =
+      (name, Sledgehammer_Run.get_minimizing_prover ctxt
+                Sledgehammer_Provers.Normal name)
+  in
+    (case AList.lookup (op =) args proverK of
+      SOME name => get_prover name
+    | NONE => get_prover (default_prover_name ()))
+  end
+
+type stature = ATP_Problem_Generate.stature
+
+(* hack: guess the reconstructor from Sledgehammer's output message unless given explicitly *)
+fun reconstructor_from_msg args msg =
+  (case AList.lookup (op =) args reconstructorK of
+    SOME name => name
+  | NONE =>
+    if String.isSubstring "metis (" msg then
+      msg |> Substring.full
+          |> Substring.position "metis ("
+          |> snd |> Substring.position ")"
+          |> fst |> Substring.string
+          |> suffix ")"
+    else if String.isSubstring "metis" msg then
+      "metis"
+    else
+      "smt")
+
+local
+
+datatype sh_result =
+  SH_OK of int * int * (string * stature) list |
+  SH_FAIL of int * int |
+  SH_ERROR
+
+fun run_sh prover_name prover type_enc strict max_relevant slice lam_trans
+        uncurried_aliases e_selection_heuristic term_order force_sos
+        hard_timeout timeout preplay_timeout sh_minimizeLST
+        max_new_mono_instancesLST max_mono_itersLST dir pos st =
+  let
+    val {context = ctxt, facts = chained_ths, goal} = Proof.goal st
+    val i = 1
+    fun set_file_name (SOME dir) =
+        Config.put Sledgehammer_Provers.dest_dir dir
+        #> Config.put Sledgehammer_Provers.problem_prefix
+          ("prob_" ^ str0 (Position.line_of pos) ^ "__")
+        #> Config.put SMT_Config.debug_files
+          (dir ^ "/" ^ Name.desymbolize false (ATP_Util.timestamp ()) ^ "_"
+          ^ serial_string ())
+      | set_file_name NONE = I
+    val st' =
+      st
+      |> Proof.map_context
+           (set_file_name dir
+            #> (Option.map (Config.put ATP_Systems.e_selection_heuristic)
+                  e_selection_heuristic |> the_default I)
+            #> (Option.map (Config.put ATP_Systems.term_order)
+                  term_order |> the_default I)
+            #> (Option.map (Config.put ATP_Systems.force_sos)
+                  force_sos |> the_default I))
+    val params as {relevance_thresholds, max_relevant, slice, ...} =
+      Sledgehammer_Isar.default_params ctxt
+         ([("verbose", "true"),
+           ("type_enc", type_enc),
+           ("strict", strict),
+           ("lam_trans", lam_trans |> the_default lam_trans_default),
+           ("uncurried_aliases", uncurried_aliases |> the_default uncurried_aliases_default),
+           ("max_relevant", max_relevant),
+           ("slice", slice),
+           ("timeout", string_of_int timeout),
+           ("preplay_timeout", preplay_timeout)]
+          |> sh_minimizeLST (*don't confuse the two minimization flags*)
+          |> max_new_mono_instancesLST
+          |> max_mono_itersLST)
+    val default_max_relevant =
+      Sledgehammer_Provers.default_max_relevant_for_prover ctxt slice
+        prover_name
+    val is_appropriate_prop =
+      Sledgehammer_Provers.is_appropriate_prop_for_prover ctxt prover_name
+    val is_built_in_const =
+      Sledgehammer_Provers.is_built_in_const_for_prover ctxt prover_name
+    val relevance_fudge =
+      Sledgehammer_Provers.relevance_fudge_for_prover ctxt prover_name
+    val relevance_override = {add = [], del = [], only = false}
+    val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal ctxt goal i
+    val time_limit =
+      (case hard_timeout of
+        NONE => I
+      | SOME secs => TimeLimit.timeLimit (Time.fromSeconds secs))
+    fun failed failure =
+      ({outcome = SOME failure, used_facts = [], run_time = Time.zeroTime,
+        preplay =
+          K (ATP_Proof_Reconstruct.Failed_to_Play Sledgehammer_Provers.plain_metis),
+        message = K "", message_tail = ""}, ~1)
+    val ({outcome, used_facts, run_time, preplay, message, message_tail}
+         : Sledgehammer_Provers.prover_result,
+        time_isa) = time_limit (Mirabelle.cpu_time (fn () =>
+      let
+        val _ = if is_appropriate_prop concl_t then ()
+                else raise Fail "inappropriate"
+        val ho_atp = Sledgehammer_Provers.is_ho_atp ctxt prover_name
+        val facts =
+          Sledgehammer_Filter.nearly_all_facts ctxt ho_atp relevance_override
+            chained_ths hyp_ts concl_t
+          |> filter (is_appropriate_prop o prop_of o snd)
+          |> Sledgehammer_Filter.relevant_facts ctxt relevance_thresholds
+                 (the_default default_max_relevant max_relevant)
+                 is_built_in_const relevance_fudge relevance_override
+                 chained_ths hyp_ts concl_t
+        val problem =
+          {state = st', goal = goal, subgoal = i,
+           subgoal_count = Sledgehammer_Util.subgoal_count st,
+           facts = facts |> map Sledgehammer_Provers.Untranslated_Fact,
+           smt_filter = NONE}
+      in prover params (K (K (K ""))) problem end)) ()
+      handle TimeLimit.TimeOut => failed ATP_Proof.TimedOut
+           | Fail "inappropriate" => failed ATP_Proof.Inappropriate
+    val time_prover = run_time |> Time.toMilliseconds
+    val msg = message (preplay ()) ^ message_tail
+  in
+    case outcome of
+      NONE => (msg, SH_OK (time_isa, time_prover, used_facts))
+    | SOME _ => (msg, SH_FAIL (time_isa, time_prover))
+  end
+  handle ERROR msg => ("error: " ^ msg, SH_ERROR)
+
+fun thms_of_name ctxt name =
+  let
+    val lex = Keyword.get_lexicons
+    val get = maps (Proof_Context.get_fact ctxt o fst)
+  in
+    Source.of_string name
+    |> Symbol.source
+    |> Token.source {do_recover=SOME false} lex Position.start
+    |> Token.source_proper
+    |> Source.source Token.stopper (Parse_Spec.xthms1 >> get) NONE
+    |> Source.exhaust
+  end
+
+in
+
+fun run_sledgehammer trivial args reconstructor named_thms id
+      ({pre=st, log, pos, ...}: Mirabelle.run_args) =
+  let
+    val triv_str = if trivial then "[T] " else ""
+    val _ = change_data id inc_sh_calls
+    val _ = if trivial then () else change_data id inc_sh_nontriv_calls
+    val (prover_name, prover) = get_prover (Proof.context_of st) args
+    val type_enc = AList.lookup (op =) args type_encK |> the_default type_enc_default
+    val strict = AList.lookup (op =) args strictK |> the_default strict_default
+    val max_relevant = AList.lookup (op =) args max_relevantK |> the_default max_relevant_default
+    val slice = AList.lookup (op =) args sliceK |> the_default slice_default
+    val lam_trans = AList.lookup (op =) args lam_transK
+    val uncurried_aliases = AList.lookup (op =) args uncurried_aliasesK
+    val e_selection_heuristic = AList.lookup (op =) args e_selection_heuristicK
+    val term_order = AList.lookup (op =) args term_orderK
+    val force_sos = AList.lookup (op =) args force_sosK
+      |> Option.map (curry (op <>) "false")
+    val dir = AList.lookup (op =) args keepK
+    val timeout = Mirabelle.get_int_setting args (prover_timeoutK, 30)
+    (* always use a hard timeout, but give some slack so that the automatic
+       minimizer has a chance to do its magic *)
+    val preplay_timeout = AList.lookup (op =) args preplay_timeoutK
+      |> the_default preplay_timeout_default
+    val sh_minimizeLST = available_parameter args sh_minimizeK "minimize"
+    val max_new_mono_instancesLST =
+      available_parameter args max_new_mono_instancesK max_new_mono_instancesK
+    val max_mono_itersLST = available_parameter args max_mono_itersK max_mono_itersK
+    val hard_timeout = SOME (2 * timeout)
+    val (msg, result) =
+      run_sh prover_name prover type_enc strict max_relevant slice lam_trans
+        uncurried_aliases e_selection_heuristic term_order force_sos
+        hard_timeout timeout preplay_timeout sh_minimizeLST
+        max_new_mono_instancesLST max_mono_itersLST dir pos st
+  in
+    case result of
+      SH_OK (time_isa, time_prover, names) =>
+        let
+          fun get_thms (name, stature) =
+            try (thms_of_name (Proof.context_of st)) name
+            |> Option.map (pair (name, stature))
+        in
+          change_data id inc_sh_success;
+          if trivial then () else change_data id inc_sh_nontriv_success;
+          change_data id (inc_sh_lemmas (length names));
+          change_data id (inc_sh_max_lems (length names));
+          change_data id (inc_sh_time_isa time_isa);
+          change_data id (inc_sh_time_prover time_prover);
+          reconstructor := reconstructor_from_msg args msg;
+          named_thms := SOME (map_filter get_thms names);
+          log (sh_tag id ^ triv_str ^ "succeeded (" ^ string_of_int time_isa ^ "+" ^
+            string_of_int time_prover ^ ") [" ^ prover_name ^ "]:\n" ^ msg)
+        end
+    | SH_FAIL (time_isa, time_prover) =>
+        let
+          val _ = change_data id (inc_sh_time_isa time_isa)
+          val _ = change_data id (inc_sh_time_prover_fail time_prover)
+        in log (sh_tag id ^ triv_str ^ "failed: " ^ msg) end
+    | SH_ERROR => log (sh_tag id ^ "failed: " ^ msg)
+  end
+
+end
+
+fun run_minimize args reconstructor named_thms id
+        ({pre=st, log, ...}: Mirabelle.run_args) =
+  let
+    val ctxt = Proof.context_of st
+    val n0 = length (these (!named_thms))
+    val (prover_name, _) = get_prover ctxt args
+    val type_enc = AList.lookup (op =) args type_encK |> the_default type_enc_default
+    val strict = AList.lookup (op =) args strictK |> the_default strict_default
+    val timeout =
+      AList.lookup (op =) args minimize_timeoutK
+      |> Option.map (fst o read_int o raw_explode)  (* FIXME Symbol.explode (?) *)
+      |> the_default minimize_timeout_default
+    val preplay_timeout = AList.lookup (op =) args preplay_timeoutK
+      |> the_default preplay_timeout_default
+    val sh_minimizeLST = available_parameter args sh_minimizeK "minimize"
+    val max_new_mono_instancesLST =
+      available_parameter args max_new_mono_instancesK max_new_mono_instancesK
+    val max_mono_itersLST = available_parameter args max_mono_itersK max_mono_itersK
+    val params = Sledgehammer_Isar.default_params ctxt
+     ([("provers", prover_name),
+       ("verbose", "true"),
+       ("type_enc", type_enc),
+       ("strict", strict),
+       ("timeout", string_of_int timeout),
+       ("preplay_timeout", preplay_timeout)]
+      |> sh_minimizeLST (*don't confuse the two minimization flags*)
+      |> max_new_mono_instancesLST
+      |> max_mono_itersLST)
+    val minimize =
+      Sledgehammer_Minimize.minimize_facts prover_name params
+          true 1 (Sledgehammer_Util.subgoal_count st)
+    val _ = log separator
+    val (used_facts, (preplay, message, message_tail)) =
+      minimize st (these (!named_thms))
+    val msg = message (preplay ()) ^ message_tail
+  in
+    case used_facts of
+      SOME named_thms' =>
+        (change_data id inc_min_succs;
+         change_data id (inc_min_ab_ratios ((100 * length named_thms') div n0));
+         if length named_thms' = n0
+         then log (minimize_tag id ^ "already minimal")
+         else (reconstructor := reconstructor_from_msg args msg;
+               named_thms := SOME named_thms';
+               log (minimize_tag id ^ "succeeded:\n" ^ msg))
+        )
+    | NONE => log (minimize_tag id ^ "failed: " ^ msg)
+  end
+
+fun override_params prover type_enc timeout =
+  [("provers", prover),
+   ("max_relevant", "0"),
+   ("type_enc", type_enc),
+   ("strict", "true"),
+   ("slice", "false"),
+   ("timeout", timeout |> Time.toSeconds |> string_of_int)]
+
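+(*Try to re-prove the goal from the facts found by Sledgehammer using the
+  selected reconstructor.  "full" forces the fully-typed metis encoding, "m"
+  is the statistics bucket (reconstructor_mode above), and "name" is the name
+  of the toplevel command, used to count structured ("proof") commands
+  separately.*)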
+fun run_reconstructor trivial full m name reconstructor named_thms id
+    ({pre=st, timeout, log, pos, ...}: Mirabelle.run_args) =
+  let
+    fun do_reconstructor named_thms ctxt =
+      let
+        val ref_of_str =
+          suffix ";" #> Outer_Syntax.scan Position.none #> Parse_Spec.xthm
+          #> fst
+        val thms = named_thms |> maps snd
+        val facts = named_thms |> map (ref_of_str o fst o fst)
+        val relevance_override = {add = facts, del = [], only = true}
+        fun my_timeout time_slice =
+          timeout |> Time.toReal |> curry Real.* time_slice |> Time.fromReal
+        fun sledge_tac time_slice prover type_enc =
+          Sledgehammer_Tactics.sledgehammer_as_oracle_tac ctxt
+            (override_params prover type_enc (my_timeout time_slice))
+            relevance_override
+      in
+        if !reconstructor = "sledgehammer_tac" then
+          sledge_tac 0.2 ATP_Systems.z3_tptpN "mono_native"
+          ORELSE' sledge_tac 0.2 ATP_Systems.eN "mono_guards??"
+          ORELSE' sledge_tac 0.2 ATP_Systems.vampireN "mono_guards??"
+          ORELSE' sledge_tac 0.2 ATP_Systems.spassN "poly_tags"
+          ORELSE' Metis_Tactic.metis_tac [] ATP_Problem_Generate.combsN
+            ctxt thms
+        else if !reconstructor = "smt" then
+          SMT_Solver.smt_tac ctxt thms
+        else if full then
+          Metis_Tactic.metis_tac [ATP_Proof_Reconstruct.full_typesN]
+            ATP_Proof_Reconstruct.metis_default_lam_trans ctxt thms
+        else if String.isPrefix "metis (" (!reconstructor) then
+          let
+            val (type_encs, lam_trans) =
+              !reconstructor
+              |> Outer_Syntax.scan Position.start
+              |> filter Token.is_proper |> tl
+              |> Metis_Tactic.parse_metis_options |> fst
+              |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
+              ||> the_default ATP_Proof_Reconstruct.metis_default_lam_trans
+          in Metis_Tactic.metis_tac type_encs lam_trans ctxt thms end
+        else if !reconstructor = "metis" then
+          Metis_Tactic.metis_tac [] ATP_Proof_Reconstruct.metis_default_lam_trans ctxt
+            thms
+        else
+          K all_tac
+      end
+    fun apply_reconstructor named_thms =
+      Mirabelle.can_apply timeout (do_reconstructor named_thms) st
+
+    fun with_time (false, t) = "failed (" ^ string_of_int t ^ ")"
+      | with_time (true, t) = (change_data id (inc_reconstructor_success m);
+          if trivial then ()
+          else change_data id (inc_reconstructor_nontriv_success m);
+          change_data id (inc_reconstructor_lemmas m (length named_thms));
+          change_data id (inc_reconstructor_time m t);
+          change_data id (inc_reconstructor_posns m (pos, trivial));
+          if name = "proof" then change_data id (inc_reconstructor_proofs m)
+          else ();
+          "succeeded (" ^ string_of_int t ^ ")")
+    fun timed_reconstructor named_thms =
+      (with_time (Mirabelle.cpu_time apply_reconstructor named_thms), true)
+      handle TimeLimit.TimeOut => (change_data id (inc_reconstructor_timeout m);
+               ("timeout", false))
+           | ERROR msg => ("error: " ^ msg, false)
+
+    val _ = log separator
+    val _ = change_data id (inc_reconstructor_calls m)
+    val _ = if trivial then ()
+            else change_data id (inc_reconstructor_nontriv_calls m)
+  in
+    named_thms
+    |> timed_reconstructor
+    |>> log o prefix (reconstructor_tag reconstructor id)
+    |> snd
+  end
+
+val try_timeout = seconds 5.0
+
+(* crude hack: a global, unsynchronized call counter used to enforce "max_calls" *)
+val num_sledgehammer_calls = Unsynchronized.ref 0
+
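+(*Skip goals whose first subgoal is a conjunction or a meta-equality;
+  otherwise run Sledgehammer (at most max_calls times), optionally minimize
+  the found facts, and replay the proof with the chosen reconstructor.*)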
+fun sledgehammer_action args id (st as {pre, name, ...}: Mirabelle.run_args) =
+  let val goal = Thm.major_prem_of (#goal (Proof.goal pre)) in
+    if can Logic.dest_conjunction goal orelse can Logic.dest_equals goal
+    then () else
+    let
+      val max_calls =
+        AList.lookup (op =) args max_callsK |> the_default max_calls_default
+        |> Int.fromString |> the
+      val _ = num_sledgehammer_calls := !num_sledgehammer_calls + 1;
+    in
+      if !num_sledgehammer_calls > max_calls then ()
+      else
+        let
+          val reconstructor = Unsynchronized.ref ""
+          val named_thms =
+            Unsynchronized.ref (NONE : ((string * stature) * thm list) list option)
+          val minimize = AList.defined (op =) args minimizeK
+          val metis_ft = AList.defined (op =) args metis_ftK
+          val trivial =
+            if AList.lookup (op =) args check_trivialK |> the_default trivial_default
+                            |> Bool.fromString |> the then
+              Try0.try0 (SOME try_timeout) ([], [], [], []) pre
+              handle TimeLimit.TimeOut => false
+            else false
+          fun apply_reconstructor m1 m2 =
+            if metis_ft
+            then
+              if not (Mirabelle.catch_result (reconstructor_tag reconstructor) false
+                  (run_reconstructor trivial false m1 name reconstructor
+                       (these (!named_thms))) id st)
+              then
+                (Mirabelle.catch_result (reconstructor_tag reconstructor) false
+                  (run_reconstructor trivial true m2 name reconstructor
+                       (these (!named_thms))) id st; ())
+              else ()
+            else
+              (Mirabelle.catch_result (reconstructor_tag reconstructor) false
+                (run_reconstructor trivial false m1 name reconstructor
+                     (these (!named_thms))) id st; ())
+        in
+          change_data id (set_mini minimize);
+          Mirabelle.catch sh_tag (run_sledgehammer trivial args reconstructor
+                                                   named_thms) id st;
+          if is_some (!named_thms)
+          then
+           (apply_reconstructor Unminimized UnminimizedFT;
+            if minimize andalso not (null (these (!named_thms)))
+            then
+             (Mirabelle.catch minimize_tag
+                  (run_minimize args reconstructor named_thms) id st;
+              apply_reconstructor Minimized MinimizedFT)
+            else ())
+          else ()
+        end
+    end
+  end
+
+fun invoke args =
+  Mirabelle.register (init, sledgehammer_action args, done)
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Mirabelle/Actions/mirabelle_sledgehammer_filter.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,198 @@
+(*  Title:      HOL/Mirabelle/Actions/mirabelle_sledgehammer_filter.ML
+    Author:     Jasmin Blanchette, TU Munich
+*)
+
+structure Mirabelle_Sledgehammer_Filter : MIRABELLE_ACTION =
+struct
+
+fun get args name default_value =
+  case AList.lookup (op =) args name of
+    SOME value => the (Real.fromString value)
+  | NONE => default_value
+
+fun extract_relevance_fudge args
+      {local_const_multiplier, worse_irrel_freq, higher_order_irrel_weight,
+       abs_rel_weight, abs_irrel_weight, skolem_irrel_weight,
+       theory_const_rel_weight, theory_const_irrel_weight,
+       chained_const_irrel_weight, intro_bonus, elim_bonus, simp_bonus,
+       local_bonus, assum_bonus, chained_bonus, max_imperfect, max_imperfect_exp,
+       threshold_divisor, ridiculous_threshold} =
+  {local_const_multiplier =
+       get args "local_const_multiplier" local_const_multiplier,
+   worse_irrel_freq = get args "worse_irrel_freq" worse_irrel_freq,
+   higher_order_irrel_weight =
+       get args "higher_order_irrel_weight" higher_order_irrel_weight,
+   abs_rel_weight = get args "abs_rel_weight" abs_rel_weight,
+   abs_irrel_weight = get args "abs_irrel_weight" abs_irrel_weight,
+   skolem_irrel_weight = get args "skolem_irrel_weight" skolem_irrel_weight,
+   theory_const_rel_weight =
+       get args "theory_const_rel_weight" theory_const_rel_weight,
+   theory_const_irrel_weight =
+       get args "theory_const_irrel_weight" theory_const_irrel_weight,
+   chained_const_irrel_weight =
+       get args "chained_const_irrel_weight" chained_const_irrel_weight,
+   intro_bonus = get args "intro_bonus" intro_bonus,
+   elim_bonus = get args "elim_bonus" elim_bonus,
+   simp_bonus = get args "simp_bonus" simp_bonus,
+   local_bonus = get args "local_bonus" local_bonus,
+   assum_bonus = get args "assum_bonus" assum_bonus,
+   chained_bonus = get args "chained_bonus" chained_bonus,
+   max_imperfect = get args "max_imperfect" max_imperfect,
+   max_imperfect_exp = get args "max_imperfect_exp" max_imperfect_exp,
+   threshold_divisor = get args "threshold_divisor" threshold_divisor,
+   ridiculous_threshold = get args "ridiculous_threshold" ridiculous_threshold}
+
+structure Prooftab =
+  Table(type key = int * int val ord = prod_ord int_ord int_ord)
+
+val proof_table = Unsynchronized.ref (Prooftab.empty: string list list Prooftab.table)
+
+val num_successes = Unsynchronized.ref ([] : (int * int) list)
+val num_failures = Unsynchronized.ref ([] : (int * int) list)
+val num_found_proofs = Unsynchronized.ref ([] : (int * int) list)
+val num_lost_proofs = Unsynchronized.ref ([] : (int * int) list)
+val num_found_facts = Unsynchronized.ref ([] : (int * int) list)
+val num_lost_facts = Unsynchronized.ref ([] : (int * int) list)
+
+fun get id c = the_default 0 (AList.lookup (op =) (!c) id)
+fun add id c n =
+  c := (case AList.lookup (op =) (!c) id of
+          SOME m => AList.update (op =) (id, m + n) (!c)
+        | NONE => (id, n) :: !c)
+
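+(*The proof file lists previously found proofs, one per line, in the format
+  LINE:OFFSET:FACT1 FACT2 ...; "init" parses it into the proof table above.*)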
+fun init proof_file _ thy =
+  let
+    fun do_line line =
+      case line |> space_explode ":" of
+        [line_num, offset, proof] =>
+        SOME (pairself (the o Int.fromString) (line_num, offset),
+              proof |> space_explode " " |> filter_out (curry (op =) ""))
+       | _ => NONE
+    val proofs = File.read (Path.explode proof_file)
+    val proof_tab =
+      proofs |> space_explode "\n"
+             |> map_filter do_line
+             |> AList.coalesce (op =)
+             |> Prooftab.make
+  in proof_table := proof_tab; thy end
+
+fun percentage a b = if b = 0 then "N/A" else string_of_int (a * 100 div b)
+fun percentage_alt a b = percentage a (a + b)
+
+fun done id ({log, ...} : Mirabelle.done_args) =
+  if get id num_successes + get id num_failures > 0 then
+    (log "";
+     log ("Number of overall successes: " ^
+          string_of_int (get id num_successes));
+     log ("Number of overall failures: " ^ string_of_int (get id num_failures));
+     log ("Overall success rate: " ^
+          percentage_alt (get id num_successes) (get id num_failures) ^ "%");
+     log ("Number of found proofs: " ^ string_of_int (get id num_found_proofs));
+     log ("Number of lost proofs: " ^ string_of_int (get id num_lost_proofs));
+     log ("Proof found rate: " ^
+          percentage_alt (get id num_found_proofs) (get id num_lost_proofs) ^
+          "%");
+     log ("Number of found facts: " ^ string_of_int (get id num_found_facts));
+     log ("Number of lost facts: " ^ string_of_int (get id num_lost_facts));
+     log ("Fact found rate: " ^
+          percentage_alt (get id num_found_facts) (get id num_lost_facts) ^
+          "%"))
+  else
+    ()
+
+val default_prover = ATP_Systems.eN (* arbitrary ATP *)
+
+fun with_index (i, s) = s ^ "@" ^ string_of_int i
+
+fun action args id ({pre, pos, log, ...} : Mirabelle.run_args) =
+  case (Position.line_of pos, Position.offset_of pos) of
+    (SOME line_num, SOME offset) =>
+    (case Prooftab.lookup (!proof_table) (line_num, offset) of
+       SOME proofs =>
+       let
+         val {context = ctxt, facts = chained_ths, goal} = Proof.goal pre
+         val prover = AList.lookup (op =) args "prover"
+                      |> the_default default_prover
+         val {relevance_thresholds, max_relevant, slice, ...} =
+           Sledgehammer_Isar.default_params ctxt args
+         val default_max_relevant =
+           Sledgehammer_Provers.default_max_relevant_for_prover ctxt slice
+                                                                prover
+         val is_appropriate_prop =
+           Sledgehammer_Provers.is_appropriate_prop_for_prover ctxt
+               default_prover
+         val is_built_in_const =
+           Sledgehammer_Provers.is_built_in_const_for_prover ctxt default_prover
+         val relevance_fudge =
+           extract_relevance_fudge args
+               (Sledgehammer_Provers.relevance_fudge_for_prover ctxt prover)
+         val relevance_override = {add = [], del = [], only = false}
+         val subgoal = 1
+         val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal ctxt goal subgoal
+         val ho_atp = Sledgehammer_Provers.is_ho_atp ctxt prover
+         val facts =
+          Sledgehammer_Filter.nearly_all_facts ctxt ho_atp relevance_override
+                                               chained_ths hyp_ts concl_t
+          |> filter (is_appropriate_prop o prop_of o snd)
+          |> Sledgehammer_Filter.relevant_facts ctxt relevance_thresholds
+                 (the_default default_max_relevant max_relevant)
+                 is_built_in_const relevance_fudge relevance_override
+                 chained_ths hyp_ts concl_t
+           |> map (fst o fst)
+         val (found_facts, lost_facts) =
+           flat proofs |> sort_distinct string_ord
+           |> map (fn fact => (find_index (curry (op =) fact) facts, fact))
+           |> List.partition (curry (op <=) 0 o fst)
+           |>> sort (prod_ord int_ord string_ord) ||> map snd
+         val found_proofs = filter (forall (member (op =) facts)) proofs
+         val n = length found_proofs
+         val _ =
+           if n = 0 then
+             (add id num_failures 1; log "Failure")
+           else
+             (add id num_successes 1;
+              add id num_found_proofs n;
+              log ("Success (" ^ string_of_int n ^ " of " ^
+                   string_of_int (length proofs) ^ " proofs)"))
+         val _ = add id num_lost_proofs (length proofs - n)
+         val _ = add id num_found_facts (length found_facts)
+         val _ = add id num_lost_facts (length lost_facts)
+         val _ =
+           if null found_facts then
+             ()
+           else
+             let
+               val found_weight =
+                 Real.fromInt (fold (fn (n, _) =>
+                                        Integer.add (n * n)) found_facts 0)
+                   / Real.fromInt (length found_facts)
+                 |> Math.sqrt |> Real.ceil
+             in
+               log ("Found facts (among " ^ string_of_int (length facts) ^
+                    ", weight " ^ string_of_int found_weight ^ "): " ^
+                    commas (map with_index found_facts))
+             end
+         val _ = if null lost_facts then
+                   ()
+                 else
+                   log ("Lost facts (among " ^ string_of_int (length facts) ^
+                        "): " ^ commas lost_facts)
+       in () end
+     | NONE => log "No known proof")
+  | _ => ()
+
+val proof_fileK = "proof_file"
+
+fun invoke args =
+  let
+    val (pf_args, other_args) =
+      args |> List.partition (curry (op =) proof_fileK o fst)
+    val proof_file = case pf_args of
+                       [] => error "No \"proof_file\" specified"
+                     | (_, s) :: _ => s
+  in Mirabelle.register (init proof_file, action other_args, done) end
+
+end;
+
+(* Workaround to keep the "mirabelle.pl" script happy *)
+structure Mirabelle_Sledgehammer_filter = Mirabelle_Sledgehammer_Filter;
--- a/src/HOL/Mirabelle/Mirabelle.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Mirabelle/Mirabelle.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -4,7 +4,7 @@
 
 theory Mirabelle
 imports Sledgehammer
-uses "Tools/mirabelle.ML"
+uses "Actions/mirabelle.ML"
      "../ex/sledgehammer_tactics.ML"
 begin
 
--- a/src/HOL/Mirabelle/Mirabelle_Test.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Mirabelle/Mirabelle_Test.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -7,12 +7,12 @@
 theory Mirabelle_Test
 imports Main Mirabelle
 uses
-  "Tools/mirabelle_arith.ML"
-  "Tools/mirabelle_metis.ML"
-  "Tools/mirabelle_quickcheck.ML"
-  "Tools/mirabelle_refute.ML"
-  "Tools/mirabelle_sledgehammer.ML"
-  "Tools/mirabelle_sledgehammer_filter.ML"
+  "Actions/mirabelle_arith.ML"
+  "Actions/mirabelle_metis.ML"
+  "Actions/mirabelle_quickcheck.ML"
+  "Actions/mirabelle_refute.ML"
+  "Actions/mirabelle_sledgehammer.ML"
+  "Actions/mirabelle_sledgehammer_filter.ML"
 begin
 
 text {*
--- a/src/HOL/Mirabelle/Tools/mirabelle.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,213 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle.ML
-    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
-*)
-
-signature MIRABELLE =
-sig
-  (*configuration*)
-  val logfile : string Config.T
-  val timeout : int Config.T
-  val start_line : int Config.T
-  val end_line : int Config.T
-
-  (*core*)
-  type init_action = int -> theory -> theory
-  type done_args = {last: Toplevel.state, log: string -> unit}
-  type done_action = int -> done_args -> unit
-  type run_args = {pre: Proof.state, post: Toplevel.state option,
-    timeout: Time.time, log: string -> unit, pos: Position.T, name: string}
-  type run_action = int -> run_args -> unit
-  type action = init_action * run_action * done_action
-  val catch : (int -> string) -> run_action -> run_action
-  val catch_result : (int -> string) -> 'a -> (int -> run_args -> 'a) ->
-    int -> run_args -> 'a
-  val register : action -> theory -> theory
-  val step_hook : Toplevel.transition -> Toplevel.state -> Toplevel.state ->
-    unit
-
-  (*utility functions*)
-  val can_apply : Time.time -> (Proof.context -> int -> tactic) ->
-    Proof.state -> bool
-  val theorems_in_proof_term : thm -> thm list
-  val theorems_of_sucessful_proof : Toplevel.state option -> thm list
-  val get_setting : (string * string) list -> string * string -> string
-  val get_int_setting : (string * string) list -> string * int -> int
-  val cpu_time : ('a -> 'b) -> 'a -> 'b * int
-end
-
-
-
-structure Mirabelle : MIRABELLE =
-struct
-
-(* Mirabelle configuration *)
-
-val logfile = Attrib.setup_config_string @{binding mirabelle_logfile} (K "")
-val timeout = Attrib.setup_config_int @{binding mirabelle_timeout} (K 30)
-val start_line = Attrib.setup_config_int @{binding mirabelle_start_line} (K 0)
-val end_line = Attrib.setup_config_int @{binding mirabelle_end_line} (K ~1)
-
-
-(* Mirabelle core *)
-
-type init_action = int -> theory -> theory
-type done_args = {last: Toplevel.state, log: string -> unit}
-type done_action = int -> done_args -> unit
-type run_args = {pre: Proof.state, post: Toplevel.state option,
-  timeout: Time.time, log: string -> unit, pos: Position.T, name: string}
-type run_action = int -> run_args -> unit
-type action = init_action * run_action * done_action
-
-structure Actions = Theory_Data
-(
-  type T = (int * run_action * done_action) list
-  val empty = []
-  val extend = I
-  fun merge data = Library.merge (K true) data  (* FIXME potential data loss because of (K true) *)
-)
-
-
-fun log_exn log tag id e = log (tag id ^ "exception:\n" ^ General.exnMessage e)
-
-fun catch tag f id (st as {log, ...}: run_args) = (f id st; ())
-  handle exn =>
-    if Exn.is_interrupt exn then reraise exn else (log_exn log tag id exn; ())
-
-fun catch_result tag d f id (st as {log, ...}: run_args) = f id st
-  handle exn =>
-    if Exn.is_interrupt exn then reraise exn else (log_exn log tag id exn; d)
-
-fun register (init, run, done) thy =
-  let val id = length (Actions.get thy) + 1
-  in
-    thy
-    |> init id
-    |> Actions.map (cons (id, run, done))
-  end
-
-local
-
-fun log thy s =
-  let fun append_to n = if n = "" then K () else File.append (Path.explode n)
-  in append_to (Config.get_global thy logfile) (s ^ "\n") end
-  (* FIXME: with multithreading and parallel proofs enabled, we might need to
-     encapsulate this inside a critical section *)
-
-fun log_sep thy = log thy "------------------"
-
-fun apply_actions thy pos name info (pre, post, time) actions =
-  let
-    fun apply f = f {pre=pre, post=post, timeout=time, log=log thy, pos=pos, name=name}
-    fun run (id, run, _) = (apply (run id); log_sep thy)
-  in (log thy info; log_sep thy; List.app run actions) end
-
-fun in_range _ _ NONE = true
-  | in_range l r (SOME i) = (l <= i andalso (r < 0 orelse i <= r))
-
-fun only_within_range thy pos f x =
-  let val l = Config.get_global thy start_line and r = Config.get_global thy end_line
-  in if in_range l r (Position.line_of pos) then f x else () end
-
-in
-
-fun run_actions tr pre post =
-  let
-    val thy = Proof.theory_of pre
-    val pos = Toplevel.pos_of tr
-    val name = Toplevel.name_of tr
-    val st = (pre, post, Time.fromSeconds (Config.get_global thy timeout))
-
-    val str0 = string_of_int o the_default 0
-    val loc = str0 (Position.line_of pos) ^ ":" ^ str0 (Position.offset_of pos)
-    val info = "\n\nat " ^ loc ^ " (" ^ name ^ "):"
-  in
-    only_within_range thy pos (apply_actions thy pos name info st) (Actions.get thy)
-  end
-
-fun done_actions st =
-  let
-    val thy = Toplevel.theory_of st
-    val _ = log thy "\n\n";
-  in
-    thy
-    |> Actions.get
-    |> List.app (fn (id, _, done) => done id {last=st, log=log thy})
-  end
-
-end
-
-val whitelist = ["apply", "by", "proof"]
-
-fun step_hook tr pre post =
- (* FIXME: might require wrapping into "interruptible" *)
-  if can (Proof.assert_backward o Toplevel.proof_of) pre andalso
-     member (op =) whitelist (Toplevel.name_of tr)
-  then run_actions tr (Toplevel.proof_of pre) (SOME post)
-  else if not (Toplevel.is_toplevel pre) andalso Toplevel.is_toplevel post
-  then done_actions pre
-  else ()   (* FIXME: add theory_hook here *)
-
-
-
-(* Mirabelle utility functions *)
-
-fun can_apply time tac st =
-  let
-    val {context = ctxt, facts, goal} = Proof.goal st
-    val full_tac = HEADGOAL (Method.insert_tac facts THEN' tac ctxt)
-  in
-    (case try (TimeLimit.timeLimit time (Seq.pull o full_tac)) goal of
-      SOME (SOME _) => true
-    | _ => false)
-  end
-
-local
-
-fun fold_body_thms f =
-  let
-    fun app n (PBody {thms, ...}) = thms |> fold (fn (i, (name, prop, body)) =>
-      fn (x, seen) =>
-        if Inttab.defined seen i then (x, seen)
-        else
-          let
-            val body' = Future.join body
-            val (x', seen') = app (n + (if name = "" then 0 else 1)) body'
-              (x, Inttab.update (i, ()) seen)
-        in (x' |> n = 0 ? f (name, prop, body'), seen') end)
-  in fn bodies => fn x => #1 (fold (app 0) bodies (x, Inttab.empty)) end
-
-in
-
-fun theorems_in_proof_term thm =
-  let
-    val all_thms = Global_Theory.all_thms_of (Thm.theory_of_thm thm)
-    fun collect (s, _, _) = if s <> "" then insert (op =) s else I
-    fun member_of xs (x, y) = if member (op =) xs x then SOME y else NONE
-    fun resolve_thms names = map_filter (member_of names) all_thms
-  in
-    resolve_thms (fold_body_thms collect [Thm.proof_body_of thm] [])
-  end
-
-end
-
-fun theorems_of_successful_proof state =
-  (case state of
-    NONE => []
-  | SOME st =>
-      if not (Toplevel.is_proof st) then []
-      else theorems_in_proof_term (#goal (Proof.goal (Toplevel.proof_of st))))
-
-fun get_setting settings (key, default) =
-  the_default default (AList.lookup (op =) settings key)
-
-fun get_int_setting settings (key, default) =
-  (case Option.map Int.fromString (AList.lookup (op =) settings key) of
-    SOME (SOME i) => i
-  | SOME NONE => error ("bad option: " ^ key)
-  | NONE => default)
-
-fun cpu_time f x =
-  let val ({cpu, ...}, y) = Timing.timing f x
-  in (y, Time.toMilliseconds cpu) end
-
-end
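
The configuration options declared at the top of mirabelle.ML above (logfile,
timeout, start_line, end_line) are ordinary Isabelle configuration options,
so they can be set globally before any action runs. A minimal Isabelle/ML
sketch, assuming some theory value thy is at hand (illustrative only, not
part of the changeset); the same options should also be settable via the
usual declare [[mirabelle_timeout = 60]] attribute syntax:

  val thy' =
    thy |> Config.put_global Mirabelle.logfile "/tmp/mirabelle.log"
        |> Config.put_global Mirabelle.timeout 60
        |> Config.put_global Mirabelle.start_line 1
        |> Config.put_global Mirabelle.end_line ~1   (* ~1 means no upper bound *)
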
--- a/src/HOL/Mirabelle/Tools/mirabelle_arith.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,21 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle_arith.ML
-    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
-*)
-
-structure Mirabelle_Arith : MIRABELLE_ACTION =
-struct
-
-fun arith_tag id = "#" ^ string_of_int id ^ " arith: "
-
-fun init _ = I
-fun done _ _ = ()
-
-fun run id ({pre, timeout, log, ...}: Mirabelle.run_args) =
-  if Mirabelle.can_apply timeout Arith_Data.arith_tac pre
-  then log (arith_tag id ^ "succeeded")
-  else ()
-  handle TimeLimit.TimeOut => log (arith_tag id ^ "timeout")
-
-fun invoke _ = Mirabelle.register (init, Mirabelle.catch arith_tag run, done)
-
-end
--- a/src/HOL/Mirabelle/Tools/mirabelle_metis.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle_metis.ML
-    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
-*)
-
-structure Mirabelle_Metis : MIRABELLE_ACTION =
-struct
-
-fun metis_tag id = "#" ^ string_of_int id ^ " metis: "
-
-fun init _ = I
-fun done _ _ = ()
-
-fun run id ({pre, post, timeout, log, ...}: Mirabelle.run_args) =
-  let
-    val thms = Mirabelle.theorems_of_successful_proof post
-    val names = map Thm.get_name_hint thms
-    val add_info = if null names then I else suffix (":\n" ^ commas names)
-
-    val facts = Facts.props (Proof_Context.facts_of (Proof.context_of pre))
-
-    fun metis ctxt =
-      Metis_Tactic.metis_tac [] ATP_Problem_Generate.liftingN ctxt
-                             (thms @ facts)
-  in
-    (if Mirabelle.can_apply timeout metis pre then "succeeded" else "failed")
-    |> prefix (metis_tag id)
-    |> add_info
-    |> log
-  end
-  handle TimeLimit.TimeOut => log (metis_tag id ^ "timeout")
-       | ERROR msg => log (metis_tag id ^ "error: " ^ msg)
-
-fun invoke _ = Mirabelle.register (init, Mirabelle.catch metis_tag run, done)
-
-end
--- a/src/HOL/Mirabelle/Tools/mirabelle_quickcheck.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle_quickcheck.ML
-    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
-*)
-
-structure Mirabelle_Quickcheck : MIRABELLE_ACTION =
-struct
-
-fun qc_tag id = "#" ^ string_of_int id ^ " quickcheck: "
-
-fun init _ = I
-fun done _ _ = ()
-
-fun run args id ({pre, timeout, log, ...}: Mirabelle.run_args) =
-  let
-    val has_valid_key = member (op =) ["iterations", "size", "generator"] o fst
-    val quickcheck = Quickcheck.quickcheck (map (apsnd single) (filter has_valid_key args)) 1
-  in
-    (case TimeLimit.timeLimit timeout quickcheck pre of
-      NONE => log (qc_tag id ^ "no counterexample")
-    | SOME _ => log (qc_tag id ^ "counterexample found"))
-  end
-  handle TimeLimit.TimeOut => log (qc_tag id ^ "timeout")
-       | ERROR msg => log (qc_tag id ^ "error: " ^ msg)
-
-fun invoke args =
-  Mirabelle.register (init, Mirabelle.catch qc_tag (run args), done)
-
-end
--- a/src/HOL/Mirabelle/Tools/mirabelle_refute.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle_refute.ML
-    Author:     Jasmin Blanchette and Sascha Boehme, TU Munich
-*)
-
-structure Mirabelle_Refute : MIRABELLE_ACTION =
-struct
-
-
-(* FIXME:
-fun refute_action args timeout {pre=st, ...} = 
-  let
-    val subgoal = 1
-    val thy = Proof.theory_of st
-    val thm = #goal (Proof.raw_goal st)
-
-    val refute = Refute.refute_goal thy args thm
-    val _ = TimeLimit.timeLimit timeout refute subgoal
-  in
-    val writ_log = Substring.full (the (Symtab.lookup tab "writeln"))
-    val warn_log = Substring.full (the (Symtab.lookup tab "warning"))
-
-    val r =
-      if Substring.isSubstring "model found" writ_log
-      then
-        if Substring.isSubstring "spurious" warn_log
-        then SOME "potential counterexample"
-        else SOME "real counterexample (bug?)"
-      else
-        if Substring.isSubstring "time limit" writ_log
-        then SOME "no counterexample (timeout)"
-        else if Substring.isSubstring "Search terminated" writ_log
-        then SOME "no counterexample (normal termination)"
-        else SOME "no counterexample (unknown)"
-  in r end
-*)
-
-fun invoke args = I (*Mirabelle.register ("refute", refute_action args)*)
-
-end
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,746 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML
-    Author:     Jasmin Blanchette and Sascha Boehme and Tobias Nipkow, TU Munich
-*)
-
-structure Mirabelle_Sledgehammer : MIRABELLE_ACTION =
-struct
-
-val proverK = "prover"
-val prover_timeoutK = "prover_timeout"
-val keepK = "keep"
-val type_encK = "type_enc"
-val strictK = "strict"
-val sliceK = "slice"
-val lam_transK = "lam_trans"
-val uncurried_aliasesK = "uncurried_aliases"
-val e_selection_heuristicK = "e_selection_heuristic"
-val term_orderK = "term_order"
-val force_sosK = "force_sos"
-val max_relevantK = "max_relevant"
-val max_callsK = "max_calls"
-val minimizeK = "minimize" (*refers to minimization attempted by Mirabelle*)
-val minimize_timeoutK = "minimize_timeout"
-val metis_ftK = "metis_ft"
-val reconstructorK = "reconstructor"
-val preplay_timeoutK = "preplay_timeout"
-val sh_minimizeK = "sh_minimize" (*refers to minimizer run within Sledgehammer*)
-val max_new_mono_instancesK = "max_new_mono_instances"
-val max_mono_itersK = "max_mono_iters"
-val check_trivialK = "check_trivial" (*false by default*)
-
-fun sh_tag id = "#" ^ string_of_int id ^ " sledgehammer: "
-fun minimize_tag id = "#" ^ string_of_int id ^ " minimize (sledgehammer): "
-fun reconstructor_tag reconstructor id =
-  "#" ^ string_of_int id ^ " " ^ (!reconstructor) ^ " (sledgehammer): "
-
-val separator = "-----"
-
-val preplay_timeout_default = "4"
-(*FIXME sensible to have Mirabelle-level Sledgehammer defaults?*)
-
-(*If a key is present in args then augment a list with its pair*)
-(*This is used to avoid fixing default values at the Mirabelle level, and
-  instead use the default values of the tool (Sledgehammer in this case).*)
-fun available_parameter args key label list =
-  let
-    val value = AList.lookup (op =) args key
-  in if is_some value then (label, the value) :: list else list end
-
-
-datatype sh_data = ShData of {
-  calls: int,
-  success: int,
-  nontriv_calls: int,
-  nontriv_success: int,
-  lemmas: int,
-  max_lems: int,
-  time_isa: int,
-  time_prover: int,
-  time_prover_fail: int}
-
-datatype re_data = ReData of {
-  calls: int,
-  success: int,
-  nontriv_calls: int,
-  nontriv_success: int,
-  proofs: int,
-  time: int,
-  timeout: int,
-  lemmas: int * int * int,
-  posns: (Position.T * bool) list
-  }
-
-datatype min_data = MinData of {
-  succs: int,
-  ab_ratios: int
-  }
-
-fun make_sh_data
-      (calls,success,nontriv_calls,nontriv_success,lemmas,max_lems,time_isa,
-       time_prover,time_prover_fail) =
-  ShData{calls=calls, success=success, nontriv_calls=nontriv_calls,
-         nontriv_success=nontriv_success, lemmas=lemmas, max_lems=max_lems,
-         time_isa=time_isa, time_prover=time_prover,
-         time_prover_fail=time_prover_fail}
-
-fun make_min_data (succs, ab_ratios) =
-  MinData{succs=succs, ab_ratios=ab_ratios}
-
-fun make_re_data (calls,success,nontriv_calls,nontriv_success,proofs,time,
-                  timeout,lemmas,posns) =
-  ReData{calls=calls, success=success, nontriv_calls=nontriv_calls,
-         nontriv_success=nontriv_success, proofs=proofs, time=time,
-         timeout=timeout, lemmas=lemmas, posns=posns}
-
-val empty_sh_data = make_sh_data (0, 0, 0, 0, 0, 0, 0, 0, 0)
-val empty_min_data = make_min_data (0, 0)
-val empty_re_data = make_re_data (0, 0, 0, 0, 0, 0, 0, (0,0,0), [])
-
-fun tuple_of_sh_data (ShData {calls, success, nontriv_calls, nontriv_success,
-                              lemmas, max_lems, time_isa,
-  time_prover, time_prover_fail}) = (calls, success, nontriv_calls,
-  nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail)
-
-fun tuple_of_min_data (MinData {succs, ab_ratios}) = (succs, ab_ratios)
-
-fun tuple_of_re_data (ReData {calls, success, nontriv_calls, nontriv_success,
-  proofs, time, timeout, lemmas, posns}) = (calls, success, nontriv_calls,
-  nontriv_success, proofs, time, timeout, lemmas, posns)
-
-
-datatype reconstructor_mode =
-  Unminimized | Minimized | UnminimizedFT | MinimizedFT
-
-datatype data = Data of {
-  sh: sh_data,
-  min: min_data,
-  re_u: re_data, (* reconstructor with unminimized set of lemmas *)
-  re_m: re_data, (* reconstructor with minimized set of lemmas *)
-  re_uft: re_data, (* reconstructor with unminimized set of lemmas and fully-typed *)
-  re_mft: re_data, (* reconstructor with minimized set of lemmas and fully-typed *)
-  mini: bool   (* with minimization *)
-  }
-
-fun make_data (sh, min, re_u, re_m, re_uft, re_mft, mini) =
-  Data {sh=sh, min=min, re_u=re_u, re_m=re_m, re_uft=re_uft, re_mft=re_mft,
-    mini=mini}
-
-val empty_data = make_data (empty_sh_data, empty_min_data,
-  empty_re_data, empty_re_data, empty_re_data, empty_re_data, false)
-
-fun map_sh_data f (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
-  let val sh' = make_sh_data (f (tuple_of_sh_data sh))
-  in make_data (sh', min, re_u, re_m, re_uft, re_mft, mini) end
-
-fun map_min_data f (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
-  let val min' = make_min_data (f (tuple_of_min_data min))
-  in make_data (sh, min', re_u, re_m, re_uft, re_mft, mini) end
-
-fun map_re_data f m (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
-  let
-    fun map_me g Unminimized   (u, m, uft, mft) = (g u, m, uft, mft)
-      | map_me g Minimized     (u, m, uft, mft) = (u, g m, uft, mft)
-      | map_me g UnminimizedFT (u, m, uft, mft) = (u, m, g uft, mft)
-      | map_me g MinimizedFT   (u, m, uft, mft) = (u, m, uft, g mft)
-
-    val f' = make_re_data o f o tuple_of_re_data
-
-    val (re_u', re_m', re_uft', re_mft') =
-      map_me f' m (re_u, re_m, re_uft, re_mft)
-  in make_data (sh, min, re_u', re_m', re_uft', re_mft', mini) end
-
-fun set_mini mini (Data {sh, min, re_u, re_m, re_uft, re_mft, ...}) =
-  make_data (sh, min, re_u, re_m, re_uft, re_mft, mini)
-
-fun inc_max (n:int) (s,sos,m) = (s+n, sos + n*n, Int.max(m,n));
-
-val inc_sh_calls =  map_sh_data
-  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
-    => (calls + 1, success, nontriv_calls, nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail))
-
-val inc_sh_success = map_sh_data
-  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
-    => (calls, success + 1, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail))
-
-val inc_sh_nontriv_calls =  map_sh_data
-  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
-    => (calls, success, nontriv_calls + 1, nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail))
-
-val inc_sh_nontriv_success = map_sh_data
-  (fn (calls, success, nontriv_calls, nontriv_success, lemmas,max_lems, time_isa, time_prover, time_prover_fail)
-    => (calls, success, nontriv_calls, nontriv_success + 1, lemmas,max_lems, time_isa, time_prover, time_prover_fail))
-
-fun inc_sh_lemmas n = map_sh_data
-  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
-    => (calls,success,nontriv_calls, nontriv_success, lemmas+n,max_lems,time_isa,time_prover,time_prover_fail))
-
-fun inc_sh_max_lems n = map_sh_data
-  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
-    => (calls,success,nontriv_calls, nontriv_success, lemmas,Int.max(max_lems,n),time_isa,time_prover,time_prover_fail))
-
-fun inc_sh_time_isa t = map_sh_data
-  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
-    => (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa + t,time_prover,time_prover_fail))
-
-fun inc_sh_time_prover t = map_sh_data
-  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
-    => (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover + t,time_prover_fail))
-
-fun inc_sh_time_prover_fail t = map_sh_data
-  (fn (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail)
-    => (calls,success,nontriv_calls, nontriv_success, lemmas,max_lems,time_isa,time_prover,time_prover_fail + t))
-
-val inc_min_succs = map_min_data
-  (fn (succs,ab_ratios) => (succs+1, ab_ratios))
-
-fun inc_min_ab_ratios r = map_min_data
-  (fn (succs, ab_ratios) => (succs, ab_ratios+r))
-
-val inc_reconstructor_calls = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls + 1, success, nontriv_calls, nontriv_success, proofs, time, timeout, lemmas,posns))
-
-val inc_reconstructor_success = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success + 1, nontriv_calls, nontriv_success, proofs, time, timeout, lemmas,posns))
-
-val inc_reconstructor_nontriv_calls = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success, nontriv_calls + 1, nontriv_success, proofs, time, timeout, lemmas,posns))
-
-val inc_reconstructor_nontriv_success = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success, nontriv_calls, nontriv_success + 1, proofs, time, timeout, lemmas,posns))
-
-val inc_reconstructor_proofs = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success, nontriv_calls, nontriv_success, proofs + 1, time, timeout, lemmas,posns))
-
-fun inc_reconstructor_time m t = map_re_data
- (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-  => (calls, success, nontriv_calls, nontriv_success, proofs, time + t, timeout, lemmas,posns)) m
-
-val inc_reconstructor_timeout = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success, nontriv_calls, nontriv_success, proofs, time, timeout + 1, lemmas,posns))
-
-fun inc_reconstructor_lemmas m n = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success, nontriv_calls, nontriv_success, proofs, time, timeout, inc_max n lemmas, posns)) m
-
-fun inc_reconstructor_posns m pos = map_re_data
-  (fn (calls,success,nontriv_calls, nontriv_success, proofs,time,timeout,lemmas,posns)
-    => (calls, success, nontriv_calls, nontriv_success, proofs, time, timeout, lemmas, pos::posns)) m
-
-val str0 = string_of_int o the_default 0
-
-local
-
-val str = string_of_int
-val str3 = Real.fmt (StringCvt.FIX (SOME 3))
-fun percentage a b = string_of_int (a * 100 div b)
-fun time t = Real.fromInt t / 1000.0
-fun avg_time t n =
-  if n > 0 then (Real.fromInt t / 1000.0) / Real.fromInt n else 0.0
-
-fun log_sh_data log
-    (calls, success, nontriv_calls, nontriv_success, lemmas, max_lems, time_isa, time_prover, time_prover_fail) =
- (log ("Total number of sledgehammer calls: " ^ str calls);
-  log ("Number of successful sledgehammer calls: " ^ str success);
-  log ("Number of sledgehammer lemmas: " ^ str lemmas);
-  log ("Max number of sledgehammer lemmas: " ^ str max_lems);
-  log ("Success rate: " ^ percentage success calls ^ "%");
-  log ("Total number of nontrivial sledgehammer calls: " ^ str nontriv_calls);
-  log ("Number of successful nontrivial sledgehammer calls: " ^ str nontriv_success);
-  log ("Total time for sledgehammer calls (Isabelle): " ^ str3 (time time_isa));
-  log ("Total time for successful sledgehammer calls (ATP): " ^ str3 (time time_prover));
-  log ("Total time for failed sledgehammer calls (ATP): " ^ str3 (time time_prover_fail));
-  log ("Average time for sledgehammer calls (Isabelle): " ^
-    str3 (avg_time time_isa calls));
-  log ("Average time for successful sledgehammer calls (ATP): " ^
-    str3 (avg_time time_prover success));
-  log ("Average time for failed sledgehammer calls (ATP): " ^
-    str3 (avg_time time_prover_fail (calls - success)))
-  )
-
-fun str_of_pos (pos, triv) =
-  str0 (Position.line_of pos) (* ^ ":" ^ str0 (Position.offset_of pos) *) ^
-  (if triv then "[T]" else "")
-
-fun log_re_data log tag sh_calls (re_calls, re_success, re_nontriv_calls,
-     re_nontriv_success, re_proofs, re_time, re_timeout,
-    (lemmas, lems_sos, lems_max), re_posns) =
- (log ("Total number of " ^ tag ^ "reconstructor calls: " ^ str re_calls);
-  log ("Number of successful " ^ tag ^ "reconstructor calls: " ^ str re_success ^
-    " (proof: " ^ str re_proofs ^ ")");
-  log ("Number of " ^ tag ^ "reconstructor timeouts: " ^ str re_timeout);
-  log ("Success rate: " ^ percentage re_success sh_calls ^ "%");
-  log ("Total number of nontrivial " ^ tag ^ "reconstructor calls: " ^ str re_nontriv_calls);
-  log ("Number of successful nontrivial " ^ tag ^ "reconstructor calls: " ^ str re_nontriv_success ^
-    " (proof: " ^ str re_proofs ^ ")");
-  log ("Number of successful " ^ tag ^ "reconstructor lemmas: " ^ str lemmas);
-  log ("SOS of successful " ^ tag ^ "reconstructor lemmas: " ^ str lems_sos);
-  log ("Max number of successful " ^ tag ^ "reconstructor lemmas: " ^ str lems_max);
-  log ("Total time for successful " ^ tag ^ "reconstructor calls: " ^ str3 (time re_time));
-  log ("Average time for successful " ^ tag ^ "reconstructor calls: " ^
-    str3 (avg_time re_time re_success));
-  if tag=""
-  then log ("Proved: " ^ space_implode " " (map str_of_pos re_posns))
-  else ()
- )
-
-fun log_min_data log (succs, ab_ratios) =
-  (log ("Number of successful minimizations: " ^ string_of_int succs);
-   log ("After/before ratios: " ^ string_of_int ab_ratios)
-  )
-
-in
-
-fun log_data id log (Data {sh, min, re_u, re_m, re_uft, re_mft, mini}) =
-  let
-    val ShData {calls=sh_calls, ...} = sh
-
-    fun app_if (ReData {calls, ...}) f = if calls > 0 then f () else ()
-    fun log_re tag m =
-      log_re_data log tag sh_calls (tuple_of_re_data m)
-    fun log_reconstructor (tag1, m1) (tag2, m2) = app_if m1 (fn () =>
-      (log_re tag1 m1; log ""; app_if m2 (fn () => log_re tag2 m2)))
-  in
-    if sh_calls > 0
-    then
-     (log ("\n\n\nReport #" ^ string_of_int id ^ ":\n");
-      log_sh_data log (tuple_of_sh_data sh);
-      log "";
-      if not mini
-      then log_reconstructor ("", re_u) ("fully-typed ", re_uft)
-      else
-        app_if re_u (fn () =>
-         (log_reconstructor ("unminimized ", re_u) ("unminimized fully-typed ", re_uft);
-          log "";
-          app_if re_m (fn () =>
-            (log_min_data log (tuple_of_min_data min); log "";
-             log_reconstructor ("", re_m) ("fully-typed ", re_mft))))))
-    else ()
-  end
-
-end
-
-
-(* Warning: we implicitly assume single-threaded execution here! *)
-val data = Unsynchronized.ref ([] : (int * data) list)
-
-fun init id thy = (Unsynchronized.change data (cons (id, empty_data)); thy)
-fun done id ({log, ...}: Mirabelle.done_args) =
-  AList.lookup (op =) (!data) id
-  |> Option.map (log_data id log)
-  |> K ()
-
-fun change_data id f = (Unsynchronized.change data (AList.map_entry (op =) id f); ())
-
-
-fun get_prover ctxt args =
-  let
-    fun default_prover_name () =
-      hd (#provers (Sledgehammer_Isar.default_params ctxt []))
-      handle List.Empty => error "No ATP available."
-    fun get_prover name =
-      (name, Sledgehammer_Run.get_minimizing_prover ctxt
-                Sledgehammer_Provers.Normal name)
-  in
-    (case AList.lookup (op =) args proverK of
-      SOME name => get_prover name
-    | NONE => get_prover (default_prover_name ()))
-  end
-
-type stature = ATP_Problem_Generate.stature
-
-(* hack *)
-fun reconstructor_from_msg args msg =
-  (case AList.lookup (op =) args reconstructorK of
-    SOME name => name
-  | NONE =>
-    if String.isSubstring "metis (" msg then
-      msg |> Substring.full
-          |> Substring.position "metis ("
-          |> snd |> Substring.position ")"
-          |> fst |> Substring.string
-          |> suffix ")"
-    else if String.isSubstring "metis" msg then
-      "metis"
-    else
-      "smt")
-
-local
-
-datatype sh_result =
-  SH_OK of int * int * (string * stature) list |
-  SH_FAIL of int * int |
-  SH_ERROR
-
-fun run_sh prover_name prover type_enc strict max_relevant slice lam_trans
-        uncurried_aliases e_selection_heuristic term_order force_sos
-        hard_timeout timeout preplay_timeout sh_minimizeLST
-        max_new_mono_instancesLST max_mono_itersLST dir pos st =
-  let
-    val {context = ctxt, facts = chained_ths, goal} = Proof.goal st
-    val i = 1
-    fun set_file_name (SOME dir) =
-        Config.put Sledgehammer_Provers.dest_dir dir
-        #> Config.put Sledgehammer_Provers.problem_prefix
-          ("prob_" ^ str0 (Position.line_of pos) ^ "__")
-        #> Config.put SMT_Config.debug_files
-          (dir ^ "/" ^ Name.desymbolize false (ATP_Util.timestamp ()) ^ "_"
-          ^ serial_string ())
-      | set_file_name NONE = I
-    val st' =
-      st
-      |> Proof.map_context
-           (set_file_name dir
-            #> (Option.map (Config.put ATP_Systems.e_selection_heuristic)
-                  e_selection_heuristic |> the_default I)
-            #> (Option.map (Config.put ATP_Systems.term_order)
-                  term_order |> the_default I)
-            #> (Option.map (Config.put ATP_Systems.force_sos)
-                  force_sos |> the_default I))
-    val params as {relevance_thresholds, max_relevant, slice, ...} =
-      Sledgehammer_Isar.default_params ctxt
-         ([("verbose", "true"),
-           ("type_enc", type_enc),
-           ("strict", strict),
-           ("lam_trans", lam_trans |> the_default "smart"),
-           ("uncurried_aliases", uncurried_aliases |> the_default "smart"),
-           ("max_relevant", max_relevant),
-           ("slice", slice),
-           ("timeout", string_of_int timeout),
-           ("preplay_timeout", preplay_timeout)]
-          |> sh_minimizeLST (*don't confuse the two minimization flags*)
-          |> max_new_mono_instancesLST
-          |> max_mono_itersLST)
-    val default_max_relevant =
-      Sledgehammer_Provers.default_max_relevant_for_prover ctxt slice
-        prover_name
-    val is_appropriate_prop =
-      Sledgehammer_Provers.is_appropriate_prop_for_prover ctxt prover_name
-    val is_built_in_const =
-      Sledgehammer_Provers.is_built_in_const_for_prover ctxt prover_name
-    val relevance_fudge =
-      Sledgehammer_Provers.relevance_fudge_for_prover ctxt prover_name
-    val relevance_override = {add = [], del = [], only = false}
-    val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal ctxt goal i
-    val time_limit =
-      (case hard_timeout of
-        NONE => I
-      | SOME secs => TimeLimit.timeLimit (Time.fromSeconds secs))
-    fun failed failure =
-      ({outcome = SOME failure, used_facts = [], run_time = Time.zeroTime,
-        preplay =
-          K (ATP_Proof_Reconstruct.Failed_to_Play Sledgehammer_Provers.plain_metis),
-        message = K "", message_tail = ""}, ~1)
-    val ({outcome, used_facts, run_time, preplay, message, message_tail}
-         : Sledgehammer_Provers.prover_result,
-        time_isa) = time_limit (Mirabelle.cpu_time (fn () =>
-      let
-        val _ = if is_appropriate_prop concl_t then ()
-                else raise Fail "inappropriate"
-        val ho_atp = Sledgehammer_Provers.is_ho_atp ctxt prover_name
-        val facts =
-          Sledgehammer_Filter.nearly_all_facts ctxt ho_atp relevance_override
-            chained_ths hyp_ts concl_t
-          |> filter (is_appropriate_prop o prop_of o snd)
-          |> Sledgehammer_Filter.relevant_facts ctxt relevance_thresholds
-                 (the_default default_max_relevant max_relevant)
-                 is_built_in_const relevance_fudge relevance_override
-                 chained_ths hyp_ts concl_t
-        val problem =
-          {state = st', goal = goal, subgoal = i,
-           subgoal_count = Sledgehammer_Util.subgoal_count st,
-           facts = facts |> map Sledgehammer_Provers.Untranslated_Fact,
-           smt_filter = NONE}
-      in prover params (K (K (K ""))) problem end)) ()
-      handle TimeLimit.TimeOut => failed ATP_Proof.TimedOut
-           | Fail "inappropriate" => failed ATP_Proof.Inappropriate
-    val time_prover = run_time |> Time.toMilliseconds
-    val msg = message (preplay ()) ^ message_tail
-  in
-    case outcome of
-      NONE => (msg, SH_OK (time_isa, time_prover, used_facts))
-    | SOME _ => (msg, SH_FAIL (time_isa, time_prover))
-  end
-  handle ERROR msg => ("error: " ^ msg, SH_ERROR)
-
-fun thms_of_name ctxt name =
-  let
-    val lex = Keyword.get_lexicons
-    val get = maps (Proof_Context.get_fact ctxt o fst)
-  in
-    Source.of_string name
-    |> Symbol.source
-    |> Token.source {do_recover=SOME false} lex Position.start
-    |> Token.source_proper
-    |> Source.source Token.stopper (Parse_Spec.xthms1 >> get) NONE
-    |> Source.exhaust
-  end
-
-in
-
-fun run_sledgehammer trivial args reconstructor named_thms id
-      ({pre=st, log, pos, ...}: Mirabelle.run_args) =
-  let
-    val triv_str = if trivial then "[T] " else ""
-    val _ = change_data id inc_sh_calls
-    val _ = if trivial then () else change_data id inc_sh_nontriv_calls
-    val (prover_name, prover) = get_prover (Proof.context_of st) args
-    val type_enc = AList.lookup (op =) args type_encK |> the_default "smart"
-    val strict = AList.lookup (op =) args strictK |> the_default "false"
-    val max_relevant = AList.lookup (op =) args max_relevantK |> the_default "smart"
-    val slice = AList.lookup (op =) args sliceK |> the_default "true"
-    val lam_trans = AList.lookup (op =) args lam_transK
-    val uncurried_aliases = AList.lookup (op =) args uncurried_aliasesK
-    val e_selection_heuristic = AList.lookup (op =) args e_selection_heuristicK
-    val term_order = AList.lookup (op =) args term_orderK
-    val force_sos = AList.lookup (op =) args force_sosK
-      |> Option.map (curry (op <>) "false")
-    val dir = AList.lookup (op =) args keepK
-    val timeout = Mirabelle.get_int_setting args (prover_timeoutK, 30)
-    (* always use a hard timeout, but give some slack so that the automatic
-       minimizer has a chance to do its magic *)
-    val preplay_timeout = AList.lookup (op =) args preplay_timeoutK
-      |> the_default preplay_timeout_default
-    val sh_minimizeLST = available_parameter args sh_minimizeK "minimize"
-    val max_new_mono_instancesLST =
-      available_parameter args max_new_mono_instancesK max_new_mono_instancesK
-    val max_mono_itersLST = available_parameter args max_mono_itersK max_mono_itersK
-    val hard_timeout = SOME (2 * timeout)
-    val (msg, result) =
-      run_sh prover_name prover type_enc strict max_relevant slice lam_trans
-        uncurried_aliases e_selection_heuristic term_order force_sos
-        hard_timeout timeout preplay_timeout sh_minimizeLST
-        max_new_mono_instancesLST max_mono_itersLST dir pos st
-  in
-    case result of
-      SH_OK (time_isa, time_prover, names) =>
-        let
-          fun get_thms (name, stature) =
-            try (thms_of_name (Proof.context_of st)) name
-            |> Option.map (pair (name, stature))
-        in
-          change_data id inc_sh_success;
-          if trivial then () else change_data id inc_sh_nontriv_success;
-          change_data id (inc_sh_lemmas (length names));
-          change_data id (inc_sh_max_lems (length names));
-          change_data id (inc_sh_time_isa time_isa);
-          change_data id (inc_sh_time_prover time_prover);
-          reconstructor := reconstructor_from_msg args msg;
-          named_thms := SOME (map_filter get_thms names);
-          log (sh_tag id ^ triv_str ^ "succeeded (" ^ string_of_int time_isa ^ "+" ^
-            string_of_int time_prover ^ ") [" ^ prover_name ^ "]:\n" ^ msg)
-        end
-    | SH_FAIL (time_isa, time_prover) =>
-        let
-          val _ = change_data id (inc_sh_time_isa time_isa)
-          val _ = change_data id (inc_sh_time_prover_fail time_prover)
-        in log (sh_tag id ^ triv_str ^ "failed: " ^ msg) end
-    | SH_ERROR => log (sh_tag id ^ "failed: " ^ msg)
-  end
-
-end
-
-fun run_minimize args reconstructor named_thms id
-        ({pre=st, log, ...}: Mirabelle.run_args) =
-  let
-    val ctxt = Proof.context_of st
-    val n0 = length (these (!named_thms))
-    val (prover_name, _) = get_prover ctxt args
-    val type_enc = AList.lookup (op =) args type_encK |> the_default "smart"
-    val strict = AList.lookup (op =) args strictK |> the_default "false"
-    val timeout =
-      AList.lookup (op =) args minimize_timeoutK
-      |> Option.map (fst o read_int o raw_explode)  (* FIXME Symbol.explode (?) *)
-      |> the_default 5
-    val preplay_timeout = AList.lookup (op =) args preplay_timeoutK
-      |> the_default preplay_timeout_default
-    val sh_minimizeLST = available_parameter args sh_minimizeK "minimize"
-    val max_new_mono_instancesLST =
-      available_parameter args max_new_mono_instancesK max_new_mono_instancesK
-    val max_mono_itersLST = available_parameter args max_mono_itersK max_mono_itersK
-    val params = Sledgehammer_Isar.default_params ctxt
-     ([("provers", prover_name),
-       ("verbose", "true"),
-       ("type_enc", type_enc),
-       ("strict", strict),
-       ("timeout", string_of_int timeout),
-       ("preplay_timeout", preplay_timeout)]
-      |> sh_minimizeLST (*don't confuse the two minimization flags*)
-      |> max_new_mono_instancesLST
-      |> max_mono_itersLST)
-    val minimize =
-      Sledgehammer_Minimize.minimize_facts prover_name params
-          true 1 (Sledgehammer_Util.subgoal_count st)
-    val _ = log separator
-    val (used_facts, (preplay, message, message_tail)) =
-      minimize st (these (!named_thms))
-    val msg = message (preplay ()) ^ message_tail
-  in
-    case used_facts of
-      SOME named_thms' =>
-        (change_data id inc_min_succs;
-         change_data id (inc_min_ab_ratios ((100 * length named_thms') div n0));
-         if length named_thms' = n0
-         then log (minimize_tag id ^ "already minimal")
-         else (reconstructor := reconstructor_from_msg args msg;
-               named_thms := SOME named_thms';
-               log (minimize_tag id ^ "succeeded:\n" ^ msg))
-        )
-    | NONE => log (minimize_tag id ^ "failed: " ^ msg)
-  end
-
-fun override_params prover type_enc timeout =
-  [("provers", prover),
-   ("max_relevant", "0"),
-   ("type_enc", type_enc),
-   ("strict", "true"),
-   ("slice", "false"),
-   ("timeout", timeout |> Time.toSeconds |> string_of_int)]
-
-fun run_reconstructor trivial full m name reconstructor named_thms id
-    ({pre=st, timeout, log, pos, ...}: Mirabelle.run_args) =
-  let
-    fun do_reconstructor named_thms ctxt =
-      let
-        val ref_of_str =
-          suffix ";" #> Outer_Syntax.scan Position.none #> Parse_Spec.xthm
-          #> fst
-        val thms = named_thms |> maps snd
-        val facts = named_thms |> map (ref_of_str o fst o fst)
-        val relevance_override = {add = facts, del = [], only = true}
-        fun my_timeout time_slice =
-          timeout |> Time.toReal |> curry Real.* time_slice |> Time.fromReal
-        fun sledge_tac time_slice prover type_enc =
-          Sledgehammer_Tactics.sledgehammer_as_oracle_tac ctxt
-            (override_params prover type_enc (my_timeout time_slice))
-            relevance_override
-      in
-        if !reconstructor = "sledgehammer_tac" then
-          sledge_tac 0.2 ATP_Systems.z3_tptpN "mono_native"
-          ORELSE' sledge_tac 0.2 ATP_Systems.eN "mono_guards??"
-          ORELSE' sledge_tac 0.2 ATP_Systems.vampireN "mono_guards??"
-          ORELSE' sledge_tac 0.2 ATP_Systems.spassN "poly_tags"
-          ORELSE' Metis_Tactic.metis_tac [] ATP_Problem_Generate.combsN
-            ctxt thms
-        else if !reconstructor = "smt" then
-          SMT_Solver.smt_tac ctxt thms
-        else if full then
-          Metis_Tactic.metis_tac [ATP_Proof_Reconstruct.full_typesN]
-            ATP_Proof_Reconstruct.metis_default_lam_trans ctxt thms
-        else if String.isPrefix "metis (" (!reconstructor) then
-          let
-            val (type_encs, lam_trans) =
-              !reconstructor
-              |> Outer_Syntax.scan Position.start
-              |> filter Token.is_proper |> tl
-              |> Metis_Tactic.parse_metis_options |> fst
-              |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
-              ||> the_default ATP_Proof_Reconstruct.metis_default_lam_trans
-          in Metis_Tactic.metis_tac type_encs lam_trans ctxt thms end
-        else if !reconstructor = "metis" then
-          Metis_Tactic.metis_tac [] ATP_Proof_Reconstruct.metis_default_lam_trans ctxt
-            thms
-        else
-          K all_tac
-      end
-    fun apply_reconstructor named_thms =
-      Mirabelle.can_apply timeout (do_reconstructor named_thms) st
-
-    fun with_time (false, t) = "failed (" ^ string_of_int t ^ ")"
-      | with_time (true, t) = (change_data id (inc_reconstructor_success m);
-          if trivial then ()
-          else change_data id (inc_reconstructor_nontriv_success m);
-          change_data id (inc_reconstructor_lemmas m (length named_thms));
-          change_data id (inc_reconstructor_time m t);
-          change_data id (inc_reconstructor_posns m (pos, trivial));
-          if name = "proof" then change_data id (inc_reconstructor_proofs m)
-          else ();
-          "succeeded (" ^ string_of_int t ^ ")")
-    fun timed_reconstructor named_thms =
-      (with_time (Mirabelle.cpu_time apply_reconstructor named_thms), true)
-      handle TimeLimit.TimeOut => (change_data id (inc_reconstructor_timeout m);
-               ("timeout", false))
-           | ERROR msg => ("error: " ^ msg, false)
-
-    val _ = log separator
-    val _ = change_data id (inc_reconstructor_calls m)
-    val _ = if trivial then ()
-            else change_data id (inc_reconstructor_nontriv_calls m)
-  in
-    named_thms
-    |> timed_reconstructor
-    |>> log o prefix (reconstructor_tag reconstructor id)
-    |> snd
-  end
-
-val try_timeout = seconds 5.0
-
-(* crude hack *)
-val num_sledgehammer_calls = Unsynchronized.ref 0
-
-fun sledgehammer_action args id (st as {pre, name, ...}: Mirabelle.run_args) =
-  let val goal = Thm.major_prem_of (#goal (Proof.goal pre)) in
-    if can Logic.dest_conjunction goal orelse can Logic.dest_equals goal
-    then () else
-    let
-      val max_calls =
-        AList.lookup (op =) args max_callsK |> the_default "10000000"
-        |> Int.fromString |> the
-      val _ = num_sledgehammer_calls := !num_sledgehammer_calls + 1;
-    in
-      if !num_sledgehammer_calls > max_calls then ()
-      else
-        let
-          val reconstructor = Unsynchronized.ref ""
-          val named_thms =
-            Unsynchronized.ref (NONE : ((string * stature) * thm list) list option)
-          val minimize = AList.defined (op =) args minimizeK
-          val metis_ft = AList.defined (op =) args metis_ftK
-          val trivial =
-            if AList.lookup (op =) args check_trivialK |> the_default "false"
-                            |> Bool.fromString |> the then
-              Try0.try0 (SOME try_timeout) ([], [], [], []) pre
-              handle TimeLimit.TimeOut => false
-            else false
-          fun apply_reconstructor m1 m2 =
-            if metis_ft
-            then
-              if not (Mirabelle.catch_result (reconstructor_tag reconstructor) false
-                  (run_reconstructor trivial false m1 name reconstructor
-                       (these (!named_thms))) id st)
-              then
-                (Mirabelle.catch_result (reconstructor_tag reconstructor) false
-                  (run_reconstructor trivial true m2 name reconstructor
-                       (these (!named_thms))) id st; ())
-              else ()
-            else
-              (Mirabelle.catch_result (reconstructor_tag reconstructor) false
-                (run_reconstructor trivial false m1 name reconstructor
-                     (these (!named_thms))) id st; ())
-        in
-          change_data id (set_mini minimize);
-          Mirabelle.catch sh_tag (run_sledgehammer trivial args reconstructor
-                                                   named_thms) id st;
-          if is_some (!named_thms)
-          then
-           (apply_reconstructor Unminimized UnminimizedFT;
-            if minimize andalso not (null (these (!named_thms)))
-            then
-             (Mirabelle.catch minimize_tag
-                  (run_minimize args reconstructor named_thms) id st;
-              apply_reconstructor Minimized MinimizedFT)
-            else ())
-          else ()
-        end
-    end
-  end
-
-fun invoke args =
-  Mirabelle.register (init, sledgehammer_action args, done)
-
-end
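
The reconstructor_from_msg "hack" above recovers the reconstructor name from
Sledgehammer's textual output by cutting out the first "metis (...)" group of
the message. A self-contained Isabelle/ML sketch with a made-up message string
(illustrative only, not part of the changeset):

  val sample = "Try this: by (metis (full_types) foo_def bar_def) (42 ms)."
  val reconstructor =
    sample |> Substring.full
           |> Substring.position "metis ("
           |> snd |> Substring.position ")"
           |> fst |> Substring.string
           |> suffix ")"
  (* yields "metis (full_types)"; messages without "metis (" fall back to
     "metis" or "smt" in the original function *)
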
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,198 +0,0 @@
-(*  Title:      HOL/Mirabelle/Tools/mirabelle_sledgehammer_filter.ML
-    Author:     Jasmin Blanchette, TU Munich
-*)
-
-structure Mirabelle_Sledgehammer_Filter : MIRABELLE_ACTION =
-struct
-
-fun get args name default_value =
-  case AList.lookup (op =) args name of
-    SOME value => the (Real.fromString value)
-  | NONE => default_value
-
-fun extract_relevance_fudge args
-      {local_const_multiplier, worse_irrel_freq, higher_order_irrel_weight,
-       abs_rel_weight, abs_irrel_weight, skolem_irrel_weight,
-       theory_const_rel_weight, theory_const_irrel_weight,
-       chained_const_irrel_weight, intro_bonus, elim_bonus, simp_bonus,
-       local_bonus, assum_bonus, chained_bonus, max_imperfect, max_imperfect_exp,
-       threshold_divisor, ridiculous_threshold} =
-  {local_const_multiplier =
-       get args "local_const_multiplier" local_const_multiplier,
-   worse_irrel_freq = get args "worse_irrel_freq" worse_irrel_freq,
-   higher_order_irrel_weight =
-       get args "higher_order_irrel_weight" higher_order_irrel_weight,
-   abs_rel_weight = get args "abs_rel_weight" abs_rel_weight,
-   abs_irrel_weight = get args "abs_irrel_weight" abs_irrel_weight,
-   skolem_irrel_weight = get args "skolem_irrel_weight" skolem_irrel_weight,
-   theory_const_rel_weight =
-       get args "theory_const_rel_weight" theory_const_rel_weight,
-   theory_const_irrel_weight =
-       get args "theory_const_irrel_weight" theory_const_irrel_weight,
-   chained_const_irrel_weight =
-       get args "chained_const_irrel_weight" chained_const_irrel_weight,
-   intro_bonus = get args "intro_bonus" intro_bonus,
-   elim_bonus = get args "elim_bonus" elim_bonus,
-   simp_bonus = get args "simp_bonus" simp_bonus,
-   local_bonus = get args "local_bonus" local_bonus,
-   assum_bonus = get args "assum_bonus" assum_bonus,
-   chained_bonus = get args "chained_bonus" chained_bonus,
-   max_imperfect = get args "max_imperfect" max_imperfect,
-   max_imperfect_exp = get args "max_imperfect_exp" max_imperfect_exp,
-   threshold_divisor = get args "threshold_divisor" threshold_divisor,
-   ridiculous_threshold = get args "ridiculous_threshold" ridiculous_threshold}
-
-structure Prooftab =
-  Table(type key = int * int val ord = prod_ord int_ord int_ord)
-
-val proof_table = Unsynchronized.ref (Prooftab.empty: string list list Prooftab.table)
-
-val num_successes = Unsynchronized.ref ([] : (int * int) list)
-val num_failures = Unsynchronized.ref ([] : (int * int) list)
-val num_found_proofs = Unsynchronized.ref ([] : (int * int) list)
-val num_lost_proofs = Unsynchronized.ref ([] : (int * int) list)
-val num_found_facts = Unsynchronized.ref ([] : (int * int) list)
-val num_lost_facts = Unsynchronized.ref ([] : (int * int) list)
-
-fun get id c = the_default 0 (AList.lookup (op =) (!c) id)
-fun add id c n =
-  c := (case AList.lookup (op =) (!c) id of
-          SOME m => AList.update (op =) (id, m + n) (!c)
-        | NONE => (id, n) :: !c)
-
-fun init proof_file _ thy =
-  let
-    fun do_line line =
-      case line |> space_explode ":" of
-        [line_num, offset, proof] =>
-        SOME (pairself (the o Int.fromString) (line_num, offset),
-              proof |> space_explode " " |> filter_out (curry (op =) ""))
-       | _ => NONE
-    val proofs = File.read (Path.explode proof_file)
-    val proof_tab =
-      proofs |> space_explode "\n"
-             |> map_filter do_line
-             |> AList.coalesce (op =)
-             |> Prooftab.make
-  in proof_table := proof_tab; thy end
-
-fun percentage a b = if b = 0 then "N/A" else string_of_int (a * 100 div b)
-fun percentage_alt a b = percentage a (a + b)
-
-fun done id ({log, ...} : Mirabelle.done_args) =
-  if get id num_successes + get id num_failures > 0 then
-    (log "";
-     log ("Number of overall successes: " ^
-          string_of_int (get id num_successes));
-     log ("Number of overall failures: " ^ string_of_int (get id num_failures));
-     log ("Overall success rate: " ^
-          percentage_alt (get id num_successes) (get id num_failures) ^ "%");
-     log ("Number of found proofs: " ^ string_of_int (get id num_found_proofs));
-     log ("Number of lost proofs: " ^ string_of_int (get id num_lost_proofs));
-     log ("Proof found rate: " ^
-          percentage_alt (get id num_found_proofs) (get id num_lost_proofs) ^
-          "%");
-     log ("Number of found facts: " ^ string_of_int (get id num_found_facts));
-     log ("Number of lost facts: " ^ string_of_int (get id num_lost_facts));
-     log ("Fact found rate: " ^
-          percentage_alt (get id num_found_facts) (get id num_lost_facts) ^
-          "%"))
-  else
-    ()
-
-val default_prover = ATP_Systems.eN (* arbitrary ATP *)
-
-fun with_index (i, s) = s ^ "@" ^ string_of_int i
-
-fun action args id ({pre, pos, log, ...} : Mirabelle.run_args) =
-  case (Position.line_of pos, Position.offset_of pos) of
-    (SOME line_num, SOME offset) =>
-    (case Prooftab.lookup (!proof_table) (line_num, offset) of
-       SOME proofs =>
-       let
-         val {context = ctxt, facts = chained_ths, goal} = Proof.goal pre
-         val prover = AList.lookup (op =) args "prover"
-                      |> the_default default_prover
-         val {relevance_thresholds, max_relevant, slice, ...} =
-           Sledgehammer_Isar.default_params ctxt args
-         val default_max_relevant =
-           Sledgehammer_Provers.default_max_relevant_for_prover ctxt slice
-                                                                prover
-         val is_appropriate_prop =
-           Sledgehammer_Provers.is_appropriate_prop_for_prover ctxt
-               default_prover
-         val is_built_in_const =
-           Sledgehammer_Provers.is_built_in_const_for_prover ctxt default_prover
-         val relevance_fudge =
-           extract_relevance_fudge args
-               (Sledgehammer_Provers.relevance_fudge_for_prover ctxt prover)
-         val relevance_override = {add = [], del = [], only = false}
-         val subgoal = 1
-         val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal ctxt goal subgoal
-         val ho_atp = Sledgehammer_Provers.is_ho_atp ctxt prover
-         val facts =
-          Sledgehammer_Filter.nearly_all_facts ctxt ho_atp relevance_override
-                                               chained_ths hyp_ts concl_t
-          |> filter (is_appropriate_prop o prop_of o snd)
-          |> Sledgehammer_Filter.relevant_facts ctxt relevance_thresholds
-                 (the_default default_max_relevant max_relevant)
-                 is_built_in_const relevance_fudge relevance_override
-                 chained_ths hyp_ts concl_t
-           |> map (fst o fst)
-         val (found_facts, lost_facts) =
-           flat proofs |> sort_distinct string_ord
-           |> map (fn fact => (find_index (curry (op =) fact) facts, fact))
-           |> List.partition (curry (op <=) 0 o fst)
-           |>> sort (prod_ord int_ord string_ord) ||> map snd
-         val found_proofs = filter (forall (member (op =) facts)) proofs
-         val n = length found_proofs
-         val _ =
-           if n = 0 then
-             (add id num_failures 1; log "Failure")
-           else
-             (add id num_successes 1;
-              add id num_found_proofs n;
-              log ("Success (" ^ string_of_int n ^ " of " ^
-                   string_of_int (length proofs) ^ " proofs)"))
-         val _ = add id num_lost_proofs (length proofs - n)
-         val _ = add id num_found_facts (length found_facts)
-         val _ = add id num_lost_facts (length lost_facts)
-         val _ =
-           if null found_facts then
-             ()
-           else
-             let
-               val found_weight =
-                 Real.fromInt (fold (fn (n, _) =>
-                                        Integer.add (n * n)) found_facts 0)
-                   / Real.fromInt (length found_facts)
-                 |> Math.sqrt |> Real.ceil
-             in
-               log ("Found facts (among " ^ string_of_int (length facts) ^
-                    ", weight " ^ string_of_int found_weight ^ "): " ^
-                    commas (map with_index found_facts))
-             end
-         val _ = if null lost_facts then
-                   ()
-                 else
-                   log ("Lost facts (among " ^ string_of_int (length facts) ^
-                        "): " ^ commas lost_facts)
-       in () end
-     | NONE => log "No known proof")
-  | _ => ()
-
-val proof_fileK = "proof_file"
-
-fun invoke args =
-  let
-    val (pf_args, other_args) =
-      args |> List.partition (curry (op =) proof_fileK o fst)
-    val proof_file = case pf_args of
-                       [] => error "No \"proof_file\" specified"
-                     | (_, s) :: _ => s
-  in Mirabelle.register (init proof_file, action other_args, done) end
-
-end;
-
-(* Workaround to keep the "mirabelle.pl" script happy *)
-structure Mirabelle_Sledgehammer_filter = Mirabelle_Sledgehammer_Filter;
--- a/src/HOL/Mirabelle/lib/Tools/mirabelle	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Mirabelle/lib/Tools/mirabelle	Mon Apr 16 19:01:57 2012 +0200
@@ -8,13 +8,18 @@
 PRG="$(basename "$0")"
 
 function print_action_names() {
-  TOOLS="$MIRABELLE_HOME/Tools/mirabelle_*.ML"
-  for NAME in $TOOLS
+  ACTIONS="$MIRABELLE_HOME/Actions/mirabelle_*.ML"
+  for ACTION in $ACTIONS
   do
-    echo $NAME | sed 's/.*mirabelle_\(.*\)\.ML/    \1/'
+    echo $ACTION | perl -w -p -e 's/.*mirabelle_(.*)\.ML/    $1/'
   done
 }
 
+function print_sledgehammer_options() {
+  grep -e "^val .*K =" $MIRABELLE_HOME/Actions/mirabelle_sledgehammer.ML \
+  | perl -w -p -e 's/val .*K *= *"(.*)" *\(\*(.*)\*\)/    $1$2/'
+}
+
 function usage() {
   [ -n "$MIRABELLE_OUTPUT_PATH" ] && out="$MIRABELLE_OUTPUT_PATH" || out="None"
   timeout="$MIRABELLE_TIMEOUT"
@@ -36,18 +41,7 @@
   print_action_names
   echo
   echo "  Available OPTIONs for the ACTION sledgehammer:"
-  echo "    * prover=NAME: name of the external prover to call"
-  echo "    * prover_timeout=TIME: timeout for invoked ATP (seconds of process time)"
-  echo "    * prover_hard_timeout=TIME: timeout for invoked ATP (seconds of elapsed"
-  echo "      time)"
-  echo "    * keep=PATH: path where to keep temporary files created by sledgehammer"
-  echo "    * full_types: enable fully-typed encoding"
-  echo "    * minimize: enable minimization of theorem set found by sledgehammer"
-  echo "    * minimize_timeout=TIME: timeout for each minimization step (seconds of"
-  echo "      process time)"
-  echo "    * metis: apply metis to the theorems found by sledgehammer"
-  echo "    * metis_ft: apply metis with fully-typed encoding to the theorems found"
-  echo "      by sledgehammer"
+  print_sledgehammer_options
   echo
   echo "  FILES is a list of theory files, where each file is either NAME.thy"
   echo "  or NAME.thy[START:END] and START and END are numbers indicating the"
@@ -67,6 +61,8 @@
 
 # options
 
+[ $# -eq 0 ] && usage
+
 while getopts "L:T:O:t:q?" OPT
 do
   case "$OPT" in
--- a/src/HOL/Mirabelle/lib/scripts/mirabelle.pl	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Mirabelle/lib/scripts/mirabelle.pl	Mon Apr 16 19:01:57 2012 +0200
@@ -46,7 +46,7 @@
 my @action_names;
 foreach (split(/:/, $actions)) {
   if (m/([^[]*)/) {
-    push @action_files, "\"$mirabelle_home/Tools/mirabelle_$1.ML\"";
+    push @action_files, "\"$mirabelle_home/Actions/mirabelle_$1.ML\"";
     push @action_names, $1;
   }
 }
--- a/src/HOL/Multivariate_Analysis/Convex_Euclidean_Space.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Multivariate_Analysis/Convex_Euclidean_Space.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -5428,13 +5428,13 @@
 
 lemma closure_sum:
   fixes S T :: "('n::euclidean_space) set"
-  shows "closure S \<oplus> closure T \<subseteq> closure (S \<oplus> T)"
+  shows "closure S + closure T \<subseteq> closure (S + T)"
 proof-
-  have "(closure S) \<oplus> (closure T) = (\<lambda>(x,y). x + y) ` (closure S \<times> closure T)"
+  have "(closure S) + (closure T) = (\<lambda>(x,y). x + y) ` (closure S \<times> closure T)"
     by (simp add: set_plus_image)
   also have "... = (\<lambda>(x,y). x + y) ` closure (S \<times> T)"
     using closure_direct_sum by auto
-  also have "... \<subseteq> closure (S \<oplus> T)"
+  also have "... \<subseteq> closure (S + T)"
     using fst_snd_linear closure_linear_image[of "(\<lambda>(x,y). x + y)" "S \<times> T"]
     by (auto simp: set_plus_image)
   finally show ?thesis
@@ -5444,7 +5444,7 @@
 lemma convex_oplus:
 fixes S T :: "('n::euclidean_space) set"
 assumes "convex S" "convex T"
-shows "convex (S \<oplus> T)"
+shows "convex (S + T)"
 proof-
 have "{x + y |x y. x : S & y : T} = {c. EX a:S. EX b:T. c = a + b}" by auto
 thus ?thesis unfolding set_plus_def using convex_sums[of S T] assms by auto
@@ -5452,13 +5452,13 @@
 
 lemma convex_hull_sum:
 fixes S T :: "('n::euclidean_space) set"
-shows "convex hull (S \<oplus> T) = (convex hull S) \<oplus> (convex hull T)"
+shows "convex hull (S + T) = (convex hull S) + (convex hull T)"
 proof-
-have "(convex hull S) \<oplus> (convex hull T) =
+have "(convex hull S) + (convex hull T) =
       (%(x,y). x + y) ` ((convex hull S) <*> (convex hull T))"
    by (simp add: set_plus_image)
 also have "... = (%(x,y). x + y) ` (convex hull (S <*> T))" using convex_hull_direct_sum by auto
-also have "...= convex hull (S \<oplus> T)" using fst_snd_linear linear_conv_bounded_linear
+also have "...= convex hull (S + T)" using fst_snd_linear linear_conv_bounded_linear
    convex_hull_linear_image[of "(%(x,y). x + y)" "S <*> T"] by (auto simp add: set_plus_image)
 finally show ?thesis by auto
 qed
@@ -5466,12 +5466,12 @@
 lemma rel_interior_sum:
 fixes S T :: "('n::euclidean_space) set"
 assumes "convex S" "convex T"
-shows "rel_interior (S \<oplus> T) = (rel_interior S) \<oplus> (rel_interior T)"
+shows "rel_interior (S + T) = (rel_interior S) + (rel_interior T)"
 proof-
-have "(rel_interior S) \<oplus> (rel_interior T) = (%(x,y). x + y) ` (rel_interior S <*> rel_interior T)"
+have "(rel_interior S) + (rel_interior T) = (%(x,y). x + y) ` (rel_interior S <*> rel_interior T)"
    by (simp add: set_plus_image)
 also have "... = (%(x,y). x + y) ` rel_interior (S <*> T)" using rel_interior_direct_sum assms by auto
-also have "...= rel_interior (S \<oplus> T)" using fst_snd_linear convex_direct_sum assms
+also have "...= rel_interior (S + T)" using fst_snd_linear convex_direct_sum assms
    rel_interior_convex_linear_image[of "(%(x,y). x + y)" "S <*> T"] by (auto simp add: set_plus_image)
 finally show ?thesis by auto
 qed
@@ -5479,7 +5479,7 @@
 lemma convex_sum_gen:
   fixes S :: "'a \<Rightarrow> 'n::euclidean_space set"
   assumes "\<And>i. i \<in> I \<Longrightarrow> (convex (S i))"
-  shows "convex (setsum_set S I)"
+  shows "convex (setsum S I)"
 proof cases
   assume "finite I" from this assms show ?thesis
     by induct (auto simp: convex_oplus)
@@ -5487,14 +5487,14 @@
 
 lemma convex_hull_sum_gen:
 fixes S :: "'a => ('n::euclidean_space) set"
-shows "convex hull (setsum_set S I) = setsum_set (%i. (convex hull (S i))) I"
+shows "convex hull (setsum S I) = setsum (%i. (convex hull (S i))) I"
 apply (subst setsum_set_linear) using convex_hull_sum convex_hull_singleton by auto
 
 
 lemma rel_interior_sum_gen:
 fixes S :: "'a => ('n::euclidean_space) set"
 assumes "!i:I. (convex (S i))"
-shows "rel_interior (setsum_set S I) = setsum_set (%i. (rel_interior (S i))) I"
+shows "rel_interior (setsum S I) = setsum (%i. (rel_interior (S i))) I"
 apply (subst setsum_set_cond_linear[of convex])
   using rel_interior_sum rel_interior_sing[of "0"] assms by (auto simp add: convex_oplus)
 
@@ -5507,13 +5507,13 @@
 lemma convex_rel_open_sum:
 fixes S T :: "('n::euclidean_space) set"
 assumes "convex S" "rel_open S" "convex T" "rel_open T"
-shows "convex (S \<oplus> T) & rel_open (S \<oplus> T)"
+shows "convex (S + T) & rel_open (S + T)"
 by (metis assms convex_oplus rel_interior_sum rel_open_def)
 
 lemma convex_hull_finite_union_cones:
 assumes "finite I" "I ~= {}"
 assumes "!i:I. (convex (S i) & cone (S i) & (S i) ~= {})"
-shows "convex hull (Union (S ` I)) = setsum_set S I"
+shows "convex hull (Union (S ` I)) = setsum S I"
   (is "?lhs = ?rhs")
 proof-
 { fix x assume "x : ?lhs"
@@ -5547,16 +5547,16 @@
 fixes S T :: "('m::euclidean_space) set"
 assumes "convex S" "cone S" "S ~= {}"
 assumes "convex T" "cone T" "T ~= {}"
-shows "convex hull (S Un T) = S \<oplus> T"
+shows "convex hull (S Un T) = S + T"
 proof-
 def I == "{(1::nat),2}"
 def A == "(%i. (if i=(1::nat) then S else T))"
 have "Union (A ` I) = S Un T" using A_def I_def by auto
 hence "convex hull (Union (A ` I)) = convex hull (S Un T)" by auto
-moreover have "convex hull Union (A ` I) = setsum_set A I"
+moreover have "convex hull Union (A ` I) = setsum A I"
     apply (subst convex_hull_finite_union_cones[of I A]) using assms A_def I_def by auto
 moreover have
-  "setsum_set A I = S \<oplus> T" using A_def I_def
+  "setsum A I = S + T" using A_def I_def
      unfolding set_plus_def apply auto unfolding set_plus_def by auto
 ultimately show ?thesis by auto
 qed
@@ -5600,15 +5600,15 @@
   ultimately have "convex hull (Union (K ` I)) >= K0"
      unfolding K0_def using hull_minimal[of _ "convex hull (Union (K ` I))" "cone"] by blast
   hence "K0 = convex hull (Union (K ` I))" using geq by auto
-  also have "...=setsum_set K I"
+  also have "...=setsum K I"
      apply (subst convex_hull_finite_union_cones[of I K])
      using assms apply blast
      using `I ~= {}` apply blast
      unfolding K_def apply rule
      apply (subst convex_cone_hull) apply (subst convex_direct_sum)
      using assms cone_cone_hull `!i:I. K i ~= {}` K_def by auto
-  finally have "K0 = setsum_set K I" by auto
-  hence *: "rel_interior K0 = setsum_set (%i. (rel_interior (K i))) I"
+  finally have "K0 = setsum K I" by auto
+  hence *: "rel_interior K0 = setsum (%i. (rel_interior (K i))) I"
      using rel_interior_sum_gen[of I K] convK by auto
   { fix x assume "x : ?lhs"
     hence "((1::real),x) : rel_interior K0" using K0_def C0_def
--- a/src/HOL/NSA/Filter.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/NSA/Filter.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -20,65 +20,73 @@
   assumes empty [iff]: "{} \<notin> F"
   assumes Int:         "\<lbrakk>u \<in> F; v \<in> F\<rbrakk> \<Longrightarrow> u \<inter> v \<in> F"
   assumes subset:      "\<lbrakk>u \<in> F; u \<subseteq> v\<rbrakk> \<Longrightarrow> v \<in> F"
+begin
 
-lemma (in filter) memD: "A \<in> F \<Longrightarrow> - A \<notin> F"
+lemma memD: "A \<in> F \<Longrightarrow> - A \<notin> F"
 proof
   assume "A \<in> F" and "- A \<in> F"
   hence "A \<inter> (- A) \<in> F" by (rule Int)
   thus "False" by simp
 qed
 
-lemma (in filter) not_memI: "- A \<in> F \<Longrightarrow> A \<notin> F"
+lemma not_memI: "- A \<in> F \<Longrightarrow> A \<notin> F"
 by (drule memD, simp)
 
-lemma (in filter) Int_iff: "(x \<inter> y \<in> F) = (x \<in> F \<and> y \<in> F)"
+lemma Int_iff: "(x \<inter> y \<in> F) = (x \<in> F \<and> y \<in> F)"
 by (auto elim: subset intro: Int)
 
+end
+
 subsubsection {* Ultrafilters *}
 
 locale ultrafilter = filter +
   assumes ultra: "A \<in> F \<or> - A \<in> F"
+begin
 
-lemma (in ultrafilter) memI: "- A \<notin> F \<Longrightarrow> A \<in> F"
-by (cut_tac ultra [of A], simp)
+lemma memI: "- A \<notin> F \<Longrightarrow> A \<in> F"
+using ultra [of A] by simp
 
-lemma (in ultrafilter) not_memD: "A \<notin> F \<Longrightarrow> - A \<in> F"
+lemma not_memD: "A \<notin> F \<Longrightarrow> - A \<in> F"
 by (rule memI, simp)
 
-lemma (in ultrafilter) not_mem_iff: "(A \<notin> F) = (- A \<in> F)"
+lemma not_mem_iff: "(A \<notin> F) = (- A \<in> F)"
 by (rule iffI [OF not_memD not_memI])
 
-lemma (in ultrafilter) Compl_iff: "(- A \<in> F) = (A \<notin> F)"
+lemma Compl_iff: "(- A \<in> F) = (A \<notin> F)"
 by (rule iffI [OF not_memI not_memD])
 
-lemma (in ultrafilter) Un_iff: "(x \<union> y \<in> F) = (x \<in> F \<or> y \<in> F)"
+lemma Un_iff: "(x \<union> y \<in> F) = (x \<in> F \<or> y \<in> F)"
  apply (rule iffI)
   apply (erule contrapos_pp)
   apply (simp add: Int_iff not_mem_iff)
  apply (auto elim: subset)
 done
 
+end
+
 subsubsection {* Free Ultrafilters *}
 
 locale freeultrafilter = ultrafilter +
   assumes infinite: "A \<in> F \<Longrightarrow> infinite A"
+begin
 
-lemma (in freeultrafilter) finite: "finite A \<Longrightarrow> A \<notin> F"
+lemma finite: "finite A \<Longrightarrow> A \<notin> F"
 by (erule contrapos_pn, erule infinite)
 
-lemma (in freeultrafilter) singleton: "{x} \<notin> F"
+lemma singleton: "{x} \<notin> F"
 by (rule finite, simp)
 
-lemma (in freeultrafilter) insert_iff [simp]: "(insert x A \<in> F) = (A \<in> F)"
+lemma insert_iff [simp]: "(insert x A \<in> F) = (A \<in> F)"
 apply (subst insert_is_Un)
 apply (subst Un_iff)
 apply (simp add: singleton)
 done
 
-lemma (in freeultrafilter) filter: "filter F" ..
+lemma filter: "filter F" ..
 
-lemma (in freeultrafilter) ultrafilter: "ultrafilter F" ..
+lemma ultrafilter: "ultrafilter F" ..
 
+end
 
 subsection {* Collect properties *}
 
@@ -192,8 +200,8 @@
 
 subsection {* Ultrafilter Theorem *}
 
-text "A locale makes proof of ultrafilter Theorem more modular"
-locale UFT =
+text "A local context makes proof of ultrafilter Theorem more modular"
+context
   fixes   frechet :: "'a set set"
   and     superfrechet :: "'a set set set"
 
@@ -201,16 +209,17 @@
 
   defines frechet_def: "frechet \<equiv> {A. finite (- A)}"
   and     superfrechet_def: "superfrechet \<equiv> {G. filter G \<and> frechet \<subseteq> G}"
+begin
 
-lemma (in UFT) superfrechetI:
+lemma superfrechetI:
   "\<lbrakk>filter G; frechet \<subseteq> G\<rbrakk> \<Longrightarrow> G \<in> superfrechet"
 by (simp add: superfrechet_def)
 
-lemma (in UFT) superfrechetD1:
+lemma superfrechetD1:
   "G \<in> superfrechet \<Longrightarrow> filter G"
 by (simp add: superfrechet_def)
 
-lemma (in UFT) superfrechetD2:
+lemma superfrechetD2:
   "G \<in> superfrechet \<Longrightarrow> frechet \<subseteq> G"
 by (simp add: superfrechet_def)
 
@@ -240,13 +249,13 @@
              export main result: The ultrafilter Theorem
 *}
 
-lemma (in UFT) filter_frechet: "filter frechet"
+lemma filter_frechet: "filter frechet"
 by (unfold frechet_def, rule filter_cofinite [OF infinite_UNIV])
 
-lemma (in UFT) frechet_in_superfrechet: "frechet \<in> superfrechet"
+lemma frechet_in_superfrechet: "frechet \<in> superfrechet"
 by (rule superfrechetI [OF filter_frechet subset_refl])
 
-lemma (in UFT) lemma_mem_chain_filter:
+lemma lemma_mem_chain_filter:
   "\<lbrakk>c \<in> chain superfrechet; x \<in> c\<rbrakk> \<Longrightarrow> filter x"
 by (unfold chain_def superfrechet_def, blast)
 
@@ -260,8 +269,8 @@
 
 Number 2 is trivial, but 1 requires us to prove all the filter rules."
 
-lemma (in UFT) Union_chain_UNIV:
-"\<lbrakk>c \<in> chain superfrechet; c \<noteq> {}\<rbrakk> \<Longrightarrow> UNIV \<in> \<Union>c"
+lemma Union_chain_UNIV:
+  "\<lbrakk>c \<in> chain superfrechet; c \<noteq> {}\<rbrakk> \<Longrightarrow> UNIV \<in> \<Union>c"
 proof -
   assume 1: "c \<in> chain superfrechet" and 2: "c \<noteq> {}"
   from 2 obtain x where 3: "x \<in> c" by blast
@@ -270,8 +279,8 @@
   with 3 show "UNIV \<in> \<Union>c" by blast
 qed
 
-lemma (in UFT) Union_chain_empty:
-"c \<in> chain superfrechet \<Longrightarrow> {} \<notin> \<Union>c"
+lemma Union_chain_empty:
+  "c \<in> chain superfrechet \<Longrightarrow> {} \<notin> \<Union>c"
 proof
   assume 1: "c \<in> chain superfrechet" and 2: "{} \<in> \<Union>c"
   from 2 obtain x where 3: "x \<in> c" and 4: "{} \<in> x" ..
@@ -280,8 +289,8 @@
   with 4 show "False" by simp
 qed
 
-lemma (in UFT) Union_chain_Int:
-"\<lbrakk>c \<in> chain superfrechet; u \<in> \<Union>c; v \<in> \<Union>c\<rbrakk> \<Longrightarrow> u \<inter> v \<in> \<Union>c"
+lemma Union_chain_Int:
+  "\<lbrakk>c \<in> chain superfrechet; u \<in> \<Union>c; v \<in> \<Union>c\<rbrakk> \<Longrightarrow> u \<inter> v \<in> \<Union>c"
 proof -
   assume c: "c \<in> chain superfrechet"
   assume "u \<in> \<Union>c"
@@ -298,8 +307,8 @@
   with xyc show "u \<inter> v \<in> \<Union>c" ..
 qed
 
-lemma (in UFT) Union_chain_subset:
-"\<lbrakk>c \<in> chain superfrechet; u \<in> \<Union>c; u \<subseteq> v\<rbrakk> \<Longrightarrow> v \<in> \<Union>c"
+lemma Union_chain_subset:
+  "\<lbrakk>c \<in> chain superfrechet; u \<in> \<Union>c; u \<subseteq> v\<rbrakk> \<Longrightarrow> v \<in> \<Union>c"
 proof -
   assume c: "c \<in> chain superfrechet"
      and u: "u \<in> \<Union>c" and uv: "u \<subseteq> v"
@@ -309,7 +318,7 @@
   with xc show "v \<in> \<Union>c" ..
 qed
 
-lemma (in UFT) Union_chain_filter:
+lemma Union_chain_filter:
 assumes chain: "c \<in> chain superfrechet" and nonempty: "c \<noteq> {}"
 shows "filter (\<Union>c)"
 proof (rule filter.intro)
@@ -324,11 +333,11 @@
   with chain show "v \<in> \<Union>c" by (rule Union_chain_subset)
 qed
 
-lemma (in UFT) lemma_mem_chain_frechet_subset:
+lemma lemma_mem_chain_frechet_subset:
   "\<lbrakk>c \<in> chain superfrechet; x \<in> c\<rbrakk> \<Longrightarrow> frechet \<subseteq> x"
 by (unfold superfrechet_def chain_def, blast)
 
-lemma (in UFT) Union_chain_superfrechet:
+lemma Union_chain_superfrechet:
   "\<lbrakk>c \<noteq> {}; c \<in> chain superfrechet\<rbrakk> \<Longrightarrow> \<Union>c \<in> superfrechet"
 proof (rule superfrechetI)
   assume 1: "c \<in> chain superfrechet" and 2: "c \<noteq> {}"
@@ -341,7 +350,7 @@
 
 subsubsection {* Existence of free ultrafilter *}
 
-lemma (in UFT) max_cofinite_filter_Ex:
+lemma max_cofinite_filter_Ex:
   "\<exists>U\<in>superfrechet. \<forall>G\<in>superfrechet. U \<subseteq> G \<longrightarrow> U = G"
 proof (rule Zorn_Lemma2 [rule_format])
   fix c assume c: "c \<in> chain superfrechet"
@@ -357,7 +366,7 @@
   qed
 qed
 
-lemma (in UFT) mem_superfrechet_all_infinite:
+lemma mem_superfrechet_all_infinite:
   "\<lbrakk>U \<in> superfrechet; A \<in> U\<rbrakk> \<Longrightarrow> infinite A"
 proof
   assume U: "U \<in> superfrechet" and A: "A \<in> U" and fin: "finite A"
@@ -371,7 +380,7 @@
 
 text {* There exists a free ultrafilter on any infinite set *}
 
-lemma (in UFT) freeultrafilter_ex:
+lemma freeultrafilter_Ex:
   "\<exists>U::'a set set. freeultrafilter U"
 proof -
   from max_cofinite_filter_Ex obtain U
@@ -397,7 +406,7 @@
   then show ?thesis ..
 qed
 
-lemmas freeultrafilter_Ex = UFT.freeultrafilter_ex [OF UFT.intro]
+end
 
 hide_const (open) filter
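The Filter.thy hunks above replace the repeated "lemma (in filter) ..." headers by a
single "locale ... begin ... end" block per locale. The same idiom in miniature (the
semigroup' locale and its lemma are only an illustrative sketch, not part of this
changeset):

    locale semigroup' =
      fixes f :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"
      assumes assoc: "f (f x y) z = f x (f y z)"
    begin

    lemma assoc_sym: "f x (f y z) = f (f x y) z"
      by (simp add: assoc)

    end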
 
--- a/src/HOL/NSA/StarDef.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/NSA/StarDef.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -89,7 +89,13 @@
 
 text {*Initialize transfer tactic.*}
 use "transfer.ML"
-setup Transfer.setup
+setup Transfer_Principle.setup
+
+method_setup transfer = {*
+  Attrib.thms >> (fn ths => fn ctxt =>
+    SIMPLE_METHOD' (Transfer_Principle.transfer_tac ctxt ths))
+*} "transfer principle"
+
 
 text {* Transfer introduction rules. *}
 
@@ -167,7 +173,7 @@
   "Standard = range star_of"
 
 text {* Transfer tactic should remove occurrences of @{term star_of} *}
-setup {* Transfer.add_const "StarDef.star_of" *}
+setup {* Transfer_Principle.add_const "StarDef.star_of" *}
 
 declare star_of_def [transfer_intro]
 
@@ -194,7 +200,7 @@
     UN_equiv_class2 [OF equiv_starrel equiv_starrel Ifun_congruent2])
 
 text {* Transfer tactic should remove occurrences of @{term Ifun} *}
-setup {* Transfer.add_const "StarDef.Ifun" *}
+setup {* Transfer_Principle.add_const "StarDef.Ifun" *}
 
 lemma transfer_Ifun [transfer_intro]:
   "\<lbrakk>f \<equiv> star_n F; x \<equiv> star_n X\<rbrakk> \<Longrightarrow> f \<star> x \<equiv> star_n (\<lambda>n. F n (X n))"
@@ -301,7 +307,7 @@
 by (simp add: unstar_def star_of_inject)
 
 text {* Transfer tactic should remove occurrences of @{term unstar} *}
-setup {* Transfer.add_const "StarDef.unstar" *}
+setup {* Transfer_Principle.add_const "StarDef.unstar" *}
 
 lemma transfer_unstar [transfer_intro]:
   "p \<equiv> star_n P \<Longrightarrow> unstar p \<equiv> {n. P n} \<in> \<U>"
@@ -343,7 +349,7 @@
 by (simp add: Iset_def starP2_star_n)
 
 text {* Transfer tactic should remove occurrences of @{term Iset} *}
-setup {* Transfer.add_const "StarDef.Iset" *}
+setup {* Transfer_Principle.add_const "StarDef.Iset" *}
 
 lemma transfer_mem [transfer_intro]:
   "\<lbrakk>x \<equiv> star_n X; a \<equiv> Iset (star_n A)\<rbrakk>
--- a/src/HOL/NSA/transfer.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/NSA/transfer.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -4,14 +4,14 @@
 Transfer principle tactic for nonstandard analysis.
 *)
 
-signature TRANSFER =
+signature TRANSFER_PRINCIPLE =
 sig
   val transfer_tac: Proof.context -> thm list -> int -> tactic
   val add_const: string -> theory -> theory
   val setup: theory -> theory
 end;
 
-structure Transfer: TRANSFER =
+structure Transfer_Principle: TRANSFER_PRINCIPLE =
 struct
 
 structure TransferData = Generic_Data
@@ -112,8 +112,6 @@
   Attrib.setup @{binding transfer_unfold} (Attrib.add_del unfold_add unfold_del)
     "declaration of transfer unfolding rule" #>
   Attrib.setup @{binding transfer_refold} (Attrib.add_del refold_add refold_del)
-    "declaration of transfer refolding rule" #>
-  Method.setup @{binding transfer} (Attrib.thms >> (fn ths => fn ctxt =>
-    SIMPLE_METHOD' (transfer_tac ctxt ths))) "transfer principle";
+    "declaration of transfer refolding rule"
 
 end;
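The StarDef.thy and transfer.ML hunks above (like the Orderings.thy and Presburger.thy
hunks below) follow one pattern: method registration is moved out of the ML-level
Method.setup/setup boilerplate into a theory-level method_setup command. A minimal
sketch of that idiom, using a hypothetical method name single_simp that is not part of
this changeset:

    method_setup single_simp = {*
      Scan.succeed (fn ctxt =>
        SIMPLE_METHOD' (asm_full_simp_tac (simpset_of ctxt)))
    *} "apply the default simpset to one subgoal"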
--- a/src/HOL/Nat.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Nat.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1373,26 +1373,24 @@
 context linordered_semidom
 begin
 
-lemma zero_le_imp_of_nat: "0 \<le> of_nat m"
-  by (induct m) simp_all
+lemma of_nat_0_le_iff [simp]: "0 \<le> of_nat n"
+  by (induct n) simp_all
 
-lemma less_imp_of_nat_less: "m < n \<Longrightarrow> of_nat m < of_nat n"
-  apply (induct m n rule: diff_induct, simp_all)
-  apply (rule add_pos_nonneg [OF zero_less_one zero_le_imp_of_nat])
-  done
-
-lemma of_nat_less_imp_less: "of_nat m < of_nat n \<Longrightarrow> m < n"
-  apply (induct m n rule: diff_induct, simp_all)
-  apply (insert zero_le_imp_of_nat)
-  apply (force simp add: not_less [symmetric])
-  done
+lemma of_nat_less_0_iff [simp]: "\<not> of_nat m < 0"
+  by (simp add: not_less)
 
 lemma of_nat_less_iff [simp]: "of_nat m < of_nat n \<longleftrightarrow> m < n"
-  by (blast intro: of_nat_less_imp_less less_imp_of_nat_less)
+  by (induct m n rule: diff_induct, simp_all add: add_pos_nonneg)
 
 lemma of_nat_le_iff [simp]: "of_nat m \<le> of_nat n \<longleftrightarrow> m \<le> n"
   by (simp add: not_less [symmetric] linorder_not_less [symmetric])
 
+lemma less_imp_of_nat_less: "m < n \<Longrightarrow> of_nat m < of_nat n"
+  by simp
+
+lemma of_nat_less_imp_less: "of_nat m < of_nat n \<Longrightarrow> m < n"
+  by simp
+
 text{*Every @{text linordered_semidom} has characteristic zero.*}
 
 subclass semiring_char_0 proof
@@ -1400,18 +1398,12 @@
 
 text{*Special cases where either operand is zero*}
 
-lemma of_nat_0_le_iff [simp]: "0 \<le> of_nat n"
-  by (rule of_nat_le_iff [of 0, simplified])
-
 lemma of_nat_le_0_iff [simp, no_atp]: "of_nat m \<le> 0 \<longleftrightarrow> m = 0"
   by (rule of_nat_le_iff [of _ 0, simplified])
 
 lemma of_nat_0_less_iff [simp]: "0 < of_nat n \<longleftrightarrow> 0 < n"
   by (rule of_nat_less_iff [of 0, simplified])
 
-lemma of_nat_less_0_iff [simp]: "\<not> of_nat m < 0"
-  by (rule of_nat_less_iff [of _ 0, simplified])
-
 end
 
 context ring_1
--- a/src/HOL/Nominal/Examples/Standardization.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Nominal/Examples/Standardization.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -213,7 +213,8 @@
     prefer 2
     apply (erule allE, erule impE, rule refl, erule spec)
     apply simp
-   apply (clarsimp simp add: foldr_conv_foldl [symmetric] foldr_def fold_plus_listsum_rev listsum_map_remove1)
+    apply (simp only: foldl_conv_fold add_commute fold_plus_listsum_rev)
+    apply (fastforce simp add: listsum_map_remove1)
   apply clarify
   apply (subgoal_tac "\<exists>y::name. y \<sharp> (x, u, z)")
    prefer 2
@@ -232,8 +233,10 @@
   apply clarify
   apply (erule allE, erule impE)
    prefer 2
-   apply blast   
-  apply (force intro: le_imp_less_Suc trans_le_add1 trans_le_add2 elem_le_sum) 
+   apply blast
+  apply simp
+  apply (simp only: foldl_conv_fold add_commute fold_plus_listsum_rev)
+  apply (fastforce simp add: listsum_map_remove1)
   done
 
 theorem Apps_lam_induct:
@@ -855,3 +858,4 @@
 qed
 
 end
+
--- a/src/HOL/Orderings.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Orderings.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -312,7 +312,7 @@
 signature ORDERS =
 sig
   val print_structures: Proof.context -> unit
-  val setup: theory -> theory
+  val attrib_setup: theory -> theory
   val order_tac: Proof.context -> thm list -> int -> tactic
 end;
 
@@ -414,19 +414,15 @@
     (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_context o
         Toplevel.keep (print_structures o Toplevel.context_of)));
 
-
-(** Setup **)
-
-val setup =
-  Method.setup @{binding order} (Scan.succeed (fn ctxt => SIMPLE_METHOD' (order_tac ctxt [])))
-    "transitivity reasoner" #>
-  attrib_setup;
-
 end;
 
 *}
 
-setup Orders.setup
+setup Orders.attrib_setup
+
+method_setup order = {*
+  Scan.succeed (fn ctxt => SIMPLE_METHOD' (Orders.order_tac ctxt []))
+*} "transitivity reasoner"
 
 
 text {* Declarations to set up transitivity reasoner of partial and linear orders. *}
--- a/src/HOL/Predicate.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Predicate.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -702,7 +702,8 @@
 
 text {* Misc *}
 
-declare Inf_set_foldr [where 'a = "'a Predicate.pred", code] Sup_set_foldr [where 'a = "'a Predicate.pred", code]
+declare Inf_set_fold [where 'a = "'a Predicate.pred", code]
+declare Sup_set_fold [where 'a = "'a Predicate.pred", code]
 
 (* FIXME: better implement conversion by bisection *)
 
--- a/src/HOL/Predicate_Compile_Examples/Predicate_Compile_Tests.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Predicate_Compile_Examples/Predicate_Compile_Tests.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1027,8 +1027,8 @@
   (o * o => bool) => i * i => bool) [inductify] converse .
 
 thm converse.equation
-code_pred [inductify] rel_comp .
-thm rel_comp.equation
+code_pred [inductify] relcomp .
+thm relcomp.equation
 code_pred [inductify] Image .
 thm Image.equation
 declare singleton_iff[code_pred_inline]
--- a/src/HOL/Presburger.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Presburger.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -387,10 +387,25 @@
   by (simp cong: conj_cong)
 
 use "Tools/Qelim/cooper.ML"
-
 setup Cooper.setup
 
-method_setup presburger = "Cooper.method" "Cooper's algorithm for Presburger arithmetic"
+method_setup presburger = {*
+  let
+    fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ()
+    fun simple_keyword k = Scan.lift (Args.$$$ k) >> K ()
+    val addN = "add"
+    val delN = "del"
+    val elimN = "elim"
+    val any_keyword = keyword addN || keyword delN || simple_keyword elimN
+    val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
+  in
+    Scan.optional (simple_keyword elimN >> K false) true --
+    Scan.optional (keyword addN |-- thms) [] --
+    Scan.optional (keyword delN |-- thms) [] >>
+    (fn ((elim, add_ths), del_ths) => fn ctxt =>
+      SIMPLE_METHOD' (Cooper.tac elim add_ths del_ths ctxt))
+  end
+*} "Cooper's algorithm for Presburger arithmetic"
 
 declare dvd_eq_mod_eq_0[symmetric, presburger]
 declare mod_1[presburger] 
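Under the new theory-level setup the presburger method keeps its former behaviour and
its add:/del:/elim options; a standard toy goal (not from this changeset) that it
decides directly:

    lemma "\<exists>x::int. 2 * x = y + y"
      by presburger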
--- a/src/HOL/Proofs/Lambda/ListApplication.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Proofs/Lambda/ListApplication.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -110,10 +110,8 @@
     prefer 2
     apply (erule allE, erule mp, rule refl)
    apply simp
-   apply (rule lem0)
-    apply force
-   apply (rule elem_le_sum)
-   apply force
+   apply (simp only: foldl_conv_fold add_commute fold_plus_listsum_rev)
+   apply (fastforce simp add: listsum_map_remove1)
   apply clarify
   apply (rule assms)
    apply (erule allE, erule impE)
@@ -128,8 +126,8 @@
   apply (rule le_imp_less_Suc)
   apply (rule trans_le_add1)
   apply (rule trans_le_add2)
-  apply (rule elem_le_sum)
-  apply force
+  apply (simp only: foldl_conv_fold add_commute fold_plus_listsum_rev)
+  apply (simp add: member_le_listsum_nat)
   done
 
 theorem Apps_dB_induct:
@@ -143,3 +141,4 @@
   done
 
 end
+
--- a/src/HOL/Quickcheck_Examples/Quickcheck_Examples.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Quickcheck_Examples/Quickcheck_Examples.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -69,7 +69,7 @@
 
 lemma "app (fs @ gs) x = app gs (app fs x)"
   quickcheck[random, expect = no_counterexample]
-  quickcheck[exhaustive, size = 4, expect = no_counterexample]
+  quickcheck[exhaustive, size = 2, expect = no_counterexample]
   by (induct fs arbitrary: x) simp_all
 
 lemma "app (fs @ gs) x = app fs (app gs x)"
@@ -477,15 +477,31 @@
 locale antisym =
   fixes R
   assumes "R x y --> R y x --> x = y"
-begin
 
-lemma
+interpretation equal : antisym "op =" by default simp
+interpretation order_nat : antisym "op <= :: nat => _ => _" by default simp
+
+lemma (in antisym)
   "R x y --> R y z --> R x z"
 quickcheck[exhaustive, finite_type_size = 2, expect = no_counterexample]
 quickcheck[exhaustive, expect = counterexample]
 oops
 
-end
+declare [[quickcheck_locale = "interpret"]]
+
+lemma (in antisym)
+  "R x y --> R y z --> R x z"
+quickcheck[exhaustive, expect = no_counterexample]
+oops
+
+declare [[quickcheck_locale = "expand"]]
+
+lemma (in antisym)
+  "R x y --> R y z --> R x z"
+quickcheck[exhaustive, finite_type_size = 2, expect = no_counterexample]
+quickcheck[exhaustive, expect = counterexample]
+oops
+
 
 subsection {* Examples with HOL quantifiers *}
 
--- a/src/HOL/Quotient.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Quotient.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -19,13 +19,6 @@
 begin
 
 text {*
-  An aside: contravariant functorial structure of sets.
-*}
-
-enriched_type vimage
-  by (simp_all add: fun_eq_iff vimage_compose)
-
-text {*
   Basic definition for equivalence relations
   that are represented by predicates.
 *}
@@ -694,9 +687,9 @@
 apply (rule Quotient3I)
    apply (simp add: o_def Quotient3_abs_rep [OF R2] Quotient3_abs_rep [OF R1])
   apply simp
-  apply (rule_tac b="Rep1 (Rep2 a)" in pred_compI)
+  apply (rule_tac b="Rep1 (Rep2 a)" in relcomppI)
    apply (rule Quotient3_rep_reflp [OF R1])
-  apply (rule_tac b="Rep1 (Rep2 a)" in pred_compI [rotated])
+  apply (rule_tac b="Rep1 (Rep2 a)" in relcomppI [rotated])
    apply (rule Quotient3_rep_reflp [OF R1])
   apply (rule Rep1)
   apply (rule Quotient3_rep_reflp [OF R2])
@@ -707,8 +700,8 @@
      apply (erule Quotient3_refl1 [OF R1])
     apply (drule Quotient3_refl1 [OF R2], drule Rep1)
     apply (subgoal_tac "R1 r (Rep1 (Abs1 x))")
-     apply (rule_tac b="Rep1 (Abs1 x)" in pred_compI, assumption)
-     apply (erule pred_compI)
+     apply (rule_tac b="Rep1 (Abs1 x)" in relcomppI, assumption)
+     apply (erule relcomppI)
      apply (erule Quotient3_symp [OF R1, THEN sympD])
     apply (rule Quotient3_rel[symmetric, OF R1, THEN iffD2])
     apply (rule conjI, erule Quotient3_refl1 [OF R1])
@@ -721,8 +714,8 @@
     apply (erule Quotient3_refl1 [OF R1])
    apply (drule Quotient3_refl2 [OF R2], drule Rep1)
    apply (subgoal_tac "R1 s (Rep1 (Abs1 y))")
-    apply (rule_tac b="Rep1 (Abs1 y)" in pred_compI, assumption)
-    apply (erule pred_compI)
+    apply (rule_tac b="Rep1 (Abs1 y)" in relcomppI, assumption)
+    apply (erule relcomppI)
     apply (erule Quotient3_symp [OF R1, THEN sympD])
    apply (rule Quotient3_rel[symmetric, OF R1, THEN iffD2])
    apply (rule conjI, erule Quotient3_refl2 [OF R1])
@@ -738,11 +731,11 @@
   apply (erule Quotient3_refl1 [OF R1])
  apply (rename_tac a b c d)
  apply simp
- apply (rule_tac b="Rep1 (Abs1 r)" in pred_compI)
+ apply (rule_tac b="Rep1 (Abs1 r)" in relcomppI)
   apply (rule Quotient3_rel[symmetric, OF R1, THEN iffD2])
   apply (rule conjI, erule Quotient3_refl1 [OF R1])
   apply (simp add: Quotient3_abs_rep [OF R1] Quotient3_rep_reflp [OF R1])
- apply (rule_tac b="Rep1 (Abs1 s)" in pred_compI [rotated])
+ apply (rule_tac b="Rep1 (Abs1 s)" in relcomppI [rotated])
   apply (rule Quotient3_rel[symmetric, OF R1, THEN iffD2])
   apply (simp add: Quotient3_abs_rep [OF R1] Quotient3_rep_reflp [OF R1])
   apply (erule Quotient3_refl2 [OF R1])
@@ -772,22 +765,30 @@
 using assms
 by (rule OOO_quotient3) auto
 
-subsection {* Invariant *}
+subsection {* Quotient3 to Quotient *}
+
+lemma Quotient3_to_Quotient:
+assumes "Quotient3 R Abs Rep"
+and "T \<equiv> \<lambda>x y. R x x \<and> Abs x = y"
+shows "Quotient R Abs Rep T"
+using assms unfolding Quotient3_def by (intro QuotientI) blast+
 
-lemma copy_type_to_Quotient3:
-  assumes "type_definition Rep Abs UNIV"
-  shows "Quotient3 (op =) Abs Rep"
-proof -
-  interpret type_definition Rep Abs UNIV by fact
-  from Abs_inject Rep_inverse show ?thesis by (auto intro!: Quotient3I)
-qed
-
-lemma invariant_type_to_Quotient3:
-  assumes "type_definition Rep Abs {x. P x}"
-  shows "Quotient3 (Lifting.invariant P) Abs Rep"
-proof -
-  interpret type_definition Rep Abs "{x. P x}" by fact
-  from Rep Abs_inject Rep_inverse show ?thesis by (auto intro!: Quotient3I simp: invariant_def)
+lemma Quotient3_to_Quotient_equivp:
+assumes q: "Quotient3 R Abs Rep"
+and T_def: "T \<equiv> \<lambda>x y. Abs x = y"
+and eR: "equivp R"
+shows "Quotient R Abs Rep T"
+proof (intro QuotientI)
+  fix a
+  show "Abs (Rep a) = a" using q by(rule Quotient3_abs_rep)
+next
+  fix a
+  show "R (Rep a) (Rep a)" using q by(rule Quotient3_rep_reflp)
+next
+  fix r s
+  show "R r s = (R r r \<and> R s s \<and> Abs r = Abs s)" using q by(rule Quotient3_rel[symmetric])
+next
+  show "T = (\<lambda>x y. R x x \<and> Abs x = y)" using T_def equivp_reflp[OF eR] by simp
 qed
 
 subsection {* ML setup *}
@@ -903,3 +904,4 @@
   fun_rel (infixr "===>" 55)
 
 end
+
--- a/src/HOL/Quotient_Examples/FSet.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Quotient_Examples/FSet.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Quotient3_Examples/FSet.thy
+(*  Title:      HOL/Quotient_Examples/FSet.thy
     Author:     Cezary Kaliszyk, TU Munich
     Author:     Christian Urban, TU Munich
 
@@ -140,7 +140,7 @@
   next
     assume a: "(list_all2 R OOO op \<approx>) r s"
     then have b: "map Abs r \<approx> map Abs s"
-    proof (elim pred_compE)
+    proof (elim relcomppE)
       fix b ba
       assume c: "list_all2 R r b"
       assume d: "b \<approx> ba"
@@ -165,11 +165,11 @@
     have y: "list_all2 R (map Rep (map Abs s)) s"
       by (fact rep_abs_rsp_left[OF list_quotient3[OF q], OF list_all2_refl'[OF e, of s]])
     have c: "(op \<approx> OO list_all2 R) (map Rep (map Abs r)) s"
-      by (rule pred_compI) (rule b, rule y)
+      by (rule relcomppI) (rule b, rule y)
     have z: "list_all2 R r (map Rep (map Abs r))"
       by (fact rep_abs_rsp[OF list_quotient3[OF q], OF list_all2_refl'[OF e, of r]])
     then show "(list_all2 R OOO op \<approx>) r s"
-      using a c pred_compI by simp
+      using a c relcomppI by simp
   qed
 qed
 
@@ -360,7 +360,7 @@
 quotient_definition
   "concat_fset :: ('a fset) fset \<Rightarrow> 'a fset"
   is concat 
-proof (elim pred_compE)
+proof (elim relcomppE)
 fix a b ba bb
   assume a: "list_all2 op \<approx> a ba"
   with list_symp [OF list_eq_symp] have a': "list_all2 op \<approx> ba a" by (rule sympE)
@@ -397,9 +397,9 @@
 lemma Cons_rsp2 [quot_respect]:
   shows "(op \<approx> ===> list_all2 op \<approx> OOO op \<approx> ===> list_all2 op \<approx> OOO op \<approx>) Cons Cons"
   apply (auto intro!: fun_relI)
-  apply (rule_tac b="x # b" in pred_compI)
+  apply (rule_tac b="x # b" in relcomppI)
   apply auto
-  apply (rule_tac b="x # ba" in pred_compI)
+  apply (rule_tac b="x # ba" in relcomppI)
   apply auto
   done
 
@@ -453,7 +453,7 @@
   assumes "(R1 ===> R2 ===> R3) C C" and "(R4 ===> R5 ===> R6) C C"
   shows "(R1 OOO R4 ===> R2 OOO R5 ===> R3 OOO R6) C C"
   by (auto intro!: fun_relI)
-     (metis (full_types) assms fun_relE pred_compI)
+     (metis (full_types) assms fun_relE relcomppI)
 
 lemma append_rsp2 [quot_respect]:
   "(list_all2 op \<approx> OOO op \<approx> ===> list_all2 op \<approx> OOO op \<approx> ===> list_all2 op \<approx> OOO op \<approx>) append append"
@@ -479,7 +479,7 @@
     by (induct y ya rule: list_induct2')
        (simp_all, metis apply_rsp' list_eq_def)
   show "(list_all2 op \<approx> OOO op \<approx>) (map f xa) (map f' ya)"
-    by (metis a b c list_eq_def pred_compI)
+    by (metis a b c list_eq_def relcomppI)
 qed
 
 lemma map_prs2 [quot_preserve]:
--- a/src/HOL/Quotient_Examples/Lift_DList.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Quotient_Examples/Lift_DList.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -54,28 +54,13 @@
 proof -
   {
     fix x y
-    have "list_all2 cr_dlist x y \<Longrightarrow> 
-      List.map Abs_dlist x = y \<and> list_all2 (Lifting.invariant distinct) x x"
+    have "list_all2 cr_dlist x y \<Longrightarrow> x = List.map list_of_dlist y"
       unfolding list_all2_def cr_dlist_def by (induction x y rule: list_induct2') auto
   }
   note cr = this
-
   fix x :: "'a list list" and y :: "'a list list"
-  assume a: "(list_all2 cr_dlist OO Lifting.invariant distinct OO (list_all2 cr_dlist)\<inverse>\<inverse>) x y"
-  from a have l_x: "list_all2 (Lifting.invariant distinct) x x" by (auto simp add: cr)
-  from a have l_y: "list_all2 (Lifting.invariant distinct) y y" by (auto simp add: cr)
-  from a have m: "(Lifting.invariant distinct) (List.map Abs_dlist x) (List.map Abs_dlist y)" 
-    by (auto simp add: cr)
-
-  have "x = y" 
-  proof -
-    have m':"List.map Abs_dlist x = List.map Abs_dlist y" using m unfolding Lifting.invariant_def by simp
-    have dist: "\<And>l. list_all2 (Lifting.invariant distinct) l l \<Longrightarrow> !x. x \<in> (set l) \<longrightarrow> distinct x"
-      unfolding list_all2_def Lifting.invariant_def by (auto simp add: zip_same)
-    from dist[OF l_x] dist[OF l_y] have "inj_on Abs_dlist (set x \<union> set y)" by (intro inj_onI) 
-      (metis CollectI UnE Abs_dlist_inverse)
-    with m' show ?thesis by (rule map_inj_on)
-  qed
+  assume "(list_all2 cr_dlist OO Lifting.invariant distinct OO (list_all2 cr_dlist)\<inverse>\<inverse>) x y"
+  then have "x = y" by (auto dest: cr simp add: Lifting.invariant_def)
   then show "?thesis x y" unfolding Lifting.invariant_def by auto
 qed
 
--- a/src/HOL/Quotient_Examples/Lift_Fun.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Quotient_Examples/Lift_Fun.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1,4 +1,4 @@
-(*  Title:      HOL/Quotient3_Examples/Lift_Fun.thy
+(*  Title:      HOL/Quotient_Examples/Lift_Fun.thy
     Author:     Ondrej Kuncar
 *)
 
--- a/src/HOL/Quotient_Examples/Lift_RBT.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Quotient_Examples/Lift_RBT.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -37,16 +37,16 @@
 
 setup_lifting type_definition_rbt
 
-lift_definition lookup :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> 'a \<rightharpoonup> 'b" is "RBT_Impl.lookup" 
+lift_definition lookup :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> 'a \<rightharpoonup> 'b" is "rbt_lookup" 
 by simp
 
 lift_definition empty :: "('a\<Colon>linorder, 'b) rbt" is RBT_Impl.Empty 
 by (simp add: empty_def)
 
-lift_definition insert :: "'a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is "RBT_Impl.insert" 
+lift_definition insert :: "'a\<Colon>linorder \<Rightarrow> 'b \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is "rbt_insert" 
 by simp
 
-lift_definition delete :: "'a\<Colon>linorder \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is "RBT_Impl.delete" 
+lift_definition delete :: "'a\<Colon>linorder \<Rightarrow> ('a, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is "rbt_delete" 
 by simp
 
 lift_definition entries :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a \<times> 'b) list" is RBT_Impl.entries
@@ -55,10 +55,10 @@
 lift_definition keys :: "('a\<Colon>linorder, 'b) rbt \<Rightarrow> 'a list" is RBT_Impl.keys 
 by simp
 
-lift_definition bulkload :: "('a\<Colon>linorder \<times> 'b) list \<Rightarrow> ('a, 'b) rbt" is "RBT_Impl.bulkload" 
+lift_definition bulkload :: "('a\<Colon>linorder \<times> 'b) list \<Rightarrow> ('a, 'b) rbt" is "rbt_bulkload" 
 by simp
 
-lift_definition map_entry :: "'a \<Rightarrow> ('b \<Rightarrow> 'b) \<Rightarrow> ('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is RBT_Impl.map_entry 
+lift_definition map_entry :: "'a \<Rightarrow> ('b \<Rightarrow> 'b) \<Rightarrow> ('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is rbt_map_entry 
 by simp
 
 lift_definition map :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> ('a\<Colon>linorder, 'b) rbt \<Rightarrow> ('a, 'b) rbt" is RBT_Impl.map
@@ -80,11 +80,11 @@
 (* TODO: obtain the following lemmas by lifting existing theorems. *)
 
 lemma lookup_RBT:
-  "is_rbt t \<Longrightarrow> lookup (RBT t) = RBT_Impl.lookup t"
+  "is_rbt t \<Longrightarrow> lookup (RBT t) = rbt_lookup t"
   by (simp add: lookup_def RBT_inverse)
 
 lemma lookup_impl_of:
-  "RBT_Impl.lookup (impl_of t) = lookup t"
+  "rbt_lookup (impl_of t) = lookup t"
   by (simp add: lookup_def)
 
 lemma entries_impl_of:
@@ -101,11 +101,11 @@
 
 lemma lookup_insert [simp]:
   "lookup (insert k v t) = (lookup t)(k \<mapsto> v)"
-  by (simp add: insert_def lookup_RBT lookup_insert lookup_impl_of)
+  by (simp add: insert_def lookup_RBT rbt_lookup_rbt_insert lookup_impl_of)
 
 lemma lookup_delete [simp]:
   "lookup (delete k t) = (lookup t)(k := None)"
-  by (simp add: delete_def lookup_RBT RBT_Impl.lookup_delete lookup_impl_of restrict_complement_singleton_eq)
+  by (simp add: delete_def lookup_RBT rbt_lookup_rbt_delete lookup_impl_of restrict_complement_singleton_eq)
 
 lemma map_of_entries [simp]:
   "map_of (entries t) = lookup t"
@@ -113,19 +113,19 @@
 
 lemma entries_lookup:
   "entries t1 = entries t2 \<longleftrightarrow> lookup t1 = lookup t2"
-  by (simp add: entries_def lookup_def entries_lookup)
+  by (simp add: entries_def lookup_def entries_rbt_lookup)
 
 lemma lookup_bulkload [simp]:
   "lookup (bulkload xs) = map_of xs"
-  by (simp add: bulkload_def lookup_RBT RBT_Impl.lookup_bulkload)
+  by (simp add: bulkload_def lookup_RBT rbt_lookup_rbt_bulkload)
 
 lemma lookup_map_entry [simp]:
   "lookup (map_entry k f t) = (lookup t)(k := Option.map f (lookup t k))"
-  by (simp add: map_entry_def lookup_RBT RBT_Impl.lookup_map_entry lookup_impl_of)
+  by (simp add: map_entry_def lookup_RBT rbt_lookup_rbt_map_entry lookup_impl_of)
 
 lemma lookup_map [simp]:
   "lookup (map f t) k = Option.map (f k) (lookup t k)"
-  by (simp add: map_def lookup_RBT RBT_Impl.lookup_map lookup_impl_of)
+  by (simp add: map_def lookup_RBT rbt_lookup_map lookup_impl_of)
 
 lemma fold_fold:
   "fold f t = List.fold (prod_case f) (entries t)"
@@ -140,7 +140,7 @@
   by (simp add: rbt_eq_iff is_empty_def impl_of_empty split: rbt.split)
 
 lemma RBT_lookup_empty [simp]: (*FIXME*)
-  "RBT_Impl.lookup t = Map.empty \<longleftrightarrow> t = RBT_Impl.Empty"
+  "rbt_lookup t = Map.empty \<longleftrightarrow> t = RBT_Impl.Empty"
   by (cases t) (auto simp add: fun_eq_iff)
 
 lemma lookup_empty_empty [simp]:
@@ -149,7 +149,7 @@
 
 lemma sorted_keys [iff]:
   "sorted (keys t)"
-  by (simp add: keys_def RBT_Impl.keys_def sorted_entries)
+  by (simp add: keys_def RBT_Impl.keys_def rbt_sorted_entries)
 
 lemma distinct_keys [iff]:
   "distinct (keys t)"
--- a/src/HOL/RealDef.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/RealDef.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -1058,8 +1058,7 @@
 by simp (* solved by linordered_ring_le_cancel_factor simproc *)
 
 lemma real_mult_le_cancel_iff2 [simp]: "(0::real) < z ==> (z*x \<le> z*y) = (x\<le>y)"
-by (rule mult_le_cancel_left_pos)
-(* BH: Why doesn't "simp" prove this one, like it does the last one? *)
+by simp (* solved by linordered_ring_le_cancel_factor simproc *)
 
 
 subsection {* Embedding numbers into the Reals *}
@@ -1252,7 +1251,7 @@
 by (simp add: real_of_nat_def)
 
 lemma real_of_nat_ge_zero [iff]: "0 \<le> real (n::nat)"
-by (simp add: real_of_nat_def zero_le_imp_of_nat)
+by (simp add: real_of_nat_def)
 
 lemma real_of_nat_Suc_gt_zero: "0 < real (Suc n)"
 by (simp add: real_of_nat_def del: of_nat_Suc)
--- a/src/HOL/Relation.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Relation.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -146,7 +146,7 @@
 
 definition reflp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> bool"
 where
-  "reflp r \<longleftrightarrow> refl {(x, y). r x y}"
+  "reflp r \<longleftrightarrow> (\<forall>x. r x x)"
 
 lemma reflp_refl_eq [pred_set_conv]:
   "reflp (\<lambda>x y. (x, y) \<in> r) \<longleftrightarrow> refl r" 
@@ -507,29 +507,26 @@
 
 subsubsection {* Composition *}
 
-inductive_set rel_comp  :: "('a \<times> 'b) set \<Rightarrow> ('b \<times> 'c) set \<Rightarrow> ('a \<times> 'c) set" (infixr "O" 75)
+inductive_set relcomp  :: "('a \<times> 'b) set \<Rightarrow> ('b \<times> 'c) set \<Rightarrow> ('a \<times> 'c) set" (infixr "O" 75)
   for r :: "('a \<times> 'b) set" and s :: "('b \<times> 'c) set"
 where
-  rel_compI [intro]: "(a, b) \<in> r \<Longrightarrow> (b, c) \<in> s \<Longrightarrow> (a, c) \<in> r O s"
+  relcompI [intro]: "(a, b) \<in> r \<Longrightarrow> (b, c) \<in> s \<Longrightarrow> (a, c) \<in> r O s"
 
-abbreviation pred_comp (infixr "OO" 75) where
-  "pred_comp \<equiv> rel_compp"
+notation relcompp (infixr "OO" 75)
 
-lemmas pred_compI = rel_compp.intros
+lemmas relcomppI = relcompp.intros
 
 text {*
   For historic reasons, the elimination rules are not wholly corresponding.
   Feel free to consolidate this.
 *}
 
-inductive_cases rel_compEpair: "(a, c) \<in> r O s"
-inductive_cases pred_compE [elim!]: "(r OO s) a c"
+inductive_cases relcompEpair: "(a, c) \<in> r O s"
+inductive_cases relcomppE [elim!]: "(r OO s) a c"
 
-lemma rel_compE [elim!]: "xz \<in> r O s \<Longrightarrow>
+lemma relcompE [elim!]: "xz \<in> r O s \<Longrightarrow>
   (\<And>x y z. xz = (x, z) \<Longrightarrow> (x, y) \<in> r \<Longrightarrow> (y, z) \<in> s  \<Longrightarrow> P) \<Longrightarrow> P"
-  by (cases xz) (simp, erule rel_compEpair, iprover)
-
-lemmas pred_comp_rel_comp_eq = rel_compp_rel_comp_eq
+  by (cases xz) (simp, erule relcompEpair, iprover)
 
 lemma R_O_Id [simp]:
   "R O Id = R"
@@ -539,28 +536,28 @@
   "Id O R = R"
   by fast
 
-lemma rel_comp_empty1 [simp]:
+lemma relcomp_empty1 [simp]:
   "{} O R = {}"
   by blast
 
-lemma pred_comp_bot1 [simp]:
+lemma relcompp_bot1 [simp]:
   "\<bottom> OO R = \<bottom>"
-  by (fact rel_comp_empty1 [to_pred])
+  by (fact relcomp_empty1 [to_pred])
 
-lemma rel_comp_empty2 [simp]:
+lemma relcomp_empty2 [simp]:
   "R O {} = {}"
   by blast
 
-lemma pred_comp_bot2 [simp]:
+lemma relcompp_bot2 [simp]:
   "R OO \<bottom> = \<bottom>"
-  by (fact rel_comp_empty2 [to_pred])
+  by (fact relcomp_empty2 [to_pred])
 
 lemma O_assoc:
   "(R O S) O T = R O (S O T)"
   by blast
 
 
-lemma pred_comp_assoc:
+lemma relcompp_assoc:
   "(r OO s) OO t = r OO (s OO t)"
   by (fact O_assoc [to_pred])
 
@@ -568,55 +565,55 @@
   "trans r \<Longrightarrow> r O r \<subseteq> r"
   by (unfold trans_def) blast
 
-lemma transp_pred_comp_less_eq:
+lemma transp_relcompp_less_eq:
   "transp r \<Longrightarrow> r OO r \<le> r "
   by (fact trans_O_subset [to_pred])
 
-lemma rel_comp_mono:
+lemma relcomp_mono:
   "r' \<subseteq> r \<Longrightarrow> s' \<subseteq> s \<Longrightarrow> r' O s' \<subseteq> r O s"
   by blast
 
-lemma pred_comp_mono:
+lemma relcompp_mono:
   "r' \<le> r \<Longrightarrow> s' \<le> s \<Longrightarrow> r' OO s' \<le> r OO s "
-  by (fact rel_comp_mono [to_pred])
+  by (fact relcomp_mono [to_pred])
 
-lemma rel_comp_subset_Sigma:
+lemma relcomp_subset_Sigma:
   "r \<subseteq> A \<times> B \<Longrightarrow> s \<subseteq> B \<times> C \<Longrightarrow> r O s \<subseteq> A \<times> C"
   by blast
 
-lemma rel_comp_distrib [simp]:
+lemma relcomp_distrib [simp]:
   "R O (S \<union> T) = (R O S) \<union> (R O T)" 
   by auto
 
-lemma pred_comp_distrib [simp]:
+lemma relcompp_distrib [simp]:
   "R OO (S \<squnion> T) = R OO S \<squnion> R OO T"
-  by (fact rel_comp_distrib [to_pred])
+  by (fact relcomp_distrib [to_pred])
 
-lemma rel_comp_distrib2 [simp]:
+lemma relcomp_distrib2 [simp]:
   "(S \<union> T) O R = (S O R) \<union> (T O R)"
   by auto
 
-lemma pred_comp_distrib2 [simp]:
+lemma relcompp_distrib2 [simp]:
   "(S \<squnion> T) OO R = S OO R \<squnion> T OO R"
-  by (fact rel_comp_distrib2 [to_pred])
+  by (fact relcomp_distrib2 [to_pred])
 
-lemma rel_comp_UNION_distrib:
+lemma relcomp_UNION_distrib:
   "s O UNION I r = (\<Union>i\<in>I. s O r i) "
   by auto
 
-(* FIXME thm rel_comp_UNION_distrib [to_pred] *)
+(* FIXME thm relcomp_UNION_distrib [to_pred] *)
 
-lemma rel_comp_UNION_distrib2:
+lemma relcomp_UNION_distrib2:
   "UNION I r O s = (\<Union>i\<in>I. r i O s) "
   by auto
 
-(* FIXME thm rel_comp_UNION_distrib2 [to_pred] *)
+(* FIXME thm relcomp_UNION_distrib2 [to_pred] *)
 
-lemma single_valued_rel_comp:
+lemma single_valued_relcomp:
   "single_valued r \<Longrightarrow> single_valued s \<Longrightarrow> single_valued (r O s)"
   by (unfold single_valued_def) blast
 
-lemma rel_comp_unfold:
+lemma relcomp_unfold:
   "r O s = {(x, z). \<exists>y. (x, y) \<in> r \<and> (y, z) \<in> s}"
   by (auto simp add: set_eq_iff)
 
@@ -676,12 +673,12 @@
   "(r\<inverse>\<inverse>)\<inverse>\<inverse> = r"
   by (fact converse_converse [to_pred])
 
-lemma converse_rel_comp: "(r O s)^-1 = s^-1 O r^-1"
+lemma converse_relcomp: "(r O s)^-1 = s^-1 O r^-1"
   by blast
 
-lemma converse_pred_comp: "(r OO s)^--1 = s^--1 OO r^--1"
-  by (iprover intro: order_antisym conversepI pred_compI
-    elim: pred_compE dest: conversepD)
+lemma converse_relcompp: "(r OO s)^--1 = s^--1 OO r^--1"
+  by (iprover intro: order_antisym conversepI relcomppI
+    elim: relcomppE dest: conversepD)
 
 lemma converse_Int: "(r \<inter> s)^-1 = r^-1 \<inter> s^-1"
   by blast
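The Relation.thy hunks above rename rel_comp/pred_comp (and the corresponding rule
names) to relcomp/relcompp; the O and OO notation itself is unchanged. A small sanity
check under the new names (not part of the changeset):

    lemma "(1::nat, 3::nat) \<in> {(1, 2::nat)} O {(2, 3)}"
      by blast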
--- a/src/HOL/Set.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Set.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -506,7 +506,7 @@
   -- {* Classical elimination rule. *}
   by (auto simp add: less_eq_set_def le_fun_def)
 
-lemma subset_eq [code, no_atp]: "A \<le> B = (\<forall>x\<in>A. x \<in> B)" by blast
+lemma subset_eq [no_atp]: "A \<le> B = (\<forall>x\<in>A. x \<in> B)" by blast
 
 lemma contra_subsetD [no_atp]: "A \<subseteq> B ==> c \<notin> B ==> c \<notin> A"
   by blast
--- a/src/HOL/TPTP/TPTP_Parser.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -17,6 +17,7 @@
   "TPTP_Parser/tptp_lexyacc.ML" (*generated from tptp.lex and tptp.yacc*)
   "TPTP_Parser/tptp_parser.ML"
   "TPTP_Parser/tptp_problem_name.ML"
+  "TPTP_Parser/tptp_proof.ML"
   "TPTP_Parser/tptp_interpret.ML"
 
 begin
--- a/src/HOL/TPTP/TPTP_Parser/tptp.lex	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp.lex	Mon Apr 16 19:01:57 2012 +0200
@@ -3,8 +3,8 @@
 
  Notes:
  * Omit %full in definitions to restrict alphabet to ascii.
- * Could include %posarg to ensure that start counting character positions from
-   0, but it would punish performance.
+ * Could include %posarg to ensure that we'd start counting character positions
+   from 0, but it would punish performance.
  * %s AF F COMMENT; -- could improve by making stateful.
 
  Acknowledgements:
@@ -52,55 +52,55 @@
 %header (functor TPTPLexFun(structure Tokens: TPTP_TOKENS));
 %arg (file_name:string);
 
-printable_char            = .;
-viewable_char             = [.\n];
+percentage_sign           = "%";
+double_quote              = ["];
+do_char                   = ([^"]|[\\]["\\]);
+single_quote              = ['];
+sq_char                   = ([\040-\041\043-\126]|[\\]['\\]);
+sign                      = [+-];
+dot                       = [.];
+exponent                  = [Ee];
+slash                     = [/];
+zero_numeric              = [0];
+non_zero_numeric          = [1-9];
 numeric                   = [0-9];
 lower_alpha               = [a-z];
 upper_alpha               = [A-Z];
 alpha_numeric             = ({lower_alpha}|{upper_alpha}|{numeric}|_);
-zero_numeric              = [0];
-non_zero_numeric          = [1-9];
-slash                     = [/];
-exponent                  = [Ee];
-dot                       = [.];
-any_char                  = [^\n];
 dollar                    = \$;
+printable_char            = .;
+viewable_char             = [.\n];
+
+dot_decimal               = {dot}{numeric}+;
+
 ddollar                   = \$\$;
 unsigned_integer          = {numeric}+;
-sign                      = [+-];
 divide                    = [/];
 
 signed_integer            = {sign}{unsigned_integer};
-dot_decimal               = {dot}{numeric}+;
 exp_suffix                = {exponent}({signed_integer}|{unsigned_integer});
 real                      = ({signed_integer}|{unsigned_integer}){dot_decimal}{exp_suffix}?;
-upper_word                = {upper_alpha}{alpha_numeric}*;
 rational                  = ({signed_integer}|{unsigned_integer}){divide}{unsigned_integer};
 
-percentage_sign           = "%";
+lower_word                = {lower_alpha}{alpha_numeric}*;
+upper_word                = {upper_alpha}{alpha_numeric}*;
+dollar_dollar_word        = {ddollar}{lower_word};
+dollar_word               = {dollar}{lower_word};
 
-sq_char                   = ([\040-\041\043-\126]|[\\]['\\]);
+distinct_object           = {double_quote}{do_char}+{double_quote};
 
 ws                        = ([\ ]|[\t]);
-eol                       = ("\013\010"|"\010"|"\013");
-
-single_quote              = ['];
 single_quoted             = {single_quote}({alpha_numeric}|{sq_char}|{ws})+{single_quote};
 
-lower_word                = {lower_alpha}{alpha_numeric}*;
-atomic_system_word        = {ddollar}{lower_word};
-atomic_defined_word       = {dollar}{lower_word};
-
 system_comment_one        = [%][\ ]*{ddollar}[_]*;
 system_comment_multi      = [/][\*][\ ]*(ddollar)([^\*]*[\*][\*]*[^/\*])*[^\*]*[\*][\*]*[/];
 system_comment            = (system_comment_one)|(system_comment_multi);
-comment_one               = {percentage_sign}[^\n]*;
-comment_multi             = [/][\*]([^\*]*[\*]+[^/\*])*[^\*]*[\*]+[/];
-comment                   = ({comment_one}|{comment_multi})+;
 
-do_char                   = ([^"]|[\\]["\\]);
-double_quote              = ["];
-distinct_object           = {double_quote}{do_char}+{double_quote};
+comment_line              = {percentage_sign}[^\n]*;
+comment_block             = [/][\*]([^\*]*[\*]+[^/\*])*[^\*]*[\*]+[/];
+comment                   = ({comment_line}|{comment_block})+;
+
+eol                       = ("\013\010"|"\010"|"\013");
 
 %%
 
@@ -127,7 +127,7 @@
 ":="            => (col:=yypos-(!eolpos); T.LET(!linep,!col));
 ">"             => (col:=yypos-(!eolpos); T.ARROW(!linep,!col));
 
-"<="            => (col:=yypos-(!eolpos); T.IF(!linep,!col));
+"<="            => (col:=yypos-(!eolpos); T.FI(!linep,!col));
 "<=>"           => (col:=yypos-(!eolpos); T.IFF(!linep,!col));
 "=>"            => (col:=yypos-(!eolpos); T.IMPLIES(!linep,!col));
 "["             => (col:=yypos-(!eolpos); T.LBRKT(!linep,!col));
@@ -170,10 +170,14 @@
 
 "$ite_f"        => (col:=yypos-(!eolpos); T.ITE_F(!linep,!col));
 "$ite_t"        => (col:=yypos-(!eolpos); T.ITE_T(!linep,!col));
+"$let_tf"        => (col:=yypos-(!eolpos); T.LET_TF(!linep,!col));
+"$let_ff"        => (col:=yypos-(!eolpos); T.LET_FF(!linep,!col));
+"$let_ft"        => (col:=yypos-(!eolpos); T.LET_FT(!linep,!col));
+"$let_tt"        => (col:=yypos-(!eolpos); T.LET_TT(!linep,!col));
 
-{lower_word}          => (col:=yypos-(!eolpos); T.LOWER_WORD(yytext,!linep,!col));
-{atomic_system_word}  => (col:=yypos-(!eolpos); T.ATOMIC_SYSTEM_WORD(yytext,!linep,!col));
-{atomic_defined_word} => (col:=yypos-(!eolpos); T.ATOMIC_DEFINED_WORD(yytext,!linep,!col));
+{lower_word}   => (col:=yypos-(!eolpos); T.LOWER_WORD(yytext,!linep,!col));
+{dollar_word}  => (col:=yypos-(!eolpos); T.DOLLAR_WORD(yytext,!linep,!col));
+{dollar_dollar_word}  => (col:=yypos-(!eolpos); T.DOLLAR_DOLLAR_WORD(yytext,!linep,!col));
 
 "+"           => (col:=yypos-(!eolpos); T.PLUS(!linep,!col));
 "*"           => (col:=yypos-(!eolpos); T.TIMES(!linep,!col));
--- a/src/HOL/TPTP/TPTP_Parser/tptp.yacc	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp.yacc	Mon Apr 16 19:01:57 2012 +0200
@@ -21,10 +21,13 @@
   | "unknown" => Role_Unknown
   | thing => raise (UNRECOGNISED_ROLE thing)
 
+fun extract_quant_info (Quant (quantifier, vars, tptp_formula)) =
+  (quantifier, vars, tptp_formula)
+
 %%
 %name TPTP
 %term AMPERSAND | AT_SIGN | CARET | COLON | COMMA | EQUALS | EXCLAMATION
-  | LET | ARROW | IF | IFF | IMPLIES | INCLUDE
+  | LET | ARROW | FI | IFF | IMPLIES | INCLUDE
   | LAMBDA | LBRKT | LPAREN | MAP_TO | MMINUS | NAND
   | NEQUALS | XOR | NOR | PERIOD | PPLUS | QUESTION | RBRKT | RPAREN
   | TILDE | TOK_FALSE | TOK_I | TOK_O | TOK_INT | TOK_REAL | TOK_RAT | TOK_TRUE
@@ -36,10 +39,12 @@
   | DUD | INDEF_CHOICE | DEFIN_CHOICE
   | OPERATOR_FORALL | OPERATOR_EXISTS
   | PLUS | TIMES | GENTZEN_ARROW | DEP_SUM | DEP_PROD
-  | ATOMIC_DEFINED_WORD of string | ATOMIC_SYSTEM_WORD of string
+  | DOLLAR_WORD of string | DOLLAR_DOLLAR_WORD of string
   | SUBTYPE | LET_TERM
   | THF | TFF | FOF | CNF
   | ITE_F | ITE_T
+  | LET_TF | LET_FF | LET_FT | LET_TT
+
 %nonterm
     annotations of annotation option
   | name of string
@@ -116,9 +121,6 @@
   | tff_tuple_list of tptp_formula list
   | tff_sequent of tptp_formula
   | tff_conditional of tptp_formula
-  | tff_defined_var of tptp_let
-  | tff_let_list of tptp_let list
-  | tptp_let of tptp_formula
   | tff_xprod_type of tptp_type
   | tff_mapping_type of tptp_type
   | tff_atomic_type of tptp_type
@@ -144,8 +146,6 @@
   | thf_tuple_list of tptp_formula list
   | thf_sequent of tptp_formula
   | thf_conditional of tptp_formula
-  | thf_defined_var of tptp_let
-  | thf_let_list of tptp_let list
   | thf_let of tptp_formula
   | thf_atom of tptp_formula
   | thf_union_type of tptp_type
@@ -183,6 +183,17 @@
   | tptp_file of tptp_problem
   | tptp of tptp_problem
 
+  | thf_let_defn of tptp_let list
+  | tff_let of tptp_formula
+  | tff_let_term_defn of tptp_let list
+  | tff_let_formula_defn of tptp_let list
+  | tff_quantified_type of tptp_type
+  | tff_monotype of tptp_type
+  | tff_type_arguments of tptp_type list
+  | let_term of tptp_term
+  | atomic_defined_word of string
+  | atomic_system_word of string
+
 %pos int
 %eop EOF
 %noshift EOF
@@ -196,7 +207,7 @@
 
 %left AT_SIGN
 %nonassoc IFF XOR
-%right IMPLIES IF
+%right IMPLIES FI
 %nonassoc EQUALS NEQUALS
 %right VLINE NOR
 %left AMPERSAND NAND
@@ -218,88 +229,488 @@
 
  Parser for TPTP languages. Latest version of the language spec can
  be obtained from http://www.cs.miami.edu/~tptp/TPTP/SyntaxBNF.html
+ This implements version 5.3.0.
 *)
 
+tptp : tptp_file (( tptp_file ))
+
+tptp_file : tptp_input tptp_file (( tptp_input :: tptp_file ))
+          | COMMENT tptp_file    (( tptp_file ))
+          |                      (( [] ))
+
+tptp_input : annotated_formula (( annotated_formula ))
+           | include_           (( include_ ))
+
+annotated_formula : thf_annotated (( thf_annotated ))
+                  | tff_annotated (( tff_annotated ))
+                  | fof_annotated (( fof_annotated ))
+                  | cnf_annotated (( cnf_annotated ))
+
+thf_annotated : THF LPAREN name COMMA formula_role COMMA thf_formula annotations RPAREN PERIOD ((
+  Annotated_Formula ((file_name, THFleft + 1, THFright + 1),
+   THF, name, formula_role, thf_formula, annotations)
+))
+
+tff_annotated : TFF LPAREN name COMMA formula_role COMMA tff_formula annotations RPAREN PERIOD ((
+  Annotated_Formula ((file_name, TFFleft + 1, TFFright + 1),
+   TFF, name, formula_role, tff_formula, annotations)
+))
+
+fof_annotated : FOF LPAREN name COMMA formula_role COMMA fof_formula annotations RPAREN PERIOD ((
+  Annotated_Formula ((file_name, FOFleft + 1, FOFright + 1),
+   FOF, name, formula_role, fof_formula, annotations)
+))
+
+cnf_annotated : CNF LPAREN name COMMA formula_role COMMA cnf_formula annotations RPAREN PERIOD ((
+  Annotated_Formula ((file_name, CNFleft + 1, CNFright + 1),
+   CNF, name, formula_role, cnf_formula, annotations)
+))
+
 annotations : COMMA general_term optional_info (( SOME (general_term, optional_info) ))
             |                                  (( NONE ))
 
-optional_info : COMMA useful_info (( useful_info ))
-              |                   (( [] ))
+formula_role : LOWER_WORD (( classify_role LOWER_WORD ))
+
+
+(* THF formulas *)
+
+thf_formula : thf_logic_formula (( thf_logic_formula ))
+            | thf_sequent       (( thf_sequent ))
+
+thf_logic_formula : thf_binary_formula   (( thf_binary_formula ))
+                  | thf_unitary_formula  (( thf_unitary_formula ))
+                  | thf_type_formula     (( THF_typing thf_type_formula ))
+                  | thf_subtype          (( Type_fmla thf_subtype ))
+
+thf_binary_formula : thf_binary_pair   (( thf_binary_pair ))
+                   | thf_binary_tuple  (( thf_binary_tuple ))
+                   | thf_binary_type   (( Type_fmla thf_binary_type ))
 
-useful_info : general_list (( general_list ))
+thf_binary_pair : thf_unitary_formula thf_pair_connective thf_unitary_formula ((
+  Fmla (thf_pair_connective, [thf_unitary_formula1, thf_unitary_formula2])
+))
+
+thf_binary_tuple : thf_or_formula    (( thf_or_formula ))
+                 | thf_and_formula   (( thf_and_formula ))
+                 | thf_apply_formula (( thf_apply_formula ))
+
+thf_or_formula : thf_unitary_formula VLINE thf_unitary_formula (( Fmla (Interpreted_Logic Or, [thf_unitary_formula1, thf_unitary_formula2]) ))
+               | thf_or_formula VLINE thf_unitary_formula      (( Fmla (Interpreted_Logic Or, [thf_or_formula, thf_unitary_formula]) ))
+
+thf_and_formula : thf_unitary_formula AMPERSAND thf_unitary_formula (( Fmla (Interpreted_Logic And, [thf_unitary_formula1, thf_unitary_formula2]) ))
+                | thf_and_formula AMPERSAND thf_unitary_formula     (( Fmla (Interpreted_Logic And, [thf_and_formula, thf_unitary_formula]) ))
+
+thf_apply_formula : thf_unitary_formula AT_SIGN thf_unitary_formula (( Fmla (Interpreted_ExtraLogic Apply, [thf_unitary_formula1, thf_unitary_formula2]) ))
+                  | thf_apply_formula AT_SIGN thf_unitary_formula   (( Fmla (Interpreted_ExtraLogic Apply, [thf_apply_formula, thf_unitary_formula]) ))
+
+thf_unitary_formula : thf_quantified_formula (( thf_quantified_formula ))
+                    | thf_unary_formula      (( thf_unary_formula ))
+                    | thf_atom               (( thf_atom ))
+                    | thf_conditional        (( thf_conditional ))
+                    | thf_let                (( thf_let ))
+                    | LPAREN thf_logic_formula RPAREN  (( thf_logic_formula ))
 
-general_list : LBRKT general_terms RBRKT (( general_terms ))
-             | LBRKT RBRKT               (( [] ))
+thf_quantified_formula : thf_quantifier LBRKT thf_variable_list RBRKT COLON thf_unitary_formula ((
+  Quant (thf_quantifier, thf_variable_list, thf_unitary_formula)
+))
+
+thf_variable_list : thf_variable                          (( [thf_variable] ))
+                  | thf_variable COMMA thf_variable_list  (( thf_variable :: thf_variable_list ))
+
+thf_variable : thf_typed_variable (( thf_typed_variable ))
+             | variable_          (( (variable_, NONE) ))
+
+thf_typed_variable : variable_ COLON thf_top_level_type (( (variable_, SOME thf_top_level_type) ))
+
+thf_unary_formula : thf_unary_connective LPAREN thf_logic_formula RPAREN ((
+  Fmla (thf_unary_connective, [thf_logic_formula])
+))
+
+thf_atom : term          (( Atom (THF_Atom_term term) ))
+         | thf_conn_term (( Atom (THF_Atom_conn_term thf_conn_term) ))
 
-general_terms : general_term COMMA general_terms (( general_term :: general_terms ))
-              | general_term                     (( [general_term] ))
+thf_conditional : ITE_F LPAREN thf_logic_formula COMMA thf_logic_formula COMMA thf_logic_formula RPAREN ((
+  Conditional (thf_logic_formula1, thf_logic_formula2, thf_logic_formula3)
+))
+
+thf_let : LET_TF LPAREN thf_let_defn COMMA thf_formula RPAREN ((
+  Let (thf_let_defn, thf_formula)
+))
+
+(*FIXME here we could check that fmla has the right form (TPTP BNF L130-134)*)
+thf_let_defn : thf_quantified_formula ((
+  let
+    val (_, vars, fmla) = extract_quant_info thf_quantified_formula
+  in [Let_fmla (hd vars, fmla)]
+  end
+))
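+(*A sketch of the intended shape (assumed from the action above): a definition
+  given as a quantified formula, e.g.  ^ [X] : p(X),  is decomposed by
+  extract_quant_info into its binder, variable list and body, yielding
+  [Let_fmla (("X", NONE), <parsed body of p(X)>)].*)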
+
+thf_type_formula : thf_typeable_formula COLON thf_top_level_type (( (thf_typeable_formula, thf_top_level_type) ))
+
+thf_typeable_formula : thf_atom                         (( thf_atom ))
+                     | LPAREN thf_logic_formula RPAREN  (( thf_logic_formula ))
+
+thf_subtype : constant SUBTYPE constant (( Subtype(constant1, constant2) ))
+
+thf_top_level_type : thf_logic_formula (( Fmla_type thf_logic_formula ))
 
-general_term : general_data                    (( General_Data general_data ))
-             | general_data COLON general_term (( General_Term (general_data, general_term) ))
-             | general_list                    (( General_List general_list ))
+thf_unitary_type : thf_unitary_formula (( Fmla_type thf_unitary_formula ))
+
+thf_binary_type : thf_mapping_type (( thf_mapping_type ))
+                | thf_xprod_type   (( thf_xprod_type ))
+                | thf_union_type   (( thf_union_type ))
+
+thf_mapping_type : thf_unitary_type ARROW thf_unitary_type (( Fn_type(thf_unitary_type1, thf_unitary_type2) ))
+                 | thf_unitary_type ARROW thf_mapping_type (( Fn_type(thf_unitary_type, thf_mapping_type) ))
+
+thf_xprod_type : thf_unitary_type TIMES thf_unitary_type   (( Prod_type(thf_unitary_type1, thf_unitary_type2) ))
+               | thf_xprod_type TIMES thf_unitary_type     (( Prod_type(thf_xprod_type, thf_unitary_type) ))
+
+thf_union_type : thf_unitary_type PLUS thf_unitary_type    (( Sum_type(thf_unitary_type1, thf_unitary_type2) ))
+               | thf_union_type PLUS thf_unitary_type      (( Sum_type(thf_union_type, thf_unitary_type) ))
+
+thf_sequent : thf_tuple GENTZEN_ARROW thf_tuple  (( Sequent(thf_tuple1, thf_tuple2) ))
+            | LPAREN thf_sequent RPAREN          (( thf_sequent ))
+
+thf_tuple : LBRKT RBRKT                (( [] ))
+          | LBRKT thf_tuple_list RBRKT (( thf_tuple_list ))
+
+thf_tuple_list : thf_logic_formula                      (( [thf_logic_formula] ))
+               | thf_logic_formula COMMA thf_tuple_list (( thf_logic_formula :: thf_tuple_list ))
+
+
+(* TFF Formulas *)
+
+tff_formula : tff_logic_formula  (( tff_logic_formula ))
+            | tff_typed_atom     (( Atom (TFF_Typed_Atom tff_typed_atom) ))
+            | tff_sequent        (( tff_sequent ))
+
+tff_logic_formula : tff_binary_formula   (( tff_binary_formula ))
+                  | tff_unitary_formula  (( tff_unitary_formula ))
+
+tff_binary_formula : tff_binary_nonassoc (( tff_binary_nonassoc ))
+                   | tff_binary_assoc    (( tff_binary_assoc ))
+
+tff_binary_nonassoc : tff_unitary_formula binary_connective tff_unitary_formula (( Fmla (binary_connective, [tff_unitary_formula1, tff_unitary_formula2]) ))
+
+tff_binary_assoc : tff_or_formula  (( tff_or_formula ))
+                 | tff_and_formula (( tff_and_formula ))
 
-atomic_word : LOWER_WORD    (( LOWER_WORD ))
-            | SINGLE_QUOTED (( SINGLE_QUOTED ))
-            | THF           (( "thf" ))
-            | TFF           (( "tff" ))
-            | FOF           (( "fof" ))
-            | CNF           (( "cnf" ))
-            | INCLUDE       (( "include" ))
+tff_or_formula : tff_unitary_formula VLINE tff_unitary_formula      (( Fmla (Interpreted_Logic Or, [tff_unitary_formula1, tff_unitary_formula2]) ))
+               | tff_or_formula VLINE tff_unitary_formula           (( Fmla (Interpreted_Logic Or, [tff_or_formula, tff_unitary_formula]) ))
+
+tff_and_formula : tff_unitary_formula AMPERSAND tff_unitary_formula (( Fmla (Interpreted_Logic And, [tff_unitary_formula1, tff_unitary_formula2]) ))
+                | tff_and_formula AMPERSAND tff_unitary_formula     (( Fmla (Interpreted_Logic And, [tff_and_formula, tff_unitary_formula]) ))
+
+tff_unitary_formula : tff_quantified_formula           (( tff_quantified_formula ))
+                    | tff_unary_formula                (( tff_unary_formula ))
+                    | atomic_formula                   (( atomic_formula ))
+                    | tff_conditional                  (( tff_conditional ))
+                    | tff_let                          (( tff_let ))
+                    | LPAREN tff_logic_formula RPAREN  (( tff_logic_formula ))
+
+tff_quantified_formula : fol_quantifier LBRKT tff_variable_list RBRKT COLON tff_unitary_formula ((
+  Quant (fol_quantifier, tff_variable_list, tff_unitary_formula)
+))
+
+tff_variable_list : tff_variable                         (( [tff_variable] ))
+                  | tff_variable COMMA tff_variable_list (( tff_variable :: tff_variable_list ))
 
-variable_ : UPPER_WORD  (( UPPER_WORD ))
+tff_variable : tff_typed_variable (( tff_typed_variable ))
+             | variable_          (( (variable_, NONE) ))
+
+tff_typed_variable : variable_ COLON tff_atomic_type (( (variable_, SOME tff_atomic_type) ))
+
+tff_unary_formula : unary_connective tff_unitary_formula (( Fmla (unary_connective, [tff_unitary_formula]) ))
+                  | fol_infix_unary                      (( fol_infix_unary ))
+
+tff_conditional : ITE_F LPAREN tff_logic_formula COMMA tff_logic_formula COMMA tff_logic_formula RPAREN ((
+  Conditional (tff_logic_formula1, tff_logic_formula2, tff_logic_formula3)
+))
 
-general_function: atomic_word LPAREN general_terms RPAREN (( Application (atomic_word, general_terms) ))
+tff_let : LET_TF LPAREN tff_let_term_defn COMMA tff_formula RPAREN ((Let (tff_let_term_defn, tff_formula) ))
+        | LET_FF LPAREN tff_let_formula_defn COMMA tff_formula RPAREN (( Let (tff_let_formula_defn, tff_formula) ))
+
+(*FIXME here we could check that fmla has the right form (TPTP BNF L210-214)*)
+(*FIXME why "term" if using "Let_fmla"?*)
+tff_let_term_defn : tff_quantified_formula ((
+  let
+    val (_, vars, fmla) = extract_quant_info tff_quantified_formula
+  in [Let_fmla (hd vars, fmla)]
+  end
+))
 
-general_data : atomic_word       (( Atomic_Word atomic_word ))
-             | general_function  (( general_function ))
-             | variable_         (( V variable_ ))
-             | number            (( Number number ))
-             | DISTINCT_OBJECT   (( Distinct_Object DISTINCT_OBJECT ))
-             | formula_data      (( formula_data ))
+(*FIXME here we could check that fmla has the right form (TPTP BNF L215-217)*)
+tff_let_formula_defn : tff_quantified_formula ((
+  let
+    val (_, vars, fmla) = extract_quant_info tff_quantified_formula
+  in [Let_fmla (hd vars, fmla)]
+  end
+))
+
+tff_sequent : tff_tuple GENTZEN_ARROW tff_tuple (( Sequent (tff_tuple1, tff_tuple2) ))
+            | LPAREN tff_sequent RPAREN         (( tff_sequent ))
+
+tff_tuple : LBRKT RBRKT    (( [] ))
+          | LBRKT tff_tuple_list RBRKT (( tff_tuple_list ))
+
+tff_tuple_list : tff_logic_formula COMMA tff_tuple_list (( tff_logic_formula :: tff_tuple_list ))
+               | tff_logic_formula                      (( [tff_logic_formula] ))
+
+tff_typed_atom : tff_untyped_atom COLON tff_top_level_type (( (fst tff_untyped_atom, SOME tff_top_level_type) ))
+               | LPAREN tff_typed_atom RPAREN              (( tff_typed_atom ))
+
+tff_untyped_atom : functor_       (( (functor_, NONE) ))
+                 | system_functor (( (system_functor, NONE) ))
+
+tff_top_level_type : tff_atomic_type     (( tff_atomic_type ))
+                   | tff_mapping_type    (( tff_mapping_type ))
+                   | tff_quantified_type (( tff_quantified_type ))
+
+tff_quantified_type : DEP_PROD LBRKT tff_variable_list RBRKT COLON tff_monotype ((
+       Fmla_type (Quant (Dep_Prod, tff_variable_list, Type_fmla tff_monotype))
+))
+                    | LPAREN tff_quantified_type RPAREN (( tff_quantified_type ))
+
+tff_monotype : tff_atomic_type                (( tff_atomic_type ))
+             | LPAREN tff_mapping_type RPAREN (( tff_mapping_type ))
+
+tff_unitary_type : tff_atomic_type               (( tff_atomic_type ))
+                 | LPAREN tff_xprod_type RPAREN  (( tff_xprod_type ))
+
+tff_atomic_type : atomic_word   (( Atom_type atomic_word ))
+                | defined_type  (( Defined_type defined_type ))
+                | atomic_word LPAREN tff_type_arguments RPAREN (( Fmla_type (Fmla (Uninterpreted atomic_word, (map Type_fmla tff_type_arguments))) ))
+                | variable_ (( Fmla_type (Pred (Interpreted_ExtraLogic Apply, [Term_Var variable_])) ))
 
-number : integer          (( (Int_num, integer) ))
-       | REAL             (( (Real_num, REAL) ))
-       | RATIONAL         (( (Rat_num, RATIONAL) ))
+tff_type_arguments : tff_atomic_type   (( [tff_atomic_type]  ))
+                   | tff_atomic_type COMMA tff_type_arguments (( tff_atomic_type :: tff_type_arguments ))
+
+tff_mapping_type : tff_unitary_type ARROW tff_atomic_type (( Fn_type(tff_unitary_type, tff_atomic_type) ))
+                 | LPAREN tff_mapping_type RPAREN         (( tff_mapping_type ))
+
+tff_xprod_type : tff_atomic_type TIMES tff_atomic_type (( Prod_type(tff_atomic_type1, tff_atomic_type2) ))
+               | tff_xprod_type TIMES tff_atomic_type  (( Prod_type(tff_xprod_type, tff_atomic_type) ))
+               | LPAREN tff_xprod_type RPAREN          (( tff_xprod_type ))
+
+
+(* FOF Formulas *)
+
+fof_formula : fof_logic_formula  (( fof_logic_formula ))
+            | fof_sequent        (( fof_sequent ))
+
+fof_logic_formula : fof_binary_formula   (( fof_binary_formula ))
+                  | fof_unitary_formula  (( fof_unitary_formula ))
+
+fof_binary_formula : fof_binary_nonassoc (( fof_binary_nonassoc ))
+                   | fof_binary_assoc    (( fof_binary_assoc ))
 
-integer: UNSIGNED_INTEGER (( UNSIGNED_INTEGER ))
-       | SIGNED_INTEGER   (( SIGNED_INTEGER ))
+fof_binary_nonassoc : fof_unitary_formula binary_connective fof_unitary_formula ((
+  Fmla (binary_connective, [fof_unitary_formula1, fof_unitary_formula2] )
+))
+
+fof_binary_assoc : fof_or_formula  (( fof_or_formula ))
+                 | fof_and_formula (( fof_and_formula ))
+
+fof_or_formula : fof_unitary_formula VLINE fof_unitary_formula  (( Fmla (Interpreted_Logic Or, [fof_unitary_formula1, fof_unitary_formula2]) ))
+               | fof_or_formula VLINE fof_unitary_formula       (( Fmla (Interpreted_Logic Or, [fof_or_formula, fof_unitary_formula]) ))
+
+fof_and_formula : fof_unitary_formula AMPERSAND fof_unitary_formula (( Fmla (Interpreted_Logic And, [fof_unitary_formula1, fof_unitary_formula2]) ))
+                | fof_and_formula AMPERSAND fof_unitary_formula     (( Fmla (Interpreted_Logic And, [fof_and_formula, fof_unitary_formula]) ))
+
+fof_unitary_formula : fof_quantified_formula (( fof_quantified_formula ))
+                    | fof_unary_formula      (( fof_unary_formula ))
+                    | atomic_formula         (( atomic_formula ))
+                    | LPAREN fof_logic_formula RPAREN (( fof_logic_formula ))
+
+fof_quantified_formula : fol_quantifier LBRKT fof_variable_list RBRKT COLON fof_unitary_formula ((
+  Quant (fol_quantifier, map (fn v => (v, NONE)) fof_variable_list, fof_unitary_formula)
+))
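+(*For example (invented input),  ! [X, Y] : p(X, Y)  yields
+  Quant (Forall, [("X", NONE), ("Y", NONE)],
+         Pred (Uninterpreted "p", [Term_Var "X", Term_Var "Y"]));
+  FOF variables carry no type annotations, hence the NONE entries.*)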
 
-file_name : SINGLE_QUOTED (( SINGLE_QUOTED ))
+fof_variable_list : variable_                          (( [variable_] ))
+                  | variable_ COMMA fof_variable_list  (( variable_ :: fof_variable_list ))
+
+fof_unary_formula : unary_connective fof_unitary_formula (( Fmla (unary_connective, [fof_unitary_formula]) ))
+                  | fol_infix_unary                      (( fol_infix_unary ))
+
+fof_sequent : fof_tuple GENTZEN_ARROW fof_tuple (( Sequent (fof_tuple1, fof_tuple2) ))
+            | LPAREN fof_sequent RPAREN         (( fof_sequent ))
+
+fof_tuple : LBRKT RBRKT                 (( [] ))
+          | LBRKT fof_tuple_list RBRKT  (( fof_tuple_list ))
+
+fof_tuple_list : fof_logic_formula                      (( [fof_logic_formula] ))
+               | fof_logic_formula COMMA fof_tuple_list (( fof_logic_formula :: fof_tuple_list ))
+
+
+(* CNF Formulas *)
+
+cnf_formula : LPAREN disjunction RPAREN  (( disjunction ))
+            | disjunction                (( disjunction ))
+
+disjunction : literal                    (( literal ))
+            | disjunction VLINE literal  (( Fmla (Interpreted_Logic Or, [disjunction, literal]) ))
+
+literal : atomic_formula        (( atomic_formula ))
+        | TILDE atomic_formula  (( Fmla (Interpreted_Logic Not, [atomic_formula]) ))
+        | fol_infix_unary       (( fol_infix_unary ))
+
+
+(* Special Formulas  *)
+
+thf_conn_term : thf_pair_connective   (( thf_pair_connective ))
+              | assoc_connective      (( assoc_connective ))
+              | thf_unary_connective  (( thf_unary_connective ))
+
+fol_infix_unary : term infix_inequality term (( Pred (infix_inequality, [term1, term2]) ))
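+
+(*For example (invented input),  a != b  is parsed as
+  Pred (Interpreted_Logic NEquals,
+        [Term_Func (Uninterpreted "a", []), Term_Func (Uninterpreted "b", [])])*)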
+
+
+(* Connectives - THF *)
 
-formula_data : DTHF LPAREN thf_formula RPAREN (( Formula_Data (THF, thf_formula) ))
-             | DTFF LPAREN tff_formula RPAREN (( Formula_Data (TFF, tff_formula) ))
-             | DFOF LPAREN fof_formula RPAREN (( Formula_Data (FOF, fof_formula) ))
-             | DCNF LPAREN cnf_formula RPAREN (( Formula_Data (CNF, cnf_formula) ))
-             | DFOT LPAREN term RPAREN (( Term_Data term ))
+thf_quantifier : fol_quantifier (( fol_quantifier ))
+               | CARET          (( Lambda ))
+               | DEP_PROD       (( Dep_Prod ))
+               | DEP_SUM        (( Dep_Sum ))
+               | INDEF_CHOICE   (( Epsilon ))
+               | DEFIN_CHOICE   (( Iota ))
+
+thf_pair_connective : infix_equality    (( infix_equality ))
+                    | infix_inequality  (( infix_inequality ))
+                    | binary_connective (( binary_connective ))
+
+thf_unary_connective : unary_connective (( unary_connective ))
+                     | OPERATOR_FORALL  (( Interpreted_Logic Op_Forall ))
+                     | OPERATOR_EXISTS  (( Interpreted_Logic Op_Exists ))
+
+
+(* Connectives - THF and TFF
+Instead of the subtype_sign non-terminal, the token SUBTYPE is used.
+*)
+
+
+(* Connectives - FOF *)
 
-system_type : ATOMIC_SYSTEM_WORD (( ATOMIC_SYSTEM_WORD ))
+fol_quantifier : EXCLAMATION  (( Forall ))
+               | QUESTION     (( Exists ))
+
+binary_connective : IFF       (( Interpreted_Logic Iff ))
+                  | IMPLIES   (( Interpreted_Logic If ))
+                  | FI        (( Interpreted_Logic Fi ))
+                  | XOR       (( Interpreted_Logic Xor ))
+                  | NOR       (( Interpreted_Logic Nor ))
+                  | NAND      (( Interpreted_Logic Nand ))
+
+assoc_connective : VLINE      (( Interpreted_Logic Or ))
+                 | AMPERSAND  (( Interpreted_Logic And ))
 
-defined_type : ATOMIC_DEFINED_WORD ((
-  case ATOMIC_DEFINED_WORD of
-    "$i" => Type_Ind
+unary_connective : TILDE (( Interpreted_Logic Not ))
+
+
+(* The sequent arrow
+Instead of a separate non-terminal, the token GENTZEN_ARROW is used directly.
+*)
+
+
+(* Types for THF and TFF *)
+
+defined_type : atomic_defined_word ((
+  case atomic_defined_word of
+    "$oType" => Type_Bool
   | "$o" => Type_Bool
   | "$iType" => Type_Ind
-  | "$oType" => Type_Bool
-  | "$int" => Type_Int
+  | "$i" => Type_Ind
+  | "$tType" => Type_Type
   | "$real" => Type_Real
   | "$rat" => Type_Rat
-  | "$tType" => Type_Type
+  | "$int" => Type_Int
   | thing => raise UNRECOGNISED_SYMBOL ("defined_type", thing)
 ))
 
+system_type : atomic_system_word (( atomic_system_word ))
+
+
+(* First-order atoms *)
+
+atomic_formula : plain_atomic_formula   (( plain_atomic_formula ))
+               | defined_atomic_formula (( defined_atomic_formula ))
+               | system_atomic_formula  (( system_atomic_formula ))
+
+plain_atomic_formula : plain_term (( Pred plain_term ))
+
+defined_atomic_formula : defined_plain_formula (( defined_plain_formula ))
+                       | defined_infix_formula (( defined_infix_formula ))
+
+defined_plain_formula : defined_plain_term (( Pred defined_plain_term ))
+
+(*FIXME not used*)
+defined_prop : atomic_defined_word ((
+  case atomic_defined_word of
+    "$true"  => "$true"
+  | "$false" => "$false"
+  | thing => raise UNRECOGNISED_SYMBOL ("defined_prop", thing)
+))
+
+(*FIXME not used*)
+defined_pred : atomic_defined_word ((
+  case atomic_defined_word of
+    "$distinct"  => "$distinct"
+  | "$ite_f" => "$ite_f"
+  | "$less" => "$less"
+  | "$lesseq" => "$lesseq"
+  | "$greater" => "$greater"
+  | "$greatereq" => "$greatereq"
+  | "$is_int" => "$is_int"
+  | "$is_rat" => "$is_rat"
+  | thing => raise UNRECOGNISED_SYMBOL ("defined_pred", thing)
+))
+
+defined_infix_formula : term defined_infix_pred term ((Pred (defined_infix_pred, [term1, term2])))
+
+defined_infix_pred : infix_equality  (( infix_equality ))
+
+infix_equality : EQUALS    (( Interpreted_Logic Equals ))
+
+infix_inequality : NEQUALS (( Interpreted_Logic NEquals ))
+
+system_atomic_formula : system_term  (( Pred system_term ))
+
+
+(* First-order terms *)
+
+term : function_term     (( function_term ))
+     | variable_         (( Term_Var variable_ ))
+     | conditional_term  (( conditional_term ))
+     | let_term          (( let_term ))
+
+function_term : plain_term    (( Term_Func plain_term ))
+              | defined_term  (( defined_term ))
+              | system_term   (( Term_Func system_term ))
+
+plain_term : constant                          (( (constant, []) ))
+           | functor_ LPAREN arguments RPAREN  (( (functor_, arguments) ))
+
+constant : functor_ (( functor_ ))
+
 functor_ : atomic_word (( Uninterpreted atomic_word ))
 
-arguments : term                  (( [term] ))
-          | term COMMA arguments  (( term :: arguments ))
+defined_term : defined_atom        (( defined_atom ))
+             | defined_atomic_term (( defined_atomic_term ))
+
+defined_atom : number          (( Term_Num number ))
+             | DISTINCT_OBJECT (( Term_Distinct_Object DISTINCT_OBJECT ))
+
+defined_atomic_term : defined_plain_term (( Term_Func defined_plain_term ))
 
-system_functor : ATOMIC_SYSTEM_WORD (( System ATOMIC_SYSTEM_WORD ))
-system_constant : system_functor (( system_functor ))
-system_term : system_constant                         (( (system_constant, []) ))
-            | system_functor LPAREN arguments RPAREN  (( (system_functor, arguments) ))
+defined_plain_term : defined_constant                        (( (defined_constant, []) ))
+                   | defined_functor LPAREN arguments RPAREN (( (defined_functor, arguments) ))
+
+defined_constant : defined_functor (( defined_functor ))
 
-defined_functor : ATOMIC_DEFINED_WORD ((
-  case ATOMIC_DEFINED_WORD of
-    "$sum" => Interpreted_ExtraLogic Sum
+(*FIXME would be nicer to split these up*)
+defined_functor : atomic_defined_word ((
+  case atomic_defined_word of
+    "$uminus" => Interpreted_ExtraLogic UMinus
+  | "$sum" => Interpreted_ExtraLogic Sum
   | "$difference" => Interpreted_ExtraLogic Difference
   | "$product" => Interpreted_ExtraLogic Product
   | "$quotient" => Interpreted_ExtraLogic Quotient
@@ -316,7 +727,6 @@
   | "$to_int" => Interpreted_ExtraLogic To_Int
   | "$to_rat" => Interpreted_ExtraLogic To_Rat
   | "$to_real" => Interpreted_ExtraLogic To_Real
-  | "$uminus" => Interpreted_ExtraLogic UMinus
 
   | "$i" => TypeSymbol Type_Ind
   | "$o" => TypeSymbol Type_Bool
@@ -339,296 +749,46 @@
   | "$is_int" => Interpreted_ExtraLogic Is_Int
   | "$is_rat" => Interpreted_ExtraLogic Is_Rat
 
+  | "$distinct" => Interpreted_ExtraLogic Distinct
+
   | thing => raise UNRECOGNISED_SYMBOL ("defined_functor", thing)
 ))
 
-defined_constant : defined_functor (( defined_functor ))
+system_term : system_constant                         (( (system_constant, []) ))
+            | system_functor LPAREN arguments RPAREN  (( (system_functor, arguments) ))
+
+system_constant : system_functor (( system_functor ))
 
-defined_plain_term : defined_constant                        (( (defined_constant, []) ))
-                   | defined_functor LPAREN arguments RPAREN (( (defined_functor, arguments) ))
-defined_atomic_term : defined_plain_term (( Term_Func defined_plain_term ))
-defined_atom : number          (( Term_Num number ))
-             | DISTINCT_OBJECT (( Term_Distinct_Object DISTINCT_OBJECT ))
-defined_term : defined_atom        (( defined_atom ))
-             | defined_atomic_term (( defined_atomic_term ))
-constant : functor_ (( functor_ ))
-plain_term : constant                          (( (constant, []) ))
-           | functor_ LPAREN arguments RPAREN  (( (functor_, arguments) ))
-function_term : plain_term    (( Term_Func plain_term ))
-              | defined_term  (( defined_term ))
-              | system_term   (( Term_Func system_term ))
+system_functor : atomic_system_word (( System atomic_system_word ))
+
+variable_ : UPPER_WORD  (( UPPER_WORD ))
+
+arguments : term                  (( [term] ))
+          | term COMMA arguments  (( term :: arguments ))
 
 conditional_term : ITE_T LPAREN tff_logic_formula COMMA term COMMA term RPAREN ((
   Term_Conditional (tff_logic_formula, term1, term2)
 ))
 
-term : function_term     (( function_term ))
-     | variable_         (( Term_Var variable_ ))
-     | conditional_term  (( conditional_term ))
 
-system_atomic_formula : system_term  (( Pred system_term ))
-infix_equality : EQUALS    (( Interpreted_Logic Equals ))
-infix_inequality : NEQUALS (( Interpreted_Logic NEquals ))
-defined_infix_pred : infix_equality  (( infix_equality ))
-defined_infix_formula : term defined_infix_pred term ((Pred (defined_infix_pred, [term1, term2])))
-defined_prop : ATOMIC_DEFINED_WORD ((
-  case ATOMIC_DEFINED_WORD of
-    "$true"  => "$true"
-  | "$false" => "$false"
-  | thing => raise UNRECOGNISED_SYMBOL ("defined_prop", thing)
-))
-defined_pred : ATOMIC_DEFINED_WORD ((
-  case ATOMIC_DEFINED_WORD of
-    "$distinct"  => "$distinct"
-  | "$ite_f" => "$ite_f"
-  | "$less" => "$less"
-  | "$lesseq" => "$lesseq"
-  | "$greater" => "$greater"
-  | "$greatereq" => "$greatereq"
-  | "$is_int" => "$is_int"
-  | "$is_rat" => "$is_rat"
-  | thing => raise UNRECOGNISED_SYMBOL ("defined_pred", thing)
-))
-defined_plain_formula : defined_plain_term (( Pred defined_plain_term ))
-defined_atomic_formula : defined_plain_formula (( defined_plain_formula ))
-                       | defined_infix_formula (( defined_infix_formula ))
-plain_atomic_formula : plain_term (( Pred plain_term ))
-atomic_formula : plain_atomic_formula   (( plain_atomic_formula ))
-               | defined_atomic_formula (( defined_atomic_formula ))
-               | system_atomic_formula  (( system_atomic_formula ))
-
-assoc_connective : VLINE      (( Interpreted_Logic Or ))
-                 | AMPERSAND  (( Interpreted_Logic And ))
-binary_connective : IFF       (( Interpreted_Logic Iff ))
-                  | IMPLIES   (( Interpreted_Logic If ))
-                  | IF        (( Interpreted_Logic Fi ))
-                  | XOR       (( Interpreted_Logic Xor ))
-                  | NOR       (( Interpreted_Logic Nor ))
-                  | NAND      (( Interpreted_Logic Nand ))
-
-fol_quantifier : EXCLAMATION  (( Forall ))
-               | QUESTION     (( Exists ))
-thf_unary_connective : unary_connective (( unary_connective ))
-                     | OPERATOR_FORALL  (( Interpreted_Logic Op_Forall ))
-                     | OPERATOR_EXISTS  (( Interpreted_Logic Op_Exists ))
-thf_pair_connective : infix_equality    (( infix_equality ))
-                    | infix_inequality  (( infix_inequality ))
-                    | binary_connective (( binary_connective ))
-thf_quantifier : fol_quantifier (( fol_quantifier ))
-               | CARET          (( Lambda ))
-               | DEP_PROD       (( Dep_Prod ))
-               | DEP_SUM        (( Dep_Sum ))
-               | INDEF_CHOICE   (( Epsilon ))
-               | DEFIN_CHOICE   (( Iota ))
-fol_infix_unary : term infix_inequality term (( Pred (infix_inequality, [term1, term2]) ))
-thf_conn_term : thf_pair_connective   (( thf_pair_connective ))
-              | assoc_connective      (( assoc_connective ))
-              | thf_unary_connective  (( thf_unary_connective ))
-literal : atomic_formula        (( atomic_formula ))
-        | TILDE atomic_formula  (( Fmla (Interpreted_Logic Not, [atomic_formula]) ))
-        | fol_infix_unary       (( fol_infix_unary ))
-disjunction : literal                    (( literal ))
-            | disjunction VLINE literal  (( Fmla (Interpreted_Logic Or, [disjunction, literal]) ))
-cnf_formula : LPAREN disjunction RPAREN  (( disjunction ))
-            | disjunction                (( disjunction ))
-fof_tuple_list : fof_logic_formula                      (( [fof_logic_formula] ))
-               | fof_logic_formula COMMA fof_tuple_list (( fof_logic_formula :: fof_tuple_list ))
-fof_tuple : LBRKT RBRKT                 (( [] ))
-          | LBRKT fof_tuple_list RBRKT  (( fof_tuple_list ))
-fof_sequent : fof_tuple GENTZEN_ARROW fof_tuple (( Sequent (fof_tuple1, fof_tuple2) ))
-            | LPAREN fof_sequent RPAREN         (( fof_sequent ))
-unary_connective : TILDE (( Interpreted_Logic Not ))
-fof_unary_formula : unary_connective fof_unitary_formula (( Fmla (unary_connective, [fof_unitary_formula]) ))
-                  | fol_infix_unary                      (( fol_infix_unary ))
-fof_variable_list : variable_                          (( [variable_] ))
-                  | variable_ COMMA fof_variable_list  (( variable_ :: fof_variable_list ))
-fof_quantified_formula : fol_quantifier LBRKT fof_variable_list RBRKT COLON fof_unitary_formula ((
-  Quant (fol_quantifier, map (fn v => (v, NONE)) fof_variable_list, fof_unitary_formula)
-))
-fof_unitary_formula : fof_quantified_formula (( fof_quantified_formula ))
-                    | fof_unary_formula      (( fof_unary_formula ))
-                    | atomic_formula         (( atomic_formula ))
-                    | LPAREN fof_logic_formula RPAREN (( fof_logic_formula ))
-fof_and_formula : fof_unitary_formula AMPERSAND fof_unitary_formula (( Fmla (Interpreted_Logic And, [fof_unitary_formula1, fof_unitary_formula2]) ))
-                | fof_and_formula AMPERSAND fof_unitary_formula     (( Fmla (Interpreted_Logic And, [fof_and_formula, fof_unitary_formula]) ))
-fof_or_formula : fof_unitary_formula VLINE fof_unitary_formula  (( Fmla (Interpreted_Logic Or, [fof_unitary_formula1, fof_unitary_formula2]) ))
-               | fof_or_formula VLINE fof_unitary_formula       (( Fmla (Interpreted_Logic Or, [fof_or_formula, fof_unitary_formula]) ))
-fof_binary_assoc : fof_or_formula  (( fof_or_formula ))
-                 | fof_and_formula (( fof_and_formula ))
-fof_binary_nonassoc : fof_unitary_formula binary_connective fof_unitary_formula ((
-  Fmla (binary_connective, [fof_unitary_formula1, fof_unitary_formula2] )
-))
-fof_binary_formula : fof_binary_nonassoc (( fof_binary_nonassoc ))
-                   | fof_binary_assoc    (( fof_binary_assoc ))
-fof_logic_formula : fof_binary_formula   (( fof_binary_formula ))
-                  | fof_unitary_formula  (( fof_unitary_formula ))
-fof_formula : fof_logic_formula  (( fof_logic_formula ))
-            | fof_sequent        (( fof_sequent ))
+let_term : LET_FT LPAREN tff_let_formula_defn COMMA term RPAREN ((Term_Let (tff_let_formula_defn, term) ))
+         | LET_TT LPAREN tff_let_term_defn COMMA term RPAREN ((Term_Let (tff_let_term_defn, term) ))
 
 
-tff_tuple : LBRKT RBRKT    (( [] ))
-          | LBRKT tff_tuple_list RBRKT (( tff_tuple_list ))
-tff_tuple_list : tff_logic_formula COMMA tff_tuple_list (( tff_logic_formula :: tff_tuple_list ))
-               | tff_logic_formula                      (( [tff_logic_formula] ))
-tff_sequent : tff_tuple GENTZEN_ARROW tff_tuple (( Sequent (tff_tuple1, tff_tuple2) ))
-            | LPAREN tff_sequent RPAREN         (( tff_sequent ))
-tff_conditional : ITE_F LPAREN tff_logic_formula COMMA tff_logic_formula COMMA tff_logic_formula RPAREN ((
-  Conditional (tff_logic_formula1, tff_logic_formula2, tff_logic_formula3)
-))
-tff_defined_var : variable_ LET tff_logic_formula (( Let_fmla ((variable_, NONE), tff_logic_formula) ))
-                | variable_ LET_TERM term (( Let_term ((variable_, NONE), term) ))
-                | LPAREN tff_defined_var RPAREN (( tff_defined_var ))
-tff_let_list : tff_defined_var                    (( [tff_defined_var] ))
-             | tff_defined_var COMMA tff_let_list (( tff_defined_var :: tff_let_list ))
-tptp_let : LET LBRKT tff_let_list RBRKT COLON tff_unitary_formula ((
-  Let (tff_let_list, tff_unitary_formula)
-))
-tff_xprod_type : tff_atomic_type TIMES tff_atomic_type (( Prod_type(tff_atomic_type1, tff_atomic_type2) ))
-               | tff_xprod_type TIMES tff_atomic_type  (( Prod_type(tff_xprod_type, tff_atomic_type) ))
-               | LPAREN tff_xprod_type RPAREN          (( tff_xprod_type ))
-tff_mapping_type : tff_unitary_type ARROW tff_atomic_type (( Fn_type(tff_unitary_type, tff_atomic_type) ))
-                 | LPAREN tff_mapping_type RPAREN         (( tff_mapping_type ))
-tff_atomic_type : atomic_word   (( Atom_type atomic_word ))
-                | defined_type  (( Defined_type defined_type ))
-tff_unitary_type : tff_atomic_type               (( tff_atomic_type ))
-                 | LPAREN tff_xprod_type RPAREN  (( tff_xprod_type ))
-tff_top_level_type : tff_atomic_type   (( tff_atomic_type ))
-                   | tff_mapping_type  (( tff_mapping_type ))
-tff_untyped_atom : functor_       (( (functor_, NONE) ))
-                 | system_functor (( (system_functor, NONE) ))
-tff_typed_atom : tff_untyped_atom COLON tff_top_level_type (( (fst tff_untyped_atom, SOME tff_top_level_type) ))
-               | LPAREN tff_typed_atom RPAREN              (( tff_typed_atom ))
-
-tff_unary_formula : unary_connective tff_unitary_formula (( Fmla (unary_connective, [tff_unitary_formula]) ))
-                  | fol_infix_unary                      (( fol_infix_unary ))
-tff_typed_variable : variable_ COLON tff_atomic_type (( (variable_, SOME tff_atomic_type) ))
-tff_variable : tff_typed_variable (( tff_typed_variable ))
-             | variable_          (( (variable_, NONE) ))
-tff_variable_list : tff_variable                         (( [tff_variable] ))
-                  | tff_variable COMMA tff_variable_list (( tff_variable :: tff_variable_list ))
-tff_quantified_formula : fol_quantifier LBRKT tff_variable_list RBRKT COLON tff_unitary_formula ((
-  Quant (fol_quantifier, tff_variable_list, tff_unitary_formula)
-))
-tff_unitary_formula : tff_quantified_formula           (( tff_quantified_formula ))
-                    | tff_unary_formula                (( tff_unary_formula ))
-                    | atomic_formula                   (( atomic_formula ))
-                    | tptp_let                         (( tptp_let ))
-                    | variable_                        (( Pred (Uninterpreted variable_, []) ))
-                    | tff_conditional                  (( tff_conditional ))
-                    | LPAREN tff_logic_formula RPAREN  (( tff_logic_formula ))
-tff_and_formula : tff_unitary_formula AMPERSAND tff_unitary_formula (( Fmla (Interpreted_Logic And, [tff_unitary_formula1, tff_unitary_formula2]) ))
-                | tff_and_formula AMPERSAND tff_unitary_formula     (( Fmla (Interpreted_Logic And, [tff_and_formula, tff_unitary_formula]) ))
-tff_or_formula : tff_unitary_formula VLINE tff_unitary_formula      (( Fmla (Interpreted_Logic Or, [tff_unitary_formula1, tff_unitary_formula2]) ))
-               | tff_or_formula VLINE tff_unitary_formula           (( Fmla (Interpreted_Logic Or, [tff_or_formula, tff_unitary_formula]) ))
-tff_binary_assoc : tff_or_formula  (( tff_or_formula ))
-                 | tff_and_formula (( tff_and_formula ))
-tff_binary_nonassoc : tff_unitary_formula binary_connective tff_unitary_formula (( Fmla (binary_connective, [tff_unitary_formula1, tff_unitary_formula2]) ))
-tff_binary_formula : tff_binary_nonassoc (( tff_binary_nonassoc ))
-                   | tff_binary_assoc    (( tff_binary_assoc ))
-tff_logic_formula : tff_binary_formula   (( tff_binary_formula ))
-                  | tff_unitary_formula  (( tff_unitary_formula ))
-tff_formula : tff_logic_formula  (( tff_logic_formula ))
-            | tff_typed_atom     (( Atom (TFF_Typed_Atom tff_typed_atom) ))
-            | tff_sequent        (( tff_sequent ))
+(* Formula sources
+The following non-terminals are not currently used:
+source, sources, dag_source, inference_record, inference_rule, parent_list,
+parent_info, parent_details, internal_source, intro_type, external_source,
+file_source, file_info, theory, theory_name, creator_source, creator_name.
+*)
 
-thf_tuple : LBRKT RBRKT                (( [] ))
-          | LBRKT thf_tuple_list RBRKT (( thf_tuple_list ))
-thf_tuple_list : thf_logic_formula                      (( [thf_logic_formula] ))
-               | thf_logic_formula COMMA thf_tuple_list (( thf_logic_formula :: thf_tuple_list ))
-thf_sequent : thf_tuple GENTZEN_ARROW thf_tuple  (( Sequent(thf_tuple1, thf_tuple2) ))
-            | LPAREN thf_sequent RPAREN          (( thf_sequent ))
-thf_conditional : ITE_F LPAREN thf_logic_formula COMMA thf_logic_formula COMMA thf_logic_formula RPAREN ((
-  Conditional (thf_logic_formula1, thf_logic_formula2, thf_logic_formula3)
-))
-thf_defined_var : thf_variable LET thf_logic_formula (( Let_fmla (thf_variable, thf_logic_formula) ))
-                | LPAREN thf_defined_var RPAREN      (( thf_defined_var ))
-thf_let_list : thf_defined_var                    (( [thf_defined_var] ))
-             | thf_defined_var COMMA thf_let_list (( thf_defined_var :: thf_let_list ))
-thf_let : LET LBRKT thf_let_list RBRKT COLON thf_unitary_formula ((
-  Let (thf_let_list, thf_unitary_formula)
-))
-thf_atom : term          (( Atom (THF_Atom_term term) ))
-         | thf_conn_term (( Atom (THF_Atom_conn_term thf_conn_term) ))
-thf_union_type : thf_unitary_type PLUS thf_unitary_type    (( Sum_type(thf_unitary_type1, thf_unitary_type2) ))
-               | thf_union_type PLUS thf_unitary_type      (( Sum_type(thf_union_type, thf_unitary_type) ))
-thf_xprod_type : thf_unitary_type TIMES thf_unitary_type   (( Prod_type(thf_unitary_type1, thf_unitary_type2) ))
-               | thf_xprod_type TIMES thf_unitary_type     (( Prod_type(thf_xprod_type, thf_unitary_type) ))
-thf_mapping_type : thf_unitary_type ARROW thf_unitary_type (( Fn_type(thf_unitary_type1, thf_unitary_type2) ))
-                 | thf_unitary_type ARROW thf_mapping_type (( Fn_type(thf_unitary_type, thf_mapping_type) ))
-thf_binary_type : thf_mapping_type (( thf_mapping_type ))
-                | thf_xprod_type   (( thf_xprod_type ))
-                | thf_union_type   (( thf_union_type ))
-thf_unitary_type : thf_unitary_formula (( Fmla_type thf_unitary_formula ))
-thf_top_level_type : thf_logic_formula (( Fmla_type thf_logic_formula ))
-thf_subtype : constant SUBTYPE constant (( Subtype(constant1, constant2) ))
-thf_typeable_formula : thf_atom                         (( thf_atom ))
-                     | LPAREN thf_logic_formula RPAREN  (( thf_logic_formula ))
-thf_type_formula : thf_typeable_formula COLON thf_top_level_type (( (thf_typeable_formula, thf_top_level_type) ))
-thf_unary_formula : thf_unary_connective LPAREN thf_logic_formula RPAREN ((
-  Fmla (thf_unary_connective, [thf_logic_formula])
-))
-thf_typed_variable : variable_ COLON thf_top_level_type (( (variable_, SOME thf_top_level_type) ))
-thf_variable : thf_typed_variable (( thf_typed_variable ))
-             | variable_          (( (variable_, NONE) ))
-thf_variable_list : thf_variable                          (( [thf_variable] ))
-                  | thf_variable COMMA thf_variable_list  (( thf_variable :: thf_variable_list ))
-thf_quantified_formula : thf_quantifier LBRKT thf_variable_list RBRKT COLON thf_unitary_formula ((
-  Quant (thf_quantifier, thf_variable_list, thf_unitary_formula)
-))
-thf_unitary_formula : thf_quantified_formula (( thf_quantified_formula ))
-                    | thf_unary_formula      (( thf_unary_formula ))
-                    | thf_atom               (( thf_atom ))
-                    | thf_let                (( thf_let ))
-                    | thf_conditional        (( thf_conditional ))
-                    | LPAREN thf_logic_formula RPAREN  (( thf_logic_formula ))
-thf_apply_formula : thf_unitary_formula AT_SIGN thf_unitary_formula (( Fmla (Interpreted_ExtraLogic Apply, [thf_unitary_formula1, thf_unitary_formula2]) ))
-                  | thf_apply_formula AT_SIGN thf_unitary_formula   (( Fmla (Interpreted_ExtraLogic Apply, [thf_apply_formula, thf_unitary_formula]) ))
-thf_and_formula : thf_unitary_formula AMPERSAND thf_unitary_formula (( Fmla (Interpreted_Logic And, [thf_unitary_formula1, thf_unitary_formula2]) ))
-                | thf_and_formula AMPERSAND thf_unitary_formula     (( Fmla (Interpreted_Logic And, [thf_and_formula, thf_unitary_formula]) ))
-thf_or_formula : thf_unitary_formula VLINE thf_unitary_formula (( Fmla (Interpreted_Logic Or, [thf_unitary_formula1, thf_unitary_formula2]) ))
-               | thf_or_formula VLINE thf_unitary_formula      (( Fmla (Interpreted_Logic Or, [thf_or_formula, thf_unitary_formula]) ))
-thf_binary_tuple : thf_or_formula    (( thf_or_formula ))
-                 | thf_and_formula   (( thf_and_formula ))
-                 | thf_apply_formula (( thf_apply_formula ))
-thf_binary_pair : thf_unitary_formula thf_pair_connective thf_unitary_formula ((
-  Fmla (thf_pair_connective, [thf_unitary_formula1, thf_unitary_formula2])
-))
-thf_binary_formula : thf_binary_pair   (( thf_binary_pair ))
-                   | thf_binary_tuple  (( thf_binary_tuple ))
-                   | thf_binary_type   (( THF_type thf_binary_type ))
-thf_logic_formula : thf_binary_formula   (( thf_binary_formula ))
-                  | thf_unitary_formula  (( thf_unitary_formula ))
-                  | thf_type_formula     (( THF_typing thf_type_formula ))
-                  | thf_subtype          (( THF_type thf_subtype ))
-thf_formula : thf_logic_formula (( thf_logic_formula ))
-            | thf_sequent       (( thf_sequent ))
+
+(* Useful info fields *)
 
-formula_role : LOWER_WORD (( classify_role LOWER_WORD ))
-
-thf_annotated : THF LPAREN name COMMA formula_role COMMA thf_formula annotations RPAREN PERIOD ((
-  Annotated_Formula ((file_name, THFleft + 1, THFright + 1),
-   THF, name, formula_role, thf_formula, annotations)
-))
-
-tff_annotated : TFF LPAREN name COMMA formula_role COMMA tff_formula annotations RPAREN PERIOD ((
-  Annotated_Formula ((file_name, TFFleft + 1, TFFright + 1),
-   TFF, name, formula_role, tff_formula, annotations)
-))
+optional_info : COMMA useful_info (( useful_info ))
+              |                   (( [] ))
 
-fof_annotated : FOF LPAREN name COMMA formula_role COMMA fof_formula annotations RPAREN PERIOD ((
-  Annotated_Formula ((file_name, FOFleft + 1, FOFright + 1),
-   FOF, name, formula_role, fof_formula, annotations)
-))
-
-cnf_annotated : CNF LPAREN name COMMA formula_role COMMA cnf_formula annotations RPAREN PERIOD ((
-  Annotated_Formula ((file_name, CNFleft + 1, CNFright + 1),
-   CNF, name, formula_role, cnf_formula, annotations)
-))
-
-annotated_formula : cnf_annotated (( cnf_annotated ))
-                  | fof_annotated (( fof_annotated ))
-                  | tff_annotated (( tff_annotated ))
-                  | thf_annotated (( thf_annotated ))
+useful_info : general_list (( general_list ))
 
 include_ : INCLUDE LPAREN file_name formula_selection RPAREN PERIOD ((
   Include (file_name, formula_selection)
@@ -640,14 +800,57 @@
 name_list : name COMMA name_list   (( name :: name_list ))
           | name                   (( [name] ))
 
+
+(* Non-logical data *)
+
+general_term : general_data                    (( General_Data general_data ))
+             | general_data COLON general_term (( General_Term (general_data, general_term) ))
+             | general_list                    (( General_List general_list ))
+
+general_data : atomic_word       (( Atomic_Word atomic_word ))
+             | general_function  (( general_function ))
+             | variable_         (( V variable_ ))
+             | number            (( Number number ))
+             | DISTINCT_OBJECT   (( Distinct_Object DISTINCT_OBJECT ))
+             | formula_data      (( formula_data ))
+
+general_function: atomic_word LPAREN general_terms RPAREN (( Application (atomic_word, general_terms) ))
+
+formula_data : DTHF LPAREN thf_formula RPAREN (( Formula_Data (THF, thf_formula) ))
+             | DTFF LPAREN tff_formula RPAREN (( Formula_Data (TFF, tff_formula) ))
+             | DFOF LPAREN fof_formula RPAREN (( Formula_Data (FOF, fof_formula) ))
+             | DCNF LPAREN cnf_formula RPAREN (( Formula_Data (CNF, cnf_formula) ))
+             | DFOT LPAREN term RPAREN (( Term_Data term ))
+
+general_list : LBRKT general_terms RBRKT (( general_terms ))
+             | LBRKT RBRKT               (( [] ))
+
+general_terms : general_term COMMA general_terms (( general_term :: general_terms ))
+              | general_term                     (( [general_term] ))
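+
+(*For example (invented annotation), the source term
+    inference(resolution, [], [c1, c2])
+  is parsed by general_function into
+    Application ("inference",
+      [General_Data (Atomic_Word "resolution"), General_List [],
+       General_List [General_Data (Atomic_Word "c1"),
+                     General_Data (Atomic_Word "c2")]])
+  and then wrapped as General_Data by the general_term rule above.*)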
+
+
+(* General purpose *)
+
 name : atomic_word (( atomic_word ))
      | integer     (( integer ))
 
-tptp_input : annotated_formula (( annotated_formula ))
-           | include_           (( include_ ))
+atomic_word : LOWER_WORD    (( LOWER_WORD ))
+            | SINGLE_QUOTED (( SINGLE_QUOTED ))
+            | THF           (( "thf" ))
+            | TFF           (( "tff" ))
+            | FOF           (( "fof" ))
+            | CNF           (( "cnf" ))
+            | INCLUDE       (( "include" ))
+
+atomic_defined_word : DOLLAR_WORD (( DOLLAR_WORD ))
 
-tptp_file : tptp_input tptp_file (( tptp_input :: tptp_file ))
-          | COMMENT tptp_file    (( tptp_file ))
-          |                      (( [] ))
+atomic_system_word : DOLLAR_DOLLAR_WORD (( DOLLAR_DOLLAR_WORD ))
+
+integer: UNSIGNED_INTEGER (( UNSIGNED_INTEGER ))
+       | SIGNED_INTEGER   (( SIGNED_INTEGER ))
 
-tptp : tptp_file (( tptp_file ))
+number : integer          (( (Int_num, integer) ))
+       | REAL             (( (Real_num, REAL) ))
+       | RATIONAL         (( (Rat_num, RATIONAL) ))
+
+file_name : SINGLE_QUOTED (( SINGLE_QUOTED ))
--- a/src/HOL/TPTP/TPTP_Parser/tptp_interpret.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_interpret.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -13,9 +13,9 @@
   type const_map = (string * term) list
   type var_types = (string * typ option) list
 
-  (*mapping from THF types to Isabelle/HOL types. This map works over all
-  base types (i.e. THF functions must be interpreted as Isabelle/HOL functions).
-  The map must be total wrt THF types. Later on there could be a configuration
+  (*mapping from TPTP types to Isabelle/HOL types. This map works over all
+  base types (i.e. TPTP functions must be interpreted as Isabelle/HOL functions).
+  The map must be total wrt TPTP types. Later on there could be a configuration
   option to make a map extensible.*)
   type type_map = (TPTP_Syntax.tptp_type * typ) list
 
@@ -33,7 +33,7 @@
   directive, may include the meaning of an entire TPTP file, is an extended
   Isabelle/HOL theory, an explicit map from constants to their Isabelle/HOL
   counterparts and their types, the meaning of any TPTP formulas encountered
-  while interpreting that statement, and a map from THF to Isabelle/HOL types
+  while interpreting that statement, and a map from TPTP to Isabelle/HOL types
   (these types would have been added to the theory returned in the first position
   of the tuple). The last value is NONE when the function which produced the
   whole tptp_general_meaning value was given a type_map argument -- since
@@ -53,7 +53,7 @@
      generative_type_interpretation : bool,
      generative_const_interpretation : bool*)}
 
-  (*map types form THF to Isabelle/HOL*)
+  (*map types from TPTP to Isabelle/HOL*)
   val interpret_type : config -> theory -> type_map ->
     TPTP_Syntax.tptp_type -> typ
 
@@ -68,11 +68,11 @@
   Arguments:
     cautious = indicates whether additional checks are made to check
       that all used types have been declared.
-    type_map = mapping of THF-types to Isabelle/HOL types. This argument may be
+    type_map = mapping of TPTP-types to Isabelle/HOL types. This argument may be
       given to force a specific mapping: this is usually done for using an
-      imported THF problem in Isar.
+      imported TPTP problem in Isar.
     const_map = as previous, but for constants.
-    path_prefix = path where THF problems etc are located (to help the "include"
+    path_prefix = path where TPTP problems etc are located (to help the "include"
       directive find the files.
     thy = theory where interpreted info will be stored.
   *)
@@ -93,8 +93,8 @@
     Arguments:
       new_basic_types = indicates whether interpretations of $i and $o
         types are to be added to the type map (unless it is Given).
-        This is "true" if we are running this over a fresh THF problem, but
-        "false" if we are calling this _during_ the interpretation of a THF file
+        This is "true" if we are running this over a fresh TPTP problem, but
+        "false" if we are calling this _during_ the interpretation of a TPTP file
         (i.e. when interpreting an "include" directive.
       config = config
       path_prefix = " "
@@ -118,13 +118,13 @@
   exception MISINTERPRET_TERM of string * TPTP_Syntax.tptp_term
   exception MISINTERPRET_TYPE of string * TPTP_Syntax.tptp_type
 
-  (*Generates a fresh Isabelle/HOL type for interpreting a THF type in a theory.*)
+  (*Generates a fresh Isabelle/HOL type for interpreting a TPTP type in a theory.*)
   val declare_type : config -> (TPTP_Syntax.tptp_type * string) -> type_map ->
     theory -> type_map * theory
 
   (*Returns the list of all files included in a directory and its
   subdirectories. This is only used for testing the parser/interpreter against
-  all THF problems.*)
+  all TPTP problems.*)
   val get_file_list : Path.T -> Path.T list
 
   type manifest = TPTP_Problem_Name.problem_name * tptp_general_meaning
@@ -133,9 +133,6 @@
 
   val import_file : bool -> Path.T -> Path.T -> type_map -> const_map ->
     theory -> theory
-
-  val extract_inference_info : (TPTP_Syntax.general_term * 'a list) option ->
-                               (string * string list) option
 end
 
 structure TPTP_Interpret : TPTP_INTERPRET =
@@ -178,7 +175,8 @@
   (type T = manifest list
    val empty = []
    val extend = I
-   val merge = Library.merge (op =))
+   (*SMLNJ complains if the non-expanded form is used below*)
+   fun merge v = Library.merge (op =) v)
 val get_manifests = TPTP_Data.get
 
 
@@ -189,7 +187,8 @@
 
 val IND_TYPE = "ind"
 
-fun mk_binding config ident =
+(*SMLNJ complains if a type annotation is not used below*)
+fun mk_binding (config : config) ident =
   let
     val pre_binding = Binding.name ident
   in
@@ -243,13 +242,13 @@
       Defined_type typ
   | fmlatype_to_type (Atom (THF_Atom_term (Term_Func (Uninterpreted str, [])))) =
       Atom_type str
-  | fmlatype_to_type (THF_type (Fn_type (ty1, ty2))) =
+  | fmlatype_to_type (Type_fmla (Fn_type (ty1, ty2))) =
       let
         val ty1' = case ty1 of
-            Fn_type _ => fmlatype_to_type (THF_type ty1)
+            Fn_type _ => fmlatype_to_type (Type_fmla ty1)
           | Fmla_type ty1 => fmlatype_to_type ty1
         val ty2' = case ty2 of
-            Fn_type _ => fmlatype_to_type (THF_type ty2)
+            Fn_type _ => fmlatype_to_type (Type_fmla ty2)
           | Fmla_type ty2 => fmlatype_to_type ty2
       in Fn_type (ty1', ty2') end
 
@@ -325,7 +324,7 @@
   (Const (str, Term.dummyT) $ Bound 0 $ Bound 1)
    |> (Term.absdummy Term.dummyT #> Term.absdummy Term.dummyT)
 
-fun dummy_THF () =
+fun dummy_term () =
   HOLogic.choice_const Term.dummyT $ Term.absdummy Term.dummyT @{term "True"}
 
 fun interpret_symbol config thy language const_map symb =
@@ -355,7 +354,7 @@
         | Remainder_F | Floor | Ceiling | Truncate | Round | To_Int | To_Rat
         | To_Real | EvalEq | Is_Int | Is_Rat*)
         | Apply => raise MISINTERPRET_SYMBOL ("Malformed term?", symb)
-        | _ => dummy_THF ()
+        | _ => dummy_term ()
         )
    | Interpreted_Logic logic_symbol =>
        (case logic_symbol of
@@ -521,7 +520,7 @@
         val num_term =
           if number_kind = Int_num then
             HOLogic.mk_number @{typ "int"} (the (Int.fromString num))
-          else dummy_THF () (*FIXME: not yet supporting rationals and reals*)
+          else dummy_term () (*FIXME: not yet supporting rationals and reals*)
       in (num_term, thy) end
   | Term_Distinct_Object str =>
       let
@@ -649,14 +648,19 @@
         | Dep_Sum =>
             raise MISINTERPRET_FORMULA ("Unsupported", tptp_fmla)
         end
-  (*FIXME unsupported
-    | Conditional of tptp_formula * tptp_formula * tptp_formula
-    | Let of tptp_let list * tptp_formula
-
-    and tptp_let =
-        Let_fmla of (string * tptp_type option) * tptp_formula (*both TFF and THF*)
-      | Let_term of (string * tptp_type option) * tptp_term  (*only TFF*)
-  *)
+    | Conditional (fmla1, fmla2, fmla3) =>
+        let
+          val interp = interpret_formula config language const_map var_types type_map
+          val (((fmla1', fmla2'), fmla3'), thy') =
+            interp fmla1 thy
+            ||>> interp fmla2
+            ||>> interp fmla3
+        in (HOLogic.mk_conj (HOLogic.mk_imp (fmla1', fmla2'),
+                             HOLogic.mk_imp (HOLogic.mk_not fmla1', fmla3')),
+            thy')
+        end
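+        (*i.e. a conditional $ite_f(c, a, b) is interpreted propositionally as
+          (c --> a) & (~ c --> b)*)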
+    | Let (tptp_let_list, tptp_formula) => (*FIXME not yet implemented*)
+        raise MISINTERPRET_FORMULA ("'Let' not yet implemented", tptp_fmla)
     | Atom tptp_atom =>
         (case tptp_atom of
           TFF_Typed_Atom (symbol, tptp_type_opt) =>
@@ -667,39 +671,13 @@
              type_map tptp_term
         | THF_Atom_conn_term symbol =>
             (interpret_symbol config thy language const_map symbol, thy))
-    | THF_type _ => (*FIXME unsupported*)
+    | Type_fmla _ => (*FIXME unsupported*)
          raise MISINTERPRET_FORMULA
           ("Cannot interpret types as formulas", tptp_fmla)
     | THF_typing (tptp_formula, _) => (*FIXME ignoring type info*)
         interpret_formula config language
          const_map var_types type_map tptp_formula thy
 
-
-(*Extract name of inference rule, and the inferences it relies on*)
-(*This is tuned to work with LEO-II, and is brittle to upstream
-  changes of the proof protocol.*)
-fun extract_inference_info annot =
-  let
-    fun get_line_id (General_Data (Number (Int_num, num))) = [num]
-      | get_line_id (General_Data (Atomic_Word name)) = [name]
-      | get_line_id (General_Term (Number (Int_num, num), _ (*e.g. a bind*))) = [num]
-      | get_line_id _ = []
-        (*e.g. General_Data (Application ("theory", [General_Data
-          (Atomic_Word "equality")])) -- which would come from E through LEO-II*)
-  in
-    case annot of
-      NONE => NONE
-    | SOME annot =>
-      (case annot of
-        (General_Data (Application ("inference",
-        [General_Data (Atomic_Word inference_name),
-         _,
-         General_List related_lines])), _) =>
-          (SOME (inference_name, map get_line_id related_lines |> List.concat))
-      | _ => NONE)
-  end
-
-
 (*Extract the type from a typing*)
 local
   fun extract_type tptp_type =
@@ -746,7 +724,7 @@
            in
              case tptp_ty of
                Defined_type Type_Type => (*add new type*)
-                 (*generate an Isabelle/HOL type to interpret this THF type,
+                 (*generate an Isabelle/HOL type to interpret this TPTP type,
                  and add it to both the Isabelle/HOL theory and to the type_map*)
                   let
                     val (type_map', thy') =
@@ -817,7 +795,7 @@
             ((type_map, const_map,
               [(label, role, tptp_formula,
                 Syntax.check_term (Proof_Context.init_global thy') t,
-                extract_inference_info annotation_opt)]), thy')
+                TPTP_Proof.extract_inference_info annotation_opt)]), thy')
            end
 
 and interpret_problem new_basic_types config path_prefix lines type_map const_map thy =
@@ -889,7 +867,11 @@
 val _ =
   Outer_Syntax.improper_command @{command_spec "import_tptp"} "import TPTP problem"
     (Parse.path >> (fn path =>
-      Toplevel.theory (fn thy => import_file true (Path.dir path) path [] [] thy)))
+      Toplevel.theory (fn thy =>
+       (*NOTE: assumes the Axioms directory is on the same level as the Problems one
+         (which is how TPTP is currently organised)*)
+       import_file true (Path.appends [Path.dir path, Path.parent, Path.parent])
+        path [] [] thy)))
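+
+(*Example Isar usage (the path below is invented for illustration):
+    import_tptp "$TPTP/Problems/SYN/SYN000+1.p"
+  The include path prefix is then the grandparent of the problem's directory
+  (the TPTP root), so includes such as 'Axioms/...' resolve as intended.*)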
 
 
 (*Used for debugging. Returns all files contained within a directory or its
--- a/src/HOL/TPTP/TPTP_Parser/tptp_lexyacc.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_lexyacc.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -13,6 +13,10 @@
 sig
 type ('a,'b) token
 type svalue
+val LET_TT:  'a * 'a -> (svalue,'a) token
+val LET_FT:  'a * 'a -> (svalue,'a) token
+val LET_FF:  'a * 'a -> (svalue,'a) token
+val LET_TF:  'a * 'a -> (svalue,'a) token
 val ITE_T:  'a * 'a -> (svalue,'a) token
 val ITE_F:  'a * 'a -> (svalue,'a) token
 val CNF:  'a * 'a -> (svalue,'a) token
@@ -21,8 +25,8 @@
 val THF:  'a * 'a -> (svalue,'a) token
 val LET_TERM:  'a * 'a -> (svalue,'a) token
 val SUBTYPE:  'a * 'a -> (svalue,'a) token
-val ATOMIC_SYSTEM_WORD: (string) *  'a * 'a -> (svalue,'a) token
-val ATOMIC_DEFINED_WORD: (string) *  'a * 'a -> (svalue,'a) token
+val DOLLAR_DOLLAR_WORD: (string) *  'a * 'a -> (svalue,'a) token
+val DOLLAR_WORD: (string) *  'a * 'a -> (svalue,'a) token
 val DEP_PROD:  'a * 'a -> (svalue,'a) token
 val DEP_SUM:  'a * 'a -> (svalue,'a) token
 val GENTZEN_ARROW:  'a * 'a -> (svalue,'a) token
@@ -76,7 +80,7 @@
 val INCLUDE:  'a * 'a -> (svalue,'a) token
 val IMPLIES:  'a * 'a -> (svalue,'a) token
 val IFF:  'a * 'a -> (svalue,'a) token
-val IF:  'a * 'a -> (svalue,'a) token
+val FI:  'a * 'a -> (svalue,'a) token
 val ARROW:  'a * 'a -> (svalue,'a) token
 val LET:  'a * 'a -> (svalue,'a) token
 val EXCLAMATION:  'a * 'a -> (svalue,'a) token
@@ -103,8 +107,8 @@
 
  Notes:
  * Omit %full in definitions to restrict alphabet to ascii.
- * Could include %posarg to ensure that start counting character positions from
-   0, but it would punish performance.
+ * Could include %posarg to ensure that we'd start counting character positions
+   from 0, but it would hurt performance.
  * %s AF F COMMENT; -- could improve by making stateful.
 
  Acknowledgements:
@@ -170,9 +174,9 @@
 \\000"
 ),
  (1, 
-"\000\000\000\000\000\000\000\000\000\134\136\000\000\135\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\134\130\124\000\102\090\089\083\082\081\080\078\077\072\070\057\
+"\000\000\000\000\000\000\000\000\000\144\146\000\000\145\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\144\140\134\000\102\090\089\083\082\081\080\078\077\072\070\057\
 \\048\048\048\048\048\048\048\048\048\048\045\000\039\037\036\033\
 \\030\029\029\029\029\029\029\029\029\029\029\029\029\029\029\029\
 \\029\029\029\029\029\029\029\029\029\029\029\028\000\027\026\000\
@@ -843,11 +847,11 @@
  (102, 
 "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\122\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\103\103\119\103\103\115\103\103\109\103\103\103\103\103\103\
+\\000\000\000\000\132\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\103\103\129\103\103\125\103\103\119\103\103\109\103\103\103\
 \\103\103\103\103\104\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
@@ -902,8 +906,8 @@
 \\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
 \\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
-\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
-\\103\103\103\103\110\103\103\103\103\103\103\000\000\000\000\000\
+\\000\103\103\103\103\110\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
  (110, 
@@ -913,8 +917,8 @@
 \\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
 \\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
-\\000\103\103\103\103\111\103\103\103\103\103\103\103\103\103\103\
-\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\111\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
  (111, 
@@ -935,19 +939,19 @@
 \\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
 \\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
-\\000\103\103\103\103\103\114\103\103\103\103\103\103\103\103\103\
+\\000\103\103\103\103\103\116\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\113\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
- (115, 
+ (113, 
 "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
 \\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
-\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\116\
-\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
+\\000\103\103\103\103\103\115\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\114\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
  (116, 
@@ -968,8 +972,8 @@
 \\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
 \\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
-\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\120\103\
-\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\120\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
  (120, 
@@ -979,7 +983,18 @@
 \\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
 \\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
-\\000\103\103\103\103\103\121\103\103\103\103\103\103\103\103\103\
+\\000\103\103\103\103\121\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
+\\000"
+),
+ (121, 
+"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\122\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
 \\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
@@ -987,72 +1002,127 @@
 "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\123\123\123\123\123\123\123\123\123\123\123\123\123\123\123\
-\\123\123\123\123\123\123\123\123\123\123\123\000\000\000\000\000\
+\\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
+\\000\103\103\103\103\103\124\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\123\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
- (123, 
+ (125, 
 "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\123\123\123\123\123\123\123\123\123\123\000\000\000\000\000\000\
-\\000\123\123\123\123\123\123\123\123\123\123\123\123\123\123\123\
-\\123\123\123\123\123\123\123\123\123\123\123\000\000\000\000\123\
-\\000\123\123\123\123\123\123\123\123\123\123\123\123\123\123\123\
-\\123\123\123\123\123\123\123\123\123\123\123\000\000\000\000\000\
+\\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\126\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
 \\000"
 ),
- (124, 
-"\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\000\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\129\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125"
+ (126, 
+"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
+\\000\103\103\103\103\103\128\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\127\103\103\103\103\103\103\000\000\000\000\000\
+\\000"
 ),
- (125, 
-"\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\128\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\126\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125"
-),
- (126, 
-"\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\127\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\126\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\125\
-\\125"
+ (129, 
+"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\130\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
+\\000"
 ),
  (130, 
 "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\133\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\132\131\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\000\
+\\000\103\103\103\103\103\103\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\103\
+\\000\103\103\103\103\103\131\103\103\103\103\103\103\103\103\103\
+\\103\103\103\103\103\103\103\103\103\103\103\000\000\000\000\000\
+\\000"
+),
+ (132, 
+"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\133\133\133\133\133\133\133\133\133\133\133\133\133\133\133\
+\\133\133\133\133\133\133\133\133\133\133\133\000\000\000\000\000\
+\\000"
+),
+ (133, 
+"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\133\133\133\133\133\133\133\133\133\133\000\000\000\000\000\000\
+\\000\133\133\133\133\133\133\133\133\133\133\133\133\133\133\133\
+\\133\133\133\133\133\133\133\133\133\133\133\000\000\000\000\133\
+\\000\133\133\133\133\133\133\133\133\133\133\133\133\133\133\133\
+\\133\133\133\133\133\133\133\133\133\133\133\000\000\000\000\000\
+\\000"
+),
+ (134, 
+"\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\000\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\139\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135"
+),
+ (135, 
+"\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\138\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\136\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135"
+),
+ (136, 
+"\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\137\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\136\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\135\
+\\135"
+),
+ (140, 
+"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\143\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\142\141\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000"
 ),
- (134, 
-"\000\000\000\000\000\000\000\000\000\134\000\000\000\000\000\000\
-\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
-\\134\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+ (144, 
+"\000\000\000\000\000\000\000\000\000\144\000\000\000\000\000\000\
+\\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
+\\144\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
@@ -1060,8 +1130,8 @@
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000"
 ),
- (135, 
-"\000\000\000\000\000\000\000\000\000\000\136\000\000\000\000\000\
+ (145, 
+"\000\000\000\000\000\000\000\000\000\000\146\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
 \\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
@@ -1086,25 +1156,25 @@
 {fin = [(N 71)], trans = 0},
 {fin = [(N 61)], trans = 0},
 {fin = [(N 86)], trans = 0},
-{fin = [(N 251)], trans = 7},
-{fin = [(N 251)], trans = 8},
-{fin = [(N 251)], trans = 9},
-{fin = [(N 186),(N 251)], trans = 7},
-{fin = [(N 251)], trans = 11},
-{fin = [(N 198),(N 251)], trans = 7},
-{fin = [(N 251)], trans = 13},
-{fin = [(N 251)], trans = 14},
-{fin = [(N 251)], trans = 15},
-{fin = [(N 251)], trans = 16},
-{fin = [(N 251)], trans = 17},
-{fin = [(N 251)], trans = 18},
-{fin = [(N 206),(N 251)], trans = 7},
-{fin = [(N 251)], trans = 20},
-{fin = [(N 251)], trans = 21},
-{fin = [(N 190),(N 251)], trans = 7},
-{fin = [(N 251)], trans = 23},
-{fin = [(N 251)], trans = 24},
-{fin = [(N 194),(N 251)], trans = 7},
+{fin = [(N 283)], trans = 7},
+{fin = [(N 283)], trans = 8},
+{fin = [(N 283)], trans = 9},
+{fin = [(N 186),(N 283)], trans = 7},
+{fin = [(N 283)], trans = 11},
+{fin = [(N 198),(N 283)], trans = 7},
+{fin = [(N 283)], trans = 13},
+{fin = [(N 283)], trans = 14},
+{fin = [(N 283)], trans = 15},
+{fin = [(N 283)], trans = 16},
+{fin = [(N 283)], trans = 17},
+{fin = [(N 283)], trans = 18},
+{fin = [(N 206),(N 283)], trans = 7},
+{fin = [(N 283)], trans = 20},
+{fin = [(N 283)], trans = 21},
+{fin = [(N 190),(N 283)], trans = 7},
+{fin = [(N 283)], trans = 23},
+{fin = [(N 283)], trans = 24},
+{fin = [(N 194),(N 283)], trans = 7},
 {fin = [(N 25)], trans = 0},
 {fin = [(N 80)], trans = 0},
 {fin = [(N 50)], trans = 0},
@@ -1114,7 +1184,7 @@
 {fin = [(N 12)], trans = 0},
 {fin = [(N 78)], trans = 33},
 {fin = [(N 21)], trans = 0},
-{fin = [(N 283)], trans = 0},
+{fin = [(N 315)], trans = 0},
 {fin = [(N 38)], trans = 0},
 {fin = [(N 31)], trans = 37},
 {fin = [(N 48)], trans = 0},
@@ -1123,10 +1193,10 @@
 {fin = [(N 68)], trans = 0},
 {fin = [(N 41)], trans = 42},
 {fin = [(N 45)], trans = 0},
-{fin = [(N 277)], trans = 0},
+{fin = [(N 309)], trans = 0},
 {fin = [(N 27)], trans = 45},
 {fin = [(N 36)], trans = 0},
-{fin = [(N 286)], trans = 0},
+{fin = [(N 318)], trans = 0},
 {fin = [(N 126)], trans = 48},
 {fin = [], trans = 49},
 {fin = [(N 104)], trans = 49},
@@ -1155,11 +1225,11 @@
 {fin = [(N 55)], trans = 0},
 {fin = [(N 123)], trans = 74},
 {fin = [(N 58)], trans = 75},
-{fin = [(N 274)], trans = 0},
+{fin = [(N 306)], trans = 0},
 {fin = [(N 29)], trans = 0},
-{fin = [(N 268)], trans = 78},
+{fin = [(N 300)], trans = 78},
 {fin = [(N 76)], trans = 0},
-{fin = [(N 270)], trans = 0},
+{fin = [(N 302)], trans = 0},
 {fin = [(N 82)], trans = 0},
 {fin = [(N 52)], trans = 0},
 {fin = [], trans = 83},
@@ -1182,39 +1252,49 @@
 {fin = [(N 182)], trans = 100},
 {fin = [(N 182)], trans = 101},
 {fin = [], trans = 102},
-{fin = [(N 266)], trans = 103},
-{fin = [(N 266)], trans = 104},
-{fin = [(N 266)], trans = 105},
-{fin = [(N 211),(N 266)], trans = 103},
-{fin = [(N 266)], trans = 107},
-{fin = [(N 231),(N 266)], trans = 103},
-{fin = [(N 266)], trans = 109},
-{fin = [(N 266)], trans = 110},
-{fin = [(N 266)], trans = 111},
-{fin = [(N 266)], trans = 112},
-{fin = [(N 245),(N 266)], trans = 103},
-{fin = [(N 238),(N 266)], trans = 103},
-{fin = [(N 266)], trans = 115},
-{fin = [(N 266)], trans = 116},
-{fin = [(N 226),(N 266)], trans = 103},
-{fin = [(N 216),(N 266)], trans = 103},
-{fin = [(N 266)], trans = 119},
-{fin = [(N 266)], trans = 120},
-{fin = [(N 221),(N 266)], trans = 103},
-{fin = [], trans = 122},
-{fin = [(N 259)], trans = 123},
-{fin = [], trans = 124},
-{fin = [], trans = 125},
-{fin = [], trans = 126},
-{fin = [(N 95)], trans = 125},
+{fin = [(N 290)], trans = 103},
+{fin = [(N 290)], trans = 104},
+{fin = [(N 290)], trans = 105},
+{fin = [(N 211),(N 290)], trans = 103},
+{fin = [(N 290)], trans = 107},
+{fin = [(N 231),(N 290)], trans = 103},
+{fin = [(N 290)], trans = 109},
+{fin = [(N 290)], trans = 110},
+{fin = [(N 290)], trans = 111},
+{fin = [(N 290)], trans = 112},
+{fin = [(N 290)], trans = 113},
+{fin = [(N 277),(N 290)], trans = 103},
+{fin = [(N 253),(N 290)], trans = 103},
+{fin = [(N 290)], trans = 116},
+{fin = [(N 269),(N 290)], trans = 103},
+{fin = [(N 261),(N 290)], trans = 103},
+{fin = [(N 290)], trans = 119},
+{fin = [(N 290)], trans = 120},
+{fin = [(N 290)], trans = 121},
+{fin = [(N 290)], trans = 122},
+{fin = [(N 245),(N 290)], trans = 103},
+{fin = [(N 238),(N 290)], trans = 103},
+{fin = [(N 290)], trans = 125},
+{fin = [(N 290)], trans = 126},
+{fin = [(N 226),(N 290)], trans = 103},
+{fin = [(N 216),(N 290)], trans = 103},
+{fin = [(N 290)], trans = 129},
+{fin = [(N 290)], trans = 130},
+{fin = [(N 221),(N 290)], trans = 103},
+{fin = [], trans = 132},
+{fin = [(N 298)], trans = 133},
+{fin = [], trans = 134},
+{fin = [], trans = 135},
+{fin = [], trans = 136},
+{fin = [(N 95)], trans = 135},
 {fin = [(N 95)], trans = 0},
-{fin = [], trans = 126},
-{fin = [(N 33)], trans = 130},
-{fin = [(N 280)], trans = 0},
+{fin = [], trans = 136},
+{fin = [(N 33)], trans = 140},
+{fin = [(N 312)], trans = 0},
 {fin = [(N 64)], trans = 0},
 {fin = [(N 18)], trans = 0},
-{fin = [(N 2)], trans = 134},
-{fin = [(N 7)], trans = 135},
+{fin = [(N 2)], trans = 144},
+{fin = [(N 7)], trans = 145},
 {fin = [(N 7)], trans = 0}])
 end
 structure StartStates =
@@ -1284,23 +1364,27 @@
 | 238 => (col:=yypos-(!eolpos); T.ITE_F(!linep,!col))
 | 245 => (col:=yypos-(!eolpos); T.ITE_T(!linep,!col))
 | 25 => (col:=yypos-(!eolpos); T.CARET(!linep,!col))
-| 251 => let val yytext=yymktext() in col:=yypos-(!eolpos); T.LOWER_WORD(yytext,!linep,!col) end
-| 259 => let val yytext=yymktext() in col:=yypos-(!eolpos); T.ATOMIC_SYSTEM_WORD(yytext,!linep,!col) end
-| 266 => let val yytext=yymktext() in col:=yypos-(!eolpos); T.ATOMIC_DEFINED_WORD(yytext,!linep,!col) end
-| 268 => (col:=yypos-(!eolpos); T.PLUS(!linep,!col))
+| 253 => (col:=yypos-(!eolpos); T.LET_TF(!linep,!col))
+| 261 => (col:=yypos-(!eolpos); T.LET_FF(!linep,!col))
+| 269 => (col:=yypos-(!eolpos); T.LET_FT(!linep,!col))
 | 27 => (col:=yypos-(!eolpos); T.COLON(!linep,!col))
-| 270 => (col:=yypos-(!eolpos); T.TIMES(!linep,!col))
-| 274 => (col:=yypos-(!eolpos); T.GENTZEN_ARROW(!linep,!col))
-| 277 => (col:=yypos-(!eolpos); T.SUBTYPE(!linep,!col))
-| 280 => (col:=yypos-(!eolpos); T.DEP_PROD(!linep,!col))
-| 283 => (col:=yypos-(!eolpos); T.DEP_SUM(!linep,!col))
-| 286 => (col:=yypos-(!eolpos); T.LET_TERM(!linep,!col))
+| 277 => (col:=yypos-(!eolpos); T.LET_TT(!linep,!col))
+| 283 => let val yytext=yymktext() in col:=yypos-(!eolpos); T.LOWER_WORD(yytext,!linep,!col) end
 | 29 => (col:=yypos-(!eolpos); T.COMMA(!linep,!col))
+| 290 => let val yytext=yymktext() in col:=yypos-(!eolpos); T.DOLLAR_WORD(yytext,!linep,!col) end
+| 298 => let val yytext=yymktext() in col:=yypos-(!eolpos); T.DOLLAR_DOLLAR_WORD(yytext,!linep,!col) end
+| 300 => (col:=yypos-(!eolpos); T.PLUS(!linep,!col))
+| 302 => (col:=yypos-(!eolpos); T.TIMES(!linep,!col))
+| 306 => (col:=yypos-(!eolpos); T.GENTZEN_ARROW(!linep,!col))
+| 309 => (col:=yypos-(!eolpos); T.SUBTYPE(!linep,!col))
 | 31 => (col:=yypos-(!eolpos); T.EQUALS(!linep,!col))
+| 312 => (col:=yypos-(!eolpos); T.DEP_PROD(!linep,!col))
+| 315 => (col:=yypos-(!eolpos); T.DEP_SUM(!linep,!col))
+| 318 => (col:=yypos-(!eolpos); T.LET_TERM(!linep,!col))
 | 33 => (col:=yypos-(!eolpos); T.EXCLAMATION(!linep,!col))
 | 36 => (col:=yypos-(!eolpos); T.LET(!linep,!col))
 | 38 => (col:=yypos-(!eolpos); T.ARROW(!linep,!col))
-| 41 => (col:=yypos-(!eolpos); T.IF(!linep,!col))
+| 41 => (col:=yypos-(!eolpos); T.FI(!linep,!col))
 | 45 => (col:=yypos-(!eolpos); T.IFF(!linep,!col))
 | 48 => (col:=yypos-(!eolpos); T.IMPLIES(!linep,!col))
 | 50 => (col:=yypos-(!eolpos); T.LBRKT(!linep,!col))
@@ -1392,6 +1476,9 @@
   | "unknown" => Role_Unknown
   | thing => raise (UNRECOGNISED_ROLE thing)
 
+fun extract_quant_info (Quant (quantifier, vars, tptp_formula)) =
+  (quantifier, vars, tptp_formula)
+
 
 end
 structure LrTable = Token.LrTable
@@ -1399,93 +1486,94 @@
 local open LrTable in 
 val table=let val actionRows =
 "\
-\\001\000\001\000\032\002\004\000\155\002\005\000\032\002\006\000\032\002\
-\\010\000\032\002\011\000\032\002\012\000\032\002\016\000\212\000\
-\\019\000\032\002\020\000\032\002\021\000\032\002\022\000\032\002\
-\\027\000\032\002\037\000\032\002\000\000\
-\\001\000\001\000\044\002\004\000\154\002\005\000\044\002\006\000\044\002\
-\\010\000\044\002\011\000\044\002\012\000\044\002\016\000\217\000\
-\\019\000\044\002\020\000\044\002\021\000\044\002\022\000\044\002\
-\\027\000\044\002\037\000\044\002\000\000\
-\\001\000\001\000\054\002\005\000\054\002\006\000\049\002\010\000\054\002\
-\\011\000\054\002\012\000\054\002\019\000\054\002\020\000\049\002\
-\\021\000\054\002\022\000\054\002\026\000\054\002\027\000\054\002\
-\\037\000\054\002\000\000\
-\\001\000\001\000\061\002\005\000\061\002\006\000\039\002\010\000\061\002\
-\\011\000\061\002\012\000\061\002\019\000\061\002\020\000\039\002\
-\\021\000\061\002\022\000\061\002\026\000\061\002\027\000\061\002\
-\\037\000\061\002\000\000\
-\\001\000\001\000\064\002\005\000\064\002\006\000\047\002\010\000\064\002\
-\\011\000\064\002\012\000\064\002\019\000\064\002\020\000\047\002\
-\\021\000\064\002\022\000\064\002\026\000\064\002\027\000\064\002\
-\\037\000\064\002\000\000\
-\\001\000\001\000\170\002\005\000\170\002\006\000\052\002\010\000\170\002\
-\\011\000\170\002\012\000\170\002\019\000\170\002\020\000\052\002\
-\\021\000\170\002\022\000\170\002\026\000\170\002\027\000\170\002\
-\\037\000\170\002\000\000\
-\\001\000\001\000\225\002\002\000\225\002\004\000\213\002\005\000\225\002\
-\\006\000\225\002\008\000\225\002\009\000\225\002\010\000\225\002\
-\\011\000\225\002\012\000\225\002\019\000\225\002\020\000\225\002\
-\\021\000\225\002\022\000\225\002\026\000\225\002\027\000\225\002\
-\\037\000\225\002\059\000\225\002\060\000\225\002\000\000\
-\\001\000\001\000\228\002\002\000\228\002\004\000\214\002\005\000\228\002\
-\\006\000\228\002\008\000\228\002\009\000\228\002\010\000\228\002\
-\\011\000\228\002\012\000\228\002\019\000\228\002\020\000\228\002\
-\\021\000\228\002\022\000\228\002\026\000\228\002\027\000\228\002\
-\\037\000\228\002\059\000\228\002\060\000\228\002\000\000\
-\\001\000\001\000\206\000\003\000\205\000\006\000\204\000\007\000\119\000\
-\\008\000\203\000\010\000\202\000\011\000\201\000\012\000\200\000\
-\\013\000\035\000\015\000\199\000\016\000\198\000\019\000\197\000\
-\\020\000\196\000\021\000\195\000\022\000\194\000\025\000\116\000\
-\\028\000\115\000\037\000\193\000\044\000\096\000\045\000\095\000\
-\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\094\000\
-\\051\000\031\000\053\000\093\000\055\000\192\000\056\000\191\000\
-\\057\000\190\000\058\000\189\000\062\000\188\000\063\000\187\000\
-\\064\000\092\000\065\000\091\000\068\000\030\000\069\000\029\000\
-\\070\000\028\000\071\000\027\000\072\000\186\000\073\000\090\000\000\000\
-\\001\000\001\000\206\000\003\000\205\000\006\000\204\000\007\000\119\000\
-\\008\000\203\000\010\000\202\000\011\000\201\000\012\000\200\000\
-\\013\000\035\000\016\000\024\001\019\000\197\000\020\000\196\000\
-\\021\000\195\000\022\000\194\000\025\000\116\000\026\000\023\001\
-\\028\000\115\000\037\000\193\000\044\000\096\000\045\000\095\000\
-\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\094\000\
-\\051\000\031\000\053\000\093\000\055\000\192\000\056\000\191\000\
-\\057\000\190\000\058\000\189\000\062\000\188\000\063\000\187\000\
-\\064\000\092\000\065\000\091\000\068\000\030\000\069\000\029\000\
-\\070\000\028\000\071\000\027\000\072\000\186\000\073\000\090\000\000\000\
-\\001\000\001\000\206\000\003\000\205\000\006\000\204\000\007\000\119\000\
-\\008\000\203\000\010\000\202\000\011\000\201\000\012\000\200\000\
-\\013\000\035\000\016\000\024\001\019\000\197\000\020\000\196\000\
-\\021\000\195\000\022\000\194\000\025\000\116\000\028\000\115\000\
-\\037\000\193\000\044\000\096\000\045\000\095\000\046\000\034\000\
-\\047\000\033\000\049\000\032\000\050\000\094\000\051\000\031\000\
-\\053\000\093\000\055\000\192\000\056\000\191\000\057\000\190\000\
-\\058\000\189\000\062\000\188\000\063\000\187\000\064\000\092\000\
-\\065\000\091\000\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\072\000\186\000\073\000\090\000\000\000\
-\\001\000\001\000\206\000\003\000\205\000\006\000\204\000\007\000\119\000\
-\\008\000\203\000\010\000\202\000\011\000\201\000\012\000\200\000\
-\\013\000\035\000\016\000\097\001\019\000\197\000\020\000\196\000\
-\\021\000\195\000\022\000\194\000\025\000\116\000\028\000\115\000\
-\\037\000\193\000\044\000\096\000\045\000\095\000\046\000\034\000\
-\\047\000\033\000\049\000\032\000\050\000\094\000\051\000\031\000\
-\\053\000\093\000\055\000\192\000\056\000\191\000\057\000\190\000\
-\\058\000\189\000\062\000\188\000\063\000\187\000\064\000\092\000\
-\\065\000\091\000\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\072\000\186\000\073\000\090\000\000\000\
-\\001\000\001\000\007\001\002\000\006\001\005\000\243\002\006\000\204\000\
-\\008\000\243\002\009\000\210\002\010\000\202\000\011\000\201\000\
-\\012\000\200\000\019\000\197\000\020\000\196\000\021\000\195\000\
-\\022\000\194\000\026\000\243\002\027\000\243\002\037\000\005\001\
-\\059\000\210\002\060\000\210\002\000\000\
-\\001\000\004\000\243\000\000\000\
-\\001\000\004\000\008\001\000\000\
-\\001\000\004\000\193\001\000\000\
-\\001\000\004\000\201\001\000\000\
+\\001\000\001\000\052\002\002\000\052\002\004\000\069\002\005\000\052\002\
+\\006\000\052\002\009\000\052\002\010\000\052\002\011\000\052\002\
+\\012\000\052\002\019\000\052\002\020\000\052\002\021\000\052\002\
+\\022\000\052\002\026\000\052\002\027\000\052\002\037\000\052\002\
+\\059\000\052\002\060\000\052\002\000\000\
+\\001\000\001\000\055\002\002\000\055\002\004\000\070\002\005\000\055\002\
+\\006\000\055\002\009\000\055\002\010\000\055\002\011\000\055\002\
+\\012\000\055\002\019\000\055\002\020\000\055\002\021\000\055\002\
+\\022\000\055\002\026\000\055\002\027\000\055\002\037\000\055\002\
+\\059\000\055\002\060\000\055\002\000\000\
+\\001\000\001\000\219\002\005\000\219\002\006\000\234\002\010\000\219\002\
+\\011\000\219\002\012\000\219\002\019\000\219\002\020\000\234\002\
+\\021\000\219\002\022\000\219\002\026\000\219\002\027\000\219\002\
+\\037\000\219\002\000\000\
+\\001\000\001\000\222\002\005\000\222\002\006\000\245\002\010\000\222\002\
+\\011\000\222\002\012\000\222\002\019\000\222\002\020\000\245\002\
+\\021\000\222\002\022\000\222\002\026\000\222\002\027\000\222\002\
+\\037\000\222\002\000\000\
+\\001\000\001\000\229\002\005\000\229\002\006\000\236\002\010\000\229\002\
+\\011\000\229\002\012\000\229\002\019\000\229\002\020\000\236\002\
+\\021\000\229\002\022\000\229\002\026\000\229\002\027\000\229\002\
+\\037\000\229\002\000\000\
+\\001\000\001\000\239\002\004\000\130\002\005\000\239\002\006\000\239\002\
+\\010\000\239\002\011\000\239\002\012\000\239\002\016\000\222\000\
+\\019\000\239\002\020\000\239\002\021\000\239\002\022\000\239\002\
+\\027\000\239\002\037\000\239\002\000\000\
+\\001\000\001\000\252\002\004\000\131\002\005\000\252\002\006\000\252\002\
+\\010\000\252\002\011\000\252\002\012\000\252\002\016\000\217\000\
+\\019\000\252\002\020\000\252\002\021\000\252\002\022\000\252\002\
+\\027\000\252\002\037\000\252\002\000\000\
+\\001\000\001\000\211\000\003\000\210\000\006\000\209\000\007\000\124\000\
+\\010\000\208\000\011\000\207\000\012\000\206\000\013\000\035\000\
+\\015\000\205\000\016\000\204\000\019\000\203\000\020\000\202\000\
+\\021\000\201\000\022\000\200\000\025\000\121\000\028\000\120\000\
+\\037\000\199\000\044\000\101\000\045\000\100\000\046\000\034\000\
+\\047\000\033\000\049\000\032\000\050\000\099\000\051\000\031\000\
+\\053\000\098\000\055\000\198\000\056\000\197\000\057\000\196\000\
+\\058\000\195\000\062\000\194\000\063\000\193\000\064\000\097\000\
+\\065\000\096\000\068\000\030\000\069\000\029\000\070\000\028\000\
+\\071\000\027\000\072\000\192\000\073\000\095\000\074\000\191\000\
+\\076\000\094\000\077\000\093\000\000\000\
+\\001\000\001\000\211\000\003\000\210\000\006\000\209\000\007\000\124\000\
+\\010\000\208\000\011\000\207\000\012\000\206\000\013\000\035\000\
+\\016\000\033\001\019\000\203\000\020\000\202\000\021\000\201\000\
+\\022\000\200\000\025\000\121\000\026\000\032\001\028\000\120\000\
+\\037\000\199\000\044\000\101\000\045\000\100\000\046\000\034\000\
+\\047\000\033\000\049\000\032\000\050\000\099\000\051\000\031\000\
+\\053\000\098\000\055\000\198\000\056\000\197\000\057\000\196\000\
+\\058\000\195\000\062\000\194\000\063\000\193\000\064\000\097\000\
+\\065\000\096\000\068\000\030\000\069\000\029\000\070\000\028\000\
+\\071\000\027\000\072\000\192\000\073\000\095\000\074\000\191\000\
+\\076\000\094\000\077\000\093\000\000\000\
+\\001\000\001\000\211\000\003\000\210\000\006\000\209\000\007\000\124\000\
+\\010\000\208\000\011\000\207\000\012\000\206\000\013\000\035\000\
+\\016\000\033\001\019\000\203\000\020\000\202\000\021\000\201\000\
+\\022\000\200\000\025\000\121\000\028\000\120\000\037\000\199\000\
+\\044\000\101\000\045\000\100\000\046\000\034\000\047\000\033\000\
+\\049\000\032\000\050\000\099\000\051\000\031\000\053\000\098\000\
+\\055\000\198\000\056\000\197\000\057\000\196\000\058\000\195\000\
+\\062\000\194\000\063\000\193\000\064\000\097\000\065\000\096\000\
+\\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
+\\072\000\192\000\073\000\095\000\074\000\191\000\076\000\094\000\
+\\077\000\093\000\000\000\
+\\001\000\001\000\211\000\003\000\210\000\006\000\209\000\007\000\124\000\
+\\010\000\208\000\011\000\207\000\012\000\206\000\013\000\035\000\
+\\016\000\110\001\019\000\203\000\020\000\202\000\021\000\201\000\
+\\022\000\200\000\025\000\121\000\028\000\120\000\037\000\199\000\
+\\044\000\101\000\045\000\100\000\046\000\034\000\047\000\033\000\
+\\049\000\032\000\050\000\099\000\051\000\031\000\053\000\098\000\
+\\055\000\198\000\056\000\197\000\057\000\196\000\058\000\195\000\
+\\062\000\194\000\063\000\193\000\064\000\097\000\065\000\096\000\
+\\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
+\\072\000\192\000\073\000\095\000\074\000\191\000\076\000\094\000\
+\\077\000\093\000\000\000\
+\\001\000\001\000\015\001\002\000\014\001\005\000\034\002\006\000\209\000\
+\\009\000\073\002\010\000\208\000\011\000\207\000\012\000\206\000\
+\\019\000\203\000\020\000\202\000\021\000\201\000\022\000\200\000\
+\\026\000\034\002\027\000\034\002\037\000\013\001\059\000\073\002\
+\\060\000\073\002\000\000\
+\\001\000\003\000\210\000\007\000\124\000\025\000\121\000\055\000\198\000\
+\\056\000\197\000\062\000\194\000\063\000\193\000\000\000\
+\\001\000\004\000\250\000\000\000\
+\\001\000\004\000\016\001\000\000\
 \\001\000\004\000\205\001\000\000\
-\\001\000\004\000\211\001\000\000\
-\\001\000\004\000\216\001\000\000\
-\\001\000\005\000\152\002\009\000\150\002\027\000\152\002\000\000\
+\\001\000\004\000\217\001\000\000\
+\\001\000\004\000\224\001\000\000\
+\\001\000\004\000\255\001\000\000\
+\\001\000\005\000\132\002\009\000\139\002\027\000\132\002\000\000\
 \\001\000\005\000\041\000\000\000\
 \\001\000\005\000\042\000\000\000\
 \\001\000\005\000\043\000\000\000\
@@ -1494,200 +1582,201 @@
 \\001\000\005\000\055\000\000\000\
 \\001\000\005\000\056\000\000\000\
 \\001\000\005\000\057\000\000\000\
-\\001\000\005\000\147\001\000\000\
-\\001\000\005\000\161\001\000\000\
-\\001\000\005\000\174\001\000\000\
-\\001\000\005\000\226\001\000\000\
-\\001\000\005\000\232\001\000\000\
-\\001\000\005\000\235\001\000\000\
-\\001\000\006\000\204\000\000\000\
-\\001\000\006\000\204\000\020\000\196\000\000\000\
-\\001\000\007\000\119\000\008\000\146\000\013\000\035\000\015\000\145\000\
-\\016\000\144\000\025\000\116\000\028\000\115\000\044\000\096\000\
-\\045\000\095\000\046\000\034\000\047\000\033\000\049\000\032\000\
-\\050\000\094\000\051\000\031\000\053\000\093\000\064\000\092\000\
-\\065\000\091\000\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\072\000\143\000\073\000\090\000\000\000\
-\\001\000\007\000\119\000\008\000\146\000\013\000\035\000\016\000\247\000\
-\\025\000\116\000\026\000\254\000\028\000\115\000\044\000\096\000\
-\\045\000\095\000\046\000\034\000\047\000\033\000\049\000\032\000\
-\\050\000\094\000\051\000\031\000\053\000\093\000\064\000\092\000\
-\\065\000\091\000\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\072\000\143\000\073\000\090\000\000\000\
-\\001\000\007\000\119\000\008\000\146\000\013\000\035\000\016\000\247\000\
-\\025\000\116\000\028\000\115\000\044\000\096\000\045\000\095\000\
-\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\094\000\
-\\051\000\031\000\053\000\093\000\064\000\092\000\065\000\091\000\
+\\001\000\005\000\158\001\000\000\
+\\001\000\005\000\159\001\000\000\
+\\001\000\005\000\160\001\000\000\
+\\001\000\005\000\177\001\000\000\
+\\001\000\005\000\178\001\000\000\
+\\001\000\005\000\179\001\000\000\
+\\001\000\005\000\187\001\000\000\
+\\001\000\005\000\188\001\000\000\
+\\001\000\005\000\238\001\000\000\
+\\001\000\005\000\249\001\000\000\
+\\001\000\005\000\252\001\000\000\
+\\001\000\006\000\209\000\000\000\
+\\001\000\006\000\209\000\020\000\202\000\000\000\
+\\001\000\007\000\124\000\013\000\035\000\015\000\123\000\016\000\122\000\
+\\025\000\121\000\028\000\120\000\044\000\101\000\045\000\100\000\
+\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\099\000\
+\\051\000\031\000\053\000\098\000\064\000\097\000\065\000\096\000\
 \\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
-\\072\000\143\000\073\000\090\000\000\000\
-\\001\000\007\000\119\000\013\000\035\000\015\000\118\000\016\000\117\000\
-\\025\000\116\000\028\000\115\000\044\000\096\000\045\000\095\000\
-\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\094\000\
-\\051\000\031\000\053\000\093\000\064\000\092\000\065\000\091\000\
+\\073\000\095\000\076\000\094\000\077\000\093\000\000\000\
+\\001\000\007\000\124\000\013\000\035\000\015\000\151\000\016\000\150\000\
+\\025\000\121\000\028\000\120\000\044\000\101\000\045\000\100\000\
+\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\099\000\
+\\051\000\031\000\053\000\098\000\064\000\097\000\065\000\096\000\
 \\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
-\\073\000\090\000\000\000\
-\\001\000\007\000\119\000\013\000\035\000\016\000\231\000\025\000\116\000\
-\\026\000\236\000\028\000\115\000\044\000\096\000\045\000\095\000\
-\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\094\000\
-\\051\000\031\000\053\000\093\000\064\000\092\000\065\000\091\000\
+\\072\000\149\000\073\000\095\000\074\000\148\000\075\000\147\000\
+\\076\000\094\000\077\000\093\000\000\000\
+\\001\000\007\000\124\000\013\000\035\000\016\000\238\000\025\000\121\000\
+\\026\000\243\000\028\000\120\000\044\000\101\000\045\000\100\000\
+\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\099\000\
+\\051\000\031\000\053\000\098\000\064\000\097\000\065\000\096\000\
+\\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
+\\073\000\095\000\076\000\094\000\077\000\093\000\000\000\
+\\001\000\007\000\124\000\013\000\035\000\016\000\238\000\025\000\121\000\
+\\028\000\120\000\044\000\101\000\045\000\100\000\046\000\034\000\
+\\047\000\033\000\049\000\032\000\050\000\099\000\051\000\031\000\
+\\053\000\098\000\064\000\097\000\065\000\096\000\068\000\030\000\
+\\069\000\029\000\070\000\028\000\071\000\027\000\073\000\095\000\
+\\076\000\094\000\077\000\093\000\000\000\
+\\001\000\007\000\124\000\013\000\035\000\016\000\254\000\025\000\121\000\
+\\026\000\007\001\028\000\120\000\044\000\101\000\045\000\100\000\
+\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\099\000\
+\\051\000\031\000\053\000\098\000\064\000\097\000\065\000\096\000\
 \\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
-\\073\000\090\000\000\000\
-\\001\000\007\000\119\000\013\000\035\000\016\000\231\000\025\000\116\000\
-\\028\000\115\000\044\000\096\000\045\000\095\000\046\000\034\000\
-\\047\000\033\000\049\000\032\000\050\000\094\000\051\000\031\000\
-\\053\000\093\000\064\000\092\000\065\000\091\000\068\000\030\000\
-\\069\000\029\000\070\000\028\000\071\000\027\000\073\000\090\000\000\000\
-\\001\000\008\000\166\001\067\000\165\001\000\000\
-\\001\000\008\000\176\001\000\000\
-\\001\000\009\000\151\002\027\000\145\002\060\000\145\002\000\000\
-\\001\000\009\000\011\001\059\000\010\001\060\000\009\001\000\000\
-\\001\000\009\000\153\001\000\000\
-\\001\000\013\000\035\000\015\000\042\001\026\000\142\001\039\000\041\001\
-\\040\000\040\001\041\000\039\001\042\000\038\001\043\000\037\001\
-\\044\000\096\000\045\000\095\000\046\000\034\000\047\000\033\000\
-\\049\000\032\000\050\000\094\000\051\000\031\000\053\000\036\001\
+\\072\000\149\000\073\000\095\000\074\000\148\000\075\000\147\000\
+\\076\000\094\000\077\000\093\000\000\000\
+\\001\000\007\000\124\000\013\000\035\000\016\000\254\000\025\000\121\000\
+\\028\000\120\000\044\000\101\000\045\000\100\000\046\000\034\000\
+\\047\000\033\000\049\000\032\000\050\000\099\000\051\000\031\000\
+\\053\000\098\000\064\000\097\000\065\000\096\000\068\000\030\000\
+\\069\000\029\000\070\000\028\000\071\000\027\000\072\000\149\000\
+\\073\000\095\000\074\000\148\000\075\000\147\000\076\000\094\000\
+\\077\000\093\000\000\000\
+\\001\000\007\000\124\000\025\000\121\000\000\000\
+\\001\000\009\000\140\002\027\000\151\002\060\000\151\002\000\000\
+\\001\000\009\000\019\001\059\000\018\001\060\000\017\001\000\000\
+\\001\000\009\000\166\001\000\000\
+\\001\000\013\000\035\000\015\000\050\001\026\000\153\001\039\000\049\001\
+\\040\000\048\001\041\000\047\001\042\000\046\001\043\000\045\001\
+\\044\000\101\000\045\000\100\000\046\000\034\000\047\000\033\000\
+\\049\000\032\000\050\000\099\000\051\000\031\000\053\000\044\001\
 \\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\000\000\
-\\001\000\013\000\035\000\015\000\042\001\039\000\041\001\040\000\040\001\
-\\041\000\039\001\042\000\038\001\043\000\037\001\044\000\096\000\
-\\045\000\095\000\046\000\034\000\047\000\033\000\049\000\032\000\
-\\050\000\094\000\051\000\031\000\053\000\036\001\068\000\030\000\
+\\001\000\013\000\035\000\015\000\050\001\039\000\049\001\040\000\048\001\
+\\041\000\047\001\042\000\046\001\043\000\045\001\044\000\101\000\
+\\045\000\100\000\046\000\034\000\047\000\033\000\049\000\032\000\
+\\050\000\099\000\051\000\031\000\053\000\044\001\068\000\030\000\
 \\069\000\029\000\070\000\028\000\071\000\027\000\000\000\
-\\001\000\013\000\035\000\016\000\098\000\028\000\097\000\044\000\096\000\
-\\045\000\095\000\046\000\034\000\047\000\033\000\049\000\032\000\
-\\050\000\094\000\051\000\031\000\053\000\093\000\064\000\092\000\
-\\065\000\091\000\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\073\000\090\000\000\000\
-\\001\000\013\000\035\000\016\000\078\001\049\000\032\000\051\000\031\000\
-\\064\000\077\001\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\000\000\
-\\001\000\013\000\035\000\016\000\157\001\049\000\032\000\051\000\031\000\
-\\064\000\077\001\068\000\030\000\069\000\029\000\070\000\028\000\
-\\071\000\027\000\000\000\
-\\001\000\013\000\035\000\028\000\097\000\044\000\096\000\045\000\095\000\
-\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\094\000\
-\\051\000\031\000\053\000\093\000\064\000\092\000\065\000\091\000\
+\\001\000\013\000\035\000\016\000\103\000\028\000\102\000\044\000\101\000\
+\\045\000\100\000\046\000\034\000\047\000\033\000\049\000\032\000\
+\\050\000\099\000\051\000\031\000\053\000\098\000\064\000\097\000\
+\\065\000\096\000\068\000\030\000\069\000\029\000\070\000\028\000\
+\\071\000\027\000\073\000\095\000\076\000\094\000\077\000\093\000\000\000\
+\\001\000\013\000\035\000\016\000\093\001\049\000\032\000\050\000\099\000\
+\\051\000\031\000\063\000\092\001\064\000\097\000\068\000\030\000\
+\\069\000\029\000\070\000\028\000\071\000\027\000\000\000\
+\\001\000\013\000\035\000\016\000\173\001\049\000\032\000\050\000\099\000\
+\\051\000\031\000\063\000\092\001\064\000\097\000\068\000\030\000\
+\\069\000\029\000\070\000\028\000\071\000\027\000\000\000\
+\\001\000\013\000\035\000\016\000\005\002\049\000\032\000\050\000\099\000\
+\\051\000\031\000\064\000\097\000\068\000\030\000\069\000\029\000\
+\\070\000\028\000\071\000\027\000\000\000\
+\\001\000\013\000\035\000\016\000\010\002\049\000\032\000\050\000\099\000\
+\\051\000\031\000\064\000\097\000\068\000\030\000\069\000\029\000\
+\\070\000\028\000\071\000\027\000\000\000\
+\\001\000\013\000\035\000\016\000\012\002\049\000\032\000\050\000\099\000\
+\\051\000\031\000\064\000\097\000\068\000\030\000\069\000\029\000\
+\\070\000\028\000\071\000\027\000\000\000\
+\\001\000\013\000\035\000\028\000\102\000\044\000\101\000\045\000\100\000\
+\\046\000\034\000\047\000\033\000\049\000\032\000\050\000\099\000\
+\\051\000\031\000\053\000\098\000\064\000\097\000\065\000\096\000\
 \\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\
-\\073\000\090\000\000\000\
-\\001\000\013\000\035\000\044\000\096\000\045\000\095\000\046\000\034\000\
-\\047\000\033\000\049\000\032\000\050\000\094\000\051\000\031\000\
-\\053\000\093\000\064\000\092\000\065\000\091\000\068\000\030\000\
-\\069\000\029\000\070\000\028\000\071\000\027\000\073\000\090\000\000\000\
+\\073\000\095\000\076\000\094\000\077\000\093\000\000\000\
+\\001\000\013\000\035\000\044\000\101\000\045\000\100\000\046\000\034\000\
+\\047\000\033\000\049\000\032\000\050\000\099\000\051\000\031\000\
+\\053\000\098\000\064\000\097\000\065\000\096\000\068\000\030\000\
+\\069\000\029\000\070\000\028\000\071\000\027\000\073\000\095\000\
+\\076\000\094\000\077\000\093\000\000\000\
 \\001\000\013\000\035\000\046\000\034\000\047\000\033\000\049\000\032\000\
 \\051\000\031\000\068\000\030\000\069\000\029\000\070\000\028\000\
 \\071\000\027\000\000\000\
-\\001\000\013\000\035\000\049\000\032\000\051\000\031\000\064\000\077\001\
-\\068\000\030\000\069\000\029\000\070\000\028\000\071\000\027\000\000\000\
+\\001\000\013\000\035\000\049\000\032\000\050\000\099\000\051\000\031\000\
+\\064\000\097\000\068\000\030\000\069\000\029\000\070\000\028\000\
+\\071\000\027\000\000\000\
 \\001\000\013\000\035\000\049\000\032\000\051\000\031\000\068\000\030\000\
 \\069\000\029\000\070\000\028\000\071\000\027\000\000\000\
 \\001\000\015\000\053\000\000\000\
-\\001\000\015\000\118\000\000\000\
-\\001\000\015\000\145\000\000\000\
-\\001\000\015\000\199\000\000\000\
-\\001\000\015\000\229\000\000\000\
-\\001\000\015\000\245\000\000\000\
-\\001\000\015\000\255\000\000\000\
-\\001\000\015\000\015\001\000\000\
-\\001\000\015\000\025\001\000\000\
-\\001\000\015\000\042\001\000\000\
+\\001\000\015\000\123\000\000\000\
+\\001\000\015\000\151\000\000\000\
+\\001\000\015\000\205\000\000\000\
+\\001\000\015\000\236\000\000\000\
+\\001\000\015\000\252\000\000\000\
+\\001\000\015\000\023\001\000\000\
+\\001\000\015\000\050\001\000\000\
+\\001\000\015\000\168\001\000\000\
 \\001\000\016\000\018\000\000\000\
 \\001\000\016\000\019\000\000\000\
 \\001\000\016\000\020\000\000\000\
 \\001\000\016\000\021\000\000\000\
 \\001\000\016\000\023\000\000\000\
-\\001\000\016\000\218\000\000\000\
-\\001\000\016\000\248\000\000\000\
-\\001\000\016\000\018\001\000\000\
-\\001\000\016\000\093\001\050\000\094\000\000\000\
-\\001\000\016\000\129\001\050\000\094\000\000\000\
-\\001\000\016\000\135\001\000\000\
-\\001\000\016\000\136\001\000\000\
-\\001\000\016\000\137\001\000\000\
-\\001\000\016\000\138\001\000\000\
-\\001\000\016\000\139\001\000\000\
+\\001\000\016\000\223\000\000\000\
+\\001\000\016\000\224\000\000\000\
+\\001\000\016\000\225\000\000\000\
+\\001\000\016\000\255\000\000\000\
+\\001\000\016\000\000\001\000\000\
+\\001\000\016\000\001\001\000\000\
+\\001\000\016\000\026\001\000\000\
+\\001\000\016\000\027\001\000\000\
+\\001\000\016\000\146\001\000\000\
+\\001\000\016\000\147\001\000\000\
+\\001\000\016\000\148\001\000\000\
+\\001\000\016\000\149\001\000\000\
+\\001\000\016\000\150\001\000\000\
 \\001\000\023\000\058\000\000\000\
-\\001\000\023\000\130\001\000\000\
-\\001\000\023\000\148\001\000\000\
-\\001\000\023\000\152\001\000\000\
-\\001\000\023\000\168\001\000\000\
-\\001\000\026\000\207\000\000\000\
-\\001\000\026\000\064\001\000\000\
-\\001\000\026\000\089\001\000\000\
-\\001\000\026\000\125\001\000\000\
-\\001\000\026\000\149\001\000\000\
-\\001\000\026\000\158\001\000\000\
-\\001\000\026\000\163\001\000\000\
-\\001\000\026\000\170\001\000\000\
-\\001\000\026\000\177\001\000\000\
-\\001\000\026\000\190\001\000\000\
+\\001\000\023\000\141\001\000\000\
+\\001\000\023\000\161\001\000\000\
+\\001\000\023\000\165\001\000\000\
+\\001\000\023\000\181\001\000\000\
+\\001\000\026\000\212\000\000\000\
+\\001\000\026\000\076\001\000\000\
+\\001\000\026\000\106\001\000\000\
+\\001\000\026\000\140\001\000\000\
+\\001\000\026\000\162\001\000\000\
+\\001\000\026\000\174\001\000\000\
+\\001\000\026\000\183\001\000\000\
+\\001\000\026\000\200\001\000\000\
+\\001\000\026\000\242\001\000\000\
 \\001\000\027\000\052\000\000\000\
-\\001\000\027\000\027\001\000\000\
-\\001\000\027\000\051\001\037\000\211\000\000\000\
-\\001\000\027\000\052\001\000\000\
-\\001\000\027\000\061\001\000\000\
-\\001\000\027\000\062\001\000\000\
-\\001\000\027\000\065\001\000\000\
-\\001\000\027\000\085\001\000\000\
-\\001\000\027\000\086\001\000\000\
-\\001\000\027\000\087\001\000\000\
-\\001\000\027\000\094\001\000\000\
-\\001\000\027\000\122\001\000\000\
-\\001\000\027\000\123\001\000\000\
-\\001\000\027\000\143\001\000\000\
-\\001\000\027\000\145\001\000\000\
-\\001\000\027\000\146\001\000\000\
-\\001\000\027\000\173\001\000\000\
-\\001\000\027\000\197\001\000\000\
-\\001\000\027\000\199\001\060\000\198\001\000\000\
-\\001\000\027\000\209\001\000\000\
-\\001\000\027\000\210\001\000\000\
-\\001\000\027\000\218\001\000\000\
-\\001\000\027\000\219\001\000\000\
-\\001\000\027\000\220\001\000\000\
-\\001\000\027\000\221\001\000\000\
-\\001\000\027\000\222\001\000\000\
+\\001\000\027\000\035\001\000\000\
+\\001\000\027\000\063\001\037\000\216\000\000\000\
+\\001\000\027\000\064\001\000\000\
+\\001\000\027\000\073\001\000\000\
+\\001\000\027\000\074\001\000\000\
+\\001\000\027\000\077\001\000\000\
+\\001\000\027\000\102\001\000\000\
+\\001\000\027\000\103\001\000\000\
+\\001\000\027\000\104\001\000\000\
+\\001\000\027\000\107\001\000\000\
+\\001\000\027\000\137\001\000\000\
+\\001\000\027\000\138\001\000\000\
+\\001\000\027\000\154\001\000\000\
+\\001\000\027\000\156\001\000\000\
+\\001\000\027\000\157\001\000\000\
+\\001\000\027\000\186\001\000\000\
+\\001\000\027\000\211\001\000\000\
+\\001\000\027\000\213\001\000\000\
+\\001\000\027\000\215\001\060\000\214\001\000\000\
 \\001\000\027\000\223\001\000\000\
-\\001\000\027\000\224\001\000\000\
-\\001\000\027\000\230\001\060\000\198\001\000\000\
+\\001\000\027\000\229\001\000\000\
+\\001\000\027\000\230\001\000\000\
+\\001\000\027\000\231\001\000\000\
+\\001\000\027\000\232\001\000\000\
+\\001\000\027\000\233\001\000\000\
+\\001\000\027\000\234\001\000\000\
+\\001\000\027\000\236\001\000\000\
+\\001\000\027\000\237\001\000\000\
 \\001\000\027\000\240\001\000\000\
-\\001\000\027\000\241\001\000\000\
-\\001\000\027\000\242\001\000\000\
+\\001\000\027\000\245\001\060\000\214\001\000\000\
+\\001\000\027\000\247\001\000\000\
+\\001\000\027\000\248\001\000\000\
+\\001\000\027\000\251\001\000\000\
+\\001\000\027\000\002\002\000\000\
+\\001\000\027\000\006\002\000\000\
+\\001\000\027\000\007\002\000\000\
+\\001\000\027\000\011\002\000\000\
 \\001\000\038\000\000\000\000\000\
 \\001\000\049\000\040\000\000\000\
-\\001\000\050\000\094\000\000\000\
+\\001\000\050\000\099\000\000\000\
 \\001\000\051\000\048\000\000\000\
-\\001\000\061\000\228\000\000\000\
-\\001\000\061\000\244\000\000\000\
-\\001\000\061\000\014\001\000\000\
-\\244\001\000\000\
-\\245\001\005\000\210\000\000\000\
-\\246\001\000\000\
-\\247\001\005\000\134\001\000\000\
-\\248\001\000\000\
-\\249\001\000\000\
-\\250\001\000\000\
-\\251\001\000\000\
-\\252\001\005\000\189\001\000\000\
-\\253\001\004\000\131\001\000\000\
-\\254\001\000\000\
-\\255\001\000\000\
-\\000\002\000\000\
-\\001\002\000\000\
-\\002\002\000\000\
-\\003\002\000\000\
-\\004\002\000\000\
-\\005\002\000\000\
-\\006\002\000\000\
-\\007\002\000\000\
-\\008\002\000\000\
-\\009\002\016\000\132\001\000\000\
-\\010\002\000\000\
-\\011\002\000\000\
-\\012\002\000\000\
-\\013\002\000\000\
+\\001\000\061\000\235\000\000\000\
+\\001\000\061\000\251\000\000\000\
+\\001\000\061\000\022\001\000\000\
 \\014\002\000\000\
 \\015\002\000\000\
 \\016\002\000\000\
-\\017\002\000\000\
+\\017\002\013\000\016\000\052\000\015\000\068\000\014\000\069\000\013\000\
+\\070\000\012\000\071\000\011\000\000\000\
 \\018\002\000\000\
 \\019\002\000\000\
 \\020\002\000\000\
@@ -1696,27 +1785,25 @@
 \\023\002\000\000\
 \\024\002\000\000\
 \\025\002\000\000\
+\\026\002\000\000\
 \\027\002\000\000\
 \\028\002\000\000\
-\\029\002\005\000\144\001\000\000\
+\\029\002\005\000\215\000\000\000\
 \\030\002\000\000\
 \\031\002\000\000\
-\\032\002\016\000\212\000\000\000\
+\\032\002\000\000\
 \\033\002\000\000\
-\\034\002\000\000\
 \\035\002\000\000\
-\\036\002\016\000\213\000\000\000\
+\\036\002\000\000\
 \\037\002\000\000\
 \\038\002\000\000\
 \\039\002\000\000\
 \\040\002\000\000\
-\\041\002\000\000\
-\\042\002\000\000\
-\\043\002\000\000\
+\\041\002\037\000\009\001\000\000\
+\\042\002\001\000\010\001\000\000\
+\\043\002\002\000\011\001\000\000\
 \\044\002\000\000\
-\\044\002\016\000\217\000\000\000\
 \\045\002\000\000\
-\\045\002\066\000\017\001\000\000\
 \\046\002\000\000\
 \\047\002\000\000\
 \\048\002\000\000\
@@ -1725,25 +1812,28 @@
 \\051\002\000\000\
 \\052\002\000\000\
 \\053\002\000\000\
+\\054\002\000\000\
 \\055\002\000\000\
 \\056\002\000\000\
-\\057\002\000\000\
+\\057\002\005\000\184\001\000\000\
 \\058\002\000\000\
+\\059\002\000\000\
+\\060\002\004\000\185\001\000\000\
+\\061\002\000\000\
 \\062\002\000\000\
 \\063\002\000\000\
+\\064\002\000\000\
 \\065\002\000\000\
 \\066\002\000\000\
 \\067\002\000\000\
 \\068\002\000\000\
-\\069\002\000\000\
-\\070\002\000\000\
 \\071\002\000\000\
 \\072\002\000\000\
 \\073\002\000\000\
 \\074\002\000\000\
-\\075\002\000\000\
-\\076\002\000\000\
-\\077\002\000\000\
+\\075\002\060\000\020\001\000\000\
+\\076\002\059\000\021\001\000\000\
+\\077\002\009\000\019\001\000\000\
 \\078\002\000\000\
 \\079\002\000\000\
 \\080\002\000\000\
@@ -1753,21 +1843,22 @@
 \\084\002\000\000\
 \\085\002\000\000\
 \\086\002\000\000\
-\\087\002\000\000\
+\\087\002\005\000\139\001\000\000\
 \\088\002\000\000\
 \\089\002\000\000\
 \\090\002\000\000\
 \\091\002\000\000\
 \\092\002\000\000\
-\\093\002\016\000\016\001\000\000\
+\\093\002\001\000\249\000\010\000\208\000\011\000\207\000\012\000\206\000\
+\\019\000\203\000\021\000\201\000\022\000\200\000\037\000\248\000\000\000\
 \\094\002\000\000\
 \\095\002\000\000\
 \\096\002\000\000\
-\\097\002\000\000\
-\\098\002\000\000\
+\\097\002\037\000\245\000\000\000\
+\\098\002\001\000\246\000\000\000\
 \\099\002\000\000\
-\\100\002\037\000\211\000\000\000\
-\\101\002\005\000\063\001\000\000\
+\\100\002\000\000\
+\\101\002\000\000\
 \\102\002\000\000\
 \\103\002\000\000\
 \\104\002\000\000\
@@ -1776,10 +1867,10 @@
 \\107\002\000\000\
 \\108\002\000\000\
 \\109\002\000\000\
-\\110\002\005\000\150\001\000\000\
+\\110\002\005\000\175\001\000\000\
 \\111\002\000\000\
 \\112\002\000\000\
-\\113\002\000\000\
+\\113\002\004\000\176\001\000\000\
 \\114\002\000\000\
 \\115\002\000\000\
 \\116\002\000\000\
@@ -1787,79 +1878,80 @@
 \\118\002\000\000\
 \\119\002\000\000\
 \\120\002\000\000\
-\\121\002\037\000\223\000\000\000\
-\\122\002\001\000\224\000\000\000\
+\\121\002\000\000\
+\\122\002\000\000\
 \\123\002\000\000\
 \\124\002\000\000\
 \\125\002\000\000\
 \\126\002\000\000\
-\\127\002\001\000\227\000\010\000\202\000\011\000\201\000\012\000\200\000\
-\\019\000\197\000\021\000\195\000\022\000\194\000\037\000\226\000\000\000\
+\\127\002\005\000\105\001\000\000\
 \\128\002\000\000\
 \\129\002\000\000\
-\\130\002\000\000\
-\\131\002\000\000\
-\\132\002\000\000\
-\\133\002\005\000\088\001\000\000\
+\\133\002\000\000\
 \\134\002\000\000\
 \\135\002\000\000\
 \\136\002\000\000\
 \\137\002\000\000\
 \\138\002\000\000\
 \\139\002\000\000\
-\\140\002\005\000\164\001\000\000\
-\\141\002\000\000\
+\\139\002\060\000\212\001\000\000\
+\\140\002\000\000\
+\\141\002\016\000\167\001\000\000\
 \\142\002\000\000\
 \\143\002\000\000\
 \\144\002\000\000\
+\\145\002\005\000\241\001\000\000\
 \\146\002\000\000\
 \\147\002\000\000\
 \\148\002\000\000\
 \\149\002\000\000\
-\\150\002\060\000\196\001\000\000\
-\\151\002\000\000\
+\\150\002\000\000\
+\\152\002\000\000\
 \\153\002\000\000\
+\\154\002\000\000\
+\\155\002\001\000\234\000\010\000\208\000\011\000\207\000\012\000\206\000\
+\\019\000\203\000\021\000\201\000\022\000\200\000\037\000\233\000\000\000\
 \\156\002\000\000\
 \\157\002\000\000\
 \\158\002\000\000\
-\\159\002\000\000\
-\\160\002\000\000\
+\\159\002\037\000\230\000\000\000\
+\\160\002\001\000\231\000\000\000\
 \\161\002\000\000\
-\\162\002\004\000\160\001\000\000\
-\\163\002\005\000\159\001\000\000\
+\\162\002\000\000\
+\\163\002\000\000\
 \\164\002\000\000\
 \\165\002\000\000\
 \\166\002\000\000\
 \\167\002\000\000\
 \\168\002\000\000\
 \\169\002\000\000\
+\\170\002\005\000\163\001\000\000\
 \\171\002\000\000\
 \\172\002\000\000\
 \\173\002\000\000\
 \\174\002\000\000\
 \\175\002\000\000\
 \\176\002\000\000\
-\\177\002\037\000\238\000\000\000\
-\\178\002\001\000\239\000\000\000\
+\\177\002\000\000\
+\\178\002\005\000\075\001\000\000\
 \\179\002\000\000\
 \\180\002\000\000\
-\\181\002\000\000\
+\\181\002\037\000\216\000\000\000\
 \\182\002\000\000\
-\\183\002\001\000\242\000\010\000\202\000\011\000\201\000\012\000\200\000\
-\\019\000\197\000\021\000\195\000\022\000\194\000\037\000\241\000\000\000\
+\\183\002\000\000\
 \\184\002\000\000\
 \\185\002\000\000\
 \\186\002\000\000\
 \\187\002\000\000\
 \\188\002\000\000\
-\\189\002\005\000\124\001\000\000\
+\\189\002\016\000\024\001\000\000\
 \\190\002\000\000\
 \\191\002\000\000\
 \\192\002\000\000\
 \\193\002\000\000\
 \\194\002\000\000\
 \\195\002\000\000\
-\\196\002\005\000\178\001\000\000\
+\\196\002\000\000\
 \\197\002\000\000\
 \\198\002\000\000\
 \\199\002\000\000\
@@ -1868,216 +1960,254 @@
 \\202\002\000\000\
 \\203\002\000\000\
 \\204\002\000\000\
-\\205\002\009\000\011\001\000\000\
+\\205\002\000\000\
 \\206\002\000\000\
 \\207\002\000\000\
-\\208\002\060\000\012\001\000\000\
-\\209\002\059\000\013\001\000\000\
+\\208\002\000\000\
+\\209\002\000\000\
 \\210\002\000\000\
 \\211\002\000\000\
 \\212\002\000\000\
-\\215\002\000\000\
+\\213\002\000\000\
+\\214\002\000\000\
 \\216\002\000\000\
 \\217\002\000\000\
 \\218\002\000\000\
-\\219\002\004\000\172\001\000\000\
-\\220\002\005\000\171\001\000\000\
+\\220\002\000\000\
 \\221\002\000\000\
-\\222\002\000\000\
-\\223\002\000\000\
-\\224\002\000\000\
 \\225\002\000\000\
 \\226\002\000\000\
 \\227\002\000\000\
 \\228\002\000\000\
-\\229\002\000\000\
 \\230\002\000\000\
 \\231\002\000\000\
 \\232\002\000\000\
 \\233\002\000\000\
 \\234\002\000\000\
-\\235\002\037\000\001\001\000\000\
-\\236\002\001\000\002\001\000\000\
-\\237\002\002\000\003\001\000\000\
+\\235\002\000\000\
+\\236\002\000\000\
+\\237\002\000\000\
+\\237\002\066\000\025\001\000\000\
 \\238\002\000\000\
 \\239\002\000\000\
+\\239\002\016\000\222\000\000\000\
 \\240\002\000\000\
 \\241\002\000\000\
 \\242\002\000\000\
+\\243\002\000\000\
 \\244\002\000\000\
 \\245\002\000\000\
 \\246\002\000\000\
 \\247\002\000\000\
-\\248\002\000\000\
+\\248\002\016\000\218\000\000\000\
 \\249\002\000\000\
 \\250\002\000\000\
 \\251\002\000\000\
-\\252\002\000\000\
+\\252\002\016\000\217\000\000\000\
 \\253\002\000\000\
 \\254\002\000\000\
-\\255\002\000\000\
+\\255\002\005\000\155\001\000\000\
 \\000\003\000\000\
 \\001\003\000\000\
 \\002\003\000\000\
-\\003\003\005\000\046\000\000\000\
+\\003\003\000\000\
 \\004\003\000\000\
-\\005\003\005\000\208\000\000\000\
+\\005\003\005\000\145\001\000\000\
 \\006\003\000\000\
 \\007\003\000\000\
 \\008\003\000\000\
-\\009\003\000\000\
+\\009\003\005\000\046\000\000\000\
 \\010\003\000\000\
-\\011\003\000\000\
-\\012\003\013\000\016\000\052\000\015\000\068\000\014\000\069\000\013\000\
-\\070\000\012\000\071\000\011\000\000\000\
+\\011\003\005\000\213\000\000\000\
+\\012\003\004\000\142\001\000\000\
 \\013\003\000\000\
+\\014\003\000\000\
+\\015\003\016\000\143\001\000\000\
+\\016\003\000\000\
+\\017\003\000\000\
+\\018\003\000\000\
+\\019\003\000\000\
+\\020\003\000\000\
+\\021\003\000\000\
+\\022\003\000\000\
+\\023\003\000\000\
+\\024\003\000\000\
+\\025\003\000\000\
+\\026\003\000\000\
+\\027\003\000\000\
+\\028\003\000\000\
+\\029\003\000\000\
+\\030\003\005\000\199\001\000\000\
+\\031\003\000\000\
+\\032\003\000\000\
+\\033\003\000\000\
+\\034\003\000\000\
+\\035\003\000\000\
+\\036\003\000\000\
+\\037\003\000\000\
+\\038\003\000\000\
+\\039\003\000\000\
+\\040\003\000\000\
+\\041\003\000\000\
+\\042\003\000\000\
+\\043\003\000\000\
+\\044\003\000\000\
+\\045\003\000\000\
+\\046\003\000\000\
+\\047\003\000\000\
 \"
 val actionRowNumbers =
-"\149\001\150\001\149\001\146\001\
-\\145\001\137\001\136\001\135\001\
-\\134\001\068\000\069\000\070\000\
-\\071\000\149\001\072\000\147\001\
-\\055\000\055\000\055\000\055\000\
-\\148\001\131\000\144\001\143\001\
-\\021\000\154\000\153\000\152\000\
-\\151\000\149\000\150\000\167\000\
-\\168\000\155\000\022\000\023\000\
-\\024\000\140\001\169\000\133\000\
-\\133\000\133\000\133\000\098\000\
-\\058\000\025\000\129\001\026\000\
-\\027\000\028\000\083\000\055\000\
-\\050\000\040\000\037\000\008\000\
-\\138\001\088\000\142\001\138\000\
-\\245\000\242\000\241\000\239\000\
-\\210\000\211\000\208\000\209\000\
-\\212\000\203\000\201\000\004\000\
-\\194\000\198\000\190\000\191\000\
-\\003\000\185\000\002\000\181\000\
-\\180\000\184\000\036\000\193\000\
-\\164\000\188\000\202\000\176\000\
-\\073\000\179\000\183\000\189\000\
-\\156\000\166\000\165\000\054\000\
-\\053\000\138\000\017\001\015\001\
-\\013\001\014\001\010\001\011\001\
-\\016\001\002\001\003\001\018\001\
-\\134\000\254\000\062\000\042\000\
-\\004\001\252\000\222\000\040\000\
-\\041\000\221\000\138\000\068\001\
-\\066\001\064\001\065\001\061\001\
-\\062\001\067\001\051\001\052\001\
-\\069\001\013\000\054\001\055\001\
-\\070\001\135\000\044\001\063\000\
-\\039\000\053\001\000\000\001\000\
-\\005\000\074\000\037\000\038\000\
-\\064\000\138\000\127\001\124\001\
-\\121\001\122\001\117\001\118\001\
-\\119\001\012\000\105\001\106\001\
-\\125\001\014\000\126\001\046\000\
-\\123\001\091\001\092\001\093\001\
-\\006\000\108\001\109\001\128\001\
-\\136\000\084\001\065\000\236\000\
-\\238\000\229\000\228\000\237\000\
-\\223\000\227\000\226\000\197\000\
-\\195\000\187\000\199\000\083\001\
-\\075\000\231\000\232\000\225\000\
-\\224\000\234\000\233\000\213\000\
-\\219\000\218\000\205\000\220\000\
-\\008\000\009\000\216\000\215\000\
-\\217\000\066\000\204\000\230\000\
-\\214\000\139\001\055\000\099\000\
-\\049\000\053\000\054\000\054\000\
-\\054\000\054\000\206\000\054\000\
-\\039\000\240\000\035\000\100\000\
-\\101\000\042\000\042\000\042\000\
-\\042\000\042\000\059\000\132\000\
-\\253\000\042\000\102\000\103\000\
-\\246\000\089\000\248\000\104\000\
-\\039\000\039\000\039\000\039\000\
-\\039\000\051\000\060\000\132\000\
-\\043\001\039\000\039\000\105\000\
-\\106\000\107\000\022\001\090\000\
-\\019\001\076\000\108\000\011\000\
-\\011\000\011\000\011\000\011\000\
-\\011\000\011\000\010\000\011\000\
-\\011\000\011\000\011\000\011\000\
-\\061\000\132\000\010\000\057\000\
-\\010\000\109\000\110\000\073\001\
-\\091\000\071\001\010\000\077\000\
-\\141\001\084\000\159\000\163\000\
-\\161\000\160\000\146\000\158\000\
-\\140\000\148\000\162\000\078\000\
-\\079\000\080\000\081\000\082\000\
-\\048\000\243\000\111\000\177\000\
-\\112\000\207\000\235\000\113\000\
-\\029\000\244\000\085\000\009\001\
-\\007\001\012\001\008\001\006\001\
-\\250\000\092\000\255\000\005\001\
-\\251\000\042\000\249\000\086\000\
-\\060\001\058\001\063\001\059\001\
-\\057\001\041\001\047\000\020\000\
-\\040\001\037\001\036\001\175\000\
-\\052\000\023\001\093\000\048\001\
-\\046\001\047\001\030\000\056\001\
-\\042\001\024\001\039\000\020\001\
-\\094\000\029\001\043\000\076\000\
-\\087\000\116\001\107\001\010\000\
-\\114\001\112\001\120\001\115\001\
-\\111\001\113\001\095\001\097\001\
-\\094\001\087\001\085\001\089\001\
-\\090\001\088\001\086\001\075\001\
-\\095\000\102\001\100\001\101\001\
-\\114\000\096\001\192\000\031\000\
-\\007\000\076\001\010\000\072\001\
-\\044\000\096\000\080\001\077\000\
-\\133\001\049\000\049\000\137\000\
-\\067\000\037\000\054\000\050\000\
-\\040\000\008\000\145\000\097\000\
-\\143\000\182\000\054\000\186\000\
-\\196\000\054\000\132\001\015\000\
-\\132\000\247\000\131\001\056\000\
-\\038\001\115\000\116\000\052\000\
-\\016\000\132\000\056\000\039\000\
-\\021\001\017\000\076\000\054\000\
-\\039\000\117\000\130\001\118\000\
-\\018\000\132\000\010\000\098\001\
-\\010\000\074\001\010\000\019\000\
-\\077\000\119\000\147\000\120\000\
-\\139\000\141\000\121\000\122\000\
-\\123\000\124\000\125\000\049\000\
-\\142\000\178\000\032\000\042\000\
-\\000\001\034\001\056\000\035\001\
-\\056\000\039\001\126\000\039\000\
-\\049\001\045\001\033\000\039\000\
-\\030\001\027\001\026\001\028\001\
-\\110\001\011\000\103\001\099\001\
-\\034\000\078\001\011\000\081\001\
-\\079\001\157\000\171\000\174\000\
-\\173\000\172\000\170\000\144\000\
-\\054\000\001\001\032\001\033\001\
-\\045\000\050\001\039\000\031\001\
-\\104\001\010\000\082\001\127\000\
-\\128\000\129\000\200\000\025\001\
-\\077\001\130\000"
+"\153\000\150\000\153\000\155\000\
+\\154\000\156\000\157\000\158\000\
+\\159\000\073\000\074\000\075\000\
+\\076\000\153\000\077\000\151\000\
+\\061\000\061\000\061\000\061\000\
+\\152\000\144\000\158\001\157\001\
+\\020\000\164\001\163\001\162\001\
+\\161\001\159\001\160\001\168\001\
+\\169\001\165\001\021\000\022\000\
+\\023\000\135\001\173\001\146\000\
+\\146\000\146\000\146\000\105\000\
+\\064\000\024\000\166\000\025\000\
+\\026\000\027\000\091\000\061\000\
+\\053\000\041\000\042\000\007\000\
+\\133\001\096\000\137\001\123\001\
+\\119\001\101\001\165\000\055\001\
+\\056\001\060\001\058\001\089\001\
+\\090\001\092\001\093\001\091\001\
+\\100\001\098\001\002\000\105\001\
+\\103\001\111\001\112\001\003\000\
+\\116\001\004\000\120\001\122\001\
+\\118\001\040\000\109\001\170\001\
+\\113\001\099\001\110\001\078\000\
+\\079\000\080\000\167\001\166\001\
+\\114\001\124\001\172\001\171\001\
+\\060\000\059\000\165\000\026\001\
+\\028\001\030\001\031\001\033\001\
+\\034\001\029\001\039\001\040\001\
+\\027\001\147\000\047\001\068\000\
+\\044\000\041\001\087\001\078\001\
+\\041\000\043\000\077\001\240\000\
+\\165\000\222\000\225\000\227\000\
+\\228\000\230\000\231\000\226\000\
+\\236\000\237\000\223\000\013\000\
+\\239\000\224\000\148\000\249\000\
+\\069\000\046\000\238\000\006\000\
+\\005\000\081\000\082\000\083\000\
+\\042\000\045\000\165\000\167\000\
+\\169\000\172\000\173\000\176\000\
+\\177\000\178\000\011\000\185\000\
+\\186\000\170\000\014\000\171\000\
+\\049\000\174\000\207\000\208\000\
+\\209\000\000\000\189\000\188\000\
+\\168\000\149\000\199\000\070\000\
+\\061\001\063\001\065\001\073\001\
+\\062\001\074\001\072\001\071\001\
+\\102\001\106\001\115\001\104\001\
+\\198\000\084\000\085\000\067\001\
+\\068\001\076\001\075\001\070\001\
+\\069\001\085\001\083\001\082\001\
+\\097\001\084\001\007\000\008\000\
+\\080\001\079\001\081\001\096\001\
+\\066\001\086\001\134\001\061\000\
+\\106\000\052\000\059\000\060\000\
+\\060\000\060\000\060\000\095\001\
+\\060\000\047\000\047\000\046\000\
+\\059\001\039\000\107\000\108\000\
+\\044\000\044\000\044\000\044\000\
+\\044\000\065\000\145\000\046\001\
+\\044\000\109\000\110\000\052\001\
+\\097\000\050\001\111\000\046\000\
+\\046\000\046\000\046\000\046\000\
+\\054\000\066\000\145\000\248\000\
+\\046\000\047\000\047\000\046\000\
+\\112\000\113\000\114\000\004\001\
+\\098\000\001\001\115\000\010\000\
+\\010\000\010\000\010\000\010\000\
+\\010\000\010\000\009\000\010\000\
+\\010\000\010\000\010\000\010\000\
+\\067\000\145\000\009\000\063\000\
+\\012\000\009\000\116\000\117\000\
+\\220\000\099\000\218\000\009\000\
+\\136\001\092\000\142\001\146\001\
+\\144\001\143\001\138\001\141\001\
+\\131\001\140\001\145\001\086\000\
+\\087\000\088\000\089\000\090\000\
+\\051\000\057\001\118\000\125\001\
+\\119\000\094\001\064\001\120\000\
+\\028\000\253\000\029\000\254\000\
+\\030\000\054\001\093\000\036\001\
+\\038\001\032\001\035\001\037\001\
+\\048\001\100\000\044\001\042\001\
+\\049\001\044\000\051\001\094\000\
+\\233\000\235\000\229\000\232\000\
+\\234\000\088\001\008\001\005\001\
+\\050\000\019\000\007\001\017\001\
+\\019\001\016\001\072\000\055\000\
+\\255\000\101\000\243\000\245\000\
+\\246\000\031\000\032\000\033\000\
+\\241\000\006\001\000\001\046\000\
+\\002\001\095\000\180\000\187\000\
+\\009\000\182\000\184\000\175\000\
+\\179\000\183\000\181\000\205\000\
+\\203\000\206\000\212\000\214\000\
+\\210\000\211\000\213\000\215\000\
+\\216\000\102\000\192\000\194\000\
+\\195\000\121\000\204\000\108\001\
+\\034\000\202\000\035\000\001\000\
+\\217\000\009\000\219\000\163\000\
+\\052\000\052\000\164\000\071\000\
+\\042\000\060\000\053\000\041\000\
+\\007\000\156\001\103\000\154\001\
+\\121\001\060\000\117\001\107\001\
+\\060\000\060\000\060\000\162\000\
+\\015\000\145\000\053\001\161\000\
+\\062\000\062\000\145\000\122\000\
+\\014\001\123\000\124\000\055\000\
+\\016\000\145\000\062\000\042\000\
+\\042\000\046\000\003\001\160\000\
+\\125\000\017\000\145\000\009\000\
+\\197\000\007\000\009\000\221\000\
+\\139\001\126\000\130\001\132\001\
+\\127\000\128\000\129\000\130\000\
+\\131\000\052\000\153\001\126\001\
+\\132\000\133\000\036\000\044\000\
+\\045\001\022\001\134\000\020\001\
+\\104\000\010\001\062\000\023\001\
+\\062\000\015\001\135\000\046\000\
+\\244\000\247\000\136\000\137\000\
+\\037\000\190\000\010\000\193\000\
+\\196\000\138\000\038\000\147\001\
+\\149\001\152\001\151\001\150\001\
+\\148\001\155\001\129\001\128\001\
+\\060\000\043\001\018\001\062\000\
+\\018\000\024\001\025\001\048\000\
+\\242\000\252\000\251\000\046\000\
+\\191\000\201\000\009\000\139\000\
+\\021\001\056\000\140\000\141\000\
+\\127\001\009\001\011\001\057\000\
+\\250\000\200\000\013\001\142\000\
+\\058\000\012\001\058\000\143\000"
 val gotoT =
 "\
-\\133\000\008\000\134\000\007\000\135\000\006\000\136\000\005\000\
-\\137\000\004\000\138\000\003\000\139\000\002\000\140\000\001\000\
-\\141\000\241\001\000\000\
-\\000\000\
-\\133\000\008\000\134\000\007\000\135\000\006\000\136\000\005\000\
-\\137\000\004\000\138\000\003\000\139\000\002\000\140\000\015\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\133\000\008\000\134\000\007\000\135\000\006\000\136\000\005\000\
-\\137\000\004\000\138\000\003\000\139\000\002\000\140\000\020\000\000\000\
+\\128\000\008\000\129\000\007\000\130\000\006\000\131\000\005\000\
+\\132\000\004\000\133\000\003\000\134\000\002\000\135\000\001\000\
+\\136\000\011\002\000\000\
+\\000\000\
+\\128\000\008\000\129\000\007\000\130\000\006\000\131\000\005\000\
+\\132\000\004\000\133\000\003\000\134\000\002\000\135\000\015\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\128\000\008\000\129\000\007\000\130\000\006\000\131\000\005\000\
+\\132\000\004\000\133\000\003\000\134\000\002\000\135\000\020\000\000\000\
 \\000\000\
 \\000\000\
 \\002\000\024\000\009\000\023\000\014\000\022\000\000\000\
@@ -2103,10 +2233,10 @@
 \\000\000\
 \\004\000\043\000\000\000\
 \\000\000\
-\\132\000\045\000\000\000\
-\\132\000\047\000\000\000\
-\\132\000\048\000\000\000\
-\\132\000\049\000\000\000\
+\\127\000\045\000\000\000\
+\\127\000\047\000\000\000\
+\\127\000\048\000\000\000\
+\\127\000\049\000\000\000\
 \\000\000\
 \\000\000\
 \\000\000\
@@ -2116,1094 +2246,1191 @@
 \\000\000\
 \\000\000\
 \\002\000\058\000\003\000\057\000\009\000\023\000\014\000\022\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\063\000\055\000\062\000\057\000\061\000\058\000\060\000\
-\\059\000\059\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\061\000\108\000\062\000\107\000\063\000\106\000\065\000\105\000\
-\\066\000\104\000\067\000\103\000\068\000\102\000\069\000\101\000\
-\\070\000\100\000\071\000\099\000\072\000\098\000\073\000\097\000\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\139\000\020\000\082\000\022\000\081\000\023\000\138\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\074\000\133\000\076\000\132\000\077\000\131\000\080\000\130\000\
-\\086\000\129\000\087\000\128\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\119\000\100\000\118\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\101\000\168\000\103\000\167\000\104\000\166\000\
-\\107\000\165\000\108\000\164\000\109\000\163\000\110\000\162\000\
-\\111\000\161\000\112\000\160\000\113\000\159\000\115\000\158\000\
-\\116\000\157\000\117\000\156\000\118\000\155\000\122\000\154\000\
-\\123\000\153\000\124\000\152\000\125\000\151\000\126\000\150\000\
-\\127\000\149\000\128\000\148\000\129\000\147\000\130\000\146\000\
-\\131\000\145\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\001\000\207\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\036\000\214\000\037\000\213\000\038\000\212\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\218\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\217\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\063\000\055\000\062\000\057\000\061\000\058\000\219\000\000\000\
-\\001\000\220\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\050\000\223\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\228\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\061\000\108\000\062\000\231\000\063\000\106\000\065\000\105\000\
-\\066\000\104\000\067\000\103\000\068\000\102\000\069\000\101\000\
-\\070\000\100\000\071\000\099\000\072\000\230\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\060\000\233\000\063\000\106\000\065\000\105\000\066\000\104\000\
-\\067\000\103\000\068\000\102\000\069\000\101\000\070\000\100\000\
-\\071\000\099\000\072\000\232\000\000\000\
-\\000\000\
-\\001\000\235\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\050\000\238\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\244\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\139\000\020\000\082\000\022\000\081\000\023\000\138\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\074\000\133\000\076\000\249\000\077\000\131\000\080\000\130\000\
-\\086\000\129\000\087\000\248\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\247\000\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\075\000\251\000\077\000\131\000\080\000\130\000\088\000\127\000\
-\\092\000\126\000\093\000\125\000\094\000\124\000\095\000\123\000\
-\\096\000\122\000\097\000\121\000\098\000\120\000\099\000\250\000\000\000\
-\\000\000\
-\\001\000\254\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\036\000\178\000\037\000\177\000\050\000\174\000\053\000\002\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\101\000\168\000\103\000\018\001\104\000\166\000\
-\\107\000\165\000\108\000\164\000\109\000\163\000\110\000\162\000\
-\\111\000\161\000\112\000\160\000\113\000\159\000\115\000\158\000\
-\\116\000\157\000\117\000\156\000\118\000\155\000\122\000\154\000\
-\\123\000\153\000\124\000\152\000\125\000\151\000\126\000\150\000\
-\\127\000\149\000\128\000\148\000\129\000\147\000\130\000\017\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\102\000\020\001\104\000\166\000\107\000\165\000\
-\\108\000\164\000\109\000\163\000\110\000\162\000\111\000\161\000\
-\\112\000\160\000\113\000\159\000\115\000\158\000\116\000\157\000\
-\\117\000\156\000\118\000\155\000\122\000\154\000\123\000\153\000\
-\\124\000\152\000\125\000\151\000\126\000\150\000\127\000\149\000\
-\\128\000\148\000\129\000\147\000\130\000\019\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\002\000\058\000\003\000\024\001\009\000\023\000\014\000\022\000\000\000\
-\\000\000\
-\\006\000\033\001\008\000\032\001\009\000\031\001\010\000\030\001\
-\\011\000\029\001\012\000\028\001\013\000\027\001\014\000\084\000\
-\\016\000\026\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\063\000\055\000\062\000\057\000\041\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\043\001\021\000\042\001\022\000\081\000\
-\\023\000\080\000\024\000\079\000\025\000\182\000\026\000\077\000\
-\\027\000\181\000\028\000\075\000\029\000\074\000\030\000\073\000\
-\\031\000\072\000\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\043\001\021\000\044\001\022\000\081\000\
-\\023\000\080\000\024\000\079\000\025\000\182\000\026\000\077\000\
-\\027\000\181\000\028\000\075\000\029\000\074\000\030\000\073\000\
-\\031\000\072\000\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\045\001\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\046\001\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\043\001\021\000\047\001\022\000\081\000\
-\\023\000\080\000\024\000\079\000\025\000\182\000\026\000\077\000\
-\\027\000\181\000\028\000\075\000\029\000\074\000\030\000\073\000\
-\\031\000\072\000\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\048\001\000\000\
-\\000\000\
-\\036\000\214\000\038\000\212\000\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\051\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\052\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\053\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\054\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\055\001\000\000\
-\\061\000\056\001\000\000\
-\\011\000\058\001\064\000\057\001\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\104\000\067\000\103\000\
-\\068\000\102\000\069\000\101\000\070\000\100\000\071\000\099\000\
-\\072\000\230\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\064\001\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\065\001\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\066\001\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\067\001\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\068\001\000\000\
-\\009\000\074\001\047\000\073\001\082\000\072\001\083\000\071\001\
-\\084\000\070\001\085\000\069\001\000\000\
-\\074\000\077\001\000\000\
-\\011\000\081\001\089\000\080\001\090\000\079\001\091\000\078\001\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\247\000\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\082\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\011\000\090\001\078\000\089\001\079\000\088\001\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\093\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\096\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\097\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\098\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\099\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\100\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\101\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\114\000\103\001\115\000\158\000\116\000\157\000\
-\\117\000\156\000\118\000\155\000\122\000\154\000\123\000\153\000\
-\\124\000\152\000\125\000\151\000\126\000\150\000\127\000\149\000\
-\\128\000\148\000\129\000\147\000\130\000\102\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\113\000\105\001\118\000\155\000\122\000\154\000\123\000\104\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\113\000\106\001\118\000\155\000\122\000\154\000\123\000\104\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\111\000\108\001\113\000\107\001\118\000\155\000\122\000\154\000\
-\\123\000\104\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\113\000\109\001\118\000\155\000\122\000\154\000\123\000\104\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\113\000\110\001\118\000\155\000\122\000\154\000\123\000\104\001\000\000\
-\\101\000\111\001\000\000\
-\\011\000\115\001\119\000\114\001\120\000\113\001\121\000\112\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\116\001\000\000\
-\\009\000\087\000\019\000\118\001\031\000\117\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\119\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\017\001\000\000\
-\\011\000\115\001\105\000\126\001\106\000\125\001\119\000\114\001\
-\\120\000\124\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\005\000\131\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\006\000\033\001\007\000\139\001\008\000\138\001\009\000\031\001\
-\\010\000\030\001\011\000\029\001\012\000\028\001\013\000\027\001\
-\\014\000\084\000\016\000\026\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\060\000\149\001\063\000\106\000\065\000\105\000\066\000\104\000\
-\\067\000\103\000\068\000\102\000\069\000\101\000\070\000\100\000\
-\\071\000\099\000\072\000\232\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\074\001\047\000\073\001\081\000\154\001\082\000\153\001\
-\\083\000\152\001\084\000\070\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\075\000\160\001\077\000\131\000\080\000\130\000\088\000\127\000\
-\\092\000\126\000\093\000\125\000\094\000\124\000\095\000\123\000\
-\\096\000\122\000\097\000\121\000\098\000\120\000\099\000\250\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\011\000\090\001\078\000\165\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\167\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\102\000\173\001\104\000\166\000\107\000\165\000\
-\\108\000\164\000\109\000\163\000\110\000\162\000\111\000\161\000\
-\\112\000\160\000\113\000\159\000\115\000\158\000\116\000\157\000\
-\\117\000\156\000\118\000\155\000\122\000\154\000\123\000\153\000\
-\\124\000\152\000\125\000\151\000\126\000\150\000\127\000\149\000\
-\\128\000\148\000\129\000\147\000\130\000\019\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\011\000\115\001\105\000\177\001\119\000\114\001\120\000\124\001\000\000\
-\\000\000\
-\\006\000\033\001\008\000\178\001\009\000\031\001\010\000\030\001\
-\\011\000\029\001\012\000\028\001\013\000\027\001\014\000\084\000\
-\\016\000\026\001\000\000\
-\\006\000\033\001\007\000\179\001\008\000\138\001\009\000\031\001\
-\\010\000\030\001\011\000\029\001\012\000\028\001\013\000\027\001\
-\\014\000\084\000\016\000\026\001\000\000\
-\\000\000\
-\\006\000\181\001\017\000\180\001\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\139\000\020\000\082\000\022\000\081\000\023\000\138\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\074\000\133\000\076\000\132\000\077\000\131\000\080\000\130\000\
-\\086\000\129\000\087\000\128\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\119\000\100\000\182\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\001\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\063\000\055\000\062\000\057\000\061\000\058\000\060\000\
-\\059\000\184\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\061\000\108\000\062\000\107\000\063\000\106\000\065\000\105\000\
-\\066\000\104\000\067\000\103\000\068\000\102\000\069\000\101\000\
-\\070\000\100\000\071\000\099\000\072\000\098\000\073\000\185\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\101\000\168\000\103\000\167\000\104\000\166\000\
-\\107\000\165\000\108\000\164\000\109\000\163\000\110\000\162\000\
-\\111\000\161\000\112\000\160\000\113\000\159\000\115\000\158\000\
-\\116\000\157\000\117\000\156\000\118\000\155\000\122\000\154\000\
-\\123\000\153\000\124\000\152\000\125\000\151\000\126\000\150\000\
-\\127\000\149\000\128\000\148\000\129\000\147\000\130\000\146\000\
-\\131\000\186\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\043\001\021\000\189\001\022\000\081\000\
-\\023\000\080\000\024\000\079\000\025\000\182\000\026\000\077\000\
-\\027\000\181\000\028\000\075\000\029\000\074\000\030\000\073\000\
-\\031\000\072\000\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\190\001\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\000\000\
-\\000\000\
-\\011\000\058\001\064\000\192\001\000\000\
-\\000\000\
-\\000\000\
-\\009\000\074\001\047\000\073\001\083\000\193\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\074\001\047\000\073\001\081\000\198\001\082\000\153\001\
-\\083\000\152\001\084\000\070\001\000\000\
-\\000\000\
-\\011\000\081\001\089\000\080\001\090\000\079\001\091\000\200\001\000\000\
-\\009\000\074\001\047\000\073\001\083\000\201\001\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\202\001\000\000\
-\\000\000\
-\\000\000\
-\\011\000\090\001\078\000\089\001\079\000\204\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\205\001\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\206\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\011\000\115\001\119\000\114\001\120\000\113\001\121\000\210\001\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\114\000\211\001\115\000\158\000\116\000\157\000\
-\\117\000\156\000\118\000\155\000\122\000\154\000\123\000\153\000\
-\\124\000\152\000\125\000\151\000\126\000\150\000\127\000\149\000\
-\\128\000\148\000\129\000\147\000\130\000\102\001\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\212\001\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\213\001\000\000\
-\\000\000\
-\\011\000\115\001\105\000\126\001\106\000\215\001\119\000\114\001\
-\\120\000\124\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\006\000\033\001\007\000\223\001\008\000\138\001\009\000\031\001\
-\\010\000\030\001\011\000\029\001\012\000\028\001\013\000\027\001\
-\\014\000\084\000\016\000\026\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\112\000\046\000\111\000\051\000\110\000\055\000\109\000\
-\\063\000\106\000\065\000\105\000\066\000\225\001\000\000\
-\\000\000\
-\\000\000\
-\\009\000\074\001\047\000\073\001\083\000\226\001\000\000\
-\\000\000\
-\\009\000\074\001\047\000\073\001\083\000\227\001\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\229\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\231\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\232\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\094\001\
-\\118\000\155\000\122\000\154\000\123\000\234\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\235\001\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\140\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\082\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\078\000\026\000\077\000\027\000\076\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\072\000\
-\\032\000\071\000\033\000\070\000\034\000\069\000\035\000\068\000\
-\\039\000\067\000\042\000\066\000\043\000\065\000\044\000\064\000\
-\\045\000\137\000\046\000\136\000\051\000\135\000\055\000\134\000\
-\\077\000\131\000\080\000\130\000\088\000\127\000\092\000\126\000\
-\\093\000\125\000\094\000\124\000\095\000\123\000\096\000\122\000\
-\\097\000\121\000\098\000\120\000\099\000\236\001\000\000\
-\\000\000\
-\\000\000\
-\\009\000\087\000\011\000\086\000\012\000\085\000\014\000\084\000\
-\\019\000\083\000\020\000\183\000\022\000\081\000\023\000\080\000\
-\\024\000\079\000\025\000\182\000\026\000\077\000\027\000\181\000\
-\\028\000\075\000\029\000\074\000\030\000\073\000\031\000\180\000\
-\\032\000\179\000\033\000\070\000\034\000\069\000\036\000\178\000\
-\\037\000\177\000\046\000\176\000\049\000\175\000\050\000\174\000\
-\\051\000\173\000\052\000\172\000\053\000\171\000\054\000\170\000\
-\\056\000\169\000\104\000\166\000\107\000\165\000\108\000\164\000\
-\\109\000\163\000\110\000\162\000\111\000\161\000\112\000\160\000\
-\\113\000\159\000\115\000\158\000\116\000\157\000\117\000\156\000\
-\\118\000\155\000\122\000\154\000\123\000\153\000\124\000\152\000\
-\\125\000\151\000\126\000\150\000\127\000\149\000\128\000\148\000\
-\\129\000\147\000\130\000\237\001\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
-\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\066\000\055\000\065\000\057\000\064\000\058\000\063\000\
+\\059\000\062\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\061\000\113\000\062\000\112\000\063\000\111\000\065\000\110\000\
+\\066\000\109\000\067\000\108\000\068\000\107\000\069\000\106\000\
+\\070\000\105\000\071\000\104\000\072\000\103\000\073\000\102\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\144\000\020\000\085\000\022\000\084\000\023\000\143\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\074\000\138\000\076\000\137\000\077\000\136\000\083\000\135\000\
+\\084\000\134\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\125\000\097\000\124\000\138\000\123\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\098\000\173\000\100\000\172\000\101\000\171\000\
+\\102\000\170\000\103\000\169\000\104\000\168\000\105\000\167\000\
+\\106\000\166\000\107\000\165\000\108\000\164\000\110\000\163\000\
+\\111\000\162\000\112\000\161\000\113\000\160\000\117\000\159\000\
+\\118\000\158\000\119\000\157\000\120\000\156\000\121\000\155\000\
+\\122\000\154\000\123\000\153\000\124\000\152\000\125\000\151\000\
+\\126\000\150\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\001\000\212\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\036\000\219\000\037\000\218\000\038\000\217\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\225\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\224\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\066\000\055\000\065\000\057\000\064\000\058\000\226\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\001\000\227\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\050\000\230\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\235\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\061\000\113\000\062\000\238\000\063\000\111\000\065\000\110\000\
+\\066\000\109\000\067\000\108\000\068\000\107\000\069\000\106\000\
+\\070\000\105\000\071\000\104\000\072\000\237\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\060\000\240\000\063\000\111\000\065\000\110\000\066\000\109\000\
+\\067\000\108\000\068\000\107\000\069\000\106\000\070\000\105\000\
+\\071\000\104\000\072\000\239\000\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\001\000\242\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\050\000\245\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\251\000\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\144\000\020\000\085\000\022\000\084\000\023\000\143\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\074\000\138\000\076\000\002\001\077\000\136\000\083\000\135\000\
+\\084\000\001\001\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\000\001\138\000\123\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\075\000\004\001\077\000\136\000\085\000\133\000\089\000\132\000\
+\\090\000\131\000\091\000\130\000\092\000\129\000\093\000\128\000\
+\\094\000\127\000\095\000\126\000\096\000\003\001\138\000\123\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\001\000\006\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\036\000\183\000\037\000\182\000\050\000\179\000\053\000\010\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\098\000\173\000\100\000\027\001\101\000\171\000\
+\\102\000\170\000\103\000\169\000\104\000\168\000\105\000\167\000\
+\\106\000\166\000\107\000\165\000\108\000\164\000\110\000\163\000\
+\\111\000\162\000\112\000\161\000\113\000\160\000\117\000\159\000\
+\\118\000\158\000\119\000\157\000\120\000\156\000\121\000\155\000\
+\\122\000\154\000\123\000\153\000\124\000\152\000\125\000\026\001\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\099\000\029\001\101\000\171\000\102\000\170\000\
+\\103\000\169\000\104\000\168\000\105\000\167\000\106\000\166\000\
+\\107\000\165\000\108\000\164\000\110\000\163\000\111\000\162\000\
+\\112\000\161\000\113\000\160\000\117\000\159\000\118\000\158\000\
+\\119\000\157\000\120\000\156\000\121\000\155\000\122\000\154\000\
+\\123\000\153\000\124\000\152\000\125\000\028\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\002\000\058\000\003\000\032\001\009\000\023\000\014\000\022\000\000\000\
+\\000\000\
+\\006\000\041\001\008\000\040\001\009\000\039\001\010\000\038\001\
+\\011\000\037\001\012\000\036\001\013\000\035\001\014\000\087\000\
+\\016\000\034\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\066\000\055\000\065\000\057\000\049\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\051\001\021\000\050\001\022\000\084\000\
+\\023\000\083\000\024\000\082\000\025\000\187\000\026\000\080\000\
+\\027\000\186\000\028\000\078\000\029\000\077\000\030\000\076\000\
+\\031\000\075\000\032\000\184\000\033\000\073\000\034\000\072\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\051\001\021\000\052\001\022\000\084\000\
+\\023\000\083\000\024\000\082\000\025\000\187\000\026\000\080\000\
+\\027\000\186\000\028\000\078\000\029\000\077\000\030\000\076\000\
+\\031\000\075\000\032\000\184\000\033\000\073\000\034\000\072\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\053\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\054\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\051\001\021\000\055\001\022\000\084\000\
+\\023\000\083\000\024\000\082\000\025\000\187\000\026\000\080\000\
+\\027\000\186\000\028\000\078\000\029\000\077\000\030\000\076\000\
+\\031\000\075\000\032\000\184\000\033\000\073\000\034\000\072\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\051\000\140\000\089\000\057\001\139\000\056\001\000\000\
+\\051\000\140\000\089\000\059\001\140\000\058\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\060\001\138\000\123\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\036\000\219\000\038\000\217\000\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\063\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\064\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\065\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\066\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\067\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\061\000\068\001\000\000\
+\\011\000\070\001\064\000\069\001\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\109\000\067\000\108\000\
+\\068\000\107\000\069\000\106\000\070\000\105\000\071\000\104\000\
+\\072\000\237\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\076\001\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\077\001\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\078\001\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\079\001\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\080\001\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\079\000\086\001\
+\\080\000\085\001\081\000\084\001\082\000\083\001\141\000\082\001\
+\\145\000\081\001\000\000\
+\\074\000\092\001\000\000\
+\\011\000\096\001\086\000\095\001\087\000\094\001\088\000\093\001\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\000\001\138\000\123\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\051\000\140\000\089\000\059\001\140\000\097\001\000\000\
+\\051\000\140\000\089\000\057\001\139\000\098\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\099\001\138\000\123\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\106\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\109\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\110\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\111\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\112\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\113\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\114\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\109\000\116\001\110\000\163\000\111\000\162\000\
+\\112\000\161\000\113\000\160\000\117\000\159\000\118\000\158\000\
+\\119\000\157\000\120\000\156\000\121\000\155\000\122\000\154\000\
+\\123\000\153\000\124\000\152\000\125\000\115\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\108\000\118\001\113\000\160\000\117\000\159\000\118\000\117\001\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\108\000\119\001\113\000\160\000\117\000\159\000\118\000\117\001\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\106\000\121\001\108\000\120\001\113\000\160\000\117\000\159\000\
+\\118\000\117\001\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\108\000\122\001\113\000\160\000\117\000\159\000\118\000\117\001\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\108\000\123\001\113\000\160\000\117\000\159\000\118\000\117\001\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\098\000\124\001\000\000\
+\\011\000\128\001\114\000\127\001\115\000\126\001\116\000\125\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\110\000\163\000\111\000\162\000\112\000\161\000\
+\\113\000\160\000\117\000\159\000\118\000\158\000\119\000\157\000\
+\\120\000\156\000\121\000\155\000\122\000\154\000\123\000\153\000\
+\\124\000\152\000\125\000\129\001\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\009\000\090\000\019\000\131\001\031\000\130\001\000\000\
+\\051\000\178\000\054\000\175\000\117\000\133\001\137\000\132\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\110\000\163\000\111\000\162\000\112\000\161\000\
+\\113\000\160\000\117\000\159\000\118\000\158\000\119\000\157\000\
+\\120\000\156\000\121\000\155\000\122\000\154\000\123\000\153\000\
+\\124\000\152\000\125\000\134\001\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\110\000\163\000\111\000\162\000\112\000\161\000\
+\\113\000\160\000\117\000\159\000\118\000\158\000\119\000\157\000\
+\\120\000\156\000\121\000\155\000\122\000\154\000\123\000\153\000\
+\\124\000\152\000\125\000\026\001\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\005\000\142\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\006\000\041\001\007\000\150\001\008\000\149\001\009\000\039\001\
+\\010\000\038\001\011\000\037\001\012\000\036\001\013\000\035\001\
+\\014\000\087\000\016\000\034\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\060\000\162\001\063\000\111\000\065\000\110\000\066\000\109\000\
+\\067\000\108\000\068\000\107\000\069\000\106\000\070\000\105\000\
+\\071\000\104\000\072\000\239\000\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\078\000\170\001\
+\\079\000\169\001\080\000\168\001\081\000\084\001\141\000\167\001\
+\\145\000\081\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\075\000\178\001\077\000\136\000\085\000\133\000\089\000\132\000\
+\\090\000\131\000\091\000\130\000\092\000\129\000\093\000\128\000\
+\\094\000\127\000\095\000\126\000\096\000\003\001\138\000\123\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\110\000\163\000\111\000\162\000\112\000\161\000\
+\\113\000\160\000\117\000\159\000\118\000\158\000\119\000\157\000\
+\\120\000\156\000\121\000\155\000\122\000\154\000\123\000\153\000\
+\\124\000\152\000\125\000\180\001\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\099\000\187\001\101\000\171\000\102\000\170\000\
+\\103\000\169\000\104\000\168\000\105\000\167\000\106\000\166\000\
+\\107\000\165\000\108\000\164\000\110\000\163\000\111\000\162\000\
+\\112\000\161\000\113\000\160\000\117\000\159\000\118\000\158\000\
+\\119\000\157\000\120\000\156\000\121\000\155\000\122\000\154\000\
+\\123\000\153\000\124\000\152\000\125\000\028\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\006\000\041\001\008\000\188\001\009\000\039\001\010\000\038\001\
+\\011\000\037\001\012\000\036\001\013\000\035\001\014\000\087\000\
+\\016\000\034\001\000\000\
+\\006\000\041\001\007\000\189\001\008\000\149\001\009\000\039\001\
+\\010\000\038\001\011\000\037\001\012\000\036\001\013\000\035\001\
+\\014\000\087\000\016\000\034\001\000\000\
+\\000\000\
+\\006\000\191\001\017\000\190\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\144\000\020\000\085\000\022\000\084\000\023\000\143\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\074\000\138\000\076\000\137\000\077\000\136\000\083\000\135\000\
+\\084\000\134\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\125\000\097\000\192\001\138\000\123\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\193\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\066\000\055\000\065\000\057\000\064\000\058\000\063\000\
+\\059\000\194\001\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\061\000\113\000\062\000\112\000\063\000\111\000\065\000\110\000\
+\\066\000\109\000\067\000\108\000\068\000\107\000\069\000\106\000\
+\\070\000\105\000\071\000\104\000\072\000\103\000\073\000\195\001\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\098\000\173\000\100\000\172\000\101\000\171\000\
+\\102\000\170\000\103\000\169\000\104\000\168\000\105\000\167\000\
+\\106\000\166\000\107\000\165\000\108\000\164\000\110\000\163\000\
+\\111\000\162\000\112\000\161\000\113\000\160\000\117\000\159\000\
+\\118\000\158\000\119\000\157\000\120\000\156\000\121\000\155\000\
+\\122\000\154\000\123\000\153\000\124\000\152\000\125\000\151\000\
+\\126\000\196\001\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\051\001\021\000\199\001\022\000\084\000\
+\\023\000\083\000\024\000\082\000\025\000\187\000\026\000\080\000\
+\\027\000\186\000\028\000\078\000\029\000\077\000\030\000\076\000\
+\\031\000\075\000\032\000\184\000\033\000\073\000\034\000\072\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\200\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\201\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\202\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\011\000\070\001\064\000\204\001\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\205\001\
+\\145\000\081\001\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\207\001\
+\\143\000\206\001\145\000\081\001\000\000\
+\\011\000\096\001\086\000\095\001\087\000\094\001\088\000\208\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\078\000\214\001\
+\\079\000\169\001\080\000\168\001\081\000\084\001\141\000\167\001\
+\\145\000\081\001\000\000\
+\\000\000\
+\\011\000\096\001\086\000\095\001\087\000\094\001\088\000\216\001\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\217\001\
+\\145\000\081\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\144\000\020\000\085\000\022\000\084\000\023\000\143\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\074\000\138\000\076\000\137\000\077\000\136\000\083\000\135\000\
+\\084\000\134\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\125\000\097\000\218\001\138\000\123\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\144\000\020\000\085\000\022\000\084\000\023\000\143\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\074\000\138\000\076\000\137\000\077\000\136\000\083\000\135\000\
+\\084\000\134\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\125\000\097\000\219\001\138\000\123\000\
+\\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\220\001\138\000\123\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\011\000\128\001\114\000\127\001\115\000\126\001\116\000\223\001\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\109\000\224\001\110\000\163\000\111\000\162\000\
+\\112\000\161\000\113\000\160\000\117\000\159\000\118\000\158\000\
+\\119\000\157\000\120\000\156\000\121\000\155\000\122\000\154\000\
+\\123\000\153\000\124\000\152\000\125\000\115\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\098\000\173\000\100\000\172\000\101\000\171\000\
+\\102\000\170\000\103\000\169\000\104\000\168\000\105\000\167\000\
+\\106\000\166\000\107\000\165\000\108\000\164\000\110\000\163\000\
+\\111\000\162\000\112\000\161\000\113\000\160\000\117\000\159\000\
+\\118\000\158\000\119\000\157\000\120\000\156\000\121\000\155\000\
+\\122\000\154\000\123\000\153\000\124\000\152\000\125\000\151\000\
+\\126\000\225\001\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\110\000\163\000\111\000\162\000\112\000\161\000\
+\\113\000\160\000\117\000\159\000\118\000\158\000\119\000\157\000\
+\\120\000\156\000\121\000\155\000\122\000\154\000\123\000\153\000\
+\\124\000\152\000\125\000\226\001\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\006\000\041\001\007\000\233\001\008\000\149\001\009\000\039\001\
+\\010\000\038\001\011\000\037\001\012\000\036\001\013\000\035\001\
+\\014\000\087\000\016\000\034\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\117\000\046\000\116\000\051\000\115\000\055\000\114\000\
+\\063\000\111\000\065\000\110\000\066\000\237\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\241\001\
+\\145\000\081\001\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\242\001\
+\\145\000\081\001\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\244\001\
+\\138\000\123\000\144\000\061\000\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\107\001\
+\\113\000\160\000\117\000\159\000\118\000\248\001\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\251\001\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\207\001\
+\\143\000\252\001\145\000\081\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\085\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\081\000\026\000\080\000\027\000\079\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\075\000\
+\\032\000\074\000\033\000\073\000\034\000\072\000\035\000\071\000\
+\\039\000\070\000\042\000\069\000\043\000\068\000\044\000\067\000\
+\\045\000\142\000\046\000\141\000\051\000\140\000\055\000\139\000\
+\\077\000\136\000\085\000\133\000\089\000\132\000\090\000\131\000\
+\\091\000\130\000\092\000\129\000\093\000\128\000\094\000\127\000\
+\\095\000\126\000\096\000\254\001\138\000\123\000\144\000\061\000\
+\\145\000\060\000\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\009\000\090\000\011\000\089\000\012\000\088\000\014\000\087\000\
+\\019\000\086\000\020\000\188\000\022\000\084\000\023\000\083\000\
+\\024\000\082\000\025\000\187\000\026\000\080\000\027\000\186\000\
+\\028\000\078\000\029\000\077\000\030\000\076\000\031\000\185\000\
+\\032\000\184\000\033\000\073\000\034\000\072\000\036\000\183\000\
+\\037\000\182\000\046\000\181\000\049\000\180\000\050\000\179\000\
+\\051\000\178\000\052\000\177\000\053\000\176\000\054\000\175\000\
+\\056\000\174\000\101\000\171\000\102\000\170\000\103\000\169\000\
+\\104\000\168\000\105\000\167\000\106\000\166\000\107\000\165\000\
+\\108\000\164\000\110\000\163\000\111\000\162\000\112\000\161\000\
+\\113\000\160\000\117\000\159\000\118\000\158\000\119\000\157\000\
+\\120\000\156\000\121\000\155\000\122\000\154\000\123\000\153\000\
+\\124\000\152\000\125\000\255\001\144\000\061\000\145\000\060\000\
+\\146\000\059\000\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\080\000\002\002\
+\\142\000\001\002\145\000\081\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\079\000\007\002\
+\\080\000\006\002\081\000\084\001\145\000\081\001\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\078\000\170\001\
+\\079\000\169\001\080\000\168\001\081\000\084\001\145\000\081\001\000\000\
+\\000\000\
+\\009\000\089\001\011\000\088\001\047\000\087\001\078\000\214\001\
+\\079\000\169\001\080\000\168\001\081\000\084\001\145\000\081\001\000\000\
 \\000\000\
 \"
-val numstates = 498
-val numrules = 282
+val numstates = 524
+val numrules = 290
 val s = Unsynchronized.ref "" and index = Unsynchronized.ref 0
 val string_to_int = fn () => 
 let val i = !index
@@ -3266,12 +3493,18 @@
 structure MlyValue = 
 struct
 datatype svalue = VOID | ntVOID of unit
- | ATOMIC_SYSTEM_WORD of  (string) | ATOMIC_DEFINED_WORD of  (string)
+ | DOLLAR_DOLLAR_WORD of  (string) | DOLLAR_WORD of  (string)
  | DISTINCT_OBJECT of  (string) | COMMENT of  (string)
  | LOWER_WORD of  (string) | UPPER_WORD of  (string)
  | SINGLE_QUOTED of  (string) | DOT_DECIMAL of  (string)
  | UNSIGNED_INTEGER of  (string) | SIGNED_INTEGER of  (string)
- | RATIONAL of  (string) | REAL of  (string) | tptp of  (tptp_problem)
+ | RATIONAL of  (string) | REAL of  (string)
+ | atomic_system_word of  (string) | atomic_defined_word of  (string)
+ | let_term of  (tptp_term) | tff_type_arguments of  (tptp_type list)
+ | tff_monotype of  (tptp_type) | tff_quantified_type of  (tptp_type)
+ | tff_let_formula_defn of  (tptp_let list)
+ | tff_let_term_defn of  (tptp_let list) | tff_let of  (tptp_formula)
+ | thf_let_defn of  (tptp_let list) | tptp of  (tptp_problem)
  | tptp_file of  (tptp_problem) | tptp_input of  (tptp_line)
  | include_ of  (tptp_line) | annotated_formula of  (tptp_line)
  | thf_annotated of  (tptp_line) | tff_annotated of  (tptp_line)
@@ -3296,8 +3529,7 @@
  | thf_unitary_type of  (tptp_type) | thf_binary_type of  (tptp_type)
  | thf_mapping_type of  (tptp_type) | thf_xprod_type of  (tptp_type)
  | thf_union_type of  (tptp_type) | thf_atom of  (tptp_formula)
- | thf_let of  (tptp_formula) | thf_let_list of  (tptp_let list)
- | thf_defined_var of  (tptp_let) | thf_conditional of  (tptp_formula)
+ | thf_let of  (tptp_formula) | thf_conditional of  (tptp_formula)
  | thf_sequent of  (tptp_formula)
  | thf_tuple_list of  (tptp_formula list)
  | thf_tuple of  (tptp_formula list) | tff_formula of  (tptp_formula)
@@ -3318,9 +3550,7 @@
  | tff_top_level_type of  (tptp_type)
  | tff_unitary_type of  (tptp_type) | tff_atomic_type of  (tptp_type)
  | tff_mapping_type of  (tptp_type) | tff_xprod_type of  (tptp_type)
- | tptp_let of  (tptp_formula) | tff_let_list of  (tptp_let list)
- | tff_defined_var of  (tptp_let) | tff_conditional of  (tptp_formula)
- | tff_sequent of  (tptp_formula)
+ | tff_conditional of  (tptp_formula) | tff_sequent of  (tptp_formula)
  | tff_tuple_list of  (tptp_formula list)
  | tff_tuple of  (tptp_formula list) | fof_formula of  (tptp_formula)
  | fof_logic_formula of  (tptp_formula)
@@ -3397,7 +3627,7 @@
   | (T 6) => "EXCLAMATION"
   | (T 7) => "LET"
   | (T 8) => "ARROW"
-  | (T 9) => "IF"
+  | (T 9) => "FI"
   | (T 10) => "IFF"
   | (T 11) => "IMPLIES"
   | (T 12) => "INCLUDE"
@@ -3451,8 +3681,8 @@
   | (T 60) => "GENTZEN_ARROW"
   | (T 61) => "DEP_SUM"
   | (T 62) => "DEP_PROD"
-  | (T 63) => "ATOMIC_DEFINED_WORD"
-  | (T 64) => "ATOMIC_SYSTEM_WORD"
+  | (T 63) => "DOLLAR_WORD"
+  | (T 64) => "DOLLAR_DOLLAR_WORD"
   | (T 65) => "SUBTYPE"
   | (T 66) => "LET_TERM"
   | (T 67) => "THF"
@@ -3461,21 +3691,26 @@
   | (T 70) => "CNF"
   | (T 71) => "ITE_F"
   | (T 72) => "ITE_T"
+  | (T 73) => "LET_TF"
+  | (T 74) => "LET_FF"
+  | (T 75) => "LET_FT"
+  | (T 76) => "LET_TT"
   | _ => "bogus-term"
 local open Header in
 val errtermvalue=
 fn _ => MlyValue.VOID
 end
 val terms : term list = nil
- $$ (T 72) $$ (T 71) $$ (T 70) $$ (T 69) $$ (T 68) $$ (T 67) $$ (T 66)
- $$ (T 65) $$ (T 62) $$ (T 61) $$ (T 60) $$ (T 59) $$ (T 58) $$ (T 57)
- $$ (T 56) $$ (T 55) $$ (T 54) $$ (T 53) $$ (T 42) $$ (T 41) $$ (T 40)
- $$ (T 39) $$ (T 38) $$ (T 37) $$ (T 36) $$ (T 35) $$ (T 34) $$ (T 33)
- $$ (T 32) $$ (T 31) $$ (T 30) $$ (T 29) $$ (T 28) $$ (T 27) $$ (T 26)
- $$ (T 25) $$ (T 24) $$ (T 23) $$ (T 22) $$ (T 21) $$ (T 20) $$ (T 19)
- $$ (T 18) $$ (T 17) $$ (T 16) $$ (T 15) $$ (T 14) $$ (T 13) $$ (T 12)
- $$ (T 11) $$ (T 10) $$ (T 9) $$ (T 8) $$ (T 7) $$ (T 6) $$ (T 5) $$ 
-(T 4) $$ (T 3) $$ (T 2) $$ (T 1) $$ (T 0)end
+ $$ (T 76) $$ (T 75) $$ (T 74) $$ (T 73) $$ (T 72) $$ (T 71) $$ (T 70)
+ $$ (T 69) $$ (T 68) $$ (T 67) $$ (T 66) $$ (T 65) $$ (T 62) $$ (T 61)
+ $$ (T 60) $$ (T 59) $$ (T 58) $$ (T 57) $$ (T 56) $$ (T 55) $$ (T 54)
+ $$ (T 53) $$ (T 42) $$ (T 41) $$ (T 40) $$ (T 39) $$ (T 38) $$ (T 37)
+ $$ (T 36) $$ (T 35) $$ (T 34) $$ (T 33) $$ (T 32) $$ (T 31) $$ (T 30)
+ $$ (T 29) $$ (T 28) $$ (T 27) $$ (T 26) $$ (T 25) $$ (T 24) $$ (T 23)
+ $$ (T 22) $$ (T 21) $$ (T 20) $$ (T 19) $$ (T 18) $$ (T 17) $$ (T 16)
+ $$ (T 15) $$ (T 14) $$ (T 13) $$ (T 12) $$ (T 11) $$ (T 10) $$ (T 9)
+ $$ (T 8) $$ (T 7) $$ (T 6) $$ (T 5) $$ (T 4) $$ (T 3) $$ (T 2) $$ (T 
+1) $$ (T 0)end
 structure Actions =
 struct 
 exception mlyAction of int
@@ -3484,292 +3719,1675 @@
 fn (i392,defaultPos,stack,
     (file_name):arg) =>
 case (i392,stack)
-of  ( 0, ( ( _, ( MlyValue.optional_info optional_info, _, 
+of  ( 0, ( ( _, ( MlyValue.tptp_file tptp_file, tptp_file1left, 
+tptp_file1right)) :: rest671)) => let val  result = MlyValue.tptp (
+( tptp_file ))
+ in ( LrTable.NT 135, ( result, tptp_file1left, tptp_file1right), 
+rest671)
+end
+|  ( 1, ( ( _, ( MlyValue.tptp_file tptp_file, _, tptp_file1right)) ::
+ ( _, ( MlyValue.tptp_input tptp_input, tptp_input1left, _)) :: 
+rest671)) => let val  result = MlyValue.tptp_file (
+( tptp_input :: tptp_file ))
+ in ( LrTable.NT 134, ( result, tptp_input1left, tptp_file1right), 
+rest671)
+end
+|  ( 2, ( ( _, ( MlyValue.tptp_file tptp_file, _, tptp_file1right)) ::
+ ( _, ( _, COMMENT1left, _)) :: rest671)) => let val  result = 
+MlyValue.tptp_file (( tptp_file ))
+ in ( LrTable.NT 134, ( result, COMMENT1left, tptp_file1right), 
+rest671)
+end
+|  ( 3, ( rest671)) => let val  result = MlyValue.tptp_file (( [] ))
+ in ( LrTable.NT 134, ( result, defaultPos, defaultPos), rest671)
+end
+|  ( 4, ( ( _, ( MlyValue.annotated_formula annotated_formula, 
+annotated_formula1left, annotated_formula1right)) :: rest671)) => let
+ val  result = MlyValue.tptp_input (( annotated_formula ))
+ in ( LrTable.NT 133, ( result, annotated_formula1left, 
+annotated_formula1right), rest671)
+end
+|  ( 5, ( ( _, ( MlyValue.include_ include_, include_1left, 
+include_1right)) :: rest671)) => let val  result = MlyValue.tptp_input
+ (( include_ ))
+ in ( LrTable.NT 133, ( result, include_1left, include_1right), 
+rest671)
+end
+|  ( 6, ( ( _, ( MlyValue.thf_annotated thf_annotated, 
+thf_annotated1left, thf_annotated1right)) :: rest671)) => let val  
+result = MlyValue.annotated_formula (( thf_annotated ))
+ in ( LrTable.NT 131, ( result, thf_annotated1left, 
+thf_annotated1right), rest671)
+end
+|  ( 7, ( ( _, ( MlyValue.tff_annotated tff_annotated, 
+tff_annotated1left, tff_annotated1right)) :: rest671)) => let val  
+result = MlyValue.annotated_formula (( tff_annotated ))
+ in ( LrTable.NT 131, ( result, tff_annotated1left, 
+tff_annotated1right), rest671)
+end
+|  ( 8, ( ( _, ( MlyValue.fof_annotated fof_annotated, 
+fof_annotated1left, fof_annotated1right)) :: rest671)) => let val  
+result = MlyValue.annotated_formula (( fof_annotated ))
+ in ( LrTable.NT 131, ( result, fof_annotated1left, 
+fof_annotated1right), rest671)
+end
+|  ( 9, ( ( _, ( MlyValue.cnf_annotated cnf_annotated, 
+cnf_annotated1left, cnf_annotated1right)) :: rest671)) => let val  
+result = MlyValue.annotated_formula (( cnf_annotated ))
+ in ( LrTable.NT 131, ( result, cnf_annotated1left, 
+cnf_annotated1right), rest671)
+end
+|  ( 10, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
+MlyValue.annotations annotations, _, _)) :: ( _, ( 
+MlyValue.thf_formula thf_formula, _, _)) :: _ :: ( _, ( 
+MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
+MlyValue.name name, _, _)) :: _ :: ( _, ( _, (THFleft as THF1left), 
+THFright)) :: rest671)) => let val  result = MlyValue.thf_annotated (
+(
+  Annotated_Formula ((file_name, THFleft + 1, THFright + 1),
+   THF, name, formula_role, thf_formula, annotations)
+)
+)
+ in ( LrTable.NT 130, ( result, THF1left, PERIOD1right), rest671)
+end
+|  ( 11, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
+MlyValue.annotations annotations, _, _)) :: ( _, ( 
+MlyValue.tff_formula tff_formula, _, _)) :: _ :: ( _, ( 
+MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
+MlyValue.name name, _, _)) :: _ :: ( _, ( _, (TFFleft as TFF1left), 
+TFFright)) :: rest671)) => let val  result = MlyValue.tff_annotated (
+(
+  Annotated_Formula ((file_name, TFFleft + 1, TFFright + 1),
+   TFF, name, formula_role, tff_formula, annotations)
+)
+)
+ in ( LrTable.NT 129, ( result, TFF1left, PERIOD1right), rest671)
+end
+|  ( 12, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
+MlyValue.annotations annotations, _, _)) :: ( _, ( 
+MlyValue.fof_formula fof_formula, _, _)) :: _ :: ( _, ( 
+MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
+MlyValue.name name, _, _)) :: _ :: ( _, ( _, (FOFleft as FOF1left), 
+FOFright)) :: rest671)) => let val  result = MlyValue.fof_annotated (
+(
+  Annotated_Formula ((file_name, FOFleft + 1, FOFright + 1),
+   FOF, name, formula_role, fof_formula, annotations)
+)
+)
+ in ( LrTable.NT 128, ( result, FOF1left, PERIOD1right), rest671)
+end
+|  ( 13, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
+MlyValue.annotations annotations, _, _)) :: ( _, ( 
+MlyValue.cnf_formula cnf_formula, _, _)) :: _ :: ( _, ( 
+MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
+MlyValue.name name, _, _)) :: _ :: ( _, ( _, (CNFleft as CNF1left), 
+CNFright)) :: rest671)) => let val  result = MlyValue.cnf_annotated (
+(
+  Annotated_Formula ((file_name, CNFleft + 1, CNFright + 1),
+   CNF, name, formula_role, cnf_formula, annotations)
+)
+)
+ in ( LrTable.NT 127, ( result, CNF1left, PERIOD1right), rest671)
+end
+|  ( 14, ( ( _, ( MlyValue.optional_info optional_info, _, 
 optional_info1right)) :: ( _, ( MlyValue.general_term general_term, _,
  _)) :: ( _, ( _, COMMA1left, _)) :: rest671)) => let val  result = 
 MlyValue.annotations (( SOME (general_term, optional_info) ))
  in ( LrTable.NT 0, ( result, COMMA1left, optional_info1right), 
 rest671)
 end
-|  ( 1, ( rest671)) => let val  result = MlyValue.annotations (
+|  ( 15, ( rest671)) => let val  result = MlyValue.annotations (
 ( NONE ))
  in ( LrTable.NT 0, ( result, defaultPos, defaultPos), rest671)
 end
-|  ( 2, ( ( _, ( MlyValue.useful_info useful_info, _, 
-useful_info1right)) :: ( _, ( _, COMMA1left, _)) :: rest671)) => let
- val  result = MlyValue.optional_info (( useful_info ))
- in ( LrTable.NT 4, ( result, COMMA1left, useful_info1right), rest671)
+|  ( 16, ( ( _, ( MlyValue.LOWER_WORD LOWER_WORD, LOWER_WORD1left, 
+LOWER_WORD1right)) :: rest671)) => let val  result = 
+MlyValue.formula_role (( classify_role LOWER_WORD ))
+ in ( LrTable.NT 126, ( result, LOWER_WORD1left, LOWER_WORD1right), 
+rest671)
+end
+|  ( 17, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, 
+thf_logic_formula1left, thf_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.thf_formula (( thf_logic_formula ))
+ in ( LrTable.NT 125, ( result, thf_logic_formula1left, 
+thf_logic_formula1right), rest671)
+end
+|  ( 18, ( ( _, ( MlyValue.thf_sequent thf_sequent, thf_sequent1left, 
+thf_sequent1right)) :: rest671)) => let val  result = 
+MlyValue.thf_formula (( thf_sequent ))
+ in ( LrTable.NT 125, ( result, thf_sequent1left, thf_sequent1right), 
+rest671)
+end
+|  ( 19, ( ( _, ( MlyValue.thf_binary_formula thf_binary_formula, 
+thf_binary_formula1left, thf_binary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_logic_formula (( thf_binary_formula ))
+ in ( LrTable.NT 124, ( result, thf_binary_formula1left, 
+thf_binary_formula1right), rest671)
+end
+|  ( 20, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, 
+thf_unitary_formula1left, thf_unitary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_logic_formula (( thf_unitary_formula )
+)
+ in ( LrTable.NT 124, ( result, thf_unitary_formula1left, 
+thf_unitary_formula1right), rest671)
+end
+|  ( 21, ( ( _, ( MlyValue.thf_type_formula thf_type_formula, 
+thf_type_formula1left, thf_type_formula1right)) :: rest671)) => let
+ val  result = MlyValue.thf_logic_formula (
+( THF_typing thf_type_formula ))
+ in ( LrTable.NT 124, ( result, thf_type_formula1left, 
+thf_type_formula1right), rest671)
+end
+|  ( 22, ( ( _, ( MlyValue.thf_subtype thf_subtype, thf_subtype1left, 
+thf_subtype1right)) :: rest671)) => let val  result = 
+MlyValue.thf_logic_formula (( Type_fmla thf_subtype ))
+ in ( LrTable.NT 124, ( result, thf_subtype1left, thf_subtype1right), 
+rest671)
+end
+|  ( 23, ( ( _, ( MlyValue.thf_binary_pair thf_binary_pair, 
+thf_binary_pair1left, thf_binary_pair1right)) :: rest671)) => let val 
+ result = MlyValue.thf_binary_formula (( thf_binary_pair ))
+ in ( LrTable.NT 123, ( result, thf_binary_pair1left, 
+thf_binary_pair1right), rest671)
+end
+|  ( 24, ( ( _, ( MlyValue.thf_binary_tuple thf_binary_tuple, 
+thf_binary_tuple1left, thf_binary_tuple1right)) :: rest671)) => let
+ val  result = MlyValue.thf_binary_formula (( thf_binary_tuple ))
+ in ( LrTable.NT 123, ( result, thf_binary_tuple1left, 
+thf_binary_tuple1right), rest671)
+end
+|  ( 25, ( ( _, ( MlyValue.thf_binary_type thf_binary_type, 
+thf_binary_type1left, thf_binary_type1right)) :: rest671)) => let val 
+ result = MlyValue.thf_binary_formula (( Type_fmla thf_binary_type ))
+ in ( LrTable.NT 123, ( result, thf_binary_type1left, 
+thf_binary_type1right), rest671)
+end
+|  ( 26, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2, _
+, thf_unitary_formula2right)) :: ( _, ( MlyValue.thf_pair_connective 
+thf_pair_connective, _, _)) :: ( _, ( MlyValue.thf_unitary_formula 
+thf_unitary_formula1, thf_unitary_formula1left, _)) :: rest671)) =>
+ let val  result = MlyValue.thf_binary_pair (
+(
+  Fmla (thf_pair_connective, [thf_unitary_formula1, thf_unitary_formula2])
+)
+)
+ in ( LrTable.NT 122, ( result, thf_unitary_formula1left, 
+thf_unitary_formula2right), rest671)
+end
+|  ( 27, ( ( _, ( MlyValue.thf_or_formula thf_or_formula, 
+thf_or_formula1left, thf_or_formula1right)) :: rest671)) => let val  
+result = MlyValue.thf_binary_tuple (( thf_or_formula ))
+ in ( LrTable.NT 121, ( result, thf_or_formula1left, 
+thf_or_formula1right), rest671)
+end
+|  ( 28, ( ( _, ( MlyValue.thf_and_formula thf_and_formula, 
+thf_and_formula1left, thf_and_formula1right)) :: rest671)) => let val 
+ result = MlyValue.thf_binary_tuple (( thf_and_formula ))
+ in ( LrTable.NT 121, ( result, thf_and_formula1left, 
+thf_and_formula1right), rest671)
+end
+|  ( 29, ( ( _, ( MlyValue.thf_apply_formula thf_apply_formula, 
+thf_apply_formula1left, thf_apply_formula1right)) :: rest671)) => let
+ val  result = MlyValue.thf_binary_tuple (( thf_apply_formula ))
+ in ( LrTable.NT 121, ( result, thf_apply_formula1left, 
+thf_apply_formula1right), rest671)
+end
+|  ( 30, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2, _
+, thf_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.thf_unitary_formula thf_unitary_formula1, 
+thf_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_or_formula (
+( Fmla (Interpreted_Logic Or, [thf_unitary_formula1, thf_unitary_formula2]) )
+)
+ in ( LrTable.NT 120, ( result, thf_unitary_formula1left, 
+thf_unitary_formula2right), rest671)
+end
+|  ( 31, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _,
+ thf_unitary_formula1right)) :: _ :: ( _, ( MlyValue.thf_or_formula 
+thf_or_formula, thf_or_formula1left, _)) :: rest671)) => let val  
+result = MlyValue.thf_or_formula (
+( Fmla (Interpreted_Logic Or, [thf_or_formula, thf_unitary_formula]) )
+)
+ in ( LrTable.NT 120, ( result, thf_or_formula1left, 
+thf_unitary_formula1right), rest671)
+end
+|  ( 32, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2, _
+, thf_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.thf_unitary_formula thf_unitary_formula1, 
+thf_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_and_formula (
+( Fmla (Interpreted_Logic And, [thf_unitary_formula1, thf_unitary_formula2]) )
+)
+ in ( LrTable.NT 119, ( result, thf_unitary_formula1left, 
+thf_unitary_formula2right), rest671)
+end
+|  ( 33, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _,
+ thf_unitary_formula1right)) :: _ :: ( _, ( MlyValue.thf_and_formula 
+thf_and_formula, thf_and_formula1left, _)) :: rest671)) => let val  
+result = MlyValue.thf_and_formula (
+( Fmla (Interpreted_Logic And, [thf_and_formula, thf_unitary_formula]) )
+)
+ in ( LrTable.NT 119, ( result, thf_and_formula1left, 
+thf_unitary_formula1right), rest671)
+end
+|  ( 34, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2, _
+, thf_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.thf_unitary_formula thf_unitary_formula1, 
+thf_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_apply_formula (
+( Fmla (Interpreted_ExtraLogic Apply, [thf_unitary_formula1, thf_unitary_formula2]) )
+)
+ in ( LrTable.NT 118, ( result, thf_unitary_formula1left, 
+thf_unitary_formula2right), rest671)
+end
+|  ( 35, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _,
+ thf_unitary_formula1right)) :: _ :: ( _, ( MlyValue.thf_apply_formula
+ thf_apply_formula, thf_apply_formula1left, _)) :: rest671)) => let
+ val  result = MlyValue.thf_apply_formula (
+( Fmla (Interpreted_ExtraLogic Apply, [thf_apply_formula, thf_unitary_formula]) )
+)
+ in ( LrTable.NT 118, ( result, thf_apply_formula1left, 
+thf_unitary_formula1right), rest671)
+end
+|  ( 36, ( ( _, ( MlyValue.thf_quantified_formula 
+thf_quantified_formula, thf_quantified_formula1left, 
+thf_quantified_formula1right)) :: rest671)) => let val  result = 
+MlyValue.thf_unitary_formula (( thf_quantified_formula ))
+ in ( LrTable.NT 117, ( result, thf_quantified_formula1left, 
+thf_quantified_formula1right), rest671)
+end
+|  ( 37, ( ( _, ( MlyValue.thf_unary_formula thf_unary_formula, 
+thf_unary_formula1left, thf_unary_formula1right)) :: rest671)) => let
+ val  result = MlyValue.thf_unitary_formula (( thf_unary_formula ))
+ in ( LrTable.NT 117, ( result, thf_unary_formula1left, 
+thf_unary_formula1right), rest671)
+end
+|  ( 38, ( ( _, ( MlyValue.thf_atom thf_atom, thf_atom1left, 
+thf_atom1right)) :: rest671)) => let val  result = 
+MlyValue.thf_unitary_formula (( thf_atom ))
+ in ( LrTable.NT 117, ( result, thf_atom1left, thf_atom1right), 
+rest671)
+end
+|  ( 39, ( ( _, ( MlyValue.thf_conditional thf_conditional, 
+thf_conditional1left, thf_conditional1right)) :: rest671)) => let val 
+ result = MlyValue.thf_unitary_formula (( thf_conditional ))
+ in ( LrTable.NT 117, ( result, thf_conditional1left, 
+thf_conditional1right), rest671)
+end
+|  ( 40, ( ( _, ( MlyValue.thf_let thf_let, thf_let1left, 
+thf_let1right)) :: rest671)) => let val  result = 
+MlyValue.thf_unitary_formula (( thf_let ))
+ in ( LrTable.NT 117, ( result, thf_let1left, thf_let1right), rest671)
 
 end
-|  ( 3, ( rest671)) => let val  result = MlyValue.optional_info (
-( [] ))
- in ( LrTable.NT 4, ( result, defaultPos, defaultPos), rest671)
-end
-|  ( 4, ( ( _, ( MlyValue.general_list general_list, general_list1left
-, general_list1right)) :: rest671)) => let val  result = 
-MlyValue.useful_info (( general_list ))
- in ( LrTable.NT 16, ( result, general_list1left, general_list1right),
- rest671)
-end
-|  ( 5, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( MlyValue.general_terms 
-general_terms, _, _)) :: ( _, ( _, LBRKT1left, _)) :: rest671)) => let
- val  result = MlyValue.general_list (( general_terms ))
- in ( LrTable.NT 5, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 6, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) :: 
-rest671)) => let val  result = MlyValue.general_list (( [] ))
- in ( LrTable.NT 5, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 7, ( ( _, ( MlyValue.general_terms general_terms, _, 
-general_terms1right)) :: _ :: ( _, ( MlyValue.general_term 
-general_term, general_term1left, _)) :: rest671)) => let val  result =
- MlyValue.general_terms (( general_term :: general_terms ))
- in ( LrTable.NT 6, ( result, general_term1left, general_terms1right),
- rest671)
-end
-|  ( 8, ( ( _, ( MlyValue.general_term general_term, general_term1left
-, general_term1right)) :: rest671)) => let val  result = 
-MlyValue.general_terms (( [general_term] ))
- in ( LrTable.NT 6, ( result, general_term1left, general_term1right), 
+|  ( 41, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.thf_logic_formula thf_logic_formula, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_unitary_formula (( thf_logic_formula ))
+ in ( LrTable.NT 117, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 42, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _,
+ thf_unitary_formula1right)) :: _ :: _ :: ( _, ( 
+MlyValue.thf_variable_list thf_variable_list, _, _)) :: _ :: ( _, ( 
+MlyValue.thf_quantifier thf_quantifier, thf_quantifier1left, _)) :: 
+rest671)) => let val  result = MlyValue.thf_quantified_formula (
+(
+  Quant (thf_quantifier, thf_variable_list, thf_unitary_formula)
+))
+ in ( LrTable.NT 116, ( result, thf_quantifier1left, 
+thf_unitary_formula1right), rest671)
+end
+|  ( 43, ( ( _, ( MlyValue.thf_variable thf_variable, 
+thf_variable1left, thf_variable1right)) :: rest671)) => let val  
+result = MlyValue.thf_variable_list (( [thf_variable] ))
+ in ( LrTable.NT 115, ( result, thf_variable1left, thf_variable1right)
+, rest671)
+end
+|  ( 44, ( ( _, ( MlyValue.thf_variable_list thf_variable_list, _, 
+thf_variable_list1right)) :: _ :: ( _, ( MlyValue.thf_variable 
+thf_variable, thf_variable1left, _)) :: rest671)) => let val  result =
+ MlyValue.thf_variable_list (( thf_variable :: thf_variable_list ))
+ in ( LrTable.NT 115, ( result, thf_variable1left, 
+thf_variable_list1right), rest671)
+end
+|  ( 45, ( ( _, ( MlyValue.thf_typed_variable thf_typed_variable, 
+thf_typed_variable1left, thf_typed_variable1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_variable (( thf_typed_variable ))
+ in ( LrTable.NT 114, ( result, thf_typed_variable1left, 
+thf_typed_variable1right), rest671)
+end
+|  ( 46, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
+variable_1right)) :: rest671)) => let val  result = 
+MlyValue.thf_variable (( (variable_, NONE) ))
+ in ( LrTable.NT 114, ( result, variable_1left, variable_1right), 
 rest671)
 end
-|  ( 9, ( ( _, ( MlyValue.general_data general_data, general_data1left
-, general_data1right)) :: rest671)) => let val  result = 
-MlyValue.general_term (( General_Data general_data ))
- in ( LrTable.NT 7, ( result, general_data1left, general_data1right), 
-rest671)
-end
-|  ( 10, ( ( _, ( MlyValue.general_term general_term, _, 
-general_term1right)) :: _ :: ( _, ( MlyValue.general_data general_data
-, general_data1left, _)) :: rest671)) => let val  result = 
-MlyValue.general_term (( General_Term (general_data, general_term) ))
- in ( LrTable.NT 7, ( result, general_data1left, general_term1right), 
+|  ( 47, ( ( _, ( MlyValue.thf_top_level_type thf_top_level_type, _, 
+thf_top_level_type1right)) :: _ :: ( _, ( MlyValue.variable_ variable_
+, variable_1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_typed_variable (( (variable_, SOME thf_top_level_type) ))
+ in ( LrTable.NT 113, ( result, variable_1left, 
+thf_top_level_type1right), rest671)
+end
+|  ( 48, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.thf_logic_formula thf_logic_formula, _, _)) :: _ :: ( _, ( 
+MlyValue.thf_unary_connective thf_unary_connective, 
+thf_unary_connective1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_unary_formula (
+(
+  Fmla (thf_unary_connective, [thf_logic_formula])
+))
+ in ( LrTable.NT 112, ( result, thf_unary_connective1left, 
+RPAREN1right), rest671)
+end
+|  ( 49, ( ( _, ( MlyValue.term term, term1left, term1right)) :: 
+rest671)) => let val  result = MlyValue.thf_atom (
+( Atom (THF_Atom_term term) ))
+ in ( LrTable.NT 102, ( result, term1left, term1right), rest671)
+end
+|  ( 50, ( ( _, ( MlyValue.thf_conn_term thf_conn_term, 
+thf_conn_term1left, thf_conn_term1right)) :: rest671)) => let val  
+result = MlyValue.thf_atom (
+( Atom (THF_Atom_conn_term thf_conn_term) ))
+ in ( LrTable.NT 102, ( result, thf_conn_term1left, 
+thf_conn_term1right), rest671)
+end
+|  ( 51, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.thf_logic_formula thf_logic_formula3, _, _)) :: _ :: ( _, ( 
+MlyValue.thf_logic_formula thf_logic_formula2, _, _)) :: _ :: ( _, ( 
+MlyValue.thf_logic_formula thf_logic_formula1, _, _)) :: _ :: ( _, ( _
+, ITE_F1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_conditional (
+(
+  Conditional (thf_logic_formula1, thf_logic_formula2, thf_logic_formula3)
+)
+)
+ in ( LrTable.NT 100, ( result, ITE_F1left, RPAREN1right), rest671)
+
+end
+|  ( 52, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.thf_formula 
+thf_formula, _, _)) :: _ :: ( _, ( MlyValue.thf_let_defn thf_let_defn,
+ _, _)) :: _ :: ( _, ( _, LET_TF1left, _)) :: rest671)) => let val  
+result = MlyValue.thf_let ((
+  Let (thf_let_defn, thf_formula)
+))
+ in ( LrTable.NT 101, ( result, LET_TF1left, RPAREN1right), rest671)
+
+end
+|  ( 53, ( ( _, ( MlyValue.thf_quantified_formula 
+thf_quantified_formula, thf_quantified_formula1left, 
+thf_quantified_formula1right)) :: rest671)) => let val  result = 
+MlyValue.thf_let_defn (
+(
+  let
+    val (_, vars, fmla) = extract_quant_info thf_quantified_formula
+  in [Let_fmla (hd vars, fmla)]
+  end
+)
+)
+ in ( LrTable.NT 136, ( result, thf_quantified_formula1left, 
+thf_quantified_formula1right), rest671)
+end
+|  ( 54, ( ( _, ( MlyValue.thf_top_level_type thf_top_level_type, _, 
+thf_top_level_type1right)) :: _ :: ( _, ( 
+MlyValue.thf_typeable_formula thf_typeable_formula, 
+thf_typeable_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_type_formula (
+( (thf_typeable_formula, thf_top_level_type) ))
+ in ( LrTable.NT 111, ( result, thf_typeable_formula1left, 
+thf_top_level_type1right), rest671)
+end
+|  ( 55, ( ( _, ( MlyValue.thf_atom thf_atom, thf_atom1left, 
+thf_atom1right)) :: rest671)) => let val  result = 
+MlyValue.thf_typeable_formula (( thf_atom ))
+ in ( LrTable.NT 110, ( result, thf_atom1left, thf_atom1right), 
 rest671)
 end
-|  ( 11, ( ( _, ( MlyValue.general_list general_list, 
-general_list1left, general_list1right)) :: rest671)) => let val  
-result = MlyValue.general_term (( General_List general_list ))
- in ( LrTable.NT 7, ( result, general_list1left, general_list1right), 
+|  ( 56, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.thf_logic_formula thf_logic_formula, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.thf_typeable_formula (( thf_logic_formula ))
+ in ( LrTable.NT 110, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 57, ( ( _, ( MlyValue.constant constant2, _, constant2right)) ::
+ _ :: ( _, ( MlyValue.constant constant1, constant1left, _)) :: 
+rest671)) => let val  result = MlyValue.thf_subtype (
+( Subtype(constant1, constant2) ))
+ in ( LrTable.NT 109, ( result, constant1left, constant2right), 
 rest671)
 end
-|  ( 12, ( ( _, ( MlyValue.LOWER_WORD LOWER_WORD, LOWER_WORD1left, 
-LOWER_WORD1right)) :: rest671)) => let val  result = 
-MlyValue.atomic_word (( LOWER_WORD ))
- in ( LrTable.NT 8, ( result, LOWER_WORD1left, LOWER_WORD1right), 
+|  ( 58, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, 
+thf_logic_formula1left, thf_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.thf_top_level_type (
+( Fmla_type thf_logic_formula ))
+ in ( LrTable.NT 108, ( result, thf_logic_formula1left, 
+thf_logic_formula1right), rest671)
+end
+|  ( 59, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, 
+thf_unitary_formula1left, thf_unitary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_unitary_type (
+( Fmla_type thf_unitary_formula ))
+ in ( LrTable.NT 107, ( result, thf_unitary_formula1left, 
+thf_unitary_formula1right), rest671)
+end
+|  ( 60, ( ( _, ( MlyValue.thf_mapping_type thf_mapping_type, 
+thf_mapping_type1left, thf_mapping_type1right)) :: rest671)) => let
+ val  result = MlyValue.thf_binary_type (( thf_mapping_type ))
+ in ( LrTable.NT 106, ( result, thf_mapping_type1left, 
+thf_mapping_type1right), rest671)
+end
+|  ( 61, ( ( _, ( MlyValue.thf_xprod_type thf_xprod_type, 
+thf_xprod_type1left, thf_xprod_type1right)) :: rest671)) => let val  
+result = MlyValue.thf_binary_type (( thf_xprod_type ))
+ in ( LrTable.NT 106, ( result, thf_xprod_type1left, 
+thf_xprod_type1right), rest671)
+end
+|  ( 62, ( ( _, ( MlyValue.thf_union_type thf_union_type, 
+thf_union_type1left, thf_union_type1right)) :: rest671)) => let val  
+result = MlyValue.thf_binary_type (( thf_union_type ))
+ in ( LrTable.NT 106, ( result, thf_union_type1left, 
+thf_union_type1right), rest671)
+end
+|  ( 63, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type2, _, 
+thf_unitary_type2right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
+thf_unitary_type1, thf_unitary_type1left, _)) :: rest671)) => let val 
+ result = MlyValue.thf_mapping_type (
+( Fn_type(thf_unitary_type1, thf_unitary_type2) ))
+ in ( LrTable.NT 105, ( result, thf_unitary_type1left, 
+thf_unitary_type2right), rest671)
+end
+|  ( 64, ( ( _, ( MlyValue.thf_mapping_type thf_mapping_type, _, 
+thf_mapping_type1right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
+thf_unitary_type, thf_unitary_type1left, _)) :: rest671)) => let val  
+result = MlyValue.thf_mapping_type (
+( Fn_type(thf_unitary_type, thf_mapping_type) ))
+ in ( LrTable.NT 105, ( result, thf_unitary_type1left, 
+thf_mapping_type1right), rest671)
+end
+|  ( 65, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type2, _, 
+thf_unitary_type2right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
+thf_unitary_type1, thf_unitary_type1left, _)) :: rest671)) => let val 
+ result = MlyValue.thf_xprod_type (
+( Prod_type(thf_unitary_type1, thf_unitary_type2) ))
+ in ( LrTable.NT 104, ( result, thf_unitary_type1left, 
+thf_unitary_type2right), rest671)
+end
+|  ( 66, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type, _, 
+thf_unitary_type1right)) :: _ :: ( _, ( MlyValue.thf_xprod_type 
+thf_xprod_type, thf_xprod_type1left, _)) :: rest671)) => let val  
+result = MlyValue.thf_xprod_type (
+( Prod_type(thf_xprod_type, thf_unitary_type) ))
+ in ( LrTable.NT 104, ( result, thf_xprod_type1left, 
+thf_unitary_type1right), rest671)
+end
+|  ( 67, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type2, _, 
+thf_unitary_type2right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
+thf_unitary_type1, thf_unitary_type1left, _)) :: rest671)) => let val 
+ result = MlyValue.thf_union_type (
+( Sum_type(thf_unitary_type1, thf_unitary_type2) ))
+ in ( LrTable.NT 103, ( result, thf_unitary_type1left, 
+thf_unitary_type2right), rest671)
+end
+|  ( 68, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type, _, 
+thf_unitary_type1right)) :: _ :: ( _, ( MlyValue.thf_union_type 
+thf_union_type, thf_union_type1left, _)) :: rest671)) => let val  
+result = MlyValue.thf_union_type (
+( Sum_type(thf_union_type, thf_unitary_type) ))
+ in ( LrTable.NT 103, ( result, thf_union_type1left, 
+thf_unitary_type1right), rest671)
+end
+|  ( 69, ( ( _, ( MlyValue.thf_tuple thf_tuple2, _, thf_tuple2right))
+ :: _ :: ( _, ( MlyValue.thf_tuple thf_tuple1, thf_tuple1left, _)) :: 
+rest671)) => let val  result = MlyValue.thf_sequent (
+( Sequent(thf_tuple1, thf_tuple2) ))
+ in ( LrTable.NT 99, ( result, thf_tuple1left, thf_tuple2right), 
+rest671)
+end
+|  ( 70, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.thf_sequent 
+thf_sequent, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
+ val  result = MlyValue.thf_sequent (( thf_sequent ))
+ in ( LrTable.NT 99, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 71, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) :: 
+rest671)) => let val  result = MlyValue.thf_tuple (( [] ))
+ in ( LrTable.NT 97, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 72, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
+MlyValue.thf_tuple_list thf_tuple_list, _, _)) :: ( _, ( _, LBRKT1left
+, _)) :: rest671)) => let val  result = MlyValue.thf_tuple (
+( thf_tuple_list ))
+ in ( LrTable.NT 97, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 73, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, 
+thf_logic_formula1left, thf_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.thf_tuple_list (( [thf_logic_formula] ))
+ in ( LrTable.NT 98, ( result, thf_logic_formula1left, 
+thf_logic_formula1right), rest671)
+end
+|  ( 74, ( ( _, ( MlyValue.thf_tuple_list thf_tuple_list, _, 
+thf_tuple_list1right)) :: _ :: ( _, ( MlyValue.thf_logic_formula 
+thf_logic_formula, thf_logic_formula1left, _)) :: rest671)) => let
+ val  result = MlyValue.thf_tuple_list (
+( thf_logic_formula :: thf_tuple_list ))
+ in ( LrTable.NT 98, ( result, thf_logic_formula1left, 
+thf_tuple_list1right), rest671)
+end
+|  ( 75, ( ( _, ( MlyValue.tff_logic_formula tff_logic_formula, 
+tff_logic_formula1left, tff_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.tff_formula (( tff_logic_formula ))
+ in ( LrTable.NT 96, ( result, tff_logic_formula1left, 
+tff_logic_formula1right), rest671)
+end
+|  ( 76, ( ( _, ( MlyValue.tff_typed_atom tff_typed_atom, 
+tff_typed_atom1left, tff_typed_atom1right)) :: rest671)) => let val  
+result = MlyValue.tff_formula (
+( Atom (TFF_Typed_Atom tff_typed_atom) ))
+ in ( LrTable.NT 96, ( result, tff_typed_atom1left, 
+tff_typed_atom1right), rest671)
+end
+|  ( 77, ( ( _, ( MlyValue.tff_sequent tff_sequent, tff_sequent1left, 
+tff_sequent1right)) :: rest671)) => let val  result = 
+MlyValue.tff_formula (( tff_sequent ))
+ in ( LrTable.NT 96, ( result, tff_sequent1left, tff_sequent1right), 
 rest671)
 end
-|  ( 13, ( ( _, ( MlyValue.SINGLE_QUOTED SINGLE_QUOTED, 
-SINGLE_QUOTED1left, SINGLE_QUOTED1right)) :: rest671)) => let val  
-result = MlyValue.atomic_word (( SINGLE_QUOTED ))
- in ( LrTable.NT 8, ( result, SINGLE_QUOTED1left, SINGLE_QUOTED1right)
-, rest671)
-end
-|  ( 14, ( ( _, ( _, THF1left, THF1right)) :: rest671)) => let val  
-result = MlyValue.atomic_word (( "thf" ))
- in ( LrTable.NT 8, ( result, THF1left, THF1right), rest671)
-end
-|  ( 15, ( ( _, ( _, TFF1left, TFF1right)) :: rest671)) => let val  
-result = MlyValue.atomic_word (( "tff" ))
- in ( LrTable.NT 8, ( result, TFF1left, TFF1right), rest671)
-end
-|  ( 16, ( ( _, ( _, FOF1left, FOF1right)) :: rest671)) => let val  
-result = MlyValue.atomic_word (( "fof" ))
- in ( LrTable.NT 8, ( result, FOF1left, FOF1right), rest671)
-end
-|  ( 17, ( ( _, ( _, CNF1left, CNF1right)) :: rest671)) => let val  
-result = MlyValue.atomic_word (( "cnf" ))
- in ( LrTable.NT 8, ( result, CNF1left, CNF1right), rest671)
-end
-|  ( 18, ( ( _, ( _, INCLUDE1left, INCLUDE1right)) :: rest671)) => let
- val  result = MlyValue.atomic_word (( "include" ))
- in ( LrTable.NT 8, ( result, INCLUDE1left, INCLUDE1right), rest671)
+|  ( 78, ( ( _, ( MlyValue.tff_binary_formula tff_binary_formula, 
+tff_binary_formula1left, tff_binary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.tff_logic_formula (( tff_binary_formula ))
+ in ( LrTable.NT 95, ( result, tff_binary_formula1left, 
+tff_binary_formula1right), rest671)
+end
+|  ( 79, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, 
+tff_unitary_formula1left, tff_unitary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.tff_logic_formula (( tff_unitary_formula )
+)
+ in ( LrTable.NT 95, ( result, tff_unitary_formula1left, 
+tff_unitary_formula1right), rest671)
+end
+|  ( 80, ( ( _, ( MlyValue.tff_binary_nonassoc tff_binary_nonassoc, 
+tff_binary_nonassoc1left, tff_binary_nonassoc1right)) :: rest671)) =>
+ let val  result = MlyValue.tff_binary_formula (
+( tff_binary_nonassoc ))
+ in ( LrTable.NT 94, ( result, tff_binary_nonassoc1left, 
+tff_binary_nonassoc1right), rest671)
+end
+|  ( 81, ( ( _, ( MlyValue.tff_binary_assoc tff_binary_assoc, 
+tff_binary_assoc1left, tff_binary_assoc1right)) :: rest671)) => let
+ val  result = MlyValue.tff_binary_formula (( tff_binary_assoc ))
+ in ( LrTable.NT 94, ( result, tff_binary_assoc1left, 
+tff_binary_assoc1right), rest671)
+end
+|  ( 82, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula2, _
+, tff_unitary_formula2right)) :: ( _, ( MlyValue.binary_connective 
+binary_connective, _, _)) :: ( _, ( MlyValue.tff_unitary_formula 
+tff_unitary_formula1, tff_unitary_formula1left, _)) :: rest671)) =>
+ let val  result = MlyValue.tff_binary_nonassoc (
+( Fmla (binary_connective, [tff_unitary_formula1, tff_unitary_formula2]) )
+)
+ in ( LrTable.NT 93, ( result, tff_unitary_formula1left, 
+tff_unitary_formula2right), rest671)
+end
+|  ( 83, ( ( _, ( MlyValue.tff_or_formula tff_or_formula, 
+tff_or_formula1left, tff_or_formula1right)) :: rest671)) => let val  
+result = MlyValue.tff_binary_assoc (( tff_or_formula ))
+ in ( LrTable.NT 92, ( result, tff_or_formula1left, 
+tff_or_formula1right), rest671)
+end
+|  ( 84, ( ( _, ( MlyValue.tff_and_formula tff_and_formula, 
+tff_and_formula1left, tff_and_formula1right)) :: rest671)) => let val 
+ result = MlyValue.tff_binary_assoc (( tff_and_formula ))
+ in ( LrTable.NT 92, ( result, tff_and_formula1left, 
+tff_and_formula1right), rest671)
+end
+|  ( 85, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula2, _
+, tff_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.tff_unitary_formula tff_unitary_formula1, 
+tff_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_or_formula (
+( Fmla (Interpreted_Logic Or, [tff_unitary_formula1, tff_unitary_formula2]) )
+)
+ in ( LrTable.NT 91, ( result, tff_unitary_formula1left, 
+tff_unitary_formula2right), rest671)
+end
+|  ( 86, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _,
+ tff_unitary_formula1right)) :: _ :: ( _, ( MlyValue.tff_or_formula 
+tff_or_formula, tff_or_formula1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_or_formula (
+( Fmla (Interpreted_Logic Or, [tff_or_formula, tff_unitary_formula]) )
+)
+ in ( LrTable.NT 91, ( result, tff_or_formula1left, 
+tff_unitary_formula1right), rest671)
+end
+|  ( 87, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula2, _
+, tff_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.tff_unitary_formula tff_unitary_formula1, 
+tff_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_and_formula (
+( Fmla (Interpreted_Logic And, [tff_unitary_formula1, tff_unitary_formula2]) )
+)
+ in ( LrTable.NT 90, ( result, tff_unitary_formula1left, 
+tff_unitary_formula2right), rest671)
+end
+|  ( 88, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _,
+ tff_unitary_formula1right)) :: _ :: ( _, ( MlyValue.tff_and_formula 
+tff_and_formula, tff_and_formula1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_and_formula (
+( Fmla (Interpreted_Logic And, [tff_and_formula, tff_unitary_formula]) )
+)
+ in ( LrTable.NT 90, ( result, tff_and_formula1left, 
+tff_unitary_formula1right), rest671)
+end
+|  ( 89, ( ( _, ( MlyValue.tff_quantified_formula 
+tff_quantified_formula, tff_quantified_formula1left, 
+tff_quantified_formula1right)) :: rest671)) => let val  result = 
+MlyValue.tff_unitary_formula (( tff_quantified_formula ))
+ in ( LrTable.NT 89, ( result, tff_quantified_formula1left, 
+tff_quantified_formula1right), rest671)
+end
+|  ( 90, ( ( _, ( MlyValue.tff_unary_formula tff_unary_formula, 
+tff_unary_formula1left, tff_unary_formula1right)) :: rest671)) => let
+ val  result = MlyValue.tff_unitary_formula (( tff_unary_formula ))
+ in ( LrTable.NT 89, ( result, tff_unary_formula1left, 
+tff_unary_formula1right), rest671)
+end
+|  ( 91, ( ( _, ( MlyValue.atomic_formula atomic_formula, 
+atomic_formula1left, atomic_formula1right)) :: rest671)) => let val  
+result = MlyValue.tff_unitary_formula (( atomic_formula ))
+ in ( LrTable.NT 89, ( result, atomic_formula1left, 
+atomic_formula1right), rest671)
+end
+|  ( 92, ( ( _, ( MlyValue.tff_conditional tff_conditional, 
+tff_conditional1left, tff_conditional1right)) :: rest671)) => let val 
+ result = MlyValue.tff_unitary_formula (( tff_conditional ))
+ in ( LrTable.NT 89, ( result, tff_conditional1left, 
+tff_conditional1right), rest671)
+end
+|  ( 93, ( ( _, ( MlyValue.tff_let tff_let, tff_let1left, 
+tff_let1right)) :: rest671)) => let val  result = 
+MlyValue.tff_unitary_formula (( tff_let ))
+ in ( LrTable.NT 89, ( result, tff_let1left, tff_let1right), rest671)
 
 end
-|  ( 19, ( ( _, ( MlyValue.UPPER_WORD UPPER_WORD, UPPER_WORD1left, 
-UPPER_WORD1right)) :: rest671)) => let val  result = 
-MlyValue.variable_ (( UPPER_WORD ))
- in ( LrTable.NT 10, ( result, UPPER_WORD1left, UPPER_WORD1right), 
-rest671)
-end
-|  ( 20, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.general_terms general_terms, _, _)) :: _ :: ( _, ( 
-MlyValue.atomic_word atomic_word, atomic_word1left, _)) :: rest671))
- => let val  result = MlyValue.general_function (
-( Application (atomic_word, general_terms) ))
- in ( LrTable.NT 15, ( result, atomic_word1left, RPAREN1right), 
+|  ( 94, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_logic_formula tff_logic_formula, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_unitary_formula (( tff_logic_formula ))
+ in ( LrTable.NT 89, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 95, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _,
+ tff_unitary_formula1right)) :: _ :: _ :: ( _, ( 
+MlyValue.tff_variable_list tff_variable_list, _, _)) :: _ :: ( _, ( 
+MlyValue.fol_quantifier fol_quantifier, fol_quantifier1left, _)) :: 
+rest671)) => let val  result = MlyValue.tff_quantified_formula (
+(
+  Quant (fol_quantifier, tff_variable_list, tff_unitary_formula)
+))
+ in ( LrTable.NT 88, ( result, fol_quantifier1left, 
+tff_unitary_formula1right), rest671)
+end
+|  ( 96, ( ( _, ( MlyValue.tff_variable tff_variable, 
+tff_variable1left, tff_variable1right)) :: rest671)) => let val  
+result = MlyValue.tff_variable_list (( [tff_variable] ))
+ in ( LrTable.NT 87, ( result, tff_variable1left, tff_variable1right),
+ rest671)
+end
+|  ( 97, ( ( _, ( MlyValue.tff_variable_list tff_variable_list, _, 
+tff_variable_list1right)) :: _ :: ( _, ( MlyValue.tff_variable 
+tff_variable, tff_variable1left, _)) :: rest671)) => let val  result =
+ MlyValue.tff_variable_list (( tff_variable :: tff_variable_list ))
+ in ( LrTable.NT 87, ( result, tff_variable1left, 
+tff_variable_list1right), rest671)
+end
+|  ( 98, ( ( _, ( MlyValue.tff_typed_variable tff_typed_variable, 
+tff_typed_variable1left, tff_typed_variable1right)) :: rest671)) =>
+ let val  result = MlyValue.tff_variable (( tff_typed_variable ))
+ in ( LrTable.NT 86, ( result, tff_typed_variable1left, 
+tff_typed_variable1right), rest671)
+end
+|  ( 99, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
+variable_1right)) :: rest671)) => let val  result = 
+MlyValue.tff_variable (( (variable_, NONE) ))
+ in ( LrTable.NT 86, ( result, variable_1left, variable_1right), 
 rest671)
 end
-|  ( 21, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left, 
-atomic_word1right)) :: rest671)) => let val  result = 
-MlyValue.general_data (( Atomic_Word atomic_word ))
- in ( LrTable.NT 9, ( result, atomic_word1left, atomic_word1right), 
-rest671)
-end
-|  ( 22, ( ( _, ( MlyValue.general_function general_function, 
-general_function1left, general_function1right)) :: rest671)) => let
- val  result = MlyValue.general_data (( general_function ))
- in ( LrTable.NT 9, ( result, general_function1left, 
-general_function1right), rest671)
-end
-|  ( 23, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
-variable_1right)) :: rest671)) => let val  result = 
-MlyValue.general_data (( V variable_ ))
- in ( LrTable.NT 9, ( result, variable_1left, variable_1right), 
+|  ( 100, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, _, 
+tff_atomic_type1right)) :: _ :: ( _, ( MlyValue.variable_ variable_, 
+variable_1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_typed_variable (( (variable_, SOME tff_atomic_type) ))
+ in ( LrTable.NT 85, ( result, variable_1left, tff_atomic_type1right),
+ rest671)
+end
+|  ( 101, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _
+, tff_unitary_formula1right)) :: ( _, ( MlyValue.unary_connective 
+unary_connective, unary_connective1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_unary_formula (
+( Fmla (unary_connective, [tff_unitary_formula]) ))
+ in ( LrTable.NT 84, ( result, unary_connective1left, 
+tff_unitary_formula1right), rest671)
+end
+|  ( 102, ( ( _, ( MlyValue.fol_infix_unary fol_infix_unary, 
+fol_infix_unary1left, fol_infix_unary1right)) :: rest671)) => let val 
+ result = MlyValue.tff_unary_formula (( fol_infix_unary ))
+ in ( LrTable.NT 84, ( result, fol_infix_unary1left, 
+fol_infix_unary1right), rest671)
+end
+|  ( 103, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_logic_formula tff_logic_formula3, _, _)) :: _ :: ( _, ( 
+MlyValue.tff_logic_formula tff_logic_formula2, _, _)) :: _ :: ( _, ( 
+MlyValue.tff_logic_formula tff_logic_formula1, _, _)) :: _ :: ( _, ( _
+, ITE_F1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_conditional (
+(
+  Conditional (tff_logic_formula1, tff_logic_formula2, tff_logic_formula3)
+)
+)
+ in ( LrTable.NT 76, ( result, ITE_F1left, RPAREN1right), rest671)
+end
+|  ( 104, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.tff_formula
+ tff_formula, _, _)) :: _ :: ( _, ( MlyValue.tff_let_term_defn 
+tff_let_term_defn, _, _)) :: _ :: ( _, ( _, LET_TF1left, _)) :: 
+rest671)) => let val  result = MlyValue.tff_let (
+(Let (tff_let_term_defn, tff_formula) ))
+ in ( LrTable.NT 137, ( result, LET_TF1left, RPAREN1right), rest671)
+
+end
+|  ( 105, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.tff_formula
+ tff_formula, _, _)) :: _ :: ( _, ( MlyValue.tff_let_formula_defn 
+tff_let_formula_defn, _, _)) :: _ :: ( _, ( _, LET_FF1left, _)) :: 
+rest671)) => let val  result = MlyValue.tff_let (
+( Let (tff_let_formula_defn, tff_formula) ))
+ in ( LrTable.NT 137, ( result, LET_FF1left, RPAREN1right), rest671)
+
+end
+|  ( 106, ( ( _, ( MlyValue.tff_quantified_formula 
+tff_quantified_formula, tff_quantified_formula1left, 
+tff_quantified_formula1right)) :: rest671)) => let val  result = 
+MlyValue.tff_let_term_defn (
+(
+  let
+    val (_, vars, fmla) = extract_quant_info tff_quantified_formula
+  in [Let_fmla (hd vars, fmla)]
+  end
+)
+)
+ in ( LrTable.NT 138, ( result, tff_quantified_formula1left, 
+tff_quantified_formula1right), rest671)
+end
+|  ( 107, ( ( _, ( MlyValue.tff_quantified_formula 
+tff_quantified_formula, tff_quantified_formula1left, 
+tff_quantified_formula1right)) :: rest671)) => let val  result = 
+MlyValue.tff_let_formula_defn (
+(
+  let
+    val (_, vars, fmla) = extract_quant_info tff_quantified_formula
+  in [Let_fmla (hd vars, fmla)]
+  end
+)
+)
+ in ( LrTable.NT 139, ( result, tff_quantified_formula1left, 
+tff_quantified_formula1right), rest671)
+end
+|  ( 108, ( ( _, ( MlyValue.tff_tuple tff_tuple2, _, tff_tuple2right))
+ :: _ :: ( _, ( MlyValue.tff_tuple tff_tuple1, tff_tuple1left, _)) :: 
+rest671)) => let val  result = MlyValue.tff_sequent (
+( Sequent (tff_tuple1, tff_tuple2) ))
+ in ( LrTable.NT 75, ( result, tff_tuple1left, tff_tuple2right), 
 rest671)
 end
-|  ( 24, ( ( _, ( MlyValue.number number, number1left, number1right))
- :: rest671)) => let val  result = MlyValue.general_data (
-( Number number ))
- in ( LrTable.NT 9, ( result, number1left, number1right), rest671)
-end
-|  ( 25, ( ( _, ( MlyValue.DISTINCT_OBJECT DISTINCT_OBJECT, 
-DISTINCT_OBJECT1left, DISTINCT_OBJECT1right)) :: rest671)) => let val 
- result = MlyValue.general_data (( Distinct_Object DISTINCT_OBJECT ))
- in ( LrTable.NT 9, ( result, DISTINCT_OBJECT1left, 
-DISTINCT_OBJECT1right), rest671)
-end
-|  ( 26, ( ( _, ( MlyValue.formula_data formula_data, 
-formula_data1left, formula_data1right)) :: rest671)) => let val  
-result = MlyValue.general_data (( formula_data ))
- in ( LrTable.NT 9, ( result, formula_data1left, formula_data1right), 
+|  ( 109, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.tff_sequent
+ tff_sequent, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
+ val  result = MlyValue.tff_sequent (( tff_sequent ))
+ in ( LrTable.NT 75, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 110, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) ::
+ rest671)) => let val  result = MlyValue.tff_tuple (( [] ))
+ in ( LrTable.NT 73, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 111, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
+MlyValue.tff_tuple_list tff_tuple_list, _, _)) :: ( _, ( _, LBRKT1left
+, _)) :: rest671)) => let val  result = MlyValue.tff_tuple (
+( tff_tuple_list ))
+ in ( LrTable.NT 73, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 112, ( ( _, ( MlyValue.tff_tuple_list tff_tuple_list, _, 
+tff_tuple_list1right)) :: _ :: ( _, ( MlyValue.tff_logic_formula 
+tff_logic_formula, tff_logic_formula1left, _)) :: rest671)) => let
+ val  result = MlyValue.tff_tuple_list (
+( tff_logic_formula :: tff_tuple_list ))
+ in ( LrTable.NT 74, ( result, tff_logic_formula1left, 
+tff_tuple_list1right), rest671)
+end
+|  ( 113, ( ( _, ( MlyValue.tff_logic_formula tff_logic_formula, 
+tff_logic_formula1left, tff_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.tff_tuple_list (( [tff_logic_formula] ))
+ in ( LrTable.NT 74, ( result, tff_logic_formula1left, 
+tff_logic_formula1right), rest671)
+end
+|  ( 114, ( ( _, ( MlyValue.tff_top_level_type tff_top_level_type, _, 
+tff_top_level_type1right)) :: _ :: ( _, ( MlyValue.tff_untyped_atom 
+tff_untyped_atom, tff_untyped_atom1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_typed_atom (
+( (fst tff_untyped_atom, SOME tff_top_level_type) ))
+ in ( LrTable.NT 83, ( result, tff_untyped_atom1left, 
+tff_top_level_type1right), rest671)
+end
+|  ( 115, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_typed_atom tff_typed_atom, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_typed_atom (( tff_typed_atom ))
+ in ( LrTable.NT 83, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 116, ( ( _, ( MlyValue.functor_ functor_, functor_1left, 
+functor_1right)) :: rest671)) => let val  result = 
+MlyValue.tff_untyped_atom (( (functor_, NONE) ))
+ in ( LrTable.NT 82, ( result, functor_1left, functor_1right), rest671
+)
+end
+|  ( 117, ( ( _, ( MlyValue.system_functor system_functor, 
+system_functor1left, system_functor1right)) :: rest671)) => let val  
+result = MlyValue.tff_untyped_atom (( (system_functor, NONE) ))
+ in ( LrTable.NT 82, ( result, system_functor1left, 
+system_functor1right), rest671)
+end
+|  ( 118, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, 
+tff_atomic_type1left, tff_atomic_type1right)) :: rest671)) => let val 
+ result = MlyValue.tff_top_level_type (( tff_atomic_type ))
+ in ( LrTable.NT 81, ( result, tff_atomic_type1left, 
+tff_atomic_type1right), rest671)
+end
+|  ( 119, ( ( _, ( MlyValue.tff_mapping_type tff_mapping_type, 
+tff_mapping_type1left, tff_mapping_type1right)) :: rest671)) => let
+ val  result = MlyValue.tff_top_level_type (( tff_mapping_type ))
+ in ( LrTable.NT 81, ( result, tff_mapping_type1left, 
+tff_mapping_type1right), rest671)
+end
+|  ( 120, ( ( _, ( MlyValue.tff_quantified_type tff_quantified_type, 
+tff_quantified_type1left, tff_quantified_type1right)) :: rest671)) =>
+ let val  result = MlyValue.tff_top_level_type (
+( tff_quantified_type ))
+ in ( LrTable.NT 81, ( result, tff_quantified_type1left, 
+tff_quantified_type1right), rest671)
+end
+|  ( 121, ( ( _, ( MlyValue.tff_monotype tff_monotype, _, 
+tff_monotype1right)) :: _ :: _ :: ( _, ( MlyValue.tff_variable_list 
+tff_variable_list, _, _)) :: _ :: ( _, ( _, DEP_PROD1left, _)) :: 
+rest671)) => let val  result = MlyValue.tff_quantified_type (
+(
+       Fmla_type (Quant (Dep_Prod, tff_variable_list, Type_fmla tff_monotype))
+)
+)
+ in ( LrTable.NT 140, ( result, DEP_PROD1left, tff_monotype1right), 
 rest671)
 end
-|  ( 27, ( ( _, ( MlyValue.integer integer, integer1left, 
-integer1right)) :: rest671)) => let val  result = MlyValue.number (
-( (Int_num, integer) ))
- in ( LrTable.NT 11, ( result, integer1left, integer1right), rest671)
+|  ( 122, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_quantified_type tff_quantified_type, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_quantified_type (( tff_quantified_type ))
+ in ( LrTable.NT 140, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 123, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, 
+tff_atomic_type1left, tff_atomic_type1right)) :: rest671)) => let val 
+ result = MlyValue.tff_monotype (( tff_atomic_type ))
+ in ( LrTable.NT 141, ( result, tff_atomic_type1left, 
+tff_atomic_type1right), rest671)
+end
+|  ( 124, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_mapping_type tff_mapping_type, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_monotype (( tff_mapping_type ))
+ in ( LrTable.NT 141, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 125, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, 
+tff_atomic_type1left, tff_atomic_type1right)) :: rest671)) => let val 
+ result = MlyValue.tff_unitary_type (( tff_atomic_type ))
+ in ( LrTable.NT 80, ( result, tff_atomic_type1left, 
+tff_atomic_type1right), rest671)
+end
+|  ( 126, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_xprod_type tff_xprod_type, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_unitary_type (( tff_xprod_type ))
+ in ( LrTable.NT 80, ( result, LPAREN1left, RPAREN1right), rest671)
 
 end
-|  ( 28, ( ( _, ( MlyValue.REAL REAL, REAL1left, REAL1right)) :: 
-rest671)) => let val  result = MlyValue.number (( (Real_num, REAL) ))
- in ( LrTable.NT 11, ( result, REAL1left, REAL1right), rest671)
-end
-|  ( 29, ( ( _, ( MlyValue.RATIONAL RATIONAL, RATIONAL1left, 
-RATIONAL1right)) :: rest671)) => let val  result = MlyValue.number (
-( (Rat_num, RATIONAL) ))
- in ( LrTable.NT 11, ( result, RATIONAL1left, RATIONAL1right), rest671
+|  ( 127, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left,
+ atomic_word1right)) :: rest671)) => let val  result = 
+MlyValue.tff_atomic_type (( Atom_type atomic_word ))
+ in ( LrTable.NT 79, ( result, atomic_word1left, atomic_word1right), 
+rest671)
+end
+|  ( 128, ( ( _, ( MlyValue.defined_type defined_type, 
+defined_type1left, defined_type1right)) :: rest671)) => let val  
+result = MlyValue.tff_atomic_type (( Defined_type defined_type ))
+ in ( LrTable.NT 79, ( result, defined_type1left, defined_type1right),
+ rest671)
+end
+|  ( 129, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_type_arguments tff_type_arguments, _, _)) :: _ :: ( _, ( 
+MlyValue.atomic_word atomic_word, atomic_word1left, _)) :: rest671))
+ => let val  result = MlyValue.tff_atomic_type (
+( Fmla_type (Fmla (Uninterpreted atomic_word, (map Type_fmla tff_type_arguments))) )
+)
+ in ( LrTable.NT 79, ( result, atomic_word1left, RPAREN1right), 
+rest671)
+end
+|  ( 130, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
+variable_1right)) :: rest671)) => let val  result = 
+MlyValue.tff_atomic_type (
+( Fmla_type (Pred (Interpreted_ExtraLogic Apply, [Term_Var variable_])) )
+)
+ in ( LrTable.NT 79, ( result, variable_1left, variable_1right), 
+rest671)
+end
+|  ( 131, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, 
+tff_atomic_type1left, tff_atomic_type1right)) :: rest671)) => let val 
+ result = MlyValue.tff_type_arguments (( [tff_atomic_type]  ))
+ in ( LrTable.NT 142, ( result, tff_atomic_type1left, 
+tff_atomic_type1right), rest671)
+end
+|  ( 132, ( ( _, ( MlyValue.tff_type_arguments tff_type_arguments, _, 
+tff_type_arguments1right)) :: _ :: ( _, ( MlyValue.tff_atomic_type 
+tff_atomic_type, tff_atomic_type1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_type_arguments (
+( tff_atomic_type :: tff_type_arguments ))
+ in ( LrTable.NT 142, ( result, tff_atomic_type1left, 
+tff_type_arguments1right), rest671)
+end
+|  ( 133, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, _, 
+tff_atomic_type1right)) :: _ :: ( _, ( MlyValue.tff_unitary_type 
+tff_unitary_type, tff_unitary_type1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_mapping_type (
+( Fn_type(tff_unitary_type, tff_atomic_type) ))
+ in ( LrTable.NT 78, ( result, tff_unitary_type1left, 
+tff_atomic_type1right), rest671)
+end
+|  ( 134, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_mapping_type tff_mapping_type, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_mapping_type (( tff_mapping_type ))
+ in ( LrTable.NT 78, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 135, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type2, _, 
+tff_atomic_type2right)) :: _ :: ( _, ( MlyValue.tff_atomic_type 
+tff_atomic_type1, tff_atomic_type1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_xprod_type (
+( Prod_type(tff_atomic_type1, tff_atomic_type2) ))
+ in ( LrTable.NT 77, ( result, tff_atomic_type1left, 
+tff_atomic_type2right), rest671)
+end
+|  ( 136, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, _, 
+tff_atomic_type1right)) :: _ :: ( _, ( MlyValue.tff_xprod_type 
+tff_xprod_type, tff_xprod_type1left, _)) :: rest671)) => let val  
+result = MlyValue.tff_xprod_type (
+( Prod_type(tff_xprod_type, tff_atomic_type) ))
+ in ( LrTable.NT 77, ( result, tff_xprod_type1left, 
+tff_atomic_type1right), rest671)
+end
+|  ( 137, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.tff_xprod_type tff_xprod_type, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.tff_xprod_type (( tff_xprod_type ))
+ in ( LrTable.NT 77, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 138, ( ( _, ( MlyValue.fof_logic_formula fof_logic_formula, 
+fof_logic_formula1left, fof_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.fof_formula (( fof_logic_formula ))
+ in ( LrTable.NT 72, ( result, fof_logic_formula1left, 
+fof_logic_formula1right), rest671)
+end
+|  ( 139, ( ( _, ( MlyValue.fof_sequent fof_sequent, fof_sequent1left,
+ fof_sequent1right)) :: rest671)) => let val  result = 
+MlyValue.fof_formula (( fof_sequent ))
+ in ( LrTable.NT 72, ( result, fof_sequent1left, fof_sequent1right), 
+rest671)
+end
+|  ( 140, ( ( _, ( MlyValue.fof_binary_formula fof_binary_formula, 
+fof_binary_formula1left, fof_binary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.fof_logic_formula (( fof_binary_formula ))
+ in ( LrTable.NT 71, ( result, fof_binary_formula1left, 
+fof_binary_formula1right), rest671)
+end
+|  ( 141, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, 
+fof_unitary_formula1left, fof_unitary_formula1right)) :: rest671)) =>
+ let val  result = MlyValue.fof_logic_formula (( fof_unitary_formula )
+)
+ in ( LrTable.NT 71, ( result, fof_unitary_formula1left, 
+fof_unitary_formula1right), rest671)
+end
+|  ( 142, ( ( _, ( MlyValue.fof_binary_nonassoc fof_binary_nonassoc, 
+fof_binary_nonassoc1left, fof_binary_nonassoc1right)) :: rest671)) =>
+ let val  result = MlyValue.fof_binary_formula (
+( fof_binary_nonassoc ))
+ in ( LrTable.NT 70, ( result, fof_binary_nonassoc1left, 
+fof_binary_nonassoc1right), rest671)
+end
+|  ( 143, ( ( _, ( MlyValue.fof_binary_assoc fof_binary_assoc, 
+fof_binary_assoc1left, fof_binary_assoc1right)) :: rest671)) => let
+ val  result = MlyValue.fof_binary_formula (( fof_binary_assoc ))
+ in ( LrTable.NT 70, ( result, fof_binary_assoc1left, 
+fof_binary_assoc1right), rest671)
+end
+|  ( 144, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula2,
+ _, fof_unitary_formula2right)) :: ( _, ( MlyValue.binary_connective 
+binary_connective, _, _)) :: ( _, ( MlyValue.fof_unitary_formula 
+fof_unitary_formula1, fof_unitary_formula1left, _)) :: rest671)) =>
+ let val  result = MlyValue.fof_binary_nonassoc (
+(
+  Fmla (binary_connective, [fof_unitary_formula1, fof_unitary_formula2] )
+)
 )
-end
-|  ( 30, ( ( _, ( MlyValue.UNSIGNED_INTEGER UNSIGNED_INTEGER, 
-UNSIGNED_INTEGER1left, UNSIGNED_INTEGER1right)) :: rest671)) => let
- val  result = MlyValue.integer (( UNSIGNED_INTEGER ))
- in ( LrTable.NT 13, ( result, UNSIGNED_INTEGER1left, 
-UNSIGNED_INTEGER1right), rest671)
-end
-|  ( 31, ( ( _, ( MlyValue.SIGNED_INTEGER SIGNED_INTEGER, 
-SIGNED_INTEGER1left, SIGNED_INTEGER1right)) :: rest671)) => let val  
-result = MlyValue.integer (( SIGNED_INTEGER ))
- in ( LrTable.NT 13, ( result, SIGNED_INTEGER1left, 
-SIGNED_INTEGER1right), rest671)
-end
-|  ( 32, ( ( _, ( MlyValue.SINGLE_QUOTED SINGLE_QUOTED, 
-SINGLE_QUOTED1left, SINGLE_QUOTED1right)) :: rest671)) => let val  
-result = MlyValue.file_name (( SINGLE_QUOTED ))
- in ( LrTable.NT 17, ( result, SINGLE_QUOTED1left, SINGLE_QUOTED1right
+ in ( LrTable.NT 69, ( result, fof_unitary_formula1left, 
+fof_unitary_formula2right), rest671)
+end
+|  ( 145, ( ( _, ( MlyValue.fof_or_formula fof_or_formula, 
+fof_or_formula1left, fof_or_formula1right)) :: rest671)) => let val  
+result = MlyValue.fof_binary_assoc (( fof_or_formula ))
+ in ( LrTable.NT 68, ( result, fof_or_formula1left, 
+fof_or_formula1right), rest671)
+end
+|  ( 146, ( ( _, ( MlyValue.fof_and_formula fof_and_formula, 
+fof_and_formula1left, fof_and_formula1right)) :: rest671)) => let val 
+ result = MlyValue.fof_binary_assoc (( fof_and_formula ))
+ in ( LrTable.NT 68, ( result, fof_and_formula1left, 
+fof_and_formula1right), rest671)
+end
+|  ( 147, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula2,
+ _, fof_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.fof_unitary_formula fof_unitary_formula1, 
+fof_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.fof_or_formula (
+( Fmla (Interpreted_Logic Or, [fof_unitary_formula1, fof_unitary_formula2]) )
+)
+ in ( LrTable.NT 67, ( result, fof_unitary_formula1left, 
+fof_unitary_formula2right), rest671)
+end
+|  ( 148, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
+, fof_unitary_formula1right)) :: _ :: ( _, ( MlyValue.fof_or_formula 
+fof_or_formula, fof_or_formula1left, _)) :: rest671)) => let val  
+result = MlyValue.fof_or_formula (
+( Fmla (Interpreted_Logic Or, [fof_or_formula, fof_unitary_formula]) )
+)
+ in ( LrTable.NT 67, ( result, fof_or_formula1left, 
+fof_unitary_formula1right), rest671)
+end
+|  ( 149, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula2,
+ _, fof_unitary_formula2right)) :: _ :: ( _, ( 
+MlyValue.fof_unitary_formula fof_unitary_formula1, 
+fof_unitary_formula1left, _)) :: rest671)) => let val  result = 
+MlyValue.fof_and_formula (
+( Fmla (Interpreted_Logic And, [fof_unitary_formula1, fof_unitary_formula2]) )
+)
+ in ( LrTable.NT 66, ( result, fof_unitary_formula1left, 
+fof_unitary_formula2right), rest671)
+end
+|  ( 150, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
+, fof_unitary_formula1right)) :: _ :: ( _, ( MlyValue.fof_and_formula 
+fof_and_formula, fof_and_formula1left, _)) :: rest671)) => let val  
+result = MlyValue.fof_and_formula (
+( Fmla (Interpreted_Logic And, [fof_and_formula, fof_unitary_formula]) )
+)
+ in ( LrTable.NT 66, ( result, fof_and_formula1left, 
+fof_unitary_formula1right), rest671)
+end
+|  ( 151, ( ( _, ( MlyValue.fof_quantified_formula 
+fof_quantified_formula, fof_quantified_formula1left, 
+fof_quantified_formula1right)) :: rest671)) => let val  result = 
+MlyValue.fof_unitary_formula (( fof_quantified_formula ))
+ in ( LrTable.NT 65, ( result, fof_quantified_formula1left, 
+fof_quantified_formula1right), rest671)
+end
+|  ( 152, ( ( _, ( MlyValue.fof_unary_formula fof_unary_formula, 
+fof_unary_formula1left, fof_unary_formula1right)) :: rest671)) => let
+ val  result = MlyValue.fof_unitary_formula (( fof_unary_formula ))
+ in ( LrTable.NT 65, ( result, fof_unary_formula1left, 
+fof_unary_formula1right), rest671)
+end
+|  ( 153, ( ( _, ( MlyValue.atomic_formula atomic_formula, 
+atomic_formula1left, atomic_formula1right)) :: rest671)) => let val  
+result = MlyValue.fof_unitary_formula (( atomic_formula ))
+ in ( LrTable.NT 65, ( result, atomic_formula1left, 
+atomic_formula1right), rest671)
+end
+|  ( 154, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.fof_logic_formula fof_logic_formula, _, _)) :: ( _, ( _, 
+LPAREN1left, _)) :: rest671)) => let val  result = 
+MlyValue.fof_unitary_formula (( fof_logic_formula ))
+ in ( LrTable.NT 65, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 155, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
+, fof_unitary_formula1right)) :: _ :: _ :: ( _, ( 
+MlyValue.fof_variable_list fof_variable_list, _, _)) :: _ :: ( _, ( 
+MlyValue.fol_quantifier fol_quantifier, fol_quantifier1left, _)) :: 
+rest671)) => let val  result = MlyValue.fof_quantified_formula (
+(
+  Quant (fol_quantifier, map (fn v => (v, NONE)) fof_variable_list, fof_unitary_formula)
+)
+)
+ in ( LrTable.NT 64, ( result, fol_quantifier1left, 
+fof_unitary_formula1right), rest671)
+end
+|  ( 156, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
+variable_1right)) :: rest671)) => let val  result = 
+MlyValue.fof_variable_list (( [variable_] ))
+ in ( LrTable.NT 63, ( result, variable_1left, variable_1right), 
+rest671)
+end
+|  ( 157, ( ( _, ( MlyValue.fof_variable_list fof_variable_list, _, 
+fof_variable_list1right)) :: _ :: ( _, ( MlyValue.variable_ variable_,
+ variable_1left, _)) :: rest671)) => let val  result = 
+MlyValue.fof_variable_list (( variable_ :: fof_variable_list ))
+ in ( LrTable.NT 63, ( result, variable_1left, fof_variable_list1right
 ), rest671)
 end
-|  ( 33, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.thf_formula 
-thf_formula, _, _)) :: _ :: ( _, ( _, DTHF1left, _)) :: rest671)) =>
- let val  result = MlyValue.formula_data (
-( Formula_Data (THF, thf_formula) ))
- in ( LrTable.NT 12, ( result, DTHF1left, RPAREN1right), rest671)
-end
-|  ( 34, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.tff_formula 
-tff_formula, _, _)) :: _ :: ( _, ( _, DTFF1left, _)) :: rest671)) =>
- let val  result = MlyValue.formula_data (
-( Formula_Data (TFF, tff_formula) ))
- in ( LrTable.NT 12, ( result, DTFF1left, RPAREN1right), rest671)
-end
-|  ( 35, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.fof_formula 
-fof_formula, _, _)) :: _ :: ( _, ( _, DFOF1left, _)) :: rest671)) =>
- let val  result = MlyValue.formula_data (
-( Formula_Data (FOF, fof_formula) ))
- in ( LrTable.NT 12, ( result, DFOF1left, RPAREN1right), rest671)
-end
-|  ( 36, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.cnf_formula 
-cnf_formula, _, _)) :: _ :: ( _, ( _, DCNF1left, _)) :: rest671)) =>
- let val  result = MlyValue.formula_data (
-( Formula_Data (CNF, cnf_formula) ))
- in ( LrTable.NT 12, ( result, DCNF1left, RPAREN1right), rest671)
-end
-|  ( 37, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.term term, _
-, _)) :: _ :: ( _, ( _, DFOT1left, _)) :: rest671)) => let val  result
- = MlyValue.formula_data (( Term_Data term ))
- in ( LrTable.NT 12, ( result, DFOT1left, RPAREN1right), rest671)
-end
-|  ( 38, ( ( _, ( MlyValue.ATOMIC_SYSTEM_WORD ATOMIC_SYSTEM_WORD, 
-ATOMIC_SYSTEM_WORD1left, ATOMIC_SYSTEM_WORD1right)) :: rest671)) =>
- let val  result = MlyValue.system_type (( ATOMIC_SYSTEM_WORD ))
- in ( LrTable.NT 47, ( result, ATOMIC_SYSTEM_WORD1left, 
-ATOMIC_SYSTEM_WORD1right), rest671)
-end
-|  ( 39, ( ( _, ( MlyValue.ATOMIC_DEFINED_WORD ATOMIC_DEFINED_WORD, 
-ATOMIC_DEFINED_WORD1left, ATOMIC_DEFINED_WORD1right)) :: rest671)) =>
+|  ( 158, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
+, fof_unitary_formula1right)) :: ( _, ( MlyValue.unary_connective 
+unary_connective, unary_connective1left, _)) :: rest671)) => let val  
+result = MlyValue.fof_unary_formula (
+( Fmla (unary_connective, [fof_unitary_formula]) ))
+ in ( LrTable.NT 62, ( result, unary_connective1left, 
+fof_unitary_formula1right), rest671)
+end
+|  ( 159, ( ( _, ( MlyValue.fol_infix_unary fol_infix_unary, 
+fol_infix_unary1left, fol_infix_unary1right)) :: rest671)) => let val 
+ result = MlyValue.fof_unary_formula (( fol_infix_unary ))
+ in ( LrTable.NT 62, ( result, fol_infix_unary1left, 
+fol_infix_unary1right), rest671)
+end
+|  ( 160, ( ( _, ( MlyValue.fof_tuple fof_tuple2, _, fof_tuple2right))
+ :: _ :: ( _, ( MlyValue.fof_tuple fof_tuple1, fof_tuple1left, _)) :: 
+rest671)) => let val  result = MlyValue.fof_sequent (
+( Sequent (fof_tuple1, fof_tuple2) ))
+ in ( LrTable.NT 61, ( result, fof_tuple1left, fof_tuple2right), 
+rest671)
+end
+|  ( 161, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.fof_sequent
+ fof_sequent, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
+ val  result = MlyValue.fof_sequent (( fof_sequent ))
+ in ( LrTable.NT 61, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 162, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) ::
+ rest671)) => let val  result = MlyValue.fof_tuple (( [] ))
+ in ( LrTable.NT 60, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 163, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
+MlyValue.fof_tuple_list fof_tuple_list, _, _)) :: ( _, ( _, LBRKT1left
+, _)) :: rest671)) => let val  result = MlyValue.fof_tuple (
+( fof_tuple_list ))
+ in ( LrTable.NT 60, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 164, ( ( _, ( MlyValue.fof_logic_formula fof_logic_formula, 
+fof_logic_formula1left, fof_logic_formula1right)) :: rest671)) => let
+ val  result = MlyValue.fof_tuple_list (( [fof_logic_formula] ))
+ in ( LrTable.NT 59, ( result, fof_logic_formula1left, 
+fof_logic_formula1right), rest671)
+end
+|  ( 165, ( ( _, ( MlyValue.fof_tuple_list fof_tuple_list, _, 
+fof_tuple_list1right)) :: _ :: ( _, ( MlyValue.fof_logic_formula 
+fof_logic_formula, fof_logic_formula1left, _)) :: rest671)) => let
+ val  result = MlyValue.fof_tuple_list (
+( fof_logic_formula :: fof_tuple_list ))
+ in ( LrTable.NT 59, ( result, fof_logic_formula1left, 
+fof_tuple_list1right), rest671)
+end
+|  ( 166, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.disjunction
+ disjunction, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
+ val  result = MlyValue.cnf_formula (( disjunction ))
+ in ( LrTable.NT 58, ( result, LPAREN1left, RPAREN1right), rest671)
+
+end
+|  ( 167, ( ( _, ( MlyValue.disjunction disjunction, disjunction1left,
+ disjunction1right)) :: rest671)) => let val  result = 
+MlyValue.cnf_formula (( disjunction ))
+ in ( LrTable.NT 58, ( result, disjunction1left, disjunction1right), 
+rest671)
+end
+|  ( 168, ( ( _, ( MlyValue.literal literal, literal1left, 
+literal1right)) :: rest671)) => let val  result = MlyValue.disjunction
+ (( literal ))
+ in ( LrTable.NT 57, ( result, literal1left, literal1right), rest671)
+
+end
+|  ( 169, ( ( _, ( MlyValue.literal literal, _, literal1right)) :: _
+ :: ( _, ( MlyValue.disjunction disjunction, disjunction1left, _)) :: 
+rest671)) => let val  result = MlyValue.disjunction (
+( Fmla (Interpreted_Logic Or, [disjunction, literal]) ))
+ in ( LrTable.NT 57, ( result, disjunction1left, literal1right), 
+rest671)
+end
+|  ( 170, ( ( _, ( MlyValue.atomic_formula atomic_formula, 
+atomic_formula1left, atomic_formula1right)) :: rest671)) => let val  
+result = MlyValue.literal (( atomic_formula ))
+ in ( LrTable.NT 56, ( result, atomic_formula1left, 
+atomic_formula1right), rest671)
+end
+|  ( 171, ( ( _, ( MlyValue.atomic_formula atomic_formula, _, 
+atomic_formula1right)) :: ( _, ( _, TILDE1left, _)) :: rest671)) =>
+ let val  result = MlyValue.literal (
+( Fmla (Interpreted_Logic Not, [atomic_formula]) ))
+ in ( LrTable.NT 56, ( result, TILDE1left, atomic_formula1right), 
+rest671)
+end
+|  ( 172, ( ( _, ( MlyValue.fol_infix_unary fol_infix_unary, 
+fol_infix_unary1left, fol_infix_unary1right)) :: rest671)) => let val 
+ result = MlyValue.literal (( fol_infix_unary ))
+ in ( LrTable.NT 56, ( result, fol_infix_unary1left, 
+fol_infix_unary1right), rest671)
+end
+|  ( 173, ( ( _, ( MlyValue.thf_pair_connective thf_pair_connective, 
+thf_pair_connective1left, thf_pair_connective1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_conn_term (( thf_pair_connective ))
+ in ( LrTable.NT 55, ( result, thf_pair_connective1left, 
+thf_pair_connective1right), rest671)
+end
+|  ( 174, ( ( _, ( MlyValue.assoc_connective assoc_connective, 
+assoc_connective1left, assoc_connective1right)) :: rest671)) => let
+ val  result = MlyValue.thf_conn_term (( assoc_connective ))
+ in ( LrTable.NT 55, ( result, assoc_connective1left, 
+assoc_connective1right), rest671)
+end
+|  ( 175, ( ( _, ( MlyValue.thf_unary_connective thf_unary_connective,
+ thf_unary_connective1left, thf_unary_connective1right)) :: rest671))
+ => let val  result = MlyValue.thf_conn_term (( thf_unary_connective )
+)
+ in ( LrTable.NT 55, ( result, thf_unary_connective1left, 
+thf_unary_connective1right), rest671)
+end
+|  ( 176, ( ( _, ( MlyValue.term term2, _, term2right)) :: ( _, ( 
+MlyValue.infix_inequality infix_inequality, _, _)) :: ( _, ( 
+MlyValue.term term1, term1left, _)) :: rest671)) => let val  result = 
+MlyValue.fol_infix_unary (( Pred (infix_inequality, [term1, term2]) ))
+ in ( LrTable.NT 54, ( result, term1left, term2right), rest671)
+end
+|  ( 177, ( ( _, ( MlyValue.fol_quantifier fol_quantifier, 
+fol_quantifier1left, fol_quantifier1right)) :: rest671)) => let val  
+result = MlyValue.thf_quantifier (( fol_quantifier ))
+ in ( LrTable.NT 53, ( result, fol_quantifier1left, 
+fol_quantifier1right), rest671)
+end
+|  ( 178, ( ( _, ( _, CARET1left, CARET1right)) :: rest671)) => let
+ val  result = MlyValue.thf_quantifier (( Lambda ))
+ in ( LrTable.NT 53, ( result, CARET1left, CARET1right), rest671)
+end
+|  ( 179, ( ( _, ( _, DEP_PROD1left, DEP_PROD1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_quantifier (( Dep_Prod ))
+ in ( LrTable.NT 53, ( result, DEP_PROD1left, DEP_PROD1right), rest671
+)
+end
+|  ( 180, ( ( _, ( _, DEP_SUM1left, DEP_SUM1right)) :: rest671)) =>
+ let val  result = MlyValue.thf_quantifier (( Dep_Sum ))
+ in ( LrTable.NT 53, ( result, DEP_SUM1left, DEP_SUM1right), rest671)
+
+end
+|  ( 181, ( ( _, ( _, INDEF_CHOICE1left, INDEF_CHOICE1right)) :: 
+rest671)) => let val  result = MlyValue.thf_quantifier (( Epsilon ))
+ in ( LrTable.NT 53, ( result, INDEF_CHOICE1left, INDEF_CHOICE1right),
+ rest671)
+end
+|  ( 182, ( ( _, ( _, DEFIN_CHOICE1left, DEFIN_CHOICE1right)) :: 
+rest671)) => let val  result = MlyValue.thf_quantifier (( Iota ))
+ in ( LrTable.NT 53, ( result, DEFIN_CHOICE1left, DEFIN_CHOICE1right),
+ rest671)
+end
+|  ( 183, ( ( _, ( MlyValue.infix_equality infix_equality, 
+infix_equality1left, infix_equality1right)) :: rest671)) => let val  
+result = MlyValue.thf_pair_connective (( infix_equality ))
+ in ( LrTable.NT 52, ( result, infix_equality1left, 
+infix_equality1right), rest671)
+end
+|  ( 184, ( ( _, ( MlyValue.infix_inequality infix_inequality, 
+infix_inequality1left, infix_inequality1right)) :: rest671)) => let
+ val  result = MlyValue.thf_pair_connective (( infix_inequality ))
+ in ( LrTable.NT 52, ( result, infix_inequality1left, 
+infix_inequality1right), rest671)
+end
+|  ( 185, ( ( _, ( MlyValue.binary_connective binary_connective, 
+binary_connective1left, binary_connective1right)) :: rest671)) => let
+ val  result = MlyValue.thf_pair_connective (( binary_connective ))
+ in ( LrTable.NT 52, ( result, binary_connective1left, 
+binary_connective1right), rest671)
+end
+|  ( 186, ( ( _, ( MlyValue.unary_connective unary_connective, 
+unary_connective1left, unary_connective1right)) :: rest671)) => let
+ val  result = MlyValue.thf_unary_connective (( unary_connective ))
+ in ( LrTable.NT 51, ( result, unary_connective1left, 
+unary_connective1right), rest671)
+end
+|  ( 187, ( ( _, ( _, OPERATOR_FORALL1left, OPERATOR_FORALL1right)) ::
+ rest671)) => let val  result = MlyValue.thf_unary_connective (
+( Interpreted_Logic Op_Forall ))
+ in ( LrTable.NT 51, ( result, OPERATOR_FORALL1left, 
+OPERATOR_FORALL1right), rest671)
+end
+|  ( 188, ( ( _, ( _, OPERATOR_EXISTS1left, OPERATOR_EXISTS1right)) ::
+ rest671)) => let val  result = MlyValue.thf_unary_connective (
+( Interpreted_Logic Op_Exists ))
+ in ( LrTable.NT 51, ( result, OPERATOR_EXISTS1left, 
+OPERATOR_EXISTS1right), rest671)
+end
+|  ( 189, ( ( _, ( _, EXCLAMATION1left, EXCLAMATION1right)) :: rest671
+)) => let val  result = MlyValue.fol_quantifier (( Forall ))
+ in ( LrTable.NT 50, ( result, EXCLAMATION1left, EXCLAMATION1right), 
+rest671)
+end
+|  ( 190, ( ( _, ( _, QUESTION1left, QUESTION1right)) :: rest671)) =>
+ let val  result = MlyValue.fol_quantifier (( Exists ))
+ in ( LrTable.NT 50, ( result, QUESTION1left, QUESTION1right), rest671
+)
+end
+|  ( 191, ( ( _, ( _, IFF1left, IFF1right)) :: rest671)) => let val  
+result = MlyValue.binary_connective (( Interpreted_Logic Iff ))
+ in ( LrTable.NT 49, ( result, IFF1left, IFF1right), rest671)
+end
+|  ( 192, ( ( _, ( _, IMPLIES1left, IMPLIES1right)) :: rest671)) =>
+ let val  result = MlyValue.binary_connective (
+( Interpreted_Logic If ))
+ in ( LrTable.NT 49, ( result, IMPLIES1left, IMPLIES1right), rest671)
+
+end
+|  ( 193, ( ( _, ( _, FI1left, FI1right)) :: rest671)) => let val  
+result = MlyValue.binary_connective (( Interpreted_Logic Fi ))
+ in ( LrTable.NT 49, ( result, FI1left, FI1right), rest671)
+end
+|  ( 194, ( ( _, ( _, XOR1left, XOR1right)) :: rest671)) => let val  
+result = MlyValue.binary_connective (( Interpreted_Logic Xor ))
+ in ( LrTable.NT 49, ( result, XOR1left, XOR1right), rest671)
+end
+|  ( 195, ( ( _, ( _, NOR1left, NOR1right)) :: rest671)) => let val  
+result = MlyValue.binary_connective (( Interpreted_Logic Nor ))
+ in ( LrTable.NT 49, ( result, NOR1left, NOR1right), rest671)
+end
+|  ( 196, ( ( _, ( _, NAND1left, NAND1right)) :: rest671)) => let val 
+ result = MlyValue.binary_connective (( Interpreted_Logic Nand ))
+ in ( LrTable.NT 49, ( result, NAND1left, NAND1right), rest671)
+end
+|  ( 197, ( ( _, ( _, VLINE1left, VLINE1right)) :: rest671)) => let
+ val  result = MlyValue.assoc_connective (( Interpreted_Logic Or ))
+ in ( LrTable.NT 48, ( result, VLINE1left, VLINE1right), rest671)
+end
+|  ( 198, ( ( _, ( _, AMPERSAND1left, AMPERSAND1right)) :: rest671))
+ => let val  result = MlyValue.assoc_connective (
+( Interpreted_Logic And ))
+ in ( LrTable.NT 48, ( result, AMPERSAND1left, AMPERSAND1right), 
+rest671)
+end
+|  ( 199, ( ( _, ( _, TILDE1left, TILDE1right)) :: rest671)) => let
+ val  result = MlyValue.unary_connective (( Interpreted_Logic Not ))
+ in ( LrTable.NT 45, ( result, TILDE1left, TILDE1right), rest671)
+end
+|  ( 200, ( ( _, ( MlyValue.atomic_defined_word atomic_defined_word, 
+atomic_defined_word1left, atomic_defined_word1right)) :: rest671)) =>
  let val  result = MlyValue.defined_type (
 (
-  case ATOMIC_DEFINED_WORD of
-    "$i" => Type_Ind
+  case atomic_defined_word of
+    "$oType" => Type_Bool
   | "$o" => Type_Bool
   | "$iType" => Type_Ind
-  | "$oType" => Type_Bool
-  | "$int" => Type_Int
+  | "$i" => Type_Ind
+  | "$tType" => Type_Type
   | "$real" => Type_Real
   | "$rat" => Type_Rat
-  | "$tType" => Type_Type
+  | "$int" => Type_Int
   | thing => raise UNRECOGNISED_SYMBOL ("defined_type", thing)
 )
 )
- in ( LrTable.NT 46, ( result, ATOMIC_DEFINED_WORD1left, 
-ATOMIC_DEFINED_WORD1right), rest671)
-end
-|  ( 40, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left, 
-atomic_word1right)) :: rest671)) => let val  result = 
+ in ( LrTable.NT 46, ( result, atomic_defined_word1left, 
+atomic_defined_word1right), rest671)
+end
+|  ( 201, ( ( _, ( MlyValue.atomic_system_word atomic_system_word, 
+atomic_system_word1left, atomic_system_word1right)) :: rest671)) =>
+ let val  result = MlyValue.system_type (( atomic_system_word ))
+ in ( LrTable.NT 47, ( result, atomic_system_word1left, 
+atomic_system_word1right), rest671)
+end
+|  ( 202, ( ( _, ( MlyValue.plain_atomic_formula plain_atomic_formula,
+ plain_atomic_formula1left, plain_atomic_formula1right)) :: rest671))
+ => let val  result = MlyValue.atomic_formula (
+( plain_atomic_formula ))
+ in ( LrTable.NT 44, ( result, plain_atomic_formula1left, 
+plain_atomic_formula1right), rest671)
+end
+|  ( 203, ( ( _, ( MlyValue.defined_atomic_formula 
+defined_atomic_formula, defined_atomic_formula1left, 
+defined_atomic_formula1right)) :: rest671)) => let val  result = 
+MlyValue.atomic_formula (( defined_atomic_formula ))
+ in ( LrTable.NT 44, ( result, defined_atomic_formula1left, 
+defined_atomic_formula1right), rest671)
+end
+|  ( 204, ( ( _, ( MlyValue.system_atomic_formula 
+system_atomic_formula, system_atomic_formula1left, 
+system_atomic_formula1right)) :: rest671)) => let val  result = 
+MlyValue.atomic_formula (( system_atomic_formula ))
+ in ( LrTable.NT 44, ( result, system_atomic_formula1left, 
+system_atomic_formula1right), rest671)
+end
+|  ( 205, ( ( _, ( MlyValue.plain_term plain_term, plain_term1left, 
+plain_term1right)) :: rest671)) => let val  result = 
+MlyValue.plain_atomic_formula (( Pred plain_term ))
+ in ( LrTable.NT 43, ( result, plain_term1left, plain_term1right), 
+rest671)
+end
+|  ( 206, ( ( _, ( MlyValue.defined_plain_formula 
+defined_plain_formula, defined_plain_formula1left, 
+defined_plain_formula1right)) :: rest671)) => let val  result = 
+MlyValue.defined_atomic_formula (( defined_plain_formula ))
+ in ( LrTable.NT 42, ( result, defined_plain_formula1left, 
+defined_plain_formula1right), rest671)
+end
+|  ( 207, ( ( _, ( MlyValue.defined_infix_formula 
+defined_infix_formula, defined_infix_formula1left, 
+defined_infix_formula1right)) :: rest671)) => let val  result = 
+MlyValue.defined_atomic_formula (( defined_infix_formula ))
+ in ( LrTable.NT 42, ( result, defined_infix_formula1left, 
+defined_infix_formula1right), rest671)
+end
+|  ( 208, ( ( _, ( MlyValue.defined_plain_term defined_plain_term, 
+defined_plain_term1left, defined_plain_term1right)) :: rest671)) =>
+ let val  result = MlyValue.defined_plain_formula (
+( Pred defined_plain_term ))
+ in ( LrTable.NT 41, ( result, defined_plain_term1left, 
+defined_plain_term1right), rest671)
+end
+|  ( 209, ( ( _, ( MlyValue.atomic_defined_word atomic_defined_word, 
+atomic_defined_word1left, atomic_defined_word1right)) :: rest671)) =>
+ let val  result = MlyValue.defined_prop (
+(
+  case atomic_defined_word of
+    "$true"  => "$true"
+  | "$false" => "$false"
+  | thing => raise UNRECOGNISED_SYMBOL ("defined_prop", thing)
+)
+)
+ in ( LrTable.NT 39, ( result, atomic_defined_word1left, 
+atomic_defined_word1right), rest671)
+end
+|  ( 210, ( ( _, ( MlyValue.atomic_defined_word atomic_defined_word, 
+atomic_defined_word1left, atomic_defined_word1right)) :: rest671)) =>
+ let val  result = MlyValue.defined_pred (
+(
+  case atomic_defined_word of
+    "$distinct"  => "$distinct"
+  | "$ite_f" => "$ite_f"
+  | "$less" => "$less"
+  | "$lesseq" => "$lesseq"
+  | "$greater" => "$greater"
+  | "$greatereq" => "$greatereq"
+  | "$is_int" => "$is_int"
+  | "$is_rat" => "$is_rat"
+  | thing => raise UNRECOGNISED_SYMBOL ("defined_pred", thing)
+)
+)
+ in ( LrTable.NT 40, ( result, atomic_defined_word1left, 
+atomic_defined_word1right), rest671)
+end
+|  ( 211, ( ( _, ( MlyValue.term term2, _, term2right)) :: ( _, ( 
+MlyValue.defined_infix_pred defined_infix_pred, _, _)) :: ( _, ( 
+MlyValue.term term1, term1left, _)) :: rest671)) => let val  result = 
+MlyValue.defined_infix_formula (
+(Pred (defined_infix_pred, [term1, term2])))
+ in ( LrTable.NT 38, ( result, term1left, term2right), rest671)
+end
+|  ( 212, ( ( _, ( MlyValue.infix_equality infix_equality, 
+infix_equality1left, infix_equality1right)) :: rest671)) => let val  
+result = MlyValue.defined_infix_pred (( infix_equality ))
+ in ( LrTable.NT 37, ( result, infix_equality1left, 
+infix_equality1right), rest671)
+end
+|  ( 213, ( ( _, ( _, EQUALS1left, EQUALS1right)) :: rest671)) => let
+ val  result = MlyValue.infix_equality (( Interpreted_Logic Equals ))
+ in ( LrTable.NT 35, ( result, EQUALS1left, EQUALS1right), rest671)
+
+end
+|  ( 214, ( ( _, ( _, NEQUALS1left, NEQUALS1right)) :: rest671)) =>
+ let val  result = MlyValue.infix_inequality (
+( Interpreted_Logic NEquals ))
+ in ( LrTable.NT 36, ( result, NEQUALS1left, NEQUALS1right), rest671)
+
+end
+|  ( 215, ( ( _, ( MlyValue.system_term system_term, system_term1left,
+ system_term1right)) :: rest671)) => let val  result = 
+MlyValue.system_atomic_formula (( Pred system_term ))
+ in ( LrTable.NT 34, ( result, system_term1left, system_term1right), 
+rest671)
+end
+|  ( 216, ( ( _, ( MlyValue.function_term function_term, 
+function_term1left, function_term1right)) :: rest671)) => let val  
+result = MlyValue.term (( function_term ))
+ in ( LrTable.NT 19, ( result, function_term1left, function_term1right
+), rest671)
+end
+|  ( 217, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
+variable_1right)) :: rest671)) => let val  result = MlyValue.term (
+( Term_Var variable_ ))
+ in ( LrTable.NT 19, ( result, variable_1left, variable_1right), 
+rest671)
+end
+|  ( 218, ( ( _, ( MlyValue.conditional_term conditional_term, 
+conditional_term1left, conditional_term1right)) :: rest671)) => let
+ val  result = MlyValue.term (( conditional_term ))
+ in ( LrTable.NT 19, ( result, conditional_term1left, 
+conditional_term1right), rest671)
+end
+|  ( 219, ( ( _, ( MlyValue.let_term let_term, let_term1left, 
+let_term1right)) :: rest671)) => let val  result = MlyValue.term (
+( let_term ))
+ in ( LrTable.NT 19, ( result, let_term1left, let_term1right), rest671
+)
+end
+|  ( 220, ( ( _, ( MlyValue.plain_term plain_term, plain_term1left, 
+plain_term1right)) :: rest671)) => let val  result = 
+MlyValue.function_term (( Term_Func plain_term ))
+ in ( LrTable.NT 32, ( result, plain_term1left, plain_term1right), 
+rest671)
+end
+|  ( 221, ( ( _, ( MlyValue.defined_term defined_term, 
+defined_term1left, defined_term1right)) :: rest671)) => let val  
+result = MlyValue.function_term (( defined_term ))
+ in ( LrTable.NT 32, ( result, defined_term1left, defined_term1right),
+ rest671)
+end
+|  ( 222, ( ( _, ( MlyValue.system_term system_term, system_term1left,
+ system_term1right)) :: rest671)) => let val  result = 
+MlyValue.function_term (( Term_Func system_term ))
+ in ( LrTable.NT 32, ( result, system_term1left, system_term1right), 
+rest671)
+end
+|  ( 223, ( ( _, ( MlyValue.constant constant, constant1left, 
+constant1right)) :: rest671)) => let val  result = MlyValue.plain_term
+ (( (constant, []) ))
+ in ( LrTable.NT 31, ( result, constant1left, constant1right), rest671
+)
+end
+|  ( 224, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.arguments 
+arguments, _, _)) :: _ :: ( _, ( MlyValue.functor_ functor_, 
+functor_1left, _)) :: rest671)) => let val  result = 
+MlyValue.plain_term (( (functor_, arguments) ))
+ in ( LrTable.NT 31, ( result, functor_1left, RPAREN1right), rest671)
+
+end
+|  ( 225, ( ( _, ( MlyValue.functor_ functor_, functor_1left, 
+functor_1right)) :: rest671)) => let val  result = MlyValue.constant (
+( functor_ ))
+ in ( LrTable.NT 30, ( result, functor_1left, functor_1right), rest671
+)
+end
+|  ( 226, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left,
+ atomic_word1right)) :: rest671)) => let val  result = 
 MlyValue.functor_ (( Uninterpreted atomic_word ))
  in ( LrTable.NT 18, ( result, atomic_word1left, atomic_word1right), 
 rest671)
 end
-|  ( 41, ( ( _, ( MlyValue.term term, term1left, term1right)) :: 
-rest671)) => let val  result = MlyValue.arguments (( [term] ))
- in ( LrTable.NT 20, ( result, term1left, term1right), rest671)
-end
-|  ( 42, ( ( _, ( MlyValue.arguments arguments, _, arguments1right))
- :: _ :: ( _, ( MlyValue.term term, term1left, _)) :: rest671)) => let
- val  result = MlyValue.arguments (( term :: arguments ))
- in ( LrTable.NT 20, ( result, term1left, arguments1right), rest671)
+|  ( 227, ( ( _, ( MlyValue.defined_atom defined_atom, 
+defined_atom1left, defined_atom1right)) :: rest671)) => let val  
+result = MlyValue.defined_term (( defined_atom ))
+ in ( LrTable.NT 29, ( result, defined_atom1left, defined_atom1right),
+ rest671)
+end
+|  ( 228, ( ( _, ( MlyValue.defined_atomic_term defined_atomic_term, 
+defined_atomic_term1left, defined_atomic_term1right)) :: rest671)) =>
+ let val  result = MlyValue.defined_term (( defined_atomic_term ))
+ in ( LrTable.NT 29, ( result, defined_atomic_term1left, 
+defined_atomic_term1right), rest671)
+end
+|  ( 229, ( ( _, ( MlyValue.number number, number1left, number1right))
+ :: rest671)) => let val  result = MlyValue.defined_atom (
+( Term_Num number ))
+ in ( LrTable.NT 28, ( result, number1left, number1right), rest671)
 
 end
-|  ( 43, ( ( _, ( MlyValue.ATOMIC_SYSTEM_WORD ATOMIC_SYSTEM_WORD, 
-ATOMIC_SYSTEM_WORD1left, ATOMIC_SYSTEM_WORD1right)) :: rest671)) =>
- let val  result = MlyValue.system_functor (
-( System ATOMIC_SYSTEM_WORD ))
- in ( LrTable.NT 22, ( result, ATOMIC_SYSTEM_WORD1left, 
-ATOMIC_SYSTEM_WORD1right), rest671)
-end
-|  ( 44, ( ( _, ( MlyValue.system_functor system_functor, 
-system_functor1left, system_functor1right)) :: rest671)) => let val  
-result = MlyValue.system_constant (( system_functor ))
- in ( LrTable.NT 23, ( result, system_functor1left, 
-system_functor1right), rest671)
-end
-|  ( 45, ( ( _, ( MlyValue.system_constant system_constant, 
-system_constant1left, system_constant1right)) :: rest671)) => let val 
- result = MlyValue.system_term (( (system_constant, []) ))
- in ( LrTable.NT 24, ( result, system_constant1left, 
-system_constant1right), rest671)
-end
-|  ( 46, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.arguments 
-arguments, _, _)) :: _ :: ( _, ( MlyValue.system_functor 
-system_functor, system_functor1left, _)) :: rest671)) => let val  
-result = MlyValue.system_term (( (system_functor, arguments) ))
- in ( LrTable.NT 24, ( result, system_functor1left, RPAREN1right), 
+|  ( 230, ( ( _, ( MlyValue.DISTINCT_OBJECT DISTINCT_OBJECT, 
+DISTINCT_OBJECT1left, DISTINCT_OBJECT1right)) :: rest671)) => let val 
+ result = MlyValue.defined_atom (
+( Term_Distinct_Object DISTINCT_OBJECT ))
+ in ( LrTable.NT 28, ( result, DISTINCT_OBJECT1left, 
+DISTINCT_OBJECT1right), rest671)
+end
+|  ( 231, ( ( _, ( MlyValue.defined_plain_term defined_plain_term, 
+defined_plain_term1left, defined_plain_term1right)) :: rest671)) =>
+ let val  result = MlyValue.defined_atomic_term (
+( Term_Func defined_plain_term ))
+ in ( LrTable.NT 27, ( result, defined_plain_term1left, 
+defined_plain_term1right), rest671)
+end
+|  ( 232, ( ( _, ( MlyValue.defined_constant defined_constant, 
+defined_constant1left, defined_constant1right)) :: rest671)) => let
+ val  result = MlyValue.defined_plain_term (( (defined_constant, []) )
+)
+ in ( LrTable.NT 26, ( result, defined_constant1left, 
+defined_constant1right), rest671)
+end
+|  ( 233, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.arguments 
+arguments, _, _)) :: _ :: ( _, ( MlyValue.defined_functor 
+defined_functor, defined_functor1left, _)) :: rest671)) => let val  
+result = MlyValue.defined_plain_term (( (defined_functor, arguments) )
+)
+ in ( LrTable.NT 26, ( result, defined_functor1left, RPAREN1right), 
 rest671)
 end
-|  ( 47, ( ( _, ( MlyValue.ATOMIC_DEFINED_WORD ATOMIC_DEFINED_WORD, 
-ATOMIC_DEFINED_WORD1left, ATOMIC_DEFINED_WORD1right)) :: rest671)) =>
+|  ( 234, ( ( _, ( MlyValue.defined_functor defined_functor, 
+defined_functor1left, defined_functor1right)) :: rest671)) => let val 
+ result = MlyValue.defined_constant (( defined_functor ))
+ in ( LrTable.NT 25, ( result, defined_functor1left, 
+defined_functor1right), rest671)
+end
+|  ( 235, ( ( _, ( MlyValue.atomic_defined_word atomic_defined_word, 
+atomic_defined_word1left, atomic_defined_word1right)) :: rest671)) =>
  let val  result = MlyValue.defined_functor (
 (
-  case ATOMIC_DEFINED_WORD of
-    "$sum" => Interpreted_ExtraLogic Sum
+  case atomic_defined_word of
+    "$uminus" => Interpreted_ExtraLogic UMinus
+  | "$sum" => Interpreted_ExtraLogic Sum
   | "$difference" => Interpreted_ExtraLogic Difference
   | "$product" => Interpreted_ExtraLogic Product
   | "$quotient" => Interpreted_ExtraLogic Quotient
@@ -3786,7 +5404,6 @@
   | "$to_int" => Interpreted_ExtraLogic To_Int
   | "$to_rat" => Interpreted_ExtraLogic To_Rat
   | "$to_real" => Interpreted_ExtraLogic To_Real
-  | "$uminus" => Interpreted_ExtraLogic UMinus
 
   | "$i" => TypeSymbol Type_Ind
   | "$o" => TypeSymbol Type_Bool
@@ -3809,103 +5426,57 @@
   | "$is_int" => Interpreted_ExtraLogic Is_Int
   | "$is_rat" => Interpreted_ExtraLogic Is_Rat
 
+  | "$distinct" => Interpreted_ExtraLogic Distinct
+
   | thing => raise UNRECOGNISED_SYMBOL ("defined_functor", thing)
 )
 )
- in ( LrTable.NT 21, ( result, ATOMIC_DEFINED_WORD1left, 
-ATOMIC_DEFINED_WORD1right), rest671)
-end
-|  ( 48, ( ( _, ( MlyValue.defined_functor defined_functor, 
-defined_functor1left, defined_functor1right)) :: rest671)) => let val 
- result = MlyValue.defined_constant (( defined_functor ))
- in ( LrTable.NT 25, ( result, defined_functor1left, 
-defined_functor1right), rest671)
-end
-|  ( 49, ( ( _, ( MlyValue.defined_constant defined_constant, 
-defined_constant1left, defined_constant1right)) :: rest671)) => let
- val  result = MlyValue.defined_plain_term (( (defined_constant, []) )
-)
- in ( LrTable.NT 26, ( result, defined_constant1left, 
-defined_constant1right), rest671)
-end
-|  ( 50, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.arguments 
-arguments, _, _)) :: _ :: ( _, ( MlyValue.defined_functor 
-defined_functor, defined_functor1left, _)) :: rest671)) => let val  
-result = MlyValue.defined_plain_term (( (defined_functor, arguments) )
-)
- in ( LrTable.NT 26, ( result, defined_functor1left, RPAREN1right), 
+ in ( LrTable.NT 21, ( result, atomic_defined_word1left, 
+atomic_defined_word1right), rest671)
+end
+|  ( 236, ( ( _, ( MlyValue.system_constant system_constant, 
+system_constant1left, system_constant1right)) :: rest671)) => let val 
+ result = MlyValue.system_term (( (system_constant, []) ))
+ in ( LrTable.NT 24, ( result, system_constant1left, 
+system_constant1right), rest671)
+end
+|  ( 237, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.arguments 
+arguments, _, _)) :: _ :: ( _, ( MlyValue.system_functor 
+system_functor, system_functor1left, _)) :: rest671)) => let val  
+result = MlyValue.system_term (( (system_functor, arguments) ))
+ in ( LrTable.NT 24, ( result, system_functor1left, RPAREN1right), 
 rest671)
 end
-|  ( 51, ( ( _, ( MlyValue.defined_plain_term defined_plain_term, 
-defined_plain_term1left, defined_plain_term1right)) :: rest671)) =>
- let val  result = MlyValue.defined_atomic_term (
-( Term_Func defined_plain_term ))
- in ( LrTable.NT 27, ( result, defined_plain_term1left, 
-defined_plain_term1right), rest671)
-end
-|  ( 52, ( ( _, ( MlyValue.number number, number1left, number1right))
- :: rest671)) => let val  result = MlyValue.defined_atom (
-( Term_Num number ))
- in ( LrTable.NT 28, ( result, number1left, number1right), rest671)
+|  ( 238, ( ( _, ( MlyValue.system_functor system_functor, 
+system_functor1left, system_functor1right)) :: rest671)) => let val  
+result = MlyValue.system_constant (( system_functor ))
+ in ( LrTable.NT 23, ( result, system_functor1left, 
+system_functor1right), rest671)
+end
+|  ( 239, ( ( _, ( MlyValue.atomic_system_word atomic_system_word, 
+atomic_system_word1left, atomic_system_word1right)) :: rest671)) =>
+ let val  result = MlyValue.system_functor (
+( System atomic_system_word ))
+ in ( LrTable.NT 22, ( result, atomic_system_word1left, 
+atomic_system_word1right), rest671)
+end
+|  ( 240, ( ( _, ( MlyValue.UPPER_WORD UPPER_WORD, UPPER_WORD1left, 
+UPPER_WORD1right)) :: rest671)) => let val  result = 
+MlyValue.variable_ (( UPPER_WORD ))
+ in ( LrTable.NT 10, ( result, UPPER_WORD1left, UPPER_WORD1right), 
+rest671)
+end
+|  ( 241, ( ( _, ( MlyValue.term term, term1left, term1right)) :: 
+rest671)) => let val  result = MlyValue.arguments (( [term] ))
+ in ( LrTable.NT 20, ( result, term1left, term1right), rest671)
+end
+|  ( 242, ( ( _, ( MlyValue.arguments arguments, _, arguments1right))
+ :: _ :: ( _, ( MlyValue.term term, term1left, _)) :: rest671)) => let
+ val  result = MlyValue.arguments (( term :: arguments ))
+ in ( LrTable.NT 20, ( result, term1left, arguments1right), rest671)
 
 end
-|  ( 53, ( ( _, ( MlyValue.DISTINCT_OBJECT DISTINCT_OBJECT, 
-DISTINCT_OBJECT1left, DISTINCT_OBJECT1right)) :: rest671)) => let val 
- result = MlyValue.defined_atom (
-( Term_Distinct_Object DISTINCT_OBJECT ))
- in ( LrTable.NT 28, ( result, DISTINCT_OBJECT1left, 
-DISTINCT_OBJECT1right), rest671)
-end
-|  ( 54, ( ( _, ( MlyValue.defined_atom defined_atom, 
-defined_atom1left, defined_atom1right)) :: rest671)) => let val  
-result = MlyValue.defined_term (( defined_atom ))
- in ( LrTable.NT 29, ( result, defined_atom1left, defined_atom1right),
- rest671)
-end
-|  ( 55, ( ( _, ( MlyValue.defined_atomic_term defined_atomic_term, 
-defined_atomic_term1left, defined_atomic_term1right)) :: rest671)) =>
- let val  result = MlyValue.defined_term (( defined_atomic_term ))
- in ( LrTable.NT 29, ( result, defined_atomic_term1left, 
-defined_atomic_term1right), rest671)
-end
-|  ( 56, ( ( _, ( MlyValue.functor_ functor_, functor_1left, 
-functor_1right)) :: rest671)) => let val  result = MlyValue.constant (
-( functor_ ))
- in ( LrTable.NT 30, ( result, functor_1left, functor_1right), rest671
-)
-end
-|  ( 57, ( ( _, ( MlyValue.constant constant, constant1left, 
-constant1right)) :: rest671)) => let val  result = MlyValue.plain_term
- (( (constant, []) ))
- in ( LrTable.NT 31, ( result, constant1left, constant1right), rest671
-)
-end
-|  ( 58, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.arguments 
-arguments, _, _)) :: _ :: ( _, ( MlyValue.functor_ functor_, 
-functor_1left, _)) :: rest671)) => let val  result = 
-MlyValue.plain_term (( (functor_, arguments) ))
- in ( LrTable.NT 31, ( result, functor_1left, RPAREN1right), rest671)
-
-end
-|  ( 59, ( ( _, ( MlyValue.plain_term plain_term, plain_term1left, 
-plain_term1right)) :: rest671)) => let val  result = 
-MlyValue.function_term (( Term_Func plain_term ))
- in ( LrTable.NT 32, ( result, plain_term1left, plain_term1right), 
-rest671)
-end
-|  ( 60, ( ( _, ( MlyValue.defined_term defined_term, 
-defined_term1left, defined_term1right)) :: rest671)) => let val  
-result = MlyValue.function_term (( defined_term ))
- in ( LrTable.NT 32, ( result, defined_term1left, defined_term1right),
- rest671)
-end
-|  ( 61, ( ( _, ( MlyValue.system_term system_term, system_term1left, 
-system_term1right)) :: rest671)) => let val  result = 
-MlyValue.function_term (( Term_Func system_term ))
- in ( LrTable.NT 32, ( result, system_term1left, system_term1right), 
-rest671)
-end
-|  ( 62, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.term term2,
+|  ( 243, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.term term2,
  _, _)) :: _ :: ( _, ( MlyValue.term term1, _, _)) :: _ :: ( _, ( 
 MlyValue.tff_logic_formula tff_logic_formula, _, _)) :: _ :: ( _, ( _,
  ITE_T1left, _)) :: rest671)) => let val  result = 
@@ -3915,1522 +5486,271 @@
 ))
  in ( LrTable.NT 33, ( result, ITE_T1left, RPAREN1right), rest671)
 end
-|  ( 63, ( ( _, ( MlyValue.function_term function_term, 
-function_term1left, function_term1right)) :: rest671)) => let val  
-result = MlyValue.term (( function_term ))
- in ( LrTable.NT 19, ( result, function_term1left, function_term1right
-), rest671)
-end
-|  ( 64, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
-variable_1right)) :: rest671)) => let val  result = MlyValue.term (
-( Term_Var variable_ ))
- in ( LrTable.NT 19, ( result, variable_1left, variable_1right), 
-rest671)
-end
-|  ( 65, ( ( _, ( MlyValue.conditional_term conditional_term, 
-conditional_term1left, conditional_term1right)) :: rest671)) => let
- val  result = MlyValue.term (( conditional_term ))
- in ( LrTable.NT 19, ( result, conditional_term1left, 
-conditional_term1right), rest671)
-end
-|  ( 66, ( ( _, ( MlyValue.system_term system_term, system_term1left, 
-system_term1right)) :: rest671)) => let val  result = 
-MlyValue.system_atomic_formula (( Pred system_term ))
- in ( LrTable.NT 34, ( result, system_term1left, system_term1right), 
-rest671)
-end
-|  ( 67, ( ( _, ( _, EQUALS1left, EQUALS1right)) :: rest671)) => let
- val  result = MlyValue.infix_equality (( Interpreted_Logic Equals ))
- in ( LrTable.NT 35, ( result, EQUALS1left, EQUALS1right), rest671)
-
-end
-|  ( 68, ( ( _, ( _, NEQUALS1left, NEQUALS1right)) :: rest671)) => let
- val  result = MlyValue.infix_inequality (
-( Interpreted_Logic NEquals ))
- in ( LrTable.NT 36, ( result, NEQUALS1left, NEQUALS1right), rest671)
-
-end
-|  ( 69, ( ( _, ( MlyValue.infix_equality infix_equality, 
-infix_equality1left, infix_equality1right)) :: rest671)) => let val  
-result = MlyValue.defined_infix_pred (( infix_equality ))
- in ( LrTable.NT 37, ( result, infix_equality1left, 
-infix_equality1right), rest671)
-end
-|  ( 70, ( ( _, ( MlyValue.term term2, _, term2right)) :: ( _, ( 
-MlyValue.defined_infix_pred defined_infix_pred, _, _)) :: ( _, ( 
-MlyValue.term term1, term1left, _)) :: rest671)) => let val  result = 
-MlyValue.defined_infix_formula (
-(Pred (defined_infix_pred, [term1, term2])))
- in ( LrTable.NT 38, ( result, term1left, term2right), rest671)
-end
-|  ( 71, ( ( _, ( MlyValue.ATOMIC_DEFINED_WORD ATOMIC_DEFINED_WORD, 
-ATOMIC_DEFINED_WORD1left, ATOMIC_DEFINED_WORD1right)) :: rest671)) =>
- let val  result = MlyValue.defined_prop (
-(
-  case ATOMIC_DEFINED_WORD of
-    "$true"  => "$true"
-  | "$false" => "$false"
-  | thing => raise UNRECOGNISED_SYMBOL ("defined_prop", thing)
-)
-)
- in ( LrTable.NT 39, ( result, ATOMIC_DEFINED_WORD1left, 
-ATOMIC_DEFINED_WORD1right), rest671)
-end
-|  ( 72, ( ( _, ( MlyValue.ATOMIC_DEFINED_WORD ATOMIC_DEFINED_WORD, 
-ATOMIC_DEFINED_WORD1left, ATOMIC_DEFINED_WORD1right)) :: rest671)) =>
- let val  result = MlyValue.defined_pred (
-(
-  case ATOMIC_DEFINED_WORD of
-    "$distinct"  => "$distinct"
-  | "$ite_f" => "$ite_f"
-  | "$less" => "$less"
-  | "$lesseq" => "$lesseq"
-  | "$greater" => "$greater"
-  | "$greatereq" => "$greatereq"
-  | "$is_int" => "$is_int"
-  | "$is_rat" => "$is_rat"
-  | thing => raise UNRECOGNISED_SYMBOL ("defined_pred", thing)
-)
-)
- in ( LrTable.NT 40, ( result, ATOMIC_DEFINED_WORD1left, 
-ATOMIC_DEFINED_WORD1right), rest671)
-end
-|  ( 73, ( ( _, ( MlyValue.defined_plain_term defined_plain_term, 
-defined_plain_term1left, defined_plain_term1right)) :: rest671)) =>
- let val  result = MlyValue.defined_plain_formula (
-( Pred defined_plain_term ))
- in ( LrTable.NT 41, ( result, defined_plain_term1left, 
-defined_plain_term1right), rest671)
-end
-|  ( 74, ( ( _, ( MlyValue.defined_plain_formula defined_plain_formula
-, defined_plain_formula1left, defined_plain_formula1right)) :: rest671
-)) => let val  result = MlyValue.defined_atomic_formula (
-( defined_plain_formula ))
- in ( LrTable.NT 42, ( result, defined_plain_formula1left, 
-defined_plain_formula1right), rest671)
-end
-|  ( 75, ( ( _, ( MlyValue.defined_infix_formula defined_infix_formula
-, defined_infix_formula1left, defined_infix_formula1right)) :: rest671
-)) => let val  result = MlyValue.defined_atomic_formula (
-( defined_infix_formula ))
- in ( LrTable.NT 42, ( result, defined_infix_formula1left, 
-defined_infix_formula1right), rest671)
-end
-|  ( 76, ( ( _, ( MlyValue.plain_term plain_term, plain_term1left, 
-plain_term1right)) :: rest671)) => let val  result = 
-MlyValue.plain_atomic_formula (( Pred plain_term ))
- in ( LrTable.NT 43, ( result, plain_term1left, plain_term1right), 
-rest671)
-end
-|  ( 77, ( ( _, ( MlyValue.plain_atomic_formula plain_atomic_formula, 
-plain_atomic_formula1left, plain_atomic_formula1right)) :: rest671))
- => let val  result = MlyValue.atomic_formula (
-( plain_atomic_formula ))
- in ( LrTable.NT 44, ( result, plain_atomic_formula1left, 
-plain_atomic_formula1right), rest671)
-end
-|  ( 78, ( ( _, ( MlyValue.defined_atomic_formula 
-defined_atomic_formula, defined_atomic_formula1left, 
-defined_atomic_formula1right)) :: rest671)) => let val  result = 
-MlyValue.atomic_formula (( defined_atomic_formula ))
- in ( LrTable.NT 44, ( result, defined_atomic_formula1left, 
-defined_atomic_formula1right), rest671)
-end
-|  ( 79, ( ( _, ( MlyValue.system_atomic_formula system_atomic_formula
-, system_atomic_formula1left, system_atomic_formula1right)) :: rest671
-)) => let val  result = MlyValue.atomic_formula (
-( system_atomic_formula ))
- in ( LrTable.NT 44, ( result, system_atomic_formula1left, 
-system_atomic_formula1right), rest671)
-end
-|  ( 80, ( ( _, ( _, VLINE1left, VLINE1right)) :: rest671)) => let
- val  result = MlyValue.assoc_connective (( Interpreted_Logic Or ))
- in ( LrTable.NT 48, ( result, VLINE1left, VLINE1right), rest671)
-end
-|  ( 81, ( ( _, ( _, AMPERSAND1left, AMPERSAND1right)) :: rest671)) =>
- let val  result = MlyValue.assoc_connective (
-( Interpreted_Logic And ))
- in ( LrTable.NT 48, ( result, AMPERSAND1left, AMPERSAND1right), 
-rest671)
-end
-|  ( 82, ( ( _, ( _, IFF1left, IFF1right)) :: rest671)) => let val  
-result = MlyValue.binary_connective (( Interpreted_Logic Iff ))
- in ( LrTable.NT 49, ( result, IFF1left, IFF1right), rest671)
-end
-|  ( 83, ( ( _, ( _, IMPLIES1left, IMPLIES1right)) :: rest671)) => let
- val  result = MlyValue.binary_connective (( Interpreted_Logic If ))
- in ( LrTable.NT 49, ( result, IMPLIES1left, IMPLIES1right), rest671)
-
-end
-|  ( 84, ( ( _, ( _, IF1left, IF1right)) :: rest671)) => let val  
-result = MlyValue.binary_connective (( Interpreted_Logic Fi ))
- in ( LrTable.NT 49, ( result, IF1left, IF1right), rest671)
-end
-|  ( 85, ( ( _, ( _, XOR1left, XOR1right)) :: rest671)) => let val  
-result = MlyValue.binary_connective (( Interpreted_Logic Xor ))
- in ( LrTable.NT 49, ( result, XOR1left, XOR1right), rest671)
-end
-|  ( 86, ( ( _, ( _, NOR1left, NOR1right)) :: rest671)) => let val  
-result = MlyValue.binary_connective (( Interpreted_Logic Nor ))
- in ( LrTable.NT 49, ( result, NOR1left, NOR1right), rest671)
-end
-|  ( 87, ( ( _, ( _, NAND1left, NAND1right)) :: rest671)) => let val  
-result = MlyValue.binary_connective (( Interpreted_Logic Nand ))
- in ( LrTable.NT 49, ( result, NAND1left, NAND1right), rest671)
-end
-|  ( 88, ( ( _, ( _, EXCLAMATION1left, EXCLAMATION1right)) :: rest671)
-) => let val  result = MlyValue.fol_quantifier (( Forall ))
- in ( LrTable.NT 50, ( result, EXCLAMATION1left, EXCLAMATION1right), 
-rest671)
-end
-|  ( 89, ( ( _, ( _, QUESTION1left, QUESTION1right)) :: rest671)) =>
- let val  result = MlyValue.fol_quantifier (( Exists ))
- in ( LrTable.NT 50, ( result, QUESTION1left, QUESTION1right), rest671
-)
-end
-|  ( 90, ( ( _, ( MlyValue.unary_connective unary_connective, 
-unary_connective1left, unary_connective1right)) :: rest671)) => let
- val  result = MlyValue.thf_unary_connective (( unary_connective ))
- in ( LrTable.NT 51, ( result, unary_connective1left, 
-unary_connective1right), rest671)
-end
-|  ( 91, ( ( _, ( _, OPERATOR_FORALL1left, OPERATOR_FORALL1right)) :: 
-rest671)) => let val  result = MlyValue.thf_unary_connective (
-( Interpreted_Logic Op_Forall ))
- in ( LrTable.NT 51, ( result, OPERATOR_FORALL1left, 
-OPERATOR_FORALL1right), rest671)
-end
-|  ( 92, ( ( _, ( _, OPERATOR_EXISTS1left, OPERATOR_EXISTS1right)) :: 
-rest671)) => let val  result = MlyValue.thf_unary_connective (
-( Interpreted_Logic Op_Exists ))
- in ( LrTable.NT 51, ( result, OPERATOR_EXISTS1left, 
-OPERATOR_EXISTS1right), rest671)
-end
-|  ( 93, ( ( _, ( MlyValue.infix_equality infix_equality, 
-infix_equality1left, infix_equality1right)) :: rest671)) => let val  
-result = MlyValue.thf_pair_connective (( infix_equality ))
- in ( LrTable.NT 52, ( result, infix_equality1left, 
-infix_equality1right), rest671)
-end
-|  ( 94, ( ( _, ( MlyValue.infix_inequality infix_inequality, 
-infix_inequality1left, infix_inequality1right)) :: rest671)) => let
- val  result = MlyValue.thf_pair_connective (( infix_inequality ))
- in ( LrTable.NT 52, ( result, infix_inequality1left, 
-infix_inequality1right), rest671)
-end
-|  ( 95, ( ( _, ( MlyValue.binary_connective binary_connective, 
-binary_connective1left, binary_connective1right)) :: rest671)) => let
- val  result = MlyValue.thf_pair_connective (( binary_connective ))
- in ( LrTable.NT 52, ( result, binary_connective1left, 
-binary_connective1right), rest671)
-end
-|  ( 96, ( ( _, ( MlyValue.fol_quantifier fol_quantifier, 
-fol_quantifier1left, fol_quantifier1right)) :: rest671)) => let val  
-result = MlyValue.thf_quantifier (( fol_quantifier ))
- in ( LrTable.NT 53, ( result, fol_quantifier1left, 
-fol_quantifier1right), rest671)
-end
-|  ( 97, ( ( _, ( _, CARET1left, CARET1right)) :: rest671)) => let
- val  result = MlyValue.thf_quantifier (( Lambda ))
- in ( LrTable.NT 53, ( result, CARET1left, CARET1right), rest671)
-end
-|  ( 98, ( ( _, ( _, DEP_PROD1left, DEP_PROD1right)) :: rest671)) =>
- let val  result = MlyValue.thf_quantifier (( Dep_Prod ))
- in ( LrTable.NT 53, ( result, DEP_PROD1left, DEP_PROD1right), rest671
-)
-end
-|  ( 99, ( ( _, ( _, DEP_SUM1left, DEP_SUM1right)) :: rest671)) => let
- val  result = MlyValue.thf_quantifier (( Dep_Sum ))
- in ( LrTable.NT 53, ( result, DEP_SUM1left, DEP_SUM1right), rest671)
-
-end
-|  ( 100, ( ( _, ( _, INDEF_CHOICE1left, INDEF_CHOICE1right)) :: 
-rest671)) => let val  result = MlyValue.thf_quantifier (( Epsilon ))
- in ( LrTable.NT 53, ( result, INDEF_CHOICE1left, INDEF_CHOICE1right),
- rest671)
-end
-|  ( 101, ( ( _, ( _, DEFIN_CHOICE1left, DEFIN_CHOICE1right)) :: 
-rest671)) => let val  result = MlyValue.thf_quantifier (( Iota ))
- in ( LrTable.NT 53, ( result, DEFIN_CHOICE1left, DEFIN_CHOICE1right),
- rest671)
-end
-|  ( 102, ( ( _, ( MlyValue.term term2, _, term2right)) :: ( _, ( 
-MlyValue.infix_inequality infix_inequality, _, _)) :: ( _, ( 
-MlyValue.term term1, term1left, _)) :: rest671)) => let val  result = 
-MlyValue.fol_infix_unary (( Pred (infix_inequality, [term1, term2]) ))
- in ( LrTable.NT 54, ( result, term1left, term2right), rest671)
-end
-|  ( 103, ( ( _, ( MlyValue.thf_pair_connective thf_pair_connective, 
-thf_pair_connective1left, thf_pair_connective1right)) :: rest671)) =>
- let val  result = MlyValue.thf_conn_term (( thf_pair_connective ))
- in ( LrTable.NT 55, ( result, thf_pair_connective1left, 
-thf_pair_connective1right), rest671)
-end
-|  ( 104, ( ( _, ( MlyValue.assoc_connective assoc_connective, 
-assoc_connective1left, assoc_connective1right)) :: rest671)) => let
- val  result = MlyValue.thf_conn_term (( assoc_connective ))
- in ( LrTable.NT 55, ( result, assoc_connective1left, 
-assoc_connective1right), rest671)
-end
-|  ( 105, ( ( _, ( MlyValue.thf_unary_connective thf_unary_connective,
- thf_unary_connective1left, thf_unary_connective1right)) :: rest671))
- => let val  result = MlyValue.thf_conn_term (( thf_unary_connective )
-)
- in ( LrTable.NT 55, ( result, thf_unary_connective1left, 
-thf_unary_connective1right), rest671)
-end
-|  ( 106, ( ( _, ( MlyValue.atomic_formula atomic_formula, 
-atomic_formula1left, atomic_formula1right)) :: rest671)) => let val  
-result = MlyValue.literal (( atomic_formula ))
- in ( LrTable.NT 56, ( result, atomic_formula1left, 
-atomic_formula1right), rest671)
-end
-|  ( 107, ( ( _, ( MlyValue.atomic_formula atomic_formula, _, 
-atomic_formula1right)) :: ( _, ( _, TILDE1left, _)) :: rest671)) =>
- let val  result = MlyValue.literal (
-( Fmla (Interpreted_Logic Not, [atomic_formula]) ))
- in ( LrTable.NT 56, ( result, TILDE1left, atomic_formula1right), 
-rest671)
-end
-|  ( 108, ( ( _, ( MlyValue.fol_infix_unary fol_infix_unary, 
-fol_infix_unary1left, fol_infix_unary1right)) :: rest671)) => let val 
- result = MlyValue.literal (( fol_infix_unary ))
- in ( LrTable.NT 56, ( result, fol_infix_unary1left, 
-fol_infix_unary1right), rest671)
-end
-|  ( 109, ( ( _, ( MlyValue.literal literal, literal1left, 
-literal1right)) :: rest671)) => let val  result = MlyValue.disjunction
- (( literal ))
- in ( LrTable.NT 57, ( result, literal1left, literal1right), rest671)
-
-end
-|  ( 110, ( ( _, ( MlyValue.literal literal, _, literal1right)) :: _
- :: ( _, ( MlyValue.disjunction disjunction, disjunction1left, _)) :: 
-rest671)) => let val  result = MlyValue.disjunction (
-( Fmla (Interpreted_Logic Or, [disjunction, literal]) ))
- in ( LrTable.NT 57, ( result, disjunction1left, literal1right), 
-rest671)
-end
-|  ( 111, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.disjunction
- disjunction, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
- val  result = MlyValue.cnf_formula (( disjunction ))
- in ( LrTable.NT 58, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 112, ( ( _, ( MlyValue.disjunction disjunction, disjunction1left,
- disjunction1right)) :: rest671)) => let val  result = 
-MlyValue.cnf_formula (( disjunction ))
- in ( LrTable.NT 58, ( result, disjunction1left, disjunction1right), 
-rest671)
-end
-|  ( 113, ( ( _, ( MlyValue.fof_logic_formula fof_logic_formula, 
-fof_logic_formula1left, fof_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.fof_tuple_list (( [fof_logic_formula] ))
- in ( LrTable.NT 59, ( result, fof_logic_formula1left, 
-fof_logic_formula1right), rest671)
-end
-|  ( 114, ( ( _, ( MlyValue.fof_tuple_list fof_tuple_list, _, 
-fof_tuple_list1right)) :: _ :: ( _, ( MlyValue.fof_logic_formula 
-fof_logic_formula, fof_logic_formula1left, _)) :: rest671)) => let
- val  result = MlyValue.fof_tuple_list (
-( fof_logic_formula :: fof_tuple_list ))
- in ( LrTable.NT 59, ( result, fof_logic_formula1left, 
-fof_tuple_list1right), rest671)
-end
-|  ( 115, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) ::
- rest671)) => let val  result = MlyValue.fof_tuple (( [] ))
- in ( LrTable.NT 60, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 116, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
-MlyValue.fof_tuple_list fof_tuple_list, _, _)) :: ( _, ( _, LBRKT1left
-, _)) :: rest671)) => let val  result = MlyValue.fof_tuple (
-( fof_tuple_list ))
- in ( LrTable.NT 60, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 117, ( ( _, ( MlyValue.fof_tuple fof_tuple2, _, fof_tuple2right))
- :: _ :: ( _, ( MlyValue.fof_tuple fof_tuple1, fof_tuple1left, _)) :: 
-rest671)) => let val  result = MlyValue.fof_sequent (
-( Sequent (fof_tuple1, fof_tuple2) ))
- in ( LrTable.NT 61, ( result, fof_tuple1left, fof_tuple2right), 
-rest671)
-end
-|  ( 118, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.fof_sequent
- fof_sequent, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
- val  result = MlyValue.fof_sequent (( fof_sequent ))
- in ( LrTable.NT 61, ( result, LPAREN1left, RPAREN1right), rest671)
+|  ( 244, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.term term,
+ _, _)) :: _ :: ( _, ( MlyValue.tff_let_formula_defn 
+tff_let_formula_defn, _, _)) :: _ :: ( _, ( _, LET_FT1left, _)) :: 
+rest671)) => let val  result = MlyValue.let_term (
+(Term_Let (tff_let_formula_defn, term) ))
+ in ( LrTable.NT 143, ( result, LET_FT1left, RPAREN1right), rest671)
 
 end
-|  ( 119, ( ( _, ( _, TILDE1left, TILDE1right)) :: rest671)) => let
- val  result = MlyValue.unary_connective (( Interpreted_Logic Not ))
- in ( LrTable.NT 45, ( result, TILDE1left, TILDE1right), rest671)
-end
-|  ( 120, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
-, fof_unitary_formula1right)) :: ( _, ( MlyValue.unary_connective 
-unary_connective, unary_connective1left, _)) :: rest671)) => let val  
-result = MlyValue.fof_unary_formula (
-( Fmla (unary_connective, [fof_unitary_formula]) ))
- in ( LrTable.NT 62, ( result, unary_connective1left, 
-fof_unitary_formula1right), rest671)
-end
-|  ( 121, ( ( _, ( MlyValue.fol_infix_unary fol_infix_unary, 
-fol_infix_unary1left, fol_infix_unary1right)) :: rest671)) => let val 
- result = MlyValue.fof_unary_formula (( fol_infix_unary ))
- in ( LrTable.NT 62, ( result, fol_infix_unary1left, 
-fol_infix_unary1right), rest671)
-end
-|  ( 122, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
-variable_1right)) :: rest671)) => let val  result = 
-MlyValue.fof_variable_list (( [variable_] ))
- in ( LrTable.NT 63, ( result, variable_1left, variable_1right), 
-rest671)
-end
-|  ( 123, ( ( _, ( MlyValue.fof_variable_list fof_variable_list, _, 
-fof_variable_list1right)) :: _ :: ( _, ( MlyValue.variable_ variable_,
- variable_1left, _)) :: rest671)) => let val  result = 
-MlyValue.fof_variable_list (( variable_ :: fof_variable_list ))
- in ( LrTable.NT 63, ( result, variable_1left, fof_variable_list1right
-), rest671)
-end
-|  ( 124, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
-, fof_unitary_formula1right)) :: _ :: _ :: ( _, ( 
-MlyValue.fof_variable_list fof_variable_list, _, _)) :: _ :: ( _, ( 
-MlyValue.fol_quantifier fol_quantifier, fol_quantifier1left, _)) :: 
-rest671)) => let val  result = MlyValue.fof_quantified_formula (
-(
-  Quant (fol_quantifier, map (fn v => (v, NONE)) fof_variable_list, fof_unitary_formula)
-)
-)
- in ( LrTable.NT 64, ( result, fol_quantifier1left, 
-fof_unitary_formula1right), rest671)
-end
-|  ( 125, ( ( _, ( MlyValue.fof_quantified_formula 
-fof_quantified_formula, fof_quantified_formula1left, 
-fof_quantified_formula1right)) :: rest671)) => let val  result = 
-MlyValue.fof_unitary_formula (( fof_quantified_formula ))
- in ( LrTable.NT 65, ( result, fof_quantified_formula1left, 
-fof_quantified_formula1right), rest671)
-end
-|  ( 126, ( ( _, ( MlyValue.fof_unary_formula fof_unary_formula, 
-fof_unary_formula1left, fof_unary_formula1right)) :: rest671)) => let
- val  result = MlyValue.fof_unitary_formula (( fof_unary_formula ))
- in ( LrTable.NT 65, ( result, fof_unary_formula1left, 
-fof_unary_formula1right), rest671)
-end
-|  ( 127, ( ( _, ( MlyValue.atomic_formula atomic_formula, 
-atomic_formula1left, atomic_formula1right)) :: rest671)) => let val  
-result = MlyValue.fof_unitary_formula (( atomic_formula ))
- in ( LrTable.NT 65, ( result, atomic_formula1left, 
-atomic_formula1right), rest671)
-end
-|  ( 128, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.fof_logic_formula fof_logic_formula, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.fof_unitary_formula (( fof_logic_formula ))
- in ( LrTable.NT 65, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 129, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula2,
- _, fof_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.fof_unitary_formula fof_unitary_formula1, 
-fof_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.fof_and_formula (
-( Fmla (Interpreted_Logic And, [fof_unitary_formula1, fof_unitary_formula2]) )
-)
- in ( LrTable.NT 66, ( result, fof_unitary_formula1left, 
-fof_unitary_formula2right), rest671)
-end
-|  ( 130, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
-, fof_unitary_formula1right)) :: _ :: ( _, ( MlyValue.fof_and_formula 
-fof_and_formula, fof_and_formula1left, _)) :: rest671)) => let val  
-result = MlyValue.fof_and_formula (
-( Fmla (Interpreted_Logic And, [fof_and_formula, fof_unitary_formula]) )
-)
- in ( LrTable.NT 66, ( result, fof_and_formula1left, 
-fof_unitary_formula1right), rest671)
-end
-|  ( 131, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula2,
- _, fof_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.fof_unitary_formula fof_unitary_formula1, 
-fof_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.fof_or_formula (
-( Fmla (Interpreted_Logic Or, [fof_unitary_formula1, fof_unitary_formula2]) )
-)
- in ( LrTable.NT 67, ( result, fof_unitary_formula1left, 
-fof_unitary_formula2right), rest671)
-end
-|  ( 132, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, _
-, fof_unitary_formula1right)) :: _ :: ( _, ( MlyValue.fof_or_formula 
-fof_or_formula, fof_or_formula1left, _)) :: rest671)) => let val  
-result = MlyValue.fof_or_formula (
-( Fmla (Interpreted_Logic Or, [fof_or_formula, fof_unitary_formula]) )
-)
- in ( LrTable.NT 67, ( result, fof_or_formula1left, 
-fof_unitary_formula1right), rest671)
-end
-|  ( 133, ( ( _, ( MlyValue.fof_or_formula fof_or_formula, 
-fof_or_formula1left, fof_or_formula1right)) :: rest671)) => let val  
-result = MlyValue.fof_binary_assoc (( fof_or_formula ))
- in ( LrTable.NT 68, ( result, fof_or_formula1left, 
-fof_or_formula1right), rest671)
-end
-|  ( 134, ( ( _, ( MlyValue.fof_and_formula fof_and_formula, 
-fof_and_formula1left, fof_and_formula1right)) :: rest671)) => let val 
- result = MlyValue.fof_binary_assoc (( fof_and_formula ))
- in ( LrTable.NT 68, ( result, fof_and_formula1left, 
-fof_and_formula1right), rest671)
-end
-|  ( 135, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula2,
- _, fof_unitary_formula2right)) :: ( _, ( MlyValue.binary_connective 
-binary_connective, _, _)) :: ( _, ( MlyValue.fof_unitary_formula 
-fof_unitary_formula1, fof_unitary_formula1left, _)) :: rest671)) =>
- let val  result = MlyValue.fof_binary_nonassoc (
-(
-  Fmla (binary_connective, [fof_unitary_formula1, fof_unitary_formula2] )
-)
-)
- in ( LrTable.NT 69, ( result, fof_unitary_formula1left, 
-fof_unitary_formula2right), rest671)
-end
-|  ( 136, ( ( _, ( MlyValue.fof_binary_nonassoc fof_binary_nonassoc, 
-fof_binary_nonassoc1left, fof_binary_nonassoc1right)) :: rest671)) =>
- let val  result = MlyValue.fof_binary_formula (
-( fof_binary_nonassoc ))
- in ( LrTable.NT 70, ( result, fof_binary_nonassoc1left, 
-fof_binary_nonassoc1right), rest671)
-end
-|  ( 137, ( ( _, ( MlyValue.fof_binary_assoc fof_binary_assoc, 
-fof_binary_assoc1left, fof_binary_assoc1right)) :: rest671)) => let
- val  result = MlyValue.fof_binary_formula (( fof_binary_assoc ))
- in ( LrTable.NT 70, ( result, fof_binary_assoc1left, 
-fof_binary_assoc1right), rest671)
-end
-|  ( 138, ( ( _, ( MlyValue.fof_binary_formula fof_binary_formula, 
-fof_binary_formula1left, fof_binary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.fof_logic_formula (( fof_binary_formula ))
- in ( LrTable.NT 71, ( result, fof_binary_formula1left, 
-fof_binary_formula1right), rest671)
-end
-|  ( 139, ( ( _, ( MlyValue.fof_unitary_formula fof_unitary_formula, 
-fof_unitary_formula1left, fof_unitary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.fof_logic_formula (( fof_unitary_formula )
-)
- in ( LrTable.NT 71, ( result, fof_unitary_formula1left, 
-fof_unitary_formula1right), rest671)
-end
-|  ( 140, ( ( _, ( MlyValue.fof_logic_formula fof_logic_formula, 
-fof_logic_formula1left, fof_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.fof_formula (( fof_logic_formula ))
- in ( LrTable.NT 72, ( result, fof_logic_formula1left, 
-fof_logic_formula1right), rest671)
-end
-|  ( 141, ( ( _, ( MlyValue.fof_sequent fof_sequent, fof_sequent1left,
- fof_sequent1right)) :: rest671)) => let val  result = 
-MlyValue.fof_formula (( fof_sequent ))
- in ( LrTable.NT 72, ( result, fof_sequent1left, fof_sequent1right), 
-rest671)
-end
-|  ( 142, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) ::
- rest671)) => let val  result = MlyValue.tff_tuple (( [] ))
- in ( LrTable.NT 73, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 143, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
-MlyValue.tff_tuple_list tff_tuple_list, _, _)) :: ( _, ( _, LBRKT1left
-, _)) :: rest671)) => let val  result = MlyValue.tff_tuple (
-( tff_tuple_list ))
- in ( LrTable.NT 73, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 144, ( ( _, ( MlyValue.tff_tuple_list tff_tuple_list, _, 
-tff_tuple_list1right)) :: _ :: ( _, ( MlyValue.tff_logic_formula 
-tff_logic_formula, tff_logic_formula1left, _)) :: rest671)) => let
- val  result = MlyValue.tff_tuple_list (
-( tff_logic_formula :: tff_tuple_list ))
- in ( LrTable.NT 74, ( result, tff_logic_formula1left, 
-tff_tuple_list1right), rest671)
-end
-|  ( 145, ( ( _, ( MlyValue.tff_logic_formula tff_logic_formula, 
-tff_logic_formula1left, tff_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.tff_tuple_list (( [tff_logic_formula] ))
- in ( LrTable.NT 74, ( result, tff_logic_formula1left, 
-tff_logic_formula1right), rest671)
-end
-|  ( 146, ( ( _, ( MlyValue.tff_tuple tff_tuple2, _, tff_tuple2right))
- :: _ :: ( _, ( MlyValue.tff_tuple tff_tuple1, tff_tuple1left, _)) :: 
-rest671)) => let val  result = MlyValue.tff_sequent (
-( Sequent (tff_tuple1, tff_tuple2) ))
- in ( LrTable.NT 75, ( result, tff_tuple1left, tff_tuple2right), 
-rest671)
-end
-|  ( 147, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.tff_sequent
- tff_sequent, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
- val  result = MlyValue.tff_sequent (( tff_sequent ))
- in ( LrTable.NT 75, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 148, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_logic_formula tff_logic_formula3, _, _)) :: _ :: ( _, ( 
-MlyValue.tff_logic_formula tff_logic_formula2, _, _)) :: _ :: ( _, ( 
-MlyValue.tff_logic_formula tff_logic_formula1, _, _)) :: _ :: ( _, ( _
-, ITE_F1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_conditional (
-(
-  Conditional (tff_logic_formula1, tff_logic_formula2, tff_logic_formula3)
-)
-)
- in ( LrTable.NT 76, ( result, ITE_F1left, RPAREN1right), rest671)
-end
-|  ( 149, ( ( _, ( MlyValue.tff_logic_formula tff_logic_formula, _, 
-tff_logic_formula1right)) :: _ :: ( _, ( MlyValue.variable_ variable_,
- variable_1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_defined_var (
-( Let_fmla ((variable_, NONE), tff_logic_formula) ))
- in ( LrTable.NT 77, ( result, variable_1left, tff_logic_formula1right
-), rest671)
-end
-|  ( 150, ( ( _, ( MlyValue.term term, _, term1right)) :: _ :: ( _, ( 
-MlyValue.variable_ variable_, variable_1left, _)) :: rest671)) => let
- val  result = MlyValue.tff_defined_var (
-( Let_term ((variable_, NONE), term) ))
- in ( LrTable.NT 77, ( result, variable_1left, term1right), rest671)
-
-end
-|  ( 151, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_defined_var tff_defined_var, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_defined_var (( tff_defined_var ))
- in ( LrTable.NT 77, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 152, ( ( _, ( MlyValue.tff_defined_var tff_defined_var, 
-tff_defined_var1left, tff_defined_var1right)) :: rest671)) => let val 
- result = MlyValue.tff_let_list (( [tff_defined_var] ))
- in ( LrTable.NT 78, ( result, tff_defined_var1left, 
-tff_defined_var1right), rest671)
-end
-|  ( 153, ( ( _, ( MlyValue.tff_let_list tff_let_list, _, 
-tff_let_list1right)) :: _ :: ( _, ( MlyValue.tff_defined_var 
-tff_defined_var, tff_defined_var1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_let_list (( tff_defined_var :: tff_let_list ))
- in ( LrTable.NT 78, ( result, tff_defined_var1left, 
-tff_let_list1right), rest671)
-end
-|  ( 154, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _
-, tff_unitary_formula1right)) :: _ :: _ :: ( _, ( 
-MlyValue.tff_let_list tff_let_list, _, _)) :: _ :: ( _, ( _, LET1left,
- _)) :: rest671)) => let val  result = MlyValue.tptp_let (
-(
-  Let (tff_let_list, tff_unitary_formula)
-))
- in ( LrTable.NT 79, ( result, LET1left, tff_unitary_formula1right), 
-rest671)
-end
-|  ( 155, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type2, _, 
-tff_atomic_type2right)) :: _ :: ( _, ( MlyValue.tff_atomic_type 
-tff_atomic_type1, tff_atomic_type1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_xprod_type (
-( Prod_type(tff_atomic_type1, tff_atomic_type2) ))
- in ( LrTable.NT 80, ( result, tff_atomic_type1left, 
-tff_atomic_type2right), rest671)
-end
-|  ( 156, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, _, 
-tff_atomic_type1right)) :: _ :: ( _, ( MlyValue.tff_xprod_type 
-tff_xprod_type, tff_xprod_type1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_xprod_type (
-( Prod_type(tff_xprod_type, tff_atomic_type) ))
- in ( LrTable.NT 80, ( result, tff_xprod_type1left, 
-tff_atomic_type1right), rest671)
-end
-|  ( 157, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_xprod_type tff_xprod_type, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_xprod_type (( tff_xprod_type ))
- in ( LrTable.NT 80, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 158, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, _, 
-tff_atomic_type1right)) :: _ :: ( _, ( MlyValue.tff_unitary_type 
-tff_unitary_type, tff_unitary_type1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_mapping_type (
-( Fn_type(tff_unitary_type, tff_atomic_type) ))
- in ( LrTable.NT 81, ( result, tff_unitary_type1left, 
-tff_atomic_type1right), rest671)
-end
-|  ( 159, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_mapping_type tff_mapping_type, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_mapping_type (( tff_mapping_type ))
- in ( LrTable.NT 81, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 160, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left,
- atomic_word1right)) :: rest671)) => let val  result = 
-MlyValue.tff_atomic_type (( Atom_type atomic_word ))
- in ( LrTable.NT 82, ( result, atomic_word1left, atomic_word1right), 
-rest671)
-end
-|  ( 161, ( ( _, ( MlyValue.defined_type defined_type, 
-defined_type1left, defined_type1right)) :: rest671)) => let val  
-result = MlyValue.tff_atomic_type (( Defined_type defined_type ))
- in ( LrTable.NT 82, ( result, defined_type1left, defined_type1right),
- rest671)
-end
-|  ( 162, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, 
-tff_atomic_type1left, tff_atomic_type1right)) :: rest671)) => let val 
- result = MlyValue.tff_unitary_type (( tff_atomic_type ))
- in ( LrTable.NT 83, ( result, tff_atomic_type1left, 
-tff_atomic_type1right), rest671)
-end
-|  ( 163, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_xprod_type tff_xprod_type, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_unitary_type (( tff_xprod_type ))
- in ( LrTable.NT 83, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 164, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, 
-tff_atomic_type1left, tff_atomic_type1right)) :: rest671)) => let val 
- result = MlyValue.tff_top_level_type (( tff_atomic_type ))
- in ( LrTable.NT 84, ( result, tff_atomic_type1left, 
-tff_atomic_type1right), rest671)
-end
-|  ( 165, ( ( _, ( MlyValue.tff_mapping_type tff_mapping_type, 
-tff_mapping_type1left, tff_mapping_type1right)) :: rest671)) => let
- val  result = MlyValue.tff_top_level_type (( tff_mapping_type ))
- in ( LrTable.NT 84, ( result, tff_mapping_type1left, 
-tff_mapping_type1right), rest671)
-end
-|  ( 166, ( ( _, ( MlyValue.functor_ functor_, functor_1left, 
-functor_1right)) :: rest671)) => let val  result = 
-MlyValue.tff_untyped_atom (( (functor_, NONE) ))
- in ( LrTable.NT 85, ( result, functor_1left, functor_1right), rest671
-)
-end
-|  ( 167, ( ( _, ( MlyValue.system_functor system_functor, 
-system_functor1left, system_functor1right)) :: rest671)) => let val  
-result = MlyValue.tff_untyped_atom (( (system_functor, NONE) ))
- in ( LrTable.NT 85, ( result, system_functor1left, 
-system_functor1right), rest671)
-end
-|  ( 168, ( ( _, ( MlyValue.tff_top_level_type tff_top_level_type, _, 
-tff_top_level_type1right)) :: _ :: ( _, ( MlyValue.tff_untyped_atom 
-tff_untyped_atom, tff_untyped_atom1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_typed_atom (
-( (fst tff_untyped_atom, SOME tff_top_level_type) ))
- in ( LrTable.NT 86, ( result, tff_untyped_atom1left, 
-tff_top_level_type1right), rest671)
-end
-|  ( 169, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_typed_atom tff_typed_atom, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_typed_atom (( tff_typed_atom ))
- in ( LrTable.NT 86, ( result, LPAREN1left, RPAREN1right), rest671)
+|  ( 245, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.term term,
+ _, _)) :: _ :: ( _, ( MlyValue.tff_let_term_defn tff_let_term_defn, _
+, _)) :: _ :: ( _, ( _, LET_TT1left, _)) :: rest671)) => let val  
+result = MlyValue.let_term ((Term_Let (tff_let_term_defn, term) ))
+ in ( LrTable.NT 143, ( result, LET_TT1left, RPAREN1right), rest671)
 
 end
-|  ( 170, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _
-, tff_unitary_formula1right)) :: ( _, ( MlyValue.unary_connective 
-unary_connective, unary_connective1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_unary_formula (
-( Fmla (unary_connective, [tff_unitary_formula]) ))
- in ( LrTable.NT 87, ( result, unary_connective1left, 
-tff_unitary_formula1right), rest671)
-end
-|  ( 171, ( ( _, ( MlyValue.fol_infix_unary fol_infix_unary, 
-fol_infix_unary1left, fol_infix_unary1right)) :: rest671)) => let val 
- result = MlyValue.tff_unary_formula (( fol_infix_unary ))
- in ( LrTable.NT 87, ( result, fol_infix_unary1left, 
-fol_infix_unary1right), rest671)
-end
-|  ( 172, ( ( _, ( MlyValue.tff_atomic_type tff_atomic_type, _, 
-tff_atomic_type1right)) :: _ :: ( _, ( MlyValue.variable_ variable_, 
-variable_1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_typed_variable (( (variable_, SOME tff_atomic_type) ))
- in ( LrTable.NT 88, ( result, variable_1left, tff_atomic_type1right),
- rest671)
-end
-|  ( 173, ( ( _, ( MlyValue.tff_typed_variable tff_typed_variable, 
-tff_typed_variable1left, tff_typed_variable1right)) :: rest671)) =>
- let val  result = MlyValue.tff_variable (( tff_typed_variable ))
- in ( LrTable.NT 89, ( result, tff_typed_variable1left, 
-tff_typed_variable1right), rest671)
-end
-|  ( 174, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
-variable_1right)) :: rest671)) => let val  result = 
-MlyValue.tff_variable (( (variable_, NONE) ))
- in ( LrTable.NT 89, ( result, variable_1left, variable_1right), 
-rest671)
-end
-|  ( 175, ( ( _, ( MlyValue.tff_variable tff_variable, 
-tff_variable1left, tff_variable1right)) :: rest671)) => let val  
-result = MlyValue.tff_variable_list (( [tff_variable] ))
- in ( LrTable.NT 90, ( result, tff_variable1left, tff_variable1right),
- rest671)
-end
-|  ( 176, ( ( _, ( MlyValue.tff_variable_list tff_variable_list, _, 
-tff_variable_list1right)) :: _ :: ( _, ( MlyValue.tff_variable 
-tff_variable, tff_variable1left, _)) :: rest671)) => let val  result =
- MlyValue.tff_variable_list (( tff_variable :: tff_variable_list ))
- in ( LrTable.NT 90, ( result, tff_variable1left, 
-tff_variable_list1right), rest671)
-end
-|  ( 177, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _
-, tff_unitary_formula1right)) :: _ :: _ :: ( _, ( 
-MlyValue.tff_variable_list tff_variable_list, _, _)) :: _ :: ( _, ( 
-MlyValue.fol_quantifier fol_quantifier, fol_quantifier1left, _)) :: 
-rest671)) => let val  result = MlyValue.tff_quantified_formula (
-(
-  Quant (fol_quantifier, tff_variable_list, tff_unitary_formula)
-))
- in ( LrTable.NT 91, ( result, fol_quantifier1left, 
-tff_unitary_formula1right), rest671)
-end
-|  ( 178, ( ( _, ( MlyValue.tff_quantified_formula 
-tff_quantified_formula, tff_quantified_formula1left, 
-tff_quantified_formula1right)) :: rest671)) => let val  result = 
-MlyValue.tff_unitary_formula (( tff_quantified_formula ))
- in ( LrTable.NT 92, ( result, tff_quantified_formula1left, 
-tff_quantified_formula1right), rest671)
-end
-|  ( 179, ( ( _, ( MlyValue.tff_unary_formula tff_unary_formula, 
-tff_unary_formula1left, tff_unary_formula1right)) :: rest671)) => let
- val  result = MlyValue.tff_unitary_formula (( tff_unary_formula ))
- in ( LrTable.NT 92, ( result, tff_unary_formula1left, 
-tff_unary_formula1right), rest671)
-end
-|  ( 180, ( ( _, ( MlyValue.atomic_formula atomic_formula, 
-atomic_formula1left, atomic_formula1right)) :: rest671)) => let val  
-result = MlyValue.tff_unitary_formula (( atomic_formula ))
- in ( LrTable.NT 92, ( result, atomic_formula1left, 
-atomic_formula1right), rest671)
-end
-|  ( 181, ( ( _, ( MlyValue.tptp_let tptp_let, tptp_let1left, 
-tptp_let1right)) :: rest671)) => let val  result = 
-MlyValue.tff_unitary_formula (( tptp_let ))
- in ( LrTable.NT 92, ( result, tptp_let1left, tptp_let1right), rest671
-)
-end
-|  ( 182, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
-variable_1right)) :: rest671)) => let val  result = 
-MlyValue.tff_unitary_formula (( Pred (Uninterpreted variable_, []) ))
- in ( LrTable.NT 92, ( result, variable_1left, variable_1right), 
-rest671)
-end
-|  ( 183, ( ( _, ( MlyValue.tff_conditional tff_conditional, 
-tff_conditional1left, tff_conditional1right)) :: rest671)) => let val 
- result = MlyValue.tff_unitary_formula (( tff_conditional ))
- in ( LrTable.NT 92, ( result, tff_conditional1left, 
-tff_conditional1right), rest671)
-end
-|  ( 184, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.tff_logic_formula tff_logic_formula, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_unitary_formula (( tff_logic_formula ))
- in ( LrTable.NT 92, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 185, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula2,
- _, tff_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.tff_unitary_formula tff_unitary_formula1, 
-tff_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_and_formula (
-( Fmla (Interpreted_Logic And, [tff_unitary_formula1, tff_unitary_formula2]) )
-)
- in ( LrTable.NT 93, ( result, tff_unitary_formula1left, 
-tff_unitary_formula2right), rest671)
-end
-|  ( 186, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _
-, tff_unitary_formula1right)) :: _ :: ( _, ( MlyValue.tff_and_formula 
-tff_and_formula, tff_and_formula1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_and_formula (
-( Fmla (Interpreted_Logic And, [tff_and_formula, tff_unitary_formula]) )
-)
- in ( LrTable.NT 93, ( result, tff_and_formula1left, 
-tff_unitary_formula1right), rest671)
-end
-|  ( 187, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula2,
- _, tff_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.tff_unitary_formula tff_unitary_formula1, 
-tff_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.tff_or_formula (
-( Fmla (Interpreted_Logic Or, [tff_unitary_formula1, tff_unitary_formula2]) )
-)
- in ( LrTable.NT 94, ( result, tff_unitary_formula1left, 
-tff_unitary_formula2right), rest671)
-end
-|  ( 188, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, _
-, tff_unitary_formula1right)) :: _ :: ( _, ( MlyValue.tff_or_formula 
-tff_or_formula, tff_or_formula1left, _)) :: rest671)) => let val  
-result = MlyValue.tff_or_formula (
-( Fmla (Interpreted_Logic Or, [tff_or_formula, tff_unitary_formula]) )
-)
- in ( LrTable.NT 94, ( result, tff_or_formula1left, 
-tff_unitary_formula1right), rest671)
-end
-|  ( 189, ( ( _, ( MlyValue.tff_or_formula tff_or_formula, 
-tff_or_formula1left, tff_or_formula1right)) :: rest671)) => let val  
-result = MlyValue.tff_binary_assoc (( tff_or_formula ))
- in ( LrTable.NT 95, ( result, tff_or_formula1left, 
-tff_or_formula1right), rest671)
-end
-|  ( 190, ( ( _, ( MlyValue.tff_and_formula tff_and_formula, 
-tff_and_formula1left, tff_and_formula1right)) :: rest671)) => let val 
- result = MlyValue.tff_binary_assoc (( tff_and_formula ))
- in ( LrTable.NT 95, ( result, tff_and_formula1left, 
-tff_and_formula1right), rest671)
-end
-|  ( 191, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula2,
- _, tff_unitary_formula2right)) :: ( _, ( MlyValue.binary_connective 
-binary_connective, _, _)) :: ( _, ( MlyValue.tff_unitary_formula 
-tff_unitary_formula1, tff_unitary_formula1left, _)) :: rest671)) =>
- let val  result = MlyValue.tff_binary_nonassoc (
-( Fmla (binary_connective, [tff_unitary_formula1, tff_unitary_formula2]) )
-)
- in ( LrTable.NT 96, ( result, tff_unitary_formula1left, 
-tff_unitary_formula2right), rest671)
-end
-|  ( 192, ( ( _, ( MlyValue.tff_binary_nonassoc tff_binary_nonassoc, 
-tff_binary_nonassoc1left, tff_binary_nonassoc1right)) :: rest671)) =>
- let val  result = MlyValue.tff_binary_formula (
-( tff_binary_nonassoc ))
- in ( LrTable.NT 97, ( result, tff_binary_nonassoc1left, 
-tff_binary_nonassoc1right), rest671)
-end
-|  ( 193, ( ( _, ( MlyValue.tff_binary_assoc tff_binary_assoc, 
-tff_binary_assoc1left, tff_binary_assoc1right)) :: rest671)) => let
- val  result = MlyValue.tff_binary_formula (( tff_binary_assoc ))
- in ( LrTable.NT 97, ( result, tff_binary_assoc1left, 
-tff_binary_assoc1right), rest671)
-end
-|  ( 194, ( ( _, ( MlyValue.tff_binary_formula tff_binary_formula, 
-tff_binary_formula1left, tff_binary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.tff_logic_formula (( tff_binary_formula ))
- in ( LrTable.NT 98, ( result, tff_binary_formula1left, 
-tff_binary_formula1right), rest671)
-end
-|  ( 195, ( ( _, ( MlyValue.tff_unitary_formula tff_unitary_formula, 
-tff_unitary_formula1left, tff_unitary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.tff_logic_formula (( tff_unitary_formula )
-)
- in ( LrTable.NT 98, ( result, tff_unitary_formula1left, 
-tff_unitary_formula1right), rest671)
-end
-|  ( 196, ( ( _, ( MlyValue.tff_logic_formula tff_logic_formula, 
-tff_logic_formula1left, tff_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.tff_formula (( tff_logic_formula ))
- in ( LrTable.NT 99, ( result, tff_logic_formula1left, 
-tff_logic_formula1right), rest671)
-end
-|  ( 197, ( ( _, ( MlyValue.tff_typed_atom tff_typed_atom, 
-tff_typed_atom1left, tff_typed_atom1right)) :: rest671)) => let val  
-result = MlyValue.tff_formula (
-( Atom (TFF_Typed_Atom tff_typed_atom) ))
- in ( LrTable.NT 99, ( result, tff_typed_atom1left, 
-tff_typed_atom1right), rest671)
-end
-|  ( 198, ( ( _, ( MlyValue.tff_sequent tff_sequent, tff_sequent1left,
- tff_sequent1right)) :: rest671)) => let val  result = 
-MlyValue.tff_formula (( tff_sequent ))
- in ( LrTable.NT 99, ( result, tff_sequent1left, tff_sequent1right), 
-rest671)
-end
-|  ( 199, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) ::
- rest671)) => let val  result = MlyValue.thf_tuple (( [] ))
- in ( LrTable.NT 100, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 200, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
-MlyValue.thf_tuple_list thf_tuple_list, _, _)) :: ( _, ( _, LBRKT1left
-, _)) :: rest671)) => let val  result = MlyValue.thf_tuple (
-( thf_tuple_list ))
- in ( LrTable.NT 100, ( result, LBRKT1left, RBRKT1right), rest671)
-end
-|  ( 201, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, 
-thf_logic_formula1left, thf_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.thf_tuple_list (( [thf_logic_formula] ))
- in ( LrTable.NT 101, ( result, thf_logic_formula1left, 
-thf_logic_formula1right), rest671)
-end
-|  ( 202, ( ( _, ( MlyValue.thf_tuple_list thf_tuple_list, _, 
-thf_tuple_list1right)) :: _ :: ( _, ( MlyValue.thf_logic_formula 
-thf_logic_formula, thf_logic_formula1left, _)) :: rest671)) => let
- val  result = MlyValue.thf_tuple_list (
-( thf_logic_formula :: thf_tuple_list ))
- in ( LrTable.NT 101, ( result, thf_logic_formula1left, 
-thf_tuple_list1right), rest671)
-end
-|  ( 203, ( ( _, ( MlyValue.thf_tuple thf_tuple2, _, thf_tuple2right))
- :: _ :: ( _, ( MlyValue.thf_tuple thf_tuple1, thf_tuple1left, _)) :: 
-rest671)) => let val  result = MlyValue.thf_sequent (
-( Sequent(thf_tuple1, thf_tuple2) ))
- in ( LrTable.NT 102, ( result, thf_tuple1left, thf_tuple2right), 
-rest671)
-end
-|  ( 204, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.thf_sequent
- thf_sequent, _, _)) :: ( _, ( _, LPAREN1left, _)) :: rest671)) => let
- val  result = MlyValue.thf_sequent (( thf_sequent ))
- in ( LrTable.NT 102, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 205, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.thf_logic_formula thf_logic_formula3, _, _)) :: _ :: ( _, ( 
-MlyValue.thf_logic_formula thf_logic_formula2, _, _)) :: _ :: ( _, ( 
-MlyValue.thf_logic_formula thf_logic_formula1, _, _)) :: _ :: ( _, ( _
-, ITE_F1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_conditional (
-(
-  Conditional (thf_logic_formula1, thf_logic_formula2, thf_logic_formula3)
-)
-)
- in ( LrTable.NT 103, ( result, ITE_F1left, RPAREN1right), rest671)
-
-end
-|  ( 206, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, _, 
-thf_logic_formula1right)) :: _ :: ( _, ( MlyValue.thf_variable 
-thf_variable, thf_variable1left, _)) :: rest671)) => let val  result =
- MlyValue.thf_defined_var (
-( Let_fmla (thf_variable, thf_logic_formula) ))
- in ( LrTable.NT 104, ( result, thf_variable1left, 
-thf_logic_formula1right), rest671)
-end
-|  ( 207, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.thf_defined_var thf_defined_var, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_defined_var (( thf_defined_var ))
- in ( LrTable.NT 104, ( result, LPAREN1left, RPAREN1right), rest671)
+|  ( 246, ( ( _, ( MlyValue.useful_info useful_info, _, 
+useful_info1right)) :: ( _, ( _, COMMA1left, _)) :: rest671)) => let
+ val  result = MlyValue.optional_info (( useful_info ))
+ in ( LrTable.NT 4, ( result, COMMA1left, useful_info1right), rest671)
 
 end
-|  ( 208, ( ( _, ( MlyValue.thf_defined_var thf_defined_var, 
-thf_defined_var1left, thf_defined_var1right)) :: rest671)) => let val 
- result = MlyValue.thf_let_list (( [thf_defined_var] ))
- in ( LrTable.NT 105, ( result, thf_defined_var1left, 
-thf_defined_var1right), rest671)
-end
-|  ( 209, ( ( _, ( MlyValue.thf_let_list thf_let_list, _, 
-thf_let_list1right)) :: _ :: ( _, ( MlyValue.thf_defined_var 
-thf_defined_var, thf_defined_var1left, _)) :: rest671)) => let val  
-result = MlyValue.thf_let_list (( thf_defined_var :: thf_let_list ))
- in ( LrTable.NT 105, ( result, thf_defined_var1left, 
-thf_let_list1right), rest671)
-end
-|  ( 210, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _
-, thf_unitary_formula1right)) :: _ :: _ :: ( _, ( 
-MlyValue.thf_let_list thf_let_list, _, _)) :: _ :: ( _, ( _, LET1left,
- _)) :: rest671)) => let val  result = MlyValue.thf_let (
-(
-  Let (thf_let_list, thf_unitary_formula)
-))
- in ( LrTable.NT 106, ( result, LET1left, thf_unitary_formula1right), 
-rest671)
-end
-|  ( 211, ( ( _, ( MlyValue.term term, term1left, term1right)) :: 
-rest671)) => let val  result = MlyValue.thf_atom (
-( Atom (THF_Atom_term term) ))
- in ( LrTable.NT 107, ( result, term1left, term1right), rest671)
-end
-|  ( 212, ( ( _, ( MlyValue.thf_conn_term thf_conn_term, 
-thf_conn_term1left, thf_conn_term1right)) :: rest671)) => let val  
-result = MlyValue.thf_atom (
-( Atom (THF_Atom_conn_term thf_conn_term) ))
- in ( LrTable.NT 107, ( result, thf_conn_term1left, 
-thf_conn_term1right), rest671)
-end
-|  ( 213, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type2, _, 
-thf_unitary_type2right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
-thf_unitary_type1, thf_unitary_type1left, _)) :: rest671)) => let val 
- result = MlyValue.thf_union_type (
-( Sum_type(thf_unitary_type1, thf_unitary_type2) ))
- in ( LrTable.NT 108, ( result, thf_unitary_type1left, 
-thf_unitary_type2right), rest671)
-end
-|  ( 214, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type, _, 
-thf_unitary_type1right)) :: _ :: ( _, ( MlyValue.thf_union_type 
-thf_union_type, thf_union_type1left, _)) :: rest671)) => let val  
-result = MlyValue.thf_union_type (
-( Sum_type(thf_union_type, thf_unitary_type) ))
- in ( LrTable.NT 108, ( result, thf_union_type1left, 
-thf_unitary_type1right), rest671)
-end
-|  ( 215, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type2, _, 
-thf_unitary_type2right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
-thf_unitary_type1, thf_unitary_type1left, _)) :: rest671)) => let val 
- result = MlyValue.thf_xprod_type (
-( Prod_type(thf_unitary_type1, thf_unitary_type2) ))
- in ( LrTable.NT 109, ( result, thf_unitary_type1left, 
-thf_unitary_type2right), rest671)
-end
-|  ( 216, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type, _, 
-thf_unitary_type1right)) :: _ :: ( _, ( MlyValue.thf_xprod_type 
-thf_xprod_type, thf_xprod_type1left, _)) :: rest671)) => let val  
-result = MlyValue.thf_xprod_type (
-( Prod_type(thf_xprod_type, thf_unitary_type) ))
- in ( LrTable.NT 109, ( result, thf_xprod_type1left, 
-thf_unitary_type1right), rest671)
-end
-|  ( 217, ( ( _, ( MlyValue.thf_unitary_type thf_unitary_type2, _, 
-thf_unitary_type2right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
-thf_unitary_type1, thf_unitary_type1left, _)) :: rest671)) => let val 
- result = MlyValue.thf_mapping_type (
-( Fn_type(thf_unitary_type1, thf_unitary_type2) ))
- in ( LrTable.NT 110, ( result, thf_unitary_type1left, 
-thf_unitary_type2right), rest671)
-end
-|  ( 218, ( ( _, ( MlyValue.thf_mapping_type thf_mapping_type, _, 
-thf_mapping_type1right)) :: _ :: ( _, ( MlyValue.thf_unitary_type 
-thf_unitary_type, thf_unitary_type1left, _)) :: rest671)) => let val  
-result = MlyValue.thf_mapping_type (
-( Fn_type(thf_unitary_type, thf_mapping_type) ))
- in ( LrTable.NT 110, ( result, thf_unitary_type1left, 
-thf_mapping_type1right), rest671)
-end
-|  ( 219, ( ( _, ( MlyValue.thf_mapping_type thf_mapping_type, 
-thf_mapping_type1left, thf_mapping_type1right)) :: rest671)) => let
- val  result = MlyValue.thf_binary_type (( thf_mapping_type ))
- in ( LrTable.NT 111, ( result, thf_mapping_type1left, 
-thf_mapping_type1right), rest671)
-end
-|  ( 220, ( ( _, ( MlyValue.thf_xprod_type thf_xprod_type, 
-thf_xprod_type1left, thf_xprod_type1right)) :: rest671)) => let val  
-result = MlyValue.thf_binary_type (( thf_xprod_type ))
- in ( LrTable.NT 111, ( result, thf_xprod_type1left, 
-thf_xprod_type1right), rest671)
-end
-|  ( 221, ( ( _, ( MlyValue.thf_union_type thf_union_type, 
-thf_union_type1left, thf_union_type1right)) :: rest671)) => let val  
-result = MlyValue.thf_binary_type (( thf_union_type ))
- in ( LrTable.NT 111, ( result, thf_union_type1left, 
-thf_union_type1right), rest671)
-end
-|  ( 222, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, 
-thf_unitary_formula1left, thf_unitary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.thf_unitary_type (
-( Fmla_type thf_unitary_formula ))
- in ( LrTable.NT 112, ( result, thf_unitary_formula1left, 
-thf_unitary_formula1right), rest671)
-end
-|  ( 223, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, 
-thf_logic_formula1left, thf_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.thf_top_level_type (
-( Fmla_type thf_logic_formula ))
- in ( LrTable.NT 113, ( result, thf_logic_formula1left, 
-thf_logic_formula1right), rest671)
-end
-|  ( 224, ( ( _, ( MlyValue.constant constant2, _, constant2right)) ::
- _ :: ( _, ( MlyValue.constant constant1, constant1left, _)) :: 
-rest671)) => let val  result = MlyValue.thf_subtype (
-( Subtype(constant1, constant2) ))
- in ( LrTable.NT 114, ( result, constant1left, constant2right), 
-rest671)
-end
-|  ( 225, ( ( _, ( MlyValue.thf_atom thf_atom, thf_atom1left, 
-thf_atom1right)) :: rest671)) => let val  result = 
-MlyValue.thf_typeable_formula (( thf_atom ))
- in ( LrTable.NT 115, ( result, thf_atom1left, thf_atom1right), 
-rest671)
-end
-|  ( 226, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.thf_logic_formula thf_logic_formula, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_typeable_formula (( thf_logic_formula ))
- in ( LrTable.NT 115, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 227, ( ( _, ( MlyValue.thf_top_level_type thf_top_level_type, _, 
-thf_top_level_type1right)) :: _ :: ( _, ( 
-MlyValue.thf_typeable_formula thf_typeable_formula, 
-thf_typeable_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_type_formula (
-( (thf_typeable_formula, thf_top_level_type) ))
- in ( LrTable.NT 116, ( result, thf_typeable_formula1left, 
-thf_top_level_type1right), rest671)
-end
-|  ( 228, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.thf_logic_formula thf_logic_formula, _, _)) :: _ :: ( _, ( 
-MlyValue.thf_unary_connective thf_unary_connective, 
-thf_unary_connective1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_unary_formula (
-(
-  Fmla (thf_unary_connective, [thf_logic_formula])
-))
- in ( LrTable.NT 117, ( result, thf_unary_connective1left, 
-RPAREN1right), rest671)
-end
-|  ( 229, ( ( _, ( MlyValue.thf_top_level_type thf_top_level_type, _, 
-thf_top_level_type1right)) :: _ :: ( _, ( MlyValue.variable_ variable_
-, variable_1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_typed_variable (( (variable_, SOME thf_top_level_type) ))
- in ( LrTable.NT 118, ( result, variable_1left, 
-thf_top_level_type1right), rest671)
-end
-|  ( 230, ( ( _, ( MlyValue.thf_typed_variable thf_typed_variable, 
-thf_typed_variable1left, thf_typed_variable1right)) :: rest671)) =>
- let val  result = MlyValue.thf_variable (( thf_typed_variable ))
- in ( LrTable.NT 119, ( result, thf_typed_variable1left, 
-thf_typed_variable1right), rest671)
-end
-|  ( 231, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
-variable_1right)) :: rest671)) => let val  result = 
-MlyValue.thf_variable (( (variable_, NONE) ))
- in ( LrTable.NT 119, ( result, variable_1left, variable_1right), 
-rest671)
-end
-|  ( 232, ( ( _, ( MlyValue.thf_variable thf_variable, 
-thf_variable1left, thf_variable1right)) :: rest671)) => let val  
-result = MlyValue.thf_variable_list (( [thf_variable] ))
- in ( LrTable.NT 120, ( result, thf_variable1left, thf_variable1right)
-, rest671)
-end
-|  ( 233, ( ( _, ( MlyValue.thf_variable_list thf_variable_list, _, 
-thf_variable_list1right)) :: _ :: ( _, ( MlyValue.thf_variable 
-thf_variable, thf_variable1left, _)) :: rest671)) => let val  result =
- MlyValue.thf_variable_list (( thf_variable :: thf_variable_list ))
- in ( LrTable.NT 120, ( result, thf_variable1left, 
-thf_variable_list1right), rest671)
-end
-|  ( 234, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _
-, thf_unitary_formula1right)) :: _ :: _ :: ( _, ( 
-MlyValue.thf_variable_list thf_variable_list, _, _)) :: _ :: ( _, ( 
-MlyValue.thf_quantifier thf_quantifier, thf_quantifier1left, _)) :: 
-rest671)) => let val  result = MlyValue.thf_quantified_formula (
-(
-  Quant (thf_quantifier, thf_variable_list, thf_unitary_formula)
-))
- in ( LrTable.NT 121, ( result, thf_quantifier1left, 
-thf_unitary_formula1right), rest671)
-end
-|  ( 235, ( ( _, ( MlyValue.thf_quantified_formula 
-thf_quantified_formula, thf_quantified_formula1left, 
-thf_quantified_formula1right)) :: rest671)) => let val  result = 
-MlyValue.thf_unitary_formula (( thf_quantified_formula ))
- in ( LrTable.NT 122, ( result, thf_quantified_formula1left, 
-thf_quantified_formula1right), rest671)
-end
-|  ( 236, ( ( _, ( MlyValue.thf_unary_formula thf_unary_formula, 
-thf_unary_formula1left, thf_unary_formula1right)) :: rest671)) => let
- val  result = MlyValue.thf_unitary_formula (( thf_unary_formula ))
- in ( LrTable.NT 122, ( result, thf_unary_formula1left, 
-thf_unary_formula1right), rest671)
-end
-|  ( 237, ( ( _, ( MlyValue.thf_atom thf_atom, thf_atom1left, 
-thf_atom1right)) :: rest671)) => let val  result = 
-MlyValue.thf_unitary_formula (( thf_atom ))
- in ( LrTable.NT 122, ( result, thf_atom1left, thf_atom1right), 
-rest671)
-end
-|  ( 238, ( ( _, ( MlyValue.thf_let thf_let, thf_let1left, 
-thf_let1right)) :: rest671)) => let val  result = 
-MlyValue.thf_unitary_formula (( thf_let ))
- in ( LrTable.NT 122, ( result, thf_let1left, thf_let1right), rest671)
-
-end
-|  ( 239, ( ( _, ( MlyValue.thf_conditional thf_conditional, 
-thf_conditional1left, thf_conditional1right)) :: rest671)) => let val 
- result = MlyValue.thf_unitary_formula (( thf_conditional ))
- in ( LrTable.NT 122, ( result, thf_conditional1left, 
-thf_conditional1right), rest671)
-end
-|  ( 240, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
-MlyValue.thf_logic_formula thf_logic_formula, _, _)) :: ( _, ( _, 
-LPAREN1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_unitary_formula (( thf_logic_formula ))
- in ( LrTable.NT 122, ( result, LPAREN1left, RPAREN1right), rest671)
-
-end
-|  ( 241, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2,
- _, thf_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.thf_unitary_formula thf_unitary_formula1, 
-thf_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_apply_formula (
-( Fmla (Interpreted_ExtraLogic Apply, [thf_unitary_formula1, thf_unitary_formula2]) )
-)
- in ( LrTable.NT 123, ( result, thf_unitary_formula1left, 
-thf_unitary_formula2right), rest671)
-end
-|  ( 242, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _
-, thf_unitary_formula1right)) :: _ :: ( _, ( 
-MlyValue.thf_apply_formula thf_apply_formula, thf_apply_formula1left,
- _)) :: rest671)) => let val  result = MlyValue.thf_apply_formula (
-( Fmla (Interpreted_ExtraLogic Apply, [thf_apply_formula, thf_unitary_formula]) )
-)
- in ( LrTable.NT 123, ( result, thf_apply_formula1left, 
-thf_unitary_formula1right), rest671)
-end
-|  ( 243, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2,
- _, thf_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.thf_unitary_formula thf_unitary_formula1, 
-thf_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_and_formula (
-( Fmla (Interpreted_Logic And, [thf_unitary_formula1, thf_unitary_formula2]) )
-)
- in ( LrTable.NT 124, ( result, thf_unitary_formula1left, 
-thf_unitary_formula2right), rest671)
-end
-|  ( 244, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _
-, thf_unitary_formula1right)) :: _ :: ( _, ( MlyValue.thf_and_formula 
-thf_and_formula, thf_and_formula1left, _)) :: rest671)) => let val  
-result = MlyValue.thf_and_formula (
-( Fmla (Interpreted_Logic And, [thf_and_formula, thf_unitary_formula]) )
-)
- in ( LrTable.NT 124, ( result, thf_and_formula1left, 
-thf_unitary_formula1right), rest671)
-end
-|  ( 245, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2,
- _, thf_unitary_formula2right)) :: _ :: ( _, ( 
-MlyValue.thf_unitary_formula thf_unitary_formula1, 
-thf_unitary_formula1left, _)) :: rest671)) => let val  result = 
-MlyValue.thf_or_formula (
-( Fmla (Interpreted_Logic Or, [thf_unitary_formula1, thf_unitary_formula2]) )
-)
- in ( LrTable.NT 125, ( result, thf_unitary_formula1left, 
-thf_unitary_formula2right), rest671)
-end
-|  ( 246, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, _
-, thf_unitary_formula1right)) :: _ :: ( _, ( MlyValue.thf_or_formula 
-thf_or_formula, thf_or_formula1left, _)) :: rest671)) => let val  
-result = MlyValue.thf_or_formula (
-( Fmla (Interpreted_Logic Or, [thf_or_formula, thf_unitary_formula]) )
-)
- in ( LrTable.NT 125, ( result, thf_or_formula1left, 
-thf_unitary_formula1right), rest671)
-end
-|  ( 247, ( ( _, ( MlyValue.thf_or_formula thf_or_formula, 
-thf_or_formula1left, thf_or_formula1right)) :: rest671)) => let val  
-result = MlyValue.thf_binary_tuple (( thf_or_formula ))
- in ( LrTable.NT 126, ( result, thf_or_formula1left, 
-thf_or_formula1right), rest671)
-end
-|  ( 248, ( ( _, ( MlyValue.thf_and_formula thf_and_formula, 
-thf_and_formula1left, thf_and_formula1right)) :: rest671)) => let val 
- result = MlyValue.thf_binary_tuple (( thf_and_formula ))
- in ( LrTable.NT 126, ( result, thf_and_formula1left, 
-thf_and_formula1right), rest671)
-end
-|  ( 249, ( ( _, ( MlyValue.thf_apply_formula thf_apply_formula, 
-thf_apply_formula1left, thf_apply_formula1right)) :: rest671)) => let
- val  result = MlyValue.thf_binary_tuple (( thf_apply_formula ))
- in ( LrTable.NT 126, ( result, thf_apply_formula1left, 
-thf_apply_formula1right), rest671)
-end
-|  ( 250, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula2,
- _, thf_unitary_formula2right)) :: ( _, ( MlyValue.thf_pair_connective
- thf_pair_connective, _, _)) :: ( _, ( MlyValue.thf_unitary_formula 
-thf_unitary_formula1, thf_unitary_formula1left, _)) :: rest671)) =>
- let val  result = MlyValue.thf_binary_pair (
-(
-  Fmla (thf_pair_connective, [thf_unitary_formula1, thf_unitary_formula2])
-)
-)
- in ( LrTable.NT 127, ( result, thf_unitary_formula1left, 
-thf_unitary_formula2right), rest671)
-end
-|  ( 251, ( ( _, ( MlyValue.thf_binary_pair thf_binary_pair, 
-thf_binary_pair1left, thf_binary_pair1right)) :: rest671)) => let val 
- result = MlyValue.thf_binary_formula (( thf_binary_pair ))
- in ( LrTable.NT 128, ( result, thf_binary_pair1left, 
-thf_binary_pair1right), rest671)
-end
-|  ( 252, ( ( _, ( MlyValue.thf_binary_tuple thf_binary_tuple, 
-thf_binary_tuple1left, thf_binary_tuple1right)) :: rest671)) => let
- val  result = MlyValue.thf_binary_formula (( thf_binary_tuple ))
- in ( LrTable.NT 128, ( result, thf_binary_tuple1left, 
-thf_binary_tuple1right), rest671)
-end
-|  ( 253, ( ( _, ( MlyValue.thf_binary_type thf_binary_type, 
-thf_binary_type1left, thf_binary_type1right)) :: rest671)) => let val 
- result = MlyValue.thf_binary_formula (( THF_type thf_binary_type ))
- in ( LrTable.NT 128, ( result, thf_binary_type1left, 
-thf_binary_type1right), rest671)
-end
-|  ( 254, ( ( _, ( MlyValue.thf_binary_formula thf_binary_formula, 
-thf_binary_formula1left, thf_binary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.thf_logic_formula (( thf_binary_formula ))
- in ( LrTable.NT 129, ( result, thf_binary_formula1left, 
-thf_binary_formula1right), rest671)
-end
-|  ( 255, ( ( _, ( MlyValue.thf_unitary_formula thf_unitary_formula, 
-thf_unitary_formula1left, thf_unitary_formula1right)) :: rest671)) =>
- let val  result = MlyValue.thf_logic_formula (( thf_unitary_formula )
-)
- in ( LrTable.NT 129, ( result, thf_unitary_formula1left, 
-thf_unitary_formula1right), rest671)
-end
-|  ( 256, ( ( _, ( MlyValue.thf_type_formula thf_type_formula, 
-thf_type_formula1left, thf_type_formula1right)) :: rest671)) => let
- val  result = MlyValue.thf_logic_formula (
-( THF_typing thf_type_formula ))
- in ( LrTable.NT 129, ( result, thf_type_formula1left, 
-thf_type_formula1right), rest671)
-end
-|  ( 257, ( ( _, ( MlyValue.thf_subtype thf_subtype, thf_subtype1left,
- thf_subtype1right)) :: rest671)) => let val  result = 
-MlyValue.thf_logic_formula (( THF_type thf_subtype ))
- in ( LrTable.NT 129, ( result, thf_subtype1left, thf_subtype1right), 
-rest671)
-end
-|  ( 258, ( ( _, ( MlyValue.thf_logic_formula thf_logic_formula, 
-thf_logic_formula1left, thf_logic_formula1right)) :: rest671)) => let
- val  result = MlyValue.thf_formula (( thf_logic_formula ))
- in ( LrTable.NT 130, ( result, thf_logic_formula1left, 
-thf_logic_formula1right), rest671)
-end
-|  ( 259, ( ( _, ( MlyValue.thf_sequent thf_sequent, thf_sequent1left,
- thf_sequent1right)) :: rest671)) => let val  result = 
-MlyValue.thf_formula (( thf_sequent ))
- in ( LrTable.NT 130, ( result, thf_sequent1left, thf_sequent1right), 
-rest671)
-end
-|  ( 260, ( ( _, ( MlyValue.LOWER_WORD LOWER_WORD, LOWER_WORD1left, 
-LOWER_WORD1right)) :: rest671)) => let val  result = 
-MlyValue.formula_role (( classify_role LOWER_WORD ))
- in ( LrTable.NT 131, ( result, LOWER_WORD1left, LOWER_WORD1right), 
-rest671)
-end
-|  ( 261, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
-MlyValue.annotations annotations, _, _)) :: ( _, ( 
-MlyValue.thf_formula thf_formula, _, _)) :: _ :: ( _, ( 
-MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
-MlyValue.name name, _, _)) :: _ :: ( _, ( _, (THFleft as THF1left), 
-THFright)) :: rest671)) => let val  result = MlyValue.thf_annotated (
-(
-  Annotated_Formula ((file_name, THFleft + 1, THFright + 1),
-   THF, name, formula_role, thf_formula, annotations)
-)
-)
- in ( LrTable.NT 135, ( result, THF1left, PERIOD1right), rest671)
-end
-|  ( 262, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
-MlyValue.annotations annotations, _, _)) :: ( _, ( 
-MlyValue.tff_formula tff_formula, _, _)) :: _ :: ( _, ( 
-MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
-MlyValue.name name, _, _)) :: _ :: ( _, ( _, (TFFleft as TFF1left), 
-TFFright)) :: rest671)) => let val  result = MlyValue.tff_annotated (
-(
-  Annotated_Formula ((file_name, TFFleft + 1, TFFright + 1),
-   TFF, name, formula_role, tff_formula, annotations)
-)
-)
- in ( LrTable.NT 134, ( result, TFF1left, PERIOD1right), rest671)
-end
-|  ( 263, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
-MlyValue.annotations annotations, _, _)) :: ( _, ( 
-MlyValue.fof_formula fof_formula, _, _)) :: _ :: ( _, ( 
-MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
-MlyValue.name name, _, _)) :: _ :: ( _, ( _, (FOFleft as FOF1left), 
-FOFright)) :: rest671)) => let val  result = MlyValue.fof_annotated (
-(
-  Annotated_Formula ((file_name, FOFleft + 1, FOFright + 1),
-   FOF, name, formula_role, fof_formula, annotations)
-)
-)
- in ( LrTable.NT 133, ( result, FOF1left, PERIOD1right), rest671)
-end
-|  ( 264, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
-MlyValue.annotations annotations, _, _)) :: ( _, ( 
-MlyValue.cnf_formula cnf_formula, _, _)) :: _ :: ( _, ( 
-MlyValue.formula_role formula_role, _, _)) :: _ :: ( _, ( 
-MlyValue.name name, _, _)) :: _ :: ( _, ( _, (CNFleft as CNF1left), 
-CNFright)) :: rest671)) => let val  result = MlyValue.cnf_annotated (
-(
-  Annotated_Formula ((file_name, CNFleft + 1, CNFright + 1),
-   CNF, name, formula_role, cnf_formula, annotations)
-)
-)
- in ( LrTable.NT 132, ( result, CNF1left, PERIOD1right), rest671)
-end
-|  ( 265, ( ( _, ( MlyValue.cnf_annotated cnf_annotated, 
-cnf_annotated1left, cnf_annotated1right)) :: rest671)) => let val  
-result = MlyValue.annotated_formula (( cnf_annotated ))
- in ( LrTable.NT 136, ( result, cnf_annotated1left, 
-cnf_annotated1right), rest671)
-end
-|  ( 266, ( ( _, ( MlyValue.fof_annotated fof_annotated, 
-fof_annotated1left, fof_annotated1right)) :: rest671)) => let val  
-result = MlyValue.annotated_formula (( fof_annotated ))
- in ( LrTable.NT 136, ( result, fof_annotated1left, 
-fof_annotated1right), rest671)
-end
-|  ( 267, ( ( _, ( MlyValue.tff_annotated tff_annotated, 
-tff_annotated1left, tff_annotated1right)) :: rest671)) => let val  
-result = MlyValue.annotated_formula (( tff_annotated ))
- in ( LrTable.NT 136, ( result, tff_annotated1left, 
-tff_annotated1right), rest671)
-end
-|  ( 268, ( ( _, ( MlyValue.thf_annotated thf_annotated, 
-thf_annotated1left, thf_annotated1right)) :: rest671)) => let val  
-result = MlyValue.annotated_formula (( thf_annotated ))
- in ( LrTable.NT 136, ( result, thf_annotated1left, 
-thf_annotated1right), rest671)
-end
-|  ( 269, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
+|  ( 247, ( rest671)) => let val  result = MlyValue.optional_info (
+( [] ))
+ in ( LrTable.NT 4, ( result, defaultPos, defaultPos), rest671)
+end
+|  ( 248, ( ( _, ( MlyValue.general_list general_list, 
+general_list1left, general_list1right)) :: rest671)) => let val  
+result = MlyValue.useful_info (( general_list ))
+ in ( LrTable.NT 16, ( result, general_list1left, general_list1right),
+ rest671)
+end
+|  ( 249, ( ( _, ( _, _, PERIOD1right)) :: _ :: ( _, ( 
 MlyValue.formula_selection formula_selection, _, _)) :: ( _, ( 
 MlyValue.file_name file_name, _, _)) :: _ :: ( _, ( _, INCLUDE1left, _
 )) :: rest671)) => let val  result = MlyValue.include_ (
 (
   Include (file_name, formula_selection)
 ))
- in ( LrTable.NT 137, ( result, INCLUDE1left, PERIOD1right), rest671)
+ in ( LrTable.NT 132, ( result, INCLUDE1left, PERIOD1right), rest671)
 
 end
-|  ( 270, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( MlyValue.name_list 
+|  ( 250, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( MlyValue.name_list 
 name_list, _, _)) :: _ :: ( _, ( _, COMMA1left, _)) :: rest671)) =>
  let val  result = MlyValue.formula_selection (( name_list  ))
  in ( LrTable.NT 3, ( result, COMMA1left, RBRKT1right), rest671)
 end
-|  ( 271, ( rest671)) => let val  result = MlyValue.formula_selection
+|  ( 251, ( rest671)) => let val  result = MlyValue.formula_selection
  (( [] ))
  in ( LrTable.NT 3, ( result, defaultPos, defaultPos), rest671)
 end
-|  ( 272, ( ( _, ( MlyValue.name_list name_list, _, name_list1right))
+|  ( 252, ( ( _, ( MlyValue.name_list name_list, _, name_list1right))
  :: _ :: ( _, ( MlyValue.name name, name1left, _)) :: rest671)) => let
  val  result = MlyValue.name_list (( name :: name_list ))
  in ( LrTable.NT 2, ( result, name1left, name_list1right), rest671)
 
 end
-|  ( 273, ( ( _, ( MlyValue.name name, name1left, name1right)) :: 
+|  ( 253, ( ( _, ( MlyValue.name name, name1left, name1right)) :: 
 rest671)) => let val  result = MlyValue.name_list (( [name] ))
  in ( LrTable.NT 2, ( result, name1left, name1right), rest671)
 end
-|  ( 274, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left,
+|  ( 254, ( ( _, ( MlyValue.general_data general_data, 
+general_data1left, general_data1right)) :: rest671)) => let val  
+result = MlyValue.general_term (( General_Data general_data ))
+ in ( LrTable.NT 7, ( result, general_data1left, general_data1right), 
+rest671)
+end
+|  ( 255, ( ( _, ( MlyValue.general_term general_term, _, 
+general_term1right)) :: _ :: ( _, ( MlyValue.general_data general_data
+, general_data1left, _)) :: rest671)) => let val  result = 
+MlyValue.general_term (( General_Term (general_data, general_term) ))
+ in ( LrTable.NT 7, ( result, general_data1left, general_term1right), 
+rest671)
+end
+|  ( 256, ( ( _, ( MlyValue.general_list general_list, 
+general_list1left, general_list1right)) :: rest671)) => let val  
+result = MlyValue.general_term (( General_List general_list ))
+ in ( LrTable.NT 7, ( result, general_list1left, general_list1right), 
+rest671)
+end
+|  ( 257, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left,
+ atomic_word1right)) :: rest671)) => let val  result = 
+MlyValue.general_data (( Atomic_Word atomic_word ))
+ in ( LrTable.NT 9, ( result, atomic_word1left, atomic_word1right), 
+rest671)
+end
+|  ( 258, ( ( _, ( MlyValue.general_function general_function, 
+general_function1left, general_function1right)) :: rest671)) => let
+ val  result = MlyValue.general_data (( general_function ))
+ in ( LrTable.NT 9, ( result, general_function1left, 
+general_function1right), rest671)
+end
+|  ( 259, ( ( _, ( MlyValue.variable_ variable_, variable_1left, 
+variable_1right)) :: rest671)) => let val  result = 
+MlyValue.general_data (( V variable_ ))
+ in ( LrTable.NT 9, ( result, variable_1left, variable_1right), 
+rest671)
+end
+|  ( 260, ( ( _, ( MlyValue.number number, number1left, number1right))
+ :: rest671)) => let val  result = MlyValue.general_data (
+( Number number ))
+ in ( LrTable.NT 9, ( result, number1left, number1right), rest671)
+end
+|  ( 261, ( ( _, ( MlyValue.DISTINCT_OBJECT DISTINCT_OBJECT, 
+DISTINCT_OBJECT1left, DISTINCT_OBJECT1right)) :: rest671)) => let val 
+ result = MlyValue.general_data (( Distinct_Object DISTINCT_OBJECT ))
+ in ( LrTable.NT 9, ( result, DISTINCT_OBJECT1left, 
+DISTINCT_OBJECT1right), rest671)
+end
+|  ( 262, ( ( _, ( MlyValue.formula_data formula_data, 
+formula_data1left, formula_data1right)) :: rest671)) => let val  
+result = MlyValue.general_data (( formula_data ))
+ in ( LrTable.NT 9, ( result, formula_data1left, formula_data1right), 
+rest671)
+end
+|  ( 263, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( 
+MlyValue.general_terms general_terms, _, _)) :: _ :: ( _, ( 
+MlyValue.atomic_word atomic_word, atomic_word1left, _)) :: rest671))
+ => let val  result = MlyValue.general_function (
+( Application (atomic_word, general_terms) ))
+ in ( LrTable.NT 15, ( result, atomic_word1left, RPAREN1right), 
+rest671)
+end
+|  ( 264, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.thf_formula
+ thf_formula, _, _)) :: _ :: ( _, ( _, DTHF1left, _)) :: rest671)) =>
+ let val  result = MlyValue.formula_data (
+( Formula_Data (THF, thf_formula) ))
+ in ( LrTable.NT 12, ( result, DTHF1left, RPAREN1right), rest671)
+end
+|  ( 265, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.tff_formula
+ tff_formula, _, _)) :: _ :: ( _, ( _, DTFF1left, _)) :: rest671)) =>
+ let val  result = MlyValue.formula_data (
+( Formula_Data (TFF, tff_formula) ))
+ in ( LrTable.NT 12, ( result, DTFF1left, RPAREN1right), rest671)
+end
+|  ( 266, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.fof_formula
+ fof_formula, _, _)) :: _ :: ( _, ( _, DFOF1left, _)) :: rest671)) =>
+ let val  result = MlyValue.formula_data (
+( Formula_Data (FOF, fof_formula) ))
+ in ( LrTable.NT 12, ( result, DFOF1left, RPAREN1right), rest671)
+end
+|  ( 267, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.cnf_formula
+ cnf_formula, _, _)) :: _ :: ( _, ( _, DCNF1left, _)) :: rest671)) =>
+ let val  result = MlyValue.formula_data (
+( Formula_Data (CNF, cnf_formula) ))
+ in ( LrTable.NT 12, ( result, DCNF1left, RPAREN1right), rest671)
+end
+|  ( 268, ( ( _, ( _, _, RPAREN1right)) :: ( _, ( MlyValue.term term,
+ _, _)) :: _ :: ( _, ( _, DFOT1left, _)) :: rest671)) => let val  
+result = MlyValue.formula_data (( Term_Data term ))
+ in ( LrTable.NT 12, ( result, DFOT1left, RPAREN1right), rest671)
+end
+|  ( 269, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( 
+MlyValue.general_terms general_terms, _, _)) :: ( _, ( _, LBRKT1left,
+ _)) :: rest671)) => let val  result = MlyValue.general_list (
+( general_terms ))
+ in ( LrTable.NT 5, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 270, ( ( _, ( _, _, RBRKT1right)) :: ( _, ( _, LBRKT1left, _)) ::
+ rest671)) => let val  result = MlyValue.general_list (( [] ))
+ in ( LrTable.NT 5, ( result, LBRKT1left, RBRKT1right), rest671)
+end
+|  ( 271, ( ( _, ( MlyValue.general_terms general_terms, _, 
+general_terms1right)) :: _ :: ( _, ( MlyValue.general_term 
+general_term, general_term1left, _)) :: rest671)) => let val  result =
+ MlyValue.general_terms (( general_term :: general_terms ))
+ in ( LrTable.NT 6, ( result, general_term1left, general_terms1right),
+ rest671)
+end
+|  ( 272, ( ( _, ( MlyValue.general_term general_term, 
+general_term1left, general_term1right)) :: rest671)) => let val  
+result = MlyValue.general_terms (( [general_term] ))
+ in ( LrTable.NT 6, ( result, general_term1left, general_term1right), 
+rest671)
+end
+|  ( 273, ( ( _, ( MlyValue.atomic_word atomic_word, atomic_word1left,
  atomic_word1right)) :: rest671)) => let val  result = MlyValue.name (
 ( atomic_word ))
  in ( LrTable.NT 1, ( result, atomic_word1left, atomic_word1right), 
 rest671)
 end
-|  ( 275, ( ( _, ( MlyValue.integer integer, integer1left, 
+|  ( 274, ( ( _, ( MlyValue.integer integer, integer1left, 
 integer1right)) :: rest671)) => let val  result = MlyValue.name (
 ( integer ))
  in ( LrTable.NT 1, ( result, integer1left, integer1right), rest671)
 
 end
-|  ( 276, ( ( _, ( MlyValue.annotated_formula annotated_formula, 
-annotated_formula1left, annotated_formula1right)) :: rest671)) => let
- val  result = MlyValue.tptp_input (( annotated_formula ))
- in ( LrTable.NT 138, ( result, annotated_formula1left, 
-annotated_formula1right), rest671)
-end
-|  ( 277, ( ( _, ( MlyValue.include_ include_, include_1left, 
-include_1right)) :: rest671)) => let val  result = MlyValue.tptp_input
- (( include_ ))
- in ( LrTable.NT 138, ( result, include_1left, include_1right), 
+|  ( 275, ( ( _, ( MlyValue.LOWER_WORD LOWER_WORD, LOWER_WORD1left, 
+LOWER_WORD1right)) :: rest671)) => let val  result = 
+MlyValue.atomic_word (( LOWER_WORD ))
+ in ( LrTable.NT 8, ( result, LOWER_WORD1left, LOWER_WORD1right), 
+rest671)
+end
+|  ( 276, ( ( _, ( MlyValue.SINGLE_QUOTED SINGLE_QUOTED, 
+SINGLE_QUOTED1left, SINGLE_QUOTED1right)) :: rest671)) => let val  
+result = MlyValue.atomic_word (( SINGLE_QUOTED ))
+ in ( LrTable.NT 8, ( result, SINGLE_QUOTED1left, SINGLE_QUOTED1right)
+, rest671)
+end
+|  ( 277, ( ( _, ( _, THF1left, THF1right)) :: rest671)) => let val  
+result = MlyValue.atomic_word (( "thf" ))
+ in ( LrTable.NT 8, ( result, THF1left, THF1right), rest671)
+end
+|  ( 278, ( ( _, ( _, TFF1left, TFF1right)) :: rest671)) => let val  
+result = MlyValue.atomic_word (( "tff" ))
+ in ( LrTable.NT 8, ( result, TFF1left, TFF1right), rest671)
+end
+|  ( 279, ( ( _, ( _, FOF1left, FOF1right)) :: rest671)) => let val  
+result = MlyValue.atomic_word (( "fof" ))
+ in ( LrTable.NT 8, ( result, FOF1left, FOF1right), rest671)
+end
+|  ( 280, ( ( _, ( _, CNF1left, CNF1right)) :: rest671)) => let val  
+result = MlyValue.atomic_word (( "cnf" ))
+ in ( LrTable.NT 8, ( result, CNF1left, CNF1right), rest671)
+end
+|  ( 281, ( ( _, ( _, INCLUDE1left, INCLUDE1right)) :: rest671)) =>
+ let val  result = MlyValue.atomic_word (( "include" ))
+ in ( LrTable.NT 8, ( result, INCLUDE1left, INCLUDE1right), rest671)
+
+end
+|  ( 282, ( ( _, ( MlyValue.DOLLAR_WORD DOLLAR_WORD, DOLLAR_WORD1left,
+ DOLLAR_WORD1right)) :: rest671)) => let val  result = 
+MlyValue.atomic_defined_word (( DOLLAR_WORD ))
+ in ( LrTable.NT 144, ( result, DOLLAR_WORD1left, DOLLAR_WORD1right), 
 rest671)
 end
-|  ( 278, ( ( _, ( MlyValue.tptp_file tptp_file, _, tptp_file1right))
- :: ( _, ( MlyValue.tptp_input tptp_input, tptp_input1left, _)) :: 
-rest671)) => let val  result = MlyValue.tptp_file (
-( tptp_input :: tptp_file ))
- in ( LrTable.NT 139, ( result, tptp_input1left, tptp_file1right), 
-rest671)
-end
-|  ( 279, ( ( _, ( MlyValue.tptp_file tptp_file, _, tptp_file1right))
- :: ( _, ( _, COMMENT1left, _)) :: rest671)) => let val  result = 
-MlyValue.tptp_file (( tptp_file ))
- in ( LrTable.NT 139, ( result, COMMENT1left, tptp_file1right), 
-rest671)
-end
-|  ( 280, ( rest671)) => let val  result = MlyValue.tptp_file (( [] ))
- in ( LrTable.NT 139, ( result, defaultPos, defaultPos), rest671)
-end
-|  ( 281, ( ( _, ( MlyValue.tptp_file tptp_file, tptp_file1left, 
-tptp_file1right)) :: rest671)) => let val  result = MlyValue.tptp (
-( tptp_file ))
- in ( LrTable.NT 140, ( result, tptp_file1left, tptp_file1right), 
-rest671)
+|  ( 283, ( ( _, ( MlyValue.DOLLAR_DOLLAR_WORD DOLLAR_DOLLAR_WORD, 
+DOLLAR_DOLLAR_WORD1left, DOLLAR_DOLLAR_WORD1right)) :: rest671)) =>
+ let val  result = MlyValue.atomic_system_word (( DOLLAR_DOLLAR_WORD )
+)
+ in ( LrTable.NT 145, ( result, DOLLAR_DOLLAR_WORD1left, 
+DOLLAR_DOLLAR_WORD1right), rest671)
+end
+|  ( 284, ( ( _, ( MlyValue.UNSIGNED_INTEGER UNSIGNED_INTEGER, 
+UNSIGNED_INTEGER1left, UNSIGNED_INTEGER1right)) :: rest671)) => let
+ val  result = MlyValue.integer (( UNSIGNED_INTEGER ))
+ in ( LrTable.NT 13, ( result, UNSIGNED_INTEGER1left, 
+UNSIGNED_INTEGER1right), rest671)
+end
+|  ( 285, ( ( _, ( MlyValue.SIGNED_INTEGER SIGNED_INTEGER, 
+SIGNED_INTEGER1left, SIGNED_INTEGER1right)) :: rest671)) => let val  
+result = MlyValue.integer (( SIGNED_INTEGER ))
+ in ( LrTable.NT 13, ( result, SIGNED_INTEGER1left, 
+SIGNED_INTEGER1right), rest671)
+end
+|  ( 286, ( ( _, ( MlyValue.integer integer, integer1left, 
+integer1right)) :: rest671)) => let val  result = MlyValue.number (
+( (Int_num, integer) ))
+ in ( LrTable.NT 11, ( result, integer1left, integer1right), rest671)
+
+end
+|  ( 287, ( ( _, ( MlyValue.REAL REAL, REAL1left, REAL1right)) :: 
+rest671)) => let val  result = MlyValue.number (( (Real_num, REAL) ))
+ in ( LrTable.NT 11, ( result, REAL1left, REAL1right), rest671)
+end
+|  ( 288, ( ( _, ( MlyValue.RATIONAL RATIONAL, RATIONAL1left, 
+RATIONAL1right)) :: rest671)) => let val  result = MlyValue.number (
+( (Rat_num, RATIONAL) ))
+ in ( LrTable.NT 11, ( result, RATIONAL1left, RATIONAL1right), rest671
+)
+end
+|  ( 289, ( ( _, ( MlyValue.SINGLE_QUOTED SINGLE_QUOTED, 
+SINGLE_QUOTED1left, SINGLE_QUOTED1right)) :: rest671)) => let val  
+result = MlyValue.file_name (( SINGLE_QUOTED ))
+ in ( LrTable.NT 17, ( result, SINGLE_QUOTED1left, SINGLE_QUOTED1right
+), rest671)
 end
 | _ => raise (mlyAction i392)
 end
@@ -5462,7 +5782,7 @@
 ParserData.MlyValue.VOID,p1,p2))
 fun ARROW (p1,p2) = Token.TOKEN (ParserData.LrTable.T 8,(
 ParserData.MlyValue.VOID,p1,p2))
-fun IF (p1,p2) = Token.TOKEN (ParserData.LrTable.T 9,(
+fun FI (p1,p2) = Token.TOKEN (ParserData.LrTable.T 9,(
 ParserData.MlyValue.VOID,p1,p2))
 fun IFF (p1,p2) = Token.TOKEN (ParserData.LrTable.T 10,(
 ParserData.MlyValue.VOID,p1,p2))
@@ -5570,10 +5890,10 @@
 ParserData.MlyValue.VOID,p1,p2))
 fun DEP_PROD (p1,p2) = Token.TOKEN (ParserData.LrTable.T 62,(
 ParserData.MlyValue.VOID,p1,p2))
-fun ATOMIC_DEFINED_WORD (i,p1,p2) = Token.TOKEN (ParserData.LrTable.T 
-63,(ParserData.MlyValue.ATOMIC_DEFINED_WORD i,p1,p2))
-fun ATOMIC_SYSTEM_WORD (i,p1,p2) = Token.TOKEN (ParserData.LrTable.T 
-64,(ParserData.MlyValue.ATOMIC_SYSTEM_WORD i,p1,p2))
+fun DOLLAR_WORD (i,p1,p2) = Token.TOKEN (ParserData.LrTable.T 63,(
+ParserData.MlyValue.DOLLAR_WORD i,p1,p2))
+fun DOLLAR_DOLLAR_WORD (i,p1,p2) = Token.TOKEN (ParserData.LrTable.T 
+64,(ParserData.MlyValue.DOLLAR_DOLLAR_WORD i,p1,p2))
 fun SUBTYPE (p1,p2) = Token.TOKEN (ParserData.LrTable.T 65,(
 ParserData.MlyValue.VOID,p1,p2))
 fun LET_TERM (p1,p2) = Token.TOKEN (ParserData.LrTable.T 66,(
@@ -5590,5 +5910,13 @@
 ParserData.MlyValue.VOID,p1,p2))
 fun ITE_T (p1,p2) = Token.TOKEN (ParserData.LrTable.T 72,(
 ParserData.MlyValue.VOID,p1,p2))
-end
-end
+fun LET_TF (p1,p2) = Token.TOKEN (ParserData.LrTable.T 73,(
+ParserData.MlyValue.VOID,p1,p2))
+fun LET_FF (p1,p2) = Token.TOKEN (ParserData.LrTable.T 74,(
+ParserData.MlyValue.VOID,p1,p2))
+fun LET_FT (p1,p2) = Token.TOKEN (ParserData.LrTable.T 75,(
+ParserData.MlyValue.VOID,p1,p2))
+fun LET_TT (p1,p2) = Token.TOKEN (ParserData.LrTable.T 76,(
+ParserData.MlyValue.VOID,p1,p2))
+end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_proof.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,43 @@
+(*  Title:      HOL/TPTP/TPTP_Parser/tptp_proof.ML
+    Author:     Nik Sultana, Cambridge University Computer Laboratory
+
+Collection of functions for handling TPTP proofs.
+*)
+
+signature TPTP_PROOF =
+sig
+  val extract_inference_info : (TPTP_Syntax.general_term * 'a list) option ->
+                               (string * string list) option
+end
+
+
+structure TPTP_Proof : TPTP_PROOF =
+struct
+
+open TPTP_Syntax
+
+(*Extract name of inference rule, and the inferences it relies on*)
+(*This is tuned to work with LEO-II, and is brittle wrt upstream
+  changes of the proof protocol.*)
+fun extract_inference_info annot =
+  let
+    fun get_line_id (General_Data (Number (Int_num, num))) = [num]
+      | get_line_id (General_Data (Atomic_Word name)) = [name]
+      | get_line_id (General_Term (Number (Int_num, num), _ (*e.g. a bind*))) = [num]
+      | get_line_id _ = []
+        (*e.g. General_Data (Application ("theory", [General_Data
+          (Atomic_Word "equality")])) -- which would come from E through LEO-II*)
+  in
+    case annot of
+      NONE => NONE
+    | SOME annot =>
+      (case annot of
+        (General_Data (Application ("inference",
+        [General_Data (Atomic_Word inference_name),
+         _,
+         General_List related_lines])), _) =>
+          (SOME (inference_name, map get_line_id related_lines |> List.concat))
+      | _ => NONE)
+  end
+
+end
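
For orientation, a rough illustrative sketch of the data shapes involved, assuming a LEO-II-style "inference(...)" source annotation; the rule name "extcnf_combined" and the line number "4" below are hypothetical:

  open TPTP_Syntax
  (*annotation roughly as the parser would produce for something like
      inference(extcnf_combined, [status(esa)], [4])
    with the useful-info slot left empty here for brevity*)
  val example_annot =
    SOME (General_Data (Application ("inference",
      [General_Data (Atomic_Word "extcnf_combined"),
       General_List [],  (*useful-info slot; ignored by extract_inference_info*)
       General_List [General_Data (Number (Int_num, "4"))]])), [])
  (*TPTP_Proof.extract_inference_info example_annot
    should evaluate to SOME ("extcnf_combined", ["4"])*)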
--- a/src/HOL/TPTP/TPTP_Parser/tptp_syntax.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_syntax.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -53,13 +53,13 @@
     UMinus | Sum | Difference | Product | Quotient | Quotient_E |
     Quotient_T | Quotient_F | Remainder_E | Remainder_T | Remainder_F |
     Floor | Ceiling | Truncate | Round | To_Int | To_Rat | To_Real |
-    (*these should be in defined_pred, but that's not being used in TPTP*)
+    (*FIXME these should be in defined_pred, but that's not being used in TPTP*)
     Less | LessEq | Greater | GreaterEq | EvalEq | Is_Int | Is_Rat |
-    Apply (*this is just a matter of convenience*)
+    Distinct | Apply
 
   and logic_symbol = Equals | NEquals | Or | And | Iff | If | Fi | Xor |
     Nor | Nand | Not | Op_Forall | Op_Exists |
-    (*these should be in defined_pred, but that's not being used in TPTP*)
+    (*FIXME these should be in defined_pred, but that's not being used in TPTP*)
     True | False
 
   and quantifier = (*interpreted binders*)
@@ -86,6 +86,7 @@
     | Term_Conditional of tptp_formula * tptp_term * tptp_term
     | Term_Num of number_kind * string
     | Term_Distinct_Object of string
+    | Term_Let of tptp_let list * tptp_term (*FIXME remove list?*)
 
   and tptp_atom =
       TFF_Typed_Atom of symbol * tptp_type option (*only TFF*)
@@ -98,14 +99,14 @@
     | Sequent of tptp_formula list * tptp_formula list
     | Quant of quantifier * (string * tptp_type option) list * tptp_formula
     | Conditional of tptp_formula * tptp_formula * tptp_formula
-    | Let of tptp_let list * tptp_formula
+    | Let of tptp_let list * tptp_formula (*FIXME remove list?*)
     | Atom of tptp_atom
-    | THF_type of tptp_type
+    | Type_fmla of tptp_type
     | THF_typing of tptp_formula * tptp_type (*only THF*)
 
   and tptp_let =
       Let_fmla of (string * tptp_type option) * tptp_formula
-    | Let_term of (string * tptp_type option) * tptp_term  (*only TFF*)
+    | Let_term of (string * tptp_type option) * tptp_term (*only TFF*)
 
   and tptp_type =
       Prod_type of tptp_type * tptp_type
@@ -113,7 +114,7 @@
     | Atom_type of string
     | Defined_type of tptp_base_type
     | Sum_type of tptp_type * tptp_type (*only THF*)
-    | Fmla_type of tptp_formula (*only THF*)
+    | Fmla_type of tptp_formula
     | Subtype of symbol * symbol (*only THF*)
 
   type general_list = general_term list
@@ -128,7 +129,8 @@
   type position = string * int * int
 
   datatype tptp_line =
-      Annotated_Formula of position * language * string * role * tptp_formula * annotation option
+      Annotated_Formula of position * language * string * role *
+        tptp_formula * annotation option
    |  Include of string * string list
 
   type tptp_problem = tptp_line list
@@ -196,13 +198,12 @@
     UMinus | Sum | Difference | Product | Quotient | Quotient_E |
     Quotient_T | Quotient_F | Remainder_E | Remainder_T | Remainder_F |
     Floor | Ceiling | Truncate | Round | To_Int | To_Rat | To_Real |
-    (*these should be in defined_pred, but that's not being used in TPTP*)
     Less | LessEq | Greater | GreaterEq | EvalEq | Is_Int | Is_Rat |
-    Apply (*this is just a matter of convenience*)
+    Distinct |
+    Apply
 
   and logic_symbol = Equals | NEquals | Or | And | Iff | If | Fi | Xor |
     Nor | Nand | Not | Op_Forall | Op_Exists |
-    (*these should be in defined_pred, but that's not being used in TPTP*)
     True | False
 
   and quantifier = (*interpreted binders*)
@@ -229,6 +230,7 @@
     | Term_Conditional of tptp_formula * tptp_term * tptp_term
     | Term_Num of number_kind * string
     | Term_Distinct_Object of string
+    | Term_Let of tptp_let list * tptp_term (*FIXME remove list?*)
 
   and tptp_atom =
       TFF_Typed_Atom of symbol * tptp_type option (*only TFF*)
@@ -243,21 +245,21 @@
     | Conditional of tptp_formula * tptp_formula * tptp_formula
     | Let of tptp_let list * tptp_formula
     | Atom of tptp_atom
-    | THF_type of tptp_type
+    | Type_fmla of tptp_type
     | THF_typing of tptp_formula * tptp_type
 
   and tptp_let =
-      Let_fmla of (string * tptp_type option) * tptp_formula (*both TFF and THF*)
-    | Let_term of (string * tptp_type option) * tptp_term  (*only TFF*)
+      Let_fmla of (string * tptp_type option) * tptp_formula
+    | Let_term of (string * tptp_type option) * tptp_term
 
   and tptp_type =
       Prod_type of tptp_type * tptp_type
     | Fn_type of tptp_type * tptp_type
     | Atom_type of string
     | Defined_type of tptp_base_type
-    | Sum_type of tptp_type * tptp_type (*only THF*)
-    | Fmla_type of tptp_formula (*only THF*)
-    | Subtype of symbol * symbol (*only THF*)
+    | Sum_type of tptp_type * tptp_type
+    | Fmla_type of tptp_formula
+    | Subtype of symbol * symbol
 
 type general_list = general_term list
 type parent_details = general_list
@@ -269,13 +271,6 @@
 
 exception DEQUOTE of string
 
-(*
-datatype defined_functor =
-  ITE_T | UMINUS | SUM | DIFFERENCE | PRODUCT | QUOTIENT | QUOTIENT_E |
-  QUOTIENT_T | QUOTIENT_F | REMAINDER_E | REMAINDER_T | REMAINDER_F |
-  FLOOR | CEILING | TRUNCATE | ROUND | TO_INT | TO_RAT | TO_REAL
-*)
-
 type position = string * int * int
 
 datatype tptp_line =
@@ -350,29 +345,29 @@
 
 fun status_to_string status_value =
   case status_value of
-      Suc => "suc"	| Unp => "unp"
+      Suc => "suc"  | Unp => "unp"
     | Sap => "sap"  | Esa => "esa"
     | Sat => "sat"  | Fsa => "fsa"
     | Thm => "thm"  | Wuc => "wuc"
-    | Eqv => "eqv"	| Tac => "tac"
-    | Wec => "wec"	| Eth => "eth"
-    | Tau => "tau"	| Wtc => "wtc"
-    | Wth => "wth"	| Cax => "cax"
-    | Sca => "sca"	| Tca => "tca"
-    | Wca => "wca"	| Cup => "cup"
-    | Csp => "csp"	| Ecs => "ecs"
-    | Csa => "csa"	| Cth => "cth"
-    | Ceq => "ceq"	| Unc => "unc"
-    | Wcc => "wcc"	| Ect => "ect"
-    | Fun => "fun"	| Uns => "uns"
-    | Wct => "wct"	| Scc => "scc"
-    | Uca => "uca"	| Noc => "noc"
+    | Eqv => "eqv"  | Tac => "tac"
+    | Wec => "wec"  | Eth => "eth"
+    | Tau => "tau"  | Wtc => "wtc"
+    | Wth => "wth"  | Cax => "cax"
+    | Sca => "sca"  | Tca => "tca"
+    | Wca => "wca"  | Cup => "cup"
+    | Csp => "csp"  | Ecs => "ecs"
+    | Csa => "csa"  | Cth => "cth"
+    | Ceq => "ceq"  | Unc => "unc"
+    | Wcc => "wcc"  | Ect => "ect"
+    | Fun => "fun"  | Uns => "uns"
+    | Wct => "wct"  | Scc => "scc"
+    | Uca => "uca"  | Noc => "noc"
 
 fun string_of_tptp_term x =
   case x of
       Term_Func (symbol, tptp_term_list) =>
         "(" ^ string_of_symbol symbol ^ " " ^
-        String.concatWith " " (map string_of_tptp_term tptp_term_list) ^ ")"
+        space_implode " " (map string_of_tptp_term tptp_term_list) ^ ")"
     | Term_Var str => str
     | Term_Conditional (tptp_formula, tptp_term1, tptp_term2) => "" (*FIXME*)
     | Term_Num (_, str) => str
@@ -454,15 +449,15 @@
 
 and string_of_tptp_formula (Pred (symbol, tptp_term_list)) =
       "(" ^ string_of_symbol symbol ^
-      String.concatWith " " (map string_of_tptp_term tptp_term_list) ^ ")"
+      space_implode " " (map string_of_tptp_term tptp_term_list) ^ ")"
   | string_of_tptp_formula (Fmla (symbol, tptp_formula_list)) =
       "(" ^
       string_of_symbol symbol ^
-      String.concatWith " " (map string_of_tptp_formula tptp_formula_list) ^ ")"
+      space_implode " " (map string_of_tptp_formula tptp_formula_list) ^ ")"
   | string_of_tptp_formula (Sequent (tptp_formula_list1, tptp_formula_list2)) = "" (*FIXME*)
   | string_of_tptp_formula (Quant (quantifier, varlist, tptp_formula)) =
       string_of_quantifier quantifier ^ "[" ^
-      String.concatWith ", " (map (fn (n, ty) =>
+      space_implode ", " (map (fn (n, ty) =>
         case ty of
           NONE => n
         | SOME ty => n ^ " : " ^ string_of_tptp_type ty) varlist) ^ "] : (" ^
@@ -470,7 +465,7 @@
   | string_of_tptp_formula (Conditional _) = "" (*FIXME*)
   | string_of_tptp_formula (Let _) = "" (*FIXME*)
   | string_of_tptp_formula (Atom tptp_atom) = string_of_tptp_atom tptp_atom
-  | string_of_tptp_formula (THF_type tptp_type) = string_of_tptp_type tptp_type
+  | string_of_tptp_formula (Type_fmla tptp_type) = string_of_tptp_type tptp_type
   | string_of_tptp_formula (THF_typing (tptp_formula, tptp_type)) =
       string_of_tptp_formula tptp_formula ^ " : " ^ string_of_tptp_type tptp_type
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_to_dot.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,108 @@
+(*  Title:      HOL/TPTP/TPTP_Parser/tptp_to_dot.ML
+    Author:     Nik Sultana, Cambridge University Computer Laboratory
+
+Translates parsed TPTP proofs into DOT format. This can then be processed
+by an accompanying script to translate the proofs into other formats.
+*)
+
+signature TPTP_TO_DOT =
+sig
+  (*DOT-drawing function, works directly on parsed TPTP*)
+  val tptp_dot_node : bool -> bool -> TPTP_Syntax.tptp_line -> string
+
+  (*Parse a (LEO-II+E) proof and produce a DOT file*)
+  val write_proof_dot : string -> string -> unit
+end
+
+structure TPTP_To_Dot : TPTP_TO_DOT =
+struct
+
+open TPTP_Syntax
+
+(*Draw an arc between two nodes*)
+fun dot_arc reverse (src, label) target =
+  "\"" ^ (if reverse then target else src) ^
+  "\" -> \"" ^ (if reverse then src else target) ^
+  "\" " ^ (case label of
+              NONE => ""
+            | SOME label => "[label=\"" ^ label ^ "\"];") ^ "\n"
+
+(*Node shapes indicate the role of the related clauses.*)
+exception NO_ROLE_SHAPE
+fun the_role_shape role =
+  case role of
+    Role_Axiom => "triangle"
+  | Role_Hypothesis => "???"
+  | Role_Definition => raise NO_ROLE_SHAPE
+  | Role_Assumption => "???"
+  | Role_Lemma => "???"
+  | Role_Theorem => "???"
+  | Role_Conjecture => "house"
+  | Role_Negated_Conjecture => "invhouse"
+  | Role_Plain => "circle"
+  | Role_Fi_Domain => raise NO_ROLE_SHAPE
+  | Role_Fi_Functors => raise NO_ROLE_SHAPE
+  | Role_Fi_Predicates => raise NO_ROLE_SHAPE
+  | Role_Type => raise NO_ROLE_SHAPE
+  | Role_Unknown => raise NO_ROLE_SHAPE
+
+fun have_role_shape role =
+  (the_role_shape role; true)
+  handle NO_ROLE_SHAPE => false
+       | exc => raise exc
+
+(*Different styles are applied to nodes relating to clauses written in
+  different languages.*)
+exception NO_LANG_STYLE
+fun the_lang_style lang =
+  case lang of
+      CNF => "dotted"
+    | FOF => "dashed"
+    | THF => "filled"
+    | _ => raise NO_LANG_STYLE
+
+(*Does the formula just consist of "$false"?*)
+fun is_last_line CNF (Pred (Interpreted_Logic False, [])) = true
+  | is_last_line THF (Atom (THF_Atom_term
+      (Term_Func (Interpreted_Logic False, [])))) = true
+  | is_last_line _ _ = false
+
+fun tptp_dot_node with_label reverse_arrows
+ (Annotated_Formula (_, lang, n, role, fmla_tptp, annot)) =
+ (*don't expect to find 'Include' in proofs*)
+ if have_role_shape role
+ then
+   "\"" ^ n ^
+   "\" [shape=\"" ^
+      (if is_last_line lang fmla_tptp then "doublecircle"
+       else the_role_shape role) ^
+   "\", style=\"" ^ the_lang_style lang ^
+   "\", label=\"" ^ n ^ "\"];\n" ^
+   (case TPTP_Proof.extract_inference_info annot of
+     NONE => ""
+   | SOME (rule, ids) =>
+       map (dot_arc reverse_arrows
+             (n, if with_label then SOME rule else NONE)) ids
+       |> implode)
+ else ""
+
+(*FIXME add opts to label arcs etc*)
+fun write_proof_dot input_file output_file =
+  let
+    (*rankdir=\"LR\";\n*)
+    val defaults =
+      "node[fixedsize=true];\n" ^
+      "node[width=.5];\n" ^
+      "node[shape=plaintext];\n" ^
+      "node[fillcolor=lightgray];\n" ^
+      "node[fontsize=40];\n" ^
+      "edge[dir=none];\n"
+  in
+    TPTP_Parser.parse_file input_file
+    |> map (tptp_dot_node false true)
+    |> implode
+    |> (fn str => "digraph ProofGraph {\n" ^ defaults ^ str ^ "}")
+    |> File.write (Path.explode output_file)
+  end
+
+end
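
A minimal usage sketch, assuming the parser sources are loaded and a proof file exists at the (hypothetical) paths below:

  (*write a DOT graph for a parsed LEO-II/E proof; render it afterwards with
    Graphviz, e.g.  dot -Tpdf /tmp/proof.dot -o /tmp/proof.pdf*)
  TPTP_To_Dot.write_proof_dot "/tmp/proof.p" "/tmp/proof.dot";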
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/TPTP_Parser_Example.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,69 @@
+(*  Title:      HOL/TPTP/TPTP_Parser_Example.thy
+    Author:     Nik Sultana, Cambridge University Computer Laboratory
+
+Example of importing a TPTP problem and trying to prove it in Isabelle/HOL.
+*)
+
+theory TPTP_Parser_Example
+imports TPTP_Parser
+uses "~~/src/HOL/ex/sledgehammer_tactics.ML"
+begin
+
+import_tptp "$TPTP_PROBLEMS_PATH/ALG/ALG001^5.p" (*FIXME multiple imports*)
+(*import_tptp "$TPTP_PROBLEMS_PATH/NUM/NUM021^1.p"*)
+
+ML {*
+val an_fmlas =
+  TPTP_Interpret.get_manifests @{theory}
+  |> hd (*FIXME use named lookup*)
+  |> #2 (*get problem contents*)
+  |> #3 (*get formulas*)
+*}
+
+(*Display nicely.*)
+ML {*
+List.app (fn (n, role, _, fmla, _) =>
+  Pretty.writeln
+    (Pretty.block [Pretty.str ("\"" ^ n ^ "\"" ^ "(" ^
+      TPTP_Syntax.role_to_string role  ^ "): "), Syntax.pretty_term @{context} fmla])
+  ) (rev an_fmlas)
+*}
+
+ML {*
+(*Extract the (name, term) pairs of formulas having roles belonging to a
+ user-supplied set*)
+fun extract_terms roles : TPTP_Interpret.tptp_formula_meaning list ->
+ (string * term) list =
+   let
+     fun role_predicate (_, role, _, _, _) =
+       fold (fn r1 => fn b => role = r1 orelse b) roles false
+   in filter role_predicate #> map (fn (n, _, _, t, _) => (n, t)) end
+*}
+
+ML {*
+(*Use a given tactic on a goal*)
+fun prove_conjectures tactic ctxt an_fmlas =
+  let
+    val assumptions =
+      extract_terms
+       [TPTP_Syntax.Role_Definition (*FIXME include axioms, etc here*)]
+       an_fmlas
+      |> map snd
+    val goals = extract_terms [TPTP_Syntax.Role_Conjecture] an_fmlas
+    fun driver (n, goal) =
+      (n, Goal.prove ctxt [] assumptions goal (fn _ => tactic ctxt))
+  in map driver goals end
+
+val auto_prove = prove_conjectures auto_tac
+val sh_prove = prove_conjectures (fn ctxt =>
+  Sledgehammer_Tactics.sledgehammer_with_metis_tac ctxt []
+  (*FIXME use relevance_override*)
+  {add = [], del = [], only = false} 1)
+*}
+
+ML "auto_prove @{context} an_fmlas"
+
+sledgehammer_params [provers = z3_tptp leo2, debug]
+ML "sh_prove @{context} an_fmlas"
+
+end
\ No newline at end of file
--- a/src/HOL/TPTP/TPTP_Parser_Test.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/TPTP/TPTP_Parser_Test.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -7,7 +7,7 @@
 *)
 
 theory TPTP_Parser_Test
-imports TPTP_Parser
+imports TPTP_Parser TPTP_Parser_Example
 begin
 
 ML {*
@@ -69,12 +69,12 @@
 section "Source problems"
 ML {*
   (*problem source*)
-  val thf_probs_dir =
+  val tptp_probs_dir =
     Path.explode "$TPTP_PROBLEMS_PATH"
     |> Path.expand;
 
   (*list of files under test*)
-  val files = TPTP_Syntax.get_file_list thf_probs_dir;
+  val files = TPTP_Syntax.get_file_list tptp_probs_dir;
 
 (*  (*test problem-name parsing and mangling*)
   val problem_names =
@@ -140,7 +140,7 @@
 
 subsection "More parser tests"
 ML {*
-  fun situate file_name = Path.append thf_probs_dir (Path.explode file_name);
+  fun situate file_name = Path.append tptp_probs_dir (Path.explode file_name);
   fun parser_test ctxt = (*FIXME argument order*)
     test_fn ctxt
      (fn file_name =>
@@ -184,8 +184,8 @@
     Timing.timing
     (TPTP_Interpret.interpret_file
      false
-     (Path.dir thf_probs_dir)
-    (Path.append thf_probs_dir (Path.explode "LCL/LCL825-1.p"))
+     (Path.dir tptp_probs_dir)
+    (Path.append tptp_probs_dir (Path.explode "LCL/LCL825-1.p"))
      []
      [])
     @{theory}
@@ -228,7 +228,7 @@
        TimeLimit.timeLimit (Time.fromSeconds timeout)
        (TPTP_Interpret.interpret_file
          false
-         (Path.dir thf_probs_dir)
+         (Path.dir tptp_probs_dir)
          file
          []
          [])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_graph	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,154 @@
+#!/usr/bin/env bash
+#
+# Author: Nik Sultana, Cambridge University Computer Lab
+#
+# DESCRIPTION: TPTP visualisation utility
+
+
+PRG="$(basename "$0")"
+
+#FIXME inline or move to settings
+DOT2TEX=dot2tex
+DOT=dot
+PERL=perl
+PDFLATEX=pdflatex
+[ -n "$ISABELLE_PDFLATEX" ] && PDFLATEX=$ISABELLE_PDFLATEX
+DOT2TEX_VERSION="$($DOT2TEX -V 2> /dev/null)"
+DOT_VERSION="$($DOT -V 2>&1 | grep Graphviz)"
+PERL_VERSION="$($PERL -v | grep -e "v[0-9]\+." 2> /dev/null)"
+PDFLATEX_VERSION="$($PDFLATEX -version | head -1 2> /dev/null)"
+
+function check_deps()
+{
+  #FIXME how well does this work across different platforms and/or versions of
+  #      the tools?
+  for DEP in DOT2TEX DOT PERL PDFLATEX
+  do
+    eval DEP_VAL=\$${DEP}
+    eval DEP_VERSION=\$${DEP}_VERSION
+    if [ -z "$DEP_VERSION" ]; then
+      echo "$DEP not installed"
+    else
+      echo "$DEP ($DEP_VAL) : $DEP_VERSION"
+    fi
+  done
+}
+
+function usage() {
+  echo
+  echo "Usage: isabelle $PRG [OPTIONS] IN_FILE OUT_FILE"
+  echo
+  echo "  Options are:"
+  echo "    -d           probe for dependencies"
+  echo "    -k           don't delete temp files, and print their location"
+  echo "    -n           print name of the generated file"
+  echo
+  echo "  Produces a DOT/TeX/PDF from a TPTP problem/proof, depending on whether"
+  echo "  the extension of OUT_FILE is dot/tex/pdf."
+  echo
+  exit 1
+}
+
+OUTPUT_FORMAT=2
+SHOW_TARGET=""
+KEEP_TEMP=""
+NON_EXEC=""
+
+while getopts "dnkX" OPT
+do
+  #FIXME could add "quiet" mode to send stderr (and some stdout) to /dev/null
+  case "$OPT" in
+    n)
+      SHOW_TARGET=true
+      ;;
+    k)
+      KEEP_TEMP=true
+      ;;
+    X)
+      NON_EXEC=true
+      ;;
+    d)
+      check_deps
+      exit 0
+      ;;
+  esac
+done
+
+shift $(($OPTIND - 1))
+[ "$#" -ne 2 -o "$1" = "-?" ] && usage
+
+case "${2##*.}" in
+    dot)
+      OUTPUT_FORMAT=0
+      ;;
+    tex)
+      OUTPUT_FORMAT=1
+      ;;
+    pdf)
+      OUTPUT_FORMAT=2
+      ;;
+    *)
+      echo "Unrecognised output file extension."
+      exit 1
+      ;;
+esac
+
+function generate_dot()
+{
+  #FIXME using a thy might be more robust
+  LOADER="use \"$TPTP_HOME/TPTP_Parser/ml_yacc_lib.ML\"; \
+          use \"$TPTP_HOME/TPTP_Parser/tptp_syntax.ML\"; \
+          use \"$TPTP_HOME/TPTP_Parser/tptp_lexyacc.ML\"; \
+          use \"$TPTP_HOME/TPTP_Parser/tptp_parser.ML\"; \
+          (*\"$TPTP_HOME/TPTP_Parser/tptp_problem_name.ML\";*) \
+          use \"$TPTP_HOME/TPTP_Parser/tptp_proof.ML\"; \
+          use \"$TPTP_HOME/TPTP_Parser/tptp_to_dot.ML\"; \
+          TPTP_To_Dot.write_proof_dot \"$1\" \"$2\"; exit 0;"
+  "$ISABELLE_PROCESS" -e "$LOADER" Pure
+}
+
+if [ "$OUTPUT_FORMAT" -eq 0 ]; then
+  [ -z "$NON_EXEC" ] && generate_dot "$1" "$2"
+  exit 0
+fi
+
+## set some essential variables
+
+WORKDIR=""
+while :
+do
+  #FIXME not perfectly reliable method, but probably good enough
+  WORKDIR="${ISABELLE_TMP_PREFIX}-tptpgraph$RANDOM"
+  [ ! -d "$WORKDIR" ] && break
+done
+OUTPUT_FILENAME="$(basename "$2")"
+FILEDIR="$(cd "$(dirname "$2")"; cd "$(pwd -P)"; pwd)"
+FILENAME="${OUTPUT_FILENAME%.*}"
+WD="$(pwd)"
+
+## generate and process files in temporary workdir, then move to destination dir
+
+mkdir -p $WORKDIR
+[ -z "$NON_EXEC" ] && generate_dot $1 "$WORKDIR/${FILENAME}.dot"
+cd $WORKDIR
+if [ -z "$NON_EXEC" ]; then
+  $DOT -Txdot "${FILENAME}.dot" \
+  | $DOT2TEX -f pgf -t raw --crop > "${FILENAME}.tex"
+  $PERL -w -p -e 's/_/\\_/g' "${FILENAME}.tex"
+fi
+
+if [ "$OUTPUT_FORMAT" -eq 1 ]; then
+  TARGET=$FILENAME.tex
+else
+  TARGET=$FILENAME.pdf
+  [ -z "$NON_EXEC" ] && $PDFLATEX "${FILENAME}.tex"
+fi
+[ -z "$NON_EXEC" ] && mv $TARGET $WD
+cd $WD
+if [ -n "$KEEP_TEMP" ]; then
+  echo $WORKDIR
+else
+  rm -rf $WORKDIR
+fi
+
+[ -n "$SHOW_TARGET" ] && echo "$FILEDIR/$TARGET"
--- a/src/HOL/Tools/Function/induction_schema.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Function/induction_schema.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -9,7 +9,6 @@
   val mk_ind_tac : (int -> tactic) -> (int -> tactic) -> (int -> tactic)
                    -> Proof.context -> thm list -> tactic
   val induction_schema_tac : Proof.context -> thm list -> tactic
-  val setup : theory -> theory
 end
 
 
@@ -395,8 +394,4 @@
 fun induction_schema_tac ctxt =
   mk_ind_tac (K all_tac) (assume_tac APPEND' Goal.assume_rule_tac ctxt) (K all_tac) ctxt;
 
-val setup =
-  Method.setup @{binding induction_schema} (Scan.succeed (RAW_METHOD o induction_schema_tac))
-    "proves an induction principle"
-
 end
--- a/src/HOL/Tools/Function/lexicographic_order.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Function/lexicographic_order.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -9,8 +9,6 @@
 sig
   val lex_order_tac : bool -> Proof.context -> tactic -> tactic
   val lexicographic_order_tac : bool -> Proof.context -> tactic
-  val lexicographic_order : Proof.context -> Proof.method
-
   val setup: theory -> theory
 end
 
@@ -218,12 +216,7 @@
   lex_order_tac quiet ctxt
     (auto_tac (map_simpset (fn ss => ss addsimps Function_Common.Termination_Simps.get ctxt) ctxt))
 
-val lexicographic_order = SIMPLE_METHOD o lexicographic_order_tac false
-
 val setup =
-  Method.setup @{binding lexicographic_order}
-    (Method.sections clasimp_modifiers >> (K lexicographic_order))
-    "termination prover for lexicographic orderings"
-  #> Context.theory_map (Function_Common.set_termination_prover (lexicographic_order_tac false))
+  Context.theory_map (Function_Common.set_termination_prover (lexicographic_order_tac false))
 
 end;
--- a/src/HOL/Tools/Function/pat_completeness.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Function/pat_completeness.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -7,11 +7,8 @@
 signature PAT_COMPLETENESS =
 sig
     val pat_completeness_tac: Proof.context -> int -> tactic
-    val pat_completeness: Proof.context -> Proof.method
     val prove_completeness : theory -> term list -> term -> term list list ->
       term list list -> thm
-
-    val setup : theory -> theory
 end
 
 structure Pat_Completeness : PAT_COMPLETENESS =
@@ -153,11 +150,4 @@
   end
   handle COMPLETENESS => no_tac)
 
-
-val pat_completeness = SIMPLE_METHOD' o pat_completeness_tac
-
-val setup =
-  Method.setup @{binding pat_completeness} (Scan.succeed pat_completeness)
-    "Completeness prover for datatype patterns"
-
 end
--- a/src/HOL/Tools/Function/termination.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Function/termination.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -141,7 +141,7 @@
 fun prove_chain thy chain_tac (c1, c2) =
   let
     val goal =
-      HOLogic.mk_eq (HOLogic.mk_binop @{const_name Relation.rel_comp} (c1, c2),
+      HOLogic.mk_eq (HOLogic.mk_binop @{const_name Relation.relcomp} (c1, c2),
         Const (@{const_abbrev Set.empty}, fastype_of c1))
       |> HOLogic.mk_Trueprop (* "C1 O C2 = {}" *)
   in
--- a/src/HOL/Tools/Lifting/lifting_def.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Lifting/lifting_def.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -6,6 +6,8 @@
 
 signature LIFTING_DEF =
 sig
+  exception FORCE_RTY of typ * term
+
   val add_lift_def:
     (binding * mixfix) -> typ -> term -> thm -> local_theory -> local_theory
 
@@ -26,6 +28,8 @@
 
 fun ants MRSL thm = fold (fn rl => fn thm => rl RS thm) ants thm
 
+exception FORCE_RTY of typ * term
+
 fun get_body_types (Type ("fun", [_, U]), Type ("fun", [_, V])) = get_body_types (U, V)
   | get_body_types (U, V)  = (U, V)
 
@@ -38,6 +42,7 @@
     val rhs_schematic = singleton (Variable.polymorphic ctxt) rhs
     val rty_schematic = fastype_of rhs_schematic
     val match = Sign.typ_match thy (rty_schematic, rty) Vartab.empty
+      handle Type.TYPE_MATCH => raise FORCE_RTY (rty, rhs)
   in
     Envir.subst_term_types match rhs_schematic
   end
@@ -165,7 +170,8 @@
 fun add_lift_def var qty rhs rsp_thm lthy =
   let
     val rty = fastype_of rhs
-    val absrep_trm =  Lifting_Term.absrep_fun lthy (rty, qty)
+    val quotient_thm = Lifting_Term.prove_quot_theorem lthy (rty, qty)
+    val absrep_trm =  Lifting_Term.quot_thm_abs quotient_thm
     val rty_forced = (domain_type o fastype_of) absrep_trm
     val forced_rhs = force_rty_type lthy rty_forced rhs
     val lhs = Free (Binding.print (#1 var), qty)
@@ -176,15 +182,20 @@
     val ((_, (_ , def_thm)), lthy') = 
       Local_Theory.define (var, ((Thm.def_binding (#1 var), []), newrhs)) lthy
 
+    val transfer_thm = [quotient_thm, rsp_thm, def_thm] MRSL @{thm Quotient_to_transfer}
+
     fun qualify defname suffix = Binding.name suffix
       |> Binding.qualify true defname
 
     val lhs_name = Binding.name_of (#1 var)
     val rsp_thm_name = qualify lhs_name "rsp"
     val code_eqn_thm_name = qualify lhs_name "rep_eq"
+    val transfer_thm_name = qualify lhs_name "transfer"
+    val transfer_attr = Attrib.internal (K Transfer.transfer_add)
   in
     lthy'
       |> (snd oo Local_Theory.note) ((rsp_thm_name, []), [rsp_thm])
+      |> (snd oo Local_Theory.note) ((transfer_thm_name, [transfer_attr]), [transfer_thm])
       |> define_code code_eqn_thm_name def_thm rsp_thm (rty_forced, qty)
   end
 
@@ -235,7 +246,8 @@
 fun lift_def_cmd (raw_var, rhs_raw) lthy =
   let
     val ((binding, SOME qty, mx), ctxt) = yield_singleton Proof_Context.read_vars raw_var lthy 
-    val rhs = (Syntax.check_term ctxt o Syntax.parse_term ctxt) rhs_raw
+    val rhs' = (Syntax.check_term ctxt o Syntax.parse_term ctxt) rhs_raw
+    val rhs = singleton (Variable.polymorphic ctxt) rhs'
  
     fun try_to_prove_refl thm = 
       let
@@ -252,9 +264,10 @@
           | _ => NONE
       end
 
-    val rsp_rel = Lifting_Term.equiv_relation lthy (fastype_of rhs, qty)
+    val quot_thm = Lifting_Term.prove_quot_theorem lthy (fastype_of rhs, qty)
+    val rsp_rel = Lifting_Term.quot_thm_rel quot_thm
     val rty_forced = (domain_type o fastype_of) rsp_rel;
-    val forced_rhs = force_rty_type lthy rty_forced rhs;
+    val forced_rhs = force_rty_type ctxt rty_forced rhs;
     val internal_rsp_tm = HOLogic.mk_Trueprop (rsp_rel $ forced_rhs $ forced_rhs)
     val readable_rsp_thm_eq = mk_readable_rsp_thm_eq internal_rsp_tm lthy
     val maybe_proven_rsp_thm = try_to_prove_refl readable_rsp_thm_eq
@@ -273,10 +286,50 @@
 
   in
     case maybe_proven_rsp_thm of
-      SOME _ => Proof.theorem NONE after_qed [] lthy
-      | NONE =>  Proof.theorem NONE after_qed [[(readable_rsp_tm,[])]] lthy
+      SOME _ => Proof.theorem NONE after_qed [] ctxt
+      | NONE =>  Proof.theorem NONE after_qed [[(readable_rsp_tm,[])]] ctxt
+  end
+
+fun quot_thm_err ctxt (rty, qty) pretty_msg =
+  let
+    val error_msg = cat_lines
+       ["Lifting failed for the following types:",
+        Pretty.string_of (Pretty.block
+         [Pretty.str "Raw type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty]),
+        Pretty.string_of (Pretty.block
+         [Pretty.str "Abstract type:", Pretty.brk 2, Syntax.pretty_typ ctxt qty]),
+        "",
+        (Pretty.string_of (Pretty.block
+         [Pretty.str "Reason:", Pretty.brk 2, pretty_msg]))]
+  in
+    error error_msg
   end
 
+fun force_rty_err ctxt rty rhs =
+  let
+    val error_msg = cat_lines
+       ["Lifting failed for the following term:",
+        Pretty.string_of (Pretty.block
+         [Pretty.str "Term:", Pretty.brk 2, Syntax.pretty_term ctxt rhs]),
+        Pretty.string_of (Pretty.block
+         [Pretty.str "Type:", Pretty.brk 2, Syntax.pretty_typ ctxt (fastype_of rhs)]),
+        "",
+        (Pretty.string_of (Pretty.block
+         [Pretty.str "Reason:", 
+          Pretty.brk 2, 
+          Pretty.str "The type of the term cannot be instancied to",
+          Pretty.brk 1,
+          Pretty.quote (Syntax.pretty_typ ctxt rty),
+          Pretty.str "."]))]
+    in
+      error error_msg
+    end
+
+fun lift_def_cmd_with_err_handling (raw_var, rhs_raw) lthy =
+  (lift_def_cmd (raw_var, rhs_raw) lthy
+    handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy (rty, qty) msg)
+    handle FORCE_RTY (rty, rhs) => force_rty_err lthy rty rhs
+
 (* parser and command *)
 val liftdef_parser =
   ((Parse.binding -- (@{keyword "::"} |-- (Parse.typ >> SOME) -- Parse.opt_mixfix')) >> Parse.triple2)
@@ -285,7 +338,7 @@
 val _ =
   Outer_Syntax.local_theory_to_proof @{command_spec "lift_definition"}
     "definition for constants over the quotient type"
-      (liftdef_parser >> lift_def_cmd)
+      (liftdef_parser >> lift_def_cmd_with_err_handling)
 
 
 end; (* structure *)
--- a/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,7 +1,7 @@
 (*  Title:      HOL/Tools/Lifting/lifting_setup.ML
     Author:     Ondrej Kuncar
 
-Setting the lifting infranstructure up.
+Setting up the lifting infrastructure.
 *)
 
 signature LIFTING_SETUP =
@@ -13,7 +13,7 @@
   val setup_by_typedef_thm: thm -> local_theory -> local_theory
 end;
 
-structure Lifting_Seup: LIFTING_SETUP =
+structure Lifting_Setup: LIFTING_SETUP =
 struct
 
 infix 0 MRSL
@@ -22,26 +22,11 @@
 
 exception SETUP_LIFTING_INFR of string
 
-fun define_cr_rel equiv_thm abs_fun lthy =
+fun define_cr_rel rep_fun lthy =
   let
-    fun force_type_of_rel rel forced_ty =
-      let
-        val thy = Proof_Context.theory_of lthy
-        val rel_ty = (domain_type o fastype_of) rel
-        val ty_inst = Sign.typ_match thy (rel_ty, forced_ty) Vartab.empty
-      in
-        Envir.subst_term_types ty_inst rel
-      end
-
-    val (rty, qty) = (dest_funT o fastype_of) abs_fun
-    val abs_fun_graph = HOLogic.mk_eq(abs_fun $ Bound 1, Bound 0)
-    val Abs_body = (case (HOLogic.dest_Trueprop o prop_of) equiv_thm of
-      Const (@{const_name equivp}, _) $ _ => abs_fun_graph
-      | Const (@{const_name part_equivp}, _) $ rel => 
-        HOLogic.mk_conj (force_type_of_rel rel rty $ Bound 1 $ Bound 1, abs_fun_graph)
-      | _ => raise SETUP_LIFTING_INFR "unsopported equivalence theorem"
-      )
-    val def_term = Abs ("x", rty, Abs ("y", qty, Abs_body));
+    val (qty, rty) = (dest_funT o fastype_of) rep_fun
+    val rep_fun_graph = (HOLogic.eq_const rty) $ Bound 1 $ (rep_fun $ Bound 0)
+    val def_term = Abs ("x", rty, Abs ("y", qty, rep_fun_graph));
     val qty_name = (fst o dest_Type) qty
     val cr_rel_name = Binding.prefix_name "cr_" (Binding.qualified_name qty_name)  
     val (fixed_def_term, lthy') = yield_singleton (Variable.importT_terms) def_term lthy
@@ -65,8 +50,35 @@
   else
     lthy
 
+fun quot_thm_sanity_check ctxt quot_thm =
+  let
+    val ((_, [quot_thm_fixed]), ctxt') = Variable.importT [quot_thm] ctxt 
+    val (rty, qty) = Lifting_Term.quot_thm_rty_qty quot_thm_fixed
+    val rty_tfreesT = Term.add_tfree_namesT rty []
+    val qty_tfreesT = Term.add_tfree_namesT qty []
+    val extra_rty_tfrees =
+      (case subtract (op =) qty_tfreesT rty_tfreesT of
+        [] => []
+      | extras => [Pretty.block ([Pretty.str "Extra variables in the raw type:",
+                                 Pretty.brk 1] @ 
+                                 ((Pretty.commas o map (Pretty.str o quote)) extras) @
+                                 [Pretty.str "."])])
+    val not_type_constr = 
+      (case qty of
+         Type _ => []
+         | _ => [Pretty.block [Pretty.str "The quotient type ",
+                                Pretty.quote (Syntax.pretty_typ ctxt' qty),
+                                Pretty.brk 1,
+                                Pretty.str "is not a type constructor."]])
+    val errs = extra_rty_tfrees @ not_type_constr
+  in
+    if null errs then () else error (cat_lines (["Sanity check of the quotient theorem failed:",""] 
+                                                @ (map Pretty.string_of errs)))
+  end
+
 fun setup_lifting_infr quot_thm equiv_thm lthy =
   let
+    val _ = quot_thm_sanity_check lthy quot_thm
     val (_, qtyp) = Lifting_Term.quot_thm_rty_qty quot_thm
     val qty_full_name = (fst o dest_Type) qtyp
     val quotients = { quot_thm = quot_thm }
@@ -82,9 +94,9 @@
   let
     fun derive_quot_and_equiv_thm to_quot_thm to_equiv_thm typedef_thm lthy =
       let
-        val (_ $ _ $ abs_fun $ _) = (HOLogic.dest_Trueprop o prop_of) typedef_thm
+        val (_ $ rep_fun $ _ $ _) = (HOLogic.dest_Trueprop o prop_of) typedef_thm
         val equiv_thm = typedef_thm RS to_equiv_thm
-        val (T_def, lthy') = define_cr_rel equiv_thm abs_fun lthy
+        val (T_def, lthy') = define_cr_rel rep_fun lthy
         val quot_thm =  [typedef_thm, T_def] MRSL to_quot_thm
       in
         (quot_thm, equiv_thm, lthy')
@@ -93,12 +105,13 @@
     val typedef_set = (snd o dest_comb o HOLogic.dest_Trueprop o prop_of) typedef_thm
     val (quot_thm, equiv_thm, lthy') = (case typedef_set of
       Const ("Orderings.top_class.top", _) => 
-        derive_quot_and_equiv_thm @{thm copy_type_to_Quotient} @{thm copy_type_to_equivp} 
+        derive_quot_and_equiv_thm @{thm UNIV_typedef_to_Quotient} @{thm UNIV_typedef_to_equivp} 
           typedef_thm lthy
       | Const (@{const_name "Collect"}, _) $ Abs (_, _, _) => 
-        derive_quot_and_equiv_thm @{thm invariant_type_to_Quotient} @{thm invariant_type_to_part_equivp} 
+        derive_quot_and_equiv_thm @{thm open_typedef_to_Quotient} @{thm open_typedef_to_part_equivp} 
           typedef_thm lthy
-      | _ => raise SETUP_LIFTING_INFR "unsupported typedef theorem")
+      | _ => derive_quot_and_equiv_thm @{thm typedef_to_Quotient} @{thm typedef_to_part_equivp} 
+          typedef_thm lthy)
   in
     setup_lifting_infr quot_thm equiv_thm lthy'
   end
@@ -107,7 +120,6 @@
 
 val _ = 
   Outer_Syntax.local_theory @{command_spec "setup_lifting"}
-    "Setup lifting infrastracture" 
+    "Setup lifting infrastructure" 
       (Parse_Spec.xthm >> (fn xthm => setup_by_typedef_thm_aux xthm))
-
 end;
--- a/src/HOL/Tools/Lifting/lifting_term.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Lifting/lifting_term.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -6,6 +6,8 @@
 
 signature LIFTING_TERM =
 sig
+  exception QUOT_THM of typ * typ * Pretty.T
+
   val prove_quot_theorem: Proof.context -> typ * typ -> thm
 
   val absrep_fun: Proof.context -> typ * typ -> term
@@ -24,29 +26,10 @@
 structure Lifting_Term: LIFTING_TERM =
 struct
 
-exception LIFT_MATCH of string
-
-(* matches a type pattern with a type *)
-fun match ctxt err ty_pat ty =
-  let
-    val thy = Proof_Context.theory_of ctxt
-  in
-    Sign.typ_match thy (ty_pat, ty) Vartab.empty
-      handle Type.TYPE_MATCH => err ctxt ty_pat ty
-  end
-
-fun equiv_match_err ctxt ty_pat ty =
-  let
-    val ty_pat_str = Syntax.string_of_typ ctxt ty_pat
-    val ty_str = Syntax.string_of_typ ctxt ty
-  in
-    raise LIFT_MATCH (space_implode " "
-      ["equiv_relation (Types ", quote ty_pat_str, "and", quote ty_str, " do not match.)"])
-  end
-
 (* generation of the Quotient theorem  *)
 
-exception QUOT_THM of string
+exception QUOT_THM_INTERNAL of Pretty.T
+exception QUOT_THM of typ * typ * Pretty.T
 
 fun get_quot_thm ctxt s =
   let
@@ -54,7 +37,10 @@
   in
     (case Lifting_Info.lookup_quotients ctxt s of
       SOME qdata => Thm.transfer thy (#quot_thm qdata)
-    | NONE => raise QUOT_THM ("No quotient type " ^ quote s ^ " found."))
+    | NONE => raise QUOT_THM_INTERNAL (Pretty.block 
+      [Pretty.str ("No quotient type " ^ quote s), 
+       Pretty.brk 1, 
+       Pretty.str "found."]))
   end
 
 fun get_rel_quot_thm ctxt s =
@@ -63,40 +49,29 @@
   in
     (case Lifting_Info.lookup_quotmaps ctxt s of
       SOME map_data => Thm.transfer thy (#quot_thm map_data)
-    | NONE => raise QUOT_THM ("get_relmap (no relation map function found for type " ^ s ^ ")"))
+    | NONE => raise QUOT_THM_INTERNAL (Pretty.block 
+      [Pretty.str ("No relator for the type " ^ quote s), 
+       Pretty.brk 1,
+       Pretty.str "found."]))
   end
 
-fun is_id_quot thm = (prop_of thm = prop_of @{thm identity_quotient})
-
-infix 0 MRSL
-
-fun ants MRSL thm = fold (fn rl => fn thm => rl RS thm) ants thm
-
 exception NOT_IMPL of string
 
-fun quot_thm_rel quot_thm = 
-  let
-    val (Const (@{const_name Quotient}, _) $ rel $ _ $ _ $ _) = 
-      (HOLogic.dest_Trueprop o prop_of) quot_thm
-  in
-    rel
-  end
+fun dest_Quotient (Const (@{const_name Quotient}, _) $ rel $ abs $ rep $ cr)
+      = (rel, abs, rep, cr)
+  | dest_Quotient t = raise TERM ("dest_Quotient", [t])
+
+fun quot_thm_rel quot_thm =
+  case (dest_Quotient o HOLogic.dest_Trueprop o prop_of) quot_thm of
+    (rel, _, _, _) => rel
 
 fun quot_thm_abs quot_thm =
-  let
-    val (Const (@{const_name Quotient}, _) $ _ $ abs $ _ $ _) = 
-      (HOLogic.dest_Trueprop o prop_of) quot_thm
-  in
-    abs
-  end
+  case (dest_Quotient o HOLogic.dest_Trueprop o prop_of) quot_thm of
+    (_, abs, _, _) => abs
 
 fun quot_thm_rep quot_thm =
-  let
-    val (Const (@{const_name Quotient}, _) $ _ $ _ $ rep $ _) = 
-      (HOLogic.dest_Trueprop o prop_of) quot_thm
-  in
-    rep
-  end
+  case (dest_Quotient o HOLogic.dest_Trueprop o prop_of) quot_thm of
+    (_, _, rep, _) => rep
 
 fun quot_thm_rty_qty quot_thm =
   let
@@ -106,68 +81,75 @@
     (domain_type abs_type, range_type abs_type)
   end
 
-fun prove_quot_theorem ctxt (rty, qty) =
-  case (rty, qty) of
-    (Type (s, tys), Type (s', tys')) =>
+fun check_raw_types (provided_rty_name, rty_of_qty_name) qty_name =
+  if provided_rty_name <> rty_of_qty_name then
+    raise QUOT_THM_INTERNAL (Pretty.block 
+        [Pretty.str ("The type " ^ quote provided_rty_name),
+         Pretty.brk 1,
+         Pretty.str ("is not a raw type for the quotient type " ^ quote qty_name ^ ";"),
+         Pretty.brk 1,
+         Pretty.str ("the correct raw type is " ^ quote rty_of_qty_name ^ ".")])
+  else
+    ()
+
+fun quotient_tac ctxt = SUBGOAL (fn (t, i) =>
+  let
+    val (_, abs, _, _) = dest_Quotient (HOLogic.dest_Trueprop t)
+    val (rty, qty) = Term.dest_funT (fastype_of abs)
+  in
+    case (rty, qty) of
+      (Type (s, _), Type (s', _)) =>
       if s = s'
       then
         let
-          val args = map (prove_quot_theorem ctxt) (tys ~~ tys')
+          val thm1 = SOME @{thm identity_quotient}
+          val thm2 = try (get_rel_quot_thm ctxt) s
         in
-          if forall is_id_quot args
-          then
-            @{thm identity_quotient}
-          else
-            args MRSL (get_rel_quot_thm ctxt s)
+          resolve_tac (map_filter I [thm1, thm2]) i
         end
       else
         let
           val quot_thm = get_quot_thm ctxt s'
-          val (Type (_, rtys), qty_pat) = quot_thm_rty_qty quot_thm
-          val qtyenv = match ctxt equiv_match_err qty_pat qty
-          val rtys' = map (Envir.subst_type qtyenv) rtys
-          val args = map (prove_quot_theorem ctxt) (tys ~~ rtys')
+          val (Type (rs, _), _) = quot_thm_rty_qty quot_thm
+          val _ = check_raw_types (s, rs) s'
         in
-          if forall is_id_quot args
-          then
-            quot_thm
-          else
-            let
-              val rel_quot_thm = args MRSL (get_rel_quot_thm ctxt s)
-            in
-              [rel_quot_thm, quot_thm] MRSL @{thm Quotient_compose}
-           end
+          resolve_tac [quot_thm, quot_thm RSN (2, @{thm Quotient_compose})] i
         end
-  | _ => @{thm identity_quotient}
+    | (_, Type (s, _)) =>
+      let
+        val thm1 = try (get_quot_thm ctxt) s
+        val thm2 = SOME @{thm identity_quotient}
+        val thm3 = try (get_rel_quot_thm ctxt) s
+      in
+        resolve_tac (map_filter I [thm1, thm2, thm3]) i
+      end
+  | _ => rtac @{thm identity_quotient} i
+    handle QUOT_THM_INTERNAL pretty_msg => raise QUOT_THM (rty, qty, pretty_msg)
+  end)
 
-fun force_qty_type thy qty quot_thm =
+fun prove_quot_theorem ctxt (rty, qty) =
   let
-    val abs_schematic = quot_thm_abs quot_thm 
-    val qty_schematic = (range_type o fastype_of) abs_schematic  
-    val match_env = Sign.typ_match thy (qty_schematic, qty) Vartab.empty
-    fun prep_ty thy (x, (S, ty)) =
-      (ctyp_of thy (TVar (x, S)), ctyp_of thy ty)
-    val ty_inst = Vartab.fold (cons o (prep_ty thy)) match_env []
+    val relT = [rty, rty] ---> HOLogic.boolT
+    val absT = rty --> qty
+    val repT = qty --> rty
+    val crT = [rty, qty] ---> HOLogic.boolT
+    val quotT = [relT, absT, repT, crT] ---> HOLogic.boolT
+    val rel = Var (("R", 0), relT)
+    val abs = Var (("Abs", 0), absT)
+    val rep = Var (("Rep", 0), repT)
+    val cr = Var (("T", 0), crT)
+    val quot = Const (@{const_name Quotient}, quotT)
+    val goal = HOLogic.Trueprop $ (quot $ rel $ abs $ rep $ cr)
+    val cgoal = Thm.cterm_of (Proof_Context.theory_of ctxt) goal
+    val tac = REPEAT (quotient_tac ctxt 1)
   in
-    Thm.instantiate (ty_inst, []) quot_thm
+    Goal.prove_internal [] cgoal (K tac)
   end
 
-fun absrep_fun ctxt (rty, qty) = 
-  let
-    val thy = Proof_Context.theory_of ctxt
-    val quot_thm = prove_quot_theorem ctxt (rty, qty)
-    val forced_quot_thm = force_qty_type thy qty quot_thm
-  in
-    quot_thm_abs forced_quot_thm
-  end
+fun absrep_fun ctxt (rty, qty) =
+  quot_thm_abs (prove_quot_theorem ctxt (rty, qty))
 
 fun equiv_relation ctxt (rty, qty) =
-  let
-    val thy = Proof_Context.theory_of ctxt
-    val quot_thm = prove_quot_theorem ctxt (rty, qty)
-    val forced_quot_thm = force_qty_type thy qty quot_thm
-  in
-    quot_thm_rel forced_quot_thm
-  end
+  quot_thm_rel (prove_quot_theorem ctxt (rty, qty))
 
 end;
--- a/src/HOL/Tools/Nitpick/kodkod.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Nitpick/kodkod.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1067,7 +1067,7 @@
                   TimedOut js
                 else if code = 0 then
                   Normal ([], js, first_error)
-                else if has_error "exec: java" then
+                else if code = 127 then
                   JavaNotInstalled
                 else if has_error "UnsupportedClassVersionError" then
                   JavaTooOld
--- a/src/HOL/Tools/Nitpick/nitpick.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -178,7 +178,7 @@
                (length ts downto 1) ts))]
 
 fun install_java_message () =
-  "Nitpick requires a Java 1.5 virtual machine called \"java\"."
+  "Nitpick requires Java Development Kit 1.6/1.7 via ISABELLE_JDK_HOME setting."
 fun install_kodkodi_message () =
   "Nitpick requires the external Java program Kodkodi. To install it, download \
   \the package from \"http://www21.in.tum.de/~blanchet/#software\" and add the \
--- a/src/HOL/Tools/Nitpick/nitpick_hol.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -391,7 +391,7 @@
    (@{const_name Id}, 0),
    (@{const_name converse}, 1),
    (@{const_name trancl}, 1),
-   (@{const_name rel_comp}, 2),
+   (@{const_name relcomp}, 2),
    (@{const_name finite}, 1),
    (@{const_name unknown}, 0),
    (@{const_name is_unknown}, 1),
--- a/src/HOL/Tools/Nitpick/nitpick_mono.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick_mono.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -856,7 +856,7 @@
                  accum ||> add_annotation_atom_comp Neq [] (V x) (A New))
               end
             | @{const_name trancl} => do_fragile_set_operation T accum
-            | @{const_name rel_comp} =>
+            | @{const_name relcomp} =>
               let
                 val x = Unsynchronized.inc max_fresh
                 val bc_set_M = domain_type T |> mtype_for_set x
--- a/src/HOL/Tools/Nitpick/nitpick_nut.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick_nut.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -522,7 +522,7 @@
           Op1 (Converse, range_type T, Any, sub t1)
         | (Const (@{const_name trancl}, T), [t1]) =>
           Op1 (Closure, range_type T, Any, sub t1)
-        | (Const (@{const_name rel_comp}, T), [t1, t2]) =>
+        | (Const (@{const_name relcomp}, T), [t1, t2]) =>
           Op2 (Composition, nth_range_type 2 T, Any, sub t1, sub t2)
         | (Const (x as (s as @{const_name Suc}, T)), []) =>
           if is_built_in_const thy stds x then Cst (Suc, T, Any)
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_core.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1334,7 +1334,7 @@
       let
         val full_mode = fold_rev (curry Fun) (map (K Input) (binder_types T)) Bool
       in
-        if member (op =) (modes_of Pred ctxt predname) full_mode then
+        if member eq_mode (modes_of Pred ctxt predname) full_mode then
           let
             val Ts = binder_types T
             val arg_names = Name.variant_list []
--- a/src/HOL/Tools/Qelim/cooper.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Qelim/cooper.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -13,7 +13,6 @@
   exception COOPER of string
   val conv: Proof.context -> conv
   val tac: bool -> thm list -> thm list -> Proof.context -> int -> tactic
-  val method: (Proof.context -> Method.method) context_parser
   val setup: theory -> theory
 end;
 
@@ -722,8 +721,12 @@
      val ntac = (case qs of [] => q aconvc @{cterm "False"}
                          | _ => false)
     in 
-    if ntac then no_tac
-      else rtac (Goal.prove_internal [] g (K (blast_tac (put_claset HOL_cs ctxt) 1))) i
+      if ntac then no_tac
+      else
+        (case try (fn () =>
+            Goal.prove_internal [] g (K (blast_tac (put_claset HOL_cs ctxt) 1))) () of
+          NONE => no_tac
+        | SOME r => rtac r i)
     end)
 end;
 
@@ -836,43 +839,27 @@
 fun finish_tac q = SUBGOAL (fn (_, i) =>
   (if q then I else TRY) (rtac TrueI i));
 
-fun tac elim add_ths del_ths ctxt =
-let val ss = Simplifier.context ctxt (fst (get ctxt)) delsimps del_ths addsimps add_ths
+fun tac elim add_ths del_ths = Subgoal.FOCUS_PARAMS (fn {context = ctxt, ...} =>
+  let
+    val ss = Simplifier.context ctxt (fst (get ctxt)) delsimps del_ths addsimps add_ths
     val aprems = Arith_Data.get_arith_facts ctxt
-in
-  Method.insert_tac aprems
-  THEN_ALL_NEW Object_Logic.full_atomize_tac
-  THEN_ALL_NEW CONVERSION Thm.eta_long_conversion
-  THEN_ALL_NEW simp_tac ss
-  THEN_ALL_NEW (TRY o generalize_tac (int_nat_terms ctxt))
-  THEN_ALL_NEW Object_Logic.full_atomize_tac
-  THEN_ALL_NEW (thin_prems_tac ctxt (is_relevant ctxt))
-  THEN_ALL_NEW Object_Logic.full_atomize_tac
-  THEN_ALL_NEW div_mod_tac ctxt
-  THEN_ALL_NEW splits_tac ctxt
-  THEN_ALL_NEW simp_tac ss
-  THEN_ALL_NEW CONVERSION Thm.eta_long_conversion
-  THEN_ALL_NEW nat_to_int_tac ctxt
-  THEN_ALL_NEW (core_tac ctxt)
-  THEN_ALL_NEW finish_tac elim
-end;
-
-val method =
-  let
-    fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ()
-    fun simple_keyword k = Scan.lift (Args.$$$ k) >> K ()
-    val addN = "add"
-    val delN = "del"
-    val elimN = "elim"
-    val any_keyword = keyword addN || keyword delN || simple_keyword elimN
-    val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
   in
-    Scan.optional (simple_keyword elimN >> K false) true --
-    Scan.optional (keyword addN |-- thms) [] --
-    Scan.optional (keyword delN |-- thms) [] >>
-    (fn ((elim, add_ths), del_ths) => fn ctxt =>
-      SIMPLE_METHOD' (tac elim add_ths del_ths ctxt))
-  end;
+    Method.insert_tac aprems
+    THEN_ALL_NEW Object_Logic.full_atomize_tac
+    THEN_ALL_NEW CONVERSION Thm.eta_long_conversion
+    THEN_ALL_NEW simp_tac ss
+    THEN_ALL_NEW (TRY o generalize_tac (int_nat_terms ctxt))
+    THEN_ALL_NEW Object_Logic.full_atomize_tac
+    THEN_ALL_NEW (thin_prems_tac ctxt (is_relevant ctxt))
+    THEN_ALL_NEW Object_Logic.full_atomize_tac
+    THEN_ALL_NEW div_mod_tac ctxt
+    THEN_ALL_NEW splits_tac ctxt
+    THEN_ALL_NEW simp_tac ss
+    THEN_ALL_NEW CONVERSION Thm.eta_long_conversion
+    THEN_ALL_NEW nat_to_int_tac ctxt
+    THEN_ALL_NEW (core_tac ctxt)
+    THEN_ALL_NEW finish_tac elim
+  end 1);
 
 
 (* theory setup *)
--- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -363,6 +363,16 @@
             (fn t => absdummy dT (rt' (mk_eval_ffun dT rT' $ incr_boundvars 1 t $ Bound 0)),
               Type (@{type_name "Quickcheck_Narrowing.ffun"}, [dT, rT']))
       end
+      | eval_function (T as Type (@{type_name prod}, [fT, sT])) =
+        let
+          val (ft', fT') = eval_function fT
+          val (st', sT') = eval_function sT
+          val T' = Type (@{type_name prod}, [fT', sT'])
+          val map_const = Const (@{const_name map_pair}, (fT' --> fT) --> (sT' --> sT) --> T' --> T)
+          fun apply_dummy T t = absdummy T (t (Bound 0))
+        in
+          (fn t => list_comb (map_const, [apply_dummy fT' ft', apply_dummy sT' st', t]), T')
+        end
       | eval_function T = (I, T)
     val (tt, boundTs') = split_list (map eval_function boundTs)
     val t' = subst_bounds (map2 (fn f => fn x => f x) (rev tt) (map_index (Bound o fst) boundTs), t)
--- a/src/HOL/Tools/Quotient/quotient_def.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Quotient/quotient_def.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -18,8 +18,6 @@
     (binding * string option * mixfix) option * (Attrib.binding * (string * string)) ->
     local_theory -> Proof.state
 
-  val lift_raw_const: typ list -> (string * term * mixfix) -> local_theory ->
-    Quotient_Info.quotconsts * local_theory
 end;
 
 structure Quotient_Def: QUOTIENT_DEF =
@@ -331,17 +329,6 @@
 val quotient_def_cmd = gen_quotient_def Proof_Context.read_vars read_term'
 
 
-(* a wrapper for automatically lifting a raw constant *)
-fun lift_raw_const qtys (qconst_name, rconst, mx) ctxt =
-  let
-    val rty = fastype_of rconst
-    val qty = Quotient_Term.derive_qtyp ctxt qtys rty
-    val lhs = Free (qconst_name, qty)
-  in
-    (*quotient_def (SOME (Binding.name qconst_name, NONE, mx), (Attrib.empty_binding, (lhs, rconst))) ctxt*)
-    ({qconst = lhs, rconst = lhs, def = @{thm refl}}, ctxt)
-  end
-
 (* parser and command *)
 val quotdef_parser =
   Scan.option Parse_Spec.constdecl --
--- a/src/HOL/Tools/Quotient/quotient_type.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/Quotient/quotient_type.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -99,6 +99,54 @@
   else
     lthy
 
+infix 0 MRSL
+
+fun ants MRSL thm = fold (fn rl => fn thm => rl RS thm) ants thm
+
+fun define_cr_rel equiv_thm abs_fun lthy =
+  let
+    fun force_type_of_rel rel forced_ty =
+      let
+        val thy = Proof_Context.theory_of lthy
+        val rel_ty = (domain_type o fastype_of) rel
+        val ty_inst = Sign.typ_match thy (rel_ty, forced_ty) Vartab.empty
+      in
+        Envir.subst_term_types ty_inst rel
+      end
+  
+    val (rty, qty) = (dest_funT o fastype_of) abs_fun
+    val abs_fun_graph = HOLogic.mk_eq(abs_fun $ Bound 1, Bound 0)
+    val Abs_body = (case (HOLogic.dest_Trueprop o prop_of) equiv_thm of
+      Const (@{const_name equivp}, _) $ _ => abs_fun_graph
+      | Const (@{const_name part_equivp}, _) $ rel => 
+        HOLogic.mk_conj (force_type_of_rel rel rty $ Bound 1 $ Bound 1, abs_fun_graph)
+      | _ => error "unsupported equivalence theorem"
+      )
+    val def_term = Abs ("x", rty, Abs ("y", qty, Abs_body));
+    val qty_name = (fst o dest_Type) qty
+    val cr_rel_name = Binding.prefix_name "cr_" (Binding.qualified_name qty_name)  
+    val (fixed_def_term, lthy') = yield_singleton (Variable.importT_terms) def_term lthy
+    val ((_, (_ , def_thm)), lthy'') =
+      Local_Theory.define ((cr_rel_name, NoSyn), ((Thm.def_binding cr_rel_name, []), fixed_def_term)) lthy'
+  in
+    (def_thm, lthy'')
+  end;
+
+fun setup_lifting_package quot3_thm equiv_thm lthy =
+  let
+    val (_ $ _ $ abs_fun $ _) = (HOLogic.dest_Trueprop o prop_of) quot3_thm
+    val (T_def, lthy') = define_cr_rel equiv_thm abs_fun lthy
+    val quot_thm = (case (HOLogic.dest_Trueprop o prop_of) equiv_thm of
+      Const (@{const_name equivp}, _) $ _ => 
+        [quot3_thm, T_def, equiv_thm] MRSL @{thm Quotient3_to_Quotient_equivp}
+      | Const (@{const_name part_equivp}, _) $ _ => 
+        [quot3_thm, T_def] MRSL @{thm Quotient3_to_Quotient}
+      | _ => error "unsupported equivalence theorem"
+      )
+  in
+    Lifting_Setup.setup_lifting_infr quot_thm equiv_thm lthy'
+  end
+
 fun init_quotient_infr quot_thm equiv_thm lthy =
   let
     val (_ $ rel $ abs $ rep) = (HOLogic.dest_Trueprop o prop_of) quot_thm
@@ -115,6 +163,7 @@
         (fn phi => Quotient_Info.update_quotients qty_full_name (quot_info phi)
           #> Quotient_Info.update_abs_rep qty_full_name (abs_rep_info phi))
       |> define_abs_type quot_thm
+      |> setup_lifting_package quot_thm equiv_thm
   end
 
 (* main function for constructing a quotient type *)
--- a/src/HOL/Tools/groebner.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/groebner.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -16,7 +16,6 @@
   val ring_tac: thm list -> thm list -> Proof.context -> int -> tactic
   val ideal_tac: thm list -> thm list -> Proof.context -> int -> tactic
   val algebra_tac: thm list -> thm list -> Proof.context -> int -> tactic
-  val algebra_method: (Proof.context -> Method.method) context_parser
 end
 
 structure Groebner : GROEBNER =
@@ -1027,21 +1026,4 @@
 fun algebra_tac add_ths del_ths ctxt i =
  ring_tac add_ths del_ths ctxt i ORELSE ideal_tac add_ths del_ths ctxt i
 
-local
-
-fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ()
-val addN = "add"
-val delN = "del"
-val any_keyword = keyword addN || keyword delN
-val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
-
-in
-
-val algebra_method = ((Scan.optional (keyword addN |-- thms) []) --
-   (Scan.optional (keyword delN |-- thms) [])) >>
-  (fn (add_ths, del_ths) => fn ctxt =>
-       SIMPLE_METHOD' (algebra_tac add_ths del_ths ctxt))
-
 end;
-
-end;
--- a/src/HOL/Tools/legacy_transfer.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/legacy_transfer.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -1,7 +1,7 @@
-(*  Title:      HOL/Tools/transfer.ML
+(*  Title:      HOL/Tools/legacy_transfer.ML
     Author:     Amine Chaieb, University of Cambridge, 2009
-                Jeremy Avigad, Carnegie Mellon University
-                Florian Haftmann, TU Muenchen
+    Author:     Jeremy Avigad, Carnegie Mellon University
+    Author:     Florian Haftmann, TU Muenchen
 
 Simple transfer principle on theorems.
 *)
--- a/src/HOL/Tools/transfer.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Tools/transfer.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -113,6 +113,18 @@
     rtac thm2 i
   end handle Match => no_tac | TERM _ => no_tac)
 
+val post_simps =
+  @{thms App_def Abs_def transfer_forall_eq [symmetric]
+    transfer_implies_eq [symmetric] transfer_bforall_unfold}
+
+fun gen_frees_tac keepers ctxt = SUBGOAL (fn (t, i) =>
+  let
+    val vs = rev (Term.add_frees t [])
+    val vs' = filter_out (member (op =) keepers o fst) vs
+  in
+    Induct.arbitrary_tac ctxt 0 vs' i
+  end)
+
 fun transfer_tac ctxt = SUBGOAL (fn (t, i) =>
   let
     val bs = bnames t
@@ -128,8 +140,7 @@
          rtac @{thm equal_elim_rule1} i THEN
          rtac (Thm.reflexive (cert ctxt (fst (rename bs u)))) i) i,
        (* FIXME: rewrite_goal_tac does unwanted eta-contraction *)
-       rewrite_goal_tac @{thms App_def Abs_def} i,
-       rewrite_goal_tac @{thms transfer_forall_eq [symmetric] transfer_implies_eq [symmetric]} i,
+       rewrite_goal_tac post_simps i,
        rtac @{thm _} i]
   end)
 
@@ -143,10 +154,10 @@
          (resolve_tac rules ORELSE' atac ORELSE' eq_tac ctxt) i]
   end
 
-val transfer_method =
-  Scan.succeed (fn ctxt => SIMPLE_METHOD' (transfer_tac ctxt))
+val transfer_method : (Proof.context -> Method.method) context_parser =
+  Scan.succeed (fn ctxt => SIMPLE_METHOD' (gen_frees_tac [] ctxt THEN' transfer_tac ctxt))
 
-val correspondence_method =
+val correspondence_method : (Proof.context -> Method.method) context_parser =
   Scan.succeed (fn ctxt => SIMPLE_METHOD' (correspondence_tac ctxt))
 
 (* Attribute for correspondence rules *)
--- a/src/HOL/Transcendental.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Transcendental.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -421,7 +421,7 @@
          order_trans [OF norm_setsum]
          real_setsum_nat_ivl_bounded2
          mult_nonneg_nonneg
-         zero_le_imp_of_nat
+         of_nat_0_le_iff
          zero_le_power K)
       apply (rule le_Kn, simp)
       done
--- a/src/HOL/Transfer.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Transfer.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -62,12 +62,19 @@
 definition transfer_implies where
   "transfer_implies \<equiv> op \<longrightarrow>"
 
+definition transfer_bforall :: "('a \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> bool) \<Rightarrow> bool"
+  where "transfer_bforall \<equiv> (\<lambda>P Q. \<forall>x. P x \<longrightarrow> Q x)"
+
 lemma transfer_forall_eq: "(\<And>x. P x) \<equiv> Trueprop (transfer_forall (\<lambda>x. P x))"
   unfolding atomize_all transfer_forall_def ..
 
 lemma transfer_implies_eq: "(A \<Longrightarrow> B) \<equiv> Trueprop (transfer_implies A B)"
   unfolding atomize_imp transfer_implies_def ..
 
+lemma transfer_bforall_unfold:
+  "Trueprop (transfer_bforall P (\<lambda>x. Q x)) \<equiv> (\<And>x. P x \<Longrightarrow> Q x)"
+  unfolding transfer_bforall_def atomize_imp atomize_all ..
+
 lemma transfer_start: "\<lbrakk>Rel (op =) P Q; P\<rbrakk> \<Longrightarrow> Q"
   unfolding Rel_def by simp
 
--- a/src/HOL/Transitive_Closure.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Transitive_Closure.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -726,51 +726,91 @@
 lemma relpowp_relpow_eq [pred_set_conv]:
   fixes R :: "'a rel"
   shows "(\<lambda>x y. (x, y) \<in> R) ^^ n = (\<lambda>x y. (x, y) \<in> R ^^ n)"
-  by (induct n) (simp_all add: rel_compp_rel_comp_eq)
+  by (induct n) (simp_all add: relcompp_relcomp_eq)
 
 text {* for code generation *}
 
 definition relpow :: "nat \<Rightarrow> ('a \<times> 'a) set \<Rightarrow> ('a \<times> 'a) set" where
   relpow_code_def [code_abbrev]: "relpow = compow"
 
+definition relpowp :: "nat \<Rightarrow> ('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> 'a \<Rightarrow> bool)" where
+  relpowp_code_def [code_abbrev]: "relpowp = compow"
+
 lemma [code]:
   "relpow (Suc n) R = (relpow n R) O R"
   "relpow 0 R = Id"
   by (simp_all add: relpow_code_def)
 
+lemma [code]:
+  "relpowp (Suc n) R = (R ^^ n) OO R"
+  "relpowp 0 R = HOL.eq"
+  by (simp_all add: relpowp_code_def)
+
 hide_const (open) relpow
+hide_const (open) relpowp
 
 lemma relpow_1 [simp]:
   fixes R :: "('a \<times> 'a) set"
   shows "R ^^ 1 = R"
   by simp
 
+lemma relpowp_1 [simp]:
+  fixes P :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
+  shows "P ^^ 1 = P"
+  by (fact relpow_1 [to_pred])
+
 lemma relpow_0_I: 
   "(x, x) \<in> R ^^ 0"
   by simp
 
+lemma relpowp_0_I:
+  "(P ^^ 0) x x"
+  by (fact relpow_0_I [to_pred])
+
 lemma relpow_Suc_I:
   "(x, y) \<in>  R ^^ n \<Longrightarrow> (y, z) \<in> R \<Longrightarrow> (x, z) \<in> R ^^ Suc n"
   by auto
 
+lemma relpowp_Suc_I:
+  "(P ^^ n) x y \<Longrightarrow> P y z \<Longrightarrow> (P ^^ Suc n) x z"
+  by (fact relpow_Suc_I [to_pred])
+
 lemma relpow_Suc_I2:
   "(x, y) \<in> R \<Longrightarrow> (y, z) \<in> R ^^ n \<Longrightarrow> (x, z) \<in> R ^^ Suc n"
   by (induct n arbitrary: z) (simp, fastforce)
 
+lemma relpowp_Suc_I2:
+  "P x y \<Longrightarrow> (P ^^ n) y z \<Longrightarrow> (P ^^ Suc n) x z"
+  by (fact relpow_Suc_I2 [to_pred])
+
 lemma relpow_0_E:
   "(x, y) \<in> R ^^ 0 \<Longrightarrow> (x = y \<Longrightarrow> P) \<Longrightarrow> P"
   by simp
 
+lemma relpowp_0_E:
+  "(P ^^ 0) x y \<Longrightarrow> (x = y \<Longrightarrow> Q) \<Longrightarrow> Q"
+  by (fact relpow_0_E [to_pred])
+
 lemma relpow_Suc_E:
   "(x, z) \<in> R ^^ Suc n \<Longrightarrow> (\<And>y. (x, y) \<in> R ^^ n \<Longrightarrow> (y, z) \<in> R \<Longrightarrow> P) \<Longrightarrow> P"
   by auto
 
+lemma relpowp_Suc_E:
+  "(P ^^ Suc n) x z \<Longrightarrow> (\<And>y. (P ^^ n) x y \<Longrightarrow> P y z \<Longrightarrow> Q) \<Longrightarrow> Q"
+  by (fact relpow_Suc_E [to_pred])
+
 lemma relpow_E:
   "(x, z) \<in>  R ^^ n \<Longrightarrow>  (n = 0 \<Longrightarrow> x = z \<Longrightarrow> P)
    \<Longrightarrow> (\<And>y m. n = Suc m \<Longrightarrow> (x, y) \<in>  R ^^ m \<Longrightarrow> (y, z) \<in> R \<Longrightarrow> P)
    \<Longrightarrow> P"
   by (cases n) auto
 
+lemma relpowp_E:
+  "(P ^^ n) x z \<Longrightarrow> (n = 0 \<Longrightarrow> x = z \<Longrightarrow> Q)
+  \<Longrightarrow> (\<And>y m. n = Suc m \<Longrightarrow> (P ^^ m) x y \<Longrightarrow> P y z \<Longrightarrow> Q)
+  \<Longrightarrow> Q"
+  by (fact relpow_E [to_pred])
+
 lemma relpow_Suc_D2:
   "(x, z) \<in> R ^^ Suc n \<Longrightarrow> (\<exists>y. (x, y) \<in> R \<and> (y, z) \<in> R ^^ n)"
   apply (induct n arbitrary: x z)
@@ -778,14 +818,26 @@
   apply (blast intro: relpow_Suc_I elim: relpow_0_E relpow_Suc_E)
   done
 
+lemma relpowp_Suc_D2:
+  "(P ^^ Suc n) x z \<Longrightarrow> \<exists>y. P x y \<and> (P ^^ n) y z"
+  by (fact relpow_Suc_D2 [to_pred])
+
 lemma relpow_Suc_E2:
   "(x, z) \<in> R ^^ Suc n \<Longrightarrow> (\<And>y. (x, y) \<in> R \<Longrightarrow> (y, z) \<in> R ^^ n \<Longrightarrow> P) \<Longrightarrow> P"
   by (blast dest: relpow_Suc_D2)
 
+lemma relpowp_Suc_E2:
+  "(P ^^ Suc n) x z \<Longrightarrow> (\<And>y. P x y \<Longrightarrow> (P ^^ n) y z \<Longrightarrow> Q) \<Longrightarrow> Q"
+  by (fact relpow_Suc_E2 [to_pred])
+
 lemma relpow_Suc_D2':
   "\<forall>x y z. (x, y) \<in> R ^^ n \<and> (y, z) \<in> R \<longrightarrow> (\<exists>w. (x, w) \<in> R \<and> (w, z) \<in> R ^^ n)"
   by (induct n) (simp_all, blast)
 
+lemma relpowp_Suc_D2':
+  "\<forall>x y z. (P ^^ n) x y \<and> P y z \<longrightarrow> (\<exists>w. P x w \<and> (P ^^ n) w z)"
+  by (fact relpow_Suc_D2' [to_pred])
+
 lemma relpow_E2:
   "(x, z) \<in> R ^^ n \<Longrightarrow>  (n = 0 \<Longrightarrow> x = z \<Longrightarrow> P)
      \<Longrightarrow> (\<And>y m. n = Suc m \<Longrightarrow> (x, y) \<in> R \<Longrightarrow> (y, z) \<in> R ^^ m \<Longrightarrow> P)
@@ -794,16 +846,32 @@
   apply (cut_tac n=nat and R=R in relpow_Suc_D2', simp, blast)
   done
 
+lemma relpowp_E2:
+  "(P ^^ n) x z \<Longrightarrow> (n = 0 \<Longrightarrow> x = z \<Longrightarrow> Q)
+    \<Longrightarrow> (\<And>y m. n = Suc m \<Longrightarrow> P x y \<Longrightarrow> (P ^^ m) y z \<Longrightarrow> Q)
+  \<Longrightarrow> Q"
+  by (fact relpow_E2 [to_pred])
+
 lemma relpow_add: "R ^^ (m+n) = R^^m O R^^n"
   by (induct n) auto
 
+lemma relpowp_add: "P ^^ (m + n) = P ^^ m OO P ^^ n"
+  by (fact relpow_add [to_pred])
+
 lemma relpow_commute: "R O R ^^ n = R ^^ n O R"
   by (induct n) (simp, simp add: O_assoc [symmetric])
 
+lemma relpowp_commute: "P OO P ^^ n = P ^^ n OO P"
+  by (fact relpow_commute [to_pred])
+
 lemma relpow_empty:
   "0 < n \<Longrightarrow> ({} :: ('a \<times> 'a) set) ^^ n = {}"
   by (cases n) auto
 
+lemma relpowp_bot:
+  "0 < n \<Longrightarrow> (\<bottom> :: 'a \<Rightarrow> 'a \<Rightarrow> bool) ^^ n = \<bottom>"
+  by (fact relpow_empty [to_pred])
+
 lemma rtrancl_imp_UN_relpow:
   assumes "p \<in> R^*"
   shows "p \<in> (\<Union>n. R ^^ n)"
@@ -818,6 +886,11 @@
   with Pair show ?thesis by simp
 qed
 
+lemma rtranclp_imp_Sup_relpowp:
+  assumes "(P^**) x y"
+  shows "(\<Squnion>n. P ^^ n) x y"
+  using assms and rtrancl_imp_UN_relpow [to_pred] by blast
+
 lemma relpow_imp_rtrancl:
   assumes "p \<in> R ^^ n"
   shows "p \<in> R^*"
@@ -833,14 +906,27 @@
   with Pair show ?thesis by simp
 qed
 
+lemma relpowp_imp_rtranclp:
+  assumes "(P ^^ n) x y"
+  shows "(P^**) x y"
+  using assms and relpow_imp_rtrancl [to_pred] by blast
+
 lemma rtrancl_is_UN_relpow:
   "R^* = (\<Union>n. R ^^ n)"
   by (blast intro: rtrancl_imp_UN_relpow relpow_imp_rtrancl)
 
+lemma rtranclp_is_Sup_relpowp:
+  "P^** = (\<Squnion>n. P ^^ n)"
+  using rtrancl_is_UN_relpow [to_pred, of P] by auto
+
 lemma rtrancl_power:
   "p \<in> R^* \<longleftrightarrow> (\<exists>n. p \<in> R ^^ n)"
   by (simp add: rtrancl_is_UN_relpow)
 
+lemma rtranclp_power:
+  "(P^**) x y \<longleftrightarrow> (\<exists>n. (P ^^ n) x y)"
+  by (simp add: rtranclp_is_Sup_relpowp)
+
 lemma trancl_power:
   "p \<in> R^+ \<longleftrightarrow> (\<exists>n > 0. p \<in> R ^^ n)"
   apply (cases p)
@@ -849,7 +935,7 @@
    apply (drule tranclD2)
    apply (clarsimp simp: rtrancl_is_UN_relpow)
    apply (rule_tac x="Suc n" in exI)
-   apply (clarsimp simp: rel_comp_unfold)
+   apply (clarsimp simp: relcomp_unfold)
    apply fastforce
   apply clarsimp
   apply (case_tac n, simp)
@@ -858,10 +944,18 @@
   apply (drule rtrancl_into_trancl1) apply auto
   done
 
+lemma tranclp_power:
+  "(P^++) x y \<longleftrightarrow> (\<exists>n > 0. (P ^^ n) x y)"
+  using trancl_power [to_pred, of P "(x, y)"] by simp
+
 lemma rtrancl_imp_relpow:
   "p \<in> R^* \<Longrightarrow> \<exists>n. p \<in> R ^^ n"
   by (auto dest: rtrancl_imp_UN_relpow)
 
+lemma rtranclp_imp_relpowp:
+  "(P^**) x y \<Longrightarrow> \<exists>n. (P ^^ n) x y"
+  by (auto dest: rtranclp_imp_Sup_relpowp)
+
 text{* By Sternagel/Thiemann: *}
 lemma relpow_fun_conv:
   "((a,b) \<in> R ^^ n) = (\<exists>f. f 0 = a \<and> f n = b \<and> (\<forall>i<n. (f i, f(Suc i)) \<in> R))"
@@ -870,7 +964,7 @@
 next
   case (Suc n)
   show ?case
-  proof (simp add: rel_comp_unfold Suc)
+  proof (simp add: relcomp_unfold Suc)
     show "(\<exists>y. (\<exists>f. f 0 = a \<and> f n = y \<and> (\<forall>i<n. (f i,f(Suc i)) \<in> R)) \<and> (y,b) \<in> R)
      = (\<exists>f. f 0 = a \<and> f(Suc n) = b \<and> (\<forall>i<Suc n. (f i, f (Suc i)) \<in> R))"
     (is "?l = ?r")
@@ -887,6 +981,10 @@
   qed
 qed
 
+lemma relpowp_fun_conv:
+  "(P ^^ n) x y \<longleftrightarrow> (\<exists>f. f 0 = x \<and> f n = y \<and> (\<forall>i<n. P (f i) (f (Suc i))))"
+  by (fact relpow_fun_conv [to_pred])
+
 lemma relpow_finite_bounded1:
 assumes "finite(R :: ('a*'a)set)" and "k>0"
 shows "R^^k \<subseteq> (UN n:{n. 0<n & n <= card R}. R^^n)" (is "_ \<subseteq> ?r")
@@ -979,7 +1077,7 @@
 apply(auto dest: relpow_finite_bounded1)
 done
 
-lemma finite_rel_comp[simp,intro]:
+lemma finite_relcomp[simp,intro]:
 assumes "finite R" and "finite S"
 shows "finite(R O S)"
 proof-
--- a/src/HOL/Wellfounded.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Wellfounded.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -271,7 +271,7 @@
       assume "y \<in> Q"
       with `y \<notin> ?Q'` 
       obtain w where "(w, y) \<in> R" and "w \<in> Q" by auto
-      from `(w, y) \<in> R` `(y, z) \<in> S` have "(w, z) \<in> R O S" by (rule rel_compI)
+      from `(w, y) \<in> R` `(y, z) \<in> S` have "(w, z) \<in> R O S" by (rule relcompI)
       with `R O S \<subseteq> R` have "(w, z) \<in> R" ..
       with `z \<in> ?Q'` have "w \<notin> Q" by blast 
       with `w \<in> Q` show False by contradiction
--- a/src/HOL/Word/Word.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/Word/Word.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -228,38 +228,74 @@
   thus "PROP P x" by simp
 qed
 
+subsection {* Correspondence relation for theorem transfer *}
+
+definition cr_word :: "int \<Rightarrow> 'a::len0 word \<Rightarrow> bool"
+  where "cr_word \<equiv> (\<lambda>x y. word_of_int x = y)"
+
+lemma Quotient_word:
+  "Quotient (\<lambda>x y. bintrunc (len_of TYPE('a)) x = bintrunc (len_of TYPE('a)) y)
+    word_of_int uint (cr_word :: _ \<Rightarrow> 'a::len0 word \<Rightarrow> bool)"
+  unfolding Quotient_alt_def cr_word_def
+  by (simp add: word_ubin.norm_eq_iff)
+
+lemma reflp_word:
+  "reflp (\<lambda>x y. bintrunc (len_of TYPE('a::len0)) x = bintrunc (len_of TYPE('a)) y)"
+  by (simp add: reflp_def)
+
+local_setup {*
+  Lifting_Setup.setup_lifting_infr @{thm Quotient_word} @{thm reflp_word}
+*}
+
+text {* TODO: The next several lemmas could be generated automatically. *}
+
+lemma bi_total_cr_word [transfer_rule]: "bi_total cr_word"
+  using Quotient_word reflp_word by (rule Quotient_bi_total)
+
+lemma right_unique_cr_word [transfer_rule]: "right_unique cr_word"
+  using Quotient_word by (rule Quotient_right_unique)
+
+lemma word_eq_transfer [transfer_rule]:
+  "(fun_rel cr_word (fun_rel cr_word op =))
+    (\<lambda>x y. bintrunc (len_of TYPE('a)) x = bintrunc (len_of TYPE('a)) y)
+    (op = :: 'a::len0 word \<Rightarrow> 'a word \<Rightarrow> bool)"
+  using Quotient_word by (rule Quotient_rel_eq_transfer)
+
+lemma word_of_int_transfer [transfer_rule]:
+  "(fun_rel op = cr_word) (\<lambda>x. x) word_of_int"
+  using Quotient_word reflp_word by (rule Quotient_id_abs_transfer)
+
+lemma uint_transfer [transfer_rule]:
+  "(fun_rel cr_word op =) (bintrunc (len_of TYPE('a)))
+    (uint :: 'a::len0 word \<Rightarrow> int)"
+  unfolding fun_rel_def cr_word_def by (simp add: word_ubin.eq_norm)
+
 subsection  "Arithmetic operations"
 
-definition
-  word_succ :: "'a :: len0 word => 'a word"
-where
-  "word_succ a = word_of_int (uint a + 1)"
-
-definition
-  word_pred :: "'a :: len0 word => 'a word"
-where
-  "word_pred a = word_of_int (uint a - 1)"
+lift_definition word_succ :: "'a::len0 word \<Rightarrow> 'a word" is "\<lambda>x. x + 1"
+  by (metis bintr_ariths(6))
+
+lift_definition word_pred :: "'a::len0 word \<Rightarrow> 'a word" is "\<lambda>x. x - 1"
+  by (metis bintr_ariths(7))
 
 instantiation word :: (len0) "{neg_numeral, Divides.div, comm_monoid_mult, comm_ring}"
 begin
 
-definition
-  word_0_wi: "0 = word_of_int 0"
-
-definition
-  word_1_wi: "1 = word_of_int 1"
-
-definition
-  word_add_def: "a + b = word_of_int (uint a + uint b)"
-
-definition
-  word_sub_wi: "a - b = word_of_int (uint a - uint b)"
-
-definition
-  word_minus_def: "- a = word_of_int (- uint a)"
-
-definition
-  word_mult_def: "a * b = word_of_int (uint a * uint b)"
+lift_definition zero_word :: "'a word" is "0" .
+
+lift_definition one_word :: "'a word" is "1" .
+
+lift_definition plus_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> 'a word" is "op +"
+  by (metis bintr_ariths(2))
+
+lift_definition minus_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> 'a word" is "op -"
+  by (metis bintr_ariths(3))
+
+lift_definition uminus_word :: "'a word \<Rightarrow> 'a word" is uminus
+  by (metis bintr_ariths(5))
+
+lift_definition times_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> 'a word" is "op *"
+  by (metis bintr_ariths(4))
 
 definition
   word_div_def: "a div b = word_of_int (uint a div uint b)"
@@ -267,9 +303,25 @@
 definition
   word_mod_def: "a mod b = word_of_int (uint a mod uint b)"
 
-lemmas word_arith_wis =
-  word_add_def word_sub_wi word_mult_def word_minus_def 
-  word_succ_def word_pred_def word_0_wi word_1_wi
+instance
+  by default (transfer, simp add: algebra_simps)+
+
+end
+
+text {* Legacy theorems: *}
+
+lemma word_arith_wis: shows
+  word_add_def: "a + b = word_of_int (uint a + uint b)" and
+  word_sub_wi: "a - b = word_of_int (uint a - uint b)" and
+  word_mult_def: "a * b = word_of_int (uint a * uint b)" and
+  word_minus_def: "- a = word_of_int (- uint a)" and
+  word_succ_alt: "word_succ a = word_of_int (uint a + 1)" and
+  word_pred_alt: "word_pred a = word_of_int (uint a - 1)" and
+  word_0_wi: "0 = word_of_int 0" and
+  word_1_wi: "1 = word_of_int 1"
+  unfolding plus_word_def minus_word_def times_word_def uminus_word_def
+  unfolding word_succ_def word_pred_def zero_word_def one_word_def
+  by simp_all
 
 lemmas arths = 
   bintr_ariths [THEN word_ubin.norm_eq_iff [THEN iffD1], folded word_ubin.eq_norm]
@@ -282,7 +334,7 @@
   wi_hom_neg: "- word_of_int a = word_of_int (- a)" and
   wi_hom_succ: "word_succ (word_of_int a) = word_of_int (a + 1)" and
   wi_hom_pred: "word_pred (word_of_int a) = word_of_int (a - 1)"
-  by (auto simp: word_arith_wis arths)
+  by (transfer, simp)+
 
 lemmas wi_hom_syms = wi_homs [symmetric]
 
@@ -290,17 +342,11 @@
 
 lemmas word_of_int_hom_syms = word_of_int_homs [symmetric]
 
-instance
-  by default (auto simp: split_word_all word_of_int_homs algebra_simps)
-
-end
-
 instance word :: (len) comm_ring_1
 proof
   have "0 < len_of TYPE('a)" by (rule len_gt_0)
   then show "(0::'a word) \<noteq> 1"
-    unfolding word_0_wi word_1_wi
-    by (auto simp add: word_ubin.norm_eq_iff [symmetric] gr0_conv_Suc)
+    by - (transfer, auto simp add: gr0_conv_Suc)
 qed
 
 lemma word_of_nat: "of_nat n = word_of_int (int n)"
@@ -344,21 +390,17 @@
 instantiation word :: (len0) bits
 begin
 
-definition
-  word_and_def: 
-  "(a::'a word) AND b = word_of_int (uint a AND uint b)"
-
-definition
-  word_or_def:  
-  "(a::'a word) OR b = word_of_int (uint a OR uint b)"
-
-definition
-  word_xor_def: 
-  "(a::'a word) XOR b = word_of_int (uint a XOR uint b)"
-
-definition
-  word_not_def: 
-  "NOT (a::'a word) = word_of_int (NOT (uint a))"
+lift_definition bitNOT_word :: "'a word \<Rightarrow> 'a word" is bitNOT
+  by (metis bin_trunc_not)
+
+lift_definition bitAND_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> 'a word" is bitAND
+  by (metis bin_trunc_and)
+
+lift_definition bitOR_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> 'a word" is bitOR
+  by (metis bin_trunc_or)
+
+lift_definition bitXOR_word :: "'a word \<Rightarrow> 'a word \<Rightarrow> 'a word" is bitXOR
+  by (metis bin_trunc_xor)
 
 definition
   word_test_bit_def: "test_bit a = bin_nth (uint a)"
@@ -390,6 +432,14 @@
 
 end
 
+lemma shows
+  word_not_def: "NOT (a::'a::len0 word) = word_of_int (NOT (uint a))" and
+  word_and_def: "(a::'a word) AND b = word_of_int (uint a AND uint b)" and
+  word_or_def: "(a::'a word) OR b = word_of_int (uint a OR uint b)" and
+  word_xor_def: "(a::'a word) XOR b = word_of_int (uint a XOR uint b)"
+  unfolding bitNOT_word_def bitAND_word_def bitOR_word_def bitXOR_word_def
+  by simp_all
+
 instantiation word :: (len) bitss
 begin
 
@@ -567,6 +617,12 @@
 
 declare word_neg_numeral_alt [symmetric, code_abbrev]
 
+lemma word_numeral_transfer [transfer_rule]:
+  "(fun_rel op = cr_word) numeral numeral"
+  "(fun_rel op = cr_word) neg_numeral neg_numeral"
+  unfolding fun_rel_def cr_word_def word_numeral_alt word_neg_numeral_alt
+  by simp_all
+
 lemma uint_bintrunc [simp]:
   "uint (numeral bin :: 'a word) = 
     bintrunc (len_of TYPE ('a :: len0)) (numeral bin)"
@@ -1201,9 +1257,6 @@
   word_sub_wi
   word_arith_wis (* FIXME: duplicate *)
 
-lemmas word_succ_alt = word_succ_def (* FIXME: duplicate *)
-lemmas word_pred_alt = word_pred_def (* FIXME: duplicate *)
-
 subsection  "Transferring goals from words to ints"
 
 lemma word_ths:  
@@ -1213,11 +1266,7 @@
   word_pred_succ: "word_pred (word_succ a) = a" and
   word_succ_pred: "word_succ (word_pred a) = a" and
   word_mult_succ: "word_succ a * b = b + a * b"
-  by (rule word_uint.Abs_cases [of b],
-      rule word_uint.Abs_cases [of a],
-      simp add: add_commute mult_commute 
-                ring_distribs word_of_int_homs
-           del: word_of_int_0 word_of_int_1)+
+  by (transfer, simp add: algebra_simps)+
 
 lemma uint_cong: "x = y \<Longrightarrow> uint x = uint y"
   by simp
@@ -1237,7 +1286,7 @@
 lemmas uint_mod_alt = word_mod_def [THEN trans [OF uint_cong int_word_uint]]
 
 lemma word_pred_0_n1: "word_pred 0 = word_of_int -1"
-  unfolding word_pred_def uint_eq_0 by simp
+  unfolding word_pred_m1 by simp
 
 lemma succ_pred_no [simp]:
   "word_succ (numeral w) = numeral w + 1"
@@ -2118,7 +2167,7 @@
   "word_of_int a AND word_of_int b = word_of_int (a AND b)"
   "word_of_int a OR word_of_int b = word_of_int (a OR b)"
   "word_of_int a XOR word_of_int b = word_of_int (a XOR b)"
-  unfolding word_log_defs wils1 by simp_all
+  by (transfer, rule refl)+
 
 lemma word_no_log_defs [simp]:
   "NOT (numeral a) = word_of_int (NOT (numeral a))"
@@ -2135,7 +2184,7 @@
   "numeral a XOR neg_numeral b = word_of_int (numeral a XOR neg_numeral b)"
   "neg_numeral a XOR numeral b = word_of_int (neg_numeral a XOR numeral b)"
   "neg_numeral a XOR neg_numeral b = word_of_int (neg_numeral a XOR neg_numeral b)"
-  unfolding word_numeral_alt word_neg_numeral_alt word_wi_log_defs by simp_all
+  by (transfer, rule refl)+
 
 text {* Special cases for when one of the arguments equals 1. *}
 
@@ -2153,15 +2202,23 @@
   "1 XOR neg_numeral b = word_of_int (1 XOR neg_numeral b)"
   "numeral a XOR 1 = word_of_int (numeral a XOR 1)"
   "neg_numeral a XOR 1 = word_of_int (neg_numeral a XOR 1)"
-  unfolding word_1_no word_no_log_defs by simp_all
+  by (transfer, simp)+
 
 lemma uint_or: "uint (x OR y) = (uint x) OR (uint y)"
-  by (simp add: word_or_def word_wi_log_defs word_ubin.eq_norm
-                bin_trunc_ao(2) [symmetric])
+  by (transfer, simp add: bin_trunc_ao)
 
 lemma uint_and: "uint (x AND y) = (uint x) AND (uint y)"
-  by (simp add: word_and_def word_wi_log_defs word_ubin.eq_norm
-                bin_trunc_ao(1) [symmetric]) 
+  by (transfer, simp add: bin_trunc_ao)
+
+lemma test_bit_wi [simp]:
+  "(word_of_int x::'a::len0 word) !! n \<longleftrightarrow> n < len_of TYPE('a) \<and> bin_nth x n"
+  unfolding word_test_bit_def
+  by (simp add: word_ubin.eq_norm nth_bintr)
+
+lemma word_test_bit_transfer [transfer_rule]:
+  "(fun_rel cr_word (fun_rel op = op =))
+    (\<lambda>x n. n < len_of TYPE('a) \<and> bin_nth x n) (test_bit :: 'a::len0 word \<Rightarrow> _)"
+  unfolding fun_rel_def cr_word_def by simp
 
 lemma word_ops_nth_size:
   "n < size (x::'a::len0 word) \<Longrightarrow> 
@@ -2169,42 +2226,32 @@
     (x AND y) !! n = (x !! n & y !! n) & 
     (x XOR y) !! n = (x !! n ~= y !! n) & 
     (NOT x) !! n = (~ x !! n)"
-  unfolding word_size word_test_bit_def word_log_defs
-  by (clarsimp simp add : word_ubin.eq_norm nth_bintr bin_nth_ops)
+  unfolding word_size by transfer (simp add: bin_nth_ops)
 
 lemma word_ao_nth:
   fixes x :: "'a::len0 word"
   shows "(x OR y) !! n = (x !! n | y !! n) & 
          (x AND y) !! n = (x !! n & y !! n)"
-  apply (cases "n < size x")
-   apply (drule_tac y = "y" in word_ops_nth_size)
-   apply simp
-  apply (simp add : test_bit_bin word_size)
-  done
-
-lemma test_bit_wi [simp]:
-  "(word_of_int x::'a::len0 word) !! n \<longleftrightarrow> n < len_of TYPE('a) \<and> bin_nth x n"
-  unfolding word_test_bit_def
-  by (simp add: nth_bintr [symmetric] word_ubin.eq_norm)
+  by transfer (auto simp add: bin_nth_ops)
 
 lemma test_bit_numeral [simp]:
   "(numeral w :: 'a::len0 word) !! n \<longleftrightarrow>
     n < len_of TYPE('a) \<and> bin_nth (numeral w) n"
-  unfolding word_numeral_alt test_bit_wi ..
+  by transfer (rule refl)
 
 lemma test_bit_neg_numeral [simp]:
   "(neg_numeral w :: 'a::len0 word) !! n \<longleftrightarrow>
     n < len_of TYPE('a) \<and> bin_nth (neg_numeral w) n"
-  unfolding word_neg_numeral_alt test_bit_wi ..
+  by transfer (rule refl)
 
 lemma test_bit_1 [simp]: "(1::'a::len word) !! n \<longleftrightarrow> n = 0"
-  unfolding word_1_wi test_bit_wi by auto
+  by transfer auto
   
 lemma nth_0 [simp]: "~ (0::'a::len0 word) !! n"
-  unfolding word_test_bit_def by simp
+  by transfer simp
 
 lemma nth_minus1 [simp]: "(-1::'a::len0 word) !! n \<longleftrightarrow> n < len_of TYPE('a)"
-  unfolding word_test_bit_def by (simp add: nth_bintr)
+  by transfer simp
 
 (* get from commutativity, associativity etc of int_and etc
   to same for word_and etc *)
@@ -2299,15 +2346,12 @@
 lemma word_add_not [simp]: 
   fixes x :: "'a::len0 word"
   shows "x + NOT x = -1"
-  using word_of_int_Ex [where x=x] 
-  by (auto simp: bwsimps bin_add_not [unfolded Min_def])
+  by transfer (simp add: bin_add_not)
 
 lemma word_plus_and_or [simp]:
   fixes x :: "'a::len0 word"
   shows "(x AND y) + (x OR y) = x + y"
-  using word_of_int_Ex [where x=x] 
-        word_of_int_Ex [where x=y] 
-  by (auto simp: bwsimps plus_and_or)
+  by transfer (simp add: plus_and_or)
 
 lemma leoa:   
   fixes x :: "'a::len0 word"
--- a/src/HOL/ex/Coherent.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/ex/Coherent.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -13,7 +13,7 @@
 
 no_notation
   comp (infixl "o" 55) and
-  rel_comp (infixr "O" 75)
+  relcomp (infixr "O" 75)
 
 lemma p1p2:
   assumes
--- a/src/HOL/ex/Executable_Relation.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/ex/Executable_Relation.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -27,7 +27,7 @@
 
 lemma comp_Id_on:
   "Id_on X O R = Set.project (%(x, y). x : X) R"
-by (auto intro!: rel_compI)
+by (auto intro!: relcompI)
 
 lemma comp_Id_on':
   "R O Id_on X = Set.project (%(x, y). y : X) R"
@@ -37,7 +37,7 @@
   "Set.project (%(x, y). x : X) (Id_on Y) = Id_on (X Int Y)"
 by auto
 
-lemma rel_comp_raw:
+lemma relcomp_raw:
   "(rel_raw X R) O (rel_raw Y S) = rel_raw (X Int Y) (Set.project (%(x, y). y : Y) R Un (Set.project (%(x, y). x : X) S Un R O S))"
 unfolding rel_raw_def
 apply simp
@@ -79,7 +79,7 @@
 
 subsubsection {* Constant definitions on relations *}
 
-hide_const (open) converse rel_comp rtrancl Image
+hide_const (open) converse relcomp rtrancl Image
 
 quotient_definition member :: "'a * 'a => 'a rel => bool" where
   "member" is "Set.member :: 'a * 'a => ('a * 'a) set => bool" done
@@ -92,9 +92,9 @@
 where
   "union" is "Set.union :: ('a * 'a) set => ('a * 'a) set => ('a * 'a) set" done
 
-quotient_definition rel_comp :: "'a rel => 'a rel => 'a rel"
+quotient_definition relcomp :: "'a rel => 'a rel => 'a rel"
 where
-  "rel_comp" is "Relation.rel_comp :: ('a * 'a) set => ('a * 'a) set => ('a * 'a) set" done
+  "relcomp" is "Relation.relcomp :: ('a * 'a) set => ('a * 'a) set => ('a * 'a) set" done
 
 quotient_definition rtrancl :: "'a rel => 'a rel"
 where
@@ -121,8 +121,8 @@
 by (lifting union_raw)
 
 lemma [code]:
-   "rel_comp (rel X R) (rel Y S) = rel (X Int Y) (Set.project (%(x, y). y : Y) R Un (Set.project (%(x, y). x : X) S Un R O S))"
-by (lifting rel_comp_raw)
+   "relcomp (rel X R) (rel Y S) = rel (X Int Y) (Set.project (%(x, y). y : Y) R Un (Set.project (%(x, y). x : X) S Un R O S))"
+by (lifting relcomp_raw)
 
 lemma [code]:
   "rtrancl (rel X R) = rel UNIV (R^+)"
--- a/src/HOL/ex/ROOT.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/ex/ROOT.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -67,7 +67,6 @@
   "Quicksort",
   "Birthday_Paradox",
   "List_to_Set_Comprehension_Examples",
-  "Set_Algebras",
   "Seq",
   "Simproc_Tests",
   "Executable_Relation"
--- a/src/HOL/ex/SAT_Examples.thy	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/HOL/ex/SAT_Examples.thy	Mon Apr 16 19:01:57 2012 +0200
@@ -80,11 +80,9 @@
 ML {* sat.trace_sat := false; *}
 ML {* quick_and_dirty := false; *}
 
-method_setup rawsat =
-  {* Scan.succeed (SIMPLE_METHOD' o sat.rawsat_tac) *}
-  "SAT solver (no preprocessing)"
-
-(* ML {* Toplevel.profiling := 1; *} *)
+method_setup rawsat = {*
+  Scan.succeed (SIMPLE_METHOD' o sat.rawsat_tac)
+*} "SAT solver (no preprocessing)"
 
 (* Translated from TPTP problem library: PUZ015-2.006.dimacs *)
 
@@ -512,8 +510,6 @@
    (as of 2006-08-30, on a 2.5 GHz Pentium 4)
 *)
 
-(* ML {* Toplevel.profiling := 0; *} *)
-
 text {*
 Function {\tt benchmark} takes the name of an existing DIMACS CNF
 file, parses this file, passes the problem to a SAT solver, and checks
--- a/src/HOL/ex/Set_Algebras.thy	Wed Apr 04 09:59:49 2012 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,371 +0,0 @@
-(*  Title:      HOL/ex/Set_Algebras.thy
-    Author:     Jeremy Avigad and Kevin Donnelly; Florian Haftmann, TUM
-*)
-
-header {* Algebraic operations on sets *}
-
-theory Set_Algebras
-imports Main Interpretation_with_Defs
-begin
-
-text {*
-  This library lifts operations like addition and muliplication to
-  sets.  It was designed to support asymptotic calculations. See the
-  comments at the top of theory @{text BigO}.
-*}
-
-definition set_plus :: "'a::plus set \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "\<oplus>" 65) where
-  "A \<oplus> B = {c. \<exists>a\<in>A. \<exists>b\<in>B. c = a + b}"
-
-definition set_times :: "'a::times set \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "\<otimes>" 70) where
-  "A \<otimes> B = {c. \<exists>a\<in>A. \<exists>b\<in>B. c = a * b}"
-
-definition elt_set_plus :: "'a::plus \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "+o" 70) where
-  "a +o B = {c. \<exists>b\<in>B. c = a + b}"
-
-definition elt_set_times :: "'a::times \<Rightarrow> 'a set \<Rightarrow> 'a set"  (infixl "*o" 80) where
-  "a *o B = {c. \<exists>b\<in>B. c = a * b}"
-
-abbreviation (input) elt_set_eq :: "'a \<Rightarrow> 'a set \<Rightarrow> bool"  (infix "=o" 50) where
-  "x =o A \<equiv> x \<in> A"
-
-interpretation set_add!: semigroup "set_plus :: 'a::semigroup_add set \<Rightarrow> 'a set \<Rightarrow> 'a set"
-  by default (force simp add: set_plus_def add.assoc)
-
-interpretation set_add!: abel_semigroup "set_plus :: 'a::ab_semigroup_add set \<Rightarrow> 'a set \<Rightarrow> 'a set"
-  by default (force simp add: set_plus_def add.commute)
-
-interpretation set_add!: monoid "set_plus :: 'a::monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}"
-  by default (simp_all add: set_plus_def)
-
-interpretation set_add!: comm_monoid "set_plus :: 'a::comm_monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}"
-  by default (simp add: set_plus_def)
-
-interpretation set_add!: monoid_add "set_plus :: 'a::monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}"
-  defines listsum_set is set_add.listsum
-  by default (simp_all add: set_add.assoc)
-
-interpretation
-  set_add!: comm_monoid_add "set_plus :: 'a::comm_monoid_add set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}"
-  defines setsum_set is set_add.setsum
-  where "monoid_add.listsum set_plus {0::'a} = listsum_set"
-proof -
-  show "class.comm_monoid_add (set_plus :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set) {0}"
-    by default (simp_all add: set_add.commute)
-  then interpret set_add!: comm_monoid_add "set_plus :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{0}" .
-  show "monoid_add.listsum set_plus {0::'a} = listsum_set"
-    by (simp only: listsum_set_def)
-qed
-
-interpretation set_mult!: semigroup "set_times :: 'a::semigroup_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set"
-  by default (force simp add: set_times_def mult.assoc)
-
-interpretation
-  set_mult!: abel_semigroup "set_times :: 'a::ab_semigroup_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set"
-  by default (force simp add: set_times_def mult.commute)
-
-interpretation set_mult!: monoid "set_times :: 'a::monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}"
-  by default (simp_all add: set_times_def)
-
-interpretation
-  set_mult!: comm_monoid "set_times :: 'a::comm_monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}"
-  by default (simp add: set_times_def)
-
-interpretation set_mult!: monoid_mult "{1}" "set_times :: 'a::monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set"
-  defines power_set is set_mult.power
-  by default (simp_all add: set_mult.assoc)
-
-interpretation
-  set_mult!: comm_monoid_mult "set_times :: 'a::comm_monoid_mult set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}"
-  defines setprod_set is set_mult.setprod
-  where "power.power {1} set_times = power_set"
-proof -
-  show "class.comm_monoid_mult (set_times :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set) {1}"
-    by default (simp_all add: set_mult.commute)
-  then interpret set_mult!: comm_monoid_mult "set_times :: 'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" "{1}" .
-  show "power.power {1} set_times = power_set"
-    by (simp add: power_set_def)
-qed
-
-lemma set_plus_intro [intro]: "a : C ==> b : D ==> a + b : C \<oplus> D"
-  by (auto simp add: set_plus_def)
-
-lemma set_plus_intro2 [intro]: "b : C ==> a + b : a +o C"
-  by (auto simp add: elt_set_plus_def)
-
-lemma set_plus_rearrange: "((a::'a::comm_monoid_add) +o C) \<oplus> (b +o D) = (a + b) +o (C \<oplus> D)"
-  apply (auto simp add: elt_set_plus_def set_plus_def add_ac)
-   apply (rule_tac x = "ba + bb" in exI)
-  apply (auto simp add: add_ac)
-  apply (rule_tac x = "aa + a" in exI)
-  apply (auto simp add: add_ac)
-  done
-
-lemma set_plus_rearrange2: "(a::'a::semigroup_add) +o (b +o C) = (a + b) +o C"
-  by (auto simp add: elt_set_plus_def add_assoc)
-
-lemma set_plus_rearrange3: "((a::'a::semigroup_add) +o B) \<oplus> C = a +o (B \<oplus> C)"
-  apply (auto simp add: elt_set_plus_def set_plus_def)
-   apply (blast intro: add_ac)
-  apply (rule_tac x = "a + aa" in exI)
-  apply (rule conjI)
-   apply (rule_tac x = "aa" in bexI)
-    apply auto
-  apply (rule_tac x = "ba" in bexI)
-   apply (auto simp add: add_ac)
-  done
-
-theorem set_plus_rearrange4: "C \<oplus> ((a::'a::comm_monoid_add) +o D) = a +o (C \<oplus> D)"
-  apply (auto intro!: simp add: elt_set_plus_def set_plus_def add_ac)
-   apply (rule_tac x = "aa + ba" in exI)
-   apply (auto simp add: add_ac)
-  done
-
-theorems set_plus_rearranges = set_plus_rearrange set_plus_rearrange2
-  set_plus_rearrange3 set_plus_rearrange4
-
-lemma set_plus_mono [intro!]: "C <= D ==> a +o C <= a +o D"
-  by (auto simp add: elt_set_plus_def)
-
-lemma set_plus_mono2 [intro]: "(C::('a::plus) set) <= D ==> E <= F ==>
-    C \<oplus> E <= D \<oplus> F"
-  by (auto simp add: set_plus_def)
-
-lemma set_plus_mono3 [intro]: "a : C ==> a +o D <= C \<oplus> D"
-  by (auto simp add: elt_set_plus_def set_plus_def)
-
-lemma set_plus_mono4 [intro]: "(a::'a::comm_monoid_add) : C ==>
-    a +o D <= D \<oplus> C"
-  by (auto simp add: elt_set_plus_def set_plus_def add_ac)
-
-lemma set_plus_mono5: "a:C ==> B <= D ==> a +o B <= C \<oplus> D"
-  apply (subgoal_tac "a +o B <= a +o D")
-   apply (erule order_trans)
-   apply (erule set_plus_mono3)
-  apply (erule set_plus_mono)
-  done
-
-lemma set_plus_mono_b: "C <= D ==> x : a +o C
-    ==> x : a +o D"
-  apply (frule set_plus_mono)
-  apply auto
-  done
-
-lemma set_plus_mono2_b: "C <= D ==> E <= F ==> x : C \<oplus> E ==>
-    x : D \<oplus> F"
-  apply (frule set_plus_mono2)
-   prefer 2
-   apply force
-  apply assumption
-  done
-
-lemma set_plus_mono3_b: "a : C ==> x : a +o D ==> x : C \<oplus> D"
-  apply (frule set_plus_mono3)
-  apply auto
-  done
-
-lemma set_plus_mono4_b: "(a::'a::comm_monoid_add) : C ==>
-    x : a +o D ==> x : D \<oplus> C"
-  apply (frule set_plus_mono4)
-  apply auto
-  done
-
-lemma set_zero_plus [simp]: "(0::'a::comm_monoid_add) +o C = C"
-  by (auto simp add: elt_set_plus_def)
-
-lemma set_zero_plus2: "(0::'a::comm_monoid_add) : A ==> B <= A \<oplus> B"
-  apply (auto intro!: simp add: set_plus_def)
-  apply (rule_tac x = 0 in bexI)
-   apply (rule_tac x = x in bexI)
-    apply (auto simp add: add_ac)
-  done
-
-lemma set_plus_imp_minus: "(a::'a::ab_group_add) : b +o C ==> (a - b) : C"
-  by (auto simp add: elt_set_plus_def add_ac diff_minus)
-
-lemma set_minus_imp_plus: "(a::'a::ab_group_add) - b : C ==> a : b +o C"
-  apply (auto simp add: elt_set_plus_def add_ac diff_minus)
-  apply (subgoal_tac "a = (a + - b) + b")
-   apply (rule bexI, assumption, assumption)
-  apply (auto simp add: add_ac)
-  done
-
-lemma set_minus_plus: "((a::'a::ab_group_add) - b : C) = (a : b +o C)"
-  by (rule iffI, rule set_minus_imp_plus, assumption, rule set_plus_imp_minus,
-    assumption)
-
-lemma set_times_intro [intro]: "a : C ==> b : D ==> a * b : C \<otimes> D"
-  by (auto simp add: set_times_def)
-
-lemma set_times_intro2 [intro!]: "b : C ==> a * b : a *o C"
-  by (auto simp add: elt_set_times_def)
-
-lemma set_times_rearrange: "((a::'a::comm_monoid_mult) *o C) \<otimes>
-    (b *o D) = (a * b) *o (C \<otimes> D)"
-  apply (auto simp add: elt_set_times_def set_times_def)
-   apply (rule_tac x = "ba * bb" in exI)
-   apply (auto simp add: mult_ac)
-  apply (rule_tac x = "aa * a" in exI)
-  apply (auto simp add: mult_ac)
-  done
-
-lemma set_times_rearrange2: "(a::'a::semigroup_mult) *o (b *o C) =
-    (a * b) *o C"
-  by (auto simp add: elt_set_times_def mult_assoc)
-
-lemma set_times_rearrange3: "((a::'a::semigroup_mult) *o B) \<otimes> C =
-    a *o (B \<otimes> C)"
-  apply (auto simp add: elt_set_times_def set_times_def)
-   apply (blast intro: mult_ac)
-  apply (rule_tac x = "a * aa" in exI)
-  apply (rule conjI)
-   apply (rule_tac x = "aa" in bexI)
-    apply auto
-  apply (rule_tac x = "ba" in bexI)
-   apply (auto simp add: mult_ac)
-  done
-
-theorem set_times_rearrange4: "C \<otimes> ((a::'a::comm_monoid_mult) *o D) =
-    a *o (C \<otimes> D)"
-  apply (auto intro!: simp add: elt_set_times_def set_times_def
-    mult_ac)
-   apply (rule_tac x = "aa * ba" in exI)
-   apply (auto simp add: mult_ac)
-  done
-
-theorems set_times_rearranges = set_times_rearrange set_times_rearrange2
-  set_times_rearrange3 set_times_rearrange4
-
-lemma set_times_mono [intro]: "C <= D ==> a *o C <= a *o D"
-  by (auto simp add: elt_set_times_def)
-
-lemma set_times_mono2 [intro]: "(C::('a::times) set) <= D ==> E <= F ==>
-    C \<otimes> E <= D \<otimes> F"
-  by (auto simp add: set_times_def)
-
-lemma set_times_mono3 [intro]: "a : C ==> a *o D <= C \<otimes> D"
-  by (auto simp add: elt_set_times_def set_times_def)
-
-lemma set_times_mono4 [intro]: "(a::'a::comm_monoid_mult) : C ==>
-    a *o D <= D \<otimes> C"
-  by (auto simp add: elt_set_times_def set_times_def mult_ac)
-
-lemma set_times_mono5: "a:C ==> B <= D ==> a *o B <= C \<otimes> D"
-  apply (subgoal_tac "a *o B <= a *o D")
-   apply (erule order_trans)
-   apply (erule set_times_mono3)
-  apply (erule set_times_mono)
-  done
-
-lemma set_times_mono_b: "C <= D ==> x : a *o C
-    ==> x : a *o D"
-  apply (frule set_times_mono)
-  apply auto
-  done
-
-lemma set_times_mono2_b: "C <= D ==> E <= F ==> x : C \<otimes> E ==>
-    x : D \<otimes> F"
-  apply (frule set_times_mono2)
-   prefer 2
-   apply force
-  apply assumption
-  done
-
-lemma set_times_mono3_b: "a : C ==> x : a *o D ==> x : C \<otimes> D"
-  apply (frule set_times_mono3)
-  apply auto
-  done
-
-lemma set_times_mono4_b: "(a::'a::comm_monoid_mult) : C ==>
-    x : a *o D ==> x : D \<otimes> C"
-  apply (frule set_times_mono4)
-  apply auto
-  done
-
-lemma set_one_times [simp]: "(1::'a::comm_monoid_mult) *o C = C"
-  by (auto simp add: elt_set_times_def)
-
-lemma set_times_plus_distrib: "(a::'a::semiring) *o (b +o C)=
-    (a * b) +o (a *o C)"
-  by (auto simp add: elt_set_plus_def elt_set_times_def ring_distribs)
-
-lemma set_times_plus_distrib2: "(a::'a::semiring) *o (B \<oplus> C) =
-    (a *o B) \<oplus> (a *o C)"
-  apply (auto simp add: set_plus_def elt_set_times_def ring_distribs)
-   apply blast
-  apply (rule_tac x = "b + bb" in exI)
-  apply (auto simp add: ring_distribs)
-  done
-
-lemma set_times_plus_distrib3: "((a::'a::semiring) +o C) \<otimes> D <=
-    a *o D \<oplus> C \<otimes> D"
-  apply (auto simp add:
-    elt_set_plus_def elt_set_times_def set_times_def
-    set_plus_def ring_distribs)
-  apply auto
-  done
-
-theorems set_times_plus_distribs =
-  set_times_plus_distrib
-  set_times_plus_distrib2
-
-lemma set_neg_intro: "(a::'a::ring_1) : (- 1) *o C ==>
-    - a : C"
-  by (auto simp add: elt_set_times_def)
-
-lemma set_neg_intro2: "(a::'a::ring_1) : C ==>
-    - a : (- 1) *o C"
-  by (auto simp add: elt_set_times_def)
-
-lemma set_plus_image:
-  fixes S T :: "'n::semigroup_add set" shows "S \<oplus> T = (\<lambda>(x, y). x + y) ` (S \<times> T)"
-  unfolding set_plus_def by (fastforce simp: image_iff)
-
-lemma set_setsum_alt:
-  assumes fin: "finite I"
-  shows "setsum_set S I = {setsum s I |s. \<forall>i\<in>I. s i \<in> S i}"
-    (is "_ = ?setsum I")
-using fin
-proof induct
-  case (insert x F)
-  have "setsum_set S (insert x F) = S x \<oplus> ?setsum F"
-    using insert.hyps by auto
-  also have "...= {s x + setsum s F |s. \<forall> i\<in>insert x F. s i \<in> S i}"
-  proof -
-    {
-      fix y s assume "y \<in> S x" "\<forall>i\<in>F. s i \<in> S i"
-      then have "\<exists>s'. y + setsum s F = s' x + setsum s' F \<and> (\<forall>i\<in>insert x F. s' i \<in> S i)"
-        using insert.hyps
-        by (intro exI[of _ "\<lambda>i. if i \<in> F then s i else y"]) (auto simp add: set_plus_def)
-    }
-    then show ?thesis
-      unfolding set_plus_def by auto
-  qed
-  finally show ?case
-    using insert.hyps by auto
-qed auto
-
-lemma setsum_set_cond_linear:
-  fixes f :: "('a::comm_monoid_add) set \<Rightarrow> ('b::comm_monoid_add) set"
-  assumes [intro!]: "\<And>A B. P A  \<Longrightarrow> P B  \<Longrightarrow> P (A \<oplus> B)" "P {0}"
-    and f: "\<And>A B. P A  \<Longrightarrow> P B \<Longrightarrow> f (A \<oplus> B) = f A \<oplus> f B" "f {0} = {0}"
-  assumes all: "\<And>i. i \<in> I \<Longrightarrow> P (S i)"
-  shows "f (setsum_set S I) = setsum_set (f \<circ> S) I"
-proof cases
-  assume "finite I" from this all show ?thesis
-  proof induct
-    case (insert x F)
-    from `finite F` `\<And>i. i \<in> insert x F \<Longrightarrow> P (S i)` have "P (setsum_set S F)"
-      by induct auto
-    with insert show ?case
-      by (simp, subst f) auto
-  qed (auto intro!: f)
-qed (auto intro!: f)
-
-lemma setsum_set_linear:
-  fixes f :: "('a::comm_monoid_add) set => ('b::comm_monoid_add) set"
-  assumes "\<And>A B. f(A) \<oplus> f(B) = f(A \<oplus> B)" "f {0} = {0}"
-  shows "f (setsum_set S I) = setsum_set (f \<circ> S) I"
-  using setsum_set_cond_linear[of "\<lambda>x. True" f I S] assms by auto
-
-end
--- a/src/Pure/Concurrent/future.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Concurrent/future.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -52,8 +52,8 @@
   val peek: 'a future -> 'a Exn.result option
   val is_finished: 'a future -> bool
   val interruptible_task: ('a -> 'b) -> 'a -> 'b
-  val cancel_group: group -> task list
-  val cancel: 'a future -> task list
+  val cancel_group: group -> unit
+  val cancel: 'a future -> unit
   type params = {name: string, group: group option, deps: task list, pri: int, interrupts: bool}
   val default_params: params
   val forks: params -> (unit -> 'a) list -> 'a future list
@@ -74,6 +74,8 @@
   val fulfill: 'a future -> 'a -> unit
   val shutdown: unit -> unit
   val status: (unit -> 'a) -> 'a
+  val group_tasks: group -> task list
+  val queue_status: unit -> {ready: int, pending: int, running: int, passive: int}
 end;
 
 structure Future: FUTURE =
@@ -181,9 +183,9 @@
 
 fun cancel_now group = (*requires SYNCHRONIZED*)
   let
-    val (tasks, threads) = Task_Queue.cancel (! queue) group;
-    val _ = List.app Simple_Thread.interrupt_unsynchronized threads;
-  in tasks end;
+    val running = Task_Queue.cancel (! queue) group;
+    val _ = List.app Simple_Thread.interrupt_unsynchronized running;
+  in running end;
 
 fun cancel_all () = (*requires SYNCHRONIZED*)
   let
@@ -409,11 +411,10 @@
 
 fun cancel_group group = SYNCHRONIZED "cancel_group" (fn () =>
   let
-    val running = cancel_now group;
-    val _ =
-      if null running then ()
-      else (cancel_later group; signal work_available; scheduler_check ());
-  in running end);
+    val _ = if null (cancel_now group) then () else cancel_later group;
+    val _ = signal work_available;
+    val _ = scheduler_check ();
+  in () end);
 
 fun cancel x = cancel_group (Task_Queue.group_of_task (task_of x));
 
@@ -597,7 +598,7 @@
             else reraise exn;
     fun job () =
       Multithreading.with_attributes Multithreading.no_interrupts
-        (fn _ => assign () before abort ());
+        (fn _ => Exn.release (Exn.capture assign () before abort ()));
     val task = SYNCHRONIZED "enqueue_passive" (fn () =>
       Unsynchronized.change_result queue (Task_Queue.enqueue_passive group job));
   in Future {promised = true, task = task, result = result} end;
@@ -613,11 +614,16 @@
       val _ =
         Multithreading.with_attributes Multithreading.no_interrupts (fn _ =>
           let
-            val still_passive =
+            val passive_job =
               SYNCHRONIZED "fulfill_result" (fn () =>
                 Unsynchronized.change_result queue
                   (Task_Queue.dequeue_passive (Thread.self ()) task));
-          in if still_passive then worker_exec (task, [job]) else () end);
+          in
+            (case passive_job of
+              SOME true => worker_exec (task, [job])
+            | SOME false => ()
+            | NONE => ignore (job (not (Task_Queue.is_canceled group))))
+          end);
       val _ =
         if is_some (Single_Assignment.peek result) then ()
         else worker_waiting [task] (fn () => ignore (Single_Assignment.await result));
@@ -650,6 +656,13 @@
   in x end;
 
 
+(* queue status *)
+
+fun group_tasks group = Task_Queue.group_tasks (! queue) group;
+
+fun queue_status () = Task_Queue.status (! queue);
+
+
 (*final declarations of this structure!*)
 val map = map_future;
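(Illustrative only, not part of the changeset.) With the revised interface above, cancellation returns unit and the queue is inspected explicitly through the newly exported group_tasks and queue_status. A minimal Isabelle/ML sketch, assuming this version of Pure is loaded and using a trivial placeholder job:

  val x = (singleton o Future.forks) Future.default_params (fn () => 1 + 1);
  val group = Task_Queue.group_of_task (Future.task_of x);
  val _ = Future.cancel x;                   (*now returns unit instead of a task list*)
  val remaining = Future.group_tasks group;  (*explicit inspection of the group's tasks*)
  val {ready, pending, running, passive} = Future.queue_status ();
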
 
--- a/src/Pure/Concurrent/lazy.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Concurrent/lazy.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -61,7 +61,7 @@
             SOME e =>
               let
                 val res0 = Exn.capture (restore_attributes e) ();
-                val _ = Future.fulfill_result x res0;
+                val _ = Exn.capture (fn () => Future.fulfill_result x res0) ();
                 val res = Future.join_result x;
                 (*semantic race: some other threads might see the same
                   interrupt, until there is a fresh start*)
--- a/src/Pure/Concurrent/par_exn.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Concurrent/par_exn.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -14,7 +14,7 @@
   val release_first: 'a Exn.result list -> 'a list
 end;
 
-structure Par_Exn =
+structure Par_Exn: PAR_EXN =
 struct
 
 (* identification via serial numbers *)
--- a/src/Pure/Concurrent/par_list.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Concurrent/par_list.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -39,8 +39,7 @@
         Future.forks {name = name, group = SOME group, deps = [], pri = 0, interrupts = true}
           (map (fn x => fn () => f x) xs);
       val results = Future.join_results futures
-        handle exn =>
-          (if Exn.is_interrupt exn then ignore (Future.cancel_group group) else (); reraise exn);
+        handle exn => (if Exn.is_interrupt exn then Future.cancel_group group else (); reraise exn);
     in results end;
 
 fun map_name name f xs = Par_Exn.release_first (managed_results name f xs);
--- a/src/Pure/Concurrent/single_assignment.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Concurrent/single_assignment.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -46,7 +46,7 @@
 fun assign (v as Var {name, lock, cond, var}) x =
   Simple_Thread.synchronized name lock (fn () =>
     (case peek v of
-      SOME _ => raise Fail ("Duplicate assignment to variable " ^ quote name)
+      SOME _ => raise Fail ("Duplicate assignment to " ^ name)
     | NONE =>
         uninterruptible (fn _ => fn () =>
          (SingleAssignment.saset (var, x); ConditionVar.broadcast cond)) ()));
--- a/src/Pure/Concurrent/task_queue.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Concurrent/task_queue.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -28,16 +28,17 @@
   val waiting: task -> task list -> (unit -> 'a) -> 'a
   type queue
   val empty: queue
+  val group_tasks: queue -> group -> task list
   val known_task: queue -> task -> bool
   val all_passive: queue -> bool
   val status: queue -> {ready: int, pending: int, running: int, passive: int}
-  val cancel: queue -> group -> task list * Thread.thread list
+  val cancel: queue -> group -> Thread.thread list
   val cancel_all: queue -> group list * Thread.thread list
   val finish: task -> queue -> bool * queue
   val enqueue_passive: group -> (unit -> bool) -> queue -> task * queue
   val enqueue: string -> group -> task list -> int -> (bool -> bool) -> queue -> task * queue
   val extend: task -> (bool -> bool) -> queue -> queue option
-  val dequeue_passive: Thread.thread -> task -> queue -> bool * queue
+  val dequeue_passive: Thread.thread -> task -> queue -> bool option * queue
   val dequeue: Thread.thread -> queue -> (task * (bool -> bool) list) option * queue
   val dequeue_deps: Thread.thread -> task list -> queue ->
     (((task * (bool -> bool) list) option * task list) * queue)
@@ -210,6 +211,7 @@
 fun make_queue groups jobs = Queue {groups = groups, jobs = jobs};
 val empty = make_queue Inttab.empty Task_Graph.empty;
 
+fun group_tasks (Queue {groups, ...}) group = Tasks.keys (get_tasks groups (group_id group));
 fun known_task (Queue {jobs, ...}) task = can (Task_Graph.get_entry jobs) task;
 
 
@@ -254,12 +256,9 @@
   let
     val _ = cancel_group group Exn.Interrupt;
     val running =
-      Tasks.fold (fn (task, _) => fn (tasks, threads) =>
-          (case get_job jobs task of
-            Running thread => (task :: tasks, insert Thread.equal thread threads)
-          | Passive _ => (task :: tasks, threads)
-          | _ => (tasks, threads)))
-        (get_tasks groups (group_id group)) ([], []);
+      Tasks.fold (fn (task, _) =>
+          (case get_job jobs task of Running thread => insert Thread.equal thread | _ => I))
+        (get_tasks groups (group_id group)) [];
   in running end;
 
 fun cancel_all (Queue {jobs, ...}) =
@@ -318,8 +317,9 @@
   (case try (get_job jobs) task of
     SOME (Passive _) =>
       let val jobs' = set_job task (Running thread) jobs
-      in (true, make_queue groups jobs') end
-  | _ => (false, queue));
+      in (SOME true, make_queue groups jobs') end
+  | SOME _ => (SOME false, queue)
+  | NONE => (NONE, queue));
 
 fun dequeue thread (queue as Queue {groups, jobs}) =
   (case Task_Graph.get_first (uncurry ready_job) jobs of
--- a/src/Pure/General/linear_set.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/General/linear_set.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -18,6 +18,7 @@
   val update: key * 'a -> 'a T -> 'a T
   val iterate: key option ->
     ((key option * key) * 'a -> 'b -> 'b option) -> 'a T -> 'b -> 'b
+  val dest: 'a T -> (key * 'a) list
   val get_after: 'a T -> key option -> key option
   val insert_after: key option -> key * 'a -> 'a T -> 'a T
   val delete_after: key option -> 'a T -> 'a T
@@ -94,6 +95,8 @@
           end;
   in apply NONE (optional_start set opt_start) end;
 
+fun dest set = rev (iterate NONE (fn ((_, key), x) => SOME o cons (key, x)) set []);
+
 
 (* relative addressing *)
 
@@ -129,5 +132,15 @@
               |> del_entry key2;
           in (start, entries') end)));
 
+
+(* ML pretty-printing -- requires Poly/ML 5.3.0 or later *)
+
+val _ =
+  PolyML.addPrettyPrinter (fn depth => fn pretty => fn set =>
+    ml_pretty
+      (ML_Pretty.enum "," "{" "}"
+        (ML_Pretty.pair (pretty_ml o PolyML.prettyRepresentation) (pretty_ml o pretty))
+        (dest set, depth)));
+
 end;
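(Illustrative only, not part of the changeset.) A small sketch of the newly added dest operation, using a hypothetical instantiation of the Linear_Set functor over int keys; dest lists the entries in iteration order:

  structure Demo_Set = Linear_Set(type key = int val ord = int_ord);
  val entries =
    Demo_Set.empty
    |> Demo_Set.insert_after NONE (1, "a")      (*insert at the start*)
    |> Demo_Set.insert_after (SOME 1) (2, "b")  (*insert after key 1*)
    |> Demo_Set.dest;                           (*[(1, "a"), (2, "b")]*)
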
 
--- a/src/Pure/IsaMakefile	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/IsaMakefile	Mon Apr 16 19:01:57 2012 +0200
@@ -155,6 +155,7 @@
   ML/ml_parse.ML					\
   ML/ml_syntax.ML					\
   ML/ml_thms.ML						\
+  PIDE/command.ML					\
   PIDE/document.ML					\
   PIDE/isabelle_markup.ML				\
   PIDE/markup.ML					\
--- a/src/Pure/Isar/code.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Isar/code.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -66,7 +66,7 @@
   val is_constr: theory -> string -> bool
   val is_abstr: theory -> string -> bool
   val get_cert: theory -> ((thm * bool) list -> (thm * bool) list) -> string -> cert
-  val get_case_scheme: theory -> string -> (int * (int * string list)) option
+  val get_case_scheme: theory -> string -> (int * (int * string option list)) option
   val get_case_cong: theory -> string -> thm option
   val undefineds: theory -> string list
   val print_codesetup: theory -> unit
@@ -180,7 +180,7 @@
     (*with explicit history*),
   types: ((serial * ((string * sort) list * typ_spec)) list) Symtab.table
     (*with explicit history*),
-  cases: ((int * (int * string list)) * thm) Symtab.table * unit Symtab.table
+  cases: ((int * (int * string option list)) * thm) Symtab.table * unit Symtab.table
 };
 
 fun make_spec (history_concluded, (functions, (types, cases))) =
@@ -895,7 +895,7 @@
     fun analyze_cases cases =
       let
         val co_list = fold (AList.update (op =) o dest_case) cases [];
-      in map (the o AList.lookup (op =) co_list) params end;
+      in map (AList.lookup (op =) co_list) params end;
     fun analyze_let t =
       let
         val (head' $ arg, Var (param', _) $ arg') = Logic.dest_equals t;
@@ -948,10 +948,12 @@
                       :: Pretty.str "of"
                       :: map (Pretty.quote o Syntax.pretty_typ_global thy) tys)) cos)
       );
+    fun ignored_cases NONE = "<ignored>"
+      | ignored_cases (SOME c) = string_of_const thy c
     fun pretty_case (const, ((_, (_, [])), _)) = Pretty.str (string_of_const thy const)
       | pretty_case (const, ((_, (_, cos)), _)) = (Pretty.block o Pretty.breaks) [
           Pretty.str (string_of_const thy const), Pretty.str "with",
-          (Pretty.block o Pretty.commas o map (Pretty.str o string_of_const thy)) cos];
+          (Pretty.block o Pretty.commas o map (Pretty.str o ignored_cases)) cos];
     val functions = the_functions exec
       |> Symtab.dest
       |> (map o apsnd) (snd o fst)
@@ -1087,7 +1089,7 @@
 fun add_case thm thy =
   let
     val (case_const, (k, cos)) = case_cert thm;
-    val _ = case filter_out (is_constr thy) cos
+    val _ = case map_filter I cos |> filter_out (is_constr thy)
      of [] => ()
       | cs => error ("Non-constructor(s) in case certificate: " ^ commas_quote cs);
     val entry = (1 + Int.max (1, length cos), (k, cos));
@@ -1095,7 +1097,7 @@
       (Symtab.update (case_const, (entry, cong)));
     fun register_for_constructors (Constructors (cos', cases)) =
          Constructors (cos',
-           if exists (fn (co, _) => member (op =) cos co) cos'
+           if exists (fn (co, _) => member (op =) cos (SOME co)) cos'
            then insert (op =) case_const cases
            else cases)
       | register_for_constructors (x as Abstractor _) = x;
@@ -1131,7 +1133,7 @@
             ((the_functions o the_exec) thy) [old_proj];
     fun drop_outdated_cases cases = fold Symtab.delete_safe
       (Symtab.fold (fn (c, ((_, (_, cos)), _)) =>
-        if exists (member (op =) old_constrs) cos
+        if exists (member (op =) old_constrs) (map_filter I cos)
           then insert (op =) c else I) cases []) cases;
   in
     thy
--- a/src/Pure/Isar/isar_syn.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Isar/isar_syn.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -464,7 +464,7 @@
       (Parse.where_ |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":" -- Parse.prop)) [];
 
 val _ =
-  Outer_Syntax.command ("sublocale", Keyword.thy_goal)
+  Outer_Syntax.command ("sublocale", Keyword.thy_schematic_goal)
     "prove sublocale relation between a locale and a locale expression"
     (Parse.position Parse.xname --| (Parse.$$$ "\\<subseteq>" || Parse.$$$ "<") --
       parse_interpretation_arguments false
@@ -472,7 +472,7 @@
           Toplevel.print o Toplevel.theory_to_proof (Expression.sublocale_cmd I loc expr equations)));
 
 val _ =
-  Outer_Syntax.command ("interpretation", Keyword.thy_goal)
+  Outer_Syntax.command ("interpretation", Keyword.thy_schematic_goal)
     "prove interpretation of locale expression in theory"
     (parse_interpretation_arguments true
       >> (fn (expr, equations) => Toplevel.print o
--- a/src/Pure/Isar/outer_syntax.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Isar/outer_syntax.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -296,7 +296,8 @@
     val (tr, proper_head) = read head |>> Toplevel.modify_init init;
     val proof_trs = map read proof |> filter #2 |> map #1;
   in
-    if proper_head andalso proper_proof then [(tr, proof_trs)]
+    if proper_head andalso proper_proof andalso
+      not (Keyword.is_schematic_goal (Toplevel.name_of tr)) then [(tr, proof_trs)]
     else map (rpair []) (if proper_head then tr :: proof_trs else proof_trs)
   end;
 
--- a/src/Pure/Isar/outer_syntax.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Isar/outer_syntax.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -41,7 +41,7 @@
 }
 
 final class Outer_Syntax private(
-  keywords: Map[String, String] = Map((";" -> Keyword.DIAG)),
+  keywords: Map[String, String] = Map.empty,
   lexicon: Scan.Lexicon = Scan.Lexicon.empty,
   val completion: Completion = Completion.empty)
 {
--- a/src/Pure/Isar/proof.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Isar/proof.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -112,11 +112,8 @@
   val show_cmd: Method.text option -> (thm list list -> state -> state) ->
     (Attrib.binding * (string * string list) list) list -> bool -> state -> state
   val schematic_goal: state -> bool
-  val is_relevant: state -> bool
-  val local_future_proof: (state -> ('a * state) Future.future) ->
-    state -> 'a Future.future * state
-  val global_future_proof: (state -> ('a * Proof.context) Future.future) ->
-    state -> 'a Future.future * context
+  val local_future_proof: (state -> ('a * state) future) -> state -> 'a future * state
+  val global_future_proof: (state -> ('a * Proof.context) future) -> state -> 'a future * context
   val local_future_terminal_proof: Method.text * Method.text option -> bool -> state -> state
   val global_future_terminal_proof: Method.text * Method.text option -> bool -> state -> context
 end;
@@ -201,6 +198,9 @@
 val context_of = #context o current;
 val theory_of = Proof_Context.theory_of o context_of;
 
+fun map_node_context f =
+  map_node (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal));
+
 fun map_context f =
   map_current (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal));
 
@@ -295,24 +295,27 @@
 
 (* nested goal *)
 
-fun map_goal f g (State (Node {context, facts, mode, goal = SOME goal}, nodes)) =
+fun map_goal f g h (State (Node {context, facts, mode, goal = SOME goal}, node :: nodes)) =
       let
         val Goal {statement, messages, using, goal, before_qed, after_qed} = goal;
         val goal' = make_goal (g (statement, messages, using, goal, before_qed, after_qed));
-      in State (make_node (f context, facts, mode, SOME goal'), nodes) end
-  | map_goal f g (State (nd, node :: nodes)) =
-      let val State (node', nodes') = map_goal f g (State (node, nodes))
-      in map_context f (State (nd, node' :: nodes')) end
-  | map_goal _ _ state = state;
+        val node' = map_node_context h node;
+      in State (make_node (f context, facts, mode, SOME goal'), node' :: nodes) end
+  | map_goal f g h (State (nd, node :: nodes)) =
+      let
+        val nd' = map_node_context f nd;
+        val State (node', nodes') = map_goal f g h (State (node, nodes));
+      in State (nd', node' :: nodes') end
+  | map_goal _ _ _ state = state;
 
 fun provide_goal goal = map_goal I (fn (statement, _, using, _, before_qed, after_qed) =>
-  (statement, [], using, goal, before_qed, after_qed));
+  (statement, [], using, goal, before_qed, after_qed)) I;
 
 fun goal_message msg = map_goal I (fn (statement, messages, using, goal, before_qed, after_qed) =>
-  (statement, msg :: messages, using, goal, before_qed, after_qed));
+  (statement, msg :: messages, using, goal, before_qed, after_qed)) I;
 
 fun using_facts using = map_goal I (fn (statement, _, _, goal, before_qed, after_qed) =>
-  (statement, [], using, goal, before_qed, after_qed));
+  (statement, [], using, goal, before_qed, after_qed)) I;
 
 local
   fun find i state =
@@ -410,7 +413,7 @@
       |> map_goal
           (Proof_Context.add_cases false (no_goal_cases goal @ goal_cases goal') #>
            Proof_Context.add_cases true meth_cases)
-          (K (statement, [], using, goal', before_qed, after_qed)))
+          (K (statement, [], using, goal', before_qed, after_qed)) I)
   end;
 
 fun select_goals n meth state =
@@ -720,7 +723,7 @@
       let
         val ctxt = context_of state';
         val ths = maps snd named_facts;
-      in (statement, [], f ctxt ths using, g ctxt ths goal, before_qed, after_qed) end));
+      in (statement, [], f ctxt ths using, g ctxt ths goal, before_qed, after_qed) end) I);
 
 fun append_using _ ths using = using @ filter_out Thm.is_dummy ths;
 fun unfold_using ctxt ths = map (Local_Defs.unfold ctxt ths);
@@ -1077,7 +1080,9 @@
     val _ = assert_backward state;
     val (goal_ctxt, (_, goal)) = find_goal state;
     val {statement as (kind, _, prop), messages, using, goal, before_qed, after_qed} = goal;
-    val goal_txt = prop :: map Thm.term_of (Assumption.all_assms_of goal_ctxt);
+    val goal_tfrees =
+      fold Term.add_tfrees
+        (prop :: map Thm.term_of (Assumption.all_assms_of goal_ctxt)) [];
 
     val _ = is_relevant state andalso error "Cannot fork relevant proof";
 
@@ -1093,8 +1098,8 @@
 
     val result_ctxt =
       state
-      |> map_contexts (fold Variable.declare_term goal_txt)
       |> map_goal I (K (statement', messages, using, goal', before_qed, after_qed'))
+        (fold (Variable.declare_typ o TFree) goal_tfrees)
       |> fork_proof;
 
     val future_thm = result_ctxt |> Future.map (fn (_, x) =>
@@ -1102,7 +1107,7 @@
     val finished_goal = Goal.future_result goal_ctxt future_thm prop';
     val state' =
       state
-      |> map_goal I (K (statement, messages, using, finished_goal, NONE, after_qed))
+      |> map_goal I (K (statement, messages, using, finished_goal, NONE, after_qed)) I
       |> done;
   in (Future.map #1 result_ctxt, state') end;
 
--- a/src/Pure/Isar/toplevel.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Isar/toplevel.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -659,55 +659,44 @@
 
 fun command_result tr st =
   let val st' = command tr st
-  in (st', st') end;
+  in ((tr, st'), st') end;
 
 
 (* scheduled proof result *)
 
-structure States = Proof_Data
+structure Result = Proof_Data
 (
-  type T = state list future option;
-  fun init _ = NONE;
+  type T = (transition * state) list future;
+  val empty: T = Future.value [];
+  fun init _ = empty;
 );
 
 fun proof_result immediate (tr, proof_trs) st =
-  let val st' = command tr st in
-    if immediate orelse
-      null proof_trs orelse
-      Keyword.is_schematic_goal (name_of tr) orelse
-      exists (Keyword.is_qed_global o name_of) proof_trs orelse
-      not (can proof_of st') orelse
-      Proof.is_relevant (proof_of st')
-    then
-      let val (states, st'') = fold_map command_result proof_trs st'
-      in (Future.value ((tr, st') :: (proof_trs ~~ states)), st'') end
-    else
-      let
-        val (body_trs, end_tr) = split_last proof_trs;
-        val finish = Context.Theory o Proof_Context.theory_of;
+  if immediate orelse null proof_trs
+  then fold_map command_result (tr :: proof_trs) st |>> Future.value
+  else
+    let
+      val st' = command tr st;
+      val (body_trs, end_tr) = split_last proof_trs;
+      val finish = Context.Theory o Proof_Context.theory_of;
 
-        val future_proof = Proof.global_future_proof
-          (fn prf =>
-            Goal.fork_name "Toplevel.future_proof"
-              (fn () =>
-                let val (states, result_state) =
-                  (case st' of State (SOME (Proof (_, (_, orig_gthy))), prev)
-                    => State (SOME (Proof (Proof_Node.init prf, (finish, orig_gthy))), prev))
-                  |> fold_map command_result body_trs
-                  ||> command (end_tr |> set_print false);
-                in (states, presentation_context_of result_state) end))
-          #> (fn (states, ctxt) => States.put (SOME states) ctxt);
+      val future_proof = Proof.global_future_proof
+        (fn prf =>
+          Goal.fork_name "Toplevel.future_proof"
+            (fn () =>
+              let val (result, result_state) =
+                (case st' of State (SOME (Proof (_, (_, orig_gthy))), prev)
+                  => State (SOME (Proof (Proof_Node.init prf, (finish, orig_gthy))), prev))
+                |> fold_map command_result body_trs ||> command end_tr;
+              in (result, presentation_context_of result_state) end))
+        #-> Result.put;
 
-        val st'' = st' |> command (end_tr |> reset_trans |> end_proof (K future_proof));
+      val st'' = st'
+        |> command (tr |> set_print false |> reset_trans |> end_proof (K future_proof));
+      val result =
+        Result.get (presentation_context_of st'')
+        |> Future.map (fn body => (tr, st') :: body @ [(end_tr, st'')]);
 
-        val states =
-          (case States.get (presentation_context_of st'') of
-            NONE => raise Fail ("No future states for " ^ name_of tr ^ Position.str_of (pos_of tr))
-          | SOME states => states);
-        val result = states
-          |> Future.map (fn sts => (tr, st') :: (body_trs ~~ sts) @ [(end_tr, st'')]);
-
-      in (result, st'') end
-  end;
+    in (result, st'') end;
 
 end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/PIDE/command.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,109 @@
+(*  Title:      Pure/PIDE/command.ML
+    Author:     Makarius
+
+Prover command execution.
+*)
+
+signature COMMAND =
+sig
+  type 'a memo
+  val memo: (unit -> 'a) -> 'a memo
+  val memo_value: 'a -> 'a memo
+  val memo_eval: 'a memo -> 'a
+  val memo_result: 'a memo -> 'a
+  val run_command: Toplevel.transition -> Toplevel.state -> Toplevel.state * unit lazy
+end;
+
+structure Command: COMMAND =
+struct
+
+(* memo results *)
+
+datatype 'a expr =
+  Expr of unit -> 'a |
+  Result of 'a Exn.result;
+
+abstype 'a memo = Memo of 'a expr Synchronized.var
+with
+
+fun memo e = Memo (Synchronized.var "Command.memo" (Expr e));
+fun memo_value a = Memo (Synchronized.var "Command.memo" (Result (Exn.Res a)));
+
+fun memo_eval (Memo v) =
+  (case Synchronized.value v of
+    Result res => res
+  | _ =>
+      Synchronized.guarded_access v
+        (fn Result res => SOME (res, Result res)
+          | Expr e =>
+              let val res = Exn.capture e ();  (*memoing of physical interrupts!*)
+              in SOME (res, Result res) end))
+  |> Exn.release;
+
+fun memo_result (Memo v) =
+  (case Synchronized.value v of
+    Result res => Exn.release res
+  | _ => raise Fail "Unfinished memo result");
+
+end;
+
+
+(* run command *)
+
+local
+
+fun run int tr st =
+  (case Toplevel.transition int tr st of
+    SOME (st', NONE) => ([], SOME st')
+  | SOME (_, SOME (exn, _)) => (ML_Compiler.exn_messages exn, NONE)
+  | NONE => ([(serial (), ML_Compiler.exn_message Runtime.TERMINATE)], NONE));
+
+fun timing tr t =
+  if Timing.is_relevant t then Toplevel.status tr (Isabelle_Markup.timing t) else ();
+
+fun proof_status tr st =
+  (case try Toplevel.proof_of st of
+    SOME prf => Toplevel.status tr (Proof.status_markup prf)
+  | NONE => ());
+
+val no_print = Lazy.value ();
+
+fun print_state tr st =
+  (Lazy.lazy o Toplevel.setmp_thread_position tr)
+    (fn () => Toplevel.print_state false st);
+
+in
+
+fun run_command tr st =
+  let
+    val is_init = Toplevel.is_init tr;
+    val is_proof = Keyword.is_proof (Toplevel.name_of tr);
+
+    val _ = Multithreading.interrupted ();
+    val _ = Toplevel.status tr Isabelle_Markup.forked;
+    val start = Timing.start ();
+    val (errs, result) = run (is_init orelse is_proof) (Toplevel.set_print false tr) st;
+    val _ = timing tr (Timing.result start);
+    val _ = Toplevel.status tr Isabelle_Markup.joined;
+    val _ = List.app (Toplevel.error_msg tr) errs;
+  in
+    (case result of
+      NONE =>
+        let
+          val _ = if null errs then Exn.interrupt () else ();
+          val _ = Toplevel.status tr Isabelle_Markup.failed;
+        in (st, no_print) end
+    | SOME st' =>
+        let
+          val _ = Toplevel.status tr Isabelle_Markup.finished;
+          val _ = proof_status tr st';
+          val do_print =
+            not is_init andalso
+              (Toplevel.print_of tr orelse (is_proof andalso Toplevel.is_proof st'));
+        in (st', if do_print then print_state tr st' else no_print) end)
+  end;
+
+end;
+
+end;
+
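(Illustrative only, not part of the changeset.) A brief sketch of how the memo combinators defined above behave in Isabelle/ML, with a placeholder computation:

  val m = Command.memo (fn () => 6 * 7);  (*evaluation is delayed*)
  val a = Command.memo_eval m;            (*runs the body once and caches the result: 42*)
  val b = Command.memo_result m;          (*returns the cached 42; fails while unfinished*)
  val c = Command.memo_value 0;           (*a memo that is finished from the start*)
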
--- a/src/Pure/PIDE/command.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/command.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -145,7 +145,7 @@
   val range: Text.Range = Text.Range(0, length)
 
   val proper_range: Text.Range =
-    Text.Range(0, (length /: span.reverse.iterator.takeWhile(_.is_ignored))(_ - _.source.length))
+    Text.Range(0, (length /: span.reverse.iterator.takeWhile(_.is_space))(_ - _.source.length))
 
   def source(range: Text.Range): String = source.substring(range.start, range.stop)
 
--- a/src/Pure/PIDE/document.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/document.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -25,12 +25,12 @@
   type state
   val init_state: state
   val define_command: command_id -> string -> string -> state -> state
-  val cancel_execution: state -> Future.task list
-  val update_perspective: version_id -> version_id -> string -> command_id list -> state -> state
+  val remove_versions: version_id list -> state -> state
+  val discontinue_execution: state -> unit
+  val cancel_execution: state -> unit
+  val start_execution: state -> unit
   val update: version_id -> version_id -> edit list -> state ->
-    ((command_id * exec_id option) list * (string * command_id option) list) * state
-  val execute: version_id -> state -> state
-  val remove_versions: version_id list -> state -> state
+    (command_id * exec_id option) list * state
   val state: unit -> state
   val change_state: (state -> state) -> unit
 end;
@@ -59,62 +59,52 @@
 (** document structure **)
 
 type node_header = (string * Thy_Header.header) Exn.result;
-type perspective = (command_id -> bool) * command_id option; (*visible commands, last*)
+type perspective = (command_id -> bool) * command_id option;
 structure Entries = Linear_Set(type key = command_id val ord = int_ord);
 
-type exec = exec_id * (Toplevel.state * unit lazy) lazy;  (*eval/print process*)
-val no_print = Lazy.value ();
-val no_exec = (no_id, Lazy.value (Toplevel.toplevel, no_print));
+type exec = (Toplevel.state * unit lazy) Command.memo;  (*eval/print process*)
+val no_exec = Command.memo_value (Toplevel.toplevel, Lazy.value ());
 
 abstype node = Node of
- {touched: bool,
-  header: node_header,
-  perspective: perspective,
-  entries: exec option Entries.T,  (*command entries with excecutions*)
-  last_exec: command_id option,  (*last command with exec state assignment*)
-  result: Toplevel.state lazy}
+ {header: node_header,
+  perspective: perspective,  (*visible commands, last*)
+  entries: (exec_id * exec) option Entries.T,  (*command entries with executions*)
+  result: exec option}  (*result of last execution*)
 and version = Version of node Graph.T  (*development graph wrt. static imports*)
 with
 
-fun make_node (touched, header, perspective, entries, last_exec, result) =
-  Node {touched = touched, header = header, perspective = perspective,
-    entries = entries, last_exec = last_exec, result = result};
+fun make_node (header, perspective, entries, result) =
+  Node {header = header, perspective = perspective, entries = entries, result = result};
 
-fun map_node f (Node {touched, header, perspective, entries, last_exec, result}) =
-  make_node (f (touched, header, perspective, entries, last_exec, result));
+fun map_node f (Node {header, perspective, entries, result}) =
+  make_node (f (header, perspective, entries, result));
 
 fun make_perspective command_ids : perspective =
   (Inttab.defined (Inttab.make (map (rpair ()) command_ids)), try List.last command_ids);
 
 val no_header = Exn.Exn (ERROR "Bad theory header");
 val no_perspective = make_perspective [];
-val no_result = Lazy.value Toplevel.toplevel;
 
-val empty_node = make_node (false, no_header, no_perspective, Entries.empty, NONE, no_result);
-val clear_node = map_node (fn (_, header, _, _, _, _) =>
-  (false, header, no_perspective, Entries.empty, NONE, no_result));
+val empty_node = make_node (no_header, no_perspective, Entries.empty, NONE);
+val clear_node = map_node (fn (header, _, _, _) => (header, no_perspective, Entries.empty, NONE));
 
 
 (* basic components *)
 
-fun is_touched (Node {touched, ...}) = touched;
-fun set_touched touched =
-  map_node (fn (_, header, perspective, entries, last_exec, result) =>
-    (touched, header, perspective, entries, last_exec, result));
-
 fun get_header (Node {header, ...}) = header;
 fun set_header header =
-  map_node (fn (touched, _, perspective, entries, last_exec, result) =>
-    (touched, header, perspective, entries, last_exec, result));
+  map_node (fn (_, perspective, entries, result) => (header, perspective, entries, result));
 
 fun get_perspective (Node {perspective, ...}) = perspective;
 fun set_perspective ids =
-  map_node (fn (touched, header, _, entries, last_exec, result) =>
-    (touched, header, make_perspective ids, entries, last_exec, result));
+  map_node (fn (header, _, entries, result) => (header, make_perspective ids, entries, result));
+
+val visible_command = #1 o get_perspective;
+val visible_last = #2 o get_perspective;
+val visible_node = is_some o visible_last
 
 fun map_entries f =
-  map_node (fn (touched, header, perspective, entries, last_exec, result) =>
-    (touched, header, perspective, f entries, last_exec, result));
+  map_node (fn (header, perspective, entries, result) => (header, perspective, f entries, result));
 fun get_entries (Node {entries, ...}) = entries;
 
 fun iterate_entries f = Entries.iterate NONE f o get_entries;
@@ -123,15 +113,9 @@
     NONE => I
   | SOME id => Entries.iterate (SOME id) f entries);
 
-fun get_last_exec (Node {last_exec, ...}) = last_exec;
-fun set_last_exec last_exec =
-  map_node (fn (touched, header, perspective, entries, _, result) =>
-    (touched, header, perspective, entries, last_exec, result));
-
-fun get_theory pos (Node {result, ...}) = Toplevel.end_theory pos (Lazy.force result);
+fun get_result (Node {result, ...}) = result;
 fun set_result result =
-  map_node (fn (touched, header, perspective, entries, last_exec, _) =>
-    (touched, header, perspective, entries, last_exec, result));
+  map_node (fn (header, perspective, entries, _) => (header, perspective, entries, result));
 
 fun get_node nodes name = Graph.get_node nodes name handle Graph.UNDEF _ => empty_node;
 fun default_node name = Graph.default_node (name, empty_node);
@@ -160,8 +144,8 @@
     NONE => err_undef "command entry" id
   | SOME (exec, _) => exec);
 
-fun the_default_entry node (SOME id) = (id, (the_default no_exec (the_entry node id)))
-  | the_default_entry _ NONE = (no_id, no_exec);
+fun the_default_entry node (SOME id) = (id, the_default (no_id, no_exec) (the_entry node id))
+  | the_default_entry _ NONE = (no_id, (no_id, no_exec));
 
 fun update_entry id exec =
   map_entries (Entries.update (id, exec));
@@ -186,30 +170,13 @@
 fun nodes_of (Version nodes) = nodes;
 val node_of = get_node o nodes_of;
 
-local
-
 fun cycle_msg names = "Cyclic dependency of " ^ space_implode " via " (map quote names);
 
-fun touch_node name nodes =
-  fold (fn desc =>
-      update_node desc
-        (set_touched true #>
-          desc <> name ? (map_entries (reset_after NONE) #> set_result no_result)))
-    (Graph.all_succs nodes [name]) nodes;
-
-in
-
 fun edit_nodes (name, node_edit) (Version nodes) =
   Version
     (case node_edit of
-      Clear =>
-        nodes
-        |> update_node name clear_node
-        |> touch_node name
-    | Edits edits =>
-        nodes
-        |> update_node name (edit_node edits)
-        |> touch_node name
+      Clear => update_node name clear_node nodes
+    | Edits edits => update_node name (edit_node edits) nodes
     | Header header =>
         let
           val imports = (case header of Exn.Res (_, {imports, ...}) => imports | _ => []);
@@ -226,11 +193,7 @@
             (header', Graph.add_deps_acyclic (name, imports) nodes2)
               handle Graph.CYCLES cs => (Exn.Exn (ERROR (cat_lines (map cycle_msg cs))), nodes2);
         in Graph.map_node name (set_header header'') nodes3 end
-        |> touch_node name
-    | Perspective perspective =>
-        update_node name (set_perspective perspective) nodes);
-
-end;
+    | Perspective perspective => update_node name (set_perspective perspective) nodes);
 
 fun put_node (name, node) (Version nodes) =
   Version (update_node name (K node) nodes);
@@ -239,12 +202,12 @@
 
 
 
-(** global state -- document structure and execution process **)
+(** main state -- document structure and execution process **)
 
 abstype state = State of
  {versions: version Inttab.table,  (*version_id -> document content*)
-  commands: (string * Token.T list lazy) Inttab.table,  (*command_id -> name * span*)
-  execution: version_id * Future.group}  (*current execution process*)
+  commands: (string * Token.T list lazy) Inttab.table,  (*command_id -> named span*)
+  execution: version_id * Future.group * bool Unsynchronized.ref}  (*current execution process*)
 with
 
 fun make_state (versions, commands, execution) =
@@ -254,16 +217,21 @@
   make_state (f (versions, commands, execution));
 
 val init_state =
-  make_state (Inttab.make [(no_id, empty_version)], Inttab.empty, (no_id, Future.new_group NONE));
+  make_state (Inttab.make [(no_id, empty_version)], Inttab.empty,
+    (no_id, Future.new_group NONE, Unsynchronized.ref false));
+
+fun execution_of (State {execution, ...}) = execution;
 
 
 (* document versions *)
 
 fun define_version (id: version_id) version =
-  map_state (fn (versions, commands, execution) =>
-    let val versions' = Inttab.update_new (id, version) versions
-      handle Inttab.DUP dup => err_dup "document version" dup
-    in (versions', commands, execution) end);
+  map_state (fn (versions, commands, _) =>
+    let
+      val versions' = Inttab.update_new (id, version) versions
+        handle Inttab.DUP dup => err_dup "document version" dup;
+      val execution' = (id, Future.new_group NONE, Unsynchronized.ref true);
+    in (versions', commands, execution') end);
 
 fun the_version (State {versions, ...}) (id: version_id) =
   (case Inttab.lookup versions id of
@@ -276,18 +244,18 @@
 
 (* commands *)
 
-fun parse_command id text =
-  Position.setmp_thread_data (Position.id_only id)
-    (fn () =>
-      let
-        val toks = Thy_Syntax.parse_tokens (#1 (Outer_Syntax.get_syntax ())) (Position.id id) text;
-        val _ = Output.status (Markup.markup_only Isabelle_Markup.parsed);
-      in toks end) ();
-
 fun define_command (id: command_id) name text =
   map_state (fn (versions, commands, execution) =>
     let
-      val span = Lazy.lazy (fn () => parse_command (print_id id) text);
+      val id_string = print_id id;
+      val span = Lazy.lazy (fn () =>
+        Position.setmp_thread_data (Position.id_only id_string)
+          (fn () =>
+            Thy_Syntax.parse_tokens
+              (#1 (Outer_Syntax.get_syntax ())) (Position.id id_string) text) ());
+      val _ =
+        Position.setmp_thread_data (Position.id_only id_string)
+          (fn () => Output.status (Markup.markup_only Isabelle_Markup.accepted)) ();
       val commands' =
         Inttab.update_new (id, (name, span)) commands
           handle Inttab.DUP dup => err_dup "command" dup;
@@ -298,275 +266,8 @@
     NONE => err_undef "command" id
   | SOME command => command);
 
-
-(* document execution *)
-
-fun cancel_execution (State {execution, ...}) = Future.cancel_group (#2 execution);
-
 end;
 
-
-(* toplevel transactions *)
-
-local
-
-fun timing tr t =
-  if Timing.is_relevant t then Toplevel.status tr (Isabelle_Markup.timing t) else ();
-
-fun proof_status tr st =
-  (case try Toplevel.proof_of st of
-    SOME prf => Toplevel.status tr (Proof.status_markup prf)
-  | NONE => ());
-
-fun print_state tr st =
-  (Lazy.lazy o Toplevel.setmp_thread_position tr)
-    (fn () => Toplevel.print_state false st);
-
-fun run int tr st =
-  (case Toplevel.transition int tr st of
-    SOME (st', NONE) => ([], SOME st')
-  | SOME (_, SOME (exn, _)) => (ML_Compiler.exn_messages exn, NONE)
-  | NONE => ([(serial (), ML_Compiler.exn_message Runtime.TERMINATE)], NONE));
-
-in
-
-fun run_command tr st =
-  let
-    val is_init = Toplevel.is_init tr;
-    val is_proof = Keyword.is_proof (Toplevel.name_of tr);
-
-    val _ = Multithreading.interrupted ();
-    val _ = Toplevel.status tr Isabelle_Markup.forked;
-    val start = Timing.start ();
-    val (errs, result) = run (is_init orelse is_proof) (Toplevel.set_print false tr) st;
-    val _ = timing tr (Timing.result start);
-    val _ = Toplevel.status tr Isabelle_Markup.joined;
-    val _ = List.app (Toplevel.error_msg tr) errs;
-  in
-    (case result of
-      NONE =>
-        let
-          val _ = if null errs then Exn.interrupt () else ();
-          val _ = Toplevel.status tr Isabelle_Markup.failed;
-        in (st, no_print) end
-    | SOME st' =>
-        let
-          val _ = Toplevel.status tr Isabelle_Markup.finished;
-          val _ = proof_status tr st';
-          val do_print =
-            not is_init andalso
-              (Toplevel.print_of tr orelse (is_proof andalso Toplevel.is_proof st'));
-        in (st', if do_print then print_state tr st' else no_print) end)
-  end;
-
-end;
-
-
-
-
-(** update **)
-
-(* perspective *)
-
-fun update_perspective (old_id: version_id) (new_id: version_id) name perspective state =
-  let
-    val old_version = the_version state old_id;
-    val _ = Time.now ();  (* FIXME odd workaround for polyml-5.4.0/x86_64 -- needed? *)
-    val new_version = edit_nodes (name, Perspective perspective) old_version;
-  in define_version new_id new_version state end;
-
-
-(* edits *)
-
-local
-
-fun last_common state last_visible node0 node =
-  let
-    fun update_flags prev (visible, initial) =
-      let
-        val visible' = visible andalso prev <> last_visible;
-        val initial' = initial andalso
-          (case prev of
-            NONE => true
-          | SOME id => not (Keyword.is_theory_begin (#1 (the_command state id))));
-      in (visible', initial') end;
-    fun get_common ((prev, id), exec) (found, (_, flags)) =
-      if found then NONE
-      else
-        let val found' =
-          is_none exec orelse op <> (pairself (Option.map #1) (exec, lookup_entry node0 id));
-        in SOME (found', (prev, update_flags prev flags)) end;
-    val (found, (common, flags)) =
-      iterate_entries get_common node (false, (NONE, (true, true)));
-  in
-    if found then (common, flags)
-    else
-      let val last = Entries.get_after (get_entries node) common
-      in (last, update_flags last flags) end
-  end;
-
-fun illegal_init () = error "Illegal theory header after end of theory";
-
-fun new_exec state bad_init command_id' (execs, command_exec, init) =
-  if bad_init andalso is_none init then NONE
-  else
-    let
-      val (name, span) = the_command state command_id' ||> Lazy.force;
-      val (modify_init, init') =
-        if Keyword.is_theory_begin name then
-          (Toplevel.modify_init (the_default illegal_init init), NONE)
-        else (I, init);
-      val exec_id' = new_id ();
-      val exec_id'_string = print_id exec_id';
-      val tr =
-        Position.setmp_thread_data (Position.id_only exec_id'_string)
-          (fn () =>
-            #1 (Outer_Syntax.read_span (#2 (Outer_Syntax.get_syntax ())) span)
-            |> modify_init
-            |> Toplevel.put_id exec_id'_string);
-      val exec' = snd (snd command_exec) |> Lazy.map (fn (st, _) => run_command (tr ()) st);
-      val command_exec' = (command_id', (exec_id', exec'));
-    in SOME (command_exec' :: execs, command_exec', init') end;
-
-fun make_required nodes =
-  let
-    val all_visible =
-      Graph.fold (fn (a, (node, _)) => is_some (#2 (get_perspective node)) ? cons a) nodes []
-      |> Graph.all_preds nodes;
-    val required =
-      fold (fn a => exists (fn b => Graph.is_edge nodes (a, b)) all_visible ? Symtab.update (a, ()))
-        all_visible Symtab.empty;
-  in Symtab.defined required end;
-
-fun check_theory nodes name =
-  is_some (Thy_Info.lookup_theory name) orelse  (* FIXME more robust!? *)
-  is_some (Exn.get_res (get_header (get_node nodes name)));
-
-fun init_theory deps node =
-  let
-    (* FIXME provide files via Scala layer, not master_dir *)
-    val (dir, header) = Exn.release (get_header node);
-    val master_dir =
-      (case Url.explode dir of
-        Url.File path => path
-      | _ => Path.current);
-    val parents =
-      #imports header |> map (fn import =>
-        (case Thy_Info.lookup_theory import of  (* FIXME more robust!? *)
-          SOME thy => thy
-        | NONE =>
-            get_theory (Position.file_only import)
-              (snd (Future.join (the (AList.lookup (op =) deps import))))));
-  in Thy_Load.begin_theory master_dir header parents end;
-
-in
-
-fun update (old_id: version_id) (new_id: version_id) edits state =
-  let
-    val old_version = the_version state old_id;
-    val _ = Time.now ();  (* FIXME odd workaround for polyml-5.4.0/x86_64 *)
-    val new_version = fold edit_nodes edits old_version;
-
-    val nodes = nodes_of new_version;
-    val is_required = make_required nodes;
-
-    val updated =
-      nodes |> Graph.schedule
-        (fn deps => fn (name, node) =>
-          if not (is_touched node orelse is_required name)
-          then Future.value (([], [], []), node)
-          else
-            let
-              val node0 = node_of old_version name;
-              fun init () = init_theory deps node;
-              val bad_init =
-                not (check_theory nodes name andalso forall (check_theory nodes o #1) deps);
-            in
-              (singleton o Future.forks)
-                {name = "Document.update", group = NONE,
-                  deps = map (Future.task_of o #2) deps, pri = 0, interrupts = false}
-                (fn () =>
-                  let
-                    val required = is_required name;
-                    val last_visible = #2 (get_perspective node);
-                    val (common, (visible, initial)) = last_common state last_visible node0 node;
-                    val common_command_exec = the_default_entry node common;
-
-                    val (execs, (command_id, (_, exec)), _) =
-                      ([], common_command_exec, if initial then SOME init else NONE) |>
-                      (visible orelse required) ?
-                        iterate_entries_after common
-                          (fn ((prev, id), _) => fn res =>
-                            if not required andalso prev = last_visible then NONE
-                            else new_exec state bad_init id res) node;
-
-                    val no_execs =
-                      iterate_entries_after common
-                        (fn ((_, id0), exec0) => fn res =>
-                          if is_none exec0 then NONE
-                          else if exists (fn (_, (id, _)) => id0 = id) execs then SOME res
-                          else SOME (id0 :: res)) node0 [];
-
-                    val last_exec = if command_id = no_id then NONE else SOME command_id;
-                    val result =
-                      if is_some (after_entry node last_exec) then no_result
-                      else Lazy.map #1 exec;
-
-                    val node' = node
-                      |> fold reset_entry no_execs
-                      |> fold (fn (id, exec) => update_entry id (SOME exec)) execs
-                      |> set_last_exec last_exec
-                      |> set_result result
-                      |> set_touched false;
-                  in ((no_execs, execs, [(name, node')]), node') end)
-            end)
-      |> Future.joins |> map #1;
-
-    val command_execs =
-      map (rpair NONE) (maps #1 updated) @
-      map (fn (command_id, (exec_id, _)) => (command_id, SOME exec_id)) (maps #2 updated);
-    val updated_nodes = maps #3 updated;
-    val last_execs = map (fn (name, node) => (name, get_last_exec node)) updated_nodes;
-
-    val state' = state
-      |> define_version new_id (fold put_node updated_nodes new_version);
-  in ((command_execs, last_execs), state') end;
-
-end;
-
-
-(* execute *)
-
-fun execute version_id state =
-  state |> map_state (fn (versions, commands, _) =>
-    let
-      val version = the_version state version_id;
-
-      fun force_exec _ _ NONE = ()
-        | force_exec node command_id (SOME (_, exec)) =
-            let
-              val (_, print) = Lazy.force exec;
-              val _ =
-                if #1 (get_perspective node) command_id
-                then ignore (Lazy.future Future.default_params print)
-                else ();
-            in () end;
-
-      val group = Future.new_group NONE;
-      val _ =
-        nodes_of version |> Graph.schedule
-          (fn deps => fn (name, node) =>
-            (singleton o Future.forks)
-              {name = "theory:" ^ name, group = SOME (Future.new_group (SOME group)),
-                deps = map (Future.task_of o #2) deps, pri = 1, interrupts = false}
-              (iterate_entries (fn ((_, id), exec) => fn () =>
-                SOME (force_exec node id exec)) node));
-
-    in (versions, commands, (version_id, group)) end);
-
-
-(* remove versions *)
-
 fun remove_versions ids state = state |> map_state (fn (versions, _, execution) =>
   let
     val _ = member (op =) ids (#1 execution) andalso
@@ -583,6 +284,248 @@
 
 
 
+(** document execution **)
+
+val discontinue_execution = execution_of #> (fn (_, _, running) => running := false);
+
+val cancel_execution = execution_of #> (fn (_, group, _) => Future.cancel_group group);
+
+fun terminate_execution state =
+  let
+    val (_, group, _) = execution_of state;
+    val _ = Future.cancel_group group;
+  in Future.join_tasks (Future.group_tasks group) end;
+
+fun start_execution state =
+  let
+    fun finished_theory node =
+      (case Exn.capture (Command.memo_result o the) (get_result node) of
+        Exn.Res (st, _) => can (Toplevel.end_theory Position.none) st
+      | _ => false);
+
+    fun run node command_id exec =
+      let
+        val (_, print) = Command.memo_eval exec;
+        val _ =
+          if visible_command node command_id
+          then ignore (Lazy.future Future.default_params print)
+          else ();
+      in () end;
+
+    val (version_id, group, running) = execution_of state;
+    val _ =
+      nodes_of (the_version state version_id) |> Graph.schedule
+        (fn deps => fn (name, node) =>
+          if not (visible_node node) andalso finished_theory node then
+            Future.value ()
+          else
+            (singleton o Future.forks)
+              {name = "theory:" ^ name, group = SOME (Future.new_group (SOME group)),
+                deps = map (Future.task_of o #2) deps, pri = ~2, interrupts = false}
+              (fn () =>
+                iterate_entries (fn ((_, id), opt_exec) => fn () =>
+                  (case opt_exec of
+                    SOME (_, exec) => if ! running then SOME (run node id exec) else NONE
+                  | NONE => NONE)) node ()));
+  in () end;
+
+
+
+(** document update **)
+
+local
+
+fun stable_finished_theory node =
+  is_some (Exn.get_res (Exn.capture (fn () =>
+    fst (Command.memo_result (the (get_result node)))
+    |> Toplevel.end_theory Position.none
+    |> Global_Theory.join_proofs) ()));
+
+fun stable_command exec =
+  (case Exn.capture Command.memo_result exec of
+    Exn.Exn exn => not (Exn.is_interrupt exn)
+  | Exn.Res (st, _) =>
+      (case try Toplevel.theory_of st of
+        NONE => true
+      | SOME thy => is_some (Exn.get_res (Exn.capture Global_Theory.join_recent_proofs thy))));
+
+fun make_required nodes =
+  let
+    val all_visible =
+      Graph.fold (fn (a, (node, _)) => visible_node node ? cons a) nodes []
+      |> Graph.all_preds nodes
+      |> map (rpair ()) |> Symtab.make;
+
+    val required =
+      Symtab.fold (fn (a, ()) =>
+        exists (Symtab.defined all_visible) (Graph.immediate_succs nodes a) ?
+          Symtab.update (a, ())) all_visible Symtab.empty;
+  in Symtab.defined required end;
+
+fun init_theory imports node =
+  let
+    (* FIXME provide files via Scala layer, not master_dir *)
+    val (dir, header) = Exn.release (get_header node);
+    val master_dir =
+      (case Url.explode dir of
+        Url.File path => path
+      | _ => Path.current);
+    val parents =
+      #imports header |> map (fn import =>
+        (case Thy_Info.lookup_theory import of
+          SOME thy => thy
+        | NONE =>
+            Toplevel.end_theory (Position.file_only import)
+              (fst
+                (Command.memo_result
+                  (the_default no_exec
+                    (get_result (snd (the (AList.lookup (op =) imports import)))))))));
+  in Thy_Load.begin_theory master_dir header parents end;
+
+fun check_theory full name node =
+  is_some (Thy_Info.lookup_theory name) orelse
+  is_some (Exn.get_res (get_header node)) andalso (not full orelse is_some (get_result node));
+
+fun last_common state last_visible node0 node =
+  let
+    fun update_flags prev (visible, initial) =
+      let
+        val visible' = visible andalso prev <> last_visible;
+        val initial' = initial andalso
+          (case prev of
+            NONE => true
+          | SOME id => not (Keyword.is_theory_begin (#1 (the_command state id))));
+      in (visible', initial') end;
+    fun get_common ((prev, id), opt_exec) (found, (_, flags)) =
+      if found then NONE
+      else
+        let val found' =
+          (case opt_exec of
+            NONE => true
+          | SOME (exec_id, exec) =>
+              (case lookup_entry node0 id of
+                NONE => true
+              | SOME (exec_id0, _) => exec_id <> exec_id0) orelse not (stable_command exec));
+        in SOME (found', (prev, update_flags prev flags)) end;
+    val (found, (common, flags)) =
+      iterate_entries get_common node (false, (NONE, (true, true)));
+  in
+    if found then (common, flags)
+    else
+      let val last = Entries.get_after (get_entries node) common
+      in (last, update_flags last flags) end
+  end;
+
+fun illegal_init () = error "Illegal theory header after end of theory";
+
+fun new_exec state proper_init command_id' (execs, command_exec, init) =
+  if not proper_init andalso is_none init then NONE
+  else
+    let
+      val (name, span) = the_command state command_id' ||> Lazy.force;
+      val (modify_init, init') =
+        if Keyword.is_theory_begin name then
+          (Toplevel.modify_init (the_default illegal_init init), NONE)
+        else (I, init);
+      val exec_id' = new_id ();
+      val exec_id'_string = print_id exec_id';
+      val tr =
+        Position.setmp_thread_data (Position.id_only exec_id'_string)
+          (fn () =>
+            #1 (Outer_Syntax.read_span (#2 (Outer_Syntax.get_syntax ())) span)
+            |> modify_init
+            |> Toplevel.put_id exec_id'_string);
+      val exec' =
+        Command.memo (fn () =>
+          Command.run_command (tr ()) (fst (Command.memo_result (snd (snd command_exec)))));
+      val command_exec' = (command_id', (exec_id', exec'));
+    in SOME (command_exec' :: execs, command_exec', init') end;
+
+in
+
+fun update (old_id: version_id) (new_id: version_id) edits state =
+  let
+    val old_version = the_version state old_id;
+    val _ = Time.now ();  (* FIXME odd workaround for polyml-5.4.0/x86_64 *)
+    val new_version = fold edit_nodes edits old_version;
+
+    val nodes = nodes_of new_version;
+    val is_required = make_required nodes;
+
+    val _ = terminate_execution state;
+    val updated =
+      nodes |> Graph.schedule
+        (fn deps => fn (name, node) =>
+          (singleton o Future.forks)
+            {name = "Document.update", group = NONE,
+              deps = map (Future.task_of o #2) deps, pri = 0, interrupts = false}
+            (fn () =>
+              let
+                val imports = map (apsnd Future.join) deps;
+                val updated_imports = exists (is_some o #3 o #1 o #2) imports;
+                val required = is_required name;
+              in
+                if updated_imports orelse AList.defined (op =) edits name orelse
+                  required andalso not (stable_finished_theory node)
+                then
+                  let
+                    val node0 = node_of old_version name;
+                    fun init () = init_theory imports node;
+                    val proper_init =
+                      check_theory false name node andalso
+                      forall (fn (name, (_, node)) => check_theory true name node) imports;
+
+                    val last_visible = visible_last node;
+                    val (common, (visible, initial)) =
+                      if updated_imports then (NONE, (true, true))
+                      else last_common state last_visible node0 node;
+                    val common_command_exec = the_default_entry node common;
+
+                    val (new_execs, (command_id', (_, exec')), _) =
+                      ([], common_command_exec, if initial then SOME init else NONE) |>
+                      (visible orelse required) ?
+                        iterate_entries_after common
+                          (fn ((prev, id), _) => fn res =>
+                            if not required andalso prev = last_visible then NONE
+                            else new_exec state proper_init id res) node;
+
+                    val no_execs =
+                      iterate_entries_after common
+                        (fn ((_, id0), exec0) => fn res =>
+                          if is_none exec0 then NONE
+                          else if exists (fn (_, (id, _)) => id0 = id) new_execs then SOME res
+                          else SOME (id0 :: res)) node0 [];
+
+                    val last_exec = if command_id' = no_id then NONE else SOME command_id';
+                    val result =
+                      if is_some (after_entry node last_exec) then NONE
+                      else SOME exec';
+
+                    val node' = node
+                      |> fold reset_entry no_execs
+                      |> fold (fn (id, exec) => update_entry id (SOME exec)) new_execs
+                      |> set_result result;
+                    val updated_node =
+                      if null no_execs andalso null new_execs then NONE
+                      else SOME (name, node');
+                  in ((no_execs, new_execs, updated_node), node') end
+                else (([], [], NONE), node)
+              end))
+      |> Future.joins |> map #1;
+
+    val command_execs =
+      map (rpair NONE) (maps #1 updated) @
+      map (fn (command_id, (exec_id, _)) => (command_id, SOME exec_id)) (maps #2 updated);
+    val updated_nodes = map_filter #3 updated;
+
+    val state' = state
+      |> define_version new_id (fold put_node updated_nodes new_version);
+  in (command_execs, state') end;
+
+end;
+
+
+
 (** global state **)
 
 val global_state = Synchronized.var "Document" init_state;
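
The reworked update above only recomputes a theory node from the point where its
command/exec assignment diverges from the previous document version: the longest prefix of
entries with an unchanged (and stable) exec assignment is kept, and fresh execs are created
for the remaining suffix only. As a rough illustration of that prefix search, here is a
hypothetical, standalone Scala model; it deliberately ignores the stability check and the
visible/initial flags that last_common also maintains, and all names are invented.

  // hypothetical model: newEntries pairs each command id with its currently assigned exec id;
  // the "common" point is the last command whose assignment agrees with the old version
  def lastCommon(oldExecs: Map[Long, Long], newEntries: List[(Long, Option[Long])]): Option[Long] =
    newEntries
      .takeWhile { case (cmd, exec) => exec.isDefined && oldExecs.get(cmd) == exec }
      .lastOption.map(_._1)

A result of None means nothing can be re-used and the node is processed from the start,
which corresponds to the (NONE, (true, true)) case taken above when the imports changed.
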
--- a/src/Pure/PIDE/document.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/document.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -296,9 +296,7 @@
       result: PartialFunction[Text.Markup, A]): Stream[Text.Info[A]]
   }
 
-  type Assign =
-   (List[(Document.Command_ID, Option[Document.Exec_ID])],  // exec state assignment
-    List[(String, Option[Document.Command_ID])])  // last exec
+  type Assign = List[(Document.Command_ID, Option[Document.Exec_ID])]  // exec state assignment
 
   object State
   {
@@ -311,14 +309,12 @@
 
     final class Assignment private(
       val command_execs: Map[Command_ID, Exec_ID] = Map.empty,
-      val last_execs: Map[String, Option[Command_ID]] = Map.empty,
       val is_finished: Boolean = false)
     {
       def check_finished: Assignment = { require(is_finished); this }
-      def unfinished: Assignment = new Assignment(command_execs, last_execs, false)
+      def unfinished: Assignment = new Assignment(command_execs, false)
 
-      def assign(add_command_execs: List[(Command_ID, Option[Exec_ID])],
-        add_last_execs: List[(String, Option[Command_ID])]): Assignment =
+      def assign(add_command_execs: List[(Command_ID, Option[Exec_ID])]): Assignment =
       {
         require(!is_finished)
         val command_execs1 =
@@ -326,7 +322,7 @@
             case (res, (command_id, None)) => res - command_id
             case (res, (command_id, Some(exec_id))) => res + (command_id -> exec_id)
           }
-        new Assignment(command_execs1, last_execs ++ add_last_execs, true)
+        new Assignment(command_execs1, true)
       }
     }
 
@@ -368,7 +364,7 @@
       }
 
     def the_version(id: Version_ID): Version = versions.getOrElse(id, fail)
-    def the_command(id: Command_ID): Command.State = commands.getOrElse(id, fail)  // FIXME rename
+    def the_command_state(id: Command_ID): Command.State = commands.getOrElse(id, fail)
     def the_exec(id: Exec_ID): Command.State = execs.getOrElse(id, fail)
     def the_assignment(version: Version): State.Assignment =
       assignments.getOrElse(version.id, fail)
@@ -387,15 +383,14 @@
           }
       }
 
-    def assign(id: Version_ID, arg: Assign = (Nil, Nil)): (List[Command], State) =
+    def assign(id: Version_ID, command_execs: Assign = Nil): (List[Command], State) =
     {
       val version = the_version(id)
-      val (command_execs, last_execs) = arg
 
       val (changed_commands, new_execs) =
         ((Nil: List[Command], execs) /: command_execs) {
           case ((commands1, execs1), (cmd_id, exec)) =>
-            val st = the_command(cmd_id)
+            val st = the_command_state(cmd_id)
             val commands2 = st.command :: commands1
             val execs2 =
               exec match {
@@ -404,7 +399,7 @@
               }
             (commands2, execs2)
         }
-      val new_assignment = the_assignment(version).assign(command_execs, last_execs)
+      val new_assignment = the_assignment(version).assign(command_execs)
       val new_state = copy(assignments = assignments + (id -> new_assignment), execs = new_execs)
 
       (changed_commands, new_state)
@@ -424,21 +419,6 @@
     def tip_stable: Boolean = is_stable(history.tip)
     def tip_version: Version = history.tip.version.get_finished
 
-    def last_exec_offset(name: Node.Name): Text.Offset =
-    {
-      val version = tip_version
-      the_assignment(version).last_execs.get(name.node) match {
-        case Some(Some(id)) =>
-          val node = version.nodes(name)
-          val cmd = the_command(id).command
-          node.command_start(cmd) match {
-            case None => 0
-            case Some(start) => start + cmd.length
-          }
-        case _ => 0
-      }
-    }
-
     def continue_history(
         previous: Future[Version],
         edits: List[Edit_Text],
@@ -490,7 +470,11 @@
         the_exec(the_assignment(version).check_finished.command_execs
           .getOrElse(command.id, fail))
       }
-      catch { case _: State.Fail => command.empty_state }
+      catch {
+        case _: State.Fail =>
+          try { the_command_state(command.id) }
+          catch { case _: State.Fail => command.empty_state }
+      }
     }
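
With the last_execs component gone, applying an assignment now just folds command/exec pairs
into the command_execs map, where None retracts a previous entry. A hypothetical standalone
sketch of that fold, detached from Document.State and modelling Command_ID/Exec_ID as plain
Longs, behaves as follows.

  // hypothetical sketch of the Assignment.assign fold above
  def applyAssign(
      commandExecs: Map[Long, Long],
      updates: List[(Long, Option[Long])]): Map[Long, Long] =
    updates.foldLeft(commandExecs) {
      case (m, (cmd, None)) => m - cmd                    // retract assignment
      case (m, (cmd, Some(exec))) => m + (cmd -> exec)    // (re)assign exec state
    }

  // e.g. applyAssign(Map(1L -> 10L), List((1L, None), (2L, Some(20L)))) == Map(2L -> 20L)
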
 
 
--- a/src/Pure/PIDE/isabelle_markup.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/isabelle_markup.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -90,7 +90,7 @@
   val sendbackN: string val sendback: Markup.T
   val hiliteN: string val hilite: Markup.T
   val taskN: string
-  val parsedN: string val parsed: Markup.T
+  val acceptedN: string val accepted: Markup.T
   val forkedN: string val forked: Markup.T
   val joinedN: string val joined: Markup.T
   val failedN: string val failed: Markup.T
@@ -283,7 +283,7 @@
 
 val taskN = "task";
 
-val (parsedN, parsed) = markup_elem "parsed";
+val (acceptedN, accepted) = markup_elem "accepted";
 val (forkedN, forked) = markup_elem "forked";
 val (joinedN, joined) = markup_elem "joined";
 
--- a/src/Pure/PIDE/isabelle_markup.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/isabelle_markup.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -198,7 +198,7 @@
 
   val TASK = "task"
 
-  val PARSED = "parsed"
+  val ACCEPTED = "accepted"
   val FORKED = "forked"
   val JOINED = "joined"
   val FAILED = "failed"
--- a/src/Pure/PIDE/protocol.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/protocol.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -13,29 +13,19 @@
       Document.change_state (Document.define_command (Document.parse_id id) name text));
 
 val _ =
-  Isabelle_Process.protocol_command "Document.cancel_execution"
-    (fn [] => ignore (Document.cancel_execution (Document.state ())));
+  Isabelle_Process.protocol_command "Document.discontinue_execution"
+    (fn [] => Document.discontinue_execution (Document.state ()));
 
 val _ =
-  Isabelle_Process.protocol_command "Document.update_perspective"
-    (fn [old_id_string, new_id_string, name, ids_yxml] => Document.change_state (fn state =>
-      let
-        val old_id = Document.parse_id old_id_string;
-        val new_id = Document.parse_id new_id_string;
-        val ids = YXML.parse_body ids_yxml
-          |> let open XML.Decode in list int end;
-
-        val _ = Future.join_tasks (Document.cancel_execution state);
-      in
-        state
-        |> Document.update_perspective old_id new_id name ids
-        |> Document.execute new_id
-      end));
+  Isabelle_Process.protocol_command "Document.cancel_execution"
+    (fn [] => Document.cancel_execution (Document.state ()));
 
 val _ =
   Isabelle_Process.protocol_command "Document.update"
     (fn [old_id_string, new_id_string, edits_yxml] => Document.change_state (fn state =>
       let
+        val _ = Document.cancel_execution state;
+
         val old_id = Document.parse_id old_id_string;
         val new_id = Document.parse_id new_id_string;
         val edits =
@@ -60,17 +50,16 @@
                   fn (a, []) => Document.Perspective (map int_atom a)]))
             end;
 
-        val running = Document.cancel_execution state;
-        val (assignment, state1) = Document.update old_id new_id edits state;
-        val _ = Future.join_tasks running;
+        val (assignment, state') = Document.update old_id new_id edits state;
         val _ =
           Output.protocol_message Isabelle_Markup.assign_execs
             ((new_id, assignment) |>
               let open XML.Encode
-              in pair int (pair (list (pair int (option int))) (list (pair string (option int)))) end
+              in pair int (list (pair int (option int))) end
               |> YXML.string_of_body);
-        val state2 = Document.execute new_id state1;
-      in state2 end));
+
+        val _ = Document.start_execution state';
+      in state' end));
 
 val _ =
   Isabelle_Process.protocol_command "Document.remove_versions"
--- a/src/Pure/PIDE/protocol.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/PIDE/protocol.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -17,7 +17,7 @@
       try {
         import XML.Decode._
         val body = YXML.parse_body(text)
-        Some(pair(long, pair(list(pair(long, option(long))), list(pair(string, option(long)))))(body))
+        Some(pair(long, list(pair(long, option(long))))(body))
       }
       catch {
         case ERROR(_) => None
@@ -49,27 +49,27 @@
   }
 
   sealed case class Status(
-    private val parsed: Boolean = false,
+    private val accepted: Boolean = false,
     private val finished: Boolean = false,
     private val failed: Boolean = false,
     forks: Int = 0)
   {
-    def is_unprocessed: Boolean = parsed && !finished && !failed && forks == 0
+    def is_unprocessed: Boolean = accepted && !finished && !failed && forks == 0
     def is_running: Boolean = forks != 0
     def is_finished: Boolean = finished && forks == 0
     def is_failed: Boolean = failed && forks == 0
     def + (that: Status): Status =
-      Status(parsed || that.parsed, finished || that.finished,
+      Status(accepted || that.accepted, finished || that.finished,
         failed || that.failed, forks + that.forks)
   }
 
   val command_status_markup: Set[String] =
-    Set(Isabelle_Markup.PARSED, Isabelle_Markup.FINISHED, Isabelle_Markup.FAILED,
+    Set(Isabelle_Markup.ACCEPTED, Isabelle_Markup.FINISHED, Isabelle_Markup.FAILED,
       Isabelle_Markup.FORKED, Isabelle_Markup.JOINED)
 
   def command_status(status: Status, markup: Markup): Status =
     markup match {
-      case Markup(Isabelle_Markup.PARSED, _) => status.copy(parsed = true)
+      case Markup(Isabelle_Markup.ACCEPTED, _) => status.copy(accepted = true)
       case Markup(Isabelle_Markup.FINISHED, _) => status.copy(finished = true)
       case Markup(Isabelle_Markup.FAILED, _) => status.copy(failed = true)
       case Markup(Isabelle_Markup.FORKED, _) => status.copy(forks = status.forks + 1)
@@ -191,21 +191,9 @@
 
   /* document versions */
 
-  def cancel_execution()
-  {
-    input("Document.cancel_execution")
-  }
+  def discontinue_execution() { input("Document.discontinue_execution") }
 
-  def update_perspective(old_id: Document.Version_ID, new_id: Document.Version_ID,
-    name: Document.Node.Name, perspective: Command.Perspective)
-  {
-    val ids =
-    { import XML.Encode._
-      list(long)(perspective.commands.map(_.id)) }
-
-    input("Document.update_perspective", Document.ID(old_id), Document.ID(new_id),
-      name.node, YXML.string_of_body(ids))
-  }
+  def cancel_execution() { input("Document.cancel_execution") }
 
   def update(old_id: Document.Version_ID, new_id: Document.Version_ID,
     edits: List[Document.Edit_Command])
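
The decoded assign_execs payload is now just the new version id together with the
command/exec assignment list; the per-theory last_execs component has disappeared from the
protocol. For orientation, a hypothetical decoded value (all ids invented) has this shape:

  // hypothetical example of the simplified Assign payload: None retracts the exec
  // assignment of that command, Some re-assigns it
  val assignExample: (Long, List[(Long, Option[Long])]) =
    (42L, List((101L, Some(1001L)), (102L, Some(1002L)), (103L, None)))
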
--- a/src/Pure/ROOT.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/ROOT.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -251,6 +251,7 @@
 use "Thy/present.ML";
 use "Thy/thy_load.ML";
 use "Thy/thy_info.ML";
+use "PIDE/command.ML";
 use "PIDE/document.ML";
 use "Thy/rail.ML";
 
--- a/src/Pure/System/gui_setup.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/System/gui_setup.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -51,6 +51,9 @@
       val platform64 = Isabelle_System.getenv("ISABELLE_PLATFORM64")
       if (platform64 != "") text.append("Isabelle platform (64 bit): " + platform64 + "\n")
       text.append("Isabelle home: " + Isabelle_System.getenv("ISABELLE_HOME") + "\n")
+      val isabelle_home_windows = Isabelle_System.getenv("ISABELLE_HOME_WINDOWS")
+      if (isabelle_home_windows != "")
+        text.append("Isabelle home (Windows): " + isabelle_home_windows + "\n")
       text.append("Isabelle jdk home: " + Isabelle_System.getenv("ISABELLE_JDK_HOME") + "\n")
     }
     catch { case ERROR(msg) => text.append(msg + "\n") }
--- a/src/Pure/System/isabelle_process.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/System/isabelle_process.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -167,7 +167,7 @@
     val _ = OS.Process.sleep (seconds 0.5);  (*yield to raw ML toplevel*)
     val _ = Output.physical_stderr Symbol.STX;
 
-    val _ = quick_and_dirty := true;
+    val _ = quick_and_dirty := false;
     val _ = Goal.parallel_proofs := 0;
     val _ =
       if Multithreading.max_threads_value () < 2
--- a/src/Pure/System/session.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/System/session.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -267,27 +267,6 @@
     }
 
 
-    /* perspective */
-
-    def update_perspective(name: Document.Node.Name, text_perspective: Text.Perspective)
-    {
-      val previous = global_state().tip_version
-      val (perspective, version) = Thy_Syntax.edit_perspective(previous, name, text_perspective)
-
-      val text_edits: List[Document.Edit_Text] =
-        List((name, Document.Node.Perspective(text_perspective)))
-      val change =
-        global_state >>>
-          (_.continue_history(Future.value(previous), text_edits, Future.value(version)))
-
-      val assignment = global_state().the_assignment(previous).check_finished
-      global_state >> (_.define_version(version, assignment))
-      global_state >>> (_.assign(version.id))
-
-      prover.get.update_perspective(previous.id, version.id, name, perspective)
-    }
-
-
     /* incoming edits */
 
     def handle_edits(name: Document.Node.Name,
@@ -296,7 +275,7 @@
     {
       val previous = global_state().history.tip.version
 
-      prover.get.cancel_execution()
+      prover.get.discontinue_execution()
 
       val text_edits = header_edit(name, header) :: edits.map(edit => (name, edit))
       val version = Future.promise[Document.Version]
@@ -420,7 +399,7 @@
         case _ =>
           if (output.is_exit && phase == Session.Startup) phase = Session.Failed
           else if (output.is_exit) phase = Session.Inactive
-          else if (output.is_stdout) { }
+          else if (output.is_init || output.is_stdout) { }
           else bad_output(output)
       }
     }
@@ -465,12 +444,8 @@
           reply(())
 
         case Edit_Node(name, header, perspective, text_edits) if prover.isDefined =>
-          if (text_edits.isEmpty && global_state().tip_stable &&
-              perspective.range.stop <= global_state().last_exec_offset(name))
-            update_perspective(name, perspective)
-          else
-            handle_edits(name, header,
-              List(Document.Node.Edits(text_edits), Document.Node.Perspective(perspective)))
+          handle_edits(name, header,
+            List(Document.Node.Edits(text_edits), Document.Node.Perspective(perspective)))
           reply(())
 
         case Messages(msgs) =>
--- a/src/Pure/Thy/thy_syntax.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/Thy/thy_syntax.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -118,28 +118,6 @@
     }
   }
 
-  def update_perspective(nodes: Document.Nodes,
-      name: Document.Node.Name, text_perspective: Text.Perspective)
-    : (Command.Perspective, Option[Document.Nodes]) =
-  {
-    val node = nodes(name)
-    val perspective = command_perspective(node, text_perspective)
-    val new_nodes =
-      if (node.perspective same perspective) None
-      else Some(nodes + (name -> node.update_perspective(perspective)))
-    (perspective, new_nodes)
-  }
-
-  def edit_perspective(previous: Document.Version,
-      name: Document.Node.Name, text_perspective: Text.Perspective)
-    : (Command.Perspective, Document.Version) =
-  {
-    val nodes = previous.nodes
-    val (perspective, new_nodes) = update_perspective(nodes, name, text_perspective)
-    val version = Document.Version.make(previous.syntax, new_nodes getOrElse nodes)
-    (perspective, version)
-  }
-
 
 
   /** header edits: structure and outer syntax **/
@@ -311,11 +289,11 @@
       case (name, Document.Node.Header(_)) =>
 
       case (name, Document.Node.Perspective(text_perspective)) =>
-        update_perspective(nodes, name, text_perspective) match {
-          case (_, None) =>
-          case (perspective, Some(nodes1)) =>
-            doc_edits += (name -> Document.Node.Perspective(perspective))
-            nodes = nodes1
+        val node = nodes(name)
+        val perspective = command_perspective(node, text_perspective)
+        if (!(node.perspective same perspective)) {
+          doc_edits += (name -> Document.Node.Perspective(perspective))
+          nodes += (name -> node.update_perspective(perspective))
         }
     }
     (doc_edits.toList, Document.Version.make(syntax, nodes))
--- a/src/Pure/build-jars	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/build-jars	Mon Apr 16 19:01:57 2012 +0200
@@ -77,6 +77,7 @@
   echo
   echo "  Options are:"
   echo "    -f           fresh build"
+  echo "    -t           test separate compilation of PIDE"
   echo
   exit 1
 }
@@ -95,13 +96,17 @@
 # options
 
 FRESH=""
+TEST_PIDE=""
 
-while getopts "f" OPT
+while getopts "ft" OPT
 do
   case "$OPT" in
     f)
       FRESH=true
       ;;
+    t)
+      TEST_PIDE=true
+      ;;
     \?)
       usage
       ;;
@@ -171,11 +176,16 @@
 
   SCALAC_OPTIONS="$ISABELLE_SCALA_BUILD_OPTIONS -d classes"
 
-  isabelle_scala scalac $SCALAC_OPTIONS "${PIDE_SOURCES[@]}" || \
-    fail "Failed to compile PIDE sources"
-
-  isabelle_scala scalac $SCALAC_OPTIONS -classpath classes "${PURE_SOURCES[@]}" || \
-    fail "Failed to compile Pure sources"
+  if [ "$TEST_PIDE" = true ]; then
+    isabelle_scala scalac $SCALAC_OPTIONS "${PIDE_SOURCES[@]}" || \
+      fail "Failed to compile PIDE sources"
+    isabelle_scala scalac $SCALAC_OPTIONS -classpath classes "${PURE_SOURCES[@]}" || \
+      fail "Failed to compile Pure sources"
+  else
+    isabelle_scala scalac $SCALAC_OPTIONS -classpath classes \
+      "${PIDE_SOURCES[@]}" "${PURE_SOURCES[@]}" || \
+      fail "Failed to compile sources"
+  fi
 
   mkdir -p "$TARGET_DIR/ext" || fail "Failed to create directory $TARGET_DIR/ext"
 
--- a/src/Pure/drule.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/drule.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -67,6 +67,8 @@
   val store_standard_thm: binding -> thm -> thm
   val store_thm_open: binding -> thm -> thm
   val store_standard_thm_open: binding -> thm -> thm
+  val multi_resolve: thm list -> thm -> thm Seq.seq
+  val multi_resolves: thm list -> thm list -> thm Seq.seq
   val compose_single: thm * int * thm -> thm
   val equals_cong: thm
   val imp_cong: thm
@@ -108,8 +110,6 @@
   val equal_elim_rule1: thm
   val equal_elim_rule2: thm
   val remdups_rl: thm
-  val multi_resolve: thm list -> thm -> thm Seq.seq
-  val multi_resolves: thm list -> thm list -> thm Seq.seq
   val abs_def: thm -> thm
 end;
 
@@ -314,50 +314,64 @@
             (Thm.permute_prems (new + 1) (new - p) (Thm.permute_prems new (p - new) thm))
   in rearr 0 end;
 
-(*Resolution: exactly one resolvent must be produced.*)
-fun tha RSN (i,thb) =
-  case Seq.chop 2 (Thm.biresolution false [(false,tha)] i thb) of
-      ([th],_) => th
-    | ([],_)   => raise THM("RSN: no unifiers", i, [tha,thb])
-    |      _   => raise THM("RSN: multiple unifiers", i, [tha,thb]);
+
+(*Resolution: multiple arguments, multiple results*)
+local
+  fun res th i rule =
+    Thm.biresolution false [(false, th)] i rule handle THM _ => Seq.empty;
 
-(*resolution: P==>Q, Q==>R gives P==>R. *)
+  fun multi_res _ [] rule = Seq.single rule
+    | multi_res i (th :: ths) rule = Seq.maps (res th i) (multi_res (i + 1) ths rule);
+in
+  val multi_resolve = multi_res 1;
+  fun multi_resolves facts rules = Seq.maps (multi_resolve facts) (Seq.of_list rules);
+end;
+
+(*Resolution: exactly one resolvent must be produced*)
+fun tha RSN (i, thb) =
+  (case Seq.chop 2 (Thm.biresolution false [(false, tha)] i thb) of
+    ([th], _) => th
+  | ([], _) => raise THM ("RSN: no unifiers", i, [tha, thb])
+  | _ => raise THM ("RSN: multiple unifiers", i, [tha, thb]));
+
+(*Resolution: P==>Q, Q==>R gives P==>R*)
 fun tha RS thb = tha RSN (1,thb);
 
 (*For joining lists of rules*)
-fun thas RLN (i,thbs) =
+fun thas RLN (i, thbs) =
   let val resolve = Thm.biresolution false (map (pair false) thas) i
       fun resb thb = Seq.list_of (resolve thb) handle THM _ => []
   in maps resb thbs end;
 
-fun thas RL thbs = thas RLN (1,thbs);
+fun thas RL thbs = thas RLN (1, thbs);
+
+(*Isar-style multi-resolution*)
+fun bottom_rl OF rls =
+  (case Seq.chop 2 (multi_resolve rls bottom_rl) of
+    ([th], _) => th
+  | ([], _) => raise THM ("OF: no unifiers", 0, bottom_rl :: rls)
+  | _ => raise THM ("OF: multiple unifiers", 0, bottom_rl :: rls));
 
 (*Resolve a list of rules against bottom_rl from right to left;
   makes proof trees*)
-fun rls MRS bottom_rl =
-  let fun rs_aux i [] = bottom_rl
-        | rs_aux i (rl::rls) = rl RSN (i, rs_aux (i+1) rls)
-  in  rs_aux 1 rls  end;
-
-(*A version of MRS with more appropriate argument order*)
-fun bottom_rl OF rls = rls MRS bottom_rl;
+fun rls MRS bottom_rl = bottom_rl OF rls;
 
 (*compose Q and [...,Qi,Q(i+1),...]==>R to [...,Q(i+1),...]==>R
   with no lifting or renaming!  Q may contain ==> or meta-quants
   ALWAYS deletes premise i *)
 fun compose(tha,i,thb) =
-    distinct Thm.eq_thm (Seq.list_of (Thm.bicompose false (false,tha,0) i thb));
+  distinct Thm.eq_thm (Seq.list_of (Thm.bicompose false (false,tha,0) i thb));
 
 fun compose_single (tha,i,thb) =
-  case compose (tha,i,thb) of
+  (case compose (tha,i,thb) of
     [th] => th
-  | _ => raise THM ("compose: unique result expected", i, [tha,thb]);
+  | _ => raise THM ("compose: unique result expected", i, [tha,thb]));
 
 (*compose Q and [Q1,Q2,...,Qk]==>R to [Q2,...,Qk]==>R getting unique result*)
 fun tha COMP thb =
-    case compose(tha,1,thb) of
-        [th] => th
-      | _ =>   raise THM("COMP", 1, [tha,thb]);
+  (case compose(tha, 1, thb) of
+    [th] => th
+  | _ => raise THM ("COMP", 1, [tha, thb]));
 
 
 (** theorem equality **)
@@ -866,25 +880,6 @@
     | _ => error "More names than abstractions in theorem"
   end;
 
-
-
-(** multi_resolve **)
-
-local
-
-fun res th i rule =
-  Thm.biresolution false [(false, th)] i rule handle THM _ => Seq.empty;
-
-fun multi_res _ [] rule = Seq.single rule
-  | multi_res i (th :: ths) rule = Seq.maps (res th i) (multi_res (i + 1) ths rule);
-
-in
-
-val multi_resolve = multi_res 1;
-fun multi_resolves facts rules = Seq.maps (multi_resolve facts) (Seq.of_list rules);
-
-end;
-
 end;
 
 structure Basic_Drule: BASIC_DRULE = Drule;
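
The relocated multi_res/multi_resolve now sits underneath OF (and MRS is reduced to OF):
each fact is resolved against the corresponding premise of the rule and all combinations
are collected lazily, of which OF accepts exactly one. A hypothetical Scala model of that
recursion, with res standing in for Thm.biresolution and Stream playing the role of Seq.seq:

  // hypothetical model: res(fact, i, rule) yields every rule obtained by resolving fact
  // against premise i; multiRes threads the premise index through the list of facts
  def multiRes[F, R](res: (F, Int, R) => Stream[R])(i: Int, facts: List[F], rule: R): Stream[R] =
    facts match {
      case Nil => Stream(rule)
      case f :: fs => multiRes(res)(i + 1, fs, rule).flatMap(r => res(f, i, r))
    }

  // multi_resolve corresponds to multiRes(res)(1, facts, rule); OF demands a unique result
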
--- a/src/Pure/global_theory.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/global_theory.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -49,7 +49,7 @@
 
 (** theory data **)
 
-type proofs = thm list * unit lazy;
+type proofs = thm list * unit lazy;  (*accumulated thms, persistent join*)
 
 val empty_proofs: proofs = ([], Lazy.value ());
 
@@ -61,7 +61,7 @@
 
 structure Data = Theory_Data
 (
-  type T = Facts.T * (proofs * proofs);
+  type T = Facts.T * (proofs * proofs);  (*facts, recent proofs, all proofs of theory node*)
   val empty = (Facts.empty, (empty_proofs, empty_proofs));
   fun extend (facts, _) = (facts, snd empty);
   fun merge ((facts1, _), (facts2, _)) = (Facts.merge (facts1, facts2), snd empty);
--- a/src/Pure/goal.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/goal.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -125,7 +125,7 @@
       val _ = forked 1;
       val future =
         (singleton o Future.forks)
-          {name = name, group = NONE, deps = [], pri = 0, interrupts = false}
+          {name = name, group = NONE, deps = [], pri = ~1, interrupts = false}
           (fn () =>
             Exn.release
               (Exn.capture (Future.status o Future.interruptible_task) e before forked ~1));
--- a/src/Pure/simplifier.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Pure/simplifier.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -240,7 +240,7 @@
     fun simp_loop_tac i =
       Raw_Simplifier.generic_rewrite_goal_tac mode (solve_all_tac unsafe_solvers) ss i THEN
       (solve_tac i ORELSE TRY ((loop_tac THEN_ALL_NEW simp_loop_tac) i));
-  in simp_loop_tac end;
+  in SELECT_GOAL (simp_loop_tac 1) end;
 
 local
 
--- a/src/Tools/Code/code_thingol.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/Code/code_thingol.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -781,10 +781,14 @@
     fun arg_types num_args ty = fst (chop num_args (binder_types ty));
     val tys = arg_types num_args (snd c_ty);
     val ty = nth tys t_pos;
-    fun mk_constr c t = let val n = Code.args_number thy c
-      in ((c, arg_types n (fastype_of (untag_term t)) ---> ty), n) end;
+    fun mk_constr NONE t = NONE
+      | mk_constr (SOME c) t = let val n = Code.args_number thy c
+        in SOME ((c, arg_types n (fastype_of (untag_term t)) ---> ty), n) end;
+
     val constrs = if null case_pats then []
       else map2 mk_constr case_pats (nth_drop t_pos ts);
+    val constrs' = map_filter I constrs;
+
     fun casify naming constrs ty ts =
       let
         val undefineds = map_filter (lookup_const naming) (Code.undefineds thy);
@@ -817,12 +821,12 @@
           then mk_clause (fn ([t], body) => (t, body)) [ty] (the_single ts_clause)
           else maps (fn ((constr as IConst (_, ((_, (tys, _)), _)), n), t) =>
             mk_clause (fn (ts, body) => (constr `$$ ts, body)) (take n tys) t)
-              (constrs ~~ ts_clause);
+              (constrs ~~ (map_filter (fn (NONE, _) => NONE | (SOME _, t) => SOME t) (case_pats ~~ ts_clause)));
       in ((t, ty), clauses) end;
   in
     translate_const thy algbr eqngr permissive some_thm (c_ty, NONE)
     ##>> fold_map (fn (constr, n) => translate_const thy algbr eqngr permissive some_thm (constr, NONE)
-      #>> rpair n) constrs
+      #>> rpair n) constrs'
     ##>> translate_typ thy algbr eqngr permissive ty
     ##>> fold_map (translate_term thy algbr eqngr permissive some_thm o rpair NONE) ts
     #-> (fn (((t, constrs), ty), ts) =>
--- a/src/Tools/JVM/java_ext_dirs	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/JVM/java_ext_dirs	Mon Apr 16 19:01:57 2012 +0200
@@ -19,5 +19,5 @@
 
 isabelle_jdk java \
   -classpath "$(jvmpath "$ISABELLE_HOME/src/Tools/JVM/java_ext_dirs.jar")" \
-  isabelle.Java_Ext_Dirs "$(jvmpath "$ISABELLE_HOME/lib/classes/ext")"
+  isabelle.Java_Ext_Dirs "$(jvmpath "$ISABELLE_HOME/lib/classes/ext")" 2>/dev/null
 
--- a/src/Tools/jEdit/README.html	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/jEdit/README.html	Mon Apr 16 19:01:57 2012 +0200
@@ -29,8 +29,8 @@
 </p>
 
 <p>
-  <em>Research and implementation of concepts around PIDE has been
-  kindly supported in the past 3 years by BMBF (http://www.bmbf.de),
+  <em>Research and implementation of concepts around PIDE started
+  around 2008 and was kindly supported by BMBF (http://www.bmbf.de),
   Université Paris-Sud (http://www.u-psud.fr), and Digiteo
   (http://www.digiteo.fr).</em>
 </p>
@@ -156,7 +156,7 @@
 </ul>
 
 
-<h2>Limitations and workrounds (March 2012)</h2>
+<h2>Limitations and workarounds (May 2012)</h2>
 
 <ul>
   <li>No way to start/stop prover or switch to a different logic.<br/>
@@ -215,19 +215,6 @@
 </ul>
 
 
-<h2>Known problems with OpenJDK 1.6.x</h2>
-
-<ul>
-
-<li>The 2D rendering engine of OpenJDK 1.6.x distorts the appearance
-  of the jEdit text area.  Always use official JRE 1.6.x from Oracle
-  or Apple.</li>
-
-<li>jEdit 4.4.x on OpenJDK 1.6.x is generally not supported.</li>
-
-</ul>
-
-
 <h2>Licenses and home sites of contributing systems</h2>
 
 <ul>
--- a/src/Tools/jEdit/README_BUILD	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/jEdit/README_BUILD	Mon Apr 16 19:01:57 2012 +0200
@@ -1,24 +1,24 @@
 Requirements for instantaneous build from sources
 =================================================
 
-* Official Java JDK 1.6 from Sun/Oracle/Apple
+* Official Java JDK 1.6/1.7 from Sun/Oracle/Apple
   http://www.oracle.com/technetwork/java/javase/downloads/index.html
 
-  (or JDK/OpenJDK 1.7, but not OpenJDK 1.6)
+  (or OpenJDK 1.7, but not OpenJDK 1.6)
 
-* Scala 2.8.2.final or 2.9.1-1
+* Scala 2.9.2 (or 2.8.2.final)
   http://www.scala-lang.org
 
   (experimental support for Scala 2.10.x milestones)
 
 * Auxiliary jedit_build component
-  http://www4.in.tum.de/~wenzelm/test/jedit_build-20120327.tar.gz
+  http://www4.in.tum.de/~wenzelm/test/jedit_build-20120414.tar.gz
 
 
 Important settings within Isabelle environment
 ==============================================
 
-* init_component ".../jedit_build-20120327"
+* init_component ".../jedit_build-20120414"
 * ISABELLE_JDK_HOME
 * SCALA_HOME
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/patches/jedit-4.5.1/caret	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,12 @@
+diff -ru 4.5.1/jEdit/org/gjt/sp/jedit/textarea/TextArea.java 4.5.1/jEdit-patched/org/gjt/sp/jedit/textarea/TextArea.java
+--- 4.5.1/jEdit/org/gjt/sp/jedit/textarea/TextArea.java	2012-03-25 18:51:47.000000000 +0200
++++ 4.5.1/jEdit-patched/org/gjt/sp/jedit/textarea/TextArea.java	2012-04-14 18:37:11.000000000 +0200
+@@ -4907,7 +4907,7 @@
+ 	/**
+ 	 * Returns true if the caret is visible, false otherwise.
+ 	 */
+-	final boolean isCaretVisible()
++	public final boolean isCaretVisible()
+ 	{
+ 		return blink && hasFocus();
+ 	} //}}}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/patches/jedit-4.5.1/macos	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,15 @@
+diff -ru 4.5.1/jEdit/org/gjt/sp/jedit/OperatingSystem.java 4.5.1/jEdit-patched/org/gjt/sp/jedit/OperatingSystem.java
+--- 4.5.1/jEdit/org/gjt/sp/jedit/OperatingSystem.java	2012-03-25 18:52:03.000000000 +0200
++++ 4.5.1/jEdit-patched/org/gjt/sp/jedit/OperatingSystem.java	2012-04-13 19:28:37.000000000 +0200
+@@ -317,6 +317,10 @@
+ 			{
+ 				os = OS2;
+ 			}
++			else if(osName.contains("Mac OS X"))
++			{
++				os = MAC_OS_X;
++			}
+ 			else if(osName.contains("VMS"))
+ 			{
+ 				os = VMS;
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/patches/jedit-4.5.1/memory	Mon Apr 16 19:01:57 2012 +0200
@@ -0,0 +1,12 @@
+diff -ru 4.5.1/jEdit/org/gjt/sp/jedit/gui/statusbar/MemoryStatusWidgetFactory.java 4.5.1/jEdit-patched/org/gjt/sp/jedit/gui/statusbar/MemoryStatusWidgetFactory.java
+--- 4.5.1/jEdit/org/gjt/sp/jedit/gui/statusbar/MemoryStatusWidgetFactory.java	2012-03-25 18:51:51.000000000 +0200
++++ 4.5.1/jEdit-patched/org/gjt/sp/jedit/gui/statusbar/MemoryStatusWidgetFactory.java	2012-04-14 17:47:32.000000000 +0200
+@@ -222,7 +222,7 @@
+ 		} //}}}
+ 
+ 		//{{{ Private members
+-		private static final String memoryTestStr = "999/999Mb";
++		private static final String memoryTestStr = "9999/9999Mb";
+ 
+ 		private final LineMetrics lm;
+ 		private final Color progressForeground;
--- a/src/Tools/jEdit/src/document_model.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/jEdit/src/document_model.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -95,7 +95,6 @@
   private object pending_edits  // owned by Swing thread
   {
     private val pending = new mutable.ListBuffer[Text.Edit]
-    private var pending_perspective = false
     private var last_perspective: Text.Perspective = Text.Perspective.empty
 
     def snapshot(): List[Text.Edit] = pending.toList
@@ -104,16 +103,12 @@
     {
       Swing_Thread.require()
 
-      val new_perspective =
-        if (pending_perspective) { pending_perspective = false; perspective() }
-        else last_perspective
-
-      snapshot() match {
-        case Nil if last_perspective == new_perspective =>
-        case edits =>
-          pending.clear
-          last_perspective = new_perspective
-          session.edit_node(name, node_header(), new_perspective, edits)
+      val edits = snapshot()
+      val new_perspective = perspective()
+      if (!edits.isEmpty || last_perspective != new_perspective) {
+        pending.clear
+        last_perspective = new_perspective
+        session.edit_node(name, node_header(), new_perspective, edits)
       }
     }
 
@@ -129,7 +124,6 @@
 
     def update_perspective()
     {
-      pending_perspective = true
       delay_flush(true)
     }
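
Without the pending_perspective flag, a flush now always recomputes the current perspective
and sends an edit whenever there are pending text edits or the perspective has moved. A
hypothetical, stripped-down model of that decision:

  // hypothetical sketch: returns the payload to send to the prover, if anything changed
  def flushDecision[E, P](pendingEdits: List[E], lastPerspective: P, currentPerspective: P)
      : Option[(List[E], P)] =
    if (pendingEdits.nonEmpty || lastPerspective != currentPerspective)
      Some((pendingEdits, currentPerspective))
    else None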
 
--- a/src/Tools/jEdit/src/document_view.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/jEdit/src/document_view.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -27,8 +27,7 @@
 import org.gjt.sp.jedit.{jEdit, OperatingSystem, Debug}
 import org.gjt.sp.jedit.gui.RolloverButton
 import org.gjt.sp.jedit.options.GutterOptionPane
-import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea, TextAreaExtension, TextAreaPainter,
-  ScrollListener}
+import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea, TextAreaExtension, TextAreaPainter}
 import org.gjt.sp.jedit.syntax.{SyntaxStyle}
 
 
--- a/src/Tools/jEdit/src/modes/isabelle.xml	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/jEdit/src/modes/isabelle.xml	Mon Apr 16 19:01:57 2012 +0200
@@ -33,6 +33,7 @@
       <KEYWORD2>header</KEYWORD2>
       <KEYWORD1>theory</KEYWORD1>
       <KEYWORD2>imports</KEYWORD2>
+      <KEYWORD2>keywords</KEYWORD2>
       <KEYWORD2>uses</KEYWORD2>
       <KEYWORD2>begin</KEYWORD2>
       <KEYWORD2>end</KEYWORD2>
--- a/src/Tools/jEdit/src/text_area_painter.scala	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/jEdit/src/text_area_painter.scala	Mon Apr 16 19:01:57 2012 +0200
@@ -220,7 +220,7 @@
           else chunk_font.getStringBounds(s, font_context).getWidth.toFloat
 
         val caret_range =
-          if (text_area.hasFocus) doc_view.caret_range()
+          if (text_area.isCaretVisible) doc_view.caret_range()
           else Text.Range(-1)
 
         val markup =
@@ -373,7 +373,7 @@
       screen_line: Int, physical_line: Int, start: Int, end: Int, y: Int)
     {
       robust_snapshot { _ =>
-        if (text_area.hasFocus) {
+        if (text_area.isCaretVisible) {
           val caret = text_area.getCaretPosition
           if (start <= caret && caret == end - 1) {
             val painter = text_area.getPainter
--- a/src/Tools/quickcheck.ML	Wed Apr 04 09:59:49 2012 +0200
+++ b/src/Tools/quickcheck.ML	Mon Apr 16 19:01:57 2012 +0200
@@ -30,6 +30,7 @@
   val finite_types : bool Config.T
   val finite_type_size : int Config.T
   val tag : string Config.T
+  val locale : string Config.T
   val set_active_testers: string list -> Context.generic -> Context.generic
   datatype expectation = No_Expectation | No_Counterexample | Counterexample;
   datatype test_params = Test_Params of {default_type: typ list, expect : expectation};
@@ -176,6 +177,7 @@
 val depth = Attrib.setup_config_int @{binding quickcheck_depth} (K 10)
 
 val no_assms = Attrib.setup_config_bool @{binding quickcheck_no_assms} (K false)
+val locale = Attrib.setup_config_string @{binding quickcheck_locale} (K "interpret expand")
 val report = Attrib.setup_config_bool @{binding quickcheck_report} (K true)
 val timing = Attrib.setup_config_bool @{binding quickcheck_timing} (K false)
 val timeout = Attrib.setup_config_real @{binding quickcheck_timeout} (K 30.0)
@@ -336,6 +338,15 @@
     all t
   end
 
+fun locale_config_of s =
+  let
+    val cs = space_explode " " s
+  in
+    if forall (fn c => c = "expand" orelse c = "interpret") cs then cs
+    else (warning ("Invalid quickcheck_locale setting: falling back to the default setting.");
+      ["interpret", "expand"])
+  end
+
 fun test_goal (time_limit, is_interactive) (insts, eval_terms) i state =
   let
     val lthy = Proof.context_of state;
@@ -356,12 +367,14 @@
     fun axioms_of locale = case fst (Locale.specification_of thy locale) of
         NONE => []
       | SOME t => the_default [] (all_axioms_of lthy t)
-    val goals = case some_locale
+    val config = locale_config_of (Config.get lthy locale)
+    val goals = case some_locale
      of NONE => [(proto_goal, eval_terms)]
-      | SOME locale =>
-          (Logic.list_implies (axioms_of locale, proto_goal), eval_terms) ::
-          map (fn (_, phi) => (Morphism.term phi proto_goal, map (Morphism.term phi) eval_terms))
-          (Locale.registrations_of (Context.Theory thy) (*FIXME*) locale);
+      | SOME locale => fold (fn c =>
+          if c = "expand" then cons (Logic.list_implies (axioms_of locale, proto_goal), eval_terms) else
+          if c = "interpret" then
+            append (map (fn (_, phi) => (Morphism.term phi proto_goal, map (Morphism.term phi) eval_terms))
+          (Locale.registrations_of (Context.Theory thy) (*FIXME*) locale)) else I) config [];
     val _ = verbose_message lthy (Pretty.string_of
       (Pretty.big_list ("Checking goals: ") (map (Syntax.pretty_term lthy o fst) goals)))
   in