Merge.
authorblanchet
Wed, 04 Mar 2009 10:45:52 +0100
changeset 30240 5b25fee0362c
parent 30239 179ff9cb160b
child 30241 3a1aef73b2b2
Merge.
doc-src/IsarImplementation/Thy/ML.thy
doc-src/IsarImplementation/Thy/ROOT.ML
doc-src/IsarImplementation/Thy/document/ML.tex
doc-src/IsarImplementation/Thy/document/session.tex
doc-src/IsarImplementation/implementation.tex
doc-src/IsarImplementation/style.sty
doc-src/IsarRef/IsaMakefile
doc-src/IsarRef/Makefile
doc-src/IsarRef/Thy/Document_Preparation.thy
doc-src/IsarRef/Thy/Generic.thy
doc-src/IsarRef/Thy/HOLCF_Specific.thy
doc-src/IsarRef/Thy/HOL_Specific.thy
doc-src/IsarRef/Thy/Inner_Syntax.thy
doc-src/IsarRef/Thy/Introduction.thy
doc-src/IsarRef/Thy/ML_Tactic.thy
doc-src/IsarRef/Thy/Misc.thy
doc-src/IsarRef/Thy/Outer_Syntax.thy
doc-src/IsarRef/Thy/Proof.thy
doc-src/IsarRef/Thy/Quick_Reference.thy
doc-src/IsarRef/Thy/ROOT-HOLCF.ML
doc-src/IsarRef/Thy/ROOT-ZF.ML
doc-src/IsarRef/Thy/ROOT.ML
doc-src/IsarRef/Thy/Spec.thy
doc-src/IsarRef/Thy/Symbols.thy
doc-src/IsarRef/Thy/ZF_Specific.thy
doc-src/IsarRef/Thy/document/Document_Preparation.tex
doc-src/IsarRef/Thy/document/Generic.tex
doc-src/IsarRef/Thy/document/HOLCF_Specific.tex
doc-src/IsarRef/Thy/document/HOL_Specific.tex
doc-src/IsarRef/Thy/document/Inner_Syntax.tex
doc-src/IsarRef/Thy/document/Introduction.tex
doc-src/IsarRef/Thy/document/ML_Tactic.tex
doc-src/IsarRef/Thy/document/Misc.tex
doc-src/IsarRef/Thy/document/Outer_Syntax.tex
doc-src/IsarRef/Thy/document/Proof.tex
doc-src/IsarRef/Thy/document/Quick_Reference.tex
doc-src/IsarRef/Thy/document/Spec.tex
doc-src/IsarRef/Thy/document/Symbols.tex
doc-src/IsarRef/Thy/document/ZF_Specific.tex
doc-src/IsarRef/isar-ref.tex
doc-src/IsarRef/style.sty
doc-src/Ref/Makefile
doc-src/Ref/classical.tex
doc-src/Ref/defining.tex
doc-src/Ref/introduction.tex
doc-src/Ref/ref.tex
doc-src/Ref/simplifier.tex
doc-src/Ref/substitution.tex
doc-src/Ref/syntax.tex
doc-src/Ref/tactic.tex
doc-src/Ref/tctical.tex
doc-src/Ref/theories.tex
doc-src/Ref/thm.tex
doc-src/System/Thy/Basics.thy
doc-src/System/Thy/Presentation.thy
doc-src/System/Thy/document/Basics.tex
doc-src/System/Thy/document/Presentation.tex
doc-src/System/system.tex
doc-src/TutorialI/Types/Numbers.thy
doc-src/TutorialI/Types/document/Numbers.tex
doc-src/TutorialI/Types/numerics.tex
doc-src/ZF/FOL.tex
doc-src/antiquote_setup.ML
doc-src/isar.sty
doc-src/manual.bib
doc-src/more_antiquote.ML
doc/Contents
etc/settings
lib/Tools/codegen
src/FOL/IFOL.thy
src/FOL/IsaMakefile
src/FOL/ex/ROOT.ML
src/FOLP/simp.ML
src/HOL/Algebra/Coset.thy
src/HOL/Algebra/Exponent.thy
src/HOL/Algebra/Sylow.thy
src/HOL/Algebra/poly/UnivPoly2.thy
src/HOL/Arith_Tools.thy
src/HOL/Complex_Main.thy
src/HOL/Decision_Procs/Approximation.thy
src/HOL/Decision_Procs/Cooper.thy
src/HOL/Decision_Procs/Ferrack.thy
src/HOL/Decision_Procs/MIR.thy
src/HOL/Decision_Procs/cooper_tac.ML
src/HOL/Decision_Procs/ferrack_tac.ML
src/HOL/Decision_Procs/mir_tac.ML
src/HOL/Deriv.thy
src/HOL/Divides.thy
src/HOL/Equiv_Relations.thy
src/HOL/Extraction/Euclid.thy
src/HOL/Fact.thy
src/HOL/GCD.thy
src/HOL/Groebner_Basis.thy
src/HOL/HOL.thy
src/HOL/Hoare/Arith2.thy
src/HOL/Import/lazy_seq.ML
src/HOL/Import/proof_kernel.ML
src/HOL/Induct/Common_Patterns.thy
src/HOL/Induct/LList.thy
src/HOL/Induct/QuoDataType.thy
src/HOL/Induct/QuoNestedDataType.thy
src/HOL/Induct/SList.thy
src/HOL/Int.thy
src/HOL/IntDiv.thy
src/HOL/Integration.thy
src/HOL/IsaMakefile
src/HOL/Library/Abstract_Rat.thy
src/HOL/Library/Boolean_Algebra.thy
src/HOL/Library/Char_nat.thy
src/HOL/Library/Code_Char.thy
src/HOL/Library/Coinductive_List.thy
src/HOL/Library/Determinants.thy
src/HOL/Library/Enum.thy
src/HOL/Library/Euclidean_Space.thy
src/HOL/Library/Float.thy
src/HOL/Library/Fundamental_Theorem_Algebra.thy
src/HOL/Library/Library.thy
src/HOL/Library/Numeral_Type.thy
src/HOL/Library/Order_Relation.thy
src/HOL/Library/Permutations.thy
src/HOL/Library/Pocklington.thy
src/HOL/Library/Primes.thy
src/HOL/Library/Word.thy
src/HOL/Library/Zorn.thy
src/HOL/Library/reflection.ML
src/HOL/List.thy
src/HOL/MacLaurin.thy
src/HOL/MetisExamples/Tarski.thy
src/HOL/NSA/NSA.thy
src/HOL/NSA/StarDef.thy
src/HOL/Nat.thy
src/HOL/NatBin.thy
src/HOL/Nominal/Examples/Fsub.thy
src/HOL/Nominal/Nominal.thy
src/HOL/Nominal/nominal_atoms.ML
src/HOL/Nominal/nominal_induct.ML
src/HOL/Nominal/nominal_inductive.ML
src/HOL/Nominal/nominal_inductive2.ML
src/HOL/Nominal/nominal_package.ML
src/HOL/Nominal/nominal_primrec.ML
src/HOL/Nominal/nominal_thmdecls.ML
src/HOL/NumberTheory/Chinese.thy
src/HOL/NumberTheory/Euler.thy
src/HOL/NumberTheory/EulerFermat.thy
src/HOL/NumberTheory/Gauss.thy
src/HOL/NumberTheory/Int2.thy
src/HOL/NumberTheory/IntPrimes.thy
src/HOL/NumberTheory/Quadratic_Reciprocity.thy
src/HOL/NumberTheory/Residues.thy
src/HOL/NumberTheory/WilsonBij.thy
src/HOL/NumberTheory/WilsonRuss.thy
src/HOL/Orderings.thy
src/HOL/Parity.thy
src/HOL/Plain.thy
src/HOL/Power.thy
src/HOL/Presburger.thy
src/HOL/RComplete.thy
src/HOL/ROOT.ML
src/HOL/Rational.thy
src/HOL/RealDef.thy
src/HOL/RealPow.thy
src/HOL/RealVector.thy
src/HOL/Relation.thy
src/HOL/Relation_Power.thy
src/HOL/Ring_and_Field.thy
src/HOL/SEQ.thy
src/HOL/Series.thy
src/HOL/SetInterval.thy
src/HOL/Tools/Qelim/langford.ML
src/HOL/Tools/Qelim/presburger.ML
src/HOL/Tools/TFL/post.ML
src/HOL/Tools/TFL/rules.ML
src/HOL/Tools/TFL/tfl.ML
src/HOL/Tools/atp_wrapper.ML
src/HOL/Tools/datatype_abs_proofs.ML
src/HOL/Tools/datatype_aux.ML
src/HOL/Tools/datatype_codegen.ML
src/HOL/Tools/datatype_package.ML
src/HOL/Tools/datatype_prop.ML
src/HOL/Tools/datatype_realizer.ML
src/HOL/Tools/datatype_rep_proofs.ML
src/HOL/Tools/function_package/fundef_common.ML
src/HOL/Tools/function_package/fundef_package.ML
src/HOL/Tools/function_package/scnp_solve.ML
src/HOL/Tools/function_package/size.ML
src/HOL/Tools/inductive_codegen.ML
src/HOL/Tools/inductive_package.ML
src/HOL/Tools/inductive_realizer.ML
src/HOL/Tools/inductive_set_package.ML
src/HOL/Tools/int_factor_simprocs.ML
src/HOL/Tools/lin_arith.ML
src/HOL/Tools/meson.ML
src/HOL/Tools/metis_tools.ML
src/HOL/Tools/old_primrec_package.ML
src/HOL/Tools/primrec_package.ML
src/HOL/Tools/recdef_package.ML
src/HOL/Tools/recfun_codegen.ML
src/HOL/Tools/record_package.ML
src/HOL/Tools/refute.ML
src/HOL/Tools/res_atp.ML
src/HOL/Tools/res_axioms.ML
src/HOL/Tools/res_clause.ML
src/HOL/Tools/res_hol_clause.ML
src/HOL/Tools/res_reconstruct.ML
src/HOL/Tools/sat_solver.ML
src/HOL/Tools/simpdata.ML
src/HOL/Tools/specification_package.ML
src/HOL/Transcendental.thy
src/HOL/Transitive_Closure.thy
src/HOL/UNITY/ListOrder.thy
src/HOL/UNITY/ProgressSets.thy
src/HOL/UNITY/UNITY.thy
src/HOL/Word/BinGeneral.thy
src/HOL/Word/Num_Lemmas.thy
src/HOL/Word/WordGenLib.thy
src/HOL/Word/WordShift.thy
src/HOL/ZF/Games.thy
src/HOL/ex/ApproximationEx.thy
src/HOL/ex/Eval_Examples.thy
src/HOL/ex/Numeral.thy
src/HOL/ex/ROOT.ML
src/HOL/ex/Tarski.thy
src/HOL/ex/ThreeDivides.thy
src/HOLCF/ConvexPD.thy
src/HOLCF/Fixrec.thy
src/HOLCF/IsaMakefile
src/HOLCF/LowerPD.thy
src/HOLCF/Tools/domain/domain_axioms.ML
src/HOLCF/Tools/domain/domain_library.ML
src/HOLCF/Tools/domain/domain_syntax.ML
src/HOLCF/Tools/fixrec_package.ML
src/HOLCF/UpperPD.thy
src/HOLCF/ex/Fixrec_ex.thy
src/HOLCF/ex/ROOT.ML
src/Provers/README
src/Provers/blast.ML
src/Provers/clasimp.ML
src/Provers/classical.ML
src/Provers/order.ML
src/Provers/trancl.ML
src/Provers/typedsimp.ML
src/Pure/General/binding.ML
src/Pure/General/markup.ML
src/Pure/General/name_space.ML
src/Pure/General/output.ML
src/Pure/General/swing.scala
src/Pure/IsaMakefile
src/Pure/Isar/ROOT.ML
src/Pure/Isar/args.ML
src/Pure/Isar/attrib.ML
src/Pure/Isar/calculation.ML
src/Pure/Isar/class.ML
src/Pure/Isar/class_target.ML
src/Pure/Isar/code.ML
src/Pure/Isar/code_unit.ML
src/Pure/Isar/constdefs.ML
src/Pure/Isar/element.ML
src/Pure/Isar/expression.ML
src/Pure/Isar/isar_cmd.ML
src/Pure/Isar/isar_syn.ML
src/Pure/Isar/local_defs.ML
src/Pure/Isar/locale.ML
src/Pure/Isar/method.ML
src/Pure/Isar/obtain.ML
src/Pure/Isar/outer_parse.ML
src/Pure/Isar/proof.ML
src/Pure/Isar/proof_context.ML
src/Pure/Isar/specification.ML
src/Pure/Isar/theory_target.ML
src/Pure/ML-Systems/mosml.ML
src/Pure/ML-Systems/polyml-experimental.ML
src/Pure/ML-Systems/polyml_common.ML
src/Pure/ML-Systems/smlnj.ML
src/Pure/ML/ml_antiquote.ML
src/Pure/ML/ml_syntax.ML
src/Pure/Proof/proofchecker.ML
src/Pure/Proof/reconstruct.ML
src/Pure/ProofGeneral/README
src/Pure/README
src/Pure/ROOT.ML
src/Pure/Syntax/parser.ML
src/Pure/Syntax/syn_ext.ML
src/Pure/Syntax/syn_trans.ML
src/Pure/Syntax/syntax.ML
src/Pure/Thy/thy_output.ML
src/Pure/Tools/ROOT.ML
src/Pure/axclass.ML
src/Pure/conv.ML
src/Pure/display.ML
src/Pure/envir.ML
src/Pure/library.ML
src/Pure/mk
src/Pure/more_thm.ML
src/Pure/proofterm.ML
src/Pure/pure_setup.ML
src/Pure/pure_thy.ML
src/Pure/sign.ML
src/Pure/sorts.ML
src/Pure/tctical.ML
src/Pure/term.ML
src/Pure/theory.ML
src/Pure/type_infer.ML
src/Tools/Compute_Oracle/Compute_Oracle.thy
src/Tools/Compute_Oracle/am_compiler.ML
src/Tools/Compute_Oracle/am_ghc.ML
src/Tools/Compute_Oracle/am_interpreter.ML
src/Tools/Compute_Oracle/am_sml.ML
src/Tools/Compute_Oracle/report.ML
src/Tools/IsaPlanner/README
src/Tools/IsaPlanner/isand.ML
src/Tools/IsaPlanner/rw_inst.ML
src/Tools/IsaPlanner/rw_tools.ML
src/Tools/IsaPlanner/zipper.ML
src/Tools/Metis/make-metis
src/Tools/Metis/metis.ML
src/Tools/README
src/Tools/atomize_elim.ML
src/Tools/auto_solve.ML
src/Tools/code/code_funcgr.ML
src/Tools/code/code_haskell.ML
src/Tools/code/code_name.ML
src/Tools/code/code_printer.ML
src/Tools/code/code_target.ML
src/Tools/code/code_thingol.ML
src/Tools/float.ML
src/Tools/induct.ML
src/Tools/induct_tacs.ML
src/Tools/nbe.ML
src/Tools/random_word.ML
src/Tools/rat.ML
src/ZF/Tools/datatype_package.ML
src/ZF/Tools/inductive_package.ML
src/ZF/Tools/primrec_package.ML
--- a/doc-src/IsarImplementation/Thy/ML.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/ML.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,6 @@
-(* $Id$ *)
-
-theory "ML" imports base begin
+theory "ML"
+imports Base
+begin
 
 chapter {* Advanced ML programming *}
 
--- a/doc-src/IsarImplementation/Thy/ROOT.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/ROOT.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,11 +1,11 @@
-
-(* $Id$ *)
-
-use_thy "prelim";
-use_thy "logic";
-use_thy "tactic";
-use_thy "proof";
-use_thy "isar";
-use_thy "locale";
-use_thy "integration";
-use_thy "ML";
+use_thys [
+  "Integration",
+  "Isar",
+  "Local_Theory",
+  "Logic",
+  "ML",
+  "Prelim",
+  "Proof",
+  "Syntax",
+  "Tactic"
+];
--- a/doc-src/IsarImplementation/Thy/document/ML.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/document/ML.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,14 +3,14 @@
 \def\isabellecontext{ML}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
 \isatagtheory
 \isacommand{theory}\isamarkupfalse%
-\ {\isachardoublequoteopen}ML{\isachardoublequoteclose}\ \isakeyword{imports}\ base\ \isakeyword{begin}%
+\ {\isachardoublequoteopen}ML{\isachardoublequoteclose}\isanewline
+\isakeyword{imports}\ Base\isanewline
+\isakeyword{begin}%
 \endisatagtheory
 {\isafoldtheory}%
 %
@@ -275,9 +275,9 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{NAMED\_CRITICAL}\verb|NAMED_CRITICAL: string -> (unit -> 'a) -> 'a| \\
-  \indexml{CRITICAL}\verb|CRITICAL: (unit -> 'a) -> 'a| \\
-  \indexml{setmp}\verb|setmp: 'a ref -> 'a -> ('b -> 'c) -> 'b -> 'c| \\
+  \indexdef{}{ML}{NAMED\_CRITICAL}\verb|NAMED_CRITICAL: string -> (unit -> 'a) -> 'a| \\
+  \indexdef{}{ML}{CRITICAL}\verb|CRITICAL: (unit -> 'a) -> 'a| \\
+  \indexdef{}{ML}{setmp}\verb|setmp: 'a ref -> 'a -> ('b -> 'c) -> 'b -> 'c| \\
   \end{mldecls}
 
   \begin{description}
@@ -331,7 +331,7 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{op |$>$ }\verb|op |\verb,|,\verb|> : 'a * ('a -> 'b) -> 'b| \\
+  \indexdef{}{ML}{op $\mid$$>$ }\verb|op |\verb,|,\verb|> : 'a * ('a -> 'b) -> 'b| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -410,10 +410,10 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{op |-$>$ }\verb|op |\verb,|,\verb|-> : ('c * 'a) * ('c -> 'a -> 'b) -> 'b| \\
-  \indexml{op |$>$$>$ }\verb|op |\verb,|,\verb|>> : ('a * 'c) * ('a -> 'b) -> 'b * 'c| \\
-  \indexml{op ||$>$ }\verb|op |\verb,|,\verb||\verb,|,\verb|> : ('c * 'a) * ('a -> 'b) -> 'c * 'b| \\
-  \indexml{op ||$>$$>$ }\verb|op |\verb,|,\verb||\verb,|,\verb|>> : ('c * 'a) * ('a -> 'd * 'b) -> ('c * 'd) * 'b| \\
+  \indexdef{}{ML}{op $\mid$-$>$ }\verb|op |\verb,|,\verb|-> : ('c * 'a) * ('c -> 'a -> 'b) -> 'b| \\
+  \indexdef{}{ML}{op $\mid$$>$$>$ }\verb|op |\verb,|,\verb|>> : ('a * 'c) * ('a -> 'b) -> 'b * 'c| \\
+  \indexdef{}{ML}{op $\mid$$\mid$$>$ }\verb|op |\verb,|,\verb||\verb,|,\verb|> : ('c * 'a) * ('a -> 'b) -> 'c * 'b| \\
+  \indexdef{}{ML}{op $\mid$$\mid$$>$$>$ }\verb|op |\verb,|,\verb||\verb,|,\verb|>> : ('c * 'a) * ('a -> 'd * 'b) -> ('c * 'd) * 'b| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -483,8 +483,8 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{fold}\verb|fold: ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b| \\
-  \indexml{fold\_map}\verb|fold_map: ('a -> 'b -> 'c * 'b) -> 'a list -> 'b -> 'c list * 'b| \\
+  \indexdef{}{ML}{fold}\verb|fold: ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b| \\
+  \indexdef{}{ML}{fold\_map}\verb|fold_map: ('a -> 'b -> 'c * 'b) -> 'a list -> 'b -> 'c list * 'b| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -545,11 +545,11 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{op \#$>$ }\verb|op #> : ('a -> 'b) * ('b -> 'c) -> 'a -> 'c| \\
-  \indexml{op \#-$>$ }\verb|op #-> : ('a -> 'c * 'b) * ('c -> 'b -> 'd) -> 'a -> 'd| \\
-  \indexml{op \#$>$$>$ }\verb|op #>> : ('a -> 'c * 'b) * ('c -> 'd) -> 'a -> 'd * 'b| \\
-  \indexml{op \#\#$>$ }\verb|op ##> : ('a -> 'c * 'b) * ('b -> 'd) -> 'a -> 'c * 'd| \\
-  \indexml{op \#\#$>$$>$ }\verb|op ##>> : ('a -> 'c * 'b) * ('b -> 'e * 'd) -> 'a -> ('c * 'e) * 'd| \\
+  \indexdef{}{ML}{op \#$>$ }\verb|op #> : ('a -> 'b) * ('b -> 'c) -> 'a -> 'c| \\
+  \indexdef{}{ML}{op \#-$>$ }\verb|op #-> : ('a -> 'c * 'b) * ('c -> 'b -> 'd) -> 'a -> 'd| \\
+  \indexdef{}{ML}{op \#$>$$>$ }\verb|op #>> : ('a -> 'c * 'b) * ('c -> 'd) -> 'a -> 'd * 'b| \\
+  \indexdef{}{ML}{op \#\#$>$ }\verb|op ##> : ('a -> 'c * 'b) * ('b -> 'd) -> 'a -> 'c * 'd| \\
+  \indexdef{}{ML}{op \#\#$>$$>$ }\verb|op ##>> : ('a -> 'c * 'b) * ('b -> 'e * 'd) -> 'a -> ('c * 'e) * 'd| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -576,8 +576,8 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{op ` }\verb|op ` : ('b -> 'a) -> 'b -> 'a * 'b| \\
-  \indexml{tap}\verb|tap: ('b -> 'a) -> 'b -> 'b| \\
+  \indexdef{}{ML}{op ` }\verb|op ` : ('b -> 'a) -> 'b -> 'a * 'b| \\
+  \indexdef{}{ML}{tap}\verb|tap: ('b -> 'a) -> 'b -> 'b| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -619,14 +619,14 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{is\_some}\verb|is_some: 'a option -> bool| \\
-  \indexml{is\_none}\verb|is_none: 'a option -> bool| \\
-  \indexml{the}\verb|the: 'a option -> 'a| \\
-  \indexml{these}\verb|these: 'a list option -> 'a list| \\
-  \indexml{the\_list}\verb|the_list: 'a option -> 'a list| \\
-  \indexml{the\_default}\verb|the_default: 'a -> 'a option -> 'a| \\
-  \indexml{try}\verb|try: ('a -> 'b) -> 'a -> 'b option| \\
-  \indexml{can}\verb|can: ('a -> 'b) -> 'a -> bool| \\
+  \indexdef{}{ML}{is\_some}\verb|is_some: 'a option -> bool| \\
+  \indexdef{}{ML}{is\_none}\verb|is_none: 'a option -> bool| \\
+  \indexdef{}{ML}{the}\verb|the: 'a option -> 'a| \\
+  \indexdef{}{ML}{these}\verb|these: 'a list option -> 'a list| \\
+  \indexdef{}{ML}{the\_list}\verb|the_list: 'a option -> 'a list| \\
+  \indexdef{}{ML}{the\_default}\verb|the_default: 'a -> 'a option -> 'a| \\
+  \indexdef{}{ML}{try}\verb|try: ('a -> 'b) -> 'a -> 'b option| \\
+  \indexdef{}{ML}{can}\verb|can: ('a -> 'b) -> 'a -> bool| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -659,10 +659,10 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexml{member}\verb|member: ('b * 'a -> bool) -> 'a list -> 'b -> bool| \\
-  \indexml{insert}\verb|insert: ('a * 'a -> bool) -> 'a -> 'a list -> 'a list| \\
-  \indexml{remove}\verb|remove: ('b * 'a -> bool) -> 'b -> 'a list -> 'a list| \\
-  \indexml{merge}\verb|merge: ('a * 'a -> bool) -> 'a list * 'a list -> 'a list| \\
+  \indexdef{}{ML}{member}\verb|member: ('b * 'a -> bool) -> 'a list -> 'b -> bool| \\
+  \indexdef{}{ML}{insert}\verb|insert: ('a * 'a -> bool) -> 'a -> 'a list -> 'a list| \\
+  \indexdef{}{ML}{remove}\verb|remove: ('b * 'a -> bool) -> 'b -> 'a list -> 'a list| \\
+  \indexdef{}{ML}{merge}\verb|merge: ('a * 'a -> bool) -> 'a list * 'a list -> 'a list| \\
   \end{mldecls}%
 \end{isamarkuptext}%
 \isamarkuptrue%
@@ -690,19 +690,19 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexmlexception{AList.DUP}\verb|exception AList.DUP| \\
-  \indexml{AList.lookup}\verb|AList.lookup: ('a * 'b -> bool) -> ('b * 'c) list -> 'a -> 'c option| \\
-  \indexml{AList.defined}\verb|AList.defined: ('a * 'b -> bool) -> ('b * 'c) list -> 'a -> bool| \\
-  \indexml{AList.update}\verb|AList.update: ('a * 'a -> bool) -> ('a * 'b) -> ('a * 'b) list -> ('a * 'b) list| \\
-  \indexml{AList.default}\verb|AList.default: ('a * 'a -> bool) -> ('a * 'b) -> ('a * 'b) list -> ('a * 'b) list| \\
-  \indexml{AList.delete}\verb|AList.delete: ('a * 'b -> bool) -> 'a -> ('b * 'c) list -> ('b * 'c) list| \\
-  \indexml{AList.map\_entry}\verb|AList.map_entry: ('a * 'b -> bool) -> 'a|\isasep\isanewline%
+  \indexdef{}{ML exception}{AList.DUP}\verb|exception AList.DUP| \\
+  \indexdef{}{ML}{AList.lookup}\verb|AList.lookup: ('a * 'b -> bool) -> ('b * 'c) list -> 'a -> 'c option| \\
+  \indexdef{}{ML}{AList.defined}\verb|AList.defined: ('a * 'b -> bool) -> ('b * 'c) list -> 'a -> bool| \\
+  \indexdef{}{ML}{AList.update}\verb|AList.update: ('a * 'a -> bool) -> ('a * 'b) -> ('a * 'b) list -> ('a * 'b) list| \\
+  \indexdef{}{ML}{AList.default}\verb|AList.default: ('a * 'a -> bool) -> ('a * 'b) -> ('a * 'b) list -> ('a * 'b) list| \\
+  \indexdef{}{ML}{AList.delete}\verb|AList.delete: ('a * 'b -> bool) -> 'a -> ('b * 'c) list -> ('b * 'c) list| \\
+  \indexdef{}{ML}{AList.map\_entry}\verb|AList.map_entry: ('a * 'b -> bool) -> 'a|\isasep\isanewline%
 \verb|    -> ('c -> 'c) -> ('b * 'c) list -> ('b * 'c) list| \\
-  \indexml{AList.map\_default}\verb|AList.map_default: ('a * 'a -> bool) -> 'a * 'b -> ('b -> 'b)|\isasep\isanewline%
+  \indexdef{}{ML}{AList.map\_default}\verb|AList.map_default: ('a * 'a -> bool) -> 'a * 'b -> ('b -> 'b)|\isasep\isanewline%
 \verb|    -> ('a * 'b) list -> ('a * 'b) list| \\
-  \indexml{AList.join}\verb|AList.join: ('a * 'a -> bool) -> ('a -> 'b * 'b -> 'b) (*exception DUP*)|\isasep\isanewline%
+  \indexdef{}{ML}{AList.join}\verb|AList.join: ('a * 'a -> bool) -> ('a -> 'b * 'b -> 'b) (*exception DUP*)|\isasep\isanewline%
 \verb|    -> ('a * 'b) list * ('a * 'b) list -> ('a * 'b) list (*exception AList.DUP*)| \\
-  \indexml{AList.merge}\verb|AList.merge: ('a * 'a -> bool) -> ('b * 'b -> bool)|\isasep\isanewline%
+  \indexdef{}{ML}{AList.merge}\verb|AList.merge: ('a * 'a -> bool) -> ('b * 'b -> bool)|\isasep\isanewline%
 \verb|    -> ('a * 'b) list * ('a * 'b) list -> ('a * 'b) list (*exception AList.DUP*)|
   \end{mldecls}%
 \end{isamarkuptext}%
@@ -732,25 +732,25 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-  \indexmltype{'a Symtab.table}\verb|type 'a Symtab.table| \\
-  \indexmlexception{Symtab.DUP}\verb|exception Symtab.DUP of string| \\
-  \indexmlexception{Symtab.SAME}\verb|exception Symtab.SAME| \\
-  \indexmlexception{Symtab.UNDEF}\verb|exception Symtab.UNDEF of string| \\
-  \indexml{Symtab.empty}\verb|Symtab.empty: 'a Symtab.table| \\
-  \indexml{Symtab.lookup}\verb|Symtab.lookup: 'a Symtab.table -> string -> 'a option| \\
-  \indexml{Symtab.defined}\verb|Symtab.defined: 'a Symtab.table -> string -> bool| \\
-  \indexml{Symtab.update}\verb|Symtab.update: (string * 'a) -> 'a Symtab.table -> 'a Symtab.table| \\
-  \indexml{Symtab.default}\verb|Symtab.default: string * 'a -> 'a Symtab.table -> 'a Symtab.table| \\
-  \indexml{Symtab.delete}\verb|Symtab.delete: string|\isasep\isanewline%
+  \indexdef{}{ML type}{'a Symtab.table}\verb|type 'a Symtab.table| \\
+  \indexdef{}{ML exception}{Symtab.DUP}\verb|exception Symtab.DUP of string| \\
+  \indexdef{}{ML exception}{Symtab.SAME}\verb|exception Symtab.SAME| \\
+  \indexdef{}{ML exception}{Symtab.UNDEF}\verb|exception Symtab.UNDEF of string| \\
+  \indexdef{}{ML}{Symtab.empty}\verb|Symtab.empty: 'a Symtab.table| \\
+  \indexdef{}{ML}{Symtab.lookup}\verb|Symtab.lookup: 'a Symtab.table -> string -> 'a option| \\
+  \indexdef{}{ML}{Symtab.defined}\verb|Symtab.defined: 'a Symtab.table -> string -> bool| \\
+  \indexdef{}{ML}{Symtab.update}\verb|Symtab.update: (string * 'a) -> 'a Symtab.table -> 'a Symtab.table| \\
+  \indexdef{}{ML}{Symtab.default}\verb|Symtab.default: string * 'a -> 'a Symtab.table -> 'a Symtab.table| \\
+  \indexdef{}{ML}{Symtab.delete}\verb|Symtab.delete: string|\isasep\isanewline%
 \verb|    -> 'a Symtab.table -> 'a Symtab.table (*exception Symtab.UNDEF*)| \\
-  \indexml{Symtab.map\_entry}\verb|Symtab.map_entry: string -> ('a -> 'a)|\isasep\isanewline%
+  \indexdef{}{ML}{Symtab.map\_entry}\verb|Symtab.map_entry: string -> ('a -> 'a)|\isasep\isanewline%
 \verb|    -> 'a Symtab.table -> 'a Symtab.table| \\
-  \indexml{Symtab.map\_default}\verb|Symtab.map_default: (string * 'a) -> ('a -> 'a)|\isasep\isanewline%
+  \indexdef{}{ML}{Symtab.map\_default}\verb|Symtab.map_default: (string * 'a) -> ('a -> 'a)|\isasep\isanewline%
 \verb|    -> 'a Symtab.table -> 'a Symtab.table| \\
-  \indexml{Symtab.join}\verb|Symtab.join: (string -> 'a * 'a -> 'a) (*exception Symtab.DUP/Symtab.SAME*)|\isasep\isanewline%
+  \indexdef{}{ML}{Symtab.join}\verb|Symtab.join: (string -> 'a * 'a -> 'a) (*exception Symtab.DUP/Symtab.SAME*)|\isasep\isanewline%
 \verb|    -> 'a Symtab.table * 'a Symtab.table|\isasep\isanewline%
 \verb|    -> 'a Symtab.table (*exception Symtab.DUP*)| \\
-  \indexml{Symtab.merge}\verb|Symtab.merge: ('a * 'a -> bool)|\isasep\isanewline%
+  \indexdef{}{ML}{Symtab.merge}\verb|Symtab.merge: ('a * 'a -> bool)|\isasep\isanewline%
 \verb|    -> 'a Symtab.table * 'a Symtab.table|\isasep\isanewline%
 \verb|    -> 'a Symtab.table (*exception Symtab.DUP*)|
   \end{mldecls}%
--- a/doc-src/IsarImplementation/Thy/document/session.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarImplementation/Thy/document/session.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,21 +1,23 @@
-\input{base.tex}
-
-\input{prelim.tex}
+\input{Base.tex}
 
-\input{logic.tex}
-
-\input{tactic.tex}
+\input{Integration.tex}
 
-\input{proof.tex}
-
-\input{isar.tex}
+\input{Isar.tex}
 
-\input{locale.tex}
+\input{Local_Theory.tex}
 
-\input{integration.tex}
+\input{Logic.tex}
 
 \input{ML.tex}
 
+\input{Prelim.tex}
+
+\input{Proof.tex}
+
+\input{Syntax.tex}
+
+\input{Tactic.tex}
+
 %%% Local Variables:
 %%% mode: latex
 %%% TeX-master: "root"
--- a/doc-src/IsarImplementation/implementation.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarImplementation/implementation.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-%% $Id$
-
 \documentclass[12pt,a4paper,fleqn]{report}
 \usepackage{latexsym,graphicx}
 \usepackage[refpage]{nomencl}
@@ -23,9 +20,6 @@
   and Larry Paulson
 }
 
-%FIXME
-%\makeglossary
-
 \makeindex
 
 
@@ -71,28 +65,24 @@
 \listoffigures
 \clearfirst
 
-%\input{intro.tex}
-\input{Thy/document/prelim.tex}
-\input{Thy/document/logic.tex}
-\input{Thy/document/tactic.tex}
-\input{Thy/document/proof.tex}
-\input{Thy/document/isar.tex}
-\input{Thy/document/locale.tex}
-\input{Thy/document/integration.tex}
+\input{Thy/document/Prelim.tex}
+\input{Thy/document/Logic.tex}
+\input{Thy/document/Tactic.tex}
+\input{Thy/document/Proof.tex}
+\input{Thy/document/Syntax.tex}
+\input{Thy/document/Isar.tex}
+\input{Thy/document/Local_Theory.tex}
+\input{Thy/document/Integration.tex}
 
 \appendix
 \input{Thy/document/ML.tex}
 
 \begingroup
 \tocentry{\bibname}
-\bibliographystyle{plain} \small\raggedright\frenchspacing
+\bibliographystyle{abbrv} \small\raggedright\frenchspacing
 \bibliography{../manual}
 \endgroup
 
-%FIXME
-%\tocentry{\glossaryname}
-%\printglossary
-
 \tocentry{\indexname}
 \printindex
 
--- a/doc-src/IsarImplementation/style.sty	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarImplementation/style.sty	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-%% $Id$
-
 %% toc
 \newcommand{\tocentry}[1]{\cleardoublepage\phantomsection\addcontentsline{toc}{chapter}{#1}
 \@mkboth{\MakeUppercase{#1}}{\MakeUppercase{#1}}}
@@ -10,24 +7,12 @@
 \newcommand{\chref}[1]{chapter~\ref{#1}}
 \newcommand{\figref}[1]{figure~\ref{#1}}
 
-%% glossary
-\renewcommand{\glossary}[2]{\nomenclature{\bf #1}{#2}}
-\newcommand{\seeglossary}[1]{\emph{#1}}
-\newcommand{\glossaryname}{Glossary}
-\renewcommand{\nomname}{\glossaryname}
-\renewcommand{\pagedeclaration}[1]{\nobreak\quad\dotfill~page~\bold{#1}}
-
-%% index
-\newcommand{\indexml}[1]{\index{\emph{#1}|bold}}
-\newcommand{\indexmlexception}[1]{\index{\emph{#1} (exception)|bold}}
-\newcommand{\indexmltype}[1]{\index{\emph{#1} (type)|bold}}
-\newcommand{\indexmlstructure}[1]{\index{\emph{#1} (structure)|bold}}
-\newcommand{\indexmlfunctor}[1]{\index{\emph{#1} (functor)|bold}}
-
 %% math
 \newcommand{\text}[1]{\mbox{#1}}
 \newcommand{\isasymvartheta}{\isamath{\theta}}
-\newcommand{\isactrlvec}[1]{\emph{$\overline{#1}$}}
+\newcommand{\isactrlvec}[1]{\emph{$\vec{#1}$}}
+\newcommand{\isactrlBG}{\isacharbackquoteopen}
+\newcommand{\isactrlEN}{\isacharbackquoteclose}
 
 \setcounter{secnumdepth}{2} \setcounter{tocdepth}{2}
 
@@ -49,6 +34,10 @@
 \newcommand{\isasymtype}{\minorcmd{type}}
 \newcommand{\isasymval}{\minorcmd{val}}
 
+\newcommand{\isasymFIX}{\isakeyword{fix}}
+\newcommand{\isasymASSUME}{\isakeyword{assume}}
+\newcommand{\isasymDEFINE}{\isakeyword{define}}
+\newcommand{\isasymNOTE}{\isakeyword{note}}
 \newcommand{\isasymGUESS}{\isakeyword{guess}}
 \newcommand{\isasymOBTAIN}{\isakeyword{obtain}}
 \newcommand{\isasymTHEORY}{\isakeyword{theory}}
@@ -61,6 +50,7 @@
 
 \isabellestyle{it}
 
+
 %%% Local Variables: 
 %%% mode: latex
 %%% TeX-master: "implementation"
--- a/doc-src/IsarRef/IsaMakefile	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/IsaMakefile	Wed Mar 04 10:45:52 2009 +0100
@@ -22,10 +22,11 @@
 HOL-IsarRef: $(LOG)/HOL-IsarRef.gz
 
 $(LOG)/HOL-IsarRef.gz: Thy/ROOT.ML ../antiquote_setup.ML		\
-  Thy/Inner_Syntax.thy Thy/Introduction.thy Thy/Outer_Syntax.thy	\
-  Thy/Spec.thy Thy/Proof.thy Thy/Misc.thy Thy/Document_Preparation.thy	\
-  Thy/Generic.thy Thy/HOL_Specific.thy Thy/Quick_Reference.thy		\
-  Thy/Symbols.thy Thy/ML_Tactic.thy
+  Thy/First_Order_Logic.thy Thy/Framework.thy Thy/Inner_Syntax.thy	\
+  Thy/Introduction.thy Thy/Outer_Syntax.thy Thy/Spec.thy Thy/Proof.thy	\
+  Thy/Misc.thy Thy/Document_Preparation.thy Thy/Generic.thy		\
+  Thy/HOL_Specific.thy Thy/Quick_Reference.thy Thy/Symbols.thy		\
+  Thy/ML_Tactic.thy
 	@$(USEDIR) -s IsarRef HOL Thy
 
 
--- a/doc-src/IsarRef/Makefile	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Makefile	Wed Mar 04 10:45:52 2009 +0100
@@ -1,7 +1,3 @@
-#
-# $Id$
-#
-
 ## targets
 
 default: dvi
--- a/doc-src/IsarRef/Thy/Document_Preparation.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Document_Preparation.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Document_Preparation
 imports Main
 begin
--- a/doc-src/IsarRef/Thy/Generic.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Generic.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Generic
 imports Main
 begin
--- a/doc-src/IsarRef/Thy/HOLCF_Specific.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/HOLCF_Specific.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory HOLCF_Specific
 imports HOLCF
 begin
--- a/doc-src/IsarRef/Thy/HOL_Specific.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/HOL_Specific.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -771,6 +771,55 @@
 *}
 
 
+section {* Intuitionistic proof search *}
+
+text {*
+  \begin{matharray}{rcl}
+    @{method_def (HOL) iprover} & : & @{text method} \\
+  \end{matharray}
+
+  \begin{rail}
+    'iprover' ('!' ?) (rulemod *)
+    ;
+  \end{rail}
+
+  The @{method (HOL) iprover} method performs intuitionistic proof
+  search, depending on specifically declared rules from the context,
+  or given as explicit arguments.  Chained facts are inserted into the
+  goal before commencing proof search; ``@{method (HOL) iprover}@{text
+  "!"}''  means to include the current @{fact prems} as well.
+  
+  Rules need to be classified as @{attribute (Pure) intro},
+  @{attribute (Pure) elim}, or @{attribute (Pure) dest}; here the
+  ``@{text "!"}'' indicator refers to ``safe'' rules, which may be
+  applied aggressively (without considering back-tracking later).
+  Rules declared with ``@{text "?"}'' are ignored in proof search (the
+  single-step @{method rule} method still observes these).  An
+  explicit weight annotation may be given as well; otherwise the
+  number of rule premises will be taken into account here.
+*}
+
+
+section {* Coherent Logic *}
+
+text {*
+  \begin{matharray}{rcl}
+    @{method_def (HOL) "coherent"} & : & @{text method} \\
+  \end{matharray}
+
+  \begin{rail}
+    'coherent' thmrefs?
+    ;
+  \end{rail}
+
+  The @{method (HOL) coherent} method solves problems of
+  \emph{Coherent Logic} \cite{Bezem-Coquand:2005}, which covers
+  applications in confluence theory, lattice theory and projective
+  geometry.  See @{"file" "~~/src/HOL/ex/Coherent.thy"} for some
+  examples.
+*}
+
+
 section {* Invoking automated reasoning tools -- The Sledgehammer *}
 
 text {*
--- a/doc-src/IsarRef/Thy/Inner_Syntax.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Inner_Syntax.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Inner_Syntax
 imports Main
 begin
@@ -370,7 +368,7 @@
   \end{matharray}
 
   \begin{rail}
-    ('notation' | 'no\_notation') target? mode? (nameref structmixfix + 'and')
+    ('notation' | 'no\_notation') target? mode? \\ (nameref structmixfix + 'and')
     ;
   \end{rail}
 
@@ -525,13 +523,15 @@
     & @{text "|"} & @{text "tid  |  tvar  |  "}@{verbatim "_"} \\
     & @{text "|"} & @{text "tid"} @{verbatim "::"} @{text "sort  |  tvar  "}@{verbatim "::"} @{text "sort  |  "}@{verbatim "_"} @{verbatim "::"} @{text "sort"} \\
     & @{text "|"} & @{text "id  |  type\<^sup>(\<^sup>1\<^sup>0\<^sup>0\<^sup>0\<^sup>) id  |  "}@{verbatim "("} @{text type} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text type} @{verbatim ")"} @{text id} \\
-    & @{text "|"} & @{text "longid  |  type\<^sup>(\<^sup>1\<^sup>0\<^sup>0\<^sup>0\<^sup>) longid  |  "}@{verbatim "("} @{text type} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text type} @{verbatim ")"} @{text longid} \\
+    & @{text "|"} & @{text "longid  |  type\<^sup>(\<^sup>1\<^sup>0\<^sup>0\<^sup>0\<^sup>) longid"} \\
+    & @{text "|"} & @{verbatim "("} @{text type} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text type} @{verbatim ")"} @{text longid} \\
     & @{text "|"} & @{text "type\<^sup>(\<^sup>1\<^sup>)"} @{verbatim "=>"} @{text type} & @{text "(0)"} \\
     & @{text "|"} & @{text "type\<^sup>(\<^sup>1\<^sup>)"} @{text "\<Rightarrow>"} @{text type} & @{text "(0)"} \\
     & @{text "|"} & @{verbatim "["} @{text type} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text type} @{verbatim "]"} @{verbatim "=>"} @{text type} & @{text "(0)"} \\
     & @{text "|"} & @{verbatim "["} @{text type} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text type} @{verbatim "]"} @{text "\<Rightarrow>"} @{text type} & @{text "(0)"} \\\\
 
-  @{syntax_def (inner) sort} & = & @{text "id  |  longid  |  "}@{verbatim "{}"}@{text "  |  "}@{verbatim "{"} @{text "(id | longid)"} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text "(id | longid)"} @{verbatim "}"} \\
+  @{syntax_def (inner) sort} & = & @{text "id  |  longid  |  "}@{verbatim "{}"} \\
+    & @{text "|"} & @{verbatim "{"} @{text "(id | longid)"} @{verbatim ","} @{text "\<dots>"} @{verbatim ","} @{text "(id | longid)"} @{verbatim "}"} \\
   \end{supertabular}
   \end{center}
 
--- a/doc-src/IsarRef/Thy/Introduction.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Introduction.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Introduction
 imports Main
 begin
@@ -12,27 +10,27 @@
   The \emph{Isabelle} system essentially provides a generic
   infrastructure for building deductive systems (programmed in
   Standard ML), with a special focus on interactive theorem proving in
-  higher-order logics.  In the olden days even end-users would refer
-  to certain ML functions (goal commands, tactics, tacticals etc.) to
-  pursue their everyday theorem proving tasks
-  \cite{isabelle-intro,isabelle-ref}.
+  higher-order logics.  Many years ago, even end-users would refer to
+  certain ML functions (goal commands, tactics, tacticals etc.) to
+  pursue their everyday theorem proving tasks.
   
   In contrast \emph{Isar} provides an interpreted language environment
   of its own, which has been specifically tailored for the needs of
   theory and proof development.  Compared to raw ML, the Isabelle/Isar
   top-level provides a more robust and comfortable development
-  platform, with proper support for theory development graphs,
-  single-step transactions with unlimited undo, etc.  The
-  Isabelle/Isar version of the \emph{Proof~General} user interface
-  \cite{proofgeneral,Aspinall:TACAS:2000} provides an adequate
-  front-end for interactive theory and proof development in this
-  advanced theorem proving environment.
+  platform, with proper support for theory development graphs, managed
+  transactions with unlimited undo etc.  The Isabelle/Isar version of
+  the \emph{Proof~General} user interface
+  \cite{proofgeneral,Aspinall:TACAS:2000} provides a decent front-end
+  for interactive theory and proof development in this advanced
+  theorem proving environment, even though it is somewhat biased
+  towards old-style proof scripts.
 
   \medskip Apart from the technical advances over bare-bones ML
   programming, the main purpose of the Isar language is to provide a
   conceptually different view on machine-checked proofs
-  \cite{Wenzel:1999:TPHOL,Wenzel-PhD}.  ``Isar'' stands for
-  ``Intelligible semi-automated reasoning''.  Drawing from both the
+  \cite{Wenzel:1999:TPHOL,Wenzel-PhD}.  \emph{Isar} stands for
+  \emph{Intelligible semi-automated reasoning}.  Drawing from both the
   traditions of informal mathematical proof texts and high-level
   programming languages, Isar offers a versatile environment for
   structured formal proof documents.  Thus properly written Isar
@@ -47,12 +45,12 @@
   Despite its grand design of structured proof texts, Isar is able to
   assimilate the old tactical style as an ``improper'' sub-language.
   This provides an easy upgrade path for existing tactic scripts, as
-  well as additional means for interactive experimentation and
-  debugging of structured proofs.  Isabelle/Isar supports a broad
-  range of proof styles, both readable and unreadable ones.
+  well as some means for interactive experimentation and debugging of
+  structured proofs.  Isabelle/Isar supports a broad range of proof
+  styles, both readable and unreadable ones.
 
-  \medskip The Isabelle/Isar framework \cite{Wenzel:2006:Festschrift}
-  is generic and should work reasonably well for any Isabelle
+  \medskip The generic Isabelle/Isar framework (see
+  \chref{ch:isar-framework}) works reasonably well for any Isabelle
   object-logic that conforms to the natural deduction view of the
   Isabelle/Pure framework.  Specific language elements introduced by
   the major object-logics are described in \chref{ch:hol}
@@ -72,194 +70,4 @@
   context; other commands emulate old-style tactical theorem proving.
 *}
 
-
-section {* User interfaces *}
-
-subsection {* Terminal sessions *}
-
-text {*
-  The Isabelle \texttt{tty} tool provides a very interface for running
-  the Isar interaction loop, with some support for command line
-  editing.  For example:
-\begin{ttbox}
-isabelle tty\medskip
-{\out Welcome to Isabelle/HOL (Isabelle2008)}\medskip
-theory Foo imports Main begin;
-definition foo :: nat where "foo == 1";
-lemma "0 < foo" by (simp add: foo_def);
-end;
-\end{ttbox}
-
-  Any Isabelle/Isar command may be retracted by @{command undo}.
-  See the Isabelle/Isar Quick Reference (\appref{ap:refcard}) for a
-  comprehensive overview of available commands and other language
-  elements.
-*}
-
-
-subsection {* Emacs Proof General *}
-
-text {*
-  Plain TTY-based interaction as above used to be quite feasible with
-  traditional tactic based theorem proving, but developing Isar
-  documents really demands some better user-interface support.  The
-  Proof~General environment by David Aspinall
-  \cite{proofgeneral,Aspinall:TACAS:2000} offers a generic Emacs
-  interface for interactive theorem provers that organizes all the
-  cut-and-paste and forward-backward walk through the text in a very
-  neat way.  In Isabelle/Isar, the current position within a partial
-  proof document is equally important than the actual proof state.
-  Thus Proof~General provides the canonical working environment for
-  Isabelle/Isar, both for getting acquainted (e.g.\ by replaying
-  existing Isar documents) and for production work.
-*}
-
-
-subsubsection{* Proof~General as default Isabelle interface *}
-
-text {*
-  The Isabelle interface wrapper script provides an easy way to invoke
-  Proof~General (including XEmacs or GNU Emacs).  The default
-  configuration of Isabelle is smart enough to detect the
-  Proof~General distribution in several canonical places (e.g.\
-  @{verbatim "$ISABELLE_HOME/contrib/ProofGeneral"}).  Thus the
-  capital @{verbatim Isabelle} executable would already refer to the
-  @{verbatim "ProofGeneral/isar"} interface without further ado.  The
-  Isabelle interface script provides several options; pass @{verbatim
-  "-?"}  to see its usage.
-
-  With the proper Isabelle interface setup, Isar documents may now be edited by
-  visiting appropriate theory files, e.g.\ 
-\begin{ttbox}
-Isabelle \({\langle}isabellehome{\rangle}\)/src/HOL/Isar_examples/Summation.thy
-\end{ttbox}
-  Beginners may note the tool bar for navigating forward and backward
-  through the text (this depends on the local Emacs installation).
-  Consult the Proof~General documentation \cite{proofgeneral} for
-  further basic command sequences, in particular ``@{verbatim "C-c C-return"}''
-  and ``@{verbatim "C-c u"}''.
-
-  \medskip Proof~General may be also configured manually by giving
-  Isabelle settings like this (see also \cite{isabelle-sys}):
-
-\begin{ttbox}
-ISABELLE_INTERFACE=\$ISABELLE_HOME/contrib/ProofGeneral/isar/interface
-PROOFGENERAL_OPTIONS=""
-\end{ttbox}
-  You may have to change @{verbatim
-  "$ISABELLE_HOME/contrib/ProofGeneral"} to the actual installation
-  directory of Proof~General.
-
-  \medskip Apart from the Isabelle command line, defaults for
-  interface options may be given by the @{verbatim PROOFGENERAL_OPTIONS}
-  setting.  For example, the Emacs executable to be used may be
-  configured in Isabelle's settings like this:
-\begin{ttbox}
-PROOFGENERAL_OPTIONS="-p xemacs-mule"  
-\end{ttbox}
-
-  Occasionally, a user's @{verbatim "~/.emacs"} file contains code
-  that is incompatible with the (X)Emacs version used by
-  Proof~General, causing the interface startup to fail prematurely.
-  Here the @{verbatim "-u false"} option helps to get the interface
-  process up and running.  Note that additional Lisp customization
-  code may reside in @{verbatim "proofgeneral-settings.el"} of
-  @{verbatim "$ISABELLE_HOME/etc"} or @{verbatim
-  "$ISABELLE_HOME_USER/etc"}.
-*}
-
-
-subsubsection {* The X-Symbol package *}
-
-text {*
-  Proof~General incorporates a version of the Emacs X-Symbol package
-  \cite{x-symbol}, which handles proper mathematical symbols displayed
-  on screen.  Pass option @{verbatim "-x true"} to the Isabelle
-  interface script, or check the appropriate Proof~General menu
-  setting by hand.  The main challenge of getting X-Symbol to work
-  properly is the underlying (semi-automated) X11 font setup.
-
-  \medskip Using proper mathematical symbols in Isabelle theories can
-  be very convenient for readability of large formulas.  On the other
-  hand, the plain ASCII sources easily become somewhat unintelligible.
-  For example, @{text "\<Longrightarrow>"} would appear as @{verbatim "\<Longrightarrow>"} according
-  the default set of Isabelle symbols.  Nevertheless, the Isabelle
-  document preparation system (see \chref{ch:document-prep}) will be
-  happy to print non-ASCII symbols properly.  It is even possible to
-  invent additional notation beyond the display capabilities of Emacs
-  and X-Symbol.
-*}
-
-
-section {* Isabelle/Isar theories *}
-
-text {*
-  Isabelle/Isar offers the following main improvements over classic
-  Isabelle.
-
-  \begin{enumerate}
-  
-  \item A \emph{theory format} that integrates specifications and
-  proofs, supporting interactive development and unlimited undo
-  operation.
-  
-  \item A \emph{formal proof document language} designed to support
-  intelligible semi-automated reasoning.  Instead of putting together
-  unreadable tactic scripts, the author is enabled to express the
-  reasoning in way that is close to usual mathematical practice.  The
-  old tactical style has been assimilated as ``improper'' language
-  elements.
-  
-  \item A simple document preparation system, for typesetting formal
-  developments together with informal text.  The resulting
-  hyper-linked PDF documents are equally well suited for WWW
-  presentation and as printed copies.
-
-  \end{enumerate}
-
-  The Isar proof language is embedded into the new theory format as a
-  proper sub-language.  Proof mode is entered by stating some
-  @{command theorem} or @{command lemma} at the theory level, and
-  left again with the final conclusion (e.g.\ via @{command qed}).
-  A few theory specification mechanisms also require some proof, such
-  as HOL's @{command typedef} which demands non-emptiness of the
-  representing sets.
-*}
-
-
-section {* How to write Isar proofs anyway? \label{sec:isar-howto} *}
-
-text {*
-  This is one of the key questions, of course.  First of all, the
-  tactic script emulation of Isabelle/Isar essentially provides a
-  clarified version of the very same unstructured proof style of
-  classic Isabelle.  Old-time users should quickly become acquainted
-  with that (slightly degenerative) view of Isar.
-
-  Writing \emph{proper} Isar proof texts targeted at human readers is
-  quite different, though.  Experienced users of the unstructured
-  style may even have to unlearn some of their habits to master proof
-  composition in Isar.  In contrast, new users with less experience in
-  old-style tactical proving, but a good understanding of mathematical
-  proof in general, often get started easier.
-
-  \medskip The present text really is only a reference manual on
-  Isabelle/Isar, not a tutorial.  Nevertheless, we will attempt to
-  give some clues of how the concepts introduced here may be put into
-  practice.  Especially note that \appref{ap:refcard} provides a quick
-  reference card of the most common Isabelle/Isar language elements.
-
-  Further issues concerning the Isar concepts are covered in the
-  literature
-  \cite{Wenzel:1999:TPHOL,Wiedijk:2000:MV,Bauer-Wenzel:2000:HB,Bauer-Wenzel:2001}.
-  The author's PhD thesis \cite{Wenzel-PhD} presently provides the
-  most complete exposition of Isar foundations, techniques, and
-  applications.  A number of example applications are distributed with
-  Isabelle, and available via the Isabelle WWW library (e.g.\
-  \url{http://isabelle.in.tum.de/library/}).  The ``Archive of Formal
-  Proofs'' \url{http://afp.sourceforge.net/} also provides plenty of
-  examples, both in proper Isar proof style and unstructured tactic
-  scripts.
-*}
-
 end
--- a/doc-src/IsarRef/Thy/ML_Tactic.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/ML_Tactic.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory ML_Tactic
 imports Main
 begin
--- a/doc-src/IsarRef/Thy/Misc.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Misc.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Misc
 imports Main
 begin
--- a/doc-src/IsarRef/Thy/Outer_Syntax.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Outer_Syntax.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Outer_Syntax
 imports Main
 begin
@@ -170,10 +168,10 @@
   Isabelle as @{verbatim \<forall>}.  There are infinitely many Isabelle
   symbols like this, although proper presentation is left to front-end
   tools such as {\LaTeX} or Proof~General with the X-Symbol package.
-  A list of standard Isabelle symbols that work well with these tools
-  is given in \appref{app:symbols}.  Note that @{verbatim "\<lambda>"} does
-  not belong to the @{text letter} category, since it is already used
-  differently in the Pure term language.
+  A list of predefined Isabelle symbols that work well with these
+  tools is given in \appref{app:symbols}.  Note that @{verbatim "\<lambda>"}
+  does not belong to the @{text letter} category, since it is already
+  used differently in the Pure term language.
 *}
 
 
--- a/doc-src/IsarRef/Thy/Proof.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Proof.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,17 +1,15 @@
-(* $Id$ *)
-
 theory Proof
 imports Main
 begin
 
-chapter {* Proofs *}
+chapter {* Proofs \label{ch:proofs} *}
 
 text {*
   Proof commands perform transitions of Isar/VM machine
   configurations, which are block-structured, consisting of a stack of
   nodes with three main components: logical proof context, current
-  facts, and open goals.  Isar/VM transitions are \emph{typed}
-  according to the following three different modes of operation:
+  facts, and open goals.  Isar/VM transitions are typed according to
+  the following three different modes of operation:
 
   \begin{description}
 
@@ -32,13 +30,17 @@
 
   \end{description}
 
-  The proof mode indicator may be read as a verb telling the writer
-  what kind of operation may be performed next.  The corresponding
-  typings of proof commands restricts the shape of well-formed proof
-  texts to particular command sequences.  So dynamic arrangements of
-  commands eventually turn out as static texts of a certain structure.
-  \Appref{ap:refcard} gives a simplified grammar of the overall
-  (extensible) language emerging that way.
+  The proof mode indicator may be understood as an instruction to the
+  writer, telling what kind of operation may be performed next.  The
+  corresponding typings of proof commands restricts the shape of
+  well-formed proof texts to particular command sequences.  So dynamic
+  arrangements of commands eventually turn out as static texts of a
+  certain structure.
+
+  \Appref{ap:refcard} gives a simplified grammar of the (extensible)
+  language emerging that way from the different types of proof
+  commands.  The main ideas of the overall Isar framework are
+  explained in \chref{ch:isar-framework}.
 *}
 
 
@@ -681,7 +683,6 @@
     @{method_def "assumption"} & : & @{text method} \\
     @{method_def "this"} & : & @{text method} \\
     @{method_def "rule"} & : & @{text method} \\
-    @{method_def "iprover"} & : & @{text method} \\[0.5ex]
     @{attribute_def (Pure) "intro"} & : & @{text attribute} \\
     @{attribute_def (Pure) "elim"} & : & @{text attribute} \\
     @{attribute_def (Pure) "dest"} & : & @{text attribute} \\
@@ -696,8 +697,6 @@
     ;
     'rule' thmrefs?
     ;
-    'iprover' ('!' ?) (rulemod *)
-    ;
     rulemod: ('intro' | 'elim' | 'dest') ((('!' | () | '?') nat?) | 'del') ':' thmrefs
     ;
     ('intro' | 'elim' | 'dest') ('!' | () | '?') nat?
@@ -756,27 +755,11 @@
   default behavior of @{command "proof"} and ``@{command ".."}'' 
   (double-dot) steps (see \secref{sec:proof-steps}).
   
-  \item @{method iprover} performs intuitionistic proof search,
-  depending on specifically declared rules from the context, or given
-  as explicit arguments.  Chained facts are inserted into the goal
-  before commencing proof search; ``@{method iprover}@{text "!"}''
-  means to include the current @{fact prems} as well.
-  
-  Rules need to be classified as @{attribute (Pure) intro},
-  @{attribute (Pure) elim}, or @{attribute (Pure) dest}; here the
-  ``@{text "!"}'' indicator refers to ``safe'' rules, which may be
-  applied aggressively (without considering back-tracking later).
-  Rules declared with ``@{text "?"}'' are ignored in proof search (the
-  single-step @{method rule} method still observes these).  An
-  explicit weight annotation may be given as well; otherwise the
-  number of rule premises will be taken into account here.
-  
   \item @{attribute (Pure) intro}, @{attribute (Pure) elim}, and
   @{attribute (Pure) dest} declare introduction, elimination, and
-  destruct rules, to be used with the @{method rule} and @{method
-  iprover} methods.  Note that the latter will ignore rules declared
-  with ``@{text "?"}'', while ``@{text "!"}''  are used most
-  aggressively.
+  destruct rules, to be used with method @{method rule}, and similar
+  tools.  Note that the latter will ignore rules declared with
+  ``@{text "?"}'', while ``@{text "!"}''  are used most aggressively.
   
   The classical reasoner (see \secref{sec:classical}) introduces its
   own variants of these attributes; use qualified names to access the
@@ -963,7 +946,7 @@
   \begin{matharray}{l}
     @{text "\<langle>using b\<^sub>1 \<dots> b\<^sub>k\<rangle>"}~~@{command "obtain"}~@{text "x\<^sub>1 \<dots> x\<^sub>m \<WHERE> a: \<phi>\<^sub>1 \<dots> \<phi>\<^sub>n  \<langle>proof\<rangle> \<equiv>"} \\[1ex]
     \quad @{command "have"}~@{text "\<And>thesis. (\<And>x\<^sub>1 \<dots> x\<^sub>m. \<phi>\<^sub>1 \<Longrightarrow> \<dots> \<phi>\<^sub>n \<Longrightarrow> thesis) \<Longrightarrow> thesis"} \\
-    \quad @{command "proof"}~@{text succeed} \\
+    \quad @{command "proof"}~@{method succeed} \\
     \qquad @{command "fix"}~@{text thesis} \\
     \qquad @{command "assume"}~@{text "that [Pure.intro?]: \<And>x\<^sub>1 \<dots> x\<^sub>m. \<phi>\<^sub>1 \<Longrightarrow> \<dots> \<phi>\<^sub>n \<Longrightarrow> thesis"} \\
     \qquad @{command "then"}~@{command "show"}~@{text thesis} \\
--- a/doc-src/IsarRef/Thy/Quick_Reference.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Quick_Reference.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory Quick_Reference
 imports Main
 begin
@@ -30,7 +28,7 @@
 
   \begin{tabular}{rcl}
     @{text "theory\<dash>stmt"} & = & @{command "theorem"}~@{text "name: props proof  |"}~~@{command "definition"}~@{text "\<dots>  |  \<dots>"} \\[1ex]
-    @{text "proof"} & = & @{text "prfx\<^sup>*"}~@{command "proof"}~@{text "method stmt\<^sup>*"}~@{command "qed"}~@{text method} \\
+    @{text "proof"} & = & @{text "prfx\<^sup>*"}~@{command "proof"}~@{text "method\<^sup>? stmt\<^sup>*"}~@{command "qed"}~@{text "method\<^sup>?"} \\
     & @{text "|"} & @{text "prfx\<^sup>*"}~@{command "done"} \\[1ex]
     @{text prfx} & = & @{command "apply"}~@{text method} \\
     & @{text "|"} & @{command "using"}~@{text "facts"} \\
--- a/doc-src/IsarRef/Thy/ROOT-HOLCF.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/ROOT-HOLCF.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-(* $Id$ *)
-
 set ThyOutput.source;
 use "../../antiquote_setup.ML";
 
--- a/doc-src/IsarRef/Thy/ROOT-ZF.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/ROOT-ZF.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-(* $Id$ *)
-
 set ThyOutput.source;
 use "../../antiquote_setup.ML";
 
--- a/doc-src/IsarRef/Thy/ROOT.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/ROOT.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,18 +1,20 @@
-
-(* $Id$ *)
-
+set quick_and_dirty;
 set ThyOutput.source;
 use "../../antiquote_setup.ML";
 
-use_thy "Introduction";
-use_thy "Outer_Syntax";
-use_thy "Document_Preparation";
-use_thy "Spec";
-use_thy "Proof";
-use_thy "Inner_Syntax";
-use_thy "Misc";
-use_thy "Generic";
-use_thy "HOL_Specific";
-use_thy "Quick_Reference";
-use_thy "Symbols";
-use_thy "ML_Tactic";
+use_thys [
+  "Introduction",
+  "Framework",
+  "First_Order_Logic",
+  "Outer_Syntax",
+  "Document_Preparation",
+  "Spec",
+  "Proof",
+  "Inner_Syntax",
+  "Misc",
+  "Generic",
+  "HOL_Specific",
+  "Quick_Reference",
+  "Symbols",
+  "ML_Tactic"
+];
--- a/doc-src/IsarRef/Thy/Spec.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Spec.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -4,6 +4,24 @@
 
 chapter {* Theory specifications *}
 
+text {*
+  The Isabelle/Isar theory format integrates specifications and
+  proofs, supporting interactive development with unlimited undo
+  operation.  There is an integrated document preparation system (see
+  \chref{ch:document-prep}), for typesetting formal developments
+  together with informal text.  The resulting hyper-linked PDF
+  documents can be used both for WWW presentation and printed copies.
+
+  The Isar proof language (see \chref{ch:proofs}) is embedded into the
+  theory language as a proper sub-language.  Proof mode is entered by
+  stating some @{command theorem} or @{command lemma} at the theory
+  level, and left again with the final conclusion (e.g.\ via @{command
+  qed}).  Some theory specification mechanisms also require a proof,
+  such as @{command typedef} in HOL, which demands non-emptiness of
+  the representing sets.
+*}
+
+
 section {* Defining theories \label{sec:begin-thy} *}
 
 text {*
@@ -106,9 +124,9 @@
   @{command (global) "end"} has a different meaning: it concludes the
   theory itself (\secref{sec:begin-thy}).
   
-  \item @{text "(\<IN> c)"} given after any local theory command
-  specifies an immediate target, e.g.\ ``@{command
-  "definition"}~@{text "(\<IN> c) \<dots>"}'' or ``@{command
+  \item @{text "("}@{keyword_def "in"}~@{text "c)"} given after any
+  local theory command specifies an immediate target, e.g.\
+  ``@{command "definition"}~@{text "(\<IN> c) \<dots>"}'' or ``@{command
   "theorem"}~@{text "(\<IN> c) \<dots>"}''.  This works both in a local or
   global theory context; the current target context will be suspended
   for this command only.  Note that ``@{text "(\<IN> -)"}'' will
@@ -1164,7 +1182,7 @@
 
   \end{description}
 
-  See @{"file" "~~/src/FOL/ex/IffOracle.thy"} for a worked example of
+  See @{"file" "~~/src/FOL/ex/Iff_Oracle.thy"} for a worked example of
   defining a new primitive rule as oracle, and turning it into a proof
   method.
 *}
--- a/doc-src/IsarRef/Thy/Symbols.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/Symbols.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,10 +1,8 @@
-(* $Id$ *)
-
 theory Symbols
 imports Pure
 begin
 
-chapter {* Standard Isabelle symbols \label{app:symbols} *}
+chapter {* Predefined Isabelle symbols \label{app:symbols} *}
 
 text {*
   Isabelle supports an infinite number of non-ASCII symbols, which are
--- a/doc-src/IsarRef/Thy/ZF_Specific.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/ZF_Specific.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,3 @@
-(* $Id$ *)
-
 theory ZF_Specific
 imports Main
 begin
--- a/doc-src/IsarRef/Thy/document/Document_Preparation.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Document_Preparation.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Document{\isacharunderscore}Preparation}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
--- a/doc-src/IsarRef/Thy/document/Generic.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Generic.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Generic}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
--- a/doc-src/IsarRef/Thy/document/HOLCF_Specific.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/HOLCF_Specific.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{HOLCF{\isacharunderscore}Specific}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
--- a/doc-src/IsarRef/Thy/document/HOL_Specific.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/HOL_Specific.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -779,6 +779,58 @@
 \end{isamarkuptext}%
 \isamarkuptrue%
 %
+\isamarkupsection{Intuitionistic proof search%
+}
+\isamarkuptrue%
+%
+\begin{isamarkuptext}%
+\begin{matharray}{rcl}
+    \indexdef{HOL}{method}{iprover}\hypertarget{method.HOL.iprover}{\hyperlink{method.HOL.iprover}{\mbox{\isa{iprover}}}} & : & \isa{method} \\
+  \end{matharray}
+
+  \begin{rail}
+    'iprover' ('!' ?) (rulemod *)
+    ;
+  \end{rail}
+
+  The \hyperlink{method.HOL.iprover}{\mbox{\isa{iprover}}} method performs intuitionistic proof
+  search, depending on specifically declared rules from the context,
+  or given as explicit arguments.  Chained facts are inserted into the
+  goal before commencing proof search; ``\hyperlink{method.HOL.iprover}{\mbox{\isa{iprover}}}\isa{{\isachardoublequote}{\isacharbang}{\isachardoublequote}}''  means to include the current \hyperlink{fact.prems}{\mbox{\isa{prems}}} as well.
+  
+  Rules need to be classified as \hyperlink{attribute.Pure.intro}{\mbox{\isa{intro}}},
+  \hyperlink{attribute.Pure.elim}{\mbox{\isa{elim}}}, or \hyperlink{attribute.Pure.dest}{\mbox{\isa{dest}}}; here the
+  ``\isa{{\isachardoublequote}{\isacharbang}{\isachardoublequote}}'' indicator refers to ``safe'' rules, which may be
+  applied aggressively (without considering back-tracking later).
+  Rules declared with ``\isa{{\isachardoublequote}{\isacharquery}{\isachardoublequote}}'' are ignored in proof search (the
+  single-step \hyperlink{method.rule}{\mbox{\isa{rule}}} method still observes these).  An
+  explicit weight annotation may be given as well; otherwise the
+  number of rule premises will be taken into account here.%
+\end{isamarkuptext}%
+\isamarkuptrue%
+%
+\isamarkupsection{Coherent Logic%
+}
+\isamarkuptrue%
+%
+\begin{isamarkuptext}%
+\begin{matharray}{rcl}
+    \indexdef{HOL}{method}{coherent}\hypertarget{method.HOL.coherent}{\hyperlink{method.HOL.coherent}{\mbox{\isa{coherent}}}} & : & \isa{method} \\
+  \end{matharray}
+
+  \begin{rail}
+    'coherent' thmrefs?
+    ;
+  \end{rail}
+
+  The \hyperlink{method.HOL.coherent}{\mbox{\isa{coherent}}} method solves problems of
+  \emph{Coherent Logic} \cite{Bezem-Coquand:2005}, which covers
+  applications in confluence theory, lattice theory and projective
+  geometry.  See \hyperlink{file.~~/src/HOL/ex/Coherent.thy}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}HOL{\isacharslash}ex{\isacharslash}Coherent{\isachardot}thy}}}} for some
+  examples.%
+\end{isamarkuptext}%
+\isamarkuptrue%
+%
 \isamarkupsection{Invoking automated reasoning tools -- The Sledgehammer%
 }
 \isamarkuptrue%
--- a/doc-src/IsarRef/Thy/document/Inner_Syntax.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Inner_Syntax.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Inner{\isacharunderscore}Syntax}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
@@ -120,19 +118,19 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls} 
-    \indexml{show\_types}\verb|show_types: bool ref| & default \verb|false| \\
-    \indexml{show\_sorts}\verb|show_sorts: bool ref| & default \verb|false| \\
-    \indexml{show\_consts}\verb|show_consts: bool ref| & default \verb|false| \\
-    \indexml{long\_names}\verb|long_names: bool ref| & default \verb|false| \\
-    \indexml{short\_names}\verb|short_names: bool ref| & default \verb|false| \\
-    \indexml{unique\_names}\verb|unique_names: bool ref| & default \verb|true| \\
-    \indexml{show\_brackets}\verb|show_brackets: bool ref| & default \verb|false| \\
-    \indexml{eta\_contract}\verb|eta_contract: bool ref| & default \verb|true| \\
-    \indexml{goals\_limit}\verb|goals_limit: int ref| & default \verb|10| \\
-    \indexml{Proof.show\_main\_goal}\verb|Proof.show_main_goal: bool ref| & default \verb|false| \\
-    \indexml{show\_hyps}\verb|show_hyps: bool ref| & default \verb|false| \\
-    \indexml{show\_tags}\verb|show_tags: bool ref| & default \verb|false| \\
-    \indexml{show\_question\_marks}\verb|show_question_marks: bool ref| & default \verb|true| \\
+    \indexdef{}{ML}{show\_types}\verb|show_types: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{show\_sorts}\verb|show_sorts: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{show\_consts}\verb|show_consts: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{long\_names}\verb|long_names: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{short\_names}\verb|short_names: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{unique\_names}\verb|unique_names: bool ref| & default \verb|true| \\
+    \indexdef{}{ML}{show\_brackets}\verb|show_brackets: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{eta\_contract}\verb|eta_contract: bool ref| & default \verb|true| \\
+    \indexdef{}{ML}{goals\_limit}\verb|goals_limit: int ref| & default \verb|10| \\
+    \indexdef{}{ML}{Proof.show\_main\_goal}\verb|Proof.show_main_goal: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{show\_hyps}\verb|show_hyps: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{show\_tags}\verb|show_tags: bool ref| & default \verb|false| \\
+    \indexdef{}{ML}{show\_question\_marks}\verb|show_question_marks: bool ref| & default \verb|true| \\
   \end{mldecls}
 
   These global ML variables control the detail of information that is
@@ -233,9 +231,9 @@
 %
 \begin{isamarkuptext}%
 \begin{mldecls}
-    \indexml{Pretty.setdepth}\verb|Pretty.setdepth: int -> unit| \\
-    \indexml{Pretty.setmargin}\verb|Pretty.setmargin: int -> unit| \\
-    \indexml{print\_depth}\verb|print_depth: int -> unit| \\
+    \indexdef{}{ML}{Pretty.setdepth}\verb|Pretty.setdepth: int -> unit| \\
+    \indexdef{}{ML}{Pretty.setmargin}\verb|Pretty.setmargin: int -> unit| \\
+    \indexdef{}{ML}{print\_depth}\verb|print_depth: int -> unit| \\
   \end{mldecls}
 
   These ML functions set limits for pretty printed text.
@@ -392,7 +390,7 @@
   \end{matharray}
 
   \begin{rail}
-    ('notation' | 'no\_notation') target? mode? (nameref structmixfix + 'and')
+    ('notation' | 'no\_notation') target? mode? \\ (nameref structmixfix + 'and')
     ;
   \end{rail}
 
@@ -551,13 +549,15 @@
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}tid\ \ {\isacharbar}\ \ tvar\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|_| \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}tid{\isachardoublequote}} \verb|::| \isa{{\isachardoublequote}sort\ \ {\isacharbar}\ \ tvar\ \ {\isachardoublequote}}\verb|::| \isa{{\isachardoublequote}sort\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|_| \verb|::| \isa{{\isachardoublequote}sort{\isachardoublequote}} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}id\ \ {\isacharbar}\ \ type\isactrlsup {\isacharparenleft}\isactrlsup {\isadigit{1}}\isactrlsup {\isadigit{0}}\isactrlsup {\isadigit{0}}\isactrlsup {\isadigit{0}}\isactrlsup {\isacharparenright}\ id\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|(| \isa{type} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{type} \verb|)| \isa{id} \\
-    & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}longid\ \ {\isacharbar}\ \ type\isactrlsup {\isacharparenleft}\isactrlsup {\isadigit{1}}\isactrlsup {\isadigit{0}}\isactrlsup {\isadigit{0}}\isactrlsup {\isadigit{0}}\isactrlsup {\isacharparenright}\ longid\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|(| \isa{type} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{type} \verb|)| \isa{longid} \\
+    & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}longid\ \ {\isacharbar}\ \ type\isactrlsup {\isacharparenleft}\isactrlsup {\isadigit{1}}\isactrlsup {\isadigit{0}}\isactrlsup {\isadigit{0}}\isactrlsup {\isadigit{0}}\isactrlsup {\isacharparenright}\ longid{\isachardoublequote}} \\
+    & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \verb|(| \isa{type} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{type} \verb|)| \isa{longid} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}type\isactrlsup {\isacharparenleft}\isactrlsup {\isadigit{1}}\isactrlsup {\isacharparenright}{\isachardoublequote}} \verb|=>| \isa{type} & \isa{{\isachardoublequote}{\isacharparenleft}{\isadigit{0}}{\isacharparenright}{\isachardoublequote}} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}type\isactrlsup {\isacharparenleft}\isactrlsup {\isadigit{1}}\isactrlsup {\isacharparenright}{\isachardoublequote}} \isa{{\isachardoublequote}{\isasymRightarrow}{\isachardoublequote}} \isa{type} & \isa{{\isachardoublequote}{\isacharparenleft}{\isadigit{0}}{\isacharparenright}{\isachardoublequote}} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \verb|[| \isa{type} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{type} \verb|]| \verb|=>| \isa{type} & \isa{{\isachardoublequote}{\isacharparenleft}{\isadigit{0}}{\isacharparenright}{\isachardoublequote}} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \verb|[| \isa{type} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{type} \verb|]| \isa{{\isachardoublequote}{\isasymRightarrow}{\isachardoublequote}} \isa{type} & \isa{{\isachardoublequote}{\isacharparenleft}{\isadigit{0}}{\isacharparenright}{\isachardoublequote}} \\\\
 
-  \indexdef{inner}{syntax}{sort}\hypertarget{syntax.inner.sort}{\hyperlink{syntax.inner.sort}{\mbox{\isa{sort}}}} & = & \isa{{\isachardoublequote}id\ \ {\isacharbar}\ \ longid\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|{}|\isa{{\isachardoublequote}\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|{| \isa{{\isachardoublequote}{\isacharparenleft}id\ {\isacharbar}\ longid{\isacharparenright}{\isachardoublequote}} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{{\isachardoublequote}{\isacharparenleft}id\ {\isacharbar}\ longid{\isacharparenright}{\isachardoublequote}} \verb|}| \\
+  \indexdef{inner}{syntax}{sort}\hypertarget{syntax.inner.sort}{\hyperlink{syntax.inner.sort}{\mbox{\isa{sort}}}} & = & \isa{{\isachardoublequote}id\ \ {\isacharbar}\ \ longid\ \ {\isacharbar}\ \ {\isachardoublequote}}\verb|{}| \\
+    & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \verb|{| \isa{{\isachardoublequote}{\isacharparenleft}id\ {\isacharbar}\ longid{\isacharparenright}{\isachardoublequote}} \verb|,| \isa{{\isachardoublequote}{\isasymdots}{\isachardoublequote}} \verb|,| \isa{{\isachardoublequote}{\isacharparenleft}id\ {\isacharbar}\ longid{\isacharparenright}{\isachardoublequote}} \verb|}| \\
   \end{supertabular}
   \end{center}
 
--- a/doc-src/IsarRef/Thy/document/Introduction.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Introduction.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Introduction}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
@@ -32,27 +30,27 @@
 The \emph{Isabelle} system essentially provides a generic
   infrastructure for building deductive systems (programmed in
   Standard ML), with a special focus on interactive theorem proving in
-  higher-order logics.  In the olden days even end-users would refer
-  to certain ML functions (goal commands, tactics, tacticals etc.) to
-  pursue their everyday theorem proving tasks
-  \cite{isabelle-intro,isabelle-ref}.
+  higher-order logics.  Many years ago, even end-users would refer to
+  certain ML functions (goal commands, tactics, tacticals etc.) to
+  pursue their everyday theorem proving tasks.
   
   In contrast \emph{Isar} provides an interpreted language environment
   of its own, which has been specifically tailored for the needs of
   theory and proof development.  Compared to raw ML, the Isabelle/Isar
   top-level provides a more robust and comfortable development
-  platform, with proper support for theory development graphs,
-  single-step transactions with unlimited undo, etc.  The
-  Isabelle/Isar version of the \emph{Proof~General} user interface
-  \cite{proofgeneral,Aspinall:TACAS:2000} provides an adequate
-  front-end for interactive theory and proof development in this
-  advanced theorem proving environment.
+  platform, with proper support for theory development graphs, managed
+  transactions with unlimited undo etc.  The Isabelle/Isar version of
+  the \emph{Proof~General} user interface
+  \cite{proofgeneral,Aspinall:TACAS:2000} provides a decent front-end
+  for interactive theory and proof development in this advanced
+  theorem proving environment, even though it is somewhat biased
+  towards old-style proof scripts.
 
   \medskip Apart from the technical advances over bare-bones ML
   programming, the main purpose of the Isar language is to provide a
   conceptually different view on machine-checked proofs
-  \cite{Wenzel:1999:TPHOL,Wenzel-PhD}.  ``Isar'' stands for
-  ``Intelligible semi-automated reasoning''.  Drawing from both the
+  \cite{Wenzel:1999:TPHOL,Wenzel-PhD}.  \emph{Isar} stands for
+  \emph{Intelligible semi-automated reasoning}.  Drawing from both the
   traditions of informal mathematical proof texts and high-level
   programming languages, Isar offers a versatile environment for
   structured formal proof documents.  Thus properly written Isar
@@ -67,12 +65,12 @@
   Despite its grand design of structured proof texts, Isar is able to
   assimilate the old tactical style as an ``improper'' sub-language.
   This provides an easy upgrade path for existing tactic scripts, as
-  well as additional means for interactive experimentation and
-  debugging of structured proofs.  Isabelle/Isar supports a broad
-  range of proof styles, both readable and unreadable ones.
+  well as some means for interactive experimentation and debugging of
+  structured proofs.  Isabelle/Isar supports a broad range of proof
+  styles, both readable and unreadable ones.
 
-  \medskip The Isabelle/Isar framework \cite{Wenzel:2006:Festschrift}
-  is generic and should work reasonably well for any Isabelle
+  \medskip The generic Isabelle/Isar framework (see
+  \chref{ch:isar-framework}) works reasonably well for any Isabelle
   object-logic that conforms to the natural deduction view of the
   Isabelle/Pure framework.  Specific language elements introduced by
   the major object-logics are described in \chref{ch:hol}
@@ -92,207 +90,6 @@
 \end{isamarkuptext}%
 \isamarkuptrue%
 %
-\isamarkupsection{User interfaces%
-}
-\isamarkuptrue%
-%
-\isamarkupsubsection{Terminal sessions%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-The Isabelle \texttt{tty} tool provides a very interface for running
-  the Isar interaction loop, with some support for command line
-  editing.  For example:
-\begin{ttbox}
-isabelle tty\medskip
-{\out Welcome to Isabelle/HOL (Isabelle2008)}\medskip
-theory Foo imports Main begin;
-definition foo :: nat where "foo == 1";
-lemma "0 < foo" by (simp add: foo_def);
-end;
-\end{ttbox}
-
-  Any Isabelle/Isar command may be retracted by \hyperlink{command.undo}{\mbox{\isa{\isacommand{undo}}}}.
-  See the Isabelle/Isar Quick Reference (\appref{ap:refcard}) for a
-  comprehensive overview of available commands and other language
-  elements.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsection{Emacs Proof General%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Plain TTY-based interaction as above used to be quite feasible with
-  traditional tactic based theorem proving, but developing Isar
-  documents really demands some better user-interface support.  The
-  Proof~General environment by David Aspinall
-  \cite{proofgeneral,Aspinall:TACAS:2000} offers a generic Emacs
-  interface for interactive theorem provers that organizes all the
-  cut-and-paste and forward-backward walk through the text in a very
-  neat way.  In Isabelle/Isar, the current position within a partial
-  proof document is equally important than the actual proof state.
-  Thus Proof~General provides the canonical working environment for
-  Isabelle/Isar, both for getting acquainted (e.g.\ by replaying
-  existing Isar documents) and for production work.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{Proof~General as default Isabelle interface%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-The Isabelle interface wrapper script provides an easy way to invoke
-  Proof~General (including XEmacs or GNU Emacs).  The default
-  configuration of Isabelle is smart enough to detect the
-  Proof~General distribution in several canonical places (e.g.\
-  \verb|$ISABELLE_HOME/contrib/ProofGeneral|).  Thus the
-  capital \verb|Isabelle| executable would already refer to the
-  \verb|ProofGeneral/isar| interface without further ado.  The
-  Isabelle interface script provides several options; pass \verb|-?|  to see its usage.
-
-  With the proper Isabelle interface setup, Isar documents may now be edited by
-  visiting appropriate theory files, e.g.\ 
-\begin{ttbox}
-Isabelle \({\langle}isabellehome{\rangle}\)/src/HOL/Isar_examples/Summation.thy
-\end{ttbox}
-  Beginners may note the tool bar for navigating forward and backward
-  through the text (this depends on the local Emacs installation).
-  Consult the Proof~General documentation \cite{proofgeneral} for
-  further basic command sequences, in particular ``\verb|C-c C-return|''
-  and ``\verb|C-c u|''.
-
-  \medskip Proof~General may be also configured manually by giving
-  Isabelle settings like this (see also \cite{isabelle-sys}):
-
-\begin{ttbox}
-ISABELLE_INTERFACE=\$ISABELLE_HOME/contrib/ProofGeneral/isar/interface
-PROOFGENERAL_OPTIONS=""
-\end{ttbox}
-  You may have to change \verb|$ISABELLE_HOME/contrib/ProofGeneral| to the actual installation
-  directory of Proof~General.
-
-  \medskip Apart from the Isabelle command line, defaults for
-  interface options may be given by the \verb|PROOFGENERAL_OPTIONS|
-  setting.  For example, the Emacs executable to be used may be
-  configured in Isabelle's settings like this:
-\begin{ttbox}
-PROOFGENERAL_OPTIONS="-p xemacs-mule"  
-\end{ttbox}
-
-  Occasionally, a user's \verb|~/.emacs| file contains code
-  that is incompatible with the (X)Emacs version used by
-  Proof~General, causing the interface startup to fail prematurely.
-  Here the \verb|-u false| option helps to get the interface
-  process up and running.  Note that additional Lisp customization
-  code may reside in \verb|proofgeneral-settings.el| of
-  \verb|$ISABELLE_HOME/etc| or \verb|$ISABELLE_HOME_USER/etc|.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsubsubsection{The X-Symbol package%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Proof~General incorporates a version of the Emacs X-Symbol package
-  \cite{x-symbol}, which handles proper mathematical symbols displayed
-  on screen.  Pass option \verb|-x true| to the Isabelle
-  interface script, or check the appropriate Proof~General menu
-  setting by hand.  The main challenge of getting X-Symbol to work
-  properly is the underlying (semi-automated) X11 font setup.
-
-  \medskip Using proper mathematical symbols in Isabelle theories can
-  be very convenient for readability of large formulas.  On the other
-  hand, the plain ASCII sources easily become somewhat unintelligible.
-  For example, \isa{{\isachardoublequote}{\isasymLongrightarrow}{\isachardoublequote}} would appear as \verb|\<Longrightarrow>| according
-  the default set of Isabelle symbols.  Nevertheless, the Isabelle
-  document preparation system (see \chref{ch:document-prep}) will be
-  happy to print non-ASCII symbols properly.  It is even possible to
-  invent additional notation beyond the display capabilities of Emacs
-  and X-Symbol.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsection{Isabelle/Isar theories%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-Isabelle/Isar offers the following main improvements over classic
-  Isabelle.
-
-  \begin{enumerate}
-  
-  \item A \emph{theory format} that integrates specifications and
-  proofs, supporting interactive development and unlimited undo
-  operation.
-  
-  \item A \emph{formal proof document language} designed to support
-  intelligible semi-automated reasoning.  Instead of putting together
-  unreadable tactic scripts, the author is enabled to express the
-  reasoning in way that is close to usual mathematical practice.  The
-  old tactical style has been assimilated as ``improper'' language
-  elements.
-  
-  \item A simple document preparation system, for typesetting formal
-  developments together with informal text.  The resulting
-  hyper-linked PDF documents are equally well suited for WWW
-  presentation and as printed copies.
-
-  \end{enumerate}
-
-  The Isar proof language is embedded into the new theory format as a
-  proper sub-language.  Proof mode is entered by stating some
-  \hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}} or \hyperlink{command.lemma}{\mbox{\isa{\isacommand{lemma}}}} at the theory level, and
-  left again with the final conclusion (e.g.\ via \hyperlink{command.qed}{\mbox{\isa{\isacommand{qed}}}}).
-  A few theory specification mechanisms also require some proof, such
-  as HOL's \hyperlink{command.typedef}{\mbox{\isa{\isacommand{typedef}}}} which demands non-emptiness of the
-  representing sets.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
-\isamarkupsection{How to write Isar proofs anyway? \label{sec:isar-howto}%
-}
-\isamarkuptrue%
-%
-\begin{isamarkuptext}%
-This is one of the key questions, of course.  First of all, the
-  tactic script emulation of Isabelle/Isar essentially provides a
-  clarified version of the very same unstructured proof style of
-  classic Isabelle.  Old-time users should quickly become acquainted
-  with that (slightly degenerative) view of Isar.
-
-  Writing \emph{proper} Isar proof texts targeted at human readers is
-  quite different, though.  Experienced users of the unstructured
-  style may even have to unlearn some of their habits to master proof
-  composition in Isar.  In contrast, new users with less experience in
-  old-style tactical proving, but a good understanding of mathematical
-  proof in general, often get started easier.
-
-  \medskip The present text really is only a reference manual on
-  Isabelle/Isar, not a tutorial.  Nevertheless, we will attempt to
-  give some clues of how the concepts introduced here may be put into
-  practice.  Especially note that \appref{ap:refcard} provides a quick
-  reference card of the most common Isabelle/Isar language elements.
-
-  Further issues concerning the Isar concepts are covered in the
-  literature
-  \cite{Wenzel:1999:TPHOL,Wiedijk:2000:MV,Bauer-Wenzel:2000:HB,Bauer-Wenzel:2001}.
-  The author's PhD thesis \cite{Wenzel-PhD} presently provides the
-  most complete exposition of Isar foundations, techniques, and
-  applications.  A number of example applications are distributed with
-  Isabelle, and available via the Isabelle WWW library (e.g.\
-  \url{http://isabelle.in.tum.de/library/}).  The ``Archive of Formal
-  Proofs'' \url{http://afp.sourceforge.net/} also provides plenty of
-  examples, both in proper Isar proof style and unstructured tactic
-  scripts.%
-\end{isamarkuptext}%
-\isamarkuptrue%
-%
 \isadelimtheory
 %
 \endisadelimtheory
--- a/doc-src/IsarRef/Thy/document/ML_Tactic.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/ML_Tactic.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{ML{\isacharunderscore}Tactic}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
--- a/doc-src/IsarRef/Thy/document/Misc.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Misc.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Misc}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
--- a/doc-src/IsarRef/Thy/document/Outer_Syntax.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Outer_Syntax.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Outer{\isacharunderscore}Syntax}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
@@ -185,10 +183,10 @@
   Isabelle as \verb|\<forall>|.  There are infinitely many Isabelle
   symbols like this, although proper presentation is left to front-end
   tools such as {\LaTeX} or Proof~General with the X-Symbol package.
-  A list of standard Isabelle symbols that work well with these tools
-  is given in \appref{app:symbols}.  Note that \verb|\<lambda>| does
-  not belong to the \isa{letter} category, since it is already used
-  differently in the Pure term language.%
+  A list of predefined Isabelle symbols that work well with these
+  tools is given in \appref{app:symbols}.  Note that \verb|\<lambda>|
+  does not belong to the \isa{letter} category, since it is already
+  used differently in the Pure term language.%
 \end{isamarkuptext}%
 \isamarkuptrue%
 %
--- a/doc-src/IsarRef/Thy/document/Proof.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Proof.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Proof}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
@@ -20,7 +18,7 @@
 %
 \endisadelimtheory
 %
-\isamarkupchapter{Proofs%
+\isamarkupchapter{Proofs \label{ch:proofs}%
 }
 \isamarkuptrue%
 %
@@ -28,8 +26,8 @@
 Proof commands perform transitions of Isar/VM machine
   configurations, which are block-structured, consisting of a stack of
   nodes with three main components: logical proof context, current
-  facts, and open goals.  Isar/VM transitions are \emph{typed}
-  according to the following three different modes of operation:
+  facts, and open goals.  Isar/VM transitions are typed according to
+  the following three different modes of operation:
 
   \begin{description}
 
@@ -49,13 +47,17 @@
 
   \end{description}
 
-  The proof mode indicator may be read as a verb telling the writer
-  what kind of operation may be performed next.  The corresponding
-  typings of proof commands restricts the shape of well-formed proof
-  texts to particular command sequences.  So dynamic arrangements of
-  commands eventually turn out as static texts of a certain structure.
-  \Appref{ap:refcard} gives a simplified grammar of the overall
-  (extensible) language emerging that way.%
+  The proof mode indicator may be understood as an instruction to the
+  writer, telling what kind of operation may be performed next.  The
+  corresponding typings of proof commands restrict the shape of
+  well-formed proof texts to particular command sequences.  So dynamic
+  arrangements of commands eventually turn out as static texts of a
+  certain structure.
+
+  \Appref{ap:refcard} gives a simplified grammar of the (extensible)
+  language emerging that way from the different types of proof
+  commands.  The main ideas of the overall Isar framework are
+  explained in \chref{ch:isar-framework}.%
 \end{isamarkuptext}%
 \isamarkuptrue%
 %
@@ -691,7 +693,6 @@
     \indexdef{}{method}{assumption}\hypertarget{method.assumption}{\hyperlink{method.assumption}{\mbox{\isa{assumption}}}} & : & \isa{method} \\
     \indexdef{}{method}{this}\hypertarget{method.this}{\hyperlink{method.this}{\mbox{\isa{this}}}} & : & \isa{method} \\
     \indexdef{}{method}{rule}\hypertarget{method.rule}{\hyperlink{method.rule}{\mbox{\isa{rule}}}} & : & \isa{method} \\
-    \indexdef{}{method}{iprover}\hypertarget{method.iprover}{\hyperlink{method.iprover}{\mbox{\isa{iprover}}}} & : & \isa{method} \\[0.5ex]
     \indexdef{Pure}{attribute}{intro}\hypertarget{attribute.Pure.intro}{\hyperlink{attribute.Pure.intro}{\mbox{\isa{intro}}}} & : & \isa{attribute} \\
     \indexdef{Pure}{attribute}{elim}\hypertarget{attribute.Pure.elim}{\hyperlink{attribute.Pure.elim}{\mbox{\isa{elim}}}} & : & \isa{attribute} \\
     \indexdef{Pure}{attribute}{dest}\hypertarget{attribute.Pure.dest}{\hyperlink{attribute.Pure.dest}{\mbox{\isa{dest}}}} & : & \isa{attribute} \\
@@ -706,8 +707,6 @@
     ;
     'rule' thmrefs?
     ;
-    'iprover' ('!' ?) (rulemod *)
-    ;
     rulemod: ('intro' | 'elim' | 'dest') ((('!' | () | '?') nat?) | 'del') ':' thmrefs
     ;
     ('intro' | 'elim' | 'dest') ('!' | () | '?') nat?
@@ -762,26 +761,11 @@
   default behavior of \hyperlink{command.proof}{\mbox{\isa{\isacommand{proof}}}} and ``\hyperlink{command.ddot}{\mbox{\isa{\isacommand{{\isachardot}{\isachardot}}}}}'' 
   (double-dot) steps (see \secref{sec:proof-steps}).
   
-  \item \hyperlink{method.iprover}{\mbox{\isa{iprover}}} performs intuitionistic proof search,
-  depending on specifically declared rules from the context, or given
-  as explicit arguments.  Chained facts are inserted into the goal
-  before commencing proof search; ``\hyperlink{method.iprover}{\mbox{\isa{iprover}}}\isa{{\isachardoublequote}{\isacharbang}{\isachardoublequote}}''
-  means to include the current \hyperlink{fact.prems}{\mbox{\isa{prems}}} as well.
-  
-  Rules need to be classified as \hyperlink{attribute.Pure.intro}{\mbox{\isa{intro}}},
-  \hyperlink{attribute.Pure.elim}{\mbox{\isa{elim}}}, or \hyperlink{attribute.Pure.dest}{\mbox{\isa{dest}}}; here the
-  ``\isa{{\isachardoublequote}{\isacharbang}{\isachardoublequote}}'' indicator refers to ``safe'' rules, which may be
-  applied aggressively (without considering back-tracking later).
-  Rules declared with ``\isa{{\isachardoublequote}{\isacharquery}{\isachardoublequote}}'' are ignored in proof search (the
-  single-step \hyperlink{method.rule}{\mbox{\isa{rule}}} method still observes these).  An
-  explicit weight annotation may be given as well; otherwise the
-  number of rule premises will be taken into account here.
-  
   \item \hyperlink{attribute.Pure.intro}{\mbox{\isa{intro}}}, \hyperlink{attribute.Pure.elim}{\mbox{\isa{elim}}}, and
   \hyperlink{attribute.Pure.dest}{\mbox{\isa{dest}}} declare introduction, elimination, and
-  destruct rules, to be used with the \hyperlink{method.rule}{\mbox{\isa{rule}}} and \hyperlink{method.iprover}{\mbox{\isa{iprover}}} methods.  Note that the latter will ignore rules declared
-  with ``\isa{{\isachardoublequote}{\isacharquery}{\isachardoublequote}}'', while ``\isa{{\isachardoublequote}{\isacharbang}{\isachardoublequote}}''  are used most
-  aggressively.
+  destruct rules, to be used with method \hyperlink{method.rule}{\mbox{\isa{rule}}}, and similar
+  tools.  Note that the latter will ignore rules declared with
+  ``\isa{{\isachardoublequote}{\isacharquery}{\isachardoublequote}}'', while ``\isa{{\isachardoublequote}{\isacharbang}{\isachardoublequote}}''  are used most aggressively.
   
   The classical reasoner (see \secref{sec:classical}) introduces its
   own variants of these attributes; use qualified names to access the
@@ -966,7 +950,7 @@
   \begin{matharray}{l}
     \isa{{\isachardoublequote}{\isasymlangle}using\ b\isactrlsub {\isadigit{1}}\ {\isasymdots}\ b\isactrlsub k{\isasymrangle}{\isachardoublequote}}~~\hyperlink{command.obtain}{\mbox{\isa{\isacommand{obtain}}}}~\isa{{\isachardoublequote}x\isactrlsub {\isadigit{1}}\ {\isasymdots}\ x\isactrlsub m\ {\isasymWHERE}\ a{\isacharcolon}\ {\isasymphi}\isactrlsub {\isadigit{1}}\ {\isasymdots}\ {\isasymphi}\isactrlsub n\ \ {\isasymlangle}proof{\isasymrangle}\ {\isasymequiv}{\isachardoublequote}} \\[1ex]
     \quad \hyperlink{command.have}{\mbox{\isa{\isacommand{have}}}}~\isa{{\isachardoublequote}{\isasymAnd}thesis{\isachardot}\ {\isacharparenleft}{\isasymAnd}x\isactrlsub {\isadigit{1}}\ {\isasymdots}\ x\isactrlsub m{\isachardot}\ {\isasymphi}\isactrlsub {\isadigit{1}}\ {\isasymLongrightarrow}\ {\isasymdots}\ {\isasymphi}\isactrlsub n\ {\isasymLongrightarrow}\ thesis{\isacharparenright}\ {\isasymLongrightarrow}\ thesis{\isachardoublequote}} \\
-    \quad \hyperlink{command.proof}{\mbox{\isa{\isacommand{proof}}}}~\isa{succeed} \\
+    \quad \hyperlink{command.proof}{\mbox{\isa{\isacommand{proof}}}}~\hyperlink{method.succeed}{\mbox{\isa{succeed}}} \\
     \qquad \hyperlink{command.fix}{\mbox{\isa{\isacommand{fix}}}}~\isa{thesis} \\
     \qquad \hyperlink{command.assume}{\mbox{\isa{\isacommand{assume}}}}~\isa{{\isachardoublequote}that\ {\isacharbrackleft}Pure{\isachardot}intro{\isacharquery}{\isacharbrackright}{\isacharcolon}\ {\isasymAnd}x\isactrlsub {\isadigit{1}}\ {\isasymdots}\ x\isactrlsub m{\isachardot}\ {\isasymphi}\isactrlsub {\isadigit{1}}\ {\isasymLongrightarrow}\ {\isasymdots}\ {\isasymphi}\isactrlsub n\ {\isasymLongrightarrow}\ thesis{\isachardoublequote}} \\
     \qquad \hyperlink{command.then}{\mbox{\isa{\isacommand{then}}}}~\hyperlink{command.show}{\mbox{\isa{\isacommand{show}}}}~\isa{thesis} \\
--- a/doc-src/IsarRef/Thy/document/Quick_Reference.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Quick_Reference.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Quick{\isacharunderscore}Reference}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
@@ -52,7 +50,7 @@
 
   \begin{tabular}{rcl}
     \isa{{\isachardoublequote}theory{\isasymdash}stmt{\isachardoublequote}} & = & \hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}}~\isa{{\isachardoublequote}name{\isacharcolon}\ props\ proof\ \ {\isacharbar}{\isachardoublequote}}~~\hyperlink{command.definition}{\mbox{\isa{\isacommand{definition}}}}~\isa{{\isachardoublequote}{\isasymdots}\ \ {\isacharbar}\ \ {\isasymdots}{\isachardoublequote}} \\[1ex]
-    \isa{{\isachardoublequote}proof{\isachardoublequote}} & = & \isa{{\isachardoublequote}prfx\isactrlsup {\isacharasterisk}{\isachardoublequote}}~\hyperlink{command.proof}{\mbox{\isa{\isacommand{proof}}}}~\isa{{\isachardoublequote}method\ stmt\isactrlsup {\isacharasterisk}{\isachardoublequote}}~\hyperlink{command.qed}{\mbox{\isa{\isacommand{qed}}}}~\isa{method} \\
+    \isa{{\isachardoublequote}proof{\isachardoublequote}} & = & \isa{{\isachardoublequote}prfx\isactrlsup {\isacharasterisk}{\isachardoublequote}}~\hyperlink{command.proof}{\mbox{\isa{\isacommand{proof}}}}~\isa{{\isachardoublequote}method\isactrlsup {\isacharquery}\ stmt\isactrlsup {\isacharasterisk}{\isachardoublequote}}~\hyperlink{command.qed}{\mbox{\isa{\isacommand{qed}}}}~\isa{{\isachardoublequote}method\isactrlsup {\isacharquery}{\isachardoublequote}} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \isa{{\isachardoublequote}prfx\isactrlsup {\isacharasterisk}{\isachardoublequote}}~\hyperlink{command.done}{\mbox{\isa{\isacommand{done}}}} \\[1ex]
     \isa{prfx} & = & \hyperlink{command.apply}{\mbox{\isa{\isacommand{apply}}}}~\isa{method} \\
     & \isa{{\isachardoublequote}{\isacharbar}{\isachardoublequote}} & \hyperlink{command.using}{\mbox{\isa{\isacommand{using}}}}~\isa{{\isachardoublequote}facts{\isachardoublequote}} \\
--- a/doc-src/IsarRef/Thy/document/Spec.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Spec.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -22,6 +22,23 @@
 }
 \isamarkuptrue%
 %
+\begin{isamarkuptext}%
+The Isabelle/Isar theory format integrates specifications and
+  proofs, supporting interactive development with unlimited undo
+  operation.  There is an integrated document preparation system (see
+  \chref{ch:document-prep}), for typesetting formal developments
+  together with informal text.  The resulting hyper-linked PDF
+  documents can be used both for WWW presentation and printed copies.
+
+  The Isar proof language (see \chref{ch:proofs}) is embedded into the
+  theory language as a proper sub-language.  Proof mode is entered by
+  stating some \hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}} or \hyperlink{command.lemma}{\mbox{\isa{\isacommand{lemma}}}} at the theory
+  level, and left again with the final conclusion (e.g.\ via \hyperlink{command.qed}{\mbox{\isa{\isacommand{qed}}}}).  Some theory specification mechanisms also require a proof,
+  such as \hyperlink{command.typedef}{\mbox{\isa{\isacommand{typedef}}}} in HOL, which demands non-emptiness of
+  the representing sets.%
+\end{isamarkuptext}%
+\isamarkuptrue%
+%
 \isamarkupsection{Defining theories \label{sec:begin-thy}%
 }
 \isamarkuptrue%
@@ -127,8 +144,9 @@
   \hyperlink{command.global.end}{\mbox{\isa{\isacommand{end}}}} has a different meaning: it concludes the
   theory itself (\secref{sec:begin-thy}).
   
-  \item \isa{{\isachardoublequote}{\isacharparenleft}{\isasymIN}\ c{\isacharparenright}{\isachardoublequote}} given after any local theory command
-  specifies an immediate target, e.g.\ ``\hyperlink{command.definition}{\mbox{\isa{\isacommand{definition}}}}~\isa{{\isachardoublequote}{\isacharparenleft}{\isasymIN}\ c{\isacharparenright}\ {\isasymdots}{\isachardoublequote}}'' or ``\hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}}~\isa{{\isachardoublequote}{\isacharparenleft}{\isasymIN}\ c{\isacharparenright}\ {\isasymdots}{\isachardoublequote}}''.  This works both in a local or
+  \item \isa{{\isachardoublequote}{\isacharparenleft}{\isachardoublequote}}\indexdef{}{keyword}{in}\hypertarget{keyword.in}{\hyperlink{keyword.in}{\mbox{\isa{\isakeyword{in}}}}}~\isa{{\isachardoublequote}c{\isacharparenright}{\isachardoublequote}} given after any
+  local theory command specifies an immediate target, e.g.\
+  ``\hyperlink{command.definition}{\mbox{\isa{\isacommand{definition}}}}~\isa{{\isachardoublequote}{\isacharparenleft}{\isasymIN}\ c{\isacharparenright}\ {\isasymdots}{\isachardoublequote}}'' or ``\hyperlink{command.theorem}{\mbox{\isa{\isacommand{theorem}}}}~\isa{{\isachardoublequote}{\isacharparenleft}{\isasymIN}\ c{\isacharparenright}\ {\isasymdots}{\isachardoublequote}}''.  This works both in a local or
   global theory context; the current target context will be suspended
   for this command only.  Note that ``\isa{{\isachardoublequote}{\isacharparenleft}{\isasymIN}\ {\isacharminus}{\isacharparenright}{\isachardoublequote}}'' will
   always produce a global result independently of the current target
@@ -792,8 +810,8 @@
   \end{matharray}
 
   \begin{mldecls}
-    \indexml{bind\_thms}\verb|bind_thms: string * thm list -> unit| \\
-    \indexml{bind\_thm}\verb|bind_thm: string * thm -> unit| \\
+    \indexdef{}{ML}{bind\_thms}\verb|bind_thms: string * thm list -> unit| \\
+    \indexdef{}{ML}{bind\_thm}\verb|bind_thm: string * thm -> unit| \\
   \end{mldecls}
 
   \begin{rail}
@@ -1178,7 +1196,7 @@
 
   \end{description}
 
-  See \hyperlink{file.~~/src/FOL/ex/IffOracle.thy}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}FOL{\isacharslash}ex{\isacharslash}IffOracle{\isachardot}thy}}}} for a worked example of
+  See \hyperlink{file.~~/src/FOL/ex/Iff-Oracle.thy}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}FOL{\isacharslash}ex{\isacharslash}Iff{\isacharunderscore}Oracle{\isachardot}thy}}}} for a worked example of
   defining a new primitive rule as oracle, and turning it into a proof
   method.%
 \end{isamarkuptext}%
--- a/doc-src/IsarRef/Thy/document/Symbols.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/Symbols.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{Symbols}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
@@ -20,7 +18,7 @@
 %
 \endisadelimtheory
 %
-\isamarkupchapter{Standard Isabelle symbols \label{app:symbols}%
+\isamarkupchapter{Predefined Isabelle symbols \label{app:symbols}%
 }
 \isamarkuptrue%
 %
--- a/doc-src/IsarRef/Thy/document/ZF_Specific.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/Thy/document/ZF_Specific.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -3,8 +3,6 @@
 \def\isabellecontext{ZF{\isacharunderscore}Specific}%
 %
 \isadelimtheory
-\isanewline
-\isanewline
 %
 \endisadelimtheory
 %
--- a/doc-src/IsarRef/isar-ref.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/isar-ref.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-%% $Id$
-
 \documentclass[12pt,a4paper,fleqn]{report}
 \usepackage{amssymb}
 \usepackage[greek,english]{babel}
@@ -27,12 +24,13 @@
   With Contributions by
   Clemens Ballarin,
   Stefan Berghofer, \\
+  Timothy Bourke,
   Lucas Dixon,
-  Florian Haftmann,
-  Gerwin Klein, \\
+  Florian Haftmann, \\
+  Gerwin Klein,
   Alexander Krauss,
-  Tobias Nipkow,
-  David von Oheimb, \\
+  Tobias Nipkow, \\
+  David von Oheimb,
   Larry Paulson,
   and Sebastian Skalberg
 }
@@ -82,7 +80,11 @@
 
 \pagenumbering{roman} \tableofcontents \clearfirst
 
+\part{Basic Concepts}
 \input{Thy/document/Introduction.tex}
+\input{Thy/document/Framework.tex}
+\input{Thy/document/First_Order_Logic.tex}
+\part{General Language Elements}
 \input{Thy/document/Outer_Syntax.tex}
 \input{Thy/document/Document_Preparation.tex}
 \input{Thy/document/Spec.tex}
@@ -90,10 +92,12 @@
 \input{Thy/document/Inner_Syntax.tex}
 \input{Thy/document/Misc.tex}
 \input{Thy/document/Generic.tex}
+\part{Object-Logics}
 \input{Thy/document/HOL_Specific.tex}
 \input{Thy/document/HOLCF_Specific.tex}
 \input{Thy/document/ZF_Specific.tex}
 
+\part{Appendix}
 \appendix
 \input{Thy/document/Quick_Reference.tex}
 \let\int\intorig
@@ -101,7 +105,7 @@
 \input{Thy/document/ML_Tactic.tex}
 
 \begingroup
-  \bibliographystyle{plain} \small\raggedright\frenchspacing
+  \bibliographystyle{abbrv} \small\raggedright\frenchspacing
   \bibliography{../manual}
 \endgroup
 
--- a/doc-src/IsarRef/style.sty	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/IsarRef/style.sty	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-%% $Id$
-
 %% toc
 \newcommand{\tocentry}[1]{\cleardoublepage\phantomsection\addcontentsline{toc}{chapter}{#1}
 \@mkboth{\MakeUppercase{#1}}{\MakeUppercase{#1}}}
@@ -18,12 +15,17 @@
 
 %% ML
 \newenvironment{mldecls}{\par\noindent\begingroup\def\isanewline{\\}\begin{tabular}{ll}}{\end{tabular}\medskip\endgroup}
-\newcommand{\indexml}[1]{\index{#1 (ML value)|bold}}
+
+%% Isar
+\newcommand{\isasymBBAR}{{\,\newdimen{\tmpheight}\settoheight\tmpheight{\isacharbar}\rule{1pt}{\tmpheight}\,}}
+\isafoldtag{noproof}\def\isafoldnoproof{~\isafold{proof}}
 
 %% math
+\newcommand{\isasymstrut}{\isamath{\mathstrut}}
+\newcommand{\isasymvartheta}{\isamath{\,\theta}}
 \newcommand{\isactrlvec}[1]{\emph{$\overline{#1}$}}
 \renewcommand{\isadigit}[1]{\isamath{#1}}
-
+\newcommand{\text}[1]{\mbox{#1}}
 
 %% global style options
 \pagestyle{headings}
--- a/doc-src/Ref/Makefile	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/Makefile	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-#
-# $Id$
-#
 
 ## targets
 
@@ -12,16 +9,15 @@
 include ../Makefile.in
 
 NAME = ref
-FILES = ref.tex introduction.tex goals.tex tactic.tex tctical.tex \
-	thm.tex theories.tex defining.tex syntax.tex substitution.tex \
-	simplifier.tex classical.tex theory-syntax.tex \
-	../rail.sty ../proof.sty ../iman.sty ../extra.sty ../ttbox.sty ../manual.bib
+FILES = ref.tex introduction.tex tactic.tex tctical.tex thm.tex	\
+	theories.tex defining.tex syntax.tex substitution.tex	\
+	simplifier.tex classical.tex ../proof.sty ../iman.sty	\
+	../extra.sty ../ttbox.sty ../manual.bib
 
 dvi: $(NAME).dvi
 
 $(NAME).dvi: $(FILES) isabelle.eps
 	$(LATEX) $(NAME)
-	$(RAIL) $(NAME)
 	$(BIBTEX) $(NAME)
 	$(LATEX) $(NAME)
 	$(LATEX) $(NAME)
@@ -32,7 +28,6 @@
 
 $(NAME).pdf: $(FILES) isabelle.pdf
 	$(PDFLATEX) $(NAME)
-	$(RAIL) $(NAME)
 	$(BIBTEX) $(NAME)
 	$(PDFLATEX) $(NAME)
 	$(PDFLATEX) $(NAME)
--- a/doc-src/Ref/classical.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/classical.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+
 \chapter{The Classical Reasoner}\label{chap:classical}
 \index{classical reasoner|(}
 \newcommand\ainfer[2]{\begin{array}{r@{\,}l}#2\\ \hline#1\end{array}}
@@ -28,29 +28,6 @@
 be traced, and their components can be called directly; in this manner,
 any proof can be viewed interactively.
 
-The simplest way to apply the classical reasoner (to subgoal~$i$) is to type
-\begin{ttbox}
-by (Blast_tac \(i\));
-\end{ttbox}
-This command quickly proves most simple formulas of the predicate calculus or
-set theory.  To attempt to prove subgoals using a combination of
-rewriting and classical reasoning, try
-\begin{ttbox}
-auto();                         \emph{\textrm{applies to all subgoals}}
-force i;                        \emph{\textrm{applies to one subgoal}}
-\end{ttbox}
-To do all obvious logical steps, even if they do not prove the
-subgoal, type one of the following:
-\begin{ttbox}
-by Safe_tac;                   \emph{\textrm{applies to all subgoals}}
-by (Clarify_tac \(i\));            \emph{\textrm{applies to one subgoal}}
-\end{ttbox}
-
-
-You need to know how the classical reasoner works in order to use it
-effectively.  There are many tactics to choose from, including 
-{\tt Fast_tac} and \texttt{Best_tac}.
-
 We shall first discuss the underlying principles, then present the classical
 reasoner.  Finally, we shall see how to instantiate it for new logics.  The
 logics FOL, ZF, HOL and HOLCF have it already installed.
--- a/doc-src/Ref/defining.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/defining.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,376 +1,5 @@
-%% $Id$
+
 \chapter{Defining Logics} \label{Defining-Logics}
-This chapter explains how to define new formal systems --- in particular,
-their concrete syntax.  While Isabelle can be regarded as a theorem prover
-for set theory, higher-order logic or the sequent calculus, its
-distinguishing feature is support for the definition of new logics.
-
-Isabelle logics are hierarchies of theories, which are described and
-illustrated in
-\iflabelundefined{sec:defining-theories}{{\em Introduction to Isabelle}}%
-{\S\ref{sec:defining-theories}}.  That material, together with the theory
-files provided in the examples directories, should suffice for all simple
-applications.  The easiest way to define a new theory is by modifying a
-copy of an existing theory.
-
-This chapter documents the meta-logic syntax, mixfix declarations and
-pretty printing.  The extended examples in \S\ref{sec:min_logics}
-demonstrate the logical aspects of the definition of theories.
-
-
-\section{Priority grammars} \label{sec:priority_grammars}
-\index{priority grammars|(}
-
-A context-free grammar contains a set of {\bf nonterminal symbols}, a set of
-{\bf terminal symbols} and a set of {\bf productions}\index{productions}.
-Productions have the form ${A=\gamma}$, where $A$ is a nonterminal and
-$\gamma$ is a string of terminals and nonterminals.  One designated
-nonterminal is called the {\bf start symbol}.  The language defined by the
-grammar consists of all strings of terminals that can be derived from the
-start symbol by applying productions as rewrite rules.
-
-The syntax of an Isabelle logic is specified by a {\bf priority
-  grammar}.\index{priorities} Each nonterminal is decorated by an integer
-priority, as in~$A^{(p)}$.  A nonterminal $A^{(p)}$ in a derivation may be
-rewritten using a production $A^{(q)} = \gamma$ only if~$p \leq q$.  Any
-priority grammar can be translated into a normal context free grammar by
-introducing new nonterminals and productions.
-
-Formally, a set of context free productions $G$ induces a derivation
-relation $\longrightarrow@G$.  Let $\alpha$ and $\beta$ denote strings of
-terminal or nonterminal symbols.  Then
-\[ \alpha\, A^{(p)}\, \beta ~\longrightarrow@G~ \alpha\,\gamma\,\beta \]
-if and only if $G$ contains some production $A^{(q)}=\gamma$ for~$p \leq q$.
-
-The following simple grammar for arithmetic expressions demonstrates how
-binding power and associativity of operators can be enforced by priorities.
-\begin{center}
-\begin{tabular}{rclr}
-  $A^{(9)}$ & = & {\tt0} \\
-  $A^{(9)}$ & = & {\tt(} $A^{(0)}$ {\tt)} \\
-  $A^{(0)}$ & = & $A^{(0)}$ {\tt+} $A^{(1)}$ \\
-  $A^{(2)}$ & = & $A^{(3)}$ {\tt*} $A^{(2)}$ \\
-  $A^{(3)}$ & = & {\tt-} $A^{(3)}$
-\end{tabular}
-\end{center}
-The choice of priorities determines that {\tt -} binds tighter than {\tt *},
-which binds tighter than {\tt +}.  Furthermore {\tt +} associates to the
-left and {\tt *} to the right.
-
-For clarity, grammars obey these conventions:
-\begin{itemize}
-\item All priorities must lie between~0 and \ttindex{max_pri}, which is a
-  some fixed integer.  Sometimes {\tt max_pri} is written as $\infty$.
-\item Priority 0 on the right-hand side and priority \ttindex{max_pri} on
-  the left-hand side may be omitted.
-\item The production $A^{(p)} = \alpha$ is written as $A = \alpha~(p)$; the
-  priority of the left-hand side actually appears in a column on the far
-  right.
-\item Alternatives are separated by~$|$.
-\item Repetition is indicated by dots~(\dots) in an informal but obvious
-  way.
-\end{itemize}
-
-Using these conventions and assuming $\infty=9$, the grammar
-takes the form
-\begin{center}
-\begin{tabular}{rclc}
-$A$ & = & {\tt0} & \hspace*{4em} \\
- & $|$ & {\tt(} $A$ {\tt)} \\
- & $|$ & $A$ {\tt+} $A^{(1)}$ & (0) \\
- & $|$ & $A^{(3)}$ {\tt*} $A^{(2)}$ & (2) \\
- & $|$ & {\tt-} $A^{(3)}$ & (3)
-\end{tabular}
-\end{center}
-\index{priority grammars|)}
-
-
-\begin{figure}\small
-\begin{center}
-\begin{tabular}{rclc}
-$any$ &=& $prop$ ~~$|$~~ $logic$ \\\\
-$prop$ &=& {\tt(} $prop$ {\tt)} \\
-     &$|$& $prop^{(4)}$ {\tt::} $type$ & (3) \\
-     &$|$& {\tt PROP} $aprop$ \\
-     &$|$& $any^{(3)}$ {\tt ==} $any^{(2)}$ & (2) \\
-     &$|$& $any^{(3)}$ {\tt =?=} $any^{(2)}$ & (2) \\
-     &$|$& $prop^{(2)}$ {\tt ==>} $prop^{(1)}$ & (1) \\
-     &$|$& {\tt[|} $prop$ {\tt;} \dots {\tt;} $prop$ {\tt|]} {\tt==>} $prop^{(1)}$ & (1) \\
-     &$|$& {\tt!!} $idts$ {\tt.} $prop$ & (0) \\
-     &$|$& {\tt OFCLASS} {\tt(} $type$ {\tt,} $logic$ {\tt)} \\\\
-$aprop$ &=& $id$ ~~$|$~~ $longid$ ~~$|$~~ $var$
-    ~~$|$~~ $logic^{(\infty)}$ {\tt(} $any$ {\tt,} \dots {\tt,} $any$ {\tt)} \\\\
-$logic$ &=& {\tt(} $logic$ {\tt)} \\
-      &$|$& $logic^{(4)}$ {\tt::} $type$ & (3) \\
-      &$|$& $id$ ~~$|$~~ $longid$ ~~$|$~~ $var$
-    ~~$|$~~ $logic^{(\infty)}$ {\tt(} $any$ {\tt,} \dots {\tt,} $any$ {\tt)} \\
-      &$|$& {\tt \%} $pttrns$ {\tt.} $any^{(3)}$ & (3) \\
-      &$|$& {\tt TYPE} {\tt(} $type$ {\tt)} \\\\
-$idts$ &=& $idt$ ~~$|$~~ $idt^{(1)}$ $idts$ \\\\
-$idt$ &=& $id$ ~~$|$~~ {\tt(} $idt$ {\tt)} \\
-    &$|$& $id$ {\tt ::} $type$ & (0) \\\\
-$pttrns$ &=& $pttrn$ ~~$|$~~ $pttrn^{(1)}$ $pttrns$ \\\\
-$pttrn$ &=& $idt$ \\\\
-$type$ &=& {\tt(} $type$ {\tt)} \\
-     &$|$& $tid$ ~~$|$~~ $tvar$ ~~$|$~~ $tid$ {\tt::} $sort$
-       ~~$|$~~ $tvar$ {\tt::} $sort$ \\
-     &$|$& $id$ ~~$|$~~ $type^{(\infty)}$ $id$
-                ~~$|$~~ {\tt(} $type$ {\tt,} \dots {\tt,} $type$ {\tt)} $id$ \\
-     &$|$& $longid$ ~~$|$~~ $type^{(\infty)}$ $longid$
-                ~~$|$~~ {\tt(} $type$ {\tt,} \dots {\tt,} $type$ {\tt)} $longid$ \\
-     &$|$& $type^{(1)}$ {\tt =>} $type$ & (0) \\
-     &$|$& {\tt[}  $type$ {\tt,} \dots {\tt,} $type$ {\tt]} {\tt=>} $type$&(0) \\\\
-$sort$ &=& $id$ ~~$|$~~ $longid$ ~~$|$~~ {\tt\ttlbrace\ttrbrace} ~~$|$~~
-  {\tt\ttlbrace} $id$ ~$|$~ $longid${\tt,}\dots{\tt,} $id$ ~$|$~$longid$ {\tt\ttrbrace}
-\end{tabular}
-\index{*PROP symbol}
-\index{*== symbol}\index{*=?= symbol}\index{*==> symbol}
-\index{*:: symbol}\index{*=> symbol}
-\index{sort constraints}
-%the index command: a percent is permitted, but braces must match!
-\index{%@{\tt\%} symbol}
-\index{{}@{\tt\ttlbrace} symbol}\index{{}@{\tt\ttrbrace} symbol}
-\index{*[ symbol}\index{*] symbol}
-\index{*"!"! symbol}
-\index{*"["| symbol}
-\index{*"|"] symbol}
-\end{center}
-\caption{Meta-logic syntax}\label{fig:pure_gram}
-\end{figure}
-
-
-\section{The Pure syntax} \label{sec:basic_syntax}
-\index{syntax!Pure|(}
-
-At the root of all object-logics lies the theory \thydx{Pure}.  It
-contains, among many other things, the Pure syntax.  An informal account of
-this basic syntax (types, terms and formulae) appears in
-\iflabelundefined{sec:forward}{{\em Introduction to Isabelle}}%
-{\S\ref{sec:forward}}.  A more precise description using a priority grammar
-appears in Fig.\ts\ref{fig:pure_gram}.  It defines the following
-nonterminals:
-\begin{ttdescription}
-  \item[\ndxbold{any}] denotes any term.
-
-  \item[\ndxbold{prop}] denotes terms of type {\tt prop}.  These are formulae
-    of the meta-logic.  Note that user constants of result type {\tt prop}
-    (i.e.\ $c :: \ldots \To prop$) should always provide concrete syntax.
-    Otherwise atomic propositions with head $c$ may be printed incorrectly.
-
-  \item[\ndxbold{aprop}] denotes atomic propositions.
-
-%% FIXME huh!?
-%  These typically
-%  include the judgement forms of the object-logic; its definition
-%  introduces a meta-level predicate for each judgement form.
-
-  \item[\ndxbold{logic}] denotes terms whose type belongs to class
-    \cldx{logic}, excluding type \tydx{prop}.
-
-  \item[\ndxbold{idts}] denotes a list of identifiers, possibly constrained
-    by types.
-    
-  \item[\ndxbold{pttrn}, \ndxbold{pttrns}] denote patterns for
-    abstraction, cases etc.  Initially the same as $idt$ and $idts$,
-    these are intended to be augmented by user extensions.
-
-  \item[\ndxbold{type}] denotes types of the meta-logic.
-
-  \item[\ndxbold{sort}] denotes meta-level sorts.
-\end{ttdescription}
-
-\begin{warn}
-  In {\tt idts}, note that \verb|x::nat y| is parsed as \verb|x::(nat y)|,
-  treating {\tt y} like a type constructor applied to {\tt nat}.  The
-  likely result is an error message.  To avoid this interpretation, use
-  parentheses and write \verb|(x::nat) y|.
-  \index{type constraints}\index{*:: symbol}
-
-  Similarly, \verb|x::nat y::nat| is parsed as \verb|x::(nat y::nat)| and
-  yields an error.  The correct form is \verb|(x::nat) (y::nat)|.
-\end{warn}
-
-\begin{warn}
-  Type constraints bind very weakly.  For example, \verb!x<y::nat! is normally
-  parsed as \verb!(x<y)::nat!, unless \verb$<$ has priority of 3 or less, in
-  which case the string is likely to be ambiguous.  The correct form is
-  \verb!x<(y::nat)!.
-\end{warn}
-
-\subsection{Logical types and default syntax}\label{logical-types}
-\index{lambda calc@$\lambda$-calculus}
-
-Isabelle's representation of mathematical languages is based on the
-simply typed $\lambda$-calculus.  All logical types, namely those of
-class \cldx{logic}, are automatically equipped with a basic syntax of
-types, identifiers, variables, parentheses, $\lambda$-abstraction and
-application.
-\begin{warn}
-  Isabelle combines the syntaxes for all types of class \cldx{logic} by
-  mapping all those types to the single nonterminal $logic$.  Thus all
-  productions of $logic$, in particular $id$, $var$ etc, become available.
-\end{warn}
-
-
-\subsection{Lexical matters}
-The parser does not process input strings directly.  It operates on token
-lists provided by Isabelle's \bfindex{lexer}.  There are two kinds of
-tokens: \bfindex{delimiters} and \bfindex{name tokens}.
-
-\index{reserved words}
-Delimiters can be regarded as reserved words of the syntax.  You can
-add new ones when extending theories.  In Fig.\ts\ref{fig:pure_gram} they
-appear in typewriter font, for example {\tt ==}, {\tt =?=} and
-{\tt PROP}\@.
-
-Name tokens have a predefined syntax.  The lexer distinguishes six disjoint
-classes of names: \rmindex{identifiers}, \rmindex{unknowns}, type
-identifiers\index{type identifiers}, type unknowns\index{type unknowns},
-\rmindex{numerals}, \rmindex{strings}.  They are denoted by \ndxbold{id},
-\ndxbold{var}, \ndxbold{tid}, \ndxbold{tvar}, \ndxbold{num}, \ndxbold{xnum},
-\ndxbold{xstr}, respectively.  Typical examples are {\tt x}, {\tt ?x7}, {\tt
-  'a}, {\tt ?'a3}, {\tt \#42}, {\tt ''foo bar''}.  Here is the precise syntax:
-\begin{eqnarray*}
-id        & =   & letter\,quasiletter^* \\
-longid    & =   & id (\mbox{\tt .}id)^+ \\
-var       & =   & \mbox{\tt ?}id ~~|~~ \mbox{\tt ?}id\mbox{\tt .}nat \\
-tid       & =   & \mbox{\tt '}id \\
-tvar      & =   & \mbox{\tt ?}tid ~~|~~
-                  \mbox{\tt ?}tid\mbox{\tt .}nat \\
-num       & =   & nat ~~|~~ \mbox{\tt-}nat ~~|~~ \verb,0x,\,hex^+ ~~|~~ \verb,0b,\,bin^+ \\
-xnum      & =   & \mbox{\tt \#}num \\
-xstr      & =   & \mbox{\tt ''~\dots~\tt ''} \\[1ex]
-letter & = & latin ~|~ \verb,\<,latin\verb,>, ~|~ \verb,\<,latin\,latin\verb,>, ~|~ greek ~| \\
-      &   & \verb,\<^isub>, ~|~ \verb,\<^isup>, \\
-quasiletter & = & letter ~|~ digit ~|~ \verb,_, ~|~ \verb,', \\
-latin & = & \verb,a, ~|~ \dots ~|~ \verb,z, ~|~ \verb,A, ~|~ \dots ~|~ \verb,Z, \\
-digit & = & \verb,0, ~|~ \dots ~|~ \verb,9, \\
-nat & = & digit^+ \\
-bin & = & \verb,0, ~|~ \verb,1, \\
-hex & = & digit  ~|~  \verb,a, ~|~ \dots ~|~ \verb,f, ~|~ \verb,A, ~|~ \dots ~|~ \verb,F, \\
-greek & = & \verb,\<alpha>, ~|~ \verb,\<beta>, ~|~ \verb,\<gamma>, ~|~ \verb,\<delta>, ~| \\
-      &   & \verb,\<epsilon>, ~|~ \verb,\<zeta>, ~|~ \verb,\<eta>, ~|~ \verb,\<theta>, ~| \\
-      &   & \verb,\<iota>, ~|~ \verb,\<kappa>, ~|~ \verb,\<mu>, ~|~ \verb,\<nu>, ~| \\
-      &   & \verb,\<xi>, ~|~ \verb,\<pi>, ~|~ \verb,\<rho>, ~|~ \verb,\<sigma>, ~| \\
-      &   & \verb,\<tau>, ~|~ \verb,\<upsilon>, ~|~ \verb,\<phi>, ~|~ \verb,\<psi>, ~| \\
-      &   & \verb,\<omega>, ~|~ \verb,\<Gamma>, ~|~ \verb,\<Delta>, ~|~ \verb,\<Theta>, ~| \\
-      &   & \verb,\<Lambda>, ~|~ \verb,\<Xi>, ~|~ \verb,\<Pi>, ~|~ \verb,\<Sigma>, ~| \\
-      &   & \verb,\<Upsilon>, ~|~ \verb,\<Phi>, ~|~ \verb,\<Psi>, ~|~ \verb,\<Omega>, \\
-\end{eqnarray*}
-The lexer repeatedly takes the longest prefix of the input string that
-forms a valid token.  A maximal prefix that is both a delimiter and a
-name is treated as a delimiter.  Spaces, tabs, newlines and formfeeds
-are separators; they never occur within tokens, except those of class
-$xstr$.
-
-\medskip
-Delimiters need not be separated by white space.  For example, if {\tt -}
-is a delimiter but {\tt --} is not, then the string {\tt --} is treated as
-two consecutive occurrences of the token~{\tt -}.  In contrast, \ML\
-treats {\tt --} as a single symbolic name.  The consequence of Isabelle's
-more liberal scheme is that the same string may be parsed in different ways
-after extending the syntax: after adding {\tt --} as a delimiter, the input
-{\tt --} is treated as a single token.
-
-A \ndxbold{var} or \ndxbold{tvar} describes an unknown, which is internally
-a pair of base name and index (\ML\ type \mltydx{indexname}).  These
-components are either separated by a dot as in {\tt ?x.1} or {\tt ?x7.3} or
-run together as in {\tt ?x1}.  The latter form is possible if the base name
-does not end with digits.  If the index is 0, it may be dropped altogether:
-{\tt ?x} abbreviates both {\tt ?x0} and {\tt ?x.0}.
-
-Tokens of class $num$, $xnum$ or $xstr$ are not used by the meta-logic.
-Object-logics may provide numerals and string constants by adding appropriate
-productions and translation functions.
-
-\medskip
-Although name tokens are returned from the lexer rather than the parser, it
-is more logical to regard them as nonterminals.  Delimiters, however, are
-terminals; they are just syntactic sugar and contribute nothing to the
-abstract syntax tree.
-
-
-\subsection{*Inspecting the syntax} \label{pg:print_syn}
-\begin{ttbox}
-syn_of              : theory -> Syntax.syntax
-print_syntax        : theory -> unit
-Syntax.print_syntax : Syntax.syntax -> unit
-Syntax.print_gram   : Syntax.syntax -> unit
-Syntax.print_trans  : Syntax.syntax -> unit
-\end{ttbox}
-The abstract type \mltydx{Syntax.syntax} allows manipulation of syntaxes
-in \ML.  You can display values of this type by calling the following
-functions:
-\begin{ttdescription}
-\item[\ttindexbold{syn_of} {\it thy}] returns the syntax of the Isabelle
-  theory~{\it thy} as an \ML\ value.
-
-\item[\ttindexbold{print_syntax} $thy$] uses {\tt Syntax.print_syntax}
- to display the syntax part of theory $thy$.
-
-\item[\ttindexbold{Syntax.print_syntax} {\it syn}] shows virtually all
-  information contained in the syntax {\it syn}.  The displayed output can
-  be large.  The following two functions are more selective.
-
-\item[\ttindexbold{Syntax.print_gram} {\it syn}] shows the grammar part
-  of~{\it syn}, namely the lexicon, logical types and productions.  These are
-  discussed below.
-
-\item[\ttindexbold{Syntax.print_trans} {\it syn}] shows the translation
-  part of~{\it syn}, namely the constants, parse/print macros and
-  parse/print translations.
-\end{ttdescription}
-
-The output of the above print functions is divided into labelled sections.
-The grammar is represented by {\tt lexicon}, {\tt logtypes} and {\tt prods}.
-The rest refers to syntactic translations and macro expansion.  Here is an
-explanation of the various sections.
-\begin{description}
-  \item[{\tt lexicon}] lists the delimiters used for lexical
-    analysis.\index{delimiters}
-
-  \item[{\tt logtypes}] lists the types that are regarded the same as {\tt
-    logic} syntactically.  Thus types of object-logics (e.g.\ {\tt nat}, say)
-    will be automatically equipped with the standard syntax of
-    $\lambda$-calculus.
-
-  \item[{\tt prods}] lists the \rmindex{productions} of the priority grammar.
-    The nonterminal $A^{(n)}$ is rendered in {\sc ascii} as {\tt $A$[$n$]}.
-    Each delimiter is quoted.  Some productions are shown with {\tt =>} and
-    an attached string.  These strings later become the heads of parse
-    trees; they also play a vital role when terms are printed (see
-    \S\ref{sec:asts}).
-
-    Productions with no strings attached are called {\bf copy
-      productions}\indexbold{productions!copy}.  Their right-hand side must
-    have exactly one nonterminal symbol (or name token).  The parser does
-    not create a new parse tree node for copy productions, but simply
-    returns the parse tree of the right-hand symbol.
-
-    If the right-hand side consists of a single nonterminal with no
-    delimiters, then the copy production is called a {\bf chain
-      production}.  Chain productions act as abbreviations:
-    conceptually, they are removed from the grammar by adding new
-    productions.  Priority information attached to chain productions is
-    ignored; only the dummy value $-1$ is displayed.
-    
-  \item[\ttindex{print_modes}] lists the alternative print modes
-    provided by this syntax (see \S\ref{sec:prmodes}).
-
-  \item[{\tt consts}, {\tt parse_rules}, {\tt print_rules}]
-    relate to macros (see \S\ref{sec:macros}).
-
-  \item[{\tt parse_ast_translation}, {\tt print_ast_translation}]
-    list sets of constants that invoke translation functions for abstract
-    syntax trees.  Section \S\ref{sec:asts} below discusses this obscure
-    matter.\index{constants!for translations}
-
-  \item[{\tt parse_translation}, {\tt print_translation}] list the sets
-    of constants that invoke translation functions for terms (see
-    \S\ref{sec:tr_funs}).
-\end{description}
-\index{syntax!Pure|)}
-
 
 \section{Mixfix declarations} \label{sec:mixfix}
 \index{mixfix declarations|(}
@@ -515,49 +144,6 @@
   syntax}.  Try this as an exercise and study the changes in the
 grammar.
 
-\subsection{The mixfix template}
-Let us now take a closer look at the string $template$ appearing in mixfix
-annotations.  This string specifies a list of parsing and printing
-directives: delimiters\index{delimiters}, arguments, spaces, blocks of
-indentation and line breaks.  These are encoded by the following character
-sequences:
-\index{pretty printing|(}
-\begin{description}
-\item[~$d$~] is a delimiter, namely a non-empty sequence of characters
-  other than the special characters {\tt _}, {\tt(}, {\tt)} and~{\tt/}.
-  Even these characters may appear if escaped; this means preceding it with
-  a~{\tt '} (single quote).  Thus you have to write {\tt ''} if you really
-  want a single quote.  Furthermore, a~{\tt '} followed by a space separates
-  delimiters without extra white space being added for printing.
-
-\item[~{\tt_}~] is an argument position, which stands for a nonterminal symbol
-  or name token.
-
-\item[~$s$~] is a non-empty sequence of spaces for printing.  This and the
-  following specifications do not affect parsing at all.
-
-\item[~{\tt(}$n$~] opens a pretty printing block.  The optional number $n$
-  specifies how much indentation to add when a line break occurs within the
-  block.  If {\tt(} is not followed by digits, the indentation defaults
-  to~0.
-
-\item[~{\tt)}~] closes a pretty printing block.
-
-\item[~{\tt//}~] forces a line break.
-
-\item[~{\tt/}$s$~] allows a line break.  Here $s$ stands for the string of
-  spaces (zero or more) right after the {\tt /} character.  These spaces
-  are printed if the break is not taken.
-\end{description}
-For example, the template {\tt"(_ +/ _)"} specifies an infix operator.
-There are two argument positions; the delimiter~{\tt+} is preceded by a
-space and followed by a space or line break; the entire phrase is a pretty
-printing block.  Other examples appear in Fig.\ts\ref{fig:set_trans} below.
-Isabelle's pretty printer resembles the one described in
-Paulson~\cite{paulson-ml2}.
-
-\index{pretty printing|)}
-
 
 \subsection{Infixes}
 \indexbold{infixes}
@@ -723,141 +309,6 @@
 ambiguity should be eliminated by changing the grammar or the rule.
 
 
-\section{Example: some minimal logics} \label{sec:min_logics}
-\index{examples!of logic definitions}
-
-This section presents some examples that have a simple syntax.  They
-demonstrate how to define new object-logics from scratch.
-
-First we must define how an object-logic syntax is embedded into the
-meta-logic.  Since all theorems must conform to the syntax for~\ndx{prop}
-(see Fig.\ts\ref{fig:pure_gram}), that syntax has to be extended with the
-object-level syntax.  Assume that the syntax of your object-logic defines a
-meta-type~\tydx{o} of formulae which refers to the nonterminal {\tt logic}.
-These formulae can now appear in axioms and theorems wherever \ndx{prop} does
-if you add the production
-\[ prop ~=~ logic. \]
-This is not supposed to be a copy production but an implicit coercion from
-formulae to propositions:
-\begin{ttbox}
-Base = Pure +
-types
-  o
-arities
-  o :: logic
-consts
-  Trueprop :: o => prop   ("_" 5)
-end
-\end{ttbox}
-The constant \cdx{Trueprop} (the name is arbitrary) acts as an invisible
-coercion function.  Assuming this definition resides in a file {\tt Base.thy},
-you have to load it with the command {\tt use_thy "Base"}.
-
-One of the simplest nontrivial logics is {\bf minimal logic} of
-implication.  Its definition in Isabelle needs no advanced features but
-illustrates the overall mechanism nicely:
-\begin{ttbox}
-Hilbert = Base +
-consts
-  "-->" :: [o, o] => o   (infixr 10)
-rules
-  K     "P --> Q --> P"
-  S     "(P --> Q --> R) --> (P --> Q) --> P --> R"
-  MP    "[| P --> Q; P |] ==> Q"
-end
-\end{ttbox}
-After loading this definition from the file {\tt Hilbert.thy}, you can
-start to prove theorems in the logic:
-\begin{ttbox}
-Goal "P --> P";
-{\out Level 0}
-{\out P --> P}
-{\out  1.  P --> P}
-\ttbreak
-by (resolve_tac [Hilbert.MP] 1);
-{\out Level 1}
-{\out P --> P}
-{\out  1.  ?P --> P --> P}
-{\out  2.  ?P}
-\ttbreak
-by (resolve_tac [Hilbert.MP] 1);
-{\out Level 2}
-{\out P --> P}
-{\out  1.  ?P1 --> ?P --> P --> P}
-{\out  2.  ?P1}
-{\out  3.  ?P}
-\ttbreak
-by (resolve_tac [Hilbert.S] 1);
-{\out Level 3}
-{\out P --> P}
-{\out  1.  P --> ?Q2 --> P}
-{\out  2.  P --> ?Q2}
-\ttbreak
-by (resolve_tac [Hilbert.K] 1);
-{\out Level 4}
-{\out P --> P}
-{\out  1.  P --> ?Q2}
-\ttbreak
-by (resolve_tac [Hilbert.K] 1);
-{\out Level 5}
-{\out P --> P}
-{\out No subgoals!}
-\end{ttbox}
-As we can see, this Hilbert-style formulation of minimal logic is easy to
-define but difficult to use.  The following natural deduction formulation is
-better:
-\begin{ttbox}
-MinI = Base +
-consts
-  "-->" :: [o, o] => o   (infixr 10)
-rules
-  impI  "(P ==> Q) ==> P --> Q"
-  impE  "[| P --> Q; P |] ==> Q"
-end
-\end{ttbox}
-Note, however, that although the two systems are equivalent, this fact
-cannot be proved within Isabelle.  Axioms {\tt S} and {\tt K} can be
-derived in {\tt MinI} (exercise!), but {\tt impI} cannot be derived in {\tt
-  Hilbert}.  The reason is that {\tt impI} is only an {\bf admissible} rule
-in {\tt Hilbert}, something that can only be shown by induction over all
-possible proofs in {\tt Hilbert}.
-
-We may easily extend minimal logic with falsity:
-\begin{ttbox}
-MinIF = MinI +
-consts
-  False :: o
-rules
-  FalseE "False ==> P"
-end
-\end{ttbox}
-On the other hand, we may wish to introduce conjunction only:
-\begin{ttbox}
-MinC = Base +
-consts
-  "&" :: [o, o] => o   (infixr 30)
-\ttbreak
-rules
-  conjI  "[| P; Q |] ==> P & Q"
-  conjE1 "P & Q ==> P"
-  conjE2 "P & Q ==> Q"
-end
-\end{ttbox}
-And if we want to have all three connectives together, we create and load a
-theory file consisting of a single line:
-\begin{ttbox}
-MinIFC = MinIF + MinC
-\end{ttbox}
-Now we can prove mixed theorems like
-\begin{ttbox}
-Goal "P & False --> Q";
-by (resolve_tac [MinI.impI] 1);
-by (dresolve_tac [MinC.conjE2] 1);
-by (eresolve_tac [MinIF.FalseE] 1);
-\end{ttbox}
-Try this as an exercise!
-
-
 %%% Local Variables: 
 %%% mode: latex
 %%% TeX-master: "ref"
--- a/doc-src/Ref/introduction.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/introduction.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,23 +1,5 @@
-
-%% $Id$
 
 \chapter{Basic Use of Isabelle}\index{sessions|(} 
-The Reference Manual is a comprehensive description of Isabelle
-proper, including all \ML{} commands, functions and packages.  It
-really is intended for reference, perhaps for browsing, but not for
-reading through.  It is not a tutorial, but assumes familiarity with
-the basic logical concepts of Isabelle.
-
-When you are looking for a way of performing some task, scan the Table of
-Contents for a relevant heading.  Functions are organized by their purpose,
-by their operands (subgoals, tactics, theorems), and by their usefulness.
-In each section, basic functions appear first, then advanced functions, and
-finally esoteric functions.  Use the Index when you are looking for the
-definition of a particular Isabelle function.
-
-A few examples are presented.  Many example files are distributed with
-Isabelle, however; please experiment interactively.
-
 
 \section{Basic interaction with Isabelle}
 \index{starting up|bold}\nobreak
@@ -217,109 +199,6 @@
 value is returned.
 
 
-\section{Printing of terms and theorems}\label{sec:printing-control}
-\index{printing control|(}
-Isabelle's pretty printer is controlled by a number of parameters.
-
-\subsection{Printing limits}
-\begin{ttbox} 
-Pretty.setdepth  : int -> unit
-Pretty.setmargin : int -> unit
-print_depth      : int -> unit
-\end{ttbox}
-These set limits for terminal output.  See also {\tt goals_limit},
-which limits the number of subgoals printed
-(\S\ref{sec:goals-printing}).
-
-\begin{ttdescription}
-\item[\ttindexbold{Pretty.setdepth} \(d\);] tells Isabelle's pretty printer to
-  limit the printing depth to~$d$.  This affects the display of theorems and
-  terms.  The default value is~0, which permits printing to an arbitrary
-  depth.  Useful values for $d$ are~10 and~20.
-
-\item[\ttindexbold{Pretty.setmargin} \(m\);]  
-  tells Isabelle's pretty printer to assume a right margin (page width)
-  of~$m$.  The initial margin is~76.
-
-\item[\ttindexbold{print_depth} \(n\);]  
-  limits the printing depth of complex \ML{} values, such as theorems and
-  terms.  This command affects the \ML{} top level and its effect is
-  compiler-dependent.  Typically $n$ should be less than~10.
-\end{ttdescription}
-
-
-\subsection{Printing of hypotheses, brackets, types etc.}
-\index{meta-assumptions!printing of}
-\index{types!printing of}\index{sorts!printing of}
-\begin{ttbox} 
-show_hyps     : bool ref \hfill{\bf initially false}
-show_tags     : bool ref \hfill{\bf initially false}
-show_brackets : bool ref \hfill{\bf initially false}
-show_types    : bool ref \hfill{\bf initially false}
-show_sorts    : bool ref \hfill{\bf initially false}
-show_consts   : bool ref \hfill{\bf initially false}
-long_names    : bool ref \hfill{\bf initially false}
-\end{ttbox}
-These flags allow you to control how much information is displayed for
-types, terms and theorems.  The hypotheses of theorems \emph{are}
-normally shown.  Superfluous parentheses of types and terms are not.
-Types and sorts of variables are normally hidden.
-
-Note that displaying types and sorts may explain why a polymorphic
-inference rule fails to resolve with some goal, or why a rewrite rule
-does not apply as expected.
-
-\begin{ttdescription}
-
-\item[reset \ttindexbold{show_hyps};] makes Isabelle show each
-  meta-level hypothesis as a dot.
-  
-\item[set \ttindexbold{show_tags};] makes Isabelle show tags of theorems
-  (which are basically just comments that may be attached by some tools).
-  
-\item[set \ttindexbold{show_brackets};] makes Isabelle show full
-  bracketing.  In particular, this reveals the grouping of infix
-  operators.
-  
-\item[set \ttindexbold{show_types};] makes Isabelle show types when
-  printing a term or theorem.
-  
-\item[set \ttindexbold{show_sorts};] makes Isabelle show both types
-  and the sorts of type variables, independently of the value of
-  \texttt{show_types}.
-  
-\item[set \ttindexbold{show_consts};] makes Isabelle show types of constants
-  when printing proof states.  Note that the output can be enormous as
-  polymorphic constants often occur at several different type instances.
-
-\item[set \ttindexbold{long_names};] forces names of all objects
-  (types, constants, theorems, etc.) to be printed in their fully
-  qualified internal form.
-
-\end{ttdescription}
-
-
-\subsection{Eta-contraction before printing}
-\begin{ttbox} 
-eta_contract: bool ref
-\end{ttbox}
-The {\bf $\eta$-contraction law} asserts $(\lambda x.f(x))\equiv f$,
-provided $x$ is not free in ~$f$.  It asserts {\bf extensionality} of
-functions: $f\equiv g$ if $f(x)\equiv g(x)$ for all~$x$.  Higher-order
-unification frequently puts terms into a fully $\eta$-expanded form.  For
-example, if $F$ has type $(\tau\To\tau)\To\tau$ then its expanded form is
-$\lambda h.F(\lambda x.h(x))$.  By default, the user sees this expanded
-form.
-
-\begin{ttdescription}
-\item[set \ttindexbold{eta_contract};]
-makes Isabelle perform $\eta$-contractions before printing, so that
-$\lambda h.F(\lambda x.h(x))$ appears simply as~$F$.  The
-distinction between a term and its $\eta$-expanded form occasionally
-matters.
-\end{ttdescription}
-\index{printing control|)}
-
 \section{Diagnostic messages}
 \index{error messages}
 \index{warnings}
@@ -351,40 +230,16 @@
 \ttindex{warning} resume normal program execution.
 
 
-\section{Displaying exceptions as error messages}
-\index{exceptions!printing of}
+\section{Timing}
+\index{timing statistics}\index{proofs!timing}
 \begin{ttbox} 
-print_exn: exn -> 'a
+timing: bool ref \hfill{\bf initially false}
 \end{ttbox}
-Certain Isabelle primitives, such as the forward proof functions {\tt RS}
-and {\tt RSN}, are called both interactively and from programs.  They
-indicate errors not by printing messages, but by raising exceptions.  For
-interactive use, \ML's reporting of an uncaught exception may be
-uninformative.  The Poly/ML function {\tt exception_trace} can generate a
-backtrace.\index{Poly/{\ML} compiler}
 
 \begin{ttdescription}
-\item[\ttindexbold{print_exn} $e$] 
-displays the exception~$e$ in a readable manner, and then re-raises~$e$.
-Typical usage is~\hbox{\tt $EXP$ handle e => print_exn e;}, where
-$EXP$ is an expression that may raise an exception.
-
-{\tt print_exn} can display the following common exceptions, which concern
-types, terms, theorems and theories, respectively.  Each carries a message
-and related information.
-\begin{ttbox} 
-exception TYPE   of string * typ list * term list
-exception TERM   of string * term list
-exception THM    of string * int * thm list
-exception THEORY of string * theory list
-\end{ttbox}
+\item[set \ttindexbold{timing};] enables global timing in Isabelle.
+  This information is compiler-dependent.
 \end{ttdescription}
-\begin{warn}
-  {\tt print_exn} prints terms by calling \ttindex{prin}, which obtains
-  pretty printing information from the proof state last stored in the
-  subgoal module.  The appearance of the output thus depends upon the
-  theory used in the last interactive proof.
-\end{warn}
 
 \index{sessions|)}
 
--- a/doc-src/Ref/ref.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/ref.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,13 +1,12 @@
 \documentclass[12pt,a4paper]{report}
-\usepackage{graphicx,../iman,../extra,../ttbox,../proof,../rail,../pdfsetup}
+\usepackage{graphicx,../iman,../extra,../ttbox,../proof,../pdfsetup}
 
-%% $Id$
 %%\includeonly{}
 %%% to index ids: \[\\tt \([a-zA-Z0-9][a-zA-Z0-9_'.]*\)    [\\ttindexbold{\1}
 %%% to delete old ones:  \\indexbold{\*[^}]*}
 %% run    sedindex ref    to prepare index file
 %%% needs chapter on Provers/typedsimp.ML?
-\title{\includegraphics[scale=0.5]{isabelle} \\[4ex] The Isabelle Reference Manual}
+\title{\includegraphics[scale=0.5]{isabelle} \\[4ex] Old Isabelle Reference Manual}
 
 \author{{\em Lawrence C. Paulson}\\
         Computer Laboratory \\ University of Cambridge \\
@@ -22,10 +21,6 @@
 \sloppy
 \binperiod     %%%treat . like a binary operator
 
-\railalias{lbrace}{\ttlbrace}
-\railalias{rbrace}{\ttrbrace}
-\railterm{lbrace,rbrace}
-
 \begin{document}
 \underscoreoff
 
@@ -34,17 +29,10 @@
 \index{meta-rules|see{meta-rules}}
 
 \maketitle 
-\emph{Note}: this document is part of the earlier Isabelle documentation, 
-which is somewhat superseded by the Isabelle/HOL
-\emph{Tutorial}~\cite{isa-tutorial}. Much of it is concerned with 
-the old-style theory syntax and the primitives for conducting proofs 
-using the ML top level. This style of interaction is largely obsolete:
-most Isabelle proofs are now written using the Isar 
-language and the Proof General interface. However, this is the only
-comprehensive Isabelle reference manual.  
-
-See also the \emph{Introduction to Isabelle}, which has tutorial examples
-on conducting proofs using the ML top-level.
+\emph{Note}: this document is part of the earlier Isabelle
+documentation and is mostly outdated.  Fully obsolete parts of the
+original text have already been removed.  The remaining material
+covers some aspects that did not make it into the newer manuals yet.
 
 \subsubsection*{Acknowledgements} 
 Tobias Nipkow, of T. U. Munich, wrote most of
@@ -62,7 +50,6 @@
 \pagenumbering{roman} \tableofcontents \clearfirst
 
 \include{introduction}
-\include{goals}
 \include{tactic}
 \include{tctical}
 \include{thm}
--- a/doc-src/Ref/simplifier.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/simplifier.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+
 \chapter{Simplification}
 \label{chap:simplification}
 \index{simplification|(}
@@ -810,173 +810,6 @@
 \end{warn}
 
 
-\section{Examples of using the Simplifier}
-\index{examples!of simplification} Assume we are working within {\tt
-  FOL} (see the file \texttt{FOL/ex/Nat}) and that
-\begin{ttdescription}
-\item[Nat.thy] 
-  is a theory including the constants $0$, $Suc$ and $+$,
-\item[add_0]
-  is the rewrite rule $0+\Var{n} = \Var{n}$,
-\item[add_Suc]
-  is the rewrite rule $Suc(\Var{m})+\Var{n} = Suc(\Var{m}+\Var{n})$,
-\item[induct]
-  is the induction rule $\List{\Var{P}(0);\; \Forall x. \Var{P}(x)\Imp
-    \Var{P}(Suc(x))} \Imp \Var{P}(\Var{n})$.
-\end{ttdescription}
-We augment the implicit simpset inherited from \texttt{Nat} with the
-basic rewrite rules for addition of natural numbers:
-\begin{ttbox}
-Addsimps [add_0, add_Suc];
-\end{ttbox}
-
-\subsection{A trivial example}
-Proofs by induction typically involve simplification.  Here is a proof
-that~0 is a right identity:
-\begin{ttbox}
-Goal "m+0 = m";
-{\out Level 0}
-{\out m + 0 = m}
-{\out  1. m + 0 = m}
-\end{ttbox}
-The first step is to perform induction on the variable~$m$.  This returns a
-base case and inductive step as two subgoals:
-\begin{ttbox}
-by (res_inst_tac [("n","m")] induct 1);
-{\out Level 1}
-{\out m + 0 = m}
-{\out  1. 0 + 0 = 0}
-{\out  2. !!x. x + 0 = x ==> Suc(x) + 0 = Suc(x)}
-\end{ttbox}
-Simplification solves the first subgoal trivially:
-\begin{ttbox}
-by (Simp_tac 1);
-{\out Level 2}
-{\out m + 0 = m}
-{\out  1. !!x. x + 0 = x ==> Suc(x) + 0 = Suc(x)}
-\end{ttbox}
-The remaining subgoal requires \ttindex{Asm_simp_tac} in order to use the
-induction hypothesis as a rewrite rule:
-\begin{ttbox}
-by (Asm_simp_tac 1);
-{\out Level 3}
-{\out m + 0 = m}
-{\out No subgoals!}
-\end{ttbox}
-
-\subsection{An example of tracing}
-\index{tracing!of simplification|(}\index{*trace_simp}
-
-Let us prove a similar result involving more complex terms.  We prove
-that addition is commutative.
-\begin{ttbox}
-Goal "m+Suc(n) = Suc(m+n)";
-{\out Level 0}
-{\out m + Suc(n) = Suc(m + n)}
-{\out  1. m + Suc(n) = Suc(m + n)}
-\end{ttbox}
-Performing induction on~$m$ yields two subgoals:
-\begin{ttbox}
-by (res_inst_tac [("n","m")] induct 1);
-{\out Level 1}
-{\out m + Suc(n) = Suc(m + n)}
-{\out  1. 0 + Suc(n) = Suc(0 + n)}
-{\out  2. !!x. x + Suc(n) = Suc(x + n) ==>}
-{\out          Suc(x) + Suc(n) = Suc(Suc(x) + n)}
-\end{ttbox}
-Simplification solves the first subgoal, this time rewriting two
-occurrences of~0:
-\begin{ttbox}
-by (Simp_tac 1);
-{\out Level 2}
-{\out m + Suc(n) = Suc(m + n)}
-{\out  1. !!x. x + Suc(n) = Suc(x + n) ==>}
-{\out          Suc(x) + Suc(n) = Suc(Suc(x) + n)}
-\end{ttbox}
-Switching tracing on illustrates how the simplifier solves the remaining
-subgoal: 
-\begin{ttbox}
-set trace_simp;
-by (Asm_simp_tac 1);
-\ttbreak
-{\out Adding rewrite rule:}
-{\out .x + Suc n == Suc (.x + n)}
-\ttbreak
-{\out Applying instance of rewrite rule:}
-{\out ?m + Suc ?n == Suc (?m + ?n)}
-{\out Rewriting:}
-{\out Suc .x + Suc n == Suc (Suc .x + n)}
-\ttbreak
-{\out Applying instance of rewrite rule:}
-{\out Suc ?m + ?n == Suc (?m + ?n)}
-{\out Rewriting:}
-{\out Suc .x + n == Suc (.x + n)}
-\ttbreak
-{\out Applying instance of rewrite rule:}
-{\out Suc ?m + ?n == Suc (?m + ?n)}
-{\out Rewriting:}
-{\out Suc .x + n == Suc (.x + n)}
-\ttbreak
-{\out Applying instance of rewrite rule:}
-{\out ?x = ?x == True}
-{\out Rewriting:}
-{\out Suc (Suc (.x + n)) = Suc (Suc (.x + n)) == True}
-\ttbreak
-{\out Level 3}
-{\out m + Suc(n) = Suc(m + n)}
-{\out No subgoals!}
-\end{ttbox}
-Many variations are possible.  At Level~1 (in either example) we could have
-solved both subgoals at once using the tactical \ttindex{ALLGOALS}:
-\begin{ttbox}
-by (ALLGOALS Asm_simp_tac);
-{\out Level 2}
-{\out m + Suc(n) = Suc(m + n)}
-{\out No subgoals!}
-\end{ttbox}
-\index{tracing!of simplification|)}
-
-
-\subsection{Free variables and simplification}
-
-Here is a conjecture to be proved for an arbitrary function~$f$
-satisfying the law $f(Suc(\Var{n})) = Suc(f(\Var{n}))$:
-\begin{ttbox}
-val [prem] = Goal
-               "(!!n. f(Suc(n)) = Suc(f(n))) ==> f(i+j) = i+f(j)";
-{\out Level 0}
-{\out f(i + j) = i + f(j)}
-{\out  1. f(i + j) = i + f(j)}
-\ttbreak
-{\out val prem = "f(Suc(?n)) = Suc(f(?n))}
-{\out             [!!n. f(Suc(n)) = Suc(f(n))]" : thm}
-\end{ttbox}
-In the theorem~\texttt{prem}, note that $f$ is a free variable while
-$\Var{n}$ is a schematic variable.
-\begin{ttbox}
-by (res_inst_tac [("n","i")] induct 1);
-{\out Level 1}
-{\out f(i + j) = i + f(j)}
-{\out  1. f(0 + j) = 0 + f(j)}
-{\out  2. !!x. f(x + j) = x + f(j) ==> f(Suc(x) + j) = Suc(x) + f(j)}
-\end{ttbox}
-We simplify each subgoal in turn.  The first one is trivial:
-\begin{ttbox}
-by (Simp_tac 1);
-{\out Level 2}
-{\out f(i + j) = i + f(j)}
-{\out  1. !!x. f(x + j) = x + f(j) ==> f(Suc(x) + j) = Suc(x) + f(j)}
-\end{ttbox}
-The remaining subgoal requires rewriting by the premise, so we add it
-to the current simpset:
-\begin{ttbox}
-by (asm_simp_tac (simpset() addsimps [prem]) 1);
-{\out Level 3}
-{\out f(i + j) = i + f(j)}
-{\out No subgoals!}
-\end{ttbox}
-
-
 \section{Permutative rewrite rules}
 \index{rewrite rules!permutative|(}
 
--- a/doc-src/Ref/substitution.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/substitution.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+
 \chapter{Substitution Tactics} \label{substitution}
 \index{tactics!substitution|(}\index{equality|(}
 
--- a/doc-src/Ref/syntax.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/syntax.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+
 \chapter{Syntax Transformations} \label{chap:syntax}
 \newcommand\ttapp{\mathrel{\hbox{\tt\$}}}
 \newcommand\mtt[1]{\mbox{\tt #1}}
--- a/doc-src/Ref/tactic.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/tactic.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,235 +1,8 @@
-%% $Id$
+
 \chapter{Tactics} \label{tactics}
-\index{tactics|(} Tactics have type \mltydx{tactic}.  This is just an
-abbreviation for functions from theorems to theorem sequences, where
-the theorems represent states of a backward proof.  Tactics seldom
-need to be coded from scratch, as functions; instead they are
-expressed using basic tactics and tacticals.
-
-This chapter only presents the primitive tactics.  Substantial proofs
-require the power of automatic tools like simplification
-(Chapter~\ref{chap:simplification}) and classical tableau reasoning
-(Chapter~\ref{chap:classical}).
-
-\section{Resolution and assumption tactics}
-{\bf Resolution} is Isabelle's basic mechanism for refining a subgoal using
-a rule.  {\bf Elim-resolution} is particularly suited for elimination
-rules, while {\bf destruct-resolution} is particularly suited for
-destruction rules.  The {\tt r}, {\tt e}, {\tt d} naming convention is
-maintained for several different kinds of resolution tactics, as well as
-the shortcuts in the subgoal module.
-
-All the tactics in this section act on a subgoal designated by a positive
-integer~$i$.  They fail (by returning the empty sequence) if~$i$ is out of
-range.
-
-\subsection{Resolution tactics}
-\index{resolution!tactics}
-\index{tactics!resolution|bold}
-\begin{ttbox} 
-resolve_tac  : thm list -> int -> tactic
-eresolve_tac : thm list -> int -> tactic
-dresolve_tac : thm list -> int -> tactic
-forward_tac  : thm list -> int -> tactic 
-\end{ttbox}
-These perform resolution on a list of theorems, $thms$, representing a list
-of object-rules.  When generating next states, they take each of the rules
-in the order given.  Each rule may yield several next states, or none:
-higher-order resolution may yield multiple resolvents.
-\begin{ttdescription}
-\item[\ttindexbold{resolve_tac} {\it thms} {\it i}] 
-  refines the proof state using the rules, which should normally be
-  introduction rules.  It resolves a rule's conclusion with
-  subgoal~$i$ of the proof state.
-
-\item[\ttindexbold{eresolve_tac} {\it thms} {\it i}] 
-  \index{elim-resolution}
-  performs elim-resolution with the rules, which should normally be
-  elimination rules.  It resolves with a rule, proves its first premise by
-  assumption, and finally \emph{deletes} that assumption from any new
-  subgoals.  (To rotate a rule's premises,
-  see \texttt{rotate_prems} in~{\S}\ref{MiscellaneousForwardRules}.)
-
-\item[\ttindexbold{dresolve_tac} {\it thms} {\it i}] 
-  \index{forward proof}\index{destruct-resolution}
-  performs destruct-resolution with the rules, which normally should
-  be destruction rules.  This replaces an assumption by the result of
-  applying one of the rules.
-
-\item[\ttindexbold{forward_tac}]\index{forward proof}
-  is like {\tt dresolve_tac} except that the selected assumption is not
-  deleted.  It applies a rule to an assumption, adding the result as a new
-  assumption.
-\end{ttdescription}
-
-\subsection{Assumption tactics}
-\index{tactics!assumption|bold}\index{assumptions!tactics for}
-\begin{ttbox} 
-assume_tac    : int -> tactic
-eq_assume_tac : int -> tactic
-\end{ttbox} 
-\begin{ttdescription}
-\item[\ttindexbold{assume_tac} {\it i}] 
-attempts to solve subgoal~$i$ by assumption.
-
-\item[\ttindexbold{eq_assume_tac}] 
-is like {\tt assume_tac} but does not use unification.  It succeeds (with a
-\emph{unique} next state) if one of the assumptions is identical to the
-subgoal's conclusion.  Since it does not instantiate variables, it cannot
-make other subgoals unprovable.  It is intended to be called from proof
-strategies, not interactively.
-\end{ttdescription}
-
-\subsection{Matching tactics} \label{match_tac}
-\index{tactics!matching}
-\begin{ttbox} 
-match_tac  : thm list -> int -> tactic
-ematch_tac : thm list -> int -> tactic
-dmatch_tac : thm list -> int -> tactic
-\end{ttbox}
-These are just like the resolution tactics except that they never
-instantiate unknowns in the proof state.  Flexible subgoals are not updated
-willy-nilly, but are left alone.  Matching --- strictly speaking --- means
-treating the unknowns in the proof state as constants; these tactics merely
-discard unifiers that would update the proof state.
-\begin{ttdescription}
-\item[\ttindexbold{match_tac} {\it thms} {\it i}] 
-refines the proof state using the rules, matching a rule's
-conclusion with subgoal~$i$ of the proof state.
-
-\item[\ttindexbold{ematch_tac}] 
-is like {\tt match_tac}, but performs elim-resolution.
-
-\item[\ttindexbold{dmatch_tac}] 
-is like {\tt match_tac}, but performs destruct-resolution.
-\end{ttdescription}
-
-
-\subsection{Explicit instantiation} \label{res_inst_tac}
-\index{tactics!instantiation}\index{instantiation}
-\begin{ttbox} 
-res_inst_tac    : (string*string)list -> thm -> int -> tactic
-eres_inst_tac   : (string*string)list -> thm -> int -> tactic
-dres_inst_tac   : (string*string)list -> thm -> int -> tactic
-forw_inst_tac   : (string*string)list -> thm -> int -> tactic
-instantiate_tac : (string*string)list -> tactic
-\end{ttbox}
-The first four of these tactics are designed for applying rules by resolution
-such as substitution and induction, which cause difficulties for higher-order 
-unification.  The tactics accept explicit instantiations for unknowns 
-in the rule ---typically, in the rule's conclusion. The last one, 
-{\tt instantiate_tac}, may be used to instantiate unknowns in the proof state,
-independently of rule application. 
-
-Each instantiation is a pair {\tt($v$,$e$)}, 
-where $v$ is an unknown \emph{without} its leading question mark!
-\begin{itemize}
-\item If $v$ is the type unknown {\tt'a}, then
-the rule must contain a type unknown \verb$?'a$ of some
-sort~$s$, and $e$ should be a type of sort $s$.
-
-\item If $v$ is the unknown {\tt P}, then
-the rule must contain an unknown \verb$?P$ of some type~$\tau$,
-and $e$ should be a term of some type~$\sigma$ such that $\tau$ and
-$\sigma$ are unifiable.  If the unification of $\tau$ and $\sigma$
-instantiates any type unknowns in $\tau$, these instantiations
-are recorded for application to the rule.
-\end{itemize}
-Types are instantiated before terms are.  Because type instantiations are
-inferred from term instantiations, explicit type instantiations are seldom
-necessary --- if \verb$?t$ has type \verb$?'a$, then the instantiation list
-\texttt{[("'a","bool"), ("t","True")]} may be simplified to
-\texttt{[("t","True")]}.  Type unknowns in the proof state may cause
-failure because the tactics cannot instantiate them.
-
-The first four instantiation tactics act on a given subgoal.  Terms in the
-instantiations are type-checked in the context of that subgoal --- in
-particular, they may refer to that subgoal's parameters.  Any unknowns in
-the terms receive subscripts and are lifted over the parameters; thus, you
-may not refer to unknowns in the subgoal.
-
-\begin{ttdescription}
-\item[\ttindexbold{res_inst_tac} {\it insts} {\it thm} {\it i}]
-instantiates the rule {\it thm} with the instantiations {\it insts}, as
-described above, and then performs resolution on subgoal~$i$.  Resolution
-typically causes further instantiations; you need not give explicit
-instantiations for every unknown in the rule.
-
-\item[\ttindexbold{eres_inst_tac}] 
-is like {\tt res_inst_tac}, but performs elim-resolution.
-
-\item[\ttindexbold{dres_inst_tac}] 
-is like {\tt res_inst_tac}, but performs destruct-resolution.
-
-\item[\ttindexbold{forw_inst_tac}] 
-is like {\tt dres_inst_tac} except that the selected assumption is not
-deleted.  It applies the instantiated rule to an assumption, adding the
-result as a new assumption.
-
-\item[\ttindexbold{instantiate_tac} {\it insts}] 
-instantiates unknowns in the proof state. This affects the main goal as 
-well as all subgoals.
-\end{ttdescription}
-
+\index{tactics|(}
 
 \section{Other basic tactics}
-\subsection{Tactic shortcuts}
-\index{shortcuts!for tactics}
-\index{tactics!resolution}\index{tactics!assumption}
-\index{tactics!meta-rewriting}
-\begin{ttbox} 
-rtac     :      thm ->        int -> tactic
-etac     :      thm ->        int -> tactic
-dtac     :      thm ->        int -> tactic
-ftac     :      thm ->        int -> tactic
-atac     :                    int -> tactic
-eatac    :      thm -> int -> int -> tactic
-datac    :      thm -> int -> int -> tactic
-fatac    :      thm -> int -> int -> tactic
-ares_tac :      thm list   -> int -> tactic
-rewtac   :      thm ->               tactic
-\end{ttbox}
-These abbreviate common uses of tactics.
-\begin{ttdescription}
-\item[\ttindexbold{rtac} {\it thm} {\it i}] 
-abbreviates \hbox{\tt resolve_tac [{\it thm}] {\it i}}, doing resolution.
-
-\item[\ttindexbold{etac} {\it thm} {\it i}] 
-abbreviates \hbox{\tt eresolve_tac [{\it thm}] {\it i}}, doing elim-resolution.
-
-\item[\ttindexbold{dtac} {\it thm} {\it i}] 
-abbreviates \hbox{\tt dresolve_tac [{\it thm}] {\it i}}, doing
-destruct-resolution.
-
-\item[\ttindexbold{ftac} {\it thm} {\it i}] 
-abbreviates \hbox{\tt forward_tac [{\it thm}] {\it i}}, doing
-destruct-resolution without deleting the assumption.
-
-\item[\ttindexbold{atac} {\it i}] 
-abbreviates \hbox{\tt assume_tac {\it i}}, doing proof by assumption.
-
-\item[\ttindexbold{eatac} {\it thm} {\it j} {\it i}] 
-performs \hbox{\tt etac {\it thm}} and then {\it j} times \texttt{atac}, 
-solving additionally {\it j}~premises of the rule {\it thm} by assumption.
-
-\item[\ttindexbold{datac} {\it thm} {\it j} {\it i}] 
-performs \hbox{\tt dtac {\it thm}} and then {\it j} times \texttt{atac}, 
-solving additionally {\it j}~premises of the rule {\it thm} by assumption.
-
-\item[\ttindexbold{fatac} {\it thm} {\it j} {\it i}] 
-performs \hbox{\tt ftac {\it thm}} and then {\it j} times \texttt{atac}, 
-solving additionally {\it j}~premises of the rule {\it thm} by assumption.
-
-\item[\ttindexbold{ares_tac} {\it thms} {\it i}] 
-tries proof by assumption and resolution; it abbreviates
-\begin{ttbox}
-assume_tac {\it i} ORELSE resolve_tac {\it thms} {\it i}
-\end{ttbox}
-
-\item[\ttindexbold{rewtac} {\it def}] 
-abbreviates \hbox{\tt rewrite_goals_tac [{\it def}]}, unfolding a definition.
-\end{ttdescription}
-
 
 \subsection{Inserting premises and facts}\label{cut_facts_tac}
 \index{tactics!for inserting facts}\index{assumptions!inserting}
@@ -351,52 +124,6 @@
 
 \section{Obscure tactics}
 
-\subsection{Renaming parameters in a goal} \index{parameters!renaming}
-\begin{ttbox} 
-rename_tac        : string -> int -> tactic
-rename_last_tac   : string -> string list -> int -> tactic
-Logic.set_rename_prefix : string -> unit
-Logic.auto_rename       : bool ref      \hfill{\bf initially false}
-\end{ttbox}
-When creating a parameter, Isabelle chooses its name by matching variable
-names via the object-rule.  Given the rule $(\forall I)$ formalized as
-$\left(\Forall x. P(x)\right) \Imp \forall x.P(x)$, Isabelle will note that
-the $\Forall$-bound variable in the premise has the same name as the
-$\forall$-bound variable in the conclusion.  
-
-Sometimes there is insufficient information and Isabelle chooses an
-arbitrary name.  The renaming tactics let you override Isabelle's choice.
-Because renaming parameters has no logical effect on the proof state, the
-{\tt by} command prints the message {\tt Warning:\ same as previous
-level}.
-
-Alternatively, you can suppress the naming mechanism described above and
-have Isabelle generate uniform names for parameters.  These names have the
-form $p${\tt a}, $p${\tt b}, $p${\tt c},~\ldots, where $p$ is any desired
-prefix.  They are ugly but predictable.
-
-\begin{ttdescription}
-\item[\ttindexbold{rename_tac} {\it str} {\it i}] 
-interprets the string {\it str} as a series of blank-separated variable
-names, and uses them to rename the parameters of subgoal~$i$.  The names
-must be distinct.  If there are fewer names than parameters, then the
-tactic renames the innermost parameters and may modify the remaining ones
-to ensure that all the parameters are distinct.
-
-\item[\ttindexbold{rename_last_tac} {\it prefix} {\it suffixes} {\it i}] 
-generates a list of names by attaching each of the {\it suffixes\/} to the 
-{\it prefix}.  It is intended for coding structural induction tactics,
-where several of the new parameters should have related names.
-
-\item[\ttindexbold{Logic.set_rename_prefix} {\it prefix};] 
-sets the prefix for uniform renaming to~{\it prefix}.  The default prefix
-is {\tt"k"}.
-
-\item[set \ttindexbold{Logic.auto_rename};] 
-makes Isabelle generate uniform names for parameters. 
-\end{ttdescription}
-
-
 \subsection{Manipulating assumptions}
 \index{assumptions!rotating}
 \begin{ttbox} 
@@ -594,142 +321,6 @@
 is no longer than {\it limit}.
 \end{ttdescription}
 
-
-\section{Programming tools for proof strategies}
-Do not consider using the primitives discussed in this section unless you
-really need to code tactics from scratch.
-
-\subsection{Operations on tactics}
-\index{tactics!primitives for coding} A tactic maps theorems to sequences of
-theorems.  The type constructor for sequences (lazy lists) is called
-\mltydx{Seq.seq}.  To simplify the types of tactics and tacticals,
-Isabelle defines a type abbreviation:
-\begin{ttbox} 
-type tactic = thm -> thm Seq.seq
-\end{ttbox} 
-The following operations provide means for coding tactics in a clean style.
-\begin{ttbox} 
-PRIMITIVE :                  (thm -> thm) -> tactic  
-SUBGOAL   : ((term*int) -> tactic) -> int -> tactic
-\end{ttbox} 
-\begin{ttdescription}
-\item[\ttindexbold{PRIMITIVE} $f$] packages the meta-rule~$f$ as a tactic that
-  applies $f$ to the proof state and returns the result as a one-element
-  sequence.  If $f$ raises an exception, then the tactic's result is the empty
-  sequence.
-
-\item[\ttindexbold{SUBGOAL} $f$ $i$] 
-extracts subgoal~$i$ from the proof state as a term~$t$, and computes a
-tactic by calling~$f(t,i)$.  It applies the resulting tactic to the same
-state.  The tactic body is expressed using tactics and tacticals, but may
-peek at a particular subgoal:
-\begin{ttbox} 
-SUBGOAL (fn (t,i) => {\it tactic-valued expression})
-\end{ttbox} 
-\end{ttdescription}
-
-
-\subsection{Tracing}
-\index{tactics!tracing}
-\index{tracing!of tactics}
-\begin{ttbox} 
-pause_tac: tactic
-print_tac: string -> tactic
-\end{ttbox}
-These tactics print tracing information when they are applied to a proof
-state.  Their output may be difficult to interpret.  Note that certain of
-the searching tacticals, such as {\tt REPEAT}, have built-in tracing
-options.
-\begin{ttdescription}
-\item[\ttindexbold{pause_tac}] 
-prints {\footnotesize\tt** Press RETURN to continue:} and then reads a line
-from the terminal.  If this line is blank then it returns the proof state
-unchanged; otherwise it fails (which may terminate a repetition).
-
-\item[\ttindexbold{print_tac}~$msg$] 
-returns the proof state unchanged, with the side effect of printing it at
-the terminal.
-\end{ttdescription}
-
-
-\section{*Sequences}
-\index{sequences (lazy lists)|bold}
-The module {\tt Seq} declares a type of lazy lists.  It uses
-Isabelle's type \mltydx{option} to represent the possible presence
-(\ttindexbold{Some}) or absence (\ttindexbold{None}) of
-a value:
-\begin{ttbox}
-datatype 'a option = None  |  Some of 'a;
-\end{ttbox}
-The {\tt Seq} structure is supposed to be accessed via fully qualified
-names and should not be \texttt{open}.
-
-\subsection{Basic operations on sequences}
-\begin{ttbox} 
-Seq.empty   : 'a seq
-Seq.make    : (unit -> ('a * 'a seq) option) -> 'a seq
-Seq.single  : 'a -> 'a seq
-Seq.pull    : 'a seq -> ('a * 'a seq) option
-\end{ttbox}
-\begin{ttdescription}
-\item[Seq.empty] is the empty sequence.
-
-\item[\tt Seq.make (fn () => Some ($x$, $xq$))] constructs the
-  sequence with head~$x$ and tail~$xq$, neither of which is evaluated.
-
-\item[Seq.single $x$] 
-constructs the sequence containing the single element~$x$.
-
-\item[Seq.pull $xq$] returns {\tt None} if the sequence is empty and
-  {\tt Some ($x$, $xq'$)} if the sequence has head~$x$ and tail~$xq'$.
-  Warning: calling \hbox{Seq.pull $xq$} again will {\it recompute\/}
-  the value of~$x$; it is not stored!
-\end{ttdescription}
-
-
-\subsection{Converting between sequences and lists}
-\begin{ttbox} 
-Seq.chop    : int * 'a seq -> 'a list * 'a seq
-Seq.list_of : 'a seq -> 'a list
-Seq.of_list : 'a list -> 'a seq
-\end{ttbox}
-\begin{ttdescription}
-\item[Seq.chop ($n$, $xq$)] returns the first~$n$ elements of~$xq$ as a
-  list, paired with the remaining elements of~$xq$.  If $xq$ has fewer
-  than~$n$ elements, then so will the list.
-  
-\item[Seq.list_of $xq$] returns the elements of~$xq$, which must be
-  finite, as a list.
-  
-\item[Seq.of_list $xs$] creates a sequence containing the elements
-  of~$xs$.
-\end{ttdescription}
-
-
-\subsection{Combining sequences}
-\begin{ttbox} 
-Seq.append      : 'a seq * 'a seq -> 'a seq
-Seq.interleave  : 'a seq * 'a seq -> 'a seq
-Seq.flat        : 'a seq seq -> 'a seq
-Seq.map         : ('a -> 'b) -> 'a seq -> 'b seq
-Seq.filter      : ('a -> bool) -> 'a seq -> 'a seq
-\end{ttbox} 
-\begin{ttdescription}
-\item[Seq.append ($xq$, $yq$)] concatenates $xq$ to $yq$.
-  
-\item[Seq.interleave ($xq$, $yq$)] joins $xq$ with $yq$ by
-  interleaving their elements.  The result contains all the elements
-  of the sequences, even if both are infinite.
-  
-\item[Seq.flat $xqq$] concatenates a sequence of sequences.
-  
-\item[Seq.map $f$ $xq$] applies $f$ to every element
-  of~$xq=x@1,x@2,\ldots$, yielding the sequence $f(x@1),f(x@2),\ldots$.
-  
-\item[Seq.filter $p$ $xq$] returns the sequence consisting of all
-  elements~$x$ of~$xq$ such that $p(x)$ is {\tt true}.
-\end{ttdescription}
-
 \index{tactics|)}
 
 
--- a/doc-src/Ref/tctical.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/tctical.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+
 \chapter{Tacticals}
 \index{tacticals|(}
 Tacticals are operations on tactics.  Their implementation makes use of
--- a/doc-src/Ref/theories.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/theories.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,216 +1,6 @@
-
-%% $Id$
 
 \chapter{Theories, Terms and Types} \label{theories}
-\index{theories|(}\index{signatures|bold}
-\index{reading!axioms|see{\texttt{assume_ax}}} Theories organize the syntax,
-declarations and axioms of a mathematical development.  They are built,
-starting from the Pure or CPure theory, by extending and merging existing
-theories.  They have the \ML\ type \mltydx{theory}.  Theory operations signal
-errors by raising exception \xdx{THEORY}, returning a message and a list of
-theories.
-
-Signatures, which contain information about sorts, types, constants and
-syntax, have the \ML\ type~\mltydx{Sign.sg}.  For identification, each
-signature carries a unique list of \bfindex{stamps}, which are \ML\
-references to strings.  The strings serve as human-readable names; the
-references serve as unique identifiers.  Each primitive signature has a
-single stamp.  When two signatures are merged, their lists of stamps are
-also merged.  Every theory carries a unique signature.
-
-Terms and types are the underlying representation of logical syntax.  Their
-\ML\ definitions are irrelevant to naive Isabelle users.  Programmers who
-wish to extend Isabelle may need to know such details, say to code a tactic
-that looks for subgoals of a particular form.  Terms and types may be
-`certified' to be well-formed with respect to a given signature.
-
-
-\section{Defining theories}\label{sec:ref-defining-theories}
-
-Theories are defined via theory files $name$\texttt{.thy} (there are also
-\ML-level interfaces which are only intended for people building advanced
-theory definition packages).  Appendix~\ref{app:TheorySyntax} presents the
-concrete syntax for theory files; here follows an explanation of the
-constituent parts.
-\begin{description}
-\item[{\it theoryDef}] is the full definition.  The new theory is called $id$.
-  It is the union of the named \textbf{parent
-    theories}\indexbold{theories!parent}, possibly extended with new
-  components.  \thydx{Pure} and \thydx{CPure} are the basic theories, which
-  contain only the meta-logic.  They differ just in their concrete syntax for
-  function applications.
-  
-  The new theory begins as a merge of its parents.
-  \begin{ttbox}
-    Attempt to merge different versions of theories: "\(T@1\)", \(\ldots\), "\(T@n\)"
-  \end{ttbox}
-  This error may especially occur when a theory is redeclared --- say to
-  change an inappropriate definition --- and bindings to old versions persist.
-  Isabelle ensures that old and new theories of the same name are not involved
-  in a proof.
-
-\item[$classes$]
-  is a series of class declarations.  Declaring {\tt$id$ < $id@1$ \dots\
-    $id@n$} makes $id$ a subclass of the existing classes $id@1\dots
-  id@n$.  This rules out cyclic class structures.  Isabelle automatically
-  computes the transitive closure of subclass hierarchies; it is not
-  necessary to declare \texttt{c < e} in addition to \texttt{c < d} and \texttt{d <
-    e}.
-
-\item[$default$]
-  introduces $sort$ as the new default sort for type variables.  This applies
-  to unconstrained type variables in an input string but not to type
-  variables created internally.  If omitted, the default sort is the listwise
-  union of the default sorts of the parent theories (i.e.\ their logical
-  intersection).
-  
-\item[$sort$] is a finite set of classes.  A single class $id$ abbreviates the
-  sort $\{id\}$.
-
-\item[$types$]
-  is a series of type declarations.  Each declares a new type constructor
-  or type synonym.  An $n$-place type constructor is specified by
-  $(\alpha@1,\dots,\alpha@n)name$, where the type variables serve only to
-  indicate the number~$n$.
-
-  A \textbf{type synonym}\indexbold{type synonyms} is an abbreviation
-  $(\alpha@1,\dots,\alpha@n)name = \tau$, where $name$ and $\tau$ can
-  be strings.
-
-\item[$infix$]
-  declares a type or constant to be an infix operator having priority $nat$
-  and associating to the left (\texttt{infixl}) or right (\texttt{infixr}).
-  Only 2-place type constructors can have infix status; an example is {\tt
-  ('a,'b)~"*"~(infixr~20)}, which may express binary product types.
-
-\item[$arities$] is a series of type arity declarations.  Each assigns
-  arities to type constructors.  The $name$ must be an existing type
-  constructor, which is given the additional arity $arity$.
-  
-\item[$nonterminals$]\index{*nonterminal symbols} declares purely
-  syntactic types to be used as nonterminal symbols of the context
-  free grammar.
-
-\item[$consts$] is a series of constant declarations.  Each new
-  constant $name$ is given the specified type.  The optional $mixfix$
-  annotations may attach concrete syntax to the constant.
-  
-\item[$syntax$] \index{*syntax section}\index{print mode} is a variant
-  of $consts$ which adds just syntax without actually declaring
-  logical constants.  This gives full control over a theory's context
-  free grammar.  The optional $mode$ specifies the print mode where the
-  mixfix productions should be added.  If there is no \texttt{output}
-  option given, all productions are also added to the input syntax
-  (regardless of the print mode).
-
-\item[$mixfix$] \index{mixfix declarations}
-  annotations can take three forms:
-  \begin{itemize}
-  \item A mixfix template given as a $string$ of the form
-    {\tt"}\dots{\tt\_}\dots{\tt\_}\dots{\tt"} where the $i$-th underscore
-    indicates the position where the $i$-th argument should go.  The list
-    of numbers gives the priority of each argument.  The final number gives
-    the priority of the whole construct.
-
-  \item A constant $f$ of type $\tau@1\To(\tau@2\To\tau)$ can be given {\bf
-    infix} status.
-
-  \item A constant $f$ of type $(\tau@1\To\tau@2)\To\tau$ can be given {\bf
-    binder} status.  The declaration \texttt{binder} $\cal Q$ $p$ causes
-  ${\cal Q}\,x.F(x)$ to be treated
-  like $f(F)$, where $p$ is the priority.
-  \end{itemize}
-
-\item[$trans$]
-  specifies syntactic translation rules (macros).  There are three forms:
-  parse rules (\texttt{=>}), print rules (\texttt{<=}), and parse/print rules ({\tt
-  ==}).
-
-\item[$rules$]
-  is a series of rule declarations.  Each has a name $id$ and the formula is
-  given by the $string$.  Rule names must be distinct within any single
-  theory.
-
-\item[$defs$] is a series of definitions.  They are just like $rules$, except
-  that every $string$ must be a definition (see below for details).
-
-\item[$constdefs$] combines the declaration of constants and their
-  definition.  The first $string$ is the type, the second the definition.
-  
-\item[$axclass$] \index{*axclass section} defines an \rmindex{axiomatic type
-    class} \cite{Wenzel:1997:TPHOL} as the intersection of existing classes,
-  with additional axioms holding.  Class axioms may not contain more than one
-  type variable.  The class axioms (with implicit sort constraints added) are
-  bound to the given names.  Furthermore a class introduction rule is
-  generated, which is automatically employed by $instance$ to prove
-  instantiations of this class.
-  
-\item[$instance$] \index{*instance section} proves class inclusions or
-  type arities at the logical level and then transfers these to the
-  type signature.  The instantiation is proven and checked properly.
-  The user has to supply sufficient witness information: theorems
-  ($longident$), axioms ($string$), or even arbitrary \ML{} tactic
-  code $verbatim$.
-
-\item[$oracle$] links the theory to a trusted external reasoner.  It is
-  allowed to create theorems, but each theorem carries a proof object
-  describing the oracle invocation.  See \S\ref{sec:oracles} for details.
-  
-\item[$local$, $global$] change the current name declaration mode.
-  Initially, theories start in $local$ mode, causing all names of
-  types, constants, axioms etc.\ to be automatically qualified by the
-  theory name.  Changing this to $global$ causes all names to be
-  declared as short base names only.
-  
-  The $local$ and $global$ declarations act like switches, affecting
-  all following theory sections until changed again explicitly.  Also
-  note that the final state at the end of the theory will persist.  In
-  particular, this determines how the names of theorems stored later
-  on are handled.
-  
-\item[$setup$]\index{*setup!theory} applies a list of ML functions to
-  the theory.  The argument should denote a value of type
-  \texttt{(theory -> theory) list}.  Typically, ML packages are
-  initialized in this way.
-
-\item[$ml$] \index{*ML section}
-  consists of \ML\ code, typically for parse and print translation functions.
-\end{description}
-%
-Chapters~\ref{Defining-Logics} and \ref{chap:syntax} explain mixfix
-declarations, translation rules and the \texttt{ML} section in more detail.
-
-
-\subsection{*Classes and arities}
-\index{classes!context conditions}\index{arities!context conditions}
-
-In order to guarantee principal types~\cite{nipkow-prehofer},
-arity declarations must obey two conditions:
-\begin{itemize}
-\item There must not be any two declarations $ty :: (\vec{r})c$ and
-  $ty :: (\vec{s})c$ with $\vec{r} \neq \vec{s}$.  For example, this
-  excludes the following:
-\begin{ttbox}
-arities
-  foo :: (\{logic{\}}) logic
-  foo :: (\{{\}})logic
-\end{ttbox}
-
-\item If there are two declarations $ty :: (s@1,\dots,s@n)c$ and $ty ::
-  (s@1',\dots,s@n')c'$ such that $c' < c$ then $s@i' \preceq s@i$ must hold
-  for $i=1,\dots,n$.  The relationship $\preceq$, defined as
-\[ s' \preceq s \iff \forall c\in s. \exists c'\in s'.~ c'\le c, \]
-expresses that the set of types represented by $s'$ is a subset of the
-set of types represented by $s$.  Assuming $term \preceq logic$, the
-following is forbidden:
-\begin{ttbox}
-arities
-  foo :: (\{logic{\}})logic
-  foo :: (\{{\}})term
-\end{ttbox}
-
-\end{itemize}
-
+\index{theories|(}
 
 \section{The theory loader}\label{sec:more-theories}
 \index{theories!reading}\index{files!reading}
@@ -247,13 +37,6 @@
   dispose a large number of theories at once.  Note that {\ML} bindings to
   theorems etc.\ of removed theories may still persist.
   
-\item[reset \ttindexbold{delete_tmpfiles};] processing theory files usually
-  involves temporary {\ML} files to be created.  By default, these are deleted
-  afterwards.  Resetting the \texttt{delete_tmpfiles} flag inhibits this,
-  leaving the generated code for debugging purposes.  The basic location for
-  temporary files is determined by the \texttt{ISABELLE_TMP} environment
-  variable (which is private to the running Isabelle process and may be
-  retrieved by \ttindex{getenv} from {\ML}).
 \end{ttdescription}
 
 \medskip Theory and {\ML} files are located by skimming through the
@@ -296,224 +79,6 @@
 temporarily appended to the load path, too.
 
 
-\section{Locales}
-\label{Locales}
-
-Locales \cite{kammueller-locales} are a concept of local proof contexts.  They
-are introduced as named syntactic objects within theories and can be
-opened in any descendant theory.
-
-\subsection{Declaring Locales}
-
-A locale is declared in a theory section that starts with the
-keyword \texttt{locale}.  It consists typically of three parts, the
-\texttt{fixes} part, the \texttt{assumes} part, and the \texttt{defines} part.
-Appendix \ref{app:TheorySyntax} presents the full syntax.
-
-\subsubsection{Parts of Locales}
-
-The subsection introduced by the keyword \texttt{fixes} declares the locale
-constants in a way that closely resembles a global \texttt{consts}
-declaration.  In particular, there may be an optional pretty printing syntax
-for the locale constants.
-
-The subsequent \texttt{assumes} part specifies the locale rules.  They are
-defined like \texttt{rules}: by an identifier followed by the rule
-given as a string.  Locale rules admit the statement of local assumptions
-about the locale constants.  The \texttt{assumes} part is optional.  Non-fixed
-variables in locale rules are automatically bound by the universal quantifier
-\texttt{!!} of the meta-logic.
-
-Finally, the \texttt{defines} part introduces the definitions that are
-available in the locale.  Locale constants declared in the \texttt{fixes}
-section are defined using the meta-equality \texttt{==}.  If the
-locale constant is a functiond then its definition can (as usual) have
-variables on the left-hand side acting as formal parameters; they are
-considered as schematic variables and are automatically generalized by
-universal quantification of the meta-logic.  The right hand side of a
-definition must not contain variables that are not already on the left hand
-side.  In so far locale definitions behave like theory level definitions.
-However, the locale concept realizes \emph{dependent definitions}: any variable
-that is fixed as a locale constant can occur on the right hand side of
-definitions.  For an illustration of these dependent definitions see the
-occurrence of the locale constant \texttt{G} on the right hand side of the
-definitions of the locale \texttt{group} below.  Naturally, definitions can
-already use the syntax of the locale constants in the \texttt{fixes}
-subsection.  The \texttt{defines} part is, as the \texttt{assumes} part,
-optional.
-
-\subsubsection{Example for Definition}
-The concrete syntax of locale definitions is demonstrated by example below.
-
-Locale \texttt{group} assumes the definition of groups in a theory
-file\footnote{This and other examples are from \texttt{HOL/ex}.}.  A locale
-defining a convenient proof environment for group related proofs may be
-added to the theory as follows:
-\begin{ttbox}
-  locale group =
-    fixes 
-      G         :: "'a grouptype"
-      e         :: "'a"
-      binop     :: "'a => 'a => 'a"        (infixr "#" 80)
-      inv       :: "'a => 'a"              ("i(_)" [90] 91)
-    assumes
-      Group_G   "G: Group"
-    defines
-      e_def     "e == unit G"
-      binop_def "x # y == bin_op G x y"
-      inv_def   "i(x) == inverse G x"
-\end{ttbox}
-
-\subsubsection{Polymorphism}
-
-In contrast to polymorphic definitions in theories, the use of the
-same type variable for the declaration of different locale constants in the
-fixes part means \emph{the same} type.  In other words, the scope of the
-polymorphic variables is extended over all constant declarations of a locale.
-In the above example \texttt{'a} refers to the same type which is fixed inside
-the locale.  In an exported theorem (see \S\ref{sec:locale-export}) the
-constructors of locale \texttt{group} are polymorphic, yet only simultaneously
-instantiatable.
-
-\subsubsection{Nested Locales}
-
-A locale can be defined as the extension of a previously defined
-locale.  This operation of extension is optional and is syntactically
-expressed as 
-\begin{ttbox}
-locale foo = bar + ...
-\end{ttbox}
-The locale \texttt{foo} builds on the constants and syntax of the locale {\tt
-bar}.  That is, all contents of the locale \texttt{bar} can be used in
-definitions and rules of the corresponding parts of the locale {\tt
-foo}.  Although locale \texttt{foo} assumes the \texttt{fixes} part of \texttt{bar} it
-does not automatically subsume its rules and definitions.  Normally, one
-expects to use locale \texttt{foo} only if locale \texttt{bar} is already
-active.  These aspects of use and activation of locales are considered in the
-subsequent section.
-
-
-\subsection{Locale Scope}
-
-Locales are by default inactive, but they can be invoked.  The list of
-currently active locales is called \emph{scope}.  The process of activating
-them is called \emph{opening}; the reverse is \emph{closing}.
-
-\subsubsection{Scope}
-The locale scope is part of each theory.  It is a dynamic stack containing
-all active locales at a certain point in an interactive session.
-The scope lives until all locales are explicitly closed.  At one time there
-can be more than one locale open.  The contents of these various active
-locales are all visible in the scope.  In case of nested locales for example,
-the nesting is actually reflected to the scope, which contains the nested
-locales as layers.  To check the state of the scope during a development the
-function \texttt{Print\_scope} may be used.  It displays the names of all open
-locales on the scope.  The function \texttt{print\_locales} applied to a theory
-displays all locales contained in that theory and in addition also the
-current scope.
-
-The scope is manipulated by the commands for opening and closing of locales. 
-
-\subsubsection{Opening}
-Locales can be \emph{opened} at any point during a session where
-we want to prove theorems concerning the locale.  Opening a locale means
-making its contents visible by pushing it onto the scope of the current
-theory.  Inside a scope of opened locales, theorems can use all definitions and
-rules contained in the locales on the scope.  The rules and definitions may
-be accessed individually using the function \ttindex{thm}.  This function is
-applied to the names assigned to locale rules and definitions as
-strings.  The opening command is called \texttt{Open\_locale} and takes the 
-name of the locale to be opened as its argument.
-
-If one opens a locale \texttt{foo} that is defined by extension from locale
-\texttt{bar}, the function \texttt{Open\_locale} checks if locale \texttt{bar}
-is open.  If so, then it just opens \texttt{foo}, if not, then it prints a
-message and opens \texttt{bar} before opening \texttt{foo}.  Naturally, this
-carries on, if \texttt{bar} is again an extension.
-
-\subsubsection{Closing}
-
-\emph{Closing} means to cancel the last opened locale, pushing it out of the
-scope.  Theorems proved during the life cycle of this locale will be disabled,
-unless they have been explicitly exported, as described below.  However, when
-the same locale is opened again these theorems may be used again as well,
-provided that they were saved as theorems in the first place, using
-\texttt{qed} or ML assignment.  The command \texttt{Close\_locale} takes a
-locale name as a string and checks if this locale is actually the topmost
-locale on the scope.  If this is the case, it removes this locale, otherwise
-it prints a warning message and does not change the scope.
-
-\subsubsection{Export of Theorems}
-\label{sec:locale-export}
-
-Export of theorems transports theorems out of the scope of locales.  Locale
-rules that have been used in the proof of an exported theorem inside the
-locale are carried by the exported form of the theorem as its individual
-meta-assumptions.  The locale constants are universally quantified variables
-in these theorems, hence such theorems can be instantiated individually.
-Definitions become unfolded; locale constants that were merely used for
-definitions vanish.  Logically, exporting corresponds to a combined
-application of introduction rules for implication and universal
-quantification.  Exporting forms a kind of normalization of theorems in a
-locale scope.
-
-According to the possibility of nested locales there are two different forms
-of export.  The first one is realized by the function \texttt{export} that
-exports theorems through all layers of opened locales of the scope.  Hence,
-the application of export to a theorem yields a theorem of the global level,
-that is, the current theory context without any local assumptions or
-definitions.
-
-When locales are nested we might want to export a theorem, not to the global
-level of the current theory but just to the previous level.  The other export
-function, \texttt{Export}, transports theorems one level up in the scope; the
-theorem still uses locale constants, definitions and rules of the locales
-underneath.
-
-\subsection{Functions for Locales}
-\label{Syntax}
-\index{locales!functions}
-
-Here is a quick reference list of locale functions.
-\begin{ttbox}
-  Open_locale  : xstring -> unit 
-  Close_locale : xstring -> unit
-  export       :     thm -> thm
-  Export       :     thm -> thm
-  thm          : xstring -> thm
-  Print_scope  :    unit -> unit
-  print_locales:  theory -> unit
-\end{ttbox}
-\begin{ttdescription}
-\item[\ttindexbold{Open_locale} $xstring$] 
-    opens the locale {\it xstring}, adding it to the scope of the theory of the
-  current context.  If the opened locale is built by extension, the ancestors
-  are opened automatically.
-  
-\item[\ttindexbold{Close_locale} $xstring$] eliminates the locale {\it
-    xstring} from the scope if it is the topmost item on it, otherwise it does
-  not change the scope and produces a warning.
-
-\item[\ttindexbold{export} $thm$] locale definitions become expanded in {\it
-    thm} and locale rules that were used in the proof of {\it thm} become part
-  of its individual assumptions.  This normalization happens with respect to
-  \emph{all open locales} on the scope.
-  
-\item[\ttindexbold{Export} $thm$] works like \texttt{export} but normalizes
-  theorems only up to the previous level of locales on the scope.
-  
-\item[\ttindexbold{thm} $xstring$] applied to the name of a locale definition
-  or rule it returns the definition as a theorem.
-  
-\item[\ttindexbold{Print_scope}()] prints the names of the locales in the
-  current scope of the current theory context.
-  
-\item[\ttindexbold{print_locale} $theory$] prints all locales that are
-  contained in {\it theory} directly or indirectly.  It also displays the
-  current scope similar to \texttt{Print\_scope}.
-\end{ttdescription}
-
-
 \section{Basic operations on theories}\label{BasicOperationsOnTheories}
 
 \subsection{*Theory inclusion}
@@ -905,111 +470,6 @@
 \end{ttdescription}
 
 
-\section{Oracles: calling trusted external reasoners}
-\label{sec:oracles}
-\index{oracles|(}
-
-Oracles allow Isabelle to take advantage of external reasoners such as
-arithmetic decision procedures, model checkers, fast tautology checkers or
-computer algebra systems.  Invoked as an oracle, an external reasoner can
-create arbitrary Isabelle theorems.  It is your responsibility to ensure that
-the external reasoner is as trustworthy as your application requires.
-Isabelle's proof objects~(\S\ref{sec:proofObjects}) record how each theorem
-depends upon oracle calls.
-
-\begin{ttbox}
-invoke_oracle     : theory -> xstring -> Sign.sg * object -> thm
-Theory.add_oracle : bstring * (Sign.sg * object -> term) -> theory 
-                    -> theory
-\end{ttbox}
-\begin{ttdescription}
-\item[\ttindexbold{invoke_oracle} $thy$ $name$ ($sign$, $data$)]
-  invokes the oracle $name$ of theory $thy$ passing the information
-  contained in the exception value $data$ and creating a theorem
-  having signature $sign$.  Note that type \ttindex{object} is just an
-  abbreviation for \texttt{exn}.  Errors arise if $thy$ does not have
-  an oracle called $name$, if the oracle rejects its arguments or if
-  its result is ill-typed.
-  
-\item[\ttindexbold{Theory.add_oracle} $name$ $fun$ $thy$] extends
-  $thy$ by oracle $fun$ called $name$.  It is seldom called
-  explicitly, as there is concrete syntax for oracles in theory files.
-\end{ttdescription}
-
-A curious feature of {\ML} exceptions is that they are ordinary constructors.
-The {\ML} type \texttt{exn} is a datatype that can be extended at any time.  (See
-my {\em {ML} for the Working Programmer}~\cite{paulson-ml2}, especially
-page~136.)  The oracle mechanism takes advantage of this to allow an oracle to
-take any information whatever.
-
-There must be some way of invoking the external reasoner from \ML, either
-because it is coded in {\ML} or via an operating system interface.  Isabelle
-expects the {\ML} function to take two arguments: a signature and an
-exception object.
-\begin{itemize}
-\item The signature will typically be that of a desendant of the theory
-  declaring the oracle.  The oracle will use it to distinguish constants from
-  variables, etc., and it will be attached to the generated theorems.
-
-\item The exception is used to pass arbitrary information to the oracle.  This
-  information must contain a full description of the problem to be solved by
-  the external reasoner, including any additional information that might be
-  required.  The oracle may raise the exception to indicate that it cannot
-  solve the specified problem.
-\end{itemize}
-
-A trivial example is provided in theory \texttt{FOL/ex/IffOracle}.  This
-oracle generates tautologies of the form $P\bimp\cdots\bimp P$, with
-an even number of $P$s.
-
-The \texttt{ML} section of \texttt{IffOracle.thy} begins by declaring
-a few auxiliary functions (suppressed below) for creating the
-tautologies.  Then it declares a new exception constructor for the
-information required by the oracle: here, just an integer. It finally
-defines the oracle function itself.
-\begin{ttbox}
-exception IffOracleExn of int;\medskip
-fun mk_iff_oracle (sign, IffOracleExn n) =
-  if n > 0 andalso n mod 2 = 0
-  then Trueprop \$ mk_iff n
-  else raise IffOracleExn n;
-\end{ttbox}
-Observe the function's two arguments, the signature \texttt{sign} and the
-exception given as a pattern.  The function checks its argument for
-validity.  If $n$ is positive and even then it creates a tautology
-containing $n$ occurrences of~$P$.  Otherwise it signals error by
-raising its own exception (just by happy coincidence).  Errors may be
-signalled by other means, such as returning the theorem \texttt{True}.
-Please ensure that the oracle's result is correctly typed; Isabelle
-will reject ill-typed theorems by raising a cryptic exception at top
-level.
-
-The \texttt{oracle} section of \texttt{IffOracle.thy} installs above
-\texttt{ML} function as follows:
-\begin{ttbox}
-IffOracle = FOL +\medskip
-oracle
-  iff = mk_iff_oracle\medskip
-end
-\end{ttbox}
-
-Now in \texttt{IffOracle.ML} we first define a wrapper for invoking
-the oracle:
-\begin{ttbox}
-fun iff_oracle n = invoke_oracle IffOracle.thy "iff"
-                      (sign_of IffOracle.thy, IffOracleExn n);
-\end{ttbox}
-
-Here are some example applications of the \texttt{iff} oracle.  An
-argument of 10 is allowed, but one of 5 is forbidden:
-\begin{ttbox}
-iff_oracle 10;
-{\out  "P <-> P <-> P <-> P <-> P <-> P <-> P <-> P <-> P <-> P" : thm}
-iff_oracle 5;
-{\out Exception- IffOracleExn 5 raised}
-\end{ttbox}
-
-\index{oracles|)}
 \index{theories|)}
 
 
--- a/doc-src/Ref/thm.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/Ref/thm.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+
 \chapter{Theorems and Forward Proof}
 \index{theorems|(}
 
@@ -13,19 +13,6 @@
 ignore such complexities --- and skip all but the first section of
 this chapter.
 
-The theorem operations do not print error messages.  Instead, they raise
-exception~\xdx{THM}\@.  Use \ttindex{print_exn} to display
-exceptions nicely:
-\begin{ttbox} 
-allI RS mp  handle e => print_exn e;
-{\out Exception THM raised:}
-{\out RSN: no unifiers -- premise 1}
-{\out (!!x. ?P(x)) ==> ALL x. ?P(x)}
-{\out [| ?P --> ?Q; ?P |] ==> ?Q}
-{\out}
-{\out uncaught exception THM}
-\end{ttbox}
-
 
 \section{Basic operations on theorems}
 \subsection{Pretty-printing a theorem}
--- a/doc-src/System/Thy/Basics.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/System/Thy/Basics.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -360,8 +360,8 @@
   @{verbatim "-W"} option makes Isabelle enter a special process
   wrapper for interaction via an external program; the protocol is a
   stripped-down version of Proof General the interaction mode, see
-  also @{"file" "~~/src/Pure/Tools/isabelle_process.ML"} and @{"file"
-  "~~/src/Pure/Tools/isabelle_process.scala"}.
+  also @{"file" "~~/src/Pure/System/isabelle_process.ML"} and @{"file"
+  "~~/src/Pure/System/isabelle_process.scala"}.
 
   \medskip The @{verbatim "-S"} option makes the Isabelle process more
   secure by disabling some critical operations, notably runtime
--- a/doc-src/System/Thy/Presentation.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/System/Thy/Presentation.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -654,7 +654,7 @@
   "-"}@{text foo}'' to drop, and ``@{verbatim "/"}@{text foo}'' to
   fold text tagged as @{text foo}.  The builtin default is equivalent
   to the tag specification ``@{verbatim
-  "/theory,/proof,/ML,+visible,-invisible"}''; see also the {\LaTeX}
+  "+theory,+proof,+ML,+visible,-invisible"}''; see also the {\LaTeX}
   macros @{verbatim "\\isakeeptag"}, @{verbatim "\\isadroptag"}, and
   @{verbatim "\\isafoldtag"}, in @{"file"
   "~~/lib/texinputs/isabelle.sty"}.
--- a/doc-src/System/Thy/document/Basics.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/System/Thy/document/Basics.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -369,7 +369,7 @@
   \verb|-W| option makes Isabelle enter a special process
   wrapper for interaction via an external program; the protocol is a
   stripped-down version of Proof General the interaction mode, see
-  also \hyperlink{file.~~/src/Pure/Tools/isabelle-process.ML}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}Pure{\isacharslash}Tools{\isacharslash}isabelle{\isacharunderscore}process{\isachardot}ML}}}} and \hyperlink{file.~~/src/Pure/Tools/isabelle-process.scala}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}Pure{\isacharslash}Tools{\isacharslash}isabelle{\isacharunderscore}process{\isachardot}scala}}}}.
+  also \hyperlink{file.~~/src/Pure/System/isabelle-process.ML}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}Pure{\isacharslash}System{\isacharslash}isabelle{\isacharunderscore}process{\isachardot}ML}}}} and \hyperlink{file.~~/src/Pure/System/isabelle-process.scala}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}src{\isacharslash}Pure{\isacharslash}System{\isacharslash}isabelle{\isacharunderscore}process{\isachardot}scala}}}}.
 
   \medskip The \verb|-S| option makes the Isabelle process more
   secure by disabling some critical operations, notably runtime
--- a/doc-src/System/Thy/document/Presentation.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/System/Thy/document/Presentation.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -668,7 +668,7 @@
   tagged Isabelle command regions.  Tags are specified as a comma
   separated list of modifier/name pairs: ``\verb|+|\isa{foo}'' (or just ``\isa{foo}'') means to keep, ``\verb|-|\isa{foo}'' to drop, and ``\verb|/|\isa{foo}'' to
   fold text tagged as \isa{foo}.  The builtin default is equivalent
-  to the tag specification ``\verb|/theory,/proof,/ML,+visible,-invisible|''; see also the {\LaTeX}
+  to the tag specification ``\verb|+theory,+proof,+ML,+visible,-invisible|''; see also the {\LaTeX}
   macros \verb|\isakeeptag|, \verb|\isadroptag|, and
   \verb|\isafoldtag|, in \hyperlink{file.~~/lib/texinputs/isabelle.sty}{\mbox{\isa{\isatt{{\isachartilde}{\isachartilde}{\isacharslash}lib{\isacharslash}texinputs{\isacharslash}isabelle{\isachardot}sty}}}}.
 
--- a/doc-src/System/system.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/System/system.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -36,7 +36,7 @@
 \input{Thy/document/Misc.tex}
 
 \begingroup
-  \bibliographystyle{plain} \small\raggedright\frenchspacing
+  \bibliographystyle{abbrv} \small\raggedright\frenchspacing
   \bibliography{../manual}
 \endgroup
 
--- a/doc-src/TutorialI/Types/Numbers.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/TutorialI/Types/Numbers.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -100,8 +100,8 @@
 @{thm[display] div_mult1_eq[no_vars]}
 \rulename{div_mult1_eq}
 
-@{thm[display] mod_mult1_eq[no_vars]}
-\rulename{mod_mult1_eq}
+@{thm[display] mod_mult_right_eq[no_vars]}
+\rulename{mod_mult_right_eq}
 
 @{thm[display] div_mult2_eq[no_vars]}
 \rulename{div_mult2_eq}
@@ -147,8 +147,8 @@
 @{thm[display] zdiv_zadd1_eq[no_vars]}
 \rulename{zdiv_zadd1_eq}
 
-@{thm[display] zmod_zadd1_eq[no_vars]}
-\rulename{zmod_zadd1_eq}
+@{thm[display] mod_add_eq[no_vars]}
+\rulename{mod_add_eq}
 
 @{thm[display] zdiv_zmult1_eq[no_vars]}
 \rulename{zdiv_zmult1_eq}
--- a/doc-src/TutorialI/Types/document/Numbers.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/TutorialI/Types/document/Numbers.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -244,7 +244,7 @@
 \begin{isabelle}%
 a\ {\isacharasterisk}\ b\ mod\ c\ {\isacharequal}\ a\ {\isacharasterisk}\ {\isacharparenleft}b\ mod\ c{\isacharparenright}\ mod\ c%
 \end{isabelle}
-\rulename{mod_mult1_eq}
+\rulename{mod_mult_right_eq}
 
 \begin{isabelle}%
 a\ div\ {\isacharparenleft}b\ {\isacharasterisk}\ c{\isacharparenright}\ {\isacharequal}\ a\ div\ b\ div\ c%
@@ -318,7 +318,7 @@
 \begin{isabelle}%
 {\isacharparenleft}a\ {\isacharplus}\ b{\isacharparenright}\ mod\ c\ {\isacharequal}\ {\isacharparenleft}a\ mod\ c\ {\isacharplus}\ b\ mod\ c{\isacharparenright}\ mod\ c%
 \end{isabelle}
-\rulename{zmod_zadd1_eq}
+\rulename{mod_add_eq}
 
 \begin{isabelle}%
 a\ {\isacharasterisk}\ b\ div\ c\ {\isacharequal}\ a\ {\isacharasterisk}\ {\isacharparenleft}b\ div\ c{\isacharparenright}\ {\isacharplus}\ a\ {\isacharasterisk}\ {\isacharparenleft}b\ mod\ c{\isacharparenright}\ div\ c%
--- a/doc-src/TutorialI/Types/numerics.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/TutorialI/Types/numerics.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -154,7 +154,7 @@
 a\ *\ b\ div\ c\ =\ a\ *\ (b\ div\ c)\ +\ a\ *\ (b\ mod\ c)\ div\ c%
 \rulename{div_mult1_eq}\isanewline
 a\ *\ b\ mod\ c\ =\ a\ *\ (b\ mod\ c)\ mod\ c%
-\rulename{mod_mult1_eq}\isanewline
+\rulename{mod_mult_right_eq}\isanewline
 a\ div\ (b*c)\ =\ a\ div\ b\ div\ c%
 \rulename{div_mult2_eq}\isanewline
 a\ mod\ (b*c)\ =\ b * (a\ div\ b\ mod\ c)\ +\ a\ mod\ b%
@@ -276,7 +276,7 @@
 \rulename{zdiv_zadd1_eq}
 \par\smallskip
 (a\ +\ b)\ mod\ c\ =\ (a\ mod\ c\ +\ b\ mod\ c)\ mod\ c%
-\rulename{zmod_zadd1_eq}
+\rulename{mod_add_eq}
 \end{isabelle}
 
 \begin{isabelle}
--- a/doc-src/ZF/FOL.tex	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/ZF/FOL.tex	Wed Mar 04 10:45:52 2009 +0100
@@ -1,4 +1,4 @@
-%% $Id$
+%!TEX root = logics-ZF.tex
 \chapter{First-Order Logic}
 \index{first-order logic|(}
 
@@ -360,7 +360,8 @@
 logic by designating \isa{IFOL} rather than \isa{FOL} as the parent
 theory:
 \begin{isabelle}
-\isacommand{theory}\ IFOL\_examples\ =\ IFOL:
+\isacommand{theory}\ IFOL\_examples\ \isacommand{imports}\ IFOL\isanewline
+\isacommand{begin}
 \end{isabelle}
 The proof begins by entering the goal, then applying the rule $({\imp}I)$.
 \begin{isabelle}
@@ -441,7 +442,7 @@
 \ 1.\ (\isasymexists y.\ \isasymforall x.\ Q(x,\ y))\
 \isasymlongrightarrow \ (\isasymforall x.\ \isasymexists y.\ Q(x,\ y))
 \isanewline
-\isacommand{by} (tactic {*IntPr.fast_tac 1*})\isanewline
+\isacommand{by} (tactic \ttlbrace*IntPr.fast_tac 1*\ttrbrace)\isanewline
 No\ subgoals!
 \end{isabelle}
 
@@ -529,7 +530,8 @@
 $\all{x}P(x)$ is true.  Either way the theorem holds.  First, we must
 work in a theory based on classical logic, the theory \isa{FOL}:
 \begin{isabelle}
-\isacommand{theory}\ FOL\_examples\ =\ FOL:
+\isacommand{theory}\ FOL\_examples\ \isacommand{imports}\ FOL\isanewline
+\isacommand{begin}
 \end{isabelle}
 
 The formal proof does not conform in any obvious way to the sketch given
@@ -631,7 +633,8 @@
 $if::[o,o,o]\To o$.  The axiom \tdx{if_def} asserts the
 equation~$(if)$.
 \begin{isabelle}
-\isacommand{theory}\ If\ =\ FOL:\isanewline
+\isacommand{theory}\ If\ \isacommand{imports}\ FOL\isanewline
+\isacommand{begin}\isanewline
 \isacommand{constdefs}\isanewline
 \ \ if\ ::\ "[o,o,o]=>o"\isanewline
 \ \ \ "if(P,Q,R)\ ==\ P\&Q\ |\ \isachartilde P\&R"
--- a/doc-src/antiquote_setup.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/antiquote_setup.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,4 @@
 (*  Title:      Doc/antiquote_setup.ML
-    ID:         $Id$
     Author:     Makarius
 
 Auxiliary antiquotations for the Isabelle manuals.
@@ -13,13 +12,17 @@
 
 (* misc utils *)
 
-val clean_string = translate_string
+fun translate f = Symbol.explode #> map f #> implode;
+
+val clean_string = translate
   (fn "_" => "\\_"
+    | "#" => "\\#"
     | "<" => "$<$"
     | ">" => "$>$"
-    | "#" => "\\#"
     | "{" => "\\{"
+    | "|" => "$\\mid$"
     | "}" => "\\}"
+    | "\\<dash>" => "-"
     | c => c);
 
 fun clean_name "\\<dots>" = "dots"
@@ -28,7 +31,7 @@
   | clean_name "_" = "underscore"
   | clean_name "{" = "braceleft"
   | clean_name "}" = "braceright"
-  | clean_name s = s |> translate_string (fn "_" => "-" | c => c);
+  | clean_name s = s |> translate (fn "_" => "-" | "\\<dash>" => "-" | c => c);
 
 
 (* verbatim text *)
@@ -66,8 +69,9 @@
     val txt' = if kind = "" then txt else kind ^ " " ^ txt;
     val _ = writeln (ml (txt1, txt2));
     val _ = ML_Context.eval_in (SOME ctxt) false Position.none (ml (txt1, txt2));
+    val kind' = if kind = "" then "ML" else "ML " ^ kind;
   in
-    "\\indexml" ^ kind ^ enclose "{" "}" (clean_string txt1) ^
+    "\\indexdef{}{" ^ kind' ^ "}{" ^ clean_string txt1 ^ "}" ^
     (txt'
     |> (if ! O.quotes then quote else I)
     |> (if ! O.display then enclose "\\begin{verbatim}\n" "\n\\end{verbatim}"
@@ -193,6 +197,7 @@
   entity_antiqs no_check "" "case" @
   entity_antiqs (K ThyOutput.defined_command) "" "antiquotation" @
   entity_antiqs (fn _ => fn name => is_some (OS.Process.getEnv name)) "isatt" "setting" @
+  entity_antiqs no_check "" "inference" @
   entity_antiqs no_check "isatt" "executable" @
   entity_antiqs (K check_tool) "isatt" "tool" @
   entity_antiqs (K (File.exists o Path.explode)) "isatt" "file" @
--- a/doc-src/isar.sty	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/isar.sty	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,3 @@
-
-%% $Id$
-
 \usepackage{ifthen}
 
 \newcommand{\indexdef}[3]%
@@ -20,3 +17,9 @@
 \newcommand{\isasymIMPORTS}{\isakeyword{imports}}
 \newcommand{\isasymIN}{\isakeyword{in}}
 \newcommand{\isasymSTRUCTURE}{\isakeyword{structure}}
+\newcommand{\isasymFIXES}{\isakeyword{fixes}}
+\newcommand{\isasymASSUMES}{\isakeyword{assumes}}
+\newcommand{\isasymSHOWS}{\isakeyword{shows}}
+\newcommand{\isasymOBTAINS}{\isakeyword{obtains}}
+
+\newcommand{\isasymASSM}{\isacommand{assm}}
--- a/doc-src/manual.bib	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/manual.bib	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,4 @@
 % BibTeX database for the Isabelle documentation
-%
-% Lawrence C Paulson $Id$
 
 %publishers
 @string{AP="Academic Press"}
@@ -185,6 +183,16 @@
                   {F}ormal-{L}ogic {E}ngineering},
   crossref =     {tphols99}}
 
+
+@InProceedings{Bezem-Coquand:2005,
+  author = 	 {M.A. Bezem and T. Coquand},
+  title = 	 {Automating {Coherent Logic}},
+  booktitle = {LPAR-12},
+  editor = 	 {G. Sutcliffe and A. Voronkov},
+  volume = 	 3835,
+  series = 	 LNCS,
+  publisher = Springer}
+
 @book{Bird-Wadler,author="Richard Bird and Philip Wadler",
 title="Introduction to Functional Programming",publisher=PH,year=1988}
 
@@ -469,6 +477,17 @@
   number        = {364/07}
 }
 
+@InProceedings{Haftmann-Wenzel:2009,
+  author        = {Florian Haftmann and Makarius Wenzel},
+  title         = {Local theory specifications in {Isabelle/Isar}},
+  editor        = {Stefano Berardi and Ferruccio Damiani and de Liguoro, Ugo},
+  booktitle     = {Types for Proofs and Programs, TYPES 2008},
+  publisher     = {Springer},
+  series        = {LNCS},
+  volume        = {????},
+  year          = {2009}
+}
+
 @manual{isabelle-classes,
   author        = {Florian Haftmann},
   title         = {Haskell-style type classes with {Isabelle}/{Isar}},
@@ -669,6 +688,16 @@
   pages		= {341-386},
   crossref	= {birtwistle89}}
 
+@Article{Miller:1991,
+  author = 	 {Dale Miller},
+  title = 	 {A Logic Programming Language with Lambda-Abstraction, Function Variables,
+    and Simple Unification},
+  journal = 	 {Journal of Logic and Computation},
+  year = 	 1991,
+  volume =	 1,
+  number =	 4
+}
+
 @Article{miller-mixed,
   Author	= {Dale Miller},
   Title		= {Unification Under a Mixed Prefix},
@@ -1198,6 +1227,15 @@
   pages		= {578-596},
   crossref	= {fme93}}
 
+@Article{Schroeder-Heister:1984,
+  author =       {Peter Schroeder-Heister},
+  title =        {A Natural Extension of Natural Deduction},
+  journal =      {Journal of Symbolic Logic},
+  year =         1984,
+  volume =       49,
+  number =       4
+}
+
 @inproceedings{slind-tfl,
   author	= {Konrad Slind},
   title		= {Function Definition in Higher Order Logic},
@@ -1331,6 +1369,24 @@
   year=2002,
   note =	 {\url{http://tumb1.biblio.tu-muenchen.de/publ/diss/in/2002/wenzel.html}}}
 
+@Article{Wenzel-Wiedijk:2002,
+  author = 	 {Freek Wiedijk and Markus Wenzel},
+  title = 	 {A comparison of the mathematical proof languages {Mizar} and {Isar}.},
+  journal = 	 {Journal of Automated Reasoning},
+  year = 	 2002,
+  volume =	 29,
+  number =	 {3-4}
+}
+
+@InCollection{Wenzel-Paulson:2006,
+  author = 	 {Markus Wenzel and Lawrence C. Paulson},
+  title = 	 {{Isabelle/Isar}},
+  booktitle = 	 {The Seventeen Provers of the World},
+  year =	 2006,
+  editor =	 {F. Wiedijk},
+  series =	 {LNAI 3600}
+}
+
 @InCollection{Wenzel:2006:Festschrift,
   author = 	 {Makarius Wenzel},
   title = 	 {{Isabelle/Isar} --- a generic framework for human-readable proof documents},
--- a/doc-src/more_antiquote.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc-src/more_antiquote.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,4 @@
 (*  Title:      Doc/more_antiquote.ML
-    ID:         $Id$
     Author:     Florian Haftmann, TU Muenchen
 
 More antiquotations.
@@ -92,9 +91,9 @@
   let
     val thy = ProofContext.theory_of ctxt;
     val const = Code_Unit.check_const thy raw_const;
-    val (_, funcgr) = Code_Funcgr.make thy [const];
+    val (_, funcgr) = Code_Wellsorted.make thy [const];
     fun holize thm = @{thm meta_eq_to_obj_eq} OF [thm];
-    val thms = Code_Funcgr.eqns funcgr const
+    val thms = Code_Wellsorted.eqns funcgr const
       |> map_filter (fn (thm, linear) => if linear then SOME thm else NONE)
       |> map (holize o no_vars ctxt o AxClass.overload thy);
   in ThyOutput.output_list pretty_thm src ctxt thms end;
--- a/doc/Contents	Wed Mar 04 10:43:39 2009 +0100
+++ b/doc/Contents	Wed Mar 04 10:45:52 2009 +0100
@@ -6,13 +6,16 @@
   functions       Tutorial on Function Definitions
   codegen         Tutorial on Code Generation
   sugar           LaTeX sugar for proof documents
-  ind-defs        (Co)Inductive Definitions in ZF
 
 Reference Manuals
   isar-ref        The Isabelle/Isar Reference Manual
   implementation  The Isabelle/Isar Implementation Manual
   system          The Isabelle System Manual
-  ref             The Isabelle Reference Manual
+
+Old Manuals (outdated!)
+  intro           Old Introduction to Isabelle
+  ref             Old Isabelle Reference Manual
   logics          Isabelle's Logics: overview and misc logics
   logics-HOL      Isabelle's Logics: HOL
   logics-ZF       Isabelle's Logics: FOL and ZF
+  ind-defs        (Co)Inductive Definitions in ZF
--- a/etc/settings	Wed Mar 04 10:43:39 2009 +0100
+++ b/etc/settings	Wed Mar 04 10:45:52 2009 +0100
@@ -60,12 +60,6 @@
 #ML_OPTIONS=""
 #ML_PLATFORM=""
 
-# Alice 1.4 (experimental!)
-#ML_SYSTEM=alice
-#ML_HOME="/usr/local/alice/bin"
-#ML_OPTIONS=""
-#ML_PLATFORM=""
-
 
 ###
 ### JVM components (Scala or Java)
@@ -268,6 +262,8 @@
 
 # zChaff (SAT Solver, cf. Isabelle/src/HOL/Tools/sat_solver.ML)
 #ZCHAFF_HOME=/usr/local/bin
+#ZCHAFF_VERSION=2004.5.13
+#ZCHAFF_VERSION=2004.11.15
 
 # BerkMin561 (SAT Solver, cf. Isabelle/src/HOL/Tools/sat_solver.ML)
 #BERKMIN_HOME=/usr/local/bin
--- a/lib/Tools/codegen	Wed Mar 04 10:43:39 2009 +0100
+++ b/lib/Tools/codegen	Wed Mar 04 10:45:52 2009 +0100
@@ -36,5 +36,5 @@
 THY=$(echo $THY | sed -e 's/\\/\\\\"/g; s/"/\\\"/g')
 ISAR="theory Codegen imports \"$THY\" begin export_code $CMD end"
 
-echo "$ISAR" | "$ISABELLE_TOOL" tty -l "$IMAGE"
+echo "$ISAR" | "$ISABELLE_PROCESS" -I "$IMAGE"
 exit ${PIPESTATUS[1]}
--- a/src/FOL/IFOL.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/FOL/IFOL.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,4 @@
 (*  Title:      FOL/IFOL.thy
-    ID:         $Id$
     Author:     Lawrence C Paulson and Markus Wenzel
 *)
 
@@ -14,9 +13,10 @@
   "~~/src/Tools/IsaPlanner/isand.ML"
   "~~/src/Tools/IsaPlanner/rw_tools.ML"
   "~~/src/Tools/IsaPlanner/rw_inst.ML"
-  "~~/src/Provers/eqsubst.ML"
+  "~~/src/Tools/eqsubst.ML"
   "~~/src/Provers/quantifier1.ML"
-  "~~/src/Provers/project_rule.ML"
+  "~~/src/Tools/intuitionistic.ML"
+  "~~/src/Tools/project_rule.ML"
   "~~/src/Tools/atomize_elim.ML"
   ("fologic.ML")
   ("hypsubstdata.ML")
@@ -610,6 +610,8 @@
 
 subsection {* Intuitionistic Reasoning *}
 
+setup {* Intuitionistic.method_setup "iprover" *}
+
 lemma impE':
   assumes 1: "P --> Q"
     and 2: "Q ==> R"
--- a/src/FOL/IsaMakefile	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/FOL/IsaMakefile	Wed Mar 04 10:45:52 2009 +0100
@@ -32,12 +32,13 @@
   $(SRC)/Provers/clasimp.ML $(SRC)/Provers/classical.ML			\
   $(SRC)/Tools/IsaPlanner/zipper.ML $(SRC)/Tools/IsaPlanner/isand.ML	\
   $(SRC)/Tools/IsaPlanner/rw_tools.ML					\
-  $(SRC)/Tools/IsaPlanner/rw_inst.ML $(SRC)/Provers/eqsubst.ML		\
+  $(SRC)/Tools/IsaPlanner/rw_inst.ML $(SRC)/Tools/eqsubst.ML		\
   $(SRC)/Provers/hypsubst.ML $(SRC)/Tools/induct.ML			\
-  $(SRC)/Tools/atomize_elim.ML $(SRC)/Provers/project_rule.ML		\
-  $(SRC)/Provers/quantifier1.ML $(SRC)/Provers/splitter.ML FOL.thy	\
-  IFOL.thy ROOT.ML blastdata.ML cladata.ML document/root.tex		\
-  fologic.ML hypsubstdata.ML intprover.ML simpdata.ML
+  $(SRC)/Tools/intuitionistic.ML $(SRC)/Tools/atomize_elim.ML		\
+  $(SRC)/Tools/project_rule.ML $(SRC)/Provers/quantifier1.ML		\
+  $(SRC)/Provers/splitter.ML FOL.thy IFOL.thy ROOT.ML blastdata.ML	\
+  cladata.ML document/root.tex fologic.ML hypsubstdata.ML intprover.ML	\
+  simpdata.ML
 	@$(ISABELLE_TOOL) usedir -p 2 -b $(OUT)/Pure FOL
 
 
@@ -46,12 +47,12 @@
 FOL-ex: FOL $(LOG)/FOL-ex.gz
 
 $(LOG)/FOL-ex.gz: $(OUT)/FOL ex/First_Order_Logic.thy ex/If.thy		\
-  ex/IffOracle.thy ex/Nat.thy ex/Natural_Numbers.thy	\
-  ex/LocaleTest.thy    \
-  ex/Miniscope.thy ex/Prolog.thy ex/ROOT.ML ex/Classical.thy		\
-  ex/document/root.tex ex/Foundation.thy ex/Intuitionistic.thy		\
-  ex/Intro.thy ex/Propositional_Int.thy ex/Propositional_Cla.thy	\
-  ex/Quantifiers_Int.thy ex/Quantifiers_Cla.thy
+  ex/Iff_Oracle.thy ex/Nat.thy ex/Nat_Class.thy ex/Natural_Numbers.thy	\
+  ex/LocaleTest.thy ex/Miniscope.thy ex/Prolog.thy ex/ROOT.ML		\
+  ex/Classical.thy ex/document/root.tex ex/Foundation.thy		\
+  ex/Intuitionistic.thy ex/Intro.thy ex/Propositional_Int.thy		\
+  ex/Propositional_Cla.thy ex/Quantifiers_Int.thy			\
+  ex/Quantifiers_Cla.thy
 	@$(ISABELLE_TOOL) usedir $(OUT)/FOL ex
 
 
--- a/src/FOL/ex/ROOT.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/FOL/ex/ROOT.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,7 +1,4 @@
 (*  Title:      FOL/ex/ROOT.ML
-    ID:         $Id$
-    Author:     Lawrence C Paulson, Cambridge University Computer Laboratory
-    Copyright   1992  University of Cambridge
 
 Examples for First-Order Logic. 
 *)
@@ -11,23 +8,19 @@
   "Natural_Numbers",
   "Intro",
   "Nat",
+  "Nat_Class",
   "Foundation",
   "Prolog",
-
   "Intuitionistic",
   "Propositional_Int",
   "Quantifiers_Int",
-
   "Classical",
   "Propositional_Cla",
   "Quantifiers_Cla",
   "Miniscope",
   "If",
-
-  "NatClass",
-  "IffOracle"
+  "Iff_Oracle"
 ];
 
 (*regression test for locales -- sets several global flags!*)
 no_document use_thy "LocaleTest";
-
--- a/src/FOLP/simp.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/FOLP/simp.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -433,7 +433,7 @@
         val thms = map (trivial o cterm_of(Thm.theory_of_thm thm)) As;
         val new_rws = List.concat(map mk_rew_rules thms);
         val rwrls = map mk_trans (List.concat(map mk_rew_rules thms));
-        val anet' = foldr lhs_insert_thm anet rwrls
+        val anet' = List.foldr lhs_insert_thm anet rwrls
     in  if !tracing andalso not(null new_rws)
         then (writeln"Adding rewrites:";  Display.prths new_rws;  ())
         else ();
--- a/src/HOL/Algebra/Coset.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Algebra/Coset.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -602,8 +602,8 @@
   interpret group G by fact
   show ?thesis
   proof (intro equiv.intro)
-    show "refl (carrier G) (rcong H)"
-      by (auto simp add: r_congruent_def refl_def) 
+    show "refl_on (carrier G) (rcong H)"
+      by (auto simp add: r_congruent_def refl_on_def) 
   next
     show "sym (rcong H)"
     proof (simp add: r_congruent_def sym_def, clarify)
--- a/src/HOL/Algebra/Exponent.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Algebra/Exponent.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -210,12 +210,12 @@
 
 lemma p_fac_forw: "[| (m::nat) > 0; k>0; k < p^a; (p^r) dvd (p^a)* m - k |]  
   ==> (p^r) dvd (p^a) - k"
-apply (frule_tac k1 = k and i = p in p_fac_forw_lemma [THEN le_imp_power_dvd], auto)
+apply (frule p_fac_forw_lemma [THEN le_imp_power_dvd, of _ k p], auto)
 apply (subgoal_tac "p^r dvd p^a*m")
  prefer 2 apply (blast intro: dvd_mult2)
 apply (drule dvd_diffD1)
   apply assumption
- prefer 2 apply (blast intro: dvd_diff)
+ prefer 2 apply (blast intro: nat_dvd_diff)
 apply (drule gr0_implies_Suc, auto)
 done
 
@@ -226,12 +226,12 @@
 
 lemma p_fac_backw: "[| m>0; k>0; (p::nat)\<noteq>0;  k < p^a;  (p^r) dvd p^a - k |]  
   ==> (p^r) dvd (p^a)*m - k"
-apply (frule_tac k1 = k and i = p in r_le_a_forw [THEN le_imp_power_dvd], auto)
+apply (frule_tac k1 = k and p1 = p in r_le_a_forw [THEN le_imp_power_dvd], auto)
 apply (subgoal_tac "p^r dvd p^a*m")
  prefer 2 apply (blast intro: dvd_mult2)
 apply (drule dvd_diffD1)
   apply assumption
- prefer 2 apply (blast intro: dvd_diff)
+ prefer 2 apply (blast intro: nat_dvd_diff)
 apply (drule less_imp_Suc_add, auto)
 done
 
--- a/src/HOL/Algebra/Sylow.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Algebra/Sylow.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -20,8 +20,8 @@
       and "RelM == {(N1,N2). N1 \<in> calM & N2 \<in> calM &
                              (\<exists>g \<in> carrier(G). N1 = (N2 #> g) )}"
 
-lemma (in sylow) RelM_refl: "refl calM RelM"
-apply (auto simp add: refl_def RelM_def calM_def)
+lemma (in sylow) RelM_refl_on: "refl_on calM RelM"
+apply (auto simp add: refl_on_def RelM_def calM_def)
 apply (blast intro!: coset_mult_one [symmetric])
 done
 
@@ -40,7 +40,7 @@
 
 lemma (in sylow) RelM_equiv: "equiv calM RelM"
 apply (unfold equiv_def)
-apply (blast intro: RelM_refl RelM_sym RelM_trans)
+apply (blast intro: RelM_refl_on RelM_sym RelM_trans)
 done
 
 lemma (in sylow) M_subset_calM_prep: "M' \<in> calM // RelM  ==> M' \<subseteq> calM"
--- a/src/HOL/Algebra/poly/UnivPoly2.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Algebra/poly/UnivPoly2.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,6 +1,5 @@
 (*
   Title:     Univariate Polynomials
-  Id:        $Id$
   Author:    Clemens Ballarin, started 9 December 1996
   Copyright: Clemens Ballarin
 *)
@@ -388,7 +387,7 @@
   proof (cases k)
     case 0 then show ?thesis by simp ring
   next
-    case Suc then show ?thesis by (simp add: algebra_simps) ring
+    case Suc then show ?thesis by simp (ring, simp)
   qed
   then show "coeff (monom a 0 * p) k = coeff (a *s p) k" by ring
 qed
--- a/src/HOL/Arith_Tools.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Arith_Tools.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -68,8 +68,9 @@
 apply (subst add_eq_if)
 apply (simp split add: nat.split
             del: nat_numeral_1_eq_1
-            add: numeral_1_eq_Suc_0 [symmetric] Let_def
-                 neg_imp_number_of_eq_0 neg_number_of_pred_iff_0)
+            add: nat_numeral_1_eq_1 [symmetric]
+                 numeral_1_eq_Suc_0 [symmetric]
+                 neg_number_of_pred_iff_0)
 done
 
 lemma nat_rec_number_of [simp]:
@@ -89,7 +90,8 @@
 apply (subst add_eq_if)
 apply (simp split add: nat.split
             del: nat_numeral_1_eq_1
-            add: numeral_1_eq_Suc_0 [symmetric] Let_def neg_imp_number_of_eq_0
+            add: nat_numeral_1_eq_1 [symmetric]
+                 numeral_1_eq_Suc_0 [symmetric]
                  neg_number_of_pred_iff_0)
 done
 
--- a/src/HOL/Complex_Main.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Complex_Main.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -9,7 +9,6 @@
   Ln
   Taylor
   Integration
-  FrechetDeriv
 begin
 
 end
--- a/src/HOL/Decision_Procs/Approximation.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/Approximation.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,7 +1,9 @@
-(* Title:     HOL/Reflection/Approximation.thy
- * Author:    Johannes Hölzl <hoelzl@in.tum.de> 2008 / 2009
- *)
+(*  Title:      HOL/Reflection/Approximation.thy
+    Author:     Johannes Hoelzl <hoelzl@in.tum.de> 2008 / 2009
+*)
+
 header {* Prove unequations about real numbers by computation *}
+
 theory Approximation
 imports Complex_Main Float Reflection Dense_Linear_Order Efficient_Nat
 begin
--- a/src/HOL/Decision_Procs/Cooper.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/Cooper.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -620,7 +620,7 @@
   {assume "i=0" hence ?case using "12.hyps" by (simp add: dvd_def Let_def)}
   moreover 
   {assume i1: "abs i = 1"
-      from zdvd_1_left[where m = "Inum bs a"] uminus_dvd_conv[where d="1" and t="Inum bs a"]
+      from one_dvd[of "Inum bs a"] uminus_dvd_conv[where d="1" and t="Inum bs a"]
       have ?case using i1 apply (cases "i=0", simp_all add: Let_def) 
 	by (cases "i > 0", simp_all)}
   moreover   
@@ -640,7 +640,7 @@
   {assume "i=0" hence ?case using "13.hyps" by (simp add: dvd_def Let_def)}
   moreover 
   {assume i1: "abs i = 1"
-      from zdvd_1_left[where m = "Inum bs a"] uminus_dvd_conv[where d="1" and t="Inum bs a"]
+      from one_dvd[of "Inum bs a"] uminus_dvd_conv[where d="1" and t="Inum bs a"]
       have ?case using i1 apply (cases "i=0", simp_all add: Let_def)
       apply (cases "i > 0", simp_all) done}
   moreover   
@@ -990,7 +990,7 @@
   have "j=0 \<or> (j\<noteq>0 \<and> ?c = 0) \<or> (j\<noteq>0 \<and> ?c >0) \<or> (j\<noteq> 0 \<and> ?c<0)" by arith
   moreover
   {assume "j=0" hence z: "zlfm (Dvd j a) = (zlfm (Eq a))" by (simp add: Let_def) 
-    hence ?case using prems by (simp del: zlfm.simps add: zdvd_0_left)}
+    hence ?case using prems by (simp del: zlfm.simps)}
   moreover
   {assume "?c=0" and "j\<noteq>0" hence ?case 
       using zsplit0_I[OF spl, where x="i" and bs="bs"]
@@ -1005,7 +1005,7 @@
   moreover
   {assume cn: "?c < 0" and jnz: "j\<noteq>0" hence l: "?L (?l (Dvd j a))" 
       by (simp add: nb Let_def split_def)
-    hence ?case using Ia cn jnz zdvd_zminus_iff[where m="abs j" and n="?c*i + ?N ?r" ]
+    hence ?case using Ia cn jnz dvd_minus_iff[of "abs j" "?c*i + ?N ?r" ]
       by (simp add: Let_def split_def) }
   ultimately show ?case by blast
 next
@@ -1019,7 +1019,7 @@
   have "j=0 \<or> (j\<noteq>0 \<and> ?c = 0) \<or> (j\<noteq>0 \<and> ?c >0) \<or> (j\<noteq> 0 \<and> ?c<0)" by arith
   moreover
   {assume "j=0" hence z: "zlfm (NDvd j a) = (zlfm (NEq a))" by (simp add: Let_def) 
-    hence ?case using prems by (simp del: zlfm.simps add: zdvd_0_left)}
+    hence ?case using prems by (simp del: zlfm.simps)}
   moreover
   {assume "?c=0" and "j\<noteq>0" hence ?case 
       using zsplit0_I[OF spl, where x="i" and bs="bs"]
@@ -1034,7 +1034,7 @@
   moreover
   {assume cn: "?c < 0" and jnz: "j\<noteq>0" hence l: "?L (?l (Dvd j a))" 
       by (simp add: nb Let_def split_def)
-    hence ?case using Ia cn jnz zdvd_zminus_iff[where m="abs j" and n="?c*i + ?N ?r" ]
+    hence ?case using Ia cn jnz dvd_minus_iff[of "abs j" "?c*i + ?N ?r"]
       by (simp add: Let_def split_def)}
   ultimately show ?case by blast
 qed auto
@@ -1092,10 +1092,10 @@
   using lin ad d
 proof(induct p rule: iszlfm.induct)
   case (9 i c e)  thus ?case using d
-    by (simp add: zdvd_trans[where m="i" and n="d" and k="d'"])
+    by (simp add: dvd_trans[of "i" "d" "d'"])
 next
   case (10 i c e) thus ?case using d
-    by (simp add: zdvd_trans[where m="i" and n="d" and k="d'"])
+    by (simp add: dvd_trans[of "i" "d" "d'"])
 qed simp_all
 
 lemma \<delta> : assumes lin:"iszlfm p"
@@ -1354,7 +1354,7 @@
   case (9 j c e) hence nb: "numbound0 e" by simp
   have "Ifm bbs (x#bs) (mirror (Dvd j (CN 0 c e))) = (j dvd c*x - Inum (x#bs) e)" (is "_ = (j dvd c*x - ?e)") by simp
     also have "\<dots> = (j dvd (- (c*x - ?e)))"
-    by (simp only: zdvd_zminus_iff)
+    by (simp only: dvd_minus_iff)
   also have "\<dots> = (j dvd (c* (- x)) + ?e)"
     apply (simp only: minus_mult_right[symmetric] minus_mult_left[symmetric] diff_def zadd_ac zminus_zadd_distrib)
     by (simp add: algebra_simps)
@@ -1366,7 +1366,7 @@
     case (10 j c e) hence nb: "numbound0 e" by simp
   have "Ifm bbs (x#bs) (mirror (Dvd j (CN 0 c e))) = (j dvd c*x - Inum (x#bs) e)" (is "_ = (j dvd c*x - ?e)") by simp
     also have "\<dots> = (j dvd (- (c*x - ?e)))"
-    by (simp only: zdvd_zminus_iff)
+    by (simp only: dvd_minus_iff)
   also have "\<dots> = (j dvd (c* (- x)) + ?e)"
     apply (simp only: minus_mult_right[symmetric] minus_mult_left[symmetric] diff_def zadd_ac zminus_zadd_distrib)
     by (simp add: algebra_simps)
@@ -1392,7 +1392,7 @@
   and dr: "d\<beta> p l"
   and d: "l dvd l'"
   shows "d\<beta> p l'"
-using dr linp zdvd_trans[where n="l" and k="l'", simplified d]
+using dr linp dvd_trans[of _ "l" "l'", simplified d]
 by (induct p rule: iszlfm.induct) simp_all
 
 lemma \<alpha>_l: assumes lp: "iszlfm p"
@@ -1431,7 +1431,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(l*x + (l div c) * Inum (x # bs) e < 0) =
@@ -1449,7 +1449,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(l*x + (l div c) * Inum (x# bs) e \<le> 0) =
@@ -1467,7 +1467,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(l*x + (l div c)* Inum (x # bs) e > 0) =
@@ -1485,7 +1485,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(l*x + (l div c)* Inum (x # bs) e \<ge> 0) =
@@ -1505,7 +1505,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(l * x + (l div c) * Inum (x # bs) e = 0) =
@@ -1523,7 +1523,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(l * x + (l div c) * Inum (x # bs) e \<noteq> 0) =
@@ -1541,7 +1541,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(\<exists> (k::int). l * x + (l div c) * Inum (x # bs) e = ((l div c) * j) * k) = (\<exists> (k::int). (c * (l div c)) * x + (l div c) * Inum (x # bs) e = ((l div c) * j) * k)"  by simp
@@ -1558,7 +1558,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(\<exists> (k::int). l * x + (l div c) * Inum (x # bs) e = ((l div c) * j) * k) = (\<exists> (k::int). (c * (l div c)) * x + (l div c) * Inum (x # bs) e = ((l div c) * j) * k)"  by simp
--- a/src/HOL/Decision_Procs/Ferrack.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/Ferrack.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -501,9 +501,9 @@
   assumes gdg: "g dvd g'" and dgt':"dvdnumcoeff t g'"
   shows "dvdnumcoeff t g"
   using dgt' gdg 
-  by (induct t rule: dvdnumcoeff.induct, simp_all add: gdg zdvd_trans[OF gdg])
+  by (induct t rule: dvdnumcoeff.induct, simp_all add: gdg dvd_trans[OF gdg])
 
-declare zdvd_trans [trans add]
+declare dvd_trans [trans add]
 
 lemma natabs0: "(nat (abs x) = 0) = (x = 0)"
 by arith
--- a/src/HOL/Decision_Procs/MIR.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/MIR.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -83,7 +83,7 @@
   have "real (floor x) \<le> x" by simp 
   hence "real (floor x) < real (n + 1) " using ub by arith
   hence "floor x < n+1" by simp
-  moreover from lb have "n \<le> floor x" using floor_mono2[where x="real n" and y="x"] 
+  moreover from lb have "n \<le> floor x" using floor_mono[where x="real n" and y="x"] 
     by simp ultimately show "floor x = n" by simp
 qed
 
@@ -132,13 +132,13 @@
   assume d: "real d rdvd t"
   from d int_rdvd_real have d2: "d dvd (floor t)" and ti: "real (floor t) = t" by auto
 
-  from iffD2[OF zdvd_abs1] d2 have "(abs d) dvd (floor t)" by blast
+  from iffD2[OF abs_dvd_iff] d2 have "(abs d) dvd (floor t)" by blast
   with ti int_rdvd_real[symmetric] have "real (abs d) rdvd t" by blast 
   thus "abs (real d) rdvd t" by simp
 next
   assume "abs (real d) rdvd t" hence "real (abs d) rdvd t" by simp
   with int_rdvd_real[where i="abs d" and x="t"] have d2: "abs d dvd floor t" and ti: "real (floor t) =t" by auto
-  from iffD1[OF zdvd_abs1] d2 have "d dvd floor t" by blast
+  from iffD1[OF abs_dvd_iff] d2 have "d dvd floor t" by blast
   with ti int_rdvd_real[symmetric] show "real d rdvd t" by blast
 qed
 
@@ -675,9 +675,9 @@
   assumes gdg: "g dvd g'" and dgt':"dvdnumcoeff t g'"
   shows "dvdnumcoeff t g"
   using dgt' gdg 
-  by (induct t rule: dvdnumcoeff.induct, simp_all add: gdg zdvd_trans[OF gdg])
-
-declare zdvd_trans [trans add]
+  by (induct t rule: dvdnumcoeff.induct, simp_all add: gdg dvd_trans[OF gdg])
+
+declare dvd_trans [trans add]
 
 lemma natabs0: "(nat (abs x) = 0) = (x = 0)"
 by arith
@@ -1775,11 +1775,11 @@
   "(real (a::int) \<le> b) = (a \<le> floor b \<or> (a = floor b \<and> real (floor b) < b))"
 proof( auto)
   assume alb: "real a \<le> b" and agb: "\<not> a \<le> floor b"
-  from alb have "floor (real a) \<le> floor b " by (simp only: floor_mono2) 
+  from alb have "floor (real a) \<le> floor b " by (simp only: floor_mono) 
   hence "a \<le> floor b" by simp with agb show "False" by simp
 next
   assume alb: "a \<le> floor b"
-  hence "real a \<le> real (floor b)" by (simp only: floor_mono2)
+  hence "real a \<le> real (floor b)" by (simp only: floor_mono)
   also have "\<dots>\<le> b" by simp  finally show  "real a \<le> b" . 
 qed
 
@@ -2114,10 +2114,10 @@
   using lin ad d
 proof(induct p rule: iszlfm.induct)
   case (9 i c e)  thus ?case using d
-    by (simp add: zdvd_trans[where m="i" and n="d" and k="d'"])
+    by (simp add: dvd_trans[of "i" "d" "d'"])
 next
   case (10 i c e) thus ?case using d
-    by (simp add: zdvd_trans[where m="i" and n="d" and k="d'"])
+    by (simp add: dvd_trans[of "i" "d" "d'"])
 qed simp_all
 
 lemma \<delta> : assumes lin:"iszlfm p bs"
@@ -2496,7 +2496,7 @@
   and dr: "d\<beta> p l"
   and d: "l dvd l'"
   shows "d\<beta> p l'"
-using dr linp zdvd_trans[where n="l" and k="l'", simplified d]
+using dr linp dvd_trans[of _ "l" "l'", simplified d]
 by (induct p rule: iszlfm.induct) simp_all
 
 lemma \<alpha>_l: assumes lp: "iszlfm p (a#bs)"
@@ -2535,7 +2535,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(real l * real x + real (l div c) * Inum (real x # bs) e < (0\<Colon>real)) =
@@ -2553,7 +2553,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(real l * real x + real (l div c) * Inum (real x # bs) e \<le> (0\<Colon>real)) =
@@ -2571,7 +2571,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(real l * real x + real (l div c) * Inum (real x # bs) e > (0\<Colon>real)) =
@@ -2589,7 +2589,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(real l * real x + real (l div c) * Inum (real x # bs) e \<ge> (0\<Colon>real)) =
@@ -2607,7 +2607,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(real l * real x + real (l div c) * Inum (real x # bs) e = (0\<Colon>real)) =
@@ -2625,7 +2625,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(real l * real x + real (l div c) * Inum (real x # bs) e \<noteq> (0\<Colon>real)) =
@@ -2643,7 +2643,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(\<exists> (k::int). real l * real x + real (l div c) * Inum (real x # bs) e = (real (l div c) * real j) * real k) = (\<exists> (k::int). real (c * (l div c)) * real x + real (l div c) * Inum (real x # bs) e = (real (l div c) * real j) * real k)"  by simp
@@ -2660,7 +2660,7 @@
       by (simp add: zdiv_mono1[OF clel cp])
     then have ldcp:"0 < l div c" 
       by (simp add: zdiv_self[OF cnz])
-    have "c * (l div c) = c* (l div c) + l mod c" using d' zdvd_iff_zmod_eq_0[where m="c" and n="l"] by simp
+    have "c * (l div c) = c* (l div c) + l mod c" using d' dvd_eq_mod_eq_0[of "c" "l"] by simp
     hence cl:"c * (l div c) =l" using zmod_zdiv_equality[where a="l" and b="c", symmetric] 
       by simp
     hence "(\<exists> (k::int). real l * real x + real (l div c) * Inum (real x # bs) e = (real (l div c) * real j) * real k) = (\<exists> (k::int). real (c * (l div c)) * real x + real (l div c) * Inum (real x # bs) e = (real (l div c) * real j) * real k)"  by simp
@@ -3697,7 +3697,7 @@
   assumes xb: "real m \<le> x \<and> x < real ((n::int) + 1)"
   shows "\<exists> j\<in> {m.. n}. real j \<le> x \<and> x < real (j+1)" (is "\<exists> j\<in> ?N. ?P j")
 by (rule bexI[where P="?P" and x="floor x" and A="?N"]) 
-(auto simp add: floor_less_eq[where x="x" and a="n+1", simplified] xb[simplified] floor_mono2[where x="real m" and y="x", OF conjunct1[OF xb], simplified floor_real_of_int[where n="m"]])
+(auto simp add: floor_less_eq[where x="x" and a="n+1", simplified] xb[simplified] floor_mono[where x="real m" and y="x", OF conjunct1[OF xb], simplified floor_real_of_int[where n="m"]])
 
 lemma rsplit0_complete:
   assumes xp:"0 \<le> x" and x1:"x < 1"
@@ -5926,7 +5926,7 @@
 apply mir
 done
 
-lemma "ALL x y. \<lfloor>x\<rfloor> = \<lfloor>y\<rfloor> \<longrightarrow> 0 \<le> abs (y - x) \<and> abs (y - x) \<le> 1"
+lemma "ALL (x::real) (y::real). \<lfloor>x\<rfloor> = \<lfloor>y\<rfloor> \<longrightarrow> 0 \<le> abs (y - x) \<and> abs (y - x) \<le> 1"
 apply mir
 done
 
--- a/src/HOL/Decision_Procs/cooper_tac.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/cooper_tac.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -27,12 +27,9 @@
 val Suc_plus1 = @{thm Suc_plus1};
 val imp_le_cong = @{thm imp_le_cong};
 val conj_le_cong = @{thm conj_le_cong};
-val nat_mod_add_eq = @{thm mod_add1_eq} RS sym;
-val nat_mod_add_left_eq = @{thm mod_add_left_eq} RS sym;
-val nat_mod_add_right_eq = @{thm mod_add_right_eq} RS sym;
-val int_mod_add_eq = @{thm mod_add_eq} RS sym;
-val int_mod_add_left_eq = @{thm zmod_zadd_left_eq} RS sym;
-val int_mod_add_right_eq = @{thm zmod_zadd_right_eq} RS sym;
+val mod_add_left_eq = @{thm mod_add_left_eq} RS sym;
+val mod_add_right_eq = @{thm mod_add_right_eq} RS sym;
+val mod_add_eq = @{thm mod_add_eq} RS sym;
 val nat_div_add_eq = @{thm div_add1_eq} RS sym;
 val int_div_add_eq = @{thm zdiv_zadd1_eq} RS sym;
 
@@ -70,14 +67,13 @@
     val (t,np,nh) = prepare_for_linz q g
     (* Some simpsets for dealing with mod div abs and nat*)
     val mod_div_simpset = HOL_basic_ss 
-			addsimps [refl,nat_mod_add_eq, nat_mod_add_left_eq, 
-				  nat_mod_add_right_eq, int_mod_add_eq, 
-				  int_mod_add_right_eq, int_mod_add_left_eq,
+			addsimps [refl,mod_add_eq, mod_add_left_eq, 
+				  mod_add_right_eq,
 				  nat_div_add_eq, int_div_add_eq,
 				  @{thm mod_self}, @{thm "zmod_self"},
 				  @{thm mod_by_0}, @{thm div_by_0},
 				  @{thm "zdiv_zero"}, @{thm "zmod_zero"}, @{thm "div_0"}, @{thm "mod_0"},
-				  @{thm "zdiv_1"}, @{thm "zmod_1"}, @{thm "div_1"}, @{thm "mod_1"},
+				  @{thm "div_by_1"}, @{thm "mod_by_1"}, @{thm "div_1"}, @{thm "mod_1"},
 				  Suc_plus1]
 			addsimps @{thms add_ac}
 			addsimprocs [cancel_div_mod_proc]
--- a/src/HOL/Decision_Procs/ferrack_tac.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/ferrack_tac.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -31,12 +31,8 @@
 val Suc_plus1 = @{thm Suc_plus1};
 val imp_le_cong = @{thm imp_le_cong};
 val conj_le_cong = @{thm conj_le_cong};
-val nat_mod_add_eq = @{thm mod_add1_eq} RS sym;
-val nat_mod_add_left_eq = @{thm mod_add_left_eq} RS sym;
-val nat_mod_add_right_eq = @{thm mod_add_right_eq} RS sym;
-val int_mod_add_eq = @{thm mod_add_eq} RS sym;
-val int_mod_add_left_eq = @{thm zmod_zadd_left_eq} RS sym;
-val int_mod_add_right_eq = @{thm zmod_zadd_right_eq} RS sym;
+val mod_add_left_eq = @{thm mod_add_left_eq} RS sym;
+val mod_add_right_eq = @{thm mod_add_right_eq} RS sym;
 val nat_div_add_eq = @{thm div_add1_eq} RS sym;
 val int_div_add_eq = @{thm zdiv_zadd1_eq} RS sym;
 val ZDIVISION_BY_ZERO_MOD = @{thm DIVISION_BY_ZERO} RS conjunct2;
--- a/src/HOL/Decision_Procs/mir_tac.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Decision_Procs/mir_tac.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -46,12 +46,9 @@
 val Suc_plus1 = @{thm "Suc_plus1"};
 val imp_le_cong = @{thm "imp_le_cong"};
 val conj_le_cong = @{thm "conj_le_cong"};
-val nat_mod_add_eq = @{thm "mod_add1_eq"} RS sym;
-val nat_mod_add_left_eq = @{thm "mod_add_left_eq"} RS sym;
-val nat_mod_add_right_eq = @{thm "mod_add_right_eq"} RS sym;
-val int_mod_add_eq = @{thm "mod_add_eq"} RS sym;
-val int_mod_add_left_eq = @{thm "zmod_zadd_left_eq"} RS sym;
-val int_mod_add_right_eq = @{thm "zmod_zadd_right_eq"} RS sym;
+val mod_add_eq = @{thm "mod_add_eq"} RS sym;
+val mod_add_left_eq = @{thm "mod_add_left_eq"} RS sym;
+val mod_add_right_eq = @{thm "mod_add_right_eq"} RS sym;
 val nat_div_add_eq = @{thm "div_add1_eq"} RS sym;
 val int_div_add_eq = @{thm "zdiv_zadd1_eq"} RS sym;
 val ZDIVISION_BY_ZERO_MOD = @{thm "DIVISION_BY_ZERO"} RS conjunct2;
@@ -96,10 +93,10 @@
     val (t,np,nh) = prepare_for_mir thy q g
     (* Some simpsets for dealing with mod div abs and nat*)
     val mod_div_simpset = HOL_basic_ss 
-                        addsimps [refl,nat_mod_add_eq, 
+                        addsimps [refl, mod_add_eq, 
                                   @{thm "mod_self"}, @{thm "zmod_self"},
                                   @{thm "zdiv_zero"},@{thm "zmod_zero"},@{thm "div_0"}, @{thm "mod_0"},
-                                  @{thm "zdiv_1"}, @{thm "zmod_1"}, @{thm "div_1"}, @{thm "mod_1"},
+                                  @{thm "div_by_1"}, @{thm "mod_by_1"}, @{thm "div_1"}, @{thm "mod_1"},
                                   @{thm "Suc_plus1"}]
                         addsimps @{thms add_ac}
                         addsimprocs [cancel_div_mod_proc]
--- a/src/HOL/Deriv.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Deriv.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -9,7 +9,7 @@
 header{* Differentiation *}
 
 theory Deriv
-imports Lim Polynomial
+imports Lim
 begin
 
 text{*Standard Definitions*}
@@ -217,9 +217,7 @@
 by (cases "n", simp, simp add: DERIV_power_Suc f)
 
 
-(* ------------------------------------------------------------------------ *)
-(* Caratheodory formulation of derivative at a point: standard proof        *)
-(* ------------------------------------------------------------------------ *)
+text {* Caratheodory formulation of derivative at a point *}
 
 lemma CARAT_DERIV:
      "(DERIV f x :> l) =
@@ -307,6 +305,9 @@
        ==> DERIV (%y. f(y) / (g y)) x :> (d*g(x) - (e*f(x))) / (g(x) ^ Suc (Suc 0))"
 by (drule (2) DERIV_divide) (simp add: mult_commute power_Suc)
 
+lemma lemma_DERIV_subst: "[| DERIV f x :> D; D = E |] ==> DERIV f x :> E"
+by auto
+
 
 subsection {* Differentiability predicate *}
 
@@ -655,6 +656,9 @@
 apply (blast intro: IVT2)
 done
 
+
+subsection {* Boundedness of continuous functions *}
+
 text{*By bisection, function continuous on closed interval is bounded above*}
 
 lemma isCont_bounded:
@@ -773,6 +777,8 @@
 done
 
 
+subsection {* Local extrema *}
+
 text{*If @{term "0 < f'(x)"} then @{term x} is Locally Strictly Increasing At The Right*}
 
 lemma DERIV_left_inc:
@@ -877,6 +883,9 @@
   shows "[| DERIV f x :> l; 0 < d; \<forall>y. \<bar>x-y\<bar> < d --> f(x) = f(y) |] ==> l = 0"
 by (auto dest!: DERIV_local_max)
 
+
+subsection {* Rolle's Theorem *}
+
 text{*Lemma about introducing open ball in open interval*}
 lemma lemma_interval_lt:
      "[| a < x;  x < b |]
@@ -1163,6 +1172,8 @@
 qed
 
 
+subsection {* Continuous injective functions *}
+
 text{*Dull lemma: an continuous injection on an interval must have a
 strict maximum at an end point, not in the middle.*}
 
@@ -1356,6 +1367,9 @@
     using neq by (rule LIM_inverse)
 qed
 
+
+subsection {* Generalized Mean Value Theorem *}
+
 theorem GMVT:
   fixes a b :: real
   assumes alb: "a < b"
@@ -1442,245 +1456,6 @@
   with g'cdef f'cdef cint show ?thesis by auto
 qed
 
-lemma lemma_DERIV_subst: "[| DERIV f x :> D; D = E |] ==> DERIV f x :> E"
-by auto
-
-
-subsection {* Derivatives of univariate polynomials *}
-
-definition
-  pderiv :: "'a::real_normed_field poly \<Rightarrow> 'a poly" where
-  "pderiv = poly_rec 0 (\<lambda>a p p'. p + pCons 0 p')"
-
-lemma pderiv_0 [simp]: "pderiv 0 = 0"
-  unfolding pderiv_def by (simp add: poly_rec_0)
-
-lemma pderiv_pCons: "pderiv (pCons a p) = p + pCons 0 (pderiv p)"
-  unfolding pderiv_def by (simp add: poly_rec_pCons)
-
-lemma coeff_pderiv: "coeff (pderiv p) n = of_nat (Suc n) * coeff p (Suc n)"
-  apply (induct p arbitrary: n, simp)
-  apply (simp add: pderiv_pCons coeff_pCons algebra_simps split: nat.split)
-  done
-
-lemma pderiv_eq_0_iff: "pderiv p = 0 \<longleftrightarrow> degree p = 0"
-  apply (rule iffI)
-  apply (cases p, simp)
-  apply (simp add: expand_poly_eq coeff_pderiv del: of_nat_Suc)
-  apply (simp add: expand_poly_eq coeff_pderiv coeff_eq_0)
-  done
-
-lemma degree_pderiv: "degree (pderiv p) = degree p - 1"
-  apply (rule order_antisym [OF degree_le])
-  apply (simp add: coeff_pderiv coeff_eq_0)
-  apply (cases "degree p", simp)
-  apply (rule le_degree)
-  apply (simp add: coeff_pderiv del: of_nat_Suc)
-  apply (rule subst, assumption)
-  apply (rule leading_coeff_neq_0, clarsimp)
-  done
-
-lemma pderiv_singleton [simp]: "pderiv [:a:] = 0"
-by (simp add: pderiv_pCons)
-
-lemma pderiv_add: "pderiv (p + q) = pderiv p + pderiv q"
-by (rule poly_ext, simp add: coeff_pderiv algebra_simps)
-
-lemma pderiv_minus: "pderiv (- p) = - pderiv p"
-by (rule poly_ext, simp add: coeff_pderiv)
-
-lemma pderiv_diff: "pderiv (p - q) = pderiv p - pderiv q"
-by (rule poly_ext, simp add: coeff_pderiv algebra_simps)
-
-lemma pderiv_smult: "pderiv (smult a p) = smult a (pderiv p)"
-by (rule poly_ext, simp add: coeff_pderiv algebra_simps)
-
-lemma pderiv_mult: "pderiv (p * q) = p * pderiv q + q * pderiv p"
-apply (induct p)
-apply simp
-apply (simp add: pderiv_add pderiv_smult pderiv_pCons algebra_simps)
-done
-
-lemma pderiv_power_Suc:
-  "pderiv (p ^ Suc n) = smult (of_nat (Suc n)) (p ^ n) * pderiv p"
-apply (induct n)
-apply simp
-apply (subst power_Suc)
-apply (subst pderiv_mult)
-apply (erule ssubst)
-apply (simp add: smult_add_left algebra_simps)
-done
-
-lemma DERIV_cmult2: "DERIV f x :> D ==> DERIV (%x. (f x) * c :: real) x :> D * c"
-by (simp add: DERIV_cmult mult_commute [of _ c])
-
-lemma DERIV_pow2: "DERIV (%x. x ^ Suc n) x :> real (Suc n) * (x ^ n)"
-by (rule lemma_DERIV_subst, rule DERIV_pow, simp)
-declare DERIV_pow2 [simp] DERIV_pow [simp]
-
-lemma DERIV_add_const: "DERIV f x :> D ==>  DERIV (%x. a + f x :: 'a::real_normed_field) x :> D"
-by (rule lemma_DERIV_subst, rule DERIV_add, auto)
-
-lemma poly_DERIV[simp]: "DERIV (%x. poly p x) x :> poly (pderiv p) x"
-apply (induct p)
-apply simp
-apply (simp add: pderiv_pCons)
-apply (rule lemma_DERIV_subst)
-apply (rule DERIV_add DERIV_mult DERIV_const DERIV_ident | assumption)+
-apply simp
-done
-
-text{* Consequences of the derivative theorem above*}
-
-lemma poly_differentiable[simp]: "(%x. poly p x) differentiable (x::real)"
-apply (simp add: differentiable_def)
-apply (blast intro: poly_DERIV)
-done
-
-lemma poly_isCont[simp]: "isCont (%x. poly p x) (x::real)"
-by (rule poly_DERIV [THEN DERIV_isCont])
-
-lemma poly_IVT_pos: "[| a < b; poly p (a::real) < 0; 0 < poly p b |]
-      ==> \<exists>x. a < x & x < b & (poly p x = 0)"
-apply (cut_tac f = "%x. poly p x" and a = a and b = b and y = 0 in IVT_objl)
-apply (auto simp add: order_le_less)
-done
-
-lemma poly_IVT_neg: "[| (a::real) < b; 0 < poly p a; poly p b < 0 |]
-      ==> \<exists>x. a < x & x < b & (poly p x = 0)"
-by (insert poly_IVT_pos [where p = "- p" ]) simp
-
-lemma poly_MVT: "(a::real) < b ==>
-     \<exists>x. a < x & x < b & (poly p b - poly p a = (b - a) * poly (pderiv p) x)"
-apply (drule_tac f = "poly p" in MVT, auto)
-apply (rule_tac x = z in exI)
-apply (auto simp add: real_mult_left_cancel poly_DERIV [THEN DERIV_unique])
-done
-
-text{*Lemmas for Derivatives*}
-
-(* FIXME
-lemma lemma_order_pderiv [rule_format]:
-     "\<forall>p q a. 0 < n &
-       poly (pderiv p) \<noteq> poly [] &
-       poly p = poly ([- a, 1] %^ n *** q) & ~ [- a, 1] divides q
-       --> n = Suc (order a (pderiv p))"
-apply (induct "n", safe)
-apply (rule order_unique_lemma, rule conjI, assumption)
-apply (subgoal_tac "\<forall>r. r divides (pderiv p) = r divides (pderiv ([-a, 1] %^ Suc n *** q))")
-apply (drule_tac [2] poly_pderiv_welldef)
- prefer 2 apply (simp add: divides_def del: pmult_Cons pexp_Suc) 
-apply (simp del: pmult_Cons pexp_Suc) 
-apply (rule conjI)
-apply (simp add: divides_def fun_eq del: pmult_Cons pexp_Suc)
-apply (rule_tac x = "[-a, 1] *** (pderiv q) +++ real (Suc n) %* q" in exI)
-apply (simp add: poly_pderiv_mult poly_pderiv_exp_prime poly_add poly_mult poly_cmult right_distrib mult_ac del: pmult_Cons pexp_Suc)
-apply (simp add: poly_mult right_distrib left_distrib mult_ac del: pmult_Cons)
-apply (erule_tac V = "\<forall>r. r divides pderiv p = r divides pderiv ([- a, 1] %^ Suc n *** q)" in thin_rl)
-apply (unfold divides_def)
-apply (simp (no_asm) add: poly_pderiv_mult poly_pderiv_exp_prime fun_eq poly_add poly_mult del: pmult_Cons pexp_Suc)
-apply (rule contrapos_np, assumption)
-apply (rotate_tac 3, erule contrapos_np)
-apply (simp del: pmult_Cons pexp_Suc, safe)
-apply (rule_tac x = "inverse (real (Suc n)) %* (qa +++ -- (pderiv q))" in exI)
-apply (subgoal_tac "poly ([-a, 1] %^ n *** q) = poly ([-a, 1] %^ n *** ([-a, 1] *** (inverse (real (Suc n)) %* (qa +++ -- (pderiv q))))) ")
-apply (drule poly_mult_left_cancel [THEN iffD1], simp)
-apply (simp add: fun_eq poly_mult poly_add poly_cmult poly_minus del: pmult_Cons mult_cancel_left, safe)
-apply (rule_tac c1 = "real (Suc n)" in real_mult_left_cancel [THEN iffD1])
-apply (simp (no_asm))
-apply (subgoal_tac "real (Suc n) * (poly ([- a, 1] %^ n) xa * poly q xa) =
-          (poly qa xa + - poly (pderiv q) xa) *
-          (poly ([- a, 1] %^ n) xa *
-           ((- a + xa) * (inverse (real (Suc n)) * real (Suc n))))")
-apply (simp only: mult_ac)  
-apply (rotate_tac 2)
-apply (drule_tac x = xa in spec)
-apply (simp add: left_distrib mult_ac del: pmult_Cons)
-done
-
-lemma order_pderiv: "[| poly (pderiv p) \<noteq> poly []; order a p \<noteq> 0 |]
-      ==> (order a p = Suc (order a (pderiv p)))"
-apply (case_tac "poly p = poly []")
-apply (auto dest: pderiv_zero)
-apply (drule_tac a = a and p = p in order_decomp)
-using neq0_conv
-apply (blast intro: lemma_order_pderiv)
-done
-
-text{*Now justify the standard squarefree decomposition, i.e. f / gcd(f,f'). *}
-
-lemma poly_squarefree_decomp_order: "[| poly (pderiv p) \<noteq> poly [];
-         poly p = poly (q *** d);
-         poly (pderiv p) = poly (e *** d);
-         poly d = poly (r *** p +++ s *** pderiv p)
-      |] ==> order a q = (if order a p = 0 then 0 else 1)"
-apply (subgoal_tac "order a p = order a q + order a d")
-apply (rule_tac [2] s = "order a (q *** d)" in trans)
-prefer 2 apply (blast intro: order_poly)
-apply (rule_tac [2] order_mult)
- prefer 2 apply force
-apply (case_tac "order a p = 0", simp)
-apply (subgoal_tac "order a (pderiv p) = order a e + order a d")
-apply (rule_tac [2] s = "order a (e *** d)" in trans)
-prefer 2 apply (blast intro: order_poly)
-apply (rule_tac [2] order_mult)
- prefer 2 apply force
-apply (case_tac "poly p = poly []")
-apply (drule_tac p = p in pderiv_zero, simp)
-apply (drule order_pderiv, assumption)
-apply (subgoal_tac "order a (pderiv p) \<le> order a d")
-apply (subgoal_tac [2] " ([-a, 1] %^ (order a (pderiv p))) divides d")
- prefer 2 apply (simp add: poly_entire order_divides)
-apply (subgoal_tac [2] " ([-a, 1] %^ (order a (pderiv p))) divides p & ([-a, 1] %^ (order a (pderiv p))) divides (pderiv p) ")
- prefer 3 apply (simp add: order_divides)
- prefer 2 apply (simp add: divides_def del: pexp_Suc pmult_Cons, safe)
-apply (rule_tac x = "r *** qa +++ s *** qaa" in exI)
-apply (simp add: fun_eq poly_add poly_mult left_distrib right_distrib mult_ac del: pexp_Suc pmult_Cons, auto)
-done
-
-
-lemma poly_squarefree_decomp_order2: "[| poly (pderiv p) \<noteq> poly [];
-         poly p = poly (q *** d);
-         poly (pderiv p) = poly (e *** d);
-         poly d = poly (r *** p +++ s *** pderiv p)
-      |] ==> \<forall>a. order a q = (if order a p = 0 then 0 else 1)"
-apply (blast intro: poly_squarefree_decomp_order)
-done
-
-lemma order_pderiv2: "[| poly (pderiv p) \<noteq> poly []; order a p \<noteq> 0 |]
-      ==> (order a (pderiv p) = n) = (order a p = Suc n)"
-apply (auto dest: order_pderiv)
-done
-
-lemma rsquarefree_roots:
-  "rsquarefree p = (\<forall>a. ~(poly p a = 0 & poly (pderiv p) a = 0))"
-apply (simp add: rsquarefree_def)
-apply (case_tac "poly p = poly []", simp, simp)
-apply (case_tac "poly (pderiv p) = poly []")
-apply simp
-apply (drule pderiv_iszero, clarify)
-apply (subgoal_tac "\<forall>a. order a p = order a [h]")
-apply (simp add: fun_eq)
-apply (rule allI)
-apply (cut_tac p = "[h]" and a = a in order_root)
-apply (simp add: fun_eq)
-apply (blast intro: order_poly)
-apply (auto simp add: order_root order_pderiv2)
-apply (erule_tac x="a" in allE, simp)
-done
-
-lemma poly_squarefree_decomp: "[| poly (pderiv p) \<noteq> poly [];
-         poly p = poly (q *** d);
-         poly (pderiv p) = poly (e *** d);
-         poly d = poly (r *** p +++ s *** pderiv p)
-      |] ==> rsquarefree q & (\<forall>a. (poly q a = 0) = (poly p a = 0))"
-apply (frule poly_squarefree_decomp_order2, assumption+) 
-apply (case_tac "poly p = poly []")
-apply (blast dest: pderiv_zero)
-apply (simp (no_asm) add: rsquarefree_def order_root del: pmult_Cons)
-apply (simp add: poly_entire del: pmult_Cons)
-done
-*)
 
 subsection {* Theorems about Limits *}
 
--- a/src/HOL/Divides.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Divides.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -44,10 +44,10 @@
 by (simp add: mod_div_equality2)
 
 lemma mod_by_0 [simp]: "a mod 0 = a"
-  using mod_div_equality [of a zero] by simp
+using mod_div_equality [of a zero] by simp
 
 lemma mod_0 [simp]: "0 mod a = 0"
-  using mod_div_equality [of zero a] div_0 by simp 
+using mod_div_equality [of zero a] div_0 by simp
 
 lemma div_mult_self2 [simp]:
   assumes "b \<noteq> 0"
@@ -178,6 +178,12 @@
 lemma dvd_div_mult_self: "a dvd b \<Longrightarrow> (b div a) * a = b"
 by (subst (2) mod_div_equality [of b a, symmetric]) (simp add:dvd_imp_mod_0)
 
+lemma dvd_div_mult: "a dvd b \<Longrightarrow> (b div a) * c = b * c div a"
+apply (cases "a = 0")
+ apply simp
+apply (auto simp: dvd_def mult_assoc)
+done
+
 lemma div_dvd_div[simp]:
   "a dvd b \<Longrightarrow> a dvd c \<Longrightarrow> (b div a dvd c div a) = (b dvd c)"
 apply (cases "a = 0")
@@ -188,6 +194,12 @@
 apply(fastsimp simp add: mult_assoc)
 done
 
+lemma dvd_mod_imp_dvd: "[| k dvd m mod n;  k dvd n |] ==> k dvd m"
+  apply (subgoal_tac "k dvd (m div n) *n + m mod n")
+   apply (simp add: mod_div_equality)
+  apply (simp only: dvd_add dvd_mult)
+  done
+
 text {* Addition respects modular equivalence. *}
 
 lemma mod_add_left_eq: "(a + b) mod c = (a mod c + b) mod c"
@@ -330,6 +342,25 @@
   unfolding diff_minus using assms
   by (intro mod_add_cong mod_minus_cong)
 
+lemma dvd_neg_div: "y dvd x \<Longrightarrow> -x div y = - (x div y)"
+apply (case_tac "y = 0") apply simp
+apply (auto simp add: dvd_def)
+apply (subgoal_tac "-(y * k) = y * - k")
+ apply (erule ssubst)
+ apply (erule div_mult_self1_is_id)
+apply simp
+done
+
+lemma dvd_div_neg: "y dvd x \<Longrightarrow> x div -y = - (x div y)"
+apply (case_tac "y = 0") apply simp
+apply (auto simp add: dvd_def)
+apply (subgoal_tac "y * k = -y * -k")
+ apply (erule ssubst)
+ apply (rule div_mult_self1_is_id)
+ apply simp
+apply simp
+done
+
 end
 
 
@@ -478,9 +509,9 @@
   from divmod_rel have divmod_m_n: "divmod_rel m n (m div n) (m mod n)" .
   with assms have m_div_n: "m div n \<ge> 1"
     by (cases "m div n") (auto simp add: divmod_rel_def)
-  from assms divmod_m_n have "divmod_rel (m - n) n (m div n - 1) (m mod n)"
+  from assms divmod_m_n have "divmod_rel (m - n) n (m div n - Suc 0) (m mod n)"
     by (cases "m div n") (auto simp add: divmod_rel_def)
-  with divmod_eq have "divmod (m - n) n = (m div n - 1, m mod n)" by simp
+  with divmod_eq have "divmod (m - n) n = (m div n - Suc 0, m mod n)" by simp
   moreover from divmod_div_mod have "divmod (m - n) n = ((m - n) div n, (m - n) mod n)" .
   ultimately have "m div n = Suc ((m - n) div n)"
     and "m mod n = (m - n) mod n" using m_div_n by simp_all
@@ -653,16 +684,6 @@
 apply (blast intro: divmod_rel [THEN divmod_rel_mult1_eq, THEN div_eq])
 done
 
-lemma mod_mult1_eq: "(a*b) mod c = a*(b mod c) mod (c::nat)"
-by (rule mod_mult_right_eq)
-
-lemma mod_mult1_eq': "(a*b) mod (c::nat) = ((a mod c) * b) mod c"
-by (rule mod_mult_left_eq)
-
-lemma mod_mult_distrib_mod:
-  "(a*b) mod (c::nat) = ((a mod c) * (b mod c)) mod c"
-by (rule mod_mult_eq)
-
 lemma divmod_rel_add1_eq:
   "[| divmod_rel a c aq ar; divmod_rel b c bq br;  c > 0 |]
    ==> divmod_rel (a + b) c (aq + bq + (ar+br) div c) ((ar + br) mod c)"
@@ -675,9 +696,6 @@
 apply (blast intro: divmod_rel_add1_eq [THEN div_eq] divmod_rel)
 done
 
-lemma mod_add1_eq: "(a+b) mod (c::nat) = (a mod c + b mod c) mod c"
-by (rule mod_add_eq)
-
 lemma mod_lemma: "[| (0::nat) < c; r < b |] ==> b * (q mod c) + r < b * c"
   apply (cut_tac m = q and n = c in mod_less_divisor)
   apply (drule_tac [2] m = "q mod c" in less_imp_Suc_add, auto)
@@ -795,12 +813,6 @@
 apply (auto simp add: Suc_diff_le le_mod_geq)
 done
 
-lemma nat_mod_div_trivial: "m mod n div n = (0 :: nat)"
-by simp
-
-lemma nat_mod_mod_trivial: "m mod n mod n = (m mod n :: nat)"
-by simp
-
 
 subsubsection {* The Divides Relation *}
 
@@ -810,6 +822,9 @@
 lemma dvd_1_iff_1 [simp]: "(m dvd Suc 0) = (m = Suc 0)"
 by (simp add: dvd_def)
 
+lemma nat_dvd_1_iff_1 [simp]: "m dvd (1::nat) \<longleftrightarrow> m = 1"
+by (simp add: dvd_def)
+
 lemma dvd_anti_sym: "[| m dvd n; n dvd m |] ==> m = (n::nat)"
   unfolding dvd_def
   by (force dest: mult_eq_self_implies_10 simp add: mult_assoc mult_eq_1_iff)
@@ -819,9 +834,9 @@
 interpretation dvd!: order "op dvd" "\<lambda>n m \<Colon> nat. n dvd m \<and> \<not> m dvd n"
   proof qed (auto intro: dvd_refl dvd_trans dvd_anti_sym)
 
-lemma dvd_diff: "[| k dvd m; k dvd n |] ==> k dvd (m-n :: nat)"
-  unfolding dvd_def
-  by (blast intro: diff_mult_distrib2 [symmetric])
+lemma nat_dvd_diff[simp]: "[| k dvd m; k dvd n |] ==> k dvd (m-n :: nat)"
+unfolding dvd_def
+by (blast intro: diff_mult_distrib2 [symmetric])
 
 lemma dvd_diffD: "[| k dvd m-n; k dvd n; n\<le>m |] ==> k dvd (m::nat)"
   apply (erule linorder_not_less [THEN iffD2, THEN add_diff_inverse, THEN subst])
@@ -829,7 +844,7 @@
   done
 
 lemma dvd_diffD1: "[| k dvd m-n; k dvd m; n\<le>m |] ==> k dvd (n::nat)"
-by (drule_tac m = m in dvd_diff, auto)
+by (drule_tac m = m in nat_dvd_diff, auto)
 
 lemma dvd_reduce: "(k dvd n + k) = (k dvd (n::nat))"
   apply (rule iffI)
@@ -838,7 +853,7 @@
   apply (subgoal_tac "n = (n+k) -k")
    prefer 2 apply simp
   apply (erule ssubst)
-  apply (erule dvd_diff)
+  apply (erule nat_dvd_diff)
   apply (rule dvd_refl)
   done
 
@@ -848,12 +863,6 @@
   apply (blast intro: mod_mult_distrib2 [symmetric])
   done
 
-lemma dvd_mod_imp_dvd: "[| (k::nat) dvd m mod n;  k dvd n |] ==> k dvd m"
-  apply (subgoal_tac "k dvd (m div n) *n + m mod n")
-   apply (simp add: mod_div_equality)
-  apply (simp only: dvd_add dvd_mult)
-  done
-
 lemma dvd_mod_iff: "k dvd n ==> ((k::nat) dvd m mod n) = (k dvd m)"
 by (blast intro: dvd_mod_imp_dvd dvd_mod)
 
@@ -889,21 +898,9 @@
   apply (simp only: dvd_eq_mod_eq_0)
   done
 
-lemma le_imp_power_dvd: "!!i::nat. m \<le> n ==> i^m dvd i^n"
-  apply (unfold dvd_def)
-  apply (erule linorder_not_less [THEN iffD2, THEN add_diff_inverse, THEN subst])
-  apply (simp add: power_add)
-  done
-
 lemma nat_zero_less_power_iff [simp]: "(x^n > 0) = (x > (0::nat) | n=0)"
   by (induct n) auto
 
-lemma power_le_dvd [rule_format]: "k^j dvd n --> i\<le>j --> k^i dvd (n::nat)"
-  apply (induct j)
-   apply (simp_all add: le_Suc_eq)
-  apply (blast dest!: dvd_mult_right)
-  done
-
 lemma power_dvd_imp_le: "[|i^m dvd i^n;  (1::nat) < i|] ==> m \<le> n"
   apply (rule power_le_imp_le_exp, assumption)
   apply (erule dvd_imp_le, simp)
--- a/src/HOL/Equiv_Relations.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Equiv_Relations.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -12,7 +12,7 @@
 
 locale equiv =
   fixes A and r
-  assumes refl: "refl A r"
+  assumes refl_on: "refl_on A r"
     and sym: "sym r"
     and trans: "trans r"
 
@@ -27,21 +27,21 @@
     "sym r ==> trans r ==> r\<inverse> O r \<subseteq> r"
   by (unfold trans_def sym_def converse_def) blast
 
-lemma refl_comp_subset: "refl A r ==> r \<subseteq> r\<inverse> O r"
-  by (unfold refl_def) blast
+lemma refl_on_comp_subset: "refl_on A r ==> r \<subseteq> r\<inverse> O r"
+  by (unfold refl_on_def) blast
 
 lemma equiv_comp_eq: "equiv A r ==> r\<inverse> O r = r"
   apply (unfold equiv_def)
   apply clarify
   apply (rule equalityI)
-   apply (iprover intro: sym_trans_comp_subset refl_comp_subset)+
+   apply (iprover intro: sym_trans_comp_subset refl_on_comp_subset)+
   done
 
 text {* Second half. *}
 
 lemma comp_equivI:
     "r\<inverse> O r = r ==> Domain r = A ==> equiv A r"
-  apply (unfold equiv_def refl_def sym_def trans_def)
+  apply (unfold equiv_def refl_on_def sym_def trans_def)
   apply (erule equalityE)
   apply (subgoal_tac "\<forall>x y. (x, y) \<in> r --> (y, x) \<in> r")
    apply fast
@@ -63,12 +63,12 @@
   done
 
 lemma equiv_class_self: "equiv A r ==> a \<in> A ==> a \<in> r``{a}"
-  by (unfold equiv_def refl_def) blast
+  by (unfold equiv_def refl_on_def) blast
 
 lemma subset_equiv_class:
     "equiv A r ==> r``{b} \<subseteq> r``{a} ==> b \<in> A ==> (a,b) \<in> r"
   -- {* lemma for the next result *}
-  by (unfold equiv_def refl_def) blast
+  by (unfold equiv_def refl_on_def) blast
 
 lemma eq_equiv_class:
     "r``{a} = r``{b} ==> equiv A r ==> b \<in> A ==> (a, b) \<in> r"
@@ -79,7 +79,7 @@
   by (unfold equiv_def trans_def sym_def) blast
 
 lemma equiv_type: "equiv A r ==> r \<subseteq> A \<times> A"
-  by (unfold equiv_def refl_def) blast
+  by (unfold equiv_def refl_on_def) blast
 
 theorem equiv_class_eq_iff:
   "equiv A r ==> ((x, y) \<in> r) = (r``{x} = r``{y} & x \<in> A & y \<in> A)"
@@ -103,7 +103,7 @@
   by (unfold quotient_def) blast
 
 lemma Union_quotient: "equiv A r ==> Union (A//r) = A"
-  by (unfold equiv_def refl_def quotient_def) blast
+  by (unfold equiv_def refl_on_def quotient_def) blast
 
 lemma quotient_disj:
   "equiv A r ==> X \<in> A//r ==> Y \<in> A//r ==> X = Y | (X \<inter> Y = {})"
@@ -228,7 +228,7 @@
 
 lemma congruent2_implies_congruent:
     "equiv A r1 ==> congruent2 r1 r2 f ==> a \<in> A ==> congruent r2 (f a)"
-  by (unfold congruent_def congruent2_def equiv_def refl_def) blast
+  by (unfold congruent_def congruent2_def equiv_def refl_on_def) blast
 
 lemma congruent2_implies_congruent_UN:
   "equiv A1 r1 ==> equiv A2 r2 ==> congruent2 r1 r2 f ==> a \<in> A2 ==>
@@ -237,7 +237,7 @@
   apply clarify
   apply (rule equiv_type [THEN subsetD, THEN SigmaE2], assumption+)
   apply (simp add: UN_equiv_class congruent2_implies_congruent)
-  apply (unfold congruent2_def equiv_def refl_def)
+  apply (unfold congruent2_def equiv_def refl_on_def)
   apply (blast del: equalityI)
   done
 
@@ -272,7 +272,7 @@
     ==> congruent2 r1 r2 f"
   -- {* Suggested by John Harrison -- the two subproofs may be *}
   -- {* \emph{much} simpler than the direct proof. *}
-  apply (unfold congruent2_def equiv_def refl_def)
+  apply (unfold congruent2_def equiv_def refl_on_def)
   apply clarify
   apply (blast intro: trans)
   done
--- a/src/HOL/Extraction/Euclid.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Extraction/Euclid.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -189,7 +189,7 @@
       assume pn: "p \<le> n"
       from `prime p` have "0 < p" by (rule prime_g_zero)
       then have "p dvd n!" using pn by (rule dvd_factorial)
-      with dvd have "p dvd ?k - n!" by (rule dvd_diff)
+      with dvd have "p dvd ?k - n!" by (rule nat_dvd_diff)
       then have "p dvd 1" by simp
       with prime show False using prime_nd_one by auto
     qed
--- a/src/HOL/Fact.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Fact.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -7,7 +7,7 @@
 header{*Factorial Function*}
 
 theory Fact
-imports Nat
+imports Main
 begin
 
 consts fact :: "nat => nat"
@@ -58,7 +58,7 @@
   "n < Suc m ==> fact (Suc m - n) = (Suc m - n) * fact (m - n)"
 apply (induct n arbitrary: m)
 apply auto
-apply (drule_tac x = "m - 1" in meta_spec, auto)
+apply (drule_tac x = "m - Suc 0" in meta_spec, auto)
 done
 
 lemma fact_num0: "fact 0 = 1"
--- a/src/HOL/GCD.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/GCD.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -60,9 +60,12 @@
 lemma gcd_non_0: "n > 0 \<Longrightarrow> gcd m n = gcd n (m mod n)"
   by simp
 
-lemma gcd_1 [simp, algebra]: "gcd m (Suc 0) = 1"
+lemma gcd_1 [simp, algebra]: "gcd m (Suc 0) = Suc 0"
   by simp
 
+lemma nat_gcd_1_right [simp, algebra]: "gcd m 1 = 1"
+  unfolding One_nat_def by (rule gcd_1)
+
 declare gcd.simps [simp del]
 
 text {*
@@ -116,9 +119,12 @@
   apply (blast intro: dvd_trans)
   done
 
-lemma gcd_1_left [simp, algebra]: "gcd (Suc 0) m = 1"
+lemma gcd_1_left [simp, algebra]: "gcd (Suc 0) m = Suc 0"
   by (simp add: gcd_commute)
 
+lemma nat_gcd_1_left [simp, algebra]: "gcd 1 m = 1"
+  unfolding One_nat_def by (rule gcd_1_left)
+
 text {*
   \medskip Multiplication laws
 *}
@@ -156,7 +162,6 @@
      apply (simp add: gcd_assoc)
      apply (simp add: gcd_commute)
     apply (simp_all add: mult_commute)
-  apply (blast intro: dvd_mult)
   done
 
 
@@ -404,7 +409,7 @@
   {fix x y assume H: "a * x - b * y = d \<or> b * x - a * y = d"
     have dv: "?g dvd a*x" "?g dvd b * y" "?g dvd b*x" "?g dvd a * y"
       using dvd_mult2[OF gcd_dvd1[of a b]] dvd_mult2[OF gcd_dvd2[of a b]] by simp_all
-    from dvd_diff[OF dv(1,2)] dvd_diff[OF dv(3,4)] H
+    from nat_dvd_diff[OF dv(1,2)] nat_dvd_diff[OF dv(3,4)] H
     have ?rhs by auto}
   ultimately show ?thesis by blast
 qed
@@ -597,8 +602,8 @@
   from h' have "int (nat \<bar>k\<bar>) = int (nat \<bar>i\<bar> * h')" by simp
   then have "\<bar>k\<bar> = \<bar>i\<bar> * int h'" by (simp add: int_mult)
   then show ?thesis
-    apply (subst zdvd_abs1 [symmetric])
-    apply (subst zdvd_abs2 [symmetric])
+    apply (subst abs_dvd_iff [symmetric])
+    apply (subst dvd_abs_iff [symmetric])
     apply (unfold dvd_def)
     apply (rule_tac x = "int h'" in exI, simp)
     done
@@ -614,11 +619,11 @@
   let ?m' = "nat \<bar>m\<bar>"
   let ?n' = "nat \<bar>n\<bar>"
   from `k dvd m` and `k dvd n` have dvd': "?k' dvd ?m'" "?k' dvd ?n'"
-    unfolding zdvd_int by (simp_all only: int_nat_abs zdvd_abs1 zdvd_abs2)
+    unfolding zdvd_int by (simp_all only: int_nat_abs abs_dvd_iff dvd_abs_iff)
   from gcd_greatest [OF dvd'] have "int (nat \<bar>k\<bar>) dvd zgcd m n"
     unfolding zgcd_def by (simp only: zdvd_int)
   then have "\<bar>k\<bar> dvd zgcd m n" by (simp only: int_nat_abs)
-  then show "k dvd zgcd m n" by (simp add: zdvd_abs1)
+  then show "k dvd zgcd m n" by simp
 qed
 
 lemma div_zgcd_relprime:
@@ -721,7 +726,7 @@
   assumes "k dvd i" shows "k dvd (zlcm i j)"
 proof -
   have "nat(abs k) dvd nat(abs i)" using `k dvd i`
-    by(simp add:int_dvd_iff[symmetric] dvd_int_iff[symmetric] zdvd_abs1)
+    by(simp add:int_dvd_iff[symmetric] dvd_int_iff[symmetric])
   thus ?thesis by(simp add:zlcm_def dvd_int_iff)(blast intro: dvd_trans)
 qed
 
@@ -729,7 +734,7 @@
   assumes "k dvd j" shows "k dvd (zlcm i j)"
 proof -
   have "nat(abs k) dvd nat(abs j)" using `k dvd j`
-    by(simp add:int_dvd_iff[symmetric] dvd_int_iff[symmetric] zdvd_abs1)
+    by(simp add:int_dvd_iff[symmetric] dvd_int_iff[symmetric])
   thus ?thesis by(simp add:zlcm_def dvd_int_iff)(blast intro: dvd_trans)
 qed
 
--- a/src/HOL/Groebner_Basis.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Groebner_Basis.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -147,7 +147,7 @@
 next show "pwr (mul x y) q = mul (pwr x q) (pwr y q)" by (rule pwr_mul)
 next show "pwr (pwr x p) q = pwr x (p * q)" by (rule pwr_pwr)
 next show "pwr x 0 = r1" using pwr_0 .
-next show "pwr x 1 = x" by (simp add: nat_number pwr_Suc pwr_0 mul_1 mul_c)
+next show "pwr x 1 = x" unfolding One_nat_def by (simp add: nat_number pwr_Suc pwr_0 mul_1 mul_c)
 next show "mul x (add y z) = add (mul x y) (mul x z)" using mul_d by simp
 next show "pwr x (Suc q) = mul x (pwr x q)" using pwr_Suc by simp
 next show "pwr x (2 * n) = mul (pwr x n) (pwr x n)" by (simp add: nat_number mul_pwr)
@@ -436,8 +436,8 @@
 *} "solve polynomial equations over (semi)rings and ideal membership problems using Groebner bases"
 declare dvd_def[algebra]
 declare dvd_eq_mod_eq_0[symmetric, algebra]
-declare nat_mod_div_trivial[algebra]
-declare nat_mod_mod_trivial[algebra]
+declare mod_div_trivial[algebra]
+declare mod_mod_trivial[algebra]
 declare conjunct1[OF DIVISION_BY_ZERO, algebra]
 declare conjunct2[OF DIVISION_BY_ZERO, algebra]
 declare zmod_zdiv_equality[symmetric,algebra]
@@ -448,16 +448,16 @@
 declare zmod_zminus2[algebra]
 declare zdiv_zero[algebra]
 declare zmod_zero[algebra]
-declare zmod_1[algebra]
-declare zdiv_1[algebra]
+declare mod_by_1[algebra]
+declare div_by_1[algebra]
 declare zmod_minus1_right[algebra]
 declare zdiv_minus1_right[algebra]
 declare mod_div_trivial[algebra]
 declare mod_mod_trivial[algebra]
-declare zmod_zmult_self1[algebra]
-declare zmod_zmult_self2[algebra]
+declare mod_mult_self2_is_0[algebra]
+declare mod_mult_self1_is_0[algebra]
 declare zmod_eq_0_iff[algebra]
-declare zdvd_0_left[algebra]
+declare dvd_0_left_iff[algebra]
 declare zdvd1_eq[algebra]
 declare zmod_eq_dvd_iff[algebra]
 declare nat_mod_eq_iff[algebra]
--- a/src/HOL/HOL.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/HOL.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -12,14 +12,15 @@
   "~~/src/Tools/IsaPlanner/isand.ML"
   "~~/src/Tools/IsaPlanner/rw_tools.ML"
   "~~/src/Tools/IsaPlanner/rw_inst.ML"
-  "~~/src/Provers/project_rule.ML"
+  "~~/src/Tools/intuitionistic.ML"
+  "~~/src/Tools/project_rule.ML"
   "~~/src/Provers/hypsubst.ML"
   "~~/src/Provers/splitter.ML"
   "~~/src/Provers/classical.ML"
   "~~/src/Provers/blast.ML"
   "~~/src/Provers/clasimp.ML"
-  "~~/src/Provers/coherent.ML"
-  "~~/src/Provers/eqsubst.ML"
+  "~~/src/Tools/coherent.ML"
+  "~~/src/Tools/eqsubst.ML"
   "~~/src/Provers/quantifier1.ML"
   ("Tools/simpdata.ML")
   "~~/src/Tools/random_word.ML"
@@ -28,7 +29,8 @@
   ("~~/src/Tools/induct_tacs.ML")
   "~~/src/Tools/value.ML"
   "~~/src/Tools/code/code_name.ML"
-  "~~/src/Tools/code/code_funcgr.ML"
+  "~~/src/Tools/code/code_funcgr.ML" (*formal dependency*)
+  "~~/src/Tools/code/code_wellsorted.ML" 
   "~~/src/Tools/code/code_thingol.ML"
   "~~/src/Tools/code/code_printer.ML"
   "~~/src/Tools/code/code_target.ML"
@@ -38,6 +40,9 @@
   ("Tools/recfun_codegen.ML")
 begin
 
+setup {* Intuitionistic.method_setup "iprover" *}
+
+
 subsection {* Primitive logic *}
 
 subsubsection {* Core syntax *}
@@ -290,7 +295,7 @@
 typed_print_translation {*
 let
   fun tr' c = (c, fn show_sorts => fn T => fn ts =>
-    if T = dummyT orelse not (! show_types) andalso can Term.dest_Type T then raise Match
+    if (not o null) ts orelse T = dummyT orelse not (! show_types) andalso can Term.dest_Type T then raise Match
     else Syntax.const Syntax.constrainC $ Syntax.const c $ Syntax.term_of_typ show_sorts T);
 in map tr' [@{const_syntax HOL.one}, @{const_syntax HOL.zero}] end;
 *} -- {* show types that are presumably too general *}
@@ -1704,11 +1709,6 @@
 subsection {* Nitpick theorem store *}
 
 ML {*
-structure Nitpick_Const_Def_Thms = NamedThmsFun
-(
-  val name = "nitpick_const_def"
-  val description = "pseudo-definition of constants as needed by Nitpick"
-)
 structure Nitpick_Const_Simp_Thms = NamedThmsFun
 (
   val name = "nitpick_const_simp"
@@ -1725,8 +1725,7 @@
   val description = "introduction rules for (co)inductive predicates as needed by Nitpick"
 )
 *}
-setup {* Nitpick_Const_Def_Thms.setup
-         #> Nitpick_Const_Simp_Thms.setup
+setup {* Nitpick_Const_Simp_Thms.setup
          #> Nitpick_Const_Psimp_Thms.setup
          #> Nitpick_Ind_Intro_Thms.setup *}
 
--- a/src/HOL/Hoare/Arith2.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Hoare/Arith2.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -42,12 +42,12 @@
 
 lemma cd_diff_l: "n<=m ==> cd x m n = cd x (m-n) n"
   apply (unfold cd_def)
-  apply (blast intro: dvd_diff dest: dvd_diffD)
+  apply (fastsimp dest: dvd_diffD)
   done
 
 lemma cd_diff_r: "m<=n ==> cd x m n = cd x m (n-m)"
   apply (unfold cd_def)
-  apply (blast intro: dvd_diff dest: dvd_diffD)
+  apply (fastsimp dest: dvd_diffD)
   done
 
 
--- a/src/HOL/Import/lazy_seq.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Import/lazy_seq.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,4 @@
 (*  Title:      HOL/Import/lazy_seq.ML
-    ID:         $Id$
     Author:     Sebastian Skalberg, TU Muenchen
 
 Alternative version of lazy sequences.
@@ -408,8 +407,8 @@
 	make (fn () => copy (f x))
     end
 
-fun EVERY fs = foldr (op THEN) succeed fs
-fun FIRST fs = foldr (op ORELSE) fail fs
+fun EVERY fs = List.foldr (op THEN) succeed fs
+fun FIRST fs = List.foldr (op ORELSE) fail fs
 
 fun TRY f x =
     make (fn () =>
--- a/src/HOL/Import/proof_kernel.ML	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Import/proof_kernel.ML	Wed Mar 04 10:45:52 2009 +0100
@@ -777,7 +777,7 @@
                 val (c,asl) = case terms of
                                   [] => raise ERR "x2p" "Bad oracle description"
                                 | (hd::tl) => (hd,tl)
-                val tg = foldr (fn (oracle,tg) => Tag.merge (Tag.read oracle) tg) Tag.empty_tag ors
+                val tg = List.foldr (fn (oracle,tg) => Tag.merge (Tag.read oracle) tg) Tag.empty_tag ors
             in
                 mk_proof (POracle(tg,map xml_to_term asl,xml_to_term c))
             end
@@ -1840,7 +1840,7 @@
                       | inst_type ty1 ty2 (ty as Type(name,tys)) =
                         Type(name,map (inst_type ty1 ty2) tys)
                 in
-                    foldr (fn (v,th) =>
+                    List.foldr (fn (v,th) =>
                               let
                                   val cdom = fst (dom_rng (fst (dom_rng cty)))
                                   val vty  = type_of v
@@ -1852,7 +1852,7 @@
                 end
               | SOME _ => raise ERR "GEN_ABS" "Bad constant"
               | NONE =>
-                foldr (fn (v,th) => mk_ABS v th thy) th vlist'
+                List.foldr (fn (v,th) => mk_ABS v th thy) th vlist'
         val res = HOLThm(rens_of info',th1)
         val _ = message "RESULT:"
         val _ = if_debug pth res
@@ -2020,7 +2020,7 @@
                                Sign.add_consts_i consts thy'
                            end
 
-            val thy1 = foldr (fn(name,thy)=>
+            val thy1 = List.foldr (fn(name,thy)=>
                                 snd (get_defname thyname name thy)) thy1 names
             fun new_name name = fst (get_defname thyname name thy1)
             val names' = map (fn name => (new_name name,name,false)) names
@@ -2041,7 +2041,7 @@
                      then quotename name
                      else (quotename newname) ^ ": " ^ (quotename name),thy')
                 end
-            val (new_names,thy') = foldr (fn(name,(names,thy)) =>
+            val (new_names,thy') = List.foldr (fn(name,(names,thy)) =>
                                             let
                                                 val (name',thy') = handle_const (name,thy)
                                             in
--- a/src/HOL/Induct/Common_Patterns.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Induct/Common_Patterns.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,4 @@
 (*  Title:      HOL/Induct/Common_Patterns.thy
-    ID:         $Id$
     Author:     Makarius
 *)
 
--- a/src/HOL/Induct/LList.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Induct/LList.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -8,7 +8,7 @@
 bounds on the amount of lookahead required.
 
 Could try (but would it work for the gfp analogue of term?)
-  LListD_Fun_def "LListD_Fun(A) == (%Z. diag({Numb(0)}) <++> diag(A) <**> Z)"
+  LListD_Fun_def "LListD_Fun(A) == (%Z. Id_on({Numb(0)}) <++> Id_on(A) <**> Z)"
 
 A nice but complex example would be [ML for the Working Programmer, page 176]
   from(1) = enumerate (Lmap (Lmap(pack), makeqq(from(1),from(1))))
@@ -95,7 +95,7 @@
   llistD_Fun :: "('a llist * 'a llist)set => ('a llist * 'a llist)set" where
     "llistD_Fun(r) =   
         prod_fun Abs_LList Abs_LList `         
-                LListD_Fun (diag(range Leaf))   
+                LListD_Fun (Id_on(range Leaf))   
                             (prod_fun Rep_LList Rep_LList ` r)"
 
 
@@ -265,12 +265,12 @@
 subsection{* @{text llist} equality as a @{text gfp}; the bisimulation principle *}
 
 text{*This theorem is actually used, unlike the many similar ones in ZF*}
-lemma LListD_unfold: "LListD r = dsum (diag {Numb 0}) (dprod r (LListD r))"
+lemma LListD_unfold: "LListD r = dsum (Id_on {Numb 0}) (dprod r (LListD r))"
   by (fast intro!: LListD.intros [unfolded NIL_def CONS_def]
            elim: LListD.cases [unfolded NIL_def CONS_def])
 
 lemma LListD_implies_ntrunc_equality [rule_format]:
-     "\<forall>M N. (M,N) \<in> LListD(diag A) --> ntrunc k M = ntrunc k N"
+     "\<forall>M N. (M,N) \<in> LListD(Id_on A) --> ntrunc k M = ntrunc k N"
 apply (induct_tac "k" rule: nat_less_induct) 
 apply (safe del: equalityI)
 apply (erule LListD.cases)
@@ -283,7 +283,7 @@
 
 text{*The domain of the @{text LListD} relation*}
 lemma Domain_LListD: 
-    "Domain (LListD(diag A)) \<subseteq> llist(A)"
+    "Domain (LListD(Id_on A)) \<subseteq> llist(A)"
 apply (rule subsetI)
 apply (erule llist.coinduct)
 apply (simp add: NIL_def CONS_def)
@@ -291,10 +291,10 @@
 done
 
 text{*This inclusion justifies the use of coinduction to show @{text "M = N"}*}
-lemma LListD_subset_diag: "LListD(diag A) \<subseteq> diag(llist(A))"
+lemma LListD_subset_Id_on: "LListD(Id_on A) \<subseteq> Id_on(llist(A))"
 apply (rule subsetI)
 apply (rule_tac p = x in PairE, safe)
-apply (rule diag_eqI)
+apply (rule Id_on_eqI)
 apply (rule LListD_implies_ntrunc_equality [THEN ntrunc_equality], assumption) 
 apply (erule DomainI [THEN Domain_LListD [THEN subsetD]])
 done
@@ -321,7 +321,7 @@
 by (simp add: LListD_Fun_def NIL_def)
 
 lemma LListD_Fun_CONS_I: 
-     "[| x\<in>A;  (M,N):s |] ==> (CONS x M, CONS x N) \<in> LListD_Fun (diag A) s"
+     "[| x\<in>A;  (M,N):s |] ==> (CONS x M, CONS x N) \<in> LListD_Fun (Id_on A) s"
 by (simp add: LListD_Fun_def CONS_def, blast)
 
 text{*Utilise the "strong" part, i.e. @{text "gfp(f)"}*}
@@ -335,24 +335,24 @@
 
 
 text{*This converse inclusion helps to strengthen @{text LList_equalityI}*}
-lemma diag_subset_LListD: "diag(llist(A)) \<subseteq> LListD(diag A)"
+lemma Id_on_subset_LListD: "Id_on(llist(A)) \<subseteq> LListD(Id_on A)"
 apply (rule subsetI)
 apply (erule LListD_coinduct)
 apply (rule subsetI)
-apply (erule diagE)
+apply (erule Id_onE)
 apply (erule ssubst)
 apply (erule llist.cases)
-apply (simp_all add: diagI LListD_Fun_NIL_I LListD_Fun_CONS_I)
+apply (simp_all add: Id_onI LListD_Fun_NIL_I LListD_Fun_CONS_I)
 done
 
-lemma LListD_eq_diag: "LListD(diag A) = diag(llist(A))"
-apply (rule equalityI LListD_subset_diag diag_subset_LListD)+
+lemma LListD_eq_Id_on: "LListD(Id_on A) = Id_on(llist(A))"
+apply (rule equalityI LListD_subset_Id_on Id_on_subset_LListD)+
 done
 
-lemma LListD_Fun_diag_I: "M \<in> llist(A) ==> (M,M) \<in> LListD_Fun (diag A) (X Un diag(llist(A)))"
-apply (rule LListD_eq_diag [THEN subst])
+lemma LListD_Fun_Id_on_I: "M \<in> llist(A) ==> (M,M) \<in> LListD_Fun (Id_on A) (X Un Id_on(llist(A)))"
+apply (rule LListD_eq_Id_on [THEN subst])
 apply (rule LListD_Fun_LListD_I)
-apply (simp add: LListD_eq_diag diagI)
+apply (simp add: LListD_eq_Id_on Id_onI)
 done
 
 
@@ -360,11 +360,11 @@
       [also admits true equality]
    Replace @{text A} by some particular set, like @{text "{x. True}"}??? *}
 lemma LList_equalityI:
-     "[| (M,N) \<in> r;  r \<subseteq> LListD_Fun (diag A) (r Un diag(llist(A))) |] 
+     "[| (M,N) \<in> r;  r \<subseteq> LListD_Fun (Id_on A) (r Un Id_on(llist(A))) |] 
       ==>  M=N"
-apply (rule LListD_subset_diag [THEN subsetD, THEN diagE])
+apply (rule LListD_subset_Id_on [THEN subsetD, THEN Id_onE])
 apply (erule LListD_coinduct)
-apply (simp add: LListD_eq_diag, safe)
+apply (simp add: LListD_eq_Id_on, safe)
 done
 
 
@@ -525,14 +525,14 @@
      f(NIL)=g(NIL);                                              
      !!x l. [| x\<in>A;  l \<in> llist(A) |] ==>                          
             (f(CONS x l),g(CONS x l)) \<in>                          
-                LListD_Fun (diag A) ((%u.(f(u),g(u)))`llist(A) Un   
-                                    diag(llist(A)))              
+                LListD_Fun (Id_on A) ((%u.(f(u),g(u)))`llist(A) Un   
+                                    Id_on(llist(A)))              
   |] ==> f(M) = g(M)"
 apply (rule LList_equalityI)
 apply (erule imageI)
 apply (rule image_subsetI)
 apply (erule_tac a=x in llist.cases)
-apply (erule ssubst, erule ssubst, erule LListD_Fun_diag_I, blast) 
+apply (erule ssubst, erule ssubst, erule LListD_Fun_Id_on_I, blast) 
 done
 
 
@@ -687,7 +687,7 @@
 
 lemma LListD_Fun_subset_Times_llist: 
     "r \<subseteq> (llist A) <*> (llist A) 
-     ==> LListD_Fun (diag A) r \<subseteq> (llist A) <*> (llist A)"
+     ==> LListD_Fun (Id_on A) r \<subseteq> (llist A) <*> (llist A)"
 by (auto simp add: LListD_Fun_def)
 
 lemma subset_Times_llist:
@@ -703,9 +703,9 @@
 apply (simp add: LListI [THEN Abs_LList_inverse])
 done
 
-lemma prod_fun_range_eq_diag:
+lemma prod_fun_range_eq_Id_on:
      "prod_fun Rep_LList  Rep_LList ` range(%x. (x, x)) =  
-      diag(llist(range Leaf))"
+      Id_on(llist(range Leaf))"
 apply (rule equalityI, blast) 
 apply (fast elim: LListI [THEN Abs_LList_inverse, THEN subst])
 done
@@ -730,10 +730,10 @@
 apply (rule image_compose [THEN subst])
 apply (rule prod_fun_compose [THEN subst])
 apply (subst image_Un)
-apply (subst prod_fun_range_eq_diag)
+apply (subst prod_fun_range_eq_Id_on)
 apply (rule LListD_Fun_subset_Times_llist [THEN prod_fun_lemma])
 apply (rule subset_Times_llist [THEN Un_least])
-apply (rule diag_subset_Times)
+apply (rule Id_on_subset_Times)
 done
 
 subsubsection{* Rules to prove the 2nd premise of @{text llist_equalityI} *}
@@ -755,8 +755,8 @@
 apply (rule Rep_LList_inverse [THEN subst])
 apply (rule prod_fun_imageI)
 apply (subst image_Un)
-apply (subst prod_fun_range_eq_diag)
-apply (rule Rep_LList [THEN LListD, THEN LListD_Fun_diag_I])
+apply (subst prod_fun_range_eq_Id_on)
+apply (rule Rep_LList [THEN LListD, THEN LListD_Fun_Id_on_I])
 done
 
 text{*A special case of @{text list_equality} for functions over lazy lists*}
--- a/src/HOL/Induct/QuoDataType.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Induct/QuoDataType.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -47,7 +47,7 @@
 
 theorem equiv_msgrel: "equiv UNIV msgrel"
 proof -
-  have "reflexive msgrel" by (simp add: refl_def msgrel_refl)
+  have "refl msgrel" by (simp add: refl_on_def msgrel_refl)
   moreover have "sym msgrel" by (simp add: sym_def, blast intro: msgrel.SYM)
   moreover have "trans msgrel" by (simp add: trans_def, blast intro: msgrel.TRANS)
   ultimately show ?thesis by (simp add: equiv_def)
--- a/src/HOL/Induct/QuoNestedDataType.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Induct/QuoNestedDataType.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -44,7 +44,7 @@
 
 theorem equiv_exprel: "equiv UNIV exprel"
 proof -
-  have "reflexive exprel" by (simp add: refl_def exprel_refl)
+  have "refl exprel" by (simp add: refl_on_def exprel_refl)
   moreover have "sym exprel" by (simp add: sym_def, blast intro: exprel.SYM)
   moreover have "trans exprel" by (simp add: trans_def, blast intro: exprel.TRANS)
   ultimately show ?thesis by (simp add: equiv_def)
--- a/src/HOL/Induct/SList.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Induct/SList.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,15 +1,10 @@
-(* *********************************************************************** *)
-(*                                                                         *)
-(* Title:      SList.thy (Extended List Theory)                            *)
-(* Based on:   $Id$      *)
-(* Author:     Lawrence C Paulson, Cambridge University Computer Laboratory*)
-(* Author:     B. Wolff, University of Bremen                              *)
-(* Purpose:    Enriched theory of lists                                    *)
-(*	       mutual indirect recursive data-types                        *)
-(*                                                                         *)
-(* *********************************************************************** *)
+(*  Title:      SList.thy
+    Author:     Lawrence C Paulson, Cambridge University Computer Laboratory
+    Author:     B. Wolff, University of Bremen
 
-(* Definition of type 'a list (strict lists) by a least fixed point
+Enriched theory of lists; mutual indirect recursive data-types.
+
+Definition of type 'a list (strict lists) by a least fixed point
 
 We use          list(A) == lfp(%Z. {NUMB(0)} <+> A <*> Z)
 and not         list    == lfp(%Z. {NUMB(0)} <+> range(Leaf) <*> Z)
@@ -24,6 +19,8 @@
 Tidied by lcp.  Still needs removal of nat_rec.
 *)
 
+header {* Extended List Theory (old) *}
+
 theory SList
 imports Sexp
 begin
@@ -79,12 +76,12 @@
 
 (*Declaring the abstract list constructors*)
 
-(*<*)no_translations
+no_translations
   "[x, xs]" == "x#[xs]"
   "[x]" == "x#[]"
-no_syntax
-  Nil :: "'a list"  ("[]")
-  Cons :: "'a \<Rightarrow> 'a list \<Rightarrow> 'a list"  (infixr "#" 65)(*>*)
+no_notation
+  Nil  ("[]") and
+  Cons (infixr "#" 65)
 
 definition
   Nil       :: "'a list"                               ("[]") where
@@ -149,8 +146,8 @@
   ttl       :: "'a list => 'a list" where
   "ttl xs   = list_rec xs [] (%x xs r. xs)"
 
-(*<*)no_syntax
-    member :: "'a \<Rightarrow> 'a list \<Rightarrow> bool" (infixl "mem" 55)(*>*)
+no_notation member  (infixl "mem" 55)
+
 definition
   member :: "['a, 'a list] => bool"    (infixl "mem" 55) where
   "x mem xs = list_rec xs False (%y ys r. if y=x then True else r)"
@@ -163,8 +160,8 @@
   map       :: "('a=>'b) => ('a list => 'b list)" where
   "map f xs = list_rec xs [] (%x l r. f(x)#r)"
 
-(*<*)no_syntax
-  "\<^const>List.append" :: "'a list => 'a list => 'a list" (infixr "@" 65)(*>*)
+no_notation append  (infixr "@" 65)
+
 definition
   append    :: "['a list, 'a list] => 'a list"   (infixr "@" 65) where
   "xs@ys = list_rec xs ys (%x l r. x#r)"
@@ -342,14 +339,14 @@
 
 
 lemma not_CONS_self: "N: list(A) ==> !M. N ~= CONS M N"
-by (erule list.induct, simp_all)
+apply (erule list.induct) apply simp_all done
 
 lemma not_Cons_self2: "\<forall>x. l ~= x#l"
-by (induct_tac "l" rule: list_induct, simp_all)
+by (induct l rule: list_induct) simp_all
 
 
 lemma neq_Nil_conv2: "(xs ~= []) = (\<exists>y ys. xs = y#ys)"
-by (induct_tac "xs" rule: list_induct, auto)
+by (induct xs rule: list_induct) auto
 
 (** Conversion rules for List_case: case analysis operator **)
 
@@ -491,7 +488,7 @@
 
 lemma expand_list_case: 
  "P(list_case a f xs) = ((xs=[] --> P a ) & (!y ys. xs=y#ys --> P(f y ys)))"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 
 (**** Function definitions ****)
@@ -533,41 +530,44 @@
 (** @ - append **)
 
 lemma append_assoc [simp]: "(xs@ys)@zs = xs@(ys@zs)"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma append_Nil2 [simp]: "xs @ [] = xs"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 (** mem **)
 
 lemma mem_append [simp]: "x mem (xs@ys) = (x mem xs | x mem ys)"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma mem_filter [simp]: "x mem [x\<leftarrow>xs. P x ] = (x mem xs & P(x))"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 (** list_all **)
 
 lemma list_all_True [simp]: "(Alls x:xs. True) = True"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma list_all_conj [simp]:
      "list_all p (xs@ys) = ((list_all p xs) & (list_all p ys))"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma list_all_mem_conv: "(Alls x:xs. P(x)) = (!x. x mem xs --> P(x))"
-apply (induct_tac "xs" rule: list_induct, simp_all)
+apply (induct xs rule: list_induct)
+apply simp_all
 apply blast 
 done
 
 lemma nat_case_dist : "(! n. P n) = (P 0 & (! n. P (Suc n)))"
 apply auto
-apply (induct_tac "n", auto)
+apply (induct_tac n)
+apply auto
 done
 
 
 lemma alls_P_eq_P_nth: "(Alls u:A. P u) = (!n. n < length A --> P(nth n A))"
-apply (induct_tac "A" rule: list_induct, simp_all)
+apply (induct_tac A rule: list_induct)
+apply simp_all
 apply (rule trans)
 apply (rule_tac [2] nat_case_dist [symmetric], simp_all)
 done
@@ -583,7 +583,7 @@
 lemma Abs_Rep_map: 
      "(!!x. f(x): sexp) ==>  
         Abs_map g (Rep_map f xs) = map (%t. g(f(t))) xs"
-apply (induct_tac "xs" rule: list_induct)
+apply (induct xs rule: list_induct)
 apply (simp_all add: Rep_map_type list_sexp [THEN subsetD])
 done
 
@@ -591,24 +591,25 @@
 (** Additional mapping lemmas **)
 
 lemma map_ident [simp]: "map(%x. x)(xs) = xs"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma map_append [simp]: "map f (xs@ys) = map f xs  @ map f ys"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma map_compose: "map(f o g)(xs) = map f (map g xs)"
 apply (simp add: o_def)
-apply (induct_tac "xs" rule: list_induct, simp_all)
+apply (induct xs rule: list_induct)
+apply simp_all
 done
 
 
 lemma mem_map_aux1 [rule_format]:
      "x mem (map f q) --> (\<exists>y. y mem q & x = f y)"
-by (induct_tac "q" rule: list_induct, simp_all, blast)
+by (induct q rule: list_induct) auto
 
 lemma mem_map_aux2 [rule_format]: 
      "(\<exists>y. y mem q & x = f y) --> x mem (map f q)"
-by (induct_tac "q" rule: list_induct, auto)
+by (induct q rule: list_induct) auto
 
 lemma mem_map: "x mem (map f q) = (\<exists>y. y mem q & x = f y)"
 apply (rule iffI)
@@ -617,10 +618,10 @@
 done
 
 lemma hd_append [rule_format]: "A ~= [] --> hd(A @ B) = hd(A)"
-by (induct_tac "A" rule: list_induct, auto)
+by (induct A rule: list_induct) auto
 
 lemma tl_append [rule_format]: "A ~= [] --> tl(A @ B) = tl(A) @ B"
-by (induct_tac "A" rule: list_induct, auto)
+by (induct A rule: list_induct) auto
 
 
 (** take **)
@@ -638,8 +639,8 @@
 by (simp add: drop_def)
 
 lemma drop_Suc1 [simp]: "drop [] (Suc x) = []"
-apply (simp add: drop_def)
-apply (induct_tac "x", auto) 
+apply (induct x) 
+apply (simp_all add: drop_def)
 done
 
 lemma drop_Suc2 [simp]: "drop(a#xs)(Suc x) = drop xs x"
@@ -698,9 +699,7 @@
 
 
 lemma zipWith_Cons_Nil [simp]: "zipWith f (x,[])  = []"
-apply (simp add: zipWith_def)
-apply (induct_tac "x" rule: list_induct, simp_all)
-done
+by (induct x rule: list_induct) (simp_all add: zipWith_def)
 
 
 lemma zipWith_Nil_Cons [simp]: "zipWith f ([],x) = []"
@@ -722,23 +721,23 @@
 done
 
 lemma map_flat: "map f (flat S) = flat(map (map f) S)"
-by (induct_tac "S" rule: list_induct, simp_all)
+by (induct S rule: list_induct) simp_all
 
 lemma list_all_map_eq: "(Alls u:xs. f(u) = g(u)) --> map f xs = map g xs"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma filter_map_d: "filter p (map f xs) = map f (filter(p o f)(xs))"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma filter_compose: "filter p (filter q xs) = filter(%x. p x & q x) xs"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 (* "filter(p, filter(q,xs)) = filter(q, filter(p,xs))",
    "filter(p, filter(p,xs)) = filter(p,xs)" BIRD's thms.*)
  
 lemma filter_append [rule_format, simp]:
      "\<forall>B. filter p (A @ B) = (filter p A @ filter p B)"
-by (induct_tac "A" rule: list_induct, simp_all)
+by (induct A rule: list_induct) simp_all
 
 
 (* inits(xs) == map(fst,splits(xs)), 
@@ -749,44 +748,50 @@
    x mem xs & y mem ys = <x,y> mem diag(xs,ys) *)
 
 lemma length_append: "length(xs@ys) = length(xs)+length(ys)"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 lemma length_map: "length(map f xs) = length(xs)"
-by (induct_tac "xs" rule: list_induct, simp_all)
+by (induct xs rule: list_induct) simp_all
 
 
 lemma take_Nil [simp]: "take [] n = []"
-by (induct_tac "n", simp_all)
+by (induct n) simp_all
 
 lemma take_take_eq [simp]: "\<forall>n. take (take xs n) n = take xs n"
-apply (induct_tac "xs" rule: list_induct, simp_all)
+apply (induct xs rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "n", auto)
+apply (induct_tac n)
+apply auto
 done
 
 lemma take_take_Suc_eq1 [rule_format]:
      "\<forall>n. take (take xs(Suc(n+m))) n = take xs n"
-apply (induct_tac "xs" rule: list_induct, simp_all)
+apply (induct_tac xs rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "n", auto)
+apply (induct_tac n)
+apply auto
 done
 
 declare take_Suc [simp del]
 
 lemma take_take_1: "take (take xs (n+m)) n = take xs n"
-apply (induct_tac "m")
+apply (induct m)
 apply (simp_all add: take_take_Suc_eq1)
 done
 
 lemma take_take_Suc_eq2 [rule_format]:
      "\<forall>n. take (take xs n)(Suc(n+m)) = take xs n"
-apply (induct_tac "xs" rule: list_induct, simp_all)
+apply (induct_tac xs rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "n", auto)
+apply (induct_tac n)
+apply auto
 done
 
 lemma take_take_2: "take(take xs n)(n+m) = take xs n"
-apply (induct_tac "m")
+apply (induct m)
 apply (simp_all add: take_take_Suc_eq2)
 done
 
@@ -794,29 +799,33 @@
 (* length(drop(xs,n)) = length(xs) - n *)
 
 lemma drop_Nil [simp]: "drop  [] n  = []"
-by (induct_tac "n", auto)
+by (induct n) auto
 
 lemma drop_drop [rule_format]: "\<forall>xs. drop (drop xs m) n = drop xs(m+n)"
-apply (induct_tac "m", auto) 
-apply (induct_tac "xs" rule: list_induct, auto) 
+apply (induct_tac m)
+apply auto
+apply (induct_tac xs rule: list_induct)
+apply auto
 done
 
 lemma take_drop [rule_format]: "\<forall>xs. (take xs n) @ (drop xs n) = xs"
-apply (induct_tac "n", auto) 
-apply (induct_tac "xs" rule: list_induct, auto) 
+apply (induct_tac n)
+apply auto
+apply (induct_tac xs rule: list_induct)
+apply auto
 done
 
 lemma copy_copy: "copy x n @ copy x m = copy x (n+m)"
-by (induct_tac "n", auto)
+by (induct n) auto
 
 lemma length_copy: "length(copy x n)  = n"
-by (induct_tac "n", auto)
+by (induct n) auto
 
 lemma length_take [rule_format, simp]:
      "\<forall>xs. length(take xs n) = min (length xs) n"
-apply (induct_tac "n")
+apply (induct n)
  apply auto
-apply (induct_tac "xs" rule: list_induct)
+apply (induct_tac xs rule: list_induct)
  apply auto
 done
 
@@ -824,85 +833,93 @@
 by (simp only: length_append [symmetric] take_drop)
 
 lemma take_append [rule_format]: "\<forall>A. length(A) = n --> take(A@B) n = A"
-apply (induct_tac "n")
+apply (induct n)
 apply (rule allI)
 apply (rule_tac [2] allI)
-apply (induct_tac "A" rule: list_induct)
-apply (induct_tac [3] "A" rule: list_induct, simp_all)
+apply (induct_tac A rule: list_induct)
+apply (induct_tac [3] A rule: list_induct, simp_all)
 done
 
 lemma take_append2 [rule_format]:
      "\<forall>A. length(A) = n --> take(A@B) (n+k) = A @ take B k"
-apply (induct_tac "n")
+apply (induct n)
 apply (rule allI)
 apply (rule_tac [2] allI)
-apply (induct_tac "A" rule: list_induct)
-apply (induct_tac [3] "A" rule: list_induct, simp_all)
+apply (induct_tac A rule: list_induct)
+apply (induct_tac [3] A rule: list_induct, simp_all)
 done
 
 lemma take_map [rule_format]: "\<forall>n. take (map f A) n = map f (take A n)"
-apply (induct_tac "A" rule: list_induct, simp_all)
+apply (induct A rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "n", simp_all)
+apply (induct_tac n)
+apply simp_all
 done
 
 lemma drop_append [rule_format]: "\<forall>A. length(A) = n --> drop(A@B)n = B"
-apply (induct_tac "n")
+apply (induct n)
 apply (rule allI)
 apply (rule_tac [2] allI)
-apply (induct_tac "A" rule: list_induct)
-apply (induct_tac [3] "A" rule: list_induct, simp_all)
+apply (induct_tac A rule: list_induct)
+apply (induct_tac [3] A rule: list_induct)
+apply simp_all
 done
 
 lemma drop_append2 [rule_format]:
      "\<forall>A. length(A) = n --> drop(A@B)(n+k) = drop B k"
-apply (induct_tac "n")
+apply (induct n)
 apply (rule allI)
 apply (rule_tac [2] allI)
-apply (induct_tac "A" rule: list_induct)
-apply (induct_tac [3] "A" rule: list_induct, simp_all)
+apply (induct_tac A rule: list_induct)
+apply (induct_tac [3] A rule: list_induct)
+apply simp_all
 done
 
 
 lemma drop_all [rule_format]: "\<forall>A. length(A) = n --> drop A n = []"
-apply (induct_tac "n")
+apply (induct n)
 apply (rule allI)
 apply (rule_tac [2] allI)
-apply (induct_tac "A" rule: list_induct)
-apply (induct_tac [3] "A" rule: list_induct, auto)
+apply (induct_tac A rule: list_induct)
+apply (induct_tac [3] A rule: list_induct)
+apply auto
 done
 
 lemma drop_map [rule_format]: "\<forall>n. drop (map f A) n = map f (drop A n)"
-apply (induct_tac "A" rule: list_induct, simp_all)
+apply (induct A rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "n", simp_all)
+apply (induct_tac n)
+apply simp_all
 done
 
 lemma take_all [rule_format]: "\<forall>A. length(A) = n --> take A n = A"
-apply (induct_tac "n")
+apply (induct n)
 apply (rule allI)
 apply (rule_tac [2] allI)
-apply (induct_tac "A" rule: list_induct)
-apply (induct_tac [3] "A" rule: list_induct, auto) 
+apply (induct_tac A rule: list_induct)
+apply (induct_tac [3] A rule: list_induct)
+apply auto
 done
 
 lemma foldl_single: "foldl f a [b] = f a b"
 by simp_all
 
-lemma foldl_append [rule_format, simp]:
-     "\<forall>a. foldl f a (A @ B) = foldl f (foldl f a A) B"
-by (induct_tac "A" rule: list_induct, simp_all)
+lemma foldl_append [simp]:
+  "\<And>a. foldl f a (A @ B) = foldl f (foldl f a A) B"
+by (induct A rule: list_induct) simp_all
 
-lemma foldl_map [rule_format]:
-     "\<forall>e. foldl f e (map g S) = foldl (%x y. f x (g y)) e S"
-by (induct_tac "S" rule: list_induct, simp_all)
+lemma foldl_map:
+  "\<And>e. foldl f e (map g S) = foldl (%x y. f x (g y)) e S"
+by (induct S rule: list_induct) simp_all
 
 lemma foldl_neutr_distr [rule_format]:
   assumes r_neutr: "\<forall>a. f a e = a" 
       and r_neutl: "\<forall>a. f e a = a"
       and assoc:   "\<forall>a b c. f a (f b c) = f(f a b) c"
   shows "\<forall>y. f y (foldl f e A) = foldl f y A"
-apply (induct_tac "A" rule: list_induct)
+apply (induct A rule: list_induct)
 apply (simp_all add: r_neutr r_neutl, clarify) 
 apply (erule all_dupE) 
 apply (rule trans) 
@@ -923,95 +940,98 @@
 
 lemma foldr_append [rule_format, simp]:
      "\<forall>a. foldr f a (A @ B) = foldr f (foldr f a B) A"
-apply (induct_tac "A" rule: list_induct, simp_all)
-done
+by (induct A rule: list_induct) simp_all
 
 
-lemma foldr_map [rule_format]: "\<forall>e. foldr f e (map g S) = foldr (f o g) e S"
-apply (simp add: o_def)
-apply (induct_tac "S" rule: list_induct, simp_all)
-done
+lemma foldr_map: "\<And>e. foldr f e (map g S) = foldr (f o g) e S"
+by (induct S rule: list_induct) (simp_all add: o_def)
 
 lemma foldr_Un_eq_UN: "foldr op Un {} S = (UN X: {t. t mem S}.X)"
-by (induct_tac "S" rule: list_induct, auto)
+by (induct S rule: list_induct) auto
 
 lemma foldr_neutr_distr:
      "[| !a. f e a = a; !a b c. f a (f b c) = f(f a b) c |]    
       ==> foldr f y S = f (foldr f e S) y"
-by (induct_tac "S" rule: list_induct, auto)
+by (induct S rule: list_induct) auto
 
 lemma foldr_append2: 
     "[| !a. f e a = a; !a b c. f a (f b c) = f(f a b) c |]
      ==> foldr f e (A @ B) = f (foldr f e A) (foldr f e B)"
 apply auto
-apply (rule foldr_neutr_distr, auto)
+apply (rule foldr_neutr_distr)
+apply auto
 done
 
 lemma foldr_flat: 
     "[| !a. f e a = a; !a b c. f a (f b c) = f(f a b) c |] ==>  
       foldr f e (flat S) = (foldr f e)(map (foldr f e) S)"
-apply (induct_tac "S" rule: list_induct)
+apply (induct S rule: list_induct)
 apply (simp_all del: foldr_append add: foldr_append2)
 done
 
 
 lemma list_all_map: "(Alls x:map f xs .P(x)) = (Alls x:xs.(P o f)(x))"
-by (induct_tac "xs" rule: list_induct, auto)
+by (induct xs rule: list_induct) auto
 
 lemma list_all_and: 
      "(Alls x:xs. P(x)&Q(x)) = ((Alls x:xs. P(x))&(Alls x:xs. Q(x)))"
-by (induct_tac "xs" rule: list_induct, auto)
+by (induct xs rule: list_induct) auto
 
 
 lemma nth_map [rule_format]:
      "\<forall>i. i < length(A)  --> nth i (map f A) = f(nth i A)"
-apply (induct_tac "A" rule: list_induct, simp_all)
+apply (induct A rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "i", auto) 
+apply (induct_tac i)
+apply auto
 done
 
 lemma nth_app_cancel_right [rule_format]:
      "\<forall>i. i < length(A)  --> nth i(A@B) = nth i A"
-apply (induct_tac "A" rule: list_induct, simp_all)
+apply (induct A rule: list_induct)
+apply simp_all
 apply (rule allI)
-apply (induct_tac "i", simp_all)
+apply (induct_tac i)
+apply simp_all
 done
 
 lemma nth_app_cancel_left [rule_format]:
      "\<forall>n. n = length(A) --> nth(n+i)(A@B) = nth i B"
-by (induct_tac "A" rule: list_induct, simp_all)
+by (induct A rule: list_induct) simp_all
 
 
 (** flat **)
 
 lemma flat_append [simp]: "flat(xs@ys) = flat(xs) @ flat(ys)"
-by (induct_tac "xs" rule: list_induct, auto)
+by (induct xs rule: list_induct) auto
 
 lemma filter_flat: "filter p (flat S) = flat(map (filter p) S)"
-by (induct_tac "S" rule: list_induct, auto)
+by (induct S rule: list_induct) auto
 
 
 (** rev **)
 
 lemma rev_append [simp]: "rev(xs@ys) = rev(ys) @ rev(xs)"
-by (induct_tac "xs" rule: list_induct, auto)
+by (induct xs rule: list_induct) auto
 
 lemma rev_rev_ident [simp]: "rev(rev l) = l"
-by (induct_tac "l" rule: list_induct, auto)
+by (induct l rule: list_induct) auto
 
 lemma rev_flat: "rev(flat ls) = flat (map rev (rev ls))"
-by (induct_tac "ls" rule: list_induct, auto)
+by (induct ls rule: list_induct) auto
 
 lemma rev_map_distrib: "rev(map f l) = map f (rev l)"
-by (induct_tac "l" rule: list_induct, auto)
+by (induct l rule: list_induct) auto
 
 lemma foldl_rev: "foldl f b (rev l) = foldr (%x y. f y x) b l"
-by (induct_tac "l" rule: list_induct, auto)
+by (induct l rule: list_induct) auto
 
 lemma foldr_rev: "foldr f b (rev l) = foldl (%x y. f y x) b l"
 apply (rule sym)
 apply (rule trans)
-apply (rule_tac [2] foldl_rev, simp)
+apply (rule_tac [2] foldl_rev)
+apply simp
 done
 
 end
--- a/src/HOL/Int.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Int.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -77,7 +77,7 @@
 by (simp add: intrel_def)
 
 lemma equiv_intrel: "equiv UNIV intrel"
-by (simp add: intrel_def equiv_def refl_def sym_def trans_def)
+by (simp add: intrel_def equiv_def refl_on_def sym_def trans_def)
 
 text{*Reduces equality of equivalence classes to the @{term intrel} relation:
   @{term "(intrel `` {x} = intrel `` {y}) = ((x,y) \<in> intrel)"} *}
@@ -832,8 +832,8 @@
                              le_imp_0_less [THEN order_less_imp_le])  
 next
   case (neg n)
-  thus ?thesis by (simp del: of_nat_Suc of_nat_add
-    add: algebra_simps of_nat_1 [symmetric] of_nat_add [symmetric])
+  thus ?thesis by (simp del: of_nat_Suc of_nat_add of_nat_1
+    add: algebra_simps of_nat_1 [where 'a=int, symmetric] of_nat_add [symmetric])
 qed
 
 lemma bin_less_0_simps:
@@ -1165,8 +1165,8 @@
                              le_imp_0_less [THEN order_less_imp_le])  
 next
   case (neg n)
-  thus ?thesis by (simp del: of_nat_Suc of_nat_add
-    add: algebra_simps of_nat_1 [symmetric] of_nat_add [symmetric])
+  thus ?thesis by (simp del: of_nat_Suc of_nat_add of_nat_1
+    add: algebra_simps of_nat_1 [where 'a=int, symmetric] of_nat_add [symmetric])
 qed
 
 text {* Less-Than or Equals *}
@@ -1547,7 +1547,7 @@
      "abs(-1 ^ n) = (1::'a::{ordered_idom,number_ring,recpower})"
 by (simp add: power_abs)
 
-lemma of_int_number_of_eq:
+lemma of_int_number_of_eq [simp]:
      "of_int (number_of v) = (number_of v :: 'a :: number_ring)"
 by (simp add: number_of_eq) 
 
@@ -1785,11 +1785,12 @@
 lemma int_val_lemma:
      "(\<forall>i<n::nat. abs(f(i+1) - f i) \<le> 1) -->  
       f 0 \<le> k --> k \<le> f n --> (\<exists>i \<le> n. f i = (k::int))"
+unfolding One_nat_def
 apply (induct n, simp)
 apply (intro strip)
 apply (erule impE, simp)
 apply (erule_tac x = n in allE, simp)
-apply (case_tac "k = f (n+1) ")
+apply (case_tac "k = f (Suc n)")
 apply force
 apply (erule impE)
  apply (simp add: abs_if split add: split_if_asm)
@@ -1803,6 +1804,7 @@
          f m \<le> k; k \<le> f n |] ==> ? i. m \<le> i & i \<le> n & f i = (k::int)"
 apply (cut_tac n = "n-m" and f = "%i. f (i+m) " and k = k 
        in int_val_lemma)
+unfolding One_nat_def
 apply simp
 apply (erule exE)
 apply (rule_tac x = "i+m" in exI, arith)
--- a/src/HOL/IntDiv.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/IntDiv.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -547,34 +547,6 @@
 simproc_setup binary_int_mod ("number_of m mod number_of n :: int") =
   {* K (divmod_proc (@{thm divmod_rel_mod_eq})) *}
 
-(* The following 8 lemmas are made unnecessary by the above simprocs: *)
-
-lemmas div_pos_pos_number_of =
-    div_pos_pos [of "number_of v" "number_of w", standard]
-
-lemmas div_neg_pos_number_of =
-    div_neg_pos [of "number_of v" "number_of w", standard]
-
-lemmas div_pos_neg_number_of =
-    div_pos_neg [of "number_of v" "number_of w", standard]
-
-lemmas div_neg_neg_number_of =
-    div_neg_neg [of "number_of v" "number_of w", standard]
-
-
-lemmas mod_pos_pos_number_of =
-    mod_pos_pos [of "number_of v" "number_of w", standard]
-
-lemmas mod_neg_pos_number_of =
-    mod_neg_pos [of "number_of v" "number_of w", standard]
-
-lemmas mod_pos_neg_number_of =
-    mod_pos_neg [of "number_of v" "number_of w", standard]
-
-lemmas mod_neg_neg_number_of =
-    mod_neg_neg [of "number_of v" "number_of w", standard]
-
-
 lemmas posDivAlg_eqn_number_of [simp] =
     posDivAlg_eqn [of "number_of v" "number_of w", standard]
 
@@ -584,15 +556,6 @@
 
 text{*Special-case simplification *}
 
-lemma zmod_1 [simp]: "a mod (1::int) = 0"
-apply (cut_tac a = a and b = 1 in pos_mod_sign)
-apply (cut_tac [2] a = a and b = 1 in pos_mod_bound)
-apply (auto simp del:pos_mod_bound pos_mod_sign)
-done
-
-lemma zdiv_1 [simp]: "a div (1::int) = a"
-by (cut_tac a = a and b = 1 in zmod_zdiv_equality, auto)
-
 lemma zmod_minus1_right [simp]: "a mod (-1::int) = 0"
 apply (cut_tac a = a and b = "-1" in neg_mod_sign)
 apply (cut_tac [2] a = a and b = "-1" in neg_mod_bound)
@@ -726,9 +689,6 @@
 apply (blast intro: divmod_rel_div_mod [THEN zmult1_lemma, THEN divmod_rel_mod])
 done
 
-lemma zdiv_zmult_self1 [simp]: "b \<noteq> (0::int) ==> (a*b) div b = a"
-by (simp add: zdiv_zmult1_eq)
-
 lemma zmod_zdiv_trivial: "(a mod b) div b = (0::int)"
 apply (case_tac "b = 0", simp)
 apply (auto simp add: linorder_neq_iff div_pos_pos_trivial div_neg_neg_trivial)
@@ -754,7 +714,7 @@
   assume not0: "b \<noteq> 0"
   show "(a + c * b) div b = c + a div b"
     unfolding zdiv_zadd1_eq [of a "c * b"] using not0 
-      by (simp add: zmod_zmult1_eq zmod_zdiv_trivial)
+      by (simp add: zmod_zmult1_eq zmod_zdiv_trivial zdiv_zmult1_eq)
 qed auto
 
 lemma posDivAlg_div_mod:
@@ -784,41 +744,12 @@
   show ?thesis by simp
 qed
 
-lemma zdiv_zadd_self1: "a \<noteq> (0::int) ==> (a+b) div a = b div a + 1"
-by (rule div_add_self1) (* already declared [simp] *)
-
-lemma zdiv_zadd_self2: "a \<noteq> (0::int) ==> (b+a) div a = b div a + 1"
-by (rule div_add_self2) (* already declared [simp] *)
-
-lemma zdiv_zmult_self2: "b \<noteq> (0::int) ==> (b*a) div b = a"
-by (rule div_mult_self1_is_id) (* already declared [simp] *)
-
-lemma zmod_zmult_self1: "(a*b) mod b = (0::int)"
-by (rule mod_mult_self2_is_0) (* already declared [simp] *)
-
-lemma zmod_zmult_self2: "(b*a) mod b = (0::int)"
-by (rule mod_mult_self1_is_0) (* already declared [simp] *)
-
 lemma zmod_eq_0_iff: "(m mod d = 0) = (EX q::int. m = d*q)"
 by (simp add: dvd_eq_mod_eq_0 [symmetric] dvd_def)
 
 (* REVISIT: should this be generalized to all semiring_div types? *)
 lemmas zmod_eq_0D [dest!] = zmod_eq_0_iff [THEN iffD1]
 
-lemma zmod_zadd_left_eq: "(a+b) mod (c::int) = ((a mod c) + b) mod c"
-by (rule mod_add_left_eq)
-
-lemma zmod_zadd_right_eq: "(a+b) mod (c::int) = (a + (b mod c)) mod c"
-by (rule mod_add_right_eq)
-
-lemma zmod_zadd_self1: "(a+b) mod a = b mod (a::int)"
-by (rule mod_add_self1) (* already declared [simp] *)
-
-lemma zmod_zadd_self2: "(b+a) mod a = b mod (a::int)"
-by (rule mod_add_self2) (* already declared [simp] *)
-
-lemma zmod_zdiff1_eq: "(a - b) mod c = (a mod c - b mod c) mod (c::int)"
-by (rule mod_diff_eq)
 
 subsection{*Proving  @{term "a div (b*c) = (a div b) div c"} *}
 
@@ -902,13 +833,6 @@
   "(k*m) div (k*n) = (if k = (0::int) then 0 else m div n)"
 by (simp add:zdiv_zmult_zmult1)
 
-(*
-lemma zdiv_zmult_zmult2: "c \<noteq> (0::int) ==> (a*c) div (b*c) = a div b"
-apply (drule zdiv_zmult_zmult1)
-apply (auto simp add: mult_commute)
-done
-*)
-
 
 subsection{*Distribution of Factors over mod*}
 
@@ -933,9 +857,6 @@
 apply (auto simp add: mult_commute)
 done
 
-lemma zmod_zmod_cancel: "n dvd m \<Longrightarrow> (k::int) mod m mod n = k mod n"
-by (rule mod_mod_cancel)
-
 
 subsection {*Splitting Rules for div and mod*}
 
@@ -1070,7 +991,7 @@
 apply (subgoal_tac "(1 + 2* (-b - 1)) mod (2* (-a)) = 
                     1 + 2* ((-b - 1) mod (-a))")
 apply (rule_tac [2] pos_zmod_mult_2)
-apply (auto simp add: minus_mult_right [symmetric] right_diff_distrib)
+apply (auto simp add: right_diff_distrib)
 apply (subgoal_tac " (-1 - (2 * b)) = - (1 + (2 * b))")
  prefer 2 apply simp 
 apply (simp only: zmod_zminus_zminus diff_minus minus_add_distrib [symmetric])
@@ -1132,38 +1053,8 @@
 
 subsection {* The Divides Relation *}
 
-lemma zdvd_iff_zmod_eq_0: "(m dvd n) = (n mod m = (0::int))"
-  by (rule dvd_eq_mod_eq_0)
-
 lemmas zdvd_iff_zmod_eq_0_number_of [simp] =
-  zdvd_iff_zmod_eq_0 [of "number_of x" "number_of y", standard]
-
-lemma zdvd_0_right: "(m::int) dvd 0"
-  by (rule dvd_0_right) (* already declared [iff] *)
-
-lemma zdvd_0_left: "(0 dvd (m::int)) = (m = 0)"
-  by (rule dvd_0_left_iff) (* already declared [noatp,simp] *)
-
-lemma zdvd_1_left: "1 dvd (m::int)"
-  by (rule one_dvd) (* already declared [simp] *)
-
-lemma zdvd_refl: "m dvd (m::int)"
-  by (rule dvd_refl) (* already declared [simp] *)
-
-lemma zdvd_trans: "m dvd n ==> n dvd k ==> m dvd (k::int)"
-  by (rule dvd_trans)
-
-lemma zdvd_zminus_iff: "m dvd -n \<longleftrightarrow> m dvd (n::int)"
-  by (rule dvd_minus_iff) (* already declared [simp] *)
-
-lemma zdvd_zminus2_iff: "-m dvd n \<longleftrightarrow> m dvd (n::int)"
-  by (rule minus_dvd_iff) (* already declared [simp] *)
-
-lemma zdvd_abs1: "( \<bar>i::int\<bar> dvd j) = (i dvd j)"
-  by (rule abs_dvd_iff) (* already declared [simp] *)
-
-lemma zdvd_abs2: "( (i::int) dvd \<bar>j\<bar>) = (i dvd j)" 
-  by (rule dvd_abs_iff) (* already declared [simp] *)
+  dvd_eq_mod_eq_0 [of "number_of x::int" "number_of y::int", standard]
 
 lemma zdvd_anti_sym:
     "0 < m ==> 0 < n ==> m dvd n ==> n dvd m ==> m = (n::int)"
@@ -1171,58 +1062,32 @@
   apply (simp add: mult_assoc zero_less_mult_iff zmult_eq_1_iff)
   done
 
-lemma zdvd_zadd: "k dvd m ==> k dvd n ==> k dvd (m + n :: int)"
-  by (rule dvd_add)
-
-lemma zdvd_dvd_eq: assumes anz:"a \<noteq> 0" and ab: "(a::int) dvd b" and ba:"b dvd a" 
+lemma zdvd_dvd_eq: assumes "a \<noteq> 0" and "(a::int) dvd b" and "b dvd a" 
   shows "\<bar>a\<bar> = \<bar>b\<bar>"
 proof-
-  from ab obtain k where k:"b = a*k" unfolding dvd_def by blast 
-  from ba obtain k' where k':"a = b*k'" unfolding dvd_def by blast 
+  from `a dvd b` obtain k where k:"b = a*k" unfolding dvd_def by blast 
+  from `b dvd a` obtain k' where k':"a = b*k'" unfolding dvd_def by blast 
   from k k' have "a = a*k*k'" by simp
   with mult_cancel_left1[where c="a" and b="k*k'"]
-  have kk':"k*k' = 1" using anz by (simp add: mult_assoc)
+  have kk':"k*k' = 1" using `a\<noteq>0` by (simp add: mult_assoc)
   hence "k = 1 \<and> k' = 1 \<or> k = -1 \<and> k' = -1" by (simp add: zmult_eq_1_iff)
   thus ?thesis using k k' by auto
 qed
 
-lemma zdvd_zdiff: "k dvd m ==> k dvd n ==> k dvd (m - n :: int)"
-  by (rule Ring_and_Field.dvd_diff)
-
 lemma zdvd_zdiffD: "k dvd m - n ==> k dvd n ==> k dvd (m::int)"
   apply (subgoal_tac "m = n + (m - n)")
    apply (erule ssubst)
-   apply (blast intro: zdvd_zadd, simp)
+   apply (blast intro: dvd_add, simp)
   done
 
-lemma zdvd_zmult: "k dvd (n::int) ==> k dvd m * n"
-  by (rule dvd_mult)
-
-lemma zdvd_zmult2: "k dvd (m::int) ==> k dvd m * n"
-  by (rule dvd_mult2)
-
-lemma zdvd_triv_right: "(k::int) dvd m * k"
-  by (rule dvd_triv_right) (* already declared [simp] *)
-
-lemma zdvd_triv_left: "(k::int) dvd k * m"
-  by (rule dvd_triv_left) (* already declared [simp] *)
-
-lemma zdvd_zmultD2: "j * k dvd n ==> j dvd (n::int)"
-  by (rule dvd_mult_left)
-
-lemma zdvd_zmultD: "j * k dvd n ==> k dvd (n::int)"
-  by (rule dvd_mult_right)
-
-lemma zdvd_zmult_mono: "i dvd m ==> j dvd (n::int) ==> i * j dvd m * n"
-  by (rule mult_dvd_mono)
-
 lemma zdvd_reduce: "(k dvd n + k * m) = (k dvd (n::int))"
-  apply (rule iffI)
-   apply (erule_tac [2] zdvd_zadd)
-   apply (subgoal_tac "n = (n + k * m) - k * m")
-    apply (erule ssubst)
-    apply (erule zdvd_zdiff, simp_all)
-  done
+apply (rule iffI)
+ apply (erule_tac [2] dvd_add)
+ apply (subgoal_tac "n = (n + k * m) - k * m")
+  apply (erule ssubst)
+  apply (erule dvd_diff)
+  apply(simp_all)
+done
 
 lemma zdvd_zmod: "f dvd m ==> f dvd (n::int) ==> f dvd m mod n"
   apply (simp add: dvd_def)
@@ -1232,7 +1097,7 @@
 lemma zdvd_zmod_imp_zdvd: "k dvd m mod n ==> k dvd n ==> k dvd (m::int)"
   apply (subgoal_tac "k dvd n * (m div n) + m mod n")
    apply (simp add: zmod_zdiv_equality [symmetric])
-  apply (simp only: zdvd_zadd zdvd_zmult2)
+  apply (simp only: dvd_add dvd_mult2)
   done
 
 lemma zdvd_not_zless: "0 < m ==> m < n ==> \<not> n dvd (m::int)"
@@ -1252,7 +1117,7 @@
 lemma zdvd_mult_div_cancel:"(n::int) dvd m \<Longrightarrow> n * (m div n) = m"
 apply (subgoal_tac "m mod n = 0")
  apply (simp add: zmult_div_cancel)
-apply (simp only: zdvd_iff_zmod_eq_0)
+apply (simp only: dvd_eq_mod_eq_0)
 done
 
 lemma zdvd_mult_cancel: assumes d:"k * m dvd k * n" and kz:"k \<noteq> (0::int)"
@@ -1265,10 +1130,6 @@
   thus ?thesis by simp
 qed
 
-lemma zdvd_zmult_cancel_disj[simp]:
-  "(k*m) dvd (k*n) = (k=0 | m dvd (n::int))"
-by (auto simp: zdvd_zmult_mono dest: zdvd_mult_cancel)
-
 
 theorem ex_nat: "(\<exists>x::nat. P x) = (\<exists>x::int. 0 <= x \<and> P (nat x))"
 apply (simp split add: split_nat)
@@ -1300,44 +1161,38 @@
       then show ?thesis by (simp only: negative_eq_positive) auto
     qed
   qed
-  then show ?thesis by (auto elim!: dvdE simp only: zdvd_triv_left int_mult)
+  then show ?thesis by (auto elim!: dvdE simp only: dvd_triv_left int_mult)
 qed
 
 lemma zdvd1_eq[simp]: "(x::int) dvd 1 = ( \<bar>x\<bar> = 1)"
 proof
-  assume d: "x dvd 1" hence "int (nat \<bar>x\<bar>) dvd int (nat 1)" by (simp add: zdvd_abs1)
+  assume d: "x dvd 1" hence "int (nat \<bar>x\<bar>) dvd int (nat 1)" by simp
   hence "nat \<bar>x\<bar> dvd 1" by (simp add: zdvd_int)
   hence "nat \<bar>x\<bar> = 1"  by simp
   thus "\<bar>x\<bar> = 1" by (cases "x < 0", auto)
 next
   assume "\<bar>x\<bar>=1" thus "x dvd 1" 
-    by(cases "x < 0",simp_all add: minus_equation_iff zdvd_iff_zmod_eq_0)
+    by(cases "x < 0",simp_all add: minus_equation_iff dvd_eq_mod_eq_0)
 qed
 lemma zdvd_mult_cancel1: 
   assumes mp:"m \<noteq>(0::int)" shows "(m * n dvd m) = (\<bar>n\<bar> = 1)"
 proof
   assume n1: "\<bar>n\<bar> = 1" thus "m * n dvd m" 
-    by (cases "n >0", auto simp add: zdvd_zminus2_iff minus_equation_iff)
+    by (cases "n >0", auto simp add: minus_dvd_iff minus_equation_iff)
 next
   assume H: "m * n dvd m" hence H2: "m * n dvd m * 1" by simp
   from zdvd_mult_cancel[OF H2 mp] show "\<bar>n\<bar> = 1" by (simp only: zdvd1_eq)
 qed
 
 lemma int_dvd_iff: "(int m dvd z) = (m dvd nat (abs z))"
-  unfolding zdvd_int by (cases "z \<ge> 0") (simp_all add: zdvd_zminus_iff)
+  unfolding zdvd_int by (cases "z \<ge> 0") simp_all
 
 lemma dvd_int_iff: "(z dvd int m) = (nat (abs z) dvd m)"
-  unfolding zdvd_int by (cases "z \<ge> 0") (simp_all add: zdvd_zminus2_iff)
+  unfolding zdvd_int by (cases "z \<ge> 0") simp_all
 
 lemma nat_dvd_iff: "(nat z dvd m) = (if 0 \<le> z then (z dvd int m) else m = 0)"
   by (auto simp add: dvd_int_iff)
 
-lemma zminus_dvd_iff [iff]: "(-z dvd w) = (z dvd (w::int))"
-  by (rule minus_dvd_iff)
-
-lemma dvd_zminus_iff [iff]: "(z dvd -w) = (z dvd (w::int))"
-  by (rule dvd_minus_iff)
-
 lemma zdvd_imp_le: "[| z dvd n; 0 < n |] ==> z \<le> (n::int)"
   apply (rule_tac z=n in int_cases)
   apply (auto simp add: dvd_int_iff)
@@ -1367,10 +1222,13 @@
 apply (auto simp add: IntDiv.divmod_rel_def of_nat_mult)
 done
 
+lemma abs_div: "(y::int) dvd x \<Longrightarrow> abs (x div y) = abs x div abs y"
+by (unfold dvd_def, cases "y=0", auto simp add: abs_mult)
+
 text{*Suggested by Matthias Daum*}
 lemma int_power_div_base:
      "\<lbrakk>0 < m; 0 < k\<rbrakk> \<Longrightarrow> k ^ m div k = (k::int) ^ (m - Suc 0)"
-apply (subgoal_tac "k ^ m = k ^ ((m - 1) + 1)")
+apply (subgoal_tac "k ^ m = k ^ ((m - Suc 0) + Suc 0)")
  apply (erule ssubst)
  apply (simp only: power_add)
  apply simp_all
@@ -1387,8 +1245,8 @@
 by (rule mod_diff_right_eq [symmetric])
 
 lemmas zmod_simps =
-  IntDiv.zmod_zadd_left_eq  [symmetric]
-  IntDiv.zmod_zadd_right_eq [symmetric]
+  mod_add_left_eq  [symmetric]
+  mod_add_right_eq [symmetric]
   IntDiv.zmod_zmult1_eq     [symmetric]
   mod_mult_left_eq          [symmetric]
   IntDiv.zpower_zmod
@@ -1463,14 +1321,14 @@
   assume H: "x mod n = y mod n"
   hence "x mod n - y mod n = 0" by simp
   hence "(x mod n - y mod n) mod n = 0" by simp 
-  hence "(x - y) mod n = 0" by (simp add: zmod_zdiff1_eq[symmetric])
-  thus "n dvd x - y" by (simp add: zdvd_iff_zmod_eq_0)
+  hence "(x - y) mod n = 0" by (simp add: mod_diff_eq[symmetric])
+  thus "n dvd x - y" by (simp add: dvd_eq_mod_eq_0)
 next
   assume H: "n dvd x - y"
   then obtain k where k: "x-y = n*k" unfolding dvd_def by blast
   hence "x = n*k + y" by simp
   hence "x mod n = (n*k + y) mod n" by simp
-  thus "x mod n = y mod n" by (simp add: zmod_zadd_left_eq)
+  thus "x mod n = y mod n" by (simp add: mod_add_left_eq)
 qed
 
 lemma nat_mod_eq_lemma: assumes xyn: "(x::nat) mod n = y  mod n" and xy:"y \<le> x"
--- a/src/HOL/Integration.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Integration.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -134,7 +134,7 @@
 apply (frule partition [THEN iffD1], safe)
 apply (drule_tac x = "psize D" and P="%n. psize D \<le> n --> ?P n" in spec, safe)
 apply (case_tac "psize D = 0")
-apply (drule_tac [2] n = "psize D - 1" in partition_lt, auto)
+apply (drule_tac [2] n = "psize D - Suc 0" in partition_lt, auto)
 done
 
 lemma partition_gt: "[|partition(a,b) D; n < (psize D)|] ==> D(n) < D(psize D)"
@@ -145,7 +145,7 @@
 apply (rotate_tac 2)
 apply (drule_tac x = "psize D" in spec)
 apply (rule ccontr)
-apply (drule_tac n = "psize D - 1" in partition_lt)
+apply (drule_tac n = "psize D - Suc 0" in partition_lt)
 apply auto
 done
 
--- a/src/HOL/IsaMakefile	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/IsaMakefile	Wed Mar 04 10:45:52 2009 +0100
@@ -13,7 +13,6 @@
   HOL-Library \
   HOL-ex \
   HOL-Auth \
-  HOL-AxClasses \
   HOL-Bali \
   HOL-Decision_Procs \
   HOL-Extraction \
@@ -79,38 +78,39 @@
 $(OUT)/Pure: Pure
 
 BASE_DEPENDENCIES = $(OUT)/Pure \
+  $(SRC)/Provers/blast.ML \
+  $(SRC)/Provers/clasimp.ML \
+  $(SRC)/Provers/classical.ML \
+  $(SRC)/Provers/hypsubst.ML \
+  $(SRC)/Provers/quantifier1.ML \
+  $(SRC)/Provers/splitter.ML \
+  $(SRC)/Tools/IsaPlanner/isand.ML \
+  $(SRC)/Tools/IsaPlanner/rw_inst.ML \
+  $(SRC)/Tools/IsaPlanner/rw_tools.ML \
+  $(SRC)/Tools/IsaPlanner/zipper.ML \
+  $(SRC)/Tools/atomize_elim.ML \
+  $(SRC)/Tools/code/code_funcgr.ML \
+  $(SRC)/Tools/code/code_haskell.ML \
+  $(SRC)/Tools/code/code_ml.ML \
+  $(SRC)/Tools/code/code_name.ML \
+  $(SRC)/Tools/code/code_printer.ML \
+  $(SRC)/Tools/code/code_target.ML \
+  $(SRC)/Tools/code/code_thingol.ML \
+  $(SRC)/Tools/code/code_wellsorted.ML \
+  $(SRC)/Tools/coherent.ML \
+  $(SRC)/Tools/eqsubst.ML \
+  $(SRC)/Tools/induct.ML \
+  $(SRC)/Tools/intuitionistic.ML \
+  $(SRC)/Tools/induct_tacs.ML \
+  $(SRC)/Tools/nbe.ML \
+  $(SRC)/Tools/project_rule.ML \
+  $(SRC)/Tools/random_word.ML \
+  $(SRC)/Tools/value.ML \
   Code_Setup.thy \
   HOL.thy \
   Tools/hologic.ML \
   Tools/recfun_codegen.ML \
   Tools/simpdata.ML \
-  $(SRC)/Tools/atomize_elim.ML \
-  $(SRC)/Tools/code/code_funcgr.ML \
-  $(SRC)/Tools/code/code_funcgr.ML \
-  $(SRC)/Tools/code/code_name.ML \
-  $(SRC)/Tools/code/code_printer.ML \
-  $(SRC)/Tools/code/code_target.ML \
-  $(SRC)/Tools/code/code_ml.ML \
-  $(SRC)/Tools/code/code_haskell.ML \
-  $(SRC)/Tools/code/code_thingol.ML \
-  $(SRC)/Tools/induct.ML \
-  $(SRC)/Tools/induct_tacs.ML \
-  $(SRC)/Tools/IsaPlanner/isand.ML \
-  $(SRC)/Tools/IsaPlanner/rw_inst.ML \
-  $(SRC)/Tools/IsaPlanner/rw_tools.ML \
-  $(SRC)/Tools/IsaPlanner/zipper.ML \
-  $(SRC)/Tools/nbe.ML \
-  $(SRC)/Tools/random_word.ML \
-  $(SRC)/Tools/value.ML \
-  $(SRC)/Provers/blast.ML \
-  $(SRC)/Provers/clasimp.ML \
-  $(SRC)/Provers/classical.ML \
-  $(SRC)/Provers/coherent.ML \
-  $(SRC)/Provers/eqsubst.ML \
-  $(SRC)/Provers/hypsubst.ML \
-  $(SRC)/Provers/project_rule.ML \
-  $(SRC)/Provers/quantifier1.ML \
-  $(SRC)/Provers/splitter.ML \
 
 $(OUT)/HOL-Base: base.ML $(BASE_DEPENDENCIES)
 	@$(ISABELLE_TOOL) usedir -b -f base.ML -d false -g false $(OUT)/Pure HOL-Base
@@ -267,11 +267,11 @@
 	@$(ISABELLE_TOOL) usedir -b -f main.ML -g true $(OUT)/Pure HOL-Main
 
 $(OUT)/HOL: ROOT.ML $(MAIN_DEPENDENCIES) \
+  Archimedean_Field.thy \
   Complex_Main.thy \
   Complex.thy \
   Deriv.thy \
   Fact.thy \
-  FrechetDeriv.thy \
   Integration.thy \
   Lim.thy \
   Ln.thy \
@@ -285,7 +285,6 @@
   GCD.thy \
   Parity.thy \
   Lubs.thy \
-  Polynomial.thy \
   PReal.thy \
   Rational.thy \
   RComplete.thy \
@@ -314,8 +313,11 @@
   Library/Euclidean_Space.thy Library/Glbs.thy Library/normarith.ML \
   Library/Executable_Set.thy Library/Infinite_Set.thy			\
   Library/FuncSet.thy Library/Permutations.thy Library/Determinants.thy\
+  Library/Bit.thy \
   Library/Finite_Cartesian_Product.thy \
+  Library/FrechetDeriv.thy \
   Library/Fundamental_Theorem_Algebra.thy \
+  Library/Inner_Product.thy \
   Library/Library.thy Library/List_Prefix.thy Library/State_Monad.thy	\
   Library/Nat_Int_Bij.thy Library/Multiset.thy Library/Permutation.thy	\
   Library/Primes.thy Library/Pocklington.thy Library/Quotient.thy	\
@@ -336,6 +338,10 @@
   Library/Boolean_Algebra.thy Library/Countable.thy	\
   Library/RBT.thy	Library/Univ_Poly.thy	\
   Library/Random.thy	Library/Quickcheck.thy	\
+  Library/Poly_Deriv.thy \
+  Library/Polynomial.thy \
+  Library/Product_plus.thy \
+  Library/Product_Vector.thy \
   Library/Enum.thy Library/Float.thy $(SRC)/Tools/float.ML $(SRC)/HOL/Tools/float_arith.ML \
   Library/reify_data.ML Library/reflection.ML
 	@cd Library; $(ISABELLE_TOOL) usedir $(OUT)/HOL Library
@@ -790,15 +796,6 @@
 	@$(ISABELLE_TOOL) usedir $(OUT)/HOL IOA
 
 
-## HOL-AxClasses
-
-HOL-AxClasses: HOL $(LOG)/HOL-AxClasses.gz
-
-$(LOG)/HOL-AxClasses.gz: $(OUT)/HOL AxClasses/Group.thy			\
-  AxClasses/Product.thy AxClasses/ROOT.ML AxClasses/Semigroups.thy
-	@$(ISABELLE_TOOL) usedir $(OUT)/HOL AxClasses
-
-
 ## HOL-Lattice
 
 HOL-Lattice: HOL $(LOG)/HOL-Lattice.gz
@@ -814,34 +811,31 @@
 HOL-ex: HOL $(LOG)/HOL-ex.gz
 
 $(LOG)/HOL-ex.gz: $(OUT)/HOL Library/Commutative_Ring.thy		\
-  Library/Primes.thy							\
-  ex/Abstract_NAT.thy ex/Antiquote.thy ex/Arith_Examples.thy ex/BT.thy	\
-  ex/BinEx.thy ex/CTL.thy ex/Chinese.thy ex/Classical.thy		\
-  ex/Coherent.thy ex/Dense_Linear_Order_Ex.thy ex/Eval_Examples.thy	\
-  ex/Groebner_Examples.thy ex/Quickcheck_Generators.thy		\
-  ex/Codegenerator.thy ex/Codegenerator_Pretty.thy			\
-  ex/CodegenSML_Test.thy ex/Formal_Power_Series_Examples.thy						\
-  ex/Commutative_RingEx.thy ex/Efficient_Nat_examples.thy		\
-  ex/Hex_Bin_Examples.thy ex/Commutative_Ring_Complete.thy		\
-  ex/ExecutableContent.thy ex/Fundefs.thy ex/Guess.thy ex/Hebrew.thy	\
-  ex/Binary.thy ex/Higher_Order_Logic.thy ex/Hilbert_Classical.thy	\
+  Library/Primes.thy ex/Abstract_NAT.thy ex/Antiquote.thy		\
+  ex/ApproximationEx.thy ex/Arith_Examples.thy				\
+  ex/Arithmetic_Series_Complex.thy ex/BT.thy ex/BinEx.thy		\
+  ex/Binary.thy ex/CTL.thy ex/Chinese.thy ex/Classical.thy		\
+  ex/CodegenSML_Test.thy ex/Codegenerator.thy				\
+  ex/Codegenerator_Pretty.thy ex/Coherent.thy				\
+  ex/Commutative_RingEx.thy ex/Commutative_Ring_Complete.thy		\
+  ex/Dense_Linear_Order_Ex.thy ex/Efficient_Nat_examples.thy		\
+  ex/Eval_Examples.thy ex/ExecutableContent.thy				\
+  ex/Formal_Power_Series_Examples.thy ex/Fundefs.thy			\
+  ex/Groebner_Examples.thy ex/Guess.thy ex/HarmonicSeries.thy		\
+  ex/Hebrew.thy ex/Hex_Bin_Examples.thy ex/Higher_Order_Logic.thy	\
+  ex/Hilbert_Classical.thy ex/ImperativeQuicksort.thy			\
   ex/Induction_Scheme.thy ex/InductiveInvariant.thy			\
   ex/InductiveInvariant_examples.thy ex/Intuitionistic.thy		\
-  ex/Lagrange.thy ex/LocaleTest2.thy ex/MT.thy		\
-  ex/MergeSort.thy ex/MonoidGroup.thy ex/Multiquote.thy ex/NatSum.thy	\
+  ex/Lagrange.thy ex/LocaleTest2.thy ex/MT.thy ex/MergeSort.thy		\
+  ex/Meson_Test.thy ex/MonoidGroup.thy ex/Multiquote.thy ex/NatSum.thy	\
   ex/Numeral.thy ex/PER.thy ex/PresburgerEx.thy ex/Primrec.thy		\
-  ex/Quickcheck_Examples.thy	\
-  ex/ReflectionEx.thy ex/ROOT.ML ex/Recdefs.thy ex/Records.thy		\
+  ex/Quickcheck_Examples.thy ex/Quickcheck_Generators.thy ex/ROOT.ML	\
+  ex/Recdefs.thy ex/Records.thy ex/ReflectionEx.thy			\
   ex/Refute_Examples.thy ex/SAT_Examples.thy ex/SVC_Oracle.thy		\
-  ex/Subarray.thy ex/Sublist.thy                                        \
-  ex/Sudoku.thy ex/Tarski.thy ex/Termination.thy ex/Term_Of_Syntax.thy	\
-  ex/Unification.thy ex/document/root.bib			        \
-  ex/document/root.tex ex/Meson_Test.thy ex/set.thy	\
-  ex/svc_funcs.ML ex/svc_test.thy	\
-  ex/ImperativeQuicksort.thy	\
-  ex/Arithmetic_Series_Complex.thy ex/HarmonicSeries.thy	\
-  ex/Sqrt.thy ex/Sqrt_Script.thy \
-  ex/ApproximationEx.thy
+  ex/Serbian.thy ex/Sqrt.thy ex/Sqrt_Script.thy ex/Subarray.thy		\
+  ex/Sublist.thy ex/Sudoku.thy ex/Tarski.thy ex/Term_Of_Syntax.thy	\
+  ex/Termination.thy ex/Unification.thy ex/document/root.bib		\
+  ex/document/root.tex ex/set.thy ex/svc_funcs.ML ex/svc_test.thy
 	@$(ISABELLE_TOOL) usedir $(OUT)/HOL ex
 
 
@@ -1062,22 +1056,22 @@
 ## clean
 
 clean:
-	@rm -f  $(OUT)/HOL-Plain $(OUT)/HOL-Main $(OUT)/HOL $(OUT)/HOL-Nominal $(OUT)/TLA \
-		$(LOG)/HOL.gz $(LOG)/TLA.gz \
-		$(LOG)/HOL-Isar_examples.gz $(LOG)/HOL-Induct.gz \
-		$(LOG)/HOL-ex.gz $(LOG)/HOL-Subst.gz $(LOG)/HOL-IMP.gz \
-		$(LOG)/HOL-IMPP.gz $(LOG)/HOL-Hoare.gz \
-		$(LOG)/HOL-HoareParallel.gz \
-		$(LOG)/HOL-Lex.gz $(LOG)/HOL-Algebra.gz \
-		$(LOG)/HOL-Auth.gz $(LOG)/HOL-UNITY.gz \
-		$(LOG)/HOL-Modelcheck.gz $(LOG)/HOL-Lambda.gz \
-                $(LOG)/HOL-Bali.gz \
-		$(LOG)/HOL-MicroJava.gz $(LOG)/HOL-NanoJava.gz \
-                $(LOG)/HOL-Nominal-Examples.gz \
-		$(LOG)/HOL-IOA.gz $(LOG)/HOL-AxClasses \
-		$(LOG)/HOL-Lattice $(LOG)/HOL-Matrix \
-		$(LOG)/HOL-HahnBanach.gz $(LOG)/HOL-SET-Protocol.gz \
-                $(LOG)/TLA-Inc.gz $(LOG)/TLA-Buffer.gz $(LOG)/TLA-Memory.gz \
-		$(LOG)/HOL-Library.gz $(LOG)/HOL-Unix.gz \
-                $(OUT)/HOL-Word $(LOG)/HOL-Word.gz $(LOG)/HOL-Word-Examples.gz \
-                $(OUT)/HOL-NSA $(LOG)/HOL-NSA.gz $(LOG)/HOL-NSA-Examples.gz
+	@rm -f $(OUT)/HOL-Plain $(OUT)/HOL-Main $(OUT)/HOL		\
+		$(OUT)/HOL-Nominal $(OUT)/TLA $(LOG)/HOL.gz		\
+		$(LOG)/TLA.gz $(LOG)/HOL-Isar_examples.gz		\
+		$(LOG)/HOL-Induct.gz $(LOG)/HOL-ex.gz			\
+		$(LOG)/HOL-Subst.gz $(LOG)/HOL-IMP.gz			\
+		$(LOG)/HOL-IMPP.gz $(LOG)/HOL-Hoare.gz			\
+		$(LOG)/HOL-HoareParallel.gz $(LOG)/HOL-Lex.gz		\
+		$(LOG)/HOL-Algebra.gz $(LOG)/HOL-Auth.gz		\
+		$(LOG)/HOL-UNITY.gz $(LOG)/HOL-Modelcheck.gz		\
+		$(LOG)/HOL-Lambda.gz $(LOG)/HOL-Bali.gz			\
+		$(LOG)/HOL-MicroJava.gz $(LOG)/HOL-NanoJava.gz		\
+		$(LOG)/HOL-Nominal-Examples.gz $(LOG)/HOL-IOA.gz	\
+		$(LOG)/HOL-Lattice $(LOG)/HOL-Matrix			\
+		$(LOG)/HOL-HahnBanach.gz $(LOG)/HOL-SET-Protocol.gz	\
+		$(LOG)/TLA-Inc.gz $(LOG)/TLA-Buffer.gz			\
+		$(LOG)/TLA-Memory.gz $(LOG)/HOL-Library.gz		\
+		$(LOG)/HOL-Unix.gz $(OUT)/HOL-Word $(LOG)/HOL-Word.gz	\
+		$(LOG)/HOL-Word-Examples.gz $(OUT)/HOL-NSA		\
+		$(LOG)/HOL-NSA.gz $(LOG)/HOL-NSA-Examples.gz
--- a/src/HOL/Library/Abstract_Rat.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Library/Abstract_Rat.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -247,7 +247,7 @@
     (of_int(n div d)::'a::{field, ring_char_0}) = of_int n / of_int d"
   apply (frule of_int_div_aux [of d n, where ?'a = 'a])
   apply simp
-  apply (simp add: zdvd_iff_zmod_eq_0)
+  apply (simp add: dvd_eq_mod_eq_0)
 done
 
 
--- a/src/HOL/Library/Boolean_Algebra.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Library/Boolean_Algebra.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -223,7 +223,7 @@
 lemma xor_left_self [simp]: "x \<oplus> (x \<oplus> y) = y"
 by (simp only: xor_assoc [symmetric] xor_self xor_zero_left)
 
-lemma xor_compl_left: "\<sim> x \<oplus> y = \<sim> (x \<oplus> y)"
+lemma xor_compl_left [simp]: "\<sim> x \<oplus> y = \<sim> (x \<oplus> y)"
 apply (simp only: xor_def de_Morgan_disj de_Morgan_conj double_compl)
 apply (simp only: conj_disj_distribs)
 apply (simp only: conj_cancel_right conj_cancel_left)
@@ -231,7 +231,7 @@
 apply (simp only: disj_ac conj_ac)
 done
 
-lemma xor_compl_right: "x \<oplus> \<sim> y = \<sim> (x \<oplus> y)"
+lemma xor_compl_right [simp]: "x \<oplus> \<sim> y = \<sim> (x \<oplus> y)"
 apply (simp only: xor_def de_Morgan_disj de_Morgan_conj double_compl)
 apply (simp only: conj_disj_distribs)
 apply (simp only: conj_cancel_right conj_cancel_left)
@@ -239,11 +239,11 @@
 apply (simp only: disj_ac conj_ac)
 done
 
-lemma xor_cancel_right [simp]: "x \<oplus> \<sim> x = \<one>"
+lemma xor_cancel_right: "x \<oplus> \<sim> x = \<one>"
 by (simp only: xor_compl_right xor_self compl_zero)
 
-lemma xor_cancel_left [simp]: "\<sim> x \<oplus> x = \<one>"
-by (subst xor_commute) (rule xor_cancel_right)
+lemma xor_cancel_left: "\<sim> x \<oplus> x = \<one>"
+by (simp only: xor_compl_left xor_self compl_zero)
 
 lemma conj_xor_distrib: "x \<sqinter> (y \<oplus> z) = (x \<sqinter> y) \<oplus> (x \<sqinter> z)"
 proof -
--- a/src/HOL/Library/Char_nat.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Library/Char_nat.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -132,7 +132,7 @@
 lemma Char_char_of_nat:
   "Char n m = char_of_nat (nat_of_nibble n * 16 + nat_of_nibble m)"
   unfolding char_of_nat_def Let_def nibble_pair_of_nat_def
-  by (auto simp add: div_add1_eq mod_add1_eq nat_of_nibble_div_16 nibble_of_nat_norm nibble_of_nat_of_nibble)
+  by (auto simp add: div_add1_eq mod_add_eq nat_of_nibble_div_16 nibble_of_nat_norm nibble_of_nat_of_nibble)
 
 lemma char_of_nat_of_char:
   "char_of_nat (nat_of_char c) = c"
@@ -165,7 +165,7 @@
   show ?thesis
     by (simp add: nat_of_char.simps char_of_nat_def nibble_of_pair
       nat_of_nibble_of_nat mod_mult_distrib
-      n aux3 mod_mult_self3 l_256 aux4 mod_add1_eq [of "256 * k"] l_div_256)
+      n aux3 mod_mult_self3 l_256 aux4 mod_add_eq [of "256 * k"] l_div_256)
 qed
 
 lemma nibble_pair_of_nat_char:
--- a/src/HOL/Library/Code_Char.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Library/Code_Char.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -1,5 +1,4 @@
 (*  Title:      HOL/Library/Code_Char.thy
-    ID:         $Id$
     Author:     Florian Haftmann
 *)
 
--- a/src/HOL/Library/Coinductive_List.thy	Wed Mar 04 10:43:39 2009 +0100
+++ b/src/HOL/Library/Coinductive_List.thy	Wed Mar 04 10:45:52 2009 +0100
@@ -298,12 +298,12 @@
       (CONS a M, CONS b N) \<in> EqLList r"
 
 lemma EqLList_unfold:
-    "EqLList r = dsum (diag {Datatype.Numb 0}) (dprod r (EqLList r))"
+    "EqLList r = dsum (Id_on {Datatype.Numb 0}) (dprod r (EqLList r))"
   by (fast intro!: EqLList.intros [unfolded NIL_def CONS_def]
            elim: EqLList.cases [unfolded NIL_def CONS_def])
 
 lemma EqLList_implies_ntrunc_equality:
-    "(M, N) \<in> EqLList (diag A) \<Longrightarrow> ntrunc k M = ntrunc k N"
+    "(M, N) \<in> EqLList (Id_on A) \<Longrightarrow> ntrunc k M = ntrunc k N"
   apply (induct k arbitrary: M N rule: nat_less_induct)
   apply (erule EqLList.cases)
    apply (safe del: equalityI)
@@ -314,28 +314,28 @@
    apply (simp_all add: CONS_def less_Suc_eq)
   done
 
-lemma Domain_EqLList: "Domain (EqLList (diag A)) \<subseteq> LList A"
+lemma Domain_EqLList: "Domain (EqLList (Id_on A)) \<subseteq> LList A"
   apply (rule subsetI)
   apply (erule LList.coinduct)
   apply (subst (asm) EqLList_unfold)
   apply (auto simp add: NIL_def CONS_def)
   done
 
-lemma EqLList_diag: "EqLList (diag A) = diag (LList A)"
+lemma EqLList_Id_on: "EqLList (Id_on A) = Id_on (LList A)"
   (is "?lhs = ?rhs")
 proof
   show "?lhs \<subseteq> ?rhs"
     apply (rule subsetI)
     apply (rule_tac p = x in PairE)
     apply clarify
-    apply (rule diag_eqI)
+    apply (rule Id_on_eqI)
      apply (rule EqLList_implies_ntrunc_equality [THEN ntrunc_equality],
        assumption)
     apply (erule DomainI [THEN Domain_EqLList [THEN subsetD]])
     done
   {
-    fix M N assume "(M, N) \<in> diag (LList A)"
-    then have "(M, N) \<in> EqLList (diag A)"
+    fix M N assume "(M, N) \<in> Id_on (LList A)"
+    then have "(M, N) \<in> EqLList (Id_on A)"
     proof coinduct
       case (EqLList M N)
       then obtain L where L: "L \<in> LList A" and MN: "M = L" "N = L" by blast
@@ -344,7 +344,7 @@
         case NIL with MN have ?EqNIL by simp
         then show ?thesis ..
       next
-        case CONS with MN have ?EqCONS by (simp add: diagI)
+        case CONS with MN have ?EqCONS by (simp add: Id_onI)
         then show ?thesis ..
       qed
     qed
@@ -352,8 +352,8 @@
   then show "?rhs \<subseteq> ?lhs" by auto
 qed
 
-lemma EqLList_diag_iff [iff]: "(p \<in> EqLList (diag A)) = (p \<in> diag (LList A))"
-  by (simp only: EqLList_diag)
+lemma EqLList_Id_on_iff [iff]: "(p \<in> EqLList (Id_on A)) = (p \<in> Id_on (LList A))"
+  by (simp only: EqLList_Id_on)
 
 
 text {*
@@ -367,11 +367,11 @@
     and step: "\<And>M N. (M, N) \<in> r \<Longrightarrow>
       M = NIL \<and> N = NIL \<or>
         (\<exists>a b M' N'.
-          M = CONS a M' \<and> N = CONS b N' \<and> (a, b) \<in> diag A \<and>
-            ((M', N') \<in> r \<or> (M', N') \<in> EqLList (diag A)))"
+          M = CONS a M' \<and> N = CONS b N' \<and> (a, b) \<in> Id_on A \<and>
+            ((M', N') \<in> r \<or> (M', N') \<in> EqLList (Id_on A)))"
   shows "M = N"
 proof -
-  from r have "(M, N) \<in> EqLList (diag A)"
+  from r have "(M, N) \<in> EqLList (Id_on A)"
   proof coinduct
     case EqLList
     then show ?case by (rule step)
@@ -387,8 +387,8 @@
             (f (CONS x l), g (CONS x l)) = (NIL, NIL) \<or>
             (\<exists>M N a b.
               (f (CONS x l), g (CONS x l)) = (CONS a M, CONS b N) \<and>
-                (a, b) \<in> diag A \<and>
-                (M, N) \<in> {(f u, g u) | u. u \<in> LList A} \<union> diag (LList A))"
+                (a, b) \<in> Id_on A \<and>
+                (M, N) \<in> {(f u, g u) | u. u \<in> LList A} \<union> Id_on (LList A))"
       (is "\<And>x l. _ \<Longrightarrow> _ \<Longrightarrow> ?fun_CONS x l")
   shows "f M = g M"
 proof -
@@ -401,8 +401,8 @@
     from L show ?case
     proof (cases L)
       case NIL
-      with fun_NIL and MN have "(M, N) \<in> diag (LList A)" by auto
-      then have "(M, N) \<in> EqLList (diag A)" ..
+      with fun_NIL and MN have "(M, N) \<in> Id_on (LList A)" by auto
+      then have "(M, N) \<in> EqLList (Id_on A)" ..
       then show ?thesis by cases simp_all
     next
       case (CONS a K)
@@ -411,23 +411,23 @@
       then show ?thesis
       proof
         assume ?NIL
-        with MN CONS have "(M, N) \<in> diag (LList A)" by auto
-        then have "(M, N) \<in> EqLList (diag A)" ..
+        with MN CONS have "(M, N) \<in> Id_on (LList A)" by auto
+        then have "(M, N) \<in> EqLList (Id_on A)" ..
         then show ?thesis by cases simp_all
       next
         assume ?CONS
         with CONS obtain a b M' N' where