# HG changeset patch # User wenzelm # Date 1432584703 -7200 # Node ID ff82ba1893c80c00396f7a2c3de645f9c70188f4 # Parent cc71f01f9fdec21f4a3133b7b4ebe7cc3bf63101# Parent 82453d0f49eeb44714916581488029187ffc57fc merged, resolving conflicts in Admin/isatest/settings/afp-poly and src/HOL/Tools/Nitpick/nitpick_model.ML; diff -r cc71f01f9fde -r ff82ba1893c8 .hgtags --- a/.hgtags Sat May 23 22:13:24 2015 +0200 +++ b/.hgtags Mon May 25 22:11:43 2015 +0200 @@ -31,3 +31,8 @@ 8f4a332500e41bb67efc3e141608829473606a72 Isabelle2014 42d34eeb283c645de7792a327e86d846f9cfb5f9 Isabelle2015-RC0 c9760373aa0f9a941d0e58d1432a823eaa14a0cc Isabelle2015-RC1 +8483c2883c8c73d94ff05627d5d9de0c821e78ac Isabelle2015-RC2 +e0c3e11e9bea53656fdd1a258ac66c2e74390582 Isabelle2015-RC3 +05fe9bdc4f8f2f550b44c4ded6bbc578408b7a14 Isabelle2015-RC4 +d7f636331176ed8baa0c6f40d9fbb18838829156 Isabelle2015-RC5 +5ae2a2e74c93eafeb00b1ddeef0404256745ebba Isabelle2015 diff -r cc71f01f9fde -r ff82ba1893c8 Admin/Release/CHECKLIST --- a/Admin/Release/CHECKLIST Sat May 23 22:13:24 2015 +0200 +++ b/Admin/Release/CHECKLIST Mon May 25 22:11:43 2015 +0200 @@ -7,10 +7,6 @@ - test polyml-5.4.1, polyml-5.4.0, polyml-5.3.0, smlnj; -- test Isabelle/jEdit on single-core; - -- test Isabelle/jEdit on airy device; - - test 'display_drafts' command; - test "#!/usr/bin/env isabelle_scala_script"; @@ -20,6 +16,8 @@ - check ANNOUNCE, README, NEWS, COPYRIGHT, CONTRIBUTORS; +- check versions in src/Tools/jEdit/Isabelle.props; + - check funny base directory, e.g. "Test 中国"; - check scalable fonts, e.g. src/Doc/Prog_Prove (NOTE: T1 encoding @@ -42,7 +40,9 @@ Admin/build jars_test - test Isabelle/jEdit: - print buffer + . print buffer + . on single-core + . 
on airy device - test contrib components: x86_64-linux without 32bit C/C++ libraries @@ -83,8 +83,8 @@ default = http://bitbucket.org/isabelle_project/isabelle-release default = ssh://hg@bitbucket.org/isabelle_project/isabelle-release -- isatest@macbroy28:hg-isabelle/.hg/hgrc -- isatest@macbroy28:devel-page/content/index.content +- isatest@lxbroy2:hg-isabelle/.hg/hgrc +- isatest@lxbroy2:devel-page/content/index.content Post-release diff -r cc71f01f9fde -r ff82ba1893c8 Admin/Release/build --- a/Admin/Release/build Sat May 23 22:13:24 2015 +0200 +++ b/Admin/Release/build Mon May 25 22:11:43 2015 +0200 @@ -115,7 +115,7 @@ # make bundles -for PLATFORM_FAMILY in linux macos windows +for PLATFORM_FAMILY in linux windows macos do echo diff -r cc71f01f9fde -r ff82ba1893c8 Admin/Release/build_library --- a/Admin/Release/build_library Sat May 23 22:13:24 2015 +0200 +++ b/Admin/Release/build_library Mon May 25 22:11:43 2015 +0200 @@ -87,6 +87,7 @@ cd .. if [ "$RC" = 0 ]; then + chmod -R a+r "$ISABELLE_NAME" chmod -R g=o "$ISABELLE_NAME" tar -c -z -f "$ARCHIVE_DIR/${ISABELLE_NAME}_library.tar.gz" "$ISABELLE_NAME/browser_info" fi diff -r cc71f01f9fde -r ff82ba1893c8 Admin/components/components.sha1 --- a/Admin/components/components.sha1 Sat May 23 22:13:24 2015 +0200 +++ b/Admin/components/components.sha1 Mon May 25 22:11:43 2015 +0200 @@ -1,6 +1,7 @@ 70105fd6fbfd1a868383fc510772b95234325d31 csdp-6.x.tar.gz 2f6417b8e96a0e4e8354fe0f1a253c18fb55d9a7 cvc3-2.4.1.tar.gz a5e02b5e990da4275dc5d4480c3b72fc73160c28 cvc4-1.5pre-1.tar.gz +4d9658fd2688ae8ac78da8fdfcbf85960f871b71 cvc4-1.5pre-2.tar.gz 03aec2ec5757301c9df149f115d1f4f1d2cafd9e cvc4-1.5pre.tar.gz 842d9526f37b928cf9e22f141884365129990d63 cygwin-20130110.tar.gz cb3b0706d208f104b800267697204f6d82f7b48a cygwin-20130114.tar.gz @@ -26,6 +27,7 @@ ae7ee5becb26512f18c609e83b34612918bae5f0 exec_process-1.0.tar.gz 59a71e08c34ff01f3f5c4af00db5e16369527eb7 Haskabelle-2013.tar.gz 23a96ff4951d72f4024b6e8843262eda988bc151 
Haskabelle-2014.tar.gz +eccff31931fb128c1dd522cfc85495c9b66e67af Haskabelle-2015.tar.gz 683acd94761ef460cca1a628f650355370de5afb hol-light-bundle-0.5-126.tar.gz 8d83e433c1419e0c0cc5fd1762903d11b4a5752c jdk-6u31.tar.gz 38d2d2a91c66714c18430e136e7e5191af3996e6 jdk-7u11.tar.gz @@ -86,6 +88,7 @@ 36f78f27291a9ceb13bf1120b62a45625afd44a6 polyml-5.5.1.tar.gz a588640dbf5da9ae15455b02ef709764a48637dc polyml-5.5.2-1.tar.gz 4b690390946f7bfb777b89eb16d6f08987cca12f polyml-5.5.2-2.tar.gz +5b31ad8556e41dfd6d5e85f407818be399aa3d2a polyml-5.5.2-3.tar.gz 532f6e8814752aeb406c62fabcfd2cc05f8a7ca8 polyml-5.5.2.tar.gz 8ee375cfc38972f080dbc78f07b68dac03efe968 ProofGeneral-3.7.1.1.tar.gz 847b52c0676b5eb0fbf0476f64fc08c2d72afd0c ProofGeneral-4.1.tar.gz diff -r cc71f01f9fde -r ff82ba1893c8 Admin/components/main --- a/Admin/components/main Sat May 23 22:13:24 2015 +0200 +++ b/Admin/components/main Mon May 25 22:11:43 2015 +0200 @@ -1,15 +1,15 @@ #main components for everyday use, without big impact on overall build time csdp-6.x -cvc4-1.5pre-1 +cvc4-1.5pre-2 e-1.8 exec_process-1.0.3 -Haskabelle-2014 +Haskabelle-2015 jdk-7u80 jedit_build-20150228 jfreechart-1.0.14-1 jortho-1.0-2 kodkodi-1.5.2 -polyml-5.5.2-2 +polyml-5.5.2-3 scala-2.11.6 spass-3.8ds xz-java-1.2-1 diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/afp-poly --- a/Admin/isatest/settings/afp-poly Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/afp-poly Mon May 25 22:11:43 2015 +0200 @@ -2,9 +2,9 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - ML_PLATFORM="x86_64-darwin" - ML_HOME="$POLYML_HOME/$ML_PLATFORM" - ML_OPTIONS="-H 2000" +ML_PLATFORM="$ISABELLE_PLATFORM64" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" +ML_OPTIONS="-H 2000" ISABELLE_GHC=ghc diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/at-poly-e --- a/Admin/isatest/settings/at-poly-e Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/at-poly-e Mon May 25 22:11:43 2015 +0200 @@ -2,8 +2,8 @@ init_components 
/home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.3.0" - ML_SYSTEM="polyml-5.3.0" + POLYML_HOME="/home/polyml/polyml-5.4.1" + ML_SYSTEM="polyml-5.4.1" ML_PLATFORM="x86-linux" ML_HOME="$POLYML_HOME/$ML_PLATFORM" ML_OPTIONS="-H 1000" diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/at64-poly --- a/Admin/isatest/settings/at64-poly Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/at64-poly Mon May 25 22:11:43 2015 +0200 @@ -2,11 +2,9 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.5.2" - ML_SYSTEM="polyml-5.5.2" - ML_PLATFORM="x86_64-linux" - ML_HOME="$POLYML_HOME/$ML_PLATFORM" - ML_OPTIONS="--minheap 2000 --maxheap 8000 --gcthreads 1" +ML_PLATFORM="$ISABELLE_PLATFORM64" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" +ML_OPTIONS="--minheap 2000 --maxheap 8000 --gcthreads 1" ISABELLE_HOME_USER=~/isabelle-at64-poly diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/mac-poly-M2-alternative --- a/Admin/isatest/settings/mac-poly-M2-alternative Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly-M2-alternative Mon May 25 22:11:43 2015 +0200 @@ -4,9 +4,8 @@ init_components /home/isabelle/contrib "$HOME/admin/components/optional" init_components /home/isabelle/contrib "$HOME/admin/components/nonfree" -ML_SYSTEM="polyml-5.5.2" -ML_PLATFORM="x86-darwin" -ML_HOME="/home/polyml/polyml-5.5.2/$ML_PLATFORM" +ML_PLATFORM="$ISABELLE_PLATFORM32" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" ML_OPTIONS="-H 1000" ISABELLE_HOME_USER=~/isabelle-mac-poly-M2-alternative diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/mac-poly-M4 --- a/Admin/isatest/settings/mac-poly-M4 Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly-M4 Mon May 25 22:11:43 2015 +0200 @@ -2,11 +2,9 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.5.2" - ML_SYSTEM="polyml-5.5.2" - ML_PLATFORM="x86-darwin" - 
ML_HOME="$POLYML_HOME/$ML_PLATFORM" - ML_OPTIONS="-H 500 --gcthreads 4" +ML_PLATFORM="$ISABELLE_PLATFORM32" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" +ML_OPTIONS="-H 500 --gcthreads 4" ISABELLE_HOME_USER=~/isabelle-mac-poly-M4 diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/mac-poly-M8 --- a/Admin/isatest/settings/mac-poly-M8 Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly-M8 Mon May 25 22:11:43 2015 +0200 @@ -2,11 +2,9 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.5.2" - ML_SYSTEM="polyml-5.5.2" - ML_PLATFORM="x86-darwin" - ML_HOME="$POLYML_HOME/$ML_PLATFORM" - ML_OPTIONS="-H 500 --gcthreads 8" +ML_PLATFORM="$ISABELLE_PLATFORM32" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" +ML_OPTIONS="-H 500 --gcthreads 8" ISABELLE_HOME_USER=~/isabelle-mac-poly-M8 diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/mac-poly-M8-quick_and_dirty --- a/Admin/isatest/settings/mac-poly-M8-quick_and_dirty Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly-M8-quick_and_dirty Mon May 25 22:11:43 2015 +0200 @@ -2,8 +2,8 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.4.1" - ML_SYSTEM="polyml-5.4.1" + POLYML_HOME="/home/polyml/polyml-5.5.1" + ML_SYSTEM="polyml-5.5.1" ML_PLATFORM="x86-darwin" ML_HOME="$POLYML_HOME/$ML_PLATFORM" ML_OPTIONS="-H 1000" diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/mac-poly64-M2 --- a/Admin/isatest/settings/mac-poly64-M2 Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly64-M2 Mon May 25 22:11:43 2015 +0200 @@ -2,8 +2,8 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.4.0" - ML_SYSTEM="polyml-5.4.0" + POLYML_HOME="/home/polyml/polyml-5.5.0" + ML_SYSTEM="polyml-5.5.0" ML_PLATFORM="x86_64-darwin" ML_HOME="$POLYML_HOME/$ML_PLATFORM" ML_OPTIONS="-H 1000" diff -r cc71f01f9fde -r ff82ba1893c8 
Admin/isatest/settings/mac-poly64-M4 --- a/Admin/isatest/settings/mac-poly64-M4 Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly64-M4 Mon May 25 22:11:43 2015 +0200 @@ -2,11 +2,9 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.5.2" - ML_SYSTEM="polyml-5.5.2" - ML_PLATFORM="x86_64-darwin" - ML_HOME="$POLYML_HOME/$ML_PLATFORM" - ML_OPTIONS="-H 2000 --gcthreads 4" +ML_PLATFORM="$ISABELLE_PLATFORM64" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" +ML_OPTIONS="-H 2000 --gcthreads 4" ISABELLE_GHC=ghc diff -r cc71f01f9fde -r ff82ba1893c8 Admin/isatest/settings/mac-poly64-M8 --- a/Admin/isatest/settings/mac-poly64-M8 Sat May 23 22:13:24 2015 +0200 +++ b/Admin/isatest/settings/mac-poly64-M8 Mon May 25 22:11:43 2015 +0200 @@ -2,11 +2,9 @@ init_components /home/isabelle/contrib "$HOME/admin/components/main" - POLYML_HOME="/home/polyml/polyml-5.5.2" - ML_SYSTEM="polyml-5.5.2" - ML_PLATFORM="x86_64-darwin" - ML_HOME="$POLYML_HOME/$ML_PLATFORM" - ML_OPTIONS="-H 2000 --gcthreads 8" +ML_PLATFORM="$ISABELLE_PLATFORM64" +ML_HOME="$POLYML_HOME/$ML_PLATFORM" +ML_OPTIONS="-H 2000 --gcthreads 8" ISABELLE_GHC=ghc diff -r cc71f01f9fde -r ff82ba1893c8 Admin/polyml/README --- a/Admin/polyml/README Sat May 23 22:13:24 2015 +0200 +++ b/Admin/polyml/README Mon May 25 22:11:43 2015 +0200 @@ -3,7 +3,7 @@ This compilation of Poly/ML 5.5.2 is based on http://sourceforge.net/p/polyml/code/HEAD/tree/fixes-5.5.2 version -2007. See also fixes-5.5.2.diff for the differences to the official +2009. See also fixes-5.5.2.diff for the differences to the official source distribution polyml.5.5.2.tar.gz from http://sourceforge.net/projects/polyml/. 
@@ -21,4 +21,4 @@ Makarius - 17-Apr-2015 + 22-Apr-2015 diff -r cc71f01f9fde -r ff82ba1893c8 CONTRIBUTORS --- a/CONTRIBUTORS Sat May 23 22:13:24 2015 +0200 +++ b/CONTRIBUTORS Mon May 25 22:11:43 2015 +0200 @@ -13,6 +13,10 @@ * 2014/2015: Daniel Matichuk, Toby Murray, NICTA and Makarius Wenzel The Eisbach proof method language and "match" method. +* Winter 2014 and Spring 2015: Ondrej Kuncar, TUM + Extension of lift_definition to execute lifted functions that have as a + return type a datatype containing a subtype. + * March 2015: Jasmin Blanchette, Inria & LORIA & MPII, Mathias Fleury, MPII, and Dmitriy Traytel, TUM More multiset theorems, syntax, and operations. diff -r cc71f01f9fde -r ff82ba1893c8 NEWS --- a/NEWS Sat May 23 22:13:24 2015 +0200 +++ b/NEWS Mon May 25 22:11:43 2015 +0200 @@ -70,8 +70,9 @@ by combining existing ones with their usual syntax. The "match" proof method provides basic fact/term matching in addition to premise/conclusion matching through Subgoal.focus, and binds fact names -from matches as well as term patterns within matches. See also -~~/src/HOL/Eisbach/Eisbach.thy and the included examples. +from matches as well as term patterns within matches. The Isabelle +documentation provides an entry "eisbach" for the Eisbach User Manual. +Sources and various examples are in ~~/src/HOL/Eisbach/. *** Prover IDE -- Isabelle/Scala/jEdit *** @@ -87,14 +88,14 @@ marker, SideKick parser. * Document antiquotation @{cite} provides formal markup, which is -interpreted semi-formally based on .bib files that happen to be opened -in the editor (hyperlinks, completion etc.). +interpreted semi-formally based on .bib files that happen to be open in +the editor (hyperlinks, completion etc.). * Less waste of vertical space via negative line spacing (see Global Options / Text Area). * Improved graphview panel with optional output of PNG or PDF, for -display of 'thy_deps', 'locale_deps', 'class_deps' etc. +display of 'thy_deps', 'class_deps' etc. 
* The commands 'thy_deps' and 'class_deps' allow optional bounds to restrict the visualized hierarchy. @@ -139,6 +140,11 @@ antiquotations need to observe the margin explicitly according to Thy_Output.string_of_margin. Minor INCOMPATIBILITY. +* Specification of 'document_files' in the session ROOT file is +mandatory for document preparation. The legacy mode with implicit +copying of the document/ directory is no longer supported. Minor +INCOMPATIBILITY. + *** Pure *** @@ -223,6 +229,10 @@ of rel_prod_def and rel_sum_def. Minor INCOMPATIBILITY: (rarely used by name) transfer theorem names changed (e.g. map_prod_transfer ~> prod.map_transfer). + - Parametricity theorems for map functions, relators, set functions, + constructors, case combinators, discriminators, selectors and + (co)recursors are automatically proved and registered as transfer + rules. * Old datatype package: - The old 'datatype' command has been renamed 'old_datatype', and @@ -268,6 +278,11 @@ - New option 'smt_statistics' to display statistics of the new 'smt' method, especially runtime statistics of Z3 proof reconstruction. +* Lifting: command 'lift_definition' allows to execute lifted constants +that have as a return type a datatype containing a subtype. This +overcomes long-time limitations in the area of code generation and +lifting, and avoids tedious workarounds. + * Command and antiquotation "value" provide different evaluation slots (again), where the previous strategy (NBE after ML) serves as default. Minor INCOMPATIBILITY. diff -r cc71f01f9fde -r ff82ba1893c8 doc/Contents --- a/doc/Contents Sat May 23 22:13:24 2015 +0200 +++ b/doc/Contents Mon May 25 22:11:43 2015 +0200 @@ -8,6 +8,7 @@ codegen Tutorial on Code Generation nitpick User's Guide to Nitpick sledgehammer User's Guide to Sledgehammer + eisbach The Eisbach User Manual sugar LaTeX Sugar for Isabelle documents Reference Manuals! 
diff -r cc71f01f9fde -r ff82ba1893c8 etc/settings --- a/etc/settings Sat May 23 22:13:24 2015 +0200 +++ b/etc/settings Mon May 25 22:11:43 2015 +0200 @@ -18,7 +18,11 @@ classpath "$ISABELLE_HOME/lib/classes/Pure.jar" -#paranoia setting -- avoid problems of Java/Swing versus XIM/IBus etc. +#paranoia settings -- avoid intrusion of alien options +unset "_JAVA_OPTIONS" +unset "JAVA_TOOL_OPTIONS" + +#paranoia settings -- avoid problems of Java/Swing versus XIM/IBus etc. unset XMODIFIERS diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Datatypes/Datatypes.thy --- a/src/Doc/Datatypes/Datatypes.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Datatypes/Datatypes.thy Mon May 25 22:11:43 2015 +0200 @@ -100,7 +100,7 @@ describes how to specify datatypes using the @{command datatype} command. \item Section \ref{sec:defining-primitively-recursive-functions}, ``Defining -Primitively Recursive Functions,'' describes how to specify recursive functions +Primitively Recursive Functions,'' describes how to specify functions using @{command primrec}. (A separate tutorial @{cite "isabelle-function"} describes the more general \keyw{fun} and \keyw{function} commands.) @@ -109,7 +109,7 @@ \item Section \ref{sec:defining-primitively-corecursive-functions}, ``Defining Primitively Corecursive Functions,'' describes how to specify -corecursive functions using the @{command primcorec} and +functions using the @{command primcorec} and @{command primcorecursive} commands. \item Section \ref{sec:registering-bounded-natural-functors}, ``Registering @@ -124,7 +124,7 @@ @{command datatype} and @{command codatatype}. %\item Section \ref{sec:using-the-standard-ml-interface}, ``Using the Standard -ML Interface,'' %describes the package's programmatic interface. +%ML Interface,'' describes the package's programmatic interface. 
\item Section \ref{sec:selecting-plugins}, ``Selecting Plugins,'' is concerned with the package's interoperability with other Isabelle packages and tools, such @@ -161,7 +161,7 @@ text {* Datatypes are illustrated through concrete examples featuring different flavors of recursion. More examples can be found in the directory -\verb|~~/src/HOL/|\allowbreak\verb|BNF/Examples|. +\verb|~~/src/HOL/|\allowbreak\verb|Datatype_Examples|. *} @@ -1667,7 +1667,7 @@ Codatatypes can be specified using the @{command codatatype} command. The command is first illustrated through concrete examples featuring different flavors of corecursion. More examples can be found in the directory -\verb|~~/src/HOL/|\allowbreak\verb|BNF/Examples|. The +\verb|~~/src/HOL/|\allowbreak\verb|Datatype_Examples|. The \emph{Archive of Formal Proofs} also includes some useful codatatypes, notably for lazy lists @{cite "lochbihler-2010"}. *} diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Eisbach/Base.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Doc/Eisbach/Base.thy Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,39 @@ +section \Basic setup that is not included in the document\ + +theory Base +imports Main +begin + +ML_file "~~/src/Doc/antiquote_setup.ML" + +ML\ +fun get_split_rule ctxt target = + let + val (head, args) = strip_comb (Envir.eta_contract target); + val (const_name, _) = dest_Const head; + val const_name_components = Long_Name.explode const_name; + + val _ = + if String.isPrefix "case_" (List.last const_name_components) then () + else raise TERM ("Not a case statement", [target]); + + val type_name = Long_Name.implode (rev (tl (rev const_name_components))); + val split = Proof_Context.get_thm ctxt (type_name ^ ".split"); + val vars = Term.add_vars (Thm.prop_of split) []; + + val datatype_name = nth (rev const_name_components) 1; + + fun is_datatype (Type (a, _)) = Long_Name.base_name a = Long_Name.base_name datatype_name + | is_datatype _ = false; + + val datatype_var = + (case 
find_first (fn (_, T') => is_datatype T') vars of + SOME var => Thm.cterm_of ctxt (Term.Var var) + | NONE => error ("Couldn't find datatype in thm: " ^ datatype_name)); + in + SOME (Drule.cterm_instantiate [(datatype_var, Thm.cterm_of ctxt (List.last args))] split) + end + handle TERM _ => NONE; +\ + +end diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Eisbach/Manual.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Doc/Eisbach/Manual.thy Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,971 @@ +(*:wrap=hard:maxLineLen=78:*) + +theory Manual +imports Base "../Eisbach_Tools" +begin + +chapter \The method command\ + +text \ + The @{command_def method} command provides the ability to write proof + methods by combining existing ones with their usual syntax. Specifically it + allows compound proof methods to be named, and to extend the name space of + basic methods accordingly. Method definitions may abstract over parameters: + terms, facts, or other methods. + + \medskip The syntax diagram below refers to some syntactic categories that + are further defined in @{cite "isabelle-isar-ref"}. + + @{rail \ + @@{command method} name args @'=' method + ; + args: term_args? method_args? \ fact_args? decl_args? + ; + term_args: @'for' @{syntax "fixes"} + ; + method_args: @'methods' (name+) + ; + fact_args: @'uses' (name+) + ; + decl_args: @'declares' (name+) + \} +\ + + +section \Basic method definitions\ + +text \ + Consider the following proof that makes use of usual Isar method + combinators. +\ + + lemma "P \ Q \ P" + by ((rule impI, (erule conjE)?) | assumption)+ + +text \ + It is clear that this compound method will be applicable in more cases than + this proof alone. With the @{command method} command we can define a proof + method that makes the above functionality available generally. +\ + + method prop_solver\<^sub>1 = + ((rule impI, (erule conjE)?) 
| assumption)+ + + lemma "P \ Q \ R \ P" + by prop_solver\<^sub>1 + +text \ + In this example, the facts @{text impI} and @{text conjE} are static. They + are evaluated once when the method is defined and cannot be changed later. + This makes the method stable in the sense of \emph{static scoping}: naming + another fact @{text impI} in a later context won't affect the behaviour of + @{text "prop_solver\<^sub>1"}. +\ + + +section \Term abstraction\ + +text \ + Methods can also abstract over terms using the @{keyword_def "for"} keyword, + optionally providing type constraints. For instance, the following proof + method @{text intro_ex} takes a term @{term y} of any type, which it uses to + instantiate the @{term x}-variable of @{text exI} (existential introduction) + before applying the result as a rule. The instantiation is performed here by + Isar's @{attribute_ref "where"} attribute. If the current subgoal is to find + a witness for the given predicate @{term Q}, then this has the effect of + committing to @{term y}. +\ + + method intro_ex for Q :: "'a \ bool" and y :: 'a = + (rule exI ["where" P = Q and x = y]) + + +text \ + The term parameters @{term y} and @{term Q} can be used arbitrarily inside + the method body, as part of attribute applications or arguments to other + methods. The expression is type-checked as far as possible when the method + is defined, however dynamic type errors can still occur when it is invoked + (e.g.\ when terms are instantiated in a parameterized fact). Actual term + arguments are supplied positionally, in the same order as in the method + definition. +\ + + lemma "P a \ \x. P x" + by (intro_ex P a) + + +section \Fact abstraction\ + +subsection \Named theorems\ + +text \ + A @{text "named theorem"} is a fact whose contents are produced dynamically + within the current proof context. 
The Isar command @{command_ref + "named_theorems"} provides simple access to this concept: it declares a + dynamic fact with corresponding \emph{attribute} for managing + this particular data slot in the context. +\ + + named_theorems intros + +text \ + So far @{text "intros"} refers to the empty fact. Using the Isar command + @{command_ref "declare"} we may apply declaration attributes to the context. + Below we declare both @{text "conjI"} and @{text "impI"} as @{text + "intros"}, adding them to the named theorem slot. +\ + + declare conjI [intros] and impI [intros] + +text \ + We can refer to named theorems as dynamic facts within a particular proof + context, which are evaluated whenever the method is invoked. Instead of + having facts hard-coded into the method, as in @{text prop_solver\<^sub>1}, we can + instead refer to these named theorems. +\ + + named_theorems elims + declare conjE [elims] + + method prop_solver\<^sub>3 = + ((rule intros, (erule elims)?) | assumption)+ + + lemma "P \ Q \ P" + by prop_solver\<^sub>3 + +text \ + Often these named theorems need to be augmented on the spot, when a method + is invoked. The @{keyword_def "declares"} keyword in the signature of + @{command method} adds the common method syntax @{text "method decl: facts"} + for each named theorem @{text decl}. +\ + + method prop_solver\<^sub>4 declares intros elims = + ((rule intros, (erule elims)?) | assumption)+ + + lemma "P \ (P \ Q) \ Q \ P" + by (prop_solver\<^sub>4 elims: impE intros: conjI) + + +subsection \Simple fact abstraction\ + +text \ + The @{keyword "declares"} keyword requires that a corresponding dynamic fact + has been declared with @{command_ref named_theorems}. This is useful for + managing collections of facts which are to be augmented with declarations, + but is overkill if we simply want to pass a fact to a method. + + We may use the @{keyword_def "uses"} keyword in the method header to provide + a simple fact parameter. 
In contrast to @{keyword "declares"}, these facts + are always implicitly empty unless augmented when the method is invoked. +\ + + method rule_twice uses my_rule = + (rule my_rule, rule my_rule) + + lemma "P \ Q \ (P \ Q) \ Q" + by (rule_twice my_rule: conjI) + + +section \Higher-order methods\ + +text \ + The \emph{structured concatenation} combinator ``@{text "method\<^sub>1 ; + method\<^sub>2"}'' was introduced in Isabelle2015, motivated by development of + Eisbach. It is similar to ``@{text "method\<^sub>1, method\<^sub>2"}'', but @{text + method\<^sub>2} is invoked on on \emph{all} subgoals that have newly emerged from + @{text method\<^sub>1}. This is useful to handle cases where the number of + subgoals produced by a method is determined dynamically at run-time. +\ +text_raw\\vbox{\ + method conj_with uses rule = + (intro conjI ; intro rule) + + lemma + assumes A: "P" + shows "P \ P \ P" + by (conj_with rule: A) +text_raw\}\ +text \ + Method definitions may take other methods as arguments, and thus implement + method combinators with prefix syntax. For example, to more usefully exploit + Isabelle's backtracking, the explicit requirement that a method solve all + produced subgoals is frequently useful. This can easily be written as a + \emph{higher-order method} using ``@{text ";"}''. The @{keyword "methods"} + keyword denotes method parameters that are other proof methods to be invoked + by the method being defined. +\ + + method solve methods m = (m ; fail) + +text \ + Given some method-argument @{text m}, @{text "solve \m\"} applies the + method @{text m} and then fails whenever @{text m} produces any new unsolved + subgoals --- i.e. when @{text m} fails to completely discharge the goal it + was applied to. +\ + + +section \Example\ + +text \ + With these simple features we are ready to write our first non-trivial proof + method. 
Returning to the first-order logic example, the following method + definition applies various rules with their canonical methods. +\ + + named_theorems subst + + method prop_solver declares intros elims subst = + (assumption | + (rule intros) | erule elims | + subst subst | subst (asm) subst | + (erule notE ; solve \prop_solver\))+ + +text \ + The only non-trivial part above is the final alternative @{text "(erule notE + ; solve \prop_solver\)"}. Here, in the case that all other alternatives + fail, the method takes one of the assumptions @{term "\ P"} of the current + goal and eliminates it with the rule @{text notE}, causing the goal to be + proved to become @{term P}. The method then recursively invokes itself on + the remaining goals. The job of the recursive call is to demonstrate that + there is a contradiction in the original assumptions (i.e.\ that @{term P} + can be derived from them). Note this recursive invocation is applied with + the @{method solve} method combinator to ensure that a contradiction will + indeed be shown. In the case where a contradiction cannot be found, + backtracking will occur and a different assumption @{term "\ Q"} will be + chosen for elimination. + + Note that the recursive call to @{method prop_solver} does not have any + parameters passed to it. Recall that fact parameters, e.g.\ @{text + "intros"}, @{text "elims"}, and @{text "subst"}, are managed by declarations + in the current proof context. They will therefore be passed to any recursive + call to @{method prop_solver} and, more generally, any invocation of a + method which declares these named theorems. 
+ + \medskip After declaring some standard rules to the context, the @{method + prop_solver} becomes capable of solving non-trivial propositional + tautologies.\ + + lemmas [intros] = + conjI -- \@{thm conjI}\ + impI -- \@{thm impI}\ + disjCI -- \@{thm disjCI}\ + iffI -- \@{thm iffI}\ + notI -- \@{thm notI}\ + + lemmas [elims] = + impCE -- \@{thm impCE}\ + conjE -- \@{thm conjE}\ + disjE -- \@{thm disjE}\ + + lemma "(A \ B) \ (A \ C) \ (B \ C) \ C" + by prop_solver + + +chapter \The match method \label{s:matching}\ + +text \ + So far we have seen methods defined as simple combinations of other methods. + Some familiar programming language concepts have been introduced (i.e.\ + abstraction and recursion). The only control flow has been implicitly the + result of backtracking. When designing more sophisticated proof methods this + proves too restrictive and difficult to manage conceptually. + + To address this, we introduce the @{method_def "match"} method, which + provides more direct access to the higher-order matching facility at the + core of Isabelle. It is implemented as a separate proof method (in + Isabelle/ML), and thus can be directly applied to proofs, however it is most + useful when applied in the context of writing Eisbach method definitions. + + \medskip The syntax diagram below refers to some syntactic categories that + are further defined in @{cite "isabelle-isar-ref"}. + + @{rail \ + @@{method match} kind @'in' (pattern '\' cartouche + '\') + ; + kind: + (@'conclusion' | @'premises' ('(' 'local' ')')? | + '(' term ')' | @{syntax thmrefs}) + ; + pattern: fact_name? term args? \ (@'for' fixes)? + ; + fact_name: @{syntax name} @{syntax attributes}? ':' + ; + args: '(' (('multi' | 'cut' nat?) + ',') ')' + \} + + Matching allows methods to introspect the goal state, and to implement more + explicit control flow. 
In the basic case, a term or fact @{text ts} is given + to match against as a \emph{match target}, along with a collection of + pattern-method pairs @{text "(p, m)"}: roughly speaking, when the pattern + @{text p} matches any member of @{text ts}, the \emph{inner} method @{text + m} will be executed. +\ + + lemma + assumes X: + "Q \ P" + "Q" + shows P + by (match X in I: "Q \ P" and I': "Q" \ \insert mp [OF I I']\) + +text \ + In this example we have a structured Isar proof, with the named + assumption @{text "X"} and a conclusion @{term "P"}. With the match method + we can find the local facts @{term "Q \ P"} and @{term "Q"}, binding them to + separately as @{text "I"} and @{text "I'"}. We then specialize the + modus-ponens rule @{thm mp [of Q P]} to these facts to solve the goal. +\ + + +section \Subgoal focus\ + +text\ + In the previous example we were able to match against an assumption out of + the Isar proof state. In general, however, proof subgoals can be + \emph{unstructured}, with goal parameters and premises arising from rule + application. To address this, @{method match} uses \emph{subgoal focusing} + to produce structured goals out of + unstructured ones. In place of fact or term, we may give the + keyword @{keyword_def "premises"} as the match target. This causes a subgoal + focus on the first subgoal, lifting local goal parameters to fixed term + variables and premises into hypothetical theorems. The match is performed + against these theorems, naming them and binding them as appropriate. + Similarly giving the keyword @{keyword_def "conclusion"} matches against the + conclusion of the first subgoal. + + An unstructured version of the previous example can then be similarly solved + through focusing. +\ + + lemma "Q \ P \ Q \ P" + by (match premises in + I: "Q \ P" and I': "Q" \ \insert mp [OF I I']\) + +text \ + Match variables may be specified by giving a list of @{keyword_ref + "for"}-fixes after the pattern description. 
This marks those terms as bound + variables, which may be used in the method body. +\ + + lemma "Q \ P \ Q \ P" + by (match premises in I: "Q \ A" and I': "Q" for A \ + \match conclusion in A \ \insert mp [OF I I']\\) + +text \ + In this example @{term A} is a match variable which is bound to @{term P} + upon a successful match. The inner @{method match} then matches the + now-bound @{term A} (bound to @{term P}) against the conclusion (also @{term + P}), finally applying the specialized rule to solve the goal. + + Schematic terms like @{text "?P"} may also be used to specify match + variables, but the result of the match is not bound, and thus cannot be used + in the inner method body. + + \medskip In the following example we extract the predicate of an + existentially quantified conclusion in the current subgoal and search the + current premises for a matching fact. If both matches are successful, we + then instantiate the existential introduction rule with both the witness and + predicate, solving with the matched premise. +\ + + method solve_ex = + (match conclusion in "\x. Q x" for Q \ + \match premises in U: "Q y" for y \ + \rule exI [where P = Q and x = y, OF U]\\) + +text \ + The first @{method match} matches the pattern @{term "\x. Q x"} against the + current conclusion, binding the term @{term "Q"} in the inner match. Next + the pattern @{text "Q y"} is matched against all premises of the current + subgoal. In this case @{term "Q"} is fixed and @{term "y"} may be + instantiated. Once a match is found, the local fact @{text U} is bound to + the matching premise and the variable @{term "y"} is bound to the matching + witness. The existential introduction rule @{text "exI:"}~@{thm exI} is then + instantiated with @{term "y"} as the witness and @{term "Q"} as the + predicate, with its proof obligation solved by the local fact U (using the + Isar attribute @{attribute OF}). The following example is a trivial use of + this method. +\ + + lemma "halts p \ \x. 
halts x" + by solve_ex + + +subsection \Operating within a focus\ + +text \ + Subgoal focusing provides a structured form of a subgoal, allowing for more + expressive introspection of the goal state. This requires some consideration + in order to be used effectively. When the keyword @{keyword "premises"} is + given as the match target, the premises of the subgoal are lifted into + hypothetical theorems, which can be found and named via match patterns. + Additionally these premises are stripped from the subgoal, leaving only the + conclusion. This renders them inaccessible to standard proof methods which + operate on the premises, such as @{method frule} or @{method erule}. Naive + usage of these methods within a match will most likely not function as the + method author intended. +\ + + method my_allE_bad for y :: 'a = + (match premises in I: "\x :: 'a. ?Q x" \ + \erule allE [where x = y]\) + +text \ + Here we take a single parameter @{term y} and specialize the universal + elimination rule (@{thm allE}) to it, then attempt to apply this specialized + rule with @{method erule}. The method @{method erule} will attempt to unify + with a universal quantifier in the premises that matches the type of @{term + y}. Since @{keyword "premises"} causes a focus, however, there are no + subgoal premises to be found and thus @{method my_allE_bad} will always + fail. If focusing instead left the premises in place, using methods + like @{method erule} would lead to unintended behaviour, specifically during + backtracking. In our example, @{method erule} could choose an alternate + premise while backtracking, while leaving @{text I} bound to the original + match. In the case of more complex inner methods, where either @{text I} or + bound terms are used, this would almost certainly not be the intended + behaviour. + + An alternative implementation would be to specialize the elimination rule to + the bound term and apply it directly. 
+\ + + method my_allE_almost for y :: 'a = + (match premises in I: "\x :: 'a. ?Q x" \ + \rule allE [where x = y, OF I]\) + + lemma "\x. P x \ P y" + by (my_allE_almost y) + +text \ + This method will insert a specialized duplicate of a universally quantified + premise. Although this will successfully apply in the presence of such a + premise, it is not likely the intended behaviour. Repeated application of + this method will produce an infinite stream of duplicate specialized + premises, due to the original premise never being removed. To address this, + matched premises may be declared with the @{attribute "thin"} attribute. + This will hide the premise from subsequent inner matches, and remove it from + the list of premises when the inner method has finished and the subgoal is + unfocused. It can be considered analogous to the existing @{text thin_tac}. + + To complete our example, the correct implementation of the method + will @{attribute "thin"} the premise from the match and then apply it to the + specialized elimination rule.\ + + method my_allE for y :: 'a = + (match premises in I [thin]: "\x :: 'a. ?Q x" \ + \rule allE [where x = y, OF I]\) + + lemma "\x. P x \ \x. Q x \ P y \ Q y" + by (my_allE y)+ (rule conjI) + +subsubsection \Inner focusing\ + +text \ + Premises are \emph{accumulated} for the purposes of subgoal focusing. + In contrast to using standard methods like @{method frule} within + focused match, another @{method match} will have access to all the premises + of the outer focus. + \ + + lemma "A \ B \ A \ B" + by (match premises in H: A \ \intro conjI, rule H, + match premises in H': B \ \rule H'\\) + +text \ + In this example, the inner @{method match} can find the focused premise + @{term B}. In contrast, the @{method assumption} method would fail here + due to @{term B} not being logically accessible. 
+\ + + lemma + "A \ A \ (B \ B)" + by (match premises in H: A \ \intro conjI, rule H, rule impI, + match premises (local) in A \ \fail\ + \ H': B \ \rule H'\\) + +text \ + In this example, the only premise that exists in the first focus is + @{term "A"}. Prior to the inner match, the rule @{text impI} changes + the goal @{term "B \ B"} into @{term "B \ B"}. A standard premise + match would also include @{term A} as an original premise of the outer + match. The @{text local} argument limits the match to + newly focused premises. + +\ + +section \Attributes\ + +text \ + Attributes may throw errors when applied to a given fact. For example, rule + instantiation will fail if there is a type mismatch or if a given variable + doesn't exist. Within a match or a method definition, it isn't generally + possible to guarantee that applied attributes won't fail. For example, in + the following method there is no guarantee that the two provided facts will + necessarily compose. +\ + + method my_compose uses rule1 rule2 = + (rule rule1 [OF rule2]) + +text \ + Some attributes (like @{attribute OF}) have been made partially + Eisbach-aware. This means that they are able to form a closure despite not + necessarily always being applicable. In the case of @{attribute OF}, it is + up to the proof author to guard attribute application with an appropriate + @{method match}, but there are still no static guarantees. + + In contrast to @{attribute OF}, the @{attribute "where"} and @{attribute of} + attributes attempt to provide static guarantees that they will apply + whenever possible. + + Within a match pattern for a fact, each outermost quantifier specifies the + requirement that a matching fact must have a schematic variable at that + point. This gives a corresponding name to this ``slot'' for the purposes of + forming a static closure, allowing the @{attribute "where"} attribute to + perform an instantiation at run-time. 
+\ +text_raw\\vbox{\ + lemma + assumes A: "Q \ False" + shows "\ Q" + by (match intros in X: "\P. (P \ False) \ \ P" \ + \rule X [where P = Q, OF A]\) +text_raw\}\ +text \ + Subgoal focusing converts the outermost quantifiers of premises into + schematics when lifting them to hypothetical facts. This allows us to + instantiate them with @{attribute "where"} when using an appropriate match + pattern. +\ + + lemma "(\x :: 'a. A x \ B x) \ A y \ B y" + by (match premises in I: "\x :: 'a. ?P x \ ?Q x" \ + \rule I [where x = y]\) + +text \ + The @{attribute of} attribute behaves similarly. It is worth noting, + however, that the positional instantiation of @{attribute of} occurs against + the position of the variables as they are declared \emph{in the match + pattern}. +\ + + lemma + fixes A B and x :: 'a and y :: 'b + assumes asm: "(\x y. A y x \ B x y )" + shows "A y x \ B x y" + by (match asm in I: "\(x :: 'a) (y :: 'b). ?P x y \ ?Q x y" \ + \rule I [of x y]\) + +text \ + In this example, the order of schematics in @{text asm} is actually @{text + "?y ?x"}, but we instantiate our matched rule in the opposite order. This is + because the effective rule @{term I} was bound from the match, which + declared the @{typ 'a} slot first and the @{typ 'b} slot second. + + To get the dynamic behaviour of @{attribute of} we can choose to invoke it + \emph{unchecked}. This avoids trying to do any type inference for the + provided parameters, instead storing them as their most general type and + doing type matching at run-time. This, like @{attribute OF}, will throw + errors if the expected slots don't exist or there is a type mismatch. +\ + + lemma + fixes A B and x :: 'a and y :: 'b + assumes asm: "\x y. A y x \ B x y" + shows "A y x \ B x y" + by (match asm in I: "PROP ?P" \ \rule I [of (unchecked) y x]\) + +text \ + Attributes may be applied to matched facts directly as they are matched. 
Any + declarations will therefore be applied in the context of the inner method, + as well as any transformations to the rule. +\ + + lemma "(\x :: 'a. A x \ B x) \ A y \ B y" + by (match premises in I [of y, intros]: "\x :: 'a. ?P x \ ?Q x" \ + \prop_solver\) + +text \ + In this example, the pattern @{text "\x :: 'a. ?P x \ ?Q x"} matches against + the only premise, giving an appropriately typed slot for @{term y}. After + the match, the resulting rule is instantiated to @{term y} and then declared + as an @{attribute intros} rule. This is then picked up by @{method + prop_solver} to solve the goal. +\ + + +section \Multi-match \label{sec:multi}\ + +text \ + In all previous examples, @{method match} was only ever searching for a + single rule or premise. Each local fact would therefore always have a length + of exactly one. We may, however, wish to find \emph{all} matching results. + To achieve this, we can simply mark a given pattern with the @{text + "(multi)"} argument. +\ + + lemma + assumes asms: "A \ B" "A \ D" + shows "(A \ B) \ (A \ D)" + apply (match asms in I [intros]: "?P \ ?Q" \ \solves \prop_solver\\)? + apply (match asms in I [intros]: "?P \ ?Q" (multi) \ \prop_solver\) + done + +text \ + In the first @{method match}, without the @{text "(multi)"} argument, @{term + I} is only ever bound to one of the members of @{text asms}. This + backtracks over both possibilities (see next section), however neither + assumption in isolation is sufficient to solve the goal. The use of the + @{method solves} combinator ensures that @{method prop_solver} has no effect + on the goal when it doesn't solve it, and so the first match leaves the goal + unchanged. In the second @{method match}, @{text I} is bound to all of + @{text asms}, declaring both results as @{text intros}. With these rules + @{method prop_solver} is capable of solving the goal. + + Using for-fixed variables in patterns imposes additional constraints on the + results. 
In all previous examples, the choice of using @{text ?P} or a + for-fixed @{term P} only depended on whether or not @{term P} was mentioned + in another pattern or the inner method. When using a multi-match, however, + all for-fixed terms must agree in the results. +\ + + lemma + assumes asms: "A \ B" "A \ D" "D \ B" + shows "(A \ B) \ (A \ D)" + apply (match asms in I [intros]: "?P \ Q" (multi) for Q \ + \solves \prop_solver\\)? + apply (match asms in I [intros]: "P \ ?Q" (multi) for P \ + \prop_solver\) + done + +text \ + Here we have two seemingly-equivalent applications of @{method match}, + however only the second one is capable of solving the goal. The first + @{method match} selects the first and third members of @{text asms} (those + that agree on their conclusion), which is not sufficient. The second + @{method match} selects the first and second members of @{text asms} (those + that agree on their assumption), which is enough for @{method prop_solver} + to solve the goal. +\ + + +section \Dummy patterns\ + +text \ + Dummy patterns may be given as placeholders for unique schematics in + patterns. They implicitly receive all currently bound variables as + arguments, and are coerced into the @{typ prop} type whenever possible. For + example, the trivial dummy pattern @{text "_"} will match any proposition. + In contrast, by default the pattern @{text "?P"} is considered to have type + @{typ bool}. It will not bind anything with meta-logical connectives (e.g. + @{text "_ \ _"} or @{text "_ &&& _"}). +\ + + lemma + assumes asms: "A &&& B \ D" + shows "(A \ B \ D)" + by (match asms in I: _ \ \prop_solver intros: I conjunctionI\) + + +section \Backtracking\ + +text \ + Patterns are considered top-down, executing the inner method @{text m} of + the first pattern which is satisfied by the current match target. By + default, matching performs extensive backtracking by attempting all valid + variable and fact bindings according to the given pattern. 
In particular, + all unifiers for a given pattern will be explored, as well as each matching + fact. The inner method @{text m} will be re-executed for each different + variable/fact binding during backtracking. A successful match is considered + a cut-point for backtracking. Specifically, once a match is made no other + pattern-method pairs will be considered. + + The method @{text foo} below fails for all goals that are conjunctions. Any + such goal will match the first pattern, causing the second pattern (that + would otherwise match all goals) to never be considered. +\ + + method foo = + (match conclusion in "?P \ ?Q" \ \fail\ \ "?R" \ \prop_solver\) + +text \ + The failure of an inner method that is executed after a successful match + will cause the entire match to fail. This distinction is important + due to the pervasive use of backtracking. When a method is used in a + combinator chain, its failure + becomes significant because it signals previously applied methods to move to + the next result. Therefore, it is necessary for @{method match} to not mask + such failure. One can always rewrite a match using the combinators ``@{text + "?"}'' and ``@{text "|"}'' to try subsequent patterns in the case of an + inner-method failure. The following proof method, for example, always + invokes @{method prop_solver} for all goals because its first alternative + either never matches or (if it does match) always fails. +\ + + method foo\<^sub>1 = + (match conclusion in "?P \ ?Q" \ \fail\) | + (match conclusion in "?R" \ \prop_solver\) + + +subsection \Cut\ + +text \ + Backtracking may be controlled more precisely by marking individual patterns + as \emph{cut}. This causes backtracking to not progress beyond this pattern: + once a match is found no others will be considered. 
+\ + + method foo\<^sub>2 = + (match premises in I: "P \ Q" (cut) and I': "P \ ?U" for P Q \ + \rule mp [OF I' I [THEN conjunct1]]\) + +text \ + In this example, once a conjunction is found (@{term "P \ Q"}), all possible + implications of @{term "P"} in the premises are considered, evaluating the + inner @{method rule} with each consequent. No other conjunctions will be + considered, with method failure occurring once all implications of the + form @{text "P \ ?U"} have been explored. Here the left-right processing of + individual patterns is important, as all patterns after the cut will + maintain their usual backtracking behaviour. +\ + + lemma "A \ B \ A \ D \ A \ C \ C" + by foo\<^sub>2 + + lemma "C \ D \ A \ B \ A \ C \ C" + by (foo\<^sub>2 | prop_solver) + +text \ + In this example, the first lemma is solved by @{text foo\<^sub>2}, by first + picking @{term "A \ D"} for @{text I'}, then backtracking and ultimately + succeeding after picking @{term "A \ C"}. In the second lemma, however, + @{term "C \ D"} is matched first, the second pattern in the match cannot be + found and so the method fails, falling through to @{method prop_solver}. + + More precise control is also possible by giving a positive + number @{text n} as an argument to @{text cut}. This will limit the number + of backtracking results of that match to be at most @{text n}. + The match argument @{text "(cut 1)"} is the same as simply @{text "(cut)"}. +\ + + +subsection \Multi-match revisited\ + +text \ + A multi-match will produce a sequence of potential bindings for for-fixed + variables, where each binding environment is the result of matching against + at least one element from the match target. For each environment, the match + result will be all elements of the match target which agree with the pattern + under that environment. This can result in unexpected behaviour when giving + very general patterns. +\ + + lemma + assumes asms: "\x. A x \ B x" "\y. A y \ C y" "\z. 
B z \ C z" + shows "A x \ C x" + by (match asms in I: "\x. P x \ ?Q x" (multi) for P \ + \match (P) in "A" \ \fail\ + \ _ \ \match I in "\x. A x \ B x" \ \fail\ + \ _ \ \rule I\\\) + +text \ + Intuitively it seems like this proof should fail to check. The first match + result, which binds @{term I} to the first two members of @{text asms}, + fails the second inner match due to binding @{term P} to @{term A}. + Backtracking then attempts to bind @{term I} to the third member of @{text + asms}. This passes all inner matches, but fails when @{method rule} cannot + successfully apply this to the current goal. After this, a valid match that + is produced by the unifier is one which binds @{term P} to simply @{text + "\a. A ?x"}. The first inner match succeeds because @{text "\a. A ?x"} does + not match @{term A}. The next inner match succeeds because @{term I} has + only been bound to the first member of @{text asms}. This is due to @{method + match} considering @{text "\a. A ?x"} and @{text "\a. A ?y"} as distinct + terms. + + The simplest way to address this is to explicitly disallow term bindings + which we would consider invalid. +\ + + method abs_used for P = + (match (P) in "\a. ?P" \ \fail\ \ _ \ \-\) + +text \ + This method has no effect on the goal state, but instead serves as a filter + on the environment produced from match. +\ + + +section \Uncurrying\ + +text \ + The @{method match} method is not aware of the logical content of match + targets. Each pattern is simply matched against the shallow structure of a + fact or term. Most facts are in \emph{normal form}, which curries premises + via meta-implication @{text "_ \ _"}. +\ + +text_raw \\vbox{\ + lemma + assumes asms: "D \ B \ C" "D \ A" + shows "D \ B \ C \ A" + by (match asms in H: "D \ _" (multi) \ \prop_solver elims: H\) +text_raw \}\ +text \ + For the first member of @{text asms} the dummy pattern successfully matches + against @{term "B \ C"} and so the proof is successful. 
+\ + + lemma + assumes asms: "A \ B \ C" "D \ C" + shows "D \ (A \ B) \ C" + apply (match asms in H: "_ \ C" (multi) \ \prop_solver elims: H\)(*<*)? + apply (prop_solver elims: asms) + done(*>*) + +text \ + This proof will fail to solve the goal. Our match pattern will only match + rules which have a single premise, and conclusion @{term C}, so the first + member of @{text asms} is not bound and thus the proof fails. Matching a + pattern of the form @{term "P \ Q"} against this fact will bind @{term "P"} + to @{term "A"} and @{term Q} to @{term "B \ C"}. Our pattern, with a + concrete @{term "C"} in the conclusion, will fail to match this fact. + + To express our desired match, we may \emph{uncurry} our rules before + matching against them. This forms a meta-conjunction of all premises in a + fact, so that only one implication remains. For example the uncurried + version of @{term "A \ B \ C"} is @{term "A &&& B \ C"}. This will now match + our desired pattern @{text "_ \ C"}, and can be \emph{curried} after the + match to put it back into normal form. +\ + + lemma + assumes asms: "A \ B \ C" "D \ C" + shows "D \ (A \ B) \ C" + by (match asms [uncurry] in H [curry]: "_ \ C" (multi) \ + \prop_solver elims: H\) + + +section \Reverse matching\ + +text \ + The @{method match} method only attempts to perform matching of the pattern + against the match target. Specifically this means that it will not + instantiate schematic terms in the match target. +\ + + lemma + assumes asms: "\x :: 'a. A x" + shows "A y" + apply (match asms in H: "A y" \ \rule H\)? + apply (match asms in H: P for P \ + \match ("A y") in P \ \rule H\\) + done + +text \ + In the first @{method match} we attempt to find a member of @{text asms} + which matches our goal precisely. This fails due to no such member existing. + The second match reverses the role of the fact in the match, by first giving + a general pattern @{term P}. This bound pattern is then matched against + @{term "A y"}. 
In this case, @{term P} is bound to @{text "A ?x"} and so it + successfully matches. +\ + + +section \Type matching\ + +text \ + The rule instantiation attributes @{attribute "where"} and @{attribute "of"} + attempt to guarantee type-correctness wherever possible. This can require + additional invocations of @{method match} in order to statically ensure that + instantiation will succeed. +\ + + lemma + assumes asms: "\x :: 'a. A x" + shows "A y" + by (match asms in H: "\z :: 'b. P z" for P \ + \match (y) in "y :: 'b" for y \ \rule H [where z = y]\\) + +text \ + In this example the type @{text 'b} is matched to @{text 'a}, however + statically they are formally distinct types. The first match binds @{text + 'b} while the inner match serves to coerce @{term y} into having the type + @{text 'b}. This allows the rule instantiation to successfully apply. +\ + + +chapter \Method development\ + +section \Tracing methods\ + +text \ + Method tracing is supported by auxiliary print methods provided by @{theory + Eisbach_Tools}. These include @{method print_fact}, @{method print_term} and + @{method print_type}. Whenever a print method is evaluated it leaves the + goal unchanged and writes its argument as tracing output. + + Print methods can be combined with the @{method fail} method to investigate + the backtracking behaviour of a method. +\ + + lemma + assumes asms: A B C D + shows D + apply (match asms in H: _ \ \print_fact H, fail\)(*<*)? + apply (simp add: asms) + done(*>*) + +text \ + This proof will fail, but the tracing output will show the order that the + assumptions are attempted. +\ + + +section \Integrating with Isabelle/ML\ + +subsubsection \Attributes\ + +text \ + A custom rule attribute is a simple way to extend the functionality of + Eisbach methods. The dummy rule attribute notation (@{text "[[ _ ]]"}) + invokes the given attribute against a dummy fact and evaluates to the result + of that attribute. 
When used as a match target, this can serve as an + effective auxiliary function. +\ + + attribute_setup get_split_rule = + \Args.term >> (fn t => + Thm.rule_attribute (fn context => fn _ => + (case get_split_rule (Context.proof_of context) t of + SOME thm => thm + | NONE => Drule.dummy_thm)))\ + +text \ + In this example, the new attribute @{attribute get_split_rule} lifts the ML + function of the same name into an attribute. When applied to a case + distinction over a datatype, it retrieves its corresponding split rule. + + We can then integrate this into a method that applies the split rule, first + matching to ensure that fetching the rule was successful. +\ +(*<*)declare TrueI [intros](*>*) + method splits = + (match conclusion in "?P f" for f \ + \match [[get_split_rule f]] in U: "(_ :: bool) = _" \ + \rule U [THEN iffD2]\\) + + lemma "L \ [] \ case L of [] \ False | _ \ True" + apply splits + apply (prop_solver intros: allI) + done + +text \ + Here the new @{method splits} method transforms the goal to use only logical + connectives: @{term "L = [] \ False \ (\x y. L = x # y \ True)"}. This goal + is then in a form solvable by @{method prop_solver} when given the universal + quantifier introduction rule @{text allI}. +\ + +end diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Eisbach/Preface.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Doc/Eisbach/Preface.thy Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,35 @@ +(*:wrap=hard:maxLineLen=78:*) + +theory Preface +imports Base "../Eisbach_Tools" +begin + +text \ + \emph{Eisbach} is a collection of tools which form the basis for defining + new proof methods in Isabelle/Isar~@{cite "Wenzel-PhD"}. It can be thought + of as a ``proof method language'', but is more precisely an infrastructure + for defining new proof methods out of existing ones. + + The core functionality of Eisbach is provided by the Isar @{command method} + command. 
Here users may define new methods by combining existing ones with + the usual Isar syntax. These methods can be abstracted over terms, facts and + other methods, as one might expect in any higher-order functional language. + + Additional functionality is provided by extending the space of methods and + attributes. The new @{method match} method allows for explicit control-flow, + by taking a match target and a list of pattern-method pairs. By using the + functionality provided by Eisbach, additional support methods can be easily + written. For example, the @{method catch} method, which provides basic + try-catch functionality, only requires a few lines of ML. + + Eisbach is meant to allow users to write automation using only Isar syntax. + Traditionally proof methods have been written in Isabelle/ML, which poses a + high barrier-to-entry for many users. + + \medskip This manual is written for users familiar with Isabelle/Isar, but + not necessarily Isabelle/ML. It covers the usage of the @{command method} as + well as the @{method match} method, as well as discussing their integration + with existing Isar concepts such as @{command named_theorems}. 
+\ + +end \ No newline at end of file diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Eisbach/document/build --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Doc/Eisbach/document/build Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -e + +FORMAT="$1" +VARIANT="$2" + +"$ISABELLE_TOOL" logo Eisbach +"$ISABELLE_HOME/src/Doc/prepare_document" "$FORMAT" + diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Eisbach/document/root.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Doc/Eisbach/document/root.tex Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,88 @@ +\documentclass[12pt,a4paper,fleqn]{report} +\usepackage[T1]{fontenc} +\usepackage{latexsym,graphicx} +\usepackage[refpage]{nomencl} +\usepackage{iman,extra,isar,proof} +\usepackage[nohyphen,strings]{underscore} +\usepackage{isabelle} +\usepackage{isabellesym} +\usepackage{railsetup} +\usepackage{ttbox} +\usepackage{supertabular} +\usepackage{style} +\usepackage{pdfsetup} + + +\hyphenation{Isabelle} +\hyphenation{Eisbach} + +\isadroptag{theory} +\title{\includegraphics[scale=0.5]{isabelle_eisbach} + \\[4ex] The Eisbach User Manual} +\author{Daniel Matichuk \\ + Makarius Wenzel \\ + Toby Murray +} + + +% Control fixmes etc. 
+\newif\ifDraft \newif\ifFinal +%\Drafttrue\Finalfalse +\Draftfalse\Finaltrue + + +\ifDraft + \usepackage{draftcopy} + \newcommand{\Comment}[1]{\textbf{\textsl{#1}}} + \newenvironment{LongComment}[1] % multi-paragraph comment, argument is owner + {\begingroup\par\noindent\slshape \textbf{Begin Comment[#1]}\par} + {\par\noindent\textbf{End Comment}\endgroup\par} + \newcommand{\FIXME}[1]{\textbf{\textsl{FIXME: #1}}} + \newcommand{\TODO}[1]{\textbf{\textsl{TODO: #1}}} +\else + \newcommand{\Comment}[1]{\relax} + \newenvironment{LongComment}[1]{\expandafter\comment}{\expandafter\endcomment} + \newcommand{\FIXME}[1]{\relax} + \newcommand{\TODO}[1]{\relax} +\fi + +% This sort of command for each active author can be convenient +\newcommand{\dan}[1]{\Comment{#1 [dan]}} +\newcommand{\toby}[1]{\Comment{#1 [toby]}} +\newcommand{\makarius}[1]{\Comment{#1 [makarius]}} + + +\makeindex + +\chardef\charbackquote=`\` +\newcommand{\backquote}{\mbox{\tt\charbackquote}} + + +\begin{document} + +\maketitle + +\pagenumbering{roman} +\chapter*{Preface} +\input{Preface.tex} +\tableofcontents +\clearfirst + +\input{Manual.tex} + +\begingroup +\tocentry{\bibname} +\bibliographystyle{abbrv} \small\raggedright\frenchspacing +\bibliography{manual} +\endgroup + +\tocentry{\indexname} +\printindex + +\end{document} + + +%%% Local Variables: +%%% mode: latex +%%% TeX-master: t +%%% End: diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Eisbach/document/style.sty --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Doc/Eisbach/document/style.sty Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,68 @@ +%% toc +\newcommand{\tocentry}[1]{\cleardoublepage\phantomsection\addcontentsline{toc}{chapter}{#1} +\@mkboth{\MakeUppercase{#1}}{\MakeUppercase{#1}}} + +%% references +\newcommand{\secref}[1]{\S\ref{#1}} +\newcommand{\chref}[1]{chapter~\ref{#1}} +\newcommand{\figref}[1]{figure~\ref{#1}} + +%% math +\newcommand{\text}[1]{\mbox{#1}} +\newcommand{\isasymvartheta}{\isamath{\theta}} 
+\newcommand{\isactrlvec}[1]{\emph{$\vec{#1}$}} +\newcommand{\isactrlBG}{\isacharbackquoteopen} +\newcommand{\isactrlEN}{\isacharbackquoteclose} + +\setcounter{secnumdepth}{2} \setcounter{tocdepth}{2} + +\pagestyle{headings} +\sloppy +\binperiod + +\parindent 0pt\parskip 0.5ex + +\renewcommand{\isadigit}[1]{\isamath{#1}} + +\newenvironment{mldecls}{\par\noindent\begingroup\footnotesize\def\isanewline{\\}\begin{tabular}{l}}{\end{tabular}\smallskip\endgroup} + +\isafoldtag{FIXME} + +\isakeeptag{mlref} +\renewcommand{\isatagmlref}{\subsection*{\makebox[0pt][r]{\fbox{ML}~~}Reference}} +\renewcommand{\endisatagmlref}{} + +\isakeeptag{mlantiq} +\renewcommand{\isatagmlantiq}{\subsection*{\makebox[0pt][r]{\fbox{ML}~~}Antiquotations}} +\renewcommand{\endisatagmlantiq}{} + +\isakeeptag{mlex} +\renewcommand{\isatagmlex}{\subsection*{\makebox[0pt][r]{\fbox{ML}~~}Examples}} +\renewcommand{\endisatagmlex}{} + +\renewcommand{\isatagML}{\begingroup\isabellestyle{default}\isastyle\def\isadigit##1{##1}} +\renewcommand{\endisatagML}{\endgroup} + +\newcommand{\minorcmd}[1]{{\sf #1}} +\newcommand{\isasymtype}{\minorcmd{type}} +\newcommand{\isasymval}{\minorcmd{val}} + +\newcommand{\isasymFIX}{\isakeyword{fix}} +\newcommand{\isasymASSUME}{\isakeyword{assume}} +\newcommand{\isasymDEFINE}{\isakeyword{define}} +\newcommand{\isasymNOTE}{\isakeyword{note}} +\newcommand{\isasymGUESS}{\isakeyword{guess}} +\newcommand{\isasymOBTAIN}{\isakeyword{obtain}} +\newcommand{\isasymTHEORY}{\isakeyword{theory}} +\newcommand{\isasymUSES}{\isakeyword{uses}} +\newcommand{\isasymEND}{\isakeyword{end}} +\newcommand{\isasymCONSTS}{\isakeyword{consts}} +\newcommand{\isasymDEFS}{\isakeyword{defs}} +\newcommand{\isasymTHEOREM}{\isakeyword{theorem}} +\newcommand{\isasymDEFINITION}{\isakeyword{definition}} + +\isabellestyle{literal} + +\railtermfont{\isabellestyle{tt}} +\railnontermfont{\isabellestyle{itunderscore}} +\railnamefont{\isabellestyle{itunderscore}} diff -r cc71f01f9fde -r ff82ba1893c8 
src/Doc/Implementation/Integration.thy --- a/src/Doc/Implementation/Integration.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Implementation/Integration.thy Mon May 25 22:11:43 2015 +0200 @@ -187,7 +187,7 @@ sub-graph of theories, the intrinsic parallelism can be exploited by the system to speedup loading. - This variant is used by default in @{tool build} @{cite "isabelle-sys"}. + This variant is used by default in @{tool build} @{cite "isabelle-system"}. \item @{ML Thy_Info.get_theory}~@{text A} retrieves the theory value presently associated with name @{text A}. Note that the result might be diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Implementation/ML.thy --- a/src/Doc/Implementation/ML.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Implementation/ML.thy Mon May 25 22:11:43 2015 +0200 @@ -1418,7 +1418,7 @@ \item sequence of Isabelle symbols (see also \secref{sec:symbols}), with @{ML Symbol.explode} as key operation; - \item XML tree structure via YXML (see also @{cite "isabelle-sys"}), + \item XML tree structure via YXML (see also @{cite "isabelle-system"}), with @{ML YXML.parse_body} as key operation. \end{enumerate} diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/Document_Preparation.thy --- a/src/Doc/Isar_Ref/Document_Preparation.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/Document_Preparation.thy Mon May 25 22:11:43 2015 +0200 @@ -11,7 +11,7 @@ {\LaTeX} output is generated while processing a \emph{session} in batch mode, as explained in the \emph{The Isabelle System Manual} - @{cite "isabelle-sys"}. The main Isabelle tools to get started with + @{cite "isabelle-system"}. The main Isabelle tools to get started with document preparation are @{tool_ref mkroot} and @{tool_ref build}. 
The classic Isabelle/HOL tutorial @{cite "isabelle-hol-book"} also @@ -81,7 +81,7 @@ \ -section \Document Antiquotations \label{sec:antiq}\ +section \Document antiquotations \label{sec:antiq}\ text \ \begin{matharray}{rcl} @@ -434,7 +434,7 @@ \end{tabular} \medskip The Isabelle document preparation system - @{cite "isabelle-sys"} allows tagged command regions to be presented + @{cite "isabelle-system"} allows tagged command regions to be presented specifically, e.g.\ to fold proof texts, or drop parts of the text completely. @@ -459,7 +459,7 @@ arbitrary tags to ``keep'', ``drop'', or ``fold'' the corresponding parts of the text. Logic sessions may also specify ``document versions'', where given tags are interpreted in some particular way. - Again see @{cite "isabelle-sys"} for further details. + Again see @{cite "isabelle-system"} for further details. \ diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/HOL_Specific.thy --- a/src/Doc/Isar_Ref/HOL_Specific.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/HOL_Specific.thy Mon May 25 22:11:43 2015 +0200 @@ -1,6 +1,6 @@ theory HOL_Specific imports Base "~~/src/HOL/Library/Old_Datatype" "~~/src/HOL/Library/Old_Recdef" - "~~/src/Tools/Adhoc_Overloading" + "~~/src/Tools/Adhoc_Overloading" "~~/src/HOL/Library/Dlist" "~~/src/HOL/Library/FSet" begin chapter \Higher-Order Logic\ @@ -1616,8 +1616,8 @@ \} @{rail \ - @@{command (HOL) lift_definition} @{syntax name} '::' @{syntax type} @{syntax mixfix}? \ - 'is' @{syntax term} (@'parametric' (@{syntax thmref}+))? + @@{command (HOL) lift_definition} ('(' 'code_dt' ')')? @{syntax name} '::' @{syntax type} \ + @{syntax mixfix}? 'is' @{syntax term} (@'parametric' (@{syntax thmref}+))? \} @{rail \ @@ -1695,9 +1695,24 @@ the abstraction function. 
Integration with [@{attribute code} abstract]: For subtypes (e.g., - corresponding to a datatype invariant, such as dlist), @{command + corresponding to a datatype invariant, such as @{typ "'a dlist"}), @{command (HOL) "lift_definition"} uses a code certificate theorem - @{text f.rep_eq} as a code equation. + @{text f.rep_eq} as a code equation. Because of the limitation of the code generator, + @{text f.rep_eq} cannot be used as a code equation if the subtype occurs inside the result + type rather than at the top level (e.g., function returning @{typ "'a dlist option"} vs. + @{typ "'a dlist"}). In this case, an extension of @{command + (HOL) "lift_definition"} can be invoked by specifying the flag @{text "code_dt"}. This + extension enables code execution through series of internal type and lifting definitions + if the return type @{text "\"} meets the following inductive conditions: + \begin{description} + \item @{text "\"} is a type variable + \item @{text "\ = \\<^sub>1 \ \\<^sub>n \"}, where @{text "\"} is an abstract type constructor + and @{text "\\<^sub>1 \ \\<^sub>n"} do not contain abstract types (i.e., @{typ "int dlist"} is allowed + whereas @{typ "int dlist dlist"} not) + \item @{text "\ = \\<^sub>1 \ \\<^sub>n \"}, @{text "\"} is a type constructor that was defined as a + (co)datatype whose constructor argument types do not contain either non-free datatypes + or the function type. + \end{description} Integration with [@{attribute code} equation]: For total quotients, @{command (HOL) "lift_definition"} uses @{text f.abs_eq} as a code equation. @@ -1780,7 +1795,7 @@ and thus sets up lifting for an abstract type @{text \} (that is defined by @{text Quotient_thm}). Optional theorems @{text pcr_def} and @{text pcr_cr_eq_thm} can be specified to register the parametrized - correspondence relation for @{text \}. E.g., for @{text "'a dlist"}, @{text pcr_def} is + correspondence relation for @{text \}. 
E.g., for @{typ "'a dlist"}, @{text pcr_def} is @{text "pcr_dlist A \ list_all2 A \\ cr_dlist"} and @{text pcr_cr_eq_thm} is @{text "pcr_dlist op= = op="}. This attribute is rather used for low-level diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/Inner_Syntax.thy --- a/src/Doc/Isar_Ref/Inner_Syntax.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/Inner_Syntax.thy Mon May 25 22:11:43 2015 +0200 @@ -110,7 +110,7 @@ @{command "print_state"}~@{text "(latex xsymbols)"} prints the current proof state with mathematical symbols and special characters represented in {\LaTeX} source, according to the Isabelle style - @{cite "isabelle-sys"}. + @{cite "isabelle-system"}. Note that antiquotations (cf.\ \secref{sec:antiq}) provide a more systematic way to include formal items into the printed text @@ -1023,7 +1023,7 @@ need to be passed-through carefully by syntax transformations. Pre-terms are further processed by the so-called \emph{check} and - \emph{unckeck} phases that are intertwined with type-inference (see + \emph{uncheck} phases that are intertwined with type-inference (see also @{cite "isabelle-implementation"}). The latter allows to operate on higher-order abstract syntax with proper binding and type information already available. diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/Misc.thy --- a/src/Doc/Isar_Ref/Misc.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/Misc.thy Mon May 25 22:11:43 2015 +0200 @@ -103,7 +103,7 @@ \item @{command "thm_deps"}~@{text "a\<^sub>1 \ a\<^sub>n"} visualizes dependencies of facts, using Isabelle's graph browser - tool (see also @{cite "isabelle-sys"}). + tool (see also @{cite "isabelle-system"}). 
\item @{command "unused_thms"}~@{text "A\<^sub>1 \ A\<^sub>m - B\<^sub>1 \ B\<^sub>n"} displays all theorems that are proved in theories @{text "B\<^sub>1 \ B\<^sub>n"} diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/Outer_Syntax.thy --- a/src/Doc/Isar_Ref/Outer_Syntax.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/Outer_Syntax.thy Mon May 25 22:11:43 2015 +0200 @@ -28,7 +28,7 @@ Printed theory documents usually omit quotes to gain readability (this is a matter of {\LaTeX} macro setup, say via @{verbatim - "\\isabellestyle"}, see also @{cite "isabelle-sys"}). Experienced + "\\isabellestyle"}, see also @{cite "isabelle-system"}). Experienced users of Isabelle/Isar may easily reconstruct the lost technical information, while mere readers need not care about quotes at all. \ diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/Preface.thy --- a/src/Doc/Isar_Ref/Preface.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/Preface.thy Mon May 25 22:11:43 2015 +0200 @@ -2,8 +2,6 @@ imports Base Main begin -chapter \Preface\ - text \ The \emph{Isabelle} system essentially provides a generic infrastructure for building deductive systems (programmed in diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/Spec.thy --- a/src/Doc/Isar_Ref/Spec.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/Spec.thy Mon May 25 22:11:43 2015 +0200 @@ -178,7 +178,7 @@ accesses to the local scope, as determined by the enclosing @{command "context"}~@{keyword "begin"}~\dots~@{command "end"} block. Outside its scope, a @{keyword "private"} name is inaccessible, and a @{keyword - "qualified"} name is only accessible with additional qualification. + "qualified"} name is only accessible with some qualification. 
Neither a global @{command theory} nor a @{command locale} target provides a local scope by itself: an extra unnamed context is required to use @@ -1541,7 +1541,7 @@ \item @{command "hide_class"}~@{text names} fully removes class declarations from a given name space; with the @{text "(open)"} - option, only the base name is hidden. + option, only the unqualified base name is hidden. Note that hiding name space accesses has no impact on logical declarations --- they remain valid internally. Entities that are no diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Isar_Ref/document/root.tex --- a/src/Doc/Isar_Ref/document/root.tex Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Isar_Ref/document/root.tex Mon May 25 22:11:43 2015 +0200 @@ -60,7 +60,8 @@ \maketitle \pagenumbering{roman} -{\def\isamarkupchapter#1{\chapter*{#1}}\input{Preface.tex}} +\chapter*{Preface} +\input{Preface.tex} \tableofcontents \clearfirst diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/JEdit.thy --- a/src/Doc/JEdit/JEdit.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/JEdit/JEdit.thy Mon May 25 22:11:43 2015 +0200 @@ -82,9 +82,9 @@ The options allow to specify a logic session name --- the same selector is accessible in the \emph{Theories} panel (\secref{sec:theories}). On application startup, the selected logic session image is provided - automatically by the Isabelle build tool @{cite "isabelle-sys"}: if it is + automatically by the Isabelle build tool @{cite "isabelle-system"}: if it is absent or outdated wrt.\ its sources, the build process updates it before - entering the Prover IDE. Changing the logic session within Isabelle/jEdit + entering the Prover IDE. Change of the logic session within Isabelle/jEdit requires a restart of the whole application. 
\medskip The main job of the Prover IDE is to manage sources and their @@ -103,7 +103,8 @@ Thus the Prover IDE gives an impression of direct access to formal content of the prover within the editor, but in reality only certain aspects are - exposed, according to the possibilities of the prover and its many tools. + exposed, according to the possibilities of the prover and its many add-on + tools. \ @@ -169,7 +170,7 @@ Isabelle system options are managed by Isabelle/Scala and changes are stored in @{file_unchecked "$ISABELLE_HOME_USER/etc/preferences"}, independently of - other jEdit properties. See also @{cite "isabelle-sys"}, especially the + other jEdit properties. See also @{cite "isabelle-system"}, especially the coverage of sessions and command-line tools like @{tool build} or @{tool options}. @@ -181,7 +182,7 @@ Isabelle system options. Note that some of these options affect general parameters that are relevant outside Isabelle/jEdit as well, e.g.\ @{system_option threads} or @{system_option parallel_proofs} for the - Isabelle build tool @{cite "isabelle-sys"}, but it is possible to use the + Isabelle build tool @{cite "isabelle-system"}, but it is possible to use the settings variable @{setting ISABELLE_BUILD_OPTIONS} to change defaults for batch builds without affecting Isabelle/jEdit. @@ -242,7 +243,7 @@ The @{verbatim "-l"} option specifies the session name of the logic image to be used for proof processing. Additional session root directories may be included via option @{verbatim "-d"} to augment - that name space of @{tool build} @{cite "isabelle-sys"}. + that name space of @{tool build} @{cite "isabelle-system"}. By default, the specified image is checked and built on demand. The @{verbatim "-s"} option determines where to store the result session image @@ -256,7 +257,7 @@ The @{verbatim "-J"} and @{verbatim "-j"} options allow to pass additional low-level options to the JVM or jEdit, respectively. 
The defaults are - provided by the Isabelle settings environment @{cite "isabelle-sys"}, but + provided by the Isabelle settings environment @{cite "isabelle-system"}, but note that these only work for the command-line tool described here, and not the regular application. @@ -270,12 +271,15 @@ chapter \Augmented jEdit functionality\ -section \Look-and-feel\ +section \GUI rendering\ + +subsection \Look-and-feel \label{sec:look-and-feel}\ -text \jEdit is a Java/AWT/Swing application with some ambition to - support ``native'' look-and-feel on all platforms, within the limits - of what Oracle as Java provider and major operating system - distributors allow (see also \secref{sec:problems}). +text \ + jEdit is a Java/AWT/Swing application with some ambition to support + ``native'' look-and-feel on all platforms, within the limits of what Oracle + as Java provider and major operating system distributors allow (see also + \secref{sec:problems}). Isabelle/jEdit enables platform-specific look-and-feel by default as follows. @@ -285,11 +289,14 @@ \item[Linux:] The platform-independent \emph{Nimbus} is used by default. - \emph{GTK+} works under the side-condition that the overall GTK theme is - selected in a Swing-friendly way.\footnote{GTK support in Java/Swing was - once marketed aggressively by Sun, but never quite finished. Today (2013) it + \emph{GTK+} also works under the side-condition that the overall GTK theme + is selected in a Swing-friendly way.\footnote{GTK support in Java/Swing was + once marketed aggressively by Sun, but never quite finished. Today (2015) it is lagging behind further development of Swing and GTK. The graphics - rendering performance can be worse than for other Swing look-and-feels.} + rendering performance can be worse than for other Swing look-and-feels. 
+ Nonetheless it has its uses for displays with very high resolution (such as + ``4K'' or ``UHD'' models), because the rendering by the external library is + subject to global system settings for font scaling.} \item[Windows:] Regular \emph{Windows} is used by default, but \emph{Windows Classic} also works. @@ -308,11 +315,78 @@ in mind that this extra variance of GUI functionality is unlikely to work in arbitrary combinations. The platform-independent \emph{Nimbus} and \emph{Metal} should always work. The historic - \emph{CDE/Motif} is better avoided. + \emph{CDE/Motif} should be ignored. After changing the look-and-feel in \emph{Global Options~/ Appearance}, it is advisable to restart Isabelle/jEdit in order to - take full effect.\ + take full effect. +\ + + +subsection \Displays with very high resolution \label{sec:hdpi}\ + +text \ + Many years ago, displays with $1024 \times 768$ or $1280 \times 1024$ pixels + were considered ``high resolution'' and bitmap fonts with 12 or 14 pixels as + adequate for text rendering. Today (2015), we routinely see ``Full HD'' + monitors at $1920 \times 1080$ pixels, and occasionally ``Ultra HD'' at + $3840 \times 2160$ or more, but GUI rendering did not really progress + beyond the old standards. + + Isabelle/jEdit defaults are a compromise for reasonable out-of-the box + results on common platforms and medium resolution displays (e.g.\ the ``Full + HD'' category). Subsequently there are further hints to improve on that. + + \medskip The \textbf{operating-system platform} usually provides some + configuration for global scaling of text fonts, e.g.\ $120\%$--$250\%$ on + Windows. Changing that only has a partial effect on GUI rendering; + satisfactory display quality requires further adjustments. + + \medskip The Isabelle/jEdit \textbf{application} and its plugins provide + various font properties that are summarized below. 
+ + \begin{itemize} + + \item \emph{Global Options / Text Area / Text font}: the main text area + font, which is also used as reference point for various derived font sizes, + e.g.\ the Output panel (\secref{sec:output}). + + \item \emph{Global Options / Gutter / Gutter font}: the font for the gutter + area left of the main text area, e.g.\ relevant for display of line numbers + (disabled by default). + + \item \emph{Global Options / Appearance / Button, menu and label font} as + well as \emph{List and text field font}: this specifies the primary and + secondary font for the old \emph{Metal} look-and-feel + (\secref{sec:look-and-feel}), which happens to scale better than newer ones + like \emph{Nimbus}. + + \item \emph{Plugin Options / Isabelle / General / Reset Font Size}: the main + text area font size for action @{action_ref "isabelle.reset-font-size"}, + e.g.\ relevant for quick scaling like in major web browsers. + + \item \emph{Plugin Options / Console / General / Font}: the console window + font, e.g.\ relevant for Isabelle/Scala command-line. + + \end{itemize} + + In \figref{fig:isabelle-jedit-hdpi} the \emph{Metal} look-and-feel is + configured with custom fonts at 30 pixels, and the main text area and + console at 36 pixels. Despite the old-fashioned appearance of \emph{Metal}, + this leads to decent rendering quality on all platforms. + + \begin{figure}[htb] + \begin{center} + \includegraphics[width=\textwidth]{isabelle-jedit-hdpi} + \end{center} + \caption{Metal look-and-feel with custom fonts for very high resolution} + \label{fig:isabelle-jedit-hdpi} + \end{figure} + + On Linux, it is also possible to use \emph{GTK+} with a suitable theme and + global font scaling. On Mac OS X, the default setup for ``Retina'' displays + should work adequately with the native look-and-feel. +\ section \Dockable windows \label{sec:dockables}\ @@ -333,10 +407,10 @@ \emph{HyperSearch Results} or the \emph{File System Browser}. 
Plugins often provide a central dockable to access their key functionality, which may be opened by the user on demand. The Isabelle/jEdit plugin takes this approach - to the extreme: its plugin menu merely provides entry-points to panels that - are managed as dockable windows. Some important panels are docked by + to the extreme: its plugin menu provides the entry-points to many panels + that are managed as dockable windows. Some important panels are docked by default, e.g.\ \emph{Documentation}, \emph{Output}, \emph{Query}, but the - user can change this arrangement easily. + user can change this arrangement easily and persistently. Compared to plain jEdit, dockable window management in Isabelle/jEdit is slightly augmented according to the the following principles: @@ -398,15 +472,15 @@ alphabets in comments. \medskip \paragraph{Encoding.} Technically, the Unicode view on Isabelle - symbols is an \emph{encoding} in jEdit (not in the underlying JVM) that is - called @{verbatim "UTF-8-Isabelle"}. It is provided by the Isabelle/jEdit - plugin and enabled by default for all source files. Sometimes such defaults - are reset accidentally, or malformed UTF-8 sequences in the text force jEdit - to fall back on a different encoding like @{verbatim "ISO-8859-15"}. In that - case, verbatim ``@{verbatim "\"}'' will be shown in the text buffer instead - of its Unicode rendering ``@{text "\"}''. The jEdit menu operation - \emph{File~/ Reload with Encoding~/ UTF-8-Isabelle} helps to resolve such - problems (after repairing malformed parts of the text). + symbols is an \emph{encoding} called @{verbatim "UTF-8-Isabelle"} in jEdit + (not in the underlying JVM). It is provided by the Isabelle/jEdit plugin and + enabled by default for all source files. Sometimes such defaults are reset + accidentally, or malformed UTF-8 sequences in the text force jEdit to fall + back on a different encoding like @{verbatim "ISO-8859-15"}. 
In that case, + verbatim ``@{verbatim "\"}'' will be shown in the text buffer instead of its + Unicode rendering ``@{text "\"}''. The jEdit menu operation \emph{File~/ + Reload with Encoding~/ UTF-8-Isabelle} helps to resolve such problems (after + repairing malformed parts of the text). \medskip \paragraph{Font.} Correct rendering via Unicode requires a font that contains glyphs for the corresponding codepoints. Most @@ -450,11 +524,11 @@ some web browser or mail client, as long as the same Unicode view on Isabelle symbols is used. - \item Copy/paste from prover output within Isabelle/jEdit. The - same principles as for text buffers apply, but note that \emph{copy} - in secondary Isabelle/jEdit windows works via the keyboard shortcut - @{verbatim "C+c"}, while jEdit menu actions always refer to the - primary text area! + \item Copy/paste from prover output within Isabelle/jEdit. The same + principles as for text buffers apply, but note that \emph{copy} in secondary + Isabelle/jEdit windows works via the keyboard shortcuts @{verbatim "C+c"} or + @{verbatim "C+INSERT"}, while jEdit menu actions always refer to the primary + text area! \item Completion provided by Isabelle plugin (see \secref{sec:completion}). Isabelle symbols have a canonical name @@ -592,9 +666,9 @@ Despite the flexibility of URLs in jEdit, local files are particularly important and are accessible without protocol prefix. Here the path notation is that of the Java Virtual Machine on the underlying platform. On Windows - the preferred form uses backslashes, but happens to accept forward slashes - like Unix/POSIX. Further differences arise due to Windows drive letters and - network shares. + the preferred form uses backslashes, but happens to accept also forward + slashes like Unix/POSIX. Further differences arise due to Windows drive + letters and network shares. 
The Java notation for files needs to be distinguished from the one of Isabelle, which uses POSIX notation with forward slashes on \emph{all} @@ -611,8 +685,8 @@ though, due to the bias of jEdit towards platform-specific notation and of Isabelle towards POSIX. Moreover, the Isabelle settings environment is not yet active when starting Isabelle/jEdit via its standard application - wrapper, in contrast to @{verbatim "isabelle jedit"} run from the command - line (\secref{sec:command-line}). + wrapper, in contrast to @{tool jedit} run from the command line + (\secref{sec:command-line}). Isabelle/jEdit imitates @{verbatim "$ISABELLE_HOME"} and @{verbatim "$ISABELLE_HOME_USER"} within the Java process environment, in order to @@ -684,7 +758,7 @@ In any case, source files are managed by the PIDE infrastructure: the physical file-system only plays a subordinate role. The relevant version of - source text is passed directly from the editor to the prover, via internal + source text is passed directly from the editor to the prover, using internal communication channels. \ @@ -695,7 +769,7 @@ The \emph{Theories} panel (see also \figref{fig:theories}) provides an overview of the status of continuous checking of theory nodes within the document model. Unlike batch sessions of @{tool build} @{cite - "isabelle-sys"}, theory nodes are identified by full path names; this allows + "isabelle-system"}, theory nodes are identified by full path names; this allows to work with multiple (disjoint) Isabelle sessions simultaneously within the same editor session. @@ -736,13 +810,14 @@ rendering, based on a standard repertoire known from IDEs for programming languages: colors, icons, highlighting, squiggly underlines, tooltips, hyperlinks etc. For outer syntax of Isabelle/Isar there is some traditional - syntax-highlighting via static keyword tables and tokenization within the - editor. 
In contrast, the painting of inner syntax (term language etc.)\ uses - semantic information that is reported dynamically from the logical context. - Thus the prover can provide additional markup to help the user to understand - the meaning of formal text, and to produce more text with some add-on tools - (e.g.\ information messages with \emph{sendback} markup by automated provers - or disprovers in the background). + syntax-highlighting via static keywords and tokenization within the editor; + this buffer syntax is determined from theory imports. In contrast, the + painting of inner syntax (term language etc.)\ uses semantic information + that is reported dynamically from the logical context. Thus the prover can + provide additional markup to help the user to understand the meaning of + formal text, and to produce more text with some add-on tools (e.g.\ + information messages with \emph{sendback} markup by automated provers or + disprovers in the background). \ @@ -763,7 +838,7 @@ document-model on demand, the first time when opened explicitly in the editor. There are further tricks to manage markup of ML files, such that Isabelle/HOL may be edited conveniently in the Prover IDE on small machines - with only 4--8\,GB of main memory. Using @{verbatim Pure} as logic session + with only 8\,GB of main memory. Using @{verbatim Pure} as logic session image, the exploration may start at the top @{file "$ISABELLE_HOME/src/HOL/Main.thy"} or the bottom @{file "$ISABELLE_HOME/src/HOL/HOL.thy"}, for example. 
@@ -1017,7 +1092,7 @@ subject to formal document processing of the editor session and thus prevents further exploration: the chain of hyperlinks may end in some source file of the underlying logic image, or within the - Isabelle/ML bootstrap sources of Isabelle/Pure.\ + ML bootstrap sources of Isabelle/Pure.\ section \Completion \label{sec:completion}\ @@ -1092,7 +1167,7 @@ text \ Syntax completion tables are determined statically from the keywords of the ``outer syntax'' of the underlying edit mode: for theory files this is the - syntax of Isar commands. + syntax of Isar commands according to the cumulative theory imports. Keywords are usually plain words, which means the completion mechanism only inserts them directly into the text for explicit completion @@ -1381,7 +1456,7 @@ \begin{itemize} \item @{system_option_def completion_limit} specifies the maximum number of - name-space entries exposed in semantic completion by the prover. + items for various semantic completion operations (name-space entries etc.) \item @{system_option_def jedit_completion} guards implicit completion via regular jEdit key events (\secref{sec:completion-input}): it allows to @@ -1567,6 +1642,76 @@ nonetheless, say to remove earlier proof attempts.\ +chapter \Isabelle document preparation\ + +text \The ultimate purpose of Isabelle is to produce nicely rendered documents + with the Isabelle document preparation system, which is based on {\LaTeX}; + see also @{cite "isabelle-system" and "isabelle-isar-ref"}. Isabelle/jEdit + provides some additional support for document editing.\ + + +section \Document outline\ + +text \Theory sources may contain document markup commands, such as + @{command_ref chapter}, @{command_ref section}, @{command subsection}. The + Isabelle SideKick parser (\secref{sec:sidekick}) represents this document + outline as structured tree view, with formal statements and proofs nested + inside; see \figref{fig:sidekick-document}. 
+ + \begin{figure}[htb] + \begin{center} + \includegraphics[scale=0.333]{sidekick-document} + \end{center} + \caption{Isabelle document outline via SideKick tree view} + \label{fig:sidekick-document} + \end{figure} + + It is also possible to use text folding according to this structure, by + adjusting \emph{Utilities / Buffer Options / Folding mode} of jEdit. The + default mode @{verbatim isabelle} uses the structure of formal definitions, + statements, and proofs. The alternative mode @{verbatim sidekick} uses the + document structure of the SideKick parser, as explained above.\ + + +section \Citations and Bib{\TeX} entries\ + +text \Citations are managed by {\LaTeX} and Bib{\TeX} in @{verbatim ".bib"} + files. The Isabelle session build process and the @{tool latex} tool @{cite + "isabelle-system"} are smart enough to assemble the result, based on the + session directory layout. + + The document antiquotation @{text "@{cite}"} is described in @{cite + "isabelle-isar-ref"}. Within the Prover IDE it provides semantic markup for + tooltips, hyperlinks, and completion for Bib{\TeX} database entries. + Isabelle/jEdit does \emph{not} know about the actual Bib{\TeX} environment + used in {\LaTeX} batch-mode, but it can take citations from those @{verbatim + ".bib"} files that happen to be open in the editor; see + \figref{fig:cite-completion}. + + \begin{figure}[htb] + \begin{center} + \includegraphics[scale=0.333]{cite-completion} + \end{center} + \caption{Semantic completion of citations from open Bib{\TeX} files} + \label{fig:cite-completion} + \end{figure} + + Isabelle/jEdit also provides some support for editing @{verbatim ".bib"} + files themselves. There is syntax highlighting based on entry types + (according to standard Bib{\TeX} styles), a context-menu to compose entries + systematically, and a SideKick tree view of the overall content; see + \figref{fig:bibtex-mode}. 
+ + \begin{figure}[htb] + \begin{center} + \includegraphics[scale=0.333]{bibtex-mode} + \end{center} + \caption{Bib{\TeX} mode with context menu and SideKick tree view} + \label{fig:bibtex-mode} + \end{figure} +\ + + chapter \Miscellaneous tools\ section \Timing\ @@ -1618,7 +1763,7 @@ \begin{itemize} \item \emph{Protocol} shows internal messages between the - Isabelle/Scala and Isabelle/ML side of the PIDE editing protocol. + Isabelle/Scala and Isabelle/ML side of the PIDE document editing protocol. Recording of messages starts with the first activation of the corresponding dockable window; earlier messages are lost. @@ -1640,11 +1785,14 @@ Under normal circumstances, prover output always works via managed message channels (corresponding to @{ML writeln}, @{ML warning}, @{ML Output.error_message} in Isabelle/ML), which are displayed by regular means - within the document model (\secref{sec:output}). + within the document model (\secref{sec:output}). Unhandled Isabelle/ML + exceptions are printed by the system via @{ML Output.error_message}. - \item \emph{Syslog} shows system messages that might be relevant to - diagnose problems with the startup or shutdown phase of the prover - process; this also includes raw output on @{verbatim stderr}. + \item \emph{Syslog} shows system messages that might be relevant to diagnose + problems with the startup or shutdown phase of the prover process; this also + includes raw output on @{verbatim stderr}. Isabelle/ML also provides an + explicit @{ML Output.system_message} operation, which is occasionally useful + for diagnostic purposes within the system infrastructure itself. A limited amount of syslog messages are buffered, independently of the docking state of the \emph{Syslog} panel. This allows to @@ -1711,12 +1859,18 @@ \textbf{Workaround:} Use a regular re-parenting X11 window manager. 
- \item \textbf{Problem:} Recent forks of Linux/X11 window managers - and desktop environments (variants of Gnome) disrupt the handling of - menu popups and mouse positions of Java/AWT/Swing. + \item \textbf{Problem:} Various forks of Linux/X11 window managers and + desktop environments (like Gnome) disrupt the handling of menu popups and + mouse positions of Java/AWT/Swing. \textbf{Workaround:} Use mainstream versions of Linux desktops. + \item \textbf{Problem:} Native Windows look-and-feel with global font + scaling leads to bad GUI rendering of various tree views. + + \textbf{Workaround:} Use \emph{Metal} look-and-feel and re-adjust its + primary and secondary font as explained in \secref{sec:hdpi}. + \item \textbf{Problem:} Full-screen mode via jEdit action @{action_ref "toggle-full-screen"} (default keyboard shortcut @{verbatim F11}) works on Windows, but not on Mac OS X or various Linux/X11 window managers. diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/auto-tools.png Binary file src/Doc/JEdit/document/auto-tools.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/bibtex-mode.png Binary file src/Doc/JEdit/document/bibtex-mode.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/cite-completion.png Binary file src/Doc/JEdit/document/cite-completion.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/isabelle-jedit-hdpi.png Binary file src/Doc/JEdit/document/isabelle-jedit-hdpi.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/isabelle-jedit.png Binary file src/Doc/JEdit/document/isabelle-jedit.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/output.png Binary file src/Doc/JEdit/document/output.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/popup1.png Binary file src/Doc/JEdit/document/popup1.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/popup2.png Binary file 
src/Doc/JEdit/document/popup2.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/query.png Binary file src/Doc/JEdit/document/query.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/sidekick-document.png Binary file src/Doc/JEdit/document/sidekick-document.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/sidekick.png Binary file src/Doc/JEdit/document/sidekick.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/sledgehammer.png Binary file src/Doc/JEdit/document/sledgehammer.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/JEdit/document/theories.png Binary file src/Doc/JEdit/document/theories.png has changed diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Nitpick/document/root.tex --- a/src/Doc/Nitpick/document/root.tex Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Nitpick/document/root.tex Mon May 25 22:11:43 2015 +0200 @@ -27,7 +27,8 @@ \def\lparr{\mathopen{(\mkern-4mu\mid}} \def\rparr{\mathclose{\mid\mkern-4mu)}} -\def\unk{{?}} +%\def\unk{{?}} +\def\unk{{\_}} \def\unkef{(\lambda x.\; \unk)} \def\undef{(\lambda x.\; \_)} %\def\unr{\textit{others}} @@ -931,7 +932,7 @@ \hbox{}\qquad Free variable: \nopagebreak \\ \hbox{}\qquad\qquad $n = 1$ \\ \hbox{}\qquad Constants: \nopagebreak \\ -\hbox{}\qquad\qquad $\textit{even} = (λx. 
?)(0 := True, 1 := False, 2 := True, 3 := False)$ \\ +\hbox{}\qquad\qquad $\textit{even} = \unkef(0 := True, 1 := False, 2 := True, 3 := False)$ \\ \hbox{}\qquad\qquad $\textit{odd}_{\textsl{base}} = {}$ \\ \hbox{}\qquad\qquad\quad $\unkef(0 := \textit{False},\, 1 := \textit{True},\, 2 := \textit{False},\, 3 := \textit{False})$ \\ \hbox{}\qquad\qquad $\textit{odd}_{\textsl{step}} = \unkef$\\ diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/ROOT --- a/src/Doc/ROOT Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/ROOT Mon May 25 22:11:43 2015 +0200 @@ -59,6 +59,28 @@ "root.tex" "style.sty" +session Eisbach (doc) in "Eisbach" = "HOL-Eisbach" + + options [document_variants = "eisbach", quick_and_dirty, + print_mode = "no_brackets,iff", show_question_marks = false] + theories [document = false] + Base + theories + Preface + Manual + document_files (in "..") + "prepare_document" + "pdfsetup.sty" + "iman.sty" + "extra.sty" + "isar.sty" + "ttbox.sty" + "underscore.sty" + "manual.bib" + document_files + "build" + "root.tex" + "style.sty" + session Functions (doc) in "Functions" = HOL + options [document_variants = "functions", skip_proofs = false, quick_and_dirty] theories Functions @@ -183,14 +205,18 @@ "style.sty" document_files "auto-tools.png" + "bibtex-mode.png" "build" + "cite-completion.png" "isabelle-jedit.png" + "isabelle-jedit-hdpi.png" "output.png" "query.png" "popup1.png" "popup2.png" "root.tex" "sidekick.png" + "sidekick-document.png" "sledgehammer.png" "theories.png" diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/Tutorial/Documents/Documents.thy --- a/src/Doc/Tutorial/Documents/Documents.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/Tutorial/Documents/Documents.thy Mon May 25 22:11:43 2015 +0200 @@ -345,7 +345,7 @@ setup) and \texttt{isabelle build} (to run sessions as specified in the corresponding \texttt{ROOT} file). These Isabelle tools are described in further detail in the \emph{Isabelle System Manual} - @{cite "isabelle-sys"}. + @{cite "isabelle-system"}. 
For example, a new session \texttt{MySession} (with document preparation) may be produced as follows: @@ -406,7 +406,7 @@ \texttt{MySession/document} directory as well. In particular, adding a file named \texttt{root.bib} causes an automatic run of \texttt{bibtex} to process a bibliographic database; see also - \texttt{isabelle document} @{cite "isabelle-sys"}. + \texttt{isabelle document} @{cite "isabelle-system"}. \medskip Any failure of the document preparation phase in an Isabelle batch session leaves the generated sources in their target @@ -694,7 +694,7 @@ preparation system allows the user to specify how to interpret a tagged region, in order to keep, drop, or fold the corresponding parts of the document. See the \emph{Isabelle System Manual} - @{cite "isabelle-sys"} for further details, especially on + @{cite "isabelle-system"} for further details, especially on \texttt{isabelle build} and \texttt{isabelle document}. Ignored material is specified by delimiting the original formal diff -r cc71f01f9fde -r ff82ba1893c8 src/Doc/manual.bib --- a/src/Doc/manual.bib Sat May 23 22:13:24 2015 +0200 +++ b/src/Doc/manual.bib Mon May 25 22:11:43 2015 +0200 @@ -1834,8 +1834,8 @@ title = "{SPASS} Version 3.5", note = {\url{http://www.spass-prover.org/publications/spass.pdf}}} -@manual{isabelle-sys, - author = {Markus Wenzel and Stefan Berghofer}, +@manual{isabelle-system, + author = {Makarius Wenzel and Stefan Berghofer}, title = {The {Isabelle} System Manual}, institution = {TU Munich}, note = {\url{http://isabelle.in.tum.de/doc/system.pdf}}} @@ -1965,7 +1965,7 @@ @inproceedings{Wenzel:2013:ITP, author = {Makarius Wenzel}, title = {Shared-Memory Multiprocessing for Interactive Theorem Proving}, - booktitle = {Interactive Theorem Proving - 4th International Conference, + booktitle = {Interactive Theorem Proving --- 4th International Conference, ITP 2013, Rennes, France, July 22-26, 2013. 
Proceedings}, editor = {Sandrine Blazy and Christine Paulin-Mohring and @@ -1997,7 +1997,7 @@ year = 2014, series = {EPTCS}, month = {July}, - note = {To appear, \url{http://eptcs.web.cse.unsw.edu.au/paper.cgi?UITP2014:11}} + note = {\url{http://eptcs.web.cse.unsw.edu.au/paper.cgi?UITP2014:11}} } @book{principia, diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Binomial.thy --- a/src/HOL/Binomial.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Binomial.thy Mon May 25 22:11:43 2015 +0200 @@ -39,7 +39,7 @@ by (induct n) (auto simp: le_Suc_eq) context - fixes XXX :: "'a :: {semiring_char_0,linordered_semidom,semiring_no_zero_divisors}" + assumes "SORT_CONSTRAINT('a::linordered_semidom)" begin lemma fact_mono: "m \ n \ fact m \ (fact n :: 'a)" @@ -79,8 +79,7 @@ by (induct n) (auto simp: less_Suc_eq) lemma fact_less_mono: - fixes XXX :: "'a :: {semiring_char_0,linordered_semidom,semiring_no_zero_divisors}" - shows "\0 < m; m < n\ \ fact m < (fact n :: 'a)" + "\0 < m; m < n\ \ fact m < (fact n :: 'a::linordered_semidom)" by (metis of_nat_fact of_nat_less_iff fact_less_mono_nat) lemma fact_ge_Suc_0_nat [simp]: "fact n \ Suc 0" diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Cardinals/Bounded_Set.thy --- a/src/HOL/Cardinals/Bounded_Set.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Cardinals/Bounded_Set.thy Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: HOL/Cardinals/Boundes_Set.thy +(* Title: HOL/Cardinals/Bounded_Set.thy Author: Dmitriy Traytel, TU Muenchen Copyright 2015 diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/Eisbach.thy --- a/src/HOL/Eisbach/Eisbach.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/Eisbach.thy Mon May 25 22:11:43 2015 +0200 @@ -1,15 +1,15 @@ -(* Title: Eisbach.thy +(* Title: HOL/Eisbach/Eisbach.thy Author: Daniel Matichuk, NICTA/UNSW Main entry point for Eisbach proof method language. 
*) theory Eisbach -imports Pure +imports Main keywords "method" :: thy_decl and - "concl" - "prems" (* FIXME conflict with "prems" in Isar, which is presently dormant *) + "conclusion" + "premises" "declares" "methods" "\" "\" @@ -17,27 +17,19 @@ begin ML_file "parse_tools.ML" +ML_file "method_closure.ML" ML_file "eisbach_rule_insts.ML" -ML_file "method_closure.ML" ML_file "match_method.ML" ML_file "eisbach_antiquotations.ML" (* FIXME reform Isabelle/Pure attributes to make this work by default *) -attribute_setup THEN = - \Scan.lift (Scan.optional (Args.bracks Parse.nat) 1) -- Attrib.thm >> (fn (i, B) => - Method_Closure.free_aware_rule_attribute [B] (fn _ => fn A => A RSN (i, B)))\ - "resolution with rule" +setup \ + fold (Method_Closure.wrap_attribute {handle_all_errs = true, declaration = true}) + [@{binding intro}, @{binding elim}, @{binding dest}, @{binding simp}] #> + fold (Method_Closure.wrap_attribute {handle_all_errs = false, declaration = false}) + [@{binding THEN}, @{binding OF}, @{binding rotated}, @{binding simplified}] +\ -attribute_setup OF = - \Attrib.thms >> (fn Bs => - Method_Closure.free_aware_rule_attribute Bs (fn _ => fn A => A OF Bs))\ - "rule resolved with facts" - -attribute_setup rotated = - \Scan.lift (Scan.optional Parse.int 1 >> (fn n => - Method_Closure.free_aware_rule_attribute [] (fn _ => rotate_prems n)))\ - "rotated theorem premises" - -method solves methods m = \m; fail\ +method solves methods m = (m; fail) end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/Eisbach_Tools.thy --- a/src/HOL/Eisbach/Eisbach_Tools.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/Eisbach_Tools.thy Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: Eisbach_Tools.thy +(* Title: HOL/Eisbach/Eisbach_Tools.thy Author: Daniel Matichuk, NICTA/UNSW Usability tools for Eisbach. @@ -35,9 +35,47 @@ (Scan.lift (Scan.ahead Parse.not_eof) -- Args.term) (fn ctxt => fn (tok, t) => (* FIXME proper formatting!? 
*) - Token.unparse tok ^ ": " ^ Syntax.string_of_term ctxt t)); + Token.unparse tok ^ ": " ^ Syntax.string_of_term ctxt t) #> + setup_trace_method @{binding print_type} + (Scan.lift (Scan.ahead Parse.not_eof) -- Args.typ) + (fn ctxt => fn (tok, t) => + (* FIXME proper formatting!? *) + Token.unparse tok ^ ": " ^ Syntax.string_of_typ ctxt t)); end \ +ML \ + fun try_map v seq = + (case try Seq.pull seq of + SOME (SOME (x, seq')) => Seq.make (fn () => SOME(x, try_map v seq')) + | SOME NONE => Seq.empty + | NONE => v); +\ + +method_setup catch = \ + Method_Closure.parse_method -- Method_Closure.parse_method >> + (fn (text, text') => fn ctxt => fn using => fn st => + let + val method = Method_Closure.method_evaluate text ctxt using; + val backup_results = Method_Closure.method_evaluate text' ctxt using st; + in + (case try method st of + SOME seq => try_map backup_results seq + | NONE => backup_results) + end) +\ + +ML \ + fun uncurry_rule thm = Conjunction.uncurry_balanced (Thm.nprems_of thm) thm; + fun curry_rule thm = + if Thm.no_prems thm then thm + else + let val conjs = Logic.dest_conjunctions (Thm.major_prem_of thm); + in Conjunction.curry_balanced (length conjs) thm end; +\ + +attribute_setup uncurry = \Scan.succeed (Thm.rule_attribute (K uncurry_rule))\ +attribute_setup curry = \Scan.succeed (Thm.rule_attribute (K curry_rule))\ + end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/Examples.thy --- a/src/HOL/Eisbach/Examples.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/Examples.thy Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: Examples.thy +(* Title: HOL/Eisbach/Examples.thy Author: Daniel Matichuk, NICTA/UNSW *) @@ -11,19 +11,19 @@ subsection \Basic methods\ -method my_intros = \rule conjI | rule impI\ +method my_intros = (rule conjI | rule impI) lemma "P \ Q \ Z \ X" apply my_intros+ oops -method my_intros' uses intros = \rule conjI | rule impI | rule intros\ +method my_intros' uses intros = (rule conjI | rule impI | rule intros) 
lemma "P \ Q \ Z \ X" apply (my_intros' intros: disjI1)+ oops -method my_spec for x :: 'a = \drule spec[where x="x"]\ +method my_spec for x :: 'a = (drule spec[where x="x"]) lemma "\x. P x \ P x" apply (my_spec x) @@ -34,11 +34,11 @@ subsection \Focusing and matching\ method match_test = - \match prems in U: "P x \ Q x" for P Q x \ + (match premises in U: "P x \ Q x" for P Q x \ \print_term P, print_term Q, print_term x, - print_fact U\\ + print_fact U\) lemma "\x. P x \ Q x \ A x \ B x \ R x y \ True" apply match_test -- \Valid match, but not quite what we were expecting..\ @@ -51,8 +51,6 @@ back back back - back - back oops text \Use matching to avoid "improper" methods\ @@ -60,18 +58,17 @@ lemma focus_test: shows "\x. \x. P x \ P x" apply (my_spec "x :: 'a", assumption)? -- \Wrong x\ - apply (match concl in "P x" for x \ \my_spec x, assumption\) + apply (match conclusion in "P x" for x \ \my_spec x, assumption\) done text \Matches are exclusive. Backtracking will not occur past a match\ method match_test' = - \match concl in + (match conclusion in "P \ Q" for P Q \ \print_term P, print_term Q, rule conjI[where P="P" and Q="Q"]; assumption\ - \ "H" for H \ \print_term H\ - \ + \ "H" for H \ \print_term H\) text \Solves goal\ lemma "P \ Q \ P \ Q" @@ -89,20 +86,20 @@ method my_spec_guess = - \match concl in "P (x :: 'a)" for P x \ + (match conclusion in "P (x :: 'a)" for P x \ \drule spec[where x=x], print_term P, - print_term x\\ + print_term x\) lemma "\x. P (x :: nat) \ Q (x :: nat)" apply my_spec_guess oops method my_spec_guess2 = - \match prems in U[thin]:"\x. P x \ Q x" and U':"P x" for P Q x \ + (match premises in U[thin]:"\x. P x \ Q x" and U':"P x" for P Q x \ \insert spec[where x=x, OF U], print_term P, - print_term Q\\ + print_term Q\) lemma "\x. P x \ Q x \ Q x \ Q x" apply my_spec_guess2? -- \Fails. 
Note that both "P"s must match\ @@ -118,7 +115,7 @@ subsection \Higher-order methods\ method higher_order_example for x methods meth = - \cases x, meth, meth\ + (cases x, meth, meth) lemma assumes A: "x = Some a" @@ -129,12 +126,12 @@ subsection \Recursion\ method recursion_example for x :: bool = - \print_term x, + (print_term x, match (x) in "A \ B" for A B \ - \(print_term A, + \print_term A, print_term B, recursion_example A, - recursion_example B) | -\\ + recursion_example B | -\) lemma "P" apply (recursion_example "(A \ D) \ (B \ C)") @@ -151,15 +148,13 @@ subsection \Demo\ -method solve methods m = \m;fail\ - named_theorems intros and elims and subst method prop_solver declares intros elims subst = - \(assumption | + (assumption | rule intros | erule elims | subst subst | subst (asm) subst | - (erule notE; solve \prop_solver\))+\ + (erule notE; solves \prop_solver\))+ lemmas [intros] = conjI @@ -177,11 +172,11 @@ done method guess_all = - \match prems in U[thin]:"\x. P (x :: 'a)" for P \ - \match prems in "?H (y :: 'a)" for y \ + (match premises in U[thin]:"\x. P (x :: 'a)" for P \ + \match premises in "?H (y :: 'a)" for y \ \rule allE[where P = P and x = y, OF U]\ - | match concl in "?H (y :: 'a)" for y \ - \rule allE[where P = P and x = y, OF U]\\\ + | match conclusion in "?H (y :: 'a)" for y \ + \rule allE[where P = P and x = y, OF U]\\) lemma "(\x. P x \ Q x) \ P y \ Q y" apply guess_all @@ -189,14 +184,14 @@ done lemma "(\x. P x \ Q x) \ P z \ P y \ Q y" - apply (solve \guess_all, prop_solver\) -- \Try it without solve\ + apply (solves \guess_all, prop_solver\) -- \Try it without solve\ done method guess_ex = - \match concl in + (match conclusion in "\x. P (x :: 'a)" for P \ - \match prems in "?H (x :: 'a)" for x \ - \rule exI[where x=x]\\\ + \match premises in "?H (x :: 'a)" for x \ + \rule exI[where x=x]\\) lemma "P x \ \x. 
P x" apply guess_ex @@ -204,7 +199,7 @@ done method fol_solver = - \(guess_ex | guess_all | prop_solver) ; solve \fol_solver\\ + ((guess_ex | guess_all | prop_solver) ; solves \fol_solver\) declare allI [intros] @@ -217,4 +212,36 @@ and "(\x. \y. R x y) \ (\y. \x. R x y)" by fol_solver+ + +text \ + Eisbach_Tools provides the catch method, which catches run-time method + errors. In this example the OF attribute throws an error when it can't + compose H with A, forcing H to be re-bound to different members of imps + until it succeeds. +\ + +lemma + assumes imps: "A \ B" "A \ C" "B \ D" + assumes A: "A" + shows "B \ C" + apply (rule conjI) + apply ((match imps in H:"_ \ _" \ \catch \rule H[OF A], print_fact H\ \print_fact H, fail\\)+) + done + +text \ + Eisbach_Tools provides the curry and uncurry attributes. This is useful + when the number of premises of a thm isn't known statically. The pattern + @{term "P \ Q"} matches P against the major premise of a thm, and Q is the + rest of the premises with the conclusion. If we first uncurry, then @{term + "P \ Q"} will match P with the conjunction of all the premises, and Q with + the final conclusion of the rule. 
+\ + +lemma + assumes imps: "A \ B \ C" "D \ C" "E \ D \ A" + shows "(A \ B \ C) \ (D \ C)" + by (match imps[uncurry] in H[curry]:"_ \ C" (cut, multi) \ + \match H in "E \ _" \ \fail\ + \ _ \ \simp add: H\\) + end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/Tests.thy --- a/src/HOL/Eisbach/Tests.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/Tests.thy Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: Tests.thy +(* Title: HOL/Eisbach/Tests.thy Author: Daniel Matichuk, NICTA/UNSW *) @@ -8,12 +8,12 @@ imports Main Eisbach_Tools begin -section \Named Theorems Tests\ + +subsection \Named Theorems Tests\ named_theorems foo -method foo declares foo = - \rule foo\ +method foo declares foo = (rule foo) lemma assumes A [foo]: A @@ -21,8 +21,10 @@ apply foo done +method abs_used for P = (match (P) in "\a. ?Q" \ \fail\ \ _ \ \-\) -section \Match Tests\ + +subsection \Match Tests\ notepad begin @@ -30,12 +32,12 @@ fix A y have "(\x. A x) \ A y" - apply (rule dup, match prems in Y: "\B. P B" for P \ \match (P) in A \ \print_fact Y, rule Y\\) - apply (rule dup, match prems in Y: "\B :: 'a. P B" for P \ \match (P) in A \ \print_fact Y, rule Y\\) - apply (rule dup, match prems in Y: "\B :: 'a. P B" for P \ \match concl in "P y" for y \ \print_fact Y, print_term y, rule Y[where B=y]\\) - apply (rule dup, match prems in Y: "\B :: 'a. P B" for P \ \match concl in "P z" for z \ \print_fact Y, print_term y, rule Y[where B=z]\\) - apply (rule dup, match concl in "P y" for P \ \match prems in Y: "\z. P z" \ \print_fact Y, rule Y[where z=y]\\) - apply (match prems in Y: "\z :: 'a. P z" for P \ \match concl in "P y" \ \print_fact Y, rule Y[where z=y]\\) + apply (rule dup, match premises in Y: "\B. P B" for P \ \match (P) in A \ \print_fact Y, rule Y\\) + apply (rule dup, match premises in Y: "\B :: 'a. P B" for P \ \match (P) in A \ \print_fact Y, rule Y\\) + apply (rule dup, match premises in Y: "\B :: 'a. 
P B" for P \ \match conclusion in "P y" for y \ \print_fact Y, print_term y, rule Y[where B=y]\\) + apply (rule dup, match premises in Y: "\B :: 'a. P B" for P \ \match conclusion in "P z" for z \ \print_fact Y, print_term y, rule Y[where B=z]\\) + apply (rule dup, match conclusion in "P y" for P \ \match premises in Y: "\z. P z" \ \print_fact Y, rule Y[where z=y]\\) + apply (match premises in Y: "\z :: 'a. P z" for P \ \match conclusion in "P y" \ \print_fact Y, rule Y[where z=y]\\) done assume X: "\x. A x" "A y" @@ -44,37 +46,49 @@ apply (match X in Y:"B ?x" and Y':"B ?x" for B \ \print_fact Y, print_term B\) apply (match X in Y:"B x" and Y':"B x" for B x \ \print_fact Y, print_term B, print_term x\) apply (insert X) - apply (match prems in Y:"\B. A B" and Y':"B y" for B and y :: 'a \ \print_fact Y[where B=y], print_term B\) - apply (match prems in Y:"B ?x" and Y':"B ?x" for B \ \print_fact Y, print_term B\) - apply (match prems in Y:"B x" and Y':"B x" for B x \ \print_fact Y, print_term B\) - apply (match concl in "P x" and "P y" for P x \ \print_term P, print_term x\) + apply (match premises in Y:"\B. A B" and Y':"B y" for B and y :: 'a \ \print_fact Y[where B=y], print_term B\) + apply (match premises in Y:"B ?x" and Y':"B ?x" for B \ \print_fact Y, print_term B\) + apply (match premises in Y:"B x" and Y':"B x" for B x \ \print_fact Y, print_term B\) + apply (match conclusion in "P x" and "P y" for P x \ \print_term P, print_term x\) apply assumption done + { + fix B x y + assume X: "\x y. B x x y" + have "B x x y" + by (match X in Y:"\y. B y y z" for z \ \rule Y[where y=x]\) + + fix A B + have "(\x y. A (B x) y) \ A (B x) y" + by (match premises in Y: "\xx. ?H (B xx)" \ \rule Y\) + } + (* match focusing retains prems *) fix B x have "(\x. A x) \ (\z. B z) \ A y \ B x" - apply (match prems in Y: "\z :: 'a. A z" \ \match prems in Y': "\z :: 'b. B z" \ \print_fact Y, print_fact Y', rule Y'[where z=x]\\) + apply (match premises in Y: "\z :: 'a. 
A z" \ \match premises in Y': "\z :: 'b. B z" \ \print_fact Y, print_fact Y', rule Y'[where z=x]\\) done (*Attributes *) fix C have "(\x :: 'a. A x) \ (\z. B z) \ A y \ B x \ B x \ A y" apply (intro conjI) - apply (match prems in Y: "\z :: 'a. A z" and Y'[intro]:"\z :: 'b. B z" \ \fastforce\) - apply (match prems in Y: "\z :: 'a. A z" \ \match prems in Y'[intro]:"\z :: 'b. B z" \ \fastforce\\) - apply (match prems in Y[thin]: "\z :: 'a. A z" \ \(match prems in Y':"\z :: 'a. A z" \ \fail\ \ Y': "?H" \ \-\)\) + apply (match premises in Y: "\z :: 'a. A z" and Y'[intro]:"\z :: 'b. B z" \ \fastforce\) + apply (match premises in Y: "\z :: 'a. A z" \ \match premises in Y'[intro]:"\z :: 'b. B z" \ \fastforce\\) + apply (match premises in Y[thin]: "\z :: 'a. A z" \ \(match premises in Y':"\z :: 'a. A z" \ \print_fact Y,fail\ \ _ \ \print_fact Y\)\) + (*apply (match premises in Y: "\z :: 'b. B z" \ \(match premises in Y'[thin]:"\z :: 'b. B z" \ \(match premises in Y':"\z :: 'a. A z" \ \fail\ \ Y': _ \ \-\)\)\)*) apply assumption done fix A B C D have "\uu'' uu''' uu uu'. (\x :: 'a. A uu' x) \ D uu y \ (\z. B uu z) \ C uu y \ (\z y. C uu z) \ B uu x \ B uu x \ C uu y" - apply (match prems in Y[thin]: "\z :: 'a. A ?zz' z" and + apply (match premises in Y[thin]: "\z :: 'a. A ?zz' z" and Y'[thin]: "\rr :: 'b. B ?zz rr" \ \print_fact Y, print_fact Y', intro conjI, rule Y', insert Y', insert Y'[where rr=x]\) - apply (match prems in Y:"B ?u ?x" \ \rule Y\) + apply (match premises in Y:"B ?u ?x" \ \rule Y\) apply (insert TrueI) - apply (match prems in Y'[thin]: "\ff. B uu ff" for uu \ \insert Y', drule meta_spec[where x=x]\) + apply (match premises in Y'[thin]: "\ff. B uu ff" for uu \ \insert Y', drule meta_spec[where x=x]\) apply assumption done @@ -82,33 +96,58 @@ (* Multi-matches. As many facts as match are bound. *) fix A B C x have "(\x :: 'a. A x) \ (\y :: 'a. B y) \ C y \ (A x \ B y \ C y)" - apply (match prems in Y[thin]: "\z :: 'a. 
?A z" (multi) \ \intro conjI, (rule Y)+\) - apply (match prems in Y[thin]: "\z :: 'a. ?A z" (multi) \ \fail\ \ "C y" \ \-\) (* multi-match must bind something *) - apply (match prems in Y: "C y" \ \rule Y\) + apply (match premises in Y[thin]: "\z :: 'a. ?A z" (multi) \ \intro conjI, (rule Y)+\) + apply (match premises in Y[thin]: "\z :: 'a. ?A z" (multi) \ \fail\ \ "C y" \ \-\) (* multi-match must bind something *) + apply (match premises in Y: "C y" \ \rule Y\) done fix A B C x have "(\x :: 'a. A x) \ (\y :: 'a. B y) \ C y \ (A x \ B y \ C y)" - apply (match prems in Y[thin]: "\z. ?A z" (multi) \ \intro conjI, (rule Y)+\) - apply (match prems in Y[thin]: "\z. ?A z" (multi) \ \fail\ \ "C y" \ \-\) (* multi-match must bind something *) - apply (match prems in Y: "C y" \ \rule Y\) + apply (match premises in Y[thin]: "\z. ?A z" (multi) \ \intro conjI, (rule Y)+\) + apply (match premises in Y[thin]: "\z. ?A z" (multi) \ \fail\ \ "C y" \ \-\) (* multi-match must bind something *) + apply (match premises in Y: "C y" \ \rule Y\) done + fix A B C P Q and x :: 'a and y :: 'a + have "(\x y :: 'a. A x y \ Q) \ (\a b. B (a :: 'a) (b :: 'a) \ Q) \ (\x y. C (x :: 'a) (y :: 'a) \ P) \ A y x \ B y x" + by (match premises in Y: "\z a. ?A (z :: 'a) (a :: 'a) \ R" (multi) for R \ \rule conjI, rule Y[where z=x,THEN conjunct1], rule Y[THEN conjunct1]\) + (*We may use for-fixes in multi-matches too. All bound facts must agree on the fixed term *) fix A B C x have "(\y :: 'a. B y \ C y) \ (\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ C y \ (A x \ B y \ C y)" - apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \intro conjI Y[THEN conjunct1]\) - apply (match prems in Y: "\z :: 'a. ?A z \ False" (multi) \ \print_fact Y, fail\ \ "C y" \ \print_term C\) (* multi-match must bind something *) - apply (match prems in Y: "\x. B x \ C x" \ \intro conjI Y[THEN conjunct1]\) - apply (match prems in Y: "C ?x" \ \rule Y\) + apply (match premises in Y: "\x :: 'a. 
P x \ ?U x" (multi) for P \ + \match (P) in B \ \fail\ + \ "\a. B" \ \fail\ + \ _ \ \-\, + intro conjI, (rule Y[THEN conjunct1])\) + apply (rule dup) + apply (match premises in Y':"\x :: 'a. ?U x \ Q x" and Y: "\x :: 'a. Q x \ ?U x" (multi) for Q \ \insert Y[THEN conjunct1]\) + apply assumption (* Previous match requires that Q is consistent *) + apply (match premises in Y: "\z :: 'a. ?A z \ False" (multi) \ \print_fact Y, fail\ \ "C y" \ \print_term C\) (* multi-match must bind something *) + apply (match premises in Y: "\x. B x \ C x" \ \intro conjI Y[THEN conjunct1]\) + apply (match premises in Y: "C ?x" \ \rule Y\) + done + + (* All bindings must be tried for a particular theorem. + However all combinations are NOT explored. *) + fix B A C + assume asms:"\a b. B (a :: 'a) (b :: 'a) \ Q" "\x :: 'a. A x x \ Q" "\a b. C (a :: 'a) (b :: 'a) \ Q" + have "B y x \ C x y \ B x y \ C y x \ A x x" + apply (intro conjI) + apply (match asms in Y: "\z a. ?A (z :: 'a) (a :: 'a) \ R" (multi) for R \ \rule Y[where z=x,THEN conjunct1]\) + apply (match asms in Y: "\z a. ?A (z :: 'a) (a :: 'a) \ R" (multi) for R \ \rule Y[where a=x,THEN conjunct1]\) + apply (match asms in Y: "\z a. ?A (z :: 'a) (a :: 'a) \ R" (multi) for R \ \rule Y[where a=x,THEN conjunct1]\) + apply (match asms in Y: "\z a. ?A (z :: 'a) (a :: 'a) \ R" (multi) for R \ \rule Y[where z=x,THEN conjunct1]\) + apply (match asms in Y: "\z a. A (z :: 'a) (a :: 'a) \ R" for R \ \fail\ \ _ \ \-\) + apply (rule asms[THEN conjunct1]) done (* Attributes *) fix A B C x have "(\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ (\y :: 'a. B y \ C y) \ C y \ (A x \ B y \ C y)" - apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \match Y[THEN conjunct1] in Y':"?H" (multi) \ \intro conjI,rule Y'\\) - apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \match Y[THEN conjunct2] in Y':"?H" (multi) \ \rule Y'\\) + apply (match premises in Y: "\x :: 'a. 
P x \ ?U x" (multi) for P \ \match Y[THEN conjunct1] in Y':"?H" (multi) \ \intro conjI,rule Y'\\) + apply (match premises in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \match Y[THEN conjunct2] in Y':"?H" (multi) \ \rule Y'\\) apply assumption done @@ -123,28 +162,70 @@ (* Testing THEN_ALL_NEW within match *) fix A B C x have "(\x :: 'a. A x \ B x) \ (\y :: 'a. A y \ C y) \ (\y :: 'a. B y \ C y) \ C y \ (A x \ B y \ C y)" - apply (match prems in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \intro conjI ; ((rule Y[THEN conjunct1])?); rule Y[THEN conjunct2] \) + apply (match premises in Y: "\x :: 'a. P x \ ?U x" (multi) for P \ \intro conjI ; ((rule Y[THEN conjunct1])?); rule Y[THEN conjunct2] \) done (* Cut tests *) fix A B C have "D \ C \ A \ B \ A \ C \ D \ True \ C" - by (((match prems in I: "P \ Q" (cut) + by (((match premises in I: "P \ Q" (cut) and I': "P \ ?U" for P Q \ \rule mp [OF I' I[THEN conjunct1]]\)?), simp) + have "D \ C \ A \ B \ A \ C \ D \ True \ C" + by (match premises in I: "P \ Q" (cut 2) + and I': "P \ ?U" for P Q \ \rule mp [OF I' I[THEN conjunct1]]\) + have "A \ B \ A \ C \ C" - by (((match prems in I: "P \ Q" (cut) + by (((match premises in I: "P \ Q" (cut) and I': "P \ ?U" for P Q \ \rule mp [OF I' I[THEN conjunct1]]\)?, simp) | simp) + fix f x y + have "f x y \ f x y" + by (match conclusion in "f x y" for f x y \ \print_term f\) + + fix A B C + assume X: "A \ B" "A \ C" C + have "A \ B \ C" + by (match X in H: "A \ ?H" (multi, cut) \ + \match H in "A \ C" and "A \ B" \ \fail\\ + | simp add: X) + + + (* Thinning an inner focus *) + (* Thinning should persist within a match, even when on an external premise *) + + fix A + have "(\x. A x \ B) \ B \ C \ C" + apply (match premises in H:"\x. A x \ B" \ + \match premises in H'[thin]: "\x. A x \ B" \ + \match premises in H'':"\x. A x \ B" \ \fail\ + \ _ \ \-\\ + ,match premises in H'':"\x. A x \ B" \ \fail\ \ _ \ \-\\) + apply (match premises in H:"\x. 
A x \ B" \ \fail\ + \ H':_ \ \rule H'[THEN conjunct2]\) + done + + + (* Local premises *) + (* Only match premises which actually existed in the goal we just focused.*) + + fix A + assume asms: "C \ D" + have "B \ C \ C" + by (match premises in _ \ \insert asms, + match premises (local) in "B \ C" \ \fail\ + \ H:"C \ D" \ \rule H[THEN conjunct1]\\) end + + (* Testing inner focusing. This fails if we don't smash flex-flex pairs produced by retrofitting. This needs to be done more carefully to avoid smashing legitimate pairs.*) schematic_lemma "?A x \ A x" - apply (match concl in "H" for H \ \match concl in Y for Y \ \print_term Y\\) + apply (match conclusion in "H" for H \ \match conclusion in Y for Y \ \print_term Y\\) apply assumption done @@ -169,9 +250,10 @@ fun test_internal_fact ctxt factnm = (case try (Proof_Context.get_thms ctxt) factnm of NONE => () - | SOME _ => error "Found internal fact")\ + | SOME _ => error "Found internal fact"); +\ -method uses_test\<^sub>1 uses uses_test\<^sub>1_uses = \rule uses_test\<^sub>1_uses\ +method uses_test\<^sub>1 uses uses_test\<^sub>1_uses = (rule uses_test\<^sub>1_uses) lemma assumes A shows A by (uses_test\<^sub>1 uses_test\<^sub>1_uses: assms) @@ -181,12 +263,12 @@ ML \test_internal_fact @{context} "Tests.uses_test\<^sub>1.uses_test\<^sub>1_uses"\ -(* Testing term and fact passing in recursion *) +subsection \Testing term and fact passing in recursion\ method recursion_example for x :: bool uses facts = - \match (x) in + (match (x) in "A \ B" for A B \ \(recursion_example A facts: facts, recursion_example B facts: facts)\ - \ "?H" \ \match facts in U: "x" \ \insert U\\\ + \ "?H" \ \match facts in U: "x" \ \insert U\\) lemma assumes asms: "A" "B" "C" "D" @@ -195,12 +277,29 @@ apply simp done +(* uses facts are not accumulated *) + +method recursion_example' for A :: bool and B :: bool uses facts = + (match facts in + H: "A" and H': "B" \ \recursion_example' "A" "B" facts: H TrueI\ + \ "A" and "True" \ 
\recursion_example' "A" "B" facts: TrueI\ + \ "True" \ \-\ + \ "PROP ?P" \ \fail\) + +lemma + assumes asms: "A" "B" + shows "True" + apply (recursion_example' "A" "B" facts: asms) + apply simp + done + + (*Method.sections in existing method*) -method my_simp\<^sub>1 uses my_simp\<^sub>1_facts = \simp add: my_simp\<^sub>1_facts\ +method my_simp\<^sub>1 uses my_simp\<^sub>1_facts = (simp add: my_simp\<^sub>1_facts) lemma assumes A shows A by (my_simp\<^sub>1 my_simp\<^sub>1_facts: assms) (*Method.sections via Eisbach argument parser*) -method uses_test\<^sub>2 uses uses_test\<^sub>2_uses = \uses_test\<^sub>1 uses_test\<^sub>1_uses: uses_test\<^sub>2_uses\ +method uses_test\<^sub>2 uses uses_test\<^sub>2_uses = (uses_test\<^sub>1 uses_test\<^sub>1_uses: uses_test\<^sub>2_uses) lemma assumes A shows A by (uses_test\<^sub>2 uses_test\<^sub>2_uses: assms) @@ -208,7 +307,7 @@ named_theorems declare_facts\<^sub>1 -method declares_test\<^sub>1 declares declare_facts\<^sub>1 = \rule declare_facts\<^sub>1\ +method declares_test\<^sub>1 declares declare_facts\<^sub>1 = (rule declare_facts\<^sub>1) lemma assumes A shows A by (declares_test\<^sub>1 declare_facts\<^sub>1: assms) @@ -218,29 +317,90 @@ subsection \Rule Instantiation Tests\ method my_allE\<^sub>1 for x :: 'a and P :: "'a \ bool" = - \erule allE [where x = x and P = P]\ + (erule allE [where x = x and P = P]) lemma "\x. Q x \ Q x" by (my_allE\<^sub>1 x Q) method my_allE\<^sub>2 for x :: 'a and P :: "'a \ bool" = - \erule allE [of P x]\ + (erule allE [of P x]) lemma "\x. Q x \ Q x" by (my_allE\<^sub>2 x Q) method my_allE\<^sub>3 for x :: 'a and P :: "'a \ bool" = - \match allE [where 'a = 'a] in X: "\(x :: 'a) P R. \x. P x \ (P x \ R) \ R" \ - \erule X [where x = x and P = P]\\ + (match allE [where 'a = 'a] in X: "\(x :: 'a) P R. \x. P x \ (P x \ R) \ R" \ + \erule X [where x = x and P = P]\) lemma "\x. 
Q x \ Q x" by (my_allE\<^sub>3 x Q) method my_allE\<^sub>4 for x :: 'a and P :: "'a \ bool" = - \match allE [where 'a = 'a] in X: "\(x :: 'a) P R. \x. P x \ (P x \ R) \ R" \ - \erule X [of x P]\\ + (match allE [where 'a = 'a] in X: "\(x :: 'a) P R. \x. P x \ (P x \ R) \ R" \ + \erule X [of x P]\) lemma "\x. Q x \ Q x" by (my_allE\<^sub>4 x Q) -ML {* + +subsection \Polymorphism test\ + +axiomatization foo' :: "'a \ 'b \ 'c \ bool" +axiomatization where foo'_ax1: "foo' x y z \ z \ y" +axiomatization where foo'_ax2: "foo' x y y \ x \ z" +axiomatization where foo'_ax3: "foo' (x :: int) y y \ y \ y" + +lemmas my_thms = foo'_ax1 foo'_ax2 foo'_ax3 + +definition first_id where "first_id x = x" + +lemmas my_thms' = my_thms[of "first_id x" for x] + +method print_conclusion = (match conclusion in concl for concl \ \print_term concl\) + +lemma + assumes foo: "\x (y :: bool). foo' (A x) B (A x)" + shows "\z. A z \ B" + apply + (match conclusion in "f x y" for f y and x :: "'d :: type" \ \ + match my_thms' in R:"\(x :: 'f :: type). ?P (first_id x) \ ?R" + and R':"\(x :: 'f :: type). ?P' (first_id x) \ ?R'" \ \ + match (x) in "q :: 'f" for q \ \ + rule R[of q,simplified first_id_def], + print_conclusion, + rule foo + \\\) + done + + +subsection \Unchecked rule instantiation, with the possibility of runtime errors\ + +named_theorems my_thms_named + +declare foo'_ax3[my_thms_named] + +method foo_method3 declares my_thms_named = + (match my_thms_named[of (unchecked) z for z] in R:"PROP ?H" \ \rule R\) + +notepad +begin + + (*FIXME: Shouldn't need unchecked keyword here. 
See Tests_Failing.thy *) + fix A B x + have "foo' x B A \ A \ B" + by (match my_thms[of (unchecked) z for z] in R:"PROP ?H" \ \rule R\) + + fix A B x + note foo'_ax1[my_thms_named] + have "foo' x B A \ A \ B" + by (match my_thms_named[where x=z for z] in R:"PROP ?H" \ \rule R\) + + fix A B x + note foo'_ax1[my_thms_named] foo'_ax2[my_thms_named] foo'_ax3[my_thms_named] + have "foo' x B A \ A \ B" + by foo_method3 + +end + + +ML \ structure Data = Generic_Data ( type T = thm list; @@ -248,13 +408,13 @@ val extend = I; fun merge data : T = Thm.merge_thms data; ); -*} +\ local_setup \Local_Theory.add_thms_dynamic (@{binding test_dyn}, Data.get)\ setup \Context.theory_map (Data.put @{thms TrueI})\ -method dynamic_thms_test = \rule test_dyn\ +method dynamic_thms_test = (rule test_dyn) locale foo = fixes A @@ -269,4 +429,69 @@ end + +notepad +begin + fix A x + assume X: "\x. A x" + have "A x" + by (match X in H[of x]:"\x. A x" \ \print_fact H,match H in "A x" \ \rule H\\) + + fix A x B + assume X: "\x :: bool. A x \ B" "\x. A x" + assume Y: "A B" + have "B \ B \ B \ B \ B \ B" + apply (intro conjI) + apply (match X in H[OF X(2)]:"\x. A x \ B" \ \print_fact H,rule H\) + apply (match X in H':"\x. A x" and H[OF H']:"\x. A x \ B" \ \print_fact H',print_fact H,rule H\) + apply (match X in H[of Q]:"\x. A x \ ?R" and "?P \ Q" for Q \ \print_fact H,rule H, rule Y\) + apply (match X in H[of Q,OF Y]:"\x. A x \ ?R" and "?P \ Q" for Q \ \print_fact H,rule H\) + apply (match X in H[OF Y,intro]:"\x. A x \ ?R" \ \print_fact H,fastforce\) + apply (match X in H[intro]:"\x. A x \ ?R" \ \rule H[where x=B], rule Y\) + done + + fix x :: "prop" and A + assume X: "TERM x" + assume Y: "\x :: prop. 
A x" + have "A TERM x" + apply (match X in "PROP y" for y \ \rule Y[where x="PROP y"]\) + done end + +subsection \Proper context for method parameters\ + +method add_simp methods m uses f = (match f in H[simp]:_ \ \m\) + +method add_my_thms methods m uses f = (match f in H[my_thms_named]:_ \ \m\) + +method rule_my_thms = (rule my_thms_named) +method rule_my_thms' declares my_thms_named = (rule my_thms_named) + +lemma + assumes A: A and B: B + shows + "(A \ B) \ A \ A \ A" + apply (intro conjI) + apply (add_simp \add_simp \simp\ f: B\ f: A) + apply (add_my_thms \rule_my_thms\ f:A) + apply (add_my_thms \rule_my_thms'\ f:A) + apply (add_my_thms \rule my_thms_named\ f:A) + done + +subsection \Shallow parser tests\ + +method all_args for A B methods m1 m2 uses f1 f2 declares my_thms_named = (fail) + +lemma True + by (all_args True False \-\ \fail\ f1: TrueI f2: TrueI my_thms_named: TrueI | rule TrueI) + +subsection \Method name internalization test\ + + +method test2 = (simp) + +method simp = fail + +lemma "A \ A" by test2 + +end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/eisbach_antiquotations.ML --- a/src/HOL/Eisbach/eisbach_antiquotations.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/eisbach_antiquotations.ML Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: eisbach_antiquotations.ML +(* Title: HOL/Eisbach/eisbach_antiquotations.ML Author: Daniel Matichuk, NICTA/UNSW ML antiquotations for Eisbach. diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/eisbach_rule_insts.ML --- a/src/HOL/Eisbach/eisbach_rule_insts.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/eisbach_rule_insts.ML Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: eisbach_rule_insts.ML +(* Title: HOL/Eisbach/eisbach_rule_insts.ML Author: Daniel Matichuk, NICTA/UNSW Eisbach-aware variants of the "where" and "of" attributes. 
@@ -72,38 +72,59 @@ |> restore_tags thm end; +(* FIXME unused *) +fun read_instantiate_no_thm ctxt insts fixes = + let + val (type_insts, term_insts) = + List.partition (fn (((x, _) : indexname), _) => String.isPrefix "'" x) insts; + + val ctxt1 = + ctxt + |> Context_Position.not_really + |> Proof_Context.read_vars fixes |-> Proof_Context.add_fixes |> #2; + + val typs = + map snd type_insts + |> Syntax.read_typs ctxt1 + |> Syntax.check_typs ctxt1; + + val typ_insts' = map2 (fn (xi, _) => fn T => (xi,T)) type_insts typs; + + val terms = + map snd term_insts + |> Syntax.read_terms ctxt1 + |> Syntax.check_terms ctxt1; + + val term_insts' = map2 (fn (xi, _) => fn t => (xi, t)) term_insts terms; + + in (typ_insts',term_insts') end; + datatype rule_inst = - Named_Insts of ((indexname * string) * (term -> unit)) list -| Term_Insts of (indexname * term) list; + Named_Insts of ((indexname * string) * (term -> unit)) list * (binding * string option * mixfix) list +(*| Unchecked_Of_Insts of (string option list * string option list) * (binding * string option * mixfix) list*) +| Term_Insts of (indexname * term) list +| Unchecked_Term_Insts of term option list * term option list; + +fun mk_pair (t, t') = Logic.mk_conjunction (Logic.mk_term t, Logic.mk_term t'); -fun embed_indexname ((xi,s),f) = - let - fun wrap_xi xi t = Logic.mk_conjunction (Logic.mk_term (Var (xi,fastype_of t)),Logic.mk_term t); - in ((xi,s),f o wrap_xi xi) end; +fun dest_pair t = apply2 Logic.dest_term (Logic.dest_conjunction t); -fun unembed_indexname t = +fun embed_indexname ((xi, s), f) = + let fun wrap_xi xi t = mk_pair (Var (xi, fastype_of t), t); + in ((xi, s), f o wrap_xi xi) end; + +fun unembed_indexname t = dest_pair t |> apfst (Term.dest_Var #> fst); + +fun read_where_insts (insts, fixes) = let - val (t, t') = apply2 Logic.dest_term (Logic.dest_conjunction t); - val (xi, _) = Term.dest_Var t; - in (xi, t') end; - -fun read_where_insts toks = - let - val parser = - Parse.!!! 
- (Parse.and_list1 (Args.var -- (Args.$$$ "=" |-- Parse_Tools.name_term)) -- Parse.for_fixes) - --| Scan.ahead Parse.eof; - val (insts, fixes) = the (Scan.read Token.stopper parser toks); - val insts' = if forall (fn (_, v) => Parse_Tools.is_real_val v) insts - then Term_Insts (map (fn (_,t) => unembed_indexname (Parse_Tools.the_real_val t)) insts) - else Named_Insts (map (fn (xi, p) => embed_indexname - ((xi,Parse_Tools.the_parse_val p),Parse_Tools.the_parse_fun p)) insts); - in - (insts', fixes) - end; + then Term_Insts (map (unembed_indexname o Parse_Tools.the_real_val o snd) insts) + else + Named_Insts (map (fn (xi, p) => embed_indexname + ((xi, Parse_Tools.the_parse_val p), Parse_Tools.the_parse_fun p)) insts, fixes); + in insts' end; fun of_rule thm (args, concl_args) = let @@ -119,31 +140,55 @@ val inst = Args.maybe Parse_Tools.name_term; val concl = Args.$$$ "concl" -- Args.colon; -fun read_of_insts toks thm = +fun close_unchecked_insts context ((insts,concl_inst), fixes) = let - val parser = - Parse.!!! 
- ((Scan.repeat (Scan.unless concl inst) -- Scan.optional (concl |-- Scan.repeat inst) []) - -- Parse.for_fixes) --| Scan.ahead Parse.eof; - val ((insts, concl_insts), fixes) = - the (Scan.read Token.stopper parser toks); + val ctxt = Context.proof_of context; + val ctxt1 = ctxt + |> Proof_Context.read_vars fixes |-> Proof_Context.add_fixes |> #2; + + val insts' = insts @ concl_inst; + + val term_insts = + map (the_list o (Option.map Parse_Tools.the_parse_val)) insts' + |> burrow (Syntax.read_terms ctxt1 + #> Syntax.check_terms ctxt1 + #> Variable.export_terms ctxt1 ctxt) + |> map (try the_single); + + val _ = + (insts', term_insts) + |> ListPair.app (fn (SOME p, SOME t) => Parse_Tools.the_parse_fun p t | _ => ()); + val (insts'',concl_insts'') = chop (length insts) term_insts; + in Unchecked_Term_Insts (insts'', concl_insts'') end; - val insts' = - if forall (fn SOME t => Parse_Tools.is_real_val t | NONE => true) (insts @ concl_insts) - then - Term_Insts - (map_filter (Option.map (Parse_Tools.the_real_val #> unembed_indexname)) (insts @ concl_insts)) - - else +fun read_of_insts checked context ((insts, concl_insts), fixes) = + if forall (fn SOME t => Parse_Tools.is_real_val t | NONE => true) (insts @ concl_insts) + then + if checked + then + (fn _ => + Term_Insts + (map (unembed_indexname o Parse_Tools.the_real_val) (map_filter I (insts @ concl_insts)))) + else + (fn _ => + Unchecked_Term_Insts + (map (Option.map Parse_Tools.the_real_val) insts, + map (Option.map Parse_Tools.the_real_val) concl_insts)) + else + if checked + then + (fn thm => Named_Insts - (apply2 (map (Option.map (fn p => (Parse_Tools.the_parse_val p,Parse_Tools.the_parse_fun p)))) + (apply2 + (map (Option.map (fn p => (Parse_Tools.the_parse_val p, Parse_Tools.the_parse_fun p)))) (insts, concl_insts) - |> of_rule thm |> map ((fn (xi, (nm, tok)) => embed_indexname ((xi, nm), tok)))); - in - (insts', fixes) - end; + |> of_rule thm |> map ((fn (xi, (nm, f)) => embed_indexname ((xi, nm), f))), fixes)) 
+ else + let val result = close_unchecked_insts context ((insts, concl_insts), fixes); + in fn _ => result end; -fun read_instantiate_closed ctxt ((Named_Insts insts), fixes) thm = + +fun read_instantiate_closed ctxt (Named_Insts (insts, fixes)) thm = let val insts' = map (fn ((v, t), _) => ((v, Position.none), t)) insts; @@ -168,22 +213,42 @@ in (thm'' |> restore_tags thm) end - | read_instantiate_closed _ ((Term_Insts insts), _) thm = instantiate_xis insts thm; - -val parse_all : Token.T list context_parser = Scan.lift (Scan.many Token.not_eof); + | read_instantiate_closed ctxt (Unchecked_Term_Insts insts) thm = + let + val (xis, ts) = ListPair.unzip (of_rule thm insts); + val ctxt' = Variable.declare_maxidx (Thm.maxidx_of thm) ctxt; + val (ts', ctxt'') = Variable.import_terms false ts ctxt'; + val ts'' = Variable.export_terms ctxt'' ctxt ts'; + val insts' = ListPair.zip (xis, ts''); + in instantiate_xis insts' thm end + | read_instantiate_closed _ (Term_Insts insts) thm = instantiate_xis insts thm; val _ = Theory.setup - (Attrib.setup @{binding "where"} (parse_all >> - (fn toks => Thm.rule_attribute (fn context => - read_instantiate_closed (Context.proof_of context) (read_where_insts toks)))) + (Attrib.setup @{binding "where"} + (Scan.lift + (Parse.and_list1 (Args.var -- (Args.$$$ "=" |-- Parse_Tools.name_term)) -- Parse.for_fixes) + >> (fn args => let val args' = read_where_insts args in Thm.rule_attribute (fn context => + read_instantiate_closed (Context.proof_of context) args') end)) "named instantiation of theorem"); val _ = Theory.setup - (Attrib.setup @{binding "of"} (parse_all >> - (fn toks => Thm.rule_attribute (fn context => fn thm => - read_instantiate_closed (Context.proof_of context) (read_of_insts toks thm) thm))) + (Attrib.setup @{binding "of"} + (Scan.lift + (Args.mode "unchecked" -- + (Scan.repeat (Scan.unless concl inst) -- + Scan.optional (concl |-- Scan.repeat inst) [] -- + Parse.for_fixes)) -- Scan.state >> + (fn ((unchecked, args), context) 
=> + let + val read_insts = read_of_insts (not unchecked) context args; + in + Thm.rule_attribute (fn context => fn thm => + if Method_Closure.is_free_thm thm andalso unchecked + then Method_Closure.dummy_free_thm + else read_instantiate_closed (Context.proof_of context) (read_insts thm) thm) + end)) "positional instantiation of theorem"); end; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/match_method.ML --- a/src/HOL/Eisbach/match_method.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/match_method.ML Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: match_method.ML +(* Title: HOL/Eisbach/match_method.ML Author: Daniel Matichuk, NICTA/UNSW Setup for "match" proof method. It provides basic fact/term matching in @@ -40,86 +40,79 @@ Match_Term of term Item_Net.T | Match_Fact of thm Item_Net.T | Match_Concl - | Match_Prems; + | Match_Prems of bool; val aconv_net = Item_Net.init (op aconv) single; val parse_match_kind = - Scan.lift @{keyword "concl"} >> K Match_Concl || - Scan.lift @{keyword "prems"} >> K Match_Prems || + Scan.lift @{keyword "conclusion"} >> K Match_Concl || + Scan.lift (@{keyword "premises"} |-- Args.mode "local") >> Match_Prems || Scan.lift (@{keyword "("}) |-- Args.term --| Scan.lift (@{keyword ")"}) >> (fn t => Match_Term (Item_Net.update t aconv_net)) || Attrib.thms >> (fn thms => Match_Fact (fold Item_Net.update thms Thm.full_rules)); -fun nameable_match m = (case m of Match_Fact _ => true | Match_Prems => true | _ => false); +fun nameable_match m = (case m of Match_Fact _ => true | Match_Prems _ => true | _ => false); fun prop_match m = (case m of Match_Term _ => false | _ => true); val bound_term : (term, binding) Parse_Tools.parse_val parser = Parse_Tools.parse_term_val Parse.binding; val fixes = - Parse.and_list1 (Scan.repeat1 bound_term -- - Scan.option (@{keyword "::"} |-- Parse.!!! 
Parse.typ) >> (fn (xs, T) => map (rpair T) xs)) - >> flat; + Parse.and_list1 (Scan.repeat1 (Parse.position bound_term) -- + Scan.option (@{keyword "::"} |-- Parse.!!! Parse.typ) + >> (fn (xs, T) => map (fn (x, pos) => ((x, T), pos)) xs)) >> flat; val for_fixes = Scan.optional (@{keyword "for"} |-- fixes) []; -fun pos_of dyn = - (case dyn of - Parse_Tools.Parse_Val (b, _) => Binding.pos_of b - | _ => raise Fail "Not a parse value"); - +fun pos_of dyn = Parse_Tools.the_parse_val dyn |> Binding.pos_of; (*FIXME: Dynamic facts modify the background theory, so we have to resort to token replacement for matched facts. *) fun dynamic_fact ctxt = bound_term -- Args.opt_attribs (Attrib.check_name ctxt); -type match_args = {unify : bool, multi : bool, cut : bool}; +type match_args = {multi : bool, cut : int}; val parse_match_args = Scan.optional (Args.parens (Parse.enum1 "," - (Args.$$$ "unify" || Args.$$$ "multi" || Args.$$$ "cut"))) [] >> + (Args.$$$ "multi" -- Scan.succeed ~1 || Args.$$$ "cut" -- Scan.optional Parse.nat 1))) [] >> (fn ss => - fold (fn s => fn {unify, multi, cut} => + fold (fn s => fn {multi, cut} => (case s of - "unify" => {unify = true, multi = multi, cut = cut} - | "multi" => {unify = unify, multi = true, cut = cut} - | "cut" => {unify = unify, multi = multi, cut = true})) - ss {unify = false, multi = false, cut = false}); + ("multi", _) => {multi = true, cut = cut} + | ("cut", n) => {multi = multi, cut = n})) + ss {multi = false, cut = ~1}); -(*TODO: Shape holes in thms *) fun parse_named_pats match_kind = Args.context :|-- (fn ctxt => - Scan.lift (Parse.and_list1 (Scan.option (dynamic_fact ctxt --| Args.colon) :-- - (fn opt_dyn => - if is_none opt_dyn orelse nameable_match match_kind - then Parse_Tools.name_term -- parse_match_args - else - let val b = #1 (the opt_dyn) - in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end)) - -- for_fixes -- (@{keyword "\"} |-- Parse.token Parse.cartouche)) + Scan.lift (Parse.and_list1 + 
(Scan.option (dynamic_fact ctxt --| Args.colon) :-- + (fn opt_dyn => + if is_none opt_dyn orelse nameable_match match_kind + then Parse_Tools.name_term -- parse_match_args + else + let val b = #1 (the opt_dyn) + in error ("Cannot bind fact name in term match" ^ Position.here (pos_of b)) end)) + -- for_fixes -- (@{keyword "\"} |-- Parse.token Parse.cartouche)) >> (fn ((ts, fixes), cartouche) => (case Token.get_value cartouche of SOME (Token.Source src) => let val text = Method_Closure.read_inner_method ctxt src - (*TODO: Correct parse context for attributes?*) val ts' = map (fn (b, (Parse_Tools.Real_Val v, match_args)) => ((Option.map (fn (b, att) => - (Parse_Tools.the_real_val b, - map (Attrib.attribute ctxt) att)) b, match_args), v) + (Parse_Tools.the_real_val b, att)) b, match_args), v) | _ => raise Fail "Expected closed term") ts - val fixes' = map (fn (p, _) => Parse_Tools.the_real_val p) fixes + val fixes' = map (fn ((p, _), _) => Parse_Tools.the_real_val p) fixes in (ts', fixes', text) end | SOME _ => error "Unexpected token value in match cartouche" | NONE => let - val fixes' = map (fn (pb, otyp) => (Parse_Tools.the_parse_val pb, otyp, NoSyn)) fixes; + val fixes' = map (fn ((pb, otyp), _) => (Parse_Tools.the_parse_val pb, otyp, NoSyn)) fixes; val (fixes'', ctxt1) = Proof_Context.read_vars fixes' ctxt; val (fix_nms, ctxt2) = Proof_Context.add_fixes fixes'' ctxt1; @@ -130,10 +123,34 @@ then Syntax.parse_prop ctxt3 term else Syntax.parse_term ctxt3 term; + fun drop_Trueprop_dummy t = + (case t of + Const (@{const_name Trueprop}, _) $ + (Const (@{syntax_const "_type_constraint_"}, T) $ + Const (@{const_name Pure.dummy_pattern}, _)) => + Const (@{syntax_const "_type_constraint_"}, T) $ + Const (@{const_name Pure.dummy_pattern}, propT) + | t1 $ t2 => drop_Trueprop_dummy t1 $ drop_Trueprop_dummy t2 + | Abs (a, T, b) => Abs (a, T, drop_Trueprop_dummy b) + | _ => t); + val pats = map (fn (_, (term, _)) => parse_term (Parse_Tools.the_parse_val term)) ts + |> map 
drop_Trueprop_dummy + |> (fn ts => fold_map Term.replace_dummy_patterns ts (Variable.maxidx_of ctxt3 + 1)) + |> fst |> Syntax.check_terms ctxt3; + val pat_fixes = fold (Term.add_frees) pats [] |> map fst; + + val _ = + map2 (fn nm => fn (_, pos) => + member (op =) pat_fixes nm orelse + error ("For-fixed variable must be bound in some pattern" ^ Position.here pos)) + fix_nms fixes; + + val _ = map (Term.map_types Type.no_tvars) pats; + val ctxt4 = fold Variable.declare_term pats ctxt3; val (Ts, ctxt5) = ctxt4 |> fold_map Proof_Context.inferred_param fix_nms; @@ -146,12 +163,6 @@ | reject_extra_free _ () = (); val _ = (fold o fold_aterms) reject_extra_free pats (); - (*fun test_multi_bind {multi = multi, ...} pat = multi andalso - not (null (inter (op =) (map Free (Term.add_frees pat [])) real_fixes)) andalso - error "Cannot fix terms in multi-match. Use a schematic instead." - - val _ = map2 (fn pat => fn (_, (_, match_args)) => test_multi_bind match_args pat) pats ts*) - val binds = map (fn (b, _) => Option.map (fn (b, att) => (Parse_Tools.the_parse_val b, att)) b) ts; @@ -163,20 +174,27 @@ val param_thm = map (Drule.mk_term o Thm.cterm_of ctxt' o Free) abs_nms |> Conjunction.intr_balanced - |> Drule.generalize ([], map fst abs_nms); + |> Drule.generalize ([], map fst abs_nms) + |> Method_Closure.tag_free_thm; - val thm = + val atts = map (Attrib.attribute ctxt') att; + val (param_thm', ctxt'') = Thm.proof_attributes atts param_thm ctxt'; + + fun label_thm thm = Thm.cterm_of ctxt' (Free (nm, propT)) |> Drule.mk_term - |> not (null abs_nms) ? Conjunction.intr param_thm - |> Drule.zero_var_indexes - |> Method_Closure.tag_free_thm; + |> not (null abs_nms) ? 
Conjunction.intr thm + + val [head_thm, body_thm] = + Drule.zero_var_indexes_list (map label_thm [param_thm, param_thm']) + |> map Method_Closure.tag_free_thm; - (*TODO: Preprocess attributes here?*) - - val (_, ctxt'') = Proof_Context.note_thmss "" [((b, []), [([thm], [])])] ctxt'; + val ctxt''' = + Attrib.local_notes "" [((b, []), [([body_thm], [])])] ctxt'' + |> snd + |> Variable.declare_maxidx (Thm.maxidx_of head_thm); in - (SOME (Thm.prop_of thm, map (Attrib.attribute ctxt) att) :: tms, ctxt'') + (SOME (Thm.prop_of head_thm, att) :: tms, ctxt''') end | upd_ctxt NONE _ (tms, ctxt) = (NONE :: tms, ctxt); @@ -184,7 +202,7 @@ |> (fn ctxt => fold2 upd_ctxt binds pats ([], ctxt) |> apfst rev) ||> Proof_Context.restore_mode ctxt; - val (src, text) = Method_Closure.read_text_closure ctxt6 (Token.input_of cartouche); + val (src, text) = Method_Closure.read_inner_text_closure ctxt6 (Token.input_of cartouche); val morphism = Variable.export_morphism ctxt6 @@ -193,20 +211,34 @@ |> Variable.declare_maxidx (Variable.maxidx_of ctxt6)); val pats' = map (Term.map_types Type_Infer.paramify_vars #> Morphism.term morphism) pats; - val _ = ListPair.app (fn ((_, (Parse_Tools.Parse_Val (_, f), _)), t) => f t) (ts, pats'); + val _ = ListPair.app (fn ((_, (v, _)), t) => Parse_Tools.the_parse_fun v t) (ts, pats'); - val binds' = map (Option.map (fn (t, atts) => (Morphism.term morphism t, atts))) binds; + fun close_src src = + let + val src' = Token.closure_src src |> Token.transform_src morphism; + val _ = + map2 (fn tok1 => fn tok2 => + (case Token.get_value tok2 of + SOME value => Token.assign (SOME value) tok1 + | NONE => ())) + (Token.args_of_src src) + (Token.args_of_src src'); + in src' end; + + val binds' = + map (Option.map (fn (t, atts) => (Morphism.term morphism t, map close_src atts))) binds; val _ = ListPair.app - (fn ((SOME ((Parse_Tools.Parse_Val (_, f), _)), _), SOME (t, _)) => f t + (fn ((SOME ((v, _)), _), SOME (t, _)) => Parse_Tools.the_parse_fun v t | ((NONE, _), 
NONE) => () | _ => error "Mismatch between real and parsed bound variables") (ts, binds'); val real_fixes' = map (Morphism.term morphism) real_fixes; val _ = - ListPair.app (fn ((Parse_Tools.Parse_Val (_, f), _), t) => f t) (fixes, real_fixes'); + ListPair.app (fn (((v, _) , _), t) => Parse_Tools.the_parse_fun v t) + (fixes, real_fixes'); val match_args = map (fn (_, (_, match_args)) => match_args) ts; val binds'' = (binds' ~~ match_args) ~~ pats'; @@ -218,10 +250,6 @@ end))); -fun parse_match_bodies match_kind = - Parse.enum1' "\" (parse_named_pats match_kind); - - fun dest_internal_fact t = (case try Logic.dest_conjunction t of SOME (params, head) => @@ -234,19 +262,8 @@ let val ts' = map (Envir.norm_term env) ts; val insts = map (Thm.cterm_of ctxt) ts' ~~ map (Thm.cterm_of ctxt) params; - val tags = Thm.get_tags thm; - - (* - val Tinsts = Type.raw_matches ((map (fastype_of) params), (map (fastype_of) ts')) Vartab.empty - |> Vartab.dest - |> map (fn (xi, (S, typ)) => (certT (TVar (xi, S)), certT typ)) - *) - - val thm' = Drule.cterm_instantiate insts thm - (*|> Thm.instantiate (Tinsts, [])*) - |> Thm.map_tags (K tags); in - thm' + Drule.cterm_instantiate insts thm end; fun do_inst fact_insts' env text ctxt = @@ -256,35 +273,30 @@ (fn ((((SOME ((_, head), att), _), _), _), thms) => SOME (head, (thms, att)) | _ => NONE) fact_insts'; - fun apply_attribute thm att ctxt = - let - val (opt_context', thm') = att (Context.Proof ctxt, thm) - in - (case thm' of - SOME _ => error "Rule attributes cannot be applied here" - | _ => the_default ctxt (Option.map Context.proof_of opt_context')) - end; - - fun apply_attributes atts thm = fold (apply_attribute thm) atts; - - (*TODO: What to do about attributes that raise errors?*) - val (fact_insts, ctxt') = - fold_map (fn (head, (thms, atts : attribute list)) => fn ctxt => - ((head, thms), fold (apply_attributes atts) thms ctxt)) fact_insts ctxt; - fun try_dest_term thm = try (Thm.prop_of #> dest_internal_fact #> snd) thm; - fun 
expand_fact thm = + fun expand_fact fact_insts thm = the_default [thm] (case try_dest_term thm of SOME t_ident => AList.lookup (op aconv) fact_insts t_ident | NONE => NONE); - val morphism = + fun fact_morphism fact_insts = Morphism.term_morphism "do_inst.term" (Envir.norm_term env) $> - Morphism.fact_morphism "do_inst.fact" (maps expand_fact); + Morphism.typ_morphism "do_inst.type" (Envir.norm_type (Envir.type_env env)) $> + Morphism.fact_morphism "do_inst.fact" (maps (expand_fact fact_insts)); - val text' = Method.map_source (Token.transform_src morphism) text; + fun apply_attribute (head, (fact, atts)) (fact_insts, ctxt) = + let + val morphism = fact_morphism fact_insts; + val atts' = map (Attrib.attribute ctxt o Token.transform_src morphism) atts; + val (fact'', ctxt') = fold_map (Thm.proof_attributes atts') fact ctxt; + in ((head, fact'') :: fact_insts, ctxt') end; + + (*TODO: What to do about attributes that raise errors?*) + val (fact_insts', ctxt') = fold_rev (apply_attribute) fact_insts ([], ctxt); + + val text' = Method.map_source (Token.transform_src (fact_morphism fact_insts')) text; in (text', ctxt') end; @@ -307,28 +319,62 @@ ((((Option.map prep_head x, args), params''), pat''), ctxt') end; -fun match_filter_env ctxt fixes (ts, params) thm env = +fun recalculate_maxidx env = + let + val tenv = Envir.term_env env; + val tyenv = Envir.type_env env; + val max_tidx = Vartab.fold (fn (_, (_, t)) => curry Int.max (maxidx_of_term t)) tenv ~1; + val max_Tidx = Vartab.fold (fn (_, (_, T)) => curry Int.max (maxidx_of_typ T)) tyenv ~1; + in + Envir.Envir + {maxidx = Int.max (Int.max (max_tidx, max_Tidx), Envir.maxidx_of env), + tenv = tenv, tyenv = tyenv} + end + +fun morphism_env morphism env = + let + val tenv = Envir.term_env env + |> Vartab.map (K (fn (T, t) => (Morphism.typ morphism T, Morphism.term morphism t))); + val tyenv = Envir.type_env env + |> Vartab.map (K (fn (S, T) => (S, Morphism.typ morphism T))); + in Envir.Envir {maxidx = Envir.maxidx_of env, 
tenv = tenv, tyenv = tyenv} end; + +fun export_with_params ctxt morphism (SOME ts, params) thm env = + let + val outer_env = morphism_env morphism env; + val thm' = Morphism.thm morphism thm; + in inst_thm ctxt outer_env params ts thm' end + | export_with_params _ morphism (NONE,_) thm _ = Morphism.thm morphism thm; + +fun match_filter_env is_newly_fixed pat_vars fixes params env = let val param_vars = map Term.dest_Var params; - val params' = map (Envir.lookup env) param_vars; + + val tenv = Envir.term_env env; + + val params' = map (fn (xi, _) => Vartab.lookup tenv xi) param_vars; val fixes_vars = map Term.dest_Var fixes; - val tenv = Envir.term_env env; val all_vars = Vartab.keys tenv; val extra_vars = subtract (fn ((xi, _), xi') => xi = xi') fixes_vars all_vars; - val tenv' = Envir.term_env env - |> fold (Vartab.delete_safe) extra_vars; + val tenv' = tenv |> fold (Vartab.delete_safe) extra_vars; val env' = - Envir.Envir {maxidx = Envir.maxidx_of env, tenv = tenv', tyenv = Envir.type_env env}; + Envir.Envir {maxidx = Envir.maxidx_of env, tenv = tenv', tyenv = Envir.type_env env} + + val all_params_bound = forall (fn SOME (_, Free (x,_)) => is_newly_fixed x | _ => false) params'; + + val all_params_distinct = not (has_duplicates (op =) params'); - val all_params_bound = forall (fn SOME (Var _) => true | _ => false) params'; + val pat_fixes = inter (eq_fst (op =)) fixes_vars pat_vars; + + val all_pat_fixes_bound = forall (fn (xi, _) => is_some (Vartab.lookup tenv' xi)) pat_fixes; in - if all_params_bound - then SOME (case ts of SOME ts => inst_thm ctxt env params ts thm | _ => thm, env') + if all_params_bound andalso all_pat_fixes_bound andalso all_params_distinct + then SOME env' else NONE end; @@ -339,7 +385,7 @@ fun prem_id_eq ((id, _ : thm), (id', _ : thm)) = id = id'; val prem_rules : (int * thm) Item_Net.T = - Item_Net.init prem_id_eq (single o Thm.full_prop_of o snd); + Item_Net.init prem_id_eq (single o Thm.full_prop_of o snd); fun raw_thm_to_id thm = 
(case Properties.get (Thm.get_tags thm) prem_idN of NONE => NONE | SOME id => Int.fromString id) @@ -359,13 +405,34 @@ val focus_prems = #1 o Focus_Data.get; +fun hyp_from_premid ctxt (ident, prem) = + let + val ident = Thm.cterm_of ctxt (HOLogic.mk_number @{typ nat} ident |> Logic.mk_term); + val hyp = + (case #hyps (Thm.crep_thm prem) of + [hyp] => hyp + | _ => error "Prem should have exactly one hyp"); (* FIXME error vs. raise Fail !? *) + val ct = Drule.mk_term (hyp) |> Thm.cprop_of; + in Drule.protect (Conjunction.mk_conjunction (ident, ct)) end; + +fun hyp_from_ctermid ctxt (ident,cterm) = + let + val ident = Thm.cterm_of ctxt (HOLogic.mk_number @{typ nat} ident |> Logic.mk_term); + in Drule.protect (Conjunction.mk_conjunction (ident, cterm)) end; + +fun add_premid_hyp premid ctxt = + Thm.declare_hyps (hyp_from_premid ctxt premid) ctxt; + fun add_focus_prem prem = + `(Focus_Data.get #> #1 #> #1) ##> (Focus_Data.map o @{apply 3(1)}) (fn (next, net) => (next + 1, Item_Net.update (next, Thm.tag_rule (prem_idN, string_of_int next) prem) net)); -fun remove_focus_prem thm = +fun remove_focus_prem' (ident, thm) = (Focus_Data.map o @{apply 3(1)} o apsnd) - (Item_Net.remove (raw_thm_to_id thm, thm)); + (Item_Net.remove (ident, thm)); + +fun remove_focus_prem thm = remove_focus_prem' (raw_thm_to_id thm, thm); (*TODO: Preliminary analysis to see if we're trying to clear in a non-focus match?*) val _ = @@ -394,22 +461,48 @@ (Focus_Data.map o @{apply 3(3)}) (append (map (fn (_, ct) => Thm.term_of ct) params)); +fun solve_term ct = Thm.trivial ct OF [Drule.termI]; + +fun get_thinned_prems goal = + let + val chyps = Thm.crep_thm goal |> #hyps; + + fun prem_from_hyp hyp goal = + let + val asm = Thm.assume hyp; + val (identt,ct) = asm |> Goal.conclude |> Thm.cprop_of |> Conjunction.dest_conjunction; + val ident = HOLogic.dest_number (Thm.term_of identt |> Logic.dest_term) |> snd; + val thm = Conjunction.intr (solve_term identt) (solve_term ct) |> Goal.protect 0 + val goal' = 
Thm.implies_elim (Thm.implies_intr hyp goal) thm; + in + (SOME (ident,ct),goal') + end handle TERM _ => (NONE,goal) | THM _ => (NONE,goal); + in + fold_map prem_from_hyp chyps goal + |>> map_filter I + end; + (* Add focus elements as proof data *) -fun augment_focus - ({context, params, prems, asms, concl, schematics} : Subgoal.focus) : Subgoal.focus = +fun augment_focus (focus: Subgoal.focus) : (int list * Subgoal.focus) = let - val context' = context + val {context, params, prems, asms, concl, schematics} = focus; + + val (prem_ids,ctxt') = context |> add_focus_params params |> add_focus_schematics (snd schematics) - |> fold add_focus_prem (rev prems); + |> fold_map add_focus_prem (rev prems) + + val local_prems = map2 pair prem_ids (rev prems); + + val ctxt'' = fold add_premid_hyp local_prems ctxt'; in - {context = context', + (prem_ids,{context = ctxt'', params = params, prems = prems, concl = concl, schematics = schematics, - asms = asms} + asms = asms}) end; @@ -432,69 +525,154 @@ schematics = schematics', asms = asms} : Subgoal.focus, goal'') end; -exception MATCH_CUT; + +fun deduplicate eq prev seq = + Seq.make (fn () => + (case Seq.pull seq of + SOME (x, seq') => + if member eq prev x + then Seq.pull (deduplicate eq prev seq') + else SOME (x, deduplicate eq (x :: prev) seq') + | NONE => NONE)); + + +fun consistent_env env = + let + val tenv = Envir.term_env env; + val tyenv = Envir.type_env env; + in + forall (fn (_, (T, t)) => Envir.norm_type tyenv T = fastype_of t) (Vartab.dest tenv) + end; + +fun term_eq_wrt (env1,env2) (t1,t2) = + Envir.eta_contract (Envir.norm_term env1 t1) aconv + Envir.eta_contract (Envir.norm_term env2 t2); + +fun type_eq_wrt (env1,env2) (T1,T2) = + Envir.norm_type (Envir.type_env env1) T1 = Envir.norm_type (Envir.type_env env2) T2 + -val raise_match : (thm * Envir.env) Seq.seq = Seq.make (fn () => raise MATCH_CUT); +fun eq_env (env1, env2) = + Envir.maxidx_of env1 = Envir.maxidx_of env1 andalso + ListPair.allEq (fn ((var, (_, t)), 
(var', (_, t'))) => + (var = var' andalso term_eq_wrt (env1,env2) (t,t'))) + (apply2 Vartab.dest (Envir.term_env env1, Envir.term_env env2)) + andalso + ListPair.allEq (fn ((var, (_, T)), (var', (_, T'))) => + var = var' andalso type_eq_wrt (env1,env2) (T,T')) + (apply2 Vartab.dest (Envir.type_env env1, Envir.type_env env2)); + + +fun merge_env (env1,env2) = + let + val tenv = + Vartab.merge (eq_snd (term_eq_wrt (env1, env2))) (Envir.term_env env1, Envir.term_env env2); + val tyenv = + Vartab.merge (eq_snd (type_eq_wrt (env1, env2)) andf eq_fst (op =)) + (Envir.type_env env1,Envir.type_env env2); + val maxidx = Int.max (Envir.maxidx_of env1, Envir.maxidx_of env2); + in Envir.Envir {maxidx = maxidx, tenv = tenv, tyenv = tyenv} end; + + +fun import_with_tags thms ctxt = + let + val ((_, thms'), ctxt') = Variable.import false thms ctxt; + val thms'' = map2 (fn thm => Thm.map_tags (K (Thm.get_tags thm))) thms thms'; + in (thms'', ctxt') end; + + +fun try_merge (env, env') = SOME (merge_env (env, env')) handle Vartab.DUP _ => NONE + + +fun Seq_retrieve seq f = + let + fun retrieve' (list, seq) f = + (case Seq.pull seq of + SOME (x, seq') => + if f x then (SOME x, (list, seq')) + else retrieve' (list @ [x], seq') f + | NONE => (NONE, (list, seq))); + + val (result, (list, seq)) = retrieve' ([], seq) f; + in (result, Seq.append (Seq.of_list list) seq) end; fun match_facts ctxt fixes prop_pats get = let fun is_multi (((_, x : match_args), _), _) = #multi x; - fun is_unify (_, x : match_args) = #unify x; - fun is_cut (_, x : match_args) = #cut x; + fun get_cut (((_, x : match_args), _), _) = #cut x; + fun do_cut n = if n = ~1 then I else Seq.take n; + + val raw_thmss = map (get o snd) prop_pats; + val (thmss,ctxt') = fold_burrow import_with_tags raw_thmss ctxt; - fun match_thm (((x, params), pat), thm) env = + val newly_fixed = Variable.is_newly_fixed ctxt' ctxt; + + val morphism = Variable.export_morphism ctxt' ctxt; + + fun match_thm (((x, params), pat), thm) = let - fun 
try_dest_term term = the_default term (try Logic.dest_term term); - - val pat' = pat |> Envir.norm_term env |> try_dest_term; + val pat_vars = Term.add_vars pat []; - val item' = Thm.prop_of thm |> try_dest_term; val ts = Option.map (fst o fst) (fst x); - (*FIXME: Do we need to move one of these patterns above the other?*) + + val item' = Thm.prop_of thm; val matches = - (if is_unify x - then Unify.smash_unifiers (Context.Proof ctxt) [(pat', item') ] env - else Unify.matchers (Context.Proof ctxt) [(pat', item')]) + (Unify.matchers (Context.Proof ctxt) [(pat, item')]) + |> Seq.filter consistent_env |> Seq.map_filter (fn env' => - match_filter_env ctxt fixes (ts, params) thm (Envir.merge (env, env'))) - |> is_cut x ? (fn t => Seq.make (fn () => - Option.map (fn (x, _) => (x, raise_match)) (Seq.pull t))); - in - matches - end; + (case match_filter_env newly_fixed pat_vars fixes params env' of + SOME env'' => SOME (export_with_params ctxt morphism (ts,params) thm env',env'') + | NONE => NONE)) + |> Seq.map (apfst (Thm.map_tags (K (Thm.get_tags thm)))) + |> deduplicate (eq_pair Thm.eq_thm_prop eq_env) [] + in matches end; val all_matches = - map (fn pat => (pat, get (snd pat))) prop_pats + map2 pair prop_pats thmss |> map (fn (pat, matches) => (pat, map (fn thm => match_thm (pat, thm)) matches)); fun proc_multi_match (pat, thmenvs) (pats, env) = - if is_multi pat then - let - val empty = ([], Envir.empty ~1); + do_cut (get_cut pat) + (if is_multi pat then + let + fun maximal_set tail seq envthms = + Seq.make (fn () => + (case Seq.pull seq of + SOME ((thm, env'), seq') => + let + val (result, envthms') = + Seq_retrieve envthms (fn (env, _) => eq_env (env, env')); + in + (case result of + SOME (_,thms) => SOME ((env', thm :: thms), maximal_set tail seq' envthms') + | NONE => Seq.pull (maximal_set (tail @ [(env', [thm])]) seq' envthms')) + end + | NONE => Seq.pull (Seq.append envthms (Seq.of_list tail)))); - val thmenvs' = - Seq.EVERY (map (fn e => fn (thms, env) => - 
Seq.append (Seq.map (fn (thm, env') => (thm :: thms, env')) (e env)) - (Seq.single (thms, env))) thmenvs) empty; - in - Seq.map_filter (fn (fact, env') => - if not (null fact) then SOME ((pat, fact) :: pats, env') else NONE) thmenvs' - end - else - fold (fn e => Seq.append (Seq.map (fn (thm, env') => - ((pat, [thm]) :: pats, env')) (e env))) thmenvs Seq.empty; + val maximal_sets = fold (maximal_set []) thmenvs Seq.empty; + in + maximal_sets + |> Seq.map swap + |> Seq.filter (fn (thms, _) => not (null thms)) + |> Seq.map_filter (fn (thms, env') => + (case try_merge (env, env') of + SOME env'' => SOME ((pat, thms) :: pats, env'') + | NONE => NONE)) + end + else + let + fun just_one (thm, env') = + (case try_merge (env,env') of + SOME env'' => SOME ((pat,[thm]) :: pats, env'') + | NONE => NONE); + in fold (fn seq => Seq.append (Seq.map_filter just_one seq)) thmenvs Seq.empty end); val all_matches = - Seq.EVERY (map proc_multi_match all_matches) ([], Envir.empty ~1) - |> Seq.filter (fn (_, e) => forall (is_some o Envir.lookup e o Term.dest_Var) fixes); - - fun map_handle seq = Seq.make (fn () => - (case (Seq.pull seq handle MATCH_CUT => NONE) of - SOME (x, seq') => SOME (x, map_handle seq') - | NONE => NONE)); + Seq.EVERY (map proc_multi_match all_matches) ([], Envir.empty ~1); in - map_handle all_matches + all_matches + |> Seq.map (apsnd (morphism_env morphism)) end; fun real_match using ctxt fixes m text pats goal = @@ -507,7 +685,6 @@ |> Seq.map (fn (fact_insts, env) => do_inst fact_insts env text ctxt') end; - (*TODO: Slightly hacky re-use of fact match implementation in plain term matching *) fun make_term_matches ctxt get = let val pats' = @@ -535,20 +712,24 @@ let fun focus_cases f g = (case match_kind of - Match_Prems => f + Match_Prems b => f b | Match_Concl => g | _ => raise Fail "Match kind fell through"); - val ({context = focus_ctxt, params, asms, concl, ...}, focused_goal) = - focus_cases (Subgoal.focus_prems) (focus_concl) ctxt 1 goal + val 
(goal_thins,goal) = get_thinned_prems goal; + + val ((local_premids,{context = focus_ctxt, params, asms, concl, ...}), focused_goal) = + focus_cases (K Subgoal.focus_prems) (focus_concl) ctxt 1 goal |>> augment_focus; val texts = focus_cases - (fn _ => + (fn is_local => fn _ => make_fact_matches focus_ctxt - (Item_Net.retrieve (focus_prems focus_ctxt |> snd) #> - order_list)) + (Item_Net.retrieve (focus_prems focus_ctxt |> snd) + #> filter_out (member (eq_fst (op =)) goal_thins) + #> is_local ? filter (fn (p,_) => exists (fn id' => id' = p) local_premids) + #> order_list)) (fn _ => make_term_matches focus_ctxt (fn _ => [Logic.strip_imp_concl (Thm.term_of concl)])) (); @@ -557,13 +738,34 @@ fun do_retrofit inner_ctxt goal' = let - val cleared_prems = - subtract (eq_fst (op =)) + val (goal'_thins,goal') = get_thinned_prems goal'; + + val thinned_prems = + ((subtract (eq_fst (op =)) (focus_prems inner_ctxt |> snd |> Item_Net.content) - (focus_prems focus_ctxt |> snd |> Item_Net.content) - |> map (fn (_, thm) => - Thm.hyps_of thm - |> (fn [hyp] => hyp | _ => error "Prem should have only one hyp")); + (focus_prems focus_ctxt |> snd |> Item_Net.content)) + |> map (fn (id, thm) => + #hyps (Thm.crep_thm thm) + |> (fn [chyp] => (id, (SOME chyp, NONE)) + | _ => error "Prem should have only one hyp"))); + + val all_thinned_prems = + thinned_prems @ + map (fn (id, prem) => (id, (NONE, SOME prem))) (goal'_thins @ goal_thins); + + val (thinned_local_prems,thinned_extra_prems) = + List.partition (fn (id, _) => member (op =) local_premids id) all_thinned_prems; + + val local_thins = + thinned_local_prems + |> map (fn (_, (SOME t, _)) => Thm.term_of t + | (_, (_, SOME pt)) => Thm.term_of pt |> Logic.dest_term); + + val extra_thins = + thinned_extra_prems + |> map (fn (id, (SOME ct, _)) => (id, Drule.mk_term ct |> Thm.cprop_of) + | (id, (_, SOME pt)) => (id, pt)) + |> map (hyp_from_ctermid inner_ctxt); val n_subgoals = Thm.nprems_of goal'; fun prep_filter t = @@ -572,12 +774,13 @@ 
if member (op =) prems t then SOME (remove1 (op aconv) t prems) else NONE; in Subgoal.retrofit inner_ctxt ctxt params asms 1 goal' goal |> - (if n_subgoals = 0 orelse null cleared_prems then I + (if n_subgoals = 0 orelse null local_thins then I else Seq.map (Goal.restrict 1 n_subgoals) #> Seq.maps (ALLGOALS (fn i => - DETERM (filter_prems_tac' ctxt prep_filter filter_test cleared_prems i))) + DETERM (filter_prems_tac' ctxt prep_filter filter_test local_thins i))) #> Seq.map (Goal.unrestrict 1)) + |> Seq.map (fold Thm.weaken extra_thins) end; fun apply_text (text, ctxt') = @@ -585,7 +788,7 @@ val goal' = DROP_CASES (Method_Closure.method_evaluate text ctxt' using) focused_goal |> Seq.maps (DETERM (do_retrofit ctxt')) - |> Seq.map (fn goal => ([]: cases, goal)) + |> Seq.map (fn goal => ([]: cases, goal)); in goal' end; in Seq.map apply_text texts @@ -593,22 +796,19 @@ end; val match_parser = - parse_match_kind :-- (fn kind => Scan.lift @{keyword "in"} |-- parse_match_bodies kind) >> + parse_match_kind :-- (fn kind => + Scan.lift @{keyword "in"} |-- Parse.enum1' "\" (parse_named_pats kind)) >> (fn (matches, bodies) => fn ctxt => fn using => fn goal => if Method_Closure.is_dummy goal then Seq.empty else let fun exec (pats, fixes, text) goal = let - val ctxt' = fold Variable.declare_term fixes ctxt - |> fold (fn (_, t) => Variable.declare_term t) pats; (*Is this a good idea? We really only care about the maxidx*) - in - real_match using ctxt' fixes matches text pats goal - end; - in - Seq.FIRST (map exec bodies) goal - |> Seq.flat - end); + val ctxt' = + fold Variable.declare_term fixes ctxt + |> fold (fn (_, t) => Variable.declare_term t) pats; (*Is this a good idea? 
We really only care about the maxidx*) + in real_match using ctxt' fixes matches text pats goal end; + in Seq.flat (Seq.FIRST (map exec bodies) goal) end); val _ = Theory.setup diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/method_closure.ML --- a/src/HOL/Eisbach/method_closure.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/method_closure.ML Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: method_closure.ML +(* Title: HOL/Eisbach/method_closure.ML Author: Daniel Matichuk, NICTA/UNSW Facilities for treating method syntax as a closure, with abstraction @@ -12,19 +12,25 @@ sig val is_dummy: thm -> bool val tag_free_thm: thm -> thm + val is_free_thm: thm -> bool + val dummy_free_thm: thm val free_aware_rule_attribute: thm list -> (Context.generic -> thm -> thm) -> Thm.attribute + val wrap_attribute: {handle_all_errs : bool, declaration : bool} -> + Binding.binding -> theory -> theory val read_inner_method: Proof.context -> Token.src -> Method.text - val read_text_closure: Proof.context -> Input.source -> Token.src * Method.text + val read_text_closure: Proof.context -> Token.src -> Token.src * Method.text + val read_inner_text_closure: Proof.context -> Input.source -> Token.src * Method.text + val parse_method: Method.text context_parser val method_evaluate: Method.text -> Proof.context -> Method.method val get_inner_method: Proof.context -> string * Position.T -> (term list * (string list * string list)) * Method.text val eval_inner_method: Proof.context -> (term list * string list) * Method.text -> - term list -> (string * thm list) list -> Method.method list -> + term list -> (string * thm list) list -> (Proof.context -> Method.method) list -> Proof.context -> Method.method val method_definition: binding -> (binding * typ option * mixfix) list -> - binding list -> binding list -> binding list -> Input.source -> local_theory -> local_theory + binding list -> binding list -> binding list -> Token.src -> local_theory -> local_theory val 
method_definition_cmd: binding -> (binding * string option * mixfix) list -> - binding list -> binding list -> binding list -> Input.source -> local_theory -> local_theory + binding list -> binding list -> binding list -> Token.src -> local_theory -> local_theory end; structure Method_Closure: METHOD_CLOSURE = @@ -34,12 +40,10 @@ structure Data = Generic_Data ( - type T = - ((term list * (string list * string list)) * Method.text) Symtab.table; + type T = ((term list * (string list * string list)) * Method.text) Symtab.table; val empty: T = Symtab.empty; val extend = I; - fun merge (methods1,methods2) : T = - (Symtab.merge (K true) (methods1, methods2)); + fun merge data : T = Symtab.merge (K true) data; ); val get_methods = Data.get o Context.Proof; @@ -49,12 +53,12 @@ structure Local_Data = Proof_Data ( type T = - Method.method Symtab.table * (*dynamic methods*) + (Proof.context -> Method.method) Symtab.table * (*dynamic methods*) (term list -> Proof.context -> Method.method) (*recursive method*); fun init _ : T = (Symtab.empty, fn _ => fn _ => Method.fail); ); -fun lookup_dynamic_method full_name ctxt = +fun lookup_dynamic_method ctxt full_name = (case Symtab.lookup (#1 (Local_Data.get ctxt)) full_name of SOME m => m | NONE => error ("Illegal use of internal Eisbach method: " ^ quote full_name)); @@ -87,6 +91,35 @@ if exists is_free_thm (thm :: args) then dummy_free_thm else f context thm); +fun free_aware_attribute thy {handle_all_errs,declaration} src (context, thm) = + let + val src' = Token.init_assignable_src src; + fun apply_att thm = (Attrib.attribute_global thy src') (context, thm); + val _ = + if handle_all_errs then (try apply_att Drule.dummy_thm; ()) + else (apply_att Drule.dummy_thm; ()) handle THM _ => () | TERM _ => () | TYPE _ => (); + + val src'' = Token.closure_src src'; + val thms = + map_filter Token.get_value (Token.args_of_src src'') + |> map_filter (fn (Token.Fact (_, f)) => SOME f | _ => NONE) + |> flat; + in + if exists is_free_thm (thm 
:: thms) then + if declaration then (NONE, NONE) + else (NONE, SOME dummy_free_thm) + else apply_att thm + end; + +fun wrap_attribute args binding thy = + let + val name = Binding.name_of binding; + val name' = Attrib.check_name_generic (Context.Theory thy) (name, Position.none); + fun get_src src = Token.src (name', Token.range_of_src src) (Token.args_of_src src); + in + Attrib.define_global binding (free_aware_attribute thy args o get_src) "" thy + |> snd + end; (* thm semantics for combined methods with internal parser. Simulates outer syntax parsing. *) (* Creates closures for each combined method while parsing, based on the parse context *) @@ -97,38 +130,38 @@ val parser = Parse.!!! (Method.parser' ctxt 0 --| Scan.ahead Parse.eof); in (case Scan.read Token.stopper parser toks of - SOME (method_text, _) => method_text + SOME (method_text, pos) => (Method.report (method_text, pos); method_text) | NONE => error ("Failed to parse method" ^ Position.here (#2 (Token.name_of_src src)))) end; -fun read_text_closure ctxt input = +fun read_text_closure ctxt source = let - (*tokens*) + val src = Token.init_assignable_src source; + val method_text = read_inner_method ctxt src; + val method_text' = Method.map_source (Method.method_closure ctxt) method_text; + (*FIXME: Does not markup method parameters. Needs to be done by Method.parser' directly. 
*) + val _ = + Method.map_source (fn src => (try (Method.check_name ctxt) (Token.name_of_src src); src)) + method_text; + val src' = Token.closure_src src; + in (src', method_text') end; + +fun read_inner_text_closure ctxt input = + let val keywords = Thy_Header.get_keywords' ctxt; val toks = Input.source_explode input |> Token.read_no_commands keywords (Scan.one Token.not_eof); - val _ = - toks |> List.app (fn tok => - if Token.keyword_with Symbol.is_ascii_identifier tok then - Context_Position.report ctxt (Token.pos_of tok) Markup.keyword1 - else ()); + in read_text_closure ctxt (Token.src ("", Input.pos_of input) toks) end; - (*source closure*) - val src = - Token.src ("", Input.pos_of input) toks - |> Token.init_assignable_src; - val method_text = read_inner_method ctxt src; - val method_text' = Method.map_source (Method.method_closure ctxt) method_text; - val src' = Token.closure_src src; - in (src', method_text') end; val parse_method = Args.context -- Scan.lift (Parse.token Parse.cartouche) >> (fn (ctxt, tok) => (case Token.get_value tok of NONE => let - val (src, text) = read_text_closure ctxt (Token.input_of tok); + val input = Token.input_of tok; + val (src, text) = read_inner_text_closure ctxt input; val _ = Token.assign (SOME (Token.Source src)) tok; in text end | SOME (Token.Source src) => read_inner_method ctxt src @@ -136,26 +169,22 @@ error ("Unexpected inner token value for method cartouche" ^ Position.here (Token.pos_of tok)))); -fun method_evaluate text ctxt : Method.method = fn facts => fn st => - if is_dummy st then Seq.empty - else Method.evaluate text (Config.put Method.closure false ctxt) facts st; - fun parse_term_args args = Args.context :|-- (fn ctxt => let + val ctxt' = Proof_Context.set_mode (Proof_Context.mode_schematic) ctxt; + fun parse T = - (if T = propT then Syntax.parse_prop ctxt else Syntax.parse_term ctxt) + (if T = propT then Syntax.parse_prop ctxt' else Syntax.parse_term ctxt') #> Type.constraint (Type_Infer.paramify_vars T); 
fun do_parse' T = - Parse_Tools.name_term >> - (fn Parse_Tools.Parse_Val (s, f) => (parse T s, f) - | Parse_Tools.Real_Val t' => (t', K ())); + Parse_Tools.name_term >> Parse_Tools.parse_val_cases (parse T); fun do_parse (Var (_, T)) = do_parse' T | do_parse (Free (_, T)) = do_parse' T - | do_parse t = error ("Unexpected method parameter: " ^ Syntax.string_of_term ctxt t); + | do_parse t = error ("Unexpected method parameter: " ^ Syntax.string_of_term ctxt' t); fun rep [] x = Scan.succeed [] x | rep (t :: ts) x = (do_parse t -- rep ts >> op ::) x; @@ -163,7 +192,7 @@ fun check ts = let val (ts, fs) = split_list ts; - val ts' = Syntax.check_terms ctxt ts |> Variable.polymorphic ctxt; + val ts' = Syntax.check_terms ctxt' ts |> Variable.polymorphic ctxt'; val _ = ListPair.app (fn (f, t) => f t) (fs, ts'); in ts' end; in Scan.lift (rep args) >> check end); @@ -193,7 +222,7 @@ in Method.map_source (Token.transform_src morphism) text end; fun evaluate_dynamic_thm ctxt name = - (case (try (Named_Theorems.get ctxt) name) of + (case try (Named_Theorems.get ctxt) name of SOME thms => thms | NONE => Proof_Context.get_thms ctxt name); @@ -204,28 +233,53 @@ Token.Fact (SOME name, evaluate_dynamic_thm ctxt name) | x => x); +fun method_evaluate text ctxt : Method.method = fn facts => fn st => + let + val ctxt' = Config.put Method.closure false ctxt; + in + if is_dummy st then Seq.empty + else Method.evaluate (evaluate_named_theorems ctxt' text) ctxt' facts st + end; + fun evaluate_method_def fix_env raw_text ctxt = let val text = raw_text - |> instantiate_text fix_env - |> evaluate_named_theorems ctxt; + |> instantiate_text fix_env; in method_evaluate text ctxt end; fun setup_local_method binding lthy = let val full_name = Local_Theory.full_name lthy binding; + fun get_method ctxt = lookup_dynamic_method ctxt full_name ctxt; in lthy - |> update_dynamic_method (full_name, Method.fail) - |> Method.local_setup binding (Scan.succeed (lookup_dynamic_method full_name)) "(internal)" + 
|> update_dynamic_method (full_name, K Method.fail) + |> Method.local_setup binding (Scan.succeed get_method) "(internal)" end; fun setup_local_fact binding = Named_Theorems.declare binding ""; +(* FIXME: In general we need the ability to override all dynamic facts. + This is also slow: we need Named_Theorems.only *) +fun empty_named_thm named_thm ctxt = + let + val contents = Named_Theorems.get ctxt named_thm; + val attrib = snd oo Thm.proof_attributes [Named_Theorems.del named_thm]; + in fold attrib contents ctxt end; + +fun dummy_named_thm named_thm ctxt = + let + val ctxt' = empty_named_thm named_thm ctxt; + val (_,ctxt'') = Thm.proof_attributes [Named_Theorems.add named_thm] dummy_free_thm ctxt'; + in ctxt'' end; + fun parse_method_args method_names = let - fun bind_method (name, text) ctxt = - update_dynamic_method (name, method_evaluate text ctxt) ctxt; + fun bind_method (name, text) ctxt = + let + val method = method_evaluate text; + val inner_update = method o update_dynamic_method (name,K (method ctxt)); + in update_dynamic_method (name,inner_update) ctxt end; fun do_parse t = parse_method >> pair t; fun rep [] x = Scan.succeed [] x @@ -274,7 +328,7 @@ fn ctxt => evaluate_method_def (match fixes) text (setup_ctxt ctxt) end; -fun gen_method_definition prep_vars name vars uses attribs methods body lthy = +fun gen_method_definition prep_vars name vars uses attribs methods source lthy = let val (uses_nms, lthy1) = lthy |> Proof_Context.concealed @@ -294,17 +348,19 @@ fun parser args eval = apfst (Config.put_generic Method.old_section_parser true) #> - (parse_term_args args --| - Method.sections modifiers -- - (*Scan.depend (fn context => Scan.succeed () >> (K (fold XNamed_Theorems.empty uses_nms context, ()))) --*) (* FIXME *) - parse_method_args method_names >> eval); + (parse_term_args args -- + parse_method_args method_names --| + (Scan.depend (fn context => + Scan.succeed (Context.map_proof (fold empty_named_thm uses_nms) context,())) -- + Method.sections 
modifiers) >> eval); val lthy3 = lthy2 + |> fold dummy_named_thm named_thms |> Method.local_setup (Binding.make (Binding.name_of name, Position.none)) (parser term_args (fn (fixes, decl) => fn ctxt => get_recursive_method ctxt fixes (decl ctxt))) "(internal)"; - val (src, text) = read_text_closure lthy3 body; + val (src, text) = read_text_closure lthy3 source; val morphism = Variable.export_morphism lthy3 @@ -335,10 +391,11 @@ val _ = Outer_Syntax.local_theory @{command_keyword method} "Eisbach method definition" (Parse.binding -- Parse.for_fixes -- - ((Scan.optional (@{keyword "uses"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) -- - (Scan.optional (@{keyword "declares"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) [])) -- - (Scan.optional (@{keyword "methods"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) -- - Parse.!!! (@{keyword "="} |-- Parse.token Parse.cartouche) - >> (fn ((((name, vars), (uses, attribs)), methods), cartouche) => - method_definition_cmd name vars uses attribs methods (Token.input_of cartouche))); + ((Scan.optional (@{keyword "methods"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) -- + (Scan.optional (@{keyword "uses"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) [])) -- + (Scan.optional (@{keyword "declares"} |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) -- + Parse.!!! (@{keyword "="} + |-- (Parse.position (Parse.args1 (K true)) >> (fn (args, pos) => Token.src ("", pos) args))) + >> (fn ((((name, vars), (methods, uses)), attribs), source) => + method_definition_cmd name vars uses attribs methods source)); end; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Eisbach/parse_tools.ML --- a/src/HOL/Eisbach/parse_tools.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Eisbach/parse_tools.ML Mon May 25 22:11:43 2015 +0200 @@ -1,4 +1,4 @@ -(* Title: parse_tools.ML +(* Title: HOL/Eisbach/parse_tools.ML Author: Daniel Matichuk, NICTA/UNSW Simple tools for deferred stateful token values. 
@@ -6,18 +6,21 @@ signature PARSE_TOOLS = sig + datatype ('a, 'b) parse_val = Real_Val of 'a - | Parse_Val of 'b * ('a -> unit) + | Parse_Val of 'b * ('a -> unit); - val parse_term_val : 'b parser -> (term, 'b) parse_val parser - - val name_term : (term, string) parse_val parser val is_real_val : ('a, 'b) parse_val -> bool val the_real_val : ('a, 'b) parse_val -> 'a val the_parse_val : ('a, 'b) parse_val -> 'b val the_parse_fun : ('a, 'b) parse_val -> ('a -> unit) + + val parse_val_cases: ('b -> 'a) -> ('a, 'b) parse_val -> ('a * ('a -> unit)) + + val parse_term_val : 'b parser -> (term, 'b) parse_val parser + val name_term : (term, string) parse_val parser end; structure Parse_Tools: PARSE_TOOLS = @@ -46,4 +49,7 @@ fun the_parse_fun (Parse_Val (_, f)) = f | the_parse_fun _ = raise Fail "Expected open parsed value"; +fun parse_val_cases g (Parse_Val (b, f)) = (g b, f) + | parse_val_cases _ (Real_Val v) = (v, K ()); + end; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Library/FSet.thy --- a/src/HOL/Library/FSet.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Library/FSet.thy Mon May 25 22:11:43 2015 +0200 @@ -1001,14 +1001,16 @@ folded size_fset_overloaded_def] lemma fset_size_o_map: "inj f \ size_fset g \ fimage f = size_fset (g \ f)" - unfolding size_fset_def fimage_def - by (auto simp: Abs_fset_inverse setsum.reindex_cong[OF subset_inj_on[OF _ top_greatest]]) - + apply (subst fun_eq_iff) + including fset.lifting by transfer (auto intro: setsum.reindex_cong subset_inj_on) + setup {* BNF_LFP_Size.register_size_global @{type_name fset} @{const_name size_fset} @{thms size_fset_simps size_fset_overloaded_simps} @{thms fset_size_o_map} *} +lifting_update fset.lifting +lifting_forget fset.lifting subsection {* Advanced relator customization *} diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Library/refute.ML --- a/src/HOL/Library/refute.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Library/refute.ML Mon May 25 22:11:43 2015 +0200 @@ -2969,7 +2969,7 @@ "try to find a 
model that refutes a given subgoal" (scan_parms -- Scan.optional Parse.nat 1 >> (fn (parms, i) => - Toplevel.keep (fn state => + Toplevel.keep_proof (fn state => let val ctxt = Toplevel.context_of state; val {goal = st, ...} = Proof.raw_goal (Toplevel.proof_of state); diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Lifting.thy --- a/src/HOL/Lifting.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Lifting.thy Mon May 25 22:11:43 2015 +0200 @@ -265,6 +265,13 @@ shows "part_equivp (eq_onp P)" using typedef_to_part_equivp [OF assms] by simp +lemma type_definition_Quotient_not_empty: "Quotient (eq_onp P) Abs Rep T \ \x. P x" +unfolding eq_onp_def by (drule Quotient_rep_reflp) blast + +lemma type_definition_Quotient_not_empty_witness: "Quotient (eq_onp P) Abs Rep T \ P (Rep undefined)" +unfolding eq_onp_def by (drule Quotient_rep_reflp) blast + + text {* Generating transfer rules for quotients. *} context @@ -538,6 +545,12 @@ end +(* needed for lifting_def_code_dt.ML (moved from Lifting_Set) *) +lemma right_total_UNIV_transfer: + assumes "right_total A" + shows "(rel_set A) (Collect (Domainp A)) UNIV" + using assms unfolding right_total_def rel_set_def Domainp_iff by blast + subsection {* ML setup *} ML_file "Tools/Lifting/lifting_util.ML" @@ -555,6 +568,7 @@ ML_file "Tools/Lifting/lifting_term.ML" ML_file "Tools/Lifting/lifting_def.ML" ML_file "Tools/Lifting/lifting_setup.ML" +ML_file "Tools/Lifting/lifting_def_code_dt.ML" hide_const (open) POS NEG diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Lifting_Set.thy --- a/src/HOL/Lifting_Set.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Lifting_Set.thy Mon May 25 22:11:43 2015 +0200 @@ -205,10 +205,7 @@ shows "(rel_set A ===> rel_set A ===> op =) (op \) (op \)" unfolding subset_eq [abs_def] by transfer_prover -lemma right_total_UNIV_transfer[transfer_rule]: - assumes "right_total A" - shows "(rel_set A) (Collect (Domainp A)) UNIV" - using assms unfolding right_total_def rel_set_def Domainp_iff by blast +declare 
right_total_UNIV_transfer[transfer_rule] lemma UNIV_transfer [transfer_rule]: assumes "bi_total A" diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Quotient_Examples/Lifting_Code_Dt_Test.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Quotient_Examples/Lifting_Code_Dt_Test.thy Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,101 @@ +(* Title: HOL/Quotient_Examples/Lifting_Code_Dt_Test.thy + Author: Ondrej Kuncar, TU Muenchen + Copyright 2015 + +Miscellaneous lift_definition(code_dt) definitions (for testing purposes). +*) + +theory Lifting_Code_Dt_Test +imports Main +begin + +(* basic examples *) + +typedef bool2 = "{x. x}" by auto + +setup_lifting type_definition_bool2 + +lift_definition(code_dt) f1 :: "bool2 option" is "Some True" by simp + +lift_definition(code_dt) f2 :: "bool2 list" is "[True]" by simp + +lift_definition(code_dt) f3 :: "bool2 \ int" is "(True, 42)" by simp + +lift_definition(code_dt) f4 :: "int + bool2" is "Inr True" by simp + +lift_definition(code_dt) f5 :: "'a \ (bool2 \ 'a) option" is "\x. Some (True, x)" by simp + +(* ugly (i.e., sensitive to rewriting done in my tactics) definition of T *) + +typedef 'a T = "{ x::'a. \(y::'a) z::'a. \(w::'a). (z = z) \ eq_onp top y y + \ rel_prod (eq_onp top) (eq_onp top) (x, y) (x, y) \ pred_prod top top (w, w) }" + by auto + +setup_lifting type_definition_T + +lift_definition(code_dt) f6 :: "bool T option" is "Some True" by simp + +lift_definition(code_dt) f7 :: "(bool T \ int) option" is "Some (True, 42)" by simp + +lift_definition(code_dt) f8 :: "bool T \ int \ (bool T \ int) option" + is "\x y. if x then Some (x, y) else None" by simp + +lift_definition(code_dt) f9 :: "nat \ ((bool T \ int) option) list \ nat" + is "\x. 
([Some (True, 42)], x)" by simp + +(* complicated nested datatypes *) + +(* stolen from Datatype_Examples *) +datatype 'a tree = Empty | Node 'a "'a tree list" + +datatype 'a ttree = TEmpty | TNode 'a "'a ttree list tree" + +datatype 'a tttree = TEmpty | TNode 'a "'a tttree list ttree list tree" + +lift_definition(code_dt) f10 :: "int \ int T tree" is "\i. Node i [Node i Nil, Empty]" by simp + +lift_definition(code_dt) f11 :: "int \ int T ttree" + is "\i. ttree.TNode i (Node [ttree.TNode i Empty] [])" by simp + +lift_definition(code_dt) f12 :: "int \ int T tttree" is "\i. tttree.TNode i Empty" by simp + +(* Phantom type variables *) + +datatype 'a phantom = PH1 | PH2 + +datatype ('a, 'b) phantom2 = PH21 'a | PH22 "'a option" + +lift_definition(code_dt) f13 :: "int \ int T phantom" is "\i. PH1" by auto + +lift_definition(code_dt) f14 :: "int \ (int T, nat T) phantom2" is "\i. PH22 (Some i)" by auto + +(* Mutual datatypes *) + +datatype 'a M1 = Empty 'a | CM "'a M2" +and 'a M2 = CM2 "'a M1" + +lift_definition(code_dt) f15 :: "int \ int T M1" is "\i. Empty i" by auto + +(* Codatatypes *) + +codatatype 'a stream = S 'a "'a stream" + +primcorec + sconst :: "'a \ 'a stream" where + "sconst a = S a (sconst a)" + +lift_definition(code_dt) f16 :: "int \ int T stream" is "\i. sconst i" unfolding pred_stream_def +by auto + +(* Sort constraints *) + +datatype ('a::finite, 'b::finite) F = F 'a | F2 'b + +instance T :: (finite) finite by (default, transfer, auto) + +lift_definition(code_dt) f17 :: "bool \ (bool T, 'b::finite) F" is "\b. F b" by auto + +export_code f1 f2 f3 f4 f5 f6 f7 f8 f9 f10 f11 f12 f13 f14 f15 f16 f17 + checking SML OCaml? Haskell? Scala? 
+ +end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/ROOT --- a/src/HOL/ROOT Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/ROOT Mon May 25 22:11:43 2015 +0200 @@ -962,6 +962,7 @@ Quotient_Rat Lift_DList Int_Pow + Lifting_Code_Dt_Test session "HOL-Predicate_Compile_Examples" in Predicate_Compile_Examples = HOL + options [document = false] diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/SMT.thy --- a/src/HOL/SMT.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/SMT.thy Mon May 25 22:11:43 2015 +0200 @@ -49,7 +49,7 @@ *} method_setup moura = {* - Scan.succeed (SIMPLE_METHOD' o moura_tac) + Scan.succeed (SIMPLE_METHOD' o moura_tac) *} "solve skolemization goals, especially those arising from Z3 proofs" hide_fact (open) choices bchoices diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/BNF/bnf_comp.ML --- a/src/HOL/Tools/BNF/bnf_comp.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/BNF/bnf_comp.ML Mon May 25 22:11:43 2015 +0200 @@ -149,10 +149,10 @@ let val olive = live_of_bnf outer; val onwits = nwits_of_bnf outer; - val odead = dead_of_bnf outer; + val odeads = deads_of_bnf outer; val inner = hd inners; val ilive = live_of_bnf inner; - val ideads = map dead_of_bnf inners; + val ideadss = map deads_of_bnf inners; val inwitss = map nwits_of_bnf inners; (* TODO: check olive = length inners > 0, @@ -160,9 +160,9 @@ forall inner from inners. 
idead = dead *) val (oDs, lthy1) = apfst (map TFree) - (Variable.invent_types (replicate odead @{sort type}) lthy); + (Variable.invent_types (map Type.sort_of_atyp odeads) lthy); val (Dss, lthy2) = apfst (map (map TFree)) - (fold_map Variable.invent_types (map (fn n => replicate n @{sort type}) ideads) lthy1); + (fold_map Variable.invent_types (map (map Type.sort_of_atyp) ideadss) lthy1); val (Ass, lthy3) = apfst (replicate ilive o map TFree) (Variable.invent_types (replicate ilive @{sort type}) lthy2); val As = if ilive > 0 then hd Ass else []; @@ -379,13 +379,13 @@ let val b = Binding.suffix_name (mk_killN n) (name_of_bnf bnf); val live = live_of_bnf bnf; - val dead = dead_of_bnf bnf; + val deads = deads_of_bnf bnf; val nwits = nwits_of_bnf bnf; (* TODO: check 0 < n <= live *) val (Ds, lthy1) = apfst (map TFree) - (Variable.invent_types (replicate dead @{sort type}) lthy); + (Variable.invent_types (map Type.sort_of_atyp deads) lthy); val ((killedAs, As), lthy2) = apfst (`(take n) o map TFree) (Variable.invent_types (replicate live @{sort type}) lthy1); val (Bs, _(*lthy3*)) = apfst (append killedAs o map TFree) @@ -478,13 +478,13 @@ let val b = Binding.suffix_name (mk_liftN n) (name_of_bnf bnf); val live = live_of_bnf bnf; - val dead = dead_of_bnf bnf; + val deads = deads_of_bnf bnf; val nwits = nwits_of_bnf bnf; (* TODO: check 0 < n *) val (Ds, lthy1) = apfst (map TFree) - (Variable.invent_types (replicate dead @{sort type}) lthy); + (Variable.invent_types (map Type.sort_of_atyp deads) lthy); val ((newAs, As), lthy2) = apfst (chop n o map TFree) (Variable.invent_types (replicate (n + live) @{sort type}) lthy1); val ((newBs, Bs), _(*lthy3*)) = apfst (chop n o map TFree) @@ -568,14 +568,14 @@ let val b = Binding.suffix_name (mk_permuteN src dest) (name_of_bnf bnf); val live = live_of_bnf bnf; - val dead = dead_of_bnf bnf; + val deads = deads_of_bnf bnf; val nwits = nwits_of_bnf bnf; fun permute xs = permute_like_unique (op =) src dest xs; fun unpermute xs = 
permute_like_unique (op =) dest src xs; val (Ds, lthy1) = apfst (map TFree) - (Variable.invent_types (replicate dead @{sort type}) lthy); + (Variable.invent_types (map Type.sort_of_atyp deads) lthy); val (As, lthy2) = apfst (map TFree) (Variable.invent_types (replicate live @{sort type}) lthy1); val (Bs, _(*lthy3*)) = apfst (map TFree) diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/BNF/bnf_gfp_rec_sugar_tactics.ML --- a/src/HOL/Tools/BNF/bnf_gfp_rec_sugar_tactics.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/BNF/bnf_gfp_rec_sugar_tactics.ML Mon May 25 22:11:43 2015 +0200 @@ -37,6 +37,8 @@ val split_connectI = @{thms allI impI conjI}; val unfold_lets = @{thms Let_def[abs_def] split_beta} +fun clean_blast_tac ctxt = blast_tac (put_claset (claset_of @{theory_context HOL}) ctxt); + fun exhaust_inst_as_projs ctxt frees thm = let val num_frees = length frees; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML --- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML Mon May 25 22:11:43 2015 +0200 @@ -397,7 +397,13 @@ val ctrs0 = map (prep_term no_defs_lthy) raw_ctrs; - val Type (fcT_name, As0) = body_type (fastype_of (hd ctrs0)); + val (fcT_name, As0) = + (case body_type (fastype_of (hd ctrs0)) of + Type T' => T' + | _ => error "Expected type constructor in body type of constructor"); + val _ = forall ((fn Type (T_name, _) => T_name = fcT_name | _ => false) o body_type + o fastype_of) (tl ctrs0) orelse error "Constructors not constructing same type"; + val fc_b_name = Long_Name.base_name fcT_name; val fc_b = Binding.name fc_b_name; @@ -675,7 +681,7 @@ val goalss = [exhaust_goal] :: inject_goalss @ half_distinct_goalss; - fun after_qed (thmss0 as [exhaust_thm] :: thmss) lthy = + fun after_qed ([exhaust_thm] :: thmss) lthy = let val ((inject_thms, inject_thmss), half_distinct_thmss) = chop n thmss |>> `flat; @@ -751,7 +757,7 @@ fun prove_split selss goal = Goal.prove_sorry lthy [] 
[] goal (fn _ => - mk_split_tac lthy uexhaust_thm case_thms selss inject_thmss distinct_thmsss) + mk_split_tac lthy ms uexhaust_thm case_thms selss inject_thmss distinct_thmsss) |> singleton (Proof_Context.export names_lthy lthy) |> Thm.close_derivation; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Ctr_Sugar/ctr_sugar_tactics.ML --- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar_tactics.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar_tactics.ML Mon May 25 22:11:43 2015 +0200 @@ -7,7 +7,6 @@ signature CTR_SUGAR_GENERAL_TACTICS = sig - val clean_blast_tac: Proof.context -> int -> tactic val select_prem_tac: int -> (int -> tactic) -> int -> int -> tactic val unfold_thms_tac: Proof.context -> thm list -> tactic end; @@ -32,8 +31,8 @@ val mk_half_distinct_disc_tac: Proof.context -> int -> thm -> thm -> tactic val mk_nchotomy_tac: int -> thm -> tactic val mk_other_half_distinct_disc_tac: thm -> tactic - val mk_split_tac: Proof.context -> thm -> thm list -> thm list list -> thm list list -> - thm list list list -> tactic + val mk_split_tac: Proof.context -> int list -> thm -> thm list -> thm list list -> + thm list list -> thm list list list -> tactic val mk_split_asm_tac: Proof.context -> thm -> tactic val mk_unique_disc_def_tac: int -> thm -> tactic end; @@ -45,8 +44,6 @@ val meta_mp = @{thm meta_mp}; -fun clean_blast_tac ctxt = blast_tac (put_claset (claset_of @{theory_context HOL}) ctxt); - fun select_prem_tac n tac k = DETERM o (EVERY' [REPEAT_DETERM_N (k - 1) o etac thin_rl, tac, REPEAT_DETERM_N (n - k) o etac thin_rl]); @@ -170,12 +167,17 @@ rtac casex]) cases (map2 (seq_conds if_P_or_not_P_OF n) (1 upto n) discss') selss)); -fun mk_split_tac ctxt uexhaust cases selss injectss distinctsss = - HEADGOAL (rtac uexhaust) THEN - ALLGOALS (fn k => (hyp_subst_tac ctxt THEN' - simp_tac (ss_only (@{thms simp_thms} @ cases @ nth selss (k - 1) @ nth injectss (k - 1) @ - flat (nth distinctsss (k - 1))) ctxt)) k) THEN - ALLGOALS (clean_blast_tac ctxt); +fun 
mk_split_tac ctxt ms uexhaust cases selss injectss distinctsss = + let val depth = fold Integer.max ms 0 in + HEADGOAL (rtac uexhaust) THEN + ALLGOALS (fn k => (hyp_subst_tac ctxt THEN' + simp_tac (ss_only (@{thms simp_thms} @ cases @ nth selss (k - 1) @ nth injectss (k - 1) @ + flat (nth distinctsss (k - 1))) ctxt)) k) THEN + ALLGOALS (etac thin_rl THEN' rtac iffI THEN' + REPEAT_DETERM o rtac allI THEN' rtac impI THEN' REPEAT_DETERM o etac conjE THEN' + hyp_subst_tac ctxt THEN' atac THEN' REPEAT_DETERM o etac allE THEN' etac impE THEN' + REPEAT_DETERM o (rtac conjI THEN' rtac refl) THEN' rtac refl THEN' atac) + end; val split_asm_thms = @{thms imp_conv_disj de_Morgan_conj de_Morgan_disj not_not not_ex}; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Lifting/lifting_bnf.ML --- a/src/HOL/Tools/Lifting/lifting_bnf.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Lifting/lifting_bnf.ML Mon May 25 22:11:43 2015 +0200 @@ -87,9 +87,8 @@ fun relator_eq_onp bnf ctxt = let - val relator_eq_onp_thm = lookup_defined_pred_data ctxt (type_name_of_bnf bnf) - |> Transfer.rel_eq_onp |> Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.arg1_conv - (Raw_Simplifier.rewrite ctxt false @{thms eq_onp_top_eq_eq[THEN eq_reflection]}))) + val relator_eq_onp_thm = lookup_defined_pred_data ctxt (type_name_of_bnf bnf) + |> Transfer.rel_eq_onp in [((Binding.empty, []), [([relator_eq_onp_thm], @{attributes [relator_eq_onp]})])] end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Lifting/lifting_def.ML --- a/src/HOL/Tools/Lifting/lifting_def.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Lifting/lifting_def.ML Mon May 25 22:11:43 2015 +0200 @@ -6,15 +6,48 @@ signature LIFTING_DEF = sig + datatype code_eq = UNKNOWN_EQ | NONE_EQ | ABS_EQ | REP_EQ + type lift_def + val rty_of_lift_def: lift_def -> typ + val qty_of_lift_def: lift_def -> typ + val rhs_of_lift_def: lift_def -> term + val lift_const_of_lift_def: lift_def -> term + val def_thm_of_lift_def: lift_def -> thm + val 
rsp_thm_of_lift_def: lift_def -> thm + val abs_eq_of_lift_def: lift_def -> thm + val rep_eq_of_lift_def: lift_def -> thm option + val code_eq_of_lift_def: lift_def -> code_eq + val transfer_rules_of_lift_def: lift_def -> thm list + val morph_lift_def: morphism -> lift_def -> lift_def + val inst_of_lift_def: Proof.context -> typ -> lift_def -> lift_def + val mk_lift_const_of_lift_def: typ -> lift_def -> term + + type config = { notes: bool } + val map_config: (bool -> bool) -> config -> config + val default_config: config + val generate_parametric_transfer_rule: Proof.context -> thm -> thm -> thm - val add_lift_def: - (binding * mixfix) -> typ -> term -> thm -> thm list -> local_theory -> local_theory + val add_lift_def: + config -> binding * mixfix -> typ -> term -> thm -> thm list -> local_theory -> + lift_def * local_theory + + val prepare_lift_def: + (binding * mixfix -> typ -> term -> thm -> thm list -> Proof.context -> + lift_def * local_theory) -> + binding * mixfix -> typ -> term -> thm list -> local_theory -> + term option * (thm -> Proof.context -> lift_def * local_theory) - val lift_def_cmd: - (binding * string option * mixfix) * string * (Facts.ref * Token.src list) list -> - local_theory -> Proof.state + val gen_lift_def: + (binding * mixfix -> typ -> term -> thm -> thm list -> local_theory -> + lift_def * local_theory) -> + binding * mixfix -> typ -> term -> (Proof.context -> tactic) -> thm list -> + local_theory -> lift_def * local_theory + + val lift_def: + config -> binding * mixfix -> typ -> term -> (Proof.context -> tactic) -> thm list -> + local_theory -> lift_def * local_theory val can_generate_code_cert: thm -> bool end @@ -26,6 +59,70 @@ infix 0 MRSL +datatype code_eq = UNKNOWN_EQ | NONE_EQ | ABS_EQ | REP_EQ + +datatype lift_def = LIFT_DEF of { + rty: typ, + qty: typ, + rhs: term, + lift_const: term, + def_thm: thm, + rsp_thm: thm, + abs_eq: thm, + rep_eq: thm option, + code_eq: code_eq, + transfer_rules: thm list +}; + +fun rep_lift_def 
(LIFT_DEF lift_def) = lift_def; +val rty_of_lift_def = #rty o rep_lift_def; +val qty_of_lift_def = #qty o rep_lift_def; +val rhs_of_lift_def = #rhs o rep_lift_def; +val lift_const_of_lift_def = #lift_const o rep_lift_def; +val def_thm_of_lift_def = #def_thm o rep_lift_def; +val rsp_thm_of_lift_def = #rsp_thm o rep_lift_def; +val abs_eq_of_lift_def = #abs_eq o rep_lift_def; +val rep_eq_of_lift_def = #rep_eq o rep_lift_def; +val code_eq_of_lift_def = #code_eq o rep_lift_def; +val transfer_rules_of_lift_def = #transfer_rules o rep_lift_def; + +fun mk_lift_def rty qty rhs lift_const def_thm rsp_thm abs_eq rep_eq code_eq transfer_rules = + LIFT_DEF {rty = rty, qty = qty, + rhs = rhs, lift_const = lift_const, + def_thm = def_thm, rsp_thm = rsp_thm, abs_eq = abs_eq, rep_eq = rep_eq, + code_eq = code_eq, transfer_rules = transfer_rules }; + +fun map_lift_def f1 f2 f3 f4 f5 f6 f7 f8 f9 f10 + (LIFT_DEF {rty = rty, qty = qty, rhs = rhs, lift_const = lift_const, + def_thm = def_thm, rsp_thm = rsp_thm, abs_eq = abs_eq, rep_eq = rep_eq, code_eq = code_eq, + transfer_rules = transfer_rules }) = + LIFT_DEF {rty = f1 rty, qty = f2 qty, rhs = f3 rhs, lift_const = f4 lift_const, + def_thm = f5 def_thm, rsp_thm = f6 rsp_thm, abs_eq = f7 abs_eq, rep_eq = f8 rep_eq, + code_eq = f9 code_eq, transfer_rules = f10 transfer_rules } + +fun morph_lift_def phi = + let + val mtyp = Morphism.typ phi + val mterm = Morphism.term phi + val mthm = Morphism.thm phi + in + map_lift_def mtyp mtyp mterm mterm mthm mthm mthm (Option.map mthm) I (map mthm) + end + +fun mk_inst_of_lift_def qty lift_def = Vartab.empty |> Type.raw_match (qty_of_lift_def lift_def, qty) + +fun mk_lift_const_of_lift_def qty lift_def = Envir.subst_term_types (mk_inst_of_lift_def qty lift_def) + (lift_const_of_lift_def lift_def) + +fun inst_of_lift_def ctxt qty lift_def = mk_inst_of_lift_def qty lift_def + |> instT_morphism ctxt |> (fn phi => morph_lift_def phi lift_def) + +(* Config *) + +type config = { notes: bool }; +fun 
map_config f1 { notes = notes } = { notes = f1 notes } +val default_config = { notes = true }; + (* Reflexivity prover *) fun mono_eq_prover ctxt prop = @@ -289,7 +386,6 @@ SOME (simplify_code_eq ctxt (unabs_def RS @{thm meta_eq_to_obj_eq})) else let - val thy = Proof_Context.theory_of ctxt val quot_thm = Lifting_Term.prove_quot_thm ctxt (get_body_types (rty, qty)) val rel_fun = prove_rel ctxt rsp_thm (rty, qty) val rep_abs_thm = [quot_thm, rel_fun] MRSL @{thm Quotient_rep_abs_eq} @@ -358,23 +454,39 @@ in if is_valid_eq abs_eq_thm then - Code.add_default_eqn abs_eq_thm thy + (ABS_EQ, Code.add_default_eqn abs_eq_thm thy) else let val (rty_body, qty_body) = get_body_types (rty, qty) in if rty_body = qty_body then - Code.add_default_eqn (the opt_rep_eq_thm) thy + (REP_EQ, Code.add_default_eqn (the opt_rep_eq_thm) thy) else if is_some opt_rep_eq_thm andalso is_valid_abs_eq (the opt_rep_eq_thm) then - Code.add_abs_default_eqn (the opt_rep_eq_thm) thy + (REP_EQ, Code.add_abs_default_eqn (the opt_rep_eq_thm) thy) else - thy + (NONE_EQ, thy) end end local + fun no_no_code ctxt (rty, qty) = + if same_type_constrs (rty, qty) then + forall (no_no_code ctxt) (Targs rty ~~ Targs qty) + else + if is_Type qty then + if Lifting_Info.is_no_code_type ctxt (Tname qty) then false + else + let + val (rty', rtyq) = Lifting_Term.instantiate_rtys ctxt (rty, qty) + val (rty's, rtyqs) = (Targs rty', Targs rtyq) + in + forall (no_no_code ctxt) (rty's ~~ rtyqs) + end + else + true + fun encode_code_eq ctxt abs_eq opt_rep_eq (rty, qty) = let fun mk_type typ = typ |> Logic.mk_type |> Thm.cterm_of ctxt |> Drule.mk_term @@ -395,11 +507,20 @@ (abs_eq, opt_rep_eq, (dest_type rty, dest_type qty)) end + structure Data = Generic_Data + ( + type T = code_eq option + val empty = NONE + val extend = I + fun merge _ = NONE + ); + fun register_encoded_code_eq thm thy = let val (abs_eq_thm, opt_rep_eq_thm, (rty, qty)) = decode_code_eq thm + val (code_eq, thy) = register_code_eq_thy abs_eq_thm opt_rep_eq_thm 
(rty, qty) thy in - register_code_eq_thy abs_eq_thm opt_rep_eq_thm (rty, qty) thy + Context.theory_map (Data.put (SOME code_eq)) thy end handle DECODE => thy @@ -407,31 +528,28 @@ (fn thm => Context.mapping (register_encoded_code_eq thm) I) val register_code_eq_attrib = Attrib.internal (K register_code_eq_attribute) - fun no_no_code ctxt (rty, qty) = - if same_type_constrs (rty, qty) then - forall (no_no_code ctxt) (Targs rty ~~ Targs qty) - else - if is_Type qty then - if Lifting_Info.is_no_code_type ctxt (Tname qty) then false - else - let - val (rty', rtyq) = Lifting_Term.instantiate_rtys ctxt (rty, qty) - val (rty's, rtyqs) = (Targs rty', Targs rtyq) - in - forall (no_no_code ctxt) (rty's ~~ rtyqs) - end - else - true in fun register_code_eq abs_eq_thm opt_rep_eq_thm (rty, qty) lthy = let val encoded_code_eq = encode_code_eq lthy abs_eq_thm opt_rep_eq_thm (rty, qty) in - if no_no_code lthy (rty, qty) then - (snd oo Local_Theory.note) ((Binding.empty, [register_code_eq_attrib]), [encoded_code_eq]) lthy + if no_no_code lthy (rty, qty) then + let + val lthy = (snd oo Local_Theory.note) + ((Binding.empty, [register_code_eq_attrib]), [encoded_code_eq]) lthy + val opt_code_eq = Data.get (Context.Theory (Proof_Context.theory_of lthy)) + val code_eq = if is_some opt_code_eq then the opt_code_eq + else UNKNOWN_EQ (* UNKNOWN_EQ means that we are in a locale and we do not know + which code equation is going to be used. This is going to be resolved at the + point when an interpretation of the locale is executed. 
*) + val lthy = Local_Theory.declaration {syntax = false, pervasive = true} + (K (Data.put NONE)) lthy + in + (code_eq, lthy) + end else - lthy + (NONE_EQ, lthy) end end @@ -447,7 +565,7 @@ par_thms - a parametricity theorem for rhs *) -fun add_lift_def var qty rhs rsp_thm par_thms lthy = +fun add_lift_def (config: config) var qty rhs rsp_thm par_thms lthy = let val rty = fastype_of rhs val quot_thm = Lifting_Term.prove_quot_thm lthy (rty, qty) @@ -458,134 +576,44 @@ val prop = Logic.mk_equals (lhs, absrep_trm $ forced_rhs) val (_, prop') = Local_Defs.cert_def lthy prop val (_, newrhs) = Local_Defs.abs_def prop' - - val ((_, (_ , def_thm)), lthy') = - Local_Theory.define (var, ((Thm.def_binding (#1 var), []), newrhs)) lthy + val var = (#notes config = false ? apfst Binding.concealed) var + val def_name = if #notes config then Thm.def_binding (#1 var) else Binding.empty + + val ((lift_const, (_ , def_thm)), lthy) = + Local_Theory.define (var, ((def_name, []), newrhs)) lthy - val transfer_rules = generate_transfer_rules lthy' quot_thm rsp_thm def_thm par_thms + val transfer_rules = generate_transfer_rules lthy quot_thm rsp_thm def_thm par_thms - val abs_eq_thm = generate_abs_eq lthy' def_thm rsp_thm quot_thm - val opt_rep_eq_thm = generate_rep_eq lthy' def_thm rsp_thm (rty_forced, qty) + val abs_eq_thm = generate_abs_eq lthy def_thm rsp_thm quot_thm + val opt_rep_eq_thm = generate_rep_eq lthy def_thm rsp_thm (rty_forced, qty) fun qualify defname suffix = Binding.qualified true suffix defname - val lhs_name = (#1 var) - val rsp_thm_name = qualify lhs_name "rsp" - val abs_eq_thm_name = qualify lhs_name "abs_eq" - val rep_eq_thm_name = qualify lhs_name "rep_eq" - val transfer_rule_name = qualify lhs_name "transfer" - val transfer_attr = Attrib.internal (K Transfer.transfer_add) - in - lthy' - |> (snd oo Local_Theory.note) ((rsp_thm_name, []), [rsp_thm]) - |> (snd oo Local_Theory.note) ((transfer_rule_name, [transfer_attr]), transfer_rules) - |> (snd oo 
Local_Theory.note) ((abs_eq_thm_name, []), [abs_eq_thm]) - |> (case opt_rep_eq_thm of - SOME rep_eq_thm => (snd oo Local_Theory.note) ((rep_eq_thm_name, []), [rep_eq_thm]) - | NONE => I) - |> register_code_eq abs_eq_thm opt_rep_eq_thm (rty_forced, qty) - end - -local - val eq_onp_assms_tac_fixed_rules = map (Transfer.prep_transfer_domain_thm @{context}) - [@{thm pcr_Domainp_total}, @{thm pcr_Domainp_par_left_total}, @{thm pcr_Domainp_par}, - @{thm pcr_Domainp}] -in -fun mk_readable_rsp_thm_eq tm lthy = - let - val ctm = Thm.cterm_of lthy tm - - fun assms_rewr_conv tactic rule ct = + fun notes names = let - fun prove_extra_assms thm = - let - val assms = cprems_of thm - fun finish thm = if Thm.no_prems thm then SOME (Goal.conclude thm) else NONE - fun prove ctm = Option.mapPartial finish (SINGLE tactic (Goal.init ctm)) - in - map_interrupt prove assms - end - - fun cconl_of thm = Drule.strip_imp_concl (Thm.cprop_of thm) - fun lhs_of thm = fst (Thm.dest_equals (cconl_of thm)) - fun rhs_of thm = snd (Thm.dest_equals (cconl_of thm)) - val rule1 = Thm.incr_indexes (Thm.maxidx_of_cterm ct + 1) rule; - val lhs = lhs_of rule1; - val rule2 = Thm.rename_boundvars (Thm.term_of lhs) (Thm.term_of ct) rule1; - val rule3 = - Thm.instantiate (Thm.match (lhs, ct)) rule2 - handle Pattern.MATCH => raise CTERM ("assms_rewr_conv", [lhs, ct]); - val proved_assms = prove_extra_assms rule3 + val lhs_name = (#1 var) + val rsp_thmN = qualify lhs_name "rsp" + val abs_eq_thmN = qualify lhs_name "abs_eq" + val rep_eq_thmN = qualify lhs_name "rep_eq" + val transfer_ruleN = qualify lhs_name "transfer" + val notes = + [(rsp_thmN, [], [rsp_thm]), + (transfer_ruleN, @{attributes [transfer_rule]}, transfer_rules), + (abs_eq_thmN, [], [abs_eq_thm])] + @ (case opt_rep_eq_thm of SOME rep_eq_thm => [(rep_eq_thmN, [], [rep_eq_thm])] | NONE => []) in - case proved_assms of - SOME proved_assms => - let - val rule3 = proved_assms MRSL rule3 - val rule4 = - if lhs_of rule3 aconvc ct then rule3 - else - let 
val ceq = Thm.dest_fun2 (Thm.cprop_of rule3) - in rule3 COMP Thm.trivial (Thm.mk_binop ceq ct (rhs_of rule3)) end - in Thm.transitive rule4 (Thm.beta_conversion true (rhs_of rule4)) end - | NONE => Conv.no_conv ct + if names then map (fn (name, attrs, thms) => ((name, []), [(thms, attrs)])) notes + else map_filter (fn (_, attrs, thms) => if null attrs then NONE + else SOME ((Binding.empty, []), [(thms, attrs)])) notes end - - fun assms_rewrs_conv tactic rules = Conv.first_conv (map (assms_rewr_conv tactic) rules) - - fun simp_arrows_conv ctm = - let - val unfold_conv = Conv.rewrs_conv - [@{thm rel_fun_eq_eq_onp[THEN eq_reflection]}, - @{thm rel_fun_eq_onp_rel[THEN eq_reflection]}, - @{thm rel_fun_eq[THEN eq_reflection]}, - @{thm rel_fun_eq_rel[THEN eq_reflection]}, - @{thm rel_fun_def[THEN eq_reflection]}] - fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2 - val eq_onp_assms_tac_rules = @{thm left_unique_OO} :: - eq_onp_assms_tac_fixed_rules @ (Transfer.get_transfer_raw lthy) - val eq_onp_assms_tac = (TRY o REPEAT_ALL_NEW (resolve_tac lthy eq_onp_assms_tac_rules) - THEN_ALL_NEW (DETERM o Transfer.eq_tac lthy)) 1 - val relator_eq_onp_conv = Conv.bottom_conv - (K (Conv.try_conv (assms_rewrs_conv eq_onp_assms_tac - (Lifting_Info.get_relator_eq_onp_rules lthy)))) lthy - val relator_eq_conv = Conv.bottom_conv - (K (Conv.try_conv (Conv.rewrs_conv (Transfer.get_relator_eq lthy)))) lthy - in - case (Thm.term_of ctm) of - Const (@{const_name "rel_fun"}, _) $ _ $ _ => - (binop_conv2 simp_arrows_conv simp_arrows_conv then_conv unfold_conv) ctm - | _ => (relator_eq_onp_conv then_conv relator_eq_conv) ctm - end - - val unfold_ret_val_invs = Conv.bottom_conv - (K (Conv.try_conv (Conv.rewr_conv @{thm eq_onp_same_args[THEN eq_reflection]}))) lthy - val unfold_inv_conv = - Conv.top_sweep_conv (K (Conv.rewr_conv @{thm eq_onp_def[THEN eq_reflection]})) lthy - val simp_conv = HOLogic.Trueprop_conv (Conv.fun2_conv simp_arrows_conv) - val univq_conv = 
Conv.rewr_conv @{thm HOL.all_simps(6)[symmetric, THEN eq_reflection]} - val univq_prenex_conv = Conv.top_conv (K (Conv.try_conv univq_conv)) lthy - val beta_conv = Thm.beta_conversion true - val eq_thm = - (simp_conv then_conv univq_prenex_conv then_conv beta_conv then_conv unfold_ret_val_invs - then_conv unfold_inv_conv) ctm + val (code_eq, lthy) = register_code_eq abs_eq_thm opt_rep_eq_thm (rty_forced, qty) lthy + val lift_def = mk_lift_def rty_forced qty newrhs lift_const def_thm rsp_thm abs_eq_thm + opt_rep_eq_thm code_eq transfer_rules in - Object_Logic.rulify lthy (eq_thm RS Drule.equal_elim_rule2) - end -end - -fun rename_to_tnames ctxt term = - let - fun all_typs (Const (@{const_name Pure.all}, _) $ Abs (_, T, t)) = T :: all_typs t - | all_typs _ = [] - - fun rename (Const (@{const_name Pure.all}, T1) $ Abs (_, T2, t)) (new_name :: names) = - (Const (@{const_name Pure.all}, T1) $ Abs (new_name, T2, rename t names)) - | rename t _ = t - - val (fixed_def_t, _) = yield_singleton (Variable.importT_terms) term ctxt - val new_names = Old_Datatype_Prop.make_tnames (all_typs fixed_def_t) - in - rename term new_names + lthy + |> Local_Theory.notes (notes (#notes config)) |> snd + |> ` (fn lthy => morph_lift_def (Local_Theory.target_morphism lthy) lift_def) + ||> Local_Theory.restore end (* This is not very cheap way of getting the rules but we have only few active @@ -601,17 +629,8 @@ Symtab.fold (fn (_, data) => fn l => collect data l) table [] end -(* - - lifting_definition command. It opens a proof of a corresponding respectfulness - theorem in a user-friendly, readable form. Then add_lift_def is called internally. 
- -*) - -fun lift_def_cmd (raw_var, rhs_raw, par_xthms) lthy = +fun prepare_lift_def add_lift_def var qty rhs par_thms lthy = let - val ((binding, SOME qty, mx), lthy) = yield_singleton Proof_Context.read_vars raw_var lthy - val rhs = (Syntax.check_term lthy o Syntax.parse_term lthy) rhs_raw val rsp_rel = Lifting_Term.equiv_relation lthy (fastype_of rhs, qty) val rty_forced = (domain_type o fastype_of) rsp_rel; val forced_rhs = force_rty_type lthy rty_forced rhs; @@ -625,84 +644,31 @@ |>> snd val to_rsp = rsp_prsp_eq RS Drule.equal_elim_rule2 val opt_proven_rsp_thm = try_prove_reflexivity lthy prsp_tm - val par_thms = Attrib.eval_thms lthy par_xthms fun after_qed internal_rsp_thm lthy = - add_lift_def (binding, mx) qty rhs (internal_rsp_thm RS to_rsp) par_thms lthy + add_lift_def var qty rhs (internal_rsp_thm RS to_rsp) par_thms lthy in case opt_proven_rsp_thm of - SOME thm => Proof.theorem NONE (K (after_qed thm)) [] lthy - | NONE => - let - val readable_rsp_thm_eq = mk_readable_rsp_thm_eq prsp_tm lthy - val (readable_rsp_tm, _) = Logic.dest_implies (Thm.prop_of readable_rsp_thm_eq) - val readable_rsp_tm_tnames = rename_to_tnames lthy readable_rsp_tm - - fun after_qed' thm_list lthy = - let - val internal_rsp_thm = Goal.prove lthy [] [] prsp_tm - (fn {context = ctxt, ...} => - rtac readable_rsp_thm_eq 1 THEN Proof_Context.fact_tac ctxt (hd thm_list) 1) - in - after_qed internal_rsp_thm lthy - end - in - Proof.theorem NONE after_qed' [[(readable_rsp_tm_tnames,[])]] lthy - end - end - -fun quot_thm_err ctxt (rty, qty) pretty_msg = - let - val error_msg = cat_lines - ["Lifting failed for the following types:", - Pretty.string_of (Pretty.block - [Pretty.str "Raw type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty]), - Pretty.string_of (Pretty.block - [Pretty.str "Abstract type:", Pretty.brk 2, Syntax.pretty_typ ctxt qty]), - "", - (Pretty.string_of (Pretty.block - [Pretty.str "Reason:", Pretty.brk 2, pretty_msg]))] - in - error error_msg + SOME thm => (NONE, K (after_qed 
thm)) + | NONE => (SOME prsp_tm, after_qed) end -fun check_rty_err ctxt (rty_schematic, rty_forced) (raw_var, rhs_raw) = +fun gen_lift_def add_lift_def var qty rhs tac par_thms lthy = let - val (_, ctxt') = yield_singleton Proof_Context.read_vars raw_var ctxt - val rhs = (Syntax.check_term ctxt' o Syntax.parse_term ctxt') rhs_raw - val error_msg = cat_lines - ["Lifting failed for the following term:", - Pretty.string_of (Pretty.block - [Pretty.str "Term:", Pretty.brk 2, Syntax.pretty_term ctxt rhs]), - Pretty.string_of (Pretty.block - [Pretty.str "Type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty_schematic]), - "", - (Pretty.string_of (Pretty.block - [Pretty.str "Reason:", - Pretty.brk 2, - Pretty.str "The type of the term cannot be instantiated to", - Pretty.brk 1, - Pretty.quote (Syntax.pretty_typ ctxt rty_forced), - Pretty.str "."]))] - in - error error_msg - end + val (goal, after_qed) = prepare_lift_def add_lift_def var qty rhs par_thms lthy + in + case goal of + SOME goal => + let + val rsp_thm = Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, ...} => tac ctxt) + |> Thm.close_derivation + in + after_qed rsp_thm lthy + end + | NONE => after_qed Drule.dummy_thm lthy + end -fun lift_def_cmd_with_err_handling (raw_var, rhs_raw, par_xthms) lthy = - (lift_def_cmd (raw_var, rhs_raw, par_xthms) lthy - handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy (rty, qty) msg) - handle Lifting_Term.CHECK_RTY (rty_schematic, rty_forced) => - check_rty_err lthy (rty_schematic, rty_forced) (raw_var, rhs_raw) - -(* parser and command *) -val liftdef_parser = - (((Parse.binding -- (@{keyword "::"} |-- (Parse.typ >> SOME) -- Parse.opt_mixfix')) >> Parse.triple2) - --| @{keyword "is"} -- Parse.term -- - Scan.optional (@{keyword "parametric"} |-- Parse.!!! 
Parse.xthms1) []) >> Parse.triple1 -val _ = - Outer_Syntax.local_theory_to_proof @{command_keyword lift_definition} - "definition for constants over the quotient type" - (liftdef_parser >> lift_def_cmd_with_err_handling) - +fun lift_def config var qty rhs tac par_thms lthy = gen_lift_def (add_lift_def config) + var qty rhs tac par_thms lthy end (* structure *) diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Lifting/lifting_def_code_dt.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Tools/Lifting/lifting_def_code_dt.ML Mon May 25 22:11:43 2015 +0200 @@ -0,0 +1,816 @@ +(* Title: HOL/Tools/Lifting/lifting_def_code_dt.ML + Author: Ondrej Kuncar + +Workaround that allows us to execute lifted constants that have +as a return type a datatype containing a subtype; lift_definition command +*) + +signature LIFTING_DEF_CODE_DT = +sig + type rep_isom_data + val isom_of_rep_isom_data: rep_isom_data -> term + val transfer_of_rep_isom_data: rep_isom_data -> thm + val bundle_name_of_rep_isom_data: rep_isom_data -> string + val pointer_of_rep_isom_data: rep_isom_data -> string + + type code_dt + val rty_of_code_dt: code_dt -> typ + val qty_of_code_dt: code_dt -> typ + val wit_of_code_dt: code_dt -> term + val wit_thm_of_code_dt: code_dt -> thm + val rep_isom_data_of_code_dt: code_dt -> rep_isom_data option + val morph_code_dt: morphism -> code_dt -> code_dt + val mk_witness_of_code_dt: typ -> code_dt -> term + val mk_rep_isom_of_code_dt: typ -> code_dt -> term option + + val code_dt_of: Proof.context -> typ * typ -> code_dt option + val code_dt_of_global: theory -> typ * typ -> code_dt option + val all_code_dt_of: Proof.context -> code_dt list + val all_code_dt_of_global: theory -> code_dt list + + type config_code_dt = { code_dt: bool, lift_config: Lifting_Def.config } + val default_config_code_dt: config_code_dt + + val add_lift_def_code_dt: + config_code_dt -> binding * mixfix -> typ -> term -> thm -> thm list -> local_theory -> + Lifting_Def.lift_def * 
local_theory + + val lift_def_code_dt: + config_code_dt -> binding * mixfix -> typ -> term -> (Proof.context -> tactic) -> thm list -> + local_theory -> Lifting_Def.lift_def * local_theory + + val lift_def_cmd: + string list * (binding * string option * mixfix) * string * (Facts.ref * Token.src list) list -> + local_theory -> Proof.state +end + +structure Lifting_Def_Code_Dt: LIFTING_DEF_CODE_DT = +struct + +open Ctr_Sugar_Util BNF_Util BNF_FP_Util BNF_FP_Def_Sugar Lifting_Def Lifting_Util + +infix 0 MRSL + +(** data structures **) + +(* all type variables in qty are in rty *) +datatype rep_isom_data = REP_ISOM of { isom: term, transfer: thm, bundle_name: string, pointer: string } +fun isom_of_rep_isom_data (REP_ISOM rep_isom) = #isom rep_isom; +fun transfer_of_rep_isom_data (REP_ISOM rep_isom) = #transfer rep_isom; +fun bundle_name_of_rep_isom_data (REP_ISOM rep_isom) = #bundle_name rep_isom; +fun pointer_of_rep_isom_data (REP_ISOM rep_isom) = #pointer rep_isom; + +datatype code_dt = CODE_DT of { rty: typ, qty: typ, wit: term, wit_thm: thm, + rep_isom_data: rep_isom_data option }; +fun rty_of_code_dt (CODE_DT code_dt) = #rty code_dt; +fun qty_of_code_dt (CODE_DT code_dt) = #qty code_dt; +fun wit_of_code_dt (CODE_DT code_dt) = #wit code_dt; +fun wit_thm_of_code_dt (CODE_DT code_dt) = #wit_thm code_dt; +fun rep_isom_data_of_code_dt (CODE_DT code_dt) = #rep_isom_data code_dt; +fun ty_alpha_equiv (T, U) = Type.raw_instance (T, U) andalso Type.raw_instance (U, T); +fun code_dt_eq c = (ty_alpha_equiv o apply2 rty_of_code_dt) c + andalso (ty_alpha_equiv o apply2 qty_of_code_dt) c; +fun term_of_code_dt code_dt = code_dt |> `rty_of_code_dt ||> qty_of_code_dt |> HOLogic.mk_prodT + |> Net.encode_type |> single; + +(* modulo renaming, typ must contain TVars *) +fun is_code_dt_of_type (rty, qty) code_dt = code_dt |> `rty_of_code_dt ||> qty_of_code_dt + |> HOLogic.mk_prodT |> curry ty_alpha_equiv (HOLogic.mk_prodT (rty, qty)); + +fun mk_rep_isom_data isom transfer bundle_name 
pointer = + REP_ISOM { isom = isom, transfer = transfer, bundle_name = bundle_name, pointer = pointer} + +fun mk_code_dt rty qty wit wit_thm rep_isom_data = + CODE_DT { rty = rty, qty = qty, wit = wit, wit_thm = wit_thm, rep_isom_data = rep_isom_data }; + +fun map_rep_isom_data f1 f2 f3 f4 + (REP_ISOM { isom = isom, transfer = transfer, bundle_name = bundle_name, pointer = pointer }) = + REP_ISOM { isom = f1 isom, transfer = f2 transfer, bundle_name = f3 bundle_name, pointer = f4 pointer }; + +fun map_code_dt f1 f2 f3 f4 f5 f6 f7 f8 + (CODE_DT {rty = rty, qty = qty, wit = wit, wit_thm = wit_thm, rep_isom_data = rep_isom_data}) = + CODE_DT {rty = f1 rty, qty = f2 qty, wit = f3 wit, wit_thm = f4 wit_thm, + rep_isom_data = Option.map (map_rep_isom_data f5 f6 f7 f8) rep_isom_data}; + +fun update_rep_isom isom transfer binding pointer i = mk_code_dt (rty_of_code_dt i) (qty_of_code_dt i) + (wit_of_code_dt i) (wit_thm_of_code_dt i) (SOME (mk_rep_isom_data isom transfer binding pointer)) + +fun morph_code_dt phi = + let + val mty = Morphism.typ phi + val mterm = Morphism.term phi + val mthm = Morphism.thm phi + in + map_code_dt mty mty mterm mthm mterm mthm I I + end + +val transfer_code_dt = morph_code_dt o Morphism.transfer_morphism; + +structure Data = Generic_Data +( + type T = code_dt Item_Net.T + val empty = Item_Net.init code_dt_eq term_of_code_dt + val extend = I + val merge = Item_Net.merge +); + +fun code_dt_of_generic context (rty, qty) = + let + val typ = HOLogic.mk_prodT (rty, qty) + val prefiltred = Item_Net.retrieve_matching (Data.get context) (Net.encode_type typ) + in + prefiltred |> filter (is_code_dt_of_type (rty, qty)) + |> map (transfer_code_dt (Context.theory_of context)) |> find_first (fn _ => true) + end; + +fun code_dt_of ctxt (rty, qty) = + let + val sch_rty = Logic.type_map (singleton (Variable.polymorphic ctxt)) rty + val sch_qty = Logic.type_map (singleton (Variable.polymorphic ctxt)) qty + in + code_dt_of_generic (Context.Proof ctxt) (sch_rty, 
sch_qty) + end; + +fun code_dt_of_global thy (rty, qty) = + let + val sch_rty = Logic.varifyT_global rty + val sch_qty = Logic.varifyT_global qty + in + code_dt_of_generic (Context.Theory thy) (sch_rty, sch_qty) + end; + +fun all_code_dt_of_generic context = + Item_Net.content (Data.get context) |> map (transfer_code_dt (Context.theory_of context)); + +val all_code_dt_of = all_code_dt_of_generic o Context.Proof; +val all_code_dt_of_global = all_code_dt_of_generic o Context.Theory; + +fun update_code_dt code_dt = + Local_Theory.declaration {syntax = false, pervasive = true} + (fn phi => Data.map (Item_Net.update (morph_code_dt phi code_dt))); + +fun mk_match_of_code_dt qty code_dt = Vartab.empty |> Type.raw_match (qty_of_code_dt code_dt, qty) + |> Vartab.dest |> map (fn (x, (S, T)) => (TVar (x, S), T)); + +fun mk_witness_of_code_dt qty code_dt = + Term.subst_atomic_types (mk_match_of_code_dt qty code_dt) (wit_of_code_dt code_dt) + +fun mk_rep_isom_of_code_dt qty code_dt = Option.map + (isom_of_rep_isom_data #> Term.subst_atomic_types (mk_match_of_code_dt qty code_dt)) + (rep_isom_data_of_code_dt code_dt) + + +(** unique name for a type **) + +fun var_name name sort = if sort = @{sort "{type}"} orelse sort = [] then ["x" ^ name] + else "x" ^ name :: "x_" :: sort @ ["x_"]; + +fun concat_Tnames (Type (name, ts)) = name :: maps concat_Tnames ts + | concat_Tnames (TFree (name, sort)) = var_name name sort + | concat_Tnames (TVar ((name, _), sort)) = var_name name sort; + +fun unique_Tname (rty, qty) = + let + val Tnames = map Long_Name.base_name (concat_Tnames rty @ ["x_x"] @ concat_Tnames qty); + in + fold (Binding.qualify false) (tl Tnames) (Binding.name (hd Tnames)) + end; + +(** witnesses **) + +fun mk_undefined T = Const (@{const_name undefined}, T); + +fun mk_witness quot_thm = + let + val wit_thm = quot_thm RS @{thm type_definition_Quotient_not_empty_witness} + val wit = quot_thm_rep quot_thm $ mk_undefined (quot_thm_rty_qty quot_thm |> snd) + in + (wit, wit_thm) + 
end + +(** config **) + +type config_code_dt = { code_dt: bool, lift_config: config } +val default_config_code_dt = { code_dt = false, lift_config = default_config } + + +(** Main code **) + +val ld_no_notes = { notes = false } + +fun comp_lift_error _ _ = error "Composition of abstract types has not been implemented yet." + +fun lift qty (quot_thm, (lthy, rel_eq_onps)) = + let + val quot_thm = Lifting_Term.force_qty_type lthy qty quot_thm + val (rty, qty) = quot_thm_rty_qty quot_thm; + in + if is_none (code_dt_of lthy (rty, qty)) then + let + val (wit, wit_thm) = (mk_witness quot_thm + handle THM _ => error ("code_dt: " ^ quote (Tname qty) ^ " was not defined as a subtype.")) + val code_dt = mk_code_dt rty qty wit wit_thm NONE + in + (quot_thm, (update_code_dt code_dt lthy |> Local_Theory.restore, rel_eq_onps)) + end + else + (quot_thm, (lthy, rel_eq_onps)) + end; + +fun case_tac rule ctxt i st = + (Subgoal.FOCUS_PARAMS (fn {params, ...} => HEADGOAL(rtac + (Ctr_Sugar_Util.cterm_instantiate_pos [SOME (params |> hd |> snd)] rule))) ctxt i st); + +fun bundle_name_of_bundle_binding binding phi context = + Name_Space.full_name (Name_Space.naming_of context) (Morphism.binding phi binding); + +fun prove_schematic_quot_thm actions ctxt = Lifting_Term.prove_schematic_quot_thm actions + (Lifting_Info.get_quotients ctxt) ctxt + +fun prove_code_dt (rty, qty) lthy = + let + val (fold_quot_thm: (local_theory * thm list) Lifting_Term.fold_quot_thm) = + { constr = constr, lift = lift, comp_lift = comp_lift_error }; + in prove_schematic_quot_thm fold_quot_thm lthy (rty, qty) (lthy, []) |> snd end +and add_lift_def_code_dt config var qty rhs rsp_thm par_thms lthy = + let + fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2 + fun ret_rel_conv conv ctm = + case (Thm.term_of ctm) of + Const (@{const_name "rel_fun"}, _) $ _ $ _ => + binop_conv2 Conv.all_conv conv ctm + | _ => conv ctm + fun R_conv rel_eq_onps = Transfer.top_sweep_rewr_conv @{thms 
eq_onp_top_eq_eq[symmetric, THEN eq_reflection]} + then_conv Transfer.bottom_rewr_conv rel_eq_onps + + val (ret_lift_def, lthy) = add_lift_def (#lift_config config) var qty rhs rsp_thm par_thms lthy + in + if (not (#code_dt config) orelse (code_eq_of_lift_def ret_lift_def <> NONE_EQ) + andalso (code_eq_of_lift_def ret_lift_def <> UNKNOWN_EQ)) + (* Let us try even in case of UNKNOWN_EQ. If this leads to problems, the user can always + say that they do not want this workaround. *) + then (ret_lift_def, lthy) + else + let + val lift_def = inst_of_lift_def lthy qty ret_lift_def + val rty = rty_of_lift_def lift_def + val rty_ret = body_type rty + val qty_ret = body_type qty + + val (lthy, rel_eq_onps) = prove_code_dt (rty_ret, qty_ret) lthy + val code_dt = code_dt_of lthy (rty_ret, qty_ret) + in + if is_none code_dt orelse is_none (rep_isom_data_of_code_dt (the code_dt)) then (ret_lift_def, lthy) + else + let + val code_dt = the code_dt + val rhs = dest_comb (rhs_of_lift_def lift_def) |> snd + val rep_isom_data = code_dt |> rep_isom_data_of_code_dt |> the + val pointer = pointer_of_rep_isom_data rep_isom_data + val quot_active = + Lifting_Info.lookup_restore_data lthy pointer |> the |> #quotient |> #quot_thm + |> Lifting_Info.lookup_quot_thm_quotients lthy |> is_some + val qty_code_dt_bundle_name = bundle_name_of_rep_isom_data rep_isom_data + val rep_isom = mk_rep_isom_of_code_dt qty_ret code_dt |> the + val lthy = if quot_active then lthy else Bundle.includes [qty_code_dt_bundle_name] lthy + fun qty_isom_of_rep_isom rep = rep |> dest_Const |> snd |> domain_type + val qty_isom = qty_isom_of_rep_isom rep_isom + val f'_var = (Binding.suffix_name "_aux" (fst var), NoSyn); + val f'_qty = strip_type qty |> fst |> rpair qty_isom |> op ---> + val f'_rsp_rel = Lifting_Term.equiv_relation lthy (rty, f'_qty); + val rsp = rsp_thm_of_lift_def lift_def + val rel_eq_onps_conv = HOLogic.Trueprop_conv (Conv.fun2_conv (ret_rel_conv (R_conv rel_eq_onps))) + val rsp_norm = Conv.fconv_rule 
rel_eq_onps_conv rsp + val f'_rsp_goal = HOLogic.mk_Trueprop (f'_rsp_rel $ rhs $ rhs); + val f'_rsp = Goal.prove_sorry lthy [] [] f'_rsp_goal + (K (HEADGOAL (CONVERSION (rel_eq_onps_conv) THEN' rtac rsp_norm))) + |> Thm.close_derivation + val (f'_lift_def, lthy) = add_lift_def ld_no_notes f'_var f'_qty rhs f'_rsp [] lthy + val f'_lift_def = inst_of_lift_def lthy f'_qty f'_lift_def + val f'_lift_const = mk_lift_const_of_lift_def f'_qty f'_lift_def + val args_lthy = lthy + val (args, lthy) = mk_Frees "x" (binder_types qty) lthy + val f_alt_def_goal_lhs = list_comb (lift_const_of_lift_def lift_def, args); + val f_alt_def_goal_rhs = rep_isom $ list_comb (f'_lift_const, args); + val f_alt_def_goal = HOLogic.mk_Trueprop (HOLogic.mk_eq (f_alt_def_goal_lhs, f_alt_def_goal_rhs)); + fun f_alt_def_tac ctxt i = + EVERY' [Transfer.gen_frees_tac [] ctxt, DETERM o Transfer.transfer_tac true ctxt, + SELECT_GOAL (Local_Defs.unfold_tac ctxt [id_apply]), rtac refl] i; + val rep_isom_transfer = transfer_of_rep_isom_data rep_isom_data + val (_, transfer_lthy) = Proof_Context.note_thmss "" [((Binding.empty, []), + [([rep_isom_transfer], [Transfer.transfer_add])])] lthy + val f_alt_def = Goal.prove_sorry transfer_lthy [] [] f_alt_def_goal + (fn {context = ctxt, prems = _} => HEADGOAL (f_alt_def_tac ctxt)) + |> Thm.close_derivation + |> singleton (Variable.export lthy args_lthy) + val lthy = args_lthy + val lthy = lthy + |> Local_Theory.note ((Binding.empty, @{attributes [code]}), [f_alt_def]) + |> snd + (* if processing a mutual datatype (there is a cycle!) 
the corresponding quotient + will be needed later and will be forgotten later *) + |> (if quot_active then I else Lifting_Setup.lifting_forget pointer) + in + (ret_lift_def, lthy) + end + end + end +and mk_rep_isom qty_isom_bundle (rty, qty, qty_isom) lthy = + let + (* logical definition of qty qty_isom isomorphism *) + val uTname = unique_Tname (rty, qty) + fun eq_onp_to_top_tac ctxt = SELECT_GOAL (Local_Defs.unfold_tac ctxt + (@{thm eq_onp_top_eq_eq[symmetric]} :: Lifting_Info.get_relator_eq_onp_rules ctxt)) + fun lift_isom_tac ctxt = HEADGOAL (eq_onp_to_top_tac ctxt + THEN' (rtac @{thm id_transfer})); + + val (rep_isom_lift_def, lthy) = lift_def ld_no_notes (Binding.qualified true "Rep_isom" uTname, NoSyn) + (qty_isom --> qty) (HOLogic.id_const rty) lift_isom_tac [] lthy + |> apfst (inst_of_lift_def lthy (qty_isom --> qty)); + val (abs_isom, lthy) = lift_def ld_no_notes (Binding.qualified true "Abs_isom" uTname, NoSyn) + (qty --> qty_isom) (HOLogic.id_const rty) lift_isom_tac [] lthy + |> apfst (mk_lift_const_of_lift_def (qty --> qty_isom)); + val rep_isom = lift_const_of_lift_def rep_isom_lift_def + + val pointer = Lifting_Setup.pointer_of_bundle_binding lthy qty_isom_bundle + fun code_dt phi context = code_dt_of lthy (rty, qty) |> the |> + update_rep_isom rep_isom (transfer_rules_of_lift_def rep_isom_lift_def |> hd) + (bundle_name_of_bundle_binding qty_isom_bundle phi context) pointer; + val lthy = lthy + |> Local_Theory.declaration {syntax = false, pervasive = true} + (fn phi => fn context => Data.map (Item_Net.update (morph_code_dt phi (code_dt phi context))) context) + |> Local_Theory.restore + + (* in order to make the qty qty_isom isomorphism executable we have to define discriminators + and selectors for qty_isom *) + val (rty_name, typs) = dest_Type rty + val (_, qty_typs) = dest_Type qty + val fp = BNF_FP_Def_Sugar.fp_sugar_of lthy rty_name + val fp = if is_some fp then the fp + else error ("code_dt: " ^ quote rty_name ^ " is not a datatype.") + val 
ctr_sugar = fp |> #fp_ctr_sugar |> #ctr_sugar + val ctrs = map (Ctr_Sugar.mk_ctr typs) (#ctrs ctr_sugar); + val qty_ctrs = map (Ctr_Sugar.mk_ctr qty_typs) (#ctrs ctr_sugar); + val ctr_Tss = map (dest_Const #> snd #> binder_types) ctrs; + val qty_ctr_Tss = map (dest_Const #> snd #> binder_types) qty_ctrs; + + val n = length ctrs; + val ks = 1 upto n; + val (xss, _) = mk_Freess "x" ctr_Tss lthy; + + fun sel_retT (rty' as Type (s, rtys'), qty' as Type (s', qtys')) = + if (rty', qty') = (rty, qty) then qty_isom else (if s = s' + then Type (s, map sel_retT (rtys' ~~ qtys')) else qty') + | sel_retT (_, qty') = qty'; + + val sel_retTs = map2 (map2 (sel_retT oo pair)) ctr_Tss qty_ctr_Tss + + fun lazy_prove_code_dt (rty, qty) lthy = + if is_none (code_dt_of lthy (rty, qty)) then prove_code_dt (rty, qty) lthy |> fst else lthy; + + val lthy = fold2 (fold2 (lazy_prove_code_dt oo pair)) ctr_Tss sel_retTs lthy + + val sel_argss = @{map 4} (fn k => fn xs => @{map 2} (fn x => fn qty_ret => + (k, qty_ret, (xs, x)))) ks xss xss sel_retTs; + + fun mk_sel_casex (_, _, (_, x)) = Ctr_Sugar.mk_case typs (x |> dest_Free |> snd) (#casex ctr_sugar); + val dis_casex = Ctr_Sugar.mk_case typs HOLogic.boolT (#casex ctr_sugar); + fun mk_sel_case_args lthy ctr_Tss ks (k, qty_ret, (xs, x)) = + let + val T = x |> dest_Free |> snd; + fun gen_undef_wit Ts wits = + case code_dt_of lthy (T, qty_ret) of + SOME code_dt => + (fold_rev (Term.lambda o curry Free Name.uu) Ts (mk_witness_of_code_dt qty_ret code_dt), + wit_thm_of_code_dt code_dt :: wits) + | NONE => (fold_rev (Term.lambda o curry Free Name.uu) Ts (mk_undefined T), wits) + in + @{fold_map 2} (fn Ts => fn k' => fn wits => + (if k = k' then (fold_rev Term.lambda xs x, wits) else gen_undef_wit Ts wits)) ctr_Tss ks [] + end; + fun mk_sel_rhs arg = + let val (sel_rhs, wits) = mk_sel_case_args lthy ctr_Tss ks arg + in (arg |> #2, wits, list_comb (mk_sel_casex arg, sel_rhs)) end; + fun mk_dis_case_args args k = map (fn (k', arg) => (if k = k' + then 
fold_rev Term.lambda arg @{const True} else fold_rev Term.lambda arg @{const False})) args; + val sel_rhs = map (map mk_sel_rhs) sel_argss + val dis_rhs = map (fn k => list_comb (dis_casex, mk_dis_case_args (ks ~~ xss) k)) ks + val dis_qty = qty_isom --> HOLogic.boolT; + val dis_names = map (fn k => Binding.qualified true ("dis" ^ string_of_int k) uTname) ks; + + val (diss, lthy) = @{fold_map 2} (fn b => fn rhs => fn lthy => + lift_def ld_no_notes (b, NoSyn) dis_qty rhs (K all_tac) [] lthy + |> apfst (mk_lift_const_of_lift_def dis_qty)) dis_names dis_rhs lthy + + val unfold_lift_sel_rsp = @{lemma "(\x. P1 x \ P2 (f x)) \ (rel_fun (eq_onp P1) (eq_onp P2)) f f" + by (simp add: eq_onp_same_args rel_fun_eq_onp_rel)} + + fun lift_sel_tac exhaust_rule dt_rules wits ctxt i = + (Method.insert_tac wits THEN' + eq_onp_to_top_tac ctxt THEN' (* normalize *) + rtac unfold_lift_sel_rsp THEN' + case_tac exhaust_rule ctxt THEN_ALL_NEW ( + EVERY' [hyp_subst_tac ctxt, (* does not kill wits because = was rewritten to eq_onp top *) + Raw_Simplifier.rewrite_goal_tac ctxt (map safe_mk_meta_eq dt_rules), + REPEAT_DETERM o etac conjE, atac])) i + val pred_simps = Transfer.lookup_pred_data lthy (Tname rty) |> the |> Transfer.pred_simps + val sel_tac = lift_sel_tac (#exhaust ctr_sugar) (#case_thms ctr_sugar @ pred_simps) + val sel_names = map (fn (k, xs) => map (fn k' => Binding.qualified true + ("sel" ^ string_of_int k ^ string_of_int k') uTname) (1 upto length xs)) (ks ~~ ctr_Tss); + val (selss, lthy) = @{fold_map 2} (@{fold_map 2} (fn b => fn (qty_ret, wits, rhs) => fn lthy => + lift_def_code_dt { code_dt = true, lift_config = ld_no_notes } + (b, NoSyn) (qty_isom --> qty_ret) rhs (HEADGOAL o sel_tac wits) [] lthy + |> apfst (mk_lift_const_of_lift_def (qty_isom --> qty_ret)))) sel_names sel_rhs lthy + + (* now we can execute the qty qty_isom isomorphism *) + fun mk_type_definition newT oldT RepC AbsC A = + let + val typedefC = + Const (@{const_name type_definition}, + (newT --> oldT) --> 
(oldT --> newT) --> HOLogic.mk_setT oldT --> HOLogic.boolT); + in typedefC $ RepC $ AbsC $ A end; + val typedef_goal = mk_type_definition qty_isom qty rep_isom abs_isom (HOLogic.mk_UNIV qty) |> + HOLogic.mk_Trueprop; + fun typ_isom_tac ctxt i = + EVERY' [ SELECT_GOAL (Local_Defs.unfold_tac ctxt @{thms type_definition_def}), + DETERM o Transfer.transfer_tac true ctxt, + SELECT_GOAL (Local_Defs.unfold_tac ctxt @{thms eq_onp_top_eq_eq}) (* normalize *), + Raw_Simplifier.rewrite_goal_tac ctxt + (map safe_mk_meta_eq @{thms id_apply simp_thms Ball_def}), + rtac TrueI] i; + + val (_, transfer_lthy) = Proof_Context.note_thmss "" [((Binding.empty, []), + [(@{thms right_total_UNIV_transfer},[Transfer.transfer_add]), + (@{thms Domain_eq_top}, [Transfer.transfer_domain_add]) ])] lthy; + + val quot_thm_isom = Goal.prove_sorry transfer_lthy [] [] typedef_goal + (fn {context = ctxt, prems = _} => typ_isom_tac ctxt 1) + |> Thm.close_derivation + |> singleton (Variable.export transfer_lthy lthy) + |> (fn thm => @{thm UNIV_typedef_to_Quotient} OF [thm, @{thm reflexive}]) + val qty_isom_name = Tname qty_isom; + val quot_isom_rep = + let + val (quotients : Lifting_Term.quotients) = Symtab.insert (Lifting_Info.quotient_eq) (qty_isom_name, + {quot_thm = quot_thm_isom, pcr_info = NONE}) Symtab.empty + val id_actions = { constr = K I, lift = K I, comp_lift = K I } + in + fn ctxt => fn (rty, qty) => Lifting_Term.prove_schematic_quot_thm id_actions quotients + ctxt (rty, qty) () |> fst |> Lifting_Term.force_qty_type ctxt qty + |> quot_thm_rep + end; + val x_lthy = lthy + val (x, lthy) = yield_singleton (mk_Frees "x") qty_isom lthy; + + fun mk_ctr ctr ctr_Ts sels = + let + val sel_ret_Ts = map (dest_Const #> snd #> body_type) sels; + + fun rep_isom lthy t (rty, qty) = + let + val rep = quot_isom_rep lthy (rty, qty) + in + if is_Const rep andalso (rep |> dest_Const |> fst) = @{const_name id} then + t else rep $ t + end; + in + @{fold 3} (fn sel => fn ctr_T => fn sel_ret_T => fn ctr => + ctr $ 
rep_isom lthy (sel $ x) (ctr_T, sel_ret_T)) sels ctr_Ts sel_ret_Ts ctr + end; + + (* stolen from Metis *) + exception BREAK_LIST + fun break_list (x :: xs) = (x, xs) + | break_list _ = raise BREAK_LIST + + val (ctr, ctrs) = qty_ctrs |> rev |> break_list; + val (ctr_Ts, ctr_Tss) = qty_ctr_Tss |> rev |> break_list; + val (sel, rselss) = selss |> rev |> break_list; + val rdiss = rev diss |> tl; + + val first_ctr = mk_ctr ctr ctr_Ts sel; + + fun mk_If_ctr dis ctr ctr_Ts sel elsex = mk_If (dis$x) (mk_ctr ctr ctr_Ts sel) elsex; + + val rhs = @{fold 4} mk_If_ctr rdiss ctrs ctr_Tss rselss first_ctr; + + val rep_isom_code_goal = HOLogic.mk_Trueprop (HOLogic.mk_eq (rep_isom$x, rhs)); + + local + val rep_isom_code_tac_rules = map safe_mk_meta_eq @{thms refl id_apply if_splits simp_thms} + in + fun rep_isom_code_tac (ctr_sugar:Ctr_Sugar.ctr_sugar) ctxt i = + let + val exhaust = ctr_sugar |> #exhaust + val cases = ctr_sugar |> #case_thms + val map_ids = fp |> #fp_nesting_bnfs |> map BNF_Def.map_id0_of_bnf + val simp_rules = map safe_mk_meta_eq (cases @ map_ids) @ rep_isom_code_tac_rules + in + EVERY' [Transfer.gen_frees_tac [] ctxt, DETERM o (Transfer.transfer_tac true ctxt), + case_tac exhaust ctxt THEN_ALL_NEW EVERY' [hyp_subst_tac ctxt, + Raw_Simplifier.rewrite_goal_tac ctxt simp_rules, rtac TrueI ]] i + end + end + + (* stolen from bnf_fp_n2m.ML *) + fun force_typ ctxt T = + Term.map_types Type_Infer.paramify_vars + #> Type.constraint T + #> singleton (Type_Infer_Context.infer_types ctxt); + + (* The following tests that types in rty have corresponding arities imposed by constraints of + the datatype fp. Otherwise rep_isom_code_tac could fail (especially transfer in it) is such + a way that it is not easy to infer the problem with sorts. 
+ *) + val _ = yield_singleton (mk_Frees "x") (#T fp) lthy |> fst |> force_typ lthy qty + + val rep_isom_code = Goal.prove_sorry lthy [] [] rep_isom_code_goal + (fn {context = ctxt, prems = _} => rep_isom_code_tac ctr_sugar ctxt 1) + |> Thm.close_derivation + |> singleton(Variable.export lthy x_lthy) + val lthy = x_lthy + val lthy = + lthy + |> snd o Local_Theory.note ((Binding.empty, @{attributes [code]}), [rep_isom_code]) + |> Lifting_Setup.lifting_forget pointer + in + ((selss, diss, rep_isom_code), lthy) + end +and constr qty (quot_thm, (lthy, rel_eq_onps)) = + let + val quot_thm = Lifting_Term.force_qty_type lthy qty quot_thm + val (rty, qty) = quot_thm_rty_qty quot_thm + val rty_name = Tname rty; + val pred_data = Transfer.lookup_pred_data lthy rty_name + val pred_data = if is_some pred_data then the pred_data + else error ("code_dt: " ^ quote rty_name ^ " is not a datatype.") + val rel_eq_onp = safe_mk_meta_eq (Transfer.rel_eq_onp pred_data); + val rel_eq_onps = insert Thm.eq_thm rel_eq_onp rel_eq_onps + val R_conv = Transfer.top_sweep_rewr_conv @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]} + then_conv Conv.rewr_conv rel_eq_onp + val quot_thm = Conv.fconv_rule(HOLogic.Trueprop_conv (Quotient_R_conv R_conv)) quot_thm; + in + if is_none (code_dt_of lthy (rty, qty)) then + let + val non_empty_pred = quot_thm RS @{thm type_definition_Quotient_not_empty} + val pred = quot_thm_rel quot_thm |> dest_comb |> snd; + val (pred, lthy) = yield_singleton (Variable.import_terms true) pred lthy; + val TFrees = Term.add_tfreesT qty [] + + fun non_empty_typedef_tac non_empty_pred ctxt i = + (Method.insert_tac [non_empty_pred] THEN' + SELECT_GOAL (Local_Defs.unfold_tac ctxt [mem_Collect_eq]) THEN' atac) i + val uTname = unique_Tname (rty, qty) + val Tdef_set = HOLogic.mk_Collect ("x", rty, pred $ Free("x", rty)); + val ((_, tcode_dt), lthy) = conceal_naming_result (typedef (Binding.concealed uTname, TFrees, NoSyn) + Tdef_set NONE (fn lthy => HEADGOAL 
(non_empty_typedef_tac non_empty_pred lthy))) lthy; + val type_definition_thm = tcode_dt |> snd |> #type_definition; + val qty_isom = tcode_dt |> fst |> #abs_type; + + val config = { notes = false} + val (binding, lthy) = conceal_naming_result (Lifting_Setup.setup_by_typedef_thm + config type_definition_thm) lthy + val lthy = Local_Theory.restore lthy + val (wit, wit_thm) = mk_witness quot_thm; + val code_dt = mk_code_dt rty qty wit wit_thm NONE; + val lthy = lthy + |> update_code_dt code_dt + |> Local_Theory.restore + |> mk_rep_isom binding (rty, qty, qty_isom) |> snd + in + (quot_thm, (lthy, rel_eq_onps)) + end + else + (quot_thm, (lthy, rel_eq_onps)) + end +and lift_def_code_dt config var qty rhs tac par_thms lthy = gen_lift_def (add_lift_def_code_dt config) + var qty rhs tac par_thms lthy + + +(** from parsed parameters to the config record **) + +fun map_config_code_dt f1 f2 ({code_dt = code_dt, lift_config = lift_config}: config_code_dt) = + {code_dt = f1 code_dt, lift_config = f2 lift_config} + +fun update_config_code_dt nval = map_config_code_dt (K nval) I + +val config_flags = [("code_dt", update_config_code_dt true)] + +fun evaluate_params params = + let + fun eval_param param config = + case AList.lookup (op =) config_flags param of + SOME update => update config + | NONE => error ("Unknown parameter: " ^ (quote param)) + in + fold eval_param params default_config_code_dt + end + +(** + + lift_definition command. It opens a proof of a corresponding respectfulness + theorem in a user-friendly, readable form. Then add_lift_def_code_dt is called internally. 
+ +**) + +local + val eq_onp_assms_tac_fixed_rules = map (Transfer.prep_transfer_domain_thm @{context}) + [@{thm pcr_Domainp_total}, @{thm pcr_Domainp_par_left_total}, @{thm pcr_Domainp_par}, + @{thm pcr_Domainp}] +in +fun mk_readable_rsp_thm_eq tm lthy = + let + val ctm = Thm.cterm_of lthy tm + + fun assms_rewr_conv tactic rule ct = + let + fun prove_extra_assms thm = + let + val assms = cprems_of thm + fun finish thm = if Thm.no_prems thm then SOME (Goal.conclude thm) else NONE + fun prove ctm = Option.mapPartial finish (SINGLE tactic (Goal.init ctm)) + in + map_interrupt prove assms + end + + fun cconl_of thm = Drule.strip_imp_concl (Thm.cprop_of thm) + fun lhs_of thm = fst (Thm.dest_equals (cconl_of thm)) + fun rhs_of thm = snd (Thm.dest_equals (cconl_of thm)) + val rule1 = Thm.incr_indexes (Thm.maxidx_of_cterm ct + 1) rule; + val lhs = lhs_of rule1; + val rule2 = Thm.rename_boundvars (Thm.term_of lhs) (Thm.term_of ct) rule1; + val rule3 = + Thm.instantiate (Thm.match (lhs, ct)) rule2 + handle Pattern.MATCH => raise CTERM ("assms_rewr_conv", [lhs, ct]); + val proved_assms = prove_extra_assms rule3 + in + case proved_assms of + SOME proved_assms => + let + val rule3 = proved_assms MRSL rule3 + val rule4 = + if lhs_of rule3 aconvc ct then rule3 + else + let val ceq = Thm.dest_fun2 (Thm.cprop_of rule3) + in rule3 COMP Thm.trivial (Thm.mk_binop ceq ct (rhs_of rule3)) end + in Thm.transitive rule4 (Thm.beta_conversion true (rhs_of rule4)) end + | NONE => Conv.no_conv ct + end + + fun assms_rewrs_conv tactic rules = Conv.first_conv (map (assms_rewr_conv tactic) rules) + + fun simp_arrows_conv ctm = + let + val unfold_conv = Conv.rewrs_conv + [@{thm rel_fun_eq_eq_onp[THEN eq_reflection]}, + @{thm rel_fun_eq_onp_rel[THEN eq_reflection]}, + @{thm rel_fun_eq[THEN eq_reflection]}, + @{thm rel_fun_eq_rel[THEN eq_reflection]}, + @{thm rel_fun_def[THEN eq_reflection]}] + fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2 + val eq_onp_assms_tac_rules = 
@{thm left_unique_OO} :: + eq_onp_assms_tac_fixed_rules @ (Transfer.get_transfer_raw lthy) + val intro_top_rule = @{thm eq_onp_top_eq_eq[symmetric, THEN eq_reflection]} + val kill_tops = Transfer.top_sweep_rewr_conv [@{thm eq_onp_top_eq_eq[THEN eq_reflection]}] + val eq_onp_assms_tac = (CONVERSION kill_tops THEN' + TRY o REPEAT_ALL_NEW (resolve_tac lthy eq_onp_assms_tac_rules) + THEN_ALL_NEW (DETERM o Transfer.eq_tac lthy)) 1 + val relator_eq_onp_conv = Conv.bottom_conv + (K (Conv.try_conv (assms_rewrs_conv eq_onp_assms_tac + (intro_top_rule :: Lifting_Info.get_relator_eq_onp_rules lthy)))) lthy + then_conv kill_tops + val relator_eq_conv = Conv.bottom_conv + (K (Conv.try_conv (Conv.rewrs_conv (Transfer.get_relator_eq lthy)))) lthy + in + case (Thm.term_of ctm) of + Const (@{const_name "rel_fun"}, _) $ _ $ _ => + (binop_conv2 simp_arrows_conv simp_arrows_conv then_conv unfold_conv) ctm + | _ => (relator_eq_onp_conv then_conv relator_eq_conv) ctm + end + + val unfold_ret_val_invs = Conv.bottom_conv + (K (Conv.try_conv (Conv.rewr_conv @{thm eq_onp_same_args[THEN eq_reflection]}))) lthy + val unfold_inv_conv = + Conv.top_sweep_conv (K (Conv.rewr_conv @{thm eq_onp_def[THEN eq_reflection]})) lthy + val simp_conv = HOLogic.Trueprop_conv (Conv.fun2_conv simp_arrows_conv) + val univq_conv = Conv.rewr_conv @{thm HOL.all_simps(6)[symmetric, THEN eq_reflection]} + val univq_prenex_conv = Conv.top_conv (K (Conv.try_conv univq_conv)) lthy + val beta_conv = Thm.beta_conversion true + val eq_thm = + (simp_conv then_conv univq_prenex_conv then_conv beta_conv then_conv unfold_ret_val_invs + then_conv unfold_inv_conv) ctm + in + Object_Logic.rulify lthy (eq_thm RS Drule.equal_elim_rule2) + end +end + +fun rename_to_tnames ctxt term = + let + fun all_typs (Const (@{const_name Pure.all}, _) $ Abs (_, T, t)) = T :: all_typs t + | all_typs _ = [] + + fun rename (Const (@{const_name Pure.all}, T1) $ Abs (_, T2, t)) (new_name :: names) = + (Const (@{const_name Pure.all}, T1) $ Abs 
(new_name, T2, rename t names)) + | rename t _ = t + + val (fixed_def_t, _) = yield_singleton (Variable.importT_terms) term ctxt + val new_names = Old_Datatype_Prop.make_tnames (all_typs fixed_def_t) + in + rename term new_names + end + +fun quot_thm_err ctxt (rty, qty) pretty_msg = + let + val error_msg = cat_lines + ["Lifting failed for the following types:", + Pretty.string_of (Pretty.block + [Pretty.str "Raw type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty]), + Pretty.string_of (Pretty.block + [Pretty.str "Abstract type:", Pretty.brk 2, Syntax.pretty_typ ctxt qty]), + "", + (Pretty.string_of (Pretty.block + [Pretty.str "Reason:", Pretty.brk 2, pretty_msg]))] + in + error error_msg + end + +fun check_rty_err ctxt (rty_schematic, rty_forced) (raw_var, rhs_raw) = + let + val (_, ctxt') = yield_singleton Proof_Context.read_vars raw_var ctxt + val rhs = (Syntax.check_term ctxt' o Syntax.parse_term ctxt') rhs_raw + val error_msg = cat_lines + ["Lifting failed for the following term:", + Pretty.string_of (Pretty.block + [Pretty.str "Term:", Pretty.brk 2, Syntax.pretty_term ctxt rhs]), + Pretty.string_of (Pretty.block + [Pretty.str "Type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty_schematic]), + "", + (Pretty.string_of (Pretty.block + [Pretty.str "Reason:", + Pretty.brk 2, + Pretty.str "The type of the term cannot be instantiated to", + Pretty.brk 1, + Pretty.quote (Syntax.pretty_typ ctxt rty_forced), + Pretty.str "."]))] + in + error error_msg + end + +fun lift_def_cmd (params, raw_var, rhs_raw, par_xthms) lthy = + let + val config = evaluate_params params + val ((binding, SOME qty, mx), lthy) = yield_singleton Proof_Context.read_vars raw_var lthy + val var = (binding, mx) + val rhs = (Syntax.check_term lthy o Syntax.parse_term lthy) rhs_raw + val par_thms = Attrib.eval_thms lthy par_xthms + val (goal, after_qed) = prepare_lift_def (add_lift_def_code_dt config) var qty rhs par_thms lthy + val (goal, after_qed) = + case goal of + NONE => (goal, K (after_qed 
Drule.dummy_thm)) + | SOME prsp_tm => + let + val readable_rsp_thm_eq = mk_readable_rsp_thm_eq prsp_tm lthy + val (readable_rsp_tm, _) = Logic.dest_implies (Thm.prop_of readable_rsp_thm_eq) + val readable_rsp_tm_tnames = rename_to_tnames lthy readable_rsp_tm + + fun after_qed' [[thm]] lthy = + let + val internal_rsp_thm = Goal.prove lthy [] [] prsp_tm + (fn {context = ctxt, ...} => + rtac readable_rsp_thm_eq 1 THEN Proof_Context.fact_tac ctxt [thm] 1) + in + after_qed internal_rsp_thm lthy + end + in + (SOME readable_rsp_tm_tnames, after_qed') + end + fun after_qed_with_err_handling thmss ctxt = (after_qed thmss ctxt + handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy (rty, qty) msg) + handle Lifting_Term.CHECK_RTY (rty_schematic, rty_forced) => + check_rty_err lthy (rty_schematic, rty_forced) (raw_var, rhs_raw); + in + Proof.theorem NONE (snd oo after_qed_with_err_handling) [map (rpair []) (the_list goal)] lthy + end + +fun lift_def_cmd_with_err_handling (params, (raw_var, rhs_raw, par_xthms)) lthy = + (lift_def_cmd (params, raw_var, rhs_raw, par_xthms) lthy + handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy (rty, qty) msg) + handle Lifting_Term.CHECK_RTY (rty_schematic, rty_forced) => + check_rty_err lthy (rty_schematic, rty_forced) (raw_var, rhs_raw); + +val parse_param = Parse.name +val parse_params = Scan.optional (Args.parens (Parse.list parse_param)) []; + +(* parser and command *) +val liftdef_parser = + parse_params -- + (((Parse.binding -- (@{keyword "::"} |-- (Parse.typ >> SOME) -- Parse.opt_mixfix') >> Parse.triple2) + --| @{keyword "is"} -- Parse.term -- + Scan.optional (@{keyword "parametric"} |-- Parse.!!! 
Parse.xthms1) []) >> Parse.triple1) + +val _ = + Outer_Syntax.local_theory_to_proof @{command_keyword "lift_definition"} + "definition for constants over the quotient type" + (liftdef_parser >> lift_def_cmd_with_err_handling) + +end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Lifting/lifting_info.ML --- a/src/HOL/Tools/Lifting/lifting_info.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Lifting/lifting_info.ML Mon May 25 22:11:43 2015 +0200 @@ -16,6 +16,7 @@ val quotient_eq: quotient * quotient -> bool val transform_quotient: morphism -> quotient -> quotient val lookup_quotients: Proof.context -> string -> quotient option + val lookup_quot_thm_quotients: Proof.context -> thm -> quotient option val update_quotients: string -> quotient -> Context.generic -> Context.generic val delete_quotients: thm -> Context.generic -> Context.generic val print_quotients: Proof.context -> unit @@ -221,6 +222,17 @@ fun lookup_quotients ctxt type_name = Symtab.lookup (get_quotients ctxt) type_name +fun lookup_quot_thm_quotients ctxt quot_thm = + let + val (_, qtyp) = quot_thm_rty_qty quot_thm + val qty_full_name = (fst o dest_Type) qtyp + fun compare_data (data:quotient) = Thm.eq_thm_prop (#quot_thm data, quot_thm) + in + case lookup_quotients ctxt qty_full_name of + SOME quotient => if compare_data quotient then SOME quotient else NONE + | NONE => NONE + end + fun update_quotients type_name qinfo ctxt = Data.map (map_quotients (Symtab.update (type_name, qinfo))) ctxt @@ -228,10 +240,8 @@ let val (_, qtyp) = quot_thm_rty_qty quot_thm val qty_full_name = (fst o dest_Type) qtyp - val symtab = get_quotients' ctxt - fun compare_data (_, data:quotient) = Thm.eq_thm_prop (#quot_thm data, quot_thm) in - if Symtab.member compare_data symtab (qty_full_name, quot_thm) + if is_some (lookup_quot_thm_quotients (Context.proof_of ctxt) quot_thm) then Data.map (map_quotients (Symtab.delete qty_full_name)) ctxt else ctxt end diff -r cc71f01f9fde -r ff82ba1893c8 
src/HOL/Tools/Lifting/lifting_setup.ML --- a/src/HOL/Tools/Lifting/lifting_setup.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Lifting/lifting_setup.ML Mon May 25 22:11:43 2015 +0200 @@ -8,11 +8,19 @@ sig exception SETUP_LIFTING_INFR of string - val setup_by_quotient: thm -> thm option -> thm option -> local_theory -> local_theory + type config = { notes: bool }; + val default_config: config; - val setup_by_typedef_thm: thm -> local_theory -> local_theory + val setup_by_quotient: config -> thm -> thm option -> thm option -> local_theory -> + binding * local_theory + + val setup_by_typedef_thm: config -> thm -> local_theory -> binding * local_theory val lifting_restore: Lifting_Info.quotient -> Context.generic -> Context.generic + + val lifting_forget: string -> local_theory -> local_theory + val update_transfer_rules: string -> local_theory -> local_theory + val pointer_of_bundle_binding: Proof.context -> binding -> string end structure Lifting_Setup: LIFTING_SETUP = @@ -24,18 +32,25 @@ exception SETUP_LIFTING_INFR of string -fun define_crel rep_fun lthy = +(* Config *) + +type config = { notes: bool }; +val default_config = { notes = true }; + +fun define_crel (config: config) rep_fun lthy = let val (qty, rty) = (dest_funT o fastype_of) rep_fun val rep_fun_graph = (HOLogic.eq_const rty) $ Bound 1 $ (rep_fun $ Bound 0) val def_term = Abs ("x", rty, Abs ("y", qty, rep_fun_graph)) val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty val crel_name = Binding.prefix_name "cr_" qty_name - val (fixed_def_term, lthy') = yield_singleton (Variable.importT_terms) def_term lthy - val ((_, (_ , def_thm)), lthy'') = - Local_Theory.define ((crel_name, NoSyn), ((Thm.def_binding crel_name, []), fixed_def_term)) lthy' - in - (def_thm, lthy'') + val (fixed_def_term, lthy) = yield_singleton (Variable.importT_terms) def_term lthy + val ((_, (_ , def_thm)), lthy) = if #notes config then + Local_Theory.define ((crel_name, NoSyn), ((Thm.def_binding crel_name, 
[]), fixed_def_term)) lthy + else + Local_Theory.define ((Binding.concealed crel_name, NoSyn), ((Binding.empty, []), fixed_def_term)) lthy + in + (def_thm, lthy) end fun print_define_pcrel_warning msg = @@ -48,7 +63,7 @@ warning warning_msg end -fun define_pcrel crel lthy = +fun define_pcrel (config: config) crel lthy = let val (fixed_crel, lthy) = yield_singleton Variable.importT_terms crel lthy val [rty', qty] = (binder_types o fastype_of) fixed_crel @@ -67,14 +82,25 @@ (rty --> rty' --> HOLogic.boolT) --> (rty' --> qty --> HOLogic.boolT) --> rty --> qty --> HOLogic.boolT) - val relator_type = foldr1 (op -->) ((map type_of args_fixed) @ [rty, qty, HOLogic.boolT]) val qty_name = (fst o dest_Type) qty val pcrel_name = Binding.prefix_name "pcr_" ((Binding.name o Long_Name.base_name) qty_name) + val relator_type = foldr1 (op -->) ((map type_of args_fixed) @ [rty, qty, HOLogic.boolT]) val lhs = Library.foldl (op $) ((Free (Binding.name_of pcrel_name, relator_type)), args_fixed) val rhs = relcomp_op $ param_rel_fixed $ fixed_crel val definition_term = Logic.mk_equals (lhs, rhs) - val ((_, (_, def_thm)), lthy) = Specification.definition ((SOME (pcrel_name, SOME relator_type, NoSyn)), - ((Binding.empty, []), definition_term)) lthy + fun note_def lthy = + Specification.definition ((SOME (pcrel_name, SOME relator_type, NoSyn)), + ((Binding.empty, []), definition_term)) lthy |>> (snd #> snd); + fun raw_def lthy = + let + val ((_, rhs), prove) = Local_Defs.derived_def lthy true definition_term; + val ((_, (_, raw_th)), lthy) = lthy + |> Local_Theory.define ((Binding.concealed pcrel_name, NoSyn), ((Binding.empty, []), rhs)); + val th = prove lthy raw_th; + in + (th, lthy) + end + val (def_thm, lthy) = if #notes config then note_def lthy else raw_def lthy in (SOME def_thm, lthy) end @@ -96,10 +122,12 @@ error error_msg end in - fun define_pcr_cr_eq lthy pcr_rel_def = + fun define_pcr_cr_eq (config: config) lthy pcr_rel_def = let val lhs = (Thm.term_of o Thm.lhs_of) pcr_rel_def 
- val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type o List.last o binder_types o fastype_of) lhs + val qty_name = + (Binding.name o Long_Name.base_name o fst o dest_Type o + List.last o binder_types o fastype_of) lhs val args = (snd o strip_comb) lhs fun make_inst var ctxt = @@ -127,8 +155,8 @@ |> Conv.fconv_rule (Conv.arg_conv (Conv.rewr_conv eq_OO_meta)) |> mk_HOL_eq |> singleton (Variable.export lthy orig_lthy) - val ((_, [thm]), lthy) = - Local_Theory.note ((Binding.qualified true "pcr_cr_eq" qty_name, []), [thm]) lthy + val lthy = (#notes config ? (Local_Theory.note + ((Binding.qualified true "pcr_cr_eq" qty_name, []), [thm]) #> snd)) lthy in (thm, lthy) end @@ -229,18 +257,19 @@ |> Local_Theory.declaration {syntax = false, pervasive = true} (fn phi => Lifting_Info.init_restore_data bundle_name (phi_qinfo phi)) |> Bundle.bundle ((binding, [restore_lifting_att])) [] + |> pair binding end -fun setup_lifting_infr quot_thm opt_reflp_thm lthy = +fun setup_lifting_infr config quot_thm opt_reflp_thm lthy = let val _ = quot_thm_sanity_check lthy quot_thm val (_, qty) = quot_thm_rty_qty quot_thm - val (pcrel_def, lthy) = define_pcrel (quot_thm_crel quot_thm) lthy + val (pcrel_def, lthy) = define_pcrel config (quot_thm_crel quot_thm) lthy (**) val pcrel_def = Option.map (Morphism.thm (Local_Theory.target_morphism lthy)) pcrel_def (**) val (pcr_cr_eq, lthy) = case pcrel_def of - SOME pcrel_def => apfst SOME (define_pcr_cr_eq lthy pcrel_def) + SOME pcrel_def => apfst SOME (define_pcr_cr_eq config lthy pcrel_def) | NONE => (NONE, lthy) val pcr_info = case pcrel_def of SOME pcrel_def => SOME { pcrel_def = pcrel_def, pcr_cr_eq = the pcr_cr_eq } @@ -444,10 +473,10 @@ (dom_thm RS @{thm pcr_Domainp}) |> fold_Domainp_pcrel pcrel_def val thms = - [("domain", pcr_Domainp), - ("domain_par", pcr_Domainp_par), - ("domain_par_left_total", pcr_Domainp_par_left_total), - ("domain_eq", pcr_Domainp_eq)] + [("domain", [pcr_Domainp], @{attributes [transfer_domain_rule]}), 
+ ("domain_par", [pcr_Domainp_par], @{attributes [transfer_domain_rule]}), + ("domain_par_left_total", [pcr_Domainp_par_left_total], @{attributes [transfer_domain_rule]}), + ("domain_eq", [pcr_Domainp_eq], @{attributes [transfer_domain_rule]})] in thms end @@ -459,7 +488,7 @@ |> fold_Domainp_pcrel pcrel_def |> reduce_Domainp ctxt (Transfer.get_relator_domain ctxt) in - [("domain", thm)] + [("domain", [thm], @{attributes [transfer_domain_rule]})] end end @@ -470,6 +499,19 @@ fun get_Domainp_thm quot_thm = the (get_first (try(curry op RS quot_thm)) [@{thm eq_onp_to_Domainp}, @{thm Quotient_to_Domainp}]) +fun notes names thms = + let + val notes = + if names then map (fn (name, thms, attrs) => ((name, []), [(thms, attrs)])) thms + else map_filter (fn (_, thms, attrs) => if null attrs then NONE + else SOME ((Binding.empty, []), [(thms, attrs)])) thms + in + Local_Theory.notes notes #> snd + end + +fun map_thms map_name map_thm thms = + map (fn (name, thms, attr) => (map_name name, map map_thm thms, attr)) thms + (* Sets up the Lifting package by a quotient theorem. 
@@ -479,64 +521,55 @@ opt_par_thm - a parametricity theorem for R *) -fun setup_by_quotient quot_thm opt_reflp_thm opt_par_thm lthy = +fun setup_by_quotient (config: config) quot_thm opt_reflp_thm opt_par_thm lthy = let (**) val quot_thm = Morphism.thm (Local_Theory.target_morphism lthy) quot_thm (**) - val transfer_attr = Attrib.internal (K Transfer.transfer_add) - val transfer_domain_attr = Attrib.internal (K Transfer.transfer_domain_add) val (rty, qty) = quot_thm_rty_qty quot_thm val induct_attr = Attrib.internal (K (Induct.induct_type (fst (dest_Type qty)))) val qty_full_name = (fst o dest_Type) qty val qty_name = (Binding.name o Long_Name.base_name) qty_full_name fun qualify suffix = Binding.qualified true suffix qty_name - val lthy = case opt_reflp_thm of + val notes1 = case opt_reflp_thm of SOME reflp_thm => let val thms = - [("abs_induct", @{thm Quotient_total_abs_induct}, [induct_attr]), - ("abs_eq_iff", @{thm Quotient_total_abs_eq_iff}, [] )] + [("abs_induct", @{thms Quotient_total_abs_induct}, [induct_attr]), + ("abs_eq_iff", @{thms Quotient_total_abs_eq_iff}, [] )] in - lthy - |> fold (fn (name, thm, attr) => (snd oo Local_Theory.note) ((qualify name, attr), - [[quot_thm, reflp_thm] MRSL thm])) thms + map_thms qualify (fn thm => [quot_thm, reflp_thm] MRSL thm) thms end | NONE => let val thms = - [("abs_induct", @{thm Quotient_abs_induct}, [induct_attr])] + [("abs_induct", @{thms Quotient_abs_induct}, [induct_attr])] in - fold (fn (name, thm, attr) => (snd oo Local_Theory.note) ((qualify name, attr), - [quot_thm RS thm])) thms lthy + map_thms qualify (fn thm => quot_thm RS thm) thms end val dom_thm = get_Domainp_thm quot_thm - fun setup_transfer_rules_nonpar lthy = + fun setup_transfer_rules_nonpar notes = let - val lthy = + val notes1 = case opt_reflp_thm of SOME reflp_thm => let val thms = - [("id_abs_transfer",@{thm Quotient_id_abs_transfer}), - ("left_total", @{thm Quotient_left_total} ), - ("bi_total", @{thm Quotient_bi_total})] + 
[("id_abs_transfer",@{thms Quotient_id_abs_transfer}, @{attributes [transfer_rule]}), + ("left_total", @{thms Quotient_left_total}, @{attributes [transfer_rule]}), + ("bi_total", @{thms Quotient_bi_total}, @{attributes [transfer_rule]})] in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [[quot_thm, reflp_thm] MRSL thm])) thms lthy + map_thms qualify (fn thm => [quot_thm, reflp_thm] MRSL thm) thms end - | NONE => - lthy - |> (snd oo Local_Theory.note) ((qualify "domain", [transfer_domain_attr]), [dom_thm]) + | NONE => map_thms qualify I [("domain", [dom_thm], @{attributes [transfer_domain_rule]})] - val thms = - [("rel_eq_transfer", @{thm Quotient_rel_eq_transfer}), - ("right_unique", @{thm Quotient_right_unique} ), - ("right_total", @{thm Quotient_right_total} )] + val notes2 = map_thms qualify (fn thm => quot_thm RS thm) + [("rel_eq_transfer", @{thms Quotient_rel_eq_transfer}, @{attributes [transfer_rule]}), + ("right_unique", @{thms Quotient_right_unique}, @{attributes [transfer_rule]}), + ("right_total", @{thms Quotient_right_total}, @{attributes [transfer_rule]})] in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [quot_thm RS thm])) thms lthy + notes2 @ notes1 @ notes end fun generate_parametric_rel_eq lthy transfer_rule opt_param_thm = @@ -551,11 +584,11 @@ error error_msg end - fun setup_transfer_rules_par lthy = + fun setup_transfer_rules_par lthy notes = let val pcrel_info = (the (get_pcrel_info lthy qty_full_name)) val pcrel_def = #pcrel_def pcrel_info - val lthy = + val notes1 = case opt_reflp_thm of SOME reflp_thm => let @@ -568,22 +601,17 @@ val left_total = parametrize_class_constraint lthy pcrel_def left_total val bi_total = parametrize_class_constraint lthy pcrel_def bi_total val thms = - [("id_abs_transfer",id_abs_transfer), - ("left_total", left_total ), - ("bi_total", bi_total )] + [("id_abs_transfer", [id_abs_transfer], @{attributes [transfer_rule]}), + 
("left_total", [left_total], @{attributes [transfer_rule]}), + ("bi_total", [bi_total], @{attributes [transfer_rule]})] in - lthy - |> fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [thm])) thms - |> fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_domain_attr]), - [thm])) domain_thms + map_thms qualify I thms @ map_thms qualify I domain_thms end | NONE => let val thms = parametrize_domain dom_thm pcrel_info lthy in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_domain_attr]), - [thm])) thms lthy + map_thms qualify I thms end val rel_eq_transfer = generate_parametric_rel_eq lthy @@ -593,22 +621,25 @@ (quot_thm RS @{thm Quotient_right_unique}) val right_total = parametrize_class_constraint lthy pcrel_def (quot_thm RS @{thm Quotient_right_total}) - val thms = - [("rel_eq_transfer", rel_eq_transfer), - ("right_unique", right_unique ), - ("right_total", right_total )] + val notes2 = map_thms qualify I + [("rel_eq_transfer", [rel_eq_transfer], @{attributes [transfer_rule]}), + ("right_unique", [right_unique], @{attributes [transfer_rule]}), + ("right_total", [right_total], @{attributes [transfer_rule]})] in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [thm])) thms lthy + notes2 @ notes1 @ notes end - fun setup_transfer_rules lthy = - if is_some (get_pcrel_info lthy qty_full_name) then setup_transfer_rules_par lthy - else setup_transfer_rules_nonpar lthy + fun setup_rules lthy = + let + val thms = if is_some (get_pcrel_info lthy qty_full_name) + then setup_transfer_rules_par lthy notes1 else setup_transfer_rules_nonpar notes1 + in + notes (#notes config) thms lthy + end in lthy - |> setup_lifting_infr quot_thm opt_reflp_thm - |> setup_transfer_rules + |> setup_lifting_infr config quot_thm opt_reflp_thm + ||> setup_rules end (* @@ -619,12 +650,10 @@ typedef_thm - a typedef theorem (type_definition Rep Abs S) *) -fun 
setup_by_typedef_thm typedef_thm lthy = +fun setup_by_typedef_thm config typedef_thm lthy = let - val transfer_attr = Attrib.internal (K Transfer.transfer_add) - val transfer_domain_attr = Attrib.internal (K Transfer.transfer_domain_add) val (_ $ rep_fun $ _ $ typedef_set) = (HOLogic.dest_Trueprop o Thm.prop_of) typedef_thm - val (T_def, lthy) = define_crel rep_fun lthy + val (T_def, lthy) = define_crel config rep_fun lthy (**) val T_def = Morphism.thm (Local_Theory.target_morphism lthy) T_def (**) @@ -646,40 +675,37 @@ | _ => NONE val dom_thm = get_Domainp_thm quot_thm - fun setup_transfer_rules_nonpar lthy = + fun setup_transfer_rules_nonpar notes = let - val lthy = + val notes1 = case opt_reflp_thm of SOME reflp_thm => let val thms = - [("id_abs_transfer",@{thm Quotient_id_abs_transfer}), - ("left_total", @{thm Quotient_left_total} ), - ("bi_total", @{thm Quotient_bi_total} )] + [("id_abs_transfer",@{thms Quotient_id_abs_transfer}, @{attributes [transfer_rule]}), + ("left_total", @{thms Quotient_left_total}, @{attributes [transfer_rule]}), + ("bi_total", @{thms Quotient_bi_total}, @{attributes [transfer_rule]})] in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [[quot_thm, reflp_thm] MRSL thm])) thms lthy + map_thms qualify (fn thm => [quot_thm, reflp_thm] MRSL thm) thms end | NONE => - lthy - |> (snd oo Local_Theory.note) ((qualify "domain", [transfer_domain_attr]), [dom_thm]) + map_thms qualify I [("domain", [dom_thm], @{attributes [transfer_domain_rule]})] val thms = - [("rep_transfer", @{thm typedef_rep_transfer}), - ("left_unique", @{thm typedef_left_unique} ), - ("right_unique", @{thm typedef_right_unique}), - ("right_total", @{thm typedef_right_total} ), - ("bi_unique", @{thm typedef_bi_unique} )] - in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [[typedef_thm, T_def] MRSL thm])) thms lthy + [("rep_transfer", @{thms typedef_rep_transfer}, @{attributes [transfer_rule]}), 
+ ("left_unique", @{thms typedef_left_unique}, @{attributes [transfer_rule]}), + ("right_unique", @{thms typedef_right_unique}, @{attributes [transfer_rule]}), + ("right_total", @{thms typedef_right_total}, @{attributes [transfer_rule]}), + ("bi_unique", @{thms typedef_bi_unique}, @{attributes [transfer_rule]})] + in + map_thms qualify (fn thm => [typedef_thm, T_def] MRSL thm) thms @ notes1 @ notes end - fun setup_transfer_rules_par lthy = + fun setup_transfer_rules_par lthy notes = let val pcrel_info = (the (get_pcrel_info lthy qty_full_name)) val pcrel_def = #pcrel_def pcrel_info - val lthy = + val notes1 = case opt_reflp_thm of SOME reflp_thm => let @@ -692,48 +718,46 @@ (Lifting_Term.parametrize_transfer_rule lthy ([quot_thm, reflp_thm] MRSL @{thm Quotient_id_abs_transfer})) val thms = - [("left_total", left_total ), - ("bi_total", bi_total ), - ("id_abs_transfer",id_abs_transfer)] + [("left_total", [left_total], @{attributes [transfer_rule]}), + ("bi_total", [bi_total], @{attributes [transfer_rule]}), + ("id_abs_transfer",[id_abs_transfer], @{attributes [transfer_rule]})] in - lthy - |> fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [thm])) thms - |> fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_domain_attr]), - [thm])) domain_thms + map_thms qualify I thms @ map_thms qualify I domain_thms end | NONE => let val thms = parametrize_domain dom_thm pcrel_info lthy in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_domain_attr]), - [thm])) thms lthy + map_thms qualify I thms end - val thms = - ("rep_transfer", generate_parametric_id lthy rty - (Lifting_Term.parametrize_transfer_rule lthy ([typedef_thm, T_def] MRSL @{thm typedef_rep_transfer}))) - :: - (map_snd (fn thm => parametrize_class_constraint lthy pcrel_def ([typedef_thm, T_def] MRSL thm)) - [("left_unique", @{thm typedef_left_unique} ), - ("right_unique", @{thm typedef_right_unique}), - ("bi_unique", @{thm 
typedef_bi_unique} ), - ("right_total", @{thm typedef_right_total} )]) + val notes2 = map_thms qualify (fn thm => generate_parametric_id lthy rty + (Lifting_Term.parametrize_transfer_rule lthy ([typedef_thm, T_def] MRSL thm))) + [("rep_transfer", @{thms typedef_rep_transfer}, @{attributes [transfer_rule]})]; + val notes3 = + map_thms qualify + (fn thm => parametrize_class_constraint lthy pcrel_def ([typedef_thm, T_def] MRSL thm)) + [("left_unique", @{thms typedef_left_unique}, @{attributes [transfer_rule]}), + ("right_unique", @{thms typedef_right_unique},@{attributes [transfer_rule]}), + ("bi_unique", @{thms typedef_bi_unique}, @{attributes [transfer_rule]}), + ("right_total", @{thms typedef_right_total}, @{attributes [transfer_rule]})] in - fold (fn (name, thm) => (snd oo Local_Theory.note) ((qualify name, [transfer_attr]), - [thm])) thms lthy + notes3 @ notes2 @ notes1 @ notes end - fun setup_transfer_rules lthy = - if is_some (get_pcrel_info lthy qty_full_name) then setup_transfer_rules_par lthy - else setup_transfer_rules_nonpar lthy + val notes1 = [(Binding.prefix_name "Quotient_" qty_name, [quot_thm], [])] + fun setup_rules lthy = + let + val thms = if is_some (get_pcrel_info lthy qty_full_name) + then setup_transfer_rules_par lthy notes1 else setup_transfer_rules_nonpar notes1 + in + notes (#notes config) thms lthy + end in lthy - |> (snd oo Local_Theory.note) ((Binding.prefix_name "Quotient_" qty_name, []), - [quot_thm]) - |> setup_lifting_infr quot_thm opt_reflp_thm - |> setup_transfer_rules + |> setup_lifting_infr config quot_thm opt_reflp_thm + ||> setup_rules end fun setup_lifting_cmd xthm opt_reflp_xthm opt_par_xthm lthy = @@ -755,7 +779,7 @@ fun check_qty qty = if not (is_Type qty) then error "The abstract type must be a type constructor." 
else () - + fun setup_quotient () = let val opt_reflp_thm = Option.map (singleton (Attrib.eval_thms lthy)) opt_reflp_xthm @@ -763,7 +787,7 @@ val opt_par_thm = Option.map (singleton (Attrib.eval_thms lthy)) opt_par_xthm val _ = check_qty (snd (quot_thm_rty_qty input_thm)) in - setup_by_quotient input_thm opt_reflp_thm opt_par_thm lthy + setup_by_quotient default_config input_thm opt_reflp_thm opt_par_thm lthy |> snd end fun setup_typedef () = @@ -776,7 +800,7 @@ | NONE => ( case opt_par_xthm of SOME _ => error "The parametricity theorem cannot be specified if the type_definition theorem is used." - | NONE => setup_by_typedef_thm input_thm lthy + | NONE => setup_by_typedef_thm default_config input_thm lthy |> snd ) end in @@ -969,6 +993,9 @@ | _ => error "The provided bundle is not a lifting bundle." end +fun pointer_of_bundle_binding ctxt binding = Name_Space.full_name (Name_Space.naming_of + (Context.Theory (Proof_Context.theory_of ctxt))) binding + fun lifting_forget pointer lthy = let fun get_transfer_rules_to_delete qinfo ctxt = diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Lifting/lifting_term.ML --- a/src/HOL/Tools/Lifting/lifting_term.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Lifting/lifting_term.ML Mon May 25 22:11:43 2015 +0200 @@ -11,6 +11,16 @@ exception MERGE_TRANSFER_REL of Pretty.T exception CHECK_RTY of typ * typ + type 'a fold_quot_thm = { constr: typ -> thm * 'a -> thm * 'a, lift: typ -> thm * 'a -> thm * 'a, + comp_lift: typ -> thm * 'a -> thm * 'a } + + type quotients = Lifting_Info.quotient Symtab.table + + val force_qty_type: Proof.context -> typ -> thm -> thm + + val prove_schematic_quot_thm: 'a fold_quot_thm -> quotients -> Proof.context -> + typ * typ -> 'a -> thm * 'a + val instantiate_rtys: Proof.context -> typ * typ -> typ * typ val prove_quot_thm: Proof.context -> typ * typ -> thm @@ -40,6 +50,8 @@ exception MERGE_TRANSFER_REL of Pretty.T exception CHECK_RTY of typ * typ +type quotients = Lifting_Info.quotient 
Symtab.table + fun match ctxt err ty_pat ty = let val thy = Proof_Context.theory_of ctxt @@ -61,43 +73,43 @@ Pretty.str "don't match."]) end -fun get_quot_data ctxt s = - case Lifting_Info.lookup_quotients ctxt s of +fun get_quot_data (quotients: quotients) s = + case Symtab.lookup quotients s of SOME qdata => qdata | NONE => raise QUOT_THM_INTERNAL (Pretty.block [Pretty.str ("No quotient type " ^ quote s), Pretty.brk 1, Pretty.str "found."]) -fun get_quot_thm ctxt s = +fun get_quot_thm quotients ctxt s = let val thy = Proof_Context.theory_of ctxt in - Thm.transfer thy (#quot_thm (get_quot_data ctxt s)) + Thm.transfer thy (#quot_thm (get_quot_data quotients s)) end -fun has_pcrel_info ctxt s = is_some (#pcr_info (get_quot_data ctxt s)) +fun has_pcrel_info quotients s = is_some (#pcr_info (get_quot_data quotients s)) -fun get_pcrel_info ctxt s = - case #pcr_info (get_quot_data ctxt s) of +fun get_pcrel_info quotients s = + case #pcr_info (get_quot_data quotients s) of SOME pcr_info => pcr_info | NONE => raise QUOT_THM_INTERNAL (Pretty.block [Pretty.str ("No parametrized correspondce relation for " ^ quote s), Pretty.brk 1, Pretty.str "found."]) -fun get_pcrel_def ctxt s = +fun get_pcrel_def quotients ctxt s = let val thy = Proof_Context.theory_of ctxt in - Thm.transfer thy (#pcrel_def (get_pcrel_info ctxt s)) + Thm.transfer thy (#pcrel_def (get_pcrel_info quotients s)) end -fun get_pcr_cr_eq ctxt s = +fun get_pcr_cr_eq quotients ctxt s = let val thy = Proof_Context.theory_of ctxt in - Thm.transfer thy (#pcr_cr_eq (get_pcrel_info ctxt s)) + Thm.transfer thy (#pcr_cr_eq (get_pcrel_info quotients s)) end fun get_rel_quot_thm ctxt s = @@ -188,11 +200,12 @@ rel_quot_thm_prems end -fun rty_is_TVar ctxt qty = (is_TVar o fst o quot_thm_rty_qty o get_quot_thm ctxt o Tname) qty +fun gen_rty_is_TVar quotients ctxt qty = qty |> Tname |> get_quot_thm quotients ctxt |> + quot_thm_rty_qty |> fst |> is_TVar -fun instantiate_rtys ctxt (rty, (qty as Type (qty_name, _))) = +fun 
gen_instantiate_rtys quotients ctxt (rty, (qty as Type (qty_name, _))) = let - val quot_thm = get_quot_thm ctxt qty_name + val quot_thm = get_quot_thm quotients ctxt qty_name val (rty_pat, qty_pat) = quot_thm_rty_qty quot_thm fun inst_rty (Type (s, tys), Type (s', tys')) = @@ -216,27 +229,39 @@ in (inst_rty (rty_pat, rty), Envir.subst_type qtyenv rty_pat) end - | instantiate_rtys _ _ = error "instantiate_rtys: not Type" + | gen_instantiate_rtys _ _ _ = error "gen_instantiate_rtys: not Type" + +fun instantiate_rtys ctxt (rty, qty) = + gen_instantiate_rtys (Lifting_Info.get_quotients ctxt) ctxt (rty, qty) -fun prove_schematic_quot_thm ctxt (rty, qty) = +type 'a fold_quot_thm = { constr: typ -> thm * 'a -> thm * 'a, lift: typ -> thm * 'a -> thm * 'a, + comp_lift: typ -> thm * 'a -> thm * 'a } + +fun prove_schematic_quot_thm (actions: 'a fold_quot_thm) quotients ctxt (rty, qty) fold_val = let fun lifting_step (rty, qty) = let - val (rty', rtyq) = instantiate_rtys ctxt (rty, qty) - val (rty's, rtyqs) = if rty_is_TVar ctxt qty then ([rty'],[rtyq]) + val (rty', rtyq) = gen_instantiate_rtys quotients ctxt (rty, qty) + val (rty's, rtyqs) = if gen_rty_is_TVar quotients ctxt qty then ([rty'],[rtyq]) else (Targs rty', Targs rtyq) - val args = map (prove_schematic_quot_thm ctxt) (rty's ~~ rtyqs) + val (args, fold_val) = + fold_map (prove_schematic_quot_thm actions quotients ctxt) (rty's ~~ rtyqs) fold_val in if forall is_id_quot args then - get_quot_thm ctxt (Tname qty) + let + val quot_thm = get_quot_thm quotients ctxt (Tname qty) + in + #lift actions qty (quot_thm, fold_val) + end else let - val quot_thm = get_quot_thm ctxt (Tname qty) - val rel_quot_thm = if rty_is_TVar ctxt qty then the_single args else + val quot_thm = get_quot_thm quotients ctxt (Tname qty) + val rel_quot_thm = if gen_rty_is_TVar quotients ctxt qty then the_single args else args MRSL (get_rel_quot_thm ctxt (Tname rty)) + val comp_quot_thm = [rel_quot_thm, quot_thm] MRSL @{thm Quotient_compose} in - 
[rel_quot_thm, quot_thm] MRSL @{thm Quotient_compose} + #comp_lift actions qty (comp_quot_thm, fold_val) end end in @@ -245,18 +270,24 @@ if s = s' then let - val args = map (prove_schematic_quot_thm ctxt) (zip_Tvars ctxt s tys tys') + val (args, fold_val) = + fold_map (prove_schematic_quot_thm actions quotients ctxt) + (zip_Tvars ctxt s tys tys') fold_val in if forall is_id_quot args then - @{thm identity_quotient} + (@{thm identity_quotient}, fold_val) else - args MRSL (get_rel_quot_thm ctxt s) + let + val quot_thm = args MRSL (get_rel_quot_thm ctxt s) + in + #constr actions qty (quot_thm, fold_val) + end end else lifting_step (rty, qty) | (_, Type (s', tys')) => - (case try (get_quot_thm ctxt) s' of + (case try (get_quot_thm quotients ctxt) s' of SOME quot_thm => let val rty_pat = (fst o quot_thm_rty_qty) quot_thm @@ -267,9 +298,10 @@ let val rty_pat = Type (s', map (fn _ => TFree ("a",[])) tys') in - prove_schematic_quot_thm ctxt (rty_pat, qty) + prove_schematic_quot_thm actions quotients ctxt (rty_pat, qty) fold_val end) - | _ => @{thm identity_quotient}) + | _ => (@{thm identity_quotient}, fold_val) + ) end handle QUOT_THM_INTERNAL pretty_msg => raise QUOT_THM (rty, qty, pretty_msg) @@ -302,14 +334,20 @@ qty, a representation type of the theorem is an instance of rty in general. 
*) -fun prove_quot_thm ctxt (rty, qty) = - let - val schematic_quot_thm = prove_schematic_quot_thm ctxt (rty, qty) - val quot_thm = force_qty_type ctxt qty schematic_quot_thm - val _ = check_rty_type ctxt rty quot_thm - in - quot_thm - end + +local + val id_actions = { constr = K I, lift = K I, comp_lift = K I } +in + fun prove_quot_thm ctxt (rty, qty) = + let + val quotients = Lifting_Info.get_quotients ctxt + val (schematic_quot_thm, _) = prove_schematic_quot_thm id_actions quotients ctxt (rty, qty) () + val quot_thm = force_qty_type ctxt qty schematic_quot_thm + val _ = check_rty_type ctxt rty quot_thm + in + quot_thm + end +end (* Computes the composed abstraction function for rty and qty. @@ -449,17 +487,7 @@ fun rewrs_imp rules = first_imp (map rewr_imp rules) in - (* - ctm - of the form "[POS|NEG] (par_R OO T) t f) ?X", where par_R is a parametricity transfer - relation for t and T is a transfer relation between t and f, which consists only from - parametrized transfer relations (i.e., pcr_?) and equalities op=. POS or NEG encodes - co-variance or contra-variance. - - The function merges par_R OO T using definitions of parametrized correspondence relations - (e.g., (rel_S R) OO (pcr_T op=) --> pcr_T R using the definition pcr_T R = (rel_S R) OO cr_T). 
- *) - - fun merge_transfer_relations ctxt ctm = + fun gen_merge_transfer_relations quotients ctxt ctm = let val ctm = Thm.dest_arg ctm val tm = Thm.term_of ctm @@ -507,19 +535,21 @@ in case distr_rule of NONE => raise MERGE_TRANSFER_REL (cannot_merge_error_msg ()) - | SOME distr_rule => (map (merge_transfer_relations ctxt) (cprems_of distr_rule)) + | SOME distr_rule => (map (gen_merge_transfer_relations quotients ctxt) + (cprems_of distr_rule)) MRSL distr_rule end else let - val pcrel_def = get_pcrel_def ctxt ((fst o dest_Type) qty) + val pcrel_def = get_pcrel_def quotients ctxt ((fst o dest_Type) qty) val pcrel_const = (head_of o fst o Logic.dest_equals o Thm.prop_of) pcrel_def in if same_constants pcrel_const (head_of trans_rel) then let val unfolded_ctm = Thm.rhs_of (Conv.arg1_conv (Conv.arg_conv (Conv.rewr_conv pcrel_def)) ctm) val distr_rule = rewrs_imp @{thms POS_pcr_rule NEG_pcr_rule} unfolded_ctm - val result = (map (merge_transfer_relations ctxt) (cprems_of distr_rule)) MRSL distr_rule + val result = (map (gen_merge_transfer_relations quotients ctxt) + (cprems_of distr_rule)) MRSL distr_rule val fold_pcr_rel = Conv.rewr_conv (Thm.symmetric pcrel_def) in Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.combination_conv @@ -531,17 +561,22 @@ end end handle QUOT_THM_INTERNAL pretty_msg => raise MERGE_TRANSFER_REL pretty_msg + + (* + ctm - of the form "[POS|NEG] (par_R OO T) t f) ?X", where par_R is a parametricity transfer + relation for t and T is a transfer relation between t and f, which consists only from + parametrized transfer relations (i.e., pcr_?) and equalities op=. POS or NEG encodes + co-variance or contra-variance. + + The function merges par_R OO T using definitions of parametrized correspondence relations + (e.g., (rel_S R) OO (pcr_T op=) --> pcr_T R using the definition pcr_T R = (rel_S R) OO cr_T). 
+ *) + + fun merge_transfer_relations ctxt ctm = gen_merge_transfer_relations + (Lifting_Info.get_quotients ctxt) ctxt ctm end -(* - It replaces cr_T by pcr_T op= in the transfer relation. For composed - abstract types, it replaces T_rel R OO cr_T by pcr_T R. If the parametrized - correspondce relation does not exist, the original relation is kept. - - thm - a transfer rule -*) - -fun parametrize_transfer_rule ctxt thm = +fun gen_parametrize_transfer_rule quotients ctxt thm = let fun parametrize_relation_conv ctm = let @@ -558,21 +593,21 @@ val q = (fst o dest_Type) qty in let - val (rty', rtyq) = instantiate_rtys ctxt (rty, qty) - val (rty's, rtyqs) = if rty_is_TVar ctxt qty then ([rty'],[rtyq]) + val (rty', rtyq) = gen_instantiate_rtys quotients ctxt (rty, qty) + val (rty's, rtyqs) = if gen_rty_is_TVar quotients ctxt qty then ([rty'],[rtyq]) else (Targs rty', Targs rtyq) in if forall op= (rty's ~~ rtyqs) then let - val pcr_cr_eq = (Thm.symmetric o mk_meta_eq) (get_pcr_cr_eq ctxt q) + val pcr_cr_eq = (Thm.symmetric o mk_meta_eq) (get_pcr_cr_eq quotients ctxt q) in Conv.rewr_conv pcr_cr_eq ctm end handle QUOT_THM_INTERNAL _ => Conv.all_conv ctm else - if has_pcrel_info ctxt q then + if has_pcrel_info quotients q then let - val pcrel_def = Thm.symmetric (get_pcrel_def ctxt q) + val pcrel_def = Thm.symmetric (get_pcrel_def quotients ctxt q) in (Conv.rewr_conv pcrel_def then_conv all_args_conv parametrize_relation_conv) ctm end @@ -584,4 +619,16 @@ in Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.fun2_conv parametrize_relation_conv)) thm end + +(* + It replaces cr_T by pcr_T op= in the transfer relation. For composed + abstract types, it replaces T_rel R OO cr_T by pcr_T R. If the parametrized + correspondce relation does not exist, the original relation is kept. 
+ + thm - a transfer rule +*) + +fun parametrize_transfer_rule ctxt thm = + gen_parametrize_transfer_rule (Lifting_Info.get_quotients ctxt) ctxt thm + end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Lifting/lifting_util.ML --- a/src/HOL/Tools/Lifting/lifting_util.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Lifting/lifting_util.ML Mon May 25 22:11:43 2015 +0200 @@ -16,6 +16,8 @@ val quot_thm_rep: thm -> term val quot_thm_crel: thm -> term val quot_thm_rty_qty: thm -> typ * typ + val Quotient_conv: conv -> conv -> conv -> conv -> conv + val Quotient_R_conv: conv -> conv val undisch: thm -> thm val undisch_all: thm -> thm @@ -32,6 +34,9 @@ val mk_HOL_eq: thm -> thm val safe_HOL_meta_eq: thm -> thm val map_interrupt: ('a -> 'b option) -> 'a list -> 'b list option + val instT_thm: Proof.context -> Type.tyenv -> thm -> thm + val instT_morphism: Proof.context -> Type.tyenv -> morphism + val conceal_naming_result: (local_theory -> 'a * local_theory) -> local_theory -> 'a * local_theory end @@ -80,6 +85,11 @@ (domain_type abs_type, range_type abs_type) end +fun Quotient_conv R_conv Abs_conv Rep_conv T_conv = Conv.combination_conv (Conv.combination_conv + (Conv.combination_conv (Conv.arg_conv R_conv) Abs_conv) Rep_conv) T_conv; + +fun Quotient_R_conv R_conv = Quotient_conv R_conv Conv.all_conv Conv.all_conv Conv.all_conv; + fun undisch thm = let val assm = Thm.cprem_of thm 1 @@ -132,4 +142,23 @@ map_interrupt' f l [] end +fun instT_thm ctxt env = + let + val cinst = env |> Vartab.dest + |> map (fn (x, (S, T)) => (Thm.ctyp_of ctxt (TVar (x, S)), Thm.ctyp_of ctxt T)); + in + Thm.instantiate (cinst, []) + end; + +fun instT_morphism ctxt env = + Morphism.morphism "Lifting_Util.instT" + {binding = [], + typ = [Envir.subst_type env], + term = [Envir.subst_term_types env], + fact = [map (instT_thm ctxt env)]}; + +fun conceal_naming_result f lthy = + let val old_lthy = lthy + in lthy |> Proof_Context.concealed |> f ||> Proof_Context.restore_naming old_lthy end; + 
end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Nitpick/nitpick_commands.ML --- a/src/HOL/Tools/Nitpick/nitpick_commands.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Nitpick/nitpick_commands.ML Mon May 25 22:11:43 2015 +0200 @@ -377,7 +377,7 @@ Outer_Syntax.command @{command_keyword nitpick} "try to find a counterexample for a given subgoal using Nitpick" (parse_params -- Scan.optional Parse.nat 1 >> (fn (params, i) => - Toplevel.keep (fn state => + Toplevel.keep_proof (fn state => ignore (pick_nits params Normal i (Toplevel.proof_position_of state) (Toplevel.proof_of state))))) diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Nitpick/nitpick_model.ML --- a/src/HOL/Tools/Nitpick/nitpick_model.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Nitpick/nitpick_model.ML Mon May 25 22:11:43 2015 +0200 @@ -879,8 +879,11 @@ t1 = t2 end -fun pretty_term_auto_global ctxt t = +fun pretty_term_auto_global ctxt t0 = let + val t = map_aterms (fn t as Const (s, _) => + if s = irrelevant orelse s = unknown then Term.dummy else t | t => t) t0 + fun add_fake_const s = Sign.declare_const_global ((Binding.name s, @{typ 'a}), NoSyn) #> #2 diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Quotient/quotient_type.ML --- a/src/HOL/Tools/Quotient/quotient_type.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Quotient/quotient_type.ML Mon May 25 22:11:43 2015 +0200 @@ -125,9 +125,11 @@ | Const (@{const_name part_equivp}, _) $ _ => (NONE, [quot3_thm, T_def] MRSL @{thm Quotient3_to_Quotient}) | _ => error "unsupported equivalence theorem") + val config = { notes = true } in lthy' - |> Lifting_Setup.setup_by_quotient quot_thm reflp_thm opt_par_thm + |> Lifting_Setup.setup_by_quotient config quot_thm reflp_thm opt_par_thm + |> snd |> (snd oo Local_Theory.note) ((quotient_thm_name, []), [quot_thm]) end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/SMT/cvc4_proof_parse.ML --- a/src/HOL/Tools/SMT/cvc4_proof_parse.ML Sat May 23 22:13:24 2015 +0200 +++ 
b/src/HOL/Tools/SMT/cvc4_proof_parse.ML Mon May 25 22:11:43 2015 +0200 @@ -15,29 +15,32 @@ struct fun parse_proof ({ll_defs, assms, ...} : SMT_Translate.replay_data) xfacts prems _ output = - let - val num_ll_defs = length ll_defs + if exists (String.isPrefix "(error \"This build of CVC4 doesn't have proof support") output then + {outcome = NONE, fact_ids = NONE, atp_proof = K []} + else + let + val num_ll_defs = length ll_defs - val id_of_index = Integer.add num_ll_defs - val index_of_id = Integer.add (~ num_ll_defs) + val id_of_index = Integer.add num_ll_defs + val index_of_id = Integer.add (~ num_ll_defs) - val used_assert_ids = map_filter (try SMTLIB_Interface.assert_index_of_name) output - val used_assm_js = - map_filter (fn id => let val i = index_of_id id in if i >= 0 then SOME i else NONE end) - used_assert_ids + val used_assert_ids = map_filter (try SMTLIB_Interface.assert_index_of_name) output + val used_assm_js = + map_filter (fn id => let val i = index_of_id id in if i >= 0 then SOME i else NONE end) + used_assert_ids - val conjecture_i = 0 - val prems_i = conjecture_i + 1 - val num_prems = length prems - val facts_i = prems_i + num_prems + val conjecture_i = 0 + val prems_i = conjecture_i + 1 + val num_prems = length prems + val facts_i = prems_i + num_prems - val fact_ids' = - map_filter (fn j => - let val (i, _) = nth assms j in - try (apsnd (nth xfacts)) (id_of_index j, i - facts_i) - end) used_assm_js - in - {outcome = NONE, fact_ids = fact_ids', atp_proof = fn () => []} - end + val fact_ids' = + map_filter (fn j => + let val (i, _) = nth assms j in + try (apsnd (nth xfacts)) (id_of_index j, i - facts_i) + end) used_assm_js + in + {outcome = NONE, fact_ids = SOME fact_ids', atp_proof = K []} + end end; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/SMT/smt_solver.ML --- a/src/HOL/Tools/SMT/smt_solver.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/SMT/smt_solver.ML Mon May 25 22:11:43 2015 +0200 @@ -11,7 +11,7 @@ type parsed_proof = 
{outcome: SMT_Failure.failure option, - fact_ids: (int * ((string * ATP_Problem_Generate.stature) * thm)) list, + fact_ids: (int * ((string * ATP_Problem_Generate.stature) * thm)) list option, atp_proof: unit -> (term, string) ATP_Proof.atp_step list} type solver_config = @@ -140,7 +140,7 @@ type parsed_proof = {outcome: SMT_Failure.failure option, - fact_ids: (int * ((string * ATP_Problem_Generate.stature) * thm)) list, + fact_ids: (int * ((string * ATP_Problem_Generate.stature) * thm)) list option, atp_proof: unit -> (term, string) ATP_Proof.atp_step list} type solver_config = @@ -195,7 +195,7 @@ (Unsat, lines) => (case parse_proof0 of SOME pp => pp outer_ctxt replay_data xfacts prems concl lines - | NONE => {outcome = NONE, fact_ids = [], atp_proof = K []}) + | NONE => {outcome = NONE, fact_ids = NONE, atp_proof = K []}) | (result, _) => raise SMT_Failure.SMT (SMT_Failure.Counterexample (result = Sat))) fun replay outcome replay0 oracle outer_ctxt @@ -270,7 +270,7 @@ in parse_proof ctxt replay_data xfacts (map Thm.prop_of prems) (Thm.term_of concl) output end - handle SMT_Failure.SMT fail => {outcome = SOME fail, fact_ids = [], atp_proof = K []} + handle SMT_Failure.SMT fail => {outcome = SOME fail, fact_ids = NONE, atp_proof = K []} (* SMT tactic *) diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/SMT/smt_systems.ML --- a/src/HOL/Tools/SMT/smt_systems.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/SMT/smt_systems.ML Mon May 25 22:11:43 2015 +0200 @@ -27,10 +27,13 @@ " failed -- enable tracing using the " ^ quote (Config.name_of SMT_Config.trace) ^ " option for details")) +fun is_blank_or_error_line "" = true + | is_blank_or_error_line s = String.isPrefix "(error " s + fun on_first_line test_outcome solver_name lines = let val split_first = (fn [] => ("", []) | l :: ls => (l, ls)) - val (l, ls) = split_first (snd (take_prefix (curry (op =) "") lines)) + val (l, ls) = split_first (snd (take_prefix is_blank_or_error_line lines)) in (test_outcome 
solver_name l, ls) end fun on_first_non_unsupported_line test_outcome solver_name lines = @@ -59,7 +62,6 @@ end - (* CVC4 *) val cvc4_extensions = Attrib.setup_config_bool @{binding cvc4_extensions} (K false) @@ -68,6 +70,7 @@ fun cvc4_options ctxt = [ "--random-seed=" ^ string_of_int (Config.get ctxt SMT_Config.random_seed), "--lang=smt2", + "--continued-execution", "--tlimit", string_of_int (Real.ceil (1000.0 * Config.get ctxt SMT_Config.timeout))] fun select_class ctxt = diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/SMT/verit_proof_parse.ML --- a/src/HOL/Tools/SMT/verit_proof_parse.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/SMT/verit_proof_parse.ML Mon May 25 22:11:43 2015 +0200 @@ -70,7 +70,7 @@ val fact_helper_ids' = map (apsnd (ATP_Util.short_thm_name ctxt)) helper_ids' @ map (apsnd (fst o fst)) fact_ids' in - {outcome = NONE, fact_ids = fact_ids', + {outcome = NONE, fact_ids = SOME fact_ids', atp_proof = fn () => atp_proof_of_veriT_proof ctxt ll_defs rewrite_rules prems concl fact_helper_ts prem_ids conjecture_id fact_helper_ids' steps} end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/SMT/z3_replay.ML --- a/src/HOL/Tools/SMT/z3_replay.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/SMT/z3_replay.ML Mon May 25 22:11:43 2015 +0200 @@ -206,7 +206,7 @@ val fact_helper_ids' = map (apsnd (ATP_Util.short_thm_name ctxt)) helper_ids' @ map (apsnd (fst o fst)) fact_ids' in - {outcome = NONE, fact_ids = fact_ids', + {outcome = NONE, fact_ids = SOME fact_ids', atp_proof = fn () => Z3_Isar.atp_proof_of_z3_proof ctxt ll_defs rewrite_rules prems concl fact_helper_ts prem_ids conjecture_id fact_helper_ids' steps} end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Sledgehammer/sledgehammer_commands.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_commands.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_commands.ML Mon May 25 22:11:43 2015 +0200 @@ -296,7 +296,7 @@ val default_learn_prover_timeout = 2.0 -fun 
hammer_away override_params subcommand opt_i fact_override state0 = +fun hammer_away override_params output_result subcommand opt_i fact_override state0 = let (* We generally want chained facts to be picked up by the relevance filter, because it can then give it a proper name, which is useful for a variety of reasons (minimization, Isar proofs, @@ -324,8 +324,9 @@ in if subcommand = runN then let val i = the_default 1 opt_i in - ignore (run_sledgehammer (get_params Normal thy override_params) Normal NONE i fact_override - state) + ignore + (run_sledgehammer + (get_params Normal thy override_params) Normal output_result i fact_override state) end else if subcommand = messagesN then messages opt_i @@ -358,19 +359,9 @@ error ("Unknown subcommand: " ^ quote subcommand ^ ".") end -fun sledgehammer_trans (((subcommand, params), fact_override), opt_i) = - Toplevel.keep (hammer_away params subcommand opt_i fact_override o Toplevel.proof_of) - fun string_of_raw_param (key, values) = key ^ (case implode_param values of "" => "" | value => " = " ^ value) -fun sledgehammer_params_trans params = - Toplevel.theory (fold set_default_raw_param params #> tap (fn thy => - writeln ("Default parameters for Sledgehammer:\n" ^ - (case rev (default_raw_params Normal thy) of - [] => "none" - | params => params |> map string_of_raw_param |> sort_strings |> cat_lines)))) - val parse_query_bang = @{keyword "?"} || @{keyword "!"} || @{keyword "!!"} val parse_key = Scan.repeat1 (Parse.typ_group || parse_query_bang) >> implode_param val parse_value = Scan.repeat1 (Parse.xname || Parse.float_number || parse_query_bang) @@ -388,12 +379,20 @@ val _ = Outer_Syntax.command @{command_keyword sledgehammer} "search for first-order proof using automatic theorem provers" - ((Scan.optional Parse.name runN -- parse_params - -- parse_fact_override -- Scan.option Parse.nat) #>> sledgehammer_trans) + (Scan.optional Parse.name runN -- parse_params + -- parse_fact_override -- Scan.option Parse.nat >> + (fn 
(((subcommand, params), fact_override), opt_i) => + Toplevel.keep_proof + (hammer_away params NONE subcommand opt_i fact_override o Toplevel.proof_of))) val _ = Outer_Syntax.command @{command_keyword sledgehammer_params} "set and display the default parameters for Sledgehammer" - (parse_params #>> sledgehammer_params_trans) + (parse_params >> (fn params => + Toplevel.theory (fold set_default_raw_param params #> tap (fn thy => + writeln ("Default parameters for Sledgehammer:\n" ^ + (case rev (default_raw_params Normal thy) of + [] => "none" + | params => params |> map string_of_raw_param |> sort_strings |> cat_lines)))))) fun try_sledgehammer auto state = let @@ -410,23 +409,17 @@ Query_Operation.register sledgehammerN (fn {state = st, args, output_result} => (case try Toplevel.proof_of st of SOME state => - let - val thy = Proof.theory_of state - val ctxt = Proof.context_of state - val [provers_arg, isar_proofs_arg] = args - - val override_params = - ((if provers_arg = "" then [] else [("provers", space_explode " " provers_arg)]) @ - [("isar_proofs", [isar_proofs_arg]), - ("blocking", ["true"]), - ("debug", ["false"]), - ("verbose", ["false"]), - ("overlord", ["false"])]) - |> map (normalize_raw_param ctxt) - in - ignore (run_sledgehammer (get_params Normal thy override_params) Normal - (SOME output_result) 1 no_fact_override state) - end + let + val [provers_arg, isar_proofs_arg, try0_arg] = args + val override_params = + ((if provers_arg = "" then [] else [("provers", space_explode " " provers_arg)]) @ + [("isar_proofs", [isar_proofs_arg]), + ("try0", [try0_arg]), + ("blocking", ["true"]), + ("debug", ["false"]), + ("verbose", ["false"]), + ("overlord", ["false"])]); + in hammer_away override_params (SOME output_result) runN NONE no_fact_override state end | NONE => error "Unknown proof context")) end; diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Sat May 23 22:13:24 2015 
+0200 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact.ML Mon May 25 22:11:43 2015 +0200 @@ -55,6 +55,8 @@ del : (Facts.ref * Token.src list) list, only : bool} +val local_thisN = Long_Name.localN ^ Long_Name.separator ^ Auto_Bind.thisN + (* gracefully handle huge background theories *) val max_facts_for_duplicates = 50000 val max_facts_for_complex_check = 25000 @@ -499,7 +501,7 @@ else let fun get_name () = - if name0 = "" then + if name0 = "" orelse name0 = local_thisN then backquote_thm ctxt th else let val short_name = Facts.extern ctxt facts name0 in diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Sledgehammer/sledgehammer_proof_methods.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_proof_methods.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_proof_methods.ML Mon May 25 22:11:43 2015 +0200 @@ -34,6 +34,7 @@ ((string * stature) list * (proof_method * play_outcome)) * string * int * int val is_proof_method_direct : proof_method -> bool + val proof_method_distinguishes_chained_and_direct : proof_method -> bool val string_of_proof_method : Proof.context -> string list -> proof_method -> string val tac_of_proof_method : Proof.context -> thm list * thm list -> proof_method -> int -> tactic val thms_influence_proof_method : Proof.context -> proof_method -> thm list -> bool @@ -80,6 +81,10 @@ | is_proof_method_direct Simp_Size_Method = true | is_proof_method_direct _ = false +fun proof_method_distinguishes_chained_and_direct Simp_Method = true + | proof_method_distinguishes_chained_and_direct Simp_Size_Method = true + | proof_method_distinguishes_chained_and_direct _ = false + fun is_proof_method_multi_goal Auto_Method = true | is_proof_method_multi_goal _ = false @@ -163,8 +168,14 @@ | apply_on_subgoal i n = "prefer " ^ string_of_int i ^ " " ^ apply_on_subgoal 1 n (* FIXME *) -fun proof_method_command ctxt meth i n _(*used_chaineds*) _(*num_chained*) ss = - let val (indirect_ss, direct_ss) = if is_proof_method_direct meth then 
([], ss) else (ss, []) in +fun proof_method_command ctxt meth i n used_chaineds _(*num_chained*) extras = + let + val (indirect_ss, direct_ss) = + if is_proof_method_direct meth then + ([], extras |> proof_method_distinguishes_chained_and_direct meth ? append used_chaineds) + else + (extras, []) + in (if null indirect_ss then "" else "using " ^ space_implode " " indirect_ss ^ " ") ^ apply_on_subgoal i n ^ string_of_proof_method ctxt direct_ss meth ^ (if is_proof_method_multi_goal meth andalso n <> 1 then "[1]" else "") diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Sledgehammer/sledgehammer_prover_smt.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_prover_smt.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_prover_smt.ML Mon May 25 22:11:43 2015 +0200 @@ -126,7 +126,7 @@ reraise exn else {outcome = SOME (SMT_Failure.Other_Failure (Runtime.exn_message exn)), - fact_ids = [], atp_proof = K []} + fact_ids = NONE, atp_proof = K []} val death = Timer.checkRealTimer timer val outcome0 = if is_none outcome0 then SOME outcome else outcome0 @@ -189,8 +189,10 @@ val {outcome, filter_result = {fact_ids, atp_proof, ...}, used_from, run_time} = smt_filter_loop name params state goal subgoal factss - val used_named_facts = map snd fact_ids - val used_facts = sort_wrt fst (map fst used_named_facts) + val used_facts = + (case fact_ids of + NONE => map fst used_from + | SOME ids => sort_wrt fst (map (fst o snd) ids)) val outcome = Option.map failure_of_smt_failure outcome val (preferred_methss, message) = diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Transfer/transfer.ML --- a/src/HOL/Tools/Transfer/transfer.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Transfer/transfer.ML Mon May 25 22:11:43 2015 +0200 @@ -8,10 +8,16 @@ signature TRANSFER = sig type pred_data + val mk_pred_data: thm -> thm -> thm list -> pred_data val rel_eq_onp: pred_data -> thm + val rel_eq_onp_with_tops: pred_data -> thm + val pred_def: pred_data -> thm + val 
pred_simps: pred_data -> thm list + val update_pred_simps: thm list -> pred_data -> pred_data val bottom_rewr_conv: thm list -> conv val top_rewr_conv: thm list -> conv + val top_sweep_rewr_conv: thm list -> conv val prep_conv: conv val get_transfer_raw: Proof.context -> thm list @@ -46,15 +52,33 @@ structure Transfer : TRANSFER = struct +fun bottom_rewr_conv rewrs = Conv.bottom_conv (K (Conv.try_conv (Conv.rewrs_conv rewrs))) @{context} +fun top_rewr_conv rewrs = Conv.top_conv (K (Conv.try_conv (Conv.rewrs_conv rewrs))) @{context} +fun top_sweep_rewr_conv rewrs = Conv.top_sweep_conv (K (Conv.rewrs_conv rewrs)) @{context} + (** Theory Data **) val compound_xhs_empty_net = Item_Net.init (Thm.eq_thm_prop o apply2 snd) (single o fst); val rewr_rules = Item_Net.init Thm.eq_thm_prop (single o fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.concl_of); -type pred_data = {rel_eq_onp: thm} +datatype pred_data = PRED_DATA of {pred_def:thm, rel_eq_onp: thm, pred_simps: thm list} + +fun mk_pred_data pred_def rel_eq_onp pred_simps = PRED_DATA {pred_def = pred_def, + rel_eq_onp = rel_eq_onp, pred_simps = pred_simps} + +fun map_pred_data' f1 f2 f3 (PRED_DATA {pred_def, rel_eq_onp, pred_simps}) = + PRED_DATA {pred_def = f1 pred_def, rel_eq_onp = f2 rel_eq_onp, pred_simps = f3 pred_simps} -val rel_eq_onp: pred_data -> thm = #rel_eq_onp +fun rep_pred_data (PRED_DATA p) = p +val rel_eq_onp = #rel_eq_onp o rep_pred_data +val rel_eq_onp_with_tops = (Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.arg1_conv + (top_sweep_rewr_conv @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]})))) + o #rel_eq_onp o rep_pred_data +val pred_def = #pred_def o rep_pred_data +val pred_simps = #pred_simps o rep_pred_data +fun update_pred_simps new_pred_data = map_pred_data' I I (K new_pred_data) + structure Data = Generic_Data ( @@ -182,9 +206,6 @@ (** Conversions **) -fun bottom_rewr_conv rewrs = Conv.bottom_conv (K (Conv.try_conv (Conv.rewrs_conv rewrs))) @{context} -fun top_rewr_conv rewrs = 
Conv.top_conv (K (Conv.try_conv (Conv.rewrs_conv rewrs))) @{context} - fun transfer_rel_conv conv = Conv.concl_conv ~1 (HOLogic.Trueprop_conv (Conv.fun2_conv (Conv.arg_conv conv))) @@ -788,7 +809,12 @@ val untransferred_attribute_parser = Attrib.thms >> untransferred_attribute -fun morph_pred_data phi {rel_eq_onp} = {rel_eq_onp = Morphism.thm phi rel_eq_onp} +fun morph_pred_data phi = + let + val morph_thm = Morphism.thm phi + in + map_pred_data' morph_thm morph_thm (map morph_thm) + end fun lookup_pred_data ctxt type_name = Symtab.lookup (get_pred_data ctxt) type_name |> Option.map (morph_pred_data (Morphism.transfer_morphism (Proof_Context.theory_of ctxt))) diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/Transfer/transfer_bnf.ML --- a/src/HOL/Tools/Transfer/transfer_bnf.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/Transfer/transfer_bnf.ML Mon May 25 22:11:43 2015 +0200 @@ -102,7 +102,7 @@ val (((As, Bs), Ds), ctxt) = ctxt |> mk_TFrees live ||>> mk_TFrees live - ||>> mk_TFrees (dead_of_bnf bnf) + ||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf)) val relator = mk_rel_of_bnf Ds As Bs bnf val relsT = map2 mk_pred2T As Bs @@ -181,7 +181,7 @@ val Tname = base_name_of_bnf bnf val ((As, Ds), lthy) = lthy |> mk_TFrees live - ||>> mk_TFrees (dead_of_bnf bnf) + ||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf)) val T = mk_T_of_bnf Ds As bnf val sets = mk_sets_of_bnf (replicate live Ds) (replicate live As) bnf val argTs = map mk_pred1T As @@ -232,7 +232,7 @@ val (((As, Bs), Ds), ctxt) = ctxt |> mk_TFrees live ||>> mk_TFrees live - ||>> mk_TFrees (dead_of_bnf bnf) + ||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf)) val relator = mk_rel_of_bnf Ds As Bs bnf val relsT = map2 mk_pred2T As Bs @@ -259,7 +259,7 @@ val old_ctxt = ctxt val ((As, Ds), ctxt) = ctxt |> mk_TFrees live - ||>> mk_TFrees (dead_of_bnf bnf) + ||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf)) val T = mk_T_of_bnf Ds As bnf val argTs = map mk_pred1T As val (args, 
ctxt) = mk_Frees "P" argTs ctxt @@ -283,10 +283,7 @@ fun qualify defname suffix = Binding.qualified true suffix defname val Domainp_rel_thm_name = qualify (base_name_of_bnf bnf) "Domainp_rel" val rel_eq_onp_thm_name = qualify (base_name_of_bnf bnf) "rel_eq_onp" - val rel_eq_onp_internal = Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.arg1_conv - (Raw_Simplifier.rewrite lthy false @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]}))) - rel_eq_onp - val pred_data = {rel_eq_onp = rel_eq_onp_internal} + val pred_data = Transfer.mk_pred_data pred_def rel_eq_onp [] val type_name = type_name_of_bnf bnf val relator_domain_attr = @{attributes [relator_domain]} val notes = [((Domainp_rel_thm_name, []), [([Domainp_rel], relator_domain_attr)]), @@ -334,17 +331,15 @@ map_filter (uncurry (fn true => SOME | false => K NONE)) (liveness ~~ As) end -fun sorts_of_fp fp_sugar = map (snd o Ctr_Sugar_Util.dest_TFree_or_TVar) (lives_of_fp fp_sugar) - fun prove_pred_inject lthy (fp_sugar:fp_sugar) = let val involved_types = distinct op= ( map type_name_of_bnf (#fp_nesting_bnfs fp_sugar) @ map type_name_of_bnf (#live_nesting_bnfs fp_sugar) @ map type_name_of_bnf (#bnfs (#fp_res fp_sugar))) - val eq_onps = map (Transfer.rel_eq_onp o lookup_defined_pred_data lthy) involved_types + val eq_onps = map (Transfer.rel_eq_onp_with_tops o lookup_defined_pred_data lthy) involved_types val old_lthy = lthy - val (As, lthy) = mk_TFrees' (sorts_of_fp fp_sugar) lthy + val (As, lthy) = mk_TFrees' (map Type.sort_of_atyp (lives_of_fp fp_sugar)) lthy val predTs = map mk_pred1T As val (preds, lthy) = mk_Frees "P" predTs lthy val args = map mk_eq_onp preds @@ -403,18 +398,26 @@ fun qualify defname suffix = Binding.qualified true suffix defname val pred_inject_thm_name = qualify (base_name_of_bnf (bnf_of_fp_sugar fp_sugar)) "pred_inject" val simp_attrs = @{attributes [simp]} + val type_name = type_name_of_bnf (#fp_bnf fp_sugar) + val pred_data = lookup_defined_pred_data lthy type_name + |> 
Transfer.update_pred_simps pred_injects in - [((pred_inject_thm_name, []), [(pred_injects, simp_attrs)])] + lthy + |> Local_Theory.note ((pred_inject_thm_name, simp_attrs), pred_injects) + |> snd + |> Local_Theory.declaration {syntax = false, pervasive = true} + (fn phi => Transfer.update_pred_data type_name (Transfer.morph_pred_data phi pred_data)) + |> Local_Theory.restore end fun transfer_fp_sugars_interpretation fp_sugar lthy = let - val pred_injects_notes = pred_injects fp_sugar lthy + val lthy = pred_injects fp_sugar lthy val transfer_rules_notes = fp_sugar_transfer_rules fp_sugar in lthy - |> Local_Theory.notes (pred_injects_notes @ transfer_rules_notes) + |> Local_Theory.notes transfer_rules_notes |> snd end diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Tools/try0.ML --- a/src/HOL/Tools/try0.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Tools/try0.ML Mon May 25 22:11:43 2015 +0200 @@ -106,12 +106,14 @@ fun silence_methods debug = Config.put Metis_Tactic.verbose debug #> Config.put Lin_Arith.verbose debug - #> (not debug ? - (Context_Position.set_visible false - #> Proof_Context.background_theory (fn thy => - thy - |> Context_Position.set_visible_global false - |> Config.put_global Unify.trace_bound (Config.get_global thy Unify.search_bound)))); + #> not debug ? 
(fn ctxt => + ctxt + |> Context_Position.set_visible false + |> Config.put Unify.trace_bound (Config.get ctxt Unify.search_bound) + |> Proof_Context.background_theory (fn thy => + thy + |> Context_Position.set_visible_global false + |> Config.put_global Unify.trace_bound (Config.get_global thy Unify.search_bound))); fun generic_try0 mode timeout_opt quad st = let @@ -154,7 +156,8 @@ fun try0 timeout_opt = fst oo generic_try0 Normal timeout_opt; fun try0_trans quad = - Toplevel.keep (K () o generic_try0 Normal (SOME default_timeout) quad o Toplevel.proof_of); + Toplevel.keep_proof + (ignore o generic_try0 Normal (SOME default_timeout) quad o Toplevel.proof_of); fun merge_attrs (s1, i1, e1, d1) (s2, i2, e2, d2) = (s1 @ s2, i1 @ i2, e1 @ e2, d1 @ d2); diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Transcendental.thy --- a/src/HOL/Transcendental.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Transcendental.thy Mon May 25 22:11:43 2015 +0200 @@ -1132,8 +1132,7 @@ unfolding exp_def by (rule summable_exp_generic [THEN summable_sums]) lemma exp_fdiffs: - fixes XXX :: "'a::{real_normed_field,banach}" - shows "diffs (\n. inverse (fact n)) = (\n. inverse (fact n :: 'a))" + "diffs (\n. inverse (fact n)) = (\n. inverse (fact n :: 'a::{real_normed_field,banach}))" by (simp add: diffs_def mult_ac nonzero_inverse_mult_distrib nonzero_of_real_inverse del: mult_Suc of_nat_Suc) @@ -3897,8 +3896,7 @@ where "tan = (\x. sin x / cos x)" lemma tan_of_real: - fixes XXX :: "'a::{real_normed_field,banach}" - shows "of_real(tan x) = (tan(of_real x) :: 'a)" + "of_real (tan x) = (tan (of_real x) :: 'a::{real_normed_field,banach})" by (simp add: tan_def sin_of_real cos_of_real) lemma tan_in_Reals [simp]: diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/Transfer.thy --- a/src/HOL/Transfer.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/Transfer.thy Mon May 25 22:11:43 2015 +0200 @@ -269,6 +269,8 @@ lemma Domainp_refl[transfer_domain_rule]: "Domainp T = Domainp T" .. 
+lemma Domain_eq_top: "Domainp op= = top" by auto + lemma Domainp_prod_fun_eq[relator_domain]: "Domainp (op= ===> T) = (\f. \x. (Domainp T) (f x))" by (auto intro: choice simp: Domainp_iff rel_fun_def fun_eq_iff) diff -r cc71f01f9fde -r ff82ba1893c8 src/HOL/ex/Cartouche_Examples.thy --- a/src/HOL/ex/Cartouche_Examples.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/HOL/ex/Cartouche_Examples.thy Mon May 25 22:11:43 2015 +0200 @@ -42,7 +42,7 @@ ML \ Outer_Syntax.command @{command_keyword cartouche} "" (Parse.cartouche >> (fn s => - Toplevel.imperative (fn () => writeln s))) + Toplevel.keep (fn _ => writeln s))) \ cartouche \abc\ diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Concurrent/future.scala --- a/src/Pure/Concurrent/future.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Concurrent/future.scala Mon May 25 22:11:43 2015 +0200 @@ -26,7 +26,7 @@ new Pending_Future(Scala_Future[A](body)(execution_context)) def promise[A]: Promise[A] = - new Promise_Future[A](Scala_Promise[A]) + new Promise_Future[A](Scala_Promise[A]()) } trait Future[A] @@ -90,4 +90,3 @@ } def fulfill(x: A): Unit = promise.success(x) } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/General/completion.scala --- a/src/Pure/General/completion.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/General/completion.scala Mon May 25 22:11:43 2015 +0200 @@ -432,7 +432,7 @@ if ok completion <- words_map.get_list(complete_word) } yield (complete_word, completion) - ((full_word, completions)) + (full_word, completions) }) } diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/General/graph.scala --- a/src/Pure/General/graph.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/General/graph.scala Mon May 25 22:11:43 2015 +0200 @@ -39,17 +39,17 @@ /* XML data representation */ def encode[Key, A](key: XML.Encode.T[Key], info: XML.Encode.T[A]): XML.Encode.T[Graph[Key, A]] = - ((graph: Graph[Key, A]) => { + (graph: Graph[Key, A]) => { import XML.Encode._ list(pair(pair(key, info), list(key)))(graph.dest) - }) + } def 
decode[Key, A](key: XML.Decode.T[Key], info: XML.Decode.T[A])( implicit ord: Ordering[Key]): XML.Decode.T[Graph[Key, A]] = - ((body: XML.Body) => { + (body: XML.Body) => { import XML.Decode._ make(list(pair(pair(key, info), list(key)))(body))(ord) - }) + } } @@ -209,7 +209,7 @@ xs0 match { case Nil => xs1 case x :: xs => - if (!(x_set(x)) || x == z || path.contains(x) || + if (!x_set(x) || x == z || path.contains(x) || xs.exists(red(x)) || xs1.exists(red(x))) irreds(xs, xs1) else irreds(xs, x :: xs1) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/General/name_space.ML --- a/src/Pure/General/name_space.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/General/name_space.ML Mon May 25 22:11:43 2015 +0200 @@ -179,7 +179,8 @@ fun entry_ord space = int_ord o apply2 (#serial o the_entry space); -fun is_concealed space name = #concealed (the_entry space name); +fun is_concealed space name = + #concealed (the_entry space name) handle ERROR _ => false; (* intern *) @@ -449,8 +450,7 @@ val (accs, accs') = accesses naming binding; val internals' = internals |> fold (add_name name) accs; val entries' = entries - |> Change_Table.map_entry name (fn (externals, entry) => - (Library.merge (op =) (externals, accs'), entry)) + |> Change_Table.map_entry name (apfst (fold_rev (update op =) accs')); in (kind, internals', entries') end); diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/General/path.scala --- a/src/Pure/General/path.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/General/path.scala Mon May 25 22:11:43 2015 +0200 @@ -18,9 +18,9 @@ /* path elements */ sealed abstract class Elem - private case class Root(val name: String) extends Elem - private case class Basic(val name: String) extends Elem - private case class Variable(val name: String) extends Elem + private case class Root(name: String) extends Elem + private case class Basic(name: String) extends Elem + private case class Variable(name: String) extends Elem private case object Parent extends Elem private def 
err_elem(msg: String, s: String): Nothing = @@ -30,7 +30,7 @@ if (s == "" || s == "~" || s == "~~") err_elem("Illegal", s) else { "/\\$:\"'".iterator.foreach(c => - if (s.iterator.exists(_ == c)) + if (s.iterator.contains(c)) err_elem("Illegal character " + quote(c.toString) + " in", s)) s } diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/General/scan.scala --- a/src/Pure/General/scan.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/General/scan.scala Mon May 25 22:11:43 2015 +0200 @@ -91,7 +91,7 @@ private def quoted_body(quote: Symbol.Symbol): Parser[String] = { rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" | - (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString) + ("""\\\d\d\d""".r ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString) } def quoted(quote: Symbol.Symbol): Parser[String] = @@ -307,7 +307,7 @@ { /* representation */ - private sealed case class Tree(val branches: Map[Char, (String, Tree)]) + private sealed case class Tree(branches: Map[Char, (String, Tree)]) private val empty_tree = Tree(Map()) val empty: Lexicon = new Lexicon(empty_tree) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/General/symbol.scala --- a/src/Pure/General/symbol.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/General/symbol.scala Mon May 25 22:11:43 2015 +0200 @@ -318,7 +318,7 @@ val names: Map[Symbol, String] = { val name = new Regex("""\\<\^?([A-Za-z][A-Za-z0-9_']*)>""") - Map((for ((sym @ name(a), _) <- symbols) yield (sym -> a)): _*) + Map((for ((sym @ name(a), _) <- symbols) yield sym -> a): _*) } val groups: List[(String, List[Symbol])] = @@ -334,7 +334,7 @@ for { (sym, props) <- symbols ("abbrev", a) <- props.reverse - } yield (sym -> a)): _*) + } yield sym -> a): _*) /* recoding */ @@ -381,7 +381,7 @@ private val Font = new Properties.String("font") val fonts: Map[Symbol, String] = - recode_map((for ((sym, Font(font)) <- symbols) yield (sym -> font)): _*) + recode_map((for ((sym, 
Font(font)) <- symbols) yield sym -> font): _*) val font_names: List[String] = Set(fonts.toList.map(_._2): _*).toList val font_index: Map[String, Int] = Map((font_names zip (0 until font_names.length).toList): _*) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/attrib.ML --- a/src/Pure/Isar/attrib.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/attrib.ML Mon May 25 22:11:43 2015 +0200 @@ -18,6 +18,7 @@ val check_name: Proof.context -> xstring * Position.T -> string val check_src: Proof.context -> Token.src -> Token.src val pretty_attribs: Proof.context -> Token.src list -> Pretty.T list + val pretty_binding: Proof.context -> binding -> string -> Pretty.T list val attribute: Proof.context -> Token.src -> attribute val attribute_global: theory -> Token.src -> attribute val attribute_cmd: Proof.context -> Token.src -> attribute @@ -158,6 +159,15 @@ fun pretty_attribs _ [] = [] | pretty_attribs ctxt srcs = [Pretty.enum "," "[" "]" (map (Token.pretty_src ctxt) srcs)]; +fun pretty_binding ctxt (b, atts) sep = + (case (Binding.is_empty b, null atts) of + (true, true) => [] + | (false, true) => [Pretty.block [Binding.pretty b, Pretty.str sep]] + | (true, false) => [Pretty.block (pretty_attribs ctxt atts @ [Pretty.str sep])] + | (false, false) => + [Pretty.block + (Binding.pretty b :: Pretty.brk 1 :: pretty_attribs ctxt atts @ [Pretty.str sep])]); + (* get attributes *) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/class.ML --- a/src/Pure/Isar/class.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/class.ML Mon May 25 22:11:43 2015 +0200 @@ -588,7 +588,10 @@ Pretty.block (Pretty.breaks [Pretty.str v, Pretty.str "==", Proof_Context.pretty_const lthy c, Pretty.str "::", Syntax.pretty_typ lthy ty]); - in Pretty.keyword1 "instantiation" :: map pr_arity tycos @ map pr_param params end; + in + [Pretty.block + (Pretty.fbreaks (Pretty.keyword1 "instantiation" :: map pr_arity tycos @ map pr_param params))] + end; fun conclude lthy = let @@ -729,22 +732,27 @@ "apply 
some intro/elim rule"); + (** diagnostics **) -fun pretty_specification thy c = - if is_class thy c then +fun pretty_specification thy class = + if is_class thy class then let - val class_ctxt = init c thy; - val class_space = Proof_Context.class_space class_ctxt; + val class_ctxt = init class thy; + val prt_class = Name_Space.pretty class_ctxt (Proof_Context.class_space class_ctxt); + + val super_classes = Sign.minimize_sort thy (Sign.super_classes thy class); val fix_args = - #params (Axclass.get_info thy c) + #params (Axclass.get_info thy class) |> map (fn (c, T) => (Binding.name (Long_Name.base_name c), SOME T, NoSyn)); val fixes = if null fix_args then [] else [Element.Fixes fix_args]; - val assumes = Locale.hyp_spec_of thy c; + val assumes = Locale.hyp_spec_of thy class; val header = - [Pretty.keyword1 "class", Pretty.brk 1, Name_Space.pretty class_ctxt class_space c]; + [Pretty.keyword1 "class", Pretty.brk 1, prt_class class, Pretty.str " =", Pretty.brk 1] @ + Pretty.separate " +" (map prt_class super_classes) @ + (if null super_classes then [] else [Pretty.str " +"]); val body = if null fixes andalso null assumes then [] else @@ -754,4 +762,3 @@ else []; end; - diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/element.ML --- a/src/Pure/Isar/element.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/element.ML Mon May 25 22:11:43 2015 +0200 @@ -112,12 +112,6 @@ Pretty.block [Pretty.keyword2 keyword, Pretty.brk 1, x] :: map (fn y => Pretty.block [Pretty.str " ", Pretty.keyword2 sep, Pretty.brk 1, y]) ys; -fun pretty_name_atts ctxt (b, atts) sep = - if Attrib.is_empty_binding (b, atts) then [] - else - [Pretty.block (Pretty.breaks - (Binding.pretty b :: Attrib.pretty_attribs ctxt atts @ [Pretty.str sep]))]; - (* pretty_stmt *) @@ -126,10 +120,10 @@ val prt_typ = Pretty.quote o Syntax.pretty_typ ctxt; val prt_term = Pretty.quote o Syntax.pretty_term ctxt; val prt_terms = separate (Pretty.keyword2 "and") o map prt_term; - val prt_name_atts = pretty_name_atts 
ctxt; + val prt_binding = Attrib.pretty_binding ctxt; fun prt_show (a, ts) = - Pretty.block (Pretty.breaks (prt_name_atts a ":" @ prt_terms (map fst ts))); + Pretty.block (Pretty.breaks (prt_binding a ":" @ prt_terms (map fst ts))); fun prt_var (x, SOME T) = Pretty.block [Pretty.str (Binding.name_of x ^ " ::"), Pretty.brk 1, prt_typ T] @@ -153,10 +147,8 @@ val prt_term = Pretty.quote o Syntax.pretty_term ctxt; val prt_thm = Pretty.backquote o Display.pretty_thm ctxt; - fun prt_name_atts (b, atts) sep = - if not show_attribs orelse null atts then - [Pretty.block [Binding.pretty b, Pretty.str sep]] - else pretty_name_atts ctxt (b, atts) sep; + fun prt_binding (b, atts) = + Attrib.pretty_binding ctxt (b, if show_attribs then atts else []); fun prt_fact (ths, atts) = if not show_attribs orelse null atts then map prt_thm ths @@ -174,12 +166,12 @@ fun prt_constrain (x, T) = prt_fix (Binding.name x, SOME T, NoSyn); fun prt_asm (a, ts) = - Pretty.block (Pretty.breaks (prt_name_atts a ":" @ map (prt_term o fst) ts)); + Pretty.block (Pretty.breaks (prt_binding a ":" @ map (prt_term o fst) ts)); fun prt_def (a, (t, _)) = - Pretty.block (Pretty.breaks (prt_name_atts a ":" @ [prt_term t])); + Pretty.block (Pretty.breaks (prt_binding a ":" @ [prt_term t])); fun prt_note (a, ths) = - Pretty.block (Pretty.breaks (flat (prt_name_atts a "=" :: map prt_fact ths))); + Pretty.block (Pretty.breaks (flat (prt_binding a " =" :: map prt_fact ths))); in fn Fixes fixes => pretty_items "fixes" "and" (map prt_fix fixes) | Constrains xs => pretty_items "constrains" "and" (map prt_constrain xs) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/generic_target.ML --- a/src/Pure/Isar/generic_target.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/generic_target.ML Mon May 25 22:11:43 2015 +0200 @@ -7,17 +7,17 @@ signature GENERIC_TARGET = sig - (* consts *) + (*consts*) val standard_const: (int * int -> bool) -> Syntax.mode -> (binding * mixfix) * term -> local_theory -> local_theory - (* 
background operations *) + (*background operations*) val background_foundation: ((binding * typ) * mixfix) * (binding * term) -> term list * term list -> local_theory -> (term * thm) * local_theory val background_declaration: declaration -> local_theory -> local_theory val background_abbrev: binding * term -> term list -> local_theory -> (term * term) * local_theory - (* lifting primitives to local theory operations *) + (*lifting primitives to local theory operations*) val define: (((binding * typ) * mixfix) * (binding * term) -> term list * term list -> local_theory -> (term * thm) * local_theory) -> bool -> (binding * mixfix) * (Attrib.binding * term) -> local_theory -> @@ -31,7 +31,7 @@ term list * term list -> local_theory -> local_theory) -> string * bool -> (binding * mixfix) * term -> local_theory -> (term * term) * local_theory - (* theory operations *) + (*theory operations*) val theory_foundation: ((binding * typ) * mixfix) * (binding * term) -> term list * term list -> local_theory -> (term * thm) * local_theory val theory_notes: string -> @@ -44,7 +44,7 @@ val theory_registration: string * morphism -> (morphism * bool) option -> morphism -> local_theory -> local_theory - (* locale operations *) + (*locale operations*) val locale_notes: string -> string -> (Attrib.binding * (thm list * Token.src list) list) list -> (Attrib.binding * (thm list * Token.src list) list) list -> @@ -75,6 +75,7 @@ else ctxt) lthy; + (** declarations **) fun standard_declaration pred decl lthy = @@ -84,6 +85,7 @@ else ctxt) lthy; + (** consts **) fun check_mixfix ctxt (b, extra_tfrees) mx = @@ -123,7 +125,7 @@ | _ => NONE) else NONE; in - case const_alias of + (case const_alias of SOME c => context |> Context.mapping (Sign.const_alias b' c) (Proof_Context.const_alias b' c) @@ -133,7 +135,7 @@ |> Proof_Context.generic_add_abbrev Print_Mode.internal (b', Term.close_schematic_term rhs') |-> (fn (const as Const (c, _), _) => same_shape ? 
(Proof_Context.generic_revert_abbrev (#1 prmode) c #> - Morphism.form (Proof_Context.generic_notation true prmode [(const, mx)]))) + Morphism.form (Proof_Context.generic_notation true prmode [(const, mx)])))) end else context; @@ -141,6 +143,7 @@ standard_declaration pred (const_decl (K true) prmode ((b, mx), rhs)); + (** background primitives **) fun background_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) lthy = @@ -170,6 +173,7 @@ #>> apply2 (fn t => Term.list_comb (Logic.unvarify_global t, params)) + (** lifting primitive to local theory operations **) (* define *) @@ -300,6 +304,7 @@ end; + (** theory operations **) fun theory_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) = @@ -326,6 +331,7 @@ Local_Theory.raw_theory o Context.theory_map ooo Locale.add_registration; + (** locale operations **) fun locale_notes locale kind global_facts local_facts = diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/isar_syn.ML --- a/src/Pure/Isar/isar_syn.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/isar_syn.ML Mon May 25 22:11:43 2015 +0200 @@ -794,7 +794,7 @@ Locale.pretty_locale_deps thy |> map (fn {name, parents, body} => ((name, Graph_Display.content_node (Locale.extern thy name) [body]), parents)) - |> Graph_Display.display_graph)))); + |> Graph_Display.display_graph_old)))); val _ = Outer_Syntax.command @{command_keyword print_term_bindings} @@ -854,13 +854,13 @@ val _ = Outer_Syntax.command @{command_keyword welcome} "print welcome message" - (Scan.succeed (Toplevel.imperative (writeln o Session.welcome))); + (Scan.succeed (Toplevel.keep (fn _ => writeln (Session.welcome ())))); val _ = Outer_Syntax.command @{command_keyword display_drafts} "display raw source files with symbols" (Scan.repeat1 Parse.path >> (fn names => - Toplevel.imperative (fn () => ignore (Present.display_drafts (map Path.explode names))))); + Toplevel.keep (fn _ => ignore (Present.display_drafts (map Path.explode names))))); diff -r cc71f01f9fde -r 
ff82ba1893c8 src/Pure/Isar/local_defs.ML --- a/src/Pure/Isar/local_defs.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/local_defs.ML Mon May 25 22:11:43 2015 +0200 @@ -138,17 +138,19 @@ fn th => let val th' = exp th; - val defs_asms = asms |> map (Thm.assume #> (fn asm => - (case try (head_of_def o Thm.prop_of) asm of - NONE => (asm, false) - | SOME x => - let val t = Free x in - (case try exp_term t of - NONE => (asm, false) - | SOME u => - if t aconv u then (asm, false) - else (Drule.abs_def (Drule.gen_all (Variable.maxidx_of outer) asm), true)) - end))); + val defs_asms = asms + |> filter_out (Drule.is_sort_constraint o Thm.term_of) + |> map (Thm.assume #> (fn asm => + (case try (head_of_def o Thm.prop_of) asm of + NONE => (asm, false) + | SOME x => + let val t = Free x in + (case try exp_term t of + NONE => (asm, false) + | SOME u => + if t aconv u then (asm, false) + else (Drule.abs_def (Drule.gen_all (Variable.maxidx_of outer) asm), true)) + end))); in (apply2 (map #1) (List.partition #2 defs_asms), th') end end; diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/method.ML --- a/src/Pure/Isar/method.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/method.ML Mon May 25 22:11:43 2015 +0200 @@ -511,11 +511,11 @@ local fun sect (modifier : modifier parser) = Scan.depend (fn context => - Scan.ahead Parse.not_eof -- modifier -- Scan.repeat (Scan.unless modifier Parse.xthm) >> - (fn ((tok, {init, attribute, pos}), xthms) => + Scan.ahead Parse.not_eof -- Scan.trace modifier -- Scan.repeat (Scan.unless modifier Parse.xthm) + >> (fn ((tok0, ({init, attribute, pos}, modifier_toks)), xthms) => let val decl = - (case Token.get_value tok of + (case Token.get_value tok0 of SOME (Token.Declaration decl) => decl | _ => let @@ -530,14 +530,18 @@ val facts = Attrib.partial_evaluation ctxt [((Binding.name "dummy", []), thms)] |> map (fn (_, bs) => ((Binding.empty, [Attrib.internal (K attribute)]), bs)); - val _ = - Context_Position.report ctxt (Token.pos_of tok) - 
(Markup.entity Markup.method_modifierN "" - |> Markup.properties (Position.def_properties_of pos)); + fun decl phi = Context.mapping I init #> Attrib.generic_notes "" (Attrib.transform_facts phi facts) #> snd; - val _ = Token.assign (SOME (Token.Declaration decl)) tok; + + val modifier_report = + (Position.set_range (Token.range_of modifier_toks), + Markup.properties (Position.def_properties_of pos) + (Markup.entity Markup.method_modifierN "")); + val _ = + Context_Position.reports ctxt (modifier_report :: Token.reports_of_value tok0); + val _ = Token.assign (SOME (Token.Declaration decl)) tok0; in decl end); in (Morphism.form decl context, decl) end)); diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/named_target.ML --- a/src/Pure/Isar/named_target.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/named_target.ML Mon May 25 22:11:43 2015 +0200 @@ -116,28 +116,28 @@ (* pretty *) fun pretty (target, is_class) ctxt = - let - val target_name = - [Pretty.keyword1 (if is_class then "class" else "locale"), Pretty.brk 1, - Locale.pretty_name ctxt target]; - val fixes = - map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) - (#1 (Proof_Context.inferred_fixes ctxt)); - val assumes = - map (fn A => (Attrib.empty_binding, [(Thm.term_of A, [])])) - (Assumption.all_assms_of ctxt); - val elems = - (if null fixes then [] else [Element.Fixes fixes]) @ - (if null assumes then [] else [Element.Assumes assumes]); - val body_elems = - if target = "" then [] - else if null elems then [Pretty.block target_name] + if target = "" then + [Pretty.block [Pretty.keyword1 "theory", Pretty.brk 1, + Pretty.str (Context.theory_name (Proof_Context.theory_of ctxt))]] + else if is_class then Class.pretty_specification (Proof_Context.theory_of ctxt) target + else + (* FIXME pretty locale content *) + let + val target_name = [Pretty.keyword1 "locale", Pretty.brk 1, Locale.pretty_name ctxt target]; + val fixes = + map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) + (#1 
(Proof_Context.inferred_fixes ctxt)); + val assumes = + map (fn A => (Attrib.empty_binding, [(Thm.term_of A, [])])) + (Assumption.all_assms_of ctxt); + val elems = + (if null fixes then [] else [Element.Fixes fixes]) @ + (if null assumes then [] else [Element.Assumes assumes]); + in + if null elems then [Pretty.block target_name] else [Pretty.block (Pretty.fbreaks (Pretty.block (target_name @ [Pretty.str " ="]) :: - map (Pretty.chunks o Element.pretty_ctxt ctxt) elems))]; - in - Pretty.block [Pretty.keyword1 "theory", Pretty.brk 1, - Pretty.str (Context.theory_name (Proof_Context.theory_of ctxt))] :: body_elems - end; + map (Pretty.chunks o Element.pretty_ctxt ctxt) elems))] + end; (* init *) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/outer_syntax.scala --- a/src/Pure/Isar/outer_syntax.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/outer_syntax.scala Mon May 25 22:11:43 2015 +0200 @@ -33,7 +33,7 @@ result += '\\' if (c < 10) result += '0' if (c < 100) result += '0' - result ++= (c.asInstanceOf[Int].toString) + result ++= c.asInstanceOf[Int].toString } else result += c } diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/overloading.ML --- a/src/Pure/Isar/overloading.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/overloading.ML Mon May 25 22:11:43 2015 +0200 @@ -174,7 +174,10 @@ Pretty.block (Pretty.breaks [Pretty.str v, Pretty.str "==", Proof_Context.pretty_const lthy c, Pretty.str "::", Syntax.pretty_typ lthy ty]); - in Pretty.keyword1 "overloading" :: map pr_operation overloading end; + in + [Pretty.block + (Pretty.fbreaks (Pretty.keyword1 "overloading" :: map pr_operation overloading))] + end; fun conclude lthy = let diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/proof_context.ML --- a/src/Pure/Isar/proof_context.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/proof_context.ML Mon May 25 22:11:43 2015 +0200 @@ -1346,9 +1346,10 @@ [Pretty.quote (prt_term (Var (xi, Term.fastype_of t))), Pretty.str " =", Pretty.brk 1, Pretty.quote 
(prt_term t)]; - fun prt_asm (b, ts) = Pretty.block (Pretty.breaks - ((if Binding.is_empty b then [] - else [Binding.pretty b, Pretty.str ":"]) @ map (Pretty.quote o prt_term) ts)); + fun prt_asm (b, ts) = + Pretty.block (Pretty.breaks + ((if Binding.is_empty b then [] else [Binding.pretty b, Pretty.str ":"]) @ + map (Pretty.quote o prt_term) ts)); fun prt_sect _ _ _ [] = [] | prt_sect head sep prt xs = diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/token.ML --- a/src/Pure/Isar/token.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/token.ML Mon May 25 22:11:43 2015 +0200 @@ -396,8 +396,18 @@ fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v))) | map_value _ tok = tok; +fun map_values f = + (map_args o map_value) (fn Source src => Source (map_values f src) | x => f x); + + +(* reports of value *) + +fun get_assignable_value (Token (_, _, Assignable r)) = ! r + | get_assignable_value (Token (_, _, Value v)) = v + | get_assignable_value _ = NONE; + fun reports_of_value tok = - (case get_value tok of + (case get_assignable_value tok of SOME (Literal markup) => let val pos = pos_of tok; @@ -409,9 +419,6 @@ end | _ => []); -fun map_values f = - (map_args o map_value) (fn Source src => Source (map_values f src) | x => f x); - (* maxidx *) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/token.scala --- a/src/Pure/Isar/token.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/token.scala Mon May 25 22:11:43 2015 +0200 @@ -144,7 +144,7 @@ var ctxt = context while (!in.atEnd) { Parsers.parse(Parsers.token_line(keywords, ctxt), in) match { - case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest } + case Parsers.Success((x, c), rest) => toks += x; ctxt = c; in = rest case Parsers.NoSuccess(_, rest) => error("Unexpected failure of tokenizing input:\n" + rest.source.toString) } @@ -158,7 +158,7 @@ def implode(toks: List[Token]): String = toks match { case List(tok) => tok.source - case toks => 
toks.map(_.source).mkString + case _ => toks.map(_.source).mkString } @@ -222,7 +222,7 @@ } -sealed case class Token(val kind: Token.Kind.Value, val source: String) +sealed case class Token(kind: Token.Kind.Value, source: String) { def is_command: Boolean = kind == Token.Kind.COMMAND def is_command_kind(keywords: Keyword.Keywords, pred: String => Boolean): Boolean = diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Isar/toplevel.ML --- a/src/Pure/Isar/toplevel.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Isar/toplevel.ML Mon May 25 22:11:43 2015 +0200 @@ -40,7 +40,7 @@ val exit: transition -> transition val keep: (state -> unit) -> transition -> transition val keep': (bool -> state -> unit) -> transition -> transition - val imperative: (unit -> unit) -> transition -> transition + val keep_proof: (state -> unit) -> transition -> transition val ignored: Position.T -> transition val is_ignored: transition -> bool val malformed: Position.T -> string -> transition @@ -345,13 +345,18 @@ fun transaction f = present_transaction f (K ()); fun keep f = add_trans (Keep (fn _ => f)); -fun imperative f = keep (fn _ => f ()); -fun ignored pos = empty |> name "" |> position pos |> imperative I; +fun keep_proof f = + keep (fn st => + if is_proof st then f st + else if is_skipped_proof st then () + else warning "No proof state"); + +fun ignored pos = empty |> name "" |> position pos |> keep (fn _ => ()); fun is_ignored tr = name_of tr = ""; fun malformed pos msg = - empty |> name "" |> position pos |> imperative (fn () => error msg); + empty |> name "" |> position pos |> keep (fn _ => error msg); (* theory transitions *) @@ -377,9 +382,9 @@ val lthy = f thy; val gthy = if begin then Context.Proof lthy else Context.Theory (Named_Target.exit lthy); val _ = - if begin then - Output.state (Pretty.string_of (Pretty.chunks (Local_Theory.pretty lthy))) - else (); + (case Local_Theory.pretty lthy of + [] => () + | prts => Output.state (Pretty.string_of (Pretty.chunks prts))); in Theory 
(gthy, SOME lthy) end | _ => raise UNDEF)); diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/ML/ml_lex.scala --- a/src/Pure/ML/ml_lex.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/ML/ml_lex.scala Mon May 25 22:11:43 2015 +0200 @@ -62,7 +62,7 @@ val ERROR = Value("bad input") } - sealed case class Token(val kind: Kind.Value, val source: String) + sealed case class Token(kind: Kind.Value, source: String) { def is_keyword: Boolean = kind == Kind.KEYWORD def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source) @@ -282,7 +282,7 @@ var ctxt = context while (!in.atEnd) { Parsers.parse(Parsers.token_line(SML, ctxt), in) match { - case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest } + case Parsers.Success((x, c), rest) => toks += x; ctxt = c; in = rest case Parsers.NoSuccess(_, rest) => error("Unexpected failure of tokenizing input:\n" + rest.source.toString) } diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/command.scala --- a/src/Pure/PIDE/command.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/command.scala Mon May 25 22:11:43 2015 +0200 @@ -427,7 +427,7 @@ val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] = ((Symbol.Text_Chunk.Default -> chunk) :: (for (Exn.Res((name, Some((_, file)))) <- blobs) - yield (Symbol.Text_Chunk.File(name.node) -> file))).toMap + yield Symbol.Text_Chunk.File(name.node) -> file)).toMap def length: Int = source.length def range: Text.Range = chunk.range diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/document.ML --- a/src/Pure/PIDE/document.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/document.ML Mon May 25 22:11:43 2015 +0200 @@ -434,6 +434,21 @@ (* document execution *) +fun make_required nodes = + let + fun all_preds P = + String_Graph.fold (fn (a, (node, _)) => P node ? 
cons a) nodes [] + |> String_Graph.all_preds nodes + |> Symtab.make_set; + + val all_visible = all_preds visible_node; + val all_required = all_preds required_node; + in + Symtab.fold (fn (a, ()) => + exists (Symtab.defined all_visible) (String_Graph.immediate_succs nodes a) ? + Symtab.update (a, ())) all_visible all_required + end; + fun start_execution state = state |> map_state (fn (versions, blobs, commands, execution) => timeit "Document.start_execution" (fn () => let @@ -446,10 +461,13 @@ fun finished_import (name, (node, _)) = finished_result node orelse is_some (loaded_theory name); + + val nodes = nodes_of (the_version state version_id); + val required = make_required nodes; val _ = - nodes_of (the_version state version_id) |> String_Graph.schedule + nodes |> String_Graph.schedule (fn deps => fn (name, node) => - if visible_node node orelse pending_result node then + if Symtab.defined required name orelse visible_node node orelse pending_result node then let fun body () = (if forall finished_import deps then @@ -498,21 +516,6 @@ local -fun make_required nodes = - let - fun all_preds P = - String_Graph.fold (fn (a, (node, _)) => P node ? cons a) nodes [] - |> String_Graph.all_preds nodes - |> Symtab.make_set; - - val all_visible = all_preds visible_node; - val all_required = all_preds required_node; - in - Symtab.fold (fn (a, ()) => - exists (Symtab.defined all_visible) (String_Graph.immediate_succs nodes a) ? 
- Symtab.update (a, ())) all_visible all_required - end; - fun init_theory deps node span = let val master_dir = master_directory node; diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/document.scala --- a/src/Pure/PIDE/document.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/document.scala Mon May 25 22:11:43 2015 +0200 @@ -502,21 +502,21 @@ final case class State private( /*reachable versions*/ - val versions: Map[Document_ID.Version, Version] = Map.empty, + versions: Map[Document_ID.Version, Version] = Map.empty, /*inlined auxiliary files*/ - val blobs: Set[SHA1.Digest] = Set.empty, + blobs: Set[SHA1.Digest] = Set.empty, /*static markup from define_command*/ - val commands: Map[Document_ID.Command, Command.State] = Map.empty, + commands: Map[Document_ID.Command, Command.State] = Map.empty, /*dynamic markup from execution*/ - val execs: Map[Document_ID.Exec, Command.State] = Map.empty, + execs: Map[Document_ID.Exec, Command.State] = Map.empty, /*command-exec assignment for each version*/ - val assignments: Map[Document_ID.Version, State.Assignment] = Map.empty, + assignments: Map[Document_ID.Version, State.Assignment] = Map.empty, /*commands with markup produced by other commands (imm_succs)*/ - val commands_redirection: Graph[Document_ID.Command, Unit] = Graph.long, + commands_redirection: Graph[Document_ID.Command, Unit] = Graph.long, /*explicit (linear) history*/ - val history: History = History.init, + history: History = History.init, /*intermediate state between remove_versions/removed_versions*/ - val removing_versions: Boolean = false) + removing_versions: Boolean = false) { private def fail[A]: A = throw new State.Fail(this) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/markup_tree.scala --- a/src/Pure/PIDE/markup_tree.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/markup_tree.scala Mon May 25 22:11:43 2015 +0200 @@ -57,7 +57,7 @@ def filter_markup(elements: Markup.Elements): List[XML.Elem] = { var result: List[XML.Elem] = Nil - 
for { elem <- rev_markup; if (elements(elem.name)) } + for (elem <- rev_markup if elements(elem.name)) result ::= elem result.toList } @@ -267,4 +267,3 @@ case list => list.mkString("Tree(", ",", ")") } } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/prover.scala --- a/src/Pure/PIDE/prover.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/prover.scala Mon May 25 22:11:43 2015 +0200 @@ -293,7 +293,7 @@ { val n = read_int() val buf = - if (n <= default_buffer.size) default_buffer + if (n <= default_buffer.length) default_buffer else new Array[Byte](n) var i = 0 @@ -367,4 +367,3 @@ protocol_command_bytes(name, args.map(Bytes(_)): _*) } } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/text.scala --- a/src/Pure/PIDE/text.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/text.scala Mon May 25 22:11:43 2015 +0200 @@ -34,7 +34,7 @@ } } - sealed case class Range(val start: Offset, val stop: Offset) + sealed case class Range(start: Offset, stop: Offset) { // denotation: {start} Un {i. 
start < i & i < stop} if (start > stop) @@ -124,7 +124,7 @@ /* information associated with text range */ - sealed case class Info[A](val range: Text.Range, val info: A) + sealed case class Info[A](range: Text.Range, info: A) { def restrict(r: Text.Range): Info[A] = Info(range.restrict(r), info) def try_restrict(r: Text.Range): Option[Info[A]] = range.try_restrict(r).map(Info(_, info)) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/PIDE/xml.scala --- a/src/Pure/PIDE/xml.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/PIDE/xml.scala Mon May 25 22:11:43 2015 +0200 @@ -36,9 +36,9 @@ /* wrapped elements */ - val XML_ELEM = "xml_elem"; - val XML_NAME = "xml_name"; - val XML_BODY = "xml_body"; + val XML_ELEM = "xml_elem" + val XML_NAME = "xml_name" + val XML_BODY = "xml_body" object Wrapped_Elem { diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/System/isabelle_process.scala --- a/src/Pure/System/isabelle_process.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/System/isabelle_process.scala Mon May 25 22:11:43 2015 +0200 @@ -25,7 +25,7 @@ process.stdin.close process } - catch { case exn @ ERROR(_) => system_channel.accepted(); throw(exn) } + catch { case exn @ ERROR(_) => system_channel.accepted(); throw exn } new Isabelle_Process(receiver, system_channel, system_process) } diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/System/isabelle_system.ML --- a/src/Pure/System/isabelle_system.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/System/isabelle_system.ML Mon May 25 22:11:43 2015 +0200 @@ -59,8 +59,10 @@ (* directory operations *) fun mkdirs path = - if File.is_dir path orelse #rc (Bash.process ("mkdir -p " ^ File.shell_path path)) = 0 then () - else error ("Failed to create directory: " ^ Path.print path); + if File.is_dir path then () + else + (bash ("perl -e \"use File::Path make_path; make_path(" ^ File.shell_path path ^ ");\""); + if File.is_dir path then () else error ("Failed to create directory: " ^ Path.print path)); fun mkdir path = if File.is_dir path then 
() else OS.FileSys.mkDir (File.platform_path path); diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/System/isabelle_system.scala --- a/src/Pure/System/isabelle_system.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/System/isabelle_system.scala Mon May 25 22:11:43 2015 +0200 @@ -93,9 +93,9 @@ default( default( default(sys.env + ("ISABELLE_JDK_HOME" -> posix_path(jdk_home())), - ("TEMP_WINDOWS" -> temp_windows)), - ("HOME" -> user_home)), - ("ISABELLE_APP" -> "true")) + "TEMP_WINDOWS" -> temp_windows), + "HOME" -> user_home), + "ISABELLE_APP" -> "true") } val system_home = @@ -125,8 +125,8 @@ val entries = (for (entry <- File.read(dump) split "\u0000" if entry != "") yield { val i = entry.indexOf('=') - if (i <= 0) (entry -> "") - else (entry.substring(0, i) -> entry.substring(i + 1)) + if (i <= 0) entry -> "" + else entry.substring(0, i) -> entry.substring(i + 1) }).toMap entries + ("PATH" -> entries("PATH_JVM")) - "PATH_JVM" } @@ -145,7 +145,8 @@ def getenv_strict(name: String): String = { val value = getenv(name) - if (value != "") value else error("Undefined environment variable: " + name) + if (value != "") value + else error("Undefined Isabelle environment variable: " + quote(name)) } def get_cygwin_root(): String = getenv_strict("CYGWIN_ROOT") @@ -257,8 +258,10 @@ /* mkdirs */ def mkdirs(path: Path): Unit = - if (path.is_dir || bash("mkdir -p " + shell_path(path)).rc == 0) () - else error("Failed to create directory: " + quote(platform_path(path))) + if (!path.is_dir) { + bash("perl -e \"use File::Path make_path; make_path(" + shell_path(path) + ");\"") + if (!path.is_dir) error("Failed to create directory: " + quote(platform_path(path))) + } diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/System/options.scala --- a/src/Pure/System/options.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/System/options.scala Mon May 25 22:11:43 2015 +0200 @@ -368,7 +368,7 @@ (for { (name, opt2) <- options.iterator opt1 = defaults.options.get(name) - if (opt1.isEmpty || 
opt1.get.value != opt2.value) + if opt1.isEmpty || opt1.get.value != opt2.value } yield (name, opt2.value, if (opt1.isEmpty) " (* unknown *)" else "")).toList val prefs = @@ -429,4 +429,3 @@ } val seconds = new Seconds_Access } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Thy/html.scala --- a/src/Pure/Thy/html.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Thy/html.scala Mon May 25 22:11:43 2015 +0200 @@ -28,7 +28,7 @@ case '"' => result ++= """ case '\'' => result ++= "'" case '\n' => result ++= "
" - case c => result += c + case _ => result += c } def encode_chars(s: String) = s.iterator.foreach(encode_char(_)) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Thy/thy_header.scala --- a/src/Pure/Thy/thy_header.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Thy/thy_header.scala Mon May 25 22:11:43 2015 +0200 @@ -100,7 +100,7 @@ val args = position(theory_name) ~ - (opt($$$(IMPORTS) ~! (rep1(position(theory_xname)))) ^^ + (opt($$$(IMPORTS) ~! rep1(position(theory_xname))) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~ (opt($$$(KEYWORDS) ~! keyword_decls) ^^ { case None => Nil case Some(_ ~ xs) => xs }) ~ diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Thy/thy_syntax.scala --- a/src/Pure/Thy/thy_syntax.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Thy/thy_syntax.scala Mon May 25 22:11:43 2015 +0200 @@ -83,7 +83,8 @@ if (update_header) { val node1 = node.update_header(header) if (node.header.imports.map(_._1) != node1.header.imports.map(_._1) || - node.header.keywords != node1.header.keywords) syntax_changed0 += name + node.header.keywords != node1.header.keywords || + node.header.errors != node1.header.errors) syntax_changed0 += name nodes += (name -> node1) doc_edits += (name -> Document.Node.Deps(header)) } @@ -304,7 +305,7 @@ val reparse = (syntax_changed /: nodes0.iterator)({ case (reparse, (name, node)) => - if (node.load_commands.exists(_.blobs_changed(doc_blobs))) + if (node.load_commands.exists(_.blobs_changed(doc_blobs)) && !reparse.contains(name)) name :: reparse else reparse }) @@ -338,7 +339,7 @@ node2, (name, node2.edit_perspective)) else node2 - if (!(node.same_perspective(node3.text_perspective, node3.perspective))) + if (!node.same_perspective(node3.text_perspective, node3.perspective)) doc_edits += (name -> node3.perspective) doc_edits += (name -> Document.Node.Edits(diff_commands(commands, node3.commands))) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Tools/bibtex.scala --- a/src/Pure/Tools/bibtex.scala Sat May 23 22:13:24 2015 
+0200 +++ b/src/Pure/Tools/bibtex.scala Mon May 25 22:11:43 2015 +0200 @@ -133,7 +133,7 @@ } } - sealed case class Token(kind: Token.Kind.Value, val source: String) + sealed case class Token(kind: Token.Kind.Value, source: String) { def is_kind: Boolean = kind == Token.Kind.COMMAND || @@ -398,7 +398,7 @@ var ctxt = context while (!in.atEnd) { Parsers.parse(Parsers.chunk_line(ctxt), in) match { - case Parsers.Success((x, c), rest) => { chunks += x; ctxt = c; in = rest } + case Parsers.Success((x, c), rest) => chunks += x; ctxt = c; in = rest case Parsers.NoSuccess(_, rest) => error("Unepected failure to parse input:\n" + rest.source.toString) } @@ -406,4 +406,3 @@ (chunks.toList, ctxt) } } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/Tools/build.scala Mon May 25 22:11:43 2015 +0200 @@ -908,7 +908,7 @@ loop(pending - name, running - name, results + (name -> Result(false, heap, res.rc))) //}}} - case None if (running.size < (max_jobs max 1)) => + case None if running.size < (max_jobs max 1) => //{{{ check/start next job pending.dequeue(running.isDefinedAt(_)) match { case Some((name, info)) => diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/drule.ML --- a/src/Pure/drule.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/drule.ML Mon May 25 22:11:43 2015 +0200 @@ -93,6 +93,7 @@ val dest_term: thm -> cterm val cterm_rule: (thm -> thm) -> cterm -> cterm val dummy_thm: thm + val is_sort_constraint: term -> bool val sort_constraintI: thm val sort_constraint_eq: thm val with_subgoal: int -> (thm -> thm) -> thm -> thm @@ -647,6 +648,9 @@ (* sort_constraint *) +fun is_sort_constraint (Const ("Pure.sort_constraint", _) $ Const ("Pure.type", _)) = true + | is_sort_constraint _ = false; + val sort_constraintI = store_standard_thm (Binding.concealed (Binding.make ("sort_constraintI", @{here}))) (Thm.equal_elim (Thm.symmetric sort_constraint_def) (mk_term T)); diff -r cc71f01f9fde -r 
ff82ba1893c8 src/Pure/library.scala --- a/src/Pure/library.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/library.scala Mon May 25 22:11:43 2015 +0200 @@ -107,7 +107,7 @@ def hasNext(): Boolean = state.isDefined def next(): CharSequence = state match { - case Some((s, i)) => { state = next_chunk(i); s } + case Some((s, i)) => state = next_chunk(i); s case None => Iterator.empty.next() } } @@ -207,7 +207,7 @@ /* canonical list operations */ - def member[A, B](xs: List[A])(x: B): Boolean = xs.exists(_ == x) + def member[A, B](xs: List[A])(x: B): Boolean = xs.contains(x) def insert[A](x: A)(xs: List[A]): List[A] = if (xs.contains(x)) xs else x :: xs def remove[A, B](x: B)(xs: List[A]): List[A] = if (member(xs)(x)) xs.filterNot(_ == x) else xs def update[A](x: A)(xs: List[A]): List[A] = x :: remove(x)(xs) diff -r cc71f01f9fde -r ff82ba1893c8 src/Pure/pure_syn.ML --- a/src/Pure/pure_syn.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Pure/pure_syn.ML Mon May 25 22:11:43 2015 +0200 @@ -43,7 +43,7 @@ val _ = Outer_Syntax.command ("text_raw", @{here}) "raw LaTeX text" - (Parse.document_source >> K (Toplevel.imperative I)); + (Parse.document_source >> K (Toplevel.keep (fn _ => ()))); val _ = Outer_Syntax.command ("theory", @{here}) "begin theory" diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/Code/code_thingol.ML --- a/src/Tools/Code/code_thingol.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/Code/code_thingol.ML Mon May 25 22:11:43 2015 +0200 @@ -924,29 +924,33 @@ fun code_thms ctxt = Pretty.writeln o Code_Preproc.pretty ctxt o code_depgr ctxt; -fun join_strong_conn gr = +fun coalesce_strong_conn gr = let val xss = Graph.strong_conn gr; - val xss_zs = map (fn xs => (xs, commas xs)) xss; - val z_for = the o AList.lookup (op =) (maps (fn (xs, z) => map (fn x => (x, z)) xs) xss_zs); - val succs = map (fn (xs, z) => (z, (map z_for o maps (Graph.immediate_succs gr)) xs)) xss_zs; + val xss_ys = map (fn xs => (xs, commas xs)) xss; + val y_for = the o AList.lookup (op =) (maps 
(fn (xs, y) => map (fn x => (x, y)) xs) xss_ys); + fun coalesced_succs_for xs = maps (Graph.immediate_succs gr) xs + |> subtract (op =) xs + |> map y_for + |> distinct (op =); + val succs = map (fn (xs, _) => (xs, coalesced_succs_for xs)) xss_ys; in - Graph.empty - |> fold (fn (xs, z) => Graph.new_node (z, map (Graph.get_node gr) xs)) xss_zs - |> fold (fn (z, succs) => fold (fn w => Graph.add_edge (z, w)) succs) succs + map (fn (xs, y) => ((y, xs), (maps (Graph.get_node gr) xs, (the o AList.lookup (op =) succs) xs))) xss_ys end; fun code_deps ctxt consts = let val thy = Proof_Context.theory_of ctxt; - val namify = commas o map (Code.string_of_const thy); + fun mk_entry ((name, consts), (ps, deps)) = + let + val label = commas (map (Code.string_of_const thy) consts); + in ((name, Graph_Display.content_node label (Pretty.str label :: ps)), deps) end; in code_depgr ctxt consts - |> Graph.map (fn c => fn _ => c) - |> join_strong_conn - |> Graph.dest - |> map (fn ((c, cs), ds) => ((c, Graph_Display.content_node (namify cs) []), ds)) - |> Graph_Display.display_graph_old + |> Graph.map (K (Code.pretty_cert thy o snd)) + |> coalesce_strong_conn + |> map mk_entry + |> Graph_Display.display_graph end; local diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/Graphview/shapes.scala --- a/src/Tools/Graphview/shapes.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/Graphview/shapes.scala Mon May 25 22:11:43 2015 +0200 @@ -163,7 +163,7 @@ val seg = Array[Double](0.0, 0.0, 0.0, 0.0, 0.0, 0.0) var p1 = (0.0, 0.0) var p2 = (0.0, 0.0) - while (!i.isDone()) { + while (!i.isDone) { i.currentSegment(seg) match { case PathIterator.SEG_MOVETO => p2 = (seg(0), seg(1)) case PathIterator.SEG_LINETO => @@ -223,4 +223,4 @@ } } } -} \ No newline at end of file +} diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/Graphview/tree_panel.scala --- a/src/Tools/Graphview/tree_panel.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/Graphview/tree_panel.scala Mon May 25 22:11:43 2015 +0200 @@ -70,6 +70,7 
@@ private val root = new DefaultMutableTreeNode("Nodes") val tree = new JTree(root) + tree.setRowHeight(0) tree.addKeyListener(new KeyAdapter { override def keyPressed(e: KeyEvent): Unit = diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/Isabelle.props --- a/src/Tools/jEdit/src/Isabelle.props Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/Isabelle.props Mon May 25 22:11:43 2015 +0200 @@ -5,7 +5,7 @@ #identification plugin.isabelle.jedit.Plugin.name=Isabelle plugin.isabelle.jedit.Plugin.author=Johannes Hölzl, Fabian Immler, Lars Hupel, Markus Kaiser, Makarius Wenzel -plugin.isabelle.jedit.Plugin.version=5.0 +plugin.isabelle.jedit.Plugin.version=6.0 plugin.isabelle.jedit.Plugin.description=Isabelle Prover IDE #system parameters @@ -14,10 +14,10 @@ #dependencies plugin.isabelle.jedit.Plugin.depend.0=jdk 1.7 -plugin.isabelle.jedit.Plugin.depend.1=jedit 05.01.00.00 +plugin.isabelle.jedit.Plugin.depend.1=jedit 05.02.00.00 plugin.isabelle.jedit.Plugin.depend.2=plugin console.ConsolePlugin 5.1.4 plugin.isabelle.jedit.Plugin.depend.3=plugin errorlist.ErrorListPlugin 2.3 -plugin.isabelle.jedit.Plugin.depend.4=plugin sidekick.SideKickPlugin 1.6 +plugin.isabelle.jedit.Plugin.depend.4=plugin sidekick.SideKickPlugin 1.7 plugin.isabelle.jedit.Plugin.depend.5=plugin gatchan.highlight.HighlightPlugin 2.0 #options diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/active.scala --- a/src/Tools/jEdit/src/active.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/active.scala Mon May 25 22:11:43 2015 +0200 @@ -61,9 +61,9 @@ props match { case Position.Id(id) => Isabelle.edit_command(snapshot, buffer, - props.exists(_ == Markup.PADDING_COMMAND), id, text) + props.contains(Markup.PADDING_COMMAND), id, text) case _ => - if (props.exists(_ == Markup.PADDING_LINE)) + if (props.contains(Markup.PADDING_LINE)) Isabelle.insert_line_padding(text_area, text) else text_area.setSelectedText(text) } diff -r cc71f01f9fde -r ff82ba1893c8 
src/Tools/jEdit/src/bibtex_jedit.scala --- a/src/Tools/jEdit/src/bibtex_jedit.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/bibtex_jedit.scala Mon May 25 22:11:43 2015 +0200 @@ -252,11 +252,11 @@ val label_html = "" + HTML.encode(kind) + "" + (if (name == "") "" else " " + HTML.encode(name)) + "" - val range = Text.Range(offset, offset + source.size) + val range = Text.Range(offset, offset + source.length) val asset = new Asset(label, label_html, range, source) data.root.add(new DefaultMutableTreeNode(asset)) } - offset += source.size + offset += source.length } data } diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/document_model.scala --- a/src/Tools/jEdit/src/document_model.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/document_model.scala Mon May 25 22:11:43 2015 +0200 @@ -27,7 +27,6 @@ def apply(buffer: Buffer): Option[Document_Model] = { - GUI_Thread.require {} buffer.getProperty(key) match { case model: Document_Model => Some(model) case _ => None @@ -51,18 +50,19 @@ { GUI_Thread.require {} - old_model match { - case Some(old) - if old.node_name == node_name && Isabelle.buffer_token_marker(buffer).isEmpty => old - - case _ => - apply(buffer).map(_.deactivate) - val model = new Document_Model(session, buffer, node_name) - buffer.setProperty(key, model) - model.activate() - buffer.propertiesChanged - model - } + val model = + old_model match { + case Some(old) if old.node_name == node_name => old + case _ => + apply(buffer).map(_.deactivate) + val model = new Document_Model(session, buffer, node_name) + buffer.setProperty(key, model) + model.activate() + buffer.propertiesChanged + model + } + model.init_token_marker + model } } @@ -223,17 +223,19 @@ /* pending edits */ - private object pending_edits // owned by GUI thread + private object pending_edits { private var pending_clear = false private val pending = new mutable.ListBuffer[Text.Edit] private var last_perspective = Document.Node.no_perspective_text - def 
is_pending(): Boolean = pending_clear || pending.nonEmpty - def snapshot(): List[Text.Edit] = pending.toList + def is_pending(): Boolean = synchronized { pending_clear || pending.nonEmpty } + def snapshot(): List[Text.Edit] = synchronized { pending.toList } - def flushed_edits(doc_blobs: Document.Blobs): List[Document.Edit_Text] = + def flushed_edits(doc_blobs: Document.Blobs): List[Document.Edit_Text] = synchronized { + GUI_Thread.require {} + val clear = pending_clear val edits = snapshot() val (reparse, perspective) = node_perspective(doc_blobs) @@ -246,8 +248,10 @@ else Nil } - def edit(clear: Boolean, e: Text.Edit) + def edit(clear: Boolean, e: Text.Edit): Unit = synchronized { + GUI_Thread.require {} + reset_blob() reset_bibtex() @@ -261,10 +265,10 @@ } def snapshot(): Document.Snapshot = - GUI_Thread.require { session.snapshot(node_name, pending_edits.snapshot()) } + session.snapshot(node_name, pending_edits.snapshot()) def flushed_edits(doc_blobs: Document.Blobs): List[Document.Edit_Text] = - GUI_Thread.require { pending_edits.flushed_edits(doc_blobs) } + pending_edits.flushed_edits(doc_blobs) /* buffer listener */ diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/documentation_dockable.scala --- a/src/Tools/jEdit/src/documentation_dockable.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/documentation_dockable.scala Mon May 25 22:11:43 2015 +0200 @@ -46,6 +46,7 @@ } private val tree = new JTree(root) + tree.setRowHeight(0) override def focusOnDefaultComponent { tree.requestFocusInWindow } diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/graphview_dockable.scala --- a/src/Tools/jEdit/src/graphview_dockable.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/graphview_dockable.scala Mon May 25 22:11:43 2015 +0200 @@ -117,13 +117,13 @@ override def init() { - GUI.parent_window(this).map(_.addWindowFocusListener(window_focus_listener)) + GUI.parent_window(this).foreach(_.addWindowFocusListener(window_focus_listener)) 
PIDE.session.global_options += main } override def exit() { - GUI.parent_window(this).map(_.removeWindowFocusListener(window_focus_listener)) + GUI.parent_window(this).foreach(_.removeWindowFocusListener(window_focus_listener)) PIDE.session.global_options -= main } } diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/isabelle.scala --- a/src/Tools/jEdit/src/isabelle.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/isabelle.scala Mon May 25 22:11:43 2015 +0200 @@ -78,10 +78,8 @@ def buffer_token_marker(buffer: Buffer): Option[TokenMarker] = { val mode = JEdit_Lib.buffer_mode(buffer) - val new_token_marker = - if (mode == "isabelle") Some(new Token_Markup.Marker(mode, Some(buffer))) - else mode_token_marker(mode) - if (new_token_marker == Some(buffer.getTokenMarker)) None else new_token_marker + if (mode == "isabelle") Some(new Token_Markup.Marker(mode, Some(buffer))) + else mode_token_marker(mode) } diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/isabelle_sidekick.scala --- a/src/Tools/jEdit/src/isabelle_sidekick.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/isabelle_sidekick.scala Mon May 25 22:11:43 2015 +0200 @@ -93,7 +93,7 @@ // FIXME lock buffer (!??) 
val data = Isabelle_Sidekick.root_data(buffer) - val syntax = GUI_Thread.now { Isabelle.buffer_syntax(buffer) } + val syntax = Isabelle.buffer_syntax(buffer) val ok = if (syntax.isDefined) { val ok = parser(buffer, syntax.get, data) @@ -162,11 +162,9 @@ override def parser(buffer: Buffer, syntax: Outer_Syntax, data: SideKickParsedData): Boolean = { val opt_snapshot = - GUI_Thread.now { - PIDE.document_model(buffer) match { - case Some(model) if model.is_theory => Some(model.snapshot) - case _ => None - } + PIDE.document_model(buffer) match { + case Some(model) if model.is_theory => Some(model.snapshot) + case _ => None } opt_snapshot match { case Some(snapshot) => diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/jEdit.props --- a/src/Tools/jEdit/src/jEdit.props Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/jEdit.props Mon May 25 22:11:43 2015 +0200 @@ -256,6 +256,8 @@ sidekick.complete-instant.toggle=false sidekick.complete-popup.accept-characters=\\t sidekick.complete-popup.insert-characters= +sidekick.persistentFilter=true +sidekick.showFilter=true sidekick.splitter.location=721 systrayicon=false tip.show=false diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/jedit_lib.scala --- a/src/Tools/jEdit/src/jedit_lib.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/jedit_lib.scala Mon May 25 22:11:43 2015 +0200 @@ -230,7 +230,7 @@ /* graphics range */ - case class Gfx_Range(val x: Int, val y: Int, val length: Int) + case class Gfx_Range(x: Int, y: Int, length: Int) // NB: jEdit always normalizes \r\n and \r to \n // NB: last line lacks \n @@ -274,7 +274,7 @@ if (offset >= 0) { val range = point_range(text_area.getBuffer, offset) gfx_range(text_area, range) match { - case Some(g) if (g.x <= x && x < g.x + g.length) => Some(range) + case Some(g) if g.x <= x && x < g.x + g.length => Some(range) case _ => None } } @@ -371,4 +371,3 @@ (mod & InputEvent.META_MASK) != 0 } } - diff -r cc71f01f9fde -r ff82ba1893c8 
src/Tools/jEdit/src/plugin.scala --- a/src/Tools/jEdit/src/plugin.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/plugin.scala Mon May 25 22:11:43 2015 +0200 @@ -13,7 +13,7 @@ import scala.swing.{ListView, ScrollPane} -import org.gjt.sp.jedit.{jEdit, EBMessage, EBPlugin, Buffer, View, Debug} +import org.gjt.sp.jedit.{jEdit, EBMessage, EBPlugin, Buffer, View, Debug, PerspectiveManager} import org.jedit.options.CombinedOptions import org.gjt.sp.jedit.gui.AboutDialog import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea} @@ -148,7 +148,7 @@ /* current document content */ - def snapshot(view: View): Document.Snapshot = GUI_Thread.now + def snapshot(view: View): Document.Snapshot = { val buffer = view.getBuffer document_model(buffer) match { @@ -199,7 +199,9 @@ private lazy val delay_load = GUI_Thread.delay_last(PIDE.options.seconds("editor_load_delay")) { - if (Isabelle.continuous_checking && delay_load_activated()) { + if (Isabelle.continuous_checking && delay_load_activated() && + PerspectiveManager.isPerspectiveEnabled) + { try { val view = jEdit.getActiveView() @@ -311,7 +313,7 @@ if (Distribution.is_identified && !Distribution.is_official) { GUI.warning_dialog(jEdit.getActiveView, "Isabelle version for testing", - "This is " + Distribution.version +".", + "This is " + Distribution.version + ".", "It is for testing only, not for production use.") } @@ -330,10 +332,10 @@ } case msg: EditPaneUpdate - if (msg.getWhat == EditPaneUpdate.BUFFER_CHANGING || + if msg.getWhat == EditPaneUpdate.BUFFER_CHANGING || msg.getWhat == EditPaneUpdate.BUFFER_CHANGED || msg.getWhat == EditPaneUpdate.CREATED || - msg.getWhat == EditPaneUpdate.DESTROYED) => + msg.getWhat == EditPaneUpdate.DESTROYED => val edit_pane = msg.getEditPane val buffer = edit_pane.getBuffer val text_area = edit_pane.getTextArea diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/pretty_text_area.scala --- a/src/Tools/jEdit/src/pretty_text_area.scala Sat May 23 22:13:24 2015 +0200 +++ 
b/src/Tools/jEdit/src/pretty_text_area.scala Mon May 25 22:11:43 2015 +0200 @@ -118,7 +118,7 @@ jEdit.getColorProperty("view.gutter.focusBorderColor"), jEdit.getColorProperty("view.gutter.noFocusBorderColor"), getPainter.getBackground) - getGutter.setFoldPainter(getFoldPainter) + getGutter.setFoldPainter(view.getTextArea.getFoldPainter) getGutter.setGutterEnabled(jEdit.getBooleanProperty("view.gutter.enabled")) if (getWidth > 0) { diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/rendering.scala --- a/src/Tools/jEdit/src/rendering.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/rendering.scala Mon May 25 22:11:43 2015 +0200 @@ -407,7 +407,7 @@ PIDE.editor.hyperlink_command_id(snapshot, id, offset) case _ => None } - opt_link.map(link => (links :+ Text.Info(snapshot.convert(info_range), link))) + opt_link.map(link => links :+ Text.Info(snapshot.convert(info_range), link)) case (links, Text.Info(info_range, XML.Elem(Markup(Markup.POSITION, props), _))) => val opt_link = @@ -419,14 +419,14 @@ PIDE.editor.hyperlink_command_id(snapshot, id, offset) case _ => None } - opt_link.map(link => (links :+ Text.Info(snapshot.convert(info_range), link))) + opt_link.map(link => links :+ Text.Info(snapshot.convert(info_range), link)) case (links, Text.Info(info_range, XML.Elem(Markup.Citation(name), _))) => val opt_link = Bibtex_JEdit.entries_iterator.collectFirst( { case (a, buffer, offset) if a == name => PIDE.editor.hyperlink_buffer(buffer, offset) }) - opt_link.map(link => (links :+ Text.Info(snapshot.convert(info_range), link))) + opt_link.map(link => links :+ Text.Info(snapshot.convert(info_range), link)) case _ => None }) match { case Text.Info(_, _ :+ info) :: _ => Some(info) case _ => None } @@ -472,7 +472,7 @@ case (msgs, Text.Info(info_range, XML.Elem(Markup(name, props @ Markup.Serial(serial)), body))) => val entry: Command.Results.Entry = - (serial -> XML.Elem(Markup(Markup.message(name), props), body)) + serial -> 
XML.Elem(Markup(Markup.message(name), props), body) Some(Text.Info(snapshot.convert(info_range), entry) :: msgs) case _ => None diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/rich_text_area.scala --- a/src/Tools/jEdit/src/rich_text_area.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/rich_text_area.scala Mon May 25 22:11:43 2015 +0200 @@ -284,7 +284,7 @@ for { (color, separator) <- rendering.line_background(line_range) } { gfx.setColor(color) - val sep = if (separator) (2 min (line_height / 2)) else 0 + val sep = if (separator) 2 min (line_height / 2) else 0 gfx.fillRect(0, y + i * line_height, text_area.getWidth, line_height - sep) } @@ -659,4 +659,3 @@ painter.removeExtension(set_state) } } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/sledgehammer_dockable.scala --- a/src/Tools/jEdit/src/sledgehammer_dockable.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/sledgehammer_dockable.scala Mon May 25 22:11:43 2015 +0200 @@ -73,13 +73,15 @@ private def clicked { PIDE.options.string("sledgehammer_provers") = provers.getText - sledgehammer.apply_query(List(provers.getText, isar_proofs.selected.toString)) + sledgehammer.apply_query( + List(provers.getText, isar_proofs.selected.toString, try0.selected.toString)) } private val provers_label = new Label("Provers:") { tooltip = GUI.tooltip_lines( - "Automatic provers as space-separated list, e.g.\ne spass remote_vampire") + "Automatic provers as space-separated list, e.g.\n" + + PIDE.options.value.check_name("sledgehammer_provers").default_value) } private val provers = new HistoryTextField("isabelle-sledgehammer-provers") { @@ -103,10 +105,15 @@ } private val isar_proofs = new CheckBox("Isar proofs") { - tooltip = "Specify whether Isar proofs should be output in addition to metis line" + tooltip = "Specify whether Isar proofs should be output in addition to \"by\" one-liner" selected = false } + private val try0 = new CheckBox("Try methods") { + tooltip = "Try standard 
proof methods like \"auto\" and \"blast\" as alternatives to \"metis\"" + selected = true + } + private val apply_query = new Button("Apply") { tooltip = "Search for first-order proof using automatic theorem provers" reactions += { case ButtonClicked(_) => clicked } @@ -124,7 +131,7 @@ private val controls = new Wrap_Panel(Wrap_Panel.Alignment.Right)( - provers_label, Component.wrap(provers), isar_proofs, + provers_label, Component.wrap(provers), isar_proofs, try0, process_indicator.component, apply_query, cancel_query, locate_query, zoom) add(controls.peer, BorderLayout.NORTH) diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/jEdit/src/token_markup.scala --- a/src/Tools/jEdit/src/token_markup.scala Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/jEdit/src/token_markup.scala Mon May 25 22:11:43 2015 +0200 @@ -109,7 +109,7 @@ { val max_user_fonts = 2 if (Symbol.font_names.length > max_user_fonts) - error("Too many user symbol fonts (max " + max_user_fonts + " permitted): " + + error("Too many user symbol fonts (" + max_user_fonts + " permitted): " + Symbol.font_names.mkString(", ")) override def extendStyles(styles: Array[SyntaxStyle]): Array[SyntaxStyle] = @@ -448,4 +448,3 @@ } } } - diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/quickcheck.ML --- a/src/Tools/quickcheck.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/quickcheck.ML Mon May 25 22:11:43 2015 +0200 @@ -510,8 +510,8 @@ fun quickcheck args i state = Option.map (the o get_first counterexample_of) (fst (gen_quickcheck args i state)); -fun quickcheck_cmd args i state = - gen_quickcheck args i (Toplevel.proof_of state) +fun quickcheck_cmd args i st = + gen_quickcheck args i (Toplevel.proof_of st) |> apfst (Option.map (the o get_first response_of)) |> (fn (r, state) => writeln (Pretty.string_of @@ -534,7 +534,7 @@ Outer_Syntax.command @{command_keyword quickcheck} "try to find counterexample for subgoal" (parse_args -- Scan.optional Parse.nat 1 >> - (fn (args, i) => Toplevel.keep (quickcheck_cmd args i))); + 
(fn (args, i) => Toplevel.keep_proof (quickcheck_cmd args i))); (* automatic testing *) diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/solve_direct.ML --- a/src/Tools/solve_direct.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/solve_direct.ML Mon May 25 22:11:43 2015 +0200 @@ -94,7 +94,7 @@ val _ = Outer_Syntax.command @{command_keyword solve_direct} "try to solve conjectures directly with existing theorems" - (Scan.succeed (Toplevel.keep (ignore o solve_direct o Toplevel.proof_of))); + (Scan.succeed (Toplevel.keep_proof (ignore o solve_direct o Toplevel.proof_of))); (* hook *) diff -r cc71f01f9fde -r ff82ba1893c8 src/Tools/try.ML --- a/src/Tools/try.ML Sat May 23 22:13:24 2015 +0200 +++ b/src/Tools/try.ML Mon May 25 22:11:43 2015 +0200 @@ -77,7 +77,7 @@ val _ = Outer_Syntax.command @{command_keyword try} "try a combination of automatic proving and disproving tools" - (Scan.succeed (Toplevel.keep (ignore o try_tools o Toplevel.proof_of))) + (Scan.succeed (Toplevel.keep_proof (ignore o try_tools o Toplevel.proof_of))) (* automatic try (TTY) *) diff -r cc71f01f9fde -r ff82ba1893c8 src/ZF/AC/AC16_WO4.thy --- a/src/ZF/AC/AC16_WO4.thy Sat May 23 22:13:24 2015 +0200 +++ b/src/ZF/AC/AC16_WO4.thy Mon May 25 22:11:43 2015 +0200 @@ -541,7 +541,7 @@ THEN apply_type])+ done -lemma (in AC16) conclusion: +lemma (in AC16) "conclusion": "\a f. Ord(a) & domain(f) = a & (\bb m)" apply (rule well_ord_LL [THEN exE]) apply (rename_tac S)