# HG changeset patch
# User wenzelm
# Date 1455739607 -3600
# Node ID a105bea3936f0772962ae3af09c092a58b014169
# Parent  ec44535f954a5d6ec9e20a9e63e37b7db9dc9a26
# Parent  ccb42dbf4aa18a5d89cc65120667f9bdcfa49885
merged

diff -r ec44535f954a -r a105bea3936f Admin/bash_process/bash_process.c
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/bash_process/bash_process.c Wed Feb 17 21:06:47 2016 +0100
@@ -0,0 +1,88 @@
+/* Author: Makarius
+
+Bash process with separate process group id.
+*/
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
+
+static void fail(const char *msg)
+{
+  fprintf(stderr, "%s\n", msg);
+  fflush(stderr);
+  exit(2);
+}
+
+
+int main(int argc, char *argv[])
+{
+  /* args */
+
+  if (argc < 2) {
+    fprintf(stderr, "Bad arguments: missing pid file\n");
+    fflush(stderr);
+    exit(1);
+  }
+  char *pid_name = argv[1];
+
+
+  /* setsid */
+
+  if (setsid() == -1) {
+    pid_t pid = fork();
+    int status;
+
+    if (pid == -1) fail("Cannot set session id (failed to fork)");
+    else if (pid != 0) {
+      if (waitpid(pid, &status, 0) == -1) {
+        fail("Cannot join forked process");
+      }
+
+      if (WIFEXITED(status)) {
+        exit(WEXITSTATUS(status));
+      }
+      else if (WIFSIGNALED(status)) {
+        exit(128 + WTERMSIG(status));
+      }
+      else {
+        fail("Unknown status of forked process");
+      }
+    }
+    else if (setsid() == -1) fail("Cannot set session id (after fork)");
+  }
+
+
+  /* report pid */
+
+  if (strcmp(pid_name, "-") == 0) {
+    fprintf(stdout, "%d\n", getpid());
+    fflush(stdout);
+  }
+  else if (strlen(pid_name) > 0) {
+    FILE *pid_file;
+    pid_file = fopen(pid_name, "w");
+    if (pid_file == NULL) fail("Cannot open pid file");
+    fprintf(pid_file, "%d", getpid());
+    fclose(pid_file);
+  }
+
+
+  /* shift command line */
+
+  int i;
+  for (i = 2; i < argc; i++) {
+    argv[i - 2] = argv[i];
+  }
+  argv[argc - 2] = NULL;
+  argv[argc - 1] = NULL;
+
+
+  /* exec */
+
+  execvp("bash", argv);
+  fail("Cannot exec process");
+}
diff -r ec44535f954a -r a105bea3936f Admin/bash_process/build
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/bash_process/build Wed Feb 17 21:06:47 2016 +0100
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+#
+# Multi-platform build script
+
+THIS="$(cd "$(dirname "$0")"; pwd)"
+PRG="$(basename "$0")"
+
+
+# diagnostics
+
+function usage()
+{
+  echo
+  echo "Usage: $PRG TARGET"
+  echo
+  exit 1
+}
+
+function fail()
+{
+  echo "$1" >&2
+  exit 2
+}
+
+
+# command line args
+
+[ "$#" -eq 0 ] && usage
+TARGET="$1"; shift
+
+[ "$#" -eq 0 ] || usage
+
+
+# main
+
+mkdir -p "$TARGET"
+
+case "$TARGET" in
+  x86_64-linux | x86_64-darwin)
+    cc -m64 bash_process.c -o "$TARGET/bash_process"
+    ;;
+  x86-linux | x86-darwin)
+    cc -m32 bash_process.c -o "$TARGET/bash_process"
+    ;;
+  x86-cygwin)
+    cc bash_process.c -o "$TARGET/bash_process.exe"
+    ;;
+  *)
+    cc bash_process.c -o "$TARGET/bash_process"
+    ;;
+esac
diff -r ec44535f954a -r a105bea3936f Admin/bash_process/etc/settings
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/bash_process/etc/settings Wed Feb 17 21:06:47 2016 +0100
@@ -0,0 +1,3 @@
+# -*- shell-script -*- :mode=shellscript:
+
+ISABELLE_BASH_PROCESS="$COMPONENT/${ISABELLE_PLATFORM64:-$ISABELLE_PLATFORM32}/bash_process"
diff -r ec44535f954a -r a105bea3936f Admin/components/components.sha1
--- a/Admin/components/components.sha1 Wed Feb 17 15:57:10 2016 +0100
+++ b/Admin/components/components.sha1 Wed Feb 17 21:06:47 2016 +0100
@@ -1,3 +1,5 @@
+fbe83b522cb37748ac1b3c943ad71704fdde2f82 bash_process-1.1.1.tar.gz
+bb9ef498cd594b4289221b96146d529c899da209 bash_process-1.1.tar.gz
70105fd6fbfd1a868383fc510772b95234325d31 csdp-6.x.tar.gz 2f6417b8e96a0e4e8354fe0f1a253c18fb55d9a7 cvc3-2.4.1.tar.gz a5e02b5e990da4275dc5d4480c3b72fc73160c28 cvc4-1.5pre-1.tar.gz diff -r ec44535f954a -r a105bea3936f Admin/components/main --- a/Admin/components/main Wed Feb 17 15:57:10 2016 +0100 +++ b/Admin/components/main Wed Feb 17 21:06:47 2016 +0100 @@ -1,8 +1,8 @@ #main components for everyday use, without big impact on overall build time +bash_process-1.1.1 csdp-6.x cvc4-1.5pre-3 e-1.8 -exec_process-1.0.3 Haskabelle-2015 isabelle_fonts-20160102 jdk-8u72 diff -r ec44535f954a -r a105bea3936f Admin/exec_process/build --- a/Admin/exec_process/build Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,53 +0,0 @@ -#!/usr/bin/env bash -# -# Multi-platform build script - -THIS="$(cd "$(dirname "$0")"; pwd)" -PRG="$(basename "$0")" - - -# diagnostics - -function usage() -{ - echo - echo "Usage: $PRG TARGET" - echo - exit 1 -} - -function fail() -{ - echo "$1" >&2 - exit 2 -} - - -# command line args - -[ "$#" -eq 0 ] && usage -TARGET="$1"; shift - -[ "$#" -eq 0 ] || usage - - -# main - -mkdir -p "$TARGET" - -case "$TARGET" in - x86_64-linux | x86_64-darwin) - cc -m64 exec_process.c -o "$TARGET/exec_process" - ;; - x86-linux | x86-darwin) - cc -m32 exec_process.c -o "$TARGET/exec_process" - ;; - x86-cygwin) - cc exec_process.c -o "$TARGET/exec_process.exe" - ;; - *) - cc exec_process.c -o "$TARGET/exec_process" - ;; -esac - - diff -r ec44535f954a -r a105bea3936f Admin/exec_process/etc/settings --- a/Admin/exec_process/etc/settings Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -# -*- shell-script -*- :mode=shellscript: - -EXEC_PROCESS="$COMPONENT/${ISABELLE_PLATFORM64:-$ISABELLE_PLATFORM32}/exec_process" diff -r ec44535f954a -r a105bea3936f Admin/exec_process/exec_process.c --- a/Admin/exec_process/exec_process.c Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -/* Author: Makarius - -Bash process group invocation. 
-*/ - -#include -#include -#include -#include - - -static void fail(const char *msg) -{ - fprintf(stderr, "%s\n", msg); - exit(2); -} - - -int main(int argc, char *argv[]) -{ - /* args */ - - if (argc != 3) { - fprintf(stderr, "Bad arguments\n"); - exit(1); - } - char *pid_name = argv[1]; - char *script = argv[2]; - - - /* setsid */ - - if (setsid() == -1) { - pid_t pid = fork(); - if (pid == -1) fail("Cannot set session id (failed to fork)"); - else if (pid != 0) exit(0); - else if (setsid() == -1) fail("Cannot set session id (after fork)"); - } - - - /* report pid */ - - FILE *pid_file; - pid_file = fopen(pid_name, "w"); - if (pid_file == NULL) fail("Cannot open pid file"); - fprintf(pid_file, "%d", getpid()); - fclose(pid_file); - - - /* exec */ - - char *cmd_line[4]; - cmd_line[0] = "bash"; - cmd_line[1] = "-c"; - cmd_line[2] = script; - cmd_line[3] = NULL; - - execvp(cmd_line[0], cmd_line); - fail("Cannot exec process"); -} - diff -r ec44535f954a -r a105bea3936f Admin/isatest/settings/mac-poly-M2-alternative --- a/Admin/isatest/settings/mac-poly-M2-alternative Wed Feb 17 15:57:10 2016 +0100 +++ b/Admin/isatest/settings/mac-poly-M2-alternative Wed Feb 17 21:06:47 2016 +0100 @@ -26,6 +26,4 @@ ISABELLE_BUILD_OPTIONS="browser_info=true document=pdf threads=2 parallel_proofs=2" -ISABELLE_FULL_TEST=true - ISABELLE_GHC=ghc diff -r ec44535f954a -r a105bea3936f Admin/isatest/settings/mac-poly-M4 --- a/Admin/isatest/settings/mac-poly-M4 Wed Feb 17 15:57:10 2016 +0100 +++ b/Admin/isatest/settings/mac-poly-M4 Wed Feb 17 21:06:47 2016 +0100 @@ -24,8 +24,6 @@ ISABELLE_BUILD_OPTIONS="browser_info=false document=pdf document_variants=document:outline=/proof,/ML threads=4 parallel_proofs=2" -ISABELLE_FULL_TEST=true - ISABELLE_GHC=ghc ISABELLE_MLTON=mlton ISABELLE_OCAML=ocaml diff -r ec44535f954a -r a105bea3936f Admin/isatest/settings/mac-poly-M8 --- a/Admin/isatest/settings/mac-poly-M8 Wed Feb 17 15:57:10 2016 +0100 +++ b/Admin/isatest/settings/mac-poly-M8 Wed Feb 17 21:06:47 2016 +0100 @@ -24,8 +24,6 @@ ISABELLE_BUILD_OPTIONS="browser_info=false document=pdf document_variants=document:outline=/proof,/ML threads=8 parallel_proofs=2" -ISABELLE_FULL_TEST=true - ISABELLE_GHC=ghc ISABELLE_MLTON=mlton ISABELLE_OCAML=ocaml diff -r ec44535f954a -r a105bea3936f CONTRIBUTORS --- a/CONTRIBUTORS Wed Feb 17 15:57:10 2016 +0100 +++ b/CONTRIBUTORS Wed Feb 17 21:06:47 2016 +0100 @@ -3,6 +3,10 @@ listed as an author in one of the source files of this Isabelle distribution. +Contributions to this Isabelle version +-------------------------------------- + + Contributions to Isabelle2016 ----------------------------- diff -r ec44535f954a -r a105bea3936f NEWS --- a/NEWS Wed Feb 17 15:57:10 2016 +0100 +++ b/NEWS Wed Feb 17 21:06:47 2016 +0100 @@ -4,6 +4,15 @@ (Note: Isabelle/jEdit shows a tree-view of the NEWS file in Sidekick.) +New in this Isabelle version +---------------------------- + +*** Isar *** + +* Command '\' is an alias for 'sorry', with different +typesetting. E.g. to produce proof holes in examples and documentation. 
+ + New in Isabelle2016 (February 2016) ----------------------------------- diff -r ec44535f954a -r a105bea3936f lib/texinputs/isabellesym.sty --- a/lib/texinputs/isabellesym.sty Wed Feb 17 15:57:10 2016 +0100 +++ b/lib/texinputs/isabellesym.sty Wed Feb 17 21:06:47 2016 +0100 @@ -365,3 +365,4 @@ \newcommand{\isasymopen}{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}} \newcommand{\isasymclose}{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}} \newcommand{\isasymcomment}{\isatext{---}} +\newcommand{\isasymproof}{\isamath{\,\langle\mathit{proof}\rangle}} diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Datatype_Benchmark/Brackin.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Datatype_Benchmark/Brackin.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,41 @@ +(* Title: Benchmarks/Datatype_Benchmark/Brackin.thy + +A couple of datatypes from Steve Brackin's work. +*) + +theory Brackin imports Main begin + +datatype T = + X1 | X2 | X3 | X4 | X5 | X6 | X7 | X8 | X9 | X10 | X11 | + X12 | X13 | X14 | X15 | X16 | X17 | X18 | X19 | X20 | X21 | + X22 | X23 | X24 | X25 | X26 | X27 | X28 | X29 | X30 | X31 | + X32 | X33 | X34 + +datatype ('a, 'b, 'c) TY1 = + NoF + | Fk 'a "('a, 'b, 'c) TY2" +and ('a, 'b, 'c) TY2 = + Ta bool + | Td bool + | Tf "('a, 'b, 'c) TY1" + | Tk bool + | Tp bool + | App 'a "('a, 'b, 'c) TY1" "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY3" + | Pair "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY2" +and ('a, 'b, 'c) TY3 = + NoS + | Fresh "('a, 'b, 'c) TY2" + | Trustworthy 'a + | PrivateKey 'a 'b 'c + | PublicKey 'a 'b 'c + | Conveyed 'a "('a, 'b, 'c) TY2" + | Possesses 'a "('a, 'b, 'c) TY2" + | Received 'a "('a, 'b, 'c) TY2" + | Recognizes 'a "('a, 'b, 'c) TY2" + | NeverMalFromSelf 'a 'b "('a, 'b, 'c) TY2" + | Sends 'a "('a, 'b, 'c) TY2" 'b + | SharedSecret 'a "('a, 'b, 'c) TY2" 'b + | Believes 'a "('a, 'b, 'c) TY3" + | And "('a, 'b, 'c) TY3" "('a, 'b, 'c) TY3" + +end diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Datatype_Benchmark/IsaFoR.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Datatype_Benchmark/IsaFoR.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,380 @@ +(* Title: Benchmarks/Datatype_Benchmark/IsaFoR.thy + Author: Rene Thiemann, UIBK + Copyright 2014 + +Benchmark consisting of datatypes defined in IsaFoR. 
+*) + +section \Benchmark Consisting of Datatypes Defined in IsaFoR\ + +theory IsaFoR +imports Real +begin + +datatype (discs_sels) ('f, 'l) lab = + Lab "('f, 'l) lab" 'l + | FunLab "('f, 'l) lab" "('f, 'l) lab list" + | UnLab 'f + | Sharp "('f, 'l) lab" + +datatype (discs_sels) 'f projL = Projection "(('f \ nat) \ nat) list" + +datatype (discs_sels) ('f, 'v) "term" = Var 'v | Fun 'f "('f, 'v) term list" +datatype (discs_sels) ('f, 'v) ctxt = + Hole ("\") + | More 'f "('f, 'v) term list" "('f, 'v) ctxt" "('f, 'v) term list" + +type_synonym ('f, 'v) rule = "('f, 'v) term \ ('f, 'v) term" +type_synonym ('f, 'v) trs = "('f, 'v) rule set" + +type_synonym ('f, 'v) rules = "('f, 'v) rule list" +type_synonym ('f, 'l, 'v) ruleLL = "(('f, 'l) lab, 'v) rule" +type_synonym ('f, 'l, 'v) trsLL = "(('f, 'l) lab, 'v) rules" +type_synonym ('f, 'l, 'v) termsLL = "(('f, 'l) lab, 'v) term list" + +datatype (discs_sels) pos = Empty ("\") | PCons "nat" "pos" (infixr "<#" 70) + +type_synonym ('f, 'v) prseq = "(pos \ ('f, 'v) rule \ bool \ ('f, 'v) term) list" +type_synonym ('f, 'v) rseq = "(pos \ ('f, 'v) rule \ ('f, 'v) term) list" + +type_synonym ('f, 'l, 'v) rseqL = "((('f, 'l) lab, 'v) rule \ (('f, 'l) lab, 'v) rseq) list" +type_synonym ('f, 'l, 'v) dppLL = + "bool \ bool \ ('f, 'l, 'v) trsLL \ ('f, 'l, 'v) trsLL \ + ('f, 'l, 'v) termsLL \ + ('f, 'l, 'v) trsLL \ ('f, 'l, 'v) trsLL" + +type_synonym ('f, 'l, 'v) qreltrsLL = + "bool \ ('f, 'l, 'v) termsLL \ ('f, 'l, 'v) trsLL \ ('f, 'l, 'v) trsLL" + +type_synonym ('f, 'l, 'v) qtrsLL = + "bool \ ('f, 'l, 'v) termsLL \ ('f, 'l, 'v) trsLL" + +datatype (discs_sels) location = H | A | B | R + +type_synonym ('f, 'v) forb_pattern = "('f, 'v) ctxt \ ('f, 'v) term \ location" +type_synonym ('f, 'v) forb_patterns = "('f, 'v) forb_pattern set" + +type_synonym ('f, 'l, 'v) fptrsLL = + "(('f, 'l) lab, 'v) forb_pattern list \ ('f, 'l, 'v) trsLL" + +type_synonym ('f, 'l, 'v) prob = "('f, 'l, 'v) qreltrsLL + ('f, 'l, 'v) dppLL" + +type_synonym ('f, 'a) lpoly_inter = "'f \ nat \ ('a \ 'a list)" +type_synonym ('f, 'a) lpoly_interL = "(('f \ nat) \ ('a \ 'a list)) list" + +type_synonym 'v monom = "('v \ nat) list" +type_synonym ('v, 'a) poly = "('v monom \ 'a) list" +type_synonym ('f, 'a) poly_inter_list = "(('f \ nat) \ (nat, 'a) poly) list" +type_synonym 'a vec = "'a list" +type_synonym 'a mat = "'a vec list" + +datatype (discs_sels) arctic = MinInfty | Num_arc int +datatype (discs_sels) 'a arctic_delta = MinInfty_delta | Num_arc_delta 'a +datatype (discs_sels) order_tag = Lex | Mul + +type_synonym 'f status_prec_repr = "(('f \ nat) \ (nat \ order_tag)) list" + +datatype (discs_sels) af_entry = + Collapse nat + | AFList "nat list" + +type_synonym 'f afs_list = "(('f \ nat) \ af_entry) list" +type_synonym 'f prec_weight_repr = "(('f \ nat) \ (nat \ nat \ (nat list option))) list \ nat" + +datatype (discs_sels) 'f redtriple_impl = + Int_carrier "('f, int) lpoly_interL" + | Int_nl_carrier "('f, int) poly_inter_list" + | Rat_carrier "('f, rat) lpoly_interL" + | Rat_nl_carrier rat "('f, rat) poly_inter_list" + | Real_carrier "('f, real) lpoly_interL" + | Real_nl_carrier real "('f, real) poly_inter_list" + | Arctic_carrier "('f, arctic) lpoly_interL" + | Arctic_rat_carrier "('f, rat arctic_delta) lpoly_interL" + | Int_mat_carrier nat nat "('f, int mat) lpoly_interL" + | Rat_mat_carrier nat nat "('f, rat mat) lpoly_interL" + | Real_mat_carrier nat nat "('f, real mat) lpoly_interL" + | Arctic_mat_carrier nat "('f, arctic mat) lpoly_interL" + | Arctic_rat_mat_carrier nat "('f, rat 
arctic_delta mat) lpoly_interL" + | RPO "'f status_prec_repr" "'f afs_list" + | KBO "'f prec_weight_repr" "'f afs_list" + +datatype (discs_sels) list_order_type = MS_Ext | Max_Ext | Min_Ext | Dms_Ext +type_synonym 'f scnp_af = "(('f \ nat) \ (nat \ nat) list) list" + +datatype (discs_sels) 'f root_redtriple_impl = SCNP list_order_type "'f scnp_af" "'f redtriple_impl" + +type_synonym 'f sig_map_list = "(('f \ nat) \ 'f list) list" +type_synonym ('f, 'v) uncurry_info = "'f \ 'f sig_map_list \ ('f, 'v) rules \ ('f, 'v) rules" + +datatype (discs_sels) arithFun = + Arg nat + | Const nat + | Sum "arithFun list" + | Max "arithFun list" + | Min "arithFun list" + | Prod "arithFun list" + | IfEqual arithFun arithFun arithFun arithFun + +datatype (discs_sels) 'f sl_inter = SL_Inter nat "(('f \ nat) \ arithFun) list" +datatype (discs_sels) ('f, 'v) sl_variant = + Rootlab "('f \ nat) option" + | Finitelab "'f sl_inter" + | QuasiFinitelab "'f sl_inter" 'v + +type_synonym ('f, 'v) crit_pair_joins = "(('f, 'v) term \ ('f, 'v) rseq \ ('f, 'v) term \ ('f, 'v) rseq) list" + +datatype (discs_sels) 'f join_info = Guided "('f, string) crit_pair_joins" | Join_NF | Join_BFS nat + +type_synonym unknown_info = string + +type_synonym dummy_prf = unit + +datatype (discs_sels) ('f, 'v) complex_constant_removal_prf = Complex_Constant_Removal_Proof + "('f, 'v) term" + "(('f, 'v) rule \ ('f, 'v) rule) list" + +datatype (discs_sels) ('f, 'v) cond_constraint = + CC_cond bool "('f, 'v) rule" + | CC_rewr "('f, 'v) term" "('f, 'v) term" + | CC_impl "('f, 'v) cond_constraint list" "('f, 'v) cond_constraint" + | CC_all 'v "('f, 'v) cond_constraint" + +type_synonym ('f, 'v, 'w) gsubstL = "('v \ ('f, 'w) term) list" +type_synonym ('f, 'v) substL = "('f, 'v, 'v) gsubstL" + +datatype (discs_sels) ('f, 'v) cond_constraint_prf = + Final + | Delete_Condition "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" + | Different_Constructor "('f, 'v) cond_constraint" + | Same_Constructor "('f, 'v) cond_constraint" "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" + | Variable_Equation 'v "('f, 'v) term" "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" + | Funarg_Into_Var "('f, 'v) cond_constraint" nat 'v "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" + | Simplify_Condition "('f, 'v) cond_constraint" "('f, 'v) substL" "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" + | Induction "('f, 'v) cond_constraint" "('f, 'v) cond_constraint list" "(('f, 'v) rule \ (('f, 'v) term \ 'v list) list \ ('f, 'v) cond_constraint \ ('f, 'v) cond_constraint_prf) list" + +datatype (discs_sels) ('f, 'v) cond_red_pair_prf = + Cond_Red_Pair_Prf + 'f "(('f, 'v) cond_constraint \ ('f, 'v) rules \ ('f, 'v) cond_constraint_prf) list" nat nat + +datatype (discs_sels) ('q, 'f) ta_rule = TA_rule 'f "'q list" 'q ("_ _ \ _") +datatype (discs_sels) ('q, 'f) tree_automaton = Tree_Automaton "'q list" "('q, 'f) ta_rule list" "('q \ 'q) list" +datatype (discs_sels) 'q ta_relation = + Decision_Proc + | Id_Relation + | Some_Relation "('q \ 'q) list" + +datatype (discs_sels) boundstype = Roof | Match +datatype (discs_sels) ('f, 'q) bounds_info = Bounds_Info boundstype nat "'q list" "('q, 'f \ nat) tree_automaton" "'q ta_relation" + +datatype (discs_sels) ('f, 'v) pat_eqv_prf = + Pat_Dom_Renaming "('f, 'v) substL" + | Pat_Irrelevant "('f, 'v) substL" "('f, 'v) substL" + | Pat_Simplify "('f, 'v) substL" "('f, 'v) substL" + +datatype (discs_sels) pat_rule_pos = Pat_Base | Pat_Pump | Pat_Close + +datatype (discs_sels) ('f, 'v) pat_rule_prf = 
+ Pat_OrigRule "('f, 'v) rule" bool + | Pat_InitPump "('f, 'v) pat_rule_prf" "('f, 'v) substL" "('f, 'v) substL" + | Pat_InitPumpCtxt "('f, 'v) pat_rule_prf" "('f, 'v) substL" pos 'v + | Pat_Equiv "('f, 'v) pat_rule_prf" bool "('f, 'v) pat_eqv_prf" + | Pat_Narrow "('f, 'v) pat_rule_prf" "('f, 'v) pat_rule_prf" pos + | Pat_Inst "('f, 'v) pat_rule_prf" "('f, 'v) substL" pat_rule_pos + | Pat_Rewr "('f, 'v) pat_rule_prf" "('f, 'v) term \ ('f, 'v) rseq" pat_rule_pos 'v + | Pat_Exp_Sigma "('f, 'v) pat_rule_prf" nat + +datatype (discs_sels) ('f, 'v) non_loop_prf = + Non_Loop_Prf "('f, 'v) pat_rule_prf" "('f, 'v) substL" "('f, 'v) substL" nat nat pos + +datatype (discs_sels) ('f, 'l, 'v) problem = + SN_TRS "('f, 'l, 'v) qreltrsLL" + | SN_FP_TRS "('f, 'l, 'v) fptrsLL" + | Finite_DPP "('f, 'l, 'v) dppLL" + | Unknown_Problem unknown_info + | Not_SN_TRS "('f, 'l, 'v) qtrsLL" + | Not_RelSN_TRS "('f, 'l, 'v) qreltrsLL" + | Infinite_DPP "('f, 'l, 'v) dppLL" + | Not_SN_FP_TRS "('f, 'l, 'v) fptrsLL" + +declare [[bnf_timing]] + +datatype (discs_sels) ('f, 'l, 'v, 'a, 'b, 'c, 'd, 'e) generic_assm_proof = + SN_assm_proof "('f, 'l, 'v) qreltrsLL" 'a + | Finite_assm_proof "('f, 'l, 'v) dppLL" 'b + | SN_FP_assm_proof "('f, 'l, 'v) fptrsLL" 'c + | Not_SN_assm_proof "('f, 'l, 'v) qtrsLL" 'a + | Infinite_assm_proof "('f, 'l, 'v) dppLL" 'b + | Not_RelSN_assm_proof "('f, 'l, 'v) qreltrsLL" 'c + | Not_SN_FP_assm_proof "('f, 'l, 'v) fptrsLL" 'd + | Unknown_assm_proof unknown_info 'e + +type_synonym ('f, 'l, 'v, 'a, 'b, 'c, 'd) assm_proof = "('f, 'l, 'v, 'a, 'b, 'c, dummy_prf, 'd) generic_assm_proof" + +datatype (discs_sels) ('f, 'l, 'v) assm = + SN_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) qreltrsLL" + | SN_FP_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) fptrsLL" + | Finite_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) dppLL" + | Unknown_assm "('f, 'l, 'v) problem list" unknown_info + | Not_SN_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) qtrsLL" + | Not_RelSN_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) qreltrsLL" + | Not_SN_FP_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) fptrsLL" + | Infinite_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) dppLL" + +fun satisfied :: "('f, 'l, 'v) problem \ bool" where + "satisfied (SN_TRS t) = (t = t)" +| "satisfied (SN_FP_TRS t) = (t \ t)" +| "satisfied (Finite_DPP d) = (d \ d)" +| "satisfied (Unknown_Problem s) = (s = ''foo'')" +| "satisfied (Not_SN_TRS (nfs, q, r)) = (length q = length r)" +| "satisfied (Not_RelSN_TRS (nfs, q, r, rw)) = (r = rw)" +| "satisfied (Infinite_DPP d) = (d = d)" +| "satisfied (Not_SN_FP_TRS t) = (t = t)" + +fun collect_assms :: "('tp \ ('f, 'l, 'v) assm list) + \ ('dpp \ ('f, 'l, 'v) assm list) + \ ('fptp \ ('f, 'l, 'v) assm list) + \ ('unk \ ('f, 'l, 'v) assm list) + \ ('f, 'l, 'v, 'tp, 'dpp, 'fptp, 'unk) assm_proof \ ('f, 'l, 'v) assm list" where + "collect_assms tp dpp fptp unk (SN_assm_proof t prf) = tp prf" +| "collect_assms tp dpp fptp unk (SN_FP_assm_proof t prf) = fptp prf" +| "collect_assms tp dpp fptp unk (Finite_assm_proof d prf) = dpp prf" +| "collect_assms tp dpp fptp unk (Unknown_assm_proof p prf) = unk prf" +| "collect_assms _ _ _ _ _ = []" + +fun collect_neg_assms :: "('tp \ ('f, 'l, 'v) assm list) + \ ('dpp \ ('f, 'l, 'v) assm list) + \ ('rtp \ ('f, 'l, 'v) assm list) + \ ('fptp \ ('f, 'l, 'v) assm list) + \ ('unk \ ('f, 'l, 'v) assm list) + \ ('f, 'l, 'v, 'tp, 'dpp, 'rtp, 'fptp, 'unk) generic_assm_proof \ ('f, 'l, 'v) assm list" where + "collect_neg_assms tp dpp rtp fptp unk (Not_SN_assm_proof t prf) = tp prf" +| 
"collect_neg_assms tp dpp rtp fptp unk (Infinite_assm_proof d prf) = dpp prf" +| "collect_neg_assms tp dpp rtp fptp unk (Not_RelSN_assm_proof t prf) = rtp prf" +| "collect_neg_assms tp dpp rtp fptp unk (Not_SN_FP_assm_proof t prf) = fptp prf" +| "collect_neg_assms tp dpp rtp fptp unk (Unknown_assm_proof p prf) = unk prf" +| "collect_neg_assms tp dpp rtp fptp unk _ = []" + +datatype (discs_sels) ('f, 'l, 'v) dp_nontermination_proof = + DP_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) prseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" + | DP_Nonloop "(('f, 'l) lab, 'v) non_loop_prf" + | DP_Rule_Removal "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) dp_nontermination_proof" + | DP_Q_Increase "('f, 'l, 'v) termsLL" "('f, 'l, 'v) dp_nontermination_proof" + | DP_Q_Reduction "('f, 'l, 'v) termsLL" "('f, 'l, 'v) dp_nontermination_proof" + | DP_Termination_Switch "('f, 'l) lab join_info" "('f, 'l, 'v) dp_nontermination_proof" + | DP_Instantiation "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_nontermination_proof" + | DP_Rewriting "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" "(('f, 'l) lab, 'v) rule" pos "('f, 'l, 'v) dp_nontermination_proof" + | DP_Narrowing "('f, 'l, 'v) ruleLL" pos "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_nontermination_proof" + | DP_Assume_Infinite "('f, 'l, 'v) dppLL" + "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, + ('f, 'l, 'v) dp_nontermination_proof, + ('f, 'l, 'v) reltrs_nontermination_proof, + ('f, 'l, 'v) fp_nontermination_proof, + ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" +and ('f, 'l, 'v) "trs_nontermination_proof" = + TRS_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) rseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" + | TRS_Not_Well_Formed + | TRS_Rule_Removal "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_nontermination_proof" + | TRS_String_Reversal "('f, 'l, 'v) trs_nontermination_proof" + | TRS_DP_Trans "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_nontermination_proof" + | TRS_Nonloop "(('f, 'l) lab, 'v) non_loop_prf" + | TRS_Q_Increase "('f, 'l, 'v) termsLL" "('f, 'l, 'v) trs_nontermination_proof" + | TRS_Assume_Not_SN "('f, 'l, 'v) qtrsLL" + "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, + ('f, 'l, 'v) dp_nontermination_proof, + ('f, 'l, 'v) reltrs_nontermination_proof, + ('f, 'l, 'v) fp_nontermination_proof, + ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" +and ('f, 'l, 'v)"reltrs_nontermination_proof" = + Rel_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) prseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" + | Rel_Not_Well_Formed + | Rel_Rule_Removal "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) reltrs_nontermination_proof" + | Rel_R_Not_SN "('f, 'l, 'v) trs_nontermination_proof" + | Rel_TRS_Assume_Not_SN "('f, 'l, 'v) qreltrsLL" + "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, + ('f, 'l, 'v) dp_nontermination_proof, + ('f, 'l, 'v) reltrs_nontermination_proof, + ('f, 'l, 'v) fp_nontermination_proof, + ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" +and ('f, 'l, 'v) "fp_nontermination_proof" = + FPTRS_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) rseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" + | FPTRS_Rule_Removal "('f, 'l, 'v) trsLL" "('f, 'l, 'v) fp_nontermination_proof" + | FPTRS_Assume_Not_SN "('f, 'l, 'v) fptrsLL" + "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, + ('f, 'l, 'v) dp_nontermination_proof, + ('f, 'l, 'v) reltrs_nontermination_proof, + ('f, 'l, 'v) 
fp_nontermination_proof, + ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" +and ('f, 'l, 'v) neg_unknown_proof = + Assume_NT_Unknown unknown_info + "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, + ('f, 'l, 'v) dp_nontermination_proof, + ('f, 'l, 'v) reltrs_nontermination_proof, + ('f, 'l, 'v) fp_nontermination_proof, + ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" + +datatype (discs_sels) ('f, 'l, 'v) dp_termination_proof = + P_is_Empty + | Subterm_Criterion_Proc "('f, 'l) lab projL" "('f, 'l, 'v) rseqL" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Redpair_Proc "('f, 'l) lab root_redtriple_impl + ('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Redpair_UR_Proc "('f, 'l) lab root_redtriple_impl + ('f, 'l) lab redtriple_impl" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Usable_Rules_Proc "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Dep_Graph_Proc "(('f, 'l, 'v) dp_termination_proof option \ ('f, 'l, 'v) trsLL) list" + | Mono_Redpair_Proc "('f, 'l) lab redtriple_impl" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Mono_Redpair_UR_Proc "('f, 'l) lab redtriple_impl" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Size_Change_Subterm_Proc "((('f, 'l) lab, 'v) rule \ ((nat \ nat) list \ (nat \ nat) list)) list" + | Size_Change_Redpair_Proc "('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL option" + "((('f, 'l) lab, 'v) rule \ ((nat \ nat) list \ (nat \ nat) list)) list" + | Uncurry_Proc "nat option" "(('f, 'l) lab, 'v) uncurry_info" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Fcc_Proc "('f, 'l) lab" "(('f, 'l) lab, 'v) ctxt list" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Split_Proc + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" + "('f, 'l, 'v) dp_termination_proof" "('f, 'l, 'v) dp_termination_proof" + | Semlab_Proc + "(('f, 'l) lab, 'v) sl_variant" "('f, 'l, 'v) trsLL" + "(('f, 'l) lab, 'v) term list" "('f, 'l, 'v) trsLL" + "('f, 'l, 'v) dp_termination_proof" + | Switch_Innermost_Proc "('f, 'l) lab join_info" "('f, 'l, 'v) dp_termination_proof" + | Rewriting_Proc + "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" + "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" pos "('f, 'l, 'v) dp_termination_proof" + | Instantiation_Proc "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Forward_Instantiation_Proc + "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) dp_termination_proof" + | Narrowing_Proc "('f, 'l, 'v) ruleLL" pos "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Assume_Finite + "('f, 'l, 'v) dppLL" "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" + | Unlab_Proc "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" + | Q_Reduction_Proc "('f, 'l, 'v) termsLL" "('f, 'l, 'v) dp_termination_proof" + | Complex_Constant_Removal_Proc "(('f, 'l) lab, 'v) complex_constant_removal_prf" "('f, 'l, 'v) dp_termination_proof" + | General_Redpair_Proc + "('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" + "(('f, 'l) lab, 'v) cond_red_pair_prf" "('f, 'l, 'v) dp_termination_proof list" + | To_Trs_Proc "('f, 'l, 'v) trs_termination_proof" +and ('f, 
'l, 'v) trs_termination_proof = + DP_Trans bool bool "(('f, 'l) lab, 'v) rules" "('f, 'l, 'v) dp_termination_proof" + | Rule_Removal "('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" + | String_Reversal "('f, 'l, 'v) trs_termination_proof" + | Bounds "(('f, 'l) lab, 'v) bounds_info" + | Uncurry "(('f, 'l) lab, 'v) uncurry_info" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" + | Semlab + "(('f, 'l) lab, 'v) sl_variant" "(('f, 'l) lab, 'v) term list" + "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" + | R_is_Empty + | Fcc "(('f, 'l) lab, 'v) ctxt list" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" + | Split "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" "('f, 'l, 'v) trs_termination_proof" + | Switch_Innermost "('f, 'l) lab join_info" "('f, 'l, 'v) trs_termination_proof" + | Drop_Equality "('f, 'l, 'v) trs_termination_proof" + | Remove_Nonapplicable_Rules "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" + | Assume_SN "('f, 'l, 'v) qreltrsLL" "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" +and ('f, 'l, 'v) unknown_proof = + Assume_Unknown unknown_info "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" +and ('f, 'l, 'v) fptrs_termination_proof = + Assume_FP_SN "('f, 'l, 'v) fptrsLL" "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" + +end diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Datatype_Benchmark/Misc_N2M.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Datatype_Benchmark/Misc_N2M.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,471 @@ +(* Title: Benchmarks/Datatype_Benchmark/Misc_N2M.thy + Author: Dmitriy Traytel, TU Muenchen + Copyright 2014 + +Miscellaneous tests for the nested-to-mutual reduction. 
+*) + +section \Miscellaneous Tests for the Nested-to-Mutual Reduction\ + +theory Misc_N2M +imports "~~/src/HOL/Library/BNF_Axiomatization" +begin + +declare [[typedef_overloaded]] + + +locale misc +begin + +datatype 'a li = Ni | Co 'a "'a li" +datatype 'a tr = Tr "'a \ 'a tr li" + +primrec (nonexhaustive) + f_tl :: "'a \ 'a tr li \ nat" and + f_t :: "'a \ 'a tr \ nat" +where + "f_tl _ Ni = 0" | + "f_t a (Tr ts) = 1 + f_tl a (ts a)" + +datatype ('a, 'b) l = N | C 'a 'b "('a, 'b) l" +datatype ('a, 'b) t = T "(('a, 'b) t, 'a) l" "(('a, 'b) t, 'b) l" + +primrec (nonexhaustive) + f_tl2 :: "(('a, 'a) t, 'a) l \ nat" and + f_t2 :: "('a, 'a) t \ nat" +where + "f_tl2 N = 0" | + "f_t2 (T ts us) = f_tl2 ts + f_tl2 us" + +primrec (nonexhaustive) + g_tla :: "(('a, 'b) t, 'a) l \ nat" and + g_tlb :: "(('a, 'b) t, 'b) l \ nat" and + g_t :: "('a, 'b) t \ nat" +where + "g_tla N = 0" | + "g_tlb N = 1" | + "g_t (T ts us) = g_tla ts + g_tlb us" + + +datatype + 'a l1 = N1 | C1 'a "'a l1" + +datatype + ('a, 'b) t1 = T1 'a 'b "('a, 'b) t1 l1" "(nat \ ('a, 'b) t1) l1" and + ('a, 'b) t2 = T2 "('a, 'b) t1" + +primrec + h1_tl1 :: "(nat, 'a) t1 l1 \ nat" and + h1_natl1 :: "(nat \ (nat, 'a) t1) l1 \ nat" and + h1_t1 :: "(nat, 'a) t1 \ nat" +where + "h1_tl1 N1 = 0" | + "h1_tl1 (C1 t ts) = h1_t1 t + h1_tl1 ts" | + "h1_natl1 N1 = Suc 0" | + "h1_natl1 (C1 n ts) = fst n + h1_natl1 ts" | + "h1_t1 (T1 n _ ts _) = n + h1_tl1 ts" + +end + + +bnf_axiomatization ('a, 'b) M0 [wits: "('a, 'b) M0"] +bnf_axiomatization ('a, 'b) N0 [wits: "('a, 'b) N0"] +bnf_axiomatization ('a, 'b) K0 [wits: "('a, 'b) K0"] +bnf_axiomatization ('a, 'b) L0 [wits: "('a, 'b) L0"] +bnf_axiomatization ('a, 'b, 'c) G0 [wits: "('a, 'b, 'c) G0"] + +locale (*co*)mplicated +begin + +datatype 'a M = CM "('a, 'a M) M0" +datatype 'a N = CN "('a N, 'a) N0" +datatype ('a, 'b) K = CK "('a, ('a, 'b) L) K0" + and ('a, 'b) L = CL "('b, ('a, 'b) K) L0" +datatype 'a G = CG "('a, ('a G, 'a G N) K, ('a G M, 'a G) L) G0" + +primrec + fG :: "'a G \ 'a G" +and fK :: "('a G, 'a G N) K \ ('a G, 'a G N) K" +and fL :: "('a G, 'a G N) L \ ('a G, 'a G N) L" +and fM :: "'a G M \ 'a G M" where + "fG (CG x) = CG (map_G0 id fK (map_L fM fG) x)" +| "fK (CK y) = CK (map_K0 fG fL y)" +| "fL (CL z) = CL (map_L0 (map_N fG) fK z)" +| "fM (CM w) = CM (map_M0 fG fM w)" +thm fG_def fK_def fL_def fM_def fG.simps fK.simps fL.simps fM.simps + +end + +locale complicated +begin + +codatatype 'a M = CM "('a, 'a M) M0" +codatatype 'a N = CN "('a N, 'a) N0" +codatatype ('a, 'b) K = CK "('a, ('a, 'b) L) K0" + and ('a, 'b) L = CL "('b, ('a, 'b) K) L0" +codatatype 'a G = CG "('a, ('a G, 'a G N) K, ('a G M, 'a G) L) G0" + +primcorec + fG :: "'a G \ 'a G" +and fK :: "('a G, 'a G N) K \ ('a G, 'a G N) K" +and fL :: "('a G, 'a G N) L \ ('a G, 'a G N) L" +and fM :: "'a G M \ 'a G M" where + "fG x = CG (map_G0 id fK (map_L fM fG) (un_CG x))" +| "fK y = CK (map_K0 fG fL (un_CK y))" +| "fL z = CL (map_L0 (map_N fG) fK (un_CL z))" +| "fM w = CM (map_M0 fG fM (un_CM w))" +thm fG_def fK_def fL_def fM_def fG.simps fK.simps fL.simps fM.simps + +end + +datatype ('a, 'b) F1 = F1 'a 'b +datatype F2 = F2 "((unit, nat) F1, F2) F1" | F2' +datatype 'a kk1 = K1 'a | K2 "'a kk2" and 'a kk2 = K3 "'a kk1" +datatype 'a ll1 = L1 'a | L2 "'a ll2 kk2" and 'a ll2 = L3 "'a ll1" + +datatype_compat F1 +datatype_compat F2 +datatype_compat kk1 kk2 +datatype_compat ll1 ll2 + + +subsection \Deep Nesting\ + +datatype 'a tree = Empty | Node 'a "'a tree list" +datatype_compat tree + +datatype 'a ttree = TEmpty | TNode 'a "'a ttree list tree" 
+datatype_compat ttree + +datatype 'a tttree = TEmpty | TNode 'a "'a tttree list ttree list tree" +datatype_compat tttree +(* +datatype 'a ttttree = TEmpty | TNode 'a "'a ttttree list tttree list ttree list tree" +datatype_compat ttttree +*) + +datatype ('a,'b)complex = + C1 nat "'a ttree" +| C2 "('a,'b)complex list tree tree" 'b "('a,'b)complex" "('a,'b)complex2 ttree list" +and ('a,'b)complex2 = D1 "('a,'b)complex ttree" +datatype_compat complex complex2 + +datatype 'a F = F1 'a | F2 "'a F" +datatype 'a G = G1 'a | G2 "'a G F" +datatype H = H1 | H2 "H G" + +datatype_compat F +datatype_compat G +datatype_compat H + + +subsection \Primrec cache\ + +datatype 'a l = N | C 'a "'a l" +datatype ('a, 'b) t = T 'a 'b "('a, 'b) t l" + +context linorder +begin + +(* slow *) +primrec + f1_tl :: "(nat, 'a) t l \ nat" and + f1_t :: "(nat, 'a) t \ nat" +where + "f1_tl N = 0" | + "f1_tl (C t ts) = f1_t t + f1_tl ts" | + "f1_t (T n _ ts) = n + f1_tl ts" + +(* should be fast *) +primrec + f2_t :: "(nat, 'b) t \ nat" and + f2_tl :: "(nat, 'b) t l \ nat" +where + "f2_tl N = 0" | + "f2_tl (C t ts) = f2_t t + f2_tl ts" | + "f2_t (T n _ ts) = n + f2_tl ts" + +end + +(* should be fast *) +primrec + g1_t :: "('a, int) t \ nat" and + g1_tl :: "('a, int) t l \ nat" +where + "g1_t (T _ _ ts) = g1_tl ts" | + "g1_tl N = 0" | + "g1_tl (C _ ts) = g1_tl ts" + +(* should be fast *) +primrec + g2_t :: "(int, int) t \ nat" and + g2_tl :: "(int, int) t l \ nat" +where + "g2_t (T _ _ ts) = g2_tl ts" | + "g2_tl N = 0" | + "g2_tl (C _ ts) = g2_tl ts" + + +datatype + 'a l1 = N1 | C1 'a "'a l2" and + 'a l2 = N2 | C2 'a "'a l1" + +primrec + sum_l1 :: "'a::{zero,plus} l1 \ 'a" and + sum_l2 :: "'a::{zero,plus} l2 \ 'a" +where + "sum_l1 N1 = 0" | + "sum_l1 (C1 n ns) = n + sum_l2 ns" | + "sum_l2 N2 = 0" | + "sum_l2 (C2 n ns) = n + sum_l1 ns" + +datatype + ('a, 'b) t1 = T1 'a 'b "('a, 'b) t1 l1" and + ('a, 'b) t2 = T2 "('a, 'b) t1" + +(* slow *) +primrec + h1_tl1 :: "(nat, 'a) t1 l1 \ nat" and + h1_tl2 :: "(nat, 'a) t1 l2 \ nat" and + h1_t1 :: "(nat, 'a) t1 \ nat" +where + "h1_tl1 N1 = 0" | + "h1_tl1 (C1 t ts) = h1_t1 t + h1_tl2 ts" | + "h1_tl2 N2 = 0" | + "h1_tl2 (C2 t ts) = h1_t1 t + h1_tl1 ts" | + "h1_t1 (T1 n _ ts) = n + h1_tl1 ts" + +(* should be fast *) +primrec + h2_tl1 :: "(nat, 'a) t1 l1 \ nat" and + h2_tl2 :: "(nat, 'a) t1 l2 \ nat" and + h2_t1 :: "(nat, 'a) t1 \ nat" +where + "h2_tl1 N1 = 0" | + "h2_tl1 (C1 t ts) = h2_t1 t + h2_tl2 ts" | + "h2_tl2 N2 = 0" | + "h2_tl2 (C2 t ts) = h2_t1 t + h2_tl1 ts" | + "h2_t1 (T1 n _ ts) = n + h2_tl1 ts" + +(* should be fast *) +primrec + h3_tl2 :: "(nat, 'a) t1 l2 \ nat" and + h3_tl1 :: "(nat, 'a) t1 l1 \ nat" and + h3_t1 :: "(nat, 'a) t1 \ nat" +where + "h3_tl1 N1 = 0" | + "h3_tl1 (C1 t ts) = h3_t1 t + h3_tl2 ts" | + "h3_tl2 N2 = 0" | + "h3_tl2 (C2 t ts) = h3_t1 t + h3_tl1 ts" | + "h3_t1 (T1 n _ ts) = n + h3_tl1 ts" + +(* should be fast *) +primrec + i1_tl2 :: "(nat, 'a) t1 l2 \ nat" and + i1_tl1 :: "(nat, 'a) t1 l1 \ nat" and + i1_t1 :: "(nat, 'a) t1 \ nat" and + i1_t2 :: "(nat, 'a) t2 \ nat" +where + "i1_tl1 N1 = 0" | + "i1_tl1 (C1 t ts) = i1_t1 t + i1_tl2 ts" | + "i1_tl2 N2 = 0" | + "i1_tl2 (C2 t ts) = i1_t1 t + i1_tl1 ts" | + "i1_t1 (T1 n _ ts) = n + i1_tl1 ts" | + "i1_t2 (T2 t) = i1_t1 t" + +(* should be fast *) +primrec + j1_t2 :: "(nat, 'a) t2 \ nat" and + j1_t1 :: "(nat, 'a) t1 \ nat" and + j1_tl1 :: "(nat, 'a) t1 l1 \ nat" and + j1_tl2 :: "(nat, 'a) t1 l2 \ nat" +where + "j1_tl1 N1 = 0" | + "j1_tl1 (C1 t ts) = j1_t1 t + j1_tl2 ts" | + "j1_tl2 N2 = 0" | + "j1_tl2 (C2 t ts) = 
j1_t1 t + j1_tl1 ts" | + "j1_t1 (T1 n _ ts) = n + j1_tl1 ts" | + "j1_t2 (T2 t) = j1_t1 t" + + +datatype 'a l3 = N3 | C3 'a "'a l3" +datatype 'a l4 = N4 | C4 'a "'a l4" +datatype ('a, 'b) t3 = T3 'a 'b "('a, 'b) t3 l3" "('a, 'b) t3 l4" + +(* slow *) +primrec + k1_tl3 :: "(nat, 'a) t3 l3 \ nat" and + k1_tl4 :: "(nat, 'a) t3 l4 \ nat" and + k1_t3 :: "(nat, 'a) t3 \ nat" +where + "k1_tl3 N3 = 0" | + "k1_tl3 (C3 t ts) = k1_t3 t + k1_tl3 ts" | + "k1_tl4 N4 = 0" | + "k1_tl4 (C4 t ts) = k1_t3 t + k1_tl4 ts" | + "k1_t3 (T3 n _ ts us) = n + k1_tl3 ts + k1_tl4 us" + +(* should be fast *) +primrec + k2_tl4 :: "(nat, int) t3 l4 \ nat" and + k2_tl3 :: "(nat, int) t3 l3 \ nat" and + k2_t3 :: "(nat, int) t3 \ nat" +where + "k2_tl4 N4 = 0" | + "k2_tl4 (C4 t ts) = k2_t3 t + k2_tl4 ts" | + "k2_tl3 N3 = 0" | + "k2_tl3 (C3 t ts) = k2_t3 t + k2_tl3 ts" | + "k2_t3 (T3 n _ ts us) = n + k2_tl3 ts + k2_tl4 us" + + +datatype ('a, 'b) l5 = N5 | C5 'a 'b "('a, 'b) l5" +datatype ('a, 'b) l6 = N6 | C6 'a 'b "('a, 'b) l6" +datatype ('a, 'b, 'c) t4 = T4 'a 'b "(('a, 'b, 'c) t4, 'b) l5" "(('a, 'b, 'c) t4, 'c) l6" + +(* slow *) +primrec + l1_tl5 :: "((nat, 'a, 'b) t4, 'a) l5 \ nat" and + l1_tl6 :: "((nat, 'a, 'b) t4, 'b) l6 \ nat" and + l1_t4 :: "(nat, 'a, 'b) t4 \ nat" +where + "l1_tl5 N5 = 0" | + "l1_tl5 (C5 t _ ts) = l1_t4 t + l1_tl5 ts" | + "l1_tl6 N6 = 0" | + "l1_tl6 (C6 t _ ts) = l1_t4 t + l1_tl6 ts" | + "l1_t4 (T4 n _ ts us) = n + l1_tl5 ts + l1_tl6 us" + + +subsection \Primcorec Cache\ + +codatatype 'a col = N | C 'a "'a col" +codatatype ('a, 'b) cot = T 'a 'b "('a, 'b) cot col" + +context linorder +begin + +(* slow *) +primcorec + f1_cotcol :: "nat \ (nat, 'a) cot col" and + f1_cot :: "nat \ (nat, 'a) cot" +where + "n = 0 \ f1_cotcol n = N" | + "_ \ f1_cotcol n = C (f1_cot n) (f1_cotcol n)" | + "f1_cot n = T n undefined (f1_cotcol n)" + +(* should be fast *) +primcorec + f2_cotcol :: "nat \ (nat, 'b) cot col" and + f2_cot :: "nat \ (nat, 'b) cot" +where + "n = 0 \ f2_cotcol n = N" | + "_ \ f2_cotcol n = C (f2_cot n) (f2_cotcol n)" | + "f2_cot n = T n undefined (f2_cotcol n)" + +end + +(* should be fast *) +primcorec + g1_cot :: "int \ (int, 'a) cot" and + g1_cotcol :: "int \ (int, 'a) cot col" +where + "g1_cot n = T n undefined (g1_cotcol n)" | + "n = 0 \ g1_cotcol n = N" | + "_ \ g1_cotcol n = C (g1_cot n) (g1_cotcol n)" + +(* should be fast *) +primcorec + g2_cot :: "int \ (int, int) cot" and + g2_cotcol :: "int \ (int, int) cot col" +where + "g2_cot n = T n undefined (g2_cotcol n)" | + "n = 0 \ g2_cotcol n = N" | + "_ \ g2_cotcol n = C (g2_cot n) (g2_cotcol n)" + + +codatatype + 'a col1 = N1 | C1 'a "'a col2" and + 'a col2 = N2 | C2 'a "'a col1" + +codatatype + ('a, 'b) cot1 = T1 'a 'b "('a, 'b) cot1 col1" and + ('a, 'b) cot2 = T2 "('a, 'b) cot1" + +(* slow *) +primcorec + h1_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and + h1_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and + h1_cot1 :: "nat \ (nat, 'a) cot1" +where + "h1_cotcol1 n = C1 (h1_cot1 n) (h1_cotcol2 n)" | + "h1_cotcol2 n = C2 (h1_cot1 n) (h1_cotcol1 n)" | + "h1_cot1 n = T1 n undefined (h1_cotcol1 n)" + +(* should be fast *) +primcorec + h2_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and + h2_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and + h2_cot1 :: "nat \ (nat, 'a) cot1" +where + "h2_cotcol1 n = C1 (h2_cot1 n) (h2_cotcol2 n)" | + "h2_cotcol2 n = C2 (h2_cot1 n) (h2_cotcol1 n)" | + "h2_cot1 n = T1 n undefined (h2_cotcol1 n)" + +(* should be fast *) +primcorec + h3_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and + h3_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and + h3_cot1 :: 
"nat \ (nat, 'a) cot1" +where + "h3_cotcol1 n = C1 (h3_cot1 n) (h3_cotcol2 n)" | + "h3_cotcol2 n = C2 (h3_cot1 n) (h3_cotcol1 n)" | + "h3_cot1 n = T1 n undefined (h3_cotcol1 n)" + +(* should be fast *) +primcorec + i1_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and + i1_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and + i1_cot1 :: "nat \ (nat, 'a) cot1" and + i1_cot2 :: "nat \ (nat, 'a) cot2" +where + "i1_cotcol1 n = C1 (i1_cot1 n) (i1_cotcol2 n)" | + "i1_cotcol2 n = C2 (i1_cot1 n) (i1_cotcol1 n)" | + "i1_cot1 n = T1 n undefined (i1_cotcol1 n)" | + "i1_cot2 n = T2 (i1_cot1 n)" + +(* should be fast *) +primcorec + j1_cot2 :: "nat \ (nat, 'a) cot2" and + j1_cot1 :: "nat \ (nat, 'a) cot1" and + j1_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and + j1_cotcol2 :: "nat \ (nat, 'a) cot1 col2" +where + "j1_cotcol1 n = C1 (j1_cot1 n) (j1_cotcol2 n)" | + "j1_cotcol2 n = C2 (j1_cot1 n) (j1_cotcol1 n)" | + "j1_cot1 n = T1 n undefined (j1_cotcol1 n)" | + "j1_cot2 n = T2 (j1_cot1 n)" + + +codatatype 'a col3 = N3 | C3 'a "'a col3" +codatatype 'a col4 = N4 | C4 'a "'a col4" +codatatype ('a, 'b) cot3 = T3 'a 'b "('a, 'b) cot3 col3" "('a, 'b) cot3 col4" + +(* slow *) +primcorec + k1_cotcol3 :: "nat \ (nat, 'a) cot3 col3" and + k1_cotcol4 :: "nat \ (nat, 'a) cot3 col4" and + k1_cot3 :: "nat \ (nat, 'a) cot3" +where + "k1_cotcol3 n = C3 (k1_cot3 n) (k1_cotcol3 n)" | + "k1_cotcol4 n = C4 (k1_cot3 n) (k1_cotcol4 n)" | + "k1_cot3 n = T3 n undefined (k1_cotcol3 n) (k1_cotcol4 n)" + +(* should be fast *) +primcorec + k2_cotcol4 :: "nat \ (nat, 'a) cot3 col4" and + k2_cotcol3 :: "nat \ (nat, 'a) cot3 col3" and + k2_cot3 :: "nat \ (nat, 'a) cot3" +where + "k2_cotcol4 n = C4 (k2_cot3 n) (k2_cotcol4 n)" | + "k2_cotcol3 n = C3 (k2_cot3 n) (k2_cotcol3 n)" | + "k2_cot3 n = T3 n undefined (k2_cotcol3 n) (k2_cotcol4 n)" + +end diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Quickcheck_Benchmark/Find_Unused_Assms_Examples.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Quickcheck_Benchmark/Find_Unused_Assms_Examples.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,27 @@ +theory Find_Unused_Assms_Examples +imports Complex_Main +begin + +section \Arithmetics\ + +declare [[quickcheck_finite_types = false]] + +find_unused_assms Divides +find_unused_assms GCD +find_unused_assms Real + +section \Set Theory\ + +declare [[quickcheck_finite_types = true]] + +find_unused_assms Fun +find_unused_assms Relation +find_unused_assms Set +find_unused_assms Wellfounded + +section \Datatypes\ + +find_unused_assms List +find_unused_assms Map + +end diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_Base.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_Base.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,196 @@ +theory Needham_Schroeder_Base +imports Main "~~/src/HOL/Library/Predicate_Compile_Quickcheck" +begin + +datatype agent = Alice | Bob | Spy + +definition agents :: "agent set" +where + "agents = {Spy, Alice, Bob}" + +definition bad :: "agent set" +where + "bad = {Spy}" + +datatype key = pubEK agent | priEK agent + +fun invKey +where + "invKey (pubEK A) = priEK A" +| "invKey (priEK A) = pubEK A" + +datatype + msg = Agent agent + | Key key + | Nonce nat + | MPair msg msg + | Crypt key msg + +syntax + "_MTuple" :: "['a, args] => 'a * 'b" ("(2\_,/ _\)") +translations + "\x, y, z\" == "\x, \y, z\\" + "\x, y\" == "CONST MPair x y" + +inductive_set + parts :: "msg set => msg set" + for H :: "msg set" + where + Inj [intro]: "X \ H ==> X \ 
parts H" + | Fst: "\X,Y\ \ parts H ==> X \ parts H" + | Snd: "\X,Y\ \ parts H ==> Y \ parts H" + | Body: "Crypt K X \ parts H ==> X \ parts H" + +inductive_set + analz :: "msg set => msg set" + for H :: "msg set" + where + Inj [intro,simp] : "X \ H ==> X \ analz H" + | Fst: "\X,Y\ \ analz H ==> X \ analz H" + | Snd: "\X,Y\ \ analz H ==> Y \ analz H" + | Decrypt [dest]: + "[|Crypt K X \ analz H; Key(invKey K): analz H|] ==> X \ analz H" + +inductive_set + synth :: "msg set => msg set" + for H :: "msg set" + where + Inj [intro]: "X \ H ==> X \ synth H" + | Agent [intro]: "Agent agt \ synth H" + | MPair [intro]: "[|X \ synth H; Y \ synth H|] ==> \X,Y\ \ synth H" + | Crypt [intro]: "[|X \ synth H; Key(K) \ H|] ==> Crypt K X \ synth H" + +primrec initState +where + initState_Alice: + "initState Alice = {Key (priEK Alice)} \ (Key ` pubEK ` agents)" +| initState_Bob: + "initState Bob = {Key (priEK Bob)} \ (Key ` pubEK ` agents)" +| initState_Spy: + "initState Spy = (Key ` priEK ` bad) \ (Key ` pubEK ` agents)" + +datatype + event = Says agent agent msg + | Gets agent msg + | Notes agent msg + + +primrec knows :: "agent => event list => msg set" +where + knows_Nil: "knows A [] = initState A" +| knows_Cons: + "knows A (ev # evs) = + (if A = Spy then + (case ev of + Says A' B X => insert X (knows Spy evs) + | Gets A' X => knows Spy evs + | Notes A' X => + if A' \ bad then insert X (knows Spy evs) else knows Spy evs) + else + (case ev of + Says A' B X => + if A'=A then insert X (knows A evs) else knows A evs + | Gets A' X => + if A'=A then insert X (knows A evs) else knows A evs + | Notes A' X => + if A'=A then insert X (knows A evs) else knows A evs))" + +abbreviation (input) + spies :: "event list => msg set" where + "spies == knows Spy" + +primrec used :: "event list => msg set" +where + used_Nil: "used [] = \(parts ` initState ` agents)" +| used_Cons: "used (ev # evs) = + (case ev of + Says A B X => parts {X} \ used evs + | Gets A X => used evs + | Notes A X => parts {X} \ used evs)" + +subsection \Preparations for sets\ + +fun find_first :: "('a => 'b option) => 'a list => 'b option" +where + "find_first f [] = None" +| "find_first f (x # xs) = (case f x of Some y => Some y | None => find_first f xs)" + +consts cps_of_set :: "'a set => ('a => term list option) => term list option" + +lemma + [code]: "cps_of_set (set xs) f = find_first f xs" +sorry + +consts pos_cps_of_set :: "'a set => ('a => (bool * term list) option) => natural => (bool * term list) option" +consts neg_cps_of_set :: "'a set => ('a Quickcheck_Exhaustive.unknown => term list Quickcheck_Exhaustive.three_valued) => natural => term list Quickcheck_Exhaustive.three_valued" + +lemma + [code]: "pos_cps_of_set (set xs) f i = find_first f xs" +sorry + +consts find_first' :: "('b Quickcheck_Exhaustive.unknown => 'a Quickcheck_Exhaustive.three_valued) + => 'b list => 'a Quickcheck_Exhaustive.three_valued" + +lemma [code]: + "find_first' f [] = Quickcheck_Exhaustive.No_value" + "find_first' f (x # xs) = (case f (Quickcheck_Exhaustive.Known x) of Quickcheck_Exhaustive.No_value => find_first' f xs | Quickcheck_Exhaustive.Value x => Quickcheck_Exhaustive.Value x | Quickcheck_Exhaustive.Unknown_value => (case find_first' f xs of Quickcheck_Exhaustive.Value x => Quickcheck_Exhaustive.Value x | _ => Quickcheck_Exhaustive.Unknown_value))" +sorry + +lemma + [code]: "neg_cps_of_set (set xs) f i = find_first' f xs" +sorry + +setup \ +let + val Fun = Predicate_Compile_Aux.Fun + val Input = Predicate_Compile_Aux.Input + val Output = 
Predicate_Compile_Aux.Output + val Bool = Predicate_Compile_Aux.Bool + val oi = Fun (Output, Fun (Input, Bool)) + val ii = Fun (Input, Fun (Input, Bool)) + fun of_set compfuns (Type ("fun", [T, _])) = + case body_type (Predicate_Compile_Aux.mk_monadT compfuns T) of + Type ("Quickcheck_Exhaustive.three_valued", _) => + Const(@{const_name neg_cps_of_set}, HOLogic.mk_setT T --> (Predicate_Compile_Aux.mk_monadT compfuns T)) + | Type ("Predicate.pred", _) => Const(@{const_name pred_of_set}, HOLogic.mk_setT T --> Predicate_Compile_Aux.mk_monadT compfuns T) + | _ => Const(@{const_name pos_cps_of_set}, HOLogic.mk_setT T --> (Predicate_Compile_Aux.mk_monadT compfuns T)) + fun member compfuns (U as Type ("fun", [T, _])) = + (absdummy T (absdummy (HOLogic.mk_setT T) (Predicate_Compile_Aux.mk_if compfuns + (Const (@{const_name "Set.member"}, T --> HOLogic.mk_setT T --> @{typ bool}) $ Bound 1 $ Bound 0)))) + +in + Core_Data.force_modes_and_compilations @{const_name Set.member} + [(oi, (of_set, false)), (ii, (member, false))] +end +\ + +subsection \Derived equations for analz, parts and synth\ + +lemma [code]: + "analz H = (let + H' = H \ (\((%m. case m of \X, Y\ => {X, Y} | Crypt K X => if Key (invKey K) : H then {X} else {} | _ => {}) ` H)) + in if H' = H then H else analz H')" +sorry + +lemma [code]: + "parts H = (let + H' = H \ (\((%m. case m of \X, Y\ => {X, Y} | Crypt K X => {X} | _ => {}) ` H)) + in if H' = H then H else parts H')" +sorry + +definition synth' :: "msg set => msg => bool" +where + "synth' H m = (m : synth H)" + +lemmas [code_pred_intro] = synth.intros[folded synth'_def] + +code_pred [generator_cps] synth' unfolding synth'_def by (rule synth.cases) fastforce+ + +setup \Predicate_Compile_Data.ignore_consts [@{const_name analz}, @{const_name knows}]\ +declare ListMem_iff[symmetric, code_pred_inline] +declare [[quickcheck_timing]] + +setup Exhaustive_Generators.setup_exhaustive_datatype_interpretation +declare [[quickcheck_full_support = false]] + +end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_Guided_Attacker_Example.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_Guided_Attacker_Example.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,100 @@ +theory Needham_Schroeder_Guided_Attacker_Example +imports Needham_Schroeder_Base +begin + +inductive_set ns_public :: "event list set" + where + (*Initial trace is empty*) + Nil: "[] \ ns_public" + + | Fake_NS1: "\evs1 \ ns_public; Nonce NA \ analz (spies evs1) \ + \ Says Spy B (Crypt (pubEK B) \Nonce NA, Agent A\) + # evs1 \ ns_public" + | Fake_NS2: "\evs1 \ ns_public; Nonce NA \ analz (spies evs1); Nonce NB \ analz (spies evs1) \ + \ Says Spy A (Crypt (pubEK A) \Nonce NA, Nonce NB\) + # evs1 \ ns_public" + + (*Alice initiates a protocol run, sending a nonce to Bob*) + | NS1: "\evs1 \ ns_public; Nonce NA \ used evs1\ + \ Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) + # evs1 \ ns_public" + (*Bob responds to Alice's message with a further nonce*) + | NS2: "\evs2 \ ns_public; Nonce NB \ used evs2; + Says A' B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs2\ + \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) + # evs2 \ ns_public" + + (*Alice proves her existence by sending NB back to Bob.*) + | NS3: "\evs3 \ ns_public; + Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs3; + Says B' A (Crypt (pubEK A) \Nonce NA, Nonce NB\) \ set evs3\ + \ Says A B (Crypt (pubEK B) (Nonce NB)) # evs3 \ ns_public" + +declare 
ListMem_iff[symmetric, code_pred_inline] + +lemmas [code_pred_intro] = ns_publicp.intros[folded synth'_def] + +code_pred [skip_proof] ns_publicp unfolding synth'_def by (rule ns_publicp.cases) fastforce+ +thm ns_publicp.equation + +code_pred [generator_cps] ns_publicp . +thm ns_publicp.generator_cps_equation + + +lemma "ns_publicp evs ==> \ (Says Alice Bob (Crypt (pubEK Bob) (Nonce NB))) : set evs" +quickcheck[smart_exhaustive, depth = 5, timeout = 100, expect = counterexample] +(*quickcheck[narrowing, size = 6, timeout = 200, verbose, expect = no_counterexample]*) +oops + +lemma + "\ns_publicp evs\ + \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) : set evs + \ A \ Spy \ B \ Spy \ A \ B + \ Nonce NB \ analz (spies evs)" +(*quickcheck[smart_exhaustive, depth = 6, timeout = 100, expect = counterexample] +quickcheck[narrowing, size = 7, timeout = 200, expect = no_counterexample]*) +oops + +section \Proving the counterexample trace for validation\ + +lemma + assumes "A = Alice" "B = Bob" "C = Spy" "NA = 0" "NB = 1" + assumes "evs = + [Says Alice Spy (Crypt (pubEK Spy) (Nonce 1)), + Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\), + Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\), + Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\)]" (is "_ = [?e3, ?e2, ?e1, ?e0]") + shows "A \ Spy" "B \ Spy" "evs : ns_public" "Nonce NB : analz (knows Spy evs)" +proof - + from assms show "A \ Spy" by auto + from assms show "B \ Spy" by auto + have "[] : ns_public" by (rule Nil) + then have first_step: "[?e0] : ns_public" + proof (rule NS1) + show "Nonce 0 ~: used []" by eval + qed + then have "[?e1, ?e0] : ns_public" + proof (rule Fake_NS1) + show "Nonce 0 : analz (knows Spy [?e0])" by eval + qed + then have "[?e2, ?e1, ?e0] : ns_public" + proof (rule NS2) + show "Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\) \ set [?e1, ?e0]" by simp + show " Nonce 1 ~: used [?e1, ?e0]" by eval + qed + then show "evs : ns_public" + unfolding assms + proof (rule NS3) + show " Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\) \ set [?e2, ?e1, ?e0]" by simp + show "Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\) + : set [?e2, ?e1, ?e0]" by simp + qed + from assms show "Nonce NB : analz (knows Spy evs)" + apply simp + apply (rule analz.intros(4)) + apply (rule analz.intros(1)) + apply (auto simp add: bad_def) + done +qed + +end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_No_Attacker_Example.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_No_Attacker_Example.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,47 @@ +theory Needham_Schroeder_No_Attacker_Example +imports Needham_Schroeder_Base +begin + +inductive_set ns_public :: "event list set" + where + (*Initial trace is empty*) + Nil: "[] \ ns_public" + (*Alice initiates a protocol run, sending a nonce to Bob*) + | NS1: "\evs1 \ ns_public; Nonce NA \ used evs1\ + \ Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) + # evs1 \ ns_public" + (*Bob responds to Alice's message with a further nonce*) + | NS2: "\evs2 \ ns_public; Nonce NB \ used evs2; + Says A' B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs2\ + \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) + # evs2 \ ns_public" + + (*Alice proves her existence by sending NB back to Bob.*) + | NS3: "\evs3 \ ns_public; + Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs3; + Says B' A (Crypt (pubEK A) \Nonce NA, Nonce NB\) \ set evs3\ + \ Says A B 
(Crypt (pubEK B) (Nonce NB)) # evs3 \ ns_public" + +code_pred [skip_proof] ns_publicp . +thm ns_publicp.equation + +code_pred [generator_cps] ns_publicp . +thm ns_publicp.generator_cps_equation + +lemma "ns_publicp evs ==> \ (Says Alice Bob (Crypt (pubEK Bob) (Nonce NB))) : set evs" +(*quickcheck[random, iterations = 1000000, size = 20, timeout = 3600, verbose] +quickcheck[exhaustive, size = 8, timeout = 3600, verbose] +quickcheck[narrowing, size = 7, timeout = 3600, verbose]*) +quickcheck[smart_exhaustive, depth = 5, timeout = 30, expect = counterexample] +oops + +lemma + "\ns_publicp evs\ + \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) : set evs + \ A \ Spy \ B \ Spy \ A \ B + \ Nonce NB \ analz (spies evs)" +quickcheck[smart_exhaustive, depth = 6, timeout = 30] +quickcheck[narrowing, size = 6, timeout = 30, verbose] +oops + +end diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_Unguided_Attacker_Example.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Quickcheck_Benchmark/Needham_Schroeder_Unguided_Attacker_Example.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,96 @@ +theory Needham_Schroeder_Unguided_Attacker_Example +imports Needham_Schroeder_Base +begin + +inductive_set ns_public :: "event list set" + where + (*Initial trace is empty*) + Nil: "[] \ ns_public" + + | Fake: "\evs1 \ ns_public; X \ synth (analz (spies evs1)) \ + \ Says Spy A X # evs1 \ ns_public" + + (*Alice initiates a protocol run, sending a nonce to Bob*) + | NS1: "\evs1 \ ns_public; Nonce NA \ used evs1\ + \ Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) + # evs1 \ ns_public" + (*Bob responds to Alice's message with a further nonce*) + | NS2: "\evs2 \ ns_public; Nonce NB \ used evs2; + Says A' B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs2\ + \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) + # evs2 \ ns_public" + + (*Alice proves her existence by sending NB back to Bob.*) + | NS3: "\evs3 \ ns_public; + Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs3; + Says B' A (Crypt (pubEK A) \Nonce NA, Nonce NB\) \ set evs3\ + \ Says A B (Crypt (pubEK B) (Nonce NB)) # evs3 \ ns_public" + +declare ListMem_iff[symmetric, code_pred_inline] + +lemmas [code_pred_intro] = ns_publicp.intros[folded synth'_def] + +code_pred [skip_proof] ns_publicp unfolding synth'_def by (rule ns_publicp.cases) fastforce+ +thm ns_publicp.equation + +code_pred [generator_cps] ns_publicp . 
+thm ns_publicp.generator_cps_equation + + +lemma "ns_publicp evs ==> \ (Says Alice Bob (Crypt (pubEK Bob) (Nonce NB))) : set evs" +quickcheck[smart_exhaustive, depth = 5, timeout = 200, expect = counterexample] +(*quickcheck[narrowing, size = 6, timeout = 200, verbose, expect = no_counterexample]*) +oops + +lemma + "\ns_publicp evs\ + \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) : set evs + \ A \ Spy \ B \ Spy \ A \ B + \ Nonce NB \ analz (spies evs)" +quickcheck[smart_exhaustive, depth = 6, timeout = 100, expect = no_counterexample] +(*quickcheck[narrowing, size = 7, timeout = 200, expect = no_counterexample]*) +oops + +section \Proving the counterexample trace for validation\ + +lemma + assumes "A = Alice" "B = Bob" "C = Spy" "NA = 0" "NB = 1" + assumes "evs = + [Says Alice Spy (Crypt (pubEK Spy) (Nonce 1)), + Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\), + Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\), + Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\)]" (is "_ = [?e3, ?e2, ?e1, ?e0]") + shows "A \ Spy" "B \ Spy" "evs : ns_public" "Nonce NB : analz (knows Spy evs)" +proof - + from assms show "A \ Spy" by auto + from assms show "B \ Spy" by auto + have "[] : ns_public" by (rule Nil) + then have first_step: "[?e0] : ns_public" + proof (rule NS1) + show "Nonce 0 ~: used []" by eval + qed + then have "[?e1, ?e0] : ns_public" + proof (rule Fake) + show "Crypt (pubEK Bob) \Nonce 0, Agent Alice\ : synth (analz (knows Spy [?e0]))" + by (intro synth.intros(2,3,4,1)) eval+ + qed + then have "[?e2, ?e1, ?e0] : ns_public" + proof (rule NS2) + show "Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\) \ set [?e1, ?e0]" by simp + show " Nonce 1 ~: used [?e1, ?e0]" by eval + qed + then show "evs : ns_public" + unfolding assms + proof (rule NS3) + show " Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\) \ set [?e2, ?e1, ?e0]" by simp + show "Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\) : set [?e2, ?e1, ?e0]" by simp + qed + from assms show "Nonce NB : analz (knows Spy evs)" + apply simp + apply (rule analz.intros(4)) + apply (rule analz.intros(1)) + apply (auto simp add: bad_def) + done +qed + +end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/Benchmarks/ROOT --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/ROOT Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,26 @@ +chapter HOL + +session "HOL-Datatype_Benchmark" in Datatype_Benchmark = HOL + + description {* + Big (co)datatypes. + *} + options [document = false] + theories + Brackin + IsaFoR + Misc_N2M + +session "HOL-Quickcheck_Benchmark" in Quickcheck_Benchmark = HOL + + theories [quick_and_dirty] + Find_Unused_Assms_Examples + Needham_Schroeder_No_Attacker_Example + Needham_Schroeder_Guided_Attacker_Example + Needham_Schroeder_Unguided_Attacker_Example + +session "HOL-Record_Benchmark" in Record_Benchmark = HOL + + description {* + Big records. 
+ *} + options [document = false] + theories + Record_Benchmark diff -r ec44535f954a -r a105bea3936f src/Benchmarks/Record_Benchmark/Record_Benchmark.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Benchmarks/Record_Benchmark/Record_Benchmark.thy Wed Feb 17 21:06:47 2016 +0100 @@ -0,0 +1,427 @@ +(* Title: Benchmarks/Record_Benchmark/Record_Benchmark.thy + Author: Norbert Schirmer, DFKI +*) + +section \Benchmark for large record\ + +theory Record_Benchmark +imports Main +begin + +declare [[record_timing]] + +record many_A = +A000::nat +A001::nat +A002::nat +A003::nat +A004::nat +A005::nat +A006::nat +A007::nat +A008::nat +A009::nat +A010::nat +A011::nat +A012::nat +A013::nat +A014::nat +A015::nat +A016::nat +A017::nat +A018::nat +A019::nat +A020::nat +A021::nat +A022::nat +A023::nat +A024::nat +A025::nat +A026::nat +A027::nat +A028::nat +A029::nat +A030::nat +A031::nat +A032::nat +A033::nat +A034::nat +A035::nat +A036::nat +A037::nat +A038::nat +A039::nat +A040::nat +A041::nat +A042::nat +A043::nat +A044::nat +A045::nat +A046::nat +A047::nat +A048::nat +A049::nat +A050::nat +A051::nat +A052::nat +A053::nat +A054::nat +A055::nat +A056::nat +A057::nat +A058::nat +A059::nat +A060::nat +A061::nat +A062::nat +A063::nat +A064::nat +A065::nat +A066::nat +A067::nat +A068::nat +A069::nat +A070::nat +A071::nat +A072::nat +A073::nat +A074::nat +A075::nat +A076::nat +A077::nat +A078::nat +A079::nat +A080::nat +A081::nat +A082::nat +A083::nat +A084::nat +A085::nat +A086::nat +A087::nat +A088::nat +A089::nat +A090::nat +A091::nat +A092::nat +A093::nat +A094::nat +A095::nat +A096::nat +A097::nat +A098::nat +A099::nat +A100::nat +A101::nat +A102::nat +A103::nat +A104::nat +A105::nat +A106::nat +A107::nat +A108::nat +A109::nat +A110::nat +A111::nat +A112::nat +A113::nat +A114::nat +A115::nat +A116::nat +A117::nat +A118::nat +A119::nat +A120::nat +A121::nat +A122::nat +A123::nat +A124::nat +A125::nat +A126::nat +A127::nat +A128::nat +A129::nat +A130::nat +A131::nat +A132::nat +A133::nat +A134::nat +A135::nat +A136::nat +A137::nat +A138::nat +A139::nat +A140::nat +A141::nat +A142::nat +A143::nat +A144::nat +A145::nat +A146::nat +A147::nat +A148::nat +A149::nat +A150::nat +A151::nat +A152::nat +A153::nat +A154::nat +A155::nat +A156::nat +A157::nat +A158::nat +A159::nat +A160::nat +A161::nat +A162::nat +A163::nat +A164::nat +A165::nat +A166::nat +A167::nat +A168::nat +A169::nat +A170::nat +A171::nat +A172::nat +A173::nat +A174::nat +A175::nat +A176::nat +A177::nat +A178::nat +A179::nat +A180::nat +A181::nat +A182::nat +A183::nat +A184::nat +A185::nat +A186::nat +A187::nat +A188::nat +A189::nat +A190::nat +A191::nat +A192::nat +A193::nat +A194::nat +A195::nat +A196::nat +A197::nat +A198::nat +A199::nat +A200::nat +A201::nat +A202::nat +A203::nat +A204::nat +A205::nat +A206::nat +A207::nat +A208::nat +A209::nat +A210::nat +A211::nat +A212::nat +A213::nat +A214::nat +A215::nat +A216::nat +A217::nat +A218::nat +A219::nat +A220::nat +A221::nat +A222::nat +A223::nat +A224::nat +A225::nat +A226::nat +A227::nat +A228::nat +A229::nat +A230::nat +A231::nat +A232::nat +A233::nat +A234::nat +A235::nat +A236::nat +A237::nat +A238::nat +A239::nat +A240::nat +A241::nat +A242::nat +A243::nat +A244::nat +A245::nat +A246::nat +A247::nat +A248::nat +A249::nat +A250::nat +A251::nat +A252::nat +A253::nat +A254::nat +A255::nat +A256::nat +A257::nat +A258::nat +A259::nat +A260::nat +A261::nat +A262::nat +A263::nat +A264::nat +A265::nat +A266::nat +A267::nat +A268::nat +A269::nat +A270::nat +A271::nat +A272::nat +A273::nat 
+A274::nat +A275::nat +A276::nat +A277::nat +A278::nat +A279::nat +A280::nat +A281::nat +A282::nat +A283::nat +A284::nat +A285::nat +A286::nat +A287::nat +A288::nat +A289::nat +A290::nat +A291::nat +A292::nat +A293::nat +A294::nat +A295::nat +A296::nat +A297::nat +A298::nat +A299::nat + +record many_B = many_A + +B000::nat +B001::nat +B002::nat +B003::nat +B004::nat +B005::nat +B006::nat +B007::nat +B008::nat +B009::nat +B010::nat +B011::nat +B012::nat +B013::nat +B014::nat +B015::nat +B016::nat +B017::nat +B018::nat +B019::nat +B020::nat +B021::nat +B022::nat +B023::nat +B024::nat +B025::nat +B026::nat +B027::nat +B028::nat +B029::nat +B030::nat + +lemma "A155 (r\A255:=x\) = A155 r" + by simp + +lemma "A155 (r\A255:=x,A253:=y,A255:=z \) = A155 r" + by simp + +lemma "(r\A255:=x,A253:=y,A255:=z \) = r\A253:=y,A255:=z\" + by simp + +lemma "(r\A255:=x,A253:=y,A255:=z \) = r\A253:=y,A255:=z\" + apply (tactic \simp_tac + (put_simpset HOL_basic_ss @{context} addsimprocs [Record.upd_simproc]) 1\) + done + +lemma "(\r. P (A155 r)) \ (\x. P x)" + apply (tactic \simp_tac + (put_simpset HOL_basic_ss @{context} addsimprocs [Record.split_simproc (K ~1)]) 1\) + apply simp + done + +lemma "(\r. P (A155 r)) \ (\x. P x)" + apply (tactic \Record.split_simp_tac @{context} [] (K ~1) 1\) + apply simp + done + +lemma "(\r. P (A155 r)) \ (\x. P x)" + apply (tactic \simp_tac + (put_simpset HOL_basic_ss @{context} addsimprocs [Record.split_simproc (K ~1)]) 1\) + apply simp + done + +lemma "(\r. P (A155 r)) \ (\x. P x)" + apply (tactic \Record.split_simp_tac @{context} [] (K ~1) 1\) + apply simp + done + +lemma "\r. P (A155 r) \ (\x. P x)" + apply (tactic \simp_tac + (put_simpset HOL_basic_ss @{context} addsimprocs [Record.split_simproc (K ~1)]) 1\) + apply auto + done + +lemma "\r. P (A155 r) \ (\x. P x)" + apply (tactic \Record.split_simp_tac @{context} [] (K ~1) 1\) + apply auto + done + +lemma "P (A155 r) \ (\x. P x)" + apply (tactic \Record.split_simp_tac @{context} [] (K ~1) 1\) + apply auto + done + +lemma fixes r shows "P (A155 r) \ (\x. P x)" + apply (tactic \Record.split_simp_tac @{context} [] (K ~1) 1\) + apply auto + done + + +notepad +begin + fix P r + assume "P (A155 r)" + then have "\x. P x" + apply - + apply (tactic \Record.split_simp_tac @{context} [] (K ~1) 1\) + apply auto + done +end + + +lemma "\r. 
A155 r = x" + apply (tactic \simp_tac + (put_simpset HOL_basic_ss @{context} addsimprocs [Record.ex_sel_eq_simproc]) 1\) + done + +print_record many_A + +print_record many_B + +end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/Doc/Isar_Ref/Base.thy --- a/src/Doc/Isar_Ref/Base.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/Isar_Ref/Base.thy Wed Feb 17 21:06:47 2016 +0100 @@ -2,14 +2,8 @@ theory Base imports Pure -keywords "\" :: "qed" % "proof" begin -ML \ - Outer_Syntax.command @{command_keyword "\"} "dummy proof" - (Scan.succeed Isar_Cmd.skip_proof); -\ - ML_file "../antiquote_setup.ML" end diff -r ec44535f954a -r a105bea3936f src/Doc/Isar_Ref/document/style.sty --- a/src/Doc/Isar_Ref/document/style.sty Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/Isar_Ref/document/style.sty Wed Feb 17 21:06:47 2016 +0100 @@ -18,9 +18,6 @@ \newcommand{\isasymBBAR}{{\,\newdimen{\tmpheight}\settoheight\tmpheight{\isacharbar}\rule{1pt}{\tmpheight}\,}} \renewcommand{\isacommand}[1]{\isakeyword{\isadigitreset#1}} -\newcommand{\isasymproof}{\isamath{\,\langle\mathit{proof}\rangle}} - - %% ML \newenvironment{mldecls}{\par\noindent\begingroup\def\isanewline{\\}\begin{tabular}{ll}}{\end{tabular}\medskip\endgroup} diff -r ec44535f954a -r a105bea3936f src/Doc/Prog_Prove/Basics.thy --- a/src/Doc/Prog_Prove/Basics.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/Prog_Prove/Basics.thy Wed Feb 17 21:06:47 2016 +0100 @@ -140,13 +140,15 @@ message, it refers to the HOL syntax as the \concept{inner syntax} and the enclosing theory language as the \concept{outer syntax}. +\ifsem\else \subsection{Proof State} \begin{warn} -By default Isabelle/jEdit does not show the proof state -in the output window. You should enable this by ticking the -``Proof state'' box. +By default Isabelle/jEdit does not show the proof state but this tutorial +refers to it frequently. You should tick the ``Proof state'' box +to see the proof state in the output window. \end{warn} +\fi *} (*<*) end diff -r ec44535f954a -r a105bea3936f src/Doc/Prog_Prove/Bool_nat_list.thy --- a/src/Doc/Prog_Prove/Bool_nat_list.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/Prog_Prove/Bool_nat_list.thy Wed Feb 17 21:06:47 2016 +0100 @@ -58,7 +58,8 @@ need to be established by induction in most cases. Command \isacom{apply}@{text"(induction m)"} instructs Isabelle to start a proof by induction on @{text m}. In response, it will show the -following proof state: +following proof state\ifsem\footnote{See page \pageref{proof-state} for how to +display the proof state.}\fi: @{subgoals[display,indent=0]} The numbered lines are known as \emph{subgoals}. The first subgoal is the base case, the second one the induction step. 
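The Prog_Prove hunk above ends with tutorial text describing the proof state that apply(induction m) opens: one subgoal for the base case and one for the induction step. A minimal sketch of the kind of theory snippet that text is describing — reconstructed from the tutorial's standard add-on-nat example, illustrative only and not part of this changeset:

fun add :: "nat \<Rightarrow> nat \<Rightarrow> nat" where
  "add 0 n = n" |
  "add (Suc m) n = Suc (add m n)"

(* apply(induction m) shows two subgoals in the proof state:
   the base case for 0 and the induction step for Suc m *)
lemma "add m 0 = m"
  apply(induction m)
  apply(auto)
  done
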
diff -r ec44535f954a -r a105bea3936f src/Doc/Prog_Prove/Logic.thy --- a/src/Doc/Prog_Prove/Logic.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/Prog_Prove/Logic.thy Wed Feb 17 21:06:47 2016 +0100 @@ -134,7 +134,7 @@ \begin{tabular}{l@ {\quad}l} @{const_typ set}\index{set@@{const set}} & converts a list to the set of its elements\\ @{const_typ finite}\index{finite@@{const finite}} & is true iff its argument is finite\\ -@{const_typ card}\index{card@@{const card}} & is the cardinality of a finite set\\ +\noquotes{@{term[source] "card :: 'a set \ nat"}}\index{card@@{const card}} & is the cardinality of a finite set\\ & and is @{text 0} for all infinite sets\\ @{thm image_def}\index{$IMP042@@{term"f ` A"}} & is the image of a function over a set \end{tabular} diff -r ec44535f954a -r a105bea3936f src/Doc/Prog_Prove/document/intro-isabelle.tex --- a/src/Doc/Prog_Prove/document/intro-isabelle.tex Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/Prog_Prove/document/intro-isabelle.tex Wed Feb 17 21:06:47 2016 +0100 @@ -55,10 +55,11 @@ \subsection*{Getting Started with Isabelle} If you have not done so already, download and install Isabelle +(this book is compatible with Isabelle2016) from \url{http://isabelle.in.tum.de}. You can start it by clicking on the application icon. This will launch Isabelle's -user interface based on the text editor \concept{jedit}. Below you see -a typical example snapshot of a jedit session. At this point we merely explain +user interface based on the text editor \concept{jEdit}. Below you see +a typical example snapshot of a session. At this point we merely explain the layout of the window, not its contents. \begin{center} @@ -71,7 +72,14 @@ lower part of the window. You can examine the response to any input phrase by clicking on that phrase or by hovering over underlined text. -This should suffice to get started with the jedit interface. +\begin{warn}\label{proof-state} +Part I frequently refers to the proof state. +You can see the proof state combined with other system output if you +press the ``Output'' button to open the output area and tick the +``Proof state'' box to see the proof state in the output area. +\end{warn} + +This should suffice to get started with the jEdit interface. Now you need to learn what to type into it. \else If you want to apply what you have learned about Isabelle we recommend you diff -r ec44535f954a -r a105bea3936f src/Doc/System/Sessions.thy --- a/src/Doc/System/Sessions.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Doc/System/Sessions.thy Wed Feb 17 21:06:47 2016 +0100 @@ -198,10 +198,6 @@ the subsequent theories to be processed. Conditions are considered ``true'' if the corresponding environment value is defined and non-empty. - For example, the \<^verbatim>\condition=ISABELLE_FULL_TEST\ may be used to guard - extraordinary theories, which are meant to be enabled explicitly via some - shell prefix \<^verbatim>\env ISABELLE_FULL_TEST=true\ before invoking @{tool build}. - \<^item> @{system_option_def "timeout"} and @{system_option_def "timeout_scale"} specify a real wall-clock timeout for the session as a whole: the two values are multiplied and taken as the number of seconds. Typically, diff -r ec44535f954a -r a105bea3936f src/HOL/Datatype_Examples/Brackin.thy --- a/src/HOL/Datatype_Examples/Brackin.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,41 +0,0 @@ -(* Title: HOL/Datatype_Examples/Brackin.thy - -A couple of datatypes from Steve Brackin's work. 
-*) - -theory Brackin imports Main begin - -datatype T = - X1 | X2 | X3 | X4 | X5 | X6 | X7 | X8 | X9 | X10 | X11 | - X12 | X13 | X14 | X15 | X16 | X17 | X18 | X19 | X20 | X21 | - X22 | X23 | X24 | X25 | X26 | X27 | X28 | X29 | X30 | X31 | - X32 | X33 | X34 - -datatype ('a, 'b, 'c) TY1 = - NoF - | Fk 'a "('a, 'b, 'c) TY2" -and ('a, 'b, 'c) TY2 = - Ta bool - | Td bool - | Tf "('a, 'b, 'c) TY1" - | Tk bool - | Tp bool - | App 'a "('a, 'b, 'c) TY1" "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY3" - | Pair "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY2" -and ('a, 'b, 'c) TY3 = - NoS - | Fresh "('a, 'b, 'c) TY2" - | Trustworthy 'a - | PrivateKey 'a 'b 'c - | PublicKey 'a 'b 'c - | Conveyed 'a "('a, 'b, 'c) TY2" - | Possesses 'a "('a, 'b, 'c) TY2" - | Received 'a "('a, 'b, 'c) TY2" - | Recognizes 'a "('a, 'b, 'c) TY2" - | NeverMalFromSelf 'a 'b "('a, 'b, 'c) TY2" - | Sends 'a "('a, 'b, 'c) TY2" 'b - | SharedSecret 'a "('a, 'b, 'c) TY2" 'b - | Believes 'a "('a, 'b, 'c) TY3" - | And "('a, 'b, 'c) TY3" "('a, 'b, 'c) TY3" - -end diff -r ec44535f954a -r a105bea3936f src/HOL/Datatype_Examples/IsaFoR.thy --- a/src/HOL/Datatype_Examples/IsaFoR.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,380 +0,0 @@ -(* Title: HOL/Datatype_Examples/IsaFoR.thy - Author: Rene Thiemann, UIBK - Copyright 2014 - -Benchmark consisting of datatypes defined in IsaFoR. -*) - -section {* Benchmark Consisting of Datatypes Defined in IsaFoR *} - -theory IsaFoR -imports Real -begin - -datatype (discs_sels) ('f, 'l) lab = - Lab "('f, 'l) lab" 'l - | FunLab "('f, 'l) lab" "('f, 'l) lab list" - | UnLab 'f - | Sharp "('f, 'l) lab" - -datatype (discs_sels) 'f projL = Projection "(('f \ nat) \ nat) list" - -datatype (discs_sels) ('f, 'v) "term" = Var 'v | Fun 'f "('f, 'v) term list" -datatype (discs_sels) ('f, 'v) ctxt = - Hole ("\") - | More 'f "('f, 'v) term list" "('f, 'v) ctxt" "('f, 'v) term list" - -type_synonym ('f, 'v) rule = "('f, 'v) term \ ('f, 'v) term" -type_synonym ('f, 'v) trs = "('f, 'v) rule set" - -type_synonym ('f, 'v) rules = "('f, 'v) rule list" -type_synonym ('f, 'l, 'v) ruleLL = "(('f, 'l) lab, 'v) rule" -type_synonym ('f, 'l, 'v) trsLL = "(('f, 'l) lab, 'v) rules" -type_synonym ('f, 'l, 'v) termsLL = "(('f, 'l) lab, 'v) term list" - -datatype (discs_sels) pos = Empty ("\") | PCons "nat" "pos" (infixr "<#" 70) - -type_synonym ('f, 'v) prseq = "(pos \ ('f, 'v) rule \ bool \ ('f, 'v) term) list" -type_synonym ('f, 'v) rseq = "(pos \ ('f, 'v) rule \ ('f, 'v) term) list" - -type_synonym ('f, 'l, 'v) rseqL = "((('f, 'l) lab, 'v) rule \ (('f, 'l) lab, 'v) rseq) list" -type_synonym ('f, 'l, 'v) dppLL = - "bool \ bool \ ('f, 'l, 'v) trsLL \ ('f, 'l, 'v) trsLL \ - ('f, 'l, 'v) termsLL \ - ('f, 'l, 'v) trsLL \ ('f, 'l, 'v) trsLL" - -type_synonym ('f, 'l, 'v) qreltrsLL = - "bool \ ('f, 'l, 'v) termsLL \ ('f, 'l, 'v) trsLL \ ('f, 'l, 'v) trsLL" - -type_synonym ('f, 'l, 'v) qtrsLL = - "bool \ ('f, 'l, 'v) termsLL \ ('f, 'l, 'v) trsLL" - -datatype (discs_sels) location = H | A | B | R - -type_synonym ('f, 'v) forb_pattern = "('f, 'v) ctxt \ ('f, 'v) term \ location" -type_synonym ('f, 'v) forb_patterns = "('f, 'v) forb_pattern set" - -type_synonym ('f, 'l, 'v) fptrsLL = - "(('f, 'l) lab, 'v) forb_pattern list \ ('f, 'l, 'v) trsLL" - -type_synonym ('f, 'l, 'v) prob = "('f, 'l, 'v) qreltrsLL + ('f, 'l, 'v) dppLL" - -type_synonym ('f, 'a) lpoly_inter = "'f \ nat \ ('a \ 'a list)" -type_synonym ('f, 'a) lpoly_interL = "(('f \ nat) \ ('a \ 'a list)) list" - -type_synonym 'v monom = "('v \ nat) list" -type_synonym 
('v, 'a) poly = "('v monom \ 'a) list" -type_synonym ('f, 'a) poly_inter_list = "(('f \ nat) \ (nat, 'a) poly) list" -type_synonym 'a vec = "'a list" -type_synonym 'a mat = "'a vec list" - -datatype (discs_sels) arctic = MinInfty | Num_arc int -datatype (discs_sels) 'a arctic_delta = MinInfty_delta | Num_arc_delta 'a -datatype (discs_sels) order_tag = Lex | Mul - -type_synonym 'f status_prec_repr = "(('f \ nat) \ (nat \ order_tag)) list" - -datatype (discs_sels) af_entry = - Collapse nat - | AFList "nat list" - -type_synonym 'f afs_list = "(('f \ nat) \ af_entry) list" -type_synonym 'f prec_weight_repr = "(('f \ nat) \ (nat \ nat \ (nat list option))) list \ nat" - -datatype (discs_sels) 'f redtriple_impl = - Int_carrier "('f, int) lpoly_interL" - | Int_nl_carrier "('f, int) poly_inter_list" - | Rat_carrier "('f, rat) lpoly_interL" - | Rat_nl_carrier rat "('f, rat) poly_inter_list" - | Real_carrier "('f, real) lpoly_interL" - | Real_nl_carrier real "('f, real) poly_inter_list" - | Arctic_carrier "('f, arctic) lpoly_interL" - | Arctic_rat_carrier "('f, rat arctic_delta) lpoly_interL" - | Int_mat_carrier nat nat "('f, int mat) lpoly_interL" - | Rat_mat_carrier nat nat "('f, rat mat) lpoly_interL" - | Real_mat_carrier nat nat "('f, real mat) lpoly_interL" - | Arctic_mat_carrier nat "('f, arctic mat) lpoly_interL" - | Arctic_rat_mat_carrier nat "('f, rat arctic_delta mat) lpoly_interL" - | RPO "'f status_prec_repr" "'f afs_list" - | KBO "'f prec_weight_repr" "'f afs_list" - -datatype (discs_sels) list_order_type = MS_Ext | Max_Ext | Min_Ext | Dms_Ext -type_synonym 'f scnp_af = "(('f \ nat) \ (nat \ nat) list) list" - -datatype (discs_sels) 'f root_redtriple_impl = SCNP list_order_type "'f scnp_af" "'f redtriple_impl" - -type_synonym 'f sig_map_list = "(('f \ nat) \ 'f list) list" -type_synonym ('f, 'v) uncurry_info = "'f \ 'f sig_map_list \ ('f, 'v) rules \ ('f, 'v) rules" - -datatype (discs_sels) arithFun = - Arg nat - | Const nat - | Sum "arithFun list" - | Max "arithFun list" - | Min "arithFun list" - | Prod "arithFun list" - | IfEqual arithFun arithFun arithFun arithFun - -datatype (discs_sels) 'f sl_inter = SL_Inter nat "(('f \ nat) \ arithFun) list" -datatype (discs_sels) ('f, 'v) sl_variant = - Rootlab "('f \ nat) option" - | Finitelab "'f sl_inter" - | QuasiFinitelab "'f sl_inter" 'v - -type_synonym ('f, 'v) crit_pair_joins = "(('f, 'v) term \ ('f, 'v) rseq \ ('f, 'v) term \ ('f, 'v) rseq) list" - -datatype (discs_sels) 'f join_info = Guided "('f, string) crit_pair_joins" | Join_NF | Join_BFS nat - -type_synonym unknown_info = string - -type_synonym dummy_prf = unit - -datatype (discs_sels) ('f, 'v) complex_constant_removal_prf = Complex_Constant_Removal_Proof - "('f, 'v) term" - "(('f, 'v) rule \ ('f, 'v) rule) list" - -datatype (discs_sels) ('f, 'v) cond_constraint = - CC_cond bool "('f, 'v) rule" - | CC_rewr "('f, 'v) term" "('f, 'v) term" - | CC_impl "('f, 'v) cond_constraint list" "('f, 'v) cond_constraint" - | CC_all 'v "('f, 'v) cond_constraint" - -type_synonym ('f, 'v, 'w) gsubstL = "('v \ ('f, 'w) term) list" -type_synonym ('f, 'v) substL = "('f, 'v, 'v) gsubstL" - -datatype (discs_sels) ('f, 'v) cond_constraint_prf = - Final - | Delete_Condition "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" - | Different_Constructor "('f, 'v) cond_constraint" - | Same_Constructor "('f, 'v) cond_constraint" "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" - | Variable_Equation 'v "('f, 'v) term" "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" - | Funarg_Into_Var 
"('f, 'v) cond_constraint" nat 'v "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" - | Simplify_Condition "('f, 'v) cond_constraint" "('f, 'v) substL" "('f, 'v) cond_constraint" "('f, 'v) cond_constraint_prf" - | Induction "('f, 'v) cond_constraint" "('f, 'v) cond_constraint list" "(('f, 'v) rule \ (('f, 'v) term \ 'v list) list \ ('f, 'v) cond_constraint \ ('f, 'v) cond_constraint_prf) list" - -datatype (discs_sels) ('f, 'v) cond_red_pair_prf = - Cond_Red_Pair_Prf - 'f "(('f, 'v) cond_constraint \ ('f, 'v) rules \ ('f, 'v) cond_constraint_prf) list" nat nat - -datatype (discs_sels) ('q, 'f) ta_rule = TA_rule 'f "'q list" 'q ("_ _ \ _") -datatype (discs_sels) ('q, 'f) tree_automaton = Tree_Automaton "'q list" "('q, 'f) ta_rule list" "('q \ 'q) list" -datatype (discs_sels) 'q ta_relation = - Decision_Proc - | Id_Relation - | Some_Relation "('q \ 'q) list" - -datatype (discs_sels) boundstype = Roof | Match -datatype (discs_sels) ('f, 'q) bounds_info = Bounds_Info boundstype nat "'q list" "('q, 'f \ nat) tree_automaton" "'q ta_relation" - -datatype (discs_sels) ('f, 'v) pat_eqv_prf = - Pat_Dom_Renaming "('f, 'v) substL" - | Pat_Irrelevant "('f, 'v) substL" "('f, 'v) substL" - | Pat_Simplify "('f, 'v) substL" "('f, 'v) substL" - -datatype (discs_sels) pat_rule_pos = Pat_Base | Pat_Pump | Pat_Close - -datatype (discs_sels) ('f, 'v) pat_rule_prf = - Pat_OrigRule "('f, 'v) rule" bool - | Pat_InitPump "('f, 'v) pat_rule_prf" "('f, 'v) substL" "('f, 'v) substL" - | Pat_InitPumpCtxt "('f, 'v) pat_rule_prf" "('f, 'v) substL" pos 'v - | Pat_Equiv "('f, 'v) pat_rule_prf" bool "('f, 'v) pat_eqv_prf" - | Pat_Narrow "('f, 'v) pat_rule_prf" "('f, 'v) pat_rule_prf" pos - | Pat_Inst "('f, 'v) pat_rule_prf" "('f, 'v) substL" pat_rule_pos - | Pat_Rewr "('f, 'v) pat_rule_prf" "('f, 'v) term \ ('f, 'v) rseq" pat_rule_pos 'v - | Pat_Exp_Sigma "('f, 'v) pat_rule_prf" nat - -datatype (discs_sels) ('f, 'v) non_loop_prf = - Non_Loop_Prf "('f, 'v) pat_rule_prf" "('f, 'v) substL" "('f, 'v) substL" nat nat pos - -datatype (discs_sels) ('f, 'l, 'v) problem = - SN_TRS "('f, 'l, 'v) qreltrsLL" - | SN_FP_TRS "('f, 'l, 'v) fptrsLL" - | Finite_DPP "('f, 'l, 'v) dppLL" - | Unknown_Problem unknown_info - | Not_SN_TRS "('f, 'l, 'v) qtrsLL" - | Not_RelSN_TRS "('f, 'l, 'v) qreltrsLL" - | Infinite_DPP "('f, 'l, 'v) dppLL" - | Not_SN_FP_TRS "('f, 'l, 'v) fptrsLL" - -declare [[bnf_timing]] - -datatype (discs_sels) ('f, 'l, 'v, 'a, 'b, 'c, 'd, 'e) generic_assm_proof = - SN_assm_proof "('f, 'l, 'v) qreltrsLL" 'a - | Finite_assm_proof "('f, 'l, 'v) dppLL" 'b - | SN_FP_assm_proof "('f, 'l, 'v) fptrsLL" 'c - | Not_SN_assm_proof "('f, 'l, 'v) qtrsLL" 'a - | Infinite_assm_proof "('f, 'l, 'v) dppLL" 'b - | Not_RelSN_assm_proof "('f, 'l, 'v) qreltrsLL" 'c - | Not_SN_FP_assm_proof "('f, 'l, 'v) fptrsLL" 'd - | Unknown_assm_proof unknown_info 'e - -type_synonym ('f, 'l, 'v, 'a, 'b, 'c, 'd) assm_proof = "('f, 'l, 'v, 'a, 'b, 'c, dummy_prf, 'd) generic_assm_proof" - -datatype (discs_sels) ('f, 'l, 'v) assm = - SN_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) qreltrsLL" - | SN_FP_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) fptrsLL" - | Finite_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) dppLL" - | Unknown_assm "('f, 'l, 'v) problem list" unknown_info - | Not_SN_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) qtrsLL" - | Not_RelSN_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) qreltrsLL" - | Not_SN_FP_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) fptrsLL" - | Infinite_assm "('f, 'l, 'v) problem list" "('f, 'l, 'v) dppLL" - -fun 
satisfied :: "('f, 'l, 'v) problem \ bool" where - "satisfied (SN_TRS t) = (t = t)" -| "satisfied (SN_FP_TRS t) = (t \ t)" -| "satisfied (Finite_DPP d) = (d \ d)" -| "satisfied (Unknown_Problem s) = (s = ''foo'')" -| "satisfied (Not_SN_TRS (nfs, q, r)) = (length q = length r)" -| "satisfied (Not_RelSN_TRS (nfs, q, r, rw)) = (r = rw)" -| "satisfied (Infinite_DPP d) = (d = d)" -| "satisfied (Not_SN_FP_TRS t) = (t = t)" - -fun collect_assms :: "('tp \ ('f, 'l, 'v) assm list) - \ ('dpp \ ('f, 'l, 'v) assm list) - \ ('fptp \ ('f, 'l, 'v) assm list) - \ ('unk \ ('f, 'l, 'v) assm list) - \ ('f, 'l, 'v, 'tp, 'dpp, 'fptp, 'unk) assm_proof \ ('f, 'l, 'v) assm list" where - "collect_assms tp dpp fptp unk (SN_assm_proof t prf) = tp prf" -| "collect_assms tp dpp fptp unk (SN_FP_assm_proof t prf) = fptp prf" -| "collect_assms tp dpp fptp unk (Finite_assm_proof d prf) = dpp prf" -| "collect_assms tp dpp fptp unk (Unknown_assm_proof p prf) = unk prf" -| "collect_assms _ _ _ _ _ = []" - -fun collect_neg_assms :: "('tp \ ('f, 'l, 'v) assm list) - \ ('dpp \ ('f, 'l, 'v) assm list) - \ ('rtp \ ('f, 'l, 'v) assm list) - \ ('fptp \ ('f, 'l, 'v) assm list) - \ ('unk \ ('f, 'l, 'v) assm list) - \ ('f, 'l, 'v, 'tp, 'dpp, 'rtp, 'fptp, 'unk) generic_assm_proof \ ('f, 'l, 'v) assm list" where - "collect_neg_assms tp dpp rtp fptp unk (Not_SN_assm_proof t prf) = tp prf" -| "collect_neg_assms tp dpp rtp fptp unk (Infinite_assm_proof d prf) = dpp prf" -| "collect_neg_assms tp dpp rtp fptp unk (Not_RelSN_assm_proof t prf) = rtp prf" -| "collect_neg_assms tp dpp rtp fptp unk (Not_SN_FP_assm_proof t prf) = fptp prf" -| "collect_neg_assms tp dpp rtp fptp unk (Unknown_assm_proof p prf) = unk prf" -| "collect_neg_assms tp dpp rtp fptp unk _ = []" - -datatype (discs_sels) ('f, 'l, 'v) dp_nontermination_proof = - DP_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) prseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" - | DP_Nonloop "(('f, 'l) lab, 'v) non_loop_prf" - | DP_Rule_Removal "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) dp_nontermination_proof" - | DP_Q_Increase "('f, 'l, 'v) termsLL" "('f, 'l, 'v) dp_nontermination_proof" - | DP_Q_Reduction "('f, 'l, 'v) termsLL" "('f, 'l, 'v) dp_nontermination_proof" - | DP_Termination_Switch "('f, 'l) lab join_info" "('f, 'l, 'v) dp_nontermination_proof" - | DP_Instantiation "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_nontermination_proof" - | DP_Rewriting "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" "(('f, 'l) lab, 'v) rule" pos "('f, 'l, 'v) dp_nontermination_proof" - | DP_Narrowing "('f, 'l, 'v) ruleLL" pos "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_nontermination_proof" - | DP_Assume_Infinite "('f, 'l, 'v) dppLL" - "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, - ('f, 'l, 'v) dp_nontermination_proof, - ('f, 'l, 'v) reltrs_nontermination_proof, - ('f, 'l, 'v) fp_nontermination_proof, - ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" -and ('f, 'l, 'v) "trs_nontermination_proof" = - TRS_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) rseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" - | TRS_Not_Well_Formed - | TRS_Rule_Removal "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_nontermination_proof" - | TRS_String_Reversal "('f, 'l, 'v) trs_nontermination_proof" - | TRS_DP_Trans "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_nontermination_proof" - | TRS_Nonloop "(('f, 'l) lab, 'v) non_loop_prf" - | TRS_Q_Increase "('f, 'l, 'v) termsLL" "('f, 'l, 'v) trs_nontermination_proof" - | TRS_Assume_Not_SN "('f, 
'l, 'v) qtrsLL" - "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, - ('f, 'l, 'v) dp_nontermination_proof, - ('f, 'l, 'v) reltrs_nontermination_proof, - ('f, 'l, 'v) fp_nontermination_proof, - ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" -and ('f, 'l, 'v)"reltrs_nontermination_proof" = - Rel_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) prseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" - | Rel_Not_Well_Formed - | Rel_Rule_Removal "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) reltrs_nontermination_proof" - | Rel_R_Not_SN "('f, 'l, 'v) trs_nontermination_proof" - | Rel_TRS_Assume_Not_SN "('f, 'l, 'v) qreltrsLL" - "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, - ('f, 'l, 'v) dp_nontermination_proof, - ('f, 'l, 'v) reltrs_nontermination_proof, - ('f, 'l, 'v) fp_nontermination_proof, - ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" -and ('f, 'l, 'v) "fp_nontermination_proof" = - FPTRS_Loop "(('f, 'l) lab, 'v) term" "(('f, 'l) lab, 'v) rseq" "(('f, 'l) lab, 'v) substL" "(('f, 'l) lab, 'v) ctxt" - | FPTRS_Rule_Removal "('f, 'l, 'v) trsLL" "('f, 'l, 'v) fp_nontermination_proof" - | FPTRS_Assume_Not_SN "('f, 'l, 'v) fptrsLL" - "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, - ('f, 'l, 'v) dp_nontermination_proof, - ('f, 'l, 'v) reltrs_nontermination_proof, - ('f, 'l, 'v) fp_nontermination_proof, - ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" -and ('f, 'l, 'v) neg_unknown_proof = - Assume_NT_Unknown unknown_info - "('f, 'l, 'v, ('f, 'l, 'v) trs_nontermination_proof, - ('f, 'l, 'v) dp_nontermination_proof, - ('f, 'l, 'v) reltrs_nontermination_proof, - ('f, 'l, 'v) fp_nontermination_proof, - ('f, 'l, 'v) neg_unknown_proof) generic_assm_proof list" - -datatype (discs_sels) ('f, 'l, 'v) dp_termination_proof = - P_is_Empty - | Subterm_Criterion_Proc "('f, 'l) lab projL" "('f, 'l, 'v) rseqL" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Redpair_Proc "('f, 'l) lab root_redtriple_impl + ('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Redpair_UR_Proc "('f, 'l) lab root_redtriple_impl + ('f, 'l) lab redtriple_impl" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Usable_Rules_Proc "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Dep_Graph_Proc "(('f, 'l, 'v) dp_termination_proof option \ ('f, 'l, 'v) trsLL) list" - | Mono_Redpair_Proc "('f, 'l) lab redtriple_impl" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Mono_Redpair_UR_Proc "('f, 'l) lab redtriple_impl" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Size_Change_Subterm_Proc "((('f, 'l) lab, 'v) rule \ ((nat \ nat) list \ (nat \ nat) list)) list" - | Size_Change_Redpair_Proc "('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL option" - "((('f, 'l) lab, 'v) rule \ ((nat \ nat) list \ (nat \ nat) list)) list" - | Uncurry_Proc "nat option" "(('f, 'l) lab, 'v) uncurry_info" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Fcc_Proc "('f, 'l) lab" "(('f, 'l) lab, 'v) ctxt list" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Split_Proc - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" - "('f, 'l, 'v) dp_termination_proof" "('f, 'l, 'v) dp_termination_proof" - | Semlab_Proc - "(('f, 'l) lab, 'v) sl_variant" "('f, 'l, 'v) trsLL" - "(('f, 'l) lab, 'v) term list" "('f, 'l, 'v) trsLL" - "('f, 'l, 'v) 
dp_termination_proof" - | Switch_Innermost_Proc "('f, 'l) lab join_info" "('f, 'l, 'v) dp_termination_proof" - | Rewriting_Proc - "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" - "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) ruleLL" pos "('f, 'l, 'v) dp_termination_proof" - | Instantiation_Proc "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Forward_Instantiation_Proc - "('f, 'l, 'v) ruleLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL option" "('f, 'l, 'v) dp_termination_proof" - | Narrowing_Proc "('f, 'l, 'v) ruleLL" pos "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Assume_Finite - "('f, 'l, 'v) dppLL" "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" - | Unlab_Proc "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) dp_termination_proof" - | Q_Reduction_Proc "('f, 'l, 'v) termsLL" "('f, 'l, 'v) dp_termination_proof" - | Complex_Constant_Removal_Proc "(('f, 'l) lab, 'v) complex_constant_removal_prf" "('f, 'l, 'v) dp_termination_proof" - | General_Redpair_Proc - "('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trsLL" - "(('f, 'l) lab, 'v) cond_red_pair_prf" "('f, 'l, 'v) dp_termination_proof list" - | To_Trs_Proc "('f, 'l, 'v) trs_termination_proof" -and ('f, 'l, 'v) trs_termination_proof = - DP_Trans bool bool "(('f, 'l) lab, 'v) rules" "('f, 'l, 'v) dp_termination_proof" - | Rule_Removal "('f, 'l) lab redtriple_impl" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" - | String_Reversal "('f, 'l, 'v) trs_termination_proof" - | Bounds "(('f, 'l) lab, 'v) bounds_info" - | Uncurry "(('f, 'l) lab, 'v) uncurry_info" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" - | Semlab - "(('f, 'l) lab, 'v) sl_variant" "(('f, 'l) lab, 'v) term list" - "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" - | R_is_Empty - | Fcc "(('f, 'l) lab, 'v) ctxt list" "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" - | Split "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" "('f, 'l, 'v) trs_termination_proof" - | Switch_Innermost "('f, 'l) lab join_info" "('f, 'l, 'v) trs_termination_proof" - | Drop_Equality "('f, 'l, 'v) trs_termination_proof" - | Remove_Nonapplicable_Rules "('f, 'l, 'v) trsLL" "('f, 'l, 'v) trs_termination_proof" - | Assume_SN "('f, 'l, 'v) qreltrsLL" "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" -and ('f, 'l, 'v) unknown_proof = - Assume_Unknown unknown_info "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" -and ('f, 'l, 'v) fptrs_termination_proof = - Assume_FP_SN "('f, 'l, 'v) fptrsLL" "('f, 'l, 'v, ('f, 'l, 'v) trs_termination_proof, ('f, 'l, 'v) dp_termination_proof, ('f, 'l, 'v) fptrs_termination_proof, ('f, 'l, 'v) unknown_proof) assm_proof list" - -end diff -r ec44535f954a -r a105bea3936f src/HOL/Datatype_Examples/Misc_N2M.thy --- a/src/HOL/Datatype_Examples/Misc_N2M.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,471 +0,0 @@ -(* Title: HOL/Datatype_Examples/Misc_N2M.thy - Author: Dmitriy Traytel, TU Muenchen - Copyright 2014 - -Miscellaneous tests for the nested-to-mutual reduction. 
-*) - -section \Miscellaneous Tests for the Nested-to-Mutual Reduction\ - -theory Misc_N2M -imports "~~/src/HOL/Library/BNF_Axiomatization" -begin - -declare [[typedef_overloaded]] - - -locale misc -begin - -datatype 'a li = Ni | Co 'a "'a li" -datatype 'a tr = Tr "'a \ 'a tr li" - -primrec (nonexhaustive) - f_tl :: "'a \ 'a tr li \ nat" and - f_t :: "'a \ 'a tr \ nat" -where - "f_tl _ Ni = 0" | - "f_t a (Tr ts) = 1 + f_tl a (ts a)" - -datatype ('a, 'b) l = N | C 'a 'b "('a, 'b) l" -datatype ('a, 'b) t = T "(('a, 'b) t, 'a) l" "(('a, 'b) t, 'b) l" - -primrec (nonexhaustive) - f_tl2 :: "(('a, 'a) t, 'a) l \ nat" and - f_t2 :: "('a, 'a) t \ nat" -where - "f_tl2 N = 0" | - "f_t2 (T ts us) = f_tl2 ts + f_tl2 us" - -primrec (nonexhaustive) - g_tla :: "(('a, 'b) t, 'a) l \ nat" and - g_tlb :: "(('a, 'b) t, 'b) l \ nat" and - g_t :: "('a, 'b) t \ nat" -where - "g_tla N = 0" | - "g_tlb N = 1" | - "g_t (T ts us) = g_tla ts + g_tlb us" - - -datatype - 'a l1 = N1 | C1 'a "'a l1" - -datatype - ('a, 'b) t1 = T1 'a 'b "('a, 'b) t1 l1" "(nat \ ('a, 'b) t1) l1" and - ('a, 'b) t2 = T2 "('a, 'b) t1" - -primrec - h1_tl1 :: "(nat, 'a) t1 l1 \ nat" and - h1_natl1 :: "(nat \ (nat, 'a) t1) l1 \ nat" and - h1_t1 :: "(nat, 'a) t1 \ nat" -where - "h1_tl1 N1 = 0" | - "h1_tl1 (C1 t ts) = h1_t1 t + h1_tl1 ts" | - "h1_natl1 N1 = Suc 0" | - "h1_natl1 (C1 n ts) = fst n + h1_natl1 ts" | - "h1_t1 (T1 n _ ts _) = n + h1_tl1 ts" - -end - - -bnf_axiomatization ('a, 'b) M0 [wits: "('a, 'b) M0"] -bnf_axiomatization ('a, 'b) N0 [wits: "('a, 'b) N0"] -bnf_axiomatization ('a, 'b) K0 [wits: "('a, 'b) K0"] -bnf_axiomatization ('a, 'b) L0 [wits: "('a, 'b) L0"] -bnf_axiomatization ('a, 'b, 'c) G0 [wits: "('a, 'b, 'c) G0"] - -locale (*co*)mplicated -begin - -datatype 'a M = CM "('a, 'a M) M0" -datatype 'a N = CN "('a N, 'a) N0" -datatype ('a, 'b) K = CK "('a, ('a, 'b) L) K0" - and ('a, 'b) L = CL "('b, ('a, 'b) K) L0" -datatype 'a G = CG "('a, ('a G, 'a G N) K, ('a G M, 'a G) L) G0" - -primrec - fG :: "'a G \ 'a G" -and fK :: "('a G, 'a G N) K \ ('a G, 'a G N) K" -and fL :: "('a G, 'a G N) L \ ('a G, 'a G N) L" -and fM :: "'a G M \ 'a G M" where - "fG (CG x) = CG (map_G0 id fK (map_L fM fG) x)" -| "fK (CK y) = CK (map_K0 fG fL y)" -| "fL (CL z) = CL (map_L0 (map_N fG) fK z)" -| "fM (CM w) = CM (map_M0 fG fM w)" -thm fG_def fK_def fL_def fM_def fG.simps fK.simps fL.simps fM.simps - -end - -locale complicated -begin - -codatatype 'a M = CM "('a, 'a M) M0" -codatatype 'a N = CN "('a N, 'a) N0" -codatatype ('a, 'b) K = CK "('a, ('a, 'b) L) K0" - and ('a, 'b) L = CL "('b, ('a, 'b) K) L0" -codatatype 'a G = CG "('a, ('a G, 'a G N) K, ('a G M, 'a G) L) G0" - -primcorec - fG :: "'a G \ 'a G" -and fK :: "('a G, 'a G N) K \ ('a G, 'a G N) K" -and fL :: "('a G, 'a G N) L \ ('a G, 'a G N) L" -and fM :: "'a G M \ 'a G M" where - "fG x = CG (map_G0 id fK (map_L fM fG) (un_CG x))" -| "fK y = CK (map_K0 fG fL (un_CK y))" -| "fL z = CL (map_L0 (map_N fG) fK (un_CL z))" -| "fM w = CM (map_M0 fG fM (un_CM w))" -thm fG_def fK_def fL_def fM_def fG.simps fK.simps fL.simps fM.simps - -end - -datatype ('a, 'b) F1 = F1 'a 'b -datatype F2 = F2 "((unit, nat) F1, F2) F1" | F2' -datatype 'a kk1 = K1 'a | K2 "'a kk2" and 'a kk2 = K3 "'a kk1" -datatype 'a ll1 = L1 'a | L2 "'a ll2 kk2" and 'a ll2 = L3 "'a ll1" - -datatype_compat F1 -datatype_compat F2 -datatype_compat kk1 kk2 -datatype_compat ll1 ll2 - - -subsection \Deep Nesting\ - -datatype 'a tree = Empty | Node 'a "'a tree list" -datatype_compat tree - -datatype 'a ttree = TEmpty | TNode 'a "'a ttree list tree" 
-datatype_compat ttree - -datatype 'a tttree = TEmpty | TNode 'a "'a tttree list ttree list tree" -datatype_compat tttree -(* -datatype 'a ttttree = TEmpty | TNode 'a "'a ttttree list tttree list ttree list tree" -datatype_compat ttttree -*) - -datatype ('a,'b)complex = - C1 nat "'a ttree" -| C2 "('a,'b)complex list tree tree" 'b "('a,'b)complex" "('a,'b)complex2 ttree list" -and ('a,'b)complex2 = D1 "('a,'b)complex ttree" -datatype_compat complex complex2 - -datatype 'a F = F1 'a | F2 "'a F" -datatype 'a G = G1 'a | G2 "'a G F" -datatype H = H1 | H2 "H G" - -datatype_compat F -datatype_compat G -datatype_compat H - - -subsection \Primrec cache\ - -datatype 'a l = N | C 'a "'a l" -datatype ('a, 'b) t = T 'a 'b "('a, 'b) t l" - -context linorder -begin - -(* slow *) -primrec - f1_tl :: "(nat, 'a) t l \ nat" and - f1_t :: "(nat, 'a) t \ nat" -where - "f1_tl N = 0" | - "f1_tl (C t ts) = f1_t t + f1_tl ts" | - "f1_t (T n _ ts) = n + f1_tl ts" - -(* should be fast *) -primrec - f2_t :: "(nat, 'b) t \ nat" and - f2_tl :: "(nat, 'b) t l \ nat" -where - "f2_tl N = 0" | - "f2_tl (C t ts) = f2_t t + f2_tl ts" | - "f2_t (T n _ ts) = n + f2_tl ts" - -end - -(* should be fast *) -primrec - g1_t :: "('a, int) t \ nat" and - g1_tl :: "('a, int) t l \ nat" -where - "g1_t (T _ _ ts) = g1_tl ts" | - "g1_tl N = 0" | - "g1_tl (C _ ts) = g1_tl ts" - -(* should be fast *) -primrec - g2_t :: "(int, int) t \ nat" and - g2_tl :: "(int, int) t l \ nat" -where - "g2_t (T _ _ ts) = g2_tl ts" | - "g2_tl N = 0" | - "g2_tl (C _ ts) = g2_tl ts" - - -datatype - 'a l1 = N1 | C1 'a "'a l2" and - 'a l2 = N2 | C2 'a "'a l1" - -primrec - sum_l1 :: "'a::{zero,plus} l1 \ 'a" and - sum_l2 :: "'a::{zero,plus} l2 \ 'a" -where - "sum_l1 N1 = 0" | - "sum_l1 (C1 n ns) = n + sum_l2 ns" | - "sum_l2 N2 = 0" | - "sum_l2 (C2 n ns) = n + sum_l1 ns" - -datatype - ('a, 'b) t1 = T1 'a 'b "('a, 'b) t1 l1" and - ('a, 'b) t2 = T2 "('a, 'b) t1" - -(* slow *) -primrec - h1_tl1 :: "(nat, 'a) t1 l1 \ nat" and - h1_tl2 :: "(nat, 'a) t1 l2 \ nat" and - h1_t1 :: "(nat, 'a) t1 \ nat" -where - "h1_tl1 N1 = 0" | - "h1_tl1 (C1 t ts) = h1_t1 t + h1_tl2 ts" | - "h1_tl2 N2 = 0" | - "h1_tl2 (C2 t ts) = h1_t1 t + h1_tl1 ts" | - "h1_t1 (T1 n _ ts) = n + h1_tl1 ts" - -(* should be fast *) -primrec - h2_tl1 :: "(nat, 'a) t1 l1 \ nat" and - h2_tl2 :: "(nat, 'a) t1 l2 \ nat" and - h2_t1 :: "(nat, 'a) t1 \ nat" -where - "h2_tl1 N1 = 0" | - "h2_tl1 (C1 t ts) = h2_t1 t + h2_tl2 ts" | - "h2_tl2 N2 = 0" | - "h2_tl2 (C2 t ts) = h2_t1 t + h2_tl1 ts" | - "h2_t1 (T1 n _ ts) = n + h2_tl1 ts" - -(* should be fast *) -primrec - h3_tl2 :: "(nat, 'a) t1 l2 \ nat" and - h3_tl1 :: "(nat, 'a) t1 l1 \ nat" and - h3_t1 :: "(nat, 'a) t1 \ nat" -where - "h3_tl1 N1 = 0" | - "h3_tl1 (C1 t ts) = h3_t1 t + h3_tl2 ts" | - "h3_tl2 N2 = 0" | - "h3_tl2 (C2 t ts) = h3_t1 t + h3_tl1 ts" | - "h3_t1 (T1 n _ ts) = n + h3_tl1 ts" - -(* should be fast *) -primrec - i1_tl2 :: "(nat, 'a) t1 l2 \ nat" and - i1_tl1 :: "(nat, 'a) t1 l1 \ nat" and - i1_t1 :: "(nat, 'a) t1 \ nat" and - i1_t2 :: "(nat, 'a) t2 \ nat" -where - "i1_tl1 N1 = 0" | - "i1_tl1 (C1 t ts) = i1_t1 t + i1_tl2 ts" | - "i1_tl2 N2 = 0" | - "i1_tl2 (C2 t ts) = i1_t1 t + i1_tl1 ts" | - "i1_t1 (T1 n _ ts) = n + i1_tl1 ts" | - "i1_t2 (T2 t) = i1_t1 t" - -(* should be fast *) -primrec - j1_t2 :: "(nat, 'a) t2 \ nat" and - j1_t1 :: "(nat, 'a) t1 \ nat" and - j1_tl1 :: "(nat, 'a) t1 l1 \ nat" and - j1_tl2 :: "(nat, 'a) t1 l2 \ nat" -where - "j1_tl1 N1 = 0" | - "j1_tl1 (C1 t ts) = j1_t1 t + j1_tl2 ts" | - "j1_tl2 N2 = 0" | - "j1_tl2 (C2 t ts) = 
j1_t1 t + j1_tl1 ts" | - "j1_t1 (T1 n _ ts) = n + j1_tl1 ts" | - "j1_t2 (T2 t) = j1_t1 t" - - -datatype 'a l3 = N3 | C3 'a "'a l3" -datatype 'a l4 = N4 | C4 'a "'a l4" -datatype ('a, 'b) t3 = T3 'a 'b "('a, 'b) t3 l3" "('a, 'b) t3 l4" - -(* slow *) -primrec - k1_tl3 :: "(nat, 'a) t3 l3 \ nat" and - k1_tl4 :: "(nat, 'a) t3 l4 \ nat" and - k1_t3 :: "(nat, 'a) t3 \ nat" -where - "k1_tl3 N3 = 0" | - "k1_tl3 (C3 t ts) = k1_t3 t + k1_tl3 ts" | - "k1_tl4 N4 = 0" | - "k1_tl4 (C4 t ts) = k1_t3 t + k1_tl4 ts" | - "k1_t3 (T3 n _ ts us) = n + k1_tl3 ts + k1_tl4 us" - -(* should be fast *) -primrec - k2_tl4 :: "(nat, int) t3 l4 \ nat" and - k2_tl3 :: "(nat, int) t3 l3 \ nat" and - k2_t3 :: "(nat, int) t3 \ nat" -where - "k2_tl4 N4 = 0" | - "k2_tl4 (C4 t ts) = k2_t3 t + k2_tl4 ts" | - "k2_tl3 N3 = 0" | - "k2_tl3 (C3 t ts) = k2_t3 t + k2_tl3 ts" | - "k2_t3 (T3 n _ ts us) = n + k2_tl3 ts + k2_tl4 us" - - -datatype ('a, 'b) l5 = N5 | C5 'a 'b "('a, 'b) l5" -datatype ('a, 'b) l6 = N6 | C6 'a 'b "('a, 'b) l6" -datatype ('a, 'b, 'c) t4 = T4 'a 'b "(('a, 'b, 'c) t4, 'b) l5" "(('a, 'b, 'c) t4, 'c) l6" - -(* slow *) -primrec - l1_tl5 :: "((nat, 'a, 'b) t4, 'a) l5 \ nat" and - l1_tl6 :: "((nat, 'a, 'b) t4, 'b) l6 \ nat" and - l1_t4 :: "(nat, 'a, 'b) t4 \ nat" -where - "l1_tl5 N5 = 0" | - "l1_tl5 (C5 t _ ts) = l1_t4 t + l1_tl5 ts" | - "l1_tl6 N6 = 0" | - "l1_tl6 (C6 t _ ts) = l1_t4 t + l1_tl6 ts" | - "l1_t4 (T4 n _ ts us) = n + l1_tl5 ts + l1_tl6 us" - - -subsection \Primcorec Cache\ - -codatatype 'a col = N | C 'a "'a col" -codatatype ('a, 'b) cot = T 'a 'b "('a, 'b) cot col" - -context linorder -begin - -(* slow *) -primcorec - f1_cotcol :: "nat \ (nat, 'a) cot col" and - f1_cot :: "nat \ (nat, 'a) cot" -where - "n = 0 \ f1_cotcol n = N" | - "_ \ f1_cotcol n = C (f1_cot n) (f1_cotcol n)" | - "f1_cot n = T n undefined (f1_cotcol n)" - -(* should be fast *) -primcorec - f2_cotcol :: "nat \ (nat, 'b) cot col" and - f2_cot :: "nat \ (nat, 'b) cot" -where - "n = 0 \ f2_cotcol n = N" | - "_ \ f2_cotcol n = C (f2_cot n) (f2_cotcol n)" | - "f2_cot n = T n undefined (f2_cotcol n)" - -end - -(* should be fast *) -primcorec - g1_cot :: "int \ (int, 'a) cot" and - g1_cotcol :: "int \ (int, 'a) cot col" -where - "g1_cot n = T n undefined (g1_cotcol n)" | - "n = 0 \ g1_cotcol n = N" | - "_ \ g1_cotcol n = C (g1_cot n) (g1_cotcol n)" - -(* should be fast *) -primcorec - g2_cot :: "int \ (int, int) cot" and - g2_cotcol :: "int \ (int, int) cot col" -where - "g2_cot n = T n undefined (g2_cotcol n)" | - "n = 0 \ g2_cotcol n = N" | - "_ \ g2_cotcol n = C (g2_cot n) (g2_cotcol n)" - - -codatatype - 'a col1 = N1 | C1 'a "'a col2" and - 'a col2 = N2 | C2 'a "'a col1" - -codatatype - ('a, 'b) cot1 = T1 'a 'b "('a, 'b) cot1 col1" and - ('a, 'b) cot2 = T2 "('a, 'b) cot1" - -(* slow *) -primcorec - h1_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and - h1_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and - h1_cot1 :: "nat \ (nat, 'a) cot1" -where - "h1_cotcol1 n = C1 (h1_cot1 n) (h1_cotcol2 n)" | - "h1_cotcol2 n = C2 (h1_cot1 n) (h1_cotcol1 n)" | - "h1_cot1 n = T1 n undefined (h1_cotcol1 n)" - -(* should be fast *) -primcorec - h2_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and - h2_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and - h2_cot1 :: "nat \ (nat, 'a) cot1" -where - "h2_cotcol1 n = C1 (h2_cot1 n) (h2_cotcol2 n)" | - "h2_cotcol2 n = C2 (h2_cot1 n) (h2_cotcol1 n)" | - "h2_cot1 n = T1 n undefined (h2_cotcol1 n)" - -(* should be fast *) -primcorec - h3_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and - h3_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and - h3_cot1 :: 
"nat \ (nat, 'a) cot1" -where - "h3_cotcol1 n = C1 (h3_cot1 n) (h3_cotcol2 n)" | - "h3_cotcol2 n = C2 (h3_cot1 n) (h3_cotcol1 n)" | - "h3_cot1 n = T1 n undefined (h3_cotcol1 n)" - -(* should be fast *) -primcorec - i1_cotcol2 :: "nat \ (nat, 'a) cot1 col2" and - i1_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and - i1_cot1 :: "nat \ (nat, 'a) cot1" and - i1_cot2 :: "nat \ (nat, 'a) cot2" -where - "i1_cotcol1 n = C1 (i1_cot1 n) (i1_cotcol2 n)" | - "i1_cotcol2 n = C2 (i1_cot1 n) (i1_cotcol1 n)" | - "i1_cot1 n = T1 n undefined (i1_cotcol1 n)" | - "i1_cot2 n = T2 (i1_cot1 n)" - -(* should be fast *) -primcorec - j1_cot2 :: "nat \ (nat, 'a) cot2" and - j1_cot1 :: "nat \ (nat, 'a) cot1" and - j1_cotcol1 :: "nat \ (nat, 'a) cot1 col1" and - j1_cotcol2 :: "nat \ (nat, 'a) cot1 col2" -where - "j1_cotcol1 n = C1 (j1_cot1 n) (j1_cotcol2 n)" | - "j1_cotcol2 n = C2 (j1_cot1 n) (j1_cotcol1 n)" | - "j1_cot1 n = T1 n undefined (j1_cotcol1 n)" | - "j1_cot2 n = T2 (j1_cot1 n)" - - -codatatype 'a col3 = N3 | C3 'a "'a col3" -codatatype 'a col4 = N4 | C4 'a "'a col4" -codatatype ('a, 'b) cot3 = T3 'a 'b "('a, 'b) cot3 col3" "('a, 'b) cot3 col4" - -(* slow *) -primcorec - k1_cotcol3 :: "nat \ (nat, 'a) cot3 col3" and - k1_cotcol4 :: "nat \ (nat, 'a) cot3 col4" and - k1_cot3 :: "nat \ (nat, 'a) cot3" -where - "k1_cotcol3 n = C3 (k1_cot3 n) (k1_cotcol3 n)" | - "k1_cotcol4 n = C4 (k1_cot3 n) (k1_cotcol4 n)" | - "k1_cot3 n = T3 n undefined (k1_cotcol3 n) (k1_cotcol4 n)" - -(* should be fast *) -primcorec - k2_cotcol4 :: "nat \ (nat, 'a) cot3 col4" and - k2_cotcol3 :: "nat \ (nat, 'a) cot3 col3" and - k2_cot3 :: "nat \ (nat, 'a) cot3" -where - "k2_cotcol4 n = C4 (k2_cot3 n) (k2_cotcol4 n)" | - "k2_cotcol3 n = C3 (k2_cot3 n) (k2_cotcol3 n)" | - "k2_cot3 n = T3 n undefined (k2_cotcol3 n) (k2_cotcol4 n)" - -end diff -r ec44535f954a -r a105bea3936f src/HOL/Eisbach/Examples_FOL.thy --- a/src/HOL/Eisbach/Examples_FOL.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Eisbach/Examples_FOL.thy Wed Feb 17 21:06:47 2016 +0100 @@ -1,4 +1,4 @@ -(* Title: HOL/Eisbach/Examples.thy +(* Title: HOL/Eisbach/Examples_FOL.thy Author: Daniel Matichuk, NICTA/UNSW *) diff -r ec44535f954a -r a105bea3936f src/HOL/IMP/Denotational.thy --- a/src/HOL/IMP/Denotational.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/IMP/Denotational.thy Wed Feb 17 21:06:47 2016 +0100 @@ -105,7 +105,7 @@ by(simp add: cont_def) also have "\ = (f^^0){} \ \" by simp also have "\ = ?U" - by(auto simp del: funpow.simps) (metis not0_implies_Suc) + using assms funpow_decreasing le_SucI mono_if_cont by blast finally show "f ?U \ ?U" by simp qed next diff -r ec44535f954a -r a105bea3936f src/HOL/Imperative_HOL/document/root.tex --- a/src/HOL/Imperative_HOL/document/root.tex Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Imperative_HOL/document/root.tex Wed Feb 17 21:06:47 2016 +0100 @@ -1,4 +1,3 @@ - \documentclass[11pt,a4paper]{article} \usepackage{graphicx,isabelle,isabellesym,latexsym} \usepackage{amssymb} @@ -55,12 +54,6 @@ \newcommand{\isactrlbvec}{\emph\bgroup\begin{math}{}\overline\bgroup\mbox\bgroup\isastylescript} \newcommand{\isactrlevec}{\egroup\egroup\end{math}\egroup} -% Isar proof -\newcommand{\isasymproof}{$\;\langle\mathit{proof}\rangle$} - -% Isar sorry -\renewcommand{\isacommand}[1]{\ifthenelse{\equal{sorry}{#1}}{\isasymproof}{\isakeyword{#1}}} - \begin{document} diff -r ec44535f954a -r a105bea3936f src/HOL/Induct/Common_Patterns.thy --- a/src/HOL/Induct/Common_Patterns.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Induct/Common_Patterns.thy Wed Feb 17 
21:06:47 2016 +0100 @@ -42,12 +42,12 @@ proof (induct n arbitrary: x) case 0 note prem = \A 0 x\ - show "P 0 x" sorry + show "P 0 x" \ next case (Suc n) note hyp = \\x. A n x \ P n x\ and prem = \A (Suc n) x\ - show "P (Suc n) x" sorry + show "P (Suc n) x" \ qed @@ -70,13 +70,13 @@ case 0 note prem = \A (a x)\ and defn = \0 = a x\ - show "P (a x)" sorry + show "P (a x)" \ next case (Suc n) note hyp = \\x. n = a x \ A (a x) \ P (a x)\ and prem = \A (a x)\ and defn = \Suc n = a x\ - show "P (a x)" sorry + show "P (a x)" \ qed text \ @@ -99,18 +99,18 @@ shows "P n" and "Q n" proof (induct n) case 0 case 1 - show "P 0" sorry + show "P 0" \ next case 0 case 2 - show "Q 0" sorry + show "Q 0" \ next case (Suc n) case 1 note hyps = \P n\ \Q n\ - show "P (Suc n)" sorry + show "P (Suc n)" \ next case (Suc n) case 2 note hyps = \P n\ \Q n\ - show "Q (Suc n)" sorry + show "Q (Suc n)" \ qed text \ @@ -127,11 +127,11 @@ { case 1 note \A 0\ - show "P 0" sorry + show "P 0" \ next case 2 note \B 0\ - show "Q 0" sorry + show "Q 0" \ } next case (Suc n) @@ -140,11 +140,11 @@ { case 1 note \A (Suc n)\ - show "P (Suc n)" sorry + show "P (Suc n)" \ next case 2 note \B (Suc n)\ - show "Q (Suc n)" sorry + show "Q (Suc n)" \ } qed @@ -172,26 +172,26 @@ { case 1 note prem = \A 0 x\ - show "P 0 x" sorry + show "P 0 x" \ } { case 2 note prem = \B 0 y\ - show "Q 0 y" sorry + show "Q 0 y" \ } next case (Suc n) note hyps = \\x. A n x \ P n x\ \\y. B n y \ Q n y\ - then have some_intermediate_result sorry + then have some_intermediate_result \ { case 1 note prem = \A (Suc n) x\ - show "P (Suc n) x" sorry + show "P (Suc n) x" \ } { case 2 note prem = \B (Suc n) y\ - show "Q (Suc n) y" sorry + show "Q (Suc n) y" \ } qed @@ -238,22 +238,22 @@ and "R bazar" proof (induct foo and bar and bazar) case (Foo1 n) - show "P (Foo1 n)" sorry + show "P (Foo1 n)" \ next case (Foo2 bar) note \Q bar\ - show "P (Foo2 bar)" sorry + show "P (Foo2 bar)" \ next case (Bar1 b) - show "Q (Bar1 b)" sorry + show "Q (Bar1 b)" \ next case (Bar2 bazar) note \R bazar\ - show "Q (Bar2 bazar)" sorry + show "Q (Bar2 bazar)" \ next case (Bazar foo) note \P foo\ - show "R (Bazar foo)" sorry + show "R (Bazar foo)" \ qed text \ @@ -296,11 +296,11 @@ using assms proof induct case zero - show "P 0" sorry + show "P 0" \ next case (double n) note \Even n\ and \P n\ - show "P (2 * n)" sorry + show "P (2 * n)" \ qed text \ @@ -324,20 +324,20 @@ case zero { case 1 - show "P1 0" sorry + show "P1 0" \ next case 2 - show "P2 0" sorry + show "P2 0" \ } next case (double n) note \Even n\ and \P1 n\ and \P2 n\ { case 1 - show "P1 (2 * n)" sorry + show "P1 (2 * n)" \ next case 2 - show "P2 (2 * n)" sorry + show "P2 (2 * n)" \ } qed @@ -362,20 +362,20 @@ "Odd n \ Q2 n" proof (induct rule: Evn_Odd.inducts) case zero - { case 1 show "P1 0" sorry } - { case 2 show "P2 0" sorry } - { case 3 show "P3 0" sorry } + { case 1 show "P1 0" \ } + { case 2 show "P2 0" \ } + { case 3 show "P3 0" \ } next case (succ_Evn n) note \Evn n\ and \P1 n\ \P2 n\ \P3 n\ - { case 1 show "Q1 (Suc n)" sorry } - { case 2 show "Q2 (Suc n)" sorry } + { case 1 show "Q1 (Suc n)" \ } + { case 2 show "Q2 (Suc n)" \ } next case (succ_Odd n) note \Odd n\ and \Q1 n\ \Q2 n\ - { case 1 show "P1 (Suc n)" sorry } - { case 2 show "P2 (Suc n)" sorry } - { case 3 show "P3 (Suc n)" sorry } + { case 1 show "P1 (Suc n)" \ } + { case 2 show "P2 (Suc n)" \ } + { case 3 show "P3 (Suc n)" \ } qed diff -r ec44535f954a -r a105bea3936f src/HOL/Isar_Examples/Structured_Statements.thy --- 
a/src/HOL/Isar_Examples/Structured_Statements.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Isar_Examples/Structured_Statements.thy Wed Feb 17 21:06:47 2016 +0100 @@ -17,17 +17,17 @@ have "A \ B" proof - show B if A using that sorry + show B if A using that \ qed have "\ A" proof - show False if A using that sorry + show False if A using that \ qed have "\x. P x" proof - show "P x" for x sorry + show "P x" for x \ qed end @@ -40,8 +40,8 @@ have "A \ B" proof - show B if A sorry - show A if B sorry + show B if A \ + show A if B \ qed next fix A B :: bool @@ -86,16 +86,16 @@ then have something proof cases case a thm \A\ - then show ?thesis sorry + then show ?thesis \ next case b thm \B\ - then show ?thesis sorry + then show ?thesis \ next case c thm \C\ - then show ?thesis sorry + then show ?thesis \ next case d thm \D\ - then show ?thesis sorry + then show ?thesis \ qed next fix A :: "'a \ bool" @@ -107,10 +107,10 @@ then have something proof cases case a thm \A x\ - then show ?thesis sorry + then show ?thesis \ next case b thm \B y z\ - then show ?thesis sorry + then show ?thesis \ qed end @@ -124,9 +124,9 @@ have "P n" proof (induct n) - show "P 0" sorry + show "P 0" \ show "P (Suc n)" if "P n" for n thm \P n\ - using that sorry + using that \ qed end @@ -142,8 +142,8 @@ proof - show ?thesis when A (is ?A) and B (is ?B) using that by (rule r) - show ?A sorry - show ?B sorry + show ?A \ + show ?B \ qed next fix a :: 'a @@ -153,9 +153,9 @@ have C proof - show ?thesis when "A x" (is ?A) for x :: 'a \ \abstract @{term x}\ - using that sorry + using that \ show "?A a" \ \concrete @{term a}\ - sorry + \ qed end diff -r ec44535f954a -r a105bea3936f src/HOL/Isar_Examples/document/root.tex --- a/src/HOL/Isar_Examples/document/root.tex Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Isar_Examples/document/root.tex Wed Feb 17 21:06:47 2016 +0100 @@ -5,10 +5,6 @@ \isabellestyle{it} \usepackage{pdfsetup}\urlstyle{rm} -\renewcommand{\isacommand}[1] -{\ifthenelse{\equal{sorry}{#1}}{$\;$\dummyproof} - {\ifthenelse{\equal{oops}{#1}}{$\vdots$}{\isakeyword{#1}}}} - \newcommand{\DUMMYPROOF}{{\langle\mathit{proof}\rangle}} \newcommand{\dummyproof}{$\DUMMYPROOF$} diff -r ec44535f954a -r a105bea3936f src/HOL/Library/Product_Vector.thy --- a/src/HOL/Library/Product_Vector.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Library/Product_Vector.thy Wed Feb 17 21:06:47 2016 +0100 @@ -218,6 +218,20 @@ lemma continuous_on_swap[continuous_intros]: "continuous_on A prod.swap" by (simp add: prod.swap_def continuous_on_fst continuous_on_snd continuous_on_Pair continuous_on_id) +lemma continuous_on_swap_args: + assumes "continuous_on (A\B) (\(x,y). d x y)" + shows "continuous_on (B\A) (\(x,y). d y x)" +proof - + have "(\(x,y). d y x) = (\(x,y). d x y) o prod.swap" + by force + then show ?thesis + apply (rule ssubst) + apply (rule continuous_on_compose) + apply (force intro: continuous_on_subset [OF continuous_on_swap]) + apply (force intro: continuous_on_subset [OF assms]) + done +qed + lemma isCont_fst [simp]: "isCont f a \ isCont (\x. 
fst (f x)) a" by (fact continuous_fst) diff -r ec44535f954a -r a105bea3936f src/HOL/Multivariate_Analysis/Bounded_Linear_Function.thy --- a/src/HOL/Multivariate_Analysis/Bounded_Linear_Function.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Multivariate_Analysis/Bounded_Linear_Function.thy Wed Feb 17 21:06:47 2016 +0100 @@ -58,30 +58,30 @@ \Scan.succeed (Thm.declaration_attribute (fn thm => fold (fn (r, s) => Named_Theorems.add_thm s (thm RS r)) [ - (@{thm bounded_linear.has_derivative}, "Deriv.derivative_intros"), - (@{thm bounded_linear.tendsto}, "Topological_Spaces.tendsto_intros"), - (@{thm bounded_linear.continuous}, "Topological_Spaces.continuous_intros"), - (@{thm bounded_linear.continuous_on}, "Topological_Spaces.continuous_intros"), - (@{thm bounded_linear.uniformly_continuous_on}, "Topological_Spaces.continuous_intros"), - (@{thm bounded_linear_compose}, "Bounded_Linear_Function.bounded_linear_intros") + (@{thm bounded_linear.has_derivative}, @{named_theorems derivative_intros}), + (@{thm bounded_linear.tendsto}, @{named_theorems tendsto_intros}), + (@{thm bounded_linear.continuous}, @{named_theorems continuous_intros}), + (@{thm bounded_linear.continuous_on}, @{named_theorems continuous_intros}), + (@{thm bounded_linear.uniformly_continuous_on}, @{named_theorems continuous_intros}), + (@{thm bounded_linear_compose}, @{named_theorems bounded_linear_intros}) ]))\ attribute_setup bounded_bilinear = \Scan.succeed (Thm.declaration_attribute (fn thm => fold (fn (r, s) => Named_Theorems.add_thm s (thm RS r)) [ - (@{thm bounded_bilinear.FDERIV}, "Deriv.derivative_intros"), - (@{thm bounded_bilinear.tendsto}, "Topological_Spaces.tendsto_intros"), - (@{thm bounded_bilinear.continuous}, "Topological_Spaces.continuous_intros"), - (@{thm bounded_bilinear.continuous_on}, "Topological_Spaces.continuous_intros"), + (@{thm bounded_bilinear.FDERIV}, @{named_theorems derivative_intros}), + (@{thm bounded_bilinear.tendsto}, @{named_theorems tendsto_intros}), + (@{thm bounded_bilinear.continuous}, @{named_theorems continuous_intros}), + (@{thm bounded_bilinear.continuous_on}, @{named_theorems continuous_intros}), (@{thm bounded_linear_compose[OF bounded_bilinear.bounded_linear_left]}, - "Bounded_Linear_Function.bounded_linear_intros"), + @{named_theorems bounded_linear_intros}), (@{thm bounded_linear_compose[OF bounded_bilinear.bounded_linear_right]}, - "Bounded_Linear_Function.bounded_linear_intros"), + @{named_theorems bounded_linear_intros}), (@{thm bounded_linear.uniformly_continuous_on[OF bounded_bilinear.bounded_linear_left]}, - "Topological_Spaces.continuous_intros"), + @{named_theorems continuous_intros}), (@{thm bounded_linear.uniformly_continuous_on[OF bounded_bilinear.bounded_linear_right]}, - "Topological_Spaces.continuous_intros") + @{named_theorems continuous_intros}) ]))\ diff -r ec44535f954a -r a105bea3936f src/HOL/Multivariate_Analysis/Cauchy_Integral_Thm.thy --- a/src/HOL/Multivariate_Analysis/Cauchy_Integral_Thm.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Multivariate_Analysis/Cauchy_Integral_Thm.thy Wed Feb 17 21:06:47 2016 +0100 @@ -5722,8 +5722,8 @@ lemma higher_deriv_ident [simp]: "(deriv ^^ n) (\w. 
w) z = (if n = 0 then z else if n = 1 then 1 else 0)" - apply (induction n) - apply (simp_all add: deriv_ident funpow_Suc_right del: funpow.simps, simp) + apply (induction n, simp) + apply (metis higher_deriv_linear lambda_one) done corollary higher_deriv_id [simp]: @@ -6893,6 +6893,60 @@ text\Proof is based on Dixon's, as presented in Lang's "Complex Analysis" book (page 147).\ +lemma contour_integral_continuous_on_linepath_2D: + assumes "open u" and cont_dw: "\w. w \ u \ F w contour_integrable_on (linepath a b)" + and cond_uu: "continuous_on (u \ u) (\(x,y). F x y)" + and abu: "closed_segment a b \ u" + shows "continuous_on u (\w. contour_integral (linepath a b) (F w))" +proof - + have *: "\d>0. \x'\u. dist x' w < d \ + dist (contour_integral (linepath a b) (F x')) + (contour_integral (linepath a b) (F w)) \ \" + if "w \ u" "0 < \" "a \ b" for w \ + proof - + obtain \ where "\>0" and \: "cball w \ \ u" using open_contains_cball \open u\ \w \ u\ by force + let ?TZ = "{(t,z) |t z. t \ cball w \ \ z \ closed_segment a b}" + have "uniformly_continuous_on ?TZ (\(x,y). F x y)" + apply (rule compact_uniformly_continuous) + apply (rule continuous_on_subset[OF cond_uu]) + using abu \ + apply (auto simp: compact_Times simp del: mem_cball) + done + then obtain \ where "\>0" + and \: "\x x'. \x\?TZ; x'\?TZ; dist x' x < \\ \ + dist ((\(x,y). F x y) x') ((\(x,y). F x y) x) < \/norm(b - a)" + apply (rule uniformly_continuous_onE [where e = "\/norm(b - a)"]) + using \0 < \\ \a \ b\ by auto + have \: "\norm (w - x1) \ \; x2 \ closed_segment a b; + norm (w - x1') \ \; x2' \ closed_segment a b; norm ((x1', x2') - (x1, x2)) < \\ + \ norm (F x1' x2' - F x1 x2) \ \ / cmod (b - a)" + for x1 x2 x1' x2' + using \ [of "(x1,x2)" "(x1',x2')"] by (force simp add: dist_norm) + have le_ee: "cmod (contour_integral (linepath a b) (\x. F x' x - F w x)) \ \" + if "x' \ u" "cmod (x' - w) < \" "cmod (x' - w) < \" for x' + proof - + have "cmod (contour_integral (linepath a b) (\x. F x' x - F w x)) \ \/norm(b - a) * norm(b - a)" + apply (rule has_contour_integral_bound_linepath [OF has_contour_integral_integral _ \]) + apply (rule contour_integrable_diff [OF cont_dw cont_dw]) + using \0 < \\ \a \ b\ \0 < \\ \w \ u\ that + apply (auto simp: norm_minus_commute) + done + also have "... = \" using \a \ b\ by simp + finally show ?thesis . + qed + show ?thesis + apply (rule_tac x="min \ \" in exI) + using \0 < \\ \0 < \\ + apply (auto simp: dist_norm contour_integral_diff [OF cont_dw cont_dw, symmetric] \w \ u\ intro: le_ee) + done + qed + show ?thesis + apply (rule continuous_onI) + apply (cases "a=b") + apply (auto intro: *) + done +qed + text\This version has @{term"polynomial_function \"} as an additional assumption.\ lemma Cauchy_integral_formula_global_weak: assumes u: "open u" and holf: "f holomorphic_on u" @@ -7065,10 +7119,10 @@ by (meson Lim_at_infinityI) moreover have "h holomorphic_on UNIV" proof - - have con_ff: "continuous (at (x,z)) (\y. (f(snd y) - f(fst y)) / (snd y - fst y))" + have con_ff: "continuous (at (x,z)) (\(x,y). (f y - f x) / (y - x))" if "x \ u" "z \ u" "x \ z" for x z using that conf - apply (simp add: continuous_on_eq_continuous_at u) + apply (simp add: split_def continuous_on_eq_continuous_at u) apply (simp | rule continuous_intros continuous_within_compose2 [where g=f])+ done have con_fstsnd: "continuous_on UNIV (\x. 
(fst x - snd x) ::complex)" @@ -7083,8 +7137,8 @@ apply (rule continuous_on_interior [of u]) apply (simp add: holf holomorphic_deriv holomorphic_on_imp_continuous_on u) by (simp add: interior_open that u) - have tendsto_f': "((\x. if snd x = fst x then deriv f (fst x) - else (f (snd x) - f (fst x)) / (snd x - fst x)) \ deriv f x) + have tendsto_f': "((\(x,y). if y = x then deriv f (x) + else (f (y) - f (x)) / (y - x)) \ deriv f x) (at (x, x) within u \ u)" if "x \ u" for x proof (rule Lim_withinI) fix e::real assume "0 < e" @@ -7120,8 +7174,7 @@ qed show "\d>0. \xa\u \ u. 0 < dist xa (x, x) \ dist xa (x, x) < d \ - dist (if snd xa = fst xa then deriv f (fst xa) else (f (snd xa) - f (fst xa)) / (snd xa - fst xa)) - (deriv f x) \ e" + dist (case xa of (x, y) \ if y = x then deriv f x else (f y - f x) / (y - x)) (deriv f x) \ e" apply (rule_tac x="min k1 k2" in exI) using \k1>0\ \k2>0\ \e>0\ apply (force simp: dist_norm neq intro: dual_order.strict_trans2 k1 less_imp_le norm_fst_le) @@ -7134,16 +7187,16 @@ using \' using path_image_def vector_derivative_at by fastforce have f_has_cint: "\w. w \ v - path_image \ \ ((\u. f u / (u - w) ^ 1) has_contour_integral h w) \" by (simp add: V) - have cond_uu: "continuous_on (u \ u) (\y. d (fst y) (snd y))" + have cond_uu: "continuous_on (u \ u) (\(x,y). d x y)" apply (simp add: continuous_on_eq_continuous_within d_def continuous_within tendsto_f') apply (simp add: Lim_within_open_NO_MATCH open_Times u, clarify) - apply (rule Lim_transform_within_open [OF _ open_uu_Id, where f = "(\x. (f (snd x) - f (fst x)) / (snd x - fst x))"]) + apply (rule Lim_transform_within_open [OF _ open_uu_Id, where f = "(\(x,y). (f y - f x) / (y - x))"]) using con_ff apply (auto simp: continuous_within) done have hol_dw: "(\z. d z w) holomorphic_on u" if "w \ u" for w proof - - have "continuous_on u ((\y. d (fst y) (snd y)) o (\z. (w,z)))" + have "continuous_on u ((\(x,y). d x y) o (\z. (w,z)))" by (rule continuous_on_compose continuous_intros continuous_on_subset [OF cond_uu] | force intro: that)+ then have *: "continuous_on u (\z. if w = z then deriv f z else (f w - f z) / (w - z))" by (rule rev_iffD1 [OF _ continuous_on_cong [OF refl]]) (simp add: d_def field_simps) @@ -7159,53 +7212,11 @@ qed { fix a b assume abu: "closed_segment a b \ u" - then have cont_dw: "\w. w \ u \ (\z. d z w) contour_integrable_on (linepath a b)" + then have "\w. w \ u \ (\z. d z w) contour_integrable_on (linepath a b)" by (metis hol_dw continuous_on_subset contour_integrable_continuous_linepath holomorphic_on_imp_continuous_on) - have *: "\da>0. \x'\u. dist x' w < da \ - dist (contour_integral (linepath a b) (\z. d z x')) - (contour_integral (linepath a b) (\z. d z w)) \ ee" - if "w \ u" "0 < ee" "a \ b" for w ee - proof - - obtain dd where "dd>0" and dd: "cball w dd \ u" using open_contains_cball u \w \ u\ by force - let ?abdd = "{(z,t) |z t. z \ closed_segment a b \ t \ cball w dd}" - have "uniformly_continuous_on ?abdd (\y. d (fst y) (snd y))" - apply (rule compact_uniformly_continuous) - apply (rule continuous_on_subset[OF cond_uu]) - using abu dd - apply (auto simp: compact_Times simp del: mem_cball) - done - then obtain kk where "kk>0" - and kk: "\x x'. \x\?abdd; x'\?abdd; dist x' x < kk\ \ - dist ((\y. d (fst y) (snd y)) x') ((\y. 
d (fst y) (snd y)) x) < ee/norm(b - a)" - apply (rule uniformly_continuous_onE [where e = "ee/norm(b - a)"]) - using \0 < ee\ \a \ b\ by auto - have kk: "\x1 \ closed_segment a b; norm (w - x2) \ dd; - x1' \ closed_segment a b; norm (w - x2') \ dd; norm ((x1', x2') - (x1, x2)) < kk\ - \ norm (d x1' x2' - d x1 x2) \ ee / cmod (b - a)" - for x1 x2 x1' x2' - using kk [of "(x1,x2)" "(x1',x2')"] by (force simp add: dist_norm) - have le_ee: "cmod (contour_integral (linepath a b) (\x. d x x' - d x w)) \ ee" - if "x' \ u" "cmod (x' - w) < dd" "cmod (x' - w) < kk" for x' - proof - - have "cmod (contour_integral (linepath a b) (\x. d x x' - d x w)) \ ee/norm(b - a) * norm(b - a)" - apply (rule has_contour_integral_bound_linepath [OF has_contour_integral_integral _ kk]) - apply (rule contour_integrable_diff [OF cont_dw cont_dw]) - using \0 < ee\ \a \ b\ \0 < dd\ \w \ u\ that - apply (auto simp: norm_minus_commute) - done - also have "... = ee" using \a \ b\ by simp - finally show ?thesis . - qed - show ?thesis - apply (rule_tac x="min dd kk" in exI) - using \0 < dd\ \0 < kk\ - apply (auto simp: dist_norm contour_integral_diff [OF cont_dw cont_dw, symmetric] \w \ u\ intro: le_ee) - done - qed - have cont_cint_d: "continuous_on u (\w. contour_integral (linepath a b) (\z. d z w))" - apply (rule continuous_onI) - apply (cases "a=b") - apply (auto intro: *) + then have cont_cint_d: "continuous_on u (\w. contour_integral (linepath a b) (\z. d z w))" + apply (rule contour_integral_continuous_on_linepath_2D [OF \open u\ _ _ abu]) + apply (auto simp: intro: continuous_on_swap_args cond_uu) done have cont_cint_d\: "continuous_on {0..1} ((\w. contour_integral (linepath a b) (\z. d z w)) o \)" apply (rule continuous_on_compose) @@ -7223,7 +7234,6 @@ using abu by (force simp add: h_def intro: contour_integral_eq) also have "... = contour_integral \ (\w. contour_integral (linepath a b) (\z. d z w))" apply (rule contour_integral_swap) - apply (simp add: split_def) apply (rule continuous_on_subset [OF cond_uu]) using abu pasz \valid_path \\ apply (auto intro!: continuous_intros) @@ -7243,17 +7253,16 @@ have A2: "\\<^sub>F n in sequentially. \xa\path_image \. cmod (d (a n) xa - d x xa) < ee" if "0 < ee" for ee proof - let ?ddpa = "{(w,z) |w z. w \ cball x dd \ z \ path_image \}" - have "uniformly_continuous_on ?ddpa (\y. d (fst y) (snd y))" + have "uniformly_continuous_on ?ddpa (\(x,y). d x y)" apply (rule compact_uniformly_continuous [OF continuous_on_subset[OF cond_uu]]) using dd pasz \valid_path \\ apply (auto simp: compact_Times compact_valid_path_image simp del: mem_cball) done then obtain kk where "kk>0" and kk: "\x x'. \x\?ddpa; x'\?ddpa; dist x' x < kk\ \ - dist ((\y. d (fst y) (snd y)) x') ((\y. d (fst y) (snd y)) x) < ee" + dist ((\(x,y). d x y) x') ((\(x,y). d x y) x) < ee" apply (rule uniformly_continuous_onE [where e = ee]) using \0 < ee\ by auto - have kk: "\norm (w - x) \ dd; z \ path_image \; norm ((w, z) - (x, z)) < kk\ \ norm (d w z - d x z) < ee" for w z using \dd>0\ kk [of "(x,z)" "(w,z)"] by (force simp add: norm_minus_commute dist_norm) diff -r ec44535f954a -r a105bea3936f src/HOL/Multivariate_Analysis/Complex_Analysis_Basics.thy --- a/src/HOL/Multivariate_Analysis/Complex_Analysis_Basics.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Multivariate_Analysis/Complex_Analysis_Basics.thy Wed Feb 17 21:06:47 2016 +0100 @@ -447,16 +447,16 @@ lemma holomorphic_cong: "s = t ==> (\x. 
x \ s \ f x = g x) \ f holomorphic_on s \ g holomorphic_on t" by (metis holomorphic_transform) -lemma holomorphic_on_linear [holomorphic_intros]: "(op * c) holomorphic_on s" +lemma holomorphic_on_linear [simp, holomorphic_intros]: "(op * c) holomorphic_on s" unfolding holomorphic_on_def by (metis complex_differentiable_linear) -lemma holomorphic_on_const [holomorphic_intros]: "(\z. c) holomorphic_on s" +lemma holomorphic_on_const [simp, holomorphic_intros]: "(\z. c) holomorphic_on s" unfolding holomorphic_on_def by (metis complex_differentiable_const) -lemma holomorphic_on_ident [holomorphic_intros]: "(\x. x) holomorphic_on s" +lemma holomorphic_on_ident [simp, holomorphic_intros]: "(\x. x) holomorphic_on s" unfolding holomorphic_on_def by (metis complex_differentiable_ident) -lemma holomorphic_on_id [holomorphic_intros]: "id holomorphic_on s" +lemma holomorphic_on_id [simp, holomorphic_intros]: "id holomorphic_on s" unfolding id_def by (rule holomorphic_on_ident) lemma holomorphic_on_compose: @@ -545,6 +545,11 @@ unfolding DERIV_deriv_iff_complex_differentiable[symmetric] by (auto intro!: DERIV_imp_deriv derivative_eq_intros) +lemma complex_derivative_cdivide_right: + "f complex_differentiable at z \ deriv (\w. f w / c) z = deriv f z / c" + unfolding Fields.field_class.field_divide_inverse + by (blast intro: complex_derivative_cmult_right) + lemma complex_derivative_transform_within_open: "\f holomorphic_on s; g holomorphic_on s; open s; z \ s; \w. w \ s \ f w = g w\ \ deriv f z = deriv g z" diff -r ec44535f954a -r a105bea3936f src/HOL/Nat.thy --- a/src/HOL/Nat.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Nat.thy Wed Feb 17 21:06:47 2016 +0100 @@ -1329,6 +1329,9 @@ end +lemma funpow_0 [simp]: "(f ^^ 0) x = x" + by simp + lemma funpow_Suc_right: "f ^^ Suc n = f ^^ n \ f" proof (induct n) diff -r ec44535f954a -r a105bea3936f src/HOL/Quickcheck_Benchmark/Find_Unused_Assms_Examples.thy --- a/src/HOL/Quickcheck_Benchmark/Find_Unused_Assms_Examples.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,27 +0,0 @@ -theory Find_Unused_Assms_Examples -imports Complex_Main -begin - -section {* Arithmetics *} - -declare [[quickcheck_finite_types = false]] - -find_unused_assms Divides -find_unused_assms GCD -find_unused_assms Real - -section {* Set Theory *} - -declare [[quickcheck_finite_types = true]] - -find_unused_assms Fun -find_unused_assms Relation -find_unused_assms Set -find_unused_assms Wellfounded - -section {* Datatypes *} - -find_unused_assms List -find_unused_assms Map - -end diff -r ec44535f954a -r a105bea3936f src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Base.thy --- a/src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Base.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,196 +0,0 @@ -theory Needham_Schroeder_Base -imports Main "~~/src/HOL/Library/Predicate_Compile_Quickcheck" -begin - -datatype agent = Alice | Bob | Spy - -definition agents :: "agent set" -where - "agents = {Spy, Alice, Bob}" - -definition bad :: "agent set" -where - "bad = {Spy}" - -datatype key = pubEK agent | priEK agent - -fun invKey -where - "invKey (pubEK A) = priEK A" -| "invKey (priEK A) = pubEK A" - -datatype - msg = Agent agent - | Key key - | Nonce nat - | MPair msg msg - | Crypt key msg - -syntax - "_MTuple" :: "['a, args] => 'a * 'b" ("(2\_,/ _\)") -translations - "\x, y, z\" == "\x, \y, z\\" - "\x, y\" == "CONST MPair x y" - -inductive_set - parts :: "msg set => msg set" - for H :: "msg set" - where - Inj [intro]: "X \ H ==> X \ parts 
H" - | Fst: "\X,Y\ \ parts H ==> X \ parts H" - | Snd: "\X,Y\ \ parts H ==> Y \ parts H" - | Body: "Crypt K X \ parts H ==> X \ parts H" - -inductive_set - analz :: "msg set => msg set" - for H :: "msg set" - where - Inj [intro,simp] : "X \ H ==> X \ analz H" - | Fst: "\X,Y\ \ analz H ==> X \ analz H" - | Snd: "\X,Y\ \ analz H ==> Y \ analz H" - | Decrypt [dest]: - "[|Crypt K X \ analz H; Key(invKey K): analz H|] ==> X \ analz H" - -inductive_set - synth :: "msg set => msg set" - for H :: "msg set" - where - Inj [intro]: "X \ H ==> X \ synth H" - | Agent [intro]: "Agent agt \ synth H" - | MPair [intro]: "[|X \ synth H; Y \ synth H|] ==> \X,Y\ \ synth H" - | Crypt [intro]: "[|X \ synth H; Key(K) \ H|] ==> Crypt K X \ synth H" - -primrec initState -where - initState_Alice: - "initState Alice = {Key (priEK Alice)} \ (Key ` pubEK ` agents)" -| initState_Bob: - "initState Bob = {Key (priEK Bob)} \ (Key ` pubEK ` agents)" -| initState_Spy: - "initState Spy = (Key ` priEK ` bad) \ (Key ` pubEK ` agents)" - -datatype - event = Says agent agent msg - | Gets agent msg - | Notes agent msg - - -primrec knows :: "agent => event list => msg set" -where - knows_Nil: "knows A [] = initState A" -| knows_Cons: - "knows A (ev # evs) = - (if A = Spy then - (case ev of - Says A' B X => insert X (knows Spy evs) - | Gets A' X => knows Spy evs - | Notes A' X => - if A' \ bad then insert X (knows Spy evs) else knows Spy evs) - else - (case ev of - Says A' B X => - if A'=A then insert X (knows A evs) else knows A evs - | Gets A' X => - if A'=A then insert X (knows A evs) else knows A evs - | Notes A' X => - if A'=A then insert X (knows A evs) else knows A evs))" - -abbreviation (input) - spies :: "event list => msg set" where - "spies == knows Spy" - -primrec used :: "event list => msg set" -where - used_Nil: "used [] = \(parts ` initState ` agents)" -| used_Cons: "used (ev # evs) = - (case ev of - Says A B X => parts {X} \ used evs - | Gets A X => used evs - | Notes A X => parts {X} \ used evs)" - -subsection {* Preparations for sets *} - -fun find_first :: "('a => 'b option) => 'a list => 'b option" -where - "find_first f [] = None" -| "find_first f (x # xs) = (case f x of Some y => Some y | None => find_first f xs)" - -consts cps_of_set :: "'a set => ('a => term list option) => term list option" - -lemma - [code]: "cps_of_set (set xs) f = find_first f xs" -sorry - -consts pos_cps_of_set :: "'a set => ('a => (bool * term list) option) => natural => (bool * term list) option" -consts neg_cps_of_set :: "'a set => ('a Quickcheck_Exhaustive.unknown => term list Quickcheck_Exhaustive.three_valued) => natural => term list Quickcheck_Exhaustive.three_valued" - -lemma - [code]: "pos_cps_of_set (set xs) f i = find_first f xs" -sorry - -consts find_first' :: "('b Quickcheck_Exhaustive.unknown => 'a Quickcheck_Exhaustive.three_valued) - => 'b list => 'a Quickcheck_Exhaustive.three_valued" - -lemma [code]: - "find_first' f [] = Quickcheck_Exhaustive.No_value" - "find_first' f (x # xs) = (case f (Quickcheck_Exhaustive.Known x) of Quickcheck_Exhaustive.No_value => find_first' f xs | Quickcheck_Exhaustive.Value x => Quickcheck_Exhaustive.Value x | Quickcheck_Exhaustive.Unknown_value => (case find_first' f xs of Quickcheck_Exhaustive.Value x => Quickcheck_Exhaustive.Value x | _ => Quickcheck_Exhaustive.Unknown_value))" -sorry - -lemma - [code]: "neg_cps_of_set (set xs) f i = find_first' f xs" -sorry - -setup {* -let - val Fun = Predicate_Compile_Aux.Fun - val Input = Predicate_Compile_Aux.Input - val Output = 
Predicate_Compile_Aux.Output - val Bool = Predicate_Compile_Aux.Bool - val oi = Fun (Output, Fun (Input, Bool)) - val ii = Fun (Input, Fun (Input, Bool)) - fun of_set compfuns (Type ("fun", [T, _])) = - case body_type (Predicate_Compile_Aux.mk_monadT compfuns T) of - Type ("Quickcheck_Exhaustive.three_valued", _) => - Const(@{const_name neg_cps_of_set}, HOLogic.mk_setT T --> (Predicate_Compile_Aux.mk_monadT compfuns T)) - | Type ("Predicate.pred", _) => Const(@{const_name pred_of_set}, HOLogic.mk_setT T --> Predicate_Compile_Aux.mk_monadT compfuns T) - | _ => Const(@{const_name pos_cps_of_set}, HOLogic.mk_setT T --> (Predicate_Compile_Aux.mk_monadT compfuns T)) - fun member compfuns (U as Type ("fun", [T, _])) = - (absdummy T (absdummy (HOLogic.mk_setT T) (Predicate_Compile_Aux.mk_if compfuns - (Const (@{const_name "Set.member"}, T --> HOLogic.mk_setT T --> @{typ bool}) $ Bound 1 $ Bound 0)))) - -in - Core_Data.force_modes_and_compilations @{const_name Set.member} - [(oi, (of_set, false)), (ii, (member, false))] -end -*} - -subsection {* Derived equations for analz, parts and synth *} - -lemma [code]: - "analz H = (let - H' = H \ (\((%m. case m of \X, Y\ => {X, Y} | Crypt K X => if Key (invKey K) : H then {X} else {} | _ => {}) ` H)) - in if H' = H then H else analz H')" -sorry - -lemma [code]: - "parts H = (let - H' = H \ (\((%m. case m of \X, Y\ => {X, Y} | Crypt K X => {X} | _ => {}) ` H)) - in if H' = H then H else parts H')" -sorry - -definition synth' :: "msg set => msg => bool" -where - "synth' H m = (m : synth H)" - -lemmas [code_pred_intro] = synth.intros[folded synth'_def] - -code_pred [generator_cps] synth' unfolding synth'_def by (rule synth.cases) fastforce+ - -setup {* Predicate_Compile_Data.ignore_consts [@{const_name analz}, @{const_name knows}] *} -declare ListMem_iff[symmetric, code_pred_inline] -declare [[quickcheck_timing]] - -setup Exhaustive_Generators.setup_exhaustive_datatype_interpretation -declare [[quickcheck_full_support = false]] - -end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Guided_Attacker_Example.thy --- a/src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Guided_Attacker_Example.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,100 +0,0 @@ -theory Needham_Schroeder_Guided_Attacker_Example -imports Needham_Schroeder_Base -begin - -inductive_set ns_public :: "event list set" - where - (*Initial trace is empty*) - Nil: "[] \ ns_public" - - | Fake_NS1: "\evs1 \ ns_public; Nonce NA \ analz (spies evs1) \ - \ Says Spy B (Crypt (pubEK B) \Nonce NA, Agent A\) - # evs1 \ ns_public" - | Fake_NS2: "\evs1 \ ns_public; Nonce NA \ analz (spies evs1); Nonce NB \ analz (spies evs1) \ - \ Says Spy A (Crypt (pubEK A) \Nonce NA, Nonce NB\) - # evs1 \ ns_public" - - (*Alice initiates a protocol run, sending a nonce to Bob*) - | NS1: "\evs1 \ ns_public; Nonce NA \ used evs1\ - \ Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) - # evs1 \ ns_public" - (*Bob responds to Alice's message with a further nonce*) - | NS2: "\evs2 \ ns_public; Nonce NB \ used evs2; - Says A' B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs2\ - \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) - # evs2 \ ns_public" - - (*Alice proves her existence by sending NB back to Bob.*) - | NS3: "\evs3 \ ns_public; - Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs3; - Says B' A (Crypt (pubEK A) \Nonce NA, Nonce NB\) \ set evs3\ - \ Says A B (Crypt (pubEK B) (Nonce NB)) # evs3 \ ns_public" - -declare 
ListMem_iff[symmetric, code_pred_inline] - -lemmas [code_pred_intro] = ns_publicp.intros[folded synth'_def] - -code_pred [skip_proof] ns_publicp unfolding synth'_def by (rule ns_publicp.cases) fastforce+ -thm ns_publicp.equation - -code_pred [generator_cps] ns_publicp . -thm ns_publicp.generator_cps_equation - - -lemma "ns_publicp evs ==> \ (Says Alice Bob (Crypt (pubEK Bob) (Nonce NB))) : set evs" -quickcheck[smart_exhaustive, depth = 5, timeout = 100, expect = counterexample] -(*quickcheck[narrowing, size = 6, timeout = 200, verbose, expect = no_counterexample]*) -oops - -lemma - "\ns_publicp evs\ - \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) : set evs - \ A \ Spy \ B \ Spy \ A \ B - \ Nonce NB \ analz (spies evs)" -(*quickcheck[smart_exhaustive, depth = 6, timeout = 100, expect = counterexample] -quickcheck[narrowing, size = 7, timeout = 200, expect = no_counterexample]*) -oops - -section {* Proving the counterexample trace for validation *} - -lemma - assumes "A = Alice" "B = Bob" "C = Spy" "NA = 0" "NB = 1" - assumes "evs = - [Says Alice Spy (Crypt (pubEK Spy) (Nonce 1)), - Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\), - Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\), - Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\)]" (is "_ = [?e3, ?e2, ?e1, ?e0]") - shows "A \ Spy" "B \ Spy" "evs : ns_public" "Nonce NB : analz (knows Spy evs)" -proof - - from assms show "A \ Spy" by auto - from assms show "B \ Spy" by auto - have "[] : ns_public" by (rule Nil) - then have first_step: "[?e0] : ns_public" - proof (rule NS1) - show "Nonce 0 ~: used []" by eval - qed - then have "[?e1, ?e0] : ns_public" - proof (rule Fake_NS1) - show "Nonce 0 : analz (knows Spy [?e0])" by eval - qed - then have "[?e2, ?e1, ?e0] : ns_public" - proof (rule NS2) - show "Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\) \ set [?e1, ?e0]" by simp - show " Nonce 1 ~: used [?e1, ?e0]" by eval - qed - then show "evs : ns_public" - unfolding assms - proof (rule NS3) - show " Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\) \ set [?e2, ?e1, ?e0]" by simp - show "Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\) - : set [?e2, ?e1, ?e0]" by simp - qed - from assms show "Nonce NB : analz (knows Spy evs)" - apply simp - apply (rule analz.intros(4)) - apply (rule analz.intros(1)) - apply (auto simp add: bad_def) - done -qed - -end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/HOL/Quickcheck_Benchmark/Needham_Schroeder_No_Attacker_Example.thy --- a/src/HOL/Quickcheck_Benchmark/Needham_Schroeder_No_Attacker_Example.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -theory Needham_Schroeder_No_Attacker_Example -imports Needham_Schroeder_Base -begin - -inductive_set ns_public :: "event list set" - where - (*Initial trace is empty*) - Nil: "[] \ ns_public" - (*Alice initiates a protocol run, sending a nonce to Bob*) - | NS1: "\evs1 \ ns_public; Nonce NA \ used evs1\ - \ Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) - # evs1 \ ns_public" - (*Bob responds to Alice's message with a further nonce*) - | NS2: "\evs2 \ ns_public; Nonce NB \ used evs2; - Says A' B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs2\ - \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) - # evs2 \ ns_public" - - (*Alice proves her existence by sending NB back to Bob.*) - | NS3: "\evs3 \ ns_public; - Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs3; - Says B' A (Crypt (pubEK A) \Nonce NA, Nonce NB\) \ set evs3\ - \ Says A B (Crypt 
(pubEK B) (Nonce NB)) # evs3 \ ns_public" - -code_pred [skip_proof] ns_publicp . -thm ns_publicp.equation - -code_pred [generator_cps] ns_publicp . -thm ns_publicp.generator_cps_equation - -lemma "ns_publicp evs ==> \ (Says Alice Bob (Crypt (pubEK Bob) (Nonce NB))) : set evs" -(*quickcheck[random, iterations = 1000000, size = 20, timeout = 3600, verbose] -quickcheck[exhaustive, size = 8, timeout = 3600, verbose] -quickcheck[narrowing, size = 7, timeout = 3600, verbose]*) -quickcheck[smart_exhaustive, depth = 5, timeout = 30, expect = counterexample] -oops - -lemma - "\ns_publicp evs\ - \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) : set evs - \ A \ Spy \ B \ Spy \ A \ B - \ Nonce NB \ analz (spies evs)" -quickcheck[smart_exhaustive, depth = 6, timeout = 30] -quickcheck[narrowing, size = 6, timeout = 30, verbose] -oops - -end diff -r ec44535f954a -r a105bea3936f src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Unguided_Attacker_Example.thy --- a/src/HOL/Quickcheck_Benchmark/Needham_Schroeder_Unguided_Attacker_Example.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,96 +0,0 @@ -theory Needham_Schroeder_Unguided_Attacker_Example -imports Needham_Schroeder_Base -begin - -inductive_set ns_public :: "event list set" - where - (*Initial trace is empty*) - Nil: "[] \ ns_public" - - | Fake: "\evs1 \ ns_public; X \ synth (analz (spies evs1)) \ - \ Says Spy A X # evs1 \ ns_public" - - (*Alice initiates a protocol run, sending a nonce to Bob*) - | NS1: "\evs1 \ ns_public; Nonce NA \ used evs1\ - \ Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) - # evs1 \ ns_public" - (*Bob responds to Alice's message with a further nonce*) - | NS2: "\evs2 \ ns_public; Nonce NB \ used evs2; - Says A' B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs2\ - \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) - # evs2 \ ns_public" - - (*Alice proves her existence by sending NB back to Bob.*) - | NS3: "\evs3 \ ns_public; - Says A B (Crypt (pubEK B) \Nonce NA, Agent A\) \ set evs3; - Says B' A (Crypt (pubEK A) \Nonce NA, Nonce NB\) \ set evs3\ - \ Says A B (Crypt (pubEK B) (Nonce NB)) # evs3 \ ns_public" - -declare ListMem_iff[symmetric, code_pred_inline] - -lemmas [code_pred_intro] = ns_publicp.intros[folded synth'_def] - -code_pred [skip_proof] ns_publicp unfolding synth'_def by (rule ns_publicp.cases) fastforce+ -thm ns_publicp.equation - -code_pred [generator_cps] ns_publicp . 
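A note on the deleted Quickcheck benchmarks above: Needham_Schroeder_Base.thy models agents, keys, messages and events as plain datatypes, and the attacker examples prove that a concrete four-event trace lies in ns_public and leaks Bob's nonce. The following Scala sketch is an orientation aid only, assuming nothing beyond what those theories state; the object name NSTraceSketch and the constructor spellings (PubEK, AgentM, ...) are invented here and correspond to no identifier in the Isabelle sources, and Isabelle's nat is narrowed to Int for brevity.

object NSTraceSketch {
  // Transcription of the agent/key/msg/event datatypes from
  // Needham_Schroeder_Base.thy (deleted in this changeset).
  sealed trait Agent
  case object Alice extends Agent
  case object Bob extends Agent
  case object Spy extends Agent

  sealed trait Key
  case class PubEK(a: Agent) extends Key    // public encryption key of a
  case class PriEK(a: Agent) extends Key    // private key of a

  sealed trait Msg
  case class AgentM(a: Agent) extends Msg
  case class Nonce(n: Int) extends Msg
  case class MPair(x: Msg, y: Msg) extends Msg
  case class Crypt(k: Key, m: Msg) extends Msg

  sealed trait Event
  case class Says(from: Agent, to: Agent, m: Msg) extends Event

  // The counterexample trace from the validation lemma, newest event first:
  // Alice starts a run with the Spy, the Spy replays her nonce to Bob,
  // Bob answers Alice, and Alice finally sends Bob's nonce (1) to the Spy.
  val evs: List[Event] = List(
    Says(Alice, Spy, Crypt(PubEK(Spy), Nonce(1))),
    Says(Bob, Alice, Crypt(PubEK(Alice), MPair(Nonce(0), Nonce(1)))),
    Says(Spy, Bob, Crypt(PubEK(Bob), MPair(Nonce(0), AgentM(Alice)))),
    Says(Alice, Spy, Crypt(PubEK(Spy), MPair(Nonce(0), AgentM(Alice)))))

  def main(args: Array[String]): Unit = evs.reverse.foreach(println)
}

Read bottom-up, this is the man-in-the-middle run (Lowe's attack shape) that the smart_exhaustive quickcheck invocations in these theories expect to rediscover as a counterexample.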
-thm ns_publicp.generator_cps_equation - - -lemma "ns_publicp evs ==> \ (Says Alice Bob (Crypt (pubEK Bob) (Nonce NB))) : set evs" -quickcheck[smart_exhaustive, depth = 5, timeout = 200, expect = counterexample] -(*quickcheck[narrowing, size = 6, timeout = 200, verbose, expect = no_counterexample]*) -oops - -lemma - "\ns_publicp evs\ - \ Says B A (Crypt (pubEK A) \Nonce NA, Nonce NB\) : set evs - \ A \ Spy \ B \ Spy \ A \ B - \ Nonce NB \ analz (spies evs)" -quickcheck[smart_exhaustive, depth = 6, timeout = 100, expect = no_counterexample] -(*quickcheck[narrowing, size = 7, timeout = 200, expect = no_counterexample]*) -oops - -section {* Proving the counterexample trace for validation *} - -lemma - assumes "A = Alice" "B = Bob" "C = Spy" "NA = 0" "NB = 1" - assumes "evs = - [Says Alice Spy (Crypt (pubEK Spy) (Nonce 1)), - Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\), - Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\), - Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\)]" (is "_ = [?e3, ?e2, ?e1, ?e0]") - shows "A \ Spy" "B \ Spy" "evs : ns_public" "Nonce NB : analz (knows Spy evs)" -proof - - from assms show "A \ Spy" by auto - from assms show "B \ Spy" by auto - have "[] : ns_public" by (rule Nil) - then have first_step: "[?e0] : ns_public" - proof (rule NS1) - show "Nonce 0 ~: used []" by eval - qed - then have "[?e1, ?e0] : ns_public" - proof (rule Fake) - show "Crypt (pubEK Bob) \Nonce 0, Agent Alice\ : synth (analz (knows Spy [?e0]))" - by (intro synth.intros(2,3,4,1)) eval+ - qed - then have "[?e2, ?e1, ?e0] : ns_public" - proof (rule NS2) - show "Says Spy Bob (Crypt (pubEK Bob) \Nonce 0, Agent Alice\) \ set [?e1, ?e0]" by simp - show " Nonce 1 ~: used [?e1, ?e0]" by eval - qed - then show "evs : ns_public" - unfolding assms - proof (rule NS3) - show " Says Alice Spy (Crypt (pubEK Spy) \Nonce 0, Agent Alice\) \ set [?e2, ?e1, ?e0]" by simp - show "Says Bob Alice (Crypt (pubEK Alice) \Nonce 0, Nonce 1\) : set [?e2, ?e1, ?e0]" by simp - qed - from assms show "Nonce NB : analz (knows Spy evs)" - apply simp - apply (rule analz.intros(4)) - apply (rule analz.intros(1)) - apply (auto simp add: bad_def) - done -qed - -end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/HOL/ROOT --- a/src/HOL/ROOT Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/ROOT Wed Feb 17 21:06:47 2016 +0100 @@ -614,10 +614,9 @@ Ballot Erdoes_Szekeres Sum_of_Powers + Sudoku theories [skip_proofs = false] Meson_Test - theories [condition = ISABELLE_FULL_TEST] - Sudoku document_files "root.bib" "root.tex" session "HOL-Isar_Examples" in Isar_Examples = HOL + @@ -789,7 +788,7 @@ session "HOL-Datatype_Examples" in Datatype_Examples = HOL + description {* - (Co)datatype Examples, including large ones from John Harrison. + (Co)datatype Examples. 
*} options [document = false] theories @@ -808,10 +807,6 @@ Misc_Datatype Misc_Primcorec Misc_Primrec - theories [condition = ISABELLE_FULL_TEST] - Brackin - IsaFoR - Misc_N2M session "HOL-Word" (main) in Word = HOL + theories Word @@ -851,7 +846,6 @@ Boogie SMT_Examples SMT_Word_Examples - theories [condition = ISABELLE_FULL_TEST] SMT_Tests files "Boogie_Dijkstra.certs" @@ -966,13 +960,6 @@ Hotel_Example Quickcheck_Narrowing_Examples -session "HOL-Quickcheck_Benchmark" in Quickcheck_Benchmark = HOL + - theories [condition = ISABELLE_FULL_TEST, quick_and_dirty] - Find_Unused_Assms_Examples - Needham_Schroeder_No_Attacker_Example - Needham_Schroeder_Guided_Attacker_Example - Needham_Schroeder_Unguided_Attacker_Example - session "HOL-Quotient_Examples" in Quotient_Examples = HOL + description {* Author: Cezary Kaliszyk and Christian Urban @@ -1141,12 +1128,3 @@ theories TrivEx TrivEx2 - -session "HOL-Record_Benchmark" in Record_Benchmark = HOL + - description {* - Some benchmark on large record. - *} - options [document = false] - theories [condition = ISABELLE_FULL_TEST] - Record_Benchmark - diff -r ec44535f954a -r a105bea3936f src/HOL/Record_Benchmark/Record_Benchmark.thy --- a/src/HOL/Record_Benchmark/Record_Benchmark.thy Wed Feb 17 15:57:10 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,427 +0,0 @@ -(* Title: HOL/Record_Benchmark/Record_Benchmark.thy - Author: Norbert Schirmer, DFKI -*) - -section {* Benchmark for large record *} - -theory Record_Benchmark -imports Main -begin - -declare [[record_timing]] - -record many_A = -A000::nat -A001::nat -A002::nat -A003::nat -A004::nat -A005::nat -A006::nat -A007::nat -A008::nat -A009::nat -A010::nat -A011::nat -A012::nat -A013::nat -A014::nat -A015::nat -A016::nat -A017::nat -A018::nat -A019::nat -A020::nat -A021::nat -A022::nat -A023::nat -A024::nat -A025::nat -A026::nat -A027::nat -A028::nat -A029::nat -A030::nat -A031::nat -A032::nat -A033::nat -A034::nat -A035::nat -A036::nat -A037::nat -A038::nat -A039::nat -A040::nat -A041::nat -A042::nat -A043::nat -A044::nat -A045::nat -A046::nat -A047::nat -A048::nat -A049::nat -A050::nat -A051::nat -A052::nat -A053::nat -A054::nat -A055::nat -A056::nat -A057::nat -A058::nat -A059::nat -A060::nat -A061::nat -A062::nat -A063::nat -A064::nat -A065::nat -A066::nat -A067::nat -A068::nat -A069::nat -A070::nat -A071::nat -A072::nat -A073::nat -A074::nat -A075::nat -A076::nat -A077::nat -A078::nat -A079::nat -A080::nat -A081::nat -A082::nat -A083::nat -A084::nat -A085::nat -A086::nat -A087::nat -A088::nat -A089::nat -A090::nat -A091::nat -A092::nat -A093::nat -A094::nat -A095::nat -A096::nat -A097::nat -A098::nat -A099::nat -A100::nat -A101::nat -A102::nat -A103::nat -A104::nat -A105::nat -A106::nat -A107::nat -A108::nat -A109::nat -A110::nat -A111::nat -A112::nat -A113::nat -A114::nat -A115::nat -A116::nat -A117::nat -A118::nat -A119::nat -A120::nat -A121::nat -A122::nat -A123::nat -A124::nat -A125::nat -A126::nat -A127::nat -A128::nat -A129::nat -A130::nat -A131::nat -A132::nat -A133::nat -A134::nat -A135::nat -A136::nat -A137::nat -A138::nat -A139::nat -A140::nat -A141::nat -A142::nat -A143::nat -A144::nat -A145::nat -A146::nat -A147::nat -A148::nat -A149::nat -A150::nat -A151::nat -A152::nat -A153::nat -A154::nat -A155::nat -A156::nat -A157::nat -A158::nat -A159::nat -A160::nat -A161::nat -A162::nat -A163::nat -A164::nat -A165::nat -A166::nat -A167::nat -A168::nat -A169::nat -A170::nat -A171::nat -A172::nat -A173::nat -A174::nat -A175::nat -A176::nat -A177::nat -A178::nat -A179::nat 
-A180::nat -A181::nat -A182::nat -A183::nat -A184::nat -A185::nat -A186::nat -A187::nat -A188::nat -A189::nat -A190::nat -A191::nat -A192::nat -A193::nat -A194::nat -A195::nat -A196::nat -A197::nat -A198::nat -A199::nat -A200::nat -A201::nat -A202::nat -A203::nat -A204::nat -A205::nat -A206::nat -A207::nat -A208::nat -A209::nat -A210::nat -A211::nat -A212::nat -A213::nat -A214::nat -A215::nat -A216::nat -A217::nat -A218::nat -A219::nat -A220::nat -A221::nat -A222::nat -A223::nat -A224::nat -A225::nat -A226::nat -A227::nat -A228::nat -A229::nat -A230::nat -A231::nat -A232::nat -A233::nat -A234::nat -A235::nat -A236::nat -A237::nat -A238::nat -A239::nat -A240::nat -A241::nat -A242::nat -A243::nat -A244::nat -A245::nat -A246::nat -A247::nat -A248::nat -A249::nat -A250::nat -A251::nat -A252::nat -A253::nat -A254::nat -A255::nat -A256::nat -A257::nat -A258::nat -A259::nat -A260::nat -A261::nat -A262::nat -A263::nat -A264::nat -A265::nat -A266::nat -A267::nat -A268::nat -A269::nat -A270::nat -A271::nat -A272::nat -A273::nat -A274::nat -A275::nat -A276::nat -A277::nat -A278::nat -A279::nat -A280::nat -A281::nat -A282::nat -A283::nat -A284::nat -A285::nat -A286::nat -A287::nat -A288::nat -A289::nat -A290::nat -A291::nat -A292::nat -A293::nat -A294::nat -A295::nat -A296::nat -A297::nat -A298::nat -A299::nat - -record many_B = many_A + -B000::nat -B001::nat -B002::nat -B003::nat -B004::nat -B005::nat -B006::nat -B007::nat -B008::nat -B009::nat -B010::nat -B011::nat -B012::nat -B013::nat -B014::nat -B015::nat -B016::nat -B017::nat -B018::nat -B019::nat -B020::nat -B021::nat -B022::nat -B023::nat -B024::nat -B025::nat -B026::nat -B027::nat -B028::nat -B029::nat -B030::nat - -lemma "A155 (r\A255:=x\) = A155 r" - by simp - -lemma "A155 (r\A255:=x,A253:=y,A255:=z \) = A155 r" - by simp - -lemma "(r\A255:=x,A253:=y,A255:=z \) = r\A253:=y,A255:=z\" - by simp - -lemma "(r\A255:=x,A253:=y,A255:=z \) = r\A253:=y,A255:=z\" - apply (tactic {* simp_tac - (put_simpset HOL_basic_ss @{context} addsimprocs [Record.upd_simproc]) 1*}) - done - -lemma "(\r. P (A155 r)) \ (\x. P x)" - apply (tactic {* simp_tac - (put_simpset HOL_basic_ss @{context} addsimprocs [Record.split_simproc (K ~1)]) 1*}) - apply simp - done - -lemma "(\r. P (A155 r)) \ (\x. P x)" - apply (tactic {* Record.split_simp_tac @{context} [] (K ~1) 1*}) - apply simp - done - -lemma "(\r. P (A155 r)) \ (\x. P x)" - apply (tactic {* simp_tac - (put_simpset HOL_basic_ss @{context} addsimprocs [Record.split_simproc (K ~1)]) 1*}) - apply simp - done - -lemma "(\r. P (A155 r)) \ (\x. P x)" - apply (tactic {* Record.split_simp_tac @{context} [] (K ~1) 1*}) - apply simp - done - -lemma "\r. P (A155 r) \ (\x. P x)" - apply (tactic {* simp_tac - (put_simpset HOL_basic_ss @{context} addsimprocs [Record.split_simproc (K ~1)]) 1*}) - apply auto - done - -lemma "\r. P (A155 r) \ (\x. P x)" - apply (tactic {* Record.split_simp_tac @{context} [] (K ~1) 1*}) - apply auto - done - -lemma "P (A155 r) \ (\x. P x)" - apply (tactic {* Record.split_simp_tac @{context} [] (K ~1) 1*}) - apply auto - done - -lemma fixes r shows "P (A155 r) \ (\x. P x)" - apply (tactic {* Record.split_simp_tac @{context} [] (K ~1) 1*}) - apply auto - done - - -notepad -begin - fix P r - assume "P (A155 r)" - then have "\x. P x" - apply - - apply (tactic {* Record.split_simp_tac @{context} [] (K ~1) 1*}) - apply auto - done -end - - -lemma "\r. 
A155 r = x" - apply (tactic {*simp_tac - (put_simpset HOL_basic_ss @{context} addsimprocs [Record.ex_sel_eq_simproc]) 1*}) - done - -print_record many_A - -print_record many_B - -end \ No newline at end of file diff -r ec44535f954a -r a105bea3936f src/HOL/Series.thy --- a/src/HOL/Series.thy Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Series.thy Wed Feb 17 21:06:47 2016 +0100 @@ -129,6 +129,10 @@ (simp add: eq atLeast0LessThan del: add_Suc_right) qed +corollary sums_0: + "(\n. f n = 0) \ (f sums 0)" + by (metis (no_types) finite.emptyI setsum.empty sums_finite) + lemma summable_finite: "finite N \ (\n. n \ N \ f n = 0) \ summable f" by (rule sums_summable) (rule sums_finite) diff -r ec44535f954a -r a105bea3936f src/HOL/Tools/ATP/atp_problem_generate.ML --- a/src/HOL/Tools/ATP/atp_problem_generate.ML Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Tools/ATP/atp_problem_generate.ML Wed Feb 17 21:06:47 2016 +0100 @@ -1675,12 +1675,12 @@ |> class_membs_of_types type_enc add_sorts_on_tvar |> map (class_atom type_enc))) #> (case type_enc of - Native (_, poly, _) => + Native (_, Type_Class_Polymorphic, _) => mk_atyquant AForall (map (fn TVar (z as (_, S)) => - (AType ((tvar_name z, []), []), - if poly = Type_Class_Polymorphic then map (`make_class) (normalize_classes S) - else [])) Ts) - | _ => mk_aquant AForall (map (fn TVar z => (tvar_name z, NONE)) Ts))) + (AType ((tvar_name z, []), []), map (`make_class) (normalize_classes S) )) Ts) + | Native (_, Raw_Polymorphic _, _) => + mk_atyquant AForall (map (fn TVar (z as (_, S)) => (AType ((tvar_name z, []), []), [])) Ts) + | _ => mk_aquant AForall (map (fn TVar z => (tvar_name z, NONE)) Ts))) fun eq_formula type_enc atomic_Ts bounds pred_sym tm1 tm2 = (if pred_sym then AConn (AIff, [AAtom tm1, AAtom tm2]) diff -r ec44535f954a -r a105bea3936f src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Wed Feb 17 15:57:10 2016 +0100 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Wed Feb 17 21:06:47 2016 +0100 @@ -156,284 +156,295 @@ let val (verbose, alt_metis_args, preplay_timeout, compress, try0, minimize, atp_proof0, goal) = isar_params () - - val systematic_methods' = insert (op =) (Metis_Method alt_metis_args) systematic_methods + in + if null atp_proof0 then + one_line_proof_text ctxt 0 one_line_params + else + let + val systematic_methods' = insert (op =) (Metis_Method alt_metis_args) systematic_methods - fun massage_methods (meths as meth :: _) = - if not try0 then [meth] - else if smt_proofs = SOME true then SMT_Method :: meths - else meths - - val (params, _, concl_t) = strip_subgoal goal subgoal ctxt - val fixes = map (fn (s, T) => (Binding.name s, SOME T, NoSyn)) params - val ctxt = ctxt |> Variable.set_body false |> Proof_Context.add_fixes fixes |> snd + fun massage_methods (meths as meth :: _) = + if not try0 then [meth] + else if smt_proofs = SOME true then SMT_Method :: meths + else meths - val do_preplay = preplay_timeout <> Time.zeroTime - val compress = - (case compress of - NONE => if isar_proofs = NONE andalso do_preplay then 1000.0 else 10.0 - | SOME n => n) + val (params, _, concl_t) = strip_subgoal goal subgoal ctxt + val fixes = map (fn (s, T) => (Binding.name s, SOME T, NoSyn)) params + val ctxt = ctxt |> Variable.set_body false |> Proof_Context.add_fixes fixes |> snd - fun is_fixed ctxt = Variable.is_declared ctxt orf Name.is_skolem - fun skolems_of ctxt t = Term.add_frees t [] |> filter_out (is_fixed ctxt o fst) |> rev + val do_preplay = preplay_timeout <> Time.zeroTime + val 
compress = + (case compress of + NONE => if isar_proofs = NONE andalso do_preplay then 1000.0 else 10.0 + | SOME n => n) - fun get_role keep_role ((num, _), role, t, rule, _) = - if keep_role role then SOME ((raw_label_of_num num, t), rule) else NONE - - val atp_proof = - fold_rev add_line_pass1 atp_proof0 [] - |> add_lines_pass2 [] + fun is_fixed ctxt = Variable.is_declared ctxt orf Name.is_skolem + fun skolems_of ctxt t = Term.add_frees t [] |> filter_out (is_fixed ctxt o fst) |> rev - val conjs = - map_filter (fn (name, role, _, _, _) => - if member (op =) [Conjecture, Negated_Conjecture] role then SOME name else NONE) - atp_proof - val assms = map_filter (Option.map fst o get_role (curry (op =) Hypothesis)) atp_proof + fun get_role keep_role ((num, _), role, t, rule, _) = + if keep_role role then SOME ((raw_label_of_num num, t), rule) else NONE + + val atp_proof = + fold_rev add_line_pass1 atp_proof0 [] + |> add_lines_pass2 [] + + val conjs = + map_filter (fn (name, role, _, _, _) => + if member (op =) [Conjecture, Negated_Conjecture] role then SOME name else NONE) + atp_proof + val assms = map_filter (Option.map fst o get_role (curry (op =) Hypothesis)) atp_proof - fun add_lemma ((l, t), rule) ctxt = - let - val (skos, meths) = - (if is_skolemize_rule rule then (skolems_of ctxt t, skolem_methods) - else if is_arith_rule rule then ([], arith_methods) - else ([], rewrite_methods)) - ||> massage_methods - in - (Prove ([], skos, l, t, [], ([], []), meths, ""), - ctxt |> not (null skos) ? (Variable.add_fixes (map fst skos) #> snd)) - end + fun add_lemma ((l, t), rule) ctxt = + let + val (skos, meths) = + (if is_skolemize_rule rule then (skolems_of ctxt t, skolem_methods) + else if is_arith_rule rule then ([], arith_methods) + else ([], rewrite_methods)) + ||> massage_methods + in + (Prove ([], skos, l, t, [], ([], []), meths, ""), + ctxt |> not (null skos) ? (Variable.add_fixes (map fst skos) #> snd)) + end - val (lems, _) = - fold_map add_lemma (map_filter (get_role (curry (op =) Lemma)) atp_proof) ctxt - - val bot = #1 (List.last atp_proof) - - val refute_graph = - atp_proof - |> map (fn (name, _, _, _, from) => (from, name)) - |> make_refute_graph bot - |> fold (Atom_Graph.default_node o rpair ()) conjs - - val axioms = axioms_of_refute_graph refute_graph conjs + val (lems, _) = + fold_map add_lemma (map_filter (get_role (curry (op =) Lemma)) atp_proof) ctxt - val tainted = tainted_atoms_of_refute_graph refute_graph conjs - val is_clause_tainted = exists (member (op =) tainted) - val steps = - Symtab.empty - |> fold (fn (name as (s, _), role, t, rule, _) => - Symtab.update_new (s, (rule, t - |> (if is_clause_tainted [name] then - HOLogic.dest_Trueprop - #> role <> Conjecture ? 
s_not - #> fold exists_of (map Var (Term.add_vars t [])) - #> HOLogic.mk_Trueprop - else - I)))) - atp_proof + val bot = #1 (List.last atp_proof) + + val refute_graph = + atp_proof + |> map (fn (name, _, _, _, from) => (from, name)) + |> make_refute_graph bot + |> fold (Atom_Graph.default_node o rpair ()) conjs + + val axioms = axioms_of_refute_graph refute_graph conjs - fun is_referenced_in_step _ (Let _) = false - | is_referenced_in_step l (Prove (_, _, _, _, subs, (ls, _), _, _)) = - member (op =) ls l orelse exists (is_referenced_in_proof l) subs - and is_referenced_in_proof l (Proof (_, _, steps)) = - exists (is_referenced_in_step l) steps + val tainted = tainted_atoms_of_refute_graph refute_graph conjs + val is_clause_tainted = exists (member (op =) tainted) + val steps = + Symtab.empty + |> fold (fn (name as (s, _), role, t, rule, _) => + Symtab.update_new (s, (rule, t + |> (if is_clause_tainted [name] then + HOLogic.dest_Trueprop + #> role <> Conjecture ? s_not + #> fold exists_of (map Var (Term.add_vars t [])) + #> HOLogic.mk_Trueprop + else + I)))) + atp_proof - fun insert_lemma_in_step lem - (step as Prove (qs, fix, l, t, subs, (ls, gs), meths, comment)) = - let val l' = the (label_of_isar_step lem) in - if member (op =) ls l' then - [lem, step] - else - let val refs = map (is_referenced_in_proof l') subs in - if length (filter I refs) = 1 then - let - val subs' = map2 (fn false => I | true => insert_lemma_in_proof lem) refs subs - in - [Prove (qs, fix, l, t, subs', (ls, gs), meths, comment)] - end - else + fun is_referenced_in_step _ (Let _) = false + | is_referenced_in_step l (Prove (_, _, _, _, subs, (ls, _), _, _)) = + member (op =) ls l orelse exists (is_referenced_in_proof l) subs + and is_referenced_in_proof l (Proof (_, _, steps)) = + exists (is_referenced_in_step l) steps + + fun insert_lemma_in_step lem + (step as Prove (qs, fix, l, t, subs, (ls, gs), meths, comment)) = + let val l' = the (label_of_isar_step lem) in + if member (op =) ls l' then [lem, step] + else + let val refs = map (is_referenced_in_proof l') subs in + if length (filter I refs) = 1 then + let + val subs' = map2 (fn false => I | true => insert_lemma_in_proof lem) refs + subs + in + [Prove (qs, fix, l, t, subs', (ls, gs), meths, comment)] + end + else + [lem, step] + end end - end - and insert_lemma_in_steps lem [] = [lem] - | insert_lemma_in_steps lem (step :: steps) = - if is_referenced_in_step (the (label_of_isar_step lem)) step then - insert_lemma_in_step lem step @ steps - else - step :: insert_lemma_in_steps lem steps - and insert_lemma_in_proof lem (Proof (fix, assms, steps)) = - Proof (fix, assms, insert_lemma_in_steps lem steps) - - val rule_of_clause_id = fst o the o Symtab.lookup steps o fst - - val finish_off = close_form #> rename_bound_vars - - fun prop_of_clause [(num, _)] = Symtab.lookup steps num |> the |> snd |> finish_off - | prop_of_clause names = - let - val lits = - map (HOLogic.dest_Trueprop o snd) (map_filter (Symtab.lookup steps o fst) names) - in - (case List.partition (can HOLogic.dest_not) lits of - (negs as _ :: _, pos as _ :: _) => - s_imp (Library.foldr1 s_conj (map HOLogic.dest_not negs), Library.foldr1 s_disj pos) - | _ => fold (curry s_disj) lits @{term False}) - end - |> HOLogic.mk_Trueprop |> finish_off - - fun maybe_show outer c = if outer andalso eq_set (op =) (c, conjs) then [Show] else [] - - fun isar_steps outer predecessor accum [] = - accum - |> (if tainted = [] then - (* e.g., trivial, empty proof by Z3 *) - cons (Prove (if outer then [Show] else [], [], 
no_label, concl_t, [], - sort_facts (the_list predecessor, []), massage_methods systematic_methods', "")) + and insert_lemma_in_steps lem [] = [lem] + | insert_lemma_in_steps lem (step :: steps) = + if is_referenced_in_step (the (label_of_isar_step lem)) step then + insert_lemma_in_step lem step @ steps else - I) - |> rev - | isar_steps outer _ accum (Have (id, (gamma, c)) :: infs) = - let - val l = label_of_clause c - val t = prop_of_clause c - val rule = rule_of_clause_id id - val skolem = is_skolemize_rule rule + step :: insert_lemma_in_steps lem steps + and insert_lemma_in_proof lem (Proof (fix, assms, steps)) = + Proof (fix, assms, insert_lemma_in_steps lem steps) + + val rule_of_clause_id = fst o the o Symtab.lookup steps o fst + + val finish_off = close_form #> rename_bound_vars - val deps = ([], []) - |> fold add_fact_of_dependency gamma - |> is_maybe_ext_rule rule ? add_global_fact [short_thm_name ctxt ext] - |> sort_facts - val meths = - (if skolem then skolem_methods - else if is_arith_rule rule then arith_methods - else if is_datatype_rule rule then datatype_methods - else systematic_methods') - |> massage_methods + fun prop_of_clause [(num, _)] = Symtab.lookup steps num |> the |> snd |> finish_off + | prop_of_clause names = + let + val lits = + map (HOLogic.dest_Trueprop o snd) (map_filter (Symtab.lookup steps o fst) names) + in + (case List.partition (can HOLogic.dest_not) lits of + (negs as _ :: _, pos as _ :: _) => + s_imp (Library.foldr1 s_conj (map HOLogic.dest_not negs), + Library.foldr1 s_disj pos) + | _ => fold (curry s_disj) lits @{term False}) + end + |> HOLogic.mk_Trueprop |> finish_off + + fun maybe_show outer c = if outer andalso eq_set (op =) (c, conjs) then [Show] else [] - fun prove sub facts = Prove (maybe_show outer c, [], l, t, sub, facts, meths, "") - fun steps_of_rest step = isar_steps outer (SOME l) (step :: accum) infs - in - if is_clause_tainted c then - (case gamma of - [g] => - if skolem andalso is_clause_tainted g then - let - val skos = skolems_of ctxt (prop_of_clause g) - val subproof = Proof (skos, [], rev accum) - in - isar_steps outer (SOME l) [prove [subproof] ([], [])] infs - end - else - steps_of_rest (prove [] deps) - | _ => steps_of_rest (prove [] deps)) - else - steps_of_rest - (if skolem then - (case skolems_of ctxt t of - [] => prove [] deps - | skos => Prove ([], skos, l, t, [], deps, meths, "")) - else - prove [] deps) - end - | isar_steps outer predecessor accum (Cases cases :: infs) = - let - fun isar_case (c, subinfs) = - isar_proof false [] [(label_of_clause c, prop_of_clause c)] [] subinfs - val c = succedent_of_cases cases - val l = label_of_clause c - val t = prop_of_clause c - val step = - Prove (maybe_show outer c, [], l, t, - map isar_case (filter_out (null o snd) cases), - sort_facts (the_list predecessor, []), massage_methods systematic_methods', "") - in - isar_steps outer (SOME l) (step :: accum) infs - end - and isar_proof outer fix assms lems infs = - Proof (fix, assms, fold_rev insert_lemma_in_steps lems (isar_steps outer NONE [] infs)) + fun isar_steps outer predecessor accum [] = + accum + |> (if tainted = [] then + (* e.g., trivial, empty proof by Z3 *) + cons (Prove (if outer then [Show] else [], [], no_label, concl_t, [], + sort_facts (the_list predecessor, []), massage_methods systematic_methods', + "")) + else + I) + |> rev + | isar_steps outer _ accum (Have (id, (gamma, c)) :: infs) = + let + val l = label_of_clause c + val t = prop_of_clause c + val rule = rule_of_clause_id id + val skolem = is_skolemize_rule 
rule
-    val trace = Config.get ctxt trace
+        val deps = ([], [])
+          |> fold add_fact_of_dependency gamma
+          |> is_maybe_ext_rule rule ? add_global_fact [short_thm_name ctxt ext]
+          |> sort_facts
+        val meths =
+          (if skolem then skolem_methods
+           else if is_arith_rule rule then arith_methods
+           else if is_datatype_rule rule then datatype_methods
+           else systematic_methods')
+          |> massage_methods

-    val canonical_isar_proof =
-      refute_graph
-      |> trace ? tap (tracing o prefix "Refute graph:\n" o string_of_refute_graph)
-      |> redirect_graph axioms tainted bot
-      |> trace ? tap (tracing o prefix "Direct proof:\n" o string_of_direct_proof)
-      |> isar_proof true params assms lems
-      |> postprocess_isar_proof_remove_show_stuttering
-      |> postprocess_isar_proof_remove_unreferenced_steps I
-      |> relabel_isar_proof_canonically
-
-    val ctxt = ctxt |> enrich_context_with_local_facts canonical_isar_proof
-
-    val preplay_data = Unsynchronized.ref Canonical_Label_Tab.empty
-
-    val _ = fold_isar_steps (fn meth =>
-        K (set_preplay_outcomes_of_isar_step ctxt preplay_timeout preplay_data meth []))
-      (steps_of_isar_proof canonical_isar_proof) ()
+        fun prove sub facts = Prove (maybe_show outer c, [], l, t, sub, facts, meths, "")
+        fun steps_of_rest step = isar_steps outer (SOME l) (step :: accum) infs
+      in
+        if is_clause_tainted c then
+          (case gamma of
+            [g] =>
+            if skolem andalso is_clause_tainted g then
+              let
+                val skos = skolems_of ctxt (prop_of_clause g)
+                val subproof = Proof (skos, [], rev accum)
+              in
+                isar_steps outer (SOME l) [prove [subproof] ([], [])] infs
+              end
+            else
+              steps_of_rest (prove [] deps)
+          | _ => steps_of_rest (prove [] deps))
+        else
+          steps_of_rest
+            (if skolem then
+               (case skolems_of ctxt t of
+                 [] => prove [] deps
+               | skos => Prove ([], skos, l, t, [], deps, meths, ""))
+             else
+               prove [] deps)
+      end
+    | isar_steps outer predecessor accum (Cases cases :: infs) =
+      let
+        fun isar_case (c, subinfs) =
+          isar_proof false [] [(label_of_clause c, prop_of_clause c)] [] subinfs
+        val c = succedent_of_cases cases
+        val l = label_of_clause c
+        val t = prop_of_clause c
+        val step =
+          Prove (maybe_show outer c, [], l, t,
+            map isar_case (filter_out (null o snd) cases),
+            sort_facts (the_list predecessor, []), massage_methods systematic_methods',
+            "")
+      in
+        isar_steps outer (SOME l) (step :: accum) infs
+      end
+  and isar_proof outer fix assms lems infs =
+    Proof (fix, assms,
+      fold_rev insert_lemma_in_steps lems (isar_steps outer NONE [] infs))

-    fun str_of_preplay_outcome outcome =
-      if Lazy.is_finished outcome then string_of_play_outcome (Lazy.force outcome) else "?"
-    fun str_of_meth l meth =
-      string_of_proof_method ctxt [] meth ^ " " ^
-      str_of_preplay_outcome (preplay_outcome_of_isar_step_for_method (!preplay_data) l meth)
-    fun comment_of l = map (str_of_meth l) #> commas
+    val trace = Config.get ctxt trace

-    fun trace_isar_proof label proof =
-      if trace then
-        tracing (timestamp () ^ "\n" ^ label ^ ":\n\n" ^
-          string_of_isar_proof ctxt subgoal subgoal_count
-            (comment_isar_proof comment_of proof) ^ "\n")
-      else
-        ()
+    val canonical_isar_proof =
+      refute_graph
+      |> trace ? tap (tracing o prefix "Refute graph:\n" o string_of_refute_graph)
+      |> redirect_graph axioms tainted bot
+      |> trace ? tap (tracing o prefix "Direct proof:\n" o string_of_direct_proof)
+      |> isar_proof true params assms lems
+      |> postprocess_isar_proof_remove_show_stuttering
+      |> postprocess_isar_proof_remove_unreferenced_steps I
+      |> relabel_isar_proof_canonically

-    fun comment_of l (meth :: _) =
-      (case (verbose,
-          Lazy.force (preplay_outcome_of_isar_step_for_method (!preplay_data) l meth)) of
-        (false, Played _) => ""
-      | (_, outcome) => string_of_play_outcome outcome)
+    val ctxt = ctxt |> enrich_context_with_local_facts canonical_isar_proof
+
+    val preplay_data = Unsynchronized.ref Canonical_Label_Tab.empty
+
+    val _ = fold_isar_steps (fn meth =>
+        K (set_preplay_outcomes_of_isar_step ctxt preplay_timeout preplay_data meth []))
+      (steps_of_isar_proof canonical_isar_proof) ()

-    val (play_outcome, isar_proof) =
-      canonical_isar_proof
-      |> tap (trace_isar_proof "Original")
-      |> compress_isar_proof ctxt compress preplay_timeout preplay_data
-      |> tap (trace_isar_proof "Compressed")
-      |> postprocess_isar_proof_remove_unreferenced_steps
-        (keep_fastest_method_of_isar_step (!preplay_data)
-         #> minimize ? minimize_isar_step_dependencies ctxt preplay_data)
-      |> tap (trace_isar_proof "Minimized")
-      |> `(preplay_outcome_of_isar_proof (!preplay_data))
-      ||> (comment_isar_proof comment_of
-           #> chain_isar_proof
-           #> kill_useless_labels_in_isar_proof
-           #> relabel_isar_proof_nicely
-           #> rationalize_obtains_in_isar_proofs ctxt)
-  in
-    (case (num_chained, add_isar_steps (steps_of_isar_proof isar_proof) 0) of
-      (0, 1) =>
-      one_line_proof_text ctxt 0
-        (if play_outcome_ord (play_outcome, one_line_play) = LESS then
-           (case isar_proof of
-             Proof (_, _, [Prove (_, _, _, _, _, (_, gfs), meth :: _, _)]) =>
-             let
-               val used_facts' = filter (fn (s, (sc, _)) =>
-                 member (op =) gfs s andalso sc <> Chained) used_facts
-             in
-               ((used_facts', (meth, play_outcome)), banner, subgoal, subgoal_count)
-             end)
-         else
-           one_line_params) ^
-      (if isar_proofs = SOME true then "\n(No Isar proof available.)"
-       else "")
-    | (_, num_steps) =>
-      let
-        val msg =
-          (if verbose then [string_of_int num_steps ^ " step" ^ plural_s num_steps] else []) @
-          (if do_preplay then [string_of_play_outcome play_outcome] else [])
+    fun str_of_preplay_outcome outcome =
+      if Lazy.is_finished outcome then string_of_play_outcome (Lazy.force outcome) else "?"
+    fun str_of_meth l meth =
+      string_of_proof_method ctxt [] meth ^ " " ^
+      str_of_preplay_outcome
+        (preplay_outcome_of_isar_step_for_method (!preplay_data) l meth)
+    fun comment_of l = map (str_of_meth l) #> commas
+
+    fun trace_isar_proof label proof =
+      if trace then
+        tracing (timestamp () ^ "\n" ^ label ^ ":\n\n" ^
+          string_of_isar_proof ctxt subgoal subgoal_count
+            (comment_isar_proof comment_of proof) ^ "\n")
+      else
+        ()
+
+    fun comment_of l (meth :: _) =
+      (case (verbose,
+          Lazy.force (preplay_outcome_of_isar_step_for_method (!preplay_data) l meth)) of
+        (false, Played _) => ""
+      | (_, outcome) => string_of_play_outcome outcome)
+
+    val (play_outcome, isar_proof) =
+      canonical_isar_proof
+      |> tap (trace_isar_proof "Original")
+      |> compress_isar_proof ctxt compress preplay_timeout preplay_data
+      |> tap (trace_isar_proof "Compressed")
+      |> postprocess_isar_proof_remove_unreferenced_steps
+        (keep_fastest_method_of_isar_step (!preplay_data)
+         #> minimize ? minimize_isar_step_dependencies ctxt preplay_data)
+      |> tap (trace_isar_proof "Minimized")
+      |> `(preplay_outcome_of_isar_proof (!preplay_data))
+      ||> (comment_isar_proof comment_of
+           #> chain_isar_proof
+           #> kill_useless_labels_in_isar_proof
+           #> relabel_isar_proof_nicely
+           #> rationalize_obtains_in_isar_proofs ctxt)
   in
-    one_line_proof_text ctxt 0 one_line_params ^
-    "\n\nIsar proof" ^ (commas msg |> not (null msg) ? enclose " (" ")") ^ ":\n" ^
-    Active.sendback_markup [Markup.padding_command]
-      (string_of_isar_proof ctxt subgoal subgoal_count isar_proof)
-  end)
+    (case (num_chained, add_isar_steps (steps_of_isar_proof isar_proof) 0) of
+      (0, 1) =>
+      one_line_proof_text ctxt 0
+        (if play_outcome_ord (play_outcome, one_line_play) = LESS then
+           (case isar_proof of
+             Proof (_, _, [Prove (_, _, _, _, _, (_, gfs), meth :: _, _)]) =>
+             let
+               val used_facts' = filter (fn (s, (sc, _)) =>
+                 member (op =) gfs s andalso sc <> Chained) used_facts
+             in
+               ((used_facts', (meth, play_outcome)), banner, subgoal, subgoal_count)
+             end)
+         else
+           one_line_params) ^
+        (if isar_proofs = SOME true then "\n(No Isar proof available.)" else "")
+    | (_, num_steps) =>
+      let
+        val msg =
+          (if verbose then [string_of_int num_steps ^ " step" ^ plural_s num_steps]
+           else []) @
+          (if do_preplay then [string_of_play_outcome play_outcome] else [])
+      in
+        one_line_proof_text ctxt 0 one_line_params ^
+        "\n\nIsar proof" ^ (commas msg |> not (null msg) ? enclose " (" ")") ^ ":\n" ^
+        Active.sendback_markup [Markup.padding_command]
+          (string_of_isar_proof ctxt subgoal subgoal_count isar_proof)
+      end)
+  end
 end
 in
   if debug then
diff -r ec44535f954a -r a105bea3936f src/HOL/Topological_Spaces.thy
--- a/src/HOL/Topological_Spaces.thy Wed Feb 17 15:57:10 2016 +0100
+++ b/src/HOL/Topological_Spaces.thy Wed Feb 17 21:06:47 2016 +0100
@@ -1076,6 +1076,9 @@
 lemma lim_le: "convergent f \<Longrightarrow> (\<And>n. f n \<le> (x::'a::linorder_topology)) \<Longrightarrow> lim f \<le> x"
   using LIMSEQ_le_const2[of f "lim f" x] by (simp add: convergent_LIMSEQ_iff)

+lemma lim_const [simp]: "lim (\<lambda>m. a) = a"
+  by (simp add: limI)
+
 subsubsection \<open>Increasing and Decreasing Series\<close>

 lemma incseq_le: "incseq X \<Longrightarrow> X \<longlonglongrightarrow> L \<Longrightarrow> X n \<le> (L::'a::linorder_topology)"
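Editorial note (not part of the patch): the new [simp] rule above records that the limit of a constant sequence is that constant,

    \lim_{m \to \infty} a \,=\, a

which "by (simp add: limI)" discharges once simp establishes that the constant sequence converges to a.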
a) = a" + by (simp add: limI) + subsubsection\Increasing and Decreasing Series\ lemma incseq_le: "incseq X \ X \ L \ X n \ (L::'a::linorder_topology)" diff -r ec44535f954a -r a105bea3936f src/Pure/Concurrent/bash.ML --- a/src/Pure/Concurrent/bash.ML Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/Concurrent/bash.ML Wed Feb 17 21:06:47 2016 +0100 @@ -40,10 +40,11 @@ File.shell_path script_path ^ " > " ^ File.shell_path out_path ^ " 2> " ^ File.shell_path err_path; - val _ = getenv_strict "EXEC_PROCESS"; + val _ = getenv_strict "ISABELLE_BASH_PROCESS"; val status = OS.Process.system - ("exec \"$EXEC_PROCESS\" " ^ File.shell_path pid_path ^ " " ^ quote bash_script); + ("exec \"$ISABELLE_BASH_PROCESS\" " ^ File.shell_path pid_path ^ + " bash -c " ^ quote bash_script); val res = (case Posix.Process.fromStatus status of Posix.Process.W_EXITED => Result 0 diff -r ec44535f954a -r a105bea3936f src/Pure/Concurrent/bash.scala --- a/src/Pure/Concurrent/bash.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/Concurrent/bash.scala Wed Feb 17 21:06:47 2016 +0100 @@ -19,13 +19,21 @@ { def out: String = cat_lines(out_lines) def err: String = cat_lines(err_lines) - def add_err(s: String): Result = copy(err_lines = err_lines ::: List(s)) - def set_rc(i: Int): Result = copy(rc = i) + + def error(s: String, err_rc: Int = 0): Result = + copy(err_lines = err_lines ::: List(s), rc = rc max err_rc) - def check_error: Result = + def check: Result = if (rc == Exn.Interrupt.return_code) throw Exn.Interrupt() - else if (rc != 0) error(err) + else if (rc != 0) Library.error(err) else this + + def print: Result = + { + Output.warning(Library.trim_line(err)) + Output.writeln(Library.trim_line(out)) + Result(Nil, Nil, rc) + } } @@ -39,9 +47,13 @@ cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*) extends Prover.System_Process { - private val params = - List(File.standard_path(Path.explode("~~/lib/scripts/process")), "group", "-", "no_script") - private val proc = Isabelle_System.execute_env(cwd, env, redirect, (params ::: args.toList):_*) + private val proc = + { + val params = + List(File.platform_path(Path.variable("ISABELLE_BASH_PROCESS")), "-", "bash") + Isabelle_System.process( + cwd, Isabelle_System.settings(env), redirect, (params ::: args.toList):_*) + } // channels diff -r ec44535f954a -r a105bea3936f src/Pure/General/file.scala --- a/src/Pure/General/file.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/General/file.scala Wed Feb 17 21:06:47 2016 +0100 @@ -102,8 +102,9 @@ /* shell path (bash) */ - def shell_path(path: Path): String = "'" + standard_path(path) + "'" - def shell_path(file: JFile): String = "'" + standard_path(file) + "'" + def shell_quote(s: String): String = "'" + s + "'" + def shell_path(path: Path): String = shell_quote(standard_path(path)) + def shell_path(file: JFile): String = shell_quote(standard_path(file)) /* directory content */ diff -r ec44535f954a -r a105bea3936f src/Pure/General/output.scala --- a/src/Pure/General/output.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/General/output.scala Wed Feb 17 21:06:47 2016 +0100 @@ -18,7 +18,7 @@ def warning_text(msg: String): String = cat_lines(split_lines(clean_yxml(msg)).map("### " + _)) def error_text(msg: String): String = cat_lines(split_lines(clean_yxml(msg)).map("*** " + _)) - def writeln(msg: String) { Console.err.println(writeln_text(msg)) } - def warning(msg: String) { Console.err.println(warning_text(msg)) } - def error_message(msg: String) { Console.err.println(error_text(msg)) } + def writeln(msg: 
diff -r ec44535f954a -r a105bea3936f src/Pure/General/file.scala
--- a/src/Pure/General/file.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/General/file.scala Wed Feb 17 21:06:47 2016 +0100
@@ -102,8 +102,9 @@

   /* shell path (bash) */

-  def shell_path(path: Path): String = "'" + standard_path(path) + "'"
-  def shell_path(file: JFile): String = "'" + standard_path(file) + "'"
+  def shell_quote(s: String): String = "'" + s + "'"
+  def shell_path(path: Path): String = shell_quote(standard_path(path))
+  def shell_path(file: JFile): String = shell_quote(standard_path(file))


   /* directory content */
diff -r ec44535f954a -r a105bea3936f src/Pure/General/output.scala
--- a/src/Pure/General/output.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/General/output.scala Wed Feb 17 21:06:47 2016 +0100
@@ -18,7 +18,7 @@
   def warning_text(msg: String): String = cat_lines(split_lines(clean_yxml(msg)).map("### " + _))
   def error_text(msg: String): String = cat_lines(split_lines(clean_yxml(msg)).map("*** " + _))

-  def writeln(msg: String) { Console.err.println(writeln_text(msg)) }
-  def warning(msg: String) { Console.err.println(warning_text(msg)) }
-  def error_message(msg: String) { Console.err.println(error_text(msg)) }
+  def writeln(msg: String) { if (msg != "") Console.err.println(writeln_text(msg)) }
+  def warning(msg: String) { if (msg != "") Console.err.println(warning_text(msg)) }
+  def error_message(msg: String) { if (msg != "") Console.err.println(error_text(msg)) }
 }
diff -r ec44535f954a -r a105bea3936f src/Pure/Isar/isar_syn.ML
--- a/src/Pure/Isar/isar_syn.ML Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/Isar/isar_syn.ML Wed Feb 17 21:06:47 2016 +0100
@@ -669,6 +669,10 @@
     (Scan.succeed Isar_Cmd.skip_proof);

 val _ =
+  Outer_Syntax.command @{command_keyword "\<proof>"} "dummy proof (quick-and-dirty mode only!)"
+    (Scan.succeed Isar_Cmd.skip_proof);
+
+val _ =
   Outer_Syntax.command @{command_keyword oops} "forget proof"
     (Scan.succeed (Toplevel.forget_proof true));

diff -r ec44535f954a -r a105bea3936f src/Pure/PIDE/batch_session.scala
--- a/src/Pure/PIDE/batch_session.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/PIDE/batch_session.scala Wed Feb 17 21:06:47 2016 +0100
@@ -58,7 +58,7 @@
         case _ =>
       }

-    prover_session.start("Isabelle", List("-r", "-q", parent_session))
+    prover_session.start("Isabelle", "-r -q " + quote(parent_session))

     batch_session
   }
diff -r ec44535f954a -r a105bea3936f src/Pure/PIDE/prover.scala
--- a/src/Pure/PIDE/prover.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/PIDE/prover.scala Wed Feb 17 21:06:47 2016 +0100
@@ -121,7 +121,7 @@

   /** process manager **/

-  private val process_result =
+  private val process_result: Future[Int] =
     Future.thread("process_result") { system_process.join }

   private def terminate_process()
@@ -183,9 +183,15 @@

   def terminate()
   {
+    system_output("Terminating prover process")
     command_input_close()
-    system_output("Terminating prover process")
-    terminate_process()
+
+    var count = 10
+    while (!process_result.is_finished && count > 0) {
+      Thread.sleep(100)
+      count -= 1
+    }
+    if (!process_result.is_finished) terminate_process()
   }

diff -r ec44535f954a -r a105bea3936f src/Pure/PIDE/resources.scala
--- a/src/Pure/PIDE/resources.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/PIDE/resources.scala Wed Feb 17 21:06:47 2016 +0100
@@ -136,7 +136,7 @@

   /* prover process */

-  def start_prover(receiver: Prover.Message => Unit, name: String, args: List[String]): Prover =
+  def start_prover(receiver: Prover.Message => Unit, name: String, args: String): Prover =
     Isabelle_Process(receiver, args)
 }
diff -r ec44535f954a -r a105bea3936f src/Pure/PIDE/session.scala
--- a/src/Pure/PIDE/session.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/PIDE/session.scala Wed Feb 17 21:06:47 2016 +0100
@@ -212,7 +212,7 @@

   /* internal messages */

-  private case class Start(name: String, args: List[String])
+  private case class Start(name: String, args: String)
   private case object Stop
   private case class Cancel_Exec(exec_id: Document_ID.Exec)
   private case class Protocol_Command(name: String, args: List[String])
@@ -601,7 +601,7 @@
     pending_edits: List[Text.Edit] = Nil): Document.Snapshot =
     global_state.value.snapshot(name, pending_edits)

-  def start(name: String, args: List[String])
+  def start(name: String, args: String)
   { manager.send(Start(name, args)) }

   def stop()
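Editorial note (not part of the patch): two related changes are visible above. The prover command line is now passed around as a single string rather than a List[String] (Session.Start, Session.start, Resources.start_prover, Batch_Session), and Prover.terminate now waits briefly for the process_result future before resorting to a hard kill. A minimal sketch of that bounded-wait pattern, extracted purely for illustration (the helper name is invented):

    // Editorial sketch, not part of the patch: the bounded wait used in
    // Prover.terminate above -- poll for up to ~1 second, then force termination.
    def wait_then_kill(finished: () => Boolean, kill: () => Unit)
    {
      var count = 10
      while (!finished() && count > 0) {
        Thread.sleep(100)
        count -= 1
      }
      if (!finished()) kill()
    }
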
"sorry" "\" :: "qed" % "proof" and "done" :: "qed_script" % "proof" and "oops" :: qed_global % "proof" and "defer" "prefer" "apply" :: prf_script % "proof" diff -r ec44535f954a -r a105bea3936f src/Pure/System/cygwin.scala --- a/src/Pure/System/cygwin.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/System/cygwin.scala Wed Feb 17 21:06:47 2016 +0100 @@ -25,7 +25,7 @@ { val cwd = new JFile(isabelle_root) val env = Map("CYGWIN" -> "nodosfilewarning") - val proc = Isabelle_System.raw_execute(cwd, env, true, args: _*) + val proc = Isabelle_System.process(cwd, env, true, args: _*) val (output, rc) = Isabelle_System.process_output(proc) if (rc != 0) error(output) } diff -r ec44535f954a -r a105bea3936f src/Pure/System/isabelle_process.scala --- a/src/Pure/System/isabelle_process.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/System/isabelle_process.scala Wed Feb 17 21:06:47 2016 +0100 @@ -11,15 +11,16 @@ { def apply( receiver: Prover.Message => Unit = Console.println(_), - prover_args: List[String] = Nil): Isabelle_Process = + prover_args: String = ""): Isabelle_Process = { val system_channel = System_Channel() val system_process = try { - val cmdline = - Isabelle_System.getenv_strict("ISABELLE_PROCESS") :: - (system_channel.prover_args ::: prover_args) - val process = Bash.process(null, null, false, cmdline: _*) + val script = + File.shell_quote(Isabelle_System.getenv_strict("ISABELLE_PROCESS")) + + " -P " + system_channel.server_name + + (if (prover_args == "") "" else " " + prover_args) + val process = Bash.process(null, null, false, "-c", script) process.stdin.close process } @@ -34,8 +35,7 @@ system_channel: System_Channel, system_process: Prover.System_Process) extends Prover(receiver, system_channel, system_process) - { - def encode(s: String): String = Symbol.encode(s) - def decode(s: String): String = Symbol.decode(s) - } - +{ + def encode(s: String): String = Symbol.encode(s) + def decode(s: String): String = Symbol.decode(s) +} diff -r ec44535f954a -r a105bea3936f src/Pure/System/isabelle_system.scala --- a/src/Pure/System/isabelle_system.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Pure/System/isabelle_system.scala Wed Feb 17 21:06:47 2016 +0100 @@ -51,10 +51,10 @@ @volatile private var _settings: Option[Map[String, String]] = None - def settings(): Map[String, String] = + def settings(env: Map[String, String] = null): Map[String, String] = { if (_settings.isEmpty) init() // unsynchronized check - _settings.get + if (env == null) _settings.get else _settings.get ++ env } def init(isabelle_root: String = "", cygwin_root: String = ""): Unit = synchronized { @@ -112,7 +112,7 @@ List(isabelle_root1 + JFile.separator + "bin" + JFile.separator + "isabelle", "getenv", "-d", dump.toString) - val (output, rc) = process_output(raw_execute(null, env, true, (cmd1 ::: cmd2): _*)) + val (output, rc) = process_output(process(null, env, true, (cmd1 ::: cmd2): _*)) if (rc != 0) error(output) val entries = @@ -133,7 +133,7 @@ /* getenv */ - def getenv(name: String): String = settings.getOrElse(name, "") + def getenv(name: String): String = settings().getOrElse(name, "") def getenv_strict(name: String): String = { @@ -175,9 +175,9 @@ /** external processes **/ - /* raw execute for bootstrapping */ + /* raw process */ - def raw_execute(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*): Process = + def process(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*): Process = { val cmdline = new java.util.LinkedList[String] for (s <- args) cmdline.add(s) @@ 
diff -r ec44535f954a -r a105bea3936f src/Pure/System/isabelle_system.scala
--- a/src/Pure/System/isabelle_system.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/System/isabelle_system.scala Wed Feb 17 21:06:47 2016 +0100
@@ -51,10 +51,10 @@

   @volatile private var _settings: Option[Map[String, String]] = None

-  def settings(): Map[String, String] =
+  def settings(env: Map[String, String] = null): Map[String, String] =
   {
     if (_settings.isEmpty) init() // unsynchronized check
-    _settings.get
+    if (env == null) _settings.get else _settings.get ++ env
   }

   def init(isabelle_root: String = "", cygwin_root: String = ""): Unit = synchronized {
@@ -112,7 +112,7 @@
         List(isabelle_root1 + JFile.separator + "bin" + JFile.separator + "isabelle",
           "getenv", "-d", dump.toString)

-      val (output, rc) = process_output(raw_execute(null, env, true, (cmd1 ::: cmd2): _*))
+      val (output, rc) = process_output(process(null, env, true, (cmd1 ::: cmd2): _*))
       if (rc != 0) error(output)

       val entries =
@@ -133,7 +133,7 @@

   /* getenv */

-  def getenv(name: String): String = settings.getOrElse(name, "")
+  def getenv(name: String): String = settings().getOrElse(name, "")

   def getenv_strict(name: String): String =
   {
@@ -175,9 +175,9 @@

   /** external processes **/

-  /* raw execute for bootstrapping */
+  /* raw process */

-  def raw_execute(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*): Process =
+  def process(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*): Process =
   {
     val cmdline = new java.util.LinkedList[String]
     for (s <- args) cmdline.add(s)
@@ -216,8 +216,7 @@
     val cmdline =
       if (Platform.is_windows) List(cygwin_root() + "\\bin\\env.exe") ::: args.toList
       else args
-    val env1 = if (env == null) settings else settings ++ env
-    raw_execute(cwd, env1, redirect, cmdline: _*)
+    process(cwd, settings(env), redirect, cmdline: _*)
   }

   def execute(redirect: Boolean, args: String*): Process =
@@ -297,7 +296,7 @@
       if (Platform.is_windows) List(cygwin_root() + "\\bin\\bash.exe")
       else List("/usr/bin/env", "bash")
     val cmdline = bash ::: List("-c", "kill -" + signal + " -" + group_pid)
-    process_output(raw_execute(null, null, true, cmdline: _*))
+    process_output(process(null, null, true, cmdline: _*))
   }


@@ -316,7 +315,7 @@
     }
   }

-  def bash_env(cwd: JFile, env: Map[String, String], script: String,
+  def bash(script: String, cwd: JFile = null, env: Map[String, String] = null,
     progress_stdout: String => Unit = (_: String) => (),
     progress_stderr: String => Unit = (_: String) => (),
     progress_limit: Option[Long] = None,
@@ -324,7 +323,7 @@
   {
     with_tmp_file("isabelle_script") { script_file =>
       File.write(script_file, script)
-      val proc = Bash.process(cwd, env, false, "bash", File.standard_path(script_file))
+      val proc = Bash.process(cwd, env, false, File.standard_path(script_file))
       proc.stdin.close

       val limited = new Limited_Progress(proc, progress_limit)
@@ -342,8 +341,6 @@
     }
   }

-  def bash(script: String): Bash.Result = bash_env(null, null, script)
-

   /* system tools */
diff -r ec44535f954a -r a105bea3936f src/Pure/System/system_channel.scala
--- a/src/Pure/System/system_channel.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/System/system_channel.scala Wed Feb 17 21:06:47 2016 +0100
@@ -20,9 +20,8 @@
 {
   private val server = new ServerSocket(0, 2, InetAddress.getByName("127.0.0.1"))

-  def params: List[String] = List("127.0.0.1", server.getLocalPort.toString)
-
-  def prover_args: List[String] = List("-P", "127.0.0.1:" + server.getLocalPort)
+  val server_name: String = "127.0.0.1:" + server.getLocalPort
+  override def toString: String = server_name

   def rendezvous(): (OutputStream, InputStream) =
   {
diff -r ec44535f954a -r a105bea3936f src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala Wed Feb 17 15:57:10 2016 +0100
+++ b/src/Pure/Tools/build.scala Wed Feb 17 21:06:47 2016 +0100
@@ -602,7 +602,7 @@

     private val result =
       Future.thread("build") {
-        Isabelle_System.bash_env(info.dir.file, env, script,
+        Isabelle_System.bash(script, info.dir.file, env,
          progress_stdout = (line: String) =>
            Library.try_unprefix("\floading_theory = ", line) match {
              case Some(theory) => progress.theory(name, theory)
@@ -640,8 +640,8 @@
       timeout_request.foreach(_.cancel)

       if (res.rc == Exn.Interrupt.return_code) {
-        if (was_timeout) res.add_err(Output.error_text("Timeout")).set_rc(1)
-        else res.add_err(Output.error_text("Interrupt"))
+        if (was_timeout) res.error(Output.error_text("Timeout"), 1)
+        else res.error(Output.error_text("Interrupt"))
       }
       else res
     }
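Editorial note (not part of the patch): the isabelle_system.scala and build.scala hunks above fold bash_env into a single Isabelle_System.bash entry point whose working directory and environment are optional named arguments, with error reporting routed through the new Result.error. A small usage sketch under those assumptions (scripts and paths below are made up):

    // Editorial sketch, not part of the patch: calling the reworked
    // Isabelle_System.bash with default and named arguments.
    import java.io.{File => JFile}

    val quick = Isabelle_System.bash("echo hello")                  // cwd = null, env = null
    val in_dir = Isabelle_System.bash("ls", cwd = new JFile("/tmp"))
    val with_env =
      Isabelle_System.bash("echo \"$GRAPH_FILE\"",
        env = Map("GRAPH_FILE" -> "/tmp/graph"))                    // merged via settings(env)
    with_env.check                                                  // throws on non-zero return code
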
file.endsWith(".ML") || file.endsWith("/ROOT") } check_file(root + Path.explode(file)) } diff -r ec44535f954a -r a105bea3936f src/Tools/jEdit/src/active.scala --- a/src/Tools/jEdit/src/active.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Tools/jEdit/src/active.scala Wed Feb 17 21:06:47 2016 +0100 @@ -33,9 +33,8 @@ Standard_Thread.fork("browser") { val graph_file = Isabelle_System.tmp_file("graph") File.write(graph_file, XML.content(body)) - Isabelle_System.bash_env(null, - Map("GRAPH_FILE" -> File.standard_path(graph_file)), - "\"$ISABELLE_TOOL\" browser -c \"$GRAPH_FILE\" &") + Isabelle_System.bash("\"$ISABELLE_TOOL\" browser -c \"$GRAPH_FILE\" &", + env = Map("GRAPH_FILE" -> File.standard_path(graph_file))) } case XML.Elem(Markup(Markup.GRAPHVIEW, _), body) => diff -r ec44535f954a -r a105bea3936f src/Tools/jEdit/src/isabelle_logic.scala --- a/src/Tools/jEdit/src/isabelle_logic.scala Wed Feb 17 15:57:10 2016 +0100 +++ b/src/Tools/jEdit/src/isabelle_logic.scala Wed Feb 17 21:06:47 2016 +0100 @@ -69,14 +69,16 @@ dirs = session_dirs(), sessions = List(session_name())) } - def session_start() + def session_args(): String = { val print_modes = - space_explode(',', PIDE.options.string("jedit_print_mode")) ::: - space_explode(',', Isabelle_System.getenv("JEDIT_PRINT_MODE")) - PIDE.session.start("Isabelle", print_modes.map("-m" + _) ::: List("-r", "-q", session_name())) + (space_explode(',', PIDE.options.string("jedit_print_mode")) ::: + space_explode(',', Isabelle_System.getenv("JEDIT_PRINT_MODE"))).map("-m " + _) + (print_modes ::: List("-r", "-q", File.shell_quote(session_name()))).mkString(" ") } + def session_start(): Unit = PIDE.session.start("Isabelle", session_args()) + def session_dirs(): List[Path] = Path.split(Isabelle_System.getenv("JEDIT_SESSION_DIRS")) def session_list(): List[String] =