# HG changeset patch # User nipkow # Date 1426580541 -3600 # Node ID ba54b27d733d3c9d9891435b92e346502be54833 # Parent 3a1141d56bf1f734a9121a885c4d635471bfe222# Parent 0bb88aa3476886355ebc99d316f6f82bafdf5366 merged diff -r 0bb88aa34768 -r ba54b27d733d src/Doc/Datatypes/Datatypes.thy --- a/src/Doc/Datatypes/Datatypes.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Doc/Datatypes/Datatypes.thy Tue Mar 17 09:22:21 2015 +0100 @@ -552,7 +552,8 @@ \noindent The main constituents of a constructor specification are the name of the constructor and the list of its argument types. An optional discriminator name -can be supplied at the front to override the default, which is +can be supplied at the front. If discriminators are enabled (cf.\ the +@{text "discs_sels"} option) but no name is supplied, the default is @{text "\x. x = C\<^sub>j"} for nullary constructors and @{text t.is_C\<^sub>j} otherwise. @@ -566,9 +567,10 @@ The syntactic entity \synt{type} denotes a HOL type @{cite "isabelle-isar-ref"}. In addition to the type of a constructor argument, it is possible to specify a -name for the corresponding selector to override the default name -(@{text un_C\<^sub>ji}). The same selector names can be reused for several -constructors as long as they share the same type. +name for the corresponding selector. The same selector name can be reused for +arguments to several constructors as long as the arguments share the same type. +If selectors are enabled (cf.\ the @{text "discs_sels"} option) but no name is +supplied, the default name is @{text un_C\<^sub>ji}. *} @@ -614,9 +616,7 @@ @{text compat_tree.rec}). \item All types through which recursion takes place must be new-style datatypes -or the function type. In principle, it should be possible to support old-style -datatypes as well, but this has not been implemented yet (and there is currently -no way to register old-style datatypes as new-style datatypes). +or the function type. \end{itemize} *} @@ -625,7 +625,7 @@ \label{ssec:datatype-generated-constants} *} text {* -Given a datatype @{text "('a\<^sub>1, \, 'a\<^sub>m) t"} with $m > 0$ live type variables +Given a datatype @{text "('a\<^sub>1, \, 'a\<^sub>m) t"} with $m$ live type variables and $n$ constructors @{text "t.C\<^sub>1"}, \ldots, @{text "t.C\<^sub>n"}, the following auxiliary constants are introduced: @@ -653,6 +653,10 @@ \medskip \noindent +The discriminators and selectors are generated only if the @{text "discs_sels"} +option is enabled or if names are specified for discriminators or selectors. +The set functions, map function, and relator are generated only if $m > 0$. + In addition, some of the plugins introduce their own constants (Section~\ref{sec:selecting-plugins}). The case combinator, discriminators, and selectors are collectively called \emph{destructors}. The prefix @@ -997,6 +1001,9 @@ @{thm list.rel_map(1)[no_vars]} \\ @{thm list.rel_map(2)[no_vars]} +\item[@{text "t."}\hthm{rel_refl}\rm:] ~ \\ +@{thm list.rel_refl[no_vars]} + \item[@{text "t."}\hthm{rel_mono} @{text "[mono, relator_mono]"}\rm:] ~ \\ @{thm list.rel_mono[no_vars]} \\ The @{text "[relator_mono]"} attribute is set by the @{text lifting} plugin @@ -3115,9 +3122,9 @@ \label{ssec:lifting} *} text {* -For each (co)datatype with live type arguments and each manually registered BNF, -the \hthm{lifting} plugin generates properties and attributes that guide the -Lifting tool. 
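
The t.rel_refl entry documented in the hunk above says that the relator preserves reflexivity: if every live argument relation is reflexive, then so is the lifted relation (for lists, roughly: if R is reflexive then list_all2 R xs xs holds). A sketch of the derivation in ordinary LaTeX notation, written with generic live relations R_1, ..., R_m; it mirrors the mk_rel_refl tactic and the refl_ge_eq / ge_eq_refl lemmas added to BNF_Def.thy further down in this patch:

  % assume every live argument relation R_i is reflexive
  \begin{align*}
    (\forall x.\; R_i\,x\,x) &\;\Longrightarrow\; ({=}) \le R_i
        && \text{(refl\_ge\_eq)} \\
    ({=}) \;=\; \mathrm{rel}\,({=})\cdots({=}) &\;\le\; \mathrm{rel}\,R_1\cdots R_m
        && \text{(rel\_eq, rel\_mono)} \\
    ({=}) \le \mathrm{rel}\,R_1\cdots R_m &\;\Longrightarrow\; \mathrm{rel}\,R_1\cdots R_m\,x\,x
        && \text{(ge\_eq\_refl)}
  \end{align*}
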
+For each (co)datatype and each manually registered BNF with at least one live +type argument and no dead type arguments, the \hthm{lifting} plugin generates +properties and attributes that guide the Lifting tool. The plugin derives the following property: @@ -3226,8 +3233,8 @@ Kun\v{c}ar implemented the @{text transfer} and @{text lifting} plugins. Florian Haftmann and Christian Urban provided general advice on Isabelle and package writing. Stefan Milius and Lutz Schr\"oder found an elegant proof that -eliminated one of the BNF proof obligations. Andreas Lochbihler and Christian -Sternagel suggested many textual improvements to this tutorial. +eliminated one of the BNF proof obligations. Andreas Lochbihler, Tobias Nipkow, +and Christian Sternagel suggested many textual improvements to this tutorial. *} end diff -r 0bb88aa34768 -r ba54b27d733d src/Doc/Logics_ZF/If.thy --- a/src/Doc/Logics_ZF/If.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Doc/Logics_ZF/If.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Doc/ZF/If.thy +(* Title: Doc/Logics_ZF/If.thy Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1991 University of Cambridge diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/BNF_Def.thy --- a/src/HOL/BNF_Def.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/BNF_Def.thy Tue Mar 17 09:22:21 2015 +0100 @@ -228,6 +228,12 @@ lemma comp_apply_eq: "f (g x) = h (k x) \ (f \ g) x = (h \ k) x" unfolding comp_apply by assumption +lemma refl_ge_eq: "(\x. R x x) \ op = \ R" + by auto + +lemma ge_eq_refl: "op = \ R \ R x x" + by auto + ML_file "Tools/BNF/bnf_util.ML" ML_file "Tools/BNF/bnf_tactics.ML" ML_file "Tools/BNF/bnf_def_tactics.ML" diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Codegenerator_Test/Code_Test_GHC.thy --- a/src/HOL/Codegenerator_Test/Code_Test_GHC.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Codegenerator_Test/Code_Test_GHC.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Code_Test_GHC.thy +(* Title: HOL/Codegenerator_Test/Code_Test_GHC.thy Author: Andreas Lochbihler, ETH Zurich Test case for test_code on GHC diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Codegenerator_Test/Code_Test_MLton.thy --- a/src/HOL/Codegenerator_Test/Code_Test_MLton.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Codegenerator_Test/Code_Test_MLton.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Code_Test_MLtonL.thy +(* Title: HOL/Codegenerator_Test/Code_Test_MLton.thy Author: Andreas Lochbihler, ETH Zurich Test case for test_code on MLton diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Codegenerator_Test/Code_Test_OCaml.thy --- a/src/HOL/Codegenerator_Test/Code_Test_OCaml.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Codegenerator_Test/Code_Test_OCaml.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Code_Test_OCaml.thy +(* Title: HOL/Codegenerator_Test/Code_Test_OCaml.thy Author: Andreas Lochbihler, ETH Zurich Test case for test_code on OCaml diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Codegenerator_Test/Code_Test_PolyML.thy --- a/src/HOL/Codegenerator_Test/Code_Test_PolyML.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Codegenerator_Test/Code_Test_PolyML.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Code_Test_PolyML.thy +(* Title: HOL/Codegenerator_Test/Code_Test_PolyML.thy Author: Andreas Lochbihler, ETH Zurich Test case for test_code on PolyML diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Codegenerator_Test/Code_Test_SMLNJ.thy --- a/src/HOL/Codegenerator_Test/Code_Test_SMLNJ.thy Tue Mar 17 09:22:17 2015 +0100 
+++ b/src/HOL/Codegenerator_Test/Code_Test_SMLNJ.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Code_Test_SMLNJ.thy +(* Title: HOL/Codegenerator_Test/Code_Test_SMLNJ.thy Author: Andreas Lochbihler, ETH Zurich Test case for test_code on SMLNJ diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Codegenerator_Test/Code_Test_Scala.thy --- a/src/HOL/Codegenerator_Test/Code_Test_Scala.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Codegenerator_Test/Code_Test_Scala.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Code_Test_Scala.thy +(* Title: HOL/Codegenerator_Test/Code_Test_Scala.thy Author: Andreas Lochbihler, ETH Zurich Test case for test_code on Scala diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Inequalities.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOL/Inequalities.thy Tue Mar 17 09:22:21 2015 +0100 @@ -0,0 +1,97 @@ +(* Title: HOL/Inequalities.thy + Author: Tobias Nipkow + Author: Johannes Hölzl +*) + +theory Inequalities + imports Real_Vector_Spaces +begin + +lemma setsum_triangle_reindex: + fixes n :: nat + shows "(\(i,j)\{(i,j). i+j < n}. f i j) = (\ki\k. f i (k - i))" + apply (simp add: setsum.Sigma) + apply (rule setsum.reindex_bij_witness[where j="\(i, j). (i+j, i)" and i="\(k, i). (i, k - i)"]) + apply auto + done + +lemma gauss_sum_div2: "(2::'a::semiring_div) \ 0 \ + setsum of_nat {1..n} = of_nat n * (of_nat n + 1) div (2::'a)" +using gauss_sum[where n=n and 'a = 'a,symmetric] by auto + +lemma Setsum_Icc_int: assumes "0 \ m" and "(m::int) \ n" +shows "\ {m..n} = (n*(n+1) - m*(m-1)) div 2" +proof- + { fix k::int assume "k\0" + hence "\ {1..k::int} = k * (k+1) div 2" + by (rule gauss_sum_div2[where 'a = int, transferred]) simp + } note 1 = this + have "{m..n} = {0..n} - {0..m-1}" using `m\0` by auto + hence "\{m..n} = \{0..n} - \{0..m-1}" using assms + by (force intro!: setsum_diff) + also have "{0..n} = {0} Un {1..n}" using assms by auto + also have "\({0} \ {1..n}) = \{1..n}" by(simp add: setsum.union_disjoint) + also have "\ = n * (n+1) div 2" by(rule 1[OF order_trans[OF assms]]) + also have "{0..m-1} = (if m=0 then {} else {0} Un {1..m-1})" + using assms by auto + also have "\ \ = m*(m-1) div 2" using `m\0` by(simp add: 1 mult.commute) + also have "n*(n+1) div 2 - m*(m-1) div 2 = (n*(n+1) - m*(m-1)) div 2" + apply(subgoal_tac "even(n*(n+1)) \ even(m*(m-1))") + by (auto (*simp: even_def[symmetric]*)) + finally show ?thesis . +qed + +lemma Setsum_Icc_nat: assumes "(m::nat) \ n" +shows "\ {m..n} = (n*(n+1) - m*(m-1)) div 2" +proof - + have "m*(m-1) \ n*(n + 1)" + using assms by (meson diff_le_self order_trans le_add1 mult_le_mono) + hence "int(\ {m..n}) = int((n*(n+1) - m*(m-1)) div 2)" using assms + by (auto simp add: Setsum_Icc_int[transferred, OF _ assms] zdiv_int int_mult + split: zdiff_int_split) + thus ?thesis by simp +qed + +lemma Setsum_Ico_nat: assumes "(m::nat) \ n" +shows "\ {m..{m..{m..n-1}" by simp + also have "\ = (n*(n-1) - m*(m-1)) div 2" + using assms `m < n` by (simp add: Setsum_Icc_nat mult.commute) + finally show ?thesis . +next + assume "\ m < n" with assms show ?thesis by simp +qed + +lemma Chebyshev_sum_upper: + fixes a b::"nat \ 'a::linordered_idom" + assumes "\i j. i \ j \ j < n \ a i \ a j" + assumes "\i j. i \ j \ j < n \ b i \ b j" + shows "of_nat n * (\k=0.. (\k=0..k=0..j=0..j=0..k=0..i j. 
a i * b j"] setsum_right_distrib) + also + { fix i j::nat assume "i 0 \ b i - b j \ 0 \ a i - a j \ 0 \ b i - b j \ 0" + using assms by (cases "i \ j") (auto simp: algebra_simps) + } hence "?S \ 0" + by (auto intro!: setsum_nonpos simp: mult_le_0_iff) + (auto simp: field_simps) + finally show ?thesis by (simp add: algebra_simps) +qed + +lemma Chebyshev_sum_upper_nat: + fixes a b :: "nat \ nat" + shows "(\i j. \ i\j; j \ a i \ a j) \ + (\i j. \ i\j; j \ b i \ b j) \ + n * (\i=0.. (\i=0..i=0.. R \ op=" unfolding reflp_def by blast -lemma ge_eq_refl: - "R \ op= \ R x x" by blast - text {* Proving a parametrized correspondence relation *} definition POS :: "('a \ 'b \ bool) \ ('a \ 'b \ bool) \ bool" where diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Series.thy --- a/src/HOL/Series.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Series.thy Tue Mar 17 09:22:21 2015 +0100 @@ -10,8 +10,8 @@ section {* Infinite Series *} theory Series -imports Limits -begin +imports Limits Inequalities +begin subsection {* Definition of infinite summability *} @@ -576,14 +576,6 @@ @{url "http://www.math.unl.edu/~webnotes/classes/class41/prp77.htm"} *} -lemma setsum_triangle_reindex: - fixes n :: nat - shows "(\(i,j)\{(i,j). i+j < n}. f i j) = (\ki\k. f i (k - i))" - apply (simp add: setsum.Sigma) - apply (rule setsum.reindex_bij_witness[where j="\(i, j). (i+j, i)" and i="\(k, i). (i, k - i)"]) - apply auto - done - lemma Cauchy_product_sums: fixes a b :: "nat \ 'a::{real_normed_algebra,banach}" assumes a: "summable (\k. norm (a k))" diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/TPTP/TPTP_Proof_Reconstruction_Test.thy --- a/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: HOL/TPTP/TPTP_Proof_Reconstruction.thy +(* Title: HOL/TPTP/TPTP_Proof_Reconstruction_Test.thy Author: Nik Sultana, Cambridge University Computer Laboratory Various tests for the proof reconstruction module. diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/TPTP/TPTP_Proof_Reconstruction_Test_Units.thy --- a/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test_Units.thy Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test_Units.thy Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: HOL/TPTP/TPTP_Proof_Reconstruction.thy +(* Title: HOL/TPTP/TPTP_Proof_Reconstruction_Test_Units.thy Author: Nik Sultana, Cambridge University Computer Laboratory Unit tests for proof reconstruction module. 
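
The new Inequalities.thy above contributes two groups of facts: closed forms for sums over intervals of naturals/integers (Setsum_Icc_int, Setsum_Icc_nat, Setsum_Ico_nat) and Chebyshev's sum inequality (Chebyshev_sum_upper, Chebyshev_sum_upper_nat). For orientation, a restatement in conventional LaTeX notation (a sketch only; the index conventions follow the lemma statements, with m <= n and the sequences compared on {0, ..., n-1}):

  % Setsum_Icc_nat and Setsum_Ico_nat, for m <= n
  \[
    \sum_{k=m}^{n} k \;=\; \frac{n(n+1) - m(m-1)}{2},
    \qquad
    \sum_{k=m}^{n-1} k \;=\; \frac{n(n-1) - m(m-1)}{2}
  \]
  % Chebyshev_sum_upper: a_k non-decreasing and b_k non-increasing in k
  \[
    n \sum_{k=0}^{n-1} a_k\, b_k \;\le\;
    \Bigl(\sum_{k=0}^{n-1} a_k\Bigr)\Bigl(\sum_{k=0}^{n-1} b_k\Bigr)
  \]
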
diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Tools/BNF/bnf_comp.ML --- a/src/HOL/Tools/BNF/bnf_comp.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Tools/BNF/bnf_comp.ML Tue Mar 17 09:22:21 2015 +0100 @@ -26,10 +26,30 @@ (string * sort) list -> typ -> (comp_cache * unfold_set) * local_theory -> (BNF_Def.bnf * (typ list * typ list)) * ((comp_cache * unfold_set) * local_theory) val default_comp_sort: (string * sort) list list -> (string * sort) list + val clean_compose_bnf: BNF_Def.inline_policy -> (binding -> binding) -> binding -> BNF_Def.bnf -> + BNF_Def.bnf list -> unfold_set * local_theory -> BNF_Def.bnf * (unfold_set * local_theory) + val kill_bnf: (binding -> binding) -> int -> BNF_Def.bnf -> + (comp_cache * unfold_set) * local_theory -> + BNF_Def.bnf * ((comp_cache * unfold_set) * local_theory) + val lift_bnf: (binding -> binding) -> int -> BNF_Def.bnf -> + (comp_cache * unfold_set) * local_theory -> + BNF_Def.bnf * ((comp_cache * unfold_set) * local_theory) + val permute_bnf: (binding -> binding) -> int list -> int list -> BNF_Def.bnf -> + (comp_cache * unfold_set) * local_theory -> + BNF_Def.bnf * ((comp_cache * unfold_set) * local_theory) + val permute_and_kill_bnf: (binding -> binding) -> int -> int list -> int list -> BNF_Def.bnf -> + (comp_cache * unfold_set) * local_theory -> + BNF_Def.bnf * ((comp_cache * unfold_set) * local_theory) + val lift_and_permute_bnf: (binding -> binding) -> int -> int list -> int list -> BNF_Def.bnf -> + (comp_cache * unfold_set) * local_theory -> + BNF_Def.bnf * ((comp_cache * unfold_set) * local_theory) val normalize_bnfs: (int -> binding -> binding) -> ''a list list -> ''a list -> (''a list list -> ''a list) -> BNF_Def.bnf list -> (comp_cache * unfold_set) * local_theory -> (int list list * ''a list) * (BNF_Def.bnf list * ((comp_cache * unfold_set) * local_theory)) - + val compose_bnf: BNF_Def.inline_policy -> (int -> binding -> binding) -> + ((string * sort) list list -> (string * sort) list) -> BNF_Def.bnf -> BNF_Def.bnf list -> + typ list -> typ list list -> typ list list -> (comp_cache * unfold_set) * local_theory -> + (BNF_Def.bnf * (typ list * typ list)) * ((comp_cache * unfold_set) * local_theory) type absT_info = {absT: typ, repT: typ, @@ -625,11 +645,11 @@ (* Composition pipeline *) -fun permute_and_kill qualify n src dest bnf = +fun permute_and_kill_bnf qualify n src dest bnf = permute_bnf qualify src dest bnf #> uncurry (kill_bnf qualify n); -fun lift_and_permute qualify n src dest bnf = +fun lift_and_permute_bnf qualify n src dest bnf = lift_bnf qualify n bnf #> uncurry (permute_bnf qualify src dest); @@ -641,8 +661,8 @@ val before_kill_dest = map2 append kill_poss live_poss; val kill_ns = map length kill_poss; val (inners', accum') = - @{fold_map 5} (fn i => permute_and_kill (qualify i)) - (if length bnfs = 1 then [0] else (1 upto length bnfs)) + @{fold_map 5} (permute_and_kill_bnf o qualify) + (if length bnfs = 1 then [0] else 1 upto length bnfs) kill_ns before_kill_src before_kill_dest bnfs accum; val Ass' = map2 (map o nth) Ass live_poss; @@ -653,7 +673,7 @@ val after_lift_src = map2 append new_poss old_poss; val lift_ns = map (fn xs => length As - length xs) Ass'; in - ((kill_poss, As), @{fold_map 5} (fn i => lift_and_permute (qualify i)) + ((kill_poss, As), @{fold_map 5} (lift_and_permute_bnf o qualify) (if length bnfs = 1 then [0] else 1 upto length bnfs) lift_ns after_lift_src after_lift_dest inners' accum') end; @@ -881,7 +901,7 @@ val map_b = def_qualify (mk_prefix_binding mapN); val rel_b = def_qualify (mk_prefix_binding 
relN); val set_bs = if live = 1 then [def_qualify (mk_prefix_binding setN)] - else map (fn i => def_qualify (mk_prefix_binding (mk_setN i))) (1 upto live); + else map (def_qualify o mk_prefix_binding o mk_setN) (1 upto live); val notes = (map_b, map_def) :: (rel_b, rel_def) :: (set_bs ~~ set_defs) |> map (fn (b, def) => ((b, []), [([def], [])])) @@ -938,7 +958,7 @@ val ((inners, (Dss, Ass)), (accum', lthy')) = apfst (apsnd split_list o split_list) (@{fold_map 2} (fn i => bnf_of_typ Smart_Inline (qualify i) flatten_tyargs Xs Ds0) - (if length Ts' = 1 then [0] else (1 upto length Ts')) Ts' accum); + (if length Ts' = 1 then [0] else 1 upto length Ts') Ts' accum); in compose_bnf const_policy qualify flatten_tyargs bnf inners oDs Dss Ass (accum', lthy') end) diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Tools/BNF/bnf_def.ML --- a/src/HOL/Tools/BNF/bnf_def.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Tools/BNF/bnf_def.ML Tue Mar 17 09:22:21 2015 +0100 @@ -81,6 +81,7 @@ val rel_mono_of_bnf: bnf -> thm val rel_mono_strong0_of_bnf: bnf -> thm val rel_mono_strong_of_bnf: bnf -> thm + val rel_refl_of_bnf: bnf -> thm val rel_transfer_of_bnf: bnf -> thm val rel_eq_of_bnf: bnf -> thm val rel_flip_of_bnf: bnf -> thm @@ -250,13 +251,14 @@ rel_Grp: thm lazy, rel_conversep: thm lazy, rel_OO: thm lazy, + rel_refl: thm lazy, rel_transfer: thm lazy }; fun mk_facts bd_Card_order bd_Cinfinite bd_Cnotzero collect_set_map in_bd in_cong in_mono in_rel inj_map inj_map_strong map_comp map_cong map_cong_simp map_id map_ident0 map_ident map_transfer rel_eq rel_flip set_map rel_cong rel_map rel_mono rel_mono_strong0 rel_mono_strong - rel_transfer rel_Grp rel_conversep rel_OO set_transfer = { + set_transfer rel_Grp rel_conversep rel_OO rel_refl rel_transfer = { bd_Card_order = bd_Card_order, bd_Cinfinite = bd_Cinfinite, bd_Cnotzero = bd_Cnotzero, @@ -286,6 +288,7 @@ rel_Grp = rel_Grp, rel_conversep = rel_conversep, rel_OO = rel_OO, + rel_refl = rel_refl, set_transfer = set_transfer}; fun map_facts f { @@ -318,6 +321,7 @@ rel_Grp, rel_conversep, rel_OO, + rel_refl, set_transfer} = {bd_Card_order = f bd_Card_order, bd_Cinfinite = f bd_Cinfinite, @@ -348,6 +352,7 @@ rel_Grp = Lazy.map f rel_Grp, rel_conversep = Lazy.map f rel_conversep, rel_OO = Lazy.map f rel_OO, + rel_refl = Lazy.map f rel_refl, set_transfer = Lazy.map (map f) set_transfer}; val morph_facts = map_facts o Morphism.thm; @@ -479,6 +484,7 @@ val rel_mono_of_bnf = Lazy.force o #rel_mono o #facts o rep_bnf; val rel_mono_strong0_of_bnf = Lazy.force o #rel_mono_strong0 o #facts o rep_bnf; val rel_mono_strong_of_bnf = Lazy.force o #rel_mono_strong o #facts o rep_bnf; +val rel_refl_of_bnf = Lazy.force o #rel_refl o #facts o rep_bnf; val rel_transfer_of_bnf = Lazy.force o #rel_transfer o #facts o rep_bnf; val rel_Grp_of_bnf = Lazy.force o #rel_Grp o #facts o rep_bnf; val rel_conversep_of_bnf = Lazy.force o #rel_conversep o #facts o rep_bnf; @@ -654,14 +660,15 @@ val set_map0N = "set_map0"; val set_mapN = "set_map"; val set_bdN = "set_bd"; -val set_transferN = "set_transfer" +val set_transferN = "set_transfer"; val rel_GrpN = "rel_Grp"; val rel_conversepN = "rel_conversep"; -val rel_mapN = "rel_map" -val rel_monoN = "rel_mono" -val rel_mono_strong0N = "rel_mono_strong0" -val rel_mono_strongN = "rel_mono_strong" -val rel_transferN = "rel_transfer" +val rel_mapN = "rel_map"; +val rel_monoN = "rel_mono"; +val rel_mono_strong0N = "rel_mono_strong0"; +val rel_mono_strongN = "rel_mono_strong"; +val rel_reflN = "rel_refl"; +val rel_transferN = "rel_transfer"; val 
rel_comppN = "rel_compp"; val rel_compp_GrpN = "rel_compp_Grp"; @@ -732,6 +739,7 @@ (rel_mapN, Lazy.force (#rel_map facts), []), (rel_monoN, [Lazy.force (#rel_mono facts)], mono_attrs), (rel_mono_strongN, [Lazy.force (#rel_mono_strong facts)], []), + (rel_reflN, [Lazy.force (#rel_refl facts)], []), (rel_transferN, [Lazy.force (#rel_transfer facts)], []), (set_mapN, map Lazy.force (#set_map facts), []), (set_transferN, Lazy.force (#set_transfer facts), [])] @@ -1406,6 +1414,11 @@ val rel_map = Lazy.lazy mk_rel_map; + fun mk_rel_refl () = @{thm ge_eq_refl[OF ord_eq_le_trans]} OF + [Lazy.force rel_eq RS sym, Lazy.force rel_mono OF (replicate live @{thm refl_ge_eq})]; + + val rel_refl = Lazy.lazy mk_rel_refl; + fun mk_map_transfer () = let val rels = map2 mk_rel_fun transfer_domRs transfer_ranRs; @@ -1494,7 +1507,7 @@ val facts = mk_facts bd_Card_order bd_Cinfinite bd_Cnotzero collect_set_map in_bd in_cong in_mono in_rel inj_map inj_map_strong map_comp map_cong map_cong_simp map_id map_ident0 map_ident map_transfer rel_eq rel_flip set_map rel_cong rel_map rel_mono rel_mono_strong0 - rel_mono_strong rel_transfer rel_Grp rel_conversep rel_OO set_transfer; + rel_mono_strong set_transfer rel_Grp rel_conversep rel_OO rel_refl rel_transfer; val wits = map2 mk_witness bnf_wits wit_thms; diff -r 0bb88aa34768 -r ba54b27d733d src/HOL/Tools/Lifting/lifting_bnf.ML --- a/src/HOL/Tools/Lifting/lifting_bnf.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/HOL/Tools/Lifting/lifting_bnf.ML Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: HOL/Tools/Transfer/transfer_bnf.ML +(* Title: HOL/Tools/Lifting/lifting_bnf.ML Author: Ondrej Kuncar, TU Muenchen Setup for Lifting for types that are BNF. diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/Concurrent/random.ML --- a/src/Pure/Concurrent/random.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/Concurrent/random.ML Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Pure/Confurrent/random.ML +(* Title: Pure/Concurrent/random.ML Author: Lawrence C Paulson, Cambridge University Computer Laboratory Pseudo random numbers. diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/GUI/jfx_gui.scala --- a/src/Pure/GUI/jfx_gui.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/GUI/jfx_gui.scala Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -/* Title: Pure/GUI/jfx_thread.scala +/* Title: Pure/GUI/jfx_gui.scala Module: PIDE-JFX Author: Makarius diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/General/antiquote.scala --- a/src/Pure/General/antiquote.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/General/antiquote.scala Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -/* Title: Pure/ML/antiquote.scala +/* Title: Pure/General/antiquote.scala Author: Makarius Antiquotations within plain text. diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/General/completion.ML --- a/src/Pure/General/completion.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/General/completion.ML Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -(* Title: Pure/Isar/completion.ML +(* Title: Pure/General/completion.ML Author: Makarius Semantic completion within the formal context. 
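
Several bnf_comp.ML hunks above also replace explicit lambdas such as fn i => def_qualify (mk_prefix_binding (mk_setN i)) by the point-free form def_qualify o mk_prefix_binding o mk_setN; the two spellings agree on every argument. A minimal standalone Scala sketch of the same refactoring pattern (qualify and mkSetBinding are invented stand-ins, not Isabelle code):

  // Point-free refactoring demo: "i => f(g(i))" versus "f compose g".
  object ComposeDemo {
    def qualify(i: Int): String = s"q$i"             // stand-in for the qualify function
    def mkSetBinding(s: String): String = s"set_$s"  // stand-in for def_qualify o mk_prefix_binding o mk_setN

    def main(args: Array[String]): Unit = {
      val explicit  = (1 to 3).map(i => mkSetBinding(qualify(i)))       // explicit lambda
      val pointFree = (1 to 3).map((mkSetBinding _).compose(qualify _)) // composed functions
      assert(explicit == pointFree)  // both yield Vector("set_q1", "set_q2", "set_q3")
      println(pointFree.mkString(", "))
    }
  }
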
diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/General/completion.scala --- a/src/Pure/General/completion.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/General/completion.scala Tue Mar 17 09:22:21 2015 +0100 @@ -135,10 +135,31 @@ /** semantic completion **/ + def report_no_completion(pos: Position.T): String = + YXML.string_of_tree(Semantic.Info(pos, No_Completion)) + + def report_names(pos: Position.T, total: Int, names: List[(String, (String, String))]): String = + YXML.string_of_tree(Semantic.Info(pos, Names(total, names))) + object Semantic { object Info { + def apply(pos: Position.T, semantic: Semantic): XML.Elem = + { + val elem = + semantic match { + case No_Completion => XML.Elem(Markup(Markup.NO_COMPLETION, pos), Nil) + case Names(total, names) => + XML.Elem(Markup(Markup.COMPLETION, pos), + { + import XML.Encode._ + pair(int, list(pair(string, pair(string, string))))(total, names) + }) + } + XML.Elem(Markup(Markup.REPORT, pos), List(elem)) + } + def unapply(info: Text.Markup): Option[Text.Info[Semantic]] = info.info match { case XML.Elem(Markup(Markup.COMPLETION, _), body) => diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/Isar/token.scala --- a/src/Pure/Isar/token.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/Isar/token.scala Tue Mar 17 09:22:21 2015 +0100 @@ -161,6 +161,7 @@ val start: Pos = new Pos(1, 1, "", "") def file(file: String): Pos = new Pos(1, 1, file, "") def id(id: String): Pos = new Pos(0, 1, "", id) + val command: Pos = id(Markup.COMMAND) } final class Pos private[Token]( diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/batch_session.scala --- a/src/Pure/PIDE/batch_session.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/batch_session.scala Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -/* Title: Pure/Tools/batch_session.scala +/* Title: Pure/PIDE/batch_session.scala Author: Makarius PIDE session in batch mode. 
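
The completion.scala hunk above adds report_no_completion and report_names, which wrap a semantic-completion result in a REPORT element containing either a NO_COMPLETION element or a COMPLETION element whose body encodes the pair (total, names), each name being (name, (kind, description)). A standalone Scala sketch of that nesting with toy XML types (Tree/Elem/Text, the lowercase element names, and the plain-text payload are invented; the real code uses Isabelle's XML, Markup and YXML modules):

  // Toy model of the completion report built by Completion.Semantic.Info above.
  sealed trait Tree
  case class Elem(name: String, props: List[(String, String)], body: List[Tree]) extends Tree
  case class Text(content: String) extends Tree

  object CompletionReportDemo {
    // report_names(pos, total, names): a REPORT element wrapping a COMPLETION element;
    // the real body is YXML-encoded via pair(int, list(pair(string, pair(string, string)))).
    def reportNames(pos: List[(String, String)], total: Int,
        names: List[(String, (String, String))]): Tree =
      Elem("report", pos,
        List(Elem("completion", pos, List(Text(s"($total, $names)")))))

    def main(args: Array[String]): Unit =
      // argument shape mirrors the call added to resources.scala later in this patch
      println(reportNames(List("line" -> "1"), 1, List("Foo" -> ("theory", "Foo"))))
  }
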
diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/command.scala --- a/src/Pure/PIDE/command.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/command.scala Tue Mar 17 09:22:21 2015 +0100 @@ -234,7 +234,7 @@ if (Protocol.is_inlined(message)) { for { (chunk_name, chunk) <- command.chunks.iterator - range <- Protocol.message_positions( + range <- Protocol_Message.positions( self_id, command.position, chunk_name, chunk, message) } st = st.add_markup(false, chunk_name, Text.Info(range, message2)) } @@ -362,13 +362,15 @@ case Command_Span.Command_Span(name, _) if syntax.is_theory_begin(name) => val header = resources.check_thy_reader("", node_name, - new CharSequenceReader(span.source), Token.Pos.id(Markup.COMMAND)) - val import_errors = + new CharSequenceReader(span.source), Token.Pos.command) + val errors = for ((imp, pos) <- header.imports if !can_import(imp)) yield { - val name = imp.node - "Bad theory import " + Markup.Path(name).markup(quote(name)) + Position.here(pos) + val msg = + "Bad theory import " + + Markup.Path(imp.node).markup(quote(imp.toString)) + Position.here(pos) + Exn.Exn(ERROR(msg)): Command.Blob } - ((header.errors ::: import_errors).map(msg => Exn.Exn(ERROR(msg)): Command.Blob), -1) + (errors, -1) // auxiliary files case _ => diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/document.ML --- a/src/Pure/PIDE/document.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/document.ML Tue Mar 17 09:22:21 2015 +0100 @@ -8,7 +8,7 @@ signature DOCUMENT = sig val timing: bool Unsynchronized.ref - type node_header = string * Thy_Header.header * string list + type node_header = {master: string, header: Thy_Header.header, errors: string list} type overlay = Document_ID.command * (string * string list) datatype node_edit = Edits of (Document_ID.command option * Document_ID.command option) list | @@ -42,7 +42,8 @@ fun err_dup kind id = error ("Duplicate " ^ kind ^ ": " ^ Document_ID.print id); fun err_undef kind id = error ("Undefined " ^ kind ^ ": " ^ Document_ID.print id); -type node_header = string * Thy_Header.header * string list; +type node_header = + {master: string, header: Thy_Header.header, errors: string list}; type perspective = {required: bool, (*required node*) @@ -74,7 +75,8 @@ visible_last = try List.last command_ids, overlays = Inttab.make_list overlays}; -val no_header: node_header = ("", Thy_Header.make ("", Position.none) [] [], []); +val no_header: node_header = + {master = "", header = Thy_Header.make ("", Position.none) [] [], errors = []}; val no_perspective = make_perspective (false, [], []); val empty_node = make_node (no_header, NONE, no_perspective, Entries.empty, NONE); @@ -95,20 +97,16 @@ (* basic components *) -fun master_directory (Node {header = (master, _, _), ...}) = +fun master_directory (Node {header = {master, ...}, ...}) = (case try Url.explode master of SOME (Url.File path) => path | _ => Path.current); -fun set_header header = +fun set_header master header errors = map_node (fn (_, keywords, perspective, entries, result) => - (header, keywords, perspective, entries, result)); + ({master = master, header = header, errors = errors}, keywords, perspective, entries, result)); -fun get_header_raw (Node {header, ...}) = header; - -fun get_header (Node {header = (master, header, errors), ...}) = - if null errors then (master, header) - else error (cat_lines errors); +fun get_header (Node {header, ...}) = header; fun set_keywords keywords = map_node (fn (header, _, perspective, entries, result) => @@ -118,7 +116,16 @@ fun read_header node span 
= let - val {name = (name, _), imports, keywords} = #2 (get_header node); + val {header, errors, ...} = get_header node; + val _ = + if null errors then () + else + cat_lines errors |> + (case Position.get_id (Position.thread_data ()) of + NONE => I + | SOME id => Protocol_Message.command_positions_yxml id) + |> error; + val {name = (name, _), imports, keywords} = header; val {name = (_, pos), imports = imports', ...} = Thy_Header.read_tokens span; in Thy_Header.make (name, pos) (map #1 imports ~~ map #2 imports') keywords end; @@ -232,7 +239,7 @@ Version (case node_edit of Edits edits => update_node name (edit_node edits) nodes - | Deps (master, header, errors) => + | Deps {master, header, errors} => let val imports = map fst (#imports header); val nodes1 = nodes @@ -244,7 +251,7 @@ val (nodes3, errors1) = (String_Graph.add_deps_acyclic (name, imports) nodes2, errors) handle String_Graph.CYCLES cs => (nodes2, errors @ map cycle_msg cs); - in String_Graph.map_node name (set_header (master, header, errors1)) nodes3 end + in String_Graph.map_node name (set_header master header errors1) nodes3 end | Perspective perspective => update_node name (set_perspective perspective) nodes); fun update_keywords name nodes = @@ -252,7 +259,7 @@ if is_empty_node node then node else let - val (master, header, errors) = get_header_raw node; + val {master, header, errors} = get_header node; val imports_keywords = map_filter (get_keywords o get_node nodes o #1) (#imports header); val keywords = Library.foldl Keyword.merge_keywords (Session.get_keywords (), imports_keywords); @@ -262,7 +269,7 @@ (keywords, if member (op =) errors msg then errors else errors @ [msg]); in node - |> set_header (master, header, errors') + |> set_header master header errors' |> set_keywords (SOME keywords') end); @@ -513,21 +520,29 @@ val master_dir = master_directory node; val header = read_header node span; val imports = #imports header; - val parents = - imports |> map (fn (import, _) => + + fun maybe_end_theory pos st = + SOME (Toplevel.end_theory pos st) + handle ERROR msg => (Output.error_message msg; NONE); + val parents_reports = + imports |> map_filter (fn (import, pos) => (case loaded_theory import of - SOME thy => thy - | NONE => - Toplevel.end_theory (Position.file_only import) + NONE => + maybe_end_theory pos (case get_result (snd (the (AList.lookup (op =) deps import))) of NONE => Toplevel.toplevel - | SOME eval => Command.eval_result_state eval))); - val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents); + | SOME eval => Command.eval_result_state eval) + | some => some) + |> Option.map (fn thy => (thy, (pos, Theory.get_markup thy)))); + + val parents = + if null parents_reports then [Thy_Info.get_theory "Pure"] else map #1 parents_reports; + val _ = Position.reports (map #2 parents_reports); in Resources.begin_theory master_dir header parents end; fun check_theory full name node = is_some (loaded_theory name) orelse - can get_header node andalso (not full orelse is_some (get_result node)); + null (#errors (get_header node)) andalso (not full orelse is_some (get_result node)); fun last_common keywords state node_required node0 node = let diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/protocol.ML --- a/src/Pure/PIDE/protocol.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/protocol.ML Tue Mar 17 09:22:21 2015 +0100 @@ -32,11 +32,15 @@ let val (blobs, blobs_index) = YXML.parse_body blobs_yxml |> - let open XML.Decode in + let + val message = + YXML.string_of_body o 
Protocol_Message.command_positions id; + open XML.Decode; + in pair (list (variant [fn ([], a) => Exn.Res (pair string (option string) a), - fn ([], a) => Exn.Exn (ERROR (YXML.string_of_body a))])) + fn ([], a) => Exn.Exn (ERROR (message a))])) int end; val toks = @@ -78,7 +82,7 @@ (list YXML.string_of_body)))) a; val imports' = map (rpair Position.none) imports; val header = Thy_Header.make (name, Position.none) imports' keywords; - in Document.Deps (master, header, errors) end, + in Document.Deps {master = master, header = header, errors = errors} end, fn (a :: b, c) => Document.Perspective (bool_atom a, map int_atom b, list (pair int (pair string (list string))) c)])) diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/protocol.scala --- a/src/Pure/PIDE/protocol.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/protocol.scala Tue Mar 17 09:22:21 2015 +0100 @@ -186,34 +186,6 @@ /* result messages */ - private val clean_elements = - Markup.Elements(Markup.REPORT, Markup.NO_REPORT) - - def clean_message(body: XML.Body): XML.Body = - body filter { - case XML.Wrapped_Elem(Markup(name, _), _, _) => !clean_elements(name) - case XML.Elem(Markup(name, _), _) => !clean_elements(name) - case _ => true - } map { - case XML.Wrapped_Elem(markup, body, ts) => XML.Wrapped_Elem(markup, body, clean_message(ts)) - case XML.Elem(markup, ts) => XML.Elem(markup, clean_message(ts)) - case t => t - } - - def message_reports(props: Properties.T, body: XML.Body): List[XML.Elem] = - body flatMap { - case XML.Wrapped_Elem(Markup(Markup.REPORT, ps), body, ts) => - List(XML.Wrapped_Elem(Markup(Markup.REPORT, props ::: ps), body, ts)) - case XML.Elem(Markup(Markup.REPORT, ps), ts) => - List(XML.Elem(Markup(Markup.REPORT, props ::: ps), ts)) - case XML.Wrapped_Elem(_, _, ts) => message_reports(props, ts) - case XML.Elem(_, ts) => message_reports(props, ts) - case XML.Text(_) => Nil - } - - - /* specific messages */ - def is_result(msg: XML.Tree): Boolean = msg match { case XML.Elem(Markup(Markup.RESULT, _), _) => true @@ -302,53 +274,6 @@ case _ => None } } - - - /* reported positions */ - - private val position_elements = - Markup.Elements(Markup.BINDING, Markup.ENTITY, Markup.REPORT, Markup.POSITION) - - def message_positions( - self_id: Document_ID.Generic => Boolean, - command_position: Position.T, - chunk_name: Symbol.Text_Chunk.Name, - chunk: Symbol.Text_Chunk, - message: XML.Elem): Set[Text.Range] = - { - def elem_positions(props: Properties.T, set: Set[Text.Range]): Set[Text.Range] = - props match { - case Position.Identified(id, name) if self_id(id) && name == chunk_name => - val opt_range = - Position.Range.unapply(props) orElse { - if (name == Symbol.Text_Chunk.Default) - Position.Range.unapply(command_position) - else None - } - opt_range match { - case Some(symbol_range) => - chunk.incorporate(symbol_range) match { - case Some(range) => set + range - case _ => set - } - case None => set - } - case _ => set - } - - def positions(set: Set[Text.Range], tree: XML.Tree): Set[Text.Range] = - tree match { - case XML.Wrapped_Elem(Markup(name, props), _, body) => - body.foldLeft(if (position_elements(name)) elem_positions(props, set) else set)(positions) - case XML.Elem(Markup(name, props), body) => - body.foldLeft(if (position_elements(name)) elem_positions(props, set) else set)(positions) - case XML.Text(_) => set - } - - val set = positions(Set.empty, message) - if (set.isEmpty) elem_positions(message.markup.properties, set) - else set - } } @@ -382,29 +307,6 @@ def define_blob(digest: SHA1.Digest, bytes: 
Bytes): Unit = protocol_command_bytes("Document.define_blob", Bytes(digest.toString), bytes) - private def resolve_id(id: String, body: XML.Body): XML.Body = - { - def resolve_property(p: (String, String)): (String, String) = - if (p._1 == Markup.ID && p._2 == Markup.COMMAND) (Markup.ID, id) else p - - def resolve_markup(markup: Markup): Markup = - Markup(markup.name, markup.properties.map(resolve_property)) - - def resolve_tree(t: XML.Tree): XML.Tree = - t match { - case XML.Wrapped_Elem(markup, ts1, ts2) => - XML.Wrapped_Elem(resolve_markup(markup), ts1.map(resolve_tree _), ts2.map(resolve_tree _)) - case XML.Elem(markup, ts) => - XML.Elem(resolve_markup(markup), ts.map(resolve_tree _)) - case text => text - } - body.map(resolve_tree _) - } - - private def resolve_id(id: String, s: String): XML.Body = - try { resolve_id(id, YXML.parse_body(s)) } - catch { case ERROR(_) => XML.Encode.string(s) } - def define_command(command: Command) { val blobs_yxml = @@ -413,7 +315,7 @@ variant(List( { case Exn.Res((a, b)) => (Nil, pair(string, option(string))((a.node, b.map(p => p._1.toString)))) }, - { case Exn.Exn(e) => (Nil, resolve_id(command.id.toString, Exn.message(e))) })) + { case Exn.Exn(e) => (Nil, string(Exn.message(e))) })) YXML.string_of_body(pair(list(encode_blob), int)(command.blobs, command.blobs_index)) } diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/protocol_message.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/PIDE/protocol_message.ML Tue Mar 17 09:22:21 2015 +0100 @@ -0,0 +1,27 @@ +(* Title: Pure/PIDE/protocol_message.ML + Author: Makarius + +Auxiliary operations on protocol messages. +*) + +signature PROTOCOL_MESSAGE = +sig + val command_positions: string -> XML.body -> XML.body + val command_positions_yxml: string -> string -> string +end; + +structure Protocol_Message: PROTOCOL_MESSAGE = +struct + +fun command_positions id = + let + fun attribute (a, b) = + if a = Markup.idN andalso b = Markup.commandN then (a, id) else (a, b); + fun tree (XML.Elem ((a, atts), ts)) = XML.Elem ((a, map attribute atts), map tree ts) + | tree text = text; + in map tree end; + +fun command_positions_yxml id = + YXML.string_of_body o command_positions id o YXML.parse_body; + +end; diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/protocol_message.scala --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/PIDE/protocol_message.scala Tue Mar 17 09:22:21 2015 +0100 @@ -0,0 +1,84 @@ +/* Title: Pure/PIDE/protocol_message.scala + Author: Makarius + +Auxiliary operations on protocol messages. 
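
The new Protocol_Message.command_positions above walks over a message body and replaces every id="command" attribute by the concrete command id, so that messages recorded against the symbolic command position (cf. Token.Pos.command earlier in this patch) end up attached to the right command. A standalone Scala rendering of the same traversal (MElem/MText and the sample message are invented toy types; the ML original works on Isabelle's XML.body with Markup.idN/Markup.commandN):

  // Standalone sketch of Protocol_Message.command_positions: substitute the concrete
  // command id for the placeholder id="command" attribute, recursively over a message.
  sealed trait Msg
  case class MElem(name: String, atts: List[(String, String)], body: List[Msg]) extends Msg
  case class MText(content: String) extends Msg

  object CommandPositionsDemo {
    def commandPositions(id: String)(m: Msg): Msg =
      m match {
        case MElem(name, atts, body) =>
          val atts1 = atts.map {
            case ("id", "command") => ("id", id)  // resolve the placeholder position
            case other             => other
          }
          MElem(name, atts1, body.map(commandPositions(id) _))
        case text => text
      }

    def main(args: Array[String]): Unit = {
      val msg = MElem("error", List("id" -> "command", "offset" -> "7"),
        List(MText("Bad theory import Foo")))
      println(commandPositions("42")(msg))  // the id attribute becomes "42"
    }
  }
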
+*/ + +package isabelle + + +object Protocol_Message +{ + /* inlined reports */ + + private val report_elements = + Markup.Elements(Markup.REPORT, Markup.NO_REPORT) + + def clean_reports(body: XML.Body): XML.Body = + body filter { + case XML.Wrapped_Elem(Markup(name, _), _, _) => !report_elements(name) + case XML.Elem(Markup(name, _), _) => !report_elements(name) + case _ => true + } map { + case XML.Wrapped_Elem(markup, body, ts) => XML.Wrapped_Elem(markup, body, clean_reports(ts)) + case XML.Elem(markup, ts) => XML.Elem(markup, clean_reports(ts)) + case t => t + } + + def reports(props: Properties.T, body: XML.Body): List[XML.Elem] = + body flatMap { + case XML.Wrapped_Elem(Markup(Markup.REPORT, ps), body, ts) => + List(XML.Wrapped_Elem(Markup(Markup.REPORT, props ::: ps), body, ts)) + case XML.Elem(Markup(Markup.REPORT, ps), ts) => + List(XML.Elem(Markup(Markup.REPORT, props ::: ps), ts)) + case XML.Wrapped_Elem(_, _, ts) => reports(props, ts) + case XML.Elem(_, ts) => reports(props, ts) + case XML.Text(_) => Nil + } + + + /* reported positions */ + + private val position_elements = + Markup.Elements(Markup.BINDING, Markup.ENTITY, Markup.REPORT, Markup.POSITION) + + def positions( + self_id: Document_ID.Generic => Boolean, + command_position: Position.T, + chunk_name: Symbol.Text_Chunk.Name, + chunk: Symbol.Text_Chunk, + message: XML.Elem): Set[Text.Range] = + { + def elem(props: Properties.T, set: Set[Text.Range]): Set[Text.Range] = + props match { + case Position.Identified(id, name) if self_id(id) && name == chunk_name => + val opt_range = + Position.Range.unapply(props) orElse { + if (name == Symbol.Text_Chunk.Default) + Position.Range.unapply(command_position) + else None + } + opt_range match { + case Some(symbol_range) => + chunk.incorporate(symbol_range) match { + case Some(range) => set + range + case _ => set + } + case None => set + } + case _ => set + } + def tree(set: Set[Text.Range], t: XML.Tree): Set[Text.Range] = + t match { + case XML.Wrapped_Elem(Markup(name, props), _, body) => + body.foldLeft(if (position_elements(name)) elem(props, set) else set)(tree) + case XML.Elem(Markup(name, props), body) => + body.foldLeft(if (position_elements(name)) elem(props, set) else set)(tree) + case XML.Text(_) => set + } + + val set = tree(Set.empty, message) + if (set.isEmpty) elem(message.markup.properties, set) + else set + } +} diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/prover.scala --- a/src/Pure/PIDE/prover.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/prover.scala Tue Mar 17 09:22:21 2015 +0100 @@ -108,8 +108,8 @@ { if (kind == Markup.INIT) system_channel.accepted() - val main = XML.Elem(Markup(kind, props), Protocol.clean_message(body)) - val reports = Protocol.message_reports(props, body) + val main = XML.Elem(Markup(kind, props), Protocol_Message.clean_reports(body)) + val reports = Protocol_Message.reports(props, body) for (msg <- main :: reports) receiver(new Prover.Output(xml_cache.elem(msg))) } diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/resources.ML --- a/src/Pure/PIDE/resources.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/resources.ML Tue Mar 17 09:22:21 2015 +0100 @@ -68,7 +68,8 @@ val {name = (name, pos), imports, keywords} = Thy_Header.read (Path.position master_file) text; val _ = thy_name <> name andalso - error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name ^ Position.here pos); + error ("Bad theory name " ^ quote name ^ + " for file " ^ Path.print path ^ Position.here pos); in {master = (master_file, 
SHA1.digest text), text = text, theory_pos = pos, imports = imports, keywords = keywords} diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/PIDE/resources.scala --- a/src/Pure/PIDE/resources.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/PIDE/resources.scala Tue Mar 17 09:22:21 2015 +0100 @@ -96,8 +96,9 @@ val base_name = Long_Name.base_name(node_name.theory) val (name, pos) = header.name if (base_name != name) - error("Bad file name " + Resources.thy_path(Path.basic(base_name)) + - " for theory " + quote(name) + Position.here(pos)) + error("Bad theory name " + quote(name) + + " for file " + Resources.thy_path(Path.basic(base_name)) + Position.here(pos) + + Completion.report_names(pos, 1, List((base_name, ("theory", base_name))))) val imports = header.imports.map({ case (s, pos) => (import_name(qualifier, node_name, s), pos) }) diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/ROOT --- a/src/Pure/ROOT Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/ROOT Tue Mar 17 09:22:21 2015 +0100 @@ -172,6 +172,7 @@ "PIDE/execution.ML" "PIDE/markup.ML" "PIDE/protocol.ML" + "PIDE/protocol_message.ML" "PIDE/query_operation.ML" "PIDE/resources.ML" "PIDE/session.ML" diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/ROOT.ML --- a/src/Pure/ROOT.ML Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/ROOT.ML Tue Mar 17 09:22:21 2015 +0100 @@ -310,6 +310,7 @@ use "PIDE/resources.ML"; use "Thy/thy_info.ML"; use "PIDE/session.ML"; +use "PIDE/protocol_message.ML"; use "PIDE/document.ML"; (*theory and proof operations*) diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/System/posix_interrupt.scala --- a/src/Pure/System/posix_interrupt.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/System/posix_interrupt.scala Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -/* Title: Pure/System/interrupt.scala +/* Title: Pure/System/posix_interrupt.scala Author: Makarius Support for POSIX interrupts (bypassed on Windows). diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/Tools/build.scala --- a/src/Pure/Tools/build.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/Tools/build.scala Tue Mar 17 09:22:21 2015 +0100 @@ -744,7 +744,7 @@ { /* session tree and dependencies */ - val full_tree = find_sessions(options, dirs, select_dirs) + val full_tree = find_sessions(options.int("completion_limit") = 0, dirs, select_dirs) val (selected, selected_tree) = full_tree.selection(requirements, all_sessions, session_groups, sessions) diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/Tools/print_operation.scala --- a/src/Pure/Tools/print_operation.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/Tools/print_operation.scala Tue Mar 17 09:22:21 2015 +0100 @@ -1,4 +1,4 @@ -/* Title: Pure/System/print_operation.scala +/* Title: Pure/Tools/print_operation.scala Author: Makarius Print operations as asynchronous query. 
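
In the build.scala hunk above, options.int("completion_limit") = 0 is Scala update-sugar: it expands to options.int.update("completion_limit", 0), and since Isabelle's Options is immutable the expression yields a fresh options value that is passed straight to find_sessions, presumably to suppress completion reports during batch builds. A minimal standalone sketch of the idiom (Opts and findSessions are invented stand-ins, not Isabelle's Options or Build API):

  // "x(k) = v" desugars to "x.update(k, v)"; here update returns a modified copy.
  class Opts(map: Map[String, Int]) {
    object int {
      def apply(key: String): Int = map.getOrElse(key, 0)
      def update(key: String, value: Int): Opts = new Opts(map + (key -> value))
    }
  }

  object OptionsDemo {
    def findSessions(opts: Opts): Int = opts.int("completion_limit")  // stand-in for Build.find_sessions

    def main(args: Array[String]): Unit = {
      val options = new Opts(Map("completion_limit" -> 40))
      // same shape as the build.scala call above: pass a copy with the option overridden
      println(findSessions(options.int("completion_limit") = 0))  // prints 0
      println(options.int("completion_limit"))                    // still 40: the original is untouched
    }
  }
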
diff -r 0bb88aa34768 -r ba54b27d733d src/Pure/build-jars --- a/src/Pure/build-jars Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Pure/build-jars Tue Mar 17 09:22:21 2015 +0100 @@ -64,6 +64,7 @@ PIDE/markup.scala PIDE/markup_tree.scala PIDE/protocol.scala + PIDE/protocol_message.scala PIDE/prover.scala PIDE/query_operation.scala PIDE/resources.scala diff -r 0bb88aa34768 -r ba54b27d733d src/Tools/jEdit/src/document_model.scala --- a/src/Tools/jEdit/src/document_model.scala Tue Mar 17 09:22:17 2015 +0100 +++ b/src/Tools/jEdit/src/document_model.scala Tue Mar 17 09:22:21 2015 +0100 @@ -80,7 +80,7 @@ JEdit_Lib.buffer_lock(buffer) { Exn.capture { PIDE.resources.check_thy_reader("", node_name, - JEdit_Lib.buffer_reader(buffer), Token.Pos.file(node_name.node)) + JEdit_Lib.buffer_reader(buffer), Token.Pos.command) } match { case Exn.Res(header) => header case Exn.Exn(exn) => Document.Node.bad_header(Exn.message(exn))