--- a/NEWS Fri Nov 15 22:02:05 2013 +0100
+++ b/NEWS Sat Nov 16 18:34:11 2013 +0100
@@ -62,6 +62,15 @@
instead of explicitly stating boundedness of sets.
+*** ML ***
+
+* Toplevel function "use" refers to the raw ML bootstrap environment,
+without Isar context or antiquotations. Potential INCOMPATIBILITY.
+Note that 'ML_file' is the canonical command to load ML files into the
+formal context.
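+
+For example, a theory that previously called the ML toplevel function
+"use" to load an auxiliary ML file would now write the following
+(a minimal sketch, assuming a hypothetical file "utils.ML" next to the
+theory source):
+
+  theory Scratch
+  imports Main
+  begin
+
+  ML_file "utils.ML"
+
+  end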
+
+
+
New in Isabelle2013-1 (November 2013)
-------------------------------------
--- a/etc/isar-keywords.el Fri Nov 15 22:02:05 2013 +0100
+++ b/etc/isar-keywords.el Sat Nov 16 18:34:11 2013 +0100
@@ -1,6 +1,6 @@
;;
;; Keyword classification tables for Isabelle/Isar.
-;; Generated from HOL + HOL-Auth + HOL-BNF + HOL-BNF-LFP + HOL-Bali + HOL-Decision_Procs + HOL-IMP + HOL-Imperative_HOL + HOL-Import + HOL-Library + HOL-Mutabelle + HOL-Nominal + HOL-Proofs + HOL-Proofs-Extraction + HOL-SPARK + HOL-Statespace + HOL-TPTP + HOL-ex + HOLCF + Pure.
+;; Generated from HOL + HOL-Auth + HOL-BNF + HOL-BNF-LFP + HOL-Bali + HOL-Decision_Procs + HOL-IMP + HOL-Imperative_HOL + HOL-Import + HOL-Library + HOL-Mutabelle + HOL-Nominal + HOL-Proofs + HOL-Proofs-Extraction + HOL-SPARK + HOL-Statespace + HOL-TPTP + HOL-Word-SMT_Examples + HOL-ex + HOLCF + Pure.
;; *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT ***
;;
@@ -33,6 +33,7 @@
"axiomatization"
"back"
"bnf"
+ "boogie_file"
"bundle"
"by"
"cannot_undo"
@@ -487,6 +488,7 @@
"atom_decl"
"attribute_setup"
"axiomatization"
+ "boogie_file"
"bundle"
"case_of_simps"
"class"
--- a/src/Doc/System/Sessions.thy Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Doc/System/Sessions.thy Sat Nov 16 18:34:11 2013 +0100
@@ -399,7 +399,7 @@
\smallskip Build some session images with cleanup of their
descendants, while retaining their ancestry:
\begin{ttbox}
-isabelle build -b -c HOL-Boogie HOL-SPARK
+isabelle build -b -c HOL-Algebra HOL-Word
\end{ttbox}
\smallskip Clean all sessions without building anything:
--- a/src/HOL/Library/Formal_Power_Series.thy Fri Nov 15 22:02:05 2013 +0100
+++ b/src/HOL/Library/Formal_Power_Series.thy Sat Nov 16 18:34:11 2013 +0100
@@ -415,7 +415,7 @@
lemma X_power_iff: "X^k = Abs_fps (\<lambda>n. if n = k then (1::'a::comm_ring_1) else 0)"
proof (induct k)
case 0
- thus ?case by (simp add: X_def fps_eq_iff)
+ then show ?case by (simp add: X_def fps_eq_iff)
next
case (Suc k)
{
@@ -578,13 +578,13 @@
by (simp add: X_power_iff)
-lemma fps_sum_rep_nth: "(setsum (%i. fps_const(a$i)*X^i) {0..m})$n =
+lemma fps_sum_rep_nth: "(setsum (\<lambda>i. fps_const(a$i)*X^i) {0..m})$n =
(if n \<le> m then a$n else (0::'a::comm_ring_1))"
apply (auto simp add: fps_setsum_nth cond_value_iff cong del: if_weak_cong)
apply (simp add: setsum_delta')
done
-lemma fps_notation: "(%n. setsum (%i. fps_const(a$i) * X^i) {0..n}) ----> a"
+lemma fps_notation: "(\<lambda>n. setsum (\<lambda>i. fps_const(a$i) * X^i) {0..n}) ----> a"
(is "?s ----> a")
proof -
{
@@ -596,7 +596,7 @@
{
fix n::nat
assume nn0: "n \<ge> n0"
- then have thnn0: "(1/2)^n <= (1/2 :: real)^n0"
+ then have thnn0: "(1/2)^n \<le> (1/2 :: real)^n0"
by (auto intro: power_decreasing)
{
assume "?s n = a"
@@ -615,7 +615,7 @@
by (auto simp: fps_sum_rep_nth not_le k_def fps_eq_iff split: split_if_asm intro: LeastI2_ex)
then have "dist (?s n) a < (1/2)^n" unfolding dth
by (auto intro: power_strict_decreasing)
- also have "\<dots> <= (1/2)^n0" using nn0
+ also have "\<dots> \<le> (1/2)^n0" using nn0
by (auto intro: power_decreasing)
also have "\<dots> < r" using n0 by simp
finally have "dist (?s n) a < r" .
@@ -1080,7 +1080,7 @@
{
assume a0: "a$0 = 0"
then have eq: "inverse a = 0" by (simp add: fps_inverse_def)
- { assume "n = 0" hence ?thesis by simp }
+ { assume "n = 0" then have ?thesis by simp }
moreover
{
assume n: "n > 0"
@@ -1120,8 +1120,10 @@
proof-
from inverse_mult_eq_1[OF a0]
have "fps_deriv (inverse a * a) = 0" by simp
- hence "inverse a * fps_deriv a + fps_deriv (inverse a) * a = 0" by simp
- hence "inverse a * (inverse a * fps_deriv a + fps_deriv (inverse a) * a) = 0" by simp
+ then have "inverse a * fps_deriv a + fps_deriv (inverse a) * a = 0"
+ by simp
+ then have "inverse a * (inverse a * fps_deriv a + fps_deriv (inverse a) * a) = 0"
+ by simp
with inverse_mult_eq_1[OF a0]
have "(inverse a)\<^sup>2 * fps_deriv a + fps_deriv (inverse a) = 0"
unfolding power2_eq_square
@@ -1140,13 +1142,15 @@
shows "inverse (a * b) = inverse a * inverse b"
proof -
{
- assume a0: "a$0 = 0" hence ab0: "(a*b)$0 = 0" by (simp add: fps_mult_nth)
+ assume a0: "a$0 = 0"
+ then have ab0: "(a*b)$0 = 0" by (simp add: fps_mult_nth)
from a0 ab0 have th: "inverse a = 0" "inverse (a*b) = 0" by simp_all
have ?thesis unfolding th by simp
}
moreover
{
- assume b0: "b$0 = 0" hence ab0: "(a*b)$0 = 0" by (simp add: fps_mult_nth)
+ assume b0: "b$0 = 0"
+ then have ab0: "(a*b)$0 = 0" by (simp add: fps_mult_nth)
from b0 ab0 have th: "inverse b = 0" "inverse (a*b) = 0" by simp_all
have ?thesis unfolding th by simp
}
@@ -1264,7 +1268,7 @@
also have "\<dots> = ?rhs $ n"
proof (induct k)
case 0
- thus ?case by (simp add: fps_setsum_nth)
+ then show ?case by (simp add: fps_setsum_nth)
next
case (Suc k)
note th = Suc.hyps[symmetric]
@@ -1335,7 +1339,7 @@
fix n:: nat
{
assume "n=0"
- hence "a$n = ((1 - ?X) * ?sa) $ n"
+ then have "a$n = ((1 - ?X) * ?sa) $ n"
by (simp add: fps_mult_nth)
}
moreover
@@ -1391,16 +1395,19 @@
done
lemma append_natpermute_less_eq:
- assumes h: "xs@ys \<in> natpermute n k"
+ assumes "xs @ ys \<in> natpermute n k"
shows "listsum xs \<le> n" and "listsum ys \<le> n"
proof -
- from h have "listsum (xs @ ys) = n" by (simp add: natpermute_def)
- hence "listsum xs + listsum ys = n" by simp
- then show "listsum xs \<le> n" and "listsum ys \<le> n" by simp_all
+ from assms have "listsum (xs @ ys) = n"
+ by (simp add: natpermute_def)
+ then have "listsum xs + listsum ys = n"
+ by simp
+ then show "listsum xs \<le> n" and "listsum ys \<le> n"
+ by simp_all
qed
lemma natpermute_split:
- assumes mn: "h \<le> k"
+ assumes "h \<le> k"
shows "natpermute n k =
(\<Union>m \<in>{0..n}. {l1 @ l2 |l1 l2. l1 \<in> natpermute m h \<and> l2 \<in> natpermute (n - m) (k - h)})"
(is "?L = ?R" is "?L = (\<Union>m \<in>{0..n}. ?S m)")
@@ -1419,7 +1426,7 @@
have "l \<in> ?L" using leq xs ys h
apply (clarsimp simp add: natpermute_def)
unfolding xs' ys'
- using mn xs ys
+ using assms xs ys
unfolding natpermute_def
apply simp
done
@@ -1433,12 +1440,12 @@
let ?m = "listsum ?xs"
from l have ls: "listsum (?xs @ ?ys) = n"
by (simp add: natpermute_def)
- have xs: "?xs \<in> natpermute ?m h" using l mn
+ have xs: "?xs \<in> natpermute ?m h" using l assms
by (simp add: natpermute_def)
have l_take_drop: "listsum l = listsum (take h l @ drop h l)"
by simp
then have ys: "?ys \<in> natpermute (n - ?m) (k - h)"
- using l mn ls by (auto simp add: natpermute_def simp del: append_take_drop_id)
+ using l assms ls by (auto simp add: natpermute_def simp del: append_take_drop_id)
from ls have m: "?m \<in> {0..n}"
by (simp add: l_take_drop del: append_take_drop_id)
from xs ys ls have "l \<in> ?R"
@@ -1544,7 +1551,7 @@
ultimately show ?thesis by auto
qed
- (* The general form *)
+text {* The general form *}
lemma fps_setprod_nth:
fixes m :: nat
and a :: "nat \<Rightarrow> ('a::comm_ring_1) fps"
@@ -1565,9 +1572,7 @@
case (Suc k)
then have km: "k < m" by arith
have u0: "{0 .. k} \<union> {m} = {0..m}"
- using Suc apply (simp add: set_eq_iff)
- apply presburger
- done
+ using Suc by (simp add: set_eq_iff) presburger
have f0: "finite {0 .. k}" "finite {m}" by auto
have d0: "{0 .. k} \<inter> {m} = {}" using Suc by auto
have "(setprod a {0 .. m}) $ n = (setprod a {0 .. k} * a m) $ n"
@@ -1606,18 +1611,21 @@
and a :: "('a::comm_ring_1) fps"
shows "(a ^ Suc m)$n = setsum (\<lambda>v. setprod (\<lambda>j. a $ (v!j)) {0..m}) (natpermute n (m+1))"
proof -
- have th0: "a^Suc m = setprod (\<lambda>i. a) {0..m}" by (simp add: setprod_constant)
+ have th0: "a^Suc m = setprod (\<lambda>i. a) {0..m}"
+ by (simp add: setprod_constant)
show ?thesis unfolding th0 fps_setprod_nth ..
qed
lemma fps_power_nth:
- fixes m :: nat and a :: "('a::comm_ring_1) fps"
+ fixes m :: nat
+ and a :: "('a::comm_ring_1) fps"
shows "(a ^m)$n =
(if m=0 then 1$n else setsum (\<lambda>v. setprod (\<lambda>j. a $ (v!j)) {0..m - 1}) (natpermute n m))"
by (cases m) (simp_all add: fps_power_nth_Suc del: power_Suc)
lemma fps_nth_power_0:
- fixes m :: nat and a :: "('a::{comm_ring_1}) fps"
+ fixes m :: nat
+ and a :: "('a::{comm_ring_1}) fps"
shows "(a ^m)$0 = (a$0) ^ m"
proof (cases m)
case 0
@@ -1650,7 +1658,7 @@
{
assume n0: "n=0"
from h have "(b oo a)$n = (c oo a)$n" by simp
- hence "b$n = c$n" using n0 by (simp add: fps_compose_nth)
+ then have "b$n = c$n" using n0 by (simp add: fps_compose_nth)
}
moreover
{
@@ -1767,7 +1775,7 @@
qed
lemma natpermute_max_card:
- assumes n0: "n\<noteq>0"
+ assumes n0: "n \<noteq> 0"
shows "card {xs \<in> natpermute n (k+1). n \<in> set xs} = k + 1"
unfolding natpermute_contain_maximal
proof -
@@ -1793,7 +1801,8 @@
then show "{replicate (k + 1) 0[i := n]} \<inter> {replicate (k + 1) 0[j := n]} = {}"
by auto
qed
- from card_UN_disjoint[OF fK fAK d] show "card (\<Union>i\<in>{0..k}. {replicate (k + 1) 0[i := n]}) = k+1"
+ from card_UN_disjoint[OF fK fAK d]
+ show "card (\<Union>i\<in>{0..k}. {replicate (k + 1) 0[i := n]}) = k + 1"
by simp
qed
@@ -1801,7 +1810,7 @@
fixes a:: "'a::field_char_0 fps"
assumes a0: "a$0 \<noteq> 0"
shows "(r (Suc k) (a$0)) ^ Suc k = a$0 \<longleftrightarrow> (fps_radical r (Suc k) a) ^ (Suc k) = a"
-proof-
+proof -
let ?r = "fps_radical r (Suc k) a"
{
assume r0: "(r (Suc k) (a$0)) ^ Suc k = a$0"
@@ -1814,7 +1823,7 @@
assume H: "\<forall>m<n. ?r ^ Suc k $ m = a$m"
{
assume "n = 0"
- hence "?r ^ Suc k $ n = a $n"
+ then have "?r ^ Suc k $ n = a $n"
using fps_radical_power_nth[of r "Suc k" a, OF r0] by simp
}
moreover
@@ -1865,7 +1874,7 @@
moreover
{
assume h: "(fps_radical r (Suc k) a) ^ (Suc k) = a"
- hence "((fps_radical r (Suc k) a) ^ (Suc k))$0 = a$0" by simp
+ then have "((fps_radical r (Suc k) a) ^ (Suc k))$0 = a$0" by simp
then have "(r (Suc k) (a$0)) ^ Suc k = a$0"
unfolding fps_power_nth_Suc
by (simp add: setprod_constant del: replicate.simps)
@@ -1884,7 +1893,7 @@
{fix z have "?r ^ Suc k $ z = a$z"
proof(induct z rule: nat_less_induct)
fix n assume H: "\<forall>m<n. ?r ^ Suc k $ m = a$m"
- {assume "n = 0" hence "?r ^ Suc k $ n = a $n"
+ {assume "n = 0" then have "?r ^ Suc k $ n = a $n"
using fps_radical_power_nth[of r "Suc k" a, OF r0] by simp}
moreover
{fix n1 assume n1: "n = Suc n1"
@@ -1928,13 +1937,13 @@
*)
lemma eq_divide_imp':
- assumes c0: "(c::'a::field) ~= 0"
+ assumes c0: "(c::'a::field) \<noteq> 0"
and eq: "a * c = b"
shows "a = b / c"
proof -
from eq have "a * c * inverse c = b * inverse c"
by simp
- hence "a * (inverse c * c) = b/c"
+ then have "a * (inverse c * c) = b/c"
by (simp only: field_simps divide_inverse)
then show "a = b/c"
unfolding field_inverse[OF c0] by simp
@@ -1966,7 +1975,7 @@
assume h: "\<forall>m<n. a$m = ?r $m"
{
assume "n = 0"
- hence "a$n = ?r $n" using a0 by simp
+ then have "a$n = ?r $n" using a0 by simp
}
moreover
{
@@ -1994,7 +2003,8 @@
by (auto simp del: replicate.simps)
have "(\<Prod>j\<in>{0..k}. a $ v ! j) = (\<Prod>j\<in>{0..k}. if j = i then a $ n else r (Suc k) (b$0))"
apply (rule setprod_cong, simp)
- using i a0 apply (simp del: replicate.simps)
+ using i a0
+ apply (simp del: replicate.simps)
done
also have "\<dots> = a $ n * (?r $ 0)^k"
using i by (simp add: setprod_gen_delta)
@@ -2100,11 +2110,11 @@
from iffD1[OF power_radical[of a r], OF a0 r0]
have "fps_deriv (?r ^ Suc k) = fps_deriv a"
by simp
- hence "fps_deriv ?r * ?w = fps_deriv a"
+ then have "fps_deriv ?r * ?w = fps_deriv a"
by (simp add: fps_deriv_power mult_ac del: power_Suc)
- hence "?iw * fps_deriv ?r * ?w = ?iw * fps_deriv a"
+ then have "?iw * fps_deriv ?r * ?w = ?iw * fps_deriv a"
by simp
- hence "fps_deriv ?r * (?iw * ?w) = fps_deriv a / ?w"
+ then have "fps_deriv ?r * (?iw * ?w) = fps_deriv a / ?w"
by (simp add: fps_divide_def)
then show ?thesis unfolding th0 by simp
qed
@@ -2125,7 +2135,7 @@
by (simp add: fps_mult_nth ra0 rb0 power_mult_distrib)
{
assume "k = 0"
- hence ?thesis using r0' by simp
+ then have ?thesis using r0' by simp
}
moreover
{
@@ -2145,7 +2155,7 @@
moreover
{
assume h: "fps_radical r k (a*b) = fps_radical r k a * fps_radical r k b"
- hence "(fps_radical r k (a*b))$0 = (fps_radical r k a * fps_radical r k b)$0"
+ then have "(fps_radical r k (a*b))$0 = (fps_radical r k a * fps_radical r k b)$0"
by simp
then have "r k ((a * b) $ 0) = r k (a $ 0) * r k (b $ 0)"
using k by (simp add: fps_mult_nth)
@@ -2166,7 +2176,7 @@
proof-
from r0' have r0: "(r (k) ((a*b)$0)) ^ k = (a*b)$0"
by (simp add: fps_mult_nth ra0 rb0 power_mult_distrib)
- {assume "k=0" hence ?thesis by simp}
+ {assume "k=0" then have ?thesis by simp}
moreover
{fix h assume k: "k = Suc h"
let ?ra = "fps_radical r (Suc h) a"
@@ -2447,7 +2457,8 @@
qed
lemma convolution_eq:
- "setsum (%i. a (i :: nat) * b (n - i)) {0 .. n} = setsum (%(i,j). a i * b j) {(i,j). i <= n \<and> j \<le> n \<and> i + j = n}"
+ "setsum (\<lambda>i. a (i :: nat) * b (n - i)) {0 .. n} =
+ setsum (\<lambda>(i,j). a i * b j) {(i,j). i \<le> n \<and> j \<le> n \<and> i + j = n}"
apply (rule setsum_reindex_cong[where f=fst])
apply (clarsimp simp add: inj_on_def)
apply (auto simp add: set_eq_iff image_iff)
@@ -2461,7 +2472,7 @@
assumes c0: "c$0 = (0::'a::idom)"
and d0: "d$0 = 0"
shows "((a oo c) * (b oo d))$n =
- setsum (%(k,m). a$k * b$m * (c^k * d^m) $ n) {(k,m). k + m \<le> n}" (is "?l = ?r")
+ setsum (\<lambda>(k,m). a$k * b$m * (c^k * d^m) $ n) {(k,m). k + m \<le> n}" (is "?l = ?r")
proof -
let ?S = "{(k\<Colon>nat, m\<Colon>nat). k + m \<le> n}"
have s: "?S \<subseteq> {0..n} <*> {0..n}" by (auto simp add: subset_eq)
@@ -2469,7 +2480,7 @@
apply (rule finite_subset[OF s])
apply auto
done
- have "?r = setsum (%i. setsum (%(k,m). a$k * (c^k)$i * b$m * (d^m) $ (n - i)) {(k,m). k + m \<le> n}) {0..n}"
+ have "?r = setsum (\<lambda>i. setsum (\<lambda>(k,m). a$k * (c^k)$i * b$m * (d^m) $ (n - i)) {(k,m). k + m \<le> n}) {0..n}"
apply (simp add: fps_mult_nth setsum_right_distrib)
apply (subst setsum_commute)
apply (rule setsum_cong2)
@@ -2480,7 +2491,8 @@
apply (rule setsum_cong2)
apply (simp add: setsum_cartesian_product mult_assoc)
apply (rule setsum_mono_zero_right[OF f])
- apply (simp add: subset_eq) apply presburger
+ apply (simp add: subset_eq)
+ apply presburger
apply clarsimp
apply (rule ccontr)
apply (clarsimp simp add: not_le)
@@ -2499,7 +2511,7 @@
assumes c0: "c$0 = (0::'a::idom)"
and d0: "d$0 = 0"
shows "((a oo c) * (b oo d))$n =
- setsum (%k. setsum (%m. a$k * b$m * (c^k * d^m) $ n) {0..n}) {0..n}" (is "?l = ?r")
+ setsum (\<lambda>k. setsum (\<lambda>m. a$k * b$m * (c^k * d^m) $ n) {0..n}) {0..n}" (is "?l = ?r")
unfolding product_composition_lemma[OF c0 d0]
unfolding setsum_cartesian_product
apply (rule setsum_mono_zero_left)
@@ -2523,12 +2535,12 @@
lemma setsum_pair_less_iff:
- "setsum (%((k::nat),m). a k * b m * c (k + m)) {(k,m). k + m \<le> n} =
- setsum (%s. setsum (%i. a i * b (s - i) * c s) {0..s}) {0..n}"
+ "setsum (\<lambda>((k::nat),m). a k * b m * c (k + m)) {(k,m). k + m \<le> n} =
+ setsum (\<lambda>s. setsum (\<lambda>i. a i * b (s - i) * c s) {0..s}) {0..n}"
(is "?l = ?r")
proof -
let ?KM = "{(k,m). k + m \<le> n}"
- let ?f = "%s. UNION {(0::nat)..s} (%i. {(i,s - i)})"
+ let ?f = "\<lambda>s. UNION {(0::nat)..s} (\<lambda>i. {(i,s - i)})"
have th0: "?KM = UNION {0..n} ?f"
apply (simp add: set_eq_iff)
apply presburger (* FIXME: slow! *)
@@ -2545,10 +2557,10 @@
lemma fps_compose_mult_distrib_lemma:
assumes c0: "c$0 = (0::'a::idom)"
shows "((a oo c) * (b oo c))$n =
- setsum (%s. setsum (%i. a$i * b$(s - i) * (c^s) $ n) {0..s}) {0..n}"
+ setsum (\<lambda>s. setsum (\<lambda>i. a$i * b$(s - i) * (c^s) $ n) {0..s}) {0..n}"
(is "?l = ?r")
unfolding product_composition_lemma[OF c0 c0] power_add[symmetric]
- unfolding setsum_pair_less_iff[where a = "%k. a$k" and b="%m. b$m" and c="%s. (c ^ s)$n" and n = n] ..
+ unfolding setsum_pair_less_iff[where a = "\<lambda>k. a$k" and b="\<lambda>m. b$m" and c="\<lambda>s. (c ^ s)$n" and n = n] ..
lemma fps_compose_mult_distrib:
@@ -2560,7 +2572,7 @@
lemma fps_compose_setprod_distrib:
assumes c0: "c$0 = (0::'a::idom)"
- shows "(setprod a S) oo c = setprod (%k. a k oo c) S" (is "?l = ?r")
+ shows "setprod a S oo c = setprod (\<lambda>k. a k oo c) S"
apply (cases "finite S")
apply simp_all
apply (induct S rule: finite_induct)
@@ -2577,7 +2589,7 @@
then show ?thesis by simp
next
case (Suc m)
- have th0: "a^n = setprod (%k. a) {0..m}" "(a oo c) ^ n = setprod (%k. a oo c) {0..m}"
+ have th0: "a^n = setprod (\<lambda>k. a) {0..m}" "(a oo c) ^ n = setprod (\<lambda>k. a oo c) {0..m}"
by (simp_all add: setprod_constant Suc)
then show ?thesis
by (simp add: fps_compose_setprod_distrib[OF c0])
@@ -2607,7 +2619,7 @@
unfolding fps_compose_mult_distrib[OF b0, symmetric]
unfolding inverse_mult_eq_1[OF a0]
fps_compose_1 ..
-
+
then have "(?ia oo b) * (a oo b) * ?iab = 1 * ?iab" by simp
then have "(?ia oo b) * (?iab * (a oo b)) = ?iab" by simp
then show ?thesis unfolding inverse_mult_eq_1[OF ab0] by simp
@@ -2629,8 +2641,8 @@
have th0: "(1 - X) $ 0 \<noteq> (0::'a)" by simp
from fps_inverse_gp[where ?'a = 'a]
have "inverse ?one = 1 - X" by (simp add: fps_eq_iff)
- hence "inverse (inverse ?one) = inverse (1 - X)" by simp
- hence th: "?one = 1/(1 - X)" unfolding fps_inverse_idempotent[OF o0]
+ then have "inverse (inverse ?one) = inverse (1 - X)" by simp
+ then have th: "?one = 1/(1 - X)" unfolding fps_inverse_idempotent[OF o0]
by (simp add: fps_divide_def)
show ?thesis
unfolding th
@@ -2706,13 +2718,13 @@
fix n
{
assume kn: "k>n"
- hence "?l $ n = ?r $n" using a0 startsby_zero_power_prefix[OF a0] Suc
+ then have "?l $ n = ?r $n" using a0 startsby_zero_power_prefix[OF a0] Suc
by (simp add: fps_compose_nth del: power_Suc)
}
moreover
{
assume kn: "k \<le> n"
- hence "?l$n = ?r$n"
+ then have "?l$n = ?r$n"
by (simp add: fps_compose_nth mult_delta_left setsum_delta)
}
moreover have "k >n \<or> k\<le> n" by arith
@@ -2751,7 +2763,7 @@
have th0: "?d$0 \<noteq> 0" using a1 by (simp add: fps_compose_nth)
from fps_inv_right[OF a0 a1] have "?d * ?dia = 1"
by (simp add: fps_compose_deriv[OF ia0, of a, symmetric] )
- hence "inverse ?d * ?d * ?dia = inverse ?d * 1" by simp
+ then have "inverse ?d * ?d * ?dia = inverse ?d * 1" by simp
with inverse_mult_eq_1 [OF th0]
show "?dia = inverse ?d" by simp
qed
@@ -2988,7 +3000,7 @@
subsubsection{* Logarithmic series *}
lemma Abs_fps_if_0:
- "Abs_fps(%n. if n=0 then (v::'a::ring_1) else f n) = fps_const v + X * Abs_fps (%n. f (Suc n))"
+ "Abs_fps(\<lambda>n. if n=0 then (v::'a::ring_1) else f n) = fps_const v + X * Abs_fps (\<lambda>n. f (Suc n))"
by (auto simp add: fps_eq_iff)
definition L :: "'a::field_char_0 \<Rightarrow> 'a fps"
@@ -3004,8 +3016,9 @@
lemma L_0[simp]: "L c $ 0 = 0" by (simp add: L_def)
lemma L_E_inv:
- assumes a: "a\<noteq> (0::'a::{field_char_0})"
- shows "L a = fps_inv (E a - 1)" (is "?l = ?r")
+ fixes a :: "'a::field_char_0"
+ assumes a: "a \<noteq> 0"
+ shows "L a = fps_inv (E a - 1)" (is "?l = ?r")
proof -
let ?b = "E a - 1"
have b0: "?b $ 0 = 0" by simp
@@ -3022,7 +3035,7 @@
have "fps_deriv (fps_inv ?b) = fps_const (inverse a) / (X + 1)"
using a
by (simp add: fps_const_inverse eq fps_divide_def fps_inverse_mult)
- hence "fps_deriv ?l = fps_deriv ?r"
+ then have "fps_deriv ?l = fps_deriv ?r"
by (simp add: fps_deriv_L add_commute fps_divide_def divide_inverse)
then show ?thesis unfolding fps_deriv_eq_iff
by (simp add: L_nth fps_inv_def)
@@ -3085,10 +3098,10 @@
have "a$n = (c gchoose n) * a$0"
proof (induct n)
case 0
- thus ?case by simp
+ then show ?case by simp
next
case (Suc m)
- thus ?case unfolding th0
+ then show ?case unfolding th0
apply (simp add: field_simps del: of_nat_Suc)
unfolding mult_assoc[symmetric] gbinomial_mult_1
apply (simp add: field_simps)
@@ -3142,7 +3155,7 @@
have "?P = fps_const (?P$0) * ?b (c + d)"
unfolding fps_binomial_ODE_unique[symmetric]
using th0 by simp
- hence "?P = 0" by (simp add: fps_mult_nth)
+ then have "?P = 0" by (simp add: fps_mult_nth)
then show ?thesis by simp
qed
@@ -3189,14 +3202,14 @@
lemma Vandermonde_pochhammer_lemma:
fixes a :: "'a::field_char_0"
assumes b: "\<forall> j\<in>{0 ..<n}. b \<noteq> of_nat j"
- shows "setsum (%k. (pochhammer (- a) k * pochhammer (- (of_nat n)) k) /
+ shows "setsum (\<lambda>k. (pochhammer (- a) k * pochhammer (- (of_nat n)) k) /
(of_nat (fact k) * pochhammer (b - of_nat n + 1) k)) {0..n} =
- pochhammer (- (a+ b)) n / pochhammer (- b) n"
+ pochhammer (- (a + b)) n / pochhammer (- b) n"
(is "?l = ?r")
proof -
- let ?m1 = "%m. (- 1 :: 'a) ^ m"
- let ?f = "%m. of_nat (fact m)"
- let ?p = "%(x::'a). pochhammer (- x)"
+ let ?m1 = "\<lambda>m. (- 1 :: 'a) ^ m"
+ let ?f = "\<lambda>m. of_nat (fact m)"
+ let ?p = "\<lambda>(x::'a). pochhammer (- x)"
from b have bn0: "?p b n \<noteq> 0" unfolding pochhammer_eq_0_iff by simp
{
fix k
@@ -3241,7 +3254,7 @@
moreover
{
assume nk: "k \<noteq> n"
- have m1nk: "?m1 n = setprod (%i. - 1) {0..m}" "?m1 k = setprod (%i. - 1) {0..h}"
+ have m1nk: "?m1 n = setprod (\<lambda>i. - 1) {0..m}" "?m1 k = setprod (\<lambda>i. - 1) {0..h}"
by (simp_all add: setprod_constant m h)
from kn nk have kn': "k < n" by simp
have bnz0: "pochhammer (b - of_nat n + 1) k \<noteq> 0"
@@ -3251,9 +3264,9 @@
apply (erule_tac x= "n - ka - 1" in allE)
apply (auto simp add: algebra_simps of_nat_diff)
done
- have eq1: "setprod (%k. (1::'a) + of_nat m - of_nat k) {0 .. h} =
+ have eq1: "setprod (\<lambda>k. (1::'a) + of_nat m - of_nat k) {0 .. h} =
setprod of_nat {Suc (m - h) .. Suc m}"
- apply (rule strong_setprod_reindex_cong[where f="%k. Suc m - k "])
+ apply (rule strong_setprod_reindex_cong[where f="\<lambda>k. Suc m - k "])
using kn' h m
apply (auto simp add: inj_on_def image_def)
apply (rule_tac x="Suc m - x" in bexI)
@@ -3274,17 +3287,17 @@
apply (rule setprod_cong)
apply auto
done
- have th20: "?m1 n * ?p b n = setprod (%i. b - of_nat i) {0..m}"
+ have th20: "?m1 n * ?p b n = setprod (\<lambda>i. b - of_nat i) {0..m}"
unfolding m1nk
unfolding m h pochhammer_Suc_setprod
unfolding setprod_timesf[symmetric]
apply (rule setprod_cong)
apply auto
done
- have th21:"pochhammer (b - of_nat n + 1) k = setprod (%i. b - of_nat i) {n - k .. n - 1}"
+ have th21:"pochhammer (b - of_nat n + 1) k = setprod (\<lambda>i. b - of_nat i) {n - k .. n - 1}"
unfolding h m
unfolding pochhammer_Suc_setprod
- apply (rule strong_setprod_reindex_cong[where f="%k. n - 1 - k"])
+ apply (rule strong_setprod_reindex_cong[where f="\<lambda>k. n - 1 - k"])
using kn
apply (auto simp add: inj_on_def m h image_def)
apply (rule_tac x= "m - x" in bexI)
@@ -3292,7 +3305,7 @@
done
have "?m1 n * ?p b n =
- pochhammer (b - of_nat n + 1) k * setprod (%i. b - of_nat i) {0.. n - k - 1}"
+ pochhammer (b - of_nat n + 1) k * setprod (\<lambda>i. b - of_nat i) {0.. n - k - 1}"
unfolding th20 th21
unfolding h m
apply (subst setprod_Un_disjoint[symmetric])
@@ -3302,7 +3315,7 @@
apply auto
done
then have th2: "(?m1 n * ?p b n)/pochhammer (b - of_nat n + 1) k =
- setprod (%i. b - of_nat i) {0.. n - k - 1}"
+ setprod (\<lambda>i. b - of_nat i) {0.. n - k - 1}"
using nz' by (simp add: field_simps)
have "(?m1 n * ?p b n * ?m1 k * ?p (of_nat n) k) / (?f n * pochhammer (b - of_nat n + 1) k) =
((?m1 k * ?p (of_nat n) k) / ?f n) * ((?m1 n * ?p b n)/pochhammer (b - of_nat n + 1) k)"
@@ -3349,8 +3362,8 @@
lemma Vandermonde_pochhammer:
fixes a :: "'a::field_char_0"
- assumes c: "ALL i : {0..< n}. c \<noteq> - of_nat i"
- shows "setsum (%k. (pochhammer a k * pochhammer (- (of_nat n)) k) /
+ assumes c: "\<forall>i \<in> {0..< n}. c \<noteq> - of_nat i"
+ shows "setsum (\<lambda>k. (pochhammer a k * pochhammer (- (of_nat n)) k) /
(of_nat (fact k) * pochhammer c k)) {0..n} = pochhammer (c - a) n / pochhammer c n"
proof -
let ?a = "- a"
@@ -3637,8 +3650,8 @@
subsection {* Hypergeometric series *}
definition "F as bs (c::'a::{field_char_0, field_inverse_zero}) =
- Abs_fps (%n. (foldl (%r a. r* pochhammer a n) 1 as * c^n) /
- (foldl (%r b. r * pochhammer b n) 1 bs * of_nat (fact n)))"
+ Abs_fps (\<lambda>n. (foldl (\<lambda>r a. r* pochhammer a n) 1 as * c^n) /
+ (foldl (\<lambda>r b. r * pochhammer b n) 1 bs * of_nat (fact n)))"
lemma F_nth[simp]: "F as bs c $ n =
(foldl (\<lambda>r a. r* pochhammer a n) 1 as * c^n) /
@@ -3646,11 +3659,13 @@
by (simp add: F_def)
lemma foldl_mult_start:
- "foldl (%r x. r * f x) (v::'a::comm_ring_1) as * x = foldl (%r x. r * f x) (v * x) as "
+ fixes v :: "'a::comm_ring_1"
+ shows "foldl (\<lambda>r x. r * f x) v as * x = foldl (\<lambda>r x. r * f x) (v * x) as "
by (induct as arbitrary: x v) (auto simp add: algebra_simps)
lemma foldr_mult_foldl:
- "foldr (%x r. r * f x) as v = foldl (%r x. r * f x) (v :: 'a::comm_ring_1) as"
+ fixes v :: "'a::comm_ring_1"
+ shows "foldr (\<lambda>x r. r * f x) as v = foldl (\<lambda>r x. r * f x) v as"
by (induct as arbitrary: v) (auto simp add: foldl_mult_start)
lemma F_nth_alt:
@@ -3675,28 +3690,28 @@
lemma F_0[simp]: "F as bs c $0 = 1"
apply simp
- apply (subgoal_tac "ALL as. foldl (%(r::'a) (a::'a). r) 1 as = 1")
+ apply (subgoal_tac "\<forall>as. foldl (\<lambda>(r::'a) (a::'a). r) 1 as = 1")
apply auto
apply (induct_tac as)
apply auto
done
lemma foldl_prod_prod:
- "foldl (%(r::'b::comm_ring_1) (x::'a::comm_ring_1). r * f x) v as * foldl (%r x. r * g x) w as =
- foldl (%r x. r * f x * g x) (v*w) as"
+ "foldl (\<lambda>(r::'b::comm_ring_1) (x::'a::comm_ring_1). r * f x) v as * foldl (\<lambda>r x. r * g x) w as =
+ foldl (\<lambda>r x. r * f x * g x) (v * w) as"
by (induct as arbitrary: v w) (auto simp add: algebra_simps)
lemma F_rec:
- "F as bs c $ Suc n = ((foldl (%r a. r* (a + of_nat n)) c as) /
- (foldl (%r b. r * (b + of_nat n)) (of_nat (Suc n)) bs )) * F as bs c $ n"
+ "F as bs c $ Suc n = ((foldl (\<lambda>r a. r* (a + of_nat n)) c as) /
+ (foldl (\<lambda>r b. r * (b + of_nat n)) (of_nat (Suc n)) bs )) * F as bs c $ n"
apply (simp del: of_nat_Suc of_nat_add fact_Suc)
apply (simp add: foldl_mult_start del: fact_Suc of_nat_Suc)
unfolding foldl_prod_prod[unfolded foldl_mult_start] pochhammer_Suc
apply (simp add: algebra_simps of_nat_mult)
done
-lemma XD_nth[simp]: "XD a $ n = (if n=0 then 0 else of_nat n * a$n)"
+lemma XD_nth[simp]: "XD a $ n = (if n = 0 then 0 else of_nat n * a$n)"
by (simp add: XD_def)
lemma XD_0th[simp]: "XD a $ 0 = 0" by simp
@@ -3718,11 +3733,11 @@
lemma F_minus_nat:
"F [- of_nat n] [- of_nat (n + m)] (c::'a::{field_char_0, field_inverse_zero}) $ k =
- (if k <= n then
+ (if k \<le> n then
pochhammer (- of_nat n) k * c ^ k / (pochhammer (- of_nat (n + m)) k * of_nat (fact k))
else 0)"
"F [- of_nat m] [- of_nat (m + n)] (c::'a::{field_char_0, field_inverse_zero}) $ k =
- (if k <= m then
+ (if k \<le> m then
pochhammer (- of_nat m) k * c ^ k / (pochhammer (- of_nat (m + n)) k * of_nat (fact k))
else 0)"
by (auto simp add: pochhammer_eq_0_iff)
@@ -3736,14 +3751,14 @@
lemma pochhammer_rec_if: "pochhammer a n = (if n = 0 then 1 else a * pochhammer (a + 1) (n - 1))"
by (cases n) (simp_all add: pochhammer_rec)
-lemma XDp_foldr_nth [simp]: "foldr (%c r. XDp c o r) cs (%c. XDp c a) c0 $ n =
- foldr (%c r. (c + of_nat n) * r) cs (c0 + of_nat n) * a$n"
+lemma XDp_foldr_nth [simp]: "foldr (\<lambda>c r. XDp c o r) cs (\<lambda>c. XDp c a) c0 $ n =
+ foldr (\<lambda>c r. (c + of_nat n) * r) cs (c0 + of_nat n) * a$n"
by (induct cs arbitrary: c0) (auto simp add: algebra_simps)
lemma genric_XDp_foldr_nth:
- assumes f: "ALL n c a. f c a $ n = (of_nat n + k c) * a$n"
- shows "foldr (%c r. f c o r) cs (%c. g c a) c0 $ n =
- foldr (%c r. (k c + of_nat n) * r) cs (g c0 a $ n)"
+ assumes f: "\<forall>n c a. f c a $ n = (of_nat n + k c) * a$n"
+ shows "foldr (\<lambda>c r. f c o r) cs (\<lambda>c. g c a) c0 $ n =
+ foldr (\<lambda>c r. (k c + of_nat n) * r) cs (g c0 a $ n)"
by (induct cs arbitrary: c0) (auto simp add: algebra_simps f)
lemma dist_less_imp_nth_equal:
@@ -3765,7 +3780,7 @@
shows "dist f g < inverse (2 ^ i)"
proof (cases "f = g")
case False
- hence "\<exists>n. f $ n \<noteq> g $ n" by (simp add: fps_eq_iff)
+ then have "\<exists>n. f $ n \<noteq> g $ n" by (simp add: fps_eq_iff)
with assms have "dist f g = inverse (2 ^ (LEAST n. f $ n \<noteq> g $ n))"
by (simp add: split_if_asm dist_fps_def)
moreover
@@ -3788,7 +3803,7 @@
have "\<exists>M. \<forall>m \<ge> M. \<forall>j\<le>i. X M $ j = X m $ j" by blast
}
then obtain M where M: "\<forall>i. \<forall>m \<ge> M i. \<forall>j \<le> i. X (M i) $ j = X m $ j" by metis
- hence "\<forall>i. \<forall>m \<ge> M i. \<forall>j \<le> i. X (M i) $ j = X m $ j" by metis
+ then have "\<forall>i. \<forall>m \<ge> M i. \<forall>j \<le> i. X (M i) $ j = X m $ j" by metis
show "convergent X"
proof (rule convergentI)
show "X ----> Abs_fps (\<lambda>i. X (M i) $ i)"
@@ -3803,7 +3818,7 @@
done
then obtain i where "inverse (2 ^ i) < e" by (auto simp: eventually_sequentially)
have "eventually (\<lambda>x. M i \<le> x) sequentially" by (auto simp: eventually_sequentially)
- thus "eventually (\<lambda>x. dist (X x) (Abs_fps (\<lambda>i. X (M i) $ i)) < e) sequentially"
+ then show "eventually (\<lambda>x. dist (X x) (Abs_fps (\<lambda>i. X (M i) $ i)) < e) sequentially"
proof eventually_elim
fix x
assume "M i \<le> x"
--- a/src/HOL/ROOT Fri Nov 15 22:02:05 2013 +0100
+++ b/src/HOL/ROOT Sat Nov 16 18:34:11 2013 +0100
@@ -780,13 +780,10 @@
theories [condition = ISABELLE_FULL_TEST]
SMT_Tests
files
- "Boogie_Dijkstra.b2i"
"Boogie_Dijkstra.certs"
- "Boogie_Max.b2i"
"Boogie_Max.certs"
"SMT_Examples.certs"
"SMT_Word_Examples.certs"
- "VCC_Max.b2i"
"VCC_Max.certs"
session "HOL-SPARK" (main) in "SPARK" = "HOL-Word" +
--- a/src/HOL/SMT_Examples/Boogie.thy Fri Nov 15 22:02:05 2013 +0100
+++ b/src/HOL/SMT_Examples/Boogie.thy Sat Nov 16 18:34:11 2013 +0100
@@ -6,6 +6,7 @@
theory Boogie
imports Main
+keywords "boogie_file" :: thy_load ("b2i")
begin
text {*
@@ -56,17 +57,17 @@
declare [[smt_certificates = "Boogie_Max.certs"]]
-setup {* Boogie.boogie_prove "Boogie_Max.b2i" *}
+boogie_file Boogie_Max
declare [[smt_certificates = "Boogie_Dijkstra.certs"]]
-setup {* Boogie.boogie_prove "Boogie_Dijkstra.b2i" *}
+boogie_file Boogie_Dijkstra
declare [[z3_with_extensions = true]]
declare [[smt_certificates = "VCC_Max.certs"]]
-setup {* Boogie.boogie_prove "VCC_Max.b2i" *}
+boogie_file VCC_Max
end
--- a/src/HOL/SMT_Examples/boogie.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/HOL/SMT_Examples/boogie.ML Sat Nov 16 18:34:11 2013 +0100
@@ -6,7 +6,7 @@
signature BOOGIE =
sig
- val boogie_prove: string -> theory -> theory
+ val boogie_prove: theory -> string -> unit
end
structure Boogie: BOOGIE =
@@ -299,21 +299,33 @@
ALLGOALS (SMT_Solver.smt_tac ctxt (boogie_rules @ axioms))
-fun boogie_prove file_name thy =
+fun boogie_prove thy text =
let
- val (text, thy') = Thy_Load.use_file (Path.explode file_name) thy
val lines = explode_lines text
val ((axioms, vc), ctxt) =
empty_context
|> parse_lines lines
|> add_unique_axioms
- |> build_proof_context thy'
+ |> build_proof_context thy
val _ = Goal.prove ctxt [] axioms vc (fn {prems, context} =>
boogie_tac context prems)
val _ = writeln "Verification condition proved successfully"
- in thy' end
+ in () end
+
+
+(* Isar command *)
+
+val _ =
+ Outer_Syntax.command @{command_spec "boogie_file"}
+ "prove verification condition from .b2i file"
+ (Thy_Load.provide_parse_files "boogie_file" >> (fn files =>
+ Toplevel.theory (fn thy =>
+ let
+ val ([{text, ...}], thy') = files thy;
+ val _ = boogie_prove thy' text;
+ in thy' end)))
end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/General/bytes.scala Sat Nov 16 18:34:11 2013 +0100
@@ -0,0 +1,114 @@
+/* Title: Pure/General/bytes.scala
+ Module: PIDE
+ Author: Makarius
+
+Immutable byte vectors versus UTF8 strings.
+*/
+
+package isabelle
+
+
+import java.io.{File => JFile, OutputStream, FileInputStream}
+
+
+object Bytes
+{
+ val empty: Bytes = new Bytes(Array[Byte](), 0, 0)
+
+ def apply(s: CharSequence): Bytes =
+ {
+ val str = s.toString
+ if (str.isEmpty) empty
+ else {
+ val b = str.getBytes(UTF8.charset)
+ new Bytes(b, 0, b.length)
+ }
+ }
+
+ def apply(a: Array[Byte], offset: Int, length: Int): Bytes =
+ if (length == 0) empty
+ else {
+ val b = new Array[Byte](length)
+ java.lang.System.arraycopy(a, offset, b, 0, length)
+ new Bytes(b, 0, b.length)
+ }
+
+
+ /* read */
+
+ def read(file: JFile): Bytes =
+ {
+ var i = 0
+ var m = 0
+ val n = file.length.toInt
+ val bytes = new Array[Byte](n)
+
+ val stream = new FileInputStream(file)
+ try {
+ do {
+ m = stream.read(bytes, i, n - i)
+ if (m != -1) i += m
+ } while (m != -1 && n > i)
+ }
+ finally { stream.close }
+
+ new Bytes(bytes, 0, bytes.length)
+ }
+}
+
+final class Bytes private(
+ protected val bytes: Array[Byte],
+ protected val offset: Int,
+ val length: Int)
+{
+ /* equality */
+
+ override def equals(that: Any): Boolean =
+ {
+ that match {
+ case other: Bytes =>
+ if (this eq other) true
+ else if (length != other.length) false
+ else (0 until length).forall(i => bytes(offset + i) == other.bytes(other.offset + i))
+ case _ => false
+ }
+ }
+
+ private lazy val hash: Int =
+ {
+ var h = 0
+ for (i <- offset until offset + length) {
+ val b = bytes(i).asInstanceOf[Int] & 0xFF
+ h = 31 * h + b
+ }
+ h
+ }
+
+ override def hashCode(): Int = hash
+
+
+ /* content */
+
+ def sha1_digest: SHA1.Digest = SHA1.digest(bytes)
+
+ override def toString: String =
+ UTF8.decode_chars(s => s, bytes, offset, offset + length).toString
+
+ def isEmpty: Boolean = length == 0
+
+ def +(other: Bytes): Bytes =
+ if (other.isEmpty) this
+ else if (isEmpty) other
+ else {
+ val new_bytes = new Array[Byte](length + other.length)
+ java.lang.System.arraycopy(bytes, offset, new_bytes, 0, length)
+ java.lang.System.arraycopy(other.bytes, other.offset, new_bytes, length, other.length)
+ new Bytes(new_bytes, 0, new_bytes.length)
+ }
+
+
+ /* write */
+
+ def write(stream: OutputStream): Unit = stream.write(bytes, offset, length)
+}
+
--- a/src/Pure/General/file.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/General/file.scala Sat Nov 16 18:34:11 2013 +0100
@@ -36,25 +36,7 @@
/* read */
- def read_bytes(file: JFile): Array[Byte] =
- {
- var i = 0
- var m = 0
- val n = file.length.toInt
- val buf = new Array[Byte](n)
-
- val stream = new FileInputStream(file)
- try {
- do {
- m = stream.read(buf, i, n - i)
- if (m != -1) i += m
- } while (m != -1 && n > i)
- }
- finally { stream.close }
- buf
- }
-
- def read(file: JFile): String = new String(read_bytes(file), UTF8.charset)
+ def read(file: JFile): String = Bytes.read(file).toString
def read(path: Path): String = read(path.file)
--- a/src/Pure/General/sha1.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/General/sha1.scala Sat Nov 16 18:34:11 2013 +0100
@@ -56,6 +56,8 @@
make_result(digest)
}
- def digest(string: String): Digest = digest(UTF8.string_bytes(string))
+ def digest(bytes: Bytes): Digest = bytes.sha1_digest
+
+ def digest(string: String): Digest = digest(Bytes(string))
}
--- a/src/Pure/ROOT.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/ROOT.ML Sat Nov 16 18:34:11 2013 +0100
@@ -341,8 +341,6 @@
(* ML toplevel commands *)
-fun use name = Toplevel.program (fn () => Thy_Load.use_ml (Path.explode name));
-
fun use_thys args = Toplevel.program (fn () => Thy_Info.use_thys (map (rpair Position.none) args));
val use_thy = use_thys o single;
--- a/src/Pure/System/invoke_scala.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/System/invoke_scala.scala Sat Nov 16 18:34:11 2013 +0100
@@ -89,15 +89,14 @@
}
private def invoke_scala(
- prover: Session.Prover, output: Isabelle_Process.Output): Boolean = synchronized
+ prover: Session.Prover, msg: Isabelle_Process.Protocol_Output): Boolean = synchronized
{
- output.properties match {
+ msg.properties match {
case Markup.Invoke_Scala(name, id) =>
futures += (id ->
default_thread_pool.submit(() =>
{
- val arg = XML.content(output.body)
- val (tag, result) = Invoke_Scala.method(name, arg)
+ val (tag, result) = Invoke_Scala.method(name, msg.text)
fulfill(prover, id, tag, result)
}))
true
@@ -106,9 +105,9 @@
}
private def cancel_scala(
- prover: Session.Prover, output: Isabelle_Process.Output): Boolean = synchronized
+ prover: Session.Prover, msg: Isabelle_Process.Protocol_Output): Boolean = synchronized
{
- output.properties match {
+ msg.properties match {
case Markup.Cancel_Scala(id) =>
futures.get(id) match {
case Some(future) => cancel(prover, id, future)
--- a/src/Pure/System/isabelle_process.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/System/isabelle_process.scala Sat Nov 16 18:34:11 2013 +0100
@@ -43,14 +43,12 @@
def is_system = kind == Markup.SYSTEM
def is_status = kind == Markup.STATUS
def is_report = kind == Markup.REPORT
- def is_protocol = kind == Markup.PROTOCOL
def is_syslog = is_init || is_exit || is_system || is_stderr
override def toString: String =
{
val res =
if (is_status || is_report) message.body.map(_.toString).mkString
- else if (is_protocol) "..."
else Pretty.string_of(message.body)
if (properties.isEmpty)
kind.toString + " [[" + res + "]]"
@@ -59,6 +57,12 @@
(for ((x, y) <- properties) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]"
}
}
+
+ class Protocol_Output(props: Properties.T, val bytes: Bytes)
+ extends Output(XML.Elem(Markup(Markup.PROTOCOL, props), Nil))
+ {
+ lazy val text: String = bytes.toString
+ }
}
@@ -89,28 +93,29 @@
receiver(new Output(XML.Elem(Markup(Markup.SYSTEM, Nil), List(XML.Text(text)))))
}
- private def output_message(kind: String, props: Properties.T, body: XML.Body)
+ private def protocol_output(props: Properties.T, bytes: Bytes)
+ {
+ receiver(new Protocol_Output(props, bytes))
+ }
+
+ private def output(kind: String, props: Properties.T, body: XML.Body)
{
if (kind == Markup.INIT) system_channel.accepted()
- if (kind == Markup.PROTOCOL)
- receiver(new Output(XML.Elem(Markup(kind, props), body)))
- else {
- val main = XML.Elem(Markup(kind, props), Protocol.clean_message(body))
- val reports = Protocol.message_reports(props, body)
- for (msg <- main :: reports) receiver(new Output(xml_cache.elem(msg)))
- }
+
+ val main = XML.Elem(Markup(kind, props), Protocol.clean_message(body))
+ val reports = Protocol.message_reports(props, body)
+ for (msg <- main :: reports) receiver(new Output(xml_cache.elem(msg)))
}
private def exit_message(rc: Int)
{
- output_message(Markup.EXIT, Markup.Return_Code(rc),
- List(XML.Text("Return code: " + rc.toString)))
+ output(Markup.EXIT, Markup.Return_Code(rc), List(XML.Text("Return code: " + rc.toString)))
}
/* command input actor */
- private case class Input_Chunks(chunks: List[Array[Byte]])
+ private case class Input_Chunks(chunks: List[Bytes])
private case object Close
private def close(p: (Thread, Actor))
@@ -232,7 +237,7 @@
else done = true
}
if (result.length > 0) {
- output_message(markup, Nil, List(XML.Text(decode(result.toString))))
+ output(markup, Nil, List(XML.Text(decode(result.toString))))
result.length = 0
}
else {
@@ -261,8 +266,8 @@
//{{{
receive {
case Input_Chunks(chunks) =>
- stream.write(UTF8.string_bytes(chunks.map(_.length).mkString("", ",", "\n")))
- chunks.foreach(stream.write(_))
+ Bytes(chunks.map(_.length).mkString("", ",", "\n")).write(stream)
+ chunks.foreach(_.write(stream))
stream.flush
case Close =>
stream.close
@@ -306,7 +311,7 @@
}
//}}}
- def read_chunk(do_decode: Boolean): XML.Body =
+ def read_chunk_bytes(): (Array[Byte], Int) =
//{{{
{
val n = read_int()
@@ -325,23 +330,33 @@
if (i != n)
throw new Protocol_Error("bad chunk (unexpected EOF after " + i + " of " + n + " bytes)")
- if (do_decode)
- YXML.parse_body_failsafe(UTF8.decode_chars(decode, buf, 0, n))
- else List(XML.Text(UTF8.decode_chars(s => s, buf, 0, n).toString))
+ (buf, n)
}
//}}}
+ def read_chunk(): XML.Body =
+ {
+ val (buf, n) = read_chunk_bytes()
+ YXML.parse_body_failsafe(UTF8.decode_chars(decode, buf, 0, n))
+ }
+
try {
do {
try {
- val header = read_chunk(true)
+ val header = read_chunk()
header match {
case List(XML.Elem(Markup(name, props), Nil)) =>
val kind = name.intern
- val body = read_chunk(kind != Markup.PROTOCOL)
- output_message(kind, props, body)
+ if (kind == Markup.PROTOCOL) {
+ val (buf, n) = read_chunk_bytes()
+ protocol_output(props, Bytes(buf, 0, n))
+ }
+ else {
+ val body = read_chunk()
+ output(kind, props, body)
+ }
case _ =>
- read_chunk(false)
+ read_chunk()
throw new Protocol_Error("bad header: " + header.toString)
}
}
@@ -362,13 +377,13 @@
/** main methods **/
- def protocol_command_raw(name: String, args: Array[Byte]*): Unit =
- command_input._2 ! Input_Chunks(UTF8.string_bytes(name) :: args.toList)
+ def protocol_command_raw(name: String, args: Bytes*): Unit =
+ command_input._2 ! Input_Chunks(Bytes(name) :: args.toList)
def protocol_command(name: String, args: String*)
{
receiver(new Input(name, args.toList))
- protocol_command_raw(name, args.map(UTF8.string_bytes): _*)
+ protocol_command_raw(name, args.map(Bytes(_)): _*)
}
def options(opts: Options): Unit =
--- a/src/Pure/System/session.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/System/session.scala Sat Nov 16 18:34:11 2013 +0100
@@ -46,12 +46,12 @@
abstract class Protocol_Handler
{
def stop(prover: Prover): Unit = {}
- val functions: Map[String, (Prover, Isabelle_Process.Output) => Boolean]
+ val functions: Map[String, (Prover, Isabelle_Process.Protocol_Output) => Boolean]
}
class Protocol_Handlers(
handlers: Map[String, Session.Protocol_Handler] = Map.empty,
- functions: Map[String, Isabelle_Process.Output => Boolean] = Map.empty)
+ functions: Map[String, Isabelle_Process.Protocol_Output => Boolean] = Map.empty)
{
def get(name: String): Option[Protocol_Handler] = handlers.get(name)
@@ -71,7 +71,7 @@
val new_handler = Class.forName(name).newInstance.asInstanceOf[Protocol_Handler]
val new_functions =
for ((a, f) <- new_handler.functions.toList) yield
- (a, (output: Isabelle_Process.Output) => f(prover, output))
+ (a, (msg: Isabelle_Process.Protocol_Output) => f(prover, msg))
val dups = for ((a, _) <- new_functions if functions1.isDefinedAt(a)) yield a
if (!dups.isEmpty) error("Duplicate protocol functions: " + commas_quote(dups))
@@ -88,10 +88,10 @@
new Protocol_Handlers(handlers2, functions2)
}
- def invoke(output: Isabelle_Process.Output): Boolean =
- output.properties match {
+ def invoke(msg: Isabelle_Process.Protocol_Output): Boolean =
+ msg.properties match {
case Markup.Function(a) if functions.isDefinedAt(a) =>
- try { functions(a)(output) }
+ try { functions(a)(msg) }
catch {
case exn: Throwable =>
System.err.println("Failed invocation of protocol function: " +
@@ -281,17 +281,16 @@
msg match {
case _: Isabelle_Process.Input =>
buffer += msg
+ case output: Isabelle_Process.Protocol_Output if output.properties == Markup.Flush =>
+ flush()
case output: Isabelle_Process.Output =>
- if (output.is_protocol && output.properties == Markup.Flush) flush()
- else {
- buffer += msg
- if (output.is_syslog)
- syslog >> (queue =>
- {
- val queue1 = queue.enqueue(output.message)
- if (queue1.length > syslog_limit) queue1.dequeue._2 else queue1
- })
- }
+ buffer += msg
+ if (output.is_syslog)
+ syslog >> (queue =>
+ {
+ val queue1 = queue.enqueue(output.message)
+ if (queue1.length > syslog_limit) queue1.dequeue._2 else queue1
+ })
}
}
@@ -414,69 +413,69 @@
}
}
- if (output.is_protocol) {
- val handled = _protocol_handlers.invoke(output)
- if (!handled) {
- output.properties match {
- case Markup.Protocol_Handler(name) =>
- _protocol_handlers = _protocol_handlers.add(prover.get, name)
+ output match {
+ case msg: Isabelle_Process.Protocol_Output =>
+ val handled = _protocol_handlers.invoke(msg)
+ if (!handled) {
+ msg.properties match {
+ case Markup.Protocol_Handler(name) =>
+ _protocol_handlers = _protocol_handlers.add(prover.get, name)
+
+ case Protocol.Command_Timing(state_id, timing) =>
+ val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil)))
+ accumulate(state_id, prover.get.xml_cache.elem(message))
- case Protocol.Command_Timing(state_id, timing) =>
- val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil)))
- accumulate(state_id, prover.get.xml_cache.elem(message))
+ case Markup.Assign_Update =>
+ msg.text match {
+ case Protocol.Assign_Update(id, update) =>
+ try {
+ val cmds = global_state >>> (_.assign(id, update))
+ delay_commands_changed.invoke(true, cmds)
+ }
+ catch { case _: Document.State.Fail => bad_output() }
+ case _ => bad_output()
+ }
+ // FIXME separate timeout event/message!?
+ if (prover.isDefined && System.currentTimeMillis() > prune_next) {
+ val old_versions = global_state >>> (_.prune_history(prune_size))
+ if (!old_versions.isEmpty) prover.get.remove_versions(old_versions)
+ prune_next = System.currentTimeMillis() + prune_delay.ms
+ }
- case Markup.Assign_Update =>
- XML.content(output.body) match {
- case Protocol.Assign_Update(id, update) =>
- try {
- val cmds = global_state >>> (_.assign(id, update))
- delay_commands_changed.invoke(true, cmds)
- }
- catch { case _: Document.State.Fail => bad_output() }
- case _ => bad_output()
- }
- // FIXME separate timeout event/message!?
- if (prover.isDefined && System.currentTimeMillis() > prune_next) {
- val old_versions = global_state >>> (_.prune_history(prune_size))
- if (!old_versions.isEmpty) prover.get.remove_versions(old_versions)
- prune_next = System.currentTimeMillis() + prune_delay.ms
- }
+ case Markup.Removed_Versions =>
+ msg.text match {
+ case Protocol.Removed(removed) =>
+ try {
+ global_state >> (_.removed_versions(removed))
+ }
+ catch { case _: Document.State.Fail => bad_output() }
+ case _ => bad_output()
+ }
+
+ case Markup.ML_Statistics(props) =>
+ statistics.event(Session.Statistics(props))
+
+ case Markup.Task_Statistics(props) =>
+ // FIXME
- case Markup.Removed_Versions =>
- XML.content(output.body) match {
- case Protocol.Removed(removed) =>
- try {
- global_state >> (_.removed_versions(removed))
- }
- catch { case _: Document.State.Fail => bad_output() }
- case _ => bad_output()
- }
-
- case Markup.ML_Statistics(props) =>
- statistics.event(Session.Statistics(props))
-
- case Markup.Task_Statistics(props) =>
- // FIXME
-
- case _ => bad_output()
+ case _ => bad_output()
+ }
+ }
+ case _ =>
+ output.properties match {
+ case Position.Id(state_id) =>
+ accumulate(state_id, output.message)
+
+ case _ if output.is_init =>
+ phase = Session.Ready
+
+ case Markup.Return_Code(rc) if output.is_exit =>
+ if (rc == 0) phase = Session.Inactive
+ else phase = Session.Failed
+
+ case _ => raw_output_messages.event(output)
}
}
- }
- else {
- output.properties match {
- case Position.Id(state_id) =>
- accumulate(state_id, output.message)
-
- case _ if output.is_init =>
- phase = Session.Ready
-
- case Markup.Return_Code(rc) if output.is_exit =>
- if (rc == 0) phase = Session.Inactive
- else phase = Session.Failed
-
- case _ => raw_output_messages.event(output)
- }
- }
}
//}}}
--- a/src/Pure/System/utf8.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/System/utf8.scala Sat Nov 16 18:34:11 2013 +0100
@@ -20,13 +20,11 @@
val charset: Charset = Charset.forName(charset_name)
def codec(): Codec = Codec(charset)
- def string_bytes(s: String): Array[Byte] = s.getBytes(charset)
-
/* permissive UTF-8 decoding */
// see also http://en.wikipedia.org/wiki/UTF-8#Description
- // overlong encodings enable byte-stuffing
+ // overlong encodings enable byte-stuffing of low-ASCII
def decode_permissive(text: CharSequence): String =
{
--- a/src/Pure/Thy/thy_info.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/Thy/thy_info.ML Sat Nov 16 18:34:11 2013 +0100
@@ -126,6 +126,7 @@
val get_imports = Thy_Load.imports_of o get_theory;
+(*Proof General legacy*)
fun loaded_files name = NAMED_CRITICAL "Thy_Info" (fn () =>
(case get_deps name of
NONE => []
@@ -305,7 +306,7 @@
#2 master = #2 master' andalso
(case lookup_theory name of
NONE => false
- | SOME theory => Thy_Load.load_current theory);
+ | SOME theory => Thy_Load.loaded_files_current theory);
in (current, deps', theory_pos', imports', keywords') end);
in
--- a/src/Pure/Thy/thy_load.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/Thy/thy_load.ML Sat Nov 16 18:34:11 2013 +0100
@@ -9,6 +9,7 @@
val master_directory: theory -> Path.T
val imports_of: theory -> (string * Position.T) list
val thy_path: Path.T -> Path.T
+ val read_files: Path.T -> string -> Path.T * Position.T -> Token.file list
val parse_files: string -> (theory -> Token.file list) parser
val check_thy: Path.T -> string ->
{master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
@@ -16,16 +17,11 @@
val provide: Path.T * SHA1.digest -> theory -> theory
val provide_parse_files: string -> (theory -> Token.file list * theory) parser
val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
- val use_file: Path.T -> theory -> string * theory
val loaded_files: theory -> Path.T list
- val load_current: theory -> bool
- val use_ml: Path.T -> unit
- val exec_ml: Path.T -> generic_theory -> generic_theory
+ val loaded_files_current: theory -> bool
val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
val load_thy: (Toplevel.transition -> Time.time option) -> int -> Path.T -> Thy_Header.header ->
Position.T -> string -> theory list -> theory * (unit -> unit) * int
- val set_master_path: Path.T -> unit
- val get_master_path: unit -> Path.T
end;
structure Thy_Load: THY_LOAD =
@@ -60,11 +56,11 @@
fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, []));
-(* inlined files *)
+(* auxiliary files *)
fun check_file dir file = File.check_file (File.full_path dir file);
-fun read_files cmd dir (path, pos) =
+fun read_files dir cmd (path, pos) =
let
fun make_file file =
let
@@ -81,48 +77,10 @@
Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
(case Token.get_files tok of
SOME files => files
- | NONE => read_files cmd (master_directory thy) (Path.explode name, Token.position_of tok)));
-
-local
-
-fun clean ((i1, t1) :: (i2, t2) :: toks) =
- if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks
- else (i1, t1) :: clean ((i2, t2) :: toks)
- | clean toks = toks;
-
-fun clean_tokens toks =
- ((0 upto length toks - 1) ~~ toks)
- |> filter (fn (_, tok) => Token.is_proper tok)
- |> clean;
-
-fun find_file toks =
- rev (clean_tokens toks) |> get_first (fn (i, tok) =>
- if Token.is_name tok then
- SOME (i, (Path.explode (Token.content_of tok), Token.position_of tok))
- handle ERROR msg => error (msg ^ Token.pos_of tok)
- else NONE);
-
-in
-
-fun resolve_files master_dir span =
- (case span of
- Thy_Syntax.Span (Thy_Syntax.Command (cmd, pos), toks) =>
- if Keyword.is_theory_load cmd then
- (case find_file toks of
- NONE => error ("Bad file argument of command " ^ quote cmd ^ Position.here pos)
- | SOME (i, path) =>
- let
- val toks' = toks |> map_index (fn (j, tok) =>
- if i = j then Token.put_files (read_files cmd master_dir path) tok
- else tok);
- in Thy_Syntax.Span (Thy_Syntax.Command (cmd, pos), toks') end)
- else span
- | span => span);
-
-end;
+ | NONE => read_files (master_directory thy) cmd (Path.explode name, Token.position_of tok)));
-(* check files *)
+(* theory files *)
val thy_path = Path.ext "thy";
@@ -164,17 +122,12 @@
val id = SHA1.digest text;
in ((full_path, id), text) end;
-fun use_file src_path thy =
- let
- val ((_, id), text) = load_file thy src_path;
- val thy' = provide (src_path, id) thy;
- in (text, thy') end;
-
+(*Proof General legacy*)
fun loaded_files thy =
let val {master_dir, provided, ...} = Files.get thy
in map (File.full_path master_dir o #1) provided end;
-fun load_current thy =
+fun loaded_files_current thy =
#provided (Files.get thy) |>
forall (fn (src_path, id) =>
(case try (load_file thy) src_path of
@@ -182,29 +135,6 @@
| SOME ((_, id'), _) => id = id'));
-(* provide files *)
-
-fun eval_file path text = ML_Context.eval_text true (Path.position path) text;
-
-fun use_ml src_path =
- if is_none (Context.thread_data ()) then
- let val path = check_file Path.current src_path
- in eval_file path (File.read path) end
- else
- let
- val thy = ML_Context.the_global_context ();
-
- val ((path, id), text) = load_file thy src_path;
- val _ = eval_file path text;
- val _ = Context.>> Local_Theory.propagate_ml_env;
-
- val provide = provide (src_path, id);
- val _ = Context.>> (Context.mapping provide (Local_Theory.background_theory provide));
- in () end;
-
-fun exec_ml src_path = ML_Context.exec (fn () => use_ml src_path);
-
-
(* load_thy *)
fun begin_theory master_dir {name, imports, keywords} parents =
@@ -246,7 +176,7 @@
val lexs = Keyword.get_lexicons ();
val toks = Thy_Syntax.parse_tokens lexs text_pos text;
- val spans = map (resolve_files master_dir) (Thy_Syntax.parse_spans toks);
+ val spans = map (Thy_Syntax.resolve_files (read_files master_dir)) (Thy_Syntax.parse_spans toks);
val elements = Thy_Syntax.parse_elements spans;
val _ = Present.theory_source name
@@ -291,16 +221,4 @@
|> space_implode (Thy_Output.verb_text "/" ^ "\\discretionary{}{}{}")
end));
-
-(* global master path *) (*Proof General legacy*)
-
-local
- val master_path = Unsynchronized.ref Path.current;
-in
-
-fun set_master_path path = master_path := path;
-fun get_master_path () = ! master_path;
-
end;
-
-end;
--- a/src/Pure/Thy/thy_syntax.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/Thy/thy_syntax.ML Sat Nov 16 18:34:11 2013 +0100
@@ -15,6 +15,7 @@
val span_content: span -> Token.T list
val present_span: span -> Output.output
val parse_spans: Token.T list -> span list
+ val resolve_files: (string -> Path.T * Position.T -> Token.file list) -> span -> span
datatype 'a element = Element of 'a * ('a element list * 'a) option
val atom: 'a -> 'a element
val map_element: ('a -> 'b) -> 'a element -> 'b element
@@ -142,6 +143,47 @@
end;
+(* inlined files *)
+
+local
+
+fun clean ((i1, t1) :: (i2, t2) :: toks) =
+ if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks
+ else (i1, t1) :: clean ((i2, t2) :: toks)
+ | clean toks = toks;
+
+fun clean_tokens toks =
+ ((0 upto length toks - 1) ~~ toks)
+ |> filter (fn (_, tok) => Token.is_proper tok)
+ |> clean;
+
+fun find_file toks =
+ rev (clean_tokens toks) |> get_first (fn (i, tok) =>
+ if Token.is_name tok then
+ SOME (i, (Path.explode (Token.content_of tok), Token.position_of tok))
+ handle ERROR msg => error (msg ^ Token.pos_of tok)
+ else NONE);
+
+in
+
+fun resolve_files read_files span =
+ (case span of
+ Span (Command (cmd, pos), toks) =>
+ if Keyword.is_theory_load cmd then
+ (case find_file toks of
+ NONE => error ("Bad file argument of command " ^ quote cmd ^ Position.here pos)
+ | SOME (i, path) =>
+ let
+ val toks' = toks |> map_index (fn (j, tok) =>
+ if i = j then Token.put_files (read_files cmd path) tok
+ else tok);
+ in Span (Command (cmd, pos), toks') end)
+ else span
+ | _ => span);
+
+end;
+
+
(** specification elements: commands with optional proof **)
--- a/src/Pure/Tools/proof_general.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/Tools/proof_general.ML Sat Nov 16 18:34:11 2013 +0100
@@ -36,6 +36,7 @@
val tell_clear_response: unit -> unit
val inform_file_processed: string -> unit
val inform_file_retracted: string -> unit
+ val master_path: Path.T Unsynchronized.ref
structure ThyLoad: sig val add_path: string -> unit end
val thm_deps: bool Unsynchronized.ref
val proof_generalN: string
@@ -293,11 +294,14 @@
(** theory loader **)
-(* fake old ThyLoad -- with new semantics *)
+(* global master path *)
+val master_path = Unsynchronized.ref Path.current;
+
+(*fake old ThyLoad -- with new semantics*)
structure ThyLoad =
struct
- val add_path = Thy_Load.set_master_path o Path.explode;
+ fun add_path path = master_path := Path.explode path;
end;
--- a/src/Pure/Tools/sledgehammer_params.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/Tools/sledgehammer_params.scala Sat Nov 16 18:34:11 2013 +0100
@@ -37,13 +37,10 @@
def get_provers: String = synchronized { _provers }
private def sledgehammer_provers(
- prover: Session.Prover, output: Isabelle_Process.Output): Boolean =
+ prover: Session.Prover, msg: Isabelle_Process.Protocol_Output): Boolean =
{
- output.body match {
- case Nil => update_provers(""); true
- case List(XML.Text(s)) => update_provers(s); true
- case _ => false
- }
+ update_provers(msg.text)
+ true
}
val functions =
--- a/src/Pure/build-jars Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/build-jars Sat Nov 16 18:34:11 2013 +0100
@@ -13,6 +13,7 @@
Concurrent/future.scala
Concurrent/simple_thread.scala
Concurrent/volatile.scala
+ General/bytes.scala
General/exn.scala
General/file.scala
General/graph.scala
--- a/src/Pure/pure_syn.ML Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Pure/pure_syn.ML Sat Nov 16 18:34:11 2013 +0100
@@ -14,7 +14,7 @@
(Thy_Header.args >> (fn header =>
Toplevel.print o
Toplevel.init_theory
- (fn () => Thy_Info.toplevel_begin_theory (Thy_Load.get_master_path ()) header)));
+ (fn () => Thy_Info.toplevel_begin_theory (! ProofGeneral.master_path) header)));
val _ =
Outer_Syntax.command
--- a/src/Tools/jEdit/src/document_model.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Tools/jEdit/src/document_model.scala Sat Nov 16 18:34:11 2013 +0100
@@ -14,9 +14,6 @@
import org.gjt.sp.jedit.Buffer
import org.gjt.sp.jedit.buffer.{BufferAdapter, BufferListener, JEditBuffer}
-import org.gjt.sp.jedit.textarea.TextArea
-
-import java.awt.font.TextAttribute
object Document_Model
--- a/src/Tools/jEdit/src/document_view.scala Fri Nov 15 22:02:05 2013 +0100
+++ b/src/Tools/jEdit/src/document_view.scala Sat Nov 16 18:34:11 2013 +0100
@@ -10,21 +10,15 @@
import isabelle._
-import scala.collection.mutable
-import scala.collection.immutable.SortedMap
import scala.actors.Actor._
-import java.lang.System
-import java.text.BreakIterator
-import java.awt.{Color, Graphics2D, Point}
+import java.awt.Graphics2D
import java.awt.event.KeyEvent
import javax.swing.event.{CaretListener, CaretEvent}
-import org.gjt.sp.jedit.{jEdit, Debug}
-import org.gjt.sp.jedit.gui.RolloverButton
+import org.gjt.sp.jedit.jEdit
import org.gjt.sp.jedit.options.GutterOptionPane
-import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea, TextAreaExtension, TextAreaPainter}
-import org.gjt.sp.jedit.syntax.SyntaxStyle
+import org.gjt.sp.jedit.textarea.{JEditTextArea, TextAreaExtension, TextAreaPainter}
object Document_View
@@ -244,7 +238,8 @@
}
}
- case bad => System.err.println("command_change_actor: ignoring bad message " + bad)
+ case bad =>
+ java.lang.System.err.println("command_change_actor: ignoring bad message " + bad)
}
}
}