--- a/etc/symbols Thu Feb 27 18:07:53 2014 +0100
+++ b/etc/symbols Thu Feb 27 21:36:40 2014 +0100
@@ -272,7 +272,7 @@
\<bar> code: 0x0000a6 group: punctuation abbrev: ||
\<plusminus> code: 0x0000b1 group: operator
\<minusplus> code: 0x002213 group: operator
-\<times> code: 0x0000d7 group: operator
+\<times> code: 0x0000d7 group: operator abbrev: <*>
\<div> code: 0x0000f7 group: operator
\<cdot> code: 0x0022c5 group: operator
\<star> code: 0x0022c6 group: operator
--- a/src/HOL/Decision_Procs/Commutative_Ring.thy Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/Commutative_Ring.thy Thu Feb 27 21:36:40 2014 +0100
@@ -143,7 +143,7 @@
| "pow n P =
(if even n then pow (n div 2) (sqr P)
else P \<otimes> pow (n div 2) (sqr P))"
-
+
lemma pow_if:
"pow n P =
(if n = 0 then Pc 1 else if even n then pow (n div 2) (sqr P)
--- a/src/HOL/Decision_Procs/Commutative_Ring_Complete.thy Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/Commutative_Ring_Complete.thy Thu Feb 27 21:36:40 2014 +0100
@@ -59,9 +59,9 @@
apply (case_tac y, auto)
done
-lemma norm_PXtrans:
+lemma norm_PXtrans:
assumes A: "isnorm (PX P x Q)"
- and "isnorm Q2"
+ and "isnorm Q2"
shows "isnorm (PX P x Q2)"
proof (cases P)
case (PX p1 y p2)
@@ -78,31 +78,34 @@
case Pinj
with assms show ?thesis by (cases x) auto
qed
-
-lemma norm_PXtrans2:
- assumes "isnorm (PX P x Q)" and "isnorm Q2"
- shows "isnorm (PX P (Suc (n + x)) Q2)"
-proof (cases P)
- case (PX p1 y p2)
- with assms show ?thesis
- apply (cases x)
- apply auto
- apply (cases p2)
- apply auto
- done
-next
- case Pc
- with assms show ?thesis by (cases x) auto
-next
- case Pinj
- with assms show ?thesis by (cases x) auto
-qed
+lemma norm_PXtrans2:
+ assumes "isnorm (PX P x Q)"
+ and "isnorm Q2"
+ shows "isnorm (PX P (Suc (n + x)) Q2)"
+proof (cases P)
+ case (PX p1 y p2)
+ with assms show ?thesis
+ apply (cases x)
+ apply auto
+ apply (cases p2)
+ apply auto
+ done
+next
+ case Pc
+ with assms show ?thesis
+ by (cases x) auto
+next
+ case Pinj
+ with assms show ?thesis
+ by (cases x) auto
+qed
+
text {* mkPX conserves normalizedness (@{text "_cn"}) *}
-lemma mkPX_cn:
+lemma mkPX_cn:
assumes "x \<noteq> 0"
and "isnorm P"
- and "isnorm Q"
+ and "isnorm Q"
shows "isnorm (mkPX P x Q)"
proof (cases P)
case (Pc c)
@@ -114,7 +117,8 @@
by (cases x) (auto simp add: mkPinj_cn mkPX_def)
next
case (PX P1 y P2)
- with assms have Y0: "y > 0" by (cases y) auto
+ with assms have Y0: "y > 0"
+ by (cases y) auto
from assms PX have "isnorm P1" "isnorm P2"
by (auto simp add: norm_PX1[of P1 y P2] norm_PX2[of P1 y P2])
from assms PX Y0 show ?thesis
@@ -125,10 +129,12 @@
lemma add_cn: "isnorm P \<Longrightarrow> isnorm Q \<Longrightarrow> isnorm (P \<oplus> Q)"
proof (induct P Q rule: add.induct)
case (2 c i P2)
- then show ?case by (cases P2) (simp_all, cases i, simp_all)
+ then show ?case
+ by (cases P2) (simp_all, cases i, simp_all)
next
case (3 i P2 c)
- then show ?case by (cases P2) (simp_all, cases i, simp_all)
+ then show ?case
+ by (cases P2) (simp_all, cases i, simp_all)
next
case (4 c P2 i Q2)
then have "isnorm P2" "isnorm Q2"
@@ -143,11 +149,13 @@
by (cases i) (simp, cases P2, auto, case_tac pol2, auto)
next
case (6 x P2 y Q2)
- then have Y0: "y>0" by (cases y) (auto simp add: norm_Pinj_0_False)
- with 6 have X0: "x>0" by (cases x) (auto simp add: norm_Pinj_0_False)
+ then have Y0: "y > 0"
+ by (cases y) (auto simp add: norm_Pinj_0_False)
+ with 6 have X0: "x > 0"
+ by (cases x) (auto simp add: norm_Pinj_0_False)
have "x < y \<or> x = y \<or> x > y" by arith
- moreover
- { assume "x < y"
+ moreover {
+ assume "x < y"
then have "\<exists>d. y = d + x" by arith
then obtain d where y: "y = d + x" ..
moreover
@@ -158,18 +166,22 @@
moreover
from 6 `x < y` y have "isnorm (Pinj d Q2)"
by (cases d, simp, cases Q2, auto)
- ultimately have ?case by (simp add: mkPinj_cn) }
- moreover
- { assume "x = y"
+ ultimately have ?case by (simp add: mkPinj_cn)
+ }
+ moreover {
+ assume "x = y"
moreover
from 6 have "isnorm P2" "isnorm Q2"
by (auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
moreover
note 6 Y0
- ultimately have ?case by (simp add: mkPinj_cn) }
- moreover
- { assume "x > y" then have "\<exists>d. x = d + y" by arith
- then obtain d where x: "x = d + y"..
+ ultimately have ?case by (simp add: mkPinj_cn)
+ }
+ moreover {
+ assume "x > y"
+ then have "\<exists>d. x = d + y"
+ by arith
+ then obtain d where x: "x = d + y" ..
moreover
note 6 Y0
moreover
@@ -178,111 +190,150 @@
moreover
from 6 `x > y` x have "isnorm (Pinj d P2)"
by (cases d) (simp, cases P2, auto)
- ultimately have ?case by (simp add: mkPinj_cn) }
+ ultimately have ?case by (simp add: mkPinj_cn)
+ }
ultimately show ?case by blast
next
case (7 x P2 Q2 y R)
have "x = 0 \<or> x = 1 \<or> x > 1" by arith
- moreover
- { assume "x = 0"
- with 7 have ?case by (auto simp add: norm_Pinj_0_False) }
- moreover
- { assume "x = 1"
+ moreover {
+ assume "x = 0"
+ with 7 have ?case by (auto simp add: norm_Pinj_0_False)
+ }
+ moreover {
+ assume "x = 1"
from 7 have "isnorm R" "isnorm P2"
by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
with 7 `x = 1` have "isnorm (R \<oplus> P2)" by simp
with 7 `x = 1` have ?case
- by (simp add: norm_PXtrans[of Q2 y _]) }
- moreover
- { assume "x > 1" then have "\<exists>d. x=Suc (Suc d)" by arith
+ by (simp add: norm_PXtrans[of Q2 y _])
+ }
+ moreover {
+ assume "x > 1" then have "\<exists>d. x=Suc (Suc d)" by arith
then obtain d where X: "x=Suc (Suc d)" ..
with 7 have NR: "isnorm R" "isnorm P2"
by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
with 7 X have "isnorm (Pinj (x - 1) P2)" by (cases P2) auto
with 7 X NR have "isnorm (R \<oplus> Pinj (x - 1) P2)" by simp
with `isnorm (PX Q2 y R)` X have ?case
- by (simp add: norm_PXtrans[of Q2 y _]) }
+ by (simp add: norm_PXtrans[of Q2 y _])
+ }
ultimately show ?case by blast
next
case (8 Q2 y R x P2)
have "x = 0 \<or> x = 1 \<or> x > 1" by arith
- moreover
- { assume "x = 0" with 8 have ?case by (auto simp add: norm_Pinj_0_False) }
- moreover
- { assume "x = 1"
- with 8 have "isnorm R" "isnorm P2" by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
- with 8 `x = 1` have "isnorm (R \<oplus> P2)" by simp
- with 8 `x = 1` have ?case by (simp add: norm_PXtrans[of Q2 y _]) }
- moreover
- { assume "x > 1" then have "\<exists>d. x=Suc (Suc d)" by arith
+ moreover {
+ assume "x = 0"
+ with 8 have ?case
+ by (auto simp add: norm_Pinj_0_False)
+ }
+ moreover {
+ assume "x = 1"
+ with 8 have "isnorm R" "isnorm P2"
+ by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
+ with 8 `x = 1` have "isnorm (R \<oplus> P2)"
+ by simp
+ with 8 `x = 1` have ?case
+ by (simp add: norm_PXtrans[of Q2 y _])
+ }
+ moreover {
+ assume "x > 1"
+ then have "\<exists>d. x = Suc (Suc d)" by arith
then obtain d where X: "x = Suc (Suc d)" ..
with 8 have NR: "isnorm R" "isnorm P2"
by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
- with 8 X have "isnorm (Pinj (x - 1) P2)" by (cases P2) auto
- with 8 `x > 1` NR have "isnorm (R \<oplus> Pinj (x - 1) P2)" by simp
- with `isnorm (PX Q2 y R)` X have ?case by (simp add: norm_PXtrans[of Q2 y _]) }
+ with 8 X have "isnorm (Pinj (x - 1) P2)"
+ by (cases P2) auto
+ with 8 `x > 1` NR have "isnorm (R \<oplus> Pinj (x - 1) P2)"
+ by simp
+ with `isnorm (PX Q2 y R)` X have ?case
+ by (simp add: norm_PXtrans[of Q2 y _])
+ }
ultimately show ?case by blast
next
case (9 P1 x P2 Q1 y Q2)
- then have Y0: "y>0" by (cases y) auto
- with 9 have X0: "x>0" by (cases x) auto
+ then have Y0: "y > 0" by (cases y) auto
+ with 9 have X0: "x > 0" by (cases x) auto
with 9 have NP1: "isnorm P1" and NP2: "isnorm P2"
by (auto simp add: norm_PX1[of P1 _ P2] norm_PX2[of P1 _ P2])
with 9 have NQ1: "isnorm Q1" and NQ2: "isnorm Q2"
by (auto simp add: norm_PX1[of Q1 _ Q2] norm_PX2[of Q1 _ Q2])
have "y < x \<or> x = y \<or> x < y" by arith
- moreover
- { assume sm1: "y < x" then have "\<exists>d. x = d + y" by arith
+ moreover {
+ assume sm1: "y < x"
+ then have "\<exists>d. x = d + y" by arith
then obtain d where sm2: "x = d + y" ..
note 9 NQ1 NP1 NP2 NQ2 sm1 sm2
moreover
- have "isnorm (PX P1 d (Pc 0))"
+ have "isnorm (PX P1 d (Pc 0))"
proof (cases P1)
case (PX p1 y p2)
- with 9 sm1 sm2 show ?thesis by (cases d) (simp, cases p2, auto)
+ with 9 sm1 sm2 show ?thesis
+ by (cases d) (simp, cases p2, auto)
next
- case Pc with 9 sm1 sm2 show ?thesis by (cases d) auto
+ case Pc
+ with 9 sm1 sm2 show ?thesis
+ by (cases d) auto
next
- case Pinj with 9 sm1 sm2 show ?thesis by (cases d) auto
+ case Pinj
+ with 9 sm1 sm2 show ?thesis
+ by (cases d) auto
qed
- ultimately have "isnorm (P2 \<oplus> Q2)" "isnorm (PX P1 (x - y) (Pc 0) \<oplus> Q1)" by auto
- with Y0 sm1 sm2 have ?case by (simp add: mkPX_cn) }
- moreover
- { assume "x = y"
- with 9 NP1 NP2 NQ1 NQ2 have "isnorm (P2 \<oplus> Q2)" "isnorm (P1 \<oplus> Q1)" by auto
- with `x = y` Y0 have ?case by (simp add: mkPX_cn) }
- moreover
- { assume sm1: "x < y" then have "\<exists>d. y = d + x" by arith
+ ultimately have "isnorm (P2 \<oplus> Q2)" "isnorm (PX P1 (x - y) (Pc 0) \<oplus> Q1)"
+ by auto
+ with Y0 sm1 sm2 have ?case
+ by (simp add: mkPX_cn)
+ }
+ moreover {
+ assume "x = y"
+ with 9 NP1 NP2 NQ1 NQ2 have "isnorm (P2 \<oplus> Q2)" "isnorm (P1 \<oplus> Q1)"
+ by auto
+ with `x = y` Y0 have ?case
+ by (simp add: mkPX_cn)
+ }
+ moreover {
+ assume sm1: "x < y"
+ then have "\<exists>d. y = d + x" by arith
then obtain d where sm2: "y = d + x" ..
note 9 NQ1 NP1 NP2 NQ2 sm1 sm2
moreover
- have "isnorm (PX Q1 d (Pc 0))"
+ have "isnorm (PX Q1 d (Pc 0))"
proof (cases Q1)
case (PX p1 y p2)
- with 9 sm1 sm2 show ?thesis by (cases d) (simp, cases p2, auto)
+ with 9 sm1 sm2 show ?thesis
+ by (cases d) (simp, cases p2, auto)
next
- case Pc with 9 sm1 sm2 show ?thesis by (cases d) auto
+ case Pc
+ with 9 sm1 sm2 show ?thesis
+ by (cases d) auto
next
- case Pinj with 9 sm1 sm2 show ?thesis by (cases d) auto
+ case Pinj
+ with 9 sm1 sm2 show ?thesis
+ by (cases d) auto
qed
- ultimately have "isnorm (P2 \<oplus> Q2)" "isnorm (PX Q1 (y - x) (Pc 0) \<oplus> P1)" by auto
- with X0 sm1 sm2 have ?case by (simp add: mkPX_cn) }
+ ultimately have "isnorm (P2 \<oplus> Q2)" "isnorm (PX Q1 (y - x) (Pc 0) \<oplus> P1)"
+ by auto
+ with X0 sm1 sm2 have ?case
+ by (simp add: mkPX_cn)
+ }
ultimately show ?case by blast
qed simp
text {* mul conserves normalizedness *}
lemma mul_cn: "isnorm P \<Longrightarrow> isnorm Q \<Longrightarrow> isnorm (P \<otimes> Q)"
proof (induct P Q rule: mul.induct)
- case (2 c i P2) then show ?case
+ case (2 c i P2)
+ then show ?case
by (cases P2) (simp_all, cases i, simp_all add: mkPinj_cn)
next
- case (3 i P2 c) then show ?case
+ case (3 i P2 c)
+ then show ?case
by (cases P2) (simp_all, cases i, simp_all add: mkPinj_cn)
next
case (4 c P2 i Q2)
then have "isnorm P2" "isnorm Q2"
by (auto simp only: norm_PX1[of P2 i Q2] norm_PX2[of P2 i Q2])
- with 4 show ?case
+ with 4 show ?case
by (cases "c = 0") (simp_all, cases "i = 0", simp_all add: mkPX_cn)
next
case (5 P2 i Q2 c)
@@ -293,89 +344,129 @@
next
case (6 x P2 y Q2)
have "x < y \<or> x = y \<or> x > y" by arith
- moreover
- { assume "x < y" then have "\<exists>d. y = d + x" by arith
+ moreover {
+ assume "x < y"
+ then have "\<exists>d. y = d + x" by arith
then obtain d where y: "y = d + x" ..
moreover
note 6
moreover
- from 6 have "x > 0" by (cases x) (auto simp add: norm_Pinj_0_False)
+ from 6 have "x > 0"
+ by (cases x) (auto simp add: norm_Pinj_0_False)
moreover
- from 6 have "isnorm P2" "isnorm Q2" by (auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
+ from 6 have "isnorm P2" "isnorm Q2"
+ by (auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
moreover
- from 6 `x < y` y have "isnorm (Pinj d Q2)" by (cases d) (simp, cases Q2, auto)
- ultimately have ?case by (simp add: mkPinj_cn) }
- moreover
- { assume "x = y"
+ from 6 `x < y` y have "isnorm (Pinj d Q2)"
+ by (cases d) (simp, cases Q2, auto)
+ ultimately have ?case by (simp add: mkPinj_cn)
+ }
+ moreover {
+ assume "x = y"
moreover
- from 6 have "isnorm P2" "isnorm Q2" by(auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
+ from 6 have "isnorm P2" "isnorm Q2"
+ by (auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
moreover
- from 6 have "y>0" by (cases y) (auto simp add: norm_Pinj_0_False)
+    from 6 have "y > 0"
+ by (cases y) (auto simp add: norm_Pinj_0_False)
moreover
note 6
- ultimately have ?case by (simp add: mkPinj_cn) }
- moreover
- { assume "x > y" then have "\<exists>d. x = d + y" by arith
+ ultimately have ?case by (simp add: mkPinj_cn)
+ }
+ moreover {
+ assume "x > y"
+ then have "\<exists>d. x = d + y" by arith
then obtain d where x: "x = d + y" ..
moreover
note 6
moreover
- from 6 have "y > 0" by (cases y) (auto simp add: norm_Pinj_0_False)
+ from 6 have "y > 0"
+ by (cases y) (auto simp add: norm_Pinj_0_False)
moreover
- from 6 have "isnorm P2" "isnorm Q2" by (auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
+ from 6 have "isnorm P2" "isnorm Q2"
+ by (auto simp add: norm_Pinj[of _ P2] norm_Pinj[of _ Q2])
moreover
- from 6 `x > y` x have "isnorm (Pinj d P2)" by (cases d) (simp, cases P2, auto)
- ultimately have ?case by (simp add: mkPinj_cn) }
+ from 6 `x > y` x have "isnorm (Pinj d P2)"
+ by (cases d) (simp, cases P2, auto)
+ ultimately have ?case by (simp add: mkPinj_cn)
+ }
ultimately show ?case by blast
next
case (7 x P2 Q2 y R)
then have Y0: "y > 0" by (cases y) auto
have "x = 0 \<or> x = 1 \<or> x > 1" by arith
- moreover
- { assume "x = 0" with 7 have ?case by (auto simp add: norm_Pinj_0_False) }
- moreover
- { assume "x = 1"
- from 7 have "isnorm R" "isnorm P2" by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
- with 7 `x = 1` have "isnorm (R \<otimes> P2)" "isnorm Q2" by (auto simp add: norm_PX1[of Q2 y R])
- with 7 `x = 1` Y0 have ?case by (simp add: mkPX_cn) }
- moreover
- { assume "x > 1" then have "\<exists>d. x = Suc (Suc d)" by arith
+ moreover {
+ assume "x = 0"
+ with 7 have ?case
+ by (auto simp add: norm_Pinj_0_False)
+ }
+ moreover {
+ assume "x = 1"
+ from 7 have "isnorm R" "isnorm P2"
+ by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
+ with 7 `x = 1` have "isnorm (R \<otimes> P2)" "isnorm Q2"
+ by (auto simp add: norm_PX1[of Q2 y R])
+ with 7 `x = 1` Y0 have ?case
+ by (simp add: mkPX_cn)
+ }
+ moreover {
+ assume "x > 1"
+ then have "\<exists>d. x = Suc (Suc d)"
+ by arith
then obtain d where X: "x = Suc (Suc d)" ..
from 7 have NR: "isnorm R" "isnorm Q2"
by (auto simp add: norm_PX2[of Q2 y R] norm_PX1[of Q2 y R])
moreover
- from 7 X have "isnorm (Pinj (x - 1) P2)" by (cases P2) auto
+ from 7 X have "isnorm (Pinj (x - 1) P2)"
+ by (cases P2) auto
moreover
- from 7 have "isnorm (Pinj x P2)" by (cases P2) auto
+ from 7 have "isnorm (Pinj x P2)"
+ by (cases P2) auto
moreover
note 7 X
- ultimately have "isnorm (R \<otimes> Pinj (x - 1) P2)" "isnorm (Pinj x P2 \<otimes> Q2)" by auto
- with Y0 X have ?case by (simp add: mkPX_cn) }
+ ultimately have "isnorm (R \<otimes> Pinj (x - 1) P2)" "isnorm (Pinj x P2 \<otimes> Q2)"
+ by auto
+ with Y0 X have ?case
+ by (simp add: mkPX_cn)
+ }
ultimately show ?case by blast
next
case (8 Q2 y R x P2)
- then have Y0: "y > 0" by (cases y) auto
+ then have Y0: "y > 0"
+ by (cases y) auto
have "x = 0 \<or> x = 1 \<or> x > 1" by arith
- moreover
- { assume "x = 0" with 8 have ?case by (auto simp add: norm_Pinj_0_False) }
- moreover
- { assume "x = 1"
- from 8 have "isnorm R" "isnorm P2" by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
- with 8 `x = 1` have "isnorm (R \<otimes> P2)" "isnorm Q2" by (auto simp add: norm_PX1[of Q2 y R])
- with 8 `x = 1` Y0 have ?case by (simp add: mkPX_cn) }
- moreover
- { assume "x > 1" then have "\<exists>d. x = Suc (Suc d)" by arith
+ moreover {
+ assume "x = 0"
+ with 8 have ?case
+ by (auto simp add: norm_Pinj_0_False)
+ }
+ moreover {
+ assume "x = 1"
+ from 8 have "isnorm R" "isnorm P2"
+ by (auto simp add: norm_Pinj[of _ P2] norm_PX2[of Q2 y R])
+ with 8 `x = 1` have "isnorm (R \<otimes> P2)" "isnorm Q2"
+ by (auto simp add: norm_PX1[of Q2 y R])
+ with 8 `x = 1` Y0 have ?case
+ by (simp add: mkPX_cn)
+ }
+ moreover {
+ assume "x > 1"
+ then have "\<exists>d. x = Suc (Suc d)" by arith
then obtain d where X: "x = Suc (Suc d)" ..
from 8 have NR: "isnorm R" "isnorm Q2"
by (auto simp add: norm_PX2[of Q2 y R] norm_PX1[of Q2 y R])
moreover
- from 8 X have "isnorm (Pinj (x - 1) P2)" by (cases P2) auto
+ from 8 X have "isnorm (Pinj (x - 1) P2)"
+ by (cases P2) auto
moreover
- from 8 X have "isnorm (Pinj x P2)" by (cases P2) auto
+ from 8 X have "isnorm (Pinj x P2)"
+ by (cases P2) auto
moreover
note 8 X
- ultimately have "isnorm (R \<otimes> Pinj (x - 1) P2)" "isnorm (Pinj x P2 \<otimes> Q2)" by auto
- with Y0 X have ?case by (simp add: mkPX_cn) }
+ ultimately have "isnorm (R \<otimes> Pinj (x - 1) P2)" "isnorm (Pinj x P2 \<otimes> Q2)"
+ by auto
+ with Y0 X have ?case by (simp add: mkPX_cn)
+ }
ultimately show ?case by blast
next
case (9 P1 x P2 Q1 y Q2)
@@ -383,16 +474,18 @@
from 9 have Y0: "y > 0" by (cases y) auto
note 9
moreover
- from 9 have "isnorm P1" "isnorm P2" by (auto simp add: norm_PX1[of P1 x P2] norm_PX2[of P1 x P2])
- moreover
- from 9 have "isnorm Q1" "isnorm Q2" by (auto simp add: norm_PX1[of Q1 y Q2] norm_PX2[of Q1 y Q2])
+ from 9 have "isnorm P1" "isnorm P2"
+ by (auto simp add: norm_PX1[of P1 x P2] norm_PX2[of P1 x P2])
+ moreover
+ from 9 have "isnorm Q1" "isnorm Q2"
+ by (auto simp add: norm_PX1[of Q1 y Q2] norm_PX2[of Q1 y Q2])
ultimately have "isnorm (P1 \<otimes> Q1)" "isnorm (P2 \<otimes> Q2)"
- "isnorm (P1 \<otimes> mkPinj 1 Q2)" "isnorm (Q1 \<otimes> mkPinj 1 P2)"
+ "isnorm (P1 \<otimes> mkPinj 1 Q2)" "isnorm (Q1 \<otimes> mkPinj 1 P2)"
by (auto simp add: mkPinj_cn)
with 9 X0 Y0 have
"isnorm (mkPX (P1 \<otimes> Q1) (x + y) (P2 \<otimes> Q2))"
- "isnorm (mkPX (P1 \<otimes> mkPinj (Suc 0) Q2) x (Pc 0))"
- "isnorm (mkPX (Q1 \<otimes> mkPinj (Suc 0) P2) y (Pc 0))"
+ "isnorm (mkPX (P1 \<otimes> mkPinj (Suc 0) Q2) x (Pc 0))"
+ "isnorm (mkPX (Q1 \<otimes> mkPinj (Suc 0) P2) y (Pc 0))"
by (auto simp add: mkPX_cn)
then show ?case by (simp add: add_cn)
qed simp
@@ -401,8 +494,10 @@
lemma neg_cn: "isnorm P \<Longrightarrow> isnorm (neg P)"
proof (induct P)
case (Pinj i P2)
- then have "isnorm P2" by (simp add: norm_Pinj[of i P2])
- with Pinj show ?case by (cases P2) (auto, cases i, auto)
+ then have "isnorm P2"
+ by (simp add: norm_Pinj[of i P2])
+ with Pinj show ?case
+ by (cases P2) (auto, cases i, auto)
next
case (PX P1 x P2) note PX1 = this
from PX have "isnorm P2" "isnorm P1"
@@ -410,10 +505,12 @@
with PX show ?case
proof (cases P1)
case (PX p1 y p2)
- with PX1 show ?thesis by (cases x) (auto, cases p2, auto)
+ with PX1 show ?thesis
+ by (cases x) (auto, cases p2, auto)
next
case Pinj
- with PX1 show ?thesis by (cases x) auto
+ with PX1 show ?thesis
+ by (cases x) auto
qed (cases x, auto)
qed simp
@@ -430,21 +527,22 @@
case (Pinj i Q)
then show ?case
by (cases Q) (auto simp add: mkPX_cn mkPinj_cn, cases i, auto simp add: mkPX_cn mkPinj_cn)
-next
+next
case (PX P1 x P2)
then have "x + x \<noteq> 0" "isnorm P2" "isnorm P1"
by (cases x, auto simp add: norm_PX1[of P1 x P2] norm_PX2[of P1 x P2])
with PX have "isnorm (mkPX (Pc (1 + 1) \<otimes> P1 \<otimes> mkPinj (Suc 0) P2) x (Pc 0))"
and "isnorm (mkPX (sqr P1) (x + x) (sqr P2))"
by (auto simp add: add_cn mkPX_cn mkPinj_cn mul_cn)
- then show ?case by (auto simp add: add_cn mkPX_cn mkPinj_cn mul_cn)
+ then show ?case
+ by (auto simp add: add_cn mkPX_cn mkPinj_cn mul_cn)
qed
text {* pow conserves normalizedness *}
lemma pow_cn: "isnorm P \<Longrightarrow> isnorm (pow n P)"
proof (induct n arbitrary: P rule: less_induct)
case (less k)
- show ?case
+ show ?case
proof (cases "k = 0")
case True
then show ?thesis by simp
--- a/src/HOL/Decision_Procs/Dense_Linear_Order.thy Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/Dense_Linear_Order.thy Thu Feb 27 21:36:40 2014 +0100
@@ -12,8 +12,6 @@
ML_file "langford_data.ML"
ML_file "ferrante_rackoff_data.ML"
-setup {* Langford_Data.setup #> Ferrante_Rackoff_Data.setup *}
-
context linorder
begin
--- a/src/HOL/Decision_Procs/commutative_ring_tac.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/commutative_ring_tac.ML Thu Feb 27 21:36:40 2014 +0100
@@ -22,31 +22,40 @@
fun polexT t = Type (@{type_name Commutative_Ring.polex}, [t]);
(* pol *)
-fun pol_Pc t = Const (@{const_name Commutative_Ring.pol.Pc}, t --> polT t);
-fun pol_Pinj t = Const (@{const_name Commutative_Ring.pol.Pinj}, HOLogic.natT --> polT t --> polT t);
-fun pol_PX t = Const (@{const_name Commutative_Ring.pol.PX}, polT t --> HOLogic.natT --> polT t --> polT t);
+fun pol_Pc t =
+ Const (@{const_name Commutative_Ring.pol.Pc}, t --> polT t);
+fun pol_Pinj t =
+ Const (@{const_name Commutative_Ring.pol.Pinj}, HOLogic.natT --> polT t --> polT t);
+fun pol_PX t =
+ Const (@{const_name Commutative_Ring.pol.PX}, polT t --> HOLogic.natT --> polT t --> polT t);
(* polex *)
-fun polex_add t = Const (@{const_name Commutative_Ring.polex.Add}, polexT t --> polexT t --> polexT t);
-fun polex_sub t = Const (@{const_name Commutative_Ring.polex.Sub}, polexT t --> polexT t --> polexT t);
-fun polex_mul t = Const (@{const_name Commutative_Ring.polex.Mul}, polexT t --> polexT t --> polexT t);
-fun polex_neg t = Const (@{const_name Commutative_Ring.polex.Neg}, polexT t --> polexT t);
-fun polex_pol t = Const (@{const_name Commutative_Ring.polex.Pol}, polT t --> polexT t);
-fun polex_pow t = Const (@{const_name Commutative_Ring.polex.Pow}, polexT t --> HOLogic.natT --> polexT t);
+fun polex_add t =
+ Const (@{const_name Commutative_Ring.polex.Add}, polexT t --> polexT t --> polexT t);
+fun polex_sub t =
+ Const (@{const_name Commutative_Ring.polex.Sub}, polexT t --> polexT t --> polexT t);
+fun polex_mul t =
+ Const (@{const_name Commutative_Ring.polex.Mul}, polexT t --> polexT t --> polexT t);
+fun polex_neg t =
+ Const (@{const_name Commutative_Ring.polex.Neg}, polexT t --> polexT t);
+fun polex_pol t =
+ Const (@{const_name Commutative_Ring.polex.Pol}, polT t --> polexT t);
+fun polex_pow t =
+ Const (@{const_name Commutative_Ring.polex.Pow}, polexT t --> HOLogic.natT --> polexT t);
(* reification of polynomials: primitive cring expressions *)
fun reif_pol T vs (t as Free _) =
let
val one = @{term "1::nat"};
val i = find_index (fn t' => t' = t) vs
- in if i = 0
- then pol_PX T $ (pol_Pc T $ cring_one T)
- $ one $ (pol_Pc T $ cring_zero T)
- else pol_Pinj T $ HOLogic.mk_nat i
- $ (pol_PX T $ (pol_Pc T $ cring_one T)
- $ one $ (pol_Pc T $ cring_zero T))
+ in
+ if i = 0 then
+ pol_PX T $ (pol_Pc T $ cring_one T) $ one $ (pol_Pc T $ cring_zero T)
+ else
+ pol_Pinj T $ HOLogic.mk_nat i $
+ (pol_PX T $ (pol_Pc T $ cring_one T) $ one $ (pol_Pc T $ cring_zero T))
end
- | reif_pol T vs t = pol_Pc T $ t;
+ | reif_pol T _ t = pol_Pc T $ t;
(* reification of polynomial expressions *)
fun reif_polex T vs (Const (@{const_name Groups.plus}, _) $ a $ b) =
@@ -62,34 +71,33 @@
| reif_polex T vs t = polex_pol T $ reif_pol T vs t;
(* reification of the equation *)
-val cr_sort = @{sort "comm_ring_1"};
+val cr_sort = @{sort comm_ring_1};
-fun reif_eq thy (eq as Const(@{const_name HOL.eq}, Type("fun", [T, _])) $ lhs $ rhs) =
- if Sign.of_sort thy (T, cr_sort) then
+fun reif_eq ctxt (eq as Const (@{const_name HOL.eq}, Type (@{type_name fun}, [T, _])) $ lhs $ rhs) =
+ if Sign.of_sort (Proof_Context.theory_of ctxt) (T, cr_sort) then
let
+ val thy = Proof_Context.theory_of ctxt;
val fs = Misc_Legacy.term_frees eq;
val cvs = cterm_of thy (HOLogic.mk_list T fs);
val clhs = cterm_of thy (reif_polex T fs lhs);
val crhs = cterm_of thy (reif_polex T fs rhs);
val ca = ctyp_of thy T;
in (ca, cvs, clhs, crhs) end
- else error ("reif_eq: not an equation over " ^ Syntax.string_of_sort_global thy cr_sort)
+ else error ("reif_eq: not an equation over " ^ Syntax.string_of_sort ctxt cr_sort)
| reif_eq _ _ = error "reif_eq: not an equation";
(* The cring tactic *)
(* Attention: You have to make sure that no t^0 is in the goal!! *)
(* Simply rewrite with t^0 = 1 first *)
val cring_simps =
- [@{thm mkPX_def}, @{thm mkPinj_def}, @{thm sub_def}, @{thm power_add},
- @{thm even_def}, @{thm pow_if}, sym OF [@{thm power_add}]];
+ @{thms mkPX_def mkPinj_def sub_def power_add even_def pow_if power_add [symmetric]};
fun tac ctxt = SUBGOAL (fn (g, i) =>
let
- val thy = Proof_Context.theory_of ctxt;
val cring_ctxt = ctxt addsimps cring_simps; (*FIXME really the full simpset!?*)
- val (ca, cvs, clhs, crhs) = reif_eq thy (HOLogic.dest_Trueprop g)
+ val (ca, cvs, clhs, crhs) = reif_eq ctxt (HOLogic.dest_Trueprop g);
val norm_eq_th =
- simplify cring_ctxt (instantiate' [SOME ca] [SOME clhs, SOME crhs, SOME cvs] @{thm norm_eq})
+ simplify cring_ctxt (instantiate' [SOME ca] [SOME clhs, SOME crhs, SOME cvs] @{thm norm_eq});
in
cut_tac norm_eq_th i
THEN (simp_tac cring_ctxt i)
--- a/src/HOL/Decision_Procs/ferrante_rackoff_data.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/ferrante_rackoff_data.ML Thu Feb 27 21:36:40 2014 +0100
@@ -17,7 +17,6 @@
whatis: morphism -> cterm -> cterm -> ord,
simpset: morphism -> simpset} -> declaration
val match: Proof.context -> cterm -> entry option
- val setup: theory -> theory
end;
structure Ferrante_Rackoff_Data: FERRANTE_RACKOF_DATA =
@@ -117,28 +116,24 @@
val terms = thms >> map Drule.dest_term;
in
-val ferrak_setup =
- Attrib.setup @{binding ferrack}
- ((keyword minfN |-- thms)
- -- (keyword pinfN |-- thms)
- -- (keyword nmiN |-- thms)
- -- (keyword npiN |-- thms)
- -- (keyword lin_denseN |-- thms)
- -- (keyword qeN |-- thms)
- -- (keyword atomsN |-- terms) >>
- (fn ((((((minf,pinf),nmi),npi),lin_dense),qe), atoms)=>
- if length qe = 1 then
- add ({minf = minf, pinf = pinf, nmi = nmi, npi = npi, ld = lin_dense,
- qe = hd qe, atoms = atoms},
- {isolate_conv = undefined, whatis = undefined, simpset = HOL_ss})
- else error "only one theorem for qe!"))
- "Ferrante Rackoff data";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding ferrack}
+ ((keyword minfN |-- thms)
+ -- (keyword pinfN |-- thms)
+ -- (keyword nmiN |-- thms)
+ -- (keyword npiN |-- thms)
+ -- (keyword lin_denseN |-- thms)
+ -- (keyword qeN |-- thms)
+ -- (keyword atomsN |-- terms) >>
+      (fn ((((((minf, pinf), nmi), npi), lin_dense), qe), atoms) =>
+ if length qe = 1 then
+ add ({minf = minf, pinf = pinf, nmi = nmi, npi = npi, ld = lin_dense,
+ qe = hd qe, atoms = atoms},
+ {isolate_conv = undefined, whatis = undefined, simpset = HOL_ss})
+ else error "only one theorem for qe!"))
+ "Ferrante Rackoff data");
end;
-
-(* theory setup *)
-
-val setup = ferrak_setup;
-
end;
--- a/src/HOL/Decision_Procs/langford.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/langford.ML Thu Feb 27 21:36:40 2014 +0100
@@ -12,19 +12,20 @@
struct
val dest_set =
- let
- fun h acc ct =
- case term_of ct of
- Const (@{const_name Orderings.bot}, _) => acc
- | Const (@{const_name insert}, _) $ _ $ t => h (Thm.dest_arg1 ct :: acc) (Thm.dest_arg ct)
-in h [] end;
+ let
+ fun h acc ct =
+ (case term_of ct of
+ Const (@{const_name Orderings.bot}, _) => acc
+ | Const (@{const_name insert}, _) $ _ $ t => h (Thm.dest_arg1 ct :: acc) (Thm.dest_arg ct));
+ in h [] end;
fun prove_finite cT u =
-let val [th0,th1] = map (instantiate' [SOME cT] []) @{thms "finite.intros"}
+ let
+ val [th0, th1] = map (instantiate' [SOME cT] []) @{thms "finite.intros"}
fun ins x th =
- Thm.implies_elim (instantiate' [] [(SOME o Thm.dest_arg o Thm.dest_arg)
- (Thm.cprop_of th), SOME x] th1) th
-in fold ins u th0 end;
+ Thm.implies_elim
+ (instantiate' [] [(SOME o Thm.dest_arg o Thm.dest_arg) (Thm.cprop_of th), SOME x] th1) th
+ in fold ins u th0 end;
fun simp_rule ctxt =
Conv.fconv_rule
--- a/src/HOL/Decision_Procs/langford_data.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Decision_Procs/langford_data.ML Thu Feb 27 21:36:40 2014 +0100
@@ -2,18 +2,20 @@
sig
type entry
val get: Proof.context -> simpset * (thm * entry) list
- val add: entry -> attribute
+ val add: entry -> attribute
val del: attribute
- val setup: theory -> theory
val match: Proof.context -> cterm -> entry option
end;
-structure Langford_Data: LANGFORD_DATA =
+structure Langford_Data: LANGFORD_DATA =
struct
(* data *)
-type entry = {qe_bnds: thm, qe_nolb : thm , qe_noub: thm,
- gs : thm list, gst : thm, atoms: cterm list};
+
+type entry =
+  {qe_bnds: thm, qe_nolb: thm, qe_noub: thm,
+    gs: thm list, gst: thm, atoms: cterm list};
+
val eq_key = Thm.eq_thm;
fun eq_data arg = eq_fst eq_key arg;
@@ -32,9 +34,9 @@
val del = Thm.declaration_attribute (Data.map o del_data);
-fun add entry =
- Thm.declaration_attribute (fn key => fn context => context |> Data.map
- (del_data key #> apsnd (cons (key, entry))));
+fun add entry =
+ Thm.declaration_attribute (fn key => fn context => context |> Data.map
+ (del_data key #> apsnd (cons (key, entry))));
val add_simp = Thm.declaration_attribute (fn th => fn context =>
(Data.map o apfst) (simpset_map (Context.proof_of context) (Simplifier.add_simp th)) context);
@@ -44,70 +46,68 @@
fun match ctxt tm =
let
- fun match_inst
- {qe_bnds, qe_nolb, qe_noub, gs, gst, atoms} pat =
- let
+ fun match_inst {qe_bnds, qe_nolb, qe_noub, gs, gst, atoms} pat =
+ let
fun h instT =
let
val substT = Thm.instantiate (instT, []);
val substT_cterm = Drule.cterm_rule substT;
- val qe_bnds' = substT qe_bnds
- val qe_nolb' = substT qe_nolb
- val qe_noub' = substT qe_noub
- val gs' = map substT gs
- val gst' = substT gst
- val atoms' = map substT_cterm atoms
- val result = {qe_bnds = qe_bnds', qe_nolb = qe_nolb',
- qe_noub = qe_noub', gs = gs', gst = gst',
- atoms = atoms'}
- in SOME result end
- in (case try Thm.match (pat, tm) of
- NONE => NONE
- | SOME (instT, _) => h instT)
+ val qe_bnds' = substT qe_bnds;
+ val qe_nolb' = substT qe_nolb;
+ val qe_noub' = substT qe_noub;
+ val gs' = map substT gs;
+ val gst' = substT gst;
+ val atoms' = map substT_cterm atoms;
+ val result =
+ {qe_bnds = qe_bnds', qe_nolb = qe_nolb',
+ qe_noub = qe_noub', gs = gs', gst = gst',
+ atoms = atoms'};
+ in SOME result end;
+ in
+ (case try Thm.match (pat, tm) of
+ NONE => NONE
+ | SOME (instT, _) => h instT)
end;
- fun match_struct (_,
- entry as ({atoms = atoms, ...}): entry) =
+ fun match_struct (_, entry as ({atoms = atoms, ...}): entry) =
get_first (match_inst entry) atoms;
in get_first match_struct (snd (get ctxt)) end;
+
(* concrete syntax *)
local
-val qeN = "qe";
-val gatherN = "gather";
-val atomsN = "atoms"
-fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
-val any_keyword =
- keyword qeN || keyword gatherN || keyword atomsN;
+ val qeN = "qe";
+ val gatherN = "gather";
+ val atomsN = "atoms"
+ fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
+ val any_keyword = keyword qeN || keyword gatherN || keyword atomsN;
-val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
-val terms = thms >> map Drule.dest_term;
+ val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat;
+ val terms = thms >> map Drule.dest_term;
in
-val langford_setup =
- Attrib.setup @{binding langford}
- ((keyword qeN |-- thms)
- -- (keyword gatherN |-- thms)
- -- (keyword atomsN |-- terms) >>
- (fn ((qes,gs), atoms) =>
- if length qes = 3 andalso length gs > 1 then
- let
- val [q1,q2,q3] = qes
- val gst::gss = gs
- val entry = {qe_bnds = q1, qe_nolb = q2,
- qe_noub = q3, gs = gss, gst = gst, atoms = atoms}
- in add entry end
- else error "Need 3 theorems for qe and at least one for gs"))
- "Langford data";
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding langford}
+ ((keyword qeN |-- thms) --
+ (keyword gatherN |-- thms) --
+ (keyword atomsN |-- terms) >> (fn ((qes, gs), atoms) =>
+ if length qes = 3 andalso length gs > 1 then
+ let
+ val [q1, q2, q3] = qes;
+ val gst :: gss = gs;
+ val entry =
+ {qe_bnds = q1, qe_nolb = q2,
+ qe_noub = q3, gs = gss, gst = gst, atoms = atoms};
+ in add entry end
+ else error "Need 3 theorems for qe and at least one for gs"))
+ "Langford data");
+end;
+
+val _ =
+ Theory.setup
+ (Attrib.setup @{binding langfordsimp} (Attrib.add_del add_simp del_simp) "Langford simpset");
end;
-
-(* theory setup *)
-
-val setup =
- langford_setup #>
- Attrib.setup @{binding langfordsimp} (Attrib.add_del add_simp del_simp) "Langford simpset";
-
-end;
--- a/src/HOL/Multivariate_Analysis/Convex_Euclidean_Space.thy Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/Multivariate_Analysis/Convex_Euclidean_Space.thy Thu Feb 27 21:36:40 2014 +0100
@@ -8137,7 +8137,7 @@
then have "rel_interior (S \<times> T) = rel_interior ((S \<times> (UNIV :: 'm set)) \<inter> ((UNIV :: 'n set) \<times> T))"
by auto
also have "\<dots> = rel_interior (S \<times> (UNIV :: 'm set)) \<inter> rel_interior ((UNIV :: 'n set) \<times> T)"
- apply (subst convex_rel_interior_inter_two[of "S \<times> (UNIV :: 'm set)" "(UNIV :: 'n set) <*> T"])
+ apply (subst convex_rel_interior_inter_two[of "S \<times> (UNIV :: 'm set)" "(UNIV :: 'n set) \<times> T"])
using * ri assms convex_Times
apply auto
done
@@ -8338,7 +8338,7 @@
lemma rel_interior_convex_cone_aux:
fixes S :: "'m::euclidean_space set"
assumes "convex S"
- shows "(c, x) \<in> rel_interior (cone hull ({(1 :: real)} <*> S)) \<longleftrightarrow>
+ shows "(c, x) \<in> rel_interior (cone hull ({(1 :: real)} \<times> S)) \<longleftrightarrow>
c > 0 \<and> x \<in> ((op *\<^sub>R c) ` (rel_interior S))"
proof (cases "S = {}")
case True
@@ -8347,33 +8347,33 @@
next
case False
then obtain s where "s \<in> S" by auto
- have conv: "convex ({(1 :: real)} <*> S)"
+ have conv: "convex ({(1 :: real)} \<times> S)"
using convex_Times[of "{(1 :: real)}" S] assms convex_singleton[of "1 :: real"]
by auto
- def f \<equiv> "\<lambda>y. {z. (y, z) \<in> cone hull ({1 :: real} <*> S)}"
- then have *: "(c, x) \<in> rel_interior (cone hull ({(1 :: real)} <*> S)) =
+ def f \<equiv> "\<lambda>y. {z. (y, z) \<in> cone hull ({1 :: real} \<times> S)}"
+ then have *: "(c, x) \<in> rel_interior (cone hull ({(1 :: real)} \<times> S)) =
(c \<in> rel_interior {y. f y \<noteq> {}} \<and> x \<in> rel_interior (f c))"
- apply (subst rel_interior_projection[of "cone hull ({(1 :: real)} <*> S)" f c x])
- using convex_cone_hull[of "{(1 :: real)} <*> S"] conv
+ apply (subst rel_interior_projection[of "cone hull ({(1 :: real)} \<times> S)" f c x])
+ using convex_cone_hull[of "{(1 :: real)} \<times> S"] conv
apply auto
done
{
fix y :: real
assume "y \<ge> 0"
- then have "y *\<^sub>R (1,s) \<in> cone hull ({1 :: real} <*> S)"
- using cone_hull_expl[of "{(1 :: real)} <*> S"] `s \<in> S` by auto
+ then have "y *\<^sub>R (1,s) \<in> cone hull ({1 :: real} \<times> S)"
+ using cone_hull_expl[of "{(1 :: real)} \<times> S"] `s \<in> S` by auto
then have "f y \<noteq> {}"
using f_def by auto
}
then have "{y. f y \<noteq> {}} = {0..}"
- using f_def cone_hull_expl[of "{1 :: real} <*> S"] by auto
+ using f_def cone_hull_expl[of "{1 :: real} \<times> S"] by auto
then have **: "rel_interior {y. f y \<noteq> {}} = {0<..}"
using rel_interior_real_semiline by auto
{
fix c :: real
assume "c > 0"
then have "f c = (op *\<^sub>R c ` S)"
- using f_def cone_hull_expl[of "{1 :: real} <*> S"] by auto
+ using f_def cone_hull_expl[of "{1 :: real} \<times> S"] by auto
then have "rel_interior (f c) = op *\<^sub>R c ` rel_interior S"
using rel_interior_convex_scaleR[of S c] assms by auto
}
@@ -8383,7 +8383,7 @@
lemma rel_interior_convex_cone:
fixes S :: "'m::euclidean_space set"
assumes "convex S"
- shows "rel_interior (cone hull ({1 :: real} <*> S)) =
+ shows "rel_interior (cone hull ({1 :: real} \<times> S)) =
{(c, c *\<^sub>R x) | c x. c > 0 \<and> x \<in> rel_interior S}"
(is "?lhs = ?rhs")
proof -
@@ -8621,13 +8621,13 @@
fixes S T :: "'n::euclidean_space set"
shows "convex hull (S + T) = convex hull S + convex hull T"
proof -
- have "convex hull S + convex hull T = (\<lambda>(x,y). x + y) ` ((convex hull S) <*> (convex hull T))"
+ have "convex hull S + convex hull T = (\<lambda>(x,y). x + y) ` ((convex hull S) \<times> (convex hull T))"
by (simp add: set_plus_image)
- also have "\<dots> = (\<lambda>(x,y). x + y) ` (convex hull (S <*> T))"
+ also have "\<dots> = (\<lambda>(x,y). x + y) ` (convex hull (S \<times> T))"
using convex_hull_Times by auto
also have "\<dots> = convex hull (S + T)"
using fst_snd_linear linear_conv_bounded_linear
- convex_hull_linear_image[of "(\<lambda>(x,y). x + y)" "S <*> T"]
+ convex_hull_linear_image[of "(\<lambda>(x,y). x + y)" "S \<times> T"]
by (auto simp add: set_plus_image)
finally show ?thesis ..
qed
@@ -8638,13 +8638,13 @@
and "convex T"
shows "rel_interior (S + T) = rel_interior S + rel_interior T"
proof -
- have "rel_interior S + rel_interior T = (\<lambda>(x,y). x + y) ` (rel_interior S <*> rel_interior T)"
+ have "rel_interior S + rel_interior T = (\<lambda>(x,y). x + y) ` (rel_interior S \<times> rel_interior T)"
by (simp add: set_plus_image)
- also have "\<dots> = (\<lambda>(x,y). x + y) ` rel_interior (S <*> T)"
+ also have "\<dots> = (\<lambda>(x,y). x + y) ` rel_interior (S \<times> T)"
using rel_interior_direct_sum assms by auto
also have "\<dots> = rel_interior (S + T)"
using fst_snd_linear convex_Times assms
- rel_interior_convex_linear_image[of "(\<lambda>(x,y). x + y)" "S <*> T"]
+ rel_interior_convex_linear_image[of "(\<lambda>(x,y). x + y)" "S \<times> T"]
by (auto simp add: set_plus_image)
finally show ?thesis ..
qed
@@ -8685,7 +8685,7 @@
and "rel_open S"
and "convex T"
and "rel_open T"
- shows "convex (S <*> T) \<and> rel_open (S <*> T)"
+ shows "convex (S \<times> T) \<and> rel_open (S \<times> T)"
by (metis assms convex_Times rel_interior_direct_sum rel_open_def)
lemma convex_rel_open_sum:
@@ -8799,8 +8799,8 @@
def C0 \<equiv> "convex hull (\<Union>(S ` I))"
have "\<forall>i\<in>I. C0 \<ge> S i"
unfolding C0_def using hull_subset[of "\<Union>(S ` I)"] by auto
- def K0 \<equiv> "cone hull ({1 :: real} <*> C0)"
- def K \<equiv> "\<lambda>i. cone hull ({1 :: real} <*> S i)"
+ def K0 \<equiv> "cone hull ({1 :: real} \<times> C0)"
+ def K \<equiv> "\<lambda>i. cone hull ({1 :: real} \<times> S i)"
have "\<forall>i\<in>I. K i \<noteq> {}"
unfolding K_def using assms
by (simp add: cone_hull_empty_iff[symmetric])
@@ -8838,13 +8838,13 @@
done
ultimately have geq: "K0 \<supseteq> convex hull (\<Union>(K ` I))"
using hull_minimal[of _ "K0" "convex"] by blast
- have "\<forall>i\<in>I. K i \<supseteq> {1 :: real} <*> S i"
+ have "\<forall>i\<in>I. K i \<supseteq> {1 :: real} \<times> S i"
using K_def by (simp add: hull_subset)
- then have "\<Union>(K ` I) \<supseteq> {1 :: real} <*> \<Union>(S ` I)"
+ then have "\<Union>(K ` I) \<supseteq> {1 :: real} \<times> \<Union>(S ` I)"
by auto
- then have "convex hull \<Union>(K ` I) \<supseteq> convex hull ({1 :: real} <*> \<Union>(S ` I))"
+ then have "convex hull \<Union>(K ` I) \<supseteq> convex hull ({1 :: real} \<times> \<Union>(S ` I))"
by (simp add: hull_mono)
- then have "convex hull \<Union>(K ` I) \<supseteq> {1 :: real} <*> C0"
+ then have "convex hull \<Union>(K ` I) \<supseteq> {1 :: real} \<times> C0"
unfolding C0_def
using convex_hull_Times[of "{(1 :: real)}" "\<Union>(S ` I)"] convex_hull_singleton
by auto
@@ -8889,7 +8889,7 @@
{
fix i
assume "i \<in> I"
- then have "convex (S i) \<and> k i \<in> rel_interior (cone hull {1} <*> S i)"
+ then have "convex (S i) \<and> k i \<in> rel_interior (cone hull {1} \<times> S i)"
using k K_def assms by auto
then have "\<exists>ci si. k i = (ci, ci *\<^sub>R si) \<and> 0 < ci \<and> si \<in> rel_interior (S i)"
using rel_interior_convex_cone[of "S i"] by auto
--- a/src/HOL/SMT_Examples/boogie.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/SMT_Examples/boogie.ML Thu Feb 27 21:36:40 2014 +0100
@@ -6,7 +6,7 @@
signature BOOGIE =
sig
- val boogie_prove: theory -> string -> unit
+ val boogie_prove: theory -> string list -> unit
end
structure Boogie: BOOGIE =
@@ -252,15 +252,13 @@
-(* splitting of text into a lists of lists of tokens *)
+(* splitting of text lines into lists of tokens *)
fun is_blank c = (c = " " orelse c = "\t" orelse c = "\r" orelse c = "\n")
-fun explode_lines text =
- text
- |> split_lines
- |> map (String.tokens (is_blank o str))
- |> filter (fn [] => false | _ => true)
+val token_lines =
+ map (String.tokens (is_blank o str))
+ #> filter (fn [] => false | _ => true)
@@ -299,13 +297,11 @@
ALLGOALS (SMT_Solver.smt_tac ctxt (boogie_rules @ axioms))
-fun boogie_prove thy text =
+fun boogie_prove thy lines =
let
- val lines = explode_lines text
-
val ((axioms, vc), ctxt) =
empty_context
- |> parse_lines lines
+ |> parse_lines (token_lines lines)
|> add_unique_axioms
|> build_proof_context thy
@@ -324,8 +320,8 @@
(Thy_Load.provide_parse_files "boogie_file" >> (fn files =>
Toplevel.theory (fn thy =>
let
- val ([{text, ...}], thy') = files thy;
- val _ = boogie_prove thy' text;
+ val ([{lines, ...}], thy') = files thy;
+ val _ = boogie_prove thy' lines;
in thy' end)))
end
--- a/src/HOL/SPARK/SPARK_Setup.thy Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/SPARK/SPARK_Setup.thy Thu Feb 27 21:36:40 2014 +0100
@@ -183,6 +183,4 @@
ML_file "Tools/spark_vcs.ML"
ML_file "Tools/spark_commands.ML"
-setup SPARK_Commands.setup
-
end
--- a/src/HOL/SPARK/Tools/spark_commands.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/HOL/SPARK/Tools/spark_commands.ML Thu Feb 27 21:36:40 2014 +0100
@@ -4,26 +4,20 @@
Isar commands for handling SPARK/Ada verification conditions.
*)
-
-signature SPARK_COMMANDS =
-sig
- val setup: theory -> theory
-end
-
-structure SPARK_Commands: SPARK_COMMANDS =
+structure SPARK_Commands: sig end =
struct
fun spark_open header (prfx, files) thy =
let
- val ([{src_path, text = vc_text, pos = vc_pos, ...}: Token.file,
- {text = fdl_text, pos = fdl_pos, ...},
- {text = rls_text, pos = rls_pos, ...}], thy') = files thy;
+ val ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file,
+ {lines = fdl_lines, pos = fdl_pos, ...},
+ {lines = rls_lines, pos = rls_pos, ...}], thy') = files thy;
val base = fst (Path.split_ext (File.full_path (Thy_Load.master_directory thy') src_path));
in
SPARK_VCs.set_vcs
- (snd (Fdl_Parser.parse_declarations fdl_pos fdl_text))
- (Fdl_Parser.parse_rules rls_pos rls_text)
- (snd (Fdl_Parser.parse_vcs header vc_pos vc_text))
+ (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines)))
+ (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines))
+ (snd (Fdl_Parser.parse_vcs header vc_pos (cat_lines vc_lines)))
base prfx thy'
end;
@@ -172,11 +166,11 @@
(Parse.reserved "incomplete" --| @{keyword ")"}) >> K true) false >>
(Toplevel.theory o SPARK_VCs.close));
-val setup = Theory.at_end (fn thy =>
+val _ = Theory.setup (Theory.at_end (fn thy =>
let
val _ = SPARK_VCs.is_closed thy
orelse error ("Found the end of the theory, " ^
"but the last SPARK environment is still open.")
- in NONE end);
+ in NONE end));
end;
--- a/src/Pure/Isar/token.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/Isar/token.ML Thu Feb 27 21:36:40 2014 +0100
@@ -10,7 +10,7 @@
Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
Nat | Float | String | AltString | Verbatim | Cartouche | Space | Comment | InternalValue |
Error of string | Sync | EOF
- type file = {src_path: Path.T, text: string, pos: Position.T}
+ type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T}
datatype value =
Text of string | Typ of typ | Term of term | Fact of thm list |
Attribute of morphism -> attribute | Files of file Exn.result list
@@ -80,7 +80,7 @@
args.ML). Note that an assignable ref designates an intermediate
state of internalization -- it is NOT meant to persist.*)
-type file = {src_path: Path.T, text: string, pos: Position.T};
+type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T};
datatype value =
Text of string |
--- a/src/Pure/PIDE/command.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/PIDE/command.ML Thu Feb 27 21:36:40 2014 +0100
@@ -6,15 +6,17 @@
signature COMMAND =
sig
- type blob = (string * string option) Exn.result
+ type 'a blob = (string * 'a option) Exn.result
+ type blob_digest = string blob
+ type content = SHA1.digest * string list
val read_file: Path.T -> Position.T -> Path.T -> Token.file
- val read: (unit -> theory) -> Path.T -> blob list -> Token.T list -> Toplevel.transition
+ val read: (unit -> theory) -> Path.T -> content blob list -> Token.T list -> Toplevel.transition
type eval
val eval_eq: eval * eval -> bool
val eval_running: eval -> bool
val eval_finished: eval -> bool
val eval_result_state: eval -> Toplevel.state
- val eval: (unit -> theory) -> Path.T -> blob list -> Token.T list -> eval -> eval
+ val eval: (unit -> theory) -> Path.T -> content blob list -> Token.T list -> eval -> eval
type print
val print: bool -> (string * string list) list -> string ->
eval -> print list -> print list option
@@ -86,13 +88,29 @@
(* read *)
-type blob = (string * string option) Exn.result; (*file node name, digest or text*)
+type 'a blob = (string * 'a option) Exn.result; (*file node name, content*)
+type blob_digest = string blob; (*raw digest*)
+type content = SHA1.digest * string list; (*digest, lines*)
fun read_file master_dir pos src_path =
let
val full_path = File.check_file (File.full_path master_dir src_path);
val _ = Position.report pos (Markup.path (Path.implode full_path));
- in {src_path = src_path, text = File.read full_path, pos = Path.position full_path} end;
+ val text = File.read full_path;
+ val lines = split_lines text;
+ val digest = SHA1.digest text;
+ in {src_path = src_path, lines = lines, digest = digest, pos = Path.position full_path} end;
+
+local
+
+fun blob_file src_path file (digest, lines) =
+ let
+ val file_pos =
+ Position.file file (*sic!*) |>
+ (case Position.get_id (Position.thread_data ()) of
+ NONE => I
+ | SOME exec_id => Position.put_id exec_id);
+ in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end
fun resolve_files master_dir blobs toks =
(case Thy_Syntax.parse_spans toks of
@@ -101,17 +119,10 @@
let
fun make_file src_path (Exn.Res (_, NONE)) =
Exn.interruptible_capture (fn () => read_file master_dir pos src_path) ()
- | make_file src_path (Exn.Res (file, SOME text)) =
- let
- val _ = Position.report pos (Markup.path file);
- val file_pos =
- Position.file file (*sic!*) |>
- (case Position.get_id (Position.thread_data ()) of
- NONE => I
- | SOME exec_id => Position.put_id exec_id);
- in Exn.Res {src_path = src_path, text = text, pos = file_pos} end
+ | make_file src_path (Exn.Res (file, SOME content)) =
+ (Position.report pos (Markup.path file);
+ Exn.Res (blob_file src_path file content))
| make_file _ (Exn.Exn e) = Exn.Exn e;
-
val src_paths = Keyword.command_files cmd path;
in
if null blobs then
@@ -123,6 +134,8 @@
|> Thy_Syntax.span_content
| _ => toks);
+in
+
fun read init master_dir blobs span =
let
val outer_syntax = #2 (Outer_Syntax.get_syntax ());
@@ -149,6 +162,8 @@
handle ERROR msg => Toplevel.malformed (#1 proper_range) msg
end;
+end;
+
(* eval *)
--- a/src/Pure/PIDE/command.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/PIDE/command.scala Thu Feb 27 21:36:40 2014 +0100
@@ -228,6 +228,8 @@
val range = Text.Range(0, length)
private val symbol_index = Symbol.Index(text)
def decode(r: Text.Range): Text.Range = symbol_index.decode(r)
+
+ override def toString: String = "Command.File(" + file_name + ")"
}
@@ -338,6 +340,9 @@
/* blobs */
+ def blobs_changed(doc_blobs: Document.Blobs): Boolean =
+ blobs.exists({ case Exn.Res((name, _)) => doc_blobs.changed(name) case _ => false })
+
def blobs_names: List[Document.Node.Name] =
for (Exn.Res((name, _)) <- blobs) yield name
--- a/src/Pure/PIDE/document.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/PIDE/document.ML Thu Feb 27 21:36:40 2014 +0100
@@ -18,7 +18,7 @@
type state
val init_state: state
val define_blob: string -> string -> state -> state
- val define_command: Document_ID.command -> string -> Command.blob list -> string ->
+ val define_command: Document_ID.command -> string -> Command.blob_digest list -> string ->
state -> state
val remove_versions: Document_ID.version list -> state -> state
val start_execution: state -> state
@@ -234,8 +234,8 @@
abstype state = State of
{versions: version Inttab.table, (*version id -> document content*)
- blobs: string Symtab.table, (*digest -> text*)
- commands: (string * Command.blob list * Token.T list lazy) Inttab.table,
+    blobs: (SHA1.digest * string list) Symtab.table,  (*raw digest -> SHA1 digest, lines*)
+ commands: (string * Command.blob_digest list * Token.T list lazy) Inttab.table,
(*command id -> name, inlined files, command span*)
execution: execution} (*current execution process*)
with
@@ -275,13 +275,18 @@
fun define_blob digest text =
map_state (fn (versions, blobs, commands, execution) =>
- let val blobs' = Symtab.update (digest, text) blobs
+ let
+ val sha1_digest = SHA1.digest text;
+ val _ =
+ digest = SHA1.rep sha1_digest orelse
+ error ("Ill-defined blob: bad SHA1 digest " ^ digest ^ " vs. " ^ SHA1.rep sha1_digest);
+ val blobs' = Symtab.update (digest, (sha1_digest, split_lines text)) blobs;
in (versions, blobs', commands, execution) end);
fun the_blob (State {blobs, ...}) digest =
(case Symtab.lookup blobs digest of
NONE => error ("Undefined blob: " ^ digest)
- | SOME text => text);
+ | SOME content => content);
(* commands *)
@@ -496,8 +501,8 @@
val command_visible = visible_command node command_id';
val command_overlays = overlays node command_id';
- val (command_name, blobs0, span0) = the_command state command_id';
- val blobs = (map o Exn.map_result o apsnd o Option.map) (the_blob state) blobs0;
+ val (command_name, blob_digests, span0) = the_command state command_id';
+ val blobs = (map o Exn.map_result o apsnd o Option.map) (the_blob state) blob_digests;
val span = Lazy.force span0;
val eval' =
--- a/src/Pure/PIDE/document.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/PIDE/document.scala Thu Feb 27 21:36:40 2014 +0100
@@ -24,6 +24,8 @@
final class Overlays private(rep: Map[Node.Name, Node.Overlays])
{
+ override def toString: String = rep.mkString("Overlays(", ",", ")")
+
def apply(name: Document.Node.Name): Node.Overlays =
rep.getOrElse(name, Node.Overlays.empty)
@@ -41,14 +43,39 @@
}
- /* individual nodes */
+ /* document blobs: auxiliary files */
+
+ sealed case class Blob(bytes: Bytes, file: Command.File, changed: Boolean)
+
+ object Blobs
+ {
+ def apply(blobs: Map[Node.Name, Blob]): Blobs = new Blobs(blobs)
+ val empty: Blobs = apply(Map.empty)
+ }
+
+ final class Blobs private(blobs: Map[Node.Name, Blob])
+ {
+ override def toString: String = blobs.mkString("Blobs(", ",", ")")
+
+ def get(name: Node.Name): Option[Blob] = blobs.get(name)
+
+ def changed(name: Node.Name): Boolean =
+ get(name) match {
+ case Some(blob) => blob.changed
+ case None => false
+ }
+
+ def retrieve(digest: SHA1.Digest): Option[Blob] =
+ blobs.collectFirst({ case (_, blob) if blob.bytes.sha1_digest == digest => blob })
+ }
+
+
+ /* document nodes: theories and auxiliary files */
type Edit[A, B] = (Node.Name, Node.Edit[A, B])
type Edit_Text = Edit[Text.Edit, Text.Perspective]
type Edit_Command = Edit[Command.Edit, Command.Perspective]
- type Blobs = Map[Node.Name, (Bytes, Command.File)]
-
object Node
{
val empty: Node = new Node()
@@ -104,6 +131,8 @@
final class Overlays private(rep: Multi_Map[Command, (String, List[String])])
{
+ override def toString: String = rep.mkString("Node.Overlays(", ",", ")")
+
def commands: Set[Command] = rep.keySet
def is_empty: Boolean = rep.isEmpty
def dest: List[(Command, (String, List[String]))] = rep.iterator.toList
@@ -292,6 +321,8 @@
val nodes: Nodes = Nodes.empty)
{
def is_init: Boolean = id == Document_ID.none
+
+ override def toString: String = "Version(" + id + ")"
}
--- a/src/Pure/PIDE/protocol.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/PIDE/protocol.scala Thu Feb 27 21:36:40 2014 +0100
@@ -315,8 +315,9 @@
{
/* inlined files */
- def define_blob(blob: Bytes): Unit =
- protocol_command_raw("Document.define_blob", Bytes(blob.sha1_digest.toString), blob)
+ def define_blob(blob: Document.Blob): Unit =
+ protocol_command_raw(
+ "Document.define_blob", Bytes(blob.bytes.sha1_digest.toString), blob.bytes)
/* commands */
--- a/src/Pure/System/session.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/System/session.scala Thu Feb 27 21:36:40 2014 +0100
@@ -378,11 +378,12 @@
digest <- command.blobs_digests
if !global_state().defined_blob(digest)
} {
- doc_blobs.collectFirst({ case (_, (b, _)) if b.sha1_digest == digest => b }) match {
+ doc_blobs.retrieve(digest) match {
case Some(blob) =>
global_state >> (_.define_blob(digest))
prover.get.define_blob(blob)
- case None => System.err.println("Missing blob for SHA1 digest " + digest)
+ case None =>
+ System.err.println("Missing blob for SHA1 digest " + digest)
}
}
@@ -524,7 +525,7 @@
case Update_Options(options) if prover.isDefined =>
if (is_ready) {
prover.get.options(options)
- handle_raw_edits(Map.empty, Nil)
+ handle_raw_edits(Document.Blobs.empty, Nil)
}
global_options.event(Session.Global_Options(options))
reply(())
--- a/src/Pure/Thy/thy_load.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/Thy/thy_load.ML Thu Feb 27 21:36:40 2014 +0100
@@ -100,7 +100,7 @@
parse_files cmd >> (fn files => fn thy =>
let
val fs = files thy;
- val thy' = fold (fn {src_path, text, ...} => provide (src_path, SHA1.digest text)) fs thy;
+ val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy;
in (fs, thy') end);
fun load_file thy src_path =
--- a/src/Pure/Thy/thy_syntax.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/Thy/thy_syntax.scala Thu Feb 27 21:36:40 2014 +0100
@@ -268,27 +268,25 @@
Exn.capture {
val name =
Document.Node.Name(thy_load.append(node_name.master_dir, Path.explode(file_name)))
- val blob =
- doc_blobs.get(name) match {
- case Some((bytes, file)) => Some((bytes.sha1_digest, file))
- case None => None
- }
+        val blob = doc_blobs.get(name).map(blob => (blob.bytes.sha1_digest, blob.file))
(name, blob)
- }
- )
+ })
}
/* reparse range of command spans */
@tailrec private def chop_common(
- cmds: List[Command], spans: List[(List[Command.Blob], List[Token])])
- : (List[Command], List[(List[Command.Blob], List[Token])]) =
- (cmds, spans) match {
- case (c :: cs, (blobs, span) :: ps) if c.blobs == blobs && c.span == span =>
- chop_common(cs, ps)
- case _ => (cmds, spans)
+ cmds: List[Command],
+ blobs_spans: List[(List[Command.Blob], List[Token])])
+ : (List[Command], List[(List[Command.Blob], List[Token])]) =
+ {
+ (cmds, blobs_spans) match {
+ case (cmd :: cmds, (blobs, span) :: rest) if cmd.blobs == blobs && cmd.span == span =>
+ chop_common(cmds, rest)
+ case _ => (cmds, blobs_spans)
}
+ }
private def reparse_spans(
thy_load: Thy_Load,
@@ -299,24 +297,24 @@
first: Command, last: Command): Linear_Set[Command] =
{
val cmds0 = commands.iterator(first, last).toList
- val spans0 =
+ val blobs_spans0 =
parse_spans(syntax.scan(cmds0.iterator.map(_.source).mkString)).
map(span => (resolve_files(thy_load, syntax, name, span, doc_blobs), span))
- val (cmds1, spans1) = chop_common(cmds0, spans0)
+ val (cmds1, blobs_spans1) = chop_common(cmds0, blobs_spans0)
- val (rev_cmds2, rev_spans2) = chop_common(cmds1.reverse, spans1.reverse)
+ val (rev_cmds2, rev_blobs_spans2) = chop_common(cmds1.reverse, blobs_spans1.reverse)
val cmds2 = rev_cmds2.reverse
- val spans2 = rev_spans2.reverse
+ val blobs_spans2 = rev_blobs_spans2.reverse
cmds2 match {
case Nil =>
- assert(spans2.isEmpty)
+ assert(blobs_spans2.isEmpty)
commands
case cmd :: _ =>
val hook = commands.prev(cmd)
val inserted =
- spans2.map({ case (blobs, span) => Command(Document_ID.make(), name, blobs, span) })
+ blobs_spans2.map({ case (blobs, span) => Command(Document_ID.make(), name, blobs, span) })
(commands /: cmds2)(_ - _).append_after(hook, inserted)
}
}
@@ -450,8 +448,9 @@
val reparse =
(reparse0 /: nodes0.entries)({
case (reparse, (name, node)) =>
- if (node.thy_load_commands.isEmpty) reparse
- else name :: reparse
+ if (node.thy_load_commands.exists(_.blobs_changed(doc_blobs)))
+ name :: reparse
+ else reparse
})
val reparse_set = reparse.toSet
--- a/src/Pure/pure_syn.ML Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Pure/pure_syn.ML Thu Feb 27 21:36:40 2014 +0100
@@ -22,11 +22,11 @@
"ML text from file"
(Thy_Load.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
let
- val [{src_path, text, pos}] = files (Context.theory_of gthy);
- val provide = Thy_Load.provide (src_path, SHA1.digest text);
+ val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy);
+ val provide = Thy_Load.provide (src_path, digest);
in
gthy
- |> ML_Context.exec (fn () => ML_Context.eval_text true pos text)
+ |> ML_Context.exec (fn () => ML_Context.eval_text true pos (cat_lines lines))
|> Local_Theory.propagate_ml_env
|> Context.mapping provide (Local_Theory.background_theory provide)
end)));
--- a/src/Tools/jEdit/etc/options Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Tools/jEdit/etc/options Thu Feb 27 21:36:40 2014 +0100
@@ -59,7 +59,6 @@
option unprocessed1_color : string = "FFA0A032"
option running_color : string = "610061FF"
option running1_color : string = "61006164"
-option light_color : string = "F0F0F064"
option bullet_color : string = "000000FF"
option tooltip_color : string = "FFFFE9FF"
option writeln_color : string = "C0C0C0FF"
--- a/src/Tools/jEdit/src/document_model.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Tools/jEdit/src/document_model.scala Thu Feb 27 21:36:40 2014 +0100
@@ -2,7 +2,8 @@
Author: Fabian Immler, TU Munich
Author: Makarius
-Document model connected to jEdit buffer -- single node in theory graph.
+Document model connected to jEdit buffer (node in theory graph or
+auxiliary file).
*/
package isabelle.jedit
@@ -98,7 +99,7 @@
val empty_perspective: Document.Node.Perspective_Text =
Document.Node.Perspective(false, Text.Perspective.empty, Document.Node.Overlays.empty)
- def node_perspective(): Document.Node.Perspective_Text =
+ def node_perspective(doc_blobs: Document.Blobs): (Boolean, Document.Node.Perspective_Text) =
{
Swing_Thread.require()
@@ -124,11 +125,14 @@
range = snapshot.convert(cmd.proper_range + start)
} yield range
- Document.Node.Perspective(node_required,
- Text.Perspective(document_view_ranges ::: thy_load_ranges),
- PIDE.editor.node_overlays(node_name))
+ val reparse = snapshot.node.thy_load_commands.exists(_.blobs_changed(doc_blobs))
+
+ (reparse,
+ Document.Node.Perspective(node_required,
+ Text.Perspective(document_view_ranges ::: thy_load_ranges),
+ PIDE.editor.node_overlays(node_name)))
}
- else empty_perspective
+ else (false, empty_perspective)
}
@@ -138,39 +142,27 @@
private def reset_blob(): Unit = Swing_Thread.require { _blob = None }
- def blob(): (Bytes, Command.File) =
+ def get_blob(): Option[Document.Blob] =
Swing_Thread.require {
- _blob match {
- case Some(x) => x
- case None =>
- val b = PIDE.thy_load.file_content(buffer)
- val file = new Command.File(node_name.node, buffer.getSegment(0, buffer.getLength))
- _blob = Some((b, file))
- (b, file)
+ if (is_theory) None
+ else {
+ val (bytes, file) =
+ _blob match {
+ case Some(x) => x
+ case None =>
+ val bytes = PIDE.thy_load.file_content(buffer)
+ val file = new Command.File(node_name.node, buffer.getSegment(0, buffer.getLength))
+ _blob = Some((bytes, file))
+ (bytes, file)
+ }
+ val changed = pending_edits.is_pending()
+ Some(Document.Blob(bytes, file, changed))
}
}
/* edits */
- def init_edits(): List[Document.Edit_Text] =
- {
- Swing_Thread.require()
-
- val header = node_header()
- val text = JEdit_Lib.buffer_text(buffer)
- val perspective = node_perspective()
-
- if (is_theory)
- List(session.header_edit(node_name, header),
- node_name -> Document.Node.Clear(),
- node_name -> Document.Node.Edits(List(Text.Edit.insert(0, text))),
- node_name -> perspective)
- else
- List(node_name -> Document.Node.Blob(),
- node_name -> Document.Node.Edits(List(Text.Edit.insert(0, text))))
- }
-
def node_edits(
clear: Boolean,
text_edits: List[Text.Edit],
@@ -204,14 +196,15 @@
private val pending = new mutable.ListBuffer[Text.Edit]
private var last_perspective = empty_perspective
+ def is_pending(): Boolean = pending_clear || !pending.isEmpty
def snapshot(): List[Text.Edit] = pending.toList
- def flushed_edits(): List[Document.Edit_Text] =
+ def flushed_edits(doc_blobs: Document.Blobs): List[Document.Edit_Text] =
{
val clear = pending_clear
val edits = snapshot()
- val perspective = node_perspective()
- if (clear || !edits.isEmpty || last_perspective != perspective) {
+ val (reparse, perspective) = node_perspective(doc_blobs)
+ if (clear || reparse || !edits.isEmpty || last_perspective != perspective) {
pending_clear = false
pending.clear
last_perspective = perspective
@@ -236,8 +229,8 @@
def snapshot(): Document.Snapshot =
Swing_Thread.require { session.snapshot(node_name, pending_edits.snapshot()) }
- def flushed_edits(): List[Document.Edit_Text] =
- Swing_Thread.require { pending_edits.flushed_edits() }
+ def flushed_edits(doc_blobs: Document.Blobs): List[Document.Edit_Text] =
+ Swing_Thread.require { pending_edits.flushed_edits(doc_blobs) }
/* buffer listener */
@@ -246,7 +239,7 @@
{
override def bufferLoaded(buffer: JEditBuffer)
{
- pending_edits.edit(true, Text.Edit.insert(0, buffer.getText(0, buffer.getLength)))
+ pending_edits.edit(true, Text.Edit.insert(0, JEdit_Lib.buffer_text(buffer)))
}
override def contentInserted(buffer: JEditBuffer,
@@ -269,6 +262,7 @@
private def activate()
{
+ pending_edits.edit(true, Text.Edit.insert(0, JEdit_Lib.buffer_text(buffer)))
buffer.addBufferListener(buffer_listener)
Token_Markup.refresh_buffer(buffer)
}
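
get_blob above answers None for theory buffers; for auxiliary files it computes the (bytes, file) pair at most once, caches it in _blob, and tags the result with whether edits are still pending, which is what later feeds the blobs_changed reparse check. The cache-plus-dirty-flag shape in isolation (a hypothetical helper class, not the jEdit code):

// Compute an expensive value at most once, and pair each lookup with a
// "still changed?" flag taken from a separate pending-edits predicate.
final class BlobCache[A](compute: () => A, pending: () => Boolean) {
  private var cached: Option[A] = None

  def reset(): Unit = { cached = None }   // analogue of reset_blob()

  def get(): (A, Boolean) = {
    val value = cached.getOrElse { val v = compute(); cached = Some(v); v }
    (value, pending())                    // value plus "changed" flag
  }
}

In the real model the flag ends up in Document.Blob, and blobs_changed then forces a reparse of the theories that load the file.
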
--- a/src/Tools/jEdit/src/jedit_editor.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Tools/jEdit/src/jedit_editor.scala Thu Feb 27 21:36:40 2014 +0100
@@ -26,8 +26,9 @@
{
Swing_Thread.require()
- val edits = PIDE.document_models().flatMap(_.flushed_edits())
- if (!edits.isEmpty) session.update(PIDE.document_blobs(), edits)
+ val doc_blobs = PIDE.document_blobs()
+ val edits = PIDE.document_models().flatMap(_.flushed_edits(doc_blobs))
+ if (!edits.isEmpty) session.update(doc_blobs, edits)
}
private val delay_flush =
--- a/src/Tools/jEdit/src/plugin.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Tools/jEdit/src/plugin.scala Thu Feb 27 21:36:40 2014 +0100
@@ -38,10 +38,7 @@
@volatile var plugin: Plugin = null
@volatile var session: Session = new Session(new JEdit_Thy_Load(Set.empty, Outer_Syntax.empty))
- def options_changed() {
- session.global_options.event(Session.Global_Options(options.value))
- plugin.load_theories()
- }
+ def options_changed() { plugin.options_changed() }
def thy_load(): JEdit_Thy_Load =
session.thy_load.asInstanceOf[JEdit_Thy_Load]
@@ -83,7 +80,12 @@
} yield model
def document_blobs(): Document.Blobs =
- document_models().filterNot(_.is_theory).map(model => (model.node_name -> model.blob())).toMap
+ Document.Blobs(
+ (for {
+ buffer <- JEdit_Lib.jedit_buffers()
+ model <- document_model(buffer)
+ blob <- model.get_blob()
+ } yield (model.node_name -> blob)).toMap)
def exit_models(buffers: List[Buffer])
{
@@ -97,32 +99,30 @@
}
}
- def init_models(buffers: List[Buffer])
+ def init_models()
{
Swing_Thread.now {
PIDE.editor.flush()
- val init_edits =
- (List.empty[Document.Edit_Text] /: buffers) { case (edits, buffer) =>
- JEdit_Lib.buffer_lock(buffer) {
- if (buffer.getBooleanProperty(Buffer.GZIPPED)) edits
- else {
- val node_name = thy_load.node_name(buffer)
- val (model_edits, model) =
- document_model(buffer) match {
- case Some(model) if model.node_name == node_name => (Nil, model)
- case _ =>
- val model = Document_Model.init(session, buffer, node_name)
- (model.init_edits(), model)
- }
- for {
- text_area <- JEdit_Lib.jedit_text_areas(buffer)
- if document_view(text_area).map(_.model) != Some(model)
- } Document_View.init(model, text_area)
- model_edits ::: edits
+
+ for {
+ buffer <- JEdit_Lib.jedit_buffers()
+ if buffer != null && !buffer.isLoading && !buffer.getBooleanProperty(Buffer.GZIPPED)
+ } {
+ JEdit_Lib.buffer_lock(buffer) {
+ val node_name = thy_load.node_name(buffer)
+ val model =
+ document_model(buffer) match {
+ case Some(model) if model.node_name == node_name => model
+ case _ => Document_Model.init(session, buffer, node_name)
}
- }
+ for {
+ text_area <- JEdit_Lib.jedit_text_areas(buffer)
+ if document_view(text_area).map(_.model) != Some(model)
+ } Document_View.init(model, text_area)
}
- session.update(document_blobs(), init_edits)
+ }
+
+ PIDE.editor.invoke()
}
}
@@ -168,16 +168,22 @@
class Plugin extends EBPlugin
{
+ /* options */
+
+ def options_changed() {
+ PIDE.session.global_options.event(Session.Global_Options(PIDE.options.value))
+ Swing_Thread.later { delay_load.invoke() }
+ }
+
+
/* theory files */
private lazy val delay_init =
Swing_Thread.delay_last(PIDE.options.seconds("editor_load_delay"))
{
- PIDE.init_models(JEdit_Lib.jedit_buffers().toList)
+ PIDE.init_models()
}
- def load_theories() { Swing_Thread.later { delay_load.invoke() }}
-
private lazy val delay_load =
Swing_Thread.delay_last(PIDE.options.seconds("editor_load_delay"))
{
@@ -245,7 +251,7 @@
case Session.Ready =>
PIDE.session.update_options(PIDE.options.value)
- PIDE.init_models(JEdit_Lib.jedit_buffers().toList)
+ PIDE.init_models()
Swing_Thread.later { delay_load.invoke() }
case Session.Shutdown =>
@@ -285,9 +291,8 @@
case msg: BufferUpdate
if msg.getWhat == BufferUpdate.LOADED || msg.getWhat == BufferUpdate.PROPERTIES_CHANGED =>
if (PIDE.session.is_ready) {
- val buffer = msg.getBuffer
- if (buffer != null && !buffer.isLoading) delay_init.invoke()
- load_theories()
+ delay_init.invoke()
+ delay_load.invoke()
}
case msg: EditPaneUpdate
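
document_blobs above now walks every jEdit buffer, keeps only those whose model yields a blob, and collects the survivors into a Map wrapped as Document.Blobs. The underlying for-comprehension pattern, reduced to stand-in types (illustrative names only):

object BlobsMapSketch {
  final case class Blob(content: String)

  // Stand-ins: a model exists for usable buffers, a blob only for non-theory files.
  def document_model(buffer: String): Option[String] =
    if (buffer.isEmpty) None else Some(buffer)
  def get_blob(name: String): Option[Blob] =
    if (name.endsWith(".thy")) None else Some(Blob("bytes of " + name))

  def document_blobs(buffers: List[String]): Map[String, Blob] =
    (for {
      buffer <- buffers
      name <- document_model(buffer)
      blob <- get_blob(name)
    } yield name -> blob).toMap

  def main(args: Array[String]): Unit =
    println(document_blobs(List("A.thy", "util.ML", "")))
    // Map(util.ML -> Blob(bytes of util.ML))
}
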
--- a/src/Tools/jEdit/src/rendering.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Tools/jEdit/src/rendering.scala Thu Feb 27 21:36:40 2014 +0100
@@ -201,14 +201,12 @@
private val separator_elements = Set(Markup.SEPARATOR)
- private val background1_elements =
+ private val background_elements =
Protocol.command_status_elements + Markup.WRITELN_MESSAGE +
Markup.TRACING_MESSAGE + Markup.WARNING_MESSAGE +
Markup.ERROR_MESSAGE + Markup.BAD + Markup.INTENSIFY ++
active_elements
- private val background2_elements = Set(Markup.TOKEN_RANGE)
-
private val foreground_elements =
Set(Markup.STRING, Markup.ALTSTRING, Markup.VERBATIM,
Markup.CARTOUCHE, Markup.ANTIQUOTED)
@@ -234,7 +232,6 @@
val unprocessed1_color = color_value("unprocessed1_color")
val running_color = color_value("running_color")
val running1_color = color_value("running1_color")
- val light_color = color_value("light_color")
val bullet_color = color_value("bullet_color")
val tooltip_color = color_value("tooltip_color")
val writeln_color = color_value("writeln_color")
@@ -624,14 +621,14 @@
/* text background */
- def background1(range: Text.Range): List[Text.Info[Color]] =
+ def background(range: Text.Range): List[Text.Info[Color]] =
{
if (snapshot.is_outdated) List(Text.Info(range, outdated_color))
else
for {
Text.Info(r, result) <-
snapshot.cumulate[(Option[Protocol.Status], Option[Color])](
- range, (Some(Protocol.Status.init), None), Rendering.background1_elements,
+ range, (Some(Protocol.Status.init), None), Rendering.background_elements,
command_state =>
{
case (((Some(status), color), Text.Info(_, XML.Elem(markup, _))))
@@ -663,9 +660,6 @@
} yield Text.Info(r, color)
}
- def background2(range: Text.Range): List[Text.Info[Color]] =
- snapshot.select(range, Rendering.background2_elements, _ => _ => Some(light_color))
-
/* text foreground */
--- a/src/Tools/jEdit/src/rich_text_area.scala Thu Feb 27 18:07:53 2014 +0100
+++ b/src/Tools/jEdit/src/rich_text_area.scala Thu Feb 27 21:36:40 2014 +0100
@@ -245,9 +245,9 @@
gfx.fillRect(0, y + i * line_height, text_area.getWidth, line_height - sep)
}
- // background color (1)
+ // background color
for {
- Text.Info(range, color) <- rendering.background1(line_range)
+ Text.Info(range, color) <- rendering.background(line_range)
r <- JEdit_Lib.gfx_range(text_area, range)
} {
gfx.setColor(color)
@@ -264,15 +264,6 @@
gfx.fillRect(r.x, y + i * line_height, r.length, line_height)
}
- // background color (2)
- for {
- Text.Info(range, color) <- rendering.background2(line_range)
- r <- JEdit_Lib.gfx_range(text_area, range)
- } {
- gfx.setColor(color)
- gfx.fillRect(r.x + 2, y + i * line_height + 2, r.length - 4, line_height - 4)
- }
-
// squiggly underline
for {
Text.Info(range, color) <- rendering.squiggly_underline(line_range)