--- a/doc-src/Classes/Thy/Classes.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/doc-src/Classes/Thy/Classes.thy Mon Jul 05 14:21:53 2010 +0100
@@ -194,7 +194,7 @@
using our simple algebra:
*}
-instantiation %quote * :: (semigroup, semigroup) semigroup
+instantiation %quote prod :: (semigroup, semigroup) semigroup
begin
definition %quote
@@ -260,7 +260,7 @@
end %quote
-instantiation %quote * :: (monoidl, monoidl) monoidl
+instantiation %quote prod :: (monoidl, monoidl) monoidl
begin
definition %quote
@@ -297,7 +297,7 @@
end %quote
-instantiation %quote * :: (monoid, monoid) monoid
+instantiation %quote prod :: (monoid, monoid) monoid
begin
instance %quote proof
--- a/doc-src/Classes/Thy/document/Classes.tex Mon Jul 05 14:21:30 2010 +0100
+++ b/doc-src/Classes/Thy/document/Classes.tex Mon Jul 05 14:21:53 2010 +0100
@@ -286,7 +286,7 @@
%
\isatagquote
\isacommand{instantiation}\isamarkupfalse%
-\ {\isacharasterisk}\ {\isacharcolon}{\isacharcolon}\ {\isacharparenleft}semigroup{\isacharcomma}\ semigroup{\isacharparenright}\ semigroup\isanewline
+\ prod\ {\isacharcolon}{\isacharcolon}\ {\isacharparenleft}semigroup{\isacharcomma}\ semigroup{\isacharparenright}\ semigroup\isanewline
\isakeyword{begin}\isanewline
\isanewline
\isacommand{definition}\isamarkupfalse%
@@ -405,7 +405,7 @@
\isanewline
\isanewline
\isacommand{instantiation}\isamarkupfalse%
-\ {\isacharasterisk}\ {\isacharcolon}{\isacharcolon}\ {\isacharparenleft}monoidl{\isacharcomma}\ monoidl{\isacharparenright}\ monoidl\isanewline
+\ prod\ {\isacharcolon}{\isacharcolon}\ {\isacharparenleft}monoidl{\isacharcomma}\ monoidl{\isacharparenright}\ monoidl\isanewline
\isakeyword{begin}\isanewline
\isanewline
\isacommand{definition}\isamarkupfalse%
@@ -479,7 +479,7 @@
\isanewline
\isanewline
\isacommand{instantiation}\isamarkupfalse%
-\ {\isacharasterisk}\ {\isacharcolon}{\isacharcolon}\ {\isacharparenleft}monoid{\isacharcomma}\ monoid{\isacharparenright}\ monoid\isanewline
+\ prod\ {\isacharcolon}{\isacharcolon}\ {\isacharparenleft}monoid{\isacharcomma}\ monoid{\isacharparenright}\ monoid\isanewline
\isakeyword{begin}\isanewline
\isanewline
\isacommand{instance}\isamarkupfalse%
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Codegenerator_Test/Candidates.thy Mon Jul 05 14:21:53 2010 +0100
@@ -0,0 +1,25 @@
+
+(* Author: Florian Haftmann, TU Muenchen *)
+
+header {* A huge collection of equations to generate code from *}
+
+theory Candidates
+imports
+ Complex_Main
+ Library
+ List_Prefix
+ "~~/src/HOL/Number_Theory/Primes"
+ "~~/src/HOL/ex/Records"
+begin
+
+inductive sublist :: "'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
+ empty: "sublist [] xs"
+ | drop: "sublist ys xs \<Longrightarrow> sublist ys (x # xs)"
+ | take: "sublist ys xs \<Longrightarrow> sublist (x # ys) (x # xs)"
+
+code_pred sublist .
+
+(*avoid popular infix*)
+code_reserved SML upto
+
+end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Codegenerator_Test/Candidates_Pretty.thy Mon Jul 05 14:21:53 2010 +0100
@@ -0,0 +1,10 @@
+
+(* Author: Florian Haftmann, TU Muenchen *)
+
+header {* Generating code using pretty literals and natural number literals *}
+
+theory Candidates_Pretty
+imports Candidates Code_Char Efficient_Nat
+begin
+
+end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Codegenerator_Test/Generate.thy Mon Jul 05 14:21:53 2010 +0100
@@ -0,0 +1,15 @@
+
+(* Author: Florian Haftmann, TU Muenchen *)
+
+header {* Pervasive test of code generator *}
+
+theory Generate
+imports Candidates
+begin
+
+export_code * in SML module_name Code_Test
+ in OCaml module_name Code_Test file -
+ in Haskell file -
+ in Scala file -
+
+end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Codegenerator_Test/Generate_Pretty.thy Mon Jul 05 14:21:53 2010 +0100
@@ -0,0 +1,20 @@
+
+(* Author: Florian Haftmann, TU Muenchen *)
+
+header {* Pervasive test of code generator using pretty literals *}
+
+theory Generate_Pretty
+imports Candidates_Pretty
+begin
+
+lemma [code, code del]: "nat_of_char = nat_of_char" ..
+lemma [code, code del]: "char_of_nat = char_of_nat" ..
+lemma [code, code del]: "(less_eq :: char \<Rightarrow> _) = less_eq" ..
+lemma [code, code del]: "(less :: char \<Rightarrow> _) = less" ..
+
+export_code * in SML module_name Code_Test
+ in OCaml module_name Code_Test file -
+ in Haskell file -
+ in Scala file -
+
+end
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Codegenerator_Test/ROOT.ML Mon Jul 05 14:21:53 2010 +0100
@@ -0,0 +1,1 @@
+use_thys ["Generate", "Generate_Pretty"];
--- a/src/HOL/Imperative_HOL/Array.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Imperative_HOL/Array.thy Mon Jul 05 14:21:53 2010 +0100
@@ -28,7 +28,7 @@
[code del]: "nth a i = (do len \<leftarrow> length a;
(if i < len
then Heap_Monad.heap (\<lambda>h. (get_array a h ! i, h))
- else raise (''array lookup: index out of range''))
+ else raise ''array lookup: index out of range'')
done)"
definition
@@ -37,18 +37,12 @@
[code del]: "upd i x a = (do len \<leftarrow> length a;
(if i < len
then Heap_Monad.heap (\<lambda>h. (a, Heap.upd a i x h))
- else raise (''array update: index out of range''))
+ else raise ''array update: index out of range'')
done)"
lemma upd_return:
"upd i x a \<guillemotright> return a = upd i x a"
-proof (rule Heap_eqI)
- fix h
- obtain len h' where "Heap_Monad.execute (Array.length a) h = (len, h')"
- by (cases "Heap_Monad.execute (Array.length a) h")
- then show "Heap_Monad.execute (upd i x a \<guillemotright> return a) h = Heap_Monad.execute (upd i x a) h"
- by (auto simp add: upd_def bindM_def split: sum.split)
-qed
+ by (rule Heap_eqI) (simp add: upd_def bindM_def split: option.split)
subsection {* Derivates *}
@@ -99,14 +93,11 @@
lemma array_make [code]:
"Array.new n x = make n (\<lambda>_. x)"
- by (induct n) (simp_all add: make_def new_def Heap_Monad.heap_def
- monad_simp array_of_list_replicate [symmetric]
- map_replicate_trivial replicate_append_same
- of_list_def)
+ by (rule Heap_eqI) (simp add: make_def new_def array_of_list_replicate map_replicate_trivial of_list_def)
lemma array_of_list_make [code]:
"of_list xs = make (List.length xs) (\<lambda>n. xs ! n)"
- unfolding make_def map_nth ..
+ by (rule Heap_eqI) (simp add: make_def map_nth)
subsection {* Code generator setup *}
@@ -135,13 +126,11 @@
by (simp add: make'_def o_def)
definition length' where
- [code del]: "length' = Array.length \<guillemotright>== liftM Code_Numeral.of_nat"
+ [code del]: "length' a = Array.length a \<guillemotright>= (\<lambda>n. return (Code_Numeral.of_nat n))"
hide_const (open) length'
lemma [code]:
- "Array.length = Array.length' \<guillemotright>== liftM Code_Numeral.nat_of"
- by (simp add: length'_def monad_simp',
- simp add: liftM_def comp_def monad_simp,
- simp add: monad_simp')
+ "Array.length a = Array.length' a \<guillemotright>= (\<lambda>i. return (Code_Numeral.nat_of i))"
+ by (simp add: length'_def)
definition nth' where
[code del]: "nth' a = Array.nth a o Code_Numeral.nat_of"
@@ -155,7 +144,7 @@
hide_const (open) upd'
lemma [code]:
"Array.upd i x a = Array.upd' a (Code_Numeral.of_nat i) x \<guillemotright> return a"
- by (simp add: upd'_def monad_simp upd_return)
+ by (simp add: upd'_def upd_return)
subsubsection {* SML *}
--- a/src/HOL/Imperative_HOL/Heap_Monad.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Imperative_HOL/Heap_Monad.thy Mon Jul 05 14:21:53 2010 +0100
@@ -12,16 +12,12 @@
subsubsection {* Monad combinators *}
-datatype exception = Exn
-
text {* Monadic heap actions either produce values
and transform the heap, or fail *}
-datatype 'a Heap = Heap "heap \<Rightarrow> ('a + exception) \<times> heap"
+datatype 'a Heap = Heap "heap \<Rightarrow> ('a \<times> heap) option"
-primrec
- execute :: "'a Heap \<Rightarrow> heap \<Rightarrow> ('a + exception) \<times> heap" where
- "execute (Heap f) = f"
-lemmas [code del] = execute.simps
+primrec execute :: "'a Heap \<Rightarrow> heap \<Rightarrow> ('a \<times> heap) option" where
+ [code del]: "execute (Heap f) = f"
lemma Heap_execute [simp]:
"Heap (execute f) = f" by (cases f) simp_all
@@ -34,58 +30,67 @@
"(\<And>h. (\<lambda>x. execute (f x) h) = (\<lambda>y. execute (g y) h)) \<Longrightarrow> f = g"
by (auto simp: expand_fun_eq intro: Heap_eqI)
-lemma Heap_strip: "(\<And>f. PROP P f) \<equiv> (\<And>g. PROP P (Heap g))"
-proof
- fix g :: "heap \<Rightarrow> ('a + exception) \<times> heap"
- assume "\<And>f. PROP P f"
- then show "PROP P (Heap g)" .
-next
- fix f :: "'a Heap"
- assume assm: "\<And>g. PROP P (Heap g)"
- then have "PROP P (Heap (execute f))" .
- then show "PROP P f" by simp
-qed
-
-definition
- heap :: "(heap \<Rightarrow> 'a \<times> heap) \<Rightarrow> 'a Heap" where
- [code del]: "heap f = Heap (\<lambda>h. apfst Inl (f h))"
+definition heap :: "(heap \<Rightarrow> 'a \<times> heap) \<Rightarrow> 'a Heap" where
+ [code del]: "heap f = Heap (Some \<circ> f)"
lemma execute_heap [simp]:
- "execute (heap f) h = apfst Inl (f h)"
+ "execute (heap f) = Some \<circ> f"
by (simp add: heap_def)
-definition
- bindM :: "'a Heap \<Rightarrow> ('a \<Rightarrow> 'b Heap) \<Rightarrow> 'b Heap" (infixl ">>=" 54) where
- [code del]: "f >>= g = Heap (\<lambda>h. case execute f h of
- (Inl x, h') \<Rightarrow> execute (g x) h'
- | r \<Rightarrow> r)"
-
-notation
- bindM (infixl "\<guillemotright>=" 54)
+lemma heap_cases [case_names succeed fail]:
+ fixes f and h
+ assumes succeed: "\<And>x h'. execute f h = Some (x, h') \<Longrightarrow> P"
+ assumes fail: "execute f h = None \<Longrightarrow> P"
+ shows P
+ using assms by (cases "execute f h") auto
-abbreviation
- chainM :: "'a Heap \<Rightarrow> 'b Heap \<Rightarrow> 'b Heap" (infixl ">>" 54) where
- "f >> g \<equiv> f >>= (\<lambda>_. g)"
-
-notation
- chainM (infixl "\<guillemotright>" 54)
-
-definition
- return :: "'a \<Rightarrow> 'a Heap" where
+definition return :: "'a \<Rightarrow> 'a Heap" where
[code del]: "return x = heap (Pair x)"
lemma execute_return [simp]:
- "execute (return x) h = apfst Inl (x, h)"
+ "execute (return x) = Some \<circ> Pair x"
by (simp add: return_def)
-definition
- raise :: "string \<Rightarrow> 'a Heap" where -- {* the string is just decoration *}
- [code del]: "raise s = Heap (Pair (Inr Exn))"
+definition raise :: "string \<Rightarrow> 'a Heap" where -- {* the string is just decoration *}
+ [code del]: "raise s = Heap (\<lambda>_. None)"
lemma execute_raise [simp]:
- "execute (raise s) h = (Inr Exn, h)"
+ "execute (raise s) = (\<lambda>_. None)"
by (simp add: raise_def)
+definition bindM :: "'a Heap \<Rightarrow> ('a \<Rightarrow> 'b Heap) \<Rightarrow> 'b Heap" (infixl ">>=" 54) where
+ [code del]: "f >>= g = Heap (\<lambda>h. case execute f h of
+ Some (x, h') \<Rightarrow> execute (g x) h'
+ | None \<Rightarrow> None)"
+
+notation bindM (infixl "\<guillemotright>=" 54)
+
+lemma execute_bind [simp]:
+ "execute f h = Some (x, h') \<Longrightarrow> execute (f \<guillemotright>= g) h = execute (g x) h'"
+ "execute f h = None \<Longrightarrow> execute (f \<guillemotright>= g) h = None"
+ by (simp_all add: bindM_def)
+
+lemma execute_bind_heap [simp]:
+ "execute (heap f \<guillemotright>= g) h = execute (g (fst (f h))) (snd (f h))"
+ by (simp add: bindM_def split_def)
+
+lemma return_bind [simp]: "return x \<guillemotright>= f = f x"
+ by (rule Heap_eqI) simp
+
+lemma bind_return [simp]: "f \<guillemotright>= return = f"
+ by (rule Heap_eqI) (simp add: bindM_def split: option.splits)
+
+lemma bind_bind [simp]: "(f \<guillemotright>= g) \<guillemotright>= k = f \<guillemotright>= (\<lambda>x. g x \<guillemotright>= k)"
+ by (rule Heap_eqI) (simp add: bindM_def split: option.splits)
+
+lemma raise_bind [simp]: "raise e \<guillemotright>= f = raise e"
+ by (rule Heap_eqI) simp
+
+abbreviation chainM :: "'a Heap \<Rightarrow> 'b Heap \<Rightarrow> 'b Heap" (infixl ">>" 54) where
+ "f >> g \<equiv> f >>= (\<lambda>_. g)"
+
+notation chainM (infixl "\<guillemotright>" 54)
+
subsubsection {* do-syntax *}
@@ -170,88 +175,10 @@
subsection {* Monad properties *}
-subsubsection {* Monad laws *}
-
-lemma return_bind: "return x \<guillemotright>= f = f x"
- by (simp add: bindM_def return_def)
-
-lemma bind_return: "f \<guillemotright>= return = f"
-proof (rule Heap_eqI)
- fix h
- show "execute (f \<guillemotright>= return) h = execute f h"
- by (auto simp add: bindM_def return_def split: sum.splits prod.splits)
-qed
-
-lemma bind_bind: "(f \<guillemotright>= g) \<guillemotright>= h = f \<guillemotright>= (\<lambda>x. g x \<guillemotright>= h)"
- by (rule Heap_eqI) (auto simp add: bindM_def split: split: sum.splits prod.splits)
-
-lemma bind_bind': "f \<guillemotright>= (\<lambda>x. g x \<guillemotright>= h x) = f \<guillemotright>= (\<lambda>x. g x \<guillemotright>= (\<lambda>y. return (x, y))) \<guillemotright>= (\<lambda>(x, y). h x y)"
- by (rule Heap_eqI) (auto simp add: bindM_def split: split: sum.splits prod.splits)
-
-lemma raise_bind: "raise e \<guillemotright>= f = raise e"
- by (simp add: raise_def bindM_def)
-
-
-lemmas monad_simp = return_bind bind_return bind_bind raise_bind
-
-
subsection {* Generic combinators *}
-definition
- liftM :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b Heap"
-where
- "liftM f = return o f"
-
-definition
- compM :: "('a \<Rightarrow> 'b Heap) \<Rightarrow> ('b \<Rightarrow> 'c Heap) \<Rightarrow> 'a \<Rightarrow> 'c Heap" (infixl ">>==" 54)
-where
- "(f >>== g) = (\<lambda>x. f x \<guillemotright>= g)"
-
-notation
- compM (infixl "\<guillemotright>==" 54)
-
-lemma liftM_collapse: "liftM f x = return (f x)"
- by (simp add: liftM_def)
-
-lemma liftM_compM: "liftM f \<guillemotright>== g = g o f"
- by (auto intro: Heap_eqI' simp add: expand_fun_eq liftM_def compM_def bindM_def)
-
-lemma compM_return: "f \<guillemotright>== return = f"
- by (simp add: compM_def monad_simp)
-
-lemma compM_compM: "(f \<guillemotright>== g) \<guillemotright>== h = f \<guillemotright>== (g \<guillemotright>== h)"
- by (simp add: compM_def monad_simp)
-
-lemma liftM_bind:
- "(\<lambda>x. liftM f x \<guillemotright>= liftM g) = liftM (\<lambda>x. g (f x))"
- by (rule Heap_eqI') (simp add: monad_simp liftM_def bindM_def)
-
-lemma liftM_comp:
- "liftM f o g = liftM (f o g)"
- by (rule Heap_eqI') (simp add: liftM_def)
-
-lemmas monad_simp' = monad_simp liftM_compM compM_return
- compM_compM liftM_bind liftM_comp
-
-primrec
- mapM :: "('a \<Rightarrow> 'b Heap) \<Rightarrow> 'a list \<Rightarrow> 'b list Heap"
-where
- "mapM f [] = return []"
- | "mapM f (x#xs) = do y \<leftarrow> f x;
- ys \<leftarrow> mapM f xs;
- return (y # ys)
- done"
-
-primrec
- foldM :: "('a \<Rightarrow> 'b \<Rightarrow> 'b Heap) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b Heap"
-where
- "foldM f [] s = return s"
- | "foldM f (x#xs) s = f x s \<guillemotright>= foldM f xs"
-
-definition
- assert :: "('a \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'a Heap"
-where
- "assert P x = (if P x then return x else raise (''assert''))"
+definition assert :: "('a \<Rightarrow> bool) \<Rightarrow> 'a \<Rightarrow> 'a Heap" where
+ "assert P x = (if P x then return x else raise ''assert'')"
lemma assert_cong [fundef_cong]:
assumes "P = P'"
@@ -259,28 +186,42 @@
shows "(assert P x >>= f) = (assert P' x >>= f')"
using assms by (auto simp add: assert_def return_bind raise_bind)
+definition liftM :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b Heap" where
+ "liftM f = return o f"
+
+lemma liftM_collapse [simp]:
+ "liftM f x = return (f x)"
+ by (simp add: liftM_def)
+
+lemma bind_liftM:
+ "(f \<guillemotright>= liftM g) = (f \<guillemotright>= (\<lambda>x. return (g x)))"
+ by (simp add: liftM_def comp_def)
+
+primrec mapM :: "('a \<Rightarrow> 'b Heap) \<Rightarrow> 'a list \<Rightarrow> 'b list Heap" where
+ "mapM f [] = return []"
+| "mapM f (x#xs) = do
+ y \<leftarrow> f x;
+ ys \<leftarrow> mapM f xs;
+ return (y # ys)
+ done"
+
+
subsubsection {* A monadic combinator for simple recursive functions *}
text {* Using a locale to fix arguments f and g of MREC *}
locale mrec =
-fixes
- f :: "'a => ('b + 'a) Heap"
- and g :: "'a => 'a => 'b => 'b Heap"
+ fixes f :: "'a \<Rightarrow> ('b + 'a) Heap"
+ and g :: "'a \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> 'b Heap"
begin
-function (default "\<lambda>(x,h). (Inr Exn, undefined)")
- mrec
-where
- "mrec x h =
- (case Heap_Monad.execute (f x) h of
- (Inl (Inl r), h') \<Rightarrow> (Inl r, h')
- | (Inl (Inr s), h') \<Rightarrow>
- (case mrec s h' of
- (Inl z, h'') \<Rightarrow> Heap_Monad.execute (g x s z) h''
- | (Inr e, h'') \<Rightarrow> (Inr e, h''))
- | (Inr e, h') \<Rightarrow> (Inr e, h')
- )"
+function (default "\<lambda>(x, h). None") mrec :: "'a \<Rightarrow> heap \<Rightarrow> ('b \<times> heap) option" where
+ "mrec x h = (case execute (f x) h of
+ Some (Inl r, h') \<Rightarrow> Some (r, h')
+ | Some (Inr s, h') \<Rightarrow> (case mrec s h' of
+ Some (z, h'') \<Rightarrow> execute (g x s z) h''
+ | None \<Rightarrow> None)
+ | None \<Rightarrow> None)"
by auto
lemma graph_implies_dom:
@@ -290,38 +231,37 @@
apply (erule mrec_rel.cases)
by simp
-lemma mrec_default: "\<not> mrec_dom (x, h) \<Longrightarrow> mrec x h = (Inr Exn, undefined)"
+lemma mrec_default: "\<not> mrec_dom (x, h) \<Longrightarrow> mrec x h = None"
unfolding mrec_def
by (rule fundef_default_value[OF mrec_sumC_def graph_implies_dom, of _ _ "(x, h)", simplified])
lemma mrec_di_reverse:
assumes "\<not> mrec_dom (x, h)"
shows "
- (case Heap_Monad.execute (f x) h of
- (Inl (Inl r), h') \<Rightarrow> False
- | (Inl (Inr s), h') \<Rightarrow> \<not> mrec_dom (s, h')
- | (Inr e, h') \<Rightarrow> False
+ (case execute (f x) h of
+ Some (Inl r, h') \<Rightarrow> False
+ | Some (Inr s, h') \<Rightarrow> \<not> mrec_dom (s, h')
+ | None \<Rightarrow> False
)"
-using assms
-by (auto split:prod.splits sum.splits)
- (erule notE, rule accpI, elim mrec_rel.cases, simp)+
-
+using assms apply (auto split: option.split sum.split)
+apply (rule ccontr)
+apply (erule notE, rule accpI, elim mrec_rel.cases, auto)+
+done
lemma mrec_rule:
"mrec x h =
- (case Heap_Monad.execute (f x) h of
- (Inl (Inl r), h') \<Rightarrow> (Inl r, h')
- | (Inl (Inr s), h') \<Rightarrow>
+ (case execute (f x) h of
+ Some (Inl r, h') \<Rightarrow> Some (r, h')
+ | Some (Inr s, h') \<Rightarrow>
(case mrec s h' of
- (Inl z, h'') \<Rightarrow> Heap_Monad.execute (g x s z) h''
- | (Inr e, h'') \<Rightarrow> (Inr e, h''))
- | (Inr e, h') \<Rightarrow> (Inr e, h')
+ Some (z, h'') \<Rightarrow> execute (g x s z) h''
+ | None \<Rightarrow> None)
+ | None \<Rightarrow> None
)"
apply (cases "mrec_dom (x,h)", simp)
apply (frule mrec_default)
apply (frule mrec_di_reverse, simp)
-by (auto split: sum.split prod.split simp: mrec_default)
-
+by (auto split: sum.split option.split simp: mrec_default)
definition
"MREC x = Heap (mrec x)"
@@ -340,32 +280,31 @@
apply simp
apply (rule ext)
apply (unfold mrec_rule[of x])
- by (auto split:prod.splits sum.splits)
-
+ by (auto split: option.splits prod.splits sum.splits)
lemma MREC_pinduct:
- assumes "Heap_Monad.execute (MREC x) h = (Inl r, h')"
- assumes non_rec_case: "\<And> x h h' r. Heap_Monad.execute (f x) h = (Inl (Inl r), h') \<Longrightarrow> P x h h' r"
- assumes rec_case: "\<And> x h h1 h2 h' s z r. Heap_Monad.execute (f x) h = (Inl (Inr s), h1) \<Longrightarrow> Heap_Monad.execute (MREC s) h1 = (Inl z, h2) \<Longrightarrow> P s h1 h2 z
- \<Longrightarrow> Heap_Monad.execute (g x s z) h2 = (Inl r, h') \<Longrightarrow> P x h h' r"
+ assumes "execute (MREC x) h = Some (r, h')"
+ assumes non_rec_case: "\<And> x h h' r. execute (f x) h = Some (Inl r, h') \<Longrightarrow> P x h h' r"
+ assumes rec_case: "\<And> x h h1 h2 h' s z r. execute (f x) h = Some (Inr s, h1) \<Longrightarrow> execute (MREC s) h1 = Some (z, h2) \<Longrightarrow> P s h1 h2 z
+ \<Longrightarrow> execute (g x s z) h2 = Some (r, h') \<Longrightarrow> P x h h' r"
shows "P x h h' r"
proof -
- from assms(1) have mrec: "mrec x h = (Inl r, h')"
+ from assms(1) have mrec: "mrec x h = Some (r, h')"
unfolding MREC_def execute.simps .
from mrec have dom: "mrec_dom (x, h)"
apply -
apply (rule ccontr)
apply (drule mrec_default) by auto
- from mrec have h'_r: "h' = (snd (mrec x h))" "r = (Sum_Type.Projl (fst (mrec x h)))"
+ from mrec have h'_r: "h' = snd (the (mrec x h))" "r = fst (the (mrec x h))"
by auto
- from mrec have "P x h (snd (mrec x h)) (Sum_Type.Projl (fst (mrec x h)))"
+ from mrec have "P x h (snd (the (mrec x h))) (fst (the (mrec x h)))"
proof (induct arbitrary: r h' rule: mrec.pinduct[OF dom])
case (1 x h)
- obtain rr h' where "mrec x h = (rr, h')" by fastsimp
- obtain fret h1 where exec_f: "Heap_Monad.execute (f x) h = (fret, h1)" by fastsimp
+ obtain rr h' where "the (mrec x h) = (rr, h')" by fastsimp
show ?case
- proof (cases fret)
- case (Inl a)
+ proof (cases "execute (f x) h")
+ case (Some result)
+ then obtain a h1 where exec_f: "execute (f x) h = Some (a, h1)" by fastsimp
note Inl' = this
show ?thesis
proof (cases a)
@@ -375,23 +314,28 @@
next
case (Inr b)
note Inr' = this
- obtain ret_mrec h2 where mrec_rec: "mrec b h1 = (ret_mrec, h2)" by fastsimp
- from this Inl 1(1) exec_f mrec show ?thesis
- proof (cases "ret_mrec")
- case (Inl aaa)
- from this mrec exec_f Inl' Inr' 1(1) mrec_rec 1(2) [OF exec_f [symmetric] Inl' Inr', of "aaa" "h2"] 1(3)
- show ?thesis
- apply auto
- apply (rule rec_case)
- unfolding MREC_def by auto
+ show ?thesis
+ proof (cases "mrec b h1")
+ case (Some result)
+ then obtain aaa h2 where mrec_rec: "mrec b h1 = Some (aaa, h2)" by fastsimp
+ moreover from this have "P b h1 (snd (the (mrec b h1))) (fst (the (mrec b h1)))"
+ apply (intro 1(2))
+ apply (auto simp add: Inr Inl')
+ done
+ moreover note mrec mrec_rec exec_f Inl' Inr' 1(1) 1(3)
+ ultimately show ?thesis
+ apply auto
+ apply (rule rec_case)
+ apply auto
+ unfolding MREC_def by auto
next
- case (Inr b)
- from this Inl 1(1) exec_f mrec Inr' mrec_rec 1(3) show ?thesis by auto
+ case None
+ from this 1(1) exec_f mrec Inr' 1(3) show ?thesis by auto
qed
qed
next
- case (Inr b)
- from this 1(1) mrec exec_f 1(3) show ?thesis by simp
+ case None
+ from this 1(1) mrec 1(3) show ?thesis by simp
qed
qed
from this h'_r show ?thesis by simp
@@ -412,38 +356,29 @@
subsubsection {* Logical intermediate layer *}
-definition
- Fail :: "String.literal \<Rightarrow> exception"
-where
- [code del]: "Fail s = Exn"
+primrec raise' :: "String.literal \<Rightarrow> 'a Heap" where
+ [code del, code_post]: "raise' (STR s) = raise s"
-definition
- raise_exc :: "exception \<Rightarrow> 'a Heap"
-where
- [code del]: "raise_exc e = raise []"
+lemma raise_raise' [code_inline]:
+ "raise s = raise' (STR s)"
+ by simp
-lemma raise_raise_exc [code, code_unfold]:
- "raise s = raise_exc (Fail (STR s))"
- unfolding Fail_def raise_exc_def raise_def ..
+code_datatype raise' -- {* avoid @{const "Heap"} formally *}
-hide_const (open) Fail raise_exc
+hide_const (open) raise'
subsubsection {* SML and OCaml *}
code_type Heap (SML "unit/ ->/ _")
-code_const Heap (SML "raise/ (Fail/ \"bare Heap\")")
code_const "op \<guillemotright>=" (SML "!(fn/ f'_/ =>/ fn/ ()/ =>/ f'_/ (_/ ())/ ())")
code_const return (SML "!(fn/ ()/ =>/ _)")
-code_const "Heap_Monad.Fail" (SML "Fail")
-code_const "Heap_Monad.raise_exc" (SML "!(fn/ ()/ =>/ raise/ _)")
+code_const Heap_Monad.raise' (SML "!(raise/ Fail/ _)")
code_type Heap (OCaml "_")
-code_const Heap (OCaml "failwith/ \"bare Heap\"")
code_const "op \<guillemotright>=" (OCaml "!(fun/ f'_/ ()/ ->/ f'_/ (_/ ())/ ())")
code_const return (OCaml "!(fun/ ()/ ->/ _)")
-code_const "Heap_Monad.Fail" (OCaml "Failure")
-code_const "Heap_Monad.raise_exc" (OCaml "!(fun/ ()/ ->/ raise/ _)")
+code_const Heap_Monad.raise' (OCaml "failwith/ _")
setup {*
@@ -514,8 +449,6 @@
*}
-code_reserved OCaml Failure raise
-
subsubsection {* Haskell *}
@@ -556,10 +489,8 @@
text {* Monad *}
code_type Heap (Haskell "Heap.ST/ Heap.RealWorld/ _")
-code_const Heap (Haskell "error/ \"bare Heap\"")
code_monad "op \<guillemotright>=" Haskell
code_const return (Haskell "return")
-code_const "Heap_Monad.Fail" (Haskell "_")
-code_const "Heap_Monad.raise_exc" (Haskell "error")
+code_const Heap_Monad.raise' (Haskell "error/ _")
end
--- a/src/HOL/Imperative_HOL/Ref.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Imperative_HOL/Ref.thy Mon Jul 05 14:21:53 2010 +0100
@@ -1,5 +1,4 @@
(* Title: HOL/Library/Ref.thy
- ID: $Id$
Author: John Matthews, Galois Connections; Alexander Krauss, Lukas Bulwahn & Florian Haftmann, TU Muenchen
*)
@@ -53,7 +52,7 @@
lemma update_change [code]:
"r := e = Ref.change (\<lambda>_. e) r \<guillemotright> return ()"
- by (auto simp add: monad_simp change_def lookup_chain)
+ by (auto simp add: change_def lookup_chain)
subsection {* Code generator setup *}
--- a/src/HOL/Imperative_HOL/Relational.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Imperative_HOL/Relational.thy Mon Jul 05 14:21:53 2010 +0100
@@ -9,10 +9,10 @@
definition crel :: "'a Heap \<Rightarrow> heap \<Rightarrow> heap \<Rightarrow> 'a \<Rightarrow> bool"
where
- crel_def': "crel c h h' r \<longleftrightarrow> Heap_Monad.execute c h = (Inl r, h')"
+ crel_def': "crel c h h' r \<longleftrightarrow> Heap_Monad.execute c h = Some (r, h')"
lemma crel_def: -- FIXME
- "crel c h h' r \<longleftrightarrow> (Inl r, h') = Heap_Monad.execute c h"
+ "crel c h h' r \<longleftrightarrow> Some (r, h') = Heap_Monad.execute c h"
unfolding crel_def' by auto
lemma crel_deterministic: "\<lbrakk> crel f h h' a; crel f h h'' b \<rbrakk> \<Longrightarrow> (a = b) \<and> (h' = h'')"
@@ -28,8 +28,7 @@
lemma crelE[consumes 1]:
assumes "crel (f >>= g) h h'' r'"
obtains h' r where "crel f h h' r" "crel (g r) h' h'' r'"
- using assms
- by (auto simp add: crel_def bindM_def Let_def prod_case_beta split_def Pair_fst_snd_eq split add: sum.split_asm)
+ using assms by (auto simp add: crel_def bindM_def split: option.split_asm)
lemma crelE'[consumes 1]:
assumes "crel (f >> g) h h'' r'"
@@ -86,8 +85,8 @@
lemma crel_heap:
assumes "crel (Heap_Monad.heap f) h h' r"
obtains "h' = snd (f h)" "r = fst (f h)"
- using assms
- unfolding heap_def crel_def apfst_def split_def prod_fun_def by simp_all
+ using assms by (cases "f h") (simp add: crel_def)
+
subsection {* Elimination rules for array commands *}
@@ -369,11 +368,9 @@
apply (cases f)
apply simp
apply (simp add: expand_fun_eq split_def)
+apply (auto split: option.split)
+apply (erule_tac x="x" in meta_allE)
apply auto
-apply (case_tac "fst (fun x)")
-apply (simp_all add: Pair_fst_snd_eq)
-apply (erule_tac x="x" in meta_allE)
-apply fastsimp
done
section {* Introduction rules *}
@@ -502,10 +499,10 @@
shows "P x h h' r"
proof (rule MREC_pinduct[OF assms(1)[unfolded crel_def, symmetric]])
fix x h h1 h2 h' s z r
- assume "Heap_Monad.execute (f x) h = (Inl (Inr s), h1)"
- "Heap_Monad.execute (MREC f g s) h1 = (Inl z, h2)"
+ assume "Heap_Monad.execute (f x) h = Some (Inr s, h1)"
+ "Heap_Monad.execute (MREC f g s) h1 = Some (z, h2)"
"P s h1 h2 z"
- "Heap_Monad.execute (g x s z) h2 = (Inl r, h')"
+ "Heap_Monad.execute (g x s z) h2 = Some (r, h')"
from assms(3)[unfolded crel_def, OF this(1)[symmetric] this(2)[symmetric] this(3) this(4)[symmetric]]
show "P x h h' r" .
next
@@ -519,15 +516,15 @@
definition noError :: "'a Heap \<Rightarrow> heap \<Rightarrow> bool"
where
- "noError c h \<longleftrightarrow> (\<exists>r h'. (Inl r, h') = Heap_Monad.execute c h)"
+ "noError c h \<longleftrightarrow> (\<exists>r h'. Some (r, h') = Heap_Monad.execute c h)"
lemma noError_def': -- FIXME
- "noError c h \<longleftrightarrow> (\<exists>r h'. Heap_Monad.execute c h = (Inl r, h'))"
+ "noError c h \<longleftrightarrow> (\<exists>r h'. Heap_Monad.execute c h = Some (r, h'))"
unfolding noError_def apply auto proof -
fix r h'
- assume "(Inl r, h') = Heap_Monad.execute c h"
- then have "Heap_Monad.execute c h = (Inl r, h')" ..
- then show "\<exists>r h'. Heap_Monad.execute c h = (Inl r, h')" by blast
+ assume "Some (r, h') = Heap_Monad.execute c h"
+ then have "Heap_Monad.execute c h = Some (r, h')" ..
+ then show "\<exists>r h'. Heap_Monad.execute c h = Some (r, h')" by blast
qed
subsection {* Introduction rules for basic monadic commands *}
@@ -640,7 +637,7 @@
(*TODO: move to HeapMonad *)
lemma mapM_append:
"mapM f (xs @ ys) = mapM f xs \<guillemotright>= (\<lambda>xs. mapM f ys \<guillemotright>= (\<lambda>ys. return (xs @ ys)))"
- by (induct xs) (simp_all add: monad_simp)
+ by (induct xs) simp_all
lemma noError_freeze:
shows "noError (freeze a) h"
--- a/src/HOL/Imperative_HOL/ex/Linked_Lists.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Imperative_HOL/ex/Linked_Lists.thy Mon Jul 05 14:21:53 2010 +0100
@@ -56,7 +56,7 @@
return (x#xs)
done"
unfolding traverse_def
-by (auto simp: traverse_def monad_simp MREC_rule)
+by (auto simp: traverse_def MREC_rule)
section {* Proving correctness with relational abstraction *}
@@ -531,7 +531,7 @@
done"
unfolding rev'_def MREC_rule[of _ _ "(q, p)"] unfolding rev'_def[symmetric]
thm arg_cong2
- by (auto simp add: monad_simp expand_fun_eq intro: arg_cong2[where f = "op \<guillemotright>="] split: node.split)
+ by (auto simp add: expand_fun_eq intro: arg_cong2[where f = "op \<guillemotright>="] split: node.split)
fun rev :: "('a:: heap) node \<Rightarrow> 'a node Heap"
where
--- a/src/HOL/Imperative_HOL/ex/SatChecker.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Imperative_HOL/ex/SatChecker.thy Mon Jul 05 14:21:53 2010 +0100
@@ -402,6 +402,12 @@
res_thm' l cli clj
done))"
+primrec
+ foldM :: "('a \<Rightarrow> 'b \<Rightarrow> 'b Heap) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b Heap"
+where
+ "foldM f [] s = return s"
+ | "foldM f (x#xs) s = f x s \<guillemotright>= foldM f xs"
+
fun doProofStep2 :: "Clause option array \<Rightarrow> ProofStep \<Rightarrow> Clause list \<Rightarrow> Clause list Heap"
where
"doProofStep2 a (Conflict saveTo (i, rs)) rcs =
--- a/src/HOL/IsaMakefile Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/IsaMakefile Mon Jul 05 14:21:53 2010 +0100
@@ -10,27 +10,28 @@
images: \
HOL \
+ HOL-Library \
HOL-Algebra \
- HOL-Base \
HOL-Boogie \
- HOL-Main \
HOL-Multivariate_Analysis \
HOL-NSA \
HOL-Nominal \
- HOL-Plain \
HOL-Probability \
HOL-Proofs \
HOL-Word \
HOL4 \
- TLA
+ TLA \
+ HOL-Base \
+ HOL-Main \
+ HOL-Plain
-#Note: keep targets sorted (except for HOL-Library and HOL-ex)
+#Note: keep targets sorted (except for HOL-ex)
test: \
- HOL-Library \
HOL-ex \
HOL-Auth \
HOL-Bali \
HOL-Boogie-Examples \
+ HOL-Codegenerator_Test \
HOL-Decision_Procs \
HOL-Hahn_Banach \
HOL-Hoare \
@@ -392,10 +393,11 @@
## HOL-Library
-HOL-Library: HOL $(LOG)/HOL-Library.gz
+HOL-Library: HOL $(OUT)/HOL-Library
-$(LOG)/HOL-Library.gz: $(OUT)/HOL $(SRC)/HOL/Tools/float_arith.ML \
- $(SRC)/Tools/float.ML Library/Abstract_Rat.thy Library/AssocList.thy \
+$(OUT)/HOL-Library: $(OUT)/HOL library.ML \
+ $(SRC)/HOL/Tools/float_arith.ML $(SRC)/Tools/float.ML \
+ Library/Abstract_Rat.thy Library/AssocList.thy \
Library/BigO.thy Library/Binomial.thy Library/Bit.thy \
Library/Boolean_Algebra.thy Library/Cardinality.thy \
Library/Char_nat.thy Library/Code_Char.thy Library/Code_Char_chr.thy \
@@ -408,7 +410,7 @@
Library/FuncSet.thy Library/Fundamental_Theorem_Algebra.thy \
Library/Glbs.thy Library/Indicator_Function.thy \
Library/Infinite_Set.thy Library/Inner_Product.thy \
- Library/HOL_Library_ROOT.ML Library/Kleene_Algebra.thy \
+ Library/Kleene_Algebra.thy \
Library/LaTeXsugar.thy Library/Lattice_Algebras.thy \
Library/Lattice_Syntax.thy Library/Library.thy \
Library/List_Prefix.thy Library/List_lexord.thy Library/Mapping.thy \
@@ -432,10 +434,10 @@
Library/Sum_Of_Squares/sum_of_squares.ML \
Library/Transitive_Closure_Table.thy Library/Univ_Poly.thy \
Library/While_Combinator.thy Library/Zorn.thy \
- Library/document/root.bib Library/document/root.tex \
Library/positivstellensatz.ML Library/reflection.ML \
- Library/reify_data.ML
- @$(ISABELLE_TOOL) usedir -f HOL_Library_ROOT.ML $(OUT)/HOL Library
+ Library/reify_data.ML \
+ Library/document/root.bib Library/document/root.tex
+ @$(ISABELLE_TOOL) usedir -b -f library.ML $(OUT)/HOL HOL-Library
## HOL-Hahn_Banach
@@ -632,6 +634,18 @@
@$(ISABELLE_TOOL) usedir -g true $(OUT)/HOL Hoare_Parallel
+## HOL-Codegenerator_Test
+
+HOL-Codegenerator_Test: HOL-Library $(LOG)/HOL-Codegenerator_Test.gz
+
+$(LOG)/HOL-Codegenerator_Test.gz: $(OUT)/HOL-Library \
+ Codegenerator_Test/ROOT.ML Codegenerator_Test/Candidates.thy \
+ Codegenerator_Test/Candidates_Pretty.thy \
+ Codegenerator_Test/Generate.thy \
+ Codegenerator_Test/Generate_Pretty.thy
+ @$(ISABELLE_TOOL) usedir -d false -g false -i false $(OUT)/HOL-Library Codegenerator_Test
+
+
## HOL-Metis_Examples
HOL-Metis_Examples: HOL $(LOG)/HOL-Metis_Examples.gz
@@ -976,9 +990,7 @@
ex/Abstract_NAT.thy ex/Antiquote.thy ex/Arith_Examples.thy \
ex/Arithmetic_Series_Complex.thy ex/BT.thy ex/BinEx.thy \
ex/Binary.thy ex/CTL.thy ex/Chinese.thy ex/Classical.thy \
- ex/CodegenSML_Test.thy ex/Codegenerator_Candidates.thy \
- ex/Codegenerator_Pretty.thy ex/Codegenerator_Pretty_Test.thy \
- ex/Codegenerator_Test.thy ex/Coherent.thy ex/Dedekind_Real.thy \
+ ex/CodegenSML_Test.thy ex/Coherent.thy ex/Dedekind_Real.thy \
ex/Efficient_Nat_examples.thy ex/Eval_Examples.thy ex/Fundefs.thy \
ex/Gauge_Integration.thy ex/Groebner_Examples.thy ex/Guess.thy \
ex/HarmonicSeries.thy ex/Hebrew.thy ex/Hex_Bin_Examples.thy \
--- a/src/HOL/Library/Char_nat.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Library/Char_nat.thy Mon Jul 05 14:21:53 2010 +0100
@@ -53,8 +53,6 @@
"nibble_of_nat (n mod 16) = nibble_of_nat n"
unfolding nibble_of_nat_def mod_mod_trivial ..
-lemmas [code] = nibble_of_nat_norm [symmetric]
-
lemma nibble_of_nat_simps [simp]:
"nibble_of_nat 0 = Nibble0"
"nibble_of_nat 1 = Nibble1"
@@ -74,9 +72,6 @@
"nibble_of_nat 15 = NibbleF"
unfolding nibble_of_nat_def by auto
-lemmas nibble_of_nat_code [code] = nibble_of_nat_simps
- [simplified nat_number Let_def not_neg_number_of_Pls neg_number_of_Bit0 neg_number_of_Bit1 if_False add_0 add_Suc One_nat_def]
-
lemma nibble_of_nat_of_nibble: "nibble_of_nat (nat_of_nibble n) = n"
by (cases n) (simp_all only: nat_of_nibble.simps nibble_of_nat_simps)
--- a/src/HOL/Library/Fset.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Library/Fset.thy Mon Jul 05 14:21:53 2010 +0100
@@ -18,7 +18,7 @@
lemma Fset_member [simp]:
"Fset (member A) = A"
- by (rule member_inverse)
+ by (fact member_inverse)
declare member_inject [simp]
--- a/src/HOL/Library/HOL_Library_ROOT.ML Mon Jul 05 14:21:30 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-use_thys ["Library", "List_Prefix", "List_lexord", "Sublist_Order"];
--- a/src/HOL/Library/Library.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Library/Library.thy Mon Jul 05 14:21:53 2010 +0100
@@ -8,18 +8,14 @@
Bit
Boolean_Algebra
Char_ord
- Code_Char_chr
- Code_Integer
Continuity
ContNotDenum
Convex
Countable
Diagonalize
Dlist
- Efficient_Nat
Enum
Eval_Witness
- Executable_Set
Float
Formal_Power_Series
Fraction_Field
--- a/src/HOL/Library/Mapping.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Library/Mapping.thy Mon Jul 05 14:21:53 2010 +0100
@@ -8,19 +8,36 @@
subsection {* Type definition and primitive operations *}
-datatype ('a, 'b) mapping = Mapping "'a \<rightharpoonup> 'b"
+typedef (open) ('a, 'b) mapping = "UNIV :: ('a \<rightharpoonup> 'b) set"
+ morphisms lookup Mapping ..
+
+lemma lookup_Mapping [simp]:
+ "lookup (Mapping f) = f"
+ by (rule Mapping_inverse) rule
+
+lemma Mapping_lookup [simp]:
+ "Mapping (lookup m) = m"
+ by (fact lookup_inverse)
+
+declare lookup_inject [simp]
+
+lemma Mapping_inject [simp]:
+ "Mapping f = Mapping g \<longleftrightarrow> f = g"
+ by (simp add: Mapping_inject)
+
+lemma mapping_eqI:
+ assumes "lookup m = lookup n"
+ shows "m = n"
+ using assms by simp
definition empty :: "('a, 'b) mapping" where
"empty = Mapping (\<lambda>_. None)"
-primrec lookup :: "('a, 'b) mapping \<Rightarrow> 'a \<rightharpoonup> 'b" where
- "lookup (Mapping f) = f"
+definition update :: "'a \<Rightarrow> 'b \<Rightarrow> ('a, 'b) mapping \<Rightarrow> ('a, 'b) mapping" where
+ "update k v m = Mapping ((lookup m)(k \<mapsto> v))"
-primrec update :: "'a \<Rightarrow> 'b \<Rightarrow> ('a, 'b) mapping \<Rightarrow> ('a, 'b) mapping" where
- "update k v (Mapping f) = Mapping (f (k \<mapsto> v))"
-
-primrec delete :: "'a \<Rightarrow> ('a, 'b) mapping \<Rightarrow> ('a, 'b) mapping" where
- "delete k (Mapping f) = Mapping (f (k := None))"
+definition delete :: "'a \<Rightarrow> ('a, 'b) mapping \<Rightarrow> ('a, 'b) mapping" where
+ "delete k m = Mapping ((lookup m)(k := None))"
subsection {* Derived operations *}
@@ -59,15 +76,6 @@
subsection {* Properties *}
-lemma lookup_inject [simp]:
- "lookup m = lookup n \<longleftrightarrow> m = n"
- by (cases m, cases n) simp
-
-lemma mapping_eqI:
- assumes "lookup m = lookup n"
- shows "m = n"
- using assms by simp
-
lemma keys_is_none_lookup [code_inline]:
"k \<in> keys m \<longleftrightarrow> \<not> (Option.is_none (lookup m k))"
by (auto simp add: keys_def is_none_def)
@@ -78,11 +86,11 @@
lemma lookup_update [simp]:
"lookup (update k v m) = (lookup m) (k \<mapsto> v)"
- by (cases m) simp
+ by (simp add: update_def)
lemma lookup_delete [simp]:
"lookup (delete k m) = (lookup m) (k := None)"
- by (cases m) simp
+ by (simp add: delete_def)
lemma lookup_map_entry [simp]:
"lookup (map_entry k f m) = (lookup m) (k := Option.map f (lookup m k))"
@@ -150,11 +158,11 @@
lemma is_empty_update [simp]:
"\<not> is_empty (update k v m)"
- by (cases m) (simp add: is_empty_empty)
+ by (simp add: is_empty_empty update_def)
lemma is_empty_delete:
"is_empty (delete k m) \<longleftrightarrow> is_empty m \<or> keys m = {k}"
- by (cases m) (auto simp add: is_empty_empty keys_def dom_eq_empty_conv [symmetric] simp del: dom_eq_empty_conv)
+ by (auto simp add: delete_def is_empty_def keys_def simp del: dom_eq_empty_conv)
lemma is_empty_replace [simp]:
"is_empty (replace k v m) \<longleftrightarrow> is_empty m"
@@ -268,23 +276,20 @@
by (simp add: ordered_keys_def)
-subsection {* Some technical code lemmas *}
+subsection {* Code generator setup *}
-lemma [code]:
- "mapping_case f m = f (Mapping.lookup m)"
- by (cases m) simp
+code_datatype empty update
+
+instantiation mapping :: (type, type) eq
+begin
-lemma [code]:
- "mapping_rec f m = f (Mapping.lookup m)"
- by (cases m) simp
+definition [code del]:
+ "HOL.eq m n \<longleftrightarrow> lookup m = lookup n"
-lemma [code]:
- "Nat.size (m :: (_, _) mapping) = 0"
- by (cases m) simp
+instance proof
+qed (simp add: eq_mapping_def)
-lemma [code]:
- "mapping_size f g m = 0"
- by (cases m) simp
+end
hide_const (open) empty is_empty lookup update delete ordered_keys keys size
--- a/src/HOL/Nitpick.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Nitpick.thy Mon Jul 05 14:21:53 2010 +0100
@@ -69,9 +69,6 @@
apply (erule_tac x = y in allE)
by (auto simp: mem_def)
-lemma split_def [nitpick_def]: "split f = (\<lambda>p. f (fst p) (snd p))"
-by (simp add: prod_case_unfold split_def)
-
lemma rtrancl_def [nitpick_def, no_atp]: "r\<^sup>* \<equiv> (r\<^sup>+)\<^sup>="
by simp
@@ -252,12 +249,12 @@
less_frac less_eq_frac of_frac
hide_type (open) bisim_iterator fin_fun fun_box pair_box unsigned_bit signed_bit
word
-hide_fact (open) If_def Ex1_def split_def rtrancl_def rtranclp_def tranclp_def
- refl'_def wf'_def wf_wfrec'_def wfrec'_def card'_def setsum'_def
- fold_graph'_def The_psimp Eps_psimp unit_case_def nat_case_def
- list_size_simp nat_gcd_def nat_lcm_def int_gcd_def int_lcm_def Frac_def
- zero_frac_def one_frac_def num_def denom_def norm_frac_def frac_def
- plus_frac_def times_frac_def uminus_frac_def number_of_frac_def
- inverse_frac_def less_frac_def less_eq_frac_def of_frac_def
+hide_fact (open) If_def Ex1_def rtrancl_def rtranclp_def tranclp_def refl'_def
+ wf'_def wf_wfrec'_def wfrec'_def card'_def setsum'_def fold_graph'_def
+ The_psimp Eps_psimp unit_case_def nat_case_def list_size_simp nat_gcd_def
+ nat_lcm_def int_gcd_def int_lcm_def Frac_def zero_frac_def one_frac_def
+ num_def denom_def norm_frac_def frac_def plus_frac_def times_frac_def
+ uminus_frac_def number_of_frac_def inverse_frac_def less_frac_def
+ less_eq_frac_def of_frac_def
end
--- a/src/HOL/Nitpick_Examples/Core_Nits.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Nitpick_Examples/Core_Nits.thy Mon Jul 05 14:21:53 2010 +0100
@@ -17,11 +17,11 @@
subsection {* Curry in a Hurry *}
lemma "(\<lambda>f x y. (curry o split) f x y) = (\<lambda>f x y. (\<lambda>x. x) f x y)"
-nitpick [card = 1\<midarrow>12, expect = unknown (*none*)]
+nitpick [card = 1\<midarrow>12, expect = none]
by auto
lemma "(\<lambda>f p. (split o curry) f p) = (\<lambda>f p. (\<lambda>x. x) f p)"
-nitpick [card = 1\<midarrow>12, expect = unknown (*none*)]
+nitpick [card = 1\<midarrow>12, expect = none]
by auto
lemma "split (curry f) = f"
@@ -61,12 +61,12 @@
by auto
lemma "split o curry = (\<lambda>x. x)"
-nitpick [card = 1\<midarrow>12, expect = unknown (*none*)]
+nitpick [card = 1\<midarrow>12, expect = none]
apply (rule ext)+
by auto
lemma "curry o split = (\<lambda>x. x)"
-nitpick [card = 1\<midarrow>12, expect = unknown (*none*)]
+nitpick [card = 1\<midarrow>12, expect = none]
apply (rule ext)+
by auto
--- a/src/HOL/Product_Type.thy Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Product_Type.thy Mon Jul 05 14:21:53 2010 +0100
@@ -158,6 +158,8 @@
by (simp add: Pair_def Abs_prod_inject)
qed
+declare prod.simps(2) [nitpick_simp del]
+
declare weak_case_cong [cong del]
@@ -391,7 +393,7 @@
code_const fst and snd
(Haskell "fst" and "snd")
-lemma prod_case_unfold: "prod_case = (%c p. c (fst p) (snd p))"
+lemma prod_case_unfold [nitpick_def]: "prod_case = (%c p. c (fst p) (snd p))"
by (simp add: expand_fun_eq split: prod.split)
lemma fst_eqD: "fst (x, y) = a ==> x = a"
--- a/src/HOL/ROOT.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/ROOT.ML Mon Jul 05 14:21:53 2010 +0100
@@ -1,3 +1,4 @@
-(* Classical Higher-order Logic -- batteries included *)
+
+(* Classical Higher-order Logic *)
use_thys ["Complex_Main"];
--- a/src/HOL/Tools/Nitpick/nitpick.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick.ML Mon Jul 05 14:21:53 2010 +0100
@@ -876,6 +876,20 @@
"") ^ "."
end
+ val (skipped, the_scopes) =
+ all_scopes hol_ctxt binarize cards_assigns maxes_assigns iters_assigns
+ bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs
+ finitizable_dataTs
+ val _ = if skipped > 0 then
+ print_m (fn () => "Too many scopes. Skipping " ^
+ string_of_int skipped ^ " scope" ^
+ plural_s skipped ^
+ ". (Consider using \"mono\" or \
+ \\"merge_type_vars\" to prevent this.)")
+ else
+ ()
+ val _ = scopes := the_scopes
+
fun run_batches _ _ []
(found_really_genuine, max_potential, max_genuine, donno) =
if donno > 0 andalso max_genuine > 0 then
@@ -888,7 +902,7 @@
" This suggests that the induction hypothesis might be \
\general enough to prove this subgoal."
else
- "")); "none")
+ "")); if skipped > 0 then "unknown" else "none")
else
(print_m (fn () =>
"Nitpick could not find a" ^
@@ -903,20 +917,6 @@
run_batches (j + 1) n (if max_genuine > 0 then batches else []) z
end
- val (skipped, the_scopes) =
- all_scopes hol_ctxt binarize cards_assigns maxes_assigns iters_assigns
- bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs
- finitizable_dataTs
- val _ = if skipped > 0 then
- print_m (fn () => "Too many scopes. Skipping " ^
- string_of_int skipped ^ " scope" ^
- plural_s skipped ^
- ". (Consider using \"mono\" or \
- \\"merge_type_vars\" to prevent this.)")
- else
- ()
- val _ = scopes := the_scopes
-
val batches = batch_list batch_size (!scopes)
val outcome_code =
(run_batches 0 (length batches) batches
--- a/src/HOL/Tools/Nitpick/nitpick_mono.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_mono.ML Mon Jul 05 14:21:53 2010 +0100
@@ -628,7 +628,9 @@
end
and do_term t (accum as (gamma as {bound_Ts, bound_Ms, frees, consts},
cset)) =
- (case t of
+ (print_g (fn () => " \<Gamma> \<turnstile> " ^
+ Syntax.string_of_term ctxt t ^ " : _?");
+ case t of
Const (x as (s, T)) =>
(case AList.lookup (op =) consts x of
SOME M => (M, accum)
@@ -846,48 +848,54 @@
Plus => do_term t accum
| Minus => consider_general_equals mdata false x t1 t2 accum
in
- case t of
- Const (x as (@{const_name all}, _)) $ Abs (s1, T1, t1) =>
- do_quantifier x s1 T1 t1
- | Const (x as (@{const_name "=="}, _)) $ t1 $ t2 => do_equals x t1 t2
- | @{const Trueprop} $ t1 =>
- let val (m1, accum) = do_formula sn t1 accum in
- (MApp (MRaw (@{const Trueprop}, mtype_for (bool_T --> prop_T)),
- m1), accum)
- end
- | @{const Not} $ t1 =>
- let val (m1, accum) = do_formula (negate sn) t1 accum in
- (MApp (MRaw (@{const Not}, mtype_for (bool_T --> bool_T)), m1),
- accum)
- end
- | Const (x as (@{const_name All}, _)) $ Abs (s1, T1, t1) =>
- do_quantifier x s1 T1 t1
- | Const (x0 as (s0 as @{const_name Ex}, T0))
- $ (t1 as Abs (s1, T1, t1')) =>
- (case sn of
- Plus => do_quantifier x0 s1 T1 t1'
- | Minus =>
- (* FIXME: Move elsewhere *)
- do_term (@{const Not}
- $ (HOLogic.eq_const (domain_type T0) $ t1
- $ Abs (Name.uu, T1, @{const False}))) accum)
- | Const (x as (@{const_name "op ="}, _)) $ t1 $ t2 =>
- do_equals x t1 t2
- | (t0 as Const (s0, _)) $ t1 $ t2 =>
- if s0 = @{const_name "==>"} orelse s0 = @{const_name "op &"} orelse
- s0 = @{const_name "op |"} orelse s0 = @{const_name "op -->"} then
- let
- val impl = (s0 = @{const_name "==>"} orelse
- s0 = @{const_name "op -->"})
- val (m1, accum) = do_formula (sn |> impl ? negate) t1 accum
- val (m2, accum) = do_formula sn t2 accum
- in
- (MApp (MApp (MRaw (t0, mtype_for (fastype_of t0)), m1), m2),
- accum)
- end
- else
- do_term t accum
- | _ => do_term t accum
+ (print_g (fn () => " \<Gamma> \<turnstile> " ^
+ Syntax.string_of_term ctxt t ^ " : o\<^sup>" ^
+ string_for_sign sn ^ "?");
+ case t of
+ Const (x as (@{const_name all}, _)) $ Abs (s1, T1, t1) =>
+ do_quantifier x s1 T1 t1
+ | Const (x as (@{const_name "=="}, _)) $ t1 $ t2 => do_equals x t1 t2
+ | @{const Trueprop} $ t1 =>
+ let val (m1, accum) = do_formula sn t1 accum in
+ (MApp (MRaw (@{const Trueprop}, mtype_for (bool_T --> prop_T)),
+ m1), accum)
+ end
+ | @{const Not} $ t1 =>
+ let val (m1, accum) = do_formula (negate sn) t1 accum in
+ (MApp (MRaw (@{const Not}, mtype_for (bool_T --> bool_T)), m1),
+ accum)
+ end
+ | Const (x as (@{const_name All}, _)) $ Abs (s1, T1, t1) =>
+ do_quantifier x s1 T1 t1
+ | Const (x0 as (s0 as @{const_name Ex}, T0))
+ $ (t1 as Abs (s1, T1, t1')) =>
+ (case sn of
+ Plus => do_quantifier x0 s1 T1 t1'
+ | Minus =>
+ (* FIXME: Move elsewhere *)
+ do_term (@{const Not}
+ $ (HOLogic.eq_const (domain_type T0) $ t1
+ $ Abs (Name.uu, T1, @{const False}))) accum)
+ | Const (x as (@{const_name "op ="}, _)) $ t1 $ t2 =>
+ do_equals x t1 t2
+ | Const (@{const_name Let}, _) $ t1 $ t2 =>
+ do_formula sn (betapply (t2, t1)) accum
+ | (t0 as Const (s0, _)) $ t1 $ t2 =>
+ if s0 = @{const_name "==>"} orelse s0 = @{const_name "op &"} orelse
+ s0 = @{const_name "op |"} orelse
+ s0 = @{const_name "op -->"} then
+ let
+ val impl = (s0 = @{const_name "==>"} orelse
+ s0 = @{const_name "op -->"})
+ val (m1, accum) = do_formula (sn |> impl ? negate) t1 accum
+ val (m2, accum) = do_formula sn t2 accum
+ in
+ (MApp (MApp (MRaw (t0, mtype_for (fastype_of t0)), m1), m2),
+ accum)
+ end
+ else
+ do_term t accum
+ | _ => do_term t accum)
end
|> tap (fn (m, _) =>
print_g (fn () => "\<Gamma> \<turnstile> " ^
--- a/src/HOL/Tools/Nitpick/nitpick_preproc.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Tools/Nitpick/nitpick_preproc.ML Mon Jul 05 14:21:53 2010 +0100
@@ -179,7 +179,8 @@
let
val (t1, t2) = pairself (do_term new_Ts old_Ts Neut) (t1, t2)
val (T1, T2) = pairself (curry fastype_of1 new_Ts) (t1, t2)
- val T = [T1, T2] |> sort Term_Ord.typ_ord |> List.last
+ val T = if def then T1
+ else [T1, T2] |> sort (int_ord o pairself size_of_typ) |> hd
in
list_comb (Const (s0, T --> T --> body_type T0),
map2 (coerce_term hol_ctxt new_Ts T) [T1, T2] [t1, t2])
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_tptp_format.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_tptp_format.ML Mon Jul 05 14:21:53 2010 +0100
@@ -12,7 +12,6 @@
type hol_clause = Metis_Clauses.hol_clause
type name_pool = string Symtab.table * string Symtab.table
- val type_wrapper_name : string
val write_tptp_file :
theory -> bool -> bool -> bool -> Path.T
-> hol_clause list * hol_clause list * hol_clause list * hol_clause list
--- a/src/HOL/base.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/base.ML Mon Jul 05 14:21:53 2010 +0100
@@ -1,2 +1,4 @@
-(*side-entry for HOL-Base*)
+
+(* side-entry for HOL-Base *)
+
use_thys ["HOL"];
--- a/src/HOL/ex/Codegenerator_Candidates.thy Mon Jul 05 14:21:30 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,39 +0,0 @@
-
-(* Author: Florian Haftmann, TU Muenchen *)
-
-header {* A huge collection of equations to generate code from *}
-
-theory Codegenerator_Candidates
-imports
- Complex_Main
- AssocList
- Binomial
- "~~/src/HOL/Decision_Procs/Commutative_Ring_Complete"
- Dlist
- Fset
- Enum
- List_Prefix
- More_List
- Nat_Infinity
- Nested_Environment
- Option_ord
- Permutation
- "~~/src/HOL/Number_Theory/Primes"
- Product_ord
- "~~/src/HOL/ex/Records"
- RBT
- SetsAndFunctions
- While_Combinator
-begin
-
-inductive sublist :: "'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
- empty: "sublist [] xs"
- | drop: "sublist ys xs \<Longrightarrow> sublist ys (x # xs)"
- | take: "sublist ys xs \<Longrightarrow> sublist (x # ys) (x # xs)"
-
-code_pred sublist .
-
-(*avoid popular infix*)
-code_reserved SML upto
-
-end
--- a/src/HOL/ex/Codegenerator_Pretty.thy Mon Jul 05 14:21:30 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-
-(* Author: Florian Haftmann, TU Muenchen *)
-
-header {* Generating code using pretty literals and natural number literals *}
-
-theory Codegenerator_Pretty
-imports "~~/src/HOL/ex/Codegenerator_Candidates" Code_Char Efficient_Nat
-begin
-
-declare isnorm.simps [code del]
-
-end
--- a/src/HOL/ex/Codegenerator_Pretty_Test.thy Mon Jul 05 14:21:30 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-
-(* Author: Florian Haftmann, TU Muenchen *)
-
-header {* Pervasive test of code generator using pretty literals *}
-
-theory Codegenerator_Pretty_Test
-imports Codegenerator_Pretty
-begin
-
-export_code * in SML module_name CodegenTest
- in OCaml module_name CodegenTest file -
- in Haskell file -
- in Scala file -
-
-end
--- a/src/HOL/ex/Codegenerator_Test.thy Mon Jul 05 14:21:30 2010 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-
-(* Author: Florian Haftmann, TU Muenchen *)
-
-header {* Pervasive test of code generator *}
-
-theory Codegenerator_Test
-imports Codegenerator_Candidates
-begin
-
-export_code * in SML module_name CodegenTest
- in OCaml module_name CodegenTest file -
- in Haskell file -
- in Scala file -
-
-end
--- a/src/HOL/ex/ROOT.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/ex/ROOT.ML Mon Jul 05 14:21:53 2010 +0100
@@ -8,8 +8,6 @@
"Efficient_Nat_examples",
"FuncSet",
"Eval_Examples",
- "Codegenerator_Test",
- "Codegenerator_Pretty_Test",
"NormalForm"
];
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/library.ML Mon Jul 05 14:21:53 2010 +0100
@@ -0,0 +1,5 @@
+
+(* Classical Higher-order Logic -- batteries included *)
+
+use_thys ["Library", "List_Prefix", "List_lexord", "Sublist_Order",
+ "Code_Char_chr", "Code_Integer", "Efficient_Nat", "Executable_Set"];
--- a/src/HOL/main.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/main.ML Mon Jul 05 14:21:53 2010 +0100
@@ -1,2 +1,4 @@
-(*side-entry for HOL-Main*)
+
+(* side-entry for HOL-Main *)
+
use_thys ["Main"];
--- a/src/HOL/plain.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/HOL/plain.ML Mon Jul 05 14:21:53 2010 +0100
@@ -1,2 +1,4 @@
-(*side-entry for HOL-Plain*)
+
+(* side-entry for HOL-Plain *)
+
use_thys ["Plain"];
--- a/src/Pure/Concurrent/future.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/Concurrent/future.ML Mon Jul 05 14:21:53 2010 +0100
@@ -59,6 +59,7 @@
val cancel_group: group -> unit
val cancel: 'a future -> unit
val shutdown: unit -> unit
+ val report: (unit -> 'a) -> 'a
end;
structure Future: FUTURE =
@@ -284,7 +285,7 @@
if forall (Thread.isActive o #1) (! workers) then ()
else
let
- val (alive, dead) = List.partition (Thread.isActive o #1) (! workers);
+ val (alive, dead) = List.partition (Thread.isActive o #1) (! workers);
val _ = workers := alive;
in
Multithreading.tracing 0 (fn () =>
@@ -523,6 +524,16 @@
else ();
+(* report markup *)
+
+fun report e =
+ let
+ val _ = Output.status (Markup.markup Markup.forked "");
+ val x = e (); (*sic -- report "joined" only for success*)
+ val _ = Output.status (Markup.markup Markup.joined "");
+ in x end;
+
+
(*final declarations of this structure!*)
val map = map_future;
--- a/src/Pure/Isar/toplevel.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/Isar/toplevel.ML Mon Jul 05 14:21:53 2010 +0100
@@ -86,9 +86,9 @@
val error_msg: transition -> exn * string -> unit
val add_hook: (transition -> state -> state -> unit) -> unit
val transition: bool -> transition -> state -> (state * (exn * string) option) option
+ val run_command: string -> transition -> state -> state option
val commit_exit: Position.T -> transition
val command: transition -> state -> state
- val run_command: string -> transition -> state -> state option
val excursion: (transition * transition list) list -> (transition * state) list lazy
end;
@@ -561,6 +561,12 @@
fun status tr m =
setmp_thread_position tr (fn () => Output.status (Markup.markup m "")) ();
+fun async_state (tr as Transition {print, ...}) st =
+ if print then
+ ignore (Future.fork_group (Task_Queue.new_group (Future.worker_group ()))
+ (fn () => Future.report (setmp_thread_position tr (fn () => print_state false st))))
+ else ();
+
fun error_msg tr exn_info =
setmp_thread_position tr (fn () =>
Output.error_msg (ML_Compiler.exn_message (EXCURSION_FAIL exn_info))) ();
@@ -614,6 +620,22 @@
end;
+(* managed execution *)
+
+fun run_command thy_name (tr as Transition {print, ...}) st =
+ (case
+ (case init_of tr of
+ SOME name => Exn.capture (fn () => Thy_Load.check_name thy_name name) ()
+ | NONE => Exn.Result ()) of
+ Exn.Result () =>
+ (case transition false tr st of
+ SOME (st', NONE) => (status tr Markup.finished; async_state tr st'; SOME st')
+ | SOME (_, SOME (exn as Exn.Interrupt, _)) => reraise exn
+ | SOME (_, SOME exn_info) => (error_msg tr exn_info; status tr Markup.failed; NONE)
+ | NONE => (error_msg tr (TERMINATE, at_command tr); status tr Markup.failed; NONE))
+ | Exn.Exn exn => (error_msg tr (exn, at_command tr); status tr Markup.failed; NONE));
+
+
(* commit final exit *)
fun commit_exit pos =
@@ -635,19 +657,6 @@
let val st' = command tr st
in (st', st') end;
-fun run_command thy_name tr st =
- (case
- (case init_of tr of
- SOME name => Exn.capture (fn () => Thy_Load.check_name thy_name name) ()
- | NONE => Exn.Result ()) of
- Exn.Result () =>
- (case transition true tr st of
- SOME (st', NONE) => (status tr Markup.finished; SOME st')
- | SOME (_, SOME (exn as Exn.Interrupt, _)) => reraise exn
- | SOME (_, SOME exn_info) => (error_msg tr exn_info; status tr Markup.failed; NONE)
- | NONE => (error_msg tr (TERMINATE, at_command tr); status tr Markup.failed; NONE))
- | Exn.Exn exn => (error_msg tr (exn, at_command tr); status tr Markup.failed; NONE));
-
(* excursion of units, consisting of commands with proof *)
--- a/src/Pure/PIDE/document.scala Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/PIDE/document.scala Mon Jul 05 14:21:53 2010 +0100
@@ -14,13 +14,13 @@
{
/* command start positions */
- def command_starts(commands: Linear_Set[Command]): Iterator[(Command, Int)] =
+ def command_starts(commands: Iterator[Command], offset: Int = 0): Iterator[(Command, Int)] =
{
- var offset = 0
- for (cmd <- commands.iterator) yield {
- val start = offset
- offset += cmd.length
- (cmd, start)
+ var i = offset
+ for (command <- commands) yield {
+ val start = i
+ i += command.length
+ (command, start)
}
}
@@ -60,9 +60,10 @@
{
eds match {
case e :: es =>
- command_starts(commands).find { // FIXME relative search!
+ command_starts(commands.iterator).find {
case (cmd, cmd_start) =>
- e.can_edit(cmd.source, cmd_start) || e.is_insert && e.start == cmd_start + cmd.length
+ e.can_edit(cmd.source, cmd_start) ||
+ e.is_insert && e.start == cmd_start + cmd.length
} match {
case Some((cmd, cmd_start)) if e.can_edit(cmd.source, cmd_start) =>
val (rest, text) = e.edit(cmd.source, cmd_start)
@@ -144,7 +145,7 @@
{
/* command ranges */
- def command_starts: Iterator[(Command, Int)] = Document.command_starts(commands)
+ def command_starts: Iterator[(Command, Int)] = Document.command_starts(commands.iterator)
def command_start(cmd: Command): Option[Int] =
command_starts.find(_._1 == cmd).map(_._2)
--- a/src/Pure/Syntax/parser.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/Syntax/parser.ML Mon Jul 05 14:21:53 2010 +0100
@@ -8,9 +8,9 @@
sig
type gram
val empty_gram: gram
- val extend_gram: gram -> Syn_Ext.xprod list -> gram
+ val extend_gram: Syn_Ext.xprod list -> gram -> gram
val make_gram: Syn_Ext.xprod list -> gram
- val merge_grams: gram -> gram -> gram
+ val merge_gram: gram * gram -> gram
val pretty_gram: gram -> Pretty.T list
datatype parsetree =
Node of string * parsetree list |
@@ -23,19 +23,16 @@
structure Parser: PARSER =
struct
-open Lexicon Syn_Ext;
-
-
(** datatype gram **)
type nt_tag = int;  (*productions for the NTs are stored in an array
so we can identify NTs by their index*)
-datatype symb = Terminal of token
+datatype symb = Terminal of Lexicon.token
| Nonterminal of nt_tag * int; (*(tag, precedence)*)
-type nt_gram = ((nt_tag list * token list) *
- (token option * (symb list * string * int) list) list);
+type nt_gram = ((nt_tag list * Lexicon.token list) *
+ (Lexicon.token option * (symb list * string * int) list) list);
(*(([dependent_nts], [start_tokens]),
[(start_token, [(rhs, name, prio)])])*)
  (*dependent_nts is a list of all NTs whose lookahead
@@ -53,8 +50,8 @@
lambda productions are stored as normal productions
and also as an entry in "lambdas"*)
-val UnknownStart = eof; (*productions for which no starting token is
- known yet are associated with this token*)
+val UnknownStart = Lexicon.eof; (*productions for which no starting token is
+ known yet are associated with this token*)
(* get all NTs that are connected with a list of NTs
(used for expanding chain list)*)
@@ -125,7 +122,7 @@
(*get all known starting tokens for a nonterminal*)
fun starts_for_nt nt = snd (fst (Array.sub (prods, nt)));
- val token_union = uncurry (union matching_tokens);
+ val token_union = uncurry (union Lexicon.matching_tokens);
(*update prods, lookaheads, and lambdas according to new lambda NTs*)
val (added_starts', lambdas') =
@@ -158,7 +155,7 @@
val nt_dependencies' = union (op =) nts nt_dependencies;
(*associate production with new starting tokens*)
- fun copy ([]: token option list) nt_prods = nt_prods
+ fun copy ([]: Lexicon.token option list) nt_prods = nt_prods
| copy (tk :: tks) nt_prods =
let val old_prods = these (AList.lookup (op =) nt_prods tk);
@@ -259,7 +256,7 @@
let
val ((old_nts, old_tks), nt_prods) = Array.sub (prods, nt);
- val new_tks = subtract matching_tokens old_tks start_tks;
+ val new_tks = subtract Lexicon.matching_tokens old_tks start_tks;
(*store new production*)
fun store [] prods is_new =
@@ -278,7 +275,7 @@
else (new_prod :: tk_prods, true);
val prods' = prods
- |> is_new' ? AList.update (op =) (tk: token option, tk_prods');
+ |> is_new' ? AList.update (op =) (tk: Lexicon.token option, tk_prods');
in store tks prods' (is_new orelse is_new') end;
val (nt_prods', prod_count', changed) =
@@ -329,10 +326,10 @@
andalso member (op =) new_tks (the tk);
(*associate production with new starting tokens*)
- fun copy ([]: token list) nt_prods = nt_prods
+ fun copy ([]: Lexicon.token list) nt_prods = nt_prods
| copy (tk :: tks) nt_prods =
let
- val tk_prods = (these o AList.lookup (op =) nt_prods) (SOME tk);
+ val tk_prods = these (AList.lookup (op =) nt_prods (SOME tk));
val tk_prods' =
if not lambda then p :: tk_prods
@@ -359,7 +356,7 @@
val (lookahead as (old_nts, old_tks), nt_prods) =
Array.sub (prods, nt);
- val tk_prods = (these o AList.lookup (op =) nt_prods) key;
+ val tk_prods = these (AList.lookup (op =) nt_prods key);
(*associate productions with new lookahead tokens*)
val (tk_prods', nt_prods') =
@@ -370,7 +367,7 @@
|> (key = SOME UnknownStart) ? AList.update (op =) (key, tk_prods')
val added_tks =
- subtract matching_tokens old_tks new_tks;
+ subtract Lexicon.matching_tokens old_tks new_tks;
in if null added_tks then
(Array.update (prods, nt, (lookahead, nt_prods'));
process_nts nts added)
@@ -381,7 +378,7 @@
end;
val ((dependent, _), _) = Array.sub (prods, changed_nt);
- in add_starts (starts @ (process_nts dependent [])) end;
+ in add_starts (starts @ process_nts dependent []) end;
in add_starts added_starts' end;
in add_prods prods chains' lambdas' prod_count ps end;
@@ -394,8 +391,8 @@
val nt_name = the o Inttab.lookup (Inttab.make (map swap (Symtab.dest tags)));
- fun pretty_symb (Terminal (Token (Literal, s, _))) = Pretty.quote (Pretty.str s)
- | pretty_symb (Terminal tok) = Pretty.str (str_of_token tok)
+ fun pretty_symb (Terminal (Lexicon.Token (Lexicon.Literal, s, _))) = Pretty.quote (Pretty.str s)
+ | pretty_symb (Terminal tok) = Pretty.str (Lexicon.str_of_token tok)
| pretty_symb (Nonterminal (tag, p)) =
Pretty.str (nt_name tag ^ "[" ^ signed_string_of_int p ^ "]");
@@ -422,7 +419,6 @@
(** Operations on grammars **)
-(*The mother of all grammars*)
val empty_gram = Gram {nt_count = 0, prod_count = 0,
tags = Symtab.empty, chains = [], lambdas = [],
prods = Array.array (0, (([], []), []))};
@@ -439,75 +435,75 @@
(*Add productions to a grammar*)
-fun extend_gram gram [] = gram
- | extend_gram (Gram {nt_count, prod_count, tags, chains, lambdas, prods})
- xprods =
- let
- (*Get tag for existing nonterminal or create a new one*)
- fun get_tag nt_count tags nt =
- case Symtab.lookup tags nt of
- SOME tag => (nt_count, tags, tag)
- | NONE => (nt_count+1, Symtab.update_new (nt, nt_count) tags,
- nt_count);
+fun extend_gram [] gram = gram
+ | extend_gram xprods (Gram {nt_count, prod_count, tags, chains, lambdas, prods}) =
+ let
+ (*Get tag for existing nonterminal or create a new one*)
+ fun get_tag nt_count tags nt =
+ case Symtab.lookup tags nt of
+ SOME tag => (nt_count, tags, tag)
+ | NONE => (nt_count+1, Symtab.update_new (nt, nt_count) tags,
+ nt_count);
- (*Convert symbols to the form used by the parser;
- delimiters and predefined terms are stored as terminals,
- nonterminals are converted to integer tags*)
- fun symb_of [] nt_count tags result = (nt_count, tags, rev result)
- | symb_of ((Delim s) :: ss) nt_count tags result =
- symb_of ss nt_count tags (Terminal (Token (Literal, s, Position.no_range)) :: result)
- | symb_of ((Argument (s, p)) :: ss) nt_count tags result =
- let
- val (nt_count', tags', new_symb) =
- case predef_term s of
- NONE =>
- let val (nt_count', tags', s_tag) = get_tag nt_count tags s;
- in (nt_count', tags', Nonterminal (s_tag, p)) end
- | SOME tk => (nt_count, tags, Terminal tk);
- in symb_of ss nt_count' tags' (new_symb :: result) end
- | symb_of (_ :: ss) nt_count tags result =
- symb_of ss nt_count tags result;
+ (*Convert symbols to the form used by the parser;
+ delimiters and predefined terms are stored as terminals,
+ nonterminals are converted to integer tags*)
+ fun symb_of [] nt_count tags result = (nt_count, tags, rev result)
+ | symb_of ((Syn_Ext.Delim s) :: ss) nt_count tags result =
+ symb_of ss nt_count tags
+ (Terminal (Lexicon.Token (Lexicon.Literal, s, Position.no_range)) :: result)
+ | symb_of ((Syn_Ext.Argument (s, p)) :: ss) nt_count tags result =
+ let
+ val (nt_count', tags', new_symb) =
+ case Lexicon.predef_term s of
+ NONE =>
+ let val (nt_count', tags', s_tag) = get_tag nt_count tags s;
+ in (nt_count', tags', Nonterminal (s_tag, p)) end
+ | SOME tk => (nt_count, tags, Terminal tk);
+ in symb_of ss nt_count' tags' (new_symb :: result) end
+ | symb_of (_ :: ss) nt_count tags result =
+ symb_of ss nt_count tags result;
- (*Convert list of productions by invoking symb_of for each of them*)
- fun prod_of [] nt_count prod_count tags result =
- (nt_count, prod_count, tags, result)
- | prod_of ((XProd (lhs, xsymbs, const, pri)) :: ps)
- nt_count prod_count tags result =
- let val (nt_count', tags', lhs_tag) = get_tag nt_count tags lhs;
+ (*Convert list of productions by invoking symb_of for each of them*)
+ fun prod_of [] nt_count prod_count tags result =
+ (nt_count, prod_count, tags, result)
+ | prod_of ((Syn_Ext.XProd (lhs, xsymbs, const, pri)) :: ps)
+ nt_count prod_count tags result =
+ let val (nt_count', tags', lhs_tag) = get_tag nt_count tags lhs;
- val (nt_count'', tags'', prods) =
- symb_of xsymbs nt_count' tags' [];
- in prod_of ps nt_count'' (prod_count+1) tags''
- ((lhs_tag, (prods, const, pri)) :: result)
- end;
+ val (nt_count'', tags'', prods) =
+ symb_of xsymbs nt_count' tags' [];
+ in prod_of ps nt_count'' (prod_count+1) tags''
+ ((lhs_tag, (prods, const, pri)) :: result)
+ end;
- val (nt_count', prod_count', tags', xprods') =
- prod_of xprods nt_count prod_count tags [];
+ val (nt_count', prod_count', tags', xprods') =
+ prod_of xprods nt_count prod_count tags [];
- (*Copy array containing productions of old grammar;
- this has to be done to preserve the old grammar while being able
- to change the array's content*)
- val prods' =
- let fun get_prod i = if i < nt_count then Array.sub (prods, i)
- else (([], []), []);
- in Array.tabulate (nt_count', get_prod) end;
+ (*Copy array containing productions of old grammar;
+ this has to be done to preserve the old grammar while being able
+ to change the array's content*)
+ val prods' =
+ let fun get_prod i = if i < nt_count then Array.sub (prods, i)
+ else (([], []), []);
+ in Array.tabulate (nt_count', get_prod) end;
- val fromto_chains = inverse_chains chains [];
+ val fromto_chains = inverse_chains chains [];
- (*Add new productions to old ones*)
- val (fromto_chains', lambdas', _) =
- add_prods prods' fromto_chains lambdas NONE xprods';
+ (*Add new productions to old ones*)
+ val (fromto_chains', lambdas', _) =
+ add_prods prods' fromto_chains lambdas NONE xprods';
- val chains' = inverse_chains fromto_chains' [];
- in Gram {nt_count = nt_count', prod_count = prod_count', tags = tags',
- chains = chains', lambdas = lambdas', prods = prods'}
- end;
+ val chains' = inverse_chains fromto_chains' [];
+ in Gram {nt_count = nt_count', prod_count = prod_count', tags = tags',
+ chains = chains', lambdas = lambdas', prods = prods'}
+ end;
-val make_gram = extend_gram empty_gram;
+fun make_gram xprods = extend_gram xprods empty_gram;
(*Merge two grammars*)
-fun merge_grams gram_a gram_b =
+fun merge_gram (gram_a, gram_b) =
let
(*find out which grammar is bigger*)
val (Gram {nt_count = nt_count1, prod_count = prod_count1, tags = tags1,
@@ -604,7 +600,7 @@
datatype parsetree =
Node of string * parsetree list |
- Tip of token;
+ Tip of Lexicon.token;
type state =
nt_tag * int * (*identification and production precedence*)
@@ -675,7 +671,7 @@
fun movedot_term (A, j, ts, Terminal a :: sa, id, i) c =
- if valued_token c then
+ if Lexicon.valued_token c then
(A, j, ts @ [Tip c], sa, id, i)
else (A, j, ts, sa, id, i);
@@ -695,105 +691,105 @@
(*get all productions of a NT and NTs chained to it which can
be started by specified token*)
fun prods_for prods chains include_none tk nts =
-let
- fun token_assoc (list, key) =
- let fun assoc [] result = result
- | assoc ((keyi, pi) :: pairs) result =
- if is_some keyi andalso matching_tokens (the keyi, key)
- orelse include_none andalso is_none keyi then
- assoc pairs (pi @ result)
- else assoc pairs result;
- in assoc list [] end;
+ let
+ fun token_assoc (list, key) =
+ let fun assoc [] result = result
+ | assoc ((keyi, pi) :: pairs) result =
+ if is_some keyi andalso Lexicon.matching_tokens (the keyi, key)
+ orelse include_none andalso is_none keyi then
+ assoc pairs (pi @ result)
+ else assoc pairs result;
+ in assoc list [] end;
- fun get_prods [] result = result
- | get_prods (nt :: nts) result =
- let val nt_prods = snd (Array.sub (prods, nt));
- in get_prods nts ((token_assoc (nt_prods, tk)) @ result) end;
-in get_prods (connected_with chains nts nts) [] end;
+ fun get_prods [] result = result
+ | get_prods (nt :: nts) result =
+ let val nt_prods = snd (Array.sub (prods, nt));
+ in get_prods nts ((token_assoc (nt_prods, tk)) @ result) end;
+ in get_prods (connected_with chains nts nts) [] end;
fun PROCESSS warned prods chains Estate i c states =
-let
-fun all_prods_for nt = prods_for prods chains true c [nt];
+ let
+ fun all_prods_for nt = prods_for prods chains true c [nt];
-fun processS used [] (Si, Sii) = (Si, Sii)
- | processS used (S :: States) (Si, Sii) =
- (case S of
- (_, _, _, Nonterminal (nt, minPrec) :: _, _, _) =>
- let (*predictor operation*)
- val (used', new_states) =
- (case AList.lookup (op =) used nt of
- SOME (usedPrec, l) => (*nonterminal has been processed*)
- if usedPrec <= minPrec then
- (*wanted precedence has been processed*)
- (used, movedot_lambda S l)
- else (*wanted precedence hasn't been parsed yet*)
- let
- val tk_prods = all_prods_for nt;
+ fun processS used [] (Si, Sii) = (Si, Sii)
+ | processS used (S :: States) (Si, Sii) =
+ (case S of
+ (_, _, _, Nonterminal (nt, minPrec) :: _, _, _) =>
+ let (*predictor operation*)
+ val (used', new_states) =
+ (case AList.lookup (op =) used nt of
+ SOME (usedPrec, l) => (*nonterminal has been processed*)
+ if usedPrec <= minPrec then
+ (*wanted precedence has been processed*)
+ (used, movedot_lambda S l)
+ else (*wanted precedence hasn't been parsed yet*)
+ let
+ val tk_prods = all_prods_for nt;
- val States' = mkStates i minPrec nt
- (getRHS' minPrec usedPrec tk_prods);
- in (update_prec (nt, minPrec) used,
- movedot_lambda S l @ States')
- end
+ val States' = mkStates i minPrec nt
+ (getRHS' minPrec usedPrec tk_prods);
+ in (update_prec (nt, minPrec) used,
+ movedot_lambda S l @ States')
+ end
- | NONE => (*nonterminal is parsed for the first time*)
- let val tk_prods = all_prods_for nt;
- val States' = mkStates i minPrec nt
- (getRHS minPrec tk_prods);
- in ((nt, (minPrec, [])) :: used, States') end);
+ | NONE => (*nonterminal is parsed for the first time*)
+ let val tk_prods = all_prods_for nt;
+ val States' = mkStates i minPrec nt
+ (getRHS minPrec tk_prods);
+ in ((nt, (minPrec, [])) :: used, States') end);
- val dummy =
- if not (!warned) andalso
- length (new_states @ States) > (!branching_level) then
- (warning "Currently parsed expression could be extremely ambiguous.";
- warned := true)
- else ();
- in
- processS used' (new_states @ States) (S :: Si, Sii)
- end
- | (_, _, _, Terminal a :: _, _, _) => (*scanner operation*)
- processS used States
- (S :: Si,
- if matching_tokens (a, c) then movedot_term S c :: Sii else Sii)
- | (A, prec, ts, [], id, j) => (*completer operation*)
- let val tt = if id = "" then ts else [Node (id, ts)] in
- if j = i then (*lambda production?*)
- let
- val (used', O) = update_trees used (A, (tt, prec));
+ val dummy =
+ if not (!warned) andalso
+ length (new_states @ States) > (!branching_level) then
+ (warning "Currently parsed expression could be extremely ambiguous.";
+ warned := true)
+ else ();
in
- case O of
- NONE =>
- let val Slist = getS A prec Si;
- val States' = map (movedot_nonterm tt) Slist;
- in processS used' (States' @ States) (S :: Si, Sii) end
- | SOME n =>
- if n >= prec then processS used' States (S :: Si, Sii)
- else
- let val Slist = getS' A prec n Si;
- val States' = map (movedot_nonterm tt) Slist;
- in processS used' (States' @ States) (S :: Si, Sii) end
+ processS used' (new_states @ States) (S :: Si, Sii)
end
- else
- let val Slist = getStates Estate i j A prec
- in processS used (map (movedot_nonterm tt) Slist @ States)
- (S :: Si, Sii)
- end
- end)
-in processS [] states ([], []) end;
+ | (_, _, _, Terminal a :: _, _, _) => (*scanner operation*)
+ processS used States
+ (S :: Si,
+ if Lexicon.matching_tokens (a, c) then movedot_term S c :: Sii else Sii)
+ | (A, prec, ts, [], id, j) => (*completer operation*)
+ let val tt = if id = "" then ts else [Node (id, ts)] in
+ if j = i then (*lambda production?*)
+ let
+ val (used', O) = update_trees used (A, (tt, prec));
+ in
+ case O of
+ NONE =>
+ let val Slist = getS A prec Si;
+ val States' = map (movedot_nonterm tt) Slist;
+ in processS used' (States' @ States) (S :: Si, Sii) end
+ | SOME n =>
+ if n >= prec then processS used' States (S :: Si, Sii)
+ else
+ let val Slist = getS' A prec n Si;
+ val States' = map (movedot_nonterm tt) Slist;
+ in processS used' (States' @ States) (S :: Si, Sii) end
+ end
+ else
+ let val Slist = getStates Estate i j A prec
+ in processS used (map (movedot_nonterm tt) Slist @ States)
+ (S :: Si, Sii)
+ end
+ end)
+ in processS [] states ([], []) end;
fun produce warned prods tags chains stateset i indata prev_token =
(case Array.sub (stateset, i) of
[] =>
let
- val toks = if is_eof prev_token then indata else prev_token :: indata;
- val pos = Position.str_of (pos_of_token prev_token);
+ val toks = if Lexicon.is_eof prev_token then indata else prev_token :: indata;
+ val pos = Position.str_of (Lexicon.pos_of_token prev_token);
in
if null toks then error ("Inner syntax error: unexpected end of input" ^ pos)
else error (Pretty.string_of (Pretty.block
(Pretty.str ("Inner syntax error" ^ pos ^ " at \"") ::
- Pretty.breaks (map (Pretty.str o str_of_token) (#1 (split_last toks))) @
+ Pretty.breaks (map (Pretty.str o Lexicon.str_of_token) (#1 (split_last toks))) @
[Pretty.str "\""])))
end
| s =>
@@ -807,21 +803,20 @@
end));
-fun get_trees l = map_filter (fn (_, _, [pt], _, _, _) => SOME pt | _ => NONE)
- l;
+fun get_trees l = map_filter (fn (_, _, [pt], _, _, _) => SOME pt | _ => NONE) l;
fun earley prods tags chains startsymbol indata =
let
- val start_tag = case Symtab.lookup tags startsymbol of
- SOME tag => tag
- | NONE => error ("parse: Unknown startsymbol " ^
- quote startsymbol);
- val S0 = [(~1, 0, [], [Nonterminal (start_tag, 0), Terminal eof], "", 0)];
+ val start_tag =
+ (case Symtab.lookup tags startsymbol of
+ SOME tag => tag
+ | NONE => error ("Inner syntax: unknown startsymbol " ^ quote startsymbol));
+ val S0 = [(~1, 0, [], [Nonterminal (start_tag, 0), Terminal Lexicon.eof], "", 0)];
val s = length indata + 1;
val Estate = Array.array (s, []);
in
Array.update (Estate, 0, S0);
- get_trees (produce (Unsynchronized.ref false) prods tags chains Estate 0 indata eof)
+ get_trees (produce (Unsynchronized.ref false) prods tags chains Estate 0 indata Lexicon.eof)
end;
@@ -830,10 +825,10 @@
val end_pos =
(case try List.last toks of
NONE => Position.none
- | SOME (Token (_, _, (_, end_pos))) => end_pos);
+ | SOME (Lexicon.Token (_, _, (_, end_pos))) => end_pos);
val r =
- (case earley prods tags chains start (toks @ [mk_eof end_pos]) of
- [] => sys_error "parse: no parse trees"
+ (case earley prods tags chains start (toks @ [Lexicon.mk_eof end_pos]) of
+ [] => raise Fail "no parse trees"
| pts => pts);
in r end;
@@ -842,7 +837,8 @@
let
fun freeze a = map_range (curry Array.sub a) (Array.length a);
val prods = maps snd (maps snd (freeze (#prods gram)));
- fun guess (SOME ([Nonterminal (_, k), Terminal (Token (Literal, s, _)), Nonterminal (_, l)], _, j)) =
+ fun guess (SOME ([Nonterminal (_, k),
+ Terminal (Lexicon.Token (Lexicon.Literal, s, _)), Nonterminal (_, l)], _, j)) =
if k = j andalso l = j + 1 then SOME (s, true, false, j)
else if k = j + 1 then if l = j then SOME (s, false, true, j)
else if l = j + 1 then SOME (s, false, false, j)
--- a/src/Pure/Syntax/syntax.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/Syntax/syntax.ML Mon Jul 05 14:21:53 2010 +0100
@@ -297,7 +297,7 @@
Syntax
({input = new_xprods @ input,
lexicon = fold Scan.extend_lexicon (Syn_Ext.delims_of new_xprods) lexicon,
- gram = Parser.extend_gram gram new_xprods,
+ gram = Parser.extend_gram new_xprods gram,
consts = Library.merge (op =) (consts1, filter_out Lexicon.is_marked consts2),
prmodes = insert (op =) mode (Library.merge (op =) (prmodes1, prmodes2)),
parse_ast_trtab =
@@ -362,7 +362,7 @@
Syntax
({input = Library.merge (op =) (input1, input2),
lexicon = Scan.merge_lexicons (lexicon1, lexicon2),
- gram = Parser.merge_grams gram1 gram2,
+ gram = Parser.merge_gram (gram1, gram2),
consts = sort_distinct string_ord (consts1 @ consts2),
prmodes = Library.merge (op =) (prmodes1, prmodes2),
parse_ast_trtab =
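
These call sites suggest why the Parser interface changed shape: with the grammar as the final curried argument, extend_gram xprods becomes a gram -> gram update that composes with fold, and the tupled merge_gram presumably follows the usual merge-combinator convention. A hedged Scala sketch of that argument-order idea with toy types (not the Isabelle ones):

    object FoldFriendlyDemo {
      type Gram = Set[String]   // toy stand-in for the parser's grammar

      // container-last and curried: extendGram(xprods) is a Gram => Gram update
      def extendGram(xprods: List[String])(gram: Gram): Gram = gram ++ xprods

      // tupled merge, in the style of merge_gram (gram1, gram2)
      def mergeGram(grams: (Gram, Gram)): Gram = grams._1 ++ grams._2

      def main(args: Array[String]): Unit = {
        val batches = List(List("a", "b"), List("c"))
        val gram = batches.foldLeft(Set.empty[String])((g, xs) => extendGram(xs)(g))
        println(gram)                              // Set(a, b, c)
        println(mergeGram((Set("a"), Set("b"))))   // Set(a, b)
      }
    }
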
--- a/src/Pure/System/isabelle_process.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/System/isabelle_process.ML Mon Jul 05 14:21:53 2010 +0100
@@ -91,6 +91,7 @@
(Unsynchronized.change print_mode
(fold (update op =) [isabelle_processN, Keyword.keyword_status_reportN, Pretty.symbolicN]);
setup_channels out |> init_message;
+ quick_and_dirty := true; (* FIXME !? *)
Keyword.report ();
Output.status (Markup.markup Markup.ready "");
Isar.toplevel_loop {init = true, welcome = false, sync = true, secure = true});
--- a/src/Pure/System/isabelle_process.scala Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/System/isabelle_process.scala Mon Jul 05 14:21:53 2010 +0100
@@ -19,89 +19,55 @@
{
/* results */
- object Kind extends Enumeration {
- //{{{ values and codes
- // internal system notification
- val SYSTEM = Value("SYSTEM")
- // Posix channels/events
- val STDIN = Value("STDIN")
- val STDOUT = Value("STDOUT")
- val SIGNAL = Value("SIGNAL")
- val EXIT = Value("EXIT")
- // Isabelle messages
- val INIT = Value("INIT")
- val STATUS = Value("STATUS")
- val WRITELN = Value("WRITELN")
- val TRACING = Value("TRACING")
- val WARNING = Value("WARNING")
- val ERROR = Value("ERROR")
- val DEBUG = Value("DEBUG")
- // messages codes
- val code = Map(
- ('A' : Int) -> Kind.INIT,
- ('B' : Int) -> Kind.STATUS,
- ('C' : Int) -> Kind.WRITELN,
- ('D' : Int) -> Kind.TRACING,
- ('E' : Int) -> Kind.WARNING,
- ('F' : Int) -> Kind.ERROR,
- ('G' : Int) -> Kind.DEBUG,
- ('0' : Int) -> Kind.SYSTEM,
- ('1' : Int) -> Kind.STDIN,
- ('2' : Int) -> Kind.STDOUT,
- ('3' : Int) -> Kind.SIGNAL,
- ('4' : Int) -> Kind.EXIT)
+ object Kind {
// message markup
val markup = Map(
- Kind.INIT -> Markup.INIT,
- Kind.STATUS -> Markup.STATUS,
- Kind.WRITELN -> Markup.WRITELN,
- Kind.TRACING -> Markup.TRACING,
- Kind.WARNING -> Markup.WARNING,
- Kind.ERROR -> Markup.ERROR,
- Kind.DEBUG -> Markup.DEBUG,
- Kind.SYSTEM -> Markup.SYSTEM,
- Kind.STDIN -> Markup.STDIN,
- Kind.STDOUT -> Markup.STDOUT,
- Kind.SIGNAL -> Markup.SIGNAL,
- Kind.EXIT -> Markup.EXIT)
- //}}}
- def is_raw(kind: Value) =
- kind == STDOUT
- def is_control(kind: Value) =
- kind == SYSTEM ||
- kind == SIGNAL ||
- kind == EXIT
- def is_system(kind: Value) =
- kind == SYSTEM ||
- kind == STDIN ||
- kind == SIGNAL ||
- kind == EXIT ||
- kind == STATUS
+ ('A' : Int) -> Markup.INIT,
+ ('B' : Int) -> Markup.STATUS,
+ ('C' : Int) -> Markup.WRITELN,
+ ('D' : Int) -> Markup.TRACING,
+ ('E' : Int) -> Markup.WARNING,
+ ('F' : Int) -> Markup.ERROR,
+ ('G' : Int) -> Markup.DEBUG)
+ def is_raw(kind: String) =
+ kind == Markup.STDOUT
+ def is_control(kind: String) =
+ kind == Markup.SYSTEM ||
+ kind == Markup.SIGNAL ||
+ kind == Markup.EXIT
+ def is_system(kind: String) =
+ kind == Markup.SYSTEM ||
+ kind == Markup.STDIN ||
+ kind == Markup.SIGNAL ||
+ kind == Markup.EXIT ||
+ kind == Markup.STATUS
}
- class Result(val kind: Kind.Value, val props: List[(String, String)], val body: List[XML.Tree])
+ class Result(val message: XML.Elem)
{
- def message = XML.Elem(Kind.markup(kind), props, body)
+ def kind = message.name
+ def body = message.body
+
+ def is_raw = Kind.is_raw(kind)
+ def is_control = Kind.is_control(kind)
+ def is_system = Kind.is_system(kind)
+ def is_status = kind == Markup.STATUS
+ def is_ready = is_status && body == List(XML.Elem(Markup.READY, Nil, Nil))
override def toString: String =
{
val res =
- if (kind == Kind.STATUS) body.map(_.toString).mkString
- else Pretty.string_of(body)
+ if (is_status) message.body.map(_.toString).mkString
+ else Pretty.string_of(message.body)
+ val props = message.attributes
if (props.isEmpty)
kind.toString + " [[" + res + "]]"
else
kind.toString + " " +
(for ((x, y) <- props) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]"
}
- def is_raw = Kind.is_raw(kind)
- def is_control = Kind.is_control(kind)
- def is_system = Kind.is_system(kind)
- def is_ready = kind == Kind.STATUS && body == List(XML.Elem(Markup.READY, Nil, Nil))
-
- def cache(c: XML.Cache): Result =
- new Result(kind, c.cache_props(props), c.cache_trees(body))
+ def cache(c: XML.Cache): Result = new Result(c.cache_tree(message).asInstanceOf[XML.Elem])
}
}
@@ -127,15 +93,15 @@
/* results */
- private def put_result(kind: Kind.Value, props: List[(String, String)], body: List[XML.Tree])
+ private def put_result(kind: String, props: List[(String, String)], body: List[XML.Tree])
{
- if (kind == Kind.INIT) {
+ if (kind == Markup.INIT) {
for ((Markup.PID, p) <- props) pid = p
}
- receiver ! new Result(kind, props, body)
+ receiver ! new Result(XML.Elem(kind, props, body))
}
- private def put_result(kind: Kind.Value, text: String)
+ private def put_result(kind: String, text: String)
{
put_result(kind, Nil, List(XML.Text(system.symbols.decode(text))))
}
@@ -145,13 +111,13 @@
def interrupt() = synchronized {
if (proc == null) error("Cannot interrupt Isabelle: no process")
- if (pid == null) put_result(Kind.SYSTEM, "Cannot interrupt: unknown pid")
+ if (pid == null) put_result(Markup.SYSTEM, "Cannot interrupt: unknown pid")
else {
try {
if (system.execute(true, "kill", "-INT", pid).waitFor == 0)
- put_result(Kind.SIGNAL, "INT")
+ put_result(Markup.SIGNAL, "INT")
else
- put_result(Kind.SYSTEM, "Cannot interrupt: kill command failed")
+ put_result(Markup.SYSTEM, "Cannot interrupt: kill command failed")
}
catch { case e: IOException => error("Cannot interrupt Isabelle: " + e.getMessage) }
}
@@ -162,7 +128,7 @@
else {
try_close()
Thread.sleep(500) // FIXME property!?
- put_result(Kind.SIGNAL, "KILL")
+ put_result(Markup.SIGNAL, "KILL")
proc.destroy
proc = null
pid = null
@@ -222,17 +188,17 @@
finished = true
}
else {
- put_result(Kind.STDIN, s)
+ put_result(Markup.STDIN, s)
writer.write(s)
writer.flush
}
//}}}
}
catch {
- case e: IOException => put_result(Kind.SYSTEM, "Stdin thread: " + e.getMessage)
+ case e: IOException => put_result(Markup.SYSTEM, "Stdin thread: " + e.getMessage)
}
}
- put_result(Kind.SYSTEM, "Stdin thread terminated")
+ put_result(Markup.SYSTEM, "Stdin thread terminated")
}
}
@@ -256,7 +222,7 @@
else done = true
}
if (result.length > 0) {
- put_result(Kind.STDOUT, result.toString)
+ put_result(Markup.STDOUT, result.toString)
result.length = 0
}
else {
@@ -267,10 +233,10 @@
//}}}
}
catch {
- case e: IOException => put_result(Kind.SYSTEM, "Stdout thread: " + e.getMessage)
+ case e: IOException => put_result(Markup.SYSTEM, "Stdout thread: " + e.getMessage)
}
}
- put_result(Kind.SYSTEM, "Stdout thread terminated")
+ put_result(Markup.SYSTEM, "Stdout thread terminated")
}
}
@@ -332,8 +298,8 @@
val body = read_chunk()
header match {
case List(XML.Elem(name, props, Nil))
- if name.size == 1 && Kind.code.isDefinedAt(name(0)) =>
- put_result(Kind.code(name(0)), props, body)
+ if name.size == 1 && Kind.markup.isDefinedAt(name(0)) =>
+ put_result(Kind.markup(name(0)), props, body)
case _ => throw new Protocol_Error("bad header: " + header.toString)
}
}
@@ -341,15 +307,15 @@
}
catch {
case e: IOException =>
- put_result(Kind.SYSTEM, "Cannot read message:\n" + e.getMessage)
+ put_result(Markup.SYSTEM, "Cannot read message:\n" + e.getMessage)
case e: Protocol_Error =>
- put_result(Kind.SYSTEM, "Malformed message:\n" + e.getMessage)
+ put_result(Markup.SYSTEM, "Malformed message:\n" + e.getMessage)
}
} while (c != -1)
stream.close
try_close()
- put_result(Kind.SYSTEM, "Message thread terminated")
+ put_result(Markup.SYSTEM, "Message thread terminated")
}
}
@@ -392,8 +358,8 @@
override def run() = {
val rc = proc.waitFor()
Thread.sleep(300) // FIXME property!?
- put_result(Kind.SYSTEM, "Exit thread terminated")
- put_result(Kind.EXIT, rc.toString)
+ put_result(Markup.SYSTEM, "Exit thread terminated")
+ put_result(Markup.EXIT, rc.toString)
rm_fifo()
}
}.start
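
After this rewrite a Result simply wraps the message element, and the former Kind enumeration collapses into the markup name string carried by that element. The following is a minimal Scala sketch of classifying messages by such a string, with a toy element type instead of XML.Elem and made-up markup names:

    object KindDemo {
      // toy stand-in for XML.Elem: a markup name plus a body
      case class Elem(name: String, body: List[String])

      class Result(val message: Elem) {
        def kind: String = message.name
        def isRaw: Boolean = kind == "stdout"
        def isControl: Boolean = kind == "system" || kind == "signal" || kind == "exit"
        def isStatus: Boolean = kind == "status"
      }

      def main(args: Array[String]): Unit = {
        val r = new Result(Elem("status", List("ready")))
        println((r.kind, r.isStatus, r.isControl))   // (status,true,false)
      }
    }
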
--- a/src/Pure/System/session.scala Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/System/session.scala Mon Jul 05 14:21:53 2010 +0100
@@ -110,14 +110,14 @@
{
raw_results.event(result)
- val target_id: Option[Session.Entity_ID] = Position.get_id(result.props)
+ val target_id: Option[Session.Entity_ID] = Position.get_id(result.message.attributes)
val target: Option[Session.Entity] =
target_id match {
case None => None
case Some(id) => lookup_entity(id)
}
if (target.isDefined) target.get.consume(result.message, indicate_command_change)
- else if (result.kind == Isabelle_Process.Kind.STATUS) {
+ else if (result.is_status) {
// global status message
result.body match {
@@ -145,7 +145,7 @@
case _ => if (!result.is_ready) bad_result(result)
}
}
- else if (result.kind == Isabelle_Process.Kind.EXIT)
+ else if (result.kind == Markup.EXIT)
prover = null
else if (result.is_raw)
raw_output.event(result)
@@ -176,7 +176,7 @@
{
receiveWithin(timeout) {
case result: Isabelle_Process.Result
- if result.kind == Isabelle_Process.Kind.INIT =>
+ if result.kind == Markup.INIT =>
while (receive {
case result: Isabelle_Process.Result =>
handle_result(result); !result.is_ready
@@ -184,7 +184,7 @@
None
case result: Isabelle_Process.Result
- if result.kind == Isabelle_Process.Kind.EXIT =>
+ if result.kind == Markup.EXIT =>
Some(startup_error())
case TIMEOUT => // FIXME clarify
--- a/src/Pure/goal.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/goal.ML Mon Jul 05 14:21:53 2010 +0100
@@ -104,12 +104,7 @@
(* parallel proofs *)
-fun fork e = Future.fork_pri ~1 (fn () =>
- let
- val _ = Output.status (Markup.markup Markup.forked "");
- val x = e (); (*sic*)
- val _ = Output.status (Markup.markup Markup.joined "");
- in x end);
+fun fork e = Future.fork_pri ~1 (fn () => Future.report e);
val parallel_proofs = Unsynchronized.ref 1;
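
The removed lines indicate what Future.report is expected to do here: bracket the running body with the forked/joined status messages that Goal.fork previously emitted itself (async_state above relies on the same wrapper). A toy, self-contained Scala sketch of such a reporting bracket; the name report and the plain println notifications are illustrative only:

    object ReportDemo {
      // run a computation between "forked" and "joined" notifications
      def report[A](e: () => A): A = {
        println("forked")
        val x = e()
        println("joined")
        x
      }

      def main(args: Array[String]): Unit =
        println(report(() => 6 * 7))   // prints forked, joined, 42
    }
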
--- a/src/Pure/library.scala Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Pure/library.scala Mon Jul 05 14:21:53 2010 +0100
@@ -129,11 +129,12 @@
def timeit[A](message: String)(e: => A) =
{
- val start = System.currentTimeMillis()
+ val start = System.nanoTime()
val result = Exn.capture(e)
- val stop = System.currentTimeMillis()
+ val stop = System.nanoTime()
System.err.println(
- (if (message.isEmpty) "" else message + ": ") + (stop - start) + "ms elapsed time")
+ (if (message == null || message.isEmpty) "" else message + ": ") +
+ ((stop - start).toDouble / 1000000) + "ms elapsed time")
Exn.release(result)
}
}
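
Switching from currentTimeMillis to nanoTime gives sub-millisecond resolution, and dividing the nanosecond difference by 1000000 reports fractional milliseconds (for example, 1,500,000 ns prints as 1.5ms). A self-contained sketch of the same timing wrapper, using try/finally in place of Exn.capture/Exn.release:

    object TimeitDemo {
      // time an expression and print the elapsed wall-clock time in fractional ms
      def timeit[A](message: String)(e: => A): A = {
        val start = System.nanoTime()
        try e
        finally {
          val stop = System.nanoTime()
          val prefix = if (message == null || message.isEmpty) "" else message + ": "
          System.err.println(prefix + ((stop - start).toDouble / 1000000) + "ms elapsed time")
        }
      }

      def main(args: Array[String]): Unit =
        println(timeit("sum") { (1L to 1000000L).sum })
    }
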
--- a/src/Tools/Code/code_thingol.ML Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Tools/Code/code_thingol.ML Mon Jul 05 14:21:53 2010 +0100
@@ -267,10 +267,7 @@
| purify_base "op &" = "and"
| purify_base "op |" = "or"
| purify_base "op -->" = "implies"
- | purify_base "op :" = "member"
| purify_base "op =" = "eq"
- | purify_base "*" = "product"
- | purify_base "+" = "sum"
| purify_base s = Name.desymbolize false s;
fun namify thy get_basename get_thyname name =
let
--- a/src/Tools/Code/etc/settings Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Tools/Code/etc/settings Mon Jul 05 14:21:53 2010 +0100
@@ -1,2 +1,12 @@
ISABELLE_TOOLS="$ISABELLE_TOOLS:$COMPONENT/lib/Tools"
+
+EXEC_GHC=$(choosefrom \
+ "$ISABELLE_HOME/contrib/ghc" \
+ "$ISABELLE_HOME/../ghc" \
+ $(type -p ghc))
+
+EXEC_OCAML=$(choosefrom \
+ "$ISABELLE_HOME/contrib/ocaml" \
+ "$ISABELLE_HOME/../ocaml" \
+ $(type -p ocaml))
--- a/src/Tools/jEdit/src/jedit/document_model.scala Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Tools/jEdit/src/jedit/document_model.scala Mon Jul 05 14:21:53 2010 +0100
@@ -95,14 +95,6 @@
def to_current(doc: Document, offset: Int): Int =
(offset /: changes_from(doc)) ((i, change) => change after i)
- def lines_of_command(doc: Document, cmd: Command): (Int, Int) =
- {
- val start = doc.command_start(cmd).get // FIXME total?
- val stop = start + cmd.length
- (buffer.getLineOfOffset(to_current(doc, start)),
- buffer.getLineOfOffset(to_current(doc, stop)))
- }
-
/* text edits */
--- a/src/Tools/jEdit/src/jedit/document_view.scala Mon Jul 05 14:21:30 2010 +0100
+++ b/src/Tools/jEdit/src/jedit/document_view.scala Mon Jul 05 14:21:53 2010 +0100
@@ -104,13 +104,10 @@
react {
case Command_Set(changed) =>
Swing_Thread.now {
+ // FIXME cover doc states as well!!?
val document = model.recent_document()
- // FIXME cover doc states as well!!?
- for (command <- changed if document.commands.contains(command)) {
- update_syntax(document, command)
- invalidate_line(document, command)
- overview.repaint()
- }
+ if (changed.exists(document.commands.contains))
+ full_repaint(document, changed)
}
case bad => System.err.println("command_change_actor: ignoring bad message " + bad)
@@ -118,34 +115,82 @@
}
}
+ def full_repaint(document: Document, changed: Set[Command])
+ {
+ Swing_Thread.assert()
+
+ val buffer = model.buffer
+ var visible_change = false
+
+ for ((command, start) <- document.command_starts) {
+ if (changed(command)) {
+ val stop = start + command.length
+ val line1 = buffer.getLineOfOffset(model.to_current(document, start))
+ val line2 = buffer.getLineOfOffset(model.to_current(document, stop))
+ if (line2 >= text_area.getFirstLine &&
+ line1 <= text_area.getFirstLine + text_area.getVisibleLines)
+ visible_change = true
+ text_area.invalidateLineRange(line1, line2)
+ }
+ }
+ if (visible_change) model.buffer.propertiesChanged()
+
+ overview.repaint() // FIXME paint here!?
+ }
+
/* text_area_extension */
private val text_area_extension = new TextAreaExtension
{
- override def paintValidLine(gfx: Graphics2D,
- screen_line: Int, physical_line: Int, start: Int, end: Int, y: Int)
+ override def paintScreenLineRange(gfx: Graphics2D,
+ first_line: Int, last_line: Int, physical_lines: Array[Int],
+ start: Array[Int], end: Array[Int], y0: Int, line_height: Int)
{
- val document = model.recent_document()
- def from_current(pos: Int) = model.from_current(document, pos)
- def to_current(pos: Int) = model.to_current(document, pos)
+ Swing_Thread.now {
+ val document = model.recent_document()
+ def from_current(pos: Int) = model.from_current(document, pos)
+ def to_current(pos: Int) = model.to_current(document, pos)
- val line_end = model.visible_line_end(start, end)
- val line_height = text_area.getPainter.getFontMetrics.getHeight
+ val command_range: Iterable[(Command, Int)] =
+ {
+ val range = document.command_range(from_current(start(0)))
+ if (range.hasNext) {
+ val (cmd0, start0) = range.next
+ new Iterable[(Command, Int)] {
+ def iterator = Document.command_starts(document.commands.iterator(cmd0), start0)
+ }
+ }
+ else Iterable.empty
+ }
- val saved_color = gfx.getColor
- try {
- for ((command, command_start) <-
- document.command_range(from_current(start), from_current(line_end)) if !command.is_ignored)
- {
- val p = text_area.offsetToXY(start max to_current(command_start))
- val q = text_area.offsetToXY(line_end min to_current(command_start + command.length))
- assert(p.y == q.y)
- gfx.setColor(Document_View.choose_color(document, command))
- gfx.fillRect(p.x, y, q.x - p.x, line_height)
+ val saved_color = gfx.getColor
+ try {
+ var y = y0
+ for (i <- 0 until physical_lines.length) {
+ if (physical_lines(i) != -1) {
+ val line_start = start(i)
+ val line_end = model.visible_line_end(line_start, end(i))
+
+ val a = from_current(line_start)
+ val b = from_current(line_end)
+ val cmds = command_range.iterator.
+ dropWhile { case (cmd, c) => c + cmd.length <= a } .
+ takeWhile { case (_, c) => c < b }
+
+ for ((command, command_start) <- cmds if !command.is_ignored) {
+ val p = text_area.offsetToXY(line_start max to_current(command_start))
+ val q = text_area.offsetToXY(line_end min to_current(command_start + command.length))
+ assert(p.y == q.y)
+ gfx.setColor(Document_View.choose_color(document, command))
+ gfx.fillRect(p.x, y, q.x - p.x, line_height)
+ }
+ }
+ y += line_height
+ }
}
+ finally { gfx.setColor(saved_color) }
}
- finally { gfx.setColor(saved_color) }
}
override def getToolTipText(x: Int, y: Int): String =
@@ -186,30 +231,6 @@
}
- /* (re)painting */
-
- private val update_delay = Swing_Thread.delay_first(500) { model.buffer.propertiesChanged() }
- // FIXME update_delay property
-
- private def update_syntax(document: Document, cmd: Command)
- {
- val (line1, line2) = model.lines_of_command(document, cmd)
- if (line2 >= text_area.getFirstLine &&
- line1 <= text_area.getFirstLine + text_area.getVisibleLines)
- update_delay()
- }
-
- private def invalidate_line(document: Document, cmd: Command) =
- {
- val (start, stop) = model.lines_of_command(document, cmd)
- text_area.invalidateLineRange(start, stop)
- }
-
- private def invalidate_all() =
- text_area.invalidateLineRange(text_area.getFirstPhysicalLine,
- text_area.getLastPhysicalLine)
-
-
/* overview of command status left of scrollbar */
private val overview = new JPanel(new BorderLayout)
@@ -252,9 +273,9 @@
val buffer = model.buffer
val document = model.recent_document()
def to_current(pos: Int) = model.to_current(document, pos)
- val saved_color = gfx.getColor
+ val saved_color = gfx.getColor // FIXME needed!?
try {
- for ((command, start) <- document.command_range(0) if !command.is_ignored) {
+ for ((command, start) <- document.command_starts if !command.is_ignored) {
val line1 = buffer.getLineOfOffset(to_current(start))
val line2 = buffer.getLineOfOffset(to_current(start + command.length)) + 1
val y = line_to_y(line1)