Discontinued old spark_open; spark_open_siv is now spark_open
author berghofe
date Wed, 30 Apr 2014 15:43:44 +0200
changeset 56798 939e88e79724
parent 56797 32963b43a538
child 56802 c83eb2435b27
src/HOL/SPARK/Examples/Gcd/Greatest_Common_Divisor.thy
src/HOL/SPARK/Examples/Liseq/Longest_Increasing_Subsequence.thy
src/HOL/SPARK/Examples/RIPEMD-160/F.thy
src/HOL/SPARK/Examples/RIPEMD-160/Hash.thy
src/HOL/SPARK/Examples/RIPEMD-160/K_L.thy
src/HOL/SPARK/Examples/RIPEMD-160/K_R.thy
src/HOL/SPARK/Examples/RIPEMD-160/R_L.thy
src/HOL/SPARK/Examples/RIPEMD-160/R_R.thy
src/HOL/SPARK/Examples/RIPEMD-160/Round.thy
src/HOL/SPARK/Examples/RIPEMD-160/S_L.thy
src/HOL/SPARK/Examples/RIPEMD-160/S_R.thy
src/HOL/SPARK/Examples/Sqrt/Sqrt.thy
src/HOL/SPARK/Manual/Complex_Types.thy
src/HOL/SPARK/Manual/Example_Verification.thy
src/HOL/SPARK/Manual/Proc1.thy
src/HOL/SPARK/Manual/Proc2.thy
src/HOL/SPARK/Manual/Reference.thy
src/HOL/SPARK/Manual/Simple_Greatest_Common_Divisor.thy
src/HOL/SPARK/SPARK_Setup.thy
src/HOL/SPARK/Tools/spark_commands.ML
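
The user-visible effect is a mechanical rename in theories: spark_open now behaves as spark_open_siv did, so the ".siv" extension is dropped from its argument, while ".vcg" files are handled by the separate spark_open_vcg command. A minimal before/after sketch, using the g_c_d example from this changeset:

  spark_open "greatest_common_divisor/g_c_d.siv"  (* old form, discontinued *)
  spark_open "greatest_common_divisor/g_c_d"      (* new form; the .siv file is implied *)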
--- a/src/HOL/SPARK/Examples/Gcd/Greatest_Common_Divisor.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/Gcd/Greatest_Common_Divisor.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -10,7 +10,7 @@
 spark_proof_functions
   gcd = "gcd :: int \<Rightarrow> int \<Rightarrow> int"
 
-spark_open "greatest_common_divisor/g_c_d.siv"
+spark_open "greatest_common_divisor/g_c_d"
 
 spark_vc procedure_g_c_d_4
 proof -
--- a/src/HOL/SPARK/Examples/Liseq/Longest_Increasing_Subsequence.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/Liseq/Longest_Increasing_Subsequence.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -27,7 +27,7 @@
 *}
 
 definition liseq' :: "(nat \<Rightarrow> 'a::linorder) \<Rightarrow> nat \<Rightarrow> nat" where
-  "liseq' xs i = Max (card ` (iseq xs (Suc i) \<inter> {is. Max is = i}))"  
+  "liseq' xs i = Max (card ` (iseq xs (Suc i) \<inter> {is. Max is = i}))"
 
 lemma iseq_finite: "finite (iseq xs i)"
   apply (simp add: iseq_def)
@@ -541,7 +541,7 @@
 
 text {* The verification conditions *}
 
-spark_open "liseq/liseq_length.siv"
+spark_open "liseq/liseq_length"
 
 spark_vc procedure_liseq_length_5
   by (simp_all add: liseq_singleton liseq'_singleton)
--- a/src/HOL/SPARK/Examples/RIPEMD-160/F.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/F.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification
 begin
 
-spark_open "rmd/f.siv"
+spark_open "rmd/f"
 
 spark_vc function_f_2
   using assms by simp_all
--- a/src/HOL/SPARK/Examples/RIPEMD-160/Hash.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/Hash.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification
 begin
 
-spark_open "rmd/hash.siv"
+spark_open "rmd/hash"
 
 abbreviation from_chain :: "chain \<Rightarrow> RMD.chain" where
   "from_chain c \<equiv> (
--- a/src/HOL/SPARK/Examples/RIPEMD-160/K_L.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/K_L.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification
 begin
 
-spark_open "rmd/k_l.siv"
+spark_open "rmd/k_l"
 
 spark_vc function_k_l_6
   using assms by (simp add: K_def)
--- a/src/HOL/SPARK/Examples/RIPEMD-160/K_R.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/K_R.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification
 begin
 
-spark_open "rmd/k_r.siv"
+spark_open "rmd/k_r"
 
 spark_vc function_k_r_6
   using assms by (simp add: K'_def)
--- a/src/HOL/SPARK/Examples/RIPEMD-160/R_L.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/R_L.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification RMD_Lemmas
 begin
 
-spark_open "rmd/r_l.siv"
+spark_open "rmd/r_l"
 
 spark_vc function_r_l_2
 proof -
--- a/src/HOL/SPARK/Examples/RIPEMD-160/R_R.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/R_R.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification RMD_Lemmas
 begin
 
-spark_open "rmd/r_r.siv"
+spark_open "rmd/r_r"
 
 spark_vc function_r_r_2
 proof -
--- a/src/HOL/SPARK/Examples/RIPEMD-160/Round.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/Round.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification
 begin
 
-spark_open "rmd/round.siv"
+spark_open "rmd/round"
 
 abbreviation from_chain :: "chain \<Rightarrow> RMD.chain" where
   "from_chain c \<equiv> (
@@ -88,7 +88,7 @@
   show ?thesis
     unfolding `nat (i + 1) = Suc (nat i)` steps_step steps_to_steps'
     ..
-qed 
+qed
 
 lemma step_from_hyp:
   assumes
@@ -244,7 +244,7 @@
         nat_transfer
       unfolding `?MM = 2 ^ len_of TYPE(32)`
       unfolding word_uint.Abs_norm
-      by (simp add: 
+      by (simp add:
         `a' mod ?MM = a'`
         `e' mod ?MM = e'`
         `?X mod ?MM = ?X`)
@@ -274,7 +274,7 @@
           \<lparr>h0 = ca, h1 = cb, h2 = cc,
              h3 = cd, h4 = ce\<rparr>\<rparr>)
     0 x"
-    unfolding steps_def 
+    unfolding steps_def
     by (simp add:
       uint_word_of_int_id[OF `0 <= ca` `ca <= ?M`]
       uint_word_of_int_id[OF `0 <= cb` `cb <= ?M`]
@@ -398,7 +398,7 @@
     `crc <= ?M`
     `crd <= ?M`
     `cre <= ?M`
-    
+
     `0 <= cla`
     `0 <= clb`
     `0 <= clc`
--- a/src/HOL/SPARK/Examples/RIPEMD-160/S_L.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/S_L.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification RMD_Lemmas
 begin
 
-spark_open "rmd/s_l.siv"
+spark_open "rmd/s_l"
 
 spark_vc function_s_l_2
 proof -
--- a/src/HOL/SPARK/Examples/RIPEMD-160/S_R.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/RIPEMD-160/S_R.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -8,7 +8,7 @@
 imports RMD_Specification RMD_Lemmas
 begin
 
-spark_open "rmd/s_r.siv"
+spark_open "rmd/s_r"
 
 spark_vc function_s_r_2
 proof -
--- a/src/HOL/SPARK/Examples/Sqrt/Sqrt.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Examples/Sqrt/Sqrt.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -7,7 +7,7 @@
 imports SPARK
 begin
 
-spark_open "sqrt/isqrt.siv"
+spark_open "sqrt/isqrt"
 
 spark_vc function_isqrt_4
 proof -
--- a/src/HOL/SPARK/Manual/Complex_Types.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Manual/Complex_Types.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -31,7 +31,7 @@
   complex_types__initialized3 = initialized3
 
 (*<*)
-spark_open "complex_types_app/initialize.siv"
+spark_open "complex_types_app/initialize"
 
 spark_vc procedure_initialize_1
   by (simp add: initialized_def)
--- a/src/HOL/SPARK/Manual/Example_Verification.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Manual/Example_Verification.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -127,7 +127,7 @@
 We now instruct Isabelle to open
 a new verification environment and load a set of VCs. This is done using the
 command \isa{\isacommand{spark\_open}}, which must be given the name of a
-\texttt{*.siv} or \texttt{*.vcg} file as an argument. Behind the scenes, Isabelle
+\texttt{*.siv} file as an argument. Behind the scenes, Isabelle
 parses this file and the corresponding \texttt{*.fdl} and \texttt{*.rls} files,
 and converts the VCs to Isabelle terms. Using the command \isa{\isacommand{spark\_status}},
 the user can display the current VCs together with their status (proved, unproved).
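
As a concrete illustration (a sketch based on the Proc1 example in this changeset), a single invocation now pulls in three files from the same directory:

  spark_open "loop_invariant/proc1"
  (* reads loop_invariant/proc1.siv  -- the simplified VCs *)
  (* plus  loop_invariant/proc1.fdl  -- declarations       *)
  (* and   loop_invariant/proc1.rls  -- rules              *)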
--- a/src/HOL/SPARK/Manual/Proc1.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Manual/Proc1.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -2,7 +2,7 @@
 imports SPARK
 begin
 
-spark_open "loop_invariant/proc1.siv"
+spark_open "loop_invariant/proc1"
 
 spark_vc procedure_proc1_5
   by (simp add: ring_distribs pull_mods)
--- a/src/HOL/SPARK/Manual/Proc2.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Manual/Proc2.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -2,7 +2,7 @@
 imports SPARK
 begin
 
-spark_open "loop_invariant/proc2.siv"
+spark_open "loop_invariant/proc2"
 
 spark_vc procedure_proc2_7
   by (simp add: ring_distribs pull_mods)
--- a/src/HOL/SPARK/Manual/Reference.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Manual/Reference.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -24,8 +24,9 @@
 This section describes the syntax and effect of each of the commands provided
 by HOL-\SPARK{}.
 @{rail "@'spark_open' name ('(' name ')')?"}
-Opens a new \SPARK{} verification environment and loads a file with VCs. The file can be either
-a \texttt{*.vcg} or a \texttt{*.siv} file. The corresponding \texttt{*.fdl} and \texttt{*.rls}
+Opens a new \SPARK{} verification environment and loads a \texttt{*.siv} file with VCs.
+Alternatively, \texttt{*.vcg} files can be loaded using \isa{\isacommand{spark\_open\_vcg}}.
+The corresponding \texttt{*.fdl} and \texttt{*.rls}
 files must reside in the same directory as the file given as an argument to the command.
 This command also generates records and datatypes for the types specified in the
 \texttt{*.fdl} file, unless they have already been associated with user-defined
--- a/src/HOL/SPARK/Manual/Simple_Greatest_Common_Divisor.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Manual/Simple_Greatest_Common_Divisor.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -10,7 +10,7 @@
 spark_proof_functions
   gcd = "gcd :: int \<Rightarrow> int \<Rightarrow> int"
 
-spark_open "simple_greatest_common_divisor/g_c_d.siv"
+spark_open "simple_greatest_common_divisor/g_c_d"
 
 spark_vc procedure_g_c_d_4
   using `0 < d` `gcd c d = gcd m n`
--- a/src/HOL/SPARK/SPARK_Setup.thy	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/SPARK_Setup.thy	Wed Apr 30 15:43:44 2014 +0200
@@ -9,8 +9,8 @@
 imports "~~/src/HOL/Word/Word" "~~/src/HOL/Word/Bit_Comparison"
 keywords
   "spark_open_vcg" :: thy_load ("vcg", "fdl", "rls") and
-  "spark_open_siv" :: thy_load ("siv", "fdl", "rls") and
-  "spark_open" "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
+  "spark_open" :: thy_load ("siv", "fdl", "rls") and
+  "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
   "spark_vc" :: thy_goal and "spark_status" :: diag
 begin
 
--- a/src/HOL/SPARK/Tools/spark_commands.ML	Tue Apr 29 22:52:15 2014 +0200
+++ b/src/HOL/SPARK/Tools/spark_commands.ML	Wed Apr 30 15:43:44 2014 +0200
@@ -4,10 +4,11 @@
 
 Isar commands for handling SPARK/Ada verification conditions.
 *)
+
 structure SPARK_Commands: sig end =
 struct
 
-fun spark_open header (prfx, files) thy =
+fun spark_open header (files, prfx) thy =
   let
     val ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file,
       {lines = fdl_lines, pos = fdl_pos, ...},
@@ -21,25 +22,6 @@
       base prfx thy'
   end;
 
-(* FIXME *)
-fun spark_open_old (vc_name, prfx) thy =
-  let
-    val ((vc_path, vc_id), vc_text) = Resources.load_file thy (Path.explode vc_name);
-    val (base, header) =
-      (case Path.split_ext vc_path of
-        (base, "vcg") => (base, Fdl_Lexer.vcg_header >> K ())
-      | (base, "siv") => (base, Fdl_Lexer.siv_header >> K ())
-      | _ => error "File name must end with .vcg or .siv");
-    val fdl_path = Path.ext "fdl" base;
-    val rls_path = Path.ext "rls" base;
-  in
-    SPARK_VCs.set_vcs
-      (snd (Fdl_Parser.parse_declarations (Path.position fdl_path) (File.read fdl_path)))
-      (Fdl_Parser.parse_rules (Path.position rls_path) (File.read rls_path))
-      (snd (Fdl_Parser.parse_vcs header (Path.position vc_path) vc_text))
-      base prfx thy
-  end;
-
 fun add_proof_fun_cmd pf thy =
   let val ctxt = Proof_Context.init_global thy
   in SPARK_VCs.add_proof_fun
@@ -110,20 +92,15 @@
   end;
 
 val _ =
-  Outer_Syntax.command @{command_spec "spark_open"}
-    "open a new SPARK environment and load a SPARK-generated .vcg or .siv file"
-    (Parse.name -- Parse.parname >> (Toplevel.theory o spark_open_old));
-
-val _ =
   Outer_Syntax.command @{command_spec "spark_open_vcg"}
     "open a new SPARK environment and load a SPARK-generated .vcg file"
-    (Parse.parname -- Resources.provide_parse_files "spark_open_vcg"
+    (Resources.provide_parse_files "spark_open_vcg" -- Parse.parname
       >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
 
 val _ =
-  Outer_Syntax.command @{command_spec "spark_open_siv"}
+  Outer_Syntax.command @{command_spec "spark_open"}
     "open a new SPARK environment and load a SPARK-generated .siv file"
-    (Parse.parname -- Resources.provide_parse_files "spark_open_siv"
+    (Resources.provide_parse_files "spark_open" -- Parse.parname
       >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
 
 val pfun_type = Scan.option
@@ -169,7 +146,7 @@
 val _ = Theory.setup (Theory.at_end (fn thy =>
   let
     val _ = SPARK_VCs.is_closed thy
-      orelse error ("Found the end of the theory, " ^ 
+      orelse error ("Found the end of the theory, " ^
         "but the last SPARK environment is still open.")
   in NONE end));