(*  Title:      HOL/SPARK/Tools/spark_commands.ML
    Author:     Stefan Berghofer
    Copyright:  secunet Security Networks AG

Isar commands for handling SPARK/Ada verification conditions.
*)
(* External interface: only the theory setup function is exported;
   all commands are registered as a side effect of loading this file. *)
signature SPARK_COMMANDS =
sig
  val setup: theory -> theory
end
(* Implementation of the SPARK/Ada Isar commands. *)
structure SPARK_Commands: SPARK_COMMANDS =
struct
(* Apply parsing function f to the full contents of the file at path,
   passing along a position derived from the file name. *)
fun read f path =
  let
    val pos = Position.file (Path.implode path);
    val text = File.read path;
  in f pos text end;
(* Load a SPARK-generated .vcg or .siv file (vc_name, resolved relative to
   the theory's master directory) together with the companion .fdl
   (declarations) and .rls (rules) files, and record the resulting
   verification conditions in the theory. *)
fun spark_open vc_name thy =
  let
    val (vc_path, _) = Thy_Load.check_file
      [Thy_Load.master_directory thy] (Path.explode vc_name);
    (* The file extension selects the matching header parser; the header
       content itself is discarded (>> K ()). *)
    val (base, header) = (case Path.split_ext vc_path of
        (base, "vcg") => (base, Fdl_Lexer.vcg_header >> K ())
      | (base, "siv") => (base, Fdl_Lexer.siv_header >> K ())
      | _ => error "File name must end with .vcg or .siv");
    (* The companion files share the base name of the VC file. *)
    val fdl_path = Path.ext "fdl" base;
    val rls_path = Path.ext "rls" base;
  in
    SPARK_VCs.set_vcs
      (snd (read Fdl_Parser.parse_declarations fdl_path))
      (read Fdl_Parser.parse_rules rls_path)
      (snd (snd (read (Fdl_Parser.parse_vcs header) vc_path)))
      base thy
  end;
(* Register one SPARK proof function from its command-line syntax pf.
   The supplied closure parses the term string in the theory's global
   context, applies the optional type constraint optT (identity when
   absent), and type-checks the result. *)
fun add_proof_fun_cmd pf thy =
  let val ctxt = ProofContext.init_global thy
  in SPARK_VCs.add_proof_fun
    (fn optT => Syntax.parse_term ctxt #>
       the_default I (Option.map Type.constraint optT) #>
       Syntax.check_term ctxt) pf thy
  end;
(* Look up the verification condition vc_name in thy, returning its
   context elements and statement.  Fails if the VC does not exist or
   has already been proved. *)
fun get_vc thy vc_name =
  (case SPARK_VCs.lookup_vc thy vc_name of
    NONE =>
      error ("There is no verification condition " ^
        quote vc_name ^ ".")
  | SOME (context, (_, is_proved, context', stmt)) =>
      if is_proved then
        error ("The verification condition " ^
          quote vc_name ^ " has already been proved.")
      else (context @ [context'], stmt));
(* Enter proof mode for verification condition vc_name.  When the proof
   is concluded, the after-qed continuation marks the VC as proved in
   the background theory. *)
fun prove_vc vc_name lthy =
  let
    val thy = ProofContext.theory_of lthy;
    val (ctxt, stmt) = get_vc thy vc_name
  in
    Specification.theorem Thm.theoremK NONE
      (* after_qed: record that this VC has been discharged *)
      (K (Local_Theory.background_theory (SPARK_VCs.mark_proved vc_name)))
      (Binding.name vc_name, []) ctxt stmt true lthy
  end;
(* Render a VC's proof status for display. *)
fun string_of_status proved =
  if proved then "(proved)" else "(unproved)";
(* Arrange pretty-printing elements vertically, separated by blank
   lines (two forced breaks between consecutive elements). *)
fun chunks prts =
  let val spaced = separate [Pretty.fbrk, Pretty.fbrk] (map single prts)
  in Pretty.blk (0, flat spaced) end;
(* Print the SPARK context, definitions, and all verification conditions
   whose status satisfies predicate p; f renders each VC's status. *)
fun show_status (p, f) = Toplevel.no_timing o Toplevel.keep (fn state =>
  let
    val thy = Toplevel.theory_of state;

    val (context, defs, vcs) = SPARK_VCs.get_vcs thy;

    (* Keep only VCs whose status matches p, grouped by trace label. *)
    val vcs' = AList.coalesce (op =) (map_filter
      (fn (name, (trace, status, ctxt, stmt)) =>
         if p status then
           SOME (trace, (name, status, ctxt, stmt))
         else NONE) vcs);

    (* Proof context with all stored context elements initialized,
       used for pretty-printing below. *)
    val ctxt = state |>
      Toplevel.theory_of |>
      ProofContext.init_global |>
      Context.proof_map (fold Element.init context)
  in
    (writeln "Context:\n";
     Pretty.chunks (maps (Element.pretty_ctxt ctxt) context) |>
       Pretty.writeln;

     writeln "\nDefinitions:\n";
     Pretty.chunks (map (fn (bdg, th) => Pretty.block
       [Pretty.str (Binding.str_of bdg ^ ":"),
        Pretty.brk 1,
        Display.pretty_thm ctxt th])
         defs) |>
       Pretty.writeln;

     writeln "\nVerification conditions:\n";
     (* One trace heading per group, followed by its VCs. *)
     chunks (maps (fn (trace, vcs'') =>
       Pretty.str trace ::
       map (fn (name, status, context', stmt) =>
         Pretty.big_list (name ^ " " ^ f status)
           (Element.pretty_ctxt ctxt context' @
            Element.pretty_stmt ctxt stmt)) vcs'') vcs') |>
       Pretty.writeln)
  end);
(* Outer syntax: spark_open "file" -- load VC files into the theory. *)
val _ =
  Outer_Syntax.command "spark_open"
    "Open a new SPARK environment and load a SPARK-generated .vcg or .siv file."
    Keyword.thy_decl
    (Parse.name >> (Toplevel.theory o spark_open));
(* Parser for an optional proof-function type annotation of the form
   "(argtype1, ..., argtypeN) : restype". *)
val pfun_type = Scan.option
  (Args.parens (Parse.list1 Parse.name) --| Args.colon -- Parse.name);
(* Outer syntax: spark_proof_functions name [(types) : type] = term ... *)
val _ =
  Outer_Syntax.command "spark_proof_functions"
    "Associate SPARK proof functions with terms."
    Keyword.thy_decl
    (Scan.repeat1 (Parse.name -- (pfun_type --| Args.$$$ "=" -- Parse.term)) >>
      (Toplevel.theory o fold add_proof_fun_cmd));
(* Outer syntax: spark_vc name -- open a proof for one VC. *)
val _ =
  Outer_Syntax.command "spark_vc"
    "Enter into proof mode for a specific verification condition."
    Keyword.thy_goal
    (Parse.name >> (fn name =>
      (Toplevel.print o Toplevel.local_theory_to_proof NONE (prove_vc name))));
(* Outer syntax: spark_status [(proved|unproved)].  An optional filter
   restricts the display to proved or unproved VCs; when a filter is
   given, the status renderer is suppressed (K ""), since the status is
   then implied by the filter. *)
val _ =
  Outer_Syntax.improper_command "spark_status"
    "Show the name and state of all loaded verification conditions."
    Keyword.diag
    (Scan.optional
      (Args.parens
        (    Args.$$$ "proved" >> K (I, K "")
          || Args.$$$ "unproved" >> K (not, K "")))
      (K true, string_of_status) >> show_status);
(* Outer syntax: spark_end -- close the current SPARK environment. *)
val _ =
  Outer_Syntax.command "spark_end"
    "Close the current SPARK environment."
    Keyword.thy_decl
    (Scan.succeed (Toplevel.theory SPARK_VCs.close));
(* At the end of the theory, check that the last SPARK environment has
   been closed; the theory itself is left unchanged (NONE). *)
val setup = Theory.at_end (fn thy =>
  if SPARK_VCs.is_closed thy then NONE
  else error ("Found the end of the theory, " ^
    "but the last SPARK environment is still open."));

end;