src/HOL/SPARK/Tools/spark_commands.ML
author wenzelm
Mon Mar 31 12:35:39 2014 +0200 (2014-03-31 ago)
changeset 56334 6b3739fee456
parent 56208 06cc31dff138
child 56798 939e88e79724
permissions -rw-r--r--
some shortcuts for chunks, which sometimes avoid bulky string output;
     1 (*  Title:      HOL/SPARK/Tools/spark_commands.ML
     2     Author:     Stefan Berghofer
     3     Copyright:  secunet Security Networks AG
     4 
     5 Isar commands for handling SPARK/Ada verification conditions.
     6 *)
     7 structure SPARK_Commands: sig end =
     8 struct
     9 
(*  Open a new SPARK environment from previously provided files.
    header: lexer for the VC file header (Fdl_Lexer.vcg_header or
      Fdl_Lexer.siv_header, supplied by the command wrappers below);
    prfx: user-supplied name-space prefix;
    files: function yielding the loaded files together with the updated
      theory -- the list is matched in the fixed order vc, fdl, rls.  *)
fun spark_open header (prfx, files) thy =
  let
    (* the three Token.file records, in the order vc, fdl, rls *)
    val ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file,
      {lines = fdl_lines, pos = fdl_pos, ...},
      {lines = rls_lines, pos = rls_pos, ...}], thy') = files thy;
    (* base path (extension dropped) of the VC file, resolved against the
       theory's master directory *)
    val base = fst (Path.split_ext (File.full_path (Resources.master_directory thy') src_path));
  in
    SPARK_VCs.set_vcs
      (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines)))
      (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines))
      (snd (Fdl_Parser.parse_vcs header vc_pos (cat_lines vc_lines)))
      base prfx thy'
  end;
    23 
(* FIXME: legacy variant that reads the .vcg/.siv file directly from the
   file system by explicit name, deriving the companion .fdl and .rls paths
   from the base name.  Superseded by spark_open above, which goes through
   the theory's resource/file-provision machinery.  *)
fun spark_open_old (vc_name, prfx) thy =
  let
    (* load the VC file itself *)
    val ((vc_path, vc_id), vc_text) = Resources.load_file thy (Path.explode vc_name);
    (* pick the header lexer according to the file extension *)
    val (base, header) =
      (case Path.split_ext vc_path of
        (base, "vcg") => (base, Fdl_Lexer.vcg_header >> K ())
      | (base, "siv") => (base, Fdl_Lexer.siv_header >> K ())
      | _ => error "File name must end with .vcg or .siv");
    (* companion declaration (.fdl) and rules (.rls) files share the base name *)
    val fdl_path = Path.ext "fdl" base;
    val rls_path = Path.ext "rls" base;
  in
    SPARK_VCs.set_vcs
      (snd (Fdl_Parser.parse_declarations (Path.position fdl_path) (File.read fdl_path)))
      (Fdl_Parser.parse_rules (Path.position rls_path) (File.read rls_path))
      (snd (Fdl_Parser.parse_vcs header (Path.position vc_path) vc_text))
      base prfx thy
  end;
    42 
    43 fun add_proof_fun_cmd pf thy =
    44   let val ctxt = Proof_Context.init_global thy
    45   in SPARK_VCs.add_proof_fun
    46     (fn optT => Syntax.parse_term ctxt #>
    47        the_default I (Option.map Type.constraint optT) #>
    48        Syntax.check_term ctxt) pf thy
    49   end;
    50 
    51 fun add_spark_type_cmd (s, (raw_typ, cmap)) thy =
    52   SPARK_VCs.add_type (s, (Syntax.read_typ_global thy raw_typ, cmap)) thy;
    53 
    54 fun get_vc thy vc_name =
    55   (case SPARK_VCs.lookup_vc thy false vc_name of
    56     SOME (ctxt, (_, proved, ctxt', stmt)) =>
    57       if is_some proved then
    58         error ("The verification condition " ^
    59           quote vc_name ^ " has already been proved.")
    60       else (ctxt @ [ctxt'], stmt)
    61   | NONE => error ("There is no verification condition " ^
    62       quote vc_name ^ "."));
    63 
(*  Enter proof mode for the named verification condition: look it up via
    get_vc (failing if unknown or already proved) and set up a theorem
    statement whose after-qed continuation marks the VC as proved in the
    background theory.  *)
fun prove_vc vc_name lthy =
  let
    val thy = Proof_Context.theory_of lthy;
    val (ctxt, stmt) = get_vc thy vc_name
  in
    Specification.theorem Thm.theoremK NONE
      (* after-qed: record the resulting theorems for this VC *)
      (fn thmss => (Local_Theory.background_theory
         (SPARK_VCs.mark_proved vc_name (flat thmss))))
      (Binding.name vc_name, []) [] ctxt stmt false lthy
  end;
    74 
    75 fun string_of_status NONE = "(unproved)"
    76   | string_of_status (SOME _) = "(proved)";
    77 
(*  Pretty-print the current SPARK environment: context elements, proof
    function definitions, and all verification conditions whose status
    satisfies the predicate p; f renders each VC's status string.
    VCs are grouped by their trace label.  *)
fun show_status thy (p, f) =
  let
    val (context, defs, vcs) = SPARK_VCs.get_vcs thy true;

    (* keep only VCs whose status matches p, coalesced by trace label *)
    val vcs' = AList.coalesce (op =) (map_filter
      (fn (name, (trace, status, ctxt, stmt)) =>
         if p status then
           SOME (trace, (name, status, ctxt, stmt))
         else NONE) vcs);

    (* proof context with the environment's context elements initialised *)
    val ctxt = thy |>
      Proof_Context.init_global |>
      Context.proof_map (fold Element.init context)
  in
    [Pretty.str "Context:",
     Pretty.chunks (maps (Element.pretty_ctxt ctxt) context),

     Pretty.str "Definitions:",
     Pretty.chunks (map (fn (b, th) => Pretty.block
       [Binding.pretty b, Pretty.str ":",
        Pretty.brk 1,
        Display.pretty_thm ctxt th])
          defs),

     Pretty.str "Verification conditions:",
     (* one section per trace: the trace label followed by its VCs *)
     Pretty.chunks2 (maps (fn (trace, vcs'') =>
       Pretty.str trace ::
       map (fn (name, status, context', stmt) =>
         Pretty.big_list (name ^ " " ^ f status)
           (Element.pretty_ctxt ctxt context' @
            Element.pretty_stmt ctxt stmt)) vcs'') vcs')] |>
    Pretty.writeln_chunks2
  end;
   111 
(* legacy command: load a .vcg/.siv file by explicit file name
   (implemented by spark_open_old; see its FIXME) *)
val _ =
  Outer_Syntax.command @{command_spec "spark_open"}
    "open a new SPARK environment and load a SPARK-generated .vcg or .siv file"
    (Parse.name -- Parse.parname >> (Toplevel.theory o spark_open_old));
   116 
(* load a .vcg file (plus companion files) via the resources machinery *)
val _ =
  Outer_Syntax.command @{command_spec "spark_open_vcg"}
    "open a new SPARK environment and load a SPARK-generated .vcg file"
    (Parse.parname -- Resources.provide_parse_files "spark_open_vcg"
      >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
   122 
(* load a .siv file (plus companion files) via the resources machinery *)
val _ =
  Outer_Syntax.command @{command_spec "spark_open_siv"}
    "open a new SPARK environment and load a SPARK-generated .siv file"
    (Parse.parname -- Resources.provide_parse_files "spark_open_siv"
      >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
   128 
(* optional proof-function type annotation: "(argname, ...) : resname" *)
val pfun_type = Scan.option
  (Args.parens (Parse.list1 Parse.name) --| Args.colon -- Parse.name);
   131 
(* each entry has the form: name [pfun_type] = term *)
val _ =
  Outer_Syntax.command @{command_spec "spark_proof_functions"}
    "associate SPARK proof functions with terms"
    (Scan.repeat1 (Parse.name -- (pfun_type --| Args.$$$ "=" -- Parse.term)) >>
      (Toplevel.theory o fold add_proof_fun_cmd));
   137 
(* each entry: spark-name = typ [(field = name, ...)], the optional
   parenthesised list being the constructor/field map *)
val _ =
  Outer_Syntax.command @{command_spec "spark_types"}
    "associate SPARK types with types"
    (Scan.repeat1 (Parse.name -- Parse.!!! (Args.$$$ "=" |-- Parse.typ --
       Scan.optional (Args.parens (Parse.list1
         (Parse.name -- Parse.!!! (Args.$$$ "=" |-- Parse.xname)))) [])) >>
       (Toplevel.theory o fold add_spark_type_cmd));
   145 
(* start an interactive proof of a single named VC (see prove_vc) *)
val _ =
  Outer_Syntax.command @{command_spec "spark_vc"}
    "enter into proof mode for a specific verification condition"
    (Parse.name >> (fn name =>
      (Toplevel.print o Toplevel.local_theory_to_proof NONE (prove_vc name))));
   151 
(* diagnostic command; optional "(proved)"/"(unproved)" filters which VCs
   are shown -- when filtering, the status string is suppressed (K "") *)
val _ =
  Outer_Syntax.improper_command @{command_spec "spark_status"}
    "show the name and state of all loaded verification conditions"
    (Scan.optional
       (Args.parens
          (   Args.$$$ "proved" >> K (is_some, K "")
           || Args.$$$ "unproved" >> K (is_none, K "")))
       (K true, string_of_status) >> (fn args =>
        Toplevel.keep (fn state => show_status (Toplevel.theory_of state) args)))
   161 
(* close the environment; the optional "(incomplete)" flag (default false)
   is passed through to SPARK_VCs.close *)
val _ =
  Outer_Syntax.command @{command_spec "spark_end"}
    "close the current SPARK environment"
    (Scan.optional (@{keyword "("} |-- Parse.!!!
         (Parse.reserved "incomplete" --| @{keyword ")"}) >> K true) false >>
       (Toplevel.theory o SPARK_VCs.close));
   168 
(* sanity check at the end of each theory: every SPARK environment opened
   in it must have been closed with spark_end *)
val _ = Theory.setup (Theory.at_end (fn thy =>
  let
    val _ = SPARK_VCs.is_closed thy
      orelse error ("Found the end of the theory, " ^ 
        "but the last SPARK environment is still open.")
  in NONE end));
   175 
   176 end;