tuned

author:    boehmes
date:      Wed, 15 Dec 2010 08:39:24 +0100
changeset: 41123 3bb9be510a9d
parent:    41122 72176ec5e031
child:     41124 1de17a2de5ad

files:
src/HOL/Tools/SMT/smt_translate.ML
src/HOL/Tools/SMT/smt_utils.ML
src/HOL/Tools/SMT/z3_proof_literals.ML
src/HOL/Tools/SMT/z3_proof_parser.ML
src/HOL/Tools/SMT/z3_proof_tools.ML
--- a/src/HOL/Tools/SMT/smt_translate.ML	Wed Dec 15 08:39:24 2010 +0100
+++ b/src/HOL/Tools/SMT/smt_translate.ML	Wed Dec 15 08:39:24 2010 +0100
@@ -6,7 +6,7 @@
 
 signature SMT_TRANSLATE =
 sig
-  (* intermediate term structure *)
+  (*intermediate term structure*)
   datatype squant = SForall | SExists
   datatype 'a spattern = SPat of 'a list | SNoPat of 'a list
   datatype sterm =
@@ -15,7 +15,7 @@
     SLet of string * sterm * sterm |
     SQua of squant * string list * sterm spattern list * int option * sterm
 
-  (* configuration options *)
+  (*configuration options*)
   type prefixes = {sort_prefix: string, func_prefix: string}
   type sign = {
     header: string list,
--- a/src/HOL/Tools/SMT/smt_utils.ML	Wed Dec 15 08:39:24 2010 +0100
+++ b/src/HOL/Tools/SMT/smt_utils.ML	Wed Dec 15 08:39:24 2010 +0100
@@ -6,17 +6,18 @@
 
 signature SMT_UTILS =
 sig
+  (*basic combinators*)
   val repeat: ('a -> 'a option) -> 'a -> 'a
   val repeat_yield: ('a -> 'b -> ('a * 'b) option) -> 'a -> 'b -> 'a * 'b
 
-  (* types *)
+  (*types*)
   val dest_funT: int -> typ -> typ list * typ
 
-  (* terms *)
+  (*terms*)
   val dest_conj: term -> term * term
   val dest_disj: term -> term * term
 
-  (* patterns and instantiations *)
+  (*patterns and instantiations*)
   val mk_const_pat: theory -> string -> (ctyp -> 'a) -> 'a * cterm
   val destT1: ctyp -> ctyp
   val destT2: ctyp -> ctyp
@@ -24,7 +25,7 @@
   val instT: ctyp -> ctyp * cterm -> cterm
   val instT': cterm -> ctyp * cterm -> cterm
 
-  (* certified terms *)
+  (*certified terms*)
   val certify: Proof.context -> term -> cterm
   val typ_of: cterm -> typ
   val dest_cabs: cterm -> Proof.context -> cterm * Proof.context
@@ -35,7 +36,7 @@
   val dest_cprop: cterm -> cterm
   val mk_cequals: cterm -> cterm -> cterm
 
-  (* conversions *)
+  (*conversions*)
   val if_conv: (term -> bool) -> conv -> conv -> conv
   val if_true_conv: (term -> bool) -> conv -> conv
   val binders_conv: (Proof.context -> conv) -> Proof.context -> conv
@@ -45,6 +46,8 @@
 structure SMT_Utils: SMT_UTILS =
 struct
 
+(* basic combinators *)
+
 fun repeat f =
   let fun rep x = (case f x of SOME y => rep y | NONE => x)
   in rep end
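
For illustration, the `repeat` combinator whose definition appears in the hunk above can be exercised in plain Standard ML; this is only a minimal sketch, and the `halve` step function is hypothetical, not part of SMT_Utils.

  (* repeat as defined in SMT_Utils: iterate f as long as it succeeds and
     return the first value on which f yields NONE *)
  fun repeat f =
    let fun rep x = (case f x of SOME y => rep y | NONE => x)
    in rep end

  (* hypothetical step function: halve a number while it is at least 2 *)
  fun halve x = if x >= 2 then SOME (x div 2) else NONE

  val result = repeat halve 40   (* 40 -> 20 -> 10 -> 5 -> 2 -> 1, so result = 1 *)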
--- a/src/HOL/Tools/SMT/z3_proof_literals.ML	Wed Dec 15 08:39:24 2010 +0100
+++ b/src/HOL/Tools/SMT/z3_proof_literals.ML	Wed Dec 15 08:39:24 2010 +0100
@@ -6,7 +6,7 @@
 
 signature Z3_PROOF_LITERALS =
 sig
-  (* literal table *)
+  (*literal table*)
   type littab = thm Termtab.table
   val make_littab: thm list -> littab
   val insert_lit: thm -> littab -> littab
@@ -14,17 +14,17 @@
   val lookup_lit: littab -> term -> thm option
   val get_first_lit: (term -> bool) -> littab -> thm option
 
-  (* rules *)
+  (*rules*)
   val true_thm: thm
   val rewrite_true: thm
 
-  (* properties *)
+  (*properties*)
   val is_conj: term -> bool
   val is_disj: term -> bool
   val exists_lit: bool -> (term -> bool) -> term -> bool
   val negate: cterm -> cterm
 
-  (* proof tools *)
+  (*proof tools*)
   val explode: bool -> bool -> bool -> term list -> thm -> thm list
   val join: bool -> littab -> term -> thm
   val prove_conj_disj_eq: cterm -> thm
@@ -37,7 +37,7 @@
 
 
 
-(** literal table **)
+(* literal table *)
 
 type littab = thm Termtab.table
 
@@ -51,14 +51,14 @@
 
 
 
-(** rules **)
+(* rules *)
 
 val true_thm = @{lemma "~False" by simp}
 val rewrite_true = @{lemma "True == ~ False" by simp}
 
 
 
-(** properties and term operations **)
+(* properties and term operations *)
 
 val is_neg = (fn @{const Not} $ _ => true | _ => false)
 fun is_neg' f = (fn @{const Not} $ t => f t | _ => false)
@@ -87,9 +87,9 @@
 
 
 
-(** proof tools **)
+(* proof tools *)
 
-(* explosion of conjunctions and disjunctions *)
+(** explosion of conjunctions and disjunctions **)
 
 local
   fun destc ct = Thm.dest_binop (Thm.dest_arg ct)
@@ -117,8 +117,10 @@
   val dneg_rule = T.precompose destn @{thm notnotD}
 in
 
-(* explode a term into literals and collect all rules to be able to deduce
-   particular literals afterwards *)
+(*
+  explode a term into literals and collect all rules to be able to deduce
+  particular literals afterwards
+*)
 fun explode_term is_conj =
   let
     val dest = if is_conj then dest_conj_term else dest_disj_term
@@ -144,11 +146,15 @@
 
   in explode0 end
 
-(* extract a literal by applying previously collected rules *)
+(*
+  extract a literal by applying previously collected rules
+*)
 fun extract_lit thm rules = fold T.compose rules thm
 
 
-(* explode a theorem into its literals *)
+(*
+  explode a theorem into its literals
+*)
 fun explode is_conj full keep_intermediate stop_lits =
   let
     val dest_rules = if is_conj then dest_conj_rules else dest_disj_rules
@@ -180,7 +186,7 @@
 
 
 
-(* joining of literals to conjunctions or disjunctions *)
+(** joining of literals to conjunctions or disjunctions **)
 
 local
   fun on_cprem i f thm = f (Thm.cprem_of thm i)
@@ -259,7 +265,7 @@
 
 
 
-(* proving equality of conjunctions or disjunctions *)
+(** proving equality of conjunctions or disjunctions **)
 
 fun iff_intro thm1 thm2 = thm2 COMP (thm1 COMP @{thm iffI})
 
--- a/src/HOL/Tools/SMT/z3_proof_parser.ML	Wed Dec 15 08:39:24 2010 +0100
+++ b/src/HOL/Tools/SMT/z3_proof_parser.ML	Wed Dec 15 08:39:24 2010 +0100
@@ -6,7 +6,7 @@
 
 signature Z3_PROOF_PARSER =
 sig
-  (* proof rules *)
+  (*proof rules*)
   datatype rule = TrueAxiom | Asserted | Goal | ModusPonens | Reflexivity |
     Symmetry | Transitivity | TransitivityStar | Monotonicity | QuantIntro |
     Distributivity | AndElim | NotOrElim | Rewrite | RewriteStar | PullQuant |
@@ -16,7 +16,7 @@
     CnfStar | Skolemize | ModusPonensOeq | ThLemma of string list
   val string_of_rule: rule -> string
 
-  (* proof parser *)
+  (*proof parser*)
   datatype proof_step = Proof_Step of {
     rule: rule,
     prems: int list,
@@ -34,7 +34,7 @@
 
 
 
-(** proof rules **)
+(* proof rules *)
 
 datatype rule = TrueAxiom | Asserted | Goal | ModusPonens | Reflexivity |
   Symmetry | Transitivity | TransitivityStar | Monotonicity | QuantIntro |
@@ -91,7 +91,7 @@
 
 
 
-(** certified terms and variables **)
+(* certified terms and variables *)
 
 val (var_prefix, decl_prefix) = ("v", "sk")
 (* "decl_prefix" is for skolem constants (represented by free variables)
@@ -171,7 +171,7 @@
 
 
 
-(** proof parser **)
+(* proof parser *)
 
 datatype proof_step = Proof_Step of {
   rule: rule,
@@ -179,7 +179,7 @@
   prop: cterm }
 
 
-(* parser context *)
+(** parser context **)
 
 val not_false = Thm.cterm_of @{theory} (@{const Not} $ @{const False})
 
@@ -240,7 +240,7 @@
 fun finish (_, _, _, steps, vars, ctxt) = (steps, vars, ctxt)
 
 
-(* core parser *)
+(** core parser **)
 
 fun parse_exn line_no msg = raise SMT_Failure.SMT (SMT_Failure.Other_Failure
   ("Z3 proof parser (line " ^ string_of_int line_no ^ "): " ^ msg))
@@ -269,7 +269,7 @@
 fun lookup_context f x (st as ((_, cx), _)) = (f cx x, st)
 
 
-(* parser combinators and parsers for basic entities *)
+(** parser combinators and parsers for basic entities **)
 
 fun $$ s = Scan.lift (Scan.$$ s)
 fun this s = Scan.lift (Scan.this_string s)
@@ -306,7 +306,7 @@
 fun id st = ($$ "#" |-- nat_num) st
 
 
-(* parsers for various parts of Z3 proofs *)
+(** parsers for various parts of Z3 proofs **)
 
 fun sort st = Scan.first [
   this "array" |-- bra (sort --| $$ ":" -- sort) >> (op -->),
@@ -400,15 +400,17 @@
   rule SOME ~1)
 
 
-(* overall parser *)
+(** overall parser **)
 
-(* Currently, terms are parsed bottom-up (i.e., along with parsing the proof
-   text line by line), but proofs are reconstructed top-down (i.e. by an
-   in-order top-down traversal of the proof tree/graph).  The latter approach
-   was taken because some proof texts comprise irrelevant proof steps which
-   will thus not be reconstructed.  This approach might also be beneficial
-   for constructing terms, but it would also increase the complexity of the
-   (otherwise rather modular) code. *)
+(*
+  Currently, terms are parsed bottom-up (i.e., along with parsing the proof
+  text line by line), but proofs are reconstructed top-down (i.e. by an
+  in-order top-down traversal of the proof tree/graph).  The latter approach
+  was taken because some proof texts comprise irrelevant proof steps which
+  will thus not be reconstructed.  This approach might also be beneficial
+  for constructing terms, but it would also increase the complexity of the
+  (otherwise rather modular) code.
+*)
 
 fun parse ctxt typs terms proof_text =
   make_context ctxt typs terms
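
The comment on the overall parser in the hunk above contrasts bottom-up term construction with top-down proof reconstruction. The sketch below, over a hypothetical table of proof steps (not the actual parser data structures), shows why a traversal starting from the root only ever touches the steps it needs, so irrelevant steps are never reconstructed.

  (* hypothetical proof step: an identifier plus the identifiers of its premises *)
  type step = {id: int, prems: int list}

  (* collect the ids reachable from the root; only these would be reconstructed,
     irrelevant steps are never visited *)
  fun reachable (steps: step list) (root: int) =
    let
      fun get i = List.find (fn {id, ...} => id = i) steps
      fun visit (i, seen) =
        if List.exists (fn j => j = i) seen then seen
        else
          (case get i of
            NONE => seen
          | SOME {prems, ...} => List.foldl visit (i :: seen) prems)
    in visit (root, []) end

  val used = reachable [{id = 1, prems = []}, {id = 2, prems = []},
    {id = 3, prems = [1]}] 3
  (* used = [1, 3]: step 2 is irrelevant and is skipped *)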
--- a/src/HOL/Tools/SMT/z3_proof_tools.ML	Wed Dec 15 08:39:24 2010 +0100
+++ b/src/HOL/Tools/SMT/z3_proof_tools.ML	Wed Dec 15 08:39:24 2010 +0100
@@ -6,18 +6,18 @@
 
 signature Z3_PROOF_TOOLS =
 sig
-  (* accessing and modifying terms *)
+  (*accessing and modifying terms*)
   val term_of: cterm -> term
   val prop_of: thm -> term
   val as_meta_eq: cterm -> cterm
 
-  (* theorem nets *)
+  (*theorem nets*)
   val thm_net_of: ('a -> thm) -> 'a list -> 'a Net.net
   val net_instance': ((thm -> thm option) -> 'a -> 'a option) -> 'a Net.net ->
     cterm -> 'a option
   val net_instance: thm Net.net -> cterm -> thm option
 
-  (* proof combinators *)
+  (*proof combinators*)
   val under_assumption: (thm -> thm) -> cterm -> thm
   val with_conv: conv -> (cterm -> thm) -> cterm -> thm
   val discharge: thm -> thm -> thm
@@ -29,16 +29,16 @@
   val by_abstraction: bool * bool -> Proof.context -> thm list ->
     (Proof.context -> cterm -> thm) -> cterm -> thm
 
-  (* a faster COMP *)
+  (*a faster COMP*)
   type compose_data
   val precompose: (cterm -> cterm list) -> thm -> compose_data
   val precompose2: (cterm -> cterm * cterm) -> thm -> compose_data
   val compose: compose_data -> thm -> thm
 
-  (* unfolding of 'distinct' *)
+  (*unfolding of 'distinct'*)
   val unfold_distinct_conv: conv
 
-  (* simpset *)
+  (*simpset*)
   val add_simproc: Simplifier.simproc -> Context.generic -> Context.generic
   val make_simpset: Proof.context -> thm list -> simpset
 end
@@ -107,7 +107,11 @@
 
 fun by_tac tac ct = Goal.norm_result (Goal.prove_internal [] ct (K (tac 1)))
 
-(* |- c x == t x ==> P (c x)  ~~>  c == t |- P (c x) *) 
+(*
+   |- c x == t x ==> P (c x)
+  ---------------------------
+      c == t |- P (c x)
+*) 
 fun make_hyp_def thm ctxt =
   let
     val (lhs, rhs) = Thm.dest_binop (Thm.cprem_of thm 1)