--- a/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML Mon Apr 26 21:18:20 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML Mon Apr 26 21:20:43 2010 +0200
@@ -21,11 +21,11 @@
minimize_command * string * string vector * thm * int
-> string * string list
val isar_proof_text:
- name_pool option -> bool -> int -> bool -> Proof.context
+ name_pool option * bool * int * bool * Proof.context * int list list
-> minimize_command * string * string vector * thm * int
-> string * string list
val proof_text:
- bool -> name_pool option -> bool -> int -> bool -> Proof.context
+ bool -> name_pool option * bool * int * bool * Proof.context * int list list
-> minimize_command * string * string vector * thm * int
-> string * string list
end;
@@ -41,8 +41,7 @@
fun is_ident_char c = Char.isAlphaNum c orelse c = #"_"
fun is_head_digit s = Char.isDigit (String.sub (s, 0))
-fun is_axiom_clause_number thm_names line_num =
- line_num <= Vector.length thm_names
+fun is_axiom_clause_number thm_names num = num <= Vector.length thm_names
fun ugly_name NONE s = s
| ugly_name (SOME the_pool) s =
@@ -50,12 +49,6 @@
SOME s' => s'
| NONE => s
-val trace_path = Path.basic "sledgehammer_proof_trace"
-fun trace_proof_msg f =
- if !trace then File.append (File.tmp_path trace_path) (f ()) else ();
-
-val string_of_thm = PrintMode.setmp [] o Display.string_of_thm
-
(**** PARSING OF TSTP FORMAT ****)
(* Syntax trees, either term list or formulae *)
@@ -110,7 +103,7 @@
fun parse_literals pool =
parse_literal pool ::: Scan.repeat ($$ "|" |-- parse_literal pool)
-(*Clause: a list of literals separated by the disjunction sign*)
+(* Clause: a list of literals separated by the disjunction sign. *)
fun parse_clause pool =
$$ "(" |-- parse_literals pool --| $$ ")" || Scan.single (parse_literal pool)
@@ -153,15 +146,14 @@
(* Syntax: <name>[0:<inference><annotations>] ||
<cnf_formulas> -> <cnf_formulas>. *)
-fun retuple_spass_proof_line ((name, deps), ts) = (name, ts, deps)
-fun parse_spass_proof_line pool =
+fun retuple_spass_line ((name, deps), ts) = (name, ts, deps)
+fun parse_spass_line pool =
parse_integer --| $$ "[" --| $$ "0" --| $$ ":" --| Symbol.scan_id
-- parse_spass_annotations --| $$ "]" --| $$ "|" --| $$ "|"
-- parse_horn_clause pool --| $$ "."
- >> retuple_spass_proof_line
+ >> retuple_spass_line
-fun parse_proof_line pool =
- fst o (parse_tstp_line pool || parse_spass_proof_line pool)
+fun parse_line pool = fst o (parse_tstp_line pool || parse_spass_line pool)
(**** INTERPRETATION OF TSTP SYNTAX TREES ****)
@@ -209,13 +201,10 @@
(*Invert the table of translations between Isabelle and ATPs*)
val const_trans_table_inv =
- Symtab.update ("fequal", "op =")
- (Symtab.make (map swap (Symtab.dest const_trans_table)));
+ Symtab.update ("fequal", @{const_name "op ="})
+ (Symtab.make (map swap (Symtab.dest const_trans_table)))
-fun invert_const c =
- case Symtab.lookup const_trans_table_inv c of
- SOME c' => c'
- | NONE => c;
+fun invert_const c = c |> Symtab.lookup const_trans_table_inv |> the_default c
(*The number of type arguments of a constant, zero if it's monomorphic*)
fun num_typargs thy s = length (Sign.const_typargs thy (s, Sign.the_const_type thy s));
@@ -272,21 +261,52 @@
| add_constraint ((false, cl, TVar(ix,_)), vt) = addix (ix,cl) vt
| add_constraint (_, vt) = vt;
-(* Final treatment of the list of "real" literals from a clause. *)
-fun finish [] =
- (* No "real" literals means only type information. *)
- HOLogic.true_const
- | finish lits =
- case filter_out (curry (op =) HOLogic.false_const) lits of
- [] => HOLogic.false_const
- | xs => foldr1 HOLogic.mk_disj (rev xs);
+fun is_positive_literal (@{const Trueprop} $ t) = is_positive_literal t
+ | is_positive_literal (@{const Not} $ _) = false
+ | is_positive_literal t = true
+
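+(* Syntactically negate a formula, pushing the negation through quantifiers and
+   connectives; used below when turning the ATP's refutation into a direct
+   proof. *)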
+fun negate_term thy (@{const Trueprop} $ t) =
+ @{const Trueprop} $ negate_term thy t
+ | negate_term thy (Const (@{const_name All}, T) $ Abs (s, T', t')) =
+ Const (@{const_name Ex}, T) $ Abs (s, T', negate_term thy t')
+ | negate_term thy (Const (@{const_name Ex}, T) $ Abs (s, T', t')) =
+ Const (@{const_name All}, T) $ Abs (s, T', negate_term thy t')
+ | negate_term thy (@{const "op -->"} $ t1 $ t2) =
+ @{const "op &"} $ t1 $ negate_term thy t2
+ | negate_term thy (@{const "op &"} $ t1 $ t2) =
+ @{const "op |"} $ negate_term thy t1 $ negate_term thy t2
+ | negate_term thy (@{const "op |"} $ t1 $ t2) =
+ @{const "op &"} $ negate_term thy t1 $ negate_term thy t2
+ | negate_term thy (@{const Not} $ t) = t
+ | negate_term thy t =
+ if fastype_of t = @{typ prop} then
+ HOLogic.mk_Trueprop (negate_term thy (Object_Logic.atomize_term thy t))
+ else
+ @{const Not} $ t
+
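+(* Build a single formula from the literals of a clause: the negative literals
+   (with their negations stripped) become a conjunctive premise and the positive
+   literals a disjunctive conclusion, yielding roughly
+   "A1 & ... & Am --> P1 | ... | Pn". *)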
+fun clause_for_literals _ [] = HOLogic.false_const
+ | clause_for_literals _ [lit] = lit
+ | clause_for_literals thy lits =
+ case List.partition is_positive_literal lits of
+ (pos_lits as _ :: _, neg_lits as _ :: _) =>
+ @{const "op -->"}
+ $ foldr1 HOLogic.mk_conj (map (negate_term thy) neg_lits)
+ $ foldr1 HOLogic.mk_disj pos_lits
+ | _ => foldr1 HOLogic.mk_disj lits
+
+(* Final treatment of the list of "real" literals from a clause.
+ No "real" literals means only type information. *)
+fun finish_clause _ [] = HOLogic.true_const
+ | finish_clause thy lits =
+ lits |> filter_out (curry (op =) HOLogic.false_const) |> rev
+ |> clause_for_literals thy
(*Accumulate sort constraints in vt, with "real" literals in lits.*)
-fun lits_of_strees _ (vt, lits) [] = (vt, finish lits)
- | lits_of_strees ctxt (vt, lits) (t::ts) =
- lits_of_strees ctxt (add_constraint (constraint_of_stree true t, vt), lits) ts
- handle STREE _ =>
- lits_of_strees ctxt (vt, term_of_stree [] (ProofContext.theory_of ctxt) t :: lits) ts;
+fun lits_of_strees thy (vt, lits) [] = (vt, finish_clause thy lits)
+ | lits_of_strees thy (vt, lits) (t :: ts) =
+ lits_of_strees thy (add_constraint (constraint_of_stree true t, vt), lits)
+ ts
+ handle STREE _ => lits_of_strees thy (vt, term_of_stree [] thy t :: lits) ts
(*Update TVars/TFrees with detected sort constraints.*)
fun repair_sorts vt =
@@ -304,14 +324,14 @@
(*Interpret a list of syntax trees as a clause, given by "real" literals and sort constraints.
vt0 holds the initial sort constraints, from the conjecture clauses.*)
fun clause_of_strees ctxt vt ts =
- let val (vt, dt) = lits_of_strees ctxt (vt, []) ts in
+ let val (vt, dt) = lits_of_strees (ProofContext.theory_of ctxt) (vt, []) ts in
dt |> repair_sorts vt |> TypeInfer.constrain HOLogic.boolT
|> Syntax.check_term ctxt
end
fun gen_all_vars t = fold_rev Logic.all (OldTerm.term_vars t) t;
-fun decode_proof_step vt0 (name, ts, deps) ctxt =
+fun decode_line vt0 (name, ts, deps) ctxt =
let val cl = clause_of_strees ctxt vt0 ts in
((name, cl, deps), fold Variable.declare_term (OldTerm.term_frees cl) ctxt)
end
@@ -331,102 +351,11 @@
(**** Translation of TSTP files to Isar Proofs ****)
-fun decode_proof_steps ctxt tuples =
+fun decode_lines ctxt tuples =
let val vt0 = tfree_constraints_of_clauses Vartab.empty (map #2 tuples) in
- #1 (fold_map (decode_proof_step vt0) tuples ctxt)
+ #1 (fold_map (decode_line vt0) tuples ctxt)
end
-(** Finding a matching assumption. The literals may be permuted, and variable names
- may disagree. We must try all combinations of literals (quadratic!) and
- match the variable names consistently. **)
-
-fun strip_alls_aux n (Const(@{const_name all}, _)$Abs(a,T,t)) =
- strip_alls_aux (n+1) (subst_bound (Var ((a,n), T), t))
- | strip_alls_aux _ t = t;
-
-val strip_alls = strip_alls_aux 0;
-
-exception MATCH_LITERAL of unit
-
-(* Remark 1: Ignore types. They are not to be trusted.
- Remark 2: Ignore order of arguments for equality. SPASS sometimes swaps
- them for no apparent reason. *)
-fun match_literal (Const (@{const_name "op ="}, _) $ t1 $ u1)
- (Const (@{const_name "op ="}, _) $ t2 $ u2) env =
- (env |> match_literal t1 t2 |> match_literal u1 u2
- handle MATCH_LITERAL () =>
- env |> match_literal t1 u2 |> match_literal u1 t2)
- | match_literal (t1 $ u1) (t2 $ u2) env =
- env |> match_literal t1 t2 |> match_literal u1 u2
- | match_literal (Abs (_,_,t1)) (Abs (_,_,t2)) env =
- match_literal t1 t2 env
- | match_literal (Bound i1) (Bound i2) env =
- if i1=i2 then env else raise MATCH_LITERAL ()
- | match_literal (Const(a1,_)) (Const(a2,_)) env =
- if a1=a2 then env else raise MATCH_LITERAL ()
- | match_literal (Free(a1,_)) (Free(a2,_)) env =
- if a1=a2 then env else raise MATCH_LITERAL ()
- | match_literal (Var(ix1,_)) (Var(ix2,_)) env = insert (op =) (ix1,ix2) env
- | match_literal _ _ _ = raise MATCH_LITERAL ()
-
-(* Checking that all variable associations are unique. The list "env" contains
- no repetitions, but does it contain say (x, y) and (y, y)? *)
-fun good env =
- let val (xs,ys) = ListPair.unzip env
- in not (has_duplicates (op=) xs orelse has_duplicates (op=) ys) end;
-
-(*Match one list of literals against another, ignoring types and the order of
- literals. Sorting is unreliable because we don't have types or variable names.*)
-fun matches_aux _ [] [] = true
- | matches_aux env (lit::lits) ts =
- let fun match1 us [] = false
- | match1 us (t::ts) =
- let val env' = match_literal lit t env
- in (good env' andalso matches_aux env' lits (us@ts)) orelse
- match1 (t::us) ts
- end
- handle MATCH_LITERAL () => match1 (t::us) ts
- in match1 [] ts end;
-
-(*Is this length test useful?*)
-fun matches (lits1,lits2) =
- length lits1 = length lits2 andalso
- matches_aux [] (map Envir.eta_contract lits1) (map Envir.eta_contract lits2);
-
-fun permuted_clause t =
- let val lits = HOLogic.disjuncts t
- fun perm [] = NONE
- | perm (ctm::ctms) =
- if matches (lits, HOLogic.disjuncts (HOLogic.dest_Trueprop (strip_alls ctm)))
- then SOME ctm else perm ctms
- in perm end;
-
-(*ctms is a list of conjecture clauses as yielded by Isabelle. Those returned by the
- ATP may have their literals reordered.*)
-fun isar_proof_body ctxt sorts ctms =
- let
- val _ = trace_proof_msg (K "\n\nisar_proof_body: start\n")
- val string_of_term =
- PrintMode.setmp (filter (curry (op =) Symbol.xsymbolsN)
- (print_mode_value ()))
- (Syntax.string_of_term ctxt)
- fun have_or_show "show" _ = " show \""
- | have_or_show have label = " " ^ have ^ " " ^ label ^ ": \""
- fun do_line _ (label, t, []) =
- (* No depedencies: it's a conjecture clause, with no proof. *)
- (case permuted_clause t ctms of
- SOME u => " assume " ^ label ^ ": \"" ^ string_of_term u ^ "\"\n"
- | NONE => raise TERM ("Sledgehammer_Proof_Reconstruct.isar_proof_body",
- [t]))
- | do_line have (label, t, deps) =
- have_or_show have label ^
- string_of_term (gen_all_vars (HOLogic.mk_Trueprop t)) ^
- "\"\n by (metis " ^ space_implode " " deps ^ ")\n"
- fun do_lines [(label, t, deps)] = [do_line "show" (label, t, deps)]
- | do_lines ((label, t, deps) :: lines) =
- do_line "have" (label, t, deps) :: do_lines lines
- in setmp_CRITICAL show_sorts sorts do_lines end;
-
fun unequal t (_, t', _) = not (t aconv t');
(*No "real" literals means only type information*)
@@ -438,7 +367,7 @@
(*Discard axioms; consolidate adjacent lines that prove the same clause, since they differ
only in type information.*)
-fun add_proof_line thm_names (num, t, []) lines =
+fun add_line thm_names (num, t, []) lines =
(* No dependencies: axiom or conjecture clause *)
if is_axiom_clause_number thm_names num then
(* Axioms are not proof lines *)
@@ -452,7 +381,7 @@
pre @ map (replace_deps (num', [num])) post)
else
(num, t, []) :: lines
- | add_proof_line _ (num, t, deps) lines =
+ | add_line _ (num, t, deps) lines =
if eq_types t then (num, t, deps) :: lines
(*Type information will be deleted later; skip repetition test.*)
else (*FIXME: Doesn't this code risk conflating proofs involving different types??*)
@@ -463,32 +392,30 @@
(pre @ map (replace_deps (num', [num])) post);
(*Recursively delete empty lines (type information) from the proof.*)
-fun add_nonnull_prfline ((num, t, []), lines) = (*no dependencies, so a conjecture clause*)
- if eq_types t (*must be type information, tfree_tcs, clsrel, clsarity: delete refs to it*)
- then delete_dep num lines
- else (num, t, []) :: lines
- | add_nonnull_prfline ((num, t, deps), lines) = (num, t, deps) :: lines
+fun add_nonnull_line (num, t, []) lines = (*no dependencies, so a conjecture clause*)
+ if eq_types t then
+ (*must be type information, tfree_tcs, clsrel, clsarity: delete refs to it*)
+ delete_dep num lines
+ else
+ (num, t, []) :: lines
+ | add_nonnull_line (num, t, deps) lines = (num, t, deps) :: lines
and delete_dep num lines =
- List.foldr add_nonnull_prfline [] (map (replace_deps (num, [])) lines);
+ fold_rev add_nonnull_line (map (replace_deps (num, [])) lines) []
fun bad_free (Free (a,_)) = String.isPrefix skolem_prefix a
| bad_free _ = false;
-(*TVars are forbidden in goals. Also, we don't want lines with <2 dependencies.
- To further compress proofs, setting modulus:=n deletes every nth line, and nlines
- counts the number of proof lines processed so far.
- Deleted lines are replaced by their own dependencies. Note that the "add_nonnull_prfline"
- phase may delete some dependencies, hence this phase comes later.*)
-fun add_wanted_prfline ctxt _ ((num, t, []), (nlines, lines)) =
- (nlines, (num, t, []) :: lines) (*conjecture clauses must be kept*)
- | add_wanted_prfline ctxt modulus ((num, t, deps), (nlines, lines)) =
- if eq_types t orelse not (null (Term.add_tvars t [])) orelse
- exists_subterm bad_free t orelse
- (not (null lines) andalso (*final line can't be deleted for these reasons*)
- (length deps < 2 orelse nlines mod modulus <> 0))
- then (nlines+1, map (replace_deps (num, deps)) lines) (*Delete line*)
- else (nlines+1, (num, t, deps) :: lines);
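+(* Drop lines that carry only type information, contain TVars (forbidden in
+   goals) or Skolem frees, or have fewer than two dependencies; lines that
+   referred to them inherit their dependencies instead. Conjecture clauses are
+   always kept. *)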
+fun add_desired_line ctxt (num, t, []) (j, lines) =
+ (j, (num, t, []) :: lines) (* conjecture clauses must be kept *)
+ | add_desired_line ctxt (num, t, deps) (j, lines) =
+ (j + 1,
+ if eq_types t orelse not (null (Term.add_tvars t [])) orelse
+ exists_subterm bad_free t orelse length deps < 2 then
+ map (replace_deps (num, deps)) lines (* delete line *)
+ else
+ (num, t, deps) :: lines)
(*Replace numeric proof lines by strings, either from thm_names or sequential line numbers*)
fun stringify_deps thm_names deps_map [] = []
| stringify_deps thm_names deps_map ((num, t, deps) :: lines) =
@@ -508,61 +435,22 @@
stringify_deps thm_names ((num, label) :: deps_map) lines
end
-fun isar_proof_start i =
- (if i = 1 then "" else "prefer " ^ string_of_int i ^ "\n") ^
- "proof (neg_clausify)\n";
-fun isar_fixes [] = ""
- | isar_fixes ts = " fix " ^ space_implode " " ts ^ "\n";
-fun isar_proof_end 1 = "qed"
- | isar_proof_end _ = "next"
+(** EXTRACTING LEMMAS **)
-fun isar_proof_from_atp_proof pool modulus sorts ctxt cnfs thm_names goal i =
- let
- val _ = trace_proof_msg (K "\nisar_proof_from_atp_proof: start\n")
- val tuples = map (parse_proof_line pool o explode) cnfs
- val _ = trace_proof_msg (fn () =>
- Int.toString (length tuples) ^ " tuples extracted\n")
- val ctxt = ProofContext.set_mode ProofContext.mode_schematic ctxt
- val raw_lines =
- fold_rev (add_proof_line thm_names) (decode_proof_steps ctxt tuples) []
- val _ = trace_proof_msg (fn () =>
- Int.toString (length raw_lines) ^ " raw_lines extracted\n")
- val nonnull_lines = List.foldr add_nonnull_prfline [] raw_lines
- val _ = trace_proof_msg (fn () =>
- Int.toString (length nonnull_lines) ^ " nonnull_lines extracted\n")
- val (_, lines) = List.foldr (add_wanted_prfline ctxt modulus) (0,[]) nonnull_lines
- val _ = trace_proof_msg (fn () =>
- Int.toString (length lines) ^ " lines extracted\n")
- val (ccls, fixes) = neg_conjecture_clauses ctxt goal i
- val _ = trace_proof_msg (fn () =>
- Int.toString (length ccls) ^ " conjecture clauses\n")
- val ccls = map forall_intr_vars ccls
- val _ = app (fn th => trace_proof_msg
- (fn () => "\nccl: " ^ string_of_thm ctxt th)) ccls
- val body = isar_proof_body ctxt sorts (map prop_of ccls)
- (stringify_deps thm_names [] lines)
- val n = Logic.count_prems (prop_of goal)
- val _ = trace_proof_msg (K "\nisar_proof_from_atp_proof: finishing\n")
- in
- isar_proof_start i ^ isar_fixes (map #1 fixes) ^ implode body ^
- isar_proof_end n ^ "\n"
- end
- handle STREE _ => raise Fail "Cannot parse ATP output";
-
-
-(* === EXTRACTING LEMMAS === *)
(* A list consisting of the first number in each line is returned.
TSTP: Interesting lines have the form "cnf(108, axiom, ...)", where the
number (108) is extracted.
SPASS: Lines have the form "108[0:Inp] ...", where the first number (108) is
extracted. *)
-fun extract_clause_numbers_in_proof proof =
+fun extract_clause_numbers_in_atp_proof atp_proof =
let
val tokens_of = String.tokens (not o is_ident_char)
- fun extract_num ("cnf" :: num :: _ :: _) = Int.fromString num
+ fun extract_num ("cnf" :: num :: "axiom" :: _) = Int.fromString num
+ | extract_num ("cnf" :: num :: "negated_conjecture" :: _) =
+ Int.fromString num
| extract_num (num :: "0" :: "Inp" :: _) = Int.fromString num
| extract_num _ = NONE
- in proof |> split_lines |> map_filter (extract_num o tokens_of) end
+ in atp_proof |> split_lines |> map_filter (extract_num o tokens_of) end
(* Used to label theorems chained into the Sledgehammer call (or rather
goal?) *)
@@ -586,24 +474,59 @@
"To minimize the number of lemmas, try this command: " ^
Markup.markup Markup.sendback command ^ ".\n"
-fun metis_proof_text (minimize_command, proof, thm_names, goal, i) =
+fun metis_proof_text (minimize_command, atp_proof, thm_names, goal, i) =
let
val lemmas =
- proof |> extract_clause_numbers_in_proof
- |> filter (is_axiom_clause_number thm_names)
- |> map (fn i => Vector.sub (thm_names, i - 1))
- |> filter_out (fn s => s = "??.unknown" orelse s = chained_hint)
- |> sort_distinct string_ord
+ atp_proof |> extract_clause_numbers_in_atp_proof
+ |> filter (is_axiom_clause_number thm_names)
+ |> map (fn i => Vector.sub (thm_names, i - 1))
+ |> filter_out (fn s => s = "??.unknown" orelse s = chained_hint)
+ |> sort_distinct string_ord
val n = Logic.count_prems (prop_of goal)
in (metis_line i n lemmas ^ minimize_line minimize_command lemmas, lemmas) end
-val is_proof_line = String.isPrefix "cnf(" orf String.isSubstring "||"
+val is_valid_line = String.isPrefix "cnf(" orf String.isSubstring "||"
+
+(** NEW PROOF RECONSTRUCTION CODE **)
+
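+(* A "label" identifies a fact proved locally in the generated Isar proof; the
+   "facts" used by a step consist of such labels (cited with "using") and of
+   theorem names (passed to "metis"). *)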
+type label = string * int
+type facts = label list * string list
+
+fun merge_fact_sets (ls1, ss1) (ls2, ss2) =
+ (union (op =) ls1 ls2, union (op =) ss1 ss2)
+
+datatype qualifier = Show | Then | Moreover | Ultimately
-fun do_space c = if Char.isSpace c then "" else str c
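+(* Skeleton of the Isar proof to be generated: a list of steps that fix
+   variables, state assumptions, or establish intermediate facts, the latter
+   justified either by a list of facts or by a case split. *)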
+datatype step =
+ Fix of term list |
+ Assume of label * term |
+ Have of qualifier list * label * term * byline
+and byline =
+ Facts of facts |
+ CaseSplit of step list list * facts
+
+val raw_prefix = "X"
+val assum_prefix = "A"
+val fact_prefix = "F"
+
+(* More general version of "add_fact_from_dep" below, which also copes with
+   dependencies that are fact names rather than clause numbers; currently unused:
+fun add_fact_from_dep s =
+  case Int.fromString s of
+    SOME n => apfst (cons (raw_prefix, n))
+  | NONE => apsnd (cons s)
+*)
+
+val add_fact_from_dep = apfst o cons o pair raw_prefix
+
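+(* Turn a decoded ATP line into a proof step. A line without dependencies is a
+   conjecture clause and becomes an assumption; e.g., clause 12 with dependencies
+   [7, 9] becomes roughly
+   "Have ([], ("X", 12), t, Facts ([("X", 9), ("X", 7)], []))". *)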
+fun step_for_tuple _ (label, t, []) = Assume ((raw_prefix, label), t)
+ | step_for_tuple j (label, t, deps) =
+ Have (if j = 1 then [Show] else [], (raw_prefix, label), t,
+ Facts (fold add_fact_from_dep deps ([], [])))
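+
+(* Remove whitespace from the ATP output, keeping a single space between two
+   identifier characters so that adjacent tokens are not glued together. *)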
fun strip_spaces_in_list [] = ""
- | strip_spaces_in_list [c1] = do_space c1
- | strip_spaces_in_list [c1, c2] = do_space c1 ^ do_space c2
+ | strip_spaces_in_list [c1] = if Char.isSpace c1 then "" else str c1
+ | strip_spaces_in_list [c1, c2] =
+ strip_spaces_in_list [c1] ^ strip_spaces_in_list [c2]
| strip_spaces_in_list (c1 :: c2 :: c3 :: cs) =
if Char.isSpace c1 then
strip_spaces_in_list (c2 :: c3 :: cs)
@@ -611,39 +534,337 @@
if Char.isSpace c3 then
strip_spaces_in_list (c1 :: c3 :: cs)
else
- str c1 ^
- (if is_ident_char c1 andalso is_ident_char c3 then " " else "") ^
+ str c1 ^ (if forall is_ident_char [c1, c3] then " " else "") ^
strip_spaces_in_list (c3 :: cs)
else
str c1 ^ strip_spaces_in_list (c2 :: c3 :: cs)
-
val strip_spaces = strip_spaces_in_list o String.explode
-fun isar_proof_text pool debug modulus sorts ctxt
- (minimize_command, proof, thm_names, goal, i) =
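+(* Translate the ATP output into a list of proof steps: keep the clause lines,
+   parse and decode them into terms, discard axioms and lines carrying only type
+   information, and turn the rest into "Fix"/"Assume"/"Have" steps. *)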
+fun proof_from_atp_proof pool ctxt atp_proof thm_names frees =
+ let
+ val tuples =
+ atp_proof |> split_lines |> map strip_spaces
+ |> filter is_valid_line
+ |> map (parse_line pool o explode)
+ |> decode_lines ctxt
+ val tuples = fold_rev (add_line thm_names) tuples []
+ val tuples = fold_rev add_nonnull_line tuples []
+ val tuples = fold_rev (add_desired_line ctxt) tuples (0, []) |> snd
+ in
+ (if null frees then [] else [Fix frees]) @
+ map2 step_for_tuple (length tuples downto 1) tuples
+ end
+
+val indent_size = 2
+val no_label = ("", ~1)
+
+fun no_show qs = not (member (op =) qs Show)
+
+(* When redirecting proofs, we keep information about the labels seen so far in
+ the "backpatches" data structure. The first component indicates which facts
+ should be associated with forthcoming proof steps. The second component is a
+ pair ("keep_ls", "drop_ls"), where "keep_ls" are the labels to keep and
+ "drop_ls" are those that should be dropped in a case split. *)
+type backpatches = (label * facts) list * (label list * label list)
+
+fun using_of_step (Have (_, _, _, by)) =
+ (case by of
+ Facts (ls, _) => ls
+ | CaseSplit (proofs, (ls, _)) => fold (union (op =) o using_of) proofs ls)
+ | using_of_step _ = []
+and using_of proof = fold (union (op =) o using_of_step) proof []
+
+fun new_labels_of_step (Fix _) = []
+ | new_labels_of_step (Assume (l, _)) = [l]
+ | new_labels_of_step (Have (_, l, _, _)) = [l]
+val new_labels_of = maps new_labels_of_step
+
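+(* If all the given subproofs end with the same sequence of steps, and the step
+   just before that common tail proves the same fact in each of them, return that
+   fact, the shortened subproofs, and the common tail; otherwise return NONE. *)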
+val join_proofs =
+ let
+ fun aux _ [] = NONE
+ | aux proof_tail (proofs as (proof1 :: _)) =
+ if exists null proofs then
+ NONE
+ else if forall (curry (op =) (hd proof1) o hd) (tl proofs) then
+ aux (hd proof1 :: proof_tail) (map tl proofs)
+ else case hd proof1 of
+ Have ([], l, t, by) =>
+ if forall (fn Have ([], l', t', _) :: _ => (l, t) = (l', t')
+ | _ => false) (tl proofs) andalso
+ not (exists (member (op =) (maps new_labels_of proofs))
+ (using_of proof_tail)) then
+ SOME (l, t, map rev proofs, proof_tail)
+ else
+ NONE
+ | _ => NONE
+ in aux [] o map rev end
+
+fun case_split_qualifiers proofs =
+ case length proofs of
+ 0 => []
+ | 1 => [Then]
+ | _ => [Ultimately]
+
+val index_in_shape = find_index o exists o curry (op =)
+
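+(* Turn the ATP's proof by contradiction into a direct proof of the subgoal.
+   "conjecture_shape" lists the clause numbers assigned to each hypothesis and,
+   last, to the negated conclusion. The first pass separates the steps that
+   depend on the negated conclusion; the second pass negates and redirects those
+   steps so that the proof ends with the conclusion, introducing a case split
+   wherever several contradiction branches meet. *)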
+fun direct_proof thy conjecture_shape hyp_ts concl_t proof =
let
- val cnfs = proof |> split_lines |> map strip_spaces |> filter is_proof_line
+ val concl_ls = map (pair raw_prefix) (List.last conjecture_shape)
+ fun find_hyp (_, j) = nth hyp_ts (index_in_shape j conjecture_shape)
+ fun first_pass ([], contra) = ([], contra)
+ | first_pass ((ps as Fix _) :: proof, contra) =
+ first_pass (proof, contra) |>> cons ps
+ | first_pass ((ps as Assume (l, t)) :: proof, contra) =
+ if member (op =) concl_ls l then
+ first_pass (proof, contra ||> cons ps)
+ else
+ first_pass (proof, contra) |>> cons (Assume (l, find_hyp l))
+ | first_pass ((ps as Have (qs, l, t, Facts (ls, ss))) :: proof, contra) =
+ if exists (member (op =) (fst contra)) ls then
+ first_pass (proof, contra |>> cons l ||> cons ps)
+ else
+ first_pass (proof, contra) |>> cons ps
+ | first_pass _ = raise Fail "malformed proof"
+ val (proof_top, (contra_ls, contra_proof)) =
+ first_pass (proof, (concl_ls, []))
+ val backpatch_label = the_default ([], []) oo AList.lookup (op =) o fst
+ fun backpatch_labels patches ls =
+ fold merge_fact_sets (map (backpatch_label patches) ls) ([], [])
+ fun second_pass end_qs ([], assums, patches) =
+ ([Have (end_qs, no_label,
+ if length assums < length concl_ls then
+ clause_for_literals thy (map (negate_term thy o fst) assums)
+ else
+ concl_t,
+ Facts (backpatch_labels patches (map snd assums)))], patches)
+ | second_pass end_qs (Assume (l, t) :: proof, assums, patches) =
+ second_pass end_qs (proof, (t, l) :: assums, patches)
+ | second_pass end_qs (Have (qs, l, t, Facts (ls, ss)) :: proof, assums,
+ patches) =
+ if member (op =) (snd (snd patches)) l andalso
+ not (AList.defined (op =) (fst patches) l) then
+ second_pass end_qs (proof, assums, patches ||> apsnd (append ls))
+ else
+ (case List.partition (member (op =) contra_ls) ls of
+ ([contra_l], co_ls) =>
+ if no_show qs then
+ second_pass end_qs
+ (proof, assums,
+ patches |>> cons (contra_l, (l :: co_ls, ss)))
+ |>> cons (if member (op =) (fst (snd patches)) l then
+ Assume (l, negate_term thy t)
+ else
+ Have (qs, l, negate_term thy t,
+ Facts (backpatch_label patches l)))
+ else
+ second_pass end_qs (proof, assums,
+ patches |>> cons (contra_l, (co_ls, ss)))
+ | (contra_ls as _ :: _, co_ls) =>
+ let
+ val proofs =
+ map_filter
+ (fn l =>
+ if member (op =) concl_ls l then
+ NONE
+ else
+ let
+ val drop_ls = filter (curry (op <>) l) contra_ls
+ in
+ second_pass []
+ (proof, assums,
+ patches ||> apfst (insert (op =) l)
+ ||> apsnd (union (op =) drop_ls))
+ |> fst |> SOME
+ end) contra_ls
+ val facts = (co_ls, [])
+ in
+ (case join_proofs proofs of
+ SOME (l, t, proofs, proof_tail) =>
+ Have (case_split_qualifiers proofs @
+ (if null proof_tail then end_qs else []), l, t,
+ CaseSplit (proofs, facts)) :: proof_tail
+ | NONE =>
+ [Have (case_split_qualifiers proofs @ end_qs, no_label,
+ concl_t, CaseSplit (proofs, facts))],
+ patches)
+ end
+ | _ => raise Fail "malformed proof")
+ | second_pass _ _ = raise Fail "malformed proof"
+ val (proof_bottom, _) =
+ second_pass [Show] (contra_proof, [], ([], ([], [])))
+ in proof_top @ proof_bottom end
+
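+(* Merge assumptions that state the same term, rerouting references to a dropped
+   duplicate to the label of its first occurrence. *)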
+val kill_duplicate_assumptions_in_proof =
+ let
+ fun relabel_facts subst =
+ apfst (map (fn l => AList.lookup (op =) subst l |> the_default l))
+ fun do_step (ps as Fix _) (proof, subst, assums) =
+ (ps :: proof, subst, assums)
+ | do_step (ps as Assume (l, t)) (proof, subst, assums) =
+ (case AList.lookup (op aconv) assums t of
+ SOME l' => (proof, (l, l') :: subst, assums)
+ | NONE => (ps :: proof, subst, (t, l) :: assums))
+ | do_step (Have (qs, l, t, by)) (proof, subst, assums) =
+ (Have (qs, l, t,
+ case by of
+ Facts facts => Facts (relabel_facts subst facts)
+ | CaseSplit (proofs, facts) =>
+ CaseSplit (map do_proof proofs, relabel_facts subst facts)) ::
+ proof, subst, assums)
+ and do_proof proof = fold do_step proof ([], [], []) |> #1 |> rev
+ in do_proof end
+
+(* FIXME: implement *)
+fun shrink_proof shrink_factor proof = proof
+
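+(* Chain steps with "then": if a step uses the fact established by the step
+   immediately before it, drop the explicit label from its facts and use
+   "hence"/"thus" instead of "have"/"show". *)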
+val then_chain_proof =
+ let
+ fun aux _ [] = []
+ | aux _ ((ps as Fix _) :: proof) = ps :: aux no_label proof
+ | aux _ ((ps as Assume (l, _)) :: proof) = ps :: aux l proof
+ | aux l' (Have (qs, l, t, by) :: proof) =
+ (case by of
+ Facts (ls, ss) =>
+ Have (if member (op =) ls l' then
+ (Then :: qs, l, t,
+ Facts (filter_out (curry (op =) l') ls, ss))
+ else
+ (qs, l, t, Facts (ls, ss)))
+ | CaseSplit (proofs, facts) =>
+ Have (qs, l, t, CaseSplit (map (aux no_label) proofs, facts))) ::
+ aux l proof
+ in aux no_label end
+
+fun kill_useless_labels_in_proof proof =
+ let
+ val used_ls = using_of proof
+ fun do_label l = if member (op =) used_ls l then l else no_label
+ fun kill (Fix ts) = Fix ts
+ | kill (Assume (l, t)) = Assume (do_label l, t)
+ | kill (Have (qs, l, t, by)) =
+ Have (qs, do_label l, t,
+ case by of
+ CaseSplit (proofs, facts) =>
+ CaseSplit (map (map kill) proofs, facts)
+ | _ => by)
+ in map kill proof end
+
+fun prefix_for_depth n = replicate_string (n + 1)
+
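+(* Replace the raw clause-number labels with consecutive "A<i>" (assumption) and
+   "F<i>" (fact) labels; the prefix letter is repeated at each case-split depth
+   so that labels from nested blocks cannot clash. *)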
+val relabel_proof =
+ let
+ fun aux _ _ _ [] = []
+ | aux subst depth nextp ((ps as Fix _) :: proof) =
+ ps :: aux subst depth nextp proof
+ | aux subst depth (next_assum, next_fact) (Assume (l, t) :: proof) =
+ if l = no_label then
+ Assume (l, t) :: aux subst depth (next_assum, next_fact) proof
+ else
+ let val l' = (prefix_for_depth depth assum_prefix, next_assum) in
+ Assume (l', t) ::
+ aux ((l, l') :: subst) depth (next_assum + 1, next_fact) proof
+ end
+ | aux subst depth (next_assum, next_fact) (Have (qs, l, t, by) :: proof) =
+ let
+ val (l', subst, next_fact) =
+ if l = no_label then
+ (l, subst, next_fact)
+ else
+ let
+ val l' = (prefix_for_depth depth fact_prefix, next_fact)
+ in (l', (l, l') :: subst, next_fact + 1) end
+ val relabel_facts = apfst (map (the o AList.lookup (op =) subst))
+ val by =
+ case by of
+ Facts facts => Facts (relabel_facts facts)
+ | CaseSplit (proofs, facts) =>
+ CaseSplit (map (aux subst (depth + 1) (1, 1)) proofs,
+ relabel_facts facts)
+ in
+ Have (qs, l', t, by) ::
+ aux subst depth (next_assum, next_fact) proof
+ end
+ in aux [] 0 (1, 1) end
+
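+(* Render a proof as Isar text for subgoal i of n: a "prefer" command if needed,
+   then a "proof -" ... "qed" (or "next") block, with case splits printed as
+   "moreover"/"ultimately" blocks enclosed in braces. *)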
+fun string_for_proof ctxt sorts i n =
+ let
+ fun do_indent ind = replicate_string (ind * indent_size) " "
+ fun do_raw_label (s, j) = s ^ string_of_int j
+ fun do_label l = if l = no_label then "" else do_raw_label l ^ ": "
+ fun do_have qs =
+ (if member (op =) qs Moreover then "moreover " else "") ^
+ (if member (op =) qs Ultimately then "ultimately " else "") ^
+ (if member (op =) qs Then then
+ if member (op =) qs Show then "thus" else "hence"
+ else
+ if member (op =) qs Show then "show" else "have")
+ val do_term =
+ Nitpick_Util.maybe_quote
+ o PrintMode.setmp (filter (curry (op =) Symbol.xsymbolsN)
+ (print_mode_value ()))
+ (Syntax.string_of_term ctxt)
+ fun do_using [] = ""
+ | do_using ls = "using " ^ (space_implode " " (map do_raw_label ls)) ^ " "
+ fun do_by_facts [] [] = "by blast"
+ | do_by_facts _ [] = "by metis"
+ | do_by_facts _ ss = "by (metis " ^ space_implode " " ss ^ ")"
+ fun do_facts ind (ls, ss) =
+ do_indent (ind + 1) ^ do_using ls ^ do_by_facts ls ss
+ and do_step ind (Fix ts) =
+ do_indent ind ^ "fix " ^ space_implode " and " (map do_term ts) ^ "\n"
+ | do_step ind (Assume (l, t)) =
+ do_indent ind ^ "assume " ^ do_label l ^ do_term t ^ "\n"
+ | do_step ind (Have (qs, l, t, Facts facts)) =
+ do_indent ind ^ do_have qs ^ " " ^
+ do_label l ^ do_term t ^ "\n" ^ do_facts ind facts ^ "\n"
+ | do_step ind (Have (qs, l, t, CaseSplit (proofs, facts))) =
+ space_implode (do_indent ind ^ "moreover\n")
+ (map (do_block ind) proofs) ^
+ do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^ "\n" ^
+ do_facts ind facts ^ "\n"
+ and do_steps prefix suffix ind steps =
+ let val s = implode (map (do_step ind) steps) in
+ replicate_string (ind * indent_size - size prefix) " " ^ prefix ^
+ String.extract (s, ind * indent_size,
+ SOME (size s - ind * indent_size - 1)) ^
+ suffix ^ "\n"
+ end
+ and do_block ind proof = do_steps "{ " " }" (ind + 1) proof
+ and do_proof proof =
+ (if i <> 1 then "prefer " ^ string_of_int i ^ "\n" else "") ^
+ do_indent 0 ^ "proof -\n" ^
+ do_steps "" "" 1 proof ^
+ do_indent 0 ^ (if n <> 1 then "next" else "qed") ^ "\n"
+ in setmp_CRITICAL show_sorts sorts do_proof end
+
+fun isar_proof_text (pool, debug, shrink_factor, sorts, ctxt, conjecture_shape)
+ (minimize_command, atp_proof, thm_names, goal, i) =
+ let
+ val thy = ProofContext.theory_of ctxt
+ val (frees, hyp_ts, concl_t) = strip_subgoal goal i
+ val n = Logic.count_prems (prop_of goal)
val (one_line_proof, lemma_names) =
- metis_proof_text (minimize_command, proof, thm_names, goal, i)
- val tokens = String.tokens (fn c => c = #" ") one_line_proof
+ metis_proof_text (minimize_command, atp_proof, thm_names, goal, i)
fun isar_proof_for () =
- case isar_proof_from_atp_proof pool modulus sorts ctxt cnfs thm_names goal
- i of
+ case proof_from_atp_proof pool ctxt atp_proof thm_names frees
+ |> direct_proof thy conjecture_shape hyp_ts concl_t
+ |> kill_duplicate_assumptions_in_proof
+ |> shrink_proof shrink_factor
+ |> then_chain_proof
+ |> kill_useless_labels_in_proof
+ |> relabel_proof
+ |> string_for_proof ctxt sorts i n of
"" => ""
- | isar_proof =>
- "\nStructured proof:\n" ^ Markup.markup Markup.sendback isar_proof
+ | proof => "\nStructured proof:\n" ^ Markup.markup Markup.sendback proof
val isar_proof =
- if member (op =) tokens chained_hint then
- ""
- else if debug then
+ if debug then
isar_proof_for ()
else
try isar_proof_for ()
|> the_default "Warning: The Isar proof construction failed.\n"
in (one_line_proof ^ isar_proof, lemma_names) end
-fun proof_text isar_proof pool debug modulus sorts ctxt =
- if isar_proof then isar_proof_text pool debug modulus sorts ctxt
- else metis_proof_text
+fun proof_text isar_proof =
+ if isar_proof then isar_proof_text else K metis_proof_text
end;