# HG changeset patch
# User blanchet
# Date 1282719727 -7200
# Node ID 8b8ed80b569909ed4f127292246b4f62041ee895
# Parent  0ce517c1970fed7f29f5470ec7c459530ac606cb
renamed "relevance_convergence" to "relevance_decay"

diff -r 0ce517c1970f -r 8b8ed80b5699 doc-src/Sledgehammer/sledgehammer.tex
--- a/doc-src/Sledgehammer/sledgehammer.tex Tue Aug 24 22:57:22 2010 +0200
+++ b/doc-src/Sledgehammer/sledgehammer.tex Wed Aug 25 09:02:07 2010 +0200
@@ -452,10 +452,10 @@
 relevance filter. The option ranges from 0 to 100, where 0 means that all
 theorems are relevant.
 
-\opdefault{relevance\_convergence}{int}{31}
-Specifies the convergence factor, expressed as a percentage, used by the
-relevance filter. This factor is used by the relevance filter to scale down the
-relevance of facts at each iteration of the filter.
+\opdefault{relevance\_decay}{int}{31}
+Specifies the decay factor, expressed as a percentage, used by the relevance
+filter. This factor is used by the relevance filter to scale down the relevance
+of new facts at each iteration of the filter.
 
 \opdefault{max\_relevant\_per\_iter}{int\_or\_smart}{\textit{smart}}
 Specifies the maximum number of facts that may be added during one iteration of
diff -r 0ce517c1970f -r 8b8ed80b5699 src/HOL/Tools/Sledgehammer/sledgehammer.ML
--- a/src/HOL/Tools/Sledgehammer/sledgehammer.ML Tue Aug 24 22:57:22 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer.ML Wed Aug 25 09:02:07 2010 +0200
@@ -19,7 +19,7 @@
      full_types: bool,
      explicit_apply: bool,
      relevance_threshold: real,
-     relevance_convergence: real,
+     relevance_decay: real,
      max_relevant_per_iter: int option,
      theory_relevant: bool option,
      defs_relevant: bool,
@@ -88,7 +88,7 @@
    full_types: bool,
    explicit_apply: bool,
    relevance_threshold: real,
-   relevance_convergence: real,
+   relevance_decay: real,
    max_relevant_per_iter: int option,
    theory_relevant: bool option,
    defs_relevant: bool,
@@ -215,7 +215,7 @@
          known_failures, default_max_relevant_per_iter, default_theory_relevant,
          explicit_forall, use_conjecture_for_hypotheses}
         ({debug, verbose, overlord, full_types, explicit_apply,
-         relevance_threshold, relevance_convergence,
+         relevance_threshold, relevance_decay,
          max_relevant_per_iter, theory_relevant, defs_relevant, isar_proof,
          isar_shrink_factor, timeout, ...} : params)
         minimize_command
@@ -233,7 +233,7 @@
       case axioms of
         SOME axioms => axioms
       | NONE =>
-        (relevant_facts full_types relevance_threshold relevance_convergence
+        (relevant_facts full_types relevance_threshold relevance_decay
                         defs_relevant
                         (the_default default_max_relevant_per_iter
                                      max_relevant_per_iter)
diff -r 0ce517c1970f -r 8b8ed80b5699 src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML Tue Aug 24 22:57:22 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML Wed Aug 25 09:02:07 2010 +0200
@@ -285,114 +285,108 @@
 fun compare_pairs ((_, w1), (_, w2)) = Real.compare (w2, w1)
 
 (* Limit the number of new facts, to prevent runaway acceptance. *)
-fun take_best max_new (new_pairs : (annotated_thm * real) list) =
+fun take_best max_relevant_per_iter (new_pairs : (annotated_thm * real) list) =
   let val nnew = length new_pairs in
-    if nnew <= max_new then
+    if nnew <= max_relevant_per_iter then
       (map #1 new_pairs, [])
     else
       let
        val new_pairs = sort compare_pairs new_pairs
-       val accepted = List.take (new_pairs, max_new)
+       val accepted = List.take (new_pairs, max_relevant_per_iter)
      in
        trace_msg (fn () => ("Number of candidates, " ^ Int.toString nnew ^
-                 ", exceeds the limit of " ^ Int.toString max_new));
+                 ", exceeds the limit of " ^ Int.toString max_relevant_per_iter));
        trace_msg (fn () => ("Effective pass mark: " ^
                             Real.toString (#2 (List.last accepted))));
        trace_msg (fn () => "Actually passed: " ^ space_implode ", "
            (map (fst o (fn f => f ()) o fst o fst o fst) accepted));
-       (map #1 accepted, List.drop (new_pairs, max_new))
+       (map #1 accepted, List.drop (new_pairs, max_relevant_per_iter))
      end
   end;
 
 val threshold_divisor = 2.0
 val ridiculous_threshold = 0.1
 
-fun relevance_filter ctxt relevance_threshold relevance_convergence
-    defs_relevant max_new theory_relevant
+fun relevance_filter ctxt relevance_threshold relevance_decay defs_relevant
+    max_relevant_per_iter theory_relevant
     ({add, del, ...} : relevance_override) axioms goal_ts =
-  if relevance_threshold > 1.0 then
-    []
-  else if relevance_threshold < 0.0 then
-    axioms
-  else
-    let
-      val thy = ProofContext.theory_of ctxt
-      val const_tab = fold (count_axiom_consts theory_relevant thy) axioms
-                           Symtab.empty
-      val goal_const_tab = get_consts thy (SOME false) goal_ts
-      val _ =
-        trace_msg (fn () => "Initial constants: " ^
-                            commas (goal_const_tab
-                                    |> Symtab.dest
-                                    |> filter (curry (op <>) [] o snd)
-                                    |> map fst))
-      val add_thms = maps (ProofContext.get_fact ctxt) add
-      val del_thms = maps (ProofContext.get_fact ctxt) del
-      fun iter j threshold rel_const_tab =
-        let
-          fun relevant ([], rejects) [] =
-              (* Nothing was added this iteration. *)
-              if j = 0 andalso threshold >= ridiculous_threshold then
-                (* First iteration? Try again. *)
-                iter 0 (threshold / threshold_divisor) rel_const_tab
-                     (map (apsnd SOME) rejects)
+  let
+    val thy = ProofContext.theory_of ctxt
+    val const_tab = fold (count_axiom_consts theory_relevant thy) axioms
+                         Symtab.empty
+    val goal_const_tab = get_consts thy (SOME false) goal_ts
+    val _ =
+      trace_msg (fn () => "Initial constants: " ^
+                          commas (goal_const_tab |> Symtab.dest
+                                  |> filter (curry (op <>) [] o snd)
+                                  |> map fst))
+    val add_thms = maps (ProofContext.get_fact ctxt) add
+    val del_thms = maps (ProofContext.get_fact ctxt) del
+    fun iter j threshold rel_const_tab =
+      let
+        fun relevant ([], rejects) [] =
+            (* Nothing was added this iteration. *)
+            if j = 0 andalso threshold >= ridiculous_threshold then
+              (* First iteration? Try again. *)
+              iter 0 (threshold / threshold_divisor) rel_const_tab
+                   (map (apsnd SOME) rejects)
+            else
+              (* Add "add:" facts. *)
+              if null add_thms then
+                []
               else
-                (* Add "add:" facts. *)
-                if null add_thms then
-                  []
-                else
-                  map_filter (fn ((p as (_, th), _), _) =>
-                                 if member Thm.eq_thm add_thms th then SOME p
-                                 else NONE) rejects
-            | relevant (new_pairs, rejects) [] =
-              let
-                val (new_rels, more_rejects) = take_best max_new new_pairs
-                val rel_const_tab' =
-                  rel_const_tab |> fold add_const_to_table (maps snd new_rels)
-                fun is_dirty c =
-                  const_mem rel_const_tab' c andalso
-                  not (const_mem rel_const_tab c)
-                val rejects =
-                  more_rejects @ rejects
-                  |> map (fn (ax as (_, consts), old_weight) =>
-                             (ax, if exists is_dirty consts then NONE
-                                  else SOME old_weight))
-                val threshold =
-                  threshold + (1.0 - threshold) * relevance_convergence
-              in
-                trace_msg (fn () => "relevant this iteration: " ^
-                                    Int.toString (length new_rels));
-                map #1 new_rels @ iter (j + 1) threshold rel_const_tab' rejects
-              end
-            | relevant (new_rels, rejects)
-                       (((ax as ((name, th), axiom_consts)), cached_weight)
-                        :: rest) =
-              let
-                val weight =
-                  case cached_weight of
-                    SOME w => w
-                  | NONE => axiom_weight const_tab rel_const_tab axiom_consts
-              in
-                if weight >= threshold orelse
-                   (defs_relevant andalso defines thy th rel_const_tab) then
-                  (trace_msg (fn () =>
-                       fst (name ()) ^ " passes: " ^ Real.toString weight
-                       ^ " consts: " ^ commas (map fst axiom_consts));
-                   relevant ((ax, weight) :: new_rels, rejects) rest)
-                else
-                  relevant (new_rels, (ax, weight) :: rejects) rest
-              end
-          in
-            trace_msg (fn () => "relevant_facts, current threshold: " ^
-                                Real.toString threshold);
-            relevant ([], [])
-          end
-    in
-      axioms |> filter_out (member Thm.eq_thm del_thms o snd)
-             |> map (rpair NONE o pair_consts_axiom theory_relevant thy)
-             |> iter 0 relevance_threshold goal_const_tab
-             |> tap (fn res => trace_msg (fn () =>
+                map_filter (fn ((p as (_, th), _), _) =>
+                               if member Thm.eq_thm add_thms th then SOME p
+                               else NONE) rejects
+          | relevant (new_pairs, rejects) [] =
+            let
+              val (new_rels, more_rejects) =
+                take_best max_relevant_per_iter new_pairs
+              val rel_const_tab' =
+                rel_const_tab |> fold add_const_to_table (maps snd new_rels)
+              fun is_dirty c =
+                const_mem rel_const_tab' c andalso
+                not (const_mem rel_const_tab c)
+              val rejects =
+                more_rejects @ rejects
+                |> map (fn (ax as (_, consts), old_weight) =>
+                           (ax, if exists is_dirty consts then NONE
+                                else SOME old_weight))
+              val threshold = threshold + (1.0 - threshold) * relevance_decay
+            in
+              trace_msg (fn () => "relevant this iteration: " ^
+                                  Int.toString (length new_rels));
+              map #1 new_rels @ iter (j + 1) threshold rel_const_tab' rejects
+            end
+          | relevant (new_rels, rejects)
+                     (((ax as ((name, th), axiom_consts)), cached_weight)
+                      :: rest) =
+            let
+              val weight =
+                case cached_weight of
+                  SOME w => w
+                | NONE => axiom_weight const_tab rel_const_tab axiom_consts
+            in
+              if weight >= threshold orelse
+                 (defs_relevant andalso defines thy th rel_const_tab) then
+                (trace_msg (fn () =>
+                     fst (name ()) ^ " passes: " ^ Real.toString weight
+                     ^ " consts: " ^ commas (map fst axiom_consts));
+                 relevant ((ax, weight) :: new_rels, rejects) rest)
+              else
+                relevant (new_rels, (ax, weight) :: rejects) rest
+            end
+        in
+          trace_msg (fn () => "relevant_facts, current threshold: " ^
+                              Real.toString threshold);
+          relevant ([], [])
+        end
+  in
+    axioms |> filter_out (member Thm.eq_thm del_thms o snd)
+           |> map (rpair NONE o pair_consts_axiom theory_relevant thy)
+           |> iter 0 relevance_threshold goal_const_tab
+           |> tap (fn res => trace_msg (fn () =>
                       "Total relevant: " ^ Int.toString (length res)))
-    end
+  end
 
 (***************************************************************)
 (* Retrieving and filtering lemmas                             *)
@@ -614,8 +608,8 @@
 (* ATP invocation methods setup                                *)
 (***************************************************************)
 
-fun relevant_facts full_types relevance_threshold relevance_convergence
-    defs_relevant max_new theory_relevant
+fun relevant_facts full_types relevance_threshold relevance_decay defs_relevant
+    max_relevant_per_iter theory_relevant
     (relevance_override as {add, del, only})
     (ctxt, (chained_ths, _)) hyp_ts concl_t =
   let
@@ -632,9 +626,14 @@
   in
     trace_msg (fn () => "Considering " ^ Int.toString (length axioms) ^
                         " theorems");
-    relevance_filter ctxt relevance_threshold relevance_convergence
-                     defs_relevant max_new theory_relevant relevance_override
-                     axioms (concl_t :: hyp_ts)
+    (if relevance_threshold > 1.0 then
+       []
+     else if relevance_threshold < 0.0 then
+       axioms
+     else
+       relevance_filter ctxt relevance_threshold relevance_decay defs_relevant
+                        max_relevant_per_iter theory_relevant relevance_override
+                        axioms (concl_t :: hyp_ts))
    |> map (apfst (fn f => f ()))
    |> sort_wrt (fst o fst)
  end
diff -r 0ce517c1970f -r 8b8ed80b5699 src/HOL/Tools/Sledgehammer/sledgehammer_fact_minimize.ML
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact_minimize.ML Tue Aug 24 22:57:22 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_minimize.ML Wed Aug 25 09:02:07 2010 +0200
@@ -41,9 +41,8 @@
 end
 
 fun test_theorems ({debug, verbose, overlord, atps, full_types,
-                    relevance_threshold, relevance_convergence,
-                    defs_relevant, isar_proof, isar_shrink_factor, ...}
-                   : params)
+                    relevance_threshold, relevance_decay, defs_relevant,
+                    isar_proof, isar_shrink_factor, ...} : params)
                   (prover : prover) explicit_apply timeout subgoal state
                   name_thms_pairs =
   let
@@ -53,10 +52,10 @@
       {debug = debug, verbose = verbose, overlord = overlord, atps = atps,
        full_types = full_types, explicit_apply = explicit_apply,
        relevance_threshold = relevance_threshold,
-       relevance_convergence = relevance_convergence,
-       max_relevant_per_iter = NONE, theory_relevant = NONE,
-       defs_relevant = defs_relevant, isar_proof = isar_proof,
-       isar_shrink_factor = isar_shrink_factor, timeout = timeout}
+       relevance_decay = relevance_decay, max_relevant_per_iter = NONE,
+       theory_relevant = NONE, defs_relevant = defs_relevant,
+       isar_proof = isar_proof, isar_shrink_factor = isar_shrink_factor,
+       timeout = timeout}
     val axioms = maps (fn (n, ths) => map (pair n) ths) name_thms_pairs
     val {context = ctxt, facts, goal} = Proof.goal state
     val problem =
diff -r 0ce517c1970f -r 8b8ed80b5699 src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Tue Aug 24 22:57:22 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Wed Aug 25 09:02:07 2010 +0200
@@ -68,7 +68,7 @@
    ("overlord", "false"),
    ("explicit_apply", "false"),
    ("relevance_threshold", "40"),
-   ("relevance_convergence", "31"),
+   ("relevance_decay", "31"),
    ("max_relevant_per_iter", "smart"),
    ("theory_relevant", "smart"),
    ("defs_relevant", "false"),
@@ -170,8 +170,8 @@
     val explicit_apply = lookup_bool "explicit_apply"
     val relevance_threshold =
       0.01 * Real.fromInt (lookup_int "relevance_threshold")
-    val relevance_convergence =
-      0.01 * Real.fromInt (lookup_int "relevance_convergence")
+    val relevance_decay =
+      0.01 * Real.fromInt (lookup_int "relevance_decay")
    val max_relevant_per_iter = lookup_int_option "max_relevant_per_iter"
    val theory_relevant = lookup_bool_option "theory_relevant"
    val defs_relevant = lookup_bool "defs_relevant"
@@ -182,7 +182,7 @@
     {debug = debug, verbose = verbose, overlord = overlord, atps = atps,
      full_types = full_types, explicit_apply = explicit_apply,
      relevance_threshold = relevance_threshold,
-     relevance_convergence = relevance_convergence,
+     relevance_decay = relevance_decay,
      max_relevant_per_iter = max_relevant_per_iter,
      theory_relevant = theory_relevant, defs_relevant = defs_relevant,
      isar_proof = isar_proof, isar_shrink_factor = isar_shrink_factor,
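
The renamed option names what the code actually does: after each iteration of the relevance filter, the acceptance threshold is pushed toward 1.0 by the decay factor, via "val threshold = threshold + (1.0 - threshold) * relevance_decay" in relevance_filter above. The following standalone Standard ML sketch of that recurrence is illustration only, not code from the patched sources; the function names next_threshold and thresholds are made up, and the defaults relevance_threshold = 40 and relevance_decay = 31 (percentages in the user interface) are written as fractions:

(* Sketch of the per-iteration threshold recurrence; names are hypothetical. *)
fun next_threshold decay threshold = threshold + (1.0 - threshold) * decay

(* First n + 1 per-iteration thresholds, starting from the initial one. *)
fun thresholds _ threshold 0 = [threshold]
  | thresholds decay threshold n =
    threshold :: thresholds decay (next_threshold decay threshold) (n - 1)

(* With the defaults this yields roughly [0.4, 0.586, 0.714, 0.803, 0.864]. *)
val example = thresholds 0.31 0.4 4

So each pass admits only facts scoring above an ever higher mark, roughly 0.40, 0.59, 0.71, 0.80, ... with the defaults. A decay of 0 would keep the threshold constant across iterations, while a decay of 100 drives it to 1.0 after the first pass and thus essentially stops the filter from picking up further facts.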