merged
author wenzelm
date Mon, 26 Nov 2012 17:13:44 +0100
changeset 50231 81a067b188b8
parent 50230 79773c44e57b (diff)
parent 50217 ce1f0602f48e (current diff)
child 50232 289a34f9c383
merged
NEWS
src/HOL/Tools/Lifting/lifting_setup.ML
src/Pure/Tools/xml_syntax.ML
--- a/Admin/component_repository/components.sha1	Mon Nov 26 16:28:22 2012 +0100
+++ b/Admin/component_repository/components.sha1	Mon Nov 26 17:13:44 2012 +0100
@@ -1,5 +1,6 @@
 2f6417b8e96a0e4e8354fe0f1a253c18fb55d9a7  cvc3-2.4.1.tar.gz
 0fe549949a025d65d52d6deca30554de8fca3b6e  e-1.5.tar.gz
+b98a98025d1f7e560ca6864a53296137dae736b4  e-1.6.tar.gz
 6d34b18ca0aa1e10bab6413045d079188c0e2dfb  exec_process-1.0.1.tar.gz
 8b9bffd10e396d965e815418295f2ee2849bea75  exec_process-1.0.2.tar.gz
 ae7ee5becb26512f18c609e83b34612918bae5f0  exec_process-1.0.tar.gz
@@ -15,6 +16,7 @@
 7b012f725ec1cc102dc259df178d511cc7890bba  jedit_build-20120813.tar.gz
 8e1d36f5071e3def2cb281f7fefe9f52352cb88f  jedit_build-20120903.tar.gz
 6c737137cc597fc920943783382e928ea79e3feb  kodkodi-1.2.16.tar.gz
+5f95c96bb99927f3a026050f85bd056f37a9189e  kodkodi-1.5.2.tar.gz
 1c8cb6a8f4cbeaedce2d6d1ba8fc7e2ab3663aeb  polyml-5.4.1.tar.gz
 1812e9fa6d163f63edb93e37d1217640a166cf3e  polyml-5.5.0.tar.gz
 8ee375cfc38972f080dbc78f07b68dac03efe968  ProofGeneral-3.7.1.1.tar.gz
--- a/Admin/components/main	Mon Nov 26 16:28:22 2012 +0100
+++ b/Admin/components/main	Mon Nov 26 17:13:44 2012 +0100
@@ -1,10 +1,10 @@
 #main components for everyday use, without big impact on overall build time
 cvc3-2.4.1
-e-1.5
+e-1.6
 exec_process-1.0.2
 jdk-7u6
 jedit_build-20120903
-kodkodi-1.2.16
+kodkodi-1.5.2
 polyml-5.5.0
 scala-2.9.2
 spass-3.8ds
--- a/CONTRIBUTORS	Mon Nov 26 16:28:22 2012 +0100
+++ b/CONTRIBUTORS	Mon Nov 26 17:13:44 2012 +0100
@@ -9,6 +9,14 @@
 * 2012: Makarius Wenzel, Université Paris-Sud / LRI
   Improvements of Isabelle/Scala and Isabelle/jEdit Prover IDE.
 
+* Fall 2012: Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+  Jasmin Blanchette, TUM
+  Implemented Machine Learning for Sledgehammer (MaSh).
+
+* Fall 2012: Steffen Smolka, TUM
+  Various improvements to Sledgehammer's Isar proof generator, including
+  a smart type annotation algorithm and proof shrinking.
+
 * November 2012: Fabian Immler, TUM
   "Symbols" dockable for Isabelle/jEdit.
 
--- a/NEWS	Mon Nov 26 16:28:22 2012 +0100
+++ b/NEWS	Mon Nov 26 17:13:44 2012 +0100
@@ -277,8 +277,9 @@
 
   - Added MaSh relevance filter based on machine-learning; see the
     Sledgehammer manual for details.
+  - Polished Isar proofs generated with "isar_proofs" option.
   - Rationalized type encodings ("type_enc" option).
-  - Renamed "kill_provers" subcommand to "kill"
+  - Renamed "kill_provers" subcommand to "kill".
   - Renamed options:
       isar_proof ~> isar_proofs
       isar_shrink_factor ~> isar_shrink
--- a/etc/components	Mon Nov 26 16:28:22 2012 +0100
+++ b/etc/components	Mon Nov 26 17:13:44 2012 +0100
@@ -7,5 +7,6 @@
 src/HOL/Library/Sum_of_Squares
 src/HOL/Tools/ATP
 src/HOL/Tools/Predicate_Compile
+src/HOL/Tools/Sledgehammer/MaSh
 src/HOL/Tools/SMT
 src/HOL/TPTP
--- a/src/Doc/Sledgehammer/document/root.tex	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/Doc/Sledgehammer/document/root.tex	Mon Nov 26 17:13:44 2012 +0100
@@ -414,15 +414,15 @@
 \item[\labelitemi]
 An experimental, memoryful alternative to MePo is \emph{MaSh}
 (\underline{Ma}chine Learner for \underline{S}ledge\underline{h}ammer). It
-relies on an external tool called \texttt{mash} that applies machine learning to
+relies on an external Python tool that applies machine learning to
 the problem of finding relevant facts.
 
 \item[\labelitemi] The \emph{Mesh} filter combines MePo and MaSh.
 \end{enum}
 
-The default is either MePo or Mesh, depending on whether \texttt{mash} is
-installed and what class of provers the target prover belongs to
-(\S\ref{relevance-filter}).
+The default is either MePo or Mesh, depending on whether the environment
+variable \texttt{MASH} is set and what class of provers the target prover
+belongs to (\S\ref{relevance-filter}).
 
 The number of facts included in a problem varies from prover to prover, since
 some provers get overwhelmed more easily than others. You can show the number of
@@ -1055,16 +1055,15 @@
 The traditional memoryless MePo relevance filter.
 
 \item[\labelitemi] \textbf{\textit{mash}:}
-The memoryful MaSh machine learner. MaSh relies on the external program
-\texttt{mash}, which can be obtained from the author at \authoremail. To install
-it, set the environment variable \texttt{MASH\_HOME} to the directory that
-contains the \texttt{mash} executable.
-Persistent data is stored in the \texttt{\$ISABELLE\_HOME\_USER/mash} directory.
+The memoryful MaSh machine learner. MaSh relies on the external Python program
+\texttt{mash.py}, which is part of Isabelle. To enable MaSh, set the environment
+variable \texttt{MASH} to \texttt{yes}. Persistent data is stored in the
+directory \texttt{\$ISABELLE\_HOME\_USER/mash}.
 
 \item[\labelitemi] \textbf{\textit{mesh}:} A combination of MePo and MaSh.
 
-\item[\labelitemi] \textbf{\textit{smart}:} Use Mesh if \texttt{mash} is
-installed and the target prover is an ATP; otherwise, use MePo.
+\item[\labelitemi] \textbf{\textit{smart}:} Use Mesh if MaSh is
+enabled and the target prover is an ATP; otherwise, use MePo.
 \end{enum}
 
 \opdefault{max\_facts}{smart\_int}{smart}
@@ -1084,7 +1083,7 @@
 \optrue{learn}{dont\_learn}
 Specifies whether MaSh should be run automatically by Sledgehammer to learn the
 available theories (and hence provide more accurate results). Learning only
-takes place if \texttt{mash} is installed.
+takes place if MaSh is enabled.
 
 \opdefault{max\_new\_mono\_instances}{int}{smart}
 Specifies the maximum number of monomorphic instances to generate beyond
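
The documentation changes above amount to a small decision rule: MaSh counts as enabled when the MASH environment variable is set (to "yes", per the relevance-filter section), and the "smart" filter selects the Mesh combination for ATPs when MaSh is enabled, falling back to MePo otherwise. A minimal Python sketch of that rule, for illustration only (the function names are hypothetical and this is not Sledgehammer's actual implementation):

    import os

    def mash_enabled() -> bool:
        # MaSh is treated as enabled when the MASH environment variable is "yes".
        return os.environ.get("MASH", "").strip().lower() == "yes"

    def smart_filter(prover_is_atp: bool) -> str:
        # "smart": use Mesh (MePo + MaSh) for ATPs when MaSh is enabled,
        # otherwise fall back to the memoryless MePo filter.
        return "mesh" if mash_enabled() and prover_is_atp else "mepo"
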
--- a/src/HOL/Fact.thy	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Fact.thy	Mon Nov 26 17:13:44 2012 +0100
@@ -306,4 +306,13 @@
 lemma inv_of_nat_fact_ge_zero [simp]: "(0::'a::linordered_field) \<le> inverse (of_nat (fact n))"
 by (auto intro: order_less_imp_le)
 
+lemma fact_eq_rev_setprod_nat: "fact (k::nat) = (\<Prod>i<k. k - i)"
+  unfolding fact_altdef_nat
+proof (rule strong_setprod_reindex_cong)
+  { fix i assume "Suc 0 \<le> i" "i \<le> k" then have "\<exists>j\<in>{..<k}. i = k - j"
+      by (intro bexI[of _ "k - i"]) simp_all }
+  then show "{1..k} = (\<lambda>i. k - i) ` {..<k}"
+    by (auto simp: image_iff)
+qed (auto intro: inj_onI)
+
 end
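
The new lemma fact_eq_rev_setprod_nat states that fact k is the product of k - i over i < k, i.e. the factorial with its factors listed in reverse order (the empty product for k = 0 is 1, matching fact 0). A quick numeric spot check of the identity in Python, independent of the Isabelle proof:

    from math import factorial, prod

    for k in range(10):
        assert factorial(k) == prod(k - i for i in range(k))
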
--- a/src/HOL/Library/Binomial.thy	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Library/Binomial.thy	Mon Nov 26 17:13:44 2012 +0100
@@ -537,4 +537,42 @@
   then show ?thesis using kn by simp
 qed
 
+(* Contributed by Manuel Eberl *)
+(* Alternative definition of the binomial coefficient as \<Prod>i<k. (n - i) / (k - i) *)
+lemma binomial_altdef_of_nat:
+  fixes n k :: nat and x :: "'a :: {field_char_0, field_inverse_zero}"
+  assumes "k \<le> n" shows "of_nat (n choose k) = (\<Prod>i<k. of_nat (n - i) / of_nat (k - i) :: 'a)"
+proof cases
+  assume "0 < k"
+  then have "(of_nat (n choose k) :: 'a) = (\<Prod>i<k. of_nat n - of_nat i) / of_nat (fact k)"
+    unfolding binomial_gbinomial gbinomial_def
+    by (auto simp: gr0_conv_Suc lessThan_Suc_atMost atLeast0AtMost)
+  also have "\<dots> = (\<Prod>i<k. of_nat (n - i) / of_nat (k - i) :: 'a)"
+    using `k \<le> n` unfolding fact_eq_rev_setprod_nat of_nat_setprod
+    by (auto simp add: setprod_dividef intro!: setprod_cong of_nat_diff[symmetric])
+  finally show ?thesis .
+qed simp
+
+lemma binomial_ge_n_over_k_pow_k:
+  fixes k n :: nat and x :: "'a :: linordered_field_inverse_zero"
+  assumes "0 < k" and "k \<le> n" shows "(of_nat n / of_nat k :: 'a) ^ k \<le> of_nat (n choose k)"
+proof -
+  have "(of_nat n / of_nat k :: 'a) ^ k = (\<Prod>i<k. of_nat n / of_nat k :: 'a)"
+    by (simp add: setprod_constant)
+  also have "\<dots> \<le> of_nat (n choose k)"
+    unfolding binomial_altdef_of_nat[OF `k\<le>n`]
+  proof (safe intro!: setprod_mono)
+    fix i::nat  assume  "i < k"
+    from assms have "n * i \<ge> i * k" by simp
+    hence "n * k - n * i \<le> n * k - i * k" by arith
+    hence "n * (k - i) \<le> (n - i) * k"
+      by (simp add: diff_mult_distrib2 nat_mult_commute)
+    hence "of_nat n * of_nat (k - i) \<le> of_nat (n - i) * (of_nat k :: 'a)"
+      unfolding of_nat_mult[symmetric] of_nat_le_iff .
+    with assms show "of_nat n / of_nat k \<le> of_nat (n - i) / (of_nat (k - i) :: 'a)"
+      using `i < k` by (simp add: field_simps)
+  qed (simp add: zero_le_divide_iff)
+  finally show ?thesis .
+qed
+
 end
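
Both additions have direct numeric readings: binomial_altdef_of_nat rewrites (n choose k) as the product over i < k of (n - i) / (k - i) for k <= n, and binomial_ge_n_over_k_pow_k bounds (n choose k) from below by (n / k)^k for 0 < k <= n. An illustrative floating-point spot check in Python:

    from math import comb, isclose, prod

    for n in range(1, 13):
        for k in range(1, n + 1):
            alt = prod((n - i) / (k - i) for i in range(k))
            assert isclose(alt, comb(n, k))             # binomial_altdef_of_nat
            assert (n / k) ** k <= comb(n, k) + 1e-9    # binomial_ge_n_over_k_pow_k
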
--- a/src/HOL/Quotient_Examples/Lift_FSet.thy	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Quotient_Examples/Lift_FSet.thy	Mon Nov 26 17:13:44 2012 +0100
@@ -98,27 +98,26 @@
   @{text "list"} and @{text "fset"} types with the same element type.
   To relate nested types like @{text "'a list list"} and
   @{text "'a fset fset"}, we define a parameterized version of the
-  correspondence relation, @{text "cr_fset'"}. *}
+  correspondence relation, @{text "pcr_fset"}. *}
+
+thm pcr_fset_def
 
-definition cr_fset' :: "('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'b fset \<Rightarrow> bool"
-  where "cr_fset' R = list_all2 R OO cr_fset"
-
-lemma right_unique_cr_fset' [transfer_rule]:
-  "right_unique A \<Longrightarrow> right_unique (cr_fset' A)"
-  unfolding cr_fset'_def
+lemma right_unique_pcr_fset [transfer_rule]:
+  "right_unique A \<Longrightarrow> right_unique (pcr_fset A)"
+  unfolding pcr_fset_def
   by (intro right_unique_OO right_unique_list_all2 fset.right_unique)
 
-lemma right_total_cr_fset' [transfer_rule]:
-  "right_total A \<Longrightarrow> right_total (cr_fset' A)"
-  unfolding cr_fset'_def
+lemma right_total_pcr_fset [transfer_rule]:
+  "right_total A \<Longrightarrow> right_total (pcr_fset A)"
+  unfolding pcr_fset_def
   by (intro right_total_OO right_total_list_all2 fset.right_total)
 
-lemma bi_total_cr_fset' [transfer_rule]:
-  "bi_total A \<Longrightarrow> bi_total (cr_fset' A)"
-  unfolding cr_fset'_def
+lemma bi_total_pcr_fset [transfer_rule]:
+  "bi_total A \<Longrightarrow> bi_total (pcr_fset A)"
+  unfolding pcr_fset_def
   by (intro bi_total_OO bi_total_list_all2 fset.bi_total)
 
-text {* Transfer rules for @{text "cr_fset'"} can be derived from the
+text {* Transfer rules for @{text "pcr_fset"} can be derived from the
   existing transfer rules for @{text "cr_fset"} together with the
   transfer rules for the polymorphic raw constants. *}
 
@@ -126,16 +125,16 @@
   could potentially be automated. *}
 
 lemma fnil_transfer [transfer_rule]:
-  "(cr_fset' A) [] fnil"
-  unfolding cr_fset'_def
+  "(pcr_fset A) [] fnil"
+  unfolding pcr_fset_def
   apply (rule relcomppI)
   apply (rule Nil_transfer)
   apply (rule fnil.transfer)
   done
 
 lemma fcons_transfer [transfer_rule]:
-  "(A ===> cr_fset' A ===> cr_fset' A) Cons fcons"
-  unfolding cr_fset'_def
+  "(A ===> pcr_fset A ===> pcr_fset A) Cons fcons"
+  unfolding pcr_fset_def
   apply (intro fun_relI)
   apply (elim relcomppE)
   apply (rule relcomppI)
@@ -144,8 +143,8 @@
   done
 
 lemma fappend_transfer [transfer_rule]:
-  "(cr_fset' A ===> cr_fset' A ===> cr_fset' A) append fappend"
-  unfolding cr_fset'_def
+  "(pcr_fset A ===> pcr_fset A ===> pcr_fset A) append fappend"
+  unfolding pcr_fset_def
   apply (intro fun_relI)
   apply (elim relcomppE)
   apply (rule relcomppI)
@@ -154,8 +153,8 @@
   done
 
 lemma fmap_transfer [transfer_rule]:
-  "((A ===> B) ===> cr_fset' A ===> cr_fset' B) map fmap"
-  unfolding cr_fset'_def
+  "((A ===> B) ===> pcr_fset A ===> pcr_fset B) map fmap"
+  unfolding pcr_fset_def
   apply (intro fun_relI)
   apply (elim relcomppE)
   apply (rule relcomppI)
@@ -164,8 +163,8 @@
   done
 
 lemma ffilter_transfer [transfer_rule]:
-  "((A ===> op =) ===> cr_fset' A ===> cr_fset' A) filter ffilter"
-  unfolding cr_fset'_def
+  "((A ===> op =) ===> pcr_fset A ===> pcr_fset A) filter ffilter"
+  unfolding pcr_fset_def
   apply (intro fun_relI)
   apply (elim relcomppE)
   apply (rule relcomppI)
@@ -174,8 +173,8 @@
   done
 
 lemma fset_transfer [transfer_rule]:
-  "(cr_fset' A ===> set_rel A) set fset"
-  unfolding cr_fset'_def
+  "(pcr_fset A ===> set_rel A) set fset"
+  unfolding pcr_fset_def
   apply (intro fun_relI)
   apply (elim relcomppE)
   apply (drule fset.transfer [THEN fun_relD, unfolded relator_eq])
@@ -184,8 +183,8 @@
   done
 
 lemma fconcat_transfer [transfer_rule]:
-  "(cr_fset' (cr_fset' A) ===> cr_fset' A) concat fconcat"
-  unfolding cr_fset'_def
+  "(pcr_fset (pcr_fset A) ===> pcr_fset A) concat fconcat"
+  unfolding pcr_fset_def
   unfolding list_all2_OO
   apply (intro fun_relI)
   apply (elim relcomppE)
@@ -202,8 +201,8 @@
 
 lemma fset_eq_transfer [transfer_rule]:
   assumes "bi_unique A"
-  shows "(cr_fset' A ===> cr_fset' A ===> op =) list_eq (op =)"
-  unfolding cr_fset'_def
+  shows "(pcr_fset A ===> pcr_fset A ===> op =) list_eq (op =)"
+  unfolding pcr_fset_def
   apply (intro fun_relI)
   apply (elim relcomppE)
   apply (rule trans)
--- a/src/HOL/TPTP/lib/Tools/tptp_translate	Mon Nov 26 16:28:22 2012 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-#
-# Author: Jasmin Blanchette
-#
-# DESCRIPTION: Translation tool for TPTP formats
-
-
-PRG="$(basename "$0")"
-
-function usage() {
-  echo
-  echo "Usage: isabelle $PRG FORMAT IN_FILE OUT_FILE"
-  echo
-  echo "  Translates TPTP input file to specified TPTP format (\"CNF\", \"FOF\", \"TFF0\", \"TFF1\", or \"THF0\")."
-  echo
-  exit 1
-}
-
-[ "$#" -ne 3 -o "$1" = "-?" ] && usage
-
-SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
-
-args=("$@")
-
-echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
-ML {* ATP_Problem_Import.translate_tptp_file \"${args[0]}\" \"${args[1]}\" \"${args[2]}\" *} end;" \
-  > /tmp/$SCRATCH.thy
-"$ISABELLE_PROCESS" -e "use_thy \"/tmp/$SCRATCH\"; exit 1;"
--- a/src/HOL/Tools/Lifting/lifting_info.ML	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Tools/Lifting/lifting_info.ML	Mon Nov 26 17:13:44 2012 +0100
@@ -11,7 +11,7 @@
   val lookup_quotmaps_global: theory -> string -> quotmaps option
   val print_quotmaps: Proof.context -> unit
 
-  type quotients = {quot_thm: thm}
+  type quotients = {quot_thm: thm, pcrel_def: thm option}
   val transform_quotients: morphism -> quotients -> quotients
   val lookup_quotients: Proof.context -> string -> quotients option
   val lookup_quotients_global: theory -> string -> quotients option
@@ -118,7 +118,7 @@
   end
 
 (* info about quotient types *)
-type quotients = {quot_thm: thm}
+type quotients = {quot_thm: thm, pcrel_def: thm option}
 
 structure Quotients = Generic_Data
 (
@@ -128,8 +128,8 @@
   fun merge data = Symtab.merge (K true) data
 )
 
-fun transform_quotients phi {quot_thm} =
-  {quot_thm = Morphism.thm phi quot_thm}
+fun transform_quotients phi {quot_thm, pcrel_def} =
+  {quot_thm = Morphism.thm phi quot_thm, pcrel_def = Option.map (Morphism.thm phi) pcrel_def}
 
 val lookup_quotients = Symtab.lookup o Quotients.get o Context.Proof
 val lookup_quotients_global = Symtab.lookup o Quotients.get o Context.Theory
@@ -144,8 +144,8 @@
     val maybe_stored_quot_thm = Symtab.lookup symtab qty_full_name
   in
     case maybe_stored_quot_thm of
-      SOME {quot_thm = stored_quot_thm} => 
-        if Thm.eq_thm_prop (stored_quot_thm, quot_thm) then
+      SOME data => 
+        if Thm.eq_thm_prop (#quot_thm data, quot_thm) then
           Quotients.map (Symtab.delete qty_full_name) ctxt
         else
           ctxt
@@ -157,12 +157,14 @@
 
 fun print_quotients ctxt =
   let
-    fun prt_quot (qty_name, {quot_thm}) =
+    fun prt_quot (qty_name, {quot_thm, pcrel_def}) =
       Pretty.block (separate (Pretty.brk 2)
        [Pretty.str "type:", 
         Pretty.str qty_name,
         Pretty.str "quot. thm:",
-        Syntax.pretty_term ctxt (prop_of quot_thm)])
+        Syntax.pretty_term ctxt (prop_of quot_thm),
+        Pretty.str "pcrel_def thm:",
+        option_fold (Pretty.str "-") ((Syntax.pretty_term ctxt) o prop_of) pcrel_def])
   in
     map prt_quot (Symtab.dest (Quotients.get (Context.Proof ctxt)))
     |> Pretty.big_list "quotients:"
--- a/src/HOL/Tools/Lifting/lifting_setup.ML	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Mon Nov 26 17:13:44 2012 +0100
@@ -22,20 +22,62 @@
 
 exception SETUP_LIFTING_INFR of string
 
-fun define_cr_rel rep_fun lthy =
+fun define_crel rep_fun lthy =
   let
     val (qty, rty) = (dest_funT o fastype_of) rep_fun
     val rep_fun_graph = (HOLogic.eq_const rty) $ Bound 1 $ (rep_fun $ Bound 0)
     val def_term = Abs ("x", rty, Abs ("y", qty, rep_fun_graph));
     val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty
-    val cr_rel_name = Binding.prefix_name "cr_" qty_name
+    val crel_name = Binding.prefix_name "cr_" qty_name
     val (fixed_def_term, lthy') = yield_singleton (Variable.importT_terms) def_term lthy
     val ((_, (_ , def_thm)), lthy'') =
-      Local_Theory.define ((cr_rel_name, NoSyn), ((Thm.def_binding cr_rel_name, []), fixed_def_term)) lthy'
+      Local_Theory.define ((crel_name, NoSyn), ((Thm.def_binding crel_name, []), fixed_def_term)) lthy'
   in
     (def_thm, lthy'')
   end
 
+fun print_define_pcrel_warning msg = 
+  let
+    val warning_msg = cat_lines 
+      ["Generation of a parametrized correspondence relation failed.",
+      (Pretty.string_of (Pretty.block
+         [Pretty.str "Reason:", Pretty.brk 2, msg]))]
+  in
+    warning warning_msg
+  end
+
+fun define_pcrel crel lthy =
+  let
+    val pcrel = Morphism.term (Local_Theory.target_morphism lthy) crel
+    val [crel_poly] = Variable.polymorphic lthy [pcrel]
+    val rty_raw = (domain_type o fastype_of) crel_poly
+    val (quot_thm, assms) = Lifting_Term.prove_param_quot_thm lthy rty_raw
+    val args = map (fn (_, q_thm) => quot_thm_crel q_thm) assms
+    val parametrized_relator = quot_thm_crel quot_thm
+    val [rty, rty'] = (binder_types o fastype_of) parametrized_relator
+    val thy = Proof_Context.theory_of lthy
+    val tyenv_match = Sign.typ_match thy (rty_raw, rty') Vartab.empty
+    val crel_subst = Envir.subst_term (tyenv_match,Vartab.empty) crel_poly
+    val lthy = Variable.declare_names parametrized_relator lthy
+    val (crel_typed, lthy) = yield_singleton Variable.importT_terms crel_subst lthy
+    val qty = (domain_type o range_type o fastype_of) crel_typed
+    val relcomp_op = Const (@{const_name "relcompp"}, 
+          (rty --> rty' --> HOLogic.boolT) --> 
+          (rty' --> qty --> HOLogic.boolT) --> 
+          rty --> qty --> HOLogic.boolT)
+    val relator_type = foldr1 (op -->) ((map type_of args) @ [rty, qty, HOLogic.boolT])
+    val qty_name = (fst o dest_Type) qty
+    val pcrel_name = Binding.prefix_name "pcr_" ((Binding.name o Long_Name.base_name) qty_name)
+    val lhs = Library.foldl (op $) ((Free (Binding.name_of pcrel_name, relator_type)), args)
+    val rhs = relcomp_op $ parametrized_relator $ crel_typed
+    val definition_term = Logic.mk_equals (lhs, rhs)
+    val ((_, (_, def_thm)), lthy) = Specification.definition ((SOME (pcrel_name, SOME relator_type, NoSyn)), 
+      ((Binding.empty, []), definition_term)) lthy
+  in
+    (SOME def_thm, lthy)
+  end
+  handle Lifting_Term.PARAM_QUOT_THM (_, msg) => (print_define_pcrel_warning msg; (NONE, lthy))
+
 fun define_code_constr gen_code quot_thm lthy =
   let
     val abs = quot_thm_abs quot_thm
@@ -95,10 +137,11 @@
   let
     val _ = quot_thm_sanity_check lthy quot_thm
     val (_, qtyp) = quot_thm_rty_qty quot_thm
-    val qty_full_name = (fst o dest_Type) qtyp
-    val quotients = { quot_thm = quot_thm }
+    val (pcrel_def, lthy) = define_pcrel (quot_thm_crel quot_thm) lthy
+    val quotients = { quot_thm = quot_thm, pcrel_def = pcrel_def }
+    val qty_full_name = (fst o dest_Type) qtyp  
     fun quot_info phi = Lifting_Info.transform_quotients phi quotients
-    val lthy' = case maybe_reflp_thm of
+    val lthy = case maybe_reflp_thm of
       SOME reflp_thm => lthy
         |> (snd oo Local_Theory.note) ((Binding.empty, [Lifting_Info.add_reflexivity_rule_attrib]),
               [reflp_thm])
@@ -108,7 +151,7 @@
       | NONE => lthy
         |> define_abs_type gen_code quot_thm
   in
-    lthy'
+    lthy
       |> Local_Theory.declaration {syntax = false, pervasive = true}
         (fn phi => Lifting_Info.update_quotients qty_full_name (quot_info phi))
   end
@@ -130,7 +173,7 @@
     val induct_attr = Attrib.internal (K (Induct.induct_type (fst (dest_Type qty))))
     val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty
     fun qualify suffix = Binding.qualified true suffix qty_name
-    val lthy' = case maybe_reflp_thm of
+    val lthy = case maybe_reflp_thm of
       SOME reflp_thm => lthy
         |> (snd oo Local_Theory.note) ((qualify "bi_total", [transfer_attr]), 
           [[quot_thm, reflp_thm] MRSL @{thm Quotient_bi_total}])
@@ -150,7 +193,7 @@
         |> (snd oo Local_Theory.note) ((qualify "abs_induct", [induct_attr]),
           [quot_thm RS @{thm Quotient_abs_induct}])
   in
-    lthy'
+    lthy
       |> (snd oo Local_Theory.note) ((qualify "right_unique", [transfer_attr]), 
         [quot_thm RS @{thm Quotient_right_unique}])
       |> (snd oo Local_Theory.note) ((qualify "right_total", [transfer_attr]), 
@@ -172,7 +215,7 @@
   let
     val transfer_attr = Attrib.internal (K Transfer.transfer_add)
     val (_ $ rep_fun $ _ $ typedef_set) = (HOLogic.dest_Trueprop o prop_of) typedef_thm
-    val (T_def, lthy') = define_cr_rel rep_fun lthy
+    val (T_def, lthy') = define_crel rep_fun lthy
 
     val quot_thm = case typedef_set of
       Const ("Orderings.top_class.top", _) => 
--- a/src/HOL/Tools/Lifting/lifting_term.ML	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Tools/Lifting/lifting_term.ML	Mon Nov 26 17:13:44 2012 +0100
@@ -7,6 +7,7 @@
 signature LIFTING_TERM =
 sig
   exception QUOT_THM of typ * typ * Pretty.T
+  exception PARAM_QUOT_THM of typ * Pretty.T
   exception CHECK_RTY of typ * typ
 
   val prove_quot_thm: Proof.context -> typ * typ -> thm
@@ -14,17 +15,19 @@
   val abs_fun: Proof.context -> typ * typ -> term
 
   val equiv_relation: Proof.context -> typ * typ -> term
+
+  val prove_param_quot_thm: Proof.context -> typ -> thm * (typ * thm) list
 end
 
 structure Lifting_Term: LIFTING_TERM =
 struct
-
 open Lifting_Util
 
 infix 0 MRSL
 
 exception QUOT_THM_INTERNAL of Pretty.T
 exception QUOT_THM of typ * typ * Pretty.T
+exception PARAM_QUOT_THM of typ * Pretty.T
 exception CHECK_RTY of typ * typ
 
 fun match ctxt err ty_pat ty =
@@ -235,4 +238,51 @@
 fun equiv_relation ctxt (rty, qty) =
   quot_thm_rel (prove_quot_thm ctxt (rty, qty))
 
+fun get_fresh_Q_t ctxt =
+  let
+    val Q_t = Syntax.read_term ctxt "Trueprop (Quotient R Abs Rep T)"
+    val ctxt = Variable.declare_names Q_t ctxt
+    val frees_Q_t = Term.add_free_names Q_t []
+    val (_, ctxt) = Variable.variant_fixes frees_Q_t ctxt
+  in
+    (Q_t, ctxt)
+  end
+
+fun prove_param_quot_thm ctxt ty = 
+  let 
+    fun generate (ty as Type (s, tys)) (table_ctxt as (table, ctxt)) =
+      if null tys 
+      then 
+        let 
+          val thy = Proof_Context.theory_of ctxt
+          val instantiated_id_quot_thm = instantiate' [SOME (ctyp_of thy ty)] [] @{thm identity_quotient}
+        in
+          (instantiated_id_quot_thm, (table, ctxt)) 
+        end
+      else
+        let
+          val (args, table_ctxt) = fold_map generate tys table_ctxt
+        in
+          (args MRSL (get_rel_quot_thm ctxt s), table_ctxt)
+        end 
+      | generate (ty as (TVar _)) (table, ctxt) =
+        if AList.defined (op=) table ty 
+        then (the (AList.lookup (op=) table ty), (table, ctxt))
+        else 
+          let
+            val thy = Proof_Context.theory_of ctxt
+            val (Q_t, ctxt') = get_fresh_Q_t ctxt
+            val Q_thm = Thm.assume (cterm_of thy Q_t)
+            val table' = (ty, Q_thm)::table
+          in
+            (Q_thm, (table', ctxt'))
+          end
+      | generate _ _ = error "generate_param_quot_thm: TFree"
+
+    val (param_quot_thm, (table, _)) = generate ty ([], ctxt)
+  in
+    (param_quot_thm, rev table)
+  end
+  handle QUOT_THM_INTERNAL pretty_msg => raise PARAM_QUOT_THM (ty, pretty_msg)
+
 end;
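
The new prove_param_quot_thm builds a parametrized quotient theorem by recursion on the structure of the raw type: a nullary type constructor is handled by an instantiated identity_quotient theorem, an applied constructor combines the theorems obtained for its argument types via the constructor's relator quotient rule, and each type variable gets a fresh assumed "Quotient R Abs Rep T" hypothesis, recorded in a table that is returned alongside the theorem; type frees are rejected. A Python sketch of the traversal pattern only, with hypothetical string stand-ins for theorems (not Isabelle's API):

    # Types are ("type", name, args) or ("tvar", name); "theorems" are plain strings.
    def param_quot(ty, table):
        if ty[0] == "type":
            _, name, args = ty
            if not args:                        # nullary constructor: identity quotient
                return "identity_quotient[%s]" % name, table
            thms = []
            for arg in args:                    # recurse over the type arguments
                thm, table = param_quot(arg, table)
                thms.append(thm)
            return "rel_quot_thm[%s](%s)" % (name, ", ".join(thms)), table
        if ty[0] == "tvar":                     # type variable: reuse or assume a fresh Quotient
            _, name = ty
            if name not in table:
                table = {**table, name: "assumed Quotient for '%s" % name}
            return table[name], table
        raise ValueError("param_quot: TFree")   # type frees are not supported

    thm, assumptions = param_quot(("type", "list", [("tvar", "a")]), {})
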
--- a/src/HOL/Tools/Lifting/lifting_util.ML	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Tools/Lifting/lifting_util.ML	Mon Nov 26 17:13:44 2012 +0100
@@ -8,11 +8,13 @@
 sig
   val MRSL: thm list * thm -> thm
   val Trueprop_conv: conv -> conv
+  val option_fold: 'b -> ('a -> 'b) -> 'a option -> 'b
   val dest_Quotient: term -> term * term * term * term
 
   val quot_thm_rel: thm -> term
   val quot_thm_abs: thm -> term
   val quot_thm_rep: thm -> term
+  val quot_thm_crel: thm -> term
   val quot_thm_rty_qty: thm -> typ * typ
 end;
 
@@ -29,6 +31,9 @@
     Const (@{const_name Trueprop}, _) $ _ => Conv.arg_conv cv ct
   | _ => raise CTERM ("Trueprop_conv", [ct]))
 
+fun option_fold a _ NONE = a
+  | option_fold _ f (SOME x) = f x
+
 fun dest_Quotient (Const (@{const_name Quotient}, _) $ rel $ abs $ rep $ cr)
       = (rel, abs, rep, cr)
   | dest_Quotient t = raise TERM ("dest_Quotient", [t])
@@ -50,6 +55,10 @@
   case (dest_Quotient o HOLogic.dest_Trueprop o prop_of) quot_thm of
     (_, _, rep, _) => rep
 
+fun quot_thm_crel quot_thm =
+  case (dest_Quotient o HOLogic.dest_Trueprop o prop_of) quot_thm of
+    (_, _, _, crel) => crel
+
 fun quot_thm_rty_qty quot_thm =
   let
     val abs = quot_thm_abs quot_thm
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/etc/settings	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,3 @@
+# -*- shell-script -*- :mode=shellscript:
+
+ISABELLE_SLEDGEHAMMER_MASH="$COMPONENT"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/argparse.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,2357 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/argparse.py
+#
+# Argument parser. See copyright notice below.
+
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2006-2009 Steven J. Bethard <steven.bethard@gmail.com>.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Command-line parsing library
+
+This module is an optparse-inspired command-line parsing library that:
+
+    - handles both optional and positional arguments
+    - produces highly informative usage messages
+    - supports parsers that dispatch to sub-parsers
+
+The following is a simple usage example that sums integers from the
+command-line and writes the result to a file::
+
+    parser = argparse.ArgumentParser(
+        description='sum the integers at the command line')
+    parser.add_argument(
+        'integers', metavar='int', nargs='+', type=int,
+        help='an integer to be summed')
+    parser.add_argument(
+        '--log', default=sys.stdout, type=argparse.FileType('w'),
+        help='the file where the sum should be written')
+    args = parser.parse_args()
+    args.log.write('%s' % sum(args.integers))
+    args.log.close()
+
+The module contains the following public classes:
+
+    - ArgumentParser -- The main entry point for command-line parsing. As the
+        example above shows, the add_argument() method is used to populate
+        the parser with actions for optional and positional arguments. Then
+        the parse_args() method is invoked to convert the args at the
+        command-line into an object with attributes.
+
+    - ArgumentError -- The exception raised by ArgumentParser objects when
+        there are errors with the parser's actions. Errors raised while
+        parsing the command-line are caught by ArgumentParser and emitted
+        as command-line messages.
+
+    - FileType -- A factory for defining types of files to be created. As the
+        example above shows, instances of FileType are typically passed as
+        the type= argument of add_argument() calls.
+
+    - Action -- The base class for parser actions. Typically actions are
+        selected by passing strings like 'store_true' or 'append_const' to
+        the action= argument of add_argument(). However, for greater
+        customization of ArgumentParser actions, subclasses of Action may
+        be defined and passed as the action= argument.
+
+    - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
+        ArgumentDefaultsHelpFormatter -- Formatter classes which
+        may be passed as the formatter_class= argument to the
+        ArgumentParser constructor. HelpFormatter is the default,
+        RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
+        not to change the formatting for help text, and
+        ArgumentDefaultsHelpFormatter adds information about argument defaults
+        to the help.
+
+All other classes in this module are considered implementation details.
+(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
+considered public as object names -- the API of the formatter objects is
+still considered an implementation detail.)
+"""
+
+__version__ = '1.1'
+__all__ = [
+    'ArgumentParser',
+    'ArgumentError',
+    'Namespace',
+    'Action',
+    'FileType',
+    'HelpFormatter',
+    'RawDescriptionHelpFormatter',
+    'RawTextHelpFormatter',
+    'ArgumentDefaultsHelpFormatter',
+]
+
+
+import copy as _copy
+import os as _os
+import re as _re
+import sys as _sys
+import textwrap as _textwrap
+
+from gettext import gettext as _
+
+try:
+    _set = set
+except NameError:
+    from sets import Set as _set
+
+try:
+    _basestring = basestring
+except NameError:
+    _basestring = str
+
+try:
+    _sorted = sorted
+except NameError:
+
+    def _sorted(iterable, reverse=False):
+        result = list(iterable)
+        result.sort()
+        if reverse:
+            result.reverse()
+        return result
+
+
+def _callable(obj):
+    return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
+
+# silence Python 2.6 buggy warnings about Exception.message
+if _sys.version_info[:2] == (2, 6):
+    import warnings
+    warnings.filterwarnings(
+        action='ignore',
+        message='BaseException.message has been deprecated as of Python 2.6',
+        category=DeprecationWarning,
+        module='argparse')
+
+
+SUPPRESS = '==SUPPRESS=='
+
+OPTIONAL = '?'
+ZERO_OR_MORE = '*'
+ONE_OR_MORE = '+'
+PARSER = 'A...'
+REMAINDER = '...'
+
+# =============================
+# Utility functions and classes
+# =============================
+
+class _AttributeHolder(object):
+    """Abstract base class that provides __repr__.
+
+    The __repr__ method returns a string in the format::
+        ClassName(attr=name, attr=name, ...)
+    The attributes are determined either by a class-level attribute,
+    '_kwarg_names', or by inspecting the instance __dict__.
+    """
+
+    def __repr__(self):
+        type_name = type(self).__name__
+        arg_strings = []
+        for arg in self._get_args():
+            arg_strings.append(repr(arg))
+        for name, value in self._get_kwargs():
+            arg_strings.append('%s=%r' % (name, value))
+        return '%s(%s)' % (type_name, ', '.join(arg_strings))
+
+    def _get_kwargs(self):
+        return _sorted(self.__dict__.items())
+
+    def _get_args(self):
+        return []
+
+
+def _ensure_value(namespace, name, value):
+    if getattr(namespace, name, None) is None:
+        setattr(namespace, name, value)
+    return getattr(namespace, name)
+
+
+# ===============
+# Formatting Help
+# ===============
+
+class HelpFormatter(object):
+    """Formatter for generating usage messages and argument help strings.
+
+    Only the name of this class is considered a public API. All the methods
+    provided by the class are considered an implementation detail.
+    """
+
+    def __init__(self,
+                 prog,
+                 indent_increment=2,
+                 max_help_position=24,
+                 width=None):
+
+        # default setting for width
+        if width is None:
+            try:
+                width = int(_os.environ['COLUMNS'])
+            except (KeyError, ValueError):
+                width = 80
+            width -= 2
+
+        self._prog = prog
+        self._indent_increment = indent_increment
+        self._max_help_position = max_help_position
+        self._width = width
+
+        self._current_indent = 0
+        self._level = 0
+        self._action_max_length = 0
+
+        self._root_section = self._Section(self, None)
+        self._current_section = self._root_section
+
+        self._whitespace_matcher = _re.compile(r'\s+')
+        self._long_break_matcher = _re.compile(r'\n\n\n+')
+
+    # ===============================
+    # Section and indentation methods
+    # ===============================
+    def _indent(self):
+        self._current_indent += self._indent_increment
+        self._level += 1
+
+    def _dedent(self):
+        self._current_indent -= self._indent_increment
+        assert self._current_indent >= 0, 'Indent decreased below 0.'
+        self._level -= 1
+
+    class _Section(object):
+
+        def __init__(self, formatter, parent, heading=None):
+            self.formatter = formatter
+            self.parent = parent
+            self.heading = heading
+            self.items = []
+
+        def format_help(self):
+            # format the indented section
+            if self.parent is not None:
+                self.formatter._indent()
+            join = self.formatter._join_parts
+            for func, args in self.items:
+                func(*args)
+            item_help = join([func(*args) for func, args in self.items])
+            if self.parent is not None:
+                self.formatter._dedent()
+
+            # return nothing if the section was empty
+            if not item_help:
+                return ''
+
+            # add the heading if the section was non-empty
+            if self.heading is not SUPPRESS and self.heading is not None:
+                current_indent = self.formatter._current_indent
+                heading = '%*s%s:\n' % (current_indent, '', self.heading)
+            else:
+                heading = ''
+
+            # join the section-initial newline, the heading and the help
+            return join(['\n', heading, item_help, '\n'])
+
+    def _add_item(self, func, args):
+        self._current_section.items.append((func, args))
+
+    # ========================
+    # Message building methods
+    # ========================
+    def start_section(self, heading):
+        self._indent()
+        section = self._Section(self, self._current_section, heading)
+        self._add_item(section.format_help, [])
+        self._current_section = section
+
+    def end_section(self):
+        self._current_section = self._current_section.parent
+        self._dedent()
+
+    def add_text(self, text):
+        if text is not SUPPRESS and text is not None:
+            self._add_item(self._format_text, [text])
+
+    def add_usage(self, usage, actions, groups, prefix=None):
+        if usage is not SUPPRESS:
+            args = usage, actions, groups, prefix
+            self._add_item(self._format_usage, args)
+
+    def add_argument(self, action):
+        if action.help is not SUPPRESS:
+
+            # find all invocations
+            get_invocation = self._format_action_invocation
+            invocations = [get_invocation(action)]
+            for subaction in self._iter_indented_subactions(action):
+                invocations.append(get_invocation(subaction))
+
+            # update the maximum item length
+            invocation_length = max([len(s) for s in invocations])
+            action_length = invocation_length + self._current_indent
+            self._action_max_length = max(self._action_max_length,
+                                          action_length)
+
+            # add the item to the list
+            self._add_item(self._format_action, [action])
+
+    def add_arguments(self, actions):
+        for action in actions:
+            self.add_argument(action)
+
+    # =======================
+    # Help-formatting methods
+    # =======================
+    def format_help(self):
+        help = self._root_section.format_help()
+        if help:
+            help = self._long_break_matcher.sub('\n\n', help)
+            help = help.strip('\n') + '\n'
+        return help
+
+    def _join_parts(self, part_strings):
+        return ''.join([part
+                        for part in part_strings
+                        if part and part is not SUPPRESS])
+
+    def _format_usage(self, usage, actions, groups, prefix):
+        if prefix is None:
+            prefix = _('usage: ')
+
+        # if usage is specified, use that
+        if usage is not None:
+            usage = usage % dict(prog=self._prog)
+
+        # if no optionals or positionals are available, usage is just prog
+        elif usage is None and not actions:
+            usage = '%(prog)s' % dict(prog=self._prog)
+
+        # if optionals and positionals are available, calculate usage
+        elif usage is None:
+            prog = '%(prog)s' % dict(prog=self._prog)
+
+            # split optionals from positionals
+            optionals = []
+            positionals = []
+            for action in actions:
+                if action.option_strings:
+                    optionals.append(action)
+                else:
+                    positionals.append(action)
+
+            # build full usage string
+            format = self._format_actions_usage
+            action_usage = format(optionals + positionals, groups)
+            usage = ' '.join([s for s in [prog, action_usage] if s])
+
+            # wrap the usage parts if it's too long
+            text_width = self._width - self._current_indent
+            if len(prefix) + len(usage) > text_width:
+
+                # break usage into wrappable parts
+                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
+                opt_usage = format(optionals, groups)
+                pos_usage = format(positionals, groups)
+                opt_parts = _re.findall(part_regexp, opt_usage)
+                pos_parts = _re.findall(part_regexp, pos_usage)
+                assert ' '.join(opt_parts) == opt_usage
+                assert ' '.join(pos_parts) == pos_usage
+
+                # helper for wrapping lines
+                def get_lines(parts, indent, prefix=None):
+                    lines = []
+                    line = []
+                    if prefix is not None:
+                        line_len = len(prefix) - 1
+                    else:
+                        line_len = len(indent) - 1
+                    for part in parts:
+                        if line_len + 1 + len(part) > text_width:
+                            lines.append(indent + ' '.join(line))
+                            line = []
+                            line_len = len(indent) - 1
+                        line.append(part)
+                        line_len += len(part) + 1
+                    if line:
+                        lines.append(indent + ' '.join(line))
+                    if prefix is not None:
+                        lines[0] = lines[0][len(indent):]
+                    return lines
+
+                # if prog is short, follow it with optionals or positionals
+                if len(prefix) + len(prog) <= 0.75 * text_width:
+                    indent = ' ' * (len(prefix) + len(prog) + 1)
+                    if opt_parts:
+                        lines = get_lines([prog] + opt_parts, indent, prefix)
+                        lines.extend(get_lines(pos_parts, indent))
+                    elif pos_parts:
+                        lines = get_lines([prog] + pos_parts, indent, prefix)
+                    else:
+                        lines = [prog]
+
+                # if prog is long, put it on its own line
+                else:
+                    indent = ' ' * len(prefix)
+                    parts = opt_parts + pos_parts
+                    lines = get_lines(parts, indent)
+                    if len(lines) > 1:
+                        lines = []
+                        lines.extend(get_lines(opt_parts, indent))
+                        lines.extend(get_lines(pos_parts, indent))
+                    lines = [prog] + lines
+
+                # join lines into usage
+                usage = '\n'.join(lines)
+
+        # prefix with 'usage:'
+        return '%s%s\n\n' % (prefix, usage)
+
+    def _format_actions_usage(self, actions, groups):
+        # find group indices and identify actions in groups
+        group_actions = _set()
+        inserts = {}
+        for group in groups:
+            try:
+                start = actions.index(group._group_actions[0])
+            except ValueError:
+                continue
+            else:
+                end = start + len(group._group_actions)
+                if actions[start:end] == group._group_actions:
+                    for action in group._group_actions:
+                        group_actions.add(action)
+                    if not group.required:
+                        inserts[start] = '['
+                        inserts[end] = ']'
+                    else:
+                        inserts[start] = '('
+                        inserts[end] = ')'
+                    for i in range(start + 1, end):
+                        inserts[i] = '|'
+
+        # collect all actions format strings
+        parts = []
+        for i, action in enumerate(actions):
+
+            # suppressed arguments are marked with None
+            # remove | separators for suppressed arguments
+            if action.help is SUPPRESS:
+                parts.append(None)
+                if inserts.get(i) == '|':
+                    inserts.pop(i)
+                elif inserts.get(i + 1) == '|':
+                    inserts.pop(i + 1)
+
+            # produce all arg strings
+            elif not action.option_strings:
+                part = self._format_args(action, action.dest)
+
+                # if it's in a group, strip the outer []
+                if action in group_actions:
+                    if part[0] == '[' and part[-1] == ']':
+                        part = part[1:-1]
+
+                # add the action string to the list
+                parts.append(part)
+
+            # produce the first way to invoke the option in brackets
+            else:
+                option_string = action.option_strings[0]
+
+                # if the Optional doesn't take a value, format is:
+                #    -s or --long
+                if action.nargs == 0:
+                    part = '%s' % option_string
+
+                # if the Optional takes a value, format is:
+                #    -s ARGS or --long ARGS
+                else:
+                    default = action.dest.upper()
+                    args_string = self._format_args(action, default)
+                    part = '%s %s' % (option_string, args_string)
+
+                # make it look optional if it's not required or in a group
+                if not action.required and action not in group_actions:
+                    part = '[%s]' % part
+
+                # add the action string to the list
+                parts.append(part)
+
+        # insert things at the necessary indices
+        for i in _sorted(inserts, reverse=True):
+            parts[i:i] = [inserts[i]]
+
+        # join all the action items with spaces
+        text = ' '.join([item for item in parts if item is not None])
+
+        # clean up separators for mutually exclusive groups
+        open = r'[\[(]'
+        close = r'[\])]'
+        text = _re.sub(r'(%s) ' % open, r'\1', text)
+        text = _re.sub(r' (%s)' % close, r'\1', text)
+        text = _re.sub(r'%s *%s' % (open, close), r'', text)
+        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
+        text = text.strip()
+
+        # return the text
+        return text
+
+    def _format_text(self, text):
+        if '%(prog)' in text:
+            text = text % dict(prog=self._prog)
+        text_width = self._width - self._current_indent
+        indent = ' ' * self._current_indent
+        return self._fill_text(text, text_width, indent) + '\n\n'
+
+    def _format_action(self, action):
+        # determine the required width and the entry label
+        help_position = min(self._action_max_length + 2,
+                            self._max_help_position)
+        help_width = self._width - help_position
+        action_width = help_position - self._current_indent - 2
+        action_header = self._format_action_invocation(action)
+
+        # no help; start on same line and add a final newline
+        if not action.help:
+            tup = self._current_indent, '', action_header
+            action_header = '%*s%s\n' % tup
+
+        # short action name; start on the same line and pad two spaces
+        elif len(action_header) <= action_width:
+            tup = self._current_indent, '', action_width, action_header
+            action_header = '%*s%-*s  ' % tup
+            indent_first = 0
+
+        # long action name; start on the next line
+        else:
+            tup = self._current_indent, '', action_header
+            action_header = '%*s%s\n' % tup
+            indent_first = help_position
+
+        # collect the pieces of the action help
+        parts = [action_header]
+
+        # if there was help for the action, add lines of help text
+        if action.help:
+            help_text = self._expand_help(action)
+            help_lines = self._split_lines(help_text, help_width)
+            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
+            for line in help_lines[1:]:
+                parts.append('%*s%s\n' % (help_position, '', line))
+
+        # or add a newline if the description doesn't end with one
+        elif not action_header.endswith('\n'):
+            parts.append('\n')
+
+        # if there are any sub-actions, add their help as well
+        for subaction in self._iter_indented_subactions(action):
+            parts.append(self._format_action(subaction))
+
+        # return a single string
+        return self._join_parts(parts)
+
+    def _format_action_invocation(self, action):
+        if not action.option_strings:
+            metavar, = self._metavar_formatter(action, action.dest)(1)
+            return metavar
+
+        else:
+            parts = []
+
+            # if the Optional doesn't take a value, format is:
+            #    -s, --long
+            if action.nargs == 0:
+                parts.extend(action.option_strings)
+
+            # if the Optional takes a value, format is:
+            #    -s ARGS, --long ARGS
+            else:
+                default = action.dest.upper()
+                args_string = self._format_args(action, default)
+                for option_string in action.option_strings:
+                    parts.append('%s %s' % (option_string, args_string))
+
+            return ', '.join(parts)
+
+    def _metavar_formatter(self, action, default_metavar):
+        if action.metavar is not None:
+            result = action.metavar
+        elif action.choices is not None:
+            choice_strs = [str(choice) for choice in action.choices]
+            result = '{%s}' % ','.join(choice_strs)
+        else:
+            result = default_metavar
+
+        def format(tuple_size):
+            if isinstance(result, tuple):
+                return result
+            else:
+                return (result, ) * tuple_size
+        return format
+
+    def _format_args(self, action, default_metavar):
+        get_metavar = self._metavar_formatter(action, default_metavar)
+        if action.nargs is None:
+            result = '%s' % get_metavar(1)
+        elif action.nargs == OPTIONAL:
+            result = '[%s]' % get_metavar(1)
+        elif action.nargs == ZERO_OR_MORE:
+            result = '[%s [%s ...]]' % get_metavar(2)
+        elif action.nargs == ONE_OR_MORE:
+            result = '%s [%s ...]' % get_metavar(2)
+        elif action.nargs == REMAINDER:
+            result = '...'
+        elif action.nargs == PARSER:
+            result = '%s ...' % get_metavar(1)
+        else:
+            formats = ['%s' for _ in range(action.nargs)]
+            result = ' '.join(formats) % get_metavar(action.nargs)
+        return result
+
+    def _expand_help(self, action):
+        params = dict(vars(action), prog=self._prog)
+        for name in list(params):
+            if params[name] is SUPPRESS:
+                del params[name]
+        for name in list(params):
+            if hasattr(params[name], '__name__'):
+                params[name] = params[name].__name__
+        if params.get('choices') is not None:
+            choices_str = ', '.join([str(c) for c in params['choices']])
+            params['choices'] = choices_str
+        return self._get_help_string(action) % params
+
+    def _iter_indented_subactions(self, action):
+        try:
+            get_subactions = action._get_subactions
+        except AttributeError:
+            pass
+        else:
+            self._indent()
+            for subaction in get_subactions():
+                yield subaction
+            self._dedent()
+
+    def _split_lines(self, text, width):
+        text = self._whitespace_matcher.sub(' ', text).strip()
+        return _textwrap.wrap(text, width)
+
+    def _fill_text(self, text, width, indent):
+        text = self._whitespace_matcher.sub(' ', text).strip()
+        return _textwrap.fill(text, width, initial_indent=indent,
+                                           subsequent_indent=indent)
+
+    def _get_help_string(self, action):
+        return action.help
+
+
+class RawDescriptionHelpFormatter(HelpFormatter):
+    """Help message formatter which retains any formatting in descriptions.
+
+    Only the name of this class is considered a public API. All the methods
+    provided by the class are considered an implementation detail.
+    """
+
+    def _fill_text(self, text, width, indent):
+        return ''.join([indent + line for line in text.splitlines(True)])
+
+
+class RawTextHelpFormatter(RawDescriptionHelpFormatter):
+    """Help message formatter which retains formatting of all help text.
+
+    Only the name of this class is considered a public API. All the methods
+    provided by the class are considered an implementation detail.
+    """
+
+    def _split_lines(self, text, width):
+        return text.splitlines()
+
+
+class ArgumentDefaultsHelpFormatter(HelpFormatter):
+    """Help message formatter which adds default values to argument help.
+
+    Only the name of this class is considered a public API. All the methods
+    provided by the class are considered an implementation detail.
+    """
+
+    def _get_help_string(self, action):
+        help = action.help
+        if '%(default)' not in action.help:
+            if action.default is not SUPPRESS:
+                defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
+                if action.option_strings or action.nargs in defaulting_nargs:
+                    help += ' (default: %(default)s)'
+        return help
+
+
+# =====================
+# Options and Arguments
+# =====================
+
+def _get_action_name(argument):
+    if argument is None:
+        return None
+    elif argument.option_strings:
+        return  '/'.join(argument.option_strings)
+    elif argument.metavar not in (None, SUPPRESS):
+        return argument.metavar
+    elif argument.dest not in (None, SUPPRESS):
+        return argument.dest
+    else:
+        return None
+
+
+class ArgumentError(Exception):
+    """An error from creating or using an argument (optional or positional).
+
+    The string value of this exception is the message, augmented with
+    information about the argument that caused it.
+    """
+
+    def __init__(self, argument, message):
+        self.argument_name = _get_action_name(argument)
+        self.message = message
+
+    def __str__(self):
+        if self.argument_name is None:
+            format = '%(message)s'
+        else:
+            format = 'argument %(argument_name)s: %(message)s'
+        return format % dict(message=self.message,
+                             argument_name=self.argument_name)
+
+
+class ArgumentTypeError(Exception):
+    """An error from trying to convert a command line string to a type."""
+    pass
+
+
+# ==============
+# Action classes
+# ==============
+
+class Action(_AttributeHolder):
+    """Information about how to convert command line strings to Python objects.
+
+    Action objects are used by an ArgumentParser to represent the information
+    needed to parse a single argument from one or more strings from the
+    command line. The keyword arguments to the Action constructor are also
+    all attributes of Action instances.
+
+    Keyword Arguments:
+
+        - option_strings -- A list of command-line option strings which
+            should be associated with this action.
+
+        - dest -- The name of the attribute to hold the created object(s)
+
+        - nargs -- The number of command-line arguments that should be
+            consumed. By default, one argument will be consumed and a single
+            value will be produced.  Other values include:
+                - N (an integer) consumes N arguments (and produces a list)
+                - '?' consumes zero or one arguments
+                - '*' consumes zero or more arguments (and produces a list)
+                - '+' consumes one or more arguments (and produces a list)
+            Note that the difference between the default and nargs=1 is that
+            with the default, a single value will be produced, while with
+            nargs=1, a list containing a single value will be produced.
+
+        - const -- The value to be produced if the option is specified and the
+            option uses an action that takes no values.
+
+        - default -- The value to be produced if the option is not specified.
+
+        - type -- The type which the command-line arguments should be converted
+            to, should be one of 'string', 'int', 'float', 'complex' or a
+            callable object that accepts a single string argument. If None,
+            'string' is assumed.
+
+        - choices -- A container of values that should be allowed. If not None,
+            after a command-line argument has been converted to the appropriate
+            type, an exception will be raised if it is not a member of this
+            collection.
+
+        - required -- True if the action must always be specified at the
+            command line. This is only meaningful for optional command-line
+            arguments.
+
+        - help -- The help string describing the argument.
+
+        - metavar -- The name to be used for the option's argument with the
+            help string. If None, the 'dest' value will be used as the name.
+    """
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 nargs=None,
+                 const=None,
+                 default=None,
+                 type=None,
+                 choices=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        self.option_strings = option_strings
+        self.dest = dest
+        self.nargs = nargs
+        self.const = const
+        self.default = default
+        self.type = type
+        self.choices = choices
+        self.required = required
+        self.help = help
+        self.metavar = metavar
+
+    def _get_kwargs(self):
+        names = [
+            'option_strings',
+            'dest',
+            'nargs',
+            'const',
+            'default',
+            'type',
+            'choices',
+            'help',
+            'metavar',
+        ]
+        return [(name, getattr(self, name)) for name in names]
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        raise NotImplementedError(_('.__call__() not defined'))
+
+
+class _StoreAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 nargs=None,
+                 const=None,
+                 default=None,
+                 type=None,
+                 choices=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        if nargs == 0:
+            raise ValueError('nargs for store actions must be > 0; if you '
+                             'have nothing to store, actions such as store '
+                             'true or store const may be more appropriate')
+        if const is not None and nargs != OPTIONAL:
+            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
+        super(_StoreAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=nargs,
+            const=const,
+            default=default,
+            type=type,
+            choices=choices,
+            required=required,
+            help=help,
+            metavar=metavar)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        setattr(namespace, self.dest, values)
+
+
+class _StoreConstAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 const,
+                 default=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        super(_StoreConstAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=0,
+            const=const,
+            default=default,
+            required=required,
+            help=help)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        setattr(namespace, self.dest, self.const)
+
+
+class _StoreTrueAction(_StoreConstAction):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=False,
+                 required=False,
+                 help=None):
+        super(_StoreTrueAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            const=True,
+            default=default,
+            required=required,
+            help=help)
+
+
+class _StoreFalseAction(_StoreConstAction):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=True,
+                 required=False,
+                 help=None):
+        super(_StoreFalseAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            const=False,
+            default=default,
+            required=required,
+            help=help)
+
+
+class _AppendAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 nargs=None,
+                 const=None,
+                 default=None,
+                 type=None,
+                 choices=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        if nargs == 0:
+            raise ValueError('nargs for append actions must be > 0; if arg '
+                             'strings are not supplying the value to append, '
+                             'the append const action may be more appropriate')
+        if const is not None and nargs != OPTIONAL:
+            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
+        super(_AppendAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=nargs,
+            const=const,
+            default=default,
+            type=type,
+            choices=choices,
+            required=required,
+            help=help,
+            metavar=metavar)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        items = _copy.copy(_ensure_value(namespace, self.dest, []))
+        items.append(values)
+        setattr(namespace, self.dest, items)
+
+
+class _AppendConstAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 const,
+                 default=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        super(_AppendConstAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=0,
+            const=const,
+            default=default,
+            required=required,
+            help=help,
+            metavar=metavar)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        items = _copy.copy(_ensure_value(namespace, self.dest, []))
+        items.append(self.const)
+        setattr(namespace, self.dest, items)
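+
+# Rough usage sketch for the two append actions (the parser and option names
+# below are assumed examples, not defined in this module):
+#
+#   p = ArgumentParser()
+#   p.add_argument('--include', action='append')
+#   p.add_argument('--std', dest='include', action='append_const', const='sys')
+#   p.parse_args(['--include', 'a', '--std'])  # -> Namespace(include=['a', 'sys'])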
+
+
+class _CountAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=None,
+                 required=False,
+                 help=None):
+        super(_CountAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=0,
+            default=default,
+            required=required,
+            help=help)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        new_count = _ensure_value(namespace, self.dest, 0) + 1
+        setattr(namespace, self.dest, new_count)
+
+
+class _HelpAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest=SUPPRESS,
+                 default=SUPPRESS,
+                 help=None):
+        super(_HelpAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            default=default,
+            nargs=0,
+            help=help)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        parser.print_help()
+        parser.exit()
+
+
+class _VersionAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 version=None,
+                 dest=SUPPRESS,
+                 default=SUPPRESS,
+                 help=None):
+        super(_VersionAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            default=default,
+            nargs=0,
+            help=help)
+        self.version = version
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        version = self.version
+        if version is None:
+            version = parser.version
+        formatter = parser._get_formatter()
+        formatter.add_text(version)
+        parser.exit(message=formatter.format_help())
+
+
+class _SubParsersAction(Action):
+
+    class _ChoicesPseudoAction(Action):
+
+        def __init__(self, name, help):
+            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
+            sup.__init__(option_strings=[], dest=name, help=help)
+
+    def __init__(self,
+                 option_strings,
+                 prog,
+                 parser_class,
+                 dest=SUPPRESS,
+                 help=None,
+                 metavar=None):
+
+        self._prog_prefix = prog
+        self._parser_class = parser_class
+        self._name_parser_map = {}
+        self._choices_actions = []
+
+        super(_SubParsersAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=PARSER,
+            choices=self._name_parser_map,
+            help=help,
+            metavar=metavar)
+
+    def add_parser(self, name, **kwargs):
+        # set prog from the existing prefix
+        if kwargs.get('prog') is None:
+            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
+
+        # create a pseudo-action to hold the choice help
+        if 'help' in kwargs:
+            help = kwargs.pop('help')
+            choice_action = self._ChoicesPseudoAction(name, help)
+            self._choices_actions.append(choice_action)
+
+        # create the parser and add it to the map
+        parser = self._parser_class(**kwargs)
+        self._name_parser_map[name] = parser
+        return parser
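+
+    # Typical wiring, sketched with assumed names ('tool', 'checkout', 'rev'):
+    #
+    #   p = ArgumentParser(prog='tool')
+    #   sub = p.add_subparsers(dest='command')
+    #   co = sub.add_parser('checkout', help='check out a revision')
+    #   co.add_argument('rev')
+    #   p.parse_args(['checkout', 'tip'])
+    #   # -> Namespace(command='checkout', rev='tip')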
+
+    def _get_subactions(self):
+        return self._choices_actions
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        parser_name = values[0]
+        arg_strings = values[1:]
+
+        # set the parser name if requested
+        if self.dest is not SUPPRESS:
+            setattr(namespace, self.dest, parser_name)
+
+        # select the parser
+        try:
+            parser = self._name_parser_map[parser_name]
+        except KeyError:
+            tup = parser_name, ', '.join(self._name_parser_map)
+            msg = _('unknown parser %r (choices: %s)') % tup
+            raise ArgumentError(self, msg)
+
+        # parse all the remaining options into the namespace
+        parser.parse_args(arg_strings, namespace)
+
+
+# ==============
+# Type classes
+# ==============
+
+class FileType(object):
+    """Factory for creating file object types
+
+    Instances of FileType are typically passed as type= arguments to the
+    ArgumentParser add_argument() method.
+
+    Keyword Arguments:
+        - mode -- A string indicating how the file is to be opened. Accepts the
+            same values as the builtin open() function.
+        - bufsize -- The file's desired buffer size. Accepts the same values as
+            the builtin open() function.
+    """
+
+    def __init__(self, mode='r', bufsize=None):
+        self._mode = mode
+        self._bufsize = bufsize
+
+    def __call__(self, string):
+        # the special argument "-" means sys.std{in,out}
+        if string == '-':
+            if 'r' in self._mode:
+                return _sys.stdin
+            elif 'w' in self._mode:
+                return _sys.stdout
+            else:
+                msg = _('argument "-" with mode %r' % self._mode)
+                raise ValueError(msg)
+
+        # all other arguments are used as file names
+        if self._bufsize:
+            return open(string, self._mode, self._bufsize)
+        else:
+            return open(string, self._mode)
+
+    def __repr__(self):
+        args = [self._mode, self._bufsize]
+        args_str = ', '.join([repr(arg) for arg in args if arg is not None])
+        return '%s(%s)' % (type(self).__name__, args_str)
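+
+# In practice, FileType instances are passed via type= when adding arguments
+# (the argument names below are assumed examples):
+#
+#   parser.add_argument('outfile', type=FileType('w'))
+#   parser.add_argument('--log', type=FileType('r'))
+#
+# and the special command-line value '-' is mapped to sys.stdout or sys.stdin
+# depending on the mode.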
+
+# ===========================
+# Optional and Positional Parsing
+# ===========================
+
+class Namespace(_AttributeHolder):
+    """Simple object for storing attributes.
+
+    Implements equality by attribute names and values, and provides a simple
+    string representation.
+    """
+
+    def __init__(self, **kwargs):
+        for name in kwargs:
+            setattr(self, name, kwargs[name])
+
+    def __eq__(self, other):
+        return vars(self) == vars(other)
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __contains__(self, key):
+        return key in self.__dict__
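+
+# For example (the attribute names and values are arbitrary):
+#
+#   ns = Namespace(x=1, y='a')
+#   ns.x                           # -> 1
+#   'y' in ns                      # -> True
+#   ns == Namespace(x=1, y='a')    # -> True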
+
+
+class _ActionsContainer(object):
+
+    def __init__(self,
+                 description,
+                 prefix_chars,
+                 argument_default,
+                 conflict_handler):
+        super(_ActionsContainer, self).__init__()
+
+        self.description = description
+        self.argument_default = argument_default
+        self.prefix_chars = prefix_chars
+        self.conflict_handler = conflict_handler
+
+        # set up registries
+        self._registries = {}
+
+        # register actions
+        self.register('action', None, _StoreAction)
+        self.register('action', 'store', _StoreAction)
+        self.register('action', 'store_const', _StoreConstAction)
+        self.register('action', 'store_true', _StoreTrueAction)
+        self.register('action', 'store_false', _StoreFalseAction)
+        self.register('action', 'append', _AppendAction)
+        self.register('action', 'append_const', _AppendConstAction)
+        self.register('action', 'count', _CountAction)
+        self.register('action', 'help', _HelpAction)
+        self.register('action', 'version', _VersionAction)
+        self.register('action', 'parsers', _SubParsersAction)
+
+        # raise an exception if the conflict handler is invalid
+        self._get_handler()
+
+        # action storage
+        self._actions = []
+        self._option_string_actions = {}
+
+        # groups
+        self._action_groups = []
+        self._mutually_exclusive_groups = []
+
+        # defaults storage
+        self._defaults = {}
+
+        # determines whether an "option" looks like a negative number
+        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
+
+        # whether or not there are any optionals that look like negative
+        # numbers -- uses a list so it can be shared and edited
+        self._has_negative_number_optionals = []
+
+    # ====================
+    # Registration methods
+    # ====================
+    def register(self, registry_name, value, object):
+        registry = self._registries.setdefault(registry_name, {})
+        registry[value] = object
+
+    def _registry_get(self, registry_name, value, default=None):
+        return self._registries[registry_name].get(value, default)
+
+    # ==================================
+    # Namespace default accessor methods
+    # ==================================
+    def set_defaults(self, **kwargs):
+        self._defaults.update(kwargs)
+
+        # if these defaults match any existing arguments, replace
+        # the previous default on the object with the new one
+        for action in self._actions:
+            if action.dest in kwargs:
+                action.default = kwargs[action.dest]
+
+    def get_default(self, dest):
+        for action in self._actions:
+            if action.dest == dest and action.default is not None:
+                return action.default
+        return self._defaults.get(dest, None)
+
+
+    # =======================
+    # Adding argument actions
+    # =======================
+    def add_argument(self, *args, **kwargs):
+        """
+        add_argument(dest, ..., name=value, ...)
+        add_argument(option_string, option_string, ..., name=value, ...)
+        """
+
+        # if no positional args are supplied or only one is supplied and
+        # it doesn't look like an option string, parse a positional
+        # argument
+        chars = self.prefix_chars
+        if not args or len(args) == 1 and args[0][0] not in chars:
+            if args and 'dest' in kwargs:
+                raise ValueError('dest supplied twice for positional argument')
+            kwargs = self._get_positional_kwargs(*args, **kwargs)
+
+        # otherwise, we're adding an optional argument
+        else:
+            kwargs = self._get_optional_kwargs(*args, **kwargs)
+
+        # if no default was supplied, use the parser-level default
+        if 'default' not in kwargs:
+            dest = kwargs['dest']
+            if dest in self._defaults:
+                kwargs['default'] = self._defaults[dest]
+            elif self.argument_default is not None:
+                kwargs['default'] = self.argument_default
+
+        # create the action object, and add it to the parser
+        action_class = self._pop_action_class(kwargs)
+        if not _callable(action_class):
+            raise ValueError('unknown action "%s"' % action_class)
+        action = action_class(**kwargs)
+
+        # raise an error if the action type is not callable
+        type_func = self._registry_get('type', action.type, action.type)
+        if not _callable(type_func):
+            raise ValueError('%r is not callable' % type_func)
+
+        return self._add_action(action)
+
+    def add_argument_group(self, *args, **kwargs):
+        group = _ArgumentGroup(self, *args, **kwargs)
+        self._action_groups.append(group)
+        return group
+
+    def add_mutually_exclusive_group(self, **kwargs):
+        group = _MutuallyExclusiveGroup(self, **kwargs)
+        self._mutually_exclusive_groups.append(group)
+        return group
+
+    def _add_action(self, action):
+        # resolve any conflicts
+        self._check_conflict(action)
+
+        # add to actions list
+        self._actions.append(action)
+        action.container = self
+
+        # index the action by any option strings it has
+        for option_string in action.option_strings:
+            self._option_string_actions[option_string] = action
+
+        # set the flag if any option strings look like negative numbers
+        for option_string in action.option_strings:
+            if self._negative_number_matcher.match(option_string):
+                if not self._has_negative_number_optionals:
+                    self._has_negative_number_optionals.append(True)
+
+        # return the created action
+        return action
+
+    def _remove_action(self, action):
+        self._actions.remove(action)
+
+    def _add_container_actions(self, container):
+        # collect groups by titles
+        title_group_map = {}
+        for group in self._action_groups:
+            if group.title in title_group_map:
+                msg = _('cannot merge actions - two groups are named %r')
+                raise ValueError(msg % (group.title))
+            title_group_map[group.title] = group
+
+        # map each action to its group
+        group_map = {}
+        for group in container._action_groups:
+
+            # if a group with the title exists, use that, otherwise
+            # create a new group matching the container's group
+            if group.title not in title_group_map:
+                title_group_map[group.title] = self.add_argument_group(
+                    title=group.title,
+                    description=group.description,
+                    conflict_handler=group.conflict_handler)
+
+            # map the actions to their new group
+            for action in group._group_actions:
+                group_map[action] = title_group_map[group.title]
+
+        # add container's mutually exclusive groups
+        # NOTE: if add_mutually_exclusive_group ever gains title= and
+        # description= then this code will need to be expanded as above
+        for group in container._mutually_exclusive_groups:
+            mutex_group = self.add_mutually_exclusive_group(
+                required=group.required)
+
+            # map the actions to their new mutex group
+            for action in group._group_actions:
+                group_map[action] = mutex_group
+
+        # add all actions to this container or their group
+        for action in container._actions:
+            group_map.get(action, self)._add_action(action)
+
+    def _get_positional_kwargs(self, dest, **kwargs):
+        # make sure required is not specified
+        if 'required' in kwargs:
+            msg = _("'required' is an invalid argument for positionals")
+            raise TypeError(msg)
+
+        # mark positional arguments as required if at least one is
+        # always required
+        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
+            kwargs['required'] = True
+        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
+            kwargs['required'] = True
+
+        # return the keyword arguments with no option strings
+        return dict(kwargs, dest=dest, option_strings=[])
+
+    def _get_optional_kwargs(self, *args, **kwargs):
+        # determine short and long option strings
+        option_strings = []
+        long_option_strings = []
+        for option_string in args:
+            # error on strings that don't start with an appropriate prefix
+            if not option_string[0] in self.prefix_chars:
+                msg = _('invalid option string %r: '
+                        'must start with a character %r')
+                tup = option_string, self.prefix_chars
+                raise ValueError(msg % tup)
+
+            # strings starting with two prefix characters are long options
+            option_strings.append(option_string)
+            if option_string[0] in self.prefix_chars:
+                if len(option_string) > 1:
+                    if option_string[1] in self.prefix_chars:
+                        long_option_strings.append(option_string)
+
+        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
+        dest = kwargs.pop('dest', None)
+        if dest is None:
+            if long_option_strings:
+                dest_option_string = long_option_strings[0]
+            else:
+                dest_option_string = option_strings[0]
+            dest = dest_option_string.lstrip(self.prefix_chars)
+            if not dest:
+                msg = _('dest= is required for options like %r')
+                raise ValueError(msg % option_string)
+            dest = dest.replace('-', '_')
+
+        # return the updated keyword arguments
+        return dict(kwargs, dest=dest, option_strings=option_strings)
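+
+    # For instance (a sketch of the inference above, with assumed option
+    # names): add_argument('-n', '--dry-run') yields dest='dry_run' from the
+    # first long option string, while add_argument('-x') falls back to
+    # dest='x'.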
+
+    def _pop_action_class(self, kwargs, default=None):
+        action = kwargs.pop('action', default)
+        return self._registry_get('action', action, action)
+
+    def _get_handler(self):
+        # determine function from conflict handler string
+        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
+        try:
+            return getattr(self, handler_func_name)
+        except AttributeError:
+            msg = _('invalid conflict_resolution value: %r')
+            raise ValueError(msg % self.conflict_handler)
+
+    def _check_conflict(self, action):
+
+        # find all options that conflict with this option
+        confl_optionals = []
+        for option_string in action.option_strings:
+            if option_string in self._option_string_actions:
+                confl_optional = self._option_string_actions[option_string]
+                confl_optionals.append((option_string, confl_optional))
+
+        # resolve any conflicts
+        if confl_optionals:
+            conflict_handler = self._get_handler()
+            conflict_handler(action, confl_optionals)
+
+    def _handle_conflict_error(self, action, conflicting_actions):
+        message = _('conflicting option string(s): %s')
+        conflict_string = ', '.join([option_string
+                                     for option_string, action
+                                     in conflicting_actions])
+        raise ArgumentError(action, message % conflict_string)
+
+    def _handle_conflict_resolve(self, action, conflicting_actions):
+
+        # remove all conflicting options
+        for option_string, action in conflicting_actions:
+
+            # remove the conflicting option
+            action.option_strings.remove(option_string)
+            self._option_string_actions.pop(option_string, None)
+
+            # if the option now has no option string, remove it from the
+            # container holding it
+            if not action.option_strings:
+                action.container._remove_action(action)
+
+
+class _ArgumentGroup(_ActionsContainer):
+
+    def __init__(self, container, title=None, description=None, **kwargs):
+        # add any missing keyword arguments by checking the container
+        update = kwargs.setdefault
+        update('conflict_handler', container.conflict_handler)
+        update('prefix_chars', container.prefix_chars)
+        update('argument_default', container.argument_default)
+        super_init = super(_ArgumentGroup, self).__init__
+        super_init(description=description, **kwargs)
+
+        # group attributes
+        self.title = title
+        self._group_actions = []
+
+        # share most attributes with the container
+        self._registries = container._registries
+        self._actions = container._actions
+        self._option_string_actions = container._option_string_actions
+        self._defaults = container._defaults
+        self._has_negative_number_optionals = \
+            container._has_negative_number_optionals
+
+    def _add_action(self, action):
+        action = super(_ArgumentGroup, self)._add_action(action)
+        self._group_actions.append(action)
+        return action
+
+    def _remove_action(self, action):
+        super(_ArgumentGroup, self)._remove_action(action)
+        self._group_actions.remove(action)
+
+
+class _MutuallyExclusiveGroup(_ArgumentGroup):
+
+    def __init__(self, container, required=False):
+        super(_MutuallyExclusiveGroup, self).__init__(container)
+        self.required = required
+        self._container = container
+
+    def _add_action(self, action):
+        if action.required:
+            msg = _('mutually exclusive arguments must be optional')
+            raise ValueError(msg)
+        action = self._container._add_action(action)
+        self._group_actions.append(action)
+        return action
+
+    def _remove_action(self, action):
+        self._container._remove_action(action)
+        self._group_actions.remove(action)
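+
+# Sketch of the intended use (assumed option names):
+#
+#   group = parser.add_mutually_exclusive_group()
+#   group.add_argument('--quiet', action='store_true')
+#   group.add_argument('--verbose', action='store_true')
+#
+# Supplying both --quiet and --verbose on one command line is then rejected
+# during parsing.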
+
+
+class ArgumentParser(_AttributeHolder, _ActionsContainer):
+    """Object for parsing command line strings into Python objects.
+
+    Keyword Arguments:
+        - prog -- The name of the program (default: sys.argv[0])
+        - usage -- A usage message (default: auto-generated from arguments)
+        - description -- A description of what the program does
+        - epilog -- Text following the argument descriptions
+        - parents -- Parsers whose arguments should be copied into this one
+        - formatter_class -- HelpFormatter class for printing help messages
+        - prefix_chars -- Characters that prefix optional arguments
+        - fromfile_prefix_chars -- Characters that prefix files containing
+            additional arguments
+        - argument_default -- The default value for all arguments
+        - conflict_handler -- String indicating how to handle conflicts
+        - add_help -- Add a -h/--help option
+    """
+
+    def __init__(self,
+                 prog=None,
+                 usage=None,
+                 description=None,
+                 epilog=None,
+                 version=None,
+                 parents=[],
+                 formatter_class=HelpFormatter,
+                 prefix_chars='-',
+                 fromfile_prefix_chars=None,
+                 argument_default=None,
+                 conflict_handler='error',
+                 add_help=True):
+
+        if version is not None:
+            import warnings
+            warnings.warn(
+                """The "version" argument to ArgumentParser is deprecated. """
+                """Please use """
+                """"add_argument(..., action='version', version="N", ...)" """
+                """instead""", DeprecationWarning)
+
+        superinit = super(ArgumentParser, self).__init__
+        superinit(description=description,
+                  prefix_chars=prefix_chars,
+                  argument_default=argument_default,
+                  conflict_handler=conflict_handler)
+
+        # default setting for prog
+        if prog is None:
+            prog = _os.path.basename(_sys.argv[0])
+
+        self.prog = prog
+        self.usage = usage
+        self.epilog = epilog
+        self.version = version
+        self.formatter_class = formatter_class
+        self.fromfile_prefix_chars = fromfile_prefix_chars
+        self.add_help = add_help
+
+        add_group = self.add_argument_group
+        self._positionals = add_group(_('positional arguments'))
+        self._optionals = add_group(_('optional arguments'))
+        self._subparsers = None
+
+        # register types
+        def identity(string):
+            return string
+        self.register('type', None, identity)
+
+        # add help and version arguments if necessary
+        # (using explicit default to override global argument_default)
+        if self.add_help:
+            self.add_argument(
+                '-h', '--help', action='help', default=SUPPRESS,
+                help=_('show this help message and exit'))
+        if self.version:
+            self.add_argument(
+                '-v', '--version', action='version', default=SUPPRESS,
+                version=self.version,
+                help=_("show program's version number and exit"))
+
+        # add parent arguments and defaults
+        for parent in parents:
+            self._add_container_actions(parent)
+            try:
+                defaults = parent._defaults
+            except AttributeError:
+                pass
+            else:
+                self._defaults.update(defaults)
+
+    # =======================
+    # Pretty __repr__ methods
+    # =======================
+    def _get_kwargs(self):
+        names = [
+            'prog',
+            'usage',
+            'description',
+            'version',
+            'formatter_class',
+            'conflict_handler',
+            'add_help',
+        ]
+        return [(name, getattr(self, name)) for name in names]
+
+    # ==================================
+    # Optional/Positional adding methods
+    # ==================================
+    def add_subparsers(self, **kwargs):
+        if self._subparsers is not None:
+            self.error(_('cannot have multiple subparser arguments'))
+
+        # add the parser class to the arguments if it's not present
+        kwargs.setdefault('parser_class', type(self))
+
+        if 'title' in kwargs or 'description' in kwargs:
+            title = _(kwargs.pop('title', 'subcommands'))
+            description = _(kwargs.pop('description', None))
+            self._subparsers = self.add_argument_group(title, description)
+        else:
+            self._subparsers = self._positionals
+
+        # prog defaults to the usage message of this parser, skipping
+        # optional arguments and with no "usage:" prefix
+        if kwargs.get('prog') is None:
+            formatter = self._get_formatter()
+            positionals = self._get_positional_actions()
+            groups = self._mutually_exclusive_groups
+            formatter.add_usage(self.usage, positionals, groups, '')
+            kwargs['prog'] = formatter.format_help().strip()
+
+        # create the parsers action and add it to the positionals list
+        parsers_class = self._pop_action_class(kwargs, 'parsers')
+        action = parsers_class(option_strings=[], **kwargs)
+        self._subparsers._add_action(action)
+
+        # return the created parsers action
+        return action
+
+    def _add_action(self, action):
+        if action.option_strings:
+            self._optionals._add_action(action)
+        else:
+            self._positionals._add_action(action)
+        return action
+
+    def _get_optional_actions(self):
+        return [action
+                for action in self._actions
+                if action.option_strings]
+
+    def _get_positional_actions(self):
+        return [action
+                for action in self._actions
+                if not action.option_strings]
+
+    # =====================================
+    # Command line argument parsing methods
+    # =====================================
+    def parse_args(self, args=None, namespace=None):
+        args, argv = self.parse_known_args(args, namespace)
+        if argv:
+            msg = _('unrecognized arguments: %s')
+            self.error(msg % ' '.join(argv))
+        return args
+
+    def parse_known_args(self, args=None, namespace=None):
+        # args default to the system args
+        if args is None:
+            args = _sys.argv[1:]
+
+        # default Namespace built from parser defaults
+        if namespace is None:
+            namespace = Namespace()
+
+        # add any action defaults that aren't present
+        for action in self._actions:
+            if action.dest is not SUPPRESS:
+                if not hasattr(namespace, action.dest):
+                    if action.default is not SUPPRESS:
+                        default = action.default
+                        if isinstance(action.default, _basestring):
+                            default = self._get_value(action, default)
+                        setattr(namespace, action.dest, default)
+
+        # add any parser defaults that aren't present
+        for dest in self._defaults:
+            if not hasattr(namespace, dest):
+                setattr(namespace, dest, self._defaults[dest])
+
+        # parse the arguments and exit if there are any errors
+        try:
+            return self._parse_known_args(args, namespace)
+        except ArgumentError:
+            err = _sys.exc_info()[1]
+            self.error(str(err))
+
+    def _parse_known_args(self, arg_strings, namespace):
+        # replace arg strings that are file references
+        if self.fromfile_prefix_chars is not None:
+            arg_strings = self._read_args_from_files(arg_strings)
+
+        # map all mutually exclusive arguments to the other arguments
+        # they can't occur with
+        action_conflicts = {}
+        for mutex_group in self._mutually_exclusive_groups:
+            group_actions = mutex_group._group_actions
+            for i, mutex_action in enumerate(mutex_group._group_actions):
+                conflicts = action_conflicts.setdefault(mutex_action, [])
+                conflicts.extend(group_actions[:i])
+                conflicts.extend(group_actions[i + 1:])
+
+        # find all option indices, and determine the arg_string_pattern
+        # which has an 'O' if there is an option at an index,
+        # an 'A' if there is an argument, or a '-' if there is a '--'
+        option_string_indices = {}
+        arg_string_pattern_parts = []
+        arg_strings_iter = iter(arg_strings)
+        for i, arg_string in enumerate(arg_strings_iter):
+
+            # all args after -- are non-options
+            if arg_string == '--':
+                arg_string_pattern_parts.append('-')
+                for arg_string in arg_strings_iter:
+                    arg_string_pattern_parts.append('A')
+
+            # otherwise, add the arg to the arg strings
+            # and note the index if it was an option
+            else:
+                option_tuple = self._parse_optional(arg_string)
+                if option_tuple is None:
+                    pattern = 'A'
+                else:
+                    option_string_indices[i] = option_tuple
+                    pattern = 'O'
+                arg_string_pattern_parts.append(pattern)
+
+        # join the pieces together to form the pattern
+        arg_strings_pattern = ''.join(arg_string_pattern_parts)
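+        # (for example, the assumed input ['-v', 'a.txt', '--', '-x'] yields
+        # the pattern 'OA-A')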
+
+        # converts arg strings to the appropriate type and then takes the
+        # action
+        seen_actions = _set()
+        seen_non_default_actions = _set()
+
+        def take_action(action, argument_strings, option_string=None):
+            seen_actions.add(action)
+            argument_values = self._get_values(action, argument_strings)
+
+            # error if this argument is not allowed with other previously
+            # seen arguments, assuming that actions that use the default
+            # value don't really count as "present"
+            if argument_values is not action.default:
+                seen_non_default_actions.add(action)
+                for conflict_action in action_conflicts.get(action, []):
+                    if conflict_action in seen_non_default_actions:
+                        msg = _('not allowed with argument %s')
+                        action_name = _get_action_name(conflict_action)
+                        raise ArgumentError(action, msg % action_name)
+
+            # take the action if we didn't receive a SUPPRESS value
+            # (e.g. from a default)
+            if argument_values is not SUPPRESS:
+                action(self, namespace, argument_values, option_string)
+
+        # function to convert arg_strings into an optional action
+        def consume_optional(start_index):
+
+            # get the optional identified at this index
+            option_tuple = option_string_indices[start_index]
+            action, option_string, explicit_arg = option_tuple
+
+            # identify additional optionals in the same arg string
+            # (e.g. -xyz is the same as -x -y -z if no args are required)
+            match_argument = self._match_argument
+            action_tuples = []
+            while True:
+
+                # if we found no optional action, skip it
+                if action is None:
+                    extras.append(arg_strings[start_index])
+                    return start_index + 1
+
+                # if there is an explicit argument, try to match the
+                # optional's string arguments to only this
+                if explicit_arg is not None:
+                    arg_count = match_argument(action, 'A')
+
+                    # if the action is a single-dash option and takes no
+                    # arguments, try to parse more single-dash options out
+                    # of the tail of the option string
+                    chars = self.prefix_chars
+                    if arg_count == 0 and option_string[1] not in chars:
+                        action_tuples.append((action, [], option_string))
+                        for char in self.prefix_chars:
+                            option_string = char + explicit_arg[0]
+                            explicit_arg = explicit_arg[1:] or None
+                            optionals_map = self._option_string_actions
+                            if option_string in optionals_map:
+                                action = optionals_map[option_string]
+                                break
+                        else:
+                            msg = _('ignored explicit argument %r')
+                            raise ArgumentError(action, msg % explicit_arg)
+
+                    # if the action expects exactly one argument, we've
+                    # successfully matched the option; exit the loop
+                    elif arg_count == 1:
+                        stop = start_index + 1
+                        args = [explicit_arg]
+                        action_tuples.append((action, args, option_string))
+                        break
+
+                    # error if a double-dash option did not use the
+                    # explicit argument
+                    else:
+                        msg = _('ignored explicit argument %r')
+                        raise ArgumentError(action, msg % explicit_arg)
+
+                # if there is no explicit argument, try to match the
+                # optional's string arguments with the following strings
+                # if successful, exit the loop
+                else:
+                    start = start_index + 1
+                    selected_patterns = arg_strings_pattern[start:]
+                    arg_count = match_argument(action, selected_patterns)
+                    stop = start + arg_count
+                    args = arg_strings[start:stop]
+                    action_tuples.append((action, args, option_string))
+                    break
+
+            # add the Optional to the list and return the index at which
+            # the Optional's string args stopped
+            assert action_tuples
+            for action, args, option_string in action_tuples:
+                take_action(action, args, option_string)
+            return stop
+
+        # the list of Positionals left to be parsed; this is modified
+        # by consume_positionals()
+        positionals = self._get_positional_actions()
+
+        # function to convert arg_strings into positional actions
+        def consume_positionals(start_index):
+            # match as many Positionals as possible
+            match_partial = self._match_arguments_partial
+            selected_pattern = arg_strings_pattern[start_index:]
+            arg_counts = match_partial(positionals, selected_pattern)
+
+            # slice off the appropriate arg strings for each Positional
+            # and add the Positional and its args to the list
+            for action, arg_count in zip(positionals, arg_counts):
+                args = arg_strings[start_index: start_index + arg_count]
+                start_index += arg_count
+                take_action(action, args)
+
+            # slice off the Positionals that we just parsed and return the
+            # index at which the Positionals' string args stopped
+            positionals[:] = positionals[len(arg_counts):]
+            return start_index
+
+        # consume Positionals and Optionals alternately, until we have
+        # passed the last option string
+        extras = []
+        start_index = 0
+        if option_string_indices:
+            max_option_string_index = max(option_string_indices)
+        else:
+            max_option_string_index = -1
+        while start_index <= max_option_string_index:
+
+            # consume any Positionals preceding the next option
+            next_option_string_index = min([
+                index
+                for index in option_string_indices
+                if index >= start_index])
+            if start_index != next_option_string_index:
+                positionals_end_index = consume_positionals(start_index)
+
+                # only try to parse the next optional if we didn't consume
+                # the option string during the positionals parsing
+                if positionals_end_index > start_index:
+                    start_index = positionals_end_index
+                    continue
+                else:
+                    start_index = positionals_end_index
+
+            # if we consumed all the positionals we could and we're not
+            # at the index of an option string, there were extra arguments
+            if start_index not in option_string_indices:
+                strings = arg_strings[start_index:next_option_string_index]
+                extras.extend(strings)
+                start_index = next_option_string_index
+
+            # consume the next optional and any arguments for it
+            start_index = consume_optional(start_index)
+
+        # consume any positionals following the last Optional
+        stop_index = consume_positionals(start_index)
+
+        # if we didn't consume all the argument strings, there were extras
+        extras.extend(arg_strings[stop_index:])
+
+        # if we didn't use all the Positional objects, there were too few
+        # arg strings supplied.
+        if positionals:
+            self.error(_('too few arguments'))
+
+        # make sure all required actions were present
+        for action in self._actions:
+            if action.required:
+                if action not in seen_actions:
+                    name = _get_action_name(action)
+                    self.error(_('argument %s is required') % name)
+
+        # make sure all required groups had one option present
+        for group in self._mutually_exclusive_groups:
+            if group.required:
+                for action in group._group_actions:
+                    if action in seen_non_default_actions:
+                        break
+
+                # if no actions were used, report the error
+                else:
+                    names = [_get_action_name(action)
+                             for action in group._group_actions
+                             if action.help is not SUPPRESS]
+                    msg = _('one of the arguments %s is required')
+                    self.error(msg % ' '.join(names))
+
+        # return the updated namespace and the extra arguments
+        return namespace, extras
+
+    def _read_args_from_files(self, arg_strings):
+        # expand arguments referencing files
+        new_arg_strings = []
+        for arg_string in arg_strings:
+
+            # for regular arguments, just add them back into the list
+            if arg_string[0] not in self.fromfile_prefix_chars:
+                new_arg_strings.append(arg_string)
+
+            # replace arguments referencing files with the file content
+            else:
+                try:
+                    args_file = open(arg_string[1:])
+                    try:
+                        arg_strings = []
+                        for arg_line in args_file.read().splitlines():
+                            for arg in self.convert_arg_line_to_args(arg_line):
+                                arg_strings.append(arg)
+                        arg_strings = self._read_args_from_files(arg_strings)
+                        new_arg_strings.extend(arg_strings)
+                    finally:
+                        args_file.close()
+                except IOError:
+                    err = _sys.exc_info()[1]
+                    self.error(str(err))
+
+        # return the modified argument list
+        return new_arg_strings
+
+    def convert_arg_line_to_args(self, arg_line):
+        return [arg_line]
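+
+    # Sketch: with ArgumentParser(fromfile_prefix_chars='@'), a command-line
+    # entry such as '@args.txt' (an assumed file name) is replaced by one
+    # argument per line of that file, as produced by convert_arg_line_to_args
+    # above.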
+
+    def _match_argument(self, action, arg_strings_pattern):
+        # match the pattern for this action to the arg strings
+        nargs_pattern = self._get_nargs_pattern(action)
+        match = _re.match(nargs_pattern, arg_strings_pattern)
+
+        # raise an exception if we weren't able to find a match
+        if match is None:
+            nargs_errors = {
+                None: _('expected one argument'),
+                OPTIONAL: _('expected at most one argument'),
+                ONE_OR_MORE: _('expected at least one argument'),
+            }
+            default = _('expected %s argument(s)') % action.nargs
+            msg = nargs_errors.get(action.nargs, default)
+            raise ArgumentError(action, msg)
+
+        # return the number of arguments matched
+        return len(match.group(1))
+
+    def _match_arguments_partial(self, actions, arg_strings_pattern):
+        # progressively shorten the actions list by slicing off the
+        # final actions until we find a match
+        result = []
+        for i in range(len(actions), 0, -1):
+            actions_slice = actions[:i]
+            pattern = ''.join([self._get_nargs_pattern(action)
+                               for action in actions_slice])
+            match = _re.match(pattern, arg_strings_pattern)
+            if match is not None:
+                result.extend([len(string) for string in match.groups()])
+                break
+
+        # return the list of arg string counts
+        return result
+
+    def _parse_optional(self, arg_string):
+        # if it's an empty string, it was meant to be a positional
+        if not arg_string:
+            return None
+
+        # if it doesn't start with a prefix, it was meant to be positional
+        if not arg_string[0] in self.prefix_chars:
+            return None
+
+        # if the option string is present in the parser, return the action
+        if arg_string in self._option_string_actions:
+            action = self._option_string_actions[arg_string]
+            return action, arg_string, None
+
+        # if it's just a single character, it was meant to be positional
+        if len(arg_string) == 1:
+            return None
+
+        # if the option string before the "=" is present, return the action
+        if '=' in arg_string:
+            option_string, explicit_arg = arg_string.split('=', 1)
+            if option_string in self._option_string_actions:
+                action = self._option_string_actions[option_string]
+                return action, option_string, explicit_arg
+
+        # search through all possible prefixes of the option string
+        # and all actions in the parser for possible interpretations
+        option_tuples = self._get_option_tuples(arg_string)
+
+        # if multiple actions match, the option string was ambiguous
+        if len(option_tuples) > 1:
+            options = ', '.join([option_string
+                for action, option_string, explicit_arg in option_tuples])
+            tup = arg_string, options
+            self.error(_('ambiguous option: %s could match %s') % tup)
+
+        # if exactly one action matched, this segmentation is good,
+        # so return the parsed action
+        elif len(option_tuples) == 1:
+            option_tuple, = option_tuples
+            return option_tuple
+
+        # if it was not found as an option, but it looks like a negative
+        # number, it was meant to be positional
+        # unless there are negative-number-like options
+        if self._negative_number_matcher.match(arg_string):
+            if not self._has_negative_number_optionals:
+                return None
+
+        # if it contains a space, it was meant to be a positional
+        if ' ' in arg_string:
+            return None
+
+        # it was meant to be an optional but there is no such option
+        # in this parser (though it might be a valid option in a subparser)
+        return None, arg_string, None
+
+    def _get_option_tuples(self, option_string):
+        result = []
+
+        # option strings starting with two prefix characters are only
+        # split at the '='
+        chars = self.prefix_chars
+        if option_string[0] in chars and option_string[1] in chars:
+            if '=' in option_string:
+                option_prefix, explicit_arg = option_string.split('=', 1)
+            else:
+                option_prefix = option_string
+                explicit_arg = None
+            for option_string in self._option_string_actions:
+                if option_string.startswith(option_prefix):
+                    action = self._option_string_actions[option_string]
+                    tup = action, option_string, explicit_arg
+                    result.append(tup)
+
+        # single character options can be concatenated with their arguments
+        # but multiple character options always have to have their argument
+        # separate
+        elif option_string[0] in chars and option_string[1] not in chars:
+            option_prefix = option_string
+            explicit_arg = None
+            short_option_prefix = option_string[:2]
+            short_explicit_arg = option_string[2:]
+
+            for option_string in self._option_string_actions:
+                if option_string == short_option_prefix:
+                    action = self._option_string_actions[option_string]
+                    tup = action, option_string, short_explicit_arg
+                    result.append(tup)
+                elif option_string.startswith(option_prefix):
+                    action = self._option_string_actions[option_string]
+                    tup = action, option_string, explicit_arg
+                    result.append(tup)
+
+        # shouldn't ever get here
+        else:
+            self.error(_('unexpected option string: %s') % option_string)
+
+        # return the collected option tuples
+        return result
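+
+    # For example (assumed registrations): given a registered '--verbose', the
+    # abbreviated string '--verb' matches it by prefix; given a registered
+    # '-x', the string '-xVALUE' is offered as the pair ('-x', 'VALUE').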
+
+    def _get_nargs_pattern(self, action):
+        # in all examples below, we have to allow for '--' args
+        # which are represented as '-' in the pattern
+        nargs = action.nargs
+
+        # the default (None) is assumed to be a single argument
+        if nargs is None:
+            nargs_pattern = '(-*A-*)'
+
+        # allow zero or one arguments
+        elif nargs == OPTIONAL:
+            nargs_pattern = '(-*A?-*)'
+
+        # allow zero or more arguments
+        elif nargs == ZERO_OR_MORE:
+            nargs_pattern = '(-*[A-]*)'
+
+        # allow one or more arguments
+        elif nargs == ONE_OR_MORE:
+            nargs_pattern = '(-*A[A-]*)'
+
+        # allow any number of options or arguments
+        elif nargs == REMAINDER:
+            nargs_pattern = '([-AO]*)'
+
+        # allow one argument followed by any number of options or arguments
+        elif nargs == PARSER:
+            nargs_pattern = '(-*A[-AO]*)'
+
+        # all others should be integers
+        else:
+            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
+
+        # if this is an optional action, -- is not allowed
+        if action.option_strings:
+            nargs_pattern = nargs_pattern.replace('-*', '')
+            nargs_pattern = nargs_pattern.replace('-', '')
+
+        # return the pattern
+        return nargs_pattern
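+
+    # Concretely (illustration only): an optional taking nargs=2 ends up with
+    # the pattern '(AA)' after the '-*'/'-' pieces are stripped, so matching
+    # against a remaining arg_strings_pattern such as 'AAO' consumes exactly
+    # the two 'A' entries that follow the option string.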
+
+    # ========================
+    # Value conversion methods
+    # ========================
+    def _get_values(self, action, arg_strings):
+        # for everything but PARSER args, strip out '--'
+        if action.nargs not in [PARSER, REMAINDER]:
+            arg_strings = [s for s in arg_strings if s != '--']
+
+        # optional argument produces a default when not present
+        if not arg_strings and action.nargs == OPTIONAL:
+            if action.option_strings:
+                value = action.const
+            else:
+                value = action.default
+            if isinstance(value, _basestring):
+                value = self._get_value(action, value)
+                self._check_value(action, value)
+
+        # when nargs='*' on a positional, if there were no command-line
+        # args, use the default if it is anything other than None
+        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
+              not action.option_strings):
+            if action.default is not None:
+                value = action.default
+            else:
+                value = arg_strings
+            self._check_value(action, value)
+
+        # single argument or optional argument produces a single value
+        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
+            arg_string, = arg_strings
+            value = self._get_value(action, arg_string)
+            self._check_value(action, value)
+
+        # REMAINDER arguments convert all values, checking none
+        elif action.nargs == REMAINDER:
+            value = [self._get_value(action, v) for v in arg_strings]
+
+        # PARSER arguments convert all values, but check only the first
+        elif action.nargs == PARSER:
+            value = [self._get_value(action, v) for v in arg_strings]
+            self._check_value(action, value[0])
+
+        # all other types of nargs produce a list
+        else:
+            value = [self._get_value(action, v) for v in arg_strings]
+            for v in value:
+                self._check_value(action, v)
+
+        # return the converted value
+        return value
+
+    def _get_value(self, action, arg_string):
+        type_func = self._registry_get('type', action.type, action.type)
+        if not _callable(type_func):
+            msg = _('%r is not callable')
+            raise ArgumentError(action, msg % type_func)
+
+        # convert the value to the appropriate type
+        try:
+            result = type_func(arg_string)
+
+        # ArgumentTypeErrors indicate errors
+        except ArgumentTypeError:
+            name = getattr(action.type, '__name__', repr(action.type))
+            msg = str(_sys.exc_info()[1])
+            raise ArgumentError(action, msg)
+
+        # TypeErrors or ValueErrors also indicate errors
+        except (TypeError, ValueError):
+            name = getattr(action.type, '__name__', repr(action.type))
+            msg = _('invalid %s value: %r')
+            raise ArgumentError(action, msg % (name, arg_string))
+
+        # return the converted value
+        return result
+
+    def _check_value(self, action, value):
+        # converted value must be one of the choices (if specified)
+        if action.choices is not None and value not in action.choices:
+            tup = value, ', '.join(map(repr, action.choices))
+            msg = _('invalid choice: %r (choose from %s)') % tup
+            raise ArgumentError(action, msg)
+
+    # =======================
+    # Help-formatting methods
+    # =======================
+    def format_usage(self):
+        formatter = self._get_formatter()
+        formatter.add_usage(self.usage, self._actions,
+                            self._mutually_exclusive_groups)
+        return formatter.format_help()
+
+    def format_help(self):
+        formatter = self._get_formatter()
+
+        # usage
+        formatter.add_usage(self.usage, self._actions,
+                            self._mutually_exclusive_groups)
+
+        # description
+        formatter.add_text(self.description)
+
+        # positionals, optionals and user-defined groups
+        for action_group in self._action_groups:
+            formatter.start_section(action_group.title)
+            formatter.add_text(action_group.description)
+            formatter.add_arguments(action_group._group_actions)
+            formatter.end_section()
+
+        # epilog
+        formatter.add_text(self.epilog)
+
+        # determine help from format above
+        return formatter.format_help()
+
+    def format_version(self):
+        import warnings
+        warnings.warn(
+            'The format_version method is deprecated -- the "version" '
+            'argument to ArgumentParser is no longer supported.',
+            DeprecationWarning)
+        formatter = self._get_formatter()
+        formatter.add_text(self.version)
+        return formatter.format_help()
+
+    def _get_formatter(self):
+        return self.formatter_class(prog=self.prog)
+
+    # =====================
+    # Help-printing methods
+    # =====================
+    def print_usage(self, file=None):
+        if file is None:
+            file = _sys.stdout
+        self._print_message(self.format_usage(), file)
+
+    def print_help(self, file=None):
+        if file is None:
+            file = _sys.stdout
+        self._print_message(self.format_help(), file)
+
+    def print_version(self, file=None):
+        import warnings
+        warnings.warn(
+            'The print_version method is deprecated -- the "version" '
+            'argument to ArgumentParser is no longer supported.',
+            DeprecationWarning)
+        self._print_message(self.format_version(), file)
+
+    def _print_message(self, message, file=None):
+        if message:
+            if file is None:
+                file = _sys.stderr
+            file.write(message)
+
+    # ===============
+    # Exiting methods
+    # ===============
+    def exit(self, status=0, message=None):
+        if message:
+            self._print_message(message, _sys.stderr)
+        _sys.exit(status)
+
+    def error(self, message):
+        """error(message: string)
+
+        Prints a usage message incorporating the message to stderr and
+        exits.
+
+        If you override this in a subclass, it should not return -- it
+        should either exit or raise an exception.
+        """
+        self.print_usage(_sys.stderr)
+        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/compareStats.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,66 @@
+#!/usr/bin/python
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/compareStats.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# Tool that loads saved MaSh statistics and displays a comparison.
+
+'''
+Created on Jul 13, 2012
+
+@author: Daniel Kuehlwein
+'''
+
+import sys
+from argparse import ArgumentParser,RawDescriptionHelpFormatter
+from matplotlib.pyplot import plot,figure,show,legend,xlabel,ylabel,axis,hist
+from stats import Statistics
+
+parser = ArgumentParser(description='Compare Statistics.  \n\n\
+Loads different statistics and displays a comparison. Requires the matplotlib module.\n\n\
+-------- Example Usage ---------------\n\
+./compareStats.py --statFiles ../tmp/natISANB.stats ../tmp/natATPNB.stats -b 30\n\n\
+Author: Daniel Kuehlwein, July 2012',formatter_class=RawDescriptionHelpFormatter)
+parser.add_argument('--statFiles', default=None, nargs='+', 
+                    help='The names of the saved statistic files.')
+parser.add_argument('-b','--bins',default=50,help="Number of bins for the AUC histogram. Default=50.",type=int)
+
+def main(argv = sys.argv[1:]):
+    args = parser.parse_args(argv)  
+    if args.statFiles is None:
+        print 'Filenames missing.'
+        sys.exit(-1)
+
+    aucData = []
+    aucLabels = []
+    for statFile in args.statFiles:
+        s = Statistics()
+        s.load(statFile)
+        avgRecall = [float(x)/s.problems for x in s.recallData]
+        figure('Recall')
+        plot(range(s.cutOff),avgRecall,label=statFile)
+        legend(loc='lower right')
+        ylabel('Average Recall')
+        xlabel('Highest ranked premises')
+        axis([0,s.cutOff,0.0,1.0])
+        figure('100%Recall')
+        plot(range(s.cutOff),s.recall100Data,label=statFile)
+        legend(loc='lower right')
+        ylabel('100%Recall')
+        xlabel('Highest ranked premises')
+        axis([0,s.cutOff,0,s.problems])
+        aucData.append(s.aucData)
+        aucLabels.append(statFile)
+    figure('AUC Histogram')
+    hist(aucData,bins=args.bins,label=aucLabels,histtype='bar')
+    legend(loc='upper left')
+    ylabel('Problems')
+    xlabel('AUC')
+        
+    show()
+
+if __name__ == '__main__':
+    #args = ['--statFiles','../tmp/natISANB.stats','../tmp/natATPNB.stats','-b','30']
+    #sys.exit(main(args))
+    sys.exit(main())
+    
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/dictionaries.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,188 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/dictionaries.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# Persistent dictionaries: accessibility, dependencies, and features.
+
+'''
+Created on Jul 12, 2012
+
+@author: daniel
+'''
+
+from os.path import join
+from Queue import Queue
+from readData import create_accessible_dict,create_dependencies_dict,create_feature_dict
+from cPickle import load,dump
+
+class Dictionaries(object):
+    '''
+    This class contains all information about the name -> id mappings, etc.
+    '''
+    def __init__(self):
+        '''
+        Constructor
+        '''
+        self.nameIdDict = {}
+        self.idNameDict = {}
+        self.featureIdDict={}
+        self.maxNameId = 0
+        self.maxFeatureId = 0
+        self.featureDict = {}
+        self.dependenciesDict = {}
+        self.accessibleDict = {}
+        self.expandedAccessibles = {}
+        self.changed = True
+    
+    """
+    Init functions. Side effect: nameIdDict, idNameDict, and featureIdDict get filled.
+    """    
+    def init_featureDict(self,featureFile):
+        self.featureDict,self.maxNameId,self.maxFeatureId = create_feature_dict(self.nameIdDict,self.idNameDict,self.maxNameId,self.featureIdDict,\
+                                                                                self.maxFeatureId,featureFile)        
+    def init_dependenciesDict(self,depFile):
+        self.dependenciesDict = create_dependencies_dict(self.nameIdDict,depFile)
+    def init_accessibleDict(self,accFile):
+        self.accessibleDict,self.maxNameId = create_accessible_dict(self.nameIdDict,self.idNameDict,self.maxNameId,accFile)
+    
+    def init_all(self,inputFolder,featureFileName = 'mash_features',depFileName='mash_dependencies',accFileName = 'mash_accessibility'):
+        featureFile = join(inputFolder,featureFileName)
+        depFile = join(inputFolder,depFileName)
+        accFile = join(inputFolder,accFileName)
+        self.init_featureDict(featureFile)
+        self.init_accessibleDict(accFile)
+        self.init_dependenciesDict(depFile)
+        self.expandedAccessibles = {}
+        self.changed = True
+        
+    def get_name_id(self,name):
+        """
+        Return the Id for a name.
+        If it doesn't exist yet, a new entry is created.
+        """
+        if self.nameIdDict.has_key(name):
+            nameId = self.nameIdDict[name]
+        else:
+            self.nameIdDict[name] = self.maxNameId
+            self.idNameDict[self.maxNameId] = name
+            nameId = self.maxNameId
+            self.maxNameId += 1 
+            self.changed = True
+        return nameId
+
+    def add_feature(self,featureName):
+        if not self.featureIdDict.has_key(featureName):
+            self.featureIdDict[featureName] = self.maxFeatureId
+            self.maxFeatureId += 1
+            self.changed = True 
+            
+    def expand_accessibles(self,acc):
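+        # Computes the transitive closure of the accessibility relation starting
+        # from acc: already expanded names are taken from the
+        # self.expandedAccessibles cache, unexpanded ones are traversed
+        # breadth-first via the queue below.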
+        accessibles = set(acc)
+        unexpandedQueue = Queue()
+        for a in acc:
+            if self.expandedAccessibles.has_key(a):
+                accessibles = accessibles.union(self.expandedAccessibles[a])
+            else:
+                unexpandedQueue.put(a)
+        while not unexpandedQueue.empty():
+            nextUnExp = unexpandedQueue.get()
+            nextUnExpAcc = self.accessibleDict[nextUnExp]            
+            for a in nextUnExpAcc:
+                if not a in accessibles:
+                    accessibles = accessibles.union([a])
+                    if self.expandedAccessibles.has_key(a):
+                        accessibles = accessibles.union(self.expandedAccessibles[a])
+                    else:
+                        unexpandedQueue.put(a)                    
+        return list(accessibles)
+            
+    def parse_fact(self,line):
+        """
+        Parses a single line, extracting accessibles, features, and dependencies.
+        """
+        assert line.startswith('! ')
+        line = line[2:]
+        
+        # line = name:accessibles;features;dependencies
+        line = line.split(':')
+        name = line[0].strip()
+        nameId = self.get_name_id(name)
+    
+        line = line[1].split(';')       
+        # Accessible Ids
+        unExpAcc = [self.nameIdDict[a.strip()] for a in line[0].split()]
+        self.accessibleDict[nameId] = unExpAcc
+        # Feature Ids
+        featureNames = [f.strip() for f in line[1].split()]
+        for fn in featureNames:
+            self.add_feature(fn)
+        self.featureDict[nameId] = [self.featureIdDict[fn] for fn in featureNames]
+        self.dependenciesDict[nameId] = [self.nameIdDict[d.strip()] for d in line[2].split()]
+        self.changed = True
+        return nameId
+    
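+    # Illustrative sketch of a fact line as consumed by parse_fact above
+    # (hypothetical names, not actual Isabelle output):
+    #   ! foo.lemma_2:foo.lemma_1 bar.thm_5;feat_a feat_b;bar.thm_5
+    # i.e. "! name:accessibles;features;dependencies" with space-separated entries.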
+    def parse_overwrite(self,line):
+        """
+        Parses a single line, extracts the problemId and the Ids of the dependencies.
+        """
+        assert line.startswith('p ')
+        line = line[2:]
+        
+        # line = name:dependencies
+        line = line.split(':')
+        name = line[0].strip()
+        nameId = self.get_name_id(name)
+    
+        dependencies = [self.nameIdDict[d.strip()] for d in line[1].split()]
+        self.changed = True
+        return nameId,dependencies
+    
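+    # Illustrative sketch of an overwrite line as consumed by parse_overwrite above
+    # (hypothetical names):
+    #   p foo.lemma_2:foo.lemma_1 bar.thm_5
+    # i.e. "p name:dependencies".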
+    def parse_problem(self,line):
+        """
+        Parses a problem and returns the features and the accessibles. 
+        """
+        assert line.startswith('? ')
+        line = line[2:]
+        name = None
+        
+        # Check whether there is a problem name:
+        tmp = line.split(':')
+        if len(tmp) == 2:
+            name = tmp[0].strip()
+            line = tmp[1]
+        
+        # line = accessibles;features
+        line = line.split(';')
+        # Accessible Ids, expand and store the accessibles.
+        unExpAcc = [self.nameIdDict[a.strip()] for a in line[0].split()]
+        if len(self.expandedAccessibles.keys())>=100:
+            self.expandedAccessibles = {}
+            self.changed = True
+        for accId in unExpAcc:
+            if not self.expandedAccessibles.has_key(accId):
+                accIdAcc = self.accessibleDict[accId]
+                self.expandedAccessibles[accId] = self.expand_accessibles(accIdAcc)
+                self.changed = True
+        accessibles = self.expand_accessibles(unExpAcc)
+        # Feature Ids
+        featureNames = [f.strip() for f in line[1].split()]
+        for fn in featureNames:
+            self.add_feature(fn)
+        features = [self.featureIdDict[fn] for fn in featureNames]
+        return name,features,accessibles    
+    
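+    # Illustrative sketch of a problem line as consumed by parse_problem above
+    # (hypothetical names):
+    #   ? foo.goal_7:foo.lemma_1 bar.thm_5;feat_a feat_b
+    # i.e. "? name:accessibles;features", where the "name:" prefix is optional.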
+    def save(self,fileName):
+        if self.changed:
+            dictsStream = open(fileName, 'wb')
+            dump((self.accessibleDict,self.dependenciesDict,self.expandedAccessibles,self.featureDict,\
+                self.featureIdDict,self.idNameDict,self.maxFeatureId,self.maxNameId,self.nameIdDict),dictsStream)
+            self.changed = False
+            dictsStream.close()
+    def load(self,fileName):
+        dictsStream = open(fileName, 'rb')        
+        self.accessibleDict,self.dependenciesDict,self.expandedAccessibles,self.featureDict,\
+              self.featureIdDict,self.idNameDict,self.maxFeatureId,self.maxNameId,self.nameIdDict = load(dictsStream)
+        self.changed = False
+        dictsStream.close()
+    
+            
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/mash.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,228 @@
+#!/usr/bin/python
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/mash.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# Entry point for MaSh (Machine Learning for Sledgehammer).
+
+'''
+MaSh - Machine Learning for Sledgehammer
+
+MaSh allows the use of different machine learning algorithms to predict relevant facts for Sledgehammer.
+
+Created on July 12, 2012
+
+@author: Daniel Kuehlwein
+'''
+
+import logging,datetime,string,os,sys
+from argparse import ArgumentParser,RawDescriptionHelpFormatter
+from time import time
+from stats import Statistics
+from dictionaries import Dictionaries
+from naiveBayes import NBClassifier
+from snow import SNoW
+from predefined import Predefined
+
+# Set up command-line parser
+parser = ArgumentParser(description='MaSh - Machine Learning for Sledgehammer.  \n\n\
+MaSh allows the use of different machine learning algorithms to predict relevant facts for Sledgehammer.\n\n\
+--------------- Example Usage ---------------\n\
+First initialize:\n./mash.py -l test.log -o ../tmp/ --init --inputDir ../data/Nat/ \n\
+Then create predictions:\n./mash.py -i ../data/Nat/mash_commands -p ../tmp/test.pred -l test.log -o ../tmp/ --statistics\n\
+\n\n\
+Author: Daniel Kuehlwein, July 2012',formatter_class=RawDescriptionHelpFormatter)
+parser.add_argument('-i','--inputFile',help='File containing all problems to be solved.')
+parser.add_argument('-o','--outputDir', default='../tmp/',help='Directory where all created files are stored. Default=../tmp/.')
+parser.add_argument('-p','--predictions',default='../tmp/%s.predictions' % datetime.datetime.now(), 
+                    help='File where the predictions are stored. Default=../tmp/dateTime.predictions.')
+parser.add_argument('--numberOfPredictions',default=200,help="Number of premises to write in the output. Default=200.",type=int)
+
+parser.add_argument('--init',default=False,action='store_true',help="Initialize MaSh. Requires --inputDir to be defined. Default=False.")
+parser.add_argument('--inputDir',default='../data/Nat/',\
+                    help='Directory containing all the input data. MaSh expects the following files: mash_features,mash_dependencies,mash_accessibility')
+parser.add_argument('--depFile', default='mash_dependencies',
+                    help='Name of the file with the premise dependencies. The file must be in inputDir. Default = mash_dependencies')
+parser.add_argument('--saveModel',default=False,action='store_true',help="Stores the learned Model at the end of a prediction run. Default=False.")
+
+parser.add_argument('--nb',default=False,action='store_true',help="Use Naive Bayes for learning. This is the default learning method.")
+parser.add_argument('--snow',default=False,action='store_true',help="Use SNoW's naive bayes instead of Naive Bayes for learning.")
+parser.add_argument('--predef',default=False,action='store_true',\
+                    help="Use predefined predictions. Used only for comparison with the actual learning. Expects mash_meng_paulson_suggestions in inputDir.")
+parser.add_argument('--statistics',default=False,action='store_true',help="Create and show statistics for the top CUTOFF predictions.\
+                    WARNING: This will make the program a lot slower! Default=False.")
+parser.add_argument('--saveStats',default=None,help="If defined, stores the statistics in the filename provided.")
+parser.add_argument('--cutOff',default=500,help="Option for statistics. Only consider the first cutOff predictions. Default=500.",type=int)
+parser.add_argument('-l','--log', default='../tmp/%s.log' % datetime.datetime.now(), help='Log file name. Default=../tmp/dateTime.log')
+parser.add_argument('-q','--quiet',default=False,action='store_true',help="If enabled, only print warnings. Default=False.")
+
+def main(argv = sys.argv[1:]):        
+    # Initializing command-line arguments
+    args = parser.parse_args(argv)
+
+    # Set up logging 
+    logging.basicConfig(level=logging.DEBUG,
+                        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
+                        datefmt='%d-%m %H:%M:%S',
+                        filename=args.log,
+                        filemode='w')
+    console = logging.StreamHandler(sys.stdout)
+    console.setLevel(logging.INFO)
+    formatter = logging.Formatter('# %(message)s')
+    console.setFormatter(formatter)
+    logging.getLogger('').addHandler(console)
+    logger = logging.getLogger('main.py')
+    if args.quiet:
+        logger.setLevel(logging.WARNING)
+        console.setLevel(logging.WARNING)
+    if not os.path.exists(args.outputDir):
+        os.makedirs(args.outputDir)
+
+    logger.info('Using the following settings: %s',args)
+    # Pick algorithm
+    if args.nb:
+        logger.info('Using Naive Bayes for learning.')        
+        model = NBClassifier() 
+        modelFile = os.path.join(args.outputDir,'NB.pickle')
+    elif args.snow:
+        logger.info('Using naive bayes (SNoW) for learning.')
+        model = SNoW()
+        modelFile = os.path.join(args.outputDir,'SNoW.pickle')
+    elif args.predef:
+        logger.info('Using predefined predictions.')
+        predictionFile = os.path.join(args.inputDir,'mash_meng_paulson_suggestions') 
+        model = Predefined(predictionFile)
+        modelFile = os.path.join(args.outputDir,'isabelle.pickle')        
+    else:
+        logger.info('No algorithm specified. Using Naive Bayes.')        
+        model = NBClassifier() 
+        modelFile = os.path.join(args.outputDir,'NB.pickle')    
+    dictsFile = os.path.join(args.outputDir,'dicts.pickle')    
+    
+    # Initializing model
+    if args.init:        
+        logger.info('Initializing Model.')
+        startTime = time()
+        
+        # Load all data        
+        dicts = Dictionaries()
+        dicts.init_all(args.inputDir,depFileName=args.depFile)
+        
+        # Create Model
+        trainData = dicts.featureDict.keys()
+        if args.predef:
+            dicts = model.initializeModel(trainData,dicts)
+        else:
+            model.initializeModel(trainData,dicts)
+        
+        model.save(modelFile)
+        dicts.save(dictsFile)
+
+        logger.info('All Done. %s seconds needed.',round(time()-startTime,2))
+        return 0
+    # Create predictions and/or update model       
+    else:
+        lineCounter = 0
+        dicts = Dictionaries()
+        # Load Files
+        if os.path.isfile(dictsFile):
+            dicts.load(dictsFile)
+        if os.path.isfile(modelFile):
+            model.load(modelFile)
+        
+        # IO Streams
+        OS = open(args.predictions,'a')
+        IS = open(args.inputFile,'r')
+        
+        # Statistics
+        if args.statistics:
+            stats = Statistics(args.cutOff)
+        
+        predictions = None
+        #Reading Input File
+        for line in IS:
+ #           try:
+            if True:
+                if line.startswith('!'):
+                    problemId = dicts.parse_fact(line)
+                    # Statistics
+                    if args.statistics:
+                        acc = dicts.accessibleDict[problemId]
+                        if args.predef:
+                            predictions = model.predict(problemId)
+                        else:
+                            predictions,_predictionsValues = model.predict(dicts.featureDict[problemId],dicts.expand_accessibles(acc))        
+                        stats.update(predictions,dicts.dependenciesDict[problemId])
+                        if not stats.badPreds == []:
+                            bp = string.join([str(dicts.idNameDict[x]) for x in stats.badPreds], ',')
+                            logger.debug('Bad predictions: %s',bp)    
+                    # Update Dependencies, p proves p
+                    dicts.dependenciesDict[problemId] = [problemId]+dicts.dependenciesDict[problemId]
+                    model.update(problemId,dicts.featureDict[problemId],dicts.dependenciesDict[problemId])
+                elif line.startswith('p'):
+                    # Overwrite old proof.
+                    problemId,newDependencies = dicts.parse_overwrite(line)
+                    newDependencies = [problemId]+newDependencies
+                    model.overwrite(problemId,newDependencies,dicts)
+                    dicts.dependenciesDict[problemId] = newDependencies
+                elif line.startswith('?'):
+                    startTime = time()
+                    if args.predef:
+                        continue
+                    name,features,accessibles = dicts.parse_problem(line)
+                    # Create predictions
+                    logger.info('Starting computation for problem on line %s',lineCounter)                
+                    predictions,predictionValues = model.predict(features,accessibles)        
+                    assert len(predictions) == len(predictionValues)
+                    logger.info('Done. %s seconds needed.',round(time()-startTime,2))
+                    
+                    # Output        
+                    predictionNames = [str(dicts.idNameDict[p]) for p in predictions[:args.numberOfPredictions]]
+                    predictionValues = [str(x) for x in predictionValues[:args.numberOfPredictions]]                    
+                    predictionsStringList = ['%s=%s' % (predictionNames[i],predictionValues[i]) for i in range(len(predictionNames))]                
+                    predictionsString = string.join(predictionsStringList,' ')
+                    outString = '%s: %s' % (name,predictionsString)
+                    OS.write('%s\n' % outString)
+                    lineCounter += 1
+                else:
+                    logger.warning('Unspecified input format: \n%s',line)
+                    sys.exit(-1)
+            """
+            except:
+                logger.warning('An error occurred on line %s .',line)
+                lineCounter += 1
+                continue
+            """    
+        OS.close()
+        IS.close()
+        
+        # Statistics
+        if args.statistics:
+            stats.printAvg()
+        
+        # Save
+        if args.saveModel:
+            model.save(modelFile)
+        dicts.save(dictsFile)
+        if args.saveStats is not None:
+            statsFile = os.path.join(args.outputDir,args.saveStats)
+            stats.save(statsFile)
+    return 0
+
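+# Illustrative sketch of one line of the predictions file written above
+# (hypothetical names and scores):
+#   foo.goal_7: bar.thm_5=0.73 foo.lemma_1=0.12
+# i.e. "problemName: premise=score ...", at most --numberOfPredictions entries,
+# ordered by the learner's ranking.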
+if __name__ == '__main__':
+    # Example:
+    # Nat
+    #args = ['-l','testIsabelle.log','-o','../tmp/','--statistics','--init','--inputDir','../data/Nat/','--predef']
+    #args = ['-i', '../data/Nat/mash_commands','-p','../tmp/testIsabelle.pred','-l','testIsabelle.log','--predef','-o','../tmp/','--statistics','--saveStats','../tmp/natATPMP.stats']
+    #args = ['-l','testNB.log','-o','../tmp/','--statistics','--init','--inputDir','../data/Nat/']    
+    #args = ['-i', '../data/Nat/mash_commands','-p','../tmp/testNB.pred','-l','../tmp/testNB.log','--nb','-o','../tmp/','--statistics','--saveStats','../tmp/natATPNB.stats','--cutOff','500']
+    # BUG
+    #args = ['-l','testIsabelle.log','-o','../tmp/','--statistics','--init','--inputDir','../data/List/','--isabelle']
+    #args = ['-i', '../data/List/mash_commands','-p','../tmp/testIsabelle.pred','-l','testIsabelle.log','--isabelle','-o','../tmp/','--statistics']
+    #args = ['-l','testNB.log','-o','../tmp/','--statistics','--init','--inputDir','../bug/init','--init']
+    #args = ['-i', '../bug/adds/mash_commands','-p','../tmp/testNB.pred','-l','testNB.log','--nb','-o','../tmp/']
+    #startTime = time()
+    #sys.exit(main(args))
+    #print 'New ' + str(round(time()-startTime,2))    
+    sys.exit(main())
+    
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/naiveBayes.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,136 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/naiveBayes.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# An updatable naive Bayes classifier.
+
+'''
+Created on Jul 11, 2012
+
+@author: Daniel Kuehlwein
+'''
+
+from cPickle import dump,load
+from numpy import array,exp
+from math import log
+
+class NBClassifier(object):
+    '''
+    An updatable naive Bayes classifier.
+    '''
+
+    def __init__(self):
+        '''
+        Constructor
+        '''
+        self.counts = {}
+    
+    def initializeModel(self,trainData,dicts):
+        """
+        Build basic model from training data.
+        """        
+        for d in trainData:
+            dPosCount = 0
+            dFeatureCounts = {}            
+            self.counts[d] = [dPosCount,dFeatureCounts]
+        
+        for key in dicts.dependenciesDict.keys():
+            keyDeps = dicts.dependenciesDict[key]
+            for dep in keyDeps:
+                self.counts[dep][0] += 1
+                depFeatures = dicts.featureDict[key]
+                for f in depFeatures:
+                    if self.counts[dep][1].has_key(f):
+                        self.counts[dep][1][f] += 1
+                    else:
+                        self.counts[dep][1][f] = 1
+
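+    # After initialization, self.counts has the shape
+    #   counts[factId] = [timesUsedAsDependency, {featureId: count}]
+    # where count records how often featureId occurs among the facts that list
+    # factId as a dependency.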
+    
+    def update(self,dataPoint,features,dependencies):
+        """
+        Updates the Model.
+        """
+        if not self.counts.has_key(dataPoint):
+            dPosCount = 0
+            dFeatureCounts = {}            
+            self.counts[dataPoint] = [dPosCount,dFeatureCounts]
+        for dep in dependencies:
+            self.counts[dep][0] += 1
+            for f in features:
+                if self.counts[dep][1].has_key(f):
+                    self.counts[dep][1][f] += 1
+                else:
+                    self.counts[dep][1][f] = 1
+ 
+    def delete(self,dataPoint,features,dependencies):
+        """
+        Deletes a single datapoint from the model.
+        """
+        for dep in dependencies:
+            self.counts[dep][0] -= 1
+            for f in features:
+                self.counts[dep][1][f] -= 1
+
+            
+    def overwrite(self,problemId,newDependencies,dicts):
+        """
+        Deletes the old dependencies of problemId and replaces them with the new ones. Updates the model accordingly.
+        """
+        assert self.counts.has_key(problemId)
+        oldDeps = dicts.dependenciesDict[problemId]
+        features = dicts.featureDict[problemId]
+        self.delete(problemId,features,oldDeps)
+        self.update(problemId,features,newDependencies)
+    
+    def predict(self,features,accessibles):
+        """
+        For each accessible, predicts the probability of it being useful given the features.
+        Returns a ranking of the accessibles.
+        """
+        predictions = []
+        for a in accessibles:            
+            posA = self.counts[a][0]
+            fA = set(self.counts[a][1].keys())
+            fWeightsA = self.counts[a][1]
+            resultA = log(posA) 
+            for f in features:
+                if f in fA:
+                    if fWeightsA[f] == 0:
+                        resultA -= 15
+                    else:
+                        assert fWeightsA[f] <= posA
+                        resultA += log(float(fWeightsA[f])/posA)
+                else:
+                    resultA -= 15
+            predictions.append(resultA)
+        expPredictions = array([exp(x) for x in predictions])
+        predictions = array(predictions)
+        perm = (-predictions).argsort()        
+        return array(accessibles)[perm],expPredictions[perm] 
+    
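+    # Worked example with hypothetical counts: if self.counts[a] = [4, {1: 2, 2: 4}]
+    # and features = [1, 3], then predict scores a with
+    #   log(4) + log(2/4) - 15 = log(2) - 15 ~ -14.31,
+    # since known features contribute log(count/posA) and unknown features
+    # (here 3) incur a flat -15 penalty.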
+    def save(self,fileName):
+        OStream = open(fileName, 'wb')
+        dump(self.counts,OStream)        
+        OStream.close()
+        
+    def load(self,fileName):
+        OStream = open(fileName, 'rb')
+        self.counts = load(OStream)      
+        OStream.close()
+
+    
+if __name__ == '__main__':
+    featureDict = {0:[0,1,2],1:[3,2,1]}
+    dependenciesDict = {0:[0],1:[0,1]}
+    libDicts = (featureDict,dependenciesDict,{})
+    c = NBClassifier()
+    c.initializeModel([0,1],libDicts)
+    c.update(2,[14,1,3],[0,2])
+    print c.counts
+    print c.predict([0,14],[0,1,2])
+    c.save('x')
+    d = NBClassifier()
+    d.load('x')
+    print c.counts
+    print d.counts
+    print 'Done'
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/predefined.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,66 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/predefined.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# A classifier that uses the Meng-Paulson predictions.
+
+'''
+Created on Jul 11, 2012
+
+@author: Daniel Kuehlwein
+'''
+
+from cPickle import dump,load
+
+class Predefined(object):
+    '''
+    A classifier that uses the Meng-Paulson predictions.
+    Used only to compare statistics between the old Sledgehammer relevance filter and the new machine learning ones.
+    '''
+
+    def __init__(self,mpPredictionFile):
+        '''
+        Constructor
+        '''
+        self.predictionFile = mpPredictionFile
+    
+    def initializeModel(self,_trainData,dicts):
+        """
+        Load predictions.
+        """        
+        self.predictions = {}
+        IS = open(self.predictionFile,'r')
+        for line in IS:
+            line = line.split(':')
+            name = line[0].strip()
+            predId = dicts.get_name_id(name)
+            line = line[1].split()
+            preds = [dicts.get_name_id(x.strip())for x in line]
+            self.predictions[predId] = preds
+        IS.close()
+        return dicts
+    
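+    # Illustrative sketch of a line of the prediction file read above
+    # (hypothetical names):
+    #   foo.goal_7: bar.thm_5 foo.lemma_1
+    # i.e. "name: prediction1 prediction2 ...".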
+    def update(self,dataPoint,features,dependencies):
+        """
+        Updates the Model.
+        """
+        # No update needed since we assume that we already have all predictions.
+        pass
+            
+    
+    def predict(self,problemId):
+        """
+        Return the saved predictions.
+        """
+        return self.predictions[problemId] 
+    
+    def save(self,fileName):
+        OStream = open(fileName, 'wb')
+        dump((self.predictionFile,self.predictions),OStream)        
+        OStream.close()
+        
+    def load(self,fileName):
+        OStream = open(fileName, 'rb')
+        self.predictionFile,self.predictions = load(OStream)      
+        OStream.close()
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/readData.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,85 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/readData.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# All functions to read the Isabelle output.
+
+'''
+All functions to read the Isabelle output.
+
+Created on July 9, 2012
+
+@author: Daniel Kuehlwein
+'''
+
+import sys,logging
+
+def create_feature_dict(nameIdDict,idNameDict,maxNameId,featureIdDict,maxFeatureId,inputFile):
+    logger = logging.getLogger('create_feature_dict')    
+    featureDict = {}
+    IS = open(inputFile,'r') 
+    for line in IS:
+        line = line.split(':')
+        name = line[0]        
+        # Name Id
+        if nameIdDict.has_key(name):
+            logger.warning('%s appears twice in the feature file. Aborting.',name)
+            sys.exit(-1)
+        else:
+            nameIdDict[name] = maxNameId
+            idNameDict[maxNameId] = name
+            nameId = maxNameId
+            maxNameId += 1            
+        # Feature Ids
+        featureNames = [f.strip() for f in line[1].split()]             
+        for fn in featureNames:
+            if not featureIdDict.has_key(fn):
+                featureIdDict[fn] = maxFeatureId
+                maxFeatureId += 1
+        featureIds = [featureIdDict[fn] for fn in featureNames]
+        # Store results
+        featureDict[nameId] = featureIds
+    IS.close()
+    return featureDict,maxNameId,maxFeatureId
+     
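+# Illustrative sketch of a mash_features line as parsed above (hypothetical names):
+#   foo.lemma_1: feat_const_Nat feat_Suc
+# i.e. "name: feature1 feature2 ...".  The dependencies and accessibility files
+# parsed below follow the same "name: entry1 entry2 ..." pattern.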
+def create_dependencies_dict(nameIdDict,inputFile):
+    logger = logging.getLogger('create_dependencies_dict')
+    dependenciesDict = {}
+    IS = open(inputFile,'r')
+    for line in IS:
+        line = line.split(':')
+        name = line[0]        
+        # Name Id
+        if not nameIdDict.has_key(name):
+            logger.warning('%s is missing in nameIdDict. Aborting.',name)
+            sys.exit(-1)
+
+        nameId = nameIdDict[name]
+        dependenciesIds = [nameIdDict[f.strip()] for f in line[1].split()]             
+        # Store results, add p proves p
+        dependenciesDict[nameId] = [nameId] + dependenciesIds
+    IS.close()
+    return dependenciesDict
+
+def create_accessible_dict(nameIdDict,idNameDict,maxNameId,inputFile):
+    logger = logging.getLogger('create_accessible_dict')
+    accessibleDict = {}
+    IS = open(inputFile,'r')
+    for line in IS:
+        line = line.split(':')
+        name = line[0]     
+        # Name Id
+        if not nameIdDict.has_key(name):
+            logger.warning('%s is missing in nameIdDict. Adding it as theory.',name)
+            nameIdDict[name] = maxNameId
+            idNameDict[maxNameId] = name
+            nameId = maxNameId
+            maxNameId += 1  
+        else:
+            nameId = nameIdDict[name]
+        accessibleStrings = line[1].split()
+        accessibleDict[nameId] = [nameIdDict[a.strip()] for a in accessibleStrings]
+    IS.close()
+    return accessibleDict,maxNameId
+        
+    
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/snow.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,103 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/snow.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# Wrapper for SNoW.
+
+'''
+THIS FILE IS NOT UP TO DATE!
+NEEDS SOME FIXING BEFORE IT WILL WORK WITH THE MAIN ALGORITHM
+
+Created on Jul 12, 2012
+
+@author: daniel
+'''
+
+import logging,shlex,subprocess,string
+from cPickle import load,dump
+
+class SNoW(object):
+    '''
+    Calls the SNoW framework.
+    '''
+
+    def __init__(self):
+        '''
+        Constructor
+        '''
+        self.logger = logging.getLogger('SNoW')
+        self.SNoWTrainFile = '../tmp/snow.train'
+        self.SNoWTestFile = '../snow.test'  
+        self.SNoWNetFile = '../tmp/snow.net' 
+    
+    def initializeModel(self,trainData,dicts):
+        """
+        Build basic model from training data.
+        """     
+        # Prepare input files
+        self.logger.debug('Creating IO Files')
+        OS = open(self.SNoWTrainFile,'w')
+        for nameId in trainData:
+            features = [f+dicts.maxNameId for f in dicts.featureDict[nameId]]
+            features = map(str,features)
+            featureString = string.join(features,',')
+            dependencies = dicts.dependenciesDict[nameId]
+            dependencies = map(str,dependencies)
+            dependenciesString = string.join(dependencies,',')
+            snowString = string.join([featureString,dependenciesString],',')+':\n' 
+            OS.write(snowString)
+        OS.close()
+
+        # Build Model
+        self.logger.debug('Building Model START.')
+        snowTrainCommand = '../bin/snow -train -M+ -I %s -F %s -g- -B :0-%s' % (self.SNoWTrainFile,self.SNoWNetFile,dicts.maxNameId-1)    
+        args = shlex.split(snowTrainCommand)
+        p = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.STDOUT)    
+        p.wait()
+        self.logger.debug('Building Model END.')   
+
+    
+    def update(self,dataPoint,features,dependencies,dicts):
+        """
+        Updates the Model.
+        THIS IS NOT WORKING ATM, BUT WE DON'T CARE
+        """        
+        self.logger.debug('Updating Model START') 
+        trainData = dicts.featureDict.keys()       
+        self.initializeModel(trainData,dicts)
+        self.logger.debug('Updating Model END')
+            
+    
+    def predict(self,features,accessibles,dicts):
+        logger = logging.getLogger('predict_SNoW')
+         
+        OS = open(self.SNoWTestFile,'w')
+        features = map(str,features)
+        featureString = string.join(features, ',')
+        snowString = featureString+':'
+        OS.write(snowString)
+        OS.close() 
+        
+        snowTestCommand = '../bin/snow -test -I %s -F %s -o allboth' % (self.SNoWTestFile,self.SNoWNetFile)
+        args = shlex.split(snowTestCommand)
+        p = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.STDOUT)    
+        (lines, _stderrdata) = p.communicate()
+        logger.debug('SNoW finished.')
+        lines = lines.split('\n')    
+        assert lines[9].startswith('Example ')
+        assert lines[-4] == ''
+        predictionsCon = []    
+        for line in lines[10:-4]:
+            premiseId = int(line.split()[0][:-1])
+            predictionsCon.append(premiseId)
+        return predictionsCon
+    
+    def save(self,fileName):
+        OStream = open(fileName, 'wb')
+        dump(self.counts,OStream)        
+        OStream.close()
+        
+    def load(self,fileName):
+        OStream = open(fileName, 'rb')
+        self.counts = load(OStream)      
+        OStream.close()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/stats.py	Mon Nov 26 17:13:44 2012 +0100
@@ -0,0 +1,130 @@
+#     Title:      HOL/Tools/Sledgehammer/MaSh/src/stats.py
+#     Author:     Daniel Kuehlwein, ICIS, Radboud University Nijmegen
+#     Copyright   2012
+#
+# Statistics collector.
+
+'''
+Created on Jul 9, 2012
+
+@author: Daniel Kuehlwein
+'''
+
+import logging,string
+from cPickle import load,dump
+
+class Statistics(object):
+    '''
+    Class for all the statistics
+    '''
+
+    def __init__(self,cutOff=500):
+        '''
+        Constructor
+        '''
+        self.logger = logging.getLogger('Statistics')
+        self.avgAUC = 0.0
+        self.avgRecall100 = 0.0
+        self.avgAvailable = 0.0
+        self.avgDepNr = 0.0
+        self.problems = 0.0
+        self.cutOff = cutOff
+        self.recallData = [0]*cutOff
+        self.recall100Data = [0]*cutOff
+        self.aucData = []
+        
+    def update(self,predictions,dependencies):
+        """
+        Evaluates the AUC, recall100, and the number of available premises of a prediction, given its dependencies.
+        """
+
+        available = len(predictions)
+        predictions = predictions[:self.cutOff]
+        dependencies = set(dependencies)
+        depNr = len(dependencies)
+        aucSum = 0.    
+        posResults = 0.        
+        positives, negatives = 0, 0
+        recall100 = 0.0
+        badPreds = []
+        depsFound = []
+        for index,pId in enumerate(predictions):
+            if pId in dependencies:        #positive
+                posResults+=1
+                positives+=1
+                recall100 = index+1
+                depsFound.append(pId)
+                if index > 200:
+                    badPreds.append(pId)
+            else:            
+                aucSum += posResults
+                negatives+=1
+            # Update Recall and Recall100 stats
+            if depNr == positives:
+                self.recall100Data[index] += 1
+            if depNr == 0:
+                self.recallData[index] += 1
+            else:
+                self.recallData[index] += float(positives)/depNr
+    
+        if not depNr == positives:
+            depsFound = set(depsFound)
+            missing = []
+            for dep in dependencies:
+                if not dep in depsFound:
+                    missing.append(dep)
+                    badPreds.append(dep)
+                    recall100 = len(predictions)+1
+                    positives+=1
+            self.logger.debug('Dependencies missing for %s in accessibles! Estimating Statistics.',\
+                              string.join([str(dep) for dep in missing],','))
+    
+        if positives == 0 or negatives == 0:
+            auc = 1.0
+        else:            
+            auc = aucSum/(negatives*positives)
+        
+        self.aucData.append(auc)
+        self.avgAUC += auc
+        self.avgRecall100 += recall100
+        self.problems += 1
+        self.badPreds = badPreds
+        self.avgAvailable += available 
+        self.avgDepNr += depNr
+        self.logger.info('AUC: %s \t Needed: %s \t Recall100: %s \t Available: %s \t cutOff:%s',\
+                          round(100*auc,2),depNr,recall100,available,self.cutOff)        
+        
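+    # Worked example with hypothetical ids: for predictions [3, 7, 2, 9] and
+    # dependencies {3, 2}, the positives sit at ranks 1 and 3, so
+    # positives = negatives = 2, aucSum = 1 + 2 = 3 (posResults added at each
+    # negative), auc = 3 / (2 * 2) = 0.75, and recall100 = 3 (the rank of the
+    # last dependency found).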
+    def printAvg(self):
+        self.logger.info('Average results:')
+        self.logger.info('avgAUC: %s \t avgDepNr: %s \t avgRecall100: %s \t cutOff:%s', \
+                         round(100*self.avgAUC/self.problems,2),round(self.avgDepNr/self.problems,2),self.avgRecall100/self.problems,self.cutOff)
+
+        try:
+            from matplotlib.pyplot import plot,figure,show,xlabel,ylabel,axis,hist
+            avgRecall = [float(x)/self.problems for x in self.recallData]
+            figure('Recall')
+            plot(range(self.cutOff),avgRecall)
+            ylabel('Average Recall')
+            xlabel('Highest ranked premises')
+            axis([0,self.cutOff,0.0,1.0])
+            figure('100%Recall')
+            plot(range(self.cutOff),self.recall100Data)
+            ylabel('100%Recall')
+            xlabel('Highest ranked premises')
+            axis([0,self.cutOff,0,self.problems])
+            figure('AUC Histogram')
+            hist(self.aucData,bins=100)
+            ylabel('Problems')
+            xlabel('AUC')
+            show()
+        except ImportError:
+            self.logger.warning('Matplotlib module missing. Skipping graphs.')
+    
+    def save(self,fileName):       
+        oStream = open(fileName, 'wb')
+        dump((self.avgAUC,self.avgRecall100,self.avgAvailable,self.avgDepNr,self.problems,self.cutOff,self.recallData,self.recall100Data,self.aucData),oStream)
+        oStream.close()
+    def load(self,fileName):
+        iStream = open(fileName, 'rb')        
+        self.avgAUC,self.avgRecall100,self.avgAvailable,self.avgDepNr,self.problems,self.cutOff,self.recallData,self.recall100Data,self.aucData = load(iStream)
+        iStream.close()
\ No newline at end of file
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML	Mon Nov 26 17:13:44 2012 +0100
@@ -55,7 +55,7 @@
     Proof.context -> bool -> int -> string list * string list
     -> (string * real) list
   val mash_unlearn : Proof.context -> unit
-  val mash_could_suggest_facts : unit -> bool
+  val is_mash_enabled : unit -> bool
   val mash_can_suggest_facts : Proof.context -> bool
   val mash_suggested_facts :
     Proof.context -> params -> string -> int -> term list -> term
@@ -101,7 +101,7 @@
 val relearn_isarN = "relearn_isar"
 val relearn_atpN = "relearn_atp"
 
-fun mash_home () = getenv "MASH_HOME"
+fun mash_home () = getenv "ISABELLE_SLEDGEHAMMER_MASH"
 fun mash_model_dir () =
   getenv "ISABELLE_HOME_USER" ^ "/mash"
   |> tap (Isabelle_System.mkdir o Path.explode)
@@ -446,9 +446,8 @@
       " --numberOfPredictions " ^ string_of_int max_suggs ^
       (if save then " --saveModel" else "")
     val command =
-      mash_home () ^ "/mash --quiet --outputDir " ^ mash_model_dir () ^
+      mash_home () ^ "/src/mash.py --quiet --outputDir " ^ mash_model_dir () ^
       " --log " ^ log_file ^ " " ^ core ^ " >& " ^ err_file
-
       |> tap (fn _ => trace_msg ctxt (fn () =>
              case try File.read (Path.explode err_file) of
                NONE => "Done"
@@ -623,7 +622,7 @@
 
 end
 
-fun mash_could_suggest_facts () = mash_home () <> ""
+fun is_mash_enabled () = (getenv "MASH" = "yes")
 fun mash_can_suggest_facts ctxt = not (Graph.is_empty (#fact_G (mash_get ctxt)))
 
 fun num_keys keys = Graph.Keys.fold (K (Integer.add 1)) keys 0
@@ -974,7 +973,7 @@
         | NONE =>
           if is_smt_prover ctxt prover then
             mepoN
-          else if mash_could_suggest_facts () then
+          else if is_mash_enabled () then
             (maybe_learn ();
              if mash_can_suggest_facts ctxt then meshN else mepoN)
           else
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_reconstruct.ML	Mon Nov 26 16:28:22 2012 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_reconstruct.ML	Mon Nov 26 17:13:44 2012 +0100
@@ -505,13 +505,13 @@
   type key = indexname list
   val ord = list_ord Term_Ord.fast_indexname_ord)
 
-(* (1) Generalize Types *)
+(* (1) Generalize types *)
 fun generalize_types ctxt t =
   t |> map_types (fn _ => dummyT)
     |> Syntax.check_term
          (Proof_Context.set_mode Proof_Context.mode_pattern ctxt)
 
-(* (2) Typing-spot Table *)
+(* (2) Typing-spot table *)
 local
 fun key_of_atype (TVar (z, _)) =
     Ord_List.insert Term_Ord.fast_indexname_ord z
@@ -535,7 +535,7 @@
   post_fold_term_type update_tab (Var_Set_Tab.empty, 0) #> fst
 end
 
-(* (3) Reverse-Greedy *)
+(* (3) Reverse-greedy *)
 fun reverse_greedy typing_spot_tab =
   let
     fun update_count z =
@@ -555,9 +555,10 @@
       |>> sort_distinct (rev_order o cost_ord o pairself snd)
   in fold drop_superfluous typing_spots ([], tvar_count_tab) |> fst end
 
-(* (4) Introduce Annotations *)
-fun introduce_annotations thy spots t t' =
+(* (4) Introduce annotations *)
+fun introduce_annotations ctxt spots t t' =
   let
+    val thy = Proof_Context.theory_of ctxt
     val get_types = post_fold_term_type (K cons) []
     fun match_types tp =
       fold (Sign.typ_match thy) (op ~~ (pairself get_types tp)) Vartab.empty
@@ -572,6 +573,7 @@
       let
         val unica =
           Vartab.fold (add_all_tfree_namesT o snd o snd) env []
+          |> filter_out (Variable.is_declared ctxt)
           |> unica fast_string_ord
         val erase_unica = map_atyps
           (fn T as TFree (s, _) =>
@@ -611,13 +613,12 @@
 (* (5) Annotate *)
 fun annotate_types ctxt t =
   let
-    val thy = Proof_Context.theory_of ctxt
     val t' = generalize_types ctxt t
     val typing_spots =
       t' |> typing_spot_table
          |> reverse_greedy
          |> sort int_ord
-  in introduce_annotations thy typing_spots t t' end
+  in introduce_annotations ctxt typing_spots t t' end
 
 val indent_size = 2
 val no_label = ("", ~1)
@@ -817,7 +818,7 @@
            | (NONE, (_, s)) => (true, Time.+ (preplay_timeout, s))) o apfst Lazy.force)
         (false, seconds 0.0)
 
-    (* Metis Preplaying *)
+    (* Metis preplaying *)
     fun try_metis timeout (Prove (_, _, t, By_Metis fact_names)) =
       if not preplay then (fn () => SOME (seconds 0.0)) else
         let