--- a/src/Doc/ProgProve/Bool_nat_list.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/Doc/ProgProve/Bool_nat_list.thy Sat Nov 16 18:34:11 2013 +0100
@@ -419,7 +419,7 @@
From now on lists are always the predefined lists.
-\subsection{Exercises}
+\subsection*{Exercises}
\begin{exercise}
Use the \isacom{value} command to evaluate the following expressions:
--- a/src/Doc/ProgProve/Isar.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/Doc/ProgProve/Isar.thy Sat Nov 16 18:34:11 2013 +0100
@@ -590,7 +590,7 @@
the fact just proved, in this case the preceding block. In general,
\isacom{note} introduces a new name for one or more facts.
-\subsection{Exercises}
+\subsection*{Exercises}
\exercise
Give a readable, structured proof of the following lemma:
@@ -1077,7 +1077,7 @@
\end{warn}
-\subsection{Exercises}
+\subsection*{Exercises}
\exercise
--- a/src/Doc/ProgProve/Logic.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/Doc/ProgProve/Logic.thy Sat Nov 16 18:34:11 2013 +0100
@@ -142,7 +142,7 @@
@{theory Main}.
-\subsection{Exercises}
+\subsection*{Exercises}
\exercise
Start from the data type of binary trees defined earlier:
@@ -788,7 +788,7 @@
transitive closure merely simplifies the form of the induction rule.
-\subsection{Exercises}
+\subsection*{Exercises}
\begin{exercise}
Formalise the following definition of palindromes
--- a/src/Doc/ProgProve/Types_and_funs.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/Doc/ProgProve/Types_and_funs.thy Sat Nov 16 18:34:11 2013 +0100
@@ -201,7 +201,7 @@
except in its name, and is applicable independently of @{text f}.
-\subsection{Exercises}
+\subsection*{Exercises}
\begin{exercise}
Starting from the type @{text "'a tree"} defined in the text, define
@@ -336,7 +336,7 @@
those that change in recursive calls.
-\subsection{Exercises}
+\subsection*{Exercises}
\begin{exercise}
Write a tail-recursive variant of the @{text add} function on @{typ nat}:
@@ -523,7 +523,7 @@
Method @{text auto} can be modified in exactly the same way.
-\subsection{Exercises}
+\subsection*{Exercises}
\exercise\label{exe:tree0}
Define a datatype @{text tree0} of binary tree skeletons which do not store
--- a/src/HOL/BNF/BNF_Util.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/BNF/BNF_Util.thy Sat Nov 16 18:34:11 2013 +0100
@@ -9,7 +9,7 @@
header {* Library for Bounded Natural Functors *}
theory BNF_Util
-imports Ctr_Sugar "../Cardinals/Cardinal_Arithmetic"
+imports "../Cardinals/Cardinal_Arithmetic"
begin
lemma subset_Collect_iff: "B \<subseteq> A \<Longrightarrow> (B \<subseteq> {x \<in> A. P x}) = (\<forall>x \<in> B. P x)"
--- a/src/HOL/BNF/More_BNFs.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/BNF/More_BNFs.thy Sat Nov 16 18:34:11 2013 +0100
@@ -206,7 +206,7 @@
by (transfer, clarsimp, metis fst_conv)
qed
-lemma wpull_fmap:
+lemma wpull_fimage:
assumes "wpull A B1 B2 f1 f2 p1 p2"
shows "wpull {x. fset x \<subseteq> A} {x. fset x \<subseteq> B1} {x. fset x \<subseteq> B2}
(fimage f1) (fimage f2) (fimage p1) (fimage p2)"
@@ -245,7 +245,7 @@
apply (rule natLeq_card_order)
apply (rule natLeq_cinfinite)
apply transfer apply (metis ordLess_imp_ordLeq finite_iff_ordLess_natLeq)
- apply (erule wpull_fmap)
+ apply (erule wpull_fimage)
apply (simp add: Grp_def relcompp.simps conversep.simps fun_eq_iff fset_rel_alt fset_rel_aux)
apply transfer apply simp
done
--- a/src/HOL/BNF/Tools/bnf_lfp_compat.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_lfp_compat.ML Sat Nov 16 18:34:11 2013 +0100
@@ -49,7 +49,7 @@
SOME (fp_sugar as {fp = Least_FP, ...}) => fp_sugar
| _ => not_datatype s);
- val fp_sugar0 as {fp_res = {Ts = fpTs0, ...}, ...} = lfp_sugar_of fpT_name1;
+ val {fp_res = {Ts = fpTs0, ...}, ...} = lfp_sugar_of fpT_name1;
val fpT_names' = map (fst o dest_Type) fpTs0;
val _ = eq_set (op =) (fpT_names, fpT_names') orelse not_mutually_recursive fpT_names;
@@ -82,7 +82,7 @@
fold add_nested_types_of subTs (seen @ mutual_Ts)
end
| NONE => error ("Unsupported recursion via type constructor " ^ quote s ^
- " not associated with new-style datatype (cf. \"datatype_new\")"));
+                      " not corresponding to a new-style datatype (cf. \"datatype_new\")"));
val Ts = add_nested_types_of fpT1 [];
val b_names = map base_name_of_typ Ts;
@@ -92,7 +92,7 @@
val nn_fp = length fpTs;
val nn = length Ts;
val get_indices = K [];
- val fp_sugars0 = if nn = 1 then [fp_sugar0] else map (lfp_sugar_of o fst o dest_Type) Ts;
+ val fp_sugars0 = map (lfp_sugar_of o fst o dest_Type) Ts;
val callssss = map (fn fp_sugar0 => indexify_callsss fp_sugar0 []) fp_sugars0;
val ((fp_sugars, (lfp_sugar_thms, _)), lthy) =
@@ -105,15 +105,13 @@
fp_sugars;
val inducts = conj_dests nn induct;
- val frozen_Ts = map Type.legacy_freeze_type Ts;
- val mk_dtyp = dtyp_of_typ frozen_Ts;
+ val mk_dtyp = dtyp_of_typ Ts;
- fun mk_ctr_descr (Const (s, T)) =
- (s, map mk_dtyp (binder_types (Type.legacy_freeze_type T)));
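+    (* a constructor descriptor is the constructor name paired with its argument dtyps *)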
+ fun mk_ctr_descr Ts = mk_ctr Ts #> dest_Const ##> (binder_types #> map mk_dtyp);
fun mk_typ_descr index (Type (T_name, Ts)) ({ctrs, ...} : ctr_sugar) =
- (index, (T_name, map mk_dtyp Ts, map mk_ctr_descr ctrs));
+ (index, (T_name, map mk_dtyp Ts, map (mk_ctr_descr Ts) ctrs));
- val descr = map3 mk_typ_descr (0 upto nn - 1) frozen_Ts ctr_sugars;
+ val descr = map3 mk_typ_descr (0 upto nn - 1) Ts ctr_sugars;
val recs = map (fst o dest_Const o co_rec_of) co_iterss;
val rec_thms = flat (map co_rec_of iter_thmsss);
--- a/src/HOL/BNF/Tools/bnf_util.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/BNF/Tools/bnf_util.ML Sat Nov 16 18:34:11 2013 +0100
@@ -54,7 +54,6 @@
term list list list list * Proof.context
val nonzero_string_of_int: int -> string
val retype_free: typ -> term -> term
- val typ_subst_nonatomic: (typ * typ) list -> typ -> typ
val binder_fun_types: typ -> typ list
val body_fun_type: typ -> typ
@@ -307,14 +306,6 @@
val mk_TFreess = fold_map mk_TFrees;
-(*Replace each Ti by Ui (starting from the leaves); inst = [(T1, U1), ..., (Tn, Un)].*)
-fun typ_subst_nonatomic [] = I
- | typ_subst_nonatomic inst =
- let
- fun subst (Type (s, Ts)) = perhaps (AList.lookup (op =) inst) (Type (s, map subst Ts))
- | subst T = perhaps (AList.lookup (op =) inst) T;
- in subst end;
-
fun mk_Freesss x Tsss = fold_map2 mk_Freess (mk_names (length Tsss) x) Tsss;
fun mk_Freessss x Tssss = fold_map2 mk_Freesss (mk_names (length Tssss) x) Tssss;
--- a/src/HOL/Cardinals/Wellfounded_More_Base.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Cardinals/Wellfounded_More_Base.thy Sat Nov 16 18:34:11 2013 +0100
@@ -8,7 +8,7 @@
header {* More on Well-Founded Relations (Base) *}
theory Wellfounded_More_Base
-imports Wellfounded Order_Relation_More_Base "~~/src/HOL/Library/Wfrec"
+imports Order_Relation_More_Base "~~/src/HOL/Library/Wfrec"
begin
--- a/src/HOL/Codegenerator_Test/Generate_Efficient_Datastructures.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Codegenerator_Test/Generate_Efficient_Datastructures.thy Sat Nov 16 18:34:11 2013 +0100
@@ -40,6 +40,18 @@
lemma [code, code del]:
"Cardinality.eq_set = Cardinality.eq_set" ..
+lemma [code, code del]:
+ "(Gcd :: nat set \<Rightarrow> nat) = Gcd" ..
+
+lemma [code, code del]:
+ "(Lcm :: nat set \<Rightarrow> nat) = Lcm" ..
+
+lemma [code, code del]:
+ "(Gcd :: int set \<Rightarrow> int) = Gcd" ..
+
+lemma [code, code del]:
+ "(Lcm :: int set \<Rightarrow> int) = Lcm" ..
+
(*
If the code generation ends with an exception with the following message:
'"List.set" is not a constructor, on left hand side of equation: ...',
--- a/src/HOL/GCD.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/GCD.thy Sat Nov 16 18:34:11 2013 +0100
@@ -1654,11 +1654,11 @@
apply (metis Lcm0_iff dvd_Lcm_nat dvd_imp_le neq0_conv)
done
-lemma Lcm_set_nat [code_unfold]:
+lemma Lcm_set_nat [code, code_unfold]:
"Lcm (set ns) = fold lcm ns (1::nat)"
by (fact gcd_lcm_complete_lattice_nat.Sup_set_fold)
-lemma Gcd_set_nat [code_unfold]:
+lemma Gcd_set_nat [code, code_unfold]:
"Gcd (set ns) = fold gcd ns (0::nat)"
by (fact gcd_lcm_complete_lattice_nat.Inf_set_fold)
@@ -1730,11 +1730,11 @@
assumes "\<forall>m\<in>M. n dvd m" shows "n dvd Gcd M"
using assms by (simp add: Gcd_int_def dvd_int_iff)
-lemma Lcm_set_int [code_unfold]:
+lemma Lcm_set_int [code, code_unfold]:
"Lcm (set xs) = fold lcm xs (1::int)"
by (induct xs rule: rev_induct, simp_all add: lcm_commute_int)
-lemma Gcd_set_int [code_unfold]:
+lemma Gcd_set_int [code, code_unfold]:
"Gcd (set xs) = fold gcd xs (0::int)"
by (induct xs rule: rev_induct, simp_all add: gcd_commute_int)
--- a/src/HOL/ROOT Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/ROOT Sat Nov 16 18:34:11 2013 +0100
@@ -43,6 +43,7 @@
Code_Real_Approx_By_Float
Code_Target_Numeral
DAList
+ DAList_Multiset
RBT_Mapping
RBT_Set
(*legacy tools*)
--- a/src/HOL/TPTP/atp_problem_import.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/TPTP/atp_problem_import.ML Sat Nov 16 18:34:11 2013 +0100
@@ -7,23 +7,20 @@
signature ATP_PROBLEM_IMPORT =
sig
- val read_tptp_file :
- theory -> (term -> term) -> string
- -> term list * (term list * term list) * Proof.context
+ val read_tptp_file : theory -> (string * term -> 'a) -> string ->
+ 'a list * ('a list * 'a list) * Proof.context
val nitpick_tptp_file : theory -> int -> string -> unit
val refute_tptp_file : theory -> int -> string -> unit
val can_tac : Proof.context -> tactic -> term -> bool
val SOLVE_TIMEOUT : int -> string -> tactic -> tactic
- val atp_tac :
- Proof.context -> int -> (string * string) list -> int -> string -> int
- -> tactic
+ val atp_tac : Proof.context -> int -> (string * string) list -> int -> string -> int -> tactic
val smt_solver_tac : string -> Proof.context -> int -> tactic
val freeze_problem_consts : theory -> term -> term
val make_conj : term list * term list -> term list -> term
val sledgehammer_tptp_file : theory -> int -> string -> unit
val isabelle_tptp_file : theory -> int -> string -> unit
val isabelle_hot_tptp_file : theory -> int -> string -> unit
- val translate_tptp_file : string -> string -> string -> unit
+ val translate_tptp_file : theory -> string -> string -> string -> unit
end;
structure ATP_Problem_Import : ATP_PROBLEM_IMPORT =
@@ -32,6 +29,8 @@
open ATP_Util
open ATP_Problem
open ATP_Proof
+open ATP_Problem_Generate
+open ATP_Systems
val debug = false
val overlord = false
@@ -39,16 +38,18 @@
(** TPTP parsing **)
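+(* interpret a file name as a path, resolving relative names against "$PWD" *)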
+fun exploded_absolute_path file_name =
+ Path.explode file_name
+ |> (fn path => path |> not (Path.is_absolute path) ? Path.append (Path.explode "$PWD"))
+
fun read_tptp_file thy postproc file_name =
let
fun has_role role (_, role', _, _) = (role' = role)
- fun get_prop (_, _, P, _) =
- P |> Logic.varify_global |> close_form |> postproc
- val path =
- Path.explode file_name
- |> (fn path =>
- path |> not (Path.is_absolute path)
- ? Path.append (Path.explode "$PWD"))
+    fun get_prop (name, _, P, _) =
+      let val P' = P |> Logic.varify_global |> close_form in
+        (name, P') |> postproc
+      end
+ val path = exploded_absolute_path file_name
val ((_, _, problem), thy) =
TPTP_Interpret.interpret_file true [Path.dir path, Path.explode "$TPTP"]
path [] [] thy
@@ -68,7 +69,7 @@
fun nitpick_tptp_file thy timeout file_name =
let
- val (conjs, (defs, nondefs), ctxt) = read_tptp_file thy I file_name
+ val (conjs, (defs, nondefs), ctxt) = read_tptp_file thy snd file_name
val thy = Proof_Context.theory_of ctxt
val (defs, pseudo_defs) =
defs |> map (ATP_Util.abs_extensionalize_term ctxt
@@ -115,7 +116,7 @@
else
"Unknown")
|> Output.urgent_message
- val (conjs, (defs, nondefs), ctxt) = read_tptp_file thy I file_name
+ val (conjs, (defs, nondefs), ctxt) = read_tptp_file thy snd file_name
val params =
[("maxtime", string_of_int timeout),
("maxvars", "100000")]
@@ -272,7 +273,7 @@
fun sledgehammer_tptp_file thy timeout file_name =
let
val (conjs, assms, ctxt) =
- read_tptp_file thy (freeze_problem_consts thy) file_name
+ read_tptp_file thy (freeze_problem_consts thy o snd) file_name
val conj = make_conj assms conjs
in
can_tac ctxt (sledgehammer_tac true ctxt timeout 1) conj
@@ -282,7 +283,7 @@
fun generic_isabelle_tptp_file demo thy timeout file_name =
let
val (conjs, assms, ctxt) =
- read_tptp_file thy (freeze_problem_consts thy) file_name
+ read_tptp_file thy (freeze_problem_consts thy o snd) file_name
val conj = make_conj assms conjs
val (last_hope_atp, last_hope_completeness) =
if demo then (ATP_Systems.satallaxN, 0) else (ATP_Systems.vampireN, 2)
@@ -300,6 +301,33 @@
(** Translator between TPTP(-like) file formats **)
-fun translate_tptp_file format in_file_name out_file_name = ()
+fun translate_tptp_file thy format_str in_file_name out_file_name =
+ let
+ val (conjs, (defs, nondefs), ctxt) = read_tptp_file thy I in_file_name
+ val conj = make_conj ([], []) (map snd conjs)
+
+ val (format, type_enc, lam_trans) =
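+    (* choose the output format, type encoding, and lambda translation for the requested format *)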
+ (case format_str of
+ "FOF" => (FOF, "mono_guards??", liftingN)
+ | "TFF0" => (TFF Monomorphic, "mono_native", liftingN)
+ | "THF0" => (THF (Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN)
+ | "DFG" => (DFG Monomorphic, "mono_native", liftingN)
+ | _ => error ("Unknown format " ^ quote format_str ^
+ " (expected \"FOF\", \"TFF0\", \"THF0\", or \"DFG\")"))
+ val uncurried_aliases = false
+ val readable_names = true
+ val presimp = true
+ val facts = map (apfst (rpair (Global, Non_Rec_Def))) defs @
+ map (apfst (rpair (Global, General))) nondefs
+ val (atp_problem, _, _, _, _) =
+ prepare_atp_problem ctxt format Hypothesis (type_enc_of_string Strict type_enc) Translator
+ lam_trans uncurried_aliases readable_names presimp [] conj facts
+
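+    (* term order information, as needed for DFG (SPASS) output *)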
+ val ord = effective_term_order ctxt spassN
+ val ord_info = K []
+ val lines = lines_of_atp_problem format ord ord_info atp_problem
+ in
+ File.write_list (exploded_absolute_path out_file_name) lines
+ end
end;
--- a/src/HOL/TPTP/lib/Tools/tptp_isabelle Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/TPTP/lib/Tools/tptp_isabelle Sat Nov 16 18:34:11 2013 +0100
@@ -29,5 +29,5 @@
echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
ML {* ATP_Problem_Import.isabelle_tptp_file @{theory} ($TIMEOUT) \"$FILE\" *} end;" \
> /tmp/$SCRATCH.thy
- "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val it = (): unit"
+ "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val \|^Loading theory"
done
--- a/src/HOL/TPTP/lib/Tools/tptp_isabelle_hot Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/TPTP/lib/Tools/tptp_isabelle_hot Sat Nov 16 18:34:11 2013 +0100
@@ -29,5 +29,5 @@
echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
ML {* ATP_Problem_Import.isabelle_hot_tptp_file @{theory} ($TIMEOUT) \"$FILE\" *} end;" \
> /tmp/$SCRATCH.thy
- "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val it = (): unit"
+ "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val \|^Loading theory"
done
--- a/src/HOL/TPTP/lib/Tools/tptp_nitpick Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/TPTP/lib/Tools/tptp_nitpick Sat Nov 16 18:34:11 2013 +0100
@@ -29,5 +29,5 @@
echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
ML {* ATP_Problem_Import.nitpick_tptp_file @{theory} ($TIMEOUT) \"$FILE\" *} end;" \
> /tmp/$SCRATCH.thy
- "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val it = (): unit"
+ "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val \|^Loading theory"
done
--- a/src/HOL/TPTP/lib/Tools/tptp_refute Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/TPTP/lib/Tools/tptp_refute Sat Nov 16 18:34:11 2013 +0100
@@ -28,5 +28,5 @@
echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
ML {* ATP_Problem_Import.refute_tptp_file @{theory} ($TIMEOUT) \"$FILE\" *} end;" \
> /tmp/$SCRATCH.thy
- "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val it = (): unit"
+ "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val \|^Loading theory"
done
--- a/src/HOL/TPTP/lib/Tools/tptp_sledgehammer Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/TPTP/lib/Tools/tptp_sledgehammer Sat Nov 16 18:34:11 2013 +0100
@@ -29,5 +29,5 @@
echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
ML {* ATP_Problem_Import.sledgehammer_tptp_file @{theory} ($TIMEOUT) \"$FILE\" *} end;" \
> /tmp/$SCRATCH.thy
- "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val it = (): unit"
+ "$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val \|^Loading theory"
done
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/TPTP/lib/Tools/tptp_translate Sat Nov 16 18:34:11 2013 +0100
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+#
+# Author: Jasmin Blanchette
+#
+# DESCRIPTION: translate between TPTP formats
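+#              e.g. "isabelle tptp_translate TFF0 foo.p foo.tff" (hypothetical file names)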
+
+
+PRG="$(basename "$0")"
+
+function usage() {
+ echo
+ echo "Usage: isabelle $PRG FORMAT IN_FILE OUT_FILE"
+ echo
+  echo "  Translates a TPTP input file into the specified format (\"FOF\", \"TFF0\", \"THF0\", or \"DFG\")."
+ echo
+ exit 1
+}
+
+[ "$#" -ne 3 -o "$1" = "-?" ] && usage
+
+SCRATCH="Scratch_${PRG}_$$_${RANDOM}"
+
+args=("$@")
+
+echo "theory $SCRATCH imports \"$TPTP_HOME/ATP_Problem_Import\" begin \
+ML {* ATP_Problem_Import.translate_tptp_file @{theory} \"${args[0]}\" \"${args[1]}\" \"${args[2]}\" *} end;" \
+ > /tmp/$SCRATCH.thy
+"$ISABELLE_PROCESS" -q -e "use_thy \"/tmp/$SCRATCH\"; exit 1;" HOL-TPTP | grep --line-buffered -v "^###\|^PROOF FAILED for depth\|^Failure node\|inferences so far. Searching to depth\|^val \|^Loading theory"
--- a/src/HOL/Tools/ATP/atp_problem_generate.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/ATP/atp_problem_generate.ML Sat Nov 16 18:34:11 2013 +0100
@@ -15,7 +15,7 @@
type atp_formula_role = ATP_Problem.atp_formula_role
type 'a atp_problem = 'a ATP_Problem.atp_problem
- datatype mode = Metis | Sledgehammer | Sledgehammer_Completish | Exporter
+ datatype mode = Metis | Sledgehammer | Sledgehammer_Completish | Exporter | Translator
datatype scope = Global | Local | Assum | Chained
datatype status =
@@ -127,7 +127,7 @@
open ATP_Util
open ATP_Problem
-datatype mode = Metis | Sledgehammer | Sledgehammer_Completish | Exporter
+datatype mode = Metis | Sledgehammer | Sledgehammer_Completish | Exporter | Translator
datatype scope = Global | Local | Assum | Chained
datatype status =
@@ -2756,12 +2756,11 @@
val app_op_and_predicator_threshold = 45
fun prepare_atp_problem ctxt format prem_role type_enc mode lam_trans
- uncurried_aliases readable_names preproc hyp_ts concl_t
+ uncurried_aliases readable_names presimp hyp_ts concl_t
facts =
let
val thy = Proof_Context.theory_of ctxt
val type_enc = type_enc |> adjust_type_enc format
- val exporter = (mode = Exporter)
val completish = (mode = Sledgehammer_Completish)
(* Forcing explicit applications is expensive for polymorphic encodings,
because it takes only one existential variable ranging over "'a => 'b" to
@@ -2778,7 +2777,7 @@
if lam_trans = keep_lamsN andalso not (is_type_enc_higher_order type_enc) then liftingN
else lam_trans
val (fact_names, classes, conjs, facts, subclass_pairs, tcon_clauses, lifted) =
- translate_formulas ctxt prem_role format type_enc lam_trans preproc hyp_ts concl_t facts
+ translate_formulas ctxt prem_role format type_enc lam_trans presimp hyp_ts concl_t facts
val (_, sym_tab0) = sym_table_of_facts ctxt type_enc app_op_level conjs facts
val mono = conjs @ facts |> mononotonicity_info_of_facts ctxt type_enc completish
val ctrss = all_ctrss_of_datatypes thy
@@ -2811,10 +2810,12 @@
val datatype_decl_lines = map decl_line_of_datatype datatypes
val decl_lines = class_decl_lines @ sym_decl_lines @ datatype_decl_lines
val num_facts = length facts
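+    (* exporter and translator modes keep the original fact names *)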
+ val freshen = mode <> Exporter andalso mode <> Translator
+ val pos = mode <> Exporter
+ val rank_of = rank_of_fact_num num_facts
val fact_lines =
- map (line_of_fact ctxt fact_prefix ascii_of I (not exporter)
- (not exporter) mono type_enc (rank_of_fact_num num_facts))
- (0 upto num_facts - 1 ~~ facts)
+ map (line_of_fact ctxt fact_prefix ascii_of I freshen pos mono type_enc rank_of)
+ (0 upto num_facts - 1 ~~ facts)
val subclass_lines = maps (lines_of_subclass_pair type_enc) subclass_pairs
val tcon_lines = map (line_of_tcon_clause type_enc) tcon_clauses
val helper_lines =
--- a/src/HOL/Tools/Sledgehammer/MaSh/src/dictionaries.py Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/dictionaries.py Sat Nov 16 18:34:11 2013 +0100
@@ -22,7 +22,7 @@
self.nameIdDict = {}
self.idNameDict = {}
self.featureIdDict={}
- self.maxNameId = 0
+ self.maxNameId = 1
self.maxFeatureId = 0
self.featureDict = {}
self.dependenciesDict = {}
@@ -30,6 +30,9 @@
self.expandedAccessibles = {}
self.accFile = ''
self.changed = True
+        # Reserve id 0 for unnamed facts, e.g. proofs learned without a fact name
+ self.nameIdDict[''] = 0
+ self.idNameDict[0] = 'Unnamed Fact'
"""
Init functions. nameIdDict, idNameDict, featureIdDict, articleDict get filled!
@@ -153,13 +156,18 @@
name = line[0].strip()
nameId = self.get_name_id(name)
line = line[1].split(';')
- # Accessible Ids
- #unExpAcc = [self.nameIdDict[a.strip()] for a in line[0].split()]
- #self.accessibleDict[nameId] = unExpAcc
- self.accessibleDict[nameId] = self.parse_unExpAcc(line[0])
features = self.get_features(line)
self.featureDict[nameId] = features
- self.dependenciesDict[nameId] = [self.nameIdDict[d.strip()] for d in line[2].split()]
+            try:
+                self.dependenciesDict[nameId] = [self.nameIdDict[d.strip()] for d in line[2].split()]
+            except KeyError:
+                unknownDeps = []
+                for d in line[2].split():
+                    if not self.nameIdDict.has_key(d):
+                        unknownDeps.append(d)
+                raise LookupError('Unknown fact(s) used as dependency: %s. Facts must be introduced before they are used as dependencies.' % ','.join(unknownDeps))
+ self.accessibleDict[nameId] = self.parse_unExpAcc(line[0])
+
self.changed = True
return nameId
@@ -173,9 +181,18 @@
# line = name:dependencies
line = line.split(':')
name = line[0].strip()
- nameId = self.get_name_id(name)
-
- dependencies = [self.nameIdDict[d.strip()] for d in line[1].split()]
+        try:
+            nameId = self.nameIdDict[name]
+        except KeyError:
+            raise LookupError('Cannot overwrite dependencies of unknown fact: %s. Facts must be introduced before their dependencies can be overwritten.' % name)
+        try:
+            dependencies = [self.nameIdDict[d.strip()] for d in line[1].split()]
+        except KeyError:
+            unknownDeps = []
+            for d in line[1].split():
+                if not self.nameIdDict.has_key(d):
+                    unknownDeps.append(d)
+            raise LookupError('Unknown fact(s) used as dependency: %s. Facts must be introduced before they are used as dependencies.' % ','.join(unknownDeps))
self.changed = True
return nameId,dependencies
--- a/src/HOL/Tools/Sledgehammer/MaSh/src/readData.py Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/readData.py Sat Nov 16 18:34:11 2013 +0100
@@ -29,7 +29,10 @@
nameId = nameIdDict[name]
dependenciesIds = [nameIdDict[f.strip()] for f in line[1].split()]
# Store results, add p proves p
- dependenciesDict[nameId] = [nameId] + dependenciesIds
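+        # id 0 is the unnamed fact, which must not be recorded as proving itself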
+ if nameId == 0:
+ dependenciesDict[nameId] = dependenciesIds
+ else:
+ dependenciesDict[nameId] = [nameId] + dependenciesIds
IS.close()
return dependenciesDict
--- a/src/HOL/Tools/Sledgehammer/MaSh/src/server.py Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/server.py Sat Nov 16 18:34:11 2013 +0100
@@ -14,6 +14,7 @@
from sparseNaiveBayes import sparseNBClassifier
from KNN import KNN,euclidean
from KNNs import KNNAdaptPointFeatures,KNNUrban
+#from bayesPlusMetric import sparseNBPlusClassifier
from predefined import Predefined
from ExpandFeatures import ExpandFeatures
from stats import Statistics
@@ -58,15 +59,28 @@
else:
argv = argv.split(';')
self.server.args = init_parser(argv)
+
+ # Set up logging
+ logging.basicConfig(level=logging.DEBUG,
+ format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
+ datefmt='%d-%m %H:%M:%S',
+ filename=self.server.args.log+'server',
+ filemode='w')
+ self.server.logger = logging.getLogger('server')
+
# Load all data
self.server.dicts = Dictionaries()
if os.path.isfile(self.server.args.dictsFile):
- self.server.dicts.load(self.server.args.dictsFile)
+ self.server.dicts.load(self.server.args.dictsFile)
+ #elif not self.server.args.dictsFile == '../tmp/dict.pickle':
+ # raise IOError('Cannot find dictsFile at %s '% self.server.args.dictsFile)
elif self.server.args.init:
self.server.dicts.init_all(self.server.args)
# Pick model
if self.server.args.algorithm == 'nb':
- self.server.model = sparseNBClassifier(self.server.args.NBDefaultPriorWeight,self.server.args.NBPosWeight,self.server.args.NBDefVal)
+            # TODO: switch to the experimental sparseNBPlusClassifier (commented out below)
+ self.server.model = sparseNBClassifier(self.server.args.NBDefaultPriorWeight,self.server.args.NBPosWeight,self.server.args.NBDefVal)
+ #self.server.model = sparseNBPlusClassifier(self.server.args.NBDefaultPriorWeight,self.server.args.NBPosWeight,self.server.args.NBDefVal)
elif self.server.args.algorithm == 'KNN':
#self.server.model = KNN(self.server.dicts)
self.server.model = KNNAdaptPointFeatures(self.server.dicts)
@@ -80,6 +94,8 @@
# Create Model
if os.path.isfile(self.server.args.modelFile):
self.server.model.load(self.server.args.modelFile)
+ #elif not self.server.args.modelFile == '../tmp/model.pickle':
+ # raise IOError('Cannot find modelFile at %s '% self.server.args.modelFile)
elif self.server.args.init:
trainData = self.server.dicts.featureDict.keys()
self.server.model.initializeModel(trainData,self.server.dicts)
@@ -89,13 +105,6 @@
self.server.statementCounter = 1
self.server.computeStats = False
- # Set up logging
- logging.basicConfig(level=logging.DEBUG,
- format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
- datefmt='%d-%m %H:%M:%S',
- filename=self.server.args.log+'server',
- filemode='w')
- self.server.logger = logging.getLogger('server')
self.server.logger.debug('Initialized in '+str(round(time()-self.startTime,2))+' seconds.')
self.request.sendall('Server initialized in '+str(round(time()-self.startTime,2))+' seconds.')
self.server.callCounter = 1
@@ -117,8 +126,11 @@
if self.server.args.expandFeatures:
self.server.expandFeatures.update(self.server.dicts.featureDict[problemId],self.server.dicts.dependenciesDict[problemId])
# Update Dependencies, p proves p
- self.server.dicts.dependenciesDict[problemId] = [problemId]+self.server.dicts.dependenciesDict[problemId]
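+            # the unnamed fact (id 0) never proves itself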
+ if not problemId == 0:
+ self.server.dicts.dependenciesDict[problemId] = [problemId]+self.server.dicts.dependenciesDict[problemId]
+            # TODO: pass dicts to model.update once sparseNBPlusClassifier is enabled (cf. the commented-out call below)
self.server.model.update(problemId,self.server.dicts.featureDict[problemId],self.server.dicts.dependenciesDict[problemId])
+ #self.server.model.update(problemId,self.server.dicts.featureDict[problemId],self.server.dicts.dependenciesDict[problemId],self.server.dicts)
def overwrite(self):
# Overwrite old proof.
--- a/src/HOL/Tools/Sledgehammer/MaSh/src/sparseNaiveBayes.py Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/MaSh/src/sparseNaiveBayes.py Sat Nov 16 18:34:11 2013 +0100
@@ -41,10 +41,11 @@
self.counts[d] = [self.defaultPriorWeight,dFeatureCounts]
for key,keyDeps in dicts.dependenciesDict.iteritems():
+ keyFeatures = dicts.featureDict[key]
for dep in keyDeps:
self.counts[dep][0] += 1
- depFeatures = dicts.featureDict[key]
- for f in depFeatures.iterkeys():
+ for f in keyFeatures.iterkeys():
if self.counts[dep][1].has_key(f):
self.counts[dep][1][f] += 1
else:
@@ -55,7 +56,7 @@
"""
Updates the Model.
"""
- if not self.counts.has_key(dataPoint):
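+        # never create a model entry for the unnamed fact (id 0)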
+ if (not self.counts.has_key(dataPoint)) and (not dataPoint == 0):
dFeatureCounts = {}
# Give p |- p a higher weight
if not self.defaultPriorWeight == 0:
@@ -86,7 +87,10 @@
"""
Deletes the old dependencies of problemId and replaces them with the new ones. Updates the model accordingly.
"""
- assert self.counts.has_key(problemId)
+        if not self.counts.has_key(problemId):
+            raise LookupError('Cannot overwrite dependencies of unknown fact: %s. Facts must be introduced before their dependencies can be overwritten.' % dicts.idNameDict[problemId])
oldDeps = dicts.dependenciesDict[problemId]
features = dicts.featureDict[problemId]
self.delete(problemId,features,oldDeps)
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML Sat Nov 16 18:34:11 2013 +0100
@@ -216,10 +216,6 @@
val unencode_strs =
space_explode " " #> filter_out (curry (op =) "") #> map unencode_str
-fun freshish_name () =
- Date.fmt ".%Y%m%d_%H%M%S__" (Date.fromTimeLocal (Time.now ())) ^
- serial_string ()
-
(* Avoid scientific notation *)
fun safe_str_of_real r =
if r < 0.00001 then "0.00001"
@@ -282,10 +278,11 @@
fun learn _ _ _ [] = ()
| learn ctxt overlord save learns =
- (trace_msg ctxt (fn () => "MaSh learn " ^
- elide_string 1000 (space_implode " " (map #1 learns)));
- run_mash_tool ctxt overlord ([] |> save ? cons save_models_arg) false
- (learns, str_of_learn) (K ()))
+ let val names = elide_string 1000 (space_implode " " (map #1 learns)) in
+ (trace_msg ctxt (fn () => "MaSh learn" ^ (if names = "" then "" else " " ^ names));
+ run_mash_tool ctxt overlord ([] |> save ? cons save_models_arg) false
+ (learns, str_of_learn) (K ()))
+ end
fun relearn _ _ _ [] = ()
| relearn ctxt overlord save relearns =
@@ -1026,7 +1023,6 @@
launch_thread (timeout |> the_default one_day) (fn () =>
let
val thy = Proof_Context.theory_of ctxt
- val name = freshish_name ()
val feats = features_of ctxt thy 0 Symtab.empty (Local, General) [t] |> map fst
in
peek_state ctxt overlord (fn {access_G, ...} =>
@@ -1036,7 +1032,7 @@
used_ths |> filter (is_fact_in_graph access_G)
|> map nickname_of_thm
in
- MaSh.learn ctxt overlord true [(name, parents, feats, deps)]
+ MaSh.learn ctxt overlord true [("", parents, feats, deps)]
end);
(true, "")
end)
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_provers.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_provers.ML Sat Nov 16 18:34:11 2013 +0100
@@ -505,9 +505,9 @@
them each time. *)
val atp_important_message_keep_quotient = 25
-fun choose_type_enc soundness best_type_enc format =
+fun choose_type_enc strictness best_type_enc format =
the_default best_type_enc
- #> type_enc_of_string soundness
+ #> type_enc_of_string strictness
#> adjust_type_enc format
fun isar_proof_reconstructor ctxt name =
@@ -663,9 +663,9 @@
Real.ceil (max_fact_factor * Real.fromInt best_max_facts) +
max_fact_factor_fudge
|> Integer.min (length facts)
- val soundness = if strict then Strict else Non_Strict
+ val strictness = if strict then Strict else Non_Strict
val type_enc =
- type_enc |> choose_type_enc soundness best_type_enc format
+ type_enc |> choose_type_enc strictness best_type_enc format
val sound = is_type_enc_sound type_enc
val real_ms = Real.fromInt o Time.toMilliseconds
val slice_timeout =
--- a/src/HOL/Tools/ctr_sugar.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/ctr_sugar.ML Sat Nov 16 18:34:11 2013 +0100
@@ -210,16 +210,16 @@
fun mk_ctr Ts t =
let val Type (_, Ts0) = body_type (fastype_of t) in
- Term.subst_atomic_types (Ts0 ~~ Ts) t
+ subst_nonatomic_types (Ts0 ~~ Ts) t
end;
fun mk_case Ts T t =
let val (Type (_, Ts0), body) = strip_type (fastype_of t) |>> List.last in
- Term.subst_atomic_types ((body, T) :: (Ts0 ~~ Ts)) t
+ subst_nonatomic_types ((body, T) :: (Ts0 ~~ Ts)) t
end;
fun mk_disc_or_sel Ts t =
- Term.subst_atomic_types (snd (Term.dest_Type (domain_type (fastype_of t))) ~~ Ts) t;
+ subst_nonatomic_types (snd (Term.dest_Type (domain_type (fastype_of t))) ~~ Ts) t;
fun name_of_const what t =
(case head_of t of
--- a/src/HOL/Tools/ctr_sugar_util.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/ctr_sugar_util.ML Sat Nov 16 18:34:11 2013 +0100
@@ -36,6 +36,9 @@
(string * sort) list * Proof.context
val variant_tfrees: string list -> Proof.context -> typ list * Proof.context
+ val typ_subst_nonatomic: (typ * typ) list -> typ -> typ
+ val subst_nonatomic_types: (typ * typ) list -> term -> term
+
val mk_predT: typ list -> typ
val mk_pred1T: typ -> typ
@@ -143,6 +146,17 @@
apfst (map TFree) o
variant_types (map (ensure_prefix "'") ss) (replicate (length ss) HOLogic.typeS);
+(*Replace each Ti by Ui (starting from the leaves); inst = [(T1, U1), ..., (Tn, Un)].*)
+fun typ_subst_nonatomic [] = I
+ | typ_subst_nonatomic inst =
+ let
+ fun subst (Type (s, Ts)) = perhaps (AList.lookup (op =) inst) (Type (s, map subst Ts))
+ | subst T = perhaps (AList.lookup (op =) inst) T;
+ in subst end;
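+(*e.g. with inst = [(T1, U1)] = [("nat list", "int")], the type "nat list list"
+  becomes "int list": the inner "nat list" is rewritten first, and no rule
+  matches the resulting "int list"*)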
+
+fun subst_nonatomic_types [] = I
+ | subst_nonatomic_types inst = map_types (typ_subst_nonatomic inst);
+
fun mk_predT Ts = Ts ---> HOLogic.boolT;
fun mk_pred1T T = mk_predT [T];
--- a/src/HOL/Tools/try0.ML Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/Tools/try0.ML Sat Nov 16 18:34:11 2013 +0100
@@ -110,8 +110,9 @@
let
val st = st |> Proof.map_context (Config.put Metis_Tactic.verbose false #>
Config.put Lin_Arith.verbose false)
+ fun trd (_, _, t) = t
fun par_map f =
- if mode = Normal then Par_List.map f #> map_filter I #> sort (int_ord o pairself #3)
+ if mode = Normal then Par_List.map f #> map_filter I #> sort (int_ord o pairself trd)
else Par_List.get_some f #> the_list
in
if mode = Normal then
--- a/src/HOL/ex/Coercion_Examples.thy Sat Nov 16 18:31:30 2013 +0100
+++ b/src/HOL/ex/Coercion_Examples.thy Sat Nov 16 18:34:11 2013 +0100
@@ -37,10 +37,9 @@
(* Coercion/type maps definitions *)
-primrec nat_of_bool :: "bool \<Rightarrow> nat"
+abbreviation nat_of_bool :: "bool \<Rightarrow> nat"
where
- "nat_of_bool False = 0"
-| "nat_of_bool True = 1"
+ "nat_of_bool \<equiv> of_bool"
declare [[coercion nat_of_bool]]
@@ -201,5 +200,5 @@
declare [[coercion_args uminus -]]
declare [[coercion_args plus + +]]
term "- (n + m)"
-
+
end