--- a/NEWS Thu Sep 07 23:13:15 2017 +0200
+++ b/NEWS Fri Sep 08 00:01:36 2017 +0200
@@ -134,6 +134,8 @@
*** HOL ***
+* The Nunchaku model finder is now part of "Main".
+
* SMT module:
- A new option, 'smt_nat_as_int', has been added to translate 'nat' to
'int' and benefit from the SMT solver's theory reasoning. It is
@@ -569,7 +571,7 @@
quantifier-free propositional logic with linear real arithmetic
including min/max/abs. See HOL/ex/Argo_Examples.thy for examples.
-* The new "nunchaku" program integrates the Nunchaku model finder. The
+* The new "nunchaku" command integrates the Nunchaku model finder. The
tool is experimental. See ~~/src/HOL/Nunchaku/Nunchaku.thy for details.
* Metis: The problem encoding has changed very slightly. This might
--- a/src/HOL/Main.thy Thu Sep 07 23:13:15 2017 +0200
+++ b/src/HOL/Main.thy Fri Sep 08 00:01:36 2017 +0200
@@ -6,7 +6,7 @@
\<close>
theory Main
-imports Predicate_Compile Quickcheck_Narrowing Extraction Nitpick BNF_Greatest_Fixpoint Filter Conditionally_Complete_Lattices Binomial GCD
+imports Predicate_Compile Quickcheck_Narrowing Extraction Nunchaku BNF_Greatest_Fixpoint Filter Conditionally_Complete_Lattices Binomial GCD
begin
text \<open>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Nunchaku.thy Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,45 @@
+(* Title: HOL/Nunchaku.thy
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Nunchaku: Yet another counterexample generator for Isabelle/HOL.
+
+Nunchaku relies on an external program of the same name. The sources are
+available at
+
+ https://github.com/nunchaku-inria
+
+The "$NUNCHAKU_HOME" environment variable must be set to the absolute path to
+the directory containing the "nunchaku" executable. The Isabelle components
+for CVC4 and Kodkodi are necessary to use these backend solvers.
+*)
+
+theory Nunchaku
+imports Nitpick
+keywords
+ "nunchaku" :: diag and
+ "nunchaku_params" :: thy_decl
+begin
+
+consts unreachable :: 'a
+
+definition The_unsafe :: "('a \<Rightarrow> bool) \<Rightarrow> 'a" where
+ "The_unsafe = The"
+
+definition rmember :: "'a set \<Rightarrow> 'a \<Rightarrow> bool" where
+ "rmember A x \<longleftrightarrow> x \<in> A"
+
+ML_file "Tools/Nunchaku/nunchaku_util.ML"
+ML_file "Tools/Nunchaku/nunchaku_collect.ML"
+ML_file "Tools/Nunchaku/nunchaku_problem.ML"
+ML_file "Tools/Nunchaku/nunchaku_translate.ML"
+ML_file "Tools/Nunchaku/nunchaku_model.ML"
+ML_file "Tools/Nunchaku/nunchaku_reconstruct.ML"
+ML_file "Tools/Nunchaku/nunchaku_display.ML"
+ML_file "Tools/Nunchaku/nunchaku_tool.ML"
+ML_file "Tools/Nunchaku/nunchaku.ML"
+ML_file "Tools/Nunchaku/nunchaku_commands.ML"
+
+hide_const (open) unreachable The_unsafe rmember
+
+end
--- a/src/HOL/Nunchaku/Nunchaku.thy Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-(* Title: HOL/Nunchaku/Nunchaku.thy
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Nunchaku: Yet another counterexample generator for Isabelle/HOL.
-
-Nunchaku relies on an external program of the same name. The program is still
-being actively developed. The sources are available at
-
- https://github.com/nunchaku-inria
-
-The "$NUNCHAKU_HOME" environment variable must be set to the absolute path to
-the directory containing the "nunchaku" executable. The Isabelle components
-for CVC4 and Kodkodi are necessary to use these backend solvers.
-*)
-
-theory Nunchaku
-imports Main
-keywords
- "nunchaku" :: diag and
- "nunchaku_params" :: thy_decl
-begin
-
-consts unreachable :: 'a
-
-definition The_unsafe :: "('a \<Rightarrow> bool) \<Rightarrow> 'a" where
- "The_unsafe = The"
-
-definition rmember :: "'a set \<Rightarrow> 'a \<Rightarrow> bool" where
- "rmember A x \<longleftrightarrow> x \<in> A"
-
-ML_file "Tools/nunchaku_util.ML"
-ML_file "Tools/nunchaku_collect.ML"
-ML_file "Tools/nunchaku_problem.ML"
-ML_file "Tools/nunchaku_translate.ML"
-ML_file "Tools/nunchaku_model.ML"
-ML_file "Tools/nunchaku_reconstruct.ML"
-ML_file "Tools/nunchaku_display.ML"
-ML_file "Tools/nunchaku_tool.ML"
-ML_file "Tools/nunchaku.ML"
-ML_file "Tools/nunchaku_commands.ML"
-
-hide_const (open) unreachable The_unsafe rmember
-
-end
--- a/src/HOL/Nunchaku/Tools/nunchaku.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,327 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-The core of the Nunchaku integration in Isabelle.
-*)
-
-signature NUNCHAKU =
-sig
- type isa_model = Nunchaku_Reconstruct.isa_model
-
- datatype mode = Auto_Try | Try | Normal
-
- type mode_of_operation_params =
- {solvers: string list,
- falsify: bool,
- assms: bool,
- spy: bool,
- overlord: bool,
- expect: string}
-
- type scope_of_search_params =
- {wfs: ((string * typ) option * bool option) list,
- whacks: (term option * bool) list,
- cards: (typ option * (int option * int option)) list,
- monos: (typ option * bool option) list}
-
- type output_format_params =
- {verbose: bool,
- debug: bool,
- max_potential: int,
- max_genuine: int,
- evals: term list,
- atomss: (typ option * string list) list}
-
- type optimization_params =
- {specialize: bool,
- multithread: bool}
-
- type timeout_params =
- {timeout: Time.time,
- wf_timeout: Time.time}
-
- type params =
- {mode_of_operation_params: mode_of_operation_params,
- scope_of_search_params: scope_of_search_params,
- output_format_params: output_format_params,
- optimization_params: optimization_params,
- timeout_params: timeout_params}
-
- val genuineN: string
- val quasi_genuineN: string
- val potentialN: string
- val noneN: string
- val unknownN: string
- val no_nunchakuN: string
-
- val run_chaku_on_prop: Proof.state -> params -> mode -> int -> term list -> term ->
- string * isa_model option
- val run_chaku_on_subgoal: Proof.state -> params -> mode -> int -> string * isa_model option
-end;
-
-structure Nunchaku : NUNCHAKU =
-struct
-
-open Nunchaku_Util;
-open Nunchaku_Collect;
-open Nunchaku_Problem;
-open Nunchaku_Translate;
-open Nunchaku_Model;
-open Nunchaku_Reconstruct;
-open Nunchaku_Display;
-open Nunchaku_Tool;
-
-datatype mode = Auto_Try | Try | Normal;
-
-type mode_of_operation_params =
- {solvers: string list,
- falsify: bool,
- assms: bool,
- spy: bool,
- overlord: bool,
- expect: string};
-
-type scope_of_search_params =
- {wfs: ((string * typ) option * bool option) list,
- whacks: (term option * bool) list,
- cards: (typ option * (int option * int option)) list,
- monos: (typ option * bool option) list};
-
-type output_format_params =
- {verbose: bool,
- debug: bool,
- max_potential: int,
- max_genuine: int,
- evals: term list,
- atomss: (typ option * string list) list};
-
-type optimization_params =
- {specialize: bool,
- multithread: bool};
-
-type timeout_params =
- {timeout: Time.time,
- wf_timeout: Time.time};
-
-type params =
- {mode_of_operation_params: mode_of_operation_params,
- scope_of_search_params: scope_of_search_params,
- output_format_params: output_format_params,
- optimization_params: optimization_params,
- timeout_params: timeout_params};
-
-val genuineN = "genuine";
-val quasi_genuineN = "quasi_genuine";
-val potentialN = "potential";
-val noneN = "none";
-val unknownN = "unknown";
-
-val no_nunchakuN = "no_nunchaku";
-
-fun str_of_mode Auto_Try = "Auto_Try"
- | str_of_mode Try = "Try"
- | str_of_mode Normal = "Normal";
-
-fun none_true assigns = forall (curry (op <>) (SOME true) o snd) assigns;
-
-fun has_lonely_bool_var (@{const Pure.conjunction} $ (@{const Trueprop} $ Free _) $ _) = true
- | has_lonely_bool_var _ = false;
-
-val syntactic_sorts =
- @{sort "{default,zero,one,plus,minus,uminus,times,inverse,abs,sgn,ord,equal}"} @ @{sort numeral};
-
-fun has_tfree_syntactic_sort (TFree (_, S as _ :: _)) = subset (op =) (S, syntactic_sorts)
- | has_tfree_syntactic_sort _ = false;
-
-val has_syntactic_sorts = exists_type (exists_subtype has_tfree_syntactic_sort);
-
-(* Give the soft timeout a chance. *)
-val timeout_slack = seconds 1.0;
-
-fun run_chaku_on_prop state
- ({mode_of_operation_params = {solvers, falsify, assms, spy, overlord, expect},
- scope_of_search_params = {wfs, whacks, cards, monos},
- output_format_params = {verbose, debug, evals, atomss, ...},
- optimization_params = {specialize, ...},
- timeout_params = {timeout, wf_timeout}})
- mode i all_assms subgoal =
- let
- val ctxt = Proof.context_of state;
-
- val timer = Timer.startRealTimer ()
-
- val print = writeln;
- val print_n = if mode = Normal then writeln else K ();
- fun print_v f = if verbose then writeln (f ()) else ();
- fun print_d f = if debug then writeln (f ()) else ();
-
- val das_wort_Model = if falsify then "Countermodel" else "Model";
- val das_wort_model = if falsify then "countermodel" else "model";
-
- val tool_params =
- {solvers = solvers, overlord = overlord, debug = debug, specialize = specialize,
- timeout = timeout};
-
- fun run () =
- let
- val outcome as (outcome_code, _) =
- let
- val (poly_axioms, isa_problem as {sound, complete, ...}) =
- isa_problem_of_subgoal ctxt falsify wfs whacks cards debug wf_timeout evals
- (if assms then all_assms else []) subgoal;
- val _ = print_d (fn () => "*** Isabelle problem ***\n" ^
- str_of_isa_problem ctxt isa_problem);
- val ugly_nun_problem = nun_problem_of_isa ctxt isa_problem;
- val _ = print_d (fn () => "*** Ugly Nunchaku problem ***\n" ^
- str_of_nun_problem ugly_nun_problem);
- val (nice_nun_problem, pool) = nice_nun_problem ugly_nun_problem;
- val _ = print_d (fn () => "*** Nice Nunchaku problem ***\n" ^
- str_of_nun_problem nice_nun_problem);
-
- fun print_any_hints () =
- if has_lonely_bool_var subgoal then
- print "Hint: Maybe you forgot a colon after the lemma's name?"
- else if has_syntactic_sorts subgoal then
- print "Hint: Maybe you forgot a type constraint?"
- else
- ();
-
- fun get_isa_model_opt output =
- let
- val nice_nun_model = nun_model_of_str output;
- val _ = print_d (fn () => "*** Nice Nunchaku model ***\n" ^
- str_of_nun_model nice_nun_model);
- val ugly_nun_model = ugly_nun_model pool nice_nun_model;
- val _ = print_d (fn () => "*** Ugly Nunchaku model ***\n" ^
- str_of_nun_model ugly_nun_model);
-
- val pat_completes = pat_completes_of_isa_problem isa_problem;
- val isa_model = isa_model_of_nun ctxt pat_completes atomss ugly_nun_model;
- val _ = print_d (fn () => "*** Isabelle model ***\n" ^
- str_of_isa_model ctxt isa_model);
- in
- isa_model
- end;
-
- fun isa_model_opt output =
- if debug then SOME (get_isa_model_opt output) else try get_isa_model_opt output;
-
- val model_str = isa_model_opt #> pretty_of_isa_model_opt ctxt #> Pretty.string_of;
-
- fun unsat_means_theorem () =
- null whacks andalso null cards andalso null monos;
-
- fun unknown () =
- (print_n ("No " ^ das_wort_model ^ " can be found\n\
- \The problem lies outside Nunchaku's fragment, or the Nunchaku backends are not \
- \installed properly");
- (unknownN, NONE));
-
- fun unsat_or_unknown complete =
- if complete then
- (print_n ("No " ^ das_wort_model ^ " exists" ^
- (if falsify andalso unsat_means_theorem () then "\nThe goal is a theorem"
- else ""));
- (noneN, NONE))
- else
- unknown ();
-
- fun sat_or_maybe_sat sound output =
- let val header = if sound then das_wort_Model else "Potential " ^ das_wort_model in
- (case (null poly_axioms, none_true wfs) of
- (true, true) =>
- (print (header ^ ":\n" ^
- model_str output); print_any_hints ();
- (genuineN, isa_model_opt output))
- | (no_poly, no_wf) =>
- let
- val ignorings = []
- |> not no_poly ? cons "polymorphic axioms"
- |> not no_wf ? cons "unchecked well-foundedness";
- in
- (print (header ^ " (ignoring " ^ space_implode " and " ignorings ^ "):\n" ^
- model_str output ^
- (if no_poly then
- ""
- else
- "\nIgnored axioms:\n" ^
- cat_lines (map (prefix " " o Syntax.string_of_term ctxt) poly_axioms)));
- print_any_hints ();
- (quasi_genuineN, isa_model_opt output))
- end)
- end;
- in
- (case solve_nun_problem tool_params nice_nun_problem of
- Unsat => unsat_or_unknown complete
- | Sat (output, _) => sat_or_maybe_sat sound output
- | Unknown NONE => unknown ()
- | Unknown (SOME (output, _)) => sat_or_maybe_sat false output
- | Timeout => (print_n "Time out"; (unknownN, NONE))
- | Nunchaku_Var_Not_Set =>
- (print_n ("Variable $" ^ nunchaku_home_env_var ^ " not set"); (unknownN, NONE))
- | Nunchaku_Cannot_Execute =>
- (print_n "External tool \"nunchaku\" cannot execute"; (unknownN, NONE))
- | Nunchaku_Not_Found =>
- (print_n "External tool \"nunchaku\" not found"; (unknownN, NONE))
- | CVC4_Cannot_Execute =>
- (print_n "External tool \"cvc4\" cannot execute"; (unknownN, NONE))
- | CVC4_Not_Found => (print_n "External tool \"cvc4\" not found"; (unknownN, NONE))
- | Unknown_Error (code, msg) =>
- (print_n ("Unknown error: " ^ msg ^
- (if code = 0 then "" else " (code " ^ string_of_int code ^ ")"));
- (unknownN, NONE)))
- end
- handle
- CYCLIC_DEPS () =>
- (print_n "Cyclic dependencies (or bug in Nunchaku)"; (unknownN, NONE))
- | TOO_DEEP_DEPS () =>
- (print_n "Too deep dependencies (or bug in Nunchaku)"; (unknownN, NONE))
- | TOO_META t =>
- (print_n ("Formula too meta for Nunchaku:\n" ^ Syntax.string_of_term ctxt t);
- (unknownN, NONE))
- | UNEXPECTED_POLYMORPHISM t =>
- (print_n ("Unexpected polymorphism in term\n" ^ Syntax.string_of_term ctxt t);
- (unknownN, NONE))
- | UNEXPECTED_VAR t =>
- (print_n ("Unexpected schematic variables in term\n" ^ Syntax.string_of_term ctxt t);
- (unknownN, NONE))
- | UNSUPPORTED_FUNC t =>
- (print_n ("Unsupported low-level constant in problem: " ^ Syntax.string_of_term ctxt t);
- (unknownN, NONE));
- in
- if expect = "" orelse outcome_code = expect then outcome
- else error ("Unexpected outcome: " ^ quote outcome_code)
- end;
-
- val _ = spying spy (fn () => (state, i, "starting " ^ str_of_mode mode ^ " mode"));
-
- val outcome as (outcome_code, _) =
- Timeout.apply (Time.+ (timeout, timeout_slack)) run ()
- handle Timeout.TIMEOUT _ => (print_n "Time out"; (unknownN, NONE));
-
- val _ = print_v (fn () => "Total time: " ^ string_of_time (Timer.checkRealTimer timer));
-
- val _ = spying spy (fn () => (state, i, "outcome: " ^ outcome_code));
- in
- if expect = "" orelse outcome_code = expect then outcome
- else error ("Unexpected outcome: " ^ quote outcome_code)
- end;
-
-fun run_chaku_on_subgoal state params mode i =
- let
- val ctxt = Proof.context_of state;
- val goal = Thm.prop_of (#goal (Proof.raw_goal state));
- in
- if Logic.count_prems goal = 0 then
- (writeln "No subgoal!"; (noneN, NONE))
- else
- let
- val subgoal = fst (Logic.goal_params goal i);
- val all_assms = map Thm.term_of (Assumption.all_assms_of ctxt);
- in
- run_chaku_on_prop state params mode i all_assms subgoal
- end
- end;
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_collect.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1119 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_collect.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Collecting of Isabelle/HOL definitions etc. for Nunchaku.
-*)
-
-signature NUNCHAKU_COLLECT =
-sig
- val dest_co_datatype_case: Proof.context -> string * typ -> (string * typ) list
-
- type isa_type_spec =
- {abs_typ: typ,
- rep_typ: typ,
- wrt: term,
- abs: term,
- rep: term}
-
- type isa_co_data_spec =
- {typ: typ,
- ctrs: term list}
-
- type isa_const_spec =
- {const: term,
- props: term list}
-
- type isa_rec_spec =
- {const: term,
- props: term list,
- pat_complete: bool}
-
- type isa_consts_spec =
- {consts: term list,
- props: term list}
-
- datatype isa_command =
- ITVal of typ * (int option * int option)
- | ITypedef of isa_type_spec
- | IQuotient of isa_type_spec
- | ICoData of BNF_Util.fp_kind * isa_co_data_spec list
- | IVal of term
- | ICoPred of BNF_Util.fp_kind * bool * isa_const_spec list
- | IRec of isa_rec_spec list
- | ISpec of isa_consts_spec
- | IAxiom of term
- | IGoal of term
- | IEval of term
-
- type isa_problem =
- {commandss: isa_command list list,
- sound: bool,
- complete: bool}
-
- exception CYCLIC_DEPS of unit
- exception TOO_DEEP_DEPS of unit
- exception TOO_META of term
- exception UNEXPECTED_POLYMORPHISM of term
- exception UNEXPECTED_VAR of term
- exception UNSUPPORTED_FUNC of term
-
- val isa_problem_of_subgoal: Proof.context -> bool -> ((string * typ) option * bool option) list ->
- (term option * bool) list -> (typ option * (int option * int option)) list -> bool ->
- Time.time -> term list -> term list -> term -> term list * isa_problem
- val pat_completes_of_isa_problem: isa_problem -> term list
- val str_of_isa_problem: Proof.context -> isa_problem -> string
-end;
-
-structure Nunchaku_Collect : NUNCHAKU_COLLECT =
-struct
-
-open Nunchaku_Util;
-
-type isa_type_spec =
- {abs_typ: typ,
- rep_typ: typ,
- wrt: term,
- abs: term,
- rep: term};
-
-type isa_co_data_spec =
- {typ: typ,
- ctrs: term list};
-
-type isa_const_spec =
- {const: term,
- props: term list};
-
-type isa_rec_spec =
- {const: term,
- props: term list,
- pat_complete: bool};
-
-type isa_consts_spec =
- {consts: term list,
- props: term list};
-
-datatype isa_command =
- ITVal of typ * (int option * int option)
-| ITypedef of isa_type_spec
-| IQuotient of isa_type_spec
-| ICoData of BNF_Util.fp_kind * isa_co_data_spec list
-| IVal of term
-| ICoPred of BNF_Util.fp_kind * bool * isa_const_spec list
-| IRec of isa_rec_spec list
-| ISpec of isa_consts_spec
-| IAxiom of term
-| IGoal of term
-| IEval of term;
-
-type isa_problem =
- {commandss: isa_command list list,
- sound: bool,
- complete: bool};
-
-exception CYCLIC_DEPS of unit;
-exception TOO_DEEP_DEPS of unit;
-exception TOO_META of term;
-exception UNEXPECTED_POLYMORPHISM of term;
-exception UNEXPECTED_VAR of term;
-exception UNSUPPORTED_FUNC of term;
-
-fun str_of_and_list str_of_elem =
- map str_of_elem #> space_implode ("\nand ");
-
-val key_of_typ =
- let
- fun key_of (Type (s, [])) = s
- | key_of (Type (s, Ts)) = s ^ "(" ^ commas (map key_of Ts) ^ ")"
- | key_of (TFree (s, _)) = s;
- in
- prefix "y" o key_of
- end;
-
-fun key_of_const ctxt =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- fun key_of (Const (x as (s, _))) =
- (case Sign.const_typargs thy x of
- [] => s
- | Ts => s ^ "(" ^ commas (map key_of_typ Ts) ^ ")")
- | key_of (Free (s, _)) = s;
- in
- prefix "t" o key_of
- end;
-
-val add_type_keys = fold_subtypes (insert (op =) o key_of_typ);
-
-fun add_aterm_keys ctxt t =
- if is_Const t orelse is_Free t then insert (op =) (key_of_const ctxt t) else I;
-
-fun add_keys ctxt t =
- fold_aterms (add_aterm_keys ctxt) t
- #> fold_types add_type_keys t;
-
-fun close_form except t =
- fold (fn ((s, i), T) => fn t' =>
- HOLogic.all_const T $ Abs (s, T, abstract_over (Var ((s, i), T), t')))
- (Term.add_vars t [] |> subtract (op =) except) t;
-
-(* "imp_conjL[symmetric]" is important for inductive predicates with multiple assumptions. *)
-val basic_defs =
- @{thms Ball_def[abs_def] Bex_def[abs_def] case_bool_if Ex1_def[abs_def]
- imp_conjL[symmetric, abs_def] Let_def[abs_def] rmember_def[symmetric, abs_def]};
-
-fun unfold_basic_def ctxt =
- let val thy = Proof_Context.theory_of ctxt in
- Pattern.rewrite_term thy (map (Logic.dest_equals o Thm.prop_of) basic_defs) []
- end;
-
-val has_polymorphism = exists_type (exists_subtype is_TVar);
-
-fun whack_term thy whacks =
- let
- fun whk t =
- if triple_lookup (term_match thy o swap) whacks t = SOME true then
- Const (@{const_name unreachable}, fastype_of t)
- else
- (case t of
- u $ v => whk u $ whk v
- | Abs (s, T, u) => Abs (s, T, whk u)
- | _ => t);
- in
- whk
- end;
-
-fun preprocess_term_basic falsify ctxt whacks t =
- let val thy = Proof_Context.theory_of ctxt in
- if has_polymorphism t then
- raise UNEXPECTED_POLYMORPHISM t
- else
- t
- |> attach_typeS
- |> whack_term thy whacks
- |> Object_Logic.atomize_term ctxt
- |> tap (fn t' => fastype_of t' <> @{typ prop} orelse raise TOO_META t)
- |> falsify ? HOLogic.mk_not
- |> unfold_basic_def ctxt
- end;
-
-val check_closed = tap (fn t => null (Term.add_vars t []) orelse raise UNEXPECTED_VAR t);
-
-val preprocess_prop = close_form [] oooo preprocess_term_basic;
-val preprocess_closed_term = check_closed ooo preprocess_term_basic false;
-
-val is_type_builtin = member (op =) [@{type_name bool}, @{type_name fun}];
-
-val is_const_builtin =
- member (op =) [@{const_name All}, @{const_name conj}, @{const_name disj}, @{const_name Eps},
- @{const_name HOL.eq}, @{const_name Ex}, @{const_name False}, @{const_name If},
- @{const_name implies}, @{const_name Not}, @{const_name The}, @{const_name The_unsafe},
- @{const_name True}];
-
-datatype type_classification = Builtin | TVal | Typedef | Quotient | Co_Datatype;
-
-fun classify_type_name ctxt T_name =
- if is_type_builtin T_name then
- Builtin
- else if T_name = @{type_name itself} then
- Co_Datatype
- else
- (case BNF_FP_Def_Sugar.fp_sugar_of ctxt T_name of
- SOME _ => Co_Datatype
- | NONE =>
- (case Ctr_Sugar.ctr_sugar_of ctxt T_name of
- SOME _ => Co_Datatype
- | NONE =>
- (case Quotient_Info.lookup_quotients ctxt T_name of
- SOME _ => Quotient
- | NONE =>
- if T_name = @{type_name set} then
- Typedef
- else
- (case Typedef.get_info ctxt T_name of
- _ :: _ => Typedef
- | [] => TVal))));
-
-fun fp_kind_of_ctr_sugar_kind Ctr_Sugar.Codatatype = BNF_Util.Greatest_FP
- | fp_kind_of_ctr_sugar_kind _ = BNF_Util.Least_FP;
-
-fun mutual_co_datatypes_of ctxt (T_name, Ts) =
- (if T_name = @{type_name itself} then
- (BNF_Util.Least_FP, [@{typ "'a itself"}], [[@{const Pure.type ('a)}]])
- else
- let
- val (fp, ctr_sugars) =
- (case BNF_FP_Def_Sugar.fp_sugar_of ctxt T_name of
- SOME (fp_sugar as {fp, fp_res = {Ts, ...}, ...}) =>
- (fp,
- (case Ts of
- [_] => [fp_sugar]
- | _ => map (the o BNF_FP_Def_Sugar.fp_sugar_of ctxt o fst o dest_Type) Ts)
- |> map (#ctr_sugar o #fp_ctr_sugar))
- | NONE =>
- (case Ctr_Sugar.ctr_sugar_of ctxt T_name of
- SOME (ctr_sugar as {kind, ...}) =>
- (* Any freely constructed type that is not a codatatype is considered a datatype. This
- is sound (but incomplete) for model finding. *)
- (fp_kind_of_ctr_sugar_kind kind, [ctr_sugar])));
- in
- (fp, map #T ctr_sugars, map #ctrs ctr_sugars)
- end)
- |> @{apply 3(2)} (map ((fn Type (s, _) => Type (s, Ts))))
- |> @{apply 3(3)} (map (map (Ctr_Sugar.mk_ctr Ts)));
-
-fun typedef_of ctxt whacks T_name =
- if T_name = @{type_name set} then
- let
- val A = Logic.varifyT_global @{typ 'a};
- val absT = Type (@{type_name set}, [A]);
- val repT = A --> HOLogic.boolT;
- val pred = K (Abs (Name.uu, repT, @{const True}));
- val abs = Const (@{const_name Collect}, repT --> absT);
- val rep = Const (@{const_name rmember}, absT --> repT);
- in
- (absT, repT, pred, abs, rep)
- end
- else
- (case Typedef.get_info ctxt T_name of
- (* When several entries are returned, it shouldn't matter much which one we take (according to
- Florian Haftmann). The "Logic.varifyT_global" calls are a workaround because these types'
- variables sometimes clash with locally fixed type variables. *)
- ({abs_type, rep_type, Abs_name, Rep_name, ...}, {Rep, ...}) :: _ =>
- let
- val absT = Logic.varifyT_global abs_type;
- val repT = Logic.varifyT_global rep_type;
- val set0 = Thm.prop_of Rep
- |> HOLogic.dest_Trueprop
- |> HOLogic.dest_mem
- |> snd;
- val pred0 = Abs (Name.uu, repT, HOLogic.mk_mem (Bound 0, set0));
- fun pred () = preprocess_prop false ctxt whacks pred0;
- val abs = Const (Abs_name, repT --> absT);
- val rep = Const (Rep_name, absT --> repT);
- in
- (absT, repT, pred, abs, rep)
- end);
-
-fun quotient_of ctxt whacks T_name =
- (case Quotient_Info.lookup_quotients ctxt T_name of
- SOME {equiv_rel = equiv_rel0, qtyp, rtyp, quot_thm, ...} =>
- let
- val _ $ (_ $ _ $ abs $ rep) = Thm.prop_of quot_thm;
- fun equiv_rel () = preprocess_prop false ctxt whacks equiv_rel0;
- in
- (qtyp, rtyp, equiv_rel, abs, rep)
- end);
-
-fun is_co_datatype_ctr ctxt (s, T) =
- (case body_type T of
- Type (fpT_name, Ts) =>
- classify_type_name ctxt fpT_name = Co_Datatype andalso
- let
- val ctrs =
- if fpT_name = @{type_name itself} then
- [Const (@{const_name Pure.type}, @{typ "'a itself"})]
- else
- (case BNF_FP_Def_Sugar.fp_sugar_of ctxt fpT_name of
- SOME {fp_ctr_sugar = {ctr_sugar = {ctrs, ...}, ...}, ...} => ctrs
- | NONE =>
- (case Ctr_Sugar.ctr_sugar_of ctxt fpT_name of
- SOME {ctrs, ...} => ctrs
- | _ => []));
-
- fun is_right_ctr (t' as Const (s', _)) =
- s = s' andalso fastype_of (Ctr_Sugar.mk_ctr Ts t') = T;
- in
- exists is_right_ctr ctrs
- end
- | _ => false);
-
-fun dest_co_datatype_case ctxt (s, T) =
- let val thy = Proof_Context.theory_of ctxt in
- (case strip_fun_type (Sign.the_const_type thy s) of
- (gen_branch_Ts, gen_body_fun_T) =>
- (case gen_body_fun_T of
- Type (@{type_name fun}, [Type (fpT_name, _), _]) =>
- if classify_type_name ctxt fpT_name = Co_Datatype then
- let
- val Type (_, fpTs) = domain_type (funpow (length gen_branch_Ts) range_type T);
- val (ctrs0, Const (case_name, _)) =
- (case BNF_FP_Def_Sugar.fp_sugar_of ctxt fpT_name of
- SOME {fp_ctr_sugar = {ctr_sugar = {ctrs, casex, ...}, ...}, ...} => (ctrs, casex)
- | NONE =>
- (case Ctr_Sugar.ctr_sugar_of ctxt fpT_name of
- SOME {ctrs, casex, ...} => (ctrs, casex)));
- in
- if s = case_name then map (dest_Const o Ctr_Sugar.mk_ctr fpTs) ctrs0
- else raise Fail "non-case"
- end
- else
- raise Fail "non-case"))
- end;
-
-val is_co_datatype_case = can o dest_co_datatype_case;
-
-fun is_quotient_abs ctxt whacks (s, T) =
- (case T of
- Type (@{type_name fun}, [_, Type (absT_name, _)]) =>
- classify_type_name ctxt absT_name = Quotient andalso
- (case quotient_of ctxt whacks absT_name of
- (_, _, _, Const (s', _), _) => s' = s)
- | _ => false);
-
-fun is_quotient_rep ctxt whacks (s, T) =
- (case T of
- Type (@{type_name fun}, [Type (absT_name, _), _]) =>
- classify_type_name ctxt absT_name = Quotient andalso
- (case quotient_of ctxt whacks absT_name of
- (_, _, _, _, Const (s', _)) => s' = s)
- | _ => false);
-
-fun is_maybe_typedef_abs ctxt whacks absT_name s =
- if absT_name = @{type_name set} then
- s = @{const_name Collect}
- else
- (case try (typedef_of ctxt whacks) absT_name of
- SOME (_, _, _, Const (s', _), _) => s' = s
- | NONE => false);
-
-fun is_maybe_typedef_rep ctxt whacks absT_name s =
- if absT_name = @{type_name set} then
- s = @{const_name rmember}
- else
- (case try (typedef_of ctxt whacks) absT_name of
- SOME (_, _, _, _, Const (s', _)) => s' = s
- | NONE => false);
-
-fun is_typedef_abs ctxt whacks (s, T) =
- (case T of
- Type (@{type_name fun}, [_, Type (absT_name, _)]) =>
- classify_type_name ctxt absT_name = Typedef andalso
- is_maybe_typedef_abs ctxt whacks absT_name s
- | _ => false);
-
-fun is_typedef_rep ctxt whacks (s, T) =
- (case T of
- Type (@{type_name fun}, [Type (absT_name, _), _]) =>
- classify_type_name ctxt absT_name = Typedef andalso
- is_maybe_typedef_rep ctxt whacks absT_name s
- | _ => false);
-
-fun is_stale_typedef_abs ctxt whacks (s, T) =
- (case T of
- Type (@{type_name fun}, [_, Type (absT_name, _)]) =>
- classify_type_name ctxt absT_name <> Typedef andalso
- is_maybe_typedef_abs ctxt whacks absT_name s
- | _ => false);
-
-fun is_stale_typedef_rep ctxt whacks (s, T) =
- (case T of
- Type (@{type_name fun}, [Type (absT_name, _), _]) =>
- classify_type_name ctxt absT_name <> Typedef andalso
- is_maybe_typedef_rep ctxt whacks absT_name s
- | _ => false);
-
-fun instantiate_constant_types_in_term ctxt csts target =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- fun try_const _ _ (res as SOME _) = res
- | try_const (s', T') cst NONE =
- (case cst of
- Const (s, T) =>
- if s = s' then
- SOME (Sign.typ_match thy (T', T) Vartab.empty)
- handle Type.TYPE_MATCH => NONE
- else
- NONE
- | _ => NONE);
-
- fun subst_for (Const x) = fold (try_const x) csts NONE
- | subst_for (t as Free _) = if member (op aconv) csts t then SOME Vartab.empty else NONE
- | subst_for (t1 $ t2) = (case subst_for t1 of SOME subst => SOME subst | NONE => subst_for t2)
- | subst_for (Abs (_, _, t')) = subst_for t'
- | subst_for _ = NONE;
- in
- (case subst_for target of
- SOME subst => Envir.subst_term_types subst target
- | NONE => raise Type.TYPE_MATCH)
- end;
-
-datatype card = One | Fin | Fin_or_Inf | Inf
-
-(* Similar to "ATP_Util.tiny_card_of_type". *)
-fun card_of_type ctxt =
- let
- fun max_card Inf _ = Inf
- | max_card _ Inf = Inf
- | max_card Fin_or_Inf _ = Fin_or_Inf
- | max_card _ Fin_or_Inf = Fin_or_Inf
- | max_card Fin _ = Fin
- | max_card _ Fin = Fin
- | max_card One One = One;
-
- fun card_of avoid T =
- if member (op =) avoid T then
- Inf
- else
- (case T of
- TFree _ => Fin_or_Inf
- | TVar _ => Inf
- | Type (@{type_name fun}, [T1, T2]) =>
- (case (card_of avoid T1, card_of avoid T2) of
- (_, One) => One
- | (k1, k2) => max_card k1 k2)
- | Type (@{type_name prod}, [T1, T2]) =>
- (case (card_of avoid T1, card_of avoid T2) of
- (k1, k2) => max_card k1 k2)
- | Type (@{type_name set}, [T']) => card_of avoid (T' --> HOLogic.boolT)
- | Type (T_name, Ts) =>
- (case try (mutual_co_datatypes_of ctxt) (T_name, Ts) of
- NONE => Inf
- | SOME (_, fpTs, ctrss) =>
- (case ctrss of [[_]] => One | _ => Fin)
- |> fold (fold (fold (max_card o card_of (fpTs @ avoid)) o binder_types o fastype_of))
- ctrss));
- in
- card_of []
- end;
-
-fun int_of_classif Spec_Rules.Equational = 1
- | int_of_classif Spec_Rules.Inductive = 2
- | int_of_classif Spec_Rules.Co_Inductive = 3
- | int_of_classif Spec_Rules.Unknown = 4;
-
-val classif_ord = int_ord o apply2 int_of_classif;
-
-fun spec_rules_of ctxt (x as (s, T)) =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- fun subst_of t0 =
- try (Sign.typ_match thy (fastype_of t0, T)) Vartab.empty;
-
- fun process_spec _ (res as SOME _) = res
- | process_spec (classif, (ts0, ths as _ :: _)) NONE =
- (case get_first subst_of ts0 of
- SOME subst =>
- (let
- val ts = map (Envir.subst_term_types subst) ts0;
- val poly_props = map Thm.prop_of ths;
- val props = map (instantiate_constant_types_in_term ctxt ts) poly_props;
- in
- if exists (exists (exists_type (exists_subtype is_TVar))) [ts, props] then NONE
- else SOME (classif, ts, props, poly_props)
- end
- handle Type.TYPE_MATCH => NONE)
- | NONE => NONE)
- | process_spec _ NONE = NONE;
-
- fun spec_rules () =
- Spec_Rules.retrieve ctxt (Const x)
- |> sort (classif_ord o apply2 fst);
-
- val specs =
- if s = @{const_name The} then
- [(Spec_Rules.Unknown, ([Logic.varify_global @{term The}], [@{thm theI_unique}]))]
- else if s = @{const_name finite} then
- let val card = card_of_type ctxt T in
- if card = Inf orelse card = Fin_or_Inf then
- spec_rules ()
- else
- [(Spec_Rules.Equational, ([Logic.varify_global @{term finite}],
- [Skip_Proof.make_thm thy (Logic.varify_global @{prop "finite A = True"})]))]
- end
- else
- spec_rules ();
- in
- fold process_spec specs NONE
- end;
-
-fun lhs_of_equation (Const (@{const_name Pure.eq}, _) $ t $ _) = t
- | lhs_of_equation (@{const Trueprop} $ (Const (@{const_name HOL.eq}, _) $ t $ _)) = t;
-
-fun specialize_definition_type thy x def0 =
- let
- val def = specialize_type thy x def0;
- val lhs = lhs_of_equation def;
- in
- if exists_Const (curry (op =) x) lhs then def else raise Fail "cannot specialize"
- end;
-
-fun definition_of thy (x as (s, _)) =
- Defs.specifications_of (Theory.defs_of thy) (Defs.Const, s)
- |> map_filter #def
- |> map_filter (try (specialize_definition_type thy x o Thm.prop_of o Thm.axiom thy))
- |> try hd;
-
-fun is_builtin_theory thy_id =
- Context.subthy_id (thy_id, Context.theory_id @{theory Hilbert_Choice});
-
-val orphan_axioms_of =
- Spec_Rules.get
- #> filter (curry (op =) Spec_Rules.Unknown o fst)
- #> map snd
- #> filter (null o fst)
- #> maps snd
- #> filter_out (is_builtin_theory o Thm.theory_id)
- #> map Thm.prop_of;
-
-fun keys_of _ (ITVal (T, _)) = [key_of_typ T]
- | keys_of _ (ITypedef {abs_typ, ...}) = [key_of_typ abs_typ]
- | keys_of _ (IQuotient {abs_typ, ...}) = [key_of_typ abs_typ]
- | keys_of _ (ICoData (_, specs)) = map (key_of_typ o #typ) specs
- | keys_of ctxt (IVal const) = [key_of_const ctxt const]
- | keys_of ctxt (ICoPred (_, _, specs)) = map (key_of_const ctxt o #const) specs
- | keys_of ctxt (IRec specs) = map (key_of_const ctxt o #const) specs
- | keys_of ctxt (ISpec {consts, ...}) = map (key_of_const ctxt) consts
- | keys_of _ (IAxiom _) = []
- | keys_of _ (IGoal _) = []
- | keys_of _ (IEval _) = [];
-
-fun co_data_spec_deps_of ctxt ({ctrs, ...} : isa_co_data_spec) =
- fold (add_keys ctxt) ctrs [];
-fun const_spec_deps_of ctxt consts props =
- fold (add_keys ctxt) props [] |> subtract (op =) (map (key_of_const ctxt) consts);
-fun consts_spec_deps_of ctxt {consts, props} =
- fold (add_keys ctxt) props [] |> subtract (op =) (map (key_of_const ctxt) consts);
-
-fun deps_of _ (ITVal _) = []
- | deps_of ctxt (ITypedef {wrt, ...}) = add_keys ctxt wrt []
- | deps_of ctxt (IQuotient {wrt, ...}) = add_keys ctxt wrt []
- | deps_of ctxt (ICoData (_, specs)) = maps (co_data_spec_deps_of ctxt) specs
- | deps_of _ (IVal const) = add_type_keys (fastype_of const) []
- | deps_of ctxt (ICoPred (_, _, specs)) =
- maps (const_spec_deps_of ctxt (map #const specs) o #props) specs
- | deps_of ctxt (IRec specs) = maps (const_spec_deps_of ctxt (map #const specs) o #props) specs
- | deps_of ctxt (ISpec spec) = consts_spec_deps_of ctxt spec
- | deps_of ctxt (IAxiom prop) = add_keys ctxt prop []
- | deps_of ctxt (IGoal prop) = add_keys ctxt prop []
- | deps_of ctxt (IEval t) = add_keys ctxt t [];
-
-fun consts_of_rec_or_spec (IRec specs) = map #const specs
- | consts_of_rec_or_spec (ISpec {consts, ...}) = consts;
-
-fun props_of_rec_or_spec (IRec specs) = maps #props specs
- | props_of_rec_or_spec (ISpec {props, ...}) = props;
-
-fun merge_two_rec_or_spec cmd cmd' =
- ISpec {consts = consts_of_rec_or_spec cmd @ consts_of_rec_or_spec cmd',
- props = props_of_rec_or_spec cmd @ props_of_rec_or_spec cmd'};
-
-fun merge_two (ICoData (fp, specs)) (ICoData (fp', specs'), complete) =
- (ICoData (BNF_Util.case_fp fp fp fp', specs @ specs'), complete andalso fp = fp')
- | merge_two (IRec specs) (IRec specs', complete) = (IRec (specs @ specs'), complete)
- | merge_two (cmd as IRec _) (cmd' as ISpec _, complete) =
- (merge_two_rec_or_spec cmd cmd', complete)
- | merge_two (cmd as ISpec _) (cmd' as IRec _, complete) =
- (merge_two_rec_or_spec cmd cmd', complete)
- | merge_two (cmd as ISpec _) (cmd' as ISpec _, complete) =
- (merge_two_rec_or_spec cmd cmd', complete)
- | merge_two _ _ = raise CYCLIC_DEPS ();
-
-fun sort_isa_commands_topologically ctxt cmds =
- let
- fun normal_pairs [] = []
- | normal_pairs (all as normal :: _) = map (rpair normal) all;
-
- fun add_node [] _ = I
- | add_node (normal :: _) cmd = Graph.new_node (normal, cmd);
-
- fun merge_scc (cmd :: cmds) complete = fold merge_two cmds (cmd, complete);
-
- fun sort_problem (cmds, complete) =
- let
- val keyss = map (keys_of ctxt) cmds;
- val normal_keys = Symtab.make (maps normal_pairs keyss);
- val normalize = Symtab.lookup normal_keys;
-
- fun add_deps [] _ = I
- | add_deps (normal :: _) cmd =
- let
- val deps = deps_of ctxt cmd
- |> map_filter normalize
- |> remove (op =) normal;
- in
- fold (fn dep => Graph.add_edge (dep, normal)) deps
- end;
-
- val cmd_of_key = the o AList.lookup (op =) (map hd keyss ~~ cmds);
-
- val G = Graph.empty
- |> fold2 add_node keyss cmds
- |> fold2 add_deps keyss cmds;
-
- val cmd_sccs = rev (Graph.strong_conn G)
- |> map (map cmd_of_key);
- in
- if exists (can (fn _ :: _ :: _ => ())) cmd_sccs then
- sort_problem (fold_map merge_scc cmd_sccs complete)
- else
- (Graph.schedule (K snd) G, complete)
- end;
-
- val typedecls = filter (can (fn ITVal _ => ())) cmds;
- val (mixed, complete) =
- (filter (can (fn ITypedef _ => () | IQuotient _ => () | ICoData _ => () | IVal _ => ()
- | ICoPred _ => () | IRec _ => () | ISpec _ => ())) cmds, true)
- |> sort_problem;
- val axioms = filter (can (fn IAxiom _ => ())) cmds;
- val goals = filter (can (fn IGoal _ => ())) cmds;
- val evals = filter (can (fn IEval _ => ())) cmds;
- in
- (typedecls @ mixed @ axioms @ goals @ evals, complete)
- end;
-
-fun group_of (ITVal _) = 1
- | group_of (ITypedef _) = 2
- | group_of (IQuotient _) = 3
- | group_of (ICoData _) = 4
- | group_of (IVal _) = 5
- | group_of (ICoPred _) = 6
- | group_of (IRec _) = 7
- | group_of (ISpec _) = 8
- | group_of (IAxiom _) = 9
- | group_of (IGoal _) = 10
- | group_of (IEval _) = 11;
-
-fun group_isa_commands [] = []
- | group_isa_commands [cmd] = [[cmd]]
- | group_isa_commands (cmd :: cmd' :: cmds) =
- let val (group :: groups) = group_isa_commands (cmd' :: cmds) in
- if group_of cmd = group_of cmd' then
- (cmd :: group) :: groups
- else
- [cmd] :: (group :: groups)
- end;
-
-fun defined_by (Const (@{const_name All}, _) $ t) = defined_by t
- | defined_by (Abs (_, _, t)) = defined_by t
- | defined_by (@{const implies} $ _ $ u) = defined_by u
- | defined_by (Const (@{const_name HOL.eq}, _) $ t $ _) = head_of t
- | defined_by t = head_of t;
-
-fun partition_props [_] props = SOME [props]
- | partition_props consts props =
- let
- val propss = map (fn const => filter (fn prop => defined_by prop aconv const) props) consts;
- in
- if eq_set (op aconv) (props, flat propss) andalso forall (not o null) propss then SOME propss
- else NONE
- end;
-
-fun hol_concl_head (Const (@{const_name All}, _) $ Abs (_, _, t)) = hol_concl_head t
- | hol_concl_head (Const (@{const_name implies}, _) $ _ $ t) = hol_concl_head t
- | hol_concl_head (t $ _) = hol_concl_head t
- | hol_concl_head t = t;
-
-fun is_inductive_set_intro t =
- (case hol_concl_head t of
- Const (@{const_name rmember}, _) => true
- | _ => false);
-
-exception NO_TRIPLE of unit;
-
-fun triple_for_intro_rule ctxt x rule =
- let
- val (prems, concl) = Logic.strip_horn rule
- |>> map (Object_Logic.atomize_term ctxt)
- ||> Object_Logic.atomize_term ctxt;
-
- val (mains, sides) = List.partition (exists_Const (curry (op =) x)) prems;
-
- val is_right_head = curry (op aconv) (Const x) o head_of;
- in
- if forall is_right_head mains then (sides, mains, concl) else raise NO_TRIPLE ()
- end;
-
-val tuple_for_args = HOLogic.mk_tuple o snd o strip_comb;
-
-fun wf_constraint_for rel sides concl mains =
- HOLogic.mk_mem (HOLogic.mk_prod (apply2 tuple_for_args (mains, concl)), Var rel)
- |> fold (curry HOLogic.mk_imp) sides
- |> close_form [rel];
-
-fun wf_constraint_for_triple rel (sides, mains, concl) =
- map (wf_constraint_for rel sides concl) mains
- |> foldr1 HOLogic.mk_conj;
-
-fun terminates_by ctxt timeout goal tac =
- can (SINGLE (Classical.safe_tac ctxt) #> the
- #> SINGLE (DETERM_TIMEOUT timeout (tac ctxt (auto_tac ctxt))) #> the
- #> Goal.finish ctxt) goal;
-
-val max_cached_wfs = 50;
-val cached_timeout = Synchronized.var "Nunchaku_Collect.cached_timeout" Time.zeroTime;
-val cached_wf_props = Synchronized.var "Nunchaku_Collect.cached_wf_props" ([] : (term * bool) list);
-
-val termination_tacs = [Lexicographic_Order.lex_order_tac true, ScnpReconstruct.sizechange_tac];
-
-fun is_wellfounded_inductive_predicate ctxt wfs debug wf_timeout const intros =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- val Const (x as (_, T)) = head_of (HOLogic.dest_Trueprop (Logic.strip_imp_concl (hd intros)));
- in
- (case triple_lookup (const_match thy o swap) wfs (dest_Const const) of
- SOME (SOME wf) => wf
- | _ =>
- (case map (triple_for_intro_rule ctxt x) intros |> filter_out (null o #2) of
- [] => true
- | triples =>
- let
- val binders_T = HOLogic.mk_tupleT (binder_types T);
- val rel_T = HOLogic.mk_setT (HOLogic.mk_prodT (binders_T, binders_T));
- val j = fold (Integer.max o maxidx_of_term) intros 0 + 1;
- val rel = (("R", j), rel_T);
- val prop =
- Const (@{const_name wf}, rel_T --> HOLogic.boolT) $ Var rel ::
- map (wf_constraint_for_triple rel) triples
- |> foldr1 HOLogic.mk_conj
- |> HOLogic.mk_Trueprop;
- in
- if debug then writeln ("Wellfoundedness goal: " ^ Syntax.string_of_term ctxt prop)
- else ();
- if wf_timeout = Synchronized.value cached_timeout andalso
- length (Synchronized.value cached_wf_props) < max_cached_wfs then
- ()
- else
- (Synchronized.change cached_wf_props (K []);
- Synchronized.change cached_timeout (K wf_timeout));
- (case AList.lookup (op =) (Synchronized.value cached_wf_props) prop of
- SOME wf => wf
- | NONE =>
- let
- val goal = Goal.init (Thm.cterm_of ctxt prop);
- val wf = exists (terminates_by ctxt wf_timeout goal) termination_tacs;
- in
- Synchronized.change cached_wf_props (cons (prop, wf)); wf
- end)
- end)
- handle
- List.Empty => false
- | NO_TRIPLE () => false)
- end;
-
-datatype lhs_pat =
- Only_Vars
-| Prim_Pattern of string
-| Any_Pattern;
-
-fun is_likely_pat_complete ctxt props =
- let
- val is_Var_or_Bound = is_Var orf is_Bound;
-
- fun lhs_pat_of t =
- (case t of
- Const (@{const_name All}, _) $ Abs (_, _, t) => lhs_pat_of t
- | Const (@{const_name HOL.eq}, _) $ u $ _ =>
- (case filter_out is_Var_or_Bound (snd (strip_comb u)) of
- [] => Only_Vars
- | [v] =>
- (case strip_comb v of
- (cst as Const (_, T), args) =>
- (case body_type T of
- Type (T_name, _) =>
- if can (Ctr_Sugar.dest_ctr ctxt T_name) cst andalso forall is_Var_or_Bound args then
- Prim_Pattern T_name
- else
- Any_Pattern
- | _ => Any_Pattern)
- | _ => Any_Pattern)
- | _ => Any_Pattern)
- | _ => Any_Pattern);
- in
- (case map lhs_pat_of props of
- [] => false
- | pats as Prim_Pattern T_name :: _ =>
- forall (can (fn Prim_Pattern _ => ())) pats andalso
- length pats = length (#ctrs (the (Ctr_Sugar.ctr_sugar_of ctxt T_name)))
- | pats => forall (curry (op =) Only_Vars) pats)
- end;
-
-(* Prevents divergence in case of cyclic or infinite axiom dependencies. *)
-val axioms_max_depth = 255
-
-fun isa_problem_of_subgoal ctxt falsify wfs whacks cards debug wf_timeout evals0 some_assms0
- subgoal0 =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- fun card_of T =
- (case triple_lookup (typ_match thy o swap) cards T of
- NONE => (NONE, NONE)
- | SOME (c1, c2) => (if c1 = SOME 1 then NONE else c1, c2));
-
- fun axioms_of_class class =
- #axioms (Axclass.get_info thy class)
- handle ERROR _ => [];
-
- fun monomorphize_class_axiom T t =
- (case Term.add_tvars t [] of
- [] => t
- | [(x, S)] => Envir.subst_term_types (Vartab.make [(x, (S, T))]) t);
-
- fun consider_sort depth T S (seens as (seenS, seenT, seen), problem) =
- if member (op =) seenS S then
- (seens, problem)
- else if depth > axioms_max_depth then
- raise TOO_DEEP_DEPS ()
- else
- let
- val seenS = S :: seenS;
- val seens = (seenS, seenT, seen);
-
- val supers = Sign.complete_sort thy S;
- val axioms0 = maps (map Thm.prop_of o axioms_of_class) supers;
- val axioms = map (preprocess_prop false ctxt whacks o monomorphize_class_axiom T) axioms0;
- in
- (seens, map IAxiom axioms @ problem)
- |> fold (consider_term (depth + 1)) axioms
- end
- and consider_type depth T =
- (case T of
- Type (s, Ts) =>
- if is_type_builtin s then fold (consider_type depth) Ts
- else consider_non_builtin_type depth T
- | _ => consider_non_builtin_type depth T)
- and consider_non_builtin_type depth T (seens as (seenS, seenT, seen), problem) =
- if member (op =) seenT T then
- (seens, problem)
- else
- let
- val seenT = T :: seenT;
- val seens = (seenS, seenT, seen);
-
- fun consider_typedef_or_quotient itypedef_or_quotient tuple_of s =
- let
- val (T0, repT0, wrt0, abs0, rep0) = tuple_of ctxt whacks s;
- val tyenv = Sign.typ_match thy (T0, T) Vartab.empty;
- val substT = Envir.subst_type tyenv;
- val subst = Envir.subst_term_types tyenv;
- val repT = substT repT0;
- val wrt = subst (wrt0 ());
- val abs = subst abs0;
- val rep = subst rep0;
- in
- apsnd (cons (itypedef_or_quotient {abs_typ = T, rep_typ = repT, wrt = wrt, abs = abs,
- rep = rep}))
- #> consider_term (depth + 1) wrt
- end;
- in
- (seens, problem)
- |> (case T of
- TFree (_, S) =>
- apsnd (cons (ITVal (T, card_of T)))
- #> consider_sort depth T S
- | TVar (_, S) => consider_sort depth T S
- | Type (s, Ts) =>
- fold (consider_type depth) Ts
- #> (case classify_type_name ctxt s of
- Co_Datatype =>
- let
- val (fp, fpTs, ctrss) = mutual_co_datatypes_of ctxt (s, Ts);
- val specs = map2 (fn T => fn ctrs => {typ = T, ctrs = ctrs}) fpTs ctrss;
- in
- (fn ((seenS, seenT, seen), problem) =>
- ((seenS, union (op =) fpTs seenT, seen), ICoData (fp, specs) :: problem))
- #> fold (fold (consider_type (depth + 1) o fastype_of)) ctrss
- end
- | Typedef => consider_typedef_or_quotient ITypedef typedef_of s
- | Quotient => consider_typedef_or_quotient IQuotient quotient_of s
- | TVal => apsnd (cons (ITVal (T, card_of T)))))
- end
- and consider_term depth t =
- (case t of
- t1 $ t2 => fold (consider_term depth) [t1, t2]
- | Var (_, T) => consider_type depth T
- | Bound _ => I
- | Abs (_, T, t') =>
- consider_term depth t'
- #> consider_type depth T
- | _ => (fn (seens as (seenS, seenT, seen), problem) =>
- if member (op aconv) seen t then
- (seens, problem)
- else if depth > axioms_max_depth then
- raise TOO_DEEP_DEPS ()
- else
- let
- val seen = t :: seen;
- val seens = (seenS, seenT, seen);
- in
- (case t of
- Const (x as (s, T)) =>
- (if is_const_builtin s orelse is_co_datatype_ctr ctxt x orelse
- is_co_datatype_case ctxt x orelse is_quotient_abs ctxt whacks x orelse
- is_quotient_rep ctxt whacks x orelse is_typedef_abs ctxt whacks x orelse
- is_typedef_rep ctxt whacks x then
- (seens, problem)
- else if is_stale_typedef_abs ctxt whacks x orelse
- is_stale_typedef_rep ctxt whacks x then
- raise UNSUPPORTED_FUNC t
- else
- (case spec_rules_of ctxt x of
- SOME (classif, consts, props0, poly_props) =>
- let
- val props = map (preprocess_prop false ctxt whacks) props0;
-
- fun def_or_spec () =
- (case definition_of thy x of
- SOME eq0 =>
- let val eq = preprocess_prop false ctxt whacks eq0 in
- ([eq], [IRec [{const = t, props = [eq], pat_complete = true}]])
- end
- | NONE => (props, [ISpec {consts = consts, props = props}]));
-
- val (props', cmds) =
- if null props then
- ([], map IVal consts)
- else if classif = Spec_Rules.Equational then
- (case partition_props consts props of
- SOME propss =>
- (props,
- [IRec (map2 (fn const => fn props =>
- {const = const, props = props,
- pat_complete = is_likely_pat_complete ctxt props})
- consts propss)])
- | NONE => def_or_spec ())
- else if member (op =) [Spec_Rules.Inductive, Spec_Rules.Co_Inductive]
- classif then
- if is_inductive_set_intro (hd props) then
- def_or_spec ()
- else
- (case partition_props consts props of
- SOME propss =>
- (props,
- [ICoPred (if classif = Spec_Rules.Inductive then BNF_Util.Least_FP
- else BNF_Util.Greatest_FP,
- length consts = 1 andalso
- is_wellfounded_inductive_predicate ctxt wfs debug wf_timeout
- (the_single consts) poly_props,
- map2 (fn const => fn props => {const = const, props = props})
- consts propss)])
- | NONE => def_or_spec ())
- else
- def_or_spec ();
- in
- ((seenS, seenT, union (op aconv) consts seen), cmds @ problem)
- |> fold (consider_term (depth + 1)) props'
- end
- | NONE =>
- (case definition_of thy x of
- SOME eq0 =>
- let val eq = preprocess_prop false ctxt whacks eq0 in
- (seens, IRec [{const = t, props = [eq], pat_complete = true}] :: problem)
- |> consider_term (depth + 1) eq
- end
- | NONE => (seens, IVal t :: problem))))
- |> consider_type depth T
- | Free (_, T) =>
- (seens, IVal t :: problem)
- |> consider_type depth T)
- end));
-
- val (poly_axioms, mono_axioms0) = orphan_axioms_of ctxt
- |> List.partition has_polymorphism;
-
- fun implicit_evals_of pol (@{const Not} $ t) = implicit_evals_of (not pol) t
- | implicit_evals_of pol (@{const implies} $ t $ u) =
- (case implicit_evals_of pol u of
- [] => implicit_evals_of (not pol) t
- | ts => ts)
- | implicit_evals_of pol (@{const conj} $ t $ u) =
- union (op aconv) (implicit_evals_of pol t) (implicit_evals_of pol u)
- | implicit_evals_of pol (@{const disj} $ t $ u) =
- union (op aconv) (implicit_evals_of pol t) (implicit_evals_of pol u)
- | implicit_evals_of false (Const (@{const_name HOL.eq}, _) $ t $ u) =
- distinct (op aconv) [t, u]
- | implicit_evals_of true (Const (@{const_name HOL.eq}, _) $ t $ _) = [t]
- | implicit_evals_of _ _ = [];
-
- val mono_axioms_and_some_assms =
- map (preprocess_prop false ctxt whacks) (mono_axioms0 @ some_assms0);
- val subgoal = preprocess_prop falsify ctxt whacks subgoal0;
- val implicit_evals = implicit_evals_of true subgoal;
- val evals = map (preprocess_closed_term ctxt whacks) evals0;
- val seens = ([], [], []);
-
- val (commandss, complete) =
- (seens,
- map IAxiom mono_axioms_and_some_assms @ [IGoal subgoal] @ map IEval (implicit_evals @ evals))
- |> fold (consider_term 0) (subgoal :: evals @ mono_axioms_and_some_assms)
- |> snd
- |> rev (* prettier *)
- |> sort_isa_commands_topologically ctxt
- |>> group_isa_commands;
- in
- (poly_axioms, {commandss = commandss, sound = true, complete = complete})
- end;
-
-fun add_pat_complete_of_command cmd =
- (case cmd of
- ICoPred (_, _, specs) => union (op =) (map #const specs)
- | IRec specs =>
- union (op =) (map_filter (try (fn {const, pat_complete = true, ...} => const)) specs)
- | _ => I);
-
-fun pat_completes_of_isa_problem {commandss, ...} =
- fold (fold add_pat_complete_of_command) commandss [];
-
-fun str_of_isa_term_with_type ctxt t =
- Syntax.string_of_term ctxt t ^ " : " ^ Syntax.string_of_typ ctxt (fastype_of t);
-
-fun is_triv_wrt (Abs (_, _, body)) = is_triv_wrt body
- | is_triv_wrt @{const True} = true
- | is_triv_wrt _ = false;
-
-fun str_of_isa_type_spec ctxt {abs_typ, rep_typ, wrt, abs, rep} =
- Syntax.string_of_typ ctxt abs_typ ^ " := " ^ Syntax.string_of_typ ctxt rep_typ ^
- (if is_triv_wrt wrt then "" else "\n wrt " ^ Syntax.string_of_term ctxt wrt) ^
- "\n abstract " ^ Syntax.string_of_term ctxt abs ^
- "\n concrete " ^ Syntax.string_of_term ctxt rep;
-
-fun str_of_isa_co_data_spec ctxt {typ, ctrs} =
- Syntax.string_of_typ ctxt typ ^ " :=\n " ^
- space_implode "\n| " (map (str_of_isa_term_with_type ctxt) ctrs);
-
-fun str_of_isa_const_spec ctxt {const, props} =
- str_of_isa_term_with_type ctxt const ^ " :=\n " ^
- space_implode ";\n " (map (Syntax.string_of_term ctxt) props);
-
-fun str_of_isa_rec_spec ctxt {const, props, pat_complete} =
- str_of_isa_term_with_type ctxt const ^ (if pat_complete then " [pat_complete]" else "") ^
- " :=\n " ^ space_implode ";\n " (map (Syntax.string_of_term ctxt) props);
-
-fun str_of_isa_consts_spec ctxt {consts, props} =
- space_implode " and\n " (map (str_of_isa_term_with_type ctxt) consts) ^ " :=\n " ^
- space_implode ";\n " (map (Syntax.string_of_term ctxt) props);
-
-fun str_of_isa_card NONE = ""
- | str_of_isa_card (SOME k) = signed_string_of_int k;
-
-fun str_of_isa_cards_suffix (NONE, NONE) = ""
- | str_of_isa_cards_suffix (c1, c2) = " " ^ str_of_isa_card c1 ^ "-" ^ str_of_isa_card c2;
-
-fun str_of_isa_command ctxt (ITVal (T, cards)) =
- "type " ^ Syntax.string_of_typ ctxt T ^ str_of_isa_cards_suffix cards
- | str_of_isa_command ctxt (ITypedef spec) = "typedef " ^ str_of_isa_type_spec ctxt spec
- | str_of_isa_command ctxt (IQuotient spec) = "quotient " ^ str_of_isa_type_spec ctxt spec
- | str_of_isa_command ctxt (ICoData (fp, specs)) =
- BNF_Util.case_fp fp "data" "codata" ^ " " ^ str_of_and_list (str_of_isa_co_data_spec ctxt) specs
- | str_of_isa_command ctxt (IVal t) = "val " ^ str_of_isa_term_with_type ctxt t
- | str_of_isa_command ctxt (ICoPred (fp, wf, specs)) =
- BNF_Util.case_fp fp "pred" "copred" ^ " " ^ (if wf then "[wf] " else "") ^
- str_of_and_list (str_of_isa_const_spec ctxt) specs
- | str_of_isa_command ctxt (IRec specs) = "rec " ^ str_of_and_list (str_of_isa_rec_spec ctxt) specs
- | str_of_isa_command ctxt (ISpec spec) = "spec " ^ str_of_isa_consts_spec ctxt spec
- | str_of_isa_command ctxt (IAxiom prop) = "axiom " ^ Syntax.string_of_term ctxt prop
- | str_of_isa_command ctxt (IGoal prop) = "goal " ^ Syntax.string_of_term ctxt prop
- | str_of_isa_command ctxt (IEval t) = "eval " ^ Syntax.string_of_term ctxt t;
-
-fun str_of_isa_problem ctxt {commandss, sound, complete} =
- map (cat_lines o map (suffix "." o str_of_isa_command ctxt)) commandss
- |> space_implode "\n\n" |> suffix "\n"
- |> prefix ("# " ^ (if sound then "sound" else "unsound") ^ "\n")
- |> prefix ("# " ^ (if complete then "complete" else "incomplete") ^ "\n");
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_commands.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,265 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_commands.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Adds the "nunchaku" and "nunchaku_params" commands to Isabelle/Isar's outer syntax.
-*)
-
-signature NUNCHAKU_COMMANDS =
-sig
- type params = Nunchaku.params
-
- val default_params: theory -> (string * string) list -> params
-end;
-
-structure Nunchaku_Commands : NUNCHAKU_COMMANDS =
-struct
-
-open Nunchaku_Util;
-open Nunchaku;
-
-type raw_param = string * string list;
-
-val default_default_params =
- [("assms", "true"),
- ("debug", "false"),
- ("falsify", "true"),
- ("max_genuine", "1"),
- ("max_potential", "1"),
- ("overlord", "false"),
- ("solvers", "cvc4 kodkod paradox smbc"),
- ("specialize", "true"),
- ("spy", "false"),
- ("timeout", "30"),
- ("verbose", "false"),
- ("wf_timeout", "0.5")];
-
-val negated_params =
- [("dont_whack", "whack"),
- ("dont_specialize", "specialize"),
- ("dont_spy", "spy"),
- ("no_assms", "assms"),
- ("no_debug", "debug"),
- ("no_overlord", "overlord"),
- ("non_mono", "mono"),
- ("non_wf", "wf"),
- ("quiet", "verbose"),
- ("satisfy", "falsify")];
-
-fun is_known_raw_param s =
- AList.defined (op =) default_default_params s orelse
- AList.defined (op =) negated_params s orelse
- member (op =) ["atoms", "card", "eval", "expect"] s orelse
- exists (fn p => String.isPrefix (p ^ " ") s)
- ["atoms", "card", "dont_whack", "mono", "non_mono", "non_wf", "wf", "whack"];
-
-fun check_raw_param (s, _) =
- if is_known_raw_param s then () else error ("Unknown parameter: " ^ quote s);
-
-fun unnegate_param_name name =
- (case AList.lookup (op =) negated_params name of
- NONE =>
- if String.isPrefix "dont_" name then SOME (unprefix "dont_" name)
- else if String.isPrefix "non_" name then SOME (unprefix "non_" name)
- else NONE
- | some_name => some_name);
-
-fun normalize_raw_param (name, value) =
- (case unnegate_param_name name of
- SOME name' =>
- [(name',
- (case value of
- ["false"] => ["true"]
- | ["true"] => ["false"]
- | [] => ["false"]
- | _ => value))]
- | NONE => [(name, value)]);
-
-structure Data = Theory_Data
-(
- type T = raw_param list
- val empty = default_default_params |> map (apsnd single)
- val extend = I
- fun merge data = AList.merge (op =) (K true) data
-);
-
-val set_default_raw_param = Data.map o fold (AList.update (op =)) o normalize_raw_param;
-val default_raw_params = Data.get;
-
-fun is_punctuation s = (s = "," orelse s = "-");
-
-fun stringify_raw_param_value [] = ""
- | stringify_raw_param_value [s] = s
- | stringify_raw_param_value (s1 :: s2 :: ss) =
- s1 ^ (if is_punctuation s1 orelse is_punctuation s2 then "" else " ") ^
- stringify_raw_param_value (s2 :: ss);
-
-fun extract_params ctxt mode default_params override_params =
- let
- val override_params = maps normalize_raw_param override_params;
- val raw_params = rev override_params @ rev default_params;
- val raw_lookup = AList.lookup (op =) raw_params;
- val lookup = Option.map stringify_raw_param_value o raw_lookup;
- val lookup_string = the_default "" o lookup;
- val lookup_strings = these o Option.map (space_explode " ") o lookup;
-
- fun general_lookup_bool option default_value name =
- (case lookup name of
- SOME s => parse_bool_option option name s
- | NONE => default_value);
-
- val lookup_bool = the o general_lookup_bool false (SOME false);
-
- fun lookup_int name =
- (case lookup name of
- SOME s =>
- (case Int.fromString s of
- SOME i => i
- | NONE => error ("Parameter " ^ quote name ^ " must be assigned an integer value"))
- | NONE => 0);
-
- fun int_range_from_string name s =
- (case space_explode "-" s of
- [s] => (s, s)
- | [s1, s2] => (s1, s2)
- | _ => error ("Parameter " ^ quote name ^ " must be assigned a range of integers"))
- |> apply2 Int.fromString;
-
- fun lookup_assigns read pre of_str =
- (case lookup pre of
- SOME s => [(NONE, of_str s)]
- | NONE => []) @
- map (fn (name, value) => (SOME (read (String.extract (name, size pre + 1, NONE))),
- of_str (stringify_raw_param_value value)))
- (filter (String.isPrefix (pre ^ " ") o fst) raw_params);
-
- fun lookup_int_range_assigns read pre =
- lookup_assigns read pre (int_range_from_string pre);
-
- fun lookup_bool_assigns read pre =
- lookup_assigns read pre (the o parse_bool_option false pre);
-
- fun lookup_bool_option_assigns read pre =
- lookup_assigns read pre (parse_bool_option true pre);
-
- fun lookup_strings_assigns read pre =
- lookup_assigns read pre (space_explode " ");
-
- fun lookup_time name =
- (case lookup name of
- SOME s => parse_time name s
- | NONE => Time.zeroTime);
-
- val read_type_polymorphic =
- Syntax.read_typ ctxt #> Logic.mk_type
- #> singleton (Variable.polymorphic ctxt) #> Logic.dest_type;
- val read_term_polymorphic =
- Syntax.read_term ctxt #> singleton (Variable.polymorphic ctxt);
- val lookup_term_list_option_polymorphic =
- AList.lookup (op =) raw_params #> Option.map (map read_term_polymorphic);
-
- fun read_const_polymorphic s =
- (case read_term_polymorphic s of
- Const x => x
- | t => error ("Not a constant: " ^ Syntax.string_of_term ctxt t));
-
- val solvers = lookup_strings "solvers";
- val falsify = lookup_bool "falsify";
- val assms = lookup_bool "assms";
- val spy = getenv "NUNCHAKU_SPY" = "yes" orelse lookup_bool "spy";
- val overlord = lookup_bool "overlord";
- val expect = lookup_string "expect";
-
- val wfs = lookup_bool_option_assigns read_const_polymorphic "wf";
- val whacks = lookup_bool_assigns read_term_polymorphic "whack";
- val cards = lookup_int_range_assigns read_type_polymorphic "card";
- val monos = lookup_bool_option_assigns read_type_polymorphic "mono";
-
- val debug = (mode <> Auto_Try andalso lookup_bool "debug");
- val verbose = debug orelse (mode <> Auto_Try andalso lookup_bool "verbose");
- val max_potential = if mode = Normal then Int.max (0, lookup_int "max_potential") else 0;
- val max_genuine = Int.max (0, lookup_int "max_genuine");
- val evals = these (lookup_term_list_option_polymorphic "eval");
- val atomss = lookup_strings_assigns read_type_polymorphic "atoms";
-
- val specialize = lookup_bool "specialize";
- val multithread = mode = Normal andalso lookup_bool "multithread";
-
- val timeout = lookup_time "timeout";
- val wf_timeout = lookup_time "wf_timeout";
-
- val mode_of_operation_params =
- {solvers = solvers, falsify = falsify, assms = assms, spy = spy, overlord = overlord,
- expect = expect};
-
- val scope_of_search_params =
- {wfs = wfs, whacks = whacks, cards = cards, monos = monos};
-
- val output_format_params =
- {verbose = verbose, debug = debug, max_potential = max_potential, max_genuine = max_genuine,
- evals = evals, atomss = atomss};
-
- val optimization_params =
- {specialize = specialize, multithread = multithread};
-
- val timeout_params =
- {timeout = timeout, wf_timeout = wf_timeout};
- in
- {mode_of_operation_params = mode_of_operation_params,
- scope_of_search_params = scope_of_search_params,
- output_format_params = output_format_params,
- optimization_params = optimization_params,
- timeout_params = timeout_params}
- end;
-
-fun default_params thy =
- extract_params (Proof_Context.init_global thy) Normal (default_raw_params thy)
- o map (apsnd single);
-
-val parse_key = Scan.repeat1 Parse.embedded >> space_implode " ";
-val parse_value =
- Scan.repeat1 (Parse.minus >> single
- || Scan.repeat1 (Scan.unless Parse.minus (Parse.name || Parse.float_number))
- || @{keyword ","} |-- Parse.number >> prefix "," >> single)
- >> flat;
-val parse_param = parse_key -- Scan.optional (@{keyword "="} |-- parse_value) [];
-val parse_params = Scan.optional (@{keyword "["} |-- Parse.list parse_param --| @{keyword "]"}) [];
-
-fun run_chaku override_params mode i state0 =
- let
- val state = Proof.map_contexts (Try0.silence_methods false) state0;
- val thy = Proof.theory_of state;
- val ctxt = Proof.context_of state;
- val _ = List.app check_raw_param override_params;
- val params = extract_params ctxt mode (default_raw_params thy) override_params;
- in
- (if mode = Auto_Try then perhaps o try else fn f => fn x => f x)
- (fn _ => run_chaku_on_subgoal state params mode i) (unknownN, NONE)
- |> `(fn (outcome_code, _) => outcome_code = genuineN)
- end;
-
-fun string_for_raw_param (name, value) =
- name ^ " = " ^ stringify_raw_param_value value;
-
-fun nunchaku_params_trans params =
- Toplevel.theory (fold set_default_raw_param params
- #> tap (fn thy =>
- let val params = rev (default_raw_params thy) in
- List.app check_raw_param params;
- writeln ("Default parameters for Nunchaku:\n" ^
- (params |> map string_for_raw_param |> sort_strings |> cat_lines))
- end));
-
-val _ =
- Outer_Syntax.command @{command_keyword nunchaku}
- "try to find a countermodel using Nunchaku"
- (parse_params -- Scan.optional Parse.nat 1 >> (fn (params, i) =>
- Toplevel.keep_proof (fn state =>
- ignore (run_chaku params Normal i (Toplevel.proof_of state)))));
-
-val _ =
- Outer_Syntax.command @{command_keyword nunchaku_params}
- "set and display the default parameters for Nunchaku"
- (parse_params #>> nunchaku_params_trans);
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_display.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,91 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_display.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Pretty printing of Isabelle/HOL models for Nunchaku.
-*)
-
-signature NUNCHAKU_DISPLAY =
-sig
- type isa_model = Nunchaku_Reconstruct.isa_model
-
- val pretty_of_isa_model_opt: Proof.context -> isa_model option -> Pretty.T
-end;
-
-structure Nunchaku_Display : NUNCHAKU_DISPLAY =
-struct
-
-open Nunchaku_Util;
-open Nunchaku_Reconstruct;
-
-val indent_size = 2;
-
-val pretty_indent = Pretty.indent indent_size;
-
-fun sorting_str_of_typ (TFree (s, _)) = "a" ^ s
- | sorting_str_of_typ (Type (s, Ts)) = "b" ^ s ^ space_implode " " (map sorting_str_of_typ Ts)
- | sorting_str_of_typ (TVar _) = "X";
-
-fun sorting_str_of_term (Const (s, T)) = "b" ^ s ^ sorting_str_of_typ T
- | sorting_str_of_term (Free (s, _)) = "a" ^ s
- | sorting_str_of_term (t $ u) = sorting_str_of_term t ^ " " ^ sorting_str_of_term u
- | sorting_str_of_term (Abs (_, T, t)) = "c" ^ sorting_str_of_typ T ^ " " ^ sorting_str_of_term t
- | sorting_str_of_term _ = "X";
-
-fun pretty_of_isa_model_opt _ NONE =
- pretty_indent (Pretty.str "Model unavailable (internal error)")
- | pretty_of_isa_model_opt ctxt0
- (SOME {type_model, free_model, pat_complete_model, pat_incomplete_model, skolem_model}) =
- let
- val ctxt = ctxt0 |> Config.put show_question_marks false;
-
- val pat_incomplete_model' = pat_incomplete_model
- |> filter_out (can (fn Const (@{const_name unreachable}, _) => ()) o fst);
-
- fun pretty_of_typ_entry (T, atoms) =
- Pretty.block (Pretty.breaks [Syntax.pretty_typ ctxt T, Pretty.str "=",
- Pretty.enum "," "{" "}" (map (Syntax.pretty_term ctxt) atoms)]);
-
- fun pretty_of_term_entry (t, value) =
- let
- val no_types_ctxt = ctxt |> Config.put show_types false;
- val schematic_ctxt = ctxt |> Proof_Context.set_mode Proof_Context.mode_schematic;
-
- val show_types = Config.get ctxt show_types;
- val value' = value |> perhaps (try (Syntax.check_term schematic_ctxt));
- val T = fastype_of t;
- val T' = if T = dummyT then try fastype_of value' |> the_default T else T;
- val t' = t |> show_types ? Type.constraint T';
- in
- Pretty.block (Pretty.breaks
- [Syntax.pretty_term ctxt t'
- |> (show_types andalso T' <> dummyT) ? (single #> Pretty.enclose "(" ")"),
- Pretty.str "=", Syntax.pretty_term no_types_ctxt value'])
- end;
-
- fun chunks_of_entries sorting_str_of pretty_of title entries =
- if not (null entries) then
- (if title = "" then [] else [Pretty.str (title ^ plural_s_for_list entries ^ ":")]) @
- map (pretty_indent o pretty_of) (sort_by (sorting_str_of o fst) entries)
- else
- [];
-
- val chunks =
- (if null free_model then
- [pretty_indent (Pretty.str "No free variables")]
- else
- chunks_of_entries sorting_str_of_term pretty_of_term_entry "" free_model) @
- chunks_of_entries sorting_str_of_term pretty_of_term_entry "Skolem constant" skolem_model @
- chunks_of_entries sorting_str_of_term pretty_of_term_entry "Underspecified constant"
- pat_incomplete_model' @
- (if Config.get ctxt show_consts then
- chunks_of_entries sorting_str_of_term pretty_of_term_entry "Fully specified constant"
- pat_complete_model
- else
- []) @
- chunks_of_entries sorting_str_of_typ pretty_of_typ_entry "Type" type_model;
- in
- Pretty.chunks chunks
- end;
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_model.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,284 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_model.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Abstract syntax tree for Nunchaku models.
-*)
-
-signature NUNCHAKU_MODEL =
-sig
- type ident = Nunchaku_Problem.ident
- type ty = Nunchaku_Problem.ty
- type tm = Nunchaku_Problem.tm
- type name_pool = Nunchaku_Problem.name_pool
-
- type ty_entry = ty * tm list
- type tm_entry = tm * tm
-
- type nun_model =
- {type_model: ty_entry list,
- const_model: tm_entry list,
- skolem_model: tm_entry list}
-
- val str_of_nun_model: nun_model -> string
-
- val allocate_ugly: name_pool -> string * string -> string * name_pool
-
- val ugly_nun_model: name_pool -> nun_model -> nun_model
-
- datatype token =
- Ident of ident
- | Symbol of ident
- | Atom of ident * int
- | End_of_Stream
-
- val parse_tok: ''a -> ''a list -> ''a * ''a list
- val parse_ident: token list -> ident * token list
- val parse_id: ident -> token list -> token * token list
- val parse_sym: ident -> token list -> token * token list
- val parse_atom: token list -> (ident * int) * token list
- val nun_model_of_str: string -> nun_model
-end;
-
-structure Nunchaku_Model : NUNCHAKU_MODEL =
-struct
-
-open Nunchaku_Problem;
-
-type ty_entry = ty * tm list;
-type tm_entry = tm * tm;
-
-type nun_model =
- {type_model: ty_entry list,
- const_model: tm_entry list,
- skolem_model: tm_entry list};
-
-val nun_SAT = str_of_ident "SAT";
-
-fun str_of_ty_entry (ty, tms) =
- "type " ^ str_of_ty ty ^ " := {" ^ commas (map str_of_tm tms) ^ "}.";
-
-fun str_of_tm_entry (tm, value) =
- "val " ^ str_of_tm tm ^ " := " ^ str_of_tm value ^ ".";
-
-fun str_of_nun_model {type_model, const_model, skolem_model} =
- map str_of_ty_entry type_model @ "" :: map str_of_tm_entry const_model @ "" ::
- map str_of_tm_entry skolem_model
- |> cat_lines;
-
-fun fold_map_ty_entry_idents f (ty, atoms) =
- fold_map_ty_idents f ty
- ##>> fold_map (fold_map_tm_idents f) atoms;
-
-fun fold_map_tm_entry_idents f (tm, value) =
- fold_map_tm_idents f tm
- ##>> fold_map_tm_idents f value;
-
-fun fold_map_nun_model_idents f {type_model, const_model, skolem_model} =
- fold_map (fold_map_ty_entry_idents f) type_model
- ##>> fold_map (fold_map_tm_entry_idents f) const_model
- ##>> fold_map (fold_map_tm_entry_idents f) skolem_model
- #>> (fn ((type_model, const_model), skolem_model) =>
- {type_model = type_model, const_model = const_model, skolem_model = skolem_model});
-
-fun swap_name_pool ({nice_of_ugly, ugly_of_nice} : name_pool) =
- {nice_of_ugly = ugly_of_nice, ugly_of_nice = nice_of_ugly};
-
-fun allocate_ugly pool (nice, ugly_sugg) =
- allocate_nice (swap_name_pool pool) (nice, ugly_sugg) ||> swap_name_pool;
-
-fun ugly_ident nice (pool as {ugly_of_nice, ...}) =
- (case Symtab.lookup ugly_of_nice nice of
- NONE => allocate_ugly pool (nice, nice)
- | SOME ugly => (ugly, pool));
-
-fun ugly_nun_model pool model =
- fst (fold_map_nun_model_idents ugly_ident model pool);
-
-datatype token =
- Ident of ident
-| Symbol of ident
-| Atom of ident * int
-| End_of_Stream;
-
-val rev_str = String.implode o rev o String.explode;
-
-fun atom_of_str s =
- (case first_field "_" (rev_str s) of
- SOME (rev_suf, rev_pre) =>
- let
- val pre = rev_str rev_pre;
- val suf = rev_str rev_suf;
- in
- (case Int.fromString suf of
- SOME j => Atom (ident_of_str pre, j)
- | NONE => raise Fail "ill-formed atom")
- end
- | NONE => raise Fail "ill-formed atom");
-
-fun is_alnum_etc_char c = Char.isAlphaNum c orelse c = #"_" orelse c = #"/";
-
-val multi_ids =
- [nun_arrow, nun_assign, nun_conj, nun_disj, nun_implies, nun_unparsable, nun_irrelevant];
-
-val nun_anon_fun_prefix_exploded = String.explode nun_anon_fun_prefix;
-val [nun_dollar_char] = String.explode nun_dollar;
-
-fun next_token [] = (End_of_Stream, [])
- | next_token (c :: cs) =
- if Char.isSpace c then
- next_token cs
- else if c = nun_dollar_char then
- let val n = find_index (not o is_alnum_etc_char) cs in
- (if n = ~1 then (cs, []) else chop n cs)
- |>> (String.implode
- #> (if is_prefix (op =) nun_anon_fun_prefix_exploded cs then ident_of_str #> Ident
- else atom_of_str))
- end
- else if is_alnum_etc_char c then
- let val n = find_index (not o is_alnum_etc_char) cs in
- (if n = ~1 then (cs, []) else chop n cs)
- |>> (cons c #> String.implode #> ident_of_str #> Ident)
- end
- else
- let
- fun next_multi id =
- let
- val s = str_of_ident id;
- val n = String.size s - 1;
- in
- if c = String.sub (s, 0) andalso
- is_prefix (op =) (String.explode (String.substring (s, 1, n))) cs then
- SOME (Symbol id, drop n cs)
- else
- NONE
- end;
- in
- (case get_first next_multi multi_ids of
- SOME res => res
- | NONE => (Symbol (ident_of_str (String.str c)), cs))
- end;
-
-val tokenize =
- let
- fun toks cs =
- (case next_token cs of
- (End_of_Stream, []) => []
- | (tok, cs') => tok :: toks cs');
- in
- toks o String.explode
- end;
-
-fun parse_enum sep scan = scan ::: Scan.repeat (sep |-- scan);
-
-fun parse_tok tok = Scan.one (curry (op =) tok);
-
-val parse_ident = Scan.some (try (fn Ident id => id));
-val parse_id = parse_tok o Ident;
-val parse_sym = parse_tok o Symbol;
-val parse_atom = Scan.some (try (fn Atom id_j => id_j));
-
-val confusing_ids = [nun_else, nun_then, nun_with];
-
-val parse_confusing_id = Scan.one (fn Ident id => member (op =) confusing_ids id | _ => false);
-
-fun parse_ty toks =
- (parse_ty_arg -- Scan.option (parse_sym nun_arrow -- parse_ty)
- >> (fn (ty, NONE) => ty
- | (lhs, SOME (Symbol id, rhs)) => NType (id, [lhs, rhs]))) toks
-and parse_ty_arg toks =
- (parse_ident >> (rpair [] #> NType)
- || parse_sym nun_lparen |-- parse_ty --| parse_sym nun_rparen) toks;
-
-val parse_choice_or_unique =
- (parse_tok (Ident nun_choice) || parse_tok (Ident nun_unique)
- || parse_tok (Ident nun_unique_unsafe))
- -- parse_ty_arg
- >> (fn (Ident id, ty) => NConst (id, [ty], mk_arrows_ty ([ty, prop_ty], ty)));
-
-fun parse_tm toks =
- (parse_id nun_lambda |-- Scan.repeat parse_arg --| parse_sym nun_dot -- parse_tm >> nabss
- || parse_id nun_mu |-- parse_arg --| parse_sym nun_dot -- parse_tm
- >> (fn (var, body) =>
- let val ty = safe_ty_of body in
- NApp (NConst (nun_mu, [ty], mk_arrow_ty (mk_arrow_ty (ty, ty), ty)), NAbs (var, body))
- end)
- || parse_id nun_if |-- parse_tm --| parse_id nun_then -- parse_tm --| parse_id nun_else
- -- parse_tm
- >> (fn ((cond, th), el) =>
- let val ty = safe_ty_of th in
- napps (NConst (nun_if, [ty], mk_arrows_ty ([prop_ty, ty, ty], ty)), [cond, th, el])
- end)
- || parse_implies) toks
-and parse_implies toks =
- (parse_disj -- Scan.option (parse_sym nun_implies -- parse_implies)
- >> (fn (tm, NONE) => tm
- | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
-and parse_disj toks =
- (parse_conj -- Scan.option (parse_sym nun_disj -- parse_disj)
- >> (fn (tm, NONE) => tm
- | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
-and parse_conj toks =
- (parse_equals -- Scan.option (parse_sym nun_conj -- parse_conj)
- >> (fn (tm, NONE) => tm
- | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
-and parse_equals toks =
- (parse_comb -- Scan.option (parse_sym nun_equals -- parse_comb)
- >> (fn (tm, NONE) => tm
- | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
-and parse_comb toks =
- (parse_arg -- Scan.repeat (Scan.unless parse_confusing_id parse_arg) >> napps) toks
-and parse_arg toks =
- (parse_choice_or_unique
- || parse_ident >> (fn id => NConst (id, [], dummy_ty))
- || parse_sym nun_irrelevant
- |-- Scan.option (parse_sym nun_lparen |-- parse_tm --| parse_sym nun_rparen) (* FIXME *)
- >> (fn _ => NConst (nun_irrelevant, [], dummy_ty))
- || parse_sym nun_unparsable |-- parse_ty >> (fn ty => NConst (nun_unparsable, [], ty))
- || parse_sym nun_lparen |-- parse_tm -- Scan.option (parse_sym nun_colon |-- parse_ty)
- --| parse_sym nun_rparen
- >> (fn (NConst (id, [], _), SOME ty) => NConst (id, [], ty)
- | (tm, _) => tm)
- || parse_atom >> (fn (id, j) => NAtom (j, NType (id, [])))) toks;
-
-val parse_witness_name =
- parse_ident >> (fn id => NConst (hd (space_explode "/" id), [], dummy_ty));
-
-val parse_witness =
- parse_id nun__witness_of |-- parse_sym nun_lparen |-- (parse_id nun_forall || parse_id nun_exists)
- |-- Scan.option (parse_sym nun_lparen) |-- parse_witness_name
- --| Scan.repeat (Scan.one (curry (op <>) (Symbol nun_assign)));
-
-datatype entry =
- Type_Entry of ty_entry
-| Skolem_Entry of tm_entry
-| Const_Entry of tm_entry;
-
-val parse_entry =
- (parse_id nun_type |-- parse_ty --| parse_sym nun_assign --| parse_sym nun_lbrace --
- parse_enum (parse_sym nun_comma) parse_tm --| parse_sym nun_rbrace
- >> Type_Entry
- || parse_id nun_val |-- parse_witness --| parse_sym nun_assign -- parse_tm >> Skolem_Entry
- || parse_id nun_val |-- parse_tm --| parse_sym nun_assign -- parse_tm >> Const_Entry)
- --| parse_sym nun_dot;
-
-val parse_model =
- parse_id nun_SAT |-- parse_sym nun_colon |-- parse_sym nun_lbrace |-- Scan.repeat parse_entry
- --| parse_sym nun_rbrace;
-
-fun add_entry entry ({type_model, const_model, skolem_model} : nun_model) =
- (case entry of
- Type_Entry e =>
- {type_model = e :: type_model, const_model = const_model, skolem_model = skolem_model}
- | Skolem_Entry e =>
- {type_model = type_model, const_model = const_model, skolem_model = e :: skolem_model}
- | Const_Entry e =>
- {type_model = type_model, const_model = e :: const_model, skolem_model = skolem_model});
-
-fun nun_model_of_str str =
- let val (entries, _) = parse_model (tokenize str) in
- {type_model = [], const_model = [], skolem_model = []}
- |> fold_rev add_entry entries
- end;
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_problem.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,799 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_problem.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Abstract syntax tree for Nunchaku problems.
-*)
-
-signature NUNCHAKU_PROBLEM =
-sig
- eqtype ident
-
- datatype ty =
- NType of ident * ty list
-
- datatype tm =
- NAtom of int * ty
- | NConst of ident * ty list * ty
- | NAbs of tm * tm
- | NMatch of tm * (ident * tm list * tm) list
- | NApp of tm * tm
-
- type nun_copy_spec =
- {abs_ty: ty,
- rep_ty: ty,
- subset: tm option,
- quotient: tm option,
- abs: tm,
- rep: tm}
-
- type nun_ctr_spec =
- {ctr: tm,
- arg_tys: ty list}
-
- type nun_co_data_spec =
- {ty: ty,
- ctrs: nun_ctr_spec list}
-
- type nun_const_spec =
- {const: tm,
- props: tm list}
-
- type nun_consts_spec =
- {consts: tm list,
- props: tm list}
-
- datatype nun_command =
- NTVal of ty * (int option * int option)
- | NCopy of nun_copy_spec
- | NData of nun_co_data_spec list
- | NCodata of nun_co_data_spec list
- | NVal of tm * ty
- | NPred of bool * nun_const_spec list
- | NCopred of nun_const_spec list
- | NRec of nun_const_spec list
- | NSpec of nun_consts_spec
- | NAxiom of tm
- | NGoal of tm
- | NEval of tm
-
- type nun_problem =
- {commandss: nun_command list list,
- sound: bool,
- complete: bool}
-
- type name_pool =
- {nice_of_ugly: string Symtab.table,
- ugly_of_nice: string Symtab.table}
-
- val nun_abstract: string
- val nun_and: string
- val nun_anon_fun_prefix: string
- val nun_arrow: string
- val nun_asserting: string
- val nun_assign: string
- val nun_at: string
- val nun_axiom: string
- val nun_bar: string
- val nun_choice: string
- val nun_codata: string
- val nun_colon: string
- val nun_comma: string
- val nun_concrete: string
- val nun_conj: string
- val nun_copred: string
- val nun_copy: string
- val nun_data: string
- val nun_disj: string
- val nun_dollar: string
- val nun_dot: string
- val nun_dummy: string
- val nun_else: string
- val nun_end: string
- val nun_equals: string
- val nun_eval: string
- val nun_exists: string
- val nun_false: string
- val nun_forall: string
- val nun_goal: string
- val nun_hash: string
- val nun_if: string
- val nun_implies: string
- val nun_irrelevant: string
- val nun_lambda: string
- val nun_lbrace: string
- val nun_lbracket: string
- val nun_lparen: string
- val nun_match: string
- val nun_mu: string
- val nun_not: string
- val nun_partial_quotient: string
- val nun_pred: string
- val nun_prop: string
- val nun_quotient: string
- val nun_rbrace: string
- val nun_rbracket: string
- val nun_rec: string
- val nun_rparen: string
- val nun_semicolon: string
- val nun_spec: string
- val nun_subset: string
- val nun_then: string
- val nun_true: string
- val nun_type: string
- val nun_unparsable: string
- val nun_unique: string
- val nun_unique_unsafe: string
- val nun_val: string
- val nun_wf: string
- val nun_with: string
- val nun__witness_of: string
-
- val ident_of_str: string -> ident
- val str_of_ident: ident -> string
- val encode_args: string list -> string
- val nun_const_of_str: string list -> string -> ident
- val nun_tconst_of_str: string list -> string -> ident
- val nun_free_of_str: string -> ident
- val nun_tfree_of_str: string -> ident
- val nun_var_of_str: string -> ident
- val str_of_nun_const: ident -> string list * string
- val str_of_nun_tconst: ident -> string list * string
- val str_of_nun_free: ident -> string
- val str_of_nun_tfree: ident -> string
- val str_of_nun_var: ident -> string
-
- val dummy_ty: ty
- val prop_ty: ty
- val mk_arrow_ty: ty * ty -> ty
- val mk_arrows_ty: ty list * ty -> ty
- val nabss: tm list * tm -> tm
- val napps: tm * tm list -> tm
-
- val ty_of: tm -> ty
- val safe_ty_of: tm -> ty
-
- val fold_map_ty_idents: (string -> 'a -> string * 'a) -> ty -> 'a -> ty * 'a
- val fold_map_tm_idents: (string -> 'a -> string * 'a) -> tm -> 'a -> tm * 'a
- val fold_map_nun_command_idents: (string -> 'a -> string * 'a) -> nun_command -> 'a ->
- nun_command * 'a
- val fold_map_nun_problem_idents: (string -> 'a -> string * 'a) -> nun_problem -> 'a ->
- nun_problem * 'a
-
- val allocate_nice: name_pool -> string * string -> string * name_pool
-
- val rcomb_tms: tm list -> tm -> tm
- val abs_tms: tm list -> tm -> tm
- val beta_reduce_tm: tm -> tm
- val eta_expandN_tm: int -> tm -> tm
- val eta_expand_builtin_tm: tm -> tm
-
- val str_of_ty: ty -> string
- val str_of_tm: tm -> string
- val str_of_tmty: tm -> string
-
- val nice_nun_problem: nun_problem -> nun_problem * name_pool
- val str_of_nun_problem: nun_problem -> string
-end;
-
-structure Nunchaku_Problem : NUNCHAKU_PROBLEM =
-struct
-
-open Nunchaku_Util;
-
-type ident = string;
-
-datatype ty =
- NType of ident * ty list;
-
-datatype tm =
- NAtom of int * ty
-| NConst of ident * ty list * ty
-| NAbs of tm * tm
-| NMatch of tm * (ident * tm list * tm) list
-| NApp of tm * tm;
-
-type nun_copy_spec =
- {abs_ty: ty,
- rep_ty: ty,
- subset: tm option,
- quotient: tm option,
- abs: tm,
- rep: tm};
-
-type nun_ctr_spec =
- {ctr: tm,
- arg_tys: ty list};
-
-type nun_co_data_spec =
- {ty: ty,
- ctrs: nun_ctr_spec list};
-
-type nun_const_spec =
- {const: tm,
- props: tm list};
-
-type nun_consts_spec =
- {consts: tm list,
- props: tm list};
-
-datatype nun_command =
- NTVal of ty * (int option * int option)
-| NCopy of nun_copy_spec
-| NData of nun_co_data_spec list
-| NCodata of nun_co_data_spec list
-| NVal of tm * ty
-| NPred of bool * nun_const_spec list
-| NCopred of nun_const_spec list
-| NRec of nun_const_spec list
-| NSpec of nun_consts_spec
-| NAxiom of tm
-| NGoal of tm
-| NEval of tm;
-
-type nun_problem =
- {commandss: nun_command list list,
- sound: bool,
- complete: bool};
-
-type name_pool =
- {nice_of_ugly: string Symtab.table,
- ugly_of_nice: string Symtab.table};
-
-val nun_abstract = "abstract";
-val nun_and = "and";
-val nun_anon_fun_prefix = "anon_fun_";
-val nun_arrow = "->";
-val nun_asserting = "asserting";
-val nun_assign = ":=";
-val nun_at = "@";
-val nun_axiom = "axiom";
-val nun_bar = "|";
-val nun_choice = "choice";
-val nun_codata = "codata";
-val nun_colon = ":";
-val nun_comma = ",";
-val nun_concrete = "concrete";
-val nun_conj = "&&";
-val nun_copred = "copred";
-val nun_copy = "copy";
-val nun_data = "data";
-val nun_disj = "||";
-val nun_dollar = "$";
-val nun_dot = ".";
-val nun_dummy = "_";
-val nun_else = "else";
-val nun_end = "end";
-val nun_equals = "=";
-val nun_eval = "eval";
-val nun_exists = "exists";
-val nun_false = "false";
-val nun_forall = "forall";
-val nun_goal = "goal";
-val nun_hash = "#";
-val nun_if = "if";
-val nun_implies = "=>";
-val nun_irrelevant = "?__";
-val nun_lambda = "fun";
-val nun_lbrace = "{";
-val nun_lbracket = "[";
-val nun_lparen = "(";
-val nun_match = "match";
-val nun_mu = "mu";
-val nun_not = "~";
-val nun_partial_quotient = "partial_quotient";
-val nun_pred = "pred";
-val nun_prop = "prop";
-val nun_quotient = "quotient";
-val nun_rbrace = "}";
-val nun_rbracket = "]";
-val nun_rec = "rec";
-val nun_rparen = ")";
-val nun_semicolon = ";";
-val nun_spec = "spec";
-val nun_subset = "subset";
-val nun_then = "then";
-val nun_true = "true";
-val nun_type = "type";
-val nun_unique = "unique";
-val nun_unique_unsafe = "unique_unsafe";
-val nun_unparsable = "?__unparsable";
-val nun_val = "val";
-val nun_wf = "wf";
-val nun_with = "with";
-val nun__witness_of = "_witness_of";
-
-val nun_parens = enclose nun_lparen nun_rparen;
-
-fun nun_parens_if_space s = s |> String.isSubstring " " s ? nun_parens;
-
-fun str_of_nun_arg_list str_of_arg =
- map (prefix " " o nun_parens_if_space o str_of_arg) #> space_implode "";
-
-fun str_of_nun_and_list str_of_elem =
- map str_of_elem #> space_implode ("\n" ^ nun_and ^ " ");
-
-val is_nun_const_quantifier = member (op =) [nun_forall, nun_exists];
-val is_nun_const_connective = member (op =) [nun_conj, nun_disj, nun_implies];
-
-val nun_builtin_arity =
- [(nun_asserting, 2),
- (nun_conj, 2),
- (nun_disj, 2),
- (nun_equals, 2),
- (nun_exists, 1),
- (nun_false, 0),
- (nun_forall, 1),
- (nun_if, 3),
- (nun_implies, 2),
- (nun_not, 1),
- (nun_true, 0)];
-
-val arity_of_nun_builtin = AList.lookup (op =) nun_builtin_arity #> the_default 0;
-
-val nun_const_prefix = "c.";
-val nun_free_prefix = "f.";
-val nun_var_prefix = "v.";
-val nun_tconst_prefix = "C.";
-val nun_tfree_prefix = "F.";
-val nun_custom_id_suffix = "_";
-
-val ident_of_str = I : string -> ident;
-val str_of_ident = I : ident -> string;
-
-val encode_args = enclose "(" ")" o commas;
-
-fun decode_args s =
- let
- fun delta #"(" = 1
- | delta #")" = ~1
- | delta _ = 0;
-
- fun dec 0 (#"(" :: #")" :: cs) _ = ([], String.implode cs)
- | dec 0 (#"(" :: cs) [] = dec 1 cs [[]]
- | dec 0 cs _ = ([], String.implode cs)
- | dec _ [] _ = raise Fail ("ill-formed arguments in " ^ quote s)
- | dec 1 (#")" :: cs) args = (rev (map (String.implode o rev) args), String.implode cs)
- | dec 1 (#"," :: cs) args = dec 1 cs ([] :: args)
- | dec n (c :: cs) (arg :: args) = dec (n + delta c) cs ((c :: arg) :: args);
- in
- dec 0 (String.explode s) []
- end;
-
-fun nun_const_of_str args =
- suffix nun_custom_id_suffix #> prefix nun_const_prefix #> prefix (encode_args args);
-fun nun_tconst_of_str args =
- suffix nun_custom_id_suffix #> prefix nun_tconst_prefix #> prefix (encode_args args);
-
-val nun_free_of_str = suffix nun_custom_id_suffix #> prefix nun_free_prefix;
-val nun_tfree_of_str = suffix nun_custom_id_suffix #> prefix nun_tfree_prefix;
-val nun_var_of_str = suffix nun_custom_id_suffix #> prefix nun_var_prefix;
-val str_of_nun_const = decode_args ##> unprefix nun_const_prefix ##> unsuffix nun_custom_id_suffix;
-val str_of_nun_tconst = decode_args ##> unprefix nun_tconst_prefix ##> unsuffix nun_custom_id_suffix;
-val str_of_nun_free = unprefix nun_free_prefix #> unsuffix nun_custom_id_suffix;
-val str_of_nun_tfree = unprefix nun_tfree_prefix #> unsuffix nun_custom_id_suffix;
-val str_of_nun_var = unprefix nun_var_prefix #> unsuffix nun_custom_id_suffix;
-
-fun index_name s 0 = s
- | index_name s j =
- let
- val n = size s;
- val m = n - 1;
- in
- String.substring (s, 0, m) ^ string_of_int j ^ String.substring (s, m, n - m)
- end;
-
-val dummy_ty = NType (nun_dummy, []);
-val prop_ty = NType (nun_prop, []);
-
-fun mk_arrow_ty (dom, ran) = NType (nun_arrow, [dom, ran]);
-val mk_arrows_ty = Library.foldr mk_arrow_ty;
-
-val nabss = Library.foldr NAbs;
-val napps = Library.foldl NApp;
-
-fun domain_ty (NType (_, [ty, _])) = ty
- | domain_ty ty = ty;
-
-fun range_ty (NType (_, [_, ty])) = ty
- | range_ty ty = ty;
-
-fun domain_tys 0 _ = []
- | domain_tys n ty = domain_ty ty :: domain_tys (n - 1) (range_ty ty);
-
-fun ty_of (NAtom (_, ty)) = ty
- | ty_of (NConst (_, _, ty)) = ty
- | ty_of (NAbs (var, body)) = mk_arrow_ty (ty_of var, ty_of body)
- | ty_of (NMatch (_, (_, _, body1) :: _)) = ty_of body1
- | ty_of (NApp (const, _)) = range_ty (ty_of const);
-
-val safe_ty_of = try ty_of #> the_default dummy_ty;
-
-fun strip_nun_binders binder (app as NApp (NConst (id, _, _), NAbs (var, body))) =
- if id = binder then
- strip_nun_binders binder body
- |>> cons var
- else
- ([], app)
- | strip_nun_binders _ tm = ([], tm);
-
-fun fold_map_option _ NONE = pair NONE
- | fold_map_option f (SOME x) = f x #>> SOME;
-
-fun fold_map_ty_idents f (NType (id, tys)) =
- f id
- ##>> fold_map (fold_map_ty_idents f) tys
- #>> NType;
-
-fun fold_map_match_branch_idents f (id, vars, body) =
- f id
- ##>> fold_map (fold_map_tm_idents f) vars
- ##>> fold_map_tm_idents f body
- #>> Scan.triple1
-and fold_map_tm_idents f (NAtom (j, ty)) =
- fold_map_ty_idents f ty
- #>> curry NAtom j
- | fold_map_tm_idents f (NConst (id, tys, ty)) =
- f id
- ##>> fold_map (fold_map_ty_idents f) tys
- ##>> fold_map_ty_idents f ty
- #>> (Scan.triple1 #> NConst)
- | fold_map_tm_idents f (NAbs (var, body)) =
- fold_map_tm_idents f var
- ##>> fold_map_tm_idents f body
- #>> NAbs
- | fold_map_tm_idents f (NMatch (obj, branches)) =
- fold_map_tm_idents f obj
- ##>> fold_map (fold_map_match_branch_idents f) branches
- #>> NMatch
- | fold_map_tm_idents f (NApp (const, arg)) =
- fold_map_tm_idents f const
- ##>> fold_map_tm_idents f arg
- #>> NApp;
-
-fun fold_map_nun_copy_spec_idents f {abs_ty, rep_ty, subset, quotient, abs, rep} =
- fold_map_ty_idents f abs_ty
- ##>> fold_map_ty_idents f rep_ty
- ##>> fold_map_option (fold_map_tm_idents f) subset
- ##>> fold_map_option (fold_map_tm_idents f) quotient
- ##>> fold_map_tm_idents f abs
- ##>> fold_map_tm_idents f rep
- #>> (fn (((((abs_ty, rep_ty), subset), quotient), abs), rep) =>
- {abs_ty = abs_ty, rep_ty = rep_ty, subset = subset, quotient = quotient, abs = abs, rep = rep});
-
-fun fold_map_nun_ctr_spec_idents f {ctr, arg_tys} =
- fold_map_tm_idents f ctr
- ##>> fold_map (fold_map_ty_idents f) arg_tys
- #>> (fn (ctr, arg_tys) => {ctr = ctr, arg_tys = arg_tys});
-
-fun fold_map_nun_co_data_spec_idents f {ty, ctrs} =
- fold_map_ty_idents f ty
- ##>> fold_map (fold_map_nun_ctr_spec_idents f) ctrs
- #>> (fn (ty, ctrs) => {ty = ty, ctrs = ctrs});
-
-fun fold_map_nun_const_spec_idents f {const, props} =
- fold_map_tm_idents f const
- ##>> fold_map (fold_map_tm_idents f) props
- #>> (fn (const, props) => {const = const, props = props});
-
-fun fold_map_nun_consts_spec_idents f {consts, props} =
- fold_map (fold_map_tm_idents f) consts
- ##>> fold_map (fold_map_tm_idents f) props
- #>> (fn (consts, props) => {consts = consts, props = props});
-
-fun fold_map_nun_command_idents f (NTVal (ty, cards)) =
- fold_map_ty_idents f ty
- #>> (rpair cards #> NTVal)
- | fold_map_nun_command_idents f (NCopy spec) =
- fold_map_nun_copy_spec_idents f spec
- #>> NCopy
- | fold_map_nun_command_idents f (NData specs) =
- fold_map (fold_map_nun_co_data_spec_idents f) specs
- #>> NData
- | fold_map_nun_command_idents f (NCodata specs) =
- fold_map (fold_map_nun_co_data_spec_idents f) specs
- #>> NCodata
- | fold_map_nun_command_idents f (NVal (tm, ty)) =
- fold_map_tm_idents f tm
- ##>> fold_map_ty_idents f ty
- #>> NVal
- | fold_map_nun_command_idents f (NPred (wf, specs)) =
- fold_map (fold_map_nun_const_spec_idents f) specs
- #>> curry NPred wf
- | fold_map_nun_command_idents f (NCopred specs) =
- fold_map (fold_map_nun_const_spec_idents f) specs
- #>> NCopred
- | fold_map_nun_command_idents f (NRec specs) =
- fold_map (fold_map_nun_const_spec_idents f) specs
- #>> NRec
- | fold_map_nun_command_idents f (NSpec spec) =
- fold_map_nun_consts_spec_idents f spec
- #>> NSpec
- | fold_map_nun_command_idents f (NAxiom tm) =
- fold_map_tm_idents f tm
- #>> NAxiom
- | fold_map_nun_command_idents f (NGoal tm) =
- fold_map_tm_idents f tm
- #>> NGoal
- | fold_map_nun_command_idents f (NEval tm) =
- fold_map_tm_idents f tm
- #>> NEval;
-
-fun fold_map_nun_problem_idents f ({commandss, sound, complete} : nun_problem) =
- fold_map (fold_map (fold_map_nun_command_idents f)) commandss
- #>> (fn commandss' => {commandss = commandss', sound = sound, complete = complete});
-
-fun dest_rassoc_args oper arg0 rest =
- (case rest of
- NApp (NApp (oper', arg1), rest') =>
- if oper' = oper then arg0 :: dest_rassoc_args oper arg1 rest' else [arg0, rest]
- | _ => [arg0, rest]);
-
-fun replace_tm from to =
- let
- (* This code assumes all enclosing binders bind distinct variables and bound variables are
- distinct from any other variables. *)
- fun repl_br (id, vars, body) = (id, map repl vars, repl body)
- and repl (NApp (const, arg)) = NApp (repl const, repl arg)
- | repl (NAbs (var, body)) = NAbs (var, repl body)
- | repl (NMatch (obj, branches)) = NMatch (repl obj, map repl_br branches)
- | repl tm = if tm = from then to else tm;
- in
- repl
- end;
-
-val rcomb_tms = fold (fn arg => fn func => NApp (func, arg));
-val abs_tms = fold_rev (curry NAbs);
-
-fun fresh_var_names_wrt_tm n tm =
- let
- fun max_var_br (_, vars, body) = fold max_var (body :: vars)
- and max_var (NAtom _) = I
- | max_var (NConst (id, _, _)) =
- (fn max => if String.isPrefix nun_var_prefix id andalso size id > size max then id else max)
- | max_var (NApp (func, arg)) = fold max_var [func, arg]
- | max_var (NAbs (var, body)) = fold max_var [var, body]
- | max_var (NMatch (obj, branches)) = max_var obj #> fold max_var_br branches;
-
- val dummy_name = nun_var_of_str Name.uu;
- val max_name = max_var tm dummy_name;
- in
- map (index_name max_name) (1 upto n)
- end;
-
-fun beta_reduce_tm (NApp (NAbs (var, body), arg)) = beta_reduce_tm (replace_tm var arg body)
- | beta_reduce_tm (NApp (const, arg)) =
- (case beta_reduce_tm const of
- const' as NAbs _ => beta_reduce_tm (NApp (const', arg))
- | const' => NApp (const', beta_reduce_tm arg))
- | beta_reduce_tm (NAbs (var, body)) = NAbs (var, beta_reduce_tm body)
- | beta_reduce_tm (NMatch (obj, branches)) =
- NMatch (beta_reduce_tm obj, map (@{apply 3(3)} beta_reduce_tm) branches)
- | beta_reduce_tm tm = tm;
-
-fun eta_expandN_tm 0 tm = tm
- | eta_expandN_tm n tm =
- let
- val var_names = fresh_var_names_wrt_tm n tm;
- val arg_tys = domain_tys n (ty_of tm);
- val vars = map2 (fn id => fn ty => NConst (id, [], ty)) var_names arg_tys;
- in
- abs_tms vars (rcomb_tms vars tm)
- end;
-
-val eta_expand_builtin_tm =
- let
- fun expand_quant_arg (NAbs (var, body)) = NAbs (var, expand_quant_arg body)
- | expand_quant_arg (NMatch (obj, branches)) =
- NMatch (obj, map (@{apply 3(3)} expand_quant_arg) branches)
- | expand_quant_arg (tm as NApp (_, NAbs _)) = tm
- | expand_quant_arg (NApp (quant, arg)) = NApp (quant, eta_expandN_tm 1 arg)
- | expand_quant_arg tm = tm;
-
- fun expand args (NApp (func, arg)) = expand (expand [] arg :: args) func
- | expand args (func as NConst (id, _, _)) =
- let val missing = Int.max (0, arity_of_nun_builtin id - length args) in
- rcomb_tms args func
- |> eta_expandN_tm missing
- |> is_nun_const_quantifier id ? expand_quant_arg
- end
- | expand args (func as NAtom _) = rcomb_tms args func
- | expand args (NAbs (var, body)) = rcomb_tms args (NAbs (var, expand [] body))
- | expand args (NMatch (obj, branches)) =
- rcomb_tms args (NMatch (obj, map (@{apply 3(3)} (expand [])) branches));
- in
- expand []
- end;
-
-val str_of_ty =
- let
- fun str_of maybe_parens (NType (id, tys)) =
- if id = nun_arrow then
- (case tys of
- [ty, ty'] => maybe_parens (str_of nun_parens ty ^ " " ^ nun_arrow ^ " " ^ str_of I ty'))
- else
- id ^ str_of_nun_arg_list (str_of I) tys
- in
- str_of I
- end;
-
-val (str_of_tmty, str_of_tm) =
- let
- fun is_triv_head (NConst (id, _, _)) = (arity_of_nun_builtin id = 0)
- | is_triv_head (NAtom _) = true
- | is_triv_head (NApp (const, _)) = is_triv_head const
- | is_triv_head (NAbs _) = false
- | is_triv_head (NMatch _) = false;
-
- fun str_of_at_const id tys =
- nun_at ^ str_of_ident id ^ str_of_nun_arg_list str_of_ty tys;
-
- fun str_of_app ty_opt const arg =
- let
- val ty_opt' =
- try (Option.map (fn ty => mk_arrow_ty (ty_of arg, ty))) ty_opt
- |> the_default NONE;
- in
- (str_of ty_opt' const |> (case const of NAbs _ => nun_parens | _ => I)) ^
- str_of_nun_arg_list (str_of NONE) [arg]
- end
- and str_of_br ty_opt (id, vars, body) =
- " " ^ nun_bar ^ " " ^ id ^ space_implode "" (map (prefix " " o str_of NONE) vars) ^ " " ^
- nun_arrow ^ " " ^ str_of ty_opt body
- and str_of_tmty tm =
- let val ty = ty_of tm in
- str_of (SOME ty) tm ^ " " ^ nun_colon ^ " " ^ str_of_ty ty
- end
- and str_of _ (NAtom (j, _)) = nun_dollar ^ string_of_int j
- | str_of _ (NConst (id, [], _)) = str_of_ident id
- | str_of (SOME ty0) (NConst (id, tys, ty)) =
- if ty = ty0 then str_of_ident id else str_of_at_const id tys
- | str_of _ (NConst (id, tys, _)) = str_of_at_const id tys
- | str_of ty_opt (NAbs (var, body)) =
- nun_lambda ^ " " ^
- (case ty_opt of
- SOME ty => str_of (SOME (domain_ty ty))
- | NONE => nun_parens o str_of_tmty) var ^
- nun_dot ^ " " ^ str_of (Option.map range_ty ty_opt) body
- | str_of ty_opt (NMatch (obj, branches)) =
- nun_match ^ " " ^ str_of NONE obj ^ " " ^ nun_with ^ " " ^
- space_implode "" (map (str_of_br ty_opt) branches) ^ " " ^ nun_end
- | str_of ty_opt (app as NApp (func, argN)) =
- (case (func, argN) of
- (NApp (oper as NConst (id, _, _), arg1), arg2) =>
- if id = nun_asserting then
- str_of ty_opt arg1 ^ " " ^ nun_asserting ^ " " ^ str_of (SOME prop_ty) arg2
- |> nun_parens
- else if id = nun_equals then
- (str_of NONE arg1 |> not (is_triv_head arg1) ? nun_parens) ^ " " ^ id ^ " " ^
- (str_of (try ty_of arg2) arg2 |> not (is_triv_head arg2) ? nun_parens)
- else if is_nun_const_connective id then
- let val args = dest_rassoc_args oper arg1 arg2 in
- space_implode (" " ^ id ^ " ")
- (map (fn arg => str_of NONE arg |> not (is_triv_head arg) ? nun_parens) args)
- end
- else
- str_of_app ty_opt func argN
- | (NApp (NApp (NConst (id, _, _), arg1), arg2), arg3) =>
- if id = nun_if then
- nun_if ^ " " ^ str_of NONE arg1 ^ " " ^ nun_then ^ " " ^ str_of NONE arg2 ^ " " ^
- nun_else ^ " " ^ str_of NONE arg3
- |> nun_parens
- else
- str_of_app ty_opt func argN
- | (NConst (id, _, _), NAbs _) =>
- if is_nun_const_quantifier id then
- let val (vars, body) = strip_nun_binders id app in
- id ^ " " ^ space_implode " " (map (nun_parens o str_of_tmty) vars) ^ nun_dot ^ " " ^
- str_of NONE body
- end
- else
- str_of_app ty_opt func argN
- | _ => str_of_app ty_opt func argN);
- in
- (str_of_tmty, str_of NONE)
- end;
-
-val empty_name_pool = {nice_of_ugly = Symtab.empty, ugly_of_nice = Symtab.empty};
-
-val nice_of_ugly_suggestion =
- unascii_of #> Long_Name.base_name #> ascii_of #> unsuffix nun_custom_id_suffix
- #> (fn s => if s = "" orelse not (Char.isAlpha (String.sub (s, 0))) then "x" ^ s else s);
-
-fun allocate_nice ({nice_of_ugly, ugly_of_nice} : name_pool) (ugly, nice_sugg0) =
- let
- fun alloc j =
- let val nice_sugg = index_name nice_sugg0 j in
- (case Symtab.lookup ugly_of_nice nice_sugg of
- NONE =>
- (nice_sugg,
- {nice_of_ugly = Symtab.update_new (ugly, nice_sugg) nice_of_ugly,
- ugly_of_nice = Symtab.update_new (nice_sugg, ugly) ugly_of_nice})
- | SOME _ => alloc (j + 1))
- end;
- in
- alloc 0
- end;
-
-fun nice_ident ugly (pool as {nice_of_ugly, ...}) =
- if String.isSuffix nun_custom_id_suffix ugly then
- (case Symtab.lookup nice_of_ugly ugly of
- NONE => allocate_nice pool (ugly, nice_of_ugly_suggestion ugly)
- | SOME nice => (nice, pool))
- else
- (ugly, pool);
-
-fun nice_nun_problem problem =
- fold_map_nun_problem_idents nice_ident problem empty_name_pool;
-
-fun str_of_tval (NType (id, tys)) =
- str_of_ident id ^ " " ^ nun_colon ^ " " ^
- fold (K (prefix (nun_type ^ " " ^ nun_arrow ^ " "))) tys nun_type;
-
-fun is_triv_subset (NAbs (_, body)) = is_triv_subset body
- | is_triv_subset (NConst (id, _, _)) = (id = nun_true)
- | is_triv_subset _ = false;
-
-fun str_of_nun_copy_spec {abs_ty, rep_ty, subset, quotient, abs, rep} =
- str_of_ty abs_ty ^ " " ^ nun_assign ^ " " ^ str_of_ty rep_ty ^
- (case subset of
- NONE => ""
- | SOME s => if is_triv_subset s then "" else "\n " ^ nun_subset ^ " " ^ str_of_tm s) ^
- (* TODO: use nun_quotient when possible *)
- (case quotient of
- NONE => ""
- | SOME q => "\n " ^ nun_partial_quotient ^ " " ^ str_of_tm q) ^
- "\n " ^ nun_abstract ^ " " ^ str_of_tm abs ^ "\n " ^ nun_concrete ^ " " ^ str_of_tm rep;
-
-fun str_of_nun_ctr_spec {ctr, arg_tys} =
- str_of_tm ctr ^ str_of_nun_arg_list str_of_ty arg_tys;
-
-fun str_of_nun_co_data_spec {ty, ctrs} =
- str_of_ty ty ^ " " ^ nun_assign ^ "\n " ^
- space_implode ("\n" ^ nun_bar ^ " ") (map str_of_nun_ctr_spec ctrs);
-
-fun str_of_nun_const_spec {const, props} =
- str_of_tmty const ^ " " ^ nun_assign ^ "\n " ^
- space_implode (nun_semicolon ^ "\n ") (map str_of_tm props);
-
-fun str_of_nun_consts_spec {consts, props} =
- space_implode (" " ^ nun_and ^ "\n ") (map str_of_tmty consts) ^ " " ^ nun_assign ^ "\n " ^
- space_implode (nun_semicolon ^ "\n ") (map str_of_tm props);
-
-fun str_of_nun_cards_suffix (NONE, NONE) = ""
- | str_of_nun_cards_suffix (c1, c2) =
- let
- val s1 = Option.map (prefix "min_card " o signed_string_of_int) c1;
- val s2 = Option.map (prefix "max_card " o signed_string_of_int) c2;
- in
- enclose " [" "]" (space_implode ", " (map_filter I [s1, s2]))
- end;
-
-fun str_of_nun_command (NTVal (ty, cards)) =
- nun_val ^ " " ^ str_of_tval ty ^ str_of_nun_cards_suffix cards
- | str_of_nun_command (NCopy spec) = nun_copy ^ " " ^ str_of_nun_copy_spec spec
- | str_of_nun_command (NData specs) =
- nun_data ^ " " ^ str_of_nun_and_list str_of_nun_co_data_spec specs
- | str_of_nun_command (NCodata specs) =
- nun_codata ^ " " ^ str_of_nun_and_list str_of_nun_co_data_spec specs
- | str_of_nun_command (NVal (tm, ty)) =
- nun_val ^ " " ^ str_of_tm tm ^ " " ^ nun_colon ^ " " ^ str_of_ty ty
- | str_of_nun_command (NPred (wf, specs)) =
- nun_pred ^ " " ^ (if wf then nun_lbracket ^ nun_wf ^ nun_rbracket ^ " " else "") ^
- str_of_nun_and_list str_of_nun_const_spec specs
- | str_of_nun_command (NCopred specs) =
- nun_copred ^ " " ^ str_of_nun_and_list str_of_nun_const_spec specs
- | str_of_nun_command (NRec specs) =
- nun_rec ^ " " ^ str_of_nun_and_list str_of_nun_const_spec specs
- | str_of_nun_command (NSpec spec) = nun_spec ^ " " ^ str_of_nun_consts_spec spec
- | str_of_nun_command (NAxiom tm) = nun_axiom ^ " " ^ str_of_tm tm
- | str_of_nun_command (NGoal tm) = nun_goal ^ " " ^ str_of_tm tm
- | str_of_nun_command (NEval tm) = nun_hash ^ " " ^ nun_eval ^ " " ^ str_of_tm tm;
-
-fun str_of_nun_problem {commandss, sound, complete} =
- map (cat_lines o map (suffix nun_dot o str_of_nun_command)) commandss
- |> space_implode "\n\n" |> suffix "\n"
- |> prefix (nun_hash ^ " " ^ (if sound then "sound" else "unsound") ^ "\n")
- |> prefix (nun_hash ^ " " ^ (if complete then "complete" else "incomplete") ^ "\n");
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_reconstruct.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,244 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_reconstruct.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Reconstruction of Nunchaku models in Isabelle/HOL.
-*)
-
-signature NUNCHAKU_RECONSTRUCT =
-sig
- type nun_model = Nunchaku_Model.nun_model
-
- type typ_entry = typ * term list
- type term_entry = term * term
-
- type isa_model =
- {type_model: typ_entry list,
- free_model: term_entry list,
- pat_complete_model: term_entry list,
- pat_incomplete_model: term_entry list,
- skolem_model: term_entry list}
-
- val str_of_isa_model: Proof.context -> isa_model -> string
-
- val isa_model_of_nun: Proof.context -> term list -> (typ option * string list) list ->
- nun_model -> isa_model
-end;
-
-structure Nunchaku_Reconstruct : NUNCHAKU_RECONSTRUCT =
-struct
-
-open Nunchaku_Util;
-open Nunchaku_Problem;
-open Nunchaku_Translate;
-open Nunchaku_Model;
-
-type typ_entry = typ * term list;
-type term_entry = term * term;
-
-type isa_model =
- {type_model: typ_entry list,
- free_model: term_entry list,
- pat_complete_model: term_entry list,
- pat_incomplete_model: term_entry list,
- skolem_model: term_entry list};
-
-val anonymousN = "anonymous";
-val irrelevantN = "irrelevant";
-val unparsableN = "unparsable";
-
-val nun_arrow_exploded = String.explode nun_arrow;
-
-val is_ty_meta = member (op =) (String.explode "()->,");
-
-fun next_token_lowlevel [] = (End_of_Stream, [])
- | next_token_lowlevel (c :: cs) =
- if Char.isSpace c then
- next_token_lowlevel cs
- else if not (is_ty_meta c) then
- let val n = find_index (Char.isSpace orf is_ty_meta) cs in
- (if n = ~1 then (cs, []) else chop n cs)
- |>> (cons c #> String.implode #> ident_of_str #> Ident)
- end
- else if is_prefix (op =) nun_arrow_exploded (c :: cs) then
- (Ident nun_arrow, tl cs)
- else
- (Symbol (String.str c), cs);
-
-val tokenize_lowlevel =
- let
- fun toks cs =
- (case next_token_lowlevel cs of
- (End_of_Stream, []) => []
- | (tok, cs') => tok :: toks cs');
- in
- toks o String.explode
- end;
-
-fun parse_lowlevel_ty tok =
- (Scan.optional
- (parse_sym "(" |-- Scan.repeat (parse_lowlevel_ty --| Scan.option (parse_sym ",")) --|
- parse_sym ")")
- []
- -- parse_ident >> (swap #> NType)) tok;
-
-val ty_of_lowlevel_str = fst o parse_lowlevel_ty o tokenize_lowlevel;
-
-fun ident_of_const (NConst (id, _, _)) = id
- | ident_of_const _ = nun_dummy;
-
-fun str_of_typ_entry ctxt (T, ts) =
- "type " ^ Syntax.string_of_typ ctxt T ^
- " := {" ^ commas (map (Syntax.string_of_term ctxt) ts) ^ "}.";
-
-fun str_of_term_entry ctxt (tm, value) =
- "val " ^ Syntax.string_of_term ctxt tm ^ " := " ^ Syntax.string_of_term ctxt value ^ ".";
-
-fun str_of_isa_model ctxt
- {type_model, free_model, pat_complete_model, pat_incomplete_model, skolem_model} =
- map (str_of_typ_entry ctxt) type_model @ "" ::
- map (str_of_term_entry ctxt) free_model @ "" ::
- map (str_of_term_entry ctxt) pat_complete_model @ "" ::
- map (str_of_term_entry ctxt) pat_incomplete_model @ "" ::
- map (str_of_term_entry ctxt) skolem_model
- |> cat_lines;
-
-fun typ_of_nun ctxt =
- let
- fun typ_of (NType (id, tys)) =
- let val Ts = map typ_of tys in
- if id = nun_dummy then
- dummyT
- else if id = nun_prop then
- @{typ bool}
- else if id = nun_arrow then
- Type (@{type_name fun}, Ts)
- else
- (case try str_of_nun_tconst id of
- SOME (args, s) =>
- let val tys' = map ty_of_lowlevel_str args in
- Type (s, map typ_of (tys' @ tys))
- end
- | NONE =>
- (case try str_of_nun_tfree id of
- SOME s => TFree (Proof_Context.check_tfree ctxt (flip_quote s, dummyS))
- | NONE => raise Fail ("unknown type constructor: " ^ quote (str_of_ident id))))
- end;
- in
- typ_of
- end;
-
-fun one_letter_of s =
- let val c = String.sub (Long_Name.base_name s, 0) in
- String.str (if Char.isAlpha c then c else #"x")
- end;
-
-fun base_of_typ (Type (s, _)) = s
- | base_of_typ (TFree (s, _)) = flip_quote s
- | base_of_typ (TVar ((s, _), _)) = flip_quote s;
-
-fun term_of_nun ctxt atomss =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- val typ_of = typ_of_nun ctxt;
-
- fun nth_atom T j =
- let val ss = these (triple_lookup (typ_match thy) atomss T) in
- if j >= 0 andalso j < length ss then nth ss j
- else one_letter_of (base_of_typ T) ^ nat_subscript (j + 1)
- end;
-
- fun term_of _ (NAtom (j, ty)) =
- let val T = typ_of ty in Var ((nth_atom T j, 0), T) end
- | term_of bounds (NConst (id, tys0, ty)) =
- if id = nun_conj then
- HOLogic.conj
- else if id = nun_disj then
- HOLogic.disj
- else if id = nun_choice then
- Const (@{const_name Eps}, typ_of ty)
- else if id = nun_equals then
- Const (@{const_name HOL.eq}, typ_of ty)
- else if id = nun_false then
- @{const False}
- else if id = nun_if then
- Const (@{const_name If}, typ_of ty)
- else if id = nun_implies then
- @{term implies}
- else if id = nun_unique then
- Const (@{const_name The}, typ_of ty)
- else if id = nun_unique_unsafe then
- Const (@{const_name The_unsafe}, typ_of ty)
- else if id = nun_true then
- @{const True}
- else if String.isPrefix nun_anon_fun_prefix id then
- let val j = Int.fromString (unprefix nun_anon_fun_prefix id) |> the_default ~1 in
- Var ((anonymousN ^ nat_subscript (j + 1), 0), typ_of ty)
- end
- else if id = nun_irrelevant then
- (* FIXME: get bounds from Nunchaku *)
- list_comb (Var ((irrelevantN, 0), map (typ_of o safe_ty_of) bounds ---> typ_of ty),
- map Bound (length bounds - 1 downto 0))
- else if id = nun_unparsable then
- (* FIXME: get bounds from Nunchaku *)
- list_comb (Var ((unparsableN, 0), typ_of ty), map Bound (length bounds - 1 downto 0))
- else
- (case try str_of_nun_const id of
- SOME (args, s) =>
- let val tys = map ty_of_lowlevel_str args in
- Sign.mk_const thy (s, map typ_of (tys @ tys0))
- end
- | NONE =>
- (case try str_of_nun_free id of
- SOME s => Free (s, typ_of ty)
- | NONE =>
- (case try str_of_nun_var id of
- SOME s => Var ((s, 0), typ_of ty)
- | NONE =>
- (case find_index (fn bound => ident_of_const bound = id) bounds of
- ~1 => Var ((str_of_ident id, 0), typ_of ty) (* shouldn't happen? *)
- | j => Bound j))))
- | term_of bounds (NAbs (var, body)) =
- let val T = typ_of (safe_ty_of var) in
- Abs (one_letter_of (base_of_typ T), T, term_of (var :: bounds) body)
- end
- | term_of bounds (NApp (func, arg)) =
- let
- fun same () = term_of bounds func $ term_of bounds arg;
- in
- (case (func, arg) of
- (NConst (id, _, _), NAbs _) =>
- if id = nun_mu then
- let val Abs (s, T, body) = term_of bounds arg in
- Const (@{const_name The}, (T --> HOLogic.boolT) --> T)
- $ Abs (s, T, HOLogic.eq_const T $ Bound 0 $ body)
- end
- else
- same ()
- | _ => same ())
- end
- | term_of _ (NMatch _) = raise Fail "unexpected match";
- in
- term_of []
- end;
-
-fun isa_typ_entry_of_nun ctxt atomss (ty, atoms) =
- (typ_of_nun ctxt ty, map (term_of_nun ctxt atomss) atoms);
-
-fun isa_term_entry_of_nun ctxt atomss (tm, value) =
- (term_of_nun ctxt atomss tm, term_of_nun ctxt atomss value);
-
-fun isa_model_of_nun ctxt pat_completes atomss {type_model, const_model, skolem_model} =
- let
- val free_and_const_model = map (isa_term_entry_of_nun ctxt atomss) const_model;
- val (free_model, (pat_complete_model, pat_incomplete_model)) =
- List.partition (is_Free o fst) free_and_const_model
- ||> List.partition (member (op aconv) pat_completes o fst);
- in
- {type_model = map (isa_typ_entry_of_nun ctxt atomss) type_model, free_model = free_model,
- pat_complete_model = pat_complete_model, pat_incomplete_model = pat_incomplete_model,
- skolem_model = map (isa_term_entry_of_nun ctxt atomss) skolem_model}
- end;
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_tool.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,141 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_tool.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Interface to the external "nunchaku" tool.
-*)
-
-signature NUNCHAKU_TOOL =
-sig
- type ty = Nunchaku_Problem.ty
- type tm = Nunchaku_Problem.tm
- type nun_problem = Nunchaku_Problem.nun_problem
-
- type tool_params =
- {solvers: string list,
- overlord: bool,
- debug: bool,
- specialize: bool,
- timeout: Time.time}
-
- type nun_solution =
- {tys: (ty * tm list) list,
- tms: (tm * tm) list}
-
- datatype nun_outcome =
- Unsat
- | Sat of string * nun_solution
- | Unknown of (string * nun_solution) option
- | Timeout
- | Nunchaku_Var_Not_Set
- | Nunchaku_Cannot_Execute
- | Nunchaku_Not_Found
- | CVC4_Cannot_Execute
- | CVC4_Not_Found
- | Unknown_Error of int * string
-
- val nunchaku_home_env_var: string
-
- val solve_nun_problem: tool_params -> nun_problem -> nun_outcome
-end;
-
-structure Nunchaku_Tool : NUNCHAKU_TOOL =
-struct
-
-open Nunchaku_Util;
-open Nunchaku_Problem;
-
-type tool_params =
- {solvers: string list,
- overlord: bool,
- debug: bool,
- specialize: bool,
- timeout: Time.time};
-
-type nun_solution =
- {tys: (ty * tm list) list,
- tms: (tm * tm) list};
-
-datatype nun_outcome =
- Unsat
-| Sat of string * nun_solution
-| Unknown of (string * nun_solution) option
-| Timeout
-| Nunchaku_Var_Not_Set
-| Nunchaku_Cannot_Execute
-| Nunchaku_Not_Found
-| CVC4_Cannot_Execute
-| CVC4_Not_Found
-| Unknown_Error of int * string;
-
-fun bash_output_error s =
- let val {out, err, rc, ...} = Bash.process s in
- ((out, err), rc)
- end;
-
-val nunchaku_home_env_var = "NUNCHAKU_HOME";
-
-val cached_outcome = Synchronized.var "Nunchaku_Tool.cached_outcome"
- (NONE : ((string list * nun_problem) * nun_outcome) option);
-
-fun uncached_solve_nun_problem ({solvers, overlord, specialize, timeout, ...} : tool_params)
- (problem as {sound, complete, ...}) =
- with_tmp_or_overlord_file overlord "nunchaku" "nun" (fn prob_path =>
- if getenv nunchaku_home_env_var = "" then
- Nunchaku_Var_Not_Set
- else
- let
- val bash_cmd =
- "PATH=\"$CVC4_HOME:$KODKODI/bin:$PATH\" \"$" ^
- nunchaku_home_env_var ^ "\"/nunchaku --skolems-in-model --no-color " ^
- (if specialize then "" else "--no-specialize ") ^
- "--solvers \"" ^ Bash_Syntax.string (space_implode " " solvers) ^ "\" " ^
- "--timeout " ^ string_of_int (Time.toSeconds timeout) ^ " " ^
- File.bash_path prob_path;
- val comments =
- [bash_cmd, "This file was generated by Isabelle (most likely Nunchaku)", timestamp ()];
- val prob_str = cat_lines (map (prefix "# ") comments) ^ "\n\n" ^ str_of_nun_problem problem;
- val _ = File.write prob_path prob_str;
- val ((output, error), code) = bash_output_error bash_cmd;
- in
- if String.isPrefix "SAT" output then
- (if sound then Sat else Unknown o SOME) (output, {tys = [], tms = []})
- else if String.isPrefix "UNSAT" output then
- if complete then Unsat else Unknown NONE
- else if String.isSubstring "TIMEOUT" output
- (* FIXME: temporary *)
- orelse String.isSubstring "kodkod failed (errcode 152)" error then
- Timeout
- else if String.isPrefix "UNKNOWN" output then
- Unknown NONE
- else if code = 126 then
- Nunchaku_Cannot_Execute
- else if code = 127 then
- Nunchaku_Not_Found
- else
- Unknown_Error (code,
- simplify_spaces (elide_string 1000 (if error <> "" then error else output)))
- end);
-
-fun solve_nun_problem (params as {solvers, overlord, debug, ...}) problem =
- let val key = (solvers, problem) in
- (case (overlord orelse debug,
- AList.lookup (op =) (the_list (Synchronized.value cached_outcome)) key) of
- (false, SOME outcome) => outcome
- | _ =>
- let
- val outcome = uncached_solve_nun_problem params problem;
-
- fun update_cache () =
- Synchronized.change cached_outcome (K (SOME (key, outcome)));
- in
- (case outcome of
- Unsat => update_cache ()
- | Sat _ => update_cache ()
- | Unknown _ => update_cache ()
- | _ => ());
- outcome
- end)
- end;
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_translate.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,193 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_translate.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-Translation of Isabelle/HOL problems to Nunchaku.
-*)
-
-signature NUNCHAKU_TRANSLATE =
-sig
- type isa_problem = Nunchaku_Collect.isa_problem
- type ty = Nunchaku_Problem.ty
- type nun_problem = Nunchaku_Problem.nun_problem
-
- val flip_quote: string -> string
- val lowlevel_str_of_ty: ty -> string
-
- val nun_problem_of_isa: Proof.context -> isa_problem -> nun_problem
-end;
-
-structure Nunchaku_Translate : NUNCHAKU_TRANSLATE =
-struct
-
-open Nunchaku_Util;
-open Nunchaku_Collect;
-open Nunchaku_Problem;
-
-fun flip_quote s =
- (case try (unprefix "'") s of
- SOME s' => s'
- | NONE => prefix "'" s);
-
-fun lowlevel_str_of_ty (NType (id, tys)) =
- (if null tys then "" else encode_args (map lowlevel_str_of_ty tys)) ^ id;
-
-fun strip_nun_abs 0 tm = ([], tm)
- | strip_nun_abs n (NAbs (var, body)) =
- strip_nun_abs (n - 1) body
- |>> cons var;
-
-val strip_nun_comb =
- let
- fun strip args (NApp (func, arg)) = strip (arg :: args) func
- | strip args tm = (tm, args);
- in
- strip []
- end;
-
-fun ty_of_isa (Type (s, Ts)) =
- let val tys = map ty_of_isa Ts in
- (case s of
- @{type_name bool} => prop_ty
- | @{type_name fun} => NType (nun_arrow, tys)
- | _ =>
- let
- val args = map lowlevel_str_of_ty tys;
- val id = nun_tconst_of_str args s;
- in
- NType (id, [])
- end)
- end
- | ty_of_isa (TFree (s, _)) = NType (nun_tfree_of_str (flip_quote s), [])
- | ty_of_isa (TVar _) = raise Fail "unexpected TVar";
-
-fun gen_tm_of_isa in_prop ctxt t =
- let
- val thy = Proof_Context.theory_of ctxt;
-
- fun id_of_const (x as (s, _)) =
- let val args = map (lowlevel_str_of_ty o ty_of_isa) (Sign.const_typargs thy x) in
- nun_const_of_str args s
- end;
-
- fun tm_of_branch ctr_id var_count f_arg_tm =
- let val (vars, body) = strip_nun_abs var_count f_arg_tm in
- (ctr_id, vars, body)
- end;
-
- fun tm_of bounds (Const (x as (s, T))) =
- (case try (dest_co_datatype_case ctxt) x of
- SOME ctrs =>
- let
- val num_f_args = length ctrs;
- val min_args = num_f_args + 1;
- val var_counts = map (num_binder_types o snd) ctrs;
-
- val dummy_free = Free (Name.uu, T);
- val tm = tm_of bounds dummy_free;
- val tm' = eta_expandN_tm min_args tm;
- val (vars, body) = strip_nun_abs min_args tm';
- val (_, (f_args, obj :: other_args)) = strip_nun_comb body ||> chop num_f_args;
- val f_args' = map2 eta_expandN_tm var_counts f_args;
-
- val ctr_ids = map id_of_const ctrs;
- in
- NMatch (obj, @{map 3} tm_of_branch ctr_ids var_counts f_args')
- |> rcomb_tms other_args
- |> abs_tms vars
- end
- | NONE =>
- if s = @{const_name unreachable} andalso in_prop then
- let val ty = ty_of_isa T in
- napps (NConst (nun_asserting, [ty], mk_arrows_ty ([ty, prop_ty], ty)),
- [NConst (id_of_const x, [], ty), NConst (nun_false, [], prop_ty)])
- end
- else
- let
- val id =
- (case s of
- @{const_name All} => nun_forall
- | @{const_name conj} => nun_conj
- | @{const_name disj} => nun_disj
- | @{const_name HOL.eq} => nun_equals
- | @{const_name Eps} => nun_choice
- | @{const_name Ex} => nun_exists
- | @{const_name False} => nun_false
- | @{const_name If} => nun_if
- | @{const_name implies} => nun_implies
- | @{const_name Not} => nun_not
- | @{const_name The} => nun_unique
- | @{const_name The_unsafe} => nun_unique_unsafe
- | @{const_name True} => nun_true
- | _ => id_of_const x);
- in
- NConst (id, [], ty_of_isa T)
- end)
- | tm_of _ (Free (s, T)) = NConst (nun_free_of_str s, [], ty_of_isa T)
- | tm_of _ (Var ((s, _), T)) = NConst (nun_var_of_str s, [], ty_of_isa T)
- | tm_of bounds (Abs (s, T, t)) =
- let
- val (s', bounds') = Name.variant s bounds;
- val x = Var ((s', 0), T);
- in
- NAbs (tm_of bounds' x, tm_of bounds' (subst_bound (x, t)))
- end
- | tm_of bounds (t $ u) = NApp (tm_of bounds t, tm_of bounds u)
- | tm_of _ (Bound _) = raise Fail "unexpected Bound";
- in
- t
- |> tm_of Name.context
- |> beta_reduce_tm
- |> eta_expand_builtin_tm
- end;
-
-val tm_of_isa = gen_tm_of_isa false;
-val prop_of_isa = gen_tm_of_isa true;
-
-fun nun_copy_spec_of_isa_typedef ctxt {abs_typ, rep_typ, wrt, abs, rep} =
- {abs_ty = ty_of_isa abs_typ, rep_ty = ty_of_isa rep_typ, subset = SOME (tm_of_isa ctxt wrt),
- quotient = NONE, abs = tm_of_isa ctxt abs, rep = tm_of_isa ctxt rep};
-
-fun nun_copy_spec_of_isa_quotient ctxt {abs_typ, rep_typ, wrt, abs, rep} =
- {abs_ty = ty_of_isa abs_typ, rep_ty = ty_of_isa rep_typ, subset = NONE,
- quotient = SOME (tm_of_isa ctxt wrt), abs = tm_of_isa ctxt abs, rep = tm_of_isa ctxt rep};
-
-fun nun_ctr_of_isa ctxt ctr =
- {ctr = tm_of_isa ctxt ctr, arg_tys = map ty_of_isa (binder_types (fastype_of ctr))};
-
-fun nun_co_data_spec_of_isa ctxt {typ, ctrs} =
- {ty = ty_of_isa typ, ctrs = map (nun_ctr_of_isa ctxt) ctrs};
-
-fun nun_const_spec_of_isa ctxt {const, props} =
- {const = tm_of_isa ctxt const, props = map (prop_of_isa ctxt) props};
-
-fun nun_rec_spec_of_isa ctxt {const, props, ...} =
- {const = tm_of_isa ctxt const, props = map (prop_of_isa ctxt) props};
-
-fun nun_consts_spec_of_isa ctxt {consts, props} =
- {consts = map (tm_of_isa ctxt) consts, props = map (prop_of_isa ctxt) props};
-
-fun nun_problem_of_isa ctxt {commandss, sound, complete} =
- let
- fun cmd_of cmd =
- (case cmd of
- ITVal (T, cards) => NTVal (ty_of_isa T, cards)
- | ITypedef spec => NCopy (nun_copy_spec_of_isa_typedef ctxt spec)
- | IQuotient spec => NCopy (nun_copy_spec_of_isa_quotient ctxt spec)
- | ICoData (fp, specs) =>
- BNF_Util.case_fp fp NData NCodata (map (nun_co_data_spec_of_isa ctxt) specs)
- | IVal t => NVal (tm_of_isa ctxt t, ty_of_isa (fastype_of t))
- | ICoPred (fp, wf, specs) =>
- (if wf then curry NPred true
- else if fp = BNF_Util.Least_FP then curry NPred false
- else NCopred) (map (nun_const_spec_of_isa ctxt) specs)
- | IRec specs => NRec (map (nun_rec_spec_of_isa ctxt) specs)
- | ISpec spec => NSpec (nun_consts_spec_of_isa ctxt spec)
- | IAxiom prop => NAxiom (prop_of_isa ctxt prop)
- | IGoal prop => NGoal (prop_of_isa ctxt prop)
- | IEval t => NEval (tm_of_isa ctxt t));
- in
- {commandss = map (map cmd_of) commandss, sound = sound, complete = complete}
- end;
-
-end;
--- a/src/HOL/Nunchaku/Tools/nunchaku_util.ML Thu Sep 07 23:13:15 2017 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,95 +0,0 @@
-(* Title: HOL/Nunchaku/Tools/nunchaku_util.ML
- Author: Jasmin Blanchette, Inria Nancy, LORIA, MPII
- Copyright 2015, 2016
-
-General-purpose functions used by Nunchaku.
-*)
-
-signature NUNCHAKU_UTIL =
-sig
- val elide_string: int -> string -> string
- val nat_subscript: int -> string
- val timestamp: unit -> string
- val parse_bool_option: bool -> string -> string -> bool option
- val parse_time: string -> string -> Time.time
- val string_of_time: Time.time -> string
- val simplify_spaces: string -> string
- val ascii_of: string -> string
- val unascii_of: string -> string
- val double_lookup: ('a * 'a -> bool) -> ('a option * 'b) list -> 'a -> 'b option
- val triple_lookup: (''a * ''a -> bool) -> (''a option * 'b) list -> ''a -> 'b option
- val plural_s_for_list: 'a list -> string
- val with_overlord_file: string -> string -> (Path.T -> 'a) -> 'a
- val with_tmp_or_overlord_file: bool -> string -> string -> (Path.T -> 'a) -> 'a
- val num_binder_types: typ -> int
- val strip_fun_type: typ -> typ list * typ
- val attach_typeS: term -> term
- val specialize_type: theory -> string * typ -> term -> term
- val typ_match: theory -> typ * typ -> bool
- val term_match: theory -> term * term -> bool
- val const_match: theory -> (string * typ) * (string * typ) -> bool
- val DETERM_TIMEOUT: Time.time -> tactic -> tactic
- val spying: bool -> (unit -> Proof.state * int * string) -> unit
-end;
-
-structure Nunchaku_Util : NUNCHAKU_UTIL =
-struct
-
-val elide_string = ATP_Util.elide_string;
-val nat_subscript = Nitpick_Util.nat_subscript;
-val timestamp = ATP_Util.timestamp;
-
-val parse_bool_option = Sledgehammer_Util.parse_bool_option;
-val parse_time = Sledgehammer_Util.parse_time;
-val string_of_time = ATP_Util.string_of_time;
-val simplify_spaces = Sledgehammer_Util.simplify_spaces;
-val ascii_of = ATP_Problem_Generate.ascii_of;
-val unascii_of = ATP_Problem_Generate.unascii_of;
-val double_lookup = Nitpick_Util.double_lookup;
-val triple_lookup = Nitpick_Util.triple_lookup;
-val plural_s_for_list = Nitpick_Util.plural_s_for_list;
-
-fun with_overlord_file name ext f =
- f (Path.explode ("$ISABELLE_HOME_USER/" ^ name ^ "." ^ ext));
-
-fun with_tmp_or_overlord_file overlord =
- if overlord then with_overlord_file else Isabelle_System.with_tmp_file;
-
-val num_binder_types = BNF_Util.num_binder_types
-val strip_fun_type = BNF_Util.strip_fun_type;
-
-(* Clone from "HOL/Tools/inductive_realizer.ML". *)
-val attach_typeS =
- map_types (map_atyps
- (fn TFree (s, []) => TFree (s, @{sort type})
- | TVar (ixn, []) => TVar (ixn, @{sort type})
- | T => T));
-
-val specialize_type = ATP_Util.specialize_type;
-
-fun typ_match thy TU = can (Sign.typ_match thy TU) Vartab.empty;
-fun term_match thy tu = can (Pattern.match thy tu) (Vartab.empty, Vartab.empty);
-fun const_match thy = term_match thy o apply2 Const;
-
-val DETERM_TIMEOUT = Nitpick_Util.DETERM_TIMEOUT;
-
-val spying_version = "a"
-
-val hackish_string_of_term = Sledgehammer_Util.hackish_string_of_term;
-
-fun spying spy f =
- if spy then
- let
- val (state, i, message) = f ();
- val ctxt = Proof.context_of state;
- val goal = Logic.get_goal (Thm.prop_of (#goal (Proof.goal state))) i;
- val hash =
- String.substring (SHA1.rep (SHA1.digest (hackish_string_of_term ctxt goal)), 0, 12);
- in
- File.append (Path.explode "$ISABELLE_HOME_USER/spy_nunchaku")
- (spying_version ^ " " ^ timestamp () ^ ": " ^ hash ^ ": " ^ message ^ "\n")
- end
- else
- ();
-
-end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,327 @@
+(*  Title:      HOL/Tools/Nunchaku/nunchaku.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+The core of the Nunchaku integration in Isabelle.
+*)
+
+signature NUNCHAKU =
+sig
+ type isa_model = Nunchaku_Reconstruct.isa_model
+
+ datatype mode = Auto_Try | Try | Normal
+
+ type mode_of_operation_params =
+ {solvers: string list,
+ falsify: bool,
+ assms: bool,
+ spy: bool,
+ overlord: bool,
+ expect: string}
+
+ type scope_of_search_params =
+ {wfs: ((string * typ) option * bool option) list,
+ whacks: (term option * bool) list,
+ cards: (typ option * (int option * int option)) list,
+ monos: (typ option * bool option) list}
+
+ type output_format_params =
+ {verbose: bool,
+ debug: bool,
+ max_potential: int,
+ max_genuine: int,
+ evals: term list,
+ atomss: (typ option * string list) list}
+
+ type optimization_params =
+ {specialize: bool,
+ multithread: bool}
+
+ type timeout_params =
+ {timeout: Time.time,
+ wf_timeout: Time.time}
+
+ type params =
+ {mode_of_operation_params: mode_of_operation_params,
+ scope_of_search_params: scope_of_search_params,
+ output_format_params: output_format_params,
+ optimization_params: optimization_params,
+ timeout_params: timeout_params}
+
+ val genuineN: string
+ val quasi_genuineN: string
+ val potentialN: string
+ val noneN: string
+ val unknownN: string
+ val no_nunchakuN: string
+
+ val run_chaku_on_prop: Proof.state -> params -> mode -> int -> term list -> term ->
+ string * isa_model option
+ val run_chaku_on_subgoal: Proof.state -> params -> mode -> int -> string * isa_model option
+end;
+
+structure Nunchaku : NUNCHAKU =
+struct
+
+open Nunchaku_Util;
+open Nunchaku_Collect;
+open Nunchaku_Problem;
+open Nunchaku_Translate;
+open Nunchaku_Model;
+open Nunchaku_Reconstruct;
+open Nunchaku_Display;
+open Nunchaku_Tool;
+
+datatype mode = Auto_Try | Try | Normal;
+
+type mode_of_operation_params =
+ {solvers: string list,
+ falsify: bool,
+ assms: bool,
+ spy: bool,
+ overlord: bool,
+ expect: string};
+
+type scope_of_search_params =
+ {wfs: ((string * typ) option * bool option) list,
+ whacks: (term option * bool) list,
+ cards: (typ option * (int option * int option)) list,
+ monos: (typ option * bool option) list};
+
+type output_format_params =
+ {verbose: bool,
+ debug: bool,
+ max_potential: int,
+ max_genuine: int,
+ evals: term list,
+ atomss: (typ option * string list) list};
+
+type optimization_params =
+ {specialize: bool,
+ multithread: bool};
+
+type timeout_params =
+ {timeout: Time.time,
+ wf_timeout: Time.time};
+
+type params =
+ {mode_of_operation_params: mode_of_operation_params,
+ scope_of_search_params: scope_of_search_params,
+ output_format_params: output_format_params,
+ optimization_params: optimization_params,
+ timeout_params: timeout_params};
+
+val genuineN = "genuine";
+val quasi_genuineN = "quasi_genuine";
+val potentialN = "potential";
+val noneN = "none";
+val unknownN = "unknown";
+
+val no_nunchakuN = "no_nunchaku";
+
+fun str_of_mode Auto_Try = "Auto_Try"
+ | str_of_mode Try = "Try"
+ | str_of_mode Normal = "Normal";
+
+fun none_true assigns = forall (curry (op <>) (SOME true) o snd) assigns;
+
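+(* A subgoal whose first conjunct is a lone free Boolean variable usually means the colon after
+   the lemma's name was forgotten; cf. the hint printed below. *)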
+fun has_lonely_bool_var (@{const Pure.conjunction} $ (@{const Trueprop} $ Free _) $ _) = true
+ | has_lonely_bool_var _ = false;
+
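+(* Sorts that arise purely from input syntax (numerals, arithmetic notation); a type variable
+   constrained only by such sorts suggests a missing type constraint, cf. the hint below. *)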
+val syntactic_sorts =
+ @{sort "{default,zero,one,plus,minus,uminus,times,inverse,abs,sgn,ord,equal}"} @ @{sort numeral};
+
+fun has_tfree_syntactic_sort (TFree (_, S as _ :: _)) = subset (op =) (S, syntactic_sorts)
+ | has_tfree_syntactic_sort _ = false;
+
+val has_syntactic_sorts = exists_type (exists_subtype has_tfree_syntactic_sort);
+
+(* Give the soft timeout a chance. *)
+val timeout_slack = seconds 1.0;
+
+fun run_chaku_on_prop state
+ ({mode_of_operation_params = {solvers, falsify, assms, spy, overlord, expect},
+ scope_of_search_params = {wfs, whacks, cards, monos},
+ output_format_params = {verbose, debug, evals, atomss, ...},
+ optimization_params = {specialize, ...},
+ timeout_params = {timeout, wf_timeout}})
+ mode i all_assms subgoal =
+ let
+ val ctxt = Proof.context_of state;
+
+ val timer = Timer.startRealTimer ()
+
+ val print = writeln;
+ val print_n = if mode = Normal then writeln else K ();
+ fun print_v f = if verbose then writeln (f ()) else ();
+ fun print_d f = if debug then writeln (f ()) else ();
+
+ val das_wort_Model = if falsify then "Countermodel" else "Model";
+ val das_wort_model = if falsify then "countermodel" else "model";
+
+ val tool_params =
+ {solvers = solvers, overlord = overlord, debug = debug, specialize = specialize,
+ timeout = timeout};
+
+ fun run () =
+ let
+ val outcome as (outcome_code, _) =
+ let
+ val (poly_axioms, isa_problem as {sound, complete, ...}) =
+ isa_problem_of_subgoal ctxt falsify wfs whacks cards debug wf_timeout evals
+ (if assms then all_assms else []) subgoal;
+ val _ = print_d (fn () => "*** Isabelle problem ***\n" ^
+ str_of_isa_problem ctxt isa_problem);
+ val ugly_nun_problem = nun_problem_of_isa ctxt isa_problem;
+ val _ = print_d (fn () => "*** Ugly Nunchaku problem ***\n" ^
+ str_of_nun_problem ugly_nun_problem);
+ val (nice_nun_problem, pool) = nice_nun_problem ugly_nun_problem;
+ val _ = print_d (fn () => "*** Nice Nunchaku problem ***\n" ^
+ str_of_nun_problem nice_nun_problem);
+
+ fun print_any_hints () =
+ if has_lonely_bool_var subgoal then
+ print "Hint: Maybe you forgot a colon after the lemma's name?"
+ else if has_syntactic_sorts subgoal then
+ print "Hint: Maybe you forgot a type constraint?"
+ else
+ ();
+
+ fun get_isa_model_opt output =
+ let
+ val nice_nun_model = nun_model_of_str output;
+ val _ = print_d (fn () => "*** Nice Nunchaku model ***\n" ^
+ str_of_nun_model nice_nun_model);
+ val ugly_nun_model = ugly_nun_model pool nice_nun_model;
+ val _ = print_d (fn () => "*** Ugly Nunchaku model ***\n" ^
+ str_of_nun_model ugly_nun_model);
+
+ val pat_completes = pat_completes_of_isa_problem isa_problem;
+ val isa_model = isa_model_of_nun ctxt pat_completes atomss ugly_nun_model;
+ val _ = print_d (fn () => "*** Isabelle model ***\n" ^
+ str_of_isa_model ctxt isa_model);
+ in
+ isa_model
+ end;
+
+ fun isa_model_opt output =
+ if debug then SOME (get_isa_model_opt output) else try get_isa_model_opt output;
+
+ val model_str = isa_model_opt #> pretty_of_isa_model_opt ctxt #> Pretty.string_of;
+
+ fun unsat_means_theorem () =
+ null whacks andalso null cards andalso null monos;
+
+ fun unknown () =
+ (print_n ("No " ^ das_wort_model ^ " can be found\n\
+ \The problem lies outside Nunchaku's fragment, or the Nunchaku backends are not \
+ \installed properly");
+ (unknownN, NONE));
+
+ fun unsat_or_unknown complete =
+ if complete then
+ (print_n ("No " ^ das_wort_model ^ " exists" ^
+ (if falsify andalso unsat_means_theorem () then "\nThe goal is a theorem"
+ else ""));
+ (noneN, NONE))
+ else
+ unknown ();
+
+ fun sat_or_maybe_sat sound output =
+ let val header = if sound then das_wort_Model else "Potential " ^ das_wort_model in
+ (case (null poly_axioms, none_true wfs) of
+ (true, true) =>
+ (print (header ^ ":\n" ^
+ model_str output); print_any_hints ();
+ (genuineN, isa_model_opt output))
+ | (no_poly, no_wf) =>
+ let
+ val ignorings = []
+ |> not no_poly ? cons "polymorphic axioms"
+ |> not no_wf ? cons "unchecked well-foundedness";
+ in
+ (print (header ^ " (ignoring " ^ space_implode " and " ignorings ^ "):\n" ^
+ model_str output ^
+ (if no_poly then
+ ""
+ else
+ "\nIgnored axioms:\n" ^
+ cat_lines (map (prefix " " o Syntax.string_of_term ctxt) poly_axioms)));
+ print_any_hints ();
+ (quasi_genuineN, isa_model_opt output))
+ end)
+ end;
+ in
+ (case solve_nun_problem tool_params nice_nun_problem of
+ Unsat => unsat_or_unknown complete
+ | Sat (output, _) => sat_or_maybe_sat sound output
+ | Unknown NONE => unknown ()
+ | Unknown (SOME (output, _)) => sat_or_maybe_sat false output
+ | Timeout => (print_n "Time out"; (unknownN, NONE))
+ | Nunchaku_Var_Not_Set =>
+ (print_n ("Variable $" ^ nunchaku_home_env_var ^ " not set"); (unknownN, NONE))
+ | Nunchaku_Cannot_Execute =>
+ (print_n "External tool \"nunchaku\" cannot execute"; (unknownN, NONE))
+ | Nunchaku_Not_Found =>
+ (print_n "External tool \"nunchaku\" not found"; (unknownN, NONE))
+ | CVC4_Cannot_Execute =>
+ (print_n "External tool \"cvc4\" cannot execute"; (unknownN, NONE))
+ | CVC4_Not_Found => (print_n "External tool \"cvc4\" not found"; (unknownN, NONE))
+ | Unknown_Error (code, msg) =>
+ (print_n ("Unknown error: " ^ msg ^
+ (if code = 0 then "" else " (code " ^ string_of_int code ^ ")"));
+ (unknownN, NONE)))
+ end
+ handle
+ CYCLIC_DEPS () =>
+ (print_n "Cyclic dependencies (or bug in Nunchaku)"; (unknownN, NONE))
+ | TOO_DEEP_DEPS () =>
+ (print_n "Too deep dependencies (or bug in Nunchaku)"; (unknownN, NONE))
+ | TOO_META t =>
+ (print_n ("Formula too meta for Nunchaku:\n" ^ Syntax.string_of_term ctxt t);
+ (unknownN, NONE))
+ | UNEXPECTED_POLYMORPHISM t =>
+ (print_n ("Unexpected polymorphism in term\n" ^ Syntax.string_of_term ctxt t);
+ (unknownN, NONE))
+ | UNEXPECTED_VAR t =>
+ (print_n ("Unexpected schematic variables in term\n" ^ Syntax.string_of_term ctxt t);
+ (unknownN, NONE))
+ | UNSUPPORTED_FUNC t =>
+ (print_n ("Unsupported low-level constant in problem: " ^ Syntax.string_of_term ctxt t);
+ (unknownN, NONE));
+ in
+ if expect = "" orelse outcome_code = expect then outcome
+ else error ("Unexpected outcome: " ^ quote outcome_code)
+ end;
+
+ val _ = spying spy (fn () => (state, i, "starting " ^ str_of_mode mode ^ " mode"));
+
+ val outcome as (outcome_code, _) =
+ Timeout.apply (Time.+ (timeout, timeout_slack)) run ()
+ handle Timeout.TIMEOUT _ => (print_n "Time out"; (unknownN, NONE));
+
+ val _ = print_v (fn () => "Total time: " ^ string_of_time (Timer.checkRealTimer timer));
+
+ val _ = spying spy (fn () => (state, i, "outcome: " ^ outcome_code));
+ in
+ if expect = "" orelse outcome_code = expect then outcome
+ else error ("Unexpected outcome: " ^ quote outcome_code)
+ end;
+
+fun run_chaku_on_subgoal state params mode i =
+ let
+ val ctxt = Proof.context_of state;
+ val goal = Thm.prop_of (#goal (Proof.raw_goal state));
+ in
+ if Logic.count_prems goal = 0 then
+ (writeln "No subgoal!"; (noneN, NONE))
+ else
+ let
+ val subgoal = fst (Logic.goal_params goal i);
+ val all_assms = map Thm.term_of (Assumption.all_assms_of ctxt);
+ in
+ run_chaku_on_prop state params mode i all_assms subgoal
+ end
+ end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_collect.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,1119 @@
+(*  Title:      HOL/Tools/Nunchaku/nunchaku_collect.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Collection of Isabelle/HOL definitions etc. for Nunchaku.
+*)
+
+signature NUNCHAKU_COLLECT =
+sig
+ val dest_co_datatype_case: Proof.context -> string * typ -> (string * typ) list
+
+ type isa_type_spec =
+ {abs_typ: typ,
+ rep_typ: typ,
+ wrt: term,
+ abs: term,
+ rep: term}
+
+ type isa_co_data_spec =
+ {typ: typ,
+ ctrs: term list}
+
+ type isa_const_spec =
+ {const: term,
+ props: term list}
+
+ type isa_rec_spec =
+ {const: term,
+ props: term list,
+ pat_complete: bool}
+
+ type isa_consts_spec =
+ {consts: term list,
+ props: term list}
+
+ datatype isa_command =
+ ITVal of typ * (int option * int option)
+ | ITypedef of isa_type_spec
+ | IQuotient of isa_type_spec
+ | ICoData of BNF_Util.fp_kind * isa_co_data_spec list
+ | IVal of term
+ | ICoPred of BNF_Util.fp_kind * bool * isa_const_spec list
+ | IRec of isa_rec_spec list
+ | ISpec of isa_consts_spec
+ | IAxiom of term
+ | IGoal of term
+ | IEval of term
+
+ type isa_problem =
+ {commandss: isa_command list list,
+ sound: bool,
+ complete: bool}
+
+ exception CYCLIC_DEPS of unit
+ exception TOO_DEEP_DEPS of unit
+ exception TOO_META of term
+ exception UNEXPECTED_POLYMORPHISM of term
+ exception UNEXPECTED_VAR of term
+ exception UNSUPPORTED_FUNC of term
+
+ val isa_problem_of_subgoal: Proof.context -> bool -> ((string * typ) option * bool option) list ->
+ (term option * bool) list -> (typ option * (int option * int option)) list -> bool ->
+ Time.time -> term list -> term list -> term -> term list * isa_problem
+ val pat_completes_of_isa_problem: isa_problem -> term list
+ val str_of_isa_problem: Proof.context -> isa_problem -> string
+end;
+
+structure Nunchaku_Collect : NUNCHAKU_COLLECT =
+struct
+
+open Nunchaku_Util;
+
+type isa_type_spec =
+ {abs_typ: typ,
+ rep_typ: typ,
+ wrt: term,
+ abs: term,
+ rep: term};
+
+type isa_co_data_spec =
+ {typ: typ,
+ ctrs: term list};
+
+type isa_const_spec =
+ {const: term,
+ props: term list};
+
+type isa_rec_spec =
+ {const: term,
+ props: term list,
+ pat_complete: bool};
+
+type isa_consts_spec =
+ {consts: term list,
+ props: term list};
+
+datatype isa_command =
+ ITVal of typ * (int option * int option)
+| ITypedef of isa_type_spec
+| IQuotient of isa_type_spec
+| ICoData of BNF_Util.fp_kind * isa_co_data_spec list
+| IVal of term
+| ICoPred of BNF_Util.fp_kind * bool * isa_const_spec list
+| IRec of isa_rec_spec list
+| ISpec of isa_consts_spec
+| IAxiom of term
+| IGoal of term
+| IEval of term;
+
+type isa_problem =
+ {commandss: isa_command list list,
+ sound: bool,
+ complete: bool};
+
+exception CYCLIC_DEPS of unit;
+exception TOO_DEEP_DEPS of unit;
+exception TOO_META of term;
+exception UNEXPECTED_POLYMORPHISM of term;
+exception UNEXPECTED_VAR of term;
+exception UNSUPPORTED_FUNC of term;
+
+fun str_of_and_list str_of_elem =
+ map str_of_elem #> space_implode ("\nand ");
+
+val key_of_typ =
+ let
+ fun key_of (Type (s, [])) = s
+ | key_of (Type (s, Ts)) = s ^ "(" ^ commas (map key_of Ts) ^ ")"
+ | key_of (TFree (s, _)) = s;
+ in
+ prefix "y" o key_of
+ end;
+
+fun key_of_const ctxt =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ fun key_of (Const (x as (s, _))) =
+ (case Sign.const_typargs thy x of
+ [] => s
+ | Ts => s ^ "(" ^ commas (map key_of_typ Ts) ^ ")")
+ | key_of (Free (s, _)) = s;
+ in
+ prefix "t" o key_of
+ end;
+
+val add_type_keys = fold_subtypes (insert (op =) o key_of_typ);
+
+fun add_aterm_keys ctxt t =
+ if is_Const t orelse is_Free t then insert (op =) (key_of_const ctxt t) else I;
+
+fun add_keys ctxt t =
+ fold_aterms (add_aterm_keys ctxt) t
+ #> fold_types add_type_keys t;
+
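+(* Universally close a term over its schematic variables, except those listed in "except". *)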
+fun close_form except t =
+ fold (fn ((s, i), T) => fn t' =>
+ HOLogic.all_const T $ Abs (s, T, abstract_over (Var ((s, i), T), t')))
+ (Term.add_vars t [] |> subtract (op =) except) t;
+
+(* "imp_conjL[symmetric]" is important for inductive predicates with multiple assumptions. *)
+val basic_defs =
+ @{thms Ball_def[abs_def] Bex_def[abs_def] case_bool_if Ex1_def[abs_def]
+ imp_conjL[symmetric, abs_def] Let_def[abs_def] rmember_def[symmetric, abs_def]};
+
+fun unfold_basic_def ctxt =
+ let val thy = Proof_Context.theory_of ctxt in
+ Pattern.rewrite_term thy (map (Logic.dest_equals o Thm.prop_of) basic_defs) []
+ end;
+
+val has_polymorphism = exists_type (exists_subtype is_TVar);
+
+fun whack_term thy whacks =
+ let
+ fun whk t =
+ if triple_lookup (term_match thy o swap) whacks t = SOME true then
+ Const (@{const_name unreachable}, fastype_of t)
+ else
+ (case t of
+ u $ v => whk u $ whk v
+ | Abs (s, T, u) => Abs (s, T, whk u)
+ | _ => t);
+ in
+ whk
+ end;
+
+fun preprocess_term_basic falsify ctxt whacks t =
+ let val thy = Proof_Context.theory_of ctxt in
+ if has_polymorphism t then
+ raise UNEXPECTED_POLYMORPHISM t
+ else
+ t
+ |> attach_typeS
+ |> whack_term thy whacks
+ |> Object_Logic.atomize_term ctxt
+ |> tap (fn t' => fastype_of t' <> @{typ prop} orelse raise TOO_META t)
+ |> falsify ? HOLogic.mk_not
+ |> unfold_basic_def ctxt
+ end;
+
+val check_closed = tap (fn t => null (Term.add_vars t []) orelse raise UNEXPECTED_VAR t);
+
+val preprocess_prop = close_form [] oooo preprocess_term_basic;
+val preprocess_closed_term = check_closed ooo preprocess_term_basic false;
+
+val is_type_builtin = member (op =) [@{type_name bool}, @{type_name fun}];
+
+val is_const_builtin =
+ member (op =) [@{const_name All}, @{const_name conj}, @{const_name disj}, @{const_name Eps},
+ @{const_name HOL.eq}, @{const_name Ex}, @{const_name False}, @{const_name If},
+ @{const_name implies}, @{const_name Not}, @{const_name The}, @{const_name The_unsafe},
+ @{const_name True}];
+
+datatype type_classification = Builtin | TVal | Typedef | Quotient | Co_Datatype;
+
+fun classify_type_name ctxt T_name =
+ if is_type_builtin T_name then
+ Builtin
+ else if T_name = @{type_name itself} then
+ Co_Datatype
+ else
+ (case BNF_FP_Def_Sugar.fp_sugar_of ctxt T_name of
+ SOME _ => Co_Datatype
+ | NONE =>
+ (case Ctr_Sugar.ctr_sugar_of ctxt T_name of
+ SOME _ => Co_Datatype
+ | NONE =>
+ (case Quotient_Info.lookup_quotients ctxt T_name of
+ SOME _ => Quotient
+ | NONE =>
+ if T_name = @{type_name set} then
+ Typedef
+ else
+ (case Typedef.get_info ctxt T_name of
+ _ :: _ => Typedef
+ | [] => TVal))));
+
+fun fp_kind_of_ctr_sugar_kind Ctr_Sugar.Codatatype = BNF_Util.Greatest_FP
+ | fp_kind_of_ctr_sugar_kind _ = BNF_Util.Least_FP;
+
+fun mutual_co_datatypes_of ctxt (T_name, Ts) =
+ (if T_name = @{type_name itself} then
+ (BNF_Util.Least_FP, [@{typ "'a itself"}], [[@{const Pure.type ('a)}]])
+ else
+ let
+ val (fp, ctr_sugars) =
+ (case BNF_FP_Def_Sugar.fp_sugar_of ctxt T_name of
+ SOME (fp_sugar as {fp, fp_res = {Ts, ...}, ...}) =>
+ (fp,
+ (case Ts of
+ [_] => [fp_sugar]
+ | _ => map (the o BNF_FP_Def_Sugar.fp_sugar_of ctxt o fst o dest_Type) Ts)
+ |> map (#ctr_sugar o #fp_ctr_sugar))
+ | NONE =>
+ (case Ctr_Sugar.ctr_sugar_of ctxt T_name of
+ SOME (ctr_sugar as {kind, ...}) =>
+ (* Any freely constructed type that is not a codatatype is considered a datatype. This
+ is sound (but incomplete) for model finding. *)
+ (fp_kind_of_ctr_sugar_kind kind, [ctr_sugar])));
+ in
+ (fp, map #T ctr_sugars, map #ctrs ctr_sugars)
+ end)
+ |> @{apply 3(2)} (map ((fn Type (s, _) => Type (s, Ts))))
+ |> @{apply 3(3)} (map (map (Ctr_Sugar.mk_ctr Ts)));
+
+fun typedef_of ctxt whacks T_name =
+ if T_name = @{type_name set} then
+ let
+ val A = Logic.varifyT_global @{typ 'a};
+ val absT = Type (@{type_name set}, [A]);
+ val repT = A --> HOLogic.boolT;
+ val pred = K (Abs (Name.uu, repT, @{const True}));
+ val abs = Const (@{const_name Collect}, repT --> absT);
+ val rep = Const (@{const_name rmember}, absT --> repT);
+ in
+ (absT, repT, pred, abs, rep)
+ end
+ else
+ (case Typedef.get_info ctxt T_name of
+ (* When several entries are returned, it shouldn't matter much which one we take (according to
+ Florian Haftmann). The "Logic.varifyT_global" calls are a workaround because these types'
+ variables sometimes clash with locally fixed type variables. *)
+ ({abs_type, rep_type, Abs_name, Rep_name, ...}, {Rep, ...}) :: _ =>
+ let
+ val absT = Logic.varifyT_global abs_type;
+ val repT = Logic.varifyT_global rep_type;
+ val set0 = Thm.prop_of Rep
+ |> HOLogic.dest_Trueprop
+ |> HOLogic.dest_mem
+ |> snd;
+ val pred0 = Abs (Name.uu, repT, HOLogic.mk_mem (Bound 0, set0));
+ fun pred () = preprocess_prop false ctxt whacks pred0;
+ val abs = Const (Abs_name, repT --> absT);
+ val rep = Const (Rep_name, absT --> repT);
+ in
+ (absT, repT, pred, abs, rep)
+ end);
+
+fun quotient_of ctxt whacks T_name =
+ (case Quotient_Info.lookup_quotients ctxt T_name of
+ SOME {equiv_rel = equiv_rel0, qtyp, rtyp, quot_thm, ...} =>
+ let
+ val _ $ (_ $ _ $ abs $ rep) = Thm.prop_of quot_thm;
+ fun equiv_rel () = preprocess_prop false ctxt whacks equiv_rel0;
+ in
+ (qtyp, rtyp, equiv_rel, abs, rep)
+ end);
+
+fun is_co_datatype_ctr ctxt (s, T) =
+ (case body_type T of
+ Type (fpT_name, Ts) =>
+ classify_type_name ctxt fpT_name = Co_Datatype andalso
+ let
+ val ctrs =
+ if fpT_name = @{type_name itself} then
+ [Const (@{const_name Pure.type}, @{typ "'a itself"})]
+ else
+ (case BNF_FP_Def_Sugar.fp_sugar_of ctxt fpT_name of
+ SOME {fp_ctr_sugar = {ctr_sugar = {ctrs, ...}, ...}, ...} => ctrs
+ | NONE =>
+ (case Ctr_Sugar.ctr_sugar_of ctxt fpT_name of
+ SOME {ctrs, ...} => ctrs
+ | _ => []));
+
+ fun is_right_ctr (t' as Const (s', _)) =
+ s = s' andalso fastype_of (Ctr_Sugar.mk_ctr Ts t') = T;
+ in
+ exists is_right_ctr ctrs
+ end
+ | _ => false);
+
+fun dest_co_datatype_case ctxt (s, T) =
+ let val thy = Proof_Context.theory_of ctxt in
+ (case strip_fun_type (Sign.the_const_type thy s) of
+ (gen_branch_Ts, gen_body_fun_T) =>
+ (case gen_body_fun_T of
+ Type (@{type_name fun}, [Type (fpT_name, _), _]) =>
+ if classify_type_name ctxt fpT_name = Co_Datatype then
+ let
+ val Type (_, fpTs) = domain_type (funpow (length gen_branch_Ts) range_type T);
+ val (ctrs0, Const (case_name, _)) =
+ (case BNF_FP_Def_Sugar.fp_sugar_of ctxt fpT_name of
+ SOME {fp_ctr_sugar = {ctr_sugar = {ctrs, casex, ...}, ...}, ...} => (ctrs, casex)
+ | NONE =>
+ (case Ctr_Sugar.ctr_sugar_of ctxt fpT_name of
+ SOME {ctrs, casex, ...} => (ctrs, casex)));
+ in
+ if s = case_name then map (dest_Const o Ctr_Sugar.mk_ctr fpTs) ctrs0
+ else raise Fail "non-case"
+ end
+ else
+ raise Fail "non-case"))
+ end;
+
+val is_co_datatype_case = can o dest_co_datatype_case;
+
+fun is_quotient_abs ctxt whacks (s, T) =
+ (case T of
+ Type (@{type_name fun}, [_, Type (absT_name, _)]) =>
+ classify_type_name ctxt absT_name = Quotient andalso
+ (case quotient_of ctxt whacks absT_name of
+ (_, _, _, Const (s', _), _) => s' = s)
+ | _ => false);
+
+fun is_quotient_rep ctxt whacks (s, T) =
+ (case T of
+ Type (@{type_name fun}, [Type (absT_name, _), _]) =>
+ classify_type_name ctxt absT_name = Quotient andalso
+ (case quotient_of ctxt whacks absT_name of
+ (_, _, _, _, Const (s', _)) => s' = s)
+ | _ => false);
+
+fun is_maybe_typedef_abs ctxt whacks absT_name s =
+ if absT_name = @{type_name set} then
+ s = @{const_name Collect}
+ else
+ (case try (typedef_of ctxt whacks) absT_name of
+ SOME (_, _, _, Const (s', _), _) => s' = s
+ | NONE => false);
+
+fun is_maybe_typedef_rep ctxt whacks absT_name s =
+ if absT_name = @{type_name set} then
+ s = @{const_name rmember}
+ else
+ (case try (typedef_of ctxt whacks) absT_name of
+ SOME (_, _, _, _, Const (s', _)) => s' = s
+ | NONE => false);
+
+fun is_typedef_abs ctxt whacks (s, T) =
+ (case T of
+ Type (@{type_name fun}, [_, Type (absT_name, _)]) =>
+ classify_type_name ctxt absT_name = Typedef andalso
+ is_maybe_typedef_abs ctxt whacks absT_name s
+ | _ => false);
+
+fun is_typedef_rep ctxt whacks (s, T) =
+ (case T of
+ Type (@{type_name fun}, [Type (absT_name, _), _]) =>
+ classify_type_name ctxt absT_name = Typedef andalso
+ is_maybe_typedef_rep ctxt whacks absT_name s
+ | _ => false);
+
+fun is_stale_typedef_abs ctxt whacks (s, T) =
+ (case T of
+ Type (@{type_name fun}, [_, Type (absT_name, _)]) =>
+ classify_type_name ctxt absT_name <> Typedef andalso
+ is_maybe_typedef_abs ctxt whacks absT_name s
+ | _ => false);
+
+fun is_stale_typedef_rep ctxt whacks (s, T) =
+ (case T of
+ Type (@{type_name fun}, [Type (absT_name, _), _]) =>
+ classify_type_name ctxt absT_name <> Typedef andalso
+ is_maybe_typedef_rep ctxt whacks absT_name s
+ | _ => false);
+
+fun instantiate_constant_types_in_term ctxt csts target =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ fun try_const _ _ (res as SOME _) = res
+ | try_const (s', T') cst NONE =
+ (case cst of
+ Const (s, T) =>
+ if s = s' then
+ SOME (Sign.typ_match thy (T', T) Vartab.empty)
+ handle Type.TYPE_MATCH => NONE
+ else
+ NONE
+ | _ => NONE);
+
+ fun subst_for (Const x) = fold (try_const x) csts NONE
+ | subst_for (t as Free _) = if member (op aconv) csts t then SOME Vartab.empty else NONE
+ | subst_for (t1 $ t2) = (case subst_for t1 of SOME subst => SOME subst | NONE => subst_for t2)
+ | subst_for (Abs (_, _, t')) = subst_for t'
+ | subst_for _ = NONE;
+ in
+ (case subst_for target of
+ SOME subst => Envir.subst_term_types subst target
+ | NONE => raise Type.TYPE_MATCH)
+ end;
+
+datatype card = One | Fin | Fin_or_Inf | Inf
+
+(* Similar to "ATP_Util.tiny_card_of_type". *)
+fun card_of_type ctxt =
+ let
+ fun max_card Inf _ = Inf
+ | max_card _ Inf = Inf
+ | max_card Fin_or_Inf _ = Fin_or_Inf
+ | max_card _ Fin_or_Inf = Fin_or_Inf
+ | max_card Fin _ = Fin
+ | max_card _ Fin = Fin
+ | max_card One One = One;
+
+ fun card_of avoid T =
+ if member (op =) avoid T then
+ Inf
+ else
+ (case T of
+ TFree _ => Fin_or_Inf
+ | TVar _ => Inf
+ | Type (@{type_name fun}, [T1, T2]) =>
+ (case (card_of avoid T1, card_of avoid T2) of
+ (_, One) => One
+ | (k1, k2) => max_card k1 k2)
+ | Type (@{type_name prod}, [T1, T2]) =>
+ (case (card_of avoid T1, card_of avoid T2) of
+ (k1, k2) => max_card k1 k2)
+ | Type (@{type_name set}, [T']) => card_of avoid (T' --> HOLogic.boolT)
+ | Type (T_name, Ts) =>
+ (case try (mutual_co_datatypes_of ctxt) (T_name, Ts) of
+ NONE => Inf
+ | SOME (_, fpTs, ctrss) =>
+ (case ctrss of [[_]] => One | _ => Fin)
+ |> fold (fold (fold (max_card o card_of (fpTs @ avoid)) o binder_types o fastype_of))
+ ctrss));
+ in
+ card_of []
+ end;
+
+fun int_of_classif Spec_Rules.Equational = 1
+ | int_of_classif Spec_Rules.Inductive = 2
+ | int_of_classif Spec_Rules.Co_Inductive = 3
+ | int_of_classif Spec_Rules.Unknown = 4;
+
+val classif_ord = int_ord o apply2 int_of_classif;
+
+fun spec_rules_of ctxt (x as (s, T)) =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ fun subst_of t0 =
+ try (Sign.typ_match thy (fastype_of t0, T)) Vartab.empty;
+
+ fun process_spec _ (res as SOME _) = res
+ | process_spec (classif, (ts0, ths as _ :: _)) NONE =
+ (case get_first subst_of ts0 of
+ SOME subst =>
+ (let
+ val ts = map (Envir.subst_term_types subst) ts0;
+ val poly_props = map Thm.prop_of ths;
+ val props = map (instantiate_constant_types_in_term ctxt ts) poly_props;
+ in
+ if exists (exists (exists_type (exists_subtype is_TVar))) [ts, props] then NONE
+ else SOME (classif, ts, props, poly_props)
+ end
+ handle Type.TYPE_MATCH => NONE)
+ | NONE => NONE)
+ | process_spec _ NONE = NONE;
+
+ fun spec_rules () =
+ Spec_Rules.retrieve ctxt (Const x)
+ |> sort (classif_ord o apply2 fst);
+
+ val specs =
+ if s = @{const_name The} then
+ [(Spec_Rules.Unknown, ([Logic.varify_global @{term The}], [@{thm theI_unique}]))]
+ else if s = @{const_name finite} then
+ let val card = card_of_type ctxt T in
+ if card = Inf orelse card = Fin_or_Inf then
+ spec_rules ()
+ else
+ [(Spec_Rules.Equational, ([Logic.varify_global @{term finite}],
+ [Skip_Proof.make_thm thy (Logic.varify_global @{prop "finite A = True"})]))]
+ end
+ else
+ spec_rules ();
+ in
+ fold process_spec specs NONE
+ end;
+
+fun lhs_of_equation (Const (@{const_name Pure.eq}, _) $ t $ _) = t
+ | lhs_of_equation (@{const Trueprop} $ (Const (@{const_name HOL.eq}, _) $ t $ _)) = t;
+
+fun specialize_definition_type thy x def0 =
+ let
+ val def = specialize_type thy x def0;
+ val lhs = lhs_of_equation def;
+ in
+ if exists_Const (curry (op =) x) lhs then def else raise Fail "cannot specialize"
+ end;
+
+fun definition_of thy (x as (s, _)) =
+ Defs.specifications_of (Theory.defs_of thy) (Defs.Const, s)
+ |> map_filter #def
+ |> map_filter (try (specialize_definition_type thy x o Thm.prop_of o Thm.axiom thy))
+ |> try hd;
+
+fun is_builtin_theory thy_id =
+ Context.subthy_id (thy_id, Context.theory_id @{theory Hilbert_Choice});
+
+val orphan_axioms_of =
+ Spec_Rules.get
+ #> filter (curry (op =) Spec_Rules.Unknown o fst)
+ #> map snd
+ #> filter (null o fst)
+ #> maps snd
+ #> filter_out (is_builtin_theory o Thm.theory_id)
+ #> map Thm.prop_of;
+
+fun keys_of _ (ITVal (T, _)) = [key_of_typ T]
+ | keys_of _ (ITypedef {abs_typ, ...}) = [key_of_typ abs_typ]
+ | keys_of _ (IQuotient {abs_typ, ...}) = [key_of_typ abs_typ]
+ | keys_of _ (ICoData (_, specs)) = map (key_of_typ o #typ) specs
+ | keys_of ctxt (IVal const) = [key_of_const ctxt const]
+ | keys_of ctxt (ICoPred (_, _, specs)) = map (key_of_const ctxt o #const) specs
+ | keys_of ctxt (IRec specs) = map (key_of_const ctxt o #const) specs
+ | keys_of ctxt (ISpec {consts, ...}) = map (key_of_const ctxt) consts
+ | keys_of _ (IAxiom _) = []
+ | keys_of _ (IGoal _) = []
+ | keys_of _ (IEval _) = [];
+
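+(* Dependencies of a command, expressed as the keys of the types and constants occurring in its
+   specification. *)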
+fun co_data_spec_deps_of ctxt ({ctrs, ...} : isa_co_data_spec) =
+ fold (add_keys ctxt) ctrs [];
+fun const_spec_deps_of ctxt consts props =
+ fold (add_keys ctxt) props [] |> subtract (op =) (map (key_of_const ctxt) consts);
+fun consts_spec_deps_of ctxt {consts, props} =
+ fold (add_keys ctxt) props [] |> subtract (op =) (map (key_of_const ctxt) consts);
+
+fun deps_of _ (ITVal _) = []
+ | deps_of ctxt (ITypedef {wrt, ...}) = add_keys ctxt wrt []
+ | deps_of ctxt (IQuotient {wrt, ...}) = add_keys ctxt wrt []
+ | deps_of ctxt (ICoData (_, specs)) = maps (co_data_spec_deps_of ctxt) specs
+ | deps_of _ (IVal const) = add_type_keys (fastype_of const) []
+ | deps_of ctxt (ICoPred (_, _, specs)) =
+ maps (const_spec_deps_of ctxt (map #const specs) o #props) specs
+ | deps_of ctxt (IRec specs) = maps (const_spec_deps_of ctxt (map #const specs) o #props) specs
+ | deps_of ctxt (ISpec spec) = consts_spec_deps_of ctxt spec
+ | deps_of ctxt (IAxiom prop) = add_keys ctxt prop []
+ | deps_of ctxt (IGoal prop) = add_keys ctxt prop []
+ | deps_of ctxt (IEval t) = add_keys ctxt t [];
+
+fun consts_of_rec_or_spec (IRec specs) = map #const specs
+ | consts_of_rec_or_spec (ISpec {consts, ...}) = consts;
+
+fun props_of_rec_or_spec (IRec specs) = maps #props specs
+ | props_of_rec_or_spec (ISpec {props, ...}) = props;
+
+fun merge_two_rec_or_spec cmd cmd' =
+ ISpec {consts = consts_of_rec_or_spec cmd @ consts_of_rec_or_spec cmd',
+ props = props_of_rec_or_spec cmd @ props_of_rec_or_spec cmd'};
+
+fun merge_two (ICoData (fp, specs)) (ICoData (fp', specs'), complete) =
+ (ICoData (BNF_Util.case_fp fp fp fp', specs @ specs'), complete andalso fp = fp')
+ | merge_two (IRec specs) (IRec specs', complete) = (IRec (specs @ specs'), complete)
+ | merge_two (cmd as IRec _) (cmd' as ISpec _, complete) =
+ (merge_two_rec_or_spec cmd cmd', complete)
+ | merge_two (cmd as ISpec _) (cmd' as IRec _, complete) =
+ (merge_two_rec_or_spec cmd cmd', complete)
+ | merge_two (cmd as ISpec _) (cmd' as ISpec _, complete) =
+ (merge_two_rec_or_spec cmd cmd', complete)
+ | merge_two _ _ = raise CYCLIC_DEPS ();
+
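+(* Topologically sort the commands along their dependencies, merging mutually dependent commands
+   into a single command where possible. *)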
+fun sort_isa_commands_topologically ctxt cmds =
+ let
+ fun normal_pairs [] = []
+ | normal_pairs (all as normal :: _) = map (rpair normal) all;
+
+ fun add_node [] _ = I
+ | add_node (normal :: _) cmd = Graph.new_node (normal, cmd);
+
+ fun merge_scc (cmd :: cmds) complete = fold merge_two cmds (cmd, complete);
+
+ fun sort_problem (cmds, complete) =
+ let
+ val keyss = map (keys_of ctxt) cmds;
+ val normal_keys = Symtab.make (maps normal_pairs keyss);
+ val normalize = Symtab.lookup normal_keys;
+
+ fun add_deps [] _ = I
+ | add_deps (normal :: _) cmd =
+ let
+ val deps = deps_of ctxt cmd
+ |> map_filter normalize
+ |> remove (op =) normal;
+ in
+ fold (fn dep => Graph.add_edge (dep, normal)) deps
+ end;
+
+ val cmd_of_key = the o AList.lookup (op =) (map hd keyss ~~ cmds);
+
+ val G = Graph.empty
+ |> fold2 add_node keyss cmds
+ |> fold2 add_deps keyss cmds;
+
+ val cmd_sccs = rev (Graph.strong_conn G)
+ |> map (map cmd_of_key);
+ in
+ if exists (can (fn _ :: _ :: _ => ())) cmd_sccs then
+ sort_problem (fold_map merge_scc cmd_sccs complete)
+ else
+ (Graph.schedule (K snd) G, complete)
+ end;
+
+ val typedecls = filter (can (fn ITVal _ => ())) cmds;
+ val (mixed, complete) =
+ (filter (can (fn ITypedef _ => () | IQuotient _ => () | ICoData _ => () | IVal _ => ()
+ | ICoPred _ => () | IRec _ => () | ISpec _ => ())) cmds, true)
+ |> sort_problem;
+ val axioms = filter (can (fn IAxiom _ => ())) cmds;
+ val goals = filter (can (fn IGoal _ => ())) cmds;
+ val evals = filter (can (fn IEval _ => ())) cmds;
+ in
+ (typedecls @ mixed @ axioms @ goals @ evals, complete)
+ end;
+
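+(* Rank of each command kind; consecutive commands of equal rank are grouped into one block by
+   "group_isa_commands". *)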
+fun group_of (ITVal _) = 1
+ | group_of (ITypedef _) = 2
+ | group_of (IQuotient _) = 3
+ | group_of (ICoData _) = 4
+ | group_of (IVal _) = 5
+ | group_of (ICoPred _) = 6
+ | group_of (IRec _) = 7
+ | group_of (ISpec _) = 8
+ | group_of (IAxiom _) = 9
+ | group_of (IGoal _) = 10
+ | group_of (IEval _) = 11;
+
+fun group_isa_commands [] = []
+ | group_isa_commands [cmd] = [[cmd]]
+ | group_isa_commands (cmd :: cmd' :: cmds) =
+ let val (group :: groups) = group_isa_commands (cmd' :: cmds) in
+ if group_of cmd = group_of cmd' then
+ (cmd :: group) :: groups
+ else
+ [cmd] :: (group :: groups)
+ end;
+
+fun defined_by (Const (@{const_name All}, _) $ t) = defined_by t
+ | defined_by (Abs (_, _, t)) = defined_by t
+ | defined_by (@{const implies} $ _ $ u) = defined_by u
+ | defined_by (Const (@{const_name HOL.eq}, _) $ t $ _) = head_of t
+ | defined_by t = head_of t;
+
+fun partition_props [_] props = SOME [props]
+ | partition_props consts props =
+ let
+ val propss = map (fn const => filter (fn prop => defined_by prop aconv const) props) consts;
+ in
+ if eq_set (op aconv) (props, flat propss) andalso forall (not o null) propss then SOME propss
+ else NONE
+ end;
+
+fun hol_concl_head (Const (@{const_name All}, _) $ Abs (_, _, t)) = hol_concl_head t
+ | hol_concl_head (Const (@{const_name implies}, _) $ _ $ t) = hol_concl_head t
+ | hol_concl_head (t $ _) = hol_concl_head t
+ | hol_concl_head t = t;
+
+fun is_inductive_set_intro t =
+ (case hol_concl_head t of
+ Const (@{const_name rmember}, _) => true
+ | _ => false);
+
+exception NO_TRIPLE of unit;
+
+fun triple_for_intro_rule ctxt x rule =
+ let
+ val (prems, concl) = Logic.strip_horn rule
+ |>> map (Object_Logic.atomize_term ctxt)
+ ||> Object_Logic.atomize_term ctxt;
+
+ val (mains, sides) = List.partition (exists_Const (curry (op =) x)) prems;
+
+ val is_right_head = curry (op aconv) (Const x) o head_of;
+ in
+ if forall is_right_head mains then (sides, mains, concl) else raise NO_TRIPLE ()
+ end;
+
+val tuple_for_args = HOLogic.mk_tuple o snd o strip_comb;
+
+fun wf_constraint_for rel sides concl mains =
+ HOLogic.mk_mem (HOLogic.mk_prod (apply2 tuple_for_args (mains, concl)), Var rel)
+ |> fold (curry HOLogic.mk_imp) sides
+ |> close_form [rel];
+
+fun wf_constraint_for_triple rel (sides, mains, concl) =
+ map (wf_constraint_for rel sides concl) mains
+ |> foldr1 HOLogic.mk_conj;
+
+fun terminates_by ctxt timeout goal tac =
+ can (SINGLE (Classical.safe_tac ctxt) #> the
+ #> SINGLE (DETERM_TIMEOUT timeout (tac ctxt (auto_tac ctxt))) #> the
+ #> Goal.finish ctxt) goal;
+
+val max_cached_wfs = 50;
+val cached_timeout = Synchronized.var "Nunchaku_Collect.cached_timeout" Time.zeroTime;
+val cached_wf_props = Synchronized.var "Nunchaku_Collect.cached_wf_props" ([] : (term * bool) list);
+
+val termination_tacs = [Lexicographic_Order.lex_order_tac true, ScnpReconstruct.sizechange_tac];
+
+fun is_wellfounded_inductive_predicate ctxt wfs debug wf_timeout const intros =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ val Const (x as (_, T)) = head_of (HOLogic.dest_Trueprop (Logic.strip_imp_concl (hd intros)));
+ in
+ (case triple_lookup (const_match thy o swap) wfs (dest_Const const) of
+ SOME (SOME wf) => wf
+ | _ =>
+ (case map (triple_for_intro_rule ctxt x) intros |> filter_out (null o #2) of
+ [] => true
+ | triples =>
+ let
+ val binders_T = HOLogic.mk_tupleT (binder_types T);
+ val rel_T = HOLogic.mk_setT (HOLogic.mk_prodT (binders_T, binders_T));
+ val j = fold (Integer.max o maxidx_of_term) intros 0 + 1;
+ val rel = (("R", j), rel_T);
+ val prop =
+ Const (@{const_name wf}, rel_T --> HOLogic.boolT) $ Var rel ::
+ map (wf_constraint_for_triple rel) triples
+ |> foldr1 HOLogic.mk_conj
+ |> HOLogic.mk_Trueprop;
+ in
+ if debug then writeln ("Wellfoundedness goal: " ^ Syntax.string_of_term ctxt prop)
+ else ();
+ if wf_timeout = Synchronized.value cached_timeout andalso
+ length (Synchronized.value cached_wf_props) < max_cached_wfs then
+ ()
+ else
+ (Synchronized.change cached_wf_props (K []);
+ Synchronized.change cached_timeout (K wf_timeout));
+ (case AList.lookup (op =) (Synchronized.value cached_wf_props) prop of
+ SOME wf => wf
+ | NONE =>
+ let
+ val goal = Goal.init (Thm.cterm_of ctxt prop);
+ val wf = exists (terminates_by ctxt wf_timeout goal) termination_tacs;
+ in
+ Synchronized.change cached_wf_props (cons (prop, wf)); wf
+ end)
+ end)
+ handle
+ List.Empty => false
+ | NO_TRIPLE () => false)
+ end;
+
+datatype lhs_pat =
+ Only_Vars
+| Prim_Pattern of string
+| Any_Pattern;
+
+fun is_likely_pat_complete ctxt props =
+ let
+ val is_Var_or_Bound = is_Var orf is_Bound;
+
+ fun lhs_pat_of t =
+ (case t of
+ Const (@{const_name All}, _) $ Abs (_, _, t) => lhs_pat_of t
+ | Const (@{const_name HOL.eq}, _) $ u $ _ =>
+ (case filter_out is_Var_or_Bound (snd (strip_comb u)) of
+ [] => Only_Vars
+ | [v] =>
+ (case strip_comb v of
+ (cst as Const (_, T), args) =>
+ (case body_type T of
+ Type (T_name, _) =>
+ if can (Ctr_Sugar.dest_ctr ctxt T_name) cst andalso forall is_Var_or_Bound args then
+ Prim_Pattern T_name
+ else
+ Any_Pattern
+ | _ => Any_Pattern)
+ | _ => Any_Pattern)
+ | _ => Any_Pattern)
+ | _ => Any_Pattern);
+ in
+ (case map lhs_pat_of props of
+ [] => false
+ | pats as Prim_Pattern T_name :: _ =>
+ forall (can (fn Prim_Pattern _ => ())) pats andalso
+ length pats = length (#ctrs (the (Ctr_Sugar.ctr_sugar_of ctxt T_name)))
+ | pats => forall (curry (op =) Only_Vars) pats)
+ end;
+
+(* Prevents divergence in case of cyclic or infinite axiom dependencies. *)
+val axioms_max_depth = 255
+
+fun isa_problem_of_subgoal ctxt falsify wfs whacks cards debug wf_timeout evals0 some_assms0
+ subgoal0 =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ fun card_of T =
+ (case triple_lookup (typ_match thy o swap) cards T of
+ NONE => (NONE, NONE)
+ | SOME (c1, c2) => (if c1 = SOME 1 then NONE else c1, c2));
+
+ fun axioms_of_class class =
+ #axioms (Axclass.get_info thy class)
+ handle ERROR _ => [];
+
+ fun monomorphize_class_axiom T t =
+ (case Term.add_tvars t [] of
+ [] => t
+ | [(x, S)] => Envir.subst_term_types (Vartab.make [(x, (S, T))]) t);
+
+ fun consider_sort depth T S (seens as (seenS, seenT, seen), problem) =
+ if member (op =) seenS S then
+ (seens, problem)
+ else if depth > axioms_max_depth then
+ raise TOO_DEEP_DEPS ()
+ else
+ let
+ val seenS = S :: seenS;
+ val seens = (seenS, seenT, seen);
+
+ val supers = Sign.complete_sort thy S;
+ val axioms0 = maps (map Thm.prop_of o axioms_of_class) supers;
+ val axioms = map (preprocess_prop false ctxt whacks o monomorphize_class_axiom T) axioms0;
+ in
+ (seens, map IAxiom axioms @ problem)
+ |> fold (consider_term (depth + 1)) axioms
+ end
+ and consider_type depth T =
+ (case T of
+ Type (s, Ts) =>
+ if is_type_builtin s then fold (consider_type depth) Ts
+ else consider_non_builtin_type depth T
+ | _ => consider_non_builtin_type depth T)
+ and consider_non_builtin_type depth T (seens as (seenS, seenT, seen), problem) =
+ if member (op =) seenT T then
+ (seens, problem)
+ else
+ let
+ val seenT = T :: seenT;
+ val seens = (seenS, seenT, seen);
+
+ fun consider_typedef_or_quotient itypedef_or_quotient tuple_of s =
+ let
+ val (T0, repT0, wrt0, abs0, rep0) = tuple_of ctxt whacks s;
+ val tyenv = Sign.typ_match thy (T0, T) Vartab.empty;
+ val substT = Envir.subst_type tyenv;
+ val subst = Envir.subst_term_types tyenv;
+ val repT = substT repT0;
+ val wrt = subst (wrt0 ());
+ val abs = subst abs0;
+ val rep = subst rep0;
+ in
+ apsnd (cons (itypedef_or_quotient {abs_typ = T, rep_typ = repT, wrt = wrt, abs = abs,
+ rep = rep}))
+ #> consider_term (depth + 1) wrt
+ end;
+ in
+ (seens, problem)
+ |> (case T of
+ TFree (_, S) =>
+ apsnd (cons (ITVal (T, card_of T)))
+ #> consider_sort depth T S
+ | TVar (_, S) => consider_sort depth T S
+ | Type (s, Ts) =>
+ fold (consider_type depth) Ts
+ #> (case classify_type_name ctxt s of
+ Co_Datatype =>
+ let
+ val (fp, fpTs, ctrss) = mutual_co_datatypes_of ctxt (s, Ts);
+ val specs = map2 (fn T => fn ctrs => {typ = T, ctrs = ctrs}) fpTs ctrss;
+ in
+ (fn ((seenS, seenT, seen), problem) =>
+ ((seenS, union (op =) fpTs seenT, seen), ICoData (fp, specs) :: problem))
+ #> fold (fold (consider_type (depth + 1) o fastype_of)) ctrss
+ end
+ | Typedef => consider_typedef_or_quotient ITypedef typedef_of s
+ | Quotient => consider_typedef_or_quotient IQuotient quotient_of s
+ | TVal => apsnd (cons (ITVal (T, card_of T)))))
+ end
+ and consider_term depth t =
+ (case t of
+ t1 $ t2 => fold (consider_term depth) [t1, t2]
+ | Var (_, T) => consider_type depth T
+ | Bound _ => I
+ | Abs (_, T, t') =>
+ consider_term depth t'
+ #> consider_type depth T
+ | _ => (fn (seens as (seenS, seenT, seen), problem) =>
+ if member (op aconv) seen t then
+ (seens, problem)
+ else if depth > axioms_max_depth then
+ raise TOO_DEEP_DEPS ()
+ else
+ let
+ val seen = t :: seen;
+ val seens = (seenS, seenT, seen);
+ in
+ (case t of
+ Const (x as (s, T)) =>
+ (if is_const_builtin s orelse is_co_datatype_ctr ctxt x orelse
+ is_co_datatype_case ctxt x orelse is_quotient_abs ctxt whacks x orelse
+ is_quotient_rep ctxt whacks x orelse is_typedef_abs ctxt whacks x orelse
+ is_typedef_rep ctxt whacks x then
+ (seens, problem)
+ else if is_stale_typedef_abs ctxt whacks x orelse
+ is_stale_typedef_rep ctxt whacks x then
+ raise UNSUPPORTED_FUNC t
+ else
+ (case spec_rules_of ctxt x of
+ SOME (classif, consts, props0, poly_props) =>
+ let
+ val props = map (preprocess_prop false ctxt whacks) props0;
+
+ fun def_or_spec () =
+ (case definition_of thy x of
+ SOME eq0 =>
+ let val eq = preprocess_prop false ctxt whacks eq0 in
+ ([eq], [IRec [{const = t, props = [eq], pat_complete = true}]])
+ end
+ | NONE => (props, [ISpec {consts = consts, props = props}]));
+
+ val (props', cmds) =
+ if null props then
+ ([], map IVal consts)
+ else if classif = Spec_Rules.Equational then
+ (case partition_props consts props of
+ SOME propss =>
+ (props,
+ [IRec (map2 (fn const => fn props =>
+ {const = const, props = props,
+ pat_complete = is_likely_pat_complete ctxt props})
+ consts propss)])
+ | NONE => def_or_spec ())
+ else if member (op =) [Spec_Rules.Inductive, Spec_Rules.Co_Inductive]
+ classif then
+ if is_inductive_set_intro (hd props) then
+ def_or_spec ()
+ else
+ (case partition_props consts props of
+ SOME propss =>
+ (props,
+ [ICoPred (if classif = Spec_Rules.Inductive then BNF_Util.Least_FP
+ else BNF_Util.Greatest_FP,
+ length consts = 1 andalso
+ is_wellfounded_inductive_predicate ctxt wfs debug wf_timeout
+ (the_single consts) poly_props,
+ map2 (fn const => fn props => {const = const, props = props})
+ consts propss)])
+ | NONE => def_or_spec ())
+ else
+ def_or_spec ();
+ in
+ ((seenS, seenT, union (op aconv) consts seen), cmds @ problem)
+ |> fold (consider_term (depth + 1)) props'
+ end
+ | NONE =>
+ (case definition_of thy x of
+ SOME eq0 =>
+ let val eq = preprocess_prop false ctxt whacks eq0 in
+ (seens, IRec [{const = t, props = [eq], pat_complete = true}] :: problem)
+ |> consider_term (depth + 1) eq
+ end
+ | NONE => (seens, IVal t :: problem))))
+ |> consider_type depth T
+ | Free (_, T) =>
+ (seens, IVal t :: problem)
+ |> consider_type depth T)
+ end));
+
+ val (poly_axioms, mono_axioms0) = orphan_axioms_of ctxt
+ |> List.partition has_polymorphism;
+
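+    (* Collect terms from the goal that are worth evaluating in the model, tracking the polarity
+       of the subformula in which they occur. *)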
+ fun implicit_evals_of pol (@{const Not} $ t) = implicit_evals_of (not pol) t
+ | implicit_evals_of pol (@{const implies} $ t $ u) =
+ (case implicit_evals_of pol u of
+ [] => implicit_evals_of (not pol) t
+ | ts => ts)
+ | implicit_evals_of pol (@{const conj} $ t $ u) =
+ union (op aconv) (implicit_evals_of pol t) (implicit_evals_of pol u)
+ | implicit_evals_of pol (@{const disj} $ t $ u) =
+ union (op aconv) (implicit_evals_of pol t) (implicit_evals_of pol u)
+ | implicit_evals_of false (Const (@{const_name HOL.eq}, _) $ t $ u) =
+ distinct (op aconv) [t, u]
+ | implicit_evals_of true (Const (@{const_name HOL.eq}, _) $ t $ _) = [t]
+ | implicit_evals_of _ _ = [];
+
+ val mono_axioms_and_some_assms =
+ map (preprocess_prop false ctxt whacks) (mono_axioms0 @ some_assms0);
+ val subgoal = preprocess_prop falsify ctxt whacks subgoal0;
+ val implicit_evals = implicit_evals_of true subgoal;
+ val evals = map (preprocess_closed_term ctxt whacks) evals0;
+ val seens = ([], [], []);
+
+ val (commandss, complete) =
+ (seens,
+ map IAxiom mono_axioms_and_some_assms @ [IGoal subgoal] @ map IEval (implicit_evals @ evals))
+ |> fold (consider_term 0) (subgoal :: evals @ mono_axioms_and_some_assms)
+ |> snd
+ |> rev (* prettier *)
+ |> sort_isa_commands_topologically ctxt
+ |>> group_isa_commands;
+ in
+ (poly_axioms, {commandss = commandss, sound = true, complete = complete})
+ end;
+
+fun add_pat_complete_of_command cmd =
+ (case cmd of
+ ICoPred (_, _, specs) => union (op =) (map #const specs)
+ | IRec specs =>
+ union (op =) (map_filter (try (fn {const, pat_complete = true, ...} => const)) specs)
+ | _ => I);
+
+fun pat_completes_of_isa_problem {commandss, ...} =
+ fold (fold add_pat_complete_of_command) commandss [];
+
+fun str_of_isa_term_with_type ctxt t =
+ Syntax.string_of_term ctxt t ^ " : " ^ Syntax.string_of_typ ctxt (fastype_of t);
+
+fun is_triv_wrt (Abs (_, _, body)) = is_triv_wrt body
+ | is_triv_wrt @{const True} = true
+ | is_triv_wrt _ = false;
+
+fun str_of_isa_type_spec ctxt {abs_typ, rep_typ, wrt, abs, rep} =
+ Syntax.string_of_typ ctxt abs_typ ^ " := " ^ Syntax.string_of_typ ctxt rep_typ ^
+ (if is_triv_wrt wrt then "" else "\n wrt " ^ Syntax.string_of_term ctxt wrt) ^
+ "\n abstract " ^ Syntax.string_of_term ctxt abs ^
+ "\n concrete " ^ Syntax.string_of_term ctxt rep;
+
+fun str_of_isa_co_data_spec ctxt {typ, ctrs} =
+ Syntax.string_of_typ ctxt typ ^ " :=\n " ^
+ space_implode "\n| " (map (str_of_isa_term_with_type ctxt) ctrs);
+
+fun str_of_isa_const_spec ctxt {const, props} =
+ str_of_isa_term_with_type ctxt const ^ " :=\n " ^
+ space_implode ";\n " (map (Syntax.string_of_term ctxt) props);
+
+fun str_of_isa_rec_spec ctxt {const, props, pat_complete} =
+ str_of_isa_term_with_type ctxt const ^ (if pat_complete then " [pat_complete]" else "") ^
+ " :=\n " ^ space_implode ";\n " (map (Syntax.string_of_term ctxt) props);
+
+fun str_of_isa_consts_spec ctxt {consts, props} =
+ space_implode " and\n " (map (str_of_isa_term_with_type ctxt) consts) ^ " :=\n " ^
+ space_implode ";\n " (map (Syntax.string_of_term ctxt) props);
+
+fun str_of_isa_card NONE = ""
+ | str_of_isa_card (SOME k) = signed_string_of_int k;
+
+fun str_of_isa_cards_suffix (NONE, NONE) = ""
+ | str_of_isa_cards_suffix (c1, c2) = " " ^ str_of_isa_card c1 ^ "-" ^ str_of_isa_card c2;
+
+fun str_of_isa_command ctxt (ITVal (T, cards)) =
+ "type " ^ Syntax.string_of_typ ctxt T ^ str_of_isa_cards_suffix cards
+ | str_of_isa_command ctxt (ITypedef spec) = "typedef " ^ str_of_isa_type_spec ctxt spec
+ | str_of_isa_command ctxt (IQuotient spec) = "quotient " ^ str_of_isa_type_spec ctxt spec
+ | str_of_isa_command ctxt (ICoData (fp, specs)) =
+ BNF_Util.case_fp fp "data" "codata" ^ " " ^ str_of_and_list (str_of_isa_co_data_spec ctxt) specs
+ | str_of_isa_command ctxt (IVal t) = "val " ^ str_of_isa_term_with_type ctxt t
+ | str_of_isa_command ctxt (ICoPred (fp, wf, specs)) =
+ BNF_Util.case_fp fp "pred" "copred" ^ " " ^ (if wf then "[wf] " else "") ^
+ str_of_and_list (str_of_isa_const_spec ctxt) specs
+ | str_of_isa_command ctxt (IRec specs) = "rec " ^ str_of_and_list (str_of_isa_rec_spec ctxt) specs
+ | str_of_isa_command ctxt (ISpec spec) = "spec " ^ str_of_isa_consts_spec ctxt spec
+ | str_of_isa_command ctxt (IAxiom prop) = "axiom " ^ Syntax.string_of_term ctxt prop
+ | str_of_isa_command ctxt (IGoal prop) = "goal " ^ Syntax.string_of_term ctxt prop
+ | str_of_isa_command ctxt (IEval t) = "eval " ^ Syntax.string_of_term ctxt t;
+
+fun str_of_isa_problem ctxt {commandss, sound, complete} =
+ map (cat_lines o map (suffix "." o str_of_isa_command ctxt)) commandss
+ |> space_implode "\n\n" |> suffix "\n"
+ |> prefix ("# " ^ (if sound then "sound" else "unsound") ^ "\n")
+ |> prefix ("# " ^ (if complete then "complete" else "incomplete") ^ "\n");
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_commands.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,265 @@
+(*  Title:      HOL/Tools/Nunchaku/nunchaku_commands.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Adds the "nunchaku" and "nunchaku_params" commands to Isabelle/Isar's outer syntax.
+*)
+
+signature NUNCHAKU_COMMANDS =
+sig
+ type params = Nunchaku.params
+
+ val default_params: theory -> (string * string) list -> params
+end;
+
+structure Nunchaku_Commands : NUNCHAKU_COMMANDS =
+struct
+
+open Nunchaku_Util;
+open Nunchaku;
+
+type raw_param = string * string list;
+
+val default_default_params =
+ [("assms", "true"),
+ ("debug", "false"),
+ ("falsify", "true"),
+ ("max_genuine", "1"),
+ ("max_potential", "1"),
+ ("overlord", "false"),
+ ("solvers", "cvc4 kodkod paradox smbc"),
+ ("specialize", "true"),
+ ("spy", "false"),
+ ("timeout", "30"),
+ ("verbose", "false"),
+ ("wf_timeout", "0.5")];
+
+val negated_params =
+ [("dont_whack", "whack"),
+ ("dont_specialize", "specialize"),
+ ("dont_spy", "spy"),
+ ("no_assms", "assms"),
+ ("no_debug", "debug"),
+ ("no_overlord", "overlord"),
+ ("non_mono", "mono"),
+ ("non_wf", "wf"),
+ ("quiet", "verbose"),
+ ("satisfy", "falsify")];
+
+fun is_known_raw_param s =
+ AList.defined (op =) default_default_params s orelse
+ AList.defined (op =) negated_params s orelse
+ member (op =) ["atoms", "card", "eval", "expect"] s orelse
+ exists (fn p => String.isPrefix (p ^ " ") s)
+ ["atoms", "card", "dont_whack", "mono", "non_mono", "non_wf", "wf", "whack"];
+
+fun check_raw_param (s, _) =
+ if is_known_raw_param s then () else error ("Unknown parameter: " ^ quote s);
+
+fun unnegate_param_name name =
+ (case AList.lookup (op =) negated_params name of
+ NONE =>
+ if String.isPrefix "dont_" name then SOME (unprefix "dont_" name)
+ else if String.isPrefix "non_" name then SOME (unprefix "non_" name)
+ else NONE
+ | some_name => some_name);
+
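+(* Negated parameters are normalized to their positive counterparts with a flipped
+   value; e.g. "no_assms" becomes "assms = false" and "non_wf f" becomes "wf f = false". *)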
+fun normalize_raw_param (name, value) =
+ (case unnegate_param_name name of
+ SOME name' =>
+ [(name',
+ (case value of
+ ["false"] => ["true"]
+ | ["true"] => ["false"]
+ | [] => ["false"]
+ | _ => value))]
+ | NONE => [(name, value)]);
+
+structure Data = Theory_Data
+(
+ type T = raw_param list
+ val empty = default_default_params |> map (apsnd single)
+ val extend = I
+ fun merge data = AList.merge (op =) (K true) data
+);
+
+val set_default_raw_param = Data.map o fold (AList.update (op =)) o normalize_raw_param;
+val default_raw_params = Data.get;
+
+fun is_punctuation s = (s = "," orelse s = "-");
+
+fun stringify_raw_param_value [] = ""
+ | stringify_raw_param_value [s] = s
+ | stringify_raw_param_value (s1 :: s2 :: ss) =
+ s1 ^ (if is_punctuation s1 orelse is_punctuation s2 then "" else " ") ^
+ stringify_raw_param_value (s2 :: ss);
+
+fun extract_params ctxt mode default_params override_params =
+ let
+ val override_params = maps normalize_raw_param override_params;
+ val raw_params = rev override_params @ rev default_params;
+ val raw_lookup = AList.lookup (op =) raw_params;
+ val lookup = Option.map stringify_raw_param_value o raw_lookup;
+ val lookup_string = the_default "" o lookup;
+ val lookup_strings = these o Option.map (space_explode " ") o lookup;
+
+ fun general_lookup_bool option default_value name =
+ (case lookup name of
+ SOME s => parse_bool_option option name s
+ | NONE => default_value);
+
+ val lookup_bool = the o general_lookup_bool false (SOME false);
+
+ fun lookup_int name =
+ (case lookup name of
+ SOME s =>
+ (case Int.fromString s of
+ SOME i => i
+ | NONE => error ("Parameter " ^ quote name ^ " must be assigned an integer value"))
+ | NONE => 0);
+
+ fun int_range_from_string name s =
+ (case space_explode "-" s of
+ [s] => (s, s)
+ | [s1, s2] => (s1, s2)
+ | _ => error ("Parameter " ^ quote name ^ " must be assigned a range of integers"))
+ |> apply2 Int.fromString;
+
+ fun lookup_assigns read pre of_str =
+ (case lookup pre of
+ SOME s => [(NONE, of_str s)]
+ | NONE => []) @
+ map (fn (name, value) => (SOME (read (String.extract (name, size pre + 1, NONE))),
+ of_str (stringify_raw_param_value value)))
+ (filter (String.isPrefix (pre ^ " ") o fst) raw_params);
+
+ fun lookup_int_range_assigns read pre =
+ lookup_assigns read pre (int_range_from_string pre);
+
+ fun lookup_bool_assigns read pre =
+ lookup_assigns read pre (the o parse_bool_option false pre);
+
+ fun lookup_bool_option_assigns read pre =
+ lookup_assigns read pre (parse_bool_option true pre);
+
+ fun lookup_strings_assigns read pre =
+ lookup_assigns read pre (space_explode " ");
+
+ fun lookup_time name =
+ (case lookup name of
+ SOME s => parse_time name s
+ | NONE => Time.zeroTime);
+
+ val read_type_polymorphic =
+ Syntax.read_typ ctxt #> Logic.mk_type
+ #> singleton (Variable.polymorphic ctxt) #> Logic.dest_type;
+ val read_term_polymorphic =
+ Syntax.read_term ctxt #> singleton (Variable.polymorphic ctxt);
+ val lookup_term_list_option_polymorphic =
+ AList.lookup (op =) raw_params #> Option.map (map read_term_polymorphic);
+
+ fun read_const_polymorphic s =
+ (case read_term_polymorphic s of
+ Const x => x
+ | t => error ("Not a constant: " ^ Syntax.string_of_term ctxt t));
+
+ val solvers = lookup_strings "solvers";
+ val falsify = lookup_bool "falsify";
+ val assms = lookup_bool "assms";
+ val spy = getenv "NUNCHAKU_SPY" = "yes" orelse lookup_bool "spy";
+ val overlord = lookup_bool "overlord";
+ val expect = lookup_string "expect";
+
+ val wfs = lookup_bool_option_assigns read_const_polymorphic "wf";
+ val whacks = lookup_bool_assigns read_term_polymorphic "whack";
+ val cards = lookup_int_range_assigns read_type_polymorphic "card";
+ val monos = lookup_bool_option_assigns read_type_polymorphic "mono";
+
+ val debug = (mode <> Auto_Try andalso lookup_bool "debug");
+ val verbose = debug orelse (mode <> Auto_Try andalso lookup_bool "verbose");
+ val max_potential = if mode = Normal then Int.max (0, lookup_int "max_potential") else 0;
+ val max_genuine = Int.max (0, lookup_int "max_genuine");
+ val evals = these (lookup_term_list_option_polymorphic "eval");
+ val atomss = lookup_strings_assigns read_type_polymorphic "atoms";
+
+ val specialize = lookup_bool "specialize";
+ val multithread = mode = Normal andalso lookup_bool "multithread";
+
+ val timeout = lookup_time "timeout";
+ val wf_timeout = lookup_time "wf_timeout";
+
+ val mode_of_operation_params =
+ {solvers = solvers, falsify = falsify, assms = assms, spy = spy, overlord = overlord,
+ expect = expect};
+
+ val scope_of_search_params =
+ {wfs = wfs, whacks = whacks, cards = cards, monos = monos};
+
+ val output_format_params =
+ {verbose = verbose, debug = debug, max_potential = max_potential, max_genuine = max_genuine,
+ evals = evals, atomss = atomss};
+
+ val optimization_params =
+ {specialize = specialize, multithread = multithread};
+
+ val timeout_params =
+ {timeout = timeout, wf_timeout = wf_timeout};
+ in
+ {mode_of_operation_params = mode_of_operation_params,
+ scope_of_search_params = scope_of_search_params,
+ output_format_params = output_format_params,
+ optimization_params = optimization_params,
+ timeout_params = timeout_params}
+ end;
+
+fun default_params thy =
+ extract_params (Proof_Context.init_global thy) Normal (default_raw_params thy)
+ o map (apsnd single);
+
+val parse_key = Scan.repeat1 Parse.embedded >> space_implode " ";
+val parse_value =
+ Scan.repeat1 (Parse.minus >> single
+ || Scan.repeat1 (Scan.unless Parse.minus (Parse.name || Parse.float_number))
+ || @{keyword ","} |-- Parse.number >> prefix "," >> single)
+ >> flat;
+val parse_param = parse_key -- Scan.optional (@{keyword "="} |-- parse_value) [];
+val parse_params = Scan.optional (@{keyword "["} |-- Parse.list parse_param --| @{keyword "]"}) [];
+
+fun run_chaku override_params mode i state0 =
+ let
+ val state = Proof.map_contexts (Try0.silence_methods false) state0;
+ val thy = Proof.theory_of state;
+ val ctxt = Proof.context_of state;
+ val _ = List.app check_raw_param override_params;
+ val params = extract_params ctxt mode (default_raw_params thy) override_params;
+ in
+ (if mode = Auto_Try then perhaps o try else fn f => fn x => f x)
+ (fn _ => run_chaku_on_subgoal state params mode i) (unknownN, NONE)
+ |> `(fn (outcome_code, _) => outcome_code = genuineN)
+ end;
+
+fun string_for_raw_param (name, value) =
+ name ^ " = " ^ stringify_raw_param_value value;
+
+fun nunchaku_params_trans params =
+ Toplevel.theory (fold set_default_raw_param params
+ #> tap (fn thy =>
+ let val params = rev (default_raw_params thy) in
+ List.app check_raw_param params;
+ writeln ("Default parameters for Nunchaku:\n" ^
+ (params |> map string_for_raw_param |> sort_strings |> cat_lines))
+ end));
+
+val _ =
+ Outer_Syntax.command @{command_keyword nunchaku}
+ "try to find a countermodel using Nunchaku"
+ (parse_params -- Scan.optional Parse.nat 1 >> (fn (params, i) =>
+ Toplevel.keep_proof (fn state =>
+ ignore (run_chaku params Normal i (Toplevel.proof_of state)))));
+
+val _ =
+ Outer_Syntax.command @{command_keyword nunchaku_params}
+ "set and display the default parameters for Nunchaku"
+ (parse_params #>> nunchaku_params_trans);
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_display.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,91 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_display.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Pretty printing of Isabelle/HOL models for Nunchaku.
+*)
+
+signature NUNCHAKU_DISPLAY =
+sig
+ type isa_model = Nunchaku_Reconstruct.isa_model
+
+ val pretty_of_isa_model_opt: Proof.context -> isa_model option -> Pretty.T
+end;
+
+structure Nunchaku_Display : NUNCHAKU_DISPLAY =
+struct
+
+open Nunchaku_Util;
+open Nunchaku_Reconstruct;
+
+val indent_size = 2;
+
+val pretty_indent = Pretty.indent indent_size;
+
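+(* Cheap string keys used only to sort model entries for display: free and type
+   variables are prefixed with "a" so that they are listed before constants and type
+   constructors ("b") and abstractions ("c"). *)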
+fun sorting_str_of_typ (TFree (s, _)) = "a" ^ s
+ | sorting_str_of_typ (Type (s, Ts)) = "b" ^ s ^ space_implode " " (map sorting_str_of_typ Ts)
+ | sorting_str_of_typ (TVar _) = "X";
+
+fun sorting_str_of_term (Const (s, T)) = "b" ^ s ^ sorting_str_of_typ T
+ | sorting_str_of_term (Free (s, _)) = "a" ^ s
+ | sorting_str_of_term (t $ u) = sorting_str_of_term t ^ " " ^ sorting_str_of_term u
+ | sorting_str_of_term (Abs (_, T, t)) = "c" ^ sorting_str_of_typ T ^ " " ^ sorting_str_of_term t
+ | sorting_str_of_term _ = "X";
+
+fun pretty_of_isa_model_opt _ NONE =
+ pretty_indent (Pretty.str "Model unavailable (internal error)")
+ | pretty_of_isa_model_opt ctxt0
+ (SOME {type_model, free_model, pat_complete_model, pat_incomplete_model, skolem_model}) =
+ let
+ val ctxt = ctxt0 |> Config.put show_question_marks false;
+
+ val pat_incomplete_model' = pat_incomplete_model
+ |> filter_out (can (fn Const (@{const_name unreachable}, _) => ()) o fst);
+
+ fun pretty_of_typ_entry (T, atoms) =
+ Pretty.block (Pretty.breaks [Syntax.pretty_typ ctxt T, Pretty.str "=",
+ Pretty.enum "," "{" "}" (map (Syntax.pretty_term ctxt) atoms)]);
+
+ fun pretty_of_term_entry (t, value) =
+ let
+ val no_types_ctxt = ctxt |> Config.put show_types false;
+ val schematic_ctxt = ctxt |> Proof_Context.set_mode Proof_Context.mode_schematic;
+
+ val show_types = Config.get ctxt show_types;
+ val value' = value |> perhaps (try (Syntax.check_term schematic_ctxt));
+ val T = fastype_of t;
+ val T' = if T = dummyT then try fastype_of value' |> the_default T else T;
+ val t' = t |> show_types ? Type.constraint T';
+ in
+ Pretty.block (Pretty.breaks
+ [Syntax.pretty_term ctxt t'
+ |> (show_types andalso T' <> dummyT) ? (single #> Pretty.enclose "(" ")"),
+ Pretty.str "=", Syntax.pretty_term no_types_ctxt value'])
+ end;
+
+ fun chunks_of_entries sorting_str_of pretty_of title entries =
+ if not (null entries) then
+ (if title = "" then [] else [Pretty.str (title ^ plural_s_for_list entries ^ ":")]) @
+ map (pretty_indent o pretty_of) (sort_by (sorting_str_of o fst) entries)
+ else
+ [];
+
+ val chunks =
+ (if null free_model then
+ [pretty_indent (Pretty.str "No free variables")]
+ else
+ chunks_of_entries sorting_str_of_term pretty_of_term_entry "" free_model) @
+ chunks_of_entries sorting_str_of_term pretty_of_term_entry "Skolem constant" skolem_model @
+ chunks_of_entries sorting_str_of_term pretty_of_term_entry "Underspecified constant"
+ pat_incomplete_model' @
+ (if Config.get ctxt show_consts then
+ chunks_of_entries sorting_str_of_term pretty_of_term_entry "Fully specified constant"
+ pat_complete_model
+ else
+ []) @
+ chunks_of_entries sorting_str_of_typ pretty_of_typ_entry "Type" type_model;
+ in
+ Pretty.chunks chunks
+ end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_model.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,284 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_model.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Abstract syntax tree for Nunchaku models.
+*)
+
+signature NUNCHAKU_MODEL =
+sig
+ type ident = Nunchaku_Problem.ident
+ type ty = Nunchaku_Problem.ty
+ type tm = Nunchaku_Problem.tm
+ type name_pool = Nunchaku_Problem.name_pool
+
+ type ty_entry = ty * tm list
+ type tm_entry = tm * tm
+
+ type nun_model =
+ {type_model: ty_entry list,
+ const_model: tm_entry list,
+ skolem_model: tm_entry list}
+
+ val str_of_nun_model: nun_model -> string
+
+ val allocate_ugly: name_pool -> string * string -> string * name_pool
+
+ val ugly_nun_model: name_pool -> nun_model -> nun_model
+
+ datatype token =
+ Ident of ident
+ | Symbol of ident
+ | Atom of ident * int
+ | End_of_Stream
+
+ val parse_tok: ''a -> ''a list -> ''a * ''a list
+ val parse_ident: token list -> ident * token list
+ val parse_id: ident -> token list -> token * token list
+ val parse_sym: ident -> token list -> token * token list
+ val parse_atom: token list -> (ident * int) * token list
+ val nun_model_of_str: string -> nun_model
+end;
+
+structure Nunchaku_Model : NUNCHAKU_MODEL =
+struct
+
+open Nunchaku_Problem;
+
+type ty_entry = ty * tm list;
+type tm_entry = tm * tm;
+
+type nun_model =
+ {type_model: ty_entry list,
+ const_model: tm_entry list,
+ skolem_model: tm_entry list};
+
+val nun_SAT = str_of_ident "SAT";
+
+fun str_of_ty_entry (ty, tms) =
+ "type " ^ str_of_ty ty ^ " := {" ^ commas (map str_of_tm tms) ^ "}.";
+
+fun str_of_tm_entry (tm, value) =
+ "val " ^ str_of_tm tm ^ " := " ^ str_of_tm value ^ ".";
+
+fun str_of_nun_model {type_model, const_model, skolem_model} =
+ map str_of_ty_entry type_model @ "" :: map str_of_tm_entry const_model @ "" ::
+ map str_of_tm_entry skolem_model
+ |> cat_lines;
+
+fun fold_map_ty_entry_idents f (ty, atoms) =
+ fold_map_ty_idents f ty
+ ##>> fold_map (fold_map_tm_idents f) atoms;
+
+fun fold_map_tm_entry_idents f (tm, value) =
+ fold_map_tm_idents f tm
+ ##>> fold_map_tm_idents f value;
+
+fun fold_map_nun_model_idents f {type_model, const_model, skolem_model} =
+ fold_map (fold_map_ty_entry_idents f) type_model
+ ##>> fold_map (fold_map_tm_entry_idents f) const_model
+ ##>> fold_map (fold_map_tm_entry_idents f) skolem_model
+ #>> (fn ((type_model, const_model), skolem_model) =>
+ {type_model = type_model, const_model = const_model, skolem_model = skolem_model});
+
+fun swap_name_pool ({nice_of_ugly, ugly_of_nice} : name_pool) =
+ {nice_of_ugly = ugly_of_nice, ugly_of_nice = nice_of_ugly};
+
+fun allocate_ugly pool (nice, ugly_sugg) =
+ allocate_nice (swap_name_pool pool) (nice, ugly_sugg) ||> swap_name_pool;
+
+fun ugly_ident nice (pool as {ugly_of_nice, ...}) =
+ (case Symtab.lookup ugly_of_nice nice of
+ NONE => allocate_ugly pool (nice, nice)
+ | SOME ugly => (ugly, pool));
+
+fun ugly_nun_model pool model =
+ fst (fold_map_nun_model_idents ugly_ident model pool);
+
+datatype token =
+ Ident of ident
+| Symbol of ident
+| Atom of ident * int
+| End_of_Stream;
+
+val rev_str = String.implode o rev o String.explode;
+
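+(* An atom token such as "$a_3" is split into its base name and index at the last "_";
+   the split point is found on the reversed string, since base names may themselves
+   contain underscores. *)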
+fun atom_of_str s =
+ (case first_field "_" (rev_str s) of
+ SOME (rev_suf, rev_pre) =>
+ let
+ val pre = rev_str rev_pre;
+ val suf = rev_str rev_suf;
+ in
+ (case Int.fromString suf of
+ SOME j => Atom (ident_of_str pre, j)
+ | NONE => raise Fail "ill-formed atom")
+ end
+ | NONE => raise Fail "ill-formed atom");
+
+fun is_alnum_etc_char c = Char.isAlphaNum c orelse c = #"_" orelse c = #"/";
+
+val multi_ids =
+ [nun_arrow, nun_assign, nun_conj, nun_disj, nun_implies, nun_unparsable, nun_irrelevant];
+
+val nun_anon_fun_prefix_exploded = String.explode nun_anon_fun_prefix;
+val [nun_dollar_char] = String.explode nun_dollar;
+
+fun next_token [] = (End_of_Stream, [])
+ | next_token (c :: cs) =
+ if Char.isSpace c then
+ next_token cs
+ else if c = nun_dollar_char then
+ let val n = find_index (not o is_alnum_etc_char) cs in
+ (if n = ~1 then (cs, []) else chop n cs)
+ |>> (String.implode
+ #> (if is_prefix (op =) nun_anon_fun_prefix_exploded cs then ident_of_str #> Ident
+ else atom_of_str))
+ end
+ else if is_alnum_etc_char c then
+ let val n = find_index (not o is_alnum_etc_char) cs in
+ (if n = ~1 then (cs, []) else chop n cs)
+ |>> (cons c #> String.implode #> ident_of_str #> Ident)
+ end
+ else
+ let
+ fun next_multi id =
+ let
+ val s = str_of_ident id;
+ val n = String.size s - 1;
+ in
+ if c = String.sub (s, 0) andalso
+ is_prefix (op =) (String.explode (String.substring (s, 1, n))) cs then
+ SOME (Symbol id, drop n cs)
+ else
+ NONE
+ end;
+ in
+ (case get_first next_multi multi_ids of
+ SOME res => res
+ | NONE => (Symbol (ident_of_str (String.str c)), cs))
+ end;
+
+val tokenize =
+ let
+ fun toks cs =
+ (case next_token cs of
+ (End_of_Stream, []) => []
+ | (tok, cs') => tok :: toks cs');
+ in
+ toks o String.explode
+ end;
+
+fun parse_enum sep scan = scan ::: Scan.repeat (sep |-- scan);
+
+fun parse_tok tok = Scan.one (curry (op =) tok);
+
+val parse_ident = Scan.some (try (fn Ident id => id));
+val parse_id = parse_tok o Ident;
+val parse_sym = parse_tok o Symbol;
+val parse_atom = Scan.some (try (fn Atom id_j => id_j));
+
+val confusing_ids = [nun_else, nun_then, nun_with];
+
+val parse_confusing_id = Scan.one (fn Ident id => member (op =) confusing_ids id | _ => false);
+
+fun parse_ty toks =
+ (parse_ty_arg -- Scan.option (parse_sym nun_arrow -- parse_ty)
+ >> (fn (ty, NONE) => ty
+ | (lhs, SOME (Symbol id, rhs)) => NType (id, [lhs, rhs]))) toks
+and parse_ty_arg toks =
+ (parse_ident >> (rpair [] #> NType)
+ || parse_sym nun_lparen |-- parse_ty --| parse_sym nun_rparen) toks;
+
+val parse_choice_or_unique =
+ (parse_tok (Ident nun_choice) || parse_tok (Ident nun_unique)
+ || parse_tok (Ident nun_unique_unsafe))
+ -- parse_ty_arg
+ >> (fn (Ident id, ty) => NConst (id, [ty], mk_arrows_ty ([ty, prop_ty], ty)));
+
+fun parse_tm toks =
+ (parse_id nun_lambda |-- Scan.repeat parse_arg --| parse_sym nun_dot -- parse_tm >> nabss
+ || parse_id nun_mu |-- parse_arg --| parse_sym nun_dot -- parse_tm
+ >> (fn (var, body) =>
+ let val ty = safe_ty_of body in
+ NApp (NConst (nun_mu, [ty], mk_arrow_ty (mk_arrow_ty (ty, ty), ty)), NAbs (var, body))
+ end)
+ || parse_id nun_if |-- parse_tm --| parse_id nun_then -- parse_tm --| parse_id nun_else
+ -- parse_tm
+ >> (fn ((cond, th), el) =>
+ let val ty = safe_ty_of th in
+ napps (NConst (nun_if, [ty], mk_arrows_ty ([prop_ty, ty, ty], ty)), [cond, th, el])
+ end)
+ || parse_implies) toks
+and parse_implies toks =
+ (parse_disj -- Scan.option (parse_sym nun_implies -- parse_implies)
+ >> (fn (tm, NONE) => tm
+ | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
+and parse_disj toks =
+ (parse_conj -- Scan.option (parse_sym nun_disj -- parse_disj)
+ >> (fn (tm, NONE) => tm
+ | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
+and parse_conj toks =
+ (parse_equals -- Scan.option (parse_sym nun_conj -- parse_conj)
+ >> (fn (tm, NONE) => tm
+ | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
+and parse_equals toks =
+ (parse_comb -- Scan.option (parse_sym nun_equals -- parse_comb)
+ >> (fn (tm, NONE) => tm
+ | (lhs, SOME (Symbol id, rhs)) => napps (NConst (id, [], dummy_ty), [lhs, rhs]))) toks
+and parse_comb toks =
+ (parse_arg -- Scan.repeat (Scan.unless parse_confusing_id parse_arg) >> napps) toks
+and parse_arg toks =
+ (parse_choice_or_unique
+ || parse_ident >> (fn id => NConst (id, [], dummy_ty))
+ || parse_sym nun_irrelevant
+ |-- Scan.option (parse_sym nun_lparen |-- parse_tm --| parse_sym nun_rparen) (* FIXME *)
+ >> (fn _ => NConst (nun_irrelevant, [], dummy_ty))
+ || parse_sym nun_unparsable |-- parse_ty >> (fn ty => NConst (nun_unparsable, [], ty))
+ || parse_sym nun_lparen |-- parse_tm -- Scan.option (parse_sym nun_colon |-- parse_ty)
+ --| parse_sym nun_rparen
+ >> (fn (NConst (id, [], _), SOME ty) => NConst (id, [], ty)
+ | (tm, _) => tm)
+ || parse_atom >> (fn (id, j) => NAtom (j, NType (id, [])))) toks;
+
+val parse_witness_name =
+ parse_ident >> (fn id => NConst (hd (space_explode "/" id), [], dummy_ty));
+
+val parse_witness =
+ parse_id nun__witness_of |-- parse_sym nun_lparen |-- (parse_id nun_forall || parse_id nun_exists)
+ |-- Scan.option (parse_sym nun_lparen) |-- parse_witness_name
+ --| Scan.repeat (Scan.one (curry (op <>) (Symbol nun_assign)));
+
+datatype entry =
+ Type_Entry of ty_entry
+| Skolem_Entry of tm_entry
+| Const_Entry of tm_entry;
+
+val parse_entry =
+ (parse_id nun_type |-- parse_ty --| parse_sym nun_assign --| parse_sym nun_lbrace --
+ parse_enum (parse_sym nun_comma) parse_tm --| parse_sym nun_rbrace
+ >> Type_Entry
+ || parse_id nun_val |-- parse_witness --| parse_sym nun_assign -- parse_tm >> Skolem_Entry
+ || parse_id nun_val |-- parse_tm --| parse_sym nun_assign -- parse_tm >> Const_Entry)
+ --| parse_sym nun_dot;
+
+val parse_model =
+ parse_id nun_SAT |-- parse_sym nun_colon |-- parse_sym nun_lbrace |-- Scan.repeat parse_entry
+ --| parse_sym nun_rbrace;
+
+fun add_entry entry ({type_model, const_model, skolem_model} : nun_model) =
+ (case entry of
+ Type_Entry e =>
+ {type_model = e :: type_model, const_model = const_model, skolem_model = skolem_model}
+ | Skolem_Entry e =>
+ {type_model = type_model, const_model = const_model, skolem_model = e :: skolem_model}
+ | Const_Entry e =>
+ {type_model = type_model, const_model = e :: const_model, skolem_model = skolem_model});
+
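+(* Illustrative sketch of the concrete syntax accepted below (hypothetical identifiers,
+   not the output of an actual Nunchaku run):
+
+     SAT: {
+       type alpha := {$alpha_0, $alpha_1}.
+       val f := fun (x : alpha). $alpha_0.
+     }
+*)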
+fun nun_model_of_str str =
+ let val (entries, _) = parse_model (tokenize str) in
+ {type_model = [], const_model = [], skolem_model = []}
+ |> fold_rev add_entry entries
+ end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_problem.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,799 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_problem.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Abstract syntax tree for Nunchaku problems.
+*)
+
+signature NUNCHAKU_PROBLEM =
+sig
+ eqtype ident
+
+ datatype ty =
+ NType of ident * ty list
+
+ datatype tm =
+ NAtom of int * ty
+ | NConst of ident * ty list * ty
+ | NAbs of tm * tm
+ | NMatch of tm * (ident * tm list * tm) list
+ | NApp of tm * tm
+
+ type nun_copy_spec =
+ {abs_ty: ty,
+ rep_ty: ty,
+ subset: tm option,
+ quotient: tm option,
+ abs: tm,
+ rep: tm}
+
+ type nun_ctr_spec =
+ {ctr: tm,
+ arg_tys: ty list}
+
+ type nun_co_data_spec =
+ {ty: ty,
+ ctrs: nun_ctr_spec list}
+
+ type nun_const_spec =
+ {const: tm,
+ props: tm list}
+
+ type nun_consts_spec =
+ {consts: tm list,
+ props: tm list}
+
+ datatype nun_command =
+ NTVal of ty * (int option * int option)
+ | NCopy of nun_copy_spec
+ | NData of nun_co_data_spec list
+ | NCodata of nun_co_data_spec list
+ | NVal of tm * ty
+ | NPred of bool * nun_const_spec list
+ | NCopred of nun_const_spec list
+ | NRec of nun_const_spec list
+ | NSpec of nun_consts_spec
+ | NAxiom of tm
+ | NGoal of tm
+ | NEval of tm
+
+ type nun_problem =
+ {commandss: nun_command list list,
+ sound: bool,
+ complete: bool}
+
+ type name_pool =
+ {nice_of_ugly: string Symtab.table,
+ ugly_of_nice: string Symtab.table}
+
+ val nun_abstract: string
+ val nun_and: string
+ val nun_anon_fun_prefix: string
+ val nun_arrow: string
+ val nun_asserting: string
+ val nun_assign: string
+ val nun_at: string
+ val nun_axiom: string
+ val nun_bar: string
+ val nun_choice: string
+ val nun_codata: string
+ val nun_colon: string
+ val nun_comma: string
+ val nun_concrete: string
+ val nun_conj: string
+ val nun_copred: string
+ val nun_copy: string
+ val nun_data: string
+ val nun_disj: string
+ val nun_dollar: string
+ val nun_dot: string
+ val nun_dummy: string
+ val nun_else: string
+ val nun_end: string
+ val nun_equals: string
+ val nun_eval: string
+ val nun_exists: string
+ val nun_false: string
+ val nun_forall: string
+ val nun_goal: string
+ val nun_hash: string
+ val nun_if: string
+ val nun_implies: string
+ val nun_irrelevant: string
+ val nun_lambda: string
+ val nun_lbrace: string
+ val nun_lbracket: string
+ val nun_lparen: string
+ val nun_match: string
+ val nun_mu: string
+ val nun_not: string
+ val nun_partial_quotient: string
+ val nun_pred: string
+ val nun_prop: string
+ val nun_quotient: string
+ val nun_rbrace: string
+ val nun_rbracket: string
+ val nun_rec: string
+ val nun_rparen: string
+ val nun_semicolon: string
+ val nun_spec: string
+ val nun_subset: string
+ val nun_then: string
+ val nun_true: string
+ val nun_type: string
+ val nun_unparsable: string
+ val nun_unique: string
+ val nun_unique_unsafe: string
+ val nun_val: string
+ val nun_wf: string
+ val nun_with: string
+ val nun__witness_of: string
+
+ val ident_of_str: string -> ident
+ val str_of_ident: ident -> string
+ val encode_args: string list -> string
+ val nun_const_of_str: string list -> string -> ident
+ val nun_tconst_of_str: string list -> string -> ident
+ val nun_free_of_str: string -> ident
+ val nun_tfree_of_str: string -> ident
+ val nun_var_of_str: string -> ident
+ val str_of_nun_const: ident -> string list * string
+ val str_of_nun_tconst: ident -> string list * string
+ val str_of_nun_free: ident -> string
+ val str_of_nun_tfree: ident -> string
+ val str_of_nun_var: ident -> string
+
+ val dummy_ty: ty
+ val prop_ty: ty
+ val mk_arrow_ty: ty * ty -> ty
+ val mk_arrows_ty: ty list * ty -> ty
+ val nabss: tm list * tm -> tm
+ val napps: tm * tm list -> tm
+
+ val ty_of: tm -> ty
+ val safe_ty_of: tm -> ty
+
+ val fold_map_ty_idents: (string -> 'a -> string * 'a) -> ty -> 'a -> ty * 'a
+ val fold_map_tm_idents: (string -> 'a -> string * 'a) -> tm -> 'a -> tm * 'a
+ val fold_map_nun_command_idents: (string -> 'a -> string * 'a) -> nun_command -> 'a ->
+ nun_command * 'a
+ val fold_map_nun_problem_idents: (string -> 'a -> string * 'a) -> nun_problem -> 'a ->
+ nun_problem * 'a
+
+ val allocate_nice: name_pool -> string * string -> string * name_pool
+
+ val rcomb_tms: tm list -> tm -> tm
+ val abs_tms: tm list -> tm -> tm
+ val beta_reduce_tm: tm -> tm
+ val eta_expandN_tm: int -> tm -> tm
+ val eta_expand_builtin_tm: tm -> tm
+
+ val str_of_ty: ty -> string
+ val str_of_tm: tm -> string
+ val str_of_tmty: tm -> string
+
+ val nice_nun_problem: nun_problem -> nun_problem * name_pool
+ val str_of_nun_problem: nun_problem -> string
+end;
+
+structure Nunchaku_Problem : NUNCHAKU_PROBLEM =
+struct
+
+open Nunchaku_Util;
+
+type ident = string;
+
+datatype ty =
+ NType of ident * ty list;
+
+datatype tm =
+ NAtom of int * ty
+| NConst of ident * ty list * ty
+| NAbs of tm * tm
+| NMatch of tm * (ident * tm list * tm) list
+| NApp of tm * tm;
+
+type nun_copy_spec =
+ {abs_ty: ty,
+ rep_ty: ty,
+ subset: tm option,
+ quotient: tm option,
+ abs: tm,
+ rep: tm};
+
+type nun_ctr_spec =
+ {ctr: tm,
+ arg_tys: ty list};
+
+type nun_co_data_spec =
+ {ty: ty,
+ ctrs: nun_ctr_spec list};
+
+type nun_const_spec =
+ {const: tm,
+ props: tm list};
+
+type nun_consts_spec =
+ {consts: tm list,
+ props: tm list};
+
+datatype nun_command =
+ NTVal of ty * (int option * int option)
+| NCopy of nun_copy_spec
+| NData of nun_co_data_spec list
+| NCodata of nun_co_data_spec list
+| NVal of tm * ty
+| NPred of bool * nun_const_spec list
+| NCopred of nun_const_spec list
+| NRec of nun_const_spec list
+| NSpec of nun_consts_spec
+| NAxiom of tm
+| NGoal of tm
+| NEval of tm;
+
+type nun_problem =
+ {commandss: nun_command list list,
+ sound: bool,
+ complete: bool};
+
+type name_pool =
+ {nice_of_ugly: string Symtab.table,
+ ugly_of_nice: string Symtab.table};
+
+val nun_abstract = "abstract";
+val nun_and = "and";
+val nun_anon_fun_prefix = "anon_fun_";
+val nun_arrow = "->";
+val nun_asserting = "asserting";
+val nun_assign = ":=";
+val nun_at = "@";
+val nun_axiom = "axiom";
+val nun_bar = "|";
+val nun_choice = "choice";
+val nun_codata = "codata";
+val nun_colon = ":";
+val nun_comma = ",";
+val nun_concrete = "concrete";
+val nun_conj = "&&";
+val nun_copred = "copred";
+val nun_copy = "copy";
+val nun_data = "data";
+val nun_disj = "||";
+val nun_dollar = "$";
+val nun_dot = ".";
+val nun_dummy = "_";
+val nun_else = "else";
+val nun_end = "end";
+val nun_equals = "=";
+val nun_eval = "eval";
+val nun_exists = "exists";
+val nun_false = "false";
+val nun_forall = "forall";
+val nun_goal = "goal";
+val nun_hash = "#";
+val nun_if = "if";
+val nun_implies = "=>";
+val nun_irrelevant = "?__";
+val nun_lambda = "fun";
+val nun_lbrace = "{";
+val nun_lbracket = "[";
+val nun_lparen = "(";
+val nun_match = "match";
+val nun_mu = "mu";
+val nun_not = "~";
+val nun_partial_quotient = "partial_quotient";
+val nun_pred = "pred";
+val nun_prop = "prop";
+val nun_quotient = "quotient";
+val nun_rbrace = "}";
+val nun_rbracket = "]";
+val nun_rec = "rec";
+val nun_rparen = ")";
+val nun_semicolon = ";";
+val nun_spec = "spec";
+val nun_subset = "subset";
+val nun_then = "then";
+val nun_true = "true";
+val nun_type = "type";
+val nun_unique = "unique";
+val nun_unique_unsafe = "unique_unsafe";
+val nun_unparsable = "?__unparsable";
+val nun_val = "val";
+val nun_wf = "wf";
+val nun_with = "with";
+val nun__witness_of = "_witness_of";
+
+val nun_parens = enclose nun_lparen nun_rparen;
+
+fun nun_parens_if_space s = s |> String.isSubstring " " s ? nun_parens;
+
+fun str_of_nun_arg_list str_of_arg =
+ map (prefix " " o nun_parens_if_space o str_of_arg) #> space_implode "";
+
+fun str_of_nun_and_list str_of_elem =
+ map str_of_elem #> space_implode ("\n" ^ nun_and ^ " ");
+
+val is_nun_const_quantifier = member (op =) [nun_forall, nun_exists];
+val is_nun_const_connective = member (op =) [nun_conj, nun_disj, nun_implies];
+
+val nun_builtin_arity =
+ [(nun_asserting, 2),
+ (nun_conj, 2),
+ (nun_disj, 2),
+ (nun_equals, 2),
+ (nun_exists, 1),
+ (nun_false, 0),
+ (nun_forall, 1),
+ (nun_if, 3),
+ (nun_implies, 2),
+ (nun_not, 1),
+ (nun_true, 0)];
+
+val arity_of_nun_builtin = AList.lookup (op =) nun_builtin_arity #> the_default 0;
+
+val nun_const_prefix = "c.";
+val nun_free_prefix = "f.";
+val nun_var_prefix = "v.";
+val nun_tconst_prefix = "C.";
+val nun_tfree_prefix = "F.";
+val nun_custom_id_suffix = "_";
+
+val ident_of_str = I : string -> ident;
+val str_of_ident = I : ident -> string;
+
+val encode_args = enclose "(" ")" o commas;
+
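+(* Inverse of "encode_args": strips one balanced, comma-separated "(...)" group from the
+   front of a string and returns the arguments together with the remainder. *)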
+fun decode_args s =
+ let
+ fun delta #"(" = 1
+ | delta #")" = ~1
+ | delta _ = 0;
+
+ fun dec 0 (#"(" :: #")" :: cs) _ = ([], String.implode cs)
+ | dec 0 (#"(" :: cs) [] = dec 1 cs [[]]
+ | dec 0 cs _ = ([], String.implode cs)
+ | dec _ [] _ = raise Fail ("ill-formed arguments in " ^ quote s)
+ | dec 1 (#")" :: cs) args = (rev (map (String.implode o rev) args), String.implode cs)
+ | dec 1 (#"," :: cs) args = dec 1 cs ([] :: args)
+ | dec n (c :: cs) (arg :: args) = dec (n + delta c) cs ((c :: arg) :: args);
+ in
+ dec 0 (String.explode s) []
+ end;
+
+fun nun_const_of_str args =
+ suffix nun_custom_id_suffix #> prefix nun_const_prefix #> prefix (encode_args args);
+fun nun_tconst_of_str args =
+ suffix nun_custom_id_suffix #> prefix nun_tconst_prefix #> prefix (encode_args args);
+
+val nun_free_of_str = suffix nun_custom_id_suffix #> prefix nun_free_prefix;
+val nun_tfree_of_str = suffix nun_custom_id_suffix #> prefix nun_tfree_prefix;
+val nun_var_of_str = suffix nun_custom_id_suffix #> prefix nun_var_prefix;
+val str_of_nun_const = decode_args ##> unprefix nun_const_prefix ##> unsuffix nun_custom_id_suffix;
+val str_of_nun_tconst = decode_args ##> unprefix nun_tconst_prefix ##> unsuffix nun_custom_id_suffix;
+val str_of_nun_free = unprefix nun_free_prefix #> unsuffix nun_custom_id_suffix;
+val str_of_nun_tfree = unprefix nun_tfree_prefix #> unsuffix nun_custom_id_suffix;
+val str_of_nun_var = unprefix nun_var_prefix #> unsuffix nun_custom_id_suffix;
+
+fun index_name s 0 = s
+ | index_name s j =
+ let
+ val n = size s;
+ val m = n - 1;
+ in
+ String.substring (s, 0, m) ^ string_of_int j ^ String.substring (s, m, n - m)
+ end;
+
+val dummy_ty = NType (nun_dummy, []);
+val prop_ty = NType (nun_prop, []);
+
+fun mk_arrow_ty (dom, ran) = NType (nun_arrow, [dom, ran]);
+val mk_arrows_ty = Library.foldr mk_arrow_ty;
+
+val nabss = Library.foldr NAbs;
+val napps = Library.foldl NApp;
+
+fun domain_ty (NType (_, [ty, _])) = ty
+ | domain_ty ty = ty;
+
+fun range_ty (NType (_, [_, ty])) = ty
+ | range_ty ty = ty;
+
+fun domain_tys 0 _ = []
+ | domain_tys n ty = domain_ty ty :: domain_tys (n - 1) (range_ty ty);
+
+fun ty_of (NAtom (_, ty)) = ty
+ | ty_of (NConst (_, _, ty)) = ty
+ | ty_of (NAbs (var, body)) = mk_arrow_ty (ty_of var, ty_of body)
+ | ty_of (NMatch (_, (_, _, body1) :: _)) = ty_of body1
+ | ty_of (NApp (const, _)) = range_ty (ty_of const);
+
+val safe_ty_of = try ty_of #> the_default dummy_ty;
+
+fun strip_nun_binders binder (app as NApp (NConst (id, _, _), NAbs (var, body))) =
+ if id = binder then
+ strip_nun_binders binder body
+ |>> cons var
+ else
+ ([], app)
+ | strip_nun_binders _ tm = ([], tm);
+
+fun fold_map_option _ NONE = pair NONE
+ | fold_map_option f (SOME x) = f x #>> SOME;
+
+fun fold_map_ty_idents f (NType (id, tys)) =
+ f id
+ ##>> fold_map (fold_map_ty_idents f) tys
+ #>> NType;
+
+fun fold_map_match_branch_idents f (id, vars, body) =
+ f id
+ ##>> fold_map (fold_map_tm_idents f) vars
+ ##>> fold_map_tm_idents f body
+ #>> Scan.triple1
+and fold_map_tm_idents f (NAtom (j, ty)) =
+ fold_map_ty_idents f ty
+ #>> curry NAtom j
+ | fold_map_tm_idents f (NConst (id, tys, ty)) =
+ f id
+ ##>> fold_map (fold_map_ty_idents f) tys
+ ##>> fold_map_ty_idents f ty
+ #>> (Scan.triple1 #> NConst)
+ | fold_map_tm_idents f (NAbs (var, body)) =
+ fold_map_tm_idents f var
+ ##>> fold_map_tm_idents f body
+ #>> NAbs
+ | fold_map_tm_idents f (NMatch (obj, branches)) =
+ fold_map_tm_idents f obj
+ ##>> fold_map (fold_map_match_branch_idents f) branches
+ #>> NMatch
+ | fold_map_tm_idents f (NApp (const, arg)) =
+ fold_map_tm_idents f const
+ ##>> fold_map_tm_idents f arg
+ #>> NApp;
+
+fun fold_map_nun_copy_spec_idents f {abs_ty, rep_ty, subset, quotient, abs, rep} =
+ fold_map_ty_idents f abs_ty
+ ##>> fold_map_ty_idents f rep_ty
+ ##>> fold_map_option (fold_map_tm_idents f) subset
+ ##>> fold_map_option (fold_map_tm_idents f) quotient
+ ##>> fold_map_tm_idents f abs
+ ##>> fold_map_tm_idents f rep
+ #>> (fn (((((abs_ty, rep_ty), subset), quotient), abs), rep) =>
+ {abs_ty = abs_ty, rep_ty = rep_ty, subset = subset, quotient = quotient, abs = abs, rep = rep});
+
+fun fold_map_nun_ctr_spec_idents f {ctr, arg_tys} =
+ fold_map_tm_idents f ctr
+ ##>> fold_map (fold_map_ty_idents f) arg_tys
+ #>> (fn (ctr, arg_tys) => {ctr = ctr, arg_tys = arg_tys});
+
+fun fold_map_nun_co_data_spec_idents f {ty, ctrs} =
+ fold_map_ty_idents f ty
+ ##>> fold_map (fold_map_nun_ctr_spec_idents f) ctrs
+ #>> (fn (ty, ctrs) => {ty = ty, ctrs = ctrs});
+
+fun fold_map_nun_const_spec_idents f {const, props} =
+ fold_map_tm_idents f const
+ ##>> fold_map (fold_map_tm_idents f) props
+ #>> (fn (const, props) => {const = const, props = props});
+
+fun fold_map_nun_consts_spec_idents f {consts, props} =
+ fold_map (fold_map_tm_idents f) consts
+ ##>> fold_map (fold_map_tm_idents f) props
+ #>> (fn (consts, props) => {consts = consts, props = props});
+
+fun fold_map_nun_command_idents f (NTVal (ty, cards)) =
+ fold_map_ty_idents f ty
+ #>> (rpair cards #> NTVal)
+ | fold_map_nun_command_idents f (NCopy spec) =
+ fold_map_nun_copy_spec_idents f spec
+ #>> NCopy
+ | fold_map_nun_command_idents f (NData specs) =
+ fold_map (fold_map_nun_co_data_spec_idents f) specs
+ #>> NData
+ | fold_map_nun_command_idents f (NCodata specs) =
+ fold_map (fold_map_nun_co_data_spec_idents f) specs
+ #>> NCodata
+ | fold_map_nun_command_idents f (NVal (tm, ty)) =
+ fold_map_tm_idents f tm
+ ##>> fold_map_ty_idents f ty
+ #>> NVal
+ | fold_map_nun_command_idents f (NPred (wf, specs)) =
+ fold_map (fold_map_nun_const_spec_idents f) specs
+ #>> curry NPred wf
+ | fold_map_nun_command_idents f (NCopred specs) =
+ fold_map (fold_map_nun_const_spec_idents f) specs
+ #>> NCopred
+ | fold_map_nun_command_idents f (NRec specs) =
+ fold_map (fold_map_nun_const_spec_idents f) specs
+ #>> NRec
+ | fold_map_nun_command_idents f (NSpec spec) =
+ fold_map_nun_consts_spec_idents f spec
+ #>> NSpec
+ | fold_map_nun_command_idents f (NAxiom tm) =
+ fold_map_tm_idents f tm
+ #>> NAxiom
+ | fold_map_nun_command_idents f (NGoal tm) =
+ fold_map_tm_idents f tm
+ #>> NGoal
+ | fold_map_nun_command_idents f (NEval tm) =
+ fold_map_tm_idents f tm
+ #>> NEval;
+
+fun fold_map_nun_problem_idents f ({commandss, sound, complete} : nun_problem) =
+ fold_map (fold_map (fold_map_nun_command_idents f)) commandss
+ #>> (fn commandss' => {commandss = commandss', sound = sound, complete = complete});
+
+fun dest_rassoc_args oper arg0 rest =
+ (case rest of
+ NApp (NApp (oper', arg1), rest') =>
+ if oper' = oper then arg0 :: dest_rassoc_args oper arg1 rest' else [arg0, rest]
+ | _ => [arg0, rest]);
+
+fun replace_tm from to =
+ let
+ (* This code assumes all enclosing binders bind distinct variables and bound variables are
+ distinct from any other variables. *)
+ fun repl_br (id, vars, body) = (id, map repl vars, repl body)
+ and repl (NApp (const, arg)) = NApp (repl const, repl arg)
+ | repl (NAbs (var, body)) = NAbs (var, repl body)
+ | repl (NMatch (obj, branches)) = NMatch (repl obj, map repl_br branches)
+ | repl tm = if tm = from then to else tm;
+ in
+ repl
+ end;
+
+val rcomb_tms = fold (fn arg => fn func => NApp (func, arg));
+val abs_tms = fold_rev (curry NAbs);
+
+fun fresh_var_names_wrt_tm n tm =
+ let
+ fun max_var_br (_, vars, body) = fold max_var (body :: vars)
+ and max_var (NAtom _) = I
+ | max_var (NConst (id, _, _)) =
+ (fn max => if String.isPrefix nun_var_prefix id andalso size id > size max then id else max)
+ | max_var (NApp (func, arg)) = fold max_var [func, arg]
+ | max_var (NAbs (var, body)) = fold max_var [var, body]
+ | max_var (NMatch (obj, branches)) = max_var obj #> fold max_var_br branches;
+
+ val dummy_name = nun_var_of_str Name.uu;
+ val max_name = max_var tm dummy_name;
+ in
+ map (index_name max_name) (1 upto n)
+ end;
+
+fun beta_reduce_tm (NApp (NAbs (var, body), arg)) = beta_reduce_tm (replace_tm var arg body)
+ | beta_reduce_tm (NApp (const, arg)) =
+ (case beta_reduce_tm const of
+ const' as NAbs _ => beta_reduce_tm (NApp (const', arg))
+ | const' => NApp (const', beta_reduce_tm arg))
+ | beta_reduce_tm (NAbs (var, body)) = NAbs (var, beta_reduce_tm body)
+ | beta_reduce_tm (NMatch (obj, branches)) =
+ NMatch (beta_reduce_tm obj, map (@{apply 3(3)} beta_reduce_tm) branches)
+ | beta_reduce_tm tm = tm;
+
+fun eta_expandN_tm 0 tm = tm
+ | eta_expandN_tm n tm =
+ let
+ val var_names = fresh_var_names_wrt_tm n tm;
+ val arg_tys = domain_tys n (ty_of tm);
+ val vars = map2 (fn id => fn ty => NConst (id, [], ty)) var_names arg_tys;
+ in
+ abs_tms vars (rcomb_tms vars tm)
+ end;
+
+val eta_expand_builtin_tm =
+ let
+ fun expand_quant_arg (NAbs (var, body)) = NAbs (var, expand_quant_arg body)
+ | expand_quant_arg (NMatch (obj, branches)) =
+ NMatch (obj, map (@{apply 3(3)} expand_quant_arg) branches)
+ | expand_quant_arg (tm as NApp (_, NAbs _)) = tm
+ | expand_quant_arg (NApp (quant, arg)) = NApp (quant, eta_expandN_tm 1 arg)
+ | expand_quant_arg tm = tm;
+
+ fun expand args (NApp (func, arg)) = expand (expand [] arg :: args) func
+ | expand args (func as NConst (id, _, _)) =
+ let val missing = Int.max (0, arity_of_nun_builtin id - length args) in
+ rcomb_tms args func
+ |> eta_expandN_tm missing
+ |> is_nun_const_quantifier id ? expand_quant_arg
+ end
+ | expand args (func as NAtom _) = rcomb_tms args func
+ | expand args (NAbs (var, body)) = rcomb_tms args (NAbs (var, expand [] body))
+ | expand args (NMatch (obj, branches)) =
+ rcomb_tms args (NMatch (obj, map (@{apply 3(3)} (expand [])) branches));
+ in
+ expand []
+ end;
+
+val str_of_ty =
+ let
+ fun str_of maybe_parens (NType (id, tys)) =
+ if id = nun_arrow then
+ (case tys of
+ [ty, ty'] => maybe_parens (str_of nun_parens ty ^ " " ^ nun_arrow ^ " " ^ str_of I ty'))
+ else
+ id ^ str_of_nun_arg_list (str_of I) tys
+ in
+ str_of I
+ end;
+
+val (str_of_tmty, str_of_tm) =
+ let
+ fun is_triv_head (NConst (id, _, _)) = (arity_of_nun_builtin id = 0)
+ | is_triv_head (NAtom _) = true
+ | is_triv_head (NApp (const, _)) = is_triv_head const
+ | is_triv_head (NAbs _) = false
+ | is_triv_head (NMatch _) = false;
+
+ fun str_of_at_const id tys =
+ nun_at ^ str_of_ident id ^ str_of_nun_arg_list str_of_ty tys;
+
+ fun str_of_app ty_opt const arg =
+ let
+ val ty_opt' =
+ try (Option.map (fn ty => mk_arrow_ty (ty_of arg, ty))) ty_opt
+ |> the_default NONE;
+ in
+ (str_of ty_opt' const |> (case const of NAbs _ => nun_parens | _ => I)) ^
+ str_of_nun_arg_list (str_of NONE) [arg]
+ end
+ and str_of_br ty_opt (id, vars, body) =
+ " " ^ nun_bar ^ " " ^ id ^ space_implode "" (map (prefix " " o str_of NONE) vars) ^ " " ^
+ nun_arrow ^ " " ^ str_of ty_opt body
+ and str_of_tmty tm =
+ let val ty = ty_of tm in
+ str_of (SOME ty) tm ^ " " ^ nun_colon ^ " " ^ str_of_ty ty
+ end
+ and str_of _ (NAtom (j, _)) = nun_dollar ^ string_of_int j
+ | str_of _ (NConst (id, [], _)) = str_of_ident id
+ | str_of (SOME ty0) (NConst (id, tys, ty)) =
+ if ty = ty0 then str_of_ident id else str_of_at_const id tys
+ | str_of _ (NConst (id, tys, _)) = str_of_at_const id tys
+ | str_of ty_opt (NAbs (var, body)) =
+ nun_lambda ^ " " ^
+ (case ty_opt of
+ SOME ty => str_of (SOME (domain_ty ty))
+ | NONE => nun_parens o str_of_tmty) var ^
+ nun_dot ^ " " ^ str_of (Option.map range_ty ty_opt) body
+ | str_of ty_opt (NMatch (obj, branches)) =
+ nun_match ^ " " ^ str_of NONE obj ^ " " ^ nun_with ^ " " ^
+ space_implode "" (map (str_of_br ty_opt) branches) ^ " " ^ nun_end
+ | str_of ty_opt (app as NApp (func, argN)) =
+ (case (func, argN) of
+ (NApp (oper as NConst (id, _, _), arg1), arg2) =>
+ if id = nun_asserting then
+ str_of ty_opt arg1 ^ " " ^ nun_asserting ^ " " ^ str_of (SOME prop_ty) arg2
+ |> nun_parens
+ else if id = nun_equals then
+ (str_of NONE arg1 |> not (is_triv_head arg1) ? nun_parens) ^ " " ^ id ^ " " ^
+ (str_of (try ty_of arg2) arg2 |> not (is_triv_head arg2) ? nun_parens)
+ else if is_nun_const_connective id then
+ let val args = dest_rassoc_args oper arg1 arg2 in
+ space_implode (" " ^ id ^ " ")
+ (map (fn arg => str_of NONE arg |> not (is_triv_head arg) ? nun_parens) args)
+ end
+ else
+ str_of_app ty_opt func argN
+ | (NApp (NApp (NConst (id, _, _), arg1), arg2), arg3) =>
+ if id = nun_if then
+ nun_if ^ " " ^ str_of NONE arg1 ^ " " ^ nun_then ^ " " ^ str_of NONE arg2 ^ " " ^
+ nun_else ^ " " ^ str_of NONE arg3
+ |> nun_parens
+ else
+ str_of_app ty_opt func argN
+ | (NConst (id, _, _), NAbs _) =>
+ if is_nun_const_quantifier id then
+ let val (vars, body) = strip_nun_binders id app in
+ id ^ " " ^ space_implode " " (map (nun_parens o str_of_tmty) vars) ^ nun_dot ^ " " ^
+ str_of NONE body
+ end
+ else
+ str_of_app ty_opt func argN
+ | _ => str_of_app ty_opt func argN);
+ in
+ (str_of_tmty, str_of NONE)
+ end;
+
+val empty_name_pool = {nice_of_ugly = Symtab.empty, ugly_of_nice = Symtab.empty};
+
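+(* The name pool maps internal ("ugly") identifiers to readable ("nice") names and back;
+   "allocate_nice" indexes a suggested name until it no longer clashes. *)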
+val nice_of_ugly_suggestion =
+ unascii_of #> Long_Name.base_name #> ascii_of #> unsuffix nun_custom_id_suffix
+ #> (fn s => if s = "" orelse not (Char.isAlpha (String.sub (s, 0))) then "x" ^ s else s);
+
+fun allocate_nice ({nice_of_ugly, ugly_of_nice} : name_pool) (ugly, nice_sugg0) =
+ let
+ fun alloc j =
+ let val nice_sugg = index_name nice_sugg0 j in
+ (case Symtab.lookup ugly_of_nice nice_sugg of
+ NONE =>
+ (nice_sugg,
+ {nice_of_ugly = Symtab.update_new (ugly, nice_sugg) nice_of_ugly,
+ ugly_of_nice = Symtab.update_new (nice_sugg, ugly) ugly_of_nice})
+ | SOME _ => alloc (j + 1))
+ end;
+ in
+ alloc 0
+ end;
+
+fun nice_ident ugly (pool as {nice_of_ugly, ...}) =
+ if String.isSuffix nun_custom_id_suffix ugly then
+ (case Symtab.lookup nice_of_ugly ugly of
+ NONE => allocate_nice pool (ugly, nice_of_ugly_suggestion ugly)
+ | SOME nice => (nice, pool))
+ else
+ (ugly, pool);
+
+fun nice_nun_problem problem =
+ fold_map_nun_problem_idents nice_ident problem empty_name_pool;
+
+fun str_of_tval (NType (id, tys)) =
+ str_of_ident id ^ " " ^ nun_colon ^ " " ^
+ fold (K (prefix (nun_type ^ " " ^ nun_arrow ^ " "))) tys nun_type;
+
+fun is_triv_subset (NAbs (_, body)) = is_triv_subset body
+ | is_triv_subset (NConst (id, _, _)) = (id = nun_true)
+ | is_triv_subset _ = false;
+
+fun str_of_nun_copy_spec {abs_ty, rep_ty, subset, quotient, abs, rep} =
+ str_of_ty abs_ty ^ " " ^ nun_assign ^ " " ^ str_of_ty rep_ty ^
+ (case subset of
+ NONE => ""
+ | SOME s => if is_triv_subset s then "" else "\n " ^ nun_subset ^ " " ^ str_of_tm s) ^
+ (* TODO: use nun_quotient when possible *)
+ (case quotient of
+ NONE => ""
+ | SOME q => "\n " ^ nun_partial_quotient ^ " " ^ str_of_tm q) ^
+ "\n " ^ nun_abstract ^ " " ^ str_of_tm abs ^ "\n " ^ nun_concrete ^ " " ^ str_of_tm rep;
+
+fun str_of_nun_ctr_spec {ctr, arg_tys} =
+ str_of_tm ctr ^ str_of_nun_arg_list str_of_ty arg_tys;
+
+fun str_of_nun_co_data_spec {ty, ctrs} =
+ str_of_ty ty ^ " " ^ nun_assign ^ "\n " ^
+ space_implode ("\n" ^ nun_bar ^ " ") (map str_of_nun_ctr_spec ctrs);
+
+fun str_of_nun_const_spec {const, props} =
+ str_of_tmty const ^ " " ^ nun_assign ^ "\n " ^
+ space_implode (nun_semicolon ^ "\n ") (map str_of_tm props);
+
+fun str_of_nun_consts_spec {consts, props} =
+ space_implode (" " ^ nun_and ^ "\n ") (map str_of_tmty consts) ^ " " ^ nun_assign ^ "\n " ^
+ space_implode (nun_semicolon ^ "\n ") (map str_of_tm props);
+
+fun str_of_nun_cards_suffix (NONE, NONE) = ""
+ | str_of_nun_cards_suffix (c1, c2) =
+ let
+ val s1 = Option.map (prefix "min_card " o signed_string_of_int) c1;
+ val s2 = Option.map (prefix "max_card " o signed_string_of_int) c2;
+ in
+ enclose " [" "]" (space_implode ", " (map_filter I [s1, s2]))
+ end;
+
+fun str_of_nun_command (NTVal (ty, cards)) =
+ nun_val ^ " " ^ str_of_tval ty ^ str_of_nun_cards_suffix cards
+ | str_of_nun_command (NCopy spec) = nun_copy ^ " " ^ str_of_nun_copy_spec spec
+ | str_of_nun_command (NData specs) =
+ nun_data ^ " " ^ str_of_nun_and_list str_of_nun_co_data_spec specs
+ | str_of_nun_command (NCodata specs) =
+ nun_codata ^ " " ^ str_of_nun_and_list str_of_nun_co_data_spec specs
+ | str_of_nun_command (NVal (tm, ty)) =
+ nun_val ^ " " ^ str_of_tm tm ^ " " ^ nun_colon ^ " " ^ str_of_ty ty
+ | str_of_nun_command (NPred (wf, specs)) =
+ nun_pred ^ " " ^ (if wf then nun_lbracket ^ nun_wf ^ nun_rbracket ^ " " else "") ^
+ str_of_nun_and_list str_of_nun_const_spec specs
+ | str_of_nun_command (NCopred specs) =
+ nun_copred ^ " " ^ str_of_nun_and_list str_of_nun_const_spec specs
+ | str_of_nun_command (NRec specs) =
+ nun_rec ^ " " ^ str_of_nun_and_list str_of_nun_const_spec specs
+ | str_of_nun_command (NSpec spec) = nun_spec ^ " " ^ str_of_nun_consts_spec spec
+ | str_of_nun_command (NAxiom tm) = nun_axiom ^ " " ^ str_of_tm tm
+ | str_of_nun_command (NGoal tm) = nun_goal ^ " " ^ str_of_tm tm
+ | str_of_nun_command (NEval tm) = nun_hash ^ " " ^ nun_eval ^ " " ^ str_of_tm tm;
+
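+(* Illustrative sketch of a printed problem (hypothetical commands, after nice-naming):
+
+     # complete
+     # sound
+
+     val a : type.
+     val f : a -> prop.
+
+     goal forall (x : a). f x.
+*)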
+fun str_of_nun_problem {commandss, sound, complete} =
+ map (cat_lines o map (suffix nun_dot o str_of_nun_command)) commandss
+ |> space_implode "\n\n" |> suffix "\n"
+ |> prefix (nun_hash ^ " " ^ (if sound then "sound" else "unsound") ^ "\n")
+ |> prefix (nun_hash ^ " " ^ (if complete then "complete" else "incomplete") ^ "\n");
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_reconstruct.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,244 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_reconstruct.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Reconstruction of Nunchaku models in Isabelle/HOL.
+*)
+
+signature NUNCHAKU_RECONSTRUCT =
+sig
+ type nun_model = Nunchaku_Model.nun_model
+
+ type typ_entry = typ * term list
+ type term_entry = term * term
+
+ type isa_model =
+ {type_model: typ_entry list,
+ free_model: term_entry list,
+ pat_complete_model: term_entry list,
+ pat_incomplete_model: term_entry list,
+ skolem_model: term_entry list}
+
+ val str_of_isa_model: Proof.context -> isa_model -> string
+
+ val isa_model_of_nun: Proof.context -> term list -> (typ option * string list) list ->
+ nun_model -> isa_model
+end;
+
+structure Nunchaku_Reconstruct : NUNCHAKU_RECONSTRUCT =
+struct
+
+open Nunchaku_Util;
+open Nunchaku_Problem;
+open Nunchaku_Translate;
+open Nunchaku_Model;
+
+type typ_entry = typ * term list;
+type term_entry = term * term;
+
+type isa_model =
+ {type_model: typ_entry list,
+ free_model: term_entry list,
+ pat_complete_model: term_entry list,
+ pat_incomplete_model: term_entry list,
+ skolem_model: term_entry list};
+
+val anonymousN = "anonymous";
+val irrelevantN = "irrelevant";
+val unparsableN = "unparsable";
+
+val nun_arrow_exploded = String.explode nun_arrow;
+
+val is_ty_meta = member (op =) (String.explode "()->,");
+
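+(* Types embedded in encoded identifiers use a low-level prefix syntax in which the
+   arguments precede the head symbol, e.g. "(a)list" (hypothetical names) for a unary
+   type constructor applied to "a"; the tokenizer and parser below read it back. *)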
+fun next_token_lowlevel [] = (End_of_Stream, [])
+ | next_token_lowlevel (c :: cs) =
+ if Char.isSpace c then
+ next_token_lowlevel cs
+ else if not (is_ty_meta c) then
+ let val n = find_index (Char.isSpace orf is_ty_meta) cs in
+ (if n = ~1 then (cs, []) else chop n cs)
+ |>> (cons c #> String.implode #> ident_of_str #> Ident)
+ end
+ else if is_prefix (op =) nun_arrow_exploded (c :: cs) then
+ (Ident nun_arrow, tl cs)
+ else
+ (Symbol (String.str c), cs);
+
+val tokenize_lowlevel =
+ let
+ fun toks cs =
+ (case next_token_lowlevel cs of
+ (End_of_Stream, []) => []
+ | (tok, cs') => tok :: toks cs');
+ in
+ toks o String.explode
+ end;
+
+fun parse_lowlevel_ty tok =
+ (Scan.optional
+ (parse_sym "(" |-- Scan.repeat (parse_lowlevel_ty --| Scan.option (parse_sym ",")) --|
+ parse_sym ")")
+ []
+ -- parse_ident >> (swap #> NType)) tok;
+
+val ty_of_lowlevel_str = fst o parse_lowlevel_ty o tokenize_lowlevel;
+
+fun ident_of_const (NConst (id, _, _)) = id
+ | ident_of_const _ = nun_dummy;
+
+fun str_of_typ_entry ctxt (T, ts) =
+ "type " ^ Syntax.string_of_typ ctxt T ^
+ " := {" ^ commas (map (Syntax.string_of_term ctxt) ts) ^ "}.";
+
+fun str_of_term_entry ctxt (tm, value) =
+ "val " ^ Syntax.string_of_term ctxt tm ^ " := " ^ Syntax.string_of_term ctxt value ^ ".";
+
+fun str_of_isa_model ctxt
+ {type_model, free_model, pat_complete_model, pat_incomplete_model, skolem_model} =
+ map (str_of_typ_entry ctxt) type_model @ "" ::
+ map (str_of_term_entry ctxt) free_model @ "" ::
+ map (str_of_term_entry ctxt) pat_complete_model @ "" ::
+ map (str_of_term_entry ctxt) pat_incomplete_model @ "" ::
+ map (str_of_term_entry ctxt) skolem_model
+ |> cat_lines;
+
+fun typ_of_nun ctxt =
+ let
+ fun typ_of (NType (id, tys)) =
+ let val Ts = map typ_of tys in
+ if id = nun_dummy then
+ dummyT
+ else if id = nun_prop then
+ @{typ bool}
+ else if id = nun_arrow then
+ Type (@{type_name fun}, Ts)
+ else
+ (case try str_of_nun_tconst id of
+ SOME (args, s) =>
+ let val tys' = map ty_of_lowlevel_str args in
+ Type (s, map typ_of (tys' @ tys))
+ end
+ | NONE =>
+ (case try str_of_nun_tfree id of
+ SOME s => TFree (Proof_Context.check_tfree ctxt (flip_quote s, dummyS))
+ | NONE => raise Fail ("unknown type constructor: " ^ quote (str_of_ident id))))
+ end;
+ in
+ typ_of
+ end;
+
+fun one_letter_of s =
+ let val c = String.sub (Long_Name.base_name s, 0) in
+ String.str (if Char.isAlpha c then c else #"x")
+ end;
+
+fun base_of_typ (Type (s, _)) = s
+ | base_of_typ (TFree (s, _)) = flip_quote s
+ | base_of_typ (TVar ((s, _), _)) = flip_quote s;
+
+fun term_of_nun ctxt atomss =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ val typ_of = typ_of_nun ctxt;
+
+ fun nth_atom T j =
+ let val ss = these (triple_lookup (typ_match thy) atomss T) in
+ if j >= 0 andalso j < length ss then nth ss j
+ else one_letter_of (base_of_typ T) ^ nat_subscript (j + 1)
+ end;
+
+ fun term_of _ (NAtom (j, ty)) =
+ let val T = typ_of ty in Var ((nth_atom T j, 0), T) end
+ | term_of bounds (NConst (id, tys0, ty)) =
+ if id = nun_conj then
+ HOLogic.conj
+ else if id = nun_disj then
+ HOLogic.disj
+ else if id = nun_choice then
+ Const (@{const_name Eps}, typ_of ty)
+ else if id = nun_equals then
+ Const (@{const_name HOL.eq}, typ_of ty)
+ else if id = nun_false then
+ @{const False}
+ else if id = nun_if then
+ Const (@{const_name If}, typ_of ty)
+ else if id = nun_implies then
+ @{term implies}
+ else if id = nun_unique then
+ Const (@{const_name The}, typ_of ty)
+ else if id = nun_unique_unsafe then
+ Const (@{const_name The_unsafe}, typ_of ty)
+ else if id = nun_true then
+ @{const True}
+ else if String.isPrefix nun_anon_fun_prefix id then
+ let val j = Int.fromString (unprefix nun_anon_fun_prefix id) |> the_default ~1 in
+ Var ((anonymousN ^ nat_subscript (j + 1), 0), typ_of ty)
+ end
+ else if id = nun_irrelevant then
+ (* FIXME: get bounds from Nunchaku *)
+ list_comb (Var ((irrelevantN, 0), map (typ_of o safe_ty_of) bounds ---> typ_of ty),
+ map Bound (length bounds - 1 downto 0))
+ else if id = nun_unparsable then
+ (* FIXME: get bounds from Nunchaku *)
+ list_comb (Var ((unparsableN, 0), typ_of ty), map Bound (length bounds - 1 downto 0))
+ else
+ (case try str_of_nun_const id of
+ SOME (args, s) =>
+ let val tys = map ty_of_lowlevel_str args in
+ Sign.mk_const thy (s, map typ_of (tys @ tys0))
+ end
+ | NONE =>
+ (case try str_of_nun_free id of
+ SOME s => Free (s, typ_of ty)
+ | NONE =>
+ (case try str_of_nun_var id of
+ SOME s => Var ((s, 0), typ_of ty)
+ | NONE =>
+ (case find_index (fn bound => ident_of_const bound = id) bounds of
+ ~1 => Var ((str_of_ident id, 0), typ_of ty) (* shouldn't happen? *)
+ | j => Bound j))))
+ | term_of bounds (NAbs (var, body)) =
+ let val T = typ_of (safe_ty_of var) in
+ Abs (one_letter_of (base_of_typ T), T, term_of (var :: bounds) body)
+ end
+ | term_of bounds (NApp (func, arg)) =
+ let
+ fun same () = term_of bounds func $ term_of bounds arg;
+ in
+ (case (func, arg) of
+ (NConst (id, _, _), NAbs _) =>
+ if id = nun_mu then
+ let val Abs (s, T, body) = term_of bounds arg in
+ Const (@{const_name The}, (T --> HOLogic.boolT) --> T)
+ $ Abs (s, T, HOLogic.eq_const T $ Bound 0 $ body)
+ end
+ else
+ same ()
+ | _ => same ())
+ end
+ | term_of _ (NMatch _) = raise Fail "unexpected match";
+ in
+ term_of []
+ end;
+
+fun isa_typ_entry_of_nun ctxt atomss (ty, atoms) =
+ (typ_of_nun ctxt ty, map (term_of_nun ctxt atomss) atoms);
+
+fun isa_term_entry_of_nun ctxt atomss (tm, value) =
+ (term_of_nun ctxt atomss tm, term_of_nun ctxt atomss value);
+
+fun isa_model_of_nun ctxt pat_completes atomss {type_model, const_model, skolem_model} =
+ let
+ val free_and_const_model = map (isa_term_entry_of_nun ctxt atomss) const_model;
+ val (free_model, (pat_complete_model, pat_incomplete_model)) =
+ List.partition (is_Free o fst) free_and_const_model
+ ||> List.partition (member (op aconv) pat_completes o fst);
+ in
+ {type_model = map (isa_typ_entry_of_nun ctxt atomss) type_model, free_model = free_model,
+ pat_complete_model = pat_complete_model, pat_incomplete_model = pat_incomplete_model,
+ skolem_model = map (isa_term_entry_of_nun ctxt atomss) skolem_model}
+ end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_tool.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,141 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_tool.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Interface to the external "nunchaku" tool.
+*)
+
+signature NUNCHAKU_TOOL =
+sig
+ type ty = Nunchaku_Problem.ty
+ type tm = Nunchaku_Problem.tm
+ type nun_problem = Nunchaku_Problem.nun_problem
+
+ type tool_params =
+ {solvers: string list,
+ overlord: bool,
+ debug: bool,
+ specialize: bool,
+ timeout: Time.time}
+
+ type nun_solution =
+ {tys: (ty * tm list) list,
+ tms: (tm * tm) list}
+
+ datatype nun_outcome =
+ Unsat
+ | Sat of string * nun_solution
+ | Unknown of (string * nun_solution) option
+ | Timeout
+ | Nunchaku_Var_Not_Set
+ | Nunchaku_Cannot_Execute
+ | Nunchaku_Not_Found
+ | CVC4_Cannot_Execute
+ | CVC4_Not_Found
+ | Unknown_Error of int * string
+
+ val nunchaku_home_env_var: string
+
+ val solve_nun_problem: tool_params -> nun_problem -> nun_outcome
+end;
+
+structure Nunchaku_Tool : NUNCHAKU_TOOL =
+struct
+
+open Nunchaku_Util;
+open Nunchaku_Problem;
+
+type tool_params =
+ {solvers: string list,
+ overlord: bool,
+ debug: bool,
+ specialize: bool,
+ timeout: Time.time};
+
+type nun_solution =
+ {tys: (ty * tm list) list,
+ tms: (tm * tm) list};
+
+datatype nun_outcome =
+ Unsat
+| Sat of string * nun_solution
+| Unknown of (string * nun_solution) option
+| Timeout
+| Nunchaku_Var_Not_Set
+| Nunchaku_Cannot_Execute
+| Nunchaku_Not_Found
+| CVC4_Cannot_Execute
+| CVC4_Not_Found
+| Unknown_Error of int * string;
+
+fun bash_output_error s =
+ let val {out, err, rc, ...} = Bash.process s in
+ ((out, err), rc)
+ end;
+
+val nunchaku_home_env_var = "NUNCHAKU_HOME";
+
+val cached_outcome = Synchronized.var "Nunchaku_Tool.cached_outcome"
+ (NONE : ((string list * nun_problem) * nun_outcome) option);
+
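+(* Write the problem to a temporary (or, in overlord mode, persistent) file, invoke the
+   external "nunchaku" executable with the CVC4 and Kodkodi components on the PATH, and
+   classify its output as SAT, UNSAT, UNKNOWN, or TIMEOUT. *)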
+fun uncached_solve_nun_problem ({solvers, overlord, specialize, timeout, ...} : tool_params)
+ (problem as {sound, complete, ...}) =
+ with_tmp_or_overlord_file overlord "nunchaku" "nun" (fn prob_path =>
+ if getenv nunchaku_home_env_var = "" then
+ Nunchaku_Var_Not_Set
+ else
+ let
+ val bash_cmd =
+ "PATH=\"$CVC4_HOME:$KODKODI/bin:$PATH\" \"$" ^
+ nunchaku_home_env_var ^ "\"/nunchaku --skolems-in-model --no-color " ^
+ (if specialize then "" else "--no-specialize ") ^
+ "--solvers \"" ^ Bash_Syntax.string (space_implode " " solvers) ^ "\" " ^
+ "--timeout " ^ string_of_int (Time.toSeconds timeout) ^ " " ^
+ File.bash_path prob_path;
+ val comments =
+ [bash_cmd, "This file was generated by Isabelle (most likely Nunchaku)", timestamp ()];
+ val prob_str = cat_lines (map (prefix "# ") comments) ^ "\n\n" ^ str_of_nun_problem problem;
+ val _ = File.write prob_path prob_str;
+ val ((output, error), code) = bash_output_error bash_cmd;
+ in
+ if String.isPrefix "SAT" output then
+ (if sound then Sat else Unknown o SOME) (output, {tys = [], tms = []})
+ else if String.isPrefix "UNSAT" output then
+ if complete then Unsat else Unknown NONE
+ else if String.isSubstring "TIMEOUT" output
+ (* FIXME: temporary *)
+ orelse String.isSubstring "kodkod failed (errcode 152)" error then
+ Timeout
+ else if String.isPrefix "UNKNOWN" output then
+ Unknown NONE
+ else if code = 126 then
+ Nunchaku_Cannot_Execute
+ else if code = 127 then
+ Nunchaku_Not_Found
+ else
+ Unknown_Error (code,
+ simplify_spaces (elide_string 1000 (if error <> "" then error else output)))
+ end);
+
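+(* Cached front end: unless "overlord" or "debug" is set, reuse the outcome of the previous
+   identical invocation; error and timeout outcomes are never cached. *)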
+fun solve_nun_problem (params as {solvers, overlord, debug, ...}) problem =
+ let val key = (solvers, problem) in
+ (case (overlord orelse debug,
+ AList.lookup (op =) (the_list (Synchronized.value cached_outcome)) key) of
+ (false, SOME outcome) => outcome
+ | _ =>
+ let
+ val outcome = uncached_solve_nun_problem params problem;
+
+ fun update_cache () =
+ Synchronized.change cached_outcome (K (SOME (key, outcome)));
+ in
+ (case outcome of
+ Unsat => update_cache ()
+ | Sat _ => update_cache ()
+ | Unknown _ => update_cache ()
+ | _ => ());
+ outcome
+ end)
+ end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_translate.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,193 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_translate.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+Translation of Isabelle/HOL problems to Nunchaku.
+*)
+
+signature NUNCHAKU_TRANSLATE =
+sig
+ type isa_problem = Nunchaku_Collect.isa_problem
+ type ty = Nunchaku_Problem.ty
+ type nun_problem = Nunchaku_Problem.nun_problem
+
+ val flip_quote: string -> string
+ val lowlevel_str_of_ty: ty -> string
+
+ val nun_problem_of_isa: Proof.context -> isa_problem -> nun_problem
+end;
+
+structure Nunchaku_Translate : NUNCHAKU_TRANSLATE =
+struct
+
+open Nunchaku_Util;
+open Nunchaku_Collect;
+open Nunchaku_Problem;
+
+fun flip_quote s =
+ (case try (unprefix "'") s of
+ SOME s' => s'
+ | NONE => prefix "'" s);
+
+fun lowlevel_str_of_ty (NType (id, tys)) =
+ (if null tys then "" else encode_args (map lowlevel_str_of_ty tys)) ^ id;
+
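+(* strip the "n" outermost lambda abstractions, returning the bound variables and the body *)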
+fun strip_nun_abs 0 tm = ([], tm)
+ | strip_nun_abs n (NAbs (var, body)) =
+ strip_nun_abs (n - 1) body
+ |>> cons var;
+
+val strip_nun_comb =
+ let
+ fun strip args (NApp (func, arg)) = strip (arg :: args) func
+ | strip args tm = (tm, args);
+ in
+ strip []
+ end;
+
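+(* Translate an Isabelle type: "bool" becomes Nunchaku's proposition type, "fun" the Nunchaku
+   arrow, and any other type constructor a nullary type constant whose identifier encodes the
+   argument types. *)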
+fun ty_of_isa (Type (s, Ts)) =
+ let val tys = map ty_of_isa Ts in
+ (case s of
+ @{type_name bool} => prop_ty
+ | @{type_name fun} => NType (nun_arrow, tys)
+ | _ =>
+ let
+ val args = map lowlevel_str_of_ty tys;
+ val id = nun_tconst_of_str args s;
+ in
+ NType (id, [])
+ end)
+ end
+ | ty_of_isa (TFree (s, _)) = NType (nun_tfree_of_str (flip_quote s), [])
+ | ty_of_isa (TVar _) = raise Fail "unexpected TVar";
+
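+(* Translate an Isabelle term. Case combinators of (co)datatypes become Nunchaku "match"
+   expressions, "unreachable" in a proposition becomes a constant asserting falsehood, the
+   usual logical constants map to Nunchaku builtins, and lambda-bound variables are renamed
+   apart before being turned into named variables. *)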
+fun gen_tm_of_isa in_prop ctxt t =
+ let
+ val thy = Proof_Context.theory_of ctxt;
+
+ fun id_of_const (x as (s, _)) =
+ let val args = map (lowlevel_str_of_ty o ty_of_isa) (Sign.const_typargs thy x) in
+ nun_const_of_str args s
+ end;
+
+ fun tm_of_branch ctr_id var_count f_arg_tm =
+ let val (vars, body) = strip_nun_abs var_count f_arg_tm in
+ (ctr_id, vars, body)
+ end;
+
+ fun tm_of bounds (Const (x as (s, T))) =
+ (case try (dest_co_datatype_case ctxt) x of
+ SOME ctrs =>
+ let
+ val num_f_args = length ctrs;
+ val min_args = num_f_args + 1;
+ val var_counts = map (num_binder_types o snd) ctrs;
+
+ val dummy_free = Free (Name.uu, T);
+ val tm = tm_of bounds dummy_free;
+ val tm' = eta_expandN_tm min_args tm;
+ val (vars, body) = strip_nun_abs min_args tm';
+ val (_, (f_args, obj :: other_args)) = strip_nun_comb body ||> chop num_f_args;
+ val f_args' = map2 eta_expandN_tm var_counts f_args;
+
+ val ctr_ids = map id_of_const ctrs;
+ in
+ NMatch (obj, @{map 3} tm_of_branch ctr_ids var_counts f_args')
+ |> rcomb_tms other_args
+ |> abs_tms vars
+ end
+ | NONE =>
+ if s = @{const_name unreachable} andalso in_prop then
+ let val ty = ty_of_isa T in
+ napps (NConst (nun_asserting, [ty], mk_arrows_ty ([ty, prop_ty], ty)),
+ [NConst (id_of_const x, [], ty), NConst (nun_false, [], prop_ty)])
+ end
+ else
+ let
+ val id =
+ (case s of
+ @{const_name All} => nun_forall
+ | @{const_name conj} => nun_conj
+ | @{const_name disj} => nun_disj
+ | @{const_name HOL.eq} => nun_equals
+ | @{const_name Eps} => nun_choice
+ | @{const_name Ex} => nun_exists
+ | @{const_name False} => nun_false
+ | @{const_name If} => nun_if
+ | @{const_name implies} => nun_implies
+ | @{const_name Not} => nun_not
+ | @{const_name The} => nun_unique
+ | @{const_name The_unsafe} => nun_unique_unsafe
+ | @{const_name True} => nun_true
+ | _ => id_of_const x);
+ in
+ NConst (id, [], ty_of_isa T)
+ end)
+ | tm_of _ (Free (s, T)) = NConst (nun_free_of_str s, [], ty_of_isa T)
+ | tm_of _ (Var ((s, _), T)) = NConst (nun_var_of_str s, [], ty_of_isa T)
+ | tm_of bounds (Abs (s, T, t)) =
+ let
+ val (s', bounds') = Name.variant s bounds;
+ val x = Var ((s', 0), T);
+ in
+ NAbs (tm_of bounds' x, tm_of bounds' (subst_bound (x, t)))
+ end
+ | tm_of bounds (t $ u) = NApp (tm_of bounds t, tm_of bounds u)
+ | tm_of _ (Bound _) = raise Fail "unexpected Bound";
+ in
+ t
+ |> tm_of Name.context
+ |> beta_reduce_tm
+ |> eta_expand_builtin_tm
+ end;
+
+val tm_of_isa = gen_tm_of_isa false;
+val prop_of_isa = gen_tm_of_isa true;
+
+fun nun_copy_spec_of_isa_typedef ctxt {abs_typ, rep_typ, wrt, abs, rep} =
+ {abs_ty = ty_of_isa abs_typ, rep_ty = ty_of_isa rep_typ, subset = SOME (tm_of_isa ctxt wrt),
+ quotient = NONE, abs = tm_of_isa ctxt abs, rep = tm_of_isa ctxt rep};
+
+fun nun_copy_spec_of_isa_quotient ctxt {abs_typ, rep_typ, wrt, abs, rep} =
+ {abs_ty = ty_of_isa abs_typ, rep_ty = ty_of_isa rep_typ, subset = NONE,
+ quotient = SOME (tm_of_isa ctxt wrt), abs = tm_of_isa ctxt abs, rep = tm_of_isa ctxt rep};
+
+fun nun_ctr_of_isa ctxt ctr =
+ {ctr = tm_of_isa ctxt ctr, arg_tys = map ty_of_isa (binder_types (fastype_of ctr))};
+
+fun nun_co_data_spec_of_isa ctxt {typ, ctrs} =
+ {ty = ty_of_isa typ, ctrs = map (nun_ctr_of_isa ctxt) ctrs};
+
+fun nun_const_spec_of_isa ctxt {const, props} =
+ {const = tm_of_isa ctxt const, props = map (prop_of_isa ctxt) props};
+
+fun nun_rec_spec_of_isa ctxt {const, props, ...} =
+ {const = tm_of_isa ctxt const, props = map (prop_of_isa ctxt) props};
+
+fun nun_consts_spec_of_isa ctxt {consts, props} =
+ {consts = map (tm_of_isa ctxt) consts, props = map (prop_of_isa ctxt) props};
+
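+(* Translate a whole problem: typedefs and quotients become "copy" types, (co)datatypes become
+   (co)data declarations, (co)inductive predicates become (well-founded) "pred" or "copred"
+   specifications, and recursive definitions, specifications, axioms, goals, and evaluation
+   terms are mapped to their Nunchaku counterparts. *)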
+fun nun_problem_of_isa ctxt {commandss, sound, complete} =
+ let
+ fun cmd_of cmd =
+ (case cmd of
+ ITVal (T, cards) => NTVal (ty_of_isa T, cards)
+ | ITypedef spec => NCopy (nun_copy_spec_of_isa_typedef ctxt spec)
+ | IQuotient spec => NCopy (nun_copy_spec_of_isa_quotient ctxt spec)
+ | ICoData (fp, specs) =>
+ BNF_Util.case_fp fp NData NCodata (map (nun_co_data_spec_of_isa ctxt) specs)
+ | IVal t => NVal (tm_of_isa ctxt t, ty_of_isa (fastype_of t))
+ | ICoPred (fp, wf, specs) =>
+ (if wf then curry NPred true
+ else if fp = BNF_Util.Least_FP then curry NPred false
+ else NCopred) (map (nun_const_spec_of_isa ctxt) specs)
+ | IRec specs => NRec (map (nun_rec_spec_of_isa ctxt) specs)
+ | ISpec spec => NSpec (nun_consts_spec_of_isa ctxt spec)
+ | IAxiom prop => NAxiom (prop_of_isa ctxt prop)
+ | IGoal prop => NGoal (prop_of_isa ctxt prop)
+ | IEval t => NEval (tm_of_isa ctxt t));
+ in
+ {commandss = map (map cmd_of) commandss, sound = sound, complete = complete}
+ end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Nunchaku/nunchaku_util.ML Fri Sep 08 00:01:36 2017 +0200
@@ -0,0 +1,95 @@
+(* Title: HOL/Tools/Nunchaku/nunchaku_util.ML
+ Author: Jasmin Blanchette, VU Amsterdam
+ Copyright 2015, 2016, 2017
+
+General-purpose functions used by Nunchaku.
+*)
+
+signature NUNCHAKU_UTIL =
+sig
+ val elide_string: int -> string -> string
+ val nat_subscript: int -> string
+ val timestamp: unit -> string
+ val parse_bool_option: bool -> string -> string -> bool option
+ val parse_time: string -> string -> Time.time
+ val string_of_time: Time.time -> string
+ val simplify_spaces: string -> string
+ val ascii_of: string -> string
+ val unascii_of: string -> string
+ val double_lookup: ('a * 'a -> bool) -> ('a option * 'b) list -> 'a -> 'b option
+ val triple_lookup: (''a * ''a -> bool) -> (''a option * 'b) list -> ''a -> 'b option
+ val plural_s_for_list: 'a list -> string
+ val with_overlord_file: string -> string -> (Path.T -> 'a) -> 'a
+ val with_tmp_or_overlord_file: bool -> string -> string -> (Path.T -> 'a) -> 'a
+ val num_binder_types: typ -> int
+ val strip_fun_type: typ -> typ list * typ
+ val attach_typeS: term -> term
+ val specialize_type: theory -> string * typ -> term -> term
+ val typ_match: theory -> typ * typ -> bool
+ val term_match: theory -> term * term -> bool
+ val const_match: theory -> (string * typ) * (string * typ) -> bool
+ val DETERM_TIMEOUT: Time.time -> tactic -> tactic
+ val spying: bool -> (unit -> Proof.state * int * string) -> unit
+end;
+
+structure Nunchaku_Util : NUNCHAKU_UTIL =
+struct
+
+val elide_string = ATP_Util.elide_string;
+val nat_subscript = Nitpick_Util.nat_subscript;
+val timestamp = ATP_Util.timestamp;
+
+val parse_bool_option = Sledgehammer_Util.parse_bool_option;
+val parse_time = Sledgehammer_Util.parse_time;
+val string_of_time = ATP_Util.string_of_time;
+val simplify_spaces = Sledgehammer_Util.simplify_spaces;
+val ascii_of = ATP_Problem_Generate.ascii_of;
+val unascii_of = ATP_Problem_Generate.unascii_of;
+val double_lookup = Nitpick_Util.double_lookup;
+val triple_lookup = Nitpick_Util.triple_lookup;
+val plural_s_for_list = Nitpick_Util.plural_s_for_list;
+
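+(* In overlord mode, use a stable file under "$ISABELLE_HOME_USER" instead of a temporary
+   file, which makes the generated problems easier to inspect. *)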
+fun with_overlord_file name ext f =
+ f (Path.explode ("$ISABELLE_HOME_USER/" ^ name ^ "." ^ ext));
+
+fun with_tmp_or_overlord_file overlord =
+ if overlord then with_overlord_file else Isabelle_System.with_tmp_file;
+
+val num_binder_types = BNF_Util.num_binder_types;
+val strip_fun_type = BNF_Util.strip_fun_type;
+
+(* Clone from "HOL/Tools/inductive_realizer.ML". *)
+val attach_typeS =
+ map_types (map_atyps
+ (fn TFree (s, []) => TFree (s, @{sort type})
+ | TVar (ixn, []) => TVar (ixn, @{sort type})
+ | T => T));
+
+val specialize_type = ATP_Util.specialize_type;
+
+fun typ_match thy TU = can (Sign.typ_match thy TU) Vartab.empty;
+fun term_match thy tu = can (Pattern.match thy tu) (Vartab.empty, Vartab.empty);
+fun const_match thy = term_match thy o apply2 Const;
+
+val DETERM_TIMEOUT = Nitpick_Util.DETERM_TIMEOUT;
+
+val spying_version = "a";
+
+val hackish_string_of_term = Sledgehammer_Util.hackish_string_of_term;
+
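+(* When spying is enabled, append a timestamped record of the goal (identified by a short
+   SHA1 hash) and the given message to "$ISABELLE_HOME_USER/spy_nunchaku". *)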
+fun spying spy f =
+ if spy then
+ let
+ val (state, i, message) = f ();
+ val ctxt = Proof.context_of state;
+ val goal = Logic.get_goal (Thm.prop_of (#goal (Proof.goal state))) i;
+ val hash =
+ String.substring (SHA1.rep (SHA1.digest (hackish_string_of_term ctxt goal)), 0, 12);
+ in
+ File.append (Path.explode "$ISABELLE_HOME_USER/spy_nunchaku")
+ (spying_version ^ " " ^ timestamp () ^ ": " ^ hash ^ ": " ^ message ^ "\n")
+ end
+ else
+ ();
+
+end;