# HG changeset patch # User hoelzl # Date 1268235620 -3600 # Node ID 55390690442632b11cf150575b5d90ac8dff4318 # Parent d58a4ac1ca1c9eacc315aaa252f0f99b177eeae8# Parent d9c9b81b16a80221c271fbae795b2e10b41ac3f5 merged diff -r d58a4ac1ca1c -r 553906904426 NEWS --- a/NEWS Tue Mar 09 16:30:43 2010 +0100 +++ b/NEWS Wed Mar 10 16:40:20 2010 +0100 @@ -76,6 +76,10 @@ within a local theory context, with explicit checking of the constructors involved (in contrast to the raw 'syntax' versions). +* Command 'typedecl' now works within a local theory context -- +without introducing dependencies on parameters or assumptions, which +is not possible in Isabelle/Pure. + *** HOL *** diff -r d58a4ac1ca1c -r 553906904426 doc-src/IsarRef/Thy/Spec.thy --- a/doc-src/IsarRef/Thy/Spec.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/doc-src/IsarRef/Thy/Spec.thy Wed Mar 10 16:40:20 2010 +0100 @@ -954,7 +954,7 @@ text {* \begin{matharray}{rcll} @{command_def "types"} & : & @{text "theory \ theory"} \\ - @{command_def "typedecl"} & : & @{text "theory \ theory"} \\ + @{command_def "typedecl"} & : & @{text "local_theory \ local_theory"} \\ @{command_def "arities"} & : & @{text "theory \ theory"} & (axiomatic!) \\ \end{matharray} diff -r d58a4ac1ca1c -r 553906904426 doc-src/IsarRef/Thy/document/Spec.tex --- a/doc-src/IsarRef/Thy/document/Spec.tex Tue Mar 09 16:30:43 2010 +0100 +++ b/doc-src/IsarRef/Thy/document/Spec.tex Wed Mar 10 16:40:20 2010 +0100 @@ -991,7 +991,7 @@ \begin{isamarkuptext}% \begin{matharray}{rcll} \indexdef{}{command}{types}\hypertarget{command.types}{\hyperlink{command.types}{\mbox{\isa{\isacommand{types}}}}} & : & \isa{{\isachardoublequote}theory\ {\isasymrightarrow}\ theory{\isachardoublequote}} \\ - \indexdef{}{command}{typedecl}\hypertarget{command.typedecl}{\hyperlink{command.typedecl}{\mbox{\isa{\isacommand{typedecl}}}}} & : & \isa{{\isachardoublequote}theory\ {\isasymrightarrow}\ theory{\isachardoublequote}} \\ + \indexdef{}{command}{typedecl}\hypertarget{command.typedecl}{\hyperlink{command.typedecl}{\mbox{\isa{\isacommand{typedecl}}}}} & : & \isa{{\isachardoublequote}local{\isacharunderscore}theory\ {\isasymrightarrow}\ local{\isacharunderscore}theory{\isachardoublequote}} \\ \indexdef{}{command}{arities}\hypertarget{command.arities}{\hyperlink{command.arities}{\mbox{\isa{\isacommand{arities}}}}} & : & \isa{{\isachardoublequote}theory\ {\isasymrightarrow}\ theory{\isachardoublequote}} & (axiomatic!) \\ \end{matharray} diff -r d58a4ac1ca1c -r 553906904426 doc-src/Nitpick/nitpick.tex --- a/doc-src/Nitpick/nitpick.tex Tue Mar 09 16:30:43 2010 +0100 +++ b/doc-src/Nitpick/nitpick.tex Wed Mar 10 16:40:20 2010 +0100 @@ -136,8 +136,8 @@ suggesting several textual improvements. % and Perry James for reporting a typo. -\section{First Steps} -\label{first-steps} +\section{Overview} +\label{overview} This section introduces Nitpick by presenting small examples. If possible, you should try out the examples on your workstation. 
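The NEWS entry above records that 'typedecl' now works within a local theory context without introducing dependencies on locale parameters or assumptions. A minimal sketch of what this enables is given below; the theory, locale, and type names are invented for illustration and are not part of the changeset.

theory Typedecl_Sketch
imports Main
begin

locale container
begin

(* Accepted inside the local theory target; per the NEWS entry above, the
   declared type does not depend on the locale's parameters or assumptions. *)
typedecl elem

end

end
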
Your theory file should start @@ -145,7 +145,7 @@ \prew \textbf{theory}~\textit{Scratch} \\ -\textbf{imports}~\textit{Main~Coinductive\_List~Quotient\_Product~RealDef} \\ +\textbf{imports}~\textit{Main~Quotient\_Product~RealDef} \\ \textbf{begin} \postw @@ -677,7 +677,7 @@ \hbox{}\qquad\qquad $y = \Abs{(1,\, 0)}$ \\ \hbox{}\qquad Datatypes: \\ \hbox{}\qquad\qquad $\textit{nat} = \{0,\, 1,\, \unr\}$ \\ -\hbox{}\qquad\qquad $\textit{nat} \times \textit{nat} = \{(0,\, 0),\> (1,\, 0),\> \unr\}$ \\ +\hbox{}\qquad\qquad $\textit{nat} \times \textit{nat}~[\textsl{boxed\/}] = \{(0,\, 0),\> (1,\, 0),\> \unr\}$ \\ \hbox{}\qquad\qquad $\textit{my\_int} = \{\Abs{(0,\, 0)},\> \Abs{(1,\, 0)},\> \unr\}$ \postw @@ -704,8 +704,6 @@ & \lparr\textit{Xcoord} = 1,\> \textit{Ycoord} = 1\rparr,\, \unr\}\end{aligned}$ \postw - - Finally, Nitpick provides rudimentary support for rationals and reals using a similar approach: @@ -940,9 +938,10 @@ \label{coinductive-datatypes} While Isabelle regrettably lacks a high-level mechanism for defining coinductive -datatypes, the \textit{Coinductive\_List} theory provides a coinductive ``lazy -list'' datatype, $'a~\textit{llist}$, defined the hard way. Nitpick supports -these lazy lists seamlessly and provides a hook, described in +datatypes, the \textit{Coinductive\_List} theory from Andreas Lochbihler's +\textit{Coinductive} AFP entry \cite{lochbihler-2010} provides a coinductive +``lazy list'' datatype, $'a~\textit{llist}$, defined the hard way. Nitpick +supports these lazy lists seamlessly and provides a hook, described in \S\ref{registration-of-coinductive-datatypes}, to register custom coinductive datatypes. @@ -1165,10 +1164,11 @@ {\looseness=-1 Boxing can be enabled or disabled globally or on a per-type basis using the -\textit{box} option. Moreover, setting the cardinality of a function or -product type implicitly enables boxing for that type. Nitpick usually performs -reasonable choices about which types should be boxed, but option tweaking -sometimes helps. +\textit{box} option. Nitpick usually performs reasonable choices about which +types should be boxed, but option tweaking sometimes helps. A related optimization, +``finalization,'' attempts to wrap functions that constant at all but finitely +many points (e.g., finite sets); see the documentation for the \textit{finalize} +option in \S\ref{scope-of-search} for details. } @@ -1850,7 +1850,7 @@ The number of options can be overwhelming at first glance. Do not let that worry you: Nitpick's defaults have been chosen so that it almost always does the right thing, and the most important options have been covered in context in -\S\ref{first-steps}. +\S\ref{overview}. The descriptions below refer to the following syntactic quantities: @@ -1936,14 +1936,10 @@ Specifies the sequence of cardinalities to use for a given type. For free types, and often also for \textbf{typedecl}'d types, it usually makes sense to specify cardinalities as a range of the form \textit{$1$--$n$}. -Although function and product types are normally mapped directly to the -corresponding Kodkod concepts, setting -the cardinality of such types is also allowed and implicitly enables ``boxing'' -for them, as explained in the description of the \textit{box}~\qty{type} -and \textit{box} (\S\ref{scope-of-search}) options. 
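The \textit{card} documentation above recommends cardinality ranges of the form 1--$n$ for free types and \textbf{typedecl}'d types. The sketch below, assumed to sit in a theory importing \textit{Main}, illustrates such an invocation using the plain-hyphen range syntax seen in the manual's existing examples; the type \textit{guest}, the free variables, and the lemma itself are invented for illustration.

typedecl guest

lemma "(g::guest) = g' | g' = g'' | g'' = g"
nitpick [card guest = 1-8]
oops
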
\nopagebreak -{\small See also \textit{mono} (\S\ref{scope-of-search}).} +{\small See also \textit{box} (\S\ref{scope-of-search}) and \textit{mono} +(\S\ref{scope-of-search}).} \opdefault{card}{int\_seq}{$\mathbf{1}$--$\mathbf{8}$} Specifies the default sequence of cardinalities to use. This can be overridden @@ -2062,8 +2058,8 @@ Specifies whether Nitpick should attempt to wrap (``box'') a given function or product type in an isomorphic datatype internally. Boxing is an effective mean to reduce the search space and speed up Nitpick, because the isomorphic datatype -is approximated by a subset of the possible function or pair values; -like other drastic optimizations, it can also prevent the discovery of +is approximated by a subset of the possible function or pair values. +Like other drastic optimizations, it can also prevent the discovery of counterexamples. The option can take the following values: \begin{enum} @@ -2075,30 +2071,68 @@ higher-order functions are good candidates for boxing. \end{enum} -Setting the \textit{card}~\qty{type} option for a function or product type -implicitly enables boxing for that type. - \nopagebreak -{\small See also \textit{verbose} (\S\ref{output-format}) -and \textit{debug} (\S\ref{output-format}).} +{\small See also \textit{finitize} (\S\ref{scope-of-search}), \textit{verbose} +(\S\ref{output-format}), and \textit{debug} (\S\ref{output-format}).} \opsmart{box}{dont\_box} Specifies the default boxing setting to use. This can be overridden on a per-type basis using the \textit{box}~\qty{type} option described above. +\opargboolorsmart{finitize}{type}{dont\_finitize} +Specifies whether Nitpick should attempt to finitize a given type, which can be +a function type or an infinite ``shallow datatype'' (an infinite datatype whose +constructors don't appear in the problem). + +For function types, Nitpick performs a monotonicity analysis to detect functions +that are constant at all but finitely many points (e.g., finite sets) and treats +such occurrences specially, thereby increasing the precision. The option can +then take the following values: + +\begin{enum} +\item[$\bullet$] \textbf{\textit{false}}: Don't attempt to finitize the type. +\item[$\bullet$] \textbf{\textit{true}} or \textbf{\textit{smart}}: Finitize the +type wherever possible. +\end{enum} + +The semantics of the option is somewhat different for infinite shallow +datatypes: + +\begin{enum} +\item[$\bullet$] \textbf{\textit{true}}: Finitize the datatype. Since this is +unsound, counterexamples generated under these conditions are tagged as ``likely +genuine.'' +\item[$\bullet$] \textbf{\textit{false}}: Don't attempt to finitize the datatype. +\item[$\bullet$] \textbf{\textit{smart}}: Perform a monotonicity analysis to +detect whether the datatype can be safely finitized before finitizing it. +\end{enum} + +Like other drastic optimizations, finitization can sometimes prevent the +discovery of counterexamples. + +\nopagebreak +{\small See also \textit{box} (\S\ref{scope-of-search}), \textit{mono} +(\S\ref{scope-of-search}), \textit{verbose} (\S\ref{output-format}), and +\textit{debug} (\S\ref{output-format}).} + +\opsmart{finitize}{dont\_finitize} +Specifies the default finitization setting to use. This can be overridden on a +per-type basis using the \textit{finitize}~\qty{type} option described above. + \opargboolorsmart{mono}{type}{non\_mono} -Specifies whether the given type should be considered monotonic when -enumerating scopes. 
If the option is set to \textit{smart}, Nitpick performs a -monotonicity check on the type. Setting this option to \textit{true} can reduce -the number of scopes tried, but it also diminishes the theoretical chance of +Specifies whether the given type should be considered monotonic when enumerating +scopes and finitizing types. If the option is set to \textit{smart}, Nitpick +performs a monotonicity check on the type. Setting this option to \textit{true} +can reduce the number of scopes tried, but it can also diminish the chance of finding a counterexample, as demonstrated in \S\ref{scope-monotonicity}. \nopagebreak {\small See also \textit{card} (\S\ref{scope-of-search}), +\textit{finitize} (\S\ref{scope-of-search}), \textit{merge\_type\_vars} (\S\ref{scope-of-search}), and \textit{verbose} (\S\ref{output-format}).} -\opsmart{mono}{non\_box} +\opsmart{mono}{non\_mono} Specifies the default monotonicity setting to use. This can be overridden on a per-type basis using the \textit{mono}~\qty{type} option described above. diff -r d58a4ac1ca1c -r 553906904426 doc-src/manual.bib --- a/doc-src/manual.bib Tue Mar 09 16:30:43 2010 +0100 +++ b/doc-src/manual.bib Wed Mar 10 16:40:20 2010 +0100 @@ -1759,3 +1759,12 @@ key = "Wikipedia", title = "Wikipedia: {AA} Tree", note = "\url{http://en.wikipedia.org/wiki/AA_tree}"} + +@incollection{lochbihler-2010, + title = "Coinduction", + author = "Andreas Lochbihler", + booktitle = "The Archive of Formal Proofs", + editor = "Gerwin Klein and Tobias Nipkow and Lawrence C. Paulson", + publisher = "\url{http://afp.sourceforge.net/entries/Coinductive.shtml}", + month = "Feb.", + year = 2010} diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Boogie/Tools/boogie_loader.ML --- a/src/HOL/Boogie/Tools/boogie_loader.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Boogie/Tools/boogie_loader.ML Wed Mar 10 16:40:20 2010 +0100 @@ -88,7 +88,7 @@ let val args = Name.variant_list [] (replicate arity "'") val (T, thy') = - Typedecl.typedecl (Binding.name isa_name, args, mk_syntax name arity) thy + Typedecl.typedecl_global (Binding.name isa_name, args, mk_syntax name arity) thy val type_name = fst (Term.dest_Type T) in (((name, type_name), log_new name type_name), thy') end) end diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Code_Numeral.thy --- a/src/HOL/Code_Numeral.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Code_Numeral.thy Wed Mar 10 16:40:20 2010 +0100 @@ -247,7 +247,7 @@ "nat_of i = nat_of_aux i 0" by (simp add: nat_of_aux_def) -definition div_mod_code_numeral :: "code_numeral \ code_numeral \ code_numeral \ code_numeral" where +definition div_mod_code_numeral :: "code_numeral \ code_numeral \ code_numeral \ code_numeral" where [code del]: "div_mod_code_numeral n m = (n div m, n mod m)" lemma [code]: diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Divides.thy --- a/src/HOL/Divides.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Divides.thy Wed Mar 10 16:40:20 2010 +0100 @@ -376,7 +376,7 @@ end -class ring_div = semiring_div + idom +class ring_div = semiring_div + comm_ring_1 begin text {* Negation respects modular equivalence. 
*} @@ -2353,47 +2353,6 @@ apply (rule Divides.div_less_dividend, simp_all) done -text {* code generator setup *} - -context ring_1 -begin - -lemma of_int_num [code]: - "of_int k = (if k = 0 then 0 else if k < 0 then - - of_int (- k) else let - (l, m) = divmod_int k 2; - l' = of_int l - in if m = 0 then l' + l' else l' + l' + 1)" -proof - - have aux1: "k mod (2\int) \ (0\int) \ - of_int k = of_int (k div 2 * 2 + 1)" - proof - - have "k mod 2 < 2" by (auto intro: pos_mod_bound) - moreover have "0 \ k mod 2" by (auto intro: pos_mod_sign) - moreover assume "k mod 2 \ 0" - ultimately have "k mod 2 = 1" by arith - moreover have "of_int k = of_int (k div 2 * 2 + k mod 2)" by simp - ultimately show ?thesis by auto - qed - have aux2: "\x. of_int 2 * x = x + x" - proof - - fix x - have int2: "(2::int) = 1 + 1" by arith - show "of_int 2 * x = x + x" - unfolding int2 of_int_add left_distrib by simp - qed - have aux3: "\x. x * of_int 2 = x + x" - proof - - fix x - have int2: "(2::int) = 1 + 1" by arith - show "x * of_int 2 = x + x" - unfolding int2 of_int_add right_distrib by simp - qed - from aux1 show ?thesis by (auto simp add: divmod_int_mod_div Let_def aux2 aux3) -qed - -end - lemma zmod_eq_dvd_iff: "(x::int) mod n = y mod n \ n dvd x - y" proof assume H: "x mod n = y mod n" @@ -2482,6 +2441,7 @@ mod_div_equality' [THEN eq_reflection] zmod_zdiv_equality' [THEN eq_reflection] + subsubsection {* Code generation *} definition pdivmod :: "int \ int \ int \ int" where @@ -2515,6 +2475,45 @@ then show ?thesis by (simp add: divmod_int_pdivmod) qed +context ring_1 +begin + +lemma of_int_num [code]: + "of_int k = (if k = 0 then 0 else if k < 0 then + - of_int (- k) else let + (l, m) = divmod_int k 2; + l' = of_int l + in if m = 0 then l' + l' else l' + l' + 1)" +proof - + have aux1: "k mod (2\int) \ (0\int) \ + of_int k = of_int (k div 2 * 2 + 1)" + proof - + have "k mod 2 < 2" by (auto intro: pos_mod_bound) + moreover have "0 \ k mod 2" by (auto intro: pos_mod_sign) + moreover assume "k mod 2 \ 0" + ultimately have "k mod 2 = 1" by arith + moreover have "of_int k = of_int (k div 2 * 2 + k mod 2)" by simp + ultimately show ?thesis by auto + qed + have aux2: "\x. of_int 2 * x = x + x" + proof - + fix x + have int2: "(2::int) = 1 + 1" by arith + show "of_int 2 * x = x + x" + unfolding int2 of_int_add left_distrib by simp + qed + have aux3: "\x. x * of_int 2 = x + x" + proof - + fix x + have int2: "(2::int) = 1 + 1" by arith + show "x * of_int 2 = x + x" + unfolding int2 of_int_add right_distrib by simp + qed + from aux1 show ?thesis by (auto simp add: divmod_int_mod_div Let_def aux2 aux3) +qed + +end + code_modulename SML Divides Arith diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Library/Dlist.thy --- a/src/HOL/Library/Dlist.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Library/Dlist.thy Wed Mar 10 16:40:20 2010 +0100 @@ -123,8 +123,6 @@ "list_of_dlist (filter P dxs) = List.filter P (list_of_dlist dxs)" by (simp add: filter_def) -declare null_def [code] member_def [code] length_def [code] fold_def [code] -- {* explicit is better than implicit *} - section {* Implementation of sets by distinct lists -- canonical! *} diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Library/Efficient_Nat.thy --- a/src/HOL/Library/Efficient_Nat.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Library/Efficient_Nat.thy Wed Mar 10 16:40:20 2010 +0100 @@ -400,7 +400,7 @@ (SML "IntInf.max/ (/0,/ _)") (OCaml "Big'_int.max'_big'_int/ Big'_int.zero'_big'_int") -text {* For Haskell ans Scala, things are slightly different again. 
*} +text {* For Haskell and Scala, things are slightly different again. *} code_const int and nat (Haskell "toInteger" and "fromInteger") diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Nat_Transfer.thy --- a/src/HOL/Nat_Transfer.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Nat_Transfer.thy Wed Mar 10 16:40:20 2010 +0100 @@ -27,17 +27,17 @@ text {* set up transfer direction *} lemma transfer_morphism_nat_int: "transfer_morphism nat (op <= (0::int))" - by (simp add: transfer_morphism_def) + by (fact transfer_morphismI) -declare transfer_morphism_nat_int [transfer - add mode: manual +declare transfer_morphism_nat_int [transfer add + mode: manual return: nat_0_le - labels: natint + labels: nat_int ] text {* basic functions and relations *} -lemma transfer_nat_int_numerals: +lemma transfer_nat_int_numerals [transfer key: transfer_morphism_nat_int]: "(0::nat) = nat 0" "(1::nat) = nat 1" "(2::nat) = nat 2" @@ -52,8 +52,7 @@ lemma tsub_eq: "x >= y \ tsub x y = x - y" by (simp add: tsub_def) - -lemma transfer_nat_int_functions: +lemma transfer_nat_int_functions [transfer key: transfer_morphism_nat_int]: "(x::int) >= 0 \ y >= 0 \ (nat x) + (nat y) = nat (x + y)" "(x::int) >= 0 \ y >= 0 \ (nat x) * (nat y) = nat (x * y)" "(x::int) >= 0 \ y >= 0 \ (nat x) - (nat y) = nat (tsub x y)" @@ -61,7 +60,7 @@ by (auto simp add: eq_nat_nat_iff nat_mult_distrib nat_power_eq tsub_def) -lemma transfer_nat_int_function_closures: +lemma transfer_nat_int_function_closures [transfer key: transfer_morphism_nat_int]: "(x::int) >= 0 \ y >= 0 \ x + y >= 0" "(x::int) >= 0 \ y >= 0 \ x * y >= 0" "(x::int) >= 0 \ y >= 0 \ tsub x y >= 0" @@ -73,7 +72,7 @@ "int z >= 0" by (auto simp add: zero_le_mult_iff tsub_def) -lemma transfer_nat_int_relations: +lemma transfer_nat_int_relations [transfer key: transfer_morphism_nat_int]: "x >= 0 \ y >= 0 \ (nat (x::int) = nat y) = (x = y)" "x >= 0 \ y >= 0 \ @@ -84,13 +83,6 @@ (nat (x::int) dvd nat y) = (x dvd y)" by (auto simp add: zdvd_int) -declare transfer_morphism_nat_int [transfer add return: - transfer_nat_int_numerals - transfer_nat_int_functions - transfer_nat_int_function_closures - transfer_nat_int_relations -] - text {* first-order quantifiers *} @@ -108,7 +100,7 @@ then show "\x. P x" by auto qed -lemma transfer_nat_int_quantifiers: +lemma transfer_nat_int_quantifiers [transfer key: transfer_morphism_nat_int]: "(ALL (x::nat). P x) = (ALL (x::int). x >= 0 \ P (nat x))" "(EX (x::nat). P x) = (EX (x::int). x >= 0 & P (nat x))" by (rule all_nat, rule ex_nat) @@ -123,18 +115,15 @@ by auto declare transfer_morphism_nat_int [transfer add - return: transfer_nat_int_quantifiers cong: all_cong ex_cong] text {* if *} -lemma nat_if_cong: "(if P then (nat x) else (nat y)) = - nat (if P then x else y)" +lemma nat_if_cong [transfer key: transfer_morphism_nat_int]: + "(if P then (nat x) else (nat y)) = nat (if P then x else y)" by auto -declare transfer_morphism_nat_int [transfer add return: nat_if_cong] - text {* operations with sets *} @@ -276,22 +265,18 @@ text {* set up transfer direction *} -lemma transfer_morphism_int_nat: "transfer_morphism int (UNIV :: nat set)" - by (simp add: transfer_morphism_def) +lemma transfer_morphism_int_nat: "transfer_morphism int (\n. 
True)" + by (fact transfer_morphismI) declare transfer_morphism_int_nat [transfer add mode: manual -(* labels: int-nat *) return: nat_int + labels: int_nat ] text {* basic functions and relations *} -lemma UNIV_apply: - "UNIV x = True" - by (simp add: top_fun_eq top_bool_eq) - definition is_nat :: "int \ bool" where @@ -335,7 +320,6 @@ transfer_int_nat_functions transfer_int_nat_function_closures transfer_int_nat_relations - UNIV_apply ] diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Nitpick.thy --- a/src/HOL/Nitpick.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Nitpick.thy Wed Mar 10 16:40:20 2010 +0100 @@ -36,10 +36,12 @@ and bisim :: "bisim_iterator \ 'a \ 'a \ bool" and bisim_iterator_max :: bisim_iterator and Quot :: "'a \ 'b" - and Tha :: "('a \ bool) \ 'a" + and safe_The :: "('a \ bool) \ 'a" + and safe_Eps :: "('a \ bool) \ 'a" +datatype ('a, 'b) fin_fun = FinFun "('a \ 'b)" +datatype ('a, 'b) fun_box = FunBox "('a \ 'b)" datatype ('a, 'b) pair_box = PairBox 'a 'b -datatype ('a, 'b) fun_box = FunBox "('a \ 'b)" typedecl unsigned_bit typedecl signed_bit @@ -220,8 +222,8 @@ use "Tools/Nitpick/kodkod_sat.ML" use "Tools/Nitpick/nitpick_util.ML" use "Tools/Nitpick/nitpick_hol.ML" +use "Tools/Nitpick/nitpick_mono.ML" use "Tools/Nitpick/nitpick_preproc.ML" -use "Tools/Nitpick/nitpick_mono.ML" use "Tools/Nitpick/nitpick_scope.ML" use "Tools/Nitpick/nitpick_peephole.ML" use "Tools/Nitpick/nitpick_rep.ML" @@ -236,11 +238,13 @@ setup {* Nitpick_Isar.setup *} hide (open) const unknown is_unknown undefined_fast_The undefined_fast_Eps bisim - bisim_iterator_max Quot Tha PairBox FunBox Word refl' wf' wf_wfrec wf_wfrec' - wfrec' card' setsum' fold_graph' nat_gcd nat_lcm int_gcd int_lcm Frac - Abs_Frac Rep_Frac zero_frac one_frac num denom norm_frac frac plus_frac - times_frac uminus_frac number_of_frac inverse_frac less_eq_frac of_frac -hide (open) type bisim_iterator pair_box fun_box unsigned_bit signed_bit word + bisim_iterator_max Quot safe_The safe_Eps FinFun FunBox PairBox Word refl' + wf' wf_wfrec wf_wfrec' wfrec' card' setsum' fold_graph' nat_gcd nat_lcm + int_gcd int_lcm Frac Abs_Frac Rep_Frac zero_frac one_frac num denom + norm_frac frac plus_frac times_frac uminus_frac number_of_frac inverse_frac + less_eq_frac of_frac +hide (open) type bisim_iterator fin_fun fun_box pair_box unsigned_bit signed_bit + word hide (open) fact If_def Ex1_def rtrancl_def rtranclp_def tranclp_def refl'_def wf'_def wf_wfrec'_def wfrec'_def card'_def setsum'_def fold_graph'_def The_psimp Eps_psimp unit_case_def nat_case_def list_size_simp nat_gcd_def diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Nitpick_Examples/Induct_Nits.thy --- a/src/HOL/Nitpick_Examples/Induct_Nits.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Nitpick_Examples/Induct_Nits.thy Wed Mar 10 16:40:20 2010 +0100 @@ -61,7 +61,7 @@ lemma "q2 = {}" nitpick [expect = genuine] nitpick [dont_star_linear_preds, expect = genuine] -nitpick [wf, expect = likely_genuine] +nitpick [wf, expect = quasi_genuine] oops lemma "p2 = UNIV" @@ -72,7 +72,7 @@ lemma "q2 = UNIV" nitpick [expect = none] nitpick [dont_star_linear_preds, expect = none] -nitpick [wf, expect = likely_genuine] +nitpick [wf, expect = quasi_genuine] sorry lemma "p2 = q2" diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Nitpick_Examples/Manual_Nits.thy --- a/src/HOL/Nitpick_Examples/Manual_Nits.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Nitpick_Examples/Manual_Nits.thy Wed Mar 10 16:40:20 2010 +0100 @@ -8,7 +8,7 @@ header {* Examples from the Nitpick Manual *} theory Manual_Nits -imports 
Main Coinductive_List Quotient_Product RealDef +imports Main Quotient_Product RealDef begin chapter {* 3. First Steps *} @@ -18,29 +18,29 @@ subsection {* 3.1. Propositional Logic *} lemma "P \ Q" -nitpick +nitpick [expect = genuine] apply auto -nitpick 1 -nitpick 2 +nitpick [expect = genuine] 1 +nitpick [expect = genuine] 2 oops subsection {* 3.2. Type Variables *} lemma "P x \ P (THE y. P y)" -nitpick [verbose] +nitpick [verbose, expect = genuine] oops subsection {* 3.3. Constants *} lemma "P x \ P (THE y. P y)" -nitpick [show_consts] -nitpick [full_descrs, show_consts] -nitpick [dont_specialize, full_descrs, show_consts] +nitpick [show_consts, expect = genuine] +nitpick [full_descrs, show_consts, expect = genuine] +nitpick [dont_specialize, full_descrs, show_consts, expect = genuine] oops lemma "\!x. P x \ P (THE y. P y)" -nitpick -nitpick [card 'a = 1-50] +nitpick [expect = none] +nitpick [card 'a = 1\50, expect = none] (* sledgehammer *) apply (metis the_equality) done @@ -48,45 +48,45 @@ subsection {* 3.4. Skolemization *} lemma "\g. \x. g (f x) = x \ \y. \x. y = f x" -nitpick +nitpick [expect = genuine] oops lemma "\x. \f. f x = x" -nitpick +nitpick [expect = genuine] oops lemma "refl r \ sym r" -nitpick +nitpick [expect = genuine] oops subsection {* 3.5. Natural Numbers and Integers *} lemma "\i \ j; n \ (m\int)\ \ i * n + j * m \ i * m + j * n" -nitpick +nitpick [expect = genuine] oops lemma "\n. Suc n \ n \ P" -nitpick [card nat = 100, check_potential] +nitpick [card nat = 100, check_potential, expect = genuine] oops lemma "P Suc" -nitpick +nitpick [expect = none] oops lemma "P (op +\nat\nat\nat)" -nitpick [card nat = 1] -nitpick [card nat = 2] +nitpick [card nat = 1, expect = genuine] +nitpick [card nat = 2, expect = none] oops subsection {* 3.6. Inductive Datatypes *} lemma "hd (xs @ [y, y]) = hd xs" -nitpick -nitpick [show_consts, show_datatypes] +nitpick [expect = genuine] +nitpick [show_consts, show_datatypes, expect = genuine] oops lemma "\length xs = 1; length ys = 1\ \ xs = ys" -nitpick [show_datatypes] +nitpick [show_datatypes, expect = genuine] oops subsection {* 3.7. Typedefs, Records, Rationals, and Reals *} @@ -99,7 +99,7 @@ definition C :: three where "C \ Abs_three 2" lemma "\P A; P B\ \ P x" -nitpick [show_datatypes] +nitpick [show_datatypes, expect = genuine] oops fun my_int_rel where @@ -114,7 +114,7 @@ quotient_definition "add\my_int \ my_int \ my_int" is add_raw lemma "add x y = add x x" -nitpick [show_datatypes] +nitpick [show_datatypes, expect = genuine] oops record point = @@ -122,11 +122,11 @@ Ycoord :: int lemma "Xcoord (p\point) = Xcoord (q\point)" -nitpick [show_datatypes] +nitpick [show_datatypes, expect = genuine] oops lemma "4 * x + 3 * (y\real) \ 1 / 2" -nitpick [show_datatypes] +nitpick [show_datatypes, expect = genuine] oops subsection {* 3.8. Inductive and Coinductive Predicates *} @@ -136,11 +136,11 @@ "even n \ even (Suc (Suc n))" lemma "\n. even n \ even (Suc n)" -nitpick [card nat = 100, unary_ints, verbose] +nitpick [card nat = 100, unary_ints, verbose, expect = potential] oops lemma "\n \ 99. even n \ even (Suc n)" -nitpick [card nat = 100, unary_ints, verbose] +nitpick [card nat = 100, unary_ints, verbose, expect = genuine] oops inductive even' where @@ -149,18 +149,18 @@ "\even' m; even' n\ \ even' (m + n)" lemma "\n \ {0, 2, 4, 6, 8}. 
\ even' n" -nitpick [card nat = 10, unary_ints, verbose, show_consts] +nitpick [card nat = 10, unary_ints, verbose, show_consts, expect = genuine] oops lemma "even' (n - 2) \ even' n" -nitpick [card nat = 10, show_consts] +nitpick [card nat = 10, show_consts, expect = genuine] oops coinductive nats where "nats (x\nat) \ nats x" lemma "nats = {0, 1, 2, 3, 4}" -nitpick [card nat = 10, show_consts] +nitpick [card nat = 10, show_consts, expect = genuine] oops inductive odd where @@ -168,23 +168,46 @@ "\odd m; even n\ \ odd (m + n)" lemma "odd n \ odd (n - 2)" -nitpick [card nat = 10, show_consts] +nitpick [card nat = 10, show_consts, expect = genuine] oops subsection {* 3.9. Coinductive Datatypes *} +(* Lazy lists are defined in Andreas Lochbihler's "Coinductive" AFP entry. Since +we cannot rely on its presence, we expediently provide our own axiomatization. +The examples also work unchanged with Lochbihler's "Coinductive_List" theory. *) + +typedef 'a llist = "UNIV\('a list + (nat \ 'a)) set" by auto + +definition LNil where +"LNil = Abs_llist (Inl [])" +definition LCons where +"LCons y ys = Abs_llist (case Rep_llist ys of + Inl ys' \ Inl (y # ys') + | Inr f \ Inr (\n. case n of 0 \ y | Suc m \ f m))" + +axiomatization iterates :: "('a \ 'a) \ 'a \ 'a llist" + +lemma iterates_def [nitpick_simp]: +"iterates f a = LCons a (iterates f (f a))" +sorry + +setup {* +Nitpick.register_codatatype @{typ "'a llist"} "" + (map dest_Const [@{term LNil}, @{term LCons}]) +*} + lemma "xs \ LCons a xs" -nitpick +nitpick [expect = genuine] oops lemma "\xs = LCons a xs; ys = iterates (\b. a) b\ \ xs = ys" -nitpick [verbose] -nitpick [bisim_depth = -1, verbose] +nitpick [verbose, expect = genuine] oops lemma "\xs = LCons a xs; ys = LCons a ys\ \ xs = ys" -nitpick [bisim_depth = -1, show_datatypes] -nitpick +nitpick [bisim_depth = -1, show_datatypes, expect = quasi_genuine] +nitpick [expect = none] sorry subsection {* 3.10. Boxing *} @@ -208,9 +231,9 @@ "subst\<^isub>1 \ (App t u) = App (subst\<^isub>1 \ t) (subst\<^isub>1 \ u)" lemma "\ loose t 0 \ subst\<^isub>1 \ t = t" -nitpick [verbose] -nitpick [eval = "subst\<^isub>1 \ t"] -(* nitpick [dont_box] *) +nitpick [verbose, expect = genuine] +nitpick [eval = "subst\<^isub>1 \ t", expect = genuine] +(* nitpick [dont_box, expect = unknown] *) oops primrec subst\<^isub>2 where @@ -220,19 +243,19 @@ "subst\<^isub>2 \ (App t u) = App (subst\<^isub>2 \ t) (subst\<^isub>2 \ u)" lemma "\ loose t 0 \ subst\<^isub>2 \ t = t" -nitpick [card = 1\6] +nitpick [card = 1\6, expect = none] sorry subsection {* 3.11. Scope Monotonicity *} lemma "length xs = length ys \ rev (zip xs ys) = zip xs (rev ys)" -nitpick [verbose] -nitpick [card = 8, verbose] +nitpick [verbose, expect = genuine] +nitpick [card = 8, verbose, expect = genuine] oops lemma "\g. \x\'b. g (f x) = x \ \y\'a. \x. y = f x" -nitpick [mono] -nitpick +nitpick [mono, expect = none] +nitpick [expect = genuine] oops subsection {* 3.12. 
Inductive Properties *} @@ -243,21 +266,21 @@ "n \ reach \ n + 2 \ reach" lemma "n \ reach \ 2 dvd n" -nitpick [unary_ints] +nitpick [unary_ints, expect = none] apply (induct set: reach) apply auto - nitpick + nitpick [expect = none] apply (thin_tac "n \ reach") - nitpick + nitpick [expect = genuine] oops lemma "n \ reach \ 2 dvd n \ n \ 0" -nitpick [unary_ints] +nitpick [unary_ints, expect = none] apply (induct set: reach) apply auto - nitpick + nitpick [expect = none] apply (thin_tac "n \ reach") - nitpick + nitpick [expect = genuine] oops lemma "n \ reach \ 2 dvd n \ n \ 4" @@ -275,13 +298,13 @@ "swap (Branch t u) a b = Branch (swap t a b) (swap u a b)" lemma "{a, b} \ labels t \ labels (swap t a b) = labels t" -nitpick +(* nitpick *) proof (induct t) case Leaf thus ?case by simp next case (Branch t u) thus ?case - nitpick - nitpick [non_std, show_all] + (* nitpick *) + nitpick [non_std, show_all, expect = genuine] oops lemma "labels (swap t a b) = @@ -316,7 +339,7 @@ theorem S\<^isub>1_sound: "w \ S\<^isub>1 \ length [x \ w. x = a] = length [x \ w. x = b]" -nitpick +nitpick [expect = genuine] oops inductive_set S\<^isub>2 and A\<^isub>2 and B\<^isub>2 where @@ -329,7 +352,7 @@ theorem S\<^isub>2_sound: "w \ S\<^isub>2 \ length [x \ w. x = a] = length [x \ w. x = b]" -nitpick +nitpick [expect = genuine] oops inductive_set S\<^isub>3 and A\<^isub>3 and B\<^isub>3 where @@ -347,7 +370,7 @@ theorem S\<^isub>3_complete: "length [x \ w. x = a] = length [x \ w. x = b] \ w \ S\<^isub>3" -nitpick +nitpick [expect = genuine] oops inductive_set S\<^isub>4 and A\<^isub>4 and B\<^isub>4 where @@ -443,11 +466,11 @@ (if x > y then insort\<^isub>1 u x else u))" theorem wf_insort\<^isub>1: "wf t \ wf (insort\<^isub>1 t x)" -nitpick +nitpick [expect = genuine] oops theorem wf_insort\<^isub>1_nat: "wf t \ wf (insort\<^isub>1 t (x\nat))" -nitpick [eval = "insort\<^isub>1 t x"] +nitpick [eval = "insort\<^isub>1 t x", expect = genuine] oops primrec insort\<^isub>2 where diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Nominal/nominal_fresh_fun.ML --- a/src/HOL/Nominal/nominal_fresh_fun.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Nominal/nominal_fresh_fun.ML Wed Mar 10 16:40:20 2010 +0100 @@ -58,7 +58,7 @@ val goal = List.nth(prems_of thm, i-1); val ps = Logic.strip_params goal; val Ts = rev (map snd ps); - fun is_of_fs_name T = Sign.of_sort thy (T, Sign.intern_sort thy ["fs_"^atom_basename]); + fun is_of_fs_name T = Sign.of_sort thy (T, [Sign.intern_class thy ("fs_"^atom_basename)]); (* rebuild de bruijn indices *) val bvs = map_index (Bound o fst) ps; (* select variables of the right class *) diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/HISTORY --- a/src/HOL/Tools/Nitpick/HISTORY Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/HISTORY Wed Mar 10 16:40:20 2010 +0100 @@ -2,13 +2,13 @@ * Added and implemented "binary_ints" and "bits" options * Added "std" option and implemented support for nonstandard models + * Added and implemented "finitize" option to improve the precision of infinite + datatypes based on a monotonicity analysis * Added support for quotient types - * Added support for local definitions - * Disabled "fast_descrs" option by default + * Added support for local definitions (for "function" and "termination" + proofs) * Optimized "Multiset.multiset" and "FinFun.finfun" * Improved efficiency of "destroy_constrs" optimization - * Improved precision of infinite datatypes whose constructors don't appear - in the formula to falsify based on a monotonicity analysis * Fixed 
soundness bugs related to "destroy_constrs" optimization and record getters * Renamed "MiniSatJNI", "zChaffJNI", "BerkMinAlloy", and "SAT4JLight" to diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/minipick.ML --- a/src/HOL/Tools/Nitpick/minipick.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/minipick.ML Wed Mar 10 16:40:20 2010 +0100 @@ -36,8 +36,10 @@ datatype rep = SRep | RRep (* Proof.context -> typ -> unit *) -fun check_type ctxt (Type ("fun", Ts)) = List.app (check_type ctxt) Ts - | check_type ctxt (Type ("*", Ts)) = List.app (check_type ctxt) Ts +fun check_type ctxt (Type (@{type_name fun}, Ts)) = + List.app (check_type ctxt) Ts + | check_type ctxt (Type (@{type_name "*"}, Ts)) = + List.app (check_type ctxt) Ts | check_type _ @{typ bool} = () | check_type _ (TFree (_, @{sort "{}"})) = () | check_type _ (TFree (_, @{sort HOL.type})) = () @@ -45,13 +47,14 @@ raise NOT_SUPPORTED ("type " ^ quote (Syntax.string_of_typ ctxt T)) (* rep -> (typ -> int) -> typ -> int list *) -fun atom_schema_of SRep card (Type ("fun", [T1, T2])) = +fun atom_schema_of SRep card (Type (@{type_name fun}, [T1, T2])) = replicate_list (card T1) (atom_schema_of SRep card T2) - | atom_schema_of RRep card (Type ("fun", [T1, @{typ bool}])) = + | atom_schema_of RRep card (Type (@{type_name fun}, [T1, @{typ bool}])) = atom_schema_of SRep card T1 - | atom_schema_of RRep card (Type ("fun", [T1, T2])) = + | atom_schema_of RRep card (Type (@{type_name fun}, [T1, T2])) = atom_schema_of SRep card T1 @ atom_schema_of RRep card T2 - | atom_schema_of _ card (Type ("*", Ts)) = maps (atom_schema_of SRep card) Ts + | atom_schema_of _ card (Type (@{type_name "*"}, Ts)) = + maps (atom_schema_of SRep card) Ts | atom_schema_of _ card T = [card T] (* rep -> (typ -> int) -> typ -> int *) val arity_of = length ooo atom_schema_of @@ -89,7 +92,7 @@ fun kodkod_formula_from_term ctxt card frees = let (* typ -> rel_expr -> rel_expr *) - fun R_rep_from_S_rep (T as Type ("fun", [T1, @{typ bool}])) r = + fun R_rep_from_S_rep (T as Type (@{type_name fun}, [T1, @{typ bool}])) r = let val jss = atom_schema_of SRep card T1 |> map (rpair 0) |> all_combinations @@ -100,7 +103,7 @@ (index_seq 0 (length jss)) jss |> foldl1 Union end - | R_rep_from_S_rep (Type ("fun", [T1, T2])) r = + | R_rep_from_S_rep (Type (@{type_name fun}, [T1, T2])) r = let val jss = atom_schema_of SRep card T1 |> map (rpair 0) |> all_combinations @@ -115,7 +118,7 @@ end | R_rep_from_S_rep _ r = r (* typ list -> typ -> rel_expr -> rel_expr *) - fun S_rep_from_R_rep Ts (T as Type ("fun", _)) r = + fun S_rep_from_R_rep Ts (T as Type (@{type_name fun}, _)) r = Comprehension (decls_for SRep card Ts T, RelEq (R_rep_from_S_rep T (rel_expr_for_bound_var card SRep (T :: Ts) 0), r)) @@ -137,7 +140,9 @@ | Const (@{const_name "op ="}, _) $ t1 $ t2 => RelEq (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name ord_class.less_eq}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])) + $ t1 $ t2 => Subset (to_R_rep Ts t1, to_R_rep Ts t2) | @{const "op &"} $ t1 $ t2 => And (to_F Ts t1, to_F Ts t2) | @{const "op |"} $ t1 $ t2 => Or (to_F Ts t1, to_F Ts t2) @@ -179,7 +184,8 @@ | Const (@{const_name "op ="}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name "op ="}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name ord_class.less_eq}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ _ => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ 
bool}]), _])) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name ord_class.less_eq}, _) => to_R_rep Ts (eta_expand Ts t 2) @@ -190,7 +196,7 @@ | @{const "op -->"} $ _ => to_R_rep Ts (eta_expand Ts t 1) | @{const "op -->"} => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name bot_class.bot}, - T as Type ("fun", [_, @{typ bool}])) => + T as Type (@{type_name fun}, [_, @{typ bool}])) => empty_n_ary_rel (arity_of RRep card T) | Const (@{const_name insert}, _) $ t1 $ t2 => Union (to_S_rep Ts t1, to_R_rep Ts t2) @@ -203,27 +209,35 @@ raise NOT_SUPPORTED "transitive closure for function or pair type" | Const (@{const_name trancl}, _) => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name semilattice_inf_class.inf}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])) + $ t1 $ t2 => Intersect (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name semilattice_inf_class.inf}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name semilattice_inf_class.inf}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name semilattice_sup_class.sup}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])) + $ t1 $ t2 => Union (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name semilattice_sup_class.sup}, _) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name semilattice_sup_class.sup}, _) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name minus_class.minus}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ t1 $ t2 => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])) + $ t1 $ t2 => Difference (to_R_rep Ts t1, to_R_rep Ts t2) | Const (@{const_name minus_class.minus}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) $ _ => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])) $ _ => to_R_rep Ts (eta_expand Ts t 1) | Const (@{const_name minus_class.minus}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])) => + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])) => to_R_rep Ts (eta_expand Ts t 2) | Const (@{const_name Pair}, _) $ _ $ _ => raise SAME () | Const (@{const_name Pair}, _) $ _ => raise SAME () @@ -277,7 +291,8 @@ end (* (typ -> int) -> typ list -> typ -> rel_expr -> formula *) -fun declarative_axiom_for_rel_expr card Ts (Type ("fun", [T1, T2])) r = +fun declarative_axiom_for_rel_expr card Ts (Type (@{type_name fun}, [T1, T2])) + r = if body_type T2 = bool_T then True else @@ -294,8 +309,9 @@ let val thy = ProofContext.theory_of ctxt (* typ -> int *) - fun card (Type ("fun", [T1, T2])) = reasonable_power (card T2) (card T1) - | card (Type ("*", [T1, T2])) = card T1 * card T2 + fun card (Type (@{type_name fun}, [T1, T2])) = + reasonable_power (card T2) (card T1) + | card (Type (@{type_name "*"}, [T1, T2])) = card T1 * card T2 | card @{typ bool} = 2 | card T = Int.max (1, raw_card T) val neg_t = @{const Not} $ Object_Logic.atomize_term thy t diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick.ML --- a/src/HOL/Tools/Nitpick/nitpick.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick.ML Wed Mar 10 16:40:20 2010 +0100 @@ -15,6 +15,7 @@ bitss: int list, bisim_depths: int list, boxes: (typ option * bool option) list, + finitizes: (typ option * bool option) list, monos: (typ option * bool option) list, stds: (typ option * bool) list, wfs: (styp option * bool option) list, @@ 
-87,6 +88,7 @@ bitss: int list, bisim_depths: int list, boxes: (typ option * bool option) list, + finitizes: (typ option * bool option) list, monos: (typ option * bool option) list, stds: (typ option * bool) list, wfs: (styp option * bool option) list, @@ -200,13 +202,13 @@ error "You must import the theory \"Nitpick\" to use Nitpick" *) val {cards_assigns, maxes_assigns, iters_assigns, bitss, bisim_depths, - boxes, monos, stds, wfs, sat_solver, falsify, debug, verbose, overlord, - user_axioms, assms, merge_type_vars, binary_ints, destroy_constrs, - specialize, skolemize, star_linear_preds, uncurry, fast_descrs, - peephole_optim, tac_timeout, sym_break, sharing_depth, flatten_props, - max_threads, show_skolems, show_datatypes, show_consts, evals, formats, - max_potential, max_genuine, check_potential, check_genuine, batch_size, - ...} = + boxes, finitizes, monos, stds, wfs, sat_solver, falsify, debug, + verbose, overlord, user_axioms, assms, merge_type_vars, binary_ints, + destroy_constrs, specialize, skolemize, star_linear_preds, uncurry, + fast_descrs, peephole_optim, tac_timeout, sym_break, sharing_depth, + flatten_props, max_threads, show_skolems, show_datatypes, show_consts, + evals, formats, max_potential, max_genuine, check_potential, + check_genuine, batch_size, ...} = params val state_ref = Unsynchronized.ref state (* Pretty.T -> unit *) @@ -289,7 +291,7 @@ val _ = null (Term.add_tvars assms_t []) orelse raise NOT_SUPPORTED "schematic type variables" val (nondef_ts, def_ts, got_all_mono_user_axioms, no_poly_user_axioms, - binarize) = preprocess_term hol_ctxt assms_t + binarize) = preprocess_term hol_ctxt finitizes monos assms_t val got_all_user_axioms = got_all_mono_user_axioms andalso no_poly_user_axioms @@ -321,7 +323,9 @@ fold add_free_and_const_names (nondef_us @ def_us) ([], []) val (sel_names, nonsel_names) = List.partition (is_sel o nickname_of) const_names - val genuine_means_genuine = got_all_user_axioms andalso none_true wfs + val sound_finitizes = + none_true (filter_out (fn (SOME (Type (@{type_name fun}, _)), _) => true + | _ => false) finitizes) val standard = forall snd stds (* val _ = List.app (priority o string_for_nut ctxt) (nondef_us @ def_us) @@ -340,21 +344,30 @@ ". 
" ^ extra end (* typ -> bool *) - fun is_type_always_monotonic T = + fun is_type_fundamentally_monotonic T = (is_datatype thy stds T andalso not (is_quot_type thy T) andalso (not (is_pure_typedef thy T) orelse is_univ_typedef thy T)) orelse is_number_type thy T orelse is_bit_type T fun is_type_actually_monotonic T = formulas_monotonic hol_ctxt binarize T (nondef_ts, def_ts) + fun is_type_kind_of_monotonic T = + case triple_lookup (type_match thy) monos T of + SOME (SOME false) => false + | _ => is_type_actually_monotonic T fun is_type_monotonic T = unique_scope orelse case triple_lookup (type_match thy) monos T of SOME (SOME b) => b - | _ => is_type_always_monotonic T orelse is_type_actually_monotonic T - fun is_type_finitizable T = - case triple_lookup (type_match thy) monos T of - SOME (SOME false) => false - | _ => is_type_actually_monotonic T + | _ => is_type_fundamentally_monotonic T orelse + is_type_actually_monotonic T + fun is_shallow_datatype_finitizable (T as Type (@{type_name fin_fun}, _)) = + is_type_kind_of_monotonic T + | is_shallow_datatype_finitizable (T as Type (@{type_name fun_box}, _)) = + is_type_kind_of_monotonic T + | is_shallow_datatype_finitizable T = + case triple_lookup (type_match thy) finitizes T of + SOME (SOME b) => b + | _ => is_type_kind_of_monotonic T fun is_datatype_deep T = not standard orelse T = nat_T orelse is_word_type T orelse exists (curry (op =) T o domain_type o type_of) sel_names @@ -371,7 +384,7 @@ val (mono_Ts, nonmono_Ts) = List.partition is_type_monotonic all_Ts val _ = if verbose andalso not unique_scope then - case filter_out is_type_always_monotonic mono_Ts of + case filter_out is_type_fundamentally_monotonic mono_Ts of [] => () | interesting_mono_Ts => print_v (K (monotonicity_message interesting_mono_Ts @@ -382,7 +395,7 @@ all_Ts |> filter (is_datatype thy stds) |> List.partition is_datatype_deep val finitizable_dataTs = shallow_dataTs |> filter_out (is_finite_type hol_ctxt) - |> filter is_type_finitizable + |> filter is_shallow_datatype_finitizable val _ = if verbose andalso not (null finitizable_dataTs) then print_v (K (monotonicity_message finitizable_dataTs @@ -588,7 +601,7 @@ fun show_kodkod_warning "" = () | show_kodkod_warning s = print_m (fn () => "Kodkod warning: " ^ s ^ ".") - (* bool -> KK.raw_bound list -> problem_extension -> bool option *) + (* bool -> KK.raw_bound list -> problem_extension -> bool * bool option *) fun print_and_check_model genuine bounds ({free_names, sel_names, nonsel_names, rel_table, scope, ...} : problem_extension) = @@ -599,117 +612,126 @@ show_consts = show_consts} scope formats frees free_names sel_names nonsel_names rel_table bounds - val genuine_means_genuine = genuine_means_genuine andalso codatatypes_ok + val genuine_means_genuine = + got_all_user_axioms andalso none_true wfs andalso + sound_finitizes andalso codatatypes_ok in - pprint (Pretty.chunks - [Pretty.blk (0, - (pstrs ("Nitpick found a" ^ - (if not genuine then " potential " - else if genuine_means_genuine then " " - else " likely genuine ") ^ das_wort_model) @ - (case pretties_for_scope scope verbose of - [] => [] - | pretties => pstrs " for " @ pretties) @ - [Pretty.str ":\n"])), - Pretty.indent indent_size reconstructed_model]); - if genuine then - (if check_genuine andalso standard then - (case prove_hol_model scope tac_timeout free_names sel_names + (pprint (Pretty.chunks + [Pretty.blk (0, + (pstrs ("Nitpick found a" ^ + (if not genuine then " potential " + else if genuine_means_genuine then " " + else " quasi genuine ") ^ 
das_wort_model) @ + (case pretties_for_scope scope verbose of + [] => [] + | pretties => pstrs " for " @ pretties) @ + [Pretty.str ":\n"])), + Pretty.indent indent_size reconstructed_model]); + if genuine then + (if check_genuine andalso standard then + case prove_hol_model scope tac_timeout free_names sel_names rel_table bounds assms_t of - SOME true => print ("Confirmation by \"auto\": The above " ^ - das_wort_model ^ " is really genuine.") + SOME true => + print ("Confirmation by \"auto\": The above " ^ + das_wort_model ^ " is really genuine.") | SOME false => if genuine_means_genuine then error ("A supposedly genuine " ^ das_wort_model ^ " was \ \shown to be spurious by \"auto\".\nThis should never \ \happen.\nPlease send a bug report to blanchet\ \te@in.tum.de.") - else - print ("Refutation by \"auto\": The above " ^ das_wort_model ^ - " is spurious.") - | NONE => print "No confirmation by \"auto\".") - else - (); - if not standard andalso likely_in_struct_induct_step then - print "The existence of a nonstandard model suggests that the \ - \induction hypothesis is not general enough or perhaps even \ - \wrong. See the \"Inductive Properties\" section of the \ - \Nitpick manual for details (\"isabelle doc nitpick\")." - else - (); - if has_syntactic_sorts orig_t then - print "Hint: Maybe you forgot a type constraint?" + else + print ("Refutation by \"auto\": The above " ^ + das_wort_model ^ " is spurious.") + | NONE => print "No confirmation by \"auto\"." + else + (); + if not standard andalso likely_in_struct_induct_step then + print "The existence of a nonstandard model suggests that the \ + \induction hypothesis is not general enough or perhaps \ + \even wrong. See the \"Inductive Properties\" section of \ + \the Nitpick manual for details (\"isabelle doc nitpick\")." + else + (); + if has_syntactic_sorts orig_t then + print "Hint: Maybe you forgot a type constraint?" + else + (); + if not genuine_means_genuine then + if no_poly_user_axioms then + let + val options = + [] |> not got_all_mono_user_axioms + ? cons ("user_axioms", "\"true\"") + |> not (none_true wfs) + ? cons ("wf", "\"smart\" or \"false\"") + |> not sound_finitizes + ? cons ("finitize", "\"smart\" or \"false\"") + |> not codatatypes_ok + ? cons ("bisim_depth", "a nonnegative value") + val ss = + map (fn (name, value) => quote name ^ " set to " ^ value) + options + in + print ("Try again with " ^ + space_implode " " (serial_commas "and" ss) ^ + " to confirm that the " ^ das_wort_model ^ + " is genuine.") + end + else + print ("Nitpick is unable to guarantee the authenticity of \ + \the " ^ das_wort_model ^ " in the presence of \ + \polymorphic axioms.") + else + (); + NONE) + else + if not genuine then + (Unsynchronized.inc met_potential; + if check_potential then + let + val status = prove_hol_model scope tac_timeout free_names + sel_names rel_table bounds assms_t + in + (case status of + SOME true => print ("Confirmation by \"auto\": The \ + \above " ^ das_wort_model ^ + " is genuine.") + | SOME false => print ("Refutation by \"auto\": The above " ^ + das_wort_model ^ " is spurious.") + | NONE => print "No confirmation by \"auto\"."); + status + end + else + NONE) else - (); - if not genuine_means_genuine then - if no_poly_user_axioms then - let - val options = - [] |> not got_all_mono_user_axioms - ? cons ("user_axioms", "\"true\"") - |> not (none_true wfs) - ? cons ("wf", "\"smart\" or \"false\"") - |> not codatatypes_ok - ? 
cons ("bisim_depth", "a nonnegative value") - val ss = - map (fn (name, value) => quote name ^ " set to " ^ value) - options - in - print ("Try again with " ^ - space_implode " " (serial_commas "and" ss) ^ - " to confirm that the " ^ das_wort_model ^ - " is genuine.") - end - else - print ("Nitpick is unable to guarantee the authenticity of \ - \the " ^ das_wort_model ^ " in the presence of \ - \polymorphic axioms.") - else - (); - NONE) - else - if not genuine then - (Unsynchronized.inc met_potential; - if check_potential then - let - val status = prove_hol_model scope tac_timeout free_names - sel_names rel_table bounds assms_t - in - (case status of - SOME true => print ("Confirmation by \"auto\": The above " ^ - das_wort_model ^ " is genuine.") - | SOME false => print ("Refutation by \"auto\": The above " ^ - das_wort_model ^ " is spurious.") - | NONE => print "No confirmation by \"auto\"."); - status - end - else - NONE) - else - NONE + NONE) + |> pair genuine_means_genuine end - (* int -> int -> int -> bool -> rich_problem list -> int * int * int *) - fun solve_any_problem max_potential max_genuine donno first_time problems = + (* bool * int * int * int -> bool -> rich_problem list + -> bool * int * int * int *) + fun solve_any_problem (found_really_genuine, max_potential, max_genuine, + donno) first_time problems = let val max_potential = Int.max (0, max_potential) val max_genuine = Int.max (0, max_genuine) - (* bool -> int * KK.raw_bound list -> bool option *) + (* bool -> int * KK.raw_bound list -> bool * bool option *) fun print_and_check genuine (j, bounds) = print_and_check_model genuine bounds (snd (nth problems j)) val max_solutions = max_potential + max_genuine |> not need_incremental ? curry Int.min 1 in if max_solutions <= 0 then - (0, 0, donno) + (found_really_genuine, 0, 0, donno) else case KK.solve_any_problem overlord deadline max_threads max_solutions (map fst problems) of KK.NotInstalled => (print_m install_kodkodi_message; - (max_potential, max_genuine, donno + 1)) + (found_really_genuine, max_potential, max_genuine, donno + 1)) | KK.Normal ([], unsat_js, s) => (update_checked_problems problems unsat_js; show_kodkod_warning s; - (max_potential, max_genuine, donno)) + (found_really_genuine, max_potential, max_genuine, donno)) | KK.Normal (sat_ps, unsat_js, s) => let val (lib_ps, con_ps) = @@ -719,16 +741,19 @@ show_kodkod_warning s; if null con_ps then let - val num_genuine = take max_potential lib_ps - |> map (print_and_check false) - |> filter (curry (op =) (SOME true)) - |> length + val genuine_codes = + lib_ps |> take max_potential + |> map (print_and_check false) + |> filter (curry (op =) (SOME true) o snd) + val found_really_genuine = + found_really_genuine orelse exists fst genuine_codes + val num_genuine = length genuine_codes val max_genuine = max_genuine - num_genuine val max_potential = max_potential - (length lib_ps - num_genuine) in if max_genuine <= 0 then - (0, 0, donno) + (found_really_genuine, 0, 0, donno) else let (* "co_js" is the list of sound problems whose unsound @@ -748,18 +773,21 @@ |> max_potential <= 0 ? 
filter_out (#unsound o snd) in - solve_any_problem max_potential max_genuine donno false - problems + solve_any_problem (found_really_genuine, max_potential, + max_genuine, donno) false problems end end else let - val _ = take max_genuine con_ps - |> List.app (ignore o print_and_check true) - val max_genuine = max_genuine - length con_ps + val genuine_codes = + con_ps |> take max_genuine + |> map (print_and_check true) + val max_genuine = max_genuine - length genuine_codes + val found_really_genuine = + found_really_genuine orelse exists fst genuine_codes in if max_genuine <= 0 orelse not first_time then - (0, max_genuine, donno) + (found_really_genuine, 0, max_genuine, donno) else let val bye_js = sort_distinct int_ord @@ -767,7 +795,10 @@ val problems = problems |> filter_out_indices bye_js |> filter_out (#unsound o snd) - in solve_any_problem 0 max_genuine donno false problems end + in + solve_any_problem (found_really_genuine, 0, max_genuine, + donno) false problems + end end end | KK.TimedOut unsat_js => @@ -778,11 +809,13 @@ | KK.Error (s, unsat_js) => (update_checked_problems problems unsat_js; print_v (K ("Kodkod error: " ^ s ^ ".")); - (max_potential, max_genuine, donno + 1)) + (found_really_genuine, max_potential, max_genuine, donno + 1)) end - (* int -> int -> scope list -> int * int * int -> int * int * int *) - fun run_batch j n scopes (max_potential, max_genuine, donno) = + (* int -> int -> scope list -> bool * int * int * int + -> bool * int * int * int *) + fun run_batch j n scopes (found_really_genuine, max_potential, max_genuine, + donno) = let val _ = if null scopes then @@ -852,7 +885,8 @@ else () in - solve_any_problem max_potential max_genuine donno true (rev problems) + solve_any_problem (found_really_genuine, max_potential, max_genuine, + donno) true (rev problems) end (* rich_problem list -> scope -> int *) @@ -884,8 +918,9 @@ "") ^ "." end - (* int -> int -> scope list -> int * int * int -> KK.outcome *) - fun run_batches _ _ [] (max_potential, max_genuine, donno) = + (* int -> int -> scope list -> bool * int * int * int -> KK.outcome *) + fun run_batches _ _ [] + (found_really_genuine, max_potential, max_genuine, donno) = if donno > 0 andalso max_genuine > 0 then (print_m (fn () => excipit "ran out of some resource"); "unknown") else if max_genuine = original_max_genuine then @@ -902,10 +937,12 @@ "Nitpick could not find a" ^ (if max_genuine = 1 then " better " ^ das_wort_model ^ "." 
else "ny better " ^ das_wort_model ^ "s.")); "potential") + else if found_really_genuine then + "genuine" else - if genuine_means_genuine then "genuine" else "likely_genuine" + "quasi_genuine" | run_batches j n (batch :: batches) z = - let val (z as (_, max_genuine, _)) = run_batch j n batch z in + let val (z as (_, _, max_genuine, _)) = run_batch j n batch z in run_batches (j + 1) n (if max_genuine > 0 then batches else []) z end @@ -925,7 +962,8 @@ val batches = batch_list batch_size (!scopes) val outcome_code = - (run_batches 0 (length batches) batches (max_potential, max_genuine, 0) + (run_batches 0 (length batches) batches + (false, max_potential, max_genuine, 0) handle Exn.Interrupt => do_interrupted ()) handle TimeLimit.TimeOut => (print_m (fn () => excipit "ran out of time"); diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_hol.ML --- a/src/HOL/Tools/Nitpick/nitpick_hol.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML Wed Mar 10 16:40:20 2010 +0100 @@ -65,8 +65,8 @@ val strip_any_connective : term -> term list * term val conjuncts_of : term -> term list val disjuncts_of : term -> term list - val unarize_and_unbox_type : typ -> typ - val unarize_unbox_and_uniterize_type : typ -> typ + val unarize_unbox_etc_type : typ -> typ + val uniterize_unarize_unbox_etc_type : typ -> typ val string_for_type : Proof.context -> typ -> string val prefix_name : string -> string -> string val shortest_name : string -> string @@ -148,11 +148,13 @@ theory -> (typ option * bool) list -> styp -> term -> int -> typ -> term val construct_value : theory -> (typ option * bool) list -> styp -> term list -> term + val coerce_term : hol_context -> typ list -> typ -> typ -> term -> term val card_of_type : (typ * int) list -> typ -> int val bounded_card_of_type : int -> int -> (typ * int) list -> typ -> int val bounded_exact_card_of_type : hol_context -> typ list -> int -> int -> (typ * int) list -> typ -> int val is_finite_type : hol_context -> typ -> bool + val is_small_finite_type : hol_context -> typ -> bool val special_bounds : term list -> (indexname * typ) list val abs_var : indexname * typ -> term -> term val is_funky_typedef : theory -> typ -> bool @@ -359,7 +361,8 @@ (@{const_name finite}, 1), (@{const_name unknown}, 0), (@{const_name is_unknown}, 1), - (@{const_name Tha}, 1), + (@{const_name safe_The}, 1), + (@{const_name safe_Eps}, 1), (@{const_name Frac}, 0), (@{const_name norm_frac}, 0)] val built_in_nat_consts = @@ -401,22 +404,24 @@ | unarize_type @{typ "signed_bit word"} = int_T | unarize_type (Type (s, Ts as _ :: _)) = Type (s, map unarize_type Ts) | unarize_type T = T -fun unarize_and_unbox_type (Type (@{type_name fun_box}, Ts)) = - unarize_and_unbox_type (Type ("fun", Ts)) - | unarize_and_unbox_type (Type (@{type_name pair_box}, Ts)) = - Type ("*", map unarize_and_unbox_type Ts) - | unarize_and_unbox_type @{typ "unsigned_bit word"} = nat_T - | unarize_and_unbox_type @{typ "signed_bit word"} = int_T - | unarize_and_unbox_type (Type (s, Ts as _ :: _)) = - Type (s, map unarize_and_unbox_type Ts) - | unarize_and_unbox_type T = T +fun unarize_unbox_etc_type (Type (@{type_name fin_fun}, Ts)) = + unarize_unbox_etc_type (Type (@{type_name fun}, Ts)) + | unarize_unbox_etc_type (Type (@{type_name fun_box}, Ts)) = + unarize_unbox_etc_type (Type (@{type_name fun}, Ts)) + | unarize_unbox_etc_type (Type (@{type_name pair_box}, Ts)) = + Type (@{type_name "*"}, map unarize_unbox_etc_type Ts) + | unarize_unbox_etc_type @{typ "unsigned_bit word"} = nat_T + | 
unarize_unbox_etc_type @{typ "signed_bit word"} = int_T + | unarize_unbox_etc_type (Type (s, Ts as _ :: _)) = + Type (s, map unarize_unbox_etc_type Ts) + | unarize_unbox_etc_type T = T fun uniterize_type (Type (s, Ts as _ :: _)) = Type (s, map uniterize_type Ts) | uniterize_type @{typ bisim_iterator} = nat_T | uniterize_type T = T -val unarize_unbox_and_uniterize_type = uniterize_type o unarize_and_unbox_type +val uniterize_unarize_unbox_etc_type = uniterize_type o unarize_unbox_etc_type (* Proof.context -> typ -> string *) -fun string_for_type ctxt = Syntax.string_of_typ ctxt o unarize_and_unbox_type +fun string_for_type ctxt = Syntax.string_of_typ ctxt o unarize_unbox_etc_type (* string -> string -> string *) val prefix_name = Long_Name.qualify o Long_Name.base_name @@ -439,9 +444,10 @@ #> map_types shorten_names_in_type (* theory -> typ * typ -> bool *) -fun type_match thy (T1, T2) = +fun strict_type_match thy (T1, T2) = (Sign.typ_match thy (T2, T1) Vartab.empty; true) handle Type.TYPE_MATCH => false +fun type_match thy = strict_type_match thy o pairself unarize_unbox_etc_type (* theory -> styp * styp -> bool *) fun const_match thy ((s1, T1), (s2, T2)) = s1 = s2 andalso type_match thy (T1, T2) @@ -454,14 +460,14 @@ (* typ -> bool *) fun is_TFree (TFree _) = true | is_TFree _ = false -fun is_higher_order_type (Type ("fun", _)) = true +fun is_higher_order_type (Type (@{type_name fun}, _)) = true | is_higher_order_type (Type (_, Ts)) = exists is_higher_order_type Ts | is_higher_order_type _ = false -fun is_fun_type (Type ("fun", _)) = true +fun is_fun_type (Type (@{type_name fun}, _)) = true | is_fun_type _ = false -fun is_set_type (Type ("fun", [_, @{typ bool}])) = true +fun is_set_type (Type (@{type_name fun}, [_, @{typ bool}])) = true | is_set_type _ = false -fun is_pair_type (Type ("*", _)) = true +fun is_pair_type (Type (@{type_name "*"}, _)) = true | is_pair_type _ = false fun is_lfp_iterator_type (Type (s, _)) = String.isPrefix lfp_iterator_prefix s | is_lfp_iterator_type _ = false @@ -494,19 +500,20 @@ (* int -> typ -> typ list * typ *) fun strip_n_binders 0 T = ([], T) - | strip_n_binders n (Type ("fun", [T1, T2])) = + | strip_n_binders n (Type (@{type_name fun}, [T1, T2])) = strip_n_binders (n - 1) T2 |>> cons T1 | strip_n_binders n (Type (@{type_name fun_box}, Ts)) = - strip_n_binders n (Type ("fun", Ts)) + strip_n_binders n (Type (@{type_name fun}, Ts)) | strip_n_binders _ T = raise TYPE ("Nitpick_HOL.strip_n_binders", [T], []) (* typ -> typ *) val nth_range_type = snd oo strip_n_binders (* typ -> int *) -fun num_factors_in_type (Type ("*", [T1, T2])) = +fun num_factors_in_type (Type (@{type_name "*"}, [T1, T2])) = fold (Integer.add o num_factors_in_type) [T1, T2] 0 | num_factors_in_type _ = 1 -fun num_binder_types (Type ("fun", [_, T2])) = 1 + num_binder_types T2 +fun num_binder_types (Type (@{type_name fun}, [_, T2])) = + 1 + num_binder_types T2 | num_binder_types _ = 0 (* typ -> typ list *) val curried_binder_types = maps HOLogic.flatten_tupleT o binder_types @@ -515,7 +522,7 @@ (* typ -> term list -> term *) fun mk_flat_tuple _ [t] = t - | mk_flat_tuple (Type ("*", [T1, T2])) (t :: ts) = + | mk_flat_tuple (Type (@{type_name "*"}, [T1, T2])) (t :: ts) = HOLogic.pair_const T1 T2 $ t $ (mk_flat_tuple T2 ts) | mk_flat_tuple T ts = raise TYPE ("Nitpick_HOL.mk_flat_tuple", [T], ts) (* int -> term -> term list *) @@ -595,7 +602,7 @@ val (co_s, co_Ts) = dest_Type co_T val _ = if forall is_TFree co_Ts andalso not (has_duplicates (op =) co_Ts) andalso - co_s <> "fun" andalso + co_s 
<> @{type_name fun} andalso not (is_basic_datatype thy [(NONE, true)] co_s) then () else @@ -669,7 +676,7 @@ find_index (curry (op =) s o fst) (Record.get_extT_fields thy T1 ||> single |> op @) (* theory -> styp -> bool *) -fun is_record_get thy (s, Type ("fun", [T1, _])) = +fun is_record_get thy (s, Type (@{type_name fun}, [T1, _])) = exists (curry (op =) s o fst) (all_record_fields thy T1) | is_record_get _ _ = false fun is_record_update thy (s, T) = @@ -677,30 +684,33 @@ exists (curry (op =) (unsuffix Record.updateN s) o fst) (all_record_fields thy (body_type T)) handle TYPE _ => false -fun is_abs_fun thy (s, Type ("fun", [_, Type (s', _)])) = +fun is_abs_fun thy (s, Type (@{type_name fun}, [_, Type (s', _)])) = (case typedef_info thy s' of SOME {Abs_name, ...} => s = Abs_name | NONE => false) | is_abs_fun _ _ = false -fun is_rep_fun thy (s, Type ("fun", [Type (s', _), _])) = +fun is_rep_fun thy (s, Type (@{type_name fun}, [Type (s', _), _])) = (case typedef_info thy s' of SOME {Rep_name, ...} => s = Rep_name | NONE => false) | is_rep_fun _ _ = false (* Proof.context -> styp -> bool *) -fun is_quot_abs_fun ctxt (x as (_, Type ("fun", [_, Type (s', _)]))) = +fun is_quot_abs_fun ctxt + (x as (_, Type (@{type_name fun}, [_, Type (s', _)]))) = (try (Quotient_Term.absrep_const_chk Quotient_Term.AbsF ctxt) s' = SOME (Const x)) | is_quot_abs_fun _ _ = false -fun is_quot_rep_fun ctxt (x as (_, Type ("fun", [Type (s', _), _]))) = +fun is_quot_rep_fun ctxt + (x as (_, Type (@{type_name fun}, [Type (s', _), _]))) = (try (Quotient_Term.absrep_const_chk Quotient_Term.RepF ctxt) s' = SOME (Const x)) | is_quot_rep_fun _ _ = false (* theory -> styp -> styp *) -fun mate_of_rep_fun thy (x as (_, Type ("fun", [T1 as Type (s', _), T2]))) = +fun mate_of_rep_fun thy (x as (_, Type (@{type_name fun}, + [T1 as Type (s', _), T2]))) = (case typedef_info thy s' of - SOME {Abs_name, ...} => (Abs_name, Type ("fun", [T2, T1])) + SOME {Abs_name, ...} => (Abs_name, Type (@{type_name fun}, [T2, T1])) | NONE => raise TERM ("Nitpick_HOL.mate_of_rep_fun", [Const x])) | mate_of_rep_fun _ x = raise TERM ("Nitpick_HOL.mate_of_rep_fun", [Const x]) (* theory -> typ -> typ *) @@ -729,10 +739,10 @@ end handle TYPE ("dest_Type", _, _) => false fun is_constr_like thy (s, T) = - member (op =) [@{const_name FunBox}, @{const_name PairBox}, - @{const_name Quot}, @{const_name Zero_Rep}, - @{const_name Suc_Rep}] s orelse - let val (x as (_, T)) = (s, unarize_and_unbox_type T) in + member (op =) [@{const_name FinFun}, @{const_name FunBox}, + @{const_name PairBox}, @{const_name Quot}, + @{const_name Zero_Rep}, @{const_name Suc_Rep}] s orelse + let val (x as (_, T)) = (s, unarize_unbox_etc_type T) in Refute.is_IDT_constructor thy x orelse is_record_constr x orelse (is_abs_fun thy x andalso is_pure_typedef thy (range_type T)) orelse is_coconstr thy x @@ -749,8 +759,8 @@ (* string -> bool *) val is_sel = String.isPrefix discr_prefix orf String.isPrefix sel_prefix val is_sel_like_and_no_discr = - String.isPrefix sel_prefix - orf (member (op =) [@{const_name fst}, @{const_name snd}]) + String.isPrefix sel_prefix orf + (member (op =) [@{const_name fst}, @{const_name snd}]) (* boxability -> boxability *) fun in_fun_lhs_for InConstr = InSel @@ -763,10 +773,10 @@ (* hol_context -> boxability -> typ -> bool *) fun is_boxing_worth_it (hol_ctxt : hol_context) boxy T = case T of - Type ("fun", _) => + Type (@{type_name fun}, _) => (boxy = InPair orelse boxy = InFunLHS) andalso not (is_boolean_type (body_type T)) - | Type ("*", Ts) => + | Type 
(@{type_name "*"}, Ts) => boxy = InPair orelse boxy = InFunRHS1 orelse boxy = InFunRHS2 orelse ((boxy = InExpr orelse boxy = InFunLHS) andalso exists (is_boxing_worth_it hol_ctxt InPair) @@ -780,7 +790,7 @@ (* hol_context -> boxability -> typ -> typ *) and box_type hol_ctxt boxy T = case T of - Type (z as ("fun", [T1, T2])) => + Type (z as (@{type_name fun}, [T1, T2])) => if boxy <> InConstr andalso boxy <> InSel andalso should_box_type hol_ctxt boxy z then Type (@{type_name fun_box}, @@ -788,12 +798,13 @@ else box_type hol_ctxt (in_fun_lhs_for boxy) T1 --> box_type hol_ctxt (in_fun_rhs_for boxy) T2 - | Type (z as ("*", Ts)) => + | Type (z as (@{type_name "*"}, Ts)) => if boxy <> InConstr andalso boxy <> InSel andalso should_box_type hol_ctxt boxy z then Type (@{type_name pair_box}, map (box_type hol_ctxt InSel) Ts) else - Type ("*", map (box_type hol_ctxt + Type (@{type_name "*"}, + map (box_type hol_ctxt (if boxy = InConstr orelse boxy = InSel then boxy else InPair)) Ts) | _ => T @@ -979,7 +990,7 @@ else let (* int -> typ -> int * term *) - fun aux m (Type ("*", [T1, T2])) = + fun aux m (Type (@{type_name "*"}, [T1, T2])) = let val (m, t1) = aux m T1 val (m, t2) = aux m T2 @@ -1018,10 +1029,85 @@ | _ => list_comb (Const x, args) end +(* hol_context -> typ -> term -> term *) +fun constr_expand (hol_ctxt as {thy, stds, ...}) T t = + (case head_of t of + Const x => if is_constr_like thy x then t else raise SAME () + | _ => raise SAME ()) + handle SAME () => + let + val x' as (_, T') = + if is_pair_type T then + let val (T1, T2) = HOLogic.dest_prodT T in + (@{const_name Pair}, T1 --> T2 --> T) + end + else + datatype_constrs hol_ctxt T |> hd + val arg_Ts = binder_types T' + in + list_comb (Const x', map2 (select_nth_constr_arg thy stds x' t) + (index_seq 0 (length arg_Ts)) arg_Ts) + end + +(* (term -> term) -> int -> term -> term *) +fun coerce_bound_no f j t = + case t of + t1 $ t2 => coerce_bound_no f j t1 $ coerce_bound_no f j t2 + | Abs (s, T, t') => Abs (s, T, coerce_bound_no f (j + 1) t') + | Bound j' => if j' = j then f t else t + | _ => t +(* hol_context -> typ -> typ -> term -> term *) +fun coerce_bound_0_in_term hol_ctxt new_T old_T = + old_T <> new_T ? coerce_bound_no (coerce_term hol_ctxt [new_T] old_T new_T) 0 +(* hol_context -> typ list -> typ -> typ -> term -> term *) +and coerce_term (hol_ctxt as {thy, stds, fast_descrs, ...}) Ts new_T old_T t = + if old_T = new_T then + t + else + case (new_T, old_T) of + (Type (new_s, new_Ts as [new_T1, new_T2]), + Type (@{type_name fun}, [old_T1, old_T2])) => + (case eta_expand Ts t 1 of + Abs (s, _, t') => + Abs (s, new_T1, + t' |> coerce_bound_0_in_term hol_ctxt new_T1 old_T1 + |> coerce_term hol_ctxt (new_T1 :: Ts) new_T2 old_T2) + |> Envir.eta_contract + |> new_s <> @{type_name fun} + ? 
construct_value thy stds + (if new_s = @{type_name fin_fun} then @{const_name FinFun} + else @{const_name FunBox}, + Type (@{type_name fun}, new_Ts) --> new_T) + o single + | t' => raise TERM ("Nitpick_HOL.coerce_term", [t'])) + | (Type (new_s, new_Ts as [new_T1, new_T2]), + Type (old_s, old_Ts as [old_T1, old_T2])) => + if old_s = @{type_name fin_fun} orelse old_s = @{type_name fun_box} orelse + old_s = @{type_name pair_box} orelse old_s = @{type_name "*"} then + case constr_expand hol_ctxt old_T t of + Const (old_s, _) $ t1 => + if new_s = @{type_name fun} then + coerce_term hol_ctxt Ts new_T (Type (@{type_name fun}, old_Ts)) t1 + else + construct_value thy stds + (old_s, Type (@{type_name fun}, new_Ts) --> new_T) + [coerce_term hol_ctxt Ts (Type (@{type_name fun}, new_Ts)) + (Type (@{type_name fun}, old_Ts)) t1] + | Const _ $ t1 $ t2 => + construct_value thy stds + (if new_s = @{type_name "*"} then @{const_name Pair} + else @{const_name PairBox}, new_Ts ---> new_T) + (map3 (coerce_term hol_ctxt Ts) [new_T1, new_T2] [old_T1, old_T2] + [t1, t2]) + | t' => raise TERM ("Nitpick_HOL.coerce_term", [t']) + else + raise TYPE ("Nitpick_HOL.coerce_term", [new_T, old_T], [t]) + | _ => raise TYPE ("Nitpick_HOL.coerce_term", [new_T, old_T], [t]) + (* (typ * int) list -> typ -> int *) -fun card_of_type assigns (Type ("fun", [T1, T2])) = +fun card_of_type assigns (Type (@{type_name fun}, [T1, T2])) = reasonable_power (card_of_type assigns T2) (card_of_type assigns T1) - | card_of_type assigns (Type ("*", [T1, T2])) = + | card_of_type assigns (Type (@{type_name "*"}, [T1, T2])) = card_of_type assigns T1 * card_of_type assigns T2 | card_of_type _ (Type (@{type_name itself}, _)) = 1 | card_of_type _ @{typ prop} = 2 @@ -1033,7 +1119,8 @@ | NONE => if T = @{typ bisim_iterator} then 0 else raise TYPE ("Nitpick_HOL.card_of_type", [T], []) (* int -> (typ * int) list -> typ -> int *) -fun bounded_card_of_type max default_card assigns (Type ("fun", [T1, T2])) = +fun bounded_card_of_type max default_card assigns + (Type (@{type_name fun}, [T1, T2])) = let val k1 = bounded_card_of_type max default_card assigns T1 val k2 = bounded_card_of_type max default_card assigns T2 @@ -1041,7 +1128,8 @@ if k1 = max orelse k2 = max then max else Int.min (max, reasonable_power k2 k1) end - | bounded_card_of_type max default_card assigns (Type ("*", [T1, T2])) = + | bounded_card_of_type max default_card assigns + (Type (@{type_name "*"}, [T1, T2])) = let val k1 = bounded_card_of_type max default_card assigns T1 val k2 = bounded_card_of_type max default_card assigns T2 @@ -1064,7 +1152,7 @@ else if member (op =) finitizable_dataTs T then raise SAME () else case T of - Type ("fun", [T1, T2]) => + Type (@{type_name fun}, [T1, T2]) => let val k1 = aux avoid T1 val k2 = aux avoid T2 @@ -1073,7 +1161,7 @@ else if k1 >= max orelse k2 >= max then max else Int.min (max, reasonable_power k2 k1) end - | Type ("*", [T1, T2]) => + | Type (@{type_name "*"}, [T1, T2]) => let val k1 = aux avoid T1 val k2 = aux avoid T2 @@ -1104,9 +1192,16 @@ AList.lookup (op =) assigns T |> the_default default_card in Int.min (max, aux [] T) end +val small_type_max_card = 5 + (* hol_context -> typ -> bool *) fun is_finite_type hol_ctxt T = - bounded_exact_card_of_type hol_ctxt [] 1 2 [] T <> 0 + bounded_exact_card_of_type hol_ctxt [] 1 2 [] T > 0 +(* hol_context -> typ -> bool *) +fun is_small_finite_type hol_ctxt T = + let val n = bounded_exact_card_of_type hol_ctxt [] 1 2 [] T in + n > 0 andalso n <= small_type_max_card + end (* term -> bool *) fun 
is_ground_term (t1 $ t2) = is_ground_term t1 andalso is_ground_term t2 @@ -1144,7 +1239,8 @@ is_typedef_axiom thy boring t2 | is_typedef_axiom thy boring (@{const Trueprop} $ (Const (@{const_name Typedef.type_definition}, _) - $ Const (_, Type ("fun", [Type (s, _), _])) $ Const _ $ _)) = + $ Const (_, Type (@{type_name fun}, [Type (s, _), _])) + $ Const _ $ _)) = boring <> is_funky_typedef_name thy s andalso is_typedef thy s | is_typedef_axiom _ _ _ = false @@ -1538,8 +1634,8 @@ fun is_inductive_pred hol_ctxt = is_real_inductive_pred hol_ctxt andf (not o is_real_equational_fun hol_ctxt) fun is_equational_fun hol_ctxt = - (is_real_equational_fun hol_ctxt orf is_real_inductive_pred hol_ctxt - orf (String.isPrefix ubfp_prefix orf String.isPrefix lbfp_prefix) o fst) + (is_real_equational_fun hol_ctxt orf is_real_inductive_pred hol_ctxt orf + (String.isPrefix ubfp_prefix orf String.isPrefix lbfp_prefix) o fst) (* term * term -> term *) fun s_betapply (Const (@{const_name If}, _) $ @{const True} $ t, _) = t @@ -1586,7 +1682,7 @@ fun do_term depth Ts t = case t of (t0 as Const (@{const_name Int.number_class.number_of}, - Type ("fun", [_, ran_T]))) $ t1 => + Type (@{type_name fun}, [_, ran_T]))) $ t1 => ((if is_number_type thy ran_T then let val j = t1 |> HOLogic.dest_numeral @@ -1612,7 +1708,7 @@ betapplys (t0 |> loose_bvar1 (t2', 0) ? do_term depth Ts, map (do_term depth Ts) [t1, t2]) | Const (x as (@{const_name distinct}, - Type ("fun", [Type (@{type_name list}, [T']), _]))) + Type (@{type_name fun}, [Type (@{type_name list}, [T']), _]))) $ (t1 as _ $ _) => (t1 |> HOLogic.dest_list |> distinctness_formula T' handle TERM _ => do_const depth Ts t x [t1]) @@ -1726,7 +1822,7 @@ val bisim_max = @{const bisim_iterator_max} val n_var = Var (("n", 0), iter_T) val n_var_minus_1 = - Const (@{const_name Tha}, (iter_T --> bool_T) --> iter_T) + Const (@{const_name safe_The}, (iter_T --> bool_T) --> iter_T) $ Abs ("m", iter_T, HOLogic.eq_const iter_T $ (suc_const iter_T $ Bound 0) $ n_var) val x_var = Var (("x", 0), T) @@ -1969,7 +2065,7 @@ val tuple_T = HOLogic.mk_tupleT tuple_arg_Ts val set_T = tuple_T --> bool_T val curried_T = tuple_T --> set_T - val uncurried_T = Type ("*", [tuple_T, tuple_T]) --> bool_T + val uncurried_T = Type (@{type_name "*"}, [tuple_T, tuple_T]) --> bool_T val (base_rhs, step_rhs) = linear_pred_base_and_step_rhss fp_app val base_x as (base_s, _) = (base_prefix ^ s, outer_Ts ---> set_T) val base_eq = HOLogic.mk_eq (list_comb (Const base_x, outer_vars), base_rhs) @@ -2092,8 +2188,8 @@ let fun aux T accum = case T of - Type ("fun", Ts) => fold aux Ts accum - | Type ("*", Ts) => fold aux Ts accum + Type (@{type_name fun}, Ts) => fold aux Ts accum + | Type (@{type_name "*"}, Ts) => fold aux Ts accum | Type (@{type_name itself}, [T1]) => aux T1 accum | Type (_, Ts) => if member (op =) (@{typ prop} :: @{typ bool} :: @{typ unit} :: accum) @@ -2161,7 +2257,7 @@ (* int list -> int list -> typ -> typ *) fun format_type default_format format T = let - val T = unarize_unbox_and_uniterize_type T + val T = uniterize_unarize_unbox_etc_type T val format = format |> filter (curry (op <) 0) in if forall (curry (op =) 1) format then @@ -2200,7 +2296,7 @@ (* term -> term *) val do_term = map_aterms (fn Const x => fst (do_const x) | t' => t') val (x' as (_, T'), js, ts) = - AList.find (op =) (!special_funs) (s, unarize_and_unbox_type T) + AList.find (op =) (!special_funs) (s, unarize_unbox_etc_type T) |> the_single val max_j = List.last js val Ts = List.take (binder_types T', max_j + 1) @@ -2294,7 
+2390,7 @@ let val t = Const (original_name s, T) in (t, format_term_type thy def_table formats t) end) - |>> map_types unarize_unbox_and_uniterize_type + |>> map_types uniterize_unarize_unbox_etc_type |>> shorten_names_in_term |>> Term.map_abs_vars shortest_name in do_const end diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_isar.ML --- a/src/HOL/Tools/Nitpick/nitpick_isar.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_isar.ML Wed Mar 10 16:40:20 2010 +0100 @@ -39,6 +39,7 @@ ("bits", ["1,2,3,4,6,8,10,12"]), ("bisim_depth", ["7"]), ("box", ["smart"]), + ("finitize", ["smart"]), ("mono", ["smart"]), ("std", ["true"]), ("wf", ["smart"]), @@ -78,6 +79,7 @@ val negated_params = [("dont_box", "box"), + ("dont_finitize", "finitize"), ("non_mono", "mono"), ("non_std", "std"), ("non_wf", "wf"), @@ -111,8 +113,8 @@ AList.defined (op =) negated_params s orelse s = "max" orelse s = "eval" orelse s = "expect" orelse exists (fn p => String.isPrefix (p ^ " ") s) - ["card", "max", "iter", "box", "dont_box", "mono", "non_mono", "std", - "non_std", "wf", "non_wf", "format"] + ["card", "max", "iter", "box", "dont_box", "finitize", "dont_finitize", + "mono", "non_mono", "std", "non_std", "wf", "non_wf", "format"] (* string * 'a -> unit *) fun check_raw_param (s, _) = @@ -297,14 +299,8 @@ val iters_assigns = lookup_ints_assigns read_const_polymorphic "iter" 0 val bitss = lookup_int_seq "bits" 1 val bisim_depths = lookup_int_seq "bisim_depth" ~1 - val boxes = - lookup_bool_option_assigns read_type_polymorphic "box" @ - map_filter (fn (SOME T, _) => - if is_fun_type T orelse is_pair_type T then - SOME (SOME T, SOME true) - else - NONE - | (NONE, _) => NONE) cards_assigns + val boxes = lookup_bool_option_assigns read_type_polymorphic "box" + val finitizes = lookup_bool_option_assigns read_type_polymorphic "finitize" val monos = lookup_bool_option_assigns read_type_polymorphic "mono" val stds = lookup_bool_assigns read_type_polymorphic "std" val wfs = lookup_bool_option_assigns read_const_polymorphic "wf" @@ -349,8 +345,8 @@ in {cards_assigns = cards_assigns, maxes_assigns = maxes_assigns, iters_assigns = iters_assigns, bitss = bitss, bisim_depths = bisim_depths, - boxes = boxes, monos = monos, stds = stds, wfs = wfs, - sat_solver = sat_solver, blocking = blocking, falsify = falsify, + boxes = boxes, finitizes = finitizes, monos = monos, stds = stds, + wfs = wfs, sat_solver = sat_solver, blocking = blocking, falsify = falsify, debug = debug, verbose = verbose, overlord = overlord, user_axioms = user_axioms, assms = assms, merge_type_vars = merge_type_vars, binary_ints = binary_ints, diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_kodkod.ML --- a/src/HOL/Tools/Nitpick/nitpick_kodkod.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_kodkod.ML Wed Mar 10 16:40:20 2010 +0100 @@ -301,8 +301,8 @@ (* Proof.context -> bool -> dtype_spec list -> nut -> KK.bound *) fun bound_for_sel_rel ctxt debug dtypes - (FreeRel (x, T as Type ("fun", [T1, T2]), R as Func (Atom (_, j0), R2), - nick)) = + (FreeRel (x, T as Type (@{type_name fun}, [T1, T2]), + R as Func (Atom (_, j0), R2), nick)) = let val {delta, epsilon, exclusive, explicit_max, ...} = constr_spec dtypes (original_name nick, T1) @@ -1208,7 +1208,7 @@ (rel_expr_from_rel_expr kk min_R R2 r2)) end | Cst (Iden, _, Func (Atom (1, j0), Formula Neut)) => KK.Atom j0 - | Cst (Iden, T as Type ("fun", [T1, _]), R as Func (R1, _)) => + | Cst (Iden, T as Type (@{type_name fun}, [T1, _]), R as Func (R1, 
_)) => to_rep R (Cst (Iden, T, Func (one_rep ofs T1 R1, Formula Neut))) | Cst (Num j, T, R) => if is_word_type T then @@ -1229,36 +1229,36 @@ | Cst (Suc, @{typ "nat => nat"}, Func (Atom x, _)) => kk_intersect (KK.Rel suc_rel) (kk_product KK.Univ (KK.AtomSeq x)) | Cst (Suc, _, Func (Atom _, _)) => KK.Rel suc_rel - | Cst (Add, Type ("fun", [@{typ nat}, _]), _) => KK.Rel nat_add_rel - | Cst (Add, Type ("fun", [@{typ int}, _]), _) => KK.Rel int_add_rel - | Cst (Add, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => + | Cst (Add, Type (_, [@{typ nat}, _]), _) => KK.Rel nat_add_rel + | Cst (Add, Type (_, [@{typ int}, _]), _) => KK.Rel int_add_rel + | Cst (Add, T as Type (_, [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_binary_op T R NONE (SOME (curry KK.Add)) - | Cst (Add, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => + | Cst (Add, T as Type (_, [@{typ "signed_bit word"}, _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => kk_implies (KK.LE (KK.Num 0, KK.BitXor (i1, i2))) (KK.LE (KK.Num 0, KK.BitXor (i2, i3))))) (SOME (curry KK.Add)) - | Cst (Subtract, Type ("fun", [@{typ nat}, _]), _) => + | Cst (Subtract, Type (_, [@{typ nat}, _]), _) => KK.Rel nat_subtract_rel - | Cst (Subtract, Type ("fun", [@{typ int}, _]), _) => + | Cst (Subtract, Type (_, [@{typ int}, _]), _) => KK.Rel int_subtract_rel - | Cst (Subtract, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => + | Cst (Subtract, T as Type (_, [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_binary_op T R NONE (SOME (fn i1 => fn i2 => KK.IntIf (KK.LE (i1, i2), KK.Num 0, KK.Sub (i1, i2)))) - | Cst (Subtract, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => + | Cst (Subtract, T as Type (_, [@{typ "signed_bit word"}, _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => kk_implies (KK.LT (KK.BitXor (i1, i2), KK.Num 0)) (KK.LT (KK.BitXor (i2, i3), KK.Num 0)))) (SOME (curry KK.Sub)) - | Cst (Multiply, Type ("fun", [@{typ nat}, _]), _) => + | Cst (Multiply, Type (_, [@{typ nat}, _]), _) => KK.Rel nat_multiply_rel - | Cst (Multiply, Type ("fun", [@{typ int}, _]), _) => + | Cst (Multiply, Type (_, [@{typ int}, _]), _) => KK.Rel int_multiply_rel | Cst (Multiply, - T as Type ("fun", [Type (@{type_name word}, [bit_T]), _]), R) => + T as Type (_, [Type (@{type_name word}, [bit_T]), _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => kk_or (KK.IntEq (i2, KK.Num 0)) @@ -1267,14 +1267,14 @@ ? 
kk_and (KK.LE (KK.Num 0, foldl1 KK.BitAnd [i1, i2, i3]))))) (SOME (curry KK.Mult)) - | Cst (Divide, Type ("fun", [@{typ nat}, _]), _) => KK.Rel nat_divide_rel - | Cst (Divide, Type ("fun", [@{typ int}, _]), _) => KK.Rel int_divide_rel - | Cst (Divide, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => + | Cst (Divide, Type (_, [@{typ nat}, _]), _) => KK.Rel nat_divide_rel + | Cst (Divide, Type (_, [@{typ int}, _]), _) => KK.Rel int_divide_rel + | Cst (Divide, T as Type (_, [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_binary_op T R NONE (SOME (fn i1 => fn i2 => KK.IntIf (KK.IntEq (i2, KK.Num 0), KK.Num 0, KK.Div (i1, i2)))) - | Cst (Divide, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => + | Cst (Divide, T as Type (_, [@{typ "signed_bit word"}, _]), R) => to_bit_word_binary_op T R (SOME (fn i1 => fn i2 => fn i3 => KK.LE (KK.Num 0, foldl1 KK.BitAnd [i1, i2, i3]))) @@ -1293,9 +1293,9 @@ | Cst (Fracs, _, Func (Struct _, _)) => kk_project_seq (KK.Rel norm_frac_rel) 2 2 | Cst (NormFrac, _, _) => KK.Rel norm_frac_rel - | Cst (NatToInt, Type ("fun", [@{typ nat}, _]), Func (Atom _, Atom _)) => + | Cst (NatToInt, Type (_, [@{typ nat}, _]), Func (Atom _, Atom _)) => KK.Iden - | Cst (NatToInt, Type ("fun", [@{typ nat}, _]), + | Cst (NatToInt, Type (_, [@{typ nat}, _]), Func (Atom (_, nat_j0), Opt (Atom (int_k, int_j0)))) => if nat_j0 = int_j0 then kk_intersect KK.Iden @@ -1303,9 +1303,9 @@ KK.Univ) else raise BAD ("Nitpick_Kodkod.to_r (NatToInt)", "\"nat_j0 <> int_j0\"") - | Cst (NatToInt, T as Type ("fun", [@{typ "unsigned_bit word"}, _]), R) => + | Cst (NatToInt, T as Type (_, [@{typ "unsigned_bit word"}, _]), R) => to_bit_word_unary_op T R I - | Cst (IntToNat, Type ("fun", [@{typ int}, _]), + | Cst (IntToNat, Type (_, [@{typ int}, _]), Func (Atom (int_k, int_j0), nat_R)) => let val abs_card = max_int_for_card int_k + 1 @@ -1321,7 +1321,7 @@ else raise BAD ("Nitpick_Kodkod.to_r (IntToNat)", "\"nat_j0 <> int_j0\"") end - | Cst (IntToNat, T as Type ("fun", [@{typ "signed_bit word"}, _]), R) => + | Cst (IntToNat, T as Type (_, [@{typ "signed_bit word"}, _]), R) => to_bit_word_unary_op T R (fn i => KK.IntIf (KK.LE (i, KK.Num 0), KK.Num 0, i)) | Op1 (Not, _, R, u1) => kk_not3 (to_rep R u1) @@ -1388,7 +1388,7 @@ (Func (R1, Formula Neut)) r)) (to_opt R1 u1) | _ => raise NUT ("Nitpick_Kodkod.to_r (SingletonSet)", [u])) - | Op1 (Tha, _, R, u1) => + | Op1 (SafeThe, _, R, u1) => if is_opt_rep R then kk_join (to_rep (Func (unopt_rep R, Opt bool_atom_R)) u1) true_atom else diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_model.ML --- a/src/HOL/Tools/Nitpick/nitpick_model.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_model.ML Wed Mar 10 16:40:20 2010 +0100 @@ -110,48 +110,53 @@ the (AList.lookup (op =) bounds (the_rel rel_table name)) handle NUT _ => [[]] (* term -> term *) -fun unarize_and_unbox_term (Const (@{const_name FunBox}, _) $ t1) = - unarize_and_unbox_term t1 - | unarize_and_unbox_term (Const (@{const_name PairBox}, - Type ("fun", [T1, Type ("fun", [T2, _])])) - $ t1 $ t2) = - let val Ts = map unarize_unbox_and_uniterize_type [T1, T2] in - Const (@{const_name Pair}, Ts ---> Type ("*", Ts)) - $ unarize_and_unbox_term t1 $ unarize_and_unbox_term t2 +fun unarize_unbox_etc_term (Const (@{const_name FinFun}, _) $ t1) = + unarize_unbox_etc_term t1 + | unarize_unbox_etc_term (Const (@{const_name FunBox}, _) $ t1) = + unarize_unbox_etc_term t1 + | unarize_unbox_etc_term + (Const (@{const_name PairBox}, + Type (@{type_name fun}, [T1, Type 
(@{type_name fun}, [T2, _])])) + $ t1 $ t2) = + let val Ts = map uniterize_unarize_unbox_etc_type [T1, T2] in + Const (@{const_name Pair}, Ts ---> Type (@{type_name "*"}, Ts)) + $ unarize_unbox_etc_term t1 $ unarize_unbox_etc_term t2 end - | unarize_and_unbox_term (Const (s, T)) = - Const (s, unarize_unbox_and_uniterize_type T) - | unarize_and_unbox_term (t1 $ t2) = - unarize_and_unbox_term t1 $ unarize_and_unbox_term t2 - | unarize_and_unbox_term (Free (s, T)) = - Free (s, unarize_unbox_and_uniterize_type T) - | unarize_and_unbox_term (Var (x, T)) = - Var (x, unarize_unbox_and_uniterize_type T) - | unarize_and_unbox_term (Bound j) = Bound j - | unarize_and_unbox_term (Abs (s, T, t')) = - Abs (s, unarize_unbox_and_uniterize_type T, unarize_and_unbox_term t') + | unarize_unbox_etc_term (Const (s, T)) = + Const (s, uniterize_unarize_unbox_etc_type T) + | unarize_unbox_etc_term (t1 $ t2) = + unarize_unbox_etc_term t1 $ unarize_unbox_etc_term t2 + | unarize_unbox_etc_term (Free (s, T)) = + Free (s, uniterize_unarize_unbox_etc_type T) + | unarize_unbox_etc_term (Var (x, T)) = + Var (x, uniterize_unarize_unbox_etc_type T) + | unarize_unbox_etc_term (Bound j) = Bound j + | unarize_unbox_etc_term (Abs (s, T, t')) = + Abs (s, uniterize_unarize_unbox_etc_type T, unarize_unbox_etc_term t') (* typ -> typ -> (typ * typ) * (typ * typ) *) -fun factor_out_types (T1 as Type ("*", [T11, T12])) - (T2 as Type ("*", [T21, T22])) = +fun factor_out_types (T1 as Type (@{type_name "*"}, [T11, T12])) + (T2 as Type (@{type_name "*"}, [T21, T22])) = let val (n1, n2) = pairself num_factors_in_type (T11, T21) in if n1 = n2 then let val ((T11', opt_T12'), (T21', opt_T22')) = factor_out_types T12 T22 in - ((Type ("*", [T11, T11']), opt_T12'), - (Type ("*", [T21, T21']), opt_T22')) + ((Type (@{type_name "*"}, [T11, T11']), opt_T12'), + (Type (@{type_name "*"}, [T21, T21']), opt_T22')) end else if n1 < n2 then case factor_out_types T1 T21 of (p1, (T21', NONE)) => (p1, (T21', SOME T22)) | (p1, (T21', SOME T22')) => - (p1, (T21', SOME (Type ("*", [T22', T22])))) + (p1, (T21', SOME (Type (@{type_name "*"}, [T22', T22])))) else swap (factor_out_types T2 T1) end - | factor_out_types (Type ("*", [T11, T12])) T2 = ((T11, SOME T12), (T2, NONE)) - | factor_out_types T1 (Type ("*", [T21, T22])) = ((T1, NONE), (T21, SOME T22)) + | factor_out_types (Type (@{type_name "*"}, [T11, T12])) T2 = + ((T11, SOME T12), (T2, NONE)) + | factor_out_types T1 (Type (@{type_name "*"}, [T21, T22])) = + ((T1, NONE), (T21, SOME T22)) | factor_out_types T1 T2 = ((T1, NONE), (T2, NONE)) (* bool -> typ -> typ -> (term * term) list -> term *) @@ -188,10 +193,11 @@ val (ts1, ts2) = t |> HOLogic.strip_ptuple ps |> chop cut in (HOLogic.mk_ptuple ps1 T1 ts1, HOLogic.mk_ptuple ps2 T2 ts2) end (* typ -> term -> term -> term *) -fun pair_up (Type ("*", [T1', T2'])) +fun pair_up (Type (@{type_name "*"}, [T1', T2'])) (t1 as Const (@{const_name Pair}, - Type ("fun", [_, Type ("fun", [_, T1])])) $ t11 $ t12) - t2 = + Type (@{type_name fun}, + [_, Type (@{type_name fun}, [_, T1])])) + $ t11 $ t12) t2 = if T1 = T1' then HOLogic.mk_prod (t1, t2) else HOLogic.mk_prod (t11, pair_up T2' t12 t2) | pair_up _ t1 t2 = HOLogic.mk_prod (t1, t2) @@ -199,7 +205,7 @@ fun multi_pair_up T1 t1 (ts2, ts3) = map2 (pair o pair_up T1 t1) ts2 ts3 (* typ -> typ -> typ -> term -> term *) -fun typecast_fun (Type ("fun", [T1', T2'])) T1 T2 t = +fun typecast_fun (Type (@{type_name fun}, [T1', T2'])) T1 T2 t = let (* typ -> typ -> typ -> typ -> term -> term *) fun do_curry T1 T1a T1b T2 t = @@ 
-238,9 +244,11 @@ t |> do_arrow T1a' (T1b' --> T2') T1 T2 |> do_uncurry T1' T2' | _ => raise TYPE ("Nitpick_Model.typecast_fun.do_fun", [T1, T1'], []) (* typ -> typ -> term -> term *) - and do_term (Type ("fun", [T1', T2'])) (Type ("fun", [T1, T2])) t = + and do_term (Type (@{type_name fun}, [T1', T2'])) + (Type (@{type_name fun}, [T1, T2])) t = do_fun T1' T2' T1 T2 t - | do_term (T' as Type ("*", Ts' as [T1', T2'])) (Type ("*", [T1, T2])) + | do_term (T' as Type (@{type_name "*"}, Ts' as [T1', T2'])) + (Type (@{type_name "*"}, [T1, T2])) (Const (@{const_name Pair}, _) $ t1 $ t2) = Const (@{const_name Pair}, Ts' ---> T') $ do_term T1' T1 t1 $ do_term T2' T2 t2 @@ -257,7 +265,7 @@ | truth_const_sort_key _ = "1" (* typ -> term list -> term *) -fun mk_tuple (Type ("*", [T1, T2])) ts = +fun mk_tuple (Type (@{type_name "*"}, [T1, T2])) ts = HOLogic.mk_prod (mk_tuple T1 ts, mk_tuple T2 (List.drop (ts, length (HOLogic.flatten_tupleT T1)))) | mk_tuple _ (t :: _) = t @@ -307,8 +315,8 @@ else aux tps end - (* typ -> typ -> typ -> (term * term) list -> term *) - fun make_map T1 T2 T2' = + (* bool -> typ -> typ -> typ -> (term * term) list -> term *) + fun make_map maybe_opt T1 T2 T2' = let val update_const = Const (@{const_name fun_upd}, (T1 --> T2) --> T1 --> T2 --> T1 --> T2) @@ -319,7 +327,7 @@ Const (@{const_name None}, _) => aux' ps | _ => update_const $ aux' ps $ t1 $ t2) fun aux ps = - if not (is_complete_type datatypes false T1) then + if maybe_opt andalso not (is_complete_type datatypes false T1) then update_const $ aux' ps $ Const (unrep, T1) $ (Const (@{const_name Some}, T2' --> T2) $ Const (unknown, T2')) else @@ -328,7 +336,7 @@ (* typ list -> term -> term *) fun polish_funs Ts t = (case fastype_of1 (Ts, t) of - Type ("fun", [T1, T2]) => + Type (@{type_name fun}, [T1, T2]) => if is_plain_fun t then case T2 of @{typ bool} => @@ -341,9 +349,9 @@ end | Type (@{type_name option}, [T2']) => let - val ts_pair = snd (dest_plain_fun t) - |> pairself (map (polish_funs Ts)) - in make_map T1 T2 T2' (rev (op ~~ ts_pair)) end + val (maybe_opt, ts_pair) = + dest_plain_fun t ||> pairself (map (polish_funs Ts)) + in make_map maybe_opt T1 T2 T2' (rev (op ~~ ts_pair)) end | _ => raise SAME () else raise SAME () @@ -356,7 +364,7 @@ else polish_funs Ts t1 $ polish_funs Ts t2 | t1 $ t2 => polish_funs Ts t1 $ polish_funs Ts t2 | Abs (s, T, t') => Abs (s, T, polish_funs (T :: Ts) t') - | Const (s, Type ("fun", [T1, T2])) => + | Const (s, Type (@{type_name fun}, [T1, T2])) => if s = opt_flag orelse s = non_opt_flag then Abs ("x", T1, Const (unknown, T2)) else @@ -366,24 +374,24 @@ fun make_fun maybe_opt T1 T2 T' ts1 ts2 = ts1 ~~ ts2 |> sort (nice_term_ord o pairself fst) |> make_plain_fun maybe_opt T1 T2 - |> unarize_and_unbox_term - |> typecast_fun (unarize_unbox_and_uniterize_type T') - (unarize_unbox_and_uniterize_type T1) - (unarize_unbox_and_uniterize_type T2) + |> unarize_unbox_etc_term + |> typecast_fun (uniterize_unarize_unbox_etc_type T') + (uniterize_unarize_unbox_etc_type T1) + (uniterize_unarize_unbox_etc_type T2) (* (typ * int) list -> typ -> typ -> int -> term *) - fun term_for_atom seen (T as Type ("fun", [T1, T2])) T' j _ = + fun term_for_atom seen (T as Type (@{type_name fun}, [T1, T2])) T' j _ = let val k1 = card_of_type card_assigns T1 val k2 = card_of_type card_assigns T2 in - term_for_rep seen T T' (Vect (k1, Atom (k2, 0))) + term_for_rep true seen T T' (Vect (k1, Atom (k2, 0))) [nth_combination (replicate k1 (k2, 0)) j] handle General.Subscript => raise ARG 
("Nitpick_Model.reconstruct_term.term_for_atom", signed_string_of_int j ^ " for " ^ string_for_rep (Vect (k1, Atom (k2, 0)))) end - | term_for_atom seen (Type ("*", [T1, T2])) _ j k = + | term_for_atom seen (Type (@{type_name "*"}, [T1, T2])) _ j k = let val k1 = card_of_type card_assigns T1 val k2 = k div k1 @@ -461,8 +469,9 @@ if length arg_Ts = 0 then [] else - map3 (fn Ts => term_for_rep seen Ts Ts) arg_Ts arg_Rs - arg_jsss + map3 (fn Ts => + term_for_rep (constr_s <> @{const_name FinFun}) + seen Ts Ts) arg_Ts arg_Rs arg_jsss |> mk_tuple (HOLogic.mk_tupleT uncur_arg_Ts) |> dest_n_tuple (length uncur_arg_Ts) val t = @@ -519,50 +528,54 @@ and term_for_vect seen k R T1 T2 T' js = make_fun true T1 T2 T' (map (fn j => term_for_atom seen T1 T1 j k) (index_seq 0 k)) - (map (term_for_rep seen T2 T2 R o single) + (map (term_for_rep true seen T2 T2 R o single) (batch_list (arity_of_rep R) js)) - (* (typ * int) list -> typ -> typ -> rep -> int list list -> term *) - and term_for_rep seen T T' Unit [[]] = term_for_atom seen T T' 0 1 - | term_for_rep seen T T' (R as Atom (k, j0)) [[j]] = + (* bool -> (typ * int) list -> typ -> typ -> rep -> int list list -> term *) + and term_for_rep _ seen T T' Unit [[]] = term_for_atom seen T T' 0 1 + | term_for_rep _ seen T T' (R as Atom (k, j0)) [[j]] = if j >= j0 andalso j < j0 + k then term_for_atom seen T T' (j - j0) k else raise REP ("Nitpick_Model.reconstruct_term.term_for_rep", [R]) - | term_for_rep seen (Type ("*", [T1, T2])) _ (Struct [R1, R2]) [js] = + | term_for_rep _ seen (Type (@{type_name "*"}, [T1, T2])) _ + (Struct [R1, R2]) [js] = let val arity1 = arity_of_rep R1 val (js1, js2) = chop arity1 js in list_comb (HOLogic.pair_const T1 T2, - map3 (fn T => term_for_rep seen T T) [T1, T2] [R1, R2] + map3 (fn T => term_for_rep true seen T T) [T1, T2] [R1, R2] [[js1], [js2]]) end - | term_for_rep seen (Type ("fun", [T1, T2])) T' (Vect (k, R')) [js] = + | term_for_rep _ seen (Type (@{type_name fun}, [T1, T2])) T' + (Vect (k, R')) [js] = term_for_vect seen k R' T1 T2 T' js - | term_for_rep seen (Type ("fun", [T1, T2])) T' (Func (R1, Formula Neut)) - jss = + | term_for_rep _ seen (Type (@{type_name fun}, [T1, T2])) T' + (Func (R1, Formula Neut)) jss = let val jss1 = all_combinations_for_rep R1 - val ts1 = map (term_for_rep seen T1 T1 R1 o single) jss1 + val ts1 = map (term_for_rep true seen T1 T1 R1 o single) jss1 val ts2 = - map (fn js => term_for_rep seen T2 T2 (Atom (2, 0)) + map (fn js => term_for_rep true seen T2 T2 (Atom (2, 0)) [[int_from_bool (member (op =) jss js)]]) jss1 in make_fun false T1 T2 T' ts1 ts2 end - | term_for_rep seen (Type ("fun", [T1, T2])) T' (Func (R1, R2)) jss = + | term_for_rep maybe_opt seen (Type (@{type_name fun}, [T1, T2])) T' + (Func (R1, R2)) jss = let val arity1 = arity_of_rep R1 val jss1 = all_combinations_for_rep R1 - val ts1 = map (term_for_rep seen T1 T1 R1 o single) jss1 + val ts1 = map (term_for_rep false seen T1 T1 R1 o single) jss1 val grouped_jss2 = AList.group (op =) (map (chop arity1) jss) - val ts2 = map (term_for_rep seen T2 T2 R2 o the_default [] + val ts2 = map (term_for_rep false seen T2 T2 R2 o the_default [] o AList.lookup (op =) grouped_jss2) jss1 - in make_fun true T1 T2 T' ts1 ts2 end - | term_for_rep seen T T' (Opt R) jss = - if null jss then Const (unknown, T) else term_for_rep seen T T' R jss - | term_for_rep _ T _ R jss = + in make_fun maybe_opt T1 T2 T' ts1 ts2 end + | term_for_rep _ seen T T' (Opt R) jss = + if null jss then Const (unknown, T) + else term_for_rep true seen T T' R jss + | 
term_for_rep _ _ T _ R jss = raise ARG ("Nitpick_Model.reconstruct_term.term_for_rep", Refute.string_of_typ T ^ " " ^ string_for_rep R ^ " " ^ string_of_int (length jss)) - in polish_funs [] o unarize_and_unbox_term oooo term_for_rep [] end + in polish_funs [] o unarize_unbox_etc_term oooo term_for_rep true [] end (* atom_pool -> scope -> nut list -> nut NameTable.table -> KK.raw_bound list -> nut -> term *) @@ -689,7 +702,7 @@ val (oper, (t1, T'), T) = case name of FreeName (s, T, _) => - let val t = Free (s, unarize_unbox_and_uniterize_type T) in + let val t = Free (s, uniterize_unarize_unbox_etc_type T) in ("=", (t, format_term_type thy def_table formats t), T) end | ConstName (s, T, _) => @@ -710,12 +723,17 @@ (* dtype_spec -> Pretty.T *) fun pretty_for_datatype ({typ, card, complete, ...} : dtype_spec) = Pretty.block (Pretty.breaks - [Syntax.pretty_typ ctxt (unarize_unbox_and_uniterize_type typ), - Pretty.str "=", - Pretty.enum "," "{" "}" - (map (Syntax.pretty_term ctxt) (all_values_of_type card typ) @ - (if fun_from_pair complete false then [] - else [Pretty.str unrep]))]) + (Syntax.pretty_typ ctxt (uniterize_unarize_unbox_etc_type typ) :: + (case typ of + Type (@{type_name fin_fun}, _) => [Pretty.str "[finite]"] + | Type (@{type_name fun_box}, _) => [Pretty.str "[boxed]"] + | Type (@{type_name pair_box}, _) => [Pretty.str "[boxed]"] + | _ => []) @ + [Pretty.str "=", + Pretty.enum "," "{" "}" + (map (Syntax.pretty_term ctxt) (all_values_of_type card typ) @ + (if fun_from_pair complete false then [] + else [Pretty.str unrep]))])) (* typ -> dtype_spec list *) fun integer_datatype T = [{typ = T, card = card_of_type card_assigns T, co = false, @@ -752,13 +770,14 @@ val (eval_names, noneval_nonskolem_nonsel_names) = List.partition (String.isPrefix eval_prefix o nickname_of) nonskolem_nonsel_names - ||> filter_out (curry (op =) @{const_name bisim_iterator_max} + ||> filter_out (member (op =) [@{const_name bisim}, + @{const_name bisim_iterator_max}] o nickname_of) val free_names = map (fn x as (s, T) => case filter (curry (op =) x o pairf nickname_of - (unarize_unbox_and_uniterize_type o type_of)) + (uniterize_unarize_unbox_etc_type o type_of)) free_names of [name] => name | [] => FreeName (s, T, Any) diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_mono.ML --- a/src/HOL/Tools/Nitpick/nitpick_mono.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_mono.ML Wed Mar 10 16:40:20 2010 +0100 @@ -11,6 +11,9 @@ val formulas_monotonic : hol_context -> bool -> typ -> term list * term list -> bool + val finitize_funs : + hol_context -> bool -> (typ option * bool option) list -> typ + -> term list * term list -> term list * term list end; structure Nitpick_Mono : NITPICK_MONO = @@ -42,14 +45,17 @@ {hol_ctxt: hol_context, binarize: bool, alpha_T: typ, + no_harmless: bool, max_fresh: int Unsynchronized.ref, - datatype_cache: ((string * typ list) * mtyp) list Unsynchronized.ref, - constr_cache: (styp * mtyp) list Unsynchronized.ref} + datatype_mcache: ((string * typ list) * mtyp) list Unsynchronized.ref, + constr_mcache: (styp * mtyp) list Unsynchronized.ref} -exception MTYPE of string * mtyp list +exception MTYPE of string * mtyp list * typ list +exception MTERM of string * mterm list (* string -> unit *) fun print_g (_ : string) = () +(* val print_g = tracing *) (* var -> string *) val string_for_var = signed_string_of_int @@ -70,7 +76,7 @@ (* sign_atom -> string *) fun string_for_sign_atom (S sn) = string_for_sign sn - | string_for_sign_atom (V j) = string_for_var 
j + | string_for_sign_atom (V x) = string_for_var x (* literal -> string *) fun string_for_literal (x, sn) = string_for_var x ^ " = " ^ string_for_sign sn @@ -83,7 +89,7 @@ | is_MRec _ = false (* mtyp -> mtyp * sign_atom * mtyp *) fun dest_MFun (MFun z) = z - | dest_MFun M = raise MTYPE ("Nitpick_Mono.dest_MFun", [M]) + | dest_MFun M = raise MTYPE ("Nitpick_Mono.dest_MFun", [M], []) val no_prec = 100 @@ -157,13 +163,18 @@ | mtype_of_mterm (MApp (m1, _)) = case mtype_of_mterm m1 of MFun (_, _, M12) => M12 - | M1 => raise MTYPE ("Nitpick_Mono.mtype_of_mterm", [M1]) + | M1 => raise MTYPE ("Nitpick_Mono.mtype_of_mterm", [M1], []) + +(* mterm -> mterm * mterm list *) +fun strip_mcomb (MApp (m1, m2)) = strip_mcomb m1 ||> (fn ms => append ms [m2]) + | strip_mcomb m = (m, []) -(* hol_context -> bool -> typ -> mdata *) -fun initial_mdata hol_ctxt binarize alpha_T = +(* hol_context -> bool -> bool -> typ -> mdata *) +fun initial_mdata hol_ctxt binarize no_harmless alpha_T = ({hol_ctxt = hol_ctxt, binarize = binarize, alpha_T = alpha_T, - max_fresh = Unsynchronized.ref 0, datatype_cache = Unsynchronized.ref [], - constr_cache = Unsynchronized.ref []} : mdata) + no_harmless = no_harmless, max_fresh = Unsynchronized.ref 0, + datatype_mcache = Unsynchronized.ref [], + constr_mcache = Unsynchronized.ref []} : mdata) (* typ -> typ -> bool *) fun could_exist_alpha_subtype alpha_T (T as Type (_, Ts)) = @@ -215,7 +226,7 @@ else repair_mtype cache (M :: seen) M (* ((string * typ list) * mtyp) list Unsynchronized.ref -> unit *) -fun repair_datatype_cache cache = +fun repair_datatype_mcache cache = let (* (string * typ list) * mtyp -> unit *) fun repair_one (z, M) = @@ -224,46 +235,67 @@ in List.app repair_one (rev (!cache)) end (* (typ * mtyp) list -> (styp * mtyp) list Unsynchronized.ref -> unit *) -fun repair_constr_cache dtype_cache constr_cache = +fun repair_constr_mcache dtype_cache constr_mcache = let (* styp * mtyp -> unit *) fun repair_one (x, M) = - Unsynchronized.change constr_cache + Unsynchronized.change constr_mcache (AList.update (op =) (x, repair_mtype dtype_cache [] M)) - in List.app repair_one (!constr_cache) end + in List.app repair_one (!constr_mcache) end -(* mdata -> typ -> typ -> mtyp * sign_atom * mtyp *) -fun fresh_mfun_for_fun_type (mdata as {max_fresh, ...} : mdata) T1 T2 = +(* typ -> bool *) +fun is_fin_fun_supported_type @{typ prop} = true + | is_fin_fun_supported_type @{typ bool} = true + | is_fin_fun_supported_type (Type (@{type_name option}, _)) = true + | is_fin_fun_supported_type _ = false +(* typ -> typ -> term -> term option *) +fun fin_fun_body _ _ (t as @{term False}) = SOME t + | fin_fun_body _ _ (t as Const (@{const_name None}, _)) = SOME t + | fin_fun_body dom_T ran_T + ((t0 as Const (@{const_name If}, _)) + $ (t1 as Const (@{const_name "op ="}, _) $ Bound 0 $ t1') + $ t2 $ t3) = + (if loose_bvar1 (t1', 0) then + NONE + else case fin_fun_body dom_T ran_T t3 of + NONE => NONE + | SOME t3 => + SOME (t0 $ (Const (@{const_name is_unknown}, dom_T --> bool_T) $ t1') + $ (Const (@{const_name unknown}, ran_T)) $ (t0 $ t1 $ t2 $ t3))) + | fin_fun_body _ _ _ = NONE + +(* mdata -> bool -> typ -> typ -> mtyp * sign_atom * mtyp *) +fun fresh_mfun_for_fun_type (mdata as {max_fresh, ...} : mdata) all_minus + T1 T2 = let - val M1 = fresh_mtype_for_type mdata T1 - val M2 = fresh_mtype_for_type mdata T2 - val a = if is_boolean_type (body_type T2) andalso - exists_alpha_sub_mtype_fresh M1 then + val M1 = fresh_mtype_for_type mdata all_minus T1 + val M2 = fresh_mtype_for_type mdata 
all_minus T2 + val a = if not all_minus andalso exists_alpha_sub_mtype_fresh M1 andalso + is_fin_fun_supported_type (body_type T2) then V (Unsynchronized.inc max_fresh) else S Minus in (M1, a, M2) end -(* mdata -> typ -> mtyp *) +(* mdata -> bool -> typ -> mtyp *) and fresh_mtype_for_type (mdata as {hol_ctxt as {thy, ...}, binarize, alpha_T, - datatype_cache, constr_cache, ...}) = + datatype_mcache, constr_mcache, ...}) + all_minus = let - (* typ -> typ -> mtyp *) - val do_fun = MFun oo fresh_mfun_for_fun_type mdata (* typ -> mtyp *) fun do_type T = if T = alpha_T then MAlpha else case T of - Type ("fun", [T1, T2]) => do_fun T1 T2 - | Type (@{type_name fun_box}, [T1, T2]) => do_fun T1 T2 - | Type ("*", [T1, T2]) => MPair (pairself do_type (T1, T2)) + Type (@{type_name fun}, [T1, T2]) => + MFun (fresh_mfun_for_fun_type mdata false T1 T2) + | Type (@{type_name "*"}, [T1, T2]) => MPair (pairself do_type (T1, T2)) | Type (z as (s, _)) => if could_exist_alpha_sub_mtype thy alpha_T T then - case AList.lookup (op =) (!datatype_cache) z of + case AList.lookup (op =) (!datatype_mcache) z of SOME M => M | NONE => let - val _ = Unsynchronized.change datatype_cache (cons (z, MRec z)) + val _ = Unsynchronized.change datatype_mcache (cons (z, MRec z)) val xs = binarized_and_boxed_datatype_constrs hol_ctxt binarize T val (all_Ms, constr_Ms) = fold_rev (fn (_, T') => fn (all_Ms, constr_Ms) => @@ -279,15 +311,15 @@ end) xs ([], []) val M = MType (s, all_Ms) - val _ = Unsynchronized.change datatype_cache + val _ = Unsynchronized.change datatype_mcache (AList.update (op =) (z, M)) - val _ = Unsynchronized.change constr_cache + val _ = Unsynchronized.change constr_mcache (append (xs ~~ constr_Ms)) in - if forall (not o is_MRec o snd) (!datatype_cache) then - (repair_datatype_cache datatype_cache; - repair_constr_cache (!datatype_cache) constr_cache; - AList.lookup (op =) (!datatype_cache) z |> the) + if forall (not o is_MRec o snd) (!datatype_mcache) then + (repair_datatype_mcache datatype_mcache; + repair_constr_mcache (!datatype_mcache) constr_mcache; + AList.lookup (op =) (!datatype_mcache) z |> the) else M end @@ -300,7 +332,7 @@ fun prodM_factors (MPair (M1, M2)) = maps prodM_factors [M1, M2] | prodM_factors M = [M] (* mtyp -> mtyp list * mtyp *) -fun curried_strip_mtype (MFun (M1, S Minus, M2)) = +fun curried_strip_mtype (MFun (M1, _, M2)) = curried_strip_mtype M2 |>> append (prodM_factors M1) | curried_strip_mtype M = ([], M) (* string -> mtyp -> mtyp *) @@ -311,36 +343,34 @@ end (* mdata -> styp -> mtyp *) -fun mtype_for_constr (mdata as {hol_ctxt = {thy, ...}, alpha_T, constr_cache, +fun mtype_for_constr (mdata as {hol_ctxt = {thy, ...}, alpha_T, constr_mcache, ...}) (x as (_, T)) = if could_exist_alpha_sub_mtype thy alpha_T T then - case AList.lookup (op =) (!constr_cache) x of + case AList.lookup (op =) (!constr_mcache) x of SOME M => M | NONE => if T = alpha_T then - let val M = fresh_mtype_for_type mdata T in - (Unsynchronized.change constr_cache (cons (x, M)); M) + let val M = fresh_mtype_for_type mdata false T in + (Unsynchronized.change constr_mcache (cons (x, M)); M) end else - (fresh_mtype_for_type mdata (body_type T); - AList.lookup (op =) (!constr_cache) x |> the) + (fresh_mtype_for_type mdata false (body_type T); + AList.lookup (op =) (!constr_mcache) x |> the) else - fresh_mtype_for_type mdata T + fresh_mtype_for_type mdata false T fun mtype_for_sel (mdata as {hol_ctxt, binarize, ...}) (x as (s, _)) = x |> binarized_and_boxed_constr_for_sel hol_ctxt binarize |> mtype_for_constr mdata |> 
sel_mtype_from_constr_mtype s +(* literal list -> sign_atom -> sign_atom *) +fun resolve_sign_atom lits (V x) = + x |> AList.lookup (op =) lits |> Option.map S |> the_default (V x) + | resolve_sign_atom _ a = a (* literal list -> mtyp -> mtyp *) -fun instantiate_mtype lits = +fun resolve_mtype lits = let (* mtyp -> mtyp *) fun aux MAlpha = MAlpha - | aux (MFun (M1, V x, M2)) = - let - val a = case AList.lookup (op =) lits x of - SOME sn => S sn - | NONE => V x - in MFun (aux M1, a, aux M2) end - | aux (MFun (M1, a, M2)) = MFun (aux M1, a, aux M2) + | aux (MFun (M1, a, M2)) = MFun (aux M1, resolve_sign_atom lits a, aux M2) | aux (MPair Mp) = MPair (pairself aux Mp) | aux (MType (s, Ms)) = MType (s, map aux Ms) | aux (MRec z) = MRec z @@ -417,11 +447,12 @@ accum = (accum |> fold (uncurry (do_mtype_comp cmp xs)) [(M11, M21), (M12, M22)] handle Library.UnequalLengths => - raise MTYPE ("Nitpick_Mono.do_mtype_comp", [M1, M2])) + raise MTYPE ("Nitpick_Mono.do_mtype_comp", [M1, M2], [])) | do_mtype_comp _ _ (MType _) (MType _) accum = accum (* no need to compare them thanks to the cache *) - | do_mtype_comp _ _ M1 M2 _ = - raise MTYPE ("Nitpick_Mono.do_mtype_comp", [M1, M2]) + | do_mtype_comp cmp _ M1 M2 _ = + raise MTYPE ("Nitpick_Mono.do_mtype_comp (" ^ string_for_comp_op cmp ^ ")", + [M1, M2], []) (* comp_op -> mtyp -> mtyp -> constraint_set -> constraint_set *) fun add_mtype_comp _ _ _ UnsolvableCSet = UnsolvableCSet @@ -471,20 +502,20 @@ | do_notin_mtype_fv sn sexp (MType (_, Ms)) accum = accum |> fold (do_notin_mtype_fv sn sexp) Ms | do_notin_mtype_fv _ _ M _ = - raise MTYPE ("Nitpick_Mono.do_notin_mtype_fv", [M]) + raise MTYPE ("Nitpick_Mono.do_notin_mtype_fv", [M], []) (* sign -> mtyp -> constraint_set -> constraint_set *) fun add_notin_mtype_fv _ _ UnsolvableCSet = UnsolvableCSet | add_notin_mtype_fv sn M (CSet (lits, comps, sexps)) = - (print_g ("*** Add " ^ string_for_mtype M ^ " is right-" ^ - (case sn of Minus => "unique" | Plus => "total") ^ "."); + (print_g ("*** Add " ^ string_for_mtype M ^ " is " ^ + (case sn of Minus => "concrete" | Plus => "complete") ^ "."); case do_notin_mtype_fv sn [] M (SOME (lits, sexps)) of NONE => (print_g "**** Unsolvable"; UnsolvableCSet) | SOME (lits, sexps) => CSet (lits, comps, sexps)) (* mtyp -> constraint_set -> constraint_set *) -val add_mtype_is_right_unique = add_notin_mtype_fv Minus -val add_mtype_is_right_total = add_notin_mtype_fv Plus +val add_mtype_is_concrete = add_notin_mtype_fv Minus +val add_mtype_is_complete = add_notin_mtype_fv Plus val bool_from_minus = true @@ -574,34 +605,35 @@ type mtype_schema = mtyp * constraint_set type mtype_context = - {bounds: mtyp list, + {bound_Ts: typ list, + bound_Ms: mtyp list, frees: (styp * mtyp) list, consts: (styp * mtyp) list} type accumulator = mtype_context * constraint_set -val initial_gamma = {bounds = [], frees = [], consts = []} +val initial_gamma = {bound_Ts = [], bound_Ms = [], frees = [], consts = []} val unsolvable_accum = (initial_gamma, UnsolvableCSet) -(* mtyp -> mtype_context -> mtype_context *) -fun push_bound M {bounds, frees, consts} = - {bounds = M :: bounds, frees = frees, consts = consts} +(* typ -> mtyp -> mtype_context -> mtype_context *) +fun push_bound T M {bound_Ts, bound_Ms, frees, consts} = + {bound_Ts = T :: bound_Ts, bound_Ms = M :: bound_Ms, frees = frees, + consts = consts} (* mtype_context -> mtype_context *) -fun pop_bound {bounds, frees, consts} = - {bounds = tl bounds, frees = frees, consts = consts} - handle List.Empty => initial_gamma +fun pop_bound {bound_Ts, 
bound_Ms, frees, consts} = + {bound_Ts = tl bound_Ts, bound_Ms = tl bound_Ms, frees = frees, + consts = consts} + handle List.Empty => initial_gamma (* FIXME: needed? *) (* mdata -> term -> accumulator -> mterm * accumulator *) fun consider_term (mdata as {hol_ctxt as {thy, ctxt, stds, fast_descrs, - def_table, ...}, + def_table, ...}, alpha_T, max_fresh, ...}) = let - (* typ -> typ -> mtyp * sign_atom * mtyp *) - val mfun_for = fresh_mfun_for_fun_type mdata (* typ -> mtyp *) - val mtype_for = fresh_mtype_for_type mdata + val mtype_for = fresh_mtype_for_type mdata false (* mtyp -> mtyp *) - fun pos_set_mtype_for_dom M = + fun plus_set_mtype_for_dom M = MFun (M, S (if exists_alpha_sub_mtype M then Plus else Minus), bool_M) (* typ -> accumulator -> mterm * accumulator *) fun do_all T (gamma, cset) = @@ -610,13 +642,13 @@ val body_M = mtype_for (body_type T) in (MFun (MFun (abs_M, S Minus, body_M), S Minus, body_M), - (gamma, cset |> add_mtype_is_right_total abs_M)) + (gamma, cset |> add_mtype_is_complete abs_M)) end fun do_equals T (gamma, cset) = let val M = mtype_for (domain_type T) in (MFun (M, S Minus, MFun (M, V (Unsynchronized.inc max_fresh), mtype_for (nth_range_type 2 T))), - (gamma, cset |> add_mtype_is_right_unique M)) + (gamma, cset |> add_mtype_is_concrete M)) end fun do_robust_set_operation T (gamma, cset) = let @@ -633,25 +665,25 @@ val set_T = domain_type T val set_M = mtype_for set_T (* typ -> mtyp *) - fun custom_mtype_for (T as Type ("fun", [T1, T2])) = + fun custom_mtype_for (T as Type (@{type_name fun}, [T1, T2])) = if T = set_T then set_M else MFun (custom_mtype_for T1, S Minus, custom_mtype_for T2) | custom_mtype_for T = mtype_for T in - (custom_mtype_for T, (gamma, cset |> add_mtype_is_right_unique set_M)) + (custom_mtype_for T, (gamma, cset |> add_mtype_is_concrete set_M)) end (* typ -> accumulator -> mtyp * accumulator *) fun do_pair_constr T accum = case mtype_for (nth_range_type 2 T) of M as MPair (a_M, b_M) => (MFun (a_M, S Minus, MFun (b_M, S Minus, M)), accum) - | M => raise MTYPE ("Nitpick_Mono.consider_term.do_pair_constr", [M]) + | M => raise MTYPE ("Nitpick_Mono.consider_term.do_pair_constr", [M], []) (* int -> typ -> accumulator -> mtyp * accumulator *) fun do_nth_pair_sel n T = case mtype_for (domain_type T) of M as MPair (a_M, b_M) => pair (MFun (M, S Minus, if n = 0 then a_M else b_M)) - | M => raise MTYPE ("Nitpick_Mono.consider_term.do_nth_pair_sel", [M]) + | M => raise MTYPE ("Nitpick_Mono.consider_term.do_nth_pair_sel", [M], []) (* mtyp * accumulator *) val mtype_unsolvable = (dummy_M, unsolvable_accum) (* term -> mterm * accumulator *) @@ -661,8 +693,9 @@ fun do_bounded_quantifier t0 abs_s abs_T connective_t bound_t body_t accum = let val abs_M = mtype_for abs_T - val (bound_m, accum) = accum |>> push_bound abs_M |> do_term bound_t - val expected_bound_M = pos_set_mtype_for_dom abs_M + val (bound_m, accum) = + accum |>> push_bound abs_T abs_M |> do_term bound_t + val expected_bound_M = plus_set_mtype_for_dom abs_M val (body_m, accum) = accum ||> add_mtypes_equal expected_bound_M (mtype_of_mterm bound_m) |> do_term body_t ||> apfst pop_bound @@ -678,7 +711,8 @@ end (* term -> accumulator -> mterm * accumulator *) and do_term t (_, UnsolvableCSet) = mterm_unsolvable t - | do_term t (accum as (gamma as {bounds, frees, consts}, cset)) = + | do_term t (accum as (gamma as {bound_Ts, bound_Ms, frees, consts}, + cset)) = (case t of Const (x as (s, T)) => (case AList.lookup (op =) consts x of @@ -714,12 +748,12 @@ let val set_T = domain_type (range_type T) 
val M1 = mtype_for (domain_type set_T) - val M1' = pos_set_mtype_for_dom M1 + val M1' = plus_set_mtype_for_dom M1 val M2 = mtype_for set_T val M3 = mtype_for set_T in (MFun (M1, S Minus, MFun (M2, S Minus, M3)), - (gamma, cset |> add_mtype_is_right_unique M1 + (gamma, cset |> add_mtype_is_concrete M1 |> add_is_sub_mtype M1' M3 |> add_is_sub_mtype M2 M3)) end @@ -733,15 +767,6 @@ val ba_set_M = range_type T |> mtype_for_set in (MFun (ab_set_M, S Minus, ba_set_M), accum) end | @{const_name trancl} => do_fragile_set_operation T accum - | @{const_name rtrancl} => - (print_g "*** rtrancl"; mtype_unsolvable) - | @{const_name finite} => - if is_finite_type hol_ctxt T then - let val M1 = mtype_for (domain_type (domain_type T)) in - (MFun (pos_set_mtype_for_dom M1, S Minus, bool_M), accum) - end - else - (print_g "*** finite"; mtype_unsolvable) | @{const_name rel_comp} => let val x = Unsynchronized.inc max_fresh @@ -761,8 +786,12 @@ val b_M = mtype_for (range_type (domain_type T)) in (MFun (MFun (a_M, S Minus, b_M), S Minus, - MFun (pos_set_mtype_for_dom a_M, S Minus, - pos_set_mtype_for_dom b_M)), accum) + MFun (plus_set_mtype_for_dom a_M, S Minus, + plus_set_mtype_for_dom b_M)), accum) + end + | @{const_name finite} => + let val M1 = mtype_for (domain_type (domain_type T)) in + (MFun (plus_set_mtype_for_dom M1, S Minus, bool_M), accum) end | @{const_name Sigma} => let @@ -781,18 +810,15 @@ (MFun (a_set_M, S Minus, MFun (a_to_b_set_M, S Minus, ab_set_M)), accum) end - | @{const_name Tha} => - let - val a_M = mtype_for (domain_type (domain_type T)) - val a_set_M = pos_set_mtype_for_dom a_M - in (MFun (a_set_M, S Minus, a_M), accum) end - | @{const_name FunBox} => - let val dom_M = mtype_for (domain_type T) in - (MFun (dom_M, S Minus, dom_M), accum) - end | _ => - if s = @{const_name minus_class.minus} andalso - is_set_type (domain_type T) then + if s = @{const_name safe_The} orelse + s = @{const_name safe_Eps} then + let + val a_set_M = mtype_for (domain_type T) + val a_M = dest_MFun a_set_M |> #1 + in (MFun (a_set_M, S Minus, a_M), accum) end + else if s = @{const_name minus_class.minus} andalso + is_set_type (domain_type T) then let val set_T = domain_type T val left_set_M = mtype_for set_T @@ -800,7 +826,7 @@ in (MFun (left_set_M, S Minus, MFun (right_set_M, S Minus, left_set_M)), - (gamma, cset |> add_mtype_is_right_unique right_set_M + (gamma, cset |> add_mtype_is_concrete right_set_M |> add_is_sub_mtype right_set_M left_set_M)) end else if s = @{const_name ord_class.less_eq} andalso @@ -811,50 +837,49 @@ is_set_type (domain_type T) then do_robust_set_operation T accum else if is_sel s then - if constr_name_for_sel_like s = @{const_name FunBox} then - let val dom_M = mtype_for (domain_type T) in - (MFun (dom_M, S Minus, dom_M), accum) - end - else - (mtype_for_sel mdata x, accum) + (mtype_for_sel mdata x, accum) else if is_constr thy stds x then (mtype_for_constr mdata x, accum) else if is_built_in_const thy stds fast_descrs x then - case def_of_const thy def_table x of - SOME t' => do_term t' accum |>> mtype_of_mterm - | NONE => (print_g ("*** built-in " ^ s); mtype_unsolvable) + (fresh_mtype_for_type mdata true T, accum) else let val M = mtype_for T in - (M, ({bounds = bounds, frees = frees, - consts = (x, M) :: consts}, cset)) + (M, ({bound_Ts = bound_Ts, bound_Ms = bound_Ms, + frees = frees, consts = (x, M) :: consts}, cset)) end) |>> curry MRaw t | Free (x as (_, T)) => (case AList.lookup (op =) frees x of SOME M => (M, accum) | NONE => let val M = mtype_for T in - (M, ({bounds = bounds, 
frees = (x, M) :: frees, - consts = consts}, cset)) + (M, ({bound_Ts = bound_Ts, bound_Ms = bound_Ms, + frees = (x, M) :: frees, consts = consts}, cset)) end) |>> curry MRaw t | Var _ => (print_g "*** Var"; mterm_unsolvable t) - | Bound j => (MRaw (t, nth bounds j), accum) - | Abs (s, T, t' as @{const False}) => - let val (M1, a, M2) = mfun_for T bool_T in - (MAbs (s, T, M1, a, MRaw (t', M2)), accum) - end + | Bound j => (MRaw (t, nth bound_Ms j), accum) | Abs (s, T, t') => - ((case t' of - t1' $ Bound 0 => - if not (loose_bvar1 (t1', 0)) then - do_term (incr_boundvars ~1 t1') accum - else - raise SAME () - | _ => raise SAME ()) - handle SAME () => - let - val M = mtype_for T - val (m', accum) = do_term t' (accum |>> push_bound M) - in (MAbs (s, T, M, S Minus, m'), accum |>> pop_bound) end) + (case fin_fun_body T (fastype_of1 (T :: bound_Ts, t')) t' of + SOME t' => + let + val M = mtype_for T + val a = V (Unsynchronized.inc max_fresh) + val (m', accum) = do_term t' (accum |>> push_bound T M) + in (MAbs (s, T, M, a, m'), accum |>> pop_bound) end + | NONE => + ((case t' of + t1' $ Bound 0 => + if not (loose_bvar1 (t1', 0)) then + do_term (incr_boundvars ~1 t1') accum + else + raise SAME () + | _ => raise SAME ()) + handle SAME () => + let + val M = mtype_for T + val (m', accum) = do_term t' (accum |>> push_bound T M) + in + (MAbs (s, T, M, S Minus, m'), accum |>> pop_bound) + end)) | (t0 as Const (@{const_name All}, _)) $ Abs (s', T', (t10 as @{const "op -->"}) $ (t11 $ Bound 0) $ t12) => do_bounded_quantifier t0 s' T' t10 t11 t12 accum @@ -872,6 +897,8 @@ (_, UnsolvableCSet) => mterm_unsolvable t | _ => let + val T11 = domain_type (fastype_of1 (bound_Ts, t1)) + val T2 = fastype_of1 (bound_Ts, t2) val M11 = mtype_of_mterm m1 |> dest_MFun |> #1 val M2 = mtype_of_mterm m2 in (MApp (m1, m2), accum ||> add_is_sub_mtype M2 M11) end @@ -880,25 +907,45 @@ string_for_mterm ctxt m)) in do_term end -(* mdata -> styp -> term -> term -> mterm * accumulator *) -fun consider_general_equals mdata (x as (_, T)) t1 t2 accum = +(* + accum |> (case a of + S Minus => accum + | S Plus => unsolvable_accum + | V x => do_literal (x, Minus) lits) +*) + +(* int -> mtyp -> accumulator -> accumulator *) +fun force_minus_funs 0 _ = I + | force_minus_funs n (M as MFun (M1, _, M2)) = + add_mtypes_equal M (MFun (M1, S Minus, M2)) + #> force_minus_funs (n - 1) M2 + | force_minus_funs _ M = + raise MTYPE ("Nitpick_Mono.force_minus_funs", [M], []) +(* mdata -> bool -> styp -> term -> term -> mterm * accumulator *) +fun consider_general_equals mdata def (x as (_, T)) t1 t2 accum = let val (m1, accum) = consider_term mdata t1 accum val (m2, accum) = consider_term mdata t2 accum val M1 = mtype_of_mterm m1 val M2 = mtype_of_mterm m2 - val body_M = fresh_mtype_for_type mdata (nth_range_type 2 T) + val accum = accum ||> add_mtypes_equal M1 M2 + val body_M = fresh_mtype_for_type mdata false (nth_range_type 2 T) + val m = MApp (MApp (MRaw (Const x, + MFun (M1, S Minus, MFun (M2, S Minus, body_M))), m1), m2) in - (MApp (MApp (MRaw (Const x, - MFun (M1, S Minus, MFun (M2, S Minus, body_M))), m1), m2), - accum ||> add_mtypes_equal M1 M2) + (m, if def then + let val (head_m, arg_ms) = strip_mcomb m1 in + accum ||> force_minus_funs (length arg_ms) (mtype_of_mterm head_m) + end + else + accum) end (* mdata -> sign -> term -> accumulator -> mterm * accumulator *) fun consider_general_formula (mdata as {hol_ctxt = {ctxt, ...}, ...}) = let (* typ -> mtyp *) - val mtype_for = fresh_mtype_for_type mdata + val mtype_for = fresh_mtype_for_type 
mdata false (* term -> accumulator -> mterm * accumulator *) val do_term = consider_term mdata (* sign -> term -> accumulator -> mterm * accumulator *) @@ -912,11 +959,13 @@ val abs_M = mtype_for abs_T val side_cond = ((sn = Minus) = (quant_s = @{const_name Ex})) val (body_m, accum) = - accum ||> side_cond ? add_mtype_is_right_total abs_M - |>> push_bound abs_M |> do_formula sn body_t + accum ||> side_cond ? add_mtype_is_complete abs_M + |>> push_bound abs_T abs_M |> do_formula sn body_t val body_M = mtype_of_mterm body_m in - (MApp (MRaw (Const quant_x, MFun (abs_M, S Minus, body_M)), + (MApp (MRaw (Const quant_x, + MFun (MFun (abs_M, S Minus, body_M), S Minus, + body_M)), MAbs (abs_s, abs_T, abs_M, S Minus, body_m)), accum |>> pop_bound) end @@ -924,7 +973,7 @@ fun do_equals x t1 t2 = case sn of Plus => do_term t accum - | Minus => consider_general_equals mdata x t1 t2 accum + | Minus => consider_general_equals mdata false x t1 t2 accum in case t of Const (x as (@{const_name all}, _)) $ Abs (s1, T1, t1) => @@ -947,7 +996,7 @@ (case sn of Plus => do_quantifier x0 s1 T1 t1' | Minus => - (* ### do elsewhere *) + (* FIXME: Move elsewhere *) do_term (@{const Not} $ (HOLogic.eq_const (domain_type T0) $ t1 $ Abs (Name.uu, T1, @{const False}))) accum) @@ -981,28 +1030,29 @@ [@{const_name ord_class.less}, @{const_name ord_class.less_eq}] val bounteous_consts = [@{const_name bisim}] -(* term -> bool *) -fun is_harmless_axiom ({thy, stds, fast_descrs, ...} : hol_context) t = - Term.add_consts t [] - |> filter_out (is_built_in_const thy stds fast_descrs) - |> (forall (member (op =) harmless_consts o original_name o fst) - orf exists (member (op =) bounteous_consts o fst)) +(* mdata -> term -> bool *) +fun is_harmless_axiom ({no_harmless = true, ...} : mdata) _ = false + | is_harmless_axiom {hol_ctxt = {thy, stds, fast_descrs, ...}, ...} t = + Term.add_consts t [] + |> filter_out (is_built_in_const thy stds fast_descrs) + |> (forall (member (op =) harmless_consts o original_name o fst) orf + exists (member (op =) bounteous_consts o fst)) (* mdata -> term -> accumulator -> mterm * accumulator *) -fun consider_nondefinitional_axiom (mdata as {hol_ctxt, ...}) t = - if is_harmless_axiom hol_ctxt t then pair (MRaw (t, dummy_M)) +fun consider_nondefinitional_axiom mdata t = + if is_harmless_axiom mdata t then pair (MRaw (t, dummy_M)) else consider_general_formula mdata Plus t (* mdata -> term -> accumulator -> mterm * accumulator *) -fun consider_definitional_axiom (mdata as {hol_ctxt as {thy, ...}, ...}) t = +fun consider_definitional_axiom (mdata as {hol_ctxt = {thy, ...}, ...}) t = if not (is_constr_pattern_formula thy t) then consider_nondefinitional_axiom mdata t - else if is_harmless_axiom hol_ctxt t then + else if is_harmless_axiom mdata t then pair (MRaw (t, dummy_M)) else let (* typ -> mtyp *) - val mtype_for = fresh_mtype_for_type mdata + val mtype_for = fresh_mtype_for_type mdata false (* term -> accumulator -> mterm * accumulator *) val do_term = consider_term mdata (* term -> string -> typ -> term -> accumulator -> mterm * accumulator *) @@ -1010,10 +1060,11 @@ let val abs_M = mtype_for abs_T val (body_m, accum) = - accum |>> push_bound abs_M |> do_formula body_t + accum |>> push_bound abs_T abs_M |> do_formula body_t val body_M = mtype_of_mterm body_m in - (MApp (MRaw (quant_t, MFun (abs_M, S Minus, body_M)), + (MApp (MRaw (quant_t, + MFun (MFun (abs_M, S Minus, body_M), S Minus, body_M)), MAbs (abs_s, abs_T, abs_M, S Minus, body_m)), accum |>> pop_bound) end @@ -1039,16 +1090,20 @@ case t of 
(t0 as Const (@{const_name all}, _)) $ Abs (s1, T1, t1) => do_all t0 s1 T1 t1 accum - | @{const Trueprop} $ t1 => do_formula t1 accum + | @{const Trueprop} $ t1 => + let val (m1, accum) = do_formula t1 accum in + (MApp (MRaw (@{const Trueprop}, mtype_for (bool_T --> prop_T)), + m1), accum) + end | Const (x as (@{const_name "=="}, _)) $ t1 $ t2 => - consider_general_equals mdata x t1 t2 accum + consider_general_equals mdata true x t1 t2 accum | (t0 as @{const "==>"}) $ t1 $ t2 => do_implies t0 t1 t2 accum | (t0 as @{const Pure.conjunction}) $ t1 $ t2 => do_conjunction t0 t1 t2 accum | (t0 as Const (@{const_name All}, _)) $ Abs (s0, T1, t1) => do_all t0 s0 T1 t1 accum | Const (x as (@{const_name "op ="}, _)) $ t1 $ t2 => - consider_general_equals mdata x t1 t2 accum + consider_general_equals mdata true x t1 t2 accum | (t0 as @{const "op &"}) $ t1 $ t2 => do_conjunction t0 t1 t2 accum | (t0 as @{const "op -->"}) $ t1 $ t2 => do_implies t0 t1 t2 accum | _ => raise TERM ("Nitpick_Mono.consider_definitional_axiom.\ @@ -1057,8 +1112,7 @@ (* Proof.context -> literal list -> term -> mtyp -> string *) fun string_for_mtype_of_term ctxt lits t M = - Syntax.string_of_term ctxt t ^ " : " ^ - string_for_mtype (instantiate_mtype lits M) + Syntax.string_of_term ctxt t ^ " : " ^ string_for_mtype (resolve_mtype lits M) (* theory -> literal list -> mtype_context -> unit *) fun print_mtype_context ctxt lits ({frees, consts, ...} : mtype_context) = @@ -1067,31 +1121,153 @@ |> cat_lines |> print_g (* ('a -> 'b -> 'c * 'd) -> 'a -> 'c list * 'b -> 'c list * 'd *) -fun gather f t (ms, accum) = +fun amass f t (ms, accum) = let val (m, accum) = f t accum in (m :: ms, accum) end -(* hol_context -> bool -> typ -> term list * term list -> bool *) -fun formulas_monotonic (hol_ctxt as {ctxt, ...}) binarize alpha_T - (nondef_ts, def_ts) = +(* string -> bool -> hol_context -> bool -> typ -> term list * term list + -> (literal list * (mterm list * mterm list) * (styp * mtyp) list) option *) +fun infer which no_harmless (hol_ctxt as {ctxt, ...}) binarize alpha_T + (nondef_ts, def_ts) = let - val _ = print_g ("****** Monotonicity analysis: " ^ + val _ = print_g ("****** " ^ which ^ " analysis: " ^ string_for_mtype MAlpha ^ " is " ^ Syntax.string_of_typ ctxt alpha_T) - val mdata as {max_fresh, constr_cache, ...} = - initial_mdata hol_ctxt binarize alpha_T - + val mdata as {max_fresh, constr_mcache, ...} = + initial_mdata hol_ctxt binarize no_harmless alpha_T val accum = (initial_gamma, slack) val (nondef_ms, accum) = - ([], accum) |> gather (consider_general_formula mdata Plus) (hd nondef_ts) - |> fold (gather (consider_nondefinitional_axiom mdata)) + ([], accum) |> amass (consider_general_formula mdata Plus) (hd nondef_ts) + |> fold (amass (consider_nondefinitional_axiom mdata)) (tl nondef_ts) val (def_ms, (gamma, cset)) = - ([], accum) |> fold (gather (consider_definitional_axiom mdata)) def_ts + ([], accum) |> fold (amass (consider_definitional_axiom mdata)) def_ts in case solve (!max_fresh) cset of - SOME lits => (print_mtype_context ctxt lits gamma; true) - | _ => false + SOME lits => (print_mtype_context ctxt lits gamma; + SOME (lits, (nondef_ms, def_ms), !constr_mcache)) + | _ => NONE end - handle MTYPE (loc, Ms) => raise BAD (loc, commas (map string_for_mtype Ms)) + handle MTYPE (loc, Ms, Ts) => + raise BAD (loc, commas (map string_for_mtype Ms @ + map (Syntax.string_of_typ ctxt) Ts)) + | MTERM (loc, ms) => + raise BAD (loc, commas (map (string_for_mterm ctxt) ms)) + +(* hol_context -> bool -> typ -> term list * term 
list -> bool *) +val formulas_monotonic = is_some oooo infer "Monotonicity" false + +(* typ -> typ -> styp *) +fun fin_fun_constr T1 T2 = + (@{const_name FinFun}, (T1 --> T2) --> Type (@{type_name fin_fun}, [T1, T2])) + +(* hol_context -> bool -> (typ option * bool option) list -> typ + -> term list * term list -> term list * term list *) +fun finitize_funs (hol_ctxt as {thy, stds, fast_descrs, constr_cache, ...}) + binarize finitizes alpha_T tsp = + case infer "Finiteness" true hol_ctxt binarize alpha_T tsp of + SOME (lits, msp, constr_mtypes) => + let + (* typ -> sign_atom -> bool *) + fun should_finitize T a = + case triple_lookup (type_match thy) finitizes T of + SOME (SOME false) => false + | _ => resolve_sign_atom lits a = S Plus + (* typ -> mtyp -> typ *) + fun type_from_mtype T M = + case (M, T) of + (MAlpha, _) => T + | (MFun (M1, a, M2), Type (@{type_name fun}, Ts)) => + Type (if should_finitize T a then @{type_name fin_fun} + else @{type_name fun}, map2 type_from_mtype Ts [M1, M2]) + | (MPair (M1, M2), Type (@{type_name "*"}, Ts)) => + Type (@{type_name "*"}, map2 type_from_mtype Ts [M1, M2]) + | (MType _, _) => T + | _ => raise MTYPE ("Nitpick_Mono.finitize_funs.type_from_mtype", + [M], [T]) + (* styp -> styp *) + fun finitize_constr (x as (s, T)) = + (s, case AList.lookup (op =) constr_mtypes x of + SOME M => type_from_mtype T M + | NONE => T) + (* typ list -> mterm -> term *) + fun term_from_mterm Ts m = + case m of + MRaw (t, M) => + let + val T = fastype_of1 (Ts, t) + val T' = type_from_mtype T M + in + case t of + Const (x as (s, _)) => + if s = @{const_name insert} then + case nth_range_type 2 T' of + set_T' as Type (@{type_name fin_fun}, [elem_T', _]) => + Abs (Name.uu, elem_T', Abs (Name.uu, set_T', + Const (@{const_name If}, + bool_T --> set_T' --> set_T' --> set_T') + $ (Const (@{const_name is_unknown}, elem_T' --> bool_T) + $ Bound 1) + $ (Const (@{const_name unknown}, set_T')) + $ (coerce_term hol_ctxt Ts T' T (Const x) + $ Bound 1 $ Bound 0))) + | _ => Const (s, T') + else if s = @{const_name finite} then + case domain_type T' of + set_T' as Type (@{type_name fin_fun}, _) => + Abs (Name.uu, set_T', @{const True}) + | _ => Const (s, T') + else if s = @{const_name "=="} orelse + s = @{const_name "op ="} then + Const (s, T') + else if is_built_in_const thy stds fast_descrs x then + coerce_term hol_ctxt Ts T' T t + else if is_constr thy stds x then + Const (finitize_constr x) + else if is_sel s then + let + val n = sel_no_from_name s + val x' = x |> binarized_and_boxed_constr_for_sel hol_ctxt + binarize + |> finitize_constr + val x'' = binarized_and_boxed_nth_sel_for_constr hol_ctxt + binarize x' n + in Const x'' end + else + Const (s, T') + | Free (s, T) => Free (s, type_from_mtype T M) + | Bound _ => t + | _ => raise MTERM ("Nitpick_Mono.finitize_funs.term_from_mterm", + [m]) + end + | MApp (m1, m2) => + let + val (t1, t2) = pairself (term_from_mterm Ts) (m1, m2) + val (T1, T2) = pairself (curry fastype_of1 Ts) (t1, t2) + val (t1', T2') = + case T1 of + Type (s, [T11, T12]) => + (if s = @{type_name fin_fun} then + select_nth_constr_arg thy stds (fin_fun_constr T11 T12) t1 0 + (T11 --> T12) + else + t1, T11) + | _ => raise TYPE ("Nitpick_Mono.finitize_funs.term_from_mterm", + [T1], []) + in betapply (t1', coerce_term hol_ctxt Ts T2' T2 t2) end + | MAbs (s, T, M, a, m') => + let + val T = type_from_mtype T M + val t' = term_from_mterm (T :: Ts) m' + val T' = fastype_of1 (T :: Ts, t') + in + Abs (s, T, t') + |> should_finitize (T --> T') a + ? 
construct_value thy stds (fin_fun_constr T T') o single + end + in + Unsynchronized.change constr_cache (map (apsnd (map finitize_constr))); + pairself (map (term_from_mterm [])) msp + end + | NONE => tsp end; diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_nut.ML --- a/src/HOL/Tools/Nitpick/nitpick_nut.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_nut.ML Wed Mar 10 16:40:20 2010 +0100 @@ -39,7 +39,7 @@ Closure | SingletonSet | IsUnknown | - Tha | + SafeThe | First | Second | Cast @@ -158,7 +158,7 @@ Closure | SingletonSet | IsUnknown | - Tha | + SafeThe | First | Second | Cast @@ -232,7 +232,7 @@ | string_for_op1 Closure = "Closure" | string_for_op1 SingletonSet = "SingletonSet" | string_for_op1 IsUnknown = "IsUnknown" - | string_for_op1 Tha = "Tha" + | string_for_op1 SafeThe = "SafeThe" | string_for_op1 First = "First" | string_for_op1 Second = "Second" | string_for_op1 Cast = "Cast" @@ -287,8 +287,9 @@ "Tuple " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ implode (map sub us) | Construct (us', T, R, us) => - "Construct " ^ implode (map sub us') ^ Syntax.string_of_typ ctxt T ^ - " " ^ string_for_rep R ^ " " ^ implode (map sub us) + "Construct " ^ implode (map sub us') ^ " " ^ + Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ + implode (map sub us) | BoundName (j, T, R, nick) => "BoundName " ^ signed_string_of_int j ^ " " ^ Syntax.string_of_typ ctxt T ^ " " ^ string_for_rep R ^ " " ^ nick @@ -459,7 +460,8 @@ (res_T, Const (@{const_name snd}, T --> res_T) $ t) end (* typ * term -> (typ * term) list *) -fun factorize (z as (Type ("*", _), _)) = maps factorize [mk_fst z, mk_snd z] +fun factorize (z as (Type (@{type_name "*"}, _), _)) = + maps factorize [mk_fst z, mk_snd z] | factorize z = [z] (* hol_context -> op2 -> term -> nut *) @@ -514,8 +516,15 @@ val t1 = list_comb (t0, ts') val T1 = fastype_of1 (Ts, t1) in Op2 (Apply, range_type T1, Any, sub t1, sub t2) end - (* styp -> term list -> term *) - fun construct (x as (_, T)) ts = + (* op2 -> string -> styp -> term -> nut *) + fun do_description_operator oper undef_s (x as (_, T)) t1 = + if fast_descrs then + Op2 (oper, range_type T, Any, sub t1, + sub (Const (undef_s, range_type T))) + else + do_apply (Const x) [t1] + (* styp -> term list -> nut *) + fun do_construct (x as (_, T)) ts = case num_binder_types T - length ts of 0 => Construct (map ((fn (s', T') => ConstName (s', T', Any)) o nth_sel_for_constr x) @@ -550,18 +559,10 @@ do_quantifier Exist s T t1 | (t0 as Const (@{const_name Ex}, _), [t1]) => sub' (t0 $ eta_expand Ts t1 1) - | (t0 as Const (@{const_name The}, T), [t1]) => - if fast_descrs then - Op2 (The, range_type T, Any, sub t1, - sub (Const (@{const_name undefined_fast_The}, range_type T))) - else - do_apply t0 [t1] - | (t0 as Const (@{const_name Eps}, T), [t1]) => - if fast_descrs then - Op2 (Eps, range_type T, Any, sub t1, - sub (Const (@{const_name undefined_fast_Eps}, range_type T))) - else - do_apply t0 [t1] + | (Const (x as (@{const_name The}, _)), [t1]) => + do_description_operator The @{const_name undefined_fast_The} x t1 + | (Const (x as (@{const_name Eps}, _)), [t1]) => + do_description_operator Eps @{const_name undefined_fast_Eps} x t1 | (Const (@{const_name "op ="}, T), [t1, t2]) => sub_equals T t1 t2 | (Const (@{const_name "op &"}, _), [t1, t2]) => Op2 (And, bool_T, Any, sub' t1, sub' t2) @@ -603,7 +604,7 @@ Op2 (Image, nth_range_type 2 T, Any, sub t1, sub t2) | (Const (x as (s as @{const_name Suc}, T)), []) => if is_built_in_const thy stds false 
x then Cst (Suc, T, Any) - else if is_constr thy stds x then construct x [] + else if is_constr thy stds x then do_construct x [] else ConstName (s, T, Any) | (Const (@{const_name finite}, T), [t1]) => (if is_finite_type hol_ctxt (domain_type T) then @@ -614,7 +615,7 @@ | (Const (@{const_name nat}, T), []) => Cst (IntToNat, T, Any) | (Const (x as (s as @{const_name zero_class.zero}, T)), []) => if is_built_in_const thy stds false x then Cst (Num 0, T, Any) - else if is_constr thy stds x then construct x [] + else if is_constr thy stds x then do_construct x [] else ConstName (s, T, Any) | (Const (x as (s as @{const_name one_class.one}, T)), []) => if is_built_in_const thy stds false x then Cst (Num 1, T, Any) @@ -623,7 +624,8 @@ if is_built_in_const thy stds false x then Cst (Add, T, Any) else ConstName (s, T, Any) | (Const (@{const_name minus_class.minus}, - Type ("fun", [T1 as Type ("fun", [_, @{typ bool}]), _])), + Type (@{type_name fun}, + [T1 as Type (@{type_name fun}, [_, @{typ bool}]), _])), [t1, t2]) => Op2 (SetDifference, T1, Any, sub t1, sub t2) | (Const (x as (s as @{const_name minus_class.minus}, T)), []) => @@ -642,7 +644,8 @@ else do_apply t0 ts | (Const (@{const_name ord_class.less_eq}, - Type ("fun", [Type ("fun", [_, @{typ bool}]), _])), + Type (@{type_name fun}, + [Type (@{type_name fun}, [_, @{typ bool}]), _])), [t1, t2]) => Op2 (Subset, bool_T, Any, sub t1, sub t2) (* FIXME: find out if this case is necessary *) @@ -666,8 +669,11 @@ | (Const (@{const_name unknown}, T), []) => Cst (Unknown, T, Any) | (Const (@{const_name is_unknown}, _), [t1]) => Op1 (IsUnknown, bool_T, Any, sub t1) - | (Const (@{const_name Tha}, Type ("fun", [_, T2])), [t1]) => - Op1 (Tha, T2, Any, sub t1) + | (Const (@{const_name safe_The}, + Type (@{type_name fun}, [_, T2])), [t1]) => + Op1 (SafeThe, T2, Any, sub t1) + | (Const (x as (@{const_name safe_Eps}, _)), [t1]) => + do_description_operator Eps @{const_name undefined_fast_Eps} x t1 | (Const (@{const_name Frac}, T), []) => Cst (Fracs, T, Any) | (Const (@{const_name norm_frac}, T), []) => Cst (NormFrac, T, Any) | (Const (@{const_name of_nat}, T as @{typ "nat => int"}), []) => @@ -676,16 +682,18 @@ T as @{typ "unsigned_bit word => signed_bit word"}), []) => Cst (NatToInt, T, Any) | (Const (@{const_name semilattice_inf_class.inf}, - Type ("fun", [T1 as Type ("fun", [_, @{typ bool}]), _])), + Type (@{type_name fun}, + [T1 as Type (@{type_name fun}, [_, @{typ bool}]), _])), [t1, t2]) => Op2 (Intersect, T1, Any, sub t1, sub t2) | (Const (@{const_name semilattice_sup_class.sup}, - Type ("fun", [T1 as Type ("fun", [_, @{typ bool}]), _])), + Type (@{type_name fun}, + [T1 as Type (@{type_name fun}, [_, @{typ bool}]), _])), [t1, t2]) => Op2 (Union, T1, Any, sub t1, sub t2) | (t0 as Const (x as (s, T)), ts) => if is_constr thy stds x then - construct x ts + do_construct x ts else if String.isPrefix numeral_prefix s then Cst (Num (the (Int.fromString (unprefix numeral_prefix s))), T, Any) else @@ -956,7 +964,7 @@ if ok then Cst (Num j, T, Atom (k, j0)) else Cst (Unrep, T, Opt (Atom (k, j0))) end) - | Cst (Suc, T as Type ("fun", [T1, _]), _) => + | Cst (Suc, T as Type (@{type_name fun}, [T1, _]), _) => let val R = Atom (spec_of_type scope T1) in Cst (Suc, T, Func (R, Opt R)) end @@ -1306,12 +1314,13 @@ in (w :: ws, pool, NameTable.update (v, w) table) end (* typ -> rep -> nut list -> nut *) -fun shape_tuple (T as Type ("*", [T1, T2])) (R as Struct [R1, R2]) us = +fun shape_tuple (T as Type (@{type_name "*"}, [T1, T2])) (R as Struct [R1, R2]) + us = let val 
arity1 = arity_of_rep R1 in Tuple (T, R, [shape_tuple T1 R1 (List.take (us, arity1)), shape_tuple T2 R2 (List.drop (us, arity1))]) end - | shape_tuple (T as Type ("fun", [_, T2])) (R as Vect (k, R')) us = + | shape_tuple (T as Type (@{type_name fun}, [_, T2])) (R as Vect (k, R')) us = Tuple (T, R, map (shape_tuple T2 R') (batch_list (length us div k) us)) | shape_tuple T _ [u] = if type_of u = T then u else raise NUT ("Nitpick_Nut.shape_tuple", [u]) diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_preproc.ML --- a/src/HOL/Tools/Nitpick/nitpick_preproc.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_preproc.ML Wed Mar 10 16:40:20 2010 +0100 @@ -9,7 +9,9 @@ sig type hol_context = Nitpick_HOL.hol_context val preprocess_term : - hol_context -> term -> term list * term list * bool * bool * bool + hol_context -> (typ option * bool option) list + -> (typ option * bool option) list -> term + -> term list * term list * bool * bool * bool end structure Nitpick_Preproc : NITPICK_PREPROC = @@ -17,6 +19,7 @@ open Nitpick_Util open Nitpick_HOL +open Nitpick_Mono (* polarity -> string -> bool *) fun is_positive_existential polar quant_s = @@ -115,88 +118,15 @@ (** Boxing **) -(* hol_context -> typ -> term -> term *) -fun constr_expand (hol_ctxt as {thy, stds, ...}) T t = - (case head_of t of - Const x => if is_constr_like thy x then t else raise SAME () - | _ => raise SAME ()) - handle SAME () => - let - val x' as (_, T') = - if is_pair_type T then - let val (T1, T2) = HOLogic.dest_prodT T in - (@{const_name Pair}, T1 --> T2 --> T) - end - else - datatype_constrs hol_ctxt T |> hd - val arg_Ts = binder_types T' - in - list_comb (Const x', map2 (select_nth_constr_arg thy stds x' t) - (index_seq 0 (length arg_Ts)) arg_Ts) - end - -(* (term -> term) -> int -> term -> term *) -fun coerce_bound_no f j t = - case t of - t1 $ t2 => coerce_bound_no f j t1 $ coerce_bound_no f j t2 - | Abs (s, T, t') => Abs (s, T, coerce_bound_no f (j + 1) t') - | Bound j' => if j' = j then f t else t - | _ => t -(* hol_context -> typ -> typ -> term -> term *) -fun coerce_bound_0_in_term hol_ctxt new_T old_T = - old_T <> new_T ? coerce_bound_no (coerce_term hol_ctxt [new_T] old_T new_T) 0 -(* hol_context -> typ list -> typ -> typ -> term -> term *) -and coerce_term (hol_ctxt as {thy, stds, fast_descrs, ...}) Ts new_T old_T t = - if old_T = new_T then - t - else - case (new_T, old_T) of - (Type (new_s, new_Ts as [new_T1, new_T2]), - Type ("fun", [old_T1, old_T2])) => - (case eta_expand Ts t 1 of - Abs (s, _, t') => - Abs (s, new_T1, - t' |> coerce_bound_0_in_term hol_ctxt new_T1 old_T1 - |> coerce_term hol_ctxt (new_T1 :: Ts) new_T2 old_T2) - |> Envir.eta_contract - |> new_s <> "fun" - ? 
construct_value thy stds - (@{const_name FunBox}, Type ("fun", new_Ts) --> new_T) - o single - | t' => raise TERM ("Nitpick_Preproc.coerce_term", [t'])) - | (Type (new_s, new_Ts as [new_T1, new_T2]), - Type (old_s, old_Ts as [old_T1, old_T2])) => - if old_s = @{type_name fun_box} orelse - old_s = @{type_name pair_box} orelse old_s = "*" then - case constr_expand hol_ctxt old_T t of - Const (old_s, _) $ t1 => - if new_s = "fun" then - coerce_term hol_ctxt Ts new_T (Type ("fun", old_Ts)) t1 - else - construct_value thy stds - (old_s, Type ("fun", new_Ts) --> new_T) - [coerce_term hol_ctxt Ts (Type ("fun", new_Ts)) - (Type ("fun", old_Ts)) t1] - | Const _ $ t1 $ t2 => - construct_value thy stds - (if new_s = "*" then @{const_name Pair} - else @{const_name PairBox}, new_Ts ---> new_T) - [coerce_term hol_ctxt Ts new_T1 old_T1 t1, - coerce_term hol_ctxt Ts new_T2 old_T2 t2] - | t' => raise TERM ("Nitpick_Preproc.coerce_term", [t']) - else - raise TYPE ("Nitpick_Preproc.coerce_term", [new_T, old_T], [t]) - | _ => raise TYPE ("Nitpick_Preproc.coerce_term", [new_T, old_T], [t]) - (* hol_context -> bool -> term -> term *) fun box_fun_and_pair_in_term (hol_ctxt as {thy, stds, fast_descrs, ...}) def orig_t = let (* typ -> typ *) - fun box_relational_operator_type (Type ("fun", Ts)) = - Type ("fun", map box_relational_operator_type Ts) - | box_relational_operator_type (Type ("*", Ts)) = - Type ("*", map (box_type hol_ctxt InPair) Ts) + fun box_relational_operator_type (Type (@{type_name fun}, Ts)) = + Type (@{type_name fun}, map box_relational_operator_type Ts) + | box_relational_operator_type (Type (@{type_name "*"}, Ts)) = + Type (@{type_name "*"}, map (box_type hol_ctxt InPair) Ts) | box_relational_operator_type T = T (* indexname * typ -> typ * term -> typ option list -> typ option list *) fun add_boxed_types_for_var (z as (_, T)) (T', t') = @@ -291,7 +221,8 @@ $ do_term new_Ts old_Ts polar t2 | Const (s as @{const_name The}, T) => do_description_operator s T | Const (s as @{const_name Eps}, T) => do_description_operator s T - | Const (s as @{const_name Tha}, T) => do_description_operator s T + | Const (s as @{const_name safe_The}, T) => do_description_operator s T + | Const (s as @{const_name safe_Eps}, T) => do_description_operator s T | Const (x as (s, T)) => Const (s, if s = @{const_name converse} orelse s = @{const_name trancl} then @@ -320,12 +251,13 @@ val T2 = fastype_of1 (new_Ts, t2) val t2 = coerce_term hol_ctxt new_Ts (hd Ts1) T2 t2 in - betapply (if s1 = "fun" then + betapply (if s1 = @{type_name fun} then t1 else select_nth_constr_arg thy stds - (@{const_name FunBox}, Type ("fun", Ts1) --> T1) t1 0 - (Type ("fun", Ts1)), t2) + (@{const_name FunBox}, + Type (@{type_name fun}, Ts1) --> T1) t1 0 + (Type (@{type_name fun}, Ts1)), t2) end | t1 $ t2 => let @@ -336,12 +268,13 @@ val T2 = fastype_of1 (new_Ts, t2) val t2 = coerce_term hol_ctxt new_Ts (hd Ts1) T2 t2 in - betapply (if s1 = "fun" then + betapply (if s1 = @{type_name fun} then t1 else select_nth_constr_arg thy stds - (@{const_name FunBox}, Type ("fun", Ts1) --> T1) t1 0 - (Type ("fun", Ts1)), t2) + (@{const_name FunBox}, + Type (@{type_name fun}, Ts1) --> T1) t1 0 + (Type (@{type_name fun}, Ts1)), t2) end | Free (s, T) => Free (s, box_type hol_ctxt InExpr T) | Var (z as (x, T)) => @@ -597,7 +530,7 @@ if pass1 then do_eq false t2 t1 else raise SAME () else case t1 of Bound j' => if j' = j then SOME (t2, ts @ seen) else raise SAME () - | Const (s, Type ("fun", [T1, T2])) $ Bound j' => + | Const (s, Type (@{type_name fun}, [T1, T2])) 
$ Bound j' => if j' = j andalso s = nth_sel_name_for_constr_name @{const_name FunBox} 0 then SOME (construct_value thy stds (@{const_name FunBox}, T2 --> T1) @@ -1141,8 +1074,8 @@ (* int -> typ -> accumulator -> accumulator *) and add_axioms_for_type depth T = case T of - Type ("fun", Ts) => fold (add_axioms_for_type depth) Ts - | Type ("*", Ts) => fold (add_axioms_for_type depth) Ts + Type (@{type_name fun}, Ts) => fold (add_axioms_for_type depth) Ts + | Type (@{type_name "*"}, Ts) => fold (add_axioms_for_type depth) Ts | @{typ prop} => I | @{typ bool} => I | @{typ unit} => I @@ -1399,11 +1332,31 @@ | _ => t in aux "" [] [] end +(** Inference of finite functions **) + +(* hol_context -> bool -> (typ option * bool option) list + -> (typ option * bool option) list -> term list * term list + -> term list * term list *) +fun finitize_all_types_of_funs (hol_ctxt as {thy, ...}) binarize finitizes monos + (nondef_ts, def_ts) = + let + val Ts = ground_types_in_terms hol_ctxt binarize (nondef_ts @ def_ts) + |> filter_out (fn Type (@{type_name fun_box}, _) => true + | @{typ signed_bit} => true + | @{typ unsigned_bit} => true + | T => is_small_finite_type hol_ctxt T orelse + triple_lookup (type_match thy) monos T + = SOME (SOME false)) + in fold (finitize_funs hol_ctxt binarize finitizes) Ts (nondef_ts, def_ts) end + (** Preprocessor entry point **) -(* hol_context -> term -> term list * term list * bool * bool * bool *) +(* hol_context -> (typ option * bool option) list + -> (typ option * bool option) list -> term + -> term list * term list * bool * bool * bool *) fun preprocess_term (hol_ctxt as {thy, stds, binary_ints, destroy_constrs, - boxes, skolemize, uncurry, ...}) t = + boxes, skolemize, uncurry, ...}) + finitizes monos t = let val skolem_depth = if skolemize then 4 else ~1 val (nondef_ts, def_ts, got_all_mono_user_axioms, no_poly_user_axioms) = @@ -1442,6 +1395,9 @@ #> Term.map_abs_vars shortest_name val nondef_ts = map (do_rest false) nondef_ts val def_ts = map (do_rest true) def_ts + val (nondef_ts, def_ts) = + finitize_all_types_of_funs hol_ctxt binarize finitizes monos + (nondef_ts, def_ts) in (nondef_ts, def_ts, got_all_mono_user_axioms, no_poly_user_axioms, binarize) end diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_rep.ML --- a/src/HOL/Tools/Nitpick/nitpick_rep.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_rep.ML Wed Mar 10 16:40:20 2010 +0100 @@ -158,9 +158,9 @@ | smart_range_rep _ _ _ (Func (_, R2)) = R2 | smart_range_rep ofs T ran_card (Opt R) = Opt (smart_range_rep ofs T ran_card R) - | smart_range_rep ofs (Type ("fun", [_, T2])) _ (Atom (1, _)) = + | smart_range_rep ofs (Type (@{type_name fun}, [_, T2])) _ (Atom (1, _)) = Atom (1, offset_of_type ofs T2) - | smart_range_rep ofs (Type ("fun", [_, T2])) ran_card (Atom _) = + | smart_range_rep ofs (Type (@{type_name fun}, [_, T2])) ran_card (Atom _) = Atom (ran_card (), offset_of_type ofs T2) | smart_range_rep _ _ _ R = raise REP ("Nitpick_Rep.smart_range_rep", [R]) @@ -183,10 +183,10 @@ | one_rep _ _ (Vect z) = Vect z | one_rep ofs T (Opt R) = one_rep ofs T R | one_rep ofs T R = Atom (card_of_rep R, offset_of_type ofs T) -fun optable_rep ofs (Type ("fun", [_, T2])) (Func (R1, R2)) = +fun optable_rep ofs (Type (@{type_name fun}, [_, T2])) (Func (R1, R2)) = Func (R1, optable_rep ofs T2 R2) | optable_rep ofs T R = one_rep ofs T R -fun opt_rep ofs (Type ("fun", [_, T2])) (Func (R1, R2)) = +fun opt_rep ofs (Type (@{type_name fun}, [_, T2])) (Func (R1, R2)) = Func (R1, opt_rep ofs T2 R2) | 
opt_rep ofs T R = Opt (optable_rep ofs T R) (* rep -> rep *) @@ -264,11 +264,11 @@ (* scope -> typ -> rep *) fun best_one_rep_for_type (scope as {card_assigns, ...} : scope) - (Type ("fun", [T1, T2])) = + (Type (@{type_name fun}, [T1, T2])) = (case best_one_rep_for_type scope T2 of Unit => Unit | R2 => Vect (card_of_type card_assigns T1, R2)) - | best_one_rep_for_type scope (Type ("*", [T1, T2])) = + | best_one_rep_for_type scope (Type (@{type_name "*"}, [T1, T2])) = (case (best_one_rep_for_type scope T1, best_one_rep_for_type scope T2) of (Unit, Unit) => Unit | (R1, R2) => Struct [R1, R2]) @@ -284,12 +284,12 @@ (* Datatypes are never represented by Unit, because it would confuse "nfa_transitions_for_ctor". *) (* scope -> typ -> rep *) -fun best_opt_set_rep_for_type scope (Type ("fun", [T1, T2])) = +fun best_opt_set_rep_for_type scope (Type (@{type_name fun}, [T1, T2])) = Func (best_one_rep_for_type scope T1, best_opt_set_rep_for_type scope T2) | best_opt_set_rep_for_type (scope as {ofs, ...}) T = opt_rep ofs T (best_one_rep_for_type scope T) fun best_non_opt_set_rep_for_type (scope as {ofs, ...}) - (Type ("fun", [T1, T2])) = + (Type (@{type_name fun}, [T1, T2])) = (case (best_one_rep_for_type scope T1, best_non_opt_set_rep_for_type scope T2) of (_, Unit) => Unit @@ -302,7 +302,7 @@ (if is_exact_type datatypes true T then best_non_opt_set_rep_for_type else best_opt_set_rep_for_type) scope T fun best_non_opt_symmetric_reps_for_fun_type (scope as {ofs, ...}) - (Type ("fun", [T1, T2])) = + (Type (@{type_name fun}, [T1, T2])) = (optable_rep ofs T1 (best_one_rep_for_type scope T1), optable_rep ofs T2 (best_one_rep_for_type scope T2)) | best_non_opt_symmetric_reps_for_fun_type _ T = @@ -325,11 +325,11 @@ fun type_schema_of_rep _ (Formula _) = [] | type_schema_of_rep _ Unit = [] | type_schema_of_rep T (Atom _) = [T] - | type_schema_of_rep (Type ("*", [T1, T2])) (Struct [R1, R2]) = + | type_schema_of_rep (Type (@{type_name "*"}, [T1, T2])) (Struct [R1, R2]) = type_schema_of_reps [T1, T2] [R1, R2] - | type_schema_of_rep (Type ("fun", [_, T2])) (Vect (k, R)) = + | type_schema_of_rep (Type (@{type_name fun}, [_, T2])) (Vect (k, R)) = replicate_list k (type_schema_of_rep T2 R) - | type_schema_of_rep (Type ("fun", [T1, T2])) (Func (R1, R2)) = + | type_schema_of_rep (Type (@{type_name fun}, [T1, T2])) (Func (R1, R2)) = type_schema_of_rep T1 R1 @ type_schema_of_rep T2 R2 | type_schema_of_rep T (Opt R) = type_schema_of_rep T R | type_schema_of_rep _ R = raise REP ("Nitpick_Rep.type_schema_of_rep", [R]) diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/Nitpick/nitpick_scope.ML --- a/src/HOL/Tools/Nitpick/nitpick_scope.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/Nitpick/nitpick_scope.ML Wed Mar 10 16:40:20 2010 +0100 @@ -106,22 +106,26 @@ | NONE => constr_spec dtypes x (* dtype_spec list -> bool -> typ -> bool *) -fun is_complete_type dtypes facto (Type ("fun", [T1, T2])) = +fun is_complete_type dtypes facto (Type (@{type_name fun}, [T1, T2])) = is_concrete_type dtypes facto T1 andalso is_complete_type dtypes facto T2 - | is_complete_type dtypes facto (Type ("*", Ts)) = + | is_complete_type dtypes facto (Type (@{type_name fin_fun}, [T1, T2])) = + is_exact_type dtypes facto T1 andalso is_complete_type dtypes facto T2 + | is_complete_type dtypes facto (Type (@{type_name "*"}, Ts)) = forall (is_complete_type dtypes facto) Ts | is_complete_type dtypes facto T = not (is_integer_like_type T) andalso not (is_bit_type T) andalso fun_from_pair (#complete (the (datatype_spec dtypes T))) facto handle 
Option.Option => true -and is_concrete_type dtypes facto (Type ("fun", [T1, T2])) = +and is_concrete_type dtypes facto (Type (@{type_name fun}, [T1, T2])) = is_complete_type dtypes facto T1 andalso is_concrete_type dtypes facto T2 - | is_concrete_type dtypes facto (Type ("*", Ts)) = + | is_concrete_type dtypes facto (Type (@{type_name fin_fun}, [_, T2])) = + is_concrete_type dtypes facto T2 + | is_concrete_type dtypes facto (Type (@{type_name "*"}, Ts)) = forall (is_concrete_type dtypes facto) Ts | is_concrete_type dtypes facto T = fun_from_pair (#concrete (the (datatype_spec dtypes T))) facto handle Option.Option => true -fun is_exact_type dtypes facto = +and is_exact_type dtypes facto = is_complete_type dtypes facto andf is_concrete_type dtypes facto (* int Typtab.table -> typ -> int *) @@ -528,15 +532,15 @@ (* theory -> typ list -> (typ option * int list) list -> (typ option * int list) list *) -fun repair_cards_assigns_wrt_boxing _ _ [] = [] - | repair_cards_assigns_wrt_boxing thy Ts ((SOME T, ks) :: cards_assigns) = +fun repair_cards_assigns_wrt_boxing_etc _ _ [] = [] + | repair_cards_assigns_wrt_boxing_etc thy Ts ((SOME T, ks) :: cards_assigns) = (if is_fun_type T orelse is_pair_type T then - Ts |> filter (curry (type_match thy o swap) T o unarize_and_unbox_type) - |> map (rpair ks o SOME) + Ts |> filter (curry (type_match thy o swap) T) |> map (rpair ks o SOME) else - [(SOME T, ks)]) @ repair_cards_assigns_wrt_boxing thy Ts cards_assigns - | repair_cards_assigns_wrt_boxing thy Ts ((NONE, ks) :: cards_assigns) = - (NONE, ks) :: repair_cards_assigns_wrt_boxing thy Ts cards_assigns + [(SOME T, ks)]) @ + repair_cards_assigns_wrt_boxing_etc thy Ts cards_assigns + | repair_cards_assigns_wrt_boxing_etc thy Ts ((NONE, ks) :: cards_assigns) = + (NONE, ks) :: repair_cards_assigns_wrt_boxing_etc thy Ts cards_assigns val max_scopes = 4096 val distinct_threshold = 512 @@ -548,8 +552,8 @@ maxes_assigns iters_assigns bitss bisim_depths mono_Ts nonmono_Ts deep_dataTs finitizable_dataTs = let - val cards_assigns = repair_cards_assigns_wrt_boxing thy mono_Ts - cards_assigns + val cards_assigns = repair_cards_assigns_wrt_boxing_etc thy mono_Ts + cards_assigns val blocks = blocks_for_types hol_ctxt binarize cards_assigns maxes_assigns iters_assigns bitss bisim_depths mono_Ts nonmono_Ts diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/TFL/tfl.ML --- a/src/HOL/Tools/TFL/tfl.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/TFL/tfl.ML Wed Mar 10 16:40:20 2010 +0100 @@ -360,7 +360,7 @@ (*For Isabelle, the lhs of a definition must be a constant.*) -fun mk_const_def sign (c, Ty, rhs) = +fun legacy_const_def sign (c, Ty, rhs) = singleton (Syntax.check_terms (ProofContext.init sign)) (Sign.intern_term sign (Const("==",dummyT) $ Const(c,Ty) $ rhs)); @@ -385,7 +385,7 @@ val wfrec_R_M = map_types poly_tvars (wfrec $ map_types poly_tvars R) $ functional - val def_term = mk_const_def thy (x, Ty, wfrec_R_M) + val def_term = legacy_const_def thy (x, Ty, wfrec_R_M) val ([def], thy') = PureThy.add_defs false [Thm.no_attributes (Binding.name def_name, def_term)] thy in (thy', def) end; end; @@ -540,7 +540,7 @@ val {lhs,rhs} = S.dest_eq proto_def' val (c,args) = S.strip_comb lhs val (name,Ty) = dest_atom c - val defn = mk_const_def thy (name, Ty, S.list_mk_abs (args,rhs)) + val defn = legacy_const_def thy (name, Ty, S.list_mk_abs (args,rhs)) val ([def0], theory) = thy |> PureThy.add_defs false diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/inductive.ML --- a/src/HOL/Tools/inductive.ML Tue Mar 09 16:30:43 2010 
+0100 +++ b/src/HOL/Tools/inductive.ML Wed Mar 10 16:40:20 2010 +0100 @@ -22,7 +22,7 @@ sig type inductive_result = {preds: term list, elims: thm list, raw_induct: thm, - induct: thm, intrs: thm list} + induct: thm, inducts: thm list, intrs: thm list} val morph_result: morphism -> inductive_result -> inductive_result type inductive_info = {names: string list, coind: bool} * inductive_result val the_inductive: Proof.context -> string -> inductive_info @@ -73,7 +73,7 @@ local_theory -> inductive_result * local_theory val declare_rules: binding -> bool -> bool -> string list -> thm list -> binding list -> Attrib.src list list -> (thm * string list * int) list -> - thm -> local_theory -> thm list * thm list * thm * local_theory + thm -> local_theory -> thm list * thm list * thm * thm list * local_theory val add_ind_def: add_ind_def val gen_add_inductive_i: add_ind_def -> inductive_flags -> ((binding * typ) * mixfix) list -> (string * typ) list -> (Attrib.binding * term) list -> @@ -121,16 +121,16 @@ type inductive_result = {preds: term list, elims: thm list, raw_induct: thm, - induct: thm, intrs: thm list}; + induct: thm, inducts: thm list, intrs: thm list}; -fun morph_result phi {preds, elims, raw_induct: thm, induct, intrs} = +fun morph_result phi {preds, elims, raw_induct: thm, induct, inducts, intrs} = let val term = Morphism.term phi; val thm = Morphism.thm phi; val fact = Morphism.fact phi; in {preds = map term preds, elims = fact elims, raw_induct = thm raw_induct, - induct = thm induct, intrs = fact intrs} + induct = thm induct, inducts = fact inducts, intrs = fact intrs} end; type inductive_info = @@ -737,8 +737,8 @@ ((rec_qualified true (Binding.name (coind_prefix coind ^ "induct")), map (Attrib.internal o K) (#2 induct)), [rulify (#1 induct)]); - val lthy3 = - if no_ind orelse coind then lthy2 + val (inducts, lthy3) = + if no_ind orelse coind then ([], lthy2) else let val inducts = cnames ~~ Project_Rule.projects lthy2 (1 upto length cnames) induct' in lthy2 |> @@ -746,9 +746,9 @@ inducts |> map (fn (name, th) => ([th], [Attrib.internal (K ind_case_names), Attrib.internal (K (Rule_Cases.consumes 1)), - Attrib.internal (K (Induct.induct_pred name))])))] |> snd + Attrib.internal (K (Induct.induct_pred name))])))] |>> snd o hd end; - in (intrs', elims', induct', lthy3) end; + in (intrs', elims', induct', inducts, lthy3) end; type inductive_flags = {quiet_mode: bool, verbose: bool, alt_name: binding, coind: bool, @@ -796,7 +796,7 @@ prove_indrule quiet_mode cs argTs bs xs rec_const params intr_ts mono fp_def rec_preds_defs lthy1); - val (intrs', elims', induct, lthy2) = declare_rules rec_name coind no_ind + val (intrs', elims', induct, inducts, lthy2) = declare_rules rec_name coind no_ind cnames intrs intr_names intr_atts elims raw_induct lthy1; val result = @@ -804,7 +804,8 @@ intrs = intrs', elims = elims', raw_induct = rulify raw_induct, - induct = induct}; + induct = induct, + inducts = inducts}; val lthy3 = lthy2 |> Local_Theory.declaration false (fn phi => diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/inductive_set.ML --- a/src/HOL/Tools/inductive_set.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/inductive_set.ML Wed Mar 10 16:40:20 2010 +0100 @@ -520,7 +520,7 @@ val cnames = map (Local_Theory.full_name lthy3 o #1) cnames_syn; (* FIXME *) val (intr_names, intr_atts) = split_list (map fst intros); val raw_induct' = to_set [] (Context.Proof lthy3) raw_induct; - val (intrs', elims', induct, lthy4) = + val (intrs', elims', induct, inducts, lthy4) = Inductive.declare_rules 
rec_name coind no_ind cnames (map (to_set [] (Context.Proof lthy3)) intrs) intr_names intr_atts (map (fn th => (to_set [] (Context.Proof lthy3) th, @@ -528,7 +528,7 @@ Rule_Cases.get_constraints th)) elims) raw_induct' lthy3; in - ({intrs = intrs', elims = elims', induct = induct, + ({intrs = intrs', elims = elims', induct = induct, inducts = inducts, raw_induct = raw_induct', preds = map fst defs}, lthy4) end; diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/recdef.ML --- a/src/HOL/Tools/recdef.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/recdef.ML Wed Mar 10 16:40:20 2010 +0100 @@ -184,6 +184,7 @@ fun gen_add_recdef tfl_fn prep_att prep_hints not_permissive raw_name R eq_srcs hints thy = let + val _ = legacy_feature ("\"recdef\"; prefer \"function\" instead"); val _ = requires_recdef thy; val name = Sign.intern_const thy raw_name; diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/transfer.ML --- a/src/HOL/Tools/transfer.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/transfer.ML Wed Mar 10 16:40:20 2010 +0100 @@ -1,12 +1,18 @@ -(* Author: Amine Chaieb, University of Cambridge, 2009 - Author: Jeremy Avigad, Carnegie Mellon University +(* Author: Amine Chaieb, University of Cambridge, 2009 + Jeremy Avigad, Carnegie Mellon University + Florian Haftmann, TU Muenchen + +Simple transfer principle on theorems. *) signature TRANSFER = sig + datatype selection = Direction of term * term | Hints of string list | Prop + val transfer: Context.generic -> selection -> string list -> thm -> thm list type entry - val get: Proof.context -> (thm * entry) list - val del: thm -> Context.generic -> Context.generic + val add: thm -> bool -> entry -> Context.generic -> Context.generic + val del: thm -> entry -> Context.generic -> Context.generic + val drop: thm -> Context.generic -> Context.generic val setup: theory -> theory end; @@ -15,171 +21,169 @@ (* data administration *) +val direction_of = Thm.dest_binop o Thm.dest_arg o cprop_of; + fun check_morphism_key ctxt key = let val _ = (Thm.match o pairself Thm.cprop_of) (@{thm transfer_morphismI}, key) handle Pattern.MATCH => error - ("Expected theorem of the form " ^ quote (Display.string_of_thm ctxt @{thm transfer_morphismI})); - in (Thm.dest_binop o Thm.dest_arg o Thm.cprop_of) key end; + ("Transfer: expected theorem of the form " ^ quote (Display.string_of_thm ctxt @{thm transfer_morphismI})); + in direction_of key end; -type entry = { inj : thm list, emb : thm list, ret : thm list, cong : thm list, - guess : bool, hints : string list }; +type entry = { inj : thm list, embed : thm list, return : thm list, cong : thm list, + hints : string list }; -fun merge_entry ({ inj = inj1, emb = emb1, ret = ret1, cong = cong1, guess = guess1, hints = hints1 } : entry, - { inj = inj2, emb = emb2, ret = ret2, cong = cong2, guess = guess2, hints = hints2 } : entry) = - { inj = merge Thm.eq_thm (inj1, inj2), emb = merge Thm.eq_thm (emb1, emb2), - ret = merge Thm.eq_thm (ret1, ret2), cong = merge Thm.eq_thm (cong1, cong2), - guess = guess1 andalso guess2, hints = merge (op =) (hints1, hints2) }; +val empty_entry = { inj = [], embed = [], return = [], cong = [], hints = [] }; +fun merge_entry ({ inj = inj1, embed = embed1, return = return1, cong = cong1, hints = hints1 } : entry, + { inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } : entry) = + { inj = merge Thm.eq_thm (inj1, inj2), embed = merge Thm.eq_thm (embed1, embed2), + return = merge Thm.eq_thm (return1, return2), cong = merge Thm.eq_thm (cong1, cong2), + hints = merge 
(op =) (hints1, hints2) }; structure Data = Generic_Data ( type T = (thm * entry) list; val empty = []; - val extend = I; + val extend = I; val merge = AList.join Thm.eq_thm (K merge_entry); ); -val get = Data.get o Context.Proof; + +(* data lookup *) + +fun transfer_rules_of { inj, embed, return, cong, ... } = + (inj, embed, return, cong); + +fun get_by_direction context (a, D) = + let + val ctxt = Context.proof_of context; + val certify = Thm.cterm_of (Context.theory_of context); + val a0 = certify a; + val D0 = certify D; + fun eq_direction ((a, D), thm') = + let + val (a', D') = direction_of thm'; + in a aconvc a' andalso D aconvc D' end; + in case AList.lookup eq_direction (Data.get context) (a0, D0) of + SOME e => ((a0, D0), transfer_rules_of e) + | NONE => error ("Transfer: no such instance: (" + ^ Syntax.string_of_term ctxt a ^ ", " ^ Syntax.string_of_term ctxt D ^ ")") + end; -fun del key = Data.map (remove (eq_fst Thm.eq_thm) (key, [])); +fun get_by_hints context hints = + let + val insts = map_filter (fn (k, e) => if exists (member (op =) (#hints e)) hints + then SOME (direction_of k, transfer_rules_of e) else NONE) (Data.get context); + val _ = if null insts then error ("Transfer: no such labels: " ^ commas (map quote hints)) else (); + in insts end; + +fun splits P [] = [] + | splits P (xs as (x :: _)) = + let + val (pss, qss) = List.partition (P x) xs; + in if null pss then [qss] else if null qss then [pss] else pss :: splits P qss end; -val del_attribute = Thm.declaration_attribute del; +fun get_by_prop context t = + let + val tys = map snd (Term.add_vars t []); + val _ = if null tys then error "Transfer: unable to guess instance" else (); + val tyss = splits (curry Type.could_unify) tys; + val get_ty = typ_of o ctyp_of_term o fst o direction_of; + val insts = map_filter (fn tys => get_first (fn (k, e) => + if Type.could_unify (hd tys, range_type (get_ty k)) + then SOME (direction_of k, transfer_rules_of e) + else NONE) (Data.get context)) tyss; + val _ = if null insts then + error "Transfer: no instances, provide direction or hints explicitly" else (); + in insts end; (* applying transfer data *) -fun build_simpset inj_only {inj = inj, emb = emb, ret = ret, cong = cg, guess = g, hints = hints} = - HOL_ss addsimps inj addsimps (if inj_only then [] else emb @ ret) addcongs cg; - -fun basic_transfer_rule inj_only a0 D0 e leave ctxt0 th = +fun transfer_thm ((raw_a, raw_D), (inj, embed, return, cong)) leave ctxt1 thm = let - val ([a, D], ctxt) = apfst (map Drule.dest_term o snd) - (Variable.import true (map Drule.mk_term [a0, D0]) ctxt0); - val (aT, bT) = - let val T = typ_of (ctyp_of_term a) - in (Term.range_type T, Term.domain_type T) end; - val ctxt' = (Variable.declare_term (term_of a) o Variable.declare_term (term_of D) - o Variable.declare_thm th) ctxt; - val ns = filter (fn i => Type.could_unify (snd i, aT) andalso - not (member (op =) leave (fst (fst i)))) (Term.add_vars (prop_of th) []); - val (ins, ctxt'') = Variable.variant_fixes (map (fst o fst) ns) ctxt'; - val certify = Thm.cterm_of (ProofContext.theory_of ctxt''); - val cns = map (certify o Var) ns; - val cfis = map (certify o (fn n => Free (n, bT))) ins; - val cis = map (Thm.capply a) cfis - val (hs, ctxt''') = Assumption.add_assumes (map (fn ct => - Thm.capply @{cterm "Trueprop"} (Thm.capply D ct)) cfis) ctxt''; - val th1 = Drule.cterm_instantiate (cns ~~ cis) th; - val th2 = fold Thm.elim_implies hs (fold_rev implies_intr (map cprop_of hs) th1); - val th3 = Simplifier.asm_full_simplify (Simplifier.context 
ctxt''' - (build_simpset inj_only e)) (fold_rev implies_intr (map cprop_of hs) th2); - in hd (Variable.export ctxt''' ctxt0 [th3]) end; + (* identify morphism function *) + val ([a, D], ctxt2) = ctxt1 + |> Variable.import true (map Drule.mk_term [raw_a, raw_D]) + |>> map Drule.dest_term o snd; + val transform = Thm.capply @{cterm "Trueprop"} o Thm.capply D; + val T = Thm.typ_of (Thm.ctyp_of_term a); + val (aT, bT) = (Term.range_type T, Term.domain_type T); + + (* determine variables to transfer *) + val ctxt3 = ctxt2 + |> Variable.declare_thm thm + |> Variable.declare_term (term_of a) + |> Variable.declare_term (term_of D); + val certify = Thm.cterm_of (ProofContext.theory_of ctxt3); + val vars = filter (fn ((v, _), T) => Type.could_unify (T, aT) andalso + not (member (op =) leave v)) (Term.add_vars (Thm.prop_of thm) []); + val c_vars = map (certify o Var) vars; + val (vs', ctxt4) = Variable.variant_fixes (map (fst o fst) vars) ctxt3; + val c_vars' = map (certify o (fn v => Free (v, bT))) vs'; + val c_exprs' = map (Thm.capply a) c_vars'; -fun transfer_rule (a, D) leave (gctxt, th) = + (* transfer *) + val (hyps, ctxt5) = ctxt4 + |> Assumption.add_assumes (map transform c_vars'); + val simpset = Simplifier.context ctxt5 HOL_ss + addsimps (inj @ embed @ return) addcongs cong; + val thm' = thm + |> Drule.cterm_instantiate (c_vars ~~ c_exprs') + |> fold_rev Thm.implies_intr (map cprop_of hyps) + |> Simplifier.asm_full_simplify simpset + in singleton (Variable.export ctxt5 ctxt1) thm' end; + +fun transfer_thm_multiple insts leave ctxt thm = + map (fn inst => transfer_thm inst leave ctxt thm) insts; + +datatype selection = Direction of term * term | Hints of string list | Prop; + +fun insts_for context thm (Direction direction) = [get_by_direction context direction] + | insts_for context thm (Hints hints) = get_by_hints context hints + | insts_for context thm Prop = get_by_prop context (Thm.prop_of thm); + +fun transfer context selection leave thm = + transfer_thm_multiple (insts_for context thm selection) leave (Context.proof_of context) thm; + + +(* maintaining transfer data *) + +fun extend_entry ctxt (a, D) guess + { inj = inj1, embed = embed1, return = return1, cong = cong1, hints = hints1 } + { inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } = let - fun transfer_ruleh a D leave ctxt th = - let - val al = get ctxt; - val certify = Thm.cterm_of (ProofContext.theory_of ctxt); - val a0 = certify a; - val D0 = certify D; - fun h (th', e) = - let - val (a',D') = (Thm.dest_binop o Thm.dest_arg o cprop_of) th' - in if a0 aconvc a' andalso D0 aconvc D' then SOME e else NONE end; - in case get_first h al of - SOME e => basic_transfer_rule false a0 D0 e leave ctxt th - | NONE => error "Transfer: corresponding instance not found in context data" - end; - in - (gctxt, transfer_ruleh a D leave (Context.proof_of gctxt) th) + fun add_del eq del add = union eq add #> subtract eq del; + val guessed = if guess + then map (fn thm => transfer_thm + ((a, D), (if null inj1 then inj2 else inj1, [], [], cong1)) [] ctxt thm RS sym) embed1 + else []; + in + { inj = union Thm.eq_thm inj1 inj2, embed = union Thm.eq_thm embed1 embed2, + return = union Thm.eq_thm guessed (union Thm.eq_thm return1 return2), + cong = union Thm.eq_thm cong1 cong2, hints = union (op =) hints1 hints2 } end; -fun splits P [] = [] - | splits P (xxs as (x :: xs)) = - let - val pss = filter (P x) xxs; - val qss = filter_out (P x) xxs; - in if null pss then [qss] else if null qss then [pss] else pss:: splits P qss 
end; +fun diminish_entry + { inj = inj0, embed = embed0, return = return0, cong = cong0, hints = hints0 } + { inj = inj2, embed = embed2, return = return2, cong = cong2, hints = hints2 } = + { inj = subtract Thm.eq_thm inj0 inj2, embed = subtract Thm.eq_thm embed0 embed2, + return = subtract Thm.eq_thm return0 return2, cong = subtract Thm.eq_thm cong0 cong2, + hints = subtract (op =) hints0 hints2 }; -fun all_transfers leave (gctxt, th) = - let - val ctxt = Context.proof_of gctxt; - val tys = map snd (Term.add_vars (prop_of th) []); - val _ = if null tys then error "transfer: Unable to guess instance" else (); - val tyss = splits (curry Type.could_unify) tys; - val get_ty = typ_of o ctyp_of_term o fst o Thm.dest_binop o Thm.dest_arg o cprop_of; - val get_aD = Thm.dest_binop o Thm.dest_arg o cprop_of; - val insts = - map_filter (fn tys => - get_first (fn (k,ss) => - if Type.could_unify (hd tys, range_type (get_ty k)) - then SOME (get_aD k, ss) - else NONE) (get ctxt)) tyss; - val _ = - if null insts then - error "Transfer guesser: there were no possible instances, use direction: in order to provide a direction" - else (); - val ths = map (fn ((a, D), e) => basic_transfer_rule false a D e leave ctxt th) insts; - val cth = Conjunction.intr_balanced ths; - in (gctxt, cth) end; - -fun transfer_rule_by_hint ls leave (gctxt, th) = +fun add key guess entry context = let - val ctxt = Context.proof_of gctxt; - val get_aD = Thm.dest_binop o Thm.dest_arg o cprop_of; - val insts = map_filter (fn (k,e) => if exists (member (op =) (#hints e)) ls - then SOME (get_aD k, e) else NONE) (get ctxt); - val _ = if null insts then error "Transfer: No labels provided are stored in the context" else (); - val ths = map (fn ((a,D),e) => basic_transfer_rule false a D e leave ctxt th) insts; - val cth = Conjunction.intr_balanced ths; - in (gctxt, cth) end; - -fun transferred_attribute ls NONE leave = - if null ls then all_transfers leave else transfer_rule_by_hint ls leave - | transferred_attribute _ (SOME (a, D)) leave = transfer_rule (a, D) leave; - - -(* adding transfer data *) - -fun merge_update eq m (k, v) [] = [(k, v)] - | merge_update eq m (k, v) ((k', v') :: al) = - if eq (k, k') then (k', m (v, v')) :: al else (k', v') :: merge_update eq m (k, v) al; - -(*? 
fun merge_update eq m (k, v) = AList.map_entry eq k (fn v' => m (v, v'));*) - -fun merge_entries {inj = inj0, emb = emb0, ret = ret0, cong = cg0, guess = g0, hints = hints0} - ({inj = inj1, emb = emb1, ret = ret1, cong = cg1, guess = g1, hints = hints1}, - {inj = inj2, emb = emb2, ret = ret2, cong = cg2, guess = g2, hints = hints2} : entry) = - let - fun h xs0 xs ys = subtract Thm.eq_thm xs0 (merge Thm.eq_thm (xs, ys)) + val ctxt = Context.proof_of context; + val a_D = check_morphism_key ctxt key; in - {inj = h inj0 inj1 inj2, emb = h emb0 emb1 emb2, - ret = h ret0 ret1 ret2, cong = h cg0 cg1 cg2, guess = g1 andalso g2, - hints = subtract (op =) hints0 (union (op =) hints1 hints2) } + context + |> Data.map (AList.map_default Thm.eq_thm + (key, empty_entry) (extend_entry ctxt a_D guess entry)) end; -fun add ((inja, injd), (emba, embd), (reta, retd), (cga, cgd), g, (hintsa, hintsd)) key = - Data.map (fn al => - let - val ctxt0 = ProofContext.init (Thm.theory_of_thm key); (*FIXME*) - val (a0, D0) = check_morphism_key ctxt0 key; - val e0 = {inj = inja, emb = emba, ret = reta, cong = cga, guess = g, hints = hintsa}; - val ed = {inj = injd, emb = embd, ret = retd, cong = cgd, guess = g, hints = hintsd}; - val entry = if g then - let - val inj' = if null inja then #inj - (case AList.lookup Thm.eq_thm al key of SOME e => e - | NONE => error "Transfer: cannot generate return rules on the fly, either add injectivity axiom or force manual mode with mode: manual") - else inja - val ret' = merge Thm.eq_thm (reta, map - (fn th => basic_transfer_rule true a0 D0 {inj = inj', emb = [], ret = [], cong = cga, guess = g, - hints = hintsa} [] ctxt0 th RS sym) emba); - in {inj = inja, emb = emba, ret = ret', cong = cga, guess = g, hints = hintsa} end - else e0; - in merge_update Thm.eq_thm (merge_entries ed) (key, entry) al end); +fun del key entry = Data.map (AList.map_entry Thm.eq_thm key (diminish_entry entry)); -fun add_attribute args = Thm.declaration_attribute (add args); +fun drop key = Data.map (AList.delete Thm.eq_thm key); (* syntax *) @@ -192,22 +196,25 @@ fun keyword k = Scan.lift (Args.$$$ k) >> K (); fun keyword_colon k = Scan.lift (Args.$$$ k -- Args.colon) >> K (); -val congN = "cong"; +val addN = "add"; +val delN = "del"; +val keyN = "key"; +val modeN = "mode"; +val automaticN = "automatic"; +val manualN = "manual"; val injN = "inj"; val embedN = "embed"; val returnN = "return"; -val addN = "add"; -val delN = "del"; -val modeN = "mode"; -val automaticN = "automatic"; -val manualN = "manual"; +val congN = "cong"; +val labelsN = "labels"; + +val leavingN = "leaving"; val directionN = "direction"; -val labelsN = "labels"; -val leavingN = "leaving"; -val any_keyword = keyword_colon congN || keyword_colon injN || keyword_colon embedN - || keyword_colon returnN || keyword_colon directionN || keyword_colon modeN - || keyword_colon delN || keyword_colon labelsN || keyword_colon leavingN; +val any_keyword = keyword_colon addN || keyword_colon delN || keyword_colon keyN + || keyword_colon modeN || keyword_colon injN || keyword_colon embedN || keyword_colon returnN + || keyword_colon congN || keyword_colon labelsN + || keyword_colon leavingN || keyword_colon directionN; val thms = Scan.repeat (Scan.unless any_keyword Attrib.multi_thm) >> flat; val names = Scan.repeat (Scan.unless any_keyword (Scan.lift Args.name)) @@ -220,20 +227,30 @@ val cong = (keyword_colon congN |-- thms) -- these (keyword_colon delN |-- thms); val labels = (keyword_colon labelsN |-- names) -- these (keyword_colon delN |-- names); 
-val entry = Scan.optional mode true -- these_pair inj -- these_pair embed - -- these_pair return -- these_pair cong -- these_pair labels; +val entry_pair = these_pair inj -- these_pair embed + -- these_pair return -- these_pair cong -- these_pair labels + >> (fn (((((inja, injd), (embeda, embedd)), (returna, returnd)), (conga, congd)), + (hintsa, hintsd)) => + ({ inj = inja, embed = embeda, return = returna, cong = conga, hints = hintsa }, + { inj = injd, embed = embedd, return = returnd, cong = congd, hints = hintsd })); -val transfer_directive = these names -- Scan.option (keyword_colon directionN - |-- (Args.term -- Args.term)) -- these (keyword_colon leavingN |-- names); +val selection = (keyword_colon directionN |-- (Args.term -- Args.term) >> Direction) + || these names >> (fn hints => if null hints then Prop else Hints hints); in -val transfer_syntax = (Scan.lift (Args.$$$ delN >> K del_attribute) - || Scan.unless any_keyword (keyword addN) |-- entry - >> (fn (((((g, inj), embed), ret), cg), hints) => add_attribute (inj, embed, ret, cg, g, hints))) +val transfer_attribute = keyword delN >> K (Thm.declaration_attribute drop) + || keyword addN |-- Scan.optional mode true -- entry_pair + >> (fn (guess, (entry_add, entry_del)) => Thm.declaration_attribute + (fn thm => add thm guess entry_add #> del thm entry_del)) + || keyword_colon keyN |-- Attrib.thm + >> (fn key => Thm.declaration_attribute + (fn thm => add key false + { inj = [], embed = [], return = [thm], cong = [], hints = [] })); -val transferred_syntax = transfer_directive - >> (fn ((hints, aD), leave) => transferred_attribute hints aD leave); +val transferred_attribute = selection -- these (keyword_colon leavingN |-- names) + >> (fn (selection, leave) => Thm.rule_attribute (fn context => + Conjunction.intr_balanced o transfer context selection leave)); end; @@ -241,9 +258,9 @@ (* theory setup *) val setup = - Attrib.setup @{binding transfer} transfer_syntax + Attrib.setup @{binding transfer} transfer_attribute "Installs transfer data" #> - Attrib.setup @{binding transferred} transferred_syntax + Attrib.setup @{binding transferred} transferred_attribute "Transfers theorems"; end; diff -r d58a4ac1ca1c -r 553906904426 src/HOL/Tools/typedef.ML --- a/src/HOL/Tools/typedef.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/Tools/typedef.ML Wed Mar 10 16:40:20 2010 +0100 @@ -130,7 +130,7 @@ in Drule.export_without_context (Drule.equal_elim_rule2 OF [goal_eq, th]) end; fun typedef_result inhabited = - Typedecl.typedecl (tname, vs, mx) + Typedecl.typedecl_global (tname, vs, mx) #> snd #> Sign.add_consts_i [(Rep_name, newT --> oldT, NoSyn), diff -r d58a4ac1ca1c -r 553906904426 src/HOL/ex/Transfer_Ex.thy --- a/src/HOL/ex/Transfer_Ex.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOL/ex/Transfer_Ex.thy Wed Mar 10 16:40:20 2010 +0100 @@ -2,41 +2,46 @@ header {* Various examples for transfer procedure *} theory Transfer_Ex -imports Complex_Main +imports Main begin -(* nat to int *) - lemma ex1: "(x::nat) + y = y + x" by auto -thm ex1 [transferred] +lemma "(0\int) \ (y\int) \ (0\int) \ (x\int) \ x + y = y + x" + by (fact ex1 [transferred]) lemma ex2: "(a::nat) div b * b + a mod b = a" by (rule mod_div_equality) -thm ex2 [transferred] +lemma "(0\int) \ (b\int) \ (0\int) \ (a\int) \ a div b * b + a mod b = a" + by (fact ex2 [transferred]) lemma ex3: "ALL (x::nat). ALL y. EX z. z >= x + y" by auto -thm ex3 [transferred natint] +lemma "\x\0\int. \y\0\int. \xa\0\int. 
x + y \ xa" + by (fact ex3 [transferred nat_int]) lemma ex4: "(x::nat) >= y \ (x - y) + y = x" by auto -thm ex4 [transferred] +lemma "(0\int) \ (x\int) \ (0\int) \ (y\int) \ y \ x \ tsub x y + y = x" + by (fact ex4 [transferred]) -lemma ex5: "(2::nat) * (SUM i <= n. i) = n * (n + 1)" +lemma ex5: "(2::nat) * \{..n} = n * (n + 1)" by (induct n rule: nat_induct, auto) -thm ex5 [transferred] +lemma "(0\int) \ (n\int) \ (2\int) * \{0\int..n} = n * (n + (1\int))" + by (fact ex5 [transferred]) + +lemma "(0\nat) \ (n\nat) \ (2\nat) * \{0\nat..n} = n * (n + (1\nat))" + by (fact ex5 [transferred, transferred]) theorem ex6: "0 <= (n::int) \ 2 * \{0..n} = n * (n + 1)" by (rule ex5 [transferred]) -thm ex6 [transferred] - -thm ex5 [transferred, transferred] +lemma "(0\nat) \ (n\nat) \ (2\nat) * \{0\nat..n} = n * (n + (1\nat))" + by (fact ex6 [transferred]) end \ No newline at end of file diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Domain.thy --- a/src/HOLCF/Domain.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Domain.thy Wed Mar 10 16:40:20 2010 +0100 @@ -19,107 +19,6 @@ defaultsort pcpo -subsection {* Continuous isomorphisms *} - -text {* A locale for continuous isomorphisms *} - -locale iso = - fixes abs :: "'a \ 'b" - fixes rep :: "'b \ 'a" - assumes abs_iso [simp]: "rep\(abs\x) = x" - assumes rep_iso [simp]: "abs\(rep\y) = y" -begin - -lemma swap: "iso rep abs" - by (rule iso.intro [OF rep_iso abs_iso]) - -lemma abs_below: "(abs\x \ abs\y) = (x \ y)" -proof - assume "abs\x \ abs\y" - then have "rep\(abs\x) \ rep\(abs\y)" by (rule monofun_cfun_arg) - then show "x \ y" by simp -next - assume "x \ y" - then show "abs\x \ abs\y" by (rule monofun_cfun_arg) -qed - -lemma rep_below: "(rep\x \ rep\y) = (x \ y)" - by (rule iso.abs_below [OF swap]) - -lemma abs_eq: "(abs\x = abs\y) = (x = y)" - by (simp add: po_eq_conv abs_below) - -lemma rep_eq: "(rep\x = rep\y) = (x = y)" - by (rule iso.abs_eq [OF swap]) - -lemma abs_strict: "abs\\ = \" -proof - - have "\ \ rep\\" .. - then have "abs\\ \ abs\(rep\\)" by (rule monofun_cfun_arg) - then have "abs\\ \ \" by simp - then show ?thesis by (rule UU_I) -qed - -lemma rep_strict: "rep\\ = \" - by (rule iso.abs_strict [OF swap]) - -lemma abs_defin': "abs\x = \ \ x = \" -proof - - have "x = rep\(abs\x)" by simp - also assume "abs\x = \" - also note rep_strict - finally show "x = \" . -qed - -lemma rep_defin': "rep\z = \ \ z = \" - by (rule iso.abs_defin' [OF swap]) - -lemma abs_defined: "z \ \ \ abs\z \ \" - by (erule contrapos_nn, erule abs_defin') - -lemma rep_defined: "z \ \ \ rep\z \ \" - by (rule iso.abs_defined [OF iso.swap]) (rule iso_axioms) - -lemma abs_defined_iff: "(abs\x = \) = (x = \)" - by (auto elim: abs_defin' intro: abs_strict) - -lemma rep_defined_iff: "(rep\x = \) = (x = \)" - by (rule iso.abs_defined_iff [OF iso.swap]) (rule iso_axioms) - -lemma casedist_rule: "rep\x = \ \ P \ x = \ \ P" - by (simp add: rep_defined_iff) - -lemma compact_abs_rev: "compact (abs\x) \ compact x" -proof (unfold compact_def) - assume "adm (\y. \ abs\x \ y)" - with cont_Rep_CFun2 - have "adm (\y. \ abs\x \ abs\y)" by (rule adm_subst) - then show "adm (\y. 
\ x \ y)" using abs_below by simp -qed - -lemma compact_rep_rev: "compact (rep\x) \ compact x" - by (rule iso.compact_abs_rev [OF iso.swap]) (rule iso_axioms) - -lemma compact_abs: "compact x \ compact (abs\x)" - by (rule compact_rep_rev) simp - -lemma compact_rep: "compact x \ compact (rep\x)" - by (rule iso.compact_abs [OF iso.swap]) (rule iso_axioms) - -lemma iso_swap: "(x = abs\y) = (rep\x = y)" -proof - assume "x = abs\y" - then have "rep\x = rep\(abs\y)" by simp - then show "rep\x = y" by simp -next - assume "rep\x = y" - then have "abs\(rep\x) = abs\y" by simp - then show "x = abs\y" by simp -qed - -end - - subsection {* Casedist *} lemma ex_one_defined_iff: @@ -214,102 +113,6 @@ ssum_map_sinl ssum_map_sinr sprod_map_spair u_map_up -subsection {* Take functions and finiteness *} - -lemma lub_ID_take_lemma: - assumes "chain t" and "(\n. t n) = ID" - assumes "\n. t n\x = t n\y" shows "x = y" -proof - - have "(\n. t n\x) = (\n. t n\y)" - using assms(3) by simp - then have "(\n. t n)\x = (\n. t n)\y" - using assms(1) by (simp add: lub_distribs) - then show "x = y" - using assms(2) by simp -qed - -lemma lub_ID_reach: - assumes "chain t" and "(\n. t n) = ID" - shows "(\n. t n\x) = x" -using assms by (simp add: lub_distribs) - -text {* - Let a ``decisive'' function be a deflation that maps every input to - either itself or bottom. Then if a domain's take functions are all - decisive, then all values in the domain are finite. -*} - -definition - decisive :: "('a::pcpo \ 'a) \ bool" -where - "decisive d \ (\x. d\x = x \ d\x = \)" - -lemma decisiveI: "(\x. d\x = x \ d\x = \) \ decisive d" - unfolding decisive_def by simp - -lemma decisive_cases: - assumes "decisive d" obtains "d\x = x" | "d\x = \" -using assms unfolding decisive_def by auto - -lemma decisive_bottom: "decisive \" - unfolding decisive_def by simp - -lemma decisive_ID: "decisive ID" - unfolding decisive_def by simp - -lemma decisive_ssum_map: - assumes f: "decisive f" - assumes g: "decisive g" - shows "decisive (ssum_map\f\g)" -apply (rule decisiveI, rename_tac s) -apply (case_tac s, simp_all) -apply (rule_tac x=x in decisive_cases [OF f], simp_all) -apply (rule_tac x=y in decisive_cases [OF g], simp_all) -done - -lemma decisive_sprod_map: - assumes f: "decisive f" - assumes g: "decisive g" - shows "decisive (sprod_map\f\g)" -apply (rule decisiveI, rename_tac s) -apply (case_tac s, simp_all) -apply (rule_tac x=x in decisive_cases [OF f], simp_all) -apply (rule_tac x=y in decisive_cases [OF g], simp_all) -done - -lemma decisive_abs_rep: - fixes abs rep - assumes iso: "iso abs rep" - assumes d: "decisive d" - shows "decisive (abs oo d oo rep)" -apply (rule decisiveI) -apply (rule_tac x="rep\x" in decisive_cases [OF d]) -apply (simp add: iso.rep_iso [OF iso]) -apply (simp add: iso.abs_strict [OF iso]) -done - -lemma lub_ID_finite: - assumes chain: "chain d" - assumes lub: "(\n. d n) = ID" - assumes decisive: "\n. decisive (d n)" - shows "\n. d n\x = x" -proof - - have 1: "chain (\n. d n\x)" using chain by simp - have 2: "(\n. d n\x) = x" using chain lub by (rule lub_ID_reach) - have "\n. d n\x = x \ d n\x = \" - using decisive unfolding decisive_def by simp - hence "range (\n. d n\x) \ {x, \}" - by auto - hence "finite (range (\n. d n\x))" - by (rule finite_subset, simp) - with 1 have "finite_chain (\n. d n\x)" - by (rule finite_range_imp_finch) - then have "\n. (\n. d n\x) = d n\x" - unfolding finite_chain_def by (auto simp add: maxinch_is_thelub) - with 2 show "\n. 
d n\x = x" by (auto elim: sym) -qed - - subsection {* Installing the domain package *} lemmas con_strict_rules = diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Domain_Aux.thy --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/HOLCF/Domain_Aux.thy Wed Mar 10 16:40:20 2010 +0100 @@ -0,0 +1,263 @@ +(* Title: HOLCF/Domain_Aux.thy + Author: Brian Huffman +*) + +header {* Domain package support *} + +theory Domain_Aux +imports Ssum Sprod Fixrec +uses + ("Tools/Domain/domain_take_proofs.ML") +begin + +subsection {* Continuous isomorphisms *} + +text {* A locale for continuous isomorphisms *} + +locale iso = + fixes abs :: "'a \ 'b" + fixes rep :: "'b \ 'a" + assumes abs_iso [simp]: "rep\(abs\x) = x" + assumes rep_iso [simp]: "abs\(rep\y) = y" +begin + +lemma swap: "iso rep abs" + by (rule iso.intro [OF rep_iso abs_iso]) + +lemma abs_below: "(abs\x \ abs\y) = (x \ y)" +proof + assume "abs\x \ abs\y" + then have "rep\(abs\x) \ rep\(abs\y)" by (rule monofun_cfun_arg) + then show "x \ y" by simp +next + assume "x \ y" + then show "abs\x \ abs\y" by (rule monofun_cfun_arg) +qed + +lemma rep_below: "(rep\x \ rep\y) = (x \ y)" + by (rule iso.abs_below [OF swap]) + +lemma abs_eq: "(abs\x = abs\y) = (x = y)" + by (simp add: po_eq_conv abs_below) + +lemma rep_eq: "(rep\x = rep\y) = (x = y)" + by (rule iso.abs_eq [OF swap]) + +lemma abs_strict: "abs\\ = \" +proof - + have "\ \ rep\\" .. + then have "abs\\ \ abs\(rep\\)" by (rule monofun_cfun_arg) + then have "abs\\ \ \" by simp + then show ?thesis by (rule UU_I) +qed + +lemma rep_strict: "rep\\ = \" + by (rule iso.abs_strict [OF swap]) + +lemma abs_defin': "abs\x = \ \ x = \" +proof - + have "x = rep\(abs\x)" by simp + also assume "abs\x = \" + also note rep_strict + finally show "x = \" . +qed + +lemma rep_defin': "rep\z = \ \ z = \" + by (rule iso.abs_defin' [OF swap]) + +lemma abs_defined: "z \ \ \ abs\z \ \" + by (erule contrapos_nn, erule abs_defin') + +lemma rep_defined: "z \ \ \ rep\z \ \" + by (rule iso.abs_defined [OF iso.swap]) (rule iso_axioms) + +lemma abs_defined_iff: "(abs\x = \) = (x = \)" + by (auto elim: abs_defin' intro: abs_strict) + +lemma rep_defined_iff: "(rep\x = \) = (x = \)" + by (rule iso.abs_defined_iff [OF iso.swap]) (rule iso_axioms) + +lemma casedist_rule: "rep\x = \ \ P \ x = \ \ P" + by (simp add: rep_defined_iff) + +lemma compact_abs_rev: "compact (abs\x) \ compact x" +proof (unfold compact_def) + assume "adm (\y. \ abs\x \ y)" + with cont_Rep_CFun2 + have "adm (\y. \ abs\x \ abs\y)" by (rule adm_subst) + then show "adm (\y. \ x \ y)" using abs_below by simp +qed + +lemma compact_rep_rev: "compact (rep\x) \ compact x" + by (rule iso.compact_abs_rev [OF iso.swap]) (rule iso_axioms) + +lemma compact_abs: "compact x \ compact (abs\x)" + by (rule compact_rep_rev) simp + +lemma compact_rep: "compact x \ compact (rep\x)" + by (rule iso.compact_abs [OF iso.swap]) (rule iso_axioms) + +lemma iso_swap: "(x = abs\y) = (rep\x = y)" +proof + assume "x = abs\y" + then have "rep\x = rep\(abs\y)" by simp + then show "rep\x = y" by simp +next + assume "rep\x = y" + then have "abs\(rep\x) = abs\y" by simp + then show "x = abs\y" by simp +qed + +end + + +subsection {* Proofs about take functions *} + +text {* + This section contains lemmas that are used in a module that supports + the domain isomorphism package; the module contains proofs related + to take functions and the finiteness predicate. +*} + +lemma deflation_abs_rep: + fixes abs and rep and d + assumes abs_iso: "\x. rep\(abs\x) = x" + assumes rep_iso: "\y. 
abs\(rep\y) = y" + shows "deflation d \ deflation (abs oo d oo rep)" +by (rule ep_pair.deflation_e_d_p) (simp add: ep_pair.intro assms) + +lemma deflation_chain_min: + assumes chain: "chain d" + assumes defl: "\n. deflation (d n)" + shows "d m\(d n\x) = d (min m n)\x" +proof (rule linorder_le_cases) + assume "m \ n" + with chain have "d m \ d n" by (rule chain_mono) + then have "d m\(d n\x) = d m\x" + by (rule deflation_below_comp1 [OF defl defl]) + moreover from `m \ n` have "min m n = m" by simp + ultimately show ?thesis by simp +next + assume "n \ m" + with chain have "d n \ d m" by (rule chain_mono) + then have "d m\(d n\x) = d n\x" + by (rule deflation_below_comp2 [OF defl defl]) + moreover from `n \ m` have "min m n = n" by simp + ultimately show ?thesis by simp +qed + +lemma lub_ID_take_lemma: + assumes "chain t" and "(\n. t n) = ID" + assumes "\n. t n\x = t n\y" shows "x = y" +proof - + have "(\n. t n\x) = (\n. t n\y)" + using assms(3) by simp + then have "(\n. t n)\x = (\n. t n)\y" + using assms(1) by (simp add: lub_distribs) + then show "x = y" + using assms(2) by simp +qed + +lemma lub_ID_reach: + assumes "chain t" and "(\n. t n) = ID" + shows "(\n. t n\x) = x" +using assms by (simp add: lub_distribs) + +lemma lub_ID_take_induct: + assumes "chain t" and "(\n. t n) = ID" + assumes "adm P" and "\n. P (t n\x)" shows "P x" +proof - + from `chain t` have "chain (\n. t n\x)" by simp + from `adm P` this `\n. P (t n\x)` have "P (\n. t n\x)" by (rule admD) + with `chain t` `(\n. t n) = ID` show "P x" by (simp add: lub_distribs) +qed + + +subsection {* Finiteness *} + +text {* + Let a ``decisive'' function be a deflation that maps every input to + either itself or bottom. Then if a domain's take functions are all + decisive, then all values in the domain are finite. +*} + +definition + decisive :: "('a::pcpo \ 'a) \ bool" +where + "decisive d \ (\x. d\x = x \ d\x = \)" + +lemma decisiveI: "(\x. d\x = x \ d\x = \) \ decisive d" + unfolding decisive_def by simp + +lemma decisive_cases: + assumes "decisive d" obtains "d\x = x" | "d\x = \" +using assms unfolding decisive_def by auto + +lemma decisive_bottom: "decisive \" + unfolding decisive_def by simp + +lemma decisive_ID: "decisive ID" + unfolding decisive_def by simp + +lemma decisive_ssum_map: + assumes f: "decisive f" + assumes g: "decisive g" + shows "decisive (ssum_map\f\g)" +apply (rule decisiveI, rename_tac s) +apply (case_tac s, simp_all) +apply (rule_tac x=x in decisive_cases [OF f], simp_all) +apply (rule_tac x=y in decisive_cases [OF g], simp_all) +done + +lemma decisive_sprod_map: + assumes f: "decisive f" + assumes g: "decisive g" + shows "decisive (sprod_map\f\g)" +apply (rule decisiveI, rename_tac s) +apply (case_tac s, simp_all) +apply (rule_tac x=x in decisive_cases [OF f], simp_all) +apply (rule_tac x=y in decisive_cases [OF g], simp_all) +done + +lemma decisive_abs_rep: + fixes abs rep + assumes iso: "iso abs rep" + assumes d: "decisive d" + shows "decisive (abs oo d oo rep)" +apply (rule decisiveI) +apply (rule_tac x="rep\x" in decisive_cases [OF d]) +apply (simp add: iso.rep_iso [OF iso]) +apply (simp add: iso.abs_strict [OF iso]) +done + +lemma lub_ID_finite: + assumes chain: "chain d" + assumes lub: "(\n. d n) = ID" + assumes decisive: "\n. decisive (d n)" + shows "\n. d n\x = x" +proof - + have 1: "chain (\n. d n\x)" using chain by simp + have 2: "(\n. d n\x) = x" using chain lub by (rule lub_ID_reach) + have "\n. d n\x = x \ d n\x = \" + using decisive unfolding decisive_def by simp + hence "range (\n. 
d n\x) \ {x, \}" + by auto + hence "finite (range (\n. d n\x))" + by (rule finite_subset, simp) + with 1 have "finite_chain (\n. d n\x)" + by (rule finite_range_imp_finch) + then have "\n. (\n. d n\x) = d n\x" + unfolding finite_chain_def by (auto simp add: maxinch_is_thelub) + with 2 show "\n. d n\x = x" by (auto elim: sym) +qed + +lemma lub_ID_finite_take_induct: + assumes "chain d" and "(\n. d n) = ID" and "\n. decisive (d n)" + shows "(\n. P (d n\x)) \ P x" +using lub_ID_finite [OF assms] by metis + +subsection {* ML setup *} + +use "Tools/Domain/domain_take_proofs.ML" + +end diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/IsaMakefile --- a/src/HOLCF/IsaMakefile Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/IsaMakefile Wed Mar 10 16:40:20 2010 +0100 @@ -39,6 +39,7 @@ Discrete.thy \ Deflation.thy \ Domain.thy \ + Domain_Aux.thy \ Eventual.thy \ Ffun.thy \ Fixrec.thy \ diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Representable.thy --- a/src/HOLCF/Representable.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Representable.thy Wed Mar 10 16:40:20 2010 +0100 @@ -5,51 +5,12 @@ header {* Representable Types *} theory Representable -imports Algebraic Universal Ssum Sprod One Fixrec +imports Algebraic Universal Ssum Sprod One Fixrec Domain_Aux uses ("Tools/repdef.ML") - ("Tools/Domain/domain_take_proofs.ML") ("Tools/Domain/domain_isomorphism.ML") begin -subsection {* Preliminaries: Take proofs *} - -text {* - This section contains lemmas that are used in a module that supports - the domain isomorphism package; the module contains proofs related - to take functions and the finiteness predicate. -*} - -lemma deflation_abs_rep: - fixes abs and rep and d - assumes abs_iso: "\x. rep\(abs\x) = x" - assumes rep_iso: "\y. abs\(rep\y) = y" - shows "deflation d \ deflation (abs oo d oo rep)" -by (rule ep_pair.deflation_e_d_p) (simp add: ep_pair.intro assms) - -lemma deflation_chain_min: - assumes chain: "chain d" - assumes defl: "\n. 
deflation (d n)" - shows "d m\(d n\x) = d (min m n)\x" -proof (rule linorder_le_cases) - assume "m \ n" - with chain have "d m \ d n" by (rule chain_mono) - then have "d m\(d n\x) = d m\x" - by (rule deflation_below_comp1 [OF defl defl]) - moreover from `m \ n` have "min m n = m" by simp - ultimately show ?thesis by simp -next - assume "n \ m" - with chain have "d n \ d m" by (rule chain_mono) - then have "d m\(d n\x) = d n\x" - by (rule deflation_below_comp2 [OF defl defl]) - moreover from `n \ m` have "min m n = n" by simp - ultimately show ?thesis by simp -qed - -use "Tools/Domain/domain_take_proofs.ML" - - subsection {* Class of representable types *} text "Overloaded embedding and projection functions between diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Tools/Domain/domain_axioms.ML --- a/src/HOLCF/Tools/Domain/domain_axioms.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Tools/Domain/domain_axioms.ML Wed Mar 10 16:40:20 2010 +0100 @@ -16,7 +16,8 @@ val add_axioms : (binding * (typ * typ)) list -> theory -> - Domain_Take_Proofs.iso_info list * theory + (Domain_Take_Proofs.iso_info list + * Domain_Take_Proofs.take_induct_info) * theory end; @@ -120,8 +121,13 @@ fold_map axiomatize_lub_take (map fst dom_eqns ~~ #take_consts take_info) thy; + (* prove additional take theorems *) + val (take_info2, thy) = + Domain_Take_Proofs.add_lub_take_theorems + (map fst dom_eqns ~~ iso_infos) take_info lub_take_thms thy; + in - (iso_infos, thy) (* TODO: also return take_info, lub_take_thms *) + ((iso_infos, take_info2), thy) end; end; (* struct *) diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Tools/Domain/domain_extender.ML --- a/src/HOLCF/Tools/Domain/domain_extender.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Tools/Domain/domain_extender.ML Wed Mar 10 16:40:20 2010 +0100 @@ -184,7 +184,7 @@ fun mk_eq_typ (_, cons) = foldr1 mk_ssumT (map mk_con_typ cons); val repTs : typ list = map mk_eq_typ eqs'; val dom_eqns : (binding * (typ * typ)) list = dbinds ~~ (dts ~~ repTs); - val (iso_infos, thy) = + val ((iso_infos, take_info), thy) = Domain_Axioms.add_axioms dom_eqns thy; val ((rewss, take_rews), theorems_thy) = @@ -192,7 +192,7 @@ |> fold_map (fn ((eq, (x,cs)), info) => Domain_Theorems.theorems (eq, eqs) (Type x, cs) info) (eqs ~~ eqs' ~~ iso_infos) - ||>> Domain_Theorems.comp_theorems (comp_dnam, eqs); + ||>> Domain_Theorems.comp_theorems (comp_dnam, eqs) take_info; in theorems_thy |> Sign.add_path (Long_Name.base_name comp_dnam) @@ -246,7 +246,7 @@ if null args then oneT else foldr1 mk_sprodT (map mk_arg_typ args); fun mk_eq_typ (_, cons) = foldr1 mk_ssumT (map mk_con_typ cons); - val (iso_infos, thy) = thy |> + val ((iso_infos, take_info), thy) = thy |> Domain_Isomorphism.domain_isomorphism (map (fn ((vs, dname, mx, _), eq) => (map fst vs, dname, mx, mk_eq_typ eq, NONE)) @@ -268,7 +268,7 @@ |> fold_map (fn ((eq, (x,cs)), info) => Domain_Theorems.theorems (eq, eqs) (Type x, cs) info) (eqs ~~ eqs' ~~ iso_infos) - ||>> Domain_Theorems.comp_theorems (comp_dnam, eqs); + ||>> Domain_Theorems.comp_theorems (comp_dnam, eqs) take_info; in theorems_thy |> Sign.add_path (Long_Name.base_name comp_dnam) diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Tools/Domain/domain_isomorphism.ML --- a/src/HOLCF/Tools/Domain/domain_isomorphism.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Tools/Domain/domain_isomorphism.ML Wed Mar 10 16:40:20 2010 +0100 @@ -7,8 +7,12 @@ signature DOMAIN_ISOMORPHISM = sig val domain_isomorphism : - (string list * binding * mixfix * typ * (binding * binding) option) list - -> 
theory -> Domain_Take_Proofs.iso_info list * theory + (string list * binding * mixfix * typ + * (binding * binding) option) list -> + theory -> + (Domain_Take_Proofs.iso_info list + * Domain_Take_Proofs.take_induct_info) * theory + val domain_isomorphism_cmd : (string list * binding * mixfix * string * (binding * binding) option) list -> theory -> theory @@ -265,7 +269,8 @@ (prep_typ: theory -> 'a -> (string * sort) list -> typ * (string * sort) list) (doms_raw: (string list * binding * mixfix * 'a * (binding * binding) option) list) (thy: theory) - : Domain_Take_Proofs.iso_info list * theory = + : (Domain_Take_Proofs.iso_info list + * Domain_Take_Proofs.take_induct_info) * theory = let val _ = Theory.requires thy "Representable" "domain isomorphisms"; @@ -644,8 +649,12 @@ fold_map prove_lub_take (dom_binds ~~ take_consts ~~ map_ID_thms ~~ dom_eqns) thy; + (* prove additional take theorems *) + val (take_info2, thy) = + Domain_Take_Proofs.add_lub_take_theorems + (dom_binds ~~ iso_infos) take_info lub_take_thms thy; in - (iso_infos, thy) + ((iso_infos, take_info2), thy) end; val domain_isomorphism = gen_domain_isomorphism cert_typ; diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Tools/Domain/domain_take_proofs.ML --- a/src/HOLCF/Tools/Domain/domain_take_proofs.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Tools/Domain/domain_take_proofs.ML Wed Mar 10 16:40:20 2010 +0100 @@ -16,10 +16,9 @@ abs_inverse : thm, rep_inverse : thm } - - val define_take_functions : - (binding * iso_info) list -> theory -> - { take_consts : term list, + type take_info = + { + take_consts : term list, take_defs : thm list, chain_take_thms : thm list, take_0_thms : thm list, @@ -27,7 +26,29 @@ deflation_take_thms : thm list, finite_consts : term list, finite_defs : thm list - } * theory + } + type take_induct_info = + { + take_consts : term list, + take_defs : thm list, + chain_take_thms : thm list, + take_0_thms : thm list, + take_Suc_thms : thm list, + deflation_take_thms : thm list, + finite_consts : term list, + finite_defs : thm list, + lub_take_thms : thm list, + reach_thms : thm list, + take_lemma_thms : thm list, + is_finite : bool, + take_induct_thms : thm list + } + val define_take_functions : + (binding * iso_info) list -> theory -> take_info * theory + + val add_lub_take_theorems : + (binding * iso_info) list -> take_info -> thm list -> + theory -> take_induct_info * theory val map_of_typ : theory -> (typ * term) list -> typ -> term @@ -52,6 +73,34 @@ rep_inverse : thm }; +type take_info = + { take_consts : term list, + take_defs : thm list, + chain_take_thms : thm list, + take_0_thms : thm list, + take_Suc_thms : thm list, + deflation_take_thms : thm list, + finite_consts : term list, + finite_defs : thm list + }; + +type take_induct_info = + { + take_consts : term list, + take_defs : thm list, + chain_take_thms : thm list, + take_0_thms : thm list, + take_Suc_thms : thm list, + deflation_take_thms : thm list, + finite_consts : term list, + finite_defs : thm list, + lub_take_thms : thm list, + reach_thms : thm list, + take_lemma_thms : thm list, + is_finite : bool, + take_induct_thms : thm list + }; + val beta_ss = HOL_basic_ss addsimps simp_thms @@ -168,6 +217,13 @@ ((const, def_thm), thy) end; +fun add_qualified_def name (path, eqn) thy = + thy + |> Sign.add_path path + |> yield_singleton (PureThy.add_defs false) + (Thm.no_attributes (Binding.name name, eqn)) + ||> Sign.parent_path; + fun add_qualified_thm name (path, thm) thy = thy |> Sign.add_path path @@ -239,12 +295,8 @@ Sign.declare_const 
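+(* "take_induct_info" extends "take_info" with the lub_take, reach, take_lemma
+   and take_induct theorems plus the "is_finite" flag; clients can destructure
+   the record returned by add_lub_take_theorems directly, e.g. (sketch only,
+   field names as declared above):
+
+     val {chain_take_thms, lub_take_thms, take_induct_thms, is_finite, ...} =
+       take_info;
+
+   add_qualified_def/add_qualified_thm store each definition and theorem under
+   the type's qualified name, e.g. "t.take_def" for a domain "t". *)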
((take_bind, take_type), NoSyn) thy; val take_eqn = Logic.mk_equals (take_const, take_rhs); val (take_def_thm, thy) = - thy - |> Sign.add_path (Binding.name_of tbind) - |> yield_singleton - (PureThy.add_defs false o map Thm.no_attributes) - (Binding.name "take_def", take_eqn) - ||> Sign.parent_path; + add_qualified_def "take_def" + (Binding.name_of tbind, take_eqn) thy; in ((take_const, take_def_thm), thy) end; val ((take_consts, take_defs), thy) = thy |> fold_map define_take_const (dom_binds ~~ take_rhss ~~ dom_eqns) @@ -388,12 +440,8 @@ (lambda n (mk_eq (mk_capply (take_const $ n, x), x)))); val finite_eqn = Logic.mk_equals (finite_const, finite_rhs); val (finite_def_thm, thy) = - thy - |> Sign.add_path (Binding.name_of tbind) - |> yield_singleton - (PureThy.add_defs false o map Thm.no_attributes) - (Binding.name "finite_def", finite_eqn) - ||> Sign.parent_path; + add_qualified_def "finite_def" + (Binding.name_of tbind, finite_eqn) thy; in ((finite_const, finite_def_thm), thy) end; val ((finite_consts, finite_defs), thy) = thy |> fold_map define_finite_const (dom_binds ~~ take_consts ~~ dom_eqns) @@ -415,4 +463,174 @@ (result, thy) end; +fun prove_finite_take_induct + (spec : (binding * iso_info) list) + (take_info : take_info) + (lub_take_thms : thm list) + (thy : theory) = + let + val dom_binds = map fst spec; + val iso_infos = map snd spec; + val absTs = map #absT iso_infos; + val dnames = map Binding.name_of dom_binds; + val {take_consts, ...} = take_info; + val {chain_take_thms, take_0_thms, take_Suc_thms, ...} = take_info; + val {finite_consts, finite_defs, ...} = take_info; + + val decisive_lemma = + let + fun iso_locale info = + @{thm iso.intro} OF [#abs_inverse info, #rep_inverse info]; + val iso_locale_thms = map iso_locale iso_infos; + val decisive_abs_rep_thms = + map (fn x => @{thm decisive_abs_rep} OF [x]) iso_locale_thms; + val n = Free ("n", @{typ nat}); + fun mk_decisive t = + Const (@{const_name decisive}, fastype_of t --> boolT) $ t; + fun f take_const = mk_decisive (take_const $ n); + val goal = mk_trp (foldr1 mk_conj (map f take_consts)); + val rules0 = @{thm decisive_bottom} :: take_0_thms; + val rules1 = + take_Suc_thms @ decisive_abs_rep_thms + @ @{thms decisive_ID decisive_ssum_map decisive_sprod_map}; + val tac = EVERY [ + rtac @{thm nat.induct} 1, + simp_tac (HOL_ss addsimps rules0) 1, + asm_simp_tac (HOL_ss addsimps rules1) 1]; + in Goal.prove_global thy [] [] goal (K tac) end; + fun conjuncts 1 thm = [thm] + | conjuncts n thm = let + val thmL = thm RS @{thm conjunct1}; + val thmR = thm RS @{thm conjunct2}; + in thmL :: conjuncts (n-1) thmR end; + val decisive_thms = conjuncts (length spec) decisive_lemma; + + fun prove_finite_thm (absT, finite_const) = + let + val goal = mk_trp (finite_const $ Free ("x", absT)); + val tac = + EVERY [ + rewrite_goals_tac finite_defs, + rtac @{thm lub_ID_finite} 1, + resolve_tac chain_take_thms 1, + resolve_tac lub_take_thms 1, + resolve_tac decisive_thms 1]; + in + Goal.prove_global thy [] [] goal (K tac) + end; + val finite_thms = + map prove_finite_thm (absTs ~~ finite_consts); + + fun prove_take_induct ((ch_take, lub_take), decisive) = + Drule.export_without_context + (@{thm lub_ID_finite_take_induct} OF [ch_take, lub_take, decisive]); + val take_induct_thms = + map prove_take_induct + (chain_take_thms ~~ lub_take_thms ~~ decisive_thms); + + val thy = thy + |> fold (snd oo add_qualified_thm "finite") + (dnames ~~ finite_thms) + |> fold (snd oo add_qualified_thm "take_induct") + (dnames ~~ take_induct_thms); + in + 
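+    (* Outline of the finiteness argument above: "decisive_lemma" proves
+         decisive (take_1 n) & ... & decisive (take_k n)
+       for arbitrary n by induction, unfolding take_0/take_Suc and pushing
+       "decisive" through ssum_map, sprod_map and the abs/rep isomorphisms;
+       "conjuncts" splits the balanced conjunction, "lub_ID_finite" combines
+       chain_take, lub_take and decisiveness into the per-type finiteness
+       theorems, and "lub_ID_finite_take_induct" yields the take-induction
+       rules returned below. *)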
((finite_thms, take_induct_thms), thy) + end; + +fun add_lub_take_theorems + (spec : (binding * iso_info) list) + (take_info : take_info) + (lub_take_thms : thm list) + (thy : theory) = + let + + (* retrieve components of spec *) + val dom_binds = map fst spec; + val iso_infos = map snd spec; + val absTs = map #absT iso_infos; + val repTs = map #repT iso_infos; + val dnames = map Binding.name_of dom_binds; + val {take_consts, take_0_thms, take_Suc_thms, ...} = take_info; + val {chain_take_thms, deflation_take_thms, ...} = take_info; + + (* prove take lemmas *) + fun prove_take_lemma ((chain_take, lub_take), dname) thy = + let + val take_lemma = + Drule.export_without_context + (@{thm lub_ID_take_lemma} OF [chain_take, lub_take]); + in + add_qualified_thm "take_lemma" (dname, take_lemma) thy + end; + val (take_lemma_thms, thy) = + fold_map prove_take_lemma + (chain_take_thms ~~ lub_take_thms ~~ dnames) thy; + + (* prove reach lemmas *) + fun prove_reach_lemma ((chain_take, lub_take), dname) thy = + let + val thm = + Drule.export_without_context + (@{thm lub_ID_reach} OF [chain_take, lub_take]); + in + add_qualified_thm "reach" (dname, thm) thy + end; + val (reach_thms, thy) = + fold_map prove_reach_lemma + (chain_take_thms ~~ lub_take_thms ~~ dnames) thy; + + (* test for finiteness of domain definitions *) + local + val types = [@{type_name ssum}, @{type_name sprod}]; + fun finite d T = if T mem absTs then d else finite' d T + and finite' d (Type (c, Ts)) = + let val d' = d andalso c mem types; + in forall (finite d') Ts end + | finite' d _ = true; + in + val is_finite = forall (finite true) repTs; + end; + + val ((finite_thms, take_induct_thms), thy) = + if is_finite + then + let + val ((finites, take_inducts), thy) = + prove_finite_take_induct spec take_info lub_take_thms thy; + in + ((SOME finites, take_inducts), thy) + end + else + let + fun prove_take_induct (chain_take, lub_take) = + Drule.export_without_context + (@{thm lub_ID_take_induct} OF [chain_take, lub_take]); + val take_inducts = + map prove_take_induct (chain_take_thms ~~ lub_take_thms); + val thy = fold (snd oo add_qualified_thm "take_induct") + (dnames ~~ take_inducts) thy; + in + ((NONE, take_inducts), thy) + end; + + val result = + { + take_consts = #take_consts take_info, + take_defs = #take_defs take_info, + chain_take_thms = #chain_take_thms take_info, + take_0_thms = #take_0_thms take_info, + take_Suc_thms = #take_Suc_thms take_info, + deflation_take_thms = #deflation_take_thms take_info, + finite_consts = #finite_consts take_info, + finite_defs = #finite_defs take_info, + lub_take_thms = lub_take_thms, + reach_thms = reach_thms, + take_lemma_thms = take_lemma_thms, + is_finite = is_finite, + take_induct_thms = take_induct_thms + }; + in + (result, thy) + end; + end; diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/Tools/Domain/domain_theorems.ML --- a/src/HOLCF/Tools/Domain/domain_theorems.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/Tools/Domain/domain_theorems.ML Wed Mar 10 16:40:20 2010 +0100 @@ -15,7 +15,11 @@ -> Domain_Take_Proofs.iso_info -> theory -> thm list * theory; - val comp_theorems: bstring * Domain_Library.eq list -> theory -> thm list * theory; + val comp_theorems : + bstring * Domain_Library.eq list -> + Domain_Take_Proofs.take_induct_info -> + theory -> thm list * theory + val quiet_mode: bool Unsynchronized.ref; val trace_domain: bool Unsynchronized.ref; end; @@ -204,15 +208,14 @@ fun prove_induction (comp_dnam, eqs : eq list) - (take_lemmas : thm list) - (axs_reach : thm list) (take_rews : thm 
list) + (take_info : Domain_Take_Proofs.take_induct_info) (thy : theory) = let val dnames = map (fst o fst) eqs; val conss = map snd eqs; fun dc_take dn = %%:(dn^"_take"); - val x_name = idx_name dnames "x"; + val x_name = idx_name dnames "x"; val P_name = idx_name dnames "P"; val pg = pg' thy; @@ -222,15 +225,15 @@ in val axs_rep_iso = map (ga "rep_iso") dnames; val axs_abs_iso = map (ga "abs_iso") dnames; - val axs_chain_take = map (ga "chain_take") dnames; - val lub_take_thms = map (ga "lub_take") dnames; - val axs_finite_def = map (ga "finite_def") dnames; - val take_0_thms = map (ga "take_0") dnames; - val take_Suc_thms = map (ga "take_Suc") dnames; val cases = map (ga "casedist" ) dnames; val con_rews = maps (gts "con_rews" ) dnames; end; + val {take_consts, ...} = take_info; + val {take_0_thms, take_Suc_thms, chain_take_thms, ...} = take_info; + val {lub_take_thms, finite_defs, reach_thms, ...} = take_info; + val {take_induct_thms, ...} = take_info; + fun one_con p (con, args) = let val P_names = map P_name (1 upto (length dnames)); @@ -281,7 +284,7 @@ in val n__eqs = mapn (fn n => fn (_,cons) => (n,cons)) 0 eqs; val is_emptys = map warn n__eqs; - val is_finite = forall (not o lazy_rec_to [] false) n__eqs; + val is_finite = #is_finite take_info; val _ = if is_finite then message ("Proving finiteness rule for domain "^comp_dnam^" ...") else (); @@ -317,78 +320,32 @@ in tacs1 @ maps cases_tacs (conss ~~ cases) end; - in pg'' thy [] goal tacf - handle ERROR _ => (warning "Proof of finite_ind failed."; TrueI) - end; + in pg'' thy [] goal tacf end; (* ----- theorems concerning finiteness and induction ----------------------- *) val global_ctxt = ProofContext.init thy; - val _ = trace " Proving finites, ind..."; - val (finites, ind) = - ( + val _ = trace " Proving ind..."; + val ind = if is_finite then (* finite case *) let - val decisive_lemma = - let - val iso_locale_thms = - map2 (fn x => fn y => @{thm iso.intro} OF [x, y]) - axs_abs_iso axs_rep_iso; - val decisive_abs_rep_thms = - map (fn x => @{thm decisive_abs_rep} OF [x]) - iso_locale_thms; - val n = Free ("n", @{typ nat}); - fun mk_decisive t = %%: @{const_name decisive} $ t; - fun f dn = mk_decisive (dc_take dn $ n); - val goal = mk_trp (foldr1 mk_conj (map f dnames)); - val rules0 = @{thm decisive_bottom} :: take_0_thms; - val rules1 = - take_Suc_thms @ decisive_abs_rep_thms - @ @{thms decisive_ID decisive_ssum_map decisive_sprod_map}; - val tacs = [ - rtac @{thm nat.induct} 1, - simp_tac (HOL_ss addsimps rules0) 1, - asm_simp_tac (HOL_ss addsimps rules1) 1]; - in pg [] goal (K tacs) end; - fun take_enough dn = mk_ex ("n",dc_take dn $ Bound 0 ` %:"x" === %:"x"); - fun one_finite (dn, decisive_thm) = + fun concf n dn = %:(P_name n) $ %:(x_name n); + fun tacf {prems, context} = let - val goal = mk_trp (%%:(dn^"_finite") $ %:"x"); - val tacs = [ - rtac @{thm lub_ID_finite} 1, - resolve_tac axs_chain_take 1, - resolve_tac lub_take_thms 1, - rtac decisive_thm 1]; - in pg axs_finite_def goal (K tacs) end; - - val _ = trace " Proving finites"; - val finites = map one_finite (dnames ~~ atomize global_ctxt decisive_lemma); - val _ = trace " Proving ind"; - val ind = - let - fun concf n dn = %:(P_name n) $ %:(x_name n); - fun tacf {prems, context} = - let - fun finite_tacs (finite, fin_ind) = [ - rtac(rewrite_rule axs_finite_def finite RS exE)1, - etac subst 1, - rtac fin_ind 1, - ind_prems_tac prems]; - in - TRY (safe_tac HOL_cs) :: - maps finite_tacs (finites ~~ atomize global_ctxt finite_ind) - end; - in pg'' thy [] (ind_term concf) 
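+        (* Finite case: each conjunct "P x" is first reduced by the per-type
+           take_induct rule to the same property of the finite approximations
+           "take n" of x (for arbitrary n), after which finite_ind discharges
+           it from the constructor premises supplied by ind_prems_tac. *)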
tacf end; - in (finites, ind) end (* let *) + fun finite_tacs (take_induct, fin_ind) = [ + rtac take_induct 1, + rtac fin_ind 1, + ind_prems_tac prems]; + in + TRY (safe_tac HOL_cs) :: + maps finite_tacs (take_induct_thms ~~ atomize global_ctxt finite_ind) + end; + in pg'' thy [] (ind_term concf) tacf end else (* infinite case *) let - fun one_finite n dn = - read_instantiate global_ctxt [(("P", 0), dn ^ "_finite " ^ x_name n)] excluded_middle; - val finites = mapn one_finite 1 dnames; - val goal = let fun one_adm n _ = mk_trp (mk_adm (%:(P_name n))); @@ -398,33 +355,36 @@ @{thms cont_id cont_const cont2cont_Rep_CFun cont2cont_fst cont2cont_snd}; val subgoal = - let fun p n dn = %:(P_name n) $ (dc_take dn $ Bound 0 `%(x_name n)); - in mk_trp (mk_all ("n", foldr1 mk_conj (mapn p 1 dnames))) end; - val subgoal' = legacy_infer_term thy subgoal; + let + val Ts = map (Type o fst) eqs; + val P_names = Datatype_Prop.indexify_names (map (K "P") dnames); + val x_names = Datatype_Prop.indexify_names (map (K "x") dnames); + val P_types = map (fn T => T --> HOLogic.boolT) Ts; + val Ps = map Free (P_names ~~ P_types); + val xs = map Free (x_names ~~ Ts); + val n = Free ("n", HOLogic.natT); + val goals = + map (fn ((P,t),x) => P $ HOLCF_Library.mk_capply (t $ n, x)) + (Ps ~~ take_consts ~~ xs); + in + HOLogic.mk_Trueprop + (HOLogic.mk_all ("n", HOLogic.natT, foldr1 HOLogic.mk_conj goals)) + end; fun tacf {prems, context} = let val subtac = EVERY [rtac allI 1, rtac finite_ind 1, ind_prems_tac prems]; - val subthm = Goal.prove context [] [] subgoal' (K subtac); + val subthm = Goal.prove context [] [] subgoal (K subtac); in - map (fn ax_reach => rtac (ax_reach RS subst) 1) axs_reach @ [ + map (fn ax_reach => rtac (ax_reach RS subst) 1) reach_thms @ [ cut_facts_tac (subthm :: take (length dnames) prems) 1, REPEAT (rtac @{thm conjI} 1 ORELSE EVERY [etac @{thm admD [OF _ ch2ch_Rep_CFunL]} 1, - resolve_tac axs_chain_take 1, + resolve_tac chain_take_thms 1, asm_simp_tac HOL_basic_ss 1]) ] end; - val ind = (pg'' thy [] goal tacf - handle ERROR _ => - (warning "Cannot prove infinite induction rule"; TrueI) - ); - in (finites, ind) end - ) - handle THM _ => - (warning "Induction proofs failed (THM raised)."; ([], TrueI)) - | ERROR _ => - (warning "Cannot prove induction rule"; ([], TrueI)); + in pg'' thy [] goal tacf end; val case_ns = let @@ -440,15 +400,11 @@ ((Binding.empty, [rule]), [Rule_Cases.case_names case_ns, Induct.induct_type dname]); -val induct_failed = (Thm.prop_of ind = Thm.prop_of TrueI); - in thy |> Sign.add_path comp_dnam |> snd o PureThy.add_thmss [ - ((Binding.name "finites" , finites ), []), ((Binding.name "finite_ind" , [finite_ind]), []), ((Binding.name "ind" , [ind] ), [])] - |> (if induct_failed then I - else snd o PureThy.add_thmss (map ind_rule (dnames ~~ inducts))) + |> (snd o PureThy.add_thmss (map ind_rule (dnames ~~ inducts))) |> Sign.parent_path end; (* prove_induction *) @@ -604,7 +560,10 @@ |> Sign.parent_path end; (* let *) -fun comp_theorems (comp_dnam, eqs: eq list) thy = +fun comp_theorems + (comp_dnam : string, eqs : eq list) + (take_info : Domain_Take_Proofs.take_induct_info) + (thy : theory) = let val map_tab = Domain_Take_Proofs.get_map_tab thy; @@ -629,31 +588,7 @@ (* theorems about take *) -local - fun ga s dn = PureThy.get_thm thy (dn ^ "." 
^ s); - val axs_chain_take = map (ga "chain_take") dnames; - val axs_lub_take = map (ga "lub_take" ) dnames; - fun take_thms ((ax_chain_take, ax_lub_take), dname) thy = - let - val dnam = Long_Name.base_name dname; - val take_lemma = - Drule.export_without_context - (@{thm lub_ID_take_lemma} OF [ax_chain_take, ax_lub_take]); - val reach = - Drule.export_without_context - (@{thm lub_ID_reach} OF [ax_chain_take, ax_lub_take]); - val thy = - thy |> Sign.add_path dnam - |> snd o PureThy.add_thms [ - ((Binding.name "take_lemma", take_lemma), []), - ((Binding.name "reach" , reach ), [])] - |> Sign.parent_path; - in ((take_lemma, reach), thy) end; -in - val ((take_lemmas, axs_reach), thy) = - fold_map take_thms (axs_chain_take ~~ axs_lub_take ~~ dnames) thy - |>> ListPair.unzip; -end; +val take_lemmas = #take_lemma_thms take_info; val take_rews = maps (fn dn => PureThy.get_thms thy (dn ^ ".take_rews")) dnames; @@ -661,7 +596,7 @@ (* prove induction rules, unless definition is indirect recursive *) val thy = if is_indirect then thy else - prove_induction (comp_dnam, eqs) take_lemmas axs_reach take_rews thy; + prove_induction (comp_dnam, eqs) take_rews take_info thy; val thy = if is_indirect then thy else diff -r d58a4ac1ca1c -r 553906904426 src/HOLCF/ex/Domain_ex.thy --- a/src/HOLCF/ex/Domain_ex.thy Tue Mar 09 16:30:43 2010 +0100 +++ b/src/HOLCF/ex/Domain_ex.thy Wed Mar 10 16:40:20 2010 +0100 @@ -107,7 +107,7 @@ subsection {* Generated constants and theorems *} -domain 'a tree = Leaf (lazy 'a) | Node (left :: "'a tree") (lazy right :: "'a tree") +domain 'a tree = Leaf (lazy 'a) | Node (left :: "'a tree") (right :: "'a tree") lemmas tree_abs_defined_iff = iso.abs_defined_iff [OF iso.intro [OF tree.abs_iso tree.rep_iso]] @@ -174,7 +174,7 @@ text {* Rules about finiteness predicate *} term tree_finite thm tree.finite_def -thm tree.finites +thm tree.finite (* only generated for flat datatypes *) text {* Rules about bisimulation predicate *} term tree_bisim @@ -196,14 +196,6 @@ -- {* Inner syntax error at "= UU" *} *) -text {* - I don't know what is going on here. The failed proof has to do with - the finiteness predicate. -*} - -domain foo = Foo (lazy "bar") and bar = Bar - -- "Cannot prove induction rule" - text {* Declaring class constraints on the LHS is currently broken. 
*} (* domain ('a::cpo) box = Box (lazy 'a) diff -r d58a4ac1ca1c -r 553906904426 src/Pure/General/name_space.ML --- a/src/Pure/General/name_space.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/General/name_space.ML Wed Mar 10 16:40:20 2010 +0100 @@ -47,6 +47,7 @@ val transform_binding: naming -> binding -> binding val full_name: naming -> binding -> string val declare: bool -> naming -> binding -> T -> string * T + val alias: naming -> binding -> string -> T -> T type 'a table = T * 'a Symtab.table val define: bool -> naming -> binding * 'a -> 'a table -> string * 'a table val empty_table: string -> 'a table @@ -72,8 +73,7 @@ (* datatype entry *) type entry = - {externals: xstring list, - concealed: bool, + {concealed: bool, group: serial option, theory_name: string, pos: Position.T, @@ -96,7 +96,7 @@ Name_Space of {kind: string, internals: (string list * string list) Symtab.table, (*visible, hidden*) - entries: entry Symtab.table}; + entries: (xstring list * entry) Symtab.table}; (*externals, entry*) fun make_name_space (kind, internals, entries) = Name_Space {kind = kind, internals = internals, entries = entries}; @@ -115,8 +115,7 @@ fun the_entry (Name_Space {kind, entries, ...}) name = (case Symtab.lookup entries name of NONE => error ("Unknown " ^ kind ^ " " ^ quote name) - | SOME {concealed, group, theory_name, pos, id, ...} => - {concealed = concealed, group = group, theory_name = theory_name, pos = pos, id = id}); + | SOME (_, entry) => entry); fun is_concealed space name = #concealed (the_entry space name); @@ -134,7 +133,7 @@ fun get_accesses (Name_Space {entries, ...}) name = (case Symtab.lookup entries name of NONE => [name] - | SOME {externals, ...} => externals); + | SOME (externals, _) => externals); fun valid_accesses (Name_Space {internals, ...}) name = Symtab.fold (fn (xname, (names, _)) => @@ -212,7 +211,7 @@ then raise Symtab.SAME else (Library.merge (op =) (names1, names2), Library.merge (op =) (names1', names2')))); val entries' = (entries1, entries2) |> Symtab.join - (fn name => fn (entry1, entry2) => + (fn name => fn ((_, entry1), (_, entry2)) => if #id entry1 = #id entry2 then raise Symtab.SAME else err_dup kind' (name, entry1) (name, entry2)); in make_name_space (kind', internals', entries') end; @@ -311,13 +310,13 @@ (* declaration *) -fun new_entry strict entry = +fun new_entry strict (name, (externals, entry)) = map_name_space (fn (kind, internals, entries) => let val entries' = - (if strict then Symtab.update_new else Symtab.update) entry entries + (if strict then Symtab.update_new else Symtab.update) (name, (externals, entry)) entries handle Symtab.DUP dup => - err_dup kind (dup, the (Symtab.lookup entries dup)) entry; + err_dup kind (dup, #2 (the (Symtab.lookup entries dup))) (name, entry); in (kind, internals, entries') end); fun declare strict naming binding space = @@ -330,16 +329,35 @@ val _ = name = "" andalso err_bad binding; val entry = - {externals = accs', - concealed = concealed, + {concealed = concealed, group = group, theory_name = theory_name, pos = Position.default (Binding.pos_of binding), id = serial ()}; - val space' = space |> fold (add_name name) accs |> new_entry strict (name, entry); + val space' = space + |> fold (add_name name) accs + |> new_entry strict (name, (accs', entry)); in (name, space') end; +(* alias *) + +fun alias naming binding name space = + let + val (accs, accs') = accesses naming binding; + val space' = space + |> fold (add_name name) accs + |> map_name_space (fn (kind, internals, entries) => + let + val _ = 
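+      (* "alias" only adds further external accesses for an entry that must
+         already exist; it never introduces a new internal name.  Hypothetical
+         usage sketch, with made-up names:
+
+           val space' =
+             Name_Space.alias naming (Binding.name "foo") "Long.Prefix.foo" space;
+      *)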
Symtab.defined entries name orelse + error ("Undefined " ^ kind ^ " " ^ quote name); + val entries' = entries + |> Symtab.map_entry name (fn (externals, entry) => + (Library.merge (op =) (externals, accs'), entry)) + in (kind, internals, entries') end); + in space' end; + + (** name spaces coupled with symbol tables **) diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Isar/class.ML --- a/src/Pure/Isar/class.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Isar/class.ML Wed Mar 10 16:40:20 2010 +0100 @@ -338,7 +338,7 @@ val subclass = gen_subclass (K I) user_proof; fun prove_subclass tac = gen_subclass (K I) (tactic_proof tac); -val subclass_cmd = gen_subclass Sign.read_class user_proof; +val subclass_cmd = gen_subclass (ProofContext.read_class o ProofContext.init) user_proof; end; (*local*) diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Isar/class_target.ML --- a/src/Pure/Isar/class_target.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Isar/class_target.ML Wed Mar 10 16:40:20 2010 +0100 @@ -417,7 +417,8 @@ fun read_multi_arity thy (raw_tycos, raw_sorts, raw_sort) = let - val all_arities = map (fn raw_tyco => Sign.read_arity thy + val ctxt = ProofContext.init thy; + val all_arities = map (fn raw_tyco => ProofContext.read_arity ctxt (raw_tyco, raw_sorts, raw_sort)) raw_tycos; val tycos = map #1 all_arities; val (_, sorts, sort) = hd all_arities; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Isar/constdefs.ML --- a/src/Pure/Isar/constdefs.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Isar/constdefs.ML Wed Mar 10 16:40:20 2010 +0100 @@ -22,6 +22,8 @@ fun gen_constdef prep_vars prep_prop prep_att structs (raw_decl, ((raw_name, raw_atts), raw_prop)) thy = let + val _ = legacy_feature ("\"constdefs\"; prefer \"definition\" instead"); + fun err msg ts = error (cat_lines (msg :: map (Syntax.string_of_term_global thy) ts)); val thy_ctxt = ProofContext.init thy; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Isar/isar_syn.ML --- a/src/Pure/Isar/isar_syn.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Isar/isar_syn.ML Wed Mar 10 16:40:20 2010 +0100 @@ -103,16 +103,15 @@ (* types *) val _ = - OuterSyntax.command "typedecl" "type declaration" K.thy_decl - (P.type_args -- P.binding -- P.opt_mixfix >> (fn ((args, a), mx) => - Toplevel.theory (Typedecl.typedecl (a, args, mx) #> snd))); + OuterSyntax.local_theory "typedecl" "type declaration" K.thy_decl + (P.type_args -- P.binding -- P.opt_mixfix + >> (fn ((args, a), mx) => Typedecl.typedecl (a, args, mx) #> snd)); val _ = OuterSyntax.command "types" "declare type abbreviations" K.thy_decl (Scan.repeat1 (P.type_args -- P.binding -- (P.$$$ "=" |-- P.!!! 
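+    (* Since "typedecl" is now registered via OuterSyntax.local_theory above,
+       it can be issued inside a local theory target such as a locale; the
+       Typedecl module rejects type arguments that are locally fixed type
+       variables, so the declared type never depends on the context.  A
+       hypothetical Isar sketch:
+
+         locale l = fixes f :: "nat => nat" begin
+           typedecl t   (* declared globally, independent of f *)
+         end
+    *)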
(P.typ -- P.opt_mixfix'))) - >> (Toplevel.theory o Sign.add_tyabbrs o - map (fn ((args, a), (T, mx)) => (a, args, T, mx)))); + >> (Toplevel.theory o Sign.add_tyabbrs o map (fn ((args, a), (T, mx)) => (a, args, T, mx)))); val _ = OuterSyntax.command "nonterminals" "declare types treated as grammar nonterminal symbols" diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Isar/proof_context.ML --- a/src/Pure/Isar/proof_context.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Isar/proof_context.ML Wed Mar 10 16:40:20 2010 +0100 @@ -27,6 +27,8 @@ val restore_naming: Proof.context -> Proof.context -> Proof.context val full_name: Proof.context -> binding -> string val syn_of: Proof.context -> Syntax.syntax + val tsig_of: Proof.context -> Type.tsig + val default_sort: Proof.context -> indexname -> sort val consts_of: Proof.context -> Consts.T val the_const_constraint: Proof.context -> string -> typ val set_syntax_mode: Syntax.mode -> Proof.context -> Proof.context @@ -41,6 +43,9 @@ val pretty_term_abbrev: Proof.context -> term -> Pretty.T val pretty_fact_aux: Proof.context -> bool -> string * thm list -> Pretty.T val pretty_fact: Proof.context -> string * thm list -> Pretty.T + val read_class: Proof.context -> xstring -> class + val read_arity: Proof.context -> xstring * string list * string -> arity + val cert_arity: Proof.context -> arity -> arity val read_typ: Proof.context -> string -> typ val read_typ_syntax: Proof.context -> string -> typ val read_typ_abbrev: Proof.context -> string -> typ @@ -53,6 +58,7 @@ val inferred_param: string -> Proof.context -> typ * Proof.context val inferred_fixes: Proof.context -> (string * typ) list * Proof.context val read_type_name: Proof.context -> bool -> string -> typ + val read_type_name_proper: Proof.context -> bool -> string -> typ val read_const_proper: Proof.context -> bool -> string -> term val read_const: Proof.context -> bool -> string -> term val allow_dummies: Proof.context -> Proof.context @@ -122,6 +128,9 @@ Context.generic -> Context.generic val target_notation: bool -> Syntax.mode -> (term * mixfix) list -> morphism -> Context.generic -> Context.generic + val class_alias: binding -> class -> Proof.context -> Proof.context + val type_alias: binding -> string -> Proof.context -> Proof.context + val const_alias: binding -> string -> Proof.context -> Proof.context val add_const_constraint: string * typ option -> Proof.context -> Proof.context val add_abbrev: string -> binding * term -> Proof.context -> (term * term) * Proof.context val revert_abbrev: string -> string -> Proof.context -> Proof.context @@ -167,16 +176,17 @@ datatype ctxt = Ctxt of - {mode: mode, (*inner syntax mode*) - naming: Name_Space.naming, (*local naming conventions*) - syntax: Local_Syntax.T, (*local syntax*) - consts: Consts.T * Consts.T, (*local/global consts*) - facts: Facts.T, (*local facts*) + {mode: mode, (*inner syntax mode*) + naming: Name_Space.naming, (*local naming conventions*) + syntax: Local_Syntax.T, (*local syntax*) + tsigs: Type.tsig * Type.tsig, (*local/global type signature -- local name space only*) + consts: Consts.T * Consts.T, (*local/global consts -- local name space/abbrevs only*) + facts: Facts.T, (*local facts*) cases: (string * (Rule_Cases.T * bool)) list}; (*named case contexts*) -fun make_ctxt (mode, naming, syntax, consts, facts, cases) = +fun make_ctxt (mode, naming, syntax, tsigs, consts, facts, cases) = Ctxt {mode = mode, naming = naming, syntax = syntax, - consts = consts, facts = facts, cases = cases}; + tsigs = tsigs, consts = consts, facts = 
facts, cases = cases}; val local_naming = Name_Space.default_naming |> Name_Space.add_path "local"; @@ -185,41 +195,46 @@ type T = ctxt; fun init thy = make_ctxt (mode_default, local_naming, Local_Syntax.init thy, + (Sign.tsig_of thy, Sign.tsig_of thy), (Sign.consts_of thy, Sign.consts_of thy), Facts.empty, []); ); fun rep_context ctxt = ContextData.get ctxt |> (fn Ctxt args => args); fun map_context f = - ContextData.map (fn Ctxt {mode, naming, syntax, consts, facts, cases} => - make_ctxt (f (mode, naming, syntax, consts, facts, cases))); + ContextData.map (fn Ctxt {mode, naming, syntax, tsigs, consts, facts, cases} => + make_ctxt (f (mode, naming, syntax, tsigs, consts, facts, cases))); -fun set_mode mode = map_context (fn (_, naming, syntax, consts, facts, cases) => - (mode, naming, syntax, consts, facts, cases)); +fun set_mode mode = map_context (fn (_, naming, syntax, tsigs, consts, facts, cases) => + (mode, naming, syntax, tsigs, consts, facts, cases)); fun map_mode f = - map_context (fn (Mode {stmt, pattern, schematic, abbrev}, naming, syntax, consts, facts, cases) => - (make_mode (f (stmt, pattern, schematic, abbrev)), naming, syntax, consts, facts, cases)); + map_context (fn (Mode {stmt, pattern, schematic, abbrev}, naming, syntax, tsigs, consts, facts, cases) => + (make_mode (f (stmt, pattern, schematic, abbrev)), naming, syntax, tsigs, consts, facts, cases)); fun map_naming f = - map_context (fn (mode, naming, syntax, consts, facts, cases) => - (mode, f naming, syntax, consts, facts, cases)); + map_context (fn (mode, naming, syntax, tsigs, consts, facts, cases) => + (mode, f naming, syntax, tsigs, consts, facts, cases)); fun map_syntax f = - map_context (fn (mode, naming, syntax, consts, facts, cases) => - (mode, naming, f syntax, consts, facts, cases)); + map_context (fn (mode, naming, syntax, tsigs, consts, facts, cases) => + (mode, naming, f syntax, tsigs, consts, facts, cases)); + +fun map_tsigs f = + map_context (fn (mode, naming, syntax, tsigs, consts, facts, cases) => + (mode, naming, syntax, f tsigs, consts, facts, cases)); fun map_consts f = - map_context (fn (mode, naming, syntax, consts, facts, cases) => - (mode, naming, syntax, f consts, facts, cases)); + map_context (fn (mode, naming, syntax, tsigs, consts, facts, cases) => + (mode, naming, syntax, tsigs, f consts, facts, cases)); fun map_facts f = - map_context (fn (mode, naming, syntax, consts, facts, cases) => - (mode, naming, syntax, consts, f facts, cases)); + map_context (fn (mode, naming, syntax, tsigs, consts, facts, cases) => + (mode, naming, syntax, tsigs, consts, f facts, cases)); fun map_cases f = - map_context (fn (mode, naming, syntax, consts, facts, cases) => - (mode, naming, syntax, consts, facts, f cases)); + map_context (fn (mode, naming, syntax, tsigs, consts, facts, cases) => + (mode, naming, syntax, tsigs, consts, facts, f cases)); val get_mode = #mode o rep_context; val restore_mode = set_mode o get_mode; @@ -237,6 +252,9 @@ val set_syntax_mode = map_syntax o Local_Syntax.set_mode; val restore_syntax_mode = map_syntax o Local_Syntax.restore_mode o syntax_of; +val tsig_of = #1 o #tsigs o rep_context; +fun default_sort ctxt = the_default (Type.defaultS (tsig_of ctxt)) o Variable.def_sort ctxt; + val consts_of = #1 o #consts o rep_context; val the_const_constraint = Consts.the_constraint o consts_of; @@ -246,8 +264,13 @@ (* theory transfer *) -fun transfer_syntax thy = - map_syntax (Local_Syntax.rebuild thy) #> +fun transfer_syntax thy ctxt = ctxt |> + map_syntax (Local_Syntax.rebuild thy) |> + 
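+  (* Keep the context's private type signature in sync with the theory: if the
+     theory tsig is unchanged (Type.eq_tsig) nothing happens; otherwise the
+     local tsig -- which may carry extra name-space aliases -- is re-merged
+     with the new global tsig and the global copy is updated, mirroring the
+     treatment of consts just below. *)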
map_tsigs (fn tsigs as (local_tsig, global_tsig) => + let val thy_tsig = Sign.tsig_of thy in + if Type.eq_tsig (thy_tsig, global_tsig) then tsigs + else (Type.merge_tsigs (Syntax.pp ctxt) (local_tsig, thy_tsig), thy_tsig) + end) |> map_consts (fn consts as (local_consts, global_consts) => let val thy_consts = Sign.consts_of thy in if Consts.eq_consts (thy_consts, global_consts) then consts @@ -299,23 +322,49 @@ (** prepare types **) -(* read_typ *) +(* classes *) + +fun read_class ctxt text = + let + val tsig = tsig_of ctxt; + val (syms, pos) = Syntax.read_token text; + val c = Type.cert_class tsig (Type.intern_class tsig (Symbol_Pos.content syms)) + handle TYPE (msg, _, _) => error (msg ^ Position.str_of pos); + val _ = Position.report (Markup.tclass c) pos; + in c end; + + +(* type arities *) + +local + +fun prep_arity prep_tycon prep_sort ctxt (t, Ss, S) = + let val arity = (prep_tycon ctxt t, map (prep_sort ctxt) Ss, prep_sort ctxt S) + in Type.add_arity (Syntax.pp ctxt) arity (tsig_of ctxt); arity end; + +in + +val read_arity = prep_arity (Type.intern_type o tsig_of) Syntax.read_sort; +val cert_arity = prep_arity (K I) (Type.cert_sort o tsig_of); + +end; + + +(* types *) fun read_typ_mode mode ctxt s = Syntax.read_typ (Type.set_mode mode ctxt) s; -val read_typ = read_typ_mode Type.mode_default; +val read_typ = read_typ_mode Type.mode_default; val read_typ_syntax = read_typ_mode Type.mode_syntax; val read_typ_abbrev = read_typ_mode Type.mode_abbrev; -(* cert_typ *) - fun cert_typ_mode mode ctxt T = - Sign.certify_typ_mode mode (theory_of ctxt) T + Type.cert_typ_mode mode (tsig_of ctxt) T handle TYPE (msg, _, _) => error msg; -val cert_typ = cert_typ_mode Type.mode_default; +val cert_typ = cert_typ_mode Type.mode_default; val cert_typ_syntax = cert_typ_mode Type.mode_syntax; val cert_typ_abbrev = cert_typ_mode Type.mode_abbrev; @@ -424,16 +473,16 @@ fun read_type_name ctxt strict text = let - val thy = theory_of ctxt; + val tsig = tsig_of ctxt; val (c, pos) = token_content text; in if Syntax.is_tid c then (Position.report Markup.tfree pos; - TFree (c, the_default (Sign.defaultS thy) (Variable.def_sort ctxt (c, ~1)))) + TFree (c, default_sort ctxt (c, ~1))) else let - val d = Sign.intern_type thy c; - val decl = Sign.the_type_decl thy d; + val d = Type.intern_type tsig c; + val decl = Type.the_decl tsig d; val _ = Position.report (Markup.tycon d) pos; fun err () = error ("Bad type name: " ^ quote d); val args = @@ -444,6 +493,12 @@ in Type (d, replicate args dummyT) end end; +fun read_type_name_proper ctxt strict text = + (case read_type_name ctxt strict text of + T as Type _ => T + | T => error ("Not a type constructor: " ^ Syntax.string_of_typ ctxt T)); + + fun read_const_proper ctxt strict = prep_const_proper ctxt strict o token_content; fun read_const ctxt strict text = @@ -470,8 +525,6 @@ (* local abbreviations *) -val tsig_of = Sign.tsig_of o ProofContext.theory_of; - local fun certify_consts ctxt = Consts.certify (Syntax.pp ctxt) (tsig_of ctxt) @@ -553,9 +606,9 @@ (* types *) -fun get_sort thy def_sort raw_env = +fun get_sort ctxt def_sort raw_env = let - val tsig = Sign.tsig_of thy; + val tsig = tsig_of ctxt; fun eq ((xi, S), (xi', S')) = Term.eq_ix (xi, xi') andalso Type.eq_sort tsig (S, S'); @@ -571,8 +624,8 @@ | (SOME S, NONE) => S | (SOME S, SOME S') => if Type.eq_sort tsig (S, S') then S' - else error ("Sort constraint " ^ Syntax.string_of_sort_global thy S ^ - " inconsistent with default " ^ Syntax.string_of_sort_global thy S' ^ + else error ("Sort constraint " ^ 
Syntax.string_of_sort ctxt S ^ + " inconsistent with default " ^ Syntax.string_of_sort ctxt S' ^ " for type variable " ^ quote (Term.string_of_vname' xi))); in get end; @@ -594,12 +647,10 @@ in fun term_context ctxt = - let val thy = theory_of ctxt in - {get_sort = get_sort thy (Variable.def_sort ctxt), - map_const = fn a => ((true, #1 (Term.dest_Const (read_const_proper ctxt false a))) - handle ERROR _ => (false, Consts.intern (consts_of ctxt) a)), - map_free = intern_skolem ctxt (Variable.def_type ctxt false)} - end; + {get_sort = get_sort ctxt (Variable.def_sort ctxt), + map_const = fn a => ((true, #1 (Term.dest_Const (read_const_proper ctxt false a))) + handle ERROR _ => (false, Consts.intern (consts_of ctxt) a)), + map_free = intern_skolem ctxt (Variable.def_type ctxt false)}; fun decode_term ctxt = let val {get_sort, map_const, map_free} = term_context ctxt @@ -680,8 +731,7 @@ fun parse_typ ctxt text = let - val thy = theory_of ctxt; - val get_sort = get_sort thy (Variable.def_sort ctxt); + val get_sort = get_sort ctxt (Variable.def_sort ctxt); val (syms, pos) = Syntax.parse_token Markup.typ text; val T = Syntax.standard_parse_typ ctxt (syn_of ctxt) get_sort (syms, pos) handle ERROR msg => cat_error msg ("Failed to parse type" ^ Position.str_of pos); @@ -689,7 +739,6 @@ fun parse_term T ctxt text = let - val thy = theory_of ctxt; val {get_sort, map_const, map_free} = term_context ctxt; val (T', _) = TypeInfer.paramify_dummies T 0; @@ -700,33 +749,33 @@ handle ERROR msg => SOME msg; val t = Syntax.standard_parse_term (Syntax.pp ctxt) check get_sort map_const map_free - ctxt (Sign.is_logtype thy) (syn_of ctxt) T' (syms, pos) - handle ERROR msg => cat_error msg ("Failed to parse " ^ kind ^ Position.str_of pos); + ctxt (Type.is_logtype (tsig_of ctxt)) (syn_of ctxt) T' (syms, pos) + handle ERROR msg => cat_error msg ("Failed to parse " ^ kind ^ Position.str_of pos); in t end; fun unparse_sort ctxt = - Syntax.standard_unparse_sort {extern_class = Sign.extern_class (theory_of ctxt)} + Syntax.standard_unparse_sort {extern_class = Type.extern_class (tsig_of ctxt)} ctxt (syn_of ctxt); fun unparse_typ ctxt = let - val thy = theory_of ctxt; - val extern = {extern_class = Sign.extern_class thy, extern_type = Sign.extern_type thy}; + val tsig = tsig_of ctxt; + val extern = {extern_class = Type.extern_class tsig, extern_type = Type.extern_type tsig}; in Syntax.standard_unparse_typ extern ctxt (syn_of ctxt) end; fun unparse_term ctxt = let - val thy = theory_of ctxt; + val tsig = tsig_of ctxt; val syntax = syntax_of ctxt; val consts = consts_of ctxt; val extern = - {extern_class = Sign.extern_class thy, - extern_type = Sign.extern_type thy, + {extern_class = Type.extern_class tsig, + extern_type = Type.extern_type tsig, extern_const = Consts.extern consts}; in Syntax.standard_unparse_term (Local_Syntax.idents_of syntax) extern ctxt - (Local_Syntax.syn_of syntax) (not (PureThy.old_appl_syntax thy)) + (Local_Syntax.syn_of syntax) (not (PureThy.old_appl_syntax (theory_of ctxt))) end; in @@ -967,7 +1016,7 @@ -(** parameters **) +(** basic logical entities **) (* variables *) @@ -999,7 +1048,14 @@ end; -(* authentic constants *) +(* authentic syntax *) + +val _ = Context.>> + (Context.map_theory + (Sign.add_advanced_trfuns + (Syntax.type_ast_trs + {read_class = read_class, + read_type = fn ctxt => #1 o dest_Type o read_type_name_proper ctxt false}, [], [], []))); local @@ -1070,8 +1126,14 @@ in if Term.aconv_untyped (t, t') then SOME (t', mx) else NONE end); in Context.mapping (Sign.notation add mode 
args') (notation add mode args') end; +end; -end; + +(* aliases *) + +fun class_alias b c ctxt = (map_tsigs o apfst) (Type.class_alias (naming_of ctxt) b c) ctxt; +fun type_alias b c ctxt = (map_tsigs o apfst) (Type.type_alias (naming_of ctxt) b c) ctxt; +fun const_alias b c ctxt = (map_consts o apfst) (Consts.alias (naming_of ctxt) b c) ctxt; (* local constants *) diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Isar/typedecl.ML --- a/src/Pure/Isar/typedecl.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Isar/typedecl.ML Wed Mar 10 16:40:20 2010 +0100 @@ -6,26 +6,50 @@ signature TYPEDECL = sig - val typedecl: binding * string list * mixfix -> theory -> typ * theory + val typedecl: binding * string list * mixfix -> local_theory -> typ * local_theory + val typedecl_global: binding * string list * mixfix -> theory -> typ * theory end; structure Typedecl: TYPEDECL = struct -fun typedecl (b, vs, mx) thy = +fun typedecl (b, vs, mx) lthy = let - val base_sort = Object_Logic.get_base_sort thy; - val _ = has_duplicates (op =) vs andalso - error ("Duplicate parameters in type declaration " ^ quote (Binding.str_of b)); - val name = Sign.full_name thy b; + fun err msg = error (msg ^ " in type declaration " ^ quote (Binding.str_of b)); + val _ = has_duplicates (op =) vs andalso err "Duplicate parameters"; + + val name = Local_Theory.full_name lthy b; val n = length vs; - val T = Type (name, map (fn v => TFree (v, [])) vs); + val args = map (fn a => TFree (a, ProofContext.default_sort lthy (a, ~1))) vs; + val T = Type (name, args); + + val bad_args = + #2 (Term.dest_Type (Logic.type_map (singleton (Variable.polymorphic lthy)) T)) + |> filter_out Term.is_TVar; + val _ = not (null bad_args) andalso + err ("Locally fixed type arguments " ^ commas_quote (map (Syntax.string_of_typ lthy) bad_args)); + + val base_sort = Object_Logic.get_base_sort (ProofContext.theory_of lthy); in - thy - |> Sign.add_types [(b, n, mx)] - |> (case base_sort of NONE => I | SOME S => AxClass.axiomatize_arity (name, replicate n S, S)) + lthy + |> Local_Theory.theory + (Sign.add_types [(b, n, NoSyn)] #> + (case base_sort of + NONE => I + | SOME S => AxClass.axiomatize_arity (name, replicate n S, S))) + |> Local_Theory.checkpoint + |> Local_Theory.type_notation true Syntax.mode_default [(T, mx)] + |> Local_Theory.type_syntax false + (fn _ => Context.mapping (Sign.type_alias b name) (ProofContext.type_alias b name)) + |> ProofContext.type_alias b name + |> Variable.declare_typ T |> pair T end; +fun typedecl_global decl = + Theory_Target.init NONE + #> typedecl decl + #> Local_Theory.exit_result_global Morphism.typ; + end; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/ML/ml_antiquote.ML --- a/src/Pure/ML/ml_antiquote.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/ML/ml_antiquote.ML Wed Mar 10 16:40:20 2010 +0100 @@ -102,8 +102,8 @@ (* type classes *) -fun class syn = Args.theory -- Scan.lift Args.name_source >> (fn (thy, s) => - Sign.read_class thy s +fun class syn = Args.context -- Scan.lift Args.name_source >> (fn (ctxt, s) => + ProofContext.read_class ctxt s |> syn ? 
Syntax.mark_class |> ML_Syntax.print_string); @@ -119,8 +119,8 @@ fun type_name kind check = Args.context -- Scan.lift (OuterParse.position Args.name_source) >> (fn (ctxt, (s, pos)) => let - val Type (c, _) = ProofContext.read_type_name ctxt false s; - val decl = Sign.the_type_decl (ProofContext.theory_of ctxt) c; + val Type (c, _) = ProofContext.read_type_name_proper ctxt false s; + val decl = Type.the_decl (ProofContext.tsig_of ctxt) c; val res = (case try check (c, decl) of SOME res => res diff -r d58a4ac1ca1c -r 553906904426 src/Pure/ROOT.ML --- a/src/Pure/ROOT.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/ROOT.ML Wed Mar 10 16:40:20 2010 +0100 @@ -148,7 +148,6 @@ use "assumption.ML"; use "display.ML"; use "goal.ML"; -use "axclass.ML"; (* Isar -- Intelligible Semi-Automated Reasoning *) @@ -209,6 +208,7 @@ use "Isar/local_theory.ML"; use "Isar/overloading.ML"; use "Isar/locale.ML"; +use "axclass.ML"; use "Isar/class_target.ML"; use "Isar/theory_target.ML"; use "Isar/expression.ML"; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Syntax/syntax.ML --- a/src/Pure/Syntax/syntax.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Syntax/syntax.ML Wed Mar 10 16:40:20 2010 +0100 @@ -29,10 +29,7 @@ val mode_default: mode val mode_input: mode val merge_syntaxes: syntax -> syntax -> syntax - val empty_syntax: syntax - val basic_syntax: - {read_class: theory -> xstring -> string, - read_type: theory -> xstring -> string} -> syntax + val basic_syntax: syntax val basic_nonterms: string list val print_gram: syntax -> unit val print_trans: syntax -> unit @@ -383,9 +380,9 @@ (* basic syntax *) -fun basic_syntax read = +val basic_syntax = empty_syntax - |> update_syntax mode_default (TypeExt.type_ext read) + |> update_syntax mode_default TypeExt.type_ext |> update_syntax mode_default SynExt.pure_ext; val basic_nonterms = diff -r d58a4ac1ca1c -r 553906904426 src/Pure/Syntax/type_ext.ML --- a/src/Pure/Syntax/type_ext.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/Syntax/type_ext.ML Wed Mar 10 16:40:20 2010 +0100 @@ -15,6 +15,10 @@ val term_of_typ: bool -> typ -> term val no_brackets: unit -> bool val no_type_brackets: unit -> bool + val type_ast_trs: + {read_class: Proof.context -> string -> string, + read_type: Proof.context -> string -> string} -> + (string * (Proof.context -> Ast.ast list -> Ast.ast)) list end; signature TYPE_EXT = @@ -23,9 +27,7 @@ val term_of_sort: sort -> term val tappl_ast_tr': Ast.ast * Ast.ast list -> Ast.ast val sortT: typ - val type_ext: - {read_class: theory -> string -> string, - read_type: theory -> string -> string} -> SynExt.syn_ext + val type_ext: SynExt.syn_ext end; structure TypeExt: TYPE_EXT = @@ -240,7 +242,7 @@ local open Lexicon SynExt in -fun type_ext {read_class, read_type} = syn_ext' false (K false) +val type_ext = syn_ext' false (K false) [Mfix ("_", tidT --> typeT, "", [], max_pri), Mfix ("_", tvarT --> typeT, "", [], max_pri), Mfix ("_", idT --> typeT, "_type_name", [], max_pri), @@ -267,19 +269,18 @@ Mfix ("'(_')", typeT --> typeT, "", [0], max_pri), Mfix ("'_", typeT, "\\<^type>dummy", [], max_pri)] ["_type_prop"] - (map SynExt.mk_trfun - [("_class_name", class_name_tr o read_class o ProofContext.theory_of), - ("_classes", classes_tr o read_class o ProofContext.theory_of), - ("_type_name", type_name_tr o read_type o ProofContext.theory_of), - ("_tapp", tapp_ast_tr o read_type o ProofContext.theory_of), - ("_tappl", tappl_ast_tr o read_type o ProofContext.theory_of), - ("_bracket", K bracket_ast_tr)], - [], - [], - map SynExt.mk_trfun [("\\<^type>fun", K 
fun_ast_tr')]) + ([], [], [], map SynExt.mk_trfun [("\\<^type>fun", K fun_ast_tr')]) [] ([], []); +fun type_ast_trs {read_class, read_type} = + [("_class_name", class_name_tr o read_class), + ("_classes", classes_tr o read_class), + ("_type_name", type_name_tr o read_type), + ("_tapp", tapp_ast_tr o read_type), + ("_tappl", tappl_ast_tr o read_type), + ("_bracket", K bracket_ast_tr)]; + end; end; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/axclass.ML --- a/src/Pure/axclass.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/axclass.ML Wed Mar 10 16:40:20 2010 +0100 @@ -280,7 +280,7 @@ in (c1, c2) end; fun read_classrel thy raw_rel = - cert_classrel thy (pairself (Sign.read_class thy) raw_rel) + cert_classrel thy (pairself (ProofContext.read_class (ProofContext.init thy)) raw_rel) handle TYPE (msg, _, _) => error msg; @@ -387,7 +387,7 @@ fun prove_arity raw_arity tac thy = let val ctxt = ProofContext.init thy; - val arity = Sign.cert_arity thy raw_arity; + val arity = ProofContext.cert_arity ctxt raw_arity; val names = map (prefix arity_prefix) (Logic.name_arities arity); val props = Logic.mk_arities arity; val ths = Goal.prove_multi ctxt [] [] props @@ -530,7 +530,7 @@ (map (prefix classrel_prefix o Logic.name_classrel)) add_classrel; fun ax_arity prep = - axiomatize prep Logic.mk_arities + axiomatize (prep o ProofContext.init) Logic.mk_arities (map (prefix arity_prefix) o Logic.name_arities) add_arity; fun class_const c = @@ -550,11 +550,11 @@ in val axiomatize_class = ax_class Sign.certify_class cert_classrel; -val axiomatize_class_cmd = ax_class Sign.read_class read_classrel; +val axiomatize_class_cmd = ax_class (ProofContext.read_class o ProofContext.init) read_classrel; val axiomatize_classrel = ax_classrel cert_classrel; val axiomatize_classrel_cmd = ax_classrel read_classrel; -val axiomatize_arity = ax_arity Sign.cert_arity; -val axiomatize_arity_cmd = ax_arity Sign.read_arity; +val axiomatize_arity = ax_arity ProofContext.cert_arity; +val axiomatize_arity_cmd = ax_arity ProofContext.read_arity; end; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/consts.ML --- a/src/Pure/consts.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/consts.ML Wed Mar 10 16:40:20 2010 +0100 @@ -19,6 +19,7 @@ val is_monomorphic: T -> string -> bool (*exception TYPE*) val the_constraint: T -> string -> typ (*exception TYPE*) val space_of: T -> Name_Space.T + val alias: Name_Space.naming -> binding -> string -> T -> T val is_concealed: T -> string -> bool val intern: T -> xstring -> string val extern: T -> string -> xstring @@ -122,6 +123,9 @@ fun space_of (Consts {decls = (space, _), ...}) = space; +fun alias naming binding name = map_consts (fn ((space, decls), constraints, rev_abbrevs) => + ((Name_Space.alias naming binding name space, decls), constraints, rev_abbrevs)); + val is_concealed = Name_Space.is_concealed o space_of; val intern = Name_Space.intern o space_of; diff -r d58a4ac1ca1c -r 553906904426 src/Pure/sign.ML --- a/src/Pure/sign.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/sign.ML Wed Mar 10 16:40:20 2010 +0100 @@ -47,17 +47,16 @@ val class_space: theory -> Name_Space.T val type_space: theory -> Name_Space.T val const_space: theory -> Name_Space.T + val class_alias: binding -> class -> theory -> theory + val type_alias: binding -> string -> theory -> theory + val const_alias: binding -> string -> theory -> theory val intern_class: theory -> xstring -> string val extern_class: theory -> string -> xstring val intern_type: theory -> xstring -> string val extern_type: theory -> string -> xstring 
val intern_const: theory -> xstring -> string val extern_const: theory -> string -> xstring - val intern_sort: theory -> sort -> sort - val extern_sort: theory -> sort -> sort - val intern_typ: theory -> typ -> typ val intern_term: theory -> term -> term - val the_type_decl: theory -> string -> Type.decl val arity_number: theory -> string -> int val arity_sorts: theory -> string -> sort -> sort list val certify_class: theory -> class -> class @@ -71,9 +70,6 @@ val no_frees: Pretty.pp -> term -> term val no_vars: Pretty.pp -> term -> term val cert_def: Proof.context -> term -> (string * typ) * term - val read_class: theory -> xstring -> class - val read_arity: theory -> xstring * string list * string -> arity - val cert_arity: theory -> arity -> arity val add_defsort: string -> theory -> theory val add_defsort_i: sort -> theory -> theory val add_types: (binding * int * mixfix) list -> theory -> theory @@ -154,7 +150,7 @@ make_sign (Name_Space.default_naming, syn, tsig, consts); val empty = - make_sign (Name_Space.default_naming, Syntax.empty_syntax, Type.empty_tsig, Consts.empty); + make_sign (Name_Space.default_naming, Syntax.basic_syntax, Type.empty_tsig, Consts.empty); fun merge pp (sign1, sign2) = let @@ -236,10 +232,14 @@ (** intern / extern names **) -val class_space = #1 o #classes o Type.rep_tsig o tsig_of; -val type_space = #1 o #types o Type.rep_tsig o tsig_of; +val class_space = Type.class_space o tsig_of; +val type_space = Type.type_space o tsig_of; val const_space = Consts.space_of o consts_of; +fun class_alias b c thy = map_tsig (Type.class_alias (naming_of thy) b c) thy; +fun type_alias b c thy = map_tsig (Type.type_alias (naming_of thy) b c) thy; +fun const_alias b c thy = map_consts (Consts.alias (naming_of thy) b c) thy; + val intern_class = Name_Space.intern o class_space; val extern_class = Name_Space.extern o class_space; val intern_type = Name_Space.intern o type_space; @@ -247,9 +247,6 @@ val intern_const = Name_Space.intern o const_space; val extern_const = Name_Space.extern o const_space; -val intern_sort = map o intern_class; -val extern_sort = map o extern_class; - local fun map_typ f g (Type (c, Ts)) = Type (g c, map (map_typ f g) Ts) @@ -265,7 +262,6 @@ in -fun intern_typ thy = map_typ (intern_class thy) (intern_type thy); fun intern_term thy = map_term (intern_class thy) (intern_type thy) (intern_const thy); end; @@ -276,7 +272,6 @@ (* certify wrt. 
type signature *) -val the_type_decl = Type.the_decl o tsig_of; val arity_number = Type.arity_number o tsig_of; fun arity_sorts thy = Type.arity_sorts (Syntax.pp_global thy) (tsig_of thy); @@ -367,51 +362,6 @@ -(** read and certify entities **) (*exception ERROR*) - -(* classes *) - -fun read_class thy text = - let - val (syms, pos) = Syntax.read_token text; - val c = certify_class thy (intern_class thy (Symbol_Pos.content syms)) - handle TYPE (msg, _, _) => error (msg ^ Position.str_of pos); - val _ = Position.report (Markup.tclass c) pos; - in c end; - - -(* type arities *) - -fun prep_arity prep_tycon prep_sort thy (t, Ss, S) = - let val arity = (prep_tycon thy t, map (prep_sort thy) Ss, prep_sort thy S) - in Type.add_arity (Syntax.pp_global thy) arity (tsig_of thy); arity end; - -val read_arity = prep_arity intern_type Syntax.read_sort_global; -val cert_arity = prep_arity (K I) certify_sort; - - -(* type syntax entities *) - -local - -fun read_type thy text = - let - val (syms, pos) = Syntax.read_token text; - val c = intern_type thy (Symbol_Pos.content syms); - val _ = the_type_decl thy c; - val _ = Position.report (Markup.tycon c) pos; - in c end; - -in - -val _ = Context.>> - (Context.map_theory - (map_syn (K (Syntax.basic_syntax {read_class = read_class, read_type = read_type})))); - -end; - - - (** signature extension functions **) (*exception ERROR/TYPE*) (* add default sort *) diff -r d58a4ac1ca1c -r 553906904426 src/Pure/type.ML --- a/src/Pure/type.ML Tue Mar 09 16:30:43 2010 +0100 +++ b/src/Pure/type.ML Wed Mar 10 16:40:20 2010 +0100 @@ -13,12 +13,17 @@ Abbreviation of string list * typ * bool | Nonterminal type tsig + val eq_tsig: tsig * tsig -> bool val rep_tsig: tsig -> {classes: Name_Space.T * Sorts.algebra, default: sort, types: decl Name_Space.table, log_types: string list} val empty_tsig: tsig + val class_space: tsig -> Name_Space.T + val class_alias: Name_Space.naming -> binding -> string -> tsig -> tsig + val intern_class: tsig -> xstring -> string + val extern_class: tsig -> string -> xstring val defaultS: tsig -> sort val logical_types: tsig -> string list val eq_sort: tsig -> sort * sort -> bool @@ -35,6 +40,11 @@ val get_mode: Proof.context -> mode val set_mode: mode -> Proof.context -> Proof.context val restore_mode: Proof.context -> Proof.context -> Proof.context + val type_space: tsig -> Name_Space.T + val type_alias: Name_Space.naming -> binding -> string -> tsig -> tsig + val intern_type: tsig -> xstring -> string + val extern_type: tsig -> string -> xstring + val is_logtype: tsig -> string -> bool val the_decl: tsig -> string -> decl val cert_typ_mode: mode -> tsig -> typ -> typ val cert_typ: tsig -> typ -> typ @@ -103,6 +113,13 @@ types: decl Name_Space.table, (*declared types*) log_types: string list}; (*logical types sorted by number of arguments*) +fun eq_tsig + (TSig {classes = classes1, default = default1, types = types1, log_types = _}, + TSig {classes = classes2, default = default2, types = types2, log_types = _}) = + pointer_eq (classes1, classes2) andalso + default1 = default2 andalso + pointer_eq (types1, types2); + fun rep_tsig (TSig comps) = comps; fun make_tsig (classes, default, types, log_types) = @@ -124,6 +141,14 @@ (* classes and sorts *) +val class_space = #1 o #classes o rep_tsig; + +fun class_alias naming binding name = map_tsig (fn ((space, classes), default, types) => + ((Name_Space.alias naming binding name space, classes), default, types)); + +val intern_class = Name_Space.intern o class_space; +val extern_class = Name_Space.extern 
o class_space; + fun defaultS (TSig {default, ...}) = default; fun logical_types (TSig {log_types, ...}) = log_types; @@ -158,7 +183,18 @@ fun restore_mode ctxt = set_mode (get_mode ctxt); -(* lookup types *) +(* types *) + +val type_space = #1 o #types o rep_tsig; + +fun type_alias naming binding name = map_tsig (fn (classes, default, (space, types)) => + (classes, default, (Name_Space.alias naming binding name space, types))); + +val intern_type = Name_Space.intern o type_space; +val extern_type = Name_Space.extern o type_space; + +val is_logtype = member (op =) o logical_types; + fun undecl_type c = "Undeclared type constructor: " ^ quote c;
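
Note on usage (illustrative sketch only, not part of the changeset): the hunks above turn 'typedecl' into a local-theory command and add a theory-level wrapper, per the new TYPEDECL signature in src/Pure/Isar/typedecl.ML. The following Isabelle/ML fragment shows how those two entry points would be invoked; the binding "foo", the type arguments, and the ambient values thy/lthy are placeholders, not identifiers taken from the patch.

(* theory-level declaration via the new wrapper;
   yields the declared type together with the updated theory *)
val (fooT, thy') =
  thy |> Typedecl.typedecl_global (Binding.name "foo", ["'a", "'b"], NoSyn);

(* the same declaration performed inside a local theory context,
   e.g. within a locale or class target *)
fun declare_foo lthy =
  lthy |> Typedecl.typedecl (Binding.name "foo", ["'a", "'b"], NoSyn);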