new tool Code_Lazy
author      Andreas Lochbihler
Sat, 12 May 2018 11:24:11 +0200
changeset 68155 8b50f29a1992
parent 68154 42d63ea39161
child 68156 7da3af31ca4d
new tool Code_Lazy
src/HOL/Codegenerator_Test/Code_Lazy_Test.thy
src/HOL/Codegenerator_Test/Code_Test_GHC.thy
src/HOL/Codegenerator_Test/Code_Test_MLton.thy
src/HOL/Codegenerator_Test/Code_Test_OCaml.thy
src/HOL/Codegenerator_Test/Code_Test_PolyML.thy
src/HOL/Codegenerator_Test/Code_Test_SMLNJ.thy
src/HOL/Codegenerator_Test/Code_Test_Scala.thy
src/HOL/Library/Code_Lazy.thy
src/HOL/Library/Library.thy
src/HOL/Library/case_converter.ML
src/HOL/Library/code_lazy.ML
src/HOL/Library/document/root.bib
src/HOL/ROOT
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Codegenerator_Test/Code_Lazy_Test.thy	Sat May 12 11:24:11 2018 +0200
@@ -0,0 +1,192 @@
+(* Author: Andreas Lochbihler, Digital Asset *)
+
+section \<open>Laziness tests\<close>
+
+theory Code_Lazy_Test imports
+  "HOL-Library.Code_Lazy"
+  "HOL-Library.Stream" 
+  "HOL-Library.Code_Test"
+  "HOL-Library.BNF_Corec"
+begin
+
+subsection \<open>Linear codatatype\<close>
+
+code_lazy_type stream
+
+value [code] "cycle ''ab''"
+value [code] "let x = cycle ''ab''; y = snth x 10 in x"
+
+datatype 'a llist = LNil ("\<^bold>[\<^bold>]") | LCons (lhd: 'a) (ltl: "'a llist") (infixr "\<^bold>#" 65)
+
+subsection \<open>Finite lazy lists\<close>
+
+code_lazy_type llist
+
+no_notation lazy_llist ("_")
+syntax "_llist" :: "args => 'a list"    ("\<^bold>[(_)\<^bold>]")
+translations
+  "\<^bold>[x, xs\<^bold>]" == "x\<^bold>#\<^bold>[xs\<^bold>]"
+  "\<^bold>[x\<^bold>]" == "x\<^bold>#\<^bold>[\<^bold>]"
+  "\<^bold>[x\<^bold>]" == "CONST lazy_llist x"
+
+definition llist :: "nat llist" where
+  "llist = \<^bold>[1, 2, 3, hd [], 4\<^bold>]"
+
+fun lnth :: "nat \<Rightarrow> 'a llist \<Rightarrow> 'a" where
+  "lnth 0 (x \<^bold># xs) = x"
+| "lnth (Suc n) (x \<^bold># xs) = lnth n xs"
+
+value [code] "llist"
+value [code] "let x = lnth 2 llist in (x, llist)"
+value [code] "llist"
+
+fun lfilter :: "('a \<Rightarrow> bool) \<Rightarrow> 'a llist \<Rightarrow> 'a llist" where
+  "lfilter P \<^bold>[\<^bold>] = \<^bold>[\<^bold>]"
+| "lfilter P (x \<^bold># xs) = (if P x then x \<^bold># lfilter P xs else lfilter P xs)"
+
+value [code] "lhd (lfilter odd llist)"
+
+definition lfilter_test :: "nat llist \<Rightarrow> _" where "lfilter_test xs = lhd (lfilter even xs)"
+  \<comment> \<open>Filtering @{term llist} for @{term even} fails because only the datatype is lazy, not the
+  filter function itself.\<close>
+ML_val \<open> (@{code lfilter_test} @{code llist}; raise Fail "Failure expected") handle Match => () \<close>
+
+subsection \<open>Records as free type\<close>
+
+record ('a, 'b) rec = 
+  rec1 :: 'a
+  rec2 :: 'b
+
+free_constructors rec_ext for rec.rec_ext
+  subgoal by(rule rec.cases_scheme)
+  subgoal by(rule rec.ext_inject)
+  done
+
+code_lazy_type rec_ext
+
+definition rec_test1 where "rec_test1 = rec1 (\<lparr>rec1 = Suc 5, rec2 = True\<rparr>\<lparr>rec1 := 0\<rparr>)"
+definition rec_test2 :: "(bool, bool) rec" where "rec_test2 = \<lparr>rec1 = hd [], rec2 = True\<rparr>"
+test_code "rec_test1 = 0" in PolyML Scala
+value [code] "rec_test2"
+
+subsection \<open>Branching codatatypes\<close>
+
+codatatype tree = L | Node tree tree (infix "\<triangle>" 900)
+
+code_lazy_type tree
+
+fun mk_tree :: "nat \<Rightarrow> tree" where
+  mk_tree_0: "mk_tree 0 = L"
+|            "mk_tree (Suc n) = (let t = mk_tree n in t \<triangle> t)"
+
+function subtree :: "bool list \<Rightarrow> tree \<Rightarrow> tree" where
+  "subtree [] t = t"
+| "subtree (True # p) (l \<triangle> r) = subtree p l"
+| "subtree (False # p) (l \<triangle> r) = subtree p r"
+| "subtree _ L = L"
+  by pat_completeness auto
+termination by lexicographic_order
+
+value [code] "mk_tree 10"
+value [code] "let t = mk_tree 10; _ = subtree [True, True, False, False] t in t"
+
+lemma mk_tree_Suc: "mk_tree (Suc n) = mk_tree n \<triangle> mk_tree n"
+  by(simp add: Let_def)
+lemmas [code] = mk_tree_0 mk_tree_Suc
+value [code] "let t = mk_tree 10; _ = subtree [True, True, False, False] t in t"
+value [code] "let t = mk_tree 4; _ = subtree [True, True, False, False] t in t"
+
+subsection \<open>Corecursion\<close>
+
+corec (friend) plus :: "'a :: plus stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream" where
+  "plus xs ys = (shd xs + shd ys) ## plus (stl xs) (stl ys)"
+
+corec (friend) times :: "'a :: {plus, times} stream \<Rightarrow> 'a stream \<Rightarrow> 'a stream" where
+  "times xs ys = (shd xs * shd ys) ## plus (times (stl xs) ys) (times xs (stl ys))"
+
+subsection \<open>Pattern-matching tests\<close>
+
+definition f1 :: "bool \<Rightarrow> bool \<Rightarrow> bool \<Rightarrow> nat llist \<Rightarrow> unit" where
+  "f1 _ _ _ _ = ()"
+
+declare [[code drop: f1]]
+lemma f1_code1 [code]: 
+  "f1 b c d    ns     = Code.abort (STR ''4'') (\<lambda>_. ())" 
+  "f1 b c True \<^bold>[n, m\<^bold>] = Code.abort (STR ''3'') (\<lambda>_. ())" 
+  "f1 b True d \<^bold>[n\<^bold>]    = Code.abort (STR ''2'') (\<lambda>_. ())" 
+  "f1 True c d \<^bold>[\<^bold>]     = ()"
+  by(simp_all add: f1_def)
+
+value [code] "f1 True False False \<^bold>[\<^bold>]"
+deactivate_lazy_type llist
+value [code] "f1 True False False \<^bold>[\<^bold>]"
+declare f1_code1(1) [code del]
+value [code] "f1 True False False \<^bold>[\<^bold>]"
+activate_lazy_type llist
+value [code] "f1 True False False \<^bold>[\<^bold>]"
+
+declare [[code drop: f1]]
+lemma f1_code2 [code]: 
+  "f1 b c d    ns     = Code.abort (STR ''4'') (\<lambda>_. ())" 
+  "f1 b c True \<^bold>[n, m\<^bold>] = Code.abort (STR ''3'') (\<lambda>_. ())" 
+  "f1 b True d \<^bold>[n\<^bold>]    = ()"
+  "f1 True c d \<^bold>[\<^bold>]     = Code.abort (STR ''1'') (\<lambda>_. ())"
+  by(simp_all add: f1_def)
+
+value [code] "f1 True True True \<^bold>[0\<^bold>]"
+declare f1_code2(1)[code del]
+value [code] "f1 True True True \<^bold>[0\<^bold>]"
+
+declare [[code drop: f1]]
+lemma f1_code3 [code]:
+  "f1 b c d    ns     = Code.abort (STR ''4'') (\<lambda>_. ())"
+  "f1 b c True \<^bold>[n, m\<^bold>] = ()" 
+  "f1 b True d \<^bold>[n\<^bold>]    = Code.abort (STR ''2'') (\<lambda>_. ())"
+  "f1 True c d \<^bold>[\<^bold>]     = Code.abort (STR ''1'') (\<lambda>_. ())"
+  by(simp_all add: f1_def)
+
+value [code] "f1 True True True \<^bold>[0, 1\<^bold>]"
+declare f1_code3(1)[code del]
+value [code] "f1 True True True \<^bold>[0, 1\<^bold>]"
+
+declare [[code drop: f1]]
+lemma f1_code4 [code]:
+  "f1 b c d    ns     = ()" 
+  "f1 b c True \<^bold>[n, m\<^bold>] = Code.abort (STR ''3'') (\<lambda>_. ())"
+  "f1 b True d \<^bold>[n\<^bold>]    = Code.abort (STR ''2'') (\<lambda>_. ())" 
+  "f1 True c d \<^bold>[\<^bold>]     = Code.abort (STR ''1'') (\<lambda>_. ())"
+  by(simp_all add: f1_def)
+
+value [code] "f1 True True True \<^bold>[0, 1, 2\<^bold>]"
+value [code] "f1 True True False \<^bold>[0, 1\<^bold>]"
+value [code] "f1 True False True \<^bold>[0\<^bold>]"
+value [code] "f1 False True True \<^bold>[\<^bold>]"
+
+definition f2 :: "nat llist llist list \<Rightarrow> unit" where "f2 _ = ()"
+
+declare [[code drop: f2]]
+lemma f2_code1 [code]:
+  "f2 xs = Code.abort (STR ''a'') (\<lambda>_. ())"
+  "f2 [\<^bold>[\<^bold>[\<^bold>]\<^bold>]] = ()"
+  "f2 [\<^bold>[\<^bold>[Suc n\<^bold>]\<^bold>]] = ()"
+  "f2 [\<^bold>[\<^bold>[0, Suc n\<^bold>]\<^bold>]] = ()"
+  by(simp_all add: f2_def)
+
+value [code] "f2 [\<^bold>[\<^bold>[\<^bold>]\<^bold>]]"
+value [code] "f2 [\<^bold>[\<^bold>[4\<^bold>]\<^bold>]]"
+value [code] "f2 [\<^bold>[\<^bold>[0, 1\<^bold>]\<^bold>]]"
+ML_val \<open> (@{code f2} []; error "Fail expected") handle Fail _ => () \<close>
+
+definition f3 :: "nat set llist \<Rightarrow> unit" where "f3 _ = ()"
+
+declare [[code drop: f3]]
+lemma f3_code1 [code]:
+  "f3 \<^bold>[\<^bold>] = ()"
+  "f3 \<^bold>[A\<^bold>] = ()"
+  by(simp_all add: f3_def)
+
+value [code] "f3 \<^bold>[\<^bold>]"
+value [code] "f3 \<^bold>[{}\<^bold>]"
+value [code] "f3 \<^bold>[UNIV\<^bold>]"
+
+end
\ No newline at end of file
--- a/src/HOL/Codegenerator_Test/Code_Test_GHC.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Codegenerator_Test/Code_Test_GHC.thy	Sat May 12 11:24:11 2018 +0200
@@ -4,7 +4,7 @@
 Test case for test_code on GHC
 *)
 
-theory Code_Test_GHC imports "HOL-Library.Code_Test" begin
+theory Code_Test_GHC imports "HOL-Library.Code_Test" Code_Lazy_Test begin
 
 test_code "(14 + 7 * -12 :: integer) = 140 div -2" in GHC
 
@@ -20,4 +20,6 @@
   "gcd 0 0 = (0 :: integer)"
 in GHC
 
+test_code "stake 10 (cycle ''ab'') = ''ababababab''" in GHC
+
 end
--- a/src/HOL/Codegenerator_Test/Code_Test_MLton.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Codegenerator_Test/Code_Test_MLton.thy	Sat May 12 11:24:11 2018 +0200
@@ -4,10 +4,12 @@
 Test case for test_code on MLton
 *)
 
-theory Code_Test_MLton imports  "HOL-Library.Code_Test" begin
+theory Code_Test_MLton imports  "HOL-Library.Code_Test" Code_Lazy_Test begin
 
 test_code "14 + 7 * -12 = (140 div -2 :: integer)" in MLton
 
 value [MLton] "14 + 7 * -12 :: integer"
 
+test_code "stake 10 (cycle ''ab'') = ''ababababab''" in MLton
+
 end
--- a/src/HOL/Codegenerator_Test/Code_Test_OCaml.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Codegenerator_Test/Code_Test_OCaml.thy	Sat May 12 11:24:11 2018 +0200
@@ -4,7 +4,7 @@
 Test case for test_code on OCaml
 *)
 
-theory Code_Test_OCaml imports  "HOL-Library.Code_Test" begin
+theory Code_Test_OCaml imports  "HOL-Library.Code_Test" Code_Lazy_Test begin
 
 test_code "14 + 7 * -12 = (140 div -2 :: integer)" in OCaml
 
@@ -20,4 +20,6 @@
   "gcd 0 0 = (0 :: integer)"
 in OCaml
 
+test_code "stake 10 (cycle ''ab'') = ''ababababab''" in OCaml
+
 end
--- a/src/HOL/Codegenerator_Test/Code_Test_PolyML.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Codegenerator_Test/Code_Test_PolyML.thy	Sat May 12 11:24:11 2018 +0200
@@ -4,10 +4,12 @@
 Test case for test_code on PolyML
 *)
 
-theory Code_Test_PolyML imports  "HOL-Library.Code_Test" begin
+theory Code_Test_PolyML imports  "HOL-Library.Code_Test" Code_Lazy_Test begin
 
 test_code "14 + 7 * -12 = (140 div -2 :: integer)" in PolyML
 
 value [PolyML] "14 + 7 * -12 :: integer"
 
+test_code "stake 10 (cycle ''ab'') = ''ababababab''" in PolyML
+
 end
--- a/src/HOL/Codegenerator_Test/Code_Test_SMLNJ.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Codegenerator_Test/Code_Test_SMLNJ.thy	Sat May 12 11:24:11 2018 +0200
@@ -4,10 +4,12 @@
 Test case for test_code on SMLNJ
 *)
 
-theory Code_Test_SMLNJ imports  "HOL-Library.Code_Test" begin
+theory Code_Test_SMLNJ imports  "HOL-Library.Code_Test" Code_Lazy_Test begin
 
 test_code "14 + 7 * -12 = (140 div -2 :: integer)" in SMLNJ
 
 value [SMLNJ] "14 + 7 * -12 :: integer"
 
+test_code "stake 10 (cycle ''ab'') = ''ababababab''" in SMLNJ
+
 end
--- a/src/HOL/Codegenerator_Test/Code_Test_Scala.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Codegenerator_Test/Code_Test_Scala.thy	Sat May 12 11:24:11 2018 +0200
@@ -4,7 +4,7 @@
 Test case for test_code on Scala
 *)
 
-theory Code_Test_Scala imports  "HOL-Library.Code_Test" begin
+theory Code_Test_Scala imports  "HOL-Library.Code_Test" Code_Lazy_Test begin
 
 test_code "14 + 7 * -12 = (140 div -2 :: integer)" in Scala
 
@@ -20,4 +20,6 @@
   "gcd 0 0 = (0 :: integer)"
 in Scala
 
+test_code "stake 10 (cycle ''ab'') = ''ababababab''" in Scala
+
 end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/Code_Lazy.thy	Sat May 12 11:24:11 2018 +0200
@@ -0,0 +1,251 @@
+(* Author: Pascal Stoop, ETH Zurich
+   Author: Andreas Lochbihler, Digital Asset *)
+
+section \<open>Lazy types in generated code\<close>
+
+theory Code_Lazy
+imports Main
+keywords
+  "code_lazy_type"
+  "activate_lazy_type"
+  "deactivate_lazy_type"
+  "activate_lazy_types"
+  "deactivate_lazy_types"
+  "print_lazy_types" :: thy_decl
+begin
+
+text \<open>
+  This theory and the Code_Lazy tool it provides are described in @{cite "LochbihlerStoop2018"}.
+
+  It hooks into Isabelle’s code generator such that the generated code evaluates a user-specified
+  set of type constructors lazily, even in target languages with eager evaluation.
+  The lazy type must be algebraic, i.e., its values are built from constructors and decomposed
+  by a corresponding case operator. Every datatype and codatatype is algebraic and thus
+  eligible for lazification.
+\<close>
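
A rough usage sketch (editorial illustration, not part of this changeset; the theory name
Lazy_Demo is invented, while stream, cycle and stake come from HOL-Library.Stream): lazifying
a codatatype lets eager target languages evaluate only the demanded prefix of an infinite value.

  theory Lazy_Demo
    imports "HOL-Library.Code_Lazy" "HOL-Library.Stream"
  begin

  code_lazy_type stream  \<comment> \<open>generated code now represents streams lazily\<close>

  value [code] "stake 10 (cycle ''ab'')"
    \<comment> \<open>terminates: only a finite prefix of the infinite stream is forced\<close>

  end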
+
+subsection \<open>Eliminating pattern matches\<close>
+
+definition missing_pattern_match :: "String.literal \<Rightarrow> (unit \<Rightarrow> 'a) \<Rightarrow> 'a" where
+  [code del]: "missing_pattern_match m f = f ()"
+
+lemma missing_pattern_match_cong [cong]:
+  "m = m' \<Longrightarrow> missing_pattern_match m f = missing_pattern_match m' f"
+  by(rule arg_cong)
+
+lemma missing_pattern_match_code [code_unfold]:
+  "missing_pattern_match = Code.abort"
+  unfolding missing_pattern_match_def Code.abort_def ..
+
+ML_file "case_converter.ML"
+
+subsection \<open>The type @{text lazy}\<close>
+
+typedef 'a lazy = "UNIV :: 'a set" ..
+setup_lifting type_definition_lazy
+lift_definition delay :: "(unit \<Rightarrow> 'a) \<Rightarrow> 'a lazy"  is "\<lambda>f. f ()" .
+lift_definition force :: "'a lazy \<Rightarrow> 'a" is "\<lambda>x. x" .
+
+code_datatype delay
+lemma force_delay [code]: "force (delay f) = f ()" by transfer(rule refl)
+lemma delay_force: "delay (\<lambda>_. force s) = s" by transfer(rule refl)
+
+text \<open>The implementations of @{typ "_ lazy"}, which use language primitives, cache forced values.\<close>
+
+code_printing code_module Lazy \<rightharpoonup> (SML)
+\<open>signature LAZY =
+sig
+  type 'a lazy;
+  val lazy : (unit -> 'a) -> 'a lazy;
+  val force : 'a lazy -> 'a;
+  val peek : 'a lazy -> 'a option
+  val termify_lazy : 
+   (string -> 'typerep -> 'term) -> 
+   ('term -> 'term -> 'term) -> 
+   (string -> 'typerep -> 'term -> 'term) ->
+   'typerep -> ('typerep -> 'typerep -> 'typerep) -> ('typerep -> 'typerep) ->
+   ('a -> 'term) -> 'typerep -> 'a lazy -> 'term -> 'term;
+end;
+
+structure Lazy : LAZY = 
+struct
+
+datatype 'a content =
+   Delay of unit -> 'a
+ | Value of 'a 
+ | Exn of exn;
+
+datatype 'a lazy = Lazy of 'a content ref;
+
+fun lazy f = Lazy (ref (Delay f));
+
+fun force (Lazy x) = case !x of
+   Delay f => (
+     let val res = f (); val _ = x := Value res; in res end
+     handle exn => (x := Exn exn; raise exn))
+  | Value x => x
+  | Exn exn => raise exn;
+
+fun peek (Lazy x) = case !x of
+    Value x => SOME x
+  | _ => NONE;
+
+fun termify_lazy const app abs unitT funT lazyT term_of T x _ =
+  app (const "Code_Lazy.delay" (funT (funT unitT T) (lazyT T))) 
+    (case peek x of SOME y => abs "_" unitT (term_of y)
+     | _ => const "Pure.dummy_pattern" (funT unitT T));
+
+end;\<close>
+code_reserved SML Lazy
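
A standalone SML sketch (editorial, not part of the patch) of the caching behaviour of the
Lazy structure above: forcing a suspension twice runs its body only once, and peek reveals
the cached result.

  val counter = ref 0;
  val susp = Lazy.lazy (fn () => (counter := !counter + 1; 6 * 7));
  val _ = Lazy.force susp;      (* runs the suspended body; counter = 1 *)
  val _ = Lazy.force susp;      (* returns the cached Value; counter still 1 *)
  val cached = Lazy.peek susp;  (* SOME 42 *)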
+
+code_printing
+  type_constructor lazy \<rightharpoonup> (SML) "_ Lazy.lazy"
+| constant delay \<rightharpoonup> (SML) "Lazy.lazy"
+| constant force \<rightharpoonup> (SML) "Lazy.force"
+
+code_printing \<comment> \<open>For code generation within the Isabelle environment, we reuse the thread-safe
+  implementation of lazy from @{file "~~/src/Pure/Concurrent/lazy.ML"}\<close>
+  code_module Lazy \<rightharpoonup> (Eval) \<open>\<close>
+| type_constructor lazy \<rightharpoonup> (Eval) "_ Lazy.lazy"
+| constant delay \<rightharpoonup> (Eval) "Lazy.lazy"
+| constant force \<rightharpoonup> (Eval) "Lazy.force"
+
+code_printing
+  code_module Lazy \<rightharpoonup> (Haskell) 
+\<open>newtype Lazy a = Lazy a;
+delay f = Lazy (f ());
+force (Lazy x) = x;\<close>
+| type_constructor lazy \<rightharpoonup> (Haskell) "Lazy.Lazy _"
+| constant delay \<rightharpoonup> (Haskell) "Lazy.delay"
+| constant force \<rightharpoonup> (Haskell) "Lazy.force"
+code_reserved Haskell Lazy
+
+code_printing
+  code_module Lazy \<rightharpoonup> (Scala) 
+\<open>object Lazy {
+  final class Lazy[A] (f: Unit => A) {
+    var evaluated = false;
+    lazy val x: A = f ()
+
+    def get() : A = {
+      evaluated = true;
+      return x
+    }
+  }
+
+  def force[A] (x: Lazy[A]) : A = {
+    return x.get()
+  }
+
+  def delay[A] (f: Unit => A) : Lazy[A] = {
+    return new Lazy[A] (f)
+  }
+
+  def termify_lazy[Typerep, Term, A] (
+    const: String => Typerep => Term,
+    app: Term => Term => Term,
+    abs: String => Typerep => Term => Term,
+    unitT: Typerep,
+    funT: Typerep => Typerep => Typerep,
+    lazyT: Typerep => Typerep,
+    term_of: A => Term,
+    ty: Typerep,
+    x: Lazy[A],
+    dummy: Term) : Term = {
+    if (x.evaluated)
+      app(const("Code_Lazy.delay")(funT(funT(unitT)(ty))(lazyT(ty))))(abs("_")(unitT)(term_of(x.get)))
+    else
+      app(const("Code_Lazy.delay")(funT(funT(unitT)(ty))(lazyT(ty))))(const("Pure.dummy_pattern")(funT(unitT)(ty)))
+  }
+}\<close>
+| type_constructor lazy \<rightharpoonup> (Scala) "Lazy.Lazy[_]"
+| constant delay \<rightharpoonup> (Scala) "Lazy.delay"
+| constant force \<rightharpoonup> (Scala) "Lazy.force"
+code_reserved Scala Lazy termify_lazy
+
+code_printing
+  type_constructor lazy \<rightharpoonup> (OCaml) "_ Lazy.t"
+| constant delay \<rightharpoonup> (OCaml) "Lazy.from'_fun"
+| constant force \<rightharpoonup> (OCaml) "Lazy.force"
+code_reserved OCaml Lazy
+
+text \<open>
+  Term reconstruction for lazy values looks into the lazy value and reconstructs it to the
+  depth to which it has been evaluated. This is not done for Haskell and Scala, as we do not
+  know of any portable way to inspect whether a lazy value has been evaluated or not.
+\<close>
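
An editorial illustration along the lines of Code_Lazy_Test.thy (the exact rendering of an
unevaluated suspension depends on the pretty printer): after code_lazy_type stream,

  value [code] "cycle ''ab''"
    \<comment> \<open>prints little more than an unevaluated suspension\<close>
  value [code] "let x = cycle ''ab''; y = snth x 10 in x"
    \<comment> \<open>prints the forced prefix of \<open>x\<close> followed by a suspension for the rest\<close>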
+code_printing code_module Termify_Lazy \<rightharpoonup> (Eval) 
+\<open>structure Termify_Lazy = struct
+fun termify_lazy 
+  (_: string -> typ -> term) (_: term -> term -> term)  (_: string -> typ -> term -> term)
+  (_: typ) (_: typ -> typ -> typ) (_: typ -> typ)
+  (term_of: 'a -> term) (T: typ) (x: 'a Lazy.lazy) (_: term) =
+  Const ("Code_Lazy.delay", (HOLogic.unitT --> T) --> Type ("Code_Lazy.lazy", [T])) $ 
+  (case Lazy.peek x of
+    SOME (Exn.Res x) => absdummy HOLogic.unitT (term_of x)
+  | _ => Const ("Pure.dummy_pattern", HOLogic.unitT --> T));
+end;\<close>
+code_reserved Eval Termify_Lazy TERMIFY_LAZY termify_lazy
+
+code_printing code_module Termify_Lazy \<rightharpoonup> (OCaml) 
+\<open>module Termify_Lazy : sig
+  val termify_lazy :
+   (string -> 'typerep -> 'term) -> 
+   ('term -> 'term -> 'term) -> 
+   (string -> 'typerep -> 'term -> 'term) ->
+   'typerep -> ('typerep -> 'typerep -> 'typerep) -> ('typerep -> 'typerep) ->
+   ('a -> 'term) -> 'typerep -> 'a Lazy.t -> 'term -> 'term
+end = struct
+
+let termify_lazy const app abs unitT funT lazyT term_of ty x _ =
+  app (const "Code_Lazy.delay" (funT (funT unitT ty) (lazyT ty))) 
+    (if Lazy.is_val x then abs "_" unitT (term_of (Lazy.force x))
+     else const "Pure.dummy_pattern" (funT unitT ty));;
+
+end;;\<close>
+code_reserved OCaml Termify_Lazy termify_lazy
+
+definition termify_lazy2 :: "'a :: typerep lazy \<Rightarrow> term"
+where "termify_lazy2 x =
+  Code_Evaluation.App (Code_Evaluation.Const (STR ''Code_Lazy.delay'') (TYPEREP((unit \<Rightarrow> 'a) \<Rightarrow> 'a lazy)))
+    (Code_Evaluation.Const (STR ''Pure.dummy_pattern'') (TYPEREP((unit \<Rightarrow> 'a))))"
+
+definition termify_lazy :: 
+  "(String.literal \<Rightarrow> 'typerep \<Rightarrow> 'term) \<Rightarrow> 
+   ('term \<Rightarrow> 'term \<Rightarrow> 'term) \<Rightarrow> 
+   (String.literal \<Rightarrow> 'typerep \<Rightarrow> 'term \<Rightarrow> 'term) \<Rightarrow>
+   'typerep \<Rightarrow> ('typerep \<Rightarrow> 'typerep \<Rightarrow> 'typerep) \<Rightarrow> ('typerep \<Rightarrow> 'typerep) \<Rightarrow>
+   ('a \<Rightarrow> 'term) \<Rightarrow> 'typerep \<Rightarrow> 'a :: typerep lazy \<Rightarrow> 'term \<Rightarrow> term"
+where "termify_lazy _ _ _ _ _ _ _ _ x _ = termify_lazy2 x"
+
+declare [[code drop: "Code_Evaluation.term_of :: _ lazy \<Rightarrow> _"]]
+
+lemma term_of_lazy_code [code]:
+  "Code_Evaluation.term_of x \<equiv> 
+   termify_lazy 
+     Code_Evaluation.Const Code_Evaluation.App Code_Evaluation.Abs 
+     TYPEREP(unit) (\<lambda>T U. typerep.Typerep (STR ''fun'') [T, U]) (\<lambda>T. typerep.Typerep (STR ''Code_Lazy.lazy'') [T])
+     Code_Evaluation.term_of TYPEREP('a) x (Code_Evaluation.Const (STR '''') (TYPEREP(unit)))"
+  for x :: "'a :: {typerep, term_of} lazy"
+by(rule term_of_anything)
+
+code_printing constant termify_lazy
+  \<rightharpoonup> (SML) "Lazy.termify'_lazy" 
+  and (Eval) "Termify'_Lazy.termify'_lazy"
+  and (OCaml) "Termify'_Lazy.termify'_lazy"
+  and (Scala) "Lazy.termify'_lazy"
+  
+text \<open>Make evaluation with the simplifier respect @{term delay}s.\<close>
+
+lemma delay_lazy_cong: "delay f = delay f" by simp
+setup \<open>Code_Simp.map_ss (Simplifier.add_cong @{thm delay_lazy_cong})\<close>        
+
+subsection \<open>Implementation\<close>
+
+ML_file "code_lazy.ML"
+
+setup \<open>
+  Code_Preproc.add_functrans ("lazy_datatype", Code_Lazy.transform_code_eqs);
+\<close>
+
+end
--- a/src/HOL/Library/Library.thy	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Library/Library.thy	Sat May 12 11:24:11 2018 +0200
@@ -10,6 +10,7 @@
   Boolean_Algebra
   Bourbaki_Witt_Fixpoint
   Char_ord
+  Code_Lazy
   Code_Test
   Combine_PER
   Complete_Partial_Order2
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/case_converter.ML	Sat May 12 11:24:11 2018 +0200
@@ -0,0 +1,549 @@
+(* Author: Pascal Stoop, ETH Zurich
+   Author: Andreas Lochbihler, Digital Asset *)
+
+signature CASE_CONVERTER =
+sig
+  val to_case: Proof.context -> (string * string -> bool) -> (string * typ -> int) ->
+    thm list -> thm list option
+end;
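
A hedged reading of this interface, inferred from its use below and in code_lazy.ML rather
than from separate documentation:

  (* to_case ctxt replace_ctr ctr_count thms
       replace_ctr (type_name, ctr_name): does this constructor belong to a lazified type,
         i.e. should it be eliminated from left-hand-side patterns?
       ctr_count (ctr_name, ctr_typ): number of constructors of that constructor's type,
         used to decide when a pattern split is exhaustive;
       thms: the defining equations; result is NONE if no pattern needs conversion, otherwise
         the equations rewritten to match via explicit case combinators. *)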
+
+structure Case_Converter : CASE_CONVERTER =
+struct
+
+fun lookup_remove _ _ [] = (NONE, [])
+  | lookup_remove eq k ((k', v) :: kvs) =
+    if eq (k, k') then (SOME (k', v), kvs)
+    else apsnd (cons (k', v)) (lookup_remove eq k kvs)
+
+fun map_option _ NONE = NONE
+  | map_option f (SOME x) = SOME (f x)
+
+fun mk_abort msg t =
+  let 
+    val T = fastype_of t
+    val abort = Const (@{const_name missing_pattern_match}, HOLogic.literalT --> (HOLogic.unitT --> T) --> T)
+  in
+    abort $ HOLogic.mk_literal msg $ absdummy HOLogic.unitT t
+  end
+
+(* fold_term : (string * typ -> 'a) ->
+               (string * typ -> 'a) ->
+               (indexname * typ -> 'a) ->
+               (int -> 'a) ->
+               (string * typ * 'a -> 'a) ->
+               ('a * 'a -> 'a) ->
+               term ->
+               'a *)
+fun fold_term const_fun free_fun var_fun bound_fun abs_fun dollar_fun term =
+  let
+    fun go x = case x of
+      Const (s, T) => const_fun (s, T)
+      | Free (s, T) => free_fun (s, T)
+      | Var (i, T) => var_fun (i, T)
+      | Bound n => bound_fun n
+      | Abs (s, T, term) => abs_fun (s, T, go term)
+      | term1 $ term2 => dollar_fun (go term1, go term2)
+  in
+    go term
+  end;
+
+datatype term_coordinate = End of typ
+  | Coordinate of (string * (int * term_coordinate)) list;
+
+fun term_coordinate_merge (End T) _ = End T
+  | term_coordinate_merge _ (End T) = End T
+  | term_coordinate_merge (Coordinate xs) (Coordinate ys) =
+  let
+    fun merge_consts xs [] = xs
+      | merge_consts xs ((s1, (n, y)) :: ys) = 
+        case List.partition (fn (s2, (m, _)) => s1 = s2 andalso n = m) xs of
+            ([], xs') => (s1, (n, y)) :: (merge_consts xs' ys)
+          | ((_, (_, x)) :: _, xs') => (s1, (n, term_coordinate_merge x y)) :: (merge_consts xs' ys)
+  in
+    Coordinate (merge_consts xs ys)
+  end;
+
+fun term_to_coordinates P term = 
+  let
+    val (ctr, args) = strip_comb term
+  in
+    case ctr of Const (s, T) =>
+      if P (body_type T |> dest_Type |> fst, s)
+      then SOME (End (body_type T))
+      else
+        let
+          fun f (i, t) = term_to_coordinates P t |> map_option (pair i)
+          val tcos = map_filter I (map_index f args)
+        in
+          if null tcos then NONE
+          else SOME (Coordinate (map (pair s) tcos))
+        end
+    | _ => NONE
+  end;
+
+fun coordinates_to_list (End x) = [(x, [])]
+  | coordinates_to_list (Coordinate xs) = 
+  let
+    fun f (s, (n, xss)) = map (fn (T, xs) => (T, (s, n) :: xs)) (coordinates_to_list xss)
+  in flat (map f xs) end;
+
+
+(* AL: TODO: change from term to const_name *)
+fun find_ctr ctr1 xs =
+  let
+    val const_name = fst o dest_Const
+    fun const_equal (ctr1, ctr2) = const_name ctr1 = const_name ctr2
+  in
+    lookup_remove const_equal ctr1 xs
+  end;
+
+datatype pattern 
+  = Wildcard
+  | Value
+  | Split of int * (term * pattern) list * pattern;
+
+fun pattern_merge Wildcard pat' = pat'
+  | pattern_merge Value _ = Value
+  | pattern_merge (Split (n, xs, pat)) Wildcard =
+    Split (n, map (apsnd (fn pat'' => pattern_merge pat'' Wildcard)) xs, pattern_merge pat Wildcard)
+  | pattern_merge (Split _) Value = Value
+  | pattern_merge (Split (n, xs, pat)) (Split (m, ys, pat'')) =
+    let 
+      fun merge_consts xs [] = map (apsnd (fn pat => pattern_merge pat Wildcard)) xs
+        | merge_consts xs ((ctr, y) :: ys) =
+          (case find_ctr ctr xs of
+              (SOME (ctr, x), xs) => (ctr, pattern_merge x y) :: merge_consts xs ys
+            | (NONE, xs) => (ctr, y) :: merge_consts xs ys
+          )
+     in
+       Split (if n <= 0 then m else n, merge_consts xs ys, pattern_merge pat pat'')
+     end
+     
+fun pattern_intersect Wildcard _ = Wildcard
+  | pattern_intersect Value pat2 = pat2
+  | pattern_intersect (Split _) Wildcard = Wildcard
+  | pattern_intersect (Split (n, xs', pat1)) Value =
+    Split (n,
+      map (apsnd (fn pat1 => pattern_intersect pat1 Value)) xs',
+      pattern_intersect pat1 Value)
+  | pattern_intersect (Split (n, xs', pat1)) (Split (m, ys, pat2)) =
+    Split (if n <= 0 then m else n, 
+      intersect_consts xs' ys pat1 pat2,
+      pattern_intersect pat1 pat2)
+and
+    intersect_consts xs [] _ default2 = map (apsnd (fn pat => pattern_intersect pat default2)) xs
+  | intersect_consts xs ((ctr, pat2) :: ys) default1 default2 = case find_ctr ctr xs of
+    (SOME (ctr, pat1), xs') => 
+      (ctr, pattern_merge (pattern_merge (pattern_intersect pat1 pat2) (pattern_intersect default1 pat2))
+              (pattern_intersect pat1 default2)) ::
+      intersect_consts xs' ys default1 default2
+    | (NONE, xs') => (ctr, pattern_intersect default1 pat2) :: (intersect_consts xs' ys default1 default2)
+        
+fun pattern_lookup _ Wildcard = Wildcard
+  | pattern_lookup _ Value = Value
+  | pattern_lookup [] (Split (n, xs, pat)) = 
+    Split (n, map (apsnd (pattern_lookup [])) xs, pattern_lookup [] pat)
+  | pattern_lookup (term :: terms) (Split (n, xs, pat)) =
+  let
+    val (ctr, args) = strip_comb term
+    fun map_ctr (term, pat) =
+      let
+        val args = term |> dest_Const |> snd |> binder_types |> map (fn T => Free ("x", T))
+      in
+        pattern_lookup args pat
+      end
+  in
+    if is_Const ctr then
+       case find_ctr ctr xs of (SOME (_, pat'), _) => 
+           pattern_lookup terms (pattern_merge (pattern_lookup args pat') (pattern_lookup [] pat))
+         | (NONE, _) => pattern_lookup terms pat
+    else if length xs < n orelse n <= 0 then
+      pattern_lookup terms pat
+    else pattern_lookup terms
+      (pattern_merge
+        (fold pattern_intersect (map map_ctr (tl xs)) (map_ctr (hd xs)))
+        (pattern_lookup [] pat))
+  end;
+
+fun pattern_contains terms pat = case pattern_lookup terms pat of
+    Wildcard => false
+  | Value => true
+  | Split _ => raise Match;
+
+fun pattern_create _ [] = Wildcard
+  | pattern_create ctr_count (term :: terms) = 
+    let
+      val (ctr, args) = strip_comb term
+    in
+      if is_Const ctr then
+        Split (ctr_count ctr, [(ctr, pattern_create ctr_count (args @ terms))], Wildcard)
+      else Split (0, [], pattern_create ctr_count terms)
+    end;
+
+fun pattern_insert ctr_count terms pat =
+  let
+    fun new_pattern terms = pattern_insert ctr_count terms (pattern_create ctr_count terms)
+    fun aux _ false Wildcard = Wildcard
+      | aux terms true Wildcard = if null terms then Value else new_pattern terms
+      | aux _ _ Value = Value
+      | aux terms modify (Split (n, xs', pat)) =
+      let
+        val unmodified = (n, map (apsnd (aux [] false)) xs', aux [] false pat)
+      in case terms of [] => Split unmodified
+        | term :: terms =>
+        let
+          val (ctr, args) = strip_comb term
+          val (m, ys, pat') = unmodified
+        in
+          if is_Const ctr
+            then case find_ctr ctr xs' of
+              (SOME (ctr, pat''), xs) =>
+                Split (m, (ctr, aux (args @ terms) modify pat'') :: map (apsnd (aux [] false)) xs, pat')
+              | (NONE, _) => if modify
+                then if m <= 0
+                  then Split (ctr_count ctr, (ctr, new_pattern (args @ terms)) :: ys, pat')
+                  else Split (m, (ctr, new_pattern (args @ terms)) :: ys, pat')
+                else Split unmodified
+            else Split (m, ys, aux terms modify pat)
+        end
+      end
+  in
+    aux terms true pat
+  end;
+
+val pattern_empty = Wildcard;
+
+fun replace_frees lhss rhss typ_list ctxt =
+  let
+    fun replace_frees_once (lhs, rhs) ctxt =
+      let
+        val add_frees_list = fold_rev Term.add_frees
+        val frees = add_frees_list lhs []
+        val (new_frees, ctxt1) = (Ctr_Sugar_Util.mk_Frees "x" (map snd frees) ctxt)
+        val (new_frees1, ctxt2) =
+          let
+            val (dest_frees, types) = split_list (map dest_Free new_frees)
+            val (new_frees, ctxt2) = Variable.variant_fixes dest_frees ctxt1
+          in
+            (map Free (new_frees ~~ types), ctxt2)
+          end
+        val dict = frees ~~ new_frees1
+        fun free_map_fun (s, T) =
+          case AList.lookup (op =) dict (s, T) of
+              NONE => Free (s, T)
+            | SOME x => x
+        val map_fun = fold_term Const free_map_fun Var Bound Abs (op $)
+      in
+        ((map map_fun lhs, map_fun rhs), ctxt2)
+      end
+
+    fun variant_fixes (def_frees, ctxt) =
+      let
+        val (dest_frees, types) = split_list (map dest_Free def_frees)
+        val (def_frees, ctxt1) = Variable.variant_fixes dest_frees ctxt
+      in
+        (map Free (def_frees ~~ types), ctxt1)
+      end
+    val (def_frees, ctxt1) = variant_fixes (Ctr_Sugar_Util.mk_Frees "x" typ_list ctxt)
+    val (rhs_frees, ctxt2) = variant_fixes (Ctr_Sugar_Util.mk_Frees "x" typ_list ctxt1)
+    val (case_args, ctxt3) = variant_fixes (Ctr_Sugar_Util.mk_Frees "x"
+      (map fastype_of (hd lhss)) ctxt2)
+    val (new_terms1, ctxt4) = fold_map replace_frees_once (lhss ~~ rhss) ctxt3
+    val (lhss1, rhss1) = split_list new_terms1
+  in
+    (lhss1, rhss1, def_frees ~~ rhs_frees, case_args, ctxt4)
+  end;
+
+fun add_names_in_type (Type (name, Ts)) = 
+    List.foldr (op o) (Symtab.update (name, ())) (map add_names_in_type Ts)
+  | add_names_in_type (TFree _) = I
+  | add_names_in_type (TVar _) = I
+
+fun add_names_in_term (Const (_, T)) = add_names_in_type T
+  | add_names_in_term (Free (_, T)) = add_names_in_type T
+  | add_names_in_term (Var (_, T)) = add_names_in_type T
+  | add_names_in_term (Bound _) = I
+  | add_names_in_term (Abs (_, T, body)) =
+    add_names_in_type T o add_names_in_term body
+  | add_names_in_term (t1 $ t2) = add_names_in_term t1 o add_names_in_term t2
+
+fun add_type_names terms =
+  fold (fn term => fn f => add_names_in_term term o f) terms I
+
+fun get_split_theorems ctxt =
+  Symtab.keys
+  #> map_filter (Ctr_Sugar.ctr_sugar_of ctxt)
+  #> map #split;
+
+fun match (Const (s1, _)) (Const (s2, _)) = if s1 = s2 then SOME I else NONE
+  | match (Free y) x = SOME (fn z => if z = Free y then x else z)
+  | match (pat1 $ pattern2) (t1 $ t2) =
+    (case (match pat1 t1, match pattern2 t2) of
+       (SOME f, SOME g) => SOME (f o g)
+       | _ => NONE
+     )
+  | match _ _ = NONE;
+
+fun match_all patterns terms =
+  let
+    fun combine _ NONE = NONE
+      | combine (f_opt, f_opt') (SOME g) = 
+        case match f_opt f_opt' of SOME f => SOME (f o g) | _ => NONE
+  in
+    fold_rev combine (patterns ~~ terms) (SOME I)
+  end
+
+fun matches (Const (s1, _)) (Const (s2, _)) = s1 = s2
+  | matches (Free _) _ = true 
+  | matches (pat1 $ pat2) (t1 $ t2) = matches pat1 t1 andalso matches pat2 t2
+  | matches _ _ = false;
+fun matches_all patterns terms = forall (uncurry matches) (patterns ~~ terms)
+
+fun terms_to_case_at ctr_count ctxt (fun_t : term) (default_lhs : term list)
+    (pos, (lazy_case_arg, rhs_free))
+    ((lhss : term list list), (rhss : term list), type_name_fun) =
+  let
+    fun abort t =
+      let
+        val fun_name = head_of t |> dest_Const |> fst
+        val msg = "Missing pattern in " ^ fun_name ^ "."
+      in
+        mk_abort msg t
+      end;
+
+    (* Step 1 : Eliminate lazy pattern *)
+    fun replace_pat_at (n, tcos) pat pats =
+      let
+        fun map_at _ _ [] = raise Empty
+          | map_at n f (x :: xs) = if n > 0
+            then apfst (cons x) (map_at (n - 1) f xs)
+            else apfst (fn x => x :: xs) (f x)
+        fun replace [] pat term = (pat, term)
+          | replace ((s1, n) :: tcos) pat term =
+            let
+              val (ctr, args) = strip_comb term
+            in
+              case ctr of Const (s2, _) =>
+                  if s1 = s2
+                  then apfst (pair ctr #> list_comb) (map_at n (replace tcos pat) args)
+                  else (term, rhs_free)
+                | _ => (term, rhs_free)
+            end
+        val (part1, (old_pat, part2)) = chop n pats ||> (fn xs => (hd xs, tl xs))
+        val (new_pat, old_pat1) = replace tcos pat old_pat
+      in
+        (part1 @ [new_pat] @ part2, old_pat1)
+      end                               
+    val (lhss1, lazy_pats) = map (replace_pat_at pos lazy_case_arg) lhss
+      |> split_list
+
+    (* Step 2 : Split patterns *)
+    fun split equs =
+      let
+        fun merge_pattern (Const (s1, T1), Const (s2, _)) =
+            if s1 = s2 then SOME (Const (s1, T1)) else NONE
+          | merge_pattern (t, Free _) = SOME t
+          | merge_pattern (Free _, t) = SOME t
+          | merge_pattern (t1l $ t1r, t2l $ t2r) =
+            (case (merge_pattern (t1l, t2l), merge_pattern (t1r, t2r)) of
+              (SOME t1, SOME t2) => SOME (t1 $ t2)
+              | _ => NONE)
+          | merge_pattern _ = NONE
+        fun merge_patterns pats1 pats2 = case (pats1, pats2) of
+          ([], []) => SOME []
+          | (x :: xs, y :: ys) =>
+            (case (merge_pattern (x, y), merge_patterns xs ys) of
+              (SOME x, SOME xs) => SOME (x :: xs)
+              | _ => NONE
+            )
+          | _ => raise Match
+        fun merge_insert ((lhs1, case_pat), _) [] =
+            [(lhs1, pattern_empty |> pattern_insert ctr_count [case_pat])]
+          | merge_insert ((lhs1, case_pat), rhs) ((lhs2, pat) :: pats) =
+            let
+              val pats = merge_insert ((lhs1, case_pat), rhs) pats
+              val (first_equ_needed, new_lhs) = case merge_patterns lhs1 lhs2 of
+                SOME new_lhs => (not (pattern_contains [case_pat] pat), new_lhs)
+                | NONE => (false, lhs2)
+              val second_equ_needed = not (matches_all lhs1 lhs2)
+                orelse not first_equ_needed
+              val first_equ = if first_equ_needed
+                then [(new_lhs, pattern_insert ctr_count [case_pat] pat)]
+                else []
+              val second_equ = if second_equ_needed
+                then [(lhs2, pat)]
+                else []
+            in
+              first_equ @ second_equ @ pats
+            end
+        in
+          (fold merge_insert equs []
+            |> split_list
+            |> fst) @ [default_lhs]
+        end
+    val lhss2 = split ((lhss1 ~~ lazy_pats) ~~ rhss)
+
+    (* Step 3 : Remove redundant patterns *)
+    fun remove_redundant_lhs lhss =
+      let
+        fun f lhs pat = if pattern_contains lhs pat
+          then ((lhs, false), pat)
+          else ((lhs, true), pattern_insert ctr_count lhs pat)
+      in
+        fold_map f lhss pattern_empty
+        |> fst
+        |> filter snd
+        |> map fst
+      end
+    fun remove_redundant_rhs rhss =
+      let
+        fun f (lhs, rhs) pat = if pattern_contains [lhs] pat
+          then (((lhs, rhs), false), pat)
+          else (((lhs, rhs), true), pattern_insert ctr_count [lhs] pat)
+      in
+        map fst (filter snd (fold_map f rhss pattern_empty |> fst))
+      end
+    val lhss3 = remove_redundant_lhs lhss2
+
+    (* Step 4 : Compute right hand side *)
+    fun subs_fun f = fold_term
+      Const
+      (f o Free)
+      Var
+      Bound
+      Abs
+      (fn (x, y) => f x $ f y)
+    fun find_rhss lhs =
+      let
+        fun f (lhs1, (pat, rhs)) = 
+          case match_all lhs1 lhs of NONE => NONE
+          | SOME f => SOME (pat, subs_fun f rhs)
+      in
+        remove_redundant_rhs
+          (map_filter f (lhss1 ~~ (lazy_pats ~~ rhss)) @
+            [(lazy_case_arg, list_comb (fun_t, lhs) |> abort)]
+          )
+      end
+
+    (* Step 5 : make_case of right hand side *)
+    fun make_case ctxt case_arg cases = case cases of
+      [(Free x, rhs)] => subs_fun (fn y => if y = Free x then case_arg else y) rhs
+      | _ => Case_Translation.make_case
+        ctxt
+        Case_Translation.Warning
+        Name.context
+        case_arg
+        cases
+    val type_name_fun = add_type_names lazy_pats o type_name_fun
+    val rhss3 = map ((make_case ctxt lazy_case_arg) o find_rhss) lhss3
+  in
+    (lhss3, rhss3, type_name_fun)
+  end;
+
+fun terms_to_case ctxt ctr_count (head : term) (lhss : term list list)
+    (rhss : term list) (typ_list : typ list) (poss : (int * (string * int) list) list) =
+  let
+    val (lhss1, rhss1, def_frees, case_args, ctxt1) = replace_frees lhss rhss typ_list ctxt
+    val exec_list = poss ~~ def_frees
+    val (lhss2, rhss2, type_name_fun) = fold_rev
+      (terms_to_case_at ctr_count ctxt1 head case_args) exec_list (lhss1, rhss1, I)
+    fun make_eq_term (lhss, rhs) = (list_comb (head, lhss), rhs)
+      |> HOLogic.mk_eq
+      |> HOLogic.mk_Trueprop
+  in
+    (map make_eq_term (lhss2 ~~ rhss2),
+      get_split_theorems ctxt1 (type_name_fun Symtab.empty),
+      ctxt1)
+  end;
+
+fun build_case_t replace_ctr ctr_count head lhss rhss ctxt =
+  let
+    val num_eqs = length lhss
+    val _ = if length rhss = num_eqs andalso num_eqs > 0 then ()
+      else raise Fail
+        ("expected same number of left-hand sides as right-hand sides\n"
+          ^ "and at least one equation")
+    val n = length (hd lhss)
+    val _ = if forall (fn m => length m = n) lhss then ()
+      else raise Fail "expected equal number of arguments"
+
+    fun to_coordinates (n, ts) = case map_filter (term_to_coordinates replace_ctr) ts of
+        [] => NONE
+      | (tco :: tcos) => SOME (n, fold term_coordinate_merge tcos tco |> coordinates_to_list)
+    fun add_T (n, xss) = map (fn (T, xs) => (T, (n, xs))) xss
+    val (typ_list, poss) = lhss
+      |> Ctr_Sugar_Util.transpose
+      |> map_index to_coordinates
+      |> map_filter (map_option add_T)
+      |> flat
+      |> split_list 
+  in
+    if null poss then ([], [], ctxt)
+    else terms_to_case ctxt (dest_Const #> ctr_count) head lhss rhss typ_list poss
+  end;
+
+fun tac ctxt {splits, intros, defs} =
+  let
+    val split_and_subst = 
+      split_tac ctxt splits 
+      THEN' REPEAT_ALL_NEW (
+        resolve_tac ctxt [@{thm conjI}, @{thm allI}]
+        ORELSE'
+        (resolve_tac ctxt [@{thm impI}] THEN' hyp_subst_tac_thin true ctxt))
+  in
+    (REPEAT_ALL_NEW split_and_subst ORELSE' K all_tac)
+    THEN' (K (Local_Defs.unfold_tac ctxt [@{thm missing_pattern_match_def}]))
+    THEN' (K (Local_Defs.unfold_tac ctxt defs))
+    THEN_ALL_NEW (SOLVED' (resolve_tac ctxt (@{thm refl} :: intros)))
+  end;
+
+fun to_case _ _ _ [] = NONE
+  | to_case ctxt replace_ctr ctr_count ths =
+    let
+      val strip_eq = Thm.prop_of #> HOLogic.dest_Trueprop #> HOLogic.dest_eq
+      fun import [] ctxt = ([], ctxt)
+        | import (thm :: thms) ctxt =
+          let
+            val fun_ct = strip_eq #> fst #> head_of #> Logic.mk_term #> Thm.cterm_of ctxt
+            val ct = fun_ct thm
+            val cts = map fun_ct thms
+            val pairs = map (fn s => (s,ct)) cts
+            val thms' = map (fn (th,p) => Thm.instantiate (Thm.match p) th) (thms ~~ pairs)
+          in
+            Variable.import true (thm :: thms') ctxt |> apfst snd
+          end
+
+      val (iths, ctxt') = import ths ctxt
+      val head = hd iths |> strip_eq |> fst |> head_of
+      val eqs = map (strip_eq #> apfst (snd o strip_comb)) iths
+
+      fun hide_rhs ((pat, rhs), name) lthy =
+        let
+          val frees = fold Term.add_frees pat []
+          val abs_rhs = fold absfree frees rhs
+          val (f, def, lthy') = case lthy
+            |> Local_Defs.define [((Binding.name name, NoSyn), (Binding.empty_atts, abs_rhs))] of
+              ([(f, (_, def))], lthy') => (f, def, lthy')
+              | _ => raise Match
+        in
+          ((list_comb (f, map Free (rev frees)), def), lthy')
+        end
+
+      val rhs_names = Name.invent (Variable.names_of ctxt') "rhs" (length eqs)
+      val ((def_ts, def_thms), ctxt2) =
+        fold_map hide_rhs (eqs ~~ rhs_names) ctxt' |> apfst split_list
+      val (ts, split_thms, ctxt3) = build_case_t replace_ctr ctr_count head
+        (map fst eqs) def_ts ctxt2
+      fun mk_thm t = Goal.prove ctxt3 [] [] t
+          (fn {context=ctxt, ...} => tac ctxt {splits=split_thms, intros=ths, defs=def_thms} 1)
+    in
+      if null ts then NONE
+      else
+        ts
+        |> map mk_thm
+        |> Proof_Context.export ctxt3 ctxt
+        |> map (Goal.norm_result ctxt)
+        |> SOME
+    end;
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/code_lazy.ML	Sat May 12 11:24:11 2018 +0200
@@ -0,0 +1,655 @@
+(* Author: Pascal Stoop, ETH Zurich
+   Author: Andreas Lochbihler, Digital Asset *)
+
+signature CODE_LAZY =
+sig
+  type lazy_info =
+    {eagerT: typ,
+     lazyT: typ,
+     ctr: term,
+     destr: term,
+     lazy_ctrs: term list,
+     case_lazy: term,
+     active: bool,
+     activate: theory -> theory,
+     deactivate: theory -> theory};
+  val code_lazy_type: string -> theory -> theory
+  val activate_lazy_type: string -> theory -> theory
+  val deactivate_lazy_type: string -> theory -> theory
+  val activate_lazy_types: theory -> theory
+  val deactivate_lazy_types: theory -> theory
+
+  val get_lazy_types: theory -> (string * lazy_info) list
+
+  val print_lazy_types: theory -> unit
+
+  val transform_code_eqs: Proof.context -> (thm * bool) list -> (thm * bool) list option
+end;
+
+structure Code_Lazy : CODE_LAZY =
+struct
+
+type lazy_info =
+  {eagerT: typ,
+   lazyT: typ,
+   ctr: term,
+   destr: term,
+   lazy_ctrs: term list,
+   case_lazy: term,
+   active: bool,
+   activate: theory -> theory,
+   deactivate: theory -> theory};
+
+fun map_active f {eagerT, lazyT, ctr, destr, lazy_ctrs, case_lazy, active, activate, deactivate} =
+  { eagerT = eagerT, 
+    lazyT = lazyT,
+    ctr = ctr,
+    destr = destr,
+    lazy_ctrs = lazy_ctrs,
+    case_lazy = case_lazy,
+    active = f active,
+    activate = activate,
+    deactivate = deactivate
+  }
+
+structure Laziness_Data = Theory_Data(
+  type T = lazy_info Symtab.table;
+  val empty = Symtab.empty;
+  val merge = Symtab.join (fn _ => fn (_, record) => record);
+  val extend = I;
+);
+
+fun fold_type type' tfree tvar typ =
+  let
+    fun go (Type (s, Ts)) = type' (s, map go Ts)
+      | go (TFree T) = tfree T
+      | go (TVar T) = tvar T
+  in
+    go typ
+  end;
+
+fun read_typ lthy name =
+  let
+    val (s, Ts) = Proof_Context.read_type_name {proper = true, strict = true} lthy name |> dest_Type
+    val (Ts', _) = Ctr_Sugar_Util.mk_TFrees (length Ts) lthy
+  in 
+    Type (s, Ts')
+  end
+
+fun mk_name prefix suffix name ctxt =
+  Ctr_Sugar_Util.mk_fresh_names ctxt 1 (prefix ^ name ^ suffix) |>> hd;
+fun generate_typedef_name name ctxt = mk_name "" "_lazy" name ctxt;
+
+fun add_syntax_definition short_type_name eagerT lazyT lazy_ctr lthy = 
+  let
+    val (name, _) = mk_name "lazy_" "" short_type_name lthy
+    val freeT = HOLogic.unitT --> lazyT
+    val lazyT' = Type (@{type_name lazy}, [lazyT])
+    val def = Logic.all_const freeT $ absdummy freeT (Logic.mk_equals (
+      Free (name, (freeT --> eagerT)) $ Bound 0,
+      lazy_ctr $ (Const (@{const_name delay}, (freeT --> lazyT')) $ Bound 0)))
+    val (_, lthy') = Local_Theory.open_target lthy
+    val ((t, (_, thm)), lthy') = Specification.definition NONE [] [] 
+      ((Thm.def_binding (Binding.name name), []), def) lthy'
+    val lthy' = Specification.notation true ("", false) [(t, Mixfix.mixfix "_")] lthy'
+    val lthy = Local_Theory.close_target lthy'
+    val def_thm = singleton (Proof_Context.export lthy' lthy) thm
+  in
+    (def_thm, lthy)
+  end;
+
+fun add_ctr_code raw_ctrs case_thms thy =
+  let
+    fun mk_case_certificate ctxt raw_thms =
+      let
+        val thms = raw_thms
+          |> Conjunction.intr_balanced
+          |> Thm.unvarify_global (Proof_Context.theory_of ctxt)
+          |> Conjunction.elim_balanced (length raw_thms)
+          |> map Simpdata.mk_meta_eq
+          |> map Drule.zero_var_indexes
+        val thm1 = case thms of
+          thm :: _ => thm
+          | _ => raise Empty
+        val params = Term.add_free_names (Thm.prop_of thm1) [];
+        val rhs = thm1
+          |> Thm.prop_of |> Logic.dest_equals |> fst |> strip_comb
+          ||> fst o split_last |> list_comb
+        val lhs = Free (singleton (Name.variant_list params) "case", fastype_of rhs);
+        val assum = Thm.cterm_of ctxt (Logic.mk_equals (lhs, rhs))
+      in
+        thms
+        |> Conjunction.intr_balanced
+        |> rewrite_rule ctxt [Thm.symmetric (Thm.assume assum)]
+        |> Thm.implies_intr assum
+        |> Thm.generalize ([], params) 0
+        |> Axclass.unoverload ctxt
+        |> Thm.varifyT_global
+      end
+    val ctrs = map (apsnd (perhaps (try Logic.unvarifyT_global))) raw_ctrs
+    val unover_ctrs = map (fn ctr as (_, fcT) => (Axclass.unoverload_const thy ctr, fcT)) ctrs
+  in
+    if can (Code.constrset_of_consts thy) unover_ctrs then
+      thy
+      |> Code.declare_datatype_global ctrs
+      |> fold_rev (Code.add_eqn_global o rpair true) case_thms
+      |> Code.declare_case_global (mk_case_certificate (Proof_Context.init_global thy) case_thms)
+    else
+      thy
+  end;
+
+fun not_found s = error (s ^ " has not been added as lazy type");
+
+fun validate_type_name thy type_name =
+  let
+    val lthy = Named_Target.theory_init thy
+    val eager_type = read_typ lthy type_name
+    val type_name = case eager_type of
+      Type (s, _) => s
+      | _ => raise Match
+  in
+    type_name
+  end;
+
+fun set_active_lazy_type value eager_type_string thy =
+  let
+    val type_name = validate_type_name thy eager_type_string
+    val x =
+      case Symtab.lookup (Laziness_Data.get thy) type_name of
+        NONE => not_found type_name
+        | SOME x => x
+    val new_x = map_active (K value) x
+    val thy1 = if value = #active x
+      then thy
+      else if value
+        then #activate x thy
+        else #deactivate x thy
+  in
+    Laziness_Data.map (Symtab.update (type_name, new_x)) thy1
+  end;
+
+fun set_active_lazy_types value thy =
+  let
+    val lazy_type_names = Symtab.keys (Laziness_Data.get thy)
+    fun fold_fun value type_name thy =
+      let
+        val x =
+          case Symtab.lookup (Laziness_Data.get thy) type_name of
+            SOME x => x
+            | NONE => raise Match
+        val new_x = map_active (K value) x
+        val thy1 = if value = #active x
+          then thy
+          else if value
+            then #activate x thy
+            else #deactivate x thy
+      in
+        Laziness_Data.map (Symtab.update (type_name, new_x)) thy1
+      end
+  in
+    fold (fold_fun value) lazy_type_names thy
+  end;
+
+(* code_lazy_type : string -> theory -> theory *)
+fun code_lazy_type eager_type_string thy =
+  let
+    val lthy = Named_Target.theory_init thy
+    val eagerT = read_typ lthy eager_type_string
+    val (type_name, type_args) = dest_Type eagerT
+    val short_type_name = Long_Name.base_name type_name
+    val _ = if Symtab.defined (Laziness_Data.get thy) type_name
+      then error (type_name ^ " has already been added as lazy type.")
+      else ()
+    val {case_thms, casex, ctrs, ...} = case Ctr_Sugar.ctr_sugar_of lthy type_name of
+        SOME x => x
+      | _ => error (type_name ^ " is not registered with free constructors.")
+
+    fun substitute_ctr (old_T, new_T) ctr_T lthy =
+      let
+        val old_ctr_vars = map TVar (Term.add_tvarsT ctr_T [])
+        val old_ctr_Ts = map TFree (Term.add_tfreesT ctr_T []) @ old_ctr_vars
+        val (new_ctr_Ts, lthy') = Ctr_Sugar_Util.mk_TFrees (length old_ctr_Ts) lthy
+
+        fun double_type_fold Ts = case Ts of
+          (Type (_, Ts1), Type (_, Ts2)) => flat (map double_type_fold (Ts1 ~~ Ts2))
+          | (Type _, _) => raise Match
+          | (_, Type _) => raise Match
+          | Ts => [Ts]
+        fun map_fun1 f = List.foldr
+          (fn ((T1, T2), f) => fn T => if T = T1 then T2 else f T)
+          f
+          (double_type_fold (old_T, new_T))
+        val map_fun2 = AList.lookup (op =) (old_ctr_Ts ~~ new_ctr_Ts) #> the
+        val map_fun = map_fun1 map_fun2
+
+        val new_ctr_type = fold_type Type (map_fun o TFree) (map_fun o TVar) ctr_T
+      in
+        (new_ctr_type, lthy')
+      end
+
+    val (short_lazy_type_name, lthy1) = generate_typedef_name short_type_name lthy
+
+    fun mk_lazy_typedef (name, eager_type) lthy =
+      let
+        val binding = Binding.name name
+        val (result, lthy1) = (Typedef.add_typedef
+            { overloaded=false }
+            (binding, rev (Term.add_tfreesT eager_type []), Mixfix.NoSyn)
+            (Const (@{const_name "top"}, Type (@{type_name set}, [eager_type])))
+            NONE
+            (fn ctxt => resolve_tac ctxt [@{thm UNIV_witness}] 1)
+          o (Local_Theory.open_target #> snd)) lthy
+      in
+         (binding, result, Local_Theory.close_target lthy1)
+      end;
+
+    val (typedef_binding, (_, info), lthy2) = mk_lazy_typedef (short_lazy_type_name, eagerT) lthy1
+
+    val lazy_type = Type (Local_Theory.full_name lthy2 typedef_binding, type_args)
+    val (Abs_lazy, Rep_lazy) =
+      let
+        val info = fst info
+        val Abs_name = #Abs_name info
+        val Rep_name = #Rep_name info
+        val Abs_type = eagerT --> lazy_type
+        val Rep_type = lazy_type --> eagerT
+      in
+        (Const (Abs_name, Abs_type), Const (Rep_name, Rep_type))
+      end
+    val Abs_inverse = #Abs_inverse (snd info)
+    val Rep_inverse = #Rep_inverse (snd info)
+
+    val (ctrs', lthy3) = List.foldr
+      (fn (Const (s, T), (ctrs, lthy)) => let
+            val (T', lthy') = substitute_ctr (body_type T, eagerT) T lthy
+          in
+            ((Const (s, T')) :: ctrs, lthy')
+          end
+      )
+      ([], lthy2)
+      ctrs
+
+    fun to_destr_eagerT typ = case typ of
+          Type (@{type_name "fun"}, [_, Type (@{type_name "fun"}, Ts)]) => 
+          to_destr_eagerT (Type (@{type_name "fun"}, Ts))
+        | Type (@{type_name "fun"}, [T, _]) => T
+        | _ => raise Match
+    val (case', lthy4) = 
+      let
+        val (eager_case, caseT) = dest_Const casex
+        val (caseT', lthy') = substitute_ctr (to_destr_eagerT caseT, eagerT) caseT lthy3
+      in (Const (eager_case, caseT'), lthy') end
+
+    val ctr_names = map (Long_Name.base_name o fst o dest_Const) ctrs
+    val ((((lazy_ctr_name, lazy_sel_name), lazy_ctrs_name), lazy_case_name), ctxt) = lthy4
+      |> mk_name "Lazy_" "" short_type_name
+      ||>> mk_name "unlazy_" "" short_type_name
+      ||>> fold_map (mk_name "" "_Lazy") ctr_names
+      ||>> mk_name "case_" "_lazy" short_type_name
+
+    fun mk_def (name, T, rhs) lthy =
+      let
+        val binding = Binding.name name
+        val ((_, (_, thm)), lthy1) = 
+          Local_Theory.open_target lthy |> snd
+          |> Specification.definition NONE [] [] ((Thm.def_binding binding, []), rhs)
+        val lthy2 = Local_Theory.close_target lthy1
+        val def_thm = hd (Proof_Context.export lthy1 lthy2 [thm])
+      in
+        ({binding = binding, const = Const (Local_Theory.full_name lthy2 binding, T), thm = def_thm}, lthy2)
+      end;
+    
+    val lazy_datatype = Type (@{type_name lazy}, [lazy_type])
+    val Lazy_type = lazy_datatype --> eagerT
+    val unstr_type = eagerT --> lazy_datatype
+    
+    fun apply_bounds i n term =
+      if n > i then apply_bounds i (n-1) (term $ Bound (n-1))
+      else term
+    fun all_abs Ts t = Logic.list_all (map (pair Name.uu) Ts, t)
+    fun mk_force t = Const (@{const_name force}, lazy_datatype --> lazy_type) $ t
+    fun mk_delay t = Const (@{const_name delay}, (@{typ unit} --> lazy_type) --> lazy_datatype) $ t
+
+    val lazy_ctr = all_abs [lazy_datatype]
+      (Logic.mk_equals (Free (lazy_ctr_name, Lazy_type) $ Bound 0, Rep_lazy $ mk_force (Bound 0)))
+    val (lazy_ctr_def, lthy5) = mk_def (lazy_ctr_name, Lazy_type, lazy_ctr) lthy4
+
+    val lazy_sel = all_abs [eagerT]
+      (Logic.mk_equals (Free (lazy_sel_name, unstr_type) $ Bound 0, 
+        mk_delay (Abs (Name.uu, @{typ unit}, Abs_lazy $ Bound 1))))
+    val (lazy_sel_def, lthy6) = mk_def (lazy_sel_name, unstr_type, lazy_sel) lthy5
+
+    fun mk_lazy_ctr (name, eager_ctr) =
+      let
+        val (_, ctrT) = dest_Const eager_ctr
+        fun to_lazy_ctrT (Type (@{type_name fun}, [T1, T2])) = T1 --> to_lazy_ctrT T2
+          | to_lazy_ctrT T = if T = eagerT then lazy_type else raise Match
+        val lazy_ctrT = to_lazy_ctrT ctrT
+        val argsT = binder_types ctrT
+        val lhs = apply_bounds 0 (length argsT) (Free (name, lazy_ctrT))
+        val rhs = Abs_lazy $ apply_bounds 0 (length argsT) eager_ctr
+      in
+        mk_def (name, lazy_ctrT, all_abs argsT (Logic.mk_equals (lhs, rhs)))
+      end
+    val (lazy_ctrs_def, lthy7) = fold_map mk_lazy_ctr (lazy_ctrs_name ~~ ctrs') lthy6
+
+    val (lazy_case_def, lthy8) =
+      let
+        val (_, caseT) = dest_Const case'
+        fun to_lazy_caseT (Type (@{type_name fun}, [T1, T2])) =
+            if T1 = eagerT then lazy_type --> T2 else T1 --> to_lazy_caseT T2
+        val lazy_caseT = to_lazy_caseT caseT
+        val argsT = binder_types lazy_caseT
+        val n = length argsT
+        val lhs = apply_bounds 0 n (Free (lazy_case_name, lazy_caseT)) 
+        val rhs = apply_bounds 1 n case' $ (Rep_lazy $ Bound 0)
+      in
+        mk_def (lazy_case_name, lazy_caseT, all_abs argsT (Logic.mk_equals (lhs, rhs))) lthy7
+      end
+
+    fun mk_thm ((name, term), thms) lthy =
+      let
+        val binding = Binding.name name
+        fun tac {context, ...} = Simplifier.simp_tac
+          (put_simpset HOL_basic_ss context addsimps thms)
+          1
+        val thm = Goal.prove lthy [] [] term tac
+        val (_, lthy1) = lthy
+          |> Local_Theory.open_target |> snd
+          |> Local_Theory.note ((binding, []), [thm])
+      in
+        (thm, Local_Theory.close_target lthy1)
+      end
+    fun mk_thms exec_list lthy = fold_map mk_thm exec_list lthy
+
+    val mk_eq = HOLogic.mk_Trueprop o HOLogic.mk_eq
+
+    val lazy_ctrs = map #const lazy_ctrs_def
+    val eager_lazy_ctrs = ctrs' ~~ lazy_ctrs
+
+    val (((((((Lazy_delay_eq_name, Rep_ctr_names), ctrs_lazy_names), force_sel_name), case_lazy_name),
+      sel_lazy_name), case_ctrs_name), _) = lthy5
+      |> mk_name "Lazy_" "_delay" short_type_name
+      ||>> fold_map (mk_name "Rep_" "_Lazy") ctr_names
+      ||>> fold_map (mk_name "" "_conv_lazy") ctr_names
+      ||>> mk_name "force_unlazy_" "" short_type_name
+      ||>> mk_name "case_" "_conv_lazy" short_type_name
+      ||>> mk_name "Lazy_" "_inverse" short_type_name
+      ||>> fold_map (mk_name ("case_" ^ short_type_name ^ "_lazy_") "") ctr_names
+
+    val mk_Lazy_delay_eq =
+      (#const lazy_ctr_def $ mk_delay (Bound 0), Rep_lazy $ (Bound 0 $ @{const Unity}))
+      |> mk_eq |> all_abs [@{typ unit} --> lazy_type]
+    val (Lazy_delay_thm, lthy8a) = mk_thm 
+      ((Lazy_delay_eq_name, mk_Lazy_delay_eq), [#thm lazy_ctr_def, @{thm force_delay}])
+      lthy8
+
+    fun mk_lazy_ctr_eq (eager_ctr, lazy_ctr) =
+      let
+        val (_, ctrT) = dest_Const eager_ctr
+        val argsT = binder_types ctrT
+        val args = length argsT
+      in
+        (Rep_lazy $ apply_bounds 0 args lazy_ctr, apply_bounds 0 args eager_ctr)
+        |> mk_eq |> all_abs argsT
+      end
+    val Rep_ctr_eqs = map mk_lazy_ctr_eq eager_lazy_ctrs
+    val (Rep_ctr_thms, lthy8b) = mk_thms
+        ((Rep_ctr_names ~~ Rep_ctr_eqs) ~~
+          (map (fn def => [#thm def, Abs_inverse, @{thm UNIV_I}]) lazy_ctrs_def)
+        )
+        lthy8a
+
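+    (* Conversion equations turning the eager constructors into delayed lazy ones:
+       C x1 ... xn = Lazy_ctr (delay (%_. C_Lazy x1 ... xn)). *)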
+    fun mk_ctrs_lazy_eq (eager_ctr, lazy_ctr) =
+      let
+        val argsT = dest_Const eager_ctr |> snd |> binder_types
+        val n = length argsT
+        val lhs = apply_bounds 0 n eager_ctr
+        val rhs = #const lazy_ctr_def $ 
+          (mk_delay (Abs (Name.uu, @{typ unit}, apply_bounds 1 (n + 1) lazy_ctr)))
+      in
+        (lhs, rhs) |> mk_eq |> all_abs argsT
+      end
+    val ctrs_lazy_eq = map mk_ctrs_lazy_eq eager_lazy_ctrs 
+    val (ctrs_lazy_thms, lthy8c) = mk_thms
+      ((ctrs_lazy_names ~~ ctrs_lazy_eq) ~~ map (fn thm => [Lazy_delay_thm, thm]) Rep_ctr_thms)
+      lthy8b
+
+    val force_sel_eq = 
+      (mk_force (#const lazy_sel_def $ Bound 0), Abs_lazy $ Bound 0)
+      |> mk_eq |> all_abs [eagerT]
+    val (force_sel_thm, lthy8d) = mk_thm
+        ((force_sel_name, force_sel_eq), [#thm lazy_sel_def, @{thm force_delay}])
+        lthy8c
+
+    val case_lazy_eq = 
+      let
+        val (_, caseT) = case' |> dest_Const
+        val argsT = binder_types caseT
+        val n = length argsT
+        val lhs = apply_bounds 0 n case'
+        val rhs = apply_bounds 1 n (#const lazy_case_def) $ (mk_force (#const lazy_sel_def $ Bound 0))
+      in
+        (lhs, rhs) |> mk_eq |> all_abs argsT
+      end
+    val (case_lazy_thm, lthy8e) = mk_thm
+        ((case_lazy_name, case_lazy_eq), 
+        [#thm lazy_case_def, force_sel_thm, Abs_inverse, @{thm UNIV_I}])
+        lthy8d
+
+    val sel_lazy_eq =
+      (#const lazy_sel_def $ (#const lazy_ctr_def $ Bound 0), Bound 0)
+      |> mk_eq |> all_abs [lazy_datatype]
+    val (sel_lazy_thm, lthy8f) = mk_thm
+      ((sel_lazy_name, sel_lazy_eq),
+      [#thm lazy_sel_def, #thm lazy_ctr_def, Rep_inverse, @{thm delay_force}])
+      lthy8e
+
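+    (* Case equations on the lazy constructors:
+       lazy_case f1 ... fm (Ci_Lazy x1 ... xn) = fi x1 ... xn. *)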
+    fun mk_case_ctrs_eq (i, lazy_ctr) =
+      let
+        val lazy_case = #const lazy_case_def
+        val (_, ctrT) = dest_Const lazy_ctr
+        val ctr_argsT = binder_types ctrT
+        val ctr_args_n = length ctr_argsT
+        val (_, caseT) = dest_Const lazy_case
+        val case_argsT = binder_types caseT
+
+        fun n_bounds_from m n t =
+          if n > 0 then n_bounds_from (m - 1) (n - 1) (t $ Bound (m - 1)) else t
+
+        val case_argsT' = take (length case_argsT - 1) case_argsT
+        val Ts = case_argsT' @ ctr_argsT
+        val num_abs_types = length Ts
+        val lhs = n_bounds_from num_abs_types (length case_argsT') lazy_case $
+          apply_bounds 0 ctr_args_n lazy_ctr
+        val rhs = apply_bounds 0 ctr_args_n (Bound (num_abs_types - i - 1))
+      in
+        (lhs, rhs) |> mk_eq |> all_abs Ts
+      end
+    val case_ctrs_eq = map_index mk_case_ctrs_eq lazy_ctrs
+    val (case_ctrs_thms, lthy9) = mk_thms
+        ((case_ctrs_name ~~ case_ctrs_eq) ~~
+         map2 (fn thm1 => fn thm2 => [#thm lazy_case_def, thm1, thm2]) Rep_ctr_thms case_thms
+        )
+        lthy8f
+
+    val (pat_def_thm, lthy10) = 
+      add_syntax_definition short_type_name eagerT lazy_type (#const lazy_ctr_def) lthy9
+
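+    (* Code generator setup: the following combinators switch the code equations and the
+       pre-/postprocessor simpsets between the eager and the lazy representation; they are
+       stored as the activate/deactivate fields of the laziness table entry below. *)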
+    val add_lazy_ctrs =
+      Code.declare_datatype_global [dest_Const (#const lazy_ctr_def)]
+    val eager_ctrs = map (apsnd (perhaps (try Logic.unvarifyT_global)) o dest_Const) ctrs
+    val add_eager_ctrs =
+      fold Code.del_eqn_global ctrs_lazy_thms
+      #> Code.declare_datatype_global eager_ctrs
+    val add_code_eqs = fold (Code.add_eqn_global o rpair true) 
+      ([case_lazy_thm, sel_lazy_thm])
+    val add_lazy_ctr_thms = fold (Code.add_eqn_global o rpair true) ctrs_lazy_thms
+    val add_lazy_case_thms =
+      fold Code.del_eqn_global case_thms
+      #> Code.add_eqn_global (case_lazy_thm, false)
+    val add_eager_case_thms = Code.del_eqn_global case_lazy_thm
+      #> fold (Code.add_eqn_global o rpair false) case_thms
+
+    val delay_dummy_thm = (pat_def_thm RS @{thm symmetric})
+      |> Drule.infer_instantiate' lthy10
+         [SOME (Thm.cterm_of lthy10 (Const (@{const_name "Pure.dummy_pattern"}, HOLogic.unitT --> lazy_type)))]
+      |> Thm.generalize (map (fst o dest_TFree) type_args, []) (Variable.maxidx_of lthy10 + 1);
+
+    val ctr_post = delay_dummy_thm :: map (fn thm => thm RS @{thm sym}) ctrs_lazy_thms
+    val ctr_thms_abs = map (fn thm => Drule.abs_def (thm RS @{thm eq_reflection})) ctrs_lazy_thms
+    val case_thm_abs = Drule.abs_def (case_lazy_thm RS @{thm eq_reflection})
+    val add_simps = Code_Preproc.map_pre
+      (fn ctxt => ctxt addsimps (case_thm_abs :: ctr_thms_abs))
+    val del_simps = Code_Preproc.map_pre
+      (fn ctxt => ctxt delsimps (case_thm_abs :: ctr_thms_abs))
+    val add_post = Code_Preproc.map_post
+      (fn ctxt => ctxt addsimps ctr_post)
+    val del_post = Code_Preproc.map_post
+      (fn ctxt => ctxt delsimps ctr_post)
+  in
+    Local_Theory.exit_global lthy10
+    |> Laziness_Data.map (Symtab.update (type_name,
+      {eagerT = eagerT, 
+       lazyT = lazy_type,
+       ctr = #const lazy_ctr_def,
+       destr = #const lazy_sel_def,
+       lazy_ctrs = map #const lazy_ctrs_def,
+       case_lazy = #const lazy_case_def,
+       active = true,
+       activate = add_lazy_ctrs #> add_lazy_ctr_thms #> add_lazy_case_thms #> add_simps #> add_post,
+       deactivate = add_eager_ctrs #> add_eager_case_thms #> del_simps #> del_post}))
+    |> add_lazy_ctrs
+    |> add_ctr_code (map (dest_Const o #const) lazy_ctrs_def) case_ctrs_thms
+    |> add_code_eqs
+    |> add_lazy_ctr_thms
+    |> add_simps
+    |> add_post
+  end;
+
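+(* Code equation preprocessing: rewrite pattern matches on constructors of active lazy types
+   into case expressions via Case_Converter.to_case; NONE means the equations are unchanged. *)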
+fun transform_code_eqs _ [] = NONE
+  | transform_code_eqs ctxt eqs =
+    let
+      val thy = Proof_Context.theory_of ctxt
+      val table = Laziness_Data.get thy
+      fun eliminate (s1, s2) = case Symtab.lookup table s1 of
+          NONE => false
+        | SOME x => #active x andalso s2 <> (#ctr x |> dest_Const |> fst)
+      fun num_consts_fun (_, T) =
+        let
+          val s = body_type T |> dest_Type |> fst
+        in
+          if Symtab.defined table s
+            then Ctr_Sugar.ctr_sugar_of ctxt s |> the |> #ctrs |> length
+            else Code.get_type thy s |> fst |> snd |> length
+        end
+      val eqs = map (apfst (Thm.transfer thy)) eqs;
+
+      val ((code_eqs, nbe_eqs), pure) =
+        ((case hd eqs |> fst |> Thm.prop_of of
+            Const (@{const_name Pure.eq}, _) $ _ $ _ =>
+              (map (apfst (fn x => x RS @{thm meta_eq_to_obj_eq})) eqs, true)
+         | _ => (eqs, false))
+        |> apfst (List.partition snd))
+        handle THM _ => (([], eqs), false)
+      val to_original_eq = if pure then map (apfst (fn x => x RS @{thm eq_reflection})) else I
+    in
+      case Case_Converter.to_case ctxt eliminate num_consts_fun (map fst code_eqs) of
+          NONE => NONE
+        | SOME thms => SOME (nbe_eqs @ map (rpair true) thms |> to_original_eq)
+    end handle THM ex => (Output.writeln (@{make_string} eqs); raise THM ex);
+
+val activate_lazy_type = set_active_lazy_type true;
+val deactivate_lazy_type = set_active_lazy_type false;
+val activate_lazy_types = set_active_lazy_types true;
+val deactivate_lazy_types = set_active_lazy_types false;
+
+fun get_lazy_types thy = Symtab.dest (Laziness_Data.get thy) 
+
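+(* Pretty printing of the registered lazy types for the print_lazy_types command. *)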
+fun print_lazy_type thy (name, info : lazy_info) = 
+  let
+    val ctxt = Proof_Context.init_global thy
+    fun pretty_ctr ctr = 
+      let
+        val argsT = dest_Const ctr |> snd |> binder_types
+      in
+        Pretty.block [
+          Syntax.pretty_term ctxt ctr,
+          Pretty.brk 1,
+          Pretty.block (Pretty.separate "" (map (Pretty.quote o Syntax.pretty_typ ctxt) argsT))
+        ]
+      end
+  in
+    Pretty.block [
+      Pretty.str (name ^ (if #active info then "" else " (inactive)") ^ ":"),
+      Pretty.brk 1,
+      Pretty.block [
+        Syntax.pretty_typ ctxt (#eagerT info),
+        Pretty.brk 1,
+        Pretty.str "=",
+        Pretty.brk 1,
+        Syntax.pretty_term ctxt (#ctr info),
+        Pretty.brk 1,
+        Pretty.block [
+          Pretty.str "(",
+          Syntax.pretty_term ctxt (#destr info),
+          Pretty.str ":",
+          Pretty.brk 1,
+          Syntax.pretty_typ ctxt (Type (@{type_name lazy}, [#lazyT info])),
+          Pretty.str ")"
+        ]
+      ],
+      Pretty.fbrk,
+      Pretty.keyword2 "and",
+      Pretty.brk 1,
+      Pretty.block ([
+        Syntax.pretty_typ ctxt (#lazyT info),
+        Pretty.brk 1,
+        Pretty.str "=",
+        Pretty.brk 1] @
+        Pretty.separate " |" (map pretty_ctr (#lazy_ctrs info)) @ [
+        Pretty.fbrk,
+        Pretty.keyword2 "for",
+        Pretty.brk 1, 
+        Pretty.str "case:",
+        Pretty.brk 1,
+        Syntax.pretty_term ctxt (#case_lazy info)
+      ])
+    ]
+  end;
+
+fun print_lazy_types thy = 
+  let
+    fun cmp ((name1, _), (name2, _)) = string_ord (name1, name2)
+    val infos = Laziness_Data.get thy |> Symtab.dest |> map (apfst Long_Name.base_name) |> sort cmp
+  in
+    Pretty.writeln_chunks (map (print_lazy_type thy) infos)
+  end;
+
+
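+(* Outer syntax: user-level commands for declaring lazy types and toggling the substitution. *)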
+val _ =
+  Outer_Syntax.command @{command_keyword code_lazy_type}
+    "make a lazy copy of the datatype and activate substitution"
+    (Parse.binding >> (fn b => Toplevel.theory (Binding.name_of b |> code_lazy_type)));
+val _ =
+  Outer_Syntax.command @{command_keyword activate_lazy_type}
+    "activate substitution on a specific (lazy) type"
+    (Parse.binding >> (fn b => Toplevel.theory (Binding.name_of b |> activate_lazy_type)));
+val _ =
+  Outer_Syntax.command @{command_keyword deactivate_lazy_type}
+    "deactivate substitution on a specific (lazy) type"
+    (Parse.binding >> (fn b => Toplevel.theory (Binding.name_of b |> deactivate_lazy_type)));
+val _ =
+  Outer_Syntax.command @{command_keyword activate_lazy_types}
+    "activate substitution on all (lazy) types"
+    (pair (Toplevel.theory activate_lazy_types));
+val _ =
+  Outer_Syntax.command @{command_keyword deactivate_lazy_types}
+    "deactivate substitution on all (lazy) type"
+    (pair (Toplevel.theory deactivate_lazy_types));
+val _ =
+  Outer_Syntax.command @{command_keyword print_lazy_types}
+    "print the types that have been declared as lazy and their substitution state"
+    (pair (Toplevel.theory (tap print_lazy_types)));
+
+end
\ No newline at end of file
--- a/src/HOL/Library/document/root.bib	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/Library/document/root.bib	Sat May 12 11:24:11 2018 +0200
@@ -52,3 +52,10 @@
   year		= 1993,
   publisher	= {Springer},
   series	= {LNCS 664}}
+
+@InProceedings{LochbihlerStoop2018,
+  author = {Andreas Lochbihler and Pascal Stoop},
+  title = {Lazy Algebraic Types in {Isabelle/HOL}},
+  booktitle = {Isabelle Workshop 2018},
+  year = 2018,
+}
\ No newline at end of file
--- a/src/HOL/ROOT	Fri May 11 22:59:00 2018 +0200
+++ b/src/HOL/ROOT	Sat May 12 11:24:11 2018 +0200
@@ -247,6 +247,7 @@
     Generate_Binary_Nat
     Generate_Target_Nat
     Generate_Efficient_Datastructures
+    Code_Lazy_Test
     Code_Test_PolyML
     Code_Test_Scala
   theories [condition = ISABELLE_GHC]