merged
author wenzelm
Mon, 06 Jul 2015 14:27:03 +0200
changeset 60663 143ac4d9504a
parent 60637 03a25d3e759e (current diff)
parent 60662 deaf10a6bdad (diff)
child 60664 263a8f15faf3
merged
src/Doc/Isar_Ref/ML_Tactic.thy
--- a/NEWS	Fri Jul 03 10:17:29 2015 +0200
+++ b/NEWS	Mon Jul 06 14:27:03 2015 +0200
@@ -234,6 +234,12 @@
 command. Minor INCOMPATIBILITY, use 'function' instead.
 
 
+** ML **
+
+* Thm.instantiate (and derivatives) no longer require the LHS of the
+instantiation to be certified: plain variables are given directly.
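+
+For example, an instantiation may now be written roughly as follows, where
+th is some theorem containing schematic ?'a and ?x (names are illustrative):
+
+  Thm.instantiate
+    ([((("'a", 0), @{sort type}), @{ctyp nat})],
+     [((("x", 0), @{typ nat}), @{cterm "0 :: nat"})]) th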
+
+
 
 New in Isabelle2015 (May 2015)
 ------------------------------
--- a/src/Doc/Implementation/Logic.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Doc/Implementation/Logic.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -656,7 +656,8 @@
   @{index_ML Thm.implies_intr: "cterm -> thm -> thm"} \\
   @{index_ML Thm.implies_elim: "thm -> thm -> thm"} \\
   @{index_ML Thm.generalize: "string list * string list -> int -> thm -> thm"} \\
-  @{index_ML Thm.instantiate: "(ctyp * ctyp) list * (cterm * cterm) list -> thm -> thm"} \\
+  @{index_ML Thm.instantiate: "((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list
+  -> thm -> thm"} \\
   @{index_ML Thm.add_axiom: "Proof.context ->
   binding * term -> theory -> (string * thm) * theory"} \\
   @{index_ML Thm.add_oracle: "binding * ('a -> cterm) -> theory ->
--- a/src/Doc/Implementation/Proof.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Doc/Implementation/Proof.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -108,7 +108,7 @@
   @{index_ML Variable.export: "Proof.context -> Proof.context -> thm list -> thm list"} \\
   @{index_ML Variable.polymorphic: "Proof.context -> term list -> term list"} \\
   @{index_ML Variable.import: "bool -> thm list -> Proof.context ->
-  (((ctyp * ctyp) list * (cterm * cterm) list) * thm list) * Proof.context"} \\
+  ((((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list) * thm list) * Proof.context"} \\
   @{index_ML Variable.focus: "term -> Proof.context ->
   ((string * (string * typ)) list * term) * Proof.context"} \\
   \end{mldecls}
--- a/src/Doc/Isar_Ref/HOL_Specific.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Doc/Isar_Ref/HOL_Specific.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -1,8 +1,14 @@
 theory HOL_Specific
-imports Base "~~/src/HOL/Library/Old_Datatype" "~~/src/HOL/Library/Old_Recdef"
-  "~~/src/Tools/Adhoc_Overloading" "~~/src/HOL/Library/Dlist"  "~~/src/HOL/Library/FSet"
+imports
+  Base
+  "~~/src/HOL/Library/Old_Datatype"
+  "~~/src/HOL/Library/Old_Recdef"
+  "~~/src/Tools/Adhoc_Overloading"
+  "~~/src/HOL/Library/Dlist"
+  "~~/src/HOL/Library/FSet"
 begin
 
+
 chapter \<open>Higher-Order Logic\<close>
 
 text \<open>Isabelle/HOL is based on Higher-Order Logic, a polymorphic
@@ -52,9 +58,7 @@
   nicely, but they also mean that HOL requires some sophistication
   from the user.  In particular, an understanding of Hindley-Milner
   type-inference with type-classes, which are both used extensively in
-  the standard libraries and applications.  Beginners can set
-  @{attribute show_types} or even @{attribute show_sorts} to get more
-  explicit information about the result of type-inference.\<close>
+  the standard libraries and applications.\<close>
 
 
 chapter \<open>Derived specification elements\<close>
@@ -171,12 +175,10 @@
 
   \end{description}
 
-  When several predicates @{text "R\<^sub>1, \<dots>, R\<^sub>n"} are
-  defined simultaneously, the list of introduction rules is called
-  @{text "R\<^sub>1_\<dots>_R\<^sub>n.intros"}, the case analysis rules are
-  called @{text "R\<^sub>1.cases, \<dots>, R\<^sub>n.cases"}, and the list
-  of mutual induction rules is called @{text
-  "R\<^sub>1_\<dots>_R\<^sub>n.inducts"}.
+  When several predicates @{text "R\<^sub>1, \<dots>, R\<^sub>n"} are defined simultaneously,
+  the list of introduction rules is called @{text "R\<^sub>1_\<dots>_R\<^sub>n.intros"}, the
+  case analysis rules are called @{text "R\<^sub>1.cases, \<dots>, R\<^sub>n.cases"}, and the
+  list of mutual induction rules is called @{text "R\<^sub>1_\<dots>_R\<^sub>n.inducts"}.
 \<close>
 
 
@@ -236,8 +238,8 @@
 (*<*)end(*>*)
 
 text \<open>Common logical connectives can be easily characterized as
-non-recursive inductive definitions with parameters, but without
-arguments.\<close>
+  non-recursive inductive definitions with parameters, but without
+  arguments.\<close>
 
 (*<*)experiment begin(*>*)
 inductive AND for A B :: bool
@@ -251,12 +253,12 @@
 where "B a \<Longrightarrow> EXISTS B"
 (*<*)end(*>*)
 
-text \<open>Here the @{text "cases"} or @{text "induct"} rules produced by
-  the @{command inductive} package coincide with the expected
-  elimination rules for Natural Deduction.  Already in the original
-  article by Gerhard Gentzen @{cite "Gentzen:1935"} there is a hint that
-  each connective can be characterized by its introductions, and the
-  elimination can be constructed systematically.\<close>
+text \<open>Here the @{text "cases"} or @{text "induct"} rules produced by the
+  @{command inductive} package coincide with the expected elimination rules
+  for Natural Deduction. Already in the original article by Gerhard Gentzen
+  @{cite "Gentzen:1935"} there is a hint that each connective can be
+  characterized by its introductions, and the elimination can be constructed
+  systematically.\<close>
 
 
 section \<open>Recursive functions \label{sec:recursion}\<close>
@@ -288,101 +290,98 @@
 
   \begin{description}
 
-  \item @{command (HOL) "primrec"} defines primitive recursive
-  functions over datatypes (see also @{command_ref (HOL) datatype}).
-  The given @{text equations} specify reduction rules that are produced
-  by instantiating the generic combinator for primitive recursion that
-  is available for each datatype.
+  \item @{command (HOL) "primrec"} defines primitive recursive functions
+  over datatypes (see also @{command_ref (HOL) datatype}). The given @{text
+  equations} specify reduction rules that are produced by instantiating the
+  generic combinator for primitive recursion that is available for each
+  datatype.
 
   Each equation needs to be of the form:
 
   @{text [display] "f x\<^sub>1 \<dots> x\<^sub>m (C y\<^sub>1 \<dots> y\<^sub>k) z\<^sub>1 \<dots> z\<^sub>n = rhs"}
 
-  such that @{text C} is a datatype constructor, @{text rhs} contains
-  only the free variables on the left-hand side (or from the context),
-  and all recursive occurrences of @{text "f"} in @{text "rhs"} are of
-  the form @{text "f \<dots> y\<^sub>i \<dots>"} for some @{text i}.  At most one
-  reduction rule for each constructor can be given.  The order does
-  not matter.  For missing constructors, the function is defined to
-  return a default value, but this equation is made difficult to
-  access for users.
-
-  The reduction rules are declared as @{attribute simp} by default,
-  which enables standard proof methods like @{method simp} and
-  @{method auto} to normalize expressions of @{text "f"} applied to
-  datatype constructions, by simulating symbolic computation via
-  rewriting.
-
-  \item @{command (HOL) "function"} defines functions by general
-  wellfounded recursion. A detailed description with examples can be
-  found in @{cite "isabelle-function"}. The function is specified by a
-  set of (possibly conditional) recursive equations with arbitrary
-  pattern matching. The command generates proof obligations for the
-  completeness and the compatibility of patterns.
+  such that @{text C} is a datatype constructor, @{text rhs} contains only
+  the free variables on the left-hand side (or from the context), and all
+  recursive occurrences of @{text "f"} in @{text "rhs"} are of the form
+  @{text "f \<dots> y\<^sub>i \<dots>"} for some @{text i}. At most one reduction rule for
+  each constructor can be given. The order does not matter. For missing
+  constructors, the function is defined to return a default value, but this
+  equation is made difficult to access for users.
+
+  The reduction rules are declared as @{attribute simp} by default, which
+  enables standard proof methods like @{method simp} and @{method auto} to
+  normalize expressions of @{text "f"} applied to datatype constructions, by
+  simulating symbolic computation via rewriting.
+
+  \item @{command (HOL) "function"} defines functions by general wellfounded
+  recursion. A detailed description with examples can be found in @{cite
+  "isabelle-function"}. The function is specified by a set of (possibly
+  conditional) recursive equations with arbitrary pattern matching. The
+  command generates proof obligations for the completeness and the
+  compatibility of patterns.
 
   The defined function is considered partial, and the resulting
-  simplification rules (named @{text "f.psimps"}) and induction rule
-  (named @{text "f.pinduct"}) are guarded by a generated domain
-  predicate @{text "f_dom"}. The @{command (HOL) "termination"}
-  command can then be used to establish that the function is total.
-
-  \item @{command (HOL) "fun"} is a shorthand notation for ``@{command
-  (HOL) "function"}~@{text "(sequential)"}'', followed by automated
-  proof attempts regarding pattern matching and termination.  See
-  @{cite "isabelle-function"} for further details.
-
-  \item @{command (HOL) "termination"}~@{text f} commences a
-  termination proof for the previously defined function @{text f}.  If
-  this is omitted, the command refers to the most recent function
-  definition.  After the proof is closed, the recursive equations and
-  the induction principle is established.
-
-  \item @{command (HOL) "fun_cases"} generates specialized elimination
-  rules for function equations. It expects one or more function equations
-  and produces rules that eliminate the given equalities, following the cases
+  simplification rules (named @{text "f.psimps"}) and induction rule (named
+  @{text "f.pinduct"}) are guarded by a generated domain predicate @{text
+  "f_dom"}. The @{command (HOL) "termination"} command can then be used to
+  establish that the function is total.
+
+  \item @{command (HOL) "fun"} is a shorthand notation for ``@{command (HOL)
+  "function"}~@{text "(sequential)"}'', followed by automated proof attempts
+  regarding pattern matching and termination. See @{cite
+  "isabelle-function"} for further details.
+
+  \item @{command (HOL) "termination"}~@{text f} commences a termination
+  proof for the previously defined function @{text f}. If this is omitted,
+  the command refers to the most recent function definition. After the proof
+  is closed, the recursive equations and the induction principle are
+  established.
+
+  \item @{command (HOL) "fun_cases"} generates specialized elimination rules
+  for function equations. It expects one or more function equations and
+  produces rules that eliminate the given equalities, following the cases
   given in the function definition.
+
   \end{description}
 
   Recursive definitions introduced by the @{command (HOL) "function"}
-  command accommodate reasoning by induction (cf.\ @{method induct}):
-  rule @{text "f.induct"} refers to a specific induction rule, with
-  parameters named according to the user-specified equations. Cases
-  are numbered starting from 1.  For @{command (HOL) "primrec"}, the
-  induction principle coincides with structural recursion on the
-  datatype where the recursion is carried out.
-
-  The equations provided by these packages may be referred later as
-  theorem list @{text "f.simps"}, where @{text f} is the (collective)
-  name of the functions defined.  Individual equations may be named
-  explicitly as well.
-
-  The @{command (HOL) "function"} command accepts the following
-  options.
+  command accommodate reasoning by induction (cf.\ @{method induct}): rule
+  @{text "f.induct"} refers to a specific induction rule, with parameters
+  named according to the user-specified equations. Cases are numbered
+  starting from 1. For @{command (HOL) "primrec"}, the induction principle
+  coincides with structural recursion on the datatype where the recursion is
+  carried out.
+
+  The equations provided by these packages may later be referred to as the
+  theorem list @{text "f.simps"}, where @{text f} is the (collective) name of the
+  functions defined. Individual equations may be named explicitly as well.
+
+  The @{command (HOL) "function"} command accepts the following options.
 
   \begin{description}
 
   \item @{text sequential} enables a preprocessor which disambiguates
-  overlapping patterns by making them mutually disjoint.  Earlier
-  equations take precedence over later ones.  This allows to give the
-  specification in a format very similar to functional programming.
-  Note that the resulting simplification and induction rules
-  correspond to the transformed specification, not the one given
-  originally. This usually means that each equation given by the user
-  may result in several theorems.  Also note that this automatic
-  transformation only works for ML-style datatype patterns.
-
-  \item @{text domintros} enables the automated generation of
-  introduction rules for the domain predicate. While mostly not
-  needed, they can be helpful in some proofs about partial functions.
+  overlapping patterns by making them mutually disjoint. Earlier equations
+  take precedence over later ones. This allows the specification to be given in
+  a format very similar to functional programming. Note that the resulting
+  simplification and induction rules correspond to the transformed
+  specification, not the one given originally. This usually means that each
+  equation given by the user may result in several theorems. Also note that
+  this automatic transformation only works for ML-style datatype patterns.
+
+  \item @{text domintros} enables the automated generation of introduction
+  rules for the domain predicate. While mostly not needed, they can be
+  helpful in some proofs about partial functions.
 
   \end{description}
 \<close>
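+
+text \<open>As a small illustration, the Fibonacci function can be defined with
+  @{command (HOL) "fun"}; pattern completeness and termination are proved
+  automatically, the equations become @{text "fib.simps"}, and the induction
+  rule is @{text "fib.induct"} (the definition is purely illustrative):\<close>
+
+(*<*)experiment begin(*>*)
+fun fib :: "nat \<Rightarrow> nat"
+where
+  "fib 0 = 0"
+| "fib (Suc 0) = 1"
+| "fib (Suc (Suc n)) = fib n + fib (Suc n)"
+(*<*)end(*>*)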
 
+
 subsubsection \<open>Example: evaluation of expressions\<close>
 
-text \<open>Subsequently, we define mutual datatypes for arithmetic and
-  boolean expressions, and use @{command primrec} for evaluation
-  functions that follow the same recursive structure.\<close>
+text \<open>Subsequently, we define mutual datatypes for arithmetic and boolean
+  expressions, and use @{command primrec} for evaluation functions that
+  follow the same recursive structure.\<close>
 
 (*<*)experiment begin(*>*)
 datatype 'a aexp =
@@ -413,13 +412,13 @@
 
 text \<open>Since the value of an expression depends on the value of its
   variables, the functions @{const evala} and @{const evalb} take an
-  additional parameter, an \emph{environment} that maps variables to
-  their values.
-
-  \medskip Substitution on expressions can be defined similarly.  The
-  mapping @{text f} of type @{typ "'a \<Rightarrow> 'a aexp"} given as a
-  parameter is lifted canonically on the types @{typ "'a aexp"} and
-  @{typ "'a bexp"}, respectively.
+  additional parameter, an \emph{environment} that maps variables to their
+  values.
+
+  \medskip Substitution on expressions can be defined similarly. The mapping
+  @{text f} of type @{typ "'a \<Rightarrow> 'a aexp"} given as a parameter is lifted
+  canonically on the types @{typ "'a aexp"} and @{typ "'a bexp"},
+  respectively.
 \<close>
 
 primrec substa :: "('a \<Rightarrow> 'b aexp) \<Rightarrow> 'a aexp \<Rightarrow> 'b aexp"
@@ -434,10 +433,10 @@
 | "substb f (And b1 b2) = And (substb f b1) (substb f b2)"
 | "substb f (Neg b) = Neg (substb f b)"
 
-text \<open>In textbooks about semantics one often finds substitution
-  theorems, which express the relationship between substitution and
-  evaluation.  For @{typ "'a aexp"} and @{typ "'a bexp"}, we can prove
-  such a theorem by mutual induction, followed by simplification.
+text \<open>In textbooks about semantics one often finds substitution theorems,
+  which express the relationship between substitution and evaluation. For
+  @{typ "'a aexp"} and @{typ "'a bexp"}, we can prove such a theorem by
+  mutual induction, followed by simplification.
 \<close>
 
 lemma subst_one:
@@ -487,9 +486,8 @@
 
 subsubsection \<open>Example: a map function for infinitely branching trees\<close>
 
-text \<open>Defining functions on infinitely branching datatypes by
-  primitive recursion is just as easy.
-\<close>
+text \<open>Defining functions on infinitely branching datatypes by primitive
+  recursion is just as easy.\<close>
 
 (*<*)experiment begin(*>*)
 datatype 'a tree = Atom 'a | Branch "nat \<Rightarrow> 'a tree"
@@ -499,11 +497,12 @@
   "map_tree f (Atom a) = Atom (f a)"
 | "map_tree f (Branch ts) = Branch (\<lambda>x. map_tree f (ts x))"
 
-text \<open>Note that all occurrences of functions such as @{text ts}
-  above must be applied to an argument.  In particular, @{term
-  "map_tree f \<circ> ts"} is not allowed here.\<close>
-
-text \<open>Here is a simple composition lemma for @{term map_tree}:\<close>
+text \<open>Note that all occurrences of functions such as @{text ts} above must
+  be applied to an argument. In particular, @{term "map_tree f \<circ> ts"} is not
+  allowed here.
+
+  \medskip Here is a simple composition lemma for @{term map_tree}:
+\<close>
 
 lemma "map_tree g (map_tree f t) = map_tree (g \<circ> f) t"
   by (induct t) simp_all
@@ -645,7 +644,6 @@
   definitions.
 
   \end{description}
-
 \<close>
 
 
@@ -701,6 +699,80 @@
 \<close>
 
 
+section \<open>Adhoc overloading of constants\<close>
+
+text \<open>
+  \begin{tabular}{rcll}
+  @{command_def "adhoc_overloading"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+  @{command_def "no_adhoc_overloading"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
+  @{attribute_def "show_variants"} & : & @{text "attribute"} & default @{text false} \\
+  \end{tabular}
+
+  \medskip
+
+  Adhoc overloading allows a constant to be overloaded depending on its
+  type. Typically this involves the introduction of an
+  uninterpreted constant (used for input and output) and the addition
+  of some variants (used internally). For examples see
+  @{file "~~/src/HOL/ex/Adhoc_Overloading_Examples.thy"} and
+  @{file "~~/src/HOL/Library/Monad_Syntax.thy"}.
+
+  @{rail \<open>
+    (@@{command adhoc_overloading} | @@{command no_adhoc_overloading})
+      (@{syntax nameref} (@{syntax term} + ) + @'and')
+  \<close>}
+
+  \begin{description}
+
+  \item @{command "adhoc_overloading"}~@{text "c v\<^sub>1 ... v\<^sub>n"}
+  associates variants with an existing constant.
+
+  \item @{command "no_adhoc_overloading"} is similar to
+  @{command "adhoc_overloading"}, but removes the specified variants
+  from the present context.
+
+  \item @{attribute "show_variants"} controls printing of variants
+  of overloaded constants. If enabled, the internally used variants
+  are printed instead of their respective overloaded constants. This
+  is occasionally useful to check whether the system agrees with a
+  user's expectations about derived variants.
+
+  \end{description}
+\<close>
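+
+text \<open>For instance, an uninterpreted constant can be associated with
+  type-specific variants roughly as follows (the constant and variants are
+  purely illustrative; the overloading is removed again right away):\<close>
+
+consts vplus :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"
+
+adhoc_overloading
+  vplus plus sup
+
+no_adhoc_overloading
+  vplus plus sup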
+
+
+section \<open>Definition by specification \label{sec:hol-specification}\<close>
+
+text \<open>
+  \begin{matharray}{rcl}
+    @{command_def (HOL) "specification"} & : & @{text "theory \<rightarrow> proof(prove)"} \\
+  \end{matharray}
+
+  @{rail \<open>
+    @@{command (HOL) specification} '(' (decl +) ')' \<newline>
+      (@{syntax thmdecl}? @{syntax prop} +)
+    ;
+    decl: (@{syntax name} ':')? @{syntax term} ('(' @'overloaded' ')')?
+  \<close>}
+
+  \begin{description}
+
+  \item @{command (HOL) "specification"}~@{text "decls \<phi>"} sets up a
+  goal stating the existence of terms with the properties specified to
+  hold for the constants given in @{text decls}.  After finishing the
+  proof, the theory will be augmented with definitions for the given
+  constants, as well as with theorems stating the properties for these
+  constants.
+
+  @{text decl} declares a constant to be defined by the
+  specification given.  The definition for the constant @{text c} is
+  bound to the name @{text c_def} unless a theorem name is given in
+  the declaration.  Overloaded constants should be declared as such.
+
+  \end{description}
+\<close>
+
+
 section \<open>Old-style datatypes \label{sec:hol-datatype}\<close>
 
 text \<open>
@@ -745,9 +817,8 @@
 
 subsubsection \<open>Examples\<close>
 
-text \<open>We define a type of finite sequences, with slightly different
-  names than the existing @{typ "'a list"} that is already in @{theory
-  Main}:\<close>
+text \<open>We define a type of finite sequences, with slightly different names
+  than the existing @{typ "'a list"} that is already in @{theory Main}:\<close>
 
 (*<*)experiment begin(*>*)
 datatype 'a seq = Empty | Seq 'a "'a seq"
@@ -902,33 +973,30 @@
 
 subsection \<open>Record operations\<close>
 
-text \<open>
-  Any record definition of the form presented above produces certain
-  standard operations.  Selectors and updates are provided for any
-  field, including the improper one ``@{text more}''.  There are also
-  cumulative record constructor functions.  To simplify the
-  presentation below, we assume for now that @{text "(\<alpha>\<^sub>1, \<dots>,
-  \<alpha>\<^sub>m) t"} is a root record with fields @{text "c\<^sub>1 ::
-  \<sigma>\<^sub>1, \<dots>, c\<^sub>n :: \<sigma>\<^sub>n"}.
-
-  \medskip \textbf{Selectors} and \textbf{updates} are available for
-  any field (including ``@{text more}''):
+text \<open>Any record definition of the form presented above produces certain
+  standard operations. Selectors and updates are provided for any field,
+  including the improper one ``@{text more}''. There are also cumulative
+  record constructor functions. To simplify the presentation below, we
+  assume for now that @{text "(\<alpha>\<^sub>1, \<dots>, \<alpha>\<^sub>m) t"} is a root record with fields
+  @{text "c\<^sub>1 :: \<sigma>\<^sub>1, \<dots>, c\<^sub>n :: \<sigma>\<^sub>n"}.
+
+  \medskip \textbf{Selectors} and \textbf{updates} are available for any
+  field (including ``@{text more}''):
 
   \begin{matharray}{lll}
     @{text "c\<^sub>i"} & @{text "::"} & @{text "\<lparr>\<^vec>c :: \<^vec>\<sigma>, \<dots> :: \<zeta>\<rparr> \<Rightarrow> \<sigma>\<^sub>i"} \\
     @{text "c\<^sub>i_update"} & @{text "::"} & @{text "\<sigma>\<^sub>i \<Rightarrow> \<lparr>\<^vec>c :: \<^vec>\<sigma>, \<dots> :: \<zeta>\<rparr> \<Rightarrow> \<lparr>\<^vec>c :: \<^vec>\<sigma>, \<dots> :: \<zeta>\<rparr>"} \\
   \end{matharray}
 
-  There is special syntax for application of updates: @{text "r\<lparr>x :=
-  a\<rparr>"} abbreviates term @{text "x_update a r"}.  Further notation for
-  repeated updates is also available: @{text "r\<lparr>x := a\<rparr>\<lparr>y := b\<rparr>\<lparr>z :=
-  c\<rparr>"} may be written @{text "r\<lparr>x := a, y := b, z := c\<rparr>"}.  Note that
-  because of postfix notation the order of fields shown here is
-  reverse than in the actual term.  Since repeated updates are just
-  function applications, fields may be freely permuted in @{text "\<lparr>x
-  := a, y := b, z := c\<rparr>"}, as far as logical equality is concerned.
-  Thus commutativity of independent updates can be proven within the
-  logic for any two fields, but not as a general theorem.
+  There is special syntax for application of updates: @{text "r\<lparr>x := a\<rparr>"}
+  abbreviates term @{text "x_update a r"}. Further notation for repeated
+  updates is also available: @{text "r\<lparr>x := a\<rparr>\<lparr>y := b\<rparr>\<lparr>z := c\<rparr>"} may be
+  written @{text "r\<lparr>x := a, y := b, z := c\<rparr>"}. Note that because of postfix
+  notation the order of fields shown here is the reverse of that in the actual
+  term. Since repeated updates are just function applications, fields may be
+  freely permuted in @{text "\<lparr>x := a, y := b, z := c\<rparr>"}, as far as logical
+  equality is concerned. Thus commutativity of independent updates can be
+  proven within the logic for any two fields, but not as a general theorem.
 
   \medskip The \textbf{make} operation provides a cumulative record
   constructor function:
@@ -937,15 +1005,14 @@
     @{text "t.make"} & @{text "::"} & @{text "\<sigma>\<^sub>1 \<Rightarrow> \<dots> \<sigma>\<^sub>n \<Rightarrow> \<lparr>\<^vec>c :: \<^vec>\<sigma>\<rparr>"} \\
   \end{matharray}
 
-  \medskip We now reconsider the case of non-root records, which are
-  derived of some parent.  In general, the latter may depend on
-  another parent as well, resulting in a list of \emph{ancestor
-  records}.  Appending the lists of fields of all ancestors results in
-  a certain field prefix.  The record package automatically takes care
-  of this by lifting operations over this context of ancestor fields.
-  Assuming that @{text "(\<alpha>\<^sub>1, \<dots>, \<alpha>\<^sub>m) t"} has ancestor
-  fields @{text "b\<^sub>1 :: \<rho>\<^sub>1, \<dots>, b\<^sub>k :: \<rho>\<^sub>k"},
-  the above record operations will get the following types:
+  \medskip We now reconsider the case of non-root records, which are derived
+  from some parent. In general, the latter may depend on another parent as
+  well, resulting in a list of \emph{ancestor records}. Appending the lists
+  of fields of all ancestors results in a certain field prefix. The record
+  package automatically takes care of this by lifting operations over this
+  context of ancestor fields. Assuming that @{text "(\<alpha>\<^sub>1, \<dots>, \<alpha>\<^sub>m) t"} has
+  ancestor fields @{text "b\<^sub>1 :: \<rho>\<^sub>1, \<dots>, b\<^sub>k :: \<rho>\<^sub>k"}, the above record
+  operations will get the following types:
 
   \medskip
   \begin{tabular}{lll}
@@ -959,11 +1026,11 @@
   \medskip
 
   \noindent Some further operations address the extension aspect of a
-  derived record scheme specifically: @{text "t.fields"} produces a
-  record fragment consisting of exactly the new fields introduced here
-  (the result may serve as a more part elsewhere); @{text "t.extend"}
-  takes a fixed record and adds a given more part; @{text
-  "t.truncate"} restricts a record scheme to a fixed record.
+  derived record scheme specifically: @{text "t.fields"} produces a record
+  fragment consisting of exactly the new fields introduced here (the result
+  may serve as a more part elsewhere); @{text "t.extend"} takes a fixed
+  record and adds a given more part; @{text "t.truncate"} restricts a record
+  scheme to a fixed record.
 
   \medskip
   \begin{tabular}{lll}
@@ -974,8 +1041,8 @@
   \end{tabular}
   \medskip
 
-  \noindent Note that @{text "t.make"} and @{text "t.fields"} coincide
-  for root records.
+  \noindent Note that @{text "t.make"} and @{text "t.fields"} coincide for
+  root records.
 \<close>
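+
+text \<open>As a small illustration (with purely illustrative field names),
+  commutativity of two independent updates can be proved by case analysis
+  on the record followed by simplification:\<close>
+
+record point =
+  xpos :: int
+  ypos :: int
+
+lemma "r\<lparr>xpos := a, ypos := b\<rparr> = r\<lparr>ypos := b, xpos := a\<rparr>"
+  by (cases r) simp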
 
 
@@ -989,41 +1056,40 @@
 
   \begin{enumerate}
 
-  \item Standard conversions for selectors or updates applied to
-  record constructor terms are made part of the default Simplifier
-  context; thus proofs by reduction of basic operations merely require
-  the @{method simp} method without further arguments.  These rules
-  are available as @{text "t.simps"}, too.
-
-  \item Selectors applied to updated records are automatically reduced
-  by an internal simplification procedure, which is also part of the
-  standard Simplifier setup.
-
-  \item Inject equations of a form analogous to @{prop "(x, y) = (x',
-  y') \<equiv> x = x' \<and> y = y'"} are declared to the Simplifier and Classical
-  Reasoner as @{attribute iff} rules.  These rules are available as
-  @{text "t.iffs"}.
-
-  \item The introduction rule for record equality analogous to @{text
-  "x r = x r' \<Longrightarrow> y r = y r' \<dots> \<Longrightarrow> r = r'"} is declared to the Simplifier,
-  and as the basic rule context as ``@{attribute intro}@{text "?"}''.
-  The rule is called @{text "t.equality"}.
+  \item Standard conversions for selectors or updates applied to record
+  constructor terms are made part of the default Simplifier context; thus
+  proofs by reduction of basic operations merely require the @{method simp}
+  method without further arguments. These rules are available as @{text
+  "t.simps"}, too.
+
+  \item Selectors applied to updated records are automatically reduced by an
+  internal simplification procedure, which is also part of the standard
+  Simplifier setup.
+
+  \item Inject equations of a form analogous to @{prop "(x, y) = (x', y') \<equiv>
+  x = x' \<and> y = y'"} are declared to the Simplifier and Classical Reasoner as
+  @{attribute iff} rules. These rules are available as @{text "t.iffs"}.
+
+  \item The introduction rule for record equality analogous to @{text "x r =
+  x r' \<Longrightarrow> y r = y r' \<dots> \<Longrightarrow> r = r'"} is declared to the Simplifier, and as the
+  basic rule context as ``@{attribute intro}@{text "?"}''. The rule is
+  called @{text "t.equality"}.
 
   \item Representations of arbitrary record expressions as canonical
   constructor terms are provided both in @{method cases} and @{method
   induct} format (cf.\ the generic proof methods of the same name,
-  \secref{sec:cases-induct}).  Several variations are available, for
-  fixed records, record schemes, more parts etc.
-
-  The generic proof methods are sufficiently smart to pick the most
-  sensible rule according to the type of the indicated record
-  expression: users just need to apply something like ``@{text "(cases
-  r)"}'' to a certain proof problem.
-
-  \item The derived record operations @{text "t.make"}, @{text
-  "t.fields"}, @{text "t.extend"}, @{text "t.truncate"} are \emph{not}
-  treated automatically, but usually need to be expanded by hand,
-  using the collective fact @{text "t.defs"}.
+  \secref{sec:cases-induct}). Several variations are available, for fixed
+  records, record schemes, more parts etc.
+
+  The generic proof methods are sufficiently smart to pick the most sensible
+  rule according to the type of the indicated record expression: users just
+  need to apply something like ``@{text "(cases r)"}'' to a certain proof
+  problem.
+
+  \item The derived record operations @{text "t.make"}, @{text "t.fields"},
+  @{text "t.extend"}, @{text "t.truncate"} are \emph{not} treated
+  automatically, but usually need to be expanded by hand, using the
+  collective fact @{text "t.defs"}.
 
   \end{enumerate}
 \<close>
@@ -1033,6 +1099,7 @@
 
 text \<open>See @{file "~~/src/HOL/ex/Records.thy"}, for example.\<close>
 
+
 section \<open>Typedef axiomatization \label{sec:hol-typedef}\<close>
 
 text \<open>
@@ -1187,30 +1254,28 @@
 
   \begin{description}
 
-  \item @{command (HOL) "functor"}~@{text "prefix: m"} allows to
-  prove and register properties about the functorial structure of type
-  constructors.  These properties then can be used by other packages
-  to deal with those type constructors in certain type constructions.
-  Characteristic theorems are noted in the current local theory.  By
-  default, they are prefixed with the base name of the type
-  constructor, an explicit prefix can be given alternatively.
+  \item @{command (HOL) "functor"}~@{text "prefix: m"} allows to prove and
+  register properties about the functorial structure of type constructors.
+  These properties then can be used by other packages to deal with those
+  type constructors in certain type constructions. Characteristic theorems
+  are noted in the current local theory. By default, they are prefixed with
+  the base name of the type constructor, an explicit prefix can be given
+  alternatively.
 
   The given term @{text "m"} is considered as \emph{mapper} for the
-  corresponding type constructor and must conform to the following
-  type pattern:
+  corresponding type constructor and must conform to the following type
+  pattern:
 
   \begin{matharray}{lll}
     @{text "m"} & @{text "::"} &
       @{text "\<sigma>\<^sub>1 \<Rightarrow> \<dots> \<sigma>\<^sub>k \<Rightarrow> (\<^vec>\<alpha>\<^sub>n) t \<Rightarrow> (\<^vec>\<beta>\<^sub>n) t"} \\
   \end{matharray}
 
-  \noindent where @{text t} is the type constructor, @{text
-  "\<^vec>\<alpha>\<^sub>n"} and @{text "\<^vec>\<beta>\<^sub>n"} are distinct
-  type variables free in the local theory and @{text "\<sigma>\<^sub>1"},
-  \ldots, @{text "\<sigma>\<^sub>k"} is a subsequence of @{text "\<alpha>\<^sub>1 \<Rightarrow>
-  \<beta>\<^sub>1"}, @{text "\<beta>\<^sub>1 \<Rightarrow> \<alpha>\<^sub>1"}, \ldots,
-  @{text "\<alpha>\<^sub>n \<Rightarrow> \<beta>\<^sub>n"}, @{text "\<beta>\<^sub>n \<Rightarrow>
-  \<alpha>\<^sub>n"}.
+  \noindent where @{text t} is the type constructor, @{text "\<^vec>\<alpha>\<^sub>n"}
+  and @{text "\<^vec>\<beta>\<^sub>n"} are distinct type variables free in the local
+  theory and @{text "\<sigma>\<^sub>1"}, \ldots, @{text "\<sigma>\<^sub>k"} is a subsequence of @{text
+  "\<alpha>\<^sub>1 \<Rightarrow> \<beta>\<^sub>1"}, @{text "\<beta>\<^sub>1 \<Rightarrow> \<alpha>\<^sub>1"}, \ldots, @{text "\<alpha>\<^sub>n \<Rightarrow> \<beta>\<^sub>n"}, @{text
+  "\<beta>\<^sub>n \<Rightarrow> \<alpha>\<^sub>n"}.
 
   \end{description}
 \<close>
@@ -1240,29 +1305,28 @@
     @{attribute_def (HOL) "quot_preserve"} & : & @{text attribute} \\
   \end{matharray}
 
-  The quotient package defines a new quotient type given a raw type
-  and a partial equivalence relation. The package also historically 
-  includes automation for transporting definitions and theorems. 
-  But most of this automation was superseded by the Lifting and Transfer
-  packages. The user should consider using these two new packages for
-  lifting definitions and transporting theorems.
+  The quotient package defines a new quotient type given a raw type and a
+  partial equivalence relation. The package also historically includes
+  automation for transporting definitions and theorems. But most of this
+  automation was superseded by the Lifting (\secref{sec:lifting}) and
+  Transfer (\secref{sec:transfer}) packages. The user should consider using
+  these two new packages for lifting definitions and transporting theorems.
 
   @{rail \<open>
-    @@{command (HOL) quotient_type} (spec)
+    @@{command (HOL) quotient_type} @{syntax typespec} @{syntax mixfix}? '='
+      quot_type \<newline> quot_morphisms? quot_parametric?
+    ;
+    quot_type: @{syntax type} '/' ('partial' ':')? @{syntax term}
     ;
-    spec: @{syntax typespec} @{syntax mixfix}? '=' \<newline>
-     @{syntax type} '/' ('partial' ':')? @{syntax term} \<newline>
-     (@'morphisms' @{syntax name} @{syntax name})? (@'parametric' @{syntax thmref})?
-  \<close>}
-
-  @{rail \<open>
+    quot_morphisms: @'morphisms' @{syntax name} @{syntax name}
+    ;
+    quot_parametric: @'parametric' @{syntax thmref}
+    ;
     @@{command (HOL) quotient_definition} constdecl? @{syntax thmdecl}? \<newline>
     @{syntax term} 'is' @{syntax term}
     ;
     constdecl: @{syntax name} ('::' @{syntax type})? @{syntax mixfix}?
-  \<close>}
-
-  @{rail \<open>
+    ;
     @@{method (HOL) lifting} @{syntax thmrefs}?
     ;
     @@{method (HOL) lifting_setup} @{syntax thmrefs}?
@@ -1270,175 +1334,92 @@
 
   \begin{description}
 
-  \item @{command (HOL) "quotient_type"} defines a new quotient type @{text \<tau>}. The
-  injection from a quotient type to a raw type is called @{text
+  \item @{command (HOL) "quotient_type"} defines a new quotient type @{text
+  \<tau>}. The injection from a quotient type to a raw type is called @{text
   rep_\<tau>}, its inverse @{text abs_\<tau>} unless explicit @{keyword (HOL)
-  "morphisms"} specification provides alternative names. @{command
-  (HOL) "quotient_type"} requires the user to prove that the relation
-  is an equivalence relation (predicate @{text equivp}), unless the
-  user specifies explicitly @{text partial} in which case the
-  obligation is @{text part_equivp}.  A quotient defined with @{text
-  partial} is weaker in the sense that less things can be proved
-  automatically.
+  "morphisms"} specification provides alternative names. @{command (HOL)
+  "quotient_type"} requires the user to prove that the relation is an
+  equivalence relation (predicate @{text equivp}), unless the user specifies
+  explicitly @{text partial} in which case the obligation is @{text
+  part_equivp}. A quotient defined with @{text partial} is weaker in the
+  sense that fewer things can be proved automatically.
 
   The command internally proves a Quotient theorem and sets up the Lifting
-  package by the command @{command (HOL) setup_lifting}. Thus the Lifting 
+  package by the command @{command (HOL) setup_lifting}. Thus the Lifting
   and Transfer packages can be used also with quotient types defined by
-  @{command (HOL) "quotient_type"} without any extra set-up. The parametricity 
-  theorem for the equivalence relation R can be provided as an extra argument 
-  of the command and is passed to the corresponding internal call of @{command (HOL) setup_lifting}.
-  This theorem allows the Lifting package to generate a stronger transfer rule for equality.
-  
+  @{command (HOL) "quotient_type"} without any extra set-up. The
+  parametricity theorem for the equivalence relation R can be provided as an
+  extra argument of the command and is passed to the corresponding internal
+  call of @{command (HOL) setup_lifting}. This theorem allows the Lifting
+  package to generate a stronger transfer rule for equality.
+
   \end{description}
 
-  The most of the rest of the package was superseded by the Lifting and Transfer
-  packages. The user should consider using these two new packages for
-  lifting definitions and transporting theorems.
-
-  \begin{description}  
-
-  \item @{command (HOL) "quotient_definition"} defines a constant on
-  the quotient type.
-
-  \item @{command (HOL) "print_quotmapsQ3"} prints quotient map
-  functions.
+  Most of the rest of the package was superseded by the Lifting
+  (\secref{sec:lifting}) and Transfer (\secref{sec:transfer}) packages.
+
+  \begin{description}
+
+  \item @{command (HOL) "quotient_definition"} defines a constant on the
+  quotient type.
+
+  \item @{command (HOL) "print_quotmapsQ3"} prints quotient map functions.
 
   \item @{command (HOL) "print_quotientsQ3"} prints quotients.
 
   \item @{command (HOL) "print_quotconsts"} prints quotient constants.
 
   \item @{method (HOL) "lifting"} and @{method (HOL) "lifting_setup"}
-    methods match the current goal with the given raw theorem to be
-    lifted producing three new subgoals: regularization, injection and
-    cleaning subgoals. @{method (HOL) "lifting"} tries to apply the
-    heuristics for automatically solving these three subgoals and
-    leaves only the subgoals unsolved by the heuristics to the user as
-    opposed to @{method (HOL) "lifting_setup"} which leaves the three
-    subgoals unsolved.
-
-  \item @{method (HOL) "descending"} and @{method (HOL)
-    "descending_setup"} try to guess a raw statement that would lift
-    to the current subgoal. Such statement is assumed as a new subgoal
-    and @{method (HOL) "descending"} continues in the same way as
-    @{method (HOL) "lifting"} does. @{method (HOL) "descending"} tries
-    to solve the arising regularization, injection and cleaning
-    subgoals with the analogous method @{method (HOL)
-    "descending_setup"} which leaves the four unsolved subgoals.
+  methods match the current goal with the given raw theorem to be lifted
+  producing three new subgoals: regularization, injection and cleaning
+  subgoals. @{method (HOL) "lifting"} tries to apply the heuristics for
+  automatically solving these three subgoals and leaves only the subgoals
+  unsolved by the heuristics to the user as opposed to @{method (HOL)
+  "lifting_setup"} which leaves the three subgoals unsolved.
+
+  \item @{method (HOL) "descending"} and @{method (HOL) "descending_setup"}
+  try to guess a raw statement that would lift to the current subgoal. Such
+  a statement is assumed as a new subgoal and @{method (HOL) "descending"}
+  continues in the same way as @{method (HOL) "lifting"} does. @{method
+  (HOL) "descending"} tries to solve the arising regularization, injection
+  and cleaning subgoals, whereas the analogous method @{method (HOL)
+  "descending_setup"} leaves all four subgoals unsolved.
 
   \item @{method (HOL) "partiality_descending"} finds the regularized
-    theorem that would lift to the current subgoal, lifts it and
-    leaves as a subgoal. This method can be used with partial
-    equivalence quotients where the non regularized statements would
-    not be true. @{method (HOL) "partiality_descending_setup"} leaves
-    the injection and cleaning subgoals unchanged.
-
-  \item @{method (HOL) "regularize"} applies the regularization
-    heuristics to the current subgoal.
-
-  \item @{method (HOL) "injection"} applies the injection heuristics
-    to the current goal using the stored quotient respectfulness
-    theorems.
-
-  \item @{method (HOL) "cleaning"} applies the injection cleaning
-    heuristics to the current subgoal using the stored quotient
-    preservation theorems.
-
-  \item @{attribute (HOL) quot_lifted} attribute tries to
-    automatically transport the theorem to the quotient type.
-    The attribute uses all the defined quotients types and quotient
-    constants often producing undesired results or theorems that
-    cannot be lifted.
-
-  \item @{attribute (HOL) quot_respect} and @{attribute (HOL)
-    quot_preserve} attributes declare a theorem as a respectfulness
-    and preservation theorem respectively.  These are stored in the
-    local theory store and used by the @{method (HOL) "injection"}
-    and @{method (HOL) "cleaning"} methods respectively.
-
-  \item @{attribute (HOL) quot_thm} declares that a certain theorem
-    is a quotient extension theorem. Quotient extension theorems
-    allow for quotienting inside container types. Given a polymorphic
-    type that serves as a container, a map function defined for this
-    container using @{command (HOL) "functor"} and a relation
-    map defined for for the container type, the quotient extension
-    theorem should be @{term "Quotient3 R Abs Rep \<Longrightarrow> Quotient3
-    (rel_map R) (map Abs) (map Rep)"}. Quotient extension theorems
-    are stored in a database and are used all the steps of lifting
-    theorems.
-
-  \end{description}
-\<close>
-
-
-section \<open>Definition by specification \label{sec:hol-specification}\<close>
-
-text \<open>
-  \begin{matharray}{rcl}
-    @{command_def (HOL) "specification"} & : & @{text "theory \<rightarrow> proof(prove)"} \\
-  \end{matharray}
-
-  @{rail \<open>
-    @@{command (HOL) specification} '(' (decl +) ')' \<newline>
-      (@{syntax thmdecl}? @{syntax prop} +)
-    ;
-    decl: (@{syntax name} ':')? @{syntax term} ('(' @'overloaded' ')')?
-  \<close>}
-
-  \begin{description}
-
-  \item @{command (HOL) "specification"}~@{text "decls \<phi>"} sets up a
-  goal stating the existence of terms with the properties specified to
-  hold for the constants given in @{text decls}.  After finishing the
-  proof, the theory will be augmented with definitions for the given
-  constants, as well as with theorems stating the properties for these
-  constants.
-
-  @{text decl} declares a constant to be defined by the
-  specification given.  The definition for the constant @{text c} is
-  bound to the name @{text c_def} unless a theorem name is given in
-  the declaration.  Overloaded constants should be declared as such.
-
-  \end{description}
-\<close>
-
-
-section \<open>Adhoc overloading of constants\<close>
-
-text \<open>
-  \begin{tabular}{rcll}
-  @{command_def "adhoc_overloading"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
-  @{command_def "no_adhoc_overloading"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
-  @{attribute_def "show_variants"} & : & @{text "attribute"} & default @{text false} \\
-  \end{tabular}
-
-  \medskip
-
-  Adhoc overloading allows to overload a constant depending on
-  its type. Typically this involves the introduction of an
-  uninterpreted constant (used for input and output) and the addition
-  of some variants (used internally). For examples see
-  @{file "~~/src/HOL/ex/Adhoc_Overloading_Examples.thy"} and
-  @{file "~~/src/HOL/Library/Monad_Syntax.thy"}.
-
-  @{rail \<open>
-    (@@{command adhoc_overloading} | @@{command no_adhoc_overloading})
-      (@{syntax nameref} (@{syntax term} + ) + @'and')
-  \<close>}
-
-  \begin{description}
-
-  \item @{command "adhoc_overloading"}~@{text "c v\<^sub>1 ... v\<^sub>n"}
-  associates variants with an existing constant.
-
-  \item @{command "no_adhoc_overloading"} is similar to
-  @{command "adhoc_overloading"}, but removes the specified variants
-  from the present context.
-  
-  \item @{attribute "show_variants"} controls printing of variants
-  of overloaded constants. If enabled, the internally used variants
-  are printed instead of their respective overloaded constants. This
-  is occasionally useful to check whether the system agrees with a
-  user's expectations about derived variants.
+  theorem that would lift to the current subgoal, lifts it and leaves it as
+  a subgoal. This method can be used with partial equivalence quotients
+  where the non-regularized statements would not be true. @{method (HOL)
+  "partiality_descending_setup"} leaves the injection and cleaning subgoals
+  unchanged.
+
+  \item @{method (HOL) "regularize"} applies the regularization heuristics
+  to the current subgoal.
+
+  \item @{method (HOL) "injection"} applies the injection heuristics to the
+  current goal using the stored quotient respectfulness theorems.
+
+  \item @{method (HOL) "cleaning"} applies the injection cleaning heuristics
+  to the current subgoal using the stored quotient preservation theorems.
+
+  \item @{attribute (HOL) quot_lifted} attribute tries to automatically
+  transport the theorem to the quotient type. The attribute uses all the
+  defined quotient types and quotient constants, often producing undesired
+  results or theorems that cannot be lifted.
+
+  \item @{attribute (HOL) quot_respect} and @{attribute (HOL) quot_preserve}
+  attributes declare a theorem as a respectfulness and preservation theorem
+  respectively. These are stored in the local theory store and used by the
+  @{method (HOL) "injection"} and @{method (HOL) "cleaning"} methods
+  respectively.
+
+  \item @{attribute (HOL) quot_thm} declares that a certain theorem is a
+  quotient extension theorem. Quotient extension theorems allow for
+  quotienting inside container types. Given a polymorphic type that serves
+  as a container, a map function defined for this container using @{command
+  (HOL) "functor"} and a relation map defined for for the container type,
+  the quotient extension theorem should be @{term "Quotient3 R Abs Rep \<Longrightarrow>
+  Quotient3 (rel_map R) (map Abs) (map Rep)"}. Quotient extension theorems
+  are stored in a database and are used in all the steps of lifting theorems.
 
   \end{description}
 \<close>
@@ -1446,31 +1427,244 @@
 
 chapter \<open>Proof tools\<close>
 
-section \<open>Adhoc tuples\<close>
+
+section \<open>Lifting package \label{sec:lifting}\<close>
 
 text \<open>
   \begin{matharray}{rcl}
-    @{attribute_def (HOL) split_format}@{text "\<^sup>*"} & : & @{text attribute} \\
+    @{command_def (HOL) "setup_lifting"} & : & @{text "local_theory \<rightarrow> local_theory"}\\
+    @{command_def (HOL) "lift_definition"} & : & @{text "local_theory \<rightarrow> proof(prove)"}\\
+    @{command_def (HOL) "lifting_forget"} & : & @{text "local_theory \<rightarrow> local_theory"}\\
+    @{command_def (HOL) "lifting_update"} & : & @{text "local_theory \<rightarrow> local_theory"}\\
+    @{command_def (HOL) "print_quot_maps"} & : & @{text "context \<rightarrow>"}\\
+    @{command_def (HOL) "print_quotients"} & : & @{text "context \<rightarrow>"}\\
+    @{attribute_def (HOL) "quot_map"} & : & @{text attribute} \\
+    @{attribute_def (HOL) "relator_eq_onp"} & : & @{text attribute} \\
+    @{attribute_def (HOL) "relator_mono"} & : & @{text attribute} \\
+    @{attribute_def (HOL) "relator_distr"} & : & @{text attribute} \\
+    @{attribute_def (HOL) "quot_del"} & : & @{text attribute} \\
+    @{attribute_def (HOL) "lifting_restore"} & : & @{text attribute} \\
   \end{matharray}
 
+  The Lifting package allows users to lift terms of the raw type to the
+  abstract type, which is a necessary step in building a library for an
+  abstract type. Lifting defines a new constant by combining coercion
+  functions (@{term Abs} and @{term Rep}) with the raw term. It also proves
+  an appropriate transfer rule for the Transfer (\secref{sec:transfer})
+  package and, if possible, an equation for the code generator.
+
+  The Lifting package provides two main commands: @{command (HOL)
+  "setup_lifting"} for initializing the package to work with a new type, and
+  @{command (HOL) "lift_definition"} for lifting constants. The Lifting
+  package works with all four kinds of type abstraction: type copies,
+  subtypes, total quotients and partial quotients.
+
+  Theoretical background can be found in @{cite
+  "Huffman-Kuncar:2013:lifting_transfer"}.
+
   @{rail \<open>
-    @@{attribute (HOL) split_format} ('(' 'complete' ')')?
+    @@{command (HOL) setup_lifting} @{syntax thmref} @{syntax thmref}? \<newline>
+      (@'parametric' @{syntax thmref})?
+    ;
+    @@{command (HOL) lift_definition} ('(' 'code_dt' ')')? \<newline>
+      @{syntax name} '::' @{syntax type} @{syntax mixfix}? 'is' @{syntax term} \<newline>
+      (@'parametric' (@{syntax thmref}+))?
+    ;
+    @@{command (HOL) lifting_forget} @{syntax nameref}
+    ;
+    @@{command (HOL) lifting_update} @{syntax nameref}
+    ;
+    @@{attribute (HOL) lifting_restore} @{syntax thmref} (@{syntax thmref} @{syntax thmref})?
   \<close>}
 
   \begin{description}
 
-  \item @{attribute (HOL) split_format}\ @{text "(complete)"} causes
-  arguments in function applications to be represented canonically
-  according to their tuple type structure.
-
-  Note that this operation tends to invent funny names for new local
-  parameters introduced.
+  \item @{command (HOL) "setup_lifting"} Sets up the Lifting package to work
+  with a user-defined type. The command supports two modes. The first one is
+  a low-level mode in which the user must provide as the first argument of
+  @{command (HOL) "setup_lifting"} a quotient theorem @{term "Quotient R Abs
+  Rep T"}. The package configures a transfer rule for equality and a domain
+  transfer rule, and sets up the @{command_def (HOL) "lift_definition"}
+  command to work with the abstract type. An optional theorem @{term "reflp
+  R"}, which certifies that the equivalence relation R is total, can be
+  provided as a second argument. This allows the package to generate
+  stronger transfer rules. And finally, the parametricity theorem for R can
+  be provided as a third argument. This allows the package to generate a
+  stronger transfer rule for equality.
+
+  Users generally will not prove the @{text Quotient} theorem manually for
+  new types, as special commands exist to automate the process.
+
+  When a new subtype is defined by @{command (HOL) typedef}, @{command (HOL)
+  "lift_definition"} can be used in its second mode, where only the
+  type_definition theorem @{text "type_definition Rep Abs A"} is used as an
+  argument of the command. The command internally proves the corresponding
+  Quotient theorem and registers it with @{command (HOL) setup_lifting}
+  using its first mode.
+
+  For quotients, the command @{command (HOL) quotient_type} can be used. The
+  command defines a new quotient type and similarly to the previous case,
+  the corresponding Quotient theorem is proved and registered by @{command
+  (HOL) setup_lifting}.
+
+  The command @{command (HOL) "setup_lifting"} also sets up the code
+  generator for the new type. Later on, when a new constant is defined by
+  @{command (HOL) "lift_definition"}, the Lifting package proves and
+  registers a code equation (if there is one) for the new constant.
+
+  \item @{command (HOL) "lift_definition"} @{text "f :: \<tau>"} @{keyword (HOL)
+  "is"} @{text t} Defines a new function @{text f} with an abstract type
+  @{text \<tau>} in terms of a corresponding operation @{text t} on a
+  representation type. More formally, if @{text "t :: \<sigma>"}, then the command
+  builds a term @{text "F"} as a corresponding combination of abstraction
+  and representation functions such that @{text "F :: \<sigma> \<Rightarrow> \<tau>" } and defines
+  @{text f} as @{text "f \<equiv> F t"}. The term @{text t} does not necessarily
+  have to be a constant; it can be any term.
+
+  The command opens a proof environment and the user must discharge a
+  respectfulness proof obligation. For a type copy, i.e., a typedef with
+  @{text UNIV}, the obligation is discharged automatically. The proof goal
+  is presented in a user-friendly, readable form. A respectfulness theorem
+  in the standard format @{text f.rsp} and a transfer rule @{text
+  f.transfer} for the Transfer package are generated by the package.
+
+  The user can specify parametricity theorems for @{text t} after the
+  keyword @{keyword "parametric"}, which allows the command to generate
+  parametric transfer rules for @{text f}.
+
+  For each constant defined through trivial quotients (type copies or
+  subtypes) @{text f.rep_eq} is generated. The equation is a code
+  certificate that defines @{text f} using the representation function.
+
+  For each constant @{text f.abs_eq} is generated. The equation is
+  unconditional for total quotients. The equation defines @{text f} using
+  the abstraction function.
+
+  Integration with [@{attribute code} abstract]: For subtypes (e.g.,
+  corresponding to a datatype invariant, such as @{typ "'a dlist"}),
+  @{command (HOL) "lift_definition"} uses a code certificate theorem @{text
+  f.rep_eq} as a code equation. Because of the limitation of the code
+  generator, @{text f.rep_eq} cannot be used as a code equation if the
+  subtype occurs inside the result type rather than at the top level (e.g.,
+  function returning @{typ "'a dlist option"} vs. @{typ "'a dlist"}). In
+  this case, an extension of @{command (HOL) "lift_definition"} can be
+  invoked by specifying the flag @{text "code_dt"}. This extension enables
+  code execution through a series of internal type and lifting definitions if
+  the return type @{text "\<tau>"} meets the following inductive conditions:
+
+  \begin{description}
+
+  \item @{text "\<tau>"} is a type variable
+
+  \item @{text "\<tau> = \<tau>\<^sub>1 \<dots> \<tau>\<^sub>n \<kappa>"}, where @{text "\<kappa>"} is an abstract type
+  constructor and @{text "\<tau>\<^sub>1 \<dots> \<tau>\<^sub>n"} do not contain abstract types (i.e.,
+  @{typ "int dlist"} is allowed whereas @{typ "int dlist dlist"} is not)
+
+  \item @{text "\<tau> = \<tau>\<^sub>1 \<dots> \<tau>\<^sub>n \<kappa>"}, where @{text "\<kappa>"} is a type constructor that
+  was defined as a (co)datatype whose constructor argument types do not
+  contain either non-free datatypes or the function type.
+
+  \end{description}
+
+  Integration with [@{attribute code} equation]: For total quotients,
+  @{command (HOL) "lift_definition"} uses @{text f.abs_eq} as a code
+  equation.
+
+  \item @{command (HOL) lifting_forget} and @{command (HOL) lifting_update}
+  These two commands serve for storing and deleting the set-up of the
+  Lifting package and corresponding transfer rules defined by this package.
+  This is useful for hiding the type construction details of an abstract
+  type once the construction is finished, while still allowing additions to
+  this construction later when necessary.
+
+  Whenever the Lifting package is set up with a new abstract type @{text
+  "\<tau>"} by @{command_def (HOL) "lift_definition"}, the package defines a new
+  bundle that is called @{text "\<tau>.lifting"}. This bundle already includes
+  set-up for the Lifting package. The new transfer rules introduced by
+  @{command (HOL) "lift_definition"} can be stored in the bundle by the
+  command @{command (HOL) "lifting_update"} @{text "\<tau>.lifting"}.
+
+  The command @{command (HOL) "lifting_forget"} @{text "\<tau>.lifting"} deletes
+  set-up of the Lifting package for @{text \<tau>} and deletes all the transfer
+  rules that were introduced by @{command (HOL) "lift_definition"} using
+  @{text \<tau>} as an abstract type.
+
+  The stored set-up in a bundle can be reintroduced by the Isar commands for
+  including a bundle (@{command "include"}, @{keyword "includes"} and
+  @{command "including"}).
+
+  \item @{command (HOL) "print_quot_maps"} prints stored quotient map
+  theorems.
+
+  \item @{command (HOL) "print_quotients"} prints stored quotient theorems.
+
+  \item @{attribute (HOL) quot_map} registers a quotient map theorem, a
+  theorem showing how to ``lift'' quotients over type constructors. E.g.,
+  @{term "Quotient R Abs Rep T \<Longrightarrow> Quotient (rel_set R) (image Abs) (image
+  Rep) (rel_set T)"}. For examples see @{file "~~/src/HOL/Lifting_Set.thy"}
+  or @{file "~~/src/HOL/Lifting.thy"}. This property is proved automatically
+  if the involved type is a BNF without dead variables.
+
+  \item @{attribute (HOL) relator_eq_onp} registers a theorem that shows
+  that a relator applied to an equality restricted by a predicate @{term P}
+  (i.e., @{term "eq_onp P"}) is equal to a predicator applied to the @{term
+  P}. The combinator @{const eq_onp} is used for internal encoding of proper
+  subtypes. Such theorems allows the package to hide @{text eq_onp} from a
+  user in a user-readable form of a respectfulness theorem. For examples see
+  @{file "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
+  This property is proved automatically if the involved type is BNF without
+  dead variables.
+
+  \item @{attribute (HOL) "relator_mono"} registers a property describing a
+  monotonicity of a relator. E.g., @{term "A \<le> B \<Longrightarrow> rel_set A \<le> rel_set B"}.
+  This property is needed for proving a stronger transfer rule in
+  @{command_def (HOL) "lift_definition"} when a parametricity theorem for
+  the raw term is specified and also for the reflexivity prover. For
+  examples see @{file "~~/src/HOL/Lifting_Set.thy"} or @{file
+  "~~/src/HOL/Lifting.thy"}. This property is proved automatically if the
+  involved type is a BNF without dead variables.
+
+  \item @{attribute (HOL) "relator_distr"} registers a property describing a
+  distributivity of the relation composition and a relator. E.g., @{text
+  "rel_set R \<circ>\<circ> rel_set S = rel_set (R \<circ>\<circ> S)"}. This property is needed for
+  proving a stronger transfer rule in @{command_def (HOL) "lift_definition"}
+  when a parametricity theorem for the raw term is specified. When this
+  equality does not hold unconditionally (e.g., for the function type), the
+  user can specify each direction separately and also register multiple
+  theorems with different sets of assumptions. This attribute can be used
+  only after the monotonicity property has been registered by @{attribute
+  (HOL) "relator_mono"}. For examples see @{file
+  "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}. This
+  property is proved automatically if the involved type is a BNF without dead
+  variables.
+
+  \item @{attribute (HOL) quot_del} deletes a corresponding Quotient theorem
+  from the Lifting infrastructure and thus de-registers the corresponding
+  quotient. This effectively prevents @{command (HOL) lift_definition} from
+  doing any lifting for the corresponding type. This attribute is intended
+  for low-level manipulation of the set-up of the Lifting package;
+  @{command (HOL) lifting_forget} is preferred for normal usage.
+
+  \item @{attribute (HOL) lifting_restore} @{text "Quotient_thm pcr_def
+  pcr_cr_eq_thm"} registers the Quotient theorem @{text Quotient_thm} in the
+  Lifting infrastructure and thus sets up lifting for an abstract type
+  @{text \<tau>} (that is defined by @{text Quotient_thm}). Optional theorems
+  @{text pcr_def} and @{text pcr_cr_eq_thm} can be specified to register the
+  parametrized correspondence relation for @{text \<tau>}. E.g., for @{typ "'a
+  dlist"}, @{text pcr_def} is @{text "pcr_dlist A \<equiv> list_all2 A \<circ>\<circ>
+  cr_dlist"} and @{text pcr_cr_eq_thm} is @{text "pcr_dlist op= = op="}.
+  This attribute is intended for low-level manipulation of the set-up of the
+  Lifting package; using the bundle @{text \<tau>.lifting} together with the
+  commands @{command (HOL) lifting_forget} and @{command (HOL)
+  lifting_update} is preferred for normal usage.
+
+  \item Integration with the BNF package @{cite "isabelle-datatypes"}: As
+  already mentioned, the theorems that are registered by the following
+  attributes are proved and registered automatically if the involved type is
+  a BNF without dead variables: @{attribute (HOL) quot_map}, @{attribute (HOL)
+  relator_eq_onp}, @{attribute (HOL) "relator_mono"}, @{attribute (HOL)
+  "relator_distr"}. Also the definition of a relator and predicator is
+  provided automatically. Moreover, if the BNF represents a datatype,
+  simplification rules for a predicator are again proved automatically.
 
   \end{description}
 \<close>
 
 
-section \<open>Transfer package\<close>
+section \<open>Transfer package \label{sec:transfer}\<close>
 
 text \<open>
   \begin{matharray}{rcl}
@@ -1487,318 +1681,81 @@
 
   \begin{description}
 
-  \item @{method (HOL) "transfer"} method replaces the current subgoal
-    with a logically equivalent one that uses different types and
-    constants. The replacement of types and constants is guided by the
-    database of transfer rules. Goals are generalized over all free
-    variables by default; this is necessary for variables whose types
-    change, but can be overridden for specific variables with e.g.
-    @{text "transfer fixing: x y z"}.
-
-  \item @{method (HOL) "transfer'"} is a variant of @{method (HOL)
-    transfer} that allows replacing a subgoal with one that is
-    logically stronger (rather than equivalent). For example, a
-    subgoal involving equality on a quotient type could be replaced
-    with a subgoal involving equality (instead of the corresponding
-    equivalence relation) on the underlying raw type.
-
-  \item @{method (HOL) "transfer_prover"} method assists with proving
-    a transfer rule for a new constant, provided the constant is
-    defined in terms of other constants that already have transfer
-    rules. It should be applied after unfolding the constant
-    definitions.
-
-  \item @{attribute (HOL) "untransferred"} proves the same equivalent theorem
-     as @{method (HOL) "transfer"} internally does.
+  \item @{method (HOL) "transfer"} method replaces the current subgoal with
+  a logically equivalent one that uses different types and constants. The
+  replacement of types and constants is guided by the database of transfer
+  rules. Goals are generalized over all free variables by default; this is
+  necessary for variables whose types change, but can be overridden for
+  specific variables with e.g. @{text "transfer fixing: x y z"}.
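+
+  A typical unstructured use (shown only as a sketch) is @{text "apply
+  transfer"} followed by reasoning on the raw type, or @{text "apply
+  (transfer fixing: n)"} to prevent the variable @{text n} from being
+  generalized.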
+
+  \item @{method (HOL) "transfer'"} is a variant of @{method (HOL) transfer}
+  that allows replacing a subgoal with one that is logically stronger
+  (rather than equivalent). For example, a subgoal involving equality on a
+  quotient type could be replaced with a subgoal involving equality (instead
+  of the corresponding equivalence relation) on the underlying raw type.
+
+  \item @{method (HOL) "transfer_prover"} method assists with proving a
+  transfer rule for a new constant, provided the constant is defined in
+  terms of other constants that already have transfer rules. It should be
+  applied after unfolding the constant definitions.
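+
+  A common pattern (sketched here with a hypothetical constant @{text
+  my_const}, its raw counterpart @{text my_const_raw}, and a suitable
+  transfer relation @{text R}) is to state the desired transfer rule, e.g.\
+  @{text "(R ===> R) my_const_raw my_const"}, unfold the definitions of the
+  involved constants, and finish the proof with @{method (HOL)
+  "transfer_prover"}.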
+
+  \item @{attribute (HOL) "untransferred"} proves the same equivalent
+  theorem as @{method (HOL) "transfer"} internally does.
 
   \item @{attribute (HOL) Transfer.transferred} works in the opposite
-    direction than @{method (HOL) "transfer'"}. E.g., given the transfer
-    relation @{text "ZN x n \<equiv> (x = int n)"}, corresponding transfer rules and the theorem
-    @{text "\<forall>x::int \<in> {0..}. x < x + 1"}, the attribute would prove 
-    @{text "\<forall>n::nat. n < n + 1"}. The attribute is still in experimental
-    phase of development.
-
-  \item @{attribute (HOL) "transfer_rule"} attribute maintains a
-    collection of transfer rules, which relate constants at two
-    different types. Typical transfer rules may relate different type
-    instances of the same polymorphic constant, or they may relate an
-    operation on a raw type to a corresponding operation on an
-    abstract type (quotient or subtype). For example:
-
-    @{text "((A ===> B) ===> list_all2 A ===> list_all2 B) map map"}\\
+  direction to @{method (HOL) "transfer'"}. E.g., given the transfer
+  relation @{text "ZN x n \<equiv> (x = int n)"}, corresponding transfer rules and
+  the theorem @{text "\<forall>x::int \<in> {0..}. x < x + 1"}, the attribute would
+  prove @{text "\<forall>n::nat. n < n + 1"}. The attribute is still in an
+  experimental phase of development.
+
+  \item @{attribute (HOL) "transfer_rule"} attribute maintains a collection
+  of transfer rules, which relate constants at two different types. Typical
+  transfer rules may relate different type instances of the same polymorphic
+  constant, or they may relate an operation on a raw type to a corresponding
+  operation on an abstract type (quotient or subtype). For example:
+
+    @{text "((A ===> B) ===> list_all2 A ===> list_all2 B) map map"} \\
     @{text "(cr_int ===> cr_int ===> cr_int) (\<lambda>(x,y) (u,v). (x+u, y+v)) plus"}
 
-    Lemmas involving predicates on relations can also be registered
-    using the same attribute. For example:
-
-    @{text "bi_unique A \<Longrightarrow> (list_all2 A ===> op =) distinct distinct"}\\
+  Lemmas involving predicates on relations can also be registered using the
+  same attribute. For example:
+
+    @{text "bi_unique A \<Longrightarrow> (list_all2 A ===> op =) distinct distinct"} \\
     @{text "\<lbrakk>bi_unique A; bi_unique B\<rbrakk> \<Longrightarrow> bi_unique (rel_prod A B)"}
 
-    Preservation of predicates on relations (@{text "bi_unique, bi_total,
-    right_unique, right_total, left_unique, left_total"}) with the respect to a relator
-    is proved automatically if the involved type is BNF
-    @{cite "isabelle-datatypes"} without dead variables.
-
-  \item @{attribute (HOL) "transfer_domain_rule"} attribute maintains a collection
-    of rules, which specify a domain of a transfer relation by a predicate.
-    E.g., given the transfer relation @{text "ZN x n \<equiv> (x = int n)"}, 
-    one can register the following transfer domain rule: 
-    @{text "Domainp ZN = (\<lambda>x. x \<ge> 0)"}. The rules allow the package to produce
-    more readable transferred goals, e.g., when quantifiers are transferred.
-
-  \item @{attribute (HOL) relator_eq} attribute collects identity laws
-    for relators of various type constructors, e.g. @{term "rel_set
-    (op =) = (op =)"}. The @{method (HOL) transfer} method uses these
-    lemmas to infer transfer rules for non-polymorphic constants on
-    the fly. For examples see @{file
-    "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}. 
-    This property is proved automatically if the involved type is BNF without dead variables.
-
-  \item @{attribute_def (HOL) "relator_domain"} attribute collects rules 
-    describing domains of relators by predicators. E.g., 
-    @{term "Domainp (rel_set T) = (\<lambda>A. Ball A (Domainp T))"}. This allows the package 
-    to lift transfer domain rules through type constructors. For examples see @{file
-    "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
-    This property is proved automatically if the involved type is BNF without dead variables.
+  Preservation of predicates on relations (@{text "bi_unique, bi_total,
+  right_unique, right_total, left_unique, left_total"}) with respect to
+  a relator is proved automatically if the involved type is a BNF @{cite
+  "isabelle-datatypes"} without dead variables.
+
+  \item @{attribute (HOL) "transfer_domain_rule"} attribute maintains a
+  collection of rules, which specify a domain of a transfer relation by a
+  predicate. E.g., given the transfer relation @{text "ZN x n \<equiv> (x = int
+  n)"}, one can register the following transfer domain rule: @{text "Domainp
+  ZN = (\<lambda>x. x \<ge> 0)"}. The rules allow the package to produce more readable
+  transferred goals, e.g., when quantifiers are transferred.
+
+  \item @{attribute (HOL) relator_eq} collects identity laws for
+  relators of various type constructors, e.g. @{term "rel_set (op =) = (op
+  =)"}. The @{method (HOL) transfer} method uses these lemmas to infer
+  transfer rules for non-polymorphic constants on the fly. For examples see
+  @{file "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
+  This property is proved automatically if the involved type is a BNF without
+  dead variables.
+
+  \item @{attribute_def (HOL) "relator_domain"} collects rules
+  describing domains of relators by predicators. E.g., @{term "Domainp
+  (rel_set T) = (\<lambda>A. Ball A (Domainp T))"}. This allows the package to lift
+  transfer domain rules through type constructors. For examples see @{file
+  "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}. This
+  property is proved automatically if the involved type is BNF without dead
+  variables.
 
   \end{description}
 
-  Theoretical background can be found in @{cite "Huffman-Kuncar:2013:lifting_transfer"}.
-\<close>
-
-
-section \<open>Lifting package\<close>
-
-text \<open>
-  The Lifting package allows users to lift terms of the raw type to the abstract type, which is 
-  a necessary step in building a library for an abstract type. Lifting defines a new constant 
-  by combining coercion functions (Abs and Rep) with the raw term. It also proves an appropriate 
-  transfer rule for the Transfer package and, if possible, an equation for the code generator.
-
-  The Lifting package provides two main commands: @{command (HOL) "setup_lifting"} for initializing 
-  the package to work with a new type, and @{command (HOL) "lift_definition"} for lifting constants. 
-  The Lifting package works with all four kinds of type abstraction: type copies, subtypes, 
-  total quotients and partial quotients.
-
-  Theoretical background can be found in @{cite "Huffman-Kuncar:2013:lifting_transfer"}.
-
-  \begin{matharray}{rcl}
-    @{command_def (HOL) "setup_lifting"} & : & @{text "local_theory \<rightarrow> local_theory"}\\
-    @{command_def (HOL) "lift_definition"} & : & @{text "local_theory \<rightarrow> proof(prove)"}\\
-    @{command_def (HOL) "lifting_forget"} & : & @{text "local_theory \<rightarrow> local_theory"}\\
-    @{command_def (HOL) "lifting_update"} & : & @{text "local_theory \<rightarrow> local_theory"}\\
-    @{command_def (HOL) "print_quot_maps"} & : & @{text "context \<rightarrow>"}\\
-    @{command_def (HOL) "print_quotients"} & : & @{text "context \<rightarrow>"}\\
-    @{attribute_def (HOL) "quot_map"} & : & @{text attribute} \\
-    @{attribute_def (HOL) "relator_eq_onp"} & : & @{text attribute} \\
-    @{attribute_def (HOL) "relator_mono"} & : & @{text attribute} \\
-    @{attribute_def (HOL) "relator_distr"} & : & @{text attribute} \\
-    @{attribute_def (HOL) "quot_del"} & : & @{text attribute} \\
-    @{attribute_def (HOL) "lifting_restore"} & : & @{text attribute} \\   
-  \end{matharray}
-
-  @{rail \<open>
-    @@{command (HOL) setup_lifting} @{syntax thmref} @{syntax thmref}? \<newline>
-      (@'parametric' @{syntax thmref})?
-  \<close>}
-
-  @{rail \<open>
-    @@{command (HOL) lift_definition} ('(' 'code_dt' ')')? @{syntax name} '::' @{syntax type}  \<newline>  
-      @{syntax mixfix}? 'is' @{syntax term} (@'parametric' (@{syntax thmref}+))?
-  \<close>}
-
-  @{rail \<open>
-    @@{command (HOL) lifting_forget} @{syntax nameref}
-  \<close>}
-
-  @{rail \<open>
-    @@{command (HOL) lifting_update} @{syntax nameref}
-  \<close>}
-
-  @{rail \<open>
-    @@{attribute (HOL) lifting_restore} @{syntax thmref} (@{syntax thmref} @{syntax thmref})?
-  \<close>}
-
-  \begin{description}
-
-  \item @{command (HOL) "setup_lifting"} Sets up the Lifting package
-    to work with a user-defined type. 
-    The command supports two modes. The first one is a low-level mode when 
-    the user must provide as a first
-    argument of @{command (HOL) "setup_lifting"} a
-    quotient theorem @{term "Quotient R Abs Rep T"}. The
-    package configures a transfer rule for equality, a domain transfer
-    rules and sets up the @{command_def (HOL) "lift_definition"}
-    command to work with the abstract type. An optional theorem @{term "reflp R"}, which certifies that 
-    the equivalence relation R is total,
-    can be provided as a second argument. This allows the package to generate stronger transfer
-    rules. And finally, the parametricity theorem for R can be provided as a third argument.
-    This allows the package to generate a stronger transfer rule for equality.
-
-    Users generally will not prove the @{text Quotient} theorem manually for 
-    new types, as special commands exist to automate the process.
-    
-    When a new subtype is defined by @{command (HOL) typedef}, @{command (HOL) "lift_definition"} 
-    can be used in its
-    second mode, where only the type_definition theorem @{text "type_definition Rep Abs A"}
-    is used as an argument of the command. The command internally proves the corresponding 
-    Quotient theorem and registers it with @{command (HOL) setup_lifting} using its first mode.
-
-    For quotients, the command @{command (HOL) quotient_type} can be used. The command defines 
-    a new quotient type and similarly to the previous case, the corresponding Quotient theorem is proved 
-    and registered by @{command (HOL) setup_lifting}.
-    
-    The command @{command (HOL) "setup_lifting"} also sets up the code generator
-    for the new type. Later on, when a new constant is defined by @{command (HOL) "lift_definition"},
-    the Lifting package proves and registers a code equation (if there is one) for the new constant.
-
-  \item @{command (HOL) "lift_definition"} @{text "f :: \<tau>"} @{keyword (HOL) "is"} @{text t}
-    Defines a new function @{text f} with an abstract type @{text \<tau>}
-    in terms of a corresponding operation @{text t} on a
-    representation type. More formally, if @{text "t :: \<sigma>"}, then
-    the command builds a term @{text "F"} as a corresponding combination of abstraction 
-    and representation functions such that @{text "F :: \<sigma> \<Rightarrow> \<tau>" } and 
-    defines @{text f} is as @{text "f \<equiv> F t"}.
-    The term @{text t} does not have to be necessarily a constant but it can be any term.
-
-    The command opens a proof environment and the user must discharge 
-    a respectfulness proof obligation. For a type copy, i.e., a typedef with @{text
-    UNIV}, the obligation is discharged automatically. The proof goal is
-    presented in a user-friendly, readable form. A respectfulness
-    theorem in the standard format @{text f.rsp} and a transfer rule
-    @{text f.transfer} for the Transfer package are generated by the
-    package.
-
-    The user can specify a parametricity theorems for @{text t} after the keyword 
-    @{keyword "parametric"}, which allows the command
-    to generate parametric transfer rules for @{text f}.
-
-    For each constant defined through trivial quotients (type copies or
-    subtypes) @{text f.rep_eq} is generated. The equation is a code certificate
-    that defines @{text f} using the representation function.
-
-    For each constant @{text f.abs_eq} is generated. The equation is unconditional
-    for total quotients. The equation defines @{text f} using
-    the abstraction function.
-
-    Integration with [@{attribute code} abstract]: For subtypes (e.g.,
-    corresponding to a datatype invariant, such as @{typ "'a dlist"}), @{command
-    (HOL) "lift_definition"} uses a code certificate theorem
-    @{text f.rep_eq} as a code equation. Because of the limitation of the code generator,
-    @{text f.rep_eq} cannot be used as a code equation if the subtype occurs inside the result
-    type rather than at the top level (e.g., function returning @{typ "'a dlist option"} vs. 
-    @{typ "'a dlist"}). In this case, an extension of @{command
-    (HOL) "lift_definition"} can be invoked by specifying the flag @{text "code_dt"}. This
-    extension enables code execution through series of internal type and lifting definitions 
-    if the return type @{text "\<tau>"} meets the following inductive conditions:
-    \begin{description}
-      \item  @{text "\<tau>"} is a type variable
-      \item @{text "\<tau> = \<tau>\<^sub>1 \<dots> \<tau>\<^sub>n \<kappa>"}, where @{text "\<kappa>"} is an abstract type constructor 
-        and @{text "\<tau>\<^sub>1 \<dots> \<tau>\<^sub>n"} do not contain abstract types (i.e., @{typ "int dlist"} is allowed 
-        whereas @{typ "int dlist dlist"} not)
-      \item @{text "\<tau> = \<tau>\<^sub>1 \<dots> \<tau>\<^sub>n \<kappa>"}, @{text "\<kappa>"} is a type constructor that was defined as a 
-        (co)datatype whose constructor argument types do not contain either non-free datatypes 
-        or the function type.
-    \end{description}
-
-    Integration with [@{attribute code} equation]: For total quotients, @{command
-    (HOL) "lift_definition"} uses @{text f.abs_eq} as a code equation.
-
-  \item @{command (HOL) lifting_forget} and  @{command (HOL) lifting_update}
-    These two commands serve for storing and deleting the set-up of
-    the Lifting package and corresponding transfer rules defined by this package.
-    This is useful for hiding of type construction details of an abstract type 
-    when the construction is finished but it still allows additions to this construction
-    when this is later necessary.
-
-    Whenever the Lifting package is set up with a new abstract type @{text "\<tau>"} by  
-    @{command_def (HOL) "lift_definition"}, the package defines a new bundle
-    that is called @{text "\<tau>.lifting"}. This bundle already includes set-up for the Lifting package. 
-    The new transfer rules
-    introduced by @{command (HOL) "lift_definition"} can be stored in the bundle by
-    the command @{command (HOL) "lifting_update"} @{text "\<tau>.lifting"}.
-
-    The command @{command (HOL) "lifting_forget"} @{text "\<tau>.lifting"} deletes set-up of the Lifting 
-    package
-    for @{text \<tau>} and deletes all the transfer rules that were introduced
-    by @{command (HOL) "lift_definition"} using @{text \<tau>} as an abstract type.
-
-    The stored set-up in a bundle can be reintroduced by the Isar commands for including a bundle
-    (@{command "include"}, @{keyword "includes"} and @{command "including"}).
-
-  \item @{command (HOL) "print_quot_maps"} prints stored quotient map
-    theorems.
-
-  \item @{command (HOL) "print_quotients"} prints stored quotient
-    theorems.
-
-  \item @{attribute (HOL) quot_map} registers a quotient map
-    theorem, a theorem showing how to "lift" quotients over type constructors. 
-    E.g., @{term "Quotient R Abs Rep T \<Longrightarrow> 
-    Quotient (rel_set R) (image Abs) (image Rep) (rel_set T)"}. 
-    For examples see @{file
-    "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
-    This property is proved automatically if the involved type is BNF without dead variables.
-
-  \item @{attribute (HOL) relator_eq_onp} registers a theorem that
-    shows that a relator applied to an equality restricted by a predicate @{term P} (i.e., @{term
-    "eq_onp P"}) is equal 
-    to a predicator applied to the @{term P}. The combinator @{const eq_onp} is used for 
-    internal encoding of proper subtypes. Such theorems allows the package to hide @{text
-    eq_onp} from a user in a user-readable form of a
-    respectfulness theorem. For examples see @{file
-    "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
-    This property is proved automatically if the involved type is BNF without dead variables.
-
-  \item @{attribute (HOL) "relator_mono"} registers a property describing a monotonicity of a relator.
-    E.g., @{term "A \<le> B \<Longrightarrow> rel_set A \<le> rel_set B"}. 
-    This property is needed for proving a stronger transfer rule in @{command_def (HOL) "lift_definition"}
-    when a parametricity theorem for the raw term is specified and also for the reflexivity prover.
-    For examples see @{file
-    "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
-    This property is proved automatically if the involved type is BNF without dead variables.
-
-  \item @{attribute (HOL) "relator_distr"} registers a property describing a distributivity
-    of the relation composition and a relator. E.g., 
-    @{text "rel_set R \<circ>\<circ> rel_set S = rel_set (R \<circ>\<circ> S)"}. 
-    This property is needed for proving a stronger transfer rule in @{command_def (HOL) "lift_definition"}
-    when a parametricity theorem for the raw term is specified.
-    When this equality does not hold unconditionally (e.g., for the function type), the user can specified
-    each direction separately and also register multiple theorems with different set of assumptions.
-    This attribute can be used only after the monotonicity property was already registered by
-    @{attribute (HOL) "relator_mono"}. For examples see @{file
-    "~~/src/HOL/Lifting_Set.thy"} or @{file "~~/src/HOL/Lifting.thy"}.
-    This property is proved automatically if the involved type is BNF without dead variables.
-
-  \item @{attribute (HOL) quot_del} deletes a corresponding Quotient theorem
-    from the Lifting infrastructure and thus de-register the corresponding quotient. 
-    This effectively causes that @{command (HOL) lift_definition}  will not
-    do any lifting for the corresponding type. This attribute is rather used for low-level
-    manipulation with set-up of the Lifting package because @{command (HOL) lifting_forget} is
-    preferred for normal usage.
-
-  \item @{attribute (HOL) lifting_restore} @{text "Quotient_thm pcr_def pcr_cr_eq_thm"} 
-    registers the Quotient theorem @{text Quotient_thm} in the Lifting infrastructure 
-    and thus sets up lifting for an abstract type @{text \<tau>} (that is defined by @{text Quotient_thm}).
-    Optional theorems @{text pcr_def} and @{text pcr_cr_eq_thm} can be specified to register 
-    the parametrized
-    correspondence relation for @{text \<tau>}. E.g., for @{typ "'a dlist"}, @{text pcr_def} is
-    @{text "pcr_dlist A \<equiv> list_all2 A \<circ>\<circ> cr_dlist"} and @{text pcr_cr_eq_thm} is 
-    @{text "pcr_dlist op= = op="}.
-    This attribute is rather used for low-level
-    manipulation with set-up of the Lifting package because using of the bundle @{text \<tau>.lifting} 
-    together with the commands @{command (HOL) lifting_forget} and @{command (HOL) lifting_update} is
-    preferred for normal usage.
-
-  \item Integration with the BNF package @{cite "isabelle-datatypes"}:
-    As already mentioned, the theorems that are registered
-    by the following attributes are proved and registered automatically if the involved type
-    is BNF without dead variables: @{attribute (HOL) quot_map}, @{attribute (HOL) relator_eq_onp}, 
-    @{attribute (HOL) "relator_mono"}, @{attribute (HOL) "relator_distr"}. Also the definition of a 
-    relator and predicator is provided automatically. Moreover, if the BNF represents a datatype, 
-    simplification rules for a predicator are again proved automatically.
-  
-  \end{description}
+  Theoretical background can be found in @{cite
+  "Huffman-Kuncar:2013:lifting_transfer"}.
 \<close>
 
 
@@ -2264,10 +2221,10 @@
   them back into Isabelle terms for displaying counterexamples.
     \begin{description}
     \item[@{text exhaustive}] The parameters of the type classes @{class exhaustive}
-      and @{class full_exhaustive} implement the testing. They take a 
+      and @{class full_exhaustive} implement the testing. They take a
       testing function as a parameter, which takes a value of type @{typ "'a"}
       and optionally produces a counterexample, and a size parameter for the test values.
-      In @{class full_exhaustive}, the testing function parameter additionally 
+      In @{class full_exhaustive}, the testing function parameter additionally
       expects a lazy term reconstruction in the type @{typ Code_Evaluation.term}
       of the tested value.
 
@@ -2280,7 +2237,7 @@
       value of the given size and a lazy term reconstruction of the value
       in the type @{typ Code_Evaluation.term}. A pseudo-randomness generator
       is defined in theory @{theory Random}.
-      
+
     \item[@{text narrowing}] implements Haskell's Lazy Smallcheck @{cite "runciman-naylor-lindblad"}
       using the type classes @{class narrowing} and @{class partial_term_of}.
       Variables in the current goal are initially represented as symbolic variables.
@@ -2402,6 +2359,31 @@
 \<close>
 
 
+
+section \<open>Adhoc tuples\<close>
+
+text \<open>
+  \begin{matharray}{rcl}
+    @{attribute_def (HOL) split_format}@{text "\<^sup>*"} & : & @{text attribute} \\
+  \end{matharray}
+
+  @{rail \<open>
+    @@{attribute (HOL) split_format} ('(' 'complete' ')')?
+  \<close>}
+
+  \begin{description}
+
+  \item @{attribute (HOL) split_format}\ @{text "(complete)"} causes
+  arguments in function applications to be represented canonically
+  according to their tuple type structure.
+
+  Note that this operation tends to invent funny names for the new local
+  parameters that it introduces.
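+
+  For instance (with a hypothetical fact @{text my_rule} whose statement
+  involves a parameter of some product type), @{text "my_rule [split_format
+  (complete)]"} yields a variant in which that parameter is replaced by an
+  explicit tuple of fresh components.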
+
+  \end{description}
+\<close>
+
+
 chapter \<open>Executable code\<close>
 
 text \<open>For validation purposes, it is often useful to \emph{execute}
@@ -2614,7 +2596,7 @@
   if needed these are implemented by program abort (exception) instead.
 
   Usually packages introducing code equations provide a reasonable
-  default setup for selection.  
+  default setup for selection.
 
   \item @{command (HOL) "code_datatype"} specifies a constructor set
   for a logical type.
--- a/src/Doc/Isar_Ref/ML_Tactic.thy	Fri Jul 03 10:17:29 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,177 +0,0 @@
-theory ML_Tactic
-imports Base Main
-begin
-
-chapter \<open>ML tactic expressions\<close>
-
-text \<open>
-  Isar Proof methods closely resemble traditional tactics, when used
-  in unstructured sequences of @{command "apply"} commands.
-  Isabelle/Isar provides emulations for all major ML tactics of
-  classic Isabelle --- mostly for the sake of easy porting of existing
-  developments, as actual Isar proof texts would demand much less
-  diversity of proof methods.
-
-  Unlike tactic expressions in ML, Isar proof methods provide proper
-  concrete syntax for additional arguments, options, modifiers etc.
-  Thus a typical method text is usually more concise than the
-  corresponding ML tactic.  Furthermore, the Isar versions of classic
-  Isabelle tactics often cover several variant forms by a single
-  method with separate options to tune the behavior.  For example,
-  method @{method simp} replaces all of @{ML simp_tac}~/ @{ML
-  asm_simp_tac}~/ @{ML full_simp_tac}~/ @{ML asm_full_simp_tac}, there
-  is also concrete syntax for augmenting the Simplifier context (the
-  current ``simpset'') in a convenient way.
-\<close>
-
-
-section \<open>Resolution tactics\<close>
-
-text \<open>
-  Classic Isabelle provides several variant forms of tactics for
-  single-step rule applications (based on higher-order resolution).
-  The space of resolution tactics has the following main dimensions.
-
-  \begin{enumerate}
-
-  \item The ``mode'' of resolution: intro, elim, destruct, or forward
-  (e.g.\ @{ML resolve_tac}, @{ML eresolve_tac}, @{ML dresolve_tac},
-  @{ML forward_tac}).
-
-  \item Optional explicit instantiation (e.g.\ @{ML resolve_tac} vs.\
-  @{ML Rule_Insts.res_inst_tac}).
-
-  \item Abbreviations for singleton arguments (e.g.\ @{ML resolve_tac}
-  vs.\ @{ML rtac}).
-
-  \end{enumerate}
-
-  Basically, the set of Isar tactic emulations @{method rule_tac},
-  @{method erule_tac}, @{method drule_tac}, @{method frule_tac} (see
-  \secref{sec:tactics}) would be sufficient to cover the four modes,
-  either with or without instantiation, and either with single or
-  multiple arguments.  Although it is more convenient in most cases to
-  use the plain @{method_ref (Pure) rule} method, or any of its
-  ``improper'' variants @{method erule}, @{method drule}, @{method
-  frule}.  Note that explicit goal addressing is only supported by the
-  actual @{method rule_tac} version.
-
-  With this in mind, plain resolution tactics correspond to Isar
-  methods as follows.
-
-  \medskip
-  \begin{tabular}{lll}
-    @{ML rtac}~@{text "a 1"} & & @{text "rule a"} \\
-    @{ML resolve_tac}~@{text "ctxt [a\<^sub>1, \<dots>] 1"} & & @{text "rule a\<^sub>1 \<dots>"} \\
-    @{ML Rule_Insts.res_inst_tac}~@{text "ctxt [(x\<^sub>1, t\<^sub>1), \<dots>] a 1"} & &
-    @{text "rule_tac x\<^sub>1 = t\<^sub>1 \<AND> \<dots> \<IN> a"} \\[0.5ex]
-    @{ML rtac}~@{text "a i"} & & @{text "rule_tac [i] a"} \\
-    @{ML resolve_tac}~@{text "ctxt [a\<^sub>1, \<dots>] i"} & & @{text "rule_tac [i] a\<^sub>1 \<dots>"} \\
-    @{ML Rule_Insts.res_inst_tac}~@{text "ctxt [(x\<^sub>1, t\<^sub>1), \<dots>] a i"} & &
-    @{text "rule_tac [i] x\<^sub>1 = t\<^sub>1 \<AND> \<dots> \<IN> a"} \\
-  \end{tabular}
-  \medskip
-
-  Note that explicit goal addressing may be usually avoided by
-  changing the order of subgoals with @{command "defer"} or @{command
-  "prefer"} (see \secref{sec:tactic-commands}).
-\<close>
-
-
-section \<open>Simplifier tactics\<close>
-
-text \<open>The main Simplifier tactics @{ML simp_tac} and variants are
-  all covered by the @{method simp} and @{method simp_all} methods
-  (see \secref{sec:simplifier}).  Note that there is no individual
-  goal addressing available, simplification acts either on the first
-  goal (@{method simp}) or all goals (@{method simp_all}).
-
-  \medskip
-  \begin{tabular}{lll}
-    @{ML asm_full_simp_tac}~@{text "@{context} 1"} & & @{method simp} \\
-    @{ML ALLGOALS}~(@{ML asm_full_simp_tac}~@{text "@{context}"}) & & @{method simp_all} \\[0.5ex]
-    @{ML simp_tac}~@{text "@{context} 1"} & & @{method simp}~@{text "(no_asm)"} \\
-    @{ML asm_simp_tac}~@{text "@{context} 1"} & & @{method simp}~@{text "(no_asm_simp)"} \\
-    @{ML full_simp_tac}~@{text "@{context} 1"} & & @{method simp}~@{text "(no_asm_use)"} \\
-    @{ML asm_lr_simp_tac}~@{text "@{context} 1"} & & @{method simp}~@{text "(asm_lr)"} \\
-  \end{tabular}
-  \medskip
-\<close>
-
-
-section \<open>Classical Reasoner tactics\<close>
-
-text \<open>The Classical Reasoner provides a rather large number of
-  variations of automated tactics, such as @{ML blast_tac}, @{ML
-  fast_tac}, @{ML clarify_tac} etc.  The corresponding Isar methods
-  usually share the same base name, such as @{method blast}, @{method
-  fast}, @{method clarify} etc.\ (see \secref{sec:classical}).\<close>
-
-
-section \<open>Miscellaneous tactics\<close>
-
-text \<open>
-  There are a few additional tactics defined in various theories of
-  Isabelle/HOL, some of these also in Isabelle/FOL or Isabelle/ZF.
-  The most common ones of these may be ported to Isar as follows.
-
-  \medskip
-  \begin{tabular}{lll}
-    @{ML stac}~@{text "a 1"} & & @{text "subst a"} \\
-    @{ML hyp_subst_tac}~@{text 1} & & @{text hypsubst} \\
-    @{ML split_all_tac}~@{text 1} & & @{text "simp (no_asm_simp) only: split_tupled_all"} \\
-      & @{text "\<approx>"} & @{text "simp only: split_tupled_all"} \\
-      & @{text "\<lless>"} & @{text "clarify"} \\
-  \end{tabular}
-\<close>
-
-
-section \<open>Tacticals\<close>
-
-text \<open>
-  Classic Isabelle provides a huge amount of tacticals for combination
-  and modification of existing tactics.  This has been greatly reduced
-  in Isar, providing the bare minimum of combinators only: ``@{text
-  ","}'' (sequential composition), ``@{text "|"}'' (alternative
-  choices), ``@{text "?"}'' (try), ``@{text "+"}'' (repeat at least
-  once).  These are usually sufficient in practice; if all fails,
-  arbitrary ML tactic code may be invoked via the @{method tactic}
-  method (see \secref{sec:tactics}).
-
-  \medskip Common ML tacticals may be expressed directly in Isar as
-  follows:
-
-  \medskip
-  \begin{tabular}{lll}
-    @{text "tac\<^sub>1"}~@{ML_text THEN}~@{text "tac\<^sub>2"} & & @{text "meth\<^sub>1, meth\<^sub>2"} \\
-    @{text "tac\<^sub>1"}~@{ML_text ORELSE}~@{text "tac\<^sub>2"} & & @{text "meth\<^sub>1 | meth\<^sub>2"} \\
-    @{ML TRY}~@{text tac} & & @{text "meth?"} \\
-    @{ML REPEAT1}~@{text tac} & & @{text "meth+"} \\
-    @{ML REPEAT}~@{text tac} & & @{text "(meth+)?"} \\
-    @{ML EVERY}~@{text "[tac\<^sub>1, \<dots>]"} & & @{text "meth\<^sub>1, \<dots>"} \\
-    @{ML FIRST}~@{text "[tac\<^sub>1, \<dots>]"} & & @{text "meth\<^sub>1 | \<dots>"} \\
-  \end{tabular}
-  \medskip
-
-  \medskip @{ML CHANGED} (see @{cite "isabelle-implementation"}) is
-  usually not required in Isar, since most basic proof methods already
-  fail unless there is an actual change in the goal state.
-  Nevertheless, ``@{text "?"}''  (try) may be used to accept
-  \emph{unchanged} results as well.
-
-  \medskip @{ML ALLGOALS}, @{ML SOMEGOAL} etc.\ (see
-  @{cite "isabelle-implementation"}) are not available in Isar, since
-  there is no direct goal addressing.  Nevertheless, some basic
-  methods address all goals internally, notably @{method simp_all}
-  (see \secref{sec:simplifier}).  Also note that @{ML ALLGOALS} can be
-  often replaced by ``@{text "+"}'' (repeat at least once), although
-  this usually has a different operational behavior: subgoals are
-  solved in a different order.
-
-  \medskip Iterated resolution, such as
-  @{ML_text "REPEAT (FIRSTGOAL (resolve_tac ...))"}, is usually better
-  expressed using the @{method intro} and @{method elim} methods of
-  Isar (see \secref{sec:classical}).
-\<close>
-
-end
--- a/src/Doc/Isar_Ref/document/root.tex	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Doc/Isar_Ref/document/root.tex	Mon Jul 06 14:27:03 2015 +0200
@@ -86,7 +86,6 @@
 \input{Quick_Reference.tex}
 \let\int\intorig
 \input{Symbols.tex}
-\input{ML_Tactic.tex}
 
 \begingroup
   \tocentry{\bibname}
--- a/src/Doc/ROOT	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Doc/ROOT	Mon Jul 06 14:27:03 2015 +0200
@@ -171,7 +171,6 @@
     HOL_Specific
     Quick_Reference
     Symbols
-    ML_Tactic
   document_files (in "..")
     "prepare_document"
     "pdfsetup.sty"
--- a/src/FOLP/ex/Nat.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/FOLP/ex/Nat.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -82,24 +82,26 @@
 lemmas nat_congs = Suc_cong Plus_cong
 
 ML {*
-  val add_ss = FOLP_ss addcongs @{thms nat_congs} addrews [@{thm add_0}, @{thm add_Suc}]
+  val add_ss =
+    FOLP_ss addcongs @{thms nat_congs}
+    |> fold (addrew @{context}) @{thms add_0 add_Suc}
 *}
 
 schematic_lemma add_assoc: "?p : (k+m)+n = k+(m+n)"
 apply (rule_tac n = k in induct)
-apply (tactic {* SIMP_TAC add_ss 1 *})
-apply (tactic {* ASM_SIMP_TAC add_ss 1 *})
+apply (tactic {* SIMP_TAC @{context} add_ss 1 *})
+apply (tactic {* ASM_SIMP_TAC @{context} add_ss 1 *})
 done
 
 schematic_lemma add_0_right: "?p : m+0 = m"
 apply (rule_tac n = m in induct)
-apply (tactic {* SIMP_TAC add_ss 1 *})
-apply (tactic {* ASM_SIMP_TAC add_ss 1 *})
+apply (tactic {* SIMP_TAC @{context} add_ss 1 *})
+apply (tactic {* ASM_SIMP_TAC @{context} add_ss 1 *})
 done
 
 schematic_lemma add_Suc_right: "?p : m+Suc(n) = Suc(m+n)"
 apply (rule_tac n = m in induct)
-apply (tactic {* ALLGOALS (ASM_SIMP_TAC add_ss) *})
+apply (tactic {* ALLGOALS (ASM_SIMP_TAC @{context} add_ss) *})
 done
 
 (*mk_typed_congs appears not to work with FOLP's version of subst*)
--- a/src/FOLP/simp.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/FOLP/simp.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -24,26 +24,26 @@
 end;
 
 
-infix 4 addrews addcongs delrews delcongs setauto;
+infix 4 addcongs delrews delcongs setauto;
 
 signature SIMP =
 sig
   type simpset
   val empty_ss  : simpset
   val addcongs  : simpset * thm list -> simpset
-  val addrews   : simpset * thm list -> simpset
+  val addrew    : Proof.context -> thm -> simpset -> simpset
   val delcongs  : simpset * thm list -> simpset
   val delrews   : simpset * thm list -> simpset
   val dest_ss   : simpset -> thm list * thm list
-  val print_ss  : simpset -> unit
+  val print_ss  : Proof.context -> simpset -> unit
   val setauto   : simpset * (int -> tactic) -> simpset
-  val ASM_SIMP_CASE_TAC : simpset -> int -> tactic
-  val ASM_SIMP_TAC      : simpset -> int -> tactic
+  val ASM_SIMP_CASE_TAC : Proof.context -> simpset -> int -> tactic
+  val ASM_SIMP_TAC      : Proof.context -> simpset -> int -> tactic
   val CASE_TAC          : simpset -> int -> tactic
-  val SIMP_CASE2_TAC    : simpset -> int -> tactic
-  val SIMP_THM          : simpset -> thm -> thm
-  val SIMP_TAC          : simpset -> int -> tactic
-  val SIMP_CASE_TAC     : simpset -> int -> tactic
+  val SIMP_CASE2_TAC    : Proof.context -> simpset -> int -> tactic
+  val SIMP_THM          : Proof.context -> simpset -> thm -> thm
+  val SIMP_TAC          : Proof.context -> simpset -> int -> tactic
+  val SIMP_CASE_TAC     : Proof.context -> simpset -> int -> tactic
   val mk_congs          : theory -> string list -> thm list
   val mk_typed_congs    : theory -> (string * string) list -> thm list
 (* temporarily disabled:
@@ -220,15 +220,13 @@
     in add_norms(congs,ccs,new_asms) end;
 
 fun normed_rews congs =
-  let val add_norms = add_norm_tags congs in
-    fn thm =>
+  let
+    val add_norms = add_norm_tags congs
+    fun normed ctxt thm =
       let
-        val ctxt =
-          Thm.theory_of_thm thm
-          |> Proof_Context.init_global
-          |> Variable.declare_thm thm;
+        val ctxt' = Variable.declare_thm thm ctxt;
       in Variable.tradeT (K (map (add_norms o mk_trans) o maps mk_rew_rules)) ctxt [thm] end
-  end;
+  in normed end;
 
 fun NORM norm_lhs_tac = EVERY'[rtac red2 , norm_lhs_tac, refl_tac];
 
@@ -241,7 +239,7 @@
         SS of {auto_tac: int -> tactic,
                congs: thm list,
                cong_net: thm Net.net,
-               mk_simps: thm -> thm list,
+               mk_simps: Proof.context -> thm -> thm list,
                simps: (thm * thm list) list,
                simp_net: thm Net.net}
 
@@ -251,22 +249,18 @@
 (** Insertion of congruences and rewrites **)
 
 (*insert a thm in a thm net*)
-fun insert_thm_warn th net =
+fun insert_thm th net =
   Net.insert_term Thm.eq_thm_prop (Thm.concl_of th, th) net
-  handle Net.INSERT =>
-    (writeln ("Duplicate rewrite or congruence rule:\n" ^
-        Display.string_of_thm_without_context th); net);
+    handle Net.INSERT => net;
 
-val insert_thms = fold_rev insert_thm_warn;
+val insert_thms = fold_rev insert_thm;
 
-fun addrew thm (SS{auto_tac,congs,cong_net,mk_simps,simps,simp_net}) =
-let val thms = mk_simps thm
+fun addrew ctxt thm (SS{auto_tac,congs,cong_net,mk_simps,simps,simp_net}) =
+let val thms = mk_simps ctxt thm
 in SS{auto_tac=auto_tac,congs=congs, cong_net=cong_net, mk_simps=mk_simps,
       simps = (thm,thms)::simps, simp_net = insert_thms thms simp_net}
 end;
 
-fun ss addrews thms = fold addrew thms ss;
-
 fun op addcongs(SS{auto_tac,congs,cong_net,mk_simps,simps,simp_net}, thms) =
 let val congs' = thms @ congs;
 in SS{auto_tac=auto_tac, congs= congs',
@@ -277,13 +271,11 @@
 (** Deletion of congruences and rewrites **)
 
 (*delete a thm from a thm net*)
-fun delete_thm_warn th net =
+fun delete_thm th net =
   Net.delete_term Thm.eq_thm_prop (Thm.concl_of th, th) net
-  handle Net.DELETE =>
-    (writeln ("No such rewrite or congruence rule:\n" ^
-        Display.string_of_thm_without_context th); net);
+    handle Net.DELETE => net;
 
-val delete_thms = fold_rev delete_thm_warn;
+val delete_thms = fold_rev delete_thm;
 
 fun op delcongs(SS{auto_tac,congs,cong_net,mk_simps,simps,simp_net}, thms) =
 let val congs' = fold (remove Thm.eq_thm_prop) thms congs
@@ -295,9 +287,7 @@
 fun delrew thm (SS{auto_tac,congs,cong_net,mk_simps,simps,simp_net}) =
 let fun find((p as (th,ths))::ps',ps) =
           if Thm.eq_thm_prop(thm,th) then (ths,ps@ps') else find(ps',p::ps)
-      | find([],simps') =
-          (writeln ("No such rewrite or congruence rule:\n" ^
-              Display.string_of_thm_without_context thm); ([], simps'))
+      | find([],simps') = ([], simps')
     val (thms,simps') = find(simps,[])
 in SS{auto_tac=auto_tac, congs=congs, cong_net=cong_net, mk_simps=mk_simps,
       simps = simps', simp_net = delete_thms thms simp_net }
@@ -315,10 +305,10 @@
 
 fun dest_ss(SS{congs,simps,...}) = (congs, map #1 simps);
 
-fun print_ss(SS{congs,simps,...}) =
+fun print_ss ctxt (SS{congs,simps,...}) =
   writeln (cat_lines
-   (["Congruences:"] @ map Display.string_of_thm_without_context congs @
-    ["Rewrite Rules:"] @ map (Display.string_of_thm_without_context o #1) simps));
+   (["Congruences:"] @ map (Display.string_of_thm ctxt) congs @
+    ["Rewrite Rules:"] @ map (Display.string_of_thm ctxt o #1) simps));
 
 
 (* Rewriting with conditionals *)
@@ -385,26 +375,25 @@
     in variants_abs (params, Logic.strip_assums_concl subgi) end;
 
 (*print lhs of conclusion of subgoal i*)
-fun pr_goal_lhs i st =
-    writeln (Syntax.string_of_term_global (Thm.theory_of_thm st)
-             (lhs_of (prepare_goal i st)));
+fun pr_goal_lhs ctxt i st =
+    writeln (Syntax.string_of_term ctxt (lhs_of (prepare_goal i st)));
 
 (*print conclusion of subgoal i*)
-fun pr_goal_concl i st =
-    writeln (Syntax.string_of_term_global (Thm.theory_of_thm st) (prepare_goal i st))
+fun pr_goal_concl ctxt i st =
+    writeln (Syntax.string_of_term ctxt (prepare_goal i st))
 
 (*print subgoals i to j (inclusive)*)
-fun pr_goals (i,j) st =
+fun pr_goals ctxt (i,j) st =
     if i>j then ()
-    else (pr_goal_concl i st;  pr_goals (i+1,j) st);
+    else (pr_goal_concl ctxt i st;  pr_goals ctxt (i+1,j) st);
 
 (*Print rewrite for tracing; i=subgoal#, n=number of new subgoals,
   thm=old state, thm'=new state *)
-fun pr_rew (i,n,thm,thm',not_asms) =
+fun pr_rew ctxt (i,n,thm,thm',not_asms) =
     if !tracing
     then (if not_asms then () else writeln"Assumption used in";
-          pr_goal_lhs i thm; writeln"->"; pr_goal_lhs (i+n) thm';
-          if n>0 then (writeln"Conditions:"; pr_goals (i, i+n-1) thm')
+          pr_goal_lhs ctxt i thm; writeln"->"; pr_goal_lhs ctxt (i+n) thm';
+          if n>0 then (writeln"Conditions:"; pr_goals ctxt (i, i+n-1) thm')
           else ();
           writeln"" )
     else ();
@@ -417,7 +406,8 @@
         strip_varify(t,n,Var(("?",length vs),T)::vs)
   | strip_varify  _  = [];
 
-fun execute(ss,if_fl,auto_tac,cong_tac,net,i,thm) = let
+fun execute ctxt (ss,if_fl,auto_tac,cong_tac,net,i,thm) =
+let
 
 fun simp_lhs(thm,ss,anet,ats,cs) =
     if var_lhs(thm,i) then (ss,thm,anet,ats,cs) else
@@ -436,21 +426,16 @@
       are represented by rules, generalized over their parameters*)
 fun add_asms(ss,thm,a,anet,ats,cs) =
     let val As = strip_varify(nth_subgoal i thm, a, []);
-        val thms = map (Thm.trivial o Thm.global_cterm_of(Thm.theory_of_thm thm)) As;
+        val thms = map (Thm.trivial o Thm.cterm_of ctxt) As;
         val new_rws = maps mk_rew_rules thms;
         val rwrls = map mk_trans (maps mk_rew_rules thms);
         val anet' = fold_rev lhs_insert_thm rwrls anet;
-    in  if !tracing andalso not(null new_rws)
-        then writeln (cat_lines
-          ("Adding rewrites:" :: map Display.string_of_thm_without_context new_rws))
-        else ();
-        (ss,thm,anet',anet::ats,cs)
-    end;
+    in (ss,thm,anet',anet::ats,cs) end;
 
 fun rew(seq,thm,ss,anet,ats,cs, more) = case Seq.pull seq of
       SOME(thm',seq') =>
             let val n = (Thm.nprems_of thm') - (Thm.nprems_of thm)
-            in pr_rew(i,n,thm,thm',more);
+            in pr_rew ctxt (i,n,thm,thm',more);
                if n=0 then (SIMP_LHS::ss, thm', anet, ats, cs)
                else ((replicate n PROVE) @ (POP_CS::SIMP_LHS::ss),
                      thm', anet, ats, (ss,thm,anet,ats,seq',more)::cs)
@@ -466,7 +451,7 @@
     | NONE => let val (ss0,thm0,anet0,ats0,seq,more)::cs0 = cs
               in if !tracing
                  then (writeln"*** Failed to prove precondition. Normal form:";
-                       pr_goal_concl i thm;  writeln"")
+                       pr_goal_concl ctxt i thm;  writeln"")
                  else ();
                  rew(seq,thm0,ss0,anet0,ats0,cs0,more)
               end;
@@ -495,30 +480,30 @@
 in exec(ss, thm, Net.empty, [], []) end;
 
 
-fun EXEC_TAC(ss,fl) (SS{auto_tac,cong_net,simp_net,...}) =
+fun EXEC_TAC ctxt (ss,fl) (SS{auto_tac,cong_net,simp_net,...}) =
 let val cong_tac = net_tac cong_net
 in fn i =>
     (fn thm =>
      if i <= 0 orelse Thm.nprems_of thm < i then Seq.empty
-     else Seq.single(execute(ss,fl,auto_tac,cong_tac,simp_net,i,thm)))
+     else Seq.single(execute ctxt (ss,fl,auto_tac,cong_tac,simp_net,i,thm)))
     THEN TRY(auto_tac i)
 end;
 
-val SIMP_TAC = EXEC_TAC([MK_EQ,SIMP_LHS,REFL,STOP],false);
-val SIMP_CASE_TAC = EXEC_TAC([MK_EQ,SIMP_LHS,IF,REFL,STOP],false);
+fun SIMP_TAC ctxt = EXEC_TAC ctxt ([MK_EQ,SIMP_LHS,REFL,STOP],false);
+fun SIMP_CASE_TAC ctxt = EXEC_TAC ctxt ([MK_EQ,SIMP_LHS,IF,REFL,STOP],false);
 
-val ASM_SIMP_TAC = EXEC_TAC([ASMS(0),MK_EQ,SIMP_LHS,REFL,STOP],false);
-val ASM_SIMP_CASE_TAC = EXEC_TAC([ASMS(0),MK_EQ,SIMP_LHS,IF,REFL,STOP],false);
+fun ASM_SIMP_TAC ctxt = EXEC_TAC ctxt ([ASMS(0),MK_EQ,SIMP_LHS,REFL,STOP],false);
+fun ASM_SIMP_CASE_TAC ctxt = EXEC_TAC ctxt ([ASMS(0),MK_EQ,SIMP_LHS,IF,REFL,STOP],false);
 
-val SIMP_CASE2_TAC = EXEC_TAC([MK_EQ,SIMP_LHS,IF,REFL,STOP],true);
+fun SIMP_CASE2_TAC ctxt = EXEC_TAC ctxt ([MK_EQ,SIMP_LHS,IF,REFL,STOP],true);
 
-fun REWRITE (ss,fl) (SS{auto_tac,cong_net,simp_net,...}) =
+fun REWRITE ctxt (ss,fl) (SS{auto_tac,cong_net,simp_net,...}) =
 let val cong_tac = net_tac cong_net
 in fn thm => let val state = thm RSN (2,red1)
-             in execute(ss,fl,auto_tac,cong_tac,simp_net,1,state) end
+             in execute ctxt (ss,fl,auto_tac,cong_tac,simp_net,1,state) end
 end;
 
-val SIMP_THM = REWRITE ([ASMS(0),SIMP_LHS,IF,REFL,STOP],false);
+fun SIMP_THM ctxt = REWRITE ctxt ([ASMS(0),SIMP_LHS,IF,REFL,STOP],false);
 
 
 (* Compute Congruence rules for individual constants using the substition
--- a/src/FOLP/simpdata.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/FOLP/simpdata.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -78,9 +78,9 @@
 
 val auto_ss = empty_ss setauto ares_tac [@{thm TrueI}];
 
-val IFOLP_ss = auto_ss addcongs FOLP_congs addrews IFOLP_rews;
+val IFOLP_ss = auto_ss addcongs FOLP_congs |> fold (addrew @{context}) IFOLP_rews;
 
 
 val FOLP_rews = IFOLP_rews @ @{thms cla_rews};
 
-val FOLP_ss = auto_ss addcongs FOLP_congs addrews FOLP_rews;
+val FOLP_ss = auto_ss addcongs FOLP_congs |> fold (addrew @{context}) FOLP_rews;
--- a/src/HOL/Decision_Procs/approximation.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Decision_Procs/approximation.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -78,9 +78,9 @@
                |> HOLogic.mk_list @{typ nat}
                |> Thm.cterm_of ctxt
      in
-       (rtac (Thm.instantiate ([], [(@{cpat "?n::nat"}, n),
-                                   (@{cpat "?prec::nat"}, p),
-                                   (@{cpat "?ss::nat list"}, s)])
+       (rtac (Thm.instantiate ([], [((("n", 0), @{typ nat}), n),
+                                   ((("prec", 0), @{typ nat}), p),
+                                   ((("ss", 0), @{typ "nat list"}), s)])
             @{thm "approx_form"}) i
         THEN simp_tac (put_simpset (simpset_of @{context}) ctxt) i) st
      end
@@ -95,9 +95,9 @@
        val s = vs |> map lookup_splitting |> hd
             |> Thm.cterm_of ctxt
      in
-       rtac (Thm.instantiate ([], [(@{cpat "?s::nat"}, s),
-                                   (@{cpat "?t::nat"}, t),
-                                   (@{cpat "?prec::nat"}, p)])
+       rtac (Thm.instantiate ([], [((("s", 0), @{typ nat}), s),
+                                   ((("t", 0), @{typ nat}), t),
+                                   ((("prec", 0), @{typ nat}), p)])
             @{thm "approx_tse_form"}) i st
      end
   end
--- a/src/HOL/Decision_Procs/ferrante_rackoff.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Decision_Procs/ferrante_rackoff.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -58,6 +58,7 @@
    funpow 2 (Thm.dest_arg o snd o Thm.dest_abs NONE)
      (funpow 4 Thm.dest_arg (Thm.cprop_of (hd minf)))
    |> Thm.dest_binop |> apply2 Thm.dest_binop |> apfst (apply2 Thm.dest_fun)
+   |> apply2 (apply2 (dest_Var o Thm.term_of))
 
  fun myfwd (th1, th2, th3, th4, th5) p1 p2
       [(th_1,th_2,th_3,th_4,th_5), (th_1',th_2',th_3',th_4',th_5')] =
@@ -73,7 +74,7 @@
   in (fw true th1 th_1 th_1', fw false th2 th_2 th_2',
       fw true th3 th_3 th_3', fw false th4 th_4 th_4', fw true th5 th_5 th_5')
   end
- val U_v = (Thm.dest_arg o Thm.dest_arg o Thm.dest_arg1) (Thm.cprop_of qe)
+ val U_v = dest_Var (Thm.term_of (Thm.dest_arg (Thm.dest_arg (Thm.dest_arg1 (Thm.cprop_of qe)))))
  fun main vs p =
   let
    val ((xn,ce),(x,fm)) = (case Thm.term_of p of
--- a/src/HOL/Eisbach/eisbach_rule_insts.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Eisbach/eisbach_rule_insts.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -9,14 +9,13 @@
 Drule.cterm_instantiate.
 *)
 
-structure Eisbach_Rule_Insts : sig end =
+structure Eisbach_Rule_Insts: sig end =
 struct
 
 fun restore_tags thm = Thm.map_tags (K (Thm.get_tags thm));
 
-fun add_thm_insts thm =
+fun add_thm_insts ctxt thm =
   let
-    val thy = Thm.theory_of_thm thm;
     val tyvars = Thm.fold_terms Term.add_tvars thm [];
     val tyvars' = tyvars |> map (Logic.mk_term o Logic.mk_type o TVar);
 
@@ -24,7 +23,7 @@
     val tvars' = tvars  |> map (Logic.mk_term o Var);
 
     val conj =
-      Logic.mk_conjunction_list (tyvars' @ tvars') |> Thm.global_cterm_of thy |> Drule.mk_term;
+      Logic.mk_conjunction_list (tyvars' @ tvars') |> Thm.cterm_of ctxt |> Drule.mk_term;
   in
     ((tyvars, tvars), Conjunction.intr thm conj)
   end;
@@ -48,19 +47,17 @@
     (thm', insts')
   end;
 
-fun instantiate_xis insts thm =
+fun instantiate_xis ctxt insts thm =
   let
     val tyvars = Thm.fold_terms Term.add_tvars thm [];
     val tvars = Thm.fold_terms Term.add_vars thm [];
-    val cert = Thm.global_cterm_of (Thm.theory_of_thm thm);
-    val certT = Thm.global_ctyp_of (Thm.theory_of_thm thm);
 
     fun add_inst (xi, t) (Ts, ts) =
       (case AList.lookup (op =) tyvars xi of
-        SOME S => ((certT (TVar (xi, S)), certT (Logic.dest_type t)) :: Ts, ts)
+        SOME S => (((xi, S), Thm.ctyp_of ctxt (Logic.dest_type t)) :: Ts, ts)
       | NONE =>
           (case AList.lookup (op =) tvars xi of
-            SOME T => (Ts, (cert (Var (xi, T)), cert t) :: ts)
+            SOME T => (Ts, (Thm.cterm_of ctxt (Var (xi, T)), Thm.cterm_of ctxt t) :: ts)
           | NONE => error "indexname not found in thm"));
 
     val (cTinsts, cinsts) = fold add_inst insts ([], []);
@@ -187,7 +184,7 @@
       let
         val insts' = map (fn ((v, t), _) => ((v, Position.none), t)) insts;
 
-        val (thm_insts, thm') = add_thm_insts thm
+        val (thm_insts, thm') = add_thm_insts ctxt thm;
         val (thm'', thm_insts') =
           Rule_Insts.where_rule ctxt insts' fixes thm'
           |> get_thm_insts;
@@ -215,8 +212,9 @@
         val (ts', ctxt'') = Variable.import_terms false ts ctxt';
         val ts'' = Variable.export_terms ctxt'' ctxt ts';
         val insts' = ListPair.zip (xis, ts'');
-      in instantiate_xis insts' thm end
-  | read_instantiate_closed _ (Term_Insts insts) thm = instantiate_xis insts thm;
+      in instantiate_xis ctxt insts' thm end
+  | read_instantiate_closed ctxt (Term_Insts insts) thm =
+      instantiate_xis ctxt insts thm;
 
 val _ =
   Theory.setup
--- a/src/HOL/Eisbach/match_method.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Eisbach/match_method.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -440,10 +440,9 @@
 
 val focus_schematics = #2 o Focus_Data.get;
 
-fun add_focus_schematics cterms =
+fun add_focus_schematics schematics =
   (Focus_Data.map o @{apply 3(2)})
-    (fold (fn (Var (xi, T), t) => Vartab.update_new (xi, (T, t)))
-      (map (apply2 Thm.term_of) cterms));
+    (fold (fn ((xi, T), ct) => Vartab.update_new (xi, (T, Thm.term_of ct))) schematics);
 
 
 (* focus params *)
--- a/src/HOL/Import/import_rule.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Import/import_rule.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -157,15 +157,12 @@
     meta_mp i th4
   end
 
-fun freezeT thm =
+fun freezeT thy thm =
   let
     val tvars = Term.add_tvars (Thm.prop_of thm) []
     val tfrees = map (fn ((t, _), s) => TFree (t, s)) tvars
-    val tvars = map TVar tvars
-    val thy = Thm.theory_of_thm thm
-    fun inst ty = Thm.global_ctyp_of thy ty
   in
-    Thm.instantiate ((map inst tvars ~~ map inst tfrees), []) thm
+    Thm.instantiate ((tvars ~~ map (Thm.global_ctyp_of thy) tfrees), []) thm
   end
 
 fun def' constname rhs thy =
@@ -177,7 +174,7 @@
     val eq = Logic.mk_equals (Const (Sign.full_name thy1 constbinding, typ), rhs)
     val (thms, thy2) = Global_Theory.add_defs false
       [((Binding.suffix_name "_hldef" constbinding, eq), [])] thy1
-    val def_thm = freezeT (hd thms)
+    val def_thm = freezeT thy1 (hd thms)
   in
     (meta_eq_to_obj_eq def_thm, thy2)
   end
@@ -225,7 +222,7 @@
      Typedef.add_typedef_global false (Binding.name tycname, map (rpair dummyS) tnames, NoSyn) c
        (SOME (Binding.name rep_name, Binding.name abs_name)) (fn _ => rtac th2 1) thy
     val aty = #abs_type (#1 typedef_info)
-    val th = freezeT (#type_definition (#2 typedef_info))
+    val th = freezeT thy' (#type_definition (#2 typedef_info))
     val (th_s, _) = Thm.dest_comb (Thm.dest_arg (Thm.cprop_of th))
     val (th_s, abst) = Thm.dest_comb th_s
     val rept = Thm.dest_arg th_s
@@ -264,11 +261,12 @@
     val tys_before = Term.add_tfrees (Thm.prop_of th) []
     val th1 = Thm.varifyT_global th
     val tys_after = Term.add_tvars (Thm.prop_of th1) []
-    val tyinst = map2 (fn bef => fn iS =>
-       (case try (assoc (TFree bef)) lambda of
-              SOME cty => (Thm.global_ctyp_of thy (TVar iS), cty)
-            | NONE => (Thm.global_ctyp_of thy (TVar iS), Thm.global_ctyp_of thy (TFree bef))
-       )) tys_before tys_after
+    val tyinst =
+      map2 (fn bef => fn iS =>
+        (case try (assoc (TFree bef)) lambda of
+          SOME cty => (iS, cty)
+        | NONE => (iS, Thm.global_ctyp_of thy (TFree bef))))
+      tys_before tys_after
   in
     Thm.instantiate (tyinst,[]) th1
   end
@@ -334,9 +332,7 @@
     val tns = map (fn (_, _) => "'") tvs
     val nms = fst (fold_map Name.variant tns (Variable.names_of ctxt))
     val vs = map TVar ((nms ~~ (map (snd o fst) tvs)) ~~ (map snd tvs))
-    val cvs = map (Thm.ctyp_of ctxt) vs
-    val ctvs = map (Thm.ctyp_of ctxt) (map TVar tvs)
-    val thm' = Thm.instantiate ((ctvs ~~ cvs), []) thm
+    val thm' = Thm.instantiate ((tvs ~~ map (Thm.ctyp_of ctxt) vs), []) thm
   in
     snd (Global_Theory.add_thm ((binding, thm'), []) thy)
   end
@@ -372,7 +368,7 @@
       | process (thy, state) (#"M", [s]) =
           let
             val ctxt = Variable.set_body false (Proof_Context.init_global thy)
-            val thm = freezeT (Global_Theory.get_thm thy s)
+            val thm = freezeT thy (Global_Theory.get_thm thy s)
             val ((_, [th']), _) = Variable.import true [thm] ctxt
           in
             setth th' (thy, state)
--- a/src/HOL/Library/Old_SMT/old_smt_normalize.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/Old_SMT/old_smt_normalize.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -34,7 +34,7 @@
 
   fun inst f ct thm =
     let val cv = f (Drule.strip_imp_concl (Thm.cprop_of thm))
-    in Thm.instantiate ([], [(cv, ct)]) thm end
+    in Thm.instantiate ([], [(dest_Var (Thm.term_of cv), ct)]) thm end
 in
 
 fun instantiate_elim thm =
@@ -215,7 +215,7 @@
   fun insert_trigger_conv [] ct = Conv.all_conv ct
     | insert_trigger_conv ctss ct =
         let val (ctr, cp) = Thm.dest_binop (Thm.rhs_of trigger_eq) ||> rpair ct
-        in Thm.instantiate ([], [cp, (ctr, mk_trigger ctss)]) trigger_eq end
+        in Thm.instantiate ([], map (apfst (dest_Var o Thm.term_of)) [cp, (ctr, mk_trigger ctss)]) trigger_eq end
 
   fun infer_trigger_eq_conv outer_ctxt (ctxt, cvs) ct =
     let
@@ -298,7 +298,8 @@
   fun mk_weight_eq w =
     let val cv = Thm.dest_arg1 (Thm.rhs_of weight_eq)
     in
-      Thm.instantiate ([], [(cv, Numeral.mk_cnumber @{ctyp int} w)]) weight_eq
+      Thm.instantiate ([], [(dest_Var (Thm.term_of cv), Numeral.mk_cnumber @{ctyp int} w)])
+        weight_eq
     end
 
   fun add_weight_conv NONE _ = Conv.all_conv
--- a/src/HOL/Library/Old_SMT/old_smt_utils.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/Old_SMT/old_smt_utils.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -152,7 +152,7 @@
 val destT1 = hd o Thm.dest_ctyp
 val destT2 = hd o tl o Thm.dest_ctyp
 
-fun instTs cUs (cTs, ct) = Thm.instantiate_cterm (cTs ~~ cUs, []) ct
+fun instTs cUs (cTs, ct) = Thm.instantiate_cterm (map (dest_TVar o Thm.typ_of) cTs ~~ cUs, []) ct
 fun instT cU (cT, ct) = instTs [cU] ([cT], ct)
 fun instT' ct = instT (Thm.ctyp_of_cterm ct)
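
Where code already holds certified schematic variables (as in instTs above), the plain keys are recovered by destructing the underlying term or type. A hypothetical adapter in that style, not part of the changeset:

  fun tvar_of_ctyp cT = dest_TVar (Thm.typ_of cT);   (* ctyp of ?'a  ->  ((name, index), sort) *)
  fun var_of_cterm ct = dest_Var (Thm.term_of ct);   (* cterm of ?x  ->  ((name, index), type) *)

  (* old-style certified pairs adapted to the new interface *)
  fun instantiate_certified (cTs, cts) =
    Thm.instantiate (map (apfst tvar_of_ctyp) cTs, map (apfst var_of_cterm) cts);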
 
--- a/src/HOL/Library/Old_SMT/old_z3_proof_literals.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/Old_SMT/old_z3_proof_literals.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -195,7 +195,9 @@
   fun on_cprop f thm = f (Thm.cprop_of thm)
   fun precomp2 f g thm = (on_cprem 1 f thm, on_cprem 2 g thm, f, g, thm)
   fun comp2 (cv1, cv2, f, g, rule) thm1 thm2 =
-    Thm.instantiate ([], [(cv1, on_cprop f thm1), (cv2, on_cprop g thm2)]) rule
+    Thm.instantiate ([],
+      [(dest_Var (Thm.term_of cv1), on_cprop f thm1),
+       (dest_Var (Thm.term_of cv2), on_cprop g thm2)]) rule
     |> Old_Z3_Proof_Tools.discharge thm1 |> Old_Z3_Proof_Tools.discharge thm2
 
   fun d1 ct = Thm.dest_arg ct and d2 ct = Thm.dest_arg (Thm.dest_arg ct)
@@ -303,7 +305,7 @@
           |> (fn (nlit, plit) => nlit COMP (plit COMP contra_rule)))
     end
 
-  val falseE_v = Thm.dest_arg (Thm.dest_arg (Thm.cprop_of @{thm FalseE}))
+  val falseE_v = dest_Var (Thm.term_of (Thm.dest_arg (Thm.dest_arg (Thm.cprop_of @{thm FalseE}))))
   fun contra_right ct = Thm.instantiate ([], [(falseE_v, ct)]) @{thm FalseE}
 in
 fun contradict conj ct =
--- a/src/HOL/Library/Old_SMT/old_z3_proof_parser.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/Old_SMT/old_z3_proof_parser.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -109,7 +109,7 @@
     val max = fold (Integer.max o fst) vars 0
     val ns = fst (Variable.variant_fixes (replicate (max + 1) var_prefix) ctxt)
     fun mk (i, v) =
-      (v, Thm.cterm_of ctxt (Free (nth ns i, Thm.typ_of_cterm v)))
+      (dest_Var (Thm.term_of v), Thm.cterm_of ctxt (Free (nth ns i, Thm.typ_of_cterm v)))
   in map mk vars end
 
 fun close ctxt (ct, vars) =
@@ -161,7 +161,10 @@
         if null vars then ([], vars)
         else fold part vars ([], [])
 
-    in (Thm.instantiate_cterm ([], inst) ct, (maxidx', vars' @ all_vars)) end
+    in
+      (Thm.instantiate_cterm ([], map (apfst (dest_Var o Thm.term_of)) inst) ct,
+        (maxidx', vars' @ all_vars))
+    end
 in
 fun mk_fun f ts =
   let val (cts, (_, vars)) = fold_map prep ts (0, [])
--- a/src/HOL/Library/Old_SMT/old_z3_proof_reconstruction.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/Old_SMT/old_z3_proof_reconstruction.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -625,7 +625,10 @@
       val vs = frees_of ctxt (Thm.term_of ct)
       val (thm, ctxt') = f (fold_rev mk_forall vs ct) ctxt
       val vars_of = get_vars Term.add_vars Var (K true) ctxt'
-    in (Thm.instantiate ([], vars_of (Thm.prop_of thm) ~~ vs) thm, ctxt') end
+    in
+      (Thm.instantiate ([], map (dest_Var o Thm.term_of) (vars_of (Thm.prop_of thm)) ~~ vs) thm,
+        ctxt')
+    end
 
   val sk_rules = @{lemma
     "c = (SOME x. P x) ==> (EX x. P x) = P c"
--- a/src/HOL/Library/Old_SMT/old_z3_proof_tools.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/Old_SMT/old_z3_proof_tools.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -27,7 +27,7 @@
     (Proof.context -> cterm -> thm) -> cterm -> thm
 
   (*a faster COMP*)
-  type compose_data
+  type compose_data = cterm list * (cterm -> cterm list) * thm
   val precompose: (cterm -> cterm list) -> thm -> compose_data
   val precompose2: (cterm -> cterm * cterm) -> thm -> compose_data
   val compose: compose_data -> thm -> thm
@@ -257,11 +257,11 @@
 
 fun list2 (x, y) = [x, y]
 
-fun precompose f rule = (f (Thm.cprem_of rule 1), f, rule)
-fun precompose2 f rule = precompose (list2 o f) rule
+fun precompose f rule : compose_data = (f (Thm.cprem_of rule 1), f, rule)
+fun precompose2 f rule : compose_data = precompose (list2 o f) rule
 
 fun compose (cvs, f, rule) thm =
-  discharge thm (Thm.instantiate ([], cvs ~~ f (Thm.cprop_of thm)) rule)
+  discharge thm (Thm.instantiate ([], map (dest_Var o Thm.term_of) cvs ~~ f (Thm.cprop_of thm)) rule)
 
 
 
--- a/src/HOL/Library/cconv.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/cconv.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -182,7 +182,7 @@
           ((if n = 1 then fun_cconv else I) o arg_cconv) (prems_cconv (n-1) cv) ct
       | _ =>  cv ct)
 
-fun inst_imp_cong ct = Thm.instantiate ([], [(@{cpat "?A :: prop"}, ct)]) Drule.imp_cong
+fun inst_imp_cong ct = Thm.instantiate ([], [((("A", 0), propT), ct)]) Drule.imp_cong
 
 (*rewrite B in A1 \<Longrightarrow> ... \<Longrightarrow> An \<Longrightarrow> B*)
 fun concl_cconv 0 cv ct = cv ct
@@ -202,10 +202,10 @@
             NONE => (As, B)
           | SOME (A,B) => strip_prems (i - 1) (A::As) B
     val (prem, (prems, concl)) = ct |> Thm.dest_implies ||> strip_prems n [] 
-    val rewr_imp_concl = Thm.instantiate ([], [(@{cpat "?C :: prop"}, concl)]) @{thm rewr_imp}
+    val rewr_imp_concl = Thm.instantiate ([], [((("C", 0), propT), concl)]) @{thm rewr_imp}
     val th1 = cv prem RS rewr_imp_concl
     val nprems = Thm.nprems_of th1
-    fun inst_cut_rl ct = Thm.instantiate ([], [(@{cpat "?psi :: prop"}, ct)]) cut_rl
+    fun inst_cut_rl ct = Thm.instantiate ([], [((("psi", 0), propT), ct)]) cut_rl
     fun f p th = (th RS inst_cut_rl p)
       |> Conv.fconv_rule (Conv.concl_conv nprems (Conv.rewr_conv @{thm imp_cong_eq}))
   in fold f prems th1 end
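
In cconv.ML each @{cpat} antiquotation becomes the variable's indexname paired with its type. Using the same ("A", 0) key as inst_imp_cong above, a specialised congruence rule can now be obtained directly (a sketch; @{cprop True} merely serves as an example premise):

  val imp_cong_True =
    Thm.instantiate ([], [((("A", 0), propT), @{cprop True})]) Drule.imp_cong;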
--- a/src/HOL/Library/old_recdef.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/old_recdef.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -304,15 +304,15 @@
             (Pv, Dv,
              Sign.typ_match thy (Dty, ty) Vartab.empty)
           | _ => error "not a valid case thm";
-      val type_cinsts = map (fn (ixn, (S, T)) => apply2 (Thm.ctyp_of ctxt) (TVar (ixn, S), T))
+      val type_cinsts = map (fn (ixn, (S, T)) => ((ixn, S), Thm.ctyp_of ctxt T))
         (Vartab.dest type_insts);
-      val cPv = Thm.cterm_of ctxt (Envir.subst_term_types type_insts Pv);
-      val cDv = Thm.cterm_of ctxt (Envir.subst_term_types type_insts Dv);
+      val Pv = dest_Var (Envir.subst_term_types type_insts Pv);
+      val Dv = dest_Var (Envir.subst_term_types type_insts Dv);
     in
       Conv.fconv_rule Drule.beta_eta_conversion
          (case_thm
             |> Thm.instantiate (type_cinsts, [])
-            |> Thm.instantiate ([], [(cPv, abs_ct), (cDv, free_ct)]))
+            |> Thm.instantiate ([], [(Pv, abs_ct), (Dv, free_ct)]))
     end;
 
 
@@ -1124,11 +1124,11 @@
 local (* this is fragile *)
   val prop = Thm.prop_of spec
   val x = hd (tl (Misc_Legacy.term_vars prop))
-  val cTV = Thm.ctyp_of @{context} (type_of x)
+  val TV = dest_TVar (type_of x)
   val gspec = Thm.forall_intr (Thm.cterm_of @{context} x) spec
 in
 fun SPEC tm thm =
-   let val gspec' = Drule.instantiate_normalize ([(cTV, Thm.ctyp_of_cterm tm)], []) gspec
+   let val gspec' = Drule.instantiate_normalize ([(TV, Thm.ctyp_of_cterm tm)], []) gspec
    in thm RS (Thm.forall_elim tm gspec') end
 end;
 
@@ -1141,11 +1141,11 @@
 local
   val prop = Thm.prop_of allI
   val [P] = Misc_Legacy.add_term_vars (prop, [])
-  fun cty_theta ctxt = map (fn (i, (S, ty)) => apply2 (Thm.ctyp_of ctxt) (TVar (i, S), ty))
+  fun cty_theta ctxt = map (fn (i, (S, ty)) => ((i, S), Thm.ctyp_of ctxt ty))
   fun ctm_theta ctxt =
     map (fn (i, (_, tm2)) =>
       let val ctm2 = Thm.cterm_of ctxt tm2
-      in (Thm.cterm_of ctxt (Var (i, Thm.typ_of_cterm ctm2)), ctm2) end)
+      in ((i, Thm.typ_of_cterm ctm2), ctm2) end)
   fun certify ctxt (ty_theta,tm_theta) =
     (cty_theta ctxt (Vartab.dest ty_theta),
      ctm_theta ctxt (Vartab.dest tm_theta))
--- a/src/HOL/Library/positivstellensatz.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/positivstellensatz.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -483,7 +483,7 @@
     fun real_not_eq_conv ct =
       let
         val (l,r) = dest_eq (Thm.dest_arg ct)
-        val th = Thm.instantiate ([],[(@{cpat "?x::real"},l),(@{cpat "?y::real"},r)]) neq_th
+        val th = Thm.instantiate ([],[((("x", 0), @{typ real}),l),((("y", 0), @{typ real}),r)]) neq_th
         val th_p = poly_conv(Thm.dest_arg(Thm.dest_arg1(Thm.rhs_of th)))
         val th_x = Drule.arg_cong_rule @{cterm "uminus :: real => _"} th_p
         val th_n = fconv_rule (arg_conv poly_neg_conv) th_x
@@ -729,9 +729,9 @@
       val pth_max = instantiate' [SOME @{ctyp real}] [] max_split
       val pth_min = instantiate' [SOME @{ctyp real}] [] min_split
       val abs_tm = @{cterm "abs :: real => _"}
-      val p_tm = @{cpat "?P :: real => bool"}
-      val x_tm = @{cpat "?x :: real"}
-      val y_tm = @{cpat "?y::real"}
+      val p_v = (("P", 0), @{typ "real \<Rightarrow> bool"})
+      val x_v = (("x", 0), @{typ real})
+      val y_v = (("y", 0), @{typ real})
       val is_max = is_binop @{cterm "max :: real => _"}
       val is_min = is_binop @{cterm "min :: real => _"}
       fun is_abs t = is_comb t andalso Thm.dest_fun t aconvc abs_tm
@@ -746,16 +746,16 @@
 
       val elim_abs = eliminate_construct is_abs
         (fn p => fn ax =>
-          Thm.instantiate ([], [(p_tm,p), (x_tm, Thm.dest_arg ax)]) pth_abs)
+          Thm.instantiate ([], [(p_v,p), (x_v, Thm.dest_arg ax)]) pth_abs)
       val elim_max = eliminate_construct is_max
         (fn p => fn ax =>
           let val (ax,y) = Thm.dest_comb ax
-          in Thm.instantiate ([], [(p_tm,p), (x_tm, Thm.dest_arg ax), (y_tm,y)])
+          in Thm.instantiate ([], [(p_v,p), (x_v, Thm.dest_arg ax), (y_v,y)])
                              pth_max end)
       val elim_min = eliminate_construct is_min
         (fn p => fn ax =>
           let val (ax,y) = Thm.dest_comb ax
-          in Thm.instantiate ([], [(p_tm,p), (x_tm, Thm.dest_arg ax), (y_tm,y)])
+          in Thm.instantiate ([], [(p_v,p), (x_v, Thm.dest_arg ax), (y_v,y)])
                              pth_min end)
     in first_conv [elim_abs, elim_max, elim_min, all_conv]
     end;
--- a/src/HOL/Library/rewrite.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Library/rewrite.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -265,15 +265,13 @@
   let
     fun instantiate_normalize_env ctxt env thm =
       let
-        fun certs f = map (apply2 (f ctxt))
         val prop = Thm.prop_of thm
         val norm_type = Envir.norm_type o Envir.type_env
         val insts = Term.add_vars prop []
-          |> map (fn x as (s,T) => (Var (s, norm_type env T), Envir.norm_term env (Var x)))
-          |> certs Thm.cterm_of
+          |> map (fn x as (s, T) =>
+              ((s, norm_type env T), Thm.cterm_of ctxt (Envir.norm_term env (Var x))))
         val tyinsts = Term.add_tvars prop []
-          |> map (fn x => (TVar x, norm_type env (TVar x)))
-          |> certs Thm.ctyp_of
+          |> map (fn x => (x, Thm.ctyp_of ctxt (norm_type env (TVar x))))
       in Drule.instantiate_normalize (tyinsts, insts) thm end
     
     fun unify_with_rhs context to env thm =
--- a/src/HOL/Matrix_LP/Compute_Oracle/linker.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Matrix_LP/Compute_Oracle/linker.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -315,8 +315,8 @@
     end
 
 fun conv_subst thy (subst : Type.tyenv) =
-    map (fn (iname, (sort, ty)) => (Thm.global_ctyp_of thy (TVar (iname, sort)), Thm.global_ctyp_of thy ty))
-      (Vartab.dest subst)
+  map (fn (iname, (sort, ty)) => ((iname, sort), Thm.global_ctyp_of thy ty))
+    (Vartab.dest subst)
 
 fun add_monos thy monocs pats ths =
     let
--- a/src/HOL/Multivariate_Analysis/normarith.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Multivariate_Analysis/normarith.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -258,8 +258,9 @@
 
 local
  val pth_zero = @{thm norm_zero}
- val tv_n = (Thm.ctyp_of_cterm o Thm.dest_arg o Thm.dest_arg1 o Thm.dest_arg o Thm.cprop_of)
-             pth_zero
+ val tv_n =
+  (dest_TVar o Thm.typ_of_cterm o Thm.dest_arg o Thm.dest_arg1 o Thm.dest_arg o Thm.cprop_of)
+    pth_zero
  val concl = Thm.dest_arg o Thm.cprop_of
  fun real_vector_combo_prover ctxt translator (nubs,ges,gts) =
   let
@@ -356,7 +357,7 @@
   val shs = filter (member (fn (t,th) => t aconvc Thm.cprop_of th) asl) (#hyps (Thm.crep_thm th1))
   val th11 = hd (Variable.export ctxt' ctxt [fold Thm.implies_intr shs th1])
   val cps = map (swap o Thm.dest_equals) (cprems_of th11)
-  val th12 = Drule.instantiate_normalize ([], cps) th11
+  val th12 = Drule.instantiate_normalize ([], map (apfst (dest_Var o Thm.term_of)) cps) th11
   val th13 = fold Thm.elim_implies (map (Thm.reflexive o snd) cps) th12;
  in hd (Variable.export ctxt' ctxt [th13])
  end
--- a/src/HOL/Mutabelle/mutabelle_extra.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Mutabelle/mutabelle_extra.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -332,7 +332,7 @@
 
 fun thms_of all thy =
   filter
-    (fn th => (all orelse Context.theory_name (Thm.theory_of_thm th) = Context.theory_name thy)
+    (fn th => (all orelse Thm.theory_name_of_thm th = Context.theory_name thy)
       (* andalso is_executable_thm thy th *))
     (map snd (filter_out is_forbidden_theorem (Global_Theory.all_thms_of thy false)))
 
--- a/src/HOL/Nitpick_Examples/Mono_Nits.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Nitpick_Examples/Mono_Nits.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -153,7 +153,7 @@
 fun theorems_of thy =
   filter (fn (name, th) =>
              not (is_forbidden_theorem name) andalso
-             (Thm.theory_of_thm th, thy) |> apply2 Context.theory_name |> op =)
+             Thm.theory_name_of_thm th = Context.theory_name thy)
          (Global_Theory.all_thms_of thy true)
 
 fun check_formulas tsp =
--- a/src/HOL/Nominal/nominal_datatype.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Nominal/nominal_datatype.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -1390,13 +1390,13 @@
       end);
 
     val induct_aux' = Thm.instantiate ([],
-      map (fn (s, v as Var (_, T)) =>
-        (Thm.global_cterm_of thy9 v, Thm.global_cterm_of thy9 (Free (s, T))))
+      map (fn (s, Var (v as (_, T))) =>
+        (v, Thm.global_cterm_of thy9 (Free (s, T))))
           (pnames ~~ map head_of (HOLogic.dest_conj
              (HOLogic.dest_Trueprop (Thm.concl_of induct_aux)))) @
       map (fn (_, f) =>
         let val f' = Logic.varify_global f
-        in (Thm.global_cterm_of thy9 f',
+        in (dest_Var f',
           Thm.global_cterm_of thy9 (Const (@{const_name Nominal.supp}, fastype_of f')))
         end) fresh_fs) induct_aux;
 
@@ -1562,7 +1562,7 @@
             (augment_sort thy1 pt_cp_sort
               (Logic.mk_implies (HOLogic.mk_Trueprop Q, HOLogic.mk_Trueprop P)))
             (fn {context = ctxt, ...} => dtac (Thm.instantiate ([],
-                 [(Thm.global_cterm_of thy11 (Var (("pi", 0), permT)),
+                 [((("pi", 0), permT),
                    Thm.global_cterm_of thy11 (Const (@{const_name rev}, permT --> permT) $ pi))]) th) 1 THEN
                NominalPermeq.perm_simp_tac (put_simpset HOL_ss ctxt) 1)) (ps ~~ ths)
       in (ths, ths') end) dt_atomTs);
@@ -1653,7 +1653,7 @@
                     SUBPROOF (fn {prems = prems', params = [(_, a), (_, b)], ...} => EVERY
                       [cut_facts_tac [rec_prem] 1,
                        rtac (Thm.instantiate ([],
-                         [(Thm.global_cterm_of thy11 (Var (("pi", 0), mk_permT aT)),
+                         [((("pi", 0), mk_permT aT),
                            Thm.global_cterm_of thy11
                             (perm_of_pair (Thm.term_of a, Thm.term_of b)))]) eqvt_th) 1,
                        asm_simp_tac (put_simpset HOL_ss context addsimps
@@ -1872,8 +1872,7 @@
                             val l = find_index (equal T) dt_atomTs;
                             val th = nth (nth rec_equiv_thms' l) k;
                             val th' = Thm.instantiate ([],
-                              [(Thm.global_cterm_of thy11 (Var (("pi", 0), U)),
-                                Thm.global_cterm_of thy11 p)]) th;
+                              [((("pi", 0), U), Thm.global_cterm_of thy11 p)]) th;
                           in rtac th' 1 end;
                         val th' = Goal.prove context'' [] []
                           (HOLogic.mk_Trueprop (S $ mk_pi x $ mk_pi y))
--- a/src/HOL/Nominal/nominal_fresh_fun.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Nominal/nominal_fresh_fun.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -31,7 +31,10 @@
 (* FIXME proper SUBGOAL/CSUBGOAL instead of cprems_of etc. *)
 
 fun res_inst_tac_term ctxt =
-  gen_res_inst_tac_term ctxt (curry Thm.instantiate);
+  gen_res_inst_tac_term ctxt (fn instT => fn inst =>
+    Thm.instantiate
+     (map (apfst (dest_TVar o Thm.typ_of)) instT,
+      map (apfst (dest_Var o Thm.term_of)) inst));
 
 fun res_inst_tac_term' ctxt =
   gen_res_inst_tac_term ctxt (K Drule.cterm_instantiate) [];
@@ -146,10 +149,10 @@
     fun inst_fresh vars params i st =
    let val vars' = Misc_Legacy.term_vars (Thm.prop_of st);
    in case subtract (op =) vars vars' of
-     [x] =>
+     [Var v] =>
       Seq.single
         (Thm.instantiate ([],
-          [apply2 (Thm.cterm_of ctxt) (x, fold_rev Term.abs params (Bound 0))]) st)
+          [(v, Thm.cterm_of ctxt (fold_rev Term.abs params (Bound 0)))]) st)
     | _ => error "fresh_fun_simp: Too many variables, please report."
    end
   in
--- a/src/HOL/Nominal/nominal_inductive.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Nominal/nominal_inductive.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -338,7 +338,8 @@
                  val pis'' = fold (concat_perm #> map) pis' pis;
                  val env = Pattern.first_order_match thy (ihypt, Thm.prop_of ihyp)
                    (Vartab.empty, Vartab.empty);
-                 val ihyp' = Thm.instantiate ([], map (apply2 (Thm.global_cterm_of thy))
+                 val ihyp' = Thm.instantiate ([],
+                   map (fn (v, t) => (dest_Var v, Thm.global_cterm_of thy t))
                    (map (Envir.subst_term env) vs ~~
                     map (fold_rev (NominalDatatype.mk_perm [])
                       (rev pis' @ pis)) params' @ [z])) ihyp;
--- a/src/HOL/Nominal/nominal_inductive2.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Nominal/nominal_inductive2.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -146,9 +146,7 @@
 fun inst_params thy (vs, p) th cts =
   let val env = Pattern.first_order_match thy (p, Thm.prop_of th)
     (Vartab.empty, Vartab.empty)
-  in Thm.instantiate ([],
-    map (Envir.subst_term env #> Thm.global_cterm_of thy) vs ~~ cts) th
-  end;
+  in Thm.instantiate ([], map (dest_Var o Envir.subst_term env) vs ~~ cts) th end;
 
 fun prove_strong_ind s alt_name avoids ctxt =
   let
--- a/src/HOL/Statespace/distinct_tree_prover.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Statespace/distinct_tree_prover.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -18,7 +18,7 @@
   val subtractProver : Proof.context -> term -> cterm -> thm -> thm
   val distinct_simproc : string list -> simproc
 
-  val discharge : thm list -> thm -> thm
+  val discharge : Proof.context -> thm list -> thm -> thm
 end;
 
 structure DistinctTreeProver : DISTINCT_TREE_PROVER =
@@ -95,7 +95,11 @@
     fun mapT_and_recertify ct =
       (Thm.cterm_of ctxt (Term.map_types (Term.map_type_tvar substT) (Thm.term_of ct)));
     val insts' = map (apfst mapT_and_recertify) insts;
-  in Thm.instantiate (instTs, insts') end;
+  in
+    Thm.instantiate
+     (map (apfst (dest_TVar o Thm.typ_of)) instTs,
+      map (apfst (dest_Var o Thm.term_of)) insts')
+  end;
 
 fun tvar_clash ixn S S' =
   raise TYPE ("Type variable has two distinct sorts", [TVar (ixn, S), TVar (ixn, S')], []);
@@ -140,18 +144,14 @@
   in mtch env (t, ct) end;
 
 
-fun discharge prems rule =
+fun discharge ctxt prems rule =
   let
-    val thy = Thm.theory_of_thm (hd prems);
     val (tyinsts,insts) =
       fold naive_cterm_first_order_match (Thm.prems_of rule ~~ map Thm.cprop_of prems) ([], []);
-
     val tyinsts' =
-      map (fn (v, (S, U)) =>
-        (Thm.global_ctyp_of thy (TVar (v, S)), Thm.global_ctyp_of thy U)) tyinsts;
+      map (fn (v, (S, U)) => ((v, S), Thm.ctyp_of ctxt U)) tyinsts;
     val insts' =
-      map (fn (idxn, ct) =>
-        (Thm.global_cterm_of thy (Var (idxn, Thm.typ_of_cterm ct)), ct)) insts;
+      map (fn (idxn, ct) => ((idxn, Thm.typ_of_cterm ct), ct)) insts;
     val rule' = Thm.instantiate (tyinsts', insts') rule;
   in fold Thm.elim_implies prems rule' end;
 
@@ -203,7 +203,7 @@
          let
            val rule =
             (case p of Left => @{thm all_distinct_left} | Right => @{thm all_distinct_right})
-         in dist_subtree ps (discharge [thm] rule) end;
+         in dist_subtree ps (discharge ctxt [thm] rule) end;
 
     val (ps, x_rest, y_rest) = split_common_prefix x_path y_path;
     val dist_subtree_thm = dist_subtree ps dist_thm;
@@ -227,7 +227,7 @@
                 instantiate ctxt
                   [(Thm.ctyp_of_cterm x_in_set_left, xT)]
                   [(x_in_set_left, x), (r_in_set_left, r)] @{thm in_set_left};
-            in discharge [in_set_l] in_set_left' end
+            in discharge ctxt [in_set_l] in_set_left' end
         | Right :: ps' =>
             let
               val in_set_r = in_set ps' r;
@@ -235,15 +235,17 @@
                 instantiate ctxt
                   [(Thm.ctyp_of_cterm x_in_set_right, xT)]
                   [(x_in_set_right, x), (l_in_set_right, l)] @{thm in_set_right};
-            in discharge [in_set_r] in_set_right' end)
+            in discharge ctxt [in_set_r] in_set_right' end)
       end;
 
   fun in_set' [] = raise TERM ("distinctTreeProver", [])
     | in_set' (Left :: ps) = in_set ps l
     | in_set' (Right :: ps) = in_set ps r;
 
-  fun distinct_lr node_in_set Left = discharge [dist_subtree_thm, node_in_set] @{thm distinct_left}
-    | distinct_lr node_in_set Right = discharge [dist_subtree_thm, node_in_set] @{thm distinct_right}
+  fun distinct_lr node_in_set Left =
+        discharge ctxt [dist_subtree_thm, node_in_set] @{thm distinct_left}
+    | distinct_lr node_in_set Right =
+        discharge ctxt [dist_subtree_thm, node_in_set] @{thm distinct_right}
 
   val (swap, neq) =
     (case x_rest of
@@ -262,22 +264,24 @@
             in
               (case xr of
                 Left =>
-                  (false, discharge [dist_subtree_thm, x_in_set, y_in_set] @{thm distinct_left_right})
+                  (false,
+                    discharge ctxt [dist_subtree_thm, x_in_set, y_in_set] @{thm distinct_left_right})
               | Right =>
-                  (true, discharge [dist_subtree_thm, y_in_set, x_in_set] @{thm distinct_left_right}))
+                  (true,
+                    discharge ctxt [dist_subtree_thm, y_in_set, x_in_set] @{thm distinct_left_right}))
            end));
-  in if swap then discharge [neq] @{thm swap_neq} else neq end;
+  in if swap then discharge ctxt [neq] @{thm swap_neq} else neq end;
 
 
-fun deleteProver dist_thm [] = @{thm delete_root} OF [dist_thm]
-  | deleteProver dist_thm (p::ps) =
+fun deleteProver _ dist_thm [] = @{thm delete_root} OF [dist_thm]
+  | deleteProver ctxt dist_thm (p::ps) =
       let
         val dist_rule =
           (case p of Left => @{thm all_distinct_left} | Right => @{thm all_distinct_right});
-        val dist_thm' = discharge [dist_thm] dist_rule;
+        val dist_thm' = discharge ctxt [dist_thm] dist_rule;
         val del_rule = (case p of Left => @{thm delete_left} | Right => @{thm delete_right});
-        val del = deleteProver dist_thm' ps;
-      in discharge [dist_thm, del] del_rule end;
+        val del = deleteProver ctxt dist_thm' ps;
+      in discharge ctxt [dist_thm, del] del_rule end;
 
 local
   val (alpha, v) =
@@ -286,7 +290,7 @@
         @{thm subtract_Tip} |> Thm.cprop_of |> Thm.dest_comb |> #2 |> Thm.dest_comb |> #2
         |> Thm.dest_comb |> #2;
       val [alpha] = ct |> Thm.ctyp_of_cterm |> Thm.dest_ctyp;
-    in (alpha, #1 (dest_Var (Thm.term_of ct))) end;
+    in (dest_TVar (Thm.typ_of alpha), #1 (dest_Var (Thm.term_of ct))) end;
 in
 
 fun subtractProver ctxt (Const (@{const_name Tip}, T)) ct dist_thm =
@@ -296,20 +300,20 @@
       in
         Thm.instantiate
           ([(alpha, Thm.ctyp_of ctxt alphaI)],
-           [(Thm.cterm_of ctxt (Var (v, treeT alphaI)), ct')]) @{thm subtract_Tip}
+           [((v, treeT alphaI), ct')]) @{thm subtract_Tip}
       end
   | subtractProver ctxt (Const (@{const_name Node}, nT) $ l $ x $ d $ r) ct dist_thm =
       let
         val ct' = dist_thm |> Thm.cprop_of |> Thm.dest_comb |> #2 |> Thm.dest_comb |> #2;
         val (_, [cl, _, _, cr]) = Drule.strip_comb ct;
         val ps = the (find_tree x (Thm.term_of ct'));
-        val del_tree = deleteProver dist_thm ps;
-        val dist_thm' = discharge [del_tree, dist_thm] @{thm delete_Some_all_distinct};
+        val del_tree = deleteProver ctxt dist_thm ps;
+        val dist_thm' = discharge ctxt [del_tree, dist_thm] @{thm delete_Some_all_distinct};
         val sub_l = subtractProver ctxt (Thm.term_of cl) cl (dist_thm');
         val sub_r =
           subtractProver ctxt (Thm.term_of cr) cr
-            (discharge [sub_l, dist_thm'] @{thm subtract_Some_all_distinct_res});
-      in discharge [del_tree, sub_l, sub_r] @{thm subtract_Node} end;
+            (discharge ctxt [sub_l, dist_thm'] @{thm subtract_Some_all_distinct_res});
+      in discharge ctxt [del_tree, sub_l, sub_r] @{thm subtract_Node} end;
 
 end;
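
discharge now takes the Proof.context explicitly instead of recovering a theory from its premises, and the match results are certified on the right-hand side only. The same idiom written as a stand-alone sketch (hypothetical helper name):

  fun instantiate_from_match ctxt (tyinsts, insts) rule =
    Thm.instantiate
      (map (fn (v, (S, U)) => ((v, S), Thm.ctyp_of ctxt U)) tyinsts,
       map (fn (xi, ct) => ((xi, Thm.typ_of_cterm ct), ct)) insts)
      rule;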
 
--- a/src/HOL/TPTP/TPTP_Parser/tptp_reconstruct.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_reconstruct.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -1029,12 +1029,14 @@
         val thy = Proof_Context.theory_of ctxt
         val pannot = get_pannot_of_prob thy prob_name
 
+(* FIXME !???!
         fun rtac_wrap thm_f i = fn st =>
           let
             val thy = Thm.theory_of_thm st
           in
             rtac (thm_f thy) i st
           end
+*)
 
         (*Some nodes don't have an inference name, such as the conjecture,
           definitions and axioms. Such nodes shouldn't appear in the
@@ -1063,14 +1065,7 @@
              let
                fun thm ctxt'' = prover_tac ctxt'' prob_name (hd skel |> stock_to_string)
                val reconstructed_inference = thm ctxt'
-               val rec_inf_tac = fn st =>
-                 let
-                   val ctxt =
-                     Thm.theory_of_thm st
-                     |> Proof_Context.init_global
-                 in
-                   HEADGOAL (rtac (thm ctxt)) st
-                 end
+               fun rec_inf_tac st = HEADGOAL (rtac (thm ctxt')) st
              in (reconstructed_inference,
                  rec_inf_tac)
              end)
@@ -1157,14 +1152,16 @@
                                 else
                                   let
                                     val maybe_thm = ignore_interpretation_exn try_make_step ctxt
+(* FIXME !???!
                                     val ctxt' =
                                       if is_some maybe_thm then
                                         the maybe_thm
                                         |> #1
                                         |> Thm.theory_of_thm |> Proof_Context.init_global
                                       else ctxt
+*)
                                   in
-                                    (maybe_thm, ctxt')
+                                    (maybe_thm, ctxt)
                                   end
                           in (thm, (node, thm) :: memo, ctxt') end
                       | SOME maybe_thm => (maybe_thm, memo, ctxt)
@@ -1183,8 +1180,7 @@
                   (hd skel,
                    Thm.prop_of (def_thm thy)
                    |> SOME,
-                   SOME (def_thm thy,
-                         HEADGOAL (rtac_wrap def_thm))) :: rest memo ctxt
+                   SOME (def_thm thy, HEADGOAL (rtac (def_thm thy)))) :: rest memo ctxt
                 end
             | Axiom n =>
                 let
--- a/src/HOL/TPTP/TPTP_Parser/tptp_reconstruct_library.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/TPTP_Parser/tptp_reconstruct_library.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -570,14 +570,14 @@
       diff (Proof_Context.theory_of ctxt) (scheme_t, instance_t)
 
     (*valuation of type variables*)
-    val typeval = map (apply2 (Thm.ctyp_of ctxt)) type_pairing
+    val typeval = map (fn (v, T) => (dest_TVar v, Thm.ctyp_of ctxt T)) type_pairing
 
     val typeval_env =
       map (apfst dest_TVar) type_pairing
     (*valuation of term variables*)
     val termval =
-      map (apfst (type_devar typeval_env)) term_pairing
-      |> map (apply2 (Thm.cterm_of ctxt))
+      map (apfst (dest_Var o type_devar typeval_env)) term_pairing
+      |> map (apsnd (Thm.cterm_of ctxt))
   in
     Thm.instantiate (typeval, termval) scheme_thm
   end
--- a/src/HOL/TPTP/TPTP_Proof_Reconstruction.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/TPTP_Proof_Reconstruction.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -950,8 +950,8 @@
     val tactic =
       if is_none var_opt then no_tac
       else
-        fold (curry (op APPEND)) (map (instantiate_tac (the var_opt)) skolem_cts) no_tac
-
+        fold (curry (op APPEND))
+          (map (instantiate_tac (dest_Var (Thm.term_of (the var_opt)))) skolem_cts) no_tac
   in
     tactic st
   end
--- a/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -329,10 +329,8 @@
 *}
 
 ML {*
-(*given a thm, show us the axioms in its thy*)
-val axms_of_thy_of_thm =
-  Thm.theory_of_thm
-  #> ` Theory.axioms_of
+val axms_of_thy =
+  `Theory.axioms_of
   #> apsnd cterm_of
   #> swap
   #> apsnd (map snd)
@@ -351,14 +349,8 @@
   *}
 
 ML {*
-fun leo2_tac_wrap prob_name step i = fn st =>
-  let
-    val ctxt =
-      Thm.theory_of_thm st
-      |> Proof_Context.init_global
-  in
-    rtac (interpret_leo2_inference ctxt prob_name step) i st
-  end
+fun leo2_tac_wrap ctxt prob_name step i st =
+  rtac (interpret_leo2_inference ctxt prob_name step) i st
 *}
 
 (*FIXME move these examples elsewhere*)
--- a/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test_Units.thy	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/TPTP_Proof_Reconstruction_Test_Units.thy	Mon Jul 06 14:27:03 2015 +0200
@@ -7,7 +7,7 @@
   - Makes use of the PolyML structure.
 *)
 
-theory TPTP_Proof_Reconstruction_Test
+theory TPTP_Proof_Reconstruction_Test_Units
 imports TPTP_Test TPTP_Proof_Reconstruction
 begin
 
@@ -482,16 +482,7 @@
              (\<not> SEU602_2_bnd_in (SEU602_2_bnd_sK7_E SV49) SEU602_2_bnd_sK2_SY17)) =
             False"
 (*FIXME this (and similar) tests are getting the "Bad background theory of goal state" error since upgrading to Isabelle2013-2.*)
-by (tactic {*fn thm =>
-  let
-    val ctxt =
-      theory_of_thm thm
-      |> Context.Theory
-      |> Context.proof_of
-  in nonfull_extcnf_combined_tac ctxt [Extuni_Func, Existential_Var] thm
-  end*})
-(*by (tactic {*nonfull_extcnf_combined_tac @{context} [Extuni_Func, Existential_Var]*})*)
-oops
+by (tactic {*nonfull_extcnf_combined_tac @{context} [Extuni_Func, Existential_Var]*})
 
 consts
   SEV405_5_bnd_sK1_SY2 :: "(TPTP_Interpret.ind \<Rightarrow> bool) \<Rightarrow> TPTP_Interpret.ind"
--- a/src/HOL/TPTP/atp_theory_export.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/atp_theory_export.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -166,7 +166,7 @@
     val facts =
       Sledgehammer_Fact.all_facts (Proof_Context.init_global thy) true false
                                   Keyword.empty_keywords [] [] css_table
-      |> sort (Sledgehammer_MaSh.crude_thm_ord o apply2 snd)
+      |> sort (Sledgehammer_MaSh.crude_thm_ord ctxt o apply2 snd)
     val problem =
       facts
       |> map (fn ((_, loc), th) =>
--- a/src/HOL/TPTP/mash_eval.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/mash_eval.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -54,7 +54,7 @@
 
     val css = clasimpset_rule_table_of ctxt
     val facts = all_facts ctxt true false Keyword.empty_keywords [] [] css
-    val name_tabs = build_name_tables nickname_of_thm facts
+    val name_tabs = build_name_tables (nickname_of_thm ctxt) facts
 
     fun with_index facts s = (find_index (curry (op =) s) facts + 1, s)
     fun index_str (j, s) = s ^ "@" ^ string_of_int j
@@ -90,12 +90,12 @@
               [name] => name
             | names => error ("Input files out of sync: facts " ^ commas (map quote names) ^ "."))
           val th =
-            case find_first (fn (_, th) => nickname_of_thm th = name) facts of
+            case find_first (fn (_, th) => nickname_of_thm ctxt th = name) facts of
               SOME (_, th) => th
             | NONE => error ("No fact called \"" ^ name ^ "\".")
           val goal = goal_of_thm (Proof_Context.theory_of ctxt) th
           val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal 1 ctxt
-          val isar_deps = these (isar_dependencies_of name_tabs th)
+          val isar_deps = these (isar_dependencies_of ctxt name_tabs th)
           val suggss = isar_deps :: suggss0
           val facts = facts |> filter (fn (_, th') => thm_less (th', th))
 
@@ -116,7 +116,7 @@
             else
               let
                 fun nickify ((_, stature), th) =
-                  ((K (encode_str (nickname_of_thm th)), stature), th)
+                  ((K (encode_str (nickname_of_thm ctxt th)), stature), th)
 
                 val facts =
                   suggs
--- a/src/HOL/TPTP/mash_export.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/TPTP/mash_export.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -58,14 +58,14 @@
   | _ => ("", []))
 
 fun has_thm_thy th thy =
-  Context.theory_name thy = Context.theory_name (Thm.theory_of_thm th)
+  Context.theory_name thy = Thm.theory_name_of_thm th
 
 fun has_thys thys th = exists (has_thm_thy th) thys
 
 fun all_facts ctxt =
   let val css = Sledgehammer_Fact.clasimpset_rule_table_of ctxt in
     Sledgehammer_Fact.all_facts ctxt true false Keyword.empty_keywords [] [] css
-    |> sort (crude_thm_ord o apply2 snd)
+    |> sort (crude_thm_ord ctxt o apply2 snd)
   end
 
 fun filter_accessible_from th = filter (fn (_, th') => thm_less (th', th))
@@ -82,8 +82,8 @@
     val facts =
       all_facts ctxt
       |> filter_out (has_thys thys o snd)
-      |> attach_parents_to_facts []
-      |> map (apsnd (nickname_of_thm o snd))
+      |> attach_parents_to_facts ctxt []
+      |> map (apsnd (nickname_of_thm ctxt o snd))
   in
     File.write path "";
     List.app do_fact facts
@@ -97,8 +97,8 @@
 
     fun do_fact ((_, stature), th) =
       let
-        val name = nickname_of_thm th
-        val feats = features_of ctxt (Thm.theory_of_thm th) stature [Thm.prop_of th]
+        val name = nickname_of_thm ctxt th
+        val feats = features_of ctxt (Thm.theory_name_of_thm th) stature [Thm.prop_of th]
         val s = encode_str name ^ ": " ^ encode_strs (sort string_ord feats) ^ "\n"
       in
         File.append path s
@@ -124,7 +124,7 @@
         let
           val deps =
             (case isar_deps_opt of
-              NONE => isar_dependencies_of name_tabs th
+              NONE => isar_dependencies_of ctxt name_tabs th
             | deps => deps)
         in
           (case deps of
@@ -142,12 +142,12 @@
   let
     val path = Path.explode file_name
     val facts = filter_out (has_thys thys o snd) (all_facts ctxt)
-    val name_tabs = build_name_tables nickname_of_thm facts
+    val name_tabs = build_name_tables (nickname_of_thm ctxt) facts
 
     fun do_fact (j, (_, th)) =
       if in_range range j then
         let
-          val name = nickname_of_thm th
+          val name = nickname_of_thm ctxt th
           val _ = tracing ("Fact " ^ string_of_int j ^ ": " ^ name)
           val access_facts = filter_accessible_from th facts
           val (marker, deps) = smart_dependencies_of ctxt params_opt access_facts name_tabs th NONE
@@ -180,22 +180,22 @@
     val path = Path.explode file_name
     val facts = all_facts ctxt
     val (new_facts, old_facts) = facts |> List.partition (has_thys thys o snd)
-    val name_tabs = build_name_tables nickname_of_thm facts
+    val name_tabs = build_name_tables (nickname_of_thm ctxt) facts
 
     fun do_fact (j, (name, (parents, ((_, stature), th)))) =
       if in_range range j then
         let
           val _ = tracing ("Fact " ^ string_of_int j ^ ": " ^ name)
-          val isar_deps = isar_dependencies_of name_tabs th
+          val isar_deps = isar_dependencies_of ctxt name_tabs th
           val do_query = not (is_bad_query ctxt ho_atp step j th isar_deps)
-          val goal_feats = features_of ctxt (Thm.theory_of_thm th) stature [Thm.prop_of th]
+          val goal_feats = features_of ctxt (Thm.theory_name_of_thm th) stature [Thm.prop_of th]
           val access_facts = filter_accessible_from th new_facts @ old_facts
           val (marker, deps) =
             smart_dependencies_of ctxt params_opt access_facts name_tabs th isar_deps
 
           fun extra_features_of (((_, stature), th), weight) =
             [Thm.prop_of th]
-            |> features_of ctxt (Thm.theory_of_thm th) stature
+            |> features_of ctxt (Thm.theory_name_of_thm th) stature
             |> map (rpair (weight * extra_feature_factor))
 
           val query =
@@ -224,7 +224,9 @@
         ""
 
     val new_facts =
-      new_facts |> attach_parents_to_facts old_facts |> map (`(nickname_of_thm o snd o snd))
+      new_facts
+      |> attach_parents_to_facts ctxt old_facts
+      |> map (`(nickname_of_thm ctxt o snd o snd))
     val lines = map do_fact (tag_list 1 new_facts)
   in
     File.write_list path lines
@@ -243,16 +245,16 @@
     val path = Path.explode file_name
     val facts = all_facts ctxt
     val (new_facts, old_facts) = facts |> List.partition (has_thys thys o snd)
-    val name_tabs = build_name_tables nickname_of_thm facts
+    val name_tabs = build_name_tables (nickname_of_thm ctxt) facts
 
     fun do_fact (j, ((_, th), old_facts)) =
       if in_range range j then
         let
-          val name = nickname_of_thm th
+          val name = nickname_of_thm ctxt th
           val _ = tracing ("Fact " ^ string_of_int j ^ ": " ^ name)
           val goal = goal_of_thm (Proof_Context.theory_of ctxt) th
           val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal 1 ctxt
-          val isar_deps = isar_dependencies_of name_tabs th
+          val isar_deps = isar_dependencies_of ctxt name_tabs th
         in
           if is_bad_query ctxt ho_atp step j th isar_deps then
             ""
@@ -261,9 +263,9 @@
               val suggs =
                 old_facts
                 |> filter_accessible_from th
-                |> mepo_or_mash_suggested_facts ctxt (Thm.theory_of_thm th) params max_suggs hyp_ts
-                  concl_t
-                |> map (nickname_of_thm o snd)
+                |> mepo_or_mash_suggested_facts ctxt (Thm.theory_name_of_thm th)
+                  params max_suggs hyp_ts concl_t
+                |> map (nickname_of_thm ctxt o snd)
             in
               encode_str name ^ ": " ^ encode_strs suggs ^ "\n"
             end
@@ -288,11 +290,11 @@
   (Options.put_default @{system_option MaSh} algorithm;
    Sledgehammer_MaSh.mash_unlearn ();
    generate_mepo_or_mash_suggestions
-     (fn ctxt => fn thy => fn params as {provers = prover :: _, ...} => fn max_suggs => fn hyp_ts =>
-          fn concl_t =>
+     (fn ctxt => fn thy_name => fn params as {provers = prover :: _, ...} =>
+          fn max_suggs => fn hyp_ts => fn concl_t =>
         tap (Sledgehammer_MaSh.mash_learn_facts ctxt params prover 2 false
           Sledgehammer_Util.one_year)
-        #> Sledgehammer_MaSh.mash_suggested_facts ctxt thy params max_suggs hyp_ts concl_t
+        #> Sledgehammer_MaSh.mash_suggested_facts ctxt thy_name params max_suggs hyp_ts concl_t
         #> fst))
 
 fun generate_mesh_suggestions max_suggs mash_file_name mepo_file_name mesh_file_name =
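
The MaSh changes above replace Thm.theory_of_thm by the lightweight Thm.theory_name_of_thm, avoiding the background-theory value altogether. The resulting membership test, written out as a small sketch (hypothetical helper name):

  fun is_defined_in thy th =
    Thm.theory_name_of_thm th = Context.theory_name thy;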
--- a/src/HOL/Tools/BNF/bnf_fp_util.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/BNF/bnf_fp_util.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -607,9 +607,9 @@
   let
     val n = Thm.nprems_of coind;
     val m = Thm.nprems_of (hd rel_monos) - n;
-    fun mk_inst phi = (phi, mk_union (phi, HOLogic.eq_const (fst (dest_pred2T (fastype_of phi)))))
-      |> apply2 (Thm.cterm_of ctxt);
-    val insts = Term.add_vars (Thm.prop_of coind) [] |> rev |> take n |> map (mk_inst o Var);
+    fun mk_inst phi =
+      (phi, Thm.cterm_of ctxt (mk_union (Var phi, HOLogic.eq_const (fst (dest_pred2T (#2 phi))))))
+    val insts = Term.add_vars (Thm.prop_of coind) [] |> rev |> take n |> map mk_inst;
     fun mk_unfold rel_eq rel_mono =
       let
         val eq = iffD2 OF [rel_eq RS @{thm predicate2_eqD}, refl];
--- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -685,7 +685,9 @@
       let
         val ((inject_thms, inject_thmss), half_distinct_thmss) = chop n thmss |>> `flat;
 
-        val rho_As = map (apply2 (Thm.ctyp_of lthy)) (map Logic.varifyT_global As ~~ As);
+        val rho_As =
+          map (fn (T, U) => (dest_TVar T, Thm.ctyp_of lthy U))
+            (map Logic.varifyT_global As ~~ As);
 
         fun inst_thm t thm =
           Drule.instantiate' [] [SOME (Thm.cterm_of lthy t)]
--- a/src/HOL/Tools/Function/function.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Function/function.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -98,7 +98,7 @@
 
     fun afterqed [[proof]] lthy =
       let
-        val result = cont (Thm.close_derivation proof)
+        val result = cont lthy (Thm.close_derivation proof)
         val FunctionResult {fs, R, dom, psimps, simple_pinducts,
                 termination, domintros, cases, ...} = result
 
--- a/src/HOL/Tools/Function/function_core.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Function/function_core.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -15,7 +15,7 @@
     -> local_theory
     -> (term   (* f *)
         * thm  (* goalstate *)
-        * (thm -> Function_Common.function_result) (* continuation *)
+        * (Proof.context -> thm -> Function_Common.function_result) (* continuation *)
        ) * local_theory
 
 end
@@ -893,11 +893,8 @@
       (prove_stuff lthy globals G f R xclauses complete compat
          compat_store G_elim) f_defthm
 
-    fun mk_partial_rules provedgoal =
+    fun mk_partial_rules newctxt provedgoal =
       let
-        val newthy = Thm.theory_of_thm provedgoal (*FIXME*)
-        val newctxt = Proof_Context.init_global newthy (*FIXME*)
-
         val (graph_is_function, complete_thm) =
           provedgoal
           |> Conjunction.elim
--- a/src/HOL/Tools/Function/mutual.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Function/mutual.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -12,7 +12,7 @@
     -> term list
     -> local_theory
     -> ((thm (* goalstate *)
-        * (thm -> Function_Common.function_result) (* proof continuation *)
+        * (Proof.context -> thm -> Function_Common.function_result) (* proof continuation *)
        ) * local_theory)
 end
 
@@ -310,7 +310,7 @@
 
     val (mutual', lthy'') = define_projections fixes mutual fsum lthy'
 
-    val cont' = mk_partial_rules_mutual lthy'' cont mutual'
+    fun cont' ctxt = mk_partial_rules_mutual lthy'' (cont ctxt) mutual'
   in
     ((goalstate, cont'), lthy'')
   end
--- a/src/HOL/Tools/Function/partial_function.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Function/partial_function.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -200,7 +200,8 @@
     val (P_var, x_var) =
        Thm.prop_of inst_rule |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop
       |> strip_comb |> apsnd hd
-    val P_rangeT = range_type (snd (Term.dest_Var P_var))
+      |> apply2 dest_Var
+    val P_rangeT = range_type (snd P_var)
     val PT = map (snd o dest_Free) args ---> P_rangeT
     val x_inst = cert (foldl1 HOLogic.mk_prod args)
     val P_inst = cert (uncurry_n (length args) (Free (P, PT)))
@@ -211,7 +212,7 @@
          THEN Simplifier.simp_tac (put_simpset curry_uncurry_ss ctxt) 3
          THEN CONVERSION (split_params_conv ctxt
            then_conv (Conv.forall_conv (K split_paired_all_conv) ctxt)) 3)
-      |> Thm.instantiate ([], [(cert P_var, P_inst), (cert x_var, x_inst)]) 
+      |> Thm.instantiate ([], [(P_var, P_inst), (x_var, x_inst)])
       |> Simplifier.full_simplify (put_simpset split_conv_ss ctxt)
       |> singleton (Variable.export ctxt' ctxt)
   in
--- a/src/HOL/Tools/Function/relation.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Function/relation.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -18,8 +18,7 @@
 fun inst_state_tac ctxt inst =
   SUBGOAL (fn (goal, _) =>
     (case Term.add_vars goal [] of
-      [v as (_, T)] =>
-        PRIMITIVE (Thm.instantiate ([], [(Thm.cterm_of ctxt (Var v), Thm.cterm_of ctxt (inst T))]))
+      [v as (_, T)] => PRIMITIVE (Thm.instantiate ([], [(v, Thm.cterm_of ctxt (inst T))]))
     | _ => no_tac));
 
 fun relation_tac ctxt rel i =
@@ -39,9 +38,7 @@
             |> map_types Type_Infer.paramify_vars
             |> Type.constraint T
             |> Syntax.check_term ctxt;
-        in
-          PRIMITIVE (Thm.instantiate ([], [(Thm.cterm_of ctxt (Var v), Thm.cterm_of ctxt rel')]))
-        end
+        in PRIMITIVE (Thm.instantiate ([], [(v, Thm.cterm_of ctxt rel')])) end
     | _ => no_tac));
 
 fun relation_infer_tac ctxt rel i =
--- a/src/HOL/Tools/Lifting/lifting_bnf.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Lifting/lifting_bnf.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -23,7 +23,7 @@
     fun get_lhs thm = thm |> Thm.concl_of |> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> fst
     val vars = get_lhs rel_Grp |> strip_comb |> snd |> map_filter (try (strip_comb #> snd #> hd))
     val UNIVs = map (fn var => HOLogic.mk_UNIV (var |> dest_Var |> snd |> dest_Type |> snd |> hd)) vars
-    val inst = map2 (curry (apply2 (Thm.cterm_of ctxt))) vars UNIVs
+    val inst = map dest_Var vars ~~ map (Thm.cterm_of ctxt) UNIVs
     val rel_Grp_UNIV_sym = rel_Grp |> Drule.instantiate_normalize ([], inst) 
       |> Local_Defs.unfold ctxt @{thms subset_UNIV[THEN eqTrueI] UNIV_def[symmetric] simp_thms(21)}
       |> (fn thm => thm RS sym)
--- a/src/HOL/Tools/Lifting/lifting_term.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Lifting/lifting_term.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -310,7 +310,7 @@
     val thy = Proof_Context.theory_of ctxt
     val (_, qty_schematic) = quot_thm_rty_qty quot_thm
     val match_env = Sign.typ_match thy (qty_schematic, qty) Vartab.empty
-    fun prep_ty (x, (S, ty)) = apply2 (Thm.ctyp_of ctxt) (TVar (x, S), ty)
+    fun prep_ty (x, (S, ty)) = ((x, S), Thm.ctyp_of ctxt ty)
     val ty_inst = Vartab.fold (cons o prep_ty) match_env []
   in
     Thm.instantiate (ty_inst, []) quot_thm
--- a/src/HOL/Tools/Lifting/lifting_util.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Lifting/lifting_util.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -145,7 +145,7 @@
 fun instT_thm ctxt env =
   let
     val cinst = env |> Vartab.dest 
-      |> map (fn (x, (S, T)) => (Thm.ctyp_of ctxt (TVar (x, S)), Thm.ctyp_of ctxt T));
+      |> map (fn (x, (S, T)) => ((x, S), Thm.ctyp_of ctxt T));
   in
     Thm.instantiate (cinst, [])
   end;
--- a/src/HOL/Tools/Meson/meson.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Meson/meson.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -346,16 +346,17 @@
 (*Replaces universally quantified variables by FREE variables -- because
   assumptions may not contain scheme variables.  Later, generalize using Variable.export. *)
 local  
-  val spec_var = Thm.dest_arg (Thm.dest_arg (#2 (Thm.dest_implies (Thm.cprop_of spec))));
-  val spec_varT = Thm.typ_of_cterm spec_var;
-  fun name_of (Const (@{const_name All}, _) $ Abs(x,_,_)) = x | name_of _ = Name.uu;
+  val spec_var =
+    Thm.dest_arg (Thm.dest_arg (#2 (Thm.dest_implies (Thm.cprop_of spec))))
+    |> Thm.term_of |> dest_Var;
+  fun name_of (Const (@{const_name All}, _) $ Abs(x, _, _)) = x | name_of _ = Name.uu;
 in  
   fun freeze_spec th ctxt =
     let
       val ([x], ctxt') =
         Variable.variant_fixes [name_of (HOLogic.dest_Trueprop (Thm.concl_of th))] ctxt;
       val spec' = spec
-        |> Thm.instantiate ([], [(spec_var, Thm.cterm_of ctxt' (Free (x, spec_varT)))]);
+        |> Thm.instantiate ([], [(spec_var, Thm.cterm_of ctxt' (Free (x, snd spec_var)))]);
     in (th RS spec', ctxt') end
 end;
 
--- a/src/HOL/Tools/Meson/meson_clausify.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Meson/meson_clausify.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -44,10 +44,10 @@
    "Sledgehammer_Util".) *)
 fun transform_elim_theorem th =
   (case Thm.concl_of th of    (*conclusion variable*)
-    @{const Trueprop} $ (v as Var (_, @{typ bool})) =>
-      Thm.instantiate ([], [(Thm.cterm_of @{theory_context HOL} v, cfalse)]) th
-  | v as Var(_, @{typ prop}) =>
-      Thm.instantiate ([], [(Thm.cterm_of @{theory_context HOL} v, ctp_false)]) th
+    @{const Trueprop} $ (Var (v as (_, @{typ bool}))) =>
+      Thm.instantiate ([], [(v, cfalse)]) th
+  | Var (v as (_, @{typ prop})) =>
+      Thm.instantiate ([], [(v, ctp_false)]) th
   | _ => th)
 
 
@@ -375,9 +375,7 @@
        th ctxt
     val (cnf_ths, ctxt) = clausify nnf_th
     fun intr_imp ct th =
-      Thm.instantiate ([], map (apply2 (Thm.cterm_of ctxt))
-                               [(Var (("i", 0), @{typ nat}),
-                                 HOLogic.mk_nat ax_no)])
+      Thm.instantiate ([], [((("i", 0), @{typ nat}), Thm.cterm_of ctxt (HOLogic.mk_nat ax_no))])
                       (zero_var_indexes @{thm skolem_COMBK_D})
       RS Thm.implies_intr ct th
   in
--- a/src/HOL/Tools/Metis/metis_reconstruct.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Metis/metis_reconstruct.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -174,7 +174,7 @@
 fun incr_type_indexes ctxt inc th =
   let
     val tvs = Term.add_tvars (Thm.full_prop_of th) []
-    fun inc_tvar ((a, i), s) = apply2 (Thm.ctyp_of ctxt) (TVar ((a, i), s), TVar ((a, i + inc), s))
+    fun inc_tvar ((a, i), s) = (((a, i), s), Thm.ctyp_of ctxt (TVar ((a, i + inc), s)))
   in
     Thm.instantiate (map inc_tvar tvs, []) th
   end
@@ -211,7 +211,7 @@
           |> rpair (Envir.empty ~1)
           |-> fold (Pattern.unify (Context.Proof ctxt))
           |> Envir.type_env |> Vartab.dest
-          |> map (fn (x, (S, T)) => apply2 (Thm.ctyp_of ctxt) (TVar (x, S), T))
+          |> map (fn (x, (S, T)) => ((x, S), Thm.ctyp_of ctxt T))
       in
         (* The unifier, which is invoked from "Thm.bicompose", will sometimes refuse to unify
            "?a::?'a" with "?a::?'b" or "?a::nat" and throw a "TERM" exception (with "add_ffpair" as
@@ -400,8 +400,8 @@
         val thy = Proof_Context.theory_of ctxt
         val (tyenv, tenv) = fold (Pattern.first_order_match thy) pairs (Vartab.empty, Vartab.empty)
   
-        fun mkT (v, (S, T)) = apply2 (Thm.ctyp_of ctxt) (TVar (v, S), T)
-        fun mk (v, (T, t)) = apply2 (Thm.cterm_of ctxt) (Var (v, Envir.subst_type tyenv T), t)
+        fun mkT (v, (S, T)) = ((v, S), Thm.ctyp_of ctxt T)
+        fun mk (v, (T, t)) = ((v, Envir.subst_type tyenv T), Thm.cterm_of ctxt t)
   
         val instsT = Vartab.fold (cons o mkT) tyenv []
         val insts = Vartab.fold (cons o mk) tenv []
@@ -570,11 +570,11 @@
             Envir.Envir {maxidx = Vartab.fold (Integer.max o snd o fst) tyenv 0,
               tenv = Vartab.empty, tyenv = tyenv}
           val ty_inst =
-            Vartab.fold (fn (x, (S, T)) => cons (apply2 (Thm.ctyp_of ctxt) (TVar (x, S), T)))
+            Vartab.fold (fn (x, (S, T)) => cons (((x, S), Thm.ctyp_of ctxt T)))
               tyenv []
           val t_inst = [apply2 (Thm.cterm_of ctxt o Envir.norm_term env) (Var var, t')]
         in
-          Drule.instantiate_normalize (ty_inst, t_inst) th
+          Drule.instantiate_normalize (ty_inst, map (apfst (dest_Var o Thm.term_of)) t_inst) th
         end
       | _ => raise Fail "expected a single non-zapped, non-Metis Var")
   in
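
As in the Metis hunks above, type instantiations are now assembled directly from a matching or unification environment, certifying only the right-hand sides. A condensed sketch of that idiom, assuming tyenv : Type.tyenv obtained from a prior Pattern.unify or Sign.typ_match:

  fun tyinst_of_tyenv ctxt tyenv =
    Vartab.fold (fn (x, (S, T)) => cons ((x, S), Thm.ctyp_of ctxt T)) tyenv [];

  (* e.g.  Thm.instantiate (tyinst_of_tyenv ctxt tyenv, []) th *)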
--- a/src/HOL/Tools/Predicate_Compile/core_data.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Predicate_Compile/core_data.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -232,10 +232,10 @@
 
     fun instantiate i n ({context = ctxt2, prems, ...}: Subgoal.focus) =
       let
-        fun term_pair_of (ix, (ty, t)) = (Var (ix, ty), t)
+        fun inst_pair_of (ix, (ty, t)) = ((ix, ty), t)
         fun inst_of_matches tts =
           fold (Pattern.match thy) tts (Vartab.empty, Vartab.empty)
-          |> snd |> Vartab.dest |> map (apply2 (Thm.cterm_of ctxt2) o term_pair_of)
+          |> snd |> Vartab.dest |> map (apsnd (Thm.cterm_of ctxt2) o inst_pair_of)
         val (cases, (eqs, prems1)) = apsnd (chop (nargs - nparams)) (chop n prems)
         val case_th =
           rewrite_rule ctxt2 (@{thm Predicate.eq_is_eq} :: map meta_eq_of eqs) (nth cases (i - 1))
--- a/src/HOL/Tools/Qelim/cooper.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Qelim/cooper.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -150,10 +150,10 @@
 
 val get_pmi = get_pmi_term o Thm.cprop_of;
 
-val p_v' = @{cpat "?P' :: int => bool"};
-val q_v' = @{cpat "?Q' :: int => bool"};
-val p_v = @{cpat "?P:: int => bool"};
-val q_v = @{cpat "?Q:: int => bool"};
+val p_v' = (("P'", 0), @{typ "int \<Rightarrow> bool"});
+val q_v' = (("Q'", 0), @{typ "int \<Rightarrow> bool"});
+val p_v = (("P", 0), @{typ "int \<Rightarrow> bool"});
+val q_v = (("Q", 0), @{typ "int \<Rightarrow> bool"});
 
 fun myfwd (th1, th2, th3) p q
       [(th_1,th_2,th_3), (th_1',th_2',th_3')] =
@@ -430,11 +430,13 @@
 val insert_tm = @{cterm "insert :: int => _"};
 fun mkISet cts = fold_rev (Thm.apply insert_tm #> Thm.apply) cts emptyIS;
 val eqelem_imp_imp = @{thm eqelem_imp_iff} RS iffD1;
-val [A_tm,B_tm] = map (fn th => Thm.cprop_of th |> funpow 2 Thm.dest_arg |> Thm.dest_abs NONE |> snd |> Thm.dest_arg1 |> Thm.dest_arg
-                                      |> Thm.dest_abs NONE |> snd |> Thm.dest_fun |> Thm.dest_arg)
-                      [asetP,bsetP];
+val [A_v,B_v] =
+  map (fn th => Thm.cprop_of th |> funpow 2 Thm.dest_arg
+    |> Thm.dest_abs NONE |> snd |> Thm.dest_arg1 |> Thm.dest_arg
+    |> Thm.dest_abs NONE |> snd |> Thm.dest_fun |> Thm.dest_arg
+    |> Thm.term_of |> dest_Var) [asetP, bsetP];
 
-val D_tm = @{cpat "?D::int"};
+val D_v = (("D", 0), @{typ int});
 
 fun cooperex_conv ctxt vs q =
 let
@@ -501,16 +503,16 @@
    if length (distinct (op aconv) bl) <= length (distinct (op aconv) al)
    then
     (bl,b0,decomp_minf,
-     fn B => (map (fn th => Thm.implies_elim (Thm.instantiate ([],[(B_tm,B), (D_tm,cd)]) th) dp)
+     fn B => (map (fn th => Thm.implies_elim (Thm.instantiate ([],[(B_v,B), (D_v,cd)]) th) dp)
                      [bseteq,bsetneq,bsetlt, bsetle, bsetgt,bsetge])@
-                   (map (Thm.instantiate ([],[(B_tm,B), (D_tm,cd)]))
+                   (map (Thm.instantiate ([],[(B_v,B), (D_v,cd)]))
                         [bsetdvd,bsetndvd,bsetP,infDdvd, infDndvd,bsetconj,
                          bsetdisj,infDconj, infDdisj]),
                        cpmi)
      else (al,a0,decomp_pinf,fn A =>
-          (map (fn th => Thm.implies_elim (Thm.instantiate ([],[(A_tm,A), (D_tm,cd)]) th) dp)
+          (map (fn th => Thm.implies_elim (Thm.instantiate ([],[(A_v,A), (D_v,cd)]) th) dp)
                    [aseteq,asetneq,asetlt, asetle, asetgt,asetge])@
-                   (map (Thm.instantiate ([],[(A_tm,A), (D_tm,cd)]))
+                   (map (Thm.instantiate ([],[(A_v,A), (D_v,cd)]))
                    [asetdvd,asetndvd, asetP, infDdvd, infDndvd,asetconj,
                          asetdisj,infDconj, infDdisj]),cppi)
  val cpth =
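
A minimal sketch of the adaptors used in hunks like the ones above when an interface still yields certified schematics; cv and cT are assumed to be a cterm and a ctyp wrapping a schematic Var/TVar:

  (* recover the plain keys expected by Thm.instantiate *)
  fun var_of cv = dest_Var (Thm.term_of cv);    (* indexname * typ *)
  fun tvar_of cT = dest_TVar (Thm.typ_of cT);   (* indexname * sort *)
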
--- a/src/HOL/Tools/Quickcheck/find_unused_assms.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Quickcheck/find_unused_assms.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -13,8 +13,9 @@
 structure Find_Unused_Assms : FIND_UNUSED_ASSMS =
 struct
 
-fun thms_of thy thy_name = Global_Theory.all_thms_of thy false
-  |> filter (fn (_, th) => Context.theory_name (Thm.theory_of_thm th) = thy_name)
+fun thms_of thy thy_name =
+  Global_Theory.all_thms_of thy false
+  |> filter (fn (_, th) => Thm.theory_name_of_thm th = thy_name)
 
 fun do_while P f s list =
   if P s then
--- a/src/HOL/Tools/Quickcheck/random_generators.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Quickcheck/random_generators.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -73,7 +73,9 @@
 val lhs = eq |> Thm.dest_arg1;
 val pt_random_aux = lhs |> Thm.dest_fun;
 val pt_rhs = eq |> Thm.dest_arg |> Thm.dest_fun;
-val aT = pt_random_aux |> Thm.ctyp_of_cterm |> dest_ctyp_nth 1;
+val a_v =
+  pt_random_aux |> Thm.ctyp_of_cterm |> dest_ctyp_nth 1
+  |> Thm.typ_of |> dest_TVar;
 
 val rew_thms = map mk_meta_eq [@{thm natural_zero_minus_one},
   @{thm Suc_natural_minus_one}, @{thm select_weight_cons_zero}, @{thm beyond_zero}];
@@ -89,7 +91,7 @@
       (HOLogic.dest_eq o HOLogic.dest_Trueprop) eq;
     val Type (_, [_, iT]) = T;
     val icT = Thm.ctyp_of lthy iT;
-    val inst = Thm.instantiate_cterm ([(aT, icT)], []);
+    val inst = Thm.instantiate_cterm ([(a_v, icT)], []);
     fun subst_v t' = map_aterms (fn t as Free (w, _) => if v = w then t' else t | t => t);
     val t_rhs = lambda t_k proto_t_rhs;
     val eqs0 = [subst_v @{term "0::natural"} eq,
@@ -98,11 +100,11 @@
     val ((_, (_, eqs2)), lthy') = lthy
       |> BNF_LFP_Compat.primrec_simple
         [((Binding.concealed (Binding.name random_aux), T), NoSyn)] eqs1;
-    val cT_random_aux = inst pt_random_aux;
-    val cT_rhs = inst pt_rhs;
+    val cT_random_aux = inst pt_random_aux |> Thm.term_of |> dest_Var;
+    val cT_rhs = inst pt_rhs |> Thm.term_of |> dest_Var;
     val rule = @{thm random_aux_rec}
       |> Drule.instantiate_normalize
-        ([(aT, icT)],
+        ([(a_v, icT)],
           [(cT_random_aux, Thm.cterm_of lthy' t_random_aux),
            (cT_rhs, Thm.cterm_of lthy' t_rhs)]);
     fun tac ctxt =
--- a/src/HOL/Tools/Quotient/quotient_tacs.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Quotient/quotient_tacs.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -72,20 +72,16 @@
   | _ => error "Solve_quotient_assm failed. Possibly a quotient theorem is missing."
 
 
-fun prep_trm thy (x, (T, t)) =
-  (Thm.global_cterm_of thy (Var (x, T)), Thm.global_cterm_of thy t)
-
-fun prep_ty thy (x, (S, ty)) =
-  (Thm.global_ctyp_of thy (TVar (x, S)), Thm.global_ctyp_of thy ty)
-
 fun get_match_inst thy pat trm =
   let
     val univ = Unify.matchers (Context.Theory thy) [(pat, trm)]
     val SOME (env, _) = Seq.pull univ           (* raises Bind, if no unifier *) (* FIXME fragile *)
     val tenv = Vartab.dest (Envir.term_env env)
     val tyenv = Vartab.dest (Envir.type_env env)
+    fun prep_ty (x, (S, ty)) = ((x, S), Thm.global_ctyp_of thy ty)
+    fun prep_trm (x, (T, t)) = ((x, T), Thm.global_cterm_of thy t)
   in
-    (map (prep_ty thy) tyenv, map (prep_trm thy) tenv)
+    (map prep_ty tyenv, map prep_trm tenv)
   end
 
 (* Calculates the instantiations for the lemmas:
@@ -480,7 +476,7 @@
           then make_inst_id (Thm.term_of (Thm.lhs_of thm3)) (Thm.term_of ctrm)
           else make_inst (Thm.term_of (Thm.lhs_of thm3)) (Thm.term_of ctrm)
         val thm4 =
-          Drule.instantiate_normalize ([], [(Thm.cterm_of ctxt insp, Thm.cterm_of ctxt inst)]) thm3
+          Drule.instantiate_normalize ([], [(dest_Var insp, Thm.cterm_of ctxt inst)]) thm3
       in
         Conv.rewr_conv thm4 ctrm
       end
--- a/src/HOL/Tools/SMT/smt_normalize.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/SMT/smt_normalize.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -35,7 +35,7 @@
 
   fun inst f ct thm =
     let val cv = f (Drule.strip_imp_concl (Thm.cprop_of thm))
-    in Thm.instantiate ([], [(cv, ct)]) thm end
+    in Thm.instantiate ([], [(dest_Var (Thm.term_of cv), ct)]) thm end
 in
 
 fun instantiate_elim thm =
@@ -199,8 +199,10 @@
 
   fun insert_trigger_conv [] ct = Conv.all_conv ct
     | insert_trigger_conv ctss ct =
-        let val (ctr, cp) = Thm.dest_binop (Thm.rhs_of trigger_eq) ||> rpair ct
-        in Thm.instantiate ([], [cp, (ctr, mk_trigger ctss)]) trigger_eq end
+        let
+          val (ctr, cp) = Thm.dest_binop (Thm.rhs_of trigger_eq) ||> rpair ct
+          val inst = map (apfst (dest_Var o Thm.term_of)) [cp, (ctr, mk_trigger ctss)]
+        in Thm.instantiate ([], inst) trigger_eq end
 
   fun infer_trigger_eq_conv outer_ctxt (ctxt, cvs) ct =
     let
--- a/src/HOL/Tools/SMT/smt_util.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/SMT/smt_util.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -171,7 +171,7 @@
 val destT1 = hd o Thm.dest_ctyp
 val destT2 = hd o tl o Thm.dest_ctyp
 
-fun instTs cUs (cTs, ct) = Thm.instantiate_cterm (cTs ~~ cUs, []) ct
+fun instTs cUs (cTs, ct) = Thm.instantiate_cterm (map (dest_TVar o Thm.typ_of) cTs ~~ cUs, []) ct
 fun instT cU (cT, ct) = instTs [cU] ([cT], ct)
 fun instT' ct = instT (Thm.ctyp_of_cterm ct)
 
--- a/src/HOL/Tools/SMT/z3_replay_methods.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/SMT/z3_replay_methods.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -100,20 +100,20 @@
   (Vartab.empty, Vartab.empty)
   |> Pattern.first_order_match (Proof_Context.theory_of ctxt) (pat, t)
 
-fun gen_certify_inst sel mk cert ctxt thm t =
+fun gen_certify_inst sel cert ctxt thm t =
   let
     val inst = match ctxt (dest_thm thm) (dest_prop t)
-    fun cert_inst (ix, (a, b)) = (cert (mk (ix, a)), cert b)
+    fun cert_inst (ix, (a, b)) = ((ix, a), cert b)
   in Vartab.fold (cons o cert_inst) (sel inst) [] end
 
 fun match_instantiateT ctxt t thm =
   if Term.exists_type (Term.exists_subtype Term.is_TVar) (dest_thm thm) then
-    Thm.instantiate (gen_certify_inst fst TVar (Thm.ctyp_of ctxt) ctxt thm t, []) thm
+    Thm.instantiate (gen_certify_inst fst (Thm.ctyp_of ctxt) ctxt thm t, []) thm
   else thm
 
 fun match_instantiate ctxt t thm =
   let val thm' = match_instantiateT ctxt t thm in
-    Thm.instantiate ([], gen_certify_inst snd Var (Thm.cterm_of ctxt) ctxt thm' t) thm'
+    Thm.instantiate ([], gen_certify_inst snd (Thm.cterm_of ctxt) ctxt thm' t) thm'
   end
 
 fun apply_rule ctxt t =
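
A minimal sketch of turning a matcher's environments into instantiations, following gen_certify_inst above; ctxt is an assumed context and tyenv/tenv are assumed Vartab tables as produced by Pattern.first_order_match:

  (* certify only the right-hand sides; the keys stay plain *)
  val instT = Vartab.fold (fn (ix, (S, T)) => cons ((ix, S), Thm.ctyp_of ctxt T)) tyenv [];
  val inst = Vartab.fold (fn (ix, (T, t)) => cons ((ix, T), Thm.cterm_of ctxt t)) tenv [];
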
--- a/src/HOL/Tools/SMT/z3_replay_util.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/SMT/z3_replay_util.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -15,7 +15,7 @@
   val discharge: thm -> thm -> thm
 
   (*a faster COMP*)
-  type compose_data
+  type compose_data = cterm list * (cterm -> cterm list) * thm
   val precompose: (cterm -> cterm list) -> thm -> compose_data
   val precompose2: (cterm -> cterm * cterm) -> thm -> compose_data
   val compose: compose_data -> thm -> thm
@@ -71,11 +71,12 @@
 
 fun list2 (x, y) = [x, y]
 
-fun precompose f rule = (f (Thm.cprem_of rule 1), f, rule)
-fun precompose2 f rule = precompose (list2 o f) rule
+fun precompose f rule : compose_data = (f (Thm.cprem_of rule 1), f, rule)
+fun precompose2 f rule : compose_data = precompose (list2 o f) rule
 
 fun compose (cvs, f, rule) thm =
-  discharge thm (Thm.instantiate ([], cvs ~~ f (Thm.cprop_of thm)) rule)
+  discharge thm
+    (Thm.instantiate ([], map (dest_Var o Thm.term_of) cvs ~~ f (Thm.cprop_of thm)) rule)
 
 
 (* simpset *)
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_mash.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -45,19 +45,20 @@
   val str_of_mash_algorithm : mash_algorithm -> string
 
   val mesh_facts : ('a * 'a -> bool) -> int -> (real * (('a * real) list * 'a list)) list -> 'a list
-  val nickname_of_thm : thm -> string
+  val nickname_of_thm : Proof.context -> thm -> string
   val find_suggested_facts : Proof.context -> ('b * thm) list -> string list -> ('b * thm) list
-  val crude_thm_ord : thm * thm -> order
+  val crude_thm_ord : Proof.context -> thm * thm -> order
   val thm_less : thm * thm -> bool
   val goal_of_thm : theory -> thm -> thm
   val run_prover_for_mash : Proof.context -> params -> string -> string -> fact list -> thm ->
     prover_result
-  val features_of : Proof.context -> theory -> stature -> term list -> string list
+  val features_of : Proof.context -> string -> stature -> term list -> string list
   val trim_dependencies : string list -> string list option
-  val isar_dependencies_of : string Symtab.table * string Symtab.table -> thm -> string list option
+  val isar_dependencies_of : Proof.context -> string Symtab.table * string Symtab.table ->
+    thm -> string list option
   val prover_dependencies_of : Proof.context -> params -> string -> int -> raw_fact list ->
     string Symtab.table * string Symtab.table -> thm -> bool * string list
-  val attach_parents_to_facts : ('a * thm) list -> ('a * thm) list ->
+  val attach_parents_to_facts : Proof.context -> ('a * thm) list -> ('a * thm) list ->
     (string list * ('a * thm)) list
   val num_extra_feature_facts : int
   val extra_feature_factor : real
@@ -65,7 +66,7 @@
   val weight_facts_steeply : 'a list -> ('a * real) list
   val find_mash_suggestions : Proof.context -> int -> string list -> ('a * thm) list ->
     ('a * thm) list -> ('a * thm) list -> ('a * thm) list * ('a * thm) list
-  val mash_suggested_facts : Proof.context -> theory -> params -> int -> term list -> term ->
+  val mash_suggested_facts : Proof.context -> string -> params -> int -> term list -> term ->
     raw_fact list -> fact list * fact list
 
   val mash_unlearn : unit -> unit
@@ -737,25 +738,24 @@
 
 (*** Isabelle helpers ***)
 
-fun elided_backquote_thm threshold th =
-  elide_string threshold (backquote_thm (Proof_Context.init_global (Thm.theory_of_thm th)) th)
+fun elided_backquote_thm ctxt threshold th =
+  elide_string threshold (backquote_thm ctxt th)
 
-val thy_name_of_thm = Context.theory_name o Thm.theory_of_thm
-
-fun nickname_of_thm th =
+fun nickname_of_thm ctxt th =
   if Thm.has_name_hint th then
     let val hint = Thm.get_name_hint th in
       (* There must be a better way to detect local facts. *)
       (case Long_Name.dest_local hint of
-        SOME suf => Long_Name.implode [thy_name_of_thm th, suf, elided_backquote_thm 50 th]
+        SOME suf =>
+          Long_Name.implode [Thm.theory_name_of_thm th, suf, elided_backquote_thm ctxt 50 th]
       | NONE => hint)
     end
   else
-    elided_backquote_thm 200 th
+    elided_backquote_thm ctxt 200 th
 
 fun find_suggested_facts ctxt facts =
   let
-    fun add (fact as (_, th)) = Symtab.default (nickname_of_thm th, fact)
+    fun add (fact as (_, th)) = Symtab.default (nickname_of_thm ctxt th, fact)
     val tab = fold add facts Symtab.empty
     fun lookup nick =
       Symtab.lookup tab nick
@@ -778,12 +778,12 @@
       EQUAL => string_ord (apply2 Context.theory_name p)
     | order => order)
 
-fun crude_thm_ord p =
+fun crude_thm_ord ctxt p =
   (case crude_theory_ord (apply2 Thm.theory_of_thm p) of
     EQUAL =>
     (* The hack below is necessary because of odd dependencies that are not reflected in the theory
        comparison. *)
-    let val q = apply2 nickname_of_thm p in
+    let val q = apply2 (nickname_of_thm ctxt) p in
       (* Hack to put "xxx_def" before "xxxI" and "xxxE" *)
       (case bool_ord (apply2 (String.isSuffix "_def") (swap q)) of
         EQUAL => string_ord q
@@ -924,12 +924,10 @@
 val type_max_depth = 1
 
 (* TODO: Generate type classes for types? *)
-fun features_of ctxt thy (scope, _) ts =
-  let val thy_name = Context.theory_name thy in
-    thy_feature_of thy_name ::
-    term_features_of ctxt thy_name term_max_depth type_max_depth ts
-    |> scope <> Global ? cons local_feature
-  end
+fun features_of ctxt thy_name (scope, _) ts =
+  thy_feature_of thy_name ::
+  term_features_of ctxt thy_name term_max_depth type_max_depth ts
+  |> scope <> Global ? cons local_feature
 
 (* Too many dependencies is a sign that a decision procedure is at work. There is not much to learn
    from such proofs. *)
@@ -938,14 +936,14 @@
 val prover_default_max_facts = 25
 
 (* "type_definition_xxx" facts are characterized by their use of "CollectI". *)
-val typedef_dep = nickname_of_thm @{thm CollectI}
+val typedef_dep = nickname_of_thm @{context} @{thm CollectI}
 (* Mysterious parts of the class machinery create lots of proofs that refer exclusively to
    "someI_ex" (and to some internal constructions). *)
-val class_some_dep = nickname_of_thm @{thm someI_ex}
+val class_some_dep = nickname_of_thm @{context} @{thm someI_ex}
 
 val fundef_ths =
   @{thms fundef_ex1_existence fundef_ex1_uniqueness fundef_ex1_iff fundef_default_value}
-  |> map nickname_of_thm
+  |> map (nickname_of_thm @{context})
 
 (* "Rep_xxx_inject", "Abs_xxx_inverse", etc., are derived using these facts. *)
 val typedef_ths =
@@ -953,46 +951,46 @@
       type_definition.Rep_inject type_definition.Abs_inject type_definition.Rep_cases
       type_definition.Abs_cases type_definition.Rep_induct type_definition.Abs_induct
       type_definition.Rep_range type_definition.Abs_image}
-  |> map nickname_of_thm
+  |> map (nickname_of_thm @{context})
 
-fun is_size_def [dep] th =
+fun is_size_def ctxt [dep] th =
     (case first_field ".rec" dep of
       SOME (pref, _) =>
-      (case first_field ".size" (nickname_of_thm th) of
+      (case first_field ".size" (nickname_of_thm ctxt th) of
         SOME (pref', _) => pref = pref'
       | NONE => false)
     | NONE => false)
-  | is_size_def _ _ = false
+  | is_size_def _ _ _ = false
 
 fun trim_dependencies deps =
   if length deps > max_dependencies then NONE else SOME deps
 
-fun isar_dependencies_of name_tabs th =
+fun isar_dependencies_of ctxt name_tabs th =
   thms_in_proof max_dependencies (SOME name_tabs) th
   |> Option.map (fn deps =>
     if deps = [typedef_dep] orelse deps = [class_some_dep] orelse
         exists (member (op =) fundef_ths) deps orelse exists (member (op =) typedef_ths) deps orelse
-        is_size_def deps th then
+        is_size_def ctxt deps th then
       []
     else
       deps)
 
 fun prover_dependencies_of ctxt (params as {verbose, max_facts, ...}) prover auto_level facts
     name_tabs th =
-  (case isar_dependencies_of name_tabs th of
+  (case isar_dependencies_of ctxt name_tabs th of
     SOME [] => (false, [])
   | isar_deps0 =>
     let
       val isar_deps = these isar_deps0
       val thy = Proof_Context.theory_of ctxt
       val goal = goal_of_thm thy th
-      val name = nickname_of_thm th
+      val name = nickname_of_thm ctxt th
       val (_, hyp_ts, concl_t) = ATP_Util.strip_subgoal goal 1 ctxt
       val facts = facts |> filter (fn (_, th') => thm_less (th', th))
 
-      fun nickify ((_, stature), th) = ((nickname_of_thm th, stature), th)
+      fun nickify ((_, stature), th) = ((nickname_of_thm ctxt th, stature), th)
 
-      fun is_dep dep (_, th) = nickname_of_thm th = dep
+      fun is_dep dep (_, th) = nickname_of_thm ctxt th = dep
 
       fun add_isar_dep facts dep accum =
         if exists (is_dep dep) accum then
@@ -1036,7 +1034,7 @@
 
 (* In the following functions, chunks are risers w.r.t. "thm_less_eq". *)
 
-fun chunks_and_parents_for chunks th =
+fun chunks_and_parents_for ctxt chunks th =
   let
     fun insert_parent new parents =
       let val parents = parents |> filter_out (fn p => thm_less_eq (p, new)) in
@@ -1060,11 +1058,11 @@
   in
     fold_rev do_chunk chunks ([], [])
     |>> cons []
-    ||> map nickname_of_thm
+    ||> map (nickname_of_thm ctxt)
   end
 
-fun attach_parents_to_facts _ [] = []
-  | attach_parents_to_facts old_facts (facts as (_, th) :: _) =
+fun attach_parents_to_facts _ _ [] = []
+  | attach_parents_to_facts ctxt old_facts (facts as (_, th) :: _) =
     let
       fun do_facts _ [] = []
         | do_facts (_, parents) [fact] = [(parents, fact)]
@@ -1073,16 +1071,16 @@
           let
             val chunks = app_hd (cons th) chunks
             val chunks_and_parents' =
-              if thm_less_eq (th, th') andalso thy_name_of_thm th = thy_name_of_thm th' then
-                (chunks, [nickname_of_thm th])
-              else
-                chunks_and_parents_for chunks th'
+              if thm_less_eq (th, th') andalso
+                Thm.theory_name_of_thm th = Thm.theory_name_of_thm th'
+              then (chunks, [nickname_of_thm ctxt th])
+              else chunks_and_parents_for ctxt chunks th'
           in
             (parents, fact) :: do_facts chunks_and_parents' facts
           end
     in
       old_facts @ facts
-      |> do_facts (chunks_and_parents_for [[]] th)
+      |> do_facts (chunks_and_parents_for ctxt [[]] th)
       |> drop (length old_facts)
     end
 
@@ -1114,15 +1112,15 @@
 
 fun strict_subthy thyp = Theory.subthy thyp andalso not (Theory.subthy (swap thyp))
 
-fun maximal_wrt_access_graph _ [] = []
-  | maximal_wrt_access_graph access_G ((fact as (_, th)) :: facts) =
+fun maximal_wrt_access_graph _ _ [] = []
+  | maximal_wrt_access_graph ctxt access_G ((fact as (_, th)) :: facts) =
     let val thy = Thm.theory_of_thm th in
       fact :: filter_out (fn (_, th') => strict_subthy (Thm.theory_of_thm th', thy)) facts
-      |> map (nickname_of_thm o snd)
+      |> map (nickname_of_thm ctxt o snd)
       |> maximal_wrt_graph access_G
     end
 
-fun is_fact_in_graph access_G = can (Graph.get_node access_G) o nickname_of_thm
+fun is_fact_in_graph ctxt access_G = can (Graph.get_node access_G) o nickname_of_thm ctxt
 
 val chained_feature_factor = 0.5 (* FUDGE *)
 val extra_feature_factor = 0.1 (* FUDGE *)
@@ -1147,29 +1145,28 @@
     (mesh_facts (eq_snd (gen_eq_thm ctxt)) max_facts mess, unknown)
   end
 
-fun mash_suggested_facts ctxt thy ({debug, ...} : params) max_suggs hyp_ts concl_t facts =
+fun mash_suggested_facts ctxt thy_name ({debug, ...} : params) max_suggs hyp_ts concl_t facts =
   let
-    val thy_name = Context.theory_name thy
     val algorithm = the_mash_algorithm ()
 
     val facts = facts
-      |> rev_sort_list_prefix (crude_thm_ord o apply2 snd)
+      |> rev_sort_list_prefix (crude_thm_ord ctxt o apply2 snd)
         (Int.max (num_extra_feature_facts, max_proximity_facts))
 
     val chained = filter (fn ((_, (scope, _)), _) => scope = Chained) facts
 
-    fun fact_has_right_theory (_, th) =
-      thy_name = Context.theory_name (Thm.theory_of_thm th)
+    fun fact_has_right_theory (_, th) = thy_name = Thm.theory_name_of_thm th
 
     fun chained_or_extra_features_of factor (((_, stature), th), weight) =
       [Thm.prop_of th]
-      |> features_of ctxt (Thm.theory_of_thm th) stature
+      |> features_of ctxt (Thm.theory_name_of_thm th) stature
       |> map (rpair (weight * factor))
 
     val {access_G, xtabs = ((num_facts, fact_tab), (num_feats, feat_tab)), ffds, freqs, ...} =
       peek_state ctxt
 
-    val goal_feats0 = features_of ctxt thy (Local, General) (concl_t :: hyp_ts)
+    val goal_feats0 =
+      features_of ctxt thy_name (Local, General) (concl_t :: hyp_ts)
     val chained_feats = chained
       |> map (rpair 1.0)
       |> map (chained_or_extra_features_of chained_feature_factor)
@@ -1185,7 +1182,7 @@
       fold (union (eq_fst (op =))) [chained_feats, extra_feats] (map (rpair 1.0) goal_feats0)
       |> debug ? sort (Real.compare o swap o apply2 snd)
 
-    val parents = maximal_wrt_access_graph access_G facts
+    val parents = maximal_wrt_access_graph ctxt access_G facts
     val visible_facts = map_filter (Symtab.lookup fact_tab) (Graph.all_preds access_G parents)
 
     val suggs =
@@ -1205,7 +1202,7 @@
             goal_feats int_goal_feats
         end
 
-    val unknown = filter_out (is_fact_in_graph access_G o snd) facts
+    val unknown = filter_out (is_fact_in_graph ctxt access_G o snd) facts
   in
     find_mash_suggestions ctxt max_suggs suggs facts chained unknown
     |> apply2 (map fact_of_raw_fact)
@@ -1266,16 +1263,16 @@
     launch_thread timeout (fn () =>
       let
         val thy = Proof_Context.theory_of ctxt
-        val feats = features_of ctxt thy (Local, General) [t]
-        val facts = rev_sort_list_prefix (crude_thm_ord o apply2 snd) 1 facts
+        val feats = features_of ctxt (Context.theory_name thy) (Local, General) [t]
+        val facts = rev_sort_list_prefix (crude_thm_ord ctxt o apply2 snd) 1 facts
       in
         map_state ctxt
           (fn {access_G, xtabs as ((num_facts0, _), _), ffds, freqs, dirty_facts} =>
              let
-               val parents = maximal_wrt_access_graph access_G facts
+               val parents = maximal_wrt_access_graph ctxt access_G facts
                val deps = used_ths
-                 |> filter (is_fact_in_graph access_G)
-                 |> map nickname_of_thm
+                 |> filter (is_fact_in_graph ctxt access_G)
+                 |> map (nickname_of_thm ctxt)
 
                val name = learned_proof_name ()
                val (access_G', xtabs', rev_learns) =
@@ -1304,7 +1301,7 @@
     fun next_commit_time () = Time.+ (Timer.checkRealTimer timer, commit_timeout)
 
     val {access_G, ...} = peek_state ctxt
-    val is_in_access_G = is_fact_in_graph access_G o snd
+    val is_in_access_G = is_fact_in_graph ctxt access_G o snd
     val no_new_facts = forall is_in_access_G facts
   in
     if no_new_facts andalso not run_prover then
@@ -1318,7 +1315,7 @@
         ""
     else
       let
-        val name_tabs = build_name_tables nickname_of_thm facts
+        val name_tabs = build_name_tables (nickname_of_thm ctxt) facts
 
         fun deps_of status th =
           if status = Non_Rec_Def orelse status = Rec_Def then
@@ -1327,7 +1324,7 @@
             prover_dependencies_of ctxt params prover auto_level facts name_tabs th
             |> (fn (false, _) => NONE | (true, deps) => trim_dependencies deps)
           else
-            isar_dependencies_of name_tabs th
+            isar_dependencies_of ctxt name_tabs th
 
         fun do_commit [] [] [] state = state
           | do_commit learns relearns flops
@@ -1375,8 +1372,8 @@
           | learn_new_fact (parents, ((_, stature as (_, status)), th))
               (learns, (num_nontrivial, next_commit, _)) =
             let
-              val name = nickname_of_thm th
-              val feats = features_of ctxt (Thm.theory_of_thm th) stature [Thm.prop_of th]
+              val name = nickname_of_thm ctxt th
+              val feats = features_of ctxt (Thm.theory_name_of_thm th) stature [Thm.prop_of th]
               val deps = these (deps_of status th)
               val num_nontrivial = num_nontrivial |> not (null deps) ? Integer.add 1
               val learns = (name, parents, feats, deps) :: learns
@@ -1396,8 +1393,8 @@
           else
             let
               val new_facts = facts
-                |> sort (crude_thm_ord o apply2 snd)
-                |> attach_parents_to_facts []
+                |> sort (crude_thm_ord ctxt o apply2 snd)
+                |> attach_parents_to_facts ctxt []
                 |> filter_out (is_in_access_G o snd)
               val (learns, (num_nontrivial, _, _)) =
                 ([], (0, next_commit_time (), false))
@@ -1410,7 +1407,7 @@
           | relearn_old_fact ((_, (_, status)), th)
               ((relearns, flops), (num_nontrivial, next_commit, _)) =
             let
-              val name = nickname_of_thm th
+              val name = nickname_of_thm ctxt th
               val (num_nontrivial, relearns, flops) =
                 (case deps_of status th of
                   SOME deps => (num_nontrivial + 1, (name, deps) :: relearns, flops)
@@ -1434,7 +1431,7 @@
 
               fun priority_of th =
                 Random.random_range 0 max_isar +
-                (case try (Graph.get_node access_G) (nickname_of_thm th) of
+                (case try (Graph.get_node access_G) (nickname_of_thm ctxt th) of
                   SOME (Isar_Proof, _, deps) => ~100 * length deps
                 | SOME (Automatic_Proof, _, _) => 2 * max_isar
                 | SOME (Isar_Proof_wegen_Prover_Flop, _, _) => max_isar
@@ -1468,7 +1465,7 @@
     val ctxt = ctxt |> Config.put instantiate_inducts false
     val facts =
       nearly_all_facts ctxt false fact_override Keyword.empty_keywords css chained [] @{prop True}
-      |> sort (crude_thm_ord o apply2 snd o swap)
+      |> sort (crude_thm_ord ctxt o apply2 snd o swap)
     val num_facts = length facts
     val prover = hd provers
 
@@ -1515,7 +1512,7 @@
     [("", [])]
   else
     let
-      val thy = Proof_Context.theory_of ctxt
+      val thy_name = Context.theory_name (Proof_Context.theory_of ctxt)
 
       fun maybe_launch_thread exact min_num_facts_to_learn =
         if not (Async_Manager_Legacy.has_running_threads MaShN) andalso
@@ -1536,7 +1533,7 @@
       fun please_learn () =
         let
           val {access_G, xtabs = ((num_facts0, _), _), ...} = peek_state ctxt
-          val is_in_access_G = is_fact_in_graph access_G o snd
+          val is_in_access_G = is_fact_in_graph ctxt access_G o snd
           val min_num_facts_to_learn = length facts - num_facts0
         in
           if min_num_facts_to_learn <= max_facts_to_learn_before_query then
@@ -1578,8 +1575,8 @@
          |> weight_facts_steeply, [])
 
       fun mash () =
-        mash_suggested_facts ctxt thy params (generous_max_suggestions max_facts) hyp_ts concl_t
-          facts
+        mash_suggested_facts ctxt thy_name params
+          (generous_max_suggestions max_facts) hyp_ts concl_t facts
         |>> weight_facts_steeply
 
       val mess =
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_mepo.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_mepo.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -220,7 +220,7 @@
     t
 
 fun theory_const_prop_of fudge th =
-  theory_constify fudge (Context.theory_name (Thm.theory_of_thm th)) (Thm.prop_of th)
+  theory_constify fudge (Thm.theory_name_of_thm th) (Thm.prop_of th)
 
 fun pair_consts_fact thy fudge fact =
   (case fact |> snd |> theory_const_prop_of fudge |> pconsts_in_fact thy of
--- a/src/HOL/Tools/Transfer/transfer.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/Transfer/transfer.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -592,11 +592,10 @@
     fun prep a = the (AList.lookup (op =) tab a)
     val thm = transfer_rule_of_terms fst ctxt' tab s t
     val binsts = bool_insts (if equiv then 0 else 1) (s, t)
-    val cbool = @{ctyp bool}
-    val relT = @{typ "bool => bool => bool"}
     val idx = Thm.maxidx_of thm + 1
-    fun tinst (a, _) = (Thm.ctyp_of ctxt' (TVar ((a, idx), @{sort type})), cbool)
-    fun inst (a, t) = apply2 (Thm.cterm_of ctxt') (Var (Name.clean_index (prep a, idx), relT), t)
+    fun tinst (a, _) = (((a, idx), @{sort type}), @{ctyp bool})
+    fun inst (a, t) =
+      ((Name.clean_index (prep a, idx), @{typ "bool \<Rightarrow> bool \<Rightarrow> bool"}), Thm.cterm_of ctxt' t)
   in
     thm
       |> Thm.generalize (tfrees, rnames @ frees) idx
@@ -616,11 +615,10 @@
     fun prep a = the (AList.lookup (op =) tab a)
     val thm = transfer_rule_of_terms snd ctxt' tab t s
     val binsts = bool_insts 1 (s, t)
-    val cbool = @{ctyp bool}
-    val relT = @{typ "bool => bool => bool"}
     val idx = Thm.maxidx_of thm + 1
-    fun tinst (a, _) = (Thm.ctyp_of ctxt' (TVar ((a, idx), @{sort type})), cbool)
-    fun inst (a, t) = apply2 (Thm.cterm_of ctxt') (Var (Name.clean_index (prep a, idx), relT), t)
+    fun tinst (a, _) = (((a, idx), @{sort type}), @{ctyp bool})
+    fun inst (a, t) =
+      ((Name.clean_index (prep a, idx), @{typ "bool \<Rightarrow> bool \<Rightarrow> bool"}), Thm.cterm_of ctxt' t)
   in
     thm
       |> Thm.generalize (tfrees, rnames @ frees) idx
--- a/src/HOL/Tools/coinduction.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/coinduction.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -61,9 +61,9 @@
             |> fold Variable.declare_names vars
             |> fold Variable.declare_thm (raw_thm :: prems);
           val thm_concl = Thm.cprop_of raw_thm |> strip_imp_concl;
-          val (rhoTs, rhots) = Thm.match (thm_concl, concl)
-            |>> map (apply2 Thm.typ_of)
-            ||> map (apply2 Thm.term_of);
+          val (instT, inst) = Thm.match (thm_concl, concl);
+          val rhoTs = map (fn (v, T) => (TVar v, Thm.typ_of T)) instT;
+          val rhots = map (fn (v, t) => (Var v, Thm.term_of t)) inst;
           val xs = hd (Thm.prems_of raw_thm) |> HOLogic.dest_Trueprop |> strip_comb |> snd
             |> map (subst_atomic_types rhoTs);
           val raw_eqs = map (fn x => (x, AList.lookup op aconv rhots x |> the)) xs;
@@ -160,4 +160,3 @@
 end;
 
 end;
-
--- a/src/HOL/Tools/hologic.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/hologic.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -207,14 +207,14 @@
   let
     val (P, Q) = apply2 (Object_Logic.dest_judgment ctxt o Thm.cprop_of) (thP, thQ)
       handle CTERM (msg, _) => raise THM (msg, 0, [thP, thQ]);
-    val inst = Thm.instantiate ([], [(@{cpat "?P::bool"}, P), (@{cpat "?Q::bool"}, Q)]);
+    val inst = Thm.instantiate ([], [((("P", 0), boolT), P), ((("Q", 0), boolT), Q)]);
   in Drule.implies_elim_list (inst @{thm conjI}) [thP, thQ] end;
 
 fun conj_elim ctxt thPQ =
   let
     val (P, Q) = Thm.dest_binop (Object_Logic.dest_judgment ctxt (Thm.cprop_of thPQ))
       handle CTERM (msg, _) => raise THM (msg, 0, [thPQ]);
-    val inst = Thm.instantiate ([], [(@{cpat "?P::bool"}, P), (@{cpat "?Q::bool"}, Q)]);
+    val inst = Thm.instantiate ([], [((("P", 0), boolT), P), ((("Q", 0), boolT), Q)]);
     val thP = Thm.implies_elim (inst @{thm conjunct1}) thPQ;
     val thQ = Thm.implies_elim (inst @{thm conjunct2}) thPQ;
   in (thP, thQ) end;
--- a/src/HOL/Tools/inductive_set.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/inductive_set.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -205,7 +205,7 @@
       val x' = map_type
         (K (Ts @ HOLogic.strip_ptupleT ps U ---> HOLogic.boolT)) x;
     in
-      (Thm.cterm_of ctxt x,
+      (dest_Var x,
        Thm.cterm_of ctxt (fold_rev (Term.abs o pair "x") Ts
          (HOLogic.Collect_const U $
             HOLogic.mk_psplits ps U HOLogic.boolT
@@ -367,7 +367,7 @@
         val T = HOLogic.mk_ptupleT ps Us;
         val x' = map_type (K (Rs ---> HOLogic.mk_setT T)) x
       in
-        (Thm.cterm_of ctxt x,
+        (dest_Var x,
          Thm.cterm_of ctxt (fold_rev (Term.abs o pair "x") Ts
           (HOLogic.mk_mem (HOLogic.mk_ptuple ps T (map Bound (k downto 0)),
              list_comb (x', map Bound (l - 1 downto k + 1))))))
--- a/src/HOL/Tools/lin_arith.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/lin_arith.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -60,10 +60,8 @@
 fun is_nat t = (fastype_of1 t = HOLogic.natT);
 
 fun mk_nat_thm thy t =
-  let
-    val cn = Thm.global_cterm_of thy (Var (("n", 0), HOLogic.natT))
-    and ct = Thm.global_cterm_of thy t
-  in Drule.instantiate_normalize ([], [(cn, ct)]) @{thm le0} end;
+  let val ct = Thm.global_cterm_of thy t
+  in Drule.instantiate_normalize ([], [((("n", 0), HOLogic.natT), ct)]) @{thm le0} end;
 
 end;
 
--- a/src/HOL/Tools/monomorph.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/monomorph.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -159,7 +159,7 @@
 
 fun instantiate ctxt subst =
   let
-    fun cert (ix, (S, T)) = apply2 (Thm.ctyp_of ctxt) (TVar (ix, S), T)
+    fun cert (ix, (S, T)) = ((ix, S), Thm.ctyp_of ctxt T)
     fun cert' subst = Vartab.fold (cons o cert) subst []
   in Thm.instantiate (cert' subst, []) end
 
--- a/src/HOL/Tools/numeral.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/numeral.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -49,7 +49,7 @@
 val uminus = @{cpat "uminus"};
 val uminusT = Thm.ctyp_of @{context} (Term.range_type (Thm.typ_of_cterm uminus));
 
-fun instT T V = Thm.instantiate_cterm ([(V, T)], []);
+fun instT T V = Thm.instantiate_cterm ([(dest_TVar (Thm.typ_of V), T)], []);
 
 in
 
--- a/src/HOL/Tools/numeral_simprocs.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/numeral_simprocs.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -595,8 +595,8 @@
 
  fun prove_nz ctxt T t =
     let
-      val z = Thm.instantiate_cterm ([(zT,T)],[]) zr
-      val eq = Thm.instantiate_cterm ([(eqT,T)],[]) geq
+      val z = Thm.instantiate_cterm ([(dest_TVar (Thm.typ_of zT), T)],[]) zr
+      val eq = Thm.instantiate_cterm ([(dest_TVar (Thm.typ_of eqT), T)],[]) geq
       val th = Simplifier.rewrite (ctxt addsimps @{thms simp_thms})
            (Thm.apply @{cterm "Trueprop"} (Thm.apply @{cterm "Not"}
                   (Thm.apply (Thm.apply eq t) z)))
--- a/src/HOL/Tools/record.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/record.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -1755,7 +1755,7 @@
       fun mk_eq_refl thy =
         @{thm equal_refl}
         |> Thm.instantiate
-          ([apply2 (Thm.global_ctyp_of thy) (TVar (("'a", 0), @{sort equal}), Logic.varifyT_global extT)], [])
+          ([((("'a", 0), @{sort equal}), Thm.global_ctyp_of thy (Logic.varifyT_global extT))], [])
         |> Axclass.unoverload thy;
       val ensure_random_record = ensure_sort_record (@{sort random}, mk_random_eq);
       val ensure_exhaustive_record =
--- a/src/HOL/Tools/reification.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/reification.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -169,9 +169,9 @@
           val (fnvs, invs) = List.partition (fn ((vn, _),_) => member (op =) vns vn) (Vartab.dest tmenv);
           val (fts, its) =
             (map (snd o snd) fnvs,
-             map (fn ((vn, vi), (tT, t)) => apply2 (Thm.cterm_of ctxt) (Var ((vn, vi), tT), t)) invs);
+             map (fn ((vn, vi), (tT, t)) => (((vn, vi), tT), Thm.cterm_of ctxt t)) invs);
           val ctyenv =
-            map (fn ((vn, vi), (s, ty)) => apply2 (Thm.ctyp_of ctxt) (TVar ((vn, vi), s), ty))
+            map (fn ((vn, vi), (s, ty)) => (((vn, vi), s), Thm.ctyp_of ctxt ty))
               (Vartab.dest tyenv);
         in
           ((map (Thm.cterm_of ctxt) fts ~~ replicate (length fts) ctxt,
@@ -203,7 +203,7 @@
         val sbst = Envir.subst_term (tyenv, tmenv);
         val sbsT = Envir.subst_type tyenv;
         val subst_ty =
-          map (fn (n, (s, t)) => apply2 (Thm.ctyp_of ctxt'') (TVar (n, s), t)) (Vartab.dest tyenv)
+          map (fn (n, (s, t)) => ((n, s), Thm.ctyp_of ctxt'' t)) (Vartab.dest tyenv)
         val tml = Vartab.dest tmenv;
         val (subst_ns, bds) = fold_map
           (fn (Const _ $ _ $ n, Var (xn0, _)) => fn bds =>
@@ -230,7 +230,9 @@
           let
             val ih = Drule.cterm_rule (Thm.instantiate (subst_ty, []));
           in map (apply2 ih) (subst_ns @ subst_vs @ cts) end;
-        val th = (Drule.instantiate_normalize (subst_ty, substt) eq) RS sym;
+        val th =
+          (Drule.instantiate_normalize (subst_ty, map (apfst (dest_Var o Thm.term_of)) substt) eq)
+            RS sym;
       in (hd (Variable.export ctxt'' ctxt [th]), bds) end)
       handle Pattern.MATCH => tryeqs eqs (ct, ctxt) bds);
 
@@ -266,10 +268,7 @@
       let
         val (_, _ :: vs) = eq |> Thm.prop_of |> HOLogic.dest_Trueprop
           |> HOLogic.dest_eq |> fst |> strip_comb;
-        val subst =
-          map_filter
-            (fn (v as Var (_, T)) => SOME (Thm.cterm_of ctxt' v, subst T)
-              | _ => NONE) vs;
+        val subst = map_filter (fn Var v => SOME (v, subst (#2 v)) | _ => NONE) vs;
       in Thm.instantiate ([], subst) eq end;
     val (ps, congs) = map_split (mk_congeq ctxt' fs o prep_eq) eqs;
     val bds = AList.make (K ([], [])) tys;
@@ -285,9 +284,10 @@
       | is_list_var _ = false;
     val vars = th |> Thm.prop_of |> Logic.dest_equals |> snd
       |> strip_comb |> snd |> filter is_list_var;
-    val vs = map (fn v as Var (_, T) =>
+    val vs = map (fn Var (v as (_, T)) =>
       (v, the (AList.lookup Type.could_unify bds' T) |> snd |> HOLogic.mk_list (dest_listT T))) vars;
-    val th' = Drule.instantiate_normalize ([], map (apply2 (Thm.cterm_of ctxt)) vs) th;
+    val th' =
+      Drule.instantiate_normalize ([], map (apsnd (Thm.cterm_of ctxt)) vs) th;
     val th'' = Thm.symmetric (dereify ctxt [] (Thm.lhs_of th'));
   in Thm.transitive th'' th' end;
 
--- a/src/HOL/Tools/sat.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/sat.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -73,8 +73,6 @@
 val resolution_thm =
   @{lemma "(P ==> False) ==> (~ P ==> False) ==> False" by (rule case_split)}
 
-val cP = Thm.cterm_of @{context} (Var (("P", 0), HOLogic.boolT));
-
 (* ------------------------------------------------------------------------- *)
 (* lit_ord: an order on integers that considers their absolute values only,  *)
 (*      thereby treating integers that represent the same atom (positively   *)
@@ -173,7 +171,7 @@
                 val cLit =
                   snd (Thm.dest_comb (if hyp1_is_neg then hyp2 else hyp1))  (* strip Trueprop *)
               in
-                Thm.instantiate ([], [(cP, cLit)]) resolution_thm
+                Thm.instantiate ([], [((("P", 0), HOLogic.boolT), cLit)]) resolution_thm
               end
 
             val _ =
--- a/src/HOL/Tools/semiring_normalizer.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/semiring_normalizer.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -238,7 +238,7 @@
   if is_binop ct ct' then Thm.dest_binop ct'
   else raise CTERM ("dest_binop: bad binop", [ct, ct'])
 
-fun inst_thm inst = Thm.instantiate ([], inst);
+fun inst_thm inst = Thm.instantiate ([], map (apfst (dest_Var o Thm.term_of)) inst);
 
 val dest_number = Thm.term_of #> HOLogic.dest_number #> snd;
 val is_number = can dest_number;
@@ -300,7 +300,7 @@
         val sub_tm = Thm.dest_fun (Thm.dest_fun sub_pat)
         val neg_tm = Thm.dest_fun neg_pat
         val dest_sub = dest_binop sub_tm
-      in (neg_mul,sub_add,sub_tm,neg_tm,dest_sub, neg_mul |> concl |> Thm.dest_arg,
+      in (neg_mul, sub_add, sub_tm, neg_tm, dest_sub, neg_mul |> concl |> Thm.dest_arg,
           sub_add |> concl |> Thm.dest_arg |> Thm.dest_arg)
       end
     | _ => (TrueI, TrueI, true_tm, true_tm, (fn t => (t,t)), true_tm, true_tm));
@@ -760,7 +760,7 @@
 fun polynomial_neg_conv ctxt tm =
    let val (l,r) = Thm.dest_comb tm in
         if not (l aconvc neg_tm) then raise CTERM ("polynomial_neg_conv",[tm]) else
-        let val th1 = inst_thm [(cx',r)] neg_mul
+        let val th1 = inst_thm [(cx', r)] neg_mul
             val th2 = Thm.transitive th1 (Conv.arg1_conv semiring_mul_conv (concl th1))
         in Thm.transitive th2 (polynomial_monomial_mul_conv ctxt (concl th2))
         end
@@ -770,7 +770,7 @@
 (* Subtraction.                                                              *)
 fun polynomial_sub_conv ctxt tm =
   let val (l,r) = dest_sub tm
-      val th1 = inst_thm [(cx',l),(cy',r)] sub_add
+      val th1 = inst_thm [(cx', l), (cy', r)] sub_add
       val (tm1,tm2) = Thm.dest_comb(concl th1)
       val th2 = Drule.arg_cong_rule tm1 (polynomial_neg_conv ctxt tm2)
   in Thm.transitive th1 (Thm.transitive th2 (polynomial_add_conv ctxt (concl th2)))
--- a/src/HOL/Tools/simpdata.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/simpdata.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -56,7 +56,7 @@
   (P1 =simp=> ... =simp=> Pn => x == y) ==> (P1 =simp=> ... =simp=> Pn => x = y)
 *)
 
-fun lift_meta_eq_to_obj_eq i st =
+fun lift_meta_eq_to_obj_eq ctxt i st =
   let
     fun count_imp (Const (@{const_name HOL.simp_implies}, _) $ P $ Q) = 1 + count_imp Q
       | count_imp _ = 0;
@@ -69,7 +69,7 @@
         val mk_simp_implies = fold_rev (fn R => fn S =>
           Const (@{const_name HOL.simp_implies}, propT --> propT --> propT) $ R $ S) Ps;
       in
-        Goal.prove_global (Thm.theory_of_thm st) []
+        Goal.prove_global (Proof_Context.theory_of ctxt) []
           [mk_simp_implies @{prop "(x::'a) == y"}]
           (mk_simp_implies @{prop "(x::'a) = y"})
           (fn {context = ctxt, prems} => EVERY
@@ -85,7 +85,7 @@
 fun mk_meta_cong ctxt rl =
   let
     val rl' = Seq.hd (TRYALL (fn i => fn st =>
-      resolve_tac ctxt [lift_meta_eq_to_obj_eq i st] i st) rl)
+      resolve_tac ctxt [lift_meta_eq_to_obj_eq ctxt i st] i st) rl)
   in
     mk_meta_eq rl' handle THM _ =>
       if can Logic.dest_equals (Thm.concl_of rl') then rl'
--- a/src/HOL/Tools/split_rule.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/HOL/Tools/split_rule.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -42,7 +42,7 @@
 fun split_rule_var' ctxt (t as Var (v, Type ("fun", [T1, T2]))) rl =
       let val T' = HOLogic.flatten_tupleT T1 ---> T2;
           val newt = ap_split T1 T2 (Var (v, T'));
-      in Thm.instantiate ([], [apply2 (Thm.cterm_of ctxt) (t, newt)]) rl end
+      in Thm.instantiate ([], [(dest_Var t, Thm.cterm_of ctxt newt)]) rl end
   | split_rule_var' _ _ rl = rl;
 
 
@@ -65,15 +65,15 @@
                 val ys = Name.variant_list xs (replicate (length Ts) a);
               in
                 (xs @ ys,
-                  apply2 (Thm.cterm_of ctxt)
-                    (v, HOLogic.mk_ptuple (HOLogic.flat_tupleT_paths T) T
-                      (map (Var o apfst (rpair 0)) (ys ~~ Ts))) :: insts)
+                  (dest_Var v,
+                    Thm.cterm_of ctxt (HOLogic.mk_ptuple (HOLogic.flat_tupleT_paths T) T
+                      (map (Var o apfst (rpair 0)) (ys ~~ Ts)))) :: insts)
               end
           | mk_tuple _ x = x;
         val newt = ap_split' Us U (Var (v, T'));
         val (vs', insts) = fold mk_tuple ts (vs, []);
       in
-        (Drule.instantiate_normalize ([], [apply2 (Thm.cterm_of ctxt) (t, newt)] @ insts) rl, vs')
+        (Drule.instantiate_normalize ([], ((v, T), Thm.cterm_of ctxt newt) :: insts) rl, vs')
       end
   | complete_split_rule_var _ _ x = x;
 
--- a/src/Provers/Arith/fast_lin_arith.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Provers/Arith/fast_lin_arith.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -424,7 +424,7 @@
             val T = Thm.typ_of_cterm cv
           in
             mth
-            |> Thm.instantiate ([], [(cv, number_of T n)])
+            |> Thm.instantiate ([], [(dest_Var (Thm.term_of cv), number_of T n)])
             |> rewrite_concl
             |> discharge_prem
             handle CTERM _ => mult_by_add n thm
--- a/src/Provers/classical.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Provers/classical.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -282,12 +282,12 @@
 fun delete x = delete_tagged_list (joinrules x);
 fun delete' x = delete_tagged_list (joinrules' x);
 
-fun string_of_thm NONE = Display.string_of_thm_without_context
-  | string_of_thm (SOME context) = Display.string_of_thm (Context.proof_of context);
+fun bad_thm (SOME context) msg th =
+      error (msg ^ "\n" ^ Display.string_of_thm (Context.proof_of context) th)
+  | bad_thm NONE msg th = raise THM (msg, 0, [th]);
 
-fun make_elim context th =
-  if has_fewer_prems 1 th then
-    error ("Ill-formed destruction rule\n" ^ string_of_thm context th)
+fun make_elim opt_context th =
+  if has_fewer_prems 1 th then bad_thm opt_context "Ill-formed destruction rule" th
   else Tactic.make_elim th;
 
 fun warn_thm (SOME (Context.Proof ctxt)) msg th =
@@ -295,31 +295,31 @@
       then warning (msg ^ Display.string_of_thm ctxt th) else ()
   | warn_thm _ _ _ = ();
 
-fun warn_rules context msg rules th =
-  Item_Net.member rules th andalso (warn_thm context msg th; true);
+fun warn_rules opt_context msg rules th =
+  Item_Net.member rules th andalso (warn_thm opt_context msg th; true);
 
-fun warn_claset context th (CS {safeIs, safeEs, hazIs, hazEs, ...}) =
-  warn_rules context "Rule already declared as safe introduction (intro!)\n" safeIs th orelse
-  warn_rules context "Rule already declared as safe elimination (elim!)\n" safeEs th orelse
-  warn_rules context "Rule already declared as introduction (intro)\n" hazIs th orelse
-  warn_rules context "Rule already declared as elimination (elim)\n" hazEs th;
+fun warn_claset opt_context th (CS {safeIs, safeEs, hazIs, hazEs, ...}) =
+  warn_rules opt_context "Rule already declared as safe introduction (intro!)\n" safeIs th orelse
+  warn_rules opt_context "Rule already declared as safe elimination (elim!)\n" safeEs th orelse
+  warn_rules opt_context "Rule already declared as introduction (intro)\n" hazIs th orelse
+  warn_rules opt_context "Rule already declared as elimination (elim)\n" hazEs th;
 
 
 (*** Safe rules ***)
 
-fun addSI w context th
+fun addSI w opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
-  if warn_rules context "Ignoring duplicate safe introduction (intro!)\n" safeIs th then cs
+  if warn_rules opt_context "Ignoring duplicate safe introduction (intro!)\n" safeIs th then cs
   else
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = flat_rule ctxt th;
       val (safe0_rls, safep_rls) = (*0 subgoals vs 1 or more*)
         List.partition Thm.no_prems [th'];
       val nI = Item_Net.length safeIs + 1;
       val nE = Item_Net.length safeEs;
-      val _ = warn_claset context th cs;
+      val _ = warn_claset opt_context th cs;
     in
       CS
        {safeIs = Item_Net.update th safeIs,
@@ -335,21 +335,20 @@
         xtra_netpair = insert' (the_default 0 w) (nI,nE) ([th], []) xtra_netpair}
     end;
 
-fun addSE w context th
+fun addSE w opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
-  if warn_rules context "Ignoring duplicate safe elimination (elim!)\n" safeEs th then cs
-  else if has_fewer_prems 1 th then
-    error ("Ill-formed elimination rule\n" ^ string_of_thm context th)
+  if warn_rules opt_context "Ignoring duplicate safe elimination (elim!)\n" safeEs th then cs
+  else if has_fewer_prems 1 th then bad_thm opt_context "Ill-formed elimination rule" th
   else
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = classical_rule ctxt (flat_rule ctxt th);
       val (safe0_rls, safep_rls) = (*0 subgoals vs 1 or more*)
         List.partition (fn rl => Thm.nprems_of rl=1) [th'];
       val nI = Item_Net.length safeIs;
       val nE = Item_Net.length safeEs + 1;
-      val _ = warn_claset context th cs;
+      val _ = warn_claset opt_context th cs;
     in
       CS
        {safeEs = Item_Net.update th safeEs,
@@ -365,22 +364,22 @@
         xtra_netpair = insert' (the_default 0 w) (nI,nE) ([], [th]) xtra_netpair}
     end;
 
-fun addSD w context th = addSE w context (make_elim context th);
+fun addSD w opt_context th = addSE w opt_context (make_elim opt_context th);
 
 
 (*** Hazardous (unsafe) rules ***)
 
-fun addI w context th
+fun addI w opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
-  if warn_rules context "Ignoring duplicate introduction (intro)\n" hazIs th then cs
+  if warn_rules opt_context "Ignoring duplicate introduction (intro)\n" hazIs th then cs
   else
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = flat_rule ctxt th;
       val nI = Item_Net.length hazIs + 1;
       val nE = Item_Net.length hazEs;
-      val _ = warn_claset context th cs;
+      val _ = warn_claset opt_context th cs;
     in
       CS
        {hazIs = Item_Net.update th hazIs,
@@ -396,21 +395,20 @@
         xtra_netpair = insert' (the_default 1 w) (nI, nE) ([th], []) xtra_netpair}
     end
     handle THM ("RSN: no unifiers", _, _) => (*from dup_intr*)  (* FIXME !? *)
-      error ("Ill-formed introduction rule\n" ^ string_of_thm context th);
+      bad_thm opt_context "Ill-formed introduction rule" th;
 
-fun addE w context th
+fun addE w opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
-  if warn_rules context "Ignoring duplicate elimination (elim)\n" hazEs th then cs
-  else if has_fewer_prems 1 th then
-    error ("Ill-formed elimination rule\n" ^ string_of_thm context th)
+  if warn_rules opt_context "Ignoring duplicate elimination (elim)\n" hazEs th then cs
+  else if has_fewer_prems 1 th then bad_thm opt_context "Ill-formed elimination rule" th
   else
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = classical_rule ctxt (flat_rule ctxt th);
       val nI = Item_Net.length hazIs;
       val nE = Item_Net.length hazEs + 1;
-      val _ = warn_claset context th cs;
+      val _ = warn_claset opt_context th cs;
     in
       CS
        {hazEs = Item_Net.update th hazEs,
@@ -426,7 +424,7 @@
         xtra_netpair = insert' (the_default 1 w) (nI, nE) ([], [th]) xtra_netpair}
     end;
 
-fun addD w context th = addE w context (make_elim context th);
+fun addD w opt_context th = addE w opt_context (make_elim opt_context th);
 
 
 
@@ -435,12 +433,12 @@
         to insert.
 ***)
 
-fun delSI context th
+fun delSI opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
   if Item_Net.member safeIs th then
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = flat_rule ctxt th;
       val (safe0_rls, safep_rls) = List.partition Thm.no_prems [th'];
     in
@@ -459,12 +457,12 @@
     end
   else cs;
 
-fun delSE context th
+fun delSE opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
   if Item_Net.member safeEs th then
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = classical_rule ctxt (flat_rule ctxt th);
       val (safe0_rls, safep_rls) = List.partition (fn rl => Thm.nprems_of rl = 1) [th'];
     in
@@ -483,12 +481,12 @@
     end
   else cs;
 
-fun delI context th
+fun delI opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
   if Item_Net.member hazIs th then
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = flat_rule ctxt th;
     in
       CS
@@ -506,14 +504,14 @@
     end
   else cs
   handle THM ("RSN: no unifiers", _, _) => (*from dup_intr*)  (* FIXME !? *)
-    error ("Ill-formed introduction rule\n" ^ string_of_thm context th);
+    bad_thm opt_context "Ill-formed introduction rule" th;
 
-fun delE context th
+fun delE opt_context th
     (cs as CS {safeIs, safeEs, hazIs, hazEs, swrappers, uwrappers,
       safe0_netpair, safep_netpair, haz_netpair, dup_netpair, xtra_netpair}) =
   if Item_Net.member hazEs th then
     let
-      val ctxt = default_context context th;
+      val ctxt = default_context opt_context th;
       val th' = classical_rule ctxt (flat_rule ctxt th);
     in
       CS
@@ -532,17 +530,17 @@
   else cs;
 
 (*Delete ALL occurrences of "th" in the claset (perhaps from several lists)*)
-fun delrule context th (cs as CS {safeIs, safeEs, hazIs, hazEs, ...}) =
+fun delrule opt_context th (cs as CS {safeIs, safeEs, hazIs, hazEs, ...}) =
   let val th' = Tactic.make_elim th in
     if Item_Net.member safeIs th orelse Item_Net.member safeEs th orelse
       Item_Net.member hazIs th orelse Item_Net.member hazEs th orelse
       Item_Net.member safeEs th' orelse Item_Net.member hazEs th'
     then
-      delSI context th
-        (delSE context th
-          (delI context th
-            (delE context th (delSE context th' (delE context th' cs)))))
-    else (warn_thm context "Undeclared classical rule\n" th; cs)
+      delSI opt_context th
+        (delSE opt_context th
+          (delI opt_context th
+            (delE opt_context th (delSE opt_context th' (delE opt_context th' cs)))))
+    else (warn_thm opt_context "Undeclared classical rule\n" th; cs)
   end;
 
 
@@ -858,7 +856,8 @@
 (* attributes *)
 
 fun attrib f =
-  Thm.declaration_attribute (fn th => fn context => map_cs (f (SOME context) th) context);
+  Thm.declaration_attribute (fn th => fn opt_context =>
+    map_cs (f (SOME opt_context) th) opt_context);
 
 val safe_elim = attrib o addSE;
 val safe_intro = attrib o addSI;
@@ -868,8 +867,8 @@
 val haz_dest = attrib o addD;
 
 val rule_del =
-  Thm.declaration_attribute (fn th => fn context =>
-    context |> map_cs (delrule (SOME context) th) |>
+  Thm.declaration_attribute (fn th => fn opt_context =>
+    opt_context |> map_cs (delrule (SOME opt_context) th) |>
     Thm.attribute_declaration Context_Rules.rule_del th);
 
 
--- a/src/Provers/hypsubst.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Provers/hypsubst.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -186,10 +186,10 @@
         val (instT, _) = Thm.match (apply2 (Thm.cterm_of ctxt o Logic.mk_type) (V, U));
       in
         compose_tac ctxt (true, Drule.instantiate_normalize (instT,
-          map (apply2 (Thm.cterm_of ctxt))
-            [(Var (ixn, Ts ---> U --> body_type T), u),
-             (Var (fst (dest_Var (head_of v1)), Ts ---> U), fold_rev Term.abs ps t),
-             (Var (fst (dest_Var (head_of v2)), Ts ---> U), fold_rev Term.abs ps t')]) rl',
+          map (apsnd (Thm.cterm_of ctxt))
+            [((ixn, Ts ---> U --> body_type T), u),
+             ((fst (dest_Var (head_of v1)), Ts ---> U), fold_rev Term.abs ps t),
+             ((fst (dest_Var (head_of v2)), Ts ---> U), fold_rev Term.abs ps t')]) rl',
           Thm.nprems_of rl) i
       end
   | NONE => no_tac);
--- a/src/Pure/Isar/element.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/Isar/element.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -201,7 +201,7 @@
     SOME C =>
       let
         val thesis = Var ((Auto_Bind.thesisN, Thm.maxidx_of th + 1), fastype_of C);
-        val th' = Thm.instantiate ([], [apply2 (Thm.cterm_of ctxt) (C, thesis)]) th;
+        val th' = Thm.instantiate ([], [(Term.dest_Var C, Thm.cterm_of ctxt thesis)]) th;
       in (th', true) end
   | NONE => (th, false));
 
@@ -340,7 +340,7 @@
 fun instantiate_tfrees thy subst th =
   let
     val idx = Thm.maxidx_of th + 1;
-    fun cert_inst (a, (S, T)) = apply2 (Thm.global_ctyp_of thy) (TVar ((a, idx), S), T);
+    fun cert_inst (a, (S, T)) = (((a, idx), S), Thm.global_ctyp_of thy T);
 
     fun add_inst (a, S) insts =
       if AList.defined (op =) insts a then insts
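
When the schematic is already at hand as a term, Term.dest_Var produces exactly the indexname * typ key that Thm.instantiate now expects, as in the thesis instantiation above. A minimal sketch (the concrete proposition is only borrowed from an existing Pure theorem for illustration):

    val v = Var (("x", 0), propT);
    val entry = (Term.dest_Var v, Thm.cprop_of Drule.asm_rl);  (* (indexname * typ) * cterm *)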
--- a/src/Pure/Isar/generic_target.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/Isar/generic_target.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -271,12 +271,13 @@
       |>> map Logic.dest_type;
 
     val instT = map_filter (fn (TVar v, T) => SOME (v, T) | _ => NONE) (tvars ~~ tfrees);
-    val inst = filter (is_Var o fst) (vars ~~ frees);
-    val cinstT = instT
-      |> map (apply2 (Thm.ctyp_of ctxt) o apfst TVar);
-    val cinst = inst
-      |> map (apply2 (Thm.cterm_of ctxt o Term.map_types (Term_Subst.instantiateT instT)));
-    val result' = Thm.instantiate (cinstT, cinst) result;
+    val inst =
+      map_filter
+        (fn (Var (xi, T), t) =>
+          SOME ((xi, Term_Subst.instantiateT instT T),
+            Thm.cterm_of ctxt (Term.map_types (Term_Subst.instantiateT instT) t))
+        | _ => NONE) (vars ~~ frees);
+    val result' = Thm.instantiate (map (apsnd (Thm.ctyp_of ctxt)) instT, inst) result;
 
     (*import assumes/defines*)
     val result'' =
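
One subtlety visible above: when type and term instantiations are built together, the type instantiation has to be applied not only to the instantiated terms but also to the types stored in the term-level keys, because the key type must agree with the type of the certified right-hand side (see the add_inst check in the thm.ML changes below). A hedged sketch of that step, with the helper name assumed for illustration:

    fun mk_term_entry ctxt instT ((xi, T), t) =
      ((xi, Term_Subst.instantiateT instT T),
        Thm.cterm_of ctxt (Term.map_types (Term_Subst.instantiateT instT) t));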
--- a/src/Pure/Isar/obtain.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/Isar/obtain.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -334,7 +334,7 @@
 
     val instT =
       fold (Term.add_tvarsT o #2) params []
-      |> map (TVar #> (fn T => apply2 (Thm.ctyp_of ctxt) (T, norm_type T)));
+      |> map (fn v => (v, Thm.ctyp_of ctxt (norm_type (TVar v))));
     val closed_rule = rule
       |> Thm.forall_intr (Thm.cterm_of ctxt (Free thesis_var))
       |> Thm.instantiate (instT, []);
--- a/src/Pure/Isar/subgoal.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/Isar/subgoal.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -12,8 +12,9 @@
 
 signature SUBGOAL =
 sig
-  type focus = {context: Proof.context, params: (string * cterm) list, prems: thm list,
-    asms: cterm list, concl: cterm, schematics: (ctyp * ctyp) list * (cterm * cterm) list}
+  type focus =
+   {context: Proof.context, params: (string * cterm) list, prems: thm list, asms: cterm list,
+    concl: cterm, schematics: ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list}
   val focus_params: Proof.context -> int -> thm -> focus * thm
   val focus_params_fixed: Proof.context -> int -> thm -> focus * thm
   val focus_prems: Proof.context -> int -> thm -> focus * thm
@@ -36,8 +37,9 @@
 
 (* focus *)
 
-type focus = {context: Proof.context, params: (string * cterm) list, prems: thm list,
-  asms: cterm list, concl: cterm, schematics: (ctyp * ctyp) list * (cterm * cterm) list};
+type focus =
+ {context: Proof.context, params: (string * cterm) list, prems: thm list, asms: cterm list,
+  concl: cterm, schematics: ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list};
 
 fun gen_focus (do_prems, do_concl) ctxt i raw_st =
   let
@@ -100,7 +102,7 @@
     val (inst1, inst2) =
       split_list (map (apply2 (apply2 (Thm.cterm_of ctxt))) (map2 var_inst vars ys));
 
-    val th'' = Thm.instantiate ([], inst1) th';
+    val th'' = Thm.instantiate ([], map (apfst (Term.dest_Var o Thm.term_of)) inst1) th';
   in ((inst2, th''), ctxt'') end;
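
Code that consumes a focus now receives uncertified keys next to the certified values in the schematics field. A minimal sketch of obtaining and destructuring a focus, assuming an ambient context ctxt and a goal state st with at least one subgoal:

    val ({context = ctxt', schematics = (instT, inst), ...}, st') =
      Subgoal.focus_prems ctxt 1 st;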
 
 (*
--- a/src/Pure/Proof/proof_checker.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/Proof/proof_checker.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -78,8 +78,8 @@
           let
             val tvars = Term.add_tvars (Thm.full_prop_of thm) [] |> rev;
             val (fmap, thm') = Thm.varifyT_global' [] thm;
-            val ctye = map (apply2 (Thm.global_ctyp_of thy))
-              (map TVar tvars @ map (fn ((_, S), ixn) => TVar (ixn, S)) fmap ~~ Ts)
+            val ctye =
+              (tvars @ map (fn ((_, S), ixn) => (ixn, S)) fmap ~~ map (Thm.global_ctyp_of thy) Ts);
           in
             Thm.instantiate (ctye, []) (forall_intr_vars (Thm.forall_intr_frees thm'))
           end;
@@ -118,7 +118,7 @@
                 handle THM (s, _, _) => appl_error thy pretty_term vs Hs s prf t
               end
 
-          | thm_of (vs, names) Hs (AbsP (s, SOME t, prf)) =
+          | thm_of (vs, names) Hs (AbsP (_, SOME t, prf)) =
               let
                 val ct = Thm.global_cterm_of thy (Term.subst_bounds (map Free vs, t));
                 val thm = thm_of (vs, names) (Thm.assume ct :: Hs) prf;
--- a/src/Pure/Tools/rule_insts.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/Tools/rule_insts.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -149,8 +149,8 @@
   in
     thm
     |> Drule.instantiate_normalize
-      (map (apply2 (Thm.ctyp_of ctxt') o apfst TVar) inst_tvars,
-       map (apply2 (Thm.cterm_of ctxt') o apfst Var) inst_vars)
+      (map (apsnd (Thm.ctyp_of ctxt')) inst_tvars,
+       map (apsnd (Thm.cterm_of ctxt')) inst_vars)
     |> singleton (Variable.export ctxt' ctxt)
     |> Rule_Cases.save thm
   end;
@@ -240,13 +240,13 @@
 
     val inc = Thm.maxidx_of st + 1;
     val lift_type = Logic.incr_tvar inc;
-    fun lift_var ((a, j), T) = Var ((a, j + inc), paramTs ---> lift_type T);
+    fun lift_var ((a, j), T) = ((a, j + inc), paramTs ---> lift_type T);
     fun lift_term t = fold_rev Term.absfree params (Logic.incr_indexes (fixed, paramTs, inc) t);
 
     val inst_tvars' = inst_tvars
-      |> map (apply2 (Thm.ctyp_of inst_ctxt o lift_type) o apfst TVar);
+      |> map (fn (((a, i), S), T) => (((a, i + inc), S), Thm.ctyp_of inst_ctxt (lift_type T)));
     val inst_vars' = inst_vars
-      |> map (fn (v, t) => apply2 (Thm.cterm_of inst_ctxt) (lift_var v, lift_term t));
+      |> map (fn (v, t) => (lift_var v, Thm.cterm_of inst_ctxt (lift_term t)));
 
     val thm' = Thm.lift_rule cgoal thm
       |> Drule.instantiate_normalize (inst_tvars', inst_vars')
@@ -262,10 +262,11 @@
 fun make_elim_preserve ctxt rl =
   let
     val maxidx = Thm.maxidx_of rl;
+    fun var x = ((x, 0), propT);
     fun cvar xi = Thm.cterm_of ctxt (Var (xi, propT));
     val revcut_rl' =
-      Drule.instantiate_normalize ([], [(cvar ("V", 0), cvar ("V", maxidx + 1)),
-        (cvar ("W", 0), cvar ("W", maxidx + 1))]) Drule.revcut_rl;
+      Drule.instantiate_normalize ([], [(var "V", cvar ("V", maxidx + 1)),
+        (var "W", cvar ("W", maxidx + 1))]) Drule.revcut_rl;
   in
     (case Seq.list_of
       (Thm.bicompose (SOME ctxt) {flatten = true, match = false, incremented = false}
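
The var/cvar helpers above reflect the new division of labour for Drule.revcut_rl, whose schematics ?V and ?W are propositions at index 0. A self-contained sketch of the same kind of instantiation, with concrete propositions borrowed from existing Pure theorems:

    val inst =
     [((("V", 0), propT), Thm.cprop_of Drule.asm_rl),
      ((("W", 0), propT), Thm.cprop_of Drule.protectI)];
    val revcut' = Drule.instantiate_normalize ([], inst) Drule.revcut_rl;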
--- a/src/Pure/conjunction.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/conjunction.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -69,8 +69,8 @@
 
 local
 
-val A = read_prop "A" and vA = read_prop "?A";
-val B = read_prop "B" and vB = read_prop "?B";
+val A = read_prop "A" and vA = (("A", 0), propT);
+val B = read_prop "B" and vB = (("B", 0), propT);
 val C = read_prop "C";
 val ABC = read_prop "A ==> B ==> C";
 val A_B = read_prop "A &&& B";
--- a/src/Pure/display.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/display.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -21,10 +21,8 @@
   val pretty_thm: Proof.context -> thm -> Pretty.T
   val pretty_thm_item: Proof.context -> thm -> Pretty.T
   val pretty_thm_global: theory -> thm -> Pretty.T
-  val pretty_thm_without_context: thm -> Pretty.T
   val string_of_thm: Proof.context -> thm -> string
   val string_of_thm_global: theory -> thm -> string
-  val string_of_thm_without_context: thm -> string
   val pretty_full_theory: bool -> theory -> Pretty.T list
 end;
 
@@ -82,11 +80,8 @@
 fun pretty_thm_global thy =
   pretty_thm_raw (Syntax.init_pretty_global thy) {quote = false, show_hyps = false};
 
-fun pretty_thm_without_context th = pretty_thm_global (Thm.theory_of_thm th) th;
-
 val string_of_thm = Pretty.string_of oo pretty_thm;
 val string_of_thm_global = Pretty.string_of oo pretty_thm_global;
-val string_of_thm_without_context = Pretty.string_of o pretty_thm_without_context;
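
With the *_without_context variants gone, callers must supply a context explicitly; where nothing better is available, the old behaviour can still be recovered from the theorem's own theory. A minimal sketch (the function name is an assumption for illustration):

    fun string_of_thm_via_theory th =
      Display.string_of_thm (Proof_Context.init_global (Thm.theory_of_thm th)) th;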
 
 
 
--- a/src/Pure/drule.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/drule.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -20,7 +20,8 @@
   val lift_all: Proof.context -> cterm -> thm -> thm
   val implies_elim_list: thm -> thm list -> thm
   val implies_intr_list: cterm list -> thm -> thm
-  val instantiate_normalize: (ctyp * ctyp) list * (cterm * cterm) list -> thm -> thm
+  val instantiate_normalize: ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list ->
+    thm -> thm
   val zero_var_indexes_list: thm list -> thm list
   val zero_var_indexes: thm -> thm
   val implies_intr_hyps: thm -> thm
@@ -613,11 +614,9 @@
 
 fun mk_term ct =
   let
-    val thy = Thm.theory_of_cterm ct;
-    val T = Thm.typ_of_cterm ct;
-    val instT = apply2 (Thm.global_ctyp_of thy) (TVar (("'a", 0), []), T);
-    val x = Thm.global_cterm_of thy (Var (("x", 0), T));
-  in Thm.instantiate ([instT], [(x, ct)]) termI end;
+    val cT = Thm.ctyp_of_cterm ct;
+    val T = Thm.typ_of cT;
+  in Thm.instantiate ([((("'a", 0), []), cT)], [((("x", 0), T), ct)]) termI end;
 
 fun dest_term th =
   let val cprop = strip_imp_concl (Thm.cprop_of th) in
@@ -767,9 +766,9 @@
         val (thy, tye, _) = fold_rev add_types ctpairs (Thm.theory_of_thm th, Vartab.empty, 0);
         val instT =
           Vartab.fold (fn (xi, (S, T)) =>
-            cons (apply2 (Thm.global_ctyp_of thy) (TVar (xi, S), Envir.norm_type tye T))) tye [];
+            cons ((xi, S), Thm.global_ctyp_of thy (Envir.norm_type tye T))) tye [];
         val inst = map (apply2 (Thm.instantiate_cterm (instT, []))) ctpairs;
-      in instantiate_normalize (instT, inst) th end
+      in instantiate_normalize (instT, map (apfst (Thm.term_of #> dest_Var)) inst) th end
       handle TERM (msg, _) => raise THM (msg, 0, [th])
         | TYPE (msg, _, _) => raise THM (msg, 0, [th]);
 end;
@@ -784,27 +783,18 @@
         map_filter (Option.map Thm.typ_of) cTs,
         map_filter (Option.map Thm.term_of) cts);
 
-    fun inst_of (v, ct) =
-      (Thm.global_cterm_of (Thm.theory_of_cterm ct) (Var v), ct)
-        handle TYPE (msg, _, _) => err msg;
-
-    fun tyinst_of (v, cT) =
-      (Thm.global_ctyp_of (Thm.theory_of_ctyp cT) (TVar v), cT)
-        handle TYPE (msg, _, _) => err msg;
-
     fun zip_vars xs ys =
       zip_options xs ys handle ListPair.UnequalLengths =>
         err "more instantiations than variables in thm";
 
-    (*instantiate types first!*)
     val thm' =
       if forall is_none cTs then thm
-      else Thm.instantiate
-        (map tyinst_of (zip_vars (rev (Thm.fold_terms Term.add_tvars thm [])) cTs), []) thm;
+      else
+        Thm.instantiate ((zip_vars (rev (Thm.fold_terms Term.add_tvars thm [])) cTs), []) thm;
     val thm'' =
       if forall is_none cts then thm'
-      else Thm.instantiate
-        ([], map inst_of (zip_vars (rev (Thm.fold_terms Term.add_vars thm' [])) cts)) thm';
+      else
+        Thm.instantiate ([], zip_vars (rev (Thm.fold_terms Term.add_vars thm' [])) cts) thm';
     in thm'' end;
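
The reworked mk_term no longer certifies the variables ?'a and ?x of Drule.termI; it passes them as plain keys. A small round-trip check, runnable in an Isabelle/ML block inside a theory:

    val ct = Thm.cterm_of @{context} @{term "x::'a"};
    val th = Drule.mk_term ct;      (* statement: TERM x *)
    val ct' = Drule.dest_term th;   (* recovers the original certified term *)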
 
 
--- a/src/Pure/goal.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/goal.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -60,9 +60,7 @@
   -------- (init)
   C ==> #C
 *)
-val init =
-  let val A = #1 (Thm.dest_implies (Thm.cprop_of Drule.protectI))
-  in fn C => Thm.instantiate ([], [(A, C)]) Drule.protectI end;
+fun init C = Thm.instantiate ([], [((("A", 0), propT), C)]) Drule.protectI;
 
 (*
   A1 ==> ... ==> An ==> C
@@ -134,8 +132,7 @@
     val fixes = map (Thm.cterm_of ctxt) xs;
 
     val tfrees = fold Term.add_tfrees (prop :: As) [];
-    val instT =
-      map (fn (a, S) => apply2 (Thm.ctyp_of ctxt) (TVar ((a, 0), S), TFree (a, S))) tfrees;
+    val instT = map (fn (a, S) => (((a, 0), S), Thm.ctyp_of ctxt (TFree (a, S)))) tfrees;
 
     val global_prop =
       Logic.varify_types_global (fold_rev Logic.all xs (Logic.list_implies (As, prop)))
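
After the change, Goal.init refers to the schematic ?A of Drule.protectI by name and index instead of extracting a certified copy from the theorem. Usage is unchanged; a minimal sketch with a concrete proposition borrowed from an existing theorem:

    val C = Thm.cprop_of Drule.asm_rl;
    val st0 = Goal.init C;   (* statement: C ==> #C *)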
--- a/src/Pure/more_thm.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/more_thm.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -62,12 +62,12 @@
   val forall_elim_vars: int -> thm -> thm
   val global_certify_inst: theory ->
     ((indexname * sort) * typ) list * ((indexname * typ) * term) list ->
-    (ctyp * ctyp) list * (cterm * cterm) list
+    ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list
   val global_certify_instantiate: theory ->
     ((indexname * sort) * typ) list * ((indexname * typ) * term) list -> thm -> thm
   val certify_inst: Proof.context ->
     ((indexname * sort) * typ) list * ((indexname * typ) * term) list ->
-    (ctyp * ctyp) list * (cterm * cterm) list
+    ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list
   val certify_instantiate: Proof.context ->
     ((indexname * sort) * typ) list * ((indexname * typ) * term) list -> thm -> thm
   val forall_intr_frees: thm -> thm
@@ -357,15 +357,15 @@
 (* certify_instantiate *)
 
 fun global_certify_inst thy (instT, inst) =
- (map (fn (v, T) => apply2 (Thm.global_ctyp_of thy) (TVar v, T)) instT,
-  map (fn (v, t) => apply2 (Thm.global_cterm_of thy) (Var v, t)) inst);
+ (map (apsnd (Thm.global_ctyp_of thy)) instT,
+  map (apsnd (Thm.global_cterm_of thy)) inst);
 
 fun global_certify_instantiate thy insts th =
   Thm.instantiate (global_certify_inst thy insts) th;
 
 fun certify_inst ctxt (instT, inst) =
- (map (fn (v, T) => apply2 (Thm.ctyp_of ctxt) (TVar v, T)) instT,
-  map (fn (v, t) => apply2 (Thm.cterm_of ctxt) (Var v, t)) inst);
+ (map (apsnd (Thm.ctyp_of ctxt)) instT,
+  map (apsnd (Thm.cterm_of ctxt)) inst);
 
 fun certify_instantiate ctxt insts th =
   Thm.instantiate (certify_inst ctxt insts) th;
@@ -446,10 +446,12 @@
 
 fun stripped_sorts thy t =
   let
-    val tfrees = rev (map TFree (Term.add_tfrees t []));
-    val tfrees' = map (fn a => TFree (a, [])) (Name.invent Name.context Name.aT (length tfrees));
-    val strip = tfrees ~~ tfrees';
-    val recover = map (apply2 (Thm.global_ctyp_of thy o Logic.varifyT_global) o swap) strip;
+    val tfrees = rev (Term.add_tfrees t []);
+    val tfrees' = map (fn a => (a, [])) (Name.invent Name.context Name.aT (length tfrees));
+    val recover =
+      map2 (fn (a', S') => fn (a, S) => (((a', 0), S'), Thm.global_ctyp_of thy (TVar ((a, 0), S))))
+        tfrees' tfrees;
+    val strip = map (apply2 TFree) (tfrees ~~ tfrees');
     val t' = Term.map_types (Term.map_atyps (perhaps (AList.lookup (op =) strip))) t;
   in (strip, recover, t') end;
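
Thm.certify_inst and Thm.certify_instantiate remain the convenient entry points when an instantiation is only available in syntactic form; merely their result type changes. A minimal sketch, instantiating the propositional schematic ?A of Drule.protectI with an uncertified term:

    val th' =
      Thm.certify_instantiate @{context}
        ([], [((("A", 0), propT), Thm.prop_of Drule.asm_rl)]) Drule.protectI;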
 
--- a/src/Pure/raw_simplifier.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/raw_simplifier.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -1044,8 +1044,9 @@
             then NONE else SOME thm2'))
   end;
 
-val (cA, (cB, cC)) =
-  apsnd Thm.dest_equals (Thm.dest_implies (hd (cprems_of Drule.imp_cong)));
+val vA = (("A", 0), propT);
+val vB = (("B", 0), propT);
+val vC = (("C", 0), propT);
 
 fun transitive1 NONE NONE = NONE
   | transitive1 (SOME thm1) NONE = SOME thm1
@@ -1177,7 +1178,7 @@
         val (lhs, rhs) = Thm.dest_equals (Thm.cprop_of eq);
         val eq' =
           Thm.implies_elim
-            (Thm.instantiate ([], [(cA, prem), (cB, lhs), (cC, rhs)]) Drule.imp_cong)
+            (Thm.instantiate ([], [(vA, prem), (vB, lhs), (vC, rhs)]) Drule.imp_cong)
             (Thm.implies_intr prem eq);
       in
         if not r then eq'
@@ -1188,9 +1189,9 @@
           in
             Thm.transitive
               (Thm.transitive
-                (Thm.instantiate ([], [(cA, prem'), (cB, prem), (cC, concl)]) Drule.swap_prems_eq)
+                (Thm.instantiate ([], [(vA, prem'), (vB, prem), (vC, concl)]) Drule.swap_prems_eq)
                 eq')
-              (Thm.instantiate ([], [(cA, prem), (cB, prem''), (cC, concl)]) Drule.swap_prems_eq)
+              (Thm.instantiate ([], [(vA, prem), (vB, prem''), (vC, concl)]) Drule.swap_prems_eq)
           end
       end
 
--- a/src/Pure/thm.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/thm.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -47,8 +47,10 @@
   val lambda: cterm -> cterm -> cterm
   val adjust_maxidx_cterm: int -> cterm -> cterm
   val incr_indexes_cterm: int -> cterm -> cterm
-  val match: cterm * cterm -> (ctyp * ctyp) list * (cterm * cterm) list
-  val first_order_match: cterm * cterm -> (ctyp * ctyp) list * (cterm * cterm) list
+  val match: cterm * cterm ->
+    ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list
+  val first_order_match: cterm * cterm ->
+    ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list
   (*theorems*)
   val rep_thm: thm ->
    {thy: theory,
@@ -70,6 +72,7 @@
   val terms_of_tpairs: (term * term) list -> term list
   val full_prop_of: thm -> term
   val theory_of_thm: thm -> theory
+  val theory_name_of_thm: thm -> string
   val maxidx_of: thm -> int
   val maxidx_thm: thm -> int -> int
   val hyps_of: thm -> term list
@@ -119,8 +122,10 @@
   val equal_elim: thm -> thm -> thm
   val flexflex_rule: Proof.context option -> thm -> thm Seq.seq
   val generalize: string list * string list -> int -> thm -> thm
-  val instantiate: (ctyp * ctyp) list * (cterm * cterm) list -> thm -> thm
-  val instantiate_cterm: (ctyp * ctyp) list * (cterm * cterm) list -> cterm -> cterm
+  val instantiate: ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list ->
+    thm -> thm
+  val instantiate_cterm: ((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list ->
+    cterm -> cterm
   val trivial: cterm -> thm
   val of_class: ctyp * class -> thm
   val strip_shyps: thm -> thm
@@ -311,12 +316,10 @@
     val (Tinsts, tinsts) = match thy (t1, t2) (Vartab.empty, Vartab.empty);
     val sorts = Sorts.union sorts1 sorts2;
     fun mk_cTinst ((a, i), (S, T)) =
-      (Ctyp {T = TVar ((a, i), S), thy = thy, maxidx = i, sorts = sorts},
-       Ctyp {T = T, thy = thy, maxidx = maxidx2, sorts = sorts});
-    fun mk_ctinst ((x, i), (T, t)) =
-      let val T = Envir.subst_type Tinsts T in
-        (Cterm {t = Var ((x, i), T), T = T, thy = thy, maxidx = i, sorts = sorts},
-         Cterm {t = t, T = T, thy = thy, maxidx = maxidx2, sorts = sorts})
+      (((a, i), S), Ctyp {T = T, thy = thy, maxidx = maxidx2, sorts = sorts});
+    fun mk_ctinst ((x, i), (U, t)) =
+      let val T = Envir.subst_type Tinsts U in
+        (((x, i), T), Cterm {t = t, T = T, thy = thy, maxidx = maxidx2, sorts = sorts})
       end;
   in (Vartab.fold (cons o mk_cTinst) Tinsts [], Vartab.fold (cons o mk_ctinst) tinsts []) end;
 
@@ -391,6 +394,7 @@
 (* basic components *)
 
 val theory_of_thm = #thy o rep_thm;
+val theory_name_of_thm = Context.theory_name o #thy o rep_thm;
 val maxidx_of = #maxidx o rep_thm;
 fun maxidx_thm th i = Int.max (maxidx_of th, i);
 val hyps_of = #hyps o rep_thm;
@@ -1089,37 +1093,28 @@
 fun pretty_typing thy t T = Pretty.block
   [Syntax.pretty_term_global thy t, Pretty.str " ::", Pretty.brk 1, Syntax.pretty_typ_global thy T];
 
-fun add_inst (ct, cu) (thy, sorts) =
+fun add_inst (v as (_, T), cu) (thy, sorts) =
   let
-    val Cterm {t = t, T = T, ...} = ct;
-    val Cterm {t = u, T = U, sorts = sorts_u, maxidx = maxidx_u, ...} = cu;
-    val thy' = Theory.merge (thy, merge_thys0 ct cu);
+    val Cterm {t = u, T = U, thy = thy2, sorts = sorts_u, maxidx = maxidx_u, ...} = cu;
+    val thy' = Theory.merge (thy, thy2);
     val sorts' = Sorts.union sorts_u sorts;
   in
-    (case t of Var v =>
-      if T = U then ((v, (u, maxidx_u)), (thy', sorts'))
-      else raise TYPE (Pretty.string_of (Pretty.block
+    if T = U then ((v, (u, maxidx_u)), (thy', sorts'))
+    else
+      raise TYPE (Pretty.string_of (Pretty.block
        [Pretty.str "instantiate: type conflict",
-        Pretty.fbrk, pretty_typing thy' t T,
-        Pretty.fbrk, pretty_typing thy' u U]), [T, U], [t, u])
-    | _ => raise TYPE (Pretty.string_of (Pretty.block
-       [Pretty.str "instantiate: not a variable",
-        Pretty.fbrk, Syntax.pretty_term_global thy' t]), [], [t]))
+        Pretty.fbrk, pretty_typing thy' (Var v) T,
+        Pretty.fbrk, pretty_typing thy' u U]), [T, U], [Var v, u])
   end;
 
-fun add_instT (cT, cU) (thy, sorts) =
+fun add_instT (v as (_, S), cU) (thy, sorts) =
   let
-    val Ctyp {T, thy = thy1, ...} = cT
-    and Ctyp {T = U, thy = thy2, sorts = sorts_U, maxidx = maxidx_U, ...} = cU;
-    val thy' = Theory.merge (thy, Theory.merge (thy1, thy2));
+    val Ctyp {T = U, thy = thy2, sorts = sorts_U, maxidx = maxidx_U, ...} = cU;
+    val thy' = Theory.merge (thy, thy2);
     val sorts' = Sorts.union sorts_U sorts;
   in
-    (case T of TVar (v as (_, S)) =>
-      if Sign.of_sort thy' (U, S) then ((v, (U, maxidx_U)), (thy', sorts'))
-      else raise TYPE ("Type not of sort " ^ Syntax.string_of_sort_global thy' S, [U], [])
-    | _ => raise TYPE (Pretty.string_of (Pretty.block
-        [Pretty.str "instantiate: not a type variable",
-         Pretty.fbrk, Syntax.pretty_typ_global thy' T]), [T], []))
+    if Sign.of_sort thy' (U, S) then ((v, (U, maxidx_U)), (thy', sorts'))
+    else raise TYPE ("Type not of sort " ^ Syntax.string_of_sort_global thy' S, [U], [])
   end;
 
 in
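
Since Thm.match and Thm.first_order_match now return the same uncertified-key representation, their results can be passed to Thm.instantiate without further conversion. A minimal sketch matching a propositional pattern against the statement of an existing theorem:

    val pat = Thm.cterm_of @{context} (Var (("A", 0), propT));
    val (instT, inst) = Thm.match (pat, Thm.cprop_of Drule.asm_rl);
    val th' = Thm.instantiate (instT, inst) Drule.protectI;  (* ?A := PROP ?psi ==> PROP ?psi *)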
--- a/src/Pure/variable.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Pure/variable.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -71,10 +71,11 @@
     (((indexname * sort) * typ) list * ((indexname * typ) * term) list) * Proof.context
   val importT_terms: term list -> Proof.context -> term list * Proof.context
   val import_terms: bool -> term list -> Proof.context -> term list * Proof.context
-  val importT: thm list -> Proof.context -> ((ctyp * ctyp) list * thm list) * Proof.context
+  val importT: thm list -> Proof.context ->
+    (((indexname * sort) * ctyp) list * thm list) * Proof.context
   val import_prf: bool -> Proofterm.proof -> Proof.context -> Proofterm.proof * Proof.context
   val import: bool -> thm list -> Proof.context ->
-    (((ctyp * ctyp) list * (cterm * cterm) list) * thm list) * Proof.context
+    ((((indexname * sort) * ctyp) list * ((indexname * typ) * cterm) list) * thm list) * Proof.context
   val tradeT: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list
   val trade: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list
   val focus_params: term -> Proof.context -> (string list * (string * typ) list) * Proof.context
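
Variable.import and Variable.importT report the fixing substitution in the same new format, so the result can be destructured uniformly. A minimal sketch, importing a single Pure theorem into the current context:

    val (((instT, inst), ths'), ctxt') = Variable.import true [Drule.asm_rl] @{context};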
--- a/src/Tools/IsaPlanner/rw_inst.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Tools/IsaPlanner/rw_inst.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -174,7 +174,7 @@
     val typinsts = cross_inst_typs (nonfixed_typinsts @ fixtyinsts);
 
     (* certified instantiations for types *)
-    val ctyp_insts = map (fn (ix, (s, ty)) => apply2 (Thm.ctyp_of ctxt) (TVar (ix, s), ty)) typinsts;
+    val ctyp_insts = map (fn (ix, (s, ty)) => ((ix, s), Thm.ctyp_of ctxt ty)) typinsts;
 
     (* type instantiated versions *)
     val tgt_th_tyinst = Thm.instantiate (ctyp_insts,[]) target_thm;
@@ -198,7 +198,7 @@
 
     (* create certms of instantiations *)
     val cinsts_tyinst =
-      map (fn (ix, (ty, t)) => apply2 (Thm.cterm_of ctxt) (Var (ix, ty), t)) insts_tyinst_inst;
+      map (fn (ix, (ty, t)) => ((ix, ty), Thm.cterm_of ctxt t)) insts_tyinst_inst;
 
     (* The instantiated rule *)
     val rule_inst = rule_tyinst |> Thm.instantiate ([], cinsts_tyinst);
@@ -217,7 +217,7 @@
     val couter_inst = Thm.reflexive (Thm.cterm_of ctxt outerterm_inst);
     val (cprems, abstract_rule_inst) =
       rule_inst
-      |> Thm.instantiate ([], cterm_renamings)
+      |> Thm.instantiate ([], map (apfst (dest_Var o Thm.term_of)) cterm_renamings)
       |> mk_abstractedrule ctxt FakeTs_tyinst Ts_tyinst;
     val specific_tgt_rule =
       Conv.fconv_rule Drule.beta_eta_conversion
@@ -227,7 +227,7 @@
     val tgt_th_inst =
       tgt_th_tyinst
       |> Thm.instantiate ([], cinsts_tyinst)
-      |> Thm.instantiate ([], cterm_renamings);
+      |> Thm.instantiate ([], map (apfst (dest_Var o Thm.term_of)) cterm_renamings);
 
     val (vars,frees_of_fixed_vars) = Library.split_list cterm_renamings;
   in
--- a/src/Tools/coherent.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Tools/coherent.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -179,10 +179,10 @@
     val th' =
       Drule.implies_elim_list
         (Thm.instantiate
-           (map (fn (ixn, (S, T)) => apply2 (Thm.ctyp_of ctxt) (TVar (ixn, S), T)) (Vartab.dest tye),
+           (map (fn (ixn, (S, T)) => ((ixn, S), Thm.ctyp_of ctxt T)) (Vartab.dest tye),
             map (fn (ixn, (T, t)) =>
-              apply2 (Thm.cterm_of ctxt) (Var (ixn, Envir.subst_type tye T), t)) (Vartab.dest env) @
-            map (fn (ixnT, t) => apply2 (Thm.cterm_of ctxt) (Var ixnT, t)) insts) th)
+              ((ixn, Envir.subst_type tye T), Thm.cterm_of ctxt t)) (Vartab.dest env) @
+            map (fn (ixnT, t) => (ixnT, Thm.cterm_of ctxt t)) insts) th)
         (map (nth asms) is);
     val (_, cases) = dest_elim (Thm.prop_of th');
   in
--- a/src/Tools/induct.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Tools/induct.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -573,8 +573,8 @@
     val pairs = Vartab.dest (Envir.term_env env);
     val types = Vartab.dest (Envir.type_env env);
     val ts = map (Thm.cterm_of ctxt o Envir.norm_term env o #2 o #2) pairs;
-    val xs = map2 (curry (Thm.cterm_of ctxt o Var)) (map #1 pairs) (map Thm.typ_of_cterm ts);
-  in (map (fn (xi, (S, T)) => apply2 (Thm.ctyp_of ctxt) (TVar (xi, S), T)) types, xs ~~ ts) end;
+    val xs = map #1 pairs ~~ map Thm.typ_of_cterm ts;
+  in (map (fn (ai, (S, T)) => ((ai, S), Thm.ctyp_of ctxt T)) types, xs ~~ ts) end;
 
 in
 
--- a/src/Tools/misc_legacy.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Tools/misc_legacy.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -173,9 +173,9 @@
           then ((v, T), true, free_of "METAHYP2_" (v, T))
           else ((v, T), false, free_of "METAHYP2_" (v, map #2 params ---> T))
       (*Instantiate subgoal vars by Free applied to params*)
-      fun mk_ctpair (v, in_concl, u) =
-          if in_concl then apply2 (Thm.cterm_of ctxt) (Var v, u)
-          else apply2 (Thm.cterm_of ctxt) (Var v, list_comb (u, fparams))
+      fun mk_inst (v, in_concl, u) =
+          if in_concl then (v, Thm.cterm_of ctxt u)
+          else (v, Thm.cterm_of ctxt (list_comb (u, fparams)))
       (*Restore Vars with higher type and index*)
       fun mk_subgoal_swap_ctpair (((a, i), T), in_concl, u as Free (_, U)) =
           if in_concl then apply2 (Thm.cterm_of ctxt) (u, Var ((a, i), T))
@@ -191,7 +191,7 @@
               fold Term.add_vars (Logic.strip_imp_prems prop) []
             and concl_vars = Term.add_vars (Logic.strip_imp_concl prop) []
             val subgoal_insts = map (mk_subgoal_inst concl_vars) subgoal_vars
-            val st' = Thm.instantiate ([], map mk_ctpair subgoal_insts) st
+            val st' = Thm.instantiate ([], map mk_inst subgoal_insts) st
             val emBs = map (Thm.cterm_of ctxt o embed) (Thm.prems_of st')
             val Cth  = implies_elim_list st' (map (elim o Thm.assume) emBs)
         in  (*restore the unknowns to the hypotheses*)
@@ -269,7 +269,7 @@
              fun thaw i th' = (*i is non-negative increment for Var indexes*)
                  th' |> forall_intr_list (map #2 insts)
                      |> forall_elim_list (map (Thm.incr_indexes_cterm i o #1) insts)
-         in  (Thm.instantiate ([],insts) fth, thaw)  end
+         in  (Thm.instantiate ([], map (apfst (dest_Var o Thm.term_of)) insts) fth, thaw)  end
  end;
 
 (*Basic version of the function above. No option to rename Vars apart in thaw.
@@ -291,7 +291,7 @@
              fun thaw th' =
                  th' |> forall_intr_list (map #2 insts)
                      |> forall_elim_list (map #1 insts)
-         in  (Thm.instantiate ([],insts) fth, thaw)  end
+         in  (Thm.instantiate ([], map (apfst (dest_Var o Thm.term_of)) insts) fth, thaw)  end
  end;
 
 end;
--- a/src/Tools/nbe.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/Tools/nbe.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -101,11 +101,10 @@
     val vs_tab = map mk_entry (Term.add_tfrees (Thm.term_of ct) []);
     fun instantiate thm =
       let
-        val cert_tvars = map (Thm.ctyp_of ctxt o TVar) (Term.add_tvars
-          ((fst o Logic.dest_equals o Logic.strip_imp_concl o Thm.prop_of) thm) []);
-        val instantiation =
-          map2 (fn cert_tvar => fn (_, (_, (cT, _))) => (cert_tvar, cT)) cert_tvars vs_tab;
-      in Thm.instantiate (instantiation, []) thm end;
+        val tvars =
+          Term.add_tvars (#1 (Logic.dest_equals (Logic.strip_imp_concl (Thm.prop_of thm)))) [];
+        val instT = map2 (fn v => fn (_, (_, (cT, _))) => (v, cT)) tvars vs_tab;
+      in Thm.instantiate (instT, []) thm end;
     fun of_class (TFree (v, _), class) =
           the (AList.lookup (op =) ((snd o snd o the o AList.lookup (op =) vs_tab) v) class)
       | of_class (T, _) = error ("Bad type " ^ Syntax.string_of_typ ctxt T);
--- a/src/ZF/Tools/cartprod.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/ZF/Tools/cartprod.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -109,7 +109,7 @@
       in
         remove_split ctxt
           (Drule.instantiate_normalize ([],
-            [apply2 (Thm.cterm_of ctxt) (Var(v, Ind_Syntax.iT-->T2), newt)]) rl)
+            [((v, Ind_Syntax.iT-->T2), Thm.cterm_of ctxt newt)]) rl)
       end
   | split_rule_var _ (t,T,rl) = rl;
 
--- a/src/ZF/Tools/datatype_package.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/ZF/Tools/datatype_package.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -17,7 +17,7 @@
     recursor_eqns : thm list,          (*equations for the recursor*)
     free_iffs  : thm list,             (*freeness rewrite rules*)
     free_SEs   : thm list,             (*freeness destruct rules*)
-    mk_free    : string -> thm};       (*function to make freeness theorems*)
+    mk_free    : Proof.context -> string -> thm};    (*function to make freeness theorems*)
 
 signature DATATYPE_ARG =
 sig
@@ -343,10 +343,9 @@
 
   (*Typical theorems have the form ~con1=con2, con1=con2==>False,
     con1(x)=con1(y) ==> x=y, con1(x)=con1(y) <-> x=y, etc.  *)
-  fun mk_free s =
+  fun mk_free ctxt s =
     let
-      val thy = Thm.theory_of_thm elim;
-      val ctxt = Proof_Context.init_global thy;
+      val thy = Proof_Context.theory_of ctxt;
     in
       Goal.prove_global thy [] [] (Syntax.read_prop ctxt s)
         (fn {context = ctxt', ...} => EVERY
--- a/src/ZF/Tools/inductive_package.ML	Fri Jul 03 10:17:29 2015 +0200
+++ b/src/ZF/Tools/inductive_package.ML	Mon Jul 06 14:27:03 2015 +0200
@@ -495,7 +495,7 @@
        The name "x.1" comes from the "RS spec" !*)
      val inst =
          case elem_frees of [_] => I
-            | _ => Drule.instantiate_normalize ([], [(Thm.global_cterm_of thy (Var(("x",1), Ind_Syntax.iT)),
+            | _ => Drule.instantiate_normalize ([], [(((("x",1), Ind_Syntax.iT)),
                                       Thm.global_cterm_of thy elem_tuple)]);
 
      (*strip quantifier and the implication*)