changeset:  38295 36b20361e2a5
author:     haftmann
date:       Mon, 09 Aug 2010 15:40:06 +0200
parent:     38290 581a402a80f0 (diff)
parent:     38294 1bef02e7e1b8 (current diff)
child:      38299 b1738c40c2a7
summary:    merged
--- a/Admin/isatest/settings/cygwin-poly-e	Mon Aug 09 14:47:28 2010 +0200
+++ b/Admin/isatest/settings/cygwin-poly-e	Mon Aug 09 15:40:06 2010 +0200
@@ -24,4 +24,6 @@
 
 ISABELLE_USEDIR_OPTIONS="-M 1 -i false -d false"
 
-init_component "$HOME/contrib_devel/kodkodi"
+# Disable while Jasmin is on vacation
+unset KODKODI
+#init_component "$HOME/contrib_devel/kodkodi"
--- a/doc-src/Nitpick/nitpick.tex	Mon Aug 09 14:47:28 2010 +0200
+++ b/doc-src/Nitpick/nitpick.tex	Mon Aug 09 15:40:06 2010 +0200
@@ -719,9 +719,9 @@
 & \phantom{fun\,~\textit{HOLogic.mk\_number}~T~(}{-}~\textit{snd}~(\textit{HOLogic.dest\_number~t2\/})) \\[-2pt]
 & \phantom{fun}\!{\mid}\,~\textit{my\_int\_postproc}~\_~\_~\_~\_~t = t \\[-2pt]
 {*}\}\end{aligned}$ \\[2\smallskipamount]
-$\textbf{setup}~\,\{{*} \\
+$\textbf{declaration}~\,\{{*} \\
 \!\begin{aligned}[t]
-& \textit{Nitpick\_Model.register\_term\_postprocessor\_global\/}~\!\begin{aligned}[t]
+& \textit{Nitpick\_Model.register\_term\_postprocessor}~\!\begin{aligned}[t]
   & @\{\textrm{typ}~\textit{my\_int}\} \\[-2pt]
   & \textit{my\_int\_postproc}\end{aligned} \\[-2pt]
 {*}\}\end{aligned}$
@@ -1237,11 +1237,13 @@
 
 where \textit{xs} is of type $'a~\textit{list}$ and \textit{ys} is of type
 $'b~\textit{list}$. A priori, Nitpick would need to consider $1\,000$ scopes to
-exhaust the specification \textit{card}~= 1--10. However, our intuition tells us
-that any counterexample found with a small scope would still be a counterexample
-in a larger scope---by simply ignoring the fresh $'a$ and $'b$ values provided
-by the larger scope. Nitpick comes to the same conclusion after a careful
-inspection of the formula and the relevant definitions:
+exhaust the specification \textit{card}~= 1--10 (10 cardinalities for $'a$
+$\times$ 10 cardinalities for $'b$ $\times$ 10 cardinalities for the datatypes).
+However, our intuition tells us that any counterexample found with a small scope
+would still be a counterexample in a larger scope---by simply ignoring the fresh
+$'a$ and $'b$ values provided by the larger scope. Nitpick comes to the same
+conclusion after a careful inspection of the formula and the relevant
+definitions:
 
 \prew
 \textbf{nitpick}~[\textit{verbose}] \\[2\smallskipamount]
@@ -1312,10 +1314,11 @@
 As insinuated in \S\ref{natural-numbers-and-integers} and
 \S\ref{inductive-datatypes}, \textit{nat}, \textit{int}, and inductive datatypes
 are normally monotonic and treated as such. The same is true for record types,
-\textit{rat}, \textit{real}, and some \textbf{typedef}'d types. Thus, given the
+\textit{rat}, and \textit{real}. Thus, given the
 cardinality specification 1--10, a formula involving \textit{nat}, \textit{int},
 \textit{int~list}, \textit{rat}, and \textit{rat~list} will lead Nitpick to
-consider only 10~scopes instead of $10\,000$.
+consider only 10~scopes instead of $10\,000$. On the other hand,
+\textbf{typedef}s and quotient types are generally nonmonotonic.
 
 \subsection{Inductive Properties}
 \label{inductive-properties}
@@ -2787,10 +2790,11 @@
 \textit{Nitpick\_HOL} structure:
 
 \prew
-$\textbf{val}\,~\textit{register\_codatatype\_global\/} : {}$ \\
-$\hbox{}\quad\textit{typ} \rightarrow \textit{string} \rightarrow \textit{styp~list} \rightarrow \textit{theory} \rightarrow \textit{theory}$ \\
-$\textbf{val}\,~\textit{unregister\_codatatype\_global\/} :\,
-\textit{typ} \rightarrow \textit{theory} \rightarrow \textit{theory}$
+$\textbf{val}\,~\textit{register\_codatatype\/} : {}$ \\
+$\hbox{}\quad\textit{morphism} \rightarrow \textit{typ} \rightarrow \textit{string} \rightarrow (\textit{string} \times \textit{typ})\;\textit{list} \rightarrow \textit{Context.generic} {}$ \\
+$\hbox{}\quad{\rightarrow}\; \textit{Context.generic}$ \\
+$\textbf{val}\,~\textit{unregister\_codatatype\/} : {}$ \\
+$\hbox{}\quad\textit{morphism} \rightarrow \textit{typ} \rightarrow \textit{Context.generic} \rightarrow \textit{Context.generic} {}$
 \postw
 
 The type $'a~\textit{llist}$ of lazy lists is already registered; had it
@@ -2798,24 +2802,25 @@
 to your theory file:
 
 \prew
-$\textbf{setup}~\,\{{*}$ \\
-$\hbox{}\qquad\textit{Nitpick\_HOL.register\_codatatype\_global\/}~@\{\antiq{typ}~``\kern1pt'a~\textit{llist\/}\textrm{''}\}$ \\
-$\hbox{}\qquad\qquad @\{\antiq{const\_name}~ \textit{llist\_case}\}$ \\
-$\hbox{}\qquad\qquad (\textit{map}~\textit{dest\_Const}~[@\{\antiq{term}~\textit{LNil}\},\, @\{\antiq{term}~\textit{LCons}\}])$ \\
+$\textbf{declaration}~\,\{{*}$ \\
+$\hbox{}\quad\textit{Nitpick\_HOL.register\_codatatype}~@\{\antiq{typ}~``\kern1pt'a~\textit{llist\/}\textrm{''}\}$ \\
+$\hbox{}\qquad\quad @\{\antiq{const\_name}~ \textit{llist\_case}\}$ \\
+$\hbox{}\qquad\quad (\textit{map}~\textit{dest\_Const}~[@\{\antiq{term}~\textit{LNil}\},\, @\{\antiq{term}~\textit{LCons}\}])$ \\
 ${*}\}$
 \postw
 
-The \textit{register\_codatatype\_global\/} function takes a coinductive type, its
-case function, and the list of its constructors. The case function must take its
-arguments in the order that the constructors are listed. If no case function
-with the correct signature is available, simply pass the empty string.
+The \textit{register\_codatatype} function takes a coinductive datatype, its
+case function, and the list of its constructors (in addition to the current
+morphism and generic proof context). The case function must take its arguments
+in the order that the constructors are listed. If no case function with the
+correct signature is available, simply pass the empty string.
 
 On the other hand, if your goal is to cripple Nitpick, add the following line to
 your theory file and try to check a few conjectures about lazy lists:
 
 \prew
-$\textbf{setup}~\,\{{*}$ \\
-$\hbox{}\qquad\textit{Nitpick\_HOL.unregister\_codatatype\_global\/}~@\{\antiq{typ}~``\kern1pt'a~\textit{llist\/}\textrm{''}\}$ \\
+$\textbf{declaration}~\,\{{*}$ \\
+$\hbox{}\quad\textit{Nitpick\_HOL.unregister\_codatatype}~@\{\antiq{typ}~``\kern1pt'a~\textit{llist\/}\textrm{''}\}$ \\
 ${*}\}$
 \postw
 
@@ -2835,10 +2840,11 @@
 \prew
 $\textbf{type}\,~\textit{term\_postprocessor}\,~{=} {}$ \\
 $\hbox{}\quad\textit{Proof.context} \rightarrow \textit{string} \rightarrow (\textit{typ} \rightarrow \textit{term~list\/}) \rightarrow \textit{typ} \rightarrow \textit{term} \rightarrow \textit{term}$ \\
-$\textbf{val}\,~\textit{register\_term\_postprocessor\_global\/} : {}$ \\
-$\hbox{}\quad\textit{typ} \rightarrow \textit{term\_postprocessor} \rightarrow \textit{theory} \rightarrow \textit{theory}$ \\
-$\textbf{val}\,~\textit{unregister\_term\_postprocessor\_global\/} :\,
-\textit{typ} \rightarrow \textit{theory} \rightarrow \textit{theory}$
+$\textbf{val}\,~\textit{register\_term\_postprocessor} : {}$ \\
+$\hbox{}\quad\textit{typ} \rightarrow \textit{term\_postprocessor} \rightarrow \textit{morphism} \rightarrow \textit{Context.generic}$ \\
+$\hbox{}\quad{\rightarrow}\; \textit{Context.generic}$ \\
+$\textbf{val}\,~\textit{unregister\_term\_postprocessor} : {}$ \\
+$\hbox{}\quad\textit{typ} \rightarrow \textit{morphism} \rightarrow \textit{Context.generic} \rightarrow \textit{Context.generic}$
 \postw
 
 \S\ref{typedefs-quotient-types-records-rationals-and-reals} and
--- a/etc/isar-keywords-ZF.el	Mon Aug 09 14:47:28 2010 +0200
+++ b/etc/isar-keywords-ZF.el	Mon Aug 09 15:40:06 2010 +0200
@@ -8,10 +8,8 @@
   '("\\."
     "\\.\\."
     "Isabelle\\.command"
-    "Isar\\.begin_document"
     "Isar\\.define_command"
     "Isar\\.edit_document"
-    "Isar\\.end_document"
     "ML"
     "ML_command"
     "ML_prf"
@@ -258,10 +256,8 @@
 
 (defconst isar-keywords-control
   '("Isabelle\\.command"
-    "Isar\\.begin_document"
     "Isar\\.define_command"
     "Isar\\.edit_document"
-    "Isar\\.end_document"
     "ProofGeneral\\.inform_file_processed"
     "ProofGeneral\\.inform_file_retracted"
     "ProofGeneral\\.kill_proof"
--- a/etc/isar-keywords.el	Mon Aug 09 14:47:28 2010 +0200
+++ b/etc/isar-keywords.el	Mon Aug 09 15:40:06 2010 +0200
@@ -8,10 +8,8 @@
   '("\\."
     "\\.\\."
     "Isabelle\\.command"
-    "Isar\\.begin_document"
     "Isar\\.define_command"
     "Isar\\.edit_document"
-    "Isar\\.end_document"
     "ML"
     "ML_command"
     "ML_prf"
@@ -322,10 +320,8 @@
 
 (defconst isar-keywords-control
   '("Isabelle\\.command"
-    "Isar\\.begin_document"
     "Isar\\.define_command"
     "Isar\\.edit_document"
-    "Isar\\.end_document"
     "ProofGeneral\\.inform_file_processed"
     "ProofGeneral\\.inform_file_retracted"
     "ProofGeneral\\.kill_proof"
--- a/src/HOL/IsaMakefile	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/IsaMakefile	Mon Aug 09 15:40:06 2010 +0200
@@ -321,9 +321,10 @@
   Tools/Sledgehammer/metis_tactics.ML \
   Tools/Sledgehammer/sledgehammer.ML \
   Tools/Sledgehammer/sledgehammer_fact_filter.ML \
-  Tools/Sledgehammer/sledgehammer_fact_minimizer.ML \
+  Tools/Sledgehammer/sledgehammer_fact_minimize.ML \
   Tools/Sledgehammer/sledgehammer_isar.ML \
   Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML \
+  Tools/Sledgehammer/sledgehammer_translate.ML \
   Tools/Sledgehammer/sledgehammer_util.ML \
   Tools/SMT/cvc3_solver.ML \
   Tools/SMT/smtlib_interface.ML \
--- a/src/HOL/Library/Multiset.thy	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Library/Multiset.thy	Mon Aug 09 15:40:06 2010 +0200
@@ -1723,8 +1723,8 @@
   | multiset_postproc _ _ _ _ t = t
 *}
 
-setup {*
-Nitpick_Model.register_term_postprocessor_global @{typ "'a multiset"}
+declaration {*
+Nitpick_Model.register_term_postprocessor @{typ "'a multiset"}
     multiset_postproc
 *}
 
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -391,7 +391,7 @@
     val params = Sledgehammer_Isar.default_params thy
       [("atps", prover_name), ("minimize_timeout", Int.toString timeout ^ " s")]
     val minimize =
-      Sledgehammer_Fact_Minimizer.minimize_theorems params 1 (subgoal_count st)
+      Sledgehammer_Fact_Minimize.minimize_theorems params 1 (subgoal_count st)
     val _ = log separator
   in
     case minimize st (these (!named_thms)) of
--- a/src/HOL/Nitpick_Examples/Manual_Nits.thy	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Nitpick_Examples/Manual_Nits.thy	Mon Aug 09 15:40:06 2010 +0200
@@ -128,9 +128,8 @@
   | my_int_postproc _ _ _ _ t = t
 *}
 
-setup {*
-Nitpick_Model.register_term_postprocessor_global @{typ my_int}
-                                                 my_int_postproc
+declaration {*
+Nitpick_Model.register_term_postprocessor @{typ my_int} my_int_postproc
 *}
 
 lemma "add x y = add x x"
@@ -213,8 +212,8 @@
 "iterates f a = LCons a (iterates f (f a))"
 sorry
 
-setup {*
-Nitpick_HOL.register_codatatype_global @{typ "'a llist"} ""
+declaration {*
+Nitpick_HOL.register_codatatype @{typ "'a llist"} ""
     (map dest_Const [@{term LNil}, @{term LCons}])
 *}
 
--- a/src/HOL/Rat.thy	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Rat.thy	Mon Aug 09 15:40:06 2010 +0200
@@ -1173,8 +1173,8 @@
 fun rat_of_int i = (i, 1);
 *}
 
-setup {*
-  Nitpick_HOL.register_frac_type_global @{type_name rat}
+declaration {*
+  Nitpick_HOL.register_frac_type @{type_name rat}
    [(@{const_name zero_rat_inst.zero_rat}, @{const_name Nitpick.zero_frac}),
     (@{const_name one_rat_inst.one_rat}, @{const_name Nitpick.one_frac}),
     (@{const_name plus_rat_inst.plus_rat}, @{const_name Nitpick.plus_frac}),
--- a/src/HOL/RealDef.thy	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/RealDef.thy	Mon Aug 09 15:40:06 2010 +0200
@@ -1795,8 +1795,8 @@
 fun real_of_int i = (i, 1);
 *}
 
-setup {*
-  Nitpick_HOL.register_frac_type_global @{type_name real}
+declaration {*
+  Nitpick_HOL.register_frac_type @{type_name real}
    [(@{const_name zero_real_inst.zero_real}, @{const_name Nitpick.zero_frac}),
     (@{const_name one_real_inst.one_real}, @{const_name Nitpick.one_frac}),
     (@{const_name plus_real_inst.plus_real}, @{const_name Nitpick.plus_frac}),
--- a/src/HOL/Sledgehammer.thy	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Sledgehammer.thy	Mon Aug 09 15:40:06 2010 +0200
@@ -20,9 +20,10 @@
   ("Tools/Sledgehammer/metis_tactics.ML")
   ("Tools/Sledgehammer/sledgehammer_util.ML")
   ("Tools/Sledgehammer/sledgehammer_fact_filter.ML")
+  ("Tools/Sledgehammer/sledgehammer_translate.ML")
   ("Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML")
   ("Tools/Sledgehammer/sledgehammer.ML")
-  ("Tools/Sledgehammer/sledgehammer_fact_minimizer.ML")
+  ("Tools/Sledgehammer/sledgehammer_fact_minimize.ML")
   ("Tools/Sledgehammer/sledgehammer_isar.ML")
 begin
 
@@ -100,10 +101,11 @@
 
 use "Tools/Sledgehammer/sledgehammer_util.ML"
 use "Tools/Sledgehammer/sledgehammer_fact_filter.ML"
+use "Tools/Sledgehammer/sledgehammer_translate.ML"
 use "Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML"
 use "Tools/Sledgehammer/sledgehammer.ML"
 setup Sledgehammer.setup
-use "Tools/Sledgehammer/sledgehammer_fact_minimizer.ML"
+use "Tools/Sledgehammer/sledgehammer_fact_minimize.ML"
 use "Tools/Sledgehammer/sledgehammer_isar.ML"
 setup Metis_Tactics.setup
 
--- a/src/HOL/Tools/ATP/atp_systems.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/ATP/atp_systems.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -140,7 +140,7 @@
        "# Cannot determine problem status within resource limit"),
       (OutOfResources, "SZS status: ResourceOut"),
       (OutOfResources, "SZS status ResourceOut")],
-   max_new_relevant_facts_per_iter = 90 (* FIXME *),
+   max_new_relevant_facts_per_iter = 60 (* FIXME *),
    prefers_theory_relevant = false,
    explicit_forall = false}
 val e = ("e", e_config)
--- a/src/HOL/Tools/Nitpick/nitpick_hol.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick_hol.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -144,16 +144,19 @@
   val eta_expand : typ list -> term -> int -> term
   val distinctness_formula : typ -> term list -> term
   val register_frac_type :
-    string -> (string * string) list -> Proof.context -> Proof.context
+    string -> (string * string) list -> morphism -> Context.generic
+    -> Context.generic
   val register_frac_type_global :
     string -> (string * string) list -> theory -> theory
-  val unregister_frac_type : string -> Proof.context -> Proof.context
+  val unregister_frac_type :
+    string -> morphism -> Context.generic -> Context.generic
   val unregister_frac_type_global : string -> theory -> theory
   val register_codatatype :
-    typ -> string -> styp list -> Proof.context -> Proof.context
+    typ -> string -> styp list -> morphism -> Context.generic -> Context.generic
   val register_codatatype_global :
     typ -> string -> styp list -> theory -> theory
-  val unregister_codatatype : typ -> Proof.context -> Proof.context
+  val unregister_codatatype :
+    typ -> morphism -> Context.generic -> Context.generic
   val unregister_codatatype_global : typ -> theory -> theory
   val datatype_constrs : hol_context -> typ -> styp list
   val binarized_and_boxed_datatype_constrs :
@@ -631,11 +634,15 @@
     val {frac_types, codatatypes} = Data.get generic
     val frac_types = AList.update (op =) (frac_s, ersaetze) frac_types
   in Data.put {frac_types = frac_types, codatatypes = codatatypes} generic end
-val register_frac_type = Context.proof_map oo register_frac_type_generic
+(* TODO: Consider morphism. *)
+fun register_frac_type frac_s ersaetze (_ : morphism) =
+  register_frac_type_generic frac_s ersaetze
 val register_frac_type_global = Context.theory_map oo register_frac_type_generic
 
 fun unregister_frac_type_generic frac_s = register_frac_type_generic frac_s []
-val unregister_frac_type = Context.proof_map o unregister_frac_type_generic
+(* TODO: Consider morphism. *)
+fun unregister_frac_type frac_s (_ : morphism) =
+  unregister_frac_type_generic frac_s
 val unregister_frac_type_global =
   Context.theory_map o unregister_frac_type_generic
 
@@ -656,12 +663,16 @@
     val codatatypes = AList.update (op =) (co_s, (case_name, constr_xs))
                                    codatatypes
   in Data.put {frac_types = frac_types, codatatypes = codatatypes} generic end
-val register_codatatype = Context.proof_map ooo register_codatatype_generic
+(* TODO: Consider morphism. *)
+fun register_codatatype co_T case_name constr_xs (_ : morphism) =
+  register_codatatype_generic co_T case_name constr_xs
 val register_codatatype_global =
   Context.theory_map ooo register_codatatype_generic
 
 fun unregister_codatatype_generic co_T = register_codatatype_generic co_T "" []
-val unregister_codatatype = Context.proof_map o unregister_codatatype_generic
+(* TODO: Consider morphism. *)
+fun unregister_codatatype co_T (_ : morphism) =
+  unregister_codatatype_generic co_T
 val unregister_codatatype_global =
   Context.theory_map o unregister_codatatype_generic
 
--- a/src/HOL/Tools/Nitpick/nitpick_model.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Nitpick/nitpick_model.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -25,10 +25,11 @@
   val unknown : string
   val unrep : unit -> string
   val register_term_postprocessor :
-    typ -> term_postprocessor -> Proof.context -> Proof.context
+    typ -> term_postprocessor -> morphism -> Context.generic -> Context.generic
   val register_term_postprocessor_global :
     typ -> term_postprocessor -> theory -> theory
-  val unregister_term_postprocessor : typ -> Proof.context -> Proof.context
+  val unregister_term_postprocessor :
+    typ -> morphism -> Context.generic -> Context.generic
   val unregister_term_postprocessor_global : typ -> theory -> theory
   val tuple_list_for_name :
     nut NameTable.table -> Kodkod.raw_bound list -> nut -> int list list
@@ -158,15 +159,18 @@
               | ord => ord)
            | _ => Term_Ord.fast_term_ord tp
 
-fun register_term_postprocessor_generic T p = Data.map (cons (T, p))
-val register_term_postprocessor =
-  Context.proof_map oo register_term_postprocessor_generic
+fun register_term_postprocessor_generic T postproc =
+  Data.map (cons (T, postproc))
+(* TODO: Consider morphism. *)
+fun register_term_postprocessor T postproc (_ : morphism) =
+  register_term_postprocessor_generic T postproc
 val register_term_postprocessor_global =
   Context.theory_map oo register_term_postprocessor_generic
 
 fun unregister_term_postprocessor_generic T = Data.map (AList.delete (op =) T)
-val unregister_term_postprocessor =
-  Context.proof_map o unregister_term_postprocessor_generic
+(* TODO: Consider morphism. *)
+fun unregister_term_postprocessor T (_ : morphism) =
+  unregister_term_postprocessor_generic T
 val unregister_term_postprocessor_global =
   Context.theory_map o unregister_term_postprocessor_generic
 
--- a/src/HOL/Tools/Sledgehammer/clausifier.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/clausifier.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -55,7 +55,7 @@
         let
           val args = OldTerm.term_frees body
           val Ts = map type_of args
-          val cT = Ts ---> T (* FIXME: use "skolem_type_and_args" *)
+          val cT = Ts ---> T
           (* Forms a lambda-abstraction over the formal parameters *)
           val rhs =
             list_abs_free (map dest_Free args,
@@ -78,11 +78,10 @@
 
 (*Returns the vars of a theorem*)
 fun vars_of_thm th =
-  map (Thm.cterm_of (theory_of_thm th) o Var) (Thm.fold_terms Term.add_vars th []);
+  map (cterm_of (theory_of_thm th) o Var) (Thm.fold_terms Term.add_vars th []);
 
 val fun_cong_all = @{thm expand_fun_eq [THEN iffD1]}
 
-(* ### FIXME: removes only one lambda? *)
 (* Removes the lambdas from an equation of the form "t = (%x. u)".
    (Cf. "extensionalize_term" in "ATP_Systems".) *)
 fun extensionalize_theorem th =
@@ -101,7 +100,7 @@
 val [g_C,f_C] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_C}));
 val [f_S,g_S] = map (cterm_of @{theory}) (OldTerm.term_vars (prop_of @{thm abs_S}));
 
-(*FIXME: requires more use of cterm constructors*)
+(* FIXME: Requires more use of cterm constructors. *)
 fun abstract ct =
   let
       val thy = theory_of_cterm ct
@@ -182,7 +181,7 @@
             TrueI)
 
 (*cterms are used throughout for efficiency*)
-val cTrueprop = Thm.cterm_of @{theory HOL} HOLogic.Trueprop;
+val cTrueprop = cterm_of @{theory HOL} HOLogic.Trueprop;
 
 (*Given an abstraction over n variables, replace the bound variables by free
   ones. Return the body, along with the list of free variables.*)
@@ -198,7 +197,7 @@
    Example: "EX x. x : A & x ~: B ==> sko A B : A & sko A B ~: B"  [.] *)
 fun skolem_theorem_of_def thy rhs0 =
   let
-    val rhs = rhs0 |> Type.legacy_freeze_thaw |> #1 |> Thm.cterm_of thy
+    val rhs = rhs0 |> Type.legacy_freeze_thaw |> #1 |> cterm_of thy
     val rhs' = rhs |> Thm.dest_comb |> snd
     val (ch, frees) = c_variant_abs_multi (rhs', [])
     val (hilbert, cabs) = ch |> Thm.dest_comb |>> term_of
@@ -206,7 +205,7 @@
       case hilbert of
         Const (@{const_name Eps}, Type (@{type_name fun}, [_, T])) => T
       | _ => raise TERM ("skolem_theorem_of_def: expected \"Eps\"", [hilbert])
-    val cex = Thm.cterm_of thy (HOLogic.exists_const T)
+    val cex = cterm_of thy (HOLogic.exists_const T)
     val ex_tm = Thm.capply cTrueprop (Thm.capply cex cabs)
     val conc =
       Drule.list_comb (rhs, frees)
@@ -231,8 +230,8 @@
                     |> Meson.make_nnf ctxt
   in  (th3, ctxt)  end;
 
-(* Skolemize a named theorem, with Skolem functions as additional premises. *)
-fun skolemize_theorem thy th =
+(* Convert a theorem to CNF, with Skolem functions as additional premises. *)
+fun cnf_axiom thy th =
   let
     val ctxt0 = Variable.global_thm_context th
     val (nnfth, ctxt) = to_nnf th ctxt0
@@ -247,8 +246,4 @@
   end
   handle THM _ => []
 
-(* Convert Isabelle theorems into axiom clauses. *)
-(* FIXME: is transfer necessary? *)
-fun cnf_axiom thy = skolemize_theorem thy o Thm.transfer thy
-
 end;
--- a/src/HOL/Tools/Sledgehammer/metis_tactics.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/metis_tactics.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -574,23 +574,21 @@
 
 fun real_literal (_, (c, _)) = not (String.isPrefix class_prefix c);
 
-(* FIXME: use "fold" instead *)
-fun translate _ _ _ thpairs [] = thpairs
-  | translate ctxt mode skolems thpairs ((fol_th, inf) :: infpairs) =
-      let val _ = trace_msg (fn () => "=============================================")
-          val _ = trace_msg (fn () => "METIS THM: " ^ Metis.Thm.toString fol_th)
-          val _ = trace_msg (fn () => "INFERENCE: " ^ Metis.Proof.inferenceToString inf)
-          val th = Meson.flexflex_first_order (step ctxt mode skolems
-                                                    thpairs (fol_th, inf))
-          val _ = trace_msg (fn () => "ISABELLE THM: " ^ Display.string_of_thm ctxt th)
-          val _ = trace_msg (fn () => "=============================================")
-          val n_metis_lits =
-            length (filter real_literal (Metis.LiteralSet.toList (Metis.Thm.clause fol_th)))
-      in
-          if nprems_of th = n_metis_lits then ()
-          else raise METIS ("translate", "Proof reconstruction has gone wrong.");
-          translate ctxt mode skolems ((fol_th, th) :: thpairs) infpairs
-      end;
+fun translate_one ctxt mode skolems (fol_th, inf) thpairs =
+  let
+    val _ = trace_msg (fn () => "=============================================")
+    val _ = trace_msg (fn () => "METIS THM: " ^ Metis.Thm.toString fol_th)
+    val _ = trace_msg (fn () => "INFERENCE: " ^ Metis.Proof.inferenceToString inf)
+    val th = Meson.flexflex_first_order (step ctxt mode skolems
+                                              thpairs (fol_th, inf))
+    val _ = trace_msg (fn () => "ISABELLE THM: " ^ Display.string_of_thm ctxt th)
+    val _ = trace_msg (fn () => "=============================================")
+    val n_metis_lits =
+      length (filter real_literal (Metis.LiteralSet.toList (Metis.Thm.clause fol_th)))
+    val _ =
+      if nprems_of th = n_metis_lits then ()
+      else raise METIS ("translate", "Proof reconstruction has gone wrong.")
+  in (fol_th, th) :: thpairs end
 
 (*Determining which axiom clauses are actually used*)
 fun used_axioms axioms (th, Metis.Proof.Axiom _) = SOME (lookth axioms th)
@@ -744,7 +742,7 @@
                 val ctxt' = fold Variable.declare_constraints (map prop_of cls) ctxt
                              (*add constraints arising from converting goal to clause form*)
                 val proof = Metis.Proof.proof mth
-                val result = translate ctxt' mode skolems axioms proof
+                val result = fold (translate_one ctxt' mode skolems) proof axioms
                 and used = map_filter (used_axioms axioms) proof
                 val _ = trace_msg (fn () => "METIS COMPLETED...clauses actually used:")
                 val _ = app (fn th => trace_msg (fn () => Display.string_of_thm ctxt th)) used
--- a/src/HOL/Tools/Sledgehammer/sledgehammer.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -39,7 +39,7 @@
      atp_run_time_in_msecs: int,
      output: string,
      proof: string,
-     internal_thm_names: string Vector.vector,
+     axiom_names: string Vector.vector,
      conjecture_shape: int list list}
   type prover = params -> minimize_command -> problem -> prover_result
 
@@ -64,6 +64,7 @@
 open Metis_Clauses
 open Sledgehammer_Util
 open Sledgehammer_Fact_Filter
+open Sledgehammer_Translate
 open Sledgehammer_Proof_Reconstruct
 
 
@@ -73,9 +74,6 @@
    "Async_Manager". *)
 val das_Tool = "Sledgehammer"
 
-(* Freshness almost guaranteed! *)
-val sledgehammer_weak_prefix = "Sledgehammer:"
-
 fun kill_atps () = Async_Manager.kill_threads das_Tool "ATPs"
 fun running_atps () = Async_Manager.running_threads das_Tool "ATPs"
 val messages = Async_Manager.thread_messages das_Tool "ATP"
@@ -112,7 +110,7 @@
    atp_run_time_in_msecs: int,
    output: string,
    proof: string,
-   internal_thm_names: string Vector.vector,
+   axiom_names: string Vector.vector,
    conjecture_shape: int list list}
 
 type prover = params -> minimize_command -> problem -> prover_result
@@ -158,470 +156,6 @@
                else
                  failure))
 
-
-(* Clause preparation *)
-
-datatype fol_formula =
-  FOLFormula of {name: string,
-                 kind: kind,
-                 combformula: (name, combterm) formula,
-                 ctypes_sorts: typ list}
-
-fun mk_anot phi = AConn (ANot, [phi])
-fun mk_aconn c phi1 phi2 = AConn (c, [phi1, phi2])
-fun mk_ahorn [] phi = phi
-  | mk_ahorn (phi :: phis) psi =
-    AConn (AImplies, [fold (mk_aconn AAnd) phis phi, psi])
-
-(* ### FIXME: reintroduce
-fun make_formula_table xs =
-  fold (Termtab.update o `(prop_of o snd)) xs Termtab.empty
-(* Remove existing axioms from the conjecture, as this can
-   dramatically boost an ATP's performance (for some reason). *)
-fun subtract_cls axioms =
-  filter_out (Termtab.defined (make_formula_table axioms) o prop_of)
-*)
-
-fun combformula_for_prop thy =
-  let
-    val do_term = combterm_from_term thy
-    fun do_quant bs q s T t' =
-      do_formula ((s, T) :: bs) t'
-      #>> (fn phi => AQuant (q, [`make_bound_var s], phi))
-    and do_conn bs c t1 t2 =
-      do_formula bs t1 ##>> do_formula bs t2
-      #>> (fn (phi1, phi2) => AConn (c, [phi1, phi2]))
-    and do_formula bs t =
-      case t of
-        @{const Not} $ t1 =>
-        do_formula bs t1 #>> (fn phi => AConn (ANot, [phi]))
-      | Const (@{const_name All}, _) $ Abs (s, T, t') =>
-        do_quant bs AForall s T t'
-      | Const (@{const_name Ex}, _) $ Abs (s, T, t') =>
-        do_quant bs AExists s T t'
-      | @{const "op &"} $ t1 $ t2 => do_conn bs AAnd t1 t2
-      | @{const "op |"} $ t1 $ t2 => do_conn bs AOr t1 t2
-      | @{const "op -->"} $ t1 $ t2 => do_conn bs AImplies t1 t2
-      | Const (@{const_name "op ="}, Type (_, [@{typ bool}, _])) $ t1 $ t2 =>
-        do_conn bs AIff t1 t2
-      | _ => (fn ts => do_term bs (Envir.eta_contract t)
-                       |>> AAtom ||> union (op =) ts)
-  in do_formula [] end
-
-(* Converts an elim-rule into an equivalent theorem that does not have the
-   predicate variable. Leaves other theorems unchanged. We simply instantiate
-   the conclusion variable to False. (Cf. "transform_elim_term" in
-   "ATP_Systems".) *)
-(* FIXME: test! *)
-fun transform_elim_term t =
-  case Logic.strip_imp_concl t of
-    @{const Trueprop} $ Var (z, @{typ bool}) =>
-    subst_Vars [(z, @{const True})] t
-  | Var (z, @{typ prop}) => subst_Vars [(z, @{prop True})] t
-  | _ => t
-
-fun presimplify_term thy =
-  Skip_Proof.make_thm thy
-  #> Meson.presimplify
-  #> prop_of
-
-fun concealed_bound_name j = sledgehammer_weak_prefix ^ Int.toString j
-fun conceal_bounds Ts t =
-  subst_bounds (map (Free o apfst concealed_bound_name)
-                    (0 upto length Ts - 1 ~~ Ts), t)
-fun reveal_bounds Ts =
-  subst_atomic (map (fn (j, T) => (Free (concealed_bound_name j, T), Bound j))
-                    (0 upto length Ts - 1 ~~ Ts))
-
-fun introduce_combinators_in_term ctxt kind t =
-  let
-    val thy = ProofContext.theory_of ctxt
-    fun aux Ts t =
-      case t of
-        @{const Not} $ t1 => @{const Not} $ aux Ts t1
-      | (t0 as Const (@{const_name All}, _)) $ Abs (s, T, t') =>
-        t0 $ Abs (s, T, aux (T :: Ts) t')
-      | (t0 as Const (@{const_name Ex}, _)) $ Abs (s, T, t') =>
-        t0 $ Abs (s, T, aux (T :: Ts) t')
-      | (t0 as @{const "op &"}) $ t1 $ t2 => t0 $ aux Ts t1 $ aux Ts t2
-      | (t0 as @{const "op |"}) $ t1 $ t2 => t0 $ aux Ts t1 $ aux Ts t2
-      | (t0 as @{const "op -->"}) $ t1 $ t2 => t0 $ aux Ts t1 $ aux Ts t2
-      | (t0 as Const (@{const_name "op ="}, Type (_, [@{typ bool}, _])))
-          $ t1 $ t2 =>
-        t0 $ aux Ts t1 $ aux Ts t2
-      | _ => if not (exists_subterm (fn Abs _ => true | _ => false) t) then
-               t
-             else
-               let
-                 val t = t |> conceal_bounds Ts
-                           |> Envir.eta_contract
-                 val ([t], ctxt') = Variable.import_terms true [t] ctxt
-               in
-                 t |> cterm_of thy
-                   |> Clausifier.introduce_combinators_in_cterm
-                   |> singleton (Variable.export ctxt' ctxt)
-                   |> prop_of |> Logic.dest_equals |> snd
-                   |> reveal_bounds Ts
-               end
-  in t |> not (Meson.is_fol_term thy t) ? aux [] end
-  handle THM _ =>
-         (* A type variable of sort "{}" will make abstraction fail. *)
-         case kind of
-           Axiom => HOLogic.true_const
-         | Conjecture => HOLogic.false_const
-
-(* Metis's use of "resolve_tac" freezes the schematic variables. We simulate the
-   same in Sledgehammer to prevent the discovery of unreplable proofs. *)
-fun freeze_term t =
-  let
-    fun aux (t $ u) = aux t $ aux u
-      | aux (Abs (s, T, t)) = Abs (s, T, aux t)
-      | aux (Var ((s, i), T)) =
-        Free (sledgehammer_weak_prefix ^ s ^ "_" ^ string_of_int i, T)
-      | aux t = t
-  in t |> exists_subterm is_Var t ? aux end
-
-(* making axiom and conjecture formulas *)
-fun make_formula ctxt presimp (name, kind, t) =
-  let
-    val thy = ProofContext.theory_of ctxt
-    val t = t |> transform_elim_term
-              |> Object_Logic.atomize_term thy
-    val t = t |> fastype_of t = HOLogic.boolT ? HOLogic.mk_Trueprop
-              |> extensionalize_term
-              |> presimp ? presimplify_term thy
-              |> perhaps (try (HOLogic.dest_Trueprop))
-              |> introduce_combinators_in_term ctxt kind
-              |> kind = Conjecture ? freeze_term
-    val (combformula, ctypes_sorts) = combformula_for_prop thy t []
-  in
-    FOLFormula {name = name, combformula = combformula, kind = kind,
-                ctypes_sorts = ctypes_sorts}
-  end
-
-fun make_axiom ctxt presimp (name, th) =
-  (name, make_formula ctxt presimp (name, Axiom, prop_of th))
-fun make_conjectures ctxt ts =
-  map2 (fn j => fn t => make_formula ctxt true (Int.toString j, Conjecture, t))
-       (0 upto length ts - 1) ts
-
-(** Helper facts **)
-
-fun count_combterm (CombConst ((s, _), _, _)) =
-    Symtab.map_entry s (Integer.add 1)
-  | count_combterm (CombVar _) = I
-  | count_combterm (CombApp (t1, t2)) = fold count_combterm [t1, t2]
-fun count_combformula (AQuant (_, _, phi)) = count_combformula phi
-  | count_combformula (AConn (_, phis)) = fold count_combformula phis
-  | count_combformula (AAtom tm) = count_combterm tm
-fun count_fol_formula (FOLFormula {combformula, ...}) =
-  count_combformula combformula
-
-val optional_helpers =
-  [(["c_COMBI", "c_COMBK"], @{thms COMBI_def COMBK_def}),
-   (["c_COMBB", "c_COMBC"], @{thms COMBB_def COMBC_def}),
-   (["c_COMBS"], @{thms COMBS_def})]
-val optional_typed_helpers =
-  [(["c_True", "c_False"], @{thms True_or_False}),
-   (["c_If"], @{thms if_True if_False True_or_False})]
-val mandatory_helpers = @{thms fequal_imp_equal equal_imp_fequal}
-
-val init_counters =
-  Symtab.make (maps (maps (map (rpair 0) o fst))
-                    [optional_helpers, optional_typed_helpers])
-
-fun get_helper_facts ctxt is_FO full_types conjectures axioms =
-  let
-    val ct = fold (fold count_fol_formula) [conjectures, axioms] init_counters
-    fun is_needed c = the (Symtab.lookup ct c) > 0
-  in
-    (optional_helpers
-     |> full_types ? append optional_typed_helpers
-     |> maps (fn (ss, ths) =>
-                 if exists is_needed ss then map (`Thm.get_name_hint) ths
-                 else [])) @
-    (if is_FO then [] else map (`Thm.get_name_hint) mandatory_helpers)
-    |> map (snd o make_axiom ctxt false)
-  end
-
-fun meta_not t = @{const "==>"} $ t $ @{prop False}
-
-fun prepare_formulas ctxt full_types hyp_ts concl_t axcls =
-  let
-    val thy = ProofContext.theory_of ctxt
-    val goal_t = Logic.list_implies (hyp_ts, concl_t)
-    val is_FO = Meson.is_fol_term thy goal_t
-    val axtms = map (prop_of o snd) axcls
-    val subs = tfree_classes_of_terms [goal_t]
-    val supers = tvar_classes_of_terms axtms
-    val tycons = type_consts_of_terms thy (goal_t :: axtms)
-    (* TFrees in the conjecture; TVars in the axioms *)
-    val conjectures = map meta_not hyp_ts @ [concl_t] |> make_conjectures ctxt
-    val (clnames, axioms) = ListPair.unzip (map (make_axiom ctxt true) axcls)
-    val helper_facts = get_helper_facts ctxt is_FO full_types conjectures axioms
-    val (supers', arity_clauses) = make_arity_clauses thy tycons supers
-    val class_rel_clauses = make_class_rel_clauses thy subs supers'
-  in
-    (Vector.fromList clnames,
-      (conjectures, axioms, helper_facts, class_rel_clauses, arity_clauses))
-  end
-
-fun wrap_type ty t = ATerm ((type_wrapper_name, type_wrapper_name), [ty, t])
-
-fun fo_term_for_combtyp (CombTVar name) = ATerm (name, [])
-  | fo_term_for_combtyp (CombTFree name) = ATerm (name, [])
-  | fo_term_for_combtyp (CombType (name, tys)) =
-    ATerm (name, map fo_term_for_combtyp tys)
-
-fun fo_literal_for_type_literal (TyLitVar (class, name)) =
-    (true, ATerm (class, [ATerm (name, [])]))
-  | fo_literal_for_type_literal (TyLitFree (class, name)) =
-    (true, ATerm (class, [ATerm (name, [])]))
-
-fun formula_for_fo_literal (pos, t) = AAtom t |> not pos ? mk_anot
-
-fun fo_term_for_combterm full_types =
-  let
-    fun aux top_level u =
-      let
-        val (head, args) = strip_combterm_comb u
-        val (x, ty_args) =
-          case head of
-            CombConst (name as (s, s'), _, ty_args) =>
-            if s = "equal" then
-              (if top_level andalso length args = 2 then name
-               else ("c_fequal", @{const_name fequal}), [])
-            else if top_level then
-              case s of
-                "c_False" => (("$false", s'), [])
-              | "c_True" => (("$true", s'), [])
-              | _ => (name, if full_types then [] else ty_args)
-            else
-              (name, if full_types then [] else ty_args)
-          | CombVar (name, _) => (name, [])
-          | CombApp _ => raise Fail "impossible \"CombApp\""
-        val t = ATerm (x, map fo_term_for_combtyp ty_args @
-                          map (aux false) args)
-    in
-      if full_types then wrap_type (fo_term_for_combtyp (combtyp_of u)) t else t
-    end
-  in aux true end
-
-fun formula_for_combformula full_types =
-  let
-    fun aux (AQuant (q, xs, phi)) = AQuant (q, xs, aux phi)
-      | aux (AConn (c, phis)) = AConn (c, map aux phis)
-      | aux (AAtom tm) = AAtom (fo_term_for_combterm full_types tm)
-  in aux end
-
-fun formula_for_axiom full_types (FOLFormula {combformula, ctypes_sorts, ...}) =
-  mk_ahorn (map (formula_for_fo_literal o fo_literal_for_type_literal)
-                (type_literals_for_types ctypes_sorts))
-           (formula_for_combformula full_types combformula)
-
-fun problem_line_for_fact prefix full_types
-                          (formula as FOLFormula {name, kind, ...}) =
-  Fof (prefix ^ ascii_of name, kind, formula_for_axiom full_types formula)
-
-fun problem_line_for_class_rel_clause (ClassRelClause {name, subclass,
-                                                       superclass, ...}) =
-  let val ty_arg = ATerm (("T", "T"), []) in
-    Fof (class_rel_clause_prefix ^ ascii_of name, Axiom,
-         AConn (AImplies, [AAtom (ATerm (subclass, [ty_arg])),
-                           AAtom (ATerm (superclass, [ty_arg]))]))
-  end
-
-fun fo_literal_for_arity_literal (TConsLit (c, t, args)) =
-    (true, ATerm (c, [ATerm (t, map (fn arg => ATerm (arg, [])) args)]))
-  | fo_literal_for_arity_literal (TVarLit (c, sort)) =
-    (false, ATerm (c, [ATerm (sort, [])]))
-
-fun problem_line_for_arity_clause (ArityClause {name, conclLit, premLits,
-                                                ...}) =
-  Fof (arity_clause_prefix ^ ascii_of name, Axiom,
-       mk_ahorn (map (formula_for_fo_literal o apfst not
-                      o fo_literal_for_arity_literal) premLits)
-                (formula_for_fo_literal
-                     (fo_literal_for_arity_literal conclLit)))
-
-fun problem_line_for_conjecture full_types
-                                (FOLFormula {name, kind, combformula, ...}) =
-  Fof (conjecture_prefix ^ name, kind,
-       formula_for_combformula full_types combformula)
-
-fun free_type_literals_for_conjecture (FOLFormula {ctypes_sorts, ...}) =
-  map fo_literal_for_type_literal (type_literals_for_types ctypes_sorts)
-
-fun problem_line_for_free_type lit =
-  Fof (tfrees_name, Conjecture, mk_anot (formula_for_fo_literal lit))
-fun problem_lines_for_free_types conjectures =
-  let
-    val litss = map free_type_literals_for_conjecture conjectures
-    val lits = fold (union (op =)) litss []
-  in map problem_line_for_free_type lits end
-
-(** "hBOOL" and "hAPP" **)
-
-type const_info = {min_arity: int, max_arity: int, sub_level: bool}
-
-fun consider_term top_level (ATerm ((s, _), ts)) =
-  (if is_tptp_variable s then
-     I
-   else
-     let val n = length ts in
-       Symtab.map_default
-           (s, {min_arity = n, max_arity = 0, sub_level = false})
-           (fn {min_arity, max_arity, sub_level} =>
-               {min_arity = Int.min (n, min_arity),
-                max_arity = Int.max (n, max_arity),
-                sub_level = sub_level orelse not top_level})
-     end)
-  #> fold (consider_term (top_level andalso s = type_wrapper_name)) ts
-fun consider_formula (AQuant (_, _, phi)) = consider_formula phi
-  | consider_formula (AConn (_, phis)) = fold consider_formula phis
-  | consider_formula (AAtom tm) = consider_term true tm
-
-fun consider_problem_line (Fof (_, _, phi)) = consider_formula phi
-fun consider_problem problem = fold (fold consider_problem_line o snd) problem
-
-fun const_table_for_problem explicit_apply problem =
-  if explicit_apply then NONE
-  else SOME (Symtab.empty |> consider_problem problem)
-
-val tc_fun = make_fixed_type_const @{type_name fun}
-
-fun min_arity_of thy full_types NONE s =
-    (if s = "equal" orelse s = type_wrapper_name orelse
-        String.isPrefix type_const_prefix s orelse
-        String.isPrefix class_prefix s then
-       16383 (* large number *)
-     else if full_types then
-       0
-     else case strip_prefix_and_undo_ascii const_prefix s of
-       SOME s' => num_type_args thy (invert_const s')
-     | NONE => 0)
-  | min_arity_of _ _ (SOME the_const_tab) s =
-    case Symtab.lookup the_const_tab s of
-      SOME ({min_arity, ...} : const_info) => min_arity
-    | NONE => 0
-
-fun full_type_of (ATerm ((s, _), [ty, _])) =
-    if s = type_wrapper_name then ty else raise Fail "expected type wrapper"
-  | full_type_of _ = raise Fail "expected type wrapper"
-
-fun list_hAPP_rev _ t1 [] = t1
-  | list_hAPP_rev NONE t1 (t2 :: ts2) =
-    ATerm (`I "hAPP", [list_hAPP_rev NONE t1 ts2, t2])
-  | list_hAPP_rev (SOME ty) t1 (t2 :: ts2) =
-    let val ty' = ATerm (`make_fixed_type_const @{type_name fun},
-                         [full_type_of t2, ty]) in
-      ATerm (`I "hAPP", [wrap_type ty' (list_hAPP_rev (SOME ty') t1 ts2), t2])
-    end
-
-fun repair_applications_in_term thy full_types const_tab =
-  let
-    fun aux opt_ty (ATerm (name as (s, _), ts)) =
-      if s = type_wrapper_name then
-        case ts of
-          [t1, t2] => ATerm (name, [aux NONE t1, aux (SOME t1) t2])
-        | _ => raise Fail "malformed type wrapper"
-      else
-        let
-          val ts = map (aux NONE) ts
-          val (ts1, ts2) = chop (min_arity_of thy full_types const_tab s) ts
-        in list_hAPP_rev opt_ty (ATerm (name, ts1)) (rev ts2) end
-  in aux NONE end
-
-fun boolify t = ATerm (`I "hBOOL", [t])
-
-(* True if the constant ever appears outside of the top-level position in
-   literals, or if it appears with different arities (e.g., because of different
-   type instantiations). If false, the constant always receives all of its
-   arguments and is used as a predicate. *)
-fun is_predicate NONE s =
-    s = "equal" orelse String.isPrefix type_const_prefix s orelse
-    String.isPrefix class_prefix s
-  | is_predicate (SOME the_const_tab) s =
-    case Symtab.lookup the_const_tab s of
-      SOME {min_arity, max_arity, sub_level} =>
-      not sub_level andalso min_arity = max_arity
-    | NONE => false
-
-fun repair_predicates_in_term const_tab (t as ATerm ((s, _), ts)) =
-  if s = type_wrapper_name then
-    case ts of
-      [_, t' as ATerm ((s', _), _)] =>
-      if is_predicate const_tab s' then t' else boolify t
-    | _ => raise Fail "malformed type wrapper"
-  else
-    t |> not (is_predicate const_tab s) ? boolify
-
-fun close_universally phi =
-  let
-    fun term_vars bounds (ATerm (name as (s, _), tms)) =
-        (is_tptp_variable s andalso not (member (op =) bounds name))
-          ? insert (op =) name
-        #> fold (term_vars bounds) tms
-    fun formula_vars bounds (AQuant (q, xs, phi)) =
-        formula_vars (xs @ bounds) phi
-      | formula_vars bounds (AConn (_, phis)) = fold (formula_vars bounds) phis
-      | formula_vars bounds (AAtom tm) = term_vars bounds tm
-  in
-    case formula_vars [] phi [] of [] => phi | xs => AQuant (AForall, xs, phi)
-  end
-
-fun repair_formula thy explicit_forall full_types const_tab =
-  let
-    fun aux (AQuant (q, xs, phi)) = AQuant (q, xs, aux phi)
-      | aux (AConn (c, phis)) = AConn (c, map aux phis)
-      | aux (AAtom tm) =
-        AAtom (tm |> repair_applications_in_term thy full_types const_tab
-                  |> repair_predicates_in_term const_tab)
-  in aux #> explicit_forall ? close_universally end
-
-fun repair_problem_line thy explicit_forall full_types const_tab
-                        (Fof (ident, kind, phi)) =
-  Fof (ident, kind, repair_formula thy explicit_forall full_types const_tab phi)
-fun repair_problem_with_const_table thy =
-  map o apsnd o map ooo repair_problem_line thy
-
-fun repair_problem thy explicit_forall full_types explicit_apply problem =
-  repair_problem_with_const_table thy explicit_forall full_types
-      (const_table_for_problem explicit_apply problem) problem
-
-fun write_tptp_file thy readable_names explicit_forall full_types explicit_apply
-                    file (conjectures, axioms, helper_facts, class_rel_clauses,
-                          arity_clauses) =
-  let
-    val axiom_lines = map (problem_line_for_fact axiom_prefix full_types) axioms
-    val helper_lines =
-      map (problem_line_for_fact helper_prefix full_types) helper_facts
-    val conjecture_lines =
-      map (problem_line_for_conjecture full_types) conjectures
-    val tfree_lines = problem_lines_for_free_types conjectures
-    val class_rel_lines =
-      map problem_line_for_class_rel_clause class_rel_clauses
-    val arity_lines = map problem_line_for_arity_clause arity_clauses
-    (* Reordering these might or might not confuse the proof reconstruction
-       code or the SPASS Flotter hack. *)
-    val problem =
-      [("Relevant facts", axiom_lines),
-       ("Class relationships", class_rel_lines),
-       ("Arity declarations", arity_lines),
-       ("Helper facts", helper_lines),
-       ("Conjectures", conjecture_lines),
-       ("Type variables", tfree_lines)]
-      |> repair_problem thy explicit_forall full_types explicit_apply
-    val (problem, pool) = nice_tptp_problem readable_names problem
-    val conjecture_offset =
-      length axiom_lines + length class_rel_lines + length arity_lines
-      + length helper_lines
-    val _ = File.write_list file (strings_for_tptp_problem problem)
-  in
-    (case pool of SOME the_pool => snd the_pool | NONE => Symtab.empty,
-     conjecture_offset)
-  end
-
 fun extract_clause_sequence output =
   let
     val tokens_of = String.tokens (not o Char.isAlphaNum)
@@ -684,7 +218,6 @@
   let
     val (ctxt, (_, th)) = goal;
     val thy = ProofContext.theory_of ctxt
-    (* ### FIXME: (1) preprocessing for "if" etc. *)
     val (params, hyp_ts, concl_t) = strip_subgoal th subgoal
     val the_axioms =
       case axioms of
@@ -694,8 +227,6 @@
                     max_new_relevant_facts_per_iter
                     (the_default prefers_theory_relevant theory_relevant)
                     relevance_override goal hyp_ts concl_t
-    val (internal_thm_names, formulas) =
-      prepare_formulas ctxt full_types hyp_ts concl_t the_axioms
 
     (* path to unique problem file *)
     val the_dest_dir = if overlord then getenv "ISABELLE_HOME_USER"
@@ -760,9 +291,10 @@
                                             known_failures output
               in (output, msecs, proof, outcome) end
             val readable_names = debug andalso overlord
-            val (pool, conjecture_offset) =
-              write_tptp_file thy readable_names explicit_forall full_types
-                              explicit_apply probfile formulas
+            val (problem, pool, conjecture_offset, axiom_names) =
+              prepare_problem ctxt readable_names explicit_forall full_types
+                              explicit_apply hyp_ts concl_t the_axioms
+            val _ = File.write_list probfile (strings_for_tptp_problem problem)
             val conjecture_shape =
               conjecture_offset + 1 upto conjecture_offset + length hyp_ts + 1
               |> map single
@@ -773,7 +305,7 @@
                      |> (fn (output, msecs, proof, outcome) =>
                             (output, msecs0 + msecs, proof, outcome))
                    | result => result)
-          in ((pool, conjecture_shape), result) end
+          in ((pool, conjecture_shape, axiom_names), result) end
         else
           error ("Bad executable: " ^ Path.implode command ^ ".")
 
@@ -787,24 +319,24 @@
       else
         File.write (Path.explode (Path.implode probfile ^ "_proof")) output
 
-    val ((pool, conjecture_shape), (output, msecs, proof, outcome)) =
+    val ((pool, conjecture_shape, axiom_names),
+         (output, msecs, proof, outcome)) =
       with_path cleanup export run_on (prob_pathname subgoal)
-    val (conjecture_shape, internal_thm_names) =
+    val (conjecture_shape, axiom_names) =
       repair_conjecture_shape_and_theorem_names output conjecture_shape
-                                                internal_thm_names
+                                                axiom_names
 
     val (message, used_thm_names) =
       case outcome of
         NONE =>
         proof_text isar_proof
             (pool, debug, isar_shrink_factor, ctxt, conjecture_shape)
-            (full_types, minimize_command, proof, internal_thm_names, th,
-             subgoal)
+            (full_types, minimize_command, proof, axiom_names, th, subgoal)
       | SOME failure => (string_for_failure failure ^ "\n", [])
   in
     {outcome = outcome, message = message, pool = pool,
      used_thm_names = used_thm_names, atp_run_time_in_msecs = msecs,
-     output = output, proof = proof, internal_thm_names = internal_thm_names,
+     output = output, proof = proof, axiom_names = axiom_names,
      conjecture_shape = conjecture_shape}
   end
 
@@ -831,6 +363,8 @@
             in
               prover params (minimize_command name) problem |> #message
               handle ERROR message => "Error: " ^ message ^ "\n"
+                   | exn => "Internal error: \n" ^ ML_Compiler.exn_message exn ^
+                            "\n"
             end)
   end
 
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_filter.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -50,8 +50,13 @@
 (* Relevance Filtering                                         *)
 (***************************************************************)
 
-fun strip_Trueprop (@{const Trueprop} $ t) = t
-  | strip_Trueprop t = t;
+fun strip_Trueprop_and_all (@{const Trueprop} $ t) =
+    strip_Trueprop_and_all t
+  | strip_Trueprop_and_all (Const (@{const_name all}, _) $ Abs (_, _, t)) =
+    strip_Trueprop_and_all t
+  | strip_Trueprop_and_all (Const (@{const_name All}, _) $ Abs (_, _, t)) =
+    strip_Trueprop_and_all t
+  | strip_Trueprop_and_all t = t
 
 (*** constants with types ***)
 
@@ -312,7 +317,6 @@
                 if member Thm.eq_thm add_thms th then 1.0
                 else if member Thm.eq_thm del_thms th then 0.0
                 else formula_weight const_tab rel_const_tab consts_typs
-val _ = (if Real.isFinite weight then () else error ("*** " ^ Real.toString weight)) (*###*)
             in
               if weight >= threshold orelse
                  (defs_relevant andalso defines thy th rel_const_tab) then
@@ -502,33 +506,23 @@
 (**** Predicates to detect unwanted facts (prolific or likely to cause
       unsoundness) ****)
 
-(** Too general means, positive equality literal with a variable X as one operand,
-  when X does not occur properly in the other operand. This rules out clearly
-  inconsistent facts such as V = a | V = b, though it by no means guarantees
-  soundness. **)
-
-fun var_occurs_in_term ix =
-  let
-    fun aux (Var (jx, _)) = (ix = jx)
-      | aux (t1 $ t2) = aux t1 orelse aux t2
-      | aux (Abs (_, _, t)) = aux t
-      | aux _ = false
-  in aux end
+(* "Too general" means a positive equality literal with a variable X as one
+   operand, where X does not occur properly in the other operand. This rules
+   out clearly inconsistent facts such as X = a | X = b, though it by no means
+   guarantees soundness. *)
 
 fun is_record_type T = not (null (Record.dest_recTs T))
 
-(*Unwanted equalities include
-  (1) those between a variable that does not properly occur in the second operand,
-  (2) those between a variable and a record, since these seem to be prolific "cases" thms
-  (* FIXME: still a problem? *)
-*)
-fun too_general_eqterms (Var (ix,T), t) =
-    not (var_occurs_in_term ix t) orelse is_record_type T
-  | too_general_eqterms _ = false;
+(* Unwanted equalities are those between a (bound or schematic) variable that
+   does not properly occur in the second operand. *)
+fun too_general_eqterms (Var z) t =
+    not (exists_subterm (fn Var z' => z = z' | _ => false) t)
+  | too_general_eqterms (Bound j) t = not (loose_bvar1 (t, j))
+  | too_general_eqterms _ _ = false
 
-fun too_general_equality (Const (@{const_name "op ="}, _) $ x $ y) =
-      too_general_eqterms (x,y) orelse too_general_eqterms(y,x)
-  | too_general_equality _ = false;
+fun too_general_equality (Const (@{const_name "op ="}, _) $ t1 $ t2) =
+    too_general_eqterms t1 t2 orelse too_general_eqterms t2 t1
+  | too_general_equality _ = false
 
 fun has_typed_var tycons = exists_subterm
   (fn Var (_, Type (a, _)) => member (op =) tycons a | _ => false);
@@ -541,13 +535,13 @@
 val dangerous_types = [@{type_name unit}, @{type_name bool}];
 
 (* Facts containing variables of type "unit" or "bool" or of the form
-   "?x = A | ?x = B | ?x = C" are likely to lead to unsound proofs if types are
-   omitted. *)
+   "ALL x. x = A | x = B | x = C" are likely to lead to unsound proofs if types
+   are omitted. *)
 fun is_dangerous_term _ @{prop True} = true
   | is_dangerous_term full_types t =
     not full_types andalso
     (has_typed_var dangerous_types t orelse
-     forall too_general_equality (HOLogic.disjuncts (strip_Trueprop t)))
+     forall too_general_equality (HOLogic.disjuncts (strip_Trueprop_and_all t)))
 
 fun relevant_facts full_types relevance_threshold relevance_convergence
                    defs_relevant max_new theory_relevant
@@ -557,7 +551,7 @@
     val thy = ProofContext.theory_of ctxt
     val add_thms = maps (ProofContext.get_fact ctxt) add
     val has_override = not (null add) orelse not (null del)
-(*###
+(* FIXME:
     val is_FO = forall Meson.is_fol_term thy (concl_t :: hyp_ts)
 *)
     val axioms =
@@ -567,7 +561,7 @@
       |> not has_override ? make_unique
       |> filter (fn (_, th) =>
                     member Thm.eq_thm add_thms th orelse
-                    ((* ### FIXME: keep? (not is_FO orelse is_quasi_fol_theorem thy th) andalso*)
+                    ((* FIXME: keep? (not is_FO orelse is_quasi_fol_theorem thy th) andalso*)
                      not (is_dangerous_term full_types (prop_of th))))
   in
     relevance_filter ctxt relevance_threshold relevance_convergence
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_fact_minimize.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -0,0 +1,173 @@
+(*  Title:      HOL/Tools/Sledgehammer/sledgehammer_fact_minimize.ML
+    Author:     Philipp Meyer, TU Muenchen
+    Author:     Jasmin Blanchette, TU Muenchen
+
+Minimization of theorem list for Metis using automatic theorem provers.
+*)
+
+signature SLEDGEHAMMER_FACT_MINIMIZE =
+sig
+  type params = Sledgehammer.params
+
+  val minimize_theorems :
+    params -> int -> int -> Proof.state -> (string * thm list) list
+    -> (string * thm list) list option * string
+  val run_minimize : params -> int -> Facts.ref list -> Proof.state -> unit
+end;
+
+structure Sledgehammer_Fact_Minimize : SLEDGEHAMMER_FACT_MINIMIZE =
+struct
+
+open Sledgehammer_Util
+open Sledgehammer_Fact_Filter
+open Sledgehammer_Proof_Reconstruct
+open Sledgehammer
+
+(* wrapper for calling external prover *)
+
+fun string_for_failure Unprovable = "Unprovable."
+  | string_for_failure TimedOut = "Timed out."
+  | string_for_failure _ = "Unknown error."
+
+fun n_theorems name_thms_pairs =
+  let val n = length name_thms_pairs in
+    string_of_int n ^ " theorem" ^ plural_s n ^
+    (if n > 0 then
+       ": " ^ space_implode " "
+                  (name_thms_pairs
+                   |> map (perhaps (try (unprefix chained_prefix)))
+                   |> sort_distinct string_ord)
+     else
+       "")
+  end
+
+fun test_theorems ({debug, verbose, overlord, atps, full_types,
+                    relevance_threshold, relevance_convergence, theory_relevant,
+                    defs_relevant, isar_proof, isar_shrink_factor,
+                    ...} : params)
+                  (prover : prover) explicit_apply timeout subgoal state
+                  name_thms_pairs =
+  let
+    val _ =
+      priority ("Testing " ^ n_theorems (map fst name_thms_pairs) ^ "...")
+    val params =
+      {debug = debug, verbose = verbose, overlord = overlord, atps = atps,
+       full_types = full_types, explicit_apply = explicit_apply,
+       relevance_threshold = relevance_threshold,
+       relevance_convergence = relevance_convergence,
+       theory_relevant = theory_relevant, defs_relevant = defs_relevant,
+       isar_proof = isar_proof, isar_shrink_factor = isar_shrink_factor,
+       timeout = timeout, minimize_timeout = timeout}
+    val axioms = maps (fn (n, ths) => map (pair n) ths) name_thms_pairs
+    val {context = ctxt, facts, goal} = Proof.goal state
+    val problem =
+     {subgoal = subgoal, goal = (ctxt, (facts, goal)),
+      relevance_override = {add = [], del = [], only = false},
+      axioms = SOME axioms}
+    val result as {outcome, used_thm_names, ...} =
+      prover params (K "") problem
+  in
+    priority (case outcome of
+                NONE =>
+                if length used_thm_names = length name_thms_pairs then
+                  "Found proof."
+                else
+                  "Found proof with " ^ n_theorems used_thm_names ^ "."
+              | SOME failure => string_for_failure failure);
+    result
+  end
+
+(* minimization of theorems *)
+
+fun filter_used_facts used =
+  filter (member (op =) used o perhaps (try (unprefix chained_prefix)) o fst)
+
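+(* Sublinear minimization: drop one candidate fact at a time and rerun the
+   prover on the remaining ones; whenever a proof is still found, both the
+   remaining candidates and the already kept facts are pruned down to the
+   facts the prover reports as used, so several facts can disappear per
+   prover call. *)
+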
+fun sublinear_minimize _ [] p = p
+  | sublinear_minimize test (x :: xs) (seen, result) =
+    case test (xs @ seen) of
+      result as {outcome = NONE, proof, used_thm_names, ...}
+      : prover_result =>
+      sublinear_minimize test (filter_used_facts used_thm_names xs)
+                         (filter_used_facts used_thm_names seen, result)
+    | _ => sublinear_minimize test xs (x :: seen, result)
+
+(* Give the ATP some slack. The ATP gets further slack because the Sledgehammer
+   preprocessing time is included in the estimate below but isn't part of the
+   timeout. *)
+val fudge_msecs = 250
+
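+(* Overall strategy: first run the prover on all the facts with the
+   user-specified timeout; if it succeeds, shrink the timeout to roughly the
+   measured running time (plus "fudge_msecs") and let "sublinear_minimize"
+   whittle down the facts that were actually used. *)
+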
+fun minimize_theorems {atps = [], ...} _ _ _ _ = error "No ATP is set."
+  | minimize_theorems
+        (params as {debug, verbose, overlord, atps as atp :: _, full_types,
+                    relevance_threshold, relevance_convergence, theory_relevant,
+                    defs_relevant, isar_proof, isar_shrink_factor,
+                    minimize_timeout, ...})
+        i n state name_thms_pairs =
+  let
+    val thy = Proof.theory_of state
+    val prover = get_prover_fun thy atp
+    val msecs = Time.toMilliseconds minimize_timeout
+    val _ =
+      priority ("Sledgehammer minimize: ATP " ^ quote atp ^
+                " with a time limit of " ^ string_of_int msecs ^ " ms.")
+    val {context = ctxt, goal, ...} = Proof.goal state
+    val (_, hyp_ts, concl_t) = strip_subgoal goal i
+    val explicit_apply =
+      not (forall (Meson.is_fol_term thy)
+                  (concl_t :: hyp_ts @ maps (map prop_of o snd) name_thms_pairs))
+    fun do_test timeout =
+      test_theorems params prover explicit_apply timeout i state
+    val timer = Timer.startRealTimer ()
+  in
+    (case do_test minimize_timeout name_thms_pairs of
+       result as {outcome = NONE, pool, used_thm_names,
+                  conjecture_shape, ...} =>
+       let
+         val time = Timer.checkRealTimer timer
+         val new_timeout =
+           Int.min (msecs, Time.toMilliseconds time + fudge_msecs)
+           |> Time.fromMilliseconds
+         val (min_thms, {proof, axiom_names, ...}) =
+           sublinear_minimize (do_test new_timeout)
+               (filter_used_facts used_thm_names name_thms_pairs) ([], result)
+         val n = length min_thms
+         val _ = priority (cat_lines
+           ["Minimized: " ^ string_of_int n ^ " theorem" ^ plural_s n] ^
+            (case filter (String.isPrefix chained_prefix o fst) min_thms of
+               [] => ""
+             | chained => " (including " ^ Int.toString (length chained) ^
+                          " chained)") ^ ".")
+       in
+         (SOME min_thms,
+          proof_text isar_proof
+              (pool, debug, isar_shrink_factor, ctxt, conjecture_shape)
+              (full_types, K "", proof, axiom_names, goal, i) |> fst)
+       end
+     | {outcome = SOME TimedOut, ...} =>
+       (NONE, "Timeout: You can increase the time limit using the \"timeout\" \
+              \option (e.g., \"timeout = " ^
+              string_of_int (10 + msecs div 1000) ^ " s\").")
+     | {outcome = SOME UnknownError, ...} =>
+       (* Failure sometimes means timeout, unfortunately. *)
+       (NONE, "Failure: No proof was found with the current time limit. You \
+              \can increase the time limit using the \"timeout\" \
+              \option (e.g., \"timeout = " ^
+              string_of_int (10 + msecs div 1000) ^ " s\").")
+     | {message, ...} => (NONE, "ATP error: " ^ message))
+    handle ERROR msg => (NONE, "Error: " ^ msg)
+  end
+
+fun run_minimize params i refs state =
+  let
+    val ctxt = Proof.context_of state
+    val chained_ths = #facts (Proof.goal state)
+    val name_thms_pairs = map (name_thms_pair_from_ref ctxt chained_ths) refs
+  in
+    case subgoal_count state of
+      0 => priority "No subgoal!"
+    | n =>
+      (kill_atps ();
+       priority (#2 (minimize_theorems params i n state name_thms_pairs)))
+  end
+
+end;
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_fact_minimizer.ML	Mon Aug 09 14:47:28 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,174 +0,0 @@
-(*  Title:      HOL/Tools/Sledgehammer/sledgehammer_fact_minimizer.ML
-    Author:     Philipp Meyer, TU Muenchen
-    Author:     Jasmin Blanchette, TU Muenchen
-
-Minimization of theorem list for Metis using automatic theorem provers.
-*)
-
-signature SLEDGEHAMMER_FACT_MINIMIZER =
-sig
-  type params = Sledgehammer.params
-
-  val minimize_theorems :
-    params -> int -> int -> Proof.state -> (string * thm list) list
-    -> (string * thm list) list option * string
-  val run_minimizer : params -> int -> Facts.ref list -> Proof.state -> unit
-end;
-
-structure Sledgehammer_Fact_Minimizer : SLEDGEHAMMER_FACT_MINIMIZER =
-struct
-
-open ATP_Systems
-open Sledgehammer_Util
-open Sledgehammer_Fact_Filter
-open Sledgehammer_Proof_Reconstruct
-open Sledgehammer
-
-(* wrapper for calling external prover *)
-
-fun string_for_failure Unprovable = "Unprovable."
-  | string_for_failure TimedOut = "Timed out."
-  | string_for_failure _ = "Unknown error."
-
-fun n_theorems name_thms_pairs =
-  let val n = length name_thms_pairs in
-    string_of_int n ^ " theorem" ^ plural_s n ^
-    (if n > 0 then
-       ": " ^ space_implode " "
-                  (name_thms_pairs
-                   |> map (perhaps (try (unprefix chained_prefix)))
-                   |> sort_distinct string_ord)
-     else
-       "")
-  end
-
-fun test_theorems ({debug, verbose, overlord, atps, full_types,
-                    relevance_threshold, relevance_convergence, theory_relevant,
-                    defs_relevant, isar_proof, isar_shrink_factor,
-                    ...} : params)
-                  (prover : prover) explicit_apply timeout subgoal state
-                  name_thms_pairs =
-  let
-    val _ =
-      priority ("Testing " ^ n_theorems (map fst name_thms_pairs) ^ "...")
-    val params =
-      {debug = debug, verbose = verbose, overlord = overlord, atps = atps,
-       full_types = full_types, explicit_apply = explicit_apply,
-       relevance_threshold = relevance_threshold,
-       relevance_convergence = relevance_convergence,
-       theory_relevant = theory_relevant, defs_relevant = defs_relevant,
-       isar_proof = isar_proof, isar_shrink_factor = isar_shrink_factor,
-       timeout = timeout, minimize_timeout = timeout}
-    val axioms = maps (fn (n, ths) => map (pair n) ths) name_thms_pairs
-    val {context = ctxt, facts, goal} = Proof.goal state
-    val problem =
-     {subgoal = subgoal, goal = (ctxt, (facts, goal)),
-      relevance_override = {add = [], del = [], only = false},
-      axioms = SOME axioms}
-    val result as {outcome, used_thm_names, ...} =
-      prover params (K "") problem
-  in
-    priority (case outcome of
-                NONE =>
-                if length used_thm_names = length name_thms_pairs then
-                  "Found proof."
-                else
-                  "Found proof with " ^ n_theorems used_thm_names ^ "."
-              | SOME failure => string_for_failure failure);
-    result
-  end
-
-(* minimalization of thms *)
-
-fun filter_used_facts used =
-  filter (member (op =) used o perhaps (try (unprefix chained_prefix)) o fst)
-
-fun sublinear_minimize _ [] p = p
-  | sublinear_minimize test (x :: xs) (seen, result) =
-    case test (xs @ seen) of
-      result as {outcome = NONE, proof, used_thm_names, ...}
-      : prover_result =>
-      sublinear_minimize test (filter_used_facts used_thm_names xs)
-                         (filter_used_facts used_thm_names seen, result)
-    | _ => sublinear_minimize test xs (x :: seen, result)
-
-(* Give the ATP some slack. The ATP gets further slack because the Sledgehammer
-   preprocessing time is included in the estimate below but isn't part of the
-   timeout. *)
-val fudge_msecs = 250
-
-fun minimize_theorems {atps = [], ...} _ _ _ _ = error "No ATP is set."
-  | minimize_theorems
-        (params as {debug, verbose, overlord, atps as atp :: _, full_types,
-                    relevance_threshold, relevance_convergence, theory_relevant,
-                    defs_relevant, isar_proof, isar_shrink_factor,
-                    minimize_timeout, ...})
-        i n state name_thms_pairs =
-  let
-    val thy = Proof.theory_of state
-    val prover = get_prover_fun thy atp
-    val msecs = Time.toMilliseconds minimize_timeout
-    val _ =
-      priority ("Sledgehammer minimizer: ATP " ^ quote atp ^
-                " with a time limit of " ^ string_of_int msecs ^ " ms.")
-    val {context = ctxt, goal, ...} = Proof.goal state
-    val (_, hyp_ts, concl_t) = strip_subgoal goal i
-    val explicit_apply =
-      not (forall (Meson.is_fol_term thy)
-                  (concl_t :: hyp_ts @ maps (map prop_of o snd) name_thms_pairs))
-    fun do_test timeout =
-      test_theorems params prover explicit_apply timeout i state
-    val timer = Timer.startRealTimer ()
-  in
-    (case do_test minimize_timeout name_thms_pairs of
-       result as {outcome = NONE, pool, used_thm_names,
-                  conjecture_shape, ...} =>
-       let
-         val time = Timer.checkRealTimer timer
-         val new_timeout =
-           Int.min (msecs, Time.toMilliseconds time + fudge_msecs)
-           |> Time.fromMilliseconds
-         val (min_thms, {proof, internal_thm_names, ...}) =
-           sublinear_minimize (do_test new_timeout)
-               (filter_used_facts used_thm_names name_thms_pairs) ([], result)
-         val n = length min_thms
-         val _ = priority (cat_lines
-           ["Minimized: " ^ string_of_int n ^ " theorem" ^ plural_s n] ^
-            (case filter (String.isPrefix chained_prefix o fst) min_thms of
-               [] => ""
-             | chained => " (including " ^ Int.toString (length chained) ^
-                          " chained)") ^ ".")
-       in
-         (SOME min_thms,
-          proof_text isar_proof
-              (pool, debug, isar_shrink_factor, ctxt, conjecture_shape)
-              (full_types, K "", proof, internal_thm_names, goal, i) |> fst)
-       end
-     | {outcome = SOME TimedOut, ...} =>
-       (NONE, "Timeout: You can increase the time limit using the \"timeout\" \
-              \option (e.g., \"timeout = " ^
-              string_of_int (10 + msecs div 1000) ^ " s\").")
-     | {outcome = SOME UnknownError, ...} =>
-       (* Failure sometimes mean timeout, unfortunately. *)
-       (NONE, "Failure: No proof was found with the current time limit. You \
-              \can increase the time limit using the \"timeout\" \
-              \option (e.g., \"timeout = " ^
-              string_of_int (10 + msecs div 1000) ^ " s\").")
-     | {message, ...} => (NONE, "ATP error: " ^ message))
-    handle ERROR msg => (NONE, "Error: " ^ msg)
-  end
-
-fun run_minimizer params i refs state =
-  let
-    val ctxt = Proof.context_of state
-    val chained_ths = #facts (Proof.goal state)
-    val name_thms_pairs = map (name_thms_pair_from_ref ctxt chained_ths) refs
-  in
-    case subgoal_count state of
-      0 => priority "No subgoal!"
-    | n =>
-      (kill_atps ();
-       priority (#2 (minimize_theorems params i n state name_thms_pairs)))
-  end
-
-end;
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -20,7 +20,7 @@
 open ATP_Systems
 open Sledgehammer_Util
 open Sledgehammer
-open Sledgehammer_Fact_Minimizer
+open Sledgehammer_Fact_Minimize
 
 (** Sledgehammer commands **)
 
@@ -226,9 +226,9 @@
                          (minimize_command override_params i) state
       end
     else if subcommand = minimizeN then
-      run_minimizer (get_params thy (map (apfst minimizize_raw_param_name)
-                                override_params))
-                    (the_default 1 opt_i) (#add relevance_override) state
+      run_minimize (get_params thy (map (apfst minimizize_raw_param_name)
+                               override_params))
+                   (the_default 1 opt_i) (#add relevance_override) state
     else if subcommand = messagesN then
       messages opt_i
     else if subcommand = available_atpsN then
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_proof_reconstruct.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -10,12 +10,6 @@
 sig
   type minimize_command = string list -> string
 
-  val axiom_prefix : string
-  val conjecture_prefix : string
-  val helper_prefix : string
-  val class_rel_clause_prefix : string
-  val arity_clause_prefix : string
-  val tfrees_name : string
   val metis_line: bool -> int -> int -> string list -> string
   val metis_proof_text:
     bool * minimize_command * string * string vector * thm * int
@@ -37,16 +31,10 @@
 open Metis_Clauses
 open Sledgehammer_Util
 open Sledgehammer_Fact_Filter
+open Sledgehammer_Translate
 
 type minimize_command = string list -> string
 
-val axiom_prefix = "ax_"
-val conjecture_prefix = "conj_"
-val helper_prefix = "help_"
-val class_rel_clause_prefix = "clrel_";
-val arity_clause_prefix = "arity_"
-val tfrees_name = "tfrees"
-
 (* Simple simplifications to ensure that sort annotations don't leave a trail of
    spurious "True"s. *)
 fun s_not @{const False} = @{const True}
@@ -71,9 +59,9 @@
 fun mk_aconn c (phi1, phi2) = AConn (c, [phi1, phi2])
 
 fun index_in_shape x = find_index (exists (curry (op =) x))
-fun is_axiom_number thm_names num =
-  num > 0 andalso num <= Vector.length thm_names andalso
-  Vector.sub (thm_names, num - 1) <> ""
+fun is_axiom_number axiom_names num =
+  num > 0 andalso num <= Vector.length axiom_names andalso
+  Vector.sub (axiom_names, num - 1) <> ""
 fun is_conjecture_number conjecture_shape num =
   index_in_shape num conjecture_shape >= 0
 
@@ -491,10 +479,10 @@
 (* Discard axioms; consolidate adjacent lines that prove the same formula, since
    they differ only in type information.*)
 fun add_line _ _ (line as Definition _) lines = line :: lines
-  | add_line conjecture_shape thm_names (Inference (num, t, [])) lines =
+  | add_line conjecture_shape axiom_names (Inference (num, t, [])) lines =
     (* No dependencies: axiom, conjecture, or (for Vampire) internal axioms or
        definitions. *)
-    if is_axiom_number thm_names num then
+    if is_axiom_number axiom_names num then
       (* Axioms are not proof lines. *)
       if is_only_type_information t then
         map (replace_deps_in_line (num, [])) lines
@@ -540,10 +528,10 @@
 
 fun add_desired_line _ _ _ _ (line as Definition (num, _, _)) (j, lines) =
     (j, line :: map (replace_deps_in_line (num, [])) lines)
-  | add_desired_line isar_shrink_factor conjecture_shape thm_names frees
+  | add_desired_line isar_shrink_factor conjecture_shape axiom_names frees
                      (Inference (num, t, deps)) (j, lines) =
     (j + 1,
-     if is_axiom_number thm_names num orelse
+     if is_axiom_number axiom_names num orelse
         is_conjecture_number conjecture_shape num orelse
         (not (is_only_type_information t) andalso
          null (Term.add_tvars t []) andalso
@@ -562,16 +550,18 @@
    (108) is extracted. For SPASS, lines have the form "108[0:Inp] ...", where
    the first number (108) is extracted. For Vampire, we look for
    "108. ... [input]". *)
-fun used_facts_in_atp_proof thm_names atp_proof =
+fun used_facts_in_atp_proof axiom_names atp_proof =
   let
     fun axiom_name num =
       let val j = Int.fromString num |> the_default ~1 in
-        if is_axiom_number thm_names j then SOME (Vector.sub (thm_names, j - 1))
-        else NONE
+        if is_axiom_number axiom_names j then
+          SOME (Vector.sub (axiom_names, j - 1))
+        else
+          NONE
       end
     val tokens_of =
       String.tokens (fn c => not (Char.isAlphaNum c) andalso c <> #"_")
-    val thm_name_list = Vector.foldr (op ::) [] thm_names
+    val thm_name_list = Vector.foldr (op ::) [] axiom_names
     fun do_line ("fof" :: num :: "axiom" :: (rest as _ :: _)) =
         (case strip_prefix_and_undo_ascii axiom_prefix (List.last rest) of
            SOME name =>
@@ -617,16 +607,16 @@
 
 val unprefix_chained = perhaps (try (unprefix chained_prefix))
 
-fun used_facts thm_names =
-  used_facts_in_atp_proof thm_names
+fun used_facts axiom_names =
+  used_facts_in_atp_proof axiom_names
   #> List.partition (String.isPrefix chained_prefix)
   #>> map (unprefix chained_prefix)
   #> pairself (sort_distinct string_ord)
 
-fun metis_proof_text (full_types, minimize_command, atp_proof, thm_names, goal,
-                      i) =
+fun metis_proof_text (full_types, minimize_command, atp_proof, axiom_names,
+                      goal, i) =
   let
-    val (chained_lemmas, other_lemmas) = used_facts thm_names atp_proof
+    val (chained_lemmas, other_lemmas) = used_facts axiom_names atp_proof
     val lemmas = other_lemmas @ chained_lemmas
     val n = Logic.count_prems (prop_of goal)
   in
@@ -656,9 +646,9 @@
 fun smart_case_split [] facts = ByMetis facts
   | smart_case_split proofs facts = CaseSplit (proofs, facts)
 
-fun add_fact_from_dep thm_names num =
-  if is_axiom_number thm_names num then
-    apsnd (insert (op =) (Vector.sub (thm_names, num - 1)))
+fun add_fact_from_dep axiom_names num =
+  if is_axiom_number axiom_names num then
+    apsnd (insert (op =) (Vector.sub (axiom_names, num - 1)))
   else
     apfst (insert (op =) (raw_prefix, num))
 
@@ -667,27 +657,27 @@
 
 fun step_for_line _ _ (Definition (_, t1, t2)) = Let (t1, t2)
   | step_for_line _ _ (Inference (num, t, [])) = Assume ((raw_prefix, num), t)
-  | step_for_line thm_names j (Inference (num, t, deps)) =
+  | step_for_line axiom_names j (Inference (num, t, deps)) =
     Have (if j = 1 then [Show] else [], (raw_prefix, num),
           forall_vars t,
-          ByMetis (fold (add_fact_from_dep thm_names) deps ([], [])))
+          ByMetis (fold (add_fact_from_dep axiom_names) deps ([], [])))
 
 fun proof_from_atp_proof pool ctxt full_types tfrees isar_shrink_factor
-                         atp_proof conjecture_shape thm_names params frees =
+                         atp_proof conjecture_shape axiom_names params frees =
   let
     val lines =
       atp_proof ^ "$" (* the $ sign acts as a sentinel (FIXME: needed?) *)
       |> parse_proof pool
       |> sort (int_ord o pairself raw_step_number)
       |> decode_lines ctxt full_types tfrees
-      |> rpair [] |-> fold_rev (add_line conjecture_shape thm_names)
+      |> rpair [] |-> fold_rev (add_line conjecture_shape axiom_names)
       |> rpair [] |-> fold_rev add_nontrivial_line
       |> rpair (0, []) |-> fold_rev (add_desired_line isar_shrink_factor
-                                               conjecture_shape thm_names frees)
+                                             conjecture_shape axiom_names frees)
       |> snd
   in
     (if null params then [] else [Fix params]) @
-    map2 (step_for_line thm_names) (length lines downto 1) lines
+    map2 (step_for_line axiom_names) (length lines downto 1) lines
   end
 
 (* When redirecting proofs, we keep information about the labels seen so far in
@@ -995,8 +985,8 @@
   in do_proof end
 
 fun isar_proof_text (pool, debug, isar_shrink_factor, ctxt, conjecture_shape)
-                    (other_params as (full_types, _, atp_proof, thm_names, goal,
-                                      i)) =
+                    (other_params as (full_types, _, atp_proof, axiom_names,
+                                      goal, i)) =
   let
     val (params, hyp_ts, concl_t) = strip_subgoal goal i
     val frees = fold Term.add_frees (concl_t :: hyp_ts) []
@@ -1005,7 +995,7 @@
     val (one_line_proof, lemma_names) = metis_proof_text other_params
     fun isar_proof_for () =
       case proof_from_atp_proof pool ctxt full_types tfrees isar_shrink_factor
-                                atp_proof conjecture_shape thm_names params
+                                atp_proof conjecture_shape axiom_names params
                                 frees
            |> redirect_proof conjecture_shape hyp_ts concl_t
            |> kill_duplicate_assumptions_in_proof
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_translate.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -0,0 +1,505 @@
+(*  Title:      HOL/Tools/Sledgehammer/sledgehammer_translate.ML
+    Author:     Fabian Immler, TU Muenchen
+    Author:     Makarius
+    Author:     Jasmin Blanchette, TU Muenchen
+
+Translation of HOL to FOL.
+*)
+
+signature SLEDGEHAMMER_TRANSLATE =
+sig
+  type 'a problem = 'a ATP_Problem.problem
+
+  val axiom_prefix : string
+  val conjecture_prefix : string
+  val helper_prefix : string
+  val class_rel_clause_prefix : string
+  val arity_clause_prefix : string
+  val tfrees_name : string
+  val prepare_problem :
+    Proof.context -> bool -> bool -> bool -> bool -> term list -> term
+    -> (string * thm) list
+    -> string problem * string Symtab.table * int * string Vector.vector
+end;
+
+structure Sledgehammer_Translate : SLEDGEHAMMER_TRANSLATE =
+struct
+
+open ATP_Problem
+open Metis_Clauses
+open Sledgehammer_Util
+
+val axiom_prefix = "ax_"
+val conjecture_prefix = "conj_"
+val helper_prefix = "help_"
+val class_rel_clause_prefix = "clrel_";
+val arity_clause_prefix = "arity_"
+val tfrees_name = "tfrees"
+
+(* Freshness almost guaranteed! *)
+val sledgehammer_weak_prefix = "Sledgehammer:"
+
+datatype fol_formula =
+  FOLFormula of {name: string,
+                 kind: kind,
+                 combformula: (name, combterm) formula,
+                 ctypes_sorts: typ list}
+
+fun mk_anot phi = AConn (ANot, [phi])
+fun mk_aconn c phi1 phi2 = AConn (c, [phi1, phi2])
+fun mk_ahorn [] phi = phi
+  | mk_ahorn (phi :: phis) psi =
+    AConn (AImplies, [fold (mk_aconn AAnd) phis phi, psi])
+
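+(* Translates a HOL proposition into a first-order formula over combinator
+   terms, accumulating the types whose sorts must later be turned into type
+   literals. *)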
+fun combformula_for_prop thy =
+  let
+    val do_term = combterm_from_term thy
+    fun do_quant bs q s T t' =
+      do_formula ((s, T) :: bs) t'
+      #>> (fn phi => AQuant (q, [`make_bound_var s], phi))
+    and do_conn bs c t1 t2 =
+      do_formula bs t1 ##>> do_formula bs t2
+      #>> (fn (phi1, phi2) => AConn (c, [phi1, phi2]))
+    and do_formula bs t =
+      case t of
+        @{const Not} $ t1 =>
+        do_formula bs t1 #>> (fn phi => AConn (ANot, [phi]))
+      | Const (@{const_name All}, _) $ Abs (s, T, t') =>
+        do_quant bs AForall s T t'
+      | Const (@{const_name Ex}, _) $ Abs (s, T, t') =>
+        do_quant bs AExists s T t'
+      | @{const "op &"} $ t1 $ t2 => do_conn bs AAnd t1 t2
+      | @{const "op |"} $ t1 $ t2 => do_conn bs AOr t1 t2
+      | @{const "op -->"} $ t1 $ t2 => do_conn bs AImplies t1 t2
+      | Const (@{const_name "op ="}, Type (_, [@{typ bool}, _])) $ t1 $ t2 =>
+        do_conn bs AIff t1 t2
+      | _ => (fn ts => do_term bs (Envir.eta_contract t)
+                       |>> AAtom ||> union (op =) ts)
+  in do_formula [] end
+
+(* Converts an elim-rule into an equivalent theorem that does not have the
+   predicate variable. Leaves other theorems unchanged. We simply instantiate
+   the conclusion variable to False. (Cf. "transform_elim_term" in
+   "ATP_Systems".) *)
+fun transform_elim_term t =
+  case Logic.strip_imp_concl t of
+    @{const Trueprop} $ Var (z, @{typ bool}) =>
+    subst_Vars [(z, @{const False})] t
+  | Var (z, @{typ prop}) => subst_Vars [(z, @{prop False})] t
+  | _ => t
+
+fun presimplify_term thy =
+  Skip_Proof.make_thm thy
+  #> Meson.presimplify
+  #> prop_of
+
+fun concealed_bound_name j = sledgehammer_weak_prefix ^ Int.toString j
+fun conceal_bounds Ts t =
+  subst_bounds (map (Free o apfst concealed_bound_name)
+                    (0 upto length Ts - 1 ~~ Ts), t)
+fun reveal_bounds Ts =
+  subst_atomic (map (fn (j, T) => (Free (concealed_bound_name j, T), Bound j))
+                    (0 upto length Ts - 1 ~~ Ts))
+
+fun introduce_combinators_in_term ctxt kind t =
+  let
+    val thy = ProofContext.theory_of ctxt
+    fun aux Ts t =
+      case t of
+        @{const Not} $ t1 => @{const Not} $ aux Ts t1
+      | (t0 as Const (@{const_name All}, _)) $ Abs (s, T, t') =>
+        t0 $ Abs (s, T, aux (T :: Ts) t')
+      | (t0 as Const (@{const_name Ex}, _)) $ Abs (s, T, t') =>
+        t0 $ Abs (s, T, aux (T :: Ts) t')
+      | (t0 as @{const "op &"}) $ t1 $ t2 => t0 $ aux Ts t1 $ aux Ts t2
+      | (t0 as @{const "op |"}) $ t1 $ t2 => t0 $ aux Ts t1 $ aux Ts t2
+      | (t0 as @{const "op -->"}) $ t1 $ t2 => t0 $ aux Ts t1 $ aux Ts t2
+      | (t0 as Const (@{const_name "op ="}, Type (_, [@{typ bool}, _])))
+          $ t1 $ t2 =>
+        t0 $ aux Ts t1 $ aux Ts t2
+      | _ => if not (exists_subterm (fn Abs _ => true | _ => false) t) then
+               t
+             else
+               let
+                 val t = t |> conceal_bounds Ts
+                           |> Envir.eta_contract
+                 val ([t], ctxt') = Variable.import_terms true [t] ctxt
+               in
+                 t |> cterm_of thy
+                   |> Clausifier.introduce_combinators_in_cterm
+                   |> singleton (Variable.export ctxt' ctxt)
+                   |> prop_of |> Logic.dest_equals |> snd
+                   |> reveal_bounds Ts
+               end
+  in t |> not (Meson.is_fol_term thy t) ? aux [] end
+  handle THM _ =>
+         (* A type variable of sort "{}" will make abstraction fail. *)
+         case kind of
+           Axiom => HOLogic.true_const
+         | Conjecture => HOLogic.false_const
+
+(* Metis's use of "resolve_tac" freezes the schematic variables. We simulate the
+   same in Sledgehammer to prevent the discovery of unreplayable proofs. *)
+fun freeze_term t =
+  let
+    fun aux (t $ u) = aux t $ aux u
+      | aux (Abs (s, T, t)) = Abs (s, T, aux t)
+      | aux (Var ((s, i), T)) =
+        Free (sledgehammer_weak_prefix ^ s ^ "_" ^ string_of_int i, T)
+      | aux t = t
+  in t |> exists_subterm is_Var t ? aux end
+
+(* making axiom and conjecture formulas *)
+fun make_formula ctxt presimp (name, kind, t) =
+  let
+    val thy = ProofContext.theory_of ctxt
+    val t = t |> transform_elim_term
+              |> Object_Logic.atomize_term thy
+    val t = t |> fastype_of t = HOLogic.boolT ? HOLogic.mk_Trueprop
+              |> extensionalize_term
+              |> presimp ? presimplify_term thy
+              |> perhaps (try (HOLogic.dest_Trueprop))
+              |> introduce_combinators_in_term ctxt kind
+              |> kind = Conjecture ? freeze_term
+    val (combformula, ctypes_sorts) = combformula_for_prop thy t []
+  in
+    FOLFormula {name = name, combformula = combformula, kind = kind,
+                ctypes_sorts = ctypes_sorts}
+  end
+
+fun make_axiom ctxt presimp (name, th) =
+  (name, make_formula ctxt presimp (name, Axiom, prop_of th))
+fun make_conjectures ctxt ts =
+  map2 (fn j => fn t => make_formula ctxt true (Int.toString j, Conjecture, t))
+       (0 upto length ts - 1) ts
+
+(** Helper facts **)
+
+fun count_combterm (CombConst ((s, _), _, _)) =
+    Symtab.map_entry s (Integer.add 1)
+  | count_combterm (CombVar _) = I
+  | count_combterm (CombApp (t1, t2)) = fold count_combterm [t1, t2]
+fun count_combformula (AQuant (_, _, phi)) = count_combformula phi
+  | count_combformula (AConn (_, phis)) = fold count_combformula phis
+  | count_combformula (AAtom tm) = count_combterm tm
+fun count_fol_formula (FOLFormula {combformula, ...}) =
+  count_combformula combformula
+
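+(* The optional helpers are added only if the constants they characterize
+   occur in the problem; the "fequal" lemmas are always added to
+   non-first-order problems. *)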
+val optional_helpers =
+  [(["c_COMBI", "c_COMBK"], @{thms COMBI_def COMBK_def}),
+   (["c_COMBB", "c_COMBC"], @{thms COMBB_def COMBC_def}),
+   (["c_COMBS"], @{thms COMBS_def})]
+val optional_typed_helpers =
+  [(["c_True", "c_False"], @{thms True_or_False}),
+   (["c_If"], @{thms if_True if_False True_or_False})]
+val mandatory_helpers = @{thms fequal_imp_equal equal_imp_fequal}
+
+val init_counters =
+  Symtab.make (maps (maps (map (rpair 0) o fst))
+                    [optional_helpers, optional_typed_helpers])
+
+fun get_helper_facts ctxt is_FO full_types conjectures axioms =
+  let
+    val ct = fold (fold count_fol_formula) [conjectures, axioms] init_counters
+    fun is_needed c = the (Symtab.lookup ct c) > 0
+  in
+    (optional_helpers
+     |> full_types ? append optional_typed_helpers
+     |> maps (fn (ss, ths) =>
+                 if exists is_needed ss then map (`Thm.get_name_hint) ths
+                 else [])) @
+    (if is_FO then [] else map (`Thm.get_name_hint) mandatory_helpers)
+    |> map (snd o make_axiom ctxt false)
+  end
+
+fun meta_not t = @{const "==>"} $ t $ @{prop False}
+
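+(* Assembles the ingredients of the problem: the conjectures (negated
+   hypotheses followed by the conclusion), the named axioms, the helper
+   facts, and the arity and class-relationship clauses for the type system. *)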
+fun prepare_formulas ctxt full_types hyp_ts concl_t axioms =
+  let
+    val thy = ProofContext.theory_of ctxt
+    val axiom_ts = map (prop_of o snd) axioms
+    val hyp_ts =
+      if null hyp_ts then
+        []
+      else
+        (* Remove existing axioms from the conjecture, as this can dramatically
+           boost an ATP's performance (for some reason). *)
+        let
+          val axiom_table = fold (Termtab.update o rpair ()) axiom_ts
+                                 Termtab.empty
+        in hyp_ts |> filter_out (Termtab.defined axiom_table) end
+    val goal_t = Logic.list_implies (hyp_ts, concl_t)
+    val is_FO = Meson.is_fol_term thy goal_t
+    val subs = tfree_classes_of_terms [goal_t]
+    val supers = tvar_classes_of_terms axiom_ts
+    val tycons = type_consts_of_terms thy (goal_t :: axiom_ts)
+    (* TFrees in the conjecture; TVars in the axioms *)
+    val conjectures = map meta_not hyp_ts @ [concl_t] |> make_conjectures ctxt
+    val (axiom_names, axioms) =
+      ListPair.unzip (map (make_axiom ctxt true) axioms)
+    val helper_facts = get_helper_facts ctxt is_FO full_types conjectures axioms
+    val (supers', arity_clauses) = make_arity_clauses thy tycons supers
+    val class_rel_clauses = make_class_rel_clauses thy subs supers'
+  in
+    (Vector.fromList axiom_names,
+     (conjectures, axioms, helper_facts, class_rel_clauses, arity_clauses))
+  end
+
+fun wrap_type ty t = ATerm ((type_wrapper_name, type_wrapper_name), [ty, t])
+
+fun fo_term_for_combtyp (CombTVar name) = ATerm (name, [])
+  | fo_term_for_combtyp (CombTFree name) = ATerm (name, [])
+  | fo_term_for_combtyp (CombType (name, tys)) =
+    ATerm (name, map fo_term_for_combtyp tys)
+
+fun fo_literal_for_type_literal (TyLitVar (class, name)) =
+    (true, ATerm (class, [ATerm (name, [])]))
+  | fo_literal_for_type_literal (TyLitFree (class, name)) =
+    (true, ATerm (class, [ATerm (name, [])]))
+
+fun formula_for_fo_literal (pos, t) = AAtom t |> not pos ? mk_anot
+
+fun fo_term_for_combterm full_types =
+  let
+    fun aux top_level u =
+      let
+        val (head, args) = strip_combterm_comb u
+        val (x, ty_args) =
+          case head of
+            CombConst (name as (s, s'), _, ty_args) =>
+            if s = "equal" then
+              (if top_level andalso length args = 2 then name
+               else ("c_fequal", @{const_name fequal}), [])
+            else if top_level then
+              case s of
+                "c_False" => (("$false", s'), [])
+              | "c_True" => (("$true", s'), [])
+              | _ => (name, if full_types then [] else ty_args)
+            else
+              (name, if full_types then [] else ty_args)
+          | CombVar (name, _) => (name, [])
+          | CombApp _ => raise Fail "impossible \"CombApp\""
+        val t = ATerm (x, map fo_term_for_combtyp ty_args @
+                          map (aux false) args)
+    in
+      if full_types then wrap_type (fo_term_for_combtyp (combtyp_of u)) t else t
+    end
+  in aux true end
+
+fun formula_for_combformula full_types =
+  let
+    fun aux (AQuant (q, xs, phi)) = AQuant (q, xs, aux phi)
+      | aux (AConn (c, phis)) = AConn (c, map aux phis)
+      | aux (AAtom tm) = AAtom (fo_term_for_combterm full_types tm)
+  in aux end
+
+fun formula_for_axiom full_types (FOLFormula {combformula, ctypes_sorts, ...}) =
+  mk_ahorn (map (formula_for_fo_literal o fo_literal_for_type_literal)
+                (type_literals_for_types ctypes_sorts))
+           (formula_for_combformula full_types combformula)
+
+fun problem_line_for_fact prefix full_types
+                          (formula as FOLFormula {name, kind, ...}) =
+  Fof (prefix ^ ascii_of name, kind, formula_for_axiom full_types formula)
+
+fun problem_line_for_class_rel_clause (ClassRelClause {name, subclass,
+                                                       superclass, ...}) =
+  let val ty_arg = ATerm (("T", "T"), []) in
+    Fof (class_rel_clause_prefix ^ ascii_of name, Axiom,
+         AConn (AImplies, [AAtom (ATerm (subclass, [ty_arg])),
+                           AAtom (ATerm (superclass, [ty_arg]))]))
+  end
+
+fun fo_literal_for_arity_literal (TConsLit (c, t, args)) =
+    (true, ATerm (c, [ATerm (t, map (fn arg => ATerm (arg, [])) args)]))
+  | fo_literal_for_arity_literal (TVarLit (c, sort)) =
+    (false, ATerm (c, [ATerm (sort, [])]))
+
+fun problem_line_for_arity_clause (ArityClause {name, conclLit, premLits,
+                                                ...}) =
+  Fof (arity_clause_prefix ^ ascii_of name, Axiom,
+       mk_ahorn (map (formula_for_fo_literal o apfst not
+                      o fo_literal_for_arity_literal) premLits)
+                (formula_for_fo_literal
+                     (fo_literal_for_arity_literal conclLit)))
+
+fun problem_line_for_conjecture full_types
+                                (FOLFormula {name, kind, combformula, ...}) =
+  Fof (conjecture_prefix ^ name, kind,
+       formula_for_combformula full_types combformula)
+
+fun free_type_literals_for_conjecture (FOLFormula {ctypes_sorts, ...}) =
+  map fo_literal_for_type_literal (type_literals_for_types ctypes_sorts)
+
+fun problem_line_for_free_type lit =
+  Fof (tfrees_name, Conjecture, mk_anot (formula_for_fo_literal lit))
+fun problem_lines_for_free_types conjectures =
+  let
+    val litss = map free_type_literals_for_conjecture conjectures
+    val lits = fold (union (op =)) litss []
+  in map problem_line_for_free_type lits end
+
+(** "hBOOL" and "hAPP" **)
+
+type const_info = {min_arity: int, max_arity: int, sub_level: bool}
+
+fun consider_term top_level (ATerm ((s, _), ts)) =
+  (if is_tptp_variable s then
+     I
+   else
+     let val n = length ts in
+       Symtab.map_default
+           (s, {min_arity = n, max_arity = 0, sub_level = false})
+           (fn {min_arity, max_arity, sub_level} =>
+               {min_arity = Int.min (n, min_arity),
+                max_arity = Int.max (n, max_arity),
+                sub_level = sub_level orelse not top_level})
+     end)
+  #> fold (consider_term (top_level andalso s = type_wrapper_name)) ts
+fun consider_formula (AQuant (_, _, phi)) = consider_formula phi
+  | consider_formula (AConn (_, phis)) = fold consider_formula phis
+  | consider_formula (AAtom tm) = consider_term true tm
+
+fun consider_problem_line (Fof (_, _, phi)) = consider_formula phi
+fun consider_problem problem = fold (fold consider_problem_line o snd) problem
+
+fun const_table_for_problem explicit_apply problem =
+  if explicit_apply then NONE
+  else SOME (Symtab.empty |> consider_problem problem)
+
+fun min_arity_of thy full_types NONE s =
+    (if s = "equal" orelse s = type_wrapper_name orelse
+        String.isPrefix type_const_prefix s orelse
+        String.isPrefix class_prefix s then
+       16383 (* large number *)
+     else if full_types then
+       0
+     else case strip_prefix_and_undo_ascii const_prefix s of
+       SOME s' => num_type_args thy (invert_const s')
+     | NONE => 0)
+  | min_arity_of _ _ (SOME the_const_tab) s =
+    case Symtab.lookup the_const_tab s of
+      SOME ({min_arity, ...} : const_info) => min_arity
+    | NONE => 0
+
+fun full_type_of (ATerm ((s, _), [ty, _])) =
+    if s = type_wrapper_name then ty else raise Fail "expected type wrapper"
+  | full_type_of _ = raise Fail "expected type wrapper"
+
+fun list_hAPP_rev _ t1 [] = t1
+  | list_hAPP_rev NONE t1 (t2 :: ts2) =
+    ATerm (`I "hAPP", [list_hAPP_rev NONE t1 ts2, t2])
+  | list_hAPP_rev (SOME ty) t1 (t2 :: ts2) =
+    let val ty' = ATerm (`make_fixed_type_const @{type_name fun},
+                         [full_type_of t2, ty]) in
+      ATerm (`I "hAPP", [wrap_type ty' (list_hAPP_rev (SOME ty') t1 ts2), t2])
+    end
+
+fun repair_applications_in_term thy full_types const_tab =
+  let
+    fun aux opt_ty (ATerm (name as (s, _), ts)) =
+      if s = type_wrapper_name then
+        case ts of
+          [t1, t2] => ATerm (name, [aux NONE t1, aux (SOME t1) t2])
+        | _ => raise Fail "malformed type wrapper"
+      else
+        let
+          val ts = map (aux NONE) ts
+          val (ts1, ts2) = chop (min_arity_of thy full_types const_tab s) ts
+        in list_hAPP_rev opt_ty (ATerm (name, ts1)) (rev ts2) end
+  in aux NONE end
+
+fun boolify t = ATerm (`I "hBOOL", [t])
+
+(* True if the constant ever appears outside of the top-level position in
+   literals, or if it appears with different arities (e.g., because of different
+   type instantiations). If false, the constant always receives all of its
+   arguments and is used as a predicate. *)
+fun is_predicate NONE s =
+    s = "equal" orelse String.isPrefix type_const_prefix s orelse
+    String.isPrefix class_prefix s
+  | is_predicate (SOME the_const_tab) s =
+    case Symtab.lookup the_const_tab s of
+      SOME {min_arity, max_arity, sub_level} =>
+      not sub_level andalso min_arity = max_arity
+    | NONE => false
+
+fun repair_predicates_in_term const_tab (t as ATerm ((s, _), ts)) =
+  if s = type_wrapper_name then
+    case ts of
+      [_, t' as ATerm ((s', _), _)] =>
+      if is_predicate const_tab s' then t' else boolify t
+    | _ => raise Fail "malformed type wrapper"
+  else
+    t |> not (is_predicate const_tab s) ? boolify
+
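+(* Adds an explicit universal quantifier over all free TPTP variables of the
+   formula, for provers that expect closed formulas (cf. "explicit_forall"). *)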
+fun close_universally phi =
+  let
+    fun term_vars bounds (ATerm (name as (s, _), tms)) =
+        (is_tptp_variable s andalso not (member (op =) bounds name))
+          ? insert (op =) name
+        #> fold (term_vars bounds) tms
+    fun formula_vars bounds (AQuant (q, xs, phi)) =
+        formula_vars (xs @ bounds) phi
+      | formula_vars bounds (AConn (_, phis)) = fold (formula_vars bounds) phis
+      | formula_vars bounds (AAtom tm) = term_vars bounds tm
+  in
+    case formula_vars [] phi [] of [] => phi | xs => AQuant (AForall, xs, phi)
+  end
+
+fun repair_formula thy explicit_forall full_types const_tab =
+  let
+    fun aux (AQuant (q, xs, phi)) = AQuant (q, xs, aux phi)
+      | aux (AConn (c, phis)) = AConn (c, map aux phis)
+      | aux (AAtom tm) =
+        AAtom (tm |> repair_applications_in_term thy full_types const_tab
+                  |> repair_predicates_in_term const_tab)
+  in aux #> explicit_forall ? close_universally end
+
+fun repair_problem_line thy explicit_forall full_types const_tab
+                        (Fof (ident, kind, phi)) =
+  Fof (ident, kind, repair_formula thy explicit_forall full_types const_tab phi)
+fun repair_problem_with_const_table thy =
+  map o apsnd o map ooo repair_problem_line thy
+
+fun repair_problem thy explicit_forall full_types explicit_apply problem =
+  repair_problem_with_const_table thy explicit_forall full_types
+      (const_table_for_problem explicit_apply problem) problem
+
+fun prepare_problem ctxt readable_names explicit_forall full_types
+                    explicit_apply hyp_ts concl_t axiom_ts =
+  let
+    val thy = ProofContext.theory_of ctxt
+    val (axiom_names, (conjectures, axioms, helper_facts, class_rel_clauses,
+                       arity_clauses)) =
+      prepare_formulas ctxt full_types hyp_ts concl_t axiom_ts
+    val axiom_lines = map (problem_line_for_fact axiom_prefix full_types) axioms
+    val helper_lines =
+      map (problem_line_for_fact helper_prefix full_types) helper_facts
+    val conjecture_lines =
+      map (problem_line_for_conjecture full_types) conjectures
+    val tfree_lines = problem_lines_for_free_types conjectures
+    val class_rel_lines =
+      map problem_line_for_class_rel_clause class_rel_clauses
+    val arity_lines = map problem_line_for_arity_clause arity_clauses
+    (* Reordering these might or might not confuse the proof reconstruction
+       code or the SPASS Flotter hack. *)
+    val problem =
+      [("Relevant facts", axiom_lines),
+       ("Class relationships", class_rel_lines),
+       ("Arity declarations", arity_lines),
+       ("Helper facts", helper_lines),
+       ("Conjectures", conjecture_lines),
+       ("Type variables", tfree_lines)]
+      |> repair_problem thy explicit_forall full_types explicit_apply
+    val (problem, pool) = nice_tptp_problem readable_names problem
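+    (* The conjecture lines start right after the axiom, class-relationship,
+       arity, and helper sections, hence this offset. *)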
+    val conjecture_offset =
+      length axiom_lines + length class_rel_lines + length arity_lines
+      + length helper_lines
+  in
+    (problem,
+     case pool of SOME the_pool => snd the_pool | NONE => Symtab.empty,
+     conjecture_offset, axiom_names)
+  end
+
+end;
--- a/src/Pure/Concurrent/future.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Concurrent/future.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -61,7 +61,7 @@
   val cancel_group: group -> unit
   val cancel: 'a future -> unit
   val shutdown: unit -> unit
-  val report: (unit -> 'a) -> 'a
+  val status: (unit -> 'a) -> 'a
 end;
 
 structure Future: FUTURE =
@@ -532,9 +532,9 @@
   else ();
 
 
-(* report markup *)
+(* status markup *)
 
-fun report e =
+fun status e =
   let
     val _ = Output.status (Markup.markup Markup.forked "");
     val x = e ();  (*sic -- report "joined" only for success*)
--- a/src/Pure/General/markup.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/markup.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -9,7 +9,7 @@
   type T = string * Properties.T
   val none: T
   val is_none: T -> bool
-  val properties: (string * string) list -> T -> T
+  val properties: Properties.T -> T -> T
   val nameN: string
   val name: string -> T -> T
   val bindingN: string val binding: string -> T
@@ -93,7 +93,7 @@
   val commentN: string val comment: T
   val controlN: string val control: T
   val malformedN: string val malformed: T
-  val tokenN: string val token: T
+  val tokenN: string val token: Properties.T -> T
   val command_spanN: string val command_span: string -> T
   val ignored_spanN: string val ignored_span: T
   val malformed_spanN: string val malformed_span: T
@@ -284,7 +284,8 @@
 val (controlN, control) = markup_elem "control";
 val (malformedN, malformed) = markup_elem "malformed";
 
-val (tokenN, token) = markup_elem "token";
+val tokenN = "token";
+fun token props = (tokenN, props);
 
 val (command_spanN, command_span) = markup_string "command_span" nameN;
 val (ignored_spanN, ignored_span) = markup_elem "ignored_span";
--- a/src/Pure/General/markup.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/markup.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -183,7 +183,7 @@
 
   /* interactive documents */
 
-  val ASSIGN = "assign"
+  val Assign = Markup("assign", Nil)
   val EDIT = "edit"
 
 
@@ -196,6 +196,7 @@
 
   val INIT = "init"
   val STATUS = "status"
+  val REPORT = "report"
   val WRITELN = "writeln"
   val TRACING = "tracing"
   val WARNING = "warning"
@@ -207,10 +208,12 @@
   val SIGNAL = "signal"
   val EXIT = "exit"
 
-  val READY = "ready"
+  val Ready = Markup("ready", Nil)
 
 
   /* system data */
 
-  val DATA = "data"
+  val Data = Markup("data", Nil)
 }
+
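+/* An element name together with its properties, as used by XML.Elem and the
+   YXML parser. */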
+sealed case class Markup(name: String, properties: List[(String, String)])
--- a/src/Pure/General/output.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/output.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -39,9 +39,11 @@
   val debug_fn: (output -> unit) Unsynchronized.ref
   val prompt_fn: (output -> unit) Unsynchronized.ref
   val status_fn: (output -> unit) Unsynchronized.ref
+  val report_fn: (output -> unit) Unsynchronized.ref
   val error_msg: string -> unit
   val prompt: string -> unit
   val status: string -> unit
+  val report: string -> unit
   val debugging: bool Unsynchronized.ref
   val no_warnings_CRITICAL: ('a -> 'b) -> 'a -> 'b
   val debug: (unit -> string) -> unit
@@ -98,6 +100,7 @@
 val debug_fn = Unsynchronized.ref (std_output o suffix "\n" o prefix_lines "::: ");
 val prompt_fn = Unsynchronized.ref std_output;
 val status_fn = Unsynchronized.ref (fn _: string => ());
+val report_fn = Unsynchronized.ref (fn _: string => ());
 
 fun writeln s = ! writeln_fn (output s);
 fun priority s = ! priority_fn (output s);
@@ -106,6 +109,7 @@
 fun error_msg s = ! error_fn (output s);
 fun prompt s = ! prompt_fn (output s);
 fun status s = ! status_fn (output s);
+fun report s = ! report_fn (output s);
 
 fun legacy_feature s = warning ("Legacy feature! " ^ s);
 
--- a/src/Pure/General/position.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/position.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -127,7 +127,7 @@
 
 fun report_text markup (pos as Pos (count, _)) txt =
   if invalid_count count then ()
-  else Output.status (Markup.markup (Markup.properties (properties_of pos) markup) txt);
+  else Output.report (Markup.markup (Markup.properties (properties_of pos) markup) txt);
 
 fun report markup pos = report_text markup pos "";
 
--- a/src/Pure/General/pretty.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/pretty.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -17,11 +17,11 @@
   object Block
   {
     def apply(indent: Int, body: List[XML.Tree]): XML.Tree =
-      XML.Elem(Markup.BLOCK, List((Markup.INDENT, indent.toString)), body)
+      XML.Elem(Markup(Markup.BLOCK, List((Markup.INDENT, indent.toString))), body)
 
     def unapply(tree: XML.Tree): Option[(Int, List[XML.Tree])] =
       tree match {
-        case XML.Elem(Markup.BLOCK, List((Markup.INDENT, indent)), body) =>
+        case XML.Elem(Markup(Markup.BLOCK, List((Markup.INDENT, indent))), body) =>
           Markup.parse_int(indent) match {
             case Some(i) => Some((i, body))
             case None => None
@@ -33,12 +33,12 @@
   object Break
   {
     def apply(width: Int): XML.Tree =
-      XML.Elem(Markup.BREAK, List((Markup.WIDTH, width.toString)),
+      XML.Elem(Markup(Markup.BREAK, List((Markup.WIDTH, width.toString))),
         List(XML.Text(Symbol.spaces(width))))
 
     def unapply(tree: XML.Tree): Option[Int] =
       tree match {
-        case XML.Elem(Markup.BREAK, List((Markup.WIDTH, width)), _) => Markup.parse_int(width)
+        case XML.Elem(Markup(Markup.BREAK, List((Markup.WIDTH, width))), _) => Markup.parse_int(width)
         case _ => None
       }
   }
@@ -50,7 +50,7 @@
 
   private def standard_format(tree: XML.Tree): List[XML.Tree] =
     tree match {
-      case XML.Elem(name, atts, body) => List(XML.Elem(name, atts, body.flatMap(standard_format)))
+      case XML.Elem(markup, body) => List(XML.Elem(markup, body.flatMap(standard_format)))
       case XML.Text(text) =>
         Library.separate(FBreak,
           Library.chunks(text).toList.map((s: CharSequence) => XML.Text(s.toString)))
@@ -82,7 +82,7 @@
 
     def content_length(tree: XML.Tree): Double =
       tree match {
-        case XML.Elem(_, _, body) => (0.0 /: body)(_ + content_length(_))
+        case XML.Elem(_, body) => (0.0 /: body)(_ + content_length(_))
         case XML.Text(s) => metric(s)
       }
 
@@ -122,10 +122,10 @@
           else format(ts, blockin, after, text.newline.blanks(blockin.toInt))
         case FBreak :: ts => format(ts, blockin, after, text.newline.blanks(blockin.toInt))
 
-        case XML.Elem(name, atts, body) :: ts =>
+        case XML.Elem(markup, body) :: ts =>
           val btext = format(body, blockin, breakdist(ts, after), text.copy(tx = Nil))
           val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts
-          val btext1 = btext.copy(tx = XML.Elem(name, atts, btext.content) :: text.tx)
+          val btext1 = btext.copy(tx = XML.Elem(markup, btext.content) :: text.tx)
           format(ts1, blockin, after, btext1)
         case XML.Text(s) :: ts => format(ts, blockin, after, text.string(s, metric(s)))
       }
@@ -146,7 +146,7 @@
         case Block(_, body) => body.flatMap(fmt)
         case Break(wd) => List(XML.Text(Symbol.spaces(wd)))
         case FBreak => List(XML.Text(Symbol.space))
-        case XML.Elem(name, atts, body) => List(XML.Elem(name, atts, body.flatMap(fmt)))
+        case XML.Elem(markup, body) => List(XML.Elem(markup, body.flatMap(fmt)))
         case XML.Text(_) => List(tree)
       }
     input.flatMap(standard_format).flatMap(fmt)
--- a/src/Pure/General/xml.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/xml.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -1,14 +1,14 @@
 (*  Title:      Pure/General/xml.ML
     Author:     David Aspinall, Stefan Berghofer and Markus Wenzel
 
-Basic support for XML.
+Simple XML tree values.
 *)
 
 signature XML =
 sig
   type attributes = Properties.T
   datatype tree =
-      Elem of string * attributes * tree list
+      Elem of Markup.T * tree list
     | Text of string
   val add_content: tree -> Buffer.T -> Buffer.T
   val header: string
@@ -32,10 +32,10 @@
 type attributes = Properties.T;
 
 datatype tree =
-    Elem of string * attributes * tree list
+    Elem of Markup.T * tree list
   | Text of string;
 
-fun add_content (Elem (_, _, ts)) = fold add_content ts
+fun add_content (Elem (_, ts)) = fold add_content ts
   | add_content (Text s) = Buffer.add s;
 
 
@@ -84,9 +84,9 @@
 
 fun buffer_of tree =
   let
-    fun traverse (Elem (name, atts, [])) =
+    fun traverse (Elem ((name, atts), [])) =
           Buffer.add "<" #> Buffer.add (elem name atts) #> Buffer.add "/>"
-      | traverse (Elem (name, atts, ts)) =
+      | traverse (Elem ((name, atts), ts)) =
           Buffer.add "<" #> Buffer.add (elem name atts) #> Buffer.add ">" #>
           fold traverse ts #>
           Buffer.add "</" #> Buffer.add name #> Buffer.add ">"
@@ -170,8 +170,7 @@
         (Scan.this_string "/>" >> ignored
          || $$ ">" |-- parse_content --|
             !! (err ("Expected </" ^ s ^ ">"))
-              (Scan.this_string ("</" ^ s) --| blanks --| $$ ">"))) >>
-    (fn ((s, atts), ts) => Elem (s, atts, ts))) xs;
+              (Scan.this_string ("</" ^ s) --| blanks --| $$ ">"))) >> Elem) xs;
 
 val parse_document =
   (Scan.repeat parse_misc -- Scan.option parse_doctype -- Scan.repeat parse_misc)
--- a/src/Pure/General/xml.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/xml.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -24,11 +24,11 @@
       s.toString
     }
   }
-  case class Elem(name: String, attributes: Attributes, body: List[Tree]) extends Tree
+  case class Elem(markup: Markup, body: List[Tree]) extends Tree
   case class Text(content: String) extends Tree
 
-  def elem(name: String, body: List[Tree]) = Elem(name, Nil, body)
-  def elem(name: String) = Elem(name, Nil, Nil)
+  def elem(name: String, body: List[Tree]) = Elem(Markup(name, Nil), body)
+  def elem(name: String) = Elem(Markup(name, Nil), Nil)
 
 
   /* string representation */
@@ -56,9 +56,9 @@
 
   private def append_tree(tree: Tree, s: StringBuilder) {
     tree match {
-      case Elem(name, atts, Nil) =>
+      case Elem(Markup(name, atts), Nil) =>
         s ++= "<"; append_elem(name, atts, s); s ++= "/>"
-      case Elem(name, atts, ts) =>
+      case Elem(Markup(name, atts), ts) =>
         s ++= "<"; append_elem(name, atts, s); s ++= ">"
         for (t <- ts) append_tree(t, s)
         s ++= "</"; s ++= name; s ++= ">"
@@ -72,7 +72,7 @@
   private type State = Option[(String, List[Tree])]
 
   private def get_next(tree: Tree): State = tree match {
-    case Elem(_, _, body) => get_nexts(body)
+    case Elem(_, body) => get_nexts(body)
     case Text(content) => Some(content, Nil)
   }
   private def get_nexts(trees: List[Tree]): State = trees match {
@@ -118,7 +118,7 @@
     def cache_string(x: String): String =
       lookup(x) match {
         case Some(y) => y
-        case None => store(x)
+        case None => store(new String(x.toCharArray))  // trim string value
       }
     def cache_props(x: List[(String, String)]): List[(String, String)] =
       if (x.isEmpty) x
@@ -127,14 +127,23 @@
           case Some(y) => y
           case None => store(x.map(p => (cache_string(p._1), cache_string(p._2))))
         }
+    def cache_markup(x: Markup): Markup =
+      lookup(x) match {
+        case Some(y) => y
+        case None =>
+          x match {
+            case Markup(name, props) =>
+              store(Markup(cache_string(name), cache_props(props)))
+          }
+      }
     def cache_tree(x: XML.Tree): XML.Tree =
       lookup(x) match {
         case Some(y) => y
         case None =>
           x match {
-            case XML.Elem(name, props, body) =>
-              store(XML.Elem(cache_string(name), cache_props(props), cache_trees(body)))
-            case XML.Text(text) => XML.Text(cache_string(text))
+            case XML.Elem(markup, body) =>
+              store(XML.Elem(cache_markup(markup), cache_trees(body)))
+            case XML.Text(text) => store(XML.Text(cache_string(text)))
           }
       }
     def cache_trees(x: List[XML.Tree]): List[XML.Tree] =
@@ -150,7 +159,7 @@
   /* document object model (W3C DOM) */
 
   def get_data(node: org.w3c.dom.Node): Option[XML.Tree] =
-    node.getUserData(Markup.DATA) match {
+    node.getUserData(Markup.Data.name) match {
       case tree: XML.Tree => Some(tree)
       case _ => None
     }
@@ -158,12 +167,12 @@
   def document_node(doc: org.w3c.dom.Document, tree: Tree): org.w3c.dom.Node =
   {
     def DOM(tr: Tree): org.w3c.dom.Node = tr match {
-      case Elem(Markup.DATA, Nil, List(data, t)) =>
+      case Elem(Markup.Data, List(data, t)) =>
         val node = DOM(t)
-        node.setUserData(Markup.DATA, data, null)
+        node.setUserData(Markup.Data.name, data, null)
         node
-      case Elem(name, atts, ts) =>
-        if (name == Markup.DATA)
+      case Elem(Markup(name, atts), ts) =>
+        if (name == Markup.Data.name)
           error("Malformed data element: " + tr.toString)
         val node = doc.createElement(name)
         for ((name, value) <- atts) node.setAttribute(name, value)
--- a/src/Pure/General/yxml.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/yxml.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -64,7 +64,7 @@
     fun attrib (a, x) =
       Buffer.add Y #>
       Buffer.add a #> Buffer.add "=" #> Buffer.add x;
-    fun tree (XML.Elem (name, atts, ts)) =
+    fun tree (XML.Elem ((name, atts), ts)) =
           Buffer.add XY #> Buffer.add name #> fold attrib atts #> Buffer.add X #>
           fold tree ts #>
           Buffer.add XYX
@@ -104,7 +104,7 @@
   | push name atts pending = ((name, atts), []) :: pending;
 
 fun pop ((("", _), _) :: _) = err_unbalanced ""
-  | pop (((name, atts), body) :: pending) = add (XML.Elem (name, atts, rev body)) pending;
+  | pop ((markup, body) :: pending) = add (XML.Elem (markup, rev body)) pending;
 
 
 (* parsing *)
--- a/src/Pure/General/yxml.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/General/yxml.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -55,7 +55,7 @@
     val s = source.toString
     val i = s.indexOf('=')
     if (i <= 0) err_attribute()
-    (s.substring(0, i).intern, s.substring(i + 1))
+    (s.substring(0, i), s.substring(i + 1))
   }
 
 
@@ -64,7 +64,7 @@
     /* stack operations */
 
     def buffer(): ListBuffer[XML.Tree] = new ListBuffer[XML.Tree]
-    var stack: List[((String, XML.Attributes), ListBuffer[XML.Tree])] = List((("", Nil), buffer()))
+    var stack: List[(Markup, ListBuffer[XML.Tree])] = List((Markup("", Nil), buffer()))
 
     def add(x: XML.Tree)
     {
@@ -74,15 +74,16 @@
     def push(name: String, atts: XML.Attributes)
     {
       if (name == "") err_element()
-      else stack = ((name, atts), buffer()) :: stack
+      else stack = (Markup(name, atts), buffer()) :: stack
     }
 
     def pop()
     {
       (stack: @unchecked) match {
-        case ((("", _), _) :: _) => err_unbalanced("")
-        case (((name, atts), body) :: pending) =>
-          stack = pending; add(XML.Elem(name, atts, body.toList))
+        case ((Markup("", _), _) :: _) => err_unbalanced("")
+        case ((markup, body) :: pending) =>
+          stack = pending
+          add(XML.Elem(markup, body.toList))
       }
     }
 
@@ -94,14 +95,14 @@
       else {
         Library.chunks(chunk, Y).toList match {
           case ch :: name :: atts if ch.length == 0 =>
-            push(name.toString.intern, atts.map(parse_attrib))
+            push(name.toString, atts.map(parse_attrib))
           case txts => for (txt <- txts) add(XML.Text(txt.toString))
         }
       }
     }
     stack match {
-      case List((("", _), body)) => body.toList
-      case ((name, _), _) :: _ => err_unbalanced(name)
+      case List((Markup("", _), body)) => body.toList
+      case (Markup(name, _), _) :: _ => err_unbalanced(name)
     }
   }
 
@@ -116,7 +117,7 @@
   /* failsafe parsing */
 
   private def markup_failsafe(source: CharSequence) =
-    XML.Elem (Markup.MALFORMED, Nil,
+    XML.elem (Markup.MALFORMED,
       List(XML.Text(source.toString.replace(X_string, "\\<^X>").replace(Y_string, "\\<^Y>"))))
 
   def parse_body_failsafe(source: CharSequence): List[XML.Tree] =
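
The yxml.scala parser above keeps a stack of (Markup, ListBuffer[XML.Tree]) pairs: opening markup pushes a fresh buffer, text is added to the top buffer, and closing markup pops the pair and wraps it into an XML.Elem one level down. A hedged standalone sketch of that stack discipline, with simplified types and a toy event stream instead of the real YXML transfer syntax:

import scala.collection.mutable.ListBuffer

// Simplified stand-ins; the real parser decodes the YXML byte format.
object Stack_Parser_Demo
{
  case class Markup(name: String, properties: List[(String, String)])
  sealed abstract class Tree
  case class Elem(markup: Markup, body: List[Tree]) extends Tree
  case class Text(content: String) extends Tree

  sealed abstract class Event
  case class Open(markup: Markup) extends Event
  case class Chars(text: String) extends Event
  case object Close extends Event

  def parse(events: List[Event]): List[Tree] =
  {
    // Sentinel entry with empty name marks the bottom of the stack.
    var stack: List[(Markup, ListBuffer[Tree])] = List((Markup("", Nil), new ListBuffer[Tree]))

    def add(t: Tree) { stack.head._2 += t }

    for (ev <- events)
      ev match {
        case Open(markup) => stack = (markup, new ListBuffer[Tree]) :: stack
        case Chars(text) => add(Text(text))
        case Close =>
          stack match {
            case (Markup("", _), _) :: _ => throw new RuntimeException("unbalanced markup")
            case (markup, body) :: rest =>
              stack = rest
              add(Elem(markup, body.toList))
            case Nil => throw new RuntimeException("unbalanced markup")
          }
      }

    stack match {
      case List((Markup("", _), body)) => body.toList
      case _ => throw new RuntimeException("unbalanced markup")
    }
  }

  def main(args: Array[String])
  {
    val events =
      List(Open(Markup("a", Nil)), Chars("x"), Open(Markup("b", Nil)), Chars("y"), Close, Close)
    println(parse(events))  // one Elem for "a" containing Text(x) and the nested Elem for "b"
  }
}
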
--- a/src/Pure/Isar/isar_document.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/isar_document.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -233,7 +233,7 @@
     val _ = define_state state_id' state';
   in (state_id', (id, state_id') :: updates) end;
 
-fun report_updates updates =
+fun updates_status updates =
   implode (map (fn (id, state_id) => Markup.markup (Markup.edit id state_id) "") updates)
   |> Markup.markup Markup.assign
   |> Output.status;
@@ -262,7 +262,7 @@
           |-> fold_map (fn name => Graph.map_node_yield name (update_node name));
 
       val _ = define_document new_id new_document';
-      val _ = report_updates (flat updatess);
+      val _ = updates_status (flat updatess);
       val _ = execute new_document';
     in () end);
 
--- a/src/Pure/Isar/isar_document.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/isar_document.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -9,12 +9,12 @@
 
 object Isar_Document
 {
-  /* reports */
+  /* protocol messages */
 
   object Assign {
     def unapply(msg: XML.Tree): Option[List[XML.Tree]] =
       msg match {
-        case XML.Elem(Markup.ASSIGN, Nil, edits) => Some(edits)
+        case XML.Elem(Markup.Assign, edits) => Some(edits)
         case _ => None
       }
   }
@@ -22,7 +22,7 @@
   object Edit {
     def unapply(msg: XML.Tree): Option[(Document.Command_ID, Document.State_ID)] =
       msg match {
-        case XML.Elem(Markup.EDIT, List((Markup.ID, cmd_id), (Markup.STATE, state_id)), Nil) =>
+        case XML.Elem(Markup(Markup.EDIT, List((Markup.ID, cmd_id), (Markup.STATE, state_id))), Nil) =>
           Some(cmd_id, state_id)
         case _ => None
       }
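
Assign and Edit above are ordinary Scala extractor objects: unapply inspects a protocol message and returns its payload only when the markup matches. A small self-contained sketch of the same idiom, with hypothetical lowercase message names and simplified tree types:

// Hypothetical message shapes; this only illustrates the extractor-object idiom.
object Extractor_Demo
{
  case class Markup(name: String, properties: List[(String, String)])
  sealed abstract class Tree
  case class Elem(markup: Markup, body: List[Tree]) extends Tree
  case class Text(content: String) extends Tree

  object Assign
  {
    def unapply(msg: Tree): Option[List[Tree]] =
      msg match {
        case Elem(Markup("assign", _), edits) => Some(edits)
        case _ => None
      }
  }

  object Edit
  {
    def unapply(msg: Tree): Option[(String, String)] =
      msg match {
        case Elem(Markup("edit", List(("id", cmd_id), ("state", state_id))), Nil) =>
          Some((cmd_id, state_id))
        case _ => None
      }
  }

  def main(args: Array[String])
  {
    val msg =
      Elem(Markup("assign", Nil),
        List(Elem(Markup("edit", List(("id", "c1"), ("state", "s1"))), Nil)))
    msg match {
      case Assign(edits) =>
        for (Edit(cmd, state) <- edits) println(cmd + " -> " + state)  // c1 -> s1
      case _ => println("unrecognized message")
    }
  }
}
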
--- a/src/Pure/Isar/keyword.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/keyword.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -44,8 +44,8 @@
   val dest_commands: unit -> string list
   val command_keyword: string -> T option
   val command_tags: string -> string list
-  val keyword_status_reportN: string
-  val report: unit -> unit
+  val keyword_statusN: string
+  val status: unit -> unit
   val keyword: string -> unit
   val command: string -> T -> unit
   val is_diag: string -> bool
@@ -146,27 +146,27 @@
 fun command_tags name = these (Option.map tags_of (command_keyword name));
 
 
-(* reports *)
+(* status *)
 
-val keyword_status_reportN = "keyword_status_report";
+val keyword_statusN = "keyword_status";
 
-fun report_message s =
-  (if print_mode_active keyword_status_reportN then Output.status else writeln) s;
+fun status_message s =
+  (if print_mode_active keyword_statusN then Output.status else writeln) s;
 
-fun report_keyword name =
-  report_message (Markup.markup (Markup.keyword_decl name)
+fun keyword_status name =
+  status_message (Markup.markup (Markup.keyword_decl name)
     ("Outer syntax keyword: " ^ quote name));
 
-fun report_command (name, kind) =
-  report_message (Markup.markup (Markup.command_decl name (kind_of kind))
+fun command_status (name, kind) =
+  status_message (Markup.markup (Markup.command_decl name (kind_of kind))
     ("Outer syntax command: " ^ quote name ^ " (" ^ kind_of kind ^ ")"));
 
-fun report () =
+fun status () =
   let val (keywords, commands) = CRITICAL (fn () =>
     (dest_keywords (), sort_wrt #1 (Symtab.dest (get_commands ()))))
   in
-    List.app report_keyword keywords;
-    List.app report_command commands
+    List.app keyword_status keywords;
+    List.app command_status commands
   end;
 
 
@@ -174,12 +174,12 @@
 
 fun keyword name =
  (change_lexicons (apfst (Scan.extend_lexicon (Symbol.explode name)));
-  report_keyword name);
+  keyword_status name);
 
 fun command name kind =
  (change_lexicons (apsnd (Scan.extend_lexicon (Symbol.explode name)));
   change_commands (Symtab.update (name, kind));
-  report_command (name, kind));
+  command_status (name, kind));
 
 
 (* command categories *)
--- a/src/Pure/Isar/keyword.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/keyword.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -50,12 +50,12 @@
   val improper = Set(THY_SCRIPT, PRF_SCRIPT)
 
 
-  /* reports */
+  /* protocol messages */
 
   object Keyword_Decl {
     def unapply(msg: XML.Tree): Option[String] =
       msg match {
-        case XML.Elem(Markup.KEYWORD_DECL, List((Markup.NAME, name)), _) => Some(name)
+        case XML.Elem(Markup(Markup.KEYWORD_DECL, List((Markup.NAME, name))), _) => Some(name)
         case _ => None
       }
   }
@@ -63,7 +63,7 @@
   object Command_Decl {
     def unapply(msg: XML.Tree): Option[(String, String)] =
       msg match {
-        case XML.Elem(Markup.COMMAND_DECL, List((Markup.NAME, name), (Markup.KIND, kind)), _) =>
+        case XML.Elem(Markup(Markup.COMMAND_DECL, List((Markup.NAME, name), (Markup.KIND, kind))), _) =>
           Some((name, kind))
         case _ => None
       }
--- a/src/Pure/Isar/outer_syntax.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/outer_syntax.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -277,7 +277,7 @@
 
     val toks = Source.exhausted (Thy_Syntax.token_source lexs pos (Source.of_string text));
     val spans = Source.exhaust (Thy_Syntax.span_source toks);
-    val _ = List.app Thy_Syntax.report_span spans;
+    val _ = List.app Thy_Syntax.report_span spans;  (* FIXME ?? *)
     val units = Source.exhaust (Thy_Syntax.unit_source (Source.of_list spans))
       |> Par_List.map (prepare_unit commands init) |> flat;
 
--- a/src/Pure/Isar/proof.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/proof.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -1074,8 +1074,7 @@
 local
 
 fun future_terminal_proof proof1 proof2 meths int state =
-  if not (Goal.local_future_enforced ()) andalso
-    int orelse is_relevant state orelse not (Goal.local_future_enabled ())
+  if int orelse is_relevant state orelse not (Goal.local_future_enabled ())
   then proof1 meths state
   else snd (proof2 (fn state' => Goal.fork (fn () => ((), proof1 meths state'))) state);
 
--- a/src/Pure/Isar/proof_context.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/proof_context.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -334,7 +334,7 @@
     val (syms, pos) = Syntax.read_token text;
     val c = Type.cert_class tsig (Type.intern_class tsig (Symbol_Pos.content syms))
       handle TYPE (msg, _, _) => error (msg ^ Position.str_of pos);
-    val _ = Position.report (Markup.tclass c) pos;
+    val _ = Context_Position.report ctxt (Markup.tclass c) pos;
   in c end;
 
 
@@ -469,7 +469,7 @@
     val _ =
       if strict then ignore (Consts.the_type consts d) handle TYPE (msg, _, _) => err msg
       else ();
-    val _ = Position.report (Markup.const d) pos;
+    val _ = Context_Position.report ctxt (Markup.const d) pos;
   in t end;
 
 in
@@ -480,13 +480,13 @@
     val (c, pos) = token_content text;
   in
     if Syntax.is_tid c then
-     (Position.report Markup.tfree pos;
+     (Context_Position.report ctxt Markup.tfree pos;
       TFree (c, default_sort ctxt (c, ~1)))
     else
       let
         val d = Type.intern_type tsig c;
         val decl = Type.the_decl tsig d;
-        val _ = Position.report (Markup.tycon d) pos;
+        val _ = Context_Position.report ctxt (Markup.tycon d) pos;
         fun err () = error ("Bad type name: " ^ quote d);
         val args =
           (case decl of
@@ -511,7 +511,7 @@
   in
     (case (lookup_skolem ctxt c, Variable.is_const ctxt c) of
       (SOME x, false) =>
-        (Position.report (Markup.name x
+        (Context_Position.report ctxt (Markup.name x
             (if can Name.dest_skolem x then Markup.skolem else Markup.free)) pos;
           Free (x, infer_type ctxt (x, ty)))
     | _ => prep_const_proper ctxt strict (c, pos))
@@ -738,14 +738,14 @@
 
 fun parse_sort ctxt text =
   let
-    val (syms, pos) = Syntax.parse_token Markup.sort text;
+    val (syms, pos) = Syntax.parse_token ctxt Markup.sort text;
     val S = Syntax.standard_parse_sort ctxt (syn_of ctxt) (syms, pos)
       handle ERROR msg => cat_error msg ("Failed to parse sort" ^ Position.str_of pos)
   in Type.minimize_sort (tsig_of ctxt) S end;
 
 fun parse_typ ctxt text =
   let
-    val (syms, pos) = Syntax.parse_token Markup.typ text;
+    val (syms, pos) = Syntax.parse_token ctxt Markup.typ text;
     val T = Syntax.standard_parse_typ ctxt (syn_of ctxt) (get_sort ctxt) (syms, pos)
       handle ERROR msg => cat_error msg ("Failed to parse type" ^ Position.str_of pos);
   in T end;
@@ -756,7 +756,7 @@
 
     val (T', _) = Type_Infer.paramify_dummies T 0;
     val (markup, kind) = if T' = propT then (Markup.prop, "proposition") else (Markup.term, "term");
-    val (syms, pos) = Syntax.parse_token markup text;
+    val (syms, pos) = Syntax.parse_token ctxt markup text;
 
     fun check t = (Syntax.check_term ctxt (Type_Infer.constrain T' t); NONE)
       handle ERROR msg => SOME msg;
@@ -977,7 +977,7 @@
           if name = "" then [Thm.transfer thy Drule.dummy_thm]
           else
             (case Facts.lookup (Context.Proof ctxt) local_facts name of
-              SOME (_, ths) => (Position.report (Markup.local_fact name) pos;
+              SOME (_, ths) => (Context_Position.report ctxt (Markup.local_fact name) pos;
                 map (Thm.transfer thy) (Facts.select thmref ths))
             | NONE => PureThy.get_fact (Context.Proof ctxt) thy xthmref);
       in pick name thms end;
@@ -1007,7 +1007,7 @@
   let
     val pos = Binding.pos_of b;
     val name = full_name ctxt b;
-    val _ = Context_Position.report_visible ctxt (Markup.local_fact_decl name) pos;
+    val _ = Context_Position.report ctxt (Markup.local_fact_decl name) pos;
 
     val facts = PureThy.name_thmss false name raw_facts;
     fun app (th, attrs) x =
@@ -1186,7 +1186,7 @@
       |> fold_map declare_var (map2 (fn x' => fn (_, T, mx) => (x', T, mx)) xs' vars)
       |-> (map_syntax o Local_Syntax.add_syntax (theory_of ctxt) o map (pair Local_Syntax.Fixed));
     val _ = (vars ~~ xs') |> List.app (fn ((b, _, _), x') =>
-      Context_Position.report_visible ctxt (Markup.fixed_decl x') (Binding.pos_of b));
+      Context_Position.report ctxt (Markup.fixed_decl x') (Binding.pos_of b));
   in (xs', ctxt'') end;
 
 
--- a/src/Pure/Isar/token.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/token.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -181,7 +181,7 @@
 (* token content *)
 
 fun source_of (Token ((source, (pos, _)), _, _)) =
-  YXML.string_of (XML.Elem (Markup.tokenN, Position.properties_of pos, [XML.Text source]));
+  YXML.string_of (XML.Elem (Markup.token (Position.properties_of pos), [XML.Text source]));
 
 fun source_position_of (Token ((source, (pos, _)), _, _)) = (source, pos);
 
--- a/src/Pure/Isar/toplevel.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Isar/toplevel.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -571,7 +571,7 @@
   if print then
     ignore
       (Future.fork (fn () =>
-          setmp_thread_position tr (fn () => Future.report (fn () => print_state false st)) ()))
+          setmp_thread_position tr (fn () => Future.status (fn () => print_state false st)) ()))
   else ();
 
 fun error_msg tr exn_info =
--- a/src/Pure/ML/ml_compiler_polyml-5.3.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ML/ml_compiler_polyml-5.3.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -22,7 +22,7 @@
       endLine = end_line, endPosition = end_offset} = loc;
     val loc_props =
       (case YXML.parse text of
-        XML.Elem (e, atts, _) => if e = Markup.positionN then atts else []
+        XML.Elem ((e, atts), _) => if e = Markup.positionN then atts else []
       | XML.Text s => Position.file_name s);
   in
     Position.value Markup.lineN line @
--- a/src/Pure/ML/ml_context.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ML/ml_context.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -105,7 +105,7 @@
   let val ((name, _), pos) = Args.dest_src src in
     (case Symtab.lookup (! global_parsers) name of
       NONE => error ("Unknown ML antiquotation command: " ^ quote name ^ Position.str_of pos)
-    | SOME scan => (Position.report (Markup.ML_antiq name) pos;
+    | SOME scan => (Context_Position.report ctxt (Markup.ML_antiq name) pos;
         Args.context_syntax "ML antiquotation" (scan pos) src ctxt))
   end;
 
--- a/src/Pure/PIDE/change.scala	Mon Aug 09 14:47:28 2010 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,84 +0,0 @@
-/*  Title:      Pure/PIDE/change.scala
-    Author:     Fabian Immler, TU Munich
-    Author:     Makarius
-
-Changes of plain text, resulting in document edits.
-*/
-
-package isabelle
-
-
-object Change
-{
-  val init = new Change(Document.NO_ID, None, Nil, Future.value(Nil, Document.init))
-
-  abstract class Snapshot
-  {
-    val document: Document
-    val node: Document.Node
-    val is_outdated: Boolean
-    def convert(offset: Int): Int
-    def revert(offset: Int): Int
-  }
-}
-
-class Change(
-  val id: Document.Version_ID,
-  val parent: Option[Change],
-  val edits: List[Document.Node_Text_Edit],
-  val result: Future[(List[Document.Edit[Command]], Document)])
-{
-  /* ancestor versions */
-
-  def ancestors: Iterator[Change] = new Iterator[Change]
-  {
-    private var state: Option[Change] = Some(Change.this)
-    def hasNext = state.isDefined
-    def next =
-      state match {
-        case Some(change) => state = change.parent; change
-        case None => throw new NoSuchElementException("next on empty iterator")
-      }
-  }
-
-
-  /* editing and state assignment */
-
-  def edit(session: Session, edits: List[Document.Node_Text_Edit]): Change =
-  {
-    val new_id = session.create_id()
-    val result: Future[(List[Document.Edit[Command]], Document)] =
-      Future.fork {
-        val old_doc = join_document
-        old_doc.await_assignment
-        Document.text_edits(session, old_doc, new_id, edits)
-      }
-    new Change(new_id, Some(this), edits, result)
-  }
-
-  def join_document: Document = result.join._2
-  def is_assigned: Boolean = result.is_finished && join_document.assignment.is_finished
-
-
-  /* snapshot */
-
-  def snapshot(name: String, extra_edits: List[Text_Edit]): Change.Snapshot =
-  {
-    val latest = this
-    val stable = latest.ancestors.find(_.is_assigned)
-    require(stable.isDefined)
-
-    val edits =
-      (extra_edits /: latest.ancestors.takeWhile(_ != stable.get))((edits, change) =>
-          (for ((a, eds) <- change.edits if a == name) yield eds).flatten ::: edits)
-    lazy val reverse_edits = edits.reverse
-
-    new Change.Snapshot {
-      val document = stable.get.join_document
-      val node = document.nodes(name)
-      val is_outdated = !(extra_edits.isEmpty && latest == stable.get)
-      def convert(offset: Int): Int = (offset /: edits)((i, edit) => edit.convert(i))
-      def revert(offset: Int): Int = (offset /: reverse_edits)((i, edit) => edit.revert(i))
-    }
-  }
-}
\ No newline at end of file
--- a/src/Pure/PIDE/command.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/PIDE/command.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -37,7 +37,7 @@
 class Command(
     val id: Document.Command_ID,
     val span: Thy_Syntax.Span,
-    val static_parent: Option[Command] = None)
+    val static_parent: Option[Command] = None)  // FIXME !?
   extends Session.Entity
 {
   /* classification */
--- a/src/Pure/PIDE/document.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/PIDE/document.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -14,33 +14,31 @@
 
 object Document
 {
-  /* unique identifiers */
+  /* formal identifiers */
 
+  type Version_ID = String
+  type Command_ID = String
   type State_ID = String
-  type Command_ID = String
-  type Version_ID = String
 
   val NO_ID = ""
 
 
-  /* command start positions */
 
-  def command_starts(commands: Iterator[Command], offset: Int = 0): Iterator[(Command, Int)] =
-  {
-    var i = offset
-    for (command <- commands) yield {
-      val start = i
-      i += command.length
-      (command, start)
-    }
-  }
-
-
-  /* named document nodes */
+  /** named document nodes **/
 
   object Node
   {
     val empty: Node = new Node(Linear_Set())
+
+    def command_starts(commands: Iterator[Command], offset: Int = 0): Iterator[(Command, Int)] =
+    {
+      var i = offset
+      for (command <- commands) yield {
+        val start = i
+        i += command.length
+        (command, start)
+      }
+    }
   }
 
   class Node(val commands: Linear_Set[Command])
@@ -48,7 +46,7 @@
     /* command ranges */
 
     def command_starts: Iterator[(Command, Int)] =
-      Document.command_starts(commands.iterator)
+      Node.command_starts(commands.iterator)
 
     def command_start(cmd: Command): Option[Int] =
       command_starts.find(_._1 == cmd).map(_._2)
@@ -85,7 +83,7 @@
 
 
 
-  /** editing **/
+  /** changes of plain text, eventually resulting in document edits **/
 
   type Node_Text_Edit = (String, List[Text_Edit])  // FIXME None: remove
 
@@ -93,6 +91,65 @@
    (String,  // node name
     Option[List[(Option[C], Option[C])]])  // None: remove, Some: insert/remove commands
 
+  abstract class Snapshot
+  {
+    val document: Document
+    val node: Document.Node
+    val is_outdated: Boolean
+    def convert(offset: Int): Int
+    def revert(offset: Int): Int
+  }
+
+  object Change
+  {
+    val init = new Change(NO_ID, None, Nil, Future.value(Nil, Document.init))
+  }
+
+  class Change(
+    val id: Version_ID,
+    val parent: Option[Change],
+    val edits: List[Node_Text_Edit],
+    val result: Future[(List[Edit[Command]], Document)])
+  {
+    def ancestors: Iterator[Change] = new Iterator[Change]
+    {
+      private var state: Option[Change] = Some(Change.this)
+      def hasNext = state.isDefined
+      def next =
+        state match {
+          case Some(change) => state = change.parent; change
+          case None => throw new NoSuchElementException("next on empty iterator")
+        }
+    }
+
+    def join_document: Document = result.join._2
+    def is_assigned: Boolean = result.is_finished && join_document.assignment.is_finished
+
+    def snapshot(name: String, pending_edits: List[Text_Edit]): Snapshot =
+    {
+      val latest = Change.this
+      val stable = latest.ancestors.find(_.is_assigned)
+      require(stable.isDefined)
+
+      val edits =
+        (pending_edits /: latest.ancestors.takeWhile(_ != stable.get))((edits, change) =>
+            (for ((a, eds) <- change.edits if a == name) yield eds).flatten ::: edits)
+      lazy val reverse_edits = edits.reverse
+
+      new Snapshot {
+        val document = stable.get.join_document
+        val node = document.nodes(name)
+        val is_outdated = !(pending_edits.isEmpty && latest == stable.get)
+        def convert(offset: Int): Int = (offset /: edits)((i, edit) => edit.convert(i))
+        def revert(offset: Int): Int = (offset /: reverse_edits)((i, edit) => edit.revert(i))
+      }
+    }
+  }
+
+
+
+  /** editing **/
+
   def text_edits(session: Session, old_doc: Document, new_id: Version_ID,
       edits: List[Node_Text_Edit]): (List[Edit[Command]], Document) =
   {
@@ -102,9 +159,9 @@
     /* unparsed dummy commands */
 
     def unparsed(source: String) =
-      new Command(null, List(Token(Token.Kind.UNPARSED, source)))
+      new Command(NO_ID, List(Token(Token.Kind.UNPARSED, source)))
 
-    def is_unparsed(command: Command) = command.id == null
+    def is_unparsed(command: Command) = command.id == NO_ID
 
 
     /* phase 1: edit individual command source */
@@ -114,7 +171,7 @@
     {
       eds match {
         case e :: es =>
-          command_starts(commands.iterator).find {
+          Node.command_starts(commands.iterator).find {
             case (cmd, cmd_start) =>
               e.can_edit(cmd.source, cmd_start) ||
                 e.is_insert && e.start == cmd_start + cmd.length
@@ -182,7 +239,7 @@
       for ((name, text_edits) <- edits) {
         val commands0 = nodes(name).commands
         val commands1 = edit_text(text_edits, commands0)
-        val commands2 = parse_spans(commands1)
+        val commands2 = parse_spans(commands1)   // FIXME somewhat slow
 
         val removed_commands = commands0.iterator.filter(!commands2.contains(_)).toList
         val inserted_commands = commands2.iterator.filter(!commands0.contains(_)).toList
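
The snapshot added above composes the text edits accumulated since the last assigned document version and then maps offsets with two folds: convert pushes an offset of the stable document forward through the edits, revert pulls a buffer offset back. A sketch with a toy insert-only edit type (an assumption; the real Text_Edit also models removals):

// Toy edit type: insertion of a string at an offset.
object Snapshot_Demo
{
  case class Insert(start: Int, text: String)
  {
    // Map an offset in the old text to the corresponding offset in the new text.
    def convert(i: Int): Int = if (i < start) i else i + text.length
    // Map an offset in the new text back into the old text (clamped inside the insertion).
    def revert(i: Int): Int = if (i < start) i else (i - text.length) max start
  }

  def convert(edits: List[Insert], offset: Int): Int =
    (offset /: edits)((i, edit) => edit.convert(i))

  def revert(edits: List[Insert], offset: Int): Int =
    (offset /: edits.reverse)((i, edit) => edit.revert(i))

  def main(args: Array[String])
  {
    val edits = List(Insert(2, "XY"), Insert(0, "A"))   // applied in this order
    val i = convert(edits, 3)      // offset 3 in the original text
    println(i)                     // 6: shifted by both insertions
    println(revert(edits, i))      // 3: round-trips back
  }
}
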
--- a/src/Pure/PIDE/state.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/PIDE/state.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -2,7 +2,7 @@
     Author:     Fabian Immler, TU Munich
     Author:     Makarius
 
-Accumulating results from prover.
+Accumulated results from prover.
 */
 
 package isabelle
@@ -76,15 +76,21 @@
 
   def accumulate(message: XML.Tree): State =
     message match {
-      case XML.Elem(Markup.STATUS, _, elems) =>
+      case XML.Elem(Markup(Markup.STATUS, _), elems) =>
         (this /: elems)((state, elem) =>
           elem match {
-            case XML.Elem(Markup.UNPROCESSED, _, _) => state.set_status(Command.Status.UNPROCESSED)
-            case XML.Elem(Markup.FINISHED, _, _) => state.set_status(Command.Status.FINISHED)
-            case XML.Elem(Markup.FAILED, _, _) => state.set_status(Command.Status.FAILED)
-            case XML.Elem(Markup.FORKED, _, _) => state.add_forks(1)
-            case XML.Elem(Markup.JOINED, _, _) => state.add_forks(-1)
-            case XML.Elem(kind, atts, body) if Position.get_id(atts) == Some(command.id) =>
+            case XML.Elem(Markup(Markup.UNPROCESSED, _), _) => state.set_status(Command.Status.UNPROCESSED)
+            case XML.Elem(Markup(Markup.FINISHED, _), _) => state.set_status(Command.Status.FINISHED)
+            case XML.Elem(Markup(Markup.FAILED, _), _) => state.set_status(Command.Status.FAILED)
+            case XML.Elem(Markup(Markup.FORKED, _), _) => state.add_forks(1)
+            case XML.Elem(Markup(Markup.JOINED, _), _) => state.add_forks(-1)
+            case _ => System.err.println("Ignored status message: " + elem); state
+          })
+
+      case XML.Elem(Markup(Markup.REPORT, _), elems) =>
+        (this /: elems)((state, elem) =>
+          elem match {
+            case XML.Elem(Markup(kind, atts), body) if Position.get_id(atts) == Some(command.id) =>
               atts match {
                 case Position.Range(begin, end) =>
                   if (kind == Markup.ML_TYPING) {
@@ -94,7 +100,7 @@
                   }
                   else if (kind == Markup.ML_REF) {
                     body match {
-                      case List(XML.Elem(Markup.ML_DEF, atts, _)) =>
+                      case List(XML.Elem(Markup(Markup.ML_DEF, atts), _)) =>
                         state.add_markup(command.markup_node(
                           begin - 1, end - 1,
                           Command.RefInfo(
@@ -112,9 +118,7 @@
                   }
                 case _ => state
               }
-            case _ =>
-              System.err.println("Ignored status report: " + elem)
-              state
+            case _ => System.err.println("Ignored report message: " + elem); state
           })
       case _ => add_result(message)
     }
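
accumulate above folds an immutable State over the children of a STATUS or REPORT element, producing a new State per child and logging anything it does not recognize. A small sketch of that fold-with-copy pattern, using toy status strings rather than the real Command.Status values:

// Toy command status; the real values (UNPROCESSED/FINISHED/FAILED, fork counts)
// live in Command.Status.
object Accumulate_Demo
{
  case class State(status: String, forks: Int)
  {
    def set_status(s: String): State = copy(status = s)
    def add_forks(n: Int): State = copy(forks = forks + n)
  }

  def accumulate(state: State, elems: List[String]): State =
    (state /: elems)((st, elem) =>
      elem match {
        case "unprocessed" => st.set_status("UNPROCESSED")
        case "finished" => st.set_status("FINISHED")
        case "forked" => st.add_forks(1)
        case "joined" => st.add_forks(-1)
        case bad => System.err.println("Ignored status element: " + bad); st
      })

  def main(args: Array[String])
  {
    val st = accumulate(State("UNPROCESSED", 0), List("forked", "finished", "joined", "huh"))
    println(st)  // State(FINISHED,0)
  }
}
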
--- a/src/Pure/ProofGeneral/pgip_input.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/pgip_input.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -62,7 +62,7 @@
     (* unofficial escape command for debugging *)
     | Quitpgip       of { }
 
-    val input: string * XML.attributes * XML.tree list -> pgipinput option  (* raises PGIP *)
+    val input: Markup.T * XML.tree list -> pgipinput option  (* raises PGIP *)
 end
 
 structure PgipInput : PGIPINPUT = 
@@ -161,7 +161,7 @@
    Raise PGIP exception for invalid data.
    Return NONE for unknown/unhandled commands. 
 *)
-fun input (elem, attrs, data) =
+fun input ((elem, attrs), data) =
 SOME 
  (case elem of 
      "askpgip"        => Askpgip { }
--- a/src/Pure/ProofGeneral/pgip_markup.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/pgip_markup.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -75,81 +75,81 @@
        case docelt of
 
            Openblock vs  =>
-           XML.Elem("openblock", opt_attr "name" (#metavarid vs) @
-                                 opt_attr_map PgipTypes.name_of_objtype "objtype" (#objtype vs) @
-                                 opt_attr "metavarid" (#metavarid vs),
+           XML.Elem(("openblock", opt_attr "name" (#metavarid vs) @
+                                  opt_attr_map PgipTypes.name_of_objtype "objtype" (#objtype vs) @
+                                  opt_attr "metavarid" (#metavarid vs)),
                     [])
 
          | Closeblock _ =>
-           XML.Elem("closeblock", [], [])
+           XML.Elem(("closeblock", []), [])
 
          | Opentheory vs  =>
-           XML.Elem("opentheory",
+           XML.Elem(("opentheory",
                     opt_attr "thyname" (#thyname vs) @
                     opt_attr "parentnames"
                              (case (#parentnames vs)
                                of [] => NONE
-                                | ps => SOME (space_implode ";" ps)),
+                                | ps => SOME (space_implode ";" ps))),
                     [XML.Text (#text vs)])
 
          | Theoryitem vs =>
-           XML.Elem("theoryitem",
+           XML.Elem(("theoryitem",
                     opt_attr "name" (#name vs) @
-                    opt_attr_map PgipTypes.name_of_objtype "objtype" (#objtype vs),
+                    opt_attr_map PgipTypes.name_of_objtype "objtype" (#objtype vs)),
                     [XML.Text (#text vs)])
 
          | Closetheory vs =>
-           XML.Elem("closetheory", [], [XML.Text (#text vs)])
+           XML.Elem(("closetheory", []), [XML.Text (#text vs)])
 
          | Opengoal vs =>
-           XML.Elem("opengoal",
-                    opt_attr "thmname" (#thmname vs),
+           XML.Elem(("opengoal",
+                    opt_attr "thmname" (#thmname vs)),
                     [XML.Text (#text vs)])
 
          | Proofstep vs =>
-           XML.Elem("proofstep", [], [XML.Text (#text vs)])
+           XML.Elem(("proofstep", []), [XML.Text (#text vs)])
 
          | Closegoal vs =>
-           XML.Elem("closegoal", [], [XML.Text (#text vs)])
+           XML.Elem(("closegoal", []), [XML.Text (#text vs)])
 
          | Giveupgoal vs =>
-           XML.Elem("giveupgoal", [], [XML.Text (#text vs)])
+           XML.Elem(("giveupgoal", []), [XML.Text (#text vs)])
 
          | Postponegoal vs =>
-           XML.Elem("postponegoal", [], [XML.Text (#text vs)])
+           XML.Elem(("postponegoal", []), [XML.Text (#text vs)])
 
          | Comment vs =>
-           XML.Elem("comment", [], [XML.Text (#text vs)])
+           XML.Elem(("comment", []), [XML.Text (#text vs)])
 
          | Whitespace vs =>
-           XML.Elem("whitespace", [], [XML.Text (#text vs)])
+           XML.Elem(("whitespace", []), [XML.Text (#text vs)])
 
          | Doccomment vs =>
-           XML.Elem("doccomment", [], [XML.Text (#text vs)])
+           XML.Elem(("doccomment", []), [XML.Text (#text vs)])
 
          | Spuriouscmd vs =>
-           XML.Elem("spuriouscmd", [], [XML.Text (#text vs)])
+           XML.Elem(("spuriouscmd", []), [XML.Text (#text vs)])
 
          | Badcmd vs =>
-           XML.Elem("badcmd", [], [XML.Text (#text vs)])
+           XML.Elem(("badcmd", []), [XML.Text (#text vs)])
 
          | Unparseable vs =>
-           XML.Elem("unparseable", [], [XML.Text (#text vs)])
+           XML.Elem(("unparseable", []), [XML.Text (#text vs)])
 
          | Metainfo vs =>
-           XML.Elem("metainfo", opt_attr "name" (#name vs),
+           XML.Elem(("metainfo", opt_attr "name" (#name vs)),
                     [XML.Text (#text vs)])
 
          | Litcomment vs =>
-           XML.Elem("litcomment", opt_attr "format" (#format vs),
+           XML.Elem(("litcomment", opt_attr "format" (#format vs)),
                    #content vs)
 
          | Showcode vs =>
-           XML.Elem("showcode",
-                    attr "show" (PgipTypes.bool_to_pgstring (#show vs)), [])
+           XML.Elem(("showcode",
+                    attr "show" (PgipTypes.bool_to_pgstring (#show vs))), [])
 
          | Setformat vs =>
-           XML.Elem("setformat", attr "format" (#format vs), [])
+           XML.Elem(("setformat", attr "format" (#format vs)), [])
 
    val output_doc = map xml_of
 
--- a/src/Pure/ProofGeneral/pgip_output.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/pgip_output.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -117,9 +117,7 @@
     let 
         val content = #content vs
     in
-        XML.Elem ("normalresponse", 
-                  [],
-                  content)
+        XML.Elem (("normalresponse", []), content)
     end
 
 fun errorresponse (Errorresponse vs) =
@@ -128,9 +126,9 @@
         val location = #location vs
         val content = #content vs
     in
-        XML.Elem ("errorresponse",
+        XML.Elem (("errorresponse",
                  attrs_of_fatality fatality @
-                 these (Option.map attrs_of_location location),
+                 these (Option.map attrs_of_location location)),
                  content)
     end
 
@@ -139,9 +137,9 @@
         val url = #url vs
         val completed = #completed vs
     in
-        XML.Elem ("informfileloaded", 
+        XML.Elem (("informfileloaded", 
                   attrs_of_pgipurl url @ 
-                  (attr "completed" (PgipTypes.bool_to_pgstring completed)),
+                  (attr "completed" (PgipTypes.bool_to_pgstring completed))),
                   [])
     end
 
@@ -150,9 +148,9 @@
         val url = #url vs
         val completed = #completed vs
     in
-        XML.Elem ("informfileoutdated", 
+        XML.Elem (("informfileoutdated", 
                   attrs_of_pgipurl url @ 
-                  (attr "completed" (PgipTypes.bool_to_pgstring completed)),
+                  (attr "completed" (PgipTypes.bool_to_pgstring completed))),
                   [])
     end
 
@@ -161,9 +159,9 @@
         val url = #url vs
         val completed = #completed vs
     in
-        XML.Elem ("informfileretracted", 
+        XML.Elem (("informfileretracted", 
                   attrs_of_pgipurl url @ 
-                  (attr "completed" (PgipTypes.bool_to_pgstring completed)),
+                  (attr "completed" (PgipTypes.bool_to_pgstring completed))),
                   [])
     end
 
@@ -172,14 +170,14 @@
         val attrs = #attrs vs
         val content = #content vs
     in
-        XML.Elem ("metainforesponse", attrs, content)
+        XML.Elem (("metainforesponse", attrs), content)
     end
 
 fun lexicalstructure (Lexicalstructure vs) =
     let
         val content = #content vs
     in
-        XML.Elem ("lexicalstructure", [], content)
+        XML.Elem (("lexicalstructure", []), content)
     end
 
 fun proverinfo (Proverinfo vs) =
@@ -191,13 +189,13 @@
         val url = #url vs
         val filenameextns = #filenameextns vs
     in 
-        XML.Elem ("proverinfo",
+        XML.Elem (("proverinfo",
                  [("name", name),
                   ("version", version),
                   ("instance", instance), 
                   ("descr", descr),
                   ("url", Url.implode url),
-                  ("filenameextns", filenameextns)],
+                  ("filenameextns", filenameextns)]),
                  [])
     end
 
@@ -205,7 +203,7 @@
     let
         val idtables = #idtables vs
     in
-        XML.Elem ("setids",[],map idtable_to_xml idtables)
+        XML.Elem (("setids", []), map idtable_to_xml idtables)
     end
 
 fun setrefs (Setrefs vs) =
@@ -216,13 +214,13 @@
         val name = #name vs
         val idtables = #idtables vs
         val fileurls = #fileurls vs
-        fun fileurl_to_xml url = XML.Elem ("fileurl", attrs_of_pgipurl url, [])
+        fun fileurl_to_xml url = XML.Elem (("fileurl", attrs_of_pgipurl url), [])
     in
-        XML.Elem ("setrefs",
+        XML.Elem (("setrefs",
                   (the_default [] (Option.map attrs_of_pgipurl url)) @ 
                   (the_default [] (Option.map attrs_of_objtype objtype)) @
                   (opt_attr "thyname" thyname) @
-                  (opt_attr "name" name),
+                  (opt_attr "name" name)),
                   (map idtable_to_xml idtables) @ 
                   (map fileurl_to_xml fileurls))
     end
@@ -231,14 +229,14 @@
     let
         val idtables = #idtables vs
     in
-        XML.Elem ("addids",[],map idtable_to_xml idtables)
+        XML.Elem (("addids", []), map idtable_to_xml idtables)
     end
 
 fun delids (Delids vs) =
     let
         val idtables = #idtables vs
     in
-        XML.Elem ("delids",[],map idtable_to_xml idtables)
+        XML.Elem (("delids", []), map idtable_to_xml idtables)
     end
 
 fun hasprefs (Hasprefs vs) =
@@ -246,7 +244,7 @@
       val prefcategory = #prefcategory vs
       val prefs = #prefs vs
   in 
-      XML.Elem("hasprefs",opt_attr "prefcategory" prefcategory, map haspref prefs)
+      XML.Elem (("hasprefs", opt_attr "prefcategory" prefcategory), map haspref prefs)
   end
 
 fun prefval (Prefval vs) =
@@ -254,7 +252,7 @@
         val name = #name vs
         val value = #value vs
     in
-        XML.Elem("prefval", attr "name" name, [XML.Text value])
+        XML.Elem (("prefval", attr "name" name), [XML.Text value])
     end 
 
 fun idvalue (Idvalue vs) =
@@ -264,10 +262,10 @@
         val name = #name vs
         val text = #text vs
     in
-        XML.Elem("idvalue", 
+        XML.Elem (("idvalue", 
                  objtype_attrs @
                  (opt_attr "thyname" thyname) @
-                 (attr "name" name),
+                 attr "name" name),
                  text)
     end
 
@@ -278,7 +276,7 @@
       val theorem = #theorem vs
       val proofpos = #proofpos vs
 
-      fun elto nm attrfn xo = case xo of NONE=>[] | SOME x=>[XML.Elem(nm,attrfn x,[])]
+      fun elto nm attrfn xo = case xo of NONE => [] | SOME x => [XML.Elem ((nm, attrfn x), [])]
 
       val guisefile = elto "guisefile" attrs_of_pgipurl file
       val guisetheory = elto "guisetheory" (single o (pair "thyname")) theory
@@ -286,7 +284,7 @@
                             (fn thm=>(attr "thmname" thm) @
                                      (opt_attr "proofpos" (Option.map Int.toString proofpos))) theorem
   in 
-      XML.Elem("informguise", [], guisefile @ guisetheory @ guiseproof)
+      XML.Elem (("informguise", []), guisefile @ guisetheory @ guiseproof)
   end
 
 fun parseresult (Parseresult vs) =
@@ -296,7 +294,7 @@
         val errs = #errs vs
         val xmldoc = PgipMarkup.output_doc doc
     in 
-        XML.Elem("parseresult", attrs, errs @ xmldoc)
+        XML.Elem (("parseresult", attrs), errs @ xmldoc)
     end
 
 fun acceptedpgipelems (Usespgip vs) = 
@@ -305,11 +303,10 @@
         fun async_attrs b = if b then attr "async" "true" else []
         fun attrs_attrs attrs = if attrs=[] then [] else attr "attributes" (space_implode "," attrs)
         fun singlepgipelem (e,async,attrs) = 
-            XML.Elem("pgipelem", ((async_attrs async) @ (attrs_attrs attrs)),[XML.Text e])
+            XML.Elem (("pgipelem", async_attrs async @ attrs_attrs attrs), [XML.Text e])
                                                       
     in
-        XML.Elem ("acceptedpgipelems", [],
-                 map singlepgipelem pgipelems)
+        XML.Elem (("acceptedpgipelems", []), map singlepgipelem pgipelems)
     end
 
 fun usespgip (Usespgip vs) =
@@ -317,14 +314,14 @@
         val version = #version vs
         val acceptedelems = acceptedpgipelems (Usespgip vs)
     in 
-        XML.Elem("usespgip", attr "version" version, [acceptedelems])
+        XML.Elem (("usespgip", attr "version" version), [acceptedelems])
     end
 
 fun usespgml (Usespgml vs) =
     let
         val version = #version vs
     in 
-        XML.Elem("usespgml", attr "version" version, [])
+        XML.Elem (("usespgml", attr "version" version), [])
     end
 
 fun pgip (Pgip vs) =
@@ -338,18 +335,18 @@
         val refseq = #refseq vs
         val content = #content vs
     in
-        XML.Elem("pgip",
+        XML.Elem(("pgip",
                  opt_attr "tag" tag @
                  attr "id" id @
                  opt_attr "destid" destid @
                  attr "class" class @
                  opt_attr "refid" refid @
                  opt_attr_map string_of_int "refseq" refseq @
-                 attr "seq" (string_of_int seq),
+                 attr "seq" (string_of_int seq)),
                  content)
     end
 
-fun ready (Ready _) = XML.Elem("ready",[],[])
+fun ready (Ready _) = XML.Elem (("ready", []), [])
 
 
 fun output pgipoutput = case pgipoutput of
--- a/src/Pure/ProofGeneral/pgip_types.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/pgip_types.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -168,11 +168,9 @@
     let 
         val objtype_attrs = attrs_of_objtype objtype
         val context_attrs = opt_attr "context" context
-        val ids_content = map (fn x => XML.Elem("identifier",[],[XML.Text x])) ids
+        val ids_content = map (fn x => XML.Elem(("identifier", []), [XML.Text x])) ids
     in 
-        XML.Elem ("idtable",
-                  objtype_attrs @ context_attrs,
-                  ids_content)
+        XML.Elem (("idtable", objtype_attrs @ context_attrs), ids_content)
     end
 
 
@@ -282,7 +280,7 @@
                           Pgipchoice ds => map destpgipdtype ds
                         | _ => []
     in 
-        XML.Elem (elt, (desc_attr @ attrs), (map pgipdtype_to_xml typargs))
+        XML.Elem ((elt, desc_attr @ attrs), map pgipdtype_to_xml typargs)
     end
 
 val pgiptype_to_xml = pgipdtype_to_xml o pair NONE
@@ -424,10 +422,10 @@
                     pgiptype: pgiptype }
 
 fun haspref ({ name, descr, default, pgiptype}:preference) = 
-    XML.Elem ("haspref",
+    XML.Elem (("haspref",
               attr "name" name @
               opt_attr "descr" descr @
-              opt_attr "default" default,
+              opt_attr "default" default),
               [pgiptype_to_xml pgiptype])
 
 end
--- a/src/Pure/ProofGeneral/pgml.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/pgml.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -109,25 +109,25 @@
 
     (* NOTE: we assume strings are already XML escaped here, for convenience in Isabelle;
        would be better not to *)  (* FIXME !??? *)
-    fun atom_to_xml (Sym {name, content}) = XML.Elem ("sym", attr "name" name, [XML.Text content])
+    fun atom_to_xml (Sym {name, content}) = XML.Elem (("sym", attr "name" name), [XML.Text content])
       | atom_to_xml (Str content) = XML.Text content;
 
     fun pgmlterm_to_xml (Atoms {kind, content}) =
-        XML.Elem("atom", opt_attr "kind" kind, map atom_to_xml content)
+        XML.Elem(("atom", opt_attr "kind" kind), map atom_to_xml content)
 
       | pgmlterm_to_xml (Box {orient, indent, content}) =
-        XML.Elem("box",
+        XML.Elem(("box",
                  opt_attr_map pgmlorient_to_string "orient" orient @
-                 opt_attr_map int_to_pgstring "indent" indent,
+                 opt_attr_map int_to_pgstring "indent" indent),
                  map pgmlterm_to_xml content)
 
       | pgmlterm_to_xml (Break {mandatory, indent}) =
-        XML.Elem("break",
+        XML.Elem(("break",
                  opt_attr_map bool_to_pgstring "mandatory" mandatory @
-                 opt_attr_map int_to_pgstring "indent" indent, [])
+                 opt_attr_map int_to_pgstring "indent" indent), [])
 
       | pgmlterm_to_xml (Subterm {kind, param, place, name, decoration, action, pos, xref, content}) =
-        XML.Elem("subterm",
+        XML.Elem(("subterm",
                  opt_attr "kind" kind @
                  opt_attr "param" param @
                  opt_attr_map pgmlplace_to_string "place" place @
@@ -135,13 +135,13 @@
                  opt_attr_map pgmldec_to_string "decoration" decoration @
                  opt_attr_map pgmlaction_to_string "action" action @
                  opt_attr "pos" pos @
-                 opt_attr_map string_of_pgipurl "xref" xref,
+                 opt_attr_map string_of_pgipurl "xref" xref),
                  map pgmlterm_to_xml content)
 
       | pgmlterm_to_xml (Alt {kind, content}) =
-        XML.Elem("alt", opt_attr "kind" kind, map pgmlterm_to_xml content)
+        XML.Elem(("alt", opt_attr "kind" kind), map pgmlterm_to_xml content)
 
-      | pgmlterm_to_xml (Embed xmls) = XML.Elem("embed", [], xmls)
+      | pgmlterm_to_xml (Embed xmls) = XML.Elem(("embed", []), xmls)
 
       | pgmlterm_to_xml (Raw xml) = xml
 
@@ -152,9 +152,9 @@
                        content: pgmlterm list }
 
     fun pgml_to_xml (Pgml {version,systemid,area,content}) =
-        XML.Elem("pgml",
+        XML.Elem(("pgml",
                  opt_attr "version" version @
                  opt_attr "systemid" systemid @
-                 the_default [] (Option.map PgipTypes.attrs_of_displayarea area),
+                 the_default [] (Option.map PgipTypes.attrs_of_displayarea area)),
                  map pgmlterm_to_xml content)
 end
--- a/src/Pure/ProofGeneral/proof_general_emacs.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/proof_general_emacs.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -33,7 +33,7 @@
 
 fun render_trees ts = fold render_tree ts
 and render_tree (XML.Text s) = Buffer.add s
-  | render_tree (XML.Elem (name, props, ts)) =
+  | render_tree (XML.Elem ((name, props), ts)) =
       let
         val (bg1, en1) =
           if name <> Markup.promptN andalso print_mode_active test_markupN
@@ -78,6 +78,7 @@
 fun setup_messages () =
  (Output.writeln_fn := message "" "" "";
   Output.status_fn := (fn _ => ());
+  Output.report_fn := (fn _ => ());
   Output.priority_fn := message (special "I") (special "J") "";
   Output.tracing_fn := message (special "I" ^ special "V") (special "J") "";
   Output.debug_fn := message (special "K") (special "L") "+++ ";
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -105,7 +105,7 @@
 
 in
 
-fun pgml_terms (XML.Elem (name, atts, body)) =
+fun pgml_terms (XML.Elem ((name, atts), body)) =
       if member (op =) token_markups name then
         let val content = pgml_syms (Buffer.content (fold XML.add_content body Buffer.empty))
         in [Pgml.Atoms {kind = SOME name, content = content}] end
@@ -132,7 +132,7 @@
     val body = YXML.parse_body s;
     val area =
       (case body of
-        [XML.Elem (name, _, _)] =>
+        [XML.Elem ((name, _), _)] =>
           if name = Markup.stateN then PgipTypes.Display else default_area
       | _ => default_area);
   in Pgml.pgml_to_xml (pgml area (maps pgml_terms body)) end;
@@ -161,6 +161,7 @@
 fun setup_messages () =
  (Output.writeln_fn := (fn s => normalmsg Message s);
   Output.status_fn := (fn _ => ());
+  Output.report_fn := (fn _ => ());
   Output.priority_fn := (fn s => normalmsg Status s);
   Output.tracing_fn := (fn s => normalmsg Tracing s);
   Output.warning_fn := (fn s => errormsg Message Warning s);
@@ -283,8 +284,8 @@
 
 fun thm_deps_message (thms, deps) =
   let
-    val valuethms = XML.Elem ("value", [("name", "thms")], [XML.Text thms]);
-    val valuedeps = XML.Elem ("value", [("name", "deps")], [XML.Text deps]);
+    val valuethms = XML.Elem (("value", [("name", "thms")]), [XML.Text thms]);
+    val valuedeps = XML.Elem (("value", [("name", "deps")]), [XML.Text deps]);
   in
     issue_pgip (Metainforesponse
       {attrs = [("infotype", "isabelle_theorem_dependencies")],
@@ -312,7 +313,7 @@
     let val keywords = Keyword.dest_keywords ()
         val commands = Keyword.dest_commands ()
         fun keyword_elt kind keyword =
-            XML.Elem("keyword", [("word", keyword), ("category", kind)], [])
+            XML.Elem (("keyword", [("word", keyword), ("category", kind)]), [])
         in
             (* Also, note we don't call init_outer_syntax here to add interface commands,
             but they should never appear in scripts anyway so it shouldn't matter *)
@@ -647,8 +648,7 @@
 
         fun idvalue strings =
             issue_pgip (Idvalue { thyname=thyname, objtype=objtype, name=name,
-                                  text=[XML.Elem("pgml",[],
-                                                 maps YXML.parse_body strings)] })
+                                  text=[XML.Elem (("pgml", []), maps YXML.parse_body strings)] })
 
         fun strings_of_thm (thy, name) =
           map (Display.string_of_thm_global thy) (PureThy.get_thms thy name)
@@ -927,7 +927,7 @@
     (pgip_refid := NONE;
      pgip_refseq := NONE;
      (case xml of
-          XML.Elem ("pgip", attrs, pgips) =>
+          XML.Elem (("pgip", attrs), pgips) =>
           (let
                val class = PgipTypes.get_attr "class" attrs
                val dest  = PgipTypes.get_attr_opt "destid" attrs
--- a/src/Pure/Syntax/lexicon.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Syntax/lexicon.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -65,7 +65,7 @@
   val tvarT: typ
   val terminals: string list
   val is_terminal: string -> bool
-  val report_token: token -> unit
+  val report_token: Proof.context -> token -> unit
   val matching_tokens: token * token -> bool
   val valued_token: token -> bool
   val predef_term: string -> token option
@@ -185,8 +185,8 @@
   | Comment     => Markup.inner_comment
   | EOF         => Markup.none;
 
-fun report_token (Token (kind, _, (pos, _))) =
-  Position.report (token_kind_markup kind) pos;
+fun report_token ctxt (Token (kind, _, (pos, _))) =
+  Context_Position.report ctxt (token_kind_markup kind) pos;
 
 
 (* matching_tokens *)
--- a/src/Pure/Syntax/syntax.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Syntax/syntax.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -80,7 +80,7 @@
     (string * string) trrule list -> syntax -> syntax
   val update_trrules_i: ast trrule list -> syntax -> syntax
   val remove_trrules_i: ast trrule list -> syntax -> syntax
-  val parse_token: Markup.T -> string -> Symbol_Pos.T list * Position.T
+  val parse_token: Proof.context -> Markup.T -> string -> Symbol_Pos.T list * Position.T
   val parse_sort: Proof.context -> string -> sort
   val parse_typ: Proof.context -> string -> typ
   val parse_term: Proof.context -> string -> term
@@ -458,12 +458,15 @@
 (* read token -- with optional YXML encoding of position *)
 
 fun read_token str =
-  let val (text, pos) =
-    (case YXML.parse str handle Fail msg => error msg of
-      XML.Elem ("token", props, [XML.Text text]) => (text, Position.of_properties props)
-    | XML.Elem ("token", props, []) => ("", Position.of_properties props)
-    | XML.Text text => (text, Position.none)
-    | _ => (str, Position.none))
+  let
+    val tree = YXML.parse str handle Fail msg => error msg;
+    val text = Buffer.empty |> XML.add_content tree |> Buffer.content;
+    val pos =
+      (case tree of
+        XML.Elem ((name, props), _) =>
+          if name = Markup.tokenN then Position.of_properties props
+          else Position.none
+      | XML.Text _ => Position.none);
   in (Symbol_Pos.explode (text, pos), pos) end;
 
 
@@ -479,7 +482,7 @@
   let
     val {lexicon, gram, parse_ast_trtab, ...} = tabs;
     val toks = Lexicon.tokenize lexicon xids syms;
-    val _ = List.app Lexicon.report_token toks;
+    val _ = List.app (Lexicon.report_token ctxt) toks;
 
     val root' = if root <> "prop" andalso is_logtype root then Syn_Ext.logic else root;
     val pts = Parser.parse gram root' (filter Lexicon.is_proper toks);
@@ -691,10 +694,10 @@
 
 (* (un)parsing *)
 
-fun parse_token markup str =
+fun parse_token ctxt markup str =
   let
     val (syms, pos) = read_token str;
-    val _ = Position.report markup pos;
+    val _ = Context_Position.report ctxt markup pos;
   in (syms, pos) end;
 
 local
--- a/src/Pure/System/isabelle_process.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/System/isabelle_process.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -34,7 +34,7 @@
 fun message _ _ _ "" = ()
   | message out_stream ch props body =
       let
-        val header = YXML.string_of (XML.Elem (ch, map (pairself YXML.binary_text) props, []));
+        val header = YXML.string_of (XML.Elem ((ch, map (pairself YXML.binary_text) props), []));
         val msg = Symbol.STX ^ chunk header ^ chunk body;
       in TextIO.output (out_stream, msg) (*atomic output*) end;
 
@@ -72,11 +72,12 @@
     val _ = Simple_Thread.fork false (auto_flush TextIO.stdErr);
   in
     Output.status_fn   := standard_message out_stream "B";
-    Output.writeln_fn  := standard_message out_stream "C";
-    Output.tracing_fn  := standard_message out_stream "D";
-    Output.warning_fn  := standard_message out_stream "E";
-    Output.error_fn    := standard_message out_stream "F";
-    Output.debug_fn    := standard_message out_stream "G";
+    Output.report_fn   := standard_message out_stream "C";
+    Output.writeln_fn  := standard_message out_stream "D";
+    Output.tracing_fn  := standard_message out_stream "E";
+    Output.warning_fn  := standard_message out_stream "F";
+    Output.error_fn    := standard_message out_stream "G";
+    Output.debug_fn    := standard_message out_stream "H";
     Output.priority_fn := ! Output.writeln_fn;
     Output.prompt_fn   := ignore;
     out_stream
@@ -89,10 +90,10 @@
 
 fun init out =
  (Unsynchronized.change print_mode
-    (fold (update op =) [isabelle_processN, Keyword.keyword_status_reportN, Pretty.symbolicN]);
+    (fold (update op =) [isabelle_processN, Keyword.keyword_statusN, Pretty.symbolicN]);
   setup_channels out |> init_message;
   quick_and_dirty := true;  (* FIXME !? *)
-  Keyword.report ();
+  Keyword.status ();
   Output.status (Markup.markup Markup.ready "");
   Isar.toplevel_loop {init = true, welcome = false, sync = true, secure = true});
 
--- a/src/Pure/System/isabelle_process.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/System/isabelle_process.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -24,11 +24,12 @@
     val markup = Map(
       ('A' : Int) -> Markup.INIT,
       ('B' : Int) -> Markup.STATUS,
-      ('C' : Int) -> Markup.WRITELN,
-      ('D' : Int) -> Markup.TRACING,
-      ('E' : Int) -> Markup.WARNING,
-      ('F' : Int) -> Markup.ERROR,
-      ('G' : Int) -> Markup.DEBUG)
+      ('C' : Int) -> Markup.REPORT,
+      ('D' : Int) -> Markup.WRITELN,
+      ('E' : Int) -> Markup.TRACING,
+      ('F' : Int) -> Markup.WARNING,
+      ('G' : Int) -> Markup.ERROR,
+      ('H' : Int) -> Markup.DEBUG)
     def is_raw(kind: String) =
       kind == Markup.STDOUT
     def is_control(kind: String) =
@@ -45,26 +46,27 @@
 
   class Result(val message: XML.Elem)
   {
-    def kind = message.name
+    def kind = message.markup.name
+    def properties = message.markup.properties
     def body = message.body
 
     def is_raw = Kind.is_raw(kind)
     def is_control = Kind.is_control(kind)
     def is_system = Kind.is_system(kind)
     def is_status = kind == Markup.STATUS
-    def is_ready = is_status && body == List(XML.Elem(Markup.READY, Nil, Nil))
+    def is_report = kind == Markup.REPORT
+    def is_ready = is_status && body == List(XML.Elem(Markup.Ready, Nil))
 
     override def toString: String =
     {
       val res =
-        if (is_status) message.body.map(_.toString).mkString
+        if (is_status || is_report) message.body.map(_.toString).mkString
         else Pretty.string_of(message.body)
-      val props = message.attributes
-      if (props.isEmpty)
+      if (properties.isEmpty)
         kind.toString + " [[" + res + "]]"
       else
         kind.toString + " " +
-          (for ((x, y) <- props) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]"
+          (for ((x, y) <- properties) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]"
     }
 
     def cache(c: XML.Cache): Result = new Result(c.cache_tree(message).asInstanceOf[XML.Elem])
@@ -98,7 +100,7 @@
     if (kind == Markup.INIT) {
       for ((Markup.PID, p) <- props) pid = p
     }
-    receiver ! new Result(XML.Elem(kind, props, body))
+    receiver ! new Result(XML.Elem(Markup(kind, props), body))
   }
 
   private def put_result(kind: String, text: String)
@@ -300,7 +302,7 @@
             val header = read_chunk()
             val body = read_chunk()
             header match {
-              case List(XML.Elem(name, props, Nil))
+              case List(XML.Elem(Markup(name, props), Nil))
                   if name.size == 1 && Kind.markup.isDefinedAt(name(0)) =>
                 put_result(Kind.markup(name(0)), props, body)
               case _ => throw new Protocol_Error("bad header: " + header.toString)
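
The ML and Scala sides above have to agree on the one-letter message channels, so inserting REPORT as 'C' shifts every later channel by one letter. A minimal sketch of decoding such a header letter; the letter table follows the diff, while the string kinds and the decode helper are illustrative only:

// Channel letters as in the updated table above; kinds and decode are a sketch.
object Channel_Demo
{
  val markup = Map(
    'A' -> "init", 'B' -> "status", 'C' -> "report", 'D' -> "writeln",
    'E' -> "tracing", 'F' -> "warning", 'G' -> "error", 'H' -> "debug")

  def decode(header: String): Option[String] =
    if (header.length == 1 && markup.isDefinedAt(header(0))) Some(markup(header(0)))
    else None

  def main(args: Array[String])
  {
    println(decode("C"))  // Some(report)
    println(decode("Z"))  // None
  }
}
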
--- a/src/Pure/System/session.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/System/session.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -1,5 +1,6 @@
 /*  Title:      Pure/System/session.scala
     Author:     Makarius
+    Options:    :folding=explicit:collapseFolds=1:
 
 Isabelle session, potentially with running prover.
 */
@@ -8,6 +9,7 @@
 
 
 import scala.actors.TIMEOUT
+import scala.actors.Actor
 import scala.actors.Actor._
 
 
@@ -74,7 +76,7 @@
       case _ => None
     }
 
-  private case class Start(timeout: Int, args: List[String])
+  private case class Started(timeout: Int, args: List[String])
   private case object Stop
 
   private lazy val session_actor = actor {
@@ -90,7 +92,8 @@
 
     /* document changes */
 
-    def handle_change(change: Change)
+    def handle_change(change: Document.Change)
+    //{{{
     {
       require(change.parent.isDefined)
 
@@ -120,6 +123,7 @@
       register_document(doc)
       prover.edit_document(change.parent.get.id, doc.id, id_edits)
     }
+    //}}}
 
 
     /* prover results */
@@ -130,10 +134,11 @@
     }
 
     def handle_result(result: Isabelle_Process.Result)
+    //{{{
     {
       raw_results.event(result)
 
-      val target_id: Option[Session.Entity_ID] = Position.get_id(result.message.attributes)
+      val target_id: Option[Session.Entity_ID] = Position.get_id(result.properties)
       val target: Option[Session.Entity] =
         target_id match {
           case None => None
@@ -175,6 +180,7 @@
       else if (!result.is_system)   // FIXME syslog (!?)
         bad_result(result)
     }
+    //}}}
 
 
     /* prover startup */
@@ -222,7 +228,7 @@
 
     loop {
       react {
-        case Start(timeout, args) =>
+        case Started(timeout, args) =>
           if (prover == null) {
             prover = new Isabelle_Process(system, self, args:_*) with Isar_Document
             val origin = sender
@@ -238,7 +244,7 @@
             prover = null
           }
 
-        case change: Change if prover != null =>
+        case change: Document.Change if prover != null =>
           handle_change(change)
 
         case result: Isabelle_Process.Result =>
@@ -254,9 +260,11 @@
 
 
 
-  /** buffered command changes -- delay_first discipline **/
+  /** buffered command changes (delay_first discipline) **/
 
-  private lazy val command_change_buffer = actor {
+  private lazy val command_change_buffer = actor
+  //{{{
+  {
     import scala.compat.Platform.currentTime
 
     var changed: Set[Command] = Set()
@@ -292,6 +300,7 @@
       }
     }
   }
+  //}}}
 
   def indicate_command_change(command: Command)
   {
@@ -299,11 +308,49 @@
   }
 
 
-  /* main methods */
+
+  /** editor history **/
+
+  private case class Edit_Document(edits: List[Document.Node_Text_Edit])
+
+  private val editor_history = new Actor
+  {
+    @volatile private var history = Document.Change.init
+    def current_change(): Document.Change = history
 
-  def start(timeout: Int, args: List[String]): Option[String] =
-    (session_actor !? Start(timeout, args)).asInstanceOf[Option[String]]
+    def act
+    {
+      loop {
+        react {
+          case Edit_Document(edits) =>
+            val old_change = history
+            val new_id = create_id()
+            val result: isabelle.Future[(List[Document.Edit[Command]], Document)] =
+              isabelle.Future.fork {
+                val old_doc = old_change.join_document
+                old_doc.await_assignment
+                Document.text_edits(Session.this, old_doc, new_id, edits)
+              }
+            val new_change = new Document.Change(new_id, Some(old_change), edits, result)
+            history = new_change
+            new_change.result.map(_ => session_actor ! new_change)
+
+          case bad => System.err.println("editor_model: ignoring bad message " + bad)
+        }
+      }
+    }
+  }
+  editor_history.start
+
+
+
+  /** main methods **/
+
+  def started(timeout: Int, args: List[String]): Option[String] =
+    (session_actor !? Started(timeout, args)).asInstanceOf[Option[String]]
 
   def stop() { session_actor ! Stop }
-  def input(change: Change) { session_actor ! change }
+
+  def current_change(): Document.Change = editor_history.current_change()
+  def edit_document(edits: List[Document.Node_Text_Edit]) { editor_history ! Edit_Document(edits) }
 }
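
The editor history introduced above chains immutable Document.Change values and
publishes the newest one through a @volatile field, while the heavy text_edits
computation runs asynchronously in an isabelle.Future. A minimal sketch of that
chaining discipline, using hypothetical stand-in types (Edit, Change) instead of
Document.Node_Text_Edit and Document.Change, and omitting the actor and the
future-based text processing:

  object History_Sketch
  {
    final case class Edit(insert: Boolean, offset: Int, text: String)
    final class Change(val id: Long, val parent: Option[Change], val edits: List[Edit])

    private var next_id = 0L
    private def create_id(): Long = synchronized { next_id += 1; next_id }

    // readers (e.g. snapshots) always see a consistent chain head
    @volatile private var history: Change = new Change(create_id(), None, Nil)
    def current_change(): Change = history

    def edit_document(edits: List[Edit]): Change = synchronized {
      val new_change = new Change(create_id(), Some(history), edits)
      history = new_change
      new_change
    }
  }
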
--- a/src/Pure/System/swing_thread.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/System/swing_thread.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -46,8 +46,9 @@
 
   private def delayed_action(first: Boolean)(time_span: Int)(action: => Unit): () => Unit =
   {
-    val listener =
-      new ActionListener { override def actionPerformed(e: ActionEvent) { action } }
+    val listener = new ActionListener {
+      override def actionPerformed(e: ActionEvent) { Swing_Thread.assert(); action }
+    }
     val timer = new Timer(time_span, listener)
     timer.setRepeats(false)
 
--- a/src/Pure/Thy/html.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Thy/html.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -42,7 +42,7 @@
   // document markup
 
   def span(cls: String, body: List[XML.Tree]): XML.Elem =
-    XML.Elem(SPAN, List((CLASS, cls)), body)
+    XML.Elem(Markup(SPAN, List((CLASS, cls))), body)
 
   def hidden(txt: String): XML.Elem =
     span(Markup.HIDDEN, List(XML.Text(txt)))
@@ -52,8 +52,8 @@
 
   def spans(tree: XML.Tree): List[XML.Tree] =
     tree match {
-      case XML.Elem(name, _, ts) =>
-        List(XML.elem(Markup.DATA, List(tree, span(name, ts.flatMap(spans)))))
+      case XML.Elem(Markup(name, _), ts) =>
+        List(XML.Elem(Markup.Data, List(tree, span(name, ts.flatMap(spans)))))
       case XML.Text(txt) =>
         val ts = new ListBuffer[XML.Tree]
         val t = new StringBuilder
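
XML.Elem now carries its element name and attributes bundled as a Markup value,
so both construction and pattern matching go through Markup(name, props), as in
the span and spans functions above. A minimal self-contained sketch with
simplified stand-in case classes (not the real isabelle.XML and isabelle.Markup):

  object Markup_Sketch
  {
    type Properties = List[(String, String)]
    case class Markup(name: String, properties: Properties)

    sealed abstract class Tree
    case class Elem(markup: Markup, body: List[Tree]) extends Tree
    case class Text(content: String) extends Tree

    // construction: name and properties travel together inside the Markup value
    def span(cls: String, body: List[Tree]): Elem =
      Elem(Markup("span", List(("class", cls))), body)

    // matching: destructure the Markup within the same pattern
    def name_of(tree: Tree): Option[String] =
      tree match {
        case Elem(Markup(name, _), _) => Some(name)
        case Text(_) => None
      }
  }
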
--- a/src/Pure/Thy/thy_syntax.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Thy/thy_syntax.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -7,30 +7,29 @@
 package isabelle
 
 
+import scala.collection.mutable
+
+
 object Thy_Syntax
 {
-  private val parser = new Parse.Parser
-  {
-    override def filter_proper = false
-
-    val command_span: Parser[Span] =
-    {
-      val whitespace = token("white space", _.is_ignored)
-      val command = token("command keyword", _.is_command)
-      val body = not(rep(whitespace) ~ (command | eof)) ~> not_eof
-      command ~ rep(body) ^^ { case x ~ ys => x :: ys } | rep1(whitespace) | rep1(body)
-    }
-  }
-
   type Span = List[Token]
 
   def parse_spans(toks: List[Token]): List[Span] =
   {
-    import parser._
+    val result = new mutable.ListBuffer[Span]
+    val span = new mutable.ListBuffer[Token]
+    val whitespace = new mutable.ListBuffer[Token]
 
-    parse(rep(command_span), Token.reader(toks)) match {
-      case Success(spans, rest) if rest.atEnd => spans
-      case bad => error(bad.toString)
+    def flush(buffer: mutable.ListBuffer[Token])
+    {
+      if (!buffer.isEmpty) { result += buffer.toList; buffer.clear }
     }
+    for (tok <- toks) {
+      if (tok.is_command) { flush(span); flush(whitespace); span += tok }
+      else if (tok.is_ignored) whitespace += tok
+      else { span ++= whitespace; whitespace.clear; span += tok }
+    }
+    flush(span); flush(whitespace)
+    result.toList
   }
 }
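
The rewritten parse_spans groups tokens in a single pass instead of running a
combinator parser. A self-contained sketch of the same grouping discipline over a
hypothetical stub Token (the real type lives in Isar/token.scala), together with
the result it produces on a small input:

  import scala.collection.mutable

  object Spans_Sketch
  {
    case class Token(source: String, is_command: Boolean = false, is_ignored: Boolean = false)
    type Span = List[Token]

    def parse_spans(toks: List[Token]): List[Span] =
    {
      val result = new mutable.ListBuffer[Span]
      val span = new mutable.ListBuffer[Token]
      val whitespace = new mutable.ListBuffer[Token]

      def flush(buffer: mutable.ListBuffer[Token])
      {
        if (!buffer.isEmpty) { result += buffer.toList; buffer.clear }
      }
      for (tok <- toks) {
        // a command keyword closes the current span; pending whitespace becomes a span of its own
        if (tok.is_command) { flush(span); flush(whitespace); span += tok }
        else if (tok.is_ignored) whitespace += tok
        // a proper token pulls buffered whitespace into the current span first
        else { span ++= whitespace; whitespace.clear; span += tok }
      }
      flush(span); flush(whitespace)
      result.toList
    }

    // e.g. for tokens lemma, " ", foo:, " ", by, " ", simp, "\n" (where lemma and by
    // are commands and the quoted tokens are ignored), the spans are
    // [lemma, " ", foo:], [" "], [by, " ", simp], ["\n"]:
    // whitespace joins a span only when a proper token follows it.
  }
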
--- a/src/Pure/Tools/xml_syntax.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/Tools/xml_syntax.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -22,148 +22,150 @@
 
 (**** XML output ****)
 
-fun xml_of_class name = XML.Elem ("class", [("name", name)], []);
+fun xml_of_class name = XML.Elem (("class", [("name", name)]), []);
 
-fun xml_of_type (TVar ((s, i), S)) = XML.Elem ("TVar",
-      ("name", s) :: (if i=0 then [] else [("index", string_of_int i)]),
-      map xml_of_class S)
+fun xml_of_type (TVar ((s, i), S)) =
+      XML.Elem (("TVar", ("name", s) :: (if i=0 then [] else [("index", string_of_int i)])),
+        map xml_of_class S)
   | xml_of_type (TFree (s, S)) =
-      XML.Elem ("TFree", [("name", s)], map xml_of_class S)
+      XML.Elem (("TFree", [("name", s)]), map xml_of_class S)
   | xml_of_type (Type (s, Ts)) =
-      XML.Elem ("Type", [("name", s)], map xml_of_type Ts);
+      XML.Elem (("Type", [("name", s)]), map xml_of_type Ts);
 
 fun xml_of_term (Bound i) =
-      XML.Elem ("Bound", [("index", string_of_int i)], [])
+      XML.Elem (("Bound", [("index", string_of_int i)]), [])
   | xml_of_term (Free (s, T)) =
-      XML.Elem ("Free", [("name", s)], [xml_of_type T])
-  | xml_of_term (Var ((s, i), T)) = XML.Elem ("Var",
-      ("name", s) :: (if i=0 then [] else [("index", string_of_int i)]),
-      [xml_of_type T])
+      XML.Elem (("Free", [("name", s)]), [xml_of_type T])
+  | xml_of_term (Var ((s, i), T)) =
+      XML.Elem (("Var", ("name", s) :: (if i=0 then [] else [("index", string_of_int i)])),
+        [xml_of_type T])
   | xml_of_term (Const (s, T)) =
-      XML.Elem ("Const", [("name", s)], [xml_of_type T])
+      XML.Elem (("Const", [("name", s)]), [xml_of_type T])
   | xml_of_term (t $ u) =
-      XML.Elem ("App", [], [xml_of_term t, xml_of_term u])
+      XML.Elem (("App", []), [xml_of_term t, xml_of_term u])
   | xml_of_term (Abs (s, T, t)) =
-      XML.Elem ("Abs", [("vname", s)], [xml_of_type T, xml_of_term t]);
+      XML.Elem (("Abs", [("vname", s)]), [xml_of_type T, xml_of_term t]);
 
 fun xml_of_opttypes NONE = []
-  | xml_of_opttypes (SOME Ts) = [XML.Elem ("types", [], map xml_of_type Ts)];
+  | xml_of_opttypes (SOME Ts) = [XML.Elem (("types", []), map xml_of_type Ts)];
 
 (* FIXME: the t argument of PThm and PAxm is actually redundant, since *)
 (* it can be looked up in the theorem database. Thus, it could be      *)
 (* omitted from the xml representation.                                *)
 
+(* FIXME not exhaustive *)
 fun xml_of_proof (PBound i) =
-      XML.Elem ("PBound", [("index", string_of_int i)], [])
+      XML.Elem (("PBound", [("index", string_of_int i)]), [])
   | xml_of_proof (Abst (s, optT, prf)) =
-      XML.Elem ("Abst", [("vname", s)],
-        (case optT of NONE => [] | SOME T => [xml_of_type T]) @
-        [xml_of_proof prf])
+      XML.Elem (("Abst", [("vname", s)]),
+        (case optT of NONE => [] | SOME T => [xml_of_type T]) @ [xml_of_proof prf])
   | xml_of_proof (AbsP (s, optt, prf)) =
-      XML.Elem ("AbsP", [("vname", s)],
-        (case optt of NONE => [] | SOME t => [xml_of_term t]) @
-        [xml_of_proof prf])
+      XML.Elem (("AbsP", [("vname", s)]),
+        (case optt of NONE => [] | SOME t => [xml_of_term t]) @ [xml_of_proof prf])
   | xml_of_proof (prf % optt) =
-      XML.Elem ("Appt", [],
-        xml_of_proof prf ::
-        (case optt of NONE => [] | SOME t => [xml_of_term t]))
+      XML.Elem (("Appt", []),
+        xml_of_proof prf :: (case optt of NONE => [] | SOME t => [xml_of_term t]))
   | xml_of_proof (prf %% prf') =
-      XML.Elem ("AppP", [], [xml_of_proof prf, xml_of_proof prf'])
-  | xml_of_proof (Hyp t) = XML.Elem ("Hyp", [], [xml_of_term t])
+      XML.Elem (("AppP", []), [xml_of_proof prf, xml_of_proof prf'])
+  | xml_of_proof (Hyp t) = XML.Elem (("Hyp", []), [xml_of_term t])
   | xml_of_proof (PThm (_, ((s, t, optTs), _))) =
-      XML.Elem ("PThm", [("name", s)], xml_of_term t :: xml_of_opttypes optTs)
+      XML.Elem (("PThm", [("name", s)]), xml_of_term t :: xml_of_opttypes optTs)
   | xml_of_proof (PAxm (s, t, optTs)) =
-      XML.Elem ("PAxm", [("name", s)], xml_of_term t :: xml_of_opttypes optTs)
+      XML.Elem (("PAxm", [("name", s)]), xml_of_term t :: xml_of_opttypes optTs)
   | xml_of_proof (Oracle (s, t, optTs)) =
-      XML.Elem ("Oracle", [("name", s)], xml_of_term t :: xml_of_opttypes optTs)
+      XML.Elem (("Oracle", [("name", s)]), xml_of_term t :: xml_of_opttypes optTs)
   | xml_of_proof MinProof =
-      XML.Elem ("MinProof", [], []);
+      XML.Elem (("MinProof", []), []);
+
 
 (* useful for checking the output against a schema file *)
 
 fun write_to_file path elname x =
   File.write path
     ("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" ^
-     XML.string_of (XML.Elem (elname,
-       [("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"),
-        ("xsi:noNamespaceSchemaLocation", "isabelle.xsd")],
+     XML.string_of (XML.Elem ((elname,
+         [("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"),
+          ("xsi:noNamespaceSchemaLocation", "isabelle.xsd")]),
        [x])));
 
 
+
 (**** XML input ****)
 
 exception XML of string * XML.tree;
 
-fun class_of_xml (XML.Elem ("class", [("name", name)], [])) = name
+fun class_of_xml (XML.Elem (("class", [("name", name)]), [])) = name
   | class_of_xml tree = raise XML ("class_of_xml: bad tree", tree);
 
-fun index_of_string s tree idx = (case Int.fromString idx of
-  NONE => raise XML (s ^ ": bad index", tree) | SOME i => i);
+fun index_of_string s tree idx =
+  (case Int.fromString idx of
+    NONE => raise XML (s ^ ": bad index", tree)
+  | SOME i => i);
 
-fun type_of_xml (tree as XML.Elem ("TVar", atts, classes)) = TVar
+fun type_of_xml (tree as XML.Elem (("TVar", atts), classes)) = TVar
       ((case Properties.get atts "name" of
           NONE => raise XML ("type_of_xml: name of TVar missing", tree)
         | SOME name => name,
         the_default 0 (Option.map (index_of_string "type_of_xml" tree)
           (Properties.get atts "index"))),
        map class_of_xml classes)
-  | type_of_xml (XML.Elem ("TFree", [("name", s)], classes)) =
+  | type_of_xml (XML.Elem (("TFree", [("name", s)]), classes)) =
       TFree (s, map class_of_xml classes)
-  | type_of_xml (XML.Elem ("Type", [("name", s)], types)) =
+  | type_of_xml (XML.Elem (("Type", [("name", s)]), types)) =
       Type (s, map type_of_xml types)
   | type_of_xml tree = raise XML ("type_of_xml: bad tree", tree);
 
-fun term_of_xml (tree as XML.Elem ("Bound", [("index", idx)], [])) =
+fun term_of_xml (tree as XML.Elem (("Bound", [("index", idx)]), [])) =
       Bound (index_of_string "bad variable index" tree idx)
-  | term_of_xml (XML.Elem ("Free", [("name", s)], [typ])) =
+  | term_of_xml (XML.Elem (("Free", [("name", s)]), [typ])) =
       Free (s, type_of_xml typ)
-  | term_of_xml (tree as XML.Elem ("Var", atts, [typ])) = Var
+  | term_of_xml (tree as XML.Elem (("Var", atts), [typ])) = Var
       ((case Properties.get atts "name" of
           NONE => raise XML ("type_of_xml: name of Var missing", tree)
         | SOME name => name,
         the_default 0 (Option.map (index_of_string "term_of_xml" tree)
           (Properties.get atts "index"))),
        type_of_xml typ)
-  | term_of_xml (XML.Elem ("Const", [("name", s)], [typ])) =
+  | term_of_xml (XML.Elem (("Const", [("name", s)]), [typ])) =
       Const (s, type_of_xml typ)
-  | term_of_xml (XML.Elem ("App", [], [term, term'])) =
+  | term_of_xml (XML.Elem (("App", []), [term, term'])) =
       term_of_xml term $ term_of_xml term'
-  | term_of_xml (XML.Elem ("Abs", [("vname", s)], [typ, term])) =
+  | term_of_xml (XML.Elem (("Abs", [("vname", s)]), [typ, term])) =
       Abs (s, type_of_xml typ, term_of_xml term)
   | term_of_xml tree = raise XML ("term_of_xml: bad tree", tree);
 
 fun opttypes_of_xml [] = NONE
-  | opttypes_of_xml [XML.Elem ("types", [], types)] =
+  | opttypes_of_xml [XML.Elem (("types", []), types)] =
       SOME (map type_of_xml types)
   | opttypes_of_xml (tree :: _) = raise XML ("opttypes_of_xml: bad tree", tree);
 
-fun proof_of_xml (tree as XML.Elem ("PBound", [("index", idx)], [])) =
+fun proof_of_xml (tree as XML.Elem (("PBound", [("index", idx)]), [])) =
       PBound (index_of_string "proof_of_xml" tree idx)
-  | proof_of_xml (XML.Elem ("Abst", [("vname", s)], [proof])) =
+  | proof_of_xml (XML.Elem (("Abst", [("vname", s)]), [proof])) =
       Abst (s, NONE, proof_of_xml proof)
-  | proof_of_xml (XML.Elem ("Abst", [("vname", s)], [typ, proof])) =
+  | proof_of_xml (XML.Elem (("Abst", [("vname", s)]), [typ, proof])) =
       Abst (s, SOME (type_of_xml typ), proof_of_xml proof)
-  | proof_of_xml (XML.Elem ("AbsP", [("vname", s)], [proof])) =
+  | proof_of_xml (XML.Elem (("AbsP", [("vname", s)]), [proof])) =
       AbsP (s, NONE, proof_of_xml proof)
-  | proof_of_xml (XML.Elem ("AbsP", [("vname", s)], [term, proof])) =
+  | proof_of_xml (XML.Elem (("AbsP", [("vname", s)]), [term, proof])) =
       AbsP (s, SOME (term_of_xml term), proof_of_xml proof)
-  | proof_of_xml (XML.Elem ("Appt", [], [proof])) =
+  | proof_of_xml (XML.Elem (("Appt", []), [proof])) =
       proof_of_xml proof % NONE
-  | proof_of_xml (XML.Elem ("Appt", [], [proof, term])) =
+  | proof_of_xml (XML.Elem (("Appt", []), [proof, term])) =
       proof_of_xml proof % SOME (term_of_xml term)
-  | proof_of_xml (XML.Elem ("AppP", [], [proof, proof'])) =
+  | proof_of_xml (XML.Elem (("AppP", []), [proof, proof'])) =
       proof_of_xml proof %% proof_of_xml proof'
-  | proof_of_xml (XML.Elem ("Hyp", [], [term])) =
+  | proof_of_xml (XML.Elem (("Hyp", []), [term])) =
       Hyp (term_of_xml term)
-  | proof_of_xml (XML.Elem ("PThm", [("name", s)], term :: opttypes)) =
+  | proof_of_xml (XML.Elem (("PThm", [("name", s)]), term :: opttypes)) =
       (* FIXME? *)
       PThm (serial (), ((s, term_of_xml term, opttypes_of_xml opttypes),
         Future.value (Proofterm.approximate_proof_body MinProof)))
-  | proof_of_xml (XML.Elem ("PAxm", [("name", s)], term :: opttypes)) =
+  | proof_of_xml (XML.Elem (("PAxm", [("name", s)]), term :: opttypes)) =
       PAxm (s, term_of_xml term, opttypes_of_xml opttypes)
-  | proof_of_xml (XML.Elem ("Oracle", [("name", s)], term :: opttypes)) =
+  | proof_of_xml (XML.Elem (("Oracle", [("name", s)]), term :: opttypes)) =
       Oracle (s, term_of_xml term, opttypes_of_xml opttypes)
-  | proof_of_xml (XML.Elem ("MinProof", _, _)) = MinProof
+  | proof_of_xml (XML.Elem (("MinProof", _), _)) = MinProof
   | proof_of_xml tree = raise XML ("proof_of_xml: bad tree", tree);
 
 end;
--- a/src/Pure/build-jars	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/build-jars	Mon Aug 09 15:40:06 2010 +0200
@@ -37,7 +37,6 @@
   Isar/outer_syntax.scala
   Isar/parse.scala
   Isar/token.scala
-  PIDE/change.scala
   PIDE/command.scala
   PIDE/document.scala
   PIDE/event_bus.scala
--- a/src/Pure/context_position.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/context_position.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -9,7 +9,7 @@
   val is_visible: Proof.context -> bool
   val set_visible: bool -> Proof.context -> Proof.context
   val restore_visible: Proof.context -> Proof.context -> Proof.context
-  val report_visible: Proof.context -> Markup.T -> Position.T -> unit
+  val report: Proof.context -> Markup.T -> Position.T -> unit
 end;
 
 structure Context_Position: CONTEXT_POSITION =
@@ -25,7 +25,7 @@
 val set_visible = Data.put;
 val restore_visible = set_visible o is_visible;
 
-fun report_visible ctxt markup pos =
+fun report ctxt markup pos =
   if is_visible ctxt then Position.report markup pos else ();
 
 end;
--- a/src/Pure/goal.ML	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/goal.ML	Mon Aug 09 15:40:06 2010 +0200
@@ -26,7 +26,6 @@
   val fork: (unit -> 'a) -> 'a future
   val future_enabled: unit -> bool
   val local_future_enabled: unit -> bool
-  val local_future_enforced: unit -> bool
   val future_result: Proof.context -> thm future -> term -> thm
   val prove_internal: cterm list -> cterm -> (thm list -> tactic) -> thm
   val prove_multi: Proof.context -> string list -> term list -> term list ->
@@ -104,7 +103,7 @@
 
 (* parallel proofs *)
 
-fun fork e = Future.fork_pri ~1 (fn () => Future.report e);
+fun fork e = Future.fork_pri ~1 (fn () => Future.status e);
 
 val parallel_proofs = Unsynchronized.ref 1;
 
@@ -112,7 +111,6 @@
   Multithreading.enabled () andalso Future.is_worker () andalso ! parallel_proofs >= 1;
 
 fun local_future_enabled () = future_enabled () andalso ! parallel_proofs >= 2;
-fun local_future_enforced () = future_enabled () andalso ! parallel_proofs >= 3;
 
 
 (* future_result *)
--- a/src/Pure/library.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Pure/library.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -89,8 +89,9 @@
     (parent: Component, title: String, message: Any*)
   {
     Swing_Thread.now {
+      val java_message = message map { case x: scala.swing.Component => x.peer case x => x }
       JOptionPane.showMessageDialog(parent,
-        message.toArray.asInstanceOf[Array[AnyRef]],
+        java_message.toArray.asInstanceOf[Array[AnyRef]],
         if (title == null) default_title else title, kind)
     }
   }
--- a/src/Tools/jEdit/src/jedit/document_model.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/document_model.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -2,7 +2,7 @@
     Author:     Fabian Immler, TU Munich
     Author:     Makarius
 
-Document model connected to jEdit buffer.
+Document model connected to jEdit buffer -- single node in theory graph.
 */
 
 package isabelle.jedit
@@ -149,10 +149,10 @@
 
   private val key = "isabelle.document_model"
 
-  def init(session: Session, buffer: Buffer): Document_Model =
+  def init(session: Session, buffer: Buffer, thy_name: String): Document_Model =
   {
-    Swing_Thread.assert()
-    val model = new Document_Model(session, buffer)
+    Swing_Thread.require()
+    val model = new Document_Model(session, buffer, thy_name)
     buffer.setProperty(key, model)
     model.activate()
     model
@@ -160,7 +160,7 @@
 
   def apply(buffer: Buffer): Option[Document_Model] =
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     buffer.getProperty(key) match {
       case model: Document_Model => Some(model)
       case _ => None
@@ -169,7 +169,7 @@
 
   def exit(buffer: Buffer)
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     apply(buffer) match {
       case None => error("No document model for buffer: " + buffer)
       case Some(model) =>
@@ -180,7 +180,7 @@
 }
 
 
-class Document_Model(val session: Session, val buffer: Buffer)
+class Document_Model(val session: Session, val buffer: Buffer, val thy_name: String)
 {
   /* visible line end */
 
@@ -195,32 +195,39 @@
   }
 
 
-  /* global state -- owned by Swing thread */
+  /* pending text edits */
+
+  object pending_edits  // owned by Swing thread
+  {
+    private val pending = new mutable.ListBuffer[Text_Edit]
+    def snapshot(): List[Text_Edit] = pending.toList
+
+    private val delay_flush = Swing_Thread.delay_last(session.input_delay) {
+      if (!pending.isEmpty) session.edit_document(List((thy_name, flush())))
+    }
 
-  @volatile private var history = Change.init // owned by Swing thread
-  private val edits_buffer = new mutable.ListBuffer[Text_Edit]   // owned by Swing thread
+    def flush(): List[Text_Edit] =
+    {
+      Swing_Thread.require()
+      val edits = snapshot()
+      pending.clear
+      edits
+    }
+
+    def +=(edit: Text_Edit)
+    {
+      Swing_Thread.require()
+      pending += edit
+      delay_flush()
+    }
+  }
 
 
   /* snapshot */
 
-  // FIXME proper error handling
-  val thy_name = Thy_Header.split_thy_path(Isabelle.system.posix_path(buffer.getPath))._2
-
-  def current_change(): Change = history
-
-  def snapshot(): Change.Snapshot =
-    Swing_Thread.now { history.snapshot(thy_name, edits_buffer.toList) }
-
-
-  /* text edits */
-
-  private val edits_delay = Swing_Thread.delay_last(session.input_delay) {
-    if (!edits_buffer.isEmpty) {
-      val new_change = history.edit(session, List((thy_name, edits_buffer.toList)))
-      edits_buffer.clear
-      history = new_change
-      new_change.result.map(_ => session.input(new_change))
-    }
+  def snapshot(): Document.Snapshot = {
+    Swing_Thread.require()
+    session.current_change().snapshot(thy_name, pending_edits.snapshot())
   }
 
 
@@ -231,15 +238,13 @@
     override def contentInserted(buffer: JEditBuffer,
       start_line: Int, offset: Int, num_lines: Int, length: Int)
     {
-      edits_buffer += new Text_Edit(true, offset, buffer.getText(offset, length))
-      edits_delay()
+      pending_edits += new Text_Edit(true, offset, buffer.getText(offset, length))
     }
 
     override def preContentRemoved(buffer: JEditBuffer,
       start_line: Int, start: Int, num_lines: Int, removed_length: Int)
     {
-      edits_buffer += new Text_Edit(false, start, buffer.getText(start, removed_length))
-      edits_delay()
+      pending_edits += new Text_Edit(false, start, buffer.getText(start, removed_length))
     }
   }
 
@@ -257,7 +262,8 @@
       val start = buffer.getLineStartOffset(line)
       val stop = start + line_segment.count
 
-      val snapshot = Document_Model.this.snapshot()
+      // FIXME proper synchronization / thread context (!??)
+      val snapshot = Swing_Thread.now { Document_Model.this.snapshot() }
 
       /* FIXME
       for (text_area <- Isabelle.jedit_text_areas(buffer)
@@ -314,9 +320,7 @@
     buffer.setTokenMarker(token_marker)
     buffer.addBufferListener(buffer_listener)
     buffer.propertiesChanged()
-
-    edits_buffer += new Text_Edit(true, 0, buffer.getText(0, buffer.getLength))
-    edits_delay()
+    pending_edits += new Text_Edit(true, 0, buffer.getText(0, buffer.getLength))
   }
 
   def refresh()
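
The pending_edits object above buffers text edits on the Swing thread and flushes
them as one batch once session.input_delay has passed without further input,
using Swing_Thread.delay_last. A minimal self-contained sketch of that
buffer-plus-debounce discipline, assuming plain javax.swing, with Strings standing
in for Text_Edit and a hypothetical consume callback in place of
session.edit_document:

  import java.awt.event.{ActionEvent, ActionListener}
  import javax.swing.{SwingUtilities, Timer}
  import scala.collection.mutable

  class Pending_Edits_Sketch(input_delay: Int, consume: List[String] => Unit)
  {
    private val pending = new mutable.ListBuffer[String]

    // Swing Timer callbacks arrive on the event dispatch thread
    private val delay_flush = new Timer(input_delay, new ActionListener {
      override def actionPerformed(e: ActionEvent) {
        assert(SwingUtilities.isEventDispatchThread())
        if (!pending.isEmpty) consume(flush())
      }
    })
    delay_flush.setRepeats(false)

    def snapshot(): List[String] = pending.toList   // read without clearing

    def flush(): List[String] =
    {
      val edits = snapshot()
      pending.clear
      edits
    }

    def +=(edit: String)
    {
      pending += edit
      delay_flush.restart()   // fire input_delay ms after the most recent edit
    }
  }
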
--- a/src/Tools/jEdit/src/jedit/document_view.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/document_view.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -24,7 +24,7 @@
 
 object Document_View
 {
-  def choose_color(snapshot: Change.Snapshot, command: Command): Color =
+  def choose_color(snapshot: Document.Snapshot, command: Command): Color =
   {
     val state = snapshot.document.current_state(command)
     if (snapshot.is_outdated) new Color(240, 240, 240)
@@ -46,7 +46,7 @@
 
   def init(model: Document_Model, text_area: TextArea): Document_View =
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     val doc_view = new Document_View(model, text_area)
     text_area.putClientProperty(key, doc_view)
     doc_view.activate()
@@ -55,7 +55,7 @@
 
   def apply(text_area: TextArea): Option[Document_View] =
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     text_area.getClientProperty(key) match {
       case doc_view: Document_View => Some(doc_view)
       case _ => None
@@ -64,7 +64,7 @@
 
   def exit(text_area: TextArea)
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     apply(text_area) match {
       case None => error("No document view for text area: " + text_area)
       case Some(doc_view) =>
@@ -86,14 +86,14 @@
 
   def extend_styles()
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     styles = Document_Model.Token_Markup.extend_styles(text_area.getPainter.getStyles)
   }
   extend_styles()
 
   def set_styles()
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
     text_area.getPainter.setStyles(styles)
   }
 
@@ -116,9 +116,9 @@
     }
   }
 
-  def full_repaint(snapshot: Change.Snapshot, changed: Set[Command])
+  def full_repaint(snapshot: Document.Snapshot, changed: Set[Command])
   {
-    Swing_Thread.assert()
+    Swing_Thread.require()
 
     val buffer = model.buffer
     var visible_change = false
@@ -148,54 +148,57 @@
       first_line: Int, last_line: Int, physical_lines: Array[Int],
       start: Array[Int], end: Array[Int], y0: Int, line_height: Int)
     {
-      Swing_Thread.now {
-        val snapshot = model.snapshot()
+      Swing_Thread.assert()
+
+      val snapshot = model.snapshot()
+
+      val command_range: Iterable[(Command, Int)] =
+      {
+        val range = snapshot.node.command_range(snapshot.revert(start(0)))
+        if (range.hasNext) {
+          val (cmd0, start0) = range.next
+          new Iterable[(Command, Int)] {
+            def iterator =
+              Document.Node.command_starts(snapshot.node.commands.iterator(cmd0), start0)
+          }
+        }
+        else Iterable.empty
+      }
 
-        val command_range: Iterable[(Command, Int)] =
-        {
-          val range = snapshot.node.command_range(snapshot.revert(start(0)))
-          if (range.hasNext) {
-            val (cmd0, start0) = range.next
-            new Iterable[(Command, Int)] {
-              def iterator = Document.command_starts(snapshot.node.commands.iterator(cmd0), start0)
+      val saved_color = gfx.getColor
+      try {
+        var y = y0
+        for (i <- 0 until physical_lines.length) {
+          if (physical_lines(i) != -1) {
+            val line_start = start(i)
+            val line_end = model.visible_line_end(line_start, end(i))
+
+            val a = snapshot.revert(line_start)
+            val b = snapshot.revert(line_end)
+            val cmds = command_range.iterator.
+              dropWhile { case (cmd, c) => c + cmd.length <= a } .
+              takeWhile { case (_, c) => c < b }
+
+            for ((command, command_start) <- cmds if !command.is_ignored) {
+              val p =
+                text_area.offsetToXY(line_start max snapshot.convert(command_start))
+              val q =
+                text_area.offsetToXY(line_end min snapshot.convert(command_start + command.length))
+              assert(p.y == q.y)
+              gfx.setColor(Document_View.choose_color(snapshot, command))
+              gfx.fillRect(p.x, y, q.x - p.x, line_height)
             }
           }
-          else Iterable.empty
+          y += line_height
         }
-
-        val saved_color = gfx.getColor
-        try {
-          var y = y0
-          for (i <- 0 until physical_lines.length) {
-            if (physical_lines(i) != -1) {
-              val line_start = start(i)
-              val line_end = model.visible_line_end(line_start, end(i))
-
-              val a = snapshot.revert(line_start)
-              val b = snapshot.revert(line_end)
-              val cmds = command_range.iterator.
-                dropWhile { case (cmd, c) => c + cmd.length <= a } .
-                takeWhile { case (_, c) => c < b }
-
-              for ((command, command_start) <- cmds if !command.is_ignored) {
-                val p =
-                  text_area.offsetToXY(line_start max snapshot.convert(command_start))
-                val q =
-                  text_area.offsetToXY(line_end min snapshot.convert(command_start + command.length))
-                assert(p.y == q.y)
-                gfx.setColor(Document_View.choose_color(snapshot, command))
-                gfx.fillRect(p.x, y, q.x - p.x, line_height)
-              }
-            }
-            y += line_height
-          }
-        }
-        finally { gfx.setColor(saved_color) }
       }
+      finally { gfx.setColor(saved_color) }
     }
 
     override def getToolTipText(x: Int, y: Int): String =
     {
+      Swing_Thread.assert()
+
       val snapshot = model.snapshot()
       val offset = snapshot.revert(text_area.xyToOffset(x, y))
       snapshot.node.command_at(offset) match {
@@ -213,7 +216,10 @@
   /* caret handling */
 
   def selected_command(): Option[Command] =
+  {
+    Swing_Thread.require()
     model.snapshot().node.proper_command_at(text_area.getCaretPosition)
+  }
 
   private val caret_listener = new CaretListener {
     private val delay = Swing_Thread.delay_last(session.input_delay) {
--- a/src/Tools/jEdit/src/jedit/html_panel.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/html_panel.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -158,7 +158,8 @@
       val html_body =
         current_body.flatMap(div =>
           Pretty.formatted(List(div), current_margin, Pretty.font_metric(current_font_metrics))
-            .map(t => XML.Elem(HTML.PRE, List((Markup.CLASS, Markup.MESSAGE)), HTML.spans(t))))
+            .map(t =>
+              XML.Elem(Markup(HTML.PRE, List((Markup.CLASS, Markup.MESSAGE))), HTML.spans(t))))
       val doc =
         builder.parse(
           new InputSourceImpl(
--- a/src/Tools/jEdit/src/jedit/isabelle_hyperlinks.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/isabelle_hyperlinks.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -40,6 +40,7 @@
 {
   def getHyperlink(buffer: Buffer, original_offset: Int): Hyperlink =
   {
+    Swing_Thread.assert()
     Document_Model(buffer) match {
       case Some(model) =>
         val snapshot = model.snapshot()
--- a/src/Tools/jEdit/src/jedit/isabelle_sidekick.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/isabelle_sidekick.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -95,7 +95,7 @@
     import Isabelle_Sidekick.int_to_pos
 
     val root = data.root
-    val doc = model.snapshot().node  // FIXME cover all nodes (!??)
+    val doc = Swing_Thread.now { model.snapshot().node }  // FIXME cover all nodes (!??)
     for {
       (command, command_start) <- doc.command_range(0)
       if command.is_command && !stopped
@@ -128,7 +128,7 @@
     import Isabelle_Sidekick.int_to_pos
 
     val root = data.root
-    val snapshot = model.snapshot()  // FIXME cover all nodes (!??)
+    val snapshot = Swing_Thread.now { model.snapshot() }  // FIXME cover all nodes (!??)
     for ((command, command_start) <- snapshot.node.command_range(0) if !stopped) {
       root.add(snapshot.document.current_state(command).markup_root.swing_tree((node: Markup_Node) =>
           {
--- a/src/Tools/jEdit/src/jedit/output_dockable.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/output_dockable.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -22,7 +22,7 @@
 
 class Output_Dockable(view: View, position: String) extends Dockable(view, position)
 {
-  Swing_Thread.assert()
+  Swing_Thread.require()
 
   val html_panel =
     new HTML_Panel(Isabelle.system, Isabelle.font_family(), scala.math.round(Isabelle.font_size()))
@@ -68,8 +68,8 @@
               val snapshot = doc_view.model.snapshot()
               val filtered_results =
                 snapshot.document.current_state(cmd).results filter {
-                  case XML.Elem(Markup.TRACING, _, _) => show_tracing
-                  case XML.Elem(Markup.DEBUG, _, _) => show_debug
+                  case XML.Elem(Markup(Markup.TRACING, _), _) => show_tracing
+                  case XML.Elem(Markup(Markup.DEBUG, _), _) => show_debug
                   case _ => true
                 }
               html_panel.render(filtered_results)
--- a/src/Tools/jEdit/src/jedit/plugin.scala	Mon Aug 09 14:47:28 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/plugin.scala	Mon Aug 09 15:40:06 2010 +0200
@@ -1,6 +1,4 @@
 /*  Title:      Tools/jEdit/src/jedit/plugin.scala
-    Author:     Johannes Hölzl, TU Munich
-    Author:     Fabian Immler, TU Munich
     Author:     Makarius
 
 Main Isabelle/jEdit plugin setup.
@@ -32,11 +30,6 @@
   var session: Session = null
 
 
-  /* name */
-
-  val NAME = "Isabelle"
-
-
   /* properties */
 
   val OPTION_PREFIX = "options.isabelle."
@@ -110,7 +103,7 @@
   }
 
 
-  /* main jEdit components */  // FIXME ownership!?
+  /* main jEdit components */
 
   def jedit_buffers(): Iterator[Buffer] = jEdit.getBuffers().iterator
 
@@ -149,12 +142,12 @@
     }
 
 
-  /* manage prover */
+  /* manage prover */  // FIXME async!?
 
   private def prover_started(view: View): Boolean =
   {
     val timeout = Int_Property("startup-timeout") max 1000
-    session.start(timeout, Isabelle.isabelle_args()) match {
+    session.started(timeout, Isabelle.isabelle_args()) match {
       case Some(err) =>
         val text = new JTextArea(err); text.setEditable(false)
         Library.error_dialog(view, null, "Failed to start Isabelle process", text)
@@ -169,7 +162,10 @@
   def activate_buffer(view: View, buffer: Buffer)
   {
     if (prover_started(view)) {
-      val model = Document_Model.init(session, buffer)
+      // FIXME proper error handling
+      val (_, thy_name) = Thy_Header.split_thy_path(Isabelle.system.posix_path(buffer.getPath))
+
+      val model = Document_Model.init(session, buffer, thy_name)
       for (text_area <- jedit_text_areas(buffer))
         Document_View.init(model, text_area)
     }