merged
author paulson
Wed, 30 May 2018 23:11:28 +0100
changeset 68327 7d946d8bc058
parent 68325 57e4bd1e2e18 (diff)
parent 68326 3c71695ff7ce (current diff)
child 68329 9946707cf329
child 68339 5958e8342cfd
--- a/src/Doc/System/Server.thy	Wed May 30 23:11:12 2018 +0100
+++ b/src/Doc/System/Server.thy	Wed May 30 23:11:28 2018 +0100
@@ -516,13 +516,14 @@
   session-qualified theory name (e.g.\ \<^verbatim>\<open>HOL-ex.Seq\<close>).
 
   \<^item> \<^bold>\<open>type\<close> \<open>node_status = {ok: bool, total: int, unprocessed: int, running:
-  int, warned: int, failed: int, finished: int, consolidated: bool}\<close>
-  represents a formal theory node status of the PIDE document model. Fields
-  \<open>total\<close>, \<open>unprocessed\<close>, \<open>running\<close>, \<open>warned\<close>, \<open>failed\<close>, \<open>finished\<close> account
-  for individual commands within a theory node; \<open>ok\<close> is an abstraction for
-  \<open>failed = 0\<close>. The \<open>consolidated\<close> flag indicates whether the outermost theory
-  command structure has finished (or failed) and the final \<^theory_text>\<open>end\<close> command has
-  been checked.
+  int, warned: int, failed: int, finished: int, initialized: bool,
+  consolidated: bool}\<close> represents a formal theory node status of the PIDE
+  document model. Fields \<open>total\<close>, \<open>unprocessed\<close>, \<open>running\<close>, \<open>warned\<close>,
+  \<open>failed\<close>, \<open>finished\<close> account for individual commands within a theory node;
+  \<open>ok\<close> is an abstraction for \<open>failed = 0\<close>. The \<open>initialized\<close> flag indicates
+  that the initial \<^theory_text>\<open>theory\<close> command has been processed. The \<open>consolidated\<close>
+  flag indicates whether the outermost theory command structure has finished
+  (or failed) and the final \<^theory_text>\<open>end\<close> command has been checked.
 \<close>
 
 
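For illustration, with the extended record a fully checked theory node could be reported as the following JSON value (the counts are invented; "ok" and "total" are derived from the remaining fields):

  {"ok": true, "total": 25, "unprocessed": 0, "running": 0, "warned": 2,
   "failed": 0, "finished": 23, "initialized": true, "consolidated": true}
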
--- a/src/Doc/System/Sessions.thy	Wed May 30 23:11:12 2018 +0100
+++ b/src/Doc/System/Sessions.thy	Wed May 30 23:11:28 2018 +0100
@@ -557,7 +557,7 @@
 \<open>Usage: isabelle export [OPTIONS] SESSION
 
   Options are:
-    -D DIR       target directory for exported files (default: "export")
+    -O DIR       output directory for exported files (default: "export")
     -d DIR       include session directory
     -l           list exports
     -n           no build of session
@@ -590,7 +590,7 @@
   \<^emph>\<open>all\<close> theory exports. Multiple options \<^verbatim>\<open>-x\<close> refer to the union of all
   specified patterns.
 
-  Option \<^verbatim>\<open>-D\<close> specifies an alternative base directory for option \<^verbatim>\<open>-x\<close>: the
+  Option \<^verbatim>\<open>-O\<close> specifies an alternative output directory for option \<^verbatim>\<open>-x\<close>: the
   default is \<^verbatim>\<open>export\<close> within the current directory. Each theory creates its
   own sub-directory hierarchy, using the session-qualified theory name.
 \<close>
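An illustrative invocation under the renamed option would be

  isabelle export -O output -x "*:**" HOL-ex

which writes all theory exports of session HOL-ex below the directory "output"; previously the same directory was given with -D.
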
--- a/src/HOL/Data_Structures/AVL_Set.thy	Wed May 30 23:11:12 2018 +0100
+++ b/src/HOL/Data_Structures/AVL_Set.thy	Wed May 30 23:11:28 2018 +0100
@@ -455,80 +455,93 @@
 
 subsection \<open>Height-Size Relation\<close>
 
-text \<open>By Daniel St\"uwe\<close>
-
-fun fib_tree :: "nat \<Rightarrow> unit avl_tree" where
-"fib_tree 0 = Leaf" |
-"fib_tree (Suc 0) = Node 1 Leaf () Leaf" |
-"fib_tree (Suc(Suc n)) = Node (Suc(Suc(n))) (fib_tree (Suc n)) () (fib_tree n)"
-
-lemma [simp]: "ht (fib_tree h) = h"
-by (induction h rule: "fib_tree.induct") auto
+text \<open>By Daniel St\"uwe, Manuel Eberl and Peter Lammich.\<close>
 
-lemma [simp]: "height (fib_tree h) = h"
-by (induction h rule: "fib_tree.induct") auto
-
-lemma "avl(fib_tree h)"          
-by (induction h rule: "fib_tree.induct") auto
-
-lemma fib_tree_size1: "size1 (fib_tree h) = fib (h+2)"
-by (induction h rule: fib_tree.induct) auto
-
-lemma height_invers[simp]: 
+lemma height_invers: 
   "(height t = 0) = (t = Leaf)"
   "avl t \<Longrightarrow> (height t = Suc h) = (\<exists> l a r . t = Node (Suc h) l a r)"
 by (induction t) auto
 
-lemma fib_Suc_lt: "fib n \<le> fib (Suc n)"
-by (induction n rule: fib.induct) auto
+text \<open>Any AVL tree of height \<open>h\<close> has at least \<open>fib (h+2)\<close> leaves:\<close>
 
-lemma fib_mono: "n \<le> m \<Longrightarrow> fib n \<le> fib m"
-proof (induction n arbitrary: m rule: fib.induct )
-  case (2 m)
-  thus ?case using fib_neq_0_nat[of m] by auto
+lemma avl_fib_bound: "avl t \<Longrightarrow> height t = h \<Longrightarrow> fib (h+2) \<le> size1 t"
+proof (induction h arbitrary: t rule: fib.induct)
+  case 1 thus ?case by (simp add: height_invers)
 next
-  case (3 n m)
-  from 3 obtain m' where "m = Suc (Suc m')"
-    by (metis le_Suc_ex plus_nat.simps(2)) 
-  thus ?case using 3(1)[of "Suc m'"] 3(2)[of m'] 3(3) by auto
-qed simp
-
-lemma size1_fib_tree_mono:
-  assumes "n \<le> m"
-  shows   "size1 (fib_tree n) \<le> size1 (fib_tree m)"
-using fib_tree_size1 fib_mono[OF assms] fib_mono[of "Suc n"] add_le_mono assms by fastforce 
-
-lemma fib_tree_minimal: "avl t \<Longrightarrow> size1 (fib_tree (ht t)) \<le> size1 t"
-proof (induction "ht t" arbitrary: t rule: fib_tree.induct)
-  case (2 t)
-  from 2 obtain l a r where "t = Node (Suc 0) l a r" by (cases t) auto
-  with 2 show ?case by auto
+  case 2 thus ?case by (cases t) (auto simp: height_invers)
 next
-  case (3 h t)
-  note [simp] = 3(3)[symmetric] 
-  from 3 obtain l a r where [simp]: "t = Node (Suc (Suc h)) l a r" by (cases t) auto
-  show ?case proof (cases rule: linorder_cases[of "ht l" "ht r"]) 
-    case equal
-    with 3(3,4) have ht: "ht l = Suc h" "ht r = Suc h" by auto
-    with 3 have "size1 (fib_tree (ht l)) \<le> size1 l" by auto moreover
-    from 3(1)[of r] 3(3,4) ht(2) have "size1 (fib_tree (ht r)) \<le> size1 r" by auto ultimately
-    show ?thesis using ht size1_fib_tree_mono[of h "Suc h"] by auto
-  next
-    case greater
-    with 3(3,4) have ht: "ht l = Suc h"  "ht r = h" by auto
-    from ht 3(1,2,4) have "size1 (fib_tree (Suc h)) \<le> size1 l" by auto moreover
-    from ht 3(1,2,4) have "size1 (fib_tree h) \<le> size1 r" by auto ultimately
-    show ?thesis by auto
-  next
-    case less (* analogously *)
-    with 3 have ht: "ht l = h"  "Suc h = ht r" by auto
-    from ht 3 have "size1 (fib_tree h) \<le> size1 l" by auto moreover
-    from ht 3 have "size1 (fib_tree (Suc h)) \<le> size1 r" by auto ultimately
-    show ?thesis by auto
-  qed
-qed auto
+  case (3 h)
+  from "3.prems" obtain l a r where
+    [simp]: "t = Node (Suc(Suc h)) l a r" "avl l" "avl r"
+    and C: "
+      height r = Suc h \<and> height l = Suc h
+    \<or> height r = Suc h \<and> height l = h
+    \<or> height r = h \<and> height l = Suc h" (is "?C1 \<or> ?C2 \<or> ?C3")
+    by (cases t) (simp, fastforce)
+  {
+    assume ?C1
+    with "3.IH"(1)
+    have "fib (h + 3) \<le> size1 l" "fib (h + 3) \<le> size1 r"
+      by (simp_all add: eval_nat_numeral)
+    hence ?case by (auto simp: eval_nat_numeral)
+  } moreover {
+    assume ?C2
+    hence ?case using "3.IH"(1)[of r] "3.IH"(2)[of l] by auto
+  } moreover {
+    assume ?C3
+    hence ?case using "3.IH"(1)[of l] "3.IH"(2)[of r] by auto
+  } ultimately show ?case using C by blast
+qed
+
+lemma fib_alt_induct [consumes 1, case_names 1 2 rec]:
+  assumes "n > 0" "P 1" "P 2" "\<And>n. n > 0 \<Longrightarrow> P n \<Longrightarrow> P (Suc n) \<Longrightarrow> P (Suc (Suc n))"
+  shows   "P n"
+  using assms(1)
+proof (induction n rule: fib.induct)
+  case (3 n)
+  thus ?case using assms by (cases n) (auto simp: eval_nat_numeral)
+qed (insert assms, auto)
+
+text \<open>An exponential lower bound for @{const fib}:\<close>
 
-theorem avl_size_bound: "avl t \<Longrightarrow> fib(height t + 2) \<le> size1 t" 
-using fib_tree_minimal fib_tree_size1 by fastforce
+lemma fib_lowerbound:
+  defines "\<phi> \<equiv> (1 + sqrt 5) / 2"
+  defines "c \<equiv> 1 / \<phi> ^ 2"
+  assumes "n > 0"
+  shows   "real (fib n) \<ge> c * \<phi> ^ n"
+proof -
+  have "\<phi> > 1" by (simp add: \<phi>_def)
+  hence "c > 0" by (simp add: c_def)
+  from \<open>n > 0\<close> show ?thesis
+  proof (induction n rule: fib_alt_induct)
+    case (rec n)
+    have "c * \<phi> ^ Suc (Suc n) = \<phi> ^ 2 * (c * \<phi> ^ n)"
+      by (simp add: field_simps power2_eq_square)
+    also have "\<dots> \<le> (\<phi> + 1) * (c * \<phi> ^ n)"
+      by (rule mult_right_mono) (insert \<open>c > 0\<close>, simp_all add: \<phi>_def power2_eq_square field_simps)
+    also have "\<dots> = c * \<phi> ^ Suc n + c * \<phi> ^ n"
+      by (simp add: field_simps)
+    also have "\<dots> \<le> real (fib (Suc n)) + real (fib n)"
+      by (intro add_mono rec.IH)
+    finally show ?case by simp
+  qed (insert \<open>\<phi> > 1\<close>, simp_all add: c_def power2_eq_square eval_nat_numeral)
+qed
+
+text \<open>The size of an AVL tree is (at least) exponential in its height:\<close>
+
+lemma avl_lowerbound:
+  defines "\<phi> \<equiv> (1 + sqrt 5) / 2"
+  assumes "avl t"
+  shows   "real (size1 t) \<ge> \<phi> ^ (height t)"
+proof -
+  have "\<phi> > 0" by(simp add: \<phi>_def add_pos_nonneg)
+  hence "\<phi> ^ height t = (1 / \<phi> ^ 2) * \<phi> ^ (height t + 2)"
+    by(simp add: field_simps power2_eq_square)
+  also have "\<dots> \<le> real (fib (height t + 2))"
+    using fib_lowerbound[of "height t + 2"] by(simp add: \<phi>_def)
+  also have "\<dots> \<le> real (size1 t)"
+    using avl_fib_bound[of t "height t"] assms by simp
+  finally show ?thesis .
+qed
 
 end
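Taking logarithms in avl_lowerbound (with the golden ratio φ = (1 + √5)/2 ≈ 1.618) yields the familiar statement that AVL trees have logarithmic height; in LaTeX notation, this consequence reads:

  \varphi^{\mathrm{height}\,t} \le \mathrm{size1}\,t
  \quad\Longrightarrow\quad
  \mathrm{height}\,t \le \log_{\varphi}(\mathrm{size1}\,t) \approx 1.44\,\log_2(\mathrm{size1}\,t)
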
--- a/src/HOL/Library/Fun_Lexorder.thy	Wed May 30 23:11:12 2018 +0100
+++ b/src/HOL/Library/Fun_Lexorder.thy	Wed May 30 23:11:28 2018 +0100
@@ -1,6 +1,6 @@
 (* Author: Florian Haftmann, TU Muenchen *)
 
-section \<open>Lexical order on functions\<close>
+section \<open>Lexicographic order on functions\<close>
 
 theory Fun_Lexorder
 imports Main
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/List_Lexorder.thy	Wed May 30 23:11:28 2018 +0100
@@ -0,0 +1,121 @@
+(*  Title:      HOL/Library/List_Lexorder.thy
+    Author:     Norbert Voelker
+*)
+
+section \<open>Lexicographic order on lists\<close>
+
+theory List_Lexorder
+imports Main
+begin
+
+instantiation list :: (ord) ord
+begin
+
+definition
+  list_less_def: "xs < ys \<longleftrightarrow> (xs, ys) \<in> lexord {(u, v). u < v}"
+
+definition
+  list_le_def: "(xs :: _ list) \<le> ys \<longleftrightarrow> xs < ys \<or> xs = ys"
+
+instance ..
+
+end
+
+instance list :: (order) order
+proof
+  fix xs :: "'a list"
+  show "xs \<le> xs" by (simp add: list_le_def)
+next
+  fix xs ys zs :: "'a list"
+  assume "xs \<le> ys" and "ys \<le> zs"
+  then show "xs \<le> zs"
+    apply (auto simp add: list_le_def list_less_def)
+    apply (rule lexord_trans)
+    apply (auto intro: transI)
+    done
+next
+  fix xs ys :: "'a list"
+  assume "xs \<le> ys" and "ys \<le> xs"
+  then show "xs = ys"
+    apply (auto simp add: list_le_def list_less_def)
+    apply (rule lexord_irreflexive [THEN notE])
+    defer
+    apply (rule lexord_trans)
+    apply (auto intro: transI)
+    done
+next
+  fix xs ys :: "'a list"
+  show "xs < ys \<longleftrightarrow> xs \<le> ys \<and> \<not> ys \<le> xs"
+    apply (auto simp add: list_less_def list_le_def)
+    defer
+    apply (rule lexord_irreflexive [THEN notE])
+    apply auto
+    apply (rule lexord_irreflexive [THEN notE])
+    defer
+    apply (rule lexord_trans)
+    apply (auto intro: transI)
+    done
+qed
+
+instance list :: (linorder) linorder
+proof
+  fix xs ys :: "'a list"
+  have "(xs, ys) \<in> lexord {(u, v). u < v} \<or> xs = ys \<or> (ys, xs) \<in> lexord {(u, v). u < v}"
+    by (rule lexord_linear) auto
+  then show "xs \<le> ys \<or> ys \<le> xs"
+    by (auto simp add: list_le_def list_less_def)
+qed
+
+instantiation list :: (linorder) distrib_lattice
+begin
+
+definition "(inf :: 'a list \<Rightarrow> _) = min"
+
+definition "(sup :: 'a list \<Rightarrow> _) = max"
+
+instance
+  by standard (auto simp add: inf_list_def sup_list_def max_min_distrib2)
+
+end
+
+lemma not_less_Nil [simp]: "\<not> x < []"
+  by (simp add: list_less_def)
+
+lemma Nil_less_Cons [simp]: "[] < a # x"
+  by (simp add: list_less_def)
+
+lemma Cons_less_Cons [simp]: "a # x < b # y \<longleftrightarrow> a < b \<or> a = b \<and> x < y"
+  by (simp add: list_less_def)
+
+lemma le_Nil [simp]: "x \<le> [] \<longleftrightarrow> x = []"
+  unfolding list_le_def by (cases x) auto
+
+lemma Nil_le_Cons [simp]: "[] \<le> x"
+  unfolding list_le_def by (cases x) auto
+
+lemma Cons_le_Cons [simp]: "a # x \<le> b # y \<longleftrightarrow> a < b \<or> a = b \<and> x \<le> y"
+  unfolding list_le_def by auto
+
+instantiation list :: (order) order_bot
+begin
+
+definition "bot = []"
+
+instance
+  by standard (simp add: bot_list_def)
+
+end
+
+lemma less_list_code [code]:
+  "xs < ([]::'a::{equal, order} list) \<longleftrightarrow> False"
+  "[] < (x::'a::{equal, order}) # xs \<longleftrightarrow> True"
+  "(x::'a::{equal, order}) # xs < y # ys \<longleftrightarrow> x < y \<or> x = y \<and> xs < ys"
+  by simp_all
+
+lemma less_eq_list_code [code]:
+  "x # xs \<le> ([]::'a::{equal, order} list) \<longleftrightarrow> False"
+  "[] \<le> (xs::'a::{equal, order} list) \<longleftrightarrow> True"
+  "(x::'a::{equal, order}) # xs \<le> y # ys \<longleftrightarrow> x < y \<or> x = y \<and> xs \<le> ys"
+  by simp_all
+
+end
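A few worked instances of the order, following the simp lemmas above: [] < [0] (Nil_less_Cons); [1,2,3] < [1,3], since the heads are equal and 2 < 3 (Cons_less_Cons); and no list is below [] (not_less_Nil).
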
--- a/src/HOL/Library/List_lexord.thy	Wed May 30 23:11:12 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,121 +0,0 @@
-(*  Title:      HOL/Library/List_lexord.thy
-    Author:     Norbert Voelker
-*)
-
-section \<open>Lexicographic order on lists\<close>
-
-theory List_lexord
-imports Main
-begin
-
-instantiation list :: (ord) ord
-begin
-
-definition
-  list_less_def: "xs < ys \<longleftrightarrow> (xs, ys) \<in> lexord {(u, v). u < v}"
-
-definition
-  list_le_def: "(xs :: _ list) \<le> ys \<longleftrightarrow> xs < ys \<or> xs = ys"
-
-instance ..
-
-end
-
-instance list :: (order) order
-proof
-  fix xs :: "'a list"
-  show "xs \<le> xs" by (simp add: list_le_def)
-next
-  fix xs ys zs :: "'a list"
-  assume "xs \<le> ys" and "ys \<le> zs"
-  then show "xs \<le> zs"
-    apply (auto simp add: list_le_def list_less_def)
-    apply (rule lexord_trans)
-    apply (auto intro: transI)
-    done
-next
-  fix xs ys :: "'a list"
-  assume "xs \<le> ys" and "ys \<le> xs"
-  then show "xs = ys"
-    apply (auto simp add: list_le_def list_less_def)
-    apply (rule lexord_irreflexive [THEN notE])
-    defer
-    apply (rule lexord_trans)
-    apply (auto intro: transI)
-    done
-next
-  fix xs ys :: "'a list"
-  show "xs < ys \<longleftrightarrow> xs \<le> ys \<and> \<not> ys \<le> xs"
-    apply (auto simp add: list_less_def list_le_def)
-    defer
-    apply (rule lexord_irreflexive [THEN notE])
-    apply auto
-    apply (rule lexord_irreflexive [THEN notE])
-    defer
-    apply (rule lexord_trans)
-    apply (auto intro: transI)
-    done
-qed
-
-instance list :: (linorder) linorder
-proof
-  fix xs ys :: "'a list"
-  have "(xs, ys) \<in> lexord {(u, v). u < v} \<or> xs = ys \<or> (ys, xs) \<in> lexord {(u, v). u < v}"
-    by (rule lexord_linear) auto
-  then show "xs \<le> ys \<or> ys \<le> xs"
-    by (auto simp add: list_le_def list_less_def)
-qed
-
-instantiation list :: (linorder) distrib_lattice
-begin
-
-definition "(inf :: 'a list \<Rightarrow> _) = min"
-
-definition "(sup :: 'a list \<Rightarrow> _) = max"
-
-instance
-  by standard (auto simp add: inf_list_def sup_list_def max_min_distrib2)
-
-end
-
-lemma not_less_Nil [simp]: "\<not> x < []"
-  by (simp add: list_less_def)
-
-lemma Nil_less_Cons [simp]: "[] < a # x"
-  by (simp add: list_less_def)
-
-lemma Cons_less_Cons [simp]: "a # x < b # y \<longleftrightarrow> a < b \<or> a = b \<and> x < y"
-  by (simp add: list_less_def)
-
-lemma le_Nil [simp]: "x \<le> [] \<longleftrightarrow> x = []"
-  unfolding list_le_def by (cases x) auto
-
-lemma Nil_le_Cons [simp]: "[] \<le> x"
-  unfolding list_le_def by (cases x) auto
-
-lemma Cons_le_Cons [simp]: "a # x \<le> b # y \<longleftrightarrow> a < b \<or> a = b \<and> x \<le> y"
-  unfolding list_le_def by auto
-
-instantiation list :: (order) order_bot
-begin
-
-definition "bot = []"
-
-instance
-  by standard (simp add: bot_list_def)
-
-end
-
-lemma less_list_code [code]:
-  "xs < ([]::'a::{equal, order} list) \<longleftrightarrow> False"
-  "[] < (x::'a::{equal, order}) # xs \<longleftrightarrow> True"
-  "(x::'a::{equal, order}) # xs < y # ys \<longleftrightarrow> x < y \<or> x = y \<and> xs < ys"
-  by simp_all
-
-lemma less_eq_list_code [code]:
-  "x # xs \<le> ([]::'a::{equal, order} list) \<longleftrightarrow> False"
-  "[] \<le> (xs::'a::{equal, order} list) \<longleftrightarrow> True"
-  "(x::'a::{equal, order}) # xs \<le> y # ys \<longleftrightarrow> x < y \<or> x = y \<and> xs \<le> ys"
-  by simp_all
-
-end
--- a/src/HOL/List.thy	Wed May 30 23:11:12 2018 +0100
+++ b/src/HOL/List.thy	Wed May 30 23:11:28 2018 +0100
@@ -429,7 +429,6 @@
   (*"_lc_let" :: "letbinds => lc_qual"  ("let _")*)
   "_lc_end" :: "lc_quals" ("]")
   "_lc_quals" :: "lc_qual \<Rightarrow> lc_quals \<Rightarrow> lc_quals"  (", __")
-  "_lc_abs" :: "'a => 'b list => 'b list"
 
 syntax (ASCII)
   "_lc_gen" :: "'a \<Rightarrow> 'a list \<Rightarrow> lc_qual"  ("_ <- _")
--- a/src/HOL/ROOT	Wed May 30 23:11:12 2018 +0100
+++ b/src/HOL/ROOT	Wed May 30 23:11:28 2018 +0100
@@ -29,7 +29,7 @@
     Library
     (*conflicting type class instantiations and dependent applications*)
     Finite_Lattice
-    List_lexord
+    List_Lexorder
     Prefix_Order
     Product_Lexorder
     Product_Order
--- a/src/HOL/ex/Radix_Sort.thy	Wed May 30 23:11:12 2018 +0100
+++ b/src/HOL/ex/Radix_Sort.thy	Wed May 30 23:11:28 2018 +0100
@@ -2,7 +2,7 @@
 
 theory Radix_Sort
 imports
-  "HOL-Library.List_lexord" 
+  "HOL-Library.List_Lexorder" 
   "HOL-Library.Sublist" 
   "HOL-Library.Multiset" 
 begin
--- a/src/Pure/General/exn.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/General/exn.scala	Wed May 30 23:11:28 2018 +0100
@@ -81,6 +81,10 @@
     found_interrupt
   }
 
+  def interruptible_capture[A](e: => A): Result[A] =
+    try { Res(e) }
+    catch { case exn: Throwable if !is_interrupt(exn) => Exn[A](exn) }
+
   object Interrupt
   {
     def apply(): Throwable = new InterruptedException
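A minimal usage sketch of the new interruptible_capture; the computation expensive_check and the printing are hypothetical, while Res, Exn and Exn.message are existing parts of object Exn:

  // evaluate a computation: ordinary exceptions become an Exn result,
  // interrupts are re-thrown to the caller
  val result: Exn.Result[Int] = Exn.interruptible_capture { expensive_check() }
  result match {
    case Exn.Res(n) => println("result: " + n)
    case Exn.Exn(e) => println("failed: " + Exn.message(e))
  }
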
--- a/src/Pure/General/graph.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/General/graph.scala	Wed May 30 23:11:28 2018 +0100
@@ -66,6 +66,7 @@
 
   def is_empty: Boolean = rep.isEmpty
   def defined(x: Key): Boolean = rep.isDefinedAt(x)
+  def domain: Set[Key] = rep.keySet
 
   def iterator: Iterator[(Key, Entry)] = rep.iterator
 
--- a/src/Pure/PIDE/command.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/command.scala	Wed May 30 23:11:28 2018 +0100
@@ -260,6 +260,8 @@
     exports: Exports = Exports.empty,
     markups: Markups = Markups.empty)
   {
+    def initialized: Boolean = status.exists(markup => markup.name == Markup.INITIALIZED)
+
     lazy val consolidated: Boolean =
       status.exists(markup => markup.name == Markup.CONSOLIDATED)
 
@@ -565,6 +567,8 @@
   val is_unparsed: Boolean = span.content.exists(_.is_unparsed)
   val is_unfinished: Boolean = span.content.exists(_.is_unfinished)
 
+  def potentially_initialized: Boolean = span.name == Thy_Header.THEORY
+
 
   /* blobs */
 
--- a/src/Pure/PIDE/document.ML	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/document.ML	Wed May 30 23:11:28 2018 +0100
@@ -633,7 +633,9 @@
     val parents =
       if null parents_reports then [Theory.get_pure ()] else map #1 parents_reports;
     val _ = Position.reports (map #2 parents_reports);
-  in Resources.begin_theory master_dir header parents end;
+    val thy = Resources.begin_theory master_dir header parents;
+    val _ = Output.status (Markup.markup_only Markup.initialized);
+  in thy end;
 
 fun check_root_theory node =
   let
--- a/src/Pure/PIDE/document.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/document.scala	Wed May 30 23:11:28 2018 +0100
@@ -383,6 +383,8 @@
       new Nodes(graph3.map_node(name, _ => node))
     }
 
+    def domain: Set[Node.Name] = graph.domain
+
     def iterator: Iterator[(Node.Name, Node)] =
       graph.iterator.map({ case (name, (node, _)) => (name, node) })
 
@@ -529,7 +531,6 @@
 
     def node_name: Node.Name
     def node: Node
-    def node_consolidated: Boolean
 
     def commands_loading: List[Command]
     def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range]
@@ -920,6 +921,13 @@
       }
     }
 
+    def node_initialized(version: Version, name: Node.Name): Boolean =
+      name.is_theory &&
+      (version.nodes(name).commands.iterator.find(_.potentially_initialized) match {
+        case None => false
+        case Some(command) => command_states(version, command).headOption.exists(_.initialized)
+      })
+
     def node_consolidated(version: Version, name: Node.Name): Boolean =
       !name.is_theory ||
         version.nodes(name).commands.reverse.iterator.
@@ -969,8 +977,6 @@
         val node_name: Node.Name = name
         val node: Node = version.nodes(name)
 
-        def node_consolidated: Boolean = state.node_consolidated(version, node_name)
-
         val commands_loading: List[Command] =
           if (node_name.is_theory) Nil
           else version.nodes.commands_loading(node_name)
--- a/src/Pure/PIDE/markup.ML	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/markup.ML	Wed May 30 23:11:28 2018 +0100
@@ -167,6 +167,7 @@
   val runningN: string val running: T
   val finishedN: string val finished: T
   val failedN: string val failed: T
+  val initializedN: string val initialized: T
   val consolidatedN: string val consolidated: T
   val exec_idN: string
   val initN: string
@@ -580,6 +581,8 @@
 val (runningN, running) = markup_elem "running";
 val (finishedN, finished) = markup_elem "finished";
 val (failedN, failed) = markup_elem "failed";
+
+val (initializedN, initialized) = markup_elem "initialized";
 val (consolidatedN, consolidated) = markup_elem "consolidated";
 
 
--- a/src/Pure/PIDE/markup.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/markup.scala	Wed May 30 23:11:28 2018 +0100
@@ -426,6 +426,8 @@
   val RUNNING = "running"
   val FINISHED = "finished"
   val FAILED = "failed"
+
+  val INITIALIZED = "initialized"
   val CONSOLIDATED = "consolidated"
 
 
--- a/src/Pure/PIDE/protocol.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/protocol.scala	Wed May 30 23:11:28 2018 +0100
@@ -142,7 +142,8 @@
   /* node status */
 
   sealed case class Node_Status(
-    unprocessed: Int, running: Int, warned: Int, failed: Int, finished: Int, consolidated: Boolean)
+    unprocessed: Int, running: Int, warned: Int, failed: Int, finished: Int,
+    initialized: Boolean, consolidated: Boolean)
   {
     def ok: Boolean = failed == 0
     def total: Int = unprocessed + running + warned + failed + finished
@@ -150,15 +151,16 @@
     def json: JSON.Object.T =
       JSON.Object("ok" -> ok, "total" -> total, "unprocessed" -> unprocessed,
         "running" -> running, "warned" -> warned, "failed" -> failed, "finished" -> finished,
-        "consolidated" -> consolidated)
+        "initialized" -> initialized, "consolidated" -> consolidated)
   }
 
   def node_status(
     state: Document.State,
     version: Document.Version,
-    name: Document.Node.Name,
-    node: Document.Node): Node_Status =
+    name: Document.Node.Name): Node_Status =
   {
+    val node = version.nodes(name)
+
     var unprocessed = 0
     var running = 0
     var warned = 0
@@ -174,9 +176,10 @@
       else if (status.is_finished) finished += 1
       else unprocessed += 1
     }
+    val initialized = state.node_initialized(version, name)
     val consolidated = state.node_consolidated(version, name)
 
-    Node_Status(unprocessed, running, warned, failed, finished, consolidated)
+    Node_Status(unprocessed, running, warned, failed, finished, initialized, consolidated)
   }
 
 
--- a/src/Pure/PIDE/resources.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/PIDE/resources.scala	Wed May 30 23:11:28 2018 +0100
@@ -300,7 +300,7 @@
   private def require_thy(
     progress: Progress,
     initiators: List[Document.Node.Name],
-    required: Dependencies,
+    dependencies: Dependencies,
     thy: (Document.Node.Name, Position.T)): Dependencies =
   {
     val (name, pos) = thy
@@ -309,10 +309,10 @@
       "The error(s) above occurred for theory " + quote(name.theory) +
         required_by(initiators) + Position.here(pos)
 
-    if (required.seen(name)) required
+    if (dependencies.seen(name)) dependencies
     else {
-      val required1 = required + name
-      if (session_base.loaded_theory(name)) required1
+      val dependencies1 = dependencies + name
+      if (session_base.loaded_theory(name)) dependencies1
       else {
         try {
           if (initiators.contains(name)) error(cycle_msg(initiators))
@@ -322,11 +322,11 @@
             try { check_thy(name, Token.Pos.file(name.node)).cat_errors(message) }
             catch { case ERROR(msg) => cat_error(msg, message) }
           Document.Node.Entry(name, header) ::
-            require_thys(progress, name :: initiators, required1, header.imports)
+            require_thys(progress, name :: initiators, dependencies1, header.imports)
         }
         catch {
           case e: Throwable =>
-            Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))) :: required1
+            Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))) :: dependencies1
         }
       }
     }
@@ -335,9 +335,9 @@
   private def require_thys(
       progress: Progress,
       initiators: List[Document.Node.Name],
-      required: Dependencies,
+      dependencies: Dependencies,
       thys: List[(Document.Node.Name, Position.T)]): Dependencies =
-    (required /: thys)(require_thy(progress, initiators, _, _))
+    (dependencies /: thys)(require_thy(progress, initiators, _, _))
 
   def dependencies(
       thys: List[(Document.Node.Name, Position.T)],
--- a/src/Pure/Thy/export.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/Thy/export.scala	Wed May 30 23:11:28 2018 +0100
@@ -251,7 +251,7 @@
 Usage: isabelle export [OPTIONS] SESSION
 
   Options are:
-    -D DIR       target directory for exported files (default: """ + default_export_dir + """)
+    -O DIR       output directory for exported files (default: """ + default_export_dir + """)
     -d DIR       include session directory
     -l           list exports
     -n           no build of session
@@ -266,7 +266,7 @@
   (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
   and variants {pattern1,pattern2,pattern3}.
 """,
-      "D:" -> (arg => export_dir = Path.explode(arg)),
+      "O:" -> (arg => export_dir = Path.explode(arg)),
       "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
       "l" -> (_ => export_list = true),
       "n" -> (_ => no_build = true),
--- a/src/Pure/Thy/sessions.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/Thy/sessions.scala	Wed May 30 23:11:28 2018 +0100
@@ -146,6 +146,7 @@
     doc_names: List[String] = Nil,
     global_theories: Map[String, String] = Map.empty,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
+    used_theories: List[Document.Node.Name] = Nil,
     known: Known = Known.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
     imported_sources: List[(Path, SHA1.Digest)] = Nil,
@@ -349,6 +350,7 @@
                 doc_names = doc_names,
                 global_theories = global_theories,
                 loaded_theories = dependencies.loaded_theories,
+                used_theories = dependencies.theories,
                 known = known,
                 overall_syntax = overall_syntax,
                 imported_sources = check_sources(imported_files),
--- a/src/Pure/Thy/thy_resources.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/Thy/thy_resources.scala	Wed May 30 23:11:28 2018 +0100
@@ -60,6 +60,7 @@
     val version: Document.Version,
     val nodes: List[(Document.Node.Name, Protocol.Node_Status)])
   {
+    def node_names: List[Document.Node.Name] = nodes.map(_._1)
     def ok: Boolean = nodes.forall({ case (_, st) => st.ok })
 
     def messages(node_name: Document.Node.Name): List[(XML.Tree, Position.T)] =
@@ -73,6 +74,13 @@
        } yield (tree, pos)).toList
     }
 
+    def markup_to_XML(node_name: Document.Node.Name,
+      range: Text.Range = Text.Range.full,
+      elements: Markup.Elements = Markup.Elements.full): XML.Body =
+    {
+      state.markup_to_XML(version, node_name, range, elements)
+    }
+
     def exports(node_name: Document.Node.Name): List[Export.Entry] =
     {
       val node = version.nodes(node_name)
@@ -125,7 +133,7 @@
           case Some(version) if dep_theories.forall(state.node_consolidated(version, _)) =>
             val nodes =
               for (name <- dep_theories)
-              yield (name -> Protocol.node_status(state, version, name, version.nodes(name)))
+              yield (name -> Protocol.node_status(state, version, name))
             try { result.fulfill(Theories_Result(state, version, nodes)) }
             catch { case _: IllegalStateException => }
           case _ =>
--- a/src/Pure/Tools/dump.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Pure/Tools/dump.scala	Wed May 30 23:11:28 2018 +0100
@@ -9,12 +9,67 @@
 
 object Dump
 {
+  /* aspects */
+
+  sealed case class Aspect_Args(
+    options: Options, progress: Progress, output_dir: Path, result: Thy_Resources.Theories_Result)
+  {
+    def write(node_name: Document.Node.Name, file_name: String, bytes: Bytes)
+    {
+      val path = output_dir + Path.basic(node_name.theory) + Path.basic(file_name)
+      Isabelle_System.mkdirs(path.dir)
+      Bytes.write(path, bytes)
+    }
+
+    def write(node_name: Document.Node.Name, file_name: String, text: String)
+    {
+      write(node_name, file_name, Bytes(text))
+    }
+  }
+
+  sealed case class Aspect(name: String, description: String, operation: Aspect_Args => Unit)
+
+  private val known_aspects =
+    List(
+      Aspect("list", "list theory nodes",
+        { case args =>
+            for (node_name <- args.result.node_names) args.progress.echo(node_name.toString)
+        }),
+      Aspect("messages", "output messages (YXML format)",
+        { case args =>
+            for (node_name <- args.result.node_names) {
+              args.write(node_name, "messages.yxml",
+                YXML.string_of_body(args.result.messages(node_name).iterator.map(_._1).toList))
+            }
+        }),
+      Aspect("markup", "PIDE markup (YXML format)",
+        { case args =>
+            for (node_name <- args.result.node_names) {
+              args.write(node_name, "markup.yxml",
+                YXML.string_of_body(args.result.markup_to_XML(node_name)))
+            }
+        })
+    )
+
+  def show_aspects: String =
+    cat_lines(known_aspects.sortBy(_.name).map(aspect => aspect.name + " - " + aspect.description))
+
+  def the_aspect(name: String): Aspect =
+    known_aspects.find(aspect => aspect.name == name) getOrElse
+      error("Unknown aspect " + quote(name))
+
+
+  /* dump */
+
+  val default_output_dir = Path.explode("dump")
+
   def dump(options: Options, logic: String,
-    consume: Thy_Resources.Theories_Result => Unit = _ => (),
+    aspects: List[Aspect] = Nil,
     progress: Progress = No_Progress,
     log: Logger = No_Logger,
     dirs: List[Path] = Nil,
     select_dirs: List[Path] = Nil,
+    output_dir: Path = default_output_dir,
     verbose: Boolean = false,
     system_mode: Boolean = false,
     selection: Sessions.Selection = Sessions.Selection.empty): Process_Result =
@@ -24,22 +79,37 @@
 
     val dump_options = options.int.update("completion_limit", 0).bool.update("ML_statistics", false)
 
+
+    /* dependencies */
+
     val deps =
       Sessions.load_structure(dump_options, dirs = dirs, select_dirs = select_dirs).
         selection_deps(selection)
 
+    val include_sessions =
+      deps.sessions_structure.imports_topological_order
+
+    val use_theories =
+      deps.sessions_structure.build_topological_order.
+        flatMap(session_name => deps.session_bases(session_name).used_theories.map(_.theory))
+
+
+    /* session */
+
     val session =
       Thy_Resources.start_session(dump_options, logic, session_dirs = dirs,
-        include_sessions = deps.sessions_structure.imports_topological_order,
-        progress = progress, log = log)
+        include_sessions = include_sessions, progress = progress, log = log)
+
+    val theories_result = session.use_theories(use_theories, progress = progress)
+    val session_result = session.stop()
+
 
-    val theories = deps.all_known.theory_graph.topological_order.map(_.theory)
-    val theories_result = session.use_theories(theories, progress = progress)
+    /* dump aspects */
 
-    try { consume(theories_result) }
-    catch { case exn: Throwable => session.stop (); throw exn }
+    val aspect_args = Aspect_Args(dump_options, progress, output_dir, theories_result)
+    aspects.foreach(_.operation(aspect_args))
 
-    session.stop()
+    session_result
   }
 
 
@@ -48,8 +118,10 @@
   val isabelle_tool =
     Isabelle_Tool("dump", "dump build database produced by PIDE session.", args =>
     {
+      var aspects: List[Aspect] = Nil
       var base_sessions: List[String] = Nil
       var select_dirs: List[Path] = Nil
+      var output_dir = default_output_dir
       var requirements = false
       var exclude_session_groups: List[String] = Nil
       var all_sessions = false
@@ -65,8 +137,10 @@
 Usage: isabelle dump [OPTIONS] [SESSIONS ...]
 
   Options are:
+    -A NAMES     dump named aspects (comma-separated list, see below)
     -B NAME      include session NAME and all descendants
     -D DIR       include session directory and select its sessions
+    -O DIR       output directory for dumped files (default: """ + default_output_dir + """)
     -R           operate on requirements of selected sessions
     -X NAME      exclude sessions from group NAME and all descendants
     -a           select all sessions
@@ -78,9 +152,14 @@
     -v           verbose
     -x NAME      exclude session NAME and all descendants
 
-  Dump build database (PIDE markup etc.) based on dynamic session.""",
+  Dump build database produced by PIDE session. The following dump aspects
+  are known (option -A):
+
+""" + Library.prefix_lines("    ", show_aspects) + "\n",
+      "A:" -> (arg => aspects = Library.distinct(space_explode(',', arg)).map(the_aspect(_))),
       "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
       "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
+      "O:" -> (arg => output_dir = Path.explode(arg)),
       "R" -> (_ => requirements = true),
       "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
       "a" -> (_ => all_sessions = true),
@@ -96,18 +175,13 @@
 
       val progress = new Console_Progress(verbose = verbose)
 
-      def consume(theories_result: Thy_Resources.Theories_Result)
-      {
-        // FIXME
-        for ((node, _) <- theories_result.nodes) progress.echo(node.toString)
-      }
-
       val result =
         dump(options, logic,
-          consume = consume _,
+          aspects = aspects,
           progress = progress,
           dirs = dirs,
           select_dirs = select_dirs,
+          output_dir = output_dir,
           verbose = verbose,
           selection = Sessions.Selection(
             requirements = requirements,
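The Aspect/Aspect_Args combinators above allow further dumps to be added; a hypothetical extra aspect, written against the same API (node_names, write), could look like this:

  // write the session-qualified theory name of each node to a small text file
  Dump.Aspect("theory_names", "write theory names",
    { case args =>
        for (node_name <- args.result.node_names)
          args.write(node_name, "theory_name.txt", node_name.toString)
    })

Registering it in known_aspects would make it selectable on the command line as -A theory_names.
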
--- a/src/Tools/jEdit/src/theories_dockable.scala	Wed May 30 23:11:12 2018 +0100
+++ b/src/Tools/jEdit/src/theories_dockable.scala	Wed May 30 23:11:28 2018 +0100
@@ -228,21 +228,17 @@
     val snapshot = PIDE.session.snapshot()
     val nodes = snapshot.version.nodes
 
-    val iterator =
-      restriction match {
-        case Some(names) => names.iterator.map(name => (name, nodes(name)))
-        case None => nodes.iterator
-      }
     val nodes_status1 =
-      (nodes_status /: iterator)({ case (status, (name, node)) =>
-          if (PIDE.resources.is_hidden(name) ||
-              PIDE.resources.session_base.loaded_theory(name) ||
-              node.is_empty) status
-          else {
-            val st = Protocol.node_status(snapshot.state, snapshot.version, name, node)
-            status + (name -> st)
-          }
-      })
+      (nodes_status /: (restriction.getOrElse(nodes.domain).iterator))(
+        { case (status, name) =>
+            if (PIDE.resources.is_hidden(name) ||
+                PIDE.resources.session_base.loaded_theory(name) ||
+                nodes(name).is_empty) status
+            else {
+              val st = Protocol.node_status(snapshot.state, snapshot.version, name)
+              status + (name -> st)
+            }
+        })
 
     val nodes_status2 =
       nodes_status1 -- nodes_status1.keysIterator.filter(nodes.is_suppressed(_))