merged
author paulson
Mon, 04 Jun 2018 21:00:21 +0100
changeset 68372 8e9da2d09dc6
parent 68370 bcdc47c9d4af
parent 68371 17c3b22a9575
child 68373 f254e383bfe9
merged
--- a/NEWS	Mon Jun 04 21:00:12 2018 +0100
+++ b/NEWS	Mon Jun 04 21:00:21 2018 +0100
@@ -71,6 +71,9 @@
 
 *** Isabelle/jEdit Prover IDE ***
 
+* Slightly more parallel checking, notably for high-priority print
+functions (e.g. State output).
+
 * The view title is set dynamically, according to the Isabelle
 distribution and the logic session name. The user can override this via
 set-view-title (stored persistently in $JEDIT_SETTINGS/perspective.xml).
@@ -86,16 +89,16 @@
 E.g. "Prob" may be completed to "HOL-Probability.Probability".
 
 * The command-line tool "isabelle jedit" provides more flexible options
-for session selection:
-  - options -P opens the parent session image of -l
-  - options -A and -B modify the meaning of -l to produce a base
-    image on the spot, based on the specified ancestor (or parent)
-  - option -F focuses on the specified logic session
-  - option -R has changed: it only opens the session ROOT entry
-  - option -S sets up the development environment to edit the
-    specified session: it abbreviates -B -F -R -l
+for session management:
+  - option -R builds an auxiliary logic image with all required theories
+    from other sessions, relative to an ancestor session given by option
+    -A (default: parent)
+  - option -S is like -R, with a focus on the selected session and its
+    descendants (this reduces startup time for big projects like AFP)
 
   Examples:
+    isabelle jedit -R HOL-Number_Theory
+    isabelle jedit -R HOL-Number_Theory -A HOL
     isabelle jedit -d '$AFP' -S Formal_SSA -A HOL
     isabelle jedit -d '$AFP' -S Formal_SSA -A HOL-Analysis
 
@@ -315,8 +318,10 @@
 * Theory List: functions "sorted_wrt" and "sorted" now compare every
   element in a list to all following elements, not just the next one.
 
-* Theory List: the non-standard filter-syntax "[x <- xs. P]" is
-  deprecated and is currently only available as input syntax anymore.
+* Theory List: Syntax:
+  - filter-syntax "[x <- xs. P]" is no longer output syntax
+    but only input syntax.
+  - list comprehension syntax now supports tuple patterns in "pat <- xs".
 
 * Removed nat-int transfer machinery. Rare INCOMPATIBILITY.
 
--- a/src/Doc/JEdit/JEdit.thy	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Doc/JEdit/JEdit.thy	Mon Jun 04 21:00:21 2018 +0100
@@ -228,15 +228,12 @@
 \<open>Usage: isabelle jedit [OPTIONS] [FILES ...]
 
   Options are:
-    -A           specify ancestor for base session image (default: parent)
-    -B           use base session image, with theories from other sessions
-    -F           focus on selected logic session: ignore unrelated theories
+    -A NAME      ancestor session for options -R and -S (default: parent)
     -D NAME=X    set JVM system property
     -J OPTION    add JVM runtime option
                  (default $JEDIT_JAVA_SYSTEM_OPTIONS $JEDIT_JAVA_OPTIONS)
-    -P           use parent session image
-    -R           open ROOT entry of logic session
-    -S NAME      edit specified logic session, abbreviates -B -F -R -l NAME
+    -R NAME      build image with requirements from other sessions
+    -S NAME      like option -R, with focus on selected session
     -b           build only
     -d DIR       include session directory
     -f           fresh build
@@ -261,18 +258,15 @@
   The \<^verbatim>\<open>-n\<close> option bypasses the implicit build process for the selected
   session image.
 
-  Option \<^verbatim>\<open>-P\<close> and \<^verbatim>\<open>-B\<close> and are mutually exclusive and modify the meaning of
-  option \<^verbatim>\<open>-l\<close> as follows. Option \<^verbatim>\<open>-P\<close> opens the parent session image. Option
-  \<^verbatim>\<open>-B\<close> prepares a logic image on the spot, based on the required theory
-  imports from other sessions, relative to an ancestor session given by option
-  \<^verbatim>\<open>-A\<close> (default: parent session).
+  Option \<^verbatim>\<open>-R\<close> builds an auxiliary logic image with all required theories from
+  \<^emph>\<open>other\<close> sessions, relative to an ancestor session given by option \<^verbatim>\<open>-A\<close>
+  (default: parent session). Option \<^verbatim>\<open>-R\<close> also opens the session \<^verbatim>\<open>ROOT\<close> entry
+  in the editor, to facilitate editing of the main session.
 
-  Option \<^verbatim>\<open>-F\<close> focuses on the effective logic session: the accessible
-  namespace of theories is restricted to sessions that are connected to it.
-
-  Option \<^verbatim>\<open>-R\<close> opens the ROOT entry of the specified logic session in the
-  editor. Option \<^verbatim>\<open>-S\<close> sets up the development environment to edit the
-  specified session: it abbreviates \<^verbatim>\<open>-B\<close> \<^verbatim>\<open>-F\<close> \<^verbatim>\<open>-R\<close> \<^verbatim>\<open>-l\<close>.
+  Option \<^verbatim>\<open>-S\<close> is like \<^verbatim>\<open>-R\<close>, with a focus on the selected session and its
+  descendants: the namespace of accessible theories is restricted accordingly.
+  This reduces startup time for big projects, notably the ``Archive of Formal
+  Proofs''.
 
   The \<^verbatim>\<open>-m\<close> option specifies additional print modes for the prover process.
   Note that the system option @{system_option_ref jedit_print_mode} allows to
--- a/src/Doc/Main/Main_Doc.thy	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Doc/Main/Main_Doc.thy	Mon Jun 04 21:00:21 2018 +0100
@@ -571,13 +571,15 @@
 \<open>[x\<^sub>1,\<dots>,x\<^sub>n]\<close> & \<open>x\<^sub>1 # \<dots> # x\<^sub>n # []\<close>\\
 @{term"[m..<n]"} & @{term[source]"upt m n"}\\
 @{term"[i..j]"} & @{term[source]"upto i j"}\\
-\<open>[e. x \<leftarrow> xs]\<close> & @{term"map (%x. e) xs"}\\
 @{term"xs[n := x]"} & @{term[source]"list_update xs n x"}\\
 @{term"\<Sum>x\<leftarrow>xs. e"} & @{term[source]"listsum (map (\<lambda>x. e) xs)"}\\
 \end{supertabular}
 \<^medskip>
 
-List comprehension: \<open>[e. q\<^sub>1, \<dots>, q\<^sub>n]\<close> where each
+Filter input syntax \<open>[pat \<leftarrow> e. b]\<close>, where
+\<open>pat\<close> is a tuple pattern, stands for @{term "filter (\<lambda>pat. b) e"}.
+
+List comprehension input syntax: \<open>[e. q\<^sub>1, \<dots>, q\<^sub>n]\<close> where each
 qualifier \<open>q\<^sub>i\<close> is either a generator \mbox{\<open>pat \<leftarrow> e\<close>} or a
 guard, i.e.\ boolean expression.
 
--- a/src/Doc/Main/document/root.tex	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Doc/Main/document/root.tex	Mon Jun 04 21:00:21 2018 +0100
@@ -2,13 +2,14 @@
 \usepackage{lmodern}
 \usepackage[T1]{fontenc}
 
-\oddsidemargin=4.6mm
-\evensidemargin=4.6mm
-\textwidth=150mm
-\topmargin=4.6mm
-\headheight=0mm
-\headsep=0mm
-\textheight=234mm
+%shortens document but can cause odd page breaks
+%\oddsidemargin=4.6mm
+%\evensidemargin=4.6mm
+%\textwidth=150mm
+%\topmargin=4.6mm
+%\headheight=0mm
+%\headsep=0mm
+%\textheight=234mm
 
 \usepackage{isabelle,isabellesym}
 \usepackage{amssymb}
--- a/src/HOL/HOLCF/Bifinite.thy	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/HOL/HOLCF/Bifinite.thy	Mon Jun 04 21:00:21 2018 +0100
@@ -112,18 +112,21 @@
 by (rule chainI, simp add: monofun_cfun monofun_LAM)
 
 lemma lub_discr_approx [simp]: "(\<Squnion>i. discr_approx i) = ID"
-apply (rule cfun_eqI)
-apply (simp add: contlub_cfun_fun)
-apply (simp add: discr_approx_def)
-apply (case_tac x, simp)
-apply (rule lub_eqI)
-apply (rule is_lubI)
-apply (rule ub_rangeI, simp)
-apply (drule ub_rangeD)
-apply (erule rev_below_trans)
-apply simp
-apply (rule lessI)
-done
+  apply (rule cfun_eqI)
+  apply (simp add: contlub_cfun_fun)
+  apply (simp add: discr_approx_def)
+  subgoal for x
+    apply (cases x)
+     apply simp
+    apply (rule lub_eqI)
+    apply (rule is_lubI)
+     apply (rule ub_rangeI, simp)
+    apply (drule ub_rangeD)
+    apply (erule rev_below_trans)
+    apply simp
+    apply (rule lessI)
+    done
+  done
 
 lemma inj_on_undiscr [simp]: "inj_on undiscr A"
 using Discr_undiscr by (rule inj_on_inverseI)
@@ -203,14 +206,21 @@
 definition "decode_prod_u = (\<Lambda>(:up\<cdot>x, up\<cdot>y:). up\<cdot>(x, y))"
 
 lemma decode_encode_prod_u [simp]: "decode_prod_u\<cdot>(encode_prod_u\<cdot>x) = x"
-unfolding encode_prod_u_def decode_prod_u_def
-by (case_tac x, simp, rename_tac y, case_tac y, simp)
+  unfolding encode_prod_u_def decode_prod_u_def
+  apply (cases x)
+   apply simp
+  subgoal for y by (cases y) simp
+  done
 
 lemma encode_decode_prod_u [simp]: "encode_prod_u\<cdot>(decode_prod_u\<cdot>y) = y"
-unfolding encode_prod_u_def decode_prod_u_def
-apply (case_tac y, simp, rename_tac a b)
-apply (case_tac a, simp, case_tac b, simp, simp)
-done
+  unfolding encode_prod_u_def decode_prod_u_def
+  apply (cases y)
+   apply simp
+  subgoal for a b
+    apply (cases a, simp)
+    apply (cases b, simp, simp)
+    done
+  done
 
 instance prod :: (profinite, profinite) profinite
 proof
--- a/src/HOL/HOLCF/Pcpo.thy	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/HOL/HOLCF/Pcpo.thy	Mon Jun 04 21:00:21 2018 +0100
@@ -198,16 +198,20 @@
 begin
 
 subclass chfin
-  apply standard
-  apply (unfold max_in_chain_def)
-  apply (case_tac "\<forall>i. Y i = \<bottom>")
-   apply simp
-  apply simp
-  apply (erule exE)
-  apply (rule_tac x="i" in exI)
-  apply clarify
-  apply (blast dest: chain_mono ax_flat)
-  done
+proof
+  fix Y
+  assume *: "chain Y"
+  show "\<exists>n. max_in_chain n Y"
+    apply (unfold max_in_chain_def)
+    apply (cases "\<forall>i. Y i = \<bottom>")
+     apply simp
+    apply simp
+    apply (erule exE)
+    apply (rule_tac x="i" in exI)
+    apply clarify
+    using * apply (blast dest: chain_mono ax_flat)
+    done
+qed
 
 lemma flat_below_iff: "x \<sqsubseteq> y \<longleftrightarrow> x = \<bottom> \<or> x = y"
   by (safe dest!: ax_flat)
--- a/src/HOL/List.thy	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/HOL/List.thy	Mon Jun 04 21:00:21 2018 +0100
@@ -433,75 +433,70 @@
 syntax (ASCII)
   "_lc_gen" :: "'a \<Rightarrow> 'a list \<Rightarrow> lc_qual"  ("_ <- _")
 
-(* These are easier than ML code but cannot express the optimized
-   translation of [e. p<-xs]
-translations
-  "[e. p<-xs]" => "concat(map (_lc_abs p [e]) xs)"
-  "_listcompr e (_lc_gen p xs) (_lc_quals Q Qs)"
-   => "concat (map (_lc_abs p (_listcompr e Q Qs)) xs)"
-  "[e. P]" => "if P then [e] else []"
-  "_listcompr e (_lc_test P) (_lc_quals Q Qs)"
-   => "if P then (_listcompr e Q Qs) else []"
-  "_listcompr e (_lc_let b) (_lc_quals Q Qs)"
-   => "_Let b (_listcompr e Q Qs)"
-*)
-
 parse_translation \<open>
-  let
-    val NilC = Syntax.const @{const_syntax Nil};
-    val ConsC = Syntax.const @{const_syntax Cons};
-    val mapC = Syntax.const @{const_syntax map};
-    val concatC = Syntax.const @{const_syntax concat};
-    val IfC = Syntax.const @{const_syntax If};
-
-    fun single x = ConsC $ x $ NilC;
-
-    fun pat_tr ctxt p e opti = (* %x. case x of p => e | _ => [] *)
-      let
-        (* FIXME proper name context!? *)
-        val x =
-          Free (singleton (Name.variant_list (fold Term.add_free_names [p, e] [])) "x", dummyT);
-        val e = if opti then single e else e;
-        val case1 = Syntax.const @{syntax_const "_case1"} $ p $ e;
-        val case2 =
-          Syntax.const @{syntax_const "_case1"} $
-            Syntax.const @{const_syntax Pure.dummy_pattern} $ NilC;
-        val cs = Syntax.const @{syntax_const "_case2"} $ case1 $ case2;
-      in Syntax_Trans.abs_tr [x, Case_Translation.case_tr false ctxt [x, cs]] end;
-
-    fun abs_tr ctxt p e opti =
-      (case Term_Position.strip_positions p of
-        Free (s, T) =>
-          let
-            val thy = Proof_Context.theory_of ctxt;
-            val s' = Proof_Context.intern_const ctxt s;
-          in
-            if Sign.declared_const thy s'
-            then (pat_tr ctxt p e opti, false)
-            else (Syntax_Trans.abs_tr [p, e], true)
-          end
-      | _ => (pat_tr ctxt p e opti, false));
-
-    fun lc_tr ctxt [e, Const (@{syntax_const "_lc_test"}, _) $ b, qs] =
-          let
-            val res =
-              (case qs of
-                Const (@{syntax_const "_lc_end"}, _) => single e
-              | Const (@{syntax_const "_lc_quals"}, _) $ q $ qs => lc_tr ctxt [e, q, qs]);
-          in IfC $ b $ res $ NilC end
-      | lc_tr ctxt
-            [e, Const (@{syntax_const "_lc_gen"}, _) $ p $ es,
-              Const(@{syntax_const "_lc_end"}, _)] =
-          (case abs_tr ctxt p e true of
-            (f, true) => mapC $ f $ es
-          | (f, false) => concatC $ (mapC $ f $ es))
-      | lc_tr ctxt
-            [e, Const (@{syntax_const "_lc_gen"}, _) $ p $ es,
-              Const (@{syntax_const "_lc_quals"}, _) $ q $ qs] =
-          let val e' = lc_tr ctxt [e, q, qs];
-          in concatC $ (mapC $ (fst (abs_tr ctxt p e' false)) $ es) end;
-
-  in [(@{syntax_const "_listcompr"}, lc_tr)] end
+let
+  val NilC = Syntax.const @{const_syntax Nil};
+  val ConsC = Syntax.const @{const_syntax Cons};
+  val mapC = Syntax.const @{const_syntax map};
+  val concatC = Syntax.const @{const_syntax concat};
+  val IfC = Syntax.const @{const_syntax If};
+  val dummyC = Syntax.const @{const_syntax Pure.dummy_pattern}
+
+  fun single x = ConsC $ x $ NilC;
+
+  fun pat_tr ctxt p e opti = (* %x. case x of p => e | _ => [] *)
+    let
+      (* FIXME proper name context!? *)
+      val x =
+        Free (singleton (Name.variant_list (fold Term.add_free_names [p, e] [])) "x", dummyT);
+      val e = if opti then single e else e;
+      val case1 = Syntax.const @{syntax_const "_case1"} $ p $ e;
+      val case2 =
+        Syntax.const @{syntax_const "_case1"} $ dummyC $ NilC;
+      val cs = Syntax.const @{syntax_const "_case2"} $ case1 $ case2;
+    in Syntax_Trans.abs_tr [x, Case_Translation.case_tr false ctxt [x, cs]] end;
+
+  fun pair_pat_tr (x as Free _) e = Syntax_Trans.abs_tr [x, e]
+    | pair_pat_tr (_ $ p1 $ p2) e =
+        Syntax.const @{const_syntax case_prod} $ pair_pat_tr p1 (pair_pat_tr p2 e)
+    | pair_pat_tr dummy e = Syntax_Trans.abs_tr [Syntax.const "_idtdummy", e]
+
+  fun pair_pat ctxt (Const (@{const_syntax "Pair"},_) $ s $ t) =
+        pair_pat ctxt s andalso pair_pat ctxt t
+    | pair_pat ctxt (Free (s,_)) =
+        let
+          val thy = Proof_Context.theory_of ctxt;
+          val s' = Proof_Context.intern_const ctxt s;
+        in not (Sign.declared_const thy s') end
+    | pair_pat _ t = (t = dummyC);
+
+  fun abs_tr ctxt p e opti =
+    let val p = Term_Position.strip_positions p
+    in if pair_pat ctxt p
+       then (pair_pat_tr p e, true)
+       else (pat_tr ctxt p e opti, false)
+    end
+
+  fun lc_tr ctxt [e, Const (@{syntax_const "_lc_test"}, _) $ b, qs] =
+    let
+      val res =
+        (case qs of
+           Const (@{syntax_const "_lc_end"}, _) => single e
+         | Const (@{syntax_const "_lc_quals"}, _) $ q $ qs => lc_tr ctxt [e, q, qs]);
+    in IfC $ b $ res $ NilC end
+  | lc_tr ctxt
+      [e, Const (@{syntax_const "_lc_gen"}, _) $ p $ es,
+          Const(@{syntax_const "_lc_end"}, _)] =
+      (case abs_tr ctxt p e true of
+         (f, true) => mapC $ f $ es
+       | (f, false) => concatC $ (mapC $ f $ es))
+  | lc_tr ctxt
+      [e, Const (@{syntax_const "_lc_gen"}, _) $ p $ es,
+          Const (@{syntax_const "_lc_quals"}, _) $ q $ qs] =
+      let val e' = lc_tr ctxt [e, q, qs];
+      in concatC $ (mapC $ (fst (abs_tr ctxt p e' false)) $ es) end;
+
+in [(@{syntax_const "_listcompr"}, lc_tr)] end
 \<close>
 
 ML_val \<open>
@@ -513,8 +508,8 @@
           quote (Input.source_content s1) ^ Position.here_list [Input.pos_of s1, Input.pos_of s2]);
   in
     check \<open>[(x,y,z). b]\<close> \<open>if b then [(x, y, z)] else []\<close>;
-    check \<open>[(x,y,z). x\<leftarrow>xs]\<close> \<open>map (\<lambda>x. (x, y, z)) xs\<close>;
-    check \<open>[e x y. x\<leftarrow>xs, y\<leftarrow>ys]\<close> \<open>concat (map (\<lambda>x. map (\<lambda>y. e x y) ys) xs)\<close>;
+    check \<open>[(x,y,z). (x,_,y)\<leftarrow>xs]\<close> \<open>map (\<lambda>(x,_,y). (x, y, z)) xs\<close>;
+    check \<open>[e x y. (x,_)\<leftarrow>xs, y\<leftarrow>ys]\<close> \<open>concat (map (\<lambda>(x,_). map (\<lambda>y. e x y) ys) xs)\<close>;
     check \<open>[(x,y,z). x<a, x>b]\<close> \<open>if x < a then if b < x then [(x, y, z)] else [] else []\<close>;
     check \<open>[(x,y,z). x\<leftarrow>xs, x>b]\<close> \<open>concat (map (\<lambda>x. if b < x then [(x, y, z)] else []) xs)\<close>;
     check \<open>[(x,y,z). x<a, x\<leftarrow>xs]\<close> \<open>if x < a then map (\<lambda>x. (x, y, z)) xs else []\<close>;
@@ -526,26 +521,21 @@
       \<open>if x < a then if b < x then if x = d then [(x, y, z)] else [] else [] else []\<close>;
     check \<open>[(x,y,z). x<a, x>b, y\<leftarrow>ys]\<close>
       \<open>if x < a then if b < x then map (\<lambda>y. (x, y, z)) ys else [] else []\<close>;
-    check \<open>[(x,y,z). x<a, x\<leftarrow>xs,y>b]\<close>
-      \<open>if x < a then concat (map (\<lambda>x. if b < y then [(x, y, z)] else []) xs) else []\<close>;
+    check \<open>[(x,y,z). x<a, (_,x)\<leftarrow>xs,y>b]\<close>
+      \<open>if x < a then concat (map (\<lambda>(_,x). if b < y then [(x, y, z)] else []) xs) else []\<close>;
     check \<open>[(x,y,z). x<a, x\<leftarrow>xs, y\<leftarrow>ys]\<close>
       \<open>if x < a then concat (map (\<lambda>x. map (\<lambda>y. (x, y, z)) ys) xs) else []\<close>;
     check \<open>[(x,y,z). x\<leftarrow>xs, x>b, y<a]\<close>
       \<open>concat (map (\<lambda>x. if b < x then if y < a then [(x, y, z)] else [] else []) xs)\<close>;
     check \<open>[(x,y,z). x\<leftarrow>xs, x>b, y\<leftarrow>ys]\<close>
       \<open>concat (map (\<lambda>x. if b < x then map (\<lambda>y. (x, y, z)) ys else []) xs)\<close>;
-    check \<open>[(x,y,z). x\<leftarrow>xs, y\<leftarrow>ys,y>x]\<close>
-      \<open>concat (map (\<lambda>x. concat (map (\<lambda>y. if x < y then [(x, y, z)] else []) ys)) xs)\<close>;
+    check \<open>[(x,y,z). x\<leftarrow>xs, (y,_)\<leftarrow>ys,y>x]\<close>
+      \<open>concat (map (\<lambda>x. concat (map (\<lambda>(y,_). if x < y then [(x, y, z)] else []) ys)) xs)\<close>;
     check \<open>[(x,y,z). x\<leftarrow>xs, y\<leftarrow>ys,z\<leftarrow>zs]\<close>
       \<open>concat (map (\<lambda>x. concat (map (\<lambda>y. map (\<lambda>z. (x, y, z)) zs) ys)) xs)\<close>
   end;
 \<close>
 
-(*
-term "[(x,y). x\<leftarrow>xs, let xx = x+x, y\<leftarrow>ys, y \<noteq> xx]"
-*)
-
-
 ML \<open>
 (* Simproc for rewriting list comprehensions applied to List.set to set
    comprehension. *)
--- a/src/Pure/PIDE/command.ML	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/PIDE/command.ML	Mon Jun 04 21:00:21 2018 +0100
@@ -26,6 +26,7 @@
   val print0: {pri: int, print_fn: print_fn} -> eval -> print
   val print: bool -> (string * string list) list -> Keyword.keywords -> string ->
     eval -> print list -> print list option
+  val parallel_print: print -> bool
   type print_function =
     {keywords: Keyword.keywords, command_name: string, args: string list, exec_id: Document_ID.exec} ->
       {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option
@@ -36,6 +37,7 @@
   val no_exec: exec
   val exec_ids: exec option -> Document_ID.exec list
   val exec: Document_ID.execution -> exec -> unit
+  val exec_parallel_prints: Document_ID.execution -> Future.task list -> exec -> exec option
 end;
 
 structure Command: COMMAND =
@@ -386,6 +388,9 @@
     if eq_list print_eq (old_prints, new_prints) then NONE else SOME new_prints
   end;
 
+fun parallel_print (Print {pri, ...}) =
+  pri <= 0 orelse (Future.enabled () andalso Options.default_bool "parallel_print");
+
 fun print_function name f =
   Synchronized.change print_functions (fn funs =>
    (if name = "" then error "Unnamed print function" else ();
@@ -448,23 +453,24 @@
   if Lazy.is_finished eval_process then ()
   else run_process execution_id exec_id eval_process;
 
-fun run_print execution_id (Print {name, delay, pri, exec_id, print_process, ...}) =
+fun fork_print execution_id deps (Print {name, delay, pri, exec_id, print_process, ...}) =
+  let
+    val group = Future.worker_subgroup ();
+    fun fork () =
+      ignore ((singleton o Future.forks)
+        {name = name, group = SOME group, deps = deps, pri = pri, interrupts = true}
+        (fn () =>
+          if ignore_process print_process then ()
+          else run_process execution_id exec_id print_process));
+  in
+    (case delay of
+      NONE => fork ()
+    | SOME d => ignore (Event_Timer.request (Time.now () + d) fork))
+  end;
+
+fun run_print execution_id (print as Print {exec_id, print_process, ...}) =
   if ignore_process print_process then ()
-  else if pri <= 0 orelse (Future.enabled () andalso Options.default_bool "parallel_print")
-  then
-    let
-      val group = Future.worker_subgroup ();
-      fun fork () =
-        ignore ((singleton o Future.forks)
-          {name = name, group = SOME group, deps = [], pri = pri, interrupts = true}
-          (fn () =>
-            if ignore_process print_process then ()
-            else run_process execution_id exec_id print_process));
-    in
-      (case delay of
-        NONE => fork ()
-      | SOME d => ignore (Event_Timer.request (Time.now () + d) fork))
-    end
+  else if parallel_print print then fork_print execution_id [] print
   else run_process execution_id exec_id print_process;
 
 in
@@ -472,7 +478,11 @@
 fun exec execution_id (eval, prints) =
   (run_eval execution_id eval; List.app (run_print execution_id) prints);
 
+fun exec_parallel_prints execution_id deps (exec as (Eval {eval_process, ...}, prints)) =
+  if Lazy.is_finished eval_process
+  then (List.app (fork_print execution_id deps) prints; NONE)
+  else SOME exec;
+
 end;
 
 end;
-
--- a/src/Pure/PIDE/document.ML	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/PIDE/document.ML	Mon Jun 04 21:00:21 2018 +0100
@@ -314,17 +314,20 @@
 type execution =
  {version_id: Document_ID.version,  (*static version id*)
   execution_id: Document_ID.execution,  (*dynamic execution id*)
-  delay_request: unit future};  (*pending event timer request*)
+  delay_request: unit future,  (*pending event timer request*)
+  parallel_prints: Command.exec list};  (*parallel prints for early execution*)
 
 val no_execution: execution =
   {version_id = Document_ID.none,
    execution_id = Document_ID.none,
-   delay_request = Future.value ()};
+   delay_request = Future.value (),
+   parallel_prints = []};
 
-fun new_execution version_id delay_request : execution =
+fun new_execution version_id delay_request parallel_prints : execution =
   {version_id = version_id,
    execution_id = Execution.start (),
-   delay_request = delay_request};
+   delay_request = delay_request,
+   parallel_prints = parallel_prints};
 
 abstype state = State of
  {versions: version Inttab.table,  (*version id -> document content*)
@@ -347,12 +350,23 @@
 
 (* document versions *)
 
-fun define_version version_id version =
+fun parallel_prints_node node =
+  iterate_entries (fn (_, opt_exec) => fn rev_result =>
+    (case opt_exec of
+      SOME (eval, prints) =>
+        (case filter Command.parallel_print prints of
+          [] => SOME rev_result
+        | prints' => SOME ((eval, prints') :: rev_result))
+    | NONE => NONE)) node [] |> rev;
+
+fun define_version version_id version assigned_nodes =
   map_state (fn (versions, blobs, commands, {delay_request, ...}) =>
     let
-      val versions' = Inttab.update_new (version_id, version) versions
+      val version' = fold put_node assigned_nodes version;
+      val versions' = Inttab.update_new (version_id, version') versions
         handle Inttab.DUP dup => err_dup "document version" dup;
-      val execution' = new_execution version_id delay_request;
+      val parallel_prints = maps (parallel_prints_node o #2) assigned_nodes;
+      val execution' = new_execution version_id delay_request parallel_prints;
     in (versions', blobs, commands, execution') end);
 
 fun the_version (State {versions, ...}) version_id =
@@ -479,12 +493,16 @@
 fun start_execution state = state |> map_state (fn (versions, blobs, commands, execution) =>
   timeit "Document.start_execution" (fn () =>
     let
-      val {version_id, execution_id, delay_request} = execution;
+      val {version_id, execution_id, delay_request, parallel_prints} = execution;
 
       val delay = seconds (Options.default_real "editor_execution_delay");
 
       val _ = Future.cancel delay_request;
       val delay_request' = Event_Timer.future (Time.now () + delay);
+      val delay = Future.task_of delay_request';
+
+      val parallel_prints' = parallel_prints
+        |> map_filter (Command.exec_parallel_prints execution_id [delay]);
 
       fun finished_import (name, (node, _)) =
         finished_result node orelse is_some (Thy_Info.lookup_theory name);
@@ -497,7 +515,7 @@
             if Symtab.defined required name orelse visible_node node orelse pending_result node then
               let
                 fun body () =
-                 (Execution.worker_task_active true;
+                 (Execution.worker_task_active true name;
                   if forall finished_import deps then
                     iterate_entries (fn (_, opt_exec) => fn () =>
                       (case opt_exec of
@@ -507,23 +525,22 @@
                           else NONE
                       | NONE => NONE)) node ()
                   else ();
-                  Execution.worker_task_active false)
+                  Execution.worker_task_active false name)
                   handle exn =>
                    (Output.system_message (Runtime.exn_message exn);
-                    Execution.worker_task_active false;
+                    Execution.worker_task_active false name;
                     Exn.reraise exn);
                 val future =
                   (singleton o Future.forks)
                    {name = "theory:" ^ name,
                     group = SOME (Future.new_group NONE),
-                    deps =
-                      Future.task_of delay_request' :: Execution.active_tasks name @
-                        maps (the_list o #2 o #2) deps,
+                    deps = delay :: Execution.active_tasks name @ maps (the_list o #2 o #2) deps,
                     pri = 0, interrupts = false} body;
               in (node, SOME (Future.task_of future)) end
             else (node, NONE));
       val execution' =
-        {version_id = version_id, execution_id = execution_id, delay_request = delay_request'};
+        {version_id = version_id, execution_id = execution_id,
+          delay_request = delay_request', parallel_prints = parallel_prints'};
     in (versions, blobs, commands, execution') end));
 
 
@@ -838,7 +855,7 @@
     val assigned_nodes = map_filter #3 updated;
 
     val state' = state
-      |> define_version new_version_id (fold put_node assigned_nodes new_version);
+      |> define_version new_version_id new_version assigned_nodes;
 
   in (removed, assign_result, state') end);
 
--- a/src/Pure/PIDE/document.scala	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/PIDE/document.scala	Mon Jun 04 21:00:21 2018 +0100
@@ -537,6 +537,8 @@
     def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command]
 
     def markup_to_XML(range: Text.Range, elements: Markup.Elements): XML.Body
+    def messages: List[(XML.Tree, Position.T)]
+    def exports: List[Export.Entry]
 
     def find_command(id: Document_ID.Generic): Option[(Node, Command)]
     def find_command_position(id: Document_ID.Generic, offset: Symbol.Offset)
@@ -1006,6 +1008,22 @@
         def markup_to_XML(range: Text.Range, elements: Markup.Elements): XML.Body =
           state.markup_to_XML(version, node_name, range, elements)
 
+        def messages: List[(XML.Tree, Position.T)] =
+          (for {
+            (command, start) <-
+              Document.Node.Commands.starts_pos(
+                node.commands.iterator, Token.Pos.file(node_name.node))
+            pos = command.span.keyword_pos(start).position(command.span.name)
+            (_, tree) <- state.command_results(version, command).iterator
+           } yield (tree, pos)).toList
+
+        def exports: List[Export.Entry] =
+          Command.Exports.merge(
+            for {
+              command <- node.commands.iterator
+              st <- state.command_states(version, command).iterator
+            } yield st.exports).iterator.map(_._2).toList
+
 
         /* find command */
 
--- a/src/Pure/Thy/sessions.scala	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/Thy/sessions.scala	Mon Jun 04 21:00:21 2018 +0100
@@ -394,21 +394,21 @@
     progress: Progress = No_Progress,
     dirs: List[Path] = Nil,
     include_sessions: List[String] = Nil,
-    ancestor_session: Option[String] = None,
-    all_known: Boolean = false,
-    focus_session: Boolean = false,
-    required_session: Boolean = false): Base_Info =
+    session_ancestor: Option[String] = None,
+    session_requirements: Boolean = false,
+    session_focus: Boolean = false,
+    all_known: Boolean = false): Base_Info =
   {
     val full_sessions = load_structure(options, dirs = dirs)
     val global_theories = full_sessions.global_theories
 
     val selected_sessions =
-      full_sessions.selection(Selection(sessions = session :: ancestor_session.toList))
+      full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
     val info = selected_sessions(session)
-    val ancestor = ancestor_session orElse info.parent
+    val ancestor = session_ancestor orElse info.parent
 
     val (session1, infos1) =
-      if (required_session && ancestor.isDefined) {
+      if (session_requirements && ancestor.isDefined) {
         val deps = Sessions.deps(selected_sessions, global_theories, progress = progress)
         val base = deps(session)
 
@@ -430,7 +430,7 @@
 
         if (required_theories.isEmpty) (ancestor.get, Nil)
         else {
-          val other_name = info.name + "_base(" + ancestor.get + ")"
+          val other_name = info.name + "_requirements(" + ancestor.get + ")"
           (other_name,
             List(
               make_info(info.options,
@@ -461,7 +461,7 @@
     {
       val sel_sessions1 = session1 :: include_sessions
       val select_sessions1 =
-        if (focus_session) full_sessions1.imports_descendants(sel_sessions1) else sel_sessions1
+        if (session_focus) full_sessions1.imports_descendants(sel_sessions1) else sel_sessions1
       full_sessions1.selection(Selection(sessions = select_sessions1))
     }
 
--- a/src/Pure/Thy/thy_resources.scala	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/Thy/thy_resources.scala	Mon Jun 04 21:00:21 2018 +0100
@@ -55,7 +55,7 @@
     }
   }
 
-  sealed case class Theories_Result(
+  class Theories_Result private[Thy_Resources](
     val state: Document.State,
     val version: Document.Version,
     val nodes: List[(Document.Node.Name, Protocol.Node_Status)])
@@ -63,32 +63,11 @@
     def node_names: List[Document.Node.Name] = nodes.map(_._1)
     def ok: Boolean = nodes.forall({ case (_, st) => st.ok })
 
-    def messages(node_name: Document.Node.Name): List[(XML.Tree, Position.T)] =
+    def snapshot(node_name: Document.Node.Name): Document.Snapshot =
     {
-      val node = version.nodes(node_name)
-      (for {
-        (command, start) <-
-          Document.Node.Commands.starts_pos(node.commands.iterator, Token.Pos.file(node_name.node))
-        pos = command.span.keyword_pos(start).position(command.span.name)
-        (_, tree) <- state.command_results(version, command).iterator
-       } yield (tree, pos)).toList
-    }
-
-    def markup_to_XML(node_name: Document.Node.Name,
-      range: Text.Range = Text.Range.full,
-      elements: Markup.Elements = Markup.Elements.full): XML.Body =
-    {
-      state.markup_to_XML(version, node_name, range, elements)
-    }
-
-    def exports(node_name: Document.Node.Name): List[Export.Entry] =
-    {
-      val node = version.nodes(node_name)
-      Command.Exports.merge(
-        for {
-          command <- node.commands.iterator
-          st <- state.command_states(version, command).iterator
-        } yield st.exports).iterator.map(_._2).toList
+      val snapshot = state.snapshot(node_name)
+      assert(version.id == snapshot.version.id)
+      snapshot
     }
   }
 
@@ -134,7 +113,7 @@
             val nodes =
               for (name <- dep_theories)
               yield (name -> Protocol.node_status(state, version, name))
-            try { result.fulfill(Theories_Result(state, version, nodes)) }
+            try { result.fulfill(new Theories_Result(state, version, nodes)) }
             catch { case _: IllegalStateException => }
           case _ =>
         }
--- a/src/Pure/Tools/dump.scala	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/Tools/dump.scala	Mon Jun 04 21:00:21 2018 +0100
@@ -14,22 +14,24 @@
   sealed case class Aspect_Args(
     options: Options,
     progress: Progress,
+    deps: Sessions.Deps,
     output_dir: Path,
-    deps: Sessions.Deps,
-    result: Thy_Resources.Theories_Result)
+    node_name: Document.Node.Name,
+    node_status: Protocol.Node_Status,
+    snapshot: Document.Snapshot)
   {
-    def write(node_name: Document.Node.Name, file_name: Path, bytes: Bytes)
+    def write(file_name: Path, bytes: Bytes)
     {
       val path = output_dir + Path.basic(node_name.theory) + file_name
       Isabelle_System.mkdirs(path.dir)
       Bytes.write(path, bytes)
     }
 
-    def write(node_name: Document.Node.Name, file_name: Path, text: String): Unit =
-      write(node_name, file_name, Bytes(text))
+    def write(file_name: Path, text: String): Unit =
+      write(file_name, Bytes(text))
 
-    def write(node_name: Document.Node.Name, file_name: Path, body: XML.Body): Unit =
-      write(node_name, file_name, Symbol.encode(YXML.string_of_body(body)))
+    def write(file_name: Path, body: XML.Body): Unit =
+      write(file_name, Symbol.encode(YXML.string_of_body(body)))
   }
 
   sealed case class Aspect(name: String, description: String, operation: Aspect_Args => Unit,
@@ -40,35 +42,27 @@
 
   val known_aspects =
     List(
+      Aspect("markup", "PIDE markup (YXML format)",
+        { case args =>
+            args.write(Path.explode("markup.yxml"),
+              args.snapshot.markup_to_XML(Text.Range.full, Markup.Elements.full))
+        }),
       Aspect("messages", "output messages (YXML format)",
         { case args =>
-            for (node_name <- args.result.node_names) {
-              args.write(node_name, Path.explode("messages.yxml"),
-                args.result.messages(node_name).iterator.map(_._1).toList)
-            }
-        }),
-      Aspect("markup", "PIDE markup (YXML format)",
-        { case args =>
-            for (node_name <- args.result.node_names) {
-              args.write(node_name, Path.explode("markup.yxml"),
-                args.result.markup_to_XML(node_name))
-            }
+            args.write(Path.explode("messages.yxml"),
+              args.snapshot.messages.iterator.map(_._1).toList)
         }),
       Aspect("latex", "generated LaTeX source",
         { case args =>
-            for {
-              node_name <- args.result.node_names
-              entry <- args.result.exports(node_name)
-              if entry.name == "document.tex"
-            } args.write(node_name, Path.explode(entry.name), entry.uncompressed())
+            for (entry <- args.snapshot.exports if entry.name == "document.tex")
+              args.write(Path.explode(entry.name), entry.uncompressed())
         }, options = List("editor_presentation")),
       Aspect("theory", "foundational theory content",
         { case args =>
             for {
-              node_name <- args.result.node_names
-              entry <- args.result.exports(node_name)
+              entry <- args.snapshot.exports
               if entry.name.startsWith(Export_Theory.export_prefix)
-            } args.write(node_name, Path.explode(entry.name), entry.uncompressed())
+            } args.write(Path.explode(entry.name), entry.uncompressed())
         }, options = List("editor_presentation", "export_theory"))
     ).sortBy(_.name)
 
@@ -129,8 +123,12 @@
 
     /* dump aspects */
 
-    val aspect_args = Aspect_Args(dump_options, progress, output_dir, deps, theories_result)
-    aspects.foreach(_.operation(aspect_args))
+    for ((node_name, node_status) <- theories_result.nodes) {
+      val snapshot = theories_result.snapshot(node_name)
+      val aspect_args =
+        Aspect_Args(dump_options, progress, deps, output_dir, node_name, node_status, snapshot)
+      aspects.foreach(_.operation(aspect_args))
+    }
 
     session_result
   }
--- a/src/Pure/Tools/server_commands.scala	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Pure/Tools/server_commands.scala	Mon Jun 04 21:00:21 2018 +0100
@@ -205,25 +205,27 @@
           "errors" ->
             (for {
               (name, status) <- result.nodes if !status.ok
-              (tree, pos) <- result.messages(name) if Protocol.is_error(tree)
+              (tree, pos) <- result.snapshot(name).messages if Protocol.is_error(tree)
             } yield output_message(tree, pos)),
           "nodes" ->
-            (for ((name, status) <- result.nodes) yield
+            (for ((name, status) <- result.nodes) yield {
+              val snapshot = result.snapshot(name)
               name.json +
                 ("status" -> status.json) +
                 ("messages" ->
                   (for {
-                    (tree, pos) <- result.messages(name) if Protocol.is_exported(tree)
+                    (tree, pos) <- snapshot.messages if Protocol.is_exported(tree)
                   } yield output_message(tree, pos))) +
                 ("exports" ->
                   (if (args.export_pattern == "") Nil else {
                     val matcher = Export.make_matcher(args.export_pattern)
-                    for { entry <- result.exports(name) if matcher(entry.theory_name, entry.name) }
+                    for { entry <- snapshot.exports if matcher(entry.theory_name, entry.name) }
                     yield {
                       val (base64, body) = entry.uncompressed().maybe_base64
                       JSON.Object("name" -> entry.name, "base64" -> base64, "body" -> body)
                     }
-                  }))))
+                  }))
+            }))
 
       (result_json, result)
     }
--- a/src/Tools/jEdit/lib/Tools/jedit	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Tools/jEdit/lib/Tools/jedit	Mon Jun 04 21:00:21 2018 +0100
@@ -97,15 +97,12 @@
   echo "Usage: isabelle $PRG [OPTIONS] [FILES ...]"
   echo
   echo "  Options are:"
-  echo "    -A           specify ancestor for base session image (default: parent)"
-  echo "    -B           use base session image, with theories from other sessions"
-  echo "    -F           focus on selected logic session: ignore unrelated theories"
+  echo "    -A NAME      ancestor session for options -R and -S (default: parent)"
   echo "    -D NAME=X    set JVM system property"
   echo "    -J OPTION    add JVM runtime option"
   echo "                 (default $JEDIT_JAVA_SYSTEM_OPTIONS $JEDIT_JAVA_OPTIONS)"
-  echo "    -P           use parent session image"
-  echo "    -R           open ROOT entry of logic session"
-  echo "    -S NAME      edit specified logic session, abbreviates -B -F -R -l NAME"
+  echo "    -R NAME      build image with requirements from other sessions"
+  echo "    -S NAME      like option -R, with focus on selected session"
   echo "    -b           build only"
   echo "    -d DIR       include session directory"
   echo "    -f           fresh build"
@@ -143,11 +140,9 @@
 BUILD_JARS="jars"
 ML_PROCESS_POLICY=""
 JEDIT_LOGIC_ANCESTOR=""
-JEDIT_LOGIC_BASE=""
+JEDIT_LOGIC_REQUIREMENTS=""
 JEDIT_LOGIC_FOCUS=""
 JEDIT_SESSION_DIRS=""
-JEDIT_LOGIC_ROOT=""
-JEDIT_LOGIC_PARENT=""
 JEDIT_LOGIC=""
 JEDIT_PRINT_MODE=""
 JEDIT_BUILD_MODE="normal"
@@ -155,38 +150,26 @@
 function getoptions()
 {
   OPTIND=1
-  while getopts "A:BFD:J:PRS:bd:fj:l:m:np:s" OPT
+  while getopts "A:D:J:R:S:bd:fj:l:m:np:s" OPT
   do
     case "$OPT" in
       A)
         JEDIT_LOGIC_ANCESTOR="$OPTARG"
         ;;
-      B)
-        JEDIT_LOGIC_BASE="true"
-        JEDIT_LOGIC_PARENT=""
-        ;;
       D)
         JAVA_ARGS["${#JAVA_ARGS[@]}"]="-D$OPTARG"
         ;;
-      F)
-        JEDIT_LOGIC_FOCUS="true"
-        ;;
       J)
         JAVA_ARGS["${#JAVA_ARGS[@]}"]="$OPTARG"
         ;;
-      P)
-        JEDIT_LOGIC_PARENT="true"
-        JEDIT_LOGIC_BASE=""
-        ;;
       R)
-        JEDIT_LOGIC_ROOT="true"
+        JEDIT_LOGIC="$OPTARG"
+        JEDIT_LOGIC_REQUIREMENTS="true"
         ;;
       S)
         JEDIT_LOGIC="$OPTARG"
-        JEDIT_LOGIC_BASE="true"
-        JEDIT_LOGIC_PARENT=""
+        JEDIT_LOGIC_REQUIREMENTS="true"
         JEDIT_LOGIC_FOCUS="true"
-        JEDIT_LOGIC_ROOT="true"
         ;;
       b)
         BUILD_ONLY=true
@@ -429,8 +412,8 @@
 
 if [ "$BUILD_ONLY" = false ]
 then
-  export JEDIT_SESSION_DIRS JEDIT_LOGIC JEDIT_LOGIC_ANCESTOR JEDIT_LOGIC_BASE JEDIT_LOGIC_FOCUS \
-    JEDIT_LOGIC_PARENT JEDIT_LOGIC_ROOT JEDIT_PRINT_MODE JEDIT_BUILD_MODE
+  export JEDIT_SESSION_DIRS JEDIT_LOGIC JEDIT_LOGIC_ANCESTOR JEDIT_LOGIC_REQUIREMENTS \
+    JEDIT_LOGIC_FOCUS JEDIT_PRINT_MODE JEDIT_BUILD_MODE
   export JEDIT_ML_PROCESS_POLICY="$ML_PROCESS_POLICY"
   classpath "$JEDIT_HOME/dist/jedit.jar"
   exec isabelle java "${JAVA_ARGS[@]}" isabelle.Main "${ARGS[@]}"
--- a/src/Tools/jEdit/src/jedit_sessions.scala	Mon Jun 04 21:00:12 2018 +0100
+++ b/src/Tools/jEdit/src/jedit_sessions.scala	Mon Jun 04 21:00:21 2018 +0100
@@ -40,9 +40,8 @@
       options.string(jedit_logic_option))
 
   def logic_ancestor: Option[String] = proper_string(Isabelle_System.getenv("JEDIT_LOGIC_ANCESTOR"))
+  def logic_requirements: Boolean = Isabelle_System.getenv("JEDIT_LOGIC_REQUIREMENTS") == "true"
   def logic_focus: Boolean = Isabelle_System.getenv("JEDIT_LOGIC_FOCUS") == "true"
-  def logic_base: Boolean = Isabelle_System.getenv("JEDIT_LOGIC_BASE") == "true"
-  def logic_parent: Boolean = Isabelle_System.getenv("JEDIT_LOGIC_PARENT") == "true"
 
   def logic_info(options: Options): Option[Sessions.Info] =
     try {
@@ -52,9 +51,7 @@
     catch { case ERROR(_) => None }
 
   def logic_root(options: Options): Position.T =
-    if (Isabelle_System.getenv("JEDIT_LOGIC_ROOT") == "true") {
-      logic_info(options).map(_.pos) getOrElse Position.none
-    }
+    if (logic_requirements) logic_info(options).map(_.pos) getOrElse Position.none
     else Position.none
 
 
@@ -111,13 +108,11 @@
   def session_base_info(options: Options): Sessions.Base_Info =
     Sessions.base_info(options,
       dirs = JEdit_Sessions.session_dirs(),
-      session =
-        if (logic_parent) logic_info(options).flatMap(_.parent) getOrElse logic_name(options)
-        else logic_name(options),
-      ancestor_session = logic_ancestor,
-      all_known = !logic_focus,
-      focus_session = logic_focus,
-      required_session = logic_base)
+      session = logic_name(options),
+      session_ancestor = logic_ancestor,
+      session_requirements = logic_requirements,
+      session_focus = logic_focus,
+      all_known = !logic_focus)
 
   def session_build(
     options: Options, progress: Progress = No_Progress, no_build: Boolean = false): Int =