--- a/src/Doc/System/Server.thy Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Doc/System/Server.thy Wed Sep 05 05:05:26 2018 +0200
@@ -909,6 +909,7 @@
\quad~~\<open>export_pattern?: string,\<close> \\
\quad~~\<open>check_delay?: double,\<close> & \<^bold>\<open>default:\<close> \<^verbatim>\<open>0.5\<close> \\
\quad~~\<open>check_limit?: int,\<close> \\
+ \quad~~\<open>watchdog_timeout?: double,\<close> \\
\quad~~\<open>nodes_status_delay?: double}\<close> & \<^bold>\<open>default:\<close> \<^verbatim>\<open>-1.0\<close> \\
\end{tabular}
@@ -948,9 +949,13 @@
\<^medskip> The status of PIDE processing is checked every \<open>check_delay\<close> seconds, and
bounded by \<open>check_limit\<close> attempts (default: 0, i.e.\ unbounded). A
- \<open>check_limit > 0\<close> effectively specifies a timeout of \<open>check_delay \<times>
+ \<open>check_limit > 0\<close> effectively specifies a global timeout of \<open>check_delay \<times>
check_limit\<close> seconds.
+ \<^medskip> If \<open>watchdog_timeout\<close> is greater than 0, it specifies the timespan (in
+ seconds) after the last command status change of Isabelle/PIDE: once that period elapses,
+ checking finishes, giving up on a potentially non-terminating or deadlocked execution.
+
\<^medskip> A non-negative \<open>nodes_status_delay\<close> enables continuous notifications of
kind \<open>nodes_status\<close>, with a field of name and type \<open>nodes_status\<close>. The time
interval is specified in seconds; by default it is negative and thus
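
A minimal Scala sketch of a caller of the extended interface: the new watchdog_timeout
argument reaches Thy_Resources.Session.use_theories as a Time value. The session value
and the theory name "Draft.Test" below are assumptions for illustration only.

    // check_delay * check_limit bounds the whole run globally (0.5 s * 7200 = 1 hour here),
    // while watchdog_timeout gives up after 10 minutes without any command status change.
    val result =
      session.use_theories(
        List("Draft.Test"),
        check_delay = Time.seconds(0.5),
        check_limit = 7200,
        watchdog_timeout = Time.seconds(600.0),
        nodes_status_delay = Time.seconds(0.1))

At the protocol level the same value is passed as the decimal field "watchdog_timeout"
of the use_theories command, as documented in the table above.
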
--- a/src/HOL/Library/Finite_Map.thy Wed Sep 05 05:05:00 2018 +0200
+++ b/src/HOL/Library/Finite_Map.thy Wed Sep 05 05:05:26 2018 +0200
@@ -378,43 +378,43 @@
unfolding fmfilter_alt_defs by simp
lemma fmrestrict_set_dom[simp]: "fmrestrict_set (fmdom' m) m = m"
- by (rule fmap_ext) auto
+by (rule fmap_ext) auto
lemma fmrestrict_fset_dom[simp]: "fmrestrict_fset (fmdom m) m = m"
- by (rule fmap_ext) auto
+by (rule fmap_ext) auto
lemma fmdrop_empty[simp]: "fmdrop a fmempty = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_set_empty[simp]: "fmdrop_set A fmempty = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_fset_empty[simp]: "fmdrop_fset A fmempty = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmrestrict_set_empty[simp]: "fmrestrict_set A fmempty = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmrestrict_fset_empty[simp]: "fmrestrict_fset A fmempty = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_set_null[simp]: "fmdrop_set {} m = m"
- by (rule fmap_ext) auto
+by (rule fmap_ext) auto
lemma fmdrop_fset_null[simp]: "fmdrop_fset {||} m = m"
- by (rule fmap_ext) auto
+by (rule fmap_ext) auto
lemma fmdrop_set_single[simp]: "fmdrop_set {a} m = fmdrop a m"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_fset_single[simp]: "fmdrop_fset {|a|} m = fmdrop a m"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmrestrict_set_null[simp]: "fmrestrict_set {} m = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmrestrict_fset_null[simp]: "fmrestrict_fset {||} m = fmempty"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_comm: "fmdrop a (fmdrop b m) = fmdrop b (fmdrop a m)"
unfolding fmfilter_alt_defs by (rule fmfilter_comm)
@@ -438,30 +438,30 @@
lemma fmdom'_add[simp]: "fmdom' (m ++\<^sub>f n) = fmdom' m \<union> fmdom' n" by transfer' auto
lemma fmadd_drop_left_dom: "fmdrop_fset (fmdom n) m ++\<^sub>f n = m ++\<^sub>f n"
- by (rule fmap_ext) auto
+by (rule fmap_ext) auto
lemma fmadd_restrict_right_dom: "fmrestrict_fset (fmdom n) (m ++\<^sub>f n) = n"
- by (rule fmap_ext) auto
+by (rule fmap_ext) auto
lemma fmfilter_add_distrib[simp]: "fmfilter P (m ++\<^sub>f n) = fmfilter P m ++\<^sub>f fmfilter P n"
by transfer' (auto simp: map_filter_def map_add_def)
lemma fmdrop_add_distrib[simp]: "fmdrop a (m ++\<^sub>f n) = fmdrop a m ++\<^sub>f fmdrop a n"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_set_add_distrib[simp]: "fmdrop_set A (m ++\<^sub>f n) = fmdrop_set A m ++\<^sub>f fmdrop_set A n"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmdrop_fset_add_distrib[simp]: "fmdrop_fset A (m ++\<^sub>f n) = fmdrop_fset A m ++\<^sub>f fmdrop_fset A n"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmrestrict_set_add_distrib[simp]:
"fmrestrict_set A (m ++\<^sub>f n) = fmrestrict_set A m ++\<^sub>f fmrestrict_set A n"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmrestrict_fset_add_distrib[simp]:
"fmrestrict_fset A (m ++\<^sub>f n) = fmrestrict_fset A m ++\<^sub>f fmrestrict_fset A n"
- unfolding fmfilter_alt_defs by simp
+unfolding fmfilter_alt_defs by simp
lemma fmadd_empty[simp]: "fmempty ++\<^sub>f m = m" "m ++\<^sub>f fmempty = m"
by (transfer'; auto)+
@@ -527,19 +527,19 @@
by transfer' (auto simp: map_pred_def map_filter_def)
lemma fmpred_drop[intro]: "fmpred P m \<Longrightarrow> fmpred P (fmdrop a m)"
- by (auto simp: fmfilter_alt_defs)
+by (auto simp: fmfilter_alt_defs)
lemma fmpred_drop_set[intro]: "fmpred P m \<Longrightarrow> fmpred P (fmdrop_set A m)"
- by (auto simp: fmfilter_alt_defs)
+by (auto simp: fmfilter_alt_defs)
lemma fmpred_drop_fset[intro]: "fmpred P m \<Longrightarrow> fmpred P (fmdrop_fset A m)"
- by (auto simp: fmfilter_alt_defs)
+by (auto simp: fmfilter_alt_defs)
lemma fmpred_restrict_set[intro]: "fmpred P m \<Longrightarrow> fmpred P (fmrestrict_set A m)"
- by (auto simp: fmfilter_alt_defs)
+by (auto simp: fmfilter_alt_defs)
lemma fmpred_restrict_fset[intro]: "fmpred P m \<Longrightarrow> fmpred P (fmrestrict_fset A m)"
- by (auto simp: fmfilter_alt_defs)
+by (auto simp: fmfilter_alt_defs)
lemma fmpred_cases[consumes 1]:
assumes "fmpred P m"
@@ -590,7 +590,6 @@
lemma fset_of_fmap_iff'[simp]: "(a, b) \<in> fset (fset_of_fmap m) \<longleftrightarrow> fmlookup m a = Some b"
by transfer' (auto simp: set_of_map_def)
-
lift_definition fmap_of_list :: "('a \<times> 'b) list \<Rightarrow> ('a, 'b) fmap"
is map_of
parametric map_of_transfer
@@ -819,19 +818,19 @@
unfolding fmrel_iff by auto
lemma fmrel_drop[intro]: "fmrel P m n \<Longrightarrow> fmrel P (fmdrop a m) (fmdrop a n)"
- unfolding fmfilter_alt_defs by blast
+unfolding fmfilter_alt_defs by blast
lemma fmrel_drop_set[intro]: "fmrel P m n \<Longrightarrow> fmrel P (fmdrop_set A m) (fmdrop_set A n)"
- unfolding fmfilter_alt_defs by blast
+unfolding fmfilter_alt_defs by blast
lemma fmrel_drop_fset[intro]: "fmrel P m n \<Longrightarrow> fmrel P (fmdrop_fset A m) (fmdrop_fset A n)"
- unfolding fmfilter_alt_defs by blast
+unfolding fmfilter_alt_defs by blast
lemma fmrel_restrict_set[intro]: "fmrel P m n \<Longrightarrow> fmrel P (fmrestrict_set A m) (fmrestrict_set A n)"
- unfolding fmfilter_alt_defs by blast
+unfolding fmfilter_alt_defs by blast
lemma fmrel_restrict_fset[intro]: "fmrel P m n \<Longrightarrow> fmrel P (fmrestrict_fset A m) (fmrestrict_fset A n)"
- unfolding fmfilter_alt_defs by blast
+unfolding fmfilter_alt_defs by blast
lemma fmrel_on_fset_fmrel_restrict:
"fmrel_on_fset S P m n \<longleftrightarrow> fmrel P (fmrestrict_fset S m) (fmrestrict_fset S n)"
@@ -1057,6 +1056,27 @@
by transfer (simp add: map_of_map_keys)
+subsection \<open>Additional properties\<close>
+
+lemma fmchoice':
+ assumes "finite S" "\<forall>x \<in> S. \<exists>y. Q x y"
+ shows "\<exists>m. fmdom' m = S \<and> fmpred Q m"
+proof -
+ obtain f where f: "Q x (f x)" if "x \<in> S" for x
+ using assms by (metis bchoice)
+ define f' where "f' x = (if x \<in> S then Some (f x) else None)" for x
+
+ have "eq_onp (\<lambda>m. finite (dom m)) f' f'"
+ unfolding eq_onp_def f'_def dom_def using assms by auto
+
+ show ?thesis
+ apply (rule exI[where x = "Abs_fmap f'"])
+ apply (subst fmpred.abs_eq, fact)
+ apply (subst fmdom'.abs_eq, fact)
+ unfolding f'_def dom_def map_pred_def using f
+ by auto
+qed
+
subsection \<open>Lifting/transfer setup\<close>
context includes lifting_syntax begin
@@ -1066,14 +1086,40 @@
lemma fmadd_transfer[transfer_rule]:
"(fmrel P ===> fmrel P ===> fmrel P) fmadd fmadd"
- by (intro fmrel_addI rel_funI)
+by (intro fmrel_addI rel_funI)
lemma fmupd_transfer[transfer_rule]:
"((=) ===> P ===> fmrel P ===> fmrel P) fmupd fmupd"
- by auto
+by auto
end
+lemma Quotient_fmap_bnf[quot_map]:
+ assumes "Quotient R Abs Rep T"
+ shows "Quotient (fmrel R) (fmmap Abs) (fmmap Rep) (fmrel T)"
+unfolding Quotient_alt_def4 proof safe
+ fix m n
+ assume "fmrel T m n"
+ then have "fmlookup (fmmap Abs m) x = fmlookup n x" for x
+ apply (cases rule: fmrel_cases[where x = x])
+ using assms unfolding Quotient_alt_def by auto
+ then show "fmmap Abs m = n"
+ by (rule fmap_ext)
+next
+ fix m
+ show "fmrel T (fmmap Rep m) m"
+ unfolding fmap.rel_map
+ apply (rule fmap.rel_refl)
+ using assms unfolding Quotient_alt_def
+ by auto
+next
+ from assms have "R = T OO T\<inverse>\<inverse>"
+ unfolding Quotient_alt_def4 by simp
+
+ then show "fmrel R = fmrel T OO (fmrel T)\<inverse>\<inverse>"
+ by (simp add: fmap.rel_compp fmap.rel_conversep)
+qed
+
subsection \<open>View as datatype\<close>
@@ -1311,4 +1357,31 @@
lifting_update fmap.lifting
lifting_forget fmap.lifting
+
+subsection \<open>Tests\<close>
+
+\<comment> \<open>Code generation\<close>
+
+export_code
+ fBall fmrel fmran fmran' fmdom fmdom' fmpred pred_fmap fmsubset fmupd fmrel_on_fset
+ fmdrop fmdrop_set fmdrop_fset fmrestrict_set fmrestrict_fset fmimage fmlookup fmempty
+ fmfilter fmadd fmmap fmmap_keys fmcomp
+ checking SML Scala Haskell? OCaml?
+
+\<comment> \<open>\<open>lifting\<close> through @{type fmap}\<close>
+
+experiment begin
+
+context includes fset.lifting begin
+
+lift_definition test1 :: "('a, 'b fset) fmap" is "fmempty :: ('a, 'b set) fmap"
+ by auto
+
+lift_definition test2 :: "'a \<Rightarrow> 'b \<Rightarrow> ('a, 'b fset) fmap" is "\<lambda>a b. fmupd a {b} fmempty"
+ by auto
+
+end
+
+end
+
end
\ No newline at end of file
--- a/src/HOL/Library/datatype_records.ML Wed Sep 05 05:05:00 2018 +0200
+++ b/src/HOL/Library/datatype_records.ML Wed Sep 05 05:05:26 2018 +0200
@@ -58,7 +58,7 @@
fun mk_name sel =
Binding.name ("update_" ^ Long_Name.base_name (fst (dest_Const sel)))
- val thms_binding = (@{binding record_simps}, @{attributes [simp]})
+ val thms_binding = (Binding.name "record_simps", @{attributes [simp]})
fun mk_t idx =
let
--- a/src/Pure/PIDE/document_status.scala Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Pure/PIDE/document_status.scala Wed Sep 05 05:05:26 2018 +0200
@@ -219,18 +219,21 @@
object Nodes_Status
{
- val empty: Nodes_Status = new Nodes_Status(Map.empty)
-
- type Update = (Nodes_Status, List[Document.Node.Name])
- val empty_update: Update = (empty, Nil)
+ val empty: Nodes_Status = new Nodes_Status(Map.empty, Document.Nodes.empty)
}
- final class Nodes_Status private(private val rep: Map[Document.Node.Name, Node_Status])
+ final class Nodes_Status private(
+ private val rep: Map[Document.Node.Name, Node_Status],
+ nodes: Document.Nodes)
{
def is_empty: Boolean = rep.isEmpty
def apply(name: Document.Node.Name): Node_Status = rep(name)
def get(name: Document.Node.Name): Option[Node_Status] = rep.get(name)
+ def present: List[(Document.Node.Name, Node_Status)] =
+ for { name <- nodes.topological_order; node_status <- get(name) }
+ yield (name, node_status)
+
def quasi_consolidated(name: Document.Node.Name): Boolean =
rep.get(name) match {
case Some(st) => st.quasi_consolidated
@@ -249,24 +252,23 @@
state: Document.State,
version: Document.Version,
domain: Option[Set[Document.Node.Name]] = None,
- trim: Boolean = false): Option[Nodes_Status.Update] =
+ trim: Boolean = false): (Boolean, Nodes_Status) =
{
- val nodes = version.nodes
+ val nodes1 = version.nodes
val update_iterator =
for {
- name <- domain.getOrElse(nodes.domain).iterator
+ name <- domain.getOrElse(nodes1.domain).iterator
if !Sessions.is_hidden(name) &&
!session_base.loaded_theory(name) &&
- !nodes.is_suppressed(name) &&
- !nodes(name).is_empty
+ !nodes1.is_suppressed(name) &&
+ !nodes1(name).is_empty
st = Document_Status.Node_Status.make(state, version, name)
if !rep.isDefinedAt(name) || rep(name) != st
} yield (name -> st)
val rep1 = rep ++ update_iterator
- val rep2 = if (trim) rep1 -- rep1.keysIterator.filterNot(nodes.domain) else rep1
+ val rep2 = if (trim) rep1 -- rep1.keysIterator.filterNot(nodes1.domain) else rep1
- if (rep == rep2) None
- else Some(new Nodes_Status(rep2), version.nodes.topological_order.filter(rep2.keySet))
+ (rep != rep2, new Nodes_Status(rep2, nodes1))
}
override def hashCode: Int = rep.hashCode
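
Nodes_Status.update now always yields a (Boolean, Nodes_Status) pair instead of the former
Option[Nodes_Status.Update]. A minimal usage sketch, mirroring the jEdit dockable further
down; it assumes a mutable nodes_status field plus session_base, state and version values
already in scope (all of these names are assumptions, not part of this change).

    // All free names here (nodes_status, session_base, state, version) are assumed context.
    val (changed, nodes_status1) =
      nodes_status.update(session_base, state, version, trim = true)
    nodes_status = nodes_status1   // the new value is kept unconditionally
    if (changed)                   // redraw or notify only when something actually changed
      for ((name, st) <- nodes_status1.present)
        println(name.theory + ": " + st.percentage + "%")
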
--- a/src/Pure/System/progress.scala Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Pure/System/progress.scala Wed Sep 05 05:05:26 2018 +0200
@@ -22,7 +22,7 @@
def echo_if(cond: Boolean, msg: String) { if (cond) echo(msg) }
def theory(session: String, theory: String) {}
def theory_percentage(session: String, theory: String, percentage: Int) {}
- def nodes_status(nodes_status: Document_Status.Nodes_Status, names: List[Document.Node.Name]) {}
+ def nodes_status(nodes_status: Document_Status.Nodes_Status) {}
def echo_warning(msg: String) { echo(Output.warning_text(msg)) }
def echo_error_message(msg: String) { echo(Output.error_message_text(msg)) }
--- a/src/Pure/Thy/thy_resources.scala Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Pure/Thy/thy_resources.scala Wed Sep 05 05:05:26 2018 +0200
@@ -102,6 +102,7 @@
master_dir: String = "",
check_delay: Time = Time.seconds(default_check_delay),
check_limit: Int = 0,
+ watchdog_timeout: Time = Time.zero,
nodes_status_delay: Time = Time.seconds(default_nodes_status_delay),
id: UUID = UUID(),
progress: Progress = No_Progress): Theories_Result =
@@ -115,19 +116,23 @@
}
val dep_theories_set = dep_theories.toSet
- val nodes_status_update = Synchronized(Document_Status.Nodes_Status.empty_update)
+ val current_nodes_status = Synchronized(Document_Status.Nodes_Status.empty)
val result = Future.promise[Theories_Result]
+ val watchdog_time = Synchronized(Time.now())
+ def watchdog: Boolean =
+ watchdog_timeout > Time.zero && Time.now() - watchdog_time.value > watchdog_timeout
+
def check_state(beyond_limit: Boolean = false)
{
val state = session.current_state()
state.stable_tip_version match {
case Some(version) =>
- if (beyond_limit ||
+ if (beyond_limit || watchdog ||
dep_theories.forall(name =>
state.node_consolidated(version, name) ||
- nodes_status_update.value._1.quasi_consolidated(name)))
+ current_nodes_status.value.quasi_consolidated(name)))
{
val nodes =
for (name <- dep_theories)
@@ -152,15 +157,15 @@
}
}
- val theories_progress = Synchronized(Set.empty[Document.Node.Name])
+ val consumer =
+ {
+ val theories_progress = Synchronized(Set.empty[Document.Node.Name])
- val delay_nodes_status =
- Standard_Thread.delay_first(nodes_status_delay max Time.zero) {
- val (nodes_status, names) = nodes_status_update.value
- progress.nodes_status(nodes_status, names)
- }
+ val delay_nodes_status =
+ Standard_Thread.delay_first(nodes_status_delay max Time.zero) {
+ progress.nodes_status(current_nodes_status.value)
+ }
- val consumer =
Session.Consumer[Session.Commands_Changed](getClass.getName) {
case changed =>
if (changed.nodes.exists(dep_theories_set)) {
@@ -168,29 +173,33 @@
val state = snapshot.state
val version = snapshot.version
- nodes_status_update.change(
- { case upd @ (nodes_status, _) =>
+ watchdog_time.change(_ => Time.now())
+
+ val theory_percentages =
+ current_nodes_status.change_result(nodes_status =>
+ {
val domain =
if (nodes_status.is_empty) dep_theories_set
else changed.nodes.iterator.filter(dep_theories_set).toSet
- val upd1 @ (nodes_status1, names1) =
+ val (nodes_status_changed, nodes_status1) =
nodes_status.update(resources.session_base, state, version,
- domain = Some(domain), trim = changed.assignment).getOrElse(upd)
+ domain = Some(domain), trim = changed.assignment)
- for {
- name <- names1.iterator if changed.nodes.contains(name)
- p = nodes_status.get(name).map(_.percentage)
- p1 = nodes_status1.get(name).map(_.percentage)
- if p != p1 && p1.isDefined && p1.get > 0
- } progress.theory_percentage("", name.theory, p1.get)
-
- if (nodes_status_delay >= Time.zero && upd != upd1) {
+ if (nodes_status_delay >= Time.zero && nodes_status_changed) {
delay_nodes_status.invoke
}
- upd1
- })
+ val progress_percentage =
+ (for {
+ (name, node_status) <- nodes_status1.present.iterator
+ if changed.nodes.contains(name)
+ p1 = node_status.percentage
+ if p1 > 0 && Some(p1) != nodes_status.get(name).map(_.percentage)
+ } yield (name.theory, p1)).toList
+
+ (progress_percentage, nodes_status1)
+ })
val check_theories =
(for {
@@ -210,18 +219,22 @@
initialized.map(_.theory).sorted.foreach(progress.theory("", _))
}
+ for ((theory, percentage) <- theory_percentages)
+ progress.theory_percentage("", theory, percentage)
+
check_state()
}
}
+ }
try {
session.commands_changed += consumer
resources.load_theories(session, id, dep_theories, progress)
result.join_result
check_progress.cancel
- session.commands_changed -= consumer
}
finally {
+ session.commands_changed -= consumer
resources.unload_theories(session, id, dep_theories)
}
--- a/src/Pure/Tools/server.scala Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Pure/Tools/server.scala Wed Sep 05 05:05:26 2018 +0200
@@ -273,10 +273,11 @@
context.writeln(Progress.theory_message(session, theory),
(List("session" -> session, "theory" -> theory) ::: more.toList):_*)
- override def nodes_status(
- nodes_status: Document_Status.Nodes_Status, names: List[Document.Node.Name])
+ override def nodes_status(nodes_status: Document_Status.Nodes_Status)
{
- val json = names.map(name => name.json + ("status" -> nodes_status(name).json))
+ val json =
+ for ((name, node_status) <- nodes_status.present)
+ yield name.json + ("status" -> node_status.json)
context.notify(JSON.Object(Markup.KIND -> Markup.NODES_STATUS, Markup.NODES_STATUS -> json))
}
--- a/src/Pure/Tools/server_commands.scala Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Pure/Tools/server_commands.scala Wed Sep 05 05:05:26 2018 +0200
@@ -160,6 +160,7 @@
export_pattern: String = "",
check_delay: Double = Thy_Resources.default_check_delay,
check_limit: Int = 0,
+ watchdog_timeout: Double = 0.0,
nodes_status_delay: Double = Thy_Resources.default_nodes_status_delay)
def unapply(json: JSON.T): Option[Args] =
@@ -172,13 +173,14 @@
export_pattern <- JSON.string_default(json, "export_pattern")
check_delay <- JSON.double_default(json, "check_delay", Thy_Resources.default_check_delay)
check_limit <- JSON.int_default(json, "check_limit")
+ watchdog_timeout <- JSON.double_default(json, "watchdog_timeout")
nodes_status_delay <-
JSON.double_default(json, "nodes_status_delay", Thy_Resources.default_nodes_status_delay)
}
yield {
Args(session_id, theories, master_dir = master_dir, pretty_margin = pretty_margin,
unicode_symbols = unicode_symbols, export_pattern = export_pattern,
- check_delay = check_delay, check_limit = check_limit,
+ check_delay = check_delay, check_limit = check_limit, watchdog_timeout = watchdog_timeout,
nodes_status_delay = nodes_status_delay)
}
@@ -190,6 +192,7 @@
val result =
session.use_theories(args.theories, master_dir = args.master_dir,
check_delay = Time.seconds(args.check_delay), check_limit = args.check_limit,
+ watchdog_timeout = Time.seconds(args.watchdog_timeout),
nodes_status_delay = Time.seconds(args.nodes_status_delay),
id = id, progress = progress)
--- a/src/Tools/jEdit/src/theories_dockable.scala Wed Sep 05 05:05:00 2018 +0200
+++ b/src/Tools/jEdit/src/theories_dockable.scala Wed Sep 05 05:05:26 2018 +0200
@@ -217,11 +217,12 @@
val session_base = PIDE.resources.session_base
val snapshot = PIDE.session.snapshot()
- for {
- (nodes_status1, nodes_list1) <-
- nodes_status.update(
- session_base, snapshot.state, snapshot.version, domain = domain, trim = trim)
- } { nodes_status = nodes_status1; status.listData = nodes_list1 }
+ val (nodes_status_changed, nodes_status1) =
+ nodes_status.update(
+ session_base, snapshot.state, snapshot.version, domain = domain, trim = trim)
+
+ nodes_status = nodes_status1
+ if (nodes_status_changed) status.listData = nodes_status1.present.map(_._1)
}