merged
author wenzelm
date Sun, 15 Sep 2019 17:24:31 +0200
changeset 70706 374caac3d624
parent 70694 ae37b8fbf023 (current diff)
parent 70705 437da7b72b5e (diff)
child 70707 125705f5965f
child 70708 3e11f35496b3
child 70710 3f557ed88fd6
--- a/src/Pure/Concurrent/single_assignment.ML	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/Pure/Concurrent/single_assignment.ML	Sun Sep 15 17:24:31 2019 +0200
@@ -28,32 +28,39 @@
 
 fun var name = Var {name = name, state = Unsynchronized.ref (init_state ())};
 
-fun peek (Var {state, ...}) = (case ! state of Set x => SOME x | Unset _ => NONE);
+fun peek (Var {name, state}) =
+  (case ! state of
+    Set x => SOME x
+  | Unset {lock, ...} =>
+      Multithreading.synchronized name lock (fn () =>
+        (case ! state of
+          Set x => SOME x
+        | Unset _ => NONE)));
 
-fun await (v as Var {name, state}) =
+fun await (Var {name, state}) =
   (case ! state of
     Set x => x
   | Unset {lock, cond} =>
       Multithreading.synchronized name lock (fn () =>
         let
           fun wait () =
-            (case peek v of
-              NONE =>
+            (case ! state of
+              Unset _ =>
                 (case Multithreading.sync_wait NONE cond lock of
                   Exn.Res _ => wait ()
                 | Exn.Exn exn => Exn.reraise exn)
-            | SOME x => x);
+            | Set x => x);
         in wait () end));
 
 fun assign_fail name = raise Fail ("Duplicate assignment to " ^ name);
-fun assign (v as Var {name, state}) x =
+fun assign (Var {name, state}) x =
   (case ! state of
     Set _ => assign_fail name
   | Unset {lock, cond} =>
       Multithreading.synchronized name lock (fn () =>
-        (case peek v of
-          SOME _ => assign_fail name
-        | NONE =>
+        (case ! state of
+          Set _ => assign_fail name
+        | Unset _ =>
             Thread_Attributes.uninterruptible (fn _ => fn () =>
              (state := Set x; RunCall.clearMutableBit state; ConditionVar.broadcast cond)) ())));
 
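The refactoring above lets peek fall back to a synchronized re-check when the variable still looks unset, while await and assign re-read the state directly under the lock instead of going through peek. A minimal Scala sketch of this double-checked single-assignment pattern (a hypothetical class for illustration, not the Isabelle/ML module):

  // Hypothetical sketch: single-assignment variable with a lock-free fast
  // path and a synchronized re-check, mirroring the ML change above.
  final class SingleAssignment[A](name: String) {
    @volatile private var state: Option[A] = None
    private val lock = new Object

    // Fast path reads without the lock, slow path re-checks under it.
    def peek: Option[A] =
      state match {
        case some @ Some(_) => some
        case None => lock.synchronized { state }
      }

    // Block until a value has been assigned.
    def await: A =
      state.getOrElse(lock.synchronized {
        while (state.isEmpty) lock.wait()  // woken by assign's notifyAll
        state.get
      })

    def assign(x: A): Unit =
      lock.synchronized {
        if (state.isDefined) throw new IllegalStateException("Duplicate assignment to " + name)
        state = Some(x)
        lock.notifyAll()
      }
  }
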
--- a/src/Pure/General/graph.scala	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/Pure/General/graph.scala	Sun Sep 15 17:24:31 2019 +0200
@@ -191,6 +191,8 @@
   def restrict(pred: Key => Boolean): Graph[Key, A] =
     (this /: iterator){ case (graph, (x, _)) => if (!pred(x)) graph.del_node(x) else graph }
 
+  def exclude(pred: Key => Boolean): Graph[Key, A] = restrict(name => !pred(name))
+
 
   /* edge operations */
 
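The new exclude is just restrict with the predicate negated: it removes every node that satisfies pred, together with its edges. A hedged usage sketch, assuming the Graph.string constructor and the new_node/add_edge operations from this file:

  // Build a small chain a -> b -> c and drop "b".
  val g =
    Graph.string[Unit]
      .new_node("a", ()).new_node("b", ()).new_node("c", ())
      .add_edge("a", "b").add_edge("b", "c")

  val g1 = g.exclude(Set("b"))   // same as g.restrict(name => name != "b")
  // g1 keeps nodes "a" and "c"; the edges through "b" are gone
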
--- a/src/Pure/PIDE/document.ML	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/Pure/PIDE/document.ML	Sun Sep 15 17:24:31 2019 +0200
@@ -193,15 +193,13 @@
   map_node (fn (header, keywords, perspective, entries, result, _) =>
     (header, keywords, perspective, entries, result, Lazy.lazy I));
 
-fun get_consolidated (Node {consolidated, ...}) = consolidated;
+fun commit_consolidated (Node {consolidated, ...}) =
+  (Lazy.force consolidated; Output.status [Markup.markup_only Markup.consolidated]);
 
-val is_consolidated = Lazy.is_finished o get_consolidated;
-
-fun commit_consolidated node =
-  (Lazy.force (get_consolidated node); Output.status [Markup.markup_only Markup.consolidated]);
+fun consolidated_node (Node {consolidated, ...}) = Lazy.is_finished consolidated;
 
 fun could_consolidate node =
-  not (is_consolidated node) andalso is_some (finished_result_theory node);
+  not (consolidated_node node) andalso is_some (finished_result_theory node);
 
 fun get_node nodes name = String_Graph.get_node nodes name
   handle String_Graph.UNDEF _ => empty_node;
@@ -299,14 +297,14 @@
       (String_Graph.all_succs nodes (map_filter (fn (a, Deps _) => SOME a | _ => NONE) edits))
       nodes);
 
-fun is_suppressed_node (nodes: node String_Graph.T) (name, node) =
+fun suppressed_node (nodes: node String_Graph.T) (name, node) =
   String_Graph.is_maximal nodes name andalso is_empty_node node;
 
 fun put_node (name, node) (Version nodes) =
   let
     val nodes1 = update_node name (K node) nodes;
     val nodes2 =
-      if is_suppressed_node nodes1 (name, node)
+      if suppressed_node nodes1 (name, node)
       then String_Graph.del_node name nodes1
       else nodes1;
   in Version nodes2 end;
--- a/src/Pure/PIDE/document.scala	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/Pure/PIDE/document.scala	Sun Sep 15 17:24:31 2019 +0200
@@ -882,7 +882,7 @@
         blobs = blobs1,
         commands = commands1,
         execs = execs1,
-        commands_redirection = commands_redirection.restrict(commands1.isDefinedAt(_)),
+        commands_redirection = commands_redirection.restrict(commands1.keySet),
         assignments = assignments1,
         history = history.purge(versions1),
         removing_versions = false)
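The simplification relies on Scala sets being functions: a Map's keySet is a Set[Key], and Set[Key] extends Key => Boolean, so it can be passed directly where restrict expects a predicate. A small illustration with plain collections (not the Document types):

  // Set[A] extends A => Boolean, so keySet doubles as a predicate.
  val commands = Map(1L -> "cmd1", 2L -> "cmd2")
  val pred1: Long => Boolean = commands.isDefinedAt(_)   // old formulation
  val pred2: Long => Boolean = commands.keySet           // new formulation, equivalent
  assert(pred1(1L) == pred2(1L) && pred1(3L) == pred2(3L))
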
--- a/src/Pure/PIDE/headless.scala	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/Pure/PIDE/headless.scala	Sun Sep 15 17:24:31 2019 +0200
@@ -74,10 +74,7 @@
           case current :: rest =>
             val dep_graph1 =
               if (rest.isEmpty) dep_graph
-              else {
-                val exclude = dep_graph.all_succs(rest).toSet
-                dep_graph.restrict(name => !exclude(name))
-              }
+              else dep_graph.exclude(dep_graph.all_succs(rest).toSet)
             dep_graph1.all_succs(List(current))
         }
 
@@ -141,7 +138,7 @@
     /* theories */
 
     private sealed case class Use_Theories_State(
-      dependencies: resources.Dependencies[Unit],
+      dep_graph: Document.Node.Name.Graph[Unit],
       checkpoints_state: Checkpoints_State,
       watchdog_timeout: Time,
       commit: Option[(Document.Snapshot, Document_Status.Node_Status) => Unit],
@@ -150,8 +147,6 @@
       already_committed: Map[Document.Node.Name, Document_Status.Node_Status] = Map.empty,
       result: Option[Exn.Result[Use_Theories_Result]] = None)
     {
-      def dep_graph: Document.Node.Name.Graph[Unit] = dependencies.theory_graph
-
       def update(new_nodes_status: Document_Status.Nodes_Status): Use_Theories_State =
         copy(last_update = Time.now(), nodes_status = new_nodes_status)
 
@@ -166,8 +161,35 @@
       def cancel_result: Use_Theories_State =
         if (finished_result) this else copy(result = Some(Exn.Exn(Exn.Interrupt())))
 
-      def clean: Set[Document.Node.Name] =
-        already_committed.keySet -- checkpoints_state.nodes
+      def clean_theories: (List[Document.Node.Name], Use_Theories_State) =
+      {
+        @tailrec def frontier(base: List[Document.Node.Name], front: Set[Document.Node.Name])
+          : Set[Document.Node.Name] =
+        {
+          val add = base.filter(name => dep_graph.imm_succs(name).forall(front))
+          if (add.isEmpty) front
+          else {
+            val preds = add.map(dep_graph.imm_preds)
+            val base1 = (preds.head /: preds.tail)(_ ++ _).toList.filter(already_committed.keySet)
+            frontier(base1, front ++ add)
+          }
+        }
+
+        if (already_committed.isEmpty) (Nil, this)
+        else {
+          val base =
+            (for {
+              (name, (_, (_, succs))) <- dep_graph.iterator
+              if succs.isEmpty && already_committed.isDefinedAt(name)
+            } yield name).toList
+          val clean = frontier(base, Set.empty)
+          if (clean.isEmpty) (Nil, this)
+          else {
+            (dep_graph.topological_order.filter(clean),
+              copy(dep_graph = dep_graph.exclude(clean)))
+          }
+        }
+      }
 
       def check(state: Document.State, version: Document.Version, beyond_limit: Boolean)
         : (List[Document.Node.Name], Use_Theories_State) =
@@ -195,8 +217,8 @@
 
         def finished_theory(name: Document.Node.Name): Boolean =
           loaded_theory(name) ||
-          already_committed1.isDefinedAt(name) ||
-          state.node_consolidated(version, name)
+          (if (commit.isDefined) already_committed1.isDefinedAt(name)
+           else state.node_consolidated(version, name))
 
         val result1 =
           if (!finished_result &&
@@ -263,7 +285,8 @@
           Checkpoints_State.init(
             if (checkpoints.isEmpty) Nil
             else dependencies.theory_graph.topological_order.filter(checkpoints(_)))
-        Synchronized(Use_Theories_State(dependencies, checkpoints_state, watchdog_timeout, commit))
+        Synchronized(
+          Use_Theories_State(dependencies.theory_graph, checkpoints_state, watchdog_timeout, commit))
       }
 
       def check_state(beyond_limit: Boolean = false)
@@ -300,7 +323,11 @@
 
         val delay_commit_clean =
           Standard_Thread.delay_first(commit_cleanup_delay max Time.zero) {
-            resources.clean_theories(session, id, use_theories_state.value.clean)
+            val clean_theories = use_theories_state.change_result(_.clean_theories)
+            if (clean_theories.nonEmpty) {
+              progress.echo("Removing " + clean_theories.length + " theories ...")
+              resources.clean_theories(session, id, clean_theories)
+            }
           }
 
         Session.Consumer[Session.Commands_Changed](getClass.getName) {
@@ -539,23 +566,6 @@
 
         ((purged, retained), remove_theories(purged))
       }
-
-      def frontier_theories(clean: Set[Document.Node.Name]): Set[Document.Node.Name] =
-      {
-        @tailrec def frontier(base: List[Document.Node.Name], front: Set[Document.Node.Name])
-          : Set[Document.Node.Name] =
-        {
-          val add = base.filter(b => theory_graph.imm_succs(b).forall(front))
-          if (add.isEmpty) front
-          else {
-            val pre_add = add.map(theory_graph.imm_preds)
-            val base1 = (pre_add.head /: pre_add.tail)(_ ++ _).toList.filter(clean)
-            frontier(base1, front ++ add)
-          }
-        }
-        if (clean.isEmpty) Set.empty
-        else frontier(theory_graph.maximals.filter(clean), Set.empty)
-      }
     }
   }
 
@@ -656,18 +666,11 @@
       state.change(_.unload_theories(session, id, theories))
     }
 
-    def clean_theories(session: Session, id: UUID.T, clean: Set[Document.Node.Name])
+    def clean_theories(session: Session, id: UUID.T, theories: List[Document.Node.Name])
     {
       state.change(st =>
-        {
-          val frontier = st.frontier_theories(clean).toList
-          if (frontier.isEmpty) st
-          else {
-            val st1 = st.unload_theories(session, id, frontier)
-            val (_, st2) = st1.purge_theories(session, frontier)
-            st2
-          }
-        })
+        st.unload_theories(session, id, theories).purge_theories(session, theories)._2
+      )
     }
 
     def purge_theories(session: Session, nodes: Option[List[Document.Node.Name]])
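The frontier_theories computation removed above now lives, slightly generalized, in Use_Theories_State.clean_theories and works on the state's own dep_graph: starting from committed theories with no successors, it repeatedly adds nodes all of whose immediate successors are already in the front, restricted to committed theories, and finally excludes the result from dep_graph. A self-contained sketch of the same idea over plain adjacency maps (hypothetical names, not the headless API):

  import scala.annotation.tailrec

  // succs/preds: immediate successors/predecessors per node (defined for every node);
  // committed: nodes that have already been committed.
  def cleanable(
      succs: Map[String, Set[String]],
      preds: Map[String, Set[String]],
      committed: Set[String]): Set[String] =
  {
    @tailrec def frontier(base: List[String], front: Set[String]): Set[String] = {
      val add = base.filter(name => succs(name).forall(front))
      if (add.isEmpty) front
      else {
        val base1 = add.flatMap(preds).distinct.filter(committed)
        frontier(base1, front ++ add)
      }
    }
    frontier(committed.toList.filter(name => succs(name).isEmpty), Set.empty)
  }

For a chain a -> b -> c with all three committed and nothing else depending on c, the front grows from c back to a, so the whole chain becomes eligible for unloading.
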
--- a/src/Pure/Tools/dump.scala	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/Pure/Tools/dump.scala	Sun Sep 15 17:24:31 2019 +0200
@@ -212,9 +212,9 @@
         val use_theories_result =
           session.use_theories(used_theories.map(_.theory),
             unicode_symbols = unicode_symbols,
-            share_common_data = false /* FIXME true */,
+            share_common_data = true,
             progress = progress,
-            checkpoints = Set.empty /* FIXME deps.dump_checkpoints */,
+            checkpoints = deps.dump_checkpoints,
             commit = Some(Consumer.apply _))
 
         val bad_theories = Consumer.shutdown()
--- a/src/ZF/ROOT	Fri Sep 13 12:46:36 2019 +0100
+++ b/src/ZF/ROOT	Sun Sep 15 17:24:31 2019 +0200
@@ -44,9 +44,8 @@
   "
   sessions
     FOL
-  theories [dump_checkpoint]
+  theories
     ZF (global)
-  theories
     ZFC (global)
   document_files "root.tex"