more informative trace;

author     wenzelm
date       Sun, 16 Jul 2023 11:29:23 +0200
changeset  78356:974dbe256a37
parent     78355:9fbc6a043268
child      78357:0cecb7236298

src/Pure/General/sql.scala
src/Pure/ML/ml_heap.scala
src/Pure/System/host.scala
src/Pure/System/progress.scala
src/Pure/Thy/document_build.scala
src/Pure/Thy/export.scala
src/Pure/Thy/store.scala
src/Pure/Tools/build_process.scala
--- a/src/Pure/General/sql.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/General/sql.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -256,10 +256,11 @@
     def transaction_lock[A](
       db: Database,
       create: Boolean = false,
+      label: String = "",
       log: Logger = new System_Logger,
       synchronized: Boolean = false,
     )(body: => A): A = {
-      def run: A = db.transaction_lock(tables, create = create, log = log)(body)
+      def run: A = db.transaction_lock(tables, create = create, label = label, log = log)(body)
       if (synchronized) db.synchronized { run } else run
     }
 
@@ -461,6 +462,7 @@
     def transaction_lock[A](
       tables: Tables,
       create: Boolean = false,
+      label: String = "",
       log: Logger = new System_Logger
     )(body: => A): A = {
       val trace_enabled = System.getProperty("isabelle.transaction_log", "") == "true"
@@ -471,7 +473,8 @@
       def trace(msg: => String, nl: Boolean = false): Unit = {
         if (trace_enabled) {
           val trace_time = Time.now() - trace_start
-          log((if (nl) "\n" else "") + trace_time + " transaction " + trace_count + ": " + msg)
+          log((if (nl) "\n" else "") + trace_time + " transaction " +
+            (if (label.isEmpty) "" else label + " ") + trace_count + ": " + msg)
         }
       }
 
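
The effect of the new `label` parameter on the trace output can be shown in isolation. The following is a minimal standalone sketch, assuming nothing from Isabelle beyond the message construction in the hunk above; `TraceDemo`, the stderr logger, and the sample labels are illustrative.

    // Standalone sketch: how an optional label changes a trace line.
    // The message construction mirrors SQL.Database.transaction_lock above;
    // the logger is simplified to stderr.
    object TraceDemo {
      def main(args: Array[String]): Unit = {
        val trace_enabled =
          System.getProperty("isabelle.transaction_log", "") == "true"
        val trace_start = System.currentTimeMillis()
        var trace_count = 0

        def trace(label: String, msg: => String): Unit =
          if (trace_enabled) {
            val trace_time = System.currentTimeMillis() - trace_start
            // An empty label keeps the old message shape; a non-empty one is
            // spliced in between "transaction" and the counter.
            Console.err.println(
              trace_time + "ms transaction " +
                (if (label.isEmpty) "" else label + " ") + trace_count + ": " + msg)
          }

        trace_count += 1
        trace("", "begin")               // old shape: "...ms transaction 1: begin"
        trace("ML_Heap.store2", "begin") // labeled: "...ms transaction ML_Heap.store2 1: begin"
      }
    }

Running with the JVM property from the diff enabled (e.g. scala -Disabelle.transaction_log=true TraceDemo.scala) prints both message shapes.
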
--- a/src/Pure/ML/ml_heap.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/ML/ml_heap.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -123,12 +123,12 @@
   }
 
   def clean_entry(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true, synchronized = true) {
+    Data.transaction_lock(db, create = true, synchronized = true, label = "ML_Heap.clean_entry") {
       Data.clean_entry(db, session_name)
     }
 
   def get_entry(db: SQL.Database, session_name: String): Option[SHA1.Digest] =
-    Data.transaction_lock(db, create = true, synchronized = true) {
+    Data.transaction_lock(db, create = true, synchronized = true, label = "ML_Heap.get_entry") {
       Data.get_entry(db, session_name)
     }
 
@@ -149,7 +149,7 @@
         val step = (size.toDouble / slices.toDouble).ceil.toLong
 
         try {
-          Data.transaction_lock(db, create = true, synchronized = true) {
+          Data.transaction_lock(db, create = true, synchronized = true, label = "ML_Heap.store1") {
             Data.prepare_entry(db, session_name)
           }
 
@@ -160,17 +160,17 @@
             val content =
               Bytes.read_file(heap.file, offset = offset, limit = limit)
                 .compress(cache = cache)
-            Data.transaction_lock(db, synchronized = true) {
+            Data.transaction_lock(db, synchronized = true, label = "ML_Heap.store2") {
               Data.write_entry(db, session_name, i, content)
             }
           }
 
-          Data.transaction_lock(db, synchronized = true) {
+          Data.transaction_lock(db, synchronized = true, label = "ML_Heap.store3") {
             Data.finish_entry(db, session_name, size, digest)
           }
         }
         catch { case exn: Throwable =>
-          Data.transaction_lock(db, create = true, synchronized = true) {
+          Data.transaction_lock(db, create = true, synchronized = true, label = "ML_Heap.store4") {
             Data.clean_entry(db, session_name)
           }
           throw exn
@@ -188,7 +188,7 @@
     database match {
       case None =>
       case Some(db) =>
-        Data.transaction_lock(db, create = true, synchronized = true) {
+        Data.transaction_lock(db, create = true, synchronized = true, label = "ML_Heap.restore") {
           val db_digest = Data.get_entry(db, session_name)
           val file_digest = read_file_digest(heap)
 
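
The four labels in ML_Heap.store mark distinct transaction phases: prepare (store1), one write per heap slice (store2), finish (store3), and cleanup on failure (store4). A schematic of that control flow, with a hypothetical stub standing in for Data.transaction_lock and the actual database work elided:

    // Schematic of the store1..store4 phasing; transaction_lock is a stub
    // that only prints its label, so each phase shows up once per call.
    object StorePhases {
      def transaction_lock[A](label: String)(body: => A): A = {
        println(s"transaction $label")
        body
      }

      def store(slices: Int): Unit =
        try {
          transaction_lock("ML_Heap.store1") { /* prepare entry */ }
          for (i <- 0 until slices) {
            transaction_lock("ML_Heap.store2") { /* write slice i */ }
          }
          transaction_lock("ML_Heap.store3") { /* finish entry */ }
        }
        catch {
          case exn: Throwable =>
            transaction_lock("ML_Heap.store4") { /* clean broken entry */ }
            throw exn
        }

      def main(args: Array[String]): Unit = store(slices = 3)
    }

With the labels in place, a trace can attribute time to each phase instead of showing four indistinguishable transactions.
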
--- a/src/Pure/System/host.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/System/host.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -134,7 +134,7 @@
     else {
       val available = available_nodes.zipWithIndex
       val used = used_nodes
-      Data.transaction_lock(db, create = true) {
+      Data.transaction_lock(db, create = true, label = "Host.next_numa_node") {
         val numa_next = Data.read_numa_next(db, hostname)
         val numa_index = available.collectFirst({ case (n, i) if n == numa_next => i }).getOrElse(0)
         val candidates = available.drop(numa_index) ::: available.take(numa_index)
--- a/src/Pure/System/progress.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/System/progress.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -259,7 +259,7 @@
   def agent_uuid: String = synchronized { _agent_uuid }
 
   private def init(): Unit = synchronized {
-    Progress.Data.transaction_lock(db, create = true) {
+    Progress.Data.transaction_lock(db, create = true, label = "Database_Progress.init") {
       Progress.Data.read_progress_context(db, context_uuid) match {
         case Some(context) =>
           _context = context
@@ -296,7 +296,7 @@
 
   def exit(close: Boolean = false): Unit = synchronized {
     if (_context > 0) {
-      Progress.Data.transaction_lock(db) {
+      Progress.Data.transaction_lock(db, label = "Database_Progress.exit") {
         Progress.Data.update_agent(db, _agent_uuid, _seen, stop_now = true)
       }
       _context = 0
@@ -306,7 +306,7 @@
 
   private def sync_database[A](body: => A): A = synchronized {
     require(_context > 0)
-    Progress.Data.transaction_lock(db) {
+    Progress.Data.transaction_lock(db, label = "Database_Progress.sync") {
       val stopped_db = Progress.Data.read_progress_stopped(db, _context)
       val stopped = base_progress.stopped
 
--- a/src/Pure/Thy/document_build.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/Thy/document_build.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -104,13 +104,19 @@
   }
 
   def clean_session(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true) { Data.clean_session(db, session_name) }
+    Data.transaction_lock(db, create = true, label = "Document_Build.clean_session") {
+      Data.clean_session(db, session_name)
+    }
 
   def read_document(db: SQL.Database, session_name: String, name: String): Option[Document_Output] =
-    Data.transaction_lock(db) { Data.read_document(db, session_name, name) }
+    Data.transaction_lock(db, label = "Document_Build.read_document") {
+      Data.read_document(db, session_name, name)
+    }
 
   def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit =
-    Data.transaction_lock(db) { Data.write_document(db, session_name, doc) }
+    Data.transaction_lock(db, label = "Document_Build.write_document") {
+      Data.write_document(db, session_name, doc)
+    }
 
 
   /* background context */
--- a/src/Pure/Thy/export.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/Thy/export.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -204,16 +204,24 @@
   }
 
   def clean_session(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true) { Data.clean_session(db, session_name) }
+    Data.transaction_lock(db, create = true, label = "Export.clean_session") {
+      Data.clean_session(db, session_name)
+    }
 
   def read_theory_names(db: SQL.Database, session_name: String): List[String] =
-    Data.transaction_lock(db) { Data.read_theory_names(db, session_name) }
+    Data.transaction_lock(db, label = "Export.read_theory_names") {
+      Data.read_theory_names(db, session_name)
+    }
 
   def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
-    Data.transaction_lock(db) { Data.read_entry_names(db, session_name) }
+    Data.transaction_lock(db, label = "Export.read_entry_names") {
+      Data.read_entry_names(db, session_name)
+    }
 
   def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
-    Data.transaction_lock(db) { Data.read_entry(db, entry_name, cache) }
+    Data.transaction_lock(db, label = "Export.read_entry") {
+      Data.read_entry(db, entry_name, cache)
+    }
 
 
   /* database consumer thread */
@@ -230,7 +238,7 @@
         consume =
           { (args: List[(Entry, Boolean)]) =>
             val results =
-              Data.transaction_lock(db) {
+              Data.transaction_lock(db, label = "Export.consumer") {
                 for ((entry, strict) <- args)
                 yield {
                   if (progress.stopped) {
--- a/src/Pure/Thy/store.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/Thy/store.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -415,7 +415,11 @@
     Export.clean_session(db, name)
     Document_Build.clean_session(db, name)
 
-    Store.Data.transaction_lock(db, create = true, synchronized = true) {
+    Store.Data.transaction_lock(db,
+      create = true,
+      synchronized = true,
+      label = "Store.clean_session_info"
+    ) {
       val already_defined = session_info_defined(db, name)
 
       db.execute_statement(
@@ -436,36 +440,52 @@
     build_log: Build_Log.Session_Info,
     build: Store.Build_Info
   ): Unit = {
-    Store.Data.transaction_lock(db, synchronized = true) {
+    Store.Data.transaction_lock(db, synchronized = true, label = "Store.write_session_info") {
       Store.Data.write_sources(db, session_name, sources)
       Store.Data.write_session_info(db, cache.compress, session_name, build_log, build)
     }
   }
 
   def read_session_timing(db: SQL.Database, session: String): Properties.T =
-    Store.Data.transaction_lock(db) { Store.Data.read_session_timing(db, session, cache) }
+    Store.Data.transaction_lock(db, label = "Store.read_session_timing") {
+      Store.Data.read_session_timing(db, session, cache)
+    }
 
   def read_command_timings(db: SQL.Database, session: String): Bytes =
-    Store.Data.transaction_lock(db) { Store.Data.read_command_timings(db, session) }
+    Store.Data.transaction_lock(db, label = "Store.read_command_timings") {
+      Store.Data.read_command_timings(db, session)
+    }
 
   def read_theory_timings(db: SQL.Database, session: String): List[Properties.T] =
-    Store.Data.transaction_lock(db) { Store.Data.read_theory_timings(db, session, cache) }
+    Store.Data.transaction_lock(db, label = "Store.read_theory_timings") {
+      Store.Data.read_theory_timings(db, session, cache)
+    }
 
   def read_ml_statistics(db: SQL.Database, session: String): List[Properties.T] =
-    Store.Data.transaction_lock(db) { Store.Data.read_ml_statistics(db, session, cache) }
+    Store.Data.transaction_lock(db, label = "Store.read_ml_statistics") {
+      Store.Data.read_ml_statistics(db, session, cache)
+    }
 
   def read_task_statistics(db: SQL.Database, session: String): List[Properties.T] =
-    Store.Data.transaction_lock(db) { Store.Data.read_task_statistics(db, session, cache) }
+    Store.Data.transaction_lock(db, label = "Store.read_task_statistics") {
+      Store.Data.read_task_statistics(db, session, cache)
+    }
 
   def read_theories(db: SQL.Database, session: String): List[String] =
     read_theory_timings(db, session).flatMap(Markup.Name.unapply)
 
   def read_errors(db: SQL.Database, session: String): List[String] =
-    Store.Data.transaction_lock(db) { Store.Data.read_errors(db, session, cache) }
+    Store.Data.transaction_lock(db, label = "Store.read_errors") {
+      Store.Data.read_errors(db, session, cache)
+    }
 
   def read_build(db: SQL.Database, session: String): Option[Store.Build_Info] =
-    Store.Data.transaction_lock(db) { Store.Data.read_build(db, session) }
+    Store.Data.transaction_lock(db, label = "Store.read_build") {
+      Store.Data.read_build(db, session)
+    }
 
   def read_sources(db: SQL.Database, session: String, name: String = ""): List[Store.Source_File] =
-    Store.Data.transaction_lock(db) { Store.Data.read_sources(db, session, name, cache.compress) }
+    Store.Data.transaction_lock(db, label = "Store.read_sources") {
+      Store.Data.read_sources(db, session, name, cache.compress)
+    }
 }
--- a/src/Pure/Tools/build_process.scala	Sun Jul 16 11:03:10 2023 +0200
+++ b/src/Pure/Tools/build_process.scala	Sun Jul 16 11:29:23 2023 +0200
@@ -824,7 +824,9 @@
   }
 
   def read_builds(db: SQL.Database): List[Build] =
-    Data.transaction_lock(db, create = true) { Data.read_builds(db) }
+    Data.transaction_lock(db, create = true, label = "Build_Process.read_builds") {
+      Data.read_builds(db)
+    }
 }
 
 
@@ -855,7 +857,10 @@
     try {
       for (db <- store.maybe_open_build_database()) yield {
         val store_tables = if (db.is_postgresql) Store.Data.tables else SQL.Tables.empty
-        Build_Process.Data.transaction_lock(db, create = true) {
+        Build_Process.Data.transaction_lock(db,
+          create = true,
+          label = "Build_Process.build_database"
+        ) {
           Build_Process.Data.clean_build(db)
           store_tables.lock(db, create = true)
         }
@@ -903,20 +908,21 @@
 
   private var _state: Build_Process.State = Build_Process.State()
 
-  protected def synchronized_database[A](body: => A): A = synchronized {
-    _build_database match {
-      case None => body
-      case Some(db) =>
-        progress.asInstanceOf[Database_Progress].sync()
-        Build_Process.Data.transaction_lock(db) {
-          _state = Build_Process.Data.pull_database(db, worker_uuid, hostname, _state)
-          val res = body
-          _state =
-            Build_Process.Data.update_database(db, worker_uuid, build_uuid, hostname, _state)
-          res
-        }
+  protected def synchronized_database[A](label: String)(body: => A): A =
+    synchronized {
+      _build_database match {
+        case None => body
+        case Some(db) =>
+          progress.asInstanceOf[Database_Progress].sync()
+          Build_Process.Data.transaction_lock(db, label = label) {
+            _state = Build_Process.Data.pull_database(db, worker_uuid, hostname, _state)
+            val res = body
+            _state =
+              Build_Process.Data.update_database(db, worker_uuid, build_uuid, hostname, _state)
+            res
+          }
+      }
     }
-  }
 
 
   /* policy operations */
@@ -1029,27 +1035,27 @@
   final def is_session_name(job_name: String): Boolean =
     !Long_Name.is_qualified(job_name)
 
-  protected final def start_build(): Unit = synchronized_database {
+  protected final def start_build(): Unit = synchronized_database("Build_Process.start_build") {
     for (db <- _build_database) {
       Build_Process.Data.start_build(db, build_uuid, build_context.ml_platform,
         build_context.sessions_structure.session_prefs)
     }
   }
 
-  protected final def stop_build(): Unit = synchronized_database {
+  protected final def stop_build(): Unit = synchronized_database("Build_Process.stop_build") {
     for (db <- _build_database) {
       Build_Process.Data.stop_build(db, build_uuid)
     }
   }
 
-  protected final def start_worker(): Unit = synchronized_database {
+  protected final def start_worker(): Unit = synchronized_database("Build_Process.start_worker") {
     for (db <- _build_database) {
       _state = _state.inc_serial
       Build_Process.Data.start_worker(db, worker_uuid, build_uuid, _state.serial)
     }
   }
 
-  protected final def stop_worker(): Unit = synchronized_database {
+  protected final def stop_worker(): Unit = synchronized_database("Build_Process.stop_worker") {
     for (db <- _build_database) {
       Build_Process.Data.stamp_worker(db, worker_uuid, _state.serial, stop_now = true)
     }
@@ -1059,16 +1065,18 @@
   /* run */
 
   def run(): Map[String, Process_Result] = {
-    if (build_context.master) synchronized_database { _state = init_state(_state) }
+    if (build_context.master) {
+      synchronized_database("Build_Process.init") { _state = init_state(_state) }
+    }
 
-    def finished(): Boolean = synchronized_database { _state.finished }
+    def finished(): Boolean = synchronized_database("Build_Process.test") { _state.finished }
 
     def sleep(): Unit =
       Isabelle_Thread.interrupt_handler(_ => progress.stop()) {
         build_options.seconds("editor_input_delay").sleep()
       }
 
-    def start_job(): Boolean = synchronized_database {
+    def start_job(): Boolean = synchronized_database("Build_Process.start_job") {
       next_job(_state) match {
         case Some(name) =>
           if (is_session_name(name)) {
@@ -1094,7 +1102,7 @@
 
       try {
         while (!finished()) {
-          synchronized_database {
+          synchronized_database("Build_Process.main") {
             if (progress.stopped) _state.stop_running()
 
             for (job <- _state.finished_running()) {
@@ -1115,7 +1123,7 @@
         if (build_context.master) stop_build()
       }
 
-      synchronized_database {
+      synchronized_database("Build_Process.result") {
         for ((name, result) <- _state.results) yield name -> result.process_result
       }
     }
@@ -1124,7 +1132,7 @@
 
   /* snapshot */
 
-  def snapshot(): Build_Process.Snapshot = synchronized_database {
+  def snapshot(): Build_Process.Snapshot = synchronized_database("Build_Process.snapshot") {
     val (builds, workers) =
       _build_database match {
         case None => (Nil, Nil)
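
The refactored synchronized_database is a curried method: the label is an ordinary first argument and the body remains a by-name block, so every call site keeps its { ... } shape while gaining a label. A self-contained sketch of that signature, assuming a plain JVM lock and println in place of the real state round-trip through the database:

    // Sketch of the curried labeled wrapper; the real method also pulls and
    // pushes _state through Build_Process.Data, which is elided here.
    object SyncDemo {
      private val lock = new Object

      def synchronized_database[A](label: String)(body: => A): A =
        lock.synchronized {
          println(s"transaction $label")
          body  // evaluated while the lock is held; its value is returned
        }

      def main(args: Array[String]): Unit = {
        val finished = synchronized_database("Build_Process.test") { false }
        println(finished)
      }
    }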