clarified signature;
author wenzelm
Mon, 26 Jun 2023 13:01:58 +0200
changeset 78204 0aa5360fa88b
parent 78203 928ef137758c
child 78205 a40ae2df39ad
clarified signature;
src/Pure/ML/ml_heap.scala
src/Pure/Tools/build_job.scala
src/Pure/Tools/build_process.scala
--- a/src/Pure/ML/ml_heap.scala	Mon Jun 26 12:07:28 2023 +0200
+++ b/src/Pure/ML/ml_heap.scala	Mon Jun 26 13:01:58 2023 +0200
@@ -110,29 +110,30 @@
           })
   }
 
-  def clean_entry(db: SQL.Database, name: String): Unit =
-    Data.transaction_lock(db, create = true) { Data.clean_entry(db, name) }
+  def clean_entry(db: SQL.Database, session_name: String): Unit =
+    Data.transaction_lock(db, create = true) { Data.clean_entry(db, session_name) }
 
-  def get_entry(db: SQL.Database, name: String): Option[SHA1.Digest] =
-    Data.transaction_lock(db, create = true) { Data.get_entry(db, name) }
+  def get_entry(db: SQL.Database, session_name: String): Option[SHA1.Digest] =
+    Data.transaction_lock(db, create = true) { Data.get_entry(db, session_name) }
 
   def store(
     database: Option[SQL.Database],
+    session_name: String,
     heap: Path,
     slice: Long,
     cache: Compress.Cache = Compress.Cache.none
   ): SHA1.Digest = {
     val digest = write_file_digest(heap)
     database match {
+      case None =>
       case Some(db) =>
-        val name = heap.file_name
         val size = File.space(heap).bytes - sha1_prefix.length - SHA1.digest_length
 
         val slices = (size.toDouble / slice.toDouble).ceil.toInt
         val step = (size.toDouble / slices.toDouble).ceil.toLong
 
         try {
-          Data.transaction_lock(db, create = true) { Data.prepare_entry(db, name) }
+          Data.transaction_lock(db, create = true) { Data.prepare_entry(db, session_name) }
 
           for (i <- 0 until slices) {
             val j = i + 1
@@ -141,39 +142,42 @@
             val content =
               Bytes.read_file(heap.file, offset = offset, limit = limit)
                 .compress(cache = cache)
-            Data.transaction_lock(db) { Data.write_entry(db, name, i, content) }
+            Data.transaction_lock(db) { Data.write_entry(db, session_name, i, content) }
           }
 
-          Data.transaction_lock(db) { Data.finish_entry(db, name, size, digest) }
+          Data.transaction_lock(db) { Data.finish_entry(db, session_name, size, digest) }
         }
         catch { case exn: Throwable =>
-          Data.transaction_lock(db, create = true) { Data.clean_entry(db, name) }
+          Data.transaction_lock(db, create = true) { Data.clean_entry(db, session_name) }
           throw exn
         }
-      case None =>
     }
     digest
   }
 
   def restore(
-    db: SQL.Database,
+    database: Option[SQL.Database],
+    session_name: String,
     heap: Path,
     cache: Compress.Cache = Compress.Cache.none
   ): Unit = {
-    val name = heap.file_name
-    Data.transaction_lock(db, create = true) {
-      val db_digest = Data.get_entry(db, name)
-      val file_digest = read_file_digest(heap)
+    database match {
+      case None =>
+      case Some(db) =>
+        Data.transaction_lock(db, create = true) {
+          val db_digest = Data.get_entry(db, session_name)
+          val file_digest = read_file_digest(heap)
 
-      if (db_digest.isDefined && db_digest != file_digest) {
-        Isabelle_System.make_directory(heap.expand.dir)
-        Bytes.write(heap, Bytes.empty)
-          for (slice <- Data.read_entry(db, name)) {
-            Bytes.append(heap, slice.uncompress(cache = cache))
+          if (db_digest.isDefined && db_digest != file_digest) {
+            Isabelle_System.make_directory(heap.expand.dir)
+            Bytes.write(heap, Bytes.empty)
+              for (slice <- Data.read_entry(db, session_name)) {
+                Bytes.append(heap, slice.uncompress(cache = cache))
+              }
+            val digest = write_file_digest(heap)
+            if (db_digest.get != digest) error("Incoherent content for file " + heap)
           }
-        val digest = write_file_digest(heap)
-        if (db_digest.get != digest) error("Incoherent content for file " + heap)
-      }
+        }
     }
   }
 }
--- a/src/Pure/Tools/build_job.scala	Mon Jun 26 12:07:28 2023 +0200
+++ b/src/Pure/Tools/build_job.scala	Mon Jun 26 13:01:58 2023 +0200
@@ -454,7 +454,8 @@
           if (process_result.ok && store_heap && heap.is_file) {
             val slice = Space.MiB(options.real("build_database_slice")).bytes
             val digest =
-              using_optional(store.maybe_open_heaps_database())(ML_Heap.store(_, heap, slice))
+              using_optional(store.maybe_open_heaps_database())(
+                ML_Heap.store(_, session_name, heap, slice))
             SHA1.shasum(digest, session_name)
           }
           else SHA1.no_shasum
--- a/src/Pure/Tools/build_process.scala	Mon Jun 26 12:07:28 2023 +0200
+++ b/src/Pure/Tools/build_process.scala	Mon Jun 26 13:01:58 2023 +0200
@@ -906,10 +906,9 @@
     val cancelled = progress.stopped || !ancestor_results.forall(_.ok)
 
     if (!skipped && !cancelled) {
-      using_optional(store.maybe_open_heaps_database()) { database =>
-        database.foreach(
-          ML_Heap.restore(_, store.output_heap(session_name), cache = store.cache.compress))
-      }
+      using_optional(store.maybe_open_heaps_database())(
+        ML_Heap.restore(_, session_name, store.output_heap(session_name),
+          cache = store.cache.compress))
     }
 
     val result_name = (session_name, worker_uuid, build_uuid)