--- a/src/Pure/Build/build_benchmark.scala Thu Feb 22 13:27:15 2024 +0100
+++ b/src/Pure/Build/build_benchmark.scala Thu Feb 22 13:57:13 2024 +0100
@@ -52,12 +52,12 @@
val session = sessions(benchmark_session)
val hierachy = session.ancestors.map(store.output_session(_, store_heap = true))
- ML_Heap.restore(database_server, hierachy, cache = store.cache.compress)
+ for (db <- database_server) ML_Heap.restore(db, hierachy, cache = store.cache.compress)
val local_options = options + "build_database_server=false" + "build_database=false"
benchmark_requirements(local_options, progress)
- ML_Heap.restore(database_server, hierachy, cache = store.cache.compress)
+ for (db <- database_server) ML_Heap.restore(db, hierachy, cache = store.cache.compress)
def get_shasum(session_name: String): SHA1.Shasum = {
val ancestor_shasums = sessions(session_name).ancestors.map(get_shasum)
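Reviewer note: the two call sites above now unwrap the Option[SQL.Database] themselves, since ML_Heap.restore takes a plain SQL.Database. A for-comprehension over an Option is just Option.foreach, so the restore is silently skipped when no database server is configured. A minimal, self-contained sketch of that behaviour (names here are illustrative, not from the Isabelle sources):

// Minimal sketch (illustrative names only): a for-comprehension over an Option
// desugars to Option.foreach, so the body runs only when a value is present.
object OptionForDemo {
  def restore(db: String): Unit = println("restoring via " + db)

  def main(args: Array[String]): Unit = {
    val database_server: Option[String] = Some("server")
    for (db <- database_server) restore(db)        // value present: body runs once
    for (db <- Option.empty[String]) restore(db)   // no database: body skipped
  }
}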
--- a/src/Pure/Build/build_process.scala Thu Feb 22 13:27:15 2024 +0100
+++ b/src/Pure/Build/build_process.scala Thu Feb 22 13:57:13 2024 +0100
@@ -1020,11 +1020,12 @@
val cancelled = progress.stopped || !ancestor_results.forall(_.ok)
if (!skipped && !cancelled) {
- val hierarchy =
- (session_name :: ancestor_results.map(_.name))
- .map(store.output_session(_, store_heap = true))
- ML_Heap.restore(_database_server orElse _heaps_database,
- hierarchy, cache = store.cache.compress)
+ for (db <- _database_server orElse _heaps_database) {
+ val hierarchy =
+ (session_name :: ancestor_results.map(_.name))
+ .map(store.output_session(_, store_heap = true))
+ ML_Heap.restore(db, hierarchy, cache = store.cache.compress)
+ }
}
val result_name = (session_name, worker_uuid, build_uuid)
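Reviewer note: _database_server orElse _heaps_database picks the first defined Option, so the new block prefers the database server and falls back to the heaps database; when neither is defined the whole restore block is skipped. A small sketch under those assumptions (values are made up for illustration):

// Minimal sketch (made-up values): Option.orElse yields the first defined
// alternative, so the loop below prefers the primary database and falls back
// to the secondary one; with both empty the body never runs.
object OrElseDemo {
  def main(args: Array[String]): Unit = {
    val primary: Option[String] = None
    val fallback: Option[String] = Some("heaps.db")
    for (db <- primary.orElse(fallback)) println("using " + db)  // prints: using heaps.db
  }
}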
--- a/src/Pure/ML/ml_heap.scala Thu Feb 22 13:27:15 2024 +0100
+++ b/src/Pure/ML/ml_heap.scala Thu Feb 22 13:57:13 2024 +0100
@@ -241,60 +241,58 @@
   }
 
   def restore(
-    database: Option[SQL.Database],
+    db: SQL.Database,
     sessions: List[Store.Session],
     cache: Compress.Cache = Compress.Cache.none,
     progress: Progress = new Progress
   ): Unit = {
-    database match {
-      case Some(db) if sessions.exists(_.defined) =>
-        private_data.transaction_lock(db, create = true, label = "ML_Heap.restore") {
-          /* heap */
-
-          val defined_heaps =
-            for (session <- sessions; heap <- session.heap)
-              yield session.name -> heap
-
-          val db_digests = private_data.read_digests(db, defined_heaps.map(_._1))
-
-          for ((session_name, heap) <- defined_heaps) {
-            val file_digest = read_file_digest(heap)
-            val db_digest = db_digests.get(session_name)
-            if (db_digest.isDefined && db_digest != file_digest) {
-              progress.echo("Restoring " + session_name + " ...")
-
-              val base_dir = Isabelle_System.make_directory(heap.expand.dir)
-              Isabelle_System.with_tmp_file(session_name + "_", base_dir = base_dir.file) { tmp =>
-                Bytes.write(tmp, Bytes.empty)
-                for (slice <- private_data.read_slices(db, session_name)) {
-                  Bytes.append(tmp, slice.uncompress(cache = cache))
-                }
-                val digest = write_file_digest(tmp)
-                if (db_digest.get == digest) {
-                  Isabelle_System.chmod("a+r", tmp)
-                  Isabelle_System.move_file(tmp, heap)
-                }
-                else error("Incoherent content for session heap " + heap)
-              }
-            }
-          }
-
-
-          /* log_db */
-
-          for (session <- sessions; path <- session.log_db) {
-            val file_uuid = Store.read_build_uuid(path, session.name)
-            private_data.read_log_db(db, session.name, old_uuid = file_uuid) match {
-              case Some(log_db) if file_uuid.isEmpty =>
-                progress.echo("Restoring " + session.log_db_name + " ...")
-                Isabelle_System.make_directory(path.expand.dir)
-                Bytes.write(path, log_db.content)
-              case Some(_) => error("Incoherent content for session database " + path)
-              case None =>
-            }
-          }
-        }
-      case _ =>
+    if (sessions.exists(_.defined)) {
+      private_data.transaction_lock(db, create = true, label = "ML_Heap.restore") {
+        /* heap */
+
+        val defined_heaps =
+          for (session <- sessions; heap <- session.heap)
+            yield session.name -> heap
+
+        val db_digests = private_data.read_digests(db, defined_heaps.map(_._1))
+
+        for ((session_name, heap) <- defined_heaps) {
+          val file_digest = read_file_digest(heap)
+          val db_digest = db_digests.get(session_name)
+          if (db_digest.isDefined && db_digest != file_digest) {
+            progress.echo("Restoring " + session_name + " ...")
+
+            val base_dir = Isabelle_System.make_directory(heap.expand.dir)
+            Isabelle_System.with_tmp_file(session_name + "_", base_dir = base_dir.file) { tmp =>
+              Bytes.write(tmp, Bytes.empty)
+              for (slice <- private_data.read_slices(db, session_name)) {
+                Bytes.append(tmp, slice.uncompress(cache = cache))
+              }
+              val digest = write_file_digest(tmp)
+              if (db_digest.get == digest) {
+                Isabelle_System.chmod("a+r", tmp)
+                Isabelle_System.move_file(tmp, heap)
+              }
+              else error("Incoherent content for session heap " + heap)
+            }
+          }
+        }
+
+
+        /* log_db */
+
+        for (session <- sessions; path <- session.log_db) {
+          val file_uuid = Store.read_build_uuid(path, session.name)
+          private_data.read_log_db(db, session.name, old_uuid = file_uuid) match {
+            case Some(log_db) if file_uuid.isEmpty =>
+              progress.echo("Restoring " + session.log_db_name + " ...")
+              Isabelle_System.make_directory(path.expand.dir)
+              Bytes.write(path, log_db.content)
+            case Some(_) => error("Incoherent content for session database " + path)
+            case None =>
+          }
+        }
+      }
     }
   }
 }
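Reviewer note: the overall shape of the change is to push Option handling from ML_Heap.restore to its callers. The function now demands a SQL.Database and keeps only the sessions.exists(_.defined) guard that belongs to it, while each caller decides what to do when no database is available. A minimal sketch of that refactoring pattern with hypothetical names (not the actual Isabelle API):

// Minimal sketch of the pattern (hypothetical names, not the Isabelle API):
// instead of taking Option[Db] and matching internally, the function requires
// a Db and keeps only its own guard; callers unwrap the Option themselves.
object PushOptionToCaller {
  final case class Db(name: String)

  def restore(db: Db, items: List[Int]): Unit =
    if (items.exists(_ > 0))
      println("restoring " + items.count(_ > 0) + " items via " + db.name)

  def main(args: Array[String]): Unit = {
    val database: Option[Db] = Some(Db("server"))
    for (db <- database) restore(db, List(0, 1, 2))   // prints: restoring 2 items via server
  }
}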