# HG changeset patch
# User wenzelm
# Date 1687854272 -7200
# Node ID fd0430a7b7a47da4980da82a1346abb6acb5eee9
# Parent  dfb172d7e40ef460da165a1d70d7fc85493dd0f6
avoid repeated open_database_server: synchronized transaction_lock;

diff -r dfb172d7e40e -r fd0430a7b7a4 src/Pure/ML/ml_heap.scala
--- a/src/Pure/ML/ml_heap.scala	Tue Jun 27 10:05:33 2023 +0200
+++ b/src/Pure/ML/ml_heap.scala	Tue Jun 27 10:24:32 2023 +0200
@@ -111,10 +111,14 @@
   }

   def clean_entry(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true) { Data.clean_entry(db, session_name) }
+    Data.transaction_lock(db, create = true, synchronized = true) {
+      Data.clean_entry(db, session_name)
+    }

   def get_entry(db: SQL.Database, session_name: String): Option[SHA1.Digest] =
-    Data.transaction_lock(db, create = true) { Data.get_entry(db, session_name) }
+    Data.transaction_lock(db, create = true, synchronized = true) {
+      Data.get_entry(db, session_name)
+    }

   def store(
     database: Option[SQL.Database],
@@ -133,7 +137,9 @@
         val step = (size.toDouble / slices.toDouble).ceil.toLong

         try {
-          Data.transaction_lock(db, create = true) { Data.prepare_entry(db, session_name) }
+          Data.transaction_lock(db, create = true, synchronized = true) {
+            Data.prepare_entry(db, session_name)
+          }

           for (i <- 0 until slices) {
             val j = i + 1
@@ -142,13 +148,19 @@
             val content =
               Bytes.read_file(heap.file, offset = offset, limit = limit)
                 .compress(cache = cache)
-            Data.transaction_lock(db) { Data.write_entry(db, session_name, i, content) }
+            Data.transaction_lock(db, synchronized = true) {
+              Data.write_entry(db, session_name, i, content)
+            }
           }

-          Data.transaction_lock(db) { Data.finish_entry(db, session_name, size, digest) }
+          Data.transaction_lock(db, synchronized = true) {
+            Data.finish_entry(db, session_name, size, digest)
+          }
         }
         catch { case exn: Throwable =>
-          Data.transaction_lock(db, create = true) { Data.clean_entry(db, session_name) }
+          Data.transaction_lock(db, create = true, synchronized = true) {
+            Data.clean_entry(db, session_name)
+          }
           throw exn
         }
     }
@@ -164,7 +176,7 @@
     database match {
       case None =>
       case Some(db) =>
-        Data.transaction_lock(db, create = true) {
+        Data.transaction_lock(db, create = true, synchronized = true) {
          val db_digest = Data.get_entry(db, session_name)
          val file_digest = read_file_digest(heap)

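[Editor's note: the ml_heap.scala hunks above consistently pass synchronized = true to Data.transaction_lock, so that each SQL transaction on the shared server connection also runs under a process-local lock. For orientation, here is a minimal self-contained sketch of that shape. It is not part of the changeset and not Isabelle's actual Data.transaction_lock: the names Transaction_Lock_Sketch and process_lock are hypothetical, plain JDBC stands in for Isabelle's SQL.Database, and the in-memory URL assumes a SQLite JDBC driver on the classpath.]

import java.sql.{Connection, DriverManager}

object Transaction_Lock_Sketch {
  // process-local mutex: serializes all synchronized transactions within this JVM
  private val process_lock = new Object

  // run body inside an SQL transaction; optionally also under the JVM-level lock,
  // so concurrent threads never open competing transactions on one connection
  def transaction_lock[A](db: Connection, synchronized: Boolean = false)(body: => A): A = {
    def txn(): A = {
      db.setAutoCommit(false)
      try { val result = body; db.commit(); result }
      catch { case exn: Throwable => db.rollback(); throw exn }
      finally db.setAutoCommit(true)
    }
    if (synchronized) process_lock.synchronized(txn()) else txn()
  }

  def main(args: Array[String]): Unit = {
    val db = DriverManager.getConnection("jdbc:sqlite::memory:")
    try {
      transaction_lock(db, synchronized = true) {
        db.createStatement().execute(
          "CREATE TABLE IF NOT EXISTS heaps(name TEXT PRIMARY KEY, digest TEXT)")
      }
    }
    finally db.close()
  }
}

[The point of the flag, under this reading, is that SQL-level locking alone does not stop two threads of the same process from interleaving work on the single shared connection; the JVM lock does.]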
diff -r dfb172d7e40e -r fd0430a7b7a4 src/Pure/Thy/store.scala
--- a/src/Pure/Thy/store.scala	Tue Jun 27 10:05:33 2023 +0200
+++ b/src/Pure/Thy/store.scala	Tue Jun 27 10:24:32 2023 +0200
@@ -304,10 +304,16 @@
   def prepare_output(): Unit =
     Isabelle_System.make_directory(output_dir + Path.basic("log"))

-  def clean_output(name: String, init: Boolean = false): Option[Boolean] = {
+  def clean_output(
+    database_server: Option[SQL.Database],
+    name: String,
+    init: Boolean = false
+  ): Option[Boolean] = {
     val relevant_db =
-      build_database_server &&
-        using_option(try_open_database(name))(clean_session_info(_, name)).getOrElse(false)
+      database_server match {
+        case Some(db) => clean_session_info(db, name)
+        case None => false
+      }

     val del =
       for {
@@ -317,9 +323,7 @@
         path = dir + file if path.is_file
       } yield path.file.delete

-    using_optional(maybe_open_database_server()) { database =>
-      database.foreach(ML_Heap.clean_entry(_, name))
-    }
+    database_server.foreach(ML_Heap.clean_entry(_, name))
     if (init) {
       using(open_database(name, output = true))(clean_session_info(_, name))
     }
@@ -384,7 +388,7 @@
         sql = Store.Data.Session_Info.session_name.where_equal(name)))

   def clean_session_info(db: SQL.Database, name: String): Boolean =
-    Store.Data.transaction_lock(db, create = true) {
+    Store.Data.transaction_lock(db, create = true, synchronized = true) {
       val already_defined = session_info_defined(db, name)

       db.execute_statement(
@@ -411,7 +415,7 @@
     build_log: Build_Log.Session_Info,
     build: Store.Build_Info
   ): Unit = {
-    Store.Data.transaction_lock(db) {
+    Store.Data.transaction_lock(db, synchronized = true) {
       Store.Data.write_sources(db, session_name, sources)
       Store.Data.write_session_info(db, cache.compress, session_name, build_log, build)
     }

diff -r dfb172d7e40e -r fd0430a7b7a4 src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala	Tue Jun 27 10:05:33 2023 +0200
+++ b/src/Pure/Tools/build.scala	Tue Jun 27 10:24:32 2023 +0200
@@ -172,11 +172,13 @@
           build_context.prepare_database()

           if (clean_build) {
-            for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
-              store.clean_output(name) match {
-                case None =>
-                case Some(true) => progress.echo("Cleaned " + name)
-                case Some(false) => progress.echo(name + " FAILED to clean")
+            using_optional(store.maybe_open_database_server()) { database_server =>
+              for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
+                store.clean_output(database_server, name) match {
+                  case None =>
+                  case Some(true) => progress.echo("Cleaned " + name)
+                  case Some(false) => progress.echo(name + " FAILED to clean")
+                }
              }
            }
          }

diff -r dfb172d7e40e -r fd0430a7b7a4 src/Pure/Tools/build_job.scala
--- a/src/Pure/Tools/build_job.scala	Tue Jun 27 10:05:33 2023 +0200
+++ b/src/Pure/Tools/build_job.scala	Tue Jun 27 10:24:32 2023 +0200
@@ -23,11 +23,13 @@
     build_context: Build_Process.Context,
     progress: Progress,
     log: Logger,
+    database_server: Option[SQL.Database],
     session_background: Sessions.Background,
     input_shasum: SHA1.Shasum,
     node_info: Host.Node_Info
   ): Session_Job = {
-    new Session_Job(build_context, progress, log, session_background, input_shasum, node_info)
+    new Session_Job(build_context, progress, log, database_server,
+      session_background, input_shasum, node_info)
   }

   object Session_Context {
@@ -93,6 +95,7 @@
     build_context: Build_Process.Context,
     progress: Progress,
     log: Logger,
+    database_server: Option[SQL.Database],
     session_background: Sessions.Background,
     input_shasum: SHA1.Shasum,
     node_info: Host.Node_Info
@@ -453,9 +456,7 @@
           val heap = store.output_heap(session_name)
           if (process_result.ok && store_heap && heap.is_file) {
             val slice = Space.MiB(options.real("build_database_slice")).bytes
-            val digest =
-              using_optional(store.maybe_open_database_server())(
-                ML_Heap.store(_, session_name, heap, slice))
+            val digest = ML_Heap.store(database_server, session_name, heap, slice)
             SHA1.shasum(digest, session_name)
           }
           else SHA1.no_shasum
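[Editor's note: the common thread of the store.scala, build.scala, and build_job.scala hunks is to stop opening the database server inside each operation and instead accept an Option[SQL.Database] from the caller, with using_optional managing the connection's lifetime at a single outer point. A minimal sketch of that pattern follows; it is illustrative only, the names Clean_Output_Sketch and DB_URL are hypothetical, and plain JDBC stands in for Isabelle's SQL.Database.]

import java.sql.{Connection, DriverManager}

object Clean_Output_Sketch {
  // close an optional resource after use, whether or not one was actually opened
  def using_optional[A <: AutoCloseable, B](opt: Option[A])(body: Option[A] => B): B =
    try body(opt) finally opt.foreach(_.close())

  // the consumer receives the connection as a parameter instead of opening its own
  def clean_output(database_server: Option[Connection], name: String): Unit =
    database_server.foreach { db =>
      val stmt = db.prepareStatement("DELETE FROM heaps WHERE name = ?")
      try { stmt.setString(1, name); stmt.executeUpdate() }
      finally stmt.close()
    }

  def main(args: Array[String]): Unit = {
    // None when no database server is configured, e.g. plain file-system heaps
    val server: Option[Connection] =
      sys.env.get("DB_URL").map(url => DriverManager.getConnection(url))
    using_optional(server) { database_server =>
      clean_output(database_server, "Pure")
      clean_output(database_server, "HOL")  // same connection, not reopened per session
    }
  }
}

[Before the change, each call site did the equivalent of opening `server` itself; after it, one connection serves the whole loop, which is exactly what the commit message means by "avoid repeated open_database_server".]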
diff -r dfb172d7e40e -r fd0430a7b7a4 src/Pure/Tools/build_process.scala
--- a/src/Pure/Tools/build_process.scala	Tue Jun 27 10:05:33 2023 +0200
+++ b/src/Pure/Tools/build_process.scala	Tue Jun 27 10:24:32 2023 +0200
@@ -808,6 +808,9 @@
   private val _host_database: Option[SQL.Database] =
     store.maybe_open_build_database(Host.Data.database)

+  private val _database_server: Option[SQL.Database] =
+    store.maybe_open_database_server()
+
   protected val (progress, worker_uuid) = synchronized {
     _build_database match {
       case None => (build_progress, UUID.random().toString)
@@ -906,9 +909,8 @@
       val cancelled = progress.stopped || !ancestor_results.forall(_.ok)

       if (!skipped && !cancelled) {
-        using_optional(store.maybe_open_database_server())(
-          ML_Heap.restore(_, session_name, store.output_heap(session_name),
-            cache = store.cache.compress))
+        ML_Heap.restore(_database_server, session_name, store.output_heap(session_name),
+          cache = store.cache.compress)
       }

       val result_name = (session_name, worker_uuid, build_uuid)
@@ -944,10 +946,10 @@
             (if (store_heap) "Building " else "Running ") + session_name +
               if_proper(node_info.numa_node, " on " + node_info) + " ...")

-        store.clean_output(session_name, init = true)
+        store.clean_output(_database_server, session_name, init = true)

         val build =
-          Build_Job.start_session(build_context, progress, log,
+          Build_Job.start_session(build_context, progress, log, _database_server,
            build_deps.background(session_name), input_shasum, node_info)

        val job = Build_Process.Job(session_name, worker_uuid, build_uuid, node_info, Some(build))
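[Editor's note: build_process.scala completes the picture by promoting the connection to a field, _database_server, opened once when the long-running process starts and reused by restore, clean_output, and start_session. A condensed sketch of that lifetime pattern, again with hypothetical names (Build_Loop_Sketch, run_session) and plain JDBC rather than Isabelle's SQL.Database:]

import java.sql.{Connection, DriverManager}

final class Build_Loop_Sketch(db_url: Option[String]) extends AutoCloseable {
  // opened once for the lifetime of the process, mirroring _database_server above;
  // None when no database server is configured
  private val _database_server: Option[Connection] =
    db_url.map(url => DriverManager.getConnection(url))

  def run_session(name: String): Unit =
    // every session step reuses the shared connection instead of reopening the server
    _database_server.foreach { db =>
      val stmt = db.prepareStatement("INSERT INTO sessions(name) VALUES (?)")
      try { stmt.setString(1, name); stmt.executeUpdate() }
      finally stmt.close()
    }

  override def close(): Unit = _database_server.foreach(_.close())
}

[Combined with the synchronized transaction_lock from ml_heap.scala, the single shared connection stays safe under the build process's concurrent workers while server round-trips per session drop to zero.]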