# HG changeset patch
# User Fabian Huch
# Date 1706118081 -3600
# Node ID f1f08ca40d966c4450f4e56303f8bc25e94acaa1
# Parent  6e5397fcc41b0e34b92b8da6949b0aa7065ae8ac
make build process state protected to avoid copying in subclasses (e.g. for database connections);

diff -r 6e5397fcc41b -r f1f08ca40d96 src/Pure/Build/build_process.scala
--- a/src/Pure/Build/build_process.scala	Wed Jan 24 17:30:49 2024 +0100
+++ b/src/Pure/Build/build_process.scala	Wed Jan 24 18:41:21 2024 +0100
@@ -857,7 +857,7 @@
     try { store.maybe_open_database_server(server = server) }
     catch { case exn: Throwable => close(); throw exn }
 
-  private val _build_database: Option[SQL.Database] =
+  protected val _build_database: Option[SQL.Database] =
     try {
       for (db <- store.maybe_open_build_database(server = server)) yield {
         if (!db.is_postgresql) {
@@ -920,7 +920,7 @@
     build_cluster
   }
 
-  private val _build_cluster =
+  protected val _build_cluster =
     try {
       if (build_context.master && _build_database.isDefined) open_build_cluster()
       else Build_Cluster.none
@@ -941,7 +941,7 @@
 
   /* global state: internal var vs. external database */
 
-  private var _state: Build_Process.State = Build_Process.State()
+  protected var _state: Build_Process.State = Build_Process.State()
 
   protected def synchronized_database[A](label: String)(body: => A): A =
     synchronized {
diff -r 6e5397fcc41b -r f1f08ca40d96 src/Pure/Build/build_schedule.scala
--- a/src/Pure/Build/build_schedule.scala	Wed Jan 24 17:30:49 2024 +0100
+++ b/src/Pure/Build/build_schedule.scala	Wed Jan 24 18:41:21 2024 +0100
@@ -811,43 +811,25 @@
   build_progress: Progress,
   server: SSH.Server,
 ) extends Build_Process(build_context, build_progress, server) {
-  /* global resources with common close() operation */
-
-  protected final lazy val _build_database: Option[SQL.Database] =
-    try {
-      for (db <- store.maybe_open_build_database(server = server)) yield {
-        if (build_context.master) {
-          Build_Schedule.private_data.transaction_lock(
-            db,
-            create = true,
-            label = "Build_Schedule.build_database"
-          ) { Build_Schedule.private_data.clean_build_schedules(db) }
-          db.vacuum(Build_Schedule.private_data.tables.list)
-        }
-        db
-      }
-    }
-    catch { case exn: Throwable => close(); throw exn }
-
-  override def close(): Unit = {
-    Option(_build_database).flatten.foreach(_.close())
-    super.close()
-  }
-
-
   /* global state: internal var vs. external database */
 
   protected var _schedule = Schedule.init(build_uuid)
 
   override protected def synchronized_database[A](label: String)(body: => A): A =
-    super.synchronized_database(label) {
+    synchronized {
       _build_database match {
         case None => body
         case Some(db) =>
-          Build_Schedule.private_data.transaction_lock(db, label = label) {
+          val tables =
+            Build_Process.private_data.tables.list ::: Build_Schedule.private_data.tables.list
+          db.transaction_lock(SQL.Tables.list(tables), label = label) {
+            val old_state = Build_Process.private_data.pull_database(db, worker_uuid, _state)
             val old_schedule = Build_Schedule.private_data.pull_schedule(db, _schedule)
+            _state = old_state
             _schedule = old_schedule
             val res = body
+            _state =
+              Build_Process.private_data.update_database(db, worker_uuid, _state, old_state)
             _schedule = Build_Schedule.private_data.update_schedule(db, _schedule, old_schedule)
             res
           }
@@ -873,6 +855,16 @@
 
   protected val start_date = Date.now()
 
+  for (db <- _build_database) {
+    Build_Schedule.private_data.transaction_lock(
+      db,
+      create = true,
+      label = "Scheduler_Build_Process.create"
+    ) { Build_Schedule.private_data.clean_build_schedules(db) }
+    db.vacuum(Build_Schedule.private_data.tables.list)
+  }
+
+
   def init_scheduler(timing_data: Timing_Data): Scheduler
 
 
@@ -895,26 +887,6 @@
 
   /* previous results via build log */
 
-  override def open_build_cluster(): Build_Cluster = {
-    val build_cluster = super.open_build_cluster()
-    build_cluster.init()
-
-    Benchmark.benchmark_requirements(build_options)
-
-    if (build_context.max_jobs > 0) {
-      val benchmark_options = build_options.string("build_hostname") = hostname
-      Benchmark.benchmark(benchmark_options, progress)
-    }
-    build_cluster.benchmark()
-
-    for (db <- _build_database)
-      Build_Process.private_data.transaction_lock(db, label = "Scheduler_Build_Process.init") {
-        Build_Process.private_data.clean_build(db)
-      }
-
-    build_cluster
-  }
-
   private val timing_data: Timing_Data = {
     val cluster_hosts: List[Build_Cluster.Host] =
       if (build_context.max_jobs == 0) build_context.build_hosts
@@ -1030,6 +1002,19 @@
   }
 
   override def run(): Build.Results = {
+    Benchmark.benchmark_requirements(build_options)
+
+    if (build_context.max_jobs > 0) {
+      val benchmark_options = build_options.string("build_hostname") = hostname
+      Benchmark.benchmark(benchmark_options, progress)
+    }
+    _build_cluster.benchmark()
+
+    for (db <- _build_database)
+      Build_Process.private_data.transaction_lock(db, label = "Scheduler_Build_Process.init") {
+        Build_Process.private_data.clean_build(db)
+      }
+
    val results = super.run()
     write_build_log(results, snapshot().results)
     results
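
Below is a minimal, self-contained Scala sketch of the pattern this changeset enables, added for context only; the classes and members (Base_Process, Scheduler_Process, _db, _state, _schedule) are hypothetical placeholders and not the actual Build_Process/Build_Schedule API. The point is that protected (rather than private) state lets a subclass hook into the same synchronized database transaction instead of opening its own connection and copying the base class's state.

// Hypothetical sketch only -- a simplified stand-in for the real classes in the patch above.
import java.sql.{Connection, DriverManager}

class Base_Process(db_url: Option[String]) extends AutoCloseable {
  // protected, not private: subclasses reuse the same connection and state
  protected val _db: Option[Connection] =
    try { db_url.map(url => DriverManager.getConnection(url)) }
    catch { case exn: Throwable => close(); throw exn }

  protected var _state: Map[String, Int] = Map.empty

  protected def synchronized_database[A](label: String)(body: => A): A =
    synchronized {
      // real code would pull _state from the database, run body, write updates back
      body
    }

  def close(): Unit = Option(_db).flatten.foreach(_.close())
}

class Scheduler_Process(db_url: Option[String]) extends Base_Process(db_url) {
  // extra scheduler state lives next to the inherited _state
  protected var _schedule: List[String] = Nil

  // reuses the inherited _db and _state: no second connection, no copied fields
  override protected def synchronized_database[A](label: String)(body: => A): A =
    synchronized {
      for (db <- _db) { /* pull/update _state and _schedule in one transaction */ }
      body
    }
}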