--- a/src/Pure/Tools/build_process.scala Sun Feb 12 20:53:55 2023 +0100
+++ b/src/Pure/Tools/build_process.scala Sun Feb 12 21:09:12 2023 +0100
@@ -167,11 +167,6 @@
private var running = Map.empty[String, (SHA1.Shasum, Build_Job)]
private var results = Map.empty[String, Build_Process.Result]

- private def sleep(): Unit =
- Isabelle_Thread.interrupt_handler(_ => progress.stop()) {
- build_options.seconds("editor_input_delay").sleep()
- }
-
private val log =
build_options.string("system_log") match {
case "" => No_Logger
@@ -203,154 +198,153 @@
"Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")"
}
- def run(): Map[String, Build_Process.Result] = {
- @tailrec def loop(): Unit = {
- if (!build_graph.is_empty) {
- if (progress.stopped) {
- for ((_, (_, job)) <- running) job.terminate()
- }
+ private def finish_job(session_name: String, input_heaps: SHA1.Shasum, job: Build_Job): Unit = {
+ val process_result = job.join
- running.find({ case (_, (_, job)) => job.is_finished }) match {
- case Some((session_name, (input_heaps, job))) =>
- //{{{ finish job
-
- val process_result = job.join
+ val output_heap =
+ if (process_result.ok && job.do_store && store.output_heap(session_name).is_file) {
+ SHA1.shasum(ML_Heap.write_digest(store.output_heap(session_name)), session_name)
+ }
+ else SHA1.no_shasum
- val output_heap =
- if (process_result.ok && job.do_store && store.output_heap(session_name).is_file) {
- SHA1.shasum(ML_Heap.write_digest(store.output_heap(session_name)), session_name)
- }
- else SHA1.no_shasum
+ val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
+ val process_result_tail = {
+ val tail = job.info.options.int("process_output_tail")
+ process_result.copy(
+ out_lines =
+ "(more details via \"isabelle log -H Error " + session_name + "\")" ::
+ (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
+ }
- val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
- val process_result_tail = {
- val tail = job.info.options.int("process_output_tail")
- process_result.copy(
- out_lines =
- "(more details via \"isabelle log -H Error " + session_name + "\")" ::
- (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
- }
+ val build_log =
+ Build_Log.Log_File(session_name, process_result.out_lines).
+ parse_session_info(
+ command_timings = true,
+ theory_timings = true,
+ ml_statistics = true,
+ task_statistics = true)
+
+ // write log file
+ if (process_result.ok) {
+ File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
+ }
+ else File.write(store.output_log(session_name), terminate_lines(log_lines))
- val build_log =
- Build_Log.Log_File(session_name, process_result.out_lines).
- parse_session_info(
- command_timings = true,
- theory_timings = true,
- ml_statistics = true,
- task_statistics = true)
+ // write database
+ using(store.open_database(session_name, output = true))(db =>
+ store.write_session_info(db, session_name, job.session_sources,
+ build_log =
+ if (process_result.timeout) build_log.error("Timeout") else build_log,
+ build =
+ Build.Session_Info(build_deps.sources_shasum(session_name), input_heaps,
+ output_heap, process_result.rc, UUID.random().toString)))
- // write log file
- if (process_result.ok) {
- File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
- }
- else File.write(store.output_log(session_name), terminate_lines(log_lines))
+ // messages
+ process_result.err_lines.foreach(progress.echo)
- // write database
- using(store.open_database(session_name, output = true))(db =>
- store.write_session_info(db, session_name, job.session_sources,
- build_log =
- if (process_result.timeout) build_log.error("Timeout") else build_log,
- build =
- Build.Session_Info(build_deps.sources_shasum(session_name), input_heaps,
- output_heap, process_result.rc, UUID.random().toString)))
+ if (process_result.ok) {
+ if (verbose) progress.echo(session_timing(session_name, build_log))
+ progress.echo(session_finished(session_name, process_result))
+ }
+ else {
+ progress.echo(session_name + " FAILED")
+ if (!process_result.interrupted) progress.echo(process_result_tail.out)
+ }
- // messages
- process_result.err_lines.foreach(progress.echo)
+ remove_pending(session_name)
+ running -= session_name
+ results += (session_name -> Build_Process.Result(false, output_heap, process_result_tail))
+ }
- if (process_result.ok) {
- if (verbose) progress.echo(session_timing(session_name, build_log))
- progress.echo(session_finished(session_name, process_result))
- }
- else {
- progress.echo(session_name + " FAILED")
- if (!process_result.interrupted) progress.echo(process_result_tail.out)
- }
+ private def start_job(session_name: String): Unit = {
+ val ancestor_results =
+ build_deps.sessions_structure.build_requirements(List(session_name)).
+ filterNot(_ == session_name).map(results(_))
+ val input_heaps =
+ if (ancestor_results.isEmpty) {
+ SHA1.shasum_meta_info(SHA1.digest(Path.explode("$POLYML_EXE")))
+ }
+ else SHA1.flat_shasum(ancestor_results.map(_.output_heap))
- remove_pending(session_name)
- running -= session_name
- results += (session_name -> Build_Process.Result(false, output_heap, process_result_tail))
- loop()
- //}}}
- case None if running.size < (max_jobs max 1) =>
- //{{{ check/start next job
- next_pending() match {
- case Some(session_name) =>
- val ancestor_results =
- build_deps.sessions_structure.build_requirements(List(session_name)).
- filterNot(_ == session_name).map(results(_))
- val input_heaps =
- if (ancestor_results.isEmpty) {
- SHA1.shasum_meta_info(SHA1.digest(Path.explode("$POLYML_EXE")))
- }
- else SHA1.flat_shasum(ancestor_results.map(_.output_heap))
+ val do_store = build_heap || build_context.build_heap(session_name)
+ val (current, output_heap) = {
+ store.try_open_database(session_name) match {
+ case Some(db) =>
+ using(db)(store.read_build(_, session_name)) match {
+ case Some(build) =>
+ val output_heap = store.find_heap_shasum(session_name)
+ val current =
+ !fresh_build &&
+ build.ok &&
+ build.sources == build_deps.sources_shasum(session_name) &&
+ build.input_heaps == input_heaps &&
+ build.output_heap == output_heap &&
+ !(do_store && output_heap.is_empty)
+ (current, output_heap)
+ case None => (false, SHA1.no_shasum)
+ }
+ case None => (false, SHA1.no_shasum)
+ }
+ }
+ val all_current = current && ancestor_results.forall(_.current)
- val do_store = build_heap || build_context.build_heap(session_name)
- val (current, output_heap) = {
- store.try_open_database(session_name) match {
- case Some(db) =>
- using(db)(store.read_build(_, session_name)) match {
- case Some(build) =>
- val output_heap = store.find_heap_shasum(session_name)
- val current =
- !fresh_build &&
- build.ok &&
- build.sources == build_deps.sources_shasum(session_name) &&
- build.input_heaps == input_heaps &&
- build.output_heap == output_heap &&
- !(do_store && output_heap.is_empty)
- (current, output_heap)
- case None => (false, SHA1.no_shasum)
- }
- case None => (false, SHA1.no_shasum)
- }
- }
- val all_current = current && ancestor_results.forall(_.current)
+ if (all_current) {
+ remove_pending(session_name)
+ results += (session_name -> Build_Process.Result(true, output_heap, Process_Result.ok))
+ }
+ else if (no_build) {
+ progress.echo_if(verbose, "Skipping " + session_name + " ...")
+ remove_pending(session_name)
+ results += (session_name -> Build_Process.Result(false, output_heap, Process_Result.error))
+ }
+ else if (ancestor_results.forall(_.ok) && !progress.stopped) {
+ progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...")
+
+ store.clean_output(session_name)
+ using(store.open_database(session_name, output = true))(
+ store.init_session_info(_, session_name))
- if (all_current) {
- remove_pending(session_name)
- results += (session_name -> Build_Process.Result(true, output_heap, Process_Result.ok))
- loop()
- }
- else if (no_build) {
- progress.echo_if(verbose, "Skipping " + session_name + " ...")
- remove_pending(session_name)
- results += (session_name -> Build_Process.Result(false, output_heap, Process_Result.error))
- loop()
- }
- else if (ancestor_results.forall(_.ok) && !progress.stopped) {
- progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...")
+ val session_background = build_deps.background(session_name)
+ val resources =
+ new Resources(session_background, log = log,
+ command_timings = build_context(session_name).old_command_timings)
- store.clean_output(session_name)
- using(store.open_database(session_name, output = true))(
- store.init_session_info(_, session_name))
+ val numa_node = numa_nodes.next(used_node)
+ val job =
+ new Build_Job(progress, session_background, store, do_store,
+ resources, session_setup, numa_node)
+ running += (session_name -> (input_heaps, job))
+ }
+ else {
+ progress.echo(session_name + " CANCELLED")
+ remove_pending(session_name)
+ results += (session_name -> Build_Process.Result(false, output_heap, Process_Result.undefined))
+ }
+ }
- val session_background = build_deps.background(session_name)
- val resources =
- new Resources(session_background, log = log,
- command_timings = build_context(session_name).old_command_timings)
+ private def sleep(): Unit =
+ Isabelle_Thread.interrupt_handler(_ => progress.stop()) {
+ build_options.seconds("editor_input_delay").sleep()
+ }
- val numa_node = numa_nodes.next(used_node)
- val job =
- new Build_Job(progress, session_background, store, do_store,
- resources, session_setup, numa_node)
- running += (session_name -> (input_heaps, job))
- loop()
- }
- else {
- progress.echo(session_name + " CANCELLED")
- remove_pending(session_name)
- results += (session_name -> Build_Process.Result(false, output_heap, Process_Result.undefined))
- loop()
- }
- case None => sleep(); loop()
- }
- ///}}}
- case None => sleep(); loop()
- }
+ def run(): Map[String, Build_Process.Result] = {
+ while (!build_graph.is_empty) {
+ if (progress.stopped) {
+ for ((_, (_, job)) <- running) job.terminate()
+ }
+
+ running.find({ case (_, (_, job)) => job.is_finished }) match {
+ case Some((session_name, (input_heaps, job))) =>
+ finish_job(session_name, input_heaps, job)
+ case None if running.size < (max_jobs max 1) =>
+ next_pending() match {
+ case Some(session_name) => start_job(session_name)
+ case None => sleep()
+ }
+ case None => sleep()
}
}
- loop()
results
}
}
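
For orientation, the refactoring above turns the old tail-recursive loop() into a plain while loop in run() that delegates to three private helpers: finish_job (collect a completed job, write its log/database entries, record the result), start_job (decide between "current", "skip", "start" and "cancel" for the next pending session) and sleep. Below is a simplified, self-contained Scala sketch of just that control flow; Job, Result, the pending list and the fake worker thread are illustrative stand-ins, not the actual Isabelle classes.

object Scheduler_Sketch {
  // Minimal stand-ins for Build_Job and Build_Process.Result.
  final class Job(val name: String) {
    @volatile private var finished = false
    def is_finished: Boolean = finished
    def finish(): Unit = finished = true
  }
  final case class Result(ok: Boolean)

  private val max_jobs = 2
  private var pending = List("A", "B", "C")            // sessions still to build
  private var running = Map.empty[String, Job]
  private var results = Map.empty[String, Result]

  private def next_pending(): Option[String] = pending.headOption

  private def start_job(name: String): Unit = {
    pending = pending.filterNot(_ == name)
    val job = new Job(name)
    running += (name -> job)
    // fake worker: the real code forks an external build process instead
    new Thread(() => { Thread.sleep(100); job.finish() }).start()
  }

  private def finish_job(name: String, job: Job): Unit = {
    running -= name
    results += (name -> Result(ok = true))
    println(name + " finished")
  }

  private def sleep(): Unit = Thread.sleep(50)

  def run(): Map[String, Result] = {
    while (pending.nonEmpty || running.nonEmpty) {
      running.find({ case (_, job) => job.is_finished }) match {
        case Some((name, job)) => finish_job(name, job)
        case None if running.size < max_jobs =>
          next_pending() match {
            case Some(name) => start_job(name)
            case None => sleep()
          }
        case None => sleep()
      }
    }
    results
  }

  def main(args: Array[String]): Unit = println(run())
}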