--- a/Admin/Release/CHECKLIST Wed Mar 08 17:51:56 2023 +0100
+++ b/Admin/Release/CHECKLIST Wed Mar 08 20:19:05 2023 +0100
@@ -89,11 +89,11 @@
- Docker image:
- isabelle build_docker -o Dockerfile -E -t makarius/isabelle:Isabelle2022 Isabelle2022_linux.tar.gz
- isabelle build_docker -o Dockerfile -E -t makarius/isabelle:Isabelle2022_X11_Latex -P X11 -P latex Isabelle2022_linux.tar.gz
+ isabelle docker_build -o Dockerfile -E -t makarius/isabelle:Isabelle2022 Isabelle2022_linux.tar.gz
+ isabelle docker_build -o Dockerfile -E -t makarius/isabelle:Isabelle2022_X11_Latex -P X11 -P latex Isabelle2022_linux.tar.gz
- isabelle build_docker -o Dockerfile -E -t makarius/isabelle:Isabelle2022_ARM Isabelle2022_linux_arm.tar.gz
- isabelle build_docker -o Dockerfile -E -t makarius/isabelle:Isabelle2022_ARM_X11_Latex -P X11 -P latex Isabelle2022_linux_arm.tar.gz
+ isabelle docker_build -o Dockerfile -E -t makarius/isabelle:Isabelle2022_ARM Isabelle2022_linux_arm.tar.gz
+ isabelle docker_build -o Dockerfile -E -t makarius/isabelle:Isabelle2022_ARM_X11_Latex -P X11 -P latex Isabelle2022_linux_arm.tar.gz
docker login
--- a/Admin/Windows/Cygwin/README Wed Mar 08 17:51:56 2023 +0100
+++ b/Admin/Windows/Cygwin/README Wed Mar 08 20:19:05 2023 +0100
@@ -54,7 +54,7 @@
Redirect /cygwin_2022/noarch/release https://ftp.eq.uc.pt/software/pc/prog/cygwin/noarch/release
```
-* Quasi-component: "isabelle build_cygwin" (as Administrator)
+* Quasi-component: "isabelle component_cygwin" (as Administrator)
* SSH server:
--- a/Admin/components/README Wed Mar 08 17:51:56 2023 +0100
+++ b/Admin/components/README Wed Mar 08 20:19:05 2023 +0100
@@ -10,11 +10,11 @@
* packaging (with associated SHA1 digest), e.g.
- $ isabelle build_components screwdriver-3.14
+ $ isabelle components_build screwdriver-3.14
* publishing, e.g.
- $ isabelle build_components -P screwdriver-3.14.tar.gz
+ $ isabelle components_build -P screwdriver-3.14.tar.gz
* manual editing of Admin/components/main: screwdriver-3.14
@@ -54,7 +54,7 @@
The file `Admin/components/components.sha1` contains SHA1 identifiers
within the Isabelle repository, for integrity checking of the archives
that are exposed to the public file-system. The command-line tool
-`isabelle build_components` maintains these hash-keys automatically.
+`isabelle components_build` maintains these hash-keys automatically.
Unpacked copy
@@ -67,6 +67,6 @@
isabelle_cronjob does this routinely: it will break if the unpacked version is
omitted.
-The command-line tool `isabelle build_components -P` takes care of uploading
+The command-line tool `isabelle components_build -P` takes care of uploading
the .tar.gz archive and unpacking it, unless it is a special component (e.g.
for multiplatform application bundling).
--- a/Admin/isabelle_fonts/README Wed Mar 08 17:51:56 2023 +0100
+++ b/Admin/isabelle_fonts/README Wed Mar 08 20:19:05 2023 +0100
@@ -6,7 +6,7 @@
TTF files are produced automatically from the Deja Vu font family with symbols
from the IsabelleSymbols font (formerly IsabelleText):
- isabelle build_fonts -d dejavu-fonts-ttf-2.37/ttf
+ isabelle component_fonts -d dejavu-fonts-ttf-2.37/ttf
The IsabelleSymbols template (see Admin/isabelle_fonts in the repository) has
been assembled manually, by composing glyphs from Bluesky TeX fonts (scaled
@@ -20,7 +20,7 @@
Makarius
- 22-Mar-2021
+ 08-Mar-2023
----------------------------------------------------------------------------
--- a/Admin/polyml/INSTALL-MinGW Wed Mar 08 17:51:56 2023 +0100
+++ b/Admin/polyml/INSTALL-MinGW Wed Mar 08 20:19:05 2023 +0100
@@ -17,5 +17,5 @@
- build (as regular user) e.g. on vmnipkow9 with Cygwin-Terminal from Isabelle2018
(to avoid address relocation problems):
- isabelle build_polyml -M /cygdrive/c/msys64 -m32 -s sha1 src
- isabelle build_polyml -M /cygdrive/c/msys64 -m64 -s sha1 src
+ isabelle component_polyml -M /cygdrive/c/msys64 -m32 -s sha1 src
+ isabelle component_polyml -M /cygdrive/c/msys64 -m64 -s sha1 src
--- a/src/Pure/Admin/build_status.scala Wed Mar 08 17:51:56 2023 +0100
+++ b/src/Pure/Admin/build_status.scala Wed Mar 08 20:19:05 2023 +0100
@@ -24,7 +24,7 @@
history: Int = 0,
afp: Boolean = false,
bulky: Boolean = false,
- sql: String
+ sql: String = ""
) {
def days(options: Options): Int = options.int("build_log_history") max history
@@ -37,15 +37,15 @@
only_sessions: Set[String]
): PostgreSQL.Source = {
Build_Log.Data.universal_table.select(columns, distinct = true, sql =
- "WHERE " +
- Build_Log.Data.pull_date(afp) + " > " + Build_Log.Data.recent_time(days(options)) +
- " AND " +
- Build_Log.Data.status.member(
- List(
- Build_Log.Session_Status.finished.toString,
- Build_Log.Session_Status.failed.toString)) +
- if_proper(only_sessions, " AND " + Build_Log.Data.session_name.member(only_sessions)) +
- " AND " + SQL.enclose(sql))
+ SQL.where(
+ SQL.and(
+ Build_Log.Data.pull_date(afp).ident + " > " + Build_Log.Data.recent_time(days(options)),
+ Build_Log.Data.status.member(
+ List(
+ Build_Log.Session_Status.finished.toString,
+ Build_Log.Session_Status.failed.toString)),
+ if_proper(only_sessions, Build_Log.Data.session_name.member(only_sessions)),
+ if_proper(sql, SQL.enclose(sql)))))
}
}
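
The refactoring above relies on SQL.where and SQL.and dropping empty operands, so that each if_proper(...) contributes a clause only when its guard is non-empty. A minimal self-contained sketch of the assumed combinator semantics (the real definitions live in src/Pure/General/sql.scala; names and details here are illustrative, not authoritative):

  object SQL_Combinators {
    // join the non-empty conditions with AND, parenthesizing each operand
    def and(args: String*): String =
      args.filter(_.nonEmpty).map("(" + _ + ")").mkString(" AND ")

    // emit a WHERE clause only for a non-empty condition
    def where(sql: String): String =
      if (sql.isEmpty) "" else " WHERE " + sql

    def main(args: Array[String]): Unit = {
      println(where(and("x > 0", "", "y < 1")))  // " WHERE (x > 0) AND (y < 1)"
      println(where(and("", "")))                // prints nothing: no clause at all
    }
  }

Under these assumptions the sql parameter can default to "" and the isabelle_cronjob change below can pass SQL.where(sql) instead of prepending "WHERE " by hand.
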
--- a/src/Pure/Admin/isabelle_cronjob.scala Wed Mar 08 17:51:56 2023 +0100
+++ b/src/Pure/Admin/isabelle_cronjob.scala Wed Mar 08 20:19:05 2023 +0100
@@ -116,7 +116,7 @@
db.execute_query_statement(
Build_Log.Data.select_recent_versions(
- days = days, rev = rev, afp_rev = afp_rev, sql = "WHERE " + sql),
+ days = days, rev = rev, afp_rev = afp_rev, sql = SQL.where(sql)),
List.from[Item],
{ res =>
val known = res.bool(Build_Log.Data.known)
--- a/src/Pure/Tools/build.scala Wed Mar 08 17:51:56 2023 +0100
+++ b/src/Pure/Tools/build.scala Wed Mar 08 20:19:05 2023 +0100
@@ -167,7 +167,7 @@
Build_Process.Context(store, build_deps, progress = progress,
hostname = hostname(build_options), build_heap = build_heap,
numa_shuffling = numa_shuffling, max_jobs = max_jobs, fresh_build = fresh_build,
- no_build = no_build, session_setup = session_setup)
+ no_build = no_build, session_setup = session_setup, master = true)
store.prepare_output()
build_context.prepare_database()
@@ -186,7 +186,7 @@
Isabelle_Thread.uninterruptible {
val engine = get_engine(build_options.string("build_engine"))
using(engine.init(build_context, progress)) { build_process =>
- val res = build_process.run(master = true)
+ val res = build_process.run()
Results(build_context, res)
}
}
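
Moving the master flag from run() into Build_Process.Context fixes the role of a process at construction time, where any component that shares the context can consult it. A self-contained toy model of the resulting control flow (illustrative names, not the real Isabelle API):

  object MasterWorkerModel {
    // toy context: role and local job capacity are decided once, up front
    final case class Ctx(master: Boolean = false, max_jobs: Int = 1) {
      def worker_active: Boolean = max_jobs > 0
    }

    final class Proc(ctx: Ctx) {
      def run(): Unit = {
        if (ctx.master) println("start_build")
        println("start_worker")
        if (ctx.master && !ctx.worker_active) println("Waiting for external workers ...")
        // ... main build loop would run here ...
        println("stop_worker")
        if (ctx.master) println("stop_build")
      }
    }

    def main(args: Array[String]): Unit = {
      new Proc(Ctx(master = true)).run()                // master that also runs jobs
      new Proc(Ctx(master = true, max_jobs = 0)).run()  // pure coordinator
    }
  }
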
--- a/src/Pure/Tools/build_job.scala Wed Mar 08 17:51:56 2023 +0100
+++ b/src/Pure/Tools/build_job.scala Wed Mar 08 20:19:05 2023 +0100
@@ -12,11 +12,12 @@
trait Build_Job {
def job_name: String
+ def worker_uuid: String
def node_info: Host.Node_Info
def cancel(): Unit = ()
def is_finished: Boolean = false
def join: (Process_Result, SHA1.Shasum) = (Process_Result.undefined, SHA1.no_shasum)
- def make_abstract: Build_Job.Abstract = Build_Job.Abstract(job_name, node_info)
+ def make_abstract: Build_Job.Abstract = Build_Job.Abstract(job_name, worker_uuid, node_info)
}
object Build_Job {
@@ -26,6 +27,7 @@
sealed case class Abstract(
override val job_name: String,
+ override val worker_uuid: String,
override val node_info: Host.Node_Info
) extends Build_Job {
override def make_abstract: Abstract = this
@@ -38,13 +40,15 @@
def start_session(
build_context: Build_Process.Context,
+ worker_uuid: String,
progress: Progress,
log: Logger,
session_background: Sessions.Background,
input_shasum: SHA1.Shasum,
node_info: Host.Node_Info
): Session_Job = {
- new Session_Job(build_context, progress, log, session_background, input_shasum, node_info)
+ new Session_Job(
+ build_context, worker_uuid, progress, log, session_background, input_shasum, node_info)
}
object Session_Context {
@@ -106,6 +110,7 @@
class Session_Job private[Build_Job](
build_context: Build_Process.Context,
+ override val worker_uuid: String,
progress: Progress,
log: Logger,
session_background: Sessions.Background,
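
Threading worker_uuid through the Build_Job trait makes every job record which worker process owns it, and make_abstract preserves that ownership when a job is mirrored to the shared database. A minimal sketch of the pattern, with toy names rather than the real classes:

  object JobOwnership {
    trait Job {
      def job_name: String
      def worker_uuid: String
      def make_abstract: AbstractJob = AbstractJob(job_name, worker_uuid)
    }

    final case class AbstractJob(job_name: String, worker_uuid: String) extends Job {
      override def make_abstract: AbstractJob = this
    }

    def main(args: Array[String]): Unit = {
      val job = new Job { val job_name = "HOL"; val worker_uuid = "worker-42" }
      println(job.make_abstract)  // AbstractJob(HOL,worker-42)
    }
  }
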
--- a/src/Pure/Tools/build_process.scala Wed Mar 08 17:51:56 2023 +0100
+++ b/src/Pure/Tools/build_process.scala Wed Mar 08 20:19:05 2023 +0100
@@ -29,7 +29,8 @@
fresh_build: Boolean = false,
no_build: Boolean = false,
session_setup: (String, Session) => Unit = (_, _) => (),
- build_uuid: String = UUID.random().toString
+ build_uuid: String = UUID.random().toString,
+      master: Boolean = false
): Context = {
val sessions_structure = build_deps.sessions_structure
val build_graph = sessions_structure.build_graph
@@ -83,7 +84,7 @@
val numa_nodes = Host.numa_nodes(enabled = numa_shuffling)
new Context(store, build_deps, sessions, ordering, ml_platform, hostname, numa_nodes,
build_heap = build_heap, max_jobs = max_jobs, fresh_build = fresh_build,
- no_build = no_build, session_setup, build_uuid = build_uuid)
+ no_build = no_build, session_setup, build_uuid = build_uuid, master = master)
}
}
@@ -100,7 +101,8 @@
val fresh_build: Boolean,
val no_build: Boolean,
val session_setup: (String, Session) => Unit,
- val build_uuid: String
+ val build_uuid: String,
+ val master: Boolean
) {
override def toString: String =
"Build_Process.Context(build_uuid = " + quote(build_uuid) + ")"
@@ -141,6 +143,8 @@
def store_heap(name: String): Boolean =
build_heap || Sessions.is_pure(name) ||
sessions.valuesIterator.exists(_.ancestors.contains(name))
+
+    def worker_active: Boolean = max_jobs > 0
}
@@ -149,12 +153,23 @@
type Progress_Messages = SortedMap[Long, Progress.Message]
- case class Entry(name: String, deps: List[String], info: JSON.Object.T = JSON.Object.empty) {
+ case class Task(name: String, deps: List[String], info: JSON.Object.T = JSON.Object.empty) {
def is_ready: Boolean = deps.isEmpty
- def resolve(dep: String): Entry =
+ def resolve(dep: String): Task =
if (deps.contains(dep)) copy(deps = deps.filterNot(_ == dep)) else this
}
+ case class Worker(
+ worker_uuid: String,
+ build_uuid: String,
+ hostname: String,
+ java_pid: Long,
+ java_start: Date,
+ start: Date,
+ stamp: Date,
+ stop: Option[Date],
+ serial: Long)
+
case class Result(
process_result: Process_Result,
output_shasum: SHA1.Shasum,
@@ -166,7 +181,8 @@
object State {
type Sessions = Map[String, Build_Job.Session_Context]
- type Pending = List[Entry]
+ type Workers = List[Worker]
+ type Pending = List[Task]
type Running = Map[String, Build_Job]
type Results = Map[String, Result]
@@ -181,6 +197,7 @@
progress_seen: Long = 0,
numa_next: Int = 0,
sessions: State.Sessions = Map.empty, // static build targets
+ workers: State.Workers = Nil, // available worker processes
pending: State.Pending = Nil, // dynamic build "queue"
running: State.Running = Map.empty, // presently running jobs
results: State.Results = Map.empty // finished results
@@ -196,6 +213,8 @@
if (message_serial > progress_seen) copy(progress_seen = message_serial)
else error("Bad serial " + message_serial + " for progress output (already seen)")
+ def set_workers(new_workers: State.Workers): State = copy(workers = new_workers)
+
def numa_next_node(numa_nodes: List[Int]): (Option[Int], State) =
if (numa_nodes.isEmpty) (None, this)
else {
@@ -432,23 +451,57 @@
object Workers {
val worker_uuid = Generic.worker_uuid.make_primary_key
val build_uuid = Generic.build_uuid
+ val hostname = SQL.Column.string("hostname")
+ val java_pid = SQL.Column.long("java_pid")
+ val java_start = SQL.Column.date("java_start")
val start = SQL.Column.date("start")
val stamp = SQL.Column.date("stamp")
val stop = SQL.Column.date("stop")
val serial = SQL.Column.long("serial")
- val table = make_table("workers", List(worker_uuid, build_uuid, start, stamp, stop, serial))
+ val table = make_table("workers",
+ List(worker_uuid, build_uuid, hostname, java_pid, java_start, start, stamp, stop, serial))
val serial_max = serial.copy(expr = "MAX(" + serial.ident + ")")
}
+ def read_workers(
+ db: SQL.Database,
+ build_uuid: String = "",
+ worker_uuid: String = ""
+ ): State.Workers = {
+ db.execute_query_statement(
+ Workers.table.select(sql =
+ SQL.where(Generic.sql(build_uuid = build_uuid, worker_uuid = worker_uuid))),
+ List.from[Worker],
+ { res =>
+ Worker(
+ worker_uuid = res.string(Workers.worker_uuid),
+ build_uuid = res.string(Workers.build_uuid),
+ hostname = res.string(Workers.hostname),
+ java_pid = res.long(Workers.java_pid),
+ java_start = res.date(Workers.java_start),
+ start = res.date(Workers.start),
+ stamp = res.date(Workers.stamp),
+ stop = res.get_date(Workers.stop),
+ serial = res.long(Workers.serial))
+ })
+ }
+
def serial_max(db: SQL.Database): Long =
db.execute_query_statementO[Long](
Workers.table.select(List(Workers.serial_max)),
res => res.long(Workers.serial)
).getOrElse(0L)
- def start_worker(db: SQL.Database, worker_uuid: String, build_uuid: String): Long = {
+ def start_worker(
+ db: SQL.Database,
+ worker_uuid: String,
+ build_uuid: String,
+ hostname: String,
+ java_pid: Long,
+ java_start: Date
+ ): Long = {
def err(msg: String): Nothing =
error("Cannot start worker " + worker_uuid + if_proper(msg, "\n" + msg))
@@ -470,10 +523,13 @@
val now = db.now()
stmt.string(1) = worker_uuid
stmt.string(2) = build_uuid
- stmt.date(3) = now
- stmt.date(4) = now
- stmt.date(5) = None
- stmt.long(6) = serial
+ stmt.string(3) = hostname
+ stmt.long(4) = java_pid
+ stmt.date(5) = java_start
+ stmt.date(6) = now
+ stmt.date(7) = now
+ stmt.date(8) = None
+ stmt.long(9) = serial
})
serial
}
@@ -507,15 +563,15 @@
val table = make_table("pending", List(name, deps, info))
}
- def read_pending(db: SQL.Database): List[Entry] =
+ def read_pending(db: SQL.Database): List[Task] =
db.execute_query_statement(
Pending.table.select(sql = SQL.order_by(List(Pending.name))),
- List.from[Entry],
+ List.from[Task],
{ res =>
val name = res.string(Pending.name)
val deps = res.string(Pending.deps)
val info = res.string(Pending.info)
- Entry(name, split_lines(deps), info = JSON.Object.parse(info))
+ Task(name, split_lines(deps), info = JSON.Object.parse(info))
})
def update_pending(db: SQL.Database, pending: State.Pending): Boolean = {
@@ -544,10 +600,11 @@
object Running {
val name = Generic.name.make_primary_key
+ val worker_uuid = Generic.worker_uuid
val hostname = SQL.Column.string("hostname")
val numa_node = SQL.Column.int("numa_node")
- val table = make_table("running", List(name, hostname, numa_node))
+ val table = make_table("running", List(name, worker_uuid, hostname, numa_node))
}
def read_running(db: SQL.Database): List[Build_Job.Abstract] =
@@ -556,9 +613,10 @@
List.from[Build_Job.Abstract],
{ res =>
val name = res.string(Running.name)
+ val worker_uuid = res.string(Running.worker_uuid)
val hostname = res.string(Running.hostname)
val numa_node = res.get_int(Running.numa_node)
- Build_Job.Abstract(name, Host.Node_Info(hostname, numa_node))
+ Build_Job.Abstract(name, worker_uuid, Host.Node_Info(hostname, numa_node))
}
)
@@ -577,8 +635,9 @@
db.execute_statement(Running.table.insert(), body =
{ stmt =>
stmt.string(1) = job.job_name
- stmt.string(2) = job.node_info.hostname
- stmt.int(3) = job.node_info.numa_node
+ stmt.string(2) = job.worker_uuid
+ stmt.string(3) = job.node_info.hostname
+ stmt.int(4) = job.node_info.numa_node
})
}
@@ -692,7 +751,7 @@
val serial = if (changed.exists(identity)) State.inc_serial(serial0) else serial0
stamp_worker(db, worker_uuid, serial)
- state.set_serial(serial)
+ state.set_serial(serial).set_workers(read_workers(db))
}
}
}
@@ -789,14 +848,14 @@
for {
(name, session_context) <- build_context.sessions.iterator
if !old_pending(name)
- } yield Build_Process.Entry(name, session_context.deps))
+ } yield Build_Process.Task(name, session_context.deps))
val pending1 = new_pending ::: state.pending
state.copy(sessions = sessions1, pending = pending1)
}
protected def next_job(state: Build_Process.State): Option[String] =
- if (state.running.size < (build_context.max_jobs max 1)) {
+ if (progress.stopped || state.running.size < build_context.max_jobs) {
state.pending.filter(entry => entry.is_ready && !state.is_running(entry.name))
.sortBy(_.name)(build_context.ordering)
.headOption.map(_.name)
@@ -835,7 +894,7 @@
remove_pending(session_name).
make_result(session_name, Process_Result.error, output_shasum)
}
- else if (!ancestor_results.forall(_.ok) || progress.stopped) {
+ else if (progress.stopped || !ancestor_results.forall(_.ok)) {
progress.echo(session_name + " CANCELLED")
state
.remove_pending(session_name)
@@ -852,7 +911,7 @@
store.init_output(session_name)
val job =
- Build_Job.start_session(build_context, progress, log,
+ Build_Job.start_session(build_context, worker_uuid, progress, log,
build_deps.background(session_name), input_shasum, node_info)
state1.add_running(session_name, job)
}
@@ -876,7 +935,12 @@
final def start_worker(): Unit = synchronized_database {
for (db <- _database) {
- val serial = Build_Process.Data.start_worker(db, worker_uuid, build_uuid)
+ val java = ProcessHandle.current()
+ val java_pid = java.pid
+ val java_start = Date.instant(java.info.startInstant.get)
+ val serial =
+ Build_Process.Data.start_worker(
+ db, worker_uuid, build_uuid, build_context.hostname, java_pid, java_start)
_state = _state.set_serial(serial)
}
}
@@ -890,7 +954,7 @@
/* run */
- def run(master: Boolean = false): Map[String, Process_Result] = {
+ def run(): Map[String, Process_Result] = {
def finished(): Boolean = synchronized_database { _state.finished }
def sleep(): Unit =
@@ -915,8 +979,13 @@
Map.empty[String, Process_Result]
}
else {
- if (master) start_build()
+ if (build_context.master) start_build()
+
start_worker()
+ if (build_context.master && !build_context.worker_active) {
+ progress.echo("Waiting for external workers ...")
+ }
+
try {
while (!finished()) {
if (progress.stopped) synchronized_database { _state.stop_running() }
@@ -940,7 +1009,7 @@
}
finally {
stop_worker()
- if (master) stop_build()
+ if (build_context.master) stop_build()
}
synchronized_database {
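
The extended workers table records the identity of the JVM behind each worker, obtained in the start_worker hunk above via java.lang.ProcessHandle (standard since Java 9). A self-contained check of that API; unlike the bare .get above, this sketch hedges the Optional start time with a fallback:

  import java.time.Instant

  object WorkerIdent {
    def main(args: Array[String]): Unit = {
      val handle = ProcessHandle.current()
      val java_pid: Long = handle.pid
      // startInstant is an Optional; fall back to the epoch if unavailable
      val java_start: Instant = handle.info.startInstant.orElse(Instant.EPOCH)
      println("java_pid = " + java_pid + ", java_start = " + java_start)
    }
  }
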
--- a/src/Tools/VSCode/README.md Wed Mar 08 17:51:56 2023 +0100
+++ b/src/Tools/VSCode/README.md Wed Mar 08 20:19:05 2023 +0100
@@ -19,7 +19,7 @@
* Shell commands within $ISABELLE_HOME directory:
- isabelle build_vscode_extension -U
+ isabelle component_vscode_extension -U
isabelle vscode src/Tools/VSCode/extension
* VSCode commands:
@@ -29,4 +29,4 @@
## Build and install ##
- isabelle build_vscode_extension -I
+ isabelle component_vscode_extension -I