author       Fabian Huch <huch@in.tum.de>
date         Tue, 04 Jun 2024 18:55:55 +0200
changeset    80251   6ae378791c52
parent       80250   8ae6f4e8cc2a
child        80252   96543177ab7e
permissions  -rw-r--r--
/*  Title:      Pure/Build/build_manager.scala
    Author:     Fabian Huch, TU Muenchen

Isabelle manager for automated and quasi-interactive builds, with web frontend.
*/

package isabelle


import scala.collection.mutable
import scala.annotation.tailrec


object Build_Manager {
  /* task state synchronized via db */

  object Component {
    def parse(s: String): Component =
      space_explode('/', s) match {
        case name :: rev :: Nil => Component(name, rev)
        case _ => error("Malformed component: " + quote(s))
      }

    def AFP(rev: String = "") = Component("AFP", rev)
  }

  case class Component(name: String, rev: String = "") {
    override def toString: String = name + "/" + rev
  }

  sealed trait Build_Config {
    def name: String
    def components: List[Component]
    def fresh_build: Boolean
    def command(build_hosts: List[Build_Cluster.Host]): String
  }

  case class CI_Build(name: String, components: List[Component]) extends Build_Config {
    def fresh_build: Boolean = true
    def command(build_hosts: List[Build_Cluster.Host]): String = " ci_build " + name
  }

  object User_Build {
    val name: String = "user"
  }

  case class User_Build(
    afp_rev: Option[String] = None,
    prefs: List[Options.Spec] = Nil,
    requirements: Boolean = false,
    all_sessions: Boolean = false,
    base_sessions: List[String] = Nil,
    exclude_session_groups: List[String] = Nil,
    exclude_sessions: List[String] = Nil,
    session_groups: List[String] = Nil,
    sessions: List[String] = Nil,
    build_heap: Boolean = false,
    clean_build: Boolean = false,
    export_files: Boolean = false,
    fresh_build: Boolean = false,
    presentation: Boolean = false
  ) extends Build_Config {
    def name: String = User_Build.name
    def components: List[Component] = afp_rev.map(Component.AFP).toList
    def command(build_hosts: List[Build_Cluster.Host]): String = {
      " build" +
        if_proper(afp_rev, " -A:") +
        base_sessions.map(session => " -B " + Bash.string(session)).mkString +
        if_proper(build_hosts, build_hosts.map(host => " -H " + Bash.string(host.print)).mkString) +
        if_proper(presentation, " -P:") +
        if_proper(requirements, " -R") +
        if_proper(all_sessions, " -a") +
        if_proper(build_heap, " -b") +
        if_proper(clean_build, " -c") +
        if_proper(export_files, " -e") +
        if_proper(fresh_build, " -f") +
        Options.Spec.bash_strings(prefs, bg = true) +
        " -v" +
        sessions.map(session => " " + Bash.string(session)).mkString
    }
  }

  enum Priority { case low, normal, high }

  sealed trait T extends Library.Named

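  /* A build passes through three stages: a submitted Task (pending), a numbered Job (running),
     and finally a Result (finished). */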
  sealed case class Task(
    build_config: Build_Config,
    id: UUID.T = UUID.random(),
    submit_date: Date = Date.now(),
    priority: Priority = Priority.normal,
    isabelle_rev: String = ""
  ) extends T {
    def name: String = id.toString
    def kind: String = build_config.name
    def components: List[Component] = build_config.components
  }

  sealed case class Job(
    id: UUID.T,
    kind: String,
    number: Long,
    isabelle_rev: String,
    components: List[Component],
    start_date: Date = Date.now(),
    cancelled: Boolean = false
  ) extends T { def name: String = kind + "/" + number }

  object Status {
    def from_result(result: Process_Result): Status = {
      if (result.ok) Status.ok
      else if (result.interrupted) Status.cancelled
      else Status.failed
    }
  }

  enum Status { case ok, cancelled, aborted, failed }

  sealed case class Result(
    kind: String,
    number: Long,
    status: Status,
    id: Option[UUID.T] = None,
    date: Date = Date.now(),
    serial: Long = 0,
  ) extends T { def name: String = kind + "/" + number }

  object State {
    def max_serial(serials: Iterable[Long]): Long = serials.maxOption.getOrElse(0L)
    def inc_serial(serial: Long): Long = {
      require(serial < Long.MaxValue, "number overflow")
      serial + 1
    }

    type Pending = Library.Update.Data[Task]
    type Running = Library.Update.Data[Job]
    type Finished = Map[String, Result]
  }

  sealed case class State(
    serial: Long = 0,
    pending: State.Pending = Map.empty,
    running: State.Running = Map.empty,
    finished: State.Finished = Map.empty
  ) {
    def next_serial: Long = State.inc_serial(serial)

    def add_pending(task: Task): State = copy(pending = pending + (task.name -> task))
    def remove_pending(name: String): State = copy(pending = pending - name)

    def num_builds = running.size + finished.size

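    /* candidates for the next job: tasks of the highest pending priority, oldest submission first */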
    def next: List[Task] =
      if (pending.isEmpty) Nil
      else {
        val priority = pending.values.map(_.priority).maxBy(_.ordinal)
        pending.values.filter(_.priority == priority).toList.sortBy(_.submit_date)(Date.Ordering)
      }

    def add_running(job: Job): State = copy(running = running + (job.name -> job))
    def remove_running(name: String): State = copy(running = running - name)

    def add_finished(result: Result): State = copy(finished = finished + (result.name -> result))

    lazy val kinds = (
      pending.values.map(_.kind) ++
      running.values.map(_.kind) ++
      finished.values.map(_.kind)).toList.distinct

    def next_number(kind: String): Long = {
      val serials = get_finished(kind).map(_.number) ::: get_running(kind).map(_.number)
      State.inc_serial(State.max_serial(serials))
    }

    def get_running(kind: String): List[Job] =
      (for ((_, job) <- running if job.kind == kind) yield job).toList

    def get_finished(kind: String): List[Result] =
      (for ((_, result) <- finished if result.kind == kind) yield result).toList

    def get(name: String): Option[T] =
      pending.get(name).orElse(running.get(name)).orElse(finished.get(name))

    def get(id: UUID.T): Option[T] =
      pending.values.find(_.id == id).orElse(
        running.values.find(_.id == id)).orElse(
        finished.values.find(_.id.contains(id)))
  }


  /* SQL data model */

  object private_data extends SQL.Data("isabelle_build_manager") {
    /* tables */

    override lazy val tables: SQL.Tables =
      SQL.Tables(State.table, Pending.table, Running.table, Finished.table)


    /* state */

    object State {
      val serial = SQL.Column.long("serial").make_primary_key

      val table = make_table(List(serial), name = "state")
    }

    def read_serial(db: SQL.Database): Long =
      db.execute_query_statementO[Long](
        State.table.select(List(State.serial.max)),
        _.long(State.serial)).getOrElse(0L)

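    /* the single "state" row stores the latest serial; pull fresh data only when it has moved on */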
    def pull_state(db: SQL.Database, state: State): State = {
      val serial_db = read_serial(db)
      if (serial_db == state.serial) state
      else {
        val serial = serial_db max state.serial

        val pending = pull_pending(db)
        val running = pull_running(db)
        val finished = pull_finished(db, state.finished)

        state.copy(serial = serial, pending = pending, running = running, finished = finished)
      }
    }

    def push_state(db: SQL.Database, old_state: State, state: State): State = {
      val finished = push_finished(db, state.finished)
      val updates =
        List(
          update_pending(db, old_state.pending, state.pending),
          update_running(db, old_state.running, state.running),
        ).filter(_.defined)

      if (updates.isEmpty && finished == old_state.finished) state
      else {
        val serial = state.next_serial
        db.execute_statement(State.table.delete(State.serial.where_equal(old_state.serial)))
        db.execute_statement(State.table.insert(), body =
          { (stmt: SQL.Statement) =>
            stmt.long(1) = serial
          })
        state.copy(serial = serial, finished = finished)
      }
    }


    /* pending */

    object Pending {
      val kind = SQL.Column.string("kind")
      val id = SQL.Column.string("id").make_primary_key
      val submit_date = SQL.Column.date("submit_date")
      val priority = SQL.Column.string("priority")
      val isabelle_rev = SQL.Column.string("isabelle_rev")
      val components = SQL.Column.string("components")

      val prefs = SQL.Column.string("prefs")
      val requirements = SQL.Column.bool("requirements")
      val all_sessions = SQL.Column.bool("all_sessions")
      val base_sessions = SQL.Column.string("base_sessions")
      val exclude_session_groups = SQL.Column.string("exclude_session_groups")
      val exclude_sessions = SQL.Column.string("exclude_sessions")
      val session_groups = SQL.Column.string("session_groups")
      val sessions = SQL.Column.string("sessions")
      val build_heap = SQL.Column.bool("build_heap")
      val clean_build = SQL.Column.bool("clean_build")
      val export_files = SQL.Column.bool("export_files")
      val fresh_build = SQL.Column.bool("fresh_build")
      val presentation = SQL.Column.bool("presentation")

      val table =
        make_table(List(kind, id, submit_date, priority, isabelle_rev, components, prefs,
          requirements, all_sessions, base_sessions, exclude_session_groups, exclude_sessions,
          session_groups, sessions, build_heap, clean_build, export_files, fresh_build,
          presentation),
          name = "pending")
    }

    def pull_pending(db: SQL.Database): Build_Manager.State.Pending =
      db.execute_query_statement(Pending.table.select(), Map.from[String, Task], get =
        { res =>
          val kind = res.string(Pending.kind)
          val id = res.string(Pending.id)
          val submit_date = res.date(Pending.submit_date)
          val priority = Priority.valueOf(res.string(Pending.priority))
          val isabelle_rev = res.string(Pending.isabelle_rev)
          val components = space_explode(',', res.string(Pending.components)).map(Component.parse)

          val build_config =
            if (kind != User_Build.name) CI_Build(kind, components)
            else {
              val prefs = Options.Spec.parse(res.string(Pending.prefs))
              val requirements = res.bool(Pending.requirements)
              val all_sessions = res.bool(Pending.all_sessions)
              val base_sessions = space_explode(',', res.string(Pending.base_sessions))
              val exclude_session_groups =
                space_explode(',', res.string(Pending.exclude_session_groups))
              val exclude_sessions = space_explode(',', res.string(Pending.exclude_sessions))
              val session_groups = space_explode(',', res.string(Pending.session_groups))
              val sessions = space_explode(',', res.string(Pending.sessions))
              val build_heap = res.bool(Pending.build_heap)
              val clean_build = res.bool(Pending.clean_build)
              val export_files = res.bool(Pending.export_files)
              val fresh_build = res.bool(Pending.fresh_build)
              val presentation = res.bool(Pending.presentation)

              val afp_rev = components.find(_.name == Component.AFP().name).map(_.rev)
              User_Build(afp_rev, prefs, requirements, all_sessions, base_sessions,
                exclude_session_groups, exclude_sessions, session_groups, sessions, build_heap,
                clean_build, export_files, fresh_build, presentation)
            }

          val task = Task(build_config, UUID.make(id), submit_date, priority, isabelle_rev)

          task.name -> task
        })

    def update_pending(
      db: SQL.Database,
      old_pending: Build_Manager.State.Pending,
      pending: Build_Manager.State.Pending
    ): Library.Update = {
      val update = Library.Update.make(old_pending, pending)
      val delete = update.delete.map(old_pending(_).id.toString)

      if (update.deletes)
        db.execute_statement(Pending.table.delete(Pending.id.where_member(delete)))

      if (update.inserts) {
        db.execute_batch_statement(Pending.table.insert(), batch =
          for (name <- update.insert) yield { (stmt: SQL.Statement) =>
            val task = pending(name)
            stmt.string(1) = task.kind
            stmt.string(2) = task.id.toString
            stmt.date(3) = task.submit_date
            stmt.string(4) = task.priority.toString
            stmt.string(5) = task.isabelle_rev
            stmt.string(6) = task.components.mkString(",")

            def get[A](f: User_Build => A): Option[A] =
              task.build_config match {
                case user_build: User_Build => Some(f(user_build))
                case _ => None
              }

            stmt.string(7) = get(user_build => user_build.prefs.map(_.print).mkString(","))
            stmt.bool(8) = get(_.requirements)
            stmt.bool(9) = get(_.all_sessions)
            stmt.string(10) = get(_.base_sessions.mkString(","))
            stmt.string(11) = get(_.exclude_session_groups.mkString(","))
            stmt.string(12) = get(_.exclude_sessions.mkString(","))
            stmt.string(13) = get(_.session_groups.mkString(","))
            stmt.string(14) = get(_.sessions.mkString(","))
            stmt.bool(15) = get(_.build_heap)
            stmt.bool(16) = get(_.clean_build)
            stmt.bool(17) = get(_.export_files)
            stmt.bool(18) = get(_.fresh_build)
            stmt.bool(19) = get(_.presentation)
          })
      }

      update
    }


    /* running */

    object Running {
      val id = SQL.Column.string("id").make_primary_key
      val kind = SQL.Column.string("kind")
      val number = SQL.Column.long("number")
      val isabelle_rev = SQL.Column.string("isabelle_rev")
      val components = SQL.Column.string("components")
      val start_date = SQL.Column.date("start_date")
      val cancelled = SQL.Column.bool("cancelled")

      val table =
        make_table(List(id, kind, number, isabelle_rev, components, start_date, cancelled),
          name = "running")
    }

    def pull_running(db: SQL.Database): Build_Manager.State.Running =
      db.execute_query_statement(Running.table.select(), Map.from[String, Job], get =
        { res =>
          val id = res.string(Running.id)
          val kind = res.string(Running.kind)
          val number = res.long(Running.number)
          val isabelle_rev = res.string(Running.isabelle_rev)
          val components = space_explode(',', res.string(Running.components)).map(Component.parse)
          val start_date = res.date(Running.start_date)
          val cancelled = res.bool(Running.cancelled)

          val job =
            Job(UUID.make(id), kind, number, isabelle_rev, components, start_date, cancelled)

          job.name -> job
        })

    def update_running(
      db: SQL.Database,
      old_running: Build_Manager.State.Running,
      running: Build_Manager.State.Running
    ): Library.Update = {
      val update = Library.Update.make(old_running, running)
      val delete = update.delete.map(old_running(_).id.toString)

      if (update.deletes)
        db.execute_statement(Running.table.delete(Running.id.where_member(delete)))

      if (update.inserts) {
        db.execute_batch_statement(Running.table.insert(), batch =
          for (name <- update.insert) yield { (stmt: SQL.Statement) =>
            val job = running(name)
            stmt.string(1) = job.id.toString
            stmt.string(2) = job.kind
            stmt.long(3) = job.number
            stmt.string(4) = job.isabelle_rev
            stmt.string(5) = job.components.mkString(",")
            stmt.date(6) = job.start_date
            stmt.bool(7) = job.cancelled
          })
      }
      update
    }


    /* finished */

    object Finished {
      val kind = SQL.Column.string("kind")
      val number = SQL.Column.long("number")
      val status = SQL.Column.string("status")
      val id = SQL.Column.string("id")
      val date = SQL.Column.date("date")
      val serial = SQL.Column.long("serial").make_primary_key

      val table = make_table(List(kind, number, status, id, date, serial), name = "finished")
    }

    def read_finished_serial(db: SQL.Database): Long =
      db.execute_query_statementO[Long](
        Finished.table.select(List(Finished.serial.max)),
        _.long(Finished.serial)).getOrElse(0L)

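    /* finished results are append-only: fetch only the entries beyond the locally known serial */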
    def pull_finished(
      db: SQL.Database,
      finished: Build_Manager.State.Finished
    ): Build_Manager.State.Finished = {
      val max_serial0 = Build_Manager.State.max_serial(finished.values.map(_.serial))
      val max_serial1 = read_finished_serial(db)
      val missing = (max_serial0 + 1) to max_serial1
      finished ++ db.execute_query_statement(
        Finished.table.select(sql = Finished.serial.where_member_long(missing)),
        Map.from[String, Result], get =
        { res =>
          val kind = res.string(Finished.kind)
          val number = res.long(Finished.number)
          val status = Status.valueOf(res.string(Finished.status))
          val id = res.get_string(Finished.id).map(UUID.make)
          val date = res.date(Finished.date)
          val serial = res.long(Finished.serial)

          val result = Result(kind, number, status, id, date, serial)
          result.name -> result
        }
      )
    }

    def push_finished(
      db: SQL.Database,
      finished: Build_Manager.State.Finished
    ): Build_Manager.State.Finished = {
      val (insert0, old) = finished.partition(_._2.serial == 0L)
      val max_serial = Build_Manager.State.max_serial(finished.map(_._2.serial))
      val insert =
        for (((_, result), n) <- insert0.zipWithIndex)
        yield result.copy(serial = max_serial + 1 + n)

      if (insert.nonEmpty)
        db.execute_batch_statement(Finished.table.insert(), batch =
          for (result <- insert) yield { (stmt: SQL.Statement) =>
            stmt.string(1) = result.kind
            stmt.long(2) = result.number
            stmt.string(3) = result.status.toString
            stmt.string(4) = result.id.map(_.toString)
            stmt.date(5) = result.date
            stmt.long(6) = result.serial
          })

      old ++ insert.map(result => result.serial.toString -> result)
    }
  }


  /* running build manager processes */

  abstract class Loop_Process[A](name: String, store: Store, progress: Progress)
    extends Runnable {
    val options = store.options

    private val _database =
      try { store.open_database() }
      catch { case exn: Throwable => close(); throw exn }

    def close(): Unit = Option(_database).foreach(_.close())

    protected var _state = State()

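    /* pull the shared state, run the body, and push changes back, all within one db transaction */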
    protected def synchronized_database[A](label: String)(body: => A): A = synchronized {
      Build_Manager.private_data.transaction_lock(_database, label = name + "." + label) {
        val old_state = Build_Manager.private_data.pull_state(_database, _state)
        _state = old_state
        val res = body
        _state = Build_Manager.private_data.push_state(_database, old_state, _state)
        res
      }
    }

    protected def delay = options.seconds("build_manager_delay")

    def init: A
    def loop_body(a: A): A
    def stopped(a: A): Boolean = progress.stopped

    private val interrupted = Synchronized(false)
    private def sleep(time_limit: Time): Unit =
      interrupted.timed_access(_ => Some(time_limit), b => if (b) Some((), false) else None)
    def interrupt(): Unit = interrupted.change(_ => true)

    @tailrec private def loop(a: A): Unit =
      if (!stopped(a)) {
        val start = Time.now()
        val a1 = loop_body(a)
        if (!stopped(a)) {
          sleep(start + delay)
          loop(a1)
        }
      }

    override def run(): Unit = {
      progress.echo("Started " + name)
      loop(init)
      close()
      progress.echo("Stopped " + name)
    }

    def echo(msg: String) = progress.echo(name + ": " + msg)
    def echo_error_message(msg: String) = progress.echo_error_message(name + ": " + msg)
  }


  /* build runner */

  object Runner {
    object State {
      def empty: State = new State(Map.empty, Map.empty)
    }

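    /* per-job build processes and their pending results, managed as forked futures */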
    class State private(
      processes: Map[String, Future[Bash.Process]],
      results: Map[String, Future[Process_Result]]
    ) {
      def is_empty = processes.isEmpty && results.isEmpty

      def init(build_config: Build_Config, job: Job, context: Context): State = {
        val process = Future.fork(context.process(build_config))
        val result =
          Future.fork(
            process.join_result match {
              case Exn.Res(res) => context.run(res)
              case Exn.Exn(_) => Process_Result(Process_Result.RC.interrupt)
            })
        new State(processes + (job.name -> process), results + (job.name -> result))
      }

      def running: List[String] = processes.keys.toList

      def update: (State, Map[String, Process_Result]) = {
        val finished =
          for ((name, future) <- results if future.is_finished) yield name -> future.join

        val processes1 = processes.filterNot((name, _) => finished.contains(name))
        val results1 = results.filterNot((name, _) => finished.contains(name))

        (new State(processes1, results1), finished)
      }

      def cancel(cancelled: List[String]): State = {
        for (name <- cancelled) {
          val process = processes(name)
          if (process.is_finished) process.join.interrupt()
          else process.cancel()
        }

        new State(processes.filterNot((name, _) => cancelled.contains(name)), results)
      }
    }
  }

  class Runner(
    store: Store,
    build_hosts: List[Build_Cluster.Host],
    isabelle_repository: Mercurial.Repository,
    sync_dirs: List[Sync.Dir],
    progress: Progress
  ) extends Loop_Process[Runner.State]("Runner", store, progress) {
    val rsync_context = Rsync.Context()

    private def sync(repository: Mercurial.Repository, rev: String, target: Path): String = {
      repository.pull()

      if (rev.nonEmpty) repository.sync(rsync_context, target, rev = rev)

      Exn.capture(repository.id(File.read(target + Mercurial.Hg_Sync.PATH_ID))) match {
        case Exn.Res(res) => res
        case Exn.Exn(exn) => ""
      }
    }

    private def start_next(): Option[(Build_Config, Job)] =
      synchronized_database("start_job") {
        _state.next.headOption.flatMap { task =>
          progress.echo("Initializing " + task.name)

          _state = _state.remove_pending(task.name)

          val context = Context(store, task, build_hosts)
          val number = _state.next_number(task.kind)

          Exn.capture {
            val isabelle_rev =
              sync(isabelle_repository, task.isabelle_rev, context.isabelle_dir)

            val components =
              for (component <- task.components)
              yield sync_dirs.find(_.name == component.name) match {
                case Some(sync_dir) =>
                  val target = context.isabelle_dir + sync_dir.target
                  component.copy(rev = sync(sync_dir.hg, component.rev, target))
                case None =>
                  if (component.rev.isEmpty) component
                  else error("Unknown component " + component)
              }

            Job(task.id, task.kind, number, isabelle_rev, components)
          } match {
            case Exn.Res(job) =>
              _state = _state.add_running(job)
              val context1 = context.move(Context(store, job))

              val msg = "Starting " + job.name
              echo(msg + " (id " + job.id + ")")
              context1.progress.echo(msg)

              Some(task.build_config, job)
            case Exn.Exn(exn) =>
              val result = Result(task.kind, number, Status.aborted)
              val context1 = Context(store, result)

              val msg = "Failed to start job: " + exn.getMessage
              echo_error_message(msg)
              context1.progress.echo_error_message(msg)

              context.remove()
              _state = _state.add_finished(result)

              None
          }
        }
      }

    private def stop_cancelled(state: Runner.State): Runner.State =
      synchronized_database("stop_cancelled") {
        val cancelled = for (name <- state.running if _state.running(name).cancelled) yield name
        state.cancel(cancelled)
      }

    private def finish_job(name: String, process_result: Process_Result): Unit =
      synchronized_database("finish_job") {
        val job = _state.running(name)
        val context = Context(store, job, build_hosts)

        val result = Result(job.kind, job.number, Status.from_result(process_result), Some(job.id))
        context.copy_results(Context(store, result))
        context.remove()
        echo("Finished job " + job.id + " with status code " + process_result.rc)

        _state = _state
          .remove_running(job.name)
          .add_finished(result)
      }

    override def stopped(state: Runner.State): Boolean = progress.stopped && state.is_empty

    def init: Runner.State = Runner.State.empty
    def loop_body(state: Runner.State): Runner.State = {
      if (state.is_empty && !progress.stopped) {
        start_next() match {
          case None => state
          case Some((build_config, job)) =>
            state.init(build_config, job, Context(store, job, build_hosts))
        }
      }
      else {
        val (state1, results) = stop_cancelled(state).update
        results.foreach(finish_job)
        state1
      }
    }
  }


  /* repository poller */

  object Poller {
    case class State(ids: List[String], next: Future[List[String]])
  }

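  /* polls the Isabelle and component repositories; new revisions queue low-priority CI tasks */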
  class Poller(
    ci_jobs: List[String],
    store: Store,
    isabelle_repository: Mercurial.Repository,
    sync_dirs: List[Sync.Dir],
    progress: Progress
  ) extends Loop_Process[Poller.State]("Poller", store, progress) {

    override def delay = options.seconds("build_manager_poll_delay")

    private def ids: List[String] =
      isabelle_repository.id("default") :: sync_dirs.map(_.hg.id("default"))

    private def poll: Future[List[String]] = Future.fork {
      Par_List.map((repo: Mercurial.Repository) => repo.pull(),
        isabelle_repository :: sync_dirs.map(_.hg))

      ids
    }

    val init: Poller.State = Poller.State(ids, poll)

    def ci_task(name: String): Task =
      Task(CI_Build(name, sync_dirs.map(dir => Component(dir.name, "default"))),
        priority = Priority.low, isabelle_rev = "default")

    private def add_task(): Unit = synchronized_database("add_task") {
      for (name <- ci_jobs if !_state.pending.values.exists(_.kind == name)) {
        _state = _state.add_pending(ci_task(name))
      }
    }

    def loop_body(state: Poller.State): Poller.State =
      if (!state.next.is_finished) state
      else {
        state.next.join_result match {
          case Exn.Exn(exn) =>
            echo_error_message("Could not reach repository: " + exn.getMessage)
            Poller.State(state.ids, poll)
          case Exn.Res(ids1) =>
            if (state.ids != ids1) {
              echo("Found new revisions: " + ids1)
              add_task()
            }
            Poller.State(ids1, poll)
        }
      }
  }


  /* web server */

  object Web_Server {
    object Page {
      val HOME = Path.basic("home")
      val OVERVIEW = Path.basic("overview")
      val BUILD = Path.basic("build")
    }

    object API {
      val BUILD_CANCEL = Path.explode("api/build/cancel")
      val CSS = Path.explode("api/isabelle.css")
    }

    object Cache {
      def empty: Cache = new Cache()
    }

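    /* cache of build log contents, kept for a limited time after the last access */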
    class Cache private(keep: Time = Time.minutes(1)) {
      var logs: Map[String, (Time, String)] = Map.empty

      def update(store: Store, state: State): Unit = synchronized {
        logs =
          for {
            (name, (time, log)) <- logs
            if time + keep > Time.now()
          } yield name -> (time, Context(store, state.get(name).get).log)
      }

      def lookup(store: Store, elem: T): String = synchronized {
        logs.get(elem.name) match {
          case Some((_, log)) =>
            logs += elem.name -> (Time.now(), log)
          case None =>
            logs += elem.name -> (Time.now(), Context(store, elem).log)
        }
        logs(elem.name)._2
      }
    }
  }

  class Web_Server(port: Int, paths: Web_App.Paths, store: Store, progress: Progress)
    extends Loop_Process[Unit]("Web_Server", store, progress) {
    import Web_App.*
    import Web_Server.*

    val cache = Cache.empty
    val Id = new Properties.String(Markup.ID)

    enum Model {
      case Error extends Model
      case Cancelled extends Model
      case Home(state: State) extends Model
      case Overview(kind: String, state: State) extends Model
      case Build(elem: T, state: State, public: Boolean = true) extends Model
    }

    object View {
      import HTML.*
      import More_HTML.*

      def render_if(cond: Boolean, body: XML.Body): XML.Body = if (cond) body else Nil

      def frontend_link(url: Url, xml: XML.Body): XML.Elem =
        link(url.toString, xml) + ("target" -> "_parent")

      def link_kind(kind: String): XML.Elem =
        frontend_link(paths.frontend_url(Page.OVERVIEW, Markup.Kind(kind)), text(kind))
      def link_build(name: String, number: Long): XML.Elem =
        frontend_link(paths.frontend_url(Page.BUILD, Markup.Name(name)), text("#" + number))

      def render_home(state: State): XML.Body = {
        def render_kind(kind: String): XML.Elem = {
          val running = state.get_running(kind).sortBy(_.number).reverse
          val finished = state.get_finished(kind).sortBy(_.number).reverse

          def render_previous(finished: List[Result]): XML.Body = {
            val (failed, rest) = finished.span(_.status != Status.ok)
            val first_failed = failed.lastOption.map(result =>
              par(
                text("first failure: ") :::
                link_build(result.name, result.number) ::
                text(" on " + result.date)))
            val last_success = rest.headOption.map(result =>
              par(
                text("last success: ") ::: link_build(result.name, result.number) ::
                text(" on " + result.date)))
            first_failed.toList ::: last_success.toList
          }

          def render_job(job: Job): XML.Body =
            par(link_build(job.name, job.number) :: text(": running since " + job.start_date)) ::
            render_if(finished.headOption.exists(_.status != Status.ok), render_previous(finished))

          def render_result(result: Result): XML.Body =
            par(
              link_build(result.name, result.number) ::
              text(" (" + result.status.toString + ") on " + result.date)) ::
            render_if(result.status != Status.ok, render_previous(finished.tail))

          fieldset(
            XML.elem("legend", List(link_kind(kind))) ::
            (if (running.nonEmpty) render_job(running.head)
            else if (finished.nonEmpty) render_result(finished.head)
            else Nil))
        }

        chapter("Dashboard") ::
        text("Queue: " + state.pending.size + " tasks waiting") :::
        section("Builds") :: text("Total: " + state.num_builds + " builds") :::
        state.kinds.map(render_kind)
      }

      def render_overview(kind: String, state: State): XML.Body = {
        def render_job(job: Job): XML.Body =
          List(par(link_build(job.name, job.number) :: text(" running since " + job.start_date)))

        def render_result(result: Result): XML.Body =
          List(par(
            link_build(result.name, result.number) ::
            text(" (" + result.status + ") on " + result.date)))

        chapter(kind) ::
          itemize(
            state.get_running(kind).sortBy(_.number).reverse.map(render_job) :::
            state.get_finished(kind).sortBy(_.number).reverse.map(render_result)) :: Nil
      }

      private val ID = Params.key(Markup.ID)

      def render_build(elem: T, state: State, public: Boolean): XML.Body = {
        def render_cancel(id: UUID.T): XML.Body =
          render_if(!public, List(
            submit_form("", List(hidden(ID, id.toString),
              api_button(paths.api_route(API.BUILD_CANCEL), "cancel build")))))

        def render_rev(isabelle_rev: String, components: List[Component]): XML.Body =
          for {
            component <- Component("Isabelle", isabelle_rev) :: components
            if component.rev.nonEmpty
          } yield par(text(component.toString))

        chapter("Build " + elem.name) :: (elem match {
          case task: Task =>
            par(text("Task from " + task.submit_date + ". ")) ::
            render_rev(task.isabelle_rev, task.components) :::
            render_cancel(task.id)
          case job: Job =>
            par(text("Start: " + job.start_date)) ::
            par(
              if (job.cancelled) text("Cancelling...")
              else text("Running...") ::: render_cancel(job.id)) ::
            render_rev(job.isabelle_rev, job.components) :::
            source(cache.lookup(store, job)) :: Nil
          case result: Result =>
            par(text("Date: " + result.date)) ::
            par(text("Status: " + result.status)) ::
            source(cache.lookup(store, result)) :: Nil
        })
      }

      def render_cancelled: XML.Body =
        List(chapter("Build Cancelled"), frontend_link(paths.frontend_url(Page.HOME), text("Home")))

      def parse_id(params: Params.Data): Option[UUID.T] =
        for {
          id <- params.get(ID)
          uuid <- UUID.unapply(id)
        } yield uuid
    }

    private val server = new Server[Model](paths, port, progress = progress) {
      /* control */

      def overview: Some[Model.Home] = Some(Model.Home(_state))

      def get_overview(props: Properties.T): Option[Model.Overview] =
        props match {
          case Markup.Kind(kind) => Some(Model.Overview(kind, _state))
          case _ => None
        }

      def get_build(props: Properties.T): Option[Model.Build] =
        props match {
          case Markup.Name(name) =>
            val state = _state
            state.get(name).map(Model.Build(_, state))
          case Id(UUID(id)) =>
            val state = _state
            state.get(id).map(Model.Build(_, state, public = false))
          case _ => None
        }

      def cancel_build(params: Params.Data): Option[Model] =
        for {
          id <- View.parse_id(params)
          model <-
            synchronized_database("cancel_build") {
              _state.get(id).map {
                case task: Task =>
                  _state = _state.remove_pending(task.name)
                  Model.Cancelled
                case job: Job =>
                  val job1 = job.copy(cancelled = true)
                  _state = _state
                    .remove_running(job.name)
                    .add_running(job1)
                  Model.Build(job1, _state, public = false)
                case result: Result => Model.Build(result, _state, public = false)
              }
            }
        } yield model

      def render(model: Model): XML.Body =
        HTML.title("Isabelle Build Manager") :: (
          model match {
            case Model.Error => HTML.text("invalid request")
            case Model.Home(state) => View.render_home(state)
            case Model.Overview(kind, state) => View.render_overview(kind, state)
            case Model.Build(elem, state, public) => View.render_build(elem, state, public)
            case Model.Cancelled => View.render_cancelled
          })

      val error_model: Model = Model.Error
      val endpoints = List(
        Get(Page.HOME, "home", _ => overview),
        Get(Page.OVERVIEW, "overview", get_overview),
        Get(Page.BUILD, "build", get_build),
        Post(API.BUILD_CANCEL, "cancel build", cancel_build),
        Get_File(API.CSS, "css", _ => Some(HTML.isabelle_css)))
      val head = List(HTML.style_file(paths.api_route(API.CSS)))
    }

    def init: Unit = server.start()
    def loop_body(u: Unit): Unit = {
      if (progress.stopped) server.stop()
      else synchronized_database("iterate") { cache.update(store, _state) }
    }
  }


  /* context */

  object Context {
    def apply(store: Store, elem: T, build_hosts: List[Build_Cluster.Host] = Nil): Context =
      new Context(store, store.dir(elem), build_hosts)
  }

  class Context private(store: Store, val dir: Path, val build_hosts: List[Build_Cluster.Host]) {
    def isabelle_dir: Path = dir + Path.basic("isabelle")

    private val log_file = dir + Path.basic("log")
    val progress = new File_Progress(log_file, verbose = true)
    def log: String =
      Exn.capture(File.read(log_file)) match {
        case Exn.Exn(_) => ""
        case Exn.Res(res) => res
      }

    def move(other: Context): Context = {
      Isabelle_System.make_directory(other.dir.dir)
      Isabelle_System.move_file(dir, other.dir)
      other
    }

    def copy_results(other: Context): Context = {
      Isabelle_System.make_directory(other.dir)
      Isabelle_System.copy_file(log_file, other.log_file)
      other
    }

    def remove(): Unit = Isabelle_System.rm_tree(dir)

    lazy val ssh = store.open_ssh()

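    /* initialize the synced Isabelle distribution and start the build command via ssh */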
    def process(build_config: Build_Config): Bash.Process = {
      val isabelle = Other_Isabelle(isabelle_dir, store.identifier, ssh, progress)

      val init_components =
        for {
          dir <- build_config.components
          target = isabelle_dir + Sync.DIRS + Path.basic(dir.name)
          if Components.is_component_dir(target)
        } yield "init_component " + quote(target.absolute.implode)

      isabelle.init(other_settings = isabelle.init_components() ::: init_components,
        fresh = build_config.fresh_build, echo = true)

      val cmd = build_config.command(build_hosts)
      progress.echo("isabelle" + cmd)

      val script = File.bash_path(Isabelle_Tool.exe(isabelle.isabelle_home)) + cmd
      ssh.bash_process(isabelle.bash_context(script), settings = false)
    }

    def run(process: Bash.Process): Process_Result = {
      val process_result =
        process.result(progress_stdout = progress.echo(_), progress_stderr = progress.echo(_))
      ssh.close()
      process_result
    }
  }


  /* build manager store */

  case class Store(options: Options) {
    val base_dir = Path.explode(options.string("build_manager_dir"))
    val identifier = options.string("build_manager_identifier")

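    /* storage layout: pending/<id>, running/<kind>/<number>, finished/<kind>/<number> */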
    def dir(elem: T): Path = base_dir + (
      elem match {
        case task: Task => Path.make(List("pending", task.id.toString))
        case job: Job => Path.make(List("running", job.kind, job.number.toString))
        case result: Result => Path.make(List("finished", result.kind, result.number.toString))
      })

    def open_ssh(): SSH.Session =
      SSH.open_session(options,
        host = options.string("build_manager_ssh_host"),
        port = options.int("build_manager_ssh_port"),
        user = options.string("build_manager_ssh_user"))

    def open_database(server: SSH.Server = SSH.no_server): PostgreSQL.Database =
      PostgreSQL.open_database_server(options, server = server,
        user = options.string("build_manager_database_user"),
        password = options.string("build_manager_database_password"),
        database = options.string("build_manager_database_name"),
        host = options.string("build_manager_database_host"),
        port = options.int("build_manager_database_port"),
        ssh_host = options.string("build_manager_database_ssh_host"),
        ssh_port = options.int("build_manager_database_ssh_port"),
        ssh_user = options.string("build_manager_database_ssh_user"))

    def open_postgresql_server(): SSH.Server =
      PostgreSQL.open_server(options,
        host = options.string("build_manager_database_host"),
        port = options.int("build_manager_database_port"),
        ssh_host = options.string("build_manager_ssh_host"),
        ssh_port = options.int("build_manager_ssh_port"),
        ssh_user = options.string("build_manager_ssh_user"))
  }


  /* build manager */

  def build_manager(
    build_hosts: List[Build_Cluster.Host],
    options: Options,
    port: Int,
    sync_dirs: List[Sync.Dir] = Nil,
    progress: Progress = new Progress
  ): Unit = {
    val store = Store(options)
    val isabelle_repository = Mercurial.self_repository()
    val ci_jobs = space_explode(',', options.string("build_manager_ci_jobs"))
    val url = Url(options.string("build_manager_address"))
    val paths = Web_App.Paths(url, Path.current, true, Web_Server.Page.HOME)

    using(store.open_database())(db =>
      Build_Manager.private_data.transaction_lock(db,
        create = true, label = "Build_Manager.build_manager") {})

    val processes = List(
      new Runner(store, build_hosts, isabelle_repository, sync_dirs, progress),
      new Poller(ci_jobs, store, isabelle_repository, sync_dirs, progress),
      new Web_Server(port, paths, store, progress))

    val threads = processes.map(Isabelle_Thread.create(_))
    POSIX_Interrupt.handler {
      progress.stop()
      processes.foreach(_.interrupt())
    } {
      threads.foreach(_.start())
      threads.foreach(_.join())
    }
  }

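  /* submit a user build: sync the local working directory to the ssh server and register a pending task */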
  def build_task(
    options: Options,
    store: Store,
    afp_root: Option[Path] = None,
    base_sessions: List[String] = Nil,
    presentation: Boolean = false,
    requirements: Boolean = false,
    exclude_session_groups: List[String] = Nil,
    all_sessions: Boolean = false,
    build_heap: Boolean = false,
    clean_build: Boolean = false,
    export_files: Boolean = false,
    fresh_build: Boolean = false,
    session_groups: List[String] = Nil,
    sessions: List[String] = Nil,
    prefs: List[Options.Spec] = Nil,
    exclude_sessions: List[String] = Nil,
    rev: String = "",
    progress: Progress = new Progress
  ): UUID.T = {
    val id = UUID.random()
    val afp_rev = if (afp_root.nonEmpty) Some("") else None

    val build_config = User_Build(afp_rev, prefs, requirements, all_sessions, base_sessions,
      exclude_session_groups, exclude_sessions, session_groups, sessions, build_heap, clean_build,
      export_files, fresh_build, presentation)
    val task = Task(build_config, id, Date.now(), Priority.high)

    val context = Context(store, task)

    progress.interrupt_handler {
      using(store.open_ssh()) { ssh =>
        val rsync_context = Rsync.Context(ssh = ssh, chmod = "g+rwx")
        progress.echo("Transferring repositories...")
        Sync.sync(store.options, rsync_context, context.isabelle_dir, preserve_jars = true,
          dirs = Sync.afp_dirs(afp_root), rev = rev)
        ssh.execute("chmod g+rwx " + File.bash_path(context.dir))

        if (progress.stopped) {
          progress.echo("Cancelling submission...")
          ssh.rm_tree(context.dir)
        } else {
          using(store.open_postgresql_server()) { server =>
            using(store.open_database(server = server)) { db =>
              Build_Manager.private_data.transaction_lock(db, label = "Build_Manager.build_task") {
                val old_state = Build_Manager.private_data.pull_state(db, State())
                val state = old_state.add_pending(task)
                Build_Manager.private_data.push_state(db, old_state, state)
              }
            }
          }
          val address = options.string("build_manager_address") + "/build?id=" + task.id
          progress.echo("Submitted task. Private url: " + address)
        }
      }
    }

    id
  }


  /* Isabelle tool wrapper */

  private def show_options(relevant_options: List[String], options: Options): String =
    cat_lines(relevant_options.flatMap(options.get).map(_.print))

  private val notable_server_options =
    List(
      "build_manager_dir",
      "build_manager_address",
      "build_manager_ssh_host",
      "build_manager_ci_jobs")

  val isabelle_tool = Isabelle_Tool("build_manager", "run build manager", Scala_Project.here,
    { args =>
      var afp_root: Option[Path] = None
      val dirs = new mutable.ListBuffer[Path]
      val build_hosts = new mutable.ListBuffer[Build_Cluster.Host]
      var options = Options.init()
      var port = 8080

      val getopts = Getopts("""
Usage: isabelle build_manager [OPTIONS]

  Options are:
    -A ROOT      include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
    -D DIR       include extra component in given directory
    -H HOSTS     additional cluster host specifications of the form
                 NAMES:PARAMETERS (separated by commas)
    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
    -p PORT      explicit web server port

  Run Isabelle build manager. Notable system options:

""" + Library.indent_lines(2, show_options(notable_server_options, options)) + "\n",
        "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
        "D:" -> (arg => dirs += Path.explode(arg)),
        "H:" -> (arg => build_hosts ++= Build_Cluster.Host.parse(Registry.global, arg)),
        "o:" -> (arg => options = options + arg),
        "p:" -> (arg => port = Value.Int.parse(arg)))

      val more_args = getopts(args)
      if (more_args.nonEmpty) getopts.usage()

      val progress = new Console_Progress()
      val sync_dirs =
        Sync.afp_dirs(afp_root) ::: dirs.toList.map(dir => Sync.Dir(dir.file_name, dir))

      sync_dirs.foreach(_.check())

      build_manager(build_hosts = build_hosts.toList, options = options, port = port,
        sync_dirs = sync_dirs, progress = progress)
    })

  val isabelle_tool1 = Isabelle_Tool("build_task", "submit build task for build manager",
    Scala_Project.here,
    { args =>
      var afp_root: Option[Path] = None
      val base_sessions = new mutable.ListBuffer[String]
      var presentation = false
      var requirements = false
      val exclude_session_groups = new mutable.ListBuffer[String]
      var all_sessions = false
      var build_heap = false
      var clean_build = false
      var export_files = false
      var fresh_build = false
      val session_groups = new mutable.ListBuffer[String]
      var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS)
      var prefs: List[Options.Spec] = Nil
      var rev = ""
      val exclude_sessions = new mutable.ListBuffer[String]

      val getopts = Getopts("""
Usage: isabelle build_task [OPTIONS] [SESSIONS ...]

  Options are:
    -A ROOT      include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
    -B NAME      include session NAME and all descendants
    -P           enable HTML/PDF presentation
    -R           refer to requirements of selected sessions
    -X NAME      exclude sessions from group NAME and all descendants
    -a           select all sessions
    -b           build heap images
    -c           clean build
    -e           export files from session specification into file-system
    -f           fresh build
    -g NAME      select session group NAME
    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
    -p OPTIONS   comma-separated preferences for build process
    -r REV       explicit revision (default: state of working directory)
    -x NAME      exclude session NAME and all descendants

  Submit build task on SSH server. Notable system options:

""" + Library.indent_lines(2, show_options(List("build_manager_ssh_user"), options)) + "\n",
        "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
        "B:" -> (arg => base_sessions += arg),
        "P" -> (_ => presentation = true),
        "R" -> (_ => requirements = true),
        "X:" -> (arg => exclude_session_groups += arg),
        "a" -> (_ => all_sessions = true),
        "b" -> (_ => build_heap = true),
        "c" -> (_ => clean_build = true),
        "e" -> (_ => export_files = true),
        "f" -> (_ => fresh_build = true),
        "g:" -> (arg => session_groups += arg),
        "o:" -> (arg => options = options + arg),
        "p:" -> (arg => prefs = Options.Spec.parse(arg)),
        "r:" -> (arg => rev = arg),
        "x:" -> (arg => exclude_sessions += arg))

      val sessions = getopts(args)
      val store = Store(options)
      val progress = new Console_Progress()

      build_task(options, store = store, afp_root = afp_root, base_sessions =
        base_sessions.toList, presentation = presentation, requirements = requirements,
        exclude_session_groups = exclude_session_groups.toList, all_sessions = all_sessions,
        build_heap = build_heap, clean_build = clean_build, export_files = export_files,
        fresh_build = fresh_build, session_groups = session_groups.toList, sessions = sessions,
        prefs = prefs, rev = rev, exclude_sessions = exclude_sessions.toList, progress = progress)
    })
}

class Build_Manager_Tools extends Isabelle_Scala_Tools(
  Build_Manager.isabelle_tool, Build_Manager.isabelle_tool1)