--- a/src/Pure/System/build.scala Sun Jul 22 10:00:51 2012 +0200
+++ b/src/Pure/System/build.scala Sun Jul 22 23:36:11 2012 +0200
@@ -17,10 +17,10 @@
{
/** session information **/
- type Options = List[(String, Option[String])]
-
object Session
{
+ /* Key */
+
object Key
{
object Ordering extends scala.math.Ordering[Key]
@@ -38,13 +38,20 @@
override def toString: String = name
}
+
+ /* Info */
+
sealed case class Info(
dir: Path,
parent: Option[String],
description: String,
options: Options,
- theories: List[(Options, List[String])],
- files: List[String])
+ theories: List[(Options, List[Path])],
+ files: List[Path],
+ digest: SHA1.Digest)
+
+
+ /* Queue */
object Queue
{
@@ -55,8 +62,10 @@
keys: Map[String, Key] = Map.empty,
graph: Graph[Key, Info] = Graph.empty(Key.Ordering))
{
+ def is_empty: Boolean = graph.is_empty
+
+ def apply(name: String): Info = graph.get_node(keys(name))
def defined(name: String): Boolean = keys.isDefinedAt(name)
-
def is_inner(name: String): Boolean = !graph.is_maximal(keys(name))
def + (key: Key, info: Info): Queue =
@@ -78,6 +87,8 @@
new Queue(keys1, graph1)
}
+ def - (name: String): Queue = new Queue(keys - name, graph.del_node(keys(name)))
+
def required(names: List[String]): Queue =
{
val req = graph.all_preds(names.map(keys(_))).map(_.name).toSet
@@ -86,8 +97,16 @@
new Queue(keys1, graph1)
}
- def topological_order: List[(Key, Info)] =
- graph.topological_order.map(key => (key, graph.get_node(key)))
+ def dequeue(skip: String => Boolean): Option[(String, Info)] =
+ {
+ val it = graph.entries.dropWhile(
+ { case (key, (_, (deps, _))) => !deps.isEmpty || skip(key.name) })
+ if (it.hasNext) { val (key, (info, _)) = it.next; Some((key.name, info)) }
+ else None
+ }
+
+ def topological_order: List[(String, Info)] =
+ graph.topological_order.map(key => (key.name, graph.get_node(key)))
}
}
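
The dequeue above picks a graph entry whose predecessors are all gone from the queue and whose name is not filtered by skip (later used for sessions that are already running). A rough standalone model of that "pick a ready session" step, with a plain Map instead of Isabelle's Graph (all names below are illustrative, not the build API):

    // Standalone sketch of the "dequeue a ready session" idea; not the build code,
    // Isabelle's Graph-based Queue differs.
    object Dequeue_Sketch
    {
      // remaining (unfinished) parent sessions per pending session
      type Pending = Map[String, Set[String]]

      // pick some session whose parents are all finished and that is not skipped
      def dequeue(pending: Pending, skip: String => Boolean): Option[String] =
        pending.collectFirst({ case (name, deps) if deps.isEmpty && !skip(name) => name })

      // a finished session leaves the queue and no longer blocks its children
      def finish(pending: Pending, name: String): Pending =
        (pending - name).map({ case (n, deps) => (n, deps - name) })

      def main(args: Array[String])
      {
        var pending: Pending =
          Map("Pure" -> Set.empty[String], "HOL" -> Set("Pure"), "HOL-Library" -> Set("HOL"))
        println(dequeue(pending, Set.empty[String]))   // Some(Pure)
        pending = finish(pending, "Pure")
        println(dequeue(pending, Set("HOL")))          // None: HOL is ready but "running"
      }
    }

In the real Queue the same effect comes from graph.entries together with the new `-` operation, which drops a finished session and its edges from the graph.
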
@@ -101,8 +120,8 @@
path: Option[String],
parent: Option[String],
description: String,
- options: Options,
- theories: List[(Options, List[String])],
+ options: List[Options.Spec],
+ theories: List[(List[Options.Spec], List[String])],
files: List[String])
private object Parser extends Parse.Parser
@@ -161,7 +180,8 @@
private def is_pure(name: String): Boolean = name == "RAW" || name == "Pure"
- private def sessions_root(dir: Path, root: JFile, queue: Session.Queue): Session.Queue =
+ private def sessions_root(options: Options, dir: Path, root: JFile, queue: Session.Queue)
+ : Session.Queue =
{
(queue /: Parser.parse_entries(root))((queue1, entry) =>
try {
@@ -187,9 +207,15 @@
}
val key = Session.Key(full_name, entry.order)
+
+ val theories =
+ entry.theories.map({ case (opts, thys) => (options ++ opts, thys.map(Path.explode(_))) })
+ val files = entry.files.map(Path.explode(_))
+ val digest = SHA1.digest((full_name, entry.parent, entry.options, entry.theories).toString)
+
val info =
Session.Info(dir + path, entry.parent,
- entry.description, entry.options, entry.theories, entry.files)
+ entry.description, options ++ entry.options, theories, files, digest)
queue1 + (key, info)
}
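
Each Session.Info now carries a digest of the data that defines the entry (name, parent, options, theories), so a later run can detect that the ROOT specification itself changed. A rough sketch of that fingerprinting step with plain JDK SHA-1 (sha1_hex and the sample tuples are illustrative; Isabelle's SHA1.digest is likewise built on java.security.MessageDigest):

    import java.security.MessageDigest

    // Sketch: fingerprint a session specification so changes can be detected later.
    object Digest_Sketch
    {
      def sha1_hex(s: String): String =
        MessageDigest.getInstance("SHA-1").digest(s.getBytes("UTF-8"))
          .map(b => "%02x".format(b & 0xFF)).mkString

      def main(args: Array[String])
      {
        val spec1 = ("HOL", Some("Pure"), List("document=pdf"), List("Main")).toString
        val spec2 = ("HOL", Some("Pure"), List("document=false"), List("Main")).toString
        println(sha1_hex(spec1))
        println(sha1_hex(spec1) == sha1_hex(spec2))   // false: changed options, new digest
      }
    }
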
@@ -200,22 +226,24 @@
})
}
- private def sessions_dir(strict: Boolean, dir: Path, queue: Session.Queue): Session.Queue =
+ private def sessions_dir(options: Options, strict: Boolean, dir: Path, queue: Session.Queue)
+ : Session.Queue =
{
val root = (dir + ROOT).file
- if (root.isFile) sessions_root(dir, root, queue)
+ if (root.isFile) sessions_root(options, dir, root, queue)
else if (strict) error("Bad session root file: " + quote(root.toString))
else queue
}
- private def sessions_catalog(dir: Path, catalog: JFile, queue: Session.Queue): Session.Queue =
+ private def sessions_catalog(options: Options, dir: Path, catalog: JFile, queue: Session.Queue)
+ : Session.Queue =
{
val dirs =
split_lines(File.read(catalog)).filterNot(line => line == "" || line.startsWith("#"))
(queue /: dirs)((queue1, dir1) =>
try {
val dir2 = dir + Path.explode(dir1)
- if (dir2.file.isDirectory) sessions_dir(true, dir2, queue1)
+ if (dir2.file.isDirectory) sessions_dir(options, true, dir2, queue1)
else error("Bad session directory: " + dir2.toString)
}
catch {
@@ -224,20 +252,20 @@
})
}
- def find_sessions(all_sessions: Boolean, sessions: List[String],
+ def find_sessions(options: Options, all_sessions: Boolean, sessions: List[String],
more_dirs: List[Path]): Session.Queue =
{
var queue = Session.Queue.empty
for (dir <- Isabelle_System.components()) {
- queue = sessions_dir(false, dir, queue)
+ queue = sessions_dir(options, false, dir, queue)
val catalog = (dir + SESSIONS).file
if (catalog.isFile)
- queue = sessions_catalog(dir, catalog, queue)
+ queue = sessions_catalog(options, dir, catalog, queue)
}
- for (dir <- more_dirs) queue = sessions_dir(true, dir, queue)
+ for (dir <- more_dirs) queue = sessions_dir(options, true, dir, queue)
sessions.filter(name => !queue.defined(name)) match {
case Nil =>
@@ -251,25 +279,68 @@
/** build **/
- private def echo(msg: String) { java.lang.System.out.println(msg) }
- private def echo_n(msg: String) { java.lang.System.out.print(msg) }
+ /* dependencies */
+
+ sealed case class Node(
+ loaded_theories: Set[String],
+ sources: List[(Path, SHA1.Digest)])
+
+ sealed case class Deps(deps: Map[String, Node])
+ {
+ def sources(name: String): List[(Path, SHA1.Digest)] = deps(name).sources
+ }
+
+ def dependencies(queue: Session.Queue): Deps =
+ Deps((Map.empty[String, Node] /: queue.topological_order)(
+ { case (deps, (name, info)) =>
+ val preloaded =
+ info.parent match {
+ case None => Set.empty[String]
+ case Some(parent) => deps(parent).loaded_theories
+ }
+ val thy_info = new Thy_Info(new Thy_Load(preloaded))
- class Build_Job(cwd: JFile, env: Map[String, String], script: String, args: String)
+ val thy_deps =
+ thy_info.dependencies(
+ info.theories.map(_._2).flatten.
+ map(thy => Document.Node.Name(info.dir + Thy_Load.thy_path(thy))))
+
+ val loaded_theories = preloaded ++ thy_deps.map(_._1.theory)
+
+ val all_files =
+ thy_deps.map({ case (n, h) =>
+ val thy = Path.explode(n.node).expand
+ val uses =
+ h match {
+ case Exn.Res(d) =>
+ d.uses.map(p => (Path.explode(n.dir) + Path.explode(p._1)).expand)
+ case _ => Nil
+ }
+ thy :: uses
+ }).flatten ::: info.files.map(file => info.dir + file)
+ val sources = all_files.par.map(p => (p, SHA1.digest(p))).toList
+
+ deps + (name -> Node(loaded_theories, sources))
+ }))
+
+
+ /* jobs */
+
+ private class Job(cwd: JFile, env: Map[String, String], script: String, args: String)
{
private val args_file = File.tmp_file("args")
private val env1 = env + ("ARGS_FILE" -> Isabelle_System.posix_path(args_file.getPath))
File.write(args_file, args)
- private val (thread, result) = Simple_Thread.future("build_job") {
- Isabelle_System.bash_env(cwd, env1, script)
- }
+ private val (thread, result) =
+ Simple_Thread.future("build") { Isabelle_System.bash_env(cwd, env1, script) }
def terminate: Unit = thread.interrupt
def is_finished: Boolean = result.is_finished
- def join: (String, String, Int) = { val rc = result.join; args_file.delete; rc }
+ def join: (String, String, Int) = { val res = result.join; args_file.delete; res }
}
- private def build_job(save: Boolean, name: String, info: Session.Info): Build_Job =
+ private def start_job(save: Boolean, name: String, info: Session.Info): Job =
{
val parent = info.parent.getOrElse("")
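
In dependencies above, every theory file and attached file ends up in all_files and is digested, with `.par` running the SHA1 computations in parallel. A standalone sketch of that pattern using JDK hashing and Scala parallel collections (digest_file and the command-line usage are illustrative):

    import java.security.MessageDigest
    import java.nio.file.{Files, Paths}

    // Sketch of digesting source files in parallel, in the spirit of
    // "all_files.par.map(p => (p, SHA1.digest(p))).toList" above (illustrative only).
    object Par_Digest_Sketch
    {
      def digest_file(path: java.nio.file.Path): String =
        MessageDigest.getInstance("SHA-1").digest(Files.readAllBytes(path))
          .map(b => "%02x".format(b & 0xFF)).mkString

      def main(args: Array[String])
      {
        val files = args.toList.map(Paths.get(_))
        val sources = files.par.map(path => (path, digest_file(path))).toList
        sources.foreach({ case (path, digest) => println(digest + " " + path) })
      }
    }
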
@@ -300,18 +371,25 @@
val args_xml =
{
import XML.Encode._
- def symbol_string: T[String] = (s => string(Symbol.encode(s)))
- pair(bool, pair(symbol_string, pair(symbol_string, list(symbol_string))))(
- save, (parent, (name, info.theories.map(_._2).flatten)))
+ pair(bool, pair(string, pair(string, list(string))))(
+ save, (parent, (name, info.theories.map(_._2).flatten.map(_.implode))))
}
- new Build_Job(cwd, env, script, YXML.string_of_body(args_xml))
+ new Job(cwd, env, script, YXML.string_of_body(args_xml))
}
- def build(all_sessions: Boolean, build_images: Boolean, list_only: Boolean,
+
+ /* build */
+
+ private def echo(msg: String) { java.lang.System.out.println(msg) }
+ private def sleep(): Unit = Thread.sleep(500)
+
+ def build(all_sessions: Boolean, build_images: Boolean, max_jobs: Int,
+ list_only: Boolean, verbose: Boolean,
more_dirs: List[Path], more_options: List[String], sessions: List[String]): Int =
{
val options = (Options.init() /: more_options)(_.define_simple(_))
- val queue = find_sessions(all_sessions, sessions, more_dirs)
+ val queue = find_sessions(options, all_sessions, sessions, more_dirs)
+ val deps = dependencies(queue)
// prepare browser info dir
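
The args_xml value above is built from small typed encoders combined with pair/list around bool and string. A simplified sketch of that combinator style (the T type and the encoders below are stand-ins producing strings, not Isabelle's actual XML.Encode, which produces XML body elements):

    // Sketch of combinator-style encoders composed with pair/list (illustrative).
    object Encode_Sketch
    {
      type T[A] = A => String

      val bool: T[Boolean] = b => if (b) "1" else "0"
      val string: T[String] = s => s.length + ":" + s
      def list[A](f: T[A]): T[List[A]] = xs => xs.map(f).mkString("[", ",", "]")
      def pair[A, B](f: T[A], g: T[B]): T[(A, B)] =
        { case (a, b) => "(" + f(a) + "," + g(b) + ")" }

      def main(args: Array[String])
      {
        val enc = pair(bool, pair(string, list(string)))
        println(enc((true, ("HOL", List("Main", "List")))))
      }
    }
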
@@ -331,36 +409,60 @@
val log_dir = Path.explode("$ISABELLE_OUTPUT/log")
log_dir.file.mkdirs()
- // run jobs
- val rcs =
- for ((key, info) <- queue.topological_order) yield
- {
- val name = key.name
+ // scheduler loop
+ @tailrec def loop(
+ pending: Session.Queue,
+ running: Map[String, Job],
+ results: Map[String, Int]): Map[String, Int] =
+ {
+ if (pending.is_empty) results
+ else if (running.exists({ case (_, job) => job.is_finished })) {
+ val (name, job) = running.find({ case (_, job) => job.is_finished }).get
- if (list_only) { echo(name + " in " + info.dir); 0 }
- else {
- val save = build_images || queue.is_inner(name)
- echo((if (save) "Building " else "Running ") + name + " ...")
-
- val (out, err, rc) = build_job(save, name, info).join
- echo_n(err)
+ val (out, err, rc) = job.join
+ echo(Library.trim_line(err))
- val log = log_dir + Path.basic(name)
- if (rc == 0) {
- File.write_zip(log.ext("gz"), out)
- }
- else {
- File.write(log, out)
- echo(name + " FAILED")
- echo("(see also " + log.file + ")")
- val lines = split_lines(out)
- val tail = lines.drop(lines.length - 20 max 0)
- echo("\n" + cat_lines(tail))
- }
- rc
+ val log = log_dir + Path.basic(name)
+ if (rc == 0) {
+ val sources =
+ (queue(name).digest :: deps.sources(name).map(_._2)).map(_.toString).sorted
+ .mkString("sources: ", " ", "\n")
+ File.write_zip(log.ext("gz"), sources + out)
+ }
+ else {
+ File.write(log, out)
+ echo(name + " FAILED")
+ echo("(see also " + log.file + ")")
+ val lines = split_lines(out)
+ val tail = lines.drop(lines.length - 20 max 0)
+ echo("\n" + cat_lines(tail))
+ }
+ loop(pending - name, running - name, results + (name -> rc))
+ }
+ else if (running.size < (max_jobs max 1)) {
+ pending.dequeue(running.isDefinedAt(_)) match {
+ case Some((name, info)) =>
+ if (list_only) {
+ echo(name + " in " + info.dir)
+ loop(pending - name, running, results + (name -> 0))
+ }
+ else if (info.parent.map(results(_)).forall(_ == 0)) {
+ val save = build_images || queue.is_inner(name)
+ echo((if (save) "Building " else "Running ") + name + " ...")
+ val job = start_job(save, name, info)
+ loop(pending, running + (name -> job), results)
+ }
+ else {
+ echo(name + " CANCELLED")
+ loop(pending - name, running, results + (name -> 1))
+ }
+ case None => sleep(); loop(pending, running, results)
}
}
- (0 /: rcs)(_ max _)
+ else { sleep(); loop(pending, running, results) }
+ }
+
+ (0 /: loop(queue, Map.empty, Map.empty))({ case (rc1, (_, rc2)) => rc1 max rc2 })
}
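
The scheduler loop threads three pieces of state (pending sessions, running jobs, collected return codes) and recurses until pending is empty: reap a finished job, otherwise start a ready session if fewer than max_jobs are running, otherwise sleep. A rough standalone model of that loop shape (the Job below just sleeps; every name is illustrative and nothing here is the build code itself):

    import scala.annotation.tailrec

    // Standalone model of the scheduler loop shape: reap finished jobs, start ready
    // ones up to a job limit, otherwise wait.
    object Scheduler_Sketch
    {
      class Job(name: String)
      {
        private val thread = new Thread(new Runnable { def run() { Thread.sleep(100) } })
        thread.start()
        def is_finished: Boolean = !thread.isAlive
        def join: Int = { thread.join(); 0 }
      }

      def main(args: Array[String])
      {
        val max_jobs = 2
        val deps = Map("Pure" -> Nil, "HOL" -> List("Pure"), "HOL-Library" -> List("HOL"))

        @tailrec def loop(
          pending: Set[String],
          running: Map[String, Job],
          results: Map[String, Int]): Map[String, Int] =
        {
          if (pending.isEmpty && running.isEmpty) results
          else running.find({ case (_, job) => job.is_finished }) match {
            case Some((name, job)) =>
              loop(pending, running - name, results + (name -> job.join))
            case None =>
              pending.find(name => deps(name).forall(results.isDefinedAt(_))) match {
                case Some(name) if running.size < max_jobs =>
                  loop(pending - name, running + (name -> new Job(name)), results)
                case _ => Thread.sleep(50); loop(pending, running, results)
              }
          }
        }

        println(loop(deps.keySet, Map.empty, Map.empty))
      }
    }

Unlike this model, the loop above keeps a session in pending until its job has finished, so dequeue must skip names that are already running.
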
@@ -373,9 +475,11 @@
case
Properties.Value.Boolean(all_sessions) ::
Properties.Value.Boolean(build_images) ::
+ Properties.Value.Int(max_jobs) ::
Properties.Value.Boolean(list_only) ::
+ Properties.Value.Boolean(verbose) ::
Command_Line.Chunks(more_dirs, options, sessions) =>
- build(all_sessions, build_images, list_only,
+ build(all_sessions, build_images, max_jobs, list_only, verbose,
more_dirs.map(Path.explode), options, sessions)
case _ => error("Bad arguments:\n" + cat_lines(args))
}
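
The wrapper decodes its string arguments with extractor objects such as Properties.Value.Boolean and Properties.Value.Int inside one pattern match. A small sketch of that extractor idea (Bool and Int_ are simplified stand-ins, not Isabelle's Properties.Value):

    // Sketch: string-to-typed extractors used in a single pattern match (illustrative).
    object Extractor_Sketch
    {
      object Bool
      {
        def unapply(s: String): Option[Boolean] =
          s match { case "true" => Some(true) case "false" => Some(false) case _ => None }
      }
      object Int_
      {
        def unapply(s: String): Option[Int] =
          try { Some(Integer.parseInt(s)) } catch { case _: NumberFormatException => None }
      }

      def main(args: Array[String])
      {
        List("true", "4", "false") match {
          case Bool(all_sessions) :: Int_(max_jobs) :: Bool(list_only) :: Nil =>
            println((all_sessions, max_jobs, list_only))
          case bad => println("Bad arguments: " + bad)
        }
      }
    }
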