--- a/etc/build.props Sat Jan 20 13:52:36 2024 +0100
+++ b/etc/build.props Sat Jan 20 15:07:41 2024 +0100
@@ -48,6 +48,18 @@
src/Pure/Admin/component_zipperposition.scala \
src/Pure/Admin/component_zstd.scala \
src/Pure/Admin/isabelle_cronjob.scala \
+ src/Pure/Build/browser_info.scala \
+ src/Pure/Build/build.scala \
+ src/Pure/Build/build_cluster.scala \
+ src/Pure/Build/build_job.scala \
+ src/Pure/Build/build_process.scala \
+ src/Pure/Build/build_schedule.scala \
+ src/Pure/Build/export.scala \
+ src/Pure/Build/export_theory.scala \
+ src/Pure/Build/file_format.scala \
+ src/Pure/Build/resources.scala \
+ src/Pure/Build/sessions.scala \
+ src/Pure/Build/store.scala \
src/Pure/Concurrent/consumer_thread.scala \
src/Pure/Concurrent/counter.scala \
src/Pure/Concurrent/delay.scala \
@@ -146,7 +158,6 @@
src/Pure/PIDE/prover.scala \
src/Pure/PIDE/query_operation.scala \
src/Pure/PIDE/rendering.scala \
- src/Pure/PIDE/resources.scala \
src/Pure/PIDE/session.scala \
src/Pure/PIDE/text.scala \
src/Pure/PIDE/xml.scala \
@@ -181,22 +192,11 @@
src/Pure/System/system_channel.scala \
src/Pure/System/tty_loop.scala \
src/Pure/Thy/bibtex.scala \
- src/Pure/Thy/browser_info.scala \
src/Pure/Thy/document_build.scala \
- src/Pure/Thy/export.scala \
- src/Pure/Thy/export_theory.scala \
- src/Pure/Thy/file_format.scala \
src/Pure/Thy/latex.scala \
- src/Pure/Thy/sessions.scala \
- src/Pure/Thy/store.scala \
src/Pure/Thy/thy_element.scala \
src/Pure/Thy/thy_header.scala \
src/Pure/Thy/thy_syntax.scala \
- src/Pure/Tools/build.scala \
- src/Pure/Tools/build_cluster.scala \
- src/Pure/Tools/build_job.scala \
- src/Pure/Tools/build_process.scala \
- src/Pure/Tools/build_schedule.scala \
src/Pure/Tools/check_keywords.scala \
src/Pure/Tools/debugger.scala \
src/Pure/Tools/doc.scala \
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/browser_info.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,714 @@
+/* Title: Pure/Build/browser_info.scala
+ Author: Makarius
+
+HTML/PDF presentation of PIDE document information.
+*/
+
+package isabelle
+
+
+import scala.annotation.tailrec
+import scala.collection.immutable.SortedMap
+import scala.collection.mutable
+
+
+object Browser_Info {
+ /* browser_info store configuration */
+
+ object Config {
+ val none: Config = new Config { def enabled: Boolean = false }
+ val standard: Config = new Config { def enabled: Boolean = true }
+
+ def dir(path: Path): Config =
+ new Config {
+ def enabled: Boolean = true
+ override def presentation_dir(store: Store): Path = path
+ }
+
+ def make(s: String): Config =
+ if (s == ":") standard else dir(Path.explode(s))
+ }
+
+ abstract class Config private {
+ def enabled: Boolean
+ def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info
+ def presentation_dir(store: Store): Path = store.presentation_dir
+ }
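+
+  // For example, following the definitions above: Config.make(":") yields the
+  // standard configuration, presenting within store.presentation_dir, while
+  // Config.make("www") presents within the directory Path.explode("www").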
+
+
+ /* meta info within the file-system */
+
+ object Meta_Info {
+ /* directory */
+
+ val PATH: Path = Path.explode(".browser_info")
+
+ def check_directory(dir: Path): Unit = {
+ if (dir.is_dir && !(dir + PATH).is_dir && File.read_dir(dir).nonEmpty) {
+ error("Existing content in " + dir.expand + " lacks " + PATH + " meta info.\n" +
+ "To avoid potential disaster, it has not been changed automatically.\n" +
+ "If this is the intended directory, please move/remove/empty it manually.")
+ }
+ }
+
+ def init_directory(dir: Path): Path = {
+ check_directory(dir)
+ Isabelle_System.make_directory(dir + PATH)
+ dir
+ }
+
+ def clean_directory(dir: Path): Path = {
+ check_directory(dir)
+ Isabelle_System.rm_tree(dir) // guarded by check_directory!
+ Isabelle_System.new_directory(dir + PATH)
+ }
+
+
+ /* content */
+
+ def make_path(dir: Path, name: String): Path =
+ dir + PATH + Path.basic(name)
+
+ def value(dir: Path, name: String): String = {
+ val path = make_path(dir, name)
+ if (path.is_file) File.read(path) else ""
+ }
+
+ def change(dir: Path, name: String)(f: String => String): Unit = {
+ val path = make_path(dir, name)
+ val x = value(dir, name)
+ val y =
+ try { f(x) }
+        catch { case ERROR(msg) => error("Failed to change " + path.expand + ":\n" + msg) }
+ if (x != y) File.write(path, y)
+ }
+
+
+ /* build_uuid */
+
+ val BUILD_UUID = "build_uuid"
+
+ def check_build_uuid(dir: Path, uuid: String): Boolean = {
+ val uuid0 = value(dir, BUILD_UUID)
+ uuid0.nonEmpty && uuid.nonEmpty && uuid0 == uuid
+ }
+
+ def set_build_uuid(dir: Path, uuid: String): Unit =
+ change(dir, BUILD_UUID)(_ => uuid)
+
+
+ /* index */
+
+ val INDEX = "index.json"
+
+ object Item {
+ def parse(json: JSON.T): Item = {
+ def err(): Nothing =
+ error("Bad JSON object for item:\n" + JSON.Format.pretty_print(json))
+ val obj = JSON.Object.unapply(json) getOrElse err()
+
+ val name = JSON.string(obj, "name") getOrElse err()
+ val description = JSON.string(obj, "description") getOrElse ""
+ Item(name, description = Symbol.trim_blank_lines(description))
+ }
+ }
+
+ sealed case class Item(name: String, description: String = "") {
+ override def toString: String = name
+
+ def json: JSON.T = JSON.Object("name" -> name, "description" -> description)
+ }
+
+ object Index {
+ def parse(s: JSON.S, kind: String): Index = {
+ if (s.isEmpty) Index(kind, Nil)
+ else {
+ def err(): Nothing = error("Bad JSON object " + kind + " index:\n" + s)
+
+ val json = JSON.parse(s)
+ val obj = JSON.Object.unapply(json) getOrElse err()
+
+ val kind1 = JSON.string(obj, "kind") getOrElse err()
+ val items = JSON.list(obj, "items", x => Some(Item.parse(x))) getOrElse err()
+ if (kind == kind1) Index(kind, items)
+ else error("Expected index kind " + quote(kind) + " but found " + quote(kind1))
+ }
+ }
+ }
+
+ sealed case class Index(kind: String, items: List[Item]) {
+ def is_empty: Boolean = items.isEmpty
+
+ def + (item: Item): Index =
+ Index(kind, (item :: items.filterNot(_.name == item.name)).sortBy(_.name))
+
+ def json: JSON.T = JSON.Object("kind" -> kind, "items" -> items.map(_.json))
+ def print_json: JSON.S = JSON.Format.pretty_print(json)
+ }
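+
+    // print_json output shape, for example:
+    //   {"kind": "chapter", "items": [{"name": "HOL", "description": "..."}]}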
+ }
+
+
+ /* presentation elements */
+
+ sealed case class Elements(
+ html: Markup.Elements = Markup.Elements.empty,
+ entity: Markup.Elements = Markup.Elements.empty,
+ language: Markup.Elements = Markup.Elements.empty)
+
+ val default_elements: Elements =
+ Elements(
+ html = Rendering.foreground_elements ++ Rendering.text_color_elements +
+ Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE +
+ Markup.PATH + Markup.URL,
+ entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT,
+ Markup.CLASS, Markup.LOCALE, Markup.FREE))
+
+ val extra_elements: Elements =
+ Elements(
+ html = default_elements.html ++ Rendering.markdown_elements,
+ language = Markup.Elements(Markup.Language.DOCUMENT))
+
+
+
+ /** HTML/PDF presentation context **/
+
+ def context(
+ sessions_structure: Sessions.Structure,
+ elements: Elements = default_elements,
+ root_dir: Path = Path.current,
+ document_info: Document_Info = Document_Info.empty
+ ): Context = new Context(sessions_structure, elements, root_dir, document_info)
+
+ class Context private[Browser_Info](
+ sessions_structure: Sessions.Structure,
+ val elements: Elements,
+ val root_dir: Path,
+ val document_info: Document_Info
+ ) {
+ /* directory structure and resources */
+
+ def theory_by_name(session: String, theory: String): Option[Document_Info.Theory] =
+ document_info.theory_by_name(session, theory)
+
+ def theory_by_file(session: String, file: String): Option[Document_Info.Theory] =
+ document_info.theory_by_file(session, file)
+
+ def session_chapter(session: String): String =
+ sessions_structure(session).chapter
+
+ def chapter_dir(session: String): Path =
+ root_dir + Path.basic(session_chapter(session))
+
+ def session_dir(session: String): Path =
+ chapter_dir(session) + Path.basic(session)
+
+ def theory_dir(theory: Document_Info.Theory): Path =
+ session_dir(theory.dynamic_session)
+
+    def theory_html(theory: Document_Info.Theory): Path = {
+ def check(name: String): Option[Path] = {
+ val path = Path.basic(name).html
+ if (Path.eq_case_insensitive(path, Path.index_html)) None
+ else Some(path)
+ }
+ check(theory.print_short) orElse check(theory.name) getOrElse
+ error("Illegal global theory name " + quote(theory.name) +
+ " (conflict with " + Path.index_html + ")")
+ }
+
+ def file_html(file: String): Path =
+ Path.explode(file).squash.html
+
+ def smart_html(theory: Document_Info.Theory, file: String): Path =
+ if (File.is_thy(file)) theory_html(theory) else file_html(file)
+
+
+ /* HTML content */
+
+ def head(title: String, rest: XML.Body = Nil): XML.Tree =
+ HTML.div("head", HTML.chapter(title) :: rest)
+
+ def source(body: XML.Body): XML.Tree = HTML.pre("source", body)
+
+ def contents(
+ heading: String,
+ items: List[XML.Body],
+ css_class: String = "contents"
+    ): List[XML.Elem] = {
+ if (items.isEmpty) Nil
+ else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items))))
+ }
+
+
+ /* preview PIDE document */
+
+ lazy val isabelle_css: String = File.read(HTML.isabelle_css)
+
+ def html_document(title: String, body: XML.Body, fonts_css: String): HTML_Document = {
+ val content =
+ HTML.output_document(
+ List(
+ HTML.style(fonts_css + "\n\n" + isabelle_css),
+ HTML.title(title)),
+ List(HTML.source(body)), css = "", structural = false)
+ HTML_Document(title, content)
+ }
+
+ def preview_document(
+ snapshot: Document.Snapshot,
+ plain_text: Boolean = false,
+ fonts_css: String = HTML.fonts_css()
+ ): HTML_Document = {
+ require(!snapshot.is_outdated, "document snapshot outdated")
+
+ val name = snapshot.node_name
+ if (plain_text) {
+ val title = "File " + Symbol.cartouche_decoded(name.file_name)
+ val body = HTML.text(snapshot.node.source)
+ html_document(title, body, fonts_css)
+ }
+ else {
+ Resources.html_document(snapshot) getOrElse {
+ val title =
+ if (name.is_theory) "Theory " + quote(name.theory_base_name)
+ else "File " + Symbol.cartouche_decoded(name.file_name)
+ val xml = snapshot.xml_markup(elements = elements.html)
+ val body = Node_Context.empty.make_html(elements, xml)
+ html_document(title, body, fonts_css)
+ }
+ }
+ }
+
+
+ /* maintain presentation structure */
+
+ def update_chapter(session_name: String, session_description: String): Unit = synchronized {
+ val dir = Meta_Info.init_directory(chapter_dir(session_name))
+ Meta_Info.change(dir, Meta_Info.INDEX) { text =>
+ val index0 = Meta_Info.Index.parse(text, "chapter")
+ val item = Meta_Info.Item(session_name, description = session_description)
+ val index = index0 + item
+
+ if (index != index0) {
+ val title = "Isabelle/" + session_chapter(session_name) + " sessions"
+ HTML.write_document(dir, "index.html",
+ List(HTML.title(title + Isabelle_System.isabelle_heading())),
+ HTML.chapter(title) ::
+ (if (index.is_empty) Nil
+ else
+ List(HTML.div("sessions",
+ List(HTML.description(
+ index.items.map(item =>
+ (List(HTML.link(item.name + "/index.html", HTML.text(item.name))),
+ if (item.description.isEmpty) Nil
+ else HTML.break ::: List(HTML.pre(HTML.text(item.description)))))))))),
+ root = Some(root_dir))
+ }
+
+ index.print_json
+ }
+ }
+
+ def update_root(): Unit = synchronized {
+ Meta_Info.init_directory(root_dir)
+ HTML.init_fonts(root_dir)
+ Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"),
+ root_dir + Path.explode("isabelle.gif"))
+
+ Meta_Info.change(root_dir, Meta_Info.INDEX) { text =>
+ val index0 = Meta_Info.Index.parse(text, "root")
+ val index = {
+ val items1 =
+ sessions_structure.known_chapters
+ .map(ch => Meta_Info.Item(ch.name, description = ch.description))
+ val items2 = index0.items.filterNot(item => items1.exists(_.name == item.name))
+ index0.copy(items = items1 ::: items2)
+ }
+
+ if (index != index0) {
+ val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library"
+ HTML.write_document(root_dir, "index.html",
+ List(HTML.title(title + Isabelle_System.isabelle_heading())),
+ HTML.chapter(title) ::
+ (if (index.is_empty) Nil
+ else
+ List(HTML.div("sessions",
+ List(HTML.description(
+ index.items.map(item =>
+ (List(HTML.link(item.name + "/index.html", HTML.text(item.name))),
+ if (item.description.isEmpty) Nil
+ else HTML.break ::: List(HTML.pre(HTML.text(item.description)))))))))),
+ root = Some(root_dir))
+ }
+
+ index.print_json
+ }
+ }
+ }
+
+ sealed case class HTML_Document(title: String, content: String)
+
+
+ /* formal entities */
+
+ object Theory_Ref {
+ def unapply(props: Properties.T): Option[String] =
+ (props, props) match {
+ case (Markup.Kind(Markup.THEORY), Markup.Name(theory)) => Some(theory)
+ case _ => None
+ }
+ }
+
+ object Entity_Ref {
+ def unapply(props: Properties.T): Option[(String, String, String)] =
+ (props, props, props, props) match {
+ case (Markup.Entity.Ref.Prop(_), Position.Def_File(file), Markup.Kind(kind), Markup.Name(name))
+ if Path.is_wellformed(file) => Some((file, kind, name))
+ case _ => None
+ }
+ }
+
+ object Node_Context {
+ val empty: Node_Context = new Node_Context
+
+ def make(
+ context: Context,
+ session_name: String,
+ theory_name: String,
+ file_name: String,
+ node_dir: Path,
+ ): Node_Context =
+ new Node_Context {
+ private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
+
+ override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] =
+ body match {
+ case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
+ case _ =>
+ for (theory <- context.theory_by_name(session_name, theory_name))
+ yield {
+ val body1 =
+ if (seen_ranges.contains(range)) {
+ HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body))
+ }
+ else HTML.span(body)
+ theory.get_defs(file_name, range).foldLeft(body1) {
+ case (elem, entity) =>
+ HTML.entity_def(HTML.span(HTML.id(entity.kname), List(elem)))
+ }
+ }
+ }
+
+ private def offset_id(range: Text.Range): String =
+ "offset_" + range.start + ".." + range.stop
+
+ override def make_file_ref(file: String, body: XML.Body): Option[XML.Elem] = {
+ for (theory <- context.theory_by_file(session_name, file))
+ yield {
+ val html_path = context.theory_dir(theory) + context.smart_html(theory, file)
+ val html_link = HTML.relative_href(html_path, base = Some(node_dir))
+ HTML.link(html_link, body)
+ }
+ }
+
+ override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = {
+ props match {
+ case Theory_Ref(thy_name) =>
+ for (theory <- context.theory_by_name(session_name, thy_name))
+ yield {
+ val html_path = context.theory_dir(theory) + context.theory_html(theory)
+ val html_link = HTML.relative_href(html_path, base = Some(node_dir))
+ HTML.link(html_link, body)
+ }
+ case Entity_Ref(def_file, kind, name) =>
+ def logical_ref(theory: Document_Info.Theory): Option[String] =
+ theory.get_def(def_file, kind, name).map(_.kname)
+
+ def physical_ref(theory: Document_Info.Theory): Option[String] =
+ props match {
+ case Position.Def_Range(range) if theory.name == theory_name =>
+ seen_ranges += range
+ Some(offset_id(range))
+ case _ => None
+ }
+
+ for {
+ theory <- context.theory_by_file(session_name, def_file)
+ html_ref <- logical_ref(theory) orElse physical_ref(theory)
+ }
+ yield {
+ val html_path = context.theory_dir(theory) + context.smart_html(theory, def_file)
+ val html_link = HTML.relative_href(html_path, base = Some(node_dir))
+ HTML.entity_ref(HTML.link(html_link + "#" + html_ref, body))
+ }
+ case _ => None
+ }
+ }
+ }
+ }
+
+ class Node_Context {
+ def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = None
+ def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = None
+ def make_file_ref(file: String, body: XML.Body): Option[XML.Elem] = None
+
+ val div_elements: Set[String] =
+ Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.`enum`.name,
+ HTML.descr.name)
+
+ def make_html(elements: Elements, xml: XML.Body): XML.Body = {
+ def html_div(html: XML.Body): Boolean =
+ html exists {
+ case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
+ case XML.Text(_) => false
+ }
+
+ def html_class(c: String, html: XML.Body): XML.Body =
+ if (c == "") html
+ else if (html_div(html)) List(HTML.div(c, html))
+ else List(HTML.span(c, html))
+
+ def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
+ xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
+ val (res1, offset) = html_body_single(tree, end_offset1)
+ (res1 ++ res, offset)
+ }
+
+ @tailrec
+ def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
+ xml_tree match {
+ case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
+ case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ if (elements.entity(kind)) {
+ make_ref(props, body1) match {
+ case Some(link) => (List(link), offset)
+ case None => (body1, offset)
+ }
+ }
+ else (body1, offset)
+ case XML.Elem(Markup.Path(file), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ make_file_ref(file, body1) match {
+ case Some(link) => (List(link), offset)
+ case None => (body1, offset)
+ }
+ case XML.Elem(Markup.Url(href), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ (List(HTML.link(href, body1)), offset)
+ case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ (html_class(if (elements.language(name)) name else "", body1), offset)
+ case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ (List(HTML.par(body1)), offset)
+ case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ (List(HTML.item(body1)), offset)
+ case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
+ (Nil, end_offset - XML.symbol_length(text))
+ case XML.Elem(Markup.Markdown_List(kind), body) =>
+ val (body1, offset) = html_body(body, end_offset)
+ if (kind == Markup.ENUMERATE) (List(HTML.`enum`(body1)), offset)
+ else (List(HTML.list(body1)), offset)
+ case XML.Elem(markup, body) =>
+ val name = markup.name
+ val (body1, offset) = html_body(body, end_offset)
+ val html =
+ markup.properties match {
+ case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
+ html_class(kind, body1)
+ case _ =>
+ body1
+ }
+ Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
+ case Some(c) => (html_class(c.toString, html), offset)
+ case None => (html_class(name, html), offset)
+ }
+ case XML.Text(text) =>
+ val offset = end_offset - Symbol.length(text)
+ val body = HTML.text(Symbol.decode(text))
+ make_def(Text.Range(offset, end_offset), body) match {
+ case Some(body1) => (List(body1), offset)
+ case None => (body, offset)
+ }
+ }
+
+ html_body(xml, XML.symbol_length(xml) + 1)._1
+ }
+ }
+
+
+
+ /** build presentation **/
+
+ val session_graph_path: Path = Path.explode("session_graph.pdf")
+
+ def build_session(
+ context: Context,
+ session_context: Export.Session_Context,
+ progress: Progress = new Progress,
+ ): Unit = {
+ progress.expose_interrupt()
+
+ val session_name = session_context.session_name
+ val session_info = session_context.sessions_structure(session_name)
+
+ val session_dir = context.session_dir(session_name).expand
+ progress.echo("Presenting " + session_name + " in " + session_dir + " ...")
+
+ Meta_Info.init_directory(context.chapter_dir(session_name))
+ Meta_Info.clean_directory(session_dir)
+
+ val session = context.document_info.the_session(session_name)
+
+ Bytes.write(session_dir + session_graph_path,
+ graphview.Graph_File.make_pdf(session_info.options,
+ session_context.session_base.session_graph_display))
+
+ val document_variants =
+ for {
+ doc <- session_info.document_variants
+ db <- session_context.session_db()
+ document <- Document_Build.read_document(db, session_name, doc.name)
+ }
+ yield {
+ val doc_path = session_dir + doc.path.pdf
+ if (Path.eq_case_insensitive(doc.path.pdf, session_graph_path)) {
+ error("Illegal document variant " + quote(doc.name) +
+ " (conflict with " + session_graph_path + ")")
+ }
+ progress.echo("Presenting document " + session_name + "/" + doc.name, verbose = true)
+ if (session_info.document_echo) progress.echo("Document at " + doc_path)
+ Bytes.write(doc_path, document.pdf)
+ doc
+ }
+
+ val document_links = {
+ val link1 = HTML.link(session_graph_path, HTML.text("theory dependencies"))
+ val links2 = document_variants.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
+ Library.separate(HTML.break ::: HTML.nl,
+ (link1 :: links2).map(link => HTML.text("View ") ::: List(link))).flatten
+ }
+
+ def present_theory(theory_name: String): XML.Body = {
+ progress.expose_interrupt()
+
+ def err(): Nothing =
+ error("Missing document information for theory: " + quote(theory_name))
+
+ val snapshot = Build.read_theory(session_context.theory(theory_name)) getOrElse err()
+ val theory = context.theory_by_name(session_name, theory_name) getOrElse err()
+
+ progress.echo("Presenting theory " + quote(theory_name), verbose = true)
+
+ val thy_elements = theory.elements(context.elements)
+
+ def node_context(file_name: String, node_dir: Path): Node_Context =
+ Node_Context.make(context, session_name, theory_name, file_name, node_dir)
+
+ val thy_html =
+ context.source(
+ node_context(theory.thy_file, session_dir).
+ make_html(thy_elements, snapshot.xml_markup(elements = thy_elements.html)))
+
+ val master_dir = Path.explode(snapshot.node_name.master_dir)
+
+ val files =
+ for {
+ blob_name <- snapshot.node_files.tail
+ xml = snapshot.switch(blob_name).xml_markup(elements = thy_elements.html)
+ if xml.nonEmpty
+ }
+ yield {
+ progress.expose_interrupt()
+
+ val file = blob_name.node
+ progress.echo("Presenting file " + quote(file), verbose = true)
+
+ val file_html = session_dir + context.file_html(file)
+ val file_dir = file_html.dir
+ val html_link = HTML.relative_href(file_html, base = Some(session_dir))
+ val html = context.source(node_context(file, file_dir).make_html(thy_elements, xml))
+
+ val path = Path.explode(file)
+ val src_path = File.relative_path(master_dir, path).getOrElse(path)
+
+ val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
+ HTML.write_document(file_dir, file_html.file_name,
+ List(HTML.title(file_title)), List(context.head(file_title), html),
+ root = Some(context.root_dir))
+ List(HTML.link(html_link, HTML.text(file_title)))
+ }
+
+ val thy_title = "Theory " + theory.print_short
+ HTML.write_document(session_dir, context.theory_html(theory).implode,
+ List(HTML.title(thy_title)), List(context.head(thy_title), thy_html),
+ root = Some(context.root_dir))
+
+ List(HTML.link(context.theory_html(theory),
+ HTML.text(theory.print_short) :::
+ (if (files.isEmpty) Nil else List(HTML.itemize(files)))))
+ }
+
+ val theories = session.used_theories.map(present_theory)
+
+ val title = "Session " + session_name
+ HTML.write_document(session_dir, "index.html",
+ List(HTML.title(title + Isabelle_System.isabelle_heading())),
+ context.head(title, List(HTML.par(document_links))) ::
+ context.contents("Theories", theories),
+ root = Some(context.root_dir))
+
+ Meta_Info.set_build_uuid(session_dir, session.build_uuid)
+
+ context.update_chapter(session_name, session_info.description)
+ }
+
+ def build(
+ browser_info: Config,
+ store: Store,
+ deps: Sessions.Deps,
+ sessions: List[String],
+ progress: Progress = new Progress,
+ server: SSH.Server = SSH.no_server
+ ): Unit = {
+ val root_dir = browser_info.presentation_dir(store).absolute
+ progress.echo("Presentation in " + root_dir)
+
+ using(Export.open_database_context(store, server = server)) { database_context =>
+ val context0 = context(deps.sessions_structure, root_dir = root_dir)
+
+ val sessions1 =
+ deps.sessions_structure.build_requirements(sessions).filter { session_name =>
+ using(database_context.open_database(session_name)) { session_database =>
+ database_context.store.read_build(session_database.db, session_name) match {
+ case None => false
+ case Some(build) =>
+ val session_dir = context0.session_dir(session_name)
+ !Meta_Info.check_build_uuid(session_dir, build.uuid)
+ }
+ }
+ }
+
+ val context1 =
+ context(deps.sessions_structure, root_dir = root_dir,
+ document_info = Document_Info.read(database_context, deps, sessions1))
+
+ context1.update_root()
+
+ Par_List.map({ (session: String) =>
+ using(database_context.open_session(deps.background(session))) { session_context =>
+ build_session(context1, session_context, progress = progress)
+ }
+ }, sessions1)
+ }
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/build.ML Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,127 @@
+(* Title: Pure/Build/build.ML
+ Author: Makarius
+
+Build Isabelle sessions.
+*)
+
+signature BUILD =
+sig
+ type hook = string -> (theory * Document_Output.segment list) list -> unit
+ val add_hook: hook -> unit
+end;
+
+structure Build: BUILD =
+struct
+
+(* session timing *)
+
+fun session_timing f x =
+ let
+ val start = Timing.start ();
+ val y = f x;
+ val timing = Timing.result start;
+
+ val threads = string_of_int (Multithreading.max_threads ());
+ val props = [("threads", threads)] @ Markup.timing_properties timing;
+ val _ = Output.protocol_message (Markup.session_timing :: props) [];
+ in y end;
+
+
+(* build theories *)
+
+type hook = string -> (theory * Document_Output.segment list) list -> unit;
+
+local
+ val hooks = Synchronized.var "Build.hooks" ([]: hook list);
+in
+
+fun add_hook hook = Synchronized.change hooks (cons hook);
+
+fun apply_hooks qualifier loaded_theories =
+ Synchronized.value hooks
+ |> List.app (fn hook => hook qualifier loaded_theories);
+
+end;
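+
+(*Illustrative use of the hook interface from user-space ML (hypothetical):
+  Build.add_hook (fn qualifier => fn loaded =>
+    writeln ("session " ^ qualifier ^ ": " ^
+      string_of_int (length loaded) ^ " theories"));*)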
+
+
+fun build_theories qualifier (options, thys) =
+ let
+ val _ = ML_Profiling.check_mode (Options.string options "profiling");
+ val condition = space_explode "," (Options.string options "condition");
+ val conds = filter_out (can getenv_strict) condition;
+ val loaded_theories =
+ if null conds then
+ (Options.set_default options;
+ Isabelle_Process.init_options ();
+ Future.fork I;
+ (Thy_Info.use_theories options qualifier
+ |> Unsynchronized.setmp print_mode
+ (space_explode "," (Options.string options "print_mode") @ print_mode_value ())) thys)
+ else
+ (Output.physical_stderr ("Skipping theories " ^ commas_quote (map #1 thys) ^
+ " (undefined " ^ commas conds ^ ")\n"); [])
+ in loaded_theories end;
+
+
+(* build session *)
+
+val _ =
+ Protocol_Command.define_bytes "build_session"
+ (fn [resources_yxml, args_yxml] =>
+ let
+ val _ = Resources.init_session_yxml resources_yxml;
+ val (session_name, theories) =
+ YXML.parse_body_bytes args_yxml |>
+ let
+ open XML.Decode;
+ val position = Position.of_properties o properties;
+ in pair string (list (pair Options.decode (list (pair string position)))) end;
+
+ val _ = Session.init session_name;
+
+ fun build () =
+ let
+ val res =
+ theories |>
+ (map (build_theories session_name)
+ |> session_timing
+ |> Exn.capture);
+ val res1 =
+ (case res of
+ Exn.Res loaded_theories =>
+ Exn.capture (apply_hooks session_name) (flat loaded_theories)
+ | Exn.Exn exn => Exn.Exn exn);
+ val res2 = Exn.capture_body Session.finish;
+
+ val _ = Resources.finish_session_base ();
+ val _ = Par_Exn.release_all [res1, res2];
+ val _ =
+ if session_name = Context.PureN
+ then Theory.install_pure (Thy_Info.get_theory Context.PureN) else ();
+ in () end;
+
+ fun exec e =
+ if can Theory.get_pure () then
+ Isabelle_Thread.fork (Isabelle_Thread.params "build_session") e
+ |> ignore
+ else e ();
+ in
+ exec (fn () =>
+ (case Exn.capture_body (Future.interruptible_task build) of
+ Exn.Res () => (0, [])
+ | Exn.Exn exn =>
+ (case Exn.capture Runtime.exn_message_list exn of
+ Exn.Res errs => (1, errs)
+ | Exn.Exn _ (*sic!*) => (2, ["CRASHED"])))
+ |> let open XML.Encode in pair int (list string) end
+ |> single
+ |> Output.protocol_message Markup.build_session_finished)
+ end
+ | _ => raise Match);
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/build.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,935 @@
+/* Title: Pure/Build/build.scala
+ Author: Makarius
+ Options: :folding=explicit:
+
+Command-line tools to build and manage Isabelle sessions.
+*/
+
+package isabelle
+
+import scala.collection.mutable
+import scala.util.matching.Regex
+
+
+object Build {
+ /** "isabelle build" **/
+
+ /* options */
+
+ def hostname(options: Options): String =
+ Isabelle_System.hostname(options.string("build_hostname"))
+
+ def engine_name(options: Options): String = options.string("build_engine")
+
+
+
+ /* context */
+
+ sealed case class Context(
+ store: Store,
+ engine: Engine,
+ build_deps: isabelle.Sessions.Deps,
+ afp_root: Option[Path] = None,
+ build_hosts: List[Build_Cluster.Host] = Nil,
+ ml_platform: String = Isabelle_System.getenv("ML_PLATFORM"),
+ hostname: String = Isabelle_System.hostname(),
+ numa_shuffling: Boolean = false,
+ build_heap: Boolean = false,
+ max_jobs: Int = 1,
+ fresh_build: Boolean = false,
+ no_build: Boolean = false,
+ session_setup: (String, Session) => Unit = (_, _) => (),
+ build_uuid: String = UUID.random().toString,
+ master: Boolean = false
+ ) {
+ override def toString: String =
+ "Build.Context(build_uuid = " + quote(build_uuid) + if_proper(master, ", master = true") + ")"
+
+ def build_options: Options = store.options
+
+ def sessions_structure: isabelle.Sessions.Structure = build_deps.sessions_structure
+
+ def worker_active: Boolean = max_jobs > 0
+ }
+
+
+ /* results */
+
+ object Results {
+ def apply(
+ context: Context,
+ results: Map[String, Process_Result] = Map.empty,
+ other_rc: Int = Process_Result.RC.ok
+ ): Results = {
+ new Results(context.store, context.build_deps, results, other_rc)
+ }
+ }
+
+ class Results private(
+ val store: Store,
+ val deps: Sessions.Deps,
+ results: Map[String, Process_Result],
+ other_rc: Int
+ ) {
+ def cache: Term.Cache = store.cache
+
+ def sessions_ok: List[String] =
+ (for {
+ name <- deps.sessions_structure.build_topological_order.iterator
+ result <- results.get(name) if result.ok
+ } yield name).toList
+
+ def info(name: String): Sessions.Info = deps.sessions_structure(name)
+ def sessions: Set[String] = results.keySet
+ def cancelled(name: String): Boolean = !results(name).defined
+ def apply(name: String): Process_Result = results(name).strict
+
+ val rc: Int =
+ Process_Result.RC.merge(other_rc,
+ Process_Result.RC.merge(results.valuesIterator.map(_.strict.rc)))
+ def ok: Boolean = rc == Process_Result.RC.ok
+
+ lazy val unfinished: List[String] = sessions.iterator.filterNot(apply(_).ok).toList.sorted
+
+ override def toString: String = rc.toString
+ }
+
+
+ /* engine */
+
+ object Engine {
+ lazy val services: List[Engine] =
+ Isabelle_System.make_services(classOf[Engine])
+
+ def apply(name: String): Engine =
+ services.find(_.name == name).getOrElse(error("Bad build engine " + quote(name)))
+ }
+
+ class Engine(val name: String) extends Isabelle_System.Service {
+ override def toString: String = name
+
+ def build_options(options: Options, build_hosts: List[Build_Cluster.Host] = Nil): Options = {
+ val options1 = options + "completion_limit=0" + "editor_tracing_messages=0"
+ if (build_hosts.isEmpty) options1
+ else options1 + "build_database_server" + "build_database"
+ }
+
+ final def build_store(options: Options,
+ build_hosts: List[Build_Cluster.Host] = Nil,
+ cache: Term.Cache = Term.Cache.make()
+ ): Store = {
+ val store = Store(build_options(options, build_hosts = build_hosts), cache = cache)
+ Isabelle_System.make_directory(store.output_dir + Path.basic("log"))
+ Isabelle_Fonts.init()
+ store
+ }
+
+ def open_build_process(
+ build_context: Context,
+ build_progress: Progress,
+ server: SSH.Server
+ ): Build_Process = new Build_Process(build_context, build_progress, server)
+
+ final def run_build_process(
+ context: Context,
+ progress: Progress,
+ server: SSH.Server
+ ): Results = {
+ Isabelle_Thread.uninterruptible {
+ using(open_build_process(context, progress, server))(_.run())
+ }
+ }
+ }
+
+ class Default_Engine extends Engine("") { override def toString: String = "<default>" }
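+
+  // A user-space engine is a subclass registered as an Isabelle system service
+  // and selected via the system option "build_engine". Hypothetical sketch:
+  //
+  //   class My_Engine extends Build.Engine("my_engine") {
+  //     override def open_build_process(
+  //       context: Build.Context, progress: Progress, server: SSH.Server): Build_Process =
+  //       new Build_Process(context, progress, server)
+  //   }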
+
+
+ /* build */
+
+ def build(
+ options: Options,
+ build_hosts: List[Build_Cluster.Host] = Nil,
+ selection: Sessions.Selection = Sessions.Selection.empty,
+ browser_info: Browser_Info.Config = Browser_Info.Config.none,
+ progress: Progress = new Progress,
+ check_unknown_files: Boolean = false,
+ build_heap: Boolean = false,
+ clean_build: Boolean = false,
+ afp_root: Option[Path] = None,
+ dirs: List[Path] = Nil,
+ select_dirs: List[Path] = Nil,
+ infos: List[Sessions.Info] = Nil,
+ numa_shuffling: Boolean = false,
+ max_jobs: Int = 1,
+ list_files: Boolean = false,
+ check_keywords: Set[String] = Set.empty,
+ fresh_build: Boolean = false,
+ no_build: Boolean = false,
+ soft_build: Boolean = false,
+ export_files: Boolean = false,
+ augment_options: String => List[Options.Spec] = _ => Nil,
+ session_setup: (String, Session) => Unit = (_, _) => (),
+ cache: Term.Cache = Term.Cache.make()
+ ): Results = {
+ val build_engine = Engine(engine_name(options))
+
+ val store = build_engine.build_store(options, build_hosts = build_hosts, cache = cache)
+ val build_options = store.options
+
+ using(store.open_server()) { server =>
+ using_optional(store.maybe_open_database_server(server = server)) { database_server =>
+
+
+ /* session selection and dependencies */
+
+ val full_sessions =
+ Sessions.load_structure(build_options, dirs = AFP.make_dirs(afp_root) ::: dirs,
+ select_dirs = select_dirs, infos = infos, augment_options = augment_options)
+ val full_sessions_selection = full_sessions.imports_selection(selection)
+
+ val build_deps = {
+ val deps0 =
+ Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true,
+ list_files = list_files, check_keywords = check_keywords).check_errors
+
+ if (soft_build && !fresh_build) {
+ val outdated =
+ deps0.sessions_structure.build_topological_order.flatMap(name =>
+ store.try_open_database(name, server = server) match {
+ case Some(db) =>
+ using(db)(store.read_build(_, name)) match {
+ case Some(build) if build.ok =>
+ val session_options = deps0.sessions_structure(name).options
+ val session_sources = deps0.sources_shasum(name)
+ if (Sessions.eq_sources(session_options, build.sources, session_sources)) None
+ else Some(name)
+ case _ => Some(name)
+ }
+ case None => Some(name)
+ })
+
+ Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)),
+ progress = progress, inlined_files = true).check_errors
+ }
+ else deps0
+ }
+
+
+ /* check unknown files */
+
+ if (check_unknown_files) {
+ val source_files =
+ (for {
+ (_, base) <- build_deps.session_bases.iterator
+ (path, _) <- base.session_sources.iterator
+ } yield path).toList
+ Mercurial.check_files(source_files)._2 match {
+ case Nil =>
+ case unknown_files =>
+ progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" +
+ unknown_files.map(File.standard_path).sorted.mkString("\n ", "\n ", ""))
+ }
+ }
+
+
+ /* build process and results */
+
+ val build_context =
+ Context(store, build_engine, build_deps, afp_root = afp_root, build_hosts = build_hosts,
+ hostname = hostname(build_options), build_heap = build_heap,
+ numa_shuffling = numa_shuffling, max_jobs = max_jobs, fresh_build = fresh_build,
+ no_build = no_build, session_setup = session_setup, master = true)
+
+ if (clean_build) {
+ for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
+ store.clean_output(database_server, name) match {
+ case None =>
+ case Some(true) => progress.echo("Cleaned " + name)
+ case Some(false) => progress.echo(name + " FAILED to clean")
+ }
+ }
+ }
+
+ val results = build_engine.run_build_process(build_context, progress, server)
+
+ if (export_files) {
+ for (name <- full_sessions_selection.iterator if results(name).ok) {
+ val info = results.info(name)
+ if (info.export_files.nonEmpty) {
+ progress.echo("Exporting " + info.name + " ...")
+ for ((dir, prune, pats) <- info.export_files) {
+ Export.export_files(store, name, info.dir + dir,
+ progress = if (progress.verbose) progress else new Progress,
+ export_prune = prune,
+ export_patterns = pats)
+ }
+ }
+ }
+ }
+
+ val presentation_sessions =
+ results.sessions_ok.filter(name => browser_info.enabled(results.info(name)))
+ if (presentation_sessions.nonEmpty && !progress.stopped) {
+ Browser_Info.build(browser_info, results.store, results.deps, presentation_sessions,
+ progress = progress, server = server)
+ }
+
+ if (results.unfinished.nonEmpty && (progress.verbose || !no_build)) {
+ progress.echo("Unfinished session(s): " + commas(results.unfinished))
+ }
+
+ results
+ }
+ }
+ }
+
+
+ /* build logic image */
+
+ def build_logic(options: Options, logic: String,
+ progress: Progress = new Progress,
+ build_heap: Boolean = false,
+ dirs: List[Path] = Nil,
+ fresh: Boolean = false,
+ strict: Boolean = false
+ ): Int = {
+ val selection = Sessions.Selection.session(logic)
+ val rc =
+ if (!fresh && build(options, selection = selection,
+ build_heap = build_heap, no_build = true, dirs = dirs).ok) Process_Result.RC.ok
+ else {
+ progress.echo("Build started for Isabelle/" + logic + " ...")
+ build(options, selection = selection, progress = progress,
+ build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc
+ }
+ if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic) else rc
+ }
+
+
+ /* command-line wrapper */
+
+ val isabelle_tool1 = Isabelle_Tool("build", "build and manage Isabelle sessions",
+ Scala_Project.here,
+ { args =>
+ var afp_root: Option[Path] = None
+ val base_sessions = new mutable.ListBuffer[String]
+ val select_dirs = new mutable.ListBuffer[Path]
+ val build_hosts = new mutable.ListBuffer[Build_Cluster.Host]
+ var numa_shuffling = false
+ var browser_info = Browser_Info.Config.none
+ var requirements = false
+ var soft_build = false
+ val exclude_session_groups = new mutable.ListBuffer[String]
+ var all_sessions = false
+ var build_heap = false
+ var clean_build = false
+ val dirs = new mutable.ListBuffer[Path]
+ var export_files = false
+ var fresh_build = false
+ val session_groups = new mutable.ListBuffer[String]
+ var max_jobs = 1
+ var check_keywords: Set[String] = Set.empty
+ var list_files = false
+ var no_build = false
+ var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS)
+ var verbose = false
+ val exclude_sessions = new mutable.ListBuffer[String]
+
+ val getopts = Getopts("""
+Usage: isabelle build [OPTIONS] [SESSIONS ...]
+
+ Options are:
+ -A ROOT include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
+ -B NAME include session NAME and all descendants
+ -D DIR include session directory and select its sessions
+ -H HOSTS additional build cluster host specifications, of the form
+ "NAMES:PARAMETERS" (separated by commas)
+ -N cyclic shuffling of NUMA CPU nodes (performance tuning)
+ -P DIR enable HTML/PDF presentation in directory (":" for default)
+ -R refer to requirements of selected sessions
+ -S soft build: only observe changes of sources, not heap images
+ -X NAME exclude sessions from group NAME and all descendants
+ -a select all sessions
+ -b build heap images
+ -c clean build
+ -d DIR include session directory
+ -e export files from session specification into file-system
+ -f fresh build
+ -g NAME select session group NAME
+ -j INT maximum number of parallel jobs (default 1)
+ -k KEYWORD check theory sources for conflicts with proposed keywords
+ -l list session source files
+ -n no build -- take existing session build databases
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -v verbose
+ -x NAME exclude session NAME and all descendants
+
+ Build and manage Isabelle sessions: ML heaps, session databases, documents.
+
+ Parameters for host specifications (option -H), apart from system options:
+""" + Library.indent_lines(4, Build_Cluster.Host.parameters.print()) +
+"""
+
+ Notable system options: see "isabelle options -l -t build"
+
+ Notable system settings:
+""" + Library.indent_lines(4, Build_Log.Settings.show()) + "\n",
+ "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
+ "B:" -> (arg => base_sessions += arg),
+ "D:" -> (arg => select_dirs += Path.explode(arg)),
+ "H:" -> (arg => build_hosts ++= Build_Cluster.Host.parse(Registry.global, arg)),
+ "N" -> (_ => numa_shuffling = true),
+ "P:" -> (arg => browser_info = Browser_Info.Config.make(arg)),
+ "R" -> (_ => requirements = true),
+ "S" -> (_ => soft_build = true),
+ "X:" -> (arg => exclude_session_groups += arg),
+ "a" -> (_ => all_sessions = true),
+ "b" -> (_ => build_heap = true),
+ "c" -> (_ => clean_build = true),
+ "d:" -> (arg => dirs += Path.explode(arg)),
+ "e" -> (_ => export_files = true),
+ "f" -> (_ => fresh_build = true),
+ "g:" -> (arg => session_groups += arg),
+ "j:" -> (arg => max_jobs = Value.Int.parse(arg)),
+ "k:" -> (arg => check_keywords = check_keywords + arg),
+ "l" -> (_ => list_files = true),
+ "n" -> (_ => no_build = true),
+ "o:" -> (arg => options = options + arg),
+ "v" -> (_ => verbose = true),
+ "x:" -> (arg => exclude_sessions += arg))
+
+ val sessions = getopts(args)
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ progress.echo(
+ "Started at " + Build_Log.print_date(progress.start) +
+ " (" + Isabelle_System.ml_identifier() + " on " + hostname(options) +")",
+ verbose = true)
+ progress.echo(Build_Log.Settings.show() + "\n", verbose = true)
+
+ val results =
+ progress.interrupt_handler {
+ build(options,
+ selection = Sessions.Selection(
+ requirements = requirements,
+ all_sessions = all_sessions,
+ base_sessions = base_sessions.toList,
+ exclude_session_groups = exclude_session_groups.toList,
+ exclude_sessions = exclude_sessions.toList,
+ session_groups = session_groups.toList,
+ sessions = sessions),
+ browser_info = browser_info,
+ progress = progress,
+ check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME),
+ build_heap = build_heap,
+ clean_build = clean_build,
+ afp_root = afp_root,
+ dirs = dirs.toList,
+ select_dirs = select_dirs.toList,
+ numa_shuffling = Host.numa_check(progress, numa_shuffling),
+ max_jobs = max_jobs,
+ list_files = list_files,
+ check_keywords = check_keywords,
+ fresh_build = fresh_build,
+ no_build = no_build,
+ soft_build = soft_build,
+ export_files = export_files,
+ build_hosts = build_hosts.toList)
+ }
+ val stop_date = Date.now()
+ val elapsed_time = stop_date.time - progress.start.time
+
+ progress.echo("\nFinished at " + Build_Log.print_date(stop_date), verbose = true)
+
+ val total_timing =
+ results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _).
+ copy(elapsed = elapsed_time)
+ progress.echo(total_timing.message_resources)
+
+ sys.exit(results.rc)
+ })
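+
+  // Typical invocations (illustrative):
+  //   isabelle build -b HOL
+  //   isabelle build -a -n -v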
+
+
+
+ /** build cluster management **/
+
+ /* identified builds */
+
+ def read_builds(build_database: Option[SQL.Database]): List[Build_Process.Build] =
+ build_database match {
+ case None => Nil
+ case Some(db) => Build_Process.read_builds(db)
+ }
+
+  def print_builds(
+    build_database: Option[SQL.Database],
+    builds: List[Build_Process.Build]
+  ): String = {
+ val print_database =
+ build_database match {
+ case None => ""
+ case Some(db) => " (database " + db + ")"
+ }
+ if (builds.isEmpty) "No build processes" + print_database
+ else "Build processes" + print_database + builds.map(build => "\n " + build.print).mkString
+ }
+
+ def find_builds(
+ build_database: Option[SQL.Database],
+ build_id: String,
+ builds: List[Build_Process.Build]
+ ): Build_Process.Build = {
+ (build_id, builds.length) match {
+ case (UUID(_), _) if builds.exists(_.build_uuid == build_id) =>
+ builds.find(_.build_uuid == build_id).get
+ case ("", 1) => builds.head
+ case ("", 0) => error(print_builds(build_database, builds))
+ case _ =>
+ cat_error("Cannot identify build process " + quote(build_id),
+ print_builds(build_database, builds))
+ }
+ }
+
+
+ /* "isabelle build_process" */
+
+ def build_process(
+ options: Options,
+ list_builds: Boolean = false,
+ remove_builds: Boolean = false,
+ force: Boolean = false,
+ progress: Progress = new Progress
+ ): Unit = {
+ val build_engine = Engine(engine_name(options))
+ val store = build_engine.build_store(options)
+
+ using(store.open_server()) { server =>
+ using_optional(store.maybe_open_build_database(server = server)) { build_database =>
+ def print(builds: List[Build_Process.Build]): Unit =
+ if (list_builds) progress.echo(print_builds(build_database, builds))
+
+ build_database match {
+ case None => print(Nil)
+ case Some(db) =>
+ Build_Process.private_data.transaction_lock(db, create = true, label = "build_process") {
+ val builds = Build_Process.private_data.read_builds(db)
+ val remove =
+ if (!remove_builds) Nil
+ else if (force) builds.map(_.build_uuid)
+ else builds.flatMap(build => if (build.active) None else Some(build.build_uuid))
+
+ print(builds)
+ if (remove.nonEmpty) {
+ if (remove_builds) {
+ progress.echo("Removing " + commas(remove) + " ...")
+ Build_Process.private_data.remove_builds(db, remove)
+ print(Build_Process.private_data.read_builds(db))
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ val isabelle_tool2 = Isabelle_Tool("build_process", "manage session build process",
+ Scala_Project.here,
+ { args =>
+ var force = false
+ var list_builds = false
+ var options =
+ Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS :::
+ List(
+ Options.Spec.make("build_database_server"),
+ Options.Spec.make("build_database")))
+ var remove_builds = false
+
+ val getopts = Getopts("""
+Usage: isabelle build_process [OPTIONS]
+
+ Options are:
+ -f extra force for option -r
+ -l list build processes
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -r remove data from build processes: inactive processes (default)
+ or all processes (option -f)
+""",
+ "f" -> (_ => force = true),
+ "l" -> (_ => list_builds = true),
+ "o:" -> (arg => options = options + arg),
+ "r" -> (_ => remove_builds = true))
+
+ val more_args = getopts(args)
+ if (more_args.nonEmpty) getopts.usage()
+
+ val progress = new Console_Progress()
+
+ build_process(options, list_builds = list_builds, remove_builds = remove_builds,
+ force = force, progress = progress)
+ })
+
+
+
+ /* "isabelle build_worker" */
+
+ def build_worker_command(
+ host: Build_Cluster.Host,
+ ssh: SSH.System = SSH.Local,
+ build_options: List[Options.Spec] = Nil,
+ build_id: String = "",
+ isabelle_home: Path = Path.current,
+ afp_root: Option[Path] = None,
+ dirs: List[Path] = Nil,
+ quiet: Boolean = false,
+ verbose: Boolean = false
+ ): String = {
+ val options = build_options ::: Options.Spec.eq("build_hostname", host.name) :: host.options
+ ssh.bash_path(isabelle_home + Path.explode("bin/isabelle")) + " build_worker" +
+ if_proper(build_id, " -B " + Bash.string(build_id)) +
+ if_proper(afp_root, " -A " + ssh.bash_path(afp_root.get)) +
+ dirs.map(dir => " -d " + ssh.bash_path(dir)).mkString +
+ if_proper(host.numa, " -N") + " -j" + host.jobs +
+ Options.Spec.bash_strings(options, bg = true) +
+ if_proper(quiet, " -q") +
+ if_proper(verbose, " -v")
+ }
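+
+  // E.g. for a host "worker1" with jobs = 4 and numa = true, the result is
+  // roughly (illustrative):
+  //   .../bin/isabelle build_worker -N -j4 -o build_hostname=worker1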
+
+ def build_worker(
+ options: Options,
+ build_id: String = "",
+ progress: Progress = new Progress,
+ afp_root: Option[Path] = None,
+ dirs: List[Path] = Nil,
+ numa_shuffling: Boolean = false,
+ max_jobs: Int = 1
+ ): Results = {
+ val build_engine = Engine(engine_name(options))
+ val store = build_engine.build_store(options)
+ val build_options = store.options
+
+ using(store.open_server()) { server =>
+ using_optional(store.maybe_open_build_database(server = server)) { build_database =>
+ val builds = read_builds(build_database)
+
+ val build_master = find_builds(build_database, build_id, builds.filter(_.active))
+
+ val sessions_structure =
+ Sessions.load_structure(build_options, dirs = AFP.make_dirs(afp_root) ::: dirs).
+ selection(Sessions.Selection(sessions = build_master.sessions))
+
+ val build_deps =
+ Sessions.deps(sessions_structure, progress = progress, inlined_files = true).check_errors
+
+ val build_context =
+ Context(store, build_engine, build_deps, afp_root = afp_root,
+ hostname = hostname(build_options), numa_shuffling = numa_shuffling,
+ max_jobs = max_jobs, build_uuid = build_master.build_uuid)
+
+ build_engine.run_build_process(build_context, progress, server)
+ }
+ }
+ }
+
+ val isabelle_tool3 = Isabelle_Tool("build_worker", "start worker for session build process",
+ Scala_Project.here,
+ { args =>
+ var afp_root: Option[Path] = None
+ var build_id = ""
+ var numa_shuffling = false
+ val dirs = new mutable.ListBuffer[Path]
+ var max_jobs = 1
+ var options =
+ Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS :::
+ List(
+ Options.Spec.make("build_database_server"),
+ Options.Spec.make("build_database")))
+ var quiet = false
+ var verbose = false
+
+ val getopts = Getopts("""
+Usage: isabelle build_worker [OPTIONS]
+
+ Options are:
+ -A ROOT include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
+ -B UUID existing UUID for build process (default: from database)
+ -N cyclic shuffling of NUMA CPU nodes (performance tuning)
+ -d DIR include session directory
+ -j INT maximum number of parallel jobs (default 1)
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -q quiet mode: no progress
+ -v verbose
+""",
+ "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
+ "B:" -> (arg => build_id = arg),
+ "N" -> (_ => numa_shuffling = true),
+ "d:" -> (arg => dirs += Path.explode(arg)),
+ "j:" -> (arg => max_jobs = Value.Int.parse(arg)),
+ "o:" -> (arg => options = options + arg),
+ "q" -> (_ => quiet = true),
+ "v" -> (_ => verbose = true))
+
+ val more_args = getopts(args)
+ if (more_args.nonEmpty) getopts.usage()
+
+ val progress =
+ if (quiet && verbose) new Progress { override def verbose: Boolean = true }
+ else if (quiet) new Progress
+ else new Console_Progress(verbose = verbose)
+
+ val results =
+ progress.interrupt_handler {
+ build_worker(options,
+ build_id = build_id,
+ progress = progress,
+ afp_root = afp_root,
+ dirs = dirs.toList,
+ numa_shuffling = Host.numa_check(progress, numa_shuffling),
+ max_jobs = max_jobs)
+ }
+
+ if (!results.ok) sys.exit(results.rc)
+ })
+
+
+
+ /** "isabelle build_log" **/
+
+ /* theory markup/messages from session database */
+
+ def read_theory(
+ theory_context: Export.Theory_Context,
+ unicode_symbols: Boolean = false
+ ): Option[Document.Snapshot] = {
+ def decode_bytes(bytes: Bytes): String =
+ Symbol.output(unicode_symbols, UTF8.decode_permissive(bytes))
+
+ def read(name: String): Export.Entry = theory_context(name, permissive = true)
+
+ def read_xml(name: String): XML.Body =
+ YXML.parse_body(decode_bytes(read(name).bytes), cache = theory_context.cache)
+
+ def read_source_file(name: String): Store.Source_File =
+ theory_context.session_context.source_file(name)
+
+ for {
+ id <- theory_context.document_id()
+ (thy_file, blobs_files) <- theory_context.files(permissive = true)
+ }
+ yield {
+ val master_dir =
+ Path.explode(Url.strip_base_name(thy_file).getOrElse(
+ error("Cannot determine theory master directory: " + quote(thy_file))))
+
+ val blobs =
+ blobs_files.map { name =>
+ val path = Path.explode(name)
+ val src_path = File.relative_path(master_dir, path).getOrElse(path)
+
+ val file = read_source_file(name)
+ val bytes = file.bytes
+ val text = decode_bytes(bytes)
+ val chunk = Symbol.Text_Chunk(text)
+
+ Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) ->
+ Document.Blobs.Item(bytes, text, chunk, changed = false)
+ }
+
+ val thy_source = decode_bytes(read_source_file(thy_file).bytes)
+ val thy_xml = read_xml(Export.MARKUP)
+ val blobs_xml =
+ for (i <- (1 to blobs.length).toList)
+ yield read_xml(Export.MARKUP + i)
+
+ val markups_index = Command.Markup_Index.make(blobs.map(_._1))
+ val markups =
+ Command.Markups.make(
+ for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
+ yield index -> Markup_Tree.from_XML(xml))
+
+ val results =
+ Command.Results.make(
+ for (case elem@XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
+ yield i -> elem)
+
+ val command =
+ Command.unparsed(thy_source, theory = true, id = id,
+ node_name = Document.Node.Name(thy_file, theory = theory_context.theory),
+ blobs_info = Command.Blobs_Info.make(blobs),
+ markups = markups, results = results)
+
+ val doc_blobs = Document.Blobs.make(blobs)
+
+ Document.State.init.snippet(command, doc_blobs)
+ }
+ }
+
+
+ /* print messages */
+
+ def print_log(
+ options: Options,
+ sessions: List[String],
+ theories: List[String] = Nil,
+ message_head: List[Regex] = Nil,
+ message_body: List[Regex] = Nil,
+ progress: Progress = new Progress,
+ margin: Double = Pretty.default_margin,
+ breakgain: Double = Pretty.default_breakgain,
+ metric: Pretty.Metric = Symbol.Metric,
+ unicode_symbols: Boolean = false
+ ): Unit = {
+ val store = Store(options)
+ val session = new Session(options, Resources.bootstrap)
+
+ def check(filter: List[Regex], make_string: => String): Boolean =
+ filter.isEmpty || {
+ val s = Output.clean_yxml(make_string)
+        filter.forall(r => r.findFirstIn(s).nonEmpty)
+ }
+
+ def print(session_name: String): Unit = {
+ using(Export.open_session_context0(store, session_name)) { session_context =>
+ val result =
+ for {
+ db <- session_context.session_db()
+ theories = store.read_theories(db, session_name)
+ errors = store.read_errors(db, session_name)
+ info <- store.read_build(db, session_name)
+ } yield (theories, errors, info.return_code)
+ result match {
+ case None => store.error_database(session_name)
+ case Some((used_theories, errors, rc)) =>
+ theories.filterNot(used_theories.toSet) match {
+ case Nil =>
+ case bad => error("Unknown theories " + commas_quote(bad))
+ }
+ val print_theories =
+ if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet)
+
+ for (thy <- print_theories) {
+              val thy_heading = "\nTheory " + quote(thy) + " (in " + session_name + "):"
+
+ Build.read_theory(session_context.theory(thy), unicode_symbols = unicode_symbols) match {
+ case None => progress.echo(thy_heading + " MISSING")
+ case Some(snapshot) =>
+ val rendering = new Rendering(snapshot, options, session)
+ val messages =
+ rendering.text_messages(Text.Range.full)
+ .filter(message => progress.verbose || Protocol.is_exported(message.info))
+ if (messages.nonEmpty) {
+ val line_document = Line.Document(snapshot.node.source)
+ val buffer = new mutable.ListBuffer[String]
+ for (Text.Info(range, elem) <- messages) {
+ val line = line_document.position(range.start).line1
+ val pos = Position.Line_File(line, snapshot.node_name.node)
+ def message_text: String =
+ Protocol.message_text(elem, heading = true, pos = pos,
+ margin = margin, breakgain = breakgain, metric = metric)
+ val ok =
+ check(message_head, Protocol.message_heading(elem, pos)) &&
+ check(message_body, XML.content(Pretty.unformatted(List(elem))))
+ if (ok) buffer += message_text
+ }
+ if (buffer.nonEmpty) {
+ progress.echo(thy_heading)
+ buffer.foreach(progress.echo(_))
+ }
+ }
+ }
+ }
+
+ if (errors.nonEmpty) {
+ val msg = Symbol.output(unicode_symbols, cat_lines(errors))
+ progress.echo("\nBuild errors:\n" + Output.error_message_text(msg))
+ }
+ if (rc != Process_Result.RC.ok) {
+ progress.echo("\n" + Process_Result.RC.print_long(rc))
+ }
+ }
+ }
+ }
+
+ val errors = new mutable.ListBuffer[String]
+ for (session_name <- sessions) {
+ Exn.result(print(session_name)) match {
+ case Exn.Res(_) =>
+ case Exn.Exn(exn) => errors += Exn.message(exn)
+ }
+ }
+ if (errors.nonEmpty) error(cat_lines(errors.toList))
+ }
+
+
+ /* command-line wrapper */
+
+ val isabelle_tool4 = Isabelle_Tool("build_log", "print messages from session build database",
+ Scala_Project.here,
+ { args =>
+ /* arguments */
+
+ val message_head = new mutable.ListBuffer[Regex]
+ val message_body = new mutable.ListBuffer[Regex]
+ var unicode_symbols = false
+ val theories = new mutable.ListBuffer[String]
+ var margin = Pretty.default_margin
+ var options = Options.init()
+ var verbose = false
+
+ val getopts = Getopts("""
+Usage: isabelle build_log [OPTIONS] [SESSIONS ...]
+
+ Options are:
+ -H REGEX filter messages by matching against head
+ -M REGEX filter messages by matching against body
+ -T NAME restrict to given theories (multiple options possible)
+ -U output Unicode symbols
+ -m MARGIN margin for pretty printing (default: """ + margin + """)
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -v print all messages, including information etc.
+
+ Print messages from the session build database of the given sessions,
+  without any checks against current sources or session structure: results
+ from old sessions or failed builds can be printed as well.
+
+ Multiple options -H and -M are conjunctive: all given patterns need to
+ match. Patterns match any substring, but ^ or $ may be used to match the
+ start or end explicitly.
+""",
+ "H:" -> (arg => message_head += arg.r),
+ "M:" -> (arg => message_body += arg.r),
+ "T:" -> (arg => theories += arg),
+ "U" -> (_ => unicode_symbols = true),
+ "m:" -> (arg => margin = Value.Double.parse(arg)),
+ "o:" -> (arg => options = options + arg),
+ "v" -> (_ => verbose = true))
+
+ val sessions = getopts(args)
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ if (sessions.isEmpty) progress.echo_warning("No sessions to print")
+ else {
+ print_log(options, sessions, theories = theories.toList, message_head = message_head.toList,
+ message_body = message_body.toList, margin = margin, progress = progress,
+ unicode_symbols = unicode_symbols)
+ }
+ })
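+
+  // Example (sketch, not part of this changeset): print only the error
+  // messages of a session, as also suggested by Build_Job on failure:
+  //
+  //   isabelle build_log -H Error HOL-Library
+  //
+  // Multiple -H/-M patterns are conjunctive: every pattern must match.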
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/build_cluster.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,376 @@
+/* Title: Pure/Build/build_cluster.scala
+ Author: Makarius
+
+Management of a build cluster: independent SSH hosts with access to a shared
+PostgreSQL server.
+
+NB: extensible classes allow quite different implementations in user-space,
+via the service class Build.Engine and overridable methods
+Build.Engine.open_build_process(), Build_Process.open_build_cluster().
+*/
+
+package isabelle
+
+
+object Build_Cluster {
+ /* host specifications */
+
+ object Host {
+ private val rfc822_specials = """()<>@,;:\"[]"""
+
+ private val HOSTNAME = "hostname"
+ private val USER = "user"
+ private val PORT = "port"
+ private val JOBS = "jobs"
+ private val NUMA = "numa"
+ private val DIRS = "dirs"
+ private val SHARED = "shared"
+
+ val parameters: Options =
+ Options.inline("""
+ option hostname : string = "" -- "explicit SSH hostname"
+ option user : string = "" -- "explicit SSH user"
+ option port : int = 0 -- "explicit SSH port"
+ option jobs : int = 1 -- "maximum number of parallel jobs"
+ option numa : bool = false -- "cyclic shuffling of NUMA CPU nodes"
+ option dirs : string = "" -- "additional session directories (separated by colon)"
+ option shared : bool = false -- "shared directory: omit sync + init"
+ """)
+
+ def is_parameter(spec: Options.Spec): Boolean = parameters.defined(spec.name)
+
+ lazy val test_options: Options = Options.init0()
+
+ def apply(
+ name: String = "",
+ hostname: String = parameters.string(HOSTNAME),
+ user: String = parameters.string(USER),
+ port: Int = parameters.int(PORT),
+ jobs: Int = parameters.int(JOBS),
+ numa: Boolean = parameters.bool(NUMA),
+ dirs: String = parameters.string(DIRS),
+ shared: Boolean = parameters.bool(SHARED),
+ options: List[Options.Spec] = Nil
+ ): Host = {
+ Path.split(dirs) // check
+ new Host(name, hostname, user, port, jobs, numa, dirs, shared, options)
+ }
+
+ def parse(registry: Registry, str: String): List[Host] = {
+ def err(msg: String): Nothing =
+ cat_error(msg, "The error(s) above occurred in host specification " + quote(str))
+
+ val names = str.takeWhile(c => !rfc822_specials.contains(c) || c == ',')
+ val more_specs =
+ try {
+ val n = str.length
+ val m = names.length
+ val l =
+ if (m == n) n
+ else if (str(m) == ':') m + 1
+ else error("Missing \":\" after host name")
+ Options.Spec.parse(str.substring(l))
+ }
+ catch { case ERROR(msg) => err(msg) }
+
+ def get_registry(a: String): Registry.Cluster.Value =
+ Registry.Cluster.try_unqualify(a) match {
+ case Some(b) => Registry.Cluster.get(registry, b)
+ case None => List(a -> Registry.Host.get(registry, a))
+ }
+
+ for (name <- space_explode(',', names); (host, host_specs) <- get_registry(name))
+ yield {
+ val (params, options) =
+ try {
+ val (specs1, specs2) = (host_specs ::: more_specs).partition(is_parameter)
+            (parameters ++ specs1, { test_options ++ specs2; specs2 })  // check
+ }
+ catch { case ERROR(msg) => err(msg) }
+
+ apply(name = host,
+ hostname = params.string(HOSTNAME),
+ user = params.string(USER),
+ port = params.int(PORT),
+ jobs = params.int(JOBS),
+ numa = params.bool(NUMA),
+ dirs = params.string(DIRS),
+ shared = params.bool(SHARED),
+ options = options)
+ }
+ }
+
+ def parse_single(registry: Registry, str: String): Host =
+ Library.the_single(parse(registry, str), "Single host expected: " + quote(str))
+ }
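+
+  // Example (sketch, not part of this changeset): parsing a comma-separated
+  // host list with common parameters, assuming the global registry defines
+  // no cluster entries of these names:
+  //
+  //   Host.parse(Registry.global, "alpha,beta:jobs=4,numa")
+  //
+  // yields two hosts, each with jobs = 4 and numa = true.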
+
+ class Host(
+ val name: String,
+ val hostname: String,
+ val user: String,
+ val port: Int,
+ val jobs: Int,
+ val numa: Boolean,
+ val dirs: String,
+ val shared: Boolean,
+ val options: List[Options.Spec]
+ ) {
+ host =>
+
+ def ssh_host: String = proper_string(hostname) getOrElse name
+ def is_local: Boolean = SSH.is_local(ssh_host)
+
+ override def toString: String = print
+
+ def print: String = {
+ val params =
+ List(
+ if (host.hostname.isEmpty) "" else Options.Spec.print(Host.HOSTNAME, host.hostname),
+ if (host.user.isEmpty) "" else Options.Spec.print(Host.USER, host.user),
+ if (host.port == 0) "" else Options.Spec.print(Host.PORT, host.port.toString),
+ if (host.jobs == 1) "" else Options.Spec.print(Host.JOBS, host.jobs.toString),
+ if_proper(host.numa, Host.NUMA),
+        if_proper(host.dirs, Options.Spec.print(Host.DIRS, host.dirs)),
+ if_proper(host.shared, Host.SHARED)
+ ).filter(_.nonEmpty)
+ val rest = (params ::: host.options.map(_.print)).mkString(",")
+
+ SSH.print_local(host.name) + if_proper(rest, ":" + rest)
+ }
+
+ def message(msg: String): String = "Host " + quote(host.name) + if_proper(msg, ": " + msg)
+
+ def open_ssh(options: Options): SSH.System =
+ SSH.open_system(options ++ host.options, ssh_host, port = host.port, user = host.user)
+
+ def open_session(build_context: Build.Context, progress: Progress = new Progress): Session = {
+ val ssh_options = build_context.build_options ++ host.options
+ val ssh = open_ssh(build_context.build_options)
+ new Session(build_context, host, ssh_options, ssh, progress)
+ }
+ }
+
+
+ /* SSH sessions */
+
+ class Session(
+ val build_context: Build.Context,
+ val host: Host,
+ val options: Options,
+ val ssh: SSH.System,
+ val progress: Progress
+ ) extends AutoCloseable {
+ override def toString: String = ssh.toString
+
+ val remote_identifier: String = options.string("build_cluster_identifier")
+ val remote_root: Path = Path.explode(options.string("build_cluster_root"))
+ val remote_isabelle_home: Path = remote_root + Path.explode("isabelle")
+ val remote_afp_root: Option[Path] =
+ if (build_context.afp_root.isEmpty) None
+ else Some(remote_isabelle_home + Path.explode("AFP"))
+
+ lazy val remote_isabelle: Other_Isabelle =
+ Other_Isabelle(remote_isabelle_home, isabelle_identifier = remote_identifier, ssh = ssh)
+
+ def sync(): Other_Isabelle = {
+ Sync.sync(options, Rsync.Context(ssh = ssh), remote_isabelle_home,
+ afp_root = build_context.afp_root,
+ purge_heaps = true,
+ preserve_jars = true)
+ remote_isabelle
+ }
+
+ def init(): Unit = remote_isabelle.init()
+
+ def benchmark(): Unit = {
+ val script =
+ Benchmark.benchmark_command(host, ssh = ssh, isabelle_home = remote_isabelle_home)
+ remote_isabelle.bash(script).check
+ }
+
+ def start(): Process_Result = {
+ val remote_ml_platform = remote_isabelle.getenv("ML_PLATFORM")
+ if (remote_ml_platform != build_context.ml_platform) {
+ error("Bad ML_PLATFORM: found " + remote_ml_platform +
+ ", but expected " + build_context.ml_platform)
+ }
+ val script =
+ Build.build_worker_command(host,
+ ssh = ssh,
+ build_options = List(options.spec("build_database_server")),
+ build_id = build_context.build_uuid,
+ isabelle_home = remote_isabelle_home,
+ afp_root = remote_afp_root,
+ dirs = Path.split(host.dirs).map(remote_isabelle.expand_path),
+ quiet = true)
+ remote_isabelle.bash(script).check
+ }
+
+ override def close(): Unit = ssh.close()
+ }
+
+ class Result(val host: Host, val process_result: Exn.Result[Process_Result]) {
+ def rc: Int = Process_Result.RC(process_result)
+ def ok: Boolean = rc == Process_Result.RC.ok
+ }
+
+
+ /* build clusters */
+
+ object none extends Build_Cluster {
+ override def toString: String = "Build_Cluster.none"
+ }
+
+ def make(build_context: Build.Context, progress: Progress = new Progress): Build_Cluster = {
+ val remote_hosts = build_context.build_hosts.filterNot(_.is_local)
+ if (remote_hosts.isEmpty) none
+ else new Remote_Build_Cluster(build_context, remote_hosts, progress = progress)
+ }
+}
+
+// NB: extensible via Build.Engine.open_build_process() and Build_Process.open_build_cluster()
+trait Build_Cluster extends AutoCloseable {
+ def rc: Int = Process_Result.RC.ok
+ def ok: Boolean = rc == Process_Result.RC.ok
+ def return_code(rc: Int): Unit = ()
+ def return_code(res: Process_Result): Unit = return_code(res.rc)
+ def return_code(exn: Throwable): Unit = return_code(Process_Result.RC(exn))
+ def open(): Unit = ()
+ def init(): Unit = ()
+ def benchmark(): Unit = ()
+ def start(): Unit = ()
+ def active(): Boolean = false
+ def join: List[Build_Cluster.Result] = Nil
+ def stop(): Unit = { join; close() }
+ override def close(): Unit = ()
+}
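+
+// Illustrative sketch (not part of this changeset): the no-op defaults above
+// can be overridden selectively in user-space, e.g. a cluster that only logs:
+//
+//   class Log_Build_Cluster(progress: Progress) extends Build_Cluster {
+//     override def start(): Unit = progress.echo("build cluster started")
+//   }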
+
+class Remote_Build_Cluster(
+ val build_context: Build.Context,
+ val remote_hosts: List[Build_Cluster.Host],
+ val progress: Progress = new Progress
+) extends Build_Cluster {
+ require(remote_hosts.nonEmpty && !remote_hosts.exists(_.is_local), "remote hosts required")
+
+ override def toString: String =
+ remote_hosts.iterator.map(_.name).mkString("Build_Cluster(", ", ", ")")
+
+
+ /* cumulative return code */
+
+ private val _rc = Synchronized(Process_Result.RC.ok)
+
+ override def rc: Int = _rc.value
+
+ override def return_code(rc: Int): Unit =
+ _rc.change(rc0 => Process_Result.RC.merge(rc0, rc))
+
+ def capture[A](host: Build_Cluster.Host, op: String)(
+ e: => A,
+ msg: String = host.message(op + " ..."),
+ err: Throwable => String = { exn =>
+ return_code(exn)
+ host.message("failed to " + op + "\n" + Exn.print(exn))
+ }
+ ): Exn.Result[A] = {
+ progress.capture(e, msg = msg, err = err)
+ }
+
+
+ /* open remote sessions */
+
+ private var _sessions = List.empty[Build_Cluster.Session]
+
+ override def open(): Unit = synchronized {
+ require(_sessions.isEmpty, "build cluster already open")
+
+ val attempts =
+ Par_List.map((host: Build_Cluster.Host) =>
+ capture(host, "open") { host.open_session(build_context, progress = progress) },
+ remote_hosts, thread = true)
+
+ if (attempts.forall(Exn.is_res)) {
+      _sessions = attempts.map(Exn.the_res)
+ }
+ else {
+ for (case Exn.Res(session) <- attempts) session.close()
+ error("Failed to connect build cluster")
+ }
+ }
+
+
+ /* init and benchmark remote Isabelle distributions */
+
+ private var _init = List.empty[Future[Unit]]
+
+ override def init(): Unit = synchronized {
+ require(_sessions.nonEmpty, "build cluster not yet open")
+
+ if (_init.isEmpty) {
+ _init =
+ for (session <- _sessions if !session.host.shared) yield {
+ Future.thread(session.host.message("session")) {
+ capture(session.host, "sync") { session.sync() }
+ capture(session.host, "init") { session.init() }
+ }
+ }
+ }
+ }
+
+ override def benchmark(): Unit = synchronized {
+ _init.foreach(_.join)
+ _init =
+ for (session <- _sessions if !session.host.shared) yield {
+ Future.thread(session.host.message("session")) {
+ capture(session.host, "benchmark") { session.benchmark() }
+ }
+ }
+ _init.foreach(_.join)
+ }
+
+
+ /* workers */
+
+ private var _workers = List.empty[Future[Process_Result]]
+
+ override def active(): Boolean = synchronized { _workers.nonEmpty }
+
+ override def start(): Unit = synchronized {
+ require(_sessions.nonEmpty, "build cluster not yet open")
+ require(_workers.isEmpty, "build cluster already active")
+
+ init()
+ _init.foreach(_.join)
+
+ _workers =
+ for (session <- _sessions) yield {
+ Future.thread(session.host.message("work")) {
+ Exn.release(capture(session.host, "work") { session.start() })
+ }
+ }
+ }
+
+ override def join: List[Build_Cluster.Result] = synchronized {
+ _init.foreach(_.join)
+ if (_workers.isEmpty) Nil
+ else {
+ assert(_sessions.length == _workers.length)
+ for ((session, worker) <- _sessions zip _workers)
+ yield Build_Cluster.Result(session.host, worker.join_result)
+ }
+ }
+
+
+ /* close */
+
+ override def close(): Unit = synchronized {
+ _init.foreach(_.join)
+ _workers.foreach(_.join_result)
+ _sessions.foreach(_.close())
+
+ _init = Nil
+ _workers = Nil
+ _sessions = Nil
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/build_job.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,552 @@
+/* Title: Pure/Build/build_job.scala
+ Author: Makarius
+
+Build job running prover process, with rudimentary PIDE session.
+*/
+
+package isabelle
+
+
+import scala.collection.mutable
+
+
+trait Build_Job {
+ def cancel(): Unit = ()
+ def is_finished: Boolean = false
+ def join: Option[Build_Job.Result] = None
+}
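+
+// NB: the no-op defaults above make "new Build_Job {}" a valid placeholder:
+// cancel() does nothing, it never finishes, and join yields no result.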
+
+object Build_Job {
+ sealed case class Result(process_result: Process_Result, output_shasum: SHA1.Shasum)
+
+
+ /* build session */
+
+ def start_session(
+ build_context: Build.Context,
+ session_context: Session_Context,
+ progress: Progress,
+ log: Logger,
+ server: SSH.Server,
+ session_background: Sessions.Background,
+ sources_shasum: SHA1.Shasum,
+ input_shasum: SHA1.Shasum,
+ node_info: Host.Node_Info,
+ store_heap: Boolean
+ ): Session_Job = {
+ new Session_Job(build_context, session_context, progress, log, server,
+ session_background, sources_shasum, input_shasum, node_info, store_heap)
+ }
+
+ object Session_Context {
+ def load(
+ database_server: Option[SQL.Database],
+ build_uuid: String,
+ name: String,
+ deps: List[String],
+ ancestors: List[String],
+ session_prefs: String,
+ sources_shasum: SHA1.Shasum,
+ timeout: Time,
+ store: Store,
+ progress: Progress = new Progress
+ ): Session_Context = {
+ def default: Session_Context =
+ Session_Context(
+ name, deps, ancestors, session_prefs, sources_shasum, timeout,
+ Time.zero, Bytes.empty, build_uuid)
+
+ def read(db: SQL.Database): Session_Context = {
+ def ignore_error(msg: String) = {
+ progress.echo_warning(
+ "Ignoring bad database " + db + " for session " + quote(name) +
+ if_proper(msg, ":\n" + msg))
+ default
+ }
+ try {
+ val command_timings = store.read_command_timings(db, name)
+ val elapsed =
+ store.read_session_timing(db, name) match {
+ case Markup.Elapsed(s) => Time.seconds(s)
+ case _ => Time.zero
+ }
+ new Session_Context(
+ name, deps, ancestors, session_prefs, sources_shasum, timeout,
+ elapsed, command_timings, build_uuid)
+ }
+ catch {
+ case ERROR(msg) => ignore_error(msg)
+ case exn: java.lang.Error => ignore_error(Exn.message(exn))
+ case _: XML.Error => ignore_error("XML.Error")
+ }
+ }
+
+ database_server match {
+ case Some(db) => if (store.session_info_exists(db)) read(db) else default
+ case None => using_option(store.try_open_database(name))(read) getOrElse default
+ }
+ }
+ }
+
+ sealed case class Session_Context(
+ name: String,
+ deps: List[String],
+ ancestors: List[String],
+ session_prefs: String,
+ sources_shasum: SHA1.Shasum,
+ timeout: Time,
+ old_time: Time,
+ old_command_timings_blob: Bytes,
+ build_uuid: String
+ ) extends Library.Named
+
+ class Session_Job private[Build_Job](
+ build_context: Build.Context,
+ session_context: Session_Context,
+ progress: Progress,
+ log: Logger,
+ server: SSH.Server,
+ session_background: Sessions.Background,
+ sources_shasum: SHA1.Shasum,
+ input_shasum: SHA1.Shasum,
+ node_info: Host.Node_Info,
+ store_heap: Boolean
+ ) extends Build_Job {
+ def session_name: String = session_background.session_name
+
+ private val future_result: Future[Option[Result]] =
+ Future.thread("build", uninterruptible = true) {
+ val info = session_background.sessions_structure(session_name)
+ val options = Host.node_options(info.options, node_info)
+
+ val store = build_context.store
+
+ using_optional(store.maybe_open_database_server(server = server)) { database_server =>
+
+ store.clean_output(database_server, session_name, session_init = true)
+
+ val session_sources =
+ Store.Sources.load(session_background.base, cache = store.cache.compress)
+
+ val env =
+ Isabelle_System.settings(
+ List("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString))
+
+ val session_heaps =
+ session_background.info.parent match {
+ case None => Nil
+ case Some(logic) => ML_Process.session_heaps(store, session_background, logic = logic)
+ }
+
+ val use_prelude = if (session_heaps.isEmpty) Thy_Header.ml_roots.map(_._1) else Nil
+
+ val eval_store =
+ if (store_heap) {
+ (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) :::
+ List("ML_Heap.save_child " +
+ ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name))))
+ }
+ else Nil
+
+ def session_blobs(node_name: Document.Node.Name): List[(Command.Blob, Document.Blobs.Item)] =
+ session_background.base.theory_load_commands.get(node_name.theory) match {
+ case None => Nil
+ case Some(spans) =>
+ val syntax = session_background.base.theory_syntax(node_name)
+ val master_dir = Path.explode(node_name.master_dir)
+ for (span <- spans; file <- span.loaded_files(syntax).files)
+ yield {
+ val src_path = Path.explode(file)
+ val blob_name = Document.Node.Name(File.symbolic_path(master_dir + src_path))
+
+ val bytes = session_sources(blob_name.node).bytes
+ val text = bytes.text
+ val chunk = Symbol.Text_Chunk(text)
+
+ Command.Blob(blob_name, src_path, Some((SHA1.digest(bytes), chunk))) ->
+ Document.Blobs.Item(bytes, text, chunk, changed = false)
+ }
+ }
+
+
+ /* session */
+
+ val resources =
+ new Resources(session_background, log = log,
+ command_timings =
+ Properties.uncompress(session_context.old_command_timings_blob, cache = store.cache))
+
+ val session =
+ new Session(options, resources) {
+ override val cache: Term.Cache = store.cache
+
+ override def build_blobs_info(node_name: Document.Node.Name): Command.Blobs_Info =
+ Command.Blobs_Info.make(session_blobs(node_name))
+
+ override def build_blobs(node_name: Document.Node.Name): Document.Blobs =
+ Document.Blobs.make(session_blobs(node_name))
+ }
+
+ object Build_Session_Errors {
+ private val promise: Promise[List[String]] = Future.promise
+
+ def result: Exn.Result[List[String]] = promise.join_result
+ def cancel(): Unit = promise.cancel()
+ def apply(errs: List[String]): Unit = {
+ try { promise.fulfill(errs) }
+ catch { case _: IllegalStateException => }
+ }
+ }
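+          // NB: the promise is fulfilled at most once; subsequent apply()
+          // calls are ignored, so the first error report wins.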
+
+ val export_consumer =
+ Export.consumer(store.open_database(session_name, output = true, server = server),
+ store.cache, progress = progress)
+
+ val stdout = new StringBuilder(1000)
+ val stderr = new StringBuilder(1000)
+ val command_timings = new mutable.ListBuffer[Properties.T]
+ val theory_timings = new mutable.ListBuffer[Properties.T]
+ val session_timings = new mutable.ListBuffer[Properties.T]
+ val runtime_statistics = new mutable.ListBuffer[Properties.T]
+ val task_statistics = new mutable.ListBuffer[Properties.T]
+
+ def fun(
+ name: String,
+ acc: mutable.ListBuffer[Properties.T],
+ unapply: Properties.T => Option[Properties.T]
+ ): (String, Session.Protocol_Function) = {
+ name -> ((msg: Prover.Protocol_Output) =>
+ unapply(msg.properties) match {
+ case Some(props) => acc += props; true
+ case _ => false
+ })
+ }
+
+ session.init_protocol_handler(new Session.Protocol_Handler {
+ override def exit(): Unit = Build_Session_Errors.cancel()
+
+ private def build_session_finished(msg: Prover.Protocol_Output): Boolean = {
+ val (rc, errors) =
+ try {
+ val (rc, errs) = {
+ import XML.Decode._
+ pair(int, list(x => x))(Symbol.decode_yxml(msg.text))
+ }
+ val errors =
+ for (err <- errs) yield {
+ val prt = Protocol_Message.expose_no_reports(err)
+ Pretty.string_of(prt, metric = Symbol.Metric)
+ }
+ (rc, errors)
+ }
+ catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) }
+
+ session.protocol_command("Prover.stop", rc.toString)
+ Build_Session_Errors(errors)
+ true
+ }
+
+ private def loading_theory(msg: Prover.Protocol_Output): Boolean =
+ msg.properties match {
+ case Markup.Loading_Theory(Markup.Name(name)) =>
+ progress.theory(Progress.Theory(name, session = session_name))
+ false
+ case _ => false
+ }
+
+ private def export_(msg: Prover.Protocol_Output): Boolean =
+ msg.properties match {
+ case Protocol.Export(args) =>
+ export_consumer.make_entry(session_name, args, msg.chunk)
+ true
+ case _ => false
+ }
+
+ override val functions: Session.Protocol_Functions =
+ List(
+ Markup.Build_Session_Finished.name -> build_session_finished,
+ Markup.Loading_Theory.name -> loading_theory,
+ Markup.EXPORT -> export_,
+ fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply),
+ fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply),
+ fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply))
+ })
+
+ session.command_timings += Session.Consumer("command_timings") {
+ case Session.Command_Timing(props) =>
+ for {
+ elapsed <- Markup.Elapsed.unapply(props)
+ elapsed_time = Time.seconds(elapsed)
+ if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold")
+ } command_timings += props.filter(Markup.command_timing_property)
+ }
+
+ session.runtime_statistics += Session.Consumer("ML_statistics") {
+ case Session.Runtime_Statistics(props) => runtime_statistics += props
+ }
+
+ session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories") {
+ case snapshot =>
+ if (!progress.stopped) {
+ def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit = {
+ if (!progress.stopped) {
+ val theory_name = snapshot.node_name.theory
+ val args =
+ Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress)
+ val body = Bytes(Symbol.encode(YXML.string_of_body(xml)))
+ export_consumer.make_entry(session_name, args, body)
+ }
+ }
+ def export_text(name: String, text: String, compress: Boolean = true): Unit =
+ export_(name, List(XML.Text(text)), compress = compress)
+
+ for (command <- snapshot.snippet_command) {
+ export_text(Export.DOCUMENT_ID, command.id.toString, compress = false)
+ }
+
+ export_text(Export.FILES,
+ cat_lines(snapshot.node_files.map(name => File.symbolic_path(name.path))),
+ compress = false)
+
+ for ((blob_name, i) <- snapshot.node_files.tail.zipWithIndex) {
+ val xml = snapshot.switch(blob_name).xml_markup()
+ export_(Export.MARKUP + (i + 1), xml)
+ }
+ export_(Export.MARKUP, snapshot.xml_markup())
+ export_(Export.MESSAGES, snapshot.messages.map(_._1))
+ }
+ }
+
+ session.all_messages += Session.Consumer[Any]("build_session_output") {
+ case msg: Prover.Output =>
+ val message = msg.message
+ if (msg.is_system) resources.log(Protocol.message_text(message))
+
+ if (msg.is_stdout) {
+ stdout ++= Symbol.encode(XML.content(message))
+ }
+ else if (msg.is_stderr) {
+ stderr ++= Symbol.encode(XML.content(message))
+ }
+ else if (msg.is_exit) {
+ val err =
+ "Prover terminated" +
+ (msg.properties match {
+ case Markup.Process_Result(result) => ": " + result.print_rc
+ case _ => ""
+ })
+ Build_Session_Errors(List(err))
+ }
+ case _ =>
+ }
+
+ build_context.session_setup(session_name, session)
+
+ val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store)
+
+
+ /* process */
+
+ val process =
+ Isabelle_Process.start(options, session, session_background, session_heaps,
+ use_prelude = use_prelude, eval_main = eval_main, cwd = info.dir.file, env = env)
+
+ val timeout_request: Option[Event_Timer.Request] =
+ if (info.timeout_ignored) None
+ else Some(Event_Timer.request(Time.now() + info.timeout) { process.terminate() })
+
+ val build_errors =
+ Isabelle_Thread.interrupt_handler(_ => process.terminate()) {
+ Exn.capture { process.await_startup() } match {
+ case Exn.Res(_) =>
+ val resources_yxml = resources.init_session_yxml
+ val encode_options: XML.Encode.T[Options] =
+ options => session.prover_options(options).encode
+ val args_yxml =
+ YXML.string_of_body(
+ {
+ import XML.Encode._
+ pair(string, list(pair(encode_options, list(pair(string, properties)))))(
+ (session_name, info.theories))
+ })
+ session.protocol_command("build_session", resources_yxml, args_yxml)
+ Build_Session_Errors.result
+ case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn)))
+ }
+ }
+
+ val result0 =
+ Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() }
+
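+        // a timeout request that can no longer be cancelled has already fired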
+ val was_timeout =
+ timeout_request match {
+ case None => false
+ case Some(request) => !request.cancel()
+ }
+
+ session.stop()
+
+ val export_errors =
+ export_consumer.shutdown(close = true).map(Output.error_message_text)
+
+ val (document_output, document_errors) =
+ try {
+ if (Exn.is_res(build_errors) && result0.ok && info.documents.nonEmpty) {
+ using(Export.open_database_context(store, server = server)) { database_context =>
+ val documents =
+ using(database_context.open_session(session_background)) {
+ session_context =>
+ Document_Build.build_documents(
+ Document_Build.context(session_context, progress = progress),
+ output_sources = info.document_output,
+ output_pdf = info.document_output)
+ }
+ using(database_context.open_database(session_name, output = true))(session_database =>
+ documents.foreach(_.write(session_database.db, session_name)))
+ (documents.flatMap(_.log_lines), Nil)
+ }
+ }
+ else (Nil, Nil)
+ }
+ catch {
+ case exn: Document_Build.Build_Error => (exn.log_lines, exn.log_errors)
+ case Exn.Interrupt.ERROR(msg) => (Nil, List(msg))
+ }
+
+
+ /* process result */
+
+ val result1 = {
+ val theory_timing =
+ theory_timings.iterator.flatMap(
+ {
+ case props @ Markup.Name(name) => Some(name -> props)
+ case _ => None
+ }).toMap
+ val used_theory_timings =
+ for { (name, _) <- session_background.base.used_theories }
+ yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory))
+
+ val more_output =
+ Library.trim_line(stdout.toString) ::
+ command_timings.toList.map(Protocol.Command_Timing_Marker.apply) :::
+ used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) :::
+ session_timings.toList.map(Protocol.Session_Timing_Marker.apply) :::
+ runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) :::
+ task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) :::
+ document_output
+
+ result0.output(more_output)
+ .error(Library.trim_line(stderr.toString))
+ .errors_rc(export_errors ::: document_errors)
+ }
+
+ val result2 =
+ build_errors match {
+ case Exn.Res(build_errs) =>
+ val errs = build_errs ::: document_errors
+ if (errs.nonEmpty) {
+ result1.error_rc.output(
+ errs.flatMap(s => split_lines(Output.error_message_text(s))) :::
+ errs.map(Protocol.Error_Message_Marker.apply))
+ }
+ else if (progress.stopped && result1.ok) result1.copy(rc = Process_Result.RC.interrupt)
+ else result1
+ case Exn.Exn(Exn.Interrupt()) =>
+ if (result1.ok) result1.copy(rc = Process_Result.RC.interrupt)
+ else result1
+ case Exn.Exn(exn) => throw exn
+ }
+
+ val process_result =
+ if (result2.ok) result2
+ else if (was_timeout) result2.error(Output.error_message_text("Timeout")).timeout_rc
+ else if (result2.interrupted) result2.error(Output.error_message_text("Interrupt"))
+ else result2
+
+
+ /* output heap */
+
+ val output_shasum = {
+ val heap = store.output_heap(session_name)
+ if (process_result.ok && store_heap && heap.is_file) {
+ val slice = Space.MiB(options.real("build_database_slice")).bytes
+ val digest = ML_Heap.store(database_server, session_name, heap, slice)
+ SHA1.shasum(digest, session_name)
+ }
+ else SHA1.no_shasum
+ }
+
+ val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
+
+ val build_log =
+ Build_Log.Log_File(session_name, process_result.out_lines, cache = store.cache).
+ parse_session_info(
+ command_timings = true,
+ theory_timings = true,
+ ml_statistics = true,
+ task_statistics = true)
+
+ // write log file
+ if (process_result.ok) {
+ File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
+ }
+ else File.write(store.output_log(session_name), terminate_lines(log_lines))
+
+ // write database
+ def write_info(db: SQL.Database): Unit =
+ store.write_session_info(db, session_name, session_sources,
+ build_log =
+ if (process_result.timeout) build_log.error("Timeout") else build_log,
+ build =
+ Store.Build_Info(
+ sources = sources_shasum,
+ input_heaps = input_shasum,
+ output_heap = output_shasum,
+ process_result.rc,
+ build_context.build_uuid))
+
+ val valid =
+ if (progress.stopped_local) false
+ else {
+ database_server match {
+ case Some(db) => write_info(db)
+ case None => using(store.open_database(session_name, output = true))(write_info)
+ }
+ true
+ }
+
+ // messages
+ process_result.err_lines.foreach(progress.echo(_))
+
+ if (process_result.ok) {
+ val props = build_log.session_timing
+ val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1
+ val timing = Markup.Timing_Properties.get(props)
+ progress.echo(
+ "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")",
+ verbose = true)
+ progress.echo(
+ "Finished " + session_name + " (" + process_result.timing.message_resources + ")")
+ }
+ else {
+ progress.echo(
+ session_name + " FAILED (see also \"isabelle build_log -H Error " + session_name + "\")")
+ if (!process_result.interrupted) {
+ val tail = info.options.int("process_output_tail")
+ val suffix = if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)
+ val prefix = if (log_lines.length == suffix.length) Nil else List("...")
+ progress.echo(Library.trim_line(cat_lines(prefix ::: suffix)))
+ }
+ }
+
+ if (valid) Some(Result(process_result.copy(out_lines = log_lines), output_shasum))
+ else None
+ }
+ }
+
+ override def cancel(): Unit = future_result.cancel()
+ override def is_finished: Boolean = future_result.is_finished
+ override def join: Option[Result] = future_result.join
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/build_process.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,1223 @@
+/* Title: Pure/Build/build_process.scala
+ Author: Makarius
+
+Build process for sessions, with build database, optional heap, and
+optional presentation.
+*/
+
+package isabelle
+
+
+import scala.collection.immutable.SortedMap
+import scala.math.Ordering
+import scala.annotation.tailrec
+
+
+object Build_Process {
+ /** state vs. database **/
+
+ sealed case class Build(
+ build_uuid: String, // Database_Progress.context_uuid
+ ml_platform: String,
+ options: String,
+ start: Date,
+ stop: Option[Date],
+ sessions: List[String]
+ ) {
+ def active: Boolean = stop.isEmpty
+
+ def print: String =
+ build_uuid + " (platform: " + ml_platform + ", start: " + Build_Log.print_date(start) +
+ if_proper(stop, ", stop: " + Build_Log.print_date(stop.get)) + ")"
+ }
+
+ sealed case class Worker(
+ worker_uuid: String, // Database_Progress.agent_uuid
+ build_uuid: String,
+ start: Date,
+ stamp: Date,
+ stop: Option[Date],
+ serial: Long
+ )
+
+ sealed case class Task(
+ name: String,
+ deps: List[String],
+ info: JSON.Object.T,
+ build_uuid: String
+ ) {
+ def is_ready: Boolean = deps.isEmpty
+ def resolve(dep: String): Task =
+ if (deps.contains(dep)) copy(deps = deps.filterNot(_ == dep)) else this
+ }
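+
+  // Example (sketch): Task("B", List("A"), JSON.Object.empty, "") is not
+  // ready; resolve("A") removes the dependency and makes it ready.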
+
+ sealed case class Job(
+ name: String,
+ worker_uuid: String,
+ build_uuid: String,
+ node_info: Host.Node_Info,
+ start_date: Date,
+ build: Option[Build_Job]
+ ) extends Library.Named {
+ def join_build: Option[Build_Job.Result] = build.flatMap(_.join)
+ }
+
+ sealed case class Result(
+ name: String,
+ worker_uuid: String,
+ build_uuid: String,
+ node_info: Host.Node_Info,
+ process_result: Process_Result,
+ output_shasum: SHA1.Shasum,
+ current: Boolean
+ ) extends Library.Named {
+ def ok: Boolean = process_result.ok
+ }
+
+ object Sessions {
+ type Graph = isabelle.Graph[String, Build_Job.Session_Context]
+ val empty: Sessions = new Sessions(Graph.string)
+ }
+
+ final class Sessions private(val graph: Sessions.Graph) {
+ override def toString: String = graph.toString
+
+ def defined(name: String): Boolean = graph.defined(name)
+ def apply(name: String): Build_Job.Session_Context = graph.get_node(name)
+
+ def iterator: Iterator[Build_Job.Session_Context] =
+ for (name <- graph.topological_order.iterator) yield apply(name)
+
+ def make(new_graph: Sessions.Graph): Sessions =
+ if (graph == new_graph) this
+ else {
+ new Sessions(
+ new_graph.iterator.foldLeft(new_graph) {
+ case (g, (name, (session, _))) => g.add_deps_acyclic(name, session.deps)
+ })
+ }
+
+ def pull(
+ data_domain: Set[String],
+ data: Set[String] => List[Build_Job.Session_Context]
+ ): Sessions = {
+ val dom = data_domain -- iterator.map(_.name)
+ make(data(dom).foldLeft(graph.restrict(dom)) { case (g, e) => g.new_node(e.name, e) })
+ }
+
+ def init(
+ build_context: isabelle.Build.Context,
+ database_server: Option[SQL.Database],
+ progress: Progress = new Progress
+ ): Sessions = {
+ val sessions_structure = build_context.sessions_structure
+ make(
+ sessions_structure.build_graph.iterator.foldLeft(graph) {
+ case (graph0, (name, (info, _))) =>
+ val deps = info.parent.toList
+ val prefs = info.session_prefs
+ val ancestors = sessions_structure.build_requirements(deps)
+ val sources_shasum = build_context.build_deps.sources_shasum(name)
+
+ if (graph0.defined(name)) {
+ val session0 = graph0.get_node(name)
+ val prefs0 = session0.session_prefs
+ val ancestors0 = session0.ancestors
+ val sources_shasum0 = session0.sources_shasum
+
+ def err(msg: String, a: String, b: String): Nothing =
+ error("Conflicting dependencies for session " + quote(name) + ": " + msg + "\n" +
+ Library.indent_lines(2, a) + "\nvs.\n" + Library.indent_lines(2, b))
+
+ if (prefs0 != prefs) {
+ err("preferences disagree",
+ Symbol.cartouche_decoded(prefs0), Symbol.cartouche_decoded(prefs))
+ }
+ if (ancestors0 != ancestors) {
+ err("ancestors disagree", commas_quote(ancestors0), commas_quote(ancestors))
+ }
+ if (sources_shasum0 != sources_shasum) {
+ val a = sources_shasum0 - sources_shasum
+ val b = sources_shasum - sources_shasum0
+ err("sources disagree",
+ Library.trim_line(a.toString),
+ Library.trim_line(b.toString))
+ }
+
+ graph0
+ }
+ else {
+ val session =
+ Build_Job.Session_Context.load(database_server,
+ build_context.build_uuid, name, deps, ancestors, prefs, sources_shasum,
+ info.timeout, build_context.store, progress = progress)
+ graph0.new_node(name, session)
+ }
+ }
+ )
+ }
+
+ lazy val max_time: Map[String, Double] = {
+ val maximals = graph.maximals.toSet
+ def descendants_time(name: String): Double = {
+ if (maximals.contains(name)) apply(name).old_time.seconds
+ else {
+ val descendants = graph.all_succs(List(name)).toSet
+ val g = graph.restrict(descendants)
+ (0.0 :: g.maximals.flatMap { desc =>
+ val ps = g.all_preds(List(desc))
+ if (ps.exists(p => !graph.defined(p))) None
+ else Some(ps.map(p => apply(p).old_time.seconds).sum)
+ }).max
+ }
+ }
+ Map.from(
+ for (name <- graph.keys_iterator)
+ yield name -> descendants_time(name)).withDefaultValue(0.0)
+ }
+
+ lazy val ordering: Ordering[String] =
+ (a: String, b: String) =>
+ max_time(b) compare max_time(a) match {
+ case 0 =>
+ apply(b).timeout compare apply(a).timeout match {
+ case 0 => a compare b
+ case ord => ord
+ }
+ case ord => ord
+ }
+ }
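+
+  // Scheduling heuristic (sketch): sessions with the largest cumulative
+  // descendant time come first, ties broken by timeout (descending), then by
+  // name; e.g. max_time = Map("A" -> 10.0, "B" -> 5.0) orders "A" before "B".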
+
+ sealed case class Snapshot(
+ builds: List[Build], // available build configurations
+ workers: List[Worker], // available worker processes
+ sessions: Sessions, // static build targets
+ pending: State.Pending, // dynamic build "queue"
+ running: State.Running, // presently running jobs
+ results: State.Results) // finished results
+
+ object State {
+ type Pending = List[Task]
+ type Running = Map[String, Job]
+ type Results = Map[String, Result]
+ }
+
+ sealed case class State(
+ serial: Long = 0,
+ numa_nodes: List[Int] = Nil,
+ sessions: Sessions = Sessions.empty,
+ pending: State.Pending = Nil,
+ running: State.Running = Map.empty,
+ results: State.Results = Map.empty
+ ) {
+ require(serial >= 0, "serial underflow")
+ def inc_serial: State = {
+ require(serial < Long.MaxValue, "serial overflow")
+ copy(serial = serial + 1)
+ }
+
+ def ready: State.Pending = pending.filter(_.is_ready)
+ def next_ready: State.Pending = ready.filter(entry => !is_running(entry.name))
+
+ def remove_pending(name: String): State =
+ copy(pending = pending.flatMap(
+ entry => if (entry.name == name) None else Some(entry.resolve(name))))
+
+ def is_running(name: String): Boolean = running.isDefinedAt(name)
+
+ def stop_running(): Unit =
+ for (job <- running.valuesIterator; build <- job.build) build.cancel()
+
+ def build_running: List[Build_Job] =
+ List.from(for (job <- running.valuesIterator; build <- job.build) yield build)
+
+ def finished_running(): List[Job] =
+ List.from(
+ for (job <- running.valuesIterator; build <- job.build if build.is_finished)
+ yield job)
+
+ def add_running(job: Job): State =
+ copy(running = running + (job.name -> job))
+
+ def remove_running(name: String): State =
+ copy(running = running - name)
+
+ def make_result(
+ result_name: (String, String, String),
+ process_result: Process_Result,
+ output_shasum: SHA1.Shasum,
+ node_info: Host.Node_Info = Host.Node_Info.none,
+ current: Boolean = false
+ ): State = {
+ val (name, worker_uuid, build_uuid) = result_name
+ val result =
+ Result(name, worker_uuid, build_uuid, node_info, process_result, output_shasum, current)
+ copy(results = results + (name -> result))
+ }
+
+ def ancestor_results(name: String): Option[List[Result]] = {
+ val defined =
+ sessions.defined(name) &&
+ sessions(name).ancestors.forall(a => sessions.defined(a) && results.isDefinedAt(a))
+ if (defined) Some(sessions(name).ancestors.map(results)) else None
+ }
+ }
+
+
+
+ /** SQL data model **/
+
+ object private_data extends SQL.Data("isabelle_build") {
+ val database: Path = Path.explode("$ISABELLE_HOME_USER/build.db")
+
+ def pull[A <: Library.Named](
+ data_domain: Set[String],
+ data_iterator: Set[String] => Iterator[A],
+ old_data: Map[String, A]
+ ): Map[String, A] = {
+ val dom = data_domain -- old_data.keysIterator
+ val data = old_data -- old_data.keysIterator.filterNot(data_domain)
+ if (dom.isEmpty) data
+ else data_iterator(dom).foldLeft(data) { case (map, a) => map + (a.name -> a) }
+ }
+
+ def pull0[A <: Library.Named](
+ new_data: Map[String, A],
+ old_data: Map[String, A]
+ ): Map[String, A] = {
+ pull(new_data.keySet, dom => new_data.valuesIterator.filter(a => dom(a.name)), old_data)
+ }
+
+ def pull1[A <: Library.Named](
+ data_domain: Set[String],
+ data_base: Set[String] => Map[String, A],
+ old_data: Map[String, A]
+ ): Map[String, A] = {
+ pull(data_domain, dom => data_base(dom).valuesIterator, old_data)
+ }
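+
+    // Example (sketch): pull0 keeps old entries whose names are still in the
+    // new domain and adds fresh ones, so pull0(Map("a" -> x), Map("b" -> y))
+    // drops "b" and yields Map("a" -> x); existing names keep their old value.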
+
+ object Generic {
+ val build_uuid = SQL.Column.string("build_uuid")
+ val worker_uuid = SQL.Column.string("worker_uuid")
+ val name = SQL.Column.string("name")
+
+ def sql(
+ build_uuid: String = "",
+ worker_uuid: String = "",
+ names: Iterable[String] = Nil
+ ): SQL.Source =
+ SQL.and(
+ if_proper(build_uuid, Generic.build_uuid.equal(build_uuid)),
+ if_proper(worker_uuid, Generic.worker_uuid.equal(worker_uuid)),
+ if_proper(names, Generic.name.member(names)))
+
+ def sql_where(
+ build_uuid: String = "",
+ worker_uuid: String = "",
+ names: Iterable[String] = Nil
+ ): SQL.Source = {
+ SQL.where(sql(build_uuid = build_uuid, worker_uuid = worker_uuid, names = names))
+ }
+ }
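+
+    // Example (sketch): Generic.sql(build_uuid = "u1", names = List("HOL"))
+    // yields roughly "build_uuid = 'u1' AND name IN ('HOL')"; empty arguments
+    // are omitted from the conjunction.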
+
+
+ /* base table */
+
+ object Base {
+ val build_uuid = Generic.build_uuid.make_primary_key
+ val ml_platform = SQL.Column.string("ml_platform")
+ val options = SQL.Column.string("options")
+ val start = SQL.Column.date("start")
+ val stop = SQL.Column.date("stop")
+
+ val table = make_table(List(build_uuid, ml_platform, options, start, stop))
+ }
+
+ def read_builds(db: SQL.Database): List[Build] = {
+ val builds =
+ db.execute_query_statement(Base.table.select(), List.from[Build],
+ { res =>
+ val build_uuid = res.string(Base.build_uuid)
+ val ml_platform = res.string(Base.ml_platform)
+ val options = res.string(Base.options)
+ val start = res.date(Base.start)
+ val stop = res.get_date(Base.stop)
+ Build(build_uuid, ml_platform, options, start, stop, Nil)
+ })
+
+ for (build <- builds.sortBy(_.start)(Date.Ordering)) yield {
+ val sessions = private_data.read_sessions_domain(db, build_uuid = build.build_uuid)
+ build.copy(sessions = sessions.toList.sorted)
+ }
+ }
+
+ def remove_builds(db: SQL.Database, remove: List[String]): Unit =
+ if (remove.nonEmpty) {
+ val sql = Generic.build_uuid.where_member(remove)
+ db.execute_statement(SQL.MULTI(build_uuid_tables.map(_.delete(sql = sql))))
+ }
+
+ def start_build(
+ db: SQL.Database,
+ build_uuid: String,
+ ml_platform: String,
+ options: String
+ ): Unit = {
+ db.execute_statement(Base.table.insert(), body =
+ { stmt =>
+ stmt.string(1) = build_uuid
+ stmt.string(2) = ml_platform
+ stmt.string(3) = options
+ stmt.date(4) = db.now()
+ stmt.date(5) = None
+ })
+ }
+
+ def stop_build(db: SQL.Database, build_uuid: String): Unit =
+ db.execute_statement(
+ Base.table.update(List(Base.stop), sql = Base.build_uuid.where_equal(build_uuid)),
+ body = { stmt => stmt.date(1) = db.now() })
+
+ def clean_build(db: SQL.Database): Unit = {
+ val remove =
+ db.execute_query_statement(
+ Base.table.select(List(Base.build_uuid), sql = SQL.where(Base.stop.defined)),
+ List.from[String], res => res.string(Base.build_uuid))
+
+ remove_builds(db, remove)
+ }
+
+
+ /* sessions */
+
+ object Sessions {
+ val name = Generic.name.make_primary_key
+ val deps = SQL.Column.string("deps")
+ val ancestors = SQL.Column.string("ancestors")
+ val options = SQL.Column.string("options")
+ val sources = SQL.Column.string("sources")
+ val timeout = SQL.Column.long("timeout")
+ val old_time = SQL.Column.long("old_time")
+ val old_command_timings = SQL.Column.bytes("old_command_timings")
+ val build_uuid = Generic.build_uuid
+
+ val table =
+ make_table(
+ List(name, deps, ancestors, options, sources, timeout,
+ old_time, old_command_timings, build_uuid),
+ name = "sessions")
+ }
+
+ def read_sessions_domain(db: SQL.Database, build_uuid: String = ""): Set[String] =
+ db.execute_query_statement(
+ Sessions.table.select(List(Sessions.name),
+ sql = if_proper(build_uuid, Sessions.build_uuid.where_equal(build_uuid))),
+ Set.from[String], res => res.string(Sessions.name))
+
+ def read_sessions(db: SQL.Database,
+ names: Iterable[String] = Nil,
+ build_uuid: String = ""
+ ): List[Build_Job.Session_Context] = {
+ db.execute_query_statement(
+ Sessions.table.select(
+ sql =
+ SQL.where_and(
+ if_proper(names, Sessions.name.member(names)),
+ if_proper(build_uuid, Sessions.build_uuid.equal(build_uuid)))
+ ),
+ List.from[Build_Job.Session_Context],
+ { res =>
+ val name = res.string(Sessions.name)
+ val deps = split_lines(res.string(Sessions.deps))
+ val ancestors = split_lines(res.string(Sessions.ancestors))
+ val options = res.string(Sessions.options)
+ val sources_shasum = SHA1.fake_shasum(res.string(Sessions.sources))
+ val timeout = Time.ms(res.long(Sessions.timeout))
+ val old_time = Time.ms(res.long(Sessions.old_time))
+ val old_command_timings_blob = res.bytes(Sessions.old_command_timings)
+ val build_uuid = res.string(Sessions.build_uuid)
+ Build_Job.Session_Context(name, deps, ancestors, options, sources_shasum,
+ timeout, old_time, old_command_timings_blob, build_uuid)
+ }
+ )
+ }
+
+ def update_sessions(
+ db: SQL.Database,
+ sessions: Build_Process.Sessions,
+ old_sessions: Build_Process.Sessions
+ ): Boolean = {
+ val insert = sessions.iterator.filterNot(s => old_sessions.defined(s.name)).toList
+
+ if (insert.nonEmpty) {
+ db.execute_batch_statement(Sessions.table.insert(), batch =
+ for (session <- insert) yield { (stmt: SQL.Statement) =>
+ stmt.string(1) = session.name
+ stmt.string(2) = cat_lines(session.deps)
+ stmt.string(3) = cat_lines(session.ancestors)
+ stmt.string(4) = session.session_prefs
+ stmt.string(5) = session.sources_shasum.toString
+ stmt.long(6) = session.timeout.ms
+ stmt.long(7) = session.old_time.ms
+ stmt.bytes(8) = session.old_command_timings_blob
+ stmt.string(9) = session.build_uuid
+ })
+ }
+
+ insert.nonEmpty
+ }
+
+
+ /* workers */
+
+ object Workers {
+ val worker_uuid = Generic.worker_uuid.make_primary_key
+ val build_uuid = Generic.build_uuid
+ val start = SQL.Column.date("start")
+ val stamp = SQL.Column.date("stamp")
+ val stop = SQL.Column.date("stop")
+ val serial = SQL.Column.long("serial")
+
+ val table =
+ make_table(List(worker_uuid, build_uuid, start, stamp, stop, serial), name = "workers")
+ }
+
+ def read_serial(db: SQL.Database): Long =
+ db.execute_query_statementO[Long](
+ Workers.table.select(List(Workers.serial.max)), _.long(Workers.serial)).getOrElse(0L)
+
+ def read_workers(
+ db: SQL.Database,
+ build_uuid: String = "",
+ worker_uuid: String = ""
+ ): List[Worker] = {
+ db.execute_query_statement(
+ Workers.table.select(
+ sql = Generic.sql_where(build_uuid = build_uuid, worker_uuid = worker_uuid)),
+ List.from[Worker],
+ { res =>
+ Worker(
+ worker_uuid = res.string(Workers.worker_uuid),
+ build_uuid = res.string(Workers.build_uuid),
+ start = res.date(Workers.start),
+ stamp = res.date(Workers.stamp),
+ stop = res.get_date(Workers.stop),
+ serial = res.long(Workers.serial))
+ })
+ }
+
+ def start_worker(
+ db: SQL.Database,
+ worker_uuid: String,
+ build_uuid: String,
+ serial: Long
+ ): Unit = {
+ def err(msg: String): Nothing =
+ error("Cannot start worker " + worker_uuid + if_proper(msg, "\n" + msg))
+
+ val build_stop =
+ db.execute_query_statementO(
+ Base.table.select(List(Base.stop), sql = Base.build_uuid.where_equal(build_uuid)),
+ res => res.get_date(Base.stop))
+
+ build_stop match {
+ case Some(None) =>
+ case Some(Some(_)) => err("for already stopped build process " + build_uuid)
+ case None => err("for unknown build process " + build_uuid)
+ }
+
+ db.execute_statement(Workers.table.insert(), body =
+ { stmt =>
+ val now = db.now()
+ stmt.string(1) = worker_uuid
+ stmt.string(2) = build_uuid
+ stmt.date(3) = now
+ stmt.date(4) = now
+ stmt.date(5) = None
+ stmt.long(6) = serial
+ })
+ }
+
+ def stamp_worker(
+ db: SQL.Database,
+ worker_uuid: String,
+ serial: Long,
+ stop_now: Boolean = false
+ ): Unit = {
+ val sql = Workers.worker_uuid.where_equal(worker_uuid)
+
+ val stop =
+ db.execute_query_statementO(
+ Workers.table.select(List(Workers.stop), sql = sql), _.get_date(Workers.stop)).flatten
+
+ db.execute_statement(
+ Workers.table.update(List(Workers.stamp, Workers.stop, Workers.serial), sql = sql),
+ body = { stmt =>
+ val now = db.now()
+ stmt.date(1) = now
+ stmt.date(2) = if (stop_now) Some(now) else stop
+ stmt.long(3) = serial
+ })
+ }
+
+
+ /* pending jobs */
+
+ object Pending {
+ val name = Generic.name.make_primary_key
+ val deps = SQL.Column.string("deps")
+ val info = SQL.Column.string("info")
+ val build_uuid = Generic.build_uuid
+
+ val table = make_table(List(name, deps, info, build_uuid), name = "pending")
+ }
+
+ def read_pending(db: SQL.Database): List[Task] =
+ db.execute_query_statement(
+ Pending.table.select(sql = SQL.order_by(List(Pending.name))),
+ List.from[Task],
+ { res =>
+ val name = res.string(Pending.name)
+ val deps = res.string(Pending.deps)
+ val info = res.string(Pending.info)
+ val build_uuid = res.string(Pending.build_uuid)
+ Task(name, split_lines(deps), JSON.Object.parse(info), build_uuid)
+ })
+
+ def update_pending(
+ db: SQL.Database,
+ pending: State.Pending,
+ old_pending: State.Pending
+ ): Boolean = {
+ val (delete, insert) = Library.symmetric_difference(old_pending, pending)
+
+ if (delete.nonEmpty) {
+ db.execute_statement(
+ Pending.table.delete(sql = Generic.sql_where(names = delete.map(_.name))))
+ }
+
+ if (insert.nonEmpty) {
+ db.execute_batch_statement(Pending.table.insert(), batch =
+ for (task <- insert) yield { (stmt: SQL.Statement) =>
+ stmt.string(1) = task.name
+ stmt.string(2) = cat_lines(task.deps)
+ stmt.string(3) = JSON.Format(task.info)
+ stmt.string(4) = task.build_uuid
+ })
+ }
+
+ delete.nonEmpty || insert.nonEmpty
+ }
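+
+    // NB: symmetric_difference yields (obsolete, new) pending tasks, so the
+    // table is updated by deleting the former and inserting the latter.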
+
+
+ /* running jobs */
+
+ object Running {
+ val name = Generic.name.make_primary_key
+ val worker_uuid = Generic.worker_uuid
+ val build_uuid = Generic.build_uuid
+ val hostname = SQL.Column.string("hostname")
+ val numa_node = SQL.Column.int("numa_node")
+ val rel_cpus = SQL.Column.string("rel_cpus")
+ val start_date = SQL.Column.date("start_date")
+
+ val table =
+ make_table(
+ List(name, worker_uuid, build_uuid, hostname, numa_node, rel_cpus, start_date),
+ name = "running")
+ }
+
+ def read_running(db: SQL.Database): State.Running =
+ db.execute_query_statement(
+ Running.table.select(sql = SQL.order_by(List(Running.name))),
+ Map.from[String, Job],
+ { res =>
+ val name = res.string(Running.name)
+ val worker_uuid = res.string(Running.worker_uuid)
+ val build_uuid = res.string(Running.build_uuid)
+ val hostname = res.string(Running.hostname)
+ val numa_node = res.get_int(Running.numa_node)
+ val rel_cpus = res.string(Running.rel_cpus)
+ val start_date = res.date(Running.start_date)
+
+ val node_info = Host.Node_Info(hostname, numa_node, Host.Range.from(rel_cpus))
+ name -> Job(name, worker_uuid, build_uuid, node_info, start_date, None)
+ }
+ )
+
+ def update_running(
+ db: SQL.Database,
+ running: State.Running,
+ old_running: State.Running
+ ): Boolean = {
+ val running0 = old_running.valuesIterator.toList
+ val running1 = running.valuesIterator.toList
+ val (delete, insert) = Library.symmetric_difference(running0, running1)
+
+ if (delete.nonEmpty) {
+ db.execute_statement(
+ Running.table.delete(sql = Generic.sql_where(names = delete.map(_.name))))
+ }
+
+ if (insert.nonEmpty) {
+ db.execute_batch_statement(Running.table.insert(), batch =
+ for (job <- insert) yield { (stmt: SQL.Statement) =>
+ stmt.string(1) = job.name
+ stmt.string(2) = job.worker_uuid
+ stmt.string(3) = job.build_uuid
+ stmt.string(4) = job.node_info.hostname
+ stmt.int(5) = job.node_info.numa_node
+ stmt.string(6) = Host.Range(job.node_info.rel_cpus)
+ stmt.date(7) = job.start_date
+ })
+ }
+
+ delete.nonEmpty || insert.nonEmpty
+ }
+
+
+ /* job results */
+
+ object Results {
+ val name = Generic.name.make_primary_key
+ val worker_uuid = Generic.worker_uuid
+ val build_uuid = Generic.build_uuid
+ val hostname = SQL.Column.string("hostname")
+ val numa_node = SQL.Column.int("numa_node")
+ val rel_cpus = SQL.Column.string("rel_cpus")
+ val rc = SQL.Column.int("rc")
+ val out = SQL.Column.string("out")
+ val err = SQL.Column.string("err")
+ val timing_elapsed = SQL.Column.long("timing_elapsed")
+ val timing_cpu = SQL.Column.long("timing_cpu")
+ val timing_gc = SQL.Column.long("timing_gc")
+ val output_shasum = SQL.Column.string("output_shasum")
+ val current = SQL.Column.bool("current")
+
+ val table =
+ make_table(
+ List(name, worker_uuid, build_uuid, hostname, numa_node, rel_cpus,
+ rc, out, err, timing_elapsed, timing_cpu, timing_gc, output_shasum, current),
+ name = "results")
+ }
+
+ def read_results_domain(db: SQL.Database): Set[String] =
+ db.execute_query_statement(
+ Results.table.select(List(Results.name)),
+ Set.from[String], res => res.string(Results.name))
+
+ def read_results(db: SQL.Database, names: Iterable[String] = Nil): State.Results =
+ db.execute_query_statement(
+ Results.table.select(sql = if_proper(names, Results.name.where_member(names))),
+ Map.from[String, Result],
+ { res =>
+ val name = res.string(Results.name)
+ val worker_uuid = res.string(Results.worker_uuid)
+ val build_uuid = res.string(Results.build_uuid)
+ val hostname = res.string(Results.hostname)
+ val numa_node = res.get_int(Results.numa_node)
+ val rel_cpus = res.string(Results.rel_cpus)
+ val node_info = Host.Node_Info(hostname, numa_node, Host.Range.from(rel_cpus))
+
+ val rc = res.int(Results.rc)
+ val out = res.string(Results.out)
+ val err = res.string(Results.err)
+ val timing =
+ res.timing(
+ Results.timing_elapsed,
+ Results.timing_cpu,
+ Results.timing_gc)
+ val process_result =
+ Process_Result(rc,
+ out_lines = split_lines(out),
+ err_lines = split_lines(err),
+ timing = timing)
+
+ val output_shasum = SHA1.fake_shasum(res.string(Results.output_shasum))
+ val current = res.bool(Results.current)
+
+ name ->
+ Result(name, worker_uuid, build_uuid, node_info, process_result, output_shasum, current)
+ }
+ )
+
+ def update_results(
+ db: SQL.Database,
+ results: State.Results,
+ old_results: State.Results
+ ): Boolean = {
+ val insert =
+ results.valuesIterator.filterNot(res => old_results.isDefinedAt(res.name)).toList
+
+ if (insert.nonEmpty) {
+ db.execute_batch_statement(Results.table.insert(), batch =
+ for (result <- insert) yield { (stmt: SQL.Statement) =>
+ val process_result = result.process_result
+ stmt.string(1) = result.name
+ stmt.string(2) = result.worker_uuid
+ stmt.string(3) = result.build_uuid
+ stmt.string(4) = result.node_info.hostname
+ stmt.int(5) = result.node_info.numa_node
+ stmt.string(6) = Host.Range(result.node_info.rel_cpus)
+ stmt.int(7) = process_result.rc
+ stmt.string(8) = cat_lines(process_result.out_lines)
+ stmt.string(9) = cat_lines(process_result.err_lines)
+ stmt.long(10) = process_result.timing.elapsed.ms
+ stmt.long(11) = process_result.timing.cpu.ms
+ stmt.long(12) = process_result.timing.gc.ms
+ stmt.string(13) = result.output_shasum.toString
+ stmt.bool(14) = result.current
+ })
+ }
+
+ insert.nonEmpty
+ }
+
+
+ /* collective operations */
+
+ override val tables =
+ SQL.Tables(
+ Base.table,
+ Workers.table,
+ Sessions.table,
+ Pending.table,
+ Running.table,
+ Results.table)
+
+ val build_uuid_tables =
+ tables.filter(table =>
+ table.columns.exists(column => column.name == Generic.build_uuid.name))
+
+ def pull_database(db: SQL.Database, worker_uuid: String, state: State): State = {
+ val serial_db = read_serial(db)
+ if (serial_db == state.serial) state
+ else {
+ val serial = serial_db max state.serial
+ stamp_worker(db, worker_uuid, serial)
+
+ val sessions = state.sessions.pull(read_sessions_domain(db), read_sessions(db, _))
+ val pending = read_pending(db)
+ val running = pull0(read_running(db), state.running)
+ val results = pull1(read_results_domain(db), read_results(db, _), state.results)
+
+ state.copy(serial = serial, sessions = sessions, pending = pending,
+ running = running, results = results)
+ }
+ }
+
+ def update_database(
+ db: SQL.Database,
+ worker_uuid: String,
+ state: State,
+ old_state: State
+ ): State = {
+ val changed =
+ List(
+ update_sessions(db, state.sessions, old_state.sessions),
+ update_pending(db, state.pending, old_state.pending),
+ update_running(db, state.running, old_state.running),
+ update_results(db, state.results, old_state.results))
+
+ val state1 = if (changed.exists(identity)) state.inc_serial else state
+ if (state1.serial != state.serial) stamp_worker(db, worker_uuid, state1.serial)
+
+ state1
+ }
+ }
+
+ def read_builds(db: SQL.Database): List[Build] =
+ private_data.transaction_lock(db, create = true, label = "Build_Process.read_builds") {
+ private_data.read_builds(db)
+ }
+}
+
+
+
+/** main process **/
+
+class Build_Process(
+ protected final val build_context: Build.Context,
+ protected final val build_progress: Progress,
+ protected final val server: SSH.Server
+)
+extends AutoCloseable {
+ /* context */
+
+ protected final val store: Store = build_context.store
+ protected final val build_options: Options = store.options
+ protected final val build_deps: isabelle.Sessions.Deps = build_context.build_deps
+ protected final val hostname: String = build_context.hostname
+ protected final val build_uuid: String = build_context.build_uuid
+
+
+ /* global resources with common close() operation */
+
+ protected val _database_server: Option[SQL.Database] =
+ try { store.maybe_open_database_server(server = server) }
+ catch { case exn: Throwable => close(); throw exn }
+
+ private val _build_database: Option[SQL.Database] =
+ try {
+ for (db <- store.maybe_open_build_database(server = server)) yield {
+ if (!db.is_postgresql) {
+ error("Distributed build requires PostgreSQL (option build_database_server)")
+ }
+ val store_tables = db.is_postgresql
+ Build_Process.private_data.transaction_lock(db,
+ create = true,
+ label = "Build_Process.build_database"
+ ) {
+ Build_Process.private_data.clean_build(db)
+ if (store_tables) Store.private_data.tables.lock(db, create = true)
+ }
+ if (build_context.master) {
+ db.vacuum(Build_Process.private_data.tables.list)
+ if (store_tables) db.vacuum(Store.private_data.tables.list)
+ }
+ db
+ }
+ }
+ catch { case exn: Throwable => close(); throw exn }
+
+ protected val build_delay: Time = {
+ val option =
+ _build_database match {
+ case Some(db) if db.is_postgresql => "build_cluster_delay"
+ case _ => "build_delay"
+ }
+ build_options.seconds(option)
+ }
+
+ protected val _host_database: SQL.Database =
+ try { store.open_build_database(path = Host.private_data.database, server = server) }
+ catch { case exn: Throwable => close(); throw exn }
+
+ protected val (progress, worker_uuid) = synchronized {
+ if (_build_database.isEmpty) (build_progress, UUID.random().toString)
+ else {
+ try {
+ val db = store.open_build_database(Progress.private_data.database, server = server)
+ val progress =
+ new Database_Progress(db, build_progress,
+ input_messages = build_context.master,
+ output_stopped = build_context.master,
+ hostname = hostname,
+ context_uuid = build_uuid,
+ kind = "build_process",
+ timeout = Some(build_delay))
+ (progress, progress.agent_uuid)
+ }
+ catch { case exn: Throwable => close(); throw exn }
+ }
+ }
+
+ protected val log: Logger = Logger.make_system_log(progress, build_options)
+
+ protected def open_build_cluster(): Build_Cluster = {
+ val build_cluster = Build_Cluster.make(build_context, progress = build_progress)
+ build_cluster.open()
+ build_cluster
+ }
+
+ private val _build_cluster =
+ try {
+ if (build_context.master && _build_database.isDefined) open_build_cluster()
+ else Build_Cluster.none
+ }
+ catch { case exn: Throwable => close(); throw exn }
+
+ def close(): Unit = synchronized {
+ Option(_database_server).flatten.foreach(_.close())
+ Option(_build_database).flatten.foreach(_.close())
+ Option(_host_database).foreach(_.close())
+ Option(_build_cluster).foreach(_.close())
+ progress match {
+ case db_progress: Database_Progress => db_progress.exit(close = true)
+ case _ =>
+ }
+ }
+
+
+ /* global state: internal var vs. external database */
+
+ private var _state: Build_Process.State = Build_Process.State()
+
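+ // All state updates are bracketed by one database transaction: pull the
+ // latest external state, run the body on it, and push resulting changes
+ // back with an increased serial number (see pull_database/update_database).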
+ protected def synchronized_database[A](label: String)(body: => A): A =
+ synchronized {
+ _build_database match {
+ case None => body
+ case Some(db) =>
+ Build_Process.private_data.transaction_lock(db, label = label) {
+ val old_state = Build_Process.private_data.pull_database(db, worker_uuid, _state)
+ _state = old_state
+ val res = body
+ _state = Build_Process.private_data.update_database(db, worker_uuid, _state, old_state)
+ res
+ }
+ }
+ }
+
+
+ /* policy operations */
+
+ protected def init_state(state: Build_Process.State): Build_Process.State = {
+ val sessions1 = state.sessions.init(build_context, _database_server, progress = build_progress)
+
+ val old_pending = state.pending.iterator.map(_.name).toSet
+ val new_pending =
+ List.from(
+ for (session <- sessions1.iterator if !old_pending(session.name))
+ yield Build_Process.Task(session.name, session.deps, JSON.Object.empty, build_uuid))
+ val pending1 = new_pending ::: state.pending
+
+ state.copy(sessions = sessions1, pending = pending1)
+ }
+
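+ // Number of sessions to start next: a stopped master dispatches all
+ // remaining ready sessions at once, so that start_session can record
+ // them as cancelled; a stopped worker starts nothing.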
+ protected def next_jobs(state: Build_Process.State): List[String] = {
+ val limit = {
+ if (progress.stopped) { if (build_context.master) Int.MaxValue else 0 }
+ else build_context.max_jobs - state.build_running.length
+ }
+ if (limit > 0) state.next_ready.sortBy(_.name)(state.sessions.ordering).take(limit).map(_.name)
+ else Nil
+ }
+
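+ // Default node allocation: pick the next NUMA node of this host, taking
+ // the nodes of currently running jobs into account.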
+ protected def next_node_info(state: Build_Process.State, session_name: String): Host.Node_Info = {
+ def used_nodes: Set[Int] =
+ Set.from(for (job <- state.running.valuesIterator; i <- job.node_info.numa_node) yield i)
+ val numa_node = Host.next_numa_node(_host_database, hostname, state.numa_nodes, used_nodes)
+ Host.Node_Info(hostname, numa_node, Nil)
+ }
+
+ protected def start_session(
+ state: Build_Process.State,
+ session_name: String,
+ ancestor_results: List[Build_Process.Result]
+ ): Build_Process.State = {
+ val sources_shasum = state.sessions(session_name).sources_shasum
+
+ val input_shasum =
+ if (ancestor_results.isEmpty) ML_Process.bootstrap_shasum()
+ else SHA1.flat_shasum(ancestor_results.map(_.output_shasum))
+
+ val store_heap =
+ build_context.build_heap || Sessions.is_pure(session_name) ||
+ state.sessions.iterator.exists(_.ancestors.contains(session_name))
+
+ val (current, output_shasum) =
+ store.check_output(_database_server, session_name,
+ session_options = build_context.sessions_structure(session_name).options,
+ sources_shasum = sources_shasum,
+ input_shasum = input_shasum,
+ fresh_build = build_context.fresh_build,
+ store_heap = store_heap)
+
+ val finished = current && ancestor_results.forall(_.current)
+ val skipped = build_context.no_build
+ val cancelled = progress.stopped || !ancestor_results.forall(_.ok)
+
+ if (!skipped && !cancelled) {
+ val heaps = (session_name :: ancestor_results.map(_.name)).map(store.output_heap)
+ ML_Heap.restore(_database_server, heaps, cache = store.cache.compress)
+ }
+
+ val result_name = (session_name, worker_uuid, build_uuid)
+
+ if (finished) {
+ state
+ .remove_pending(session_name)
+ .make_result(result_name, Process_Result.ok, output_shasum, current = true)
+ }
+ else if (skipped) {
+ progress.echo("Skipping " + session_name + " ...", verbose = true)
+ state
+ .remove_pending(session_name)
+ .make_result(result_name, Process_Result.error, output_shasum)
+ }
+ else if (cancelled) {
+ if (build_context.master) {
+ progress.echo(session_name + " CANCELLED")
+ state
+ .remove_pending(session_name)
+ .make_result(result_name, Process_Result.undefined, output_shasum)
+ }
+ else state
+ }
+ else {
+ val node_info = next_node_info(state, session_name)
+
+ val print_node_info =
+ node_info.numa_node.isDefined || node_info.rel_cpus.nonEmpty ||
+ _build_database.isDefined && _build_database.get.is_postgresql
+
+ progress.echo(
+ (if (store_heap) "Building " else "Running ") + session_name +
+ (if (print_node_info) " (on " + node_info + ")" else "") + " ...")
+
+ val session = state.sessions(session_name)
+
+ val build =
+ Build_Job.start_session(build_context, session, progress, log, server,
+ build_deps.background(session_name), sources_shasum, input_shasum, node_info, store_heap)
+
+ val job =
+ Build_Process.Job(session_name, worker_uuid, build_uuid, node_info, Date.now(), Some(build))
+
+ state.add_running(job)
+ }
+ }
+
+
+ /* build process roles */
+
+ final def is_session_name(job_name: String): Boolean =
+ !Long_Name.is_qualified(job_name)
+
+ protected final def start_build(): Unit = synchronized_database("Build_Process.start_build") {
+ for (db <- _build_database) {
+ Build_Process.private_data.start_build(db, build_uuid, build_context.ml_platform,
+ build_context.sessions_structure.session_prefs)
+ }
+ }
+
+ protected final def stop_build(): Unit = synchronized_database("Build_Process.stop_build") {
+ for (db <- _build_database) {
+ Build_Process.private_data.stop_build(db, build_uuid)
+ }
+ }
+
+ protected final def start_worker(): Unit = synchronized_database("Build_Process.start_worker") {
+ for (db <- _build_database) {
+ _state = _state.inc_serial
+ Build_Process.private_data.start_worker(db, worker_uuid, build_uuid, _state.serial)
+ }
+ }
+
+ protected final def stop_worker(): Unit = synchronized_database("Build_Process.stop_worker") {
+ for (db <- _build_database) {
+ Build_Process.private_data.stamp_worker(db, worker_uuid, _state.serial, stop_now = true)
+ }
+ }
+
+
+ /* run */
+
+ def run(): Build.Results = {
+ synchronized_database("Build_Process.init") {
+ if (build_context.master) {
+ _build_cluster.init()
+ _state = init_state(_state)
+ }
+ _state = _state.copy(numa_nodes = Host.numa_nodes(enabled = build_context.numa_shuffling))
+ }
+
+ def finished(): Boolean = synchronized_database("Build_Process.test") {
+ if (!build_context.master && progress.stopped) _state.build_running.isEmpty
+ else _state.pending.isEmpty
+ }
+
+ def sleep(): Unit =
+ Isabelle_Thread.interrupt_handler(_ => progress.stop()) { build_delay.sleep() }
+
+ val build_progress_warnings = Synchronized(Set.empty[String])
+ def build_progress_warning(msg: String): Unit =
+ build_progress_warnings.change(seen =>
+ if (seen(msg)) seen else { build_progress.echo_warning(msg); seen + msg })
+
+ def check_jobs(): Boolean = synchronized_database("Build_Process.check_jobs") {
+ val jobs = next_jobs(_state)
+ for (name <- jobs) {
+ if (is_session_name(name)) {
+ if (build_context.sessions_structure.defined(name)) {
+ _state.ancestor_results(name) match {
+ case Some(results) => _state = start_session(_state, name, results)
+ case None =>
+ build_progress_warning("Bad build job " + quote(name) + ": no ancestor results")
+ }
+ }
+ else build_progress_warning("Bad build job " + quote(name) + ": no session info")
+ }
+ else build_progress_warning("Bad build job " + quote(name))
+ }
+ jobs.nonEmpty
+ }
+
+ if (finished()) {
+ progress.echo_warning("Nothing to build")
+ Build.Results(build_context)
+ }
+ else {
+ if (build_context.master) start_build()
+ start_worker()
+ _build_cluster.start()
+
+ if (build_context.master && !build_context.worker_active && _build_cluster.active()) {
+ build_progress.echo("Waiting for external workers ...")
+ }
+
+ try {
+ while (!finished()) {
+ synchronized_database("Build_Process.main") {
+ if (progress.stopped) _state.stop_running()
+
+ for (job <- _state.finished_running()) {
+ job.join_build match {
+ case None =>
+ _state = _state.remove_running(job.name)
+ case Some(result) =>
+ val result_name = (job.name, worker_uuid, build_uuid)
+ _state = _state.
+ remove_pending(job.name).
+ remove_running(job.name).
+ make_result(result_name,
+ result.process_result,
+ result.output_shasum,
+ node_info = job.node_info)
+ }
+ }
+ }
+
+ if (!check_jobs()) sleep()
+ }
+ }
+ finally {
+ _build_cluster.stop()
+ stop_worker()
+ if (build_context.master) stop_build()
+ }
+
+ synchronized_database("Build_Process.result") {
+ val results = for ((name, result) <- _state.results) yield name -> result.process_result
+ Build.Results(build_context, results = results, other_rc = _build_cluster.rc)
+ }
+ }
+ }
+
+
+ /* snapshot */
+
+ def snapshot(): Build_Process.Snapshot = synchronized_database("Build_Process.snapshot") {
+ val (builds, workers) =
+ _build_database match {
+ case None => (Nil, Nil)
+ case Some(db) =>
+ (Build_Process.private_data.read_builds(db),
+ Build_Process.private_data.read_workers(db))
+ }
+ Build_Process.Snapshot(
+ builds = builds,
+ workers = workers,
+ sessions = _state.sessions,
+ pending = _state.pending,
+ running = _state.running,
+ results = _state.results)
+ }
+
+
+ /* toString */
+
+ override def toString: String =
+ "Build_Process(worker_uuid = " + quote(worker_uuid) + ", build_uuid = " + quote(build_uuid) +
+ if_proper(build_context.master, ", master = true") + ")"
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/build_schedule.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,1573 @@
+/* Title: Pure/Build/build_schedule.scala
+ Author: Fabian Huch, TU Muenchen
+
+Build schedule generated by heuristic methods, allowing for more efficient builds.
+*/
+
+package isabelle
+
+
+import Host.Node_Info
+import scala.annotation.tailrec
+import scala.collection.mutable
+
+
+object Build_Schedule {
+ /* organized historic timing information (extracted from build logs) */
+
+ case class Timing_Entry(job_name: String, hostname: String, threads: Int, timing: Timing) {
+ def elapsed: Time = timing.elapsed
+ def proper_cpu: Option[Time] = timing.cpu.proper_ms.map(Time.ms)
+ }
+
+ case class Timing_Entries(entries: List[Timing_Entry]) {
+ require(entries.nonEmpty)
+
+ def is_empty = entries.isEmpty
+ def size = entries.length
+
+ lazy val by_job = entries.groupBy(_.job_name).view.mapValues(Timing_Entries(_)).toMap
+ lazy val by_threads = entries.groupBy(_.threads).view.mapValues(Timing_Entries(_)).toMap
+ lazy val by_hostname = entries.groupBy(_.hostname).view.mapValues(Timing_Entries(_)).toMap
+
+ def mean_time: Time = Timing_Data.mean_time(entries.map(_.elapsed))
+ def best_entry: Timing_Entry = entries.minBy(_.elapsed.ms)
+ }
+
+ object Timing_Data {
+ def median_timing(obs: List[Timing]): Timing = obs.sortBy(_.elapsed.ms).apply(obs.length / 2)
+
+ def median_time(obs: List[Time]): Time = obs.sortBy(_.ms).apply(obs.length / 2)
+
+ def mean_time(obs: Iterable[Time]): Time = Time.ms(obs.map(_.ms).sum / obs.size)
+
+ private def dummy_entries(host: Host, host_factor: Double) = {
+ val baseline = Time.minutes(5).scale(host_factor)
+ val gc = Time.seconds(10).scale(host_factor)
+ List(
+ Timing_Entry("dummy", host.name, 1, Timing(baseline, baseline, gc)),
+ Timing_Entry("dummy", host.name, 8, Timing(baseline.scale(0.2), baseline, gc)))
+ }
+
+ def make(
+ host_infos: Host_Infos,
+ build_history: List[(Build_Log.Meta_Info, Build_Log.Build_Info)],
+ ): Timing_Data = {
+ val hosts = host_infos.hosts
+ val measurements =
+ for {
+ (meta_info, build_info) <- build_history
+ build_host = meta_info.get(Build_Log.Prop.build_host)
+ (job_name, session_info) <- build_info.sessions.toList
+ if build_info.finished_sessions.contains(job_name)
+ hostname <- session_info.hostname.orElse(build_host).toList
+ host <- hosts.find(_.info.hostname == hostname).toList
+ threads = session_info.threads.getOrElse(host.info.num_cpus)
+ } yield (job_name, hostname, threads) -> session_info.timing
+
+ val entries =
+ if (measurements.isEmpty) {
+ val default_host = host_infos.hosts.sorted(host_infos.host_speeds).last
+ host_infos.hosts.flatMap(host =>
+ dummy_entries(host, host_infos.host_factor(default_host, host)))
+ }
+ else
+ measurements.groupMap(_._1)(_._2).toList.map {
+ case ((job_name, hostname, threads), timings) =>
+ Timing_Entry(job_name, hostname, threads, median_timing(timings))
+ }
+
+ new Timing_Data(Timing_Entries(entries), host_infos)
+ }
+
+ def load(host_infos: Host_Infos, log_database: SQL.Database): Timing_Data = {
+ val build_history =
+ for {
+ log_name <- log_database.execute_query_statement(
+ Build_Log.private_data.meta_info_table.select(List(Build_Log.Column.log_name)),
+ List.from[String], res => res.string(Build_Log.Column.log_name))
+ meta_info <- Build_Log.private_data.read_meta_info(log_database, log_name)
+ build_info = Build_Log.private_data.read_build_info(log_database, log_name)
+ } yield (meta_info, build_info)
+
+ make(host_infos, build_history)
+ }
+ }
+
+ class Timing_Data private(data: Timing_Entries, val host_infos: Host_Infos) {
+ private def inflection_point(last_mono: Int, next: Int): Int =
+ last_mono + ((next - last_mono) / 2)
+
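+ // Best thread count for a job: on each host, stop halfway between the
+ // best measured thread count and the next larger (worse) one, e.g.
+ // measurements at 8 (best) and 16 threads yield inflection_point(8, 16)
+ // = 12; the result is the minimum over all hosts, capped by max_threads.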
+ def best_threads(job_name: String, max_threads: Int): Int = {
+ val worse_threads =
+ data.by_job.get(job_name).toList.flatMap(_.by_hostname).flatMap {
+ case (hostname, data) =>
+ val best_threads = data.best_entry.threads
+ data.by_threads.keys.toList.sorted.find(_ > best_threads).map(
+ inflection_point(best_threads, _))
+ }
+ (max_threads :: worse_threads).min
+ }
+
+ private def hostname_factor(from: String, to: String): Double =
+ host_infos.host_factor(host_infos.the_host(from), host_infos.the_host(to))
+
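+ // Approximate the time for an unmeasured thread count: within the
+ // monotone (still improving) prefix of the measurements an Amdahl model
+ // is fitted; beyond the inflection point, where more threads no longer
+ // helped, piecewise linear interpolation is used instead.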
+ private def approximate_threads(entries_unsorted: List[(Int, Time)], threads: Int): Time = {
+ val entries = entries_unsorted.sortBy(_._1)
+
+ def sorted_prefix[A](xs: List[A], f: A => Long): List[A] =
+ xs match {
+ case x1 :: x2 :: xs =>
+ if (f(x1) <= f(x2)) x1 :: sorted_prefix(x2 :: xs, f) else x1 :: Nil
+ case xs => xs
+ }
+
+ def linear(p0: (Int, Time), p1: (Int, Time)): Time = {
+ val a = (p1._2 - p0._2).scale(1.0 / (p1._1 - p0._1))
+ val b = p0._2 - a.scale(p0._1)
+ (a.scale(threads) + b) max Time.zero
+ }
+
+ val mono_prefix = sorted_prefix(entries, e => -e._2.ms)
+
+ val is_mono = entries == mono_prefix
+ val in_prefix = mono_prefix.length > 1 && threads <= mono_prefix.last._1
+ val in_inflection =
+ !is_mono && mono_prefix.length > 1 && threads < entries.drop(mono_prefix.length).head._1
+ if (is_mono || in_prefix || in_inflection) {
+ // Model with Amdahl's law
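+ // t(n) = t_c + t_p/n, so for two measurements (n, t0) and (m, t1):
+ // t0 - t1 = t_p * (1/n - 1/m) = t_p * (m - n) / (n * m), and thus
+ // t_p = (t0 - t1) * n * m / (m - n); t_c then follows as t - t_p/n.
+ // Medians over all pairs/points make the fit robust against outliers.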
+ val t_p =
+ Timing_Data.median_time(for {
+ (n, t0) <- mono_prefix
+ (m, t1) <- mono_prefix
+ if m != n
+ } yield (t0 - t1).scale(n.toDouble * m / (m - n)))
+ val t_c =
+ Timing_Data.median_time(for ((n, t) <- mono_prefix) yield t - t_p.scale(1.0 / n))
+
+ def model(threads: Int): Time = (t_c + t_p.scale(1.0 / threads)) max Time.zero
+
+ if (is_mono || in_prefix) model(threads)
+ else {
+ val post_inflection = entries.drop(mono_prefix.length).head
+ val inflection_threads = inflection_point(mono_prefix.last._1, post_inflection._1)
+
+ if (threads <= inflection_threads) model(threads)
+ else linear((inflection_threads, model(inflection_threads)), post_inflection)
+ }
+ } else {
+ // Piecewise linear
+ val (p0, p1) =
+ if (entries.head._1 < threads && threads < entries.last._1) {
+ val split = entries.partition(_._1 < threads)
+ (split._1.last, split._2.head)
+ } else {
+ val piece = if (threads < entries.head._1) entries.take(2) else entries.takeRight(2)
+ (piece.head, piece.last)
+ }
+
+ linear(p0, p1)
+ }
+ }
+
+ private def unify_hosts(job_name: String, on_host: String): List[(Int, Time)] = {
+ def unify(hostname: String, data: Timing_Entries) =
+ data.mean_time.scale(hostname_factor(hostname, on_host))
+
+ for {
+ data <- data.by_job.get(job_name).toList
+ (threads, data) <- data.by_threads
+ entries = data.by_hostname.toList.map(unify)
+ } yield threads -> Timing_Data.median_time(entries)
+ }
+
+ def estimate_threads(job_name: String, hostname: String, threads: Int): Option[Time] = {
+ def try_approximate(data: Timing_Entries): Option[Time] = {
+ val entries =
+ data.by_threads.toList match {
+ case List((i, Timing_Entries(List(entry)))) if i != 1 =>
+ (i, data.mean_time) :: entry.proper_cpu.map(1 -> _).toList
+ case entries => entries.map((threads, data) => threads -> data.mean_time)
+ }
+ if (entries.size < 2) None else Some(approximate_threads(entries, threads))
+ }
+
+ for {
+ data <- data.by_job.get(job_name)
+ data <- data.by_hostname.get(hostname)
+ time <- data.by_threads.get(threads).map(_.mean_time).orElse(try_approximate(data))
+ } yield time
+ }
+
+ def global_threads_factor(from: Int, to: Int): Double = {
+ def median(xs: Iterable[Double]): Double = xs.toList.sorted.apply(xs.size / 2)
+
+ val estimates =
+ for {
+ (hostname, data) <- data.by_hostname
+ job_name <- data.by_job.keys
+ from_time <- estimate_threads(job_name, hostname, from)
+ to_time <- estimate_threads(job_name, hostname, to)
+ } yield from_time.ms.toDouble / to_time.ms
+
+ if (estimates.nonEmpty) median(estimates)
+ else {
+ // unify hosts
+ val estimates =
+ for {
+ (job_name, data) <- data.by_job
+ hostname = data.by_hostname.keys.head
+ entries = unify_hosts(job_name, hostname)
+ if entries.length > 1
+ } yield
+ approximate_threads(entries, from).ms.toDouble / approximate_threads(entries, to).ms
+
+ if (estimates.nonEmpty) median(estimates)
+ else from.toDouble / to.toDouble
+ }
+ }
+
+ private var cache: Map[(String, String, Int), Time] = Map.empty
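+ // Estimated elapsed time, with fallbacks in decreasing order of quality:
+ // measured time for (job, host, threads); interpolation over threads on
+ // the same host; estimates from other hosts scaled by benchmark factors;
+ // unified data points over all hosts; and finally the global threads
+ // curve. Results are memoized in the cache above.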
+ def estimate(job_name: String, hostname: String, threads: Int): Time = {
+ def estimate: Time =
+ data.by_job.get(job_name) match {
+ case None =>
+ // no data for job, take average of other jobs for given threads
+ val job_estimates = data.by_job.keys.flatMap(estimate_threads(_, hostname, threads))
+ if (job_estimates.nonEmpty) Timing_Data.mean_time(job_estimates)
+ else {
+ // no other job to estimate from, use global curve to approximate any other job
+ val (threads1, data1) = data.by_threads.head
+ data1.mean_time.scale(global_threads_factor(threads1, threads))
+ }
+
+ case Some(data) =>
+ data.by_threads.get(threads) match {
+ case None => // interpolate threads
+ estimate_threads(job_name, hostname, threads).getOrElse {
+ // per machine, try to approximate config for threads
+ val approximated =
+ for {
+ hostname1 <- data.by_hostname.keys
+ estimate <- estimate_threads(job_name, hostname1, threads)
+ factor = hostname_factor(hostname1, hostname)
+ } yield estimate.scale(factor)
+
+ if (approximated.nonEmpty) Timing_Data.mean_time(approximated)
+ else {
+ // no single machine where config can be approximated, unify data points
+ val unified_entries = unify_hosts(job_name, hostname)
+
+ if (unified_entries.length > 1) approximate_threads(unified_entries, threads)
+ else {
+ // only single data point, use global curve to approximate
+ val (job_threads, job_time) = unified_entries.head
+ job_time.scale(global_threads_factor(job_threads, threads))
+ }
+ }
+ }
+
+ case Some(data) => // time for job/thread exists, interpolate machine
+ data.by_hostname.get(hostname).map(_.mean_time).getOrElse {
+ Timing_Data.median_time(
+ data.by_hostname.toList.map((hostname1, data) =>
+ data.mean_time.scale(hostname_factor(hostname1, hostname))))
+ }
+ }
+ }
+
+ cache.get(job_name, hostname, threads) match {
+ case Some(time) => time
+ case None =>
+ val time = estimate
+ cache = cache + ((job_name, hostname, threads) -> time)
+ time
+ }
+ }
+ }
+
+
+ /* host information */
+
+ case class Host(info: isabelle.Host.Info, build: Build_Cluster.Host) {
+ def name: String = info.hostname
+ def num_cpus: Int = info.num_cpus
+ }
+
+ object Host_Infos {
+ def dummy: Host_Infos =
+ new Host_Infos(
+ List(Host(isabelle.Host.Info("dummy", Nil, 8, Some(1.0)), Build_Cluster.Host("dummy"))))
+
+ def load(build_hosts: List[Build_Cluster.Host], db: SQL.Database): Host_Infos = {
+ def get_host(build_host: Build_Cluster.Host): Host = {
+ val info =
+ isabelle.Host.read_info(db, build_host.name).getOrElse(
+ error("No benchmark for " + quote(build_host.name)))
+ Host(info, build_host)
+ }
+
+ new Host_Infos(build_hosts.map(get_host))
+ }
+ }
+
+ class Host_Infos private(val hosts: List[Host]) {
+ require(hosts.nonEmpty)
+
+ private val by_hostname = hosts.map(host => host.name -> host).toMap
+
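+ // Relative speed factor between two hosts. The benchmark scores are
+ // always present here: Host_Infos.load fails early for hosts without a
+ // benchmark entry, and the dummy host carries an explicit score.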
+ def host_factor(from: Host, to: Host): Double =
+ from.info.benchmark_score.get / to.info.benchmark_score.get
+
+ val host_speeds: Ordering[Host] =
+ Ordering.fromLessThan((host1, host2) => host_factor(host1, host2) < 1)
+
+ def the_host(hostname: String): Host =
+ by_hostname.getOrElse(hostname, error("Unknown host " + quote(hostname)))
+ def the_host(node_info: Node_Info): Host = the_host(node_info.hostname)
+
+ def num_threads(node_info: Node_Info): Int =
+ if (node_info.rel_cpus.nonEmpty) node_info.rel_cpus.length
+ else the_host(node_info).info.num_cpus
+
+ def available(state: Build_Process.State): Resources = {
+ val allocated =
+ state.running.values.map(_.node_info).groupMapReduce(the_host)(List(_))(_ ::: _)
+ Resources(this, allocated)
+ }
+ }
+
+
+ /* offline tracking of job configurations and resource allocations */
+
+ case class Config(job_name: String, node_info: Node_Info) {
+ def job_of(start_time: Time): Build_Process.Job =
+ Build_Process.Job(job_name, "", "", node_info, Date(start_time), None)
+ }
+
+ case class Resources(
+ host_infos: Host_Infos,
+ allocated_nodes: Map[Host, List[Node_Info]]
+ ) {
+ def unused_nodes(host: Host, threads: Int): List[Node_Info] =
+ if (!available(host, threads)) Nil
+ else {
+ val node = next_node(host, threads)
+ node :: allocate(node).unused_nodes(host, threads)
+ }
+
+ def unused_nodes(threads: Int): List[Node_Info] =
+ host_infos.hosts.flatMap(unused_nodes(_, threads))
+
+ def allocated(host: Host): List[Node_Info] = allocated_nodes.getOrElse(host, Nil)
+
+ def allocate(node: Node_Info): Resources = {
+ val host = host_infos.the_host(node)
+ copy(allocated_nodes = allocated_nodes + (host -> (node :: allocated(host))))
+ }
+
+ def try_allocate_tasks(
+ hosts: List[(Host, Int)],
+ tasks: List[(Build_Process.Task, Int, Int)],
+ ): (List[Config], Resources) =
+ tasks match {
+ case Nil => (Nil, this)
+ case (task, min_threads, max_threads) :: tasks =>
+ val (config, resources) =
+ hosts.find((host, _) => available(host, min_threads)) match {
+ case Some((host, host_max_threads)) =>
+ val free_threads = host.info.num_cpus - ((host.build.jobs - 1) * host_max_threads)
+ val node_info = next_node(host, (min_threads max free_threads) min max_threads)
+ (Some(Config(task.name, node_info)), allocate(node_info))
+ case None => (None, this)
+ }
+ val (configs, resources1) = resources.try_allocate_tasks(hosts, tasks)
+ (configs ++ config, resources1)
+ }
+
+ def next_node(host: Host, threads: Int): Node_Info = {
+ val numa_node_num_cpus = host.info.num_cpus / (host.info.numa_nodes.length max 1)
+ def explicit_cpus(node_info: Node_Info): List[Int] =
+ if (node_info.rel_cpus.nonEmpty) node_info.rel_cpus else (0 until numa_node_num_cpus).toList
+
+ val used_nodes = allocated(host).groupMapReduce(_.numa_node)(explicit_cpus)(_ ::: _)
+
+ val available_nodes = host.info.numa_nodes
+ val numa_node =
+ if (!host.build.numa) None
+ else available_nodes.sortBy(n => used_nodes.getOrElse(Some(n), Nil).length).headOption
+
+ val used_cpus = used_nodes.getOrElse(numa_node, Nil).toSet
+ val available_cpus = (0 until numa_node_num_cpus).filterNot(used_cpus.contains).toList
+
+ val rel_cpus = if (available_cpus.length >= threads) available_cpus.take(threads) else Nil
+
+ Node_Info(host.name, numa_node, rel_cpus)
+ }
+
+ def available(host: Host, threads: Int): Boolean = {
+ val used = allocated(host)
+
+ if (used.length >= host.build.jobs) false
+ else {
+ if (host.info.numa_nodes.length <= 1)
+ used.map(host_infos.num_threads).sum + threads <= host.info.num_cpus
+ else {
+ def node_threads(n: Int): Int =
+ used.filter(_.numa_node.contains(n)).map(host_infos.num_threads).sum
+
+ host.info.numa_nodes.exists(
+ node_threads(_) + threads <= host.info.num_cpus / host.info.numa_nodes.length)
+ }
+ }
+ }
+ }
+
+
+ /* schedule generation */
+
+ object Schedule {
+ case class Node(job_name: String, node_info: Node_Info, start: Date, duration: Time) {
+ def end: Date = Date(start.time + duration)
+ }
+
+ type Graph = isabelle.Graph[String, Node]
+
+ def init(build_uuid: String): Schedule = Schedule(build_uuid, "none", Date.now(), Graph.empty)
+ }
+
+ case class Schedule(
+ build_uuid: String,
+ generator: String,
+ start: Date,
+ graph: Schedule.Graph,
+ serial: Long = 0,
+ ) {
+ require(serial >= 0, "serial underflow")
+ def inc_serial: Schedule = {
+ require(serial < Long.MaxValue, "serial overflow")
+ copy(serial = serial + 1)
+ }
+
+ def end: Date =
+ if (graph.is_empty) start
+ else graph.maximals.map(graph.get_node).map(_.end).maxBy(_.unix_epoch)
+
+ def duration: Time = end.time - start.time
+ def message: String = "Estimated " + duration.message_hms + " build time with " + generator
+
+ def deviation(other: Schedule): Time = Time.ms((end.time - other.end.time).ms.abs)
+
+ def num_built(state: Build_Process.State): Int = graph.keys.count(state.results.contains)
+ def elapsed(): Time = Time.now() - start.time
+ def is_empty: Boolean = graph.is_empty
+ def is_outdated(options: Options, state: Build_Process.State): Boolean =
+ if (is_empty) true
+ else {
+ num_built(state) > options.int("build_schedule_outdated_limit") &&
+ elapsed() > options.seconds("build_schedule_outdated_delay")
+ }
+
+ def next(hostname: String, state: Build_Process.State): List[String] =
+ for {
+ task <- state.next_ready
+ node = graph.get_node(task.name)
+ if hostname == node.node_info.hostname
+ if graph.imm_preds(node.job_name).subsetOf(state.results.keySet)
+ } yield task.name
+
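+ // Adjust the schedule to the current build state: nodes of finished
+ // sessions are dropped, running nodes are shortened by their elapsed
+ // time, and start times are shifted such that every node starts after
+ // all of its predecessors.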
+ def update(state: Build_Process.State): Schedule = {
+ val start1 = Date.now()
+ val pending = state.pending.map(_.name).toSet
+
+ def shift_elapsed(graph: Schedule.Graph, name: String): Schedule.Graph =
+ graph.map_node(name, { node =>
+ val elapsed = start1.time - state.running(name).start_date.time
+ node.copy(duration = node.duration - elapsed)
+ })
+
+ def shift_starts(graph: Schedule.Graph, name: String): Schedule.Graph =
+ graph.map_node(name, { node =>
+ val starts = start1 :: graph.imm_preds(node.job_name).toList.map(graph.get_node(_).end)
+ node.copy(start = starts.max(Date.Ordering))
+ })
+
+ val graph0 = state.running.keys.foldLeft(graph.restrict(pending.contains))(shift_elapsed)
+ val graph1 = graph0.topological_order.foldLeft(graph0)(shift_starts)
+
+ copy(start = start1, graph = graph1)
+ }
+ }
+
+ case class State(build_state: Build_Process.State, current_time: Time, finished: Schedule) {
+ def start(config: Config): State =
+ copy(build_state =
+ build_state.copy(running = build_state.running +
+ (config.job_name -> config.job_of(current_time))))
+
+ def step(timing_data: Timing_Data): State = {
+ val remaining =
+ build_state.running.values.toList.map { job =>
+ val elapsed = current_time - job.start_date.time
+ val threads = timing_data.host_infos.num_threads(job.node_info)
+ val predicted = timing_data.estimate(job.name, job.node_info.hostname, threads)
+ val remaining = if (elapsed > predicted) Time.zero else predicted - elapsed
+ job -> remaining
+ }
+
+ if (remaining.isEmpty) error("Schedule step without running sessions")
+ else {
+ val (job, elapsed) = remaining.minBy(_._2.ms)
+ val now = current_time + elapsed
+ val node = Schedule.Node(job.name, job.node_info, job.start_date, now - job.start_date.time)
+
+ val host_preds =
+ for {
+ (name, (pred_node, _)) <- finished.graph.iterator.toSet
+ if pred_node.node_info.hostname == job.node_info.hostname
+ if pred_node.end.time <= node.start.time
+ } yield name
+ val build_preds =
+ build_state.sessions.graph.imm_preds(job.name).filter(finished.graph.defined)
+ val preds = build_preds ++ host_preds
+
+ val graph = preds.foldLeft(finished.graph.new_node(job.name, node))(_.add_edge(_, job.name))
+
+ val build_state1 = build_state.remove_running(job.name).remove_pending(job.name)
+ State(build_state1, now, finished.copy(graph = graph))
+ }
+ }
+
+ def is_finished: Boolean = build_state.pending.isEmpty && build_state.running.isEmpty
+ }
+
+ trait Scheduler { def build_schedule(build_state: Build_Process.State): Schedule }
+
+ abstract class Heuristic(timing_data: Timing_Data, build_uuid: String)
+ extends Scheduler {
+ val host_infos = timing_data.host_infos
+ val ordered_hosts = host_infos.hosts.sorted(host_infos.host_speeds)
+
+ def next(state: Build_Process.State): List[Config]
+
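+ // Generate a schedule by simulation: repeatedly start all configurations
+ // proposed by the heuristic, then fast-forward to the next finishing
+ // session, until nothing is pending or running.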
+ def build_schedule(build_state: Build_Process.State): Schedule = {
+ @tailrec
+ def simulate(state: State): State =
+ if (state.is_finished) state
+ else {
+ val state1 = next(state.build_state).foldLeft(state)(_.start(_)).step(timing_data)
+ simulate(state1)
+ }
+
+ val start = Date.now()
+ val end_state =
+ simulate(State(build_state, start.time, Schedule(build_uuid, toString, start, Graph.empty)))
+
+ end_state.finished
+ }
+ }
+
+ class Default_Heuristic(timing_data: Timing_Data, options: Options, build_uuid: String)
+ extends Heuristic(timing_data, build_uuid) {
+ override def toString: String = "default build heuristic"
+
+ def host_threads(host: Host): Int = {
+ val m = (options ++ host.build.options).int("threads")
+ if (m > 0) m else (host.num_cpus max 1) min 8
+ }
+
+ def next_jobs(resources: Resources, sorted_jobs: List[String], host: Host): List[Config] =
+ sorted_jobs.zip(resources.unused_nodes(host, host_threads(host))).map(Config(_, _))
+
+ def next(state: Build_Process.State): List[Config] = {
+ val sorted_jobs = state.next_ready.sortBy(_.name)(state.sessions.ordering).map(_.name)
+ val resources = host_infos.available(state)
+
+ host_infos.hosts.foldLeft((sorted_jobs, List.empty[Config])) {
+ case ((jobs, res), host) =>
+ val configs = next_jobs(resources, jobs, host)
+ val config_jobs = configs.map(_.job_name).toSet
+ (jobs.filterNot(config_jobs.contains), configs ::: res)
+ }._2
+ }
+ }
+
+ class Meta_Heuristic(heuristics: List[Heuristic]) extends Scheduler {
+ require(heuristics.nonEmpty)
+
+ def best_result(state: Build_Process.State): (Heuristic, Schedule) =
+ heuristics.map(heuristic =>
+ heuristic -> heuristic.build_schedule(state)).minBy(_._2.duration.ms)
+
+ def next(state: Build_Process.State): List[Config] = best_result(state)._1.next(state)
+
+ def build_schedule(state: Build_Process.State): Schedule = best_result(state)._2
+ }
+
+
+ /* heuristics */
+
+ abstract class Path_Heuristic(
+ timing_data: Timing_Data,
+ sessions_structure: Sessions.Structure,
+ max_threads_limit: Int,
+ build_uuid: String
+ ) extends Heuristic(timing_data, build_uuid) {
+ /* pre-computed properties for efficient heuristic */
+
+ val max_threads = host_infos.hosts.map(_.info.num_cpus).max min max_threads_limit
+
+ type Node = String
+ val build_graph = sessions_structure.build_graph
+
+ val minimals = build_graph.minimals
+ val maximals = build_graph.maximals
+
+ def all_preds(node: Node): Set[Node] = build_graph.all_preds(List(node)).toSet
+ val maximals_all_preds = maximals.map(node => node -> all_preds(node)).toMap
+
+ def best_time(node: Node): Time = {
+ val host = ordered_hosts.last
+ val threads = timing_data.best_threads(node, max_threads) min host.info.num_cpus
+ timing_data.estimate(node, host.name, threads)
+ }
+ val best_times = build_graph.keys.map(node => node -> best_time(node)).toMap
+
+ val succs_max_time_ms = build_graph.node_height(best_times(_).ms)
+ def max_time(node: Node): Time = Time.ms(succs_max_time_ms(node)) + best_times(node)
+ def max_time(task: Build_Process.Task): Time = max_time(task.name)
+
+ def path_times(minimals: List[Node]): Map[Node, Time] = {
+ def time_ms(node: Node): Long = best_times(node).ms
+ val path_times_ms = build_graph.reachable_length(time_ms, build_graph.imm_succs, minimals)
+ path_times_ms.view.mapValues(Time.ms).toMap
+ }
+
+ def path_max_times(minimals: List[Node]): Map[Node, Time] =
+ path_times(minimals).toList.map((node, time) => node -> (time + max_time(node))).toMap
+
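+ // Maximal number of nodes that ever run simultaneously when the given
+ // nodes (with their remaining times) proceed through the build graph
+ // with unbounded resources: an upper bound for useful parallelism.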
+ def parallel_paths(running: List[(Node, Time)], pred: Node => Boolean = _ => true): Int = {
+ def start(node: Node): (Node, Time) = node -> best_times(node)
+
+ def pass_time(elapsed: Time)(node: Node, time: Time): (Node, Time) =
+ node -> (time - elapsed)
+
+ def parallel_paths(running: Map[Node, Time]): (Int, Map[Node, Time]) =
+ if (running.isEmpty) (0, running)
+ else {
+ def get_next(node: Node): List[Node] =
+ build_graph.imm_succs(node).filter(pred).filter(
+ build_graph.imm_preds(_).intersect(running.keySet) == Set(node)).toList
+
+ val (next, elapsed) = running.minBy(_._2.ms)
+ val (remaining, finished) =
+ running.toList.map(pass_time(elapsed)).partition(_._2 > Time.zero)
+
+ // elapsed time was already subtracted above; finished nodes are
+ // replaced by their ready successors, starting at their full best time
+ val running1 =
+ remaining.toMap ++ finished.map(_._1).flatMap(get_next).map(start)
+ val (res, running2) = parallel_paths(running1)
+ (res max running.size, running2)
+ }
+
+ parallel_paths(running.toMap)._1
+ }
+ }
+
+
+ object Path_Time_Heuristic {
+ sealed trait Critical_Criterion
+ case class Absolute_Time(time: Time) extends Critical_Criterion {
+ override def toString: String = "absolute time (" + time.message_hms + ")"
+ }
+ case class Relative_Time(factor: Double) extends Critical_Criterion {
+ override def toString: String = "relative time (" + factor + ")"
+ }
+
+ sealed trait Parallel_Strategy
+ case class Fixed_Thread(threads: Int) extends Parallel_Strategy {
+ override def toString: String = "fixed threads (" + threads + ")"
+ }
+ case class Time_Based_Threads(f: Time => Int) extends Parallel_Strategy {
+ override def toString: String = "time based threads"
+ }
+
+ sealed trait Host_Criterion
+ case object Critical_Nodes extends Host_Criterion {
+ override def toString: String = "per critical node"
+ }
+ case class Fixed_Fraction(fraction: Double) extends Host_Criterion {
+ override def toString: String = "fixed fraction (" + fraction + ")"
+ }
+ case class Host_Speed(min_factor: Double) extends Host_Criterion {
+ override def toString: String = "host speed (" + min_factor + ")"
+ }
+ }
+
+ class Path_Time_Heuristic(
+ is_critical: Path_Time_Heuristic.Critical_Criterion,
+ parallel_threads: Path_Time_Heuristic.Parallel_Strategy,
+ host_criterion: Path_Time_Heuristic.Host_Criterion,
+ timing_data: Timing_Data,
+ sessions_structure: Sessions.Structure,
+ build_uuid: String,
+ max_threads_limit: Int = 8
+ ) extends Path_Heuristic(timing_data, sessions_structure, max_threads_limit, build_uuid) {
+ import Path_Time_Heuristic.*
+
+ override def toString: String = {
+ val params =
+ List(
+ "critical: " + is_critical,
+ "parallel: " + parallel_threads,
+ "fast hosts: " + host_criterion)
+ "path time heuristic (" + params.mkString(", ") + ")"
+ }
+
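+ // If there are enough nodes for all ready sessions, allocate each with
+ // its optimal thread count. Otherwise reserve the fastest hosts for
+ // sessions on critical paths (at optimal threads) and distribute the
+ // remaining sessions over the other hosts with reduced parallelism.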
+ def next(state: Build_Process.State): List[Config] = {
+ val resources = host_infos.available(state)
+
+ def best_threads(task: Build_Process.Task): Int =
+ timing_data.best_threads(task.name, max_threads)
+
+ val rev_ordered_hosts = ordered_hosts.reverse.map(_ -> max_threads)
+
+ val available_nodes =
+ host_infos.available(state.copy(running = Map.empty))
+ .unused_nodes(max_threads)
+ .sortBy(node => host_infos.the_host(node))(host_infos.host_speeds).reverse
+
+ def remaining_time(node: Node): (Node, Time) =
+ state.running.get(node) match {
+ case None => node -> best_time(node)
+ case Some(job) =>
+ val estimate =
+ timing_data.estimate(job.name, job.node_info.hostname,
+ host_infos.num_threads(job.node_info))
+ node -> ((estimate - (Time.now() - job.start_date.time)) max Time.zero)
+ }
+
+ val max_parallel = parallel_paths(state.ready.map(_.name).map(remaining_time))
+ val next_sorted = state.next_ready.sortBy(max_time(_).ms).reverse
+
+ if (max_parallel <= available_nodes.length) {
+ val all_tasks = next_sorted.map(task => (task, best_threads(task), best_threads(task)))
+ resources.try_allocate_tasks(rev_ordered_hosts, all_tasks)._1
+ }
+ else {
+ def is_critical(time: Time): Boolean =
+ this.is_critical match {
+ case Absolute_Time(threshold) => time > threshold
+ case Relative_Time(factor) => time > minimals.map(max_time).maxBy(_.ms).scale(factor)
+ }
+
+ val critical_minimals = state.ready.filter(task => is_critical(max_time(task))).map(_.name)
+ val critical_nodes =
+ path_max_times(critical_minimals).filter((_, time) => is_critical(time)).keySet
+
+ val (critical, other) = next_sorted.partition(task => critical_nodes.contains(task.name))
+
+ val critical_tasks = critical.map(task => (task, best_threads(task), best_threads(task)))
+
+ def parallel_threads(task: Build_Process.Task): Int =
+ this.parallel_threads match {
+ case Fixed_Thread(threads) => threads
+ case Time_Based_Threads(f) => f(best_time(task.name))
+ }
+
+ val other_tasks = other.map(task => (task, parallel_threads(task), best_threads(task)))
+
+ val max_critical_parallel =
+ parallel_paths(critical_minimals.map(remaining_time), critical_nodes.contains)
+ val max_critical_hosts =
+ available_nodes.take(max_critical_parallel).map(_.hostname).distinct.length
+
+ val split =
+ this.host_criterion match {
+ case Critical_Nodes => max_critical_hosts
+ case Fixed_Fraction(fraction) =>
+ ((rev_ordered_hosts.length * fraction).ceil.toInt max 1) min max_critical_hosts
+ case Host_Speed(min_factor) =>
+ val best = rev_ordered_hosts.head._1.info.benchmark_score.get
+ val num_fast =
+ rev_ordered_hosts.count(_._1.info.benchmark_score.exists(_ >= best * min_factor))
+ num_fast min max_critical_hosts
+ }
+
+ val (critical_hosts, other_hosts) = rev_ordered_hosts.splitAt(split)
+
+ val (configs1, resources1) = resources.try_allocate_tasks(critical_hosts, critical_tasks)
+ val (configs2, _) = resources1.try_allocate_tasks(other_hosts, other_tasks)
+
+ configs1 ::: configs2
+ }
+ }
+ }
+
+
+ /* master and slave processes for scheduled build */
+
+ class Scheduled_Build_Process(
+ build_context: Build.Context,
+ build_progress: Progress,
+ server: SSH.Server,
+ ) extends Build_Process(build_context, build_progress, server) {
+ /* global resources with common close() operation */
+
+ protected final lazy val _build_database: Option[SQL.Database] =
+ try {
+ for (db <- store.maybe_open_build_database(server = server)) yield {
+ if (build_context.master) {
+ Build_Schedule.private_data.transaction_lock(
+ db,
+ create = true,
+ label = "Build_Schedule.build_database"
+ ) { Build_Schedule.private_data.clean_build_schedules(db) }
+ db.vacuum(Build_Schedule.private_data.tables.list)
+ }
+ db
+ }
+ }
+ catch { case exn: Throwable => close(); throw exn }
+
+ override def close(): Unit = {
+ Option(_build_database).flatten.foreach(_.close())
+ super.close()
+ }
+
+
+ /* global state: internal var vs. external database */
+
+ protected var _schedule = Schedule.init(build_uuid)
+
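+ // Extend the state transaction of the base class: the schedule is
+ // pulled from and pushed to the database in the same way as the build
+ // process state.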
+ override protected def synchronized_database[A](label: String)(body: => A): A =
+ super.synchronized_database(label) {
+ _build_database match {
+ case None => body
+ case Some(db) =>
+ Build_Schedule.private_data.transaction_lock(db, label = label) {
+ val old_schedule = Build_Schedule.private_data.pull_schedule(db, _schedule)
+ _schedule = old_schedule
+ val res = body
+ _schedule = Build_Schedule.private_data.update_schedule(db, _schedule, old_schedule)
+ res
+ }
+ }
+ }
+
+
+ /* build process */
+
+ override def next_node_info(state: Build_Process.State, session_name: String): Node_Info =
+ _schedule.graph.get_node(session_name).node_info
+
+ override def next_jobs(state: Build_Process.State): List[String] =
+ if (progress.stopped || _schedule.is_empty) Nil else _schedule.next(hostname, state)
+ }
+
+ abstract class Scheduler_Build_Process(
+ build_context: Build.Context,
+ build_progress: Progress,
+ server: SSH.Server,
+ ) extends Scheduled_Build_Process(build_context, build_progress, server) {
+ require(build_context.master)
+
+ protected val start_date = Date.now()
+
+ def init_scheduler(timing_data: Timing_Data): Scheduler
+
+
+ /* global resources with common close() operation */
+
+ private final lazy val _log_store: Build_Log.Store = Build_Log.store(build_options)
+ private final lazy val _log_database: SQL.Database =
+ try {
+ val db = _log_store.open_database(server = this.server)
+ _log_store.init_database(db)
+ db
+ }
+ catch { case exn: Throwable => close(); throw exn }
+
+ override def close(): Unit = {
+ Option(_log_database).foreach(_.close())
+ super.close()
+ }
+
+
+ /* previous results via build log */
+
+ override def open_build_cluster(): Build_Cluster = {
+ val build_cluster = super.open_build_cluster()
+ build_cluster.init()
+
+ Benchmark.benchmark_requirements(build_options)
+
+ if (build_context.max_jobs > 0) {
+ val benchmark_options = build_options.string("build_hostname") = hostname
+ Benchmark.benchmark(benchmark_options, progress)
+ }
+ build_cluster.benchmark()
+
+ for (db <- _build_database)
+ Build_Process.private_data.transaction_lock(db, label = "Scheduler_Build_Process.init") {
+ Build_Process.private_data.clean_build(db)
+ }
+
+ build_cluster
+ }
+
+ private val timing_data: Timing_Data = {
+ val cluster_hosts: List[Build_Cluster.Host] =
+ if (build_context.max_jobs == 0) build_context.build_hosts
+ else {
+ val local_build_host =
+ Build_Cluster.Host(
+ hostname, jobs = build_context.max_jobs, numa = build_context.numa_shuffling)
+ local_build_host :: build_context.build_hosts
+ }
+
+ val host_infos = Host_Infos.load(cluster_hosts, _host_database)
+ Timing_Data.load(host_infos, _log_database)
+ }
+ private val scheduler = init_scheduler(timing_data)
+
+ def write_build_log(results: Build.Results, state: Build_Process.State.Results): Unit = {
+ val sessions =
+ for {
+ (session_name, result) <- state.toList
+ if !result.current
+ } yield {
+ val info = build_context.sessions_structure(session_name)
+ val entry =
+ if (!results.cancelled(session_name)) {
+ val status =
+ if (result.ok) Build_Log.Session_Status.finished
+ else Build_Log.Session_Status.failed
+
+ Build_Log.Session_Entry(
+ chapter = info.chapter,
+ groups = info.groups,
+ hostname = Some(result.node_info.hostname),
+ threads = Some(timing_data.host_infos.num_threads(result.node_info)),
+ timing = result.process_result.timing,
+ sources = Some(result.output_shasum.digest.toString),
+ status = Some(status))
+ }
+ else
+ Build_Log.Session_Entry(
+ chapter = info.chapter,
+ groups = info.groups,
+ status = Some(Build_Log.Session_Status.cancelled))
+ session_name -> entry
+ }
+
+ val settings =
+ Build_Log.Settings.all_settings.map(_.name).map(name =>
+ name -> Isabelle_System.getenv(name))
+ val props =
+ List(
+ Build_Log.Prop.build_id.name -> build_context.build_uuid,
+ Build_Log.Prop.build_engine.name -> build_context.engine.name,
+ Build_Log.Prop.build_host.name -> hostname,
+ Build_Log.Prop.build_start.name -> Build_Log.print_date(start_date))
+
+ val meta_info = Build_Log.Meta_Info(props, settings)
+ val build_info = Build_Log.Build_Info(sessions.toMap)
+ val log_name = Build_Log.log_filename(engine = build_context.engine.name, date = start_date)
+
+ Build_Log.private_data.update_sessions(
+ _log_database, _log_store.cache.compress, log_name.file_name, build_info)
+ Build_Log.private_data.update_meta_info(_log_database, log_name.file_name, meta_info)
+ }
+
+
+ /* build process */
+
+ def is_current(state: Build_Process.State, session_name: String): Boolean =
+ state.ancestor_results(session_name) match {
+ case Some(ancestor_results) if ancestor_results.forall(_.current) =>
+ val sources_shasum = state.sessions(session_name).sources_shasum
+
+ val input_shasum =
+ if (ancestor_results.isEmpty) ML_Process.bootstrap_shasum()
+ else SHA1.flat_shasum(ancestor_results.map(_.output_shasum))
+
+ val store_heap =
+ build_context.build_heap || Sessions.is_pure(session_name) ||
+ state.sessions.iterator.exists(_.ancestors.contains(session_name))
+
+ store.check_output(
+ _database_server, session_name,
+ session_options = build_context.sessions_structure(session_name).options,
+ sources_shasum = sources_shasum,
+ input_shasum = input_shasum,
+ fresh_build = build_context.fresh_build,
+ store_heap = store_heap)._1
+ case _ => false
+ }
+
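+ // Sessions that are already up-to-date bypass scheduling entirely. A new
+ // schedule is only generated when the current one is outdated, and the
+ // old (updated) schedule is kept if it still finishes earlier.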
+ override def next_jobs(state: Build_Process.State): List[String] =
+ if (progress.stopped) state.next_ready.map(_.name)
+ else if (!_schedule.is_outdated(build_options, state)) _schedule.next(hostname, state)
+ else {
+ val current = state.next_ready.filter(task => is_current(state, task.name))
+ if (current.nonEmpty) current.map(_.name)
+ else {
+ val start = Time.now()
+
+ val new_schedule = scheduler.build_schedule(state).update(state)
+ val schedule =
+ if (_schedule.is_empty) new_schedule
+ else List(_schedule.update(state), new_schedule).minBy(_.end)(Date.Ordering)
+
+ val elapsed = Time.now() - start
+
+ val timing_msg = if (elapsed.is_relevant) " (took " + elapsed.message + ")" else ""
+ progress.echo_if(_schedule.deviation(schedule).minutes > 1, schedule.message + timing_msg)
+
+ _schedule = schedule
+ _schedule.next(hostname, state)
+ }
+ }
+
+ override def run(): Build.Results = {
+ val results = super.run()
+ write_build_log(results, snapshot().results)
+ results
+ }
+ }
+
+
+ /** SQL data model of build schedule, extending isabelle_build database **/
+
+ object private_data extends SQL.Data("isabelle_build") {
+ import Build_Process.private_data.{Base, Generic}
+
+
+ /* schedule */
+
+ object Schedules {
+ val build_uuid = Generic.build_uuid.make_primary_key
+ val generator = SQL.Column.string("generator")
+ val start = SQL.Column.date("start")
+ val serial = SQL.Column.long("serial")
+
+ val table = make_table(List(build_uuid, generator, start, serial), name = "schedules")
+ }
+
+ def read_serial(db: SQL.Database, build_uuid: String = ""): Long =
+ db.execute_query_statementO[Long](
+ Schedules.table.select(List(Schedules.serial.max), sql =
+ SQL.where(if_proper(build_uuid, Schedules.build_uuid.equal(build_uuid)))),
+ _.long(Schedules.serial)).getOrElse(0L)
+
+ def read_scheduled_builds_domain(db: SQL.Database): List[String] =
+ db.execute_query_statement(
+ Schedules.table.select(List(Schedules.build_uuid)),
+ List.from[String], res => res.string(Schedules.build_uuid))
+
+ def read_schedules(db: SQL.Database, build_uuid: String = ""): List[Schedule] = {
+ val schedules =
+ db.execute_query_statement(Schedules.table.select(sql =
+ SQL.where(if_proper(build_uuid, Schedules.build_uuid.equal(build_uuid)))),
+ List.from[Schedule],
+ { res =>
+ val build_uuid = res.string(Schedules.build_uuid)
+ val generator = res.string(Schedules.generator)
+ val start = res.date(Schedules.start)
+ val serial = res.long(Schedules.serial)
+ Schedule(build_uuid, generator, start, Graph.empty, serial)
+ })
+
+ for (schedule <- schedules.sortBy(_.start)(Date.Ordering)) yield {
+ val nodes = private_data.read_nodes(db, build_uuid = schedule.build_uuid)
+ schedule.copy(graph = Graph.make(nodes))
+ }
+ }
+
+ def write_schedule(db: SQL.Database, schedule: Schedule): Unit = {
+ db.execute_statement(
+ Schedules.table.delete(Schedules.build_uuid.where_equal(schedule.build_uuid)))
+ db.execute_statement(Schedules.table.insert(), { stmt =>
+ stmt.string(1) = schedule.build_uuid
+ stmt.string(2) = schedule.generator
+ stmt.date(3) = schedule.start
+ stmt.long(4) = schedule.serial
+ })
+ update_nodes(db, schedule.build_uuid, schedule.graph.dest)
+ }
+
+
+ /* nodes */
+
+ object Nodes {
+ val build_uuid = Generic.build_uuid.make_primary_key
+ val name = Generic.name.make_primary_key
+ val succs = SQL.Column.string("succs")
+ val hostname = SQL.Column.string("hostname")
+ val numa_node = SQL.Column.int("numa_node")
+ val rel_cpus = SQL.Column.string("rel_cpus")
+ val start = SQL.Column.date("start")
+ val duration = SQL.Column.long("duration")
+
+ val table =
+ make_table(
+ List(build_uuid, name, succs, hostname, numa_node, rel_cpus, start, duration),
+ name = "schedule_nodes")
+ }
+
+ type Nodes = List[((String, Schedule.Node), List[String])]
+
+ def read_nodes(db: SQL.Database, build_uuid: String = ""): Nodes = {
+ db.execute_query_statement(
+ Nodes.table.select(sql =
+ SQL.where(if_proper(build_uuid, Nodes.build_uuid.equal(build_uuid)))),
+ List.from[((String, Schedule.Node), List[String])],
+ { res =>
+ val name = res.string(Nodes.name)
+ val succs = split_lines(res.string(Nodes.succs))
+ val hostname = res.string(Nodes.hostname)
+ val numa_node = res.get_int(Nodes.numa_node)
+ val rel_cpus = res.string(Nodes.rel_cpus)
+ val start = res.date(Nodes.start)
+ val duration = Time.ms(res.long(Nodes.duration))
+
+ val node_info = Node_Info(hostname, numa_node, isabelle.Host.Range.from(rel_cpus))
+ ((name, Schedule.Node(name, node_info, start, duration)), succs)
+ }
+ )
+ }
+
+ def update_nodes(db: SQL.Database, build_uuid: String, nodes: Nodes): Unit = {
+ db.execute_statement(Nodes.table.delete(Nodes.build_uuid.where_equal(build_uuid)))
+ db.execute_batch_statement(Nodes.table.insert(), batch =
+ for (((name, node), succs) <- nodes) yield { (stmt: SQL.Statement) =>
+ stmt.string(1) = build_uuid
+ stmt.string(2) = name
+ stmt.string(3) = cat_lines(succs)
+ stmt.string(4) = node.node_info.hostname
+ stmt.int(5) = node.node_info.numa_node
+ stmt.string(6) = isabelle.Host.Range(node.node_info.rel_cpus)
+ stmt.date(7) = node.start
+ stmt.long(8) = node.duration.ms
+ })
+ }
+
+ def pull_schedule(db: SQL.Database, old_schedule: Schedule): Build_Schedule.Schedule = {
+ val serial_db = read_serial(db)
+ if (serial_db == old_schedule.serial) old_schedule
+ else {
+ read_schedules(db, old_schedule.build_uuid) match {
+ case Nil => old_schedule
+ case schedules => Library.the_single(schedules)
+ }
+ }
+ }
+
+ def update_schedule(db: SQL.Database, schedule: Schedule, old_schedule: Schedule): Schedule = {
+ val changed =
+ schedule.generator != old_schedule.generator ||
+ schedule.start != old_schedule.start ||
+ schedule.graph != old_schedule.graph
+
+ val schedule1 =
+ if (changed) schedule.copy(serial = old_schedule.serial).inc_serial else schedule
+ if (schedule1.serial != schedule.serial) write_schedule(db, schedule1)
+
+ schedule1
+ }
+
+ def remove_schedules(db: SQL.Database, remove: List[String]): Unit =
+ if (remove.nonEmpty) {
+ val sql = Generic.build_uuid.where_member(remove)
+ db.execute_statement(SQL.MULTI(tables.map(_.delete(sql = sql))))
+ }
+
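+ // remove schedules of builds that are no longer running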
+ def clean_build_schedules(db: SQL.Database): Unit = {
+ val running_builds_domain =
+ db.execute_query_statement(
+ Base.table.select(List(Base.build_uuid), sql = SQL.where(Base.stop.undefined)),
+ List.from[String], res => res.string(Base.build_uuid))
+
+ val (remove, _) =
+ Library.symmetric_difference(read_scheduled_builds_domain(db), running_builds_domain)
+
+ remove_schedules(db, remove)
+ }
+
+ override val tables = SQL.Tables(Schedules.table, Nodes.table)
+ }
+
+
+ class Engine extends Build.Engine("build_schedule") {
+ def scheduler(timing_data: Timing_Data, context: Build.Context): Scheduler = {
+ val sessions_structure = context.sessions_structure
+
+ val is_criticals =
+ List(
+ Path_Time_Heuristic.Absolute_Time(Time.minutes(5)),
+ Path_Time_Heuristic.Absolute_Time(Time.minutes(10)),
+ Path_Time_Heuristic.Absolute_Time(Time.minutes(20)),
+ Path_Time_Heuristic.Relative_Time(0.5))
+ val parallel_threads =
+ List(
+ Path_Time_Heuristic.Fixed_Thread(1),
+ Path_Time_Heuristic.Time_Based_Threads({
+ case time if time < Time.minutes(1) => 1
+ case time if time < Time.minutes(5) => 4
+ case _ => 8
+ }))
+ val machine_splits =
+ List(
+ Path_Time_Heuristic.Critical_Nodes,
+ Path_Time_Heuristic.Fixed_Fraction(0.3),
+ Path_Time_Heuristic.Host_Speed(0.9))
+
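+ // one candidate heuristic per combination of criteria (4 * 2 * 3 = 24),
+ // plus the default heuristic; the meta heuristic picks whichever yields
+ // the shortest estimated schedule for the current state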
+ val path_time_heuristics =
+ for {
+ is_critical <- is_criticals
+ parallel <- parallel_threads
+ machine_split <- machine_splits
+ } yield
+ Path_Time_Heuristic(is_critical, parallel, machine_split, timing_data, sessions_structure,
+ context.build_uuid)
+ val default_heuristic =
+ Default_Heuristic(timing_data, context.build_options, context.build_uuid)
+ new Meta_Heuristic(default_heuristic :: path_time_heuristics)
+ }
+
+ override def open_build_process(
+ context: Build.Context,
+ progress: Progress,
+ server: SSH.Server
+ ): Build_Process =
+ if (!context.master) new Scheduled_Build_Process(context, progress, server)
+ else new Scheduler_Build_Process(context, progress, server) {
+ def init_scheduler(timing_data: Timing_Data): Scheduler = scheduler(timing_data, context)
+ }
+ }
+
+
+ /* build schedule */
+
+ def build_schedule(
+ options: Options,
+ build_hosts: List[Build_Cluster.Host] = Nil,
+ selection: Sessions.Selection = Sessions.Selection.empty,
+ progress: Progress = new Progress,
+ afp_root: Option[Path] = None,
+ dirs: List[Path] = Nil,
+ select_dirs: List[Path] = Nil,
+ infos: List[Sessions.Info] = Nil,
+ numa_shuffling: Boolean = false,
+ augment_options: String => List[Options.Spec] = _ => Nil,
+ session_setup: (String, Session) => Unit = (_, _) => (),
+ cache: Term.Cache = Term.Cache.make()
+ ): Schedule = {
+ val build_engine = new Engine
+
+ val store = build_engine.build_store(options, build_hosts = build_hosts, cache = cache)
+ val log_store = Build_Log.store(options, cache = cache)
+ val build_options = store.options
+
+ def build_schedule(
+ server: SSH.Server,
+ database_server: Option[SQL.Database],
+ log_database: PostgreSQL.Database,
+ host_database: SQL.Database
+ ): Schedule = {
+ val full_sessions =
+ Sessions.load_structure(build_options, dirs = AFP.make_dirs(afp_root) ::: dirs,
+ select_dirs = select_dirs, infos = infos, augment_options = augment_options)
+ val full_sessions_selection = full_sessions.imports_selection(selection)
+
+ val build_deps =
+ Sessions.deps(full_sessions.selection(selection), progress = progress,
+ inlined_files = true).check_errors
+
+ val build_context =
+ Build.Context(store, build_engine, build_deps, afp_root = afp_root,
+ build_hosts = build_hosts, hostname = Build.hostname(build_options),
+ numa_shuffling = numa_shuffling, max_jobs = 0, session_setup = session_setup,
+ master = true)
+
+ val cluster_hosts = build_context.build_hosts
+
+ val hosts_current =
+ cluster_hosts.forall(host => isabelle.Host.read_info(host_database, host.name).isDefined)
+ if (!hosts_current) {
+ val build_cluster = Build_Cluster.make(build_context, progress = progress)
+ build_cluster.open()
+ build_cluster.init()
+ build_cluster.benchmark()
+ build_cluster.close()
+ }
+
+ val host_infos = Host_Infos.load(cluster_hosts, host_database)
+ val timing_data = Timing_Data.load(host_infos, log_database)
+
+ val sessions = Build_Process.Sessions.empty.init(build_context, database_server, progress)
+ def task(session: Build_Job.Session_Context): Build_Process.Task =
+ Build_Process.Task(session.name, session.deps, JSON.Object.empty, build_context.build_uuid)
+
+ val build_state =
+ Build_Process.State(sessions = sessions, pending = sessions.iterator.map(task).toList)
+
+ val scheduler = build_engine.scheduler(timing_data, build_context)
+ def schedule_msg(res: Exn.Result[Schedule]): String =
+ res match { case Exn.Res(schedule) => schedule.message case _ => "" }
+
+ Timing.timeit(scheduler.build_schedule(build_state), schedule_msg, output = progress.echo(_))
+ }
+
+ using(store.open_server()) { server =>
+ using_optional(store.maybe_open_database_server(server = server)) { database_server =>
+ using(log_store.open_database(server = server)) { log_database =>
+ using(store.open_build_database(
+ path = isabelle.Host.private_data.database, server = server)) { host_database =>
+ build_schedule(server, database_server, log_database, host_database)
+ }
+ }
+ }
+ }
+ }
+
+ def write_schedule_graphic(schedule: Schedule, output: Path): Unit = {
+ import java.awt.geom.{GeneralPath, Rectangle2D}
+ import java.awt.{BasicStroke, Color, Graphics2D}
+
+ val line_height = isabelle.graphview.Metrics.default.height
+ val char_width = isabelle.graphview.Metrics.default.char_width
+ val padding = isabelle.graphview.Metrics.default.space_width
+ val gap = isabelle.graphview.Metrics.default.gap
+
+ val graph = schedule.graph
+
+ def text_width(text: String): Double = text.length * char_width
+
+ val generator_height = line_height + padding
+ val hostname_height = generator_height + line_height + padding
+ def time_height(time: Time): Double = time.seconds
+ def date_height(date: Date): Double = time_height(date.time - schedule.start.time)
+
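+    // Drawing layout: the vertical axis encodes schedule time (one unit per second),
+    // the horizontal axis is partitioned into one lane per host.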
+ val hosts = graph.iterator.map(_._2._1).toList.groupBy(_.node_info.hostname)
+
+ def node_width(node: Schedule.Node): Double = 2 * padding + text_width(node.job_name)
+
+ case class Range(start: Double, stop: Double) {
+ def proper: List[Range] = if (start < stop) List(this) else Nil
+ def width: Double = stop - start
+ }
+
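+    // First-fit packing of jobs within each host lane: for every node, consider the
+    // jobs overlapping it in time, collect the horizontal gaps between their
+    // allocated ranges, and take the narrowest gap that fits (or start after the
+    // last occupied range).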
+ val rel_node_ranges =
+ hosts.toList.flatMap { (hostname, nodes) =>
+ val sorted = nodes.sortBy(node => (node.start.time.ms, node.end.time.ms, node.job_name))
+ sorted.foldLeft((List.empty[Schedule.Node], Map.empty[Schedule.Node, Range])) {
+ case ((nodes, allocated), node) =>
+ val width = node_width(node) + padding
+ val parallel = nodes.filter(_.end.time > node.start.time)
+ val (last, slots) =
+ parallel.sortBy(allocated(_).start).foldLeft((0D, List.empty[Range])) {
+ case ((start, ranges), node1) =>
+ val node_range = allocated(node1)
+ (node_range.stop, ranges ::: Range(start, node_range.start).proper)
+ }
+ val start =
+ (Range(last, Double.MaxValue) :: slots.filter(_.width >= width)).minBy(_.width).start
+ (node :: parallel, allocated + (node -> Range(start, start + width)))
+ }._2
+ }.toMap
+
+ def host_width(hostname: String) =
+ 2 * padding + (hosts(hostname).map(rel_node_ranges(_).stop).max max text_width(hostname))
+
+ def graph_height(graph: Graph[String, Schedule.Node]): Double =
+ date_height(graph.maximals.map(graph.get_node(_).end).maxBy(_.unix_epoch))
+
+ val height = (hostname_height + 2 * padding + graph_height(graph)).ceil.toInt
+ val (last, host_starts) =
+ hosts.keys.foldLeft((0D, Map.empty[String, Double])) {
+ case ((previous, starts), hostname) =>
+ (previous + gap + host_width(hostname), starts + (hostname -> previous))
+ }
+ val width = (last - gap).ceil.toInt
+
+ def node_start(node: Schedule.Node): Double =
+ host_starts(node.node_info.hostname) + padding + rel_node_ranges(node).start
+
+ def paint(gfx: Graphics2D): Unit = {
+ gfx.setColor(Color.LIGHT_GRAY)
+ gfx.fillRect(0, 0, width, height)
+ gfx.setRenderingHints(isabelle.graphview.Metrics.rendering_hints)
+ gfx.setFont(isabelle.graphview.Metrics.default.font)
+ gfx.setStroke(new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND))
+
+ draw_string(schedule.generator + ", build time: " + schedule.duration.message_hms, padding, 0)
+
+ def draw_host(x: Double, hostname: String): Double = {
+ val nodes = hosts(hostname).map(_.job_name).toSet
+ val width = host_width(hostname)
+ val height = 2 * padding + graph_height(graph.restrict(nodes.contains))
+ val padding1 = ((width - text_width(hostname)) / 2) max 0
+ val rect = new Rectangle2D.Double(x, hostname_height, width, height)
+ gfx.setColor(Color.BLACK)
+ gfx.draw(rect)
+ gfx.setColor(Color.GRAY)
+ gfx.fill(rect)
+ draw_string(hostname, x + padding1, generator_height)
+ x + gap + width
+ }
+
+ def draw_string(str: String, x: Double, y: Double): Unit = {
+ gfx.setColor(Color.BLACK)
+ gfx.drawString(str, x.toInt, (y + line_height).toInt)
+ }
+
+ def node_rect(node: Schedule.Node): Rectangle2D.Double = {
+ val x = node_start(node)
+ val y = hostname_height + padding + date_height(node.start)
+ val width = node_width(node)
+ val height = time_height(node.duration)
+ new Rectangle2D.Double(x, y, width, height)
+ }
+
+ def draw_node(node: Schedule.Node): Rectangle2D.Double = {
+ val rect = node_rect(node)
+ gfx.setColor(Color.BLACK)
+ gfx.draw(rect)
+ gfx.setColor(Color.WHITE)
+ gfx.fill(rect)
+
+ def add_text(y: Double, text: String): Double =
+ if (line_height > rect.height - y || text_width(text) + 2 * padding > rect.width) y
+ else {
+ val padding1 = padding min ((rect.height - (y + line_height)) / 2)
+ draw_string(text, rect.x + padding, rect.y + y + padding1)
+ y + padding1 + line_height
+ }
+
+ val node_info = node.node_info
+
+ val duration_str = "(" + node.duration.message_hms + ")"
+ val node_str =
+ "on " + proper_string(node_info.toString.stripPrefix(node_info.hostname)).getOrElse("all")
+ val start_str = "Start: " + (node.start.time - schedule.start.time).message_hms
+
+ List(node.job_name, duration_str, node_str, start_str).foldLeft(0D)(add_text)
+
+ rect
+ }
+
+ def draw_arrow(from: Schedule.Node, to: Rectangle2D.Double, curve: Double = 10): Unit = {
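+        // straight connector from the bottom edge of the predecessor rectangle to
+        // the top edge of the successor (the "curve" parameter is currently unused)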
+ val from_rect = node_rect(from)
+
+ val path = new GeneralPath()
+ path.moveTo(from_rect.getCenterX, from_rect.getMaxY)
+ path.lineTo(to.getCenterX, to.getMinY)
+
+ gfx.setColor(Color.BLUE)
+ gfx.draw(path)
+ }
+
+ hosts.keys.foldLeft(0D)(draw_host)
+
+ graph.topological_order.foreach { job_name =>
+ val node = graph.get_node(job_name)
+ val rect = draw_node(node)
+
+ for {
+ pred <- graph.imm_preds(job_name).iterator
+ pred_node = graph.get_node(pred)
+ if node.node_info.hostname != pred_node.node_info.hostname
+ } draw_arrow(pred_node, rect)
+ }
+ }
+
+ val name = output.file_name
+ if (File.is_png(name)) Graphics_File.write_png(output.file, paint, width, height)
+ else if (File.is_pdf(name)) Graphics_File.write_pdf(output.file, paint, width, height)
+ else error("Bad type of file: " + quote(name) + " (.png or .pdf expected)")
+ }
+
+
+ /* command-line wrapper */
+
+ val isabelle_tool = Isabelle_Tool("build_schedule", "generate build schedule", Scala_Project.here,
+ { args =>
+ var afp_root: Option[Path] = None
+ val base_sessions = new mutable.ListBuffer[String]
+ val select_dirs = new mutable.ListBuffer[Path]
+ val build_hosts = new mutable.ListBuffer[Build_Cluster.Host]
+ var numa_shuffling = false
+ var output_file: Option[Path] = None
+ var requirements = false
+ val exclude_session_groups = new mutable.ListBuffer[String]
+ var all_sessions = false
+ val dirs = new mutable.ListBuffer[Path]
+ val session_groups = new mutable.ListBuffer[String]
+ var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS)
+ var verbose = false
+ val exclude_sessions = new mutable.ListBuffer[String]
+
+ val getopts = Getopts("""
+Usage: isabelle build_schedule [OPTIONS] [SESSIONS ...]
+
+ Options are:
+ -A ROOT include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
+ -B NAME include session NAME and all descendants
+ -D DIR include session directory and select its sessions
+ -H HOSTS additional build cluster host specifications, of the form
+ "NAMES:PARAMETERS" (separated by commas)
+ -N cyclic shuffling of NUMA CPU nodes (performance tuning)
+    -O FILE      output file for the schedule graphic (.png or .pdf)
+ -R refer to requirements of selected sessions
+ -X NAME exclude sessions from group NAME and all descendants
+ -a select all sessions
+ -d DIR include session directory
+ -g NAME select session group NAME
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -v verbose
+ -x NAME exclude session NAME and all descendants
+
+  Generate build schedule, without running the actual build.
+""",
+ "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
+ "B:" -> (arg => base_sessions += arg),
+ "D:" -> (arg => select_dirs += Path.explode(arg)),
+ "H:" -> (arg => build_hosts ++= Build_Cluster.Host.parse(Registry.global, arg)),
+ "N" -> (_ => numa_shuffling = true),
+ "O:" -> (arg => output_file = Some(Path.explode(arg))),
+ "R" -> (_ => requirements = true),
+ "X:" -> (arg => exclude_session_groups += arg),
+ "a" -> (_ => all_sessions = true),
+ "d:" -> (arg => dirs += Path.explode(arg)),
+ "g:" -> (arg => session_groups += arg),
+ "o:" -> (arg => options = options + arg),
+ "v" -> (_ => verbose = true),
+ "x:" -> (arg => exclude_sessions += arg))
+
+ val sessions = getopts(args)
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ val schedule =
+ build_schedule(options,
+ selection = Sessions.Selection(
+ requirements = requirements,
+ all_sessions = all_sessions,
+ base_sessions = base_sessions.toList,
+ exclude_session_groups = exclude_session_groups.toList,
+ exclude_sessions = exclude_sessions.toList,
+ session_groups = session_groups.toList,
+ sessions = sessions),
+ progress = progress,
+ afp_root = afp_root,
+ dirs = dirs.toList,
+ select_dirs = select_dirs.toList,
+ numa_shuffling = isabelle.Host.numa_check(progress, numa_shuffling),
+ build_hosts = build_hosts.toList)
+
+ if (!schedule.is_empty && output_file.nonEmpty)
+ write_schedule_graphic(schedule, output_file.get)
+ })
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/export.ML Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,73 @@
+(* Title: Pure/Build/export.ML
+ Author: Makarius
+
+Manage per-session theory exports: compressed blobs.
+*)
+
+signature EXPORT =
+sig
+ val report_export: theory -> Path.binding -> unit
+ type params =
+ {theory: theory, binding: Path.binding, executable: bool, compress: bool, strict: bool}
+ val export_params: params -> XML.body -> unit
+ val export: theory -> Path.binding -> XML.body -> unit
+ val export_executable: theory -> Path.binding -> XML.body -> unit
+ val export_file: theory -> Path.binding -> Path.T -> unit
+ val export_executable_file: theory -> Path.binding -> Path.T -> unit
+ val markup: theory -> Path.T -> Markup.T
+ val message: theory -> Path.T -> string
+end;
+
+structure Export: EXPORT =
+struct
+
+(* export *)
+
+fun report_export thy binding =
+ let
+ val theory_name = Context.theory_long_name thy;
+ val (path, pos) = Path.dest_binding binding;
+ val markup = Markup.export_path (Path.implode (Path.basic theory_name + path));
+ in Context_Position.report_generic (Context.Theory thy) pos markup end;
+
+type params =
+ {theory: theory, binding: Path.binding, executable: bool, compress: bool, strict: bool};
+
+fun export_params ({theory = thy, binding, executable, compress, strict}: params) body =
+ (report_export thy binding;
+ (Output.try_protocol_message o Markup.export)
+ {id = Position.id_of (Position.thread_data ()),
+ serial = serial (),
+ theory_name = Context.theory_long_name thy,
+ name = Path.implode_binding (tap Path.proper_binding binding),
+ executable = executable,
+ compress = compress,
+ strict = strict} [body]);
+
+fun export thy binding body =
+ export_params
+ {theory = thy, binding = binding, executable = false, compress = true, strict = true} body;
+
+fun export_executable thy binding body =
+ export_params
+ {theory = thy, binding = binding, executable = true, compress = true, strict = true} body;
+
+fun export_file thy binding file =
+ export thy binding (Bytes.contents_blob (Bytes.read file));
+
+fun export_executable_file thy binding file =
+ export_executable thy binding (Bytes.contents_blob (Bytes.read file));
+
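+(*Illustrative example: export a small text blob from within a theory context thy:
+  Export.export thy (Path.binding0 (Path.basic "example.txt")) [XML.Text "hello"]*)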
+
+(* information message *)
+
+fun markup thy path =
+ let
+ val thy_path = Path.basic (Context.theory_long_name thy) + path;
+ val name = (Markup.nameN, Path.implode thy_path);
+ in Active.make_markup Markup.theory_exportsN {implicit = false, properties = [name]} end;
+
+fun message thy path =
+ "See " ^ Markup.markup (markup thy path) "theory exports";
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/export.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,681 @@
+/* Title: Pure/Build/export.scala
+ Author: Makarius
+
+Manage per-session theory exports: compressed blobs.
+*/
+
+package isabelle
+
+
+import scala.annotation.tailrec
+import scala.util.matching.Regex
+import scala.collection.mutable
+
+
+object Export {
+ /* artefact names */
+
+ val DOCUMENT_ID: String = "PIDE/document_id"
+ val FILES: String = "PIDE/files"
+ val MARKUP: String = "PIDE/markup"
+ val MESSAGES: String = "PIDE/messages"
+ val DOCUMENT_PREFIX: String = "document/"
+ val DOCUMENT_LATEX: String = DOCUMENT_PREFIX + "latex"
+ val THEORY_PREFIX: String = "theory/"
+ val PROOFS_PREFIX: String = "proofs/"
+
+ def explode_name(s: String): List[String] = space_explode('/', s)
+ def implode_name(elems: Iterable[String]): String = elems.mkString("/")
+
+
+ /* SQL data model */
+
+ object private_data extends SQL.Data() {
+ override lazy val tables = SQL.Tables(Base.table)
+
+ object Base {
+ val session_name = SQL.Column.string("session_name").make_primary_key
+ val theory_name = SQL.Column.string("theory_name").make_primary_key
+ val name = SQL.Column.string("name").make_primary_key
+ val executable = SQL.Column.bool("executable")
+ val compressed = SQL.Column.bool("compressed")
+ val body = SQL.Column.bytes("body")
+
+ val table =
+ SQL.Table("isabelle_exports",
+ List(session_name, theory_name, name, executable, compressed, body))
+ }
+
+ def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source =
+ SQL.where_and(
+ Base.session_name.equal(session_name),
+ if_proper(theory_name, Base.theory_name.equal(theory_name)),
+ if_proper(name, Base.name.equal(name)))
+
+ def clean_session(db: SQL.Database, session_name: String): Unit =
+ db.execute_statement(Base.table.delete(sql = where_equal(session_name)))
+
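+    // Determine which of the given entry names are already present, using a single
+    // SELECT over a disjunction of per-entry conjunctions (one database round trip).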
+ def known_entries(db: SQL.Database, entry_names: Iterable[Entry_Name]): Set[Entry_Name] = {
+ val it = entry_names.iterator
+ if (it.isEmpty) Set.empty[Entry_Name]
+ else {
+ val sql_preds =
+ List.from(
+ for (entry_name <- it) yield {
+ SQL.and(
+ Base.session_name.equal(entry_name.session),
+ Base.theory_name.equal(entry_name.theory),
+ Base.name.equal(entry_name.name)
+ )
+ })
+ db.execute_query_statement(
+ Base.table.select(List(Base.session_name, Base.theory_name, Base.name),
+ sql = SQL.where(SQL.OR(sql_preds))),
+ Set.from[Entry_Name],
+ { res =>
+ val session_name = res.string(Base.session_name)
+ val theory_name = res.string(Base.theory_name)
+ val name = res.string(Base.name)
+ Entry_Name(session_name, theory_name, name)
+ })
+ }
+ }
+
+ def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
+ db.execute_query_statementO[Entry](
+ Base.table.select(List(Base.executable, Base.compressed, Base.body),
+ sql = private_data.where_equal(entry_name.session, entry_name.theory, entry_name.name)),
+ { res =>
+ val executable = res.bool(Base.executable)
+ val compressed = res.bool(Base.compressed)
+ val bytes = res.bytes(Base.body)
+ val body = Future.value(compressed, bytes)
+ Entry(entry_name, executable, body, cache)
+ }
+ )
+
+ def write_entries(db: SQL.Database, entries: List[Entry]): Unit =
+ db.execute_batch_statement(Base.table.insert(), batch =
+ for (entry <- entries) yield { (stmt: SQL.Statement) =>
+ val (compressed, bs) = entry.body.join
+ stmt.string(1) = entry.session_name
+ stmt.string(2) = entry.theory_name
+ stmt.string(3) = entry.name
+ stmt.bool(4) = entry.executable
+ stmt.bool(5) = compressed
+ stmt.bytes(6) = bs
+ })
+
+ def read_theory_names(db: SQL.Database, session_name: String): List[String] =
+ db.execute_query_statement(
+ Base.table.select(List(Base.theory_name), distinct = true,
+ sql = private_data.where_equal(session_name) + SQL.order_by(List(Base.theory_name))),
+ List.from[String], res => res.string(Base.theory_name))
+
+ def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
+ db.execute_query_statement(
+        Base.table.select(List(Base.theory_name, Base.name),
+          sql = private_data.where_equal(session_name) +
+            SQL.order_by(List(Base.theory_name, Base.name))),
+ List.from[Entry_Name],
+ { res =>
+ Entry_Name(
+ session = session_name,
+ theory = res.string(Base.theory_name),
+ name = res.string(Base.name))
+ })
+ }
+
+ def compound_name(a: String, b: String): String =
+ if (a.isEmpty) b else a + ":" + b
+
+ sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") {
+ val compound_name: String = Export.compound_name(theory, name)
+
+ def make_path(prune: Int = 0): Path = {
+ val elems = theory :: space_explode('/', name)
+ if (elems.length < prune + 1) {
+ error("Cannot prune path by " + prune + " element(s): " + Path.make(elems))
+ }
+ else Path.make(elems.drop(prune))
+ }
+ }
+
+ def message(msg: String, theory_name: String, name: String): String =
+ msg + " " + quote(name) + " for theory " + quote(theory_name)
+
+ object Entry {
+ def apply(
+ entry_name: Entry_Name,
+ executable: Boolean,
+ body: Future[(Boolean, Bytes)],
+ cache: XML.Cache
+ ): Entry = new Entry(entry_name, executable, body, cache)
+
+ def empty(theory_name: String, name: String): Entry =
+ Entry(Entry_Name(theory = theory_name, name = name),
+ false, Future.value(false, Bytes.empty), XML.Cache.none)
+
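+    // The body is compressed within a forked future, so the producing thread is not
+    // blocked; the Boolean component records whether compression was applied.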
+ def make(
+ session_name: String,
+ args: Protocol.Export.Args,
+ bytes: Bytes,
+ cache: XML.Cache
+ ): Entry = {
+ val body =
+ if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.compress))
+ else Future.value((false, bytes))
+ val entry_name =
+ Entry_Name(session = session_name, theory = args.theory_name, name = args.name)
+ Entry(entry_name, args.executable, body, cache)
+ }
+ }
+
+ final class Entry private(
+ val entry_name: Entry_Name,
+ val executable: Boolean,
+ val body: Future[(Boolean, Bytes)],
+ val cache: XML.Cache
+ ) {
+ def session_name: String = entry_name.session
+ def theory_name: String = entry_name.theory
+ def name: String = entry_name.name
+ override def toString: String = name
+
+ def is_finished: Boolean = body.is_finished
+ def cancel(): Unit = body.cancel()
+
+ def compound_name: String = entry_name.compound_name
+
+ def name_has_prefix(s: String): Boolean = name.startsWith(s)
+ val name_elems: List[String] = explode_name(name)
+
+ def name_extends(elems: List[String]): Boolean =
+ name_elems.startsWith(elems) && name_elems != elems
+
+ def bytes: Bytes = {
+ val (compressed, bs) = body.join
+ if (compressed) bs.uncompress(cache = cache.compress) else bs
+ }
+
+ def text: String = bytes.text
+
+ def yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(bytes), cache = cache)
+ }
+
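+  // Translate an export pattern into a regex over compound names "THEORY:NAME".
+  // Illustrative examples: "*:**" matches every entry of every theory, while
+  // "theory/{consts,types}" becomes the regex "theory/(consts|types)".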
+ def make_regex(pattern: String): Regex = {
+ @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex =
+ chs match {
+ case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest)
+ case '*' :: rest => make("[^:/]*" :: result, depth, rest)
+ case '?' :: rest => make("[^:/]" :: result, depth, rest)
+ case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest)
+ case '{' :: rest => make("(" :: result, depth + 1, rest)
+ case ',' :: rest if depth > 0 => make("|" :: result, depth, rest)
+ case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest)
+ case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest)
+ case c :: rest => make(c.toString :: result, depth, rest)
+ case Nil => result.reverse.mkString.r
+ }
+ make(Nil, 0, pattern.toList)
+ }
+
+ def make_matcher(pats: List[String]): Entry_Name => Boolean = {
+ val regs = pats.map(make_regex)
+ (entry_name: Entry_Name) => regs.exists(_.matches(entry_name.compound_name))
+ }
+
+ def clean_session(db: SQL.Database, session_name: String): Unit =
+ private_data.transaction_lock(db, create = true, label = "Export.clean_session") {
+ private_data.clean_session(db, session_name)
+ }
+
+ def read_theory_names(db: SQL.Database, session_name: String): List[String] =
+ private_data.transaction_lock(db, label = "Export.read_theory_names") {
+ private_data.read_theory_names(db, session_name)
+ }
+
+ def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
+ private_data.transaction_lock(db, label = "Export.read_entry_names") {
+ private_data.read_entry_names(db, session_name)
+ }
+
+ def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
+ private_data.transaction_lock(db, label = "Export.read_entry") {
+ private_data.read_entry(db, entry_name, cache)
+ }
+
+
+ /* database consumer thread */
+
+ def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer =
+ new Consumer(db, cache, progress)
+
+ class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) {
+ private val errors = Synchronized[List[String]](Nil)
+
+ private def consume(args: List[(Entry, Boolean)]): List[Exn.Result[Unit]] = {
+ for ((entry, _) <- args) {
+ if (progress.stopped) entry.cancel() else entry.body.join
+ }
+ private_data.transaction_lock(db, label = "Export.consumer(" + args.length + ")") {
+ var known = private_data.known_entries(db, args.map(p => p._1.entry_name))
+ val buffer = new mutable.ListBuffer[Option[Entry]]
+
+ for ((entry, strict) <- args) {
+ if (progress.stopped || known(entry.entry_name)) {
+ buffer += None
+ if (strict && known(entry.entry_name)) {
+ val msg = message("Duplicate export", entry.theory_name, entry.name)
+ errors.change(msg :: _)
+ }
+ }
+ else {
+ buffer += Some(entry)
+ known += entry.entry_name
+ }
+ }
+
+ val entries = buffer.toList
+ try {
+ private_data.write_entries(db, entries.flatten)
+ val ok = Exn.Res[Unit](())
+ entries.map(_ => ok)
+ }
+ catch {
+ case exn: Throwable =>
+ val err = Exn.Exn[Unit](exn)
+ entries.map(_ => err)
+ }
+ }
+ }
+
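+    // Bulk consumer thread: entries whose body future has finished are taken in
+    // bulk and written in groups of 20 entries per database transaction.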
+ private val consumer =
+ Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")(
+ bulk = { case (entry, _) => entry.is_finished },
+ consume = args => (args.grouped(20).toList.flatMap(consume), true))
+
+ def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
+ if (!progress.stopped && !body.is_empty) {
+ consumer.send(Entry.make(session_name, args, body, cache) -> args.strict)
+ }
+ }
+
+ def shutdown(close: Boolean = false): List[String] = {
+ consumer.shutdown()
+ if (close) db.close()
+ errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil)
+ }
+ }
+
+
+ /* context for database access */
+
+ def open_database_context(store: Store, server: SSH.Server = SSH.no_server): Database_Context =
+ new Database_Context(store, store.maybe_open_database_server(server = server))
+
+ def open_session_context0(
+ store: Store,
+ session: String,
+ server: SSH.Server = SSH.no_server
+ ): Session_Context = {
+ open_database_context(store, server = server)
+ .open_session0(session, close_database_context = true)
+ }
+
+ def open_session_context(
+ store: Store,
+ session_background: Sessions.Background,
+ document_snapshot: Option[Document.Snapshot] = None,
+ server: SSH.Server = SSH.no_server
+ ): Session_Context = {
+ open_database_context(store, server = server).open_session(
+ session_background, document_snapshot = document_snapshot, close_database_context = true)
+ }
+
+ class Database_Context private[Export](
+ val store: Store,
+ val database_server: Option[SQL.Database]
+ ) extends AutoCloseable {
+ database_context =>
+
+ override def toString: String = {
+ val s =
+ database_server match {
+ case Some(db) => db.toString
+ case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ")
+ }
+ "Database_Context(" + s + ")"
+ }
+
+ def cache: Term.Cache = store.cache
+
+ def close(): Unit = database_server.foreach(_.close())
+
+ def open_database(session: String, output: Boolean = false): Session_Database =
+ database_server match {
+ case Some(db) => new Session_Database(session, db)
+ case None =>
+ new Session_Database(session, store.open_database(session, output = output)) {
+ override def close(): Unit = db.close()
+ }
+ }
+
+ def open_session0(session: String, close_database_context: Boolean = false): Session_Context =
+ open_session(Sessions.background0(session), close_database_context = close_database_context)
+
+ def open_session(
+ session_background: Sessions.Background,
+ document_snapshot: Option[Document.Snapshot] = None,
+ close_database_context: Boolean = false
+ ): Session_Context = {
+ val session_name = session_background.check_errors.session_name
+ val session_hierarchy = session_background.sessions_structure.build_hierarchy(session_name)
+ val session_databases =
+ database_server match {
+ case Some(db) => session_hierarchy.map(name => new Session_Database(name, db))
+ case None =>
+ val attempts =
+ for (name <- session_hierarchy)
+ yield name -> store.try_open_database(name, server_mode = false)
+ attempts.collectFirst({ case (name, None) => name }) match {
+ case Some(bad) =>
+ for (case (_, Some(db)) <- attempts) db.close()
+ store.error_database(bad)
+ case None =>
+ for (case (name, Some(db)) <- attempts) yield {
+ new Session_Database(name, db) { override def close(): Unit = this.db.close() }
+ }
+ }
+ }
+ new Session_Context(
+ database_context, session_background, session_databases, document_snapshot) {
+ override def close(): Unit = {
+ session_databases.foreach(_.close())
+ if (close_database_context) database_context.close()
+ }
+ }
+ }
+ }
+
+ class Session_Database private[Export](val session: String, val db: SQL.Database)
+ extends AutoCloseable {
+ def close(): Unit = ()
+
+    private[Export] lazy val theory_names: List[String] = read_theory_names(db, session)
+    private[Export] lazy val entry_names: List[Entry_Name] = read_entry_names(db, session)
+ }
+
+ class Session_Context private[Export](
+ val database_context: Database_Context,
+ session_background: Sessions.Background,
+ db_hierarchy: List[Session_Database],
+ val document_snapshot: Option[Document.Snapshot]
+ ) extends AutoCloseable {
+ session_context =>
+
+ def close(): Unit = ()
+
+ def cache: Term.Cache = database_context.cache
+
+ def sessions_structure: Sessions.Structure = session_background.sessions_structure
+
+ def session_base: Sessions.Base = session_background.base
+
+ def session_name: String =
+ if (document_snapshot.isDefined) Sessions.DRAFT
+ else session_base.session_name
+
+ def session_database(session: String = session_name): Option[Session_Database] =
+ db_hierarchy.find(_.session == session)
+
+ def session_db(session: String = session_name): Option[SQL.Database] =
+ session_database(session = session).map(_.db)
+
+ def session_stack: List[String] =
+ ((if (document_snapshot.isDefined) List(session_name) else Nil) :::
+ db_hierarchy.map(_.session)).reverse
+
+ private def select[A](
+ session: String,
+ select: Session_Database => List[A],
+ project: Entry_Name => A,
+ sort_key: A => String
+ ): List[A] = {
+ def result(name: String): List[A] =
+ if (name == Sessions.DRAFT) {
+ (for {
+ snapshot <- document_snapshot.iterator
+ entry_name <- snapshot.all_exports.keysIterator
+ } yield project(entry_name)).toSet.toList.sortBy(sort_key)
+ }
+ else session_database(name).map(select).getOrElse(Nil)
+
+ if (session.nonEmpty) result(session) else session_stack.flatMap(result)
+ }
+
+ def entry_names(session: String = session_name): List[Entry_Name] =
+ select(session, _.entry_names, identity, _.compound_name)
+
+ def theory_names(session: String = session_name): List[String] =
+ select(session, _.theory_names, _.theory, identity)
+
+    def get(theory: String, name: String): Option[Entry] = {
+ def snapshot_entry: Option[Entry] =
+ for {
+ snapshot <- document_snapshot
+ entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name)
+ entry <- snapshot.all_exports.get(entry_name)
+ } yield entry
+ def db_entry: Option[Entry] =
+ db_hierarchy.view.map { database =>
+        val entry_name =
+          Export.Entry_Name(session = database.session, theory = theory, name = name)
+ read_entry(database.db, entry_name, cache)
+ }.collectFirst({ case Some(entry) => entry })
+
+ snapshot_entry orElse db_entry
+ }
+
+ def apply(theory: String, name: String, permissive: Boolean = false): Entry =
+ get(theory, name) match {
+ case None if permissive => Entry.empty(theory, name)
+ case None => error("Missing export entry " + quote(compound_name(theory, name)))
+ case Some(entry) => entry
+ }
+
+ def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context =
+ new Theory_Context(session_context, theory, other_cache)
+
+ def get_source_file(name: String): Option[Store.Source_File] = {
+ val store = database_context.store
+ (for {
+ database <- db_hierarchy.iterator
+ file <- store.read_sources(database.db, database.session, name = name).iterator
+ } yield file).nextOption()
+ }
+
+ def source_file(name: String): Store.Source_File =
+ get_source_file(name).getOrElse(error("Missing session source file " + quote(name)))
+
+ def theory_source(theory: String, unicode_symbols: Boolean = false): String = {
+ def snapshot_source: Option[String] =
+ for {
+ snapshot <- document_snapshot
+ text <- snapshot.version.nodes.iterator.collectFirst(
+ { case (name, node) if name.theory == theory => node.source })
+ if text.nonEmpty
+ } yield Symbol.output(unicode_symbols, text)
+
+ def db_source: Option[String] = {
+ val theory_context = session_context.theory(theory)
+ for {
+ name <- theory_context.files0(permissive = true).headOption
+ file <- get_source_file(name)
+ } yield Symbol.output(unicode_symbols, file.bytes.text)
+ }
+
+ snapshot_source orElse db_source getOrElse ""
+ }
+
+ def classpath(): List[File.Content] = {
+ (for {
+ session <- session_stack.iterator
+ info <- sessions_structure.get(session).iterator
+ if info.export_classpath.nonEmpty
+ matcher = make_matcher(info.export_classpath)
+ entry_name <- entry_names(session = session).iterator
+ if matcher(entry_name)
+ entry <- get(entry_name.theory, entry_name.name).iterator
+ } yield File.content(entry.entry_name.make_path(), entry.bytes)).toList
+ }
+
+ override def toString: String =
+ "Export.Session_Context(" + commas_quote(session_stack) + ")"
+ }
+
+ class Theory_Context private[Export](
+ val session_context: Session_Context,
+ val theory: String,
+ other_cache: Option[Term.Cache]
+ ) {
+ def cache: Term.Cache = other_cache getOrElse session_context.cache
+
+ def get(name: String): Option[Entry] = session_context.get(theory, name)
+ def apply(name: String, permissive: Boolean = false): Entry =
+ session_context.apply(theory, name, permissive = permissive)
+
+ def yxml(name: String): XML.Body =
+ get(name) match {
+ case Some(entry) => entry.yxml
+ case None => Nil
+ }
+
+ def document_id(): Option[Long] =
+ apply(DOCUMENT_ID, permissive = true).text match {
+ case Value.Long(id) => Some(id)
+ case _ => None
+ }
+
+ def files0(permissive: Boolean = false): List[String] =
+ split_lines(apply(FILES, permissive = permissive).text)
+
+ def files(permissive: Boolean = false): Option[(String, List[String])] =
+ files0(permissive = permissive) match {
+ case Nil => None
+ case a :: bs => Some((a, bs))
+ }
+
+ override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
+ }
+
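+  // Typical read access (sketch only; session and theory names are illustrative):
+  //
+  //   using(open_session_context0(store, "HOL")) { session_context =>
+  //     session_context.theory("HOL.Nat")(THEORY_PREFIX + "parents").text
+  //   }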
+
+ /* export to file-system */
+
+ def export_files(
+ store: Store,
+ session_name: String,
+ export_dir: Path,
+ progress: Progress = new Progress,
+ export_prune: Int = 0,
+ export_list: Boolean = false,
+ export_patterns: List[String] = Nil
+ ): Unit = {
+ using(store.open_database(session_name)) { db =>
+ val entry_names = read_entry_names(db, session_name)
+
+ // list
+ if (export_list) {
+ for (entry_name <- entry_names) progress.echo(entry_name.compound_name)
+ }
+
+ // export
+ if (export_patterns.nonEmpty) {
+ val matcher = make_matcher(export_patterns)
+ for {
+ entry_name <- entry_names if matcher(entry_name)
+ entry <- read_entry(db, entry_name, store.cache)
+ } {
+ val path = export_dir + entry_name.make_path(prune = export_prune)
+ progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
+ Isabelle_System.make_directory(path.dir)
+ val bytes = entry.bytes
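+        // write only if the content differs, to keep timestamps of unchanged files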
+ if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
+ File.set_executable(path, reset = !entry.executable)
+ }
+ }
+ }
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val default_export_dir: Path = Path.explode("export")
+
+ val isabelle_tool =
+ Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here,
+ { args =>
+ /* arguments */
+
+ var export_dir = default_export_dir
+ var dirs: List[Path] = Nil
+ var export_list = false
+ var no_build = false
+ var options = Options.init()
+ var export_prune = 0
+ var export_patterns: List[String] = Nil
+
+ val getopts = Getopts("""
+Usage: isabelle export [OPTIONS] SESSION
+
+ Options are:
+ -O DIR output directory for exported files (default: """ + default_export_dir + """)
+ -d DIR include session directory
+ -l list exports
+ -n no build of session
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -p NUM prune path of exported files by NUM elements
+ -x PATTERN extract files matching pattern (e.g. "*:**" for all)
+
+ List or export theory exports for SESSION: named blobs produced by
+ isabelle build. Option -l or -x is required; option -x may be repeated.
+
+ The PATTERN language resembles glob patterns in the shell, with ? and *
+ (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
+ and variants {pattern1,pattern2,pattern3}.
+""",
+ "O:" -> (arg => export_dir = Path.explode(arg)),
+ "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
+ "l" -> (_ => export_list = true),
+ "n" -> (_ => no_build = true),
+ "o:" -> (arg => options = options + arg),
+ "p:" -> (arg => export_prune = Value.Int.parse(arg)),
+ "x:" -> (arg => export_patterns ::= arg))
+
+ val more_args = getopts(args)
+ val session_name =
+ more_args match {
+ case List(session_name) if export_list || export_patterns.nonEmpty => session_name
+ case _ => getopts.usage()
+ }
+
+ val progress = new Console_Progress()
+
+
+ /* build */
+
+ if (!no_build) {
+ val rc =
+ progress.interrupt_handler {
+ Build.build_logic(options, session_name, progress = progress, dirs = dirs)
+ }
+ if (rc != Process_Result.RC.ok) sys.exit(rc)
+ }
+
+
+ /* export files */
+
+ val store = Store(options)
+ export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune,
+ export_list = export_list, export_patterns = export_patterns)
+ })
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/export_theory.ML Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,479 @@
+(* Title: Pure/Build/export_theory.ML
+ Author: Makarius
+
+Export foundational theory content and locale/class structure.
+*)
+
+signature EXPORT_THEORY =
+sig
+ val other_name_space: (theory -> Name_Space.T) -> theory -> theory
+ val export_enabled: Thy_Info.presentation_context -> bool
+ val export_body: theory -> string -> XML.body -> unit
+end;
+
+structure Export_Theory: EXPORT_THEORY =
+struct
+
+(* other name spaces *)
+
+fun err_dup_kind kind = error ("Duplicate name space kind " ^ quote kind);
+
+structure Data = Theory_Data
+(
+ type T = (theory -> Name_Space.T) Inttab.table;
+ val empty = Inttab.empty;
+ val merge = Inttab.merge (K true);
+);
+
+val other_name_spaces = map #2 o Inttab.dest o Data.get;
+fun other_name_space get_space thy = Data.map (Inttab.update (serial (), get_space)) thy;
+
+val _ = Theory.setup
+ (other_name_space Thm.oracle_space #>
+ other_name_space Global_Theory.fact_space #>
+ other_name_space (Bundle.bundle_space o Context.Theory) #>
+ other_name_space (Attrib.attribute_space o Context.Theory) #>
+ other_name_space (Method.method_space o Context.Theory));
+
+
+(* approximative syntax *)
+
+val get_syntax = Syntax.get_approx o Proof_Context.syn_of;
+fun get_syntax_type ctxt = get_syntax ctxt o Lexicon.mark_type;
+fun get_syntax_const ctxt = get_syntax ctxt o Lexicon.mark_const;
+fun get_syntax_fixed ctxt = get_syntax ctxt o Lexicon.mark_fixed;
+
+fun get_syntax_param ctxt loc x =
+ let val thy = Proof_Context.theory_of ctxt in
+ if Class.is_class thy loc then
+ (case AList.lookup (op =) (Class.these_params thy [loc]) x of
+ NONE => NONE
+ | SOME (_, (c, _)) => get_syntax_const ctxt c)
+ else get_syntax_fixed ctxt x
+ end;
+
+val encode_syntax =
+ XML.Encode.variant
+ [fn NONE => ([], []),
+ fn SOME (Syntax.Prefix delim) => ([delim], []),
+ fn SOME (Syntax.Infix {assoc, delim, pri}) =>
+ let
+ val ass =
+ (case assoc of
+ Printer.No_Assoc => 0
+ | Printer.Left_Assoc => 1
+ | Printer.Right_Assoc => 2);
+ open XML.Encode Term_XML.Encode;
+ in ([], triple int string int (ass, delim, pri)) end];
+
+
+(* free variables: not declared in the context *)
+
+val is_free = not oo Name.is_declared;
+
+fun add_frees used =
+ fold_aterms (fn Free (x, T) => is_free used x ? insert (op =) (x, T) | _ => I);
+
+fun add_tfrees used =
+ (fold_types o fold_atyps) (fn TFree (a, S) => is_free used a ? insert (op =) (a, S) | _ => I);
+
+
+(* locales *)
+
+fun locale_content thy loc =
+ let
+ val ctxt = Locale.init loc thy;
+ val args =
+ Locale.params_of thy loc
+ |> map (fn ((x, T), _) => ((x, T), get_syntax_param ctxt loc x));
+ val axioms =
+ let
+ val (asm, defs) = Locale.specification_of thy loc;
+ val cprops = map (Thm.cterm_of ctxt) (the_list asm @ defs);
+ val (intro1, intro2) = Locale.intros_of thy loc;
+ val intros_tac = Method.try_intros_tac ctxt (the_list intro1 @ the_list intro2) [];
+ val res =
+ Goal.init (Conjunction.mk_conjunction_balanced cprops)
+ |> (ALLGOALS Goal.conjunction_tac THEN intros_tac)
+ |> try Seq.hd;
+ in
+ (case res of
+ SOME goal => Thm.prems_of goal
+ | NONE => raise Fail ("Cannot unfold locale " ^ quote loc))
+ end;
+ val typargs = build_rev (fold Term.add_tfrees (map (Free o #1) args @ axioms));
+ in {typargs = typargs, args = args, axioms = axioms} end;
+
+fun get_locales thy =
+ Locale.get_locales thy |> map_filter (fn loc =>
+ if Experiment.is_experiment thy loc then NONE else SOME (loc, ()));
+
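+(*locale dependencies together with the type/term substitutions induced by the
+  dependency morphism, recording only parameters that are actually changed*)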
+fun get_dependencies prev_thys thy =
+ Locale.dest_dependencies prev_thys thy |> map_filter (fn dep =>
+ if Experiment.is_experiment thy (#source dep) orelse
+ Experiment.is_experiment thy (#target dep) then NONE
+ else
+ let
+ val (type_params, params) = Locale.parameters_of thy (#source dep);
+ val typargs = fold (Term.add_tfreesT o #2 o #1) params type_params;
+ val substT =
+ typargs |> map_filter (fn v =>
+ let
+ val T = TFree v;
+ val T' = Morphism.typ (#morphism dep) T;
+ in if T = T' then NONE else SOME (v, T') end);
+ val subst =
+ params |> map_filter (fn (v, _) =>
+ let
+ val t = Free v;
+ val t' = Morphism.term (#morphism dep) t;
+ in if t aconv t' then NONE else SOME (v, t') end);
+ in SOME (dep, (substT, subst)) end);
+
+
+(* presentation *)
+
+fun export_enabled (context: Thy_Info.presentation_context) =
+ Options.bool (#options context) "export_theory";
+
+fun export_body thy name body =
+ if XML.is_empty_body body then ()
+ else Export.export thy (Path.binding0 (Path.make ("theory" :: space_explode "/" name))) body;
+
+val _ = (Theory.setup o Thy_Info.add_presentation) (fn context => fn thy =>
+ let
+ val rep_tsig = Type.rep_tsig (Sign.tsig_of thy);
+ val consts = Sign.consts_of thy;
+ val thy_ctxt = Proof_Context.init_global thy;
+
+ val pos_properties = Thy_Info.adjust_pos_properties context;
+
+ val enabled = export_enabled context;
+
+
+ (* strict parents *)
+
+ val parents = Theory.parents_of thy;
+ val _ =
+ Export.export thy \<^path_binding>\<open>theory/parents\<close>
+ (XML.Encode.string (cat_lines (map Context.theory_long_name parents) ^ "\n"));
+
+
+ (* spec rules *)
+
+ fun spec_rule_content {pos, name, rough_classification, terms, rules} =
+ let
+ val spec =
+ terms @ map Thm.plain_prop_of rules
+ |> Term_Subst.zero_var_indexes_list
+ |> map Logic.unvarify_global;
+ in
+ {props = pos_properties pos,
+ name = name,
+ rough_classification = rough_classification,
+ typargs = build_rev (fold Term.add_tfrees spec),
+ args = build_rev (fold Term.add_frees spec),
+ terms = map (fn t => (t, Term.type_of t)) (take (length terms) spec),
+ rules = drop (length terms) spec}
+ end;
+
+
+ (* entities *)
+
+ fun make_entity_markup name xname pos serial =
+ let val props = pos_properties pos @ Markup.serial_properties serial;
+ in (Markup.entityN, (Markup.nameN, name) :: (Markup.xnameN, xname) :: props) end;
+
+ fun entity_markup space name =
+ let
+ val xname = Name_Space.extern_shortest thy_ctxt space name;
+ val {serial, pos, ...} = Name_Space.the_entry space name;
+ in make_entity_markup name xname pos serial end;
+
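+  (*export the entities of one name space as a single sorted XML body, skipping
+    names already declared in parent theories; bodies are computed only when enabled*)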
+ fun export_entities export_name get_space decls export =
+ let
+ val parent_spaces = map get_space parents;
+ val space = get_space thy;
+ in
+ build (decls |> fold (fn (name, decl) =>
+ if exists (fn space => Name_Space.declared space name) parent_spaces then I
+ else
+ (case export name decl of
+ NONE => I
+ | SOME make_body =>
+ let
+ val i = #serial (Name_Space.the_entry space name);
+ val body = if enabled then make_body () else [];
+ in cons (i, XML.Elem (entity_markup space name, body)) end)))
+ |> sort (int_ord o apply2 #1) |> map #2
+ |> export_body thy export_name
+ end;
+
+
+ (* types *)
+
+ val encode_type =
+ let open XML.Encode Term_XML.Encode
+ in triple encode_syntax (list string) (option typ) end;
+
+ val _ =
+ export_entities "types" Sign.type_space (Name_Space.dest_table (#types rep_tsig))
+ (fn c =>
+ (fn Type.Logical_Type n =>
+ SOME (fn () =>
+ encode_type (get_syntax_type thy_ctxt c, Name.invent Name.context Name.aT n, NONE))
+ | Type.Abbreviation (args, U, false) =>
+ SOME (fn () =>
+ encode_type (get_syntax_type thy_ctxt c, args, SOME U))
+ | _ => NONE));
+
+
+ (* consts *)
+
+ val encode_term = Term_XML.Encode.term consts;
+
+ val encode_const =
+ let open XML.Encode Term_XML.Encode
+ in pair encode_syntax (pair (list string) (pair typ (pair (option encode_term) bool))) end;
+
+ val _ =
+ export_entities "consts" Sign.const_space (#constants (Consts.dest consts))
+ (fn c => fn (T, abbrev) =>
+ SOME (fn () =>
+ let
+ val syntax = get_syntax_const thy_ctxt c;
+ val U = Logic.unvarifyT_global T;
+ val U0 = Term.strip_sortsT U;
+ val trim_abbrev = Proofterm.standard_vars_term Name.context #> Term.strip_sorts;
+ val abbrev' = Option.map trim_abbrev abbrev;
+ val args = map (#1 o dest_TFree) (Consts.typargs consts (c, U0));
+ val propositional = Object_Logic.is_propositional thy_ctxt (Term.body_type U0);
+ in encode_const (syntax, (args, (U0, (abbrev', propositional)))) end));
+
+
+ (* axioms *)
+
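+  (*standardize a proposition for export: normalize its variables, collect the free
+    term/type variables, and invent names for extra sort hypotheses*)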
+ fun standard_prop used extra_shyps raw_prop raw_proof =
+ let
+ val (prop, proof) = Proofterm.standard_vars used (raw_prop, raw_proof);
+ val args = rev (add_frees used prop []);
+ val typargs = rev (add_tfrees used prop []);
+ val used_typargs = fold (Name.declare o #1) typargs used;
+ val sorts = Name.invent used_typargs Name.aT (length extra_shyps) ~~ extra_shyps;
+ in ((sorts @ typargs, args, prop), proof) end;
+
+ fun standard_prop_of thm =
+ standard_prop Name.context (Thm.extra_shyps thm) (Thm.full_prop_of thm);
+
+ val encode_prop =
+ let open XML.Encode Term_XML.Encode
+ in triple (list (pair string sort)) (list (pair string typ)) encode_term end;
+
+ fun encode_axiom used prop =
+ encode_prop (#1 (standard_prop used [] prop NONE));
+
+ val _ =
+ export_entities "axioms" Theory.axiom_space (Theory.all_axioms_of thy)
+ (fn _ => fn prop => SOME (fn () => encode_axiom Name.context prop));
+
+
+ (* theorems and proof terms *)
+
+ val clean_thm = Thm.check_hyps (Context.Theory thy) #> Thm.strip_shyps;
+ val prep_thm = clean_thm #> Thm.unconstrainT #> Thm.strip_shyps;
+
+ val lookup_thm_id = Global_Theory.lookup_thm_id thy;
+
+ fun expand_name thm_id (header: Proofterm.thm_header) =
+ if #serial header = #serial thm_id then ""
+ else
+ (case lookup_thm_id (Proofterm.thm_header_id header) of
+ NONE => ""
+ | SOME thm_name => Thm_Name.print thm_name);
+
+ fun entity_markup_thm (serial, (name, i)) =
+ let
+ val space = Global_Theory.fact_space thy;
+ val xname = Name_Space.extern_shortest thy_ctxt space name;
+ val {pos, ...} = Name_Space.the_entry space name;
+ in make_entity_markup (Thm_Name.print (name, i)) (Thm_Name.print (xname, i)) pos serial end;
+
+ fun encode_thm thm_id raw_thm =
+ let
+ val deps = map (Thm_Name.print o #2) (Thm_Deps.thm_deps thy [raw_thm]);
+ val thm = prep_thm raw_thm;
+
+ val proof0 =
+ if Proofterm.export_standard_enabled () then
+ Proof_Syntax.standard_proof_of
+ {full = true, expand_name = SOME o expand_name thm_id} thm
+ else if Proofterm.export_enabled () then Thm.reconstruct_proof_of thm
+ else MinProof;
+ val (prop, SOME proof) = standard_prop_of thm (SOME proof0);
+ val _ = Thm.expose_proofs thy [thm];
+ in
+ (prop, deps, proof) |>
+ let
+ open XML.Encode Term_XML.Encode;
+ val encode_proof = Proofterm.encode_standard_proof consts;
+ in triple encode_prop (list string) encode_proof end
+ end;
+
+ fun export_thm (thm_id, thm_name) =
+ let
+ val markup = entity_markup_thm (#serial thm_id, thm_name);
+ val body =
+ if enabled then
+ Global_Theory.get_thm_name thy (thm_name, Position.none)
+ |> encode_thm thm_id
+ else [];
+ in XML.Elem (markup, body) end;
+
+ val _ = export_body thy "thms" (map export_thm (Global_Theory.dest_thm_names thy));
+
+
+ (* type classes *)
+
+ val encode_class =
+ let open XML.Encode Term_XML.Encode
+ in pair (list (pair string typ)) (list (encode_axiom Name.context)) end;
+
+ val _ =
+ export_entities "classes" Sign.class_space
+ (map (rpair ()) (Graph.keys (Sorts.classes_of (#2 (#classes rep_tsig)))))
+ (fn name => fn () => SOME (fn () =>
+ (case try (Axclass.get_info thy) name of
+ NONE => ([], [])
+ | SOME {params, axioms, ...} => (params, map (Thm.plain_prop_of o clean_thm) axioms))
+ |> encode_class));
+
+
+ (* sort algebra *)
+
+ val _ =
+ if enabled then
+ let
+ val prop = encode_axiom Name.context o Logic.varify_global;
+
+ val encode_classrel =
+ let open XML.Encode
+ in list (pair prop (pair string string)) end;
+
+ val encode_arities =
+ let open XML.Encode Term_XML.Encode
+ in list (pair prop (triple string (list sort) string)) end;
+
+ val export_classrel =
+ maps (fn (c, cs) => map (pair c) cs) #> map (`Logic.mk_classrel) #> encode_classrel;
+
+ val export_arities = map (`Logic.mk_arity) #> encode_arities;
+
+ val {classrel, arities} =
+ Sorts.dest_algebra (map (#2 o #classes o Type.rep_tsig o Sign.tsig_of) parents)
+ (#2 (#classes rep_tsig));
+ in
+ if null classrel then () else export_body thy "classrel" (export_classrel classrel);
+ if null arities then () else export_body thy "arities" (export_arities arities)
+ end
+ else ();
+
+
+ (* locales *)
+
+ fun encode_locale used =
+ let open XML.Encode Term_XML.Encode in
+ triple (list (pair string sort)) (list (pair (pair string typ) encode_syntax))
+ (list (encode_axiom used))
+ end;
+
+ val _ =
+ export_entities "locales" Locale.locale_space (get_locales thy)
+ (fn loc => fn () => SOME (fn () =>
+ let
+ val {typargs, args, axioms} = locale_content thy loc;
+ val used = fold Name.declare (map #1 typargs @ map (#1 o #1) args) Name.context;
+ in encode_locale used (typargs, args, axioms) end
+ handle ERROR msg =>
+ cat_error msg ("The error(s) above occurred in locale " ^
+ quote (Locale.markup_name thy_ctxt loc))));
+
+
+ (* locale dependencies *)
+
+ fun encode_locale_dependency (dep: Locale.locale_dependency, subst) =
+ (#source dep, (#target dep, (#prefix dep, subst))) |>
+ let
+ open XML.Encode Term_XML.Encode;
+ val encode_subst =
+ pair (list (pair (pair string sort) typ)) (list (pair (pair string typ) (term consts)));
+ in pair string (pair string (pair (list (pair string bool)) encode_subst)) end;
+
+ val _ =
+ if enabled then
+ get_dependencies parents thy |> map_index (fn (i, dep) =>
+ let
+ val xname = string_of_int (i + 1);
+ val name = Long_Name.implode [Context.theory_base_name thy, xname];
+ val markup = make_entity_markup name xname (#pos (#1 dep)) (#serial (#1 dep));
+ val body = encode_locale_dependency dep;
+ in XML.Elem (markup, body) end)
+ |> export_body thy "locale_dependencies"
+ else ();
+
+
+ (* constdefs *)
+
+ val _ =
+ if enabled then
+ let
+ val constdefs =
+ Defs.dest_constdefs (map Theory.defs_of (Theory.parents_of thy)) (Theory.defs_of thy)
+ |> sort_by #1;
+ val encode =
+ let open XML.Encode
+ in list (pair string string) end;
+ in if null constdefs then () else export_body thy "constdefs" (encode constdefs) end
+ else ();
+
+
+ (* spec rules *)
+
+ val encode_specs =
+ let open XML.Encode Term_XML.Encode in
+ list (fn {props, name, rough_classification, typargs, args, terms, rules} =>
+ pair properties (pair string (pair Spec_Rules.encode_rough_classification
+ (pair (list (pair string sort)) (pair (list (pair string typ))
+ (pair (list (pair encode_term typ)) (list encode_term))))))
+ (props, (name, (rough_classification, (typargs, (args, (terms, rules)))))))
+ end;
+
+ val _ =
+ if enabled then
+ (case Spec_Rules.dest_theory thy of
+ [] => ()
+ | spec_rules =>
+ export_body thy "spec_rules" (encode_specs (map spec_rule_content spec_rules)))
+ else ();
+
+
+ (* other entities *)
+
+ fun export_other get_space =
+ let
+ val space = get_space thy;
+ val export_name = "other/" ^ Name_Space.kind_of space;
+ val decls = Name_Space.get_names space |> map (rpair ());
+ in export_entities export_name get_space decls (fn _ => fn () => SOME (K [])) end;
+
+ val other_spaces = other_name_spaces thy;
+ val other_kinds = map (fn get_space => Name_Space.kind_of (get_space thy)) other_spaces;
+ val _ =
+ if null other_kinds then ()
+ else
+ Export.export thy \<^path_binding>\<open>theory/other_kinds\<close>
+ (XML.Encode.string (cat_lines other_kinds));
+ val _ = List.app export_other other_spaces;
+
+ in () end);
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/export_theory.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,763 @@
+/* Title: Pure/Build/export_theory.scala
+ Author: Makarius
+
+Export foundational theory content.
+*/
+
+package isabelle
+
+
+import scala.collection.immutable.SortedMap
+
+
+object Export_Theory {
+ /** session content **/
+
+ sealed case class Session(name: String, theory_graph: Graph[String, Option[Theory]]) {
+ override def toString: String = name
+
+ def theory(theory_name: String): Option[Theory] =
+ if (theory_graph.defined(theory_name)) theory_graph.get_node(theory_name)
+ else None
+
+ def theories: List[Theory] =
+ theory_graph.topological_order.flatMap(theory)
+ }
+
+ def read_session(
+ session_context: Export.Session_Context,
+ session_stack: Boolean = false,
+ progress: Progress = new Progress
+ ): Session = {
+ val thys =
+ for (theory <- theory_names(session_context, session_stack = session_stack)) yield {
+ progress.echo("Reading theory " + theory)
+ read_theory(session_context.theory(theory))
+ }
+
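+    // Build the theory graph in two passes: first add every read theory as a node,
+    // then add its parents (as content-free nodes if unread) with acyclic edges.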
+ val graph0 =
+ thys.foldLeft(Graph.string[Option[Theory]]) {
+ case (g, thy) => g.default_node(thy.name, Some(thy))
+ }
+ val graph1 =
+ thys.foldLeft(graph0) {
+ case (g0, thy) =>
+ thy.parents.foldLeft(g0) {
+ case (g1, parent) => g1.default_node(parent, None).add_edge_acyclic(parent, thy.name)
+ }
+ }
+
+ Session(session_context.session_name, graph1)
+ }
+
+
+
+ /** theory content **/
+
+ sealed case class Theory(name: String, parents: List[String],
+ types: List[Entity[Type]],
+ consts: List[Entity[Const]],
+ axioms: List[Entity[Axiom]],
+ thms: List[Entity[Thm]],
+ classes: List[Entity[Class]],
+ locales: List[Entity[Locale]],
+ locale_dependencies: List[Entity[Locale_Dependency]],
+ classrel: List[Classrel],
+ arities: List[Arity],
+ constdefs: List[Constdef],
+ typedefs: List[Typedef],
+ datatypes: List[Datatype],
+ spec_rules: List[Spec_Rule],
+ others: Map[String, List[Entity[Other]]]
+ ) {
+ override def toString: String = name
+
+ def entity_iterator: Iterator[Entity0] =
+ types.iterator.map(_.no_content) ++
+ consts.iterator.map(_.no_content) ++
+ axioms.iterator.map(_.no_content) ++
+ thms.iterator.map(_.no_content) ++
+ classes.iterator.map(_.no_content) ++
+ locales.iterator.map(_.no_content) ++
+ locale_dependencies.iterator.map(_.no_content) ++
+ (for { (_, xs) <- others; x <- xs.iterator } yield x.no_content)
+
+ def cache(cache: Term.Cache): Theory =
+ Theory(cache.string(name),
+ parents.map(cache.string),
+ types.map(_.cache(cache)),
+ consts.map(_.cache(cache)),
+ axioms.map(_.cache(cache)),
+ thms.map(_.cache(cache)),
+ classes.map(_.cache(cache)),
+ locales.map(_.cache(cache)),
+ locale_dependencies.map(_.cache(cache)),
+ classrel.map(_.cache(cache)),
+ arities.map(_.cache(cache)),
+ constdefs.map(_.cache(cache)),
+ typedefs.map(_.cache(cache)),
+ datatypes.map(_.cache(cache)),
+ spec_rules.map(_.cache(cache)),
+ (for ((k, xs) <- others.iterator) yield cache.string(k) -> xs.map(_.cache(cache))).toMap)
+ }
+
+ def read_theory_parents(theory_context: Export.Theory_Context): Option[List[String]] =
+ theory_context.get(Export.THEORY_PREFIX + "parents")
+ .map(entry => Library.trim_split_lines(entry.text))
+
+ def theory_names(
+ session_context: Export.Session_Context,
+ session_stack: Boolean = false
+ ): List[String] = {
+ val session = if (session_stack) "" else session_context.session_name
+ for {
+ theory <- session_context.theory_names(session = session)
+ if read_theory_parents(session_context.theory(theory)).isDefined
+ } yield theory
+ }
+
+ def no_theory: Theory =
+ Theory("", Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Map.empty)
+
+ def read_theory(
+ theory_context: Export.Theory_Context,
+ permissive: Boolean = false
+ ): Theory = {
+ val cache = theory_context.cache
+ val session_name = theory_context.session_context.session_name
+ val theory_name = theory_context.theory
+ read_theory_parents(theory_context) match {
+ case None if permissive => no_theory
+ case None =>
+ error("Missing theory export in session " + quote(session_name) + ": " + quote(theory_name))
+ case Some(parents) =>
+ val theory =
+ Theory(theory_name, parents,
+ read_types(theory_context),
+ read_consts(theory_context),
+ read_axioms(theory_context),
+ read_thms(theory_context),
+ read_classes(theory_context),
+ read_locales(theory_context),
+ read_locale_dependencies(theory_context),
+ read_classrel(theory_context),
+ read_arities(theory_context),
+ read_constdefs(theory_context),
+ read_typedefs(theory_context),
+ read_datatypes(theory_context),
+ read_spec_rules(theory_context),
+ read_others(theory_context))
+ if (cache.no_cache) theory else theory.cache(cache)
+ }
+ }
+
+
+ /* entities */
+
+ object Kind {
+ val TYPE = "type"
+ val CONST = "const"
+ val THM = "thm"
+ val PROOF = "proof"
+ val LOCALE_DEPENDENCY = "locale_dependency"
+ val DOCUMENT_HEADING = "document_heading"
+ val DOCUMENT_TEXT = "document_text"
+ val PROOF_TEXT = "proof_text"
+ }
+
+ def export_kind(kind: String): String =
+ if (kind == Markup.TYPE_NAME) Kind.TYPE
+ else if (kind == Markup.CONSTANT) Kind.CONST
+ else kind
+
+ def export_kind_name(kind: String, name: String): String =
+ name + "|" + export_kind(kind)
+
+ abstract class Content[T] {
+ def cache(cache: Term.Cache): T
+ }
+ sealed case class No_Content() extends Content[No_Content] {
+ def cache(cache: Term.Cache): No_Content = this
+ }
+ sealed case class Entity[A <: Content[A]](
+ kind: String,
+ name: String,
+ xname: String,
+ pos: Position.T,
+ id: Option[Long],
+ serial: Long,
+ content: Option[A]
+ ) {
+ val kname: String = export_kind_name(kind, name)
+ val range: Symbol.Range = Position.Range.unapply(pos).getOrElse(Text.Range.offside)
+ val file: String = Position.File.unapply(pos).getOrElse("")
+
+ def export_kind: String = Export_Theory.export_kind(kind)
+ override def toString: String = export_kind + " " + quote(name)
+
+ def the_content: A =
+ if (content.isDefined) content.get else error("No content for " + toString)
+
+ def no_content: Entity0 = copy(content = None)
+
+ def cache(cache: Term.Cache): Entity[A] =
+ Entity(
+ cache.string(kind),
+ cache.string(name),
+ cache.string(xname),
+ cache.position(pos),
+ id,
+ serial,
+ content.map(_.cache(cache)))
+ }
+ type Entity0 = Entity[No_Content]
+
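+  // Decode a list of exported entities: each XML element carries name/xname/
+  // position/serial properties, with kind-specific content in the element body
+  // (an empty body yields no content).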
+ def read_entities[A <: Content[A]](
+ theory_context: Export.Theory_Context,
+ export_name: String,
+ kind: String,
+ decode: XML.Decode.T[A]
+ ): List[Entity[A]] = {
+ def decode_entity(tree: XML.Tree): Entity[A] = {
+ def err(): Nothing = throw new XML.XML_Body(List(tree))
+
+ tree match {
+ case XML.Elem(Markup(Markup.ENTITY, props), body) =>
+ val name = Markup.Name.unapply(props) getOrElse err()
+ val xname = Markup.XName.unapply(props) getOrElse err()
+ val pos = props.filter(p => Markup.position_property(p) && p._1 != Markup.ID)
+ val id = Position.Id.unapply(props)
+ val serial = Markup.Serial.unapply(props) getOrElse err()
+ val content = if (body.isEmpty) None else Some(decode(body))
+ Entity(kind, name, xname, pos, id, serial, content)
+ case _ => err()
+ }
+ }
+ theory_context.yxml(export_name).map(decode_entity)
+ }
+
+
+ /* approximative syntax */
+
+ enum Assoc { case NO_ASSOC, LEFT_ASSOC, RIGHT_ASSOC }
+
+ sealed abstract class Syntax
+ case object No_Syntax extends Syntax
+ case class Prefix(delim: String) extends Syntax
+ case class Infix(assoc: Assoc, delim: String, pri: Int) extends Syntax
+
+ def decode_syntax: XML.Decode.T[Syntax] =
+ XML.Decode.variant(List(
+ { case (Nil, Nil) => No_Syntax },
+ { case (List(delim), Nil) => Prefix(delim) },
+ { case (Nil, body) =>
+ import XML.Decode._
+ val (ass, delim, pri) = triple(int, string, int)(body)
+ Infix(Assoc.fromOrdinal(ass), delim, pri) }))
+
+
+ /* types */
+
+ sealed case class Type(syntax: Syntax, args: List[String], abbrev: Option[Term.Typ])
+ extends Content[Type] {
+ override def cache(cache: Term.Cache): Type =
+ Type(
+ syntax,
+ args.map(cache.string),
+ abbrev.map(cache.typ))
+ }
+
+ def read_types(theory_context: Export.Theory_Context): List[Entity[Type]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "types", Markup.TYPE_NAME,
+ { body =>
+ import XML.Decode._
+ val (syntax, args, abbrev) =
+ triple(decode_syntax, list(string), option(Term_XML.Decode.typ))(body)
+ Type(syntax, args, abbrev)
+ })
+
+
+ /* consts */
+
+ sealed case class Const(
+ syntax: Syntax,
+ typargs: List[String],
+ typ: Term.Typ,
+ abbrev: Option[Term.Term],
+ propositional: Boolean
+ ) extends Content[Const] {
+ override def cache(cache: Term.Cache): Const =
+ Const(
+ syntax,
+ typargs.map(cache.string),
+ cache.typ(typ),
+ abbrev.map(cache.term),
+ propositional)
+ }
+
+ def read_consts(theory_context: Export.Theory_Context): List[Entity[Const]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "consts", Markup.CONSTANT,
+ { body =>
+ import XML.Decode._
+ val (syntax, (typargs, (typ, (abbrev, propositional)))) =
+ pair(decode_syntax,
+ pair(list(string),
+ pair(Term_XML.Decode.typ,
+ pair(option(Term_XML.Decode.term), bool))))(body)
+ Const(syntax, typargs, typ, abbrev, propositional)
+ })
+
+
+ /* axioms */
+
+ sealed case class Prop(
+ typargs: List[(String, Term.Sort)],
+ args: List[(String, Term.Typ)],
+ term: Term.Term
+ ) extends Content[Prop] {
+ override def cache(cache: Term.Cache): Prop =
+ Prop(
+ typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+ args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+ cache.term(term))
+ }
+
+ def decode_prop(body: XML.Body): Prop = {
+ val (typargs, args, t) = {
+ import XML.Decode._
+ import Term_XML.Decode._
+ triple(list(pair(string, sort)), list(pair(string, typ)), term)(body)
+ }
+ Prop(typargs, args, t)
+ }
+
+ sealed case class Axiom(prop: Prop) extends Content[Axiom] {
+ override def cache(cache: Term.Cache): Axiom = Axiom(prop.cache(cache))
+ }
+
+ def read_axioms(theory_context: Export.Theory_Context): List[Entity[Axiom]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "axioms", Markup.AXIOM,
+ body => Axiom(decode_prop(body)))
+
+
+ /* theorems */
+
+ sealed case class Thm_Id(serial: Long, theory_name: String)
+
+ sealed case class Thm(
+ prop: Prop,
+ deps: List[String],
+ proof: Term.Proof)
+ extends Content[Thm] {
+ override def cache(cache: Term.Cache): Thm =
+ Thm(
+ prop.cache(cache),
+ deps.map(cache.string),
+ cache.proof(proof))
+ }
+
+ def read_thms(theory_context: Export.Theory_Context): List[Entity[Thm]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "thms", Kind.THM,
+ { body =>
+ import XML.Decode._
+ import Term_XML.Decode._
+ val (prop, deps, prf) = triple(decode_prop, list(string), proof)(body)
+ Thm(prop, deps, prf)
+ })
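+
+  /* Illustration (hypothetical theory_context): list theorems together with
+     their dependencies:
+
+       for (thm <- read_thms(theory_context))
+         println(thm.kname + " <- " + thm.the_content.deps.mkString(", "))
+  */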
+
+ sealed case class Proof(
+ typargs: List[(String, Term.Sort)],
+ args: List[(String, Term.Typ)],
+ term: Term.Term,
+ proof: Term.Proof
+ ) {
+ def prop: Prop = Prop(typargs, args, term)
+
+ def cache(cache: Term.Cache): Proof =
+ Proof(
+ typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+ args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+ cache.term(term),
+ cache.proof(proof))
+ }
+
+ def read_proof(
+ session_context: Export.Session_Context,
+ id: Thm_Id,
+ other_cache: Option[Term.Cache] = None
+ ): Option[Proof] = {
+ val theory_context = session_context.theory(id.theory_name, other_cache = other_cache)
+ val cache = theory_context.cache
+
+ for { entry <- theory_context.get(Export.PROOFS_PREFIX + id.serial) }
+ yield {
+ val body = entry.yxml
+ val (typargs, (args, (prop_body, proof_body))) = {
+ import XML.Decode._
+ import Term_XML.Decode._
+ pair(list(pair(string, sort)), pair(list(pair(string, typ)), pair(x => x, x => x)))(body)
+ }
+ val env = args.toMap
+ val prop = Term_XML.Decode.term_env(env)(prop_body)
+ val proof = Term_XML.Decode.proof_env(env)(proof_body)
+
+ val result = Proof(typargs, args, prop, proof)
+ if (cache.no_cache) result else result.cache(cache)
+ }
+ }
+
+ def read_proof_boxes(
+ session_context: Export.Session_Context,
+ proof: Term.Proof,
+ suppress: Thm_Id => Boolean = _ => false,
+ other_cache: Option[Term.Cache] = None
+ ): List[(Thm_Id, Proof)] = {
+ var seen = Set.empty[Long]
+ var result = SortedMap.empty[Long, (Thm_Id, Proof)]
+
+ def boxes(context: Option[(Long, Term.Proof)], prf: Term.Proof): Unit = {
+ prf match {
+ case Term.Abst(_, _, p) => boxes(context, p)
+ case Term.AbsP(_, _, p) => boxes(context, p)
+ case Term.Appt(p, _) => boxes(context, p)
+ case Term.AppP(p, q) => boxes(context, p); boxes(context, q)
+ case thm: Term.PThm if !seen(thm.serial) =>
+ seen += thm.serial
+ val id = Thm_Id(thm.serial, thm.theory_name)
+ if (!suppress(id)) {
+ Export_Theory.read_proof(session_context, id, other_cache = other_cache) match {
+ case Some(p) =>
+ result += (thm.serial -> (id -> p))
+ boxes(Some((thm.serial, p.proof)), p.proof)
+ case None =>
+ error("Missing proof " + thm.serial + " (theory " + quote (thm.theory_name) + ")" +
+ (context match {
+ case None => ""
+ case Some((i, p)) => " in proof " + i + ":\n" + p
+ }))
+ }
+ }
+ case _ =>
+ }
+ }
+
+ boxes(None, proof)
+ result.iterator.map(_._2).toList
+ }
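+
+  /* Illustration (hypothetical session_context and thm): expand all proof
+     boxes reachable from a theorem, suppressing those of a given theory:
+
+       read_proof_boxes(session_context, thm.the_content.proof,
+         suppress = id => id.theory_name == "HOL.Nat")
+
+     The SortedMap over serials makes the result list deterministic. */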
+
+
+ /* type classes */
+
+ sealed case class Class(params: List[(String, Term.Typ)], axioms: List[Prop])
+ extends Content[Class] {
+ override def cache(cache: Term.Cache): Class =
+ Class(
+ params.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+ axioms.map(_.cache(cache)))
+ }
+
+ def read_classes(theory_context: Export.Theory_Context): List[Entity[Class]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "classes", Markup.CLASS,
+ { body =>
+ import XML.Decode._
+ import Term_XML.Decode._
+ val (params, axioms) = pair(list(pair(string, typ)), list(decode_prop))(body)
+ Class(params, axioms)
+ })
+
+
+ /* locales */
+
+ sealed case class Locale(
+ typargs: List[(String, Term.Sort)],
+ args: List[((String, Term.Typ), Syntax)],
+ axioms: List[Prop]
+ ) extends Content[Locale] {
+ override def cache(cache: Term.Cache): Locale =
+ Locale(
+ typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+ args.map({ case ((name, typ), syntax) => ((cache.string(name), cache.typ(typ)), syntax) }),
+ axioms.map(_.cache(cache)))
+ }
+
+ def read_locales(theory_context: Export.Theory_Context): List[Entity[Locale]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "locales", Markup.LOCALE,
+ { body =>
+ import XML.Decode._
+ import Term_XML.Decode._
+ val (typargs, args, axioms) =
+ triple(list(pair(string, sort)), list(pair(pair(string, typ), decode_syntax)),
+ list(decode_prop))(body)
+ Locale(typargs, args, axioms)
+ })
+
+
+ /* locale dependencies */
+
+ sealed case class Locale_Dependency(
+ source: String,
+ target: String,
+ prefix: List[(String, Boolean)],
+ subst_types: List[((String, Term.Sort), Term.Typ)],
+ subst_terms: List[((String, Term.Typ), Term.Term)]
+ ) extends Content[Locale_Dependency] {
+ override def cache(cache: Term.Cache): Locale_Dependency =
+ Locale_Dependency(
+ cache.string(source),
+ cache.string(target),
+ prefix.map({ case (name, mandatory) => (cache.string(name), mandatory) }),
+ subst_types.map({ case ((a, s), ty) => ((cache.string(a), cache.sort(s)), cache.typ(ty)) }),
+ subst_terms.map({ case ((x, ty), t) => ((cache.string(x), cache.typ(ty)), cache.term(t)) }))
+
+ def is_inclusion: Boolean =
+ subst_types.isEmpty && subst_terms.isEmpty
+ }
+
+ def read_locale_dependencies(
+ theory_context: Export.Theory_Context
+ ): List[Entity[Locale_Dependency]] = {
+ read_entities(theory_context, Export.THEORY_PREFIX + "locale_dependencies",
+ Kind.LOCALE_DEPENDENCY,
+ { body =>
+ import XML.Decode._
+ import Term_XML.Decode._
+ val (source, (target, (prefix, (subst_types, subst_terms)))) =
+ pair(string, pair(string, pair(list(pair(string, bool)),
+ pair(list(pair(pair(string, sort), typ)), list(pair(pair(string, typ), term))))))(body)
+ Locale_Dependency(source, target, prefix, subst_types, subst_terms)
+ })
+ }
+
+
+ /* sort algebra */
+
+ sealed case class Classrel(class1: String, class2: String, prop: Prop) {
+ def cache(cache: Term.Cache): Classrel =
+ Classrel(cache.string(class1), cache.string(class2), prop.cache(cache))
+ }
+
+ def read_classrel(theory_context: Export.Theory_Context): List[Classrel] = {
+ val body = theory_context.yxml(Export.THEORY_PREFIX + "classrel")
+ val classrel = {
+ import XML.Decode._
+ list(pair(decode_prop, pair(string, string)))(body)
+ }
+ for ((prop, (c1, c2)) <- classrel) yield Classrel(c1, c2, prop)
+ }
+
+ sealed case class Arity(
+ type_name: String,
+ domain: List[Term.Sort],
+ codomain: String,
+ prop: Prop
+ ) {
+ def cache(cache: Term.Cache): Arity =
+ Arity(cache.string(type_name), domain.map(cache.sort), cache.string(codomain),
+ prop.cache(cache))
+ }
+
+ def read_arities(theory_context: Export.Theory_Context): List[Arity] = {
+ val body = theory_context.yxml(Export.THEORY_PREFIX + "arities")
+ val arities = {
+ import XML.Decode._
+ import Term_XML.Decode._
+ list(pair(decode_prop, triple(string, list(sort), string)))(body)
+ }
+ for ((prop, (a, b, c)) <- arities) yield Arity(a, b, c, prop)
+ }
+
+
+ /* Pure constdefs */
+
+ sealed case class Constdef(name: String, axiom_name: String) {
+ def cache(cache: Term.Cache): Constdef =
+ Constdef(cache.string(name), cache.string(axiom_name))
+ }
+
+ def read_constdefs(theory_context: Export.Theory_Context): List[Constdef] = {
+ val body = theory_context.yxml(Export.THEORY_PREFIX + "constdefs")
+ val constdefs = {
+ import XML.Decode._
+ list(pair(string, string))(body)
+ }
+ for ((name, axiom_name) <- constdefs) yield Constdef(name, axiom_name)
+ }
+
+
+ /* HOL typedefs */
+
+ sealed case class Typedef(
+ name: String,
+ rep_type: Term.Typ,
+ abs_type: Term.Typ,
+ rep_name: String,
+ abs_name: String,
+ axiom_name: String
+ ) {
+ def cache(cache: Term.Cache): Typedef =
+ Typedef(cache.string(name),
+ cache.typ(rep_type),
+ cache.typ(abs_type),
+ cache.string(rep_name),
+ cache.string(abs_name),
+ cache.string(axiom_name))
+ }
+
+ def read_typedefs(theory_context: Export.Theory_Context): List[Typedef] = {
+ val body = theory_context.yxml(Export.THEORY_PREFIX + "typedefs")
+ val typedefs = {
+ import XML.Decode._
+ import Term_XML.Decode._
+ list(pair(string, pair(typ, pair(typ, pair(string, pair(string, string))))))(body)
+ }
+ for { (name, (rep_type, (abs_type, (rep_name, (abs_name, axiom_name))))) <- typedefs }
+ yield Typedef(name, rep_type, abs_type, rep_name, abs_name, axiom_name)
+ }
+
+
+ /* HOL datatypes */
+
+ sealed case class Datatype(
+ pos: Position.T,
+ name: String,
+ co: Boolean,
+ typargs: List[(String, Term.Sort)],
+ typ: Term.Typ,
+ constructors: List[(Term.Term, Term.Typ)]
+ ) {
+ def id: Option[Long] = Position.Id.unapply(pos)
+
+ def cache(cache: Term.Cache): Datatype =
+ Datatype(
+ cache.position(pos),
+ cache.string(name),
+ co,
+ typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+ cache.typ(typ),
+ constructors.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }))
+ }
+
+ def read_datatypes(theory_context: Export.Theory_Context): List[Datatype] = {
+ val body = theory_context.yxml(Export.THEORY_PREFIX + "datatypes")
+ val datatypes = {
+ import XML.Decode._
+ import Term_XML.Decode._
+ list(pair(properties, pair(string, pair(bool, pair(list(pair(string, sort)),
+ pair(typ, list(pair(term, typ))))))))(body)
+ }
+ for ((pos, (name, (co, (typargs, (typ, constructors))))) <- datatypes)
+ yield Datatype(pos, name, co, typargs, typ, constructors)
+ }
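+
+  /* Illustration (hypothetical theory_context from a HOL session):
+
+       for (dt <- read_datatypes(theory_context)) {
+         val kind = if (dt.co) "codatatype" else "datatype"
+         println(kind + " " + dt.name + ": " + dt.constructors.length + " constructors")
+       }
+  */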
+
+
+ /* Pure spec rules */
+
+ sealed abstract class Recursion {
+ def cache(cache: Term.Cache): Recursion =
+ this match {
+ case Primrec(types) => Primrec(types.map(cache.string))
+ case Primcorec(types) => Primcorec(types.map(cache.string))
+ case _ => this
+ }
+ }
+ case class Primrec(types: List[String]) extends Recursion
+ case object Recdef extends Recursion
+ case class Primcorec(types: List[String]) extends Recursion
+ case object Corec extends Recursion
+ case object Unknown_Recursion extends Recursion
+
+ val decode_recursion: XML.Decode.T[Recursion] = {
+ import XML.Decode._
+ variant(List(
+ { case (Nil, a) => Primrec(list(string)(a)) },
+ { case (Nil, Nil) => Recdef },
+ { case (Nil, a) => Primcorec(list(string)(a)) },
+ { case (Nil, Nil) => Corec },
+ { case (Nil, Nil) => Unknown_Recursion }))
+ }
+
+
+ sealed abstract class Rough_Classification {
+ def is_equational: Boolean = this.isInstanceOf[Equational]
+ def is_inductive: Boolean = this == Inductive
+ def is_co_inductive: Boolean = this == Co_Inductive
+ def is_relational: Boolean = is_inductive || is_co_inductive
+ def is_unknown: Boolean = this == Unknown
+
+ def cache(cache: Term.Cache): Rough_Classification =
+ this match {
+ case Equational(recursion) => Equational(recursion.cache(cache))
+ case _ => this
+ }
+ }
+ case class Equational(recursion: Recursion) extends Rough_Classification
+ case object Inductive extends Rough_Classification
+ case object Co_Inductive extends Rough_Classification
+ case object Unknown extends Rough_Classification
+
+ val decode_rough_classification: XML.Decode.T[Rough_Classification] = {
+ import XML.Decode._
+ variant(List(
+ { case (Nil, a) => Equational(decode_recursion(a)) },
+ { case (Nil, Nil) => Inductive },
+ { case (Nil, Nil) => Co_Inductive },
+ { case (Nil, Nil) => Unknown }))
+ }
+
+
+ sealed case class Spec_Rule(
+ pos: Position.T,
+ name: String,
+ rough_classification: Rough_Classification,
+ typargs: List[(String, Term.Sort)],
+ args: List[(String, Term.Typ)],
+ terms: List[(Term.Term, Term.Typ)],
+ rules: List[Term.Term]
+ ) {
+ def id: Option[Long] = Position.Id.unapply(pos)
+
+ def cache(cache: Term.Cache): Spec_Rule =
+ Spec_Rule(
+ cache.position(pos),
+ cache.string(name),
+ rough_classification.cache(cache),
+ typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+ args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+ terms.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }),
+ rules.map(cache.term))
+ }
+
+ def read_spec_rules(theory_context: Export.Theory_Context): List[Spec_Rule] = {
+ val body = theory_context.yxml(Export.THEORY_PREFIX + "spec_rules")
+ val spec_rules = {
+ import XML.Decode._
+ import Term_XML.Decode._
+ list(
+ pair(properties, pair(string, pair(decode_rough_classification,
+ pair(list(pair(string, sort)), pair(list(pair(string, typ)),
+ pair(list(pair(term, typ)), list(term))))))))(body)
+ }
+ for ((pos, (name, (rough_classification, (typargs, (args, (terms, rules)))))) <- spec_rules)
+ yield Spec_Rule(pos, name, rough_classification, typargs, args, terms, rules)
+ }
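+
+  /* Illustration (hypothetical theory_context): select equational
+     specifications via the rough classification:
+
+       read_spec_rules(theory_context).filter(_.rough_classification.is_equational)
+  */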
+
+
+ /* other entities */
+
+ sealed case class Other() extends Content[Other] {
+ override def cache(cache: Term.Cache): Other = this
+ }
+
+ def read_others(theory_context: Export.Theory_Context): Map[String, List[Entity[Other]]] = {
+ val kinds =
+ theory_context.get(Export.THEORY_PREFIX + "other_kinds") match {
+ case Some(entry) => split_lines(entry.text)
+ case None => Nil
+ }
+ val other = Other()
+ def read_other(kind: String): List[Entity[Other]] =
+ read_entities(theory_context, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other)
+
+ kinds.map(kind => kind -> read_other(kind)).toMap
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/file_format.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,111 @@
+/* Title: Pure/Build/file_format.scala
+ Author: Makarius
+
+Support for user-defined file formats, associated with the active session.
+*/
+
+package isabelle
+
+
+object File_Format {
+ object No_Data extends AnyRef {
+ override def toString: String = "File_Format.No_Data"
+ }
+
+ sealed case class Theory_Context(name: Document.Node.Name, content: String)
+
+
+ /* registry */
+
+ lazy val registry: Registry =
+ new Registry(Isabelle_System.make_services(classOf[File_Format]))
+
+  final class Registry private[File_Format](file_formats: List[File_Format]) {
+    override def toString: String = file_formats.mkString("File_Format.Registry(", ", ", ")")
+
+ def get(name: String): Option[File_Format] = file_formats.find(_.detect(name))
+ def get(name: Document.Node.Name): Option[File_Format] = get(name.node)
+ def get_theory(name: Document.Node.Name): Option[File_Format] = get(name.theory)
+ def is_theory(name: Document.Node.Name): Boolean = get_theory(name).isDefined
+
+ def theory_excluded(name: String): Boolean = file_formats.exists(_.theory_excluded(name))
+
+ def parse_data(name: String, text: String): AnyRef =
+ get(name) match {
+ case Some(file_format) => file_format.parse_data(name, text)
+ case None => No_Data
+ }
+ def parse_data(name: Document.Node.Name, text: String): AnyRef = parse_data(name.node, text)
+
+ def start_session(session: isabelle.Session): Session =
+ new Session(file_formats.map(_.start(session)).filterNot(_ == No_Agent))
+ }
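+
+  /* Illustration: the registry dispatches on file names via detect, so a
+     hypothetical format with file_ext "foo" would be consulted like this
+     (text being some file content):
+
+       File_Format.registry.get("A/b.foo")               // Some(foo format)
+       File_Format.registry.parse_data("A/b.thy", text)  // File_Format.No_Data
+  */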
+
+
+ /* session */
+
+ final class Session private[File_Format](agents: List[Agent]) {
+ override def toString: String =
+ agents.mkString("File_Format.Session(", ", ", ")")
+
+ def prover_options(options: Options): Options =
+ agents.foldLeft(options) { case (opts, agent) => agent.prover_options(opts) }
+
+ def stop_session(): Unit = agents.foreach(_.stop())
+ }
+
+ trait Agent {
+ def prover_options(options: Options): Options = options
+ def stop(): Unit = {}
+ }
+
+ object No_Agent extends Agent
+}
+
+abstract class File_Format extends Isabelle_System.Service {
+ def format_name: String
+ override def toString: String = "File_Format(" + format_name + ")"
+
+ def file_ext: String
+ def detect(name: String): Boolean = name.endsWith("." + file_ext)
+
+ def parse_data(name: String, text: String): AnyRef = File_Format.No_Data
+
+
+ /* implicit theory context: name and content */
+
+ def theory_suffix: String = ""
+ def theory_content(name: String): String = ""
+ def theory_excluded(name: String): Boolean = false
+
+ def make_theory_name(
+ resources: Resources,
+ name: Document.Node.Name
+ ): Option[Document.Node.Name] = {
+ for {
+ theory <- Url.get_base_name(name.node)
+ if detect(name.node) && theory_suffix.nonEmpty
+ }
+ yield {
+ val node = resources.append_path(name.node, Path.explode(theory_suffix))
+ Document.Node.Name(node, theory = theory)
+ }
+ }
+
+ def make_theory_content(resources: Resources, thy_name: Document.Node.Name): Option[String] = {
+ for {
+ prefix <- Url.strip_base_name(thy_name.node)
+ suffix <- Url.get_base_name(thy_name.node)
+ if detect(prefix) && suffix == theory_suffix
+ thy <- Url.get_base_name(prefix)
+ s <- proper_string(theory_content(thy))
+ } yield s
+ }
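+
+  /* Illustration: for the ROOTS file format of Pure/Build/sessions.scala
+     (theory_suffix "ROOTS_file"), a node ".../ROOTS" is mapped by
+     make_theory_name to the synthetic node ".../ROOTS/ROOTS_file", and
+     make_theory_content recovers its content from theory_content. */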
+
+ def html_document(snapshot: Document.Snapshot): Option[Browser_Info.HTML_Document] = None
+
+
+ /* PIDE session agent */
+
+ def start(session: isabelle.Session): File_Format.Agent = File_Format.No_Agent
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/resources.ML Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,483 @@
+(* Title: Pure/Build/resources.ML
+ Author: Makarius
+
+Resources for theories and auxiliary files.
+*)
+
+signature RESOURCES =
+sig
+ val default_qualifier: string
+ val init_session:
+ {session_positions: (string * Properties.T) list,
+ session_directories: (string * string) list,
+ command_timings: Properties.T list,
+ load_commands: (string * Position.T) list,
+ scala_functions: (string * ((bool * bool) * Position.T)) list,
+ global_theories: (string * string) list,
+ loaded_theories: string list} -> unit
+ val init_session_yxml: Bytes.T -> unit
+ val init_session_env: unit -> unit
+ val finish_session_base: unit -> unit
+ val global_theory: string -> string option
+ val loaded_theory: string -> bool
+ val check_session: Proof.context -> string * Position.T -> string
+ val last_timing: Toplevel.transition -> Time.time
+ val check_load_command: Proof.context -> string * Position.T -> string
+ val check_scala_function: Proof.context -> string * Position.T -> string * (bool * bool)
+ val master_directory: theory -> Path.T
+ val imports_of: theory -> (string * Position.T) list
+ val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
+ val thy_path: Path.T -> Path.T
+ val theory_qualifier: string -> string
+ val find_theory_file: string -> Path.T option
+ val import_name: string -> Path.T -> string ->
+ {node_name: Path.T, master_dir: Path.T, theory_name: string}
+ val check_thy: Path.T -> string ->
+ {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
+ imports: (string * Position.T) list, keywords: Thy_Header.keywords}
+ val read_file_node: string -> Path.T -> Path.T * Position.T -> Token.file
+ val read_file: Path.T -> Path.T * Position.T -> Token.file
+ val parsed_files: (Path.T -> Path.T list) ->
+ Token.file Exn.result list * Input.source -> theory -> Token.file list
+ val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser
+ val parse_file: (theory -> Token.file) parser
+ val provide: Path.T * SHA1.digest -> theory -> theory
+ val provide_file: Token.file -> theory -> theory
+ val provide_file': Token.file -> theory -> Token.file * theory
+ val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser
+ val provide_parse_file: (theory -> Token.file * theory) parser
+ val loaded_files_current: theory -> bool
+ val check_path: Proof.context -> Path.T option -> Input.source -> Path.T
+ val check_file: Proof.context -> Path.T option -> Input.source -> Path.T
+ val check_dir: Proof.context -> Path.T option -> Input.source -> Path.T
+ val check_session_dir: Proof.context -> Path.T option -> Input.source -> Path.T
+end;
+
+structure Resources: RESOURCES =
+struct
+
+(* command timings *)
+
+type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*)
+
+val empty_timings: timings = Symtab.empty;
+
+fun update_timings props =
+ (case Markup.parse_command_timing_properties props of
+ SOME ({file, offset, name}, time) =>
+ Symtab.map_default (file, Inttab.empty)
+ (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time)))
+ | NONE => I);
+
+fun make_timings command_timings =
+ fold update_timings command_timings empty_timings;
+
+fun approximative_id name pos =
+ (case (Position.file_of pos, Position.offset_of pos) of
+ (SOME file, SOME offset) =>
+ if name = "" then NONE else SOME {file = file, offset = offset, name = name}
+ | _ => NONE);
+
+fun get_timings timings tr =
+ (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of
+ SOME {file, offset, name} =>
+ (case Symtab.lookup timings file of
+ SOME offsets =>
+ (case Inttab.lookup offsets offset of
+ SOME (name', time) => if name = name' then SOME time else NONE
+ | NONE => NONE)
+ | NONE => NONE)
+ | NONE => NONE)
+ |> the_default Time.zeroTime;
+
+
+(* session base *)
+
+val default_qualifier = "Draft";
+
+type entry = {pos: Position.T, serial: serial};
+
+fun make_entry props : entry =
+ {pos = Position.of_properties props, serial = serial ()};
+
+val empty_session_base =
+ ({session_positions = []: (string * entry) list,
+ session_directories = Symtab.empty: Path.T list Symtab.table,
+ timings = empty_timings,
+ load_commands = []: (string * Position.T) list,
+ scala_functions = Symtab.empty: ((bool * bool) * Position.T) Symtab.table},
+ {global_theories = Symtab.empty: string Symtab.table,
+ loaded_theories = Symset.empty: Symset.T});
+
+val global_session_base =
+ Synchronized.var "Sessions.base" empty_session_base;
+
+fun init_session
+ {session_positions, session_directories, command_timings, load_commands,
+ scala_functions, global_theories, loaded_theories} =
+ Synchronized.change global_session_base
+ (fn _ =>
+ ({session_positions = sort_by #1 (map (apsnd make_entry) session_positions),
+ session_directories =
+ Symtab.build (session_directories |> fold_rev (fn (dir, name) =>
+ Symtab.cons_list (name, Path.explode dir))),
+ timings = make_timings command_timings,
+ load_commands = load_commands,
+ scala_functions = Symtab.make scala_functions},
+ {global_theories = Symtab.make global_theories,
+ loaded_theories = Symset.make loaded_theories}));
+
+fun init_session_yxml yxml =
+ let
+ val (session_positions, (session_directories, (command_timings,
+ (load_commands, (scala_functions, (global_theories, loaded_theories)))))) =
+ YXML.parse_body_bytes yxml |>
+ let open XML.Decode in
+ (pair (list (pair string properties))
+ (pair (list (pair string string))
+ (pair (list properties)
+ (pair (list (pair string properties))
+ (pair (list (pair string (pair (pair bool bool) properties)))
+ (pair (list (pair string string)) (list string)))))))
+ end;
+ in
+ init_session
+ {session_positions = session_positions,
+ session_directories = session_directories,
+ command_timings = command_timings,
+ load_commands = (map o apsnd) Position.of_properties load_commands,
+ scala_functions = (map o apsnd o apsnd) Position.of_properties scala_functions,
+ global_theories = global_theories,
+ loaded_theories = loaded_theories}
+ end;
+
+fun init_session_env () =
+ (case getenv "ISABELLE_INIT_SESSION" of
+ "" => ()
+ | name =>
+ try Bytes.read (Path.explode name)
+ |> Option.app init_session_yxml);
+
+val _ = init_session_env ();
+
+fun finish_session_base () =
+ Synchronized.change global_session_base
+ (apfst (K (#1 empty_session_base)));
+
+fun get_session_base f = f (Synchronized.value global_session_base);
+fun get_session_base1 f = get_session_base (f o #1);
+fun get_session_base2 f = get_session_base (f o #2);
+
+fun global_theory a = Symtab.lookup (get_session_base2 #global_theories) a;
+fun loaded_theory a = Symset.member (get_session_base2 #loaded_theories) a;
+
+fun check_session ctxt arg =
+ Completion.check_item "session"
+ (fn (name, {pos, serial}) =>
+ Position.make_entity_markup {def = false} serial Markup.sessionN (name, pos))
+ (get_session_base1 #session_positions) ctxt arg;
+
+fun last_timing tr = get_timings (get_session_base1 #timings) tr;
+
+fun check_load_command ctxt arg =
+ Completion.check_entity Markup.load_commandN (get_session_base1 #load_commands) ctxt arg;
+
+
+(* Scala functions *)
+
+fun check_scala_function ctxt arg =
+ let
+ val table = get_session_base1 #scala_functions;
+ val funs = Symtab.fold (fn (a, (_, pos)) => cons (a, pos)) table [] |> sort_by #1;
+ val name = Completion.check_entity Markup.scala_functionN funs ctxt arg;
+ val flags = #1 (the (Symtab.lookup table name));
+ in (name, flags) end;
+
+val _ = Theory.setup
+ (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>scala_function\<close>
+ (Scan.lift Parse.embedded_position) (#1 oo check_scala_function) #>
+ ML_Antiquotation.inline_embedded \<^binding>\<open>scala_function\<close>
+ (Args.context -- Scan.lift Parse.embedded_position
+ >> (uncurry check_scala_function #> #1 #> ML_Syntax.print_string)) #>
+ ML_Antiquotation.value_embedded \<^binding>\<open>scala\<close>
+ (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, arg) =>
+ let
+ val (name, (single, bytes)) = check_scala_function ctxt arg;
+ val func =
+ (if single then "Scala.function1" else "Scala.function") ^
+ (if bytes then "_bytes" else "");
+ in ML_Syntax.atomic (func ^ " " ^ ML_Syntax.print_string name) end)));
+
+
+(* manage source files *)
+
+type files =
+ {master_dir: Path.T, (*master directory of theory source*)
+ imports: (string * Position.T) list, (*source specification of imports*)
+ provided: (Path.T * SHA1.digest) list}; (*source path, digest*)
+
+fun make_files (master_dir, imports, provided): files =
+ {master_dir = master_dir, imports = imports, provided = provided};
+
+structure Files = Theory_Data
+(
+ type T = files;
+ val empty = make_files (Path.current, [], []);
+ fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) =
+ let val provided' = Library.merge (op =) (provided1, provided2)
+ in make_files (master_dir, imports, provided') end
+);
+
+fun map_files f =
+ Files.map (fn {master_dir, imports, provided} =>
+ make_files (f (master_dir, imports, provided)));
+
+
+val master_directory = #master_dir o Files.get;
+val imports_of = #imports o Files.get;
+
+fun begin_theory master_dir {name, imports, keywords} parents =
+ let
+ val thy =
+ Theory.begin_theory name parents
+ |> map_files (fn _ => (Path.explode (File.symbolic_path master_dir), imports, []))
+ |> Thy_Header.add_keywords keywords;
+ val ctxt = Proof_Context.init_global thy;
+ val _ = List.app (ignore o check_load_command ctxt o #load_command o #2) keywords;
+ in thy end;
+
+
+(* theory files *)
+
+val thy_path = Path.ext "thy";
+
+fun theory_qualifier theory =
+ (case global_theory theory of
+ SOME qualifier => qualifier
+ | NONE => Long_Name.qualifier theory);
+
+fun literal_theory theory =
+ Long_Name.is_qualified theory orelse is_some (global_theory theory);
+
+fun theory_name qualifier theory =
+ if literal_theory theory then theory
+ else Long_Name.qualify qualifier theory;
+
+fun find_theory_file thy_name =
+ let
+ val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name));
+ val session = theory_qualifier thy_name;
+ val dirs = Symtab.lookup_list (get_session_base1 #session_directories) session;
+ in
+ dirs |> get_first (fn dir =>
+ let val path = dir + thy_file
+ in if File.is_file path then SOME path else NONE end)
+ end;
+
+fun make_theory_node node_name theory =
+ {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory};
+
+fun loaded_theory_node theory =
+ {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory};
+
+fun import_name qualifier dir s =
+ let
+ val theory = theory_name qualifier (Thy_Header.import_name s);
+ fun theory_node path = make_theory_node path theory;
+ val literal_import = literal_theory theory andalso qualifier <> theory_qualifier theory;
+ val _ =
+ if literal_import andalso not (Thy_Header.is_base_name s) then
+ error ("Bad import of theory from other session via file-path: " ^ quote s)
+ else ();
+ in
+ if loaded_theory theory then loaded_theory_node theory
+ else
+ (case find_theory_file theory of
+ SOME node_name => theory_node node_name
+ | NONE =>
+ if Thy_Header.is_base_name s andalso Long_Name.is_qualified s
+ then loaded_theory_node theory
+ else theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))))
+ end;
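+
+(*Illustration: within session qualifier "HOL-Library", the import "Multiset"
+  becomes theory "HOL-Library.Multiset"; already qualified or global names are
+  taken literally, and only base names may cross session boundaries.*)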
+
+fun check_file dir file = File.check_file (File.full_path dir file);
+
+fun check_thy dir thy_name =
+ let
+ val thy_base_name = Long_Name.base_name thy_name;
+ val master_file =
+ (case find_theory_file thy_name of
+ SOME path => check_file Path.current path
+ | NONE => check_file dir (thy_path (Path.basic thy_base_name)));
+ val text = File.read master_file;
+
+ val {name = (name, pos), imports, keywords} =
+ Thy_Header.read (Position.file (File.symbolic_path master_file)) text;
+ val _ =
+ thy_base_name <> name andalso
+ error ("Bad theory name " ^ quote name ^
+ " for file " ^ Path.print (Path.base master_file) ^ Position.here pos);
+ in
+ {master = (master_file, SHA1.digest text), text = text, theory_pos = pos,
+ imports = imports, keywords = keywords}
+ end;
+
+
+(* read_file *)
+
+fun read_file_node file_node master_dir (src_path, pos) =
+ let
+ fun read_local () =
+ let
+ val path = File.check_file (File.full_path master_dir src_path);
+ val text = File.read path;
+ val file_pos = Position.file (File.symbolic_path path);
+ in (text, file_pos) end;
+
+ fun read_remote () =
+ let
+ val text = Bytes.content (Isabelle_System.download file_node);
+ val file_pos = Position.file file_node;
+ in (text, file_pos) end;
+
+ val (text, file_pos) =
+ (case try Url.explode file_node of
+ NONE => read_local ()
+ | SOME (Url.File _) => read_local ()
+ | _ => read_remote ());
+
+ val lines = split_lines text;
+ val digest = SHA1.digest text;
+ in {src_path = src_path, lines = lines, digest = digest, pos = Position.copy_id pos file_pos} end
+ handle ERROR msg => error (msg ^ Position.here pos);
+
+val read_file = read_file_node "";
+
+
+(* load files *)
+
+fun parsed_files make_paths (files, source) thy =
+ if null files then
+ let
+ val master_dir = master_directory thy;
+ val name = Input.string_of source;
+ val pos = Input.pos_of source;
+ val delimited = Input.is_delimited source;
+ val src_paths = make_paths (Path.explode name);
+ val reports =
+ src_paths |> maps (fn src_path =>
+ [(pos, Markup.path (File.symbolic_path (master_dir + src_path))),
+ (pos, Markup.language_path delimited)]);
+ val _ = Position.reports reports;
+ in map (read_file master_dir o rpair pos) src_paths end
+ else map Exn.release files;
+
+fun parse_files make_paths =
+ (Scan.ahead Parse.not_eof >> Token.get_files) -- Parse.path_input >> parsed_files make_paths;
+
+val parse_file = parse_files single >> (fn f => f #> the_single);
+
+
+fun provide (src_path, id) =
+ map_files (fn (master_dir, imports, provided) =>
+ if AList.defined (op =) provided src_path then
+ error ("Duplicate use of source file: " ^ Path.print src_path)
+ else (master_dir, imports, (src_path, id) :: provided));
+
+fun provide_file (file: Token.file) = provide (#src_path file, #digest file);
+fun provide_file' file thy = (file, provide_file file thy);
+
+fun provide_parse_files make_paths =
+ parse_files make_paths >> (fn files => fn thy => fold_map provide_file' (files thy) thy);
+
+val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single);
+
+
+fun load_file thy src_path =
+ let
+ val full_path = check_file (master_directory thy) src_path;
+ val text = File.read full_path;
+ val id = SHA1.digest text;
+ in ((full_path, id), text) end;
+
+fun loaded_files_current thy =
+ #provided (Files.get thy) |>
+ forall (fn (src_path, id) =>
+ (case try (load_file thy) src_path of
+ NONE => false
+ | SOME ((_, id'), _) => id = id'));
+
+
+(* formal check *)
+
+fun formal_check (check: Path.T -> Path.T) ctxt opt_dir source =
+ let
+ val name = Input.string_of source;
+ val pos = Input.pos_of source;
+ val delimited = Input.is_delimited source;
+
+ val _ = Context_Position.report ctxt pos (Markup.language_path delimited);
+
+ fun err msg = error (msg ^ Position.here pos);
+ val dir =
+ (case opt_dir of
+ SOME dir => dir
+ | NONE => master_directory (Proof_Context.theory_of ctxt));
+ val path = dir + Path.explode name handle ERROR msg => err msg;
+ val _ = Path.expand path handle ERROR msg => err msg;
+ val _ = Context_Position.report ctxt pos (Markup.path (File.symbolic_path path));
+ val _ = check path handle ERROR msg => err msg;
+ in path end;
+
+val check_path = formal_check I;
+val check_file = formal_check File.check_file;
+val check_dir = formal_check File.check_dir;
+
+fun check_session_dir ctxt opt_dir s =
+ let
+ val dir = Path.expand (check_dir ctxt opt_dir s);
+ val ok =
+ File.is_file (dir + Path.explode("ROOT")) orelse
+ File.is_file (dir + Path.explode("ROOTS"));
+ in
+ if ok then dir
+ else
+ error ("Bad session root directory (missing ROOT or ROOTS): " ^
+ Path.print dir ^ Position.here (Input.pos_of s))
+ end;
+
+
+(* antiquotations *)
+
+local
+
+fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) =
+ Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
+ (check ctxt NONE source;
+ Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source))
+ |> Latex.macro "isatt"));
+
+fun ML_antiq check =
+ Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
+ check ctxt (SOME Path.current) source |> ML_Syntax.print_path);
+
+in
+
+val _ = Theory.setup
+ (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>session\<close>
+ (Scan.lift Parse.embedded_position) check_session #>
+ Document_Output.antiquotation_raw_embedded \<^binding>\<open>path\<close> (document_antiq check_path) (K I) #>
+ Document_Output.antiquotation_raw_embedded \<^binding>\<open>file\<close> (document_antiq check_file) (K I) #>
+ Document_Output.antiquotation_raw_embedded \<^binding>\<open>dir\<close> (document_antiq check_dir) (K I) #>
+ ML_Antiquotation.value_embedded \<^binding>\<open>path\<close> (ML_antiq check_path) #>
+ ML_Antiquotation.value_embedded \<^binding>\<open>file\<close> (ML_antiq check_file) #>
+ ML_Antiquotation.value_embedded \<^binding>\<open>dir\<close> (ML_antiq check_dir) #>
+ ML_Antiquotation.value_embedded \<^binding>\<open>path_binding\<close>
+ (Scan.lift (Parse.position Parse.path) >>
+ (ML_Syntax.print_path_binding o Path.explode_binding)) #>
+ ML_Antiquotation.value \<^binding>\<open>master_dir\<close>
+ (Args.theory >> (ML_Syntax.print_path o master_directory)));
+
+end;
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/resources.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,469 @@
+/* Title: Pure/Build/resources.scala
+ Author: Makarius
+
+Resources for theories and auxiliary files.
+*/
+
+package isabelle
+
+
+import scala.util.parsing.input.Reader
+
+import java.io.{File => JFile}
+
+
+object Resources {
+ def bootstrap: Resources = new Resources(Sessions.Background(base = Sessions.Base.bootstrap))
+
+ def hidden_node(name: Document.Node.Name): Boolean =
+ !name.is_theory || name.theory == Sessions.root_name || File_Format.registry.is_theory(name)
+
+ def html_document(snapshot: Document.Snapshot): Option[Browser_Info.HTML_Document] =
+ File_Format.registry.get(snapshot.node_name).flatMap(_.html_document(snapshot))
+}
+
+class Resources(
+ val session_background: Sessions.Background,
+ val log: Logger = No_Logger,
+ command_timings: List[Properties.T] = Nil
+) {
+ resources =>
+
+ def sessions_structure: Sessions.Structure = session_background.sessions_structure
+ def session_base: Sessions.Base = session_background.base
+
+ override def toString: String = "Resources(" + session_base.print_body + ")"
+
+
+ /* init session */
+
+ def init_session_yxml: String = {
+ import XML.Encode._
+
+ YXML.string_of_body(
+ pair(list(pair(string, properties)),
+ pair(list(pair(string, string)),
+ pair(list(properties),
+ pair(list(pair(string, properties)),
+ pair(list(Scala.encode_fun),
+ pair(list(pair(string, string)), list(string)))))))(
+ (sessions_structure.session_positions,
+ (sessions_structure.dest_session_directories,
+ (command_timings,
+ (Command_Span.load_commands.map(cmd => (cmd.name, cmd.position)),
+ (Scala.functions,
+ (sessions_structure.global_theories.toList,
+ session_base.loaded_theories.keys))))))))
+ }
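+
+  /* Note: this encoder mirrors the decoder in Pure/Build/resources.ML
+     (init_session_yxml); the nested pair structure must stay in sync on
+     both sides, e.g. list(pair(string, properties)) here corresponds to
+     list (pair string properties) there. */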
+
+
+ /* file formats */
+
+ def make_theory_name(name: Document.Node.Name): Option[Document.Node.Name] =
+ File_Format.registry.get(name).flatMap(_.make_theory_name(resources, name))
+
+ def make_theory_content(thy_name: Document.Node.Name): Option[String] =
+ File_Format.registry.get_theory(thy_name).flatMap(_.make_theory_content(resources, thy_name))
+
+
+ /* file-system operations */
+
+ def migrate_name(name: Document.Node.Name): Document.Node.Name = name
+
+ def append_path(prefix: String, source_path: Path): String =
+ File.standard_path(Path.explode(prefix) + source_path)
+
+ def read_dir(dir: String): List[String] = File.read_dir(Path.explode(dir))
+
+ def list_thys(dir: String): List[String] = {
+ val entries = try { read_dir(dir) } catch { case ERROR(_) => Nil }
+ entries.flatMap(Thy_Header.get_thy_name)
+ }
+
+
+ /* source files of Isabelle/ML bootstrap */
+
+ def source_file(raw_name: String): Option[String] = {
+ if (Path.is_wellformed(raw_name)) {
+ if (Path.is_valid(raw_name)) {
+ def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None
+
+ val path = Path.explode(raw_name)
+ val path1 =
+ if (path.is_absolute || path.is_current) check(path)
+ else {
+ check(Path.explode("~~/src/Pure") + path) orElse
+ (if (Isabelle_System.getenv("ML_SOURCES") == "") None
+ else check(Path.explode("$ML_SOURCES") + path))
+ }
+ Some(File.platform_path(path1 getOrElse path))
+ }
+ else None
+ }
+ else Some(raw_name)
+ }
+
+
+ /* theory files */
+
+ def load_commands(
+ syntax: Outer_Syntax,
+ name: Document.Node.Name
+ ) : () => List[Command_Span.Span] = {
+ val (is_utf8, raw_text) =
+ with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
+ () =>
+ {
+ if (syntax.has_load_commands(raw_text)) {
+ val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
+ syntax.parse_spans(text).filter(_.is_load_command(syntax))
+ }
+ else Nil
+ }
+ }
+
+ def loaded_files(
+ syntax: Outer_Syntax,
+ name: Document.Node.Name,
+ spans: List[Command_Span.Span]
+ ) : List[Document.Node.Name] = {
+ for (span <- spans; file <- span.loaded_files(syntax).files)
+ yield Document.Node.Name(append_path(name.master_dir, Path.explode(file)))
+ }
+
+ def pure_files(syntax: Outer_Syntax): List[Document.Node.Name] =
+ (for {
+ (file, theory) <- Thy_Header.ml_roots.iterator
+ node = append_path("~~/src/Pure", Path.explode(file))
+ node_name = Document.Node.Name(node, theory = theory)
+ name <- loaded_files(syntax, node_name, load_commands(syntax, node_name)()).iterator
+ } yield name).toList
+
+ def global_theory(theory: String): Boolean =
+ sessions_structure.global_theories.isDefinedAt(theory)
+
+ def literal_theory(theory: String): Boolean =
+ Long_Name.is_qualified(theory) || global_theory(theory)
+
+ def theory_name(qualifier: String, theory: String): String =
+ if (literal_theory(theory)) theory
+ else Long_Name.qualify(qualifier, theory)
+
+ def find_theory_node(theory: String): Option[Document.Node.Name] = {
+ val thy_file = Path.basic(Long_Name.base_name(theory)).thy
+ val session = sessions_structure.theory_qualifier(theory)
+ val dirs =
+ sessions_structure.get(session) match {
+ case Some(info) => info.dirs
+ case None => Nil
+ }
+ dirs.collectFirst { case dir if (dir + thy_file).is_file =>
+ Document.Node.Name(append_path("", dir + thy_file), theory = theory) }
+ }
+
+ def import_name(qualifier: String, prefix: String, s: String): Document.Node.Name = {
+ val theory = theory_name(qualifier, Thy_Header.import_name(s))
+ val literal_import =
+ literal_theory(theory) && qualifier != sessions_structure.theory_qualifier(theory)
+ if (literal_import && !Url.is_base_name(s)) {
+ error("Bad import of theory from other session via file-path: " + quote(s))
+ }
+ if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
+ else {
+ find_theory_node(theory) match {
+ case Some(node_name) => node_name
+ case None =>
+ if (Url.is_base_name(s) && literal_theory(s)) Document.Node.Name.loaded_theory(theory)
+ else Document.Node.Name(append_path(prefix, Path.explode(s).thy), theory = theory)
+ }
+ }
+ }
+
+ def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
+ import_name(sessions_structure.theory_qualifier(name), name.master_dir, s)
+
+ def import_name(info: Sessions.Info, s: String): Document.Node.Name =
+ import_name(info.name, info.dir.implode, s)
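+
+  /* Illustration (hypothetical node_name): an unqualified import "Util" is
+     resolved as theory <qualifier>.Util, via find_theory_node if possible,
+     otherwise as file Util.thy relative to the importing node:
+
+       import_name(node_name, "Util")
+       import_name(node_name, "HOL-Library.Multiset")  // from another session
+  */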
+
+ def find_theory(file: JFile): Option[Document.Node.Name] = {
+ for {
+ qualifier <- sessions_structure.session_directories.get(File.canonical(file).getParentFile)
+ theory_base <- proper_string(Thy_Header.theory_name(file.getName))
+ theory = theory_name(qualifier, theory_base)
+ theory_node <- find_theory_node(theory)
+ if File.eq(theory_node.path.file, file)
+ } yield theory_node
+ }
+
+ def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = {
+ val context_session = sessions_structure.theory_qualifier(context_name)
+ def context_dir(): List[String] = list_thys(context_name.master_dir)
+ def session_dir(info: Sessions.Info): List[String] = info.dirs.flatMap(Thy_Header.list_thys)
+ (for {
+ (session, (info, _)) <- sessions_structure.imports_graph.iterator
+ theory <- (if (session == context_session) context_dir() else session_dir(info)).iterator
+ if Completion.completed(s)(theory)
+ } yield {
+ if (session == context_session || global_theory(theory)) theory
+ else Long_Name.qualify(session, theory)
+ }).toList.sorted
+ }
+
+ def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A = {
+ val path = name.path
+ if (path.is_file) using(Scan.byte_reader(path.file))(f)
+ else if (name.node == name.theory)
+ error("Cannot load theory " + quote(name.theory))
+ else error ("Cannot load theory file " + path)
+ }
+
+ def check_thy(
+ node_name: Document.Node.Name,
+ reader: Reader[Char],
+ command: Boolean = true,
+ strict: Boolean = true
+ ): Document.Node.Header = {
+ if (node_name.is_theory && reader.source.length > 0) {
+ try {
+ val header = Thy_Header.read(node_name, reader, command = command, strict = strict)
+ val imports =
+ header.imports.map({ case (s, pos) =>
+ val name = import_name(node_name, s)
+ if (Sessions.illegal_theory(name.theory_base_name)) {
+ error("Illegal theory name " + quote(name.theory_base_name) + Position.here(pos))
+ }
+ else (name, pos)
+ })
+ Document.Node.Header(imports, header.keywords, header.abbrevs)
+ }
+ catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
+ }
+ else Document.Node.no_header
+ }
+
+
+ /* special header */
+
+ def special_header(name: Document.Node.Name): Option[Document.Node.Header] = {
+ val imports =
+ if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_import))
+ else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP))
+ else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE))
+ else Nil
+ if (imports.isEmpty) None
+ else Some(Document.Node.Header(imports.map((_, Position.none))))
+ }
+
+
+ /* blobs */
+
+ def undefined_blobs(version: Document.Version): List[Document.Node.Name] =
+ (for {
+ (node_name, node) <- version.nodes.iterator
+ if !session_base.loaded_theory(node_name)
+ cmd <- node.load_commands.iterator
+ name <- cmd.blobs_undefined.iterator
+ } yield name).toList
+
+
+ /* document changes */
+
+ def parse_change(
+ reparse_limit: Int,
+ previous: Document.Version,
+ doc_blobs: Document.Blobs,
+ edits: List[Document.Edit_Text],
+ consolidate: List[Document.Node.Name]): Session.Change =
+ Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate)
+
+ def commit(change: Session.Change): Unit = {}
+
+
+ /* theory and file dependencies */
+
+ def dependencies(
+ thys: List[(Document.Node.Name, Position.T)],
+ progress: Progress = new Progress): Dependencies[Unit] =
+ Dependencies.empty[Unit].require_thys((), thys, progress = progress)
+
+ def session_dependencies(
+ info: Sessions.Info,
+ progress: Progress = new Progress
+ ) : Dependencies[Options] = {
+ info.theories.foldLeft(Dependencies.empty[Options]) {
+ case (dependencies, (options, thys)) =>
+ dependencies.require_thys(options,
+ for { (thy, pos) <- thys } yield (import_name(info, thy), pos),
+ progress = progress)
+ }
+ }
+
+ object Dependencies {
+ def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty)
+
+ private def show_path(names: List[Document.Node.Name]): String =
+ names.map(name => quote(name.theory)).mkString(" via ")
+
+ private def cycle_msg(names: List[Document.Node.Name]): String =
+ "Cyclic dependency of " + show_path(names)
+
+ private def required_by(initiators: List[Document.Node.Name]): String =
+ if_proper(initiators, "\n(required by " + show_path(initiators.reverse) + ")")
+ }
+
+ final class Dependencies[A] private(
+ rev_entries: List[Document.Node.Entry],
+ seen: Map[Document.Node.Name, A]
+ ) {
+ private def cons(entry: Document.Node.Entry): Dependencies[A] =
+ new Dependencies[A](entry :: rev_entries, seen)
+
+ def require_thy(adjunct: A,
+ thy: (Document.Node.Name, Position.T),
+ initiators: List[Document.Node.Name] = Nil,
+ progress: Progress = new Progress): Dependencies[A] = {
+ val (name, pos) = thy
+
+ def message: String =
+ "The error(s) above occurred for theory " + quote(name.theory) +
+ Dependencies.required_by(initiators) + Position.here(pos)
+
+ if (seen.isDefinedAt(name)) this
+ else {
+ val dependencies1 = new Dependencies[A](rev_entries, seen + (name -> adjunct))
+ if (session_base.loaded_theory(name)) dependencies1
+ else {
+ try {
+ if (initiators.contains(name)) error(Dependencies.cycle_msg(initiators))
+
+ progress.expose_interrupt()
+ val header =
+ try {
+ with_thy_reader(name, check_thy(name, _, command = false)).cat_errors(message)
+ }
+ catch { case ERROR(msg) => cat_error(msg, message) }
+ val entry = Document.Node.Entry(name, header)
+ dependencies1.require_thys(adjunct, header.imports_pos,
+ initiators = name :: initiators, progress = progress).cons(entry)
+ }
+ catch {
+ case e: Throwable =>
+ dependencies1.cons(Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))))
+ }
+ }
+ }
+ }
+
+ def require_thys(adjunct: A,
+ thys: List[(Document.Node.Name, Position.T)],
+ progress: Progress = new Progress,
+ initiators: List[Document.Node.Name] = Nil): Dependencies[A] =
+ thys.foldLeft(this)(_.require_thy(adjunct, _, progress = progress, initiators = initiators))
+
+ def entries: List[Document.Node.Entry] = rev_entries.reverse
+
+ def theories: List[Document.Node.Name] = entries.map(_.name)
+ def theories_adjunct: List[(Document.Node.Name, A)] = theories.map(name => (name, seen(name)))
+
+ def errors: List[String] = entries.flatMap(_.header.errors)
+
+ def check_errors: Dependencies[A] =
+ errors match {
+ case Nil => this
+ case errs => error(cat_lines(errs))
+ }
+
+ lazy val theory_graph: Document.Node.Name.Graph[Unit] = {
+ val regular = theories.toSet
+ val irregular =
+ (for {
+ entry <- entries.iterator
+ imp <- entry.header.imports
+ if !regular(imp)
+ } yield imp).toSet
+
+ Document.Node.Name.make_graph(
+ irregular.toList.map(name => ((name, ()), Nil)) :::
+ entries.map(entry => ((entry.name, ()), entry.header.imports)))
+ }
+
+ lazy val loaded_theories: Graph[String, Outer_Syntax] =
+ entries.foldLeft(session_base.loaded_theories) {
+ case (graph, entry) =>
+ val name = entry.name.theory
+ val imports = entry.header.imports.map(_.theory)
+
+ val graph1 = (name :: imports).foldLeft(graph)(_.default_node(_, Outer_Syntax.empty))
+ val graph2 = imports.foldLeft(graph1)(_.add_edge(_, name))
+
+ val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil
+ val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node)
+ val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header
+
+ graph2.map_node(name, _ => syntax)
+ }
+
+ def get_syntax(name: Document.Node.Name): Outer_Syntax =
+ loaded_theories.get_node(name.theory)
+
+ lazy val load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
+ theories.zip(
+ Par_List.map((e: () => List[Command_Span.Span]) => e(),
+ theories.map(name => resources.load_commands(get_syntax(name), name))))
+ .filter(p => p._2.nonEmpty)
+
+ def loaded_files(
+ name: Document.Node.Name,
+ spans: List[Command_Span.Span]
+ ) : (String, List[Document.Node.Name]) = {
+ val theory = name.theory
+ val syntax = get_syntax(name)
+ val files1 = resources.loaded_files(syntax, name, spans)
+ val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil
+ (theory, files1 ::: files2)
+ }
+
+ def loaded_files: List[Document.Node.Name] =
+ for {
+ (name, spans) <- load_commands
+ file <- loaded_files(name, spans)._2
+ } yield file
+
+ def imported_files: List[Path] = {
+ val base_theories =
+ loaded_theories.all_preds(theories.map(_.theory)).
+ filter(session_base.loaded_theories.defined)
+
+ base_theories.map(theory => session_base.known_theories(theory).name.path) :::
+ base_theories.flatMap(session_base.known_loaded_files.withDefaultValue(Nil))
+ }
+
+ lazy val overall_syntax: Outer_Syntax =
+ Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node))
+
+ override def toString: String = entries.toString
+ }
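+
+  /* Illustration (hypothetical node_name): require some root theories and
+     fail on bad headers:
+
+       resources.dependencies(List((node_name, Position.none)))
+         .check_errors.theories.foreach(name => println(name.theory))
+  */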
+
+
+ /* resolve implicit theory dependencies */
+
+ def resolve_dependencies[A](
+ models: Iterable[Document.Model],
+ theories: List[Document.Node.Name]
+ ): List[Document.Node.Name] = {
+ val model_theories =
+ (for (model <- models.iterator if model.is_theory)
+ yield (model.node_name, Position.none)).toList
+
+ val thy_files1 =
+ dependencies(model_theories ::: theories.map((_, Position.none))).theories
+
+ val thy_files2 =
+ (for {
+ model <- models.iterator if !model.is_theory
+ thy_name <- make_theory_name(model.node_name)
+ } yield thy_name).toList
+
+ thy_files1 ::: thy_files2
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/sessions.ML Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,146 @@
+(* Title: Pure/Build/sessions.ML
+ Author: Makarius
+
+Support for session ROOT syntax.
+*)
+
+signature SESSIONS =
+sig
+ val root_name: string
+ val theory_name: string
+ val chapter_definition_parser: (Toplevel.transition -> Toplevel.transition) parser
+ val session_parser: (Toplevel.transition -> Toplevel.transition) parser
+end;
+
+structure Sessions: SESSIONS =
+struct
+
+val root_name = "ROOT";
+val theory_name = "Pure.Sessions";
+
+local
+
+val groups =
+ Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.name --| Parse.$$$ ")")) [];
+
+val description =
+ Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.embedded)) Input.empty;
+
+val theory_entry = Parse.input Parse.theory_name --| Parse.opt_keyword "global";
+
+val theories =
+ Parse.$$$ "theories" |-- Parse.!!! (Scan.optional Parse.options [] -- Scan.repeat1 theory_entry);
+
+val document_theories =
+ Parse.$$$ "document_theories" |-- Scan.repeat1 (Parse.input Parse.theory_name);
+
+val document_files =
+ Parse.$$$ "document_files" |--
+ Parse.!!! (Parse.in_path_parens "document" -- Scan.repeat1 Parse.path_input);
+
+val prune =
+ Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.nat --| Parse.$$$ "]")) 0;
+
+val export_files =
+ Parse.$$$ "export_files" |--
+ Parse.!!! (Parse.in_path_parens "export" -- prune -- Scan.repeat1 Parse.embedded);
+
+val export_classpath =
+ Parse.$$$ "export_classpath" |-- Scan.repeat Parse.embedded;
+
+fun path_source source path =
+ Input.source (Input.is_delimited source) (Path.implode path) (Input.range_of source);
+
+in
+
+val chapter_definition_parser =
+ Parse.chapter_name -- groups -- description >> (fn (_, descr) =>
+ Toplevel.keep (fn state =>
+ let
+ val ctxt = Toplevel.context_of state;
+ val _ =
+ Context_Position.report ctxt
+ (Position.range_position (Symbol_Pos.range (Input.source_explode descr)))
+ Markup.comment;
+ in () end));
+
+val session_parser =
+ Parse.session_name -- groups -- Parse.in_path "." --
+ (Parse.$$$ "=" |--
+ Parse.!!! (Scan.option (Parse.session_name --| Parse.!!! (Parse.$$$ "+")) -- description --
+ Scan.optional (Parse.$$$ "options" |-- Parse.!!! Parse.options) [] --
+ Scan.optional (Parse.$$$ "sessions" |--
+ Parse.!!! (Scan.repeat1 Parse.session_name)) [] --
+ Scan.optional (Parse.$$$ "directories" |-- Parse.!!! (Scan.repeat1 Parse.path_input)) [] --
+ Scan.repeat theories --
+ Scan.optional document_theories [] --
+ Scan.repeat document_files --
+ Scan.repeat export_files --
+ Scan.optional export_classpath []))
+ >> (fn (((((session, _), _), dir),
+ ((((((((((parent, descr), options), sessions), directories), theories),
+ document_theories), document_files), export_files), _)))) =>
+ Toplevel.keep (fn state =>
+ let
+ val ctxt = Toplevel.context_of state;
+ val session_dir = Resources.check_dir ctxt NONE dir;
+
+ val _ =
+ (the_list parent @ sessions) |> List.app (fn arg =>
+ ignore (Resources.check_session ctxt arg)
+ handle ERROR msg => Output.error_message msg);
+
+ val _ =
+ Context_Position.report ctxt
+ (Position.range_position (Symbol_Pos.range (Input.source_explode descr)))
+ Markup.comment;
+
+ val _ =
+ (options @ maps #1 theories) |> List.app (fn (x, y) =>
+ ignore (Completion.check_option_value ctxt x y (Options.default ()))
+ handle ERROR msg => Output.error_message msg);
+
+ fun check_thy source =
+ ignore (Resources.check_file ctxt (SOME Path.current) source)
+ handle ERROR msg => Output.error_message msg;
+
+ val _ =
+ maps #2 theories |> List.app (fn source =>
+ let
+ val s = Input.string_of source;
+ val pos = Input.pos_of source;
+ val {node_name, theory_name, ...} =
+ Resources.import_name session session_dir s
+ handle ERROR msg => error (msg ^ Position.here pos);
+ val thy_path = the_default node_name (Resources.find_theory_file theory_name);
+ in check_thy (path_source source thy_path) end);
+
+ val _ =
+ directories |> List.app (ignore o Resources.check_dir ctxt (SOME session_dir));
+
+ val _ =
+ document_theories |> List.app (fn source =>
+ let
+ val thy = Input.string_of source;
+ val pos = Input.pos_of source;
+ in
+ (case Resources.find_theory_file thy of
+ NONE => Output.error_message ("Unknown theory " ^ quote thy ^ Position.here pos)
+ | SOME path => check_thy (path_source source path))
+ end);
+
+ val _ =
+ document_files |> List.app (fn (doc_dir, doc_files) =>
+ let
+ val dir = Resources.check_dir ctxt (SOME session_dir) doc_dir;
+ val _ = List.app (ignore o Resources.check_file ctxt (SOME dir)) doc_files;
+ in () end);
+
+ val _ =
+ export_files |> List.app (fn ((export_dir, _), _) =>
+ ignore (Resources.check_path ctxt (SOME session_dir) export_dir));
+ in () end));
+
+end;
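+
+(*Example of a session entry accepted by session_parser, with illustrative names:
+
+  session Example in "Dir" = HOL +
+    options [document = false]
+    sessions "HOL-Library"
+    theories A B
+*)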
+
+end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/sessions.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,1333 @@
+/* Title: Pure/Build/sessions.scala
+ Author: Makarius
+
+Cumulative session information.
+*/
+
+package isabelle
+
+import java.io.{File => JFile}
+
+import scala.collection.immutable.{SortedSet, SortedMap}
+import scala.collection.mutable
+
+
+object Sessions {
+ /* main operations */
+
+ def background0(session: String): Background = Background.empty(session)
+
+ def background(options: Options,
+ session: String,
+ progress: Progress = new Progress,
+ dirs: List[Path] = Nil,
+ include_sessions: List[String] = Nil,
+ session_ancestor: Option[String] = None,
+ session_requirements: Boolean = false
+ ): Background = {
+ Background.load(options, session, progress = progress, dirs = dirs,
+ include_sessions = include_sessions, session_ancestor = session_ancestor,
+ session_requirements = session_requirements)
+ }
+
+ def load_structure(
+ options: Options,
+ dirs: List[Path] = Nil,
+ select_dirs: List[Path] = Nil,
+ infos: List[Info] = Nil,
+ augment_options: String => List[Options.Spec] = _ => Nil
+ ): Structure = {
+ val roots = load_root_files(dirs = dirs, select_dirs = select_dirs)
+ Structure.make(options, augment_options, roots = roots, infos = infos)
+ }
+
+ def deps(sessions_structure: Structure,
+ progress: Progress = new Progress,
+ inlined_files: Boolean = false,
+ list_files: Boolean = false,
+ check_keywords: Set[String] = Set.empty
+ ): Deps = {
+ Deps.load(sessions_structure, progress = progress, inlined_files = inlined_files,
+ list_files = list_files, check_keywords = check_keywords)
+ }
+
+
+ /* session and theory names */
+
+ val ROOTS: Path = Path.explode("ROOTS")
+ val ROOT: Path = Path.explode("ROOT")
+
+ val roots_name: String = "ROOTS"
+ val root_name: String = "ROOT"
+ val theory_import: String = "Pure.Sessions"
+
+ val UNSORTED = "Unsorted"
+ val DRAFT = "Draft"
+
+ def is_pure(name: String): Boolean = name == Thy_Header.PURE
+
+ def illegal_session(name: String): Boolean = name == "" || name == DRAFT
+ def illegal_theory(name: String): Boolean =
+ name == root_name || File_Format.registry.theory_excluded(name)
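+
+ // e.g. illegal_session("Draft") and illegal_theory("ROOT") hold: such names are
+ // reserved for drafts and for session catalog files, respectively.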
+
+
+ /* ROOTS file format */
+
+ class ROOTS_File_Format extends File_Format {
+ val format_name: String = roots_name
+ val file_ext = ""
+
+ override def detect(name: String): Boolean =
+ Url.get_base_name(name) match {
+ case Some(base_name) => base_name == roots_name
+ case None => false
+ }
+
+ override def theory_suffix: String = "ROOTS_file"
+ override def theory_content(name: String): String =
+ """theory "ROOTS" imports Pure begin ROOTS_file "." end"""
+ override def theory_excluded(name: String): Boolean = name == "ROOTS"
+ }
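+
+ // A ROOTS catalog is thus presented as a synthetic theory "ROOTS" that loads the
+ // file via the ROOTS_file command, while the name "ROOTS" itself is excluded from
+ // the regular theory name space.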
+
+
+ /* base info */
+
+ object Base {
+ val bootstrap: Base = Base(overall_syntax = Thy_Header.bootstrap_syntax)
+ }
+
+ sealed case class Base(
+ session_name: String = "",
+ session_pos: Position.T = Position.none,
+ proper_session_theories: List[Document.Node.Name] = Nil,
+ document_theories: List[Document.Node.Name] = Nil,
+ loaded_theories: Graph[String, Outer_Syntax] = Graph.string, // cumulative imports
+ used_theories: List[(Document.Node.Name, Options)] = Nil, // new imports
+ theory_load_commands: Map[String, List[Command_Span.Span]] = Map.empty,
+ known_theories: Map[String, Document.Node.Entry] = Map.empty,
+ known_loaded_files: Map[String, List[Path]] = Map.empty,
+ overall_syntax: Outer_Syntax = Outer_Syntax.empty,
+ imported_sources: List[(Path, SHA1.Digest)] = Nil,
+ session_sources: List[(Path, SHA1.Digest)] = Nil,
+ session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
+ errors: List[String] = Nil
+ ) {
+ def session_entry: (String, Base) = session_name -> this
+
+ override def toString: String = "Sessions.Base(" + print_body + ")"
+ def print_body: String =
+ "session_name = " + quote(session_name) +
+ ", loaded_theories = " + loaded_theories.size +
+ ", used_theories = " + used_theories.length
+
+ def all_sources: List[(Path, SHA1.Digest)] = imported_sources ::: session_sources
+
+ def all_document_theories: List[Document.Node.Name] =
+ proper_session_theories ::: document_theories
+
+ def loaded_theory(name: String): Boolean = loaded_theories.defined(name)
+ def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory)
+
+ def loaded_theory_syntax(name: String): Option[Outer_Syntax] =
+ if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None
+ def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] =
+ loaded_theory_syntax(name.theory)
+
+ def theory_syntax(name: Document.Node.Name): Outer_Syntax =
+ loaded_theory_syntax(name) getOrElse overall_syntax
+
+ def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax =
+ nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax
+ }
+
+
+ /* background context */
+
+ object Background {
+ def empty(session: String): Background =
+ Background(Base(session_name = session))
+
+ def load(options: Options,
+ session: String,
+ progress: Progress = new Progress,
+ dirs: List[Path] = Nil,
+ include_sessions: List[String] = Nil,
+ session_ancestor: Option[String] = None,
+ session_requirements: Boolean = false
+ ): Background = {
+ val full_sessions = load_structure(options, dirs = dirs)
+
+ val selected_sessions =
+ full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
+ val info = selected_sessions(session)
+ val ancestor = session_ancestor orElse info.parent
+
+ val (session1, infos1) =
+ if (session_requirements && ancestor.isDefined) {
+ val deps = Sessions.deps(selected_sessions, progress = progress)
+ val base = deps(session)
+
+ val ancestor_loaded =
+ deps.get(ancestor.get) match {
+ case Some(ancestor_base)
+ if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
+ ancestor_base.loaded_theories.defined _
+ case _ =>
+ error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
+ }
+
+ val required_theories =
+ for {
+ thy <- base.loaded_theories.keys
+ if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
+ }
+ yield thy
+
+ if (required_theories.isEmpty) (ancestor.get, Nil)
+ else {
+ val other_name = info.name + "_requirements(" + ancestor.get + ")"
+ Isabelle_System.isabelle_tmp_prefix()  // side effect: ensure that $ISABELLE_TMP_PREFIX exists
+
+ (other_name,
+ List(
+ Info.make(
+ Chapter_Defs.empty,
+ Options.init0(),
+ info.options,
+ augment_options = _ => Nil,
+ dir_selected = false,
+ dir = Path.explode("$ISABELLE_TMP_PREFIX"),
+ chapter = info.chapter,
+ Session_Entry(
+ pos = info.pos,
+ name = other_name,
+ groups = info.groups,
+ path = ".",
+ parent = ancestor,
+ description = "Required theory imports from other sessions",
+ options = Nil,
+ imports = info.deps,
+ directories = Nil,
+ theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
+ document_theories = Nil,
+ document_files = Nil,
+ export_files = Nil,
+ export_classpath = Nil))))
+ }
+ }
+ else (session, Nil)
+
+ val full_sessions1 =
+ if (infos1.isEmpty) full_sessions
+ else load_structure(options, dirs = dirs, infos = infos1)
+
+ val selected_sessions1 =
+ full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))
+
+ val deps1 = Sessions.deps(selected_sessions1, progress = progress)
+
+ Background(deps1(session1), sessions_structure = full_sessions1,
+ errors = deps1.errors, infos = infos1)
+ }
+ }
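+
+ // Note: with session_requirements, imported theories that are missing from the
+ // ancestor are wrapped into a synthetic session "<session>_requirements(<ancestor>)"
+ // below $ISABELLE_TMP_PREFIX, so that the ancestor may serve as the parent image.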
+
+ sealed case class Background(
+ base: Base,
+ sessions_structure: Structure = Structure.empty,
+ errors: List[String] = Nil,
+ infos: List[Info] = Nil
+ ) {
+ def session_name: String = base.session_name
+ def info: Info = sessions_structure(session_name)
+
+ def check_errors: Background =
+ if (errors.isEmpty) this
+ else error(cat_lines(errors))
+ }
+
+
+ /* source dependencies */
+
+ object Deps {
+ def load(sessions_structure: Structure,
+ progress: Progress = new Progress,
+ inlined_files: Boolean = false,
+ list_files: Boolean = false,
+ check_keywords: Set[String] = Set.empty
+ ): Deps = {
+ var cache_sources = Map.empty[JFile, SHA1.Digest]
+ def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
+ for {
+ path <- paths
+ file = path.file
+ if cache_sources.isDefinedAt(file) || file.isFile
+ }
+ yield {
+ cache_sources.get(file) match {
+ case Some(digest) => (path, digest)
+ case None =>
+ val digest = SHA1.digest(file)
+ cache_sources = cache_sources + (file -> digest)
+ (path, digest)
+ }
+ }
+ }
+
+ val session_bases =
+ sessions_structure.imports_topological_order.foldLeft(Map(Base.bootstrap.session_entry)) {
+ case (session_bases, session_name) =>
+ progress.expose_interrupt()
+
+ val info = sessions_structure(session_name)
+ try {
+ val deps_base = info.deps_base(session_bases)
+ val session_background =
+ Background(base = deps_base, sessions_structure = sessions_structure)
+ val resources = new Resources(session_background)
+
+ progress.echo(
+ "Session " + info.chapter + "/" + session_name +
+ if_proper(info.groups, info.groups.mkString(" (", " ", ")")),
+ verbose = !list_files)
+
+ val dependencies = resources.session_dependencies(info)
+
+ val overall_syntax = dependencies.overall_syntax
+
+ val proper_session_theories =
+ dependencies.theories.filter(name =>
+ sessions_structure.theory_qualifier(name) == session_name)
+
+ val theory_files = dependencies.theories.map(_.path)
+
+ val (load_commands, load_commands_errors) =
+ try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
+ catch { case ERROR(msg) => (Nil, List(msg)) }
+
+ val theory_load_commands =
+ (for ((name, span) <- load_commands.iterator) yield name.theory -> span).toMap
+
+ val loaded_files: List[(String, List[Path])] =
+ for ((name, spans) <- load_commands) yield {
+ val (theory, files) = dependencies.loaded_files(name, spans)
+ theory -> files.map(file => Path.explode(file.node))
+ }
+
+ val document_files =
+ for ((path1, path2) <- info.document_files)
+ yield info.dir + path1 + path2
+
+ val session_files =
+ (theory_files ::: loaded_files.flatMap(_._2) ::: document_files).map(_.expand)
+
+ val imported_files = if (inlined_files) dependencies.imported_files else Nil
+
+ if (list_files) {
+ progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))
+ }
+
+ if (check_keywords.nonEmpty) {
+ Check_Keywords.check_keywords(
+ progress, overall_syntax.keywords, check_keywords, theory_files)
+ }
+
+ val session_graph_display: Graph_Display.Graph = {
+ def session_node(name: String): Graph_Display.Node =
+ Graph_Display.Node("[" + name + "]", "session." + name)
+
+ def node(name: Document.Node.Name): Graph_Display.Node = {
+ val qualifier = sessions_structure.theory_qualifier(name)
+ if (qualifier == info.name) {
+ Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
+ }
+ else session_node(qualifier)
+ }
+
+ val required_sessions =
+ dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
+ .map(theory => sessions_structure.theory_qualifier(theory))
+ .filter(name => name != info.name && sessions_structure.defined(name))
+
+ val required_subgraph =
+ sessions_structure.imports_graph
+ .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
+ .transitive_closure
+ .restrict(required_sessions.toSet)
+ .transitive_reduction_acyclic
+
+ val graph0 =
+ required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
+ case (g, session) =>
+ val a = session_node(session)
+ val bs = required_subgraph.imm_preds(session).toList.map(session_node)
+ bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
+ }
+
+ dependencies.entries.foldLeft(graph0) {
+ case (g, entry) =>
+ val a = node(entry.name)
+ val bs = entry.header.imports.map(node).filterNot(_ == a)
+ bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
+ }
+ }
+
+ val known_theories =
+ dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
+ foldLeft(deps_base.known_theories)(_ + _)
+
+ val known_loaded_files = deps_base.known_loaded_files ++ loaded_files
+
+ val import_errors = {
+ val known_sessions =
+ sessions_structure.imports_requirements(List(session_name)).toSet
+ for {
+ name <- dependencies.theories
+ qualifier = sessions_structure.theory_qualifier(name)
+ if !known_sessions(qualifier)
+ } yield "Bad import of theory " + quote(name.toString) +
+ ": need to include sessions " + quote(qualifier) + " in ROOT"
+ }
+
+ val document_errors =
+ info.document_theories.flatMap(
+ {
+ case (thy, pos) =>
+ val build_hierarchy =
+ if (sessions_structure.build_graph.defined(session_name)) {
+ sessions_structure.build_hierarchy(session_name)
+ }
+ else Nil
+
+ def err(msg: String): Option[String] =
+ Some(msg + " " + quote(thy) + Position.here(pos))
+
+ known_theories.get(thy).map(_.name) match {
+ case None => err("Unknown document theory")
+ case Some(name) =>
+ val qualifier = sessions_structure.theory_qualifier(name)
+ if (proper_session_theories.contains(name)) {
+ err("Redundant document theory from this session:")
+ }
+ else if (
+ !build_hierarchy.contains(qualifier) &&
+ !dependencies.theories.contains(name)
+ ) {
+ err("Document theory from other session not imported properly:")
+ }
+ else None
+ }
+ })
+ val document_theories =
+ info.document_theories.flatMap({ case (thy, _) =>
+ known_theories.get(thy).map(_.name) })  // unknown theories were already reported via document_errors
+
+ val dir_errors = {
+ val ok = info.dirs.map(_.canonical_file).toSet
+ val bad =
+ (for {
+ name <- proper_session_theories.iterator
+ path = Path.explode(name.master_dir)
+ if !ok(path.canonical_file)
+ path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
+ } yield (path1, name)).toList
+ val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted
+
+ val errs1 =
+ for { (path1, name) <- bad }
+ yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
+ val errs2 =
+ if (bad_dirs.isEmpty) Nil
+ else List("Implicit use of session directories: " + commas(bad_dirs))
+ val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
+ val errs4 =
+ (for {
+ name <- proper_session_theories.iterator
+ name1 <- resources.find_theory_node(name.theory)
+ if name.node != name1.node
+ } yield {
+ "Incoherent theory file import:\n " + quote(name.node) +
+ " vs. \n " + quote(name1.node)
+ }).toList
+
+ errs1 ::: errs2 ::: errs3 ::: errs4
+ }
+
+ val sources_errors =
+ for (p <- session_files if !p.is_file) yield "No such file: " + p
+
+ val path_errors =
+ try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
+ catch { case ERROR(msg) => List(msg) }
+
+ val bibtex_errors = info.bibtex_entries.errors
+
+ val base =
+ Base(
+ session_name = info.name,
+ session_pos = info.pos,
+ proper_session_theories = proper_session_theories,
+ document_theories = document_theories,
+ loaded_theories = dependencies.loaded_theories,
+ used_theories = dependencies.theories_adjunct,
+ theory_load_commands = theory_load_commands,
+ known_theories = known_theories,
+ known_loaded_files = known_loaded_files,
+ overall_syntax = overall_syntax,
+ imported_sources = check_sources(imported_files),
+ session_sources = check_sources(session_files),
+ session_graph_display = session_graph_display,
+ errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
+ document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
+ bibtex_errors)
+
+ session_bases + base.session_entry
+ }
+ catch {
+ case ERROR(msg) =>
+ cat_error(msg, "The error(s) above occurred in session " +
+ quote(info.name) + Position.here(info.pos))
+ }
+ }
+
+ new Deps(sessions_structure, session_bases)
+ }
+ }
+
+ final class Deps private[Sessions](
+ val sessions_structure: Structure,
+ val session_bases: Map[String, Base]
+ ) {
+ def background(session: String): Background =
+ Background(base = apply(session), sessions_structure = sessions_structure, errors = errors)
+
+ def is_empty: Boolean = session_bases.keysIterator.forall(_.isEmpty)  // at most the bootstrap entry (empty name)
+ def apply(name: String): Base = session_bases(name)
+ def get(name: String): Option[Base] = session_bases.get(name)
+
+ def sources_shasum(name: String): SHA1.Shasum = {
+ val meta_info = sessions_structure(name).meta_info
+ val sources =
+ SHA1.shasum_sorted(
+ for ((path, digest) <- apply(name).all_sources)
+ yield digest -> File.symbolic_path(path))
+ meta_info ::: sources
+ }
+
+ def errors: List[String] =
+ (for {
+ (name, base) <- session_bases.iterator
+ if base.errors.nonEmpty
+ } yield cat_lines(base.errors) +
+ "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.session_pos)
+ ).toList
+
+ def check_errors: Deps =
+ errors match {
+ case Nil => this
+ case errs => error(cat_lines(errs))
+ }
+
+ override def toString: String = "Sessions.Deps(" + sessions_structure + ")"
+ }
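+
+ // Typical usage (illustrative): Sessions.deps(Sessions.load_structure(options)).check_errors
+ // yields checked dependency information for all sessions of the given structure.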
+
+
+ /* cumulative session info */
+
+ private val BUILD_PREFS_BG = "<build_prefs:"
+ private val BUILD_PREFS_EN = ">"
+
+ def make_build_prefs(name: String): String =
+ BUILD_PREFS_BG + name + BUILD_PREFS_EN
+
+ def is_build_prefs(s: String): Boolean = {
+ val i = s.indexOf('<')
+ i >= 0 && {
+ val s1 = s.substring(i)
+ s1.startsWith(BUILD_PREFS_BG) && s1.endsWith(BUILD_PREFS_EN)
+ }
+ }
+
+ def eq_sources(options: Options, shasum1: SHA1.Shasum, shasum2: SHA1.Shasum): Boolean =
+ if (options.bool("build_thorough")) shasum1 == shasum2
+ else {
+ def trim(shasum: SHA1.Shasum): SHA1.Shasum =
+ shasum.filter(s => !is_build_prefs(s))
+
+ trim(shasum1) == trim(shasum2)
+ }
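+
+ // e.g. make_build_prefs("parallel_proofs") yields "<build_prefs:parallel_proofs>":
+ // eq_sources ignores such entries unless option "build_thorough" is enabled, so a
+ // mere change of build preferences does not invalidate existing builds.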
+
+ sealed case class Chapter_Info(
+ name: String,
+ pos: Position.T,
+ groups: List[String],
+ description: String,
+ sessions: List[String]
+ )
+
+ object Info {
+ def make(
+ chapter_defs: Chapter_Defs,
+ options0: Options,
+ options: Options,
+ augment_options: String => List[Options.Spec],
+ dir_selected: Boolean,
+ dir: Path,
+ chapter: String,
+ entry: Session_Entry
+ ): Info = {
+ try {
+ val name = entry.name
+
+ if (illegal_session(name)) error("Illegal session name " + quote(name))
+ if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
+ if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")
+
+ val session_path = dir + Path.explode(entry.path)
+ val directories = entry.directories.map(dir => session_path + Path.explode(dir))
+
+ val session_options0 = options ++ entry.options
+ val session_options = session_options0 ++ augment_options(name)
+ val session_prefs =
+ session_options.make_prefs(defaults = session_options0, filter = _.session_content)
+
+ val build_prefs_digests =
+ session_options.changed(defaults = options0, filter = _.session_content)
+ .map(ch => SHA1.digest(ch.print_prefs) -> make_build_prefs(ch.name))
+
+ val theories =
+ entry.theories.map({ case (opts, thys) =>
+ (session_options ++ opts,
+ thys.map({ case ((thy, pos), _) =>
+ val thy_name = Thy_Header.import_name(thy)
+ if (illegal_theory(thy_name)) {
+ error("Illegal theory name " + quote(thy_name) + Position.here(pos))
+ }
+ else (thy, pos) })) })
+
+ val global_theories =
+ for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
+ yield {
+ val thy_name = Path.explode(thy).file_name
+ if (Long_Name.is_qualified(thy_name)) {
+ error("Bad qualified name for global theory " +
+ quote(thy_name) + Position.here(pos))
+ }
+ else thy_name
+ }
+
+ val conditions =
+ theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted.
+ map(x => (x, Isabelle_System.getenv(x) != ""))
+
+ val document_files =
+ entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })
+
+ val export_files =
+ entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })
+
+ val meta_digest =
+ SHA1.digest(
+ (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
+ entry.theories_no_position, conditions, entry.document_theories_no_position).toString)
+
+ val meta_info =
+ SHA1.shasum_meta_info(meta_digest) ::: SHA1.shasum_sorted(build_prefs_digests)
+
+ val chapter_groups = chapter_defs(chapter).groups
+ val groups = chapter_groups ::: entry.groups.filterNot(chapter_groups.contains)
+
+ Info(name, chapter, dir_selected, entry.pos, groups, session_path,
+ entry.parent, entry.description, directories, session_options, session_prefs,
+ entry.imports, theories, global_theories, entry.document_theories, document_files,
+ export_files, entry.export_classpath, meta_info)
+ }
+ catch {
+ case ERROR(msg) =>
+ error(msg + "\nThe error(s) above occurred in session entry " +
+ quote(entry.name) + Position.here(entry.pos))
+ }
+ }
+ }
+
+ sealed case class Info(
+ name: String,
+ chapter: String,
+ dir_selected: Boolean,
+ pos: Position.T,
+ groups: List[String],
+ dir: Path,
+ parent: Option[String],
+ description: String,
+ directories: List[Path],
+ options: Options,
+ session_prefs: String,
+ imports: List[String],
+ theories: List[(Options, List[(String, Position.T)])],
+ global_theories: List[String],
+ document_theories: List[(String, Position.T)],
+ document_files: List[(Path, Path)],
+ export_files: List[(Path, Int, List[String])],
+ export_classpath: List[String],
+ meta_info: SHA1.Shasum
+ ) {
+ def deps: List[String] = parent.toList ::: imports
+
+ def deps_base(session_bases: String => Base): Base = {
+ val parent_base = session_bases(parent.getOrElse(""))
+ val imports_bases = imports.map(session_bases)
+ parent_base.copy(
+ known_theories =
+ (for {
+ base <- imports_bases.iterator
+ (_, entry) <- base.known_theories.iterator
+ } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _),
+ known_loaded_files =
+ imports_bases.iterator.map(_.known_loaded_files).
+ foldLeft(parent_base.known_loaded_files)(_ ++ _))
+ }
+
+ def dirs: List[Path] = dir :: directories
+
+ def main_group: Boolean = groups.contains("main")
+ def doc_group: Boolean = groups.contains("doc")
+
+ def timeout_ignored: Boolean =
+ !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1)
+
+ def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))
+
+ def document_enabled: Boolean =
+ options.string("document") match {
+ case "" | "false" => false
+ case "pdf" | "true" => true
+ case doc => error("Bad document specification " + quote(doc))
+ }
+
+ def document_variants: List[Document_Build.Document_Variant] = {
+ val variants =
+ space_explode(':', options.string("document_variants")).
+ map(Document_Build.Document_Variant.parse)
+
+ val dups = Library.duplicates(variants.map(_.name))
+ if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups))
+
+ variants
+ }
+
+ def document_echo: Boolean = options.bool("document_echo")
+
+ def documents: List[Document_Build.Document_Variant] = {
+ val variants = document_variants
+ if (!document_enabled || document_files.isEmpty) Nil else variants
+ }
+
+ def document_output: Option[Path] =
+ options.string("document_output") match {
+ case "" => None
+ case s => Some(dir + Path.explode(s))
+ }
+
+ def browser_info: Boolean = options.bool("browser_info")
+
+ lazy val bibtex_entries: Bibtex.Entries =
+ (for {
+ (document_dir, file) <- document_files.iterator
+ if File.is_bib(file.file_name)
+ } yield {
+ val path = dir + document_dir + file
+ Bibtex.Entries.parse(File.read(path), start = Token.Pos.file(File.standard_path(path)))
+ }).foldRight(Bibtex.Entries.empty)(_ ::: _)
+
+ def record_proofs: Boolean = options.int("record_proofs") >= 2
+
+ def is_afp: Boolean = chapter == AFP.chapter
+ def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains)
+ }
+
+ object Selection {
+ val empty: Selection = Selection()
+ val all: Selection = Selection(all_sessions = true)
+ def session(session: String): Selection = Selection(sessions = List(session))
+ }
+
+ sealed case class Selection(
+ requirements: Boolean = false,
+ all_sessions: Boolean = false,
+ base_sessions: List[String] = Nil,
+ exclude_session_groups: List[String] = Nil,
+ exclude_sessions: List[String] = Nil,
+ session_groups: List[String] = Nil,
+ sessions: List[String] = Nil
+ ) {
+ def ++ (other: Selection): Selection =
+ Selection(
+ requirements = requirements || other.requirements,
+ all_sessions = all_sessions || other.all_sessions,
+ base_sessions = Library.merge(base_sessions, other.base_sessions),
+ exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups),
+ exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions),
+ session_groups = Library.merge(session_groups, other.session_groups),
+ sessions = Library.merge(sessions, other.sessions))
+ }
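+
+ // e.g. Selection(sessions = List("A"), requirements = true) denotes the strict
+ // requirements of session "A": all predecessors, excluding "A" itself (see
+ // Structure.selected below).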
+
+ object Structure {
+ val empty: Structure = make(Options.empty)
+
+ def make(
+ options: Options,
+ augment_options: String => List[Options.Spec] = _ => Nil,
+ roots: List[Root_File] = Nil,
+ infos: List[Info] = Nil
+ ): Structure = {
+ val chapter_defs: Chapter_Defs =
+ roots.foldLeft(Chapter_Defs.empty) {
+ case (defs1, root) =>
+ root.entries.foldLeft(defs1) {
+ case (defs2, entry: Chapter_Def) => defs2 + entry
+ case (defs2, _) => defs2
+ }
+ }
+
+ val options0 = Options.init0()
+ val session_prefs = options.make_prefs(defaults = options0, filter = _.session_content)
+
+ val root_infos = {
+ var chapter = UNSORTED
+ val root_infos = new mutable.ListBuffer[Info]
+ for (root <- roots) {
+ root.entries.foreach {
+ case entry: Chapter_Entry => chapter = entry.name
+ case entry: Session_Entry =>
+ root_infos +=
+ Info.make(chapter_defs, options0, options, augment_options,
+ root.select, root.dir, chapter, entry)
+ case _ =>
+ }
+ chapter = UNSORTED
+ }
+ root_infos.toList
+ }
+
+ val info_graph =
+ (root_infos ::: infos).foldLeft(Graph.string[Info]) {
+ case (graph, info) =>
+ if (graph.defined(info.name)) {
+ error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
+ Position.here(graph.get_node(info.name).pos))
+ }
+ else graph.new_node(info.name, info)
+ }
+
+ def augment_graph(
+ graph: Graph[String, Info],
+ kind: String,
+ edges: Info => Iterable[String]
+ ) : Graph[String, Info] = {
+ def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = {
+ if (!g.defined(parent)) {
+ error("Bad " + kind + " session " + quote(parent) + " for " +
+ quote(name) + Position.here(pos))
+ }
+
+ try { g.add_edge_acyclic(parent, name) }
+ catch {
+ case exn: Graph.Cycles[_] =>
+ error(cat_lines(exn.cycles.map(cycle =>
+ "Cyclic session dependency of " +
+ cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
+ }
+ }
+ graph.iterator.foldLeft(graph) {
+ case (g, (name, (info, _))) =>
+ edges(info).foldLeft(g)(add_edge(info.pos, name, _, _))
+ }
+ }
+
+ val build_graph = augment_graph(info_graph, "parent", _.parent)
+ val imports_graph = augment_graph(build_graph, "imports", _.imports)
+
+ val session_positions: List[(String, Position.T)] =
+ (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList
+
+ val session_directories: Map[JFile, String] =
+ (for {
+ session <- imports_graph.topological_order.iterator
+ info = info_graph.get_node(session)
+ dir <- info.dirs.iterator
+ } yield (info, dir)).foldLeft(Map.empty[JFile, String]) {
+ case (dirs, (info, dir)) =>
+ val session = info.name
+ val canonical_dir = dir.canonical_file
+ dirs.get(canonical_dir) match {
+ case Some(session1) =>
+ val info1 = info_graph.get_node(session1)
+ error("Duplicate use of directory " + dir +
+ "\n for session " + quote(session1) + Position.here(info1.pos) +
+ "\n vs. session " + quote(session) + Position.here(info.pos))
+ case None => dirs + (canonical_dir -> session)
+ }
+ }
+
+ val global_theories: Map[String, String] =
+ (for {
+ session <- imports_graph.topological_order.iterator
+ info = info_graph.get_node(session)
+ thy <- info.global_theories.iterator
+ } yield (info, thy)).foldLeft(Thy_Header.bootstrap_global_theories.toMap) {
+ case (global, (info, thy)) =>
+ val qualifier = info.name
+ global.get(thy) match {
+ case Some(qualifier1) if qualifier != qualifier1 =>
+ error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
+ case _ => global + (thy -> qualifier)
+ }
+ }
+
+ new Structure(chapter_defs, session_prefs, session_positions, session_directories,
+ global_theories, build_graph, imports_graph)
+ }
+ }
+
+ final class Structure private[Sessions](
+ chapter_defs: Chapter_Defs,
+ val session_prefs: String,
+ val session_positions: List[(String, Position.T)],
+ val session_directories: Map[JFile, String],
+ val global_theories: Map[String, String],
+ val build_graph: Graph[String, Info],
+ val imports_graph: Graph[String, Info]
+ ) {
+ sessions_structure =>
+
+ def dest_session_directories: List[(String, String)] =
+ for ((file, session) <- session_directories.toList)
+ yield (File.standard_path(file), session)
+
+ lazy val known_chapters: List[Chapter_Info] = {
+ val chapter_sessions =
+ Multi_Map.from(
+ for ((_, (info, _)) <- build_graph.iterator)
+ yield info.chapter -> info.name)
+ val chapters1 =
+ (for (entry <- chapter_defs.list.iterator) yield {
+ val sessions = chapter_sessions.get_list(entry.name)
+ Chapter_Info(entry.name, entry.pos, entry.groups, entry.description, sessions.sorted)
+ }).toList
+ val chapters2 =
+ (for {
+ (name, sessions) <- chapter_sessions.iterator_list
+ if !chapters1.exists(_.name == name)
+ } yield Chapter_Info(name, Position.none, Nil, "", sessions.sorted)).toList.sortBy(_.name)
+ chapters1 ::: chapters2
+ }
+
+ def relevant_chapters: List[Chapter_Info] = known_chapters.filter(_.sessions.nonEmpty)
+
+ def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph)
+ def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph)
+
+ def defined(name: String): Boolean = imports_graph.defined(name)
+ def apply(name: String): Info = imports_graph.get_node(name)
+ def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None
+
+ def theory_qualifier(name: String): String =
+ global_theories.getOrElse(name, Long_Name.qualifier(name))
+ def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)
+
+ def check_sessions(names: List[String]): Unit = {
+ val bad_sessions = SortedSet(names.filterNot(defined): _*).toList
+ if (bad_sessions.nonEmpty) {
+ error("Undefined session(s): " + commas_quote(bad_sessions))
+ }
+ }
+
+ def check_sessions(sel: Selection): Unit =
+ check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions)
+
+ private def selected(graph: Graph[String, Info], sel: Selection): List[String] = {
+ check_sessions(sel)
+
+ val select_group = sel.session_groups.toSet
+ val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions)
+
+ val selected0 =
+ if (sel.all_sessions) graph.keys
+ else {
+ (for {
+ (name, (info, _)) <- graph.iterator
+ if info.dir_selected || select_session(name) || info.groups.exists(select_group)
+ } yield name).toList
+ }
+
+ if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList
+ else selected0
+ }
+
+ def selection(sel: Selection): Structure = {
+ check_sessions(sel)
+
+ val excluded = {
+ val exclude_group = sel.exclude_session_groups.toSet
+ val exclude_group_sessions =
+ (for {
+ (name, (info, _)) <- imports_graph.iterator
+ if info.groups.exists(exclude_group)
+ } yield name).toList
+ imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet
+ }
+
+ def restrict(graph: Graph[String, Info]): Graph[String, Info] = {
+ val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded)
+ graph.restrict(graph.all_preds(sessions).toSet)
+ }
+
+ new Structure(chapter_defs, session_prefs, session_positions, session_directories,
+ global_theories, restrict(build_graph), restrict(imports_graph))
+ }
+
+ def selection(session: String): Structure = selection(Selection.session(session))
+
+ def selection_deps(
+ selection: Selection,
+ progress: Progress = new Progress,
+ loading_sessions: Boolean = false,
+ inlined_files: Boolean = false
+ ): Deps = {
+ val deps =
+ Sessions.deps(sessions_structure.selection(selection),
+ progress = progress, inlined_files = inlined_files)
+
+ if (loading_sessions) {
+ val selection_size = deps.sessions_structure.build_graph.size
+ if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...")
+ }
+
+ deps
+ }
+
+ def build_hierarchy(session: String): List[String] =
+ if (build_graph.defined(session)) build_graph.all_preds(List(session))
+ else List(session)
+
+ def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
+ def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
+ def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
+ def build_topological_order: List[String] = build_graph.topological_order
+
+ def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
+ def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
+ def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
+ def imports_topological_order: List[String] = imports_graph.topological_order
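+
+ // Note: build_graph follows parent relations only, while imports_graph also
+ // follows "sessions" imports; cf. option -b of the "isabelle sessions" tool below.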
+
+ override def toString: String =
+ imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")")
+ }
+
+
+ /* parser */
+
+ private val CHAPTER_DEFINITION = "chapter_definition"
+ private val CHAPTER = "chapter"
+ private val SESSION = "session"
+ private val DESCRIPTION = "description"
+ private val DIRECTORIES = "directories"
+ private val OPTIONS = "options"
+ private val SESSIONS = "sessions"
+ private val THEORIES = "theories"
+ private val GLOBAL = "global"
+ private val DOCUMENT_THEORIES = "document_theories"
+ private val DOCUMENT_FILES = "document_files"
+ private val EXPORT_FILES = "export_files"
+ private val EXPORT_CLASSPATH = "export_classpath"
+
+ val root_syntax: Outer_Syntax =
+ Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + "in" +
+ GLOBAL +
+ (CHAPTER_DEFINITION, Keyword.THY_DECL) +
+ (CHAPTER, Keyword.THY_DECL) +
+ (SESSION, Keyword.THY_DECL) +
+ (DESCRIPTION, Keyword.QUASI_COMMAND) +
+ (DIRECTORIES, Keyword.QUASI_COMMAND) +
+ (OPTIONS, Keyword.QUASI_COMMAND) +
+ (SESSIONS, Keyword.QUASI_COMMAND) +
+ (THEORIES, Keyword.QUASI_COMMAND) +
+ (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
+ (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
+ (EXPORT_FILES, Keyword.QUASI_COMMAND) +
+ (EXPORT_CLASSPATH, Keyword.QUASI_COMMAND)
+
+ abstract class Entry
+ object Chapter_Def {
+ def empty(chapter: String): Chapter_Def = Chapter_Def(Position.none, chapter, Nil, "")
+ }
+ sealed case class Chapter_Def(
+ pos: Position.T,
+ name: String,
+ groups: List[String],
+ description: String
+ ) extends Entry
+ sealed case class Chapter_Entry(name: String) extends Entry
+ sealed case class Session_Entry(
+ pos: Position.T,
+ name: String,
+ groups: List[String],
+ path: String,
+ parent: Option[String],
+ description: String,
+ options: List[Options.Spec],
+ imports: List[String],
+ directories: List[String],
+ theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
+ document_theories: List[(String, Position.T)],
+ document_files: List[(String, String)],
+ export_files: List[(String, Int, List[String])],
+ export_classpath: List[String]
+ ) extends Entry {
+ def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
+ theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
+ def document_theories_no_position: List[String] =
+ document_theories.map(_._1)
+ }
+
+ object Chapter_Defs {
+ val empty: Chapter_Defs = new Chapter_Defs(Nil)
+ }
+
+ class Chapter_Defs private(rev_list: List[Chapter_Def]) {
+ def list: List[Chapter_Def] = rev_list.reverse
+
+ override def toString: String =
+ list.map(_.name).mkString("Chapter_Defs(", ", ", ")")
+
+ def get(chapter: String): Option[Chapter_Def] =
+ rev_list.find(_.name == chapter)
+
+ def apply(chapter: String): Chapter_Def =
+ get(chapter) getOrElse Chapter_Def.empty(chapter)
+
+ def + (entry: Chapter_Def): Chapter_Defs =
+ get(entry.name) match {
+ case None => new Chapter_Defs(entry :: rev_list)
+ case Some(old_entry) =>
+ error("Duplicate chapter definition " + quote(entry.name) +
+ Position.here(old_entry.pos) + Position.here(entry.pos))
+ }
+ }
+
+ private object Parsers extends Options.Parsers {
+ private val groups: Parser[List[String]] =
+ ($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)
+
+ private val description: Parser[String] =
+ ($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")
+
+ private val chapter_def: Parser[Chapter_Def] =
+ command(CHAPTER_DEFINITION) ~! (position(chapter_name) ~ groups ~ description) ^^
+ { case _ ~ ((a, pos) ~ b ~ c) => Chapter_Def(pos, a, b, c) }
+
+ private val chapter_entry: Parser[Chapter_Entry] =
+ command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter_Entry(a) }
+
+ private val session_entry: Parser[Session_Entry] = {
+ val options = $$$("[") ~> rep1sep(option_spec, $$$(",")) <~ $$$("]")
+
+ val theory_entry =
+ position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) }
+
+ val theories =
+ $$$(THEORIES) ~!
+ ((options | success(Nil)) ~ rep1(theory_entry)) ^^
+ { case _ ~ (x ~ y) => (x, y) }
+
+ val document_theories =
+ $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }
+
+ val document_files =
+ $$$(DOCUMENT_FILES) ~! (in_path_parens("document") ~ rep1(path)) ^^
+ { case _ ~ (x ~ y) => y.map((x, _)) }
+
+ val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0)
+
+ val export_files =
+ $$$(EXPORT_FILES) ~! (in_path_parens("export") ~ prune ~ rep1(embedded)) ^^
+ { case _ ~ (x ~ y ~ z) => (x, y, z) }
+
+ val export_classpath =
+ $$$(EXPORT_CLASSPATH) ~! (rep1(embedded) | success(List("*:classpath/*.jar"))) ^^
+ { case _ ~ x => x }
+
+ command(SESSION) ~!
+ (position(session_name) ~ groups ~ in_path(".") ~
+ ($$$("=") ~!
+ (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ description ~
+ (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
+ (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
+ (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
+ rep(theories) ~
+ (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
+ (rep(document_files) ^^ (x => x.flatten)) ~
+ rep(export_files) ~
+ opt(export_classpath)))) ^^
+ { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) =>
+ Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l, m.getOrElse(Nil)) }
+ }
+
+ def parse_root(path: Path): List[Entry] = {
+ val toks = Token.explode(root_syntax.keywords, File.read(path))
+ val start = Token.Pos.file(path.implode)
+ val parser: Parser[Entry] = chapter_def | chapter_entry | session_entry
+ parse_all(rep(parser), Token.reader(toks, start)) match {
+ case Success(result, _) => result
+ case bad => error(bad.toString)
+ }
+ }
+ }
+
+ def parse_root(path: Path): List[Entry] = Parsers.parse_root(path)
+
+ def parse_root_entries(path: Path): List[Session_Entry] =
+ Parsers.parse_root(path).flatMap(Library.as_subclass(classOf[Session_Entry]))
+
+ def parse_roots(roots: Path): List[String] = {
+ for {
+ line <- split_lines(File.read(roots))
+ if !(line == "" || line.startsWith("#"))
+ } yield line
+ }
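+
+ // A ROOTS catalog is plain text: one sub-directory per line, ignoring empty lines
+ // and lines starting with "#", e.g.:
+ //
+ //   # sub-directories with ROOT files
+ //   Sub1
+ //   Sub2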
+
+
+ /* load sessions from certain directories */
+
+ def is_session_dir(dir: Path, ssh: SSH.System = SSH.Local): Boolean =
+ ssh.is_file(dir + ROOT) || ssh.is_file(dir + ROOTS)
+
+ def check_session_dir(dir: Path): Path =
+ if (is_session_dir(dir)) File.pwd() + dir.expand
+ else {
+ error("Bad session root directory: " + dir.expand.toString +
+ "\n (missing \"ROOT\" or \"ROOTS\")")
+ }
+
+ def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = {
+ val default_dirs = Components.directories().filter(is_session_dir(_))
+ for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) }
+ yield (select, dir.canonical)
+ }
+
+ sealed case class Root_File(path: Path, select: Boolean) {
+ val key: JFile = path.canonical_file
+ def dir: Path = path.dir
+
+ lazy val entries: List[Entry] = Parsers.parse_root(path)
+ }
+
+ def load_root_files(
+ dirs: List[Path] = Nil,
+ select_dirs: List[Path] = Nil
+ ): List[Root_File] = {
+ def load_dir(select: Boolean, dir: Path): List[Root_File] =
+ load_root(select, dir) ::: load_roots(select, dir)
+
+ def load_root(select: Boolean, dir: Path): List[Root_File] = {
+ val root = dir + ROOT
+ if (root.is_file) List(Root_File(root, select)) else Nil
+ }
+
+ def load_roots(select: Boolean, dir: Path): List[Root_File] = {
+ val roots = dir + ROOTS
+ if (roots.is_file) {
+ for {
+ entry <- parse_roots(roots)
+ dir1 =
+ try { check_session_dir(dir + Path.explode(entry)) }
+ catch {
+ case ERROR(msg) =>
+ error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
+ }
+ res <- load_dir(select, dir1)
+ } yield res
+ }
+ else Nil
+ }
+
+ val raw_roots: List[Root_File] =
+ for {
+ (select, dir) <- directories(dirs, select_dirs)
+ root <- load_dir(select, check_session_dir(dir))
+ } yield root
+
+ var next_root = 0
+ var seen_roots = Map.empty[JFile, (Root_File, Int)]
+ for (root <- raw_roots) {
+ seen_roots.get(root.key) match {
+ case None =>
+ seen_roots += (root.key -> (root, next_root))
+ next_root += 1
+ case Some((root0, next0)) =>
+ val root1 = root0.copy(select = root0.select || root.select)
+ seen_roots += (root0.key -> (root1, next0))
+ }
+ }
+ seen_roots.valuesIterator.toList.sortBy(_._2).map(_._1)
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions",
+ Scala_Project.here,
+ { args =>
+ var base_sessions: List[String] = Nil
+ var select_dirs: List[Path] = Nil
+ var requirements = false
+ var exclude_session_groups: List[String] = Nil
+ var all_sessions = false
+ var build_graph = false
+ var dirs: List[Path] = Nil
+ var session_groups: List[String] = Nil
+ var exclude_sessions: List[String] = Nil
+
+ val getopts = Getopts("""
+Usage: isabelle sessions [OPTIONS] [SESSIONS ...]
+
+ Options are:
+ -B NAME include session NAME and all descendants
+ -D DIR include session directory and select its sessions
+ -R refer to requirements of selected sessions
+ -X NAME exclude sessions from group NAME and all descendants
+ -a select all sessions
+ -b follow session build dependencies (default: source imports)
+ -d DIR include session directory
+ -g NAME select session group NAME
+ -x NAME exclude session NAME and all descendants
+
+ Explore the structure of Isabelle sessions and print result names in
+ topological order (on stdout).
+""",
+ "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
+ "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
+ "R" -> (_ => requirements = true),
+ "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
+ "a" -> (_ => all_sessions = true),
+ "b" -> (_ => build_graph = true),
+ "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
+ "g:" -> (arg => session_groups = session_groups ::: List(arg)),
+ "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))
+
+ val sessions = getopts(args)
+
+ val options = Options.init()
+
+ val selection =
+ Selection(requirements = requirements, all_sessions = all_sessions,
+ base_sessions = base_sessions, exclude_session_groups = exclude_session_groups,
+ exclude_sessions = exclude_sessions, session_groups = session_groups,
+ sessions = sessions)
+ val sessions_structure =
+ load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection)
+
+ val order =
+ if (build_graph) sessions_structure.build_topological_order
+ else sessions_structure.imports_topological_order
+ for (name <- order) Output.writeln(name, stdout = true)
+ })
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Build/store.scala Sat Jan 20 15:07:41 2024 +0100
@@ -0,0 +1,512 @@
+/* Title: Pure/Build/store.scala
+ Author: Makarius
+
+Persistent store for session content: within file-system and/or SQL database.
+*/
+
+package isabelle
+
+
+import java.sql.SQLException
+
+
+object Store {
+ def apply(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
+ new Store(options, cache)
+
+
+ /* session build info */
+
+ sealed case class Build_Info(
+ sources: SHA1.Shasum,
+ input_heaps: SHA1.Shasum,
+ output_heap: SHA1.Shasum,
+ return_code: Int,
+ uuid: String
+ ) {
+ def ok: Boolean = return_code == 0
+ }
+
+
+ /* session sources */
+
+ sealed case class Source_File(
+ name: String,
+ digest: SHA1.Digest,
+ compressed: Boolean,
+ body: Bytes,
+ cache: Compress.Cache
+ ) {
+ override def toString: String = name
+
+ def bytes: Bytes = if (compressed) body.uncompress(cache = cache) else body
+ }
+
+ object Sources {
+ def load(session_base: Sessions.Base, cache: Compress.Cache = Compress.Cache.none): Sources =
+ new Sources(
+ session_base.session_sources.foldLeft(Map.empty) {
+ case (sources, (path, digest)) =>
+ def err(): Nothing = error("Incoherent digest for source file: " + path)
+ val name = File.symbolic_path(path)
+ sources.get(name) match {
+ case Some(source_file) =>
+ if (source_file.digest == digest) sources else err()
+ case None =>
+ val bytes = Bytes.read(path)
+ if (bytes.sha1_digest == digest) {
+ val (compressed, body) =
+ bytes.maybe_compress(Compress.Options_Zstd(), cache = cache)
+ val file = Source_File(name, digest, compressed, body, cache)
+ sources + (name -> file)
+ }
+ else err()
+ }
+ })
+ }
+
+ class Sources private(rep: Map[String, Source_File]) extends Iterable[Source_File] {
+ override def toString: String =
+ rep.values.toList.sortBy(_.name).mkString("Sources(", ", ", ")")
+ override def iterator: Iterator[Source_File] = rep.valuesIterator
+
+ def get(name: String): Option[Source_File] = rep.get(name)
+ def apply(name: String): Source_File =
+ get(name).getOrElse(error("Missing session sources entry " + quote(name)))
+ }
+
+
+ /* SQL data model */
+
+ object private_data extends SQL.Data() {
+ override lazy val tables = SQL.Tables(Session_Info.table, Sources.table)
+
+ object Session_Info {
+ val session_name = SQL.Column.string("session_name").make_primary_key
+
+ // Build_Log.Session_Info
+ val session_timing = SQL.Column.bytes("session_timing")
+ val command_timings = SQL.Column.bytes("command_timings")
+ val theory_timings = SQL.Column.bytes("theory_timings")
+ val ml_statistics = SQL.Column.bytes("ml_statistics")
+ val task_statistics = SQL.Column.bytes("task_statistics")
+ val errors = SQL.Column.bytes("errors")
+ val build_log_columns =
+ List(session_name, session_timing, command_timings, theory_timings,
+ ml_statistics, task_statistics, errors)
+
+ // Build_Info
+ val sources = SQL.Column.string("sources")
+ val input_heaps = SQL.Column.string("input_heaps")
+ val output_heap = SQL.Column.string("output_heap")
+ val return_code = SQL.Column.int("return_code")
+ val uuid = SQL.Column.string("uuid")
+ val build_columns = List(sources, input_heaps, output_heap, return_code, uuid)
+
+ val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
+ }
+
+ object Sources {
+ val session_name = SQL.Column.string("session_name").make_primary_key
+ val name = SQL.Column.string("name").make_primary_key
+ val digest = SQL.Column.string("digest")
+ val compressed = SQL.Column.bool("compressed")
+ val body = SQL.Column.bytes("body")
+
+ val table =
+ SQL.Table("isabelle_sources", List(session_name, name, digest, compressed, body))
+
+ def where_equal(session_name: String, name: String = ""): SQL.Source =
+ SQL.where_and(
+ Sources.session_name.equal(session_name),
+ if_proper(name, Sources.name.equal(name)))
+ }
+
+ def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
+ db.execute_query_statementO[Bytes](
+ Session_Info.table.select(List(column), sql = Session_Info.session_name.where_equal(name)),
+ res => res.bytes(column)
+ ).getOrElse(Bytes.empty)
+
+ def read_properties(
+ db: SQL.Database, name: String, column: SQL.Column, cache: Term.Cache
+ ): List[Properties.T] = Properties.uncompress(read_bytes(db, name, column), cache = cache)
+
+ def read_session_timing(db: SQL.Database, name: String, cache: Term.Cache): Properties.T =
+ Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache)
+
+ def read_command_timings(db: SQL.Database, name: String): Bytes =
+ read_bytes(db, name, Session_Info.command_timings)
+
+ def read_theory_timings(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
+ read_properties(db, name, Session_Info.theory_timings, cache)
+
+ def read_ml_statistics(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
+ read_properties(db, name, Session_Info.ml_statistics, cache)
+
+ def read_task_statistics(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
+ read_properties(db, name, Session_Info.task_statistics, cache)
+
+ def read_errors(db: SQL.Database, name: String, cache: Term.Cache): List[String] =
+ Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache)
+
+ def read_build(db: SQL.Database, name: String): Option[Store.Build_Info] =
+ db.execute_query_statementO[Store.Build_Info](
+ Session_Info.table.select(sql = Session_Info.session_name.where_equal(name)),
+ { res =>
+ val uuid =
+ try { Option(res.string(Session_Info.uuid)).getOrElse("") }
+ catch { case _: SQLException => "" }
+ Store.Build_Info(
+ SHA1.fake_shasum(res.string(Session_Info.sources)),
+ SHA1.fake_shasum(res.string(Session_Info.input_heaps)),
+ SHA1.fake_shasum(res.string(Session_Info.output_heap)),
+ res.int(Session_Info.return_code),
+ uuid)
+ })
+
+ def write_session_info(
+ db: SQL.Database,
+ cache: Compress.Cache,
+ session_name: String,
+ build_log: Build_Log.Session_Info,
+ build: Build_Info
+ ): Unit = {
+ db.execute_statement(Session_Info.table.insert(), body =
+ { stmt =>
+ stmt.string(1) = session_name
+ stmt.bytes(2) = Properties.encode(build_log.session_timing)
+ stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache)
+ stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache)
+ stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = cache)
+ stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = cache)
+ stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = cache)
+ stmt.string(8) = build.sources.toString
+ stmt.string(9) = build.input_heaps.toString
+ stmt.string(10) = build.output_heap.toString
+ stmt.int(11) = build.return_code
+ stmt.string(12) = build.uuid
+ })
+ }
+
+ def write_sources(
+ db: SQL.Database,
+ session_name: String,
+ source_files: Iterable[Source_File]
+ ): Unit = {
+ db.execute_batch_statement(Sources.table.insert(), batch =
+ for (source_file <- source_files) yield { (stmt: SQL.Statement) =>
+ stmt.string(1) = session_name
+ stmt.string(2) = source_file.name
+ stmt.string(3) = source_file.digest.toString
+ stmt.bool(4) = source_file.compressed
+ stmt.bytes(5) = source_file.body
+ })
+ }
+
+ def read_sources(
+ db: SQL.Database,
+ session_name: String,
+ name: String,
+ cache: Compress.Cache
+ ): List[Source_File] = {
+ db.execute_query_statement(
+ Sources.table.select(
+ sql = Sources.where_equal(session_name, name = name) + SQL.order_by(List(Sources.name))),
+ List.from[Source_File],
+ { res =>
+ val res_name = res.string(Sources.name)
+ val digest = SHA1.fake_digest(res.string(Sources.digest))
+ val compressed = res.bool(Sources.compressed)
+ val body = res.bytes(Sources.body)
+ Source_File(res_name, digest, compressed, body, cache)
+ }
+ )
+ }
+ }
+}
+
+class Store private(val options: Options, val cache: Term.Cache) {
+ store =>
+
+ override def toString: String = "Store(output_dir = " + output_dir.absolute + ")"
+
+
+ /* directories */
+
+ val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER")
+ val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER")
+
+ def system_heaps: Boolean = options.bool("system_heaps")
+
+ val output_dir: Path =
+ if (system_heaps) system_output_dir else user_output_dir
+
+ val input_dirs: List[Path] =
+ if (system_heaps) List(system_output_dir)
+ else List(user_output_dir, system_output_dir)
+
+ def presentation_dir: Path =
+ if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
+ else Path.explode("$ISABELLE_BROWSER_INFO")
+
+
+ /* file names */
+
+ def heap(name: String): Path = Path.basic(name)
+ def database(name: String): Path = Path.basic("log") + Path.basic(name).db
+ def log(name: String): Path = Path.basic("log") + Path.basic(name)
+ def log_gz(name: String): Path = log(name).gz
+
+ def output_heap(name: String): Path = output_dir + heap(name)
+ def output_database(name: String): Path = output_dir + database(name)
+ def output_log(name: String): Path = output_dir + log(name)
+ def output_log_gz(name: String): Path = output_dir + log_gz(name)
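+
+ // e.g. for session "HOL": heap("HOL") = HOL, database("HOL") = log/HOL.db,
+ // log_gz("HOL") = log/HOL.gz -- all relative to output_dir or input_dirs above.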
+
+
+ /* heap */
+
+ def find_heap(name: String): Option[Path] =
+ input_dirs.map(_ + heap(name)).find(_.is_file)
+
+ def the_heap(name: String): Path =
+ find_heap(name) getOrElse
+ error("Missing heap image for session " + quote(name) + " -- expected in:\n" +
+ cat_lines(input_dirs.map(dir => " " + File.standard_path(dir))))
+
+ def heap_shasum(database_server: Option[SQL.Database], name: String): SHA1.Shasum = {
+ def get_database: Option[SHA1.Digest] = {
+ for {
+ db <- database_server
+ digest <- ML_Heap.get_entries(db, List(name)).valuesIterator.nextOption()
+ } yield digest
+ }
+
+ def get_file: Option[SHA1.Digest] =
+ find_heap(name).flatMap(ML_Heap.read_file_digest)
+
+ get_database orElse get_file match {
+ case Some(digest) => SHA1.shasum(digest, name)
+ case None => SHA1.no_shasum
+ }
+ }
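+
+ // Fallback chain: prefer the digest recorded in the database server, otherwise the
+ // digest of the heap file; SHA1.no_shasum means that no heap image is available.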
+
+
+ /* databases for build process and session content */
+
+ def find_database(name: String): Option[Path] =
+ input_dirs.map(_ + database(name)).find(_.is_file)
+
+ def build_database_server: Boolean = options.bool("build_database_server")
+ def build_database: Boolean = options.bool("build_database")
+
+ def open_server(): SSH.Server =
+ PostgreSQL.open_server(options,
+ host = options.string("build_database_host"),
+ port = options.int("build_database_port"),
+ ssh_host = options.string("build_database_ssh_host"),
+ ssh_port = options.int("build_database_ssh_port"),
+ ssh_user = options.string("build_database_ssh_user"))
+
+ def open_database_server(server: SSH.Server = SSH.no_server): PostgreSQL.Database =
+ PostgreSQL.open_database_server(options, server = server,
+ user = options.string("build_database_user"),
+ password = options.string("build_database_password"),
+ database = options.string("build_database_name"),
+ host = options.string("build_database_host"),
+ port = options.int("build_database_port"),
+ ssh_host = options.string("build_database_ssh_host"),
+ ssh_port = options.int("build_database_ssh_port"),
+ ssh_user = options.string("build_database_ssh_user"))
+
+ def maybe_open_database_server(server: SSH.Server = SSH.no_server): Option[SQL.Database] =
+ if (build_database_server) Some(open_database_server(server = server)) else None
+
+ def open_build_database(path: Path, server: SSH.Server = SSH.no_server): SQL.Database =
+ if (build_database_server) open_database_server(server = server)
+ else SQLite.open_database(path, restrict = true)
+
+ def maybe_open_build_database(
+ path: Path = Path.explode("$ISABELLE_HOME_USER/build.db"),
+ server: SSH.Server = SSH.no_server
+ ): Option[SQL.Database] = {
+ if (build_database) Some(open_build_database(path, server = server)) else None
+ }
+
+ def try_open_database(
+ name: String,
+ output: Boolean = false,
+ server: SSH.Server = SSH.no_server,
+ server_mode: Boolean = build_database_server
+ ): Option[SQL.Database] = {
+ def check(db: SQL.Database): Option[SQL.Database] =
+ if (output || session_info_exists(db)) Some(db) else { db.close(); None }
+
+ if (server_mode) check(open_database_server(server = server))
+ else if (output) Some(SQLite.open_database(output_database(name)))
+ else {
+ (for {
+ dir <- input_dirs.view
+ path = dir + database(name) if path.is_file
+ db <- check(SQLite.open_database(path))
+ } yield db).headOption
+ }
+ }
+
+ def error_database(name: String): Nothing =
+ error("Missing build database for session " + quote(name))
+
+ def open_database(
+ name: String,
+ output: Boolean = false,
+ server: SSH.Server = SSH.no_server
+ ): SQL.Database = {
+ try_open_database(name, output = output, server = server) getOrElse error_database(name)
+ }
+
+ def clean_output(
+ database_server: Option[SQL.Database],
+ name: String,
+ session_init: Boolean = false
+ ): Option[Boolean] = {
+ val relevant_db =
+ database_server match {
+ case Some(db) =>
+ ML_Heap.clean_entry(db, name)
+ clean_session_info(db, name)
+ case None => false
+ }
+
+ val del =
+ for {
+ dir <-
+ (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir))
+ file <- List(heap(name), database(name), log(name), log_gz(name))
+ path = dir + file if path.is_file
+ } yield path.file.delete
+
+ if (database_server.isEmpty && session_init) {
+ using(open_database(name, output = true))(clean_session_info(_, name))
+ }
+
+ if (relevant_db || del.nonEmpty) Some(del.forall(identity)) else None
+ }
+
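+ // A build is current iff it succeeded, sources and input heaps match the
+ // given shasums, the recorded output heap agrees with the present digest,
+ // and no required heap image is missing; fresh_build overrides all of this.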
+ def check_output(
+ database_server: Option[SQL.Database],
+ name: String,
+ session_options: Options,
+ sources_shasum: SHA1.Shasum,
+ input_shasum: SHA1.Shasum,
+ fresh_build: Boolean,
+ store_heap: Boolean
+ ): (Boolean, SHA1.Shasum) = {
+ def no_check: (Boolean, SHA1.Shasum) = (false, SHA1.no_shasum)
+
+ def check(db: SQL.Database): (Boolean, SHA1.Shasum) =
+ read_build(db, name) match {
+ case Some(build) =>
+ val output_shasum = heap_shasum(if (db.is_postgresql) Some(db) else None, name)
+ val current =
+ !fresh_build &&
+ build.ok &&
+ Sessions.eq_sources(session_options, build.sources, sources_shasum) &&
+ build.input_heaps == input_shasum &&
+ build.output_heap == output_shasum &&
+ !(store_heap && output_shasum.is_empty)
+ (current, output_shasum)
+ case None => no_check
+ }
+
+ database_server match {
+ case Some(db) => if (session_info_exists(db)) check(db) else no_check
+ case None => using_option(try_open_database(name))(check) getOrElse no_check
+ }
+ }
+
+
+ /* session info */
+
+ def session_info_exists(db: SQL.Database): Boolean =
+ Store.private_data.tables.forall(db.exists_table)
+
+ def session_info_defined(db: SQL.Database, name: String): Boolean =
+ db.execute_query_statementB(
+ Store.private_data.Session_Info.table.select(List(Store.private_data.Session_Info.session_name),
+ sql = Store.private_data.Session_Info.session_name.where_equal(name)))
+
+ def clean_session_info(db: SQL.Database, name: String): Boolean = {
+ Export.clean_session(db, name)
+ Document_Build.clean_session(db, name)
+
+ Store.private_data.transaction_lock(db, create = true, label = "Store.clean_session_info") {
+ val already_defined = session_info_defined(db, name)
+
+ db.execute_statement(
+ SQL.multi(
+ Store.private_data.Session_Info.table.delete(
+ sql = Store.private_data.Session_Info.session_name.where_equal(name)),
+ Store.private_data.Sources.table.delete(
+ sql = Store.private_data.Sources.where_equal(name))))
+
+ already_defined
+ }
+ }
+
+ def write_session_info(
+ db: SQL.Database,
+ session_name: String,
+ sources: Store.Sources,
+ build_log: Build_Log.Session_Info,
+ build: Store.Build_Info
+ ): Unit = {
+ Store.private_data.transaction_lock(db, label = "Store.write_session_info") {
+ for (source_files <- sources.iterator.toList.grouped(200)) {
+ Store.private_data.write_sources(db, session_name, source_files)
+ }
+ Store.private_data.write_session_info(db, cache.compress, session_name, build_log, build)
+ }
+ }
+
+ def read_session_timing(db: SQL.Database, session: String): Properties.T =
+ Store.private_data.transaction_lock(db, label = "Store.read_session_timing") {
+ Store.private_data.read_session_timing(db, session, cache)
+ }
+
+ def read_command_timings(db: SQL.Database, session: String): Bytes =
+ Store.private_data.transaction_lock(db, label = "Store.read_command_timings") {
+ Store.private_data.read_command_timings(db, session)
+ }
+
+ def read_theory_timings(db: SQL.Database, session: String): List[Properties.T] =
+ Store.private_data.transaction_lock(db, label = "Store.read_theory_timings") {
+ Store.private_data.read_theory_timings(db, session, cache)
+ }
+
+ def read_ml_statistics(db: SQL.Database, session: String): List[Properties.T] =
+ Store.private_data.transaction_lock(db, label = "Store.read_ml_statistics") {
+ Store.private_data.read_ml_statistics(db, session, cache)
+ }
+
+ def read_task_statistics(db: SQL.Database, session: String): List[Properties.T] =
+ Store.private_data.transaction_lock(db, label = "Store.read_task_statistics") {
+ Store.private_data.read_task_statistics(db, session, cache)
+ }
+
+ def read_theories(db: SQL.Database, session: String): List[String] =
+ read_theory_timings(db, session).flatMap(Markup.Name.unapply)
+
+ def read_errors(db: SQL.Database, session: String): List[String] =
+ Store.private_data.transaction_lock(db, label = "Store.read_errors") {
+ Store.private_data.read_errors(db, session, cache)
+ }
+
+ def read_build(db: SQL.Database, session: String): Option[Store.Build_Info] =
+ Store.private_data.transaction_lock(db, label = "Store.read_build") {
+ if (session_info_exists(db)) Store.private_data.read_build(db, session) else None
+ }
+
+ def read_sources(db: SQL.Database, session: String, name: String = ""): List[Store.Source_File] =
+ Store.private_data.transaction_lock(db, label = "Store.read_sources") {
+ Store.private_data.read_sources(db, session, name, cache.compress)
+ }
+}
--- a/src/Pure/PIDE/resources.ML Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,483 +0,0 @@
-(* Title: Pure/PIDE/resources.ML
- Author: Makarius
-
-Resources for theories and auxiliary files.
-*)
-
-signature RESOURCES =
-sig
- val default_qualifier: string
- val init_session:
- {session_positions: (string * Properties.T) list,
- session_directories: (string * string) list,
- command_timings: Properties.T list,
- load_commands: (string * Position.T) list,
- scala_functions: (string * ((bool * bool) * Position.T)) list,
- global_theories: (string * string) list,
- loaded_theories: string list} -> unit
- val init_session_yxml: Bytes.T -> unit
- val init_session_env: unit -> unit
- val finish_session_base: unit -> unit
- val global_theory: string -> string option
- val loaded_theory: string -> bool
- val check_session: Proof.context -> string * Position.T -> string
- val last_timing: Toplevel.transition -> Time.time
- val check_load_command: Proof.context -> string * Position.T -> string
- val check_scala_function: Proof.context -> string * Position.T -> string * (bool * bool)
- val master_directory: theory -> Path.T
- val imports_of: theory -> (string * Position.T) list
- val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
- val thy_path: Path.T -> Path.T
- val theory_qualifier: string -> string
- val find_theory_file: string -> Path.T option
- val import_name: string -> Path.T -> string ->
- {node_name: Path.T, master_dir: Path.T, theory_name: string}
- val check_thy: Path.T -> string ->
- {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
- imports: (string * Position.T) list, keywords: Thy_Header.keywords}
- val read_file_node: string -> Path.T -> Path.T * Position.T -> Token.file
- val read_file: Path.T -> Path.T * Position.T -> Token.file
- val parsed_files: (Path.T -> Path.T list) ->
- Token.file Exn.result list * Input.source -> theory -> Token.file list
- val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser
- val parse_file: (theory -> Token.file) parser
- val provide: Path.T * SHA1.digest -> theory -> theory
- val provide_file: Token.file -> theory -> theory
- val provide_file': Token.file -> theory -> Token.file * theory
- val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser
- val provide_parse_file: (theory -> Token.file * theory) parser
- val loaded_files_current: theory -> bool
- val check_path: Proof.context -> Path.T option -> Input.source -> Path.T
- val check_file: Proof.context -> Path.T option -> Input.source -> Path.T
- val check_dir: Proof.context -> Path.T option -> Input.source -> Path.T
- val check_session_dir: Proof.context -> Path.T option -> Input.source -> Path.T
-end;
-
-structure Resources: RESOURCES =
-struct
-
-(* command timings *)
-
-type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*)
-
-val empty_timings: timings = Symtab.empty;
-
-fun update_timings props =
- (case Markup.parse_command_timing_properties props of
- SOME ({file, offset, name}, time) =>
- Symtab.map_default (file, Inttab.empty)
- (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, t + time)))
- | NONE => I);
-
-fun make_timings command_timings =
- fold update_timings command_timings empty_timings;
-
-fun approximative_id name pos =
- (case (Position.file_of pos, Position.offset_of pos) of
- (SOME file, SOME offset) =>
- if name = "" then NONE else SOME {file = file, offset = offset, name = name}
- | _ => NONE);
-
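- (*Lookup is keyed by the (file, offset) of the command position and checked
-   against the command name, so timings from edited sources are discarded.*)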
-fun get_timings timings tr =
- (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of
- SOME {file, offset, name} =>
- (case Symtab.lookup timings file of
- SOME offsets =>
- (case Inttab.lookup offsets offset of
- SOME (name', time) => if name = name' then SOME time else NONE
- | NONE => NONE)
- | NONE => NONE)
- | NONE => NONE)
- |> the_default Time.zeroTime;
-
-
-(* session base *)
-
-val default_qualifier = "Draft";
-
-type entry = {pos: Position.T, serial: serial};
-
-fun make_entry props : entry =
- {pos = Position.of_properties props, serial = serial ()};
-
-val empty_session_base =
- ({session_positions = []: (string * entry) list,
- session_directories = Symtab.empty: Path.T list Symtab.table,
- timings = empty_timings,
- load_commands = []: (string * Position.T) list,
- scala_functions = Symtab.empty: ((bool * bool) * Position.T) Symtab.table},
- {global_theories = Symtab.empty: string Symtab.table,
- loaded_theories = Symset.empty: Symset.T});
-
-val global_session_base =
- Synchronized.var "Sessions.base" empty_session_base;
-
-fun init_session
- {session_positions, session_directories, command_timings, load_commands,
- scala_functions, global_theories, loaded_theories} =
- Synchronized.change global_session_base
- (fn _ =>
- ({session_positions = sort_by #1 (map (apsnd make_entry) session_positions),
- session_directories =
- Symtab.build (session_directories |> fold_rev (fn (dir, name) =>
- Symtab.cons_list (name, Path.explode dir))),
- timings = make_timings command_timings,
- load_commands = load_commands,
- scala_functions = Symtab.make scala_functions},
- {global_theories = Symtab.make global_theories,
- loaded_theories = Symset.make loaded_theories}));
-
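- (*The decoder below must mirror the XML.Encode structure produced by
-   Resources.init_session_yxml on the Scala side (see resources.scala).*)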
-fun init_session_yxml yxml =
- let
- val (session_positions, (session_directories, (command_timings,
- (load_commands, (scala_functions, (global_theories, loaded_theories)))))) =
- YXML.parse_body_bytes yxml |>
- let open XML.Decode in
- (pair (list (pair string properties))
- (pair (list (pair string string))
- (pair (list properties)
- (pair (list (pair string properties))
- (pair (list (pair string (pair (pair bool bool) properties)))
- (pair (list (pair string string)) (list string)))))))
- end;
- in
- init_session
- {session_positions = session_positions,
- session_directories = session_directories,
- command_timings = command_timings,
- load_commands = (map o apsnd) Position.of_properties load_commands,
- scala_functions = (map o apsnd o apsnd) Position.of_properties scala_functions,
- global_theories = global_theories,
- loaded_theories = loaded_theories}
- end;
-
-fun init_session_env () =
- (case getenv "ISABELLE_INIT_SESSION" of
- "" => ()
- | name =>
- try Bytes.read (Path.explode name)
- |> Option.app init_session_yxml);
-
-val _ = init_session_env ();
-
-fun finish_session_base () =
- Synchronized.change global_session_base
- (apfst (K (#1 empty_session_base)));
-
-fun get_session_base f = f (Synchronized.value global_session_base);
-fun get_session_base1 f = get_session_base (f o #1);
-fun get_session_base2 f = get_session_base (f o #2);
-
-fun global_theory a = Symtab.lookup (get_session_base2 #global_theories) a;
-fun loaded_theory a = Symset.member (get_session_base2 #loaded_theories) a;
-
-fun check_session ctxt arg =
- Completion.check_item "session"
- (fn (name, {pos, serial}) =>
- Position.make_entity_markup {def = false} serial Markup.sessionN (name, pos))
- (get_session_base1 #session_positions) ctxt arg;
-
-fun last_timing tr = get_timings (get_session_base1 #timings) tr;
-
-fun check_load_command ctxt arg =
- Completion.check_entity Markup.load_commandN (get_session_base1 #load_commands) ctxt arg;
-
-
-(* Scala functions *)
-
-fun check_scala_function ctxt arg =
- let
- val table = get_session_base1 #scala_functions;
- val funs = Symtab.fold (fn (a, (_, pos)) => cons (a, pos)) table [] |> sort_by #1;
- val name = Completion.check_entity Markup.scala_functionN funs ctxt arg;
- val flags = #1 (the (Symtab.lookup table name));
- in (name, flags) end;
-
-val _ = Theory.setup
- (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>scala_function\<close>
- (Scan.lift Parse.embedded_position) (#1 oo check_scala_function) #>
- ML_Antiquotation.inline_embedded \<^binding>\<open>scala_function\<close>
- (Args.context -- Scan.lift Parse.embedded_position
- >> (uncurry check_scala_function #> #1 #> ML_Syntax.print_string)) #>
- ML_Antiquotation.value_embedded \<^binding>\<open>scala\<close>
- (Args.context -- Scan.lift Parse.embedded_position >> (fn (ctxt, arg) =>
- let
- val (name, (single, bytes)) = check_scala_function ctxt arg;
- val func =
- (if single then "Scala.function1" else "Scala.function") ^
- (if bytes then "_bytes" else "");
- in ML_Syntax.atomic (func ^ " " ^ ML_Syntax.print_string name) end)));
-
-
-(* manage source files *)
-
-type files =
- {master_dir: Path.T, (*master directory of theory source*)
- imports: (string * Position.T) list, (*source specification of imports*)
- provided: (Path.T * SHA1.digest) list}; (*source path, digest*)
-
-fun make_files (master_dir, imports, provided): files =
- {master_dir = master_dir, imports = imports, provided = provided};
-
-structure Files = Theory_Data
-(
- type T = files;
- val empty = make_files (Path.current, [], []);
- fun merge ({master_dir, imports, provided = provided1}, {provided = provided2, ...}) =
- let val provided' = Library.merge (op =) (provided1, provided2)
- in make_files (master_dir, imports, provided') end
-);
-
-fun map_files f =
- Files.map (fn {master_dir, imports, provided} =>
- make_files (f (master_dir, imports, provided)));
-
-
-val master_directory = #master_dir o Files.get;
-val imports_of = #imports o Files.get;
-
-fun begin_theory master_dir {name, imports, keywords} parents =
- let
- val thy =
- Theory.begin_theory name parents
- |> map_files (fn _ => (Path.explode (File.symbolic_path master_dir), imports, []))
- |> Thy_Header.add_keywords keywords;
- val ctxt = Proof_Context.init_global thy;
- val _ = List.app (ignore o check_load_command ctxt o #load_command o #2) keywords;
- in thy end;
-
-
-(* theory files *)
-
-val thy_path = Path.ext "thy";
-
-fun theory_qualifier theory =
- (case global_theory theory of
- SOME qualifier => qualifier
- | NONE => Long_Name.qualifier theory);
-
-fun literal_theory theory =
- Long_Name.is_qualified theory orelse is_some (global_theory theory);
-
-fun theory_name qualifier theory =
- if literal_theory theory then theory
- else Long_Name.qualify qualifier theory;
-
-fun find_theory_file thy_name =
- let
- val thy_file = thy_path (Path.basic (Long_Name.base_name thy_name));
- val session = theory_qualifier thy_name;
- val dirs = Symtab.lookup_list (get_session_base1 #session_directories) session;
- in
- dirs |> get_first (fn dir =>
- let val path = dir + thy_file
- in if File.is_file path then SOME path else NONE end)
- end;
-
-fun make_theory_node node_name theory =
- {node_name = node_name, master_dir = Path.dir node_name, theory_name = theory};
-
-fun loaded_theory_node theory =
- {node_name = Path.basic theory, master_dir = Path.current, theory_name = theory};
-
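- (*Import resolution: loaded theories become virtual nodes; otherwise the
-   theory file is sought in the session directories of its qualifier, with
-   a file-path relative to dir as the final fallback.*)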
-fun import_name qualifier dir s =
- let
- val theory = theory_name qualifier (Thy_Header.import_name s);
- fun theory_node path = make_theory_node path theory;
- val literal_import = literal_theory theory andalso qualifier <> theory_qualifier theory;
- val _ =
- if literal_import andalso not (Thy_Header.is_base_name s) then
- error ("Bad import of theory from other session via file-path: " ^ quote s)
- else ();
- in
- if loaded_theory theory then loaded_theory_node theory
- else
- (case find_theory_file theory of
- SOME node_name => theory_node node_name
- | NONE =>
- if Thy_Header.is_base_name s andalso Long_Name.is_qualified s
- then loaded_theory_node theory
- else theory_node (File.full_path dir (thy_path (Path.expand (Path.explode s)))))
- end;
-
-fun check_file dir file = File.check_file (File.full_path dir file);
-
-fun check_thy dir thy_name =
- let
- val thy_base_name = Long_Name.base_name thy_name;
- val master_file =
- (case find_theory_file thy_name of
- SOME path => check_file Path.current path
- | NONE => check_file dir (thy_path (Path.basic thy_base_name)));
- val text = File.read master_file;
-
- val {name = (name, pos), imports, keywords} =
- Thy_Header.read (Position.file (File.symbolic_path master_file)) text;
- val _ =
- thy_base_name <> name andalso
- error ("Bad theory name " ^ quote name ^
- " for file " ^ Path.print (Path.base master_file) ^ Position.here pos);
- in
- {master = (master_file, SHA1.digest text), text = text, theory_pos = pos,
- imports = imports, keywords = keywords}
- end;
-
-
-(* read_file *)
-
-fun read_file_node file_node master_dir (src_path, pos) =
- let
- fun read_local () =
- let
- val path = File.check_file (File.full_path master_dir src_path);
- val text = File.read path;
- val file_pos = Position.file (File.symbolic_path path);
- in (text, file_pos) end;
-
- fun read_remote () =
- let
- val text = Bytes.content (Isabelle_System.download file_node);
- val file_pos = Position.file file_node;
- in (text, file_pos) end;
-
- val (text, file_pos) =
- (case try Url.explode file_node of
- NONE => read_local ()
- | SOME (Url.File _) => read_local ()
- | _ => read_remote ());
-
- val lines = split_lines text;
- val digest = SHA1.digest text;
- in {src_path = src_path, lines = lines, digest = digest, pos = Position.copy_id pos file_pos} end
- handle ERROR msg => error (msg ^ Position.here pos);
-
-val read_file = read_file_node "";
-
-
-(* load files *)
-
-fun parsed_files make_paths (files, source) thy =
- if null files then
- let
- val master_dir = master_directory thy;
- val name = Input.string_of source;
- val pos = Input.pos_of source;
- val delimited = Input.is_delimited source;
- val src_paths = make_paths (Path.explode name);
- val reports =
- src_paths |> maps (fn src_path =>
- [(pos, Markup.path (File.symbolic_path (master_dir + src_path))),
- (pos, Markup.language_path delimited)]);
- val _ = Position.reports reports;
- in map (read_file master_dir o rpair pos) src_paths end
- else map Exn.release files;
-
-fun parse_files make_paths =
- (Scan.ahead Parse.not_eof >> Token.get_files) -- Parse.path_input >> parsed_files make_paths;
-
-val parse_file = parse_files single >> (fn f => f #> the_single);
-
-
-fun provide (src_path, id) =
- map_files (fn (master_dir, imports, provided) =>
- if AList.defined (op =) provided src_path then
- error ("Duplicate use of source file: " ^ Path.print src_path)
- else (master_dir, imports, (src_path, id) :: provided));
-
-fun provide_file (file: Token.file) = provide (#src_path file, #digest file);
-fun provide_file' file thy = (file, provide_file file thy);
-
-fun provide_parse_files make_paths =
- parse_files make_paths >> (fn files => fn thy => fold_map provide_file' (files thy) thy);
-
-val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single);
-
-
-fun load_file thy src_path =
- let
- val full_path = check_file (master_directory thy) src_path;
- val text = File.read full_path;
- val id = SHA1.digest text;
- in ((full_path, id), text) end;
-
-fun loaded_files_current thy =
- #provided (Files.get thy) |>
- forall (fn (src_path, id) =>
- (case try (load_file thy) src_path of
- NONE => false
- | SOME ((_, id'), _) => id = id'));
-
-
-(* formal check *)
-
-fun formal_check (check: Path.T -> Path.T) ctxt opt_dir source =
- let
- val name = Input.string_of source;
- val pos = Input.pos_of source;
- val delimited = Input.is_delimited source;
-
- val _ = Context_Position.report ctxt pos (Markup.language_path delimited);
-
- fun err msg = error (msg ^ Position.here pos);
- val dir =
- (case opt_dir of
- SOME dir => dir
- | NONE => master_directory (Proof_Context.theory_of ctxt));
- val path = dir + Path.explode name handle ERROR msg => err msg;
- val _ = Path.expand path handle ERROR msg => err msg;
- val _ = Context_Position.report ctxt pos (Markup.path (File.symbolic_path path));
- val _ = check path handle ERROR msg => err msg;
- in path end;
-
-val check_path = formal_check I;
-val check_file = formal_check File.check_file;
-val check_dir = formal_check File.check_dir;
-
-fun check_session_dir ctxt opt_dir s =
- let
- val dir = Path.expand (check_dir ctxt opt_dir s);
- val ok =
- File.is_file (dir + Path.explode("ROOT")) orelse
- File.is_file (dir + Path.explode("ROOTS"));
- in
- if ok then dir
- else
- error ("Bad session root directory (missing ROOT or ROOTS): " ^
- Path.print dir ^ Position.here (Input.pos_of s))
- end;
-
-
-(* antiquotations *)
-
-local
-
-fun document_antiq (check: Proof.context -> Path.T option -> Input.source -> Path.T) =
- Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
- (check ctxt NONE source;
- Latex.string (Latex.output_ascii_breakable "/" (Input.string_of source))
- |> Latex.macro "isatt"));
-
-fun ML_antiq check =
- Args.context -- Scan.lift Parse.path_input >> (fn (ctxt, source) =>
- check ctxt (SOME Path.current) source |> ML_Syntax.print_path);
-
-in
-
-val _ = Theory.setup
- (Document_Output.antiquotation_verbatim_embedded \<^binding>\<open>session\<close>
- (Scan.lift Parse.embedded_position) check_session #>
- Document_Output.antiquotation_raw_embedded \<^binding>\<open>path\<close> (document_antiq check_path) (K I) #>
- Document_Output.antiquotation_raw_embedded \<^binding>\<open>file\<close> (document_antiq check_file) (K I) #>
- Document_Output.antiquotation_raw_embedded \<^binding>\<open>dir\<close> (document_antiq check_dir) (K I) #>
- ML_Antiquotation.value_embedded \<^binding>\<open>path\<close> (ML_antiq check_path) #>
- ML_Antiquotation.value_embedded \<^binding>\<open>file\<close> (ML_antiq check_file) #>
- ML_Antiquotation.value_embedded \<^binding>\<open>dir\<close> (ML_antiq check_dir) #>
- ML_Antiquotation.value_embedded \<^binding>\<open>path_binding\<close>
- (Scan.lift (Parse.position Parse.path) >>
- (ML_Syntax.print_path_binding o Path.explode_binding)) #>
- ML_Antiquotation.value \<^binding>\<open>master_dir\<close>
- (Args.theory >> (ML_Syntax.print_path o master_directory)));
-
-end;
-
-end;
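Note: theory-name qualification (theory_name/literal_theory above, mirrored in
resources.scala below) reduces to plain string operations. A self-contained
Scala sketch, with Long_Name approximated by its "." separator (an assumption
for illustration, not the Isabelle API):

    object Qualify {
      def is_qualified(name: String): Boolean = name.contains('.')
      def qualify(qualifier: String, name: String): String =
        if (qualifier.isEmpty || name.isEmpty) name else qualifier + "." + name
      // global or already-qualified names are literal; others get the session qualifier
      def theory_name(global: Set[String], qualifier: String, theory: String): String =
        if (is_qualified(theory) || global(theory)) theory
        else qualify(qualifier, theory)
    }

For example, theory_name(Set("Main"), "HOL", "Main") stays "Main", while
theory_name(Set(), "HOL", "Groups") becomes "HOL.Groups".
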
--- a/src/Pure/PIDE/resources.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,469 +0,0 @@
-/* Title: Pure/PIDE/resources.scala
- Author: Makarius
-
-Resources for theories and auxiliary files.
-*/
-
-package isabelle
-
-
-import scala.util.parsing.input.Reader
-
-import java.io.{File => JFile}
-
-
-object Resources {
- def bootstrap: Resources = new Resources(Sessions.Background(base = Sessions.Base.bootstrap))
-
- def hidden_node(name: Document.Node.Name): Boolean =
- !name.is_theory || name.theory == Sessions.root_name || File_Format.registry.is_theory(name)
-
- def html_document(snapshot: Document.Snapshot): Option[Browser_Info.HTML_Document] =
- File_Format.registry.get(snapshot.node_name).flatMap(_.html_document(snapshot))
-}
-
-class Resources(
- val session_background: Sessions.Background,
- val log: Logger = No_Logger,
- command_timings: List[Properties.T] = Nil
-) {
- resources =>
-
- def sessions_structure: Sessions.Structure = session_background.sessions_structure
- def session_base: Sessions.Base = session_background.base
-
- override def toString: String = "Resources(" + session_base.print_body + ")"
-
-
- /* init session */
-
- def init_session_yxml: String = {
- import XML.Encode._
-
- YXML.string_of_body(
- pair(list(pair(string, properties)),
- pair(list(pair(string, string)),
- pair(list(properties),
- pair(list(pair(string, properties)),
- pair(list(Scala.encode_fun),
- pair(list(pair(string, string)), list(string)))))))(
- (sessions_structure.session_positions,
- (sessions_structure.dest_session_directories,
- (command_timings,
- (Command_Span.load_commands.map(cmd => (cmd.name, cmd.position)),
- (Scala.functions,
- (sessions_structure.global_theories.toList,
- session_base.loaded_theories.keys))))))))
- }
-
-
- /* file formats */
-
- def make_theory_name(name: Document.Node.Name): Option[Document.Node.Name] =
- File_Format.registry.get(name).flatMap(_.make_theory_name(resources, name))
-
- def make_theory_content(thy_name: Document.Node.Name): Option[String] =
- File_Format.registry.get_theory(thy_name).flatMap(_.make_theory_content(resources, thy_name))
-
-
- /* file-system operations */
-
- def migrate_name(name: Document.Node.Name): Document.Node.Name = name
-
- def append_path(prefix: String, source_path: Path): String =
- File.standard_path(Path.explode(prefix) + source_path)
-
- def read_dir(dir: String): List[String] = File.read_dir(Path.explode(dir))
-
- def list_thys(dir: String): List[String] = {
- val entries = try { read_dir(dir) } catch { case ERROR(_) => Nil }
- entries.flatMap(Thy_Header.get_thy_name)
- }
-
-
- /* source files of Isabelle/ML bootstrap */
-
- def source_file(raw_name: String): Option[String] = {
- if (Path.is_wellformed(raw_name)) {
- if (Path.is_valid(raw_name)) {
- def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None
-
- val path = Path.explode(raw_name)
- val path1 =
- if (path.is_absolute || path.is_current) check(path)
- else {
- check(Path.explode("~~/src/Pure") + path) orElse
- (if (Isabelle_System.getenv("ML_SOURCES") == "") None
- else check(Path.explode("$ML_SOURCES") + path))
- }
- Some(File.platform_path(path1 getOrElse path))
- }
- else None
- }
- else Some(raw_name)
- }
-
-
- /* theory files */
-
- def load_commands(
- syntax: Outer_Syntax,
- name: Document.Node.Name
- ) : () => List[Command_Span.Span] = {
- val (is_utf8, raw_text) =
- with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
- () =>
- {
- if (syntax.has_load_commands(raw_text)) {
- val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
- syntax.parse_spans(text).filter(_.is_load_command(syntax))
- }
- else Nil
- }
- }
-
- def loaded_files(
- syntax: Outer_Syntax,
- name: Document.Node.Name,
- spans: List[Command_Span.Span]
- ) : List[Document.Node.Name] = {
- for (span <- spans; file <- span.loaded_files(syntax).files)
- yield Document.Node.Name(append_path(name.master_dir, Path.explode(file)))
- }
-
- def pure_files(syntax: Outer_Syntax): List[Document.Node.Name] =
- (for {
- (file, theory) <- Thy_Header.ml_roots.iterator
- node = append_path("~~/src/Pure", Path.explode(file))
- node_name = Document.Node.Name(node, theory = theory)
- name <- loaded_files(syntax, node_name, load_commands(syntax, node_name)()).iterator
- } yield name).toList
-
- def global_theory(theory: String): Boolean =
- sessions_structure.global_theories.isDefinedAt(theory)
-
- def literal_theory(theory: String): Boolean =
- Long_Name.is_qualified(theory) || global_theory(theory)
-
- def theory_name(qualifier: String, theory: String): String =
- if (literal_theory(theory)) theory
- else Long_Name.qualify(qualifier, theory)
-
- def find_theory_node(theory: String): Option[Document.Node.Name] = {
- val thy_file = Path.basic(Long_Name.base_name(theory)).thy
- val session = sessions_structure.theory_qualifier(theory)
- val dirs =
- sessions_structure.get(session) match {
- case Some(info) => info.dirs
- case None => Nil
- }
- dirs.collectFirst { case dir if (dir + thy_file).is_file =>
- Document.Node.Name(append_path("", dir + thy_file), theory = theory) }
- }
-
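- // Resolution mirrors Resources.import_name in ML: loaded theories become
- // virtual nodes, known session directories are searched next, and only
- // then is the import read as a file-path relative to the given prefix.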
- def import_name(qualifier: String, prefix: String, s: String): Document.Node.Name = {
- val theory = theory_name(qualifier, Thy_Header.import_name(s))
- val literal_import =
- literal_theory(theory) && qualifier != sessions_structure.theory_qualifier(theory)
- if (literal_import && !Url.is_base_name(s)) {
- error("Bad import of theory from other session via file-path: " + quote(s))
- }
- if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
- else {
- find_theory_node(theory) match {
- case Some(node_name) => node_name
- case None =>
- if (Url.is_base_name(s) && literal_theory(s)) Document.Node.Name.loaded_theory(theory)
- else Document.Node.Name(append_path(prefix, Path.explode(s).thy), theory = theory)
- }
- }
- }
-
- def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
- import_name(sessions_structure.theory_qualifier(name), name.master_dir, s)
-
- def import_name(info: Sessions.Info, s: String): Document.Node.Name =
- import_name(info.name, info.dir.implode, s)
-
- def find_theory(file: JFile): Option[Document.Node.Name] = {
- for {
- qualifier <- sessions_structure.session_directories.get(File.canonical(file).getParentFile)
- theory_base <- proper_string(Thy_Header.theory_name(file.getName))
- theory = theory_name(qualifier, theory_base)
- theory_node <- find_theory_node(theory)
- if File.eq(theory_node.path.file, file)
- } yield theory_node
- }
-
- def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = {
- val context_session = sessions_structure.theory_qualifier(context_name)
- def context_dir(): List[String] = list_thys(context_name.master_dir)
- def session_dir(info: Sessions.Info): List[String] = info.dirs.flatMap(Thy_Header.list_thys)
- (for {
- (session, (info, _)) <- sessions_structure.imports_graph.iterator
- theory <- (if (session == context_session) context_dir() else session_dir(info)).iterator
- if Completion.completed(s)(theory)
- } yield {
- if (session == context_session || global_theory(theory)) theory
- else Long_Name.qualify(session, theory)
- }).toList.sorted
- }
-
- def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A = {
- val path = name.path
- if (path.is_file) using(Scan.byte_reader(path.file))(f)
- else if (name.node == name.theory)
- error("Cannot load theory " + quote(name.theory))
- else error ("Cannot load theory file " + path)
- }
-
- def check_thy(
- node_name: Document.Node.Name,
- reader: Reader[Char],
- command: Boolean = true,
- strict: Boolean = true
- ): Document.Node.Header = {
- if (node_name.is_theory && reader.source.length > 0) {
- try {
- val header = Thy_Header.read(node_name, reader, command = command, strict = strict)
- val imports =
- header.imports.map({ case (s, pos) =>
- val name = import_name(node_name, s)
- if (Sessions.illegal_theory(name.theory_base_name)) {
- error("Illegal theory name " + quote(name.theory_base_name) + Position.here(pos))
- }
- else (name, pos)
- })
- Document.Node.Header(imports, header.keywords, header.abbrevs)
- }
- catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
- }
- else Document.Node.no_header
- }
-
-
- /* special header */
-
- def special_header(name: Document.Node.Name): Option[Document.Node.Header] = {
- val imports =
- if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_import))
- else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP))
- else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE))
- else Nil
- if (imports.isEmpty) None
- else Some(Document.Node.Header(imports.map((_, Position.none))))
- }
-
-
- /* blobs */
-
- def undefined_blobs(version: Document.Version): List[Document.Node.Name] =
- (for {
- (node_name, node) <- version.nodes.iterator
- if !session_base.loaded_theory(node_name)
- cmd <- node.load_commands.iterator
- name <- cmd.blobs_undefined.iterator
- } yield name).toList
-
-
- /* document changes */
-
- def parse_change(
- reparse_limit: Int,
- previous: Document.Version,
- doc_blobs: Document.Blobs,
- edits: List[Document.Edit_Text],
- consolidate: List[Document.Node.Name]): Session.Change =
- Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate)
-
- def commit(change: Session.Change): Unit = {}
-
-
- /* theory and file dependencies */
-
- def dependencies(
- thys: List[(Document.Node.Name, Position.T)],
- progress: Progress = new Progress): Dependencies[Unit] =
- Dependencies.empty[Unit].require_thys((), thys, progress = progress)
-
- def session_dependencies(
- info: Sessions.Info,
- progress: Progress = new Progress
- ) : Dependencies[Options] = {
- info.theories.foldLeft(Dependencies.empty[Options]) {
- case (dependencies, (options, thys)) =>
- dependencies.require_thys(options,
- for { (thy, pos) <- thys } yield (import_name(info, thy), pos),
- progress = progress)
- }
- }
-
- object Dependencies {
- def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty)
-
- private def show_path(names: List[Document.Node.Name]): String =
- names.map(name => quote(name.theory)).mkString(" via ")
-
- private def cycle_msg(names: List[Document.Node.Name]): String =
- "Cyclic dependency of " + show_path(names)
-
- private def required_by(initiators: List[Document.Node.Name]): String =
- if_proper(initiators, "\n(required by " + show_path(initiators.reverse) + ")")
- }
-
- final class Dependencies[A] private(
- rev_entries: List[Document.Node.Entry],
- seen: Map[Document.Node.Name, A]
- ) {
- private def cons(entry: Document.Node.Entry): Dependencies[A] =
- new Dependencies[A](entry :: rev_entries, seen)
-
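- // Depth-first traversal of theory imports: initiators tracks the current
- // path for cycle detection, seen prevents re-visiting shared imports.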
- def require_thy(adjunct: A,
- thy: (Document.Node.Name, Position.T),
- initiators: List[Document.Node.Name] = Nil,
- progress: Progress = new Progress): Dependencies[A] = {
- val (name, pos) = thy
-
- def message: String =
- "The error(s) above occurred for theory " + quote(name.theory) +
- Dependencies.required_by(initiators) + Position.here(pos)
-
- if (seen.isDefinedAt(name)) this
- else {
- val dependencies1 = new Dependencies[A](rev_entries, seen + (name -> adjunct))
- if (session_base.loaded_theory(name)) dependencies1
- else {
- try {
- if (initiators.contains(name)) error(Dependencies.cycle_msg(initiators))
-
- progress.expose_interrupt()
- val header =
- try {
- with_thy_reader(name, check_thy(name, _, command = false)).cat_errors(message)
- }
- catch { case ERROR(msg) => cat_error(msg, message) }
- val entry = Document.Node.Entry(name, header)
- dependencies1.require_thys(adjunct, header.imports_pos,
- initiators = name :: initiators, progress = progress).cons(entry)
- }
- catch {
- case e: Throwable =>
- dependencies1.cons(Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))))
- }
- }
- }
- }
-
- def require_thys(adjunct: A,
- thys: List[(Document.Node.Name, Position.T)],
- progress: Progress = new Progress,
- initiators: List[Document.Node.Name] = Nil): Dependencies[A] =
- thys.foldLeft(this)(_.require_thy(adjunct, _, progress = progress, initiators = initiators))
-
- def entries: List[Document.Node.Entry] = rev_entries.reverse
-
- def theories: List[Document.Node.Name] = entries.map(_.name)
- def theories_adjunct: List[(Document.Node.Name, A)] = theories.map(name => (name, seen(name)))
-
- def errors: List[String] = entries.flatMap(_.header.errors)
-
- def check_errors: Dependencies[A] =
- errors match {
- case Nil => this
- case errs => error(cat_lines(errs))
- }
-
- lazy val theory_graph: Document.Node.Name.Graph[Unit] = {
- val regular = theories.toSet
- val irregular =
- (for {
- entry <- entries.iterator
- imp <- entry.header.imports
- if !regular(imp)
- } yield imp).toSet
-
- Document.Node.Name.make_graph(
- irregular.toList.map(name => ((name, ()), Nil)) :::
- entries.map(entry => ((entry.name, ()), entry.header.imports)))
- }
-
- lazy val loaded_theories: Graph[String, Outer_Syntax] =
- entries.foldLeft(session_base.loaded_theories) {
- case (graph, entry) =>
- val name = entry.name.theory
- val imports = entry.header.imports.map(_.theory)
-
- val graph1 = (name :: imports).foldLeft(graph)(_.default_node(_, Outer_Syntax.empty))
- val graph2 = imports.foldLeft(graph1)(_.add_edge(_, name))
-
- val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil
- val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node)
- val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header
-
- graph2.map_node(name, _ => syntax)
- }
-
- def get_syntax(name: Document.Node.Name): Outer_Syntax =
- loaded_theories.get_node(name.theory)
-
- lazy val load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
- theories.zip(
- Par_List.map((e: () => List[Command_Span.Span]) => e(),
- theories.map(name => resources.load_commands(get_syntax(name), name))))
- .filter(p => p._2.nonEmpty)
-
- def loaded_files(
- name: Document.Node.Name,
- spans: List[Command_Span.Span]
- ) : (String, List[Document.Node.Name]) = {
- val theory = name.theory
- val syntax = get_syntax(name)
- val files1 = resources.loaded_files(syntax, name, spans)
- val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil
- (theory, files1 ::: files2)
- }
-
- def loaded_files: List[Document.Node.Name] =
- for {
- (name, spans) <- load_commands
- file <- loaded_files(name, spans)._2
- } yield file
-
- def imported_files: List[Path] = {
- val base_theories =
- loaded_theories.all_preds(theories.map(_.theory)).
- filter(session_base.loaded_theories.defined)
-
- base_theories.map(theory => session_base.known_theories(theory).name.path) :::
- base_theories.flatMap(session_base.known_loaded_files.withDefaultValue(Nil))
- }
-
- lazy val overall_syntax: Outer_Syntax =
- Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node))
-
- override def toString: String = entries.toString
- }
-
-
- /* resolve implicit theory dependencies */
-
- def resolve_dependencies[A](
- models: Iterable[Document.Model],
- theories: List[Document.Node.Name]
- ): List[Document.Node.Name] = {
- val model_theories =
- (for (model <- models.iterator if model.is_theory)
- yield (model.node_name, Position.none)).toList
-
- val thy_files1 =
- dependencies(model_theories ::: theories.map((_, Position.none))).theories
-
- val thy_files2 =
- (for {
- model <- models.iterator if !model.is_theory
- thy_name <- make_theory_name(model.node_name)
- } yield thy_name).toList
-
- thy_files1 ::: thy_files2
- }
-}
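Note: a hypothetical sketch of driving the dependency traversal above, assuming
an existing Resources instance `resources` and a node name `thy_name` (both
placeholders):

    val deps = resources.dependencies(List((thy_name, Position.none)))
    deps.check_errors  // raises the accumulated header errors, if any
    for (name <- deps.theories) println(name.theory)
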
--- a/src/Pure/ROOT.ML Sat Jan 20 13:52:36 2024 +0100
+++ b/src/Pure/ROOT.ML Sat Jan 20 15:07:41 2024 +0100
@@ -136,7 +136,7 @@
ML_file "Concurrent/cache.ML";
ML_file "PIDE/active.ML";
-ML_file "Thy/export.ML";
+ML_file "Build/export.ML";
subsection "Inner syntax";
@@ -318,13 +318,13 @@
ML_file "Thy/document_antiquotations.ML";
ML_file "General/graph_display.ML";
ML_file "pure_syn.ML";
-ML_file "PIDE/resources.ML";
+ML_file "Build/resources.ML";
ML_file "PIDE/command.ML";
ML_file "PIDE/query_operation.ML";
ML_file "Thy/thy_info.ML";
ML_file "thm_deps.ML";
-ML_file "Thy/export_theory.ML";
-ML_file "Thy/sessions.ML";
+ML_file "Build/export_theory.ML";
+ML_file "Build/sessions.ML";
ML_file "PIDE/session.ML";
ML_file "PIDE/document.ML";
@@ -356,7 +356,7 @@
ML_file "General/xz.ML";
ML_file "General/zstd.ML";
ML_file "Tools/prismjs.ML";
-ML_file "Tools/build.ML";
+ML_file "Build/build.ML";
ML_file "Tools/named_thms.ML";
ML_file "Tools/print_operation.ML";
ML_file "Tools/rail.ML";
--- a/src/Pure/Thy/browser_info.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,708 +0,0 @@
-/* Title: Pure/Thy/browser_info.scala
- Author: Makarius
-
-HTML/PDF presentation of PIDE document information.
-*/
-
-package isabelle
-
-
-import scala.annotation.tailrec
-import scala.collection.immutable.SortedMap
-import scala.collection.mutable
-
-
-object Browser_Info {
- /* browser_info store configuration */
-
- object Config {
- val none: Config = new Config { def enabled: Boolean = false }
- val standard: Config = new Config { def enabled: Boolean = true }
-
- def dir(path: Path): Config =
- new Config {
- def enabled: Boolean = true
- override def presentation_dir(store: Store): Path = path
- }
-
- def make(s: String): Config =
- if (s == ":") standard else dir(Path.explode(s))
- }
-
- abstract class Config private {
- def enabled: Boolean
- def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info
- def presentation_dir(store: Store): Path = store.presentation_dir
- }
-
-
- /* meta info within the file-system */
-
- object Meta_Info {
- /* directory */
-
- val PATH: Path = Path.explode(".browser_info")
-
- def check_directory(dir: Path): Unit = {
- if (dir.is_dir && !(dir + PATH).is_dir && File.read_dir(dir).nonEmpty) {
- error("Existing content in " + dir.expand + " lacks " + PATH + " meta info.\n" +
- "To avoid potential disaster, it has not been changed automatically.\n" +
- "If this is the intended directory, please move/remove/empty it manually.")
- }
- }
-
- def init_directory(dir: Path): Path = {
- check_directory(dir)
- Isabelle_System.make_directory(dir + PATH)
- dir
- }
-
- def clean_directory(dir: Path): Path = {
- check_directory(dir)
- Isabelle_System.rm_tree(dir) // guarded by check_directory!
- Isabelle_System.new_directory(dir + PATH)
- }
-
-
- /* content */
-
- def make_path(dir: Path, name: String): Path =
- dir + PATH + Path.basic(name)
-
- def value(dir: Path, name: String): String = {
- val path = make_path(dir, name)
- if (path.is_file) File.read(path) else ""
- }
-
- def change(dir: Path, name: String)(f: String => String): Unit = {
- val path = make_path(dir, name)
- val x = value(dir, name)
- val y =
- try { f(x) }
- catch { case ERROR(msg) => error("Failed to change " + path.expand + ":\n" + msg)}
- if (x != y) File.write(path, y)
- }
-
-
- /* build_uuid */
-
- val BUILD_UUID = "build_uuid"
-
- def check_build_uuid(dir: Path, uuid: String): Boolean = {
- val uuid0 = value(dir, BUILD_UUID)
- uuid0.nonEmpty && uuid.nonEmpty && uuid0 == uuid
- }
-
- def set_build_uuid(dir: Path, uuid: String): Unit =
- change(dir, BUILD_UUID)(_ => uuid)
-
-
- /* index */
-
- val INDEX = "index.json"
-
- object Item {
- def parse(json: JSON.T): Item = {
- def err(): Nothing =
- error("Bad JSON object for item:\n" + JSON.Format.pretty_print(json))
- val obj = JSON.Object.unapply(json) getOrElse err()
-
- val name = JSON.string(obj, "name") getOrElse err()
- val description = JSON.string(obj, "description") getOrElse ""
- Item(name, description = Symbol.trim_blank_lines(description))
- }
- }
-
- sealed case class Item(name: String, description: String = "") {
- override def toString: String = name
-
- def json: JSON.T = JSON.Object("name" -> name, "description" -> description)
- }
-
- object Index {
- def parse(s: JSON.S, kind: String): Index = {
- if (s.isEmpty) Index(kind, Nil)
- else {
- def err(): Nothing = error("Bad JSON object " + kind + " index:\n" + s)
-
- val json = JSON.parse(s)
- val obj = JSON.Object.unapply(json) getOrElse err()
-
- val kind1 = JSON.string(obj, "kind") getOrElse err()
- val items = JSON.list(obj, "items", x => Some(Item.parse(x))) getOrElse err()
- if (kind == kind1) Index(kind, items)
- else error("Expected index kind " + quote(kind) + " but found " + quote(kind1))
- }
- }
- }
-
- sealed case class Index(kind: String, items: List[Item]) {
- def is_empty: Boolean = items.isEmpty
-
- def + (item: Item): Index =
- Index(kind, (item :: items.filterNot(_.name == item.name)).sortBy(_.name))
-
- def json: JSON.T = JSON.Object("kind" -> kind, "items" -> items.map(_.json))
- def print_json: JSON.S = JSON.Format.pretty_print(json)
- }
- }
-
-
- /* presentation elements */
-
- sealed case class Elements(
- html: Markup.Elements = Markup.Elements.empty,
- entity: Markup.Elements = Markup.Elements.empty,
- language: Markup.Elements = Markup.Elements.empty)
-
- val default_elements: Elements =
- Elements(
- html = Rendering.foreground_elements ++ Rendering.text_color_elements +
- Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE +
- Markup.PATH + Markup.URL,
- entity = Markup.Elements(Markup.THEORY, Markup.TYPE_NAME, Markup.CONSTANT, Markup.FACT,
- Markup.CLASS, Markup.LOCALE, Markup.FREE))
-
- val extra_elements: Elements =
- Elements(
- html = default_elements.html ++ Rendering.markdown_elements,
- language = Markup.Elements(Markup.Language.DOCUMENT))
-
-
-
- /** HTML/PDF presentation context **/
-
- def context(
- sessions_structure: Sessions.Structure,
- elements: Elements = default_elements,
- root_dir: Path = Path.current,
- document_info: Document_Info = Document_Info.empty
- ): Context = new Context(sessions_structure, elements, root_dir, document_info)
-
- class Context private[Browser_Info](
- sessions_structure: Sessions.Structure,
- val elements: Elements,
- val root_dir: Path,
- val document_info: Document_Info
- ) {
- /* directory structure and resources */
-
- def theory_by_name(session: String, theory: String): Option[Document_Info.Theory] =
- document_info.theory_by_name(session, theory)
-
- def theory_by_file(session: String, file: String): Option[Document_Info.Theory] =
- document_info.theory_by_file(session, file)
-
- def session_chapter(session: String): String =
- sessions_structure(session).chapter
-
- def chapter_dir(session: String): Path =
- root_dir + Path.basic(session_chapter(session))
-
- def session_dir(session: String): Path =
- chapter_dir(session) + Path.basic(session)
-
- def theory_dir(theory: Document_Info.Theory): Path =
- session_dir(theory.dynamic_session)
-
- def theory_html(theory: Document_Info.Theory): Path =
- {
- def check(name: String): Option[Path] = {
- val path = Path.basic(name).html
- if (Path.eq_case_insensitive(path, Path.index_html)) None
- else Some(path)
- }
- check(theory.print_short) orElse check(theory.name) getOrElse
- error("Illegal global theory name " + quote(theory.name) +
- " (conflict with " + Path.index_html + ")")
- }
-
- def file_html(file: String): Path =
- Path.explode(file).squash.html
-
- def smart_html(theory: Document_Info.Theory, file: String): Path =
- if (File.is_thy(file)) theory_html(theory) else file_html(file)
-
-
- /* HTML content */
-
- def head(title: String, rest: XML.Body = Nil): XML.Tree =
- HTML.div("head", HTML.chapter(title) :: rest)
-
- def source(body: XML.Body): XML.Tree = HTML.pre("source", body)
-
- def contents(
- heading: String,
- items: List[XML.Body],
- css_class: String = "contents"
- ) : List[XML.Elem] = {
- if (items.isEmpty) Nil
- else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items))))
- }
-
-
- /* preview PIDE document */
-
- lazy val isabelle_css: String = File.read(HTML.isabelle_css)
-
- def html_document(title: String, body: XML.Body, fonts_css: String): HTML_Document = {
- val content =
- HTML.output_document(
- List(
- HTML.style(fonts_css + "\n\n" + isabelle_css),
- HTML.title(title)),
- List(HTML.source(body)), css = "", structural = false)
- HTML_Document(title, content)
- }
-
- def preview_document(
- snapshot: Document.Snapshot,
- plain_text: Boolean = false,
- fonts_css: String = HTML.fonts_css()
- ): HTML_Document = {
- require(!snapshot.is_outdated, "document snapshot outdated")
-
- val name = snapshot.node_name
- if (plain_text) {
- val title = "File " + Symbol.cartouche_decoded(name.file_name)
- val body = HTML.text(snapshot.node.source)
- html_document(title, body, fonts_css)
- }
- else {
- Resources.html_document(snapshot) getOrElse {
- val title =
- if (name.is_theory) "Theory " + quote(name.theory_base_name)
- else "File " + Symbol.cartouche_decoded(name.file_name)
- val xml = snapshot.xml_markup(elements = elements.html)
- val body = Node_Context.empty.make_html(elements, xml)
- html_document(title, body, fonts_css)
- }
- }
- }
-
-
- /* maintain presentation structure */
-
- def update_chapter(session_name: String, session_description: String): Unit = synchronized {
- val dir = Meta_Info.init_directory(chapter_dir(session_name))
- Meta_Info.change(dir, Meta_Info.INDEX) { text =>
- val index0 = Meta_Info.Index.parse(text, "chapter")
- val item = Meta_Info.Item(session_name, description = session_description)
- val index = index0 + item
-
- if (index != index0) {
- val title = "Isabelle/" + session_chapter(session_name) + " sessions"
- HTML.write_document(dir, "index.html",
- List(HTML.title(title + Isabelle_System.isabelle_heading())),
- HTML.chapter(title) ::
- (if (index.is_empty) Nil
- else
- List(HTML.div("sessions",
- List(HTML.description(
- index.items.map(item =>
- (List(HTML.link(item.name + "/index.html", HTML.text(item.name))),
- if (item.description.isEmpty) Nil
- else HTML.break ::: List(HTML.pre(HTML.text(item.description)))))))))),
- root = Some(root_dir))
- }
-
- index.print_json
- }
- }
-
- def update_root(): Unit = synchronized {
- Meta_Info.init_directory(root_dir)
- HTML.init_fonts(root_dir)
- Isabelle_System.copy_file(Path.explode("~~/lib/logo/isabelle.gif"),
- root_dir + Path.explode("isabelle.gif"))
-
- Meta_Info.change(root_dir, Meta_Info.INDEX) { text =>
- val index0 = Meta_Info.Index.parse(text, "root")
- val index = {
- val items1 =
- sessions_structure.known_chapters
- .map(ch => Meta_Info.Item(ch.name, description = ch.description))
- val items2 = index0.items.filterNot(item => items1.exists(_.name == item.name))
- index0.copy(items = items1 ::: items2)
- }
-
- if (index != index0) {
- val title = "The " + XML.text(Isabelle_System.isabelle_name()) + " Library"
- HTML.write_document(root_dir, "index.html",
- List(HTML.title(title + Isabelle_System.isabelle_heading())),
- HTML.chapter(title) ::
- (if (index.is_empty) Nil
- else
- List(HTML.div("sessions",
- List(HTML.description(
- index.items.map(item =>
- (List(HTML.link(item.name + "/index.html", HTML.text(item.name))),
- if (item.description.isEmpty) Nil
- else HTML.break ::: List(HTML.pre(HTML.text(item.description)))))))))),
- root = Some(root_dir))
- }
-
- index.print_json
- }
- }
- }
-
- sealed case class HTML_Document(title: String, content: String)
-
-
- /* formal entities */
-
- object Theory_Ref {
- def unapply(props: Properties.T): Option[String] =
- (props, props) match {
- case (Markup.Kind(Markup.THEORY), Markup.Name(theory)) => Some(theory)
- case _ => None
- }
- }
-
- object Entity_Ref {
- def unapply(props: Properties.T): Option[(String, String, String)] =
- (props, props, props, props) match {
- case (Markup.Entity.Ref.Prop(_), Position.Def_File(file), Markup.Kind(kind), Markup.Name(name))
- if Path.is_wellformed(file) => Some((file, kind, name))
- case _ => None
- }
- }
-
- object Node_Context {
- val empty: Node_Context = new Node_Context
-
- def make(
- context: Context,
- session_name: String,
- theory_name: String,
- file_name: String,
- node_dir: Path,
- ): Node_Context =
- new Node_Context {
- private val seen_ranges: mutable.Set[Symbol.Range] = mutable.Set.empty
-
- override def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] =
- body match {
- case List(XML.Elem(Markup("span", List("id" -> _)), _)) => None
- case _ =>
- for (theory <- context.theory_by_name(session_name, theory_name))
- yield {
- val body1 =
- if (seen_ranges.contains(range)) {
- HTML.entity_def(HTML.span(HTML.id(offset_id(range)), body))
- }
- else HTML.span(body)
- theory.get_defs(file_name, range).foldLeft(body1) {
- case (elem, entity) =>
- HTML.entity_def(HTML.span(HTML.id(entity.kname), List(elem)))
- }
- }
- }
-
- private def offset_id(range: Text.Range): String =
- "offset_" + range.start + ".." + range.stop
-
- override def make_file_ref(file: String, body: XML.Body): Option[XML.Elem] = {
- for (theory <- context.theory_by_file(session_name, file))
- yield {
- val html_path = context.theory_dir(theory) + context.smart_html(theory, file)
- val html_link = HTML.relative_href(html_path, base = Some(node_dir))
- HTML.link(html_link, body)
- }
- }
-
- override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = {
- props match {
- case Theory_Ref(thy_name) =>
- for (theory <- context.theory_by_name(session_name, thy_name))
- yield {
- val html_path = context.theory_dir(theory) + context.theory_html(theory)
- val html_link = HTML.relative_href(html_path, base = Some(node_dir))
- HTML.link(html_link, body)
- }
- case Entity_Ref(def_file, kind, name) =>
- def logical_ref(theory: Document_Info.Theory): Option[String] =
- theory.get_def(def_file, kind, name).map(_.kname)
-
- def physical_ref(theory: Document_Info.Theory): Option[String] =
- props match {
- case Position.Def_Range(range) if theory.name == theory_name =>
- seen_ranges += range
- Some(offset_id(range))
- case _ => None
- }
-
- for {
- theory <- context.theory_by_file(session_name, def_file)
- html_ref <- logical_ref(theory) orElse physical_ref(theory)
- }
- yield {
- val html_path = context.theory_dir(theory) + context.smart_html(theory, def_file)
- val html_link = HTML.relative_href(html_path, base = Some(node_dir))
- HTML.entity_ref(HTML.link(html_link + "#" + html_ref, body))
- }
- case _ => None
- }
- }
- }
- }
-
- class Node_Context {
- def make_def(range: Symbol.Range, body: XML.Body): Option[XML.Elem] = None
- def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = None
- def make_file_ref(file: String, body: XML.Body): Option[XML.Elem] = None
-
- val div_elements: Set[String] =
- Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.`enum`.name,
- HTML.descr.name)
-
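- // Symbol offsets are computed right-to-left: starting from the total symbol
- // length, each text leaf subtracts its own length, so make_def receives the
- // correct source range without a separate forward pass.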
- def make_html(elements: Elements, xml: XML.Body): XML.Body = {
- def html_div(html: XML.Body): Boolean =
- html exists {
- case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body)
- case XML.Text(_) => false
- }
-
- def html_class(c: String, html: XML.Body): XML.Body =
- if (c == "") html
- else if (html_div(html)) List(HTML.div(c, html))
- else List(HTML.span(c, html))
-
- def html_body(xml_body: XML.Body, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
- xml_body.foldRight((List.empty[XML.Tree], end_offset)) { case (tree, (res, end_offset1)) =>
- val (res1, offset) = html_body_single(tree, end_offset1)
- (res1 ++ res, offset)
- }
-
- @tailrec
- def html_body_single(xml_tree: XML.Tree, end_offset: Symbol.Offset): (XML.Body, Symbol.Offset) =
- xml_tree match {
- case XML.Wrapped_Elem(markup, _, body) => html_body_single(XML.Elem(markup, body), end_offset)
- case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) =>
- val (body1, offset) = html_body(body, end_offset)
- if (elements.entity(kind)) {
- make_ref(props, body1) match {
- case Some(link) => (List(link), offset)
- case None => (body1, offset)
- }
- }
- else (body1, offset)
- case XML.Elem(Markup.Path(file), body) =>
- val (body1, offset) = html_body(body, end_offset)
- make_file_ref(file, body1) match {
- case Some(link) => (List(link), offset)
- case None => (body1, offset)
- }
- case XML.Elem(Markup.Url(href), body) =>
- val (body1, offset) = html_body(body, end_offset)
- (List(HTML.link(href, body1)), offset)
- case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) =>
- val (body1, offset) = html_body(body, end_offset)
- (html_class(if (elements.language(name)) name else "", body1), offset)
- case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) =>
- val (body1, offset) = html_body(body, end_offset)
- (List(HTML.par(body1)), offset)
- case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) =>
- val (body1, offset) = html_body(body, end_offset)
- (List(HTML.item(body1)), offset)
- case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), text) =>
- (Nil, end_offset - XML.symbol_length(text))
- case XML.Elem(Markup.Markdown_List(kind), body) =>
- val (body1, offset) = html_body(body, end_offset)
- if (kind == Markup.ENUMERATE) (List(HTML.`enum`(body1)), offset)
- else (List(HTML.list(body1)), offset)
- case XML.Elem(markup, body) =>
- val name = markup.name
- val (body1, offset) = html_body(body, end_offset)
- val html =
- markup.properties match {
- case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD =>
- html_class(kind, body1)
- case _ =>
- body1
- }
- Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match {
- case Some(c) => (html_class(c.toString, html), offset)
- case None => (html_class(name, html), offset)
- }
- case XML.Text(text) =>
- val offset = end_offset - Symbol.length(text)
- val body = HTML.text(Symbol.decode(text))
- make_def(Text.Range(offset, end_offset), body) match {
- case Some(body1) => (List(body1), offset)
- case None => (body, offset)
- }
- }
-
- html_body(xml, XML.symbol_length(xml) + 1)._1
- }
- }
-
-
-
- /** build presentation **/
-
- val session_graph_path: Path = Path.explode("session_graph.pdf")
-
- def build_session(
- context: Context,
- session_context: Export.Session_Context,
- progress: Progress = new Progress,
- ): Unit = {
- progress.expose_interrupt()
-
- val session_name = session_context.session_name
- val session_info = session_context.sessions_structure(session_name)
-
- val session_dir = context.session_dir(session_name).expand
- progress.echo("Presenting " + session_name + " in " + session_dir + " ...")
-
- Meta_Info.init_directory(context.chapter_dir(session_name))
- Meta_Info.clean_directory(session_dir)
-
- val session = context.document_info.the_session(session_name)
-
- Bytes.write(session_dir + session_graph_path,
- graphview.Graph_File.make_pdf(session_info.options,
- session_context.session_base.session_graph_display))
-
- val document_variants =
- for {
- doc <- session_info.document_variants
- db <- session_context.session_db()
- document <- Document_Build.read_document(db, session_name, doc.name)
- }
- yield {
- val doc_path = session_dir + doc.path.pdf
- if (Path.eq_case_insensitive(doc.path.pdf, session_graph_path)) {
- error("Illegal document variant " + quote(doc.name) +
- " (conflict with " + session_graph_path + ")")
- }
- progress.echo("Presenting document " + session_name + "/" + doc.name, verbose = true)
- if (session_info.document_echo) progress.echo("Document at " + doc_path)
- Bytes.write(doc_path, document.pdf)
- doc
- }
-
- val document_links = {
- val link1 = HTML.link(session_graph_path, HTML.text("theory dependencies"))
- val links2 = document_variants.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name)))
- Library.separate(HTML.break ::: HTML.nl,
- (link1 :: links2).map(link => HTML.text("View ") ::: List(link))).flatten
- }
-
- def present_theory(theory_name: String): XML.Body = {
- progress.expose_interrupt()
-
- def err(): Nothing =
- error("Missing document information for theory: " + quote(theory_name))
-
- val snapshot = Build.read_theory(session_context.theory(theory_name)) getOrElse err()
- val theory = context.theory_by_name(session_name, theory_name) getOrElse err()
-
- progress.echo("Presenting theory " + quote(theory_name), verbose = true)
-
- val thy_elements = theory.elements(context.elements)
-
- def node_context(file_name: String, node_dir: Path): Node_Context =
- Node_Context.make(context, session_name, theory_name, file_name, node_dir)
-
- val thy_html =
- context.source(
- node_context(theory.thy_file, session_dir).
- make_html(thy_elements, snapshot.xml_markup(elements = thy_elements.html)))
-
- val master_dir = Path.explode(snapshot.node_name.master_dir)
-
- val files =
- for {
- blob_name <- snapshot.node_files.tail
- xml = snapshot.switch(blob_name).xml_markup(elements = thy_elements.html)
- if xml.nonEmpty
- }
- yield {
- progress.expose_interrupt()
-
- val file = blob_name.node
- progress.echo("Presenting file " + quote(file), verbose = true)
-
- val file_html = session_dir + context.file_html(file)
- val file_dir = file_html.dir
- val html_link = HTML.relative_href(file_html, base = Some(session_dir))
- val html = context.source(node_context(file, file_dir).make_html(thy_elements, xml))
-
- val path = Path.explode(file)
- val src_path = File.relative_path(master_dir, path).getOrElse(path)
-
- val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short)
- HTML.write_document(file_dir, file_html.file_name,
- List(HTML.title(file_title)), List(context.head(file_title), html),
- root = Some(context.root_dir))
- List(HTML.link(html_link, HTML.text(file_title)))
- }
-
- val thy_title = "Theory " + theory.print_short
- HTML.write_document(session_dir, context.theory_html(theory).implode,
- List(HTML.title(thy_title)), List(context.head(thy_title), thy_html),
- root = Some(context.root_dir))
-
- List(HTML.link(context.theory_html(theory),
- HTML.text(theory.print_short) :::
- (if (files.isEmpty) Nil else List(HTML.itemize(files)))))
- }
-
- val theories = session.used_theories.map(present_theory)
-
- val title = "Session " + session_name
- HTML.write_document(session_dir, "index.html",
- List(HTML.title(title + Isabelle_System.isabelle_heading())),
- context.head(title, List(HTML.par(document_links))) ::
- context.contents("Theories", theories),
- root = Some(context.root_dir))
-
- Meta_Info.set_build_uuid(session_dir, session.build_uuid)
-
- context.update_chapter(session_name, session_info.description)
- }
-
- def build(
- browser_info: Config,
- store: Store,
- deps: Sessions.Deps,
- sessions: List[String],
- progress: Progress = new Progress,
- server: SSH.Server = SSH.no_server
- ): Unit = {
- val root_dir = browser_info.presentation_dir(store).absolute
- progress.echo("Presentation in " + root_dir)
-
- using(Export.open_database_context(store, server = server)) { database_context =>
- val context0 = context(deps.sessions_structure, root_dir = root_dir)
-
- val sessions1 =
- deps.sessions_structure.build_requirements(sessions).filter { session_name =>
- using(database_context.open_database(session_name)) { session_database =>
- database_context.store.read_build(session_database.db, session_name) match {
- case None => false
- case Some(build) =>
- val session_dir = context0.session_dir(session_name)
- !Meta_Info.check_build_uuid(session_dir, build.uuid)
- }
- }
- }
-
- val context1 =
- context(deps.sessions_structure, root_dir = root_dir,
- document_info = Document_Info.read(database_context, deps, sessions1))
-
- context1.update_root()
-
- Par_List.map({ (session: String) =>
- using(database_context.open_session(deps.background(session))) { session_context =>
- build_session(context1, session_context, progress = progress)
- }
- }, sessions1)
- }
- }
-}
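
A minimal usage sketch, assuming `options: Options` and `deps: Sessions.Deps` are
provided by the enclosing build machinery (placeholder names, not defined here):

  // sketch only: `options`, `deps` and the session name are placeholders
  val store = Store(options)
  val config = Browser_Info.Config.make(":")  // ":" selects the standard presentation dir
  Browser_Info.build(config, store, deps, List("HOL-Analysis"),
    progress = new Console_Progress)
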
--- a/src/Pure/Thy/export.ML Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,73 +0,0 @@
-(* Title: Pure/Thy/export.ML
- Author: Makarius
-
-Manage theory exports: compressed blobs.
-*)
-
-signature EXPORT =
-sig
- val report_export: theory -> Path.binding -> unit
- type params =
- {theory: theory, binding: Path.binding, executable: bool, compress: bool, strict: bool}
- val export_params: params -> XML.body -> unit
- val export: theory -> Path.binding -> XML.body -> unit
- val export_executable: theory -> Path.binding -> XML.body -> unit
- val export_file: theory -> Path.binding -> Path.T -> unit
- val export_executable_file: theory -> Path.binding -> Path.T -> unit
- val markup: theory -> Path.T -> Markup.T
- val message: theory -> Path.T -> string
-end;
-
-structure Export: EXPORT =
-struct
-
-(* export *)
-
-fun report_export thy binding =
- let
- val theory_name = Context.theory_long_name thy;
- val (path, pos) = Path.dest_binding binding;
- val markup = Markup.export_path (Path.implode (Path.basic theory_name + path));
- in Context_Position.report_generic (Context.Theory thy) pos markup end;
-
-type params =
- {theory: theory, binding: Path.binding, executable: bool, compress: bool, strict: bool};
-
-fun export_params ({theory = thy, binding, executable, compress, strict}: params) body =
- (report_export thy binding;
- (Output.try_protocol_message o Markup.export)
- {id = Position.id_of (Position.thread_data ()),
- serial = serial (),
- theory_name = Context.theory_long_name thy,
- name = Path.implode_binding (tap Path.proper_binding binding),
- executable = executable,
- compress = compress,
- strict = strict} [body]);
-
-fun export thy binding body =
- export_params
- {theory = thy, binding = binding, executable = false, compress = true, strict = true} body;
-
-fun export_executable thy binding body =
- export_params
- {theory = thy, binding = binding, executable = true, compress = true, strict = true} body;
-
-fun export_file thy binding file =
- export thy binding (Bytes.contents_blob (Bytes.read file));
-
-fun export_executable_file thy binding file =
- export_executable thy binding (Bytes.contents_blob (Bytes.read file));
-
-
-(* information message *)
-
-fun markup thy path =
- let
- val thy_path = Path.basic (Context.theory_long_name thy) + path;
- val name = (Markup.nameN, Path.implode thy_path);
- in Active.make_markup Markup.theory_exportsN {implicit = false, properties = [name]} end;
-
-fun message thy path =
- "See " ^ Markup.markup (markup thy path) "theory exports";
-
-end;
--- a/src/Pure/Thy/export.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,681 +0,0 @@
-/* Title: Pure/Thy/export.scala
- Author: Makarius
-
-Manage theory exports: compressed blobs.
-*/
-
-package isabelle
-
-
-import scala.annotation.tailrec
-import scala.util.matching.Regex
-import scala.collection.mutable
-
-
-object Export {
- /* artefact names */
-
- val DOCUMENT_ID: String = "PIDE/document_id"
- val FILES: String = "PIDE/files"
- val MARKUP: String = "PIDE/markup"
- val MESSAGES: String = "PIDE/messages"
- val DOCUMENT_PREFIX: String = "document/"
- val DOCUMENT_LATEX: String = DOCUMENT_PREFIX + "latex"
- val THEORY_PREFIX: String = "theory/"
- val PROOFS_PREFIX: String = "proofs/"
-
- def explode_name(s: String): List[String] = space_explode('/', s)
- def implode_name(elems: Iterable[String]): String = elems.mkString("/")
-
-
- /* SQL data model */
-
- object private_data extends SQL.Data() {
- override lazy val tables = SQL.Tables(Base.table)
-
- object Base {
- val session_name = SQL.Column.string("session_name").make_primary_key
- val theory_name = SQL.Column.string("theory_name").make_primary_key
- val name = SQL.Column.string("name").make_primary_key
- val executable = SQL.Column.bool("executable")
- val compressed = SQL.Column.bool("compressed")
- val body = SQL.Column.bytes("body")
-
- val table =
- SQL.Table("isabelle_exports",
- List(session_name, theory_name, name, executable, compressed, body))
- }
-
- def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source =
- SQL.where_and(
- Base.session_name.equal(session_name),
- if_proper(theory_name, Base.theory_name.equal(theory_name)),
- if_proper(name, Base.name.equal(name)))
-
- def clean_session(db: SQL.Database, session_name: String): Unit =
- db.execute_statement(Base.table.delete(sql = where_equal(session_name)))
-
- def known_entries(db: SQL.Database, entry_names: Iterable[Entry_Name]): Set[Entry_Name] = {
- val it = entry_names.iterator
- if (it.isEmpty) Set.empty[Entry_Name]
- else {
- val sql_preds =
- List.from(
- for (entry_name <- it) yield {
- SQL.and(
- Base.session_name.equal(entry_name.session),
- Base.theory_name.equal(entry_name.theory),
- Base.name.equal(entry_name.name)
- )
- })
- db.execute_query_statement(
- Base.table.select(List(Base.session_name, Base.theory_name, Base.name),
- sql = SQL.where(SQL.OR(sql_preds))),
- Set.from[Entry_Name],
- { res =>
- val session_name = res.string(Base.session_name)
- val theory_name = res.string(Base.theory_name)
- val name = res.string(Base.name)
- Entry_Name(session_name, theory_name, name)
- })
- }
- }
-
- def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
- db.execute_query_statementO[Entry](
- Base.table.select(List(Base.executable, Base.compressed, Base.body),
- sql = private_data.where_equal(entry_name.session, entry_name.theory, entry_name.name)),
- { res =>
- val executable = res.bool(Base.executable)
- val compressed = res.bool(Base.compressed)
- val bytes = res.bytes(Base.body)
- val body = Future.value(compressed, bytes)
- Entry(entry_name, executable, body, cache)
- }
- )
-
- def write_entries(db: SQL.Database, entries: List[Entry]): Unit =
- db.execute_batch_statement(Base.table.insert(), batch =
- for (entry <- entries) yield { (stmt: SQL.Statement) =>
- val (compressed, bs) = entry.body.join
- stmt.string(1) = entry.session_name
- stmt.string(2) = entry.theory_name
- stmt.string(3) = entry.name
- stmt.bool(4) = entry.executable
- stmt.bool(5) = compressed
- stmt.bytes(6) = bs
- })
-
- def read_theory_names(db: SQL.Database, session_name: String): List[String] =
- db.execute_query_statement(
- Base.table.select(List(Base.theory_name), distinct = true,
- sql = private_data.where_equal(session_name) + SQL.order_by(List(Base.theory_name))),
- List.from[String], res => res.string(Base.theory_name))
-
- def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
- db.execute_query_statement(
- Base.table.select(List(Base.theory_name, Base.name),
- sql = private_data.where_equal(session_name)) + SQL.order_by(List(Base.theory_name, Base.name)),
- List.from[Entry_Name],
- { res =>
- Entry_Name(
- session = session_name,
- theory = res.string(Base.theory_name),
- name = res.string(Base.name))
- })
- }
-
- def compound_name(a: String, b: String): String =
- if (a.isEmpty) b else a + ":" + b
-
- sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") {
- val compound_name: String = Export.compound_name(theory, name)
-
- def make_path(prune: Int = 0): Path = {
- val elems = theory :: space_explode('/', name)
- if (elems.length < prune + 1) {
- error("Cannot prune path by " + prune + " element(s): " + Path.make(elems))
- }
- else Path.make(elems.drop(prune))
- }
- }
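/* A sketch of the naming scheme (hypothetical values):

     val en = Entry_Name(session = "HOL", theory = "HOL.List", name = "theory/parents")
     en.compound_name         // "HOL.List:theory/parents"
     en.make_path()           // path HOL.List/theory/parents
     en.make_path(prune = 1)  // path theory/parents (theory component dropped)
*/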
-
- def message(msg: String, theory_name: String, name: String): String =
- msg + " " + quote(name) + " for theory " + quote(theory_name)
-
- object Entry {
- def apply(
- entry_name: Entry_Name,
- executable: Boolean,
- body: Future[(Boolean, Bytes)],
- cache: XML.Cache
- ): Entry = new Entry(entry_name, executable, body, cache)
-
- def empty(theory_name: String, name: String): Entry =
- Entry(Entry_Name(theory = theory_name, name = name),
- false, Future.value(false, Bytes.empty), XML.Cache.none)
-
- def make(
- session_name: String,
- args: Protocol.Export.Args,
- bytes: Bytes,
- cache: XML.Cache
- ): Entry = {
- val body =
- if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.compress))
- else Future.value((false, bytes))
- val entry_name =
- Entry_Name(session = session_name, theory = args.theory_name, name = args.name)
- Entry(entry_name, args.executable, body, cache)
- }
- }
-
- final class Entry private(
- val entry_name: Entry_Name,
- val executable: Boolean,
- val body: Future[(Boolean, Bytes)],
- val cache: XML.Cache
- ) {
- def session_name: String = entry_name.session
- def theory_name: String = entry_name.theory
- def name: String = entry_name.name
- override def toString: String = name
-
- def is_finished: Boolean = body.is_finished
- def cancel(): Unit = body.cancel()
-
- def compound_name: String = entry_name.compound_name
-
- def name_has_prefix(s: String): Boolean = name.startsWith(s)
- val name_elems: List[String] = explode_name(name)
-
- def name_extends(elems: List[String]): Boolean =
- name_elems.startsWith(elems) && name_elems != elems
-
- def bytes: Bytes = {
- val (compressed, bs) = body.join
- if (compressed) bs.uncompress(cache = cache.compress) else bs
- }
-
- def text: String = bytes.text
-
- def yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(bytes), cache = cache)
- }
-
- def make_regex(pattern: String): Regex = {
- @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex =
- chs match {
- case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest)
- case '*' :: rest => make("[^:/]*" :: result, depth, rest)
- case '?' :: rest => make("[^:/]" :: result, depth, rest)
- case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest)
- case '{' :: rest => make("(" :: result, depth + 1, rest)
- case ',' :: rest if depth > 0 => make("|" :: result, depth, rest)
- case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest)
- case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest)
- case c :: rest => make(c.toString :: result, depth, rest)
- case Nil => result.reverse.mkString.r
- }
- make(Nil, 0, pattern.toList)
- }
-
- def make_matcher(pats: List[String]): Entry_Name => Boolean = {
- val regs = pats.map(make_regex)
- (entry_name: Entry_Name) => regs.exists(_.matches(entry_name.compound_name))
- }
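/* A sketch of the pattern language over compound names "theory:name"
   (hypothetical values):

     val matcher = make_matcher(List("*:theory/**", "HOL.List:document/*"))
     matcher(Entry_Name(theory = "HOL.List", name = "theory/parents"))  // true
     matcher(Entry_Name(theory = "HOL.List", name = "proofs/123"))      // false

   Here "*" and "?" stop at both ":" and "/", while "**" stops only at ":". */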
-
- def clean_session(db: SQL.Database, session_name: String): Unit =
- private_data.transaction_lock(db, create = true, label = "Export.clean_session") {
- private_data.clean_session(db, session_name)
- }
-
- def read_theory_names(db: SQL.Database, session_name: String): List[String] =
- private_data.transaction_lock(db, label = "Export.read_theory_names") {
- private_data.read_theory_names(db, session_name)
- }
-
- def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
- private_data.transaction_lock(db, label = "Export.read_entry_names") {
- private_data.read_entry_names(db, session_name)
- }
-
- def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
- private_data.transaction_lock(db, label = "Export.read_entry") {
- private_data.read_entry(db, entry_name, cache)
- }
-
-
- /* database consumer thread */
-
- def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer =
- new Consumer(db, cache, progress)
-
- class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) {
- private val errors = Synchronized[List[String]](Nil)
-
- private def consume(args: List[(Entry, Boolean)]): List[Exn.Result[Unit]] = {
- for ((entry, _) <- args) {
- if (progress.stopped) entry.cancel() else entry.body.join
- }
- private_data.transaction_lock(db, label = "Export.consumer(" + args.length + ")") {
- var known = private_data.known_entries(db, args.map(p => p._1.entry_name))
- val buffer = new mutable.ListBuffer[Option[Entry]]
-
- for ((entry, strict) <- args) {
- if (progress.stopped || known(entry.entry_name)) {
- buffer += None
- if (strict && known(entry.entry_name)) {
- val msg = message("Duplicate export", entry.theory_name, entry.name)
- errors.change(msg :: _)
- }
- }
- else {
- buffer += Some(entry)
- known += entry.entry_name
- }
- }
-
- val entries = buffer.toList
- try {
- private_data.write_entries(db, entries.flatten)
- val ok = Exn.Res[Unit](())
- entries.map(_ => ok)
- }
- catch {
- case exn: Throwable =>
- val err = Exn.Exn[Unit](exn)
- entries.map(_ => err)
- }
- }
- }
-
- private val consumer =
- Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")(
- bulk = { case (entry, _) => entry.is_finished },
- consume = args => (args.grouped(20).toList.flatMap(consume), true))
-
- def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
- if (!progress.stopped && !body.is_empty) {
- consumer.send(Entry.make(session_name, args, body, cache) -> args.strict)
- }
- }
-
- def shutdown(close: Boolean = false): List[String] = {
- consumer.shutdown()
- if (close) db.close()
- errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil)
- }
- }
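/* A sketch of the intended protocol, assuming `db`, `cache`, `session_name`,
   `args: Protocol.Export.Args` and `body: Bytes` arrive from the prover side:

     val consumer = Export.consumer(db, cache)
     consumer.make_entry(session_name, args, body)  // asynchronous, grouped in bulk
     val errors = consumer.shutdown(close = true)   // flush and report duplicates
*/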
-
-
- /* context for database access */
-
- def open_database_context(store: Store, server: SSH.Server = SSH.no_server): Database_Context =
- new Database_Context(store, store.maybe_open_database_server(server = server))
-
- def open_session_context0(
- store: Store,
- session: String,
- server: SSH.Server = SSH.no_server
- ): Session_Context = {
- open_database_context(store, server = server)
- .open_session0(session, close_database_context = true)
- }
-
- def open_session_context(
- store: Store,
- session_background: Sessions.Background,
- document_snapshot: Option[Document.Snapshot] = None,
- server: SSH.Server = SSH.no_server
- ): Session_Context = {
- open_database_context(store, server = server).open_session(
- session_background, document_snapshot = document_snapshot, close_database_context = true)
- }
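/* A sketch of read access via a session context, assuming a suitable `store`:

     using(Export.open_session_context0(store, "HOL")) { session_context =>
       session_context.theory_names()  // all theories with exports
       session_context.apply("HOL.List", "theory/parents").text
     }

   Closing the context closes the underlying database(s) as well. */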
-
- class Database_Context private[Export](
- val store: Store,
- val database_server: Option[SQL.Database]
- ) extends AutoCloseable {
- database_context =>
-
- override def toString: String = {
- val s =
- database_server match {
- case Some(db) => db.toString
- case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ")
- }
- "Database_Context(" + s + ")"
- }
-
- def cache: Term.Cache = store.cache
-
- def close(): Unit = database_server.foreach(_.close())
-
- def open_database(session: String, output: Boolean = false): Session_Database =
- database_server match {
- case Some(db) => new Session_Database(session, db)
- case None =>
- new Session_Database(session, store.open_database(session, output = output)) {
- override def close(): Unit = db.close()
- }
- }
-
- def open_session0(session: String, close_database_context: Boolean = false): Session_Context =
- open_session(Sessions.background0(session), close_database_context = close_database_context)
-
- def open_session(
- session_background: Sessions.Background,
- document_snapshot: Option[Document.Snapshot] = None,
- close_database_context: Boolean = false
- ): Session_Context = {
- val session_name = session_background.check_errors.session_name
- val session_hierarchy = session_background.sessions_structure.build_hierarchy(session_name)
- val session_databases =
- database_server match {
- case Some(db) => session_hierarchy.map(name => new Session_Database(name, db))
- case None =>
- val attempts =
- for (name <- session_hierarchy)
- yield name -> store.try_open_database(name, server_mode = false)
- attempts.collectFirst({ case (name, None) => name }) match {
- case Some(bad) =>
- for (case (_, Some(db)) <- attempts) db.close()
- store.error_database(bad)
- case None =>
- for (case (name, Some(db)) <- attempts) yield {
- new Session_Database(name, db) { override def close(): Unit = this.db.close() }
- }
- }
- }
- new Session_Context(
- database_context, session_background, session_databases, document_snapshot) {
- override def close(): Unit = {
- session_databases.foreach(_.close())
- if (close_database_context) database_context.close()
- }
- }
- }
- }
-
- class Session_Database private[Export](val session: String, val db: SQL.Database)
- extends AutoCloseable {
- def close(): Unit = ()
-
- private[Export] lazy val theory_names: List[String] = read_theory_names(db, session)
- private[Export] lazy val entry_names: List[Entry_Name] = read_entry_names(db, session)
- }
-
- class Session_Context private[Export](
- val database_context: Database_Context,
- session_background: Sessions.Background,
- db_hierarchy: List[Session_Database],
- val document_snapshot: Option[Document.Snapshot]
- ) extends AutoCloseable {
- session_context =>
-
- def close(): Unit = ()
-
- def cache: Term.Cache = database_context.cache
-
- def sessions_structure: Sessions.Structure = session_background.sessions_structure
-
- def session_base: Sessions.Base = session_background.base
-
- def session_name: String =
- if (document_snapshot.isDefined) Sessions.DRAFT
- else session_base.session_name
-
- def session_database(session: String = session_name): Option[Session_Database] =
- db_hierarchy.find(_.session == session)
-
- def session_db(session: String = session_name): Option[SQL.Database] =
- session_database(session = session).map(_.db)
-
- def session_stack: List[String] =
- ((if (document_snapshot.isDefined) List(session_name) else Nil) :::
- db_hierarchy.map(_.session)).reverse
-
- private def select[A](
- session: String,
- select: Session_Database => List[A],
- project: Entry_Name => A,
- sort_key: A => String
- ): List[A] = {
- def result(name: String): List[A] =
- if (name == Sessions.DRAFT) {
- (for {
- snapshot <- document_snapshot.iterator
- entry_name <- snapshot.all_exports.keysIterator
- } yield project(entry_name)).toSet.toList.sortBy(sort_key)
- }
- else session_database(name).map(select).getOrElse(Nil)
-
- if (session.nonEmpty) result(session) else session_stack.flatMap(result)
- }
-
- def entry_names(session: String = session_name): List[Entry_Name] =
- select(session, _.entry_names, identity, _.compound_name)
-
- def theory_names(session: String = session_name): List[String] =
- select(session, _.theory_names, _.theory, identity)
-
- def get(theory: String, name: String): Option[Entry] = {
- def snapshot_entry: Option[Entry] =
- for {
- snapshot <- document_snapshot
- entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name)
- entry <- snapshot.all_exports.get(entry_name)
- } yield entry
- def db_entry: Option[Entry] =
- db_hierarchy.view.map { database =>
- val entry_name = Export.Entry_Name(session = database.session, theory = theory, name = name)
- read_entry(database.db, entry_name, cache)
- }.collectFirst({ case Some(entry) => entry })
-
- snapshot_entry orElse db_entry
- }
-
- def apply(theory: String, name: String, permissive: Boolean = false): Entry =
- get(theory, name) match {
- case None if permissive => Entry.empty(theory, name)
- case None => error("Missing export entry " + quote(compound_name(theory, name)))
- case Some(entry) => entry
- }
-
- def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context =
- new Theory_Context(session_context, theory, other_cache)
-
- def get_source_file(name: String): Option[Store.Source_File] = {
- val store = database_context.store
- (for {
- database <- db_hierarchy.iterator
- file <- store.read_sources(database.db, database.session, name = name).iterator
- } yield file).nextOption()
- }
-
- def source_file(name: String): Store.Source_File =
- get_source_file(name).getOrElse(error("Missing session source file " + quote(name)))
-
- def theory_source(theory: String, unicode_symbols: Boolean = false): String = {
- def snapshot_source: Option[String] =
- for {
- snapshot <- document_snapshot
- text <- snapshot.version.nodes.iterator.collectFirst(
- { case (name, node) if name.theory == theory => node.source })
- if text.nonEmpty
- } yield Symbol.output(unicode_symbols, text)
-
- def db_source: Option[String] = {
- val theory_context = session_context.theory(theory)
- for {
- name <- theory_context.files0(permissive = true).headOption
- file <- get_source_file(name)
- } yield Symbol.output(unicode_symbols, file.bytes.text)
- }
-
- snapshot_source orElse db_source getOrElse ""
- }
-
- def classpath(): List[File.Content] = {
- (for {
- session <- session_stack.iterator
- info <- sessions_structure.get(session).iterator
- if info.export_classpath.nonEmpty
- matcher = make_matcher(info.export_classpath)
- entry_name <- entry_names(session = session).iterator
- if matcher(entry_name)
- entry <- get(entry_name.theory, entry_name.name).iterator
- } yield File.content(entry.entry_name.make_path(), entry.bytes)).toList
- }
-
- override def toString: String =
- "Export.Session_Context(" + commas_quote(session_stack) + ")"
- }
-
- class Theory_Context private[Export](
- val session_context: Session_Context,
- val theory: String,
- other_cache: Option[Term.Cache]
- ) {
- def cache: Term.Cache = other_cache getOrElse session_context.cache
-
- def get(name: String): Option[Entry] = session_context.get(theory, name)
- def apply(name: String, permissive: Boolean = false): Entry =
- session_context.apply(theory, name, permissive = permissive)
-
- def yxml(name: String): XML.Body =
- get(name) match {
- case Some(entry) => entry.yxml
- case None => Nil
- }
-
- def document_id(): Option[Long] =
- apply(DOCUMENT_ID, permissive = true).text match {
- case Value.Long(id) => Some(id)
- case _ => None
- }
-
- def files0(permissive: Boolean = false): List[String] =
- split_lines(apply(FILES, permissive = permissive).text)
-
- def files(permissive: Boolean = false): Option[(String, List[String])] =
- files0(permissive = permissive) match {
- case Nil => None
- case a :: bs => Some((a, bs))
- }
-
- override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
- }
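/* A sketch of theory-level access (names are hypothetical):

     val thy = session_context.theory("HOL.List")
     thy.yxml(Export.DOCUMENT_LATEX)  // decoded XML body, or Nil if absent
     thy.files0(permissive = true)    // source file names, possibly empty
*/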
-
-
- /* export to file-system */
-
- def export_files(
- store: Store,
- session_name: String,
- export_dir: Path,
- progress: Progress = new Progress,
- export_prune: Int = 0,
- export_list: Boolean = false,
- export_patterns: List[String] = Nil
- ): Unit = {
- using(store.open_database(session_name)) { db =>
- val entry_names = read_entry_names(db, session_name)
-
- // list
- if (export_list) {
- for (entry_name <- entry_names) progress.echo(entry_name.compound_name)
- }
-
- // export
- if (export_patterns.nonEmpty) {
- val matcher = make_matcher(export_patterns)
- for {
- entry_name <- entry_names if matcher(entry_name)
- entry <- read_entry(db, entry_name, store.cache)
- } {
- val path = export_dir + entry_name.make_path(prune = export_prune)
- progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
- Isabelle_System.make_directory(path.dir)
- val bytes = entry.bytes
- if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
- File.set_executable(path, reset = !entry.executable)
- }
- }
- }
- }
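/* A programmatic sketch of the command-line tool below:

     Export.export_files(store, "HOL", Path.explode("export"),
       export_list = true, export_patterns = List("*:**"))

   lists all entries of session HOL and writes them below ./export. */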
-
-
- /* Isabelle tool wrapper */
-
- val default_export_dir: Path = Path.explode("export")
-
- val isabelle_tool =
- Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here,
- { args =>
- /* arguments */
-
- var export_dir = default_export_dir
- var dirs: List[Path] = Nil
- var export_list = false
- var no_build = false
- var options = Options.init()
- var export_prune = 0
- var export_patterns: List[String] = Nil
-
- val getopts = Getopts("""
-Usage: isabelle export [OPTIONS] SESSION
-
- Options are:
- -O DIR output directory for exported files (default: """ + default_export_dir + """)
- -d DIR include session directory
- -l list exports
- -n no build of session
- -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
- -p NUM prune path of exported files by NUM elements
- -x PATTERN extract files matching pattern (e.g. "*:**" for all)
-
- List or export theory exports for SESSION: named blobs produced by
- isabelle build. Option -l or -x is required; option -x may be repeated.
-
- The PATTERN language resembles glob patterns in the shell, with ? and *
- (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
- and variants {pattern1,pattern2,pattern3}.
-""",
- "O:" -> (arg => export_dir = Path.explode(arg)),
- "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
- "l" -> (_ => export_list = true),
- "n" -> (_ => no_build = true),
- "o:" -> (arg => options = options + arg),
- "p:" -> (arg => export_prune = Value.Int.parse(arg)),
- "x:" -> (arg => export_patterns ::= arg))
-
- val more_args = getopts(args)
- val session_name =
- more_args match {
- case List(session_name) if export_list || export_patterns.nonEmpty => session_name
- case _ => getopts.usage()
- }
-
- val progress = new Console_Progress()
-
-
- /* build */
-
- if (!no_build) {
- val rc =
- progress.interrupt_handler {
- Build.build_logic(options, session_name, progress = progress, dirs = dirs)
- }
- if (rc != Process_Result.RC.ok) sys.exit(rc)
- }
-
-
- /* export files */
-
- val store = Store(options)
- export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune,
- export_list = export_list, export_patterns = export_patterns)
- })
-}
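
Typical invocations of the tool above (the session name is only an example):

  isabelle export -l HOL
  isabelle export -x "*:**" -O export HOL
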
--- a/src/Pure/Thy/export_theory.ML Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,479 +0,0 @@
-(* Title: Pure/Thy/export_theory.ML
- Author: Makarius
-
-Export foundational theory content and locale/class structure.
-*)
-
-signature EXPORT_THEORY =
-sig
- val other_name_space: (theory -> Name_Space.T) -> theory -> theory
- val export_enabled: Thy_Info.presentation_context -> bool
- val export_body: theory -> string -> XML.body -> unit
-end;
-
-structure Export_Theory: EXPORT_THEORY =
-struct
-
-(* other name spaces *)
-
-fun err_dup_kind kind = error ("Duplicate name space kind " ^ quote kind);
-
-structure Data = Theory_Data
-(
- type T = (theory -> Name_Space.T) Inttab.table;
- val empty = Inttab.empty;
- val merge = Inttab.merge (K true);
-);
-
-val other_name_spaces = map #2 o Inttab.dest o Data.get;
-fun other_name_space get_space thy = Data.map (Inttab.update (serial (), get_space)) thy;
-
-val _ = Theory.setup
- (other_name_space Thm.oracle_space #>
- other_name_space Global_Theory.fact_space #>
- other_name_space (Bundle.bundle_space o Context.Theory) #>
- other_name_space (Attrib.attribute_space o Context.Theory) #>
- other_name_space (Method.method_space o Context.Theory));
-
-
-(* approximative syntax *)
-
-val get_syntax = Syntax.get_approx o Proof_Context.syn_of;
-fun get_syntax_type ctxt = get_syntax ctxt o Lexicon.mark_type;
-fun get_syntax_const ctxt = get_syntax ctxt o Lexicon.mark_const;
-fun get_syntax_fixed ctxt = get_syntax ctxt o Lexicon.mark_fixed;
-
-fun get_syntax_param ctxt loc x =
- let val thy = Proof_Context.theory_of ctxt in
- if Class.is_class thy loc then
- (case AList.lookup (op =) (Class.these_params thy [loc]) x of
- NONE => NONE
- | SOME (_, (c, _)) => get_syntax_const ctxt c)
- else get_syntax_fixed ctxt x
- end;
-
-val encode_syntax =
- XML.Encode.variant
- [fn NONE => ([], []),
- fn SOME (Syntax.Prefix delim) => ([delim], []),
- fn SOME (Syntax.Infix {assoc, delim, pri}) =>
- let
- val ass =
- (case assoc of
- Printer.No_Assoc => 0
- | Printer.Left_Assoc => 1
- | Printer.Right_Assoc => 2);
- open XML.Encode Term_XML.Encode;
- in ([], triple int string int (ass, delim, pri)) end];
-
-
-(* free variables: not declared in the context *)
-
-val is_free = not oo Name.is_declared;
-
-fun add_frees used =
- fold_aterms (fn Free (x, T) => is_free used x ? insert (op =) (x, T) | _ => I);
-
-fun add_tfrees used =
- (fold_types o fold_atyps) (fn TFree (a, S) => is_free used a ? insert (op =) (a, S) | _ => I);
-
-
-(* locales *)
-
-fun locale_content thy loc =
- let
- val ctxt = Locale.init loc thy;
- val args =
- Locale.params_of thy loc
- |> map (fn ((x, T), _) => ((x, T), get_syntax_param ctxt loc x));
- val axioms =
- let
- val (asm, defs) = Locale.specification_of thy loc;
- val cprops = map (Thm.cterm_of ctxt) (the_list asm @ defs);
- val (intro1, intro2) = Locale.intros_of thy loc;
- val intros_tac = Method.try_intros_tac ctxt (the_list intro1 @ the_list intro2) [];
- val res =
- Goal.init (Conjunction.mk_conjunction_balanced cprops)
- |> (ALLGOALS Goal.conjunction_tac THEN intros_tac)
- |> try Seq.hd;
- in
- (case res of
- SOME goal => Thm.prems_of goal
- | NONE => raise Fail ("Cannot unfold locale " ^ quote loc))
- end;
- val typargs = build_rev (fold Term.add_tfrees (map (Free o #1) args @ axioms));
- in {typargs = typargs, args = args, axioms = axioms} end;
-
-fun get_locales thy =
- Locale.get_locales thy |> map_filter (fn loc =>
- if Experiment.is_experiment thy loc then NONE else SOME (loc, ()));
-
-fun get_dependencies prev_thys thy =
- Locale.dest_dependencies prev_thys thy |> map_filter (fn dep =>
- if Experiment.is_experiment thy (#source dep) orelse
- Experiment.is_experiment thy (#target dep) then NONE
- else
- let
- val (type_params, params) = Locale.parameters_of thy (#source dep);
- val typargs = fold (Term.add_tfreesT o #2 o #1) params type_params;
- val substT =
- typargs |> map_filter (fn v =>
- let
- val T = TFree v;
- val T' = Morphism.typ (#morphism dep) T;
- in if T = T' then NONE else SOME (v, T') end);
- val subst =
- params |> map_filter (fn (v, _) =>
- let
- val t = Free v;
- val t' = Morphism.term (#morphism dep) t;
- in if t aconv t' then NONE else SOME (v, t') end);
- in SOME (dep, (substT, subst)) end);
-
-
-(* presentation *)
-
-fun export_enabled (context: Thy_Info.presentation_context) =
- Options.bool (#options context) "export_theory";
-
-fun export_body thy name body =
- if XML.is_empty_body body then ()
- else Export.export thy (Path.binding0 (Path.make ("theory" :: space_explode "/" name))) body;
-
-val _ = (Theory.setup o Thy_Info.add_presentation) (fn context => fn thy =>
- let
- val rep_tsig = Type.rep_tsig (Sign.tsig_of thy);
- val consts = Sign.consts_of thy;
- val thy_ctxt = Proof_Context.init_global thy;
-
- val pos_properties = Thy_Info.adjust_pos_properties context;
-
- val enabled = export_enabled context;
-
-
- (* strict parents *)
-
- val parents = Theory.parents_of thy;
- val _ =
- Export.export thy \<^path_binding>\<open>theory/parents\<close>
- (XML.Encode.string (cat_lines (map Context.theory_long_name parents) ^ "\n"));
-
-
- (* spec rules *)
-
- fun spec_rule_content {pos, name, rough_classification, terms, rules} =
- let
- val spec =
- terms @ map Thm.plain_prop_of rules
- |> Term_Subst.zero_var_indexes_list
- |> map Logic.unvarify_global;
- in
- {props = pos_properties pos,
- name = name,
- rough_classification = rough_classification,
- typargs = build_rev (fold Term.add_tfrees spec),
- args = build_rev (fold Term.add_frees spec),
- terms = map (fn t => (t, Term.type_of t)) (take (length terms) spec),
- rules = drop (length terms) spec}
- end;
-
-
- (* entities *)
-
- fun make_entity_markup name xname pos serial =
- let val props = pos_properties pos @ Markup.serial_properties serial;
- in (Markup.entityN, (Markup.nameN, name) :: (Markup.xnameN, xname) :: props) end;
-
- fun entity_markup space name =
- let
- val xname = Name_Space.extern_shortest thy_ctxt space name;
- val {serial, pos, ...} = Name_Space.the_entry space name;
- in make_entity_markup name xname pos serial end;
-
- fun export_entities export_name get_space decls export =
- let
- val parent_spaces = map get_space parents;
- val space = get_space thy;
- in
- build (decls |> fold (fn (name, decl) =>
- if exists (fn space => Name_Space.declared space name) parent_spaces then I
- else
- (case export name decl of
- NONE => I
- | SOME make_body =>
- let
- val i = #serial (Name_Space.the_entry space name);
- val body = if enabled then make_body () else [];
- in cons (i, XML.Elem (entity_markup space name, body)) end)))
- |> sort (int_ord o apply2 #1) |> map #2
- |> export_body thy export_name
- end;
-
-
- (* types *)
-
- val encode_type =
- let open XML.Encode Term_XML.Encode
- in triple encode_syntax (list string) (option typ) end;
-
- val _ =
- export_entities "types" Sign.type_space (Name_Space.dest_table (#types rep_tsig))
- (fn c =>
- (fn Type.Logical_Type n =>
- SOME (fn () =>
- encode_type (get_syntax_type thy_ctxt c, Name.invent Name.context Name.aT n, NONE))
- | Type.Abbreviation (args, U, false) =>
- SOME (fn () =>
- encode_type (get_syntax_type thy_ctxt c, args, SOME U))
- | _ => NONE));
-
-
- (* consts *)
-
- val encode_term = Term_XML.Encode.term consts;
-
- val encode_const =
- let open XML.Encode Term_XML.Encode
- in pair encode_syntax (pair (list string) (pair typ (pair (option encode_term) bool))) end;
-
- val _ =
- export_entities "consts" Sign.const_space (#constants (Consts.dest consts))
- (fn c => fn (T, abbrev) =>
- SOME (fn () =>
- let
- val syntax = get_syntax_const thy_ctxt c;
- val U = Logic.unvarifyT_global T;
- val U0 = Term.strip_sortsT U;
- val trim_abbrev = Proofterm.standard_vars_term Name.context #> Term.strip_sorts;
- val abbrev' = Option.map trim_abbrev abbrev;
- val args = map (#1 o dest_TFree) (Consts.typargs consts (c, U0));
- val propositional = Object_Logic.is_propositional thy_ctxt (Term.body_type U0);
- in encode_const (syntax, (args, (U0, (abbrev', propositional)))) end));
-
-
- (* axioms *)
-
- fun standard_prop used extra_shyps raw_prop raw_proof =
- let
- val (prop, proof) = Proofterm.standard_vars used (raw_prop, raw_proof);
- val args = rev (add_frees used prop []);
- val typargs = rev (add_tfrees used prop []);
- val used_typargs = fold (Name.declare o #1) typargs used;
- val sorts = Name.invent used_typargs Name.aT (length extra_shyps) ~~ extra_shyps;
- in ((sorts @ typargs, args, prop), proof) end;
-
- fun standard_prop_of thm =
- standard_prop Name.context (Thm.extra_shyps thm) (Thm.full_prop_of thm);
-
- val encode_prop =
- let open XML.Encode Term_XML.Encode
- in triple (list (pair string sort)) (list (pair string typ)) encode_term end;
-
- fun encode_axiom used prop =
- encode_prop (#1 (standard_prop used [] prop NONE));
-
- val _ =
- export_entities "axioms" Theory.axiom_space (Theory.all_axioms_of thy)
- (fn _ => fn prop => SOME (fn () => encode_axiom Name.context prop));
-
-
- (* theorems and proof terms *)
-
- val clean_thm = Thm.check_hyps (Context.Theory thy) #> Thm.strip_shyps;
- val prep_thm = clean_thm #> Thm.unconstrainT #> Thm.strip_shyps;
-
- val lookup_thm_id = Global_Theory.lookup_thm_id thy;
-
- fun expand_name thm_id (header: Proofterm.thm_header) =
- if #serial header = #serial thm_id then ""
- else
- (case lookup_thm_id (Proofterm.thm_header_id header) of
- NONE => ""
- | SOME thm_name => Thm_Name.print thm_name);
-
- fun entity_markup_thm (serial, (name, i)) =
- let
- val space = Global_Theory.fact_space thy;
- val xname = Name_Space.extern_shortest thy_ctxt space name;
- val {pos, ...} = Name_Space.the_entry space name;
- in make_entity_markup (Thm_Name.print (name, i)) (Thm_Name.print (xname, i)) pos serial end;
-
- fun encode_thm thm_id raw_thm =
- let
- val deps = map (Thm_Name.print o #2) (Thm_Deps.thm_deps thy [raw_thm]);
- val thm = prep_thm raw_thm;
-
- val proof0 =
- if Proofterm.export_standard_enabled () then
- Proof_Syntax.standard_proof_of
- {full = true, expand_name = SOME o expand_name thm_id} thm
- else if Proofterm.export_enabled () then Thm.reconstruct_proof_of thm
- else MinProof;
- val (prop, SOME proof) = standard_prop_of thm (SOME proof0);
- val _ = Thm.expose_proofs thy [thm];
- in
- (prop, deps, proof) |>
- let
- open XML.Encode Term_XML.Encode;
- val encode_proof = Proofterm.encode_standard_proof consts;
- in triple encode_prop (list string) encode_proof end
- end;
-
- fun export_thm (thm_id, thm_name) =
- let
- val markup = entity_markup_thm (#serial thm_id, thm_name);
- val body =
- if enabled then
- Global_Theory.get_thm_name thy (thm_name, Position.none)
- |> encode_thm thm_id
- else [];
- in XML.Elem (markup, body) end;
-
- val _ = export_body thy "thms" (map export_thm (Global_Theory.dest_thm_names thy));
-
-
- (* type classes *)
-
- val encode_class =
- let open XML.Encode Term_XML.Encode
- in pair (list (pair string typ)) (list (encode_axiom Name.context)) end;
-
- val _ =
- export_entities "classes" Sign.class_space
- (map (rpair ()) (Graph.keys (Sorts.classes_of (#2 (#classes rep_tsig)))))
- (fn name => fn () => SOME (fn () =>
- (case try (Axclass.get_info thy) name of
- NONE => ([], [])
- | SOME {params, axioms, ...} => (params, map (Thm.plain_prop_of o clean_thm) axioms))
- |> encode_class));
-
-
- (* sort algebra *)
-
- val _ =
- if enabled then
- let
- val prop = encode_axiom Name.context o Logic.varify_global;
-
- val encode_classrel =
- let open XML.Encode
- in list (pair prop (pair string string)) end;
-
- val encode_arities =
- let open XML.Encode Term_XML.Encode
- in list (pair prop (triple string (list sort) string)) end;
-
- val export_classrel =
- maps (fn (c, cs) => map (pair c) cs) #> map (`Logic.mk_classrel) #> encode_classrel;
-
- val export_arities = map (`Logic.mk_arity) #> encode_arities;
-
- val {classrel, arities} =
- Sorts.dest_algebra (map (#2 o #classes o Type.rep_tsig o Sign.tsig_of) parents)
- (#2 (#classes rep_tsig));
- in
- if null classrel then () else export_body thy "classrel" (export_classrel classrel);
- if null arities then () else export_body thy "arities" (export_arities arities)
- end
- else ();
-
-
- (* locales *)
-
- fun encode_locale used =
- let open XML.Encode Term_XML.Encode in
- triple (list (pair string sort)) (list (pair (pair string typ) encode_syntax))
- (list (encode_axiom used))
- end;
-
- val _ =
- export_entities "locales" Locale.locale_space (get_locales thy)
- (fn loc => fn () => SOME (fn () =>
- let
- val {typargs, args, axioms} = locale_content thy loc;
- val used = fold Name.declare (map #1 typargs @ map (#1 o #1) args) Name.context;
- in encode_locale used (typargs, args, axioms) end
- handle ERROR msg =>
- cat_error msg ("The error(s) above occurred in locale " ^
- quote (Locale.markup_name thy_ctxt loc))));
-
-
- (* locale dependencies *)
-
- fun encode_locale_dependency (dep: Locale.locale_dependency, subst) =
- (#source dep, (#target dep, (#prefix dep, subst))) |>
- let
- open XML.Encode Term_XML.Encode;
- val encode_subst =
- pair (list (pair (pair string sort) typ)) (list (pair (pair string typ) (term consts)));
- in pair string (pair string (pair (list (pair string bool)) encode_subst)) end;
-
- val _ =
- if enabled then
- get_dependencies parents thy |> map_index (fn (i, dep) =>
- let
- val xname = string_of_int (i + 1);
- val name = Long_Name.implode [Context.theory_base_name thy, xname];
- val markup = make_entity_markup name xname (#pos (#1 dep)) (#serial (#1 dep));
- val body = encode_locale_dependency dep;
- in XML.Elem (markup, body) end)
- |> export_body thy "locale_dependencies"
- else ();
-
-
- (* constdefs *)
-
- val _ =
- if enabled then
- let
- val constdefs =
- Defs.dest_constdefs (map Theory.defs_of (Theory.parents_of thy)) (Theory.defs_of thy)
- |> sort_by #1;
- val encode =
- let open XML.Encode
- in list (pair string string) end;
- in if null constdefs then () else export_body thy "constdefs" (encode constdefs) end
- else ();
-
-
- (* spec rules *)
-
- val encode_specs =
- let open XML.Encode Term_XML.Encode in
- list (fn {props, name, rough_classification, typargs, args, terms, rules} =>
- pair properties (pair string (pair Spec_Rules.encode_rough_classification
- (pair (list (pair string sort)) (pair (list (pair string typ))
- (pair (list (pair encode_term typ)) (list encode_term))))))
- (props, (name, (rough_classification, (typargs, (args, (terms, rules)))))))
- end;
-
- val _ =
- if enabled then
- (case Spec_Rules.dest_theory thy of
- [] => ()
- | spec_rules =>
- export_body thy "spec_rules" (encode_specs (map spec_rule_content spec_rules)))
- else ();
-
-
- (* other entities *)
-
- fun export_other get_space =
- let
- val space = get_space thy;
- val export_name = "other/" ^ Name_Space.kind_of space;
- val decls = Name_Space.get_names space |> map (rpair ());
- in export_entities export_name get_space decls (fn _ => fn () => SOME (K [])) end;
-
- val other_spaces = other_name_spaces thy;
- val other_kinds = map (fn get_space => Name_Space.kind_of (get_space thy)) other_spaces;
- val _ =
- if null other_kinds then ()
- else
- Export.export thy \<^path_binding>\<open>theory/other_kinds\<close>
- (XML.Encode.string (cat_lines other_kinds));
- val _ = List.app export_other other_spaces;
-
- in () end);
-
-end;
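
The exports produced by this ML module are read back on the Scala side by
Export_Theory (next file). A minimal sketch, assuming an open `session_context`:

  val theory = Export_Theory.read_theory(session_context.theory("HOL.List"))
  theory.consts.map(_.name)
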
--- a/src/Pure/Thy/export_theory.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,763 +0,0 @@
-/* Title: Pure/Thy/export_theory.scala
- Author: Makarius
-
-Export foundational theory content.
-*/
-
-package isabelle
-
-
-import scala.collection.immutable.SortedMap
-
-
-object Export_Theory {
- /** session content **/
-
- sealed case class Session(name: String, theory_graph: Graph[String, Option[Theory]]) {
- override def toString: String = name
-
- def theory(theory_name: String): Option[Theory] =
- if (theory_graph.defined(theory_name)) theory_graph.get_node(theory_name)
- else None
-
- def theories: List[Theory] =
- theory_graph.topological_order.flatMap(theory)
- }
-
- def read_session(
- session_context: Export.Session_Context,
- session_stack: Boolean = false,
- progress: Progress = new Progress
- ): Session = {
- val thys =
- for (theory <- theory_names(session_context, session_stack = session_stack)) yield {
- progress.echo("Reading theory " + theory)
- read_theory(session_context.theory(theory))
- }
-
- val graph0 =
- thys.foldLeft(Graph.string[Option[Theory]]) {
- case (g, thy) => g.default_node(thy.name, Some(thy))
- }
- val graph1 =
- thys.foldLeft(graph0) {
- case (g0, thy) =>
- thy.parents.foldLeft(g0) {
- case (g1, parent) => g1.default_node(parent, None).add_edge_acyclic(parent, thy.name)
- }
- }
-
- Session(session_context.session_name, graph1)
- }
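/* A sketch of reading a whole session, assuming an open `session_context`
   and a `progress` value:

     val session = read_session(session_context, progress = progress)
     for (thy <- session.theories) progress.echo(thy.name)  // topological order
*/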
-
-
-
- /** theory content **/
-
- sealed case class Theory(name: String, parents: List[String],
- types: List[Entity[Type]],
- consts: List[Entity[Const]],
- axioms: List[Entity[Axiom]],
- thms: List[Entity[Thm]],
- classes: List[Entity[Class]],
- locales: List[Entity[Locale]],
- locale_dependencies: List[Entity[Locale_Dependency]],
- classrel: List[Classrel],
- arities: List[Arity],
- constdefs: List[Constdef],
- typedefs: List[Typedef],
- datatypes: List[Datatype],
- spec_rules: List[Spec_Rule],
- others: Map[String, List[Entity[Other]]]
- ) {
- override def toString: String = name
-
- def entity_iterator: Iterator[Entity0] =
- types.iterator.map(_.no_content) ++
- consts.iterator.map(_.no_content) ++
- axioms.iterator.map(_.no_content) ++
- thms.iterator.map(_.no_content) ++
- classes.iterator.map(_.no_content) ++
- locales.iterator.map(_.no_content) ++
- locale_dependencies.iterator.map(_.no_content) ++
- (for { (_, xs) <- others; x <- xs.iterator } yield x.no_content)
-
- def cache(cache: Term.Cache): Theory =
- Theory(cache.string(name),
- parents.map(cache.string),
- types.map(_.cache(cache)),
- consts.map(_.cache(cache)),
- axioms.map(_.cache(cache)),
- thms.map(_.cache(cache)),
- classes.map(_.cache(cache)),
- locales.map(_.cache(cache)),
- locale_dependencies.map(_.cache(cache)),
- classrel.map(_.cache(cache)),
- arities.map(_.cache(cache)),
- constdefs.map(_.cache(cache)),
- typedefs.map(_.cache(cache)),
- datatypes.map(_.cache(cache)),
- spec_rules.map(_.cache(cache)),
- (for ((k, xs) <- others.iterator) yield cache.string(k) -> xs.map(_.cache(cache))).toMap)
- }
-
- def read_theory_parents(theory_context: Export.Theory_Context): Option[List[String]] =
- theory_context.get(Export.THEORY_PREFIX + "parents")
- .map(entry => Library.trim_split_lines(entry.text))
-
- def theory_names(
- session_context: Export.Session_Context,
- session_stack: Boolean = false
- ): List[String] = {
- val session = if (session_stack) "" else session_context.session_name
- for {
- theory <- session_context.theory_names(session = session)
- if read_theory_parents(session_context.theory(theory)).isDefined
- } yield theory
- }
-
- def no_theory: Theory =
- Theory("", Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Nil, Map.empty)
-
- def read_theory(
- theory_context: Export.Theory_Context,
- permissive: Boolean = false
- ): Theory = {
- val cache = theory_context.cache
- val session_name = theory_context.session_context.session_name
- val theory_name = theory_context.theory
- read_theory_parents(theory_context) match {
- case None if permissive => no_theory
- case None =>
- error("Missing theory export in session " + quote(session_name) + ": " + quote(theory_name))
- case Some(parents) =>
- val theory =
- Theory(theory_name, parents,
- read_types(theory_context),
- read_consts(theory_context),
- read_axioms(theory_context),
- read_thms(theory_context),
- read_classes(theory_context),
- read_locales(theory_context),
- read_locale_dependencies(theory_context),
- read_classrel(theory_context),
- read_arities(theory_context),
- read_constdefs(theory_context),
- read_typedefs(theory_context),
- read_datatypes(theory_context),
- read_spec_rules(theory_context),
- read_others(theory_context))
- if (cache.no_cache) theory else theory.cache(cache)
- }
- }
-
-
- /* entities */
-
- object Kind {
- val TYPE = "type"
- val CONST = "const"
- val THM = "thm"
- val PROOF = "proof"
- val LOCALE_DEPENDENCY = "locale_dependency"
- val DOCUMENT_HEADING = "document_heading"
- val DOCUMENT_TEXT = "document_text"
- val PROOF_TEXT = "proof_text"
- }
-
- def export_kind(kind: String): String =
- if (kind == Markup.TYPE_NAME) Kind.TYPE
- else if (kind == Markup.CONSTANT) Kind.CONST
- else kind
-
- def export_kind_name(kind: String, name: String): String =
- name + "|" + export_kind(kind)
-
- abstract class Content[T] {
- def cache(cache: Term.Cache): T
- }
- sealed case class No_Content() extends Content[No_Content] {
- def cache(cache: Term.Cache): No_Content = this
- }
- sealed case class Entity[A <: Content[A]](
- kind: String,
- name: String,
- xname: String,
- pos: Position.T,
- id: Option[Long],
- serial: Long,
- content: Option[A]
- ) {
- val kname: String = export_kind_name(kind, name)
- val range: Symbol.Range = Position.Range.unapply(pos).getOrElse(Text.Range.offside)
- val file: String = Position.File.unapply(pos).getOrElse("")
-
- def export_kind: String = Export_Theory.export_kind(kind)
- override def toString: String = export_kind + " " + quote(name)
-
- def the_content: A =
- if (content.isDefined) content.get else error("No content for " + toString)
-
- def no_content: Entity0 = copy(content = None)
-
- def cache(cache: Term.Cache): Entity[A] =
- Entity(
- cache.string(kind),
- cache.string(name),
- cache.string(xname),
- cache.position(pos),
- id,
- serial,
- content.map(_.cache(cache)))
- }
- type Entity0 = Entity[No_Content]
-
- def read_entities[A <: Content[A]](
- theory_context: Export.Theory_Context,
- export_name: String,
- kind: String,
- decode: XML.Decode.T[A]
- ): List[Entity[A]] = {
- def decode_entity(tree: XML.Tree): Entity[A] = {
- def err(): Nothing = throw new XML.XML_Body(List(tree))
-
- tree match {
- case XML.Elem(Markup(Markup.ENTITY, props), body) =>
- val name = Markup.Name.unapply(props) getOrElse err()
- val xname = Markup.XName.unapply(props) getOrElse err()
- val pos = props.filter(p => Markup.position_property(p) && p._1 != Markup.ID)
- val id = Position.Id.unapply(props)
- val serial = Markup.Serial.unapply(props) getOrElse err()
- val content = if (body.isEmpty) None else Some(decode(body))
- Entity(kind, name, xname, pos, id, serial, content)
- case _ => err()
- }
- }
- theory_context.yxml(export_name).map(decode_entity)
- }
-
-
- /* approximative syntax */
-
- enum Assoc { case NO_ASSOC, LEFT_ASSOC, RIGHT_ASSOC }
-
- sealed abstract class Syntax
- case object No_Syntax extends Syntax
- case class Prefix(delim: String) extends Syntax
- case class Infix(assoc: Assoc, delim: String, pri: Int) extends Syntax
-
- def decode_syntax: XML.Decode.T[Syntax] =
- XML.Decode.variant(List(
- { case (Nil, Nil) => No_Syntax },
- { case (List(delim), Nil) => Prefix(delim) },
- { case (Nil, body) =>
- import XML.Decode._
- val (ass, delim, pri) = triple(int, string, int)(body)
- Infix(Assoc.fromOrdinal(ass), delim, pri) }))
-
-
- /* types */
-
- sealed case class Type(syntax: Syntax, args: List[String], abbrev: Option[Term.Typ])
- extends Content[Type] {
- override def cache(cache: Term.Cache): Type =
- Type(
- syntax,
- args.map(cache.string),
- abbrev.map(cache.typ))
- }
-
- def read_types(theory_context: Export.Theory_Context): List[Entity[Type]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "types", Markup.TYPE_NAME,
- { body =>
- import XML.Decode._
- val (syntax, args, abbrev) =
- triple(decode_syntax, list(string), option(Term_XML.Decode.typ))(body)
- Type(syntax, args, abbrev)
- })
-
-
- /* consts */
-
- sealed case class Const(
- syntax: Syntax,
- typargs: List[String],
- typ: Term.Typ,
- abbrev: Option[Term.Term],
- propositional: Boolean
- ) extends Content[Const] {
- override def cache(cache: Term.Cache): Const =
- Const(
- syntax,
- typargs.map(cache.string),
- cache.typ(typ),
- abbrev.map(cache.term),
- propositional)
- }
-
- def read_consts(theory_context: Export.Theory_Context): List[Entity[Const]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "consts", Markup.CONSTANT,
- { body =>
- import XML.Decode._
- val (syntax, (typargs, (typ, (abbrev, propositional)))) =
- pair(decode_syntax,
- pair(list(string),
- pair(Term_XML.Decode.typ,
- pair(option(Term_XML.Decode.term), bool))))(body)
- Const(syntax, typargs, typ, abbrev, propositional)
- })
-
-
- /* axioms */
-
- sealed case class Prop(
- typargs: List[(String, Term.Sort)],
- args: List[(String, Term.Typ)],
- term: Term.Term
- ) extends Content[Prop] {
- override def cache(cache: Term.Cache): Prop =
- Prop(
- typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
- args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
- cache.term(term))
- }
-
- def decode_prop(body: XML.Body): Prop = {
- val (typargs, args, t) = {
- import XML.Decode._
- import Term_XML.Decode._
- triple(list(pair(string, sort)), list(pair(string, typ)), term)(body)
- }
- Prop(typargs, args, t)
- }
-
- sealed case class Axiom(prop: Prop) extends Content[Axiom] {
- override def cache(cache: Term.Cache): Axiom = Axiom(prop.cache(cache))
- }
-
- def read_axioms(theory_context: Export.Theory_Context): List[Entity[Axiom]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "axioms", Markup.AXIOM,
- body => Axiom(decode_prop(body)))
-
-
- /* theorems */
-
- sealed case class Thm_Id(serial: Long, theory_name: String)
-
- sealed case class Thm(
- prop: Prop,
- deps: List[String],
- proof: Term.Proof)
- extends Content[Thm] {
- override def cache(cache: Term.Cache): Thm =
- Thm(
- prop.cache(cache),
- deps.map(cache.string),
- cache.proof(proof))
- }
-
- def read_thms(theory_context: Export.Theory_Context): List[Entity[Thm]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "thms", Kind.THM,
- { body =>
- import XML.Decode._
- import Term_XML.Decode._
- val (prop, deps, prf) = triple(decode_prop, list(string), proof)(body)
- Thm(prop, deps, prf)
- })
-
- sealed case class Proof(
- typargs: List[(String, Term.Sort)],
- args: List[(String, Term.Typ)],
- term: Term.Term,
- proof: Term.Proof
- ) {
- def prop: Prop = Prop(typargs, args, term)
-
- def cache(cache: Term.Cache): Proof =
- Proof(
- typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
- args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
- cache.term(term),
- cache.proof(proof))
- }
-
- def read_proof(
- session_context: Export.Session_Context,
- id: Thm_Id,
- other_cache: Option[Term.Cache] = None
- ): Option[Proof] = {
- val theory_context = session_context.theory(id.theory_name, other_cache = other_cache)
- val cache = theory_context.cache
-
- for { entry <- theory_context.get(Export.PROOFS_PREFIX + id.serial) }
- yield {
- val body = entry.yxml
- val (typargs, (args, (prop_body, proof_body))) = {
- import XML.Decode._
- import Term_XML.Decode._
- pair(list(pair(string, sort)), pair(list(pair(string, typ)), pair(x => x, x => x)))(body)
- }
- val env = args.toMap
- val prop = Term_XML.Decode.term_env(env)(prop_body)
- val proof = Term_XML.Decode.proof_env(env)(proof_body)
-
- val result = Proof(typargs, args, prop, proof)
- if (cache.no_cache) result else result.cache(cache)
- }
- }
-
- def read_proof_boxes(
- session_context: Export.Session_Context,
- proof: Term.Proof,
- suppress: Thm_Id => Boolean = _ => false,
- other_cache: Option[Term.Cache] = None
- ): List[(Thm_Id, Proof)] = {
- var seen = Set.empty[Long]
- var result = SortedMap.empty[Long, (Thm_Id, Proof)]
-
- def boxes(context: Option[(Long, Term.Proof)], prf: Term.Proof): Unit = {
- prf match {
- case Term.Abst(_, _, p) => boxes(context, p)
- case Term.AbsP(_, _, p) => boxes(context, p)
- case Term.Appt(p, _) => boxes(context, p)
- case Term.AppP(p, q) => boxes(context, p); boxes(context, q)
- case thm: Term.PThm if !seen(thm.serial) =>
- seen += thm.serial
- val id = Thm_Id(thm.serial, thm.theory_name)
- if (!suppress(id)) {
- Export_Theory.read_proof(session_context, id, other_cache = other_cache) match {
- case Some(p) =>
- result += (thm.serial -> (id -> p))
- boxes(Some((thm.serial, p.proof)), p.proof)
- case None =>
- error("Missing proof " + thm.serial + " (theory " + quote (thm.theory_name) + ")" +
- (context match {
- case None => ""
- case Some((i, p)) => " in proof " + i + ":\n" + p
- }))
- }
- }
- case _ =>
- }
- }
-
- boxes(None, proof)
- result.iterator.map(_._2).toList
- }
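[Illustrative sketch, not part of this changeset: read_proof and read_proof_boxes
above resolve Term.PThm nodes against stored proof exports. A hypothetical use,
assuming a session_context (cf. export.scala, moved alongside) and a theorem
entity thm obtained from read_thms:

  val boxes = Export_Theory.read_proof_boxes(session_context, thm.the_content.proof)
  for ((id, proof) <- boxes) println(id.serial.toString + ": " + proof.prop)
]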
-
-
- /* type classes */
-
- sealed case class Class(params: List[(String, Term.Typ)], axioms: List[Prop])
- extends Content[Class] {
- override def cache(cache: Term.Cache): Class =
- Class(
- params.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
- axioms.map(_.cache(cache)))
- }
-
- def read_classes(theory_context: Export.Theory_Context): List[Entity[Class]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "classes", Markup.CLASS,
- { body =>
- import XML.Decode._
- import Term_XML.Decode._
- val (params, axioms) = pair(list(pair(string, typ)), list(decode_prop))(body)
- Class(params, axioms)
- })
-
-
- /* locales */
-
- sealed case class Locale(
- typargs: List[(String, Term.Sort)],
- args: List[((String, Term.Typ), Syntax)],
- axioms: List[Prop]
- ) extends Content[Locale] {
- override def cache(cache: Term.Cache): Locale =
- Locale(
- typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
- args.map({ case ((name, typ), syntax) => ((cache.string(name), cache.typ(typ)), syntax) }),
- axioms.map(_.cache(cache)))
- }
-
- def read_locales(theory_context: Export.Theory_Context): List[Entity[Locale]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "locales", Markup.LOCALE,
- { body =>
- import XML.Decode._
- import Term_XML.Decode._
- val (typargs, args, axioms) =
- triple(list(pair(string, sort)), list(pair(pair(string, typ), decode_syntax)),
- list(decode_prop))(body)
- Locale(typargs, args, axioms)
- })
-
-
- /* locale dependencies */
-
- sealed case class Locale_Dependency(
- source: String,
- target: String,
- prefix: List[(String, Boolean)],
- subst_types: List[((String, Term.Sort), Term.Typ)],
- subst_terms: List[((String, Term.Typ), Term.Term)]
- ) extends Content[Locale_Dependency] {
- override def cache(cache: Term.Cache): Locale_Dependency =
- Locale_Dependency(
- cache.string(source),
- cache.string(target),
- prefix.map({ case (name, mandatory) => (cache.string(name), mandatory) }),
- subst_types.map({ case ((a, s), ty) => ((cache.string(a), cache.sort(s)), cache.typ(ty)) }),
- subst_terms.map({ case ((x, ty), t) => ((cache.string(x), cache.typ(ty)), cache.term(t)) }))
-
- def is_inclusion: Boolean =
- subst_types.isEmpty && subst_terms.isEmpty
- }
-
- def read_locale_dependencies(
- theory_context: Export.Theory_Context
- ): List[Entity[Locale_Dependency]] = {
- read_entities(theory_context, Export.THEORY_PREFIX + "locale_dependencies",
- Kind.LOCALE_DEPENDENCY,
- { body =>
- import XML.Decode._
- import Term_XML.Decode._
- val (source, (target, (prefix, (subst_types, subst_terms)))) =
- pair(string, pair(string, pair(list(pair(string, bool)),
- pair(list(pair(pair(string, sort), typ)), list(pair(pair(string, typ), term))))))(body)
- Locale_Dependency(source, target, prefix, subst_types, subst_terms)
- })
- }
-
-
- /* sort algebra */
-
- sealed case class Classrel(class1: String, class2: String, prop: Prop) {
- def cache(cache: Term.Cache): Classrel =
- Classrel(cache.string(class1), cache.string(class2), prop.cache(cache))
- }
-
- def read_classrel(theory_context: Export.Theory_Context): List[Classrel] = {
- val body = theory_context.yxml(Export.THEORY_PREFIX + "classrel")
- val classrel = {
- import XML.Decode._
- list(pair(decode_prop, pair(string, string)))(body)
- }
- for ((prop, (c1, c2)) <- classrel) yield Classrel(c1, c2, prop)
- }
-
- sealed case class Arity(
- type_name: String,
- domain: List[Term.Sort],
- codomain: String,
- prop: Prop
- ) {
- def cache(cache: Term.Cache): Arity =
- Arity(cache.string(type_name), domain.map(cache.sort), cache.string(codomain),
- prop.cache(cache))
- }
-
- def read_arities(theory_context: Export.Theory_Context): List[Arity] = {
- val body = theory_context.yxml(Export.THEORY_PREFIX + "arities")
- val arities = {
- import XML.Decode._
- import Term_XML.Decode._
- list(pair(decode_prop, triple(string, list(sort), string)))(body)
- }
- for ((prop, (a, b, c)) <- arities) yield Arity(a, b, c, prop)
- }
-
-
- /* Pure constdefs */
-
- sealed case class Constdef(name: String, axiom_name: String) {
- def cache(cache: Term.Cache): Constdef =
- Constdef(cache.string(name), cache.string(axiom_name))
- }
-
- def read_constdefs(theory_context: Export.Theory_Context): List[Constdef] = {
- val body = theory_context.yxml(Export.THEORY_PREFIX + "constdefs")
- val constdefs = {
- import XML.Decode._
- list(pair(string, string))(body)
- }
- for ((name, axiom_name) <- constdefs) yield Constdef(name, axiom_name)
- }
-
-
- /* HOL typedefs */
-
- sealed case class Typedef(
- name: String,
- rep_type: Term.Typ,
- abs_type: Term.Typ,
- rep_name: String,
- abs_name: String,
- axiom_name: String
- ) {
- def cache(cache: Term.Cache): Typedef =
- Typedef(cache.string(name),
- cache.typ(rep_type),
- cache.typ(abs_type),
- cache.string(rep_name),
- cache.string(abs_name),
- cache.string(axiom_name))
- }
-
- def read_typedefs(theory_context: Export.Theory_Context): List[Typedef] = {
- val body = theory_context.yxml(Export.THEORY_PREFIX + "typedefs")
- val typedefs = {
- import XML.Decode._
- import Term_XML.Decode._
- list(pair(string, pair(typ, pair(typ, pair(string, pair(string, string))))))(body)
- }
- for { (name, (rep_type, (abs_type, (rep_name, (abs_name, axiom_name))))) <- typedefs }
- yield Typedef(name, rep_type, abs_type, rep_name, abs_name, axiom_name)
- }
-
-
- /* HOL datatypes */
-
- sealed case class Datatype(
- pos: Position.T,
- name: String,
- co: Boolean,
- typargs: List[(String, Term.Sort)],
- typ: Term.Typ,
- constructors: List[(Term.Term, Term.Typ)]
- ) {
- def id: Option[Long] = Position.Id.unapply(pos)
-
- def cache(cache: Term.Cache): Datatype =
- Datatype(
- cache.position(pos),
- cache.string(name),
- co,
- typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
- cache.typ(typ),
- constructors.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }))
- }
-
- def read_datatypes(theory_context: Export.Theory_Context): List[Datatype] = {
- val body = theory_context.yxml(Export.THEORY_PREFIX + "datatypes")
- val datatypes = {
- import XML.Decode._
- import Term_XML.Decode._
- list(pair(properties, pair(string, pair(bool, pair(list(pair(string, sort)),
- pair(typ, list(pair(term, typ))))))))(body)
- }
- for ((pos, (name, (co, (typargs, (typ, constructors))))) <- datatypes)
- yield Datatype(pos, name, co, typargs, typ, constructors)
- }
-
-
- /* Pure spec rules */
-
- sealed abstract class Recursion {
- def cache(cache: Term.Cache): Recursion =
- this match {
- case Primrec(types) => Primrec(types.map(cache.string))
- case Primcorec(types) => Primcorec(types.map(cache.string))
- case _ => this
- }
- }
- case class Primrec(types: List[String]) extends Recursion
- case object Recdef extends Recursion
- case class Primcorec(types: List[String]) extends Recursion
- case object Corec extends Recursion
- case object Unknown_Recursion extends Recursion
-
- val decode_recursion: XML.Decode.T[Recursion] = {
- import XML.Decode._
- variant(List(
- { case (Nil, a) => Primrec(list(string)(a)) },
- { case (Nil, Nil) => Recdef },
- { case (Nil, a) => Primcorec(list(string)(a)) },
- { case (Nil, Nil) => Corec },
- { case (Nil, Nil) => Unknown_Recursion }))
- }
-
-
- sealed abstract class Rough_Classification {
- def is_equational: Boolean = this.isInstanceOf[Equational]
- def is_inductive: Boolean = this == Inductive
- def is_co_inductive: Boolean = this == Co_Inductive
- def is_relational: Boolean = is_inductive || is_co_inductive
- def is_unknown: Boolean = this == Unknown
-
- def cache(cache: Term.Cache): Rough_Classification =
- this match {
- case Equational(recursion) => Equational(recursion.cache(cache))
- case _ => this
- }
- }
- case class Equational(recursion: Recursion) extends Rough_Classification
- case object Inductive extends Rough_Classification
- case object Co_Inductive extends Rough_Classification
- case object Unknown extends Rough_Classification
-
- val decode_rough_classification: XML.Decode.T[Rough_Classification] = {
- import XML.Decode._
- variant(List(
- { case (Nil, a) => Equational(decode_recursion(a)) },
- { case (Nil, Nil) => Inductive },
- { case (Nil, Nil) => Co_Inductive },
- { case (Nil, Nil) => Unknown }))
- }
-
-
- sealed case class Spec_Rule(
- pos: Position.T,
- name: String,
- rough_classification: Rough_Classification,
- typargs: List[(String, Term.Sort)],
- args: List[(String, Term.Typ)],
- terms: List[(Term.Term, Term.Typ)],
- rules: List[Term.Term]
- ) {
- def id: Option[Long] = Position.Id.unapply(pos)
-
- def cache(cache: Term.Cache): Spec_Rule =
- Spec_Rule(
- cache.position(pos),
- cache.string(name),
- rough_classification.cache(cache),
- typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
- args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
- terms.map({ case (term, typ) => (cache.term(term), cache.typ(typ)) }),
- rules.map(cache.term))
- }
-
- def read_spec_rules(theory_context: Export.Theory_Context): List[Spec_Rule] = {
- val body = theory_context.yxml(Export.THEORY_PREFIX + "spec_rules")
- val spec_rules = {
- import XML.Decode._
- import Term_XML.Decode._
- list(
- pair(properties, pair(string, pair(decode_rough_classification,
- pair(list(pair(string, sort)), pair(list(pair(string, typ)),
- pair(list(pair(term, typ)), list(term))))))))(body)
- }
- for ((pos, (name, (rough_classification, (typargs, (args, (terms, rules)))))) <- spec_rules)
- yield Spec_Rule(pos, name, rough_classification, typargs, args, terms, rules)
- }
-
-
- /* other entities */
-
- sealed case class Other() extends Content[Other] {
- override def cache(cache: Term.Cache): Other = this
- }
-
- def read_others(theory_context: Export.Theory_Context): Map[String, List[Entity[Other]]] = {
- val kinds =
- theory_context.get(Export.THEORY_PREFIX + "other_kinds") match {
- case Some(entry) => split_lines(entry.text)
- case None => Nil
- }
- val other = Other()
- def read_other(kind: String): List[Entity[Other]] =
- read_entities(theory_context, Export.THEORY_PREFIX + "other/" + kind, kind, _ => other)
-
- kinds.map(kind => kind -> read_other(kind)).toMap
- }
-}
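[Illustrative sketch, not part of this changeset: the Export_Theory API above
moves verbatim to src/Pure/Build/export_theory.scala. A minimal reading loop,
assuming a session built with exports and the Export.open_session_context0
entry point of export.scala (moved alongside); session and theory names are
examples:

  val store = Store(Options.init())
  using(Export.open_session_context0(store, "HOL")) { session_context =>
    val theory_context = session_context.theory("HOL.Nat")
    for (entity <- Export_Theory.read_consts(theory_context))
      println(entity.name)
  }
]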
--- a/src/Pure/Thy/file_format.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,111 +0,0 @@
-/* Title: Pure/Thy/file_format.scala
- Author: Makarius
-
-Support for user-defined file formats, associated with an active session.
-*/
-
-package isabelle
-
-
-object File_Format {
- object No_Data extends AnyRef {
- override def toString: String = "File_Format.No_Data"
- }
-
- sealed case class Theory_Context(name: Document.Node.Name, content: String)
-
-
- /* registry */
-
- lazy val registry: Registry =
- new Registry(Isabelle_System.make_services(classOf[File_Format]))
-
- final class Registry private[File_Format](file_formats: List[File_Format]) {
- override def toString: String = file_formats.mkString("File_Format.Registry(", ", ", ")")
-
- def get(name: String): Option[File_Format] = file_formats.find(_.detect(name))
- def get(name: Document.Node.Name): Option[File_Format] = get(name.node)
- def get_theory(name: Document.Node.Name): Option[File_Format] = get(name.theory)
- def is_theory(name: Document.Node.Name): Boolean = get_theory(name).isDefined
-
- def theory_excluded(name: String): Boolean = file_formats.exists(_.theory_excluded(name))
-
- def parse_data(name: String, text: String): AnyRef =
- get(name) match {
- case Some(file_format) => file_format.parse_data(name, text)
- case None => No_Data
- }
- def parse_data(name: Document.Node.Name, text: String): AnyRef = parse_data(name.node, text)
-
- def start_session(session: isabelle.Session): Session =
- new Session(file_formats.map(_.start(session)).filterNot(_ == No_Agent))
- }
-
-
- /* session */
-
- final class Session private[File_Format](agents: List[Agent]) {
- override def toString: String =
- agents.mkString("File_Format.Session(", ", ", ")")
-
- def prover_options(options: Options): Options =
- agents.foldLeft(options) { case (opts, agent) => agent.prover_options(opts) }
-
- def stop_session(): Unit = agents.foreach(_.stop())
- }
-
- trait Agent {
- def prover_options(options: Options): Options = options
- def stop(): Unit = {}
- }
-
- object No_Agent extends Agent
-}
-
-abstract class File_Format extends Isabelle_System.Service {
- def format_name: String
- override def toString: String = "File_Format(" + format_name + ")"
-
- def file_ext: String
- def detect(name: String): Boolean = name.endsWith("." + file_ext)
-
- def parse_data(name: String, text: String): AnyRef = File_Format.No_Data
-
-
- /* implicit theory context: name and content */
-
- def theory_suffix: String = ""
- def theory_content(name: String): String = ""
- def theory_excluded(name: String): Boolean = false
-
- def make_theory_name(
- resources: Resources,
- name: Document.Node.Name
- ): Option[Document.Node.Name] = {
- for {
- theory <- Url.get_base_name(name.node)
- if detect(name.node) && theory_suffix.nonEmpty
- }
- yield {
- val node = resources.append_path(name.node, Path.explode(theory_suffix))
- Document.Node.Name(node, theory = theory)
- }
- }
-
- def make_theory_content(resources: Resources, thy_name: Document.Node.Name): Option[String] = {
- for {
- prefix <- Url.strip_base_name(thy_name.node)
- suffix <- Url.get_base_name(thy_name.node)
- if detect(prefix) && suffix == theory_suffix
- thy <- Url.get_base_name(prefix)
- s <- proper_string(theory_content(thy))
- } yield s
- }
-
- def html_document(snapshot: Document.Snapshot): Option[Browser_Info.HTML_Document] = None
-
-
- /* PIDE session agent */
-
- def start(session: isabelle.Session): File_Format.Agent = File_Format.No_Agent
-}
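[Illustrative sketch, not part of this changeset: File_Format moves unchanged
to src/Pure/Build/file_format.scala. A minimal user-defined format, picked up
as an Isabelle/Scala service (cf. Isabelle_System.make_services above); the
class name and file extension are hypothetical:

  class CSV_File_Format extends File_Format {
    def format_name: String = "csv"
    def file_ext: String = "csv"
    // implicit theory context: present "file.csv" as theory "file.csv/CSV"
    override def theory_suffix: String = "CSV"
    override def theory_content(name: String): String =
      """theory "CSV" imports Pure begin end"""
  }
]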
--- a/src/Pure/Thy/sessions.ML Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,146 +0,0 @@
-(* Title: Pure/Thy/sessions.ML
- Author: Makarius
-
-Support for session ROOT syntax.
-*)
-
-signature SESSIONS =
-sig
- val root_name: string
- val theory_name: string
- val chapter_definition_parser: (Toplevel.transition -> Toplevel.transition) parser
- val session_parser: (Toplevel.transition -> Toplevel.transition) parser
-end;
-
-structure Sessions: SESSIONS =
-struct
-
-val root_name = "ROOT";
-val theory_name = "Pure.Sessions";
-
-local
-
-val groups =
- Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.name --| Parse.$$$ ")")) [];
-
-val description =
- Scan.optional (Parse.$$$ "description" |-- Parse.!!! (Parse.input Parse.embedded)) Input.empty;
-
-val theory_entry = Parse.input Parse.theory_name --| Parse.opt_keyword "global";
-
-val theories =
- Parse.$$$ "theories" |-- Parse.!!! (Scan.optional Parse.options [] -- Scan.repeat1 theory_entry);
-
-val document_theories =
- Parse.$$$ "document_theories" |-- Scan.repeat1 (Parse.input Parse.theory_name);
-
-val document_files =
- Parse.$$$ "document_files" |--
- Parse.!!! (Parse.in_path_parens "document" -- Scan.repeat1 Parse.path_input);
-
-val prune =
- Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.nat --| Parse.$$$ "]")) 0;
-
-val export_files =
- Parse.$$$ "export_files" |--
- Parse.!!! (Parse.in_path_parens "export" -- prune -- Scan.repeat1 Parse.embedded);
-
-val export_classpath =
- Parse.$$$ "export_classpath" |-- Scan.repeat Parse.embedded;
-
-fun path_source source path =
- Input.source (Input.is_delimited source) (Path.implode path) (Input.range_of source);
-
-in
-
-val chapter_definition_parser =
- Parse.chapter_name -- groups -- description >> (fn (_, descr) =>
- Toplevel.keep (fn state =>
- let
- val ctxt = Toplevel.context_of state;
- val _ =
- Context_Position.report ctxt
- (Position.range_position (Symbol_Pos.range (Input.source_explode descr)))
- Markup.comment;
- in () end));
-
-val session_parser =
- Parse.session_name -- groups -- Parse.in_path "." --
- (Parse.$$$ "=" |--
- Parse.!!! (Scan.option (Parse.session_name --| Parse.!!! (Parse.$$$ "+")) -- description --
- Scan.optional (Parse.$$$ "options" |-- Parse.!!! Parse.options) [] --
- Scan.optional (Parse.$$$ "sessions" |--
- Parse.!!! (Scan.repeat1 Parse.session_name)) [] --
- Scan.optional (Parse.$$$ "directories" |-- Parse.!!! (Scan.repeat1 Parse.path_input)) [] --
- Scan.repeat theories --
- Scan.optional document_theories [] --
- Scan.repeat document_files --
- Scan.repeat export_files --
- Scan.optional export_classpath []))
- >> (fn (((((session, _), _), dir),
- ((((((((((parent, descr), options), sessions), directories), theories),
- document_theories), document_files), export_files), _)))) =>
- Toplevel.keep (fn state =>
- let
- val ctxt = Toplevel.context_of state;
- val session_dir = Resources.check_dir ctxt NONE dir;
-
- val _ =
- (the_list parent @ sessions) |> List.app (fn arg =>
- ignore (Resources.check_session ctxt arg)
- handle ERROR msg => Output.error_message msg);
-
- val _ =
- Context_Position.report ctxt
- (Position.range_position (Symbol_Pos.range (Input.source_explode descr)))
- Markup.comment;
-
- val _ =
- (options @ maps #1 theories) |> List.app (fn (x, y) =>
- ignore (Completion.check_option_value ctxt x y (Options.default ()))
- handle ERROR msg => Output.error_message msg);
-
- fun check_thy source =
- ignore (Resources.check_file ctxt (SOME Path.current) source)
- handle ERROR msg => Output.error_message msg;
-
- val _ =
- maps #2 theories |> List.app (fn source =>
- let
- val s = Input.string_of source;
- val pos = Input.pos_of source;
- val {node_name, theory_name, ...} =
- Resources.import_name session session_dir s
- handle ERROR msg => error (msg ^ Position.here pos);
- val thy_path = the_default node_name (Resources.find_theory_file theory_name);
- in check_thy (path_source source thy_path) end);
-
- val _ =
- directories |> List.app (ignore o Resources.check_dir ctxt (SOME session_dir));
-
- val _ =
- document_theories |> List.app (fn source =>
- let
- val thy = Input.string_of source;
- val pos = Input.pos_of source;
- in
- (case Resources.find_theory_file thy of
- NONE => Output.error_message ("Unknown theory " ^ quote thy ^ Position.here pos)
- | SOME path => check_thy (path_source source path))
- end);
-
- val _ =
- document_files |> List.app (fn (doc_dir, doc_files) =>
- let
- val dir = Resources.check_dir ctxt (SOME session_dir) doc_dir;
- val _ = List.app (ignore o Resources.check_file ctxt (SOME dir)) doc_files;
- in () end);
-
- val _ =
- export_files |> List.app (fn ((export_dir, _), _) =>
- ignore (Resources.check_path ctxt (SOME session_dir) export_dir));
- in () end));
-
-end;
-
-end;
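[Illustrative note, not part of this changeset: sessions.ML moves to
src/Pure/Build/sessions.ML. For reference, session_parser above accepts ROOT
entries of the following shape (all names hypothetical):

  session Example (main) in "example" = HOL +
    description "An illustrative session entry."
    options [document = false]
    sessions "HOL-Library"
    directories "src"
    theories
      Theory_A
      Theory_B (global)
    document_files (in "document") "root.tex"
]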
--- a/src/Pure/Thy/sessions.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1333 +0,0 @@
-/* Title: Pure/Thy/sessions.scala
- Author: Makarius
-
-Cumulative session information.
-*/
-
-package isabelle
-
-import java.io.{File => JFile}
-
-import scala.collection.immutable.{SortedSet, SortedMap}
-import scala.collection.mutable
-
-
-object Sessions {
- /* main operations */
-
- def background0(session: String): Background = Background.empty(session)
-
- def background(options: Options,
- session: String,
- progress: Progress = new Progress,
- dirs: List[Path] = Nil,
- include_sessions: List[String] = Nil,
- session_ancestor: Option[String] = None,
- session_requirements: Boolean = false
- ): Background = {
- Background.load(options, session, progress = progress, dirs = dirs,
- include_sessions = include_sessions, session_ancestor = session_ancestor,
- session_requirements = session_requirements)
- }
-
- def load_structure(
- options: Options,
- dirs: List[Path] = Nil,
- select_dirs: List[Path] = Nil,
- infos: List[Info] = Nil,
- augment_options: String => List[Options.Spec] = _ => Nil
- ): Structure = {
- val roots = load_root_files(dirs = dirs, select_dirs = select_dirs)
- Structure.make(options, augment_options, roots = roots, infos = infos)
- }
-
- def deps(sessions_structure: Structure,
- progress: Progress = new Progress,
- inlined_files: Boolean = false,
- list_files: Boolean = false,
- check_keywords: Set[String] = Set.empty
- ): Deps = {
- Deps.load(sessions_structure, progress = progress, inlined_files = inlined_files,
- list_files = list_files, check_keywords = check_keywords)
- }
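[Illustrative sketch, not part of this changeset: the typical call chain for
the main operations above; the session name is an example:

  val structure = Sessions.load_structure(Options.init())
  val deps = Sessions.deps(structure.selection(Sessions.Selection.session("HOL")))
  deps.check_errors
]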
-
-
- /* session and theory names */
-
- val ROOTS: Path = Path.explode("ROOTS")
- val ROOT: Path = Path.explode("ROOT")
-
- val roots_name: String = "ROOTS"
- val root_name: String = "ROOT"
- val theory_import: String = "Pure.Sessions"
-
- val UNSORTED = "Unsorted"
- val DRAFT = "Draft"
-
- def is_pure(name: String): Boolean = name == Thy_Header.PURE
-
- def illegal_session(name: String): Boolean = name == "" || name == DRAFT
- def illegal_theory(name: String): Boolean =
- name == root_name || File_Format.registry.theory_excluded(name)
-
-
- /* ROOTS file format */
-
- class ROOTS_File_Format extends File_Format {
- val format_name: String = roots_name
- val file_ext = ""
-
- override def detect(name: String): Boolean =
- Url.get_base_name(name) match {
- case Some(base_name) => base_name == roots_name
- case None => false
- }
-
- override def theory_suffix: String = "ROOTS_file"
- override def theory_content(name: String): String =
- """theory "ROOTS" imports Pure begin ROOTS_file "." end"""
- override def theory_excluded(name: String): Boolean = name == "ROOTS"
- }
-
-
- /* base info */
-
- object Base {
- val bootstrap: Base = Base(overall_syntax = Thy_Header.bootstrap_syntax)
- }
-
- sealed case class Base(
- session_name: String = "",
- session_pos: Position.T = Position.none,
- proper_session_theories: List[Document.Node.Name] = Nil,
- document_theories: List[Document.Node.Name] = Nil,
- loaded_theories: Graph[String, Outer_Syntax] = Graph.string, // cumulative imports
- used_theories: List[(Document.Node.Name, Options)] = Nil, // new imports
- theory_load_commands: Map[String, List[Command_Span.Span]] = Map.empty,
- known_theories: Map[String, Document.Node.Entry] = Map.empty,
- known_loaded_files: Map[String, List[Path]] = Map.empty,
- overall_syntax: Outer_Syntax = Outer_Syntax.empty,
- imported_sources: List[(Path, SHA1.Digest)] = Nil,
- session_sources: List[(Path, SHA1.Digest)] = Nil,
- session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph,
- errors: List[String] = Nil
- ) {
- def session_entry: (String, Base) = session_name -> this
-
- override def toString: String = "Sessions.Base(" + print_body + ")"
- def print_body: String =
- "session_name = " + quote(session_name) +
- ", loaded_theories = " + loaded_theories.size +
- ", used_theories = " + used_theories.length
-
- def all_sources: List[(Path, SHA1.Digest)] = imported_sources ::: session_sources
-
- def all_document_theories: List[Document.Node.Name] =
- proper_session_theories ::: document_theories
-
- def loaded_theory(name: String): Boolean = loaded_theories.defined(name)
- def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory)
-
- def loaded_theory_syntax(name: String): Option[Outer_Syntax] =
- if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None
- def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] =
- loaded_theory_syntax(name.theory)
-
- def theory_syntax(name: Document.Node.Name): Outer_Syntax =
- loaded_theory_syntax(name) getOrElse overall_syntax
-
- def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax =
- nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax
- }
-
-
- /* background context */
-
- object Background {
- def empty(session: String): Background =
- Background(Base(session_name = session))
-
- def load(options: Options,
- session: String,
- progress: Progress = new Progress,
- dirs: List[Path] = Nil,
- include_sessions: List[String] = Nil,
- session_ancestor: Option[String] = None,
- session_requirements: Boolean = false
- ): Background = {
- val full_sessions = load_structure(options, dirs = dirs)
-
- val selected_sessions =
- full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
- val info = selected_sessions(session)
- val ancestor = session_ancestor orElse info.parent
-
- val (session1, infos1) =
- if (session_requirements && ancestor.isDefined) {
- val deps = Sessions.deps(selected_sessions, progress = progress)
- val base = deps(session)
-
- val ancestor_loaded =
- deps.get(ancestor.get) match {
- case Some(ancestor_base)
- if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
- ancestor_base.loaded_theories.defined _
- case _ =>
- error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
- }
-
- val required_theories =
- for {
- thy <- base.loaded_theories.keys
- if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
- }
- yield thy
-
- if (required_theories.isEmpty) (ancestor.get, Nil)
- else {
- val other_name = info.name + "_requirements(" + ancestor.get + ")"
- Isabelle_System.isabelle_tmp_prefix() // side effect: ensure that the $ISABELLE_TMP_PREFIX directory exists
-
- (other_name,
- List(
- Info.make(
- Chapter_Defs.empty,
- Options.init0(),
- info.options,
- augment_options = _ => Nil,
- dir_selected = false,
- dir = Path.explode("$ISABELLE_TMP_PREFIX"),
- chapter = info.chapter,
- Session_Entry(
- pos = info.pos,
- name = other_name,
- groups = info.groups,
- path = ".",
- parent = ancestor,
- description = "Required theory imports from other sessions",
- options = Nil,
- imports = info.deps,
- directories = Nil,
- theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
- document_theories = Nil,
- document_files = Nil,
- export_files = Nil,
- export_classpath = Nil))))
- }
- }
- else (session, Nil)
-
- val full_sessions1 =
- if (infos1.isEmpty) full_sessions
- else load_structure(options, dirs = dirs, infos = infos1)
-
- val selected_sessions1 =
- full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))
-
- val deps1 = Sessions.deps(selected_sessions1, progress = progress)
-
- Background(deps1(session1), sessions_structure = full_sessions1,
- errors = deps1.errors, infos = infos1)
- }
- }
-
- sealed case class Background(
- base: Base,
- sessions_structure: Structure = Structure.empty,
- errors: List[String] = Nil,
- infos: List[Info] = Nil
- ) {
- def session_name: String = base.session_name
- def info: Info = sessions_structure(session_name)
-
- def check_errors: Background =
- if (errors.isEmpty) this
- else error(cat_lines(errors))
- }
-
-
- /* source dependencies */
-
- object Deps {
- def load(sessions_structure: Structure,
- progress: Progress = new Progress,
- inlined_files: Boolean = false,
- list_files: Boolean = false,
- check_keywords: Set[String] = Set.empty
- ): Deps = {
- var cache_sources = Map.empty[JFile, SHA1.Digest]
- def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
- for {
- path <- paths
- file = path.file
- if cache_sources.isDefinedAt(file) || file.isFile
- }
- yield {
- cache_sources.get(file) match {
- case Some(digest) => (path, digest)
- case None =>
- val digest = SHA1.digest(file)
- cache_sources = cache_sources + (file -> digest)
- (path, digest)
- }
- }
- }
-
- val session_bases =
- sessions_structure.imports_topological_order.foldLeft(Map(Base.bootstrap.session_entry)) {
- case (session_bases, session_name) =>
- progress.expose_interrupt()
-
- val info = sessions_structure(session_name)
- try {
- val deps_base = info.deps_base(session_bases)
- val session_background =
- Background(base = deps_base, sessions_structure = sessions_structure)
- val resources = new Resources(session_background)
-
- progress.echo(
- "Session " + info.chapter + "/" + session_name +
- if_proper(info.groups, info.groups.mkString(" (", " ", ")")),
- verbose = !list_files)
-
- val dependencies = resources.session_dependencies(info)
-
- val overall_syntax = dependencies.overall_syntax
-
- val proper_session_theories =
- dependencies.theories.filter(name =>
- sessions_structure.theory_qualifier(name) == session_name)
-
- val theory_files = dependencies.theories.map(_.path)
-
- val (load_commands, load_commands_errors) =
- try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
- catch { case ERROR(msg) => (Nil, List(msg)) }
-
- val theory_load_commands =
- (for ((name, span) <- load_commands.iterator) yield name.theory -> span).toMap
-
- val loaded_files: List[(String, List[Path])] =
- for ((name, spans) <- load_commands) yield {
- val (theory, files) = dependencies.loaded_files(name, spans)
- theory -> files.map(file => Path.explode(file.node))
- }
-
- val document_files =
- for ((path1, path2) <- info.document_files)
- yield info.dir + path1 + path2
-
- val session_files =
- (theory_files ::: loaded_files.flatMap(_._2) ::: document_files).map(_.expand)
-
- val imported_files = if (inlined_files) dependencies.imported_files else Nil
-
- if (list_files) {
- progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))
- }
-
- if (check_keywords.nonEmpty) {
- Check_Keywords.check_keywords(
- progress, overall_syntax.keywords, check_keywords, theory_files)
- }
-
- val session_graph_display: Graph_Display.Graph = {
- def session_node(name: String): Graph_Display.Node =
- Graph_Display.Node("[" + name + "]", "session." + name)
-
- def node(name: Document.Node.Name): Graph_Display.Node = {
- val qualifier = sessions_structure.theory_qualifier(name)
- if (qualifier == info.name) {
- Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
- }
- else session_node(qualifier)
- }
-
- val required_sessions =
- dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
- .map(theory => sessions_structure.theory_qualifier(theory))
- .filter(name => name != info.name && sessions_structure.defined(name))
-
- val required_subgraph =
- sessions_structure.imports_graph
- .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
- .transitive_closure
- .restrict(required_sessions.toSet)
- .transitive_reduction_acyclic
-
- val graph0 =
- required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
- case (g, session) =>
- val a = session_node(session)
- val bs = required_subgraph.imm_preds(session).toList.map(session_node)
- bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
- }
-
- dependencies.entries.foldLeft(graph0) {
- case (g, entry) =>
- val a = node(entry.name)
- val bs = entry.header.imports.map(node).filterNot(_ == a)
- bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
- }
- }
-
- val known_theories =
- dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
- foldLeft(deps_base.known_theories)(_ + _)
-
- val known_loaded_files = deps_base.known_loaded_files ++ loaded_files
-
- val import_errors = {
- val known_sessions =
- sessions_structure.imports_requirements(List(session_name)).toSet
- for {
- name <- dependencies.theories
- qualifier = sessions_structure.theory_qualifier(name)
- if !known_sessions(qualifier)
- } yield "Bad import of theory " + quote(name.toString) +
- ": need to include sessions " + quote(qualifier) + " in ROOT"
- }
-
- val document_errors =
- info.document_theories.flatMap(
- {
- case (thy, pos) =>
- val build_hierarchy =
- if (sessions_structure.build_graph.defined(session_name)) {
- sessions_structure.build_hierarchy(session_name)
- }
- else Nil
-
- def err(msg: String): Option[String] =
- Some(msg + " " + quote(thy) + Position.here(pos))
-
- known_theories.get(thy).map(_.name) match {
- case None => err("Unknown document theory")
- case Some(name) =>
- val qualifier = sessions_structure.theory_qualifier(name)
- if (proper_session_theories.contains(name)) {
- err("Redundant document theory from this session:")
- }
- else if (
- !build_hierarchy.contains(qualifier) &&
- !dependencies.theories.contains(name)
- ) {
- err("Document theory from other session not imported properly:")
- }
- else None
- }
- })
- val document_theories =
- info.document_theories.map({ case (thy, _) => known_theories(thy).name })
-
- val dir_errors = {
- val ok = info.dirs.map(_.canonical_file).toSet
- val bad =
- (for {
- name <- proper_session_theories.iterator
- path = Path.explode(name.master_dir)
- if !ok(path.canonical_file)
- path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
- } yield (path1, name)).toList
- val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted
-
- val errs1 =
- for { (path1, name) <- bad }
- yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
- val errs2 =
- if (bad_dirs.isEmpty) Nil
- else List("Implicit use of session directories: " + commas(bad_dirs))
- val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
- val errs4 =
- (for {
- name <- proper_session_theories.iterator
- name1 <- resources.find_theory_node(name.theory)
- if name.node != name1.node
- } yield {
- "Incoherent theory file import:\n " + quote(name.node) +
- " vs. \n " + quote(name1.node)
- }).toList
-
- errs1 ::: errs2 ::: errs3 ::: errs4
- }
-
- val sources_errors =
- for (p <- session_files if !p.is_file) yield "No such file: " + p
-
- val path_errors =
- try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
- catch { case ERROR(msg) => List(msg) }
-
- val bibtex_errors = info.bibtex_entries.errors
-
- val base =
- Base(
- session_name = info.name,
- session_pos = info.pos,
- proper_session_theories = proper_session_theories,
- document_theories = document_theories,
- loaded_theories = dependencies.loaded_theories,
- used_theories = dependencies.theories_adjunct,
- theory_load_commands = theory_load_commands,
- known_theories = known_theories,
- known_loaded_files = known_loaded_files,
- overall_syntax = overall_syntax,
- imported_sources = check_sources(imported_files),
- session_sources = check_sources(session_files),
- session_graph_display = session_graph_display,
- errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
- document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
- bibtex_errors)
-
- session_bases + base.session_entry
- }
- catch {
- case ERROR(msg) =>
- cat_error(msg, "The error(s) above occurred in session " +
- quote(info.name) + Position.here(info.pos))
- }
- }
-
- new Deps(sessions_structure, session_bases)
- }
- }
-
- final class Deps private[Sessions](
- val sessions_structure: Structure,
- val session_bases: Map[String, Base]
- ) {
- def background(session: String): Background =
- Background(base = apply(session), sessions_structure = sessions_structure, errors = errors)
-
- def is_empty: Boolean = session_bases.keysIterator.forall(_.isEmpty)
- def apply(name: String): Base = session_bases(name)
- def get(name: String): Option[Base] = session_bases.get(name)
-
- def sources_shasum(name: String): SHA1.Shasum = {
- val meta_info = sessions_structure(name).meta_info
- val sources =
- SHA1.shasum_sorted(
- for ((path, digest) <- apply(name).all_sources)
- yield digest -> File.symbolic_path(path))
- meta_info ::: sources
- }
-
- def errors: List[String] =
- (for {
- (name, base) <- session_bases.iterator
- if base.errors.nonEmpty
- } yield cat_lines(base.errors) +
- "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.session_pos)
- ).toList
-
- def check_errors: Deps =
- errors match {
- case Nil => this
- case errs => error(cat_lines(errs))
- }
-
- override def toString: String = "Sessions.Deps(" + sessions_structure + ")"
- }
-
-
- /* cumulative session info */
-
- private val BUILD_PREFS_BG = "<build_prefs:"
- private val BUILD_PREFS_EN = ">"
-
- def make_build_prefs(name: String): String =
- BUILD_PREFS_BG + name + BUILD_PREFS_EN
-
- def is_build_prefs(s: String): Boolean = {
- val i = s.indexOf('<')
- i >= 0 && {
- val s1 = s.substring(i)
- s1.startsWith(BUILD_PREFS_BG) && s1.endsWith(BUILD_PREFS_EN)
- }
- }
-
- def eq_sources(options: Options, shasum1: SHA1.Shasum, shasum2: SHA1.Shasum): Boolean =
- if (options.bool("build_thorough")) shasum1 == shasum2
- else {
- def trim(shasum: SHA1.Shasum): SHA1.Shasum =
- shasum.filter(s => !is_build_prefs(s))
-
- trim(shasum1) == trim(shasum2)
- }
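[Illustrative note, not part of this changeset: build preferences are recorded
in a sources shasum as marker entries, e.g.

  Sessions.make_build_prefs("parallel_proofs") == "<build_prefs:parallel_proofs>"

and eq_sources ignores such entries unless option "build_thorough" is enabled.]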
-
- sealed case class Chapter_Info(
- name: String,
- pos: Position.T,
- groups: List[String],
- description: String,
- sessions: List[String]
- )
-
- object Info {
- def make(
- chapter_defs: Chapter_Defs,
- options0: Options,
- options: Options,
- augment_options: String => List[Options.Spec],
- dir_selected: Boolean,
- dir: Path,
- chapter: String,
- entry: Session_Entry
- ): Info = {
- try {
- val name = entry.name
-
- if (illegal_session(name)) error("Illegal session name " + quote(name))
- if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
- if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")
-
- val session_path = dir + Path.explode(entry.path)
- val directories = entry.directories.map(dir => session_path + Path.explode(dir))
-
- val session_options0 = options ++ entry.options
- val session_options = session_options0 ++ augment_options(name)
- val session_prefs =
- session_options.make_prefs(defaults = session_options0, filter = _.session_content)
-
- val build_prefs_digests =
- session_options.changed(defaults = options0, filter = _.session_content)
- .map(ch => SHA1.digest(ch.print_prefs) -> make_build_prefs(ch.name))
-
- val theories =
- entry.theories.map({ case (opts, thys) =>
- (session_options ++ opts,
- thys.map({ case ((thy, pos), _) =>
- val thy_name = Thy_Header.import_name(thy)
- if (illegal_theory(thy_name)) {
- error("Illegal theory name " + quote(thy_name) + Position.here(pos))
- }
- else (thy, pos) })) })
-
- val global_theories =
- for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
- yield {
- val thy_name = Path.explode(thy).file_name
- if (Long_Name.is_qualified(thy_name)) {
- error("Bad qualified name for global theory " +
- quote(thy_name) + Position.here(pos))
- }
- else thy_name
- }
-
- val conditions =
- theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted.
- map(x => (x, Isabelle_System.getenv(x) != ""))
-
- val document_files =
- entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })
-
- val export_files =
- entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })
-
- val meta_digest =
- SHA1.digest(
- (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
- entry.theories_no_position, conditions, entry.document_theories_no_position).toString)
-
- val meta_info =
- SHA1.shasum_meta_info(meta_digest) ::: SHA1.shasum_sorted(build_prefs_digests)
-
- val chapter_groups = chapter_defs(chapter).groups
- val groups = chapter_groups ::: entry.groups.filterNot(chapter_groups.contains)
-
- Info(name, chapter, dir_selected, entry.pos, groups, session_path,
- entry.parent, entry.description, directories, session_options, session_prefs,
- entry.imports, theories, global_theories, entry.document_theories, document_files,
- export_files, entry.export_classpath, meta_info)
- }
- catch {
- case ERROR(msg) =>
- error(msg + "\nThe error(s) above occurred in session entry " +
- quote(entry.name) + Position.here(entry.pos))
- }
- }
- }
-
- sealed case class Info(
- name: String,
- chapter: String,
- dir_selected: Boolean,
- pos: Position.T,
- groups: List[String],
- dir: Path,
- parent: Option[String],
- description: String,
- directories: List[Path],
- options: Options,
- session_prefs: String,
- imports: List[String],
- theories: List[(Options, List[(String, Position.T)])],
- global_theories: List[String],
- document_theories: List[(String, Position.T)],
- document_files: List[(Path, Path)],
- export_files: List[(Path, Int, List[String])],
- export_classpath: List[String],
- meta_info: SHA1.Shasum
- ) {
- def deps: List[String] = parent.toList ::: imports
-
- def deps_base(session_bases: String => Base): Base = {
- val parent_base = session_bases(parent.getOrElse(""))
- val imports_bases = imports.map(session_bases)
- parent_base.copy(
- known_theories =
- (for {
- base <- imports_bases.iterator
- (_, entry) <- base.known_theories.iterator
- } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _),
- known_loaded_files =
- imports_bases.iterator.map(_.known_loaded_files).
- foldLeft(parent_base.known_loaded_files)(_ ++ _))
- }
-
- def dirs: List[Path] = dir :: directories
-
- def main_group: Boolean = groups.contains("main")
- def doc_group: Boolean = groups.contains("doc")
-
- def timeout_ignored: Boolean =
- !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1)
-
- def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))
-
- def document_enabled: Boolean =
- options.string("document") match {
- case "" | "false" => false
- case "pdf" | "true" => true
- case doc => error("Bad document specification " + quote(doc))
- }
-
- def document_variants: List[Document_Build.Document_Variant] = {
- val variants =
- space_explode(':', options.string("document_variants")).
- map(Document_Build.Document_Variant.parse)
-
- val dups = Library.duplicates(variants.map(_.name))
- if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups))
-
- variants
- }
-
- def document_echo: Boolean = options.bool("document_echo")
-
- def documents: List[Document_Build.Document_Variant] = {
- val variants = document_variants
- if (!document_enabled || document_files.isEmpty) Nil else variants
- }
-
- def document_output: Option[Path] =
- options.string("document_output") match {
- case "" => None
- case s => Some(dir + Path.explode(s))
- }
-
- def browser_info: Boolean = options.bool("browser_info")
-
- lazy val bibtex_entries: Bibtex.Entries =
- (for {
- (document_dir, file) <- document_files.iterator
- if File.is_bib(file.file_name)
- } yield {
- val path = dir + document_dir + file
- Bibtex.Entries.parse(File.read(path), start = Token.Pos.file(File.standard_path(path)))
- }).foldRight(Bibtex.Entries.empty)(_ ::: _)
-
- def record_proofs: Boolean = options.int("record_proofs") >= 2
-
- def is_afp: Boolean = chapter == AFP.chapter
- def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains)
- }
-
- object Selection {
- val empty: Selection = Selection()
- val all: Selection = Selection(all_sessions = true)
- def session(session: String): Selection = Selection(sessions = List(session))
- }
-
- sealed case class Selection(
- requirements: Boolean = false,
- all_sessions: Boolean = false,
- base_sessions: List[String] = Nil,
- exclude_session_groups: List[String] = Nil,
- exclude_sessions: List[String] = Nil,
- session_groups: List[String] = Nil,
- sessions: List[String] = Nil
- ) {
- def ++ (other: Selection): Selection =
- Selection(
- requirements = requirements || other.requirements,
- all_sessions = all_sessions || other.all_sessions,
- base_sessions = Library.merge(base_sessions, other.base_sessions),
- exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups),
- exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions),
- session_groups = Library.merge(session_groups, other.session_groups),
- sessions = Library.merge(sessions, other.sessions))
- }
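[Illustrative note, not part of this changeset: selections combine pointwise
via "++", merging the respective name lists, e.g.

  Sessions.Selection(session_groups = List("main")) ++
    Sessions.Selection.session("HOL-Analysis")
]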
-
- object Structure {
- val empty: Structure = make(Options.empty)
-
- def make(
- options: Options,
- augment_options: String => List[Options.Spec] = _ => Nil,
- roots: List[Root_File] = Nil,
- infos: List[Info] = Nil
- ): Structure = {
- val chapter_defs: Chapter_Defs =
- roots.foldLeft(Chapter_Defs.empty) {
- case (defs1, root) =>
- root.entries.foldLeft(defs1) {
- case (defs2, entry: Chapter_Def) => defs2 + entry
- case (defs2, _) => defs2
- }
- }
-
- val options0 = Options.init0()
- val session_prefs = options.make_prefs(defaults = options0, filter = _.session_content)
-
- val root_infos = {
- var chapter = UNSORTED
- val root_infos = new mutable.ListBuffer[Info]
- for (root <- roots) {
- root.entries.foreach {
- case entry: Chapter_Entry => chapter = entry.name
- case entry: Session_Entry =>
- root_infos +=
- Info.make(chapter_defs, options0, options, augment_options,
- root.select, root.dir, chapter, entry)
- case _ =>
- }
- chapter = UNSORTED
- }
- root_infos.toList
- }
-
- val info_graph =
- (root_infos ::: infos).foldLeft(Graph.string[Info]) {
- case (graph, info) =>
- if (graph.defined(info.name)) {
- error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
- Position.here(graph.get_node(info.name).pos))
- }
- else graph.new_node(info.name, info)
- }
-
- def augment_graph(
- graph: Graph[String, Info],
- kind: String,
- edges: Info => Iterable[String]
- ) : Graph[String, Info] = {
- def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = {
- if (!g.defined(parent)) {
- error("Bad " + kind + " session " + quote(parent) + " for " +
- quote(name) + Position.here(pos))
- }
-
- try { g.add_edge_acyclic(parent, name) }
- catch {
- case exn: Graph.Cycles[_] =>
- error(cat_lines(exn.cycles.map(cycle =>
- "Cyclic session dependency of " +
- cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
- }
- }
- graph.iterator.foldLeft(graph) {
- case (g, (name, (info, _))) =>
- edges(info).foldLeft(g)(add_edge(info.pos, name, _, _))
- }
- }
-
- val build_graph = augment_graph(info_graph, "parent", _.parent)
- val imports_graph = augment_graph(build_graph, "imports", _.imports)
-
- val session_positions: List[(String, Position.T)] =
- (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList
-
- val session_directories: Map[JFile, String] =
- (for {
- session <- imports_graph.topological_order.iterator
- info = info_graph.get_node(session)
- dir <- info.dirs.iterator
- } yield (info, dir)).foldLeft(Map.empty[JFile, String]) {
- case (dirs, (info, dir)) =>
- val session = info.name
- val canonical_dir = dir.canonical_file
- dirs.get(canonical_dir) match {
- case Some(session1) =>
- val info1 = info_graph.get_node(session1)
- error("Duplicate use of directory " + dir +
- "\n for session " + quote(session1) + Position.here(info1.pos) +
- "\n vs. session " + quote(session) + Position.here(info.pos))
- case None => dirs + (canonical_dir -> session)
- }
- }
-
- val global_theories: Map[String, String] =
- (for {
- session <- imports_graph.topological_order.iterator
- info = info_graph.get_node(session)
- thy <- info.global_theories.iterator }
- yield (info, thy)
- ).foldLeft(Thy_Header.bootstrap_global_theories.toMap) {
- case (global, (info, thy)) =>
- val qualifier = info.name
- global.get(thy) match {
- case Some(qualifier1) if qualifier != qualifier1 =>
- error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
- case _ => global + (thy -> qualifier)
- }
- }
-
- new Structure(chapter_defs, session_prefs, session_positions, session_directories,
- global_theories, build_graph, imports_graph)
- }
- }
-
- final class Structure private[Sessions](
- chapter_defs: Chapter_Defs,
- val session_prefs: String,
- val session_positions: List[(String, Position.T)],
- val session_directories: Map[JFile, String],
- val global_theories: Map[String, String],
- val build_graph: Graph[String, Info],
- val imports_graph: Graph[String, Info]
- ) {
- sessions_structure =>
-
- def dest_session_directories: List[(String, String)] =
- for ((file, session) <- session_directories.toList)
- yield (File.standard_path(file), session)
-
- lazy val known_chapters: List[Chapter_Info] = {
- val chapter_sessions =
- Multi_Map.from(
- for ((_, (info, _)) <- build_graph.iterator)
- yield info.chapter -> info.name)
- val chapters1 =
- (for (entry <- chapter_defs.list.iterator) yield {
- val sessions = chapter_sessions.get_list(entry.name)
- Chapter_Info(entry.name, entry.pos, entry.groups, entry.description, sessions.sorted)
- }).toList
- val chapters2 =
- (for {
- (name, sessions) <- chapter_sessions.iterator_list
- if !chapters1.exists(_.name == name)
- } yield Chapter_Info(name, Position.none, Nil, "", sessions.sorted)).toList.sortBy(_.name)
- chapters1 ::: chapters2
- }
-
- def relevant_chapters: List[Chapter_Info] = known_chapters.filter(_.sessions.nonEmpty)
-
- def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph)
- def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph)
-
- def defined(name: String): Boolean = imports_graph.defined(name)
- def apply(name: String): Info = imports_graph.get_node(name)
- def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None
-
- def theory_qualifier(name: String): String =
- global_theories.getOrElse(name, Long_Name.qualifier(name))
- def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)
-
- def check_sessions(names: List[String]): Unit = {
- val bad_sessions = SortedSet(names.filterNot(defined): _*).toList
- if (bad_sessions.nonEmpty) {
- error("Undefined session(s): " + commas_quote(bad_sessions))
- }
- }
-
- def check_sessions(sel: Selection): Unit =
- check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions)
-
- private def selected(graph: Graph[String, Info], sel: Selection): List[String] = {
- check_sessions(sel)
-
- val select_group = sel.session_groups.toSet
- val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions)
-
- val selected0 =
- if (sel.all_sessions) graph.keys
- else {
- (for {
- (name, (info, _)) <- graph.iterator
- if info.dir_selected || select_session(name) || info.groups.exists(select_group)
- } yield name).toList
- }
-
- if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList
- else selected0
- }
-
- def selection(sel: Selection): Structure = {
- check_sessions(sel)
-
- val excluded = {
- val exclude_group = sel.exclude_session_groups.toSet
- val exclude_group_sessions =
- (for {
- (name, (info, _)) <- imports_graph.iterator
- if info.groups.exists(exclude_group)
- } yield name).toList
- imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet
- }
-
- def restrict(graph: Graph[String, Info]): Graph[String, Info] = {
- val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded)
- graph.restrict(graph.all_preds(sessions).toSet)
- }
-
- new Structure(chapter_defs, session_prefs, session_positions, session_directories,
- global_theories, restrict(build_graph), restrict(imports_graph))
- }
-
- def selection(session: String): Structure = selection(Selection.session(session))
-
- def selection_deps(
- selection: Selection,
- progress: Progress = new Progress,
- loading_sessions: Boolean = false,
- inlined_files: Boolean = false
- ): Deps = {
- val deps =
- Sessions.deps(sessions_structure.selection(selection),
- progress = progress, inlined_files = inlined_files)
-
- if (loading_sessions) {
- val selection_size = deps.sessions_structure.build_graph.size
- if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...")
- }
-
- deps
- }
-
- def build_hierarchy(session: String): List[String] =
- if (build_graph.defined(session)) build_graph.all_preds(List(session))
- else List(session)
-
- def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
- def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
- def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
- def build_topological_order: List[String] = build_graph.topological_order
-
- def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
- def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
- def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
- def imports_topological_order: List[String] = imports_graph.topological_order
-
- override def toString: String =
- imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")")
- }
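[Illustrative sketch, not part of this changeset: given a Structure (cf. the
load_structure sketch above), selection and build-order queries compose, e.g.

  val selected = structure.build_selection(Sessions.Selection(session_groups = List("main")))
  val build_order = structure.build_requirements(selected)
]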
-
-
- /* parser */
-
- private val CHAPTER_DEFINITION = "chapter_definition"
- private val CHAPTER = "chapter"
- private val SESSION = "session"
- private val DESCRIPTION = "description"
- private val DIRECTORIES = "directories"
- private val OPTIONS = "options"
- private val SESSIONS = "sessions"
- private val THEORIES = "theories"
- private val GLOBAL = "global"
- private val DOCUMENT_THEORIES = "document_theories"
- private val DOCUMENT_FILES = "document_files"
- private val EXPORT_FILES = "export_files"
- private val EXPORT_CLASSPATH = "export_classpath"
-
- val root_syntax: Outer_Syntax =
- Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + "in" +
- GLOBAL +
- (CHAPTER_DEFINITION, Keyword.THY_DECL) +
- (CHAPTER, Keyword.THY_DECL) +
- (SESSION, Keyword.THY_DECL) +
- (DESCRIPTION, Keyword.QUASI_COMMAND) +
- (DIRECTORIES, Keyword.QUASI_COMMAND) +
- (OPTIONS, Keyword.QUASI_COMMAND) +
- (SESSIONS, Keyword.QUASI_COMMAND) +
- (THEORIES, Keyword.QUASI_COMMAND) +
- (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
- (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
- (EXPORT_FILES, Keyword.QUASI_COMMAND) +
- (EXPORT_CLASSPATH, Keyword.QUASI_COMMAND)
-
- abstract class Entry
- object Chapter_Def {
- def empty(chapter: String): Chapter_Def = Chapter_Def(Position.none, chapter, Nil, "")
- }
- sealed case class Chapter_Def(
- pos: Position.T,
- name: String,
- groups: List[String],
- description: String
- ) extends Entry
- sealed case class Chapter_Entry(name: String) extends Entry
- sealed case class Session_Entry(
- pos: Position.T,
- name: String,
- groups: List[String],
- path: String,
- parent: Option[String],
- description: String,
- options: List[Options.Spec],
- imports: List[String],
- directories: List[String],
- theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
- document_theories: List[(String, Position.T)],
- document_files: List[(String, String)],
- export_files: List[(String, Int, List[String])],
- export_classpath: List[String]
- ) extends Entry {
- def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
- theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
- def document_theories_no_position: List[String] =
- document_theories.map(_._1)
- }
-
- object Chapter_Defs {
- val empty: Chapter_Defs = new Chapter_Defs(Nil)
- }
-
- class Chapter_Defs private(rev_list: List[Chapter_Def]) {
- def list: List[Chapter_Def] = rev_list.reverse
-
- override def toString: String =
- list.map(_.name).mkString("Chapter_Defs(", ", ", ")")
-
- def get(chapter: String): Option[Chapter_Def] =
- rev_list.find(_.name == chapter)
-
- def apply(chapter: String): Chapter_Def =
- get(chapter) getOrElse Chapter_Def.empty(chapter)
-
- def + (entry: Chapter_Def): Chapter_Defs =
- get(entry.name) match {
- case None => new Chapter_Defs(entry :: rev_list)
- case Some(old_entry) =>
- error("Duplicate chapter definition " + quote(entry.name) +
- Position.here(old_entry.pos) + Position.here(entry.pos))
- }
- }
-
- private object Parsers extends Options.Parsers {
- private val groups: Parser[List[String]] =
- ($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)
-
- private val description: Parser[String] =
- ($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")
-
- private val chapter_def: Parser[Chapter_Def] =
- command(CHAPTER_DEFINITION) ~! (position(chapter_name) ~ groups ~ description) ^^
- { case _ ~ ((a, pos) ~ b ~ c) => Chapter_Def(pos, a, b, c) }
-
- private val chapter_entry: Parser[Chapter_Entry] =
- command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter_Entry(a) }
-
- private val session_entry: Parser[Session_Entry] = {
- val options = $$$("[") ~> rep1sep(option_spec, $$$(",")) <~ $$$("]")
-
- val theory_entry =
- position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) }
-
- val theories =
- $$$(THEORIES) ~!
- ((options | success(Nil)) ~ rep1(theory_entry)) ^^
- { case _ ~ (x ~ y) => (x, y) }
-
- val document_theories =
- $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }
-
- val document_files =
- $$$(DOCUMENT_FILES) ~! (in_path_parens("document") ~ rep1(path)) ^^
- { case _ ~ (x ~ y) => y.map((x, _)) }
-
- val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0)
-
- val export_files =
- $$$(EXPORT_FILES) ~! (in_path_parens("export") ~ prune ~ rep1(embedded)) ^^
- { case _ ~ (x ~ y ~ z) => (x, y, z) }
-
- val export_classpath =
- $$$(EXPORT_CLASSPATH) ~! (rep1(embedded) | success(List("*:classpath/*.jar"))) ^^
- { case _ ~ x => x }
-
- command(SESSION) ~!
- (position(session_name) ~ groups ~ in_path(".") ~
- ($$$("=") ~!
- (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ description ~
- (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
- (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
- (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
- rep(theories) ~
- (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
- (rep(document_files) ^^ (x => x.flatten)) ~
- rep(export_files) ~
- opt(export_classpath)))) ^^
- { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) =>
- Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l, m.getOrElse(Nil)) }
- }
-
- def parse_root(path: Path): List[Entry] = {
- val toks = Token.explode(root_syntax.keywords, File.read(path))
- val start = Token.Pos.file(path.implode)
- val parser: Parser[Entry] = chapter_def | chapter_entry | session_entry
- parse_all(rep(parser), Token.reader(toks, start)) match {
- case Success(result, _) => result
- case bad => error(bad.toString)
- }
- }
- }
-
- def parse_root(path: Path): List[Entry] = Parsers.parse_root(path)
-
- def parse_root_entries(path: Path): List[Session_Entry] =
- Parsers.parse_root(path).flatMap(Library.as_subclass(classOf[Session_Entry]))
-
- def parse_roots(roots: Path): List[String] = {
- for {
- line <- split_lines(File.read(roots))
- if !(line == "" || line.startsWith("#"))
- } yield line
- }
-
-
- /* load sessions from certain directories */
-
- def is_session_dir(dir: Path, ssh: SSH.System = SSH.Local): Boolean =
- ssh.is_file(dir + ROOT) || ssh.is_file(dir + ROOTS)
-
- def check_session_dir(dir: Path): Path =
- if (is_session_dir(dir)) File.pwd() + dir.expand
- else {
- error("Bad session root directory: " + dir.expand.toString +
- "\n (missing \"ROOT\" or \"ROOTS\")")
- }
-
- def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = {
- val default_dirs = Components.directories().filter(is_session_dir(_))
- for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) }
- yield (select, dir.canonical)
- }
-
- sealed case class Root_File(path: Path, select: Boolean) {
- val key: JFile = path.canonical_file
- def dir: Path = path.dir
-
- lazy val entries: List[Entry] = Parsers.parse_root(path)
- }
-
- def load_root_files(
- dirs: List[Path] = Nil,
- select_dirs: List[Path] = Nil
- ): List[Root_File] = {
- def load_dir(select: Boolean, dir: Path): List[Root_File] =
- load_root(select, dir) ::: load_roots(select, dir)
-
- def load_root(select: Boolean, dir: Path): List[Root_File] = {
- val root = dir + ROOT
- if (root.is_file) List(Root_File(root, select)) else Nil
- }
-
- def load_roots(select: Boolean, dir: Path): List[Root_File] = {
- val roots = dir + ROOTS
- if (roots.is_file) {
- for {
- entry <- parse_roots(roots)
- dir1 =
- try { check_session_dir(dir + Path.explode(entry)) }
- catch {
- case ERROR(msg) =>
- error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
- }
- res <- load_dir(select, dir1)
- } yield res
- }
- else Nil
- }
-
- val raw_roots: List[Root_File] =
- for {
- (select, dir) <- directories(dirs, select_dirs)
- root <- load_dir(select, check_session_dir(dir))
- } yield root
-
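-    // Deduplicate ROOT files via their canonical file (Root_File.key): the first
-    // occurrence determines the order, repeated occurrences merge their "select" status.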
- var next_root = 0
- var seen_roots = Map.empty[JFile, (Root_File, Int)]
- for (root <- raw_roots) {
- seen_roots.get(root.key) match {
- case None =>
- seen_roots += (root.key -> (root, next_root))
- next_root += 1
- case Some((root0, next0)) =>
- val root1 = root0.copy(select = root0.select || root.select)
- seen_roots += (root0.key -> (root1, next0))
- }
- }
- seen_roots.valuesIterator.toList.sortBy(_._2).map(_._1)
- }
-
-
- /* Isabelle tool wrapper */
-
- val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions",
- Scala_Project.here,
- { args =>
- var base_sessions: List[String] = Nil
- var select_dirs: List[Path] = Nil
- var requirements = false
- var exclude_session_groups: List[String] = Nil
- var all_sessions = false
- var build_graph = false
- var dirs: List[Path] = Nil
- var session_groups: List[String] = Nil
- var exclude_sessions: List[String] = Nil
-
- val getopts = Getopts("""
-Usage: isabelle sessions [OPTIONS] [SESSIONS ...]
-
- Options are:
- -B NAME include session NAME and all descendants
- -D DIR include session directory and select its sessions
- -R refer to requirements of selected sessions
- -X NAME exclude sessions from group NAME and all descendants
- -a select all sessions
- -b follow session build dependencies (default: source imports)
- -d DIR include session directory
- -g NAME select session group NAME
- -x NAME exclude session NAME and all descendants
-
- Explore the structure of Isabelle sessions and print result names in
- topological order (on stdout).
-""",
- "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
- "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
- "R" -> (_ => requirements = true),
- "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
- "a" -> (_ => all_sessions = true),
- "b" -> (_ => build_graph = true),
- "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
- "g:" -> (arg => session_groups = session_groups ::: List(arg)),
- "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))
-
- val sessions = getopts(args)
-
- val options = Options.init()
-
- val selection =
- Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions,
- exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions,
- session_groups = session_groups, sessions = sessions)
- val sessions_structure =
- load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection)
-
- val order =
- if (build_graph) sessions_structure.build_topological_order
- else sessions_structure.imports_topological_order
- for (name <- order) Output.writeln(name, stdout = true)
- })
-}
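
For orientation, a minimal sketch (not part of this changeset) of driving the
session-structure API above from Isabelle/Scala, mirroring "isabelle sessions -a":

  // assumption: evaluated within a regular Isabelle/Scala process (e.g. "isabelle scala")
  val options = Options.init()
  val structure = Sessions.load_structure(options)
  val selected = structure.selection(Sessions.Selection(all_sessions = true))
  selected.imports_topological_order.foreach(name => Output.writeln(name, stdout = true))
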
--- a/src/Pure/Thy/store.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,512 +0,0 @@
-/* Title: Pure/Thy/store.scala
- Author: Makarius
-
-Persistent store for session content: within file-system and/or SQL database.
-*/
-
-package isabelle
-
-
-import java.sql.SQLException
-
-
-object Store {
- def apply(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
- new Store(options, cache)
-
-
- /* session build info */
-
- sealed case class Build_Info(
- sources: SHA1.Shasum,
- input_heaps: SHA1.Shasum,
- output_heap: SHA1.Shasum,
- return_code: Int,
- uuid: String
- ) {
- def ok: Boolean = return_code == 0
- }
-
-
- /* session sources */
-
- sealed case class Source_File(
- name: String,
- digest: SHA1.Digest,
- compressed: Boolean,
- body: Bytes,
- cache: Compress.Cache
- ) {
- override def toString: String = name
-
- def bytes: Bytes = if (compressed) body.uncompress(cache = cache) else body
- }
-
- object Sources {
- def load(session_base: Sessions.Base, cache: Compress.Cache = Compress.Cache.none): Sources =
- new Sources(
- session_base.session_sources.foldLeft(Map.empty) {
- case (sources, (path, digest)) =>
- def err(): Nothing = error("Incoherent digest for source file: " + path)
- val name = File.symbolic_path(path)
- sources.get(name) match {
- case Some(source_file) =>
- if (source_file.digest == digest) sources else err()
- case None =>
- val bytes = Bytes.read(path)
- if (bytes.sha1_digest == digest) {
- val (compressed, body) =
- bytes.maybe_compress(Compress.Options_Zstd(), cache = cache)
- val file = Source_File(name, digest, compressed, body, cache)
- sources + (name -> file)
- }
- else err()
- }
- })
- }
-
- class Sources private(rep: Map[String, Source_File]) extends Iterable[Source_File] {
- override def toString: String = rep.values.toList.sortBy(_.name).mkString("Sources(", ", ", ")")
- override def iterator: Iterator[Source_File] = rep.valuesIterator
-
- def get(name: String): Option[Source_File] = rep.get(name)
- def apply(name: String): Source_File =
- get(name).getOrElse(error("Missing session sources entry " + quote(name)))
- }
-
-
- /* SQL data model */
-
- object private_data extends SQL.Data() {
- override lazy val tables = SQL.Tables(Session_Info.table, Sources.table)
-
- object Session_Info {
- val session_name = SQL.Column.string("session_name").make_primary_key
-
- // Build_Log.Session_Info
- val session_timing = SQL.Column.bytes("session_timing")
- val command_timings = SQL.Column.bytes("command_timings")
- val theory_timings = SQL.Column.bytes("theory_timings")
- val ml_statistics = SQL.Column.bytes("ml_statistics")
- val task_statistics = SQL.Column.bytes("task_statistics")
- val errors = SQL.Column.bytes("errors")
- val build_log_columns =
- List(session_name, session_timing, command_timings, theory_timings,
- ml_statistics, task_statistics, errors)
-
- // Build_Info
- val sources = SQL.Column.string("sources")
- val input_heaps = SQL.Column.string("input_heaps")
- val output_heap = SQL.Column.string("output_heap")
- val return_code = SQL.Column.int("return_code")
- val uuid = SQL.Column.string("uuid")
- val build_columns = List(sources, input_heaps, output_heap, return_code, uuid)
-
- val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
- }
-
- object Sources {
- val session_name = SQL.Column.string("session_name").make_primary_key
- val name = SQL.Column.string("name").make_primary_key
- val digest = SQL.Column.string("digest")
- val compressed = SQL.Column.bool("compressed")
- val body = SQL.Column.bytes("body")
-
- val table =
- SQL.Table("isabelle_sources", List(session_name, name, digest, compressed, body))
-
- def where_equal(session_name: String, name: String = ""): SQL.Source =
- SQL.where_and(
- Sources.session_name.equal(session_name),
- if_proper(name, Sources.name.equal(name)))
- }
-
- def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
- db.execute_query_statementO[Bytes](
- Session_Info.table.select(List(column), sql = Session_Info.session_name.where_equal(name)),
- res => res.bytes(column)
- ).getOrElse(Bytes.empty)
-
- def read_properties(
- db: SQL.Database, name: String, column: SQL.Column, cache: Term.Cache
- ): List[Properties.T] = Properties.uncompress(read_bytes(db, name, column), cache = cache)
-
- def read_session_timing(db: SQL.Database, name: String, cache: Term.Cache): Properties.T =
- Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache)
-
- def read_command_timings(db: SQL.Database, name: String): Bytes =
- read_bytes(db, name, Session_Info.command_timings)
-
- def read_theory_timings(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
- read_properties(db, name, Session_Info.theory_timings, cache)
-
- def read_ml_statistics(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
- read_properties(db, name, Session_Info.ml_statistics, cache)
-
- def read_task_statistics(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
- read_properties(db, name, Session_Info.task_statistics, cache)
-
- def read_errors(db: SQL.Database, name: String, cache: Term.Cache): List[String] =
- Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache)
-
- def read_build(db: SQL.Database, name: String): Option[Store.Build_Info] =
- db.execute_query_statementO[Store.Build_Info](
- Session_Info.table.select(sql = Session_Info.session_name.where_equal(name)),
- { res =>
- val uuid =
- try { Option(res.string(Session_Info.uuid)).getOrElse("") }
- catch { case _: SQLException => "" }
- Store.Build_Info(
- SHA1.fake_shasum(res.string(Session_Info.sources)),
- SHA1.fake_shasum(res.string(Session_Info.input_heaps)),
- SHA1.fake_shasum(res.string(Session_Info.output_heap)),
- res.int(Session_Info.return_code),
- uuid)
- })
-
- def write_session_info(
- db: SQL.Database,
- cache: Compress.Cache,
- session_name: String,
- build_log: Build_Log.Session_Info,
- build: Build_Info
- ): Unit = {
- db.execute_statement(Session_Info.table.insert(), body =
- { stmt =>
- stmt.string(1) = session_name
- stmt.bytes(2) = Properties.encode(build_log.session_timing)
- stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache)
- stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache)
- stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = cache)
- stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = cache)
- stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = cache)
- stmt.string(8) = build.sources.toString
- stmt.string(9) = build.input_heaps.toString
- stmt.string(10) = build.output_heap.toString
- stmt.int(11) = build.return_code
- stmt.string(12) = build.uuid
- })
- }
-
- def write_sources(
- db: SQL.Database,
- session_name: String,
- source_files: Iterable[Source_File]
- ): Unit = {
- db.execute_batch_statement(Sources.table.insert(), batch =
- for (source_file <- source_files) yield { (stmt: SQL.Statement) =>
- stmt.string(1) = session_name
- stmt.string(2) = source_file.name
- stmt.string(3) = source_file.digest.toString
- stmt.bool(4) = source_file.compressed
- stmt.bytes(5) = source_file.body
- })
- }
-
- def read_sources(
- db: SQL.Database,
- session_name: String,
- name: String,
- cache: Compress.Cache
- ): List[Source_File] = {
- db.execute_query_statement(
- Sources.table.select(
- sql = Sources.where_equal(session_name, name = name) + SQL.order_by(List(Sources.name))),
- List.from[Source_File],
- { res =>
- val res_name = res.string(Sources.name)
- val digest = SHA1.fake_digest(res.string(Sources.digest))
- val compressed = res.bool(Sources.compressed)
- val body = res.bytes(Sources.body)
- Source_File(res_name, digest, compressed, body, cache)
- }
- )
- }
- }
-}
-
-class Store private(val options: Options, val cache: Term.Cache) {
- store =>
-
- override def toString: String = "Store(output_dir = " + output_dir.absolute + ")"
-
-
- /* directories */
-
- val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER")
- val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER")
-
- def system_heaps: Boolean = options.bool("system_heaps")
-
- val output_dir: Path =
- if (system_heaps) system_output_dir else user_output_dir
-
- val input_dirs: List[Path] =
- if (system_heaps) List(system_output_dir)
- else List(user_output_dir, system_output_dir)
-
- def presentation_dir: Path =
- if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
- else Path.explode("$ISABELLE_BROWSER_INFO")
-
-
- /* file names */
-
- def heap(name: String): Path = Path.basic(name)
- def database(name: String): Path = Path.basic("log") + Path.basic(name).db
- def log(name: String): Path = Path.basic("log") + Path.basic(name)
- def log_gz(name: String): Path = log(name).gz
-
- def output_heap(name: String): Path = output_dir + heap(name)
- def output_database(name: String): Path = output_dir + database(name)
- def output_log(name: String): Path = output_dir + log(name)
- def output_log_gz(name: String): Path = output_dir + log_gz(name)
-
-
- /* heap */
-
- def find_heap(name: String): Option[Path] =
- input_dirs.map(_ + heap(name)).find(_.is_file)
-
- def the_heap(name: String): Path =
- find_heap(name) getOrElse
- error("Missing heap image for session " + quote(name) + " -- expected in:\n" +
- cat_lines(input_dirs.map(dir => " " + File.standard_path(dir))))
-
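-  // Digest of the session heap: prefer the entry known to the SQL database
-  // server, otherwise fall back on the digest stored with the heap file.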
- def heap_shasum(database_server: Option[SQL.Database], name: String): SHA1.Shasum = {
- def get_database: Option[SHA1.Digest] = {
- for {
- db <- database_server
- digest <- ML_Heap.get_entries(db, List(name)).valuesIterator.nextOption()
- } yield digest
- }
-
- def get_file: Option[SHA1.Digest] =
- find_heap(name).flatMap(ML_Heap.read_file_digest)
-
- get_database orElse get_file match {
- case Some(digest) => SHA1.shasum(digest, name)
- case None => SHA1.no_shasum
- }
- }
-
-
- /* databases for build process and session content */
-
- def find_database(name: String): Option[Path] =
- input_dirs.map(_ + database(name)).find(_.is_file)
-
- def build_database_server: Boolean = options.bool("build_database_server")
- def build_database: Boolean = options.bool("build_database")
-
- def open_server(): SSH.Server =
- PostgreSQL.open_server(options,
- host = options.string("build_database_host"),
- port = options.int("build_database_port"),
- ssh_host = options.string("build_database_ssh_host"),
- ssh_port = options.int("build_database_ssh_port"),
- ssh_user = options.string("build_database_ssh_user"))
-
- def open_database_server(server: SSH.Server = SSH.no_server): PostgreSQL.Database =
- PostgreSQL.open_database_server(options, server = server,
- user = options.string("build_database_user"),
- password = options.string("build_database_password"),
- database = options.string("build_database_name"),
- host = options.string("build_database_host"),
- port = options.int("build_database_port"),
- ssh_host = options.string("build_database_ssh_host"),
- ssh_port = options.int("build_database_ssh_port"),
- ssh_user = options.string("build_database_ssh_user"))
-
- def maybe_open_database_server(server: SSH.Server = SSH.no_server): Option[SQL.Database] =
- if (build_database_server) Some(open_database_server(server = server)) else None
-
- def open_build_database(path: Path, server: SSH.Server = SSH.no_server): SQL.Database =
- if (build_database_server) open_database_server(server = server)
- else SQLite.open_database(path, restrict = true)
-
- def maybe_open_build_database(
- path: Path = Path.explode("$ISABELLE_HOME_USER/build.db"),
- server: SSH.Server = SSH.no_server
- ): Option[SQL.Database] = {
- if (build_database) Some(open_build_database(path, server = server)) else None
- }
-
- def try_open_database(
- name: String,
- output: Boolean = false,
- server: SSH.Server = SSH.no_server,
- server_mode: Boolean = build_database_server
- ): Option[SQL.Database] = {
- def check(db: SQL.Database): Option[SQL.Database] =
- if (output || session_info_exists(db)) Some(db) else { db.close(); None }
-
- if (server_mode) check(open_database_server(server = server))
- else if (output) Some(SQLite.open_database(output_database(name)))
- else {
- (for {
- dir <- input_dirs.view
- path = dir + database(name) if path.is_file
- db <- check(SQLite.open_database(path))
- } yield db).headOption
- }
- }
-
- def error_database(name: String): Nothing =
- error("Missing build database for session " + quote(name))
-
- def open_database(
- name: String,
- output: Boolean = false,
- server: SSH.Server = SSH.no_server
- ): SQL.Database = {
- try_open_database(name, output = output, server = server) getOrElse error_database(name)
- }
-
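-  // Remove heap, database, and log files of a session from the output directories:
-  // None if there was nothing to clean (neither files nor database entries),
-  // otherwise Some(true) iff all file deletions succeeded.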
- def clean_output(
- database_server: Option[SQL.Database],
- name: String,
- session_init: Boolean = false
- ): Option[Boolean] = {
- val relevant_db =
- database_server match {
- case Some(db) =>
- ML_Heap.clean_entry(db, name)
- clean_session_info(db, name)
- case None => false
- }
-
- val del =
- for {
- dir <-
- (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir))
- file <- List(heap(name), database(name), log(name), log_gz(name))
- path = dir + file if path.is_file
- } yield path.file.delete
-
- if (database_server.isEmpty && session_init) {
- using(open_database(name, output = true))(clean_session_info(_, name))
- }
-
- if (relevant_db || del.nonEmpty) Some(del.forall(identity)) else None
- }
-
- def check_output(
- database_server: Option[SQL.Database],
- name: String,
- session_options: Options,
- sources_shasum: SHA1.Shasum,
- input_shasum: SHA1.Shasum,
- fresh_build: Boolean,
- store_heap: Boolean
- ): (Boolean, SHA1.Shasum) = {
- def no_check: (Boolean, SHA1.Shasum) = (false, SHA1.no_shasum)
-
- def check(db: SQL.Database): (Boolean, SHA1.Shasum) =
- read_build(db, name) match {
- case Some(build) =>
- val output_shasum = heap_shasum(if (db.is_postgresql) Some(db) else None, name)
- val current =
- !fresh_build &&
- build.ok &&
- Sessions.eq_sources(session_options, build.sources, sources_shasum) &&
- build.input_heaps == input_shasum &&
- build.output_heap == output_shasum &&
- !(store_heap && output_shasum.is_empty)
- (current, output_shasum)
- case None => no_check
- }
-
- database_server match {
- case Some(db) => if (session_info_exists(db)) check(db) else no_check
- case None => using_option(try_open_database(name))(check) getOrElse no_check
- }
- }
-
-
- /* session info */
-
- def session_info_exists(db: SQL.Database): Boolean =
- Store.private_data.tables.forall(db.exists_table)
-
- def session_info_defined(db: SQL.Database, name: String): Boolean =
- db.execute_query_statementB(
- Store.private_data.Session_Info.table.select(List(Store.private_data.Session_Info.session_name),
- sql = Store.private_data.Session_Info.session_name.where_equal(name)))
-
- def clean_session_info(db: SQL.Database, name: String): Boolean = {
- Export.clean_session(db, name)
- Document_Build.clean_session(db, name)
-
- Store.private_data.transaction_lock(db, create = true, label = "Store.clean_session_info") {
- val already_defined = session_info_defined(db, name)
-
- db.execute_statement(
- SQL.multi(
- Store.private_data.Session_Info.table.delete(
- sql = Store.private_data.Session_Info.session_name.where_equal(name)),
- Store.private_data.Sources.table.delete(
- sql = Store.private_data.Sources.where_equal(name))))
-
- already_defined
- }
- }
-
- def write_session_info(
- db: SQL.Database,
- session_name: String,
- sources: Store.Sources,
- build_log: Build_Log.Session_Info,
- build: Store.Build_Info
- ): Unit = {
- Store.private_data.transaction_lock(db, label = "Store.write_session_info") {
- for (source_files <- sources.iterator.toList.grouped(200)) {
- Store.private_data.write_sources(db, session_name, source_files)
- }
- Store.private_data.write_session_info(db, cache.compress, session_name, build_log, build)
- }
- }
-
- def read_session_timing(db: SQL.Database, session: String): Properties.T =
- Store.private_data.transaction_lock(db, label = "Store.read_session_timing") {
- Store.private_data.read_session_timing(db, session, cache)
- }
-
- def read_command_timings(db: SQL.Database, session: String): Bytes =
- Store.private_data.transaction_lock(db, label = "Store.read_command_timings") {
- Store.private_data.read_command_timings(db, session)
- }
-
- def read_theory_timings(db: SQL.Database, session: String): List[Properties.T] =
- Store.private_data.transaction_lock(db, label = "Store.read_theory_timings") {
- Store.private_data.read_theory_timings(db, session, cache)
- }
-
- def read_ml_statistics(db: SQL.Database, session: String): List[Properties.T] =
- Store.private_data.transaction_lock(db, label = "Store.read_ml_statistics") {
- Store.private_data.read_ml_statistics(db, session, cache)
- }
-
- def read_task_statistics(db: SQL.Database, session: String): List[Properties.T] =
- Store.private_data.transaction_lock(db, label = "Store.read_task_statistics") {
- Store.private_data.read_task_statistics(db, session, cache)
- }
-
- def read_theories(db: SQL.Database, session: String): List[String] =
- read_theory_timings(db, session).flatMap(Markup.Name.unapply)
-
- def read_errors(db: SQL.Database, session: String): List[String] =
- Store.private_data.transaction_lock(db, label = "Store.read_errors") {
- Store.private_data.read_errors(db, session, cache)
- }
-
- def read_build(db: SQL.Database, session: String): Option[Store.Build_Info] =
- Store.private_data.transaction_lock(db, label = "Store.read_build") {
- if (session_info_exists(db)) Store.private_data.read_build(db, session) else None
- }
-
- def read_sources(db: SQL.Database, session: String, name: String = ""): List[Store.Source_File] =
- Store.private_data.transaction_lock(db, label = "Store.read_sources") {
- Store.private_data.read_sources(db, session, name, cache.compress)
- }
-}
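
For orientation, a minimal sketch (not part of this changeset; "HOL" is just an
example session name) of reading build results through the Store API above:

  // assumption: evaluated within a regular Isabelle/Scala process
  val store = Store(Options.init())
  store.try_open_database("HOL") match {
    case None => store.error_database("HOL")
    case Some(db) =>
      using(db) { db =>
        val ok = store.read_build(db, "HOL").exists(_.ok)
        Output.writeln("HOL built successfully: " + ok)
      }
  }
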
--- a/src/Pure/Tools/build.ML Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,122 +0,0 @@
-(* Title: Pure/Tools/build.ML
- Author: Makarius
-
-Build Isabelle sessions.
-*)
-
-signature BUILD =
-sig
- type hook = string -> (theory * Document_Output.segment list) list -> unit
- val add_hook: hook -> unit
-end;
-
-structure Build: BUILD =
-struct
-
-(* session timing *)
-
-fun session_timing f x =
- let
- val start = Timing.start ();
- val y = f x;
- val timing = Timing.result start;
-
- val threads = string_of_int (Multithreading.max_threads ());
- val props = [("threads", threads)] @ Markup.timing_properties timing;
- val _ = Output.protocol_message (Markup.session_timing :: props) [];
- in y end;
-
-
-(* build theories *)
-
-type hook = string -> (theory * Document_Output.segment list) list -> unit;
-
-local
- val hooks = Synchronized.var "Build.hooks" ([]: hook list);
-in
-
-fun add_hook hook = Synchronized.change hooks (cons hook);
-
-fun apply_hooks qualifier loaded_theories =
- Synchronized.value hooks
- |> List.app (fn hook => hook qualifier loaded_theories);
-
-end;
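-
-(*NB: hooks are applied via apply_hooks only after all theories of a session
-  have been loaded successfully, and they run synchronously within the
-  "build_session" protocol command below.*)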
-
-
-fun build_theories qualifier (options, thys) =
- let
- val _ = ML_Profiling.check_mode (Options.string options "profiling");
- val condition = space_explode "," (Options.string options "condition");
- val conds = filter_out (can getenv_strict) condition;
- val loaded_theories =
- if null conds then
- (Options.set_default options;
- Isabelle_Process.init_options ();
- Future.fork I;
- (Thy_Info.use_theories options qualifier
- |> Unsynchronized.setmp print_mode
- (space_explode "," (Options.string options "print_mode") @ print_mode_value ())) thys)
- else
- (Output.physical_stderr ("Skipping theories " ^ commas_quote (map #1 thys) ^
- " (undefined " ^ commas conds ^ ")\n"); [])
- in loaded_theories end;
-
-
-(* build session *)
-
-val _ =
- Protocol_Command.define_bytes "build_session"
- (fn [resources_yxml, args_yxml] =>
- let
- val _ = Resources.init_session_yxml resources_yxml;
- val (session_name, theories) =
- YXML.parse_body_bytes args_yxml |>
- let
- open XML.Decode;
- val position = Position.of_properties o properties;
- in pair string (list (pair Options.decode (list (pair string position)))) end;
-
- val _ = Session.init session_name;
-
- fun build () =
- let
- val res =
- theories |>
- (map (build_theories session_name)
- |> session_timing
- |> Exn.capture);
- val res1 =
- (case res of
- Exn.Res loaded_theories =>
- Exn.capture (apply_hooks session_name) (flat loaded_theories)
- | Exn.Exn exn => Exn.Exn exn);
- val res2 = Exn.capture_body Session.finish;
-
- val _ = Resources.finish_session_base ();
- val _ = Par_Exn.release_all [res1, res2];
- val _ =
- if session_name = Context.PureN
- then Theory.install_pure (Thy_Info.get_theory Context.PureN) else ();
- in () end;
-
- fun exec e =
- if can Theory.get_pure () then
- Isabelle_Thread.fork (Isabelle_Thread.params "build_session") e
- |> ignore
- else e ();
- in
- exec (fn () =>
- (case Exn.capture_body (Future.interruptible_task build) of
- Exn.Res () => (0, [])
- | Exn.Exn exn =>
- (case Exn.capture Runtime.exn_message_list exn of
- Exn.Res errs => (1, errs)
- | Exn.Exn _ (*sic!*) => (2, ["CRASHED"])))
- |> let open XML.Encode in pair int (list string) end
- |> single
- |> Output.protocol_message Markup.build_session_finished)
- end
- | _ => raise Match);
-
-end;
--- a/src/Pure/Tools/build.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,916 +0,0 @@
-/* Title: Pure/Tools/build.scala
- Author: Makarius
- Options: :folding=explicit:
-
-Command-line tools to build and manage Isabelle sessions.
-*/
-
-package isabelle
-
-import scala.collection.mutable
-import scala.util.matching.Regex
-
-
-object Build {
- /** "isabelle build" **/
-
- /* options */
-
- def hostname(options: Options): String =
- Isabelle_System.hostname(options.string("build_hostname"))
-
- def engine_name(options: Options): String = options.string("build_engine")
-
-
-
- /* context */
-
- sealed case class Context(
- store: Store,
- engine: Engine,
- build_deps: isabelle.Sessions.Deps,
- afp_root: Option[Path] = None,
- build_hosts: List[Build_Cluster.Host] = Nil,
- ml_platform: String = Isabelle_System.getenv("ML_PLATFORM"),
- hostname: String = Isabelle_System.hostname(),
- numa_shuffling: Boolean = false,
- build_heap: Boolean = false,
- max_jobs: Int = 1,
- fresh_build: Boolean = false,
- no_build: Boolean = false,
- session_setup: (String, Session) => Unit = (_, _) => (),
- build_uuid: String = UUID.random().toString,
- master: Boolean = false
- ) {
- override def toString: String =
- "Build.Context(build_uuid = " + quote(build_uuid) + if_proper(master, ", master = true") + ")"
-
- def build_options: Options = store.options
-
- def sessions_structure: isabelle.Sessions.Structure = build_deps.sessions_structure
-
- def worker_active: Boolean = max_jobs > 0
- }
-
-
- /* results */
-
- object Results {
- def apply(
- context: Context,
- results: Map[String, Process_Result] = Map.empty,
- other_rc: Int = Process_Result.RC.ok
- ): Results = {
- new Results(context.store, context.build_deps, results, other_rc)
- }
- }
-
- class Results private(
- val store: Store,
- val deps: Sessions.Deps,
- results: Map[String, Process_Result],
- other_rc: Int
- ) {
- def cache: Term.Cache = store.cache
-
- def sessions_ok: List[String] =
- (for {
- name <- deps.sessions_structure.build_topological_order.iterator
- result <- results.get(name) if result.ok
- } yield name).toList
-
- def info(name: String): Sessions.Info = deps.sessions_structure(name)
- def sessions: Set[String] = results.keySet
- def cancelled(name: String): Boolean = !results(name).defined
- def apply(name: String): Process_Result = results(name).strict
-
- val rc: Int =
- Process_Result.RC.merge(other_rc,
- Process_Result.RC.merge(results.valuesIterator.map(_.strict.rc)))
- def ok: Boolean = rc == Process_Result.RC.ok
-
- lazy val unfinished: List[String] = sessions.iterator.filterNot(apply(_).ok).toList.sorted
-
- override def toString: String = rc.toString
- }
-
-
- /* engine */
-
- object Engine {
- lazy val services: List[Engine] =
- Isabelle_System.make_services(classOf[Engine])
-
- def apply(name: String): Engine =
- services.find(_.name == name).getOrElse(error("Bad build engine " + quote(name)))
- }
-
- class Engine(val name: String) extends Isabelle_System.Service {
- override def toString: String = name
-
- def build_options(options: Options, build_hosts: List[Build_Cluster.Host] = Nil): Options = {
- val options1 = options + "completion_limit=0" + "editor_tracing_messages=0"
- if (build_hosts.isEmpty) options1
- else options1 + "build_database_server" + "build_database"
- }
-
- final def build_store(options: Options,
- build_hosts: List[Build_Cluster.Host] = Nil,
- cache: Term.Cache = Term.Cache.make()
- ): Store = {
- val store = Store(build_options(options, build_hosts = build_hosts), cache = cache)
- Isabelle_System.make_directory(store.output_dir + Path.basic("log"))
- Isabelle_Fonts.init()
- store
- }
-
- def open_build_process(
- build_context: Context,
- build_progress: Progress,
- server: SSH.Server
- ): Build_Process = new Build_Process(build_context, build_progress, server)
-
- final def run_build_process(
- context: Context,
- progress: Progress,
- server: SSH.Server
- ): Results = {
- Isabelle_Thread.uninterruptible {
- using(open_build_process(context, progress, server))(_.run())
- }
- }
- }
-
- class Default_Engine extends Engine("") { override def toString: String = "<default>" }
-
-
- /* build */
-
- def build(
- options: Options,
- build_hosts: List[Build_Cluster.Host] = Nil,
- selection: Sessions.Selection = Sessions.Selection.empty,
- browser_info: Browser_Info.Config = Browser_Info.Config.none,
- progress: Progress = new Progress,
- check_unknown_files: Boolean = false,
- build_heap: Boolean = false,
- clean_build: Boolean = false,
- afp_root: Option[Path] = None,
- dirs: List[Path] = Nil,
- select_dirs: List[Path] = Nil,
- infos: List[Sessions.Info] = Nil,
- numa_shuffling: Boolean = false,
- max_jobs: Int = 1,
- list_files: Boolean = false,
- check_keywords: Set[String] = Set.empty,
- fresh_build: Boolean = false,
- no_build: Boolean = false,
- soft_build: Boolean = false,
- export_files: Boolean = false,
- augment_options: String => List[Options.Spec] = _ => Nil,
- session_setup: (String, Session) => Unit = (_, _) => (),
- cache: Term.Cache = Term.Cache.make()
- ): Results = {
- val build_engine = Engine(engine_name(options))
-
- val store = build_engine.build_store(options, build_hosts = build_hosts, cache = cache)
- val build_options = store.options
-
- using(store.open_server()) { server =>
- using_optional(store.maybe_open_database_server(server = server)) { database_server =>
-
-
- /* session selection and dependencies */
-
- val full_sessions =
- Sessions.load_structure(build_options, dirs = AFP.make_dirs(afp_root) ::: dirs,
- select_dirs = select_dirs, infos = infos, augment_options = augment_options)
- val full_sessions_selection = full_sessions.imports_selection(selection)
-
- val build_deps = {
- val deps0 =
- Sessions.deps(full_sessions.selection(selection), progress = progress, inlined_files = true,
- list_files = list_files, check_keywords = check_keywords).check_errors
-
- if (soft_build && !fresh_build) {
- val outdated =
- deps0.sessions_structure.build_topological_order.flatMap(name =>
- store.try_open_database(name, server = server) match {
- case Some(db) =>
- using(db)(store.read_build(_, name)) match {
- case Some(build) if build.ok =>
- val session_options = deps0.sessions_structure(name).options
- val session_sources = deps0.sources_shasum(name)
- if (Sessions.eq_sources(session_options, build.sources, session_sources)) None
- else Some(name)
- case _ => Some(name)
- }
- case None => Some(name)
- })
-
- Sessions.deps(full_sessions.selection(Sessions.Selection(sessions = outdated)),
- progress = progress, inlined_files = true).check_errors
- }
- else deps0
- }
-
-
- /* check unknown files */
-
- if (check_unknown_files) {
- val source_files =
- (for {
- (_, base) <- build_deps.session_bases.iterator
- (path, _) <- base.session_sources.iterator
- } yield path).toList
- Mercurial.check_files(source_files)._2 match {
- case Nil =>
- case unknown_files =>
- progress.echo_warning("Unknown files (not part of the underlying Mercurial repository):" +
- unknown_files.map(File.standard_path).sorted.mkString("\n ", "\n ", ""))
- }
- }
-
-
- /* build process and results */
-
- val build_context =
- Context(store, build_engine, build_deps, afp_root = afp_root, build_hosts = build_hosts,
- hostname = hostname(build_options), build_heap = build_heap,
- numa_shuffling = numa_shuffling, max_jobs = max_jobs, fresh_build = fresh_build,
- no_build = no_build, session_setup = session_setup, master = true)
-
- if (clean_build) {
- for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
- store.clean_output(database_server, name) match {
- case None =>
- case Some(true) => progress.echo("Cleaned " + name)
- case Some(false) => progress.echo(name + " FAILED to clean")
- }
- }
- }
-
- val results = build_engine.run_build_process(build_context, progress, server)
-
- if (export_files) {
- for (name <- full_sessions_selection.iterator if results(name).ok) {
- val info = results.info(name)
- if (info.export_files.nonEmpty) {
- progress.echo("Exporting " + info.name + " ...")
- for ((dir, prune, pats) <- info.export_files) {
- Export.export_files(store, name, info.dir + dir,
- progress = if (progress.verbose) progress else new Progress,
- export_prune = prune,
- export_patterns = pats)
- }
- }
- }
- }
-
- val presentation_sessions =
- results.sessions_ok.filter(name => browser_info.enabled(results.info(name)))
- if (presentation_sessions.nonEmpty && !progress.stopped) {
- Browser_Info.build(browser_info, results.store, results.deps, presentation_sessions,
- progress = progress, server = server)
- }
-
- if (results.unfinished.nonEmpty && (progress.verbose || !no_build)) {
- progress.echo("Unfinished session(s): " + commas(results.unfinished))
- }
-
- results
- }
- }
- }
-
-
- /* build logic image */
-
- def build_logic(options: Options, logic: String,
- progress: Progress = new Progress,
- build_heap: Boolean = false,
- dirs: List[Path] = Nil,
- fresh: Boolean = false,
- strict: Boolean = false
- ): Int = {
- val selection = Sessions.Selection.session(logic)
- val rc =
- if (!fresh && build(options, selection = selection,
- build_heap = build_heap, no_build = true, dirs = dirs).ok) Process_Result.RC.ok
- else {
- progress.echo("Build started for Isabelle/" + logic + " ...")
- build(options, selection = selection, progress = progress,
- build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc
- }
- if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic) else rc
- }
-
-
- /* command-line wrapper */
-
- val isabelle_tool1 = Isabelle_Tool("build", "build and manage Isabelle sessions",
- Scala_Project.here,
- { args =>
- var afp_root: Option[Path] = None
- val base_sessions = new mutable.ListBuffer[String]
- val select_dirs = new mutable.ListBuffer[Path]
- val build_hosts = new mutable.ListBuffer[Build_Cluster.Host]
- var numa_shuffling = false
- var browser_info = Browser_Info.Config.none
- var requirements = false
- var soft_build = false
- val exclude_session_groups = new mutable.ListBuffer[String]
- var all_sessions = false
- var build_heap = false
- var clean_build = false
- val dirs = new mutable.ListBuffer[Path]
- var export_files = false
- var fresh_build = false
- val session_groups = new mutable.ListBuffer[String]
- var max_jobs = 1
- var check_keywords: Set[String] = Set.empty
- var list_files = false
- var no_build = false
- var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS)
- var verbose = false
- val exclude_sessions = new mutable.ListBuffer[String]
-
- val getopts = Getopts("""
-Usage: isabelle build [OPTIONS] [SESSIONS ...]
-
- Options are:
- -A ROOT include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
- -B NAME include session NAME and all descendants
- -D DIR include session directory and select its sessions
- -H HOSTS additional build cluster host specifications, of the form
- "NAMES:PARAMETERS" (separated by commas)
- -N cyclic shuffling of NUMA CPU nodes (performance tuning)
- -P DIR enable HTML/PDF presentation in directory (":" for default)
- -R refer to requirements of selected sessions
- -S soft build: only observe changes of sources, not heap images
- -X NAME exclude sessions from group NAME and all descendants
- -a select all sessions
- -b build heap images
- -c clean build
- -d DIR include session directory
- -e export files from session specification into file-system
- -f fresh build
- -g NAME select session group NAME
- -j INT maximum number of parallel jobs (default 1)
- -k KEYWORD check theory sources for conflicts with proposed keywords
- -l list session source files
- -n no build -- take existing session build databases
- -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
- -v verbose
- -x NAME exclude session NAME and all descendants
-
- Build and manage Isabelle sessions: ML heaps, session databases, documents.
-
- Parameters for host specifications (option -H), apart from system options:
-""" + Library.indent_lines(4, Build_Cluster.Host.parameters.print()) +
-"""
-
- Notable system options: see "isabelle options -l -t build"
-
- Notable system settings:
-""" + Library.indent_lines(4, Build_Log.Settings.show()) + "\n",
- "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
- "B:" -> (arg => base_sessions += arg),
- "D:" -> (arg => select_dirs += Path.explode(arg)),
- "H:" -> (arg => build_hosts ++= Build_Cluster.Host.parse(Registry.global, arg)),
- "N" -> (_ => numa_shuffling = true),
- "P:" -> (arg => browser_info = Browser_Info.Config.make(arg)),
- "R" -> (_ => requirements = true),
- "S" -> (_ => soft_build = true),
- "X:" -> (arg => exclude_session_groups += arg),
- "a" -> (_ => all_sessions = true),
- "b" -> (_ => build_heap = true),
- "c" -> (_ => clean_build = true),
- "d:" -> (arg => dirs += Path.explode(arg)),
- "e" -> (_ => export_files = true),
- "f" -> (_ => fresh_build = true),
- "g:" -> (arg => session_groups += arg),
- "j:" -> (arg => max_jobs = Value.Int.parse(arg)),
- "k:" -> (arg => check_keywords = check_keywords + arg),
- "l" -> (_ => list_files = true),
- "n" -> (_ => no_build = true),
- "o:" -> (arg => options = options + arg),
- "v" -> (_ => verbose = true),
- "x:" -> (arg => exclude_sessions += arg))
-
- val sessions = getopts(args)
-
- val progress = new Console_Progress(verbose = verbose)
-
- progress.echo(
- "Started at " + Build_Log.print_date(progress.start) +
-          " (" + Isabelle_System.ml_identifier() + " on " + hostname(options) + ")",
- verbose = true)
- progress.echo(Build_Log.Settings.show() + "\n", verbose = true)
-
- val results =
- progress.interrupt_handler {
- build(options,
- selection = Sessions.Selection(
- requirements = requirements,
- all_sessions = all_sessions,
- base_sessions = base_sessions.toList,
- exclude_session_groups = exclude_session_groups.toList,
- exclude_sessions = exclude_sessions.toList,
- session_groups = session_groups.toList,
- sessions = sessions),
- browser_info = browser_info,
- progress = progress,
- check_unknown_files = Mercurial.is_repository(Path.ISABELLE_HOME),
- build_heap = build_heap,
- clean_build = clean_build,
- afp_root = afp_root,
- dirs = dirs.toList,
- select_dirs = select_dirs.toList,
- numa_shuffling = Host.numa_check(progress, numa_shuffling),
- max_jobs = max_jobs,
- list_files = list_files,
- check_keywords = check_keywords,
- fresh_build = fresh_build,
- no_build = no_build,
- soft_build = soft_build,
- export_files = export_files,
- build_hosts = build_hosts.toList)
- }
- val stop_date = Date.now()
- val elapsed_time = stop_date.time - progress.start.time
-
- progress.echo("\nFinished at " + Build_Log.print_date(stop_date), verbose = true)
-
- val total_timing =
- results.sessions.iterator.map(a => results(a).timing).foldLeft(Timing.zero)(_ + _).
- copy(elapsed = elapsed_time)
- progress.echo(total_timing.message_resources)
-
- sys.exit(results.rc)
- })
-
-
-
- /** build cluster management **/
-
- /* identified builds */
-
- def read_builds(build_database: Option[SQL.Database]): List[Build_Process.Build] =
- build_database match {
- case None => Nil
- case Some(db) => Build_Process.read_builds(db)
- }
-
- def print_builds(build_database: Option[SQL.Database], builds: List[Build_Process.Build]): String =
- {
- val print_database =
- build_database match {
- case None => ""
- case Some(db) => " (database " + db + ")"
- }
- if (builds.isEmpty) "No build processes" + print_database
- else "Build processes" + print_database + builds.map(build => "\n " + build.print).mkString
- }
-
- def find_builds(
- build_database: Option[SQL.Database],
- build_id: String,
- builds: List[Build_Process.Build]
- ): Build_Process.Build = {
- (build_id, builds.length) match {
- case (UUID(_), _) if builds.exists(_.build_uuid == build_id) =>
- builds.find(_.build_uuid == build_id).get
- case ("", 1) => builds.head
- case ("", 0) => error(print_builds(build_database, builds))
- case _ =>
- cat_error("Cannot identify build process " + quote(build_id),
- print_builds(build_database, builds))
- }
- }
-
-
- /* "isabelle build_process" */
-
- def build_process(
- options: Options,
- list_builds: Boolean = false,
- remove_builds: Boolean = false,
- force: Boolean = false,
- progress: Progress = new Progress
- ): Unit = {
- val build_engine = Engine(engine_name(options))
- val store = build_engine.build_store(options)
-
- using(store.open_server()) { server =>
- using_optional(store.maybe_open_build_database(server = server)) { build_database =>
- def print(builds: List[Build_Process.Build]): Unit =
- if (list_builds) progress.echo(print_builds(build_database, builds))
-
- build_database match {
- case None => print(Nil)
- case Some(db) =>
- Build_Process.private_data.transaction_lock(db, create = true, label = "build_process") {
- val builds = Build_Process.private_data.read_builds(db)
- val remove =
- if (!remove_builds) Nil
- else if (force) builds.map(_.build_uuid)
- else builds.flatMap(build => if (build.active) None else Some(build.build_uuid))
-
- print(builds)
- if (remove.nonEmpty) {
- if (remove_builds) {
- progress.echo("Removing " + commas(remove) + " ...")
- Build_Process.private_data.remove_builds(db, remove)
- print(Build_Process.private_data.read_builds(db))
- }
- }
- }
- }
- }
- }
- }
-
- val isabelle_tool2 = Isabelle_Tool("build_process", "manage session build process",
- Scala_Project.here,
- { args =>
- var force = false
- var list_builds = false
- var options =
- Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS :::
- List(
- Options.Spec.make("build_database_server"),
- Options.Spec.make("build_database")))
- var remove_builds = false
-
- val getopts = Getopts("""
-Usage: isabelle build_process [OPTIONS]
-
- Options are:
- -f extra force for option -r
- -l list build processes
- -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
- -r remove data from build processes: inactive processes (default)
- or all processes (option -f)
-""",
- "f" -> (_ => force = true),
- "l" -> (_ => list_builds = true),
- "o:" -> (arg => options = options + arg),
- "r" -> (_ => remove_builds = true))
-
- val more_args = getopts(args)
- if (more_args.nonEmpty) getopts.usage()
-
- val progress = new Console_Progress()
-
- build_process(options, list_builds = list_builds, remove_builds = remove_builds,
- force = force, progress = progress)
- })
-
-
-
- /* "isabelle build_worker" */
-
- def build_worker_command(
- host: Build_Cluster.Host,
- ssh: SSH.System = SSH.Local,
- build_options: List[Options.Spec] = Nil,
- build_id: String = "",
- isabelle_home: Path = Path.current,
- afp_root: Option[Path] = None,
- dirs: List[Path] = Nil,
- quiet: Boolean = false,
- verbose: Boolean = false
- ): String = {
- val options = build_options ::: Options.Spec.eq("build_hostname", host.name) :: host.options
- ssh.bash_path(isabelle_home + Path.explode("bin/isabelle")) + " build_worker" +
- if_proper(build_id, " -B " + Bash.string(build_id)) +
- if_proper(afp_root, " -A " + ssh.bash_path(afp_root.get)) +
- dirs.map(dir => " -d " + ssh.bash_path(dir)).mkString +
- if_proper(host.numa, " -N") + " -j" + host.jobs +
- Options.Spec.bash_strings(options, bg = true) +
- if_proper(quiet, " -q") +
- if_proper(verbose, " -v")
- }
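-
-  // Hypothetical example, producing the remote command line for a cluster host
-  // "work1" with 4 parallel jobs:
-  //   Build_Cluster.Host.parse(Registry.global, "work1:jobs=4").map(build_worker_command(_))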
-
- def build_worker(
- options: Options,
- build_id: String = "",
- progress: Progress = new Progress,
- afp_root: Option[Path] = None,
- dirs: List[Path] = Nil,
- numa_shuffling: Boolean = false,
- max_jobs: Int = 1
- ): Results = {
- val build_engine = Engine(engine_name(options))
- val store = build_engine.build_store(options)
- val build_options = store.options
-
- using(store.open_server()) { server =>
- using_optional(store.maybe_open_build_database(server = server)) { build_database =>
- val builds = read_builds(build_database)
-
- val build_master = find_builds(build_database, build_id, builds.filter(_.active))
-
- val sessions_structure =
- Sessions.load_structure(build_options, dirs = AFP.make_dirs(afp_root) ::: dirs).
- selection(Sessions.Selection(sessions = build_master.sessions))
-
- val build_deps =
- Sessions.deps(sessions_structure, progress = progress, inlined_files = true).check_errors
-
- val build_context =
- Context(store, build_engine, build_deps, afp_root = afp_root,
- hostname = hostname(build_options), numa_shuffling = numa_shuffling,
- max_jobs = max_jobs, build_uuid = build_master.build_uuid)
-
- build_engine.run_build_process(build_context, progress, server)
- }
- }
- }
-
- val isabelle_tool3 = Isabelle_Tool("build_worker", "start worker for session build process",
- Scala_Project.here,
- { args =>
- var afp_root: Option[Path] = None
- var build_id = ""
- var numa_shuffling = false
- val dirs = new mutable.ListBuffer[Path]
- var max_jobs = 1
- var options =
- Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS :::
- List(
- Options.Spec.make("build_database_server"),
- Options.Spec.make("build_database")))
- var quiet = false
- var verbose = false
-
- val getopts = Getopts("""
-Usage: isabelle build_worker [OPTIONS]
-
- Options are:
- -A ROOT include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
- -B UUID existing UUID for build process (default: from database)
- -N cyclic shuffling of NUMA CPU nodes (performance tuning)
- -d DIR include session directory
- -j INT maximum number of parallel jobs (default 1)
- -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
- -q quiet mode: no progress
- -v verbose
-""",
- "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
- "B:" -> (arg => build_id = arg),
- "N" -> (_ => numa_shuffling = true),
- "d:" -> (arg => dirs += Path.explode(arg)),
- "j:" -> (arg => max_jobs = Value.Int.parse(arg)),
- "o:" -> (arg => options = options + arg),
- "q" -> (_ => quiet = true),
- "v" -> (_ => verbose = true))
-
- val more_args = getopts(args)
- if (more_args.nonEmpty) getopts.usage()
-
- val progress =
- if (quiet && verbose) new Progress { override def verbose: Boolean = true }
- else if (quiet) new Progress
- else new Console_Progress(verbose = verbose)
-
- val results =
- progress.interrupt_handler {
- build_worker(options,
- build_id = build_id,
- progress = progress,
- afp_root = afp_root,
- dirs = dirs.toList,
- numa_shuffling = Host.numa_check(progress, numa_shuffling),
- max_jobs = max_jobs)
- }
-
- if (!results.ok) sys.exit(results.rc)
- })
-
-
-
- /** "isabelle build_log" **/
-
- /* theory markup/messages from session database */
-
- def read_theory(
- theory_context: Export.Theory_Context,
- unicode_symbols: Boolean = false
- ): Option[Document.Snapshot] = {
- def decode_bytes(bytes: Bytes): String =
- Symbol.output(unicode_symbols, UTF8.decode_permissive(bytes))
-
- def read(name: String): Export.Entry = theory_context(name, permissive = true)
-
- def read_xml(name: String): XML.Body =
- YXML.parse_body(decode_bytes(read(name).bytes), cache = theory_context.cache)
-
- def read_source_file(name: String): Store.Source_File =
- theory_context.session_context.source_file(name)
-
- for {
- id <- theory_context.document_id()
- (thy_file, blobs_files) <- theory_context.files(permissive = true)
- }
- yield {
- val master_dir =
- Path.explode(Url.strip_base_name(thy_file).getOrElse(
- error("Cannot determine theory master directory: " + quote(thy_file))))
-
- val blobs =
- blobs_files.map { name =>
- val path = Path.explode(name)
- val src_path = File.relative_path(master_dir, path).getOrElse(path)
-
- val file = read_source_file(name)
- val bytes = file.bytes
- val text = decode_bytes(bytes)
- val chunk = Symbol.Text_Chunk(text)
-
- Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) ->
- Document.Blobs.Item(bytes, text, chunk, changed = false)
- }
-
- val thy_source = decode_bytes(read_source_file(thy_file).bytes)
- val thy_xml = read_xml(Export.MARKUP)
- val blobs_xml =
- for (i <- (1 to blobs.length).toList)
- yield read_xml(Export.MARKUP + i)
-
- val markups_index = Command.Markup_Index.make(blobs.map(_._1))
- val markups =
- Command.Markups.make(
- for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
- yield index -> Markup_Tree.from_XML(xml))
-
- val results =
- Command.Results.make(
- for (case elem@XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
- yield i -> elem)
-
- val command =
- Command.unparsed(thy_source, theory = true, id = id,
- node_name = Document.Node.Name(thy_file, theory = theory_context.theory),
- blobs_info = Command.Blobs_Info.make(blobs),
- markups = markups, results = results)
-
- val doc_blobs = Document.Blobs.make(blobs)
-
- Document.State.init.snippet(command, doc_blobs)
- }
- }
-
-
- /* print messages */
-
- def print_log(
- options: Options,
- sessions: List[String],
- theories: List[String] = Nil,
- message_head: List[Regex] = Nil,
- message_body: List[Regex] = Nil,
- progress: Progress = new Progress,
- margin: Double = Pretty.default_margin,
- breakgain: Double = Pretty.default_breakgain,
- metric: Pretty.Metric = Symbol.Metric,
- unicode_symbols: Boolean = false
- ): Unit = {
- val store = Store(options)
- val session = new Session(options, Resources.bootstrap)
-
- def check(filter: List[Regex], make_string: => String): Boolean =
- filter.isEmpty || {
- val s = Output.clean_yxml(make_string)
-        filter.forall(r => r.findFirstIn(s).nonEmpty)
- }
-
- def print(session_name: String): Unit = {
- using(Export.open_session_context0(store, session_name)) { session_context =>
- val result =
- for {
- db <- session_context.session_db()
- theories = store.read_theories(db, session_name)
- errors = store.read_errors(db, session_name)
- info <- store.read_build(db, session_name)
- } yield (theories, errors, info.return_code)
- result match {
- case None => store.error_database(session_name)
- case Some((used_theories, errors, rc)) =>
- theories.filterNot(used_theories.toSet) match {
- case Nil =>
- case bad => error("Unknown theories " + commas_quote(bad))
- }
- val print_theories =
- if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet)
-
- for (thy <- print_theories) {
- val thy_heading = "\nTheory " + quote(thy) + " (in " + session_name + ")" + ":"
-
- Build.read_theory(session_context.theory(thy), unicode_symbols = unicode_symbols) match {
- case None => progress.echo(thy_heading + " MISSING")
- case Some(snapshot) =>
- val rendering = new Rendering(snapshot, options, session)
- val messages =
- rendering.text_messages(Text.Range.full)
- .filter(message => progress.verbose || Protocol.is_exported(message.info))
- if (messages.nonEmpty) {
- val line_document = Line.Document(snapshot.node.source)
- val buffer = new mutable.ListBuffer[String]
- for (Text.Info(range, elem) <- messages) {
- val line = line_document.position(range.start).line1
- val pos = Position.Line_File(line, snapshot.node_name.node)
- def message_text: String =
- Protocol.message_text(elem, heading = true, pos = pos,
- margin = margin, breakgain = breakgain, metric = metric)
- val ok =
- check(message_head, Protocol.message_heading(elem, pos)) &&
- check(message_body, XML.content(Pretty.unformatted(List(elem))))
- if (ok) buffer += message_text
- }
- if (buffer.nonEmpty) {
- progress.echo(thy_heading)
- buffer.foreach(progress.echo(_))
- }
- }
- }
- }
-
- if (errors.nonEmpty) {
- val msg = Symbol.output(unicode_symbols, cat_lines(errors))
- progress.echo("\nBuild errors:\n" + Output.error_message_text(msg))
- }
- if (rc != Process_Result.RC.ok) {
- progress.echo("\n" + Process_Result.RC.print_long(rc))
- }
- }
- }
- }
-
- val errors = new mutable.ListBuffer[String]
- for (session_name <- sessions) {
- Exn.result(print(session_name)) match {
- case Exn.Res(_) =>
- case Exn.Exn(exn) => errors += Exn.message(exn)
- }
- }
- if (errors.nonEmpty) error(cat_lines(errors.toList))
- }
-
-
- /* command-line wrapper */
-
- val isabelle_tool4 = Isabelle_Tool("build_log", "print messages from session build database",
- Scala_Project.here,
- { args =>
- /* arguments */
-
- val message_head = new mutable.ListBuffer[Regex]
- val message_body = new mutable.ListBuffer[Regex]
- var unicode_symbols = false
- val theories = new mutable.ListBuffer[String]
- var margin = Pretty.default_margin
- var options = Options.init()
- var verbose = false
-
- val getopts = Getopts("""
-Usage: isabelle build_log [OPTIONS] [SESSIONS ...]
-
- Options are:
- -H REGEX filter messages by matching against head
- -M REGEX filter messages by matching against body
- -T NAME restrict to given theories (multiple options possible)
- -U output Unicode symbols
- -m MARGIN margin for pretty printing (default: """ + margin + """)
- -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
- -v print all messages, including information etc.
-
- Print messages from the session build database of the given sessions,
- without any checks against current sources or session structure: results
- from old sessions or failed builds can be printed as well.
-
- Multiple options -H and -M are conjunctive: all given patterns need to
- match. Patterns match any substring, but ^ or $ may be used to match the
- start or end explicitly.
-""",
- "H:" -> (arg => message_head += arg.r),
- "M:" -> (arg => message_body += arg.r),
- "T:" -> (arg => theories += arg),
- "U" -> (_ => unicode_symbols = true),
- "m:" -> (arg => margin = Value.Double.parse(arg)),
- "o:" -> (arg => options = options + arg),
- "v" -> (_ => verbose = true))
-
- val sessions = getopts(args)
-
- val progress = new Console_Progress(verbose = verbose)
-
- if (sessions.isEmpty) progress.echo_warning("No sessions to print")
- else {
- print_log(options, sessions, theories = theories.toList, message_head = message_head.toList,
- message_body = message_body.toList, margin = margin, progress = progress,
- unicode_symbols = unicode_symbols)
- }
- })
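-
- /* Example (illustrative, hypothetical session and patterns):
-
-      isabelle build_log -H "Error" -M "Type unification" -U HOL
-
-    prints Unicode messages of session HOL whose heading matches "Error"
-    and whose body mentions "Type unification"; multiple -H/-M patterns
-    would all have to match. */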
-}
--- a/src/Pure/Tools/build_cluster.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,376 +0,0 @@
-/* Title: Pure/Tools/build_cluster.scala
- Author: Makarius
-
-Management of build cluster: independent ssh hosts with access to shared
-PostgreSQL server.
-
-NB: extensible classes allow quite different implementations in user-space,
-via the service class Build.Engine and overridable methods
-Build.Engine.open_build_process(), Build_Process.open_build_cluster().
-*/
-
-package isabelle
-
-
-object Build_Cluster {
- /* host specifications */
-
- object Host {
- private val rfc822_specials = """()<>@,;:\"[]"""
-
- private val HOSTNAME = "hostname"
- private val USER = "user"
- private val PORT = "port"
- private val JOBS = "jobs"
- private val NUMA = "numa"
- private val DIRS = "dirs"
- private val SHARED = "shared"
-
- val parameters: Options =
- Options.inline("""
- option hostname : string = "" -- "explicit SSH hostname"
- option user : string = "" -- "explicit SSH user"
- option port : int = 0 -- "explicit SSH port"
- option jobs : int = 1 -- "maximum number of parallel jobs"
- option numa : bool = false -- "cyclic shuffling of NUMA CPU nodes"
- option dirs : string = "" -- "additional session directories (separated by colon)"
- option shared : bool = false -- "shared directory: omit sync + init"
- """)
-
- def is_parameter(spec: Options.Spec): Boolean = parameters.defined(spec.name)
-
- lazy val test_options: Options = Options.init0()
-
- def apply(
- name: String = "",
- hostname: String = parameters.string(HOSTNAME),
- user: String = parameters.string(USER),
- port: Int = parameters.int(PORT),
- jobs: Int = parameters.int(JOBS),
- numa: Boolean = parameters.bool(NUMA),
- dirs: String = parameters.string(DIRS),
- shared: Boolean = parameters.bool(SHARED),
- options: List[Options.Spec] = Nil
- ): Host = {
- Path.split(dirs) // check
- new Host(name, hostname, user, port, jobs, numa, dirs, shared, options)
- }
-
- def parse(registry: Registry, str: String): List[Host] = {
- def err(msg: String): Nothing =
- cat_error(msg, "The error(s) above occurred in host specification " + quote(str))
-
- val names = str.takeWhile(c => !rfc822_specials.contains(c) || c == ',')
- val more_specs =
- try {
- val n = str.length
- val m = names.length
- val l =
- if (m == n) n
- else if (str(m) == ':') m + 1
- else error("Missing \":\" after host name")
- Options.Spec.parse(str.substring(l))
- }
- catch { case ERROR(msg) => err(msg) }
-
- def get_registry(a: String): Registry.Cluster.Value =
- Registry.Cluster.try_unqualify(a) match {
- case Some(b) => Registry.Cluster.get(registry, b)
- case None => List(a -> Registry.Host.get(registry, a))
- }
-
- for (name <- space_explode(',', names); (host, host_specs) <- get_registry(name))
- yield {
- val (params, options) =
- try {
- val (specs1, specs2) = (host_specs ::: more_specs).partition(is_parameter)
- (parameters ++ specs1, { test_options ++ specs2; specs2 })
- }
- catch { case ERROR(msg) => err(msg) }
-
- apply(name = host,
- hostname = params.string(HOSTNAME),
- user = params.string(USER),
- port = params.int(PORT),
- jobs = params.int(JOBS),
- numa = params.bool(NUMA),
- dirs = params.string(DIRS),
- shared = params.bool(SHARED),
- options = options)
- }
- }
-
- def parse_single(registry: Registry, str: String): Host =
- Library.the_single(parse(registry, str), "Single host expected: " + quote(str))
- }
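-
- /* Illustrative host specifications (hypothetical names; registry as in
-    Host.parse above):
-
-      "server1,server2:jobs=4,numa"
-        -- two hosts, each with 4 parallel jobs and NUMA shuffling
-
-      "worker:hostname=10.0.0.2,user=build,shared"
-        -- explicit SSH address/user, shared directory (omit sync + init)
- */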
-
- class Host(
- val name: String,
- val hostname: String,
- val user: String,
- val port: Int,
- val jobs: Int,
- val numa: Boolean,
- val dirs: String,
- val shared: Boolean,
- val options: List[Options.Spec]
- ) {
- host =>
-
- def ssh_host: String = proper_string(hostname) getOrElse name
- def is_local: Boolean = SSH.is_local(ssh_host)
-
- override def toString: String = print
-
- def print: String = {
- val params =
- List(
- if (host.hostname.isEmpty) "" else Options.Spec.print(Host.HOSTNAME, host.hostname),
- if (host.user.isEmpty) "" else Options.Spec.print(Host.USER, host.user),
- if (host.port == 0) "" else Options.Spec.print(Host.PORT, host.port.toString),
- if (host.jobs == 1) "" else Options.Spec.print(Host.JOBS, host.jobs.toString),
- if_proper(host.numa, Host.NUMA),
- if_proper(dirs, Options.Spec.print(Host.DIRS, host.dirs)),
- if_proper(host.shared, Host.SHARED)
- ).filter(_.nonEmpty)
- val rest = (params ::: host.options.map(_.print)).mkString(",")
-
- SSH.print_local(host.name) + if_proper(rest, ":" + rest)
- }
-
- def message(msg: String): String = "Host " + quote(host.name) + if_proper(msg, ": " + msg)
-
- def open_ssh(options: Options): SSH.System =
- SSH.open_system(options ++ host.options, ssh_host, port = host.port, user = host.user)
-
- def open_session(build_context: Build.Context, progress: Progress = new Progress): Session = {
- val ssh_options = build_context.build_options ++ host.options
- val ssh = open_ssh(build_context.build_options)
- new Session(build_context, host, ssh_options, ssh, progress)
- }
- }
-
-
- /* SSH sessions */
-
- class Session(
- val build_context: Build.Context,
- val host: Host,
- val options: Options,
- val ssh: SSH.System,
- val progress: Progress
- ) extends AutoCloseable {
- override def toString: String = ssh.toString
-
- val remote_identifier: String = options.string("build_cluster_identifier")
- val remote_root: Path = Path.explode(options.string("build_cluster_root"))
- val remote_isabelle_home: Path = remote_root + Path.explode("isabelle")
- val remote_afp_root: Option[Path] =
- if (build_context.afp_root.isEmpty) None
- else Some(remote_isabelle_home + Path.explode("AFP"))
-
- lazy val remote_isabelle: Other_Isabelle =
- Other_Isabelle(remote_isabelle_home, isabelle_identifier = remote_identifier, ssh = ssh)
-
- def sync(): Other_Isabelle = {
- Sync.sync(options, Rsync.Context(ssh = ssh), remote_isabelle_home,
- afp_root = build_context.afp_root,
- purge_heaps = true,
- preserve_jars = true)
- remote_isabelle
- }
-
- def init(): Unit = remote_isabelle.init()
-
- def benchmark(): Unit = {
- val script =
- Benchmark.benchmark_command(host, ssh = ssh, isabelle_home = remote_isabelle_home)
- remote_isabelle.bash(script).check
- }
-
- def start(): Process_Result = {
- val remote_ml_platform = remote_isabelle.getenv("ML_PLATFORM")
- if (remote_ml_platform != build_context.ml_platform) {
- error("Bad ML_PLATFORM: found " + remote_ml_platform +
- ", but expected " + build_context.ml_platform)
- }
- val script =
- Build.build_worker_command(host,
- ssh = ssh,
- build_options = List(options.spec("build_database_server")),
- build_id = build_context.build_uuid,
- isabelle_home = remote_isabelle_home,
- afp_root = remote_afp_root,
- dirs = Path.split(host.dirs).map(remote_isabelle.expand_path),
- quiet = true)
- remote_isabelle.bash(script).check
- }
-
- override def close(): Unit = ssh.close()
- }
-
- class Result(val host: Host, val process_result: Exn.Result[Process_Result]) {
- def rc: Int = Process_Result.RC(process_result)
- def ok: Boolean = rc == Process_Result.RC.ok
- }
-
-
- /* build clusters */
-
- object none extends Build_Cluster {
- override def toString: String = "Build_Cluster.none"
- }
-
- def make(build_context: Build.Context, progress: Progress = new Progress): Build_Cluster = {
- val remote_hosts = build_context.build_hosts.filterNot(_.is_local)
- if (remote_hosts.isEmpty) none
- else new Remote_Build_Cluster(build_context, remote_hosts, progress = progress)
- }
-}
-
-// NB: extensible via Build.Engine.open_build_process() and Build_Process.open_build_cluster()
-trait Build_Cluster extends AutoCloseable {
- def rc: Int = Process_Result.RC.ok
- def ok: Boolean = rc == Process_Result.RC.ok
- def return_code(rc: Int): Unit = ()
- def return_code(res: Process_Result): Unit = return_code(res.rc)
- def return_code(exn: Throwable): Unit = return_code(Process_Result.RC(exn))
- def open(): Unit = ()
- def init(): Unit = ()
- def benchmark(): Unit = ()
- def start(): Unit = ()
- def active(): Boolean = false
- def join: List[Build_Cluster.Result] = Nil
- def stop(): Unit = { join; close() }
- override def close(): Unit = ()
-}
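-
-// Hypothetical user-space extension (sketch only): the no-op defaults of
-// Build_Cluster may be overridden selectively, e.g. to trace life-cycle events.
-class Logging_Build_Cluster(progress: Progress) extends Build_Cluster {
- override def open(): Unit = progress.echo("cluster: open")
- override def start(): Unit = progress.echo("cluster: start")
- override def close(): Unit = progress.echo("cluster: close")
-}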
-
-class Remote_Build_Cluster(
- val build_context: Build.Context,
- val remote_hosts: List[Build_Cluster.Host],
- val progress: Progress = new Progress
-) extends Build_Cluster {
- require(remote_hosts.nonEmpty && !remote_hosts.exists(_.is_local), "remote hosts required")
-
- override def toString: String =
- remote_hosts.iterator.map(_.name).mkString("Build_Cluster(", ", ", ")")
-
-
- /* cumulative return code */
-
- private val _rc = Synchronized(Process_Result.RC.ok)
-
- override def rc: Int = _rc.value
-
- override def return_code(rc: Int): Unit =
- _rc.change(rc0 => Process_Result.RC.merge(rc0, rc))
-
- def capture[A](host: Build_Cluster.Host, op: String)(
- e: => A,
- msg: String = host.message(op + " ..."),
- err: Throwable => String = { exn =>
- return_code(exn)
- host.message("failed to " + op + "\n" + Exn.print(exn))
- }
- ): Exn.Result[A] = {
- progress.capture(e, msg = msg, err = err)
- }
-
-
- /* open remote sessions */
-
- private var _sessions = List.empty[Build_Cluster.Session]
-
- override def open(): Unit = synchronized {
- require(_sessions.isEmpty, "build cluster already open")
-
- val attempts =
- Par_List.map((host: Build_Cluster.Host) =>
- capture(host, "open") { host.open_session(build_context, progress = progress) },
- remote_hosts, thread = true)
-
- if (attempts.forall(Exn.is_res)) {
- _sessions = attempts.map(Exn.the_res)
- }
- else {
- for (case Exn.Res(session) <- attempts) session.close()
- error("Failed to connect build cluster")
- }
- }
-
-
- /* init and benchmark remote Isabelle distributions */
-
- private var _init = List.empty[Future[Unit]]
-
- override def init(): Unit = synchronized {
- require(_sessions.nonEmpty, "build cluster not yet open")
-
- if (_init.isEmpty) {
- _init =
- for (session <- _sessions if !session.host.shared) yield {
- Future.thread(session.host.message("session")) {
- capture(session.host, "sync") { session.sync() }
- capture(session.host, "init") { session.init() }
- }
- }
- }
- }
-
- override def benchmark(): Unit = synchronized {
- _init.foreach(_.join)
- _init =
- for (session <- _sessions if !session.host.shared) yield {
- Future.thread(session.host.message("session")) {
- capture(session.host, "benchmark") { session.benchmark() }
- }
- }
- _init.foreach(_.join)
- }
-
-
- /* workers */
-
- private var _workers = List.empty[Future[Process_Result]]
-
- override def active(): Boolean = synchronized { _workers.nonEmpty }
-
- override def start(): Unit = synchronized {
- require(_sessions.nonEmpty, "build cluster not yet open")
- require(_workers.isEmpty, "build cluster already active")
-
- init()
- _init.foreach(_.join)
-
- _workers =
- for (session <- _sessions) yield {
- Future.thread(session.host.message("work")) {
- Exn.release(capture(session.host, "work") { session.start() })
- }
- }
- }
-
- override def join: List[Build_Cluster.Result] = synchronized {
- _init.foreach(_.join)
- if (_workers.isEmpty) Nil
- else {
- assert(_sessions.length == _workers.length)
- for ((session, worker) <- _sessions zip _workers)
- yield Build_Cluster.Result(session.host, worker.join_result)
- }
- }
-
-
- /* close */
-
- override def close(): Unit = synchronized {
- _init.foreach(_.join)
- _workers.foreach(_.join_result)
- _sessions.foreach(_.close())
-
- _init = Nil
- _workers = Nil
- _sessions = Nil
- }
-}
--- a/src/Pure/Tools/build_job.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,552 +0,0 @@
-/* Title: Pure/Tools/build_job.scala
- Author: Makarius
-
-Build job running prover process, with rudimentary PIDE session.
-*/
-
-package isabelle
-
-
-import scala.collection.mutable
-
-
-trait Build_Job {
- def cancel(): Unit = ()
- def is_finished: Boolean = false
- def join: Option[Build_Job.Result] = None
-}
-
-object Build_Job {
- sealed case class Result(process_result: Process_Result, output_shasum: SHA1.Shasum)
-
-
- /* build session */
-
- def start_session(
- build_context: Build.Context,
- session_context: Session_Context,
- progress: Progress,
- log: Logger,
- server: SSH.Server,
- session_background: Sessions.Background,
- sources_shasum: SHA1.Shasum,
- input_shasum: SHA1.Shasum,
- node_info: Host.Node_Info,
- store_heap: Boolean
- ): Session_Job = {
- new Session_Job(build_context, session_context, progress, log, server,
- session_background, sources_shasum, input_shasum, node_info, store_heap)
- }
-
- object Session_Context {
- def load(
- database_server: Option[SQL.Database],
- build_uuid: String,
- name: String,
- deps: List[String],
- ancestors: List[String],
- session_prefs: String,
- sources_shasum: SHA1.Shasum,
- timeout: Time,
- store: Store,
- progress: Progress = new Progress
- ): Session_Context = {
- def default: Session_Context =
- Session_Context(
- name, deps, ancestors, session_prefs, sources_shasum, timeout,
- Time.zero, Bytes.empty, build_uuid)
-
- def read(db: SQL.Database): Session_Context = {
- def ignore_error(msg: String) = {
- progress.echo_warning(
- "Ignoring bad database " + db + " for session " + quote(name) +
- if_proper(msg, ":\n" + msg))
- default
- }
- try {
- val command_timings = store.read_command_timings(db, name)
- val elapsed =
- store.read_session_timing(db, name) match {
- case Markup.Elapsed(s) => Time.seconds(s)
- case _ => Time.zero
- }
- new Session_Context(
- name, deps, ancestors, session_prefs, sources_shasum, timeout,
- elapsed, command_timings, build_uuid)
- }
- catch {
- case ERROR(msg) => ignore_error(msg)
- case exn: java.lang.Error => ignore_error(Exn.message(exn))
- case _: XML.Error => ignore_error("XML.Error")
- }
- }
-
- database_server match {
- case Some(db) => if (store.session_info_exists(db)) read(db) else default
- case None => using_option(store.try_open_database(name))(read) getOrElse default
- }
- }
- }
-
- sealed case class Session_Context(
- name: String,
- deps: List[String],
- ancestors: List[String],
- session_prefs: String,
- sources_shasum: SHA1.Shasum,
- timeout: Time,
- old_time: Time,
- old_command_timings_blob: Bytes,
- build_uuid: String
- ) extends Library.Named
-
- class Session_Job private[Build_Job](
- build_context: Build.Context,
- session_context: Session_Context,
- progress: Progress,
- log: Logger,
- server: SSH.Server,
- session_background: Sessions.Background,
- sources_shasum: SHA1.Shasum,
- input_shasum: SHA1.Shasum,
- node_info: Host.Node_Info,
- store_heap: Boolean
- ) extends Build_Job {
- def session_name: String = session_background.session_name
-
- private val future_result: Future[Option[Result]] =
- Future.thread("build", uninterruptible = true) {
- val info = session_background.sessions_structure(session_name)
- val options = Host.node_options(info.options, node_info)
-
- val store = build_context.store
-
- using_optional(store.maybe_open_database_server(server = server)) { database_server =>
-
- store.clean_output(database_server, session_name, session_init = true)
-
- val session_sources =
- Store.Sources.load(session_background.base, cache = store.cache.compress)
-
- val env =
- Isabelle_System.settings(
- List("ISABELLE_ML_DEBUGGER" -> options.bool("ML_debugger").toString))
-
- val session_heaps =
- session_background.info.parent match {
- case None => Nil
- case Some(logic) => ML_Process.session_heaps(store, session_background, logic = logic)
- }
-
- val use_prelude = if (session_heaps.isEmpty) Thy_Header.ml_roots.map(_._1) else Nil
-
- val eval_store =
- if (store_heap) {
- (if (info.theories.nonEmpty) List("ML_Heap.share_common_data ()") else Nil) :::
- List("ML_Heap.save_child " +
- ML_Syntax.print_string_bytes(File.platform_path(store.output_heap(session_name))))
- }
- else Nil
-
- def session_blobs(node_name: Document.Node.Name): List[(Command.Blob, Document.Blobs.Item)] =
- session_background.base.theory_load_commands.get(node_name.theory) match {
- case None => Nil
- case Some(spans) =>
- val syntax = session_background.base.theory_syntax(node_name)
- val master_dir = Path.explode(node_name.master_dir)
- for (span <- spans; file <- span.loaded_files(syntax).files)
- yield {
- val src_path = Path.explode(file)
- val blob_name = Document.Node.Name(File.symbolic_path(master_dir + src_path))
-
- val bytes = session_sources(blob_name.node).bytes
- val text = bytes.text
- val chunk = Symbol.Text_Chunk(text)
-
- Command.Blob(blob_name, src_path, Some((SHA1.digest(bytes), chunk))) ->
- Document.Blobs.Item(bytes, text, chunk, changed = false)
- }
- }
-
-
- /* session */
-
- val resources =
- new Resources(session_background, log = log,
- command_timings =
- Properties.uncompress(session_context.old_command_timings_blob, cache = store.cache))
-
- val session =
- new Session(options, resources) {
- override val cache: Term.Cache = store.cache
-
- override def build_blobs_info(node_name: Document.Node.Name): Command.Blobs_Info =
- Command.Blobs_Info.make(session_blobs(node_name))
-
- override def build_blobs(node_name: Document.Node.Name): Document.Blobs =
- Document.Blobs.make(session_blobs(node_name))
- }
-
- object Build_Session_Errors {
- private val promise: Promise[List[String]] = Future.promise
-
- def result: Exn.Result[List[String]] = promise.join_result
- def cancel(): Unit = promise.cancel()
- def apply(errs: List[String]): Unit = {
- try { promise.fulfill(errs) }
- catch { case _: IllegalStateException => }
- }
- }
-
- val export_consumer =
- Export.consumer(store.open_database(session_name, output = true, server = server),
- store.cache, progress = progress)
-
- val stdout = new StringBuilder(1000)
- val stderr = new StringBuilder(1000)
- val command_timings = new mutable.ListBuffer[Properties.T]
- val theory_timings = new mutable.ListBuffer[Properties.T]
- val session_timings = new mutable.ListBuffer[Properties.T]
- val runtime_statistics = new mutable.ListBuffer[Properties.T]
- val task_statistics = new mutable.ListBuffer[Properties.T]
-
- def fun(
- name: String,
- acc: mutable.ListBuffer[Properties.T],
- unapply: Properties.T => Option[Properties.T]
- ): (String, Session.Protocol_Function) = {
- name -> ((msg: Prover.Protocol_Output) =>
- unapply(msg.properties) match {
- case Some(props) => acc += props; true
- case _ => false
- })
- }
-
- session.init_protocol_handler(new Session.Protocol_Handler {
- override def exit(): Unit = Build_Session_Errors.cancel()
-
- private def build_session_finished(msg: Prover.Protocol_Output): Boolean = {
- val (rc, errors) =
- try {
- val (rc, errs) = {
- import XML.Decode._
- pair(int, list(x => x))(Symbol.decode_yxml(msg.text))
- }
- val errors =
- for (err <- errs) yield {
- val prt = Protocol_Message.expose_no_reports(err)
- Pretty.string_of(prt, metric = Symbol.Metric)
- }
- (rc, errors)
- }
- catch { case ERROR(err) => (Process_Result.RC.failure, List(err)) }
-
- session.protocol_command("Prover.stop", rc.toString)
- Build_Session_Errors(errors)
- true
- }
-
- private def loading_theory(msg: Prover.Protocol_Output): Boolean =
- msg.properties match {
- case Markup.Loading_Theory(Markup.Name(name)) =>
- progress.theory(Progress.Theory(name, session = session_name))
- false
- case _ => false
- }
-
- private def export_(msg: Prover.Protocol_Output): Boolean =
- msg.properties match {
- case Protocol.Export(args) =>
- export_consumer.make_entry(session_name, args, msg.chunk)
- true
- case _ => false
- }
-
- override val functions: Session.Protocol_Functions =
- List(
- Markup.Build_Session_Finished.name -> build_session_finished,
- Markup.Loading_Theory.name -> loading_theory,
- Markup.EXPORT -> export_,
- fun(Markup.Theory_Timing.name, theory_timings, Markup.Theory_Timing.unapply),
- fun(Markup.Session_Timing.name, session_timings, Markup.Session_Timing.unapply),
- fun(Markup.Task_Statistics.name, task_statistics, Markup.Task_Statistics.unapply))
- })
-
- session.command_timings += Session.Consumer("command_timings") {
- case Session.Command_Timing(props) =>
- for {
- elapsed <- Markup.Elapsed.unapply(props)
- elapsed_time = Time.seconds(elapsed)
- if elapsed_time.is_relevant && elapsed_time >= options.seconds("command_timing_threshold")
- } command_timings += props.filter(Markup.command_timing_property)
- }
-
- session.runtime_statistics += Session.Consumer("ML_statistics") {
- case Session.Runtime_Statistics(props) => runtime_statistics += props
- }
-
- session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories") {
- case snapshot =>
- if (!progress.stopped) {
- def export_(name: String, xml: XML.Body, compress: Boolean = true): Unit = {
- if (!progress.stopped) {
- val theory_name = snapshot.node_name.theory
- val args =
- Protocol.Export.Args(theory_name = theory_name, name = name, compress = compress)
- val body = Bytes(Symbol.encode(YXML.string_of_body(xml)))
- export_consumer.make_entry(session_name, args, body)
- }
- }
- def export_text(name: String, text: String, compress: Boolean = true): Unit =
- export_(name, List(XML.Text(text)), compress = compress)
-
- for (command <- snapshot.snippet_command) {
- export_text(Export.DOCUMENT_ID, command.id.toString, compress = false)
- }
-
- export_text(Export.FILES,
- cat_lines(snapshot.node_files.map(name => File.symbolic_path(name.path))),
- compress = false)
-
- for ((blob_name, i) <- snapshot.node_files.tail.zipWithIndex) {
- val xml = snapshot.switch(blob_name).xml_markup()
- export_(Export.MARKUP + (i + 1), xml)
- }
- export_(Export.MARKUP, snapshot.xml_markup())
- export_(Export.MESSAGES, snapshot.messages.map(_._1))
- }
- }
-
- session.all_messages += Session.Consumer[Any]("build_session_output") {
- case msg: Prover.Output =>
- val message = msg.message
- if (msg.is_system) resources.log(Protocol.message_text(message))
-
- if (msg.is_stdout) {
- stdout ++= Symbol.encode(XML.content(message))
- }
- else if (msg.is_stderr) {
- stderr ++= Symbol.encode(XML.content(message))
- }
- else if (msg.is_exit) {
- val err =
- "Prover terminated" +
- (msg.properties match {
- case Markup.Process_Result(result) => ": " + result.print_rc
- case _ => ""
- })
- Build_Session_Errors(List(err))
- }
- case _ =>
- }
-
- build_context.session_setup(session_name, session)
-
- val eval_main = Command_Line.ML_tool("Isabelle_Process.init_build ()" :: eval_store)
-
-
- /* process */
-
- val process =
- Isabelle_Process.start(options, session, session_background, session_heaps,
- use_prelude = use_prelude, eval_main = eval_main, cwd = info.dir.file, env = env)
-
- val timeout_request: Option[Event_Timer.Request] =
- if (info.timeout_ignored) None
- else Some(Event_Timer.request(Time.now() + info.timeout) { process.terminate() })
-
- val build_errors =
- Isabelle_Thread.interrupt_handler(_ => process.terminate()) {
- Exn.capture { process.await_startup() } match {
- case Exn.Res(_) =>
- val resources_yxml = resources.init_session_yxml
- val encode_options: XML.Encode.T[Options] =
- options => session.prover_options(options).encode
- val args_yxml =
- YXML.string_of_body(
- {
- import XML.Encode._
- pair(string, list(pair(encode_options, list(pair(string, properties)))))(
- (session_name, info.theories))
- })
- session.protocol_command("build_session", resources_yxml, args_yxml)
- Build_Session_Errors.result
- case Exn.Exn(exn) => Exn.Res(List(Exn.message(exn)))
- }
- }
-
- val result0 =
- Isabelle_Thread.interrupt_handler(_ => process.terminate()) { process.await_shutdown() }
-
- val was_timeout =
- timeout_request match {
- case None => false
- case Some(request) => !request.cancel()
- }
-
- session.stop()
-
- val export_errors =
- export_consumer.shutdown(close = true).map(Output.error_message_text)
-
- val (document_output, document_errors) =
- try {
- if (Exn.is_res(build_errors) && result0.ok && info.documents.nonEmpty) {
- using(Export.open_database_context(store, server = server)) { database_context =>
- val documents =
- using(database_context.open_session(session_background)) {
- session_context =>
- Document_Build.build_documents(
- Document_Build.context(session_context, progress = progress),
- output_sources = info.document_output,
- output_pdf = info.document_output)
- }
- using(database_context.open_database(session_name, output = true))(session_database =>
- documents.foreach(_.write(session_database.db, session_name)))
- (documents.flatMap(_.log_lines), Nil)
- }
- }
- else (Nil, Nil)
- }
- catch {
- case exn: Document_Build.Build_Error => (exn.log_lines, exn.log_errors)
- case Exn.Interrupt.ERROR(msg) => (Nil, List(msg))
- }
-
-
- /* process result */
-
- val result1 = {
- val theory_timing =
- theory_timings.iterator.flatMap(
- {
- case props @ Markup.Name(name) => Some(name -> props)
- case _ => None
- }).toMap
- val used_theory_timings =
- for { (name, _) <- session_background.base.used_theories }
- yield theory_timing.getOrElse(name.theory, Markup.Name(name.theory))
-
- val more_output =
- Library.trim_line(stdout.toString) ::
- command_timings.toList.map(Protocol.Command_Timing_Marker.apply) :::
- used_theory_timings.map(Protocol.Theory_Timing_Marker.apply) :::
- session_timings.toList.map(Protocol.Session_Timing_Marker.apply) :::
- runtime_statistics.toList.map(Protocol.ML_Statistics_Marker.apply) :::
- task_statistics.toList.map(Protocol.Task_Statistics_Marker.apply) :::
- document_output
-
- result0.output(more_output)
- .error(Library.trim_line(stderr.toString))
- .errors_rc(export_errors ::: document_errors)
- }
-
- val result2 =
- build_errors match {
- case Exn.Res(build_errs) =>
- val errs = build_errs ::: document_errors
- if (errs.nonEmpty) {
- result1.error_rc.output(
- errs.flatMap(s => split_lines(Output.error_message_text(s))) :::
- errs.map(Protocol.Error_Message_Marker.apply))
- }
- else if (progress.stopped && result1.ok) result1.copy(rc = Process_Result.RC.interrupt)
- else result1
- case Exn.Exn(Exn.Interrupt()) =>
- if (result1.ok) result1.copy(rc = Process_Result.RC.interrupt)
- else result1
- case Exn.Exn(exn) => throw exn
- }
-
- val process_result =
- if (result2.ok) result2
- else if (was_timeout) result2.error(Output.error_message_text("Timeout")).timeout_rc
- else if (result2.interrupted) result2.error(Output.error_message_text("Interrupt"))
- else result2
-
-
- /* output heap */
-
- val output_shasum = {
- val heap = store.output_heap(session_name)
- if (process_result.ok && store_heap && heap.is_file) {
- val slice = Space.MiB(options.real("build_database_slice")).bytes
- val digest = ML_Heap.store(database_server, session_name, heap, slice)
- SHA1.shasum(digest, session_name)
- }
- else SHA1.no_shasum
- }
-
- val log_lines = process_result.out_lines.filterNot(Protocol_Message.Marker.test)
-
- val build_log =
- Build_Log.Log_File(session_name, process_result.out_lines, cache = store.cache).
- parse_session_info(
- command_timings = true,
- theory_timings = true,
- ml_statistics = true,
- task_statistics = true)
-
- // write log file
- if (process_result.ok) {
- File.write_gzip(store.output_log_gz(session_name), terminate_lines(log_lines))
- }
- else File.write(store.output_log(session_name), terminate_lines(log_lines))
-
- // write database
- def write_info(db: SQL.Database): Unit =
- store.write_session_info(db, session_name, session_sources,
- build_log =
- if (process_result.timeout) build_log.error("Timeout") else build_log,
- build =
- Store.Build_Info(
- sources = sources_shasum,
- input_heaps = input_shasum,
- output_heap = output_shasum,
- process_result.rc,
- build_context.build_uuid))
-
- val valid =
- if (progress.stopped_local) false
- else {
- database_server match {
- case Some(db) => write_info(db)
- case None => using(store.open_database(session_name, output = true))(write_info)
- }
- true
- }
-
- // messages
- process_result.err_lines.foreach(progress.echo(_))
-
- if (process_result.ok) {
- val props = build_log.session_timing
- val threads = Markup.Session_Timing.Threads.unapply(props) getOrElse 1
- val timing = Markup.Timing_Properties.get(props)
- progress.echo(
- "Timing " + session_name + " (" + threads + " threads, " + timing.message_factor + ")",
- verbose = true)
- progress.echo(
- "Finished " + session_name + " (" + process_result.timing.message_resources + ")")
- }
- else {
- progress.echo(
- session_name + " FAILED (see also \"isabelle build_log -H Error " + session_name + "\")")
- if (!process_result.interrupted) {
- val tail = info.options.int("process_output_tail")
- val suffix = if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)
- val prefix = if (log_lines.length == suffix.length) Nil else List("...")
- progress.echo(Library.trim_line(cat_lines(prefix ::: suffix)))
- }
- }
-
- if (valid) Some(Result(process_result.copy(out_lines = log_lines), output_shasum))
- else None
- }
- }
-
- override def cancel(): Unit = future_result.cancel()
- override def is_finished: Boolean = future_result.is_finished
- override def join: Option[Result] = future_result.join
- }
-}
--- a/src/Pure/Tools/build_process.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1223 +0,0 @@
-/* Title: Pure/Tools/build_process.scala
- Author: Makarius
-
-Build process for sessions, with build database, optional heap, and
-optional presentation.
-*/
-
-package isabelle
-
-
-import scala.collection.immutable.SortedMap
-import scala.math.Ordering
-import scala.annotation.tailrec
-
-
-object Build_Process {
- /** state vs. database **/
-
- sealed case class Build(
- build_uuid: String, // Database_Progress.context_uuid
- ml_platform: String,
- options: String,
- start: Date,
- stop: Option[Date],
- sessions: List[String]
- ) {
- def active: Boolean = stop.isEmpty
-
- def print: String =
- build_uuid + " (platform: " + ml_platform + ", start: " + Build_Log.print_date(start) +
- if_proper(stop, ", stop: " + Build_Log.print_date(stop.get)) + ")"
- }
-
- sealed case class Worker(
- worker_uuid: String, // Database_Progress.agent_uuid
- build_uuid: String,
- start: Date,
- stamp: Date,
- stop: Option[Date],
- serial: Long
- )
-
- sealed case class Task(
- name: String,
- deps: List[String],
- info: JSON.Object.T,
- build_uuid: String
- ) {
- def is_ready: Boolean = deps.isEmpty
- def resolve(dep: String): Task =
- if (deps.contains(dep)) copy(deps = deps.filterNot(_ == dep)) else this
- }
-
- sealed case class Job(
- name: String,
- worker_uuid: String,
- build_uuid: String,
- node_info: Host.Node_Info,
- start_date: Date,
- build: Option[Build_Job]
- ) extends Library.Named {
- def join_build: Option[Build_Job.Result] = build.flatMap(_.join)
- }
-
- sealed case class Result(
- name: String,
- worker_uuid: String,
- build_uuid: String,
- node_info: Host.Node_Info,
- process_result: Process_Result,
- output_shasum: SHA1.Shasum,
- current: Boolean
- ) extends Library.Named {
- def ok: Boolean = process_result.ok
- }
-
- object Sessions {
- type Graph = isabelle.Graph[String, Build_Job.Session_Context]
- val empty: Sessions = new Sessions(Graph.string)
- }
-
- final class Sessions private(val graph: Sessions.Graph) {
- override def toString: String = graph.toString
-
- def defined(name: String): Boolean = graph.defined(name)
- def apply(name: String): Build_Job.Session_Context = graph.get_node(name)
-
- def iterator: Iterator[Build_Job.Session_Context] =
- for (name <- graph.topological_order.iterator) yield apply(name)
-
- def make(new_graph: Sessions.Graph): Sessions =
- if (graph == new_graph) this
- else {
- new Sessions(
- new_graph.iterator.foldLeft(new_graph) {
- case (g, (name, (session, _))) => g.add_deps_acyclic(name, session.deps)
- })
- }
-
- def pull(
- data_domain: Set[String],
- data: Set[String] => List[Build_Job.Session_Context]
- ): Sessions = {
- val dom = data_domain -- iterator.map(_.name)
- // retain entries still within data_domain, then add the newly fetched ones
- make(data(dom).foldLeft(graph.restrict(data_domain)) { case (g, e) => g.new_node(e.name, e) })
- }
-
- def init(
- build_context: isabelle.Build.Context,
- database_server: Option[SQL.Database],
- progress: Progress = new Progress
- ): Sessions = {
- val sessions_structure = build_context.sessions_structure
- make(
- sessions_structure.build_graph.iterator.foldLeft(graph) {
- case (graph0, (name, (info, _))) =>
- val deps = info.parent.toList
- val prefs = info.session_prefs
- val ancestors = sessions_structure.build_requirements(deps)
- val sources_shasum = build_context.build_deps.sources_shasum(name)
-
- if (graph0.defined(name)) {
- val session0 = graph0.get_node(name)
- val prefs0 = session0.session_prefs
- val ancestors0 = session0.ancestors
- val sources_shasum0 = session0.sources_shasum
-
- def err(msg: String, a: String, b: String): Nothing =
- error("Conflicting dependencies for session " + quote(name) + ": " + msg + "\n" +
- Library.indent_lines(2, a) + "\nvs.\n" + Library.indent_lines(2, b))
-
- if (prefs0 != prefs) {
- err("preferences disagree",
- Symbol.cartouche_decoded(prefs0), Symbol.cartouche_decoded(prefs))
- }
- if (ancestors0 != ancestors) {
- err("ancestors disagree", commas_quote(ancestors0), commas_quote(ancestors))
- }
- if (sources_shasum0 != sources_shasum) {
- val a = sources_shasum0 - sources_shasum
- val b = sources_shasum - sources_shasum0
- err("sources disagree",
- Library.trim_line(a.toString),
- Library.trim_line(b.toString))
- }
-
- graph0
- }
- else {
- val session =
- Build_Job.Session_Context.load(database_server,
- build_context.build_uuid, name, deps, ancestors, prefs, sources_shasum,
- info.timeout, build_context.store, progress = progress)
- graph0.new_node(name, session)
- }
- }
- )
- }
-
- lazy val max_time: Map[String, Double] = {
- val maximals = graph.maximals.toSet
- def descendants_time(name: String): Double = {
- if (maximals.contains(name)) apply(name).old_time.seconds
- else {
- val descendants = graph.all_succs(List(name)).toSet
- val g = graph.restrict(descendants)
- (0.0 :: g.maximals.flatMap { desc =>
- val ps = g.all_preds(List(desc))
- if (ps.exists(p => !graph.defined(p))) None
- else Some(ps.map(p => apply(p).old_time.seconds).sum)
- }).max
- }
- }
- Map.from(
- for (name <- graph.keys_iterator)
- yield name -> descendants_time(name)).withDefaultValue(0.0)
- }
-
- lazy val ordering: Ordering[String] =
- (a: String, b: String) =>
- max_time(b) compare max_time(a) match {
- case 0 =>
- apply(b).timeout compare apply(a).timeout match {
- case 0 => a compare b
- case ord => ord
- }
- case ord => ord
- }
- }
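-
- // Illustrative reading of max_time (hypothetical graph): for a chain
- // A -> B -> C with old_time 10s/20s/30s, max_time(C) = 30.0 but
- // max_time(A) = 60.0 (the whole remaining chain), so the ordering above
- // schedules A first: its descendants still need the most work.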
-
- sealed case class Snapshot(
- builds: List[Build], // available build configurations
- workers: List[Worker], // available worker processes
- sessions: Sessions, // static build targets
- pending: State.Pending, // dynamic build "queue"
- running: State.Running, // presently running jobs
- results: State.Results) // finished results
-
- object State {
- type Pending = List[Task]
- type Running = Map[String, Job]
- type Results = Map[String, Result]
- }
-
- sealed case class State(
- serial: Long = 0,
- numa_nodes: List[Int] = Nil,
- sessions: Sessions = Sessions.empty,
- pending: State.Pending = Nil,
- running: State.Running = Map.empty,
- results: State.Results = Map.empty
- ) {
- require(serial >= 0, "serial underflow")
- def inc_serial: State = {
- require(serial < Long.MaxValue, "serial overflow")
- copy(serial = serial + 1)
- }
-
- def ready: State.Pending = pending.filter(_.is_ready)
- def next_ready: State.Pending = ready.filter(entry => !is_running(entry.name))
-
- def remove_pending(name: String): State =
- copy(pending = pending.flatMap(
- entry => if (entry.name == name) None else Some(entry.resolve(name))))
-
- def is_running(name: String): Boolean = running.isDefinedAt(name)
-
- def stop_running(): Unit =
- for (job <- running.valuesIterator; build <- job.build) build.cancel()
-
- def build_running: List[Build_Job] =
- List.from(for (job <- running.valuesIterator; build <- job.build) yield build)
-
- def finished_running(): List[Job] =
- List.from(
- for (job <- running.valuesIterator; build <- job.build if build.is_finished)
- yield job)
-
- def add_running(job: Job): State =
- copy(running = running + (job.name -> job))
-
- def remove_running(name: String): State =
- copy(running = running - name)
-
- def make_result(
- result_name: (String, String, String),
- process_result: Process_Result,
- output_shasum: SHA1.Shasum,
- node_info: Host.Node_Info = Host.Node_Info.none,
- current: Boolean = false
- ): State = {
- val (name, worker_uuid, build_uuid) = result_name
- val result =
- Result(name, worker_uuid, build_uuid, node_info, process_result, output_shasum, current)
- copy(results = results + (name -> result))
- }
-
- def ancestor_results(name: String): Option[List[Result]] = {
- val defined =
- sessions.defined(name) &&
- sessions(name).ancestors.forall(a => sessions.defined(a) && results.isDefinedAt(a))
- if (defined) Some(sessions(name).ancestors.map(results)) else None
- }
- }
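-
- // Illustrative queue update (hypothetical names): after remove_pending("Pure"),
- // a Task("HOL", List("Pure"), ...) has its dependency resolved and becomes
- // is_ready, so next_ready can offer it for scheduling.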
-
-
-
- /** SQL data model **/
-
- object private_data extends SQL.Data("isabelle_build") {
- val database: Path = Path.explode("$ISABELLE_HOME_USER/build.db")
-
- def pull[A <: Library.Named](
- data_domain: Set[String],
- data_iterator: Set[String] => Iterator[A],
- old_data: Map[String, A]
- ): Map[String, A] = {
- val dom = data_domain -- old_data.keysIterator
- val data = old_data -- old_data.keysIterator.filterNot(data_domain)
- if (dom.isEmpty) data
- else data_iterator(dom).foldLeft(data) { case (map, a) => map + (a.name -> a) }
- }
-
- def pull0[A <: Library.Named](
- new_data: Map[String, A],
- old_data: Map[String, A]
- ): Map[String, A] = {
- pull(new_data.keySet, dom => new_data.valuesIterator.filter(a => dom(a.name)), old_data)
- }
-
- def pull1[A <: Library.Named](
- data_domain: Set[String],
- data_base: Set[String] => Map[String, A],
- old_data: Map[String, A]
- ): Map[String, A] = {
- pull(data_domain, dom => data_base(dom).valuesIterator, old_data)
- }
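-
- // Illustrative behavior of pull (hypothetical data): with old_data keyed
- // by {a, b} and data_domain = {b, c}, the stale entry a is dropped, b is
- // kept as-is, and only the missing c is fetched via data_iterator({c}).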
-
- object Generic {
- val build_uuid = SQL.Column.string("build_uuid")
- val worker_uuid = SQL.Column.string("worker_uuid")
- val name = SQL.Column.string("name")
-
- def sql(
- build_uuid: String = "",
- worker_uuid: String = "",
- names: Iterable[String] = Nil
- ): SQL.Source =
- SQL.and(
- if_proper(build_uuid, Generic.build_uuid.equal(build_uuid)),
- if_proper(worker_uuid, Generic.worker_uuid.equal(worker_uuid)),
- if_proper(names, Generic.name.member(names)))
-
- def sql_where(
- build_uuid: String = "",
- worker_uuid: String = "",
- names: Iterable[String] = Nil
- ): SQL.Source = {
- SQL.where(sql(build_uuid = build_uuid, worker_uuid = worker_uuid, names = names))
- }
- }
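-
- // Illustrative composition (hypothetical arguments): sql_where(build_uuid =
- // "1234", names = List("HOL")) yields SQL of the form
- // WHERE build_uuid = '1234' AND name IN ('HOL'); empty arguments contribute
- // no clause at all.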
-
-
- /* base table */
-
- object Base {
- val build_uuid = Generic.build_uuid.make_primary_key
- val ml_platform = SQL.Column.string("ml_platform")
- val options = SQL.Column.string("options")
- val start = SQL.Column.date("start")
- val stop = SQL.Column.date("stop")
-
- val table = make_table(List(build_uuid, ml_platform, options, start, stop))
- }
-
- def read_builds(db: SQL.Database): List[Build] = {
- val builds =
- db.execute_query_statement(Base.table.select(), List.from[Build],
- { res =>
- val build_uuid = res.string(Base.build_uuid)
- val ml_platform = res.string(Base.ml_platform)
- val options = res.string(Base.options)
- val start = res.date(Base.start)
- val stop = res.get_date(Base.stop)
- Build(build_uuid, ml_platform, options, start, stop, Nil)
- })
-
- for (build <- builds.sortBy(_.start)(Date.Ordering)) yield {
- val sessions = private_data.read_sessions_domain(db, build_uuid = build.build_uuid)
- build.copy(sessions = sessions.toList.sorted)
- }
- }
-
- def remove_builds(db: SQL.Database, remove: List[String]): Unit =
- if (remove.nonEmpty) {
- val sql = Generic.build_uuid.where_member(remove)
- db.execute_statement(SQL.MULTI(build_uuid_tables.map(_.delete(sql = sql))))
- }
-
- def start_build(
- db: SQL.Database,
- build_uuid: String,
- ml_platform: String,
- options: String
- ): Unit = {
- db.execute_statement(Base.table.insert(), body =
- { stmt =>
- stmt.string(1) = build_uuid
- stmt.string(2) = ml_platform
- stmt.string(3) = options
- stmt.date(4) = db.now()
- stmt.date(5) = None
- })
- }
-
- def stop_build(db: SQL.Database, build_uuid: String): Unit =
- db.execute_statement(
- Base.table.update(List(Base.stop), sql = Base.build_uuid.where_equal(build_uuid)),
- body = { stmt => stmt.date(1) = db.now() })
-
- def clean_build(db: SQL.Database): Unit = {
- val remove =
- db.execute_query_statement(
- Base.table.select(List(Base.build_uuid), sql = SQL.where(Base.stop.defined)),
- List.from[String], res => res.string(Base.build_uuid))
-
- remove_builds(db, remove)
- }
-
-
- /* sessions */
-
- object Sessions {
- val name = Generic.name.make_primary_key
- val deps = SQL.Column.string("deps")
- val ancestors = SQL.Column.string("ancestors")
- val options = SQL.Column.string("options")
- val sources = SQL.Column.string("sources")
- val timeout = SQL.Column.long("timeout")
- val old_time = SQL.Column.long("old_time")
- val old_command_timings = SQL.Column.bytes("old_command_timings")
- val build_uuid = Generic.build_uuid
-
- val table =
- make_table(
- List(name, deps, ancestors, options, sources, timeout,
- old_time, old_command_timings, build_uuid),
- name = "sessions")
- }
-
- def read_sessions_domain(db: SQL.Database, build_uuid: String = ""): Set[String] =
- db.execute_query_statement(
- Sessions.table.select(List(Sessions.name),
- sql = if_proper(build_uuid, Sessions.build_uuid.where_equal(build_uuid))),
- Set.from[String], res => res.string(Sessions.name))
-
- def read_sessions(db: SQL.Database,
- names: Iterable[String] = Nil,
- build_uuid: String = ""
- ): List[Build_Job.Session_Context] = {
- db.execute_query_statement(
- Sessions.table.select(
- sql =
- SQL.where_and(
- if_proper(names, Sessions.name.member(names)),
- if_proper(build_uuid, Sessions.build_uuid.equal(build_uuid)))
- ),
- List.from[Build_Job.Session_Context],
- { res =>
- val name = res.string(Sessions.name)
- val deps = split_lines(res.string(Sessions.deps))
- val ancestors = split_lines(res.string(Sessions.ancestors))
- val options = res.string(Sessions.options)
- val sources_shasum = SHA1.fake_shasum(res.string(Sessions.sources))
- val timeout = Time.ms(res.long(Sessions.timeout))
- val old_time = Time.ms(res.long(Sessions.old_time))
- val old_command_timings_blob = res.bytes(Sessions.old_command_timings)
- val build_uuid = res.string(Sessions.build_uuid)
- Build_Job.Session_Context(name, deps, ancestors, options, sources_shasum,
- timeout, old_time, old_command_timings_blob, build_uuid)
- }
- )
- }
-
- def update_sessions(
- db: SQL.Database,
- sessions: Build_Process.Sessions,
- old_sessions: Build_Process.Sessions
- ): Boolean = {
- val insert = sessions.iterator.filterNot(s => old_sessions.defined(s.name)).toList
-
- if (insert.nonEmpty) {
- db.execute_batch_statement(Sessions.table.insert(), batch =
- for (session <- insert) yield { (stmt: SQL.Statement) =>
- stmt.string(1) = session.name
- stmt.string(2) = cat_lines(session.deps)
- stmt.string(3) = cat_lines(session.ancestors)
- stmt.string(4) = session.session_prefs
- stmt.string(5) = session.sources_shasum.toString
- stmt.long(6) = session.timeout.ms
- stmt.long(7) = session.old_time.ms
- stmt.bytes(8) = session.old_command_timings_blob
- stmt.string(9) = session.build_uuid
- })
- }
-
- insert.nonEmpty
- }
-
-
- /* workers */
-
- object Workers {
- val worker_uuid = Generic.worker_uuid.make_primary_key
- val build_uuid = Generic.build_uuid
- val start = SQL.Column.date("start")
- val stamp = SQL.Column.date("stamp")
- val stop = SQL.Column.date("stop")
- val serial = SQL.Column.long("serial")
-
- val table =
- make_table(List(worker_uuid, build_uuid, start, stamp, stop, serial), name = "workers")
- }
-
- def read_serial(db: SQL.Database): Long =
- db.execute_query_statementO[Long](
- Workers.table.select(List(Workers.serial.max)), _.long(Workers.serial)).getOrElse(0L)
-
- def read_workers(
- db: SQL.Database,
- build_uuid: String = "",
- worker_uuid: String = ""
- ): List[Worker] = {
- db.execute_query_statement(
- Workers.table.select(
- sql = Generic.sql_where(build_uuid = build_uuid, worker_uuid = worker_uuid)),
- List.from[Worker],
- { res =>
- Worker(
- worker_uuid = res.string(Workers.worker_uuid),
- build_uuid = res.string(Workers.build_uuid),
- start = res.date(Workers.start),
- stamp = res.date(Workers.stamp),
- stop = res.get_date(Workers.stop),
- serial = res.long(Workers.serial))
- })
- }
-
- def start_worker(
- db: SQL.Database,
- worker_uuid: String,
- build_uuid: String,
- serial: Long
- ): Unit = {
- def err(msg: String): Nothing =
- error("Cannot start worker " + worker_uuid + if_proper(msg, "\n" + msg))
-
- val build_stop =
- db.execute_query_statementO(
- Base.table.select(List(Base.stop), sql = Base.build_uuid.where_equal(build_uuid)),
- res => res.get_date(Base.stop))
-
- build_stop match {
- case Some(None) =>
- case Some(Some(_)) => err("for already stopped build process " + build_uuid)
- case None => err("for unknown build process " + build_uuid)
- }
-
- db.execute_statement(Workers.table.insert(), body =
- { stmt =>
- val now = db.now()
- stmt.string(1) = worker_uuid
- stmt.string(2) = build_uuid
- stmt.date(3) = now
- stmt.date(4) = now
- stmt.date(5) = None
- stmt.long(6) = serial
- })
- }
-
- def stamp_worker(
- db: SQL.Database,
- worker_uuid: String,
- serial: Long,
- stop_now: Boolean = false
- ): Unit = {
- val sql = Workers.worker_uuid.where_equal(worker_uuid)
-
- val stop =
- db.execute_query_statementO(
- Workers.table.select(List(Workers.stop), sql = sql), _.get_date(Workers.stop)).flatten
-
- db.execute_statement(
- Workers.table.update(List(Workers.stamp, Workers.stop, Workers.serial), sql = sql),
- body = { stmt =>
- val now = db.now()
- stmt.date(1) = now
- stmt.date(2) = if (stop_now) Some(now) else stop
- stmt.long(3) = serial
- })
- }
-
-
- /* pending jobs */
-
- object Pending {
- val name = Generic.name.make_primary_key
- val deps = SQL.Column.string("deps")
- val info = SQL.Column.string("info")
- val build_uuid = Generic.build_uuid
-
- val table = make_table(List(name, deps, info, build_uuid), name = "pending")
- }
-
- def read_pending(db: SQL.Database): List[Task] =
- db.execute_query_statement(
- Pending.table.select(sql = SQL.order_by(List(Pending.name))),
- List.from[Task],
- { res =>
- val name = res.string(Pending.name)
- val deps = res.string(Pending.deps)
- val info = res.string(Pending.info)
- val build_uuid = res.string(Pending.build_uuid)
- Task(name, split_lines(deps), JSON.Object.parse(info), build_uuid)
- })
-
- def update_pending(
- db: SQL.Database,
- pending: State.Pending,
- old_pending: State.Pending
- ): Boolean = {
- val (delete, insert) = Library.symmetric_difference(old_pending, pending)
-
- if (delete.nonEmpty) {
- db.execute_statement(
- Pending.table.delete(sql = Generic.sql_where(names = delete.map(_.name))))
- }
-
- if (insert.nonEmpty) {
- db.execute_batch_statement(Pending.table.insert(), batch =
- for (task <- insert) yield { (stmt: SQL.Statement) =>
- stmt.string(1) = task.name
- stmt.string(2) = cat_lines(task.deps)
- stmt.string(3) = JSON.Format(task.info)
- stmt.string(4) = task.build_uuid
- })
- }
-
- delete.nonEmpty || insert.nonEmpty
- }
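-
- // Illustrative delta (hypothetical tasks): with old_pending = [A, B] and
- // pending = [B, C], symmetric_difference gives delete = [A], insert = [C];
- // row B is left untouched and the result reports that something changed.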
-
-
- /* running jobs */
-
- object Running {
- val name = Generic.name.make_primary_key
- val worker_uuid = Generic.worker_uuid
- val build_uuid = Generic.build_uuid
- val hostname = SQL.Column.string("hostname")
- val numa_node = SQL.Column.int("numa_node")
- val rel_cpus = SQL.Column.string("rel_cpus")
- val start_date = SQL.Column.date("start_date")
-
- val table =
- make_table(
- List(name, worker_uuid, build_uuid, hostname, numa_node, rel_cpus, start_date),
- name = "running")
- }
-
- def read_running(db: SQL.Database): State.Running =
- db.execute_query_statement(
- Running.table.select(sql = SQL.order_by(List(Running.name))),
- Map.from[String, Job],
- { res =>
- val name = res.string(Running.name)
- val worker_uuid = res.string(Running.worker_uuid)
- val build_uuid = res.string(Running.build_uuid)
- val hostname = res.string(Running.hostname)
- val numa_node = res.get_int(Running.numa_node)
- val rel_cpus = res.string(Running.rel_cpus)
- val start_date = res.date(Running.start_date)
-
- val node_info = Host.Node_Info(hostname, numa_node, Host.Range.from(rel_cpus))
- name -> Job(name, worker_uuid, build_uuid, node_info, start_date, None)
- }
- )
-
- def update_running(
- db: SQL.Database,
- running: State.Running,
- old_running: State.Running
- ): Boolean = {
- val running0 = old_running.valuesIterator.toList
- val running1 = running.valuesIterator.toList
- val (delete, insert) = Library.symmetric_difference(running0, running1)
-
- if (delete.nonEmpty) {
- db.execute_statement(
- Running.table.delete(sql = Generic.sql_where(names = delete.map(_.name))))
- }
-
- if (insert.nonEmpty) {
- db.execute_batch_statement(Running.table.insert(), batch =
- for (job <- insert) yield { (stmt: SQL.Statement) =>
- stmt.string(1) = job.name
- stmt.string(2) = job.worker_uuid
- stmt.string(3) = job.build_uuid
- stmt.string(4) = job.node_info.hostname
- stmt.int(5) = job.node_info.numa_node
- stmt.string(6) = Host.Range(job.node_info.rel_cpus)
- stmt.date(7) = job.start_date
- })
- }
-
- delete.nonEmpty || insert.nonEmpty
- }
-
-
- /* job results */
-
- object Results {
- val name = Generic.name.make_primary_key
- val worker_uuid = Generic.worker_uuid
- val build_uuid = Generic.build_uuid
- val hostname = SQL.Column.string("hostname")
- val numa_node = SQL.Column.int("numa_node")
- val rel_cpus = SQL.Column.string("rel_cpus")
- val rc = SQL.Column.int("rc")
- val out = SQL.Column.string("out")
- val err = SQL.Column.string("err")
- val timing_elapsed = SQL.Column.long("timing_elapsed")
- val timing_cpu = SQL.Column.long("timing_cpu")
- val timing_gc = SQL.Column.long("timing_gc")
- val output_shasum = SQL.Column.string("output_shasum")
- val current = SQL.Column.bool("current")
-
- val table =
- make_table(
- List(name, worker_uuid, build_uuid, hostname, numa_node, rel_cpus,
- rc, out, err, timing_elapsed, timing_cpu, timing_gc, output_shasum, current),
- name = "results")
- }
-
- def read_results_domain(db: SQL.Database): Set[String] =
- db.execute_query_statement(
- Results.table.select(List(Results.name)),
- Set.from[String], res => res.string(Results.name))
-
- def read_results(db: SQL.Database, names: Iterable[String] = Nil): State.Results =
- db.execute_query_statement(
- Results.table.select(sql = if_proper(names, Results.name.where_member(names))),
- Map.from[String, Result],
- { res =>
- val name = res.string(Results.name)
- val worker_uuid = res.string(Results.worker_uuid)
- val build_uuid = res.string(Results.build_uuid)
- val hostname = res.string(Results.hostname)
- val numa_node = res.get_int(Results.numa_node)
- val rel_cpus = res.string(Results.rel_cpus)
- val node_info = Host.Node_Info(hostname, numa_node, Host.Range.from(rel_cpus))
-
- val rc = res.int(Results.rc)
- val out = res.string(Results.out)
- val err = res.string(Results.err)
- val timing =
- res.timing(
- Results.timing_elapsed,
- Results.timing_cpu,
- Results.timing_gc)
- val process_result =
- Process_Result(rc,
- out_lines = split_lines(out),
- err_lines = split_lines(err),
- timing = timing)
-
- val output_shasum = SHA1.fake_shasum(res.string(Results.output_shasum))
- val current = res.bool(Results.current)
-
- name ->
- Result(name, worker_uuid, build_uuid, node_info, process_result, output_shasum, current)
- }
- )
-
- def update_results(
- db: SQL.Database,
- results: State.Results,
- old_results: State.Results
- ): Boolean = {
- val insert =
- results.valuesIterator.filterNot(res => old_results.isDefinedAt(res.name)).toList
-
- if (insert.nonEmpty) {
- db.execute_batch_statement(Results.table.insert(), batch =
- for (result <- insert) yield { (stmt: SQL.Statement) =>
- val process_result = result.process_result
- stmt.string(1) = result.name
- stmt.string(2) = result.worker_uuid
- stmt.string(3) = result.build_uuid
- stmt.string(4) = result.node_info.hostname
- stmt.int(5) = result.node_info.numa_node
- stmt.string(6) = Host.Range(result.node_info.rel_cpus)
- stmt.int(7) = process_result.rc
- stmt.string(8) = cat_lines(process_result.out_lines)
- stmt.string(9) = cat_lines(process_result.err_lines)
- stmt.long(10) = process_result.timing.elapsed.ms
- stmt.long(11) = process_result.timing.cpu.ms
- stmt.long(12) = process_result.timing.gc.ms
- stmt.string(13) = result.output_shasum.toString
- stmt.bool(14) = result.current
- })
- }
-
- insert.nonEmpty
- }
-
-
- /* collective operations */
-
- override val tables =
- SQL.Tables(
- Base.table,
- Workers.table,
- Sessions.table,
- Pending.table,
- Running.table,
- Results.table)
-
- val build_uuid_tables =
- tables.filter(table =>
- table.columns.exists(column => column.name == Generic.build_uuid.name))
-
- def pull_database(db: SQL.Database, worker_uuid: String, state: State): State = {
- val serial_db = read_serial(db)
- if (serial_db == state.serial) state
- else {
- val serial = serial_db max state.serial
- stamp_worker(db, worker_uuid, serial)
-
- val sessions = state.sessions.pull(read_sessions_domain(db), read_sessions(db, _))
- val pending = read_pending(db)
- val running = pull0(read_running(db), state.running)
- val results = pull1(read_results_domain(db), read_results(db, _), state.results)
-
- state.copy(serial = serial, sessions = sessions, pending = pending,
- running = running, results = results)
- }
- }
-
- def update_database(
- db: SQL.Database,
- worker_uuid: String,
- state: State,
- old_state: State
- ): State = {
- val changed =
- List(
- update_sessions(db, state.sessions, old_state.sessions),
- update_pending(db, state.pending, old_state.pending),
- update_running(db, state.running, old_state.running),
- update_results(db, state.results, old_state.results))
-
- val state1 = if (changed.exists(identity)) state.inc_serial else state
- if (state1.serial != state.serial) stamp_worker(db, worker_uuid, state1.serial)
-
- state1
- }
- }
-
- def read_builds(db: SQL.Database): List[Build] =
- private_data.transaction_lock(db, create = true, label = "Build_Process.read_builds") {
- private_data.read_builds(db)
- }
-}
-
-
-
-/** main process **/
-
-class Build_Process(
- protected final val build_context: Build.Context,
- protected final val build_progress: Progress,
- protected final val server: SSH.Server
-)
-extends AutoCloseable {
- /* context */
-
- protected final val store: Store = build_context.store
- protected final val build_options: Options = store.options
- protected final val build_deps: isabelle.Sessions.Deps = build_context.build_deps
- protected final val hostname: String = build_context.hostname
- protected final val build_uuid: String = build_context.build_uuid
-
-
- /* global resources with common close() operation */
-
- protected val _database_server: Option[SQL.Database] =
- try { store.maybe_open_database_server(server = server) }
- catch { case exn: Throwable => close(); throw exn }
-
- private val _build_database: Option[SQL.Database] =
- try {
- for (db <- store.maybe_open_build_database(server = server)) yield {
- if (!db.is_postgresql) {
- error("Distributed build requires PostgreSQL (option build_database_server)")
- }
- val store_tables = db.is_postgresql
- Build_Process.private_data.transaction_lock(db,
- create = true,
- label = "Build_Process.build_database"
- ) {
- Build_Process.private_data.clean_build(db)
- if (store_tables) Store.private_data.tables.lock(db, create = true)
- }
- if (build_context.master) {
- db.vacuum(Build_Process.private_data.tables.list)
- if (store_tables) db.vacuum(Store.private_data.tables.list)
- }
- db
- }
- }
- catch { case exn: Throwable => close(); throw exn }
-
- protected val build_delay: Time = {
- val option =
- _build_database match {
- case Some(db) if db.is_postgresql => "build_cluster_delay"
- case _ => "build_delay"
- }
- build_options.seconds(option)
- }
-
- protected val _host_database: SQL.Database =
- try { store.open_build_database(path = Host.private_data.database, server = server) }
- catch { case exn: Throwable => close(); throw exn }
-
- protected val (progress, worker_uuid) = synchronized {
- if (_build_database.isEmpty) (build_progress, UUID.random().toString)
- else {
- try {
- val db = store.open_build_database(Progress.private_data.database, server = server)
- val progress =
- new Database_Progress(db, build_progress,
- input_messages = build_context.master,
- output_stopped = build_context.master,
- hostname = hostname,
- context_uuid = build_uuid,
- kind = "build_process",
- timeout = Some(build_delay))
- (progress, progress.agent_uuid)
- }
- catch { case exn: Throwable => close(); throw exn }
- }
- }
-
- protected val log: Logger = Logger.make_system_log(progress, build_options)
-
- protected def open_build_cluster(): Build_Cluster = {
- val build_cluster = Build_Cluster.make(build_context, progress = build_progress)
- build_cluster.open()
- build_cluster
- }
-
- private val _build_cluster =
- try {
- if (build_context.master && _build_database.isDefined) open_build_cluster()
- else Build_Cluster.none
- }
- catch { case exn: Throwable => close(); throw exn }
-
- def close(): Unit = synchronized {
- Option(_database_server).flatten.foreach(_.close())
- Option(_build_database).flatten.foreach(_.close())
- Option(_host_database).foreach(_.close())
- Option(_build_cluster).foreach(_.close())
- progress match {
- case db_progress: Database_Progress => db_progress.exit(close = true)
- case _ =>
- }
- }
-
-
- /* global state: internal var vs. external database */
-
- private var _state: Build_Process.State = Build_Process.State()
-
- protected def synchronized_database[A](label: String)(body: => A): A =
- synchronized {
- _build_database match {
- case None => body
- case Some(db) =>
- Build_Process.private_data.transaction_lock(db, label = label) {
- val old_state = Build_Process.private_data.pull_database(db, worker_uuid, _state)
- _state = old_state
- val res = body
- _state = Build_Process.private_data.update_database(db, worker_uuid, _state, old_state)
- res
- }
- }
- }
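-
-  // Note on the pattern above: each synchronized_database block runs as a
-  // single transaction that pulls the shared state from the database, applies
-  // the body to the synchronized copy, and writes back (with a new serial)
-  // only if the body actually changed something.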
-
-
- /* policy operations */
-
- protected def init_state(state: Build_Process.State): Build_Process.State = {
- val sessions1 = state.sessions.init(build_context, _database_server, progress = build_progress)
-
- val old_pending = state.pending.iterator.map(_.name).toSet
- val new_pending =
- List.from(
- for (session <- sessions1.iterator if !old_pending(session.name))
- yield Build_Process.Task(session.name, session.deps, JSON.Object.empty, build_uuid))
- val pending1 = new_pending ::: state.pending
-
- state.copy(sessions = sessions1, pending = pending1)
- }
-
- protected def next_jobs(state: Build_Process.State): List[String] = {
- val limit = {
- if (progress.stopped) { if (build_context.master) Int.MaxValue else 0 }
- else build_context.max_jobs - state.build_running.length
- }
- if (limit > 0) state.next_ready.sortBy(_.name)(state.sessions.ordering).take(limit).map(_.name)
- else Nil
- }
-
- protected def next_node_info(state: Build_Process.State, session_name: String): Host.Node_Info = {
- def used_nodes: Set[Int] =
- Set.from(for (job <- state.running.valuesIterator; i <- job.node_info.numa_node) yield i)
- val numa_node = Host.next_numa_node(_host_database, hostname, state.numa_nodes, used_nodes)
- Host.Node_Info(hostname, numa_node, Nil)
- }
-
- protected def start_session(
- state: Build_Process.State,
- session_name: String,
- ancestor_results: List[Build_Process.Result]
- ): Build_Process.State = {
- val sources_shasum = state.sessions(session_name).sources_shasum
-
- val input_shasum =
- if (ancestor_results.isEmpty) ML_Process.bootstrap_shasum()
- else SHA1.flat_shasum(ancestor_results.map(_.output_shasum))
-
- val store_heap =
- build_context.build_heap || Sessions.is_pure(session_name) ||
- state.sessions.iterator.exists(_.ancestors.contains(session_name))
-
- val (current, output_shasum) =
- store.check_output(_database_server, session_name,
- session_options = build_context.sessions_structure(session_name).options,
- sources_shasum = sources_shasum,
- input_shasum = input_shasum,
- fresh_build = build_context.fresh_build,
- store_heap = store_heap)
-
- val finished = current && ancestor_results.forall(_.current)
- val skipped = build_context.no_build
- val cancelled = progress.stopped || !ancestor_results.forall(_.ok)
-
- if (!skipped && !cancelled) {
- val heaps = (session_name :: ancestor_results.map(_.name)).map(store.output_heap)
- ML_Heap.restore(_database_server, heaps, cache = store.cache.compress)
- }
-
- val result_name = (session_name, worker_uuid, build_uuid)
-
- if (finished) {
- state
- .remove_pending(session_name)
- .make_result(result_name, Process_Result.ok, output_shasum, current = true)
- }
- else if (skipped) {
- progress.echo("Skipping " + session_name + " ...", verbose = true)
- state.
- remove_pending(session_name).
- make_result(result_name, Process_Result.error, output_shasum)
- }
- else if (cancelled) {
- if (build_context.master) {
- progress.echo(session_name + " CANCELLED")
- state
- .remove_pending(session_name)
- .make_result(result_name, Process_Result.undefined, output_shasum)
- }
- else state
- }
- else {
- val node_info = next_node_info(state, session_name)
-
- val print_node_info =
- node_info.numa_node.isDefined || node_info.rel_cpus.nonEmpty ||
- _build_database.isDefined && _build_database.get.is_postgresql
-
- progress.echo(
- (if (store_heap) "Building " else "Running ") + session_name +
- (if (print_node_info) " (on " + node_info + ")" else "") + " ...")
-
- val session = state.sessions(session_name)
-
- val build =
- Build_Job.start_session(build_context, session, progress, log, server,
- build_deps.background(session_name), sources_shasum, input_shasum, node_info, store_heap)
-
- val job =
- Build_Process.Job(session_name, worker_uuid, build_uuid, node_info, Date.now(), Some(build))
-
- state.add_running(job)
- }
- }
-
-
- /* build process roles */
-
- final def is_session_name(job_name: String): Boolean =
- !Long_Name.is_qualified(job_name)
-
- protected final def start_build(): Unit = synchronized_database("Build_Process.start_build") {
- for (db <- _build_database) {
- Build_Process.private_data.start_build(db, build_uuid, build_context.ml_platform,
- build_context.sessions_structure.session_prefs)
- }
- }
-
- protected final def stop_build(): Unit = synchronized_database("Build_Process.stop_build") {
- for (db <- _build_database) {
- Build_Process.private_data.stop_build(db, build_uuid)
- }
- }
-
- protected final def start_worker(): Unit = synchronized_database("Build_Process.start_worker") {
- for (db <- _build_database) {
- _state = _state.inc_serial
- Build_Process.private_data.start_worker(db, worker_uuid, build_uuid, _state.serial)
- }
- }
-
- protected final def stop_worker(): Unit = synchronized_database("Build_Process.stop_worker") {
- for (db <- _build_database) {
- Build_Process.private_data.stamp_worker(db, worker_uuid, _state.serial, stop_now = true)
- }
- }
-
-
- /* run */
-
- def run(): Build.Results = {
- synchronized_database("Build_Process.init") {
- if (build_context.master) {
- _build_cluster.init()
- _state = init_state(_state)
- }
- _state = _state.copy(numa_nodes = Host.numa_nodes(enabled = build_context.numa_shuffling))
- }
-
- def finished(): Boolean = synchronized_database("Build_Process.test") {
- if (!build_context.master && progress.stopped) _state.build_running.isEmpty
- else _state.pending.isEmpty
- }
-
- def sleep(): Unit =
- Isabelle_Thread.interrupt_handler(_ => progress.stop()) { build_delay.sleep() }
-
- val build_progress_warnings = Synchronized(Set.empty[String])
- def build_progress_warning(msg: String): Unit =
- build_progress_warnings.change(seen =>
- if (seen(msg)) seen else { build_progress.echo_warning(msg); seen + msg })
-
- def check_jobs(): Boolean = synchronized_database("Build_Process.check_jobs") {
- val jobs = next_jobs(_state)
- for (name <- jobs) {
- if (is_session_name(name)) {
- if (build_context.sessions_structure.defined(name)) {
- _state.ancestor_results(name) match {
- case Some(results) => _state = start_session(_state, name, results)
- case None =>
- build_progress_warning("Bad build job " + quote(name) + ": no ancestor results")
- }
- }
- else build_progress_warning("Bad build job " + quote(name) + ": no session info")
- }
- else build_progress_warning("Bad build job " + quote(name))
- }
- jobs.nonEmpty
- }
-
- if (finished()) {
- progress.echo_warning("Nothing to build")
- Build.Results(build_context)
- }
- else {
- if (build_context.master) start_build()
- start_worker()
- _build_cluster.start()
-
- if (build_context.master && !build_context.worker_active && _build_cluster.active()) {
- build_progress.echo("Waiting for external workers ...")
- }
-
- try {
- while (!finished()) {
- synchronized_database("Build_Process.main") {
- if (progress.stopped) _state.stop_running()
-
- for (job <- _state.finished_running()) {
- job.join_build match {
- case None =>
- _state = _state.remove_running(job.name)
- case Some(result) =>
- val result_name = (job.name, worker_uuid, build_uuid)
- _state = _state.
- remove_pending(job.name).
- remove_running(job.name).
- make_result(result_name,
- result.process_result,
- result.output_shasum,
- node_info = job.node_info)
- }
- }
- }
-
- if (!check_jobs()) sleep()
- }
- }
- finally {
- _build_cluster.stop()
- stop_worker()
- if (build_context.master) stop_build()
- }
-
- synchronized_database("Build_Process.result") {
- val results = for ((name, result) <- _state.results) yield name -> result.process_result
- Build.Results(build_context, results = results, other_rc = _build_cluster.rc)
- }
- }
- }
-
-
- /* snapshot */
-
- def snapshot(): Build_Process.Snapshot = synchronized_database("Build_Process.snapshot") {
- val (builds, workers) =
- _build_database match {
- case None => (Nil, Nil)
- case Some(db) =>
- (Build_Process.private_data.read_builds(db),
- Build_Process.private_data.read_workers(db))
- }
- Build_Process.Snapshot(
- builds = builds,
- workers = workers,
- sessions = _state.sessions,
- pending = _state.pending,
- running = _state.running,
- results = _state.results)
- }
-
-
- /* toString */
-
- override def toString: String =
- "Build_Process(worker_uuid = " + quote(worker_uuid) + ", build_uuid = " + quote(build_uuid) +
- if_proper(build_context.master, ", master = true") + ")"
-}
--- a/src/Pure/Tools/build_schedule.scala Sat Jan 20 13:52:36 2024 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1573 +0,0 @@
-/* Title: Pure/Tools/build_schedule.scala
- Author: Fabian Huch, TU Muenchen
-
-Build schedule generated by heuristic methods, allowing for more efficient builds.
- */
-
-package isabelle
-
-
-import Host.Node_Info
-import scala.annotation.tailrec
-import scala.collection.mutable
-
-
-object Build_Schedule {
- /* organized historic timing information (extracted from build logs) */
-
- case class Timing_Entry(job_name: String, hostname: String, threads: Int, timing: Timing) {
- def elapsed: Time = timing.elapsed
- def proper_cpu: Option[Time] = timing.cpu.proper_ms.map(Time.ms)
- }
-
- case class Timing_Entries(entries: List[Timing_Entry]) {
- require(entries.nonEmpty)
-
- def is_empty: Boolean = entries.isEmpty
- def size: Int = entries.length
-
- lazy val by_job = entries.groupBy(_.job_name).view.mapValues(Timing_Entries(_)).toMap
- lazy val by_threads = entries.groupBy(_.threads).view.mapValues(Timing_Entries(_)).toMap
- lazy val by_hostname = entries.groupBy(_.hostname).view.mapValues(Timing_Entries(_)).toMap
-
- def mean_time: Time = Timing_Data.mean_time(entries.map(_.elapsed))
- def best_entry: Timing_Entry = entries.minBy(_.elapsed.ms)
- }
-
- object Timing_Data {
- def median_timing(obs: List[Timing]): Timing = obs.sortBy(_.elapsed.ms).apply(obs.length / 2)
-
- def median_time(obs: List[Time]): Time = obs.sortBy(_.ms).apply(obs.length / 2)
-
- def mean_time(obs: Iterable[Time]): Time = Time.ms(obs.map(_.ms).sum / obs.size)
-
- private def dummy_entries(host: Host, host_factor: Double) = {
- val baseline = Time.minutes(5).scale(host_factor)
- val gc = Time.seconds(10).scale(host_factor)
- List(
- Timing_Entry("dummy", host.name, 1, Timing(baseline, baseline, gc)),
- Timing_Entry("dummy", host.name, 8, Timing(baseline.scale(0.2), baseline, gc)))
- }
-
- def make(
- host_infos: Host_Infos,
- build_history: List[(Build_Log.Meta_Info, Build_Log.Build_Info)],
- ): Timing_Data = {
- val hosts = host_infos.hosts
- val measurements =
- for {
- (meta_info, build_info) <- build_history
- build_host = meta_info.get(Build_Log.Prop.build_host)
- (job_name, session_info) <- build_info.sessions.toList
- if build_info.finished_sessions.contains(job_name)
- hostname <- session_info.hostname.orElse(build_host).toList
- host <- hosts.find(_.info.hostname == hostname).toList
- threads = session_info.threads.getOrElse(host.info.num_cpus)
- } yield (job_name, hostname, threads) -> session_info.timing
-
- val entries =
- if (measurements.isEmpty) {
- val default_host = host_infos.hosts.sorted(host_infos.host_speeds).last
- host_infos.hosts.flatMap(host =>
- dummy_entries(host, host_infos.host_factor(default_host, host)))
- }
- else
- measurements.groupMap(_._1)(_._2).toList.map {
- case ((job_name, hostname, threads), timings) =>
- Timing_Entry(job_name, hostname, threads, median_timing(timings))
- }
-
- new Timing_Data(Timing_Entries(entries), host_infos)
- }
-
- def load(host_infos: Host_Infos, log_database: SQL.Database): Timing_Data = {
- val build_history =
- for {
- log_name <- log_database.execute_query_statement(
- Build_Log.private_data.meta_info_table.select(List(Build_Log.Column.log_name)),
- List.from[String], res => res.string(Build_Log.Column.log_name))
- meta_info <- Build_Log.private_data.read_meta_info(log_database, log_name)
- build_info = Build_Log.private_data.read_build_info(log_database, log_name)
- } yield (meta_info, build_info)
-
- make(host_infos, build_history)
- }
- }
-
- class Timing_Data private(data: Timing_Entries, val host_infos: Host_Infos) {
- private def inflection_point(last_mono: Int, next: Int): Int =
- last_mono + ((next - last_mono) / 2)
-
- def best_threads(job_name: String, max_threads: Int): Int = {
- val worse_threads =
- data.by_job.get(job_name).toList.flatMap(_.by_hostname).flatMap {
- case (hostname, data) =>
- val best_threads = data.best_entry.threads
- data.by_threads.keys.toList.sorted.find(_ > best_threads).map(
- inflection_point(best_threads, _))
- }
- (max_threads :: worse_threads).min
- }
-
- private def hostname_factor(from: String, to: String): Double =
- host_infos.host_factor(host_infos.the_host(from), host_infos.the_host(to))
-
- private def approximate_threads(entries_unsorted: List[(Int, Time)], threads: Int): Time = {
- val entries = entries_unsorted.sortBy(_._1)
-
- def sorted_prefix[A](xs: List[A], f: A => Long): List[A] =
- xs match {
- case x1 :: x2 :: xs =>
- if (f(x1) <= f(x2)) x1 :: sorted_prefix(x2 :: xs, f) else x1 :: Nil
- case xs => xs
- }
-
- def linear(p0: (Int, Time), p1: (Int, Time)): Time = {
- val a = (p1._2 - p0._2).scale(1.0 / (p1._1 - p0._1))
- val b = p0._2 - a.scale(p0._1)
- (a.scale(threads) + b) max Time.zero
- }
-
- val mono_prefix = sorted_prefix(entries, e => -e._2.ms)
-
- val is_mono = entries == mono_prefix
- val in_prefix = mono_prefix.length > 1 && threads <= mono_prefix.last._1
- val in_inflection =
- !is_mono && mono_prefix.length > 1 && threads < entries.drop(mono_prefix.length).head._1
- if (is_mono || in_prefix || in_inflection) {
- // Model with Amdahl's law
- val t_p =
- Timing_Data.median_time(for {
- (n, t0) <- mono_prefix
- (m, t1) <- mono_prefix
- if m != n
- } yield (t0 - t1).scale(n.toDouble * m / (m - n)))
- val t_c =
- Timing_Data.median_time(for ((n, t) <- mono_prefix) yield t - t_p.scale(1.0 / n))
-
- def model(threads: Int): Time = (t_c + t_p.scale(1.0 / threads)) max Time.zero
-
- if (is_mono || in_prefix) model(threads)
- else {
- val post_inflection = entries.drop(mono_prefix.length).head
- val inflection_threads = inflection_point(mono_prefix.last._1, post_inflection._1)
-
- if (threads <= inflection_threads) model(threads)
- else linear((inflection_threads, model(inflection_threads)), post_inflection)
- }
- } else {
- // Piecewise linear
- val (p0, p1) =
- if (entries.head._1 < threads && threads < entries.last._1) {
- val split = entries.partition(_._1 < threads)
- (split._1.last, split._2.head)
- } else {
- val piece = if (threads < entries.head._1) entries.take(2) else entries.takeRight(2)
- (piece.head, piece.last)
- }
-
- linear(p0, p1)
- }
- }
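-
-    // Worked example (hypothetical timings, not from any build log): with
-    // measurements (1 thread, 100s) and (4 threads, 40s), the fit above gives
-    // t_p = (100 - 40) * (1 * 4) / (4 - 1) = 80s and t_c = 100 - 80/1 = 20s,
-    // so model(8) = 20 + 80/8 = 30s -- the sequential share t_c bounds the
-    // speedup for large thread counts.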
-
- private def unify_hosts(job_name: String, on_host: String): List[(Int, Time)] = {
- def unify(hostname: String, data: Timing_Entries) =
- data.mean_time.scale(hostname_factor(hostname, on_host))
-
- for {
- data <- data.by_job.get(job_name).toList
- (threads, data) <- data.by_threads
- entries = data.by_hostname.toList.map(unify)
- } yield threads -> Timing_Data.median_time(entries)
- }
-
- def estimate_threads(job_name: String, hostname: String, threads: Int): Option[Time] = {
- def try_approximate(data: Timing_Entries): Option[Time] = {
- val entries =
- data.by_threads.toList match {
- case List((i, Timing_Entries(List(entry)))) if i != 1 =>
- (i, data.mean_time) :: entry.proper_cpu.map(1 -> _).toList
- case entries => entries.map((threads, data) => threads -> data.mean_time)
- }
- if (entries.size < 2) None else Some(approximate_threads(entries, threads))
- }
-
- for {
- data <- data.by_job.get(job_name)
- data <- data.by_hostname.get(hostname)
- time <- data.by_threads.get(threads).map(_.mean_time).orElse(try_approximate(data))
- } yield time
- }
-
- def global_threads_factor(from: Int, to: Int): Double = {
- def median(xs: Iterable[Double]): Double = xs.toList.sorted.apply(xs.size / 2)
-
- val estimates =
- for {
- (hostname, data) <- data.by_hostname
- job_name <- data.by_job.keys
- from_time <- estimate_threads(job_name, hostname, from)
- to_time <- estimate_threads(job_name, hostname, to)
- } yield from_time.ms.toDouble / to_time.ms
-
- if (estimates.nonEmpty) median(estimates)
- else {
- // unify hosts
- val estimates =
- for {
- (job_name, data) <- data.by_job
- hostname = data.by_hostname.keys.head
- entries = unify_hosts(job_name, hostname)
- if entries.length > 1
- } yield
- approximate_threads(entries, from).ms.toDouble / approximate_threads(entries, to).ms
-
- if (estimates.nonEmpty) median(estimates)
- else from.toDouble / to.toDouble
- }
- }
-
- private var cache: Map[(String, String, Int), Time] = Map.empty
- def estimate(job_name: String, hostname: String, threads: Int): Time = {
- def estimate: Time =
- data.by_job.get(job_name) match {
- case None =>
- // no data for job, take average of other jobs for given threads
- val job_estimates = data.by_job.keys.flatMap(estimate_threads(_, hostname, threads))
- if (job_estimates.nonEmpty) Timing_Data.mean_time(job_estimates)
- else {
- // no other job to estimate from, use global curve to approximate any other job
- val (threads1, data1) = data.by_threads.head
- data1.mean_time.scale(global_threads_factor(threads1, threads))
- }
-
- case Some(data) =>
- data.by_threads.get(threads) match {
- case None => // interpolate threads
- estimate_threads(job_name, hostname, threads).getOrElse {
- // per machine, try to approximate config for threads
- val approximated =
- for {
- hostname1 <- data.by_hostname.keys
- estimate <- estimate_threads(job_name, hostname1, threads)
- factor = hostname_factor(hostname1, hostname)
- } yield estimate.scale(factor)
-
- if (approximated.nonEmpty) Timing_Data.mean_time(approximated)
- else {
- // no single machine where config can be approximated, unify data points
- val unified_entries = unify_hosts(job_name, hostname)
-
- if (unified_entries.length > 1) approximate_threads(unified_entries, threads)
- else {
- // only single data point, use global curve to approximate
- val (job_threads, job_time) = unified_entries.head
- job_time.scale(global_threads_factor(job_threads, threads))
- }
- }
- }
-
- case Some(data) => // time for job/thread exists, interpolate machine
- data.by_hostname.get(hostname).map(_.mean_time).getOrElse {
- Timing_Data.median_time(
- data.by_hostname.toList.map((hostname1, data) =>
- data.mean_time.scale(hostname_factor(hostname1, hostname))))
- }
- }
- }
-
- cache.get(job_name, hostname, threads) match {
- case Some(time) => time
- case None =>
- val time = estimate
- cache = cache + ((job_name, hostname, threads) -> time)
- time
- }
- }
- }
-
-
- /* host information */
-
- case class Host(info: isabelle.Host.Info, build: Build_Cluster.Host) {
- def name: String = info.hostname
- def num_cpus: Int = info.num_cpus
- }
-
- object Host_Infos {
- def dummy: Host_Infos =
- new Host_Infos(
- List(Host(isabelle.Host.Info("dummy", Nil, 8, Some(1.0)), Build_Cluster.Host("dummy"))))
-
- def load(build_hosts: List[Build_Cluster.Host], db: SQL.Database): Host_Infos = {
- def get_host(build_host: Build_Cluster.Host): Host = {
- val info =
- isabelle.Host.read_info(db, build_host.name).getOrElse(
- error("No benchmark for " + quote(build_host.name)))
- Host(info, build_host)
- }
-
- new Host_Infos(build_hosts.map(get_host))
- }
- }
-
- class Host_Infos private(val hosts: List[Host]) {
- require(hosts.nonEmpty)
-
- private val by_hostname = hosts.map(host => host.name -> host).toMap
-
- def host_factor(from: Host, to: Host): Double =
- from.info.benchmark_score.get / to.info.benchmark_score.get
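-
-    // Convention: host_factor(a, b) is the estimated slowdown when moving
-    // from a to b; a time measured on a is scaled by this factor to predict
-    // it on b (cf. Timing_Data.hostname_factor).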
-
- val host_speeds: Ordering[Host] =
- Ordering.fromLessThan((host1, host2) => host_factor(host1, host2) < 1)
-
- def the_host(hostname: String): Host =
- by_hostname.getOrElse(hostname, error("Unknown host " + quote(hostname)))
- def the_host(node_info: Node_Info): Host = the_host(node_info.hostname)
-
- def num_threads(node_info: Node_Info): Int =
- if (node_info.rel_cpus.nonEmpty) node_info.rel_cpus.length
- else the_host(node_info).info.num_cpus
-
- def available(state: Build_Process.State): Resources = {
- val allocated =
- state.running.values.map(_.node_info).groupMapReduce(the_host)(List(_))(_ ::: _)
- Resources(this, allocated)
- }
- }
-
-
- /* offline tracking of job configurations and resource allocations */
-
- case class Config(job_name: String, node_info: Node_Info) {
- def job_of(start_time: Time): Build_Process.Job =
- Build_Process.Job(job_name, "", "", node_info, Date(start_time), None)
- }
-
- case class Resources(
- host_infos: Host_Infos,
- allocated_nodes: Map[Host, List[Node_Info]]
- ) {
- def unused_nodes(host: Host, threads: Int): List[Node_Info] =
- if (!available(host, threads)) Nil
- else {
- val node = next_node(host, threads)
- node :: allocate(node).unused_nodes(host, threads)
- }
-
- def unused_nodes(threads: Int): List[Node_Info] =
- host_infos.hosts.flatMap(unused_nodes(_, threads))
-
- def allocated(host: Host): List[Node_Info] = allocated_nodes.getOrElse(host, Nil)
-
- def allocate(node: Node_Info): Resources = {
- val host = host_infos.the_host(node)
- copy(allocated_nodes = allocated_nodes + (host -> (node :: allocated(host))))
- }
-
- def try_allocate_tasks(
- hosts: List[(Host, Int)],
- tasks: List[(Build_Process.Task, Int, Int)],
- ): (List[Config], Resources) =
- tasks match {
- case Nil => (Nil, this)
- case (task, min_threads, max_threads) :: tasks =>
- val (config, resources) =
- hosts.find((host, _) => available(host, min_threads)) match {
- case Some((host, host_max_threads)) =>
- val free_threads = host.info.num_cpus - ((host.build.jobs - 1) * host_max_threads)
- val node_info = next_node(host, (min_threads max free_threads) min max_threads)
- (Some(Config(task.name, node_info)), allocate(node_info))
- case None => (None, this)
- }
- val (configs, resources1) = resources.try_allocate_tasks(hosts, tasks)
- (configs ++ config, resources1)
- }
-
- def next_node(host: Host, threads: Int): Node_Info = {
- val numa_node_num_cpus = host.info.num_cpus / (host.info.numa_nodes.length max 1)
- def explicit_cpus(node_info: Node_Info): List[Int] =
- if (node_info.rel_cpus.nonEmpty) node_info.rel_cpus else (0 until numa_node_num_cpus).toList
-
- val used_nodes = allocated(host).groupMapReduce(_.numa_node)(explicit_cpus)(_ ::: _)
-
- val available_nodes = host.info.numa_nodes
- val numa_node =
- if (!host.build.numa) None
- else available_nodes.sortBy(n => used_nodes.getOrElse(Some(n), Nil).length).headOption
-
- val used_cpus = used_nodes.getOrElse(numa_node, Nil).toSet
- val available_cpus = (0 until numa_node_num_cpus).filterNot(used_cpus.contains).toList
-
- val rel_cpus = if (available_cpus.length >= threads) available_cpus.take(threads) else Nil
-
- Node_Info(host.name, numa_node, rel_cpus)
- }
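-
-    // Example (hypothetical host): on a 16-CPU machine with two NUMA nodes
-    // and host.build.numa enabled, the first allocation of 4 threads yields
-    // Node_Info(host.name, Some(node0), rel_cpus = List(0, 1, 2, 3)); a
-    // second one lands on the other NUMA node, since nodes are picked by
-    // least current usage.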
-
- def available(host: Host, threads: Int): Boolean = {
- val used = allocated(host)
-
- if (used.length >= host.build.jobs) false
- else {
- if (host.info.numa_nodes.length <= 1)
- used.map(host_infos.num_threads).sum + threads <= host.info.num_cpus
- else {
- def node_threads(n: Int): Int =
- used.filter(_.numa_node.contains(n)).map(host_infos.num_threads).sum
-
- host.info.numa_nodes.exists(
- node_threads(_) + threads <= host.info.num_cpus / host.info.numa_nodes.length)
- }
- }
- }
- }
-
-
- /* schedule generation */
-
- object Schedule {
- case class Node(job_name: String, node_info: Node_Info, start: Date, duration: Time) {
- def end: Date = Date(start.time + duration)
- }
-
- type Graph = isabelle.Graph[String, Node]
-
- def init(build_uuid: String): Schedule = Schedule(build_uuid, "none", Date.now(), Graph.empty)
- }
-
- case class Schedule(
- build_uuid: String,
- generator: String,
- start: Date,
- graph: Schedule.Graph,
- serial: Long = 0,
- ) {
- require(serial >= 0, "serial underflow")
- def inc_serial: Schedule = {
- require(serial < Long.MaxValue, "serial overflow")
- copy(serial = serial + 1)
- }
-
- def end: Date =
- if (graph.is_empty) start
- else graph.maximals.map(graph.get_node).map(_.end).maxBy(_.unix_epoch)
-
- def duration: Time = end.time - start.time
- def message: String = "Estimated " + duration.message_hms + " build time with " + generator
-
- def deviation(other: Schedule): Time = Time.ms((end.time - other.end.time).ms.abs)
-
- def num_built(state: Build_Process.State): Int = graph.keys.count(state.results.contains)
- def elapsed(): Time = Time.now() - start.time
- def is_empty: Boolean = graph.is_empty
- def is_outdated(options: Options, state: Build_Process.State): Boolean =
- if (is_empty) true
- else {
- num_built(state) > options.int("build_schedule_outdated_limit") &&
- elapsed() > options.seconds("build_schedule_outdated_delay")
- }
-
- def next(hostname: String, state: Build_Process.State): List[String] =
- for {
- task <- state.next_ready
- node = graph.get_node(task.name)
- if hostname == node.node_info.hostname
- if graph.imm_preds(node.job_name).subsetOf(state.results.keySet)
- } yield task.name
-
- def update(state: Build_Process.State): Schedule = {
- val start1 = Date.now()
- val pending = state.pending.map(_.name).toSet
-
- def shift_elapsed(graph: Schedule.Graph, name: String): Schedule.Graph =
- graph.map_node(name, { node =>
- val elapsed = start1.time - state.running(name).start_date.time
- node.copy(duration = node.duration - elapsed)
- })
-
- def shift_starts(graph: Schedule.Graph, name: String): Schedule.Graph =
- graph.map_node(name, { node =>
- val starts = start1 :: graph.imm_preds(node.job_name).toList.map(graph.get_node(_).end)
- node.copy(start = starts.max(Date.Ordering))
- })
-
- val graph0 = state.running.keys.foldLeft(graph.restrict(pending.contains))(shift_elapsed)
- val graph1 = graph0.topological_order.foldLeft(graph0)(shift_starts)
-
- copy(start = start1, graph = graph1)
- }
- }
-
- case class State(build_state: Build_Process.State, current_time: Time, finished: Schedule) {
- def start(config: Config): State =
- copy(build_state =
- build_state.copy(running = build_state.running +
- (config.job_name -> config.job_of(current_time))))
-
- def step(timing_data: Timing_Data): State = {
- val remaining =
- build_state.running.values.toList.map { job =>
- val elapsed = current_time - job.start_date.time
- val threads = timing_data.host_infos.num_threads(job.node_info)
- val predicted = timing_data.estimate(job.name, job.node_info.hostname, threads)
- val remaining = if (elapsed > predicted) Time.zero else predicted - elapsed
- job -> remaining
- }
-
- if (remaining.isEmpty) error("Schedule step without running sessions")
- else {
- val (job, elapsed) = remaining.minBy(_._2.ms)
- val now = current_time + elapsed
- val node = Schedule.Node(job.name, job.node_info, job.start_date, now - job.start_date.time)
-
- val host_preds =
- for {
- (name, (pred_node, _)) <- finished.graph.iterator.toSet
- if pred_node.node_info.hostname == job.node_info.hostname
- if pred_node.end.time <= node.start.time
- } yield name
- val build_preds =
- build_state.sessions.graph.imm_preds(job.name).filter(finished.graph.defined)
- val preds = build_preds ++ host_preds
-
- val graph = preds.foldLeft(finished.graph.new_node(job.name, node))(_.add_edge(_, job.name))
-
- val build_state1 = build_state.remove_running(job.name).remove_pending(job.name)
- State(build_state1, now, finished.copy(graph = graph))
- }
- }
-
- def is_finished: Boolean = build_state.pending.isEmpty && build_state.running.isEmpty
- }
-
- trait Scheduler { def build_schedule(build_state: Build_Process.State): Schedule }
-
- abstract class Heuristic(timing_data: Timing_Data, build_uuid: String)
- extends Scheduler {
- val host_infos = timing_data.host_infos
- val ordered_hosts = host_infos.hosts.sorted(host_infos.host_speeds)
-
- def next(state: Build_Process.State): List[Config]
-
- def build_schedule(build_state: Build_Process.State): Schedule = {
- @tailrec
- def simulate(state: State): State =
- if (state.is_finished) state
- else {
- val state1 = next(state.build_state).foldLeft(state)(_.start(_)).step(timing_data)
- simulate(state1)
- }
-
- val start = Date.now()
- val end_state =
- simulate(State(build_state, start.time, Schedule(build_uuid, toString, start, Graph.empty)))
-
- end_state.finished
- }
- }
-
- class Default_Heuristic(timing_data: Timing_Data, options: Options, build_uuid: String)
- extends Heuristic(timing_data, build_uuid) {
- override def toString: String = "default build heuristic"
-
- def host_threads(host: Host): Int = {
- val m = (options ++ host.build.options).int("threads")
- if (m > 0) m else (host.num_cpus max 1) min 8
- }
-
- def next_jobs(resources: Resources, sorted_jobs: List[String], host: Host): List[Config] =
- sorted_jobs.zip(resources.unused_nodes(host, host_threads(host))).map(Config(_, _))
-
- def next(state: Build_Process.State): List[Config] = {
- val sorted_jobs = state.next_ready.sortBy(_.name)(state.sessions.ordering).map(_.name)
- val resources = host_infos.available(state)
-
- host_infos.hosts.foldLeft((sorted_jobs, List.empty[Config])) {
- case ((jobs, res), host) =>
- val configs = next_jobs(resources, jobs, host)
- val config_jobs = configs.map(_.job_name).toSet
- (jobs.filterNot(config_jobs.contains), configs ::: res)
- }._2
- }
- }
-
- class Meta_Heuristic(heuristics: List[Heuristic]) extends Scheduler {
- require(heuristics.nonEmpty)
-
- def best_result(state: Build_Process.State): (Heuristic, Schedule) =
- heuristics.map(heuristic =>
- heuristic -> heuristic.build_schedule(state)).minBy(_._2.duration.ms)
-
- def next(state: Build_Process.State): List[Config] = best_result(state)._1.next(state)
-
- def build_schedule(state: Build_Process.State): Schedule = best_result(state)._2
- }
-
-
- /* heuristics */
-
- abstract class Path_Heuristic(
- timing_data: Timing_Data,
- sessions_structure: Sessions.Structure,
- max_threads_limit: Int,
- build_uuid: String
- ) extends Heuristic(timing_data, build_uuid) {
- /* pre-computed properties for efficient heuristic */
-
- val max_threads = host_infos.hosts.map(_.info.num_cpus).max min max_threads_limit
-
- type Node = String
- val build_graph = sessions_structure.build_graph
-
- val minimals = build_graph.minimals
- val maximals = build_graph.maximals
-
- def all_preds(node: Node): Set[Node] = build_graph.all_preds(List(node)).toSet
- val maximals_all_preds = maximals.map(node => node -> all_preds(node)).toMap
-
- def best_time(node: Node): Time = {
- val host = ordered_hosts.last
- val threads = timing_data.best_threads(node, max_threads) min host.info.num_cpus
- timing_data.estimate(node, host.name, threads)
- }
- val best_times = build_graph.keys.map(node => node -> best_time(node)).toMap
-
- val succs_max_time_ms = build_graph.node_height(best_times(_).ms)
- def max_time(node: Node): Time = Time.ms(succs_max_time_ms(node)) + best_times(node)
- def max_time(task: Build_Process.Task): Time = max_time(task.name)
-
- def path_times(minimals: List[Node]): Map[Node, Time] = {
- def time_ms(node: Node): Long = best_times(node).ms
- val path_times_ms = build_graph.reachable_length(time_ms, build_graph.imm_succs, minimals)
- path_times_ms.view.mapValues(Time.ms).toMap
- }
-
- def path_max_times(minimals: List[Node]): Map[Node, Time] =
- path_times(minimals).toList.map((node, time) => node -> (time + max_time(node))).toMap
-
- def parallel_paths(running: List[(Node, Time)], pred: Node => Boolean = _ => true): Int = {
- def start(node: Node): (Node, Time) = node -> best_times(node)
-
- def pass_time(elapsed: Time)(node: Node, time: Time): (Node, Time) =
- node -> (time - elapsed)
-
- def parallel_paths(running: Map[Node, Time]): (Int, Map[Node, Time]) =
- if (running.isEmpty) (0, running)
- else {
- def get_next(node: Node): List[Node] =
- build_graph.imm_succs(node).filter(pred).filter(
- build_graph.imm_preds(_).intersect(running.keySet) == Set(node)).toList
-
- val (next, elapsed) = running.minBy(_._2.ms)
- val (remaining, finished) =
- running.toList.map(pass_time(elapsed)).partition(_._2 > Time.zero)
-
- val running1 =
- remaining.map(pass_time(elapsed)).toMap ++
- finished.map(_._1).flatMap(get_next).map(start)
- val (res, running2) = parallel_paths(running1)
- (res max running.size, running2)
- }
-
- parallel_paths(running.toMap)._1
- }
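-
-    // Example (hypothetical): for a diamond graph A -> {B, C} -> D with only
-    // A running, the simulation finishes A, then runs B and C concurrently,
-    // so parallel_paths(List(A -> best_times(A))) == 2.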
- }
-
-
- object Path_Time_Heuristic {
- sealed trait Critical_Criterion
- case class Absolute_Time(time: Time) extends Critical_Criterion {
- override def toString: String = "absolute time (" + time.message_hms + ")"
- }
- case class Relative_Time(factor: Double) extends Critical_Criterion {
- override def toString: String = "relative time (" + factor + ")"
- }
-
- sealed trait Parallel_Strategy
- case class Fixed_Thread(threads: Int) extends Parallel_Strategy {
- override def toString: String = "fixed threads (" + threads + ")"
- }
- case class Time_Based_Threads(f: Time => Int) extends Parallel_Strategy {
- override def toString: String = "time based threads"
- }
-
- sealed trait Host_Criterion
- case object Critical_Nodes extends Host_Criterion {
- override def toString: String = "per critical node"
- }
- case class Fixed_Fraction(fraction: Double) extends Host_Criterion {
- override def toString: String = "fixed fraction (" + fraction + ")"
- }
- case class Host_Speed(min_factor: Double) extends Host_Criterion {
- override def toString: String = "host speed (" + min_factor + ")"
- }
- }
-
- class Path_Time_Heuristic(
- is_critical: Path_Time_Heuristic.Critical_Criterion,
- parallel_threads: Path_Time_Heuristic.Parallel_Strategy,
- host_criterion: Path_Time_Heuristic.Host_Criterion,
- timing_data: Timing_Data,
- sessions_structure: Sessions.Structure,
- build_uuid: String,
- max_threads_limit: Int = 8
- ) extends Path_Heuristic(timing_data, sessions_structure, max_threads_limit, build_uuid) {
- import Path_Time_Heuristic.*
-
- override def toString: String = {
- val params =
- List(
- "critical: " + is_critical,
- "parallel: " + parallel_threads,
- "fast hosts: " + host_criterion)
- "path time heuristic (" + params.mkString(", ") + ")"
- }
-
- def next(state: Build_Process.State): List[Config] = {
- val resources = host_infos.available(state)
-
- def best_threads(task: Build_Process.Task): Int =
- timing_data.best_threads(task.name, max_threads)
-
- val rev_ordered_hosts = ordered_hosts.reverse.map(_ -> max_threads)
-
- val available_nodes =
- host_infos.available(state.copy(running = Map.empty))
- .unused_nodes(max_threads)
- .sortBy(node => host_infos.the_host(node))(host_infos.host_speeds).reverse
-
- def remaining_time(node: Node): (Node, Time) =
- state.running.get(node) match {
- case None => node -> best_time(node)
- case Some(job) =>
- val estimate =
- timing_data.estimate(job.name, job.node_info.hostname,
- host_infos.num_threads(job.node_info))
- node -> ((Time.now() - job.start_date.time + estimate) max Time.zero)
- }
-
- val max_parallel = parallel_paths(state.ready.map(_.name).map(remaining_time))
- val next_sorted = state.next_ready.sortBy(max_time(_).ms).reverse
-
- if (max_parallel <= available_nodes.length) {
- val all_tasks = next_sorted.map(task => (task, best_threads(task), best_threads(task)))
- resources.try_allocate_tasks(rev_ordered_hosts, all_tasks)._1
- }
- else {
- def is_critical(time: Time): Boolean =
- this.is_critical match {
- case Absolute_Time(threshold) => time > threshold
- case Relative_Time(factor) => time > minimals.map(max_time).maxBy(_.ms).scale(factor)
- }
-
- val critical_minimals = state.ready.filter(task => is_critical(max_time(task))).map(_.name)
- val critical_nodes =
- path_max_times(critical_minimals).filter((_, time) => is_critical(time)).keySet
-
- val (critical, other) = next_sorted.partition(task => critical_nodes.contains(task.name))
-
- val critical_tasks = critical.map(task => (task, best_threads(task), best_threads(task)))
-
- def parallel_threads(task: Build_Process.Task): Int =
- this.parallel_threads match {
- case Fixed_Thread(threads) => threads
- case Time_Based_Threads(f) => f(best_time(task.name))
- }
-
- val other_tasks = other.map(task => (task, parallel_threads(task), best_threads(task)))
-
- val max_critical_parallel =
- parallel_paths(critical_minimals.map(remaining_time), critical_nodes.contains)
- val max_critical_hosts =
- available_nodes.take(max_critical_parallel).map(_.hostname).distinct.length
-
- val split =
- this.host_criterion match {
- case Critical_Nodes => max_critical_hosts
- case Fixed_Fraction(fraction) =>
- ((rev_ordered_hosts.length * fraction).ceil.toInt max 1) min max_critical_hosts
- case Host_Speed(min_factor) =>
- val best = rev_ordered_hosts.head._1.info.benchmark_score.get
- val num_fast =
- rev_ordered_hosts.count(_._1.info.benchmark_score.exists(_ >= best * min_factor))
- num_fast min max_critical_hosts
- }
-
- val (critical_hosts, other_hosts) = rev_ordered_hosts.splitAt(split)
-
- val (configs1, resources1) = resources.try_allocate_tasks(critical_hosts, critical_tasks)
- val (configs2, _) = resources1.try_allocate_tasks(other_hosts, other_tasks)
-
- configs1 ::: configs2
- }
- }
- }
-
-
- /* master and slave processes for scheduled build */
-
- class Scheduled_Build_Process(
- build_context: Build.Context,
- build_progress: Progress,
- server: SSH.Server,
- ) extends Build_Process(build_context, build_progress, server) {
- /* global resources with common close() operation */
-
- protected final lazy val _build_database: Option[SQL.Database] =
- try {
- for (db <- store.maybe_open_build_database(server = server)) yield {
- if (build_context.master) {
- Build_Schedule.private_data.transaction_lock(
- db,
- create = true,
- label = "Build_Schedule.build_database"
- ) { Build_Schedule.private_data.clean_build_schedules(db) }
- db.vacuum(Build_Schedule.private_data.tables.list)
- }
- db
- }
- }
- catch { case exn: Throwable => close(); throw exn }
-
- override def close(): Unit = {
- Option(_build_database).flatten.foreach(_.close())
- super.close()
- }
-
-
- /* global state: internal var vs. external database */
-
- protected var _schedule = Schedule.init(build_uuid)
-
- override protected def synchronized_database[A](label: String)(body: => A): A =
- super.synchronized_database(label) {
- _build_database match {
- case None => body
- case Some(db) =>
- Build_Schedule.private_data.transaction_lock(db, label = label) {
- val old_schedule = Build_Schedule.private_data.pull_schedule(db, _schedule)
- _schedule = old_schedule
- val res = body
- _schedule = Build_Schedule.private_data.update_schedule(db, _schedule, old_schedule)
- res
- }
- }
- }
-
-
- /* build process */
-
- override def next_node_info(state: Build_Process.State, session_name: String): Node_Info =
- _schedule.graph.get_node(session_name).node_info
-
- override def next_jobs(state: Build_Process.State): List[String] =
- if (progress.stopped || _schedule.is_empty) Nil else _schedule.next(hostname, state)
- }
-
- abstract class Scheduler_Build_Process(
- build_context: Build.Context,
- build_progress: Progress,
- server: SSH.Server,
- ) extends Scheduled_Build_Process(build_context, build_progress, server) {
- require(build_context.master)
-
- protected val start_date = Date.now()
-
- def init_scheduler(timing_data: Timing_Data): Scheduler
-
-
- /* global resources with common close() operation */
-
- private final lazy val _log_store: Build_Log.Store = Build_Log.store(build_options)
- private final lazy val _log_database: SQL.Database =
- try {
- val db = _log_store.open_database(server = this.server)
- _log_store.init_database(db)
- db
- }
- catch { case exn: Throwable => close(); throw exn }
-
- override def close(): Unit = {
- Option(_log_database).foreach(_.close())
- super.close()
- }
-
-
- /* previous results via build log */
-
- override def open_build_cluster(): Build_Cluster = {
- val build_cluster = super.open_build_cluster()
- build_cluster.init()
-
- Benchmark.benchmark_requirements(build_options)
-
- if (build_context.max_jobs > 0) {
- val benchmark_options = build_options.string("build_hostname") = hostname
- Benchmark.benchmark(benchmark_options, progress)
- }
- build_cluster.benchmark()
-
- for (db <- _build_database)
- Build_Process.private_data.transaction_lock(db, label = "Scheduler_Build_Process.init") {
- Build_Process.private_data.clean_build(db)
- }
-
- build_cluster
- }
-
- private val timing_data: Timing_Data = {
- val cluster_hosts: List[Build_Cluster.Host] =
- if (build_context.max_jobs == 0) build_context.build_hosts
- else {
- val local_build_host =
- Build_Cluster.Host(
- hostname, jobs = build_context.max_jobs, numa = build_context.numa_shuffling)
- local_build_host :: build_context.build_hosts
- }
-
- val host_infos = Host_Infos.load(cluster_hosts, _host_database)
- Timing_Data.load(host_infos, _log_database)
- }
- private val scheduler = init_scheduler(timing_data)
-
- def write_build_log(results: Build.Results, state: Build_Process.State.Results): Unit = {
- val sessions =
- for {
- (session_name, result) <- state.toList
- if !result.current
- } yield {
- val info = build_context.sessions_structure(session_name)
- val entry =
- if (!results.cancelled(session_name)) {
- val status =
- if (result.ok) Build_Log.Session_Status.finished
- else Build_Log.Session_Status.failed
-
- Build_Log.Session_Entry(
- chapter = info.chapter,
- groups = info.groups,
- hostname = Some(result.node_info.hostname),
- threads = Some(timing_data.host_infos.num_threads(result.node_info)),
- timing = result.process_result.timing,
- sources = Some(result.output_shasum.digest.toString),
- status = Some(status))
- }
- else
- Build_Log.Session_Entry(
- chapter = info.chapter,
- groups = info.groups,
- status = Some(Build_Log.Session_Status.cancelled))
- session_name -> entry
- }
-
- val settings =
- Build_Log.Settings.all_settings.map(_.name).map(name =>
- name -> Isabelle_System.getenv(name))
- val props =
- List(
- Build_Log.Prop.build_id.name -> build_context.build_uuid,
- Build_Log.Prop.build_engine.name -> build_context.engine.name,
- Build_Log.Prop.build_host.name -> hostname,
- Build_Log.Prop.build_start.name -> Build_Log.print_date(start_date))
-
- val meta_info = Build_Log.Meta_Info(props, settings)
- val build_info = Build_Log.Build_Info(sessions.toMap)
- val log_name = Build_Log.log_filename(engine = build_context.engine.name, date = start_date)
-
- Build_Log.private_data.update_sessions(
- _log_database, _log_store.cache.compress, log_name.file_name, build_info)
- Build_Log.private_data.update_meta_info(_log_database, log_name.file_name, meta_info)
- }
-
-
- /* build process */
-
- def is_current(state: Build_Process.State, session_name: String): Boolean =
- state.ancestor_results(session_name) match {
- case Some(ancestor_results) if ancestor_results.forall(_.current) =>
- val sources_shasum = state.sessions(session_name).sources_shasum
-
- val input_shasum =
- if (ancestor_results.isEmpty) ML_Process.bootstrap_shasum()
- else SHA1.flat_shasum(ancestor_results.map(_.output_shasum))
-
- val store_heap =
- build_context.build_heap || Sessions.is_pure(session_name) ||
- state.sessions.iterator.exists(_.ancestors.contains(session_name))
-
- store.check_output(
- _database_server, session_name,
- session_options = build_context.sessions_structure(session_name).options,
- sources_shasum = sources_shasum,
- input_shasum = input_shasum,
- fresh_build = build_context.fresh_build,
- store_heap = store_heap)._1
- case _ => false
- }
-
- override def next_jobs(state: Build_Process.State): List[String] =
- if (progress.stopped) state.next_ready.map(_.name)
- else if (!_schedule.is_outdated(build_options, state)) _schedule.next(hostname, state)
- else {
- val current = state.next_ready.filter(task => is_current(state, task.name))
- if (current.nonEmpty) current.map(_.name)
- else {
- val start = Time.now()
-
- val new_schedule = scheduler.build_schedule(state).update(state)
- val schedule =
- if (_schedule.is_empty) new_schedule
- else List(_schedule.update(state), new_schedule).minBy(_.end)(Date.Ordering)
-
- val elapsed = Time.now() - start
-
- val timing_msg = if (elapsed.is_relevant) " (took " + elapsed.message + ")" else ""
- progress.echo_if(_schedule.deviation(schedule).minutes > 1, schedule.message + timing_msg)
-
- _schedule = schedule
- _schedule.next(hostname, state)
- }
- }
-
- override def run(): Build.Results = {
- val results = super.run()
- write_build_log(results, snapshot().results)
- results
- }
- }
-
-
- /** SQL data model of build schedule, extending isabelle_build database */
-
- object private_data extends SQL.Data("isabelle_build") {
- import Build_Process.private_data.{Base, Generic}
-
-
- /* schedule */
-
- object Schedules {
- val build_uuid = Generic.build_uuid.make_primary_key
- val generator = SQL.Column.string("generator")
- val start = SQL.Column.date("start")
- val serial = SQL.Column.long("serial")
-
- val table = make_table(List(build_uuid, generator, start, serial), name = "schedules")
- }
-
- def read_serial(db: SQL.Database, build_uuid: String = ""): Long =
- db.execute_query_statementO[Long](
- Schedules.table.select(List(Schedules.serial.max), sql =
- SQL.where(if_proper(build_uuid, Schedules.build_uuid.equal(build_uuid)))),
- _.long(Schedules.serial)).getOrElse(0L)
-
- def read_scheduled_builds_domain(db: SQL.Database): List[String] =
- db.execute_query_statement(
- Schedules.table.select(List(Schedules.build_uuid)),
- List.from[String], res => res.string(Schedules.build_uuid))
-
- def read_schedules(db: SQL.Database, build_uuid: String = ""): List[Schedule] = {
- val schedules =
- db.execute_query_statement(Schedules.table.select(sql =
- SQL.where(if_proper(build_uuid, Schedules.build_uuid.equal(build_uuid)))),
- List.from[Schedule],
- { res =>
- val build_uuid = res.string(Schedules.build_uuid)
- val generator = res.string(Schedules.generator)
- val start = res.date(Schedules.start)
- val serial = res.long(Schedules.serial)
- Schedule(build_uuid, generator, start, Graph.empty, serial)
- })
-
- for (schedule <- schedules.sortBy(_.start)(Date.Ordering)) yield {
- val nodes = private_data.read_nodes(db, build_uuid = schedule.build_uuid)
- schedule.copy(graph = Graph.make(nodes))
- }
- }
-
- def write_schedule(db: SQL.Database, schedule: Schedule): Unit = {
- db.execute_statement(
- Schedules.table.delete(Schedules.build_uuid.where_equal(schedule.build_uuid)))
- db.execute_statement(Schedules.table.insert(), { stmt =>
- stmt.string(1) = schedule.build_uuid
- stmt.string(2) = schedule.generator
- stmt.date(3) = schedule.start
- stmt.long(4) = schedule.serial
- })
- update_nodes(db, schedule.build_uuid, schedule.graph.dest)
- }
-
-
- /* nodes */
-
- object Nodes {
- val build_uuid = Generic.build_uuid.make_primary_key
- val name = Generic.name.make_primary_key
- val succs = SQL.Column.string("succs")
- val hostname = SQL.Column.string("hostname")
- val numa_node = SQL.Column.int("numa_node")
- val rel_cpus = SQL.Column.string("rel_cpus")
- val start = SQL.Column.date("start")
- val duration = SQL.Column.long("duration")
-
- val table =
- make_table(
- List(build_uuid, name, succs, hostname, numa_node, rel_cpus, start, duration),
- name = "schedule_nodes")
- }
-
- type Nodes = List[((String, Schedule.Node), List[String])]
-
- def read_nodes(db: SQL.Database, build_uuid: String = ""): Nodes = {
- db.execute_query_statement(
- Nodes.table.select(sql =
- SQL.where(if_proper(build_uuid, Nodes.build_uuid.equal(build_uuid)))),
- List.from[((String, Schedule.Node), List[String])],
- { res =>
- val name = res.string(Nodes.name)
- val succs = split_lines(res.string(Nodes.succs))
- val hostname = res.string(Nodes.hostname)
- val numa_node = res.get_int(Nodes.numa_node)
- val rel_cpus = res.string(Nodes.rel_cpus)
- val start = res.date(Nodes.start)
- val duration = Time.ms(res.long(Nodes.duration))
-
- val node_info = Node_Info(hostname, numa_node, isabelle.Host.Range.from(rel_cpus))
- ((name, Schedule.Node(name, node_info, start, duration)), succs)
- }
- )
- }
-
- def update_nodes(db: SQL.Database, build_uuid: String, nodes: Nodes): Unit = {
- db.execute_statement(Nodes.table.delete(Nodes.build_uuid.where_equal(build_uuid)))
- db.execute_batch_statement(Nodes.table.insert(), batch =
- for (((name, node), succs) <- nodes) yield { (stmt: SQL.Statement) =>
- stmt.string(1) = build_uuid
- stmt.string(2) = name
- stmt.string(3) = cat_lines(succs)
- stmt.string(4) = node.node_info.hostname
- stmt.int(5) = node.node_info.numa_node
- stmt.string(6) = isabelle.Host.Range(node.node_info.rel_cpus)
- stmt.date(7) = node.start
- stmt.long(8) = node.duration.ms
- })
- }
-
- def pull_schedule(db: SQL.Database, old_schedule: Schedule): Build_Schedule.Schedule = {
- val serial_db = read_serial(db)
- if (serial_db == old_schedule.serial) old_schedule
- else {
- read_schedules(db, old_schedule.build_uuid) match {
- case Nil => old_schedule
- case schedules => Library.the_single(schedules)
- }
- }
- }
-
- def update_schedule(db: SQL.Database, schedule: Schedule, old_schedule: Schedule): Schedule = {
- val changed =
- schedule.generator != old_schedule.generator ||
- schedule.start != old_schedule.start ||
- schedule.graph != old_schedule.graph
-
- val schedule1 =
- if (changed) schedule.copy(serial = old_schedule.serial).inc_serial else schedule
- if (schedule1.serial != schedule.serial) write_schedule(db, schedule1)
-
- schedule1
- }
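-
-  // Like Build_Process.private_data, schedule persistence is synchronized via
-  // the serial: update_schedule bumps it only when generator, start, or graph
-  // changed, and pull_schedule compares serials before re-reading, so idle
-  // polling stays cheap.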
-
- def remove_schedules(db: SQL.Database, remove: List[String]): Unit =
- if (remove.nonEmpty) {
- val sql = Generic.build_uuid.where_member(remove)
- db.execute_statement(SQL.MULTI(tables.map(_.delete(sql = sql))))
- }
-
- def clean_build_schedules(db: SQL.Database): Unit = {
- val running_builds_domain =
- db.execute_query_statement(
- Base.table.select(List(Base.build_uuid), sql = SQL.where(Base.stop.undefined)),
- List.from[String], res => res.string(Base.build_uuid))
-
- val (remove, _) =
- Library.symmetric_difference(read_scheduled_builds_domain(db), running_builds_domain)
-
- remove_schedules(db, remove)
- }
-
- override val tables = SQL.Tables(Schedules.table, Nodes.table)
- }
-
-
- class Engine extends Build.Engine("build_schedule") {
-
- def scheduler(timing_data: Timing_Data, context: Build.Context): Scheduler = {
- val sessions_structure = context.sessions_structure
-
- val is_criticals =
- List(
- Path_Time_Heuristic.Absolute_Time(Time.minutes(5)),
- Path_Time_Heuristic.Absolute_Time(Time.minutes(10)),
- Path_Time_Heuristic.Absolute_Time(Time.minutes(20)),
- Path_Time_Heuristic.Relative_Time(0.5))
- val parallel_threads =
- List(
- Path_Time_Heuristic.Fixed_Thread(1),
- Path_Time_Heuristic.Time_Based_Threads({
- case time if time < Time.minutes(1) => 1
- case time if time < Time.minutes(5) => 4
- case _ => 8
- }))
- val machine_splits =
- List(
- Path_Time_Heuristic.Critical_Nodes,
- Path_Time_Heuristic.Fixed_Fraction(0.3),
- Path_Time_Heuristic.Host_Speed(0.9))
-
- val path_time_heuristics =
- for {
- is_critical <- is_criticals
- parallel <- parallel_threads
- machine_split <- machine_splits
- } yield
- Path_Time_Heuristic(is_critical, parallel, machine_split, timing_data, sessions_structure,
- context.build_uuid)
- val default_heuristic =
- Default_Heuristic(timing_data, context.build_options, context.build_uuid)
- new Meta_Heuristic(default_heuristic :: path_time_heuristics)
- }
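-
-  // The grid above yields 4 * 2 * 3 = 24 path-time heuristics; together with
-  // the default heuristic, the Meta_Heuristic simulates 25 candidate
-  // schedules and keeps the one with minimal estimated duration.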
-
- override def open_build_process(
- context: Build.Context,
- progress: Progress,
- server: SSH.Server
- ): Build_Process =
- if (!context.master) new Scheduled_Build_Process(context, progress, server)
- else new Scheduler_Build_Process(context, progress, server) {
- def init_scheduler(timing_data: Timing_Data): Scheduler = scheduler(timing_data, context)
- }
- }
-
-
- /* build schedule */
-
- def build_schedule(
- options: Options,
- build_hosts: List[Build_Cluster.Host] = Nil,
- selection: Sessions.Selection = Sessions.Selection.empty,
- progress: Progress = new Progress,
- afp_root: Option[Path] = None,
- dirs: List[Path] = Nil,
- select_dirs: List[Path] = Nil,
- infos: List[Sessions.Info] = Nil,
- numa_shuffling: Boolean = false,
- augment_options: String => List[Options.Spec] = _ => Nil,
- session_setup: (String, Session) => Unit = (_, _) => (),
- cache: Term.Cache = Term.Cache.make()
- ): Schedule = {
- val build_engine = new Engine
-
- val store = build_engine.build_store(options, build_hosts = build_hosts, cache = cache)
- val log_store = Build_Log.store(options, cache = cache)
- val build_options = store.options
-
- def build_schedule(
- server: SSH.Server,
- database_server: Option[SQL.Database],
- log_database: PostgreSQL.Database,
- host_database: SQL.Database
- ): Schedule = {
- val full_sessions =
- Sessions.load_structure(build_options, dirs = AFP.make_dirs(afp_root) ::: dirs,
- select_dirs = select_dirs, infos = infos, augment_options = augment_options)
- val full_sessions_selection = full_sessions.imports_selection(selection)
-
- val build_deps =
- Sessions.deps(full_sessions.selection(selection), progress = progress,
- inlined_files = true).check_errors
-
- val build_context =
- Build.Context(store, build_engine, build_deps, afp_root = afp_root,
- build_hosts = build_hosts, hostname = Build.hostname(build_options),
- numa_shuffling = numa_shuffling, max_jobs = 0, session_setup = session_setup,
- master = true)
-
- val cluster_hosts = build_context.build_hosts
-
- val hosts_current =
- cluster_hosts.forall(host => isabelle.Host.read_info(host_database, host.name).isDefined)
- if (!hosts_current) {
- val build_cluster = Build_Cluster.make(build_context, progress = progress)
- build_cluster.open()
- build_cluster.init()
- build_cluster.benchmark()
- build_cluster.close()
- }
-
- val host_infos = Host_Infos.load(cluster_hosts, host_database)
- val timing_data = Timing_Data.load(host_infos, log_database)
-
- val sessions = Build_Process.Sessions.empty.init(build_context, database_server, progress)
- def task(session: Build_Job.Session_Context): Build_Process.Task =
- Build_Process.Task(session.name, session.deps, JSON.Object.empty, build_context.build_uuid)
-
- val build_state =
- Build_Process.State(sessions = sessions, pending = sessions.iterator.map(task).toList)
-
- val scheduler = build_engine.scheduler(timing_data, build_context)
- def schedule_msg(res: Exn.Result[Schedule]): String =
- res match { case Exn.Res(schedule) => schedule.message case _ => "" }
-
- Timing.timeit(scheduler.build_schedule(build_state), schedule_msg, output = progress.echo(_))
- }
-
- using(store.open_server()) { server =>
- using_optional(store.maybe_open_database_server(server = server)) { database_server =>
- using(log_store.open_database(server = server)) { log_database =>
- using(store.open_build_database(
- path = isabelle.Host.private_data.database, server = server)) { host_database =>
- build_schedule(server, database_server, log_database, host_database)
- }
- }
- }
- }
- }
-
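- // Hypothetical programmatic use (paths and host list are illustrative):
- //   val schedule = build_schedule(Options.init(), build_hosts = hosts)
- //   write_schedule_graphic(schedule, Path.explode("schedule.png"))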
- def write_schedule_graphic(schedule: Schedule, output: Path): Unit = {
- import java.awt.geom.{GeneralPath, Rectangle2D}
- import java.awt.{BasicStroke, Color, Graphics2D}
-
- val line_height = isabelle.graphview.Metrics.default.height
- val char_width = isabelle.graphview.Metrics.default.char_width
- val padding = isabelle.graphview.Metrics.default.space_width
- val gap = isabelle.graphview.Metrics.default.gap
-
- val graph = schedule.graph
-
- def text_width(text: String): Double = text.length * char_width
-
- val generator_height = line_height + padding
- val hostname_height = generator_height + line_height + padding
- def time_height(time: Time): Double = time.seconds
- def date_height(date: Date): Double = time_height(date.time - schedule.start.time)
-
- val hosts = graph.iterator.map(_._2._1).toList.groupBy(_.node_info.hostname)
-
- def node_width(node: Schedule.Node): Double = 2 * padding + text_width(node.job_name)
-
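- // Horizontal packing within a host column: each job gets a best-fit slot
- // among the gaps left by jobs running in parallel (smallest sufficient gap).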
- case class Range(start: Double, stop: Double) {
- def proper: List[Range] = if (start < stop) List(this) else Nil
- def width: Double = stop - start
- }
-
- val rel_node_ranges =
- hosts.toList.flatMap { (hostname, nodes) =>
- val sorted = nodes.sortBy(node => (node.start.time.ms, node.end.time.ms, node.job_name))
- sorted.foldLeft((List.empty[Schedule.Node], Map.empty[Schedule.Node, Range])) {
- case ((nodes, allocated), node) =>
- val width = node_width(node) + padding
- val parallel = nodes.filter(_.end.time > node.start.time)
- val (last, slots) =
- parallel.sortBy(allocated(_).start).foldLeft((0D, List.empty[Range])) {
- case ((start, ranges), node1) =>
- val node_range = allocated(node1)
- (node_range.stop, ranges ::: Range(start, node_range.start).proper)
- }
- val start =
- (Range(last, Double.MaxValue) :: slots.filter(_.width >= width)).minBy(_.width).start
- (node :: parallel, allocated + (node -> Range(start, start + width)))
- }._2
- }.toMap
-
- def host_width(hostname: String) =
- 2 * padding + (hosts(hostname).map(rel_node_ranges(_).stop).max max text_width(hostname))
-
- def graph_height(graph: Graph[String, Schedule.Node]): Double =
- date_height(graph.maximals.map(graph.get_node(_).end).maxBy(_.unix_epoch))
-
- val height = (hostname_height + 2 * padding + graph_height(graph)).ceil.toInt
- val (last, host_starts) =
- hosts.keys.foldLeft((0D, Map.empty[String, Double])) {
- case ((previous, starts), hostname) =>
- (previous + gap + host_width(hostname), starts + (hostname -> previous))
- }
- val width = (last - gap).ceil.toInt
-
- def node_start(node: Schedule.Node): Double =
- host_starts(node.node_info.hostname) + padding + rel_node_ranges(node).start
-
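- // Gantt-style chart: one column per host, time flowing downwards at one
- // unit of height per second (cf. time_height).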
- def paint(gfx: Graphics2D): Unit = {
- gfx.setColor(Color.LIGHT_GRAY)
- gfx.fillRect(0, 0, width, height)
- gfx.setRenderingHints(isabelle.graphview.Metrics.rendering_hints)
- gfx.setFont(isabelle.graphview.Metrics.default.font)
- gfx.setStroke(new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND))
-
- draw_string(schedule.generator + ", build time: " + schedule.duration.message_hms, padding, 0)
-
- def draw_host(x: Double, hostname: String): Double = {
- val nodes = hosts(hostname).map(_.job_name).toSet
- val width = host_width(hostname)
- val height = 2 * padding + graph_height(graph.restrict(nodes.contains))
- val padding1 = ((width - text_width(hostname)) / 2) max 0
- val rect = new Rectangle2D.Double(x, hostname_height, width, height)
- gfx.setColor(Color.BLACK)
- gfx.draw(rect)
- gfx.setColor(Color.GRAY)
- gfx.fill(rect)
- draw_string(hostname, x + padding1, generator_height)
- x + gap + width
- }
-
- def draw_string(str: String, x: Double, y: Double): Unit = {
- gfx.setColor(Color.BLACK)
- gfx.drawString(str, x.toInt, (y + line_height).toInt)
- }
-
- def node_rect(node: Schedule.Node): Rectangle2D.Double = {
- val x = node_start(node)
- val y = hostname_height + padding + date_height(node.start)
- val width = node_width(node)
- val height = time_height(node.duration)
- new Rectangle2D.Double(x, y, width, height)
- }
-
- def draw_node(node: Schedule.Node): Rectangle2D.Double = {
- val rect = node_rect(node)
- gfx.setColor(Color.BLACK)
- gfx.draw(rect)
- gfx.setColor(Color.WHITE)
- gfx.fill(rect)
-
- def add_text(y: Double, text: String): Double =
- if (line_height > rect.height - y || text_width(text) + 2 * padding > rect.width) y
- else {
- val padding1 = padding min ((rect.height - (y + line_height)) / 2)
- draw_string(text, rect.x + padding, rect.y + y + padding1)
- y + padding1 + line_height
- }
-
- val node_info = node.node_info
-
- val duration_str = "(" + node.duration.message_hms + ")"
- val node_str =
- "on " + proper_string(node_info.toString.stripPrefix(node_info.hostname)).getOrElse("all")
- val start_str = "Start: " + (node.start.time - schedule.start.time).message_hms
-
- List(node.job_name, duration_str, node_str, start_str).foldLeft(0D)(add_text)
-
- rect
- }
-
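- // Only cross-host dependencies get explicit edges, drawn as straight lines
- // (the curve parameter is currently unused); same-host dependencies are
- // implicit in the vertical layout.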
- def draw_arrow(from: Schedule.Node, to: Rectangle2D.Double, curve: Double = 10): Unit = {
- val from_rect = node_rect(from)
-
- val path = new GeneralPath()
- path.moveTo(from_rect.getCenterX, from_rect.getMaxY)
- path.lineTo(to.getCenterX, to.getMinY)
-
- gfx.setColor(Color.BLUE)
- gfx.draw(path)
- }
-
- hosts.keys.foldLeft(0D)(draw_host)
-
- graph.topological_order.foreach { job_name =>
- val node = graph.get_node(job_name)
- val rect = draw_node(node)
-
- for {
- pred <- graph.imm_preds(job_name).iterator
- pred_node = graph.get_node(pred)
- if node.node_info.hostname != pred_node.node_info.hostname
- } draw_arrow(pred_node, rect)
- }
- }
-
- val name = output.file_name
- if (File.is_png(name)) Graphics_File.write_png(output.file, paint, width, height)
- else if (File.is_pdf(name)) Graphics_File.write_pdf(output.file, paint, width, height)
- else error("Bad type of file: " + quote(name) + " (.png or .pdf expected)")
- }
-
-
- /* command-line wrapper */
-
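- // Hypothetical invocation: compute a schedule for all sessions and write
- // the chart, e.g. isabelle build_schedule -a -O schedule.png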
- val isabelle_tool = Isabelle_Tool("build_schedule", "generate build schedule", Scala_Project.here,
- { args =>
- var afp_root: Option[Path] = None
- val base_sessions = new mutable.ListBuffer[String]
- val select_dirs = new mutable.ListBuffer[Path]
- val build_hosts = new mutable.ListBuffer[Build_Cluster.Host]
- var numa_shuffling = false
- var output_file: Option[Path] = None
- var requirements = false
- val exclude_session_groups = new mutable.ListBuffer[String]
- var all_sessions = false
- val dirs = new mutable.ListBuffer[Path]
- val session_groups = new mutable.ListBuffer[String]
- var options = Options.init(specs = Options.Spec.ISABELLE_BUILD_OPTIONS)
- var verbose = false
- val exclude_sessions = new mutable.ListBuffer[String]
-
- val getopts = Getopts("""
-Usage: isabelle build_schedule [OPTIONS] [SESSIONS ...]
-
- Options are:
- -A ROOT include AFP with given root directory (":" for """ + AFP.BASE.implode + """)
- -B NAME include session NAME and all descendants
- -D DIR include session directory and select its sessions
- -H HOSTS additional build cluster host specifications, of the form
- "NAMES:PARAMETERS" (separated by commas)
- -N cyclic shuffling of NUMA CPU nodes (performance tuning)
- -O FILE output file for schedule graphic (.png or .pdf)
- -R refer to requirements of selected sessions
- -X NAME exclude sessions from group NAME and all descendants
- -a select all sessions
- -d DIR include session directory
- -g NAME select session group NAME
- -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
- -v verbose
- -x NAME exclude session NAME and all descendants
-
- Generate build schedule, but do not run the actual build.
-""",
- "A:" -> (arg => afp_root = Some(if (arg == ":") AFP.BASE else Path.explode(arg))),
- "B:" -> (arg => base_sessions += arg),
- "D:" -> (arg => select_dirs += Path.explode(arg)),
- "H:" -> (arg => build_hosts ++= Build_Cluster.Host.parse(Registry.global, arg)),
- "N" -> (_ => numa_shuffling = true),
- "O:" -> (arg => output_file = Some(Path.explode(arg))),
- "R" -> (_ => requirements = true),
- "X:" -> (arg => exclude_session_groups += arg),
- "a" -> (_ => all_sessions = true),
- "d:" -> (arg => dirs += Path.explode(arg)),
- "g:" -> (arg => session_groups += arg),
- "o:" -> (arg => options = options + arg),
- "v" -> (_ => verbose = true),
- "x:" -> (arg => exclude_sessions += arg))
-
- val sessions = getopts(args)
-
- val progress = new Console_Progress(verbose = verbose)
-
- val schedule =
- build_schedule(options,
- selection = Sessions.Selection(
- requirements = requirements,
- all_sessions = all_sessions,
- base_sessions = base_sessions.toList,
- exclude_session_groups = exclude_session_groups.toList,
- exclude_sessions = exclude_sessions.toList,
- session_groups = session_groups.toList,
- sessions = sessions),
- progress = progress,
- afp_root = afp_root,
- dirs = dirs.toList,
- select_dirs = select_dirs.toList,
- numa_shuffling = isabelle.Host.numa_check(progress, numa_shuffling),
- build_hosts = build_hosts.toList)
-
- if (!schedule.is_empty && output_file.nonEmpty)
- write_schedule_graphic(schedule, output_file.get)
- })
-}