# HG changeset patch
# User wenzelm
# Date 1672949935 -3600
# Node ID aea34e2cabe8cf3440bbe11c7ef83a27fe65d230
# Parent  830597d13d6de3e30a8ef0b3a0bcf7e376196d50
# Parent  cb4b1fdebf857be2662b3a57efe5ecc3e4365f0a
merged

diff -r 830597d13d6d -r aea34e2cabe8 NEWS
--- a/NEWS Wed Jan 04 19:06:16 2023 +0000
+++ b/NEWS Thu Jan 05 21:18:55 2023 +0100
@@ -191,6 +191,11 @@
 
 *** System ***
 
+* The command-line tool "isabelle update" is now directly based on
+"isabelle build" instead of "isabelle dump". Thus it has become more
+scalable, and supports a few additional options from "isabelle build".
+Partial builds are supported as well, e.g. "isabelle update -n -a".
+
 * Isabelle/Scala provides generic support for XZ and Zstd compression,
 via Compress.Options and Compress.Cache. Bytes.uncompress automatically
 detects the compression scheme.

diff -r 830597d13d6d -r aea34e2cabe8 src/Doc/System/Sessions.thy
--- a/src/Doc/System/Sessions.thy Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Doc/System/Sessions.thy Thu Jan 05 21:18:55 2023 +0100
@@ -375,7 +375,7 @@
     -j INT       maximum number of parallel jobs (default 1)
     -k KEYWORD   check theory sources for conflicts with proposed keywords
     -l           list session source files
-    -n           no build -- test dependencies only
+    -n           no build -- take existing build databases
     -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
     -v           verbose
     -x NAME      exclude session NAME and all descendants
@@ -774,8 +774,8 @@
 
 text \<open>
   The @{tool_def "update"} tool updates theory sources based on markup that is
-  produced from a running PIDE session (similar to @{tool dump}
-  \secref{sec:tool-dump}). Its command-line usage is: @{verbatim [display]
+  produced by the regular @{tool build} process (\secref{sec:tool-build}). Its
+  command-line usage is: @{verbatim [display]
\<open>Usage: isabelle update [OPTIONS] [SESSIONS ...]
 
  Options are:
@@ -784,9 +784,11 @@
    -R           refer to requirements of selected sessions
    -X NAME      exclude sessions from group NAME and all descendants
    -a           select all sessions
-    -b NAME      base logic image (default "Pure")
+    -b NAME      additional base logic
    -d DIR       include session directory
    -g NAME      select session group NAME
+    -j INT       maximum number of parallel jobs (default 1)
+    -n           no build -- take existing build databases
    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
    -u OPT       override "update" option: shortcut for "-o update_OPT"
    -v           verbose
@@ -796,11 +798,15 @@
 
  \<^medskip> Options \<^verbatim>\<open>-B\<close>, \<^verbatim>\<open>-D\<close>, \<^verbatim>\<open>-R\<close>, \<^verbatim>\<open>-X\<close>, \<^verbatim>\<open>-a\<close>, \<^verbatim>\<open>-d\<close>, \<^verbatim>\<open>-g\<close>,
  \<^verbatim>\<open>-x\<close> and the remaining command-line arguments specify sessions as in @{tool build}
-  (\secref{sec:tool-build}) or @{tool dump} (\secref{sec:tool-dump}).
+  (\secref{sec:tool-build}).
+
+  \<^medskip> Options \<^verbatim>\<open>-N\<close> and \<^verbatim>\<open>-j\<close> specify multicore parameters as in @{tool build}.
 
-  \<^medskip> Option \<^verbatim>\<open>-b\<close> specifies an optional base logic image, for improved
-  scalability of the PIDE session. Its theories are only processed if it is
-  included in the overall session selection.
+  \<^medskip> Option \<^verbatim>\<open>-n\<close> suppresses the actual build process, but existing build
+  databases are used nonetheless.
+
+  \<^medskip> Option \<^verbatim>\<open>-b\<close> specifies one or more base logics: these sessions and their
+  ancestors are \<^emph>\<open>excluded\<close> from the update.
 
  \<^medskip> Option \<^verbatim>\<open>-v\<close> increases the general level of verbosity.
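For readers who drive the tool from Isabelle/Scala rather than the command line, the documented options map onto the Update.update entry point that is reworked later in this patch. The following sketch is illustrative only: the option value, session names, and job count are placeholder assumptions, not taken from the changeset.

  // Hypothetical example, roughly "isabelle update -u mixfix_cartouches -j 2 -b HOL -B HOL-Analysis"
  val options = Options.init() + "update_mixfix_cartouches"
  val results =
    Update.update(options,
      selection = Sessions.Selection(base_sessions = List("HOL-Analysis")),
      base_logics = List("HOL"),
      progress = new Console_Progress(),
      max_jobs = 2)
  // non-zero return code indicates that some selected session failed to build
  if (results.rc != 0) error("Update failed")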
@@ -850,20 +856,14 @@
 
  @{verbatim [display] \<open>  isabelle update -u mixfix_cartouches HOL-Analysis\<close>}
 
-  \<^smallskip> Update the same for all application sessions based on \<^verbatim>\<open>HOL-Analysis\<close> ---
-  using its image as starting point (for reduced resource requirements):
-
-  @{verbatim [display] \<open>  isabelle update -u mixfix_cartouches -b HOL-Analysis -B HOL-Analysis\<close>}
+  \<^smallskip> Update the same for all application sessions based on \<^verbatim>\<open>HOL-Analysis\<close>, but
+  do not change the underlying \<^verbatim>\<open>HOL\<close> (and \<^verbatim>\<open>Pure\<close>) session:
 
-  \<^smallskip> Update sessions that build on \<^verbatim>\<open>HOL-Proofs\<close>, which need to be run
-  separately with special options as follows:
+  @{verbatim [display] \<open>  isabelle update -u mixfix_cartouches -b HOL -B HOL-Analysis\<close>}
 
-  @{verbatim [display] \<open>  isabelle update -u mixfix_cartouches -l HOL-Proofs -B HOL-Proofs
-    -o record_proofs=2\<close>}
+  \<^smallskip> Update all sessions that happen to be properly built beforehand:
 
-  \<^smallskip> See also the end of \secref{sec:tool-dump} for hints on increasing
-  Isabelle/ML heap sizes for very big PIDE processes that include many
-  sessions, notably from the Archive of Formal Proofs.
+  @{verbatim [display] \<open>  isabelle update -u mixfix_cartouches -n -a\<close>}
 \<close>

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Isar/document_structure.scala
--- a/src/Pure/Isar/document_structure.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Isar/document_structure.scala Thu Jan 05 21:18:55 2023 +0100
@@ -99,7 +99,7 @@
     /* result structure */
 
     val spans = syntax.parse_spans(text)
-    spans.foreach(span => add(Command(Document_ID.none, node_name, Command.Blobs_Info.none, span)))
+    spans.foreach(span => add(Command(Document_ID.none, node_name, Command.Blobs_Info.empty, span)))
 
     result()
   }
@@ -166,7 +166,7 @@
     for { span <- syntax.parse_spans(text) } {
       sections.add(
         new Command_Item(syntax.keywords,
-          Command(Document_ID.none, node_name, Command.Blobs_Info.none, span)))
+          Command(Document_ID.none, node_name, Command.Blobs_Info.empty, span)))
     }
     sections.result()
   }

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/PIDE/command.scala
--- a/src/Pure/PIDE/command.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/PIDE/command.scala Thu Jan 05 21:18:55 2023 +0100
@@ -14,27 +14,20 @@
 object Command {
   /* blobs */
 
-  object Blob {
-    def read_file(name: Document.Node.Name, src_path: Path): Blob = {
-      val bytes = Bytes.read(name.path)
-      val chunk = Symbol.Text_Chunk(bytes.text)
-      Blob(name, src_path, Some((bytes.sha1_digest, chunk)))
-    }
-  }
-
   sealed case class Blob(
     name: Document.Node.Name,
     src_path: Path,
     content: Option[(SHA1.Digest, Symbol.Text_Chunk)]
   ) {
-    def read_file: Bytes = Bytes.read(name.path)
-
     def chunk_file: Symbol.Text_Chunk.File =
       Symbol.Text_Chunk.File(name.node)
   }
 
   object Blobs_Info {
-    val none: Blobs_Info = Blobs_Info(Nil)
+    val empty: Blobs_Info = Blobs_Info(Nil)
+
+    def make(blobs: List[(Blob, Document.Blobs.Item)]): Blobs_Info =
+      if (blobs.isEmpty) empty else Blobs_Info(for ((a, _) <- blobs) yield Exn.Res(a))
 
     def errors(msgs: List[String]): Blobs_Info =
       Blobs_Info(msgs.map(msg => Exn.Exn[Blob](ERROR(msg))))
@@ -119,6 +112,7 @@
   object Markup_Index {
     val markup: Markup_Index = Markup_Index(false, Symbol.Text_Chunk.Default)
     def blob(blob: Blob): Markup_Index = Markup_Index(false, blob.chunk_file)
+    def make(blobs: List[Blob]): List[Markup_Index] = markup :: blobs.map(blob)
   }
 
   sealed case class Markup_Index(status: Boolean, chunk_name: Symbol.Text_Chunk.Name)
@@ -377,14 +371,14 @@
   }
 
   val empty: Command =
-    Command(Document_ID.none, Document.Node.Name.empty, Blobs_Info.none, Command_Span.empty)
+    Command(Document_ID.none, Document.Node.Name.empty, Blobs_Info.empty, Command_Span.empty)
 
   def unparsed(
     source: String,
     theory: Boolean = false,
     id: Document_ID.Command = Document_ID.none,
     node_name: Document.Node.Name = Document.Node.Name.empty,
-    blobs_info: Blobs_Info = Blobs_Info.none,
+    blobs_info: Blobs_Info = Blobs_Info.empty,
     results: Results = Results.empty,
     markups: Markups = Markups.empty
   ): Command = {
@@ -428,7 +422,7 @@
   def blobs_info(
     resources: Resources,
     syntax: Outer_Syntax,
-    get_blob: Document.Node.Name => Option[Document.Blob],
+    get_blob: Document.Node.Name => Option[Document.Blobs.Item],
     can_import: Document.Node.Name => Boolean,
     node_name: Document.Node.Name,
     span: Command_Span.Span

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/PIDE/document.scala
--- a/src/Pure/PIDE/document.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/PIDE/document.scala Thu Jan 05 21:18:55 2023 +0100
@@ -41,17 +41,27 @@
 
   /* document blobs: auxiliary files */
 
-  sealed case class Blob(bytes: Bytes, source: String, chunk: Symbol.Text_Chunk, changed: Boolean) {
-    def unchanged: Blob = copy(changed = false)
+  object Blobs {
+    sealed case class Item(
+      bytes: Bytes,
+      source: String,
+      chunk: Symbol.Text_Chunk,
+      changed: Boolean
+    ) {
+      def is_wellformed_text: Boolean = bytes.wellformed_text.nonEmpty
+      def unchanged: Item = copy(changed = false)
+    }
+
+    def apply(blobs: Map[Node.Name, Item]): Blobs = new Blobs(blobs)
+    val empty: Blobs = apply(Map.empty)
+
+    def make(blobs: List[(Command.Blob, Item)]): Blobs =
+      if (blobs.isEmpty) empty
+      else apply((for ((a, b) <- blobs.iterator) yield a.name -> b).toMap)
   }
 
-  object Blobs {
-    def apply(blobs: Map[Node.Name, Blob]): Blobs = new Blobs(blobs)
-    val empty: Blobs = apply(Map.empty)
-  }
-
-  final class Blobs private(blobs: Map[Node.Name, Blob]) {
-    def get(name: Node.Name): Option[Blob] = blobs.get(name)
+  final class Blobs private(blobs: Map[Node.Name, Blobs.Item]) {
+    def get(name: Node.Name): Option[Blobs.Item] = blobs.get(name)
 
     def changed(name: Node.Name): Boolean =
       get(name) match {
@@ -93,7 +103,7 @@
 
     object Name {
       def apply(node: String, theory: String = ""): Name = new Name(node, theory)
-      def loaded_theory(theory: String): Document.Node.Name = Name(theory, theory = theory)
+      def loaded_theory(theory: String): Name = Name(theory, theory = theory)
 
       val empty: Name = Name("")
 
@@ -171,7 +181,7 @@
           case _ => false
         }
     }
-    case class Blob[A, B](blob: Document.Blob) extends Edit[A, B]
+    case class Blob[A, B](blob: Blobs.Item) extends Edit[A, B]
 
     case class Edits[A, B](edits: List[A]) extends Edit[A, B]
     case class Deps[A, B](header: Header) extends Edit[A, B]
@@ -263,10 +273,13 @@
     }
 
     val empty: Node = new Node()
+
+    def init_blob(blob: Blobs.Item): Node =
+      new Node(get_blob = Some(blob.unchanged))
   }
 
   final class Node private(
-    val get_blob: Option[Document.Blob] = None,
+    val get_blob: Option[Blobs.Item] = None,
     val header: Node.Header = Node.no_header,
     val syntax: Option[Outer_Syntax] = None,
     val text_perspective: Text.Perspective = Text.Perspective.empty,
@@ -292,8 +305,6 @@
     def load_commands_changed(doc_blobs: Blobs): Boolean =
       load_commands.exists(_.blobs_changed(doc_blobs))
 
-    def init_blob(blob: Blob): Node = new Node(get_blob = Some(blob.unchanged))
-
     def update_header(new_header: Node.Header): Node =
       new Node(get_blob, new_header, syntax, text_perspective, perspective, _commands)
 
@@ -332,6 +343,12 @@
     def command_start(cmd: Command): Option[Text.Offset] =
       Node.Commands.starts(commands.iterator).find(_._1 == cmd).map(_._2)
 
+    lazy val is_wellformed_text: Boolean =
+      get_blob match {
+        case Some(blob) => blob.is_wellformed_text
+        case None => true
+      }
+
     lazy val source: String =
       get_blob match {
         case Some(blob) => blob.source
@@ -517,16 +534,16 @@
   final class Pending_Edits(pending_edits: Map[Node.Name, List[Text.Edit]]) {
     def is_stable: Boolean = pending_edits.isEmpty
 
-    def + (entry: (Document.Node.Name, List[Text.Edit])): Pending_Edits = {
+    def + (entry: (Node.Name, List[Text.Edit])): Pending_Edits = {
       val (name, es) = entry
       if (es.isEmpty) this
       else new Pending_Edits(pending_edits + (name -> (es ::: edits(name))))
     }
 
-    def edits(name: Document.Node.Name): List[Text.Edit] =
+    def edits(name: Node.Name): List[Text.Edit] =
       pending_edits.getOrElse(name, Nil)
 
-    def reverse_edits(name: Document.Node.Name): List[Text.Edit] =
+    def reverse_edits(name: Node.Name): List[Text.Edit] =
       reverse_pending_edits.getOrElse(name, Nil)
 
     private lazy val reverse_pending_edits =
@@ -602,15 +619,19 @@
 
     /* command as add-on snippet */
 
-    def snippet(command: Command): Snapshot = {
+    def snippet(command: Command, doc_blobs: Blobs): Snapshot = {
       val node_name = command.node_name
 
+      val blobs = for (a <- command.blobs_names; b <- doc_blobs.get(a)) yield a -> b
+
       val nodes0 = version.nodes
       val nodes1 = nodes0 + (node_name -> nodes0(node_name).update_commands(Linear_Set(command)))
-      val version1 = Document.Version.make(nodes1)
+      val nodes2 = blobs.foldLeft(nodes1) { case (ns, (a, b)) => ns + (a -> Node.init_blob(b)) }
+      val version1 = Version.make(nodes2)
 
       val edits: List[Edit_Text] =
-        List(node_name -> Node.Edits(List(Text.Edit.insert(0, command.source))))
+        List(node_name -> Node.Edits(List(Text.Edit.insert(0, command.source)))) :::
+        blobs.map({ case (a, b) => a -> Node.Blob(b) })
 
       val state0 = state.define_command(command)
       val state1 =
@@ -635,17 +656,15 @@
       snippet_command match {
         case None => Nil
         case Some(command) =>
-          for (Exn.Res(blob) <- command.blobs)
+          for {
+            Exn.Res(blob) <- command.blobs
+            blob_node = get_node(blob.name)
+            if blob_node.is_wellformed_text
+          }
           yield {
-            val bytes = blob.read_file
-            val text = bytes.text
-            val xml =
-              if (Bytes(text) == bytes) {
-                val markup = command.init_markups(Command.Markup_Index.blob(blob))
-                markup.to_XML(Text.Range.length(text), text, elements)
-              }
-              else Nil
-            blob -> xml
+            val text = blob_node.source
+            val markup = command.init_markups(Command.Markup_Index.blob(blob))
+            blob -> markup.to_XML(Text.Range.length(text), text, elements)
           }
       }
     }
@@ -656,8 +675,7 @@
     lazy val messages: List[(XML.Elem, Position.T)] =
       (for {
         (command, start) <-
-          Document.Node.Commands.starts_pos(
-            node.commands.iterator, Token.Pos.file(node_name.node))
+          Node.Commands.starts_pos(node.commands.iterator, Token.Pos.file(node_name.node))
         pos = command.span.keyword_pos(start).position(command.span.name)
         (_, elem) <- state.command_results(version, command).iterator
       } yield (elem, pos)).toList
@@ -794,7 +812,7 @@
 
     def node_required: Boolean
 
-    def get_blob: Option[Blob]
+    def get_blob: Option[Blobs.Item]
 
     def untyped_data: AnyRef
     def get_data[C](c: Class[C]): Option[C] = Library.as_subclass(c)(untyped_data)
@@ -1003,17 +1021,18 @@
         }
       }
 
-    def end_theory(id: Document_ID.Exec): (Snapshot, State) =
+    def end_theory(id: Document_ID.Exec, document_blobs: Node.Name => Blobs): (Snapshot, State) =
       theories.get(id) match {
        case None => fail
        case Some(st) =>
          val command = st.command
          val node_name = command.node_name
+          val doc_blobs = document_blobs(node_name)
          val command1 =
            Command.unparsed(command.source, theory = true, id = id, node_name = node_name,
              blobs_info = command.blobs_info, results = st.results, markups = st.markups)
          val state1 = copy(theories = theories - id)
-          (state1.snippet(command1), state1)
+          (state1.snippet(command1, doc_blobs), state1)
      }
 
    def assign(
@@ -1199,9 +1218,8 @@
            tree <- markup.to_XML(command_range, command.source, elements).iterator
          } yield tree).toList
        }
-        else {
+        else if (node.is_wellformed_text) {
          Text.Range.length(node.source).try_restrict(range) match {
-            case None => Nil
            case Some(node_range) =>
              val markup =
                version.nodes.commands_loading(node_name).headOption match {
@@ -1212,8 +1230,10 @@
                command_markup(version, command, markup_index, node_range, elements)
              }
              markup.to_XML(node_range, node.source, elements)
+            case None => Nil
          }
        }
+        else Nil
    }
 
  def node_initialized(version: Version, name: Node.Name): Boolean =
@@ -1250,7 +1270,7 @@
      new Snapshot(this, version, node_name, pending_edits1, snippet_command)
    }
 
-    def snippet(command: Command): Snapshot =
-      snapshot().snippet(command)
+    def snippet(command: Command, doc_blobs: Blobs): Snapshot =
+      snapshot().snippet(command, doc_blobs)
  }
 }

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/PIDE/headless.scala
--- a/src/Pure/PIDE/headless.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/PIDE/headless.scala Thu Jan 05 21:18:55 2023 +0100
@@ -496,7 +496,7 @@
    }
 
    sealed case class State(
-      blobs: Map[Document.Node.Name, Document.Blob] = Map.empty,
+      blobs: Map[Document.Node.Name, Document.Blobs.Item] = Map.empty,
      theories: Map[Document.Node.Name, Theory] = Map.empty,
      required: Multi_Map[Document.Node.Name, UUID.T] = Multi_Map.empty
    ) {
@@ -508,13 +508,12 @@
        val new_blobs =
          names.flatMap { name =>
            val bytes = Bytes.read(name.path)
-            def new_blob: Document.Blob = {
-              val text = bytes.text
-              Document.Blob(bytes, text, Symbol.Text_Chunk(text), changed = true)
-            }
            blobs.get(name) match {
-              case Some(blob) => if (blob.bytes == bytes) None else Some(name -> new_blob)
-              case None => Some(name -> new_blob)
+              case Some(blob) if blob.bytes == bytes => None
+              case _ =>
+                val text = bytes.text
+                val blob = Document.Blobs.Item(bytes, text, Symbol.Text_Chunk(text), changed = true)
+                Some(name -> blob)
            }
          }
        val blobs1 = new_blobs.foldLeft(blobs)(_ + _)
@@ -524,7 +523,7 @@
 
      def blob_edits(
        name: Document.Node.Name,
-        old_blob: Option[Document.Blob]
+        old_blob: Option[Document.Blobs.Item]
      ) : List[Document.Edit_Text] = {
        val blob = blobs.getOrElse(name, error("Missing blob " + quote(name.toString)))
        val text_edits =

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/PIDE/session.scala
--- a/src/Pure/PIDE/session.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/PIDE/session.scala Thu Jan 05 21:18:55 2023 +0100
@@ -125,8 +125,8 @@
  val cache: Term.Cache = Term.Cache.make()
 
-  def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
-    Command.Blobs_Info.none
+  def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info = Command.Blobs_Info.empty
+  def build_blobs(name: Document.Node.Name): Document.Blobs = Document.Blobs.empty
 
  /* global flags */
@@ -560,7 +560,7 @@
        case Markup.Process_Result(result) if output.is_exit =>
          if (prover.defined) protocol_handlers.exit()
          for (id <- global_state.value.theories.keys) {
-            val snapshot = global_state.change_result(_.end_theory(id))
+            val snapshot = global_state.change_result(_.end_theory(id, build_blobs))
            finished_theories.post(snapshot)
          }
          file_formats.stop_session()
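Taken together, the PIDE changes above mean that auxiliary files now travel as Document.Blobs.Item values paired with Command.Blob entries. The following is a minimal illustrative sketch of that wiring, using only constructors introduced in this changeset; the source text and paths are hypothetical placeholders.

  // Sketch: package a loaded auxiliary file as Document.Blobs.Item and build a snippet snapshot
  def blob_snippet(thy_source: String, node_name: Document.Node.Name, src_path: Path): Document.Snapshot = {
    val bytes = Bytes.read(src_path)
    val text = bytes.text
    val chunk = Symbol.Text_Chunk(text)
    val blob = Command.Blob(Document.Node.Name(src_path.implode), src_path, Some((SHA1.digest(bytes), chunk)))
    val item = Document.Blobs.Item(bytes, text, chunk, changed = false)
    val command =
      Command.unparsed(thy_source, theory = true, node_name = node_name,
        blobs_info = Command.Blobs_Info.make(List(blob -> item)))
    // the snippet snapshot now carries the blob content alongside the theory command
    Document.State.init.snippet(command, Document.Blobs.make(List(blob -> item)))
  }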
diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Thy/export.scala
--- a/src/Pure/Thy/export.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Thy/export.scala Thu Jan 05 21:18:55 2023 +0100
@@ -423,6 +423,17 @@
    def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context =
      new Theory_Context(session_context, theory, other_cache)
 
+    def get_source_file(name: String): Option[Sessions.Source_File] = {
+      val store = database_context.store
+      (for {
+        database <- db_hierarchy.iterator
+        file <- store.read_sources(database.db, database.session, name = name).iterator
+      } yield file).nextOption()
+    }
+
+    def source_file(name: String): Sessions.Source_File =
+      get_source_file(name).getOrElse(error("Missing session source file " + quote(name)))
+
    def node_source(name: Document.Node.Name): String = {
      def snapshot_source: Option[String] =
        for {
@@ -431,14 +442,13 @@
          text = node.source
          if text.nonEmpty
        } yield text
 
-      val store = database_context.store
-      def db_source: Option[String] =
-        (for {
-          database <- db_hierarchy.iterator
-          file <- store.read_sources(database.db, database.session, name = name.node).iterator
-        } yield file.text).nextOption()
+      def db_source: String =
+        get_source_file(name.node) match {
+          case Some(file) => file.text
+          case None => ""
+        }
 
-      snapshot_source orElse db_source getOrElse ""
+      snapshot_source getOrElse db_source
    }
 
    def classpath(): List[File.Content] = {

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Thy/sessions.scala Thu Jan 05 21:18:55 2023 +0100
@@ -109,6 +109,10 @@
  class Sources private(rep: Map[String, Source_File]) extends Iterable[Source_File] {
    override def toString: String = rep.values.toList.sortBy(_.name).mkString("Sources(", ", ", ")")
    override def iterator: Iterator[Source_File] = rep.valuesIterator
+
+    def get(name: String): Option[Source_File] = rep.get(name)
+    def apply(name: String): Source_File =
+      get(name).getOrElse(error("Missing session sources entry " + quote(name)))
  }

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Thy/thy_syntax.scala
--- a/src/Pure/Thy/thy_syntax.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Thy/thy_syntax.scala Thu Jan 05 21:18:55 2023 +0100
@@ -166,7 +166,7 @@
  private def reparse_spans(
    resources: Resources,
    syntax: Outer_Syntax,
-    get_blob: Document.Node.Name => Option[Document.Blob],
+    get_blob: Document.Node.Name => Option[Document.Blobs.Item],
    can_import: Document.Node.Name => Boolean,
    node_name: Document.Node.Name,
    commands: Linear_Set[Command],
@@ -213,7 +213,7 @@
  private def text_edit(
    resources: Resources,
    syntax: Outer_Syntax,
-    get_blob: Document.Node.Name => Option[Document.Blob],
+    get_blob: Document.Node.Name => Option[Document.Blobs.Item],
    can_import: Document.Node.Name => Boolean,
    reparse_limit: Int,
    node: Document.Node,
@@ -245,7 +245,7 @@
    }
 
    edit match {
-      case (_, Document.Node.Blob(blob)) => node.init_blob(blob)
+      case (_, Document.Node.Blob(blob)) => Document.Node.init_blob(blob)
 
      case (name, Document.Node.Edits(text_edits)) =>
        if (name.is_theory) {
@@ -303,7 +303,7 @@
  ): Session.Change = {
    val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits)
 
-    def get_blob(name: Document.Node.Name): Option[Document.Blob] =
+    def get_blob(name: Document.Node.Name): Option[Document.Blobs.Item] =
      doc_blobs.get(name) orElse previous.nodes(name).get_blob
 
    def can_import(name: Document.Node.Name): Boolean =
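The new Sources.get/apply accessors and the Session_Context methods get_source_file/source_file provide the lookup path from a session build database back to its stored sources. A sketch of how they might be exercised is given below; the store initialization, session name, and file name are illustrative assumptions and not part of the changeset.

  // Sketch: look up a stored source file of a previously built session
  val store = Sessions.store(Options.init())
  using(Export.open_database_context(store)) { database_context =>
    using(database_context.open_session0("HOL-Analysis")) { session_context =>
      session_context.get_source_file("~~/src/HOL/Analysis/Inner_Product.thy") match {
        case Some(source_file) => println(source_file.name + ": " + source_file.text.length + " characters")
        case None => println("No such source file in the session database")
      }
    }
  }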
diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Tools/build.scala Thu Jan 05 21:18:55 2023 +0100
@@ -139,8 +139,8 @@
  class Results private[Build](
    val store: Sessions.Store,
    val deps: Sessions.Deps,
-    results: Map[String, (Option[Process_Result], Sessions.Info)],
-    val presentation_sessions: Browser_Info.Config => List[String]
+    val sessions_ok: List[String],
+    results: Map[String, (Option[Process_Result], Sessions.Info)]
  ) {
    def cache: Term.Cache = store.cache
 
@@ -452,18 +452,18 @@
        }
        else Isabelle_Thread.uninterruptible { loop(queue, Map.empty, Map.empty) }
 
+      val sessions_ok: List[String] =
+        (for {
+          name <- build_deps.sessions_structure.build_topological_order.iterator
+          result <- build_results.get(name)
+          if result.ok
+        } yield name).toList
+
      val results =
        (for ((name, result) <- build_results.iterator)
          yield (name, (result.process, result.info))).toMap
 
-      def presentation_sessions(config: Browser_Info.Config): List[String] =
-        (for {
-          name <- build_deps.sessions_structure.build_topological_order.iterator
-          result <- build_results.get(name)
-          if result.ok && config.enabled(result.info)
-        } yield name).toList
-
-      new Results(store, build_deps, results, presentation_sessions)
+      new Results(store, build_deps, sessions_ok, results)
    }
 
  if (export_files) {
@@ -481,7 +481,8 @@
      }
    }
 
-    val presentation_sessions = results.presentation_sessions(browser_info)
+    val presentation_sessions =
+      results.sessions_ok.filter(name => browser_info.enabled(results.info(name)))
    if (presentation_sessions.nonEmpty && !progress.stopped) {
      Browser_Info.build(browser_info, results.store, results.deps, presentation_sessions,
        progress = progress, verbose = verbose)
@@ -545,7 +546,7 @@
    -j INT       maximum number of parallel jobs (default 1)
    -k KEYWORD   check theory sources for conflicts with proposed keywords
    -l           list session source files
-    -n           no build -- test dependencies only
+    -n           no build -- take existing build databases
    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
    -v           verbose
    -x NAME      exclude session NAME and all descendants

diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Tools/build_job.scala
--- a/src/Pure/Tools/build_job.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Tools/build_job.scala Thu Jan 05 21:18:55 2023 +0100
@@ -25,57 +25,58 @@
        Symbol.output(unicode_symbols, UTF8.decode_permissive(read(name).bytes)),
        cache = theory_context.cache)
 
+    def read_source_file(name: String): Sessions.Source_File =
+      theory_context.session_context.source_file(name)
+
    for {
      id <- theory_context.document_id()
      (thy_file, blobs_files) <- theory_context.files(permissive = true)
    }
    yield {
-      val results =
-        Command.Results.make(
-          for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
-          yield i -> elem)
-
      val master_dir =
        Path.explode(Url.strip_base_name(thy_file).getOrElse(
          error("Cannot determine theory master directory: " + quote(thy_file))))
 
      val blobs =
-        blobs_files.map { file =>
-          val name = Document.Node.Name(file)
-          val path = Path.explode(file)
+        blobs_files.map { name =>
+          val path = Path.explode(name)
          val src_path = File.relative_path(master_dir, path).getOrElse(path)
-          Command.Blob(name, src_path, None)
+
+          val file = read_source_file(name)
+          val bytes = file.bytes
+          val text = bytes.text
+          val chunk = Symbol.Text_Chunk(text)
+
+          Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) ->
+            Document.Blobs.Item(bytes, text, chunk, changed = false)
        }
+
+      val thy_source = read_source_file(thy_file).text
+
+      val thy_xml = read_xml(Export.MARKUP)
      val blobs_xml =
        for (i <- (1 to blobs.length).toList)
          yield read_xml(Export.MARKUP + i)
 
-      val blobs_info =
-        Command.Blobs_Info(
-          for { (Command.Blob(name, src_path, _), xml) <- blobs zip blobs_xml }
-          yield {
-            val text = XML.content(xml)
-            val chunk = Symbol.Text_Chunk(text)
-            val digest = SHA1.digest(Symbol.encode(text))
-            Exn.Res(Command.Blob(name, src_path, Some((digest, chunk))))
-          })
-
-      val thy_xml = read_xml(Export.MARKUP)
-      val thy_source = XML.content(thy_xml)
-
-      val markups_index =
-        Command.Markup_Index.markup :: blobs.map(Command.Markup_Index.blob)
+      val markups_index = Command.Markup_Index.make(blobs.map(_._1))
 
      val markups =
        Command.Markups.make(
          for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
          yield index -> Markup_Tree.from_XML(xml))
 
+      val results =
+        Command.Results.make(
+          for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
+          yield i -> elem)
+
      val command =
        Command.unparsed(thy_source, theory = true, id = id,
          node_name = Document.Node.Name(thy_file, theory = theory_context.theory),
-          blobs_info = blobs_info, results = results, markups = markups)
+          blobs_info = Command.Blobs_Info.make(blobs),
+          markups = markups, results = results)
 
-      Document.State.init.snippet(command)
+      val doc_blobs = Document.Blobs.make(blobs)
+
+      Document.State.init.snippet(command, doc_blobs)
    }
  }
 
@@ -269,30 +270,38 @@
      }
      else Nil
 
+    def session_blobs(node_name: Document.Node.Name): List[(Command.Blob, Document.Blobs.Item)] =
+      session_background.base.theory_load_commands.get(node_name.theory) match {
+        case None => Nil
+        case Some(spans) =>
+          val syntax = session_background.base.theory_syntax(node_name)
+          val master_dir = Path.explode(node_name.master_dir)
+          for (span <- spans; file <- span.loaded_files(syntax).files)
+          yield {
+            val src_path = Path.explode(file)
+            val blob_name = Document.Node.Name(File.symbolic_path(master_dir + src_path))
+
+            val bytes = session_sources(blob_name.node).bytes
+            val text = bytes.text
+            val chunk = Symbol.Text_Chunk(text)
+
+            Command.Blob(blob_name, src_path, Some((SHA1.digest(bytes), chunk))) ->
+              Document.Blobs.Item(bytes, text, chunk, changed = false)
+          }
+      }
+
    val resources =
      new Resources(session_background, log = log, command_timings = command_timings0)
+
    val session =
      new Session(options, resources) {
        override val cache: Term.Cache = store.cache
 
-        override def build_blobs_info(node_name: Document.Node.Name): Command.Blobs_Info = {
-          session_background.base.theory_load_commands.get(node_name.theory) match {
-            case Some(spans) =>
-              val syntax = session_background.base.theory_syntax(node_name)
-              val blobs =
-                for (span <- spans; file <- span.loaded_files(syntax).files)
-                yield {
-                  (Exn.capture {
-                    val master_dir = Path.explode(node_name.master_dir)
-                    val src_path = Path.explode(file)
-                    val node = File.symbolic_path(master_dir + src_path)
-                    Command.Blob.read_file(Document.Node.Name(node), src_path)
-                  }).user_error
-                }
-              Command.Blobs_Info(blobs)
-            case None => Command.Blobs_Info.none
-          }
-        }
+        override def build_blobs_info(node_name: Document.Node.Name): Command.Blobs_Info =
+          Command.Blobs_Info.make(session_blobs(node_name))
+
+        override def build_blobs(node_name: Document.Node.Name): Document.Blobs =
+          Document.Blobs.make(session_blobs(node_name))
      }
 
  object Build_Session_Errors {
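The reworked Build_Job.read_theory above is what the new update tool below relies on: it reconstructs a Document.Snapshot, including blob content, purely from the build database. A stand-alone sketch of that usage follows; the session and theory names are assumed placeholders.

  // Sketch: re-read one finished theory from its build database
  val options = Options.init()
  val store = Sessions.store(options)
  using(Export.open_database_context(store)) { database_context =>
    using(database_context.open_session0("HOL-Examples")) { session_context =>
      val theory_context = session_context.theory("HOL-Examples.Seq")
      Build_Job.read_theory(theory_context) match {
        case Some(snapshot) => println(snapshot.node_name.toString + ": " + snapshot.node_files.length + " node(s)")
        case None => println("Theory not available in the build database")
      }
    }
  }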
diff -r 830597d13d6d -r aea34e2cabe8 src/Pure/Tools/update.scala
--- a/src/Pure/Tools/update.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Pure/Tools/update.scala Thu Jan 05 21:18:55 2023 +0100
@@ -8,20 +8,48 @@
 
 object Update {
-  def update(options: Options, logic: String,
+  def update(options: Options,
+    selection: Sessions.Selection = Sessions.Selection.empty,
+    base_logics: List[String] = Nil,
    progress: Progress = new Progress,
-    log: Logger = No_Logger,
    dirs: List[Path] = Nil,
    select_dirs: List[Path] = Nil,
-    selection: Sessions.Selection = Sessions.Selection.empty
-  ): Unit = {
-    val context =
-      Dump.Context(options, progress = progress, dirs = dirs, select_dirs = select_dirs,
-        selection = selection, skip_base = true)
+    numa_shuffling: Boolean = false,
+    max_jobs: Int = 1,
+    no_build: Boolean = false,
+    verbose: Boolean = false
+  ): Build.Results = {
+    /* excluded sessions */
+
+    val exclude: Set[String] =
+      if (base_logics.isEmpty) Set.empty
+      else {
+        val sessions =
+          Sessions.load_structure(options, dirs = dirs, select_dirs = select_dirs)
+            .selection(selection)
+
+        for (name <- base_logics if !sessions.defined(name)) {
+          error("Base logic " + quote(name) + " outside of session selection")
+        }
 
-    context.build_logic(logic)
+        sessions.build_requirements(base_logics).toSet
+      }
+
+
+    /* build */
 
-    val path_cartouches = context.options.bool("update_path_cartouches")
+    val build_results =
+      Build.build(options, progress = progress, dirs = dirs, select_dirs = select_dirs,
+        selection = selection, numa_shuffling = numa_shuffling, max_jobs = max_jobs,
+        no_build = no_build, verbose = verbose)
+
+    val store = build_results.store
+    val sessions_structure = build_results.deps.sessions_structure
+
+
+    /* update */
+
+    val path_cartouches = options.bool("update_path_cartouches")
 
    def update_xml(xml: XML.Body): XML.Body =
      xml flatMap {
@@ -37,25 +65,44 @@
        case t => List(t)
      }
 
-    context.sessions(logic, log = log).foreach(_.process { (args: Dump.Args) =>
-      progress.echo("Processing theory " + args.print_node + " ...")
+    var seen_theory = Set.empty[String]
 
-      val snapshot = args.snapshot
-      for (node_name <- snapshot.node_files) {
-        val node = snapshot.get_node(node_name)
-        val xml =
-          snapshot.state.xml_markup(snapshot.version, node_name,
-            elements = Markup.Elements(Markup.UPDATE, Markup.LANGUAGE))
-
-        val source1 = Symbol.encode(XML.content(update_xml(xml)))
-        if (source1 != Symbol.encode(node.source)) {
-          progress.echo("Updating " + node_name.path)
-          File.write(node_name.path, source1)
+    using(Export.open_database_context(store)) { database_context =>
+      for {
+        session <- sessions_structure.build_topological_order
+        if build_results(session).ok && !exclude(session)
+      } {
+        progress.echo("Session " + session + " ...")
+        using(database_context.open_session0(session)) { session_context =>
+          for {
+            db <- session_context.session_db()
+            theory <- store.read_theories(db, session)
+            if !seen_theory(theory)
+          } {
+            seen_theory += theory
+            val theory_context = session_context.theory(theory)
+            for {
+              theory_snapshot <- Build_Job.read_theory(theory_context)
+              node_name <- theory_snapshot.node_files
+              snapshot = theory_snapshot.switch(node_name)
+              if snapshot.node.is_wellformed_text
+            } {
+              progress.expose_interrupt()
+              val source1 =
+                XML.content(update_xml(
+                  snapshot.xml_markup(elements = Markup.Elements(Markup.UPDATE, Markup.LANGUAGE))))
+              if (source1 != snapshot.node.source) {
+                val path = Path.explode(node_name.node)
+                progress.echo("Updating " + quote(File.standard_path(path)))
+                File.write(path, source1)
+              }
+            }
+          }
        }
      }
-    })
+    }
 
-    context.check_errors
+    build_results
  }
 
@@ -66,12 +113,15 @@
    { args =>
      var base_sessions: List[String] = Nil
      var select_dirs: List[Path] = Nil
+      var numa_shuffling = false
      var requirements = false
      var exclude_session_groups: List[String] = Nil
      var all_sessions = false
      var dirs: List[Path] = Nil
      var session_groups: List[String] = Nil
-      var logic = Dump.default_logic
+      var base_logics: List[String] = Nil
+      var max_jobs = 1
+      var no_build = false
      var options = Options.init()
      var verbose = false
      var exclude_sessions: List[String] = Nil
@@ -85,9 +135,11 @@
    -R           refer to requirements of selected sessions
    -X NAME      exclude sessions from group NAME and all descendants
    -a           select all sessions
-    -b NAME      base logic image (default """ + isabelle.quote(Dump.default_logic) + """)
+    -b NAME      additional base logic
    -d DIR       include session directory
    -g NAME      select session group NAME
+    -j INT       maximum number of parallel jobs (default 1)
+    -n           no build -- take existing build databases
    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
    -u OPT       override "update" option: shortcut for "-o update_OPT"
    -v           verbose
@@ -97,12 +149,15 @@
 """,
      "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
      "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
+      "N" -> (_ => numa_shuffling = true),
      "R" -> (_ => requirements = true),
      "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
      "a" -> (_ => all_sessions = true),
-      "b:" -> (arg => logic = arg),
+      "b:" -> (arg => base_logics ::= arg),
      "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
      "g:" -> (arg => session_groups = session_groups ::: List(arg)),
+      "j:" -> (arg => max_jobs = Value.Int.parse(arg)),
+      "n" -> (_ => no_build = true),
      "o:" -> (arg => options = options + arg),
      "u:" -> (arg => options = options + ("update_" + arg)),
      "v" -> (_ => verbose = true),
@@ -112,19 +167,27 @@
 
      val progress = new Console_Progress(verbose = verbose)
 
-      progress.interrupt_handler {
-        update(options, logic,
-          progress = progress,
-          dirs = dirs,
-          select_dirs = select_dirs,
-          selection = Sessions.Selection(
-            requirements = requirements,
-            all_sessions = all_sessions,
-            base_sessions = base_sessions,
-            exclude_session_groups = exclude_session_groups,
-            exclude_sessions = exclude_sessions,
-            session_groups = session_groups,
-            sessions = sessions))
-      }
+      val results =
+        progress.interrupt_handler {
+          update(options,
+            selection = Sessions.Selection(
+              requirements = requirements,
+              all_sessions = all_sessions,
+              base_sessions = base_sessions,
+              exclude_session_groups = exclude_session_groups,
+              exclude_sessions = exclude_sessions,
+              session_groups = session_groups,
+              sessions = sessions),
+            base_logics = base_logics,
+            progress = progress,
+            dirs = dirs,
+            select_dirs = select_dirs,
+            numa_shuffling = NUMA.enabled_warning(progress, numa_shuffling),
+            max_jobs = max_jobs,
+            no_build = no_build,
+            verbose = verbose)
+        }
+
+      sys.exit(results.rc)
    })
 }

diff -r 830597d13d6d -r aea34e2cabe8 src/Tools/VSCode/src/vscode_model.scala
--- a/src/Tools/VSCode/src/vscode_model.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Tools/VSCode/src/vscode_model.scala Thu Jan 05 21:18:55 2023 +0100
@@ -142,9 +142,9 @@
 
  /* blob */
 
-  def get_blob: Option[Document.Blob] =
+  def get_blob: Option[Document.Blobs.Item] =
    if (is_theory) None
-    else Some(Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty))
+    else Some(Document.Blobs.Item(content.bytes, content.text, content.chunk, pending_edits.nonEmpty))
 
  /* data */

diff -r 830597d13d6d -r aea34e2cabe8 src/Tools/jEdit/src/document_model.scala
--- a/src/Tools/jEdit/src/document_model.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Tools/jEdit/src/document_model.scala Thu Jan 05 21:18:55 2023 +0100
@@ -433,9 +433,9 @@
      Line.Node_Position(node_name.node,
        Line.Position.zero.advance(content.text.substring(0, offset)))
 
-  def get_blob: Option[Document.Blob] =
+  def get_blob: Option[Document.Blobs.Item] =
    if (is_theory) None
-    else Some(Document.Blob(content.bytes, content.text, content.chunk, pending_edits.nonEmpty))
+    else Some(Document.Blobs.Item(content.bytes, content.text, content.chunk, pending_edits.nonEmpty))
 
  def untyped_data: AnyRef = content.data
@@ -575,7 +575,7 @@
 
    def reset_blob(): Unit = GUI_Thread.require { blob = None }
 
-    def get_blob: Option[Document.Blob] = GUI_Thread.require {
+    def get_blob: Option[Document.Blobs.Item] = GUI_Thread.require {
      if (is_theory) None
      else {
        val (bytes, text, chunk) =
@@ -588,7 +588,7 @@
            x
          }
        val changed = !is_stable
-        Some(Document.Blob(bytes, text, chunk, changed))
+        Some(Document.Blobs.Item(bytes, text, chunk, changed))
      }
    }
 
@@ -617,7 +617,7 @@
    def node_required: Boolean = buffer_state.get_node_required
    def set_node_required(b: Boolean): Unit = buffer_state.set_node_required(b)
 
-    def get_blob: Option[Document.Blob] = buffer_state.get_blob
+    def get_blob: Option[Document.Blobs.Item] = buffer_state.get_blob
 
    def untyped_data: AnyRef = buffer_state.untyped_data

diff -r 830597d13d6d -r aea34e2cabe8 src/Tools/jEdit/src/jedit_rendering.scala
--- a/src/Tools/jEdit/src/jedit_rendering.scala Wed Jan 04 19:06:16 2023 +0000
+++ b/src/Tools/jEdit/src/jedit_rendering.scala Thu Jan 05 21:18:55 2023 +0100
@@ -31,7 +31,7 @@
    results: Command.Results = Command.Results.empty
  ): (String, JEdit_Rendering) = {
    val command = Command.rich_text(Document_ID.make(), results, formatted_body)
-    val snippet = snapshot.snippet(command)
+    val snippet = snapshot.snippet(command, Document.Blobs.empty)
    val model = File_Model.init(PIDE.session)
    val rendering = apply(snippet, model, PIDE.options.value)
    (command.source, rendering)