# HG changeset patch
# User wenzelm
# Date 1607166880 -3600
# Node ID ea4f86914cb29cbeff06d2515a35b5d8ef756239
# Parent  85aaaf2cd1735be59cf305d6f3183f381a362a6e
support for PIDE markup for auxiliary files ("blobs");
clarified files of theory Pure;

diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/PIDE/command.scala
--- a/src/Pure/PIDE/command.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/PIDE/command.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -20,6 +20,10 @@
     name: Document.Node.Name,
     src_path: Path,
     content: Option[(SHA1.Digest, Symbol.Text_Chunk)])
+  {
+    def chunk_file: Symbol.Text_Chunk.File =
+      Symbol.Text_Chunk.File(name.node)
+  }
 
   object Blobs_Info
   {
@@ -109,6 +113,7 @@
   object Markup_Index
   {
     val markup: Markup_Index = Markup_Index(false, Symbol.Text_Chunk.Default)
+    def blob(blob: Blob): Markup_Index = Markup_Index(false, blob.chunk_file)
   }
 
   sealed case class Markup_Index(status: Boolean, chunk_name: Symbol.Text_Chunk.Name)
@@ -374,11 +379,12 @@
     theory: Boolean = false,
     id: Document_ID.Command = Document_ID.none,
     node_name: Document.Node.Name = Document.Node.Name.empty,
+    blobs_info: Blobs_Info = Blobs_Info.none,
     results: Results = Results.empty,
     markups: Markups = Markups.empty): Command =
   {
     val (source1, span1) = Command_Span.unparsed(source, theory).compact_source
-    new Command(id, node_name, Blobs_Info.none, span1, source1, results, markups)
+    new Command(id, node_name, blobs_info, span1, source1, results, markups)
   }
 
   def text(source: String): Command = unparsed(source)
@@ -461,6 +467,28 @@
       Blobs_Info(blobs, loaded_files.index)
     }
   }
+
+  def build_blobs_info(
+    syntax: Outer_Syntax,
+    node_name: Document.Node.Name,
+    load_commands: List[Command_Span.Span]): Blobs_Info =
+  {
+    val blobs =
+      for {
+        span <- load_commands
+        file <- span.loaded_files(syntax).files
+      } yield {
+        (Exn.capture {
+          val dir = Path.explode(node_name.master_dir)
+          val src_path = Path.explode(file)
+          val name = Document.Node.Name((dir + src_path).expand.implode_symbolic)
+          val bytes = Bytes.read(name.path)
+          val chunk = Symbol.Text_Chunk(bytes.text)
+          Blob(name, src_path, Some((bytes.sha1_digest, chunk)))
+        }).user_error
+      }
+    Blobs_Info(blobs, -1)
+  }
 }
 
 
@@ -516,7 +544,7 @@
   val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] =
     ((Symbol.Text_Chunk.Default -> chunk) ::
       (for (Exn.Res(blob) <- blobs; (_, file) <- blob.content)
-        yield Symbol.Text_Chunk.File(blob.name.node) -> file)).toMap
+        yield blob.chunk_file -> file)).toMap
 
   def length: Int = source.length
   def range: Text.Range = chunk.range
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/PIDE/command_span.scala
--- a/src/Pure/PIDE/command_span.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/PIDE/command_span.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -122,6 +122,9 @@
       else Nil
     }
 
+  def is_load_command(syntax: Outer_Syntax): Boolean =
+    syntax.load_command(name).isDefined
+
   def loaded_files(syntax: Outer_Syntax): Loaded_Files =
     syntax.load_command(name) match {
       case None => Loaded_Files.none
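
Note: Command.build_blobs_info above pairs each loaded file with the SHA-1 digest of its byte content, via Bytes.read and bytes.sha1_digest. A minimal self-contained sketch of that pairing in plain Scala (JDK only; the names BlobSketch, Blob and readBlob are illustrative, not part of the changeset):

    import java.nio.file.{Files, Paths}
    import java.security.MessageDigest

    object BlobSketch {
      // Analogue of Command.Blob: a source path plus the digest and text of its content.
      final case class Blob(src_path: String, sha1: String, text: String)

      def readBlob(path: String): Blob = {
        val bytes = Files.readAllBytes(Paths.get(path))        // like Bytes.read(name.path)
        val sha1 =
          MessageDigest.getInstance("SHA-1").digest(bytes)
            .map(b => "%02x".format(b)).mkString               // like bytes.sha1_digest
        Blob(path, sha1, new String(bytes, "UTF-8"))
      }

      def main(args: Array[String]): Unit =
        for (p <- args) println(readBlob(p).sha1 + " " + p)
    }
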
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/PIDE/document.scala
--- a/src/Pure/PIDE/document.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/PIDE/document.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -128,6 +128,11 @@
       def path: Path = Path.explode(File.standard_path(node))
       def master_dir_path: Path = Path.explode(File.standard_path(master_dir))
 
+      def expand: Name =
+        Name(path.expand.implode, master_dir_path.expand.implode, theory)
+      def symbolic: Name =
+        Name(path.implode_symbolic, master_dir_path.implode_symbolic, theory)
+
       def is_theory: Boolean = theory.nonEmpty
 
       def theory_base_name: String = Long_Name.base_name(theory)
@@ -552,6 +557,7 @@
     def commands_loading_ranges(pred: Node.Name => Boolean): List[Text.Range]
     def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command]
 
+    def get_snippet_command: Option[Command]
     def command_snippet(command: Command): Snapshot =
     {
       val node_name = command.node_name
@@ -569,12 +575,17 @@
         .define_version(version1, state0.the_assignment(version))
         .assign(version1.id, Nil, List(command.id -> List(Document_ID.make())))._2
 
-      state1.snapshot(name = node_name)
+      state1.snapshot(name = node_name, snippet_command = Some(command))
     }
 
     def xml_markup(
       range: Text.Range = Text.Range.full,
       elements: Markup.Elements = Markup.Elements.full): XML.Body
+
+    def xml_markup_blobs(
+      read_blob: Node.Name => String,
+      elements: Markup.Elements = Markup.Elements.full): List[(XML.Body, Command.Blob)]
+
     def messages: List[(XML.Tree, Position.T)]
     def exports: List[Export.Entry]
     def exports_map: Map[String, Export.Entry]
@@ -814,12 +825,20 @@
           st <- command_states(version, command).iterator
         } yield st.exports)
 
-    def begin_theory(node_name: Node.Name, id: Document_ID.Exec, source: String): State =
+    def begin_theory(
+      node_name: Node.Name,
+      id: Document_ID.Exec,
+      source: String,
+      blobs_info: Command.Blobs_Info): State =
+    {
       if (theories.isDefinedAt(id)) fail
       else {
-        val command = Command.unparsed(source, theory = true, id = id, node_name = node_name)
+        val command =
+          Command.unparsed(source, theory = true, id = id, node_name = node_name,
+            blobs_info = blobs_info)
         copy(theories = theories + (id -> command.empty_state))
       }
+    }
 
     def end_theory(theory: String): (Snapshot, State) =
       theories.collectFirst({ case (_, st) if st.command.node_name.theory == theory => st }) match {
@@ -829,7 +848,7 @@
           val node_name = command.node_name
           val command1 =
             Command.unparsed(command.source, theory = true, id = command.id, node_name = node_name,
-              results = st.results, markups = st.markups)
+              blobs_info = command.blobs_info, results = st.results, markups = st.markups)
           val state1 = copy(theories = theories - command1.id)
           val snapshot = state1.snapshot(name = node_name).command_snippet(command1)
           (snapshot, state1)
@@ -1062,8 +1081,10 @@
     }
 
     // persistent user-view
-    def snapshot(name: Node.Name = Node.Name.empty, pending_edits: List[Text.Edit] = Nil)
-      : Snapshot =
+    def snapshot(
+      name: Node.Name = Node.Name.empty,
+      pending_edits: List[Text.Edit] = Nil,
+      snippet_command: Option[Command] = None): Snapshot =
     {
       val stable = recent_stable
       val latest = history.tip
@@ -1133,11 +1154,29 @@
           }
           else version.nodes.commands_loading(other_node_name).headOption
 
+        def get_snippet_command: Option[Command] = snippet_command
+
         def xml_markup(
           range: Text.Range = Text.Range.full,
           elements: Markup.Elements = Markup.Elements.full): XML.Body =
           state.xml_markup(version, node_name, range = range, elements = elements)
 
+        def xml_markup_blobs(read_blob: Node.Name => String,
+          elements: Markup.Elements = Markup.Elements.full): List[(XML.Body, Command.Blob)] =
+        {
+          get_snippet_command match {
+            case None => Nil
+            case Some(command) =>
+              for (Exn.Res(blob) <- command.blobs)
+              yield {
+                val text = read_blob(blob.name)
+                val markup = command.init_markups(Command.Markup_Index.blob(blob))
+                val xml = markup.to_XML(Text.Range(0, text.size), text, elements)
+                (xml, blob)
+              }
+          }
+        }
+
         lazy val messages: List[(XML.Tree, Position.T)] =
          (for {
            (command, start) <-
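
Note: xml_markup_blobs recovers PIDE markup for each auxiliary file of a snippet command; the caller supplies how blob sources are read back. A usage sketch (assuming a Document.Snapshot value snapshot in scope; the call shape matches its use in build_job.scala further below):

    // Read each blob from the file system and project its markup to XML.
    val blobs_xml: List[(XML.Body, Command.Blob)] =
      snapshot.xml_markup_blobs(name => File.read(name.path))
    for ((xml, blob) <- blobs_xml)
      println(blob.name.node + ": " + XML.content(xml).length + " characters of markup text")
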
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/PIDE/headless.scala
--- a/src/Pure/PIDE/headless.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/PIDE/headless.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -256,8 +256,8 @@
       val dep_theories = dependencies.theories
       val dep_theories_set = dep_theories.toSet
       val dep_files =
-        dependencies.loaded_files.flatMap(_._2).
-          map(path => Document.Node.Name(resources.append("", path)))
+        for (path <- dependencies.loaded_files)
+        yield Document.Node.Name(resources.append("", path))
 
       val use_theories_state =
       {
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/PIDE/resources.scala
--- a/src/Pure/PIDE/resources.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/PIDE/resources.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -106,36 +106,38 @@
 
   /* theory files */
 
-  def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name): () => List[Path] =
+  def load_commands(syntax: Outer_Syntax, name: Document.Node.Name)
+    : () => List[Command_Span.Span] =
   {
     val (is_utf8, raw_text) =
       with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
-    () => {
-      if (syntax.has_load_commands(raw_text)) {
-        val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
-        val spans = syntax.parse_spans(text)
-        val dir = Path.explode(name.master_dir)
-        (for {
-          span <- spans.iterator
-          file <- span.loaded_files(syntax).files
-        } yield (dir + Path.explode(file)).expand).toList
+    () =>
+      {
+        if (syntax.has_load_commands(raw_text)) {
+          val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
+          syntax.parse_spans(text).filter(_.is_load_command(syntax))
+        }
+        else Nil
       }
-      else Nil
-    }
+  }
+
+  def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name, spans: List[Command_Span.Span])
+    : List[Path] =
+  {
+    val dir = Path.explode(name.master_dir)
+    for { span <- spans; file <- span.loaded_files(syntax).files }
+    yield (dir + Path.explode(file)).expand
   }
 
   def pure_files(syntax: Outer_Syntax): List[Path] =
   {
     val pure_dir = Path.explode("~~/src/Pure")
-    val roots =
-      for { (name, _) <- Thy_Header.ml_roots }
-      yield (pure_dir + Path.explode(name)).expand
-    val files =
-      for {
-        (path, (_, theory)) <- roots zip Thy_Header.ml_roots
-        file <- loaded_files(syntax, Document.Node.Name(path.implode, path.dir.implode, theory))()
-      } yield file
-    roots ::: files
+    for {
+      (name, theory) <- Thy_Header.ml_roots
+      path = (pure_dir + Path.explode(name)).expand
+      node_name = Document.Node.Name(path.implode, path.dir.implode, theory)
+      file <- loaded_files(syntax, node_name, load_commands(syntax, node_name)())
+    } yield file
   }
 
   def theory_name(qualifier: String, theory: String): String =
@@ -398,19 +400,31 @@
         graph2.map_node(name, _ => syntax)
       })
 
-    def loaded_files: List[(String, List[Path])] =
-    {
+    def get_syntax(name: Document.Node.Name): Outer_Syntax =
+      loaded_theories.get_node(name.theory)
+
+    def load_commands: List[(Document.Node.Name, List[Command_Span.Span])] =
       theories.zip(
-        Par_List.map((e: () => List[Path]) => e(),
-          theories.map(name =>
-            resources.loaded_files(loaded_theories.get_node(name.theory), name))))
-        .map({ case (name, files) =>
-          val files1 =
-            if (name.theory == Thy_Header.PURE) resources.pure_files(overall_syntax) ::: files
-            else files
-          (name.theory, files1) })
+        Par_List.map((e: () => List[Command_Span.Span]) => e(),
+          theories.map(name => resources.load_commands(get_syntax(name), name))))
+      .filter(p => p._2.nonEmpty)
+
+    def loaded_files(name: Document.Node.Name, spans: List[Command_Span.Span])
+      : (String, List[Path]) =
+    {
+      val theory = name.theory
+      val syntax = get_syntax(name)
+      val files1 = resources.loaded_files(syntax, name, spans)
+      val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil
+      (theory, files1 ::: files2)
     }
 
+    def loaded_files: List[Path] =
+      for {
+        (name, spans) <- load_commands
+        file <- loaded_files(name, spans)._2
+      } yield file
+
    def imported_files: List[Path] =
    {
      val base_theories =
@@ -418,7 +432,7 @@
          filter(session_base.loaded_theories.defined)
 
      base_theories.map(theory => session_base.known_theories(theory).name.path) :::
-        base_theories.flatMap(session_base.known_loaded_files(_))
+        base_theories.flatMap(session_base.known_loaded_files.withDefaultValue(Nil))
    }
 
    lazy val overall_syntax: Outer_Syntax =
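
Note: the former one-step Resources.loaded_files is now split in two stages: load_commands parses a theory and keeps only the spans of load commands, and loaded_files resolves the file names those spans load. A sketch of how the two compose (assuming resources, syntax and name in scope; the trailing () forces the deferred parser):

    val spans: List[Command_Span.Span] = resources.load_commands(syntax, name)()
    val paths: List[Path] = resources.loaded_files(syntax, name, spans)
    // Equivalent to the old one-step loaded_files, but the spans can now be
    // reused elsewhere, e.g. stored in Sessions.Base.load_commands below.
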
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/PIDE/session.scala
--- a/src/Pure/PIDE/session.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/PIDE/session.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -130,6 +130,9 @@
   val xml_cache: XML.Cache = XML.make_cache()
   val xz_cache: XZ.Cache = XZ.make_cache()
 
+  def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
+    Command.Blobs_Info.none
+
 
   /* global flags */
 
@@ -506,7 +509,8 @@
             change_command(_.add_export(id, (args.serial, export)))
 
           case Protocol.Loading_Theory(node_name, id) =>
-            try { global_state.change(_.begin_theory(node_name, id, msg.text)) }
+            val blobs_info = build_blobs_info(node_name)
+            try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) }
             catch { case _: Document.State.Fail => bad_output() }
 
           case Markup.Finished_Theory(theory) =>
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/Pure.thy
--- a/src/Pure/Pure.thy	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/Pure.thy	Sat Dec 05 12:14:40 2020 +0100
@@ -168,6 +168,9 @@
 in end\<close>
 
+external_file "ROOT0.ML"
+external_file "ROOT.ML"
+
 
 subsection \<open>Embedded ML text\<close>
 
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/Thy/export.scala
--- a/src/Pure/Thy/export.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/Thy/export.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -16,6 +16,7 @@
   /* artefact names */
 
   val MARKUP = "PIDE/markup"
+  val FILES = "PIDE/files"
   val MESSAGES = "PIDE/messages"
   val DOCUMENT_PREFIX = "document/"
   val CITATIONS = DOCUMENT_PREFIX + "citations"
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/Thy/sessions.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -55,6 +55,7 @@
     document_theories: List[Document.Node.Name] = Nil,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
     used_theories: List[(Document.Node.Name, Options)] = Nil,
+    load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty,
     known_theories: Map[String, Document.Node.Entry] = Map.empty,
     known_loaded_files: Map[String, List[Path]] = Map.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
@@ -172,10 +173,15 @@
 
             val theory_files = dependencies.theories.map(_.path)
 
-            val (loaded_files, loaded_files_errors) =
-              try { if (inlined_files) (dependencies.loaded_files, Nil) else (Nil, Nil) }
+            dependencies.load_commands
+
+            val (load_commands, load_commands_errors) =
+              try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
               catch { case ERROR(msg) => (Nil, List(msg)) }
 
+            val loaded_files =
+              load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) })
+
             val session_files =
               (theory_files ::: loaded_files.flatMap(_._2) :::
                 info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)
@@ -325,13 +331,14 @@
               document_theories = document_theories,
               loaded_theories = dependencies.loaded_theories,
               used_theories = dependencies.theories_adjunct,
+              load_commands = load_commands.toMap,
               known_theories = known_theories,
               known_loaded_files = known_loaded_files,
               overall_syntax = overall_syntax,
               imported_sources = check_sources(imported_files),
               sources = check_sources(session_files),
               session_graph_display = session_graph_display,
-              errors = dependencies.errors ::: loaded_files_errors ::: import_errors :::
+              errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
                 document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
                 bibtex_errors)
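
Note: Session.build_blobs_info is a hook with a trivial default; the prover's Loading_Theory message routes through it into State.begin_theory, so a subclass can attach blobs to the theory command. A minimal sketch of an overriding session (assuming options and resources in scope), mirroring the override in build_job.scala below:

    val session: Session =
      new Session(options, resources) {
        // Attach no auxiliary files here; build_job.scala instead consults
        // Sessions.Base.load_commands for the theory's load commands.
        override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
          Command.Blobs_Info.none
      }
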
diff -r 85aaaf2cd173 -r ea4f86914cb2 src/Pure/Tools/build_job.scala
--- a/src/Pure/Tools/build_job.scala	Sat Dec 05 11:49:04 2020 +0100
+++ b/src/Pure/Tools/build_job.scala	Sat Dec 05 12:14:40 2020 +0100
@@ -28,6 +28,7 @@
     Future.thread("build", uninterruptible = true) {
       val parent = info.parent.getOrElse("")
       val base = deps(parent)
+      val result_base = deps(session_name)
 
       val env =
         Isabelle_System.settings() +
@@ -50,6 +51,16 @@
        new Session(options, resources) {
          override val xml_cache: XML.Cache = store.xml_cache
          override val xz_cache: XZ.Cache = store.xz_cache
+
+          override def build_blobs_info(name: Document.Node.Name): Command.Blobs_Info =
+          {
+            result_base.load_commands.get(name.expand) match {
+              case Some(spans) =>
+                val syntax = result_base.theory_syntax(name)
+                Command.build_blobs_info(syntax, name, spans)
+              case None => Command.Blobs_Info.none
+            }
+          }
        }
 
      def make_rendering(snapshot: Document.Snapshot): Rendering =
        new Rendering(snapshot, options, session) {
@@ -176,7 +187,13 @@
           def export_text(name: String, text: String): Unit =
             export(name, List(XML.Text(text)))
 
+          val blobs = snapshot.xml_markup_blobs(name => File.read(name.path))
+          val chunks = for ((_, blob) <- blobs) yield blob.name.symbolic.node
+          export_text(Export.FILES, cat_lines(snapshot.node_name.symbolic.node :: chunks))
+
+          for (((xml, _), i) <- blobs.zipWithIndex) export(Export.MARKUP + (i + 1), xml)
           export(Export.MARKUP, snapshot.xml_markup())
+
           export(Export.MESSAGES, snapshot.messages.map(_._1))
 
           val citations = Library.distinct(rendering.citations(Text.Range.full).map(_.info))
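
Note: the "PIDE/files" artefact written above is a plain list of node names joined by cat_lines: the first line is the theory node, the remaining lines are its blobs, and the markup of blob i is exported as "PIDE/markup" + i (1-based) alongside the theory's own "PIDE/markup". A sketch of a consumer (plain Scala; parse_files_export is a hypothetical helper, not part of the changeset):

    // Split the "PIDE/files" text back into theory node and blob nodes.
    def parse_files_export(text: String): (String, List[String]) =
      text.split("\n").toList match {
        case theory_node :: blob_nodes => (theory_node, blob_nodes)
        case Nil => sys.error("empty PIDE/files export")
      }
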