--- a/src/Pure/Admin/build_doc.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Admin/build_doc.scala Wed Dec 09 23:30:12 2020 +0100
@@ -53,7 +53,7 @@
try {
progress.echo("Documentation " + doc + " ...")
- using(store.open_database_context(deps.sessions_structure))(db_context =>
+ using(store.open_database_context())(db_context =>
Presentation.build_documents(session, deps, db_context,
output_pdf = Some(Path.explode("~~/doc"))))
None
--- a/src/Pure/Admin/build_log.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Admin/build_log.scala Wed Dec 09 23:30:12 2020 +0100
@@ -614,7 +614,7 @@
- /** session info: produced by isabelle build as session log.gz file **/
+ /** session info: produced by isabelle build as session database **/
sealed case class Session_Info(
session_timing: Properties.T,
--- a/src/Pure/PIDE/document.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/PIDE/document.scala Wed Dec 09 23:30:12 2020 +0100
@@ -988,17 +988,17 @@
}
}
- def end_theory(theory: String): (Snapshot, State) =
- theories.collectFirst({ case (_, st) if st.command.node_name.theory == theory => st }) match {
+ def end_theory(id: Document_ID.Exec): (Snapshot, State) =
+ theories.get(id) match {
case None => fail
case Some(st) =>
val command = st.command
val node_name = command.node_name
val command1 =
- Command.unparsed(command.source, theory = true, id = command.id, node_name = node_name,
+ Command.unparsed(command.source, theory = true, id = id, node_name = node_name,
blobs_info = command.blobs_info, results = st.results, markups = st.markups)
- val state1 = copy(theories = theories - command1.id)
- val snapshot = state1.snapshot(node_name = node_name).command_snippet(command1)
+ val state1 = copy(theories = theories - id)
+ val snapshot = state1.command_snippet(command1)
(snapshot, state1)
}
@@ -1233,8 +1233,6 @@
pending_edits: List[Text.Edit] = Nil,
snippet_command: Option[Command] = None): Snapshot =
{
- /* pending edits and unstable changes */
-
val stable = recent_stable
val version = stable.version.get_finished
@@ -1253,5 +1251,8 @@
new Snapshot(this, version, node_name, edits, snippet_command)
}
+
+ def command_snippet(command: Command): Snapshot =
+ snapshot().command_snippet(command)
}
}
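The new Document.State.command_snippet is a convenience that combines snapshot() with Snapshot.command_snippet. As a minimal illustration of the intended call pattern (it is used exactly this way in Build_Job.print_log further down in this changeset; "command" stands for a command reconstructed from the session database):

  // Sketch only: turn a stored command into a per-theory snapshot.
  val snapshot: Document.Snapshot = Document.State.init.command_snippet(command)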
--- a/src/Pure/PIDE/markup.ML Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/PIDE/markup.ML Wed Dec 09 23:30:12 2020 +0100
@@ -222,7 +222,6 @@
val theory_timing: Properties.entry
val session_timing: Properties.entry
val loading_theory: string -> Properties.T
- val finished_theory: string -> Properties.T
val build_session_finished: Properties.T
val print_operationsN: string
val print_operations: Properties.T
@@ -698,7 +697,6 @@
val session_timing = (functionN, "session_timing");
fun loading_theory name = [("function", "loading_theory"), (nameN, name)];
-fun finished_theory name = [("function", "finished_theory"), (nameN, name)];
val build_session_finished = [("function", "build_session_finished")];
--- a/src/Pure/PIDE/markup.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/PIDE/markup.scala Wed Dec 09 23:30:12 2020 +0100
@@ -612,7 +612,6 @@
object Task_Statistics extends Properties_Function("task_statistics")
object Loading_Theory extends Properties_Function("loading_theory")
- object Finished_Theory extends Name_Function("finished_theory")
object Build_Session_Finished extends Function("build_session_finished")
object Commands_Accepted extends Function("commands_accepted")
--- a/src/Pure/PIDE/rendering.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/PIDE/rendering.scala Wed Dec 09 23:30:12 2020 +0100
@@ -211,6 +211,7 @@
Markup.Elements(Markup.WRITELN, Markup.INFORMATION, Markup.WARNING, Markup.LEGACY, Markup.ERROR,
Markup.BAD)
+ val message_elements = Markup.Elements(message_pri.keySet)
val warning_elements = Markup.Elements(Markup.WARNING, Markup.LEGACY)
val error_elements = Markup.Elements(Markup.ERROR)
@@ -230,14 +231,14 @@
Markup.Markdown_Bullet.name)
}
-abstract class Rendering(
+class Rendering(
val snapshot: Document.Snapshot,
val options: Options,
val session: Session)
{
override def toString: String = "Rendering(" + snapshot.toString + ")"
- def model: Document.Model
+ def get_text(range: Text.Range): Option[String] = None
/* caret */
@@ -275,7 +276,7 @@
semantic_completion(completed_range, caret_range) match {
case Some(Text.Info(_, Completion.No_Completion)) => (true, None)
case Some(Text.Info(range, names: Completion.Names)) =>
- model.get_text(range) match {
+ get_text(range) match {
case Some(original) => (false, names.complete(range, history, unicode, original))
case None => (false, None)
}
@@ -358,7 +359,7 @@
for {
Text.Info(r1, delimited) <- language_path(before_caret_range(caret))
- s1 <- model.get_text(r1)
+ s1 <- get_text(r1)
(r2, s2) <-
if (is_wrapped(s1)) {
Some((Text.Range(r1.start + 1, r1.stop - 1), s1.substring(1, s1.length - 1)))
@@ -520,7 +521,7 @@
}
- /* message underline color */
+ /* messages */
def message_underline_color(elements: Markup.Elements, range: Text.Range)
: List[Text.Info[Rendering.Color.Value]] =
@@ -536,6 +537,39 @@
} yield Text.Info(r, color)
}
+ def text_messages(range: Text.Range): List[Text.Info[XML.Tree]] =
+ {
+ val results =
+ snapshot.cumulate[Vector[XML.Tree]](range, Vector.empty, Rendering.message_elements,
+ states =>
+ {
+ case (res, Text.Info(_, elem)) =>
+ elem.markup.properties match {
+ case Markup.Serial(i) =>
+ states.collectFirst(
+ {
+ case st if st.results.get(i).isDefined =>
+ res :+ st.results.get(i).get
+ })
+ case _ => None
+ }
+ case _ => None
+ })
+
+ var seen_serials = Set.empty[Long]
+ val seen: XML.Tree => Boolean =
+ {
+ case XML.Elem(Markup(_, Markup.Serial(i)), _) =>
+ val b = seen_serials(i); seen_serials += i; b
+ case _ => false
+ }
+ for {
+ Text.Info(range, trees) <- results
+ tree <- trees
+ if !seen(tree)
+ } yield Text.Info(range, tree)
+ }
+
/* tooltips */
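With Rendering now a concrete class, batch tools can instantiate it directly and front-ends merely override get_text; text_messages then collects the per-command result messages within a text range. A minimal sketch, assuming snapshot, options and session are already available and get_text keeps its default of None (as in the headless use later in this changeset):

  // Sketch: headless rendering without a Document.Model.
  val rendering = new Rendering(snapshot, options, session)
  val messages: List[Text.Info[XML.Tree]] = rendering.text_messages(Text.Range.full)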
--- a/src/Pure/PIDE/resources.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/PIDE/resources.scala Wed Dec 09 23:30:12 2020 +0100
@@ -69,7 +69,7 @@
def append(node_name: Document.Node.Name, source_path: Path): String =
append(node_name.master_dir, source_path)
- def make_theory_node(dir: String, file: Path, theory: String): Document.Node.Name =
+ def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name =
{
val node = append(dir, file)
val master_dir = append(dir, file.dir)
@@ -155,13 +155,13 @@
case None => Nil
}
dirs.collectFirst({
- case dir if (dir + thy_file).is_file => make_theory_node("", dir + thy_file, theory) })
+ case dir if (dir + thy_file).is_file => file_node(dir + thy_file, theory = theory) })
}
def import_name(qualifier: String, dir: String, s: String): Document.Node.Name =
{
val theory = theory_name(qualifier, Thy_Header.import_name(s))
- def theory_node = make_theory_node(dir, Path.explode(s).thy, theory)
+ def theory_node = file_node(Path.explode(s).thy, dir = dir, theory = theory)
if (!Thy_Header.is_base_name(s)) theory_node
else if (session_base.loaded_theory(theory)) loaded_theory_node(theory)
--- a/src/Pure/PIDE/session.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/PIDE/session.scala Wed Dec 09 23:30:12 2020 +0100
@@ -513,13 +513,6 @@
try { global_state.change(_.begin_theory(node_name, id, msg.text, blobs_info)) }
catch { case _: Document.State.Fail => bad_output() }
- case Markup.Finished_Theory(theory) =>
- try {
- val snapshot = global_state.change_result(_.end_theory(theory))
- finished_theories.post(snapshot)
- }
- catch { case _: Document.State.Fail => bad_output() }
-
case List(Markup.Commands_Accepted.PROPERTY) =>
msg.text match {
case Protocol.Commands_Accepted(ids) =>
@@ -584,6 +577,10 @@
case Markup.Process_Result(result) if output.is_exit =>
if (prover.defined) protocol_handlers.exit()
+ for (id <- global_state.value.theories.keys) {
+ val snapshot = global_state.change_result(_.end_theory(id))
+ finished_theories.post(snapshot)
+ }
file_formats.stop_session
phase = Session.Terminated(result)
prover.reset
--- a/src/Pure/System/isabelle_tool.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/System/isabelle_tool.scala Wed Dec 09 23:30:12 2020 +0100
@@ -182,6 +182,7 @@
class Tools extends Isabelle_Scala_Tools(
Build.isabelle_tool,
Build_Docker.isabelle_tool,
+ Build_Job.isabelle_tool,
Doc.isabelle_tool,
Dump.isabelle_tool,
Export.isabelle_tool,
--- a/src/Pure/Thy/bibtex.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Thy/bibtex.scala Wed Dec 09 23:30:12 2020 +0100
@@ -196,7 +196,7 @@
Text.Info(r, name) <- rendering.citations(rendering.before_caret_range(caret)).headOption
name1 <- Completion.clean_name(name)
- original <- rendering.model.get_text(r)
+ original <- rendering.get_text(r)
original1 <- Completion.clean_name(Library.perhaps_unquote(original))
entries =
--- a/src/Pure/Thy/export.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Thy/export.scala Wed Dec 09 23:30:12 2020 +0100
@@ -84,9 +84,8 @@
def compound_name(a: String, b: String): String = a + ":" + b
- def empty_entry(session_name: String, theory_name: String, name: String): Entry =
- Entry(session_name, theory_name, name, false,
- Future.value(false, Bytes.empty), XZ.no_cache())
+ def empty_entry(theory_name: String, name: String): Entry =
+ Entry("", theory_name, name, false, Future.value(false, Bytes.empty), XZ.no_cache())
sealed case class Entry(
session_name: String,
@@ -260,10 +259,12 @@
}
def database_context(
- context: Sessions.Database_Context, session: String, theory_name: String): Provider =
+ context: Sessions.Database_Context,
+ sessions: List[String],
+ theory_name: String): Provider =
new Provider {
def apply(export_name: String): Option[Entry] =
- context.read_export(session, theory_name, export_name)
+ context.read_export(sessions, theory_name, export_name)
def focus(other_theory: String): Provider = this
--- a/src/Pure/Thy/presentation.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Thy/presentation.scala Wed Dec 09 23:30:12 2020 +0100
@@ -461,6 +461,7 @@
/* session info */
val info = deps.sessions_structure(session)
+ val hierarchy = deps.sessions_structure.hierarchy(session)
val options = info.options
val base = deps(session)
val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display)
@@ -471,7 +472,7 @@
lazy val tex_files =
for (name <- base.session_theories ::: base.document_theories)
yield {
- val entry = db_context.get_export(session, name.theory, document_tex_name(name))
+ val entry = db_context.get_export(hierarchy, name.theory, document_tex_name(name))
Path.basic(tex_name(name)) -> entry.uncompressed
}
@@ -673,7 +674,7 @@
progress.echo_warning("No output directory")
}
- using(store.open_database_context(deps.sessions_structure))(db_context =>
+ using(store.open_database_context())(db_context =>
build_documents(session, deps, db_context, progress = progress,
output_sources = output_sources, output_pdf = output_pdf,
verbose = true, verbose_latex = verbose_latex))
--- a/src/Pure/Thy/sessions.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Thy/sessions.scala Wed Dec 09 23:30:12 2020 +0100
@@ -633,80 +633,85 @@
sessions = Library.merge(sessions, other.sessions))
}
- def make(infos: List[Info]): Structure =
+ object Structure
{
- def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Traversable[String])
- : Graph[String, Info] =
+ val empty: Structure = make(Nil)
+
+ def make(infos: List[Info]): Structure =
{
- def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) =
+ def add_edges(graph: Graph[String, Info], kind: String, edges: Info => Traversable[String])
+ : Graph[String, Info] =
{
- if (!g.defined(parent))
- error("Bad " + kind + " session " + quote(parent) + " for " +
- quote(name) + Position.here(pos))
+ def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) =
+ {
+ if (!g.defined(parent))
+ error("Bad " + kind + " session " + quote(parent) + " for " +
+ quote(name) + Position.here(pos))
- try { g.add_edge_acyclic(parent, name) }
- catch {
- case exn: Graph.Cycles[_] =>
- error(cat_lines(exn.cycles.map(cycle =>
- "Cyclic session dependency of " +
- cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
+ try { g.add_edge_acyclic(parent, name) }
+ catch {
+ case exn: Graph.Cycles[_] =>
+ error(cat_lines(exn.cycles.map(cycle =>
+ "Cyclic session dependency of " +
+ cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
+ }
+ }
+ (graph /: graph.iterator) {
+ case (g, (name, (info, _))) => (g /: edges(info))(add_edge(info.pos, name, _, _))
}
}
- (graph /: graph.iterator) {
- case (g, (name, (info, _))) => (g /: edges(info))(add_edge(info.pos, name, _, _))
- }
- }
- val info_graph =
- (Graph.string[Info] /: infos) {
- case (graph, info) =>
- if (graph.defined(info.name))
- error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
- Position.here(graph.get_node(info.name).pos))
- else graph.new_node(info.name, info)
- }
- val build_graph = add_edges(info_graph, "parent", _.parent)
- val imports_graph = add_edges(build_graph, "imports", _.imports)
+ val info_graph =
+ (Graph.string[Info] /: infos) {
+ case (graph, info) =>
+ if (graph.defined(info.name))
+ error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
+ Position.here(graph.get_node(info.name).pos))
+ else graph.new_node(info.name, info)
+ }
+ val build_graph = add_edges(info_graph, "parent", _.parent)
+ val imports_graph = add_edges(build_graph, "imports", _.imports)
- val session_positions: List[(String, Position.T)] =
- (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList
+ val session_positions: List[(String, Position.T)] =
+ (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList
- val session_directories: Map[JFile, String] =
- (Map.empty[JFile, String] /:
- (for {
- session <- imports_graph.topological_order.iterator
- info = info_graph.get_node(session)
- dir <- info.dirs.iterator
- } yield (info, dir)))({ case (dirs, (info, dir)) =>
- val session = info.name
- val canonical_dir = dir.canonical_file
- dirs.get(canonical_dir) match {
- case Some(session1) =>
- val info1 = info_graph.get_node(session1)
- error("Duplicate use of directory " + dir +
- "\n for session " + quote(session1) + Position.here(info1.pos) +
- "\n vs. session " + quote(session) + Position.here(info.pos))
- case None => dirs + (canonical_dir -> session)
- }
- })
+ val session_directories: Map[JFile, String] =
+ (Map.empty[JFile, String] /:
+ (for {
+ session <- imports_graph.topological_order.iterator
+ info = info_graph.get_node(session)
+ dir <- info.dirs.iterator
+ } yield (info, dir)))({ case (dirs, (info, dir)) =>
+ val session = info.name
+ val canonical_dir = dir.canonical_file
+ dirs.get(canonical_dir) match {
+ case Some(session1) =>
+ val info1 = info_graph.get_node(session1)
+ error("Duplicate use of directory " + dir +
+ "\n for session " + quote(session1) + Position.here(info1.pos) +
+ "\n vs. session " + quote(session) + Position.here(info.pos))
+ case None => dirs + (canonical_dir -> session)
+ }
+ })
- val global_theories: Map[String, String] =
- (Thy_Header.bootstrap_global_theories.toMap /:
- (for {
- session <- imports_graph.topological_order.iterator
- info = info_graph.get_node(session)
- thy <- info.global_theories.iterator }
- yield (info, thy)))({ case (global, (info, thy)) =>
- val qualifier = info.name
- global.get(thy) match {
- case Some(qualifier1) if qualifier != qualifier1 =>
- error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
- case _ => global + (thy -> qualifier)
- }
- })
+ val global_theories: Map[String, String] =
+ (Thy_Header.bootstrap_global_theories.toMap /:
+ (for {
+ session <- imports_graph.topological_order.iterator
+ info = info_graph.get_node(session)
+ thy <- info.global_theories.iterator }
+ yield (info, thy)))({ case (global, (info, thy)) =>
+ val qualifier = info.name
+ global.get(thy) match {
+ case Some(qualifier1) if qualifier != qualifier1 =>
+ error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
+ case _ => global + (thy -> qualifier)
+ }
+ })
- new Structure(
- session_positions, session_directories, global_theories, build_graph, imports_graph)
+ new Structure(
+ session_positions, session_directories, global_theories, build_graph, imports_graph)
+ }
}
final class Structure private[Sessions](
@@ -821,6 +826,8 @@
deps
}
+ def hierarchy(session: String): List[String] = build_graph.all_preds(List(session))
+
def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
@@ -1057,7 +1064,7 @@
}
}).toList.map(_._2)
- make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos)
+ Structure.make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos)
}
@@ -1194,7 +1201,6 @@
class Database_Context private[Sessions](
val store: Sessions.Store,
- val sessions_structure: Sessions.Structure,
database_server: Option[SQL.Database]) extends AutoCloseable
{
def xml_cache: XML.Cache = store.xml_cache
@@ -1218,16 +1224,16 @@
}
}
- def read_export(session: String, theory_name: String, name: String): Option[Export.Entry] =
+ def read_export(
+ sessions: List[String], theory_name: String, name: String): Option[Export.Entry] =
{
- val hierarchy = sessions_structure.build_graph.all_preds(List(session)).view
val attempts =
database_server match {
case Some(db) =>
- hierarchy.map(session_name =>
+ sessions.view.map(session_name =>
Export.read_entry(db, store.xz_cache, session_name, theory_name, name))
case None =>
- hierarchy.map(session_name =>
+ sessions.view.map(session_name =>
store.try_open_database(session_name) match {
case Some(db) =>
using(db)(Export.read_entry(_, store.xz_cache, session_name, theory_name, name))
@@ -1237,9 +1243,10 @@
attempts.collectFirst({ case Some(entry) => entry })
}
- def get_export(session: String, theory_name: String, name: String): Export.Entry =
- read_export(session, theory_name, name) getOrElse
- Export.empty_entry(session, theory_name, name)
+ def get_export(
+ session_hierarchy: List[String], theory_name: String, name: String): Export.Entry =
+ read_export(session_hierarchy, theory_name, name) getOrElse
+ Export.empty_entry(theory_name, name)
override def toString: String =
{
@@ -1331,9 +1338,8 @@
port = options.int("build_database_ssh_port"))),
ssh_close = true)
- def open_database_context(sessions_structure: Sessions.Structure): Database_Context =
- new Database_Context(store, sessions_structure,
- if (database_server) Some(open_database_server()) else None)
+ def open_database_context(): Database_Context =
+ new Database_Context(store, if (database_server) Some(open_database_server()) else None)
def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] =
{
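Database_Context.read_export/get_export now take the session hierarchy explicitly, and open_database_context() no longer requires a Sessions.Structure; the hierarchy comes from the new Structure.hierarchy operation. A minimal sketch of the changed call pattern, mirroring Presentation.build_documents above (store, deps, session, theory and name are assumed to be in scope):

  // Sketch of the new API: the caller supplies the session hierarchy.
  val hierarchy = deps.sessions_structure.hierarchy(session)
  using(store.open_database_context())(db_context =>
    db_context.get_export(hierarchy, theory, name))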
--- a/src/Pure/Thy/thy_info.ML Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Thy/thy_info.ML Wed Dec 09 23:30:12 2020 +0100
@@ -56,8 +56,7 @@
);
fun apply_presentation (context: presentation_context) thy =
- (ignore (Presentation.get thy |> Par_List.map (fn (f, _) => f context thy));
- Output.try_protocol_message (Markup.finished_theory (Context.theory_long_name thy)) []);
+ ignore (Presentation.get thy |> Par_List.map (fn (f, _) => f context thy));
fun add_presentation f = Presentation.map (cons (f, stamp ()));
--- a/src/Pure/Tools/build.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Tools/build.scala Wed Dec 09 23:30:12 2020 +0100
@@ -502,7 +502,7 @@
val presentation_dir = presentation.dir(store)
progress.echo("Presentation in " + presentation_dir.absolute)
- using(store.open_database_context(deps.sessions_structure))(db_context =>
+ using(store.open_database_context())(db_context =>
for ((_, (session_name, _)) <- presentation_chapters) {
progress.expose_interrupt()
progress.echo("Presenting " + session_name + " ...")
--- a/src/Pure/Tools/build_job.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Tools/build_job.scala Wed Dec 09 23:30:12 2020 +0100
@@ -12,13 +12,16 @@
object Build_Job
{
+ /* theory markup/messages from database */
+
def read_theory(
- db_context: Sessions.Database_Context, node_name: Document.Node.Name): Command =
+ db_context: Sessions.Database_Context,
+ resources: Resources,
+ session: String,
+ theory: String): Option[Command] =
{
- val session = db_context.sessions_structure.bootstrap.theory_qualifier(node_name)
-
def read(name: String): Export.Entry =
- db_context.get_export(session, node_name.theory, name)
+ db_context.get_export(List(session), theory, name)
def read_xml(name: String): XML.Body =
db_context.xml_cache.body(
@@ -28,18 +31,14 @@
case (Value.Long(id), thy_file :: blobs_files) =>
val thy_path = Path.explode(thy_file)
val thy_source = Symbol.decode(File.read(thy_path))
+ val node_name = resources.file_node(thy_path, theory = theory)
val blobs =
blobs_files.map(file =>
{
- val master_dir =
- Thy_Header.split_file_name(file) match {
- case Some((dir, _)) => dir
- case None => ""
- }
val path = Path.explode(file)
val src_path = File.relative_path(thy_path, path).getOrElse(path)
- Command.Blob.read_file(Document.Node.Name(file, master_dir), src_path)
+ Command.Blob.read_file(resources.file_node(path), src_path)
})
val blobs_info = Command.Blobs_Info(blobs.map(Exn.Res(_)))
@@ -60,12 +59,111 @@
index -> Markup_Tree.from_XML(xml)
})
- Command.unparsed(thy_source, theory = true, id = id, node_name = node_name,
- blobs_info = blobs_info, results = results, markups = markups)
-
- case _ => error("Malformed PIDE exports for theory " + node_name)
+ val command =
+ Command.unparsed(thy_source, theory = true, id = id, node_name = node_name,
+ blobs_info = blobs_info, results = results, markups = markups)
+ Some(command)
+ case _ => None
}
}
+
+
+ /* print messages */
+
+ def print_log(
+ options: Options,
+ session_name: String,
+ theories: List[String] = Nil,
+ progress: Progress = new Progress,
+ margin: Double = Pretty.default_margin,
+ breakgain: Double = Pretty.default_breakgain,
+ metric: Pretty.Metric = Pretty.Default_Metric)
+ {
+ val store = Sessions.store(options)
+
+ val resources = new Resources(Sessions.Structure.empty, Sessions.Structure.empty.bootstrap)
+ val session = new Session(options, resources)
+
+ using(store.open_database_context())(db_context =>
+ {
+ val result =
+ db_context.input_database(session_name)((db, _) =>
+ {
+ val theories = store.read_theories(db, session_name)
+ val errors = store.read_errors(db, session_name)
+ store.read_build(db, session_name).map(info => (theories, errors, info.return_code))
+ })
+ result match {
+ case None => error("Missing build database for session " + quote(session_name))
+ case Some((used_theories, errors, rc)) =>
+ val bad_theories = theories.filterNot(used_theories.toSet)
+ if (bad_theories.nonEmpty) error("Unknown theories " + commas_quote(bad_theories))
+
+ val print_theories =
+ if (theories.isEmpty) used_theories else used_theories.filter(theories.toSet)
+ for (thy <- print_theories) {
+ val thy_heading = "\nTheory " + quote(thy)
+ read_theory(db_context, resources, session_name, thy) match {
+ case None => progress.echo(thy_heading + ": MISSING")
+ case Some(command) =>
+ progress.echo(thy_heading)
+ val snapshot = Document.State.init.command_snippet(command)
+ val rendering = new Rendering(snapshot, options, session)
+ for (Text.Info(_, t) <- rendering.text_messages(Text.Range.full)) {
+ progress.echo(
+ Protocol.message_text(List(t), margin = margin, breakgain = breakgain,
+ metric = metric))
+ }
+ }
+ }
+
+ if (errors.nonEmpty) {
+ progress.echo("\nErrors:\n" + Output.error_message_text(cat_lines(errors)))
+ }
+ if (rc != 0) progress.echo("\n" + Process_Result.print_return_code(rc))
+ }
+ })
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool = Isabelle_Tool("log", "print messages from build database",
+ Scala_Project.here, args =>
+ {
+ /* arguments */
+
+ var theories: List[String] = Nil
+ var margin = Pretty.default_margin
+ var options = Options.init()
+
+ val getopts = Getopts("""
+Usage: isabelle log [OPTIONS] SESSION
+
+ Options are:
+ -T NAME restrict to given theories (multiple options)
+ -m MARGIN margin for pretty printing (default: """ + margin + """)
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+
+ Print messages from the build database of the given session, without any
+ checks against current sources: results from a failed build can be
+ printed as well.
+""",
+ "T:" -> (arg => theories = theories ::: List(arg)),
+ "m:" -> (arg => margin = Value.Double.parse(arg)),
+ "o:" -> (arg => options = options + arg))
+
+ val more_args = getopts(args)
+ val session_name =
+ more_args match {
+ case List(session_name) => session_name
+ case _ => getopts.usage()
+ }
+
+ val progress = new Console_Progress()
+
+ print_log(options, session_name, theories = theories, margin = margin, progress = progress)
+ })
}
class Build_Job(progress: Progress,
@@ -120,10 +218,6 @@
}
}
}
- def make_rendering(snapshot: Document.Snapshot): Rendering =
- new Rendering(snapshot, options, session) {
- override def model: Document.Model = ???
- }
object Build_Session_Errors
{
@@ -233,7 +327,7 @@
session.finished_theories += Session.Consumer[Document.Snapshot]("finished_theories")
{
case snapshot =>
- val rendering = make_rendering(snapshot)
+ val rendering = new Rendering(snapshot, options, session)
def export(name: String, xml: XML.Body, compress: Boolean = true)
{
@@ -326,7 +420,7 @@
try {
if (build_errors.isInstanceOf[Exn.Res[_]] && process_result.ok && info.documents.nonEmpty)
{
- using(store.open_database_context(deps.sessions_structure))(db_context =>
+ using(store.open_database_context())(db_context =>
{
val documents =
Presentation.build_documents(session_name, deps, db_context,
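The new tool wrapper exposes the stored log via the command line, e.g. an invocation like "isabelle log -T HOL.Nat HOL" (session and theory names here are purely illustrative). The Scala entry point can also be called directly; a minimal sketch with the same hypothetical names:

  // Sketch: programmatic counterpart of the illustrative invocation above.
  Build_Job.print_log(Options.init(), "HOL",
    theories = List("HOL.Nat"), progress = new Console_Progress())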
--- a/src/Pure/Tools/spell_checker.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Pure/Tools/spell_checker.scala Wed Dec 09 23:30:12 2020 +0100
@@ -56,7 +56,7 @@
{
for {
spell_range <- rendering.spell_checker_point(range)
- text <- rendering.model.get_text(spell_range)
+ text <- rendering.get_text(spell_range)
info <- marked_words(spell_range.start, text, info => info.range.overlaps(range)).headOption
} yield info
}
--- a/src/Tools/VSCode/src/vscode_rendering.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Tools/VSCode/src/vscode_rendering.scala Wed Dec 09 23:30:12 2020 +0100
@@ -65,14 +65,15 @@
Markup.Elements(Markup.ENTITY, Markup.PATH, Markup.POSITION, Markup.CITATION)
}
-class VSCode_Rendering(snapshot: Document.Snapshot, _model: VSCode_Model)
- extends Rendering(snapshot, _model.resources.options, _model.session)
+class VSCode_Rendering(snapshot: Document.Snapshot, val model: VSCode_Model)
+ extends Rendering(snapshot, model.resources.options, model.session)
{
rendering =>
- def model: VSCode_Model = _model
def resources: VSCode_Resources = model.resources
+ override def get_text(range: Text.Range): Option[String] = model.get_text(range)
+
/* bibtex */
--- a/src/Tools/jEdit/src/jedit_rendering.scala Wed Dec 09 10:51:45 2020 +0100
+++ b/src/Tools/jEdit/src/jedit_rendering.scala Wed Dec 09 23:30:12 2020 +0100
@@ -161,10 +161,10 @@
}
-class JEdit_Rendering(snapshot: Document.Snapshot, _model: Document_Model, options: Options)
+class JEdit_Rendering(snapshot: Document.Snapshot, model: Document_Model, options: Options)
extends Rendering(snapshot, options, PIDE.session)
{
- def model: Document_Model = _model
+ override def get_text(range: Text.Range): Option[String] = model.get_text(range)
/* colors */