--- a/NEWS Thu Nov 12 17:42:15 2020 +0100
+++ b/NEWS Sat Nov 14 13:01:12 2020 +0100
@@ -32,6 +32,11 @@
*** Document preparation ***
+* Keyword 'document_theories' within ROOT specifies theories from other
+sessions that should be included in the generated document source
+directory. This does not affect the generated session.tex: the session's
+LaTeX setup needs to \input{...} the generated .tex files separately.
+
* The standard LaTeX engine is now lualatex, according to settings
variable ISABELLE_PDFLATEX. This is mostly upwards compatible with old
pdflatex, but text encoding needs to conform strictly to utf8. Rare
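
For illustration, a hypothetical ROOT entry using the new keyword (all
names invented; the src/HOL/ROOT hunk below shows a real one):

  session Example_Doc = HOL +
    sessions
      Other_Session
    theories
      Local_Thy
    document_theories
      "Other_Session.Shared_Thy"
    document_files
      "root.tex"

Here root.tex still has to \input the .tex file generated for Shared_Thy
itself, since session.tex only covers the session's own theories.
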
--- a/src/Doc/System/Sessions.thy Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Doc/System/Sessions.thy Sat Nov 14 13:01:12 2020 +0100
@@ -55,7 +55,8 @@
@{syntax_def session_entry}: @'session' @{syntax system_name} groups? dir? '=' \<newline>
(@{syntax system_name} '+')? description? options? \<newline>
sessions? directories? (theories*) \<newline>
- (document_files*) (export_files*)
+ (document_theories?) (document_files*) \<newline>
+ (export_files*)
;
groups: '(' (@{syntax name} +) ')'
;
@@ -77,6 +78,8 @@
;
theory_entry: @{syntax system_name} ('(' @'global' ')')?
;
+ document_theories: @'document_theories' (@{syntax name}+)
+ ;
document_files: @'document_files' ('(' dir ')')? (@{syntax embedded}+)
;
export_files: @'export_files' ('(' dir ')')? ('[' nat ']')? \<newline>
@@ -145,6 +148,13 @@
\<open>HOLCF\<close>, \<open>IFOL\<close>, \<open>FOL\<close>, \<open>ZF\<close>, \<open>ZFC\<close> etc. Regular Isabelle applications
should not claim any global theory names.
+ \<^descr> \isakeyword{document_theories}~\<open>names\<close> specifies theories from other
+ sessions that should be included in the generated document source directory.
+ These theories need to be explicit imports in the current session, or
+ implicit imports from the underlying hierarchy of parent sessions. The
+ generated \<^verbatim>\<open>session.tex\<close> file is not affected: the session's {\LaTeX} setup
+ needs to \<^verbatim>\<open>\input{\<close>\<open>\<dots>\<close>\<^verbatim>\<open>}\<close> generated \<^verbatim>\<open>.tex\<close> files separately.
+
\<^descr> \isakeyword{document_files}~\<open>(\<close>\isakeyword{in}~\<open>base_dir) files\<close> lists
source files for document preparation, typically \<^verbatim>\<open>.tex\<close> and \<^verbatim>\<open>.sty\<close> for
{\LaTeX}. Only these explicitly given files are copied from the base
--- a/src/HOL/ROOT Thu Nov 12 17:42:15 2020 +0100
+++ b/src/HOL/ROOT Sat Nov 14 13:01:12 2020 +0100
@@ -959,6 +959,8 @@
VC_Principles
Reference
Complex_Types
+ document_theories
+ "HOL-SPARK-Examples.Greatest_Common_Divisor"
document_files
"complex_types.ads"
"complex_types_app.adb"
--- a/src/Pure/Admin/build_doc.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/Admin/build_doc.scala Sat Nov 14 13:01:12 2020 +0100
@@ -7,9 +7,6 @@
package isabelle
-import java.io.{File => JFile}
-
-
object Build_Doc
{
/* build_doc */
@@ -49,10 +46,20 @@
val doc_options =
options + "document=pdf" + "document_output=~~/doc" + "document_output_sources=false"
val deps = Sessions.load_structure(doc_options).selection_deps(selection)
- for (session <- selection.sessions) {
- progress.expose_interrupt()
- Present.build_documents(session, deps, store, progress = progress)
- }
+
+ val errs =
+ Par_List.map((doc_session: (String, String)) =>
+ try {
+ Present.build_documents(doc_session._2, deps, store, progress = progress)
+ None
+ }
+ catch {
+ case Exn.Interrupt.ERROR(msg) =>
+ val sep = if (msg.contains('\n')) "\n" else " "
+ Some("Documentation " + doc_session._1 + " failed:" + sep + msg)
+ }, selected).flatten
+
+ if (errs.nonEmpty) error(cat_lines(errs))
}
--- a/src/Pure/General/exn.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/General/exn.scala Sat Nov 14 13:01:12 2020 +0100
@@ -93,6 +93,12 @@
object Interrupt
{
+ object ERROR
+ {
+ def unapply(exn: Throwable): Option[String] =
+ if (is_interrupt(exn)) Some(message(exn)) else user_message(exn)
+ }
+
def apply(): Throwable = new InterruptedException("Interrupt")
def unapply(exn: Throwable): Boolean = is_interrupt(exn)
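
A minimal usage sketch for the new extractor (capture_errors is a
hypothetical helper): it turns both interrupts and user errors into a
message, so callers need only a single catch case, cf. the build.scala
hunk at the end of this changeset.

  def capture_errors(body: => Unit): Option[String] =
    try { body; None }
    catch { case Exn.Interrupt.ERROR(msg) => Some(msg) }
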
--- a/src/Pure/Sessions.thy Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/Sessions.thy Sat Nov 14 13:01:12 2020 +0100
@@ -8,7 +8,7 @@
imports Pure
keywords "session" :: thy_decl
and "description" "directories" "options" "sessions" "theories"
- "document_files" "export_files" :: quasi_command
+ "document_theories" "document_files" "export_files" :: quasi_command
and "global"
begin
--- a/src/Pure/System/bash.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/System/bash.scala Sat Nov 14 13:01:12 2020 +0100
@@ -45,6 +45,8 @@
/* process and result */
+ type Watchdog = (Time, Process => Boolean)
+
def process(script: String,
cwd: JFile = null,
env: Map[String, String] = Isabelle_System.settings(),
@@ -168,6 +170,7 @@
def result(
progress_stdout: String => Unit = (_: String) => (),
progress_stderr: String => Unit = (_: String) => (),
+ watchdog: Option[Watchdog] = None,
strict: Boolean = true): Process_Result =
{
stdin.close
@@ -177,9 +180,23 @@
val err_lines =
Future.thread("bash_stderr") { File.read_lines(stderr, progress_stderr) }
+ val watchdog_thread =
+ for ((time, check) <- watchdog)
+ yield {
+ Future.thread("bash_watchdog") {
+ while (proc.isAlive) {
+ time.sleep
+ if (check(this)) interrupt()
+ }
+ }
+ }
+
val rc =
try { join }
catch { case Exn.Interrupt() => terminate(); Exn.Interrupt.return_code }
+
+ watchdog_thread.foreach(_.cancel)
+
if (strict && rc == Exn.Interrupt.return_code) throw Exn.Interrupt()
Process_Result(rc, out_lines.join, err_lines.join, false, get_timing)
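
A hedged sketch of using the new watchdog parameter directly (script and
cancellation flag invented): the pair consists of a polling interval and
a check over the running process; while the process is alive, the check
runs at that interval, and a result of true interrupts the process.

  // Interrupt a long-running job once an external flag is set, polling
  // every 10 seconds; strict = false yields the interrupt return code
  // instead of raising Exn.Interrupt.
  val canceled = new java.util.concurrent.atomic.AtomicBoolean(false)
  val res =
    Bash.process("sleep 300").result(
      watchdog = Some((Time.seconds(10.0), _ => canceled.get)),
      strict = false)
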
--- a/src/Pure/System/isabelle_system.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/System/isabelle_system.scala Sat Nov 14 13:01:12 2020 +0100
@@ -347,11 +347,13 @@
redirect: Boolean = false,
progress_stdout: String => Unit = (_: String) => (),
progress_stderr: String => Unit = (_: String) => (),
+ watchdog: Option[Bash.Watchdog] = None,
strict: Boolean = true,
cleanup: () => Unit = () => ()): Process_Result =
{
Bash.process(script, cwd = cwd, env = env, redirect = redirect, cleanup = cleanup).
- result(progress_stdout, progress_stderr, strict)
+ result(progress_stdout = progress_stdout, progress_stderr = progress_stderr,
+ watchdog = watchdog, strict = strict)
}
def jconsole(): Process_Result =
--- a/src/Pure/System/progress.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/System/progress.scala Sat Nov 14 13:01:12 2020 +0100
@@ -54,12 +54,16 @@
env: Map[String, String] = Isabelle_System.settings(),
redirect: Boolean = false,
echo: Boolean = false,
+ watchdog: Time = Time.zero,
strict: Boolean = true): Process_Result =
{
- Isabelle_System.bash(script, cwd = cwd, env = env, redirect = redirect,
- progress_stdout = echo_if(echo, _),
- progress_stderr = echo_if(echo, _),
- strict = strict)
+ val result =
+ Isabelle_System.bash(script, cwd = cwd, env = env, redirect = redirect,
+ progress_stdout = echo_if(echo, _),
+ progress_stderr = echo_if(echo, _),
+ watchdog = if (watchdog.is_zero) None else Some((watchdog, _ => stopped)),
+ strict = strict)
+ if (strict && stopped) throw Exn.Interrupt() else result
}
}
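
Design note: Progress.bash deliberately narrows the interface to a plain
Time, with Time.zero (the default) meaning "no watchdog"; the check
function is fixed to the progress object's stopped flag. A hypothetical
call (cf. the present.scala hunk below, which uses 0.5s):

  val progress = new Progress
  val res =
    progress.bash("make all", echo = true, watchdog = Time.seconds(0.5))
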
--- a/src/Pure/Thy/present.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/Thy/present.scala Sat Nov 14 13:01:12 2020 +0100
@@ -223,9 +223,21 @@
val base = deps(session)
val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display)
+ def find_tex(name: Document.Node.Name): Option[Bytes] =
+ deps.sessions_structure.build_requirements(List(session)).reverse.view
+ .map(session_name =>
+ using(store.open_database(session_name))(db =>
+ Export.read_entry(db, session_name, name.theory, document_tex_name(name)).
+ map(_.uncompressed(cache = store.xz_cache))))
+ .collectFirst({ case Some(x) => x })
+
/* prepare document directory */
+ lazy val tex_files =
+ for (name <- base.session_theories ::: base.document_theories)
+ yield Path.basic(tex_name(name)) -> find_tex(name).getOrElse(Bytes.empty)
+
def prepare_dir(dir: Path, doc_name: String, doc_tags: List[String]): (Path, String) =
{
val doc_dir = dir + Path.basic(doc_name)
@@ -240,15 +252,7 @@
Library.terminate_lines(
base.session_theories.map(name => "\\input{" + tex_name(name) + "}")))
- using(store.open_database(session))(db =>
- for (name <- base.session_theories) {
- val tex =
- Export.read_entry(db, session, name.theory, document_tex_name(name)) match {
- case Some(entry) => entry.uncompressed(cache = store.xz_cache)
- case None => Bytes.empty
- }
- Bytes.write(doc_dir + Path.basic(tex_name(name)), tex)
- })
+ for ((path, tex) <- tex_files) Bytes.write(doc_dir + path, tex)
val root1 = "root_" + doc_name
val root = if ((doc_dir + Path.explode(root1).tex).is_file) root1 else "root"
@@ -278,7 +282,8 @@
"isabelle latex -o " + Bash.string(fmt) + " " + Bash.string(root + "." + ext)
def bash(items: String*): Process_Result =
- progress.bash(items.mkString(" && "), cwd = doc_dir.file, echo = verbose_latex)
+ progress.bash(items.mkString(" && "), cwd = doc_dir.file,
+ echo = verbose_latex, watchdog = Time.seconds(0.5))
// prepare document
@@ -315,21 +320,21 @@
})
}
- def output(echo: Boolean, dir: Path)
+ def output(dir: Path)
{
Isabelle_System.make_directory(dir)
for ((name, pdf) <- docs) {
val path = dir + Path.basic(name).pdf
Bytes.write(path, pdf)
- if (echo) progress.echo_document(path)
+ progress.echo_document(path)
}
}
if (info.options.bool("browser_info") || doc_output.isEmpty) {
- output(verbose, store.browser_info + Path.basic(info.chapter) + Path.basic(session))
+ output(store.browser_info + Path.basic(info.chapter) + Path.basic(session))
}
- doc_output.foreach(output(true, _))
+ doc_output.foreach(output)
docs
}
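
A note on the find_tex lookup order (hierarchy invented for
illustration): build_requirements returns the required sessions
bottom-up, so the .reverse lets the lazy view probe the current
session's database first, falling back to ancestors, and .collectFirst
stops at the first database holding the theory's generated .tex entry.

  // Assumed hierarchy: Pure < A < B, current session B, and a
  // document theory "A.Thy" coming from ancestor A:
  //   build_requirements(List("B")).reverse = List("B", "A", "Pure")
  // find_tex probes B's database first, then A's, which contains the
  // entry for document_tex_name of "A.Thy".
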
--- a/src/Pure/Thy/sessions.ML Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/Thy/sessions.ML Sat Nov 14 13:01:12 2020 +0100
@@ -27,6 +27,9 @@
val in_path =
Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "in" |-- Parse.position Parse.path --| Parse.$$$ ")");
+val document_theories =
+ Parse.$$$ "document_theories" |-- Scan.repeat1 (Parse.position Parse.name);
+
val document_files =
Parse.$$$ "document_files" |--
Parse.!!! (Scan.optional in_path ("document", Position.none)
@@ -55,11 +58,12 @@
Scan.optional
(Parse.$$$ "directories" |-- Parse.!!! (Scan.repeat1 (Parse.position Parse.path))) [] --
Scan.repeat theories --
+ Scan.optional document_theories [] --
Scan.repeat document_files --
Scan.repeat export_files))
- >> (fn ((((session, _), _), dir),
- ((((((((parent, descr), options), sessions), directories), theories),
- document_files), export_files))) =>
+ >> (fn (((((session, _), _), dir),
+ (((((((((parent, descr), options), sessions), directories), theories),
+ document_theories), document_files), export_files)))) =>
Toplevel.keep (fn state =>
let
val ctxt = Toplevel.context_of state;
@@ -80,6 +84,10 @@
ignore (Completion.check_option_value ctxt x y (Options.default ()))
handle ERROR msg => Output.error_message msg);
+ fun check_thy (path, pos) =
+ ignore (Resources.check_file ctxt (SOME Path.current) (Path.implode path, pos))
+ handle ERROR msg => Output.error_message msg;
+
val _ =
maps #2 theories |> List.app (fn (s, pos) =>
let
@@ -87,13 +95,18 @@
Resources.import_name session session_dir s
handle ERROR msg => error (msg ^ Position.here pos);
val theory_path = the_default node_name (Resources.find_theory_file theory_name);
- val _ = Resources.check_file ctxt (SOME Path.current) (Path.implode theory_path, pos);
- in () end);
+ in check_thy (theory_path, pos) end);
val _ =
directories |> List.app (ignore o Resources.check_dir ctxt (SOME session_dir));
val _ =
+ document_theories |> List.app (fn (thy, pos) =>
+ (case Resources.find_theory_file thy of
+ NONE => Output.error_message ("Unknown theory " ^ quote thy ^ Position.here pos)
+ | SOME path => check_thy (path, pos)));
+
+ val _ =
document_files |> List.app (fn (doc_dir, doc_files) =>
let
val dir = Resources.check_dir ctxt (SOME session_dir) doc_dir;
--- a/src/Pure/Thy/sessions.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/Thy/sessions.scala Sat Nov 14 13:01:12 2020 +0100
@@ -53,6 +53,7 @@
session_directories: Map[JFile, String] = Map.empty,
global_theories: Map[String, String] = Map.empty,
session_theories: List[Document.Node.Name] = Nil,
+ document_theories: List[Document.Node.Name] = Nil,
loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
used_theories: List[(Document.Node.Name, Options)] = Nil,
known_theories: Map[String, Document.Node.Entry] = Map.empty,
@@ -249,6 +250,30 @@
": need to include sessions " + quote(qualifier) + " in ROOT"
}
+ val document_errors =
+ info.document_theories.flatMap(
+ {
+ case (thy, pos) =>
+ val ancestors = sessions_structure.build_requirements(List(session_name))
+ val qualifier = deps_base.theory_qualifier(session_name)
+
+ def err(msg: String): Option[String] =
+ Some(msg + " " + quote(thy) + Position.here(pos))
+
+ known_theories.get(thy) match {
+ case None => err("Unknown document theory")
+ case Some(entry) =>
+ if (session_theories.contains(entry.name)) {
+ err("Redundant document theory from this session:")
+ }
+ else if (ancestors.contains(qualifier)) None
+ else if (dependencies.theories.contains(entry.name)) None
+ else err("Document theory from other session not imported properly:")
+ }
+ })
+ val document_theories =
+ info.document_theories.map({ case (thy, _) => known_theories(thy).name })
+
val dir_errors =
{
val ok = info.dirs.map(_.canonical_file).toSet
@@ -297,6 +322,7 @@
session_directories = sessions_structure.session_directories,
global_theories = sessions_structure.global_theories,
session_theories = session_theories,
+ document_theories = document_theories,
loaded_theories = dependencies.loaded_theories,
used_theories = dependencies.theories_adjunct,
known_theories = known_theories,
@@ -306,7 +332,8 @@
sources = check_sources(session_files),
session_graph_display = session_graph_display,
errors = dependencies.errors ::: loaded_files_errors ::: import_errors :::
- dir_errors ::: sources_errors ::: path_errors ::: bibtex_errors)
+ document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
+ bibtex_errors)
session_bases + (info.name -> base)
}
@@ -391,6 +418,7 @@
imports = info.deps,
directories = Nil,
theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
+ document_theories = Nil,
document_files = Nil,
export_files = Nil))))
}
@@ -426,6 +454,7 @@
imports: List[String],
theories: List[(Options, List[(String, Position.T)])],
global_theories: List[String],
+ document_theories: List[(String, Position.T)],
document_files: List[(Path, Path)],
export_files: List[(Path, Int, List[String])],
meta_digest: SHA1.Digest)
@@ -539,12 +568,15 @@
entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })
val meta_digest =
- SHA1.digest((name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
- entry.theories_no_position, conditions, entry.document_files).toString)
+ SHA1.digest(
+ (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
+ entry.theories_no_position, conditions, entry.document_theories, entry.document_files)
+ .toString)
Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path,
entry.parent, entry.description, directories, session_options,
- entry.imports, theories, global_theories, document_files, export_files, meta_digest)
+ entry.imports, theories, global_theories, entry.document_theories, document_files,
+ export_files, meta_digest)
}
catch {
case ERROR(msg) =>
@@ -791,6 +823,7 @@
private val SESSIONS = "sessions"
private val THEORIES = "theories"
private val GLOBAL = "global"
+ private val DOCUMENT_THEORIES = "document_theories"
private val DOCUMENT_FILES = "document_files"
private val EXPORT_FILES = "export_files"
@@ -804,6 +837,7 @@
(OPTIONS, Keyword.QUASI_COMMAND) +
(SESSIONS, Keyword.QUASI_COMMAND) +
(THEORIES, Keyword.QUASI_COMMAND) +
+ (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
(DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
(EXPORT_FILES, Keyword.QUASI_COMMAND)
@@ -820,6 +854,7 @@
imports: List[String],
directories: List[String],
theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
+ document_theories: List[(String, Position.T)],
document_files: List[(String, String)],
export_files: List[(String, Int, List[String])]) extends Entry
{
@@ -853,6 +888,9 @@
val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x }
+ val document_theories =
+ $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }
+
val document_files =
$$$(DOCUMENT_FILES) ~!
((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }
@@ -874,10 +912,11 @@
(($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
(($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
rep(theories) ~
+ (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
(rep(document_files) ^^ (x => x.flatten)) ~
- (rep(export_files))))) ^^
- { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k))) =>
- Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k) }
+ rep(export_files)))) ^^
+ { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l))) =>
+ Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l) }
}
def parse_root(path: Path): List[Entry] =
--- a/src/Pure/Tools/build.scala Thu Nov 12 17:42:15 2020 +0100
+++ b/src/Pure/Tools/build.scala Sat Nov 14 13:01:12 2020 +0100
@@ -386,7 +386,7 @@
Present.finish(store.browser_info, graph_pdf, info, session_name)
Nil
}
- catch { case ERROR(msg) => List(msg) case e@Exn.Interrupt() => List(Exn.message(e)) }
+ catch { case Exn.Interrupt.ERROR(msg) => List(msg) }
val result =
{