# HG changeset patch
# User wenzelm
# Date 1605354905 -3600
# Node ID 2fa4f25d9d077110e365e3f2f450f2c7b74fd708
# Parent 76550282267f307d04cf70d1b135892b3289ed1d
official support for document theories from other sessions;

diff -r 76550282267f -r 2fa4f25d9d07 NEWS
--- a/NEWS	Thu Nov 12 16:27:31 2020 +0100
+++ b/NEWS	Sat Nov 14 12:55:05 2020 +0100
@@ -32,6 +32,11 @@
 
 *** Document preparation ***
 
+* Keyword 'document_theories' within ROOT specifies theories from other
+sessions that should be included in the generated document source
+directory. This does not affect the generated session.tex: \input{...}
+needs to be used separately.
+
 * The standard LaTeX engine is now lualatex, according to settings
 variable ISABELLE_PDFLATEX. This is mostly upwards compatible with old
 pdflatex, but text encoding needs to conform strictly to utf8. Rare
diff -r 76550282267f -r 2fa4f25d9d07 src/Doc/System/Sessions.thy
--- a/src/Doc/System/Sessions.thy	Thu Nov 12 16:27:31 2020 +0100
+++ b/src/Doc/System/Sessions.thy	Sat Nov 14 12:55:05 2020 +0100
@@ -55,7 +55,8 @@
     @{syntax_def session_entry}: @'session' @{syntax system_name} groups? dir? '=' \
       (@{syntax system_name} '+')? description? options? \
       sessions? directories? (theories*) \
-      (document_files*) (export_files*)
+      (document_theories?) (document_files*) \
+      (export_files*)
     ;
     groups: '(' (@{syntax name} +) ')'
     ;
@@ -77,6 +78,8 @@
     ;
     theory_entry: @{syntax system_name} ('(' @'global' ')')?
     ;
+    document_theories: @'document_theories' (@{syntax name}+)
+    ;
     document_files: @'document_files' ('(' dir ')')? (@{syntax embedded}+)
     ;
     export_files: @'export_files' ('(' dir ')')? ('[' nat ']')? \
@@ -145,6 +148,13 @@
   \HOLCF\, \IFOL\, \FOL\, \ZF\, \ZFC\ etc. Regular Isabelle applications
   should not claim any global theory names.
 
+  \<^descr> \isakeyword{document_theories}~\names\ specifies theories from other
+  sessions that should be included in the generated document source directory.
+  These theories need to be explicit imports in the current session, or
+  implicit imports from the underlying hierarchy of parent sessions. The
+  generated \<^verbatim>\session.tex\ file is not affected: the session's {\LaTeX} setup
+  needs to \<^verbatim>\\input{\\\\\<^verbatim>\}\ generated \<^verbatim>\.tex\ files separately.
+
   \<^descr> \isakeyword{document_files}~\(\\isakeyword{in}~\base_dir) files\ lists
   source files for document preparation, typically \<^verbatim>\.tex\ and \<^verbatim>\.sty\ for
   {\LaTeX}. Only these explicitly given files are copied from the base
diff -r 76550282267f -r 2fa4f25d9d07 src/Pure/Sessions.thy
--- a/src/Pure/Sessions.thy	Thu Nov 12 16:27:31 2020 +0100
+++ b/src/Pure/Sessions.thy	Sat Nov 14 12:55:05 2020 +0100
@@ -8,7 +8,7 @@
   imports Pure
   keywords "session" :: thy_decl
     and "description" "directories" "options" "sessions" "theories"
-      "document_files" "export_files" :: quasi_command
+      "document_theories" "document_files" "export_files" :: quasi_command
     and "global"
 begin
 
diff -r 76550282267f -r 2fa4f25d9d07 src/Pure/Thy/present.scala
--- a/src/Pure/Thy/present.scala	Thu Nov 12 16:27:31 2020 +0100
+++ b/src/Pure/Thy/present.scala	Sat Nov 14 12:55:05 2020 +0100
@@ -223,9 +223,21 @@
     val base = deps(session)
     val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display)
 
+    def find_tex(name: Document.Node.Name): Option[Bytes] =
+      deps.sessions_structure.build_requirements(List(session)).reverse.view
+        .map(session_name =>
+          using(store.open_database(session_name))(db =>
+            Export.read_entry(db, session_name, name.theory, document_tex_name(name)).
+              map(_.uncompressed(cache = store.xz_cache))))
+        .collectFirst({ case Some(x) => x })
+
     /* prepare document directory */
 
+    lazy val tex_files =
+      for (name <- base.session_theories ::: base.document_theories)
+      yield Path.basic(tex_name(name)) -> find_tex(name).getOrElse(Bytes.empty)
+
     def prepare_dir(dir: Path, doc_name: String, doc_tags: List[String]): (Path, String) =
     {
       val doc_dir = dir + Path.basic(doc_name)
 
@@ -240,15 +252,7 @@
         Library.terminate_lines(
           base.session_theories.map(name => "\\input{" + tex_name(name) + "}")))
 
-      using(store.open_database(session))(db =>
-        for (name <- base.session_theories) {
-          val tex =
-            Export.read_entry(db, session, name.theory, document_tex_name(name)) match {
-              case Some(entry) => entry.uncompressed(cache = store.xz_cache)
-              case None => Bytes.empty
-            }
-          Bytes.write(doc_dir + Path.basic(tex_name(name)), tex)
-        })
+      for ((path, tex) <- tex_files) Bytes.write(doc_dir + path, tex)
 
       val root1 = "root_" + doc_name
       val root = if ((doc_dir + Path.explode(root1).tex).is_file) root1 else "root"
diff -r 76550282267f -r 2fa4f25d9d07 src/Pure/Thy/sessions.ML
--- a/src/Pure/Thy/sessions.ML	Thu Nov 12 16:27:31 2020 +0100
+++ b/src/Pure/Thy/sessions.ML	Sat Nov 14 12:55:05 2020 +0100
@@ -27,6 +27,9 @@
 val in_path =
   Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "in" |-- Parse.position Parse.path --| Parse.$$$ ")");
 
+val document_theories =
+  Parse.$$$ "document_theories" |-- Scan.repeat1 (Parse.position Parse.name);
+
 val document_files =
   Parse.$$$ "document_files" |--
     Parse.!!! (Scan.optional in_path ("document", Position.none)
@@ -55,11 +58,12 @@
           Scan.optional (Parse.$$$ "directories" |-- Parse.!!!
             (Scan.repeat1 (Parse.position Parse.path))) [] --
           Scan.repeat theories --
+          Scan.optional document_theories [] --
           Scan.repeat document_files --
           Scan.repeat export_files))
-      >> (fn ((((session, _), _), dir),
-          ((((((((parent, descr), options), sessions), directories), theories),
-            document_files), export_files))) =>
+      >> (fn (((((session, _), _), dir),
+          (((((((((parent, descr), options), sessions), directories), theories),
+            document_theories), document_files), export_files)))) =>
         Toplevel.keep (fn state =>
           let
             val ctxt = Toplevel.context_of state;
@@ -80,6 +84,10 @@
                 ignore (Completion.check_option_value ctxt x y (Options.default ()))
                   handle ERROR msg => Output.error_message msg);
 
+            fun check_thy (path, pos) =
+              ignore (Resources.check_file ctxt (SOME Path.current) (Path.implode path, pos))
+                handle ERROR msg => Output.error_message msg;
+
             val _ =
               maps #2 theories |> List.app (fn (s, pos) =>
                 let
@@ -87,13 +95,18 @@
                     Resources.import_name session session_dir s
                       handle ERROR msg => error (msg ^ Position.here pos);
                   val theory_path = the_default node_name (Resources.find_theory_file theory_name);
-                  val _ = Resources.check_file ctxt (SOME Path.current) (Path.implode theory_path, pos);
-                in () end);
+                in check_thy (theory_path, pos) end);
 
             val _ =
               directories |> List.app (ignore o Resources.check_dir ctxt (SOME session_dir));
 
             val _ =
+              document_theories |> List.app (fn (thy, pos) =>
+                (case Resources.find_theory_file thy of
+                  NONE => Output.error_message ("Unknown theory " ^ quote thy ^ Position.here pos)
+                | SOME path => check_thy (path, pos)));
+
+            val _ =
               document_files |> List.app (fn (doc_dir, doc_files) =>
                 let
                   val dir = Resources.check_dir ctxt (SOME session_dir) doc_dir;
diff -r 76550282267f -r 2fa4f25d9d07 src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala	Thu Nov 12 16:27:31 2020 +0100
+++ b/src/Pure/Thy/sessions.scala	Sat Nov 14 12:55:05 2020 +0100
@@ -53,6 +53,7 @@
     session_directories: Map[JFile, String] = Map.empty,
     global_theories: Map[String, String] = Map.empty,
     session_theories: List[Document.Node.Name] = Nil,
+    document_theories: List[Document.Node.Name] = Nil,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
     used_theories: List[(Document.Node.Name, Options)] = Nil,
     known_theories: Map[String, Document.Node.Entry] = Map.empty,
@@ -249,6 +250,30 @@
                 ": need to include sessions " + quote(qualifier) + " in ROOT"
             }
 
+          val document_errors =
+            info.document_theories.flatMap(
+            {
+              case (thy, pos) =>
+                val ancestors = sessions_structure.build_requirements(List(session_name))
+                val qualifier = deps_base.theory_qualifier(session_name)
+
+                def err(msg: String): Option[String] =
+                  Some(msg + " " + quote(thy) + Position.here(pos))
+
+                known_theories.get(thy) match {
+                  case None => err("Unknown document theory")
+                  case Some(entry) =>
+                    if (session_theories.contains(entry.name)) {
+                      err("Redundant document theory from this session:")
+                    }
+                    else if (ancestors.contains(qualifier)) None
+                    else if (dependencies.theories.contains(entry.name)) None
+                    else err("Document theory from other session not imported properly:")
+                }
+            })
+          val document_theories =
+            info.document_theories.map({ case (thy, _) => known_theories(thy).name })
+
           val dir_errors =
           {
             val ok = info.dirs.map(_.canonical_file).toSet
@@ -297,6 +322,7 @@
             session_directories = sessions_structure.session_directories,
             global_theories = sessions_structure.global_theories,
             session_theories = session_theories,
+            document_theories = document_theories,
             loaded_theories = dependencies.loaded_theories,
             used_theories = dependencies.theories_adjunct,
             known_theories = known_theories,
@@ -306,7 +332,8 @@
             sources = check_sources(session_files),
             session_graph_display = session_graph_display,
             errors = dependencies.errors ::: loaded_files_errors ::: import_errors :::
-              dir_errors ::: sources_errors ::: path_errors ::: bibtex_errors)
+              document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
+              bibtex_errors)
 
         session_bases + (info.name -> base)
       }
@@ -391,6 +418,7 @@
               imports = info.deps,
               directories = Nil,
               theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
+              document_theories = Nil,
               document_files = Nil,
               export_files = Nil))))
   }
@@ -426,6 +454,7 @@
     imports: List[String],
     theories: List[(Options, List[(String, Position.T)])],
     global_theories: List[String],
+    document_theories: List[(String, Position.T)],
     document_files: List[(Path, Path)],
     export_files: List[(Path, Int, List[String])],
     meta_digest: SHA1.Digest)
@@ -539,12 +568,15 @@
           entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })
 
         val meta_digest =
-          SHA1.digest((name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
-            entry.theories_no_position, conditions, entry.document_files).toString)
+          SHA1.digest(
+            (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
+              entry.theories_no_position, conditions, entry.document_theories, entry.document_files)
+              .toString)
 
         Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path,
           entry.parent, entry.description, directories, session_options,
-          entry.imports, theories, global_theories, document_files, export_files, meta_digest)
+          entry.imports, theories, global_theories, entry.document_theories, document_files,
+          export_files, meta_digest)
       }
       catch {
         case ERROR(msg) =>
@@ -791,6 +823,7 @@
     private val SESSIONS = "sessions"
     private val THEORIES = "theories"
     private val GLOBAL = "global"
+    private val DOCUMENT_THEORIES = "document_theories"
     private val DOCUMENT_FILES = "document_files"
     private val EXPORT_FILES = "export_files"
 
@@ -804,6 +837,7 @@
       (OPTIONS, Keyword.QUASI_COMMAND) +
       (SESSIONS, Keyword.QUASI_COMMAND) +
       (THEORIES, Keyword.QUASI_COMMAND) +
+      (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
       (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
       (EXPORT_FILES, Keyword.QUASI_COMMAND)
 
@@ -820,6 +854,7 @@
     imports: List[String],
     directories: List[String],
     theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
+    document_theories: List[(String, Position.T)],
     document_files: List[(String, String)],
     export_files: List[(String, Int, List[String])]) extends Entry
   {
@@ -853,6 +888,9 @@
       val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^
        { case _ ~ (_ ~ x ~ _) => x }
 
+      val document_theories =
+        $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }
+
       val document_files =
         $$$(DOCUMENT_FILES) ~!
          ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }
@@ -874,10 +912,11 @@
             (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
             (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
             rep(theories) ~
+            (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
             (rep(document_files) ^^ (x => x.flatten)) ~
-            (rep(export_files))))) ^^
-        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k))) =>
-            Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k) }
+            rep(export_files)))) ^^
+        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l))) =>
+            Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l) }
     }
 
     def parse_root(path: Path): List[Entry] =
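
For illustration, a ROOT entry that uses the new 'document_theories' keyword
could look as follows. This is a minimal sketch, not part of the changeset
itself: the session, theory, and file names are hypothetical.

  session Example = HOL +
    options [document = pdf]
    sessions
      "HOL-Library"
    theories
      Example_Theory
    document_theories
      "HOL-Library.Multiset"
    document_files
      "root.tex"

Here Example_Theory is assumed to import HOL-Library.Multiset, making the
document theory a proper import of the current session; alternatively it may
stem from the underlying hierarchy of parent sessions. As the NEWS entry
states, the generated session.tex is not affected: root.tex needs to
\input{...} the generated .tex file of the extra theory separately.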