# HG changeset patch
# User wenzelm
# Date 1527356402 -7200
# Node ID 7ca0c23179e6b3486717476b2828dd71d7dced73
# Parent  1e1877cb9b3ac2763914fab5a7ff2a766a353f9c
support 'export_files' in session ROOT;

diff -r 1e1877cb9b3a -r 7ca0c23179e6 NEWS
--- a/NEWS	Sat May 26 19:39:06 2018 +0200
+++ b/NEWS	Sat May 26 19:40:02 2018 +0200
@@ -372,8 +372,8 @@
 $ISABELLE_HOME/heaps/$ML_IDENTIFIER (main Isabelle application or
 "isabelle jedit -s" or "isabelle build -s").
 
-* The command-line tool retrieves theory exports from the session build
-database.
+* The command-line tool "export" and 'export_files' in session ROOT
+entries retrieve theory exports from the session build database.
 
 * The command-line tools "isabelle server" and "isabelle client" provide
 access to the Isabelle Server: it supports responsive session management
diff -r 1e1877cb9b3a -r 7ca0c23179e6 src/Doc/System/Sessions.thy
--- a/src/Doc/System/Sessions.thy	Sat May 26 19:39:06 2018 +0200
+++ b/src/Doc/System/Sessions.thy	Sat May 26 19:40:02 2018 +0200
@@ -54,7 +54,7 @@
 
   @{syntax_def session_entry}: @'session' @{syntax system_name} groups? dir? '=' \
     (@{syntax system_name} '+')? description? options? \
-    (sessions?) (theories*) (document_files*)
+    (sessions?) (theories*) (document_files*) \ (export_files*)
   ;
   groups: '(' (@{syntax name} +) ')'
   ;
@@ -75,6 +75,8 @@
   theory_entry: @{syntax system_name} ('(' @'global' ')')?
   ;
   document_files: @'document_files' ('(' dir ')')? (@{syntax name}+)
+  ;
+  export_files: @'export_files' ('(' dir ')')? (@{syntax name}+)
 \}
 
 \<^descr> \isakeyword{session}~\A = B + body\ defines a new session \A\ based on
@@ -138,12 +140,14 @@
   directory to the document output directory, before formal document
   processing is started (see also \secref{sec:tool-document}). The local
   path structure of the \files\ is preserved, which allows to reconstruct the
-  original directory hierarchy of \base_dir\.
+  original directory hierarchy of \base_dir\. The default \base_dir\ is
+  \<^verbatim>\document\ within the session root directory.
 
-  \<^descr> \isakeyword{document_files}~\files\ abbreviates
-  \isakeyword{document_files}~\(\\isakeyword{in}~\document) files\, i.e.\
-  document sources are taken from the base directory \<^verbatim>\document\ within the
-  session root directory.
+  \<^descr> \isakeyword{export_files}~\(\\isakeyword{in}~\target_dir) patterns\ writes
+  theory exports to the file-system: the \target_dir\ specification is
+  relative to the session root directory; its default is \<^verbatim>\export\. Exports
+  are selected via \patterns\ as in @{tool_ref export}
+  (\secref{sec:tool-export}).
 \
 
diff -r 1e1877cb9b3a -r 7ca0c23179e6 src/Pure/Sessions.thy
--- a/src/Pure/Sessions.thy	Sat May 26 19:39:06 2018 +0200
+++ b/src/Pure/Sessions.thy	Sat May 26 19:40:02 2018 +0200
@@ -7,7 +7,8 @@
 theory Sessions
   imports Pure
   keywords "session" :: thy_decl
-    and "description" "options" "sessions" "theories" "document_files" :: quasi_command
+    and "description" "options" "sessions" "theories"
+      "document_files" "export_files" :: quasi_command
     and "global"
 begin
 
diff -r 1e1877cb9b3a -r 7ca0c23179e6 src/Pure/Thy/sessions.ML
--- a/src/Pure/Thy/sessions.ML	Sat May 26 19:39:06 2018 +0200
+++ b/src/Pure/Thy/sessions.ML	Sat May 26 19:40:02 2018 +0200
@@ -27,15 +27,18 @@
 val theories =
   Parse.$$$ "theories" |--
     Parse.!!! (Scan.optional Parse.options [] -- Scan.repeat1 theory_entry);
 
+val in_path =
+  Parse.$$$ "(" |-- Parse.!!! (Parse.$$$ "in" |-- Parse.position Parse.path --| Parse.$$$ ")");
+
 val document_files =
   Parse.$$$ "document_files" |--
-    Parse.!!!
-      (Scan.optional
-        (Parse.$$$ "(" |--
-          Parse.!!! (Parse.$$$ "in" |-- Parse.position Parse.path --| Parse.$$$ ")"))
-        ("document", Position.none)
+    Parse.!!! (Scan.optional in_path ("document", Position.none)
       -- Scan.repeat1 (Parse.position Parse.path));
 
+val export_files =
+  Parse.$$$ "export_files" |--
+    Parse.!!! (Scan.optional in_path ("export", Position.none) -- Scan.repeat1 Parse.name);
+
 in
 
@@ -49,9 +52,10 @@
           Scan.optional (Parse.$$$ "sessions" |--
            Parse.!!! (Scan.repeat1 (Parse.position Parse.session_name))) [] --
          Scan.repeat theories --
-         Scan.repeat document_files))
+         Scan.repeat document_files --
+         Scan.repeat export_files))
     >> (fn (((session, _), dir),
-        ((((((parent, descr), options), sessions), theories), document_files))) =>
+        (((((((parent, descr), options), sessions), theories), document_files), export_files))) =>
       Toplevel.keep (fn state =>
         let
           val ctxt = Toplevel.context_of state;
@@ -89,6 +93,10 @@
                val dir = Resources.check_dir ctxt session_dir doc_dir;
                val _ = List.app (ignore o Resources.check_file ctxt dir) doc_files;
              in () end);
+
+          val _ =
+            export_files |> List.app (fn (export_dir, _) =>
+              ignore (Resources.check_path ctxt session_dir export_dir));
         in () end));
 
 end;
diff -r 1e1877cb9b3a -r 7ca0c23179e6 src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala	Sat May 26 19:39:06 2018 +0200
+++ b/src/Pure/Thy/sessions.scala	Sat May 26 19:40:02 2018 +0200
@@ -428,7 +428,8 @@
               options = Nil,
               imports = info.deps,
               theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
-              document_files = Nil))))
+              document_files = Nil,
+              export_files = Nil))))
           }
         }
         else (session, Nil)
@@ -469,6 +470,7 @@
     theories: List[(Options, List[(String, Position.T)])],
     global_theories: List[String],
     document_files: List[(Path, Path)],
+    export_files: List[(Path, List[String])],
     meta_digest: SHA1.Digest)
   {
     def deps: List[String] = parent.toList ::: imports
@@ -520,13 +522,16 @@
         val document_files =
           entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })
 
+        val export_files =
+          entry.export_files.map({ case (dir, pats) => (Path.explode(dir), pats) })
+
         val meta_digest =
           SHA1.digest((name, chapter, entry.parent, entry.options, entry.imports,
-            entry.theories_no_position, conditions, entry.document_files).toString)
+            entry.theories_no_position, conditions, entry.document_files, entry.export_files).toString)
 
         Info(name, chapter, dir_selected, entry.pos, entry.groups, dir + Path.explode(entry.path),
           entry.parent, entry.description, session_options,
-          entry.imports, theories, global_theories, document_files, meta_digest)
+          entry.imports, theories, global_theories, document_files, export_files, meta_digest)
       }
       catch {
         case ERROR(msg) =>
@@ -703,6 +708,7 @@
   private val THEORIES = "theories"
   private val GLOBAL = "global"
   private val DOCUMENT_FILES = "document_files"
+  private val EXPORT_FILES = "export_files"
 
   val root_syntax =
     Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN +
@@ -712,7 +718,8 @@
       (OPTIONS, Keyword.QUASI_COMMAND) +
       (SESSIONS, Keyword.QUASI_COMMAND) +
       (THEORIES, Keyword.QUASI_COMMAND) +
-      (DOCUMENT_FILES, Keyword.QUASI_COMMAND)
+      (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
+      (EXPORT_FILES, Keyword.QUASI_COMMAND)
 
   abstract class Entry
   sealed case class Chapter(name: String) extends Entry
@@ -726,7 +733,8 @@
     options: List[Options.Spec],
     imports: List[String],
     theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
-    document_files: List[(String, String)]) extends Entry
+    document_files: List[(String, String)],
+    export_files: List[(String, List[String])]) extends Entry
   {
     def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
       theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
@@ -759,11 +767,15 @@
         ((options | success(Nil)) ~ rep1(theory_entry)) ^^
         { case _ ~ (x ~ y) => (x, y) }
 
+      val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x }
+
       val document_files =
        $$$(DOCUMENT_FILES) ~!
-          (($$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^
-            { case _ ~ (_ ~ x ~ _) => x } | success("document")) ~
-            rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }
+          ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }
+
+      val export_files =
+        $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ rep1(name)) ^^
+          { case _ ~ (x ~ y) => (x, y) }
 
       command(SESSION) ~!
         (position(session_name) ~
@@ -775,9 +787,10 @@
           (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
           (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
           rep(theories) ~
-          (rep(document_files) ^^ (x => x.flatten))))) ^^
-        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i))) =>
-          Session_Entry(pos, a, b, c, d, e, f, g, h, i) }
+          (rep(document_files) ^^ (x => x.flatten)) ~
+          (rep(export_files))))) ^^
+        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j))) =>
+          Session_Entry(pos, a, b, c, d, e, f, g, h, i, j) }
     }
 
     def parse_root(path: Path): List[Entry] =
diff -r 1e1877cb9b3a -r 7ca0c23179e6 src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala	Sat May 26 19:39:06 2018 +0200
+++ b/src/Pure/Tools/build.scala	Sat May 26 19:40:02 2018 +0200
@@ -322,8 +322,15 @@
         Isabelle_System.rm_tree(export_tmp_dir)
 
-        if (result1.ok)
+        if (result1.ok) {
+          for ((dir, pats) <- info.export_files) {
+            Export.export_files(store, name, info.dir + dir,
+              progress = if (verbose) progress else No_Progress,
+              export_patterns = pats,
+              export_prefix = name + ": ")
+          }
           Present.finish(progress, store.browser_info, graph_file, info, name)
+        }
 
         graph_file.delete
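For illustration, a session ROOT entry using the new keyword could look as follows. This is a minimal sketch derived from the grammar introduced above, not part of the changeset: the session name, parent session, theory name, target directory and export pattern are all hypothetical. The optional "(in ...)" directory is resolved relative to the session root and defaults to "export"; the patterns are interpreted as in the "isabelle export" tool (\secref{sec:tool-export}).

  session Example_Exports = HOL +
    options [document = false]
    theories
      Example_Theory
    export_files (in "generated") "*"

During "isabelle build", after the session has been built successfully, the exports selected by the pattern (here "*" is meant to select all exports recorded in the session build database) would be written to the directory "generated" under the session root, mirroring the behaviour of Export.export_files as invoked from build.scala above.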