clarified 'export_files' in session ROOT: require explicit "isabelle build -e";
tuned messages;
--- a/NEWS Fri Feb 15 12:34:29 2019 +0100
+++ b/NEWS Fri Feb 15 17:00:21 2019 +0100
@@ -104,10 +104,10 @@
* Code generation: command 'export_code' without file keyword exports
code as regular theory export, which can be materialized using the
-command-line tool "isabelle export" or 'export_files' in the session
-ROOT, or browsed in Isabelle/jEdit via the "isabelle-export:"
-file-system. To get generated code dumped into output, use "file \<open>\<close>".
-Minor INCOMPATIBILITY.
+command-line tools "isabelle export" or "isabelle build -e" (with
+'export_files' in the session ROOT), or browsed in Isabelle/jEdit via
+the "isabelle-export:" file-system. To get generated code dumped into
+output, use "file \<open>\<close>". Minor INCOMPATIBILITY.
* Code generation for OCaml: proper strings are used for literals.
Minor INCOMPATIBILITY.
@@ -721,7 +721,9 @@
*** ML ***
* Operation Export.export emits theory exports (arbitrary blobs), which
-are stored persistently in the session build database.
+are stored persistently in the session build database. Command-line
+tools "isabelle export" and "isabelle build -e" allow materializing
+exports in the physical file-system.
* Command 'ML_export' exports ML toplevel bindings to the global
bootstrap environment of the ML process. This allows ML evaluation
@@ -777,8 +779,9 @@
* Command-line tool "isabelle imports -I" also reports actual session
imports. This helps to minimize the session dependency graph.
-* The command-line tool "export" and 'export_files' in session ROOT
-entries retrieve theory exports from the session build database.
+* The command-line tools "export" and "isabelle build -e" (with
+'export_files' in session ROOT entries) retrieve theory exports from the
+session build database.
* The command-line tools "isabelle server" and "isabelle client" provide
access to the Isabelle Server: it supports responsive session management
--- a/src/Doc/System/Sessions.thy Fri Feb 15 12:34:29 2019 +0100
+++ b/src/Doc/System/Sessions.thy Fri Feb 15 17:00:21 2019 +0100
@@ -145,13 +145,14 @@
\<^verbatim>\<open>document\<close> within the session root directory.
\<^descr> \isakeyword{export_files}~\<open>(\<close>\isakeyword{in}~\<open>target_dir) [number]
- patterns\<close> writes theory exports to the file-system: the \<open>target_dir\<close>
- specification is relative to the session root directory; its default is
- \<^verbatim>\<open>export\<close>. Exports are selected via \<open>patterns\<close> as in @{tool_ref export}
- (\secref{sec:tool-export}). The number given in brackets (default: 0)
- specifies elements that should be pruned from each name: it allows to reduce
- the resulting directory hierarchy at the danger of overwriting files due to
- loss of uniqueness.
+ patterns\<close> specifies theory exports that may get written to the file-system,
+ e.g. via @{tool_ref build} with option \<^verbatim>\<open>-e\<close> (\secref{sec:tool-build}). The
+ \<open>target_dir\<close> specification is relative to the session root directory; its
+ default is \<^verbatim>\<open>export\<close>. Exports are selected via \<open>patterns\<close> as in @{tool_ref
+ export} (\secref{sec:tool-export}). The number given in brackets (default:
+0) specifies elements that should be pruned from each name: this allows
+reducing the resulting directory hierarchy at the risk of overwriting files
+ due to loss of uniqueness.
\<close>
@@ -288,6 +289,7 @@
-b build heap images
-c clean build
-d DIR include session directory
+ -e export files from session specification into file-system
-f fresh build
-g NAME select session group NAME
-j INT maximum number of parallel jobs (default 1)
@@ -374,6 +376,15 @@
parent or import graph) before performing the specified build operation.
\<^medskip>
+ Option \<^verbatim>\<open>-e\<close> executes the \isakeyword{export_files} directives from the ROOT
+ specification of all explicitly selected sessions: the status of the session
+ build database needs to be OK, but the session could have been built
+earlier. Using \isakeyword{export_files}, a session may serve as an abstract
+ interface for add-on build artefacts, but these are only materialized on
+ explicit request: without option \<^verbatim>\<open>-e\<close> there is no effect on the physical
+ file-system yet.
+
+ \<^medskip>
Option \<^verbatim>\<open>-f\<close> forces a fresh build of all selected sessions and their
requirements.
--- a/src/Pure/Thy/export.scala Fri Feb 15 12:34:29 2019 +0100
+++ b/src/Pure/Thy/export.scala Fri Feb 15 17:00:21 2019 +0100
@@ -264,8 +264,7 @@
progress: Progress = No_Progress,
export_prune: Int = 0,
export_list: Boolean = false,
- export_patterns: List[String] = Nil,
- export_prefix: String = "")
+ export_patterns: List[String] = Nil)
{
using(store.open_database(session_name))(db =>
{
@@ -298,7 +297,7 @@
}
else export_dir + Path.make(elems.drop(export_prune))
- progress.echo(export_prefix + "export " + path)
+ progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
Isabelle_System.mkdirs(path.dir)
Bytes.write(path, entry.uncompressed(cache = store.xz_cache))
File.set_executable(path, entry.executable)
--- a/src/Pure/Tools/build.scala Fri Feb 15 12:34:29 2019 +0100
+++ b/src/Pure/Tools/build.scala Fri Feb 15 17:00:21 2019 +0100
@@ -334,16 +334,8 @@
Isabelle_System.rm_tree(export_tmp_dir)
- if (result1.ok) {
- for ((dir, prune, pats) <- info.export_files) {
- Export.export_files(store, name, info.dir + dir,
- progress = if (verbose) progress else No_Progress,
- export_prune = prune,
- export_patterns = pats,
- export_prefix = name + ": ")
- }
+ if (result1.ok)
Present.finish(progress, store.browser_info, graph_file, info, name)
- }
graph_file.delete
@@ -405,6 +397,7 @@
soft_build: Boolean = false,
system_mode: Boolean = false,
verbose: Boolean = false,
+ export_files: Boolean = false,
pide: Boolean = false,
requirements: Boolean = false,
all_sessions: Boolean = false,
@@ -640,8 +633,8 @@
val numa_node = numa_nodes.next(used_node(_))
val job =
- new Job(progress, name, info, deps, store, do_output,
- verbose, pide, numa_node, queue.command_timings(name))
+ new Job(progress, name, info, deps, store, do_output, verbose, pide = pide,
+ numa_node, queue.command_timings(name))
loop(pending, running + (name -> (ancestor_heaps, job)), results)
}
else {
@@ -672,6 +665,21 @@
(for ((name, result) <- results0.iterator)
yield (name, (result.process, result.info))).toMap)
+ if (export_files) {
+ for (name <- full_sessions.imports_selection(selection1).iterator if results(name).ok) {
+ val info = results.info(name)
+ if (info.export_files.nonEmpty) {
+ progress.echo("Exporting " + info.name + " ...")
+ for ((dir, prune, pats) <- info.export_files) {
+ Export.export_files(store, name, info.dir + dir,
+ progress = if (verbose) progress else No_Progress,
+ export_prune = prune,
+ export_patterns = pats)
+ }
+ }
+ }
+ }
+
if (results.rc != 0 && (verbose || !no_build)) {
val unfinished =
(for {
@@ -721,6 +729,7 @@
var build_heap = false
var clean_build = false
var dirs: List[Path] = Nil
+ var export_files = false
var fresh_build = false
var session_groups: List[String] = Nil
var max_jobs = 1
@@ -747,6 +756,7 @@
-b build heap images
-c clean build
-d DIR include session directory
+ -e export files from session specification into file-system
-f fresh build
-g NAME select session group NAME
-j INT maximum number of parallel jobs (default 1)
@@ -772,6 +782,7 @@
"b" -> (_ => build_heap = true),
"c" -> (_ => clean_build = true),
"d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
+ "e" -> (_ => export_files = true),
"f" -> (_ => fresh_build = true),
"g:" -> (arg => session_groups = session_groups ::: List(arg)),
"j:" -> (arg => max_jobs = Value.Int.parse(arg)),
@@ -814,6 +825,7 @@
soft_build = soft_build,
system_mode = system_mode,
verbose = verbose,
+ export_files = export_files,
pide = pide,
requirements = requirements,
all_sessions = all_sessions,