--- a/NEWS Fri Aug 08 16:46:03 2025 +0100
+++ b/NEWS Sun Aug 10 23:35:19 2025 +0200
@@ -31,9 +31,9 @@
database. Example: "isabelle jedit -l HOL src/HOL/Nat.thy" for theory
"HOL.Nat" in session "HOL". This information is read-only: editing
theory sources in the editor will invalidate formal markup, and replace
-it by an error message. Output messages exclude proof states and final
-results, unless the underlying session database has been built with
-option "show_states".
+it by an error message. Output messages depend on system options
+"show_results" (default true) and "show_states" (default false), as
+provided at build time for the underlying session database.
*** Isabelle/jEdit Prover IDE ***
@@ -335,6 +335,30 @@
*** System ***
+* The command-line tool "isabelle process_theories" takes source files
+and theories and processes them within a proper session context (like
+regular "isabelle build"). Output of prover messages works roughly
+like "isabelle build_log", while the theories are being processed.
+
+Examples:
+
+ isabelle process_theories -O HOL-Library.Multiset
+
+ isabelle process_theories -O -o show_states HOL-Library.Multiset
+
+ isabelle process_theories -f '~~/src/HOL/Examples/Seq.thy'
+
+* System option "show_results" (default true) controls output of
+toplevel command results (definitions, theorems) in batch-builds. In
+interactive mode this is always enabled; in batch-builds it can be
+disabled like this:
+
+ isabelle build -o show_results=false FOL
+
+Option "show_results" is also the default for the configuration option
+"show_results" within the formal context --- instead of "show_states"
+that was used for this purpose before.
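+
+The configuration option can be changed within the theory or proof
+context as usual, e.g.:
+
+ declare [[show_results = false]]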
+
* The traditional ML system settings have been reconsidered, and mostly
replaced by ML_Settings in Isabelle/Scala (e.g. via
ML_Settings.system(Options.init())). Potential INCOMPATIBILITY for old
--- a/etc/build.props Fri Aug 08 16:46:03 2025 +0100
+++ b/etc/build.props Sun Aug 10 23:35:19 2025 +0200
@@ -235,6 +235,7 @@
src/Pure/Tools/phabricator.scala \
src/Pure/Tools/print_operation.scala \
src/Pure/Tools/prismjs.scala \
+ src/Pure/Tools/process_theories.scala \
src/Pure/Tools/profiling.scala \
src/Pure/Tools/profiling_report.scala \
src/Pure/Tools/scala_build.scala \
--- a/etc/options Fri Aug 08 16:46:03 2025 +0100
+++ b/etc/options Sun Aug 10 23:35:19 2025 +0200
@@ -69,6 +69,8 @@
option goals_limit : int = 10 for content
-- "maximum number of subgoals to be printed"
+option show_results : bool = true for content
+ -- "show toplevel results even if outside of interactive mode"
option show_states : bool = false for content
-- "show toplevel states even if outside of interactive mode"
@@ -159,6 +161,9 @@
option profiling : string = "" (standard "time") for build
-- "ML profiling (possible values: time, time_thread, allocations)"
+option profiling_dir : string = "" for content
+ -- "auxiliary directory for \"isabelle profiling\" tool"
+
option system_log : string = "" (standard "-") for build
-- "output for system messages (log file or \"-\" for console progress)"
--- a/src/Doc/System/Environment.thy Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Doc/System/Environment.thy Sun Aug 10 23:35:19 2025 +0200
@@ -303,6 +303,15 @@
section \<open>The raw Isabelle ML process\<close>
+text \<open>
+ The raw Isabelle ML process has limited use in actual applications: it lacks
+ the full session context that is required for Isabelle/ML/Scala integration
+ and Prover IDE messages or markup. It is better to use @{tool build}
+ (\secref{sec:tool-build}) for regular sessions, or its front-end @{tool
+ process_theories} (\secref{sec:tool-process-theories}) for adhoc sessions.
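+
+ For example, a single theory file may be processed in an adhoc session like
+ this:
+
+ @{verbatim [display] \<open> isabelle process_theories -f '~~/src/HOL/Examples/Seq.thy'\<close>}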
+\<close>
+
+
subsection \<open>Batch mode \label{sec:tool-process}\<close>
text \<open>
--- a/src/Doc/System/Sessions.thy Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Doc/System/Sessions.thy Sun Aug 10 23:35:19 2025 +0200
@@ -1196,4 +1196,81 @@
@{verbatim [display] \<open> isabelle build_task -A: -X slow -g AFP\<close>}
\<close>
+
+section \<open>Process theories within a session context \label{sec:tool-process-theories}\<close>
+
+text \<open>
+ The @{tool_def process_theories} tool takes source files and theories from
+ existing sessions to compose an adhoc session (in a temporary directory)
+ that is then processed via regular @{tool build}. It is also possible to
+ output prover messages roughly like @{tool build_log}, while the theories
+ are being processed.
+
+ @{verbatim [display]
+\<open>Usage: isabelle process_theories [OPTIONS] [THEORIES...]
+
+ Options are:
+ -F FILE include additional session files, listed in FILE
+ -H REGEX filter messages by matching against head
+ -M REGEX filter messages by matching against body
+ -O output messages
+ -d DIR include session directory
+ -f FILE include additional session files
+ -l NAME logic session name (default ISABELLE_LOGIC="HOL")
+ -m MARGIN margin for pretty printing (default: 76.0)
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -v verbose
+
+ Process theories within an adhoc session context.
+\<close>}
+
+ Explicit \<^emph>\<open>theories\<close>, given as command-line arguments rather than options,
+ refer to qualified theory names from existing sessions, e.g.
+ \<^verbatim>\<open>HOL-Library.Multiset\<close> or \<^verbatim>\<open>HOL-Examples.Seq\<close>. The session qualifiers are
+ used to augment the adhoc session specification with suitable entries for
+ \isakeyword{sessions} (\secref{sec:session-root}).
+
+ \<^medskip>
+ Options \<^verbatim>\<open>-f\<close> and \<^verbatim>\<open>-F\<close> specify source files for the adhoc session directory
+ (multiple occurrences are possible): \<^verbatim>\<open>-f\<close> is for literal file names, and
+ \<^verbatim>\<open>-F\<close> is for files that contain a list of further files (one per line).
+
+ All source files are copied to the private (temporary) session directory,
+ without any subdirectory structure. Files with extension \<^verbatim>\<open>.thy\<close> are treated
+ as theory files: their base names are appended to the overall list of
+ \<^emph>\<open>theories\<close>, and thus loaded into the adhoc session, too.
+
+ \<^medskip>
+ Option \<^verbatim>\<open>-l\<close> specifies the parent logic session, which is produced on the
+ spot using @{tool build}. Options \<^verbatim>\<open>-d\<close>, \<^verbatim>\<open>-o\<close>, \<^verbatim>\<open>-v\<close> work as in @{tool
+ build}.
+
+ Option \<^verbatim>\<open>-O\<close> enables output of prover messages. Options \<^verbatim>\<open>-H\<close>, \<^verbatim>\<open>-M\<close>, \<^verbatim>\<open>-m\<close>
+ work as in @{tool build_log}.
+\<close>
+
+
+subsubsection \<open>Examples\<close>
+
+text \<open>
+ Process a theory from a different session in the context of the default
+ logic (\<^verbatim>\<open>HOL\<close>):
+
+ @{verbatim [display] \<open> isabelle process_theories HOL-Library.Multiset\<close>}
+
+ \<^smallskip>
+ Process a theory with prover output enabled:
+ @{verbatim [display] \<open> isabelle process_theories -O HOL-Library.Multiset\<close>}
+
+ \<^smallskip>
+ Process a theory with prover output enabled, including proof states:
+ @{verbatim [display] \<open> isabelle process_theories -O -o show_states HOL-Library.Multiset\<close>}
+
+ \<^smallskip>
+ Process a self-contained theory file from the Isabelle distribution, outside
+ of its original session context:
+ @{verbatim [display] \<open> isabelle process_theories -f '~~/src/HOL/Examples/Seq.thy'\<close>}
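+
+ \<^smallskip>
+ Process theory files listed in a separate file, one file name per line (here
+ \<^verbatim>\<open>FILES\<close> is merely a placeholder for an actual file):
+ @{verbatim [display] \<open> isabelle process_theories -O -F FILES\<close>}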
+
+\<close>
+
end
--- a/src/Pure/Admin/build_doc.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Admin/build_doc.scala Sun Aug 10 23:35:19 2025 +0200
@@ -39,8 +39,7 @@
progress.echo("Build started for sessions " + commas_quote(selection.sessions))
val build_results =
- Build.build(options, selection = selection, progress = progress, max_jobs = max_jobs)
- if (!build_results.ok) error("Build failed")
+ Build.build(options, selection = selection, progress = progress, max_jobs = max_jobs).check
progress.echo("Build started for documentation " + commas_quote(documents))
val deps = Sessions.load_structure(options + "document").selection_deps(selection)
--- a/src/Pure/Build/build.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Build/build.scala Sun Aug 10 23:35:19 2025 +0200
@@ -98,6 +98,11 @@
lazy val unfinished: List[String] = sessions.iterator.filterNot(apply(_).ok).toList.sorted
+ def check: Results =
+ if (ok) this
+ else if (unfinished.isEmpty) error("Build failed")
+ else error("Build failed with unfinished session(s): " + commas(unfinished))
+
override def toString: String = rc.toString
}
@@ -126,11 +131,16 @@
}
final def build_store(options: Options,
+ private_dir: Option[Path] = None,
build_cluster: Boolean = false,
cache: Rich_Text.Cache = Rich_Text.Cache.make()
): Store = {
val build_options = engine.build_options(options, build_cluster = build_cluster)
- val store = Store(build_options, build_cluster = build_cluster, cache = cache)
+ val store =
+ Store(build_options,
+ private_dir = private_dir,
+ build_cluster = build_cluster,
+ cache = cache)
Isabelle_System.make_directory(store.output_dir + Path.basic("log"))
Isabelle_Fonts.init()
store
@@ -162,6 +172,7 @@
def build(
options: Options,
+ private_dir: Option[Path] = None,
build_hosts: List[Build_Cluster.Host] = Nil,
selection: Sessions.Selection = Sessions.Selection.empty,
browser_info: Browser_Info.Config = Browser_Info.Config.none,
@@ -185,7 +196,11 @@
cache: Rich_Text.Cache = Rich_Text.Cache.make()
): Results = {
val engine = Engine(engine_name(options))
- val store = engine.build_store(options, build_cluster = build_hosts.nonEmpty, cache = cache)
+ val store =
+ engine.build_store(options,
+ private_dir = private_dir,
+ build_cluster = build_hosts.nonEmpty,
+ cache = cache)
val build_options = store.options
using(store.open_server()) { server =>
@@ -195,7 +210,7 @@
val full_sessions =
Sessions.load_structure(build_options, dirs = AFP.main_dirs(afp_root) ::: dirs,
select_dirs = select_dirs, infos = infos, augment_options = augment_options)
- val full_sessions_selection = full_sessions.imports_selection(selection)
+ val selected_sessions = full_sessions.imports_selection(selection)
val build_deps = {
val deps0 =
@@ -247,7 +262,7 @@
/* build process and results */
val clean_sessions =
- if (clean_build) full_sessions.imports_descendants(full_sessions_selection) else Nil
+ if (clean_build) full_sessions.imports_descendants(selected_sessions) else Nil
val numa_nodes = Host.numa_nodes(enabled = numa_shuffling)
val build_context =
@@ -261,7 +276,7 @@
val results = engine.run_build_process(build_context, progress, server)
if (export_files) {
- for (name <- full_sessions_selection.iterator if results(name).ok) {
+ for (name <- selected_sessions.iterator if results(name).ok) {
val info = results.info(name)
if (info.export_files.nonEmpty) {
progress.echo("Exporting " + info.name + " ...")
@@ -294,22 +309,33 @@
/* build logic image */
def build_logic(options: Options, logic: String,
+ private_dir: Option[Path] = None,
progress: Progress = new Progress,
build_heap: Boolean = false,
dirs: List[Path] = Nil,
fresh: Boolean = false,
strict: Boolean = false
- ): Int = {
+ ): Results = {
val selection = Sessions.Selection.session(logic)
- val rc =
- if (!fresh && build(options, selection = selection,
- build_heap = build_heap, no_build = true, dirs = dirs).ok) Process_Result.RC.ok
+
+ def test_build(): Results =
+ build(options, selection = selection,
+ build_heap = build_heap, no_build = true, dirs = dirs)
+
+ def full_build(): Results = {
+ progress.echo("Build started for Isabelle/" + logic + " ...")
+ build(options, selection = selection, progress = progress,
+ build_heap = build_heap, fresh_build = fresh, dirs = dirs)
+ }
+
+ val results =
+ if (fresh) full_build()
else {
- progress.echo("Build started for Isabelle/" + logic + " ...")
- build(options, selection = selection, progress = progress,
- build_heap = build_heap, fresh_build = fresh, dirs = dirs).rc
+ val results0 = test_build()
+ if (results0.ok) results0 else full_build()
}
- if (strict && rc != Process_Result.RC.ok) error("Failed to build Isabelle/" + logic) else rc
+
+ if (strict && !results.ok) error("Failed to build Isabelle/" + logic) else results
}
@@ -774,6 +800,22 @@
/* print messages */
+ def print_log_check(
+ pos: Position.T,
+ elem: XML.Elem,
+ message_head: List[Regex],
+ message_body: List[Regex]
+ ): Boolean = {
+ def check(filter: List[Regex], make_string: => String): Boolean =
+ filter.isEmpty || {
+ val s = Protocol_Message.clean_output(make_string)
+ filter.forall(r => r.findFirstIn(Protocol_Message.clean_output(s)).nonEmpty)
+ }
+
+ check(message_head, Protocol.message_heading(elem, pos)) &&
+ check(message_body, Pretty.unformatted_string_of(List(elem)))
+ }
+
def print_log(
options: Options,
sessions: List[String],
@@ -789,12 +831,6 @@
val session = Session.bootstrap(options)
val store = session.store
- def check(filter: List[Regex], make_string: => String): Boolean =
- filter.isEmpty || {
- val s = Protocol_Message.clean_output(make_string)
- filter.forall(r => r.findFirstIn(Protocol_Message.clean_output(s)).nonEmpty)
- }
-
def print(session_name: String): Unit = {
using(Export.open_session_context0(store, session_name)) { session_context =>
val result =
@@ -829,13 +865,11 @@
for (Text.Info(range, elem) <- messages) {
val line = line_document.position(range.start).line1
val pos = Position.Line_File(line, snapshot.node_name.node)
- def message_text: String =
- Protocol.message_text(elem, heading = true, pos = pos,
- margin = margin, breakgain = breakgain, metric = metric)
- val ok =
- check(message_head, Protocol.message_heading(elem, pos)) &&
- check(message_body, Pretty.unformatted_string_of(List(elem)))
- if (ok) buffer += message_text
+ if (print_log_check(pos, elem, message_head, message_body)) {
+ buffer +=
+ Protocol.message_text(elem, heading = true, pos = pos,
+ margin = margin, breakgain = breakgain, metric = metric)
+ }
}
if (buffer.nonEmpty) {
progress.echo(thy_heading)
--- a/src/Pure/Build/build_benchmark.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Build/build_benchmark.scala Sun Aug 10 23:35:19 2025 +0200
@@ -34,8 +34,7 @@
val options1 = options.string.update("build_engine", Build.Engine.Default.name)
val selection =
Sessions.Selection(requirements = true, sessions = List(benchmark_session(options)))
- val res = Build.build(options1, selection = selection, progress = progress, build_heap = true)
- if (!res.ok) error("Failed building requirements")
+ Build.build(options1, selection = selection, progress = progress, build_heap = true).check
}
def run_benchmark(options: Options, progress: Progress = new Progress): Unit = {
--- a/src/Pure/Build/export.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Build/export.scala Sun Aug 10 23:35:19 2025 +0200
@@ -296,7 +296,7 @@
def shutdown(close: Boolean = false): List[String] = {
consumer.shutdown()
if (close) db.close()
- errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil)
+ errors.value.reverse ::: (if (progress.stopped) List("Session export stopped") else Nil)
}
}
@@ -659,11 +659,11 @@
/* build */
if (!no_build) {
- val rc =
+ val results =
progress.interrupt_handler {
Build.build_logic(options, session_name, progress = progress, dirs = dirs)
}
- if (rc != Process_Result.RC.ok) sys.exit(rc)
+ if (!results.ok) sys.exit(results.rc)
}
--- a/src/Pure/Build/sessions.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Build/sessions.scala Sun Aug 10 23:35:19 2025 +0200
@@ -187,31 +187,23 @@
val other_name = info.name + "_requirements(" + ancestor.get + ")"
Isabelle_System.isabelle_tmp_prefix()
- (other_name,
- List(
- Info.make(
- Chapter_Defs.empty,
- Options.init0(),
- info.options,
- augment_options = _ => Nil,
- dir_selected = false,
- dir = Path.explode("$ISABELLE_TMP_PREFIX"),
- chapter = info.chapter,
- Session_Entry(
- pos = info.pos,
- name = other_name,
- groups = info.groups,
- path = ".",
- parent = ancestor,
- description = "Required theory imports from other sessions",
- options = Nil,
- imports = info.deps,
- directories = Nil,
- theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
- document_theories = Nil,
- document_files = Nil,
- export_files = Nil,
- export_classpath = Nil))))
+ val session_entry =
+ Session_Entry(
+ pos = info.pos,
+ name = other_name,
+ groups = info.groups,
+ parent = ancestor,
+ description = "Required theory imports from other sessions",
+ imports = info.deps,
+ theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))))
+
+ val session_info =
+ Info.make(session_entry,
+ dir = Path.explode("$ISABELLE_TMP_PREFIX"),
+ options = info.options,
+ chapter = info.chapter)
+
+ (other_name, List(session_info))
}
}
else (session, Nil)
@@ -619,19 +611,22 @@
object Info {
def make(
- chapter_defs: Chapter_Defs,
- options0: Options,
+ entry: Session_Entry,
+ dir: Path,
options: Options,
- augment_options: String => List[Options.Spec],
- dir_selected: Boolean,
- dir: Path,
- chapter: String,
- entry: Session_Entry
+ options0: Options = Options.init0(),
+ augment_options: String => List[Options.Spec] = _ => Nil,
+ chapter_defs: Chapter_Defs = Chapter_Defs.empty,
+ chapter: String = UNSORTED,
+ dir_selected: Boolean = false,
+ draft_session: Boolean = false
): Info = {
try {
- val name = entry.name
+ val name =
+ if (draft_session) DRAFT
+ else if (illegal_session(entry.name)) error("Illegal session name " + quote(entry.name))
+ else entry.name
- if (illegal_session(name)) error("Illegal session name " + quote(name))
if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session")
if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session")
@@ -854,8 +849,14 @@
case entry: Chapter_Entry => chapter = entry.name
case entry: Session_Entry =>
root_infos +=
- Info.make(chapter_defs, options0, options, augment_options,
- root.select, root.dir, chapter, entry)
+ Info.make(entry,
+ dir = root.dir,
+ options = options,
+ options0 = options0,
+ augment_options = augment_options,
+ chapter_defs = chapter_defs,
+ chapter = chapter,
+ dir_selected = root.select)
case _ =>
}
chapter = UNSORTED
@@ -1124,20 +1125,20 @@
) extends Entry
sealed case class Chapter_Entry(name: String) extends Entry
sealed case class Session_Entry(
- pos: Position.T,
- name: String,
- groups: List[String],
- path: String,
- parent: Option[String],
- description: String,
- options: List[Options.Spec],
- imports: List[String],
- directories: List[String],
- theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
- document_theories: List[(String, Position.T)],
- document_files: List[(String, String)],
- export_files: List[(String, Int, List[String])],
- export_classpath: List[String]
+ pos: Position.T = Position.none,
+ name: String = "",
+ groups: List[String] = Nil,
+ path: String = ".",
+ parent: Option[String] = None,
+ description: String = "",
+ options: List[Options.Spec] = Nil,
+ imports: List[String] = Nil,
+ directories: List[String] = Nil,
+ theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])] = Nil,
+ document_theories: List[(String, Position.T)] = Nil,
+ document_files: List[(String, String)] = Nil,
+ export_files: List[(String, Int, List[String])] = Nil,
+ export_classpath: List[String] = Nil
) extends Entry {
def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
@@ -1225,7 +1226,9 @@
rep(export_files) ~
opt(export_classpath)))) ^^
{ case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) =>
- Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l, m.getOrElse(Nil)) }
+ Session_Entry(pos = pos, name = a, groups = b, path = c, parent = d, description = e,
+ options = f, imports = g, directories = h, theories = i, document_theories = j,
+ document_files = k, export_files = l, export_classpath = m.getOrElse(Nil)) }
}
def parse_root(path: Path): List[Entry] = {
--- a/src/Pure/Build/store.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Build/store.scala Sun Aug 10 23:35:19 2025 +0200
@@ -13,9 +13,10 @@
object Store {
def apply(
options: Options,
+ private_dir: Option[Path] = None,
build_cluster: Boolean = false,
cache: Rich_Text.Cache = Rich_Text.Cache.make()
- ): Store = new Store(options, build_cluster, cache)
+ ): Store = new Store(options, private_dir, build_cluster, cache)
/* file names */
@@ -276,6 +277,7 @@
class Store private(
val options: Options,
+ private_dir: Option[Path],
val build_cluster: Boolean,
val cache: Rich_Text.Cache
) {
@@ -288,6 +290,9 @@
val ml_settings: ML_Settings = ML_Settings(options)
+ val private_output_dir: Option[Path] =
+ private_dir.map(dir => dir + Path.basic("heaps") + Path.basic(ml_settings.ml_identifier))
+
val system_output_dir: Path =
Path.variable("ISABELLE_HEAPS_SYSTEM") + Path.basic(ml_settings.ml_identifier)
@@ -324,18 +329,19 @@
def system_heaps: Boolean = options.bool("system_heaps")
val output_dir: Path =
- if (system_heaps) system_output_dir else user_output_dir
+ private_output_dir.getOrElse(if (system_heaps) system_output_dir else user_output_dir)
val input_dirs: List[Path] =
- if (system_heaps) List(system_output_dir)
- else List(user_output_dir, system_output_dir)
+ private_output_dir.toList :::
+ (if (system_heaps) List(system_output_dir) else List(user_output_dir, system_output_dir))
val clean_dirs: List[Path] =
- if (system_heaps) List(user_output_dir, system_output_dir)
- else List(user_output_dir)
+ private_output_dir.toList :::
+ (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir))
def presentation_dir: Path =
- if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
+ if (private_dir.isDefined) private_dir.get + Path.basic("browser_info")
+ else if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM")
else Path.explode("$ISABELLE_BROWSER_INFO")
--- a/src/Pure/Isar/proof_display.ML Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Isar/proof_display.ML Sun Aug 10 23:35:19 2025 +0200
@@ -301,7 +301,7 @@
Attrib.setup_config_bool \<^binding>\<open>show_results\<close>
(fn context =>
Config.get_generic context interactive orelse
- Options.default_bool \<^system_option>\<open>show_states\<close>);
+ Options.default_bool \<^system_option>\<open>show_results\<close>);
fun no_print int ctxt = not (Config.get (Config.put interactive int ctxt) show_results);
--- a/src/Pure/ML/ml_console.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/ML/ml_console.scala Sun Aug 10 23:35:19 2025 +0200
@@ -53,11 +53,11 @@
// build logic
if (!no_build && !raw_ml_system) {
val progress = new Console_Progress()
- val rc =
+ val results =
progress.interrupt_handler {
Build.build_logic(options, logic, build_heap = true, progress = progress, dirs = dirs)
}
- if (rc != Process_Result.RC.ok) sys.exit(rc)
+ if (!results.ok) sys.exit(results.rc)
}
val session_background =
--- a/src/Pure/System/isabelle_tool.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/System/isabelle_tool.scala Sun Aug 10 23:35:19 2025 +0200
@@ -150,6 +150,7 @@
Phabricator.isabelle_tool2,
Phabricator.isabelle_tool3,
Phabricator.isabelle_tool4,
+ Process_Theories.isabelle_tool,
Profiling.isabelle_tool,
Profiling_Report.isabelle_tool,
Scala_Project.isabelle_tool,
--- a/src/Pure/Thy/document_build.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Thy/document_build.scala Sun Aug 10 23:35:19 2025 +0200
@@ -597,8 +597,7 @@
progress.interrupt_handler {
val build_results =
Build.build(options, selection = Sessions.Selection.session(session),
- dirs = dirs, progress = progress)
- if (!build_results.ok) error("Failed to build session " + quote(session))
+ dirs = dirs, progress = progress).check
if (output_sources.isEmpty && output_pdf.isEmpty) {
progress.echo_warning("No output directory")
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Tools/process_theories.scala Sun Aug 10 23:35:19 2025 +0200
@@ -0,0 +1,186 @@
+/* Title: Pure/Tools/process_theories.scala
+ Author: Makarius
+
+Process theories within an adhoc session context.
+*/
+
+package isabelle
+
+
+import java.io.{File => JFile}
+
+import scala.collection.mutable
+import scala.util.matching.Regex
+
+
+object Process_Theories {
+ /** process theories **/
+
+ def read_files(path: Path): List[Path] =
+ Library.trim_split_lines(File.read(path)).map(Path.explode)
+
+ def process_theories(
+ options: Options,
+ logic: String,
+ theories: List[String] = Nil,
+ files: List[Path] = Nil,
+ dirs: List[Path] = Nil,
+ output_messages: Boolean = false,
+ message_head: List[Regex] = Nil,
+ message_body: List[Regex] = Nil,
+ margin: Double = Pretty.default_margin,
+ breakgain: Double = Pretty.default_breakgain,
+ metric: Pretty.Metric = Symbol.Metric,
+ progress: Progress = new Progress
+ ): Build.Results = {
+ Isabelle_System.with_tmp_dir("private") { private_dir =>
+ /* options */
+
+ val build_engine = Build.Engine(Build.engine_name(options))
+ val build_options = build_engine.build_options(options)
+
+
+ /* session directory and files */
+
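+ // prefix of the temporary session directory, stripped from file positions in output messages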
+ val private_prefix = private_dir.implode + "/"
+
+ val session_name = Sessions.DRAFT
+ val session_dir = Isabelle_System.make_directory(private_dir + Path.basic(session_name))
+
+ {
+ var seen = Set.empty[JFile]
+ for (path0 <- files) {
+ val path = path0.canonical
+ val file = path.file
+ if (!seen(file)) {
+ seen += file
+ val target = session_dir + path.base
+ if (target.is_file) {
+ error("Duplicate session source file " + path.base + " --- from " + path)
+ }
+ Isabelle_System.copy_file(path, target)
+ }
+ }
+ }
+
+ /* session theories */
+
+ val more_theories =
+ for (path <- files; name <- Thy_Header.get_thy_name(path.implode))
+ yield name
+
+ val session_theories = theories ::: more_theories
+
+
+ /* session imports */
+
+ val sessions_structure = Sessions.load_structure(build_options, dirs = dirs)
+
+ val session_imports =
+ Set.from(
+ for {
+ name <- session_theories.iterator
+ session = sessions_structure.theory_qualifier(name)
+ if session.nonEmpty
+ } yield session).toList
+
+
+ /* build adhoc session */
+
+ val session_entry =
+ Sessions.Session_Entry(
+ parent = Some(logic),
+ theories = session_theories.map(a => (Nil, List(((a, Position.none), false)))),
+ imports = session_imports)
+
+ val session_info =
+ Sessions.Info.make(session_entry, draft_session = true,
+ dir = session_dir, options = options)
+
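+ // echo prover messages of the adhoc session while it runs, roughly like "isabelle build_log"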
+ def session_setup(setup_session_name: String, session: Session): Unit = {
+ if (output_messages && setup_session_name == session_name) {
+ session.all_messages += Session.Consumer[Prover.Message]("process_theories") {
+ case output: Prover.Output
+ if Protocol.is_exported(output.message) || Protocol.is_state(output.message) =>
+ output.properties match {
+ case Position.Line_File(line, file0) =>
+ val file = Library.perhaps_unprefix(private_prefix, file0)
+ val pos = Position.Line_File(line, file)
+ if (Build.print_log_check(pos, output.message, message_head, message_body)) {
+ progress.echo(Protocol.message_text(output.message, heading = true, pos = pos,
+ margin = margin, breakgain = breakgain, metric = metric))
+ }
+ case _ =>
+ }
+ case _ =>
+ }
+ }
+ }
+
+ Build.build(options, private_dir = Some(private_dir), progress = progress,
+ infos = List(session_info), selection = Sessions.Selection.session(session_name),
+ session_setup = session_setup)
+ }
+ }
+
+
+
+ /** Isabelle tool wrapper **/
+
+ val isabelle_tool = Isabelle_Tool("process_theories",
+ "process theories within an adhoc session context",
+ Scala_Project.here,
+ { args =>
+ val message_head = new mutable.ListBuffer[Regex]
+ val message_body = new mutable.ListBuffer[Regex]
+ var output_messages = false
+ val dirs = new mutable.ListBuffer[Path]
+ val files = new mutable.ListBuffer[Path]
+ var logic = Isabelle_System.getenv("ISABELLE_LOGIC")
+ var margin = Pretty.default_margin
+ var options = Options.init()
+ var verbose = false
+
+
+ val getopts = Getopts("""
+Usage: isabelle process_theories [OPTIONS] [THEORIES...]
+
+ Options are:
+ -F FILE include additional session files, listed in FILE
+ -H REGEX filter messages by matching against head
+ -M REGEX filter messages by matching against body
+ -O output messages
+ -d DIR include session directory
+ -f FILE include additional session files
+ -l NAME logic session name (default ISABELLE_LOGIC=""" + quote(logic) + """)
+ -m MARGIN margin for pretty printing (default: """ + margin + """)
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -v verbose
+
+ Process theories within an adhoc session context.
+""",
+ "F:" -> (arg => files ++= read_files(Path.explode(arg))),
+ "H:" -> (arg => message_head += arg.r),
+ "M:" -> (arg => message_body += arg.r),
+ "O" -> (_ => output_messages = true),
+ "d:" -> (arg => dirs += Path.explode(arg)),
+ "f:" -> (arg => files += Path.explode(arg)),
+ "l:" -> (arg => logic = arg),
+ "m:" -> (arg => margin = Value.Double.parse(arg)),
+ "o:" -> (arg => options = options + arg),
+ "v" -> (_ => verbose = true))
+
+ val theories = getopts(args)
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ val results =
+ progress.interrupt_handler {
+ process_theories(options, logic, theories, files = files.toList, dirs = dirs.toList,
+ output_messages = output_messages, message_head = message_head.toList,
+ message_body = message_body.toList, margin = margin, progress = progress)
+ }
+
+ sys.exit(results.rc)
+ })
+}
--- a/src/Pure/Tools/profiling.scala Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Pure/Tools/profiling.scala Sun Aug 10 23:35:19 2025 +0200
@@ -71,25 +71,21 @@
def make(
store: Store,
- session_background: Sessions.Background,
+ session_base: Sessions.Base,
+ dirs: List[Path] = Nil,
parent: Option[Statistics] = None
): Statistics = {
- val session_base = session_background.base
val session_name = session_base.session_name
- val session = {
- val args = session_base.used_theories.map(p => p._1.theory)
- val eval_args =
- List("--eval", "use_thy " + ML_Syntax.print_string_bytes("~~/src/Tools/Profiling"))
+ val session =
Isabelle_System.with_tmp_dir("profiling") { dir =>
- val putenv = List("ISABELLE_PROFILING" -> dir.implode)
- File.write(dir + Path.explode("args.yxml"), YXML.string_of_body(encode_args(args)))
- val session_heaps = store.session_heaps(session_background, logic = session_name)
- ML_Process(store.options, session_background, session_heaps, args = eval_args,
- env = Isabelle_System.Settings.env(putenv)).result().check
+ File.write(dir + Path.explode("args.yxml"),
+ YXML.string_of_body(encode_args(session_base.used_theories.map(p => p._1.theory))))
+ val ml_options = store.options + Options.Spec("profiling_dir", Some(dir.implode))
+ Process_Theories.process_theories(ml_options, session_name,
+ dirs = dirs, files = List(Path.explode("~~/src/Tools/Profiling.thy"))).check
decode_result(YXML.parse_body(Bytes.read(dir + Path.explode("result.yxml"))))
}
- }
new Statistics(parent = parent, session = session_name,
theories = session.theories,
@@ -248,12 +244,9 @@
build_heap: Boolean = false,
clean_build: Boolean = false
): Build.Results = {
- val results =
- Build.build(build_options, progress = progress,
- selection = selection, build_heap = build_heap, clean_build = clean_build,
- dirs = sessions_dirs, numa_shuffling = numa_shuffling, max_jobs = max_jobs)
-
- if (results.ok) results else error("Build failed")
+ Build.build(build_options, progress = progress,
+ selection = selection, build_heap = build_heap, clean_build = clean_build,
+ dirs = sessions_dirs, numa_shuffling = numa_shuffling, max_jobs = max_jobs).check
}
@@ -283,8 +276,8 @@
parent_stats <- seen.get(parent_name)
} yield parent_stats
val stats =
- Statistics.make(store,
- build_results.deps.background(session_name),
+ Statistics.make(store, build_results.deps(session_name),
+ dirs = sessions_dirs,
parent = parent)
seen += (session_name -> stats)
stats
--- a/src/Tools/Profiling.thy Fri Aug 08 16:46:03 2025 +0100
+++ b/src/Tools/Profiling.thy Sun Aug 10 23:35:19 2025 +0200
@@ -1,4 +1,4 @@
-(* Title: Tools/profiling.ML
+(* Title: Tools/Profiling.thy
Author: Makarius
Session profiling based on loaded ML image.
@@ -8,7 +8,195 @@
imports Pure
begin
-ML_file "profiling.ML"
+ML \<open>
+signature PROFILING =
+sig
+ val locales: theory -> string list
+ val locale_thms: theory -> string -> thm list
+ val locales_thms: theory -> thm list
+ val global_thms: theory -> thm list
+ val all_thms: theory -> thm list
+ type session_statistics =
+ {theories: int,
+ garbage_theories: int,
+ locales: int,
+ locale_thms: int,
+ global_thms: int,
+ sizeof_thys_id: int,
+ sizeof_thms_id: int,
+ sizeof_terms: int,
+ sizeof_types: int,
+ sizeof_names: int,
+ sizeof_spaces: int}
+ val session_statistics: string list -> session_statistics
+ val main: unit -> unit
+end;
+
+structure Profiling: PROFILING =
+struct
+
+(* theory content *)
+
+fun locales thy =
+ let
+ val parent_spaces = map Locale.locale_space (Theory.parents_of thy);
+ fun add name =
+ if exists (fn space => Name_Space.declared space name) parent_spaces then I
+ else cons name;
+ in fold add (Locale.get_locales thy) [] end;
+
+fun proper_thm thm = not (Thm.eq_thm_prop (Drule.dummy_thm, thm));
+
+fun locale_thms thy loc =
+ (Locale.locale_notes thy loc, []) |->
+ (fold (#2 #> fold (#2 #> (fold (#1 #> fold (fn thm => proper_thm thm ? cons thm ))))));
+
+fun locales_thms thy =
+ maps (locale_thms thy) (locales thy);
+
+fun global_thms thy =
+ Facts.dest_static true
+ (map Global_Theory.facts_of (Theory.parents_of thy)) (Global_Theory.facts_of thy)
+ |> maps #2;
+
+fun all_thms thy = locales_thms thy @ global_thms thy;
+
+
+(* session content *)
+
+fun session_content thys =
+ let
+ val thms = maps all_thms thys;
+ val thys_id = map Context.theory_id thys;
+ val thms_id = map Thm.theory_id thms;
+ val terms =
+ Termset.dest ((fold o Thm.fold_terms {hyps = true}) Termset.insert thms Termset.empty);
+ val types = Typset.dest ((fold o fold_types) Typset.insert terms Typset.empty);
+ val spaces =
+ maps (fn f => map f thys)
+ [Sign.class_space,
+ Sign.type_space,
+ Sign.const_space,
+ Theory.axiom_space,
+ Thm.oracle_space,
+ Global_Theory.fact_space,
+ Locale.locale_space,
+ Attrib.attribute_space o Context.Theory,
+ Method.method_space o Context.Theory];
+ val names = Symset.dest (Symset.merges (map (Symset.make o Name_Space.get_names) spaces));
+ in
+ {thys_id = thys_id, thms_id = thms_id, terms = terms,
+ types = types, names = names, spaces = spaces}
+ end;
+
+fun sizeof1_diff (a, b) = ML_Heap.sizeof1 a - ML_Heap.sizeof1 b;
+fun sizeof_list_diff (a, b) = ML_Heap.sizeof_list a - ML_Heap.sizeof_list b;
+fun sizeof_diff (a, b) = ML_Heap.sizeof a - ML_Heap.sizeof b;
+
+
+(* session statistics *)
+
+type session_statistics =
+ {theories: int,
+ garbage_theories: int,
+ locales: int,
+ locale_thms: int,
+ global_thms: int,
+ sizeof_thys_id: int,
+ sizeof_thms_id: int,
+ sizeof_terms: int,
+ sizeof_types: int,
+ sizeof_names: int,
+ sizeof_spaces: int};
+
+fun session_statistics theories : session_statistics =
+ let
+ val theories_member = Symtab.defined (Symtab.make_set theories) o Context.theory_long_name;
+
+ val context_thys =
+ #contexts (Context.finish_tracing ())
+ |> map_filter (fn (Context.Theory thy, _) => SOME thy | _ => NONE);
+
+ val loader_thys = map Thy_Info.get_theory (Thy_Info.get_names ());
+ val loaded_thys = filter theories_member loader_thys;
+ val loaded_context_thys = filter theories_member context_thys;
+
+ val all_thys = loader_thys @ context_thys;
+ val base_thys = filter_out theories_member all_thys;
+
+ val {thys_id = all_thys_id, thms_id = all_thms_id, terms = all_terms,
+ types = all_types, names = all_names, spaces = all_spaces} = session_content all_thys;
+ val {thys_id = base_thys_id, thms_id = base_thms_id, terms = base_terms,
+ types = base_types, names = base_names, spaces = base_spaces} = session_content base_thys;
+
+ val n = length loaded_thys;
+ val m = (case length loaded_context_thys of 0 => 0 | l => l - n);
+ in
+ {theories = n,
+ garbage_theories = m,
+ locales = Integer.sum (map (length o locales) loaded_thys),
+ locale_thms = Integer.sum (map (length o locales_thms) loaded_thys),
+ global_thms = Integer.sum (map (length o global_thms) loaded_thys),
+ sizeof_thys_id =
+ sizeof1_diff ((all_thys_id, all_thms_id), (base_thys_id, all_thms_id)) -
+ sizeof_list_diff (all_thys_id, base_thys_id),
+ sizeof_thms_id =
+ sizeof1_diff ((all_thms_id, all_thys_id), (base_thms_id, all_thys_id)) -
+ sizeof_list_diff (all_thms_id, base_thms_id),
+ sizeof_terms =
+ sizeof1_diff ((all_terms, all_types, all_names), (base_terms, all_types, all_names)) -
+ sizeof_list_diff (all_terms, base_terms),
+ sizeof_types =
+ sizeof1_diff ((all_types, all_names), (base_types, all_names)) -
+ sizeof_list_diff (all_types, base_types),
+ sizeof_spaces =
+ sizeof1_diff ((all_spaces, all_names), (base_spaces, all_names)) -
+ sizeof_list_diff (all_spaces, base_spaces),
+ sizeof_names = sizeof_diff (all_names, base_names)}
+ end;
+
+
+(* main entry point for external process *)
+
+local
+
+val decode_args : string list XML.Decode.T =
+ let open XML.Decode in list string end;
+
+val encode_result : session_statistics XML.Encode.T =
+ (fn {theories = a,
+ garbage_theories = b,
+ locales = c,
+ locale_thms = d,
+ global_thms = e,
+ sizeof_thys_id = f,
+ sizeof_thms_id = g,
+ sizeof_terms = h,
+ sizeof_types = i,
+ sizeof_names = j,
+ sizeof_spaces = k} => ((a, (b, (c, (d, (e, (f, (g, (h, (i, (j, k)))))))))))) #>
+ let open XML.Encode in
+ pair int (pair int (pair int (pair int (pair int (pair int (pair int
+ (pair int (pair int (pair int int)))))))))
+ end;
+
+in
+
+fun main () =
+ (case Options.default_string \<^system_option>\<open>profiling_dir\<close> of
+ "" => ()
+ | dir_name =>
+ let
+ val dir = Path.explode dir_name;
+ val args = decode_args (YXML.parse_body (File.read (dir + \<^path>\<open>args.yxml\<close>)));
+ val result = session_statistics args;
+ in File.write (dir + \<^path>\<open>result.yxml\<close>) (YXML.string_of_body (encode_result result)) end);
+
+end;
+
+end;
+\<close>
+
ML_command \<open>Profiling.main ()\<close>
end
--- a/src/Tools/profiling.ML Fri Aug 08 16:46:03 2025 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,192 +0,0 @@
-(* Title: Tools/profiling.ML
- Author: Makarius
-
-Session profiling based on loaded ML image.
-*)
-
-signature PROFILING =
-sig
- val locales: theory -> string list
- val locale_thms: theory -> string -> thm list
- val locales_thms: theory -> thm list
- val global_thms: theory -> thm list
- val all_thms: theory -> thm list
- type session_statistics =
- {theories: int,
- garbage_theories: int,
- locales: int,
- locale_thms: int,
- global_thms: int,
- sizeof_thys_id: int,
- sizeof_thms_id: int,
- sizeof_terms: int,
- sizeof_types: int,
- sizeof_names: int,
- sizeof_spaces: int}
- val session_statistics: string list -> session_statistics
- val main: unit -> unit
-end;
-
-structure Profiling: PROFILING =
-struct
-
-(* theory content *)
-
-fun locales thy =
- let
- val parent_spaces = map Locale.locale_space (Theory.parents_of thy);
- fun add name =
- if exists (fn space => Name_Space.declared space name) parent_spaces then I
- else cons name;
- in fold add (Locale.get_locales thy) [] end;
-
-fun proper_thm thm = not (Thm.eq_thm_prop (Drule.dummy_thm, thm));
-
-fun locale_thms thy loc =
- (Locale.locale_notes thy loc, []) |->
- (fold (#2 #> fold (#2 #> (fold (#1 #> fold (fn thm => proper_thm thm ? cons thm ))))));
-
-fun locales_thms thy =
- maps (locale_thms thy) (locales thy);
-
-fun global_thms thy =
- Facts.dest_static true
- (map Global_Theory.facts_of (Theory.parents_of thy)) (Global_Theory.facts_of thy)
- |> maps #2;
-
-fun all_thms thy = locales_thms thy @ global_thms thy;
-
-
-(* session content *)
-
-fun session_content thys =
- let
- val thms = maps all_thms thys;
- val thys_id = map Context.theory_id thys;
- val thms_id = map Thm.theory_id thms;
- val terms =
- Termset.dest ((fold o Thm.fold_terms {hyps = true}) Termset.insert thms Termset.empty);
- val types = Typset.dest ((fold o fold_types) Typset.insert terms Typset.empty);
- val spaces =
- maps (fn f => map f thys)
- [Sign.class_space,
- Sign.type_space,
- Sign.const_space,
- Theory.axiom_space,
- Thm.oracle_space,
- Global_Theory.fact_space,
- Locale.locale_space,
- Attrib.attribute_space o Context.Theory,
- Method.method_space o Context.Theory];
- val names = Symset.dest (Symset.merges (map (Symset.make o Name_Space.get_names) spaces));
- in
- {thys_id = thys_id, thms_id = thms_id, terms = terms,
- types = types, names = names, spaces = spaces}
- end;
-
-fun sizeof1_diff (a, b) = ML_Heap.sizeof1 a - ML_Heap.sizeof1 b;
-fun sizeof_list_diff (a, b) = ML_Heap.sizeof_list a - ML_Heap.sizeof_list b;
-fun sizeof_diff (a, b) = ML_Heap.sizeof a - ML_Heap.sizeof b;
-
-
-(* session statistics *)
-
-type session_statistics =
- {theories: int,
- garbage_theories: int,
- locales: int,
- locale_thms: int,
- global_thms: int,
- sizeof_thys_id: int,
- sizeof_thms_id: int,
- sizeof_terms: int,
- sizeof_types: int,
- sizeof_names: int,
- sizeof_spaces: int};
-
-fun session_statistics theories : session_statistics =
- let
- val theories_member = Symtab.defined (Symtab.make_set theories) o Context.theory_long_name;
-
- val context_thys =
- #contexts (Context.finish_tracing ())
- |> map_filter (fn (Context.Theory thy, _) => SOME thy | _ => NONE);
-
- val loader_thys = map Thy_Info.get_theory (Thy_Info.get_names ());
- val loaded_thys = filter theories_member loader_thys;
- val loaded_context_thys = filter theories_member context_thys;
-
- val all_thys = loader_thys @ context_thys;
- val base_thys = filter_out theories_member all_thys;
-
- val {thys_id = all_thys_id, thms_id = all_thms_id, terms = all_terms,
- types = all_types, names = all_names, spaces = all_spaces} = session_content all_thys;
- val {thys_id = base_thys_id, thms_id = base_thms_id, terms = base_terms,
- types = base_types, names = base_names, spaces = base_spaces} = session_content base_thys;
-
- val n = length loaded_thys;
- val m = (case length loaded_context_thys of 0 => 0 | l => l - n);
- in
- {theories = n,
- garbage_theories = m,
- locales = Integer.sum (map (length o locales) loaded_thys),
- locale_thms = Integer.sum (map (length o locales_thms) loaded_thys),
- global_thms = Integer.sum (map (length o global_thms) loaded_thys),
- sizeof_thys_id =
- sizeof1_diff ((all_thys_id, all_thms_id), (base_thys_id, all_thms_id)) -
- sizeof_list_diff (all_thys_id, base_thys_id),
- sizeof_thms_id =
- sizeof1_diff ((all_thms_id, all_thys_id), (base_thms_id, all_thys_id)) -
- sizeof_list_diff (all_thms_id, base_thms_id),
- sizeof_terms =
- sizeof1_diff ((all_terms, all_types, all_names), (base_terms, all_types, all_names)) -
- sizeof_list_diff (all_terms, base_terms),
- sizeof_types =
- sizeof1_diff ((all_types, all_names), (base_types, all_names)) -
- sizeof_list_diff (all_types, base_types),
- sizeof_spaces =
- sizeof1_diff ((all_spaces, all_names), (base_spaces, all_names)) -
- sizeof_list_diff (all_spaces, base_spaces),
- sizeof_names = sizeof_diff (all_names, base_names)}
- end;
-
-
-(* main entry point for external process *)
-
-local
-
-val decode_args : string list XML.Decode.T =
- let open XML.Decode in list string end;
-
-val encode_result : session_statistics XML.Encode.T =
- (fn {theories = a,
- garbage_theories = b,
- locales = c,
- locale_thms = d,
- global_thms = e,
- sizeof_thys_id = f,
- sizeof_thms_id = g,
- sizeof_terms = h,
- sizeof_types = i,
- sizeof_names = j,
- sizeof_spaces = k} => ((a, (b, (c, (d, (e, (f, (g, (h, (i, (j, k)))))))))))) #>
- let open XML.Encode in
- pair int (pair int (pair int (pair int (pair int (pair int (pair int
- (pair int (pair int (pair int int)))))))))
- end;
-
-in
-
-fun main () =
- (case getenv "ISABELLE_PROFILING" of
- "" => ()
- | dir_name =>
- let
- val dir = Path.explode dir_name;
- val args = decode_args (YXML.parse_body (File.read (dir + \<^path>\<open>args.yxml\<close>)));
- val result = session_statistics args;
- in File.write (dir + \<^path>\<open>result.yxml\<close>) (YXML.string_of_body (encode_result result)) end);
-
-end;
-
-end;