--- a/NEWS Tue May 08 15:06:19 2018 +0200
+++ b/NEWS Tue May 08 20:26:40 2018 +0200
@@ -327,8 +327,17 @@
INCOMPATIBILITY.
+*** ML ***
+
+* Operation Export.export emits theory exports (arbitrary blobs), which
+are stored persistently in the session build database.
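+
+Minimal usage sketch in Isabelle/ML, assuming some theory value thy:
+
+  Export.export thy "document.txt" "arbitrary content";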
+
+
*** System ***
+* The command-line tool "isabelle export" retrieves theory exports from the
+session build database.
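+
+For example, to list the exports of some session (here HOL):
+
+  isabelle export -l HOL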
+
* The command-line tools "isabelle server" and "isabelle client" provide
access to the Isabelle Server: it supports responsive session management
and concurrent use of theories, based on Isabelle/PIDE infrastructure.
--- a/src/Doc/System/Sessions.thy Tue May 08 15:06:19 2018 +0200
+++ b/src/Doc/System/Sessions.thy Tue May 08 20:26:40 2018 +0200
@@ -545,4 +545,50 @@
@{verbatim [display] \<open>isabelle imports -U -i -D 'some/where/My_Project'\<close>}
\<close>
+
+section \<open>Retrieve theory exports\<close>
+
+text \<open>
+ The @{tool_def "export"} tool retrieves theory exports from the session
+ database. Its command-line usage is: @{verbatim [display]
+\<open>Usage: isabelle export [OPTIONS] SESSION
+
+ Options are:
+ -D DIR target directory for exported files (default: "export")
+ -d DIR include session directory
+ -l list exports
+ -n no build of session
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -s system build mode for session image
+ -x PATTERN extract files matching pattern (e.g. "*:**" for all)
+
+ List or export theory exports for SESSION: named blobs produced by
+ isabelle build. Option -l or -x is required.
+
+ The PATTERN language resembles glob patterns in the shell, with ? and *
+ (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
+ and variants {pattern1,pattern2,pattern3}.\<close>}
+
+ \<^medskip>
+ The specified session is updated via @{tool build}
+ (\secref{sec:tool-build}), with the same options \<^verbatim>\<open>-d\<close>, \<^verbatim>\<open>-o\<close>, \<^verbatim>\<open>-s\<close>. The
+ option \<^verbatim>\<open>-n\<close> suppresses the implicit build process: it means that a
+ potentially outdated session database is used!
+
+ \<^medskip>
+ Option \<^verbatim>\<open>-l\<close> lists all stored exports, with compound names
+ \<open>theory\<close>\<^verbatim>\<open>:\<close>\<open>name\<close>.
+
+ \<^medskip>
+ Option \<^verbatim>\<open>-x\<close> extracts stored exports whose compound name matches the given
+ pattern. Note that wild cards ``\<^verbatim>\<open>?\<close>'' and ``\<^verbatim>\<open>*\<close>'' do not match the
+ separators ``\<^verbatim>\<open>:\<close>'' and ``\<^verbatim>\<open>/\<close>''; the wild card \<^verbatim>\<open>**\<close> matches over directory
+ name hierarchies separated by ``\<^verbatim>\<open>/\<close>''. Thus the pattern ``\<^verbatim>\<open>*:**\<close>'' matches
+ \<^emph>\<open>all\<close> theory exports.
+
+ Option \<^verbatim>\<open>-D\<close> specifies an alternative base directory for option \<^verbatim>\<open>-x\<close>: the
+ default is \<^verbatim>\<open>export\<close> within the current directory. Each theory creates its
+ own sub-directory hierarchy, using the session-qualified theory name.
+\<close>
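+
+text \<open>
+  For example, the following command extracts all exports of a (hypothetical)
+  session \<^verbatim>\<open>My_Session\<close> into the target directory \<^verbatim>\<open>output\<close>:
+
+  @{verbatim [display] \<open>isabelle export -D output -x "*:**" My_Session\<close>}
+\<close>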
+
end
--- a/src/Pure/PIDE/command.scala Tue May 08 15:06:19 2018 +0200
+++ b/src/Pure/PIDE/command.scala Tue May 08 20:26:40 2018 +0200
@@ -291,8 +291,9 @@
private def add_result(entry: Results.Entry): State =
copy(results = results + entry)
- def add_export(entry: Exports.Entry): State =
- copy(exports = exports + entry)
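+    /* accept the export only if it belongs to this command's theory */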
+ def add_export(entry: Exports.Entry): Option[State] =
+ if (command.node_name.theory == entry._2.theory_name) Some(copy(exports = exports + entry))
+ else None
private def add_markup(
status: Boolean, chunk_name: Symbol.Text_Chunk.Name, m: Text.Markup): State =
--- a/src/Pure/PIDE/document.scala Tue May 08 15:06:19 2018 +0200
+++ b/src/Pure/PIDE/document.scala Tue May 08 20:26:40 2018 +0200
@@ -725,13 +725,17 @@
{
execs.get(id) match {
case Some(st) =>
- val new_st = st.add_export(entry)
- (new_st, copy(execs = execs + (id -> new_st)))
+ st.add_export(entry) match {
+ case Some(new_st) => (new_st, copy(execs = execs + (id -> new_st)))
+ case None => fail
+ }
case None =>
commands.get(id) match {
case Some(st) =>
- val new_st = st.add_export(entry)
- (new_st, copy(commands = commands + (id -> new_st)))
+ st.add_export(entry) match {
+ case Some(new_st) => (new_st, copy(commands = commands + (id -> new_st)))
+ case None => fail
+ }
case None => fail
}
}
--- a/src/Pure/System/isabelle_tool.scala Tue May 08 15:06:19 2018 +0200
+++ b/src/Pure/System/isabelle_tool.scala Tue May 08 20:26:40 2018 +0200
@@ -109,6 +109,7 @@
Build_Status.isabelle_tool,
Check_Sources.isabelle_tool,
Doc.isabelle_tool,
+ Export.isabelle_tool,
Imports.isabelle_tool,
Mkroot.isabelle_tool,
ML_Process.isabelle_tool,
--- a/src/Pure/Thy/export.ML Tue May 08 15:06:19 2018 +0200
+++ b/src/Pure/Thy/export.ML Tue May 08 20:26:40 2018 +0200
@@ -7,7 +7,7 @@
signature EXPORT =
sig
val export: theory -> string -> string -> unit
- val export_raw: theory -> string -> string list -> unit
+ val export_raw: theory -> string -> string -> unit
end;
structure Export: EXPORT =
@@ -31,9 +31,9 @@
serial = serial (),
theory_name = Context.theory_long_name thy,
name = check_name name,
- compress = compress} body;
+ compress = compress} [body];
-fun export thy name body = gen_export (size body > 60) thy name [body];
-fun export_raw thy name body = gen_export false thy name body;
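+(*compress bodies larger than 60 bytes; export_raw always stores uncompressed*)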
+fun export thy name body = gen_export (size body > 60) thy name body;
+val export_raw = gen_export false;
end;
--- a/src/Pure/Thy/export.scala Tue May 08 15:06:19 2018 +0200
+++ b/src/Pure/Thy/export.scala Tue May 08 20:26:40 2018 +0200
@@ -6,6 +6,11 @@
package isabelle
+
+import scala.annotation.tailrec
+import scala.util.matching.Regex
+
+
object Export
{
/* SQL data model */
@@ -21,9 +26,17 @@
val table =
SQL.Table("isabelle_exports", List(session_name, theory_name, name, compressed, body))
- def where_equal(session_name: String, theory_name: String): SQL.Source =
+ def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source =
"WHERE " + Data.session_name.equal(session_name) +
- " AND " + Data.theory_name.equal(theory_name)
+ (if (theory_name == "") "" else " AND " + Data.theory_name.equal(theory_name)) +
+ (if (name == "") "" else " AND " + Data.name.equal(name))
+ }
+
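+  /* check whether a stored export already exists (used for duplicate detection) */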
+ def read_name(db: SQL.Database, session_name: String, theory_name: String, name: String): Boolean =
+ {
+ val select =
+ Data.table.select(List(Data.name), Data.where_equal(session_name, theory_name, name))
+ db.using_statement(select)(stmt => stmt.execute_query().next())
}
def read_names(db: SQL.Database, session_name: String, theory_name: String): List[String] =
@@ -33,9 +46,19 @@
stmt.execute_query().iterator(res => res.string(Data.name)).toList)
}
+ def read_theory_names(db: SQL.Database, session_name: String): List[(String, String)] =
+ {
+ val select = Data.table.select(List(Data.theory_name, Data.name), Data.where_equal(session_name))
+ db.using_statement(select)(stmt =>
+ stmt.execute_query().iterator(res =>
+ (res.string(Data.theory_name), res.string(Data.name))).toList)
+ }
+
def message(msg: String, theory_name: String, name: String): String =
msg + " " + quote(name) + " for theory " + quote(theory_name)
+ def compound_name(a: String, b: String): String = a + ":" + b
+
sealed case class Entry(
session_name: String,
theory_name: String,
@@ -43,7 +66,7 @@
compressed: Boolean,
body: Future[Bytes])
{
- override def toString: String = theory_name + ":" + name
+ override def toString: String = compound_name(theory_name, name)
def write(db: SQL.Database)
{
@@ -58,6 +81,27 @@
stmt.execute()
})
}
+
+ def body_uncompressed: Bytes =
+ if (compressed) body.join.uncompress() else body.join
+ }
+
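+  /* Translate the glob-like PATTERN language into a regular expression:
+     "?" and "*" match anything except ":" and "/", "**" matches anything
+     except ":", {a,b,c} becomes (a|b|c), backslash escapes the next
+     character, and the regex metacharacters . + ( ) are quoted literally. */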
+ def make_regex(pattern: String): Regex =
+ {
+ @tailrec def make(result: List[String], depth: Int, chs: List[Char]): Regex =
+ chs match {
+ case '*' :: '*' :: rest => make("[^:]*" :: result, depth, rest)
+ case '*' :: rest => make("[^:/]*" :: result, depth, rest)
+ case '?' :: rest => make("[^:/]" :: result, depth, rest)
+ case '\\' :: c :: rest => make(("\\" + c) :: result, depth, rest)
+ case '{' :: rest => make("(" :: result, depth + 1, rest)
+ case ',' :: rest if depth > 0 => make("|" :: result, depth, rest)
+ case '}' :: rest if depth > 0 => make(")" :: result, depth - 1, rest)
+ case c :: rest if ".+()".contains(c) => make(("\\" + c) :: result, depth, rest)
+ case c :: rest => make(c.toString :: result, depth, rest)
+ case Nil => result.reverse.mkString.r
+ }
+ make(Nil, 0, pattern.toList)
}
def make_entry(session_name: String, args: Markup.Export.Args, body: Bytes): Entry =
@@ -70,7 +114,7 @@
{
val select =
Data.table.select(List(Data.compressed, Data.body),
- Data.where_equal(session_name, theory_name) + " AND " + Data.name.equal(name))
+ Data.where_equal(session_name, theory_name, name))
db.using_statement(select)(stmt =>
{
val res = stmt.execute_query()
@@ -99,7 +143,7 @@
{
entry.body.join
db.transaction {
- if (read_names(db, entry.session_name, entry.theory_name).contains(entry.name)) {
+ if (read_name(db, entry.session_name, entry.theory_name, entry.name)) {
val err = message("Duplicate export", entry.theory_name, entry.name)
export_errors.change(errs => err :: errs)
}
@@ -118,4 +162,113 @@
export_errors.value.reverse
}
}
+
+
+ /* Isabelle tool wrapper */
+
+ val default_export_dir = Path.explode("export")
+
+ val isabelle_tool = Isabelle_Tool("export", "retrieve theory exports", args =>
+ {
+ /* arguments */
+
+ var export_dir = default_export_dir
+ var dirs: List[Path] = Nil
+ var export_list = false
+ var no_build = false
+ var options = Options.init()
+ var system_mode = false
+ var export_pattern = ""
+
+ val getopts = Getopts("""
+Usage: isabelle export [OPTIONS] SESSION
+
+ Options are:
+ -D DIR target directory for exported files (default: """ + default_export_dir + """)
+ -d DIR include session directory
+ -l list exports
+ -n no build of session
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -s system build mode for session image
+ -x PATTERN extract files matching pattern (e.g. "*:**" for all)
+
+ List or export theory exports for SESSION: named blobs produced by
+ isabelle build. Option -l or -x is required.
+
+ The PATTERN language resembles glob patterns in the shell, with ? and *
+ (both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
+ and variants {pattern1,pattern2,pattern3}.
+""",
+ "D:" -> (arg => export_dir = Path.explode(arg)),
+ "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
+ "l" -> (_ => export_list = true),
+ "n" -> (_ => no_build = true),
+ "o:" -> (arg => options = options + arg),
+ "s" -> (_ => system_mode = true),
+ "x:" -> (arg => export_pattern = arg))
+
+ val more_args = getopts(args)
+ val session_name =
+ more_args match {
+ case List(session_name) if export_list || export_pattern != "" => session_name
+ case _ => getopts.usage()
+ }
+
+
+ /* build */
+
+ val progress = new Console_Progress()
+
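+    // check up-to-date status via no_build = true; run the real build only if required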
+ if (!no_build &&
+ !Build.build(options, no_build = true, dirs = dirs, system_mode = system_mode,
+ sessions = List(session_name)).ok)
+ {
+ progress.echo("Build started for Isabelle/" + session_name + " ...")
+ progress.interrupt_handler {
+ val res =
+ Build.build(options, progress = progress, dirs = dirs, system_mode = system_mode,
+ sessions = List(session_name))
+ if (!res.ok) sys.exit(res.rc)
+ }
+ }
+
+
+ /* database */
+
+ val store = Sessions.store(system_mode)
+ val database =
+ store.find_database(session_name) match {
+ case None => error("Missing database for session " + quote(session_name))
+ case Some(database) => database
+ }
+
+ using(SQLite.open_database(database))(db =>
+ {
+ db.transaction {
+ val export_names = read_theory_names(db, session_name)
+
+ // list
+ if (export_list) {
+ (for ((theory_name, name) <- export_names) yield compound_name(theory_name, name)).
+ sorted.foreach(Output.writeln(_, stdout = true))
+ }
+
+ // export
+ if (export_pattern != "") {
+ val regex = make_regex(export_pattern)
+ for {
+ (theory_name, name) <- export_names
+ if regex.pattern.matcher(compound_name(theory_name, name)).matches
+ } {
+ val entry = read_entry(db, session_name, theory_name, name)
+ val path = export_dir + Path.basic(theory_name) + Path.explode(name)
+
+ progress.echo("exporting " + path)
+ Isabelle_System.mkdirs(path.dir)
+ Bytes.write(path, entry.body_uncompressed)
+ }
+ }
+ }
+ })
+ })
}