--- a/src/Pure/General/bytes.scala Sun May 13 16:05:29 2018 +0200
+++ b/src/Pure/General/bytes.scala Sun May 13 16:26:01 2018 +0200
@@ -205,4 +205,11 @@
using(new XZOutputStream(result, options, cache))(write_stream(_))
new Bytes(result.toByteArray, 0, result.size)
}
+
+ def maybe_compress(options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.cache())
+ : (Boolean, Bytes) =
+ {
+ val compressed = compress(options = options, cache = cache)
+ if (compressed.length < length) (true, compressed) else (false, this)
+ }
}
--- a/src/Pure/Thy/export.ML Sun May 13 16:05:29 2018 +0200
+++ b/src/Pure/Thy/export.ML Sun May 13 16:26:01 2018 +0200
@@ -33,7 +33,7 @@
name = check_name name,
compress = compress} body;
-fun export thy name body = gen_export (fold (Integer.add o size) body 0 > 60) thy name body;
+val export = gen_export true;
val export_raw = gen_export false;
end;
--- a/src/Pure/Thy/export.scala Sun May 13 16:05:29 2018 +0200
+++ b/src/Pure/Thy/export.scala Sun May 13 16:26:01 2018 +0200
@@ -63,14 +63,13 @@
session_name: String,
theory_name: String,
name: String,
- compressed: Boolean,
- body: Future[Bytes])
+ body: Future[(Boolean, Bytes)])
{
override def toString: String = compound_name(theory_name, name)
def write(db: SQL.Database)
{
- val bytes = body.join
+ val (compressed, bytes) = body.join
db.using_statement(Data.table.insert())(stmt =>
{
stmt.string(1) = session_name
@@ -82,8 +81,11 @@
})
}
- def body_uncompressed(cache: XZ.Cache = XZ.cache()): Bytes =
- if (compressed) body.join.uncompress(cache = cache) else body.join
+ def uncompressed(cache: XZ.Cache = XZ.cache()): Bytes =
+ {
+ val (compressed, bytes) = body.join
+ if (compressed) bytes.uncompress(cache = cache) else bytes
+ }
}
def make_regex(pattern: String): Regex =
@@ -114,8 +116,9 @@
def make_entry(session_name: String, args: Markup.Export.Args, body: Bytes,
cache: XZ.Cache = XZ.cache()): Entry =
{
- Entry(session_name, args.theory_name, args.name, args.compress,
- if (args.compress) Future.fork(body.compress(cache = cache)) else Future.value(body))
+ Entry(session_name, args.theory_name, args.name,
+ if (args.compress) Future.fork(body.maybe_compress(cache = cache))
+ else Future.value((false, body)))
}
def read_entry(db: SQL.Database, session_name: String, theory_name: String, name: String): Entry =
@@ -129,7 +132,7 @@
if (res.next()) {
val compressed = res.bool(Data.compressed)
val body = res.bytes(Data.body)
- Entry(session_name, theory_name, name, compressed, Future.value(body))
+ Entry(session_name, theory_name, name, Future.value((compressed, body)))
}
else error(message("Bad export", theory_name, name))
})
@@ -275,7 +278,7 @@
progress.echo("exporting " + path)
Isabelle_System.mkdirs(path.dir)
- Bytes.write(path, entry.body_uncompressed(cache = xz_cache))
+ Bytes.write(path, entry.uncompressed(cache = xz_cache))
}
}
}
--- a/src/Pure/Tools/server_commands.scala Sun May 13 16:05:29 2018 +0200
+++ b/src/Pure/Tools/server_commands.scala Sun May 13 16:26:01 2018 +0200
@@ -220,7 +220,7 @@
val matcher = Export.make_matcher(args.export_pattern)
for { entry <- result.exports(name) if matcher(entry.theory_name, entry.name) }
yield {
- val (base64, body) = entry.body.join.maybe_base64
+ val (base64, body) = entry.uncompressed().maybe_base64
JSON.Object("name" -> entry.name, "base64" -> base64, "body" -> body)
}
}))))