support for XZ.Cache;
author wenzelm
Fri, 20 Apr 2018 22:17:42 +0200
changeset 68018 3747fe57eb67
parent 68016 5eb4081e6bf6
child 68019 32d19862781b
support for XZ.Cache;
src/Pure/Admin/build_log.scala
src/Pure/Admin/build_status.scala
src/Pure/General/bytes.scala
src/Pure/General/properties.scala
src/Pure/General/xz.scala
src/Pure/Thy/sessions.scala
--- a/src/Pure/Admin/build_log.scala	Fri Apr 20 15:58:02 2018 +0200
+++ b/src/Pure/Admin/build_log.scala	Fri Apr 20 22:17:42 2018 +0200
@@ -675,13 +675,18 @@
       errors = log_file.filter_lines(ERROR_MESSAGE_MARKER).map(Library.decode_lines(_)))
   }
 
-  def compress_errors(errors: List[String]): Option[Bytes] =
+  def compress_errors(errors: List[String], cache: XZ.Cache = XZ.cache()): Option[Bytes] =
     if (errors.isEmpty) None
-    else Some(Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.string)(errors))).compress())
+    else {
+      Some(Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.string)(errors))).
+        compress(cache = cache))
+    }
 
-  def uncompress_errors(bytes: Bytes): List[String] =
+  def uncompress_errors(bytes: Bytes, cache: XZ.Cache = XZ.cache()): List[String] =
     if (bytes.isEmpty) Nil
-    else XML.Decode.list(YXML.string_of_body(_))(YXML.parse_body(bytes.uncompress().text))
+    else {
+      XML.Decode.list(YXML.string_of_body(_))(YXML.parse_body(bytes.uncompress(cache = cache).text))
+    }
 
 
 
@@ -877,6 +882,7 @@
   class Store private[Build_Log](options: Options)
   {
     val xml_cache: XML.Cache = new XML.Cache()
+    val xz_cache: XZ.Cache = XZ.make_cache()
 
     def open_database(
       user: String = options.string("build_log_database_user"),
@@ -1011,7 +1017,7 @@
           stmt.double(13) = session.ml_timing.factor
           stmt.long(14) = session.heap_size
           stmt.string(15) = session.status.map(_.toString)
-          stmt.bytes(16) = compress_errors(session.errors)
+          stmt.bytes(16) = compress_errors(session.errors, cache = xz_cache)
           stmt.string(17) = session.sources
           stmt.execute()
         }
@@ -1049,7 +1055,7 @@
       {
         val ml_stats: List[(String, Option[Bytes])] =
           Par_List.map[(String, Session_Entry), (String, Option[Bytes])](
-            { case (a, b) => (a, Properties.compress(b.ml_statistics).proper) },
+            { case (a, b) => (a, Properties.compress(b.ml_statistics, cache = xz_cache).proper) },
             build_info.sessions.iterator.filter(p => p._2.ml_statistics.nonEmpty).toList)
         val entries = if (ml_stats.nonEmpty) ml_stats else List("" -> None)
         for ((session_name, ml_statistics) <- entries) {
@@ -1178,10 +1184,12 @@
                 sources = res.get_string(Data.sources),
                 heap_size = res.get_long(Data.heap_size),
                 status = res.get_string(Data.status).map(Session_Status.withName(_)),
-                errors = uncompress_errors(res.bytes(Data.errors)),
+                errors = uncompress_errors(res.bytes(Data.errors), cache = xz_cache),
                 ml_statistics =
-                  if (ml_statistics)
-                    Properties.uncompress(res.bytes(Data.ml_statistics), Some(xml_cache))
+                  if (ml_statistics) {
+                    Properties.uncompress(
+                      res.bytes(Data.ml_statistics), cache = xz_cache, Some(xml_cache))
+                  }
                   else Nil)
             session_name -> session_entry
           }).toMap
--- a/src/Pure/Admin/build_status.scala	Fri Apr 20 15:58:02 2018 +0200
+++ b/src/Pure/Admin/build_status.scala	Fri Apr 20 22:17:42 2018 +0200
@@ -204,6 +204,7 @@
       data_hosts.getOrElse(data_name, Set.empty)
 
     val store = Build_Log.store(options)
+
     using(store.open_database())(db =>
     {
       for (profile <- profiles.sortBy(_.description)) {
@@ -272,8 +273,10 @@
 
             val ml_stats =
               ML_Statistics(
-                if (ml_statistics)
-                  Properties.uncompress(res.bytes(Build_Log.Data.ml_statistics))
+                if (ml_statistics) {
+                  Properties.uncompress(
+                    res.bytes(Build_Log.Data.ml_statistics), cache = store.xz_cache)
+                }
                 else Nil,
                 domain = ml_statistics_domain,
                 heading = session_name + print_version(isabelle_version, afp_version, chapter))
@@ -300,7 +303,9 @@
                 average_heap = ml_stats.average_heap_size,
                 stored_heap = ML_Statistics.heap_scale(res.long(Build_Log.Data.heap_size)),
                 status = Build_Log.Session_Status.withName(res.string(Build_Log.Data.status)),
-                errors = Build_Log.uncompress_errors(res.bytes(Build_Log.Data.errors)))
+                errors =
+                  Build_Log.uncompress_errors(res.bytes(Build_Log.Data.errors),
+                    cache = store.xz_cache))
 
             val sessions = data_entries.getOrElse(data_name, Map.empty)
             val session =
--- a/src/Pure/General/bytes.scala	Fri Apr 20 15:58:02 2018 +0200
+++ b/src/Pure/General/bytes.scala	Fri Apr 20 22:17:42 2018 +0200
@@ -178,13 +178,13 @@
 
   /* XZ data compression */
 
-  def uncompress(): Bytes =
-    using(new XZInputStream(stream()))(Bytes.read_stream(_, hint = length))
+  def uncompress(cache: XZ.Cache = XZ.cache()): Bytes =
+    using(new XZInputStream(stream(), cache))(Bytes.read_stream(_, hint = length))
 
-  def compress(options: XZ.Options = XZ.options()): Bytes =
+  def compress(options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.cache()): Bytes =
   {
     val result = new ByteArrayOutputStream(length)
-    using(new XZOutputStream(result, options))(write_stream(_))
+    using(new XZOutputStream(result, options, cache))(write_stream(_))
     new Bytes(result.toByteArray, 0, result.size)
   }
 }
--- a/src/Pure/General/properties.scala	Fri Apr 20 15:58:02 2018 +0200
+++ b/src/Pure/General/properties.scala	Fri Apr 20 22:17:42 2018 +0200
@@ -46,17 +46,25 @@
     }
   }
 
-  def compress(ps: List[T], options: XZ.Options = XZ.options()): Bytes =
+  def compress(ps: List[T],
+    options: XZ.Options = XZ.options(),
+    cache: XZ.Cache = XZ.cache()): Bytes =
   {
     if (ps.isEmpty) Bytes.empty
-    else Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).compress(options)
+    else {
+      Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).
+        compress(options = options, cache = cache)
+    }
   }
 
-  def uncompress(bs: Bytes, xml_cache: Option[XML.Cache] = None): List[T] =
+  def uncompress(bs: Bytes,
+    cache: XZ.Cache = XZ.cache(),
+    xml_cache: Option[XML.Cache] = None): List[T] =
   {
     if (bs.isEmpty) Nil
     else {
-      val ps = XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress().text))
+      val ps =
+        XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress(cache = cache).text))
       xml_cache match {
         case None => ps
         case Some(cache) => ps.map(cache.props(_))
--- a/src/Pure/General/xz.scala	Fri Apr 20 15:58:02 2018 +0200
+++ b/src/Pure/General/xz.scala	Fri Apr 20 22:17:42 2018 +0200
@@ -7,11 +7,13 @@
 package isabelle
 
 
-import org.tukaani.xz.LZMA2Options
+import org.tukaani.xz.{LZMA2Options, ArrayCache, BasicArrayCache}
 
 
 object XZ
 {
+  /* options */
+
   type Options = LZMA2Options
 
   def options(preset: Int = 3): Options =
@@ -20,4 +22,12 @@
     opts.setPreset(preset)
     opts
   }
+
+
+  /* cache */
+
+  type Cache = ArrayCache
+
+  def cache(): ArrayCache = ArrayCache.getDefaultCache()
+  def make_cache(): ArrayCache = new BasicArrayCache
 }
--- a/src/Pure/Thy/sessions.scala	Fri Apr 20 15:58:02 2018 +0200
+++ b/src/Pure/Thy/sessions.scala	Fri Apr 20 22:17:42 2018 +0200
@@ -974,6 +974,7 @@
     /* SQL database content */
 
     val xml_cache = new XML.Cache()
+    val xz_cache = XZ.make_cache()
 
     def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
       db.using_statement(Session_Info.table.select(List(column),
@@ -984,7 +985,8 @@
       })
 
     def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] =
-      Properties.uncompress(read_bytes(db, name, column), Some(xml_cache))
+      Properties.uncompress(
+        read_bytes(db, name, column), cache = xz_cache, xml_cache = Some(xml_cache))
 
 
     /* output */
@@ -1040,11 +1042,11 @@
         {
           stmt.string(1) = name
           stmt.bytes(2) = Properties.encode(build_log.session_timing)
-          stmt.bytes(3) = Properties.compress(build_log.command_timings)
-          stmt.bytes(4) = Properties.compress(build_log.theory_timings)
-          stmt.bytes(5) = Properties.compress(build_log.ml_statistics)
-          stmt.bytes(6) = Properties.compress(build_log.task_statistics)
-          stmt.bytes(7) = Build_Log.compress_errors(build_log.errors)
+          stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = xz_cache)
+          stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = xz_cache)
+          stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = xz_cache)
+          stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = xz_cache)
+          stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = xz_cache)
           stmt.string(8) = build.sources
           stmt.string(9) = cat_lines(build.input_heaps)
           stmt.string(10) = build.output_heap getOrElse ""
@@ -1070,7 +1072,7 @@
       read_properties(db, name, Session_Info.task_statistics)
 
     def read_errors(db: SQL.Database, name: String): List[String] =
-      Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors))
+      Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = xz_cache)
 
     def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] =
     {