merged
author wenzelm
Fri Apr 20 22:22:46 2018 +0200 (6 months ago)
changeset 68019 32d19862781b
parent 68017 e99f9b3962bf
parent 68018 3747fe57eb67
child 68020 6aade817bee5
merged
     1.1 --- a/src/Pure/Admin/build_log.scala	Fri Apr 20 19:11:17 2018 +0100
     1.2 +++ b/src/Pure/Admin/build_log.scala	Fri Apr 20 22:22:46 2018 +0200
     1.3 @@ -675,13 +675,18 @@
     1.4        errors = log_file.filter_lines(ERROR_MESSAGE_MARKER).map(Library.decode_lines(_)))
     1.5    }
     1.6  
     1.7 -  def compress_errors(errors: List[String]): Option[Bytes] =
     1.8 +  def compress_errors(errors: List[String], cache: XZ.Cache = XZ.cache()): Option[Bytes] =
     1.9      if (errors.isEmpty) None
    1.10 -    else Some(Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.string)(errors))).compress())
    1.11 +    else {
    1.12 +      Some(Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.string)(errors))).
    1.13 +        compress(cache = cache))
    1.14 +    }
    1.15  
    1.16 -  def uncompress_errors(bytes: Bytes): List[String] =
    1.17 +  def uncompress_errors(bytes: Bytes, cache: XZ.Cache = XZ.cache()): List[String] =
    1.18      if (bytes.isEmpty) Nil
    1.19 -    else XML.Decode.list(YXML.string_of_body(_))(YXML.parse_body(bytes.uncompress().text))
    1.20 +    else {
    1.21 +      XML.Decode.list(YXML.string_of_body(_))(YXML.parse_body(bytes.uncompress(cache = cache).text))
    1.22 +    }
    1.23  
    1.24  
    1.25  
    1.26 @@ -877,6 +882,7 @@
    1.27    class Store private[Build_Log](options: Options)
    1.28    {
    1.29      val xml_cache: XML.Cache = new XML.Cache()
    1.30 +    val xz_cache: XZ.Cache = XZ.make_cache()
    1.31  
    1.32      def open_database(
    1.33        user: String = options.string("build_log_database_user"),
    1.34 @@ -1011,7 +1017,7 @@
    1.35            stmt.double(13) = session.ml_timing.factor
    1.36            stmt.long(14) = session.heap_size
    1.37            stmt.string(15) = session.status.map(_.toString)
    1.38 -          stmt.bytes(16) = compress_errors(session.errors)
    1.39 +          stmt.bytes(16) = compress_errors(session.errors, cache = xz_cache)
    1.40            stmt.string(17) = session.sources
    1.41            stmt.execute()
    1.42          }
    1.43 @@ -1049,7 +1055,7 @@
    1.44        {
    1.45          val ml_stats: List[(String, Option[Bytes])] =
    1.46            Par_List.map[(String, Session_Entry), (String, Option[Bytes])](
    1.47 -            { case (a, b) => (a, Properties.compress(b.ml_statistics).proper) },
    1.48 +            { case (a, b) => (a, Properties.compress(b.ml_statistics, cache = xz_cache).proper) },
    1.49              build_info.sessions.iterator.filter(p => p._2.ml_statistics.nonEmpty).toList)
    1.50          val entries = if (ml_stats.nonEmpty) ml_stats else List("" -> None)
    1.51          for ((session_name, ml_statistics) <- entries) {
    1.52 @@ -1178,10 +1184,12 @@
    1.53                  sources = res.get_string(Data.sources),
    1.54                  heap_size = res.get_long(Data.heap_size),
    1.55                  status = res.get_string(Data.status).map(Session_Status.withName(_)),
    1.56 -                errors = uncompress_errors(res.bytes(Data.errors)),
    1.57 +                errors = uncompress_errors(res.bytes(Data.errors), cache = xz_cache),
    1.58                  ml_statistics =
    1.59 -                  if (ml_statistics)
    1.60 -                    Properties.uncompress(res.bytes(Data.ml_statistics), Some(xml_cache))
    1.61 +                  if (ml_statistics) {
    1.62 +                    Properties.uncompress(
    1.63 +                      res.bytes(Data.ml_statistics), cache = xz_cache, Some(xml_cache))
    1.64 +                  }
    1.65                    else Nil)
    1.66              session_name -> session_entry
    1.67            }).toMap
     2.1 --- a/src/Pure/Admin/build_status.scala	Fri Apr 20 19:11:17 2018 +0100
     2.2 +++ b/src/Pure/Admin/build_status.scala	Fri Apr 20 22:22:46 2018 +0200
     2.3 @@ -204,6 +204,7 @@
     2.4        data_hosts.getOrElse(data_name, Set.empty)
     2.5  
     2.6      val store = Build_Log.store(options)
     2.7 +
     2.8      using(store.open_database())(db =>
     2.9      {
    2.10        for (profile <- profiles.sortBy(_.description)) {
    2.11 @@ -272,8 +273,10 @@
    2.12  
    2.13              val ml_stats =
    2.14                ML_Statistics(
    2.15 -                if (ml_statistics)
    2.16 -                  Properties.uncompress(res.bytes(Build_Log.Data.ml_statistics))
    2.17 +                if (ml_statistics) {
    2.18 +                  Properties.uncompress(
    2.19 +                    res.bytes(Build_Log.Data.ml_statistics), cache = store.xz_cache)
    2.20 +                }
    2.21                  else Nil,
    2.22                  domain = ml_statistics_domain,
    2.23                  heading = session_name + print_version(isabelle_version, afp_version, chapter))
    2.24 @@ -300,7 +303,9 @@
    2.25                  average_heap = ml_stats.average_heap_size,
    2.26                  stored_heap = ML_Statistics.heap_scale(res.long(Build_Log.Data.heap_size)),
    2.27                  status = Build_Log.Session_Status.withName(res.string(Build_Log.Data.status)),
    2.28 -                errors = Build_Log.uncompress_errors(res.bytes(Build_Log.Data.errors)))
    2.29 +                errors =
    2.30 +                  Build_Log.uncompress_errors(res.bytes(Build_Log.Data.errors),
    2.31 +                    cache = store.xz_cache))
    2.32  
    2.33              val sessions = data_entries.getOrElse(data_name, Map.empty)
    2.34              val session =
     3.1 --- a/src/Pure/General/bytes.scala	Fri Apr 20 19:11:17 2018 +0100
     3.2 +++ b/src/Pure/General/bytes.scala	Fri Apr 20 22:22:46 2018 +0200
     3.3 @@ -178,13 +178,13 @@
     3.4  
     3.5    /* XZ data compression */
     3.6  
     3.7 -  def uncompress(): Bytes =
     3.8 -    using(new XZInputStream(stream()))(Bytes.read_stream(_, hint = length))
     3.9 +  def uncompress(cache: XZ.Cache = XZ.cache()): Bytes =
    3.10 +    using(new XZInputStream(stream(), cache))(Bytes.read_stream(_, hint = length))
    3.11  
    3.12 -  def compress(options: XZ.Options = XZ.options()): Bytes =
    3.13 +  def compress(options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.cache()): Bytes =
    3.14    {
    3.15      val result = new ByteArrayOutputStream(length)
    3.16 -    using(new XZOutputStream(result, options))(write_stream(_))
    3.17 +    using(new XZOutputStream(result, options, cache))(write_stream(_))
    3.18      new Bytes(result.toByteArray, 0, result.size)
    3.19    }
    3.20  }
     4.1 --- a/src/Pure/General/properties.scala	Fri Apr 20 19:11:17 2018 +0100
     4.2 +++ b/src/Pure/General/properties.scala	Fri Apr 20 22:22:46 2018 +0200
     4.3 @@ -46,17 +46,25 @@
     4.4      }
     4.5    }
     4.6  
     4.7 -  def compress(ps: List[T], options: XZ.Options = XZ.options()): Bytes =
     4.8 +  def compress(ps: List[T],
     4.9 +    options: XZ.Options = XZ.options(),
    4.10 +    cache: XZ.Cache = XZ.cache()): Bytes =
    4.11    {
    4.12      if (ps.isEmpty) Bytes.empty
    4.13 -    else Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).compress(options)
    4.14 +    else {
    4.15 +      Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).
    4.16 +        compress(options = options, cache = cache)
    4.17 +    }
    4.18    }
    4.19  
    4.20 -  def uncompress(bs: Bytes, xml_cache: Option[XML.Cache] = None): List[T] =
    4.21 +  def uncompress(bs: Bytes,
    4.22 +    cache: XZ.Cache = XZ.cache(),
    4.23 +    xml_cache: Option[XML.Cache] = None): List[T] =
    4.24    {
    4.25      if (bs.isEmpty) Nil
    4.26      else {
    4.27 -      val ps = XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress().text))
    4.28 +      val ps =
    4.29 +        XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress(cache = cache).text))
    4.30        xml_cache match {
    4.31          case None => ps
    4.32          case Some(cache) => ps.map(cache.props(_))
     5.1 --- a/src/Pure/General/xz.scala	Fri Apr 20 19:11:17 2018 +0100
     5.2 +++ b/src/Pure/General/xz.scala	Fri Apr 20 22:22:46 2018 +0200
     5.3 @@ -7,11 +7,13 @@
     5.4  package isabelle
     5.5  
     5.6  
     5.7 -import org.tukaani.xz.LZMA2Options
     5.8 +import org.tukaani.xz.{LZMA2Options, ArrayCache, BasicArrayCache}
     5.9  
    5.10  
    5.11  object XZ
    5.12  {
    5.13 +  /* options */
    5.14 +
    5.15    type Options = LZMA2Options
    5.16  
    5.17    def options(preset: Int = 3): Options =
    5.18 @@ -20,4 +22,12 @@
    5.19      opts.setPreset(preset)
    5.20      opts
    5.21    }
    5.22 +
    5.23 +
    5.24 +  /* cache */
    5.25 +
    5.26 +  type Cache = ArrayCache
    5.27 +
    5.28 +  def cache(): ArrayCache = ArrayCache.getDefaultCache()
    5.29 +  def make_cache(): ArrayCache = new BasicArrayCache
    5.30  }
     6.1 --- a/src/Pure/Thy/sessions.scala	Fri Apr 20 19:11:17 2018 +0100
     6.2 +++ b/src/Pure/Thy/sessions.scala	Fri Apr 20 22:22:46 2018 +0200
     6.3 @@ -974,6 +974,7 @@
     6.4      /* SQL database content */
     6.5  
     6.6      val xml_cache = new XML.Cache()
     6.7 +    val xz_cache = XZ.make_cache()
     6.8  
     6.9      def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
    6.10        db.using_statement(Session_Info.table.select(List(column),
    6.11 @@ -984,7 +985,8 @@
    6.12        })
    6.13  
    6.14      def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] =
    6.15 -      Properties.uncompress(read_bytes(db, name, column), Some(xml_cache))
    6.16 +      Properties.uncompress(
    6.17 +        read_bytes(db, name, column), cache = xz_cache, xml_cache = Some(xml_cache))
    6.18  
    6.19  
    6.20      /* output */
    6.21 @@ -1040,11 +1042,11 @@
    6.22          {
    6.23            stmt.string(1) = name
    6.24            stmt.bytes(2) = Properties.encode(build_log.session_timing)
    6.25 -          stmt.bytes(3) = Properties.compress(build_log.command_timings)
    6.26 -          stmt.bytes(4) = Properties.compress(build_log.theory_timings)
    6.27 -          stmt.bytes(5) = Properties.compress(build_log.ml_statistics)
    6.28 -          stmt.bytes(6) = Properties.compress(build_log.task_statistics)
    6.29 -          stmt.bytes(7) = Build_Log.compress_errors(build_log.errors)
    6.30 +          stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = xz_cache)
    6.31 +          stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = xz_cache)
    6.32 +          stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = xz_cache)
    6.33 +          stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = xz_cache)
    6.34 +          stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = xz_cache)
    6.35            stmt.string(8) = build.sources
    6.36            stmt.string(9) = cat_lines(build.input_heaps)
    6.37            stmt.string(10) = build.output_heap getOrElse ""
    6.38 @@ -1070,7 +1072,7 @@
    6.39        read_properties(db, name, Session_Info.task_statistics)
    6.40  
    6.41      def read_errors(db: SQL.Database, name: String): List[String] =
    6.42 -      Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors))
    6.43 +      Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = xz_cache)
    6.44  
    6.45      def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] =
    6.46      {