clarified signature: provide XZ.Cache where Export.Entry is created;
author      wenzelm
date        Mon, 07 Dec 2020 20:26:09 +0100
changeset   72847  9dda93a753b1
parent      72846  a23e0964f3c3
child       72848  d5d0e36eda16

files:
src/Pure/Thy/export.scala
src/Pure/Thy/export_theory.scala
src/Pure/Thy/presentation.scala
src/Pure/Thy/sessions.scala
src/Pure/Tools/dump.scala
src/Pure/Tools/server_commands.scala
src/Tools/jEdit/src/isabelle_export.scala
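
The net effect of the signature change: the XZ.Cache is supplied once, at the point where an Export.Entry is created or read, and the entry carries it for later decompression, so Entry.uncompressed and Entry.uncompressed_yxml lose their cache parameter. A minimal caller-side sketch of the new reading API (db, store, session_name, theory_name and export_name are placeholders for an open SQL.Database, a Sessions.Store and the usual name strings):

    import isabelle._

    // before this changeset the cache was passed at each access site:
    //   entry.uncompressed(cache = store.xz_cache)
    // now it is bound when the entry is read:
    for (entry <- Export.read_entry(db, store.xz_cache, session_name, theory_name, export_name)) {
      val bytes: Bytes = entry.uncompressed          // decompresses with the entry's own cache
      val body: XML.Body = entry.uncompressed_yxml   // likewise, no cache argument
    }
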
--- a/src/Pure/Thy/export.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Pure/Thy/export.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -85,14 +85,16 @@
   def compound_name(a: String, b: String): String = a + ":" + b
 
   def empty_entry(session_name: String, theory_name: String, name: String): Entry =
-    Entry(session_name, theory_name, name, false, Future.value(false, Bytes.empty))
+    Entry(session_name, theory_name, name, false,
+      Future.value(false, Bytes.empty), XZ.no_cache())
 
   sealed case class Entry(
     session_name: String,
     theory_name: String,
     name: String,
     executable: Boolean,
-    body: Future[(Boolean, Bytes)])
+    body: Future[(Boolean, Bytes)],
+    cache: XZ.Cache)
   {
     override def toString: String = name
 
@@ -104,16 +106,16 @@
     def name_extends(elems: List[String]): Boolean =
       name_elems.startsWith(elems) && name_elems != elems
 
-    def text: String = uncompressed().text
+    def text: String = uncompressed.text
 
-    def uncompressed(cache: XZ.Cache = XZ.cache()): Bytes =
+    def uncompressed: Bytes =
     {
       val (compressed, bytes) = body.join
       if (compressed) bytes.uncompress(cache = cache) else bytes
     }
 
-    def uncompressed_yxml(cache: XZ.Cache = XZ.cache()): XML.Body =
-      YXML.parse_body(UTF8.decode_permissive(uncompressed(cache = cache)))
+    def uncompressed_yxml: XML.Body =
+      YXML.parse_body(UTF8.decode_permissive(uncompressed))
 
     def write(db: SQL.Database)
     {
@@ -156,16 +158,19 @@
       regex.pattern.matcher(compound_name(theory_name, name)).matches
   }
 
-  def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes,
-    cache: XZ.Cache = XZ.cache()): Entry =
+  def make_entry(
+    session_name: String, args: Protocol.Export.Args, bytes: Bytes,
+    cache: XZ.Cache): Entry =
   {
-    Entry(session_name, args.theory_name, args.name, args.executable,
-      if (args.compress) Future.fork(body.maybe_compress(cache = cache))
-      else Future.value((false, body)))
+    val body =
+      if (args.compress) Future.fork(bytes.maybe_compress(cache = cache))
+      else Future.value((false, bytes))
+    Entry(session_name, args.theory_name, args.name, args.executable, body, cache)
   }
 
-  def read_entry(db: SQL.Database, session_name: String, theory_name: String, name: String)
-    : Option[Entry] =
+  def read_entry(
+    db: SQL.Database, cache: XZ.Cache,
+    session_name: String, theory_name: String, name: String): Option[Entry] =
   {
     val select =
       Data.table.select(List(Data.executable, Data.compressed, Data.body),
@@ -176,20 +181,24 @@
       if (res.next()) {
         val executable = res.bool(Data.executable)
         val compressed = res.bool(Data.compressed)
-        val body = res.bytes(Data.body)
-        Some(Entry(session_name, theory_name, name, executable, Future.value(compressed, body)))
+        val bytes = res.bytes(Data.body)
+        val body = Future.value(compressed, bytes)
+        Some(Entry(session_name, theory_name, name, executable, body, cache))
       }
       else None
     })
   }
 
-  def read_entry(dir: Path, session_name: String, theory_name: String, name: String): Option[Entry] =
+  def read_entry(
+    dir: Path, cache: XZ.Cache,
+    session_name: String, theory_name: String, name: String): Option[Entry] =
   {
     val path = dir + Path.basic(theory_name) + Path.explode(name)
     if (path.is_file) {
       val executable = File.is_executable(path)
       val uncompressed = Bytes.read(path)
-      Some(Entry(session_name, theory_name, name, executable, Future.value((false, uncompressed))))
+      val body = Future.value((false, uncompressed))
+      Some(Entry(session_name, theory_name, name, executable, body, cache))
     }
     else None
   }
@@ -197,7 +206,7 @@
 
   /* database consumer thread */
 
-  def consumer(db: SQL.Database, cache: XZ.Cache = XZ.cache()): Consumer = new Consumer(db, cache)
+  def consumer(db: SQL.Database, cache: XZ.Cache): Consumer = new Consumer(db, cache)
 
   class Consumer private[Export](db: SQL.Database, cache: XZ.Cache)
   {
@@ -227,7 +236,7 @@
           })
 
     def apply(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit =
-      consumer.send(make_entry(session_name, args, body, cache = cache) -> args.strict)
+      consumer.send(make_entry(session_name, args, body, cache) -> args.strict)
 
     def shutdown(close: Boolean = false): List[String] =
     {
@@ -261,14 +270,16 @@
         override def toString: String = context.toString
       }
 
-    def database(db: SQL.Database, session_name: String, theory_name: String): Provider =
+    def database(
+        db: SQL.Database, cache: XZ.Cache,
+        session_name: String, theory_name: String): Provider =
       new Provider {
         def apply(export_name: String): Option[Entry] =
-          read_entry(db, session_name, theory_name, export_name)
+          read_entry(db, cache, session_name, theory_name, export_name)
 
         def focus(other_theory: String): Provider =
           if (other_theory == theory_name) this
-          else Provider.database(db, session_name, other_theory)
+          else Provider.database(db, cache, session_name, other_theory)
 
         override def toString: String = db.toString
       }
@@ -290,14 +301,16 @@
         override def toString: String = snapshot.toString
       }
 
-    def directory(dir: Path, session_name: String, theory_name: String): Provider =
+    def directory(
+        dir: Path, cache: XZ.Cache,
+        session_name: String, theory_name: String): Provider =
       new Provider {
         def apply(export_name: String): Option[Entry] =
-          read_entry(dir, session_name, theory_name, export_name)
+          read_entry(dir, cache, session_name, theory_name, export_name)
 
         def focus(other_theory: String): Provider =
           if (other_theory == theory_name) this
-          else Provider.directory(dir, session_name, other_theory)
+          else Provider.directory(dir, cache, session_name, other_theory)
 
         override def toString: String = dir.toString
       }
@@ -307,9 +320,9 @@
   {
     def apply(export_name: String): Option[Entry]
 
-    def uncompressed_yxml(export_name: String, cache: XZ.Cache = XZ.cache()): XML.Body =
+    def uncompressed_yxml(export_name: String): XML.Body =
       apply(export_name) match {
-        case Some(entry) => entry.uncompressed_yxml(cache = cache)
+        case Some(entry) => entry.uncompressed_yxml
         case None => Nil
       }
 
@@ -350,7 +363,7 @@
           for {
             (theory_name, group) <- exports.toList.groupBy(_._1).toList.sortBy(_._1)
             name <- group.map(_._2).sorted
-            entry <- read_entry(db, session_name, theory_name, name)
+            entry <- read_entry(db, store.xz_cache, session_name, theory_name, name)
           } {
             val elems = theory_name :: space_explode('/', name)
             val path =
@@ -361,7 +374,7 @@
 
             progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
             Isabelle_System.make_directory(path.dir)
-            Bytes.write(path, entry.uncompressed(cache = store.xz_cache))
+            Bytes.write(path, entry.uncompressed)
             File.set_executable(path, entry.executable)
           }
         }
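
The writing side follows the same pattern: Export.consumer no longer has a default cache argument, so the cache is given explicitly when the consumer is constructed and is then reused for every entry it compresses and stores. A brief sketch under the same assumptions (db, store, session_name, args and body are placeholders):

    val consumer = Export.consumer(db, store.xz_cache)
    consumer(session_name, args, body)             // compresses via the given cache if args.compress
    val errors = consumer.shutdown(close = true)
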
--- a/src/Pure/Thy/export_theory.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Pure/Thy/export_theory.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -41,7 +41,7 @@
             for (theory <- Export.read_theory_names(db, session))
             yield {
               progress.echo("Reading theory " + theory)
-              read_theory(Export.Provider.database(db, session, theory),
+              read_theory(Export.Provider.database(db, store.xz_cache, session, theory),
                 session, theory, cache = Some(cache))
             }
           }
@@ -113,7 +113,7 @@
       if (theory_name == Thy_Header.PURE) Nil
       else {
         provider(Export.THEORY_PREFIX + "parents") match {
-          case Some(entry) => split_lines(entry.uncompressed().text)
+          case Some(entry) => split_lines(entry.uncompressed.text)
           case None =>
             error("Missing theory export in session " + quote(session_name) + ": " +
               quote(theory_name))
@@ -145,7 +145,8 @@
     using(store.open_database(session_name))(db =>
     {
       db.transaction {
-        read(Export.Provider.database(db, session_name, theory_name), session_name, theory_name)
+        read(Export.Provider.database(
+          db, store.xz_cache, session_name, theory_name), session_name, theory_name)
       }
     })
   }
@@ -378,7 +379,7 @@
   {
     for { entry <- provider.focus(id.theory_name)(Export.PROOFS_PREFIX + id.serial) }
     yield {
-      val body = entry.uncompressed_yxml()
+      val body = entry.uncompressed_yxml
       val (typargs, (args, (prop_body, proof_body))) =
       {
         import XML.Decode._
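
Readers that go through Export.Provider are updated in the same way: the provider captures the cache when it is constructed, as the export_theory.scala hunks above show. A small sketch of the resulting call pattern (db, store, session_name and theory_name are placeholders):

    val provider = Export.Provider.database(db, store.xz_cache, session_name, theory_name)
    val parents: List[String] =
      provider(Export.THEORY_PREFIX + "parents") match {
        case Some(entry) => split_lines(entry.uncompressed.text)
        case None => Nil
      }
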
--- a/src/Pure/Thy/presentation.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Pure/Thy/presentation.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -472,7 +472,7 @@
       for (name <- base.session_theories ::: base.document_theories)
       yield {
         val entry = db_context.get_export(session, name.theory, document_tex_name(name))
-        Path.basic(tex_name(name)) -> entry.uncompressed(cache = db_context.xz_cache)
+        Path.basic(tex_name(name)) -> entry.uncompressed
       }
 
     def prepare_dir1(dir: Path, doc: Document_Variant): (Path, String) =
--- a/src/Pure/Thy/sessions.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Pure/Thy/sessions.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -1224,11 +1224,13 @@
       val attempts =
         database_server match {
           case Some(db) =>
-            hierarchy.map(session_name => Export.read_entry(db, session_name, theory_name, name))
+            hierarchy.map(session_name =>
+              Export.read_entry(db, store.xz_cache, session_name, theory_name, name))
           case None =>
             hierarchy.map(session_name =>
               store.try_open_database(session_name) match {
-                case Some(db) => using(db)(Export.read_entry(_, session_name, theory_name, name))
+                case Some(db) =>
+                  using(db)(Export.read_entry(_, store.xz_cache, session_name, theory_name, name))
                 case None => None
               })
         }
--- a/src/Pure/Tools/dump.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Pure/Tools/dump.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -56,13 +56,13 @@
           for {
             entry <- args.snapshot.exports
             if entry.name_has_prefix(Export.DOCUMENT_PREFIX)
-          } args.write(Path.explode(entry.name), entry.uncompressed())),
+          } args.write(Path.explode(entry.name), entry.uncompressed)),
       Aspect("theory", "foundational theory content",
         args =>
           for {
             entry <- args.snapshot.exports
             if entry.name_has_prefix(Export.THEORY_PREFIX)
-          } args.write(Path.explode(entry.name), entry.uncompressed()),
+          } args.write(Path.explode(entry.name), entry.uncompressed),
         options = List("export_theory"))
     ).sortBy(_.name)
 
--- a/src/Pure/Tools/server_commands.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Pure/Tools/server_commands.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -276,7 +276,7 @@
                     val matcher = Export.make_matcher(args.export_pattern)
                     for { entry <- snapshot.exports if matcher(entry.theory_name, entry.name) }
                     yield {
-                      val (base64, body) = entry.uncompressed().maybe_base64
+                      val (base64, body) = entry.uncompressed.maybe_base64
                       JSON.Object("name" -> entry.name, "base64" -> base64, "body" -> body)
                     }
                   }))
--- a/src/Tools/jEdit/src/isabelle_export.scala	Mon Dec 07 19:45:52 2020 +0100
+++ b/src/Tools/jEdit/src/isabelle_export.scala	Mon Dec 07 20:26:09 2020 +0100
@@ -49,7 +49,7 @@
                 } yield {
                   val is_dir = entry.name_elems.length > depth
                   val path = Export.implode_name(theory :: entry.name_elems.take(depth))
-                  val file_size = if (is_dir) None else Some(entry.uncompressed().length.toLong)
+                  val file_size = if (is_dir) None else Some(entry.uncompressed.length.toLong)
                   (path, file_size)
                 }).toSet.toList
               (for ((path, file_size) <- entries.iterator) yield {
@@ -76,7 +76,7 @@
               if node_name.theory == theory
               (_, entry) <- snapshot.state.node_exports(version, node_name).iterator
               if entry.name_elems == name_elems
-            } yield entry.uncompressed()).find(_ => true).getOrElse(Bytes.empty)
+            } yield entry.uncompressed).find(_ => true).getOrElse(Bytes.empty)
 
           bytes.stream()
       }