clarified signature: more explicit types (see also 90c552d28d36);
author wenzelm
Mon, 02 Jan 2023 15:05:15 +0100
changeset 76866 19bfc64a7310
parent 76865 9d0e6ea7aa68
child 76867 165ba28378f6
clarified signature: more explicit types (see also 90c552d28d36);
src/Pure/Thy/sessions.scala
src/Pure/Tools/build.scala
--- a/src/Pure/Thy/sessions.scala	Mon Jan 02 13:54:40 2023 +0100
+++ b/src/Pure/Thy/sessions.scala	Mon Jan 02 15:05:15 2023 +0100
@@ -55,6 +55,49 @@
   }
 
 
+  /* source files */
+
+  sealed case class Source_File(name: String, digest: SHA1.Digest, compressed: Boolean, body: Bytes)
+
+  object Sources {
+    val session_name = SQL.Column.string("session_name").make_primary_key
+    val name = SQL.Column.string("name").make_primary_key
+    val digest = SQL.Column.string("digest")
+    val compressed = SQL.Column.bool("compressed")
+    val body = SQL.Column.bytes("body")
+
+    val table =
+      SQL.Table("isabelle_sources", List(session_name, name, digest, compressed, body))
+
+    def where_equal(session_name: String, name: String = ""): SQL.Source =
+      "WHERE " + Sources.session_name.equal(session_name) +
+        (if (name == "") "" else " AND " + Sources.name.equal(name))
+
+
+    type T = Map[String, Source_File]
+
+    def read_files(session_base: Base, cache: Compress.Cache = Compress.Cache.none): T = {
+      def err(path: Path): Nothing = error("Incoherent digest for source file: " + path)
+
+      session_base.session_sources.foldLeft(Map.empty[String, Source_File]) {
+        case (sources, (path, digest)) =>
+          val name = path.implode_symbolic
+          sources.get(name) match {
+            case Some(source_file) => if (source_file.digest == digest) sources else err(path)
+            case None =>
+              val bytes = Bytes.read(path)
+              if (bytes.sha1_digest == digest) {
+                val (compressed, body) = bytes.maybe_compress(Compress.Options_Zstd(), cache = cache)
+                val file = Source_File(name, digest, compressed, body)
+                sources + (name -> file)
+              }
+              else err(path)
+          }
+      }
+    }
+  }
+
+
   /* base info */
 
   object Base {
@@ -248,7 +291,7 @@
     var cache_sources = Map.empty[JFile, SHA1.Digest]
     def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
       for {
-        path <- Library.distinct(paths)
+        path <- paths
         file = path.file
         if cache_sources.isDefinedAt(file) || file.isFile
       }
@@ -1336,21 +1379,6 @@
       " ADD COLUMN IF NOT EXISTS " + uuid.decl(SQL.sql_type_postgresql)
   }
 
-  object Sources {
-    val session_name = SQL.Column.string("session_name").make_primary_key
-    val name = SQL.Column.string("name").make_primary_key
-    val digest = SQL.Column.string("digest")
-    val compressed = SQL.Column.bool("compressed")
-    val body = SQL.Column.bytes("body")
-
-    val table =
-      SQL.Table("isabelle_sources", List(session_name, name, digest, compressed, body))
-
-    def where_equal(session_name: String, name: String = ""): SQL.Source =
-      "WHERE " + Sources.session_name.equal(session_name) +
-        (if (name == "") "" else " AND " + Sources.name.equal(name))
-  }
-
   def store(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
     new Store(options, cache)
 
@@ -1536,30 +1564,6 @@
             Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next())
       }
 
-    def write_session_sources(db: SQL.Database, session_base: Base): Unit = {
-      val sources =
-        for ((path, digest) <- session_base.session_sources) yield {
-          val bytes = Bytes.read(path)
-          if (bytes.sha1_digest != digest) error("Erratic change of file content: " + path)
-          val name = path.implode_symbolic
-          val (compressed, body) =
-            bytes.maybe_compress(Compress.Options_Zstd(), cache = cache.compress)
-          (name, digest, compressed, body)
-        }
-      db.transaction {
-        for ((name, digest, compressed, body) <- sources) {
-          db.using_statement(Sources.table.insert()) { stmt =>
-            stmt.string(1) = session_base.session_name
-            stmt.string(2) = name
-            stmt.string(3) = digest.toString
-            stmt.bool(4) = compressed
-            stmt.bytes(5) = body
-            stmt.execute()
-          }
-        }
-      }
-    }
-
     def write_session_info(
       db: SQL.Database,
       session_name: String,
@@ -1629,6 +1633,25 @@
       else None
     }
 
+
+    /* session sources */
+
+    def write_sources(db: SQL.Database, session_base: Base): Unit = {
+      val files = Sources.read_files(session_base, cache = cache.compress)
+      db.transaction {
+        for ((name, file) <- files) {
+          db.using_statement(Sources.table.insert()) { stmt =>
+            stmt.string(1) = session_base.session_name
+            stmt.string(2) = name
+            stmt.string(3) = file.digest.toString
+            stmt.bool(4) = file.compressed
+            stmt.bytes(5) = file.body
+            stmt.execute()
+          }
+        }
+      }
+    }
+
     def read_sources(db: SQL.Database, session_name: String, name: String): Option[Bytes] = {
       val sql =
         Sources.table.select(List(Sources.compressed, Sources.body),
--- a/src/Pure/Tools/build.scala	Mon Jan 02 13:54:40 2023 +0100
+++ b/src/Pure/Tools/build.scala	Mon Jan 02 15:05:15 2023 +0100
@@ -348,7 +348,7 @@
 
             // write database
             using(store.open_database(session_name, output = true))(db =>
-              store.write_session_info(db, build_deps(session_name),
+              store.write_session_info(db, session_name,
                 build_log =
                   if (process_result.timeout) build_log.error("Timeout") else build_log,
                 build =
@@ -418,13 +418,17 @@
                 else if (ancestor_results.forall(_.ok) && !progress.stopped) {
                   progress.echo((if (do_store) "Building " else "Running ") + session_name + " ...")
 
+                  val session_background = build_deps.background(session_name)
+
                   store.clean_output(session_name)
-                  using(store.open_database(session_name, output = true))(
-                    store.init_session_info(_, session_name))
+                  using(store.open_database(session_name, output = true)) { db =>
+                    store.init_session_info(db, session_name)
+                    store.write_sources(db, session_background.base)
+                  }
 
                   val numa_node = numa_nodes.next(used_node)
                   val job =
-                    new Build_Job(progress, build_deps.background(session_name), store, do_store,
+                    new Build_Job(progress, session_background, store, do_store,
                       log, session_setup, numa_node, queue.command_timings(session_name))
                   loop(pending, running + (session_name -> (ancestor_heaps, job)), results)
                 }