merged
authorwenzelm
Thu, 16 Mar 2023 17:12:06 +0100
changeset 77682 a76f49a03448
parent 77672 34176328fc67 (current diff)
parent 77681 1db732e6c3d2 (diff)
child 77683 3e8e749935fc
child 77685 05329cd9db4b
merged
NEWS
--- a/NEWS	Thu Mar 16 13:37:49 2023 +0100
+++ b/NEWS	Thu Mar 16 17:12:06 2023 +0100
@@ -274,13 +274,16 @@
 
 *** System ***
 
-* Session builds observe dependency of options that contribute to the
-formal content. This is determined via option tags specified in
-etc/options, notably "content" or "document". A change of relavant
-session options causes a fresh build. For example:
-
-  isabelle build -o show_types FOL
+* System option "build_through" determines if session builds should
+observe dependency of options that contribute to the formal content.
+This is specified via option tags given in etc/options (e.g. see
+"isabelle options -l -t content,document"). A change of such options
+causes a fresh build. For example:
+
   isabelle build FOL
+  isabelle build -o show_types FOL  # unchanged: no dependency on options
+  isabelle build -o build_thorough -o show_types FOL  # changed
+  isabelle build -o build_thorough FOL  # changed
 
 * The command-line tool "isabelle update" is now directly based on
 "isabelle build" instead of "isabelle dump". Thus it has become more
@@ -294,6 +297,9 @@
 "isabelle docker_build", to emphasize its non-relation to "isabelle
 build".
 
+* System option "build_pide_reports" has been discontinued: it coincides
+with "pide_reports".
+
 * System option "ML_process_policy" has been renamed to
 "process_policy", as it may affect other processes as well (notably in
 "isabelle build").
--- a/etc/options	Thu Mar 16 13:37:49 2023 +0100
+++ b/etc/options	Thu Mar 16 17:12:06 2023 +0100
@@ -50,6 +50,9 @@
 
 section "Prover Output"
 
+option pide_reports : bool = true for content
+  -- "enable reports of PIDE markup"
+
 option show_types : bool = false for content
   -- "show type constraints when printing terms"
 option show_sorts : bool = false for content
@@ -171,11 +174,8 @@
 
 section "Build Process"
 
-option pide_reports : bool = true for content
-  -- "report PIDE markup (in ML)"
-
-option build_pide_reports : bool = true for content
-  -- "report PIDE markup (in batch build)"
+option build_thorough : bool = false
+  -- "observe dependencies on options with tag 'content' or 'document'"
 
 option build_hostname : string = ""
   -- "alternative hostname for build process (default $ISABELLE_HOSTNAME)"
--- a/src/Pure/Admin/build_log.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/Admin/build_log.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -631,7 +631,7 @@
 
   object Data {
     def build_log_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
-      SQL.Table("isabelle_build_log_" + name, columns, body)
+      SQL.Table("isabelle_build_log" + if_proper(name, "_" + name), columns, body)
 
 
     /* main content */
@@ -797,7 +797,7 @@
           b_table.query_named + SQL.join_inner + sessions_table +
           " ON " + log_name(b_table) + " = " + log_name(sessions_table))
 
-      SQL.Table("isabelle_build_log", c_columns ::: List(ml_statistics),
+      build_log_table("", c_columns ::: List(ml_statistics),
         {
           SQL.select(c_columns.map(_.apply(c_table)) ::: List(ml_statistics)) +
           c_table.query_named + SQL.join_outer + ml_statistics_table + " ON " +
--- a/src/Pure/Admin/isabelle_devel.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/Admin/isabelle_devel.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -54,6 +54,7 @@
   def build_log_database(options: Options, log_dirs: List[Path]): Unit = {
     val store = Build_Log.store(options)
     using(store.open_database()) { db =>
+      db.vacuum()
       store.update_database(db, log_dirs)
       store.update_database(db, log_dirs, ml_statistics = true)
       store.snapshot_database(db, root + Path.explode(BUILD_LOG_DB))
--- a/src/Pure/General/sha1.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/General/sha1.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -67,6 +67,8 @@
 
     def :::(other: Shasum): Shasum = new Shasum(other.rep ::: rep)
 
+    def filter(pred: String => Boolean): Shasum = new Shasum(rep.filter(pred))
+
     def digest: Digest = {
       rep match {
         case List(s)
--- a/src/Pure/General/sql.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/General/sql.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -442,8 +442,15 @@
       result.toList
     }
 
-    def create_table(table: Table, strict: Boolean = false, sql: Source = ""): Unit =
+    def create_table(table: Table, strict: Boolean = false, sql: Source = ""): Unit = {
       execute_statement(table.create(strict, sql_type) + SQL.separate(sql))
+      if (is_postgresql) {
+        for (column <- table.columns if column.T == SQL.Type.Bytes) {
+          execute_statement(
+            "ALTER TABLE " + table + " ALTER COLUMN " + column + " SET STORAGE EXTERNAL")
+        }
+      }
+    }
 
     def create_index(table: Table, name: String, columns: List[Column],
         strict: Boolean = false, unique: Boolean = false): Unit =
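
On PostgreSQL, the create_table change above now issues an additional ALTER
statement per Bytes column, so that bytea values use STORAGE EXTERNAL, i.e.
out-of-line TOAST storage without server-side compression. This is consistent
with the export.scala change further below, which drops the former special case
of disabling application-level compression on PostgreSQL. A standalone sketch
of the kind of statement involved (plain JDBC; the connection URL, credentials,
and table/column names are hypothetical):

  // Sketch only: issue the storage-mode change by hand via JDBC.
  // EXTERNAL keeps large bytea values out of line (TOAST) but does not compress
  // them on the server, leaving compression of the blobs to the application.
  import java.sql.DriverManager

  object Storage_External_Sketch {
    def main(args: Array[String]): Unit = {
      val db = DriverManager.getConnection(
        "jdbc:postgresql://localhost/isabelle_build", "user", "password")
      try {
        val stmt = db.createStatement()
        stmt.execute("ALTER TABLE isabelle_exports ALTER COLUMN body SET STORAGE EXTERNAL")
        stmt.close()
      }
      finally db.close()
    }
  }
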
--- a/src/Pure/ML/ml_compiler.ML	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/ML/ml_compiler.ML	Thu Mar 16 17:12:06 2023 +0100
@@ -118,7 +118,7 @@
       |> Output.report;
     val _ =
       if not (null persistent_reports) andalso redirect andalso
-        not (Options.default_bool "build_pide_reports") andalso Future.enabled ()
+        not (Options.default_bool "pide_reports") andalso Future.enabled ()
       then
         Execution.print
           {name = "ML_Compiler.report", pos = Position.thread_data (), pri = Task_Queue.urgent_pri}
--- a/src/Pure/Thy/export.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/Thy/export.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -235,9 +235,8 @@
             (results, true)
           })
 
-    def make_entry(session_name: String, args0: Protocol.Export.Args, body: Bytes): Unit = {
+    def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
       if (!progress.stopped && !body.is_empty) {
-        val args = if (db.is_postgresql) args0.copy(compress = false) else args0
         consumer.send(Entry.make(session_name, args, body, cache) -> args.strict)
       }
     }
--- a/src/Pure/Thy/sessions.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/Thy/sessions.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -258,6 +258,7 @@
               List(
                 Info.make(
                   Chapter_Defs.empty,
+                  Options.init0(),
                   info.options,
                   augment_options = _ => Nil,
                   dir_selected = false,
@@ -569,7 +570,7 @@
     def get(name: String): Option[Base] = session_bases.get(name)
 
     def sources_shasum(name: String): SHA1.Shasum = {
-      val meta_info = SHA1.shasum_meta_info(sessions_structure(name).meta_digest)
+      val meta_info = sessions_structure(name).meta_info
       val sources =
         SHA1.shasum_sorted(
           for ((path, digest) <- apply(name).all_sources)
@@ -597,6 +598,17 @@
 
   /* cumulative session info */
 
+  val BUILD_PREFS = "<build_prefs>"
+
+  def eq_sources(options: Options, shasum1: SHA1.Shasum, shasum2: SHA1.Shasum): Boolean =
+    if (options.bool("build_thorough")) shasum1 == shasum2
+    else {
+      def trim(shasum: SHA1.Shasum): SHA1.Shasum =
+        shasum.filter(s => !s.endsWith(BUILD_PREFS))
+
+      trim(shasum1) == trim(shasum2)
+    }
+
   sealed case class Chapter_Info(
     name: String,
     pos: Position.T,
@@ -608,6 +620,7 @@
   object Info {
     def make(
       chapter_defs: Chapter_Defs,
+      options0: Options,
       options: Options,
       augment_options: String => List[Options.Spec],
       dir_selected: Boolean,
@@ -630,6 +643,9 @@
         val session_prefs =
           session_options.make_prefs(defaults = session_options0, filter = _.session_content)
 
+        val build_prefs =
+          session_options.make_prefs(defaults = options0, filter = _.session_content)
+
         val theories =
           entry.theories.map({ case (opts, thys) =>
             (session_options ++ opts,
@@ -664,9 +680,11 @@
         val meta_digest =
           SHA1.digest(
             (name, chapter, entry.parent, entry.directories, entry.options, entry.imports,
-              entry.theories_no_position, conditions, entry.document_theories_no_position,
-              entry.document_files, session_prefs)
-            .toString)
+              entry.theories_no_position, conditions, entry.document_theories_no_position).toString)
+
+        val meta_info =
+          SHA1.shasum_meta_info(meta_digest) :::
+          SHA1.shasum(SHA1.digest(build_prefs), BUILD_PREFS)
 
         val chapter_groups = chapter_defs(chapter).groups
         val groups = chapter_groups ::: entry.groups.filterNot(chapter_groups.contains)
@@ -674,7 +692,7 @@
         Info(name, chapter, dir_selected, entry.pos, groups, session_path,
           entry.parent, entry.description, directories, session_options, session_prefs,
           entry.imports, theories, global_theories, entry.document_theories, document_files,
-          export_files, entry.export_classpath, meta_digest)
+          export_files, entry.export_classpath, meta_info)
       }
       catch {
         case ERROR(msg) =>
@@ -703,7 +721,7 @@
     document_files: List[(Path, Path)],
     export_files: List[(Path, Int, List[String])],
     export_classpath: List[String],
-    meta_digest: SHA1.Digest
+    meta_info: SHA1.Shasum
   ) {
     def deps: List[String] = parent.toList ::: imports
 
@@ -823,8 +841,8 @@
             }
         }
 
-      val session_prefs =
-        options.make_prefs(defaults = Options.init0(), filter = _.session_content)
+      val options0 = Options.init0()
+      val session_prefs = options.make_prefs(defaults = options0, filter = _.session_content)
 
       val root_infos = {
         var chapter = UNSORTED
@@ -834,7 +852,7 @@
             case entry: Chapter_Entry => chapter = entry.name
             case entry: Session_Entry =>
               root_infos +=
-                Info.make(chapter_defs, options, augment_options,
+                Info.make(chapter_defs, options0, options, augment_options,
                   root.select, root.dir, chapter, entry)
             case _ =>
           }
@@ -1531,7 +1549,7 @@
 
     def prepare_output(): Unit = Isabelle_System.make_directory(output_dir + Path.basic("log"))
 
-    def clean_output(name: String): (Boolean, Boolean) = {
+    def clean_output(name: String): Option[Boolean] = {
       val relevant_db =
         database_server &&
           using_option(try_open_database(name))(init_session_info(_, name)).getOrElse(false)
@@ -1544,9 +1562,7 @@
           path = dir + file if path.is_file
         } yield path.file.delete
 
-      val relevant = relevant_db || del.nonEmpty
-      val ok = del.forall(b => b)
-      (relevant, ok)
+      if (relevant_db || del.nonEmpty) Some(del.forall(identity)) else None
     }
 
     def init_output(name: String): Unit = {
@@ -1556,6 +1572,7 @@
 
     def check_output(
       name: String,
+      session_options: Options,
       sources_shasum: SHA1.Shasum,
       input_shasum: SHA1.Shasum,
       fresh_build: Boolean,
@@ -1569,7 +1586,7 @@
               val current =
                 !fresh_build &&
                 build.ok &&
-                build.sources == sources_shasum &&
+                Sessions.eq_sources(session_options, build.sources, sources_shasum) &&
                 build.input_heaps == input_shasum &&
                 build.output_heap == output_shasum &&
                 !(store_heap && output_shasum.is_empty)
@@ -1601,9 +1618,9 @@
 
     def init_session_info(db: SQL.Database, name: String): Boolean = {
       db.transaction {
-        val already_defined = session_info_defined(db, name)
+        all_tables.create_lock(db)
 
-        all_tables.create_lock(db)
+        val already_defined = session_info_defined(db, name)
 
         db.execute_statement(
           Session_Info.table.delete(sql = Session_Info.session_name.where_equal(name)))
@@ -1623,15 +1640,13 @@
 
     def session_info_exists(db: SQL.Database): Boolean = {
       val tables = db.tables
-      tables.contains(Session_Info.table.name) &&
-      tables.contains(Export.Data.table.name)
+      all_tables.forall(table => tables.contains(table.name))
     }
 
     def session_info_defined(db: SQL.Database, name: String): Boolean =
-      session_info_exists(db) &&
-        db.execute_query_statementB(
-          Session_Info.table.select(List(Session_Info.session_name),
-            sql = Session_Info.session_name.where_equal(name)))
+      db.execute_query_statementB(
+        Session_Info.table.select(List(Session_Info.session_name),
+          sql = Session_Info.session_name.where_equal(name)))
 
     def write_session_info(
       db: SQL.Database,
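
In the sessions.scala changes above, the former meta_digest becomes a meta_info
shasum with two entries: the structural digest plus a digest of the
content-relevant option preferences, labelled BUILD_PREFS ("<build_prefs>");
Sessions.eq_sources can then drop that entry for non-thorough builds. A
simplified sketch of the composition (java.security.MessageDigest stands in for
Isabelle's SHA1, and the "<meta_info>" label is an assumption about
shasum_meta_info):

  // Sketch: composing a session's meta_info shasum with a labelled build-prefs entry.
  import java.security.MessageDigest

  object Meta_Info_Sketch {
    def sha1(s: String): String =
      MessageDigest.getInstance("SHA-1")
        .digest(s.getBytes("UTF-8")).map("%02x".format(_)).mkString

    def main(args: Array[String]): Unit = {
      val meta_digest = sha1("(FOL, chapter, parent, theories, ...)")  // session structure
      val build_prefs = sha1("show_types = false\nshow_sorts = false\n")  // tagged option prefs
      val meta_info = List(meta_digest + " <meta_info>", build_prefs + " <build_prefs>")
      meta_info.foreach(println)
      // eq_sources (without build_thorough) drops the "<build_prefs>" line before comparing
    }
  }
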
--- a/src/Pure/Tools/build.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/Tools/build.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -70,11 +70,7 @@
     Isabelle_System.hostname(options.string("build_hostname"))
 
   def build_init(options: Options, cache: Term.Cache = Term.Cache.make()): Sessions.Store = {
-    val build_options =
-      options +
-        "completion_limit=0" +
-        "editor_tracing_messages=0" +
-        ("pide_reports=" + options.bool("build_pide_reports"))
+    val build_options = options + "completion_limit=0" + "editor_tracing_messages=0"
     val store = Sessions.store(build_options, cache = cache)
 
     Isabelle_Fonts.init()
@@ -130,8 +126,11 @@
             store.try_open_database(name) match {
               case Some(db) =>
                 using(db)(store.read_build(_, name)) match {
-                  case Some(build)
-                  if build.ok && build.sources == deps0.sources_shasum(name) => None
+                  case Some(build) if build.ok =>
+                    val session_options = deps0.sessions_structure(name).options
+                    val session_sources = deps0.sources_shasum(name)
+                    if (Sessions.eq_sources(session_options, build.sources, session_sources)) None
+                    else Some(name)
                   case _ => Some(name)
                 }
               case None => Some(name)
@@ -174,10 +173,10 @@
 
     if (clean_build) {
       for (name <- full_sessions.imports_descendants(full_sessions_selection)) {
-        val (relevant, ok) = store.clean_output(name)
-        if (relevant) {
-          if (ok) progress.echo("Cleaned " + name)
-          else progress.echo(name + " FAILED to clean")
+        store.clean_output(name) match {
+          case None =>
+          case Some(true) => progress.echo("Cleaned " + name)
+          case Some(false) => progress.echo(name + " FAILED to clean")
         }
       }
     }
--- a/src/Pure/Tools/build_process.scala	Thu Mar 16 13:37:49 2023 +0100
+++ b/src/Pure/Tools/build_process.scala	Thu Mar 16 17:12:06 2023 +0100
@@ -123,11 +123,13 @@
 
     def prepare_database(): Unit = {
       using_option(store.open_build_database()) { db =>
+        val shared_db = db.is_postgresql
         db.transaction {
           Data.all_tables.create_lock(db)
           Data.clean_build(db)
+          if (shared_db) store.all_tables.create_lock(db)
         }
-        db.vacuum(Data.all_tables)
+        db.vacuum(Data.all_tables ::: (if (shared_db) store.all_tables else SQL.Tables.empty))
       }
     }
 
@@ -1056,6 +1058,7 @@
 
     val (current, output_shasum) =
       store.check_output(session_name,
+        session_options = build_context.sessions_structure(session_name).options,
         sources_shasum = build_context.sources_shasum(session_name),
         input_shasum = input_shasum,
         fresh_build = build_context.fresh_build,