merged
author      wenzelm
date        Mon, 02 Oct 2017 16:47:46 +0200
changeset 66754 78c74f9e960a
parent 66741 c90fb8bee1dd (current diff)
parent 66753 f7759beab4f2 (diff)
child 66755 1ceedf710564
merged
--- a/NEWS	Mon Oct 02 03:58:55 2017 +0200
+++ b/NEWS	Mon Oct 02 16:47:46 2017 +0200
@@ -30,8 +30,9 @@
 * Windows and Cygwin is for x86_64 only. Old 32bit platform support has
 been discontinued.
 
-* Command-line tool "isabelle build" supports option -B for base
-sessions: all descendants are included.
+* Command-line tool "isabelle build" supports new options:
+  - option -B NAME: include session NAME and all descendants
+  - option -S: only observe changes of sources, not heap images
 
 
 New in Isabelle2017 (October 2017)
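
For orientation, the new command-line switches correspond to parameters that
this changeset threads through Build.build in src/Pure/Tools/build.scala
below: -S maps to soft_build and -a to all_sessions. A minimal Isabelle/Scala
sketch of the programmatic counterpart of "isabelle build -a -S"; the
parameter names are taken from the diff, the remaining parameters and their
defaults are assumed to be as in the rest of build.scala:

  // hypothetical snippet, not part of this changeset: invoke a soft build of
  // all sessions from Isabelle/Scala; only all_sessions and soft_build are
  // confirmed by the diff below, everything else is left at its defaults
  import isabelle._

  val results = Build.build(Options.init(), all_sessions = true, soft_build = true)
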
--- a/README_REPOSITORY	Mon Oct 02 03:58:55 2017 +0200
+++ b/README_REPOSITORY	Mon Oct 02 16:47:46 2017 +0200
@@ -300,7 +300,9 @@
   ./bin/isabelle build -a -j2 -o threads=4  #test on multiple cores (example)
 
 See also the chapter on Isabelle sessions and build management in the
-"system" manual.
+"system" manual. The build option -S is particularly useful for quick
+tests of individual commits, e.g. for each step of a longer chain of
+changes, but the final push always requires a full test as above!
 
 Note that implicit dependencies on Isabelle components are specified
 via catalog files in $ISABELLE_HOME/Admin/components/ as part of the
--- a/src/Doc/ROOT	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/Doc/ROOT	Mon Oct 02 16:47:46 2017 +0200
@@ -48,7 +48,6 @@
   options [document_variants = "corec"]
   sessions
     Datatypes
-  theories [document = false] Datatypes.Setup
   theories Corec
   document_files (in "..")
     "prepare_document"
@@ -248,9 +247,6 @@
   options [document_variants = "sugar"]
   sessions
     "HOL-Library"
-  theories [document = false]
-    "HOL-Library.LaTeXsugar"
-    "HOL-Library.OptionalSugar"
   theories Sugar
   document_files (in "..")
     "prepare_document"
--- a/src/Doc/System/Sessions.thy	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/Doc/System/Sessions.thy	Mon Oct 02 16:47:46 2017 +0200
@@ -284,6 +284,7 @@
     -D DIR       include session directory and select its sessions
     -N           cyclic shuffling of NUMA CPU nodes (performance tuning)
     -R           operate on requirements of selected sessions
+    -S           soft build: only observe changes of sources, not heap images
     -X NAME      exclude sessions from group NAME and all descendants
     -a           select all sessions
     -b           build heap images
@@ -350,6 +351,11 @@
   in the given directories.
 
   \<^medskip>
+  Option \<^verbatim>\<open>-S\<close> indicates a ``soft build'': the selection is restricted to
+  those sessions that have changed sources (according to actually imported
+  theories). The status of heap images is ignored.
+
+  \<^medskip>
   The build process depends on additional options
   (\secref{sec:system-options}) that are passed to the prover eventually. The
   settings variable @{setting_ref ISABELLE_BUILD_OPTIONS} allows to provide
@@ -411,6 +417,14 @@
   @{verbatim [display] \<open>isabelle build -b -g main\<close>}
 
   \<^smallskip>
+  Build all descendants (and requirements) of \<^verbatim>\<open>FOL\<close> and \<^verbatim>\<open>ZF\<close>:
+  @{verbatim [display] \<open>isabelle build -B FOL -B ZF\<close>}
+
+  \<^smallskip>
+  Build all sessions where sources have changed (ignoring heaps):
+  @{verbatim [display] \<open>isabelle build -a -S\<close>}
+
+  \<^smallskip>
   Provide a general overview of the status of all Isabelle sessions, without
   building anything:
   @{verbatim [display] \<open>isabelle build -a -n -v\<close>}
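
To make the ``soft build'' criterion concrete: a session counts as current
when its last recorded build succeeded and a single stamp over the digests of
all relevant sources (its own session sources, its imported sources, and its
entry in the ROOT file via meta_digest) still matches; heap images are never
consulted. A self-contained Scala sketch of that technique, deliberately
independent of the Isabelle API (names like SoftBuildStamp are illustrative
only); compare with sources_stamp and the build.sources comparison in
src/Pure/Tools/build.scala below:

  // self-contained illustration, not Isabelle API: derive one stamp from the
  // digests of all relevant source files and compare it with the stamp
  // recorded for the last successful build
  import java.io.File
  import java.nio.file.Files
  import java.security.MessageDigest

  object SoftBuildStamp {
    def sha1_hex(bytes: Array[Byte]): String =
      MessageDigest.getInstance("SHA-1").digest(bytes).map("%02x".format(_)).mkString

    def file_digest(file: File): String =
      sha1_hex(Files.readAllBytes(file.toPath))

    // one stamp over all per-file digests, order-independent via sorting
    def sources_stamp(files: List[File]): String =
      sha1_hex(files.map(file_digest).sorted.mkString("\n").getBytes("UTF-8"))

    // a session is up to date iff the last build succeeded and the recorded
    // stamp matches; heap image timestamps play no role here
    def up_to_date(last_ok: Boolean, last_stamp: String, files: List[File]): Boolean =
      last_ok && last_stamp == sources_stamp(files)
  }
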
--- a/src/HOL/ROOT	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/HOL/ROOT	Mon Oct 02 16:47:46 2017 +0200
@@ -121,8 +121,6 @@
     Exp demonstrates the use of iterated inductive definitions to reason about
     mutually recursive relations.
   *}
-  theories [document = false]
-    "HOL-Library.Old_Datatype"
   theories [quick_and_dirty]
     Common_Patterns
   theories
@@ -142,12 +140,6 @@
 
 session "HOL-IMP" (timing) in IMP = "HOL-Library" +
   options [document_variants = document]
-  theories [document = false]
-    "HOL-Library.While_Combinator"
-    "HOL-Library.Char_ord"
-    "HOL-Library.List_lexord"
-    "HOL-Library.Quotient_List"
-    "HOL-Library.Extended"
   theories
     BExp
     ASM
@@ -200,8 +192,6 @@
     "HOL-Number_Theory"
   theories [document = false]
     Less_False
-    "HOL-Library.Multiset"
-    "HOL-Number_Theory.Fib"
   theories
     Sorting
     Balance
@@ -227,11 +217,6 @@
   *}
   sessions
     "HOL-Algebra"
-  theories [document = false]
-    "HOL-Library.FuncSet"
-    "HOL-Library.Multiset"
-    "HOL-Algebra.Ring"
-    "HOL-Algebra.FiniteProduct"
   theories
     Number_Theory
   document_files
@@ -310,11 +295,6 @@
 
     The Isabelle Algebraic Library.
   *}
-  theories [document = false]
-    (* Preliminaries from set and number theory *)
-    "HOL-Library.FuncSet"
-    "HOL-Computational_Algebra.Primes"
-    "HOL-Library.Permutation"
   theories
     (* Orders and Lattices *)
     Galois_Connection    (* Knaster-Tarski theorem and Galois connections *)
@@ -403,10 +383,6 @@
 
 session "HOL-Imperative_HOL" in Imperative_HOL = "HOL-Library" +
   options [print_mode = "iff,no_brackets"]
-  theories [document = false]
-    "HOL-Library.Countable"
-    "HOL-Library.Monad_Syntax"
-    "HOL-Library.LaTeXsugar"
   theories Imperative_HOL_ex
   document_files "root.bib" "root.tex"
 
@@ -433,13 +409,7 @@
   *}
   options [parallel_proofs = 0, quick_and_dirty = false]
   sessions
-    "HOL-Library"
     "HOL-Computational_Algebra"
-  theories [document = false]
-    "HOL-Library.Code_Target_Numeral"
-    "HOL-Library.Monad_Syntax"
-    "HOL-Computational_Algebra.Primes"
-    "HOL-Library.Open_State_Syntax"
   theories
     Greatest_Common_Divisor
     Warshall
@@ -490,8 +460,6 @@
   *}
   sessions
     "HOL-Eisbach"
-  theories [document = false]
-    "HOL-Library.While_Combinator"
   theories
     MicroJava
   document_files
@@ -555,13 +523,11 @@
   theories CompleteLattice
   document_files "root.tex"
 
-session "HOL-ex" (timing) in ex = "HOL-Library" +
+session "HOL-ex" (timing) in ex = "HOL-Number_Theory" +
   description {*
     Miscellaneous examples for Higher-Order Logic.
   *}
   options [document = false]
-  sessions
-    "HOL-Number_Theory"
   theories
     Adhoc_Overloading_Examples
     Antiquote
@@ -653,9 +619,6 @@
     Miscellaneous Isabelle/Isar examples.
   *}
   options [quick_and_dirty]
-  theories [document = false]
-    "HOL-Library.Lattice_Syntax"
-    "HOL-Computational_Algebra.Primes"
   theories
     Knaster_Tarski
     Peirce
@@ -694,8 +657,6 @@
   description {*
     Verification of the SET Protocol.
   *}
-  theories [document = false]
-    "HOL-Library.Nat_Bijection"
   theories
     SET_Protocol
   document_files "root.tex"
@@ -745,12 +706,6 @@
     ATP_Problem_Import
 
 session "HOL-Probability" (main timing) in "Probability" = "HOL-Analysis" +
-  theories [document = false]
-    "HOL-Library.Countable"
-    "HOL-Library.Permutation"
-    "HOL-Library.Order_Continuity"
-    "HOL-Library.Diagonal_Subsequence"
-    "HOL-Library.Finite_Map"
   theories
     Probability (global)
   document_files "root.tex"
@@ -761,10 +716,8 @@
     Koepf_Duermuth_Countermeasure
     Measure_Not_CCC
 
-session "HOL-Nominal" in Nominal = HOL +
+session "HOL-Nominal" in Nominal = "HOL-Library" +
   options [document = false]
-  sessions
-    "HOL-Library"
   theories
     Nominal
 
@@ -1064,18 +1017,13 @@
     "Examples/Finite"
     "Examples/T2_Spaces"
 
-session HOLCF (main timing) in HOLCF = HOL +
+session HOLCF (main timing) in HOLCF = "HOL-Library" +
   description {*
     Author:     Franz Regensburger
     Author:     Brian Huffman
 
     HOLCF -- a semantic extension of HOL by the LCF logic.
   *}
-  sessions
-    "HOL-Library"
-  theories [document = false]
-    "HOL-Library.Nat_Bijection"
-    "HOL-Library.Countable"
   theories
     HOLCF (global)
   document_files "root.tex"
--- a/src/Pure/Thy/sessions.scala	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/Pure/Thy/sessions.scala	Mon Oct 02 16:47:46 2017 +0200
@@ -114,6 +114,7 @@
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
     known: Known = Known.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
+    imported_sources: List[(Path, SHA1.Digest)] = Nil,
     sources: List[(Path, SHA1.Digest)] = Nil,
     session_graph: Graph_Display.Graph = Graph_Display.empty_graph,
     errors: List[String] = Nil,
@@ -147,7 +148,12 @@
   {
     def is_empty: Boolean = session_bases.isEmpty
     def apply(name: String): Base = session_bases(name)
-    def sources(name: String): List[SHA1.Digest] = session_bases(name).sources.map(_._2)
+
+    def imported_sources(name: String): List[SHA1.Digest] =
+      session_bases(name).imported_sources.map(_._2)
+
+    def sources(name: String): List[SHA1.Digest] =
+      session_bases(name).sources.map(_._2)
 
     def errors: List[String] =
       (for {
@@ -172,6 +178,25 @@
       check_keywords: Set[String] = Set.empty,
       global_theories: Map[String, String] = Map.empty): Deps =
   {
+    var cache_sources = Map.empty[JFile, SHA1.Digest]
+    def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] =
+    {
+      for {
+        path <- paths
+        file = path.file
+        if cache_sources.isDefinedAt(file) || file.isFile
+      }
+      yield {
+        cache_sources.get(file) match {
+          case Some(digest) => (path, digest)
+          case None =>
+            val digest = SHA1.digest(file)
+            cache_sources = cache_sources + (file -> digest)
+            (path, digest)
+        }
+      }
+    }
+
     val session_bases =
       (Map.empty[String, Base] /: sessions.imports_topological_order)({
         case (session_bases, info) =>
@@ -197,14 +222,9 @@
             }
 
             val thy_deps =
-            {
-              val root_theories =
-                info.theories.flatMap({ case (_, thys) =>
-                  thys.map({ case (thy, pos) =>
-                    (resources.import_name(info.theory_qualifier, info.dir.implode, thy), pos) })
-                })
-              resources.thy_info.dependencies(root_theories)
-            }
+              resources.thy_info.dependencies(
+                for { (_, thys) <- info.theories; (thy, pos) <- thys }
+                yield (resources.import_name(info.theory_qualifier, info.dir.implode, thy), pos))
 
             val overall_syntax = thy_deps.overall_syntax
 
@@ -219,13 +239,15 @@
               }
               else Nil
 
-            val all_files =
+            val session_files =
               (theory_files ::: loaded_files.flatMap(_._2) :::
                 info.files.map(file => info.dir + file) :::
                 info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)
 
+            val imported_files = if (inlined_files) thy_deps.imported_files else Nil
+
             if (list_files)
-              progress.echo(cat_lines(all_files.map(_.implode).sorted.map("  " + _)))
+              progress.echo(cat_lines(session_files.map(_.implode).sorted.map("  " + _)))
 
             if (check_keywords.nonEmpty) {
               Check_Keywords.check_keywords(
@@ -270,10 +292,8 @@
                 theories = thy_deps.names,
                 loaded_files = loaded_files)
 
-            val sources =
-              for (p <- all_files if p.is_file) yield (p, SHA1.digest(p.file))
             val sources_errors =
-              for (p <- all_files if !p.is_file) yield "No such file: " + p
+              for (p <- session_files if !p.is_file) yield "No such file: " + p
 
             val base =
               Base(
@@ -282,7 +302,8 @@
                 loaded_theories = thy_deps.loaded_theories,
                 known = known,
                 overall_syntax = overall_syntax,
-                sources = sources,
+                imported_sources = check_sources(imported_files),
+                sources = check_sources(session_files),
                 session_graph = session_graph,
                 errors = thy_deps.errors ::: sources_errors,
                 imports = Some(imports_base))
@@ -887,7 +908,7 @@
           stmt.bytes(4) = Properties.compress(build_log.ml_statistics)
           stmt.bytes(5) = Properties.compress(build_log.task_statistics)
           stmt.bytes(6) = Build_Log.compress_errors(build_log.errors)
-          stmt.string(7) = cat_lines(build.sources)
+          stmt.string(7) = build.sources
           stmt.string(8) = cat_lines(build.input_heaps)
           stmt.string(9) = build.output_heap getOrElse ""
           stmt.int(10) = build.return_code
@@ -912,6 +933,7 @@
       Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors))
 
     def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] =
+    {
       db.using_statement(Session_Info.table.select(Session_Info.build_columns,
         Session_Info.session_name.where_equal(name)))(stmt =>
       {
@@ -920,11 +942,12 @@
         else {
           Some(
             Build.Session_Info(
-              split_lines(res.string(Session_Info.sources)),
+              res.string(Session_Info.sources),
               split_lines(res.string(Session_Info.input_heaps)),
               res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) },
               res.int(Session_Info.return_code)))
         }
       })
+    }
   }
 }
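
The check_sources helper introduced above memoizes per-file SHA1 digests, so
files that occur both among a session's own sources and its imported sources
(or are shared across sessions within one Deps computation) are hashed only
once, and non-existent files are silently skipped. A self-contained Scala
sketch of the same caching pattern, outside the Isabelle API (class and
method names are illustrative only):

  // self-contained sketch, not Isabelle API: cache file digests so repeated
  // occurrences of the same file are hashed only once
  import java.io.File
  import java.nio.file.Files
  import java.security.MessageDigest

  class DigestCache {
    private var cache = Map.empty[File, String]

    private def sha1(file: File): String =
      MessageDigest.getInstance("SHA-1").digest(Files.readAllBytes(file.toPath))
        .map("%02x".format(_)).mkString

    // skip non-existent files, reuse cached digests for the rest
    def check(files: List[File]): List[(File, String)] =
      for (file <- files if cache.isDefinedAt(file) || file.isFile)
      yield {
        val digest = cache.getOrElse(file, sha1(file))
        cache += (file -> digest)
        (file, digest)
      }
  }

A single DigestCache instance would be created per dependency run and its
check method applied to both the imported and the session file lists, just as
check_sources is applied twice in the Base construction above.
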
--- a/src/Pure/Thy/thy_info.scala	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/Pure/Thy/thy_info.scala	Mon Oct 02 16:47:46 2017 +0200
@@ -66,6 +66,17 @@
           names.map(name => resources.loaded_files(loaded_theories.get_node(name.theory), name)))
     }
 
+    def imported_files: List[Path] =
+    {
+      val base = resources.session_base
+      val base_theories =
+        loaded_theories.all_preds(names.map(_.theory)).
+          filter(base.loaded_theories.defined(_))
+
+      base_theories.map(theory => base.known.theories(theory).path) :::
+      base_theories.flatMap(base.known.loaded_files(_))
+    }
+
     lazy val overall_syntax: Outer_Syntax =
       Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node(_)))
 
--- a/src/Pure/Tools/build.scala	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/Pure/Tools/build.scala	Mon Oct 02 16:47:46 2017 +0200
@@ -24,10 +24,13 @@
   /* persistent build info */
 
   sealed case class Session_Info(
-    sources: List[String],
+    sources: String,
     input_heaps: List[String],
     output_heap: Option[String],
     return_code: Int)
+  {
+    def ok: Boolean = return_code == 0
+  }
 
 
   /* queue with scheduling information */
@@ -352,6 +355,7 @@
     list_files: Boolean = false,
     check_keywords: Set[String] = Set.empty,
     no_build: Boolean = false,
+    soft_build: Boolean = false,
     system_mode: Boolean = false,
     verbose: Boolean = false,
     pide: Boolean = false,
@@ -364,24 +368,58 @@
     sessions: List[String] = Nil,
     selection: Sessions.Selection = Sessions.Selection.empty): Results =
   {
-    /* session selection and dependencies */
+    val build_options = options.int.update("completion_limit", 0).bool.update("ML_statistics", true)
 
-    val build_options = options.int.update("completion_limit", 0).bool.update("ML_statistics", true)
+    val store = Sessions.store(system_mode)
+
+
+    /* session selection and dependencies */
 
     val full_sessions = Sessions.load(build_options, dirs, select_dirs)
 
-    val (selected, selected_sessions) =
-      full_sessions.selection(
-          Sessions.Selection(requirements, all_sessions, base_sessions, exclude_session_groups,
-            exclude_sessions, session_groups, sessions) ++ selection)
+    def sources_stamp(deps: Sessions.Deps, name: String): String =
+    {
+      val digests =
+        full_sessions(name).meta_digest :: deps.sources(name) ::: deps.imported_sources(name)
+      SHA1.digest(cat_lines(digests.map(_.toString).sorted)).toString
+    }
+
+    val (selected, selected_sessions, deps) =
+    {
+      val (selected0, selected_sessions0) =
+        full_sessions.selection(
+            Sessions.Selection(requirements, all_sessions, base_sessions, exclude_session_groups,
+              exclude_sessions, session_groups, sessions) ++ selection)
+
+      val deps0 =
+        Sessions.deps(selected_sessions0, progress = progress, inlined_files = true,
+          verbose = verbose, list_files = list_files, check_keywords = check_keywords,
+          global_theories = full_sessions.global_theories).check_errors
 
-    val deps =
-      Sessions.deps(selected_sessions, progress = progress, inlined_files = true,
-        verbose = verbose, list_files = list_files, check_keywords = check_keywords,
-        global_theories = full_sessions.global_theories).check_errors
+      if (soft_build) {
+        val outdated =
+          selected0.flatMap(name =>
+            store.find_database(name) match {
+              case Some(database) =>
+                using(SQLite.open_database(database))(store.read_build(_, name)) match {
+                  case Some(build)
+                  if build.ok && build.sources == sources_stamp(deps0, name) => None
+                  case _ => Some(name)
+                }
+              case None => Some(name)
+            })
+        val (selected, selected_sessions) =
+          full_sessions.selection(Sessions.Selection(sessions = outdated))
+        val deps =
+          Sessions.deps(selected_sessions, inlined_files = true,
+            global_theories = full_sessions.global_theories).check_errors
+        (selected, selected_sessions, deps)
+      }
+      else (selected0, selected_sessions0, deps0)
+    }
 
-    def sources_stamp(name: String): List[String] =
-      (selected_sessions(name).meta_digest :: deps.sources(name)).map(_.toString).sorted
+
+    /* check unknown files */
 
     if (check_unknown_files) {
       val source_files =
@@ -399,7 +437,6 @@
 
     /* main build process */
 
-    val store = Sessions.store(system_mode)
     val queue = Queue(progress, selected_sessions, store)
 
     store.prepare_output()
@@ -501,7 +538,8 @@
                       parse_session_info(
                         command_timings = true, ml_statistics = true, task_statistics = true),
                   build =
-                    Session_Info(sources_stamp(name), input_heaps, heap_stamp, process_result.rc)))
+                    Session_Info(sources_stamp(deps, name), input_heaps, heap_stamp,
+                      process_result.rc)))
             }
 
             // messages
@@ -533,8 +571,8 @@
                       using(SQLite.open_database(database))(store.read_build(_, name)) match {
                         case Some(build) =>
                           val current =
-                            build.return_code == 0 &&
-                            build.sources == sources_stamp(name) &&
+                            build.ok &&
+                            build.sources == sources_stamp(deps, name) &&
                             build.input_heaps == ancestor_heaps &&
                             build.output_heap == heap_stamp &&
                             !(do_output && heap_stamp.isEmpty)
@@ -633,6 +671,7 @@
     var numa_shuffling = false
     var pide = false
     var requirements = false
+    var soft_build = false
     var exclude_session_groups: List[String] = Nil
     var all_sessions = false
     var build_heap = false
@@ -657,6 +696,7 @@
     -N           cyclic shuffling of NUMA CPU nodes (performance tuning)
     -P           build via PIDE protocol
     -R           operate on requirements of selected sessions
+    -S           soft build: only observe changes of sources, not heap images
     -X NAME      exclude sessions from group NAME and all descendants
     -a           select all sessions
     -b           build heap images
@@ -680,6 +720,7 @@
       "N" -> (_ => numa_shuffling = true),
       "P" -> (_ => pide = true),
       "R" -> (_ => requirements = true),
+      "S" -> (_ => soft_build = true),
       "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
       "a" -> (_ => all_sessions = true),
       "b" -> (_ => build_heap = true),
@@ -721,6 +762,7 @@
           list_files = list_files,
           check_keywords = check_keywords,
           no_build = no_build,
+          soft_build = soft_build,
           system_mode = system_mode,
           verbose = verbose,
           pide = pide,
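
The soft_build branch above is a two-pass selection: dependencies are first
computed for the requested sessions, every session whose stored stamp still
matches a successful build is dropped, and selection plus dependencies are
then recomputed for the remaining (outdated) subset. A self-contained sketch
of the filtering step, with hypothetical names (Record, outdated) standing in
for Build.Session_Info and the inline logic:

  // self-contained sketch, not Isabelle API: keep only sessions whose
  // recorded build is missing, failed, or has a different sources stamp
  final case class Record(ok: Boolean, sources: String)

  def outdated(
    requested: List[String],
    stamp: String => String,            // current stamp per session
    stored: String => Option[Record]    // last recorded build, if any
  ): List[String] =
    requested.filter(name =>
      stored(name) match {
        case Some(record) => !(record.ok && record.sources == stamp(name))
        case None => true
      })

In the changeset itself the outdated names are fed back into
full_sessions.selection and Sessions.deps, so the restricted build still
operates on a consistent session structure.
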
--- a/src/ZF/ROOT	Mon Oct 02 03:58:55 2017 +0200
+++ b/src/ZF/ROOT	Mon Oct 02 16:47:46 2017 +0200
@@ -143,9 +143,10 @@
     Glynn Winskel. The Formal Semantics of Programming Languages.
     MIT Press, 1993.
   *}
-  options [document = false]
   theories Equiv
-  document_files "root.tex" "root.bib"
+  document_files
+    "root.tex"
+    "root.bib"
 
 session "ZF-Induct" (ZF) in Induct = ZF +
   description {*