merged
author wenzelm
date Sat, 28 Jan 2023 22:31:40 +0100
changeset 77134 523839d6d8ff
parent 77107 4c4d40913900 (current diff)
parent 77133 536c033fb6eb (diff)
child 77135 515b6aaede32
merged
--- a/Admin/components/main	Fri Jan 27 19:16:38 2023 +0100
+++ b/Admin/components/main	Sat Jan 28 22:31:40 2023 +0100
@@ -28,7 +28,7 @@
 polyml-test-bafe319bc3a6-1
 postgresql-42.5.0
 prismjs-1.29.0
-scala-3.2.1
+scala-3.2.0-2
 smbc-0.4.1
 spass-3.8ds-2
 sqlite-jdbc-3.39.4.1
--- a/etc/build.props	Fri Jan 27 19:16:38 2023 +0100
+++ b/etc/build.props	Sat Jan 28 22:31:40 2023 +0100
@@ -44,7 +44,6 @@
   src/Pure/Admin/ci_build.scala \
   src/Pure/Admin/isabelle_cronjob.scala \
   src/Pure/Admin/isabelle_devel.scala \
-  src/Pure/Admin/jenkins.scala \
   src/Pure/Admin/other_isabelle.scala \
   src/Pure/Concurrent/consumer_thread.scala \
   src/Pure/Concurrent/counter.scala \
@@ -96,6 +95,7 @@
   src/Pure/General/rsync.scala \
   src/Pure/General/scan.scala \
   src/Pure/General/sha1.scala \
+  src/Pure/General/space.scala \
   src/Pure/General/sql.scala \
   src/Pure/General/ssh.scala \
   src/Pure/General/symbol.scala \
--- a/src/Pure/Admin/build_history.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/Admin/build_history.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -103,8 +103,10 @@
     afp_partition: Int = 0,
     isabelle_identifier: String = default_isabelle_identifier,
     ml_statistics_step: Int = 1,
-    component_repository: String = Components.default_component_repository,
-    components_base: String = Components.standard_components_base,
+    component_repository: String = Components.static_component_repository,
+    components_base: String = Components.dynamic_components_base,
+    clean_platforms: Option[List[Platform.Family.Value]] = None,
+    clean_archives: Boolean = false,
     fresh: Boolean = false,
     hostname: String = "",
     multicore_base: Boolean = false,
@@ -112,7 +114,6 @@
     arch_64: Boolean = false,
     heap: Int = default_heap,
     max_heap: Option[Int] = None,
-    init_settings: List[String] = Nil,
     more_settings: List[String] = Nil,
     more_preferences: List[String] = Nil,
     verbose: Boolean = false,
@@ -176,6 +177,13 @@
     val other_isabelle =
       Other_Isabelle(root, isabelle_identifier = isabelle_identifier, progress = progress)
 
+    def resolve_components(): Unit =
+      other_isabelle.resolve_components(
+        echo = verbose,
+        component_repository = component_repository,
+        clean_platforms = clean_platforms,
+        clean_archives = clean_archives)
+
     val build_host = proper_string(hostname) getOrElse Isabelle_System.hostname()
     val build_history_date = Date.now()
     val build_group_id = build_host + ":" + build_history_date.time.ms
@@ -186,11 +194,10 @@
 
       val component_settings =
         other_isabelle.init_components(
-          component_repository = component_repository,
           components_base = components_base,
           catalogs = Components.optional_catalogs)
-      other_isabelle.init_settings(component_settings ::: init_settings)
-      other_isabelle.resolve_components(echo = verbose)
+      other_isabelle.init_settings(component_settings)
+      resolve_components()
       val ml_platform =
         augment_settings(other_isabelle, threads, arch_64, heap, max_heap, more_settings)
 
@@ -203,7 +210,7 @@
       val isabelle_base_log = isabelle_output + Path.explode("../base_log")
 
       if (first_build) {
-        other_isabelle.resolve_components(echo = verbose)
+        resolve_components()
         other_isabelle.scala_build(fresh = fresh, echo = verbose)
 
         for {
@@ -346,7 +353,7 @@
         build_info.finished_sessions.flatMap { session_name =>
           val heap = isabelle_output + Path.explode(session_name)
           if (heap.is_file) {
-            Some("Heap " + session_name + " (" + Value.Long(heap.file.length) + " bytes)")
+            Some("Heap " + session_name + " (" + Value.Long(File.space(heap).bytes) + " bytes)")
           }
           else None
         }
@@ -394,18 +401,19 @@
     Command_Line.tool {
       var afp = false
       var multicore_base = false
-      var components_base = Components.standard_components_base
+      var components_base = Components.dynamic_components_base
       var heap: Option[Int] = None
       var max_heap: Option[Int] = None
       var multicore_list = List(default_multicore)
       var isabelle_identifier = default_isabelle_identifier
+      var clean_platforms: Option[List[Platform.Family.Value]] = None
       var afp_partition = 0
-      var component_repository = Components.default_component_repository
+      var clean_archives = false
+      var component_repository = Components.static_component_repository
       var more_settings: List[String] = Nil
       var more_preferences: List[String] = Nil
       var fresh = false
       var hostname = ""
-      var init_settings: List[String] = Nil
       var arch_64 = false
       var output_file = ""
       var ml_statistics_step = 1
@@ -420,14 +428,17 @@
     -A           include $ISABELLE_HOME/AFP directory
     -B           first multicore build serves as base for scheduling information
     -C DIR       base directory for Isabelle components (default: """ +
-      quote(Components.standard_components_base) + """)
+      quote(Components.dynamic_components_base) + """)
     -H SIZE      minimal ML heap in MB (default: """ + default_heap + """ for x86, """ +
       default_heap * 2 + """ for x86_64)
     -M MULTICORE multicore configurations (see below)
     -N NAME      alternative ISABELLE_IDENTIFIER (default: """ + default_isabelle_identifier + """)
+    -O PLATFORMS clean resolved components, retaining only the given list of
+                 platform families (separated by commas; default: do nothing)
     -P NUMBER    AFP partition number (0, 1, 2, default: 0=unrestricted)
+    -Q           clean archives of downloaded components
     -R URL       remote repository for Isabelle components (default: """ +
-      Components.default_component_repository + """)
+      Components.static_component_repository + """)
     -U SIZE      maximal ML heap in MB (default: unbounded)
     -e TEXT      additional text for generated etc/settings
     -f           fresh build of Isabelle/Scala components (recommended)
@@ -454,13 +465,14 @@
         "H:" -> (arg => heap = Some(Value.Int.parse(arg))),
         "M:" -> (arg => multicore_list = space_explode(',', arg).map(Multicore.parse)),
         "N:" -> (arg => isabelle_identifier = arg),
+        "O:" -> (arg => clean_platforms = Some(space_explode(',',arg).map(Platform.Family.parse))),
         "P:" -> (arg => afp_partition = Value.Int.parse(arg)),
+        "Q" -> (_ => clean_archives = true),
         "R:" -> (arg => component_repository = arg),
         "U:" -> (arg => max_heap = Some(Value.Int.parse(arg))),
         "e:" -> (arg => more_settings = more_settings ::: List(arg)),
         "f" -> (_ => fresh = true),
         "h:" -> (arg => hostname = arg),
-        "i:" -> (arg => init_settings = init_settings ::: List(arg)),
         "m:" ->
           {
             case "32" | "x86" => arch_64 = false
@@ -488,10 +500,11 @@
           afp = afp, afp_partition = afp_partition,
           isabelle_identifier = isabelle_identifier, ml_statistics_step = ml_statistics_step,
           component_repository = component_repository, components_base = components_base,
+          clean_platforms = clean_platforms, clean_archives = clean_archives,
           fresh = fresh, hostname = hostname, multicore_base = multicore_base,
           multicore_list = multicore_list, arch_64 = arch_64,
           heap = heap.getOrElse(if (arch_64) default_heap * 2 else default_heap),
-          max_heap = max_heap, init_settings = init_settings, more_settings = more_settings,
+          max_heap = max_heap, more_settings = more_settings,
           more_preferences = more_preferences, verbose = verbose, build_tags = build_tags,
           build_args = build_args)
 
@@ -517,6 +530,10 @@
     isabelle_self: Path,
     isabelle_other: Path,
     isabelle_identifier: String = "remote_build_history",
+    component_repository: String = Components.static_component_repository,
+    components_base: String = Components.dynamic_components_base,
+    clean_platform: Boolean = false,
+    clean_archives: Boolean = false,
     progress: Progress = new Progress,
     protect_args: Boolean = false,
     rev: String = "",
@@ -524,7 +541,7 @@
     afp_rev: String = "",
     options: String = "",
     args: String = "",
-    no_build: Boolean = false
+    no_build: Boolean = false,
   ): List[(String, Bytes)] = {
     /* synchronize Isabelle + AFP repositories */
 
@@ -539,24 +556,19 @@
         rev = rev, afp_rev = afp_rev, afp_root = if (afp) afp_repos else None)
     }
 
-    def execute(command: String, args: String,
-      echo: Boolean = false,
-      strict: Boolean = true
-    ): Unit =
-      ssh.execute(
-        Isabelle_System.export_isabelle_identifier(isabelle_identifier) +
-          ssh.bash_path(isabelle_self + Path.explode(command)) + " " + args,
-        progress_stdout = progress.echo_if(echo, _),
-        progress_stderr = progress.echo_if(echo, _),
-        strict = strict).check
-
     sync(isabelle_self)
 
     val self_isabelle =
       Other_Isabelle(isabelle_self, isabelle_identifier = isabelle_identifier,
         ssh = ssh, progress = progress)
 
-    self_isabelle.init(fresh = true, echo = true)
+    val clean_platforms = if (clean_platform) Some(List(ssh.isabelle_platform_family)) else None
+
+    self_isabelle.init(fresh = true, echo = true,
+      component_repository = component_repository,
+      other_settings = self_isabelle.init_components(components_base = components_base),
+      clean_platforms = clean_platforms,
+      clean_archives = clean_archives)
 
     sync(isabelle_other, accurate = true,
       rev = proper_string(rev) getOrElse "tip",
@@ -573,14 +585,17 @@
         val build_options = (if (afp_repos.isEmpty) "" else " -A") + " " + options
         try {
           val script =
-            Isabelle_System.export_isabelle_identifier(isabelle_identifier) +
-              ssh.bash_path(self_isabelle.isabelle_home + Path.explode("Admin/build_other")) +
+            ssh.bash_path(Path.explode("Admin/build_other")) +
+              " -R " + Bash.string(component_repository) +
+              " -C " + Bash.string(components_base) +
+              (clean_platforms match {
+                case Some(ps) => " -O " + Bash.string(ps.mkString(","))
+                case None => ""
+              }) +
+              (if (clean_archives) " -Q" else "") +
               " -o " + ssh.bash_path(output_file) + build_options + " " +
               ssh.bash_path(isabelle_other) + " " + args
-          ssh.execute(script,
-            progress_stdout = progress.echo,
-            progress_stderr = progress.echo,
-            strict = false).check
+          self_isabelle.bash(script, echo = true, strict = false).check
         }
         catch {
           case ERROR(msg) =>
--- a/src/Pure/Admin/build_log.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/Admin/build_log.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -122,7 +122,7 @@
     def is_log(file: JFile,
       prefixes: List[String] =
         List(Build_History.log_prefix, Identify.log_prefix, Identify.log_prefix2,
-          Isatest.log_prefix, AFP_Test.log_prefix, Jenkins.log_prefix),
+          Isatest.log_prefix, AFP_Test.log_prefix),
       suffixes: List[String] = List(".log", ".log.gz", ".log.xz")
     ): Boolean = {
       val name = file.getName
@@ -279,9 +279,7 @@
     val log_prefix2 = "plain_identify_"
 
     def engine(log_file: Log_File): String =
-      if (log_file.name.startsWith(Jenkins.log_prefix)) "jenkins_identify"
-      else if (log_file.name.startsWith(log_prefix2)) "plain_identify"
-      else "identify"
+      if (log_file.name.startsWith(log_prefix2)) "plain_identify" else "identify"
 
     def content(date: Date, isabelle_version: Option[String], afp_version: Option[String]): String =
       terminate_lines(
@@ -381,18 +379,6 @@
         parse(AFP_Test.engine, "", start, AFP_Test.End,
           AFP_Test.Isabelle_Version, AFP_Test.AFP_Version)
 
-      case Jenkins.Start() :: _ =>
-        log_file.lines.dropWhile(_ != Jenkins.BUILD) match {
-          case Jenkins.BUILD :: _ :: Jenkins.Start_Date(log_file.Strict_Date(start)) :: _ =>
-            val host =
-              log_file.lines.takeWhile(_ != Jenkins.CONFIGURATION).collectFirst({
-                case Jenkins.Host(a, b) => a + "." + b
-              }).getOrElse("")
-            parse(Jenkins.engine, host, start.to(Date.timezone_berlin), Jenkins.No_End,
-              Jenkins.Isabelle_Version, Jenkins.AFP_Version)
-          case _ => Meta_Info.empty
-        }
-
       case line :: _ if line.startsWith("\u0000") => Meta_Info.empty
       case List(Isatest.End(_)) => Meta_Info.empty
       case _ :: AFP_Test.Bad_Init() :: _ => Meta_Info.empty
@@ -419,7 +405,7 @@
     timing: Timing = Timing.zero,
     ml_timing: Timing = Timing.zero,
     sources: Option[String] = None,
-    heap_size: Option[Long] = None,
+    heap_size: Option[Space] = None,
     status: Option[Session_Status.Value] = None,
     errors: List[String] = Nil,
     theory_timings: Map[String, Timing] = Map.empty,
@@ -479,7 +465,7 @@
     var ml_timing = Map.empty[String, Timing]
     var started = Set.empty[String]
     var sources = Map.empty[String, String]
-    var heap_sizes = Map.empty[String, Long]
+    var heap_sizes = Map.empty[String, Space]
     var theory_timings = Map.empty[String, Map[String, Timing]]
     var ml_statistics = Map.empty[String, List[Properties.T]]
     var errors = Map.empty[String, List[String]]
@@ -527,7 +513,7 @@
           sources += (name -> s)
 
         case Heap(name, Value.Long(size)) =>
-          heap_sizes += (name -> size)
+          heap_sizes += (name -> Space.bytes(size))
 
         case _ if Protocol.Theory_Timing_Marker.test_yxml(line) =>
           line match {
@@ -955,7 +941,7 @@
           stmt.long(11) = session.ml_timing.cpu.proper_ms
           stmt.long(12) = session.ml_timing.gc.proper_ms
           stmt.double(13) = session.ml_timing.factor
-          stmt.long(14) = session.heap_size
+          stmt.long(14) = session.heap_size.map(_.bytes)
           stmt.string(15) = session.status.map(_.toString)
           stmt.bytes(16) = compress_errors(session.errors, cache = cache.compress)
           stmt.string(17) = session.sources
@@ -1109,7 +1095,7 @@
                 ml_timing =
                   res.timing(Data.ml_timing_elapsed, Data.ml_timing_cpu, Data.ml_timing_gc),
                 sources = res.get_string(Data.sources),
-                heap_size = res.get_long(Data.heap_size),
+                heap_size = res.get_long(Data.heap_size).map(Space.bytes),
                 status = res.get_string(Data.status).map(Session_Status.withName),
                 errors = uncompress_errors(res.bytes(Data.errors), cache = cache),
                 ml_statistics =
--- a/src/Pure/Admin/build_status.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/Admin/build_status.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -14,8 +14,7 @@
   val default_image_size = (800, 600)
   val default_history = 30
 
-  def default_profiles: List[Profile] =
-    Jenkins.build_status_profiles ::: Isabelle_Cronjob.build_status_profiles
+  def default_profiles: List[Profile] = Isabelle_Cronjob.build_status_profiles
 
 
   /* data profiles */
@@ -109,9 +108,9 @@
     def check_heap: Boolean =
       finished_entries_size >= 3 &&
       finished_entries.forall(entry =>
-        entry.maximum_heap > 0 ||
-        entry.average_heap > 0 ||
-        entry.stored_heap > 0)
+        entry.maximum_heap.is_proper ||
+        entry.average_heap.is_proper ||
+        entry.stored_heap.is_proper)
 
     def make_csv: CSV.File = {
       val header =
@@ -170,13 +169,13 @@
     afp_version: String,
     timing: Timing,
     ml_timing: Timing,
-    maximum_code: Long,
-    average_code: Long,
-    maximum_stack: Long,
-    average_stack: Long,
-    maximum_heap: Long,
-    average_heap: Long,
-    stored_heap: Long,
+    maximum_code: Space,
+    average_code: Space,
+    maximum_stack: Space,
+    average_stack: Space,
+    maximum_heap: Space,
+    average_heap: Space,
+    stored_heap: Space,
     status: Build_Log.Session_Status.Value,
     errors: List[String]
   ) {
@@ -320,13 +319,13 @@
                     Build_Log.Data.ml_timing_elapsed,
                     Build_Log.Data.ml_timing_cpu,
                     Build_Log.Data.ml_timing_gc),
-                maximum_code = ml_stats.maximum(ML_Statistics.CODE_SIZE).toLong,
-                average_code = ml_stats.average(ML_Statistics.CODE_SIZE).toLong,
-                maximum_stack = ml_stats.maximum(ML_Statistics.STACK_SIZE).toLong,
-                average_stack = ml_stats.average(ML_Statistics.STACK_SIZE).toLong,
-                maximum_heap = ml_stats.maximum(ML_Statistics.HEAP_SIZE).toLong,
-                average_heap = ml_stats.average(ML_Statistics.HEAP_SIZE).toLong,
-                stored_heap = ML_Statistics.mem_scale(res.long(Build_Log.Data.heap_size)),
+                maximum_code = Space.B(ml_stats.maximum(ML_Statistics.CODE_SIZE)),
+                average_code = Space.B(ml_stats.average(ML_Statistics.CODE_SIZE)),
+                maximum_stack = Space.B(ml_stats.maximum(ML_Statistics.STACK_SIZE)),
+                average_stack = Space.B(ml_stats.average(ML_Statistics.STACK_SIZE)),
+                maximum_heap = Space.B(ml_stats.maximum(ML_Statistics.HEAP_SIZE)),
+                average_heap = Space.B(ml_stats.average(ML_Statistics.HEAP_SIZE)),
+                stored_heap = Space.bytes(res.long(Build_Log.Data.heap_size)),
                 status = Build_Log.Session_Status.withName(res.string(Build_Log.Data.status)),
                 errors =
                   Build_Log.uncompress_errors(
@@ -430,13 +429,13 @@
                       entry.timing.resources.minutes.toString,
                       entry.ml_timing.elapsed.minutes.toString,
                       entry.ml_timing.resources.minutes.toString,
-                      entry.maximum_code.toString,
-                      entry.average_code.toString,
-                      entry.maximum_stack.toString,
-                      entry.average_stack.toString,
-                      entry.maximum_heap.toString,
-                      entry.average_heap.toString,
-                      entry.stored_heap.toString).mkString(" "))))
+                      entry.maximum_code.MiB.toString,
+                      entry.average_code.MiB.toString,
+                      entry.maximum_stack.MiB.toString,
+                      entry.average_stack.MiB.toString,
+                      entry.maximum_heap.MiB.toString,
+                      entry.average_heap.MiB.toString,
+                      entry.stored_heap.MiB.toString).mkString(" "))))
 
               val max_time =
                 (session.finished_entries.foldLeft(0.0) {
@@ -502,7 +501,7 @@
                 val image = Image(plot_name, image_width, image_height)
                 val chart =
                   session.ml_statistics.chart(
-                    fields._1 + ": " + session.ml_statistics.heading, fields._2)
+                    fields.title + ": " + session.ml_statistics.heading, fields.names)
                 Graphics_File.write_chart_png(
                   (dir + image.path).file, chart, image.width, image.height)
                 image
@@ -554,19 +553,19 @@
                     List(HTML.link(data_files(session.name).file_name, HTML.text("CSV"))),
                   HTML.text("timing:") -> HTML.text(session.head.timing.message_resources),
                   HTML.text("ML timing:") -> HTML.text(session.head.ml_timing.message_resources)) :::
-                ML_Statistics.mem_print(session.head.maximum_code).map(s =>
+                session.head.maximum_code.print_relevant.map(s =>
                   HTML.text("code maximum:") -> HTML.text(s)).toList :::
-                ML_Statistics.mem_print(session.head.average_code).map(s =>
+                session.head.average_code.print_relevant.map(s =>
                   HTML.text("code average:") -> HTML.text(s)).toList :::
-                ML_Statistics.mem_print(session.head.maximum_stack).map(s =>
+                session.head.maximum_stack.print_relevant.map(s =>
                   HTML.text("stack maximum:") -> HTML.text(s)).toList :::
-                ML_Statistics.mem_print(session.head.average_stack).map(s =>
+                session.head.average_stack.print_relevant.map(s =>
                   HTML.text("stack average:") -> HTML.text(s)).toList :::
-                ML_Statistics.mem_print(session.head.maximum_heap).map(s =>
+                session.head.maximum_heap.print_relevant.map(s =>
                   HTML.text("heap maximum:") -> HTML.text(s)).toList :::
-                ML_Statistics.mem_print(session.head.average_heap).map(s =>
+                session.head.average_heap.print_relevant.map(s =>
                   HTML.text("heap average:") -> HTML.text(s)).toList :::
-                ML_Statistics.mem_print(session.head.stored_heap).map(s =>
+                session.head.stored_heap.print_relevant.map(s =>
                   HTML.text("heap stored:") -> HTML.text(s)).toList :::
                 proper_string(session.head.isabelle_version).map(s =>
                   HTML.text("Isabelle version:") -> HTML.text(s)).toList :::
--- a/src/Pure/Admin/isabelle_cronjob.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/Admin/isabelle_cronjob.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -140,6 +140,8 @@
     historic: Boolean = false,
     history: Int = 0,
     history_base: String = "build_history_base",
+    components_base: String = Components.dynamic_components_base,
+    clean_components: Boolean = false,
     java_heap: String = "",
     options: String = "",
     args: String = "",
@@ -349,8 +351,8 @@
           args = "-a -d '~~/src/Benchmarks'")),
       List(
         Remote_Build("Windows", "vmnipkow9", historic = true, history = 90,
+          components_base = "/cygdrive/d/isatest/contrib",
           options = "-m32 -M4" +
-            " -C /cygdrive/d/isatest/contrib" +
             " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
             " -e ISABELLE_GHC_SETUP=true" +
             " -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml",
@@ -359,6 +361,7 @@
             Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86-windows") + " OR " +
             Build_Log.Settings.ML_PLATFORM + " = " + SQL.string("x86_64_32-windows")),
         Remote_Build("Windows", "vmnipkow9", historic = true, history = 90,
+          components_base = "/cygdrive/d/isatest/contrib",
           options = "-m64 -M4" +
             " -C /cygdrive/d/isatest/contrib" +
             " -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
@@ -404,12 +407,14 @@
               isabelle_repos,
               isabelle_repos.ext(r.host),
               isabelle_identifier = "cronjob_build_history",
+              components_base = r.components_base,
+              clean_platform = r.clean_components,
+              clean_archives = r.clean_components,
               rev = rev,
               afp_repos = if (afp_rev.isDefined) Some(afp_repos) else None,
               afp_rev = afp_rev.getOrElse(""),
               options =
                 " -N " + Bash.string(task_name) + (if (i < 0) "" else "_" + (i + 1).toString) +
-                " -R " + Bash.string(Components.default_component_repository) +
                 " -f " + r.build_history_options,
               args = "-o timeout=10800 " + r.args)
 
@@ -589,8 +594,6 @@
                       (r, i) <- (if (seq.length <= 1) seq.map((_, -1)) else seq.zipWithIndex)
                       (rev, afp_rev) <- r.pick(logger.options, hg.id(), history_base_filter(r))
                     } yield remote_build_history(rev, afp_rev, i, r)))),
-                Logger_Task("jenkins_logs", _ =>
-                  Jenkins.download_logs(logger.options, Jenkins.build_log_jobs, main_dir)),
                 Logger_Task("build_log_database",
                   logger => Isabelle_Devel.build_log_database(logger.options, build_log_dirs)),
                 Logger_Task("build_status",
--- a/src/Pure/Admin/jenkins.scala	Fri Jan 27 19:16:38 2023 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,145 +0,0 @@
-/*  Title:      Pure/Admin/jenkins.scala
-    Author:     Makarius
-
-Support for Jenkins continuous integration service.
-*/
-
-package isabelle
-
-
-import java.net.URL
-
-import scala.util.matching.Regex
-
-
-object Jenkins {
-  /* server API */
-
-  def root(): String =
-    Isabelle_System.getenv_strict("ISABELLE_JENKINS_ROOT")
-
-  def invoke(url: String, args: String*): Any = {
-    val req = url + "/api/json?" + args.mkString("&")
-    val result = Url.read(req)
-    try { JSON.parse(result) }
-    catch { case ERROR(_) => error("Malformed JSON from " + quote(req)) }
-  }
-
-
-  /* build jobs */
-
-  def build_job_names(): List[String] =
-    for {
-      job <- JSON.array(invoke(root()), "jobs").getOrElse(Nil)
-      _class <- JSON.string(job, "_class")
-      if _class == "hudson.model.FreeStyleProject"
-      name <- JSON.string(job, "name")
-    } yield name
-
-
-  def download_logs(
-    options: Options,
-    job_names: List[String],
-    dir: Path,
-    progress: Progress = new Progress
-  ): Unit = {
-    val store = Sessions.store(options)
-    val infos = job_names.flatMap(build_job_infos)
-    Par_List.map((info: Job_Info) => info.download_log(store, dir, progress), infos)
-  }
-
-
-  /* build log status */
-
-  val build_log_jobs = List("isabelle-nightly-benchmark", "isabelle-nightly-slow")
-
-  val build_status_profiles: List[Build_Status.Profile] =
-    build_log_jobs.map(job_name =>
-      Build_Status.Profile("jenkins " + job_name,
-        sql =
-          Build_Log.Prop.build_engine.toString + " = " + SQL.string(Build_Log.Jenkins.engine) +
-          " AND " + Build_Log.Data.session_name + " <> " + SQL.string("Pure") + " AND " +
-          Build_Log.Data.status + " = " + SQL.string(Build_Log.Session_Status.finished.toString) +
-          " AND " + Build_Log.Data.log_name + " LIKE " + SQL.string("%" + job_name)))
-
-
-  /* job info */
-
-  sealed case class Job_Info(
-    job_name: String,
-    timestamp: Long,
-    main_log: URL,
-    session_logs: List[(String, String, URL)]
-  ) {
-    val date: Date = Date(Time.ms(timestamp), Date.timezone_berlin)
-
-    def log_filename: Path =
-      Build_Log.log_filename(Build_Log.Jenkins.engine, date, List(job_name))
-
-    def read_ml_statistics(store: Sessions.Store, session_name: String): List[Properties.T] = {
-      def get_log(ext: String): Option[URL] =
-        session_logs.collectFirst({ case (a, b, url) if a == session_name && b == ext => url })
-
-      get_log("db") match {
-        case Some(url) =>
-          Isabelle_System.with_tmp_file(session_name, "db") { database =>
-            Bytes.write(database, Bytes.read(url))
-            using(SQLite.open_database(database))(db => store.read_ml_statistics(db, session_name))
-          }
-        case None =>
-          get_log("gz") match {
-            case Some(url) =>
-              val log_file = Build_Log.Log_File(session_name, Url.read_gzip(url))
-              log_file.parse_session_info(ml_statistics = true).ml_statistics
-            case None => Nil
-          }
-      }
-    }
-
-    def download_log(store: Sessions.Store, dir: Path, progress: Progress = new Progress): Unit = {
-      val log_dir = dir + Build_Log.log_subdir(date)
-      val log_path = log_dir + log_filename.xz
-
-      if (!log_path.is_file) {
-        progress.echo(File.standard_path(log_path))
-        Isabelle_System.make_directory(log_dir)
-
-        val ml_statistics =
-          session_logs.map(_._1).distinct.sorted.flatMap(session_name =>
-            read_ml_statistics(store, session_name).
-              map(props => (Build_Log.SESSION_NAME -> session_name) :: props))
-
-        File.write_xz(log_path,
-          terminate_lines(Url.read(main_log) ::
-            ml_statistics.map(Protocol.ML_Statistics_Marker.apply)),
-          Compress.Options_XZ(6))
-      }
-    }
-  }
-
-  def build_job_infos(job_name: String): List[Job_Info] = {
-    val Session_Log = new Regex("""^.*/log/([^/]+)\.(db|gz)$""")
-
-    val infos =
-      for {
-        build <-
-          JSON.array(
-            invoke(root() + "/job/" + job_name, "tree=allBuilds[number,timestamp,artifacts[*]]"),
-            "allBuilds").getOrElse(Nil)
-        number <- JSON.int(build, "number")
-        timestamp <- JSON.long(build, "timestamp")
-      } yield {
-        val job_prefix = root() + "/job/" + job_name + "/" + number
-        val main_log = Url(job_prefix + "/consoleText")
-        val session_logs =
-          for {
-            artifact <- JSON.array(build, "artifacts").getOrElse(Nil)
-            log_path <- JSON.string(artifact, "relativePath")
-            (name, ext) <- (log_path match { case Session_Log(a, b) => Some((a, b)) case _ => None })
-          } yield (name, ext, Url(job_prefix + "/artifact/" + log_path))
-        Job_Info(job_name, timestamp, main_log, session_logs)
-      }
-
-    infos.sortBy(info => - info.timestamp)
-  }
-}
--- a/src/Pure/Admin/other_isabelle.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/Admin/other_isabelle.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -75,14 +75,12 @@
   /* components */
 
   def init_components(
-    component_repository: String = Components.default_component_repository,
-    components_base: String = Components.standard_components_base,
+    components_base: String = Components.dynamic_components_base,
     catalogs: List[String] = Components.default_catalogs,
     components: List[String] = Nil
   ): List[String] = {
     val admin = Components.admin(Path.ISABELLE_HOME).implode
 
-    ("ISABELLE_COMPONENT_REPOSITORY=" + Bash.string(component_repository)) ::
     catalogs.map(name =>
       "init_components " + quote(components_base) + " " + quote(admin + "/" + name)) :::
     components.map(name =>
@@ -92,13 +90,15 @@
   def resolve_components(
     echo: Boolean = false,
     clean_platforms: Option[List[Platform.Family.Value]] = None,
-    clean_archives: Boolean = false
+    clean_archives: Boolean = false,
+    component_repository: String = Components.static_component_repository
   ): Unit = {
     val missing = Path.split(getenv("ISABELLE_COMPONENTS_MISSING"))
     for (path <- missing) {
       Components.resolve(path.dir, path.file_name,
         clean_platforms = clean_platforms,
         clean_archives = clean_archives,
+        component_repository = component_repository,
         ssh = ssh,
         progress = if (echo) progress else new Progress)
     }
@@ -146,18 +146,22 @@
   }
 
 
+  /* init */
+
   def init(
     other_settings: List[String] = init_components(),
     fresh: Boolean = false,
     echo: Boolean = false,
     clean_platforms: Option[List[Platform.Family.Value]] = None,
-    clean_archives: Boolean = false
+    clean_archives: Boolean = false,
+    component_repository: String = Components.static_component_repository
   ): Unit = {
     init_settings(other_settings)
     resolve_components(
       echo = echo,
       clean_platforms = clean_platforms,
-      clean_archives = clean_archives)
+      clean_archives = clean_archives,
+      component_repository = component_repository)
     scala_build(fresh = fresh, echo = echo)
   }
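
Note: the following is an illustrative sketch, not part of the changeset, of the reworked Other_Isabelle.init interface after this change: the component repository and the clean_platforms/clean_archives options are now passed explicitly to init/resolve_components instead of being written into etc/settings by init_components. The root path and the identifier are hypothetical example values.

  // sketch only: set up a secondary Isabelle directory and resolve its components
  val other_isabelle =
    Other_Isabelle(Path.explode("/tmp/isabelle-other"),
      isabelle_identifier = "example_build", progress = new Progress)

  other_isabelle.init(
    other_settings =
      other_isabelle.init_components(components_base = Components.dynamic_components_base),
    fresh = true,
    echo = true,
    component_repository = Components.static_component_repository,
    clean_platforms = Some(List(Platform.Family.linux)),  // keep only Linux components
    clean_archives = true)                                // discard downloaded archives
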
 
--- a/src/Pure/General/file.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/General/file.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -399,4 +399,10 @@
 
     def output(out: XML.Body => String): Content = new Content(path, Bytes(out(content)))
   }
+
+
+  /* space */
+
+  def space(path: Path): Space =
+    Space.bytes(check_file(path).file.length)
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/General/space.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -0,0 +1,56 @@
+/*  Title:      Pure/General/space.scala
+    Author:     Makarius
+
+Storage space based on bytes.
+*/
+
+package isabelle
+
+
+import java.util.Locale
+
+import scala.annotation.tailrec
+
+
+object Space {
+  def bytes(bs: Long): Space = new Space(bs)
+  val zero: Space = bytes(0L)
+
+  private val units: List[String] = List("B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB")
+
+  def B(x: Double): Space = bytes(x.round)
+  def KiB(x: Double): Space = B(x * 1024)
+  def MiB(x: Double): Space = KiB(x * 1024)
+  def GiB(x: Double): Space = MiB(x * 1024)
+  def TiB(x: Double): Space = GiB(x * 1024)
+  def PiB(x: Double): Space = TiB(x * 1024)
+  def EiB(x: Double): Space = PiB(x * 1024)
+}
+
+final class Space private(val bytes: Long) extends AnyVal {
+  def is_proper: Boolean = bytes > 0
+  def is_relevant: Boolean = MiB >= 1.0
+
+  def B: Double = bytes.toDouble
+  def KiB: Double = B / 1024
+  def MiB: Double = KiB / 1024
+  def GiB: Double = MiB / 1024
+  def TiB: Double = GiB / 1024
+  def PiB: Double = TiB / 1024
+  def EiB: Double = PiB / 1024
+
+  override def toString: String = Value.Long(bytes)
+
+  def print: String = {
+    @tailrec def print_unit(x: Double, units: List[String]): String =
+      if (x.abs < 1024 || units.tail.isEmpty) {
+        val s = String.format(Locale.ROOT, "%.1f", x.asInstanceOf[AnyRef])
+        Library.perhaps_unsuffix(".0", s) + " " + units.head
+      }
+      else print_unit(x / 1024, units.tail)
+
+    print_unit(bytes.toDouble, Space.units)
+  }
+
+  def print_relevant: Option[String] = if (is_relevant) Some(print) else None
+}
--- a/src/Pure/General/ssh.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/General/ssh.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -412,6 +412,9 @@
       Isabelle_System.download_file(url_name, file, progress = progress)
 
     def isabelle_platform: Isabelle_Platform = Isabelle_Platform()
+
+    def isabelle_platform_family: Platform.Family.Value =
+      Platform.Family.parse(isabelle_platform.ISABELLE_PLATFORM_FAMILY)
   }
 
   object Local extends System
--- a/src/Pure/General/value.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/General/value.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -45,7 +45,7 @@
       try { Some(java.lang.Long.parseLong(s)) }
       catch { case _: NumberFormatException => None }
     def parse(s: java.lang.String): scala.Long =
-      unapply(s) getOrElse error("Bad integer: " + quote(s))
+      unapply(s) getOrElse error("Bad long integer: " + quote(s))
   }
 
   object Double {
--- a/src/Pure/ML/ml_statistics.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/ML/ml_statistics.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -108,17 +108,9 @@
   }
 
 
-  /* memory fields (mega bytes) */
-
-  def mem_print(x: Long): Option[String] =
-    if (x == 0L) None else Some(x.toString + " M")
+  /* memory fields */
 
-  def mem_scale(x: Long): Long = x / 1024 / 1024
-
-  def mem_field_scale(name: String, x: Double): Double =
-    if (heap_fields._2.contains(name) || program_fields._2.contains(name))
-      mem_scale(x.toLong).toDouble
-    else x
+  val scale_MiB: Double = 1.0 / 1024 / 1024
 
   val CODE_SIZE = "size_code"
   val STACK_SIZE = "size_stacks"
@@ -127,43 +119,43 @@
 
   /* standard fields */
 
-  type Fields = (String, List[String])
+  sealed case class Fields(title: String, names: List[String], scale: Double = 1.0)
 
   val tasks_fields: Fields =
-    ("Future tasks",
+    Fields("Future tasks",
       List("tasks_ready", "tasks_pending", "tasks_running", "tasks_passive",
         "tasks_urgent", "tasks_total"))
 
   val workers_fields: Fields =
-    ("Worker threads", List("workers_total", "workers_active", "workers_waiting"))
+    Fields("Worker threads", List("workers_total", "workers_active", "workers_waiting"))
 
   val GC_fields: Fields =
-    ("GCs", List("partial_GCs", "full_GCs", "share_passes"))
+    Fields("GCs", List("partial_GCs", "full_GCs", "share_passes"))
 
   val heap_fields: Fields =
-    ("Heap", List(HEAP_SIZE, "size_allocation", "size_allocation_free",
-      "size_heap_free_last_full_GC", "size_heap_free_last_GC"))
+    Fields("Heap", List(HEAP_SIZE, "size_allocation", "size_allocation_free",
+      "size_heap_free_last_full_GC", "size_heap_free_last_GC"), scale = scale_MiB)
 
   val program_fields: Fields =
-    ("Program", List("size_code", "size_stacks"))
+    Fields("Program", List("size_code", "size_stacks"), scale = scale_MiB)
 
   val threads_fields: Fields =
-    ("Threads", List("threads_total", "threads_in_ML", "threads_wait_condvar",
+    Fields("Threads", List("threads_total", "threads_in_ML", "threads_wait_condvar",
       "threads_wait_IO", "threads_wait_mutex", "threads_wait_signal"))
 
   val time_fields: Fields =
-    ("Time", List("time_elapsed", "time_elapsed_GC", "time_CPU", "time_GC"))
+    Fields("Time", List("time_elapsed", "time_elapsed_GC", "time_CPU", "time_GC"))
 
   val speed_fields: Fields =
-    ("Speed", List("speed_CPU", "speed_GC"))
+    Fields("Speed", List("speed_CPU", "speed_GC"))
 
   private val time_speed = Map("time_CPU" -> "speed_CPU", "time_GC" -> "speed_GC")
 
   val java_heap_fields: Fields =
-    ("Java heap", List("java_heap_size", "java_heap_used"))
+    Fields("Java heap", List("java_heap_size", "java_heap_used"))
 
   val java_thread_fields: Fields =
-    ("Java threads", List("java_threads_total", "java_workers_total", "java_workers_active"))
+    Fields("Java threads", List("java_threads_total", "java_workers_total", "java_workers_active"))
 
 
   val main_fields: List[Fields] =
@@ -175,6 +167,10 @@
 
   val all_fields: List[Fields] = main_fields ::: other_fields
 
+  def field_scale(x: String, y: Double): Double =
+    all_fields.collectFirst({ case fields if fields.names.contains(x) => y * fields.scale })
+      .getOrElse(y)
+
 
   /* content interpretation */
 
@@ -234,7 +230,7 @@
               (x, y) <- props.iterator ++ speeds.iterator
               if x != Now.name && domain(x)
               z = java.lang.Double.parseDouble(y) if z != 0.0
-            } yield { (x.intern, mem_field_scale(x, z)) })
+            } yield { (x.intern, z) })
 
         result += ML_Statistics.Entry(time, data)
       }
@@ -284,7 +280,7 @@
     data.removeAllSeries()
     for (field <- selected_fields) {
       val series = new XYSeries(field)
-      content.foreach(entry => series.add(entry.time, entry.get(field)))
+      content.foreach(e => series.add(e.time, ML_Statistics.field_scale(field, e.get(field))))
       data.addSeries(series)
     }
   }
@@ -298,7 +294,7 @@
   }
 
   def chart(fields: ML_Statistics.Fields): JFreeChart =
-    chart(fields._1, fields._2)
+    chart(fields.title, fields.names)
 
   def show_frames(fields: List[ML_Statistics.Fields] = ML_Statistics.main_fields): Unit =
     fields.map(chart).foreach(c =>
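
Note: an illustrative sketch, not part of the changeset, of the reworked ML_Statistics.Fields record (formerly a plain (String, List[String]) pair) and the new field_scale function: raw statistics entries now keep byte values, and memory fields are scaled to MiB only when charted. The numeric comments are worked examples.

  // sketch only: inspect a field group and apply per-field scaling
  val heap = ML_Statistics.heap_fields
  heap.title          // "Heap"
  heap.names.head     // ML_Statistics.HEAP_SIZE
  heap.scale          // ML_Statistics.scale_MiB = 1.0 / 1024 / 1024

  ML_Statistics.field_scale(ML_Statistics.HEAP_SIZE, 2.0e9)   // ~1907.35 (bytes -> MiB)
  ML_Statistics.field_scale("tasks_ready", 42.0)              // 42.0 (no scaling)
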
--- a/src/Pure/ML/ml_syntax.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/ML/ml_syntax.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -29,10 +29,10 @@
       case 12 => "\\f"
       case 13 => "\\r"
       case _ =>
-        if (c < 0) "\\" + Library.signed_string_of_int(256 + c)
+        if (c < 0) "\\" + Value.Int(256 + c)
         else if (c < 32) new String(Array[Char](92, 94, (c + 64).toChar))
         else if (c < 127) Symbol.ascii(c.toChar)
-        else "\\" + Library.signed_string_of_int(c)
+        else "\\" + Value.Int(c)
     }
 
   private def print_symbol(s: Symbol.Symbol): String =
--- a/src/Pure/PIDE/headless.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/PIDE/headless.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -90,7 +90,7 @@
       def finished: Load_State = Load_State(Nil, Nil, 0)
 
       def count_file(name: Document.Node.Name): Long =
-        if (loaded_theory(name)) 0 else name.path.file.length
+        if (loaded_theory(name)) 0 else File.space(name.path).bytes
     }
 
     private case class Load_State(
--- a/src/Pure/ROOT.ML	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/ROOT.ML	Sat Jan 28 22:31:40 2023 +0100
@@ -366,3 +366,4 @@
 ML_file "Tools/jedit.ML";
 ML_file "Tools/ghc.ML";
 ML_file "Tools/generated_files.ML";
+
--- a/src/Pure/ROOT.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/ROOT.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -21,3 +21,4 @@
   val proper_string = Library.proper_string _
   def proper_list[A](list: List[A]): Option[List[A]] = Library.proper_list(list)
 }
+
--- a/src/Pure/System/components.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Pure/System/components.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -59,11 +59,11 @@
 
   /* component collections */
 
-  def default_component_repository: String =
+  def static_component_repository: String =
     Isabelle_System.getenv("ISABELLE_COMPONENT_REPOSITORY")
 
   val default_components_base: Path = Path.explode("$ISABELLE_COMPONENTS_BASE")
-  val standard_components_base: String = "${ISABELLE_COMPONENTS_BASE:-$USER_HOME/.isabelle/contrib}"
+  val dynamic_components_base: String = "${ISABELLE_COMPONENTS_BASE:-$USER_HOME/.isabelle/contrib}"
 
   val default_catalogs: List[String] = List("main")
   val optional_catalogs: List[String] = List("main", "optional")
@@ -125,7 +125,7 @@
     copy_dir: Option[Path] = None,
     clean_platforms: Option[List[Platform.Family.Value]] = None,
     clean_archives: Boolean = false,
-    component_repository: String = Components.default_component_repository,
+    component_repository: String = Components.static_component_repository,
     ssh: SSH.System = SSH.Local,
     progress: Progress = new Progress
   ): Unit = {
--- a/src/Tools/jEdit/src/monitor_dockable.scala	Fri Jan 27 19:16:38 2023 +0100
+++ b/src/Tools/jEdit/src/monitor_dockable.scala	Sat Jan 28 22:31:40 2023 +0100
@@ -44,14 +44,14 @@
     statistics_length = 0
   }
 
-  private var data_name = ML_Statistics.all_fields.head._1
+  private var data_name = ML_Statistics.all_fields.head.title
   private val chart = ML_Statistics.empty.chart(null, Nil)
   private val data = chart.getXYPlot.getDataset.asInstanceOf[XYSeriesCollection]
 
   private def update_chart(): Unit = {
-    ML_Statistics.all_fields.find(_._1 == data_name) match {
+    ML_Statistics.all_fields.find(_.title == data_name) match {
       case None =>
-      case Some((_, fields)) => ML_Statistics(statistics.toList).update_data(data, fields)
+      case Some(fields) => ML_Statistics(statistics.toList).update_data(data, fields.names)
     }
   }