merged

author:     wenzelm
date:       Sat, 29 Apr 2017 11:06:46 +0200
changeset:  65634:e85004302c83
parent:     65587:16a8991ab398 (current diff)
parent:     65633:826311fca263 (diff)
child:      65635:0a025b8496a2
--- a/etc/options	Thu Apr 27 15:59:00 2017 +0100
+++ b/etc/options	Sat Apr 29 11:06:46 2017 +0200
@@ -232,3 +232,15 @@
 
 option ssh_alive_interval : real = 30
   -- "time interval to keep SSH server connection alive (seconds)"
+
+
+section "Build Log Database"
+
+option build_log_database_user : string = ""
+option build_log_database_password : string = ""
+option build_log_database_name : string = ""
+option build_log_database_host : string = ""
+option build_log_database_port : int = 0
+option build_log_ssh_host : string = ""
+option build_log_ssh_user : string = ""
+option build_log_ssh_port : int = 0
--- a/src/Pure/Admin/build_history.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/Admin/build_history.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -16,7 +16,8 @@
 {
   /* log files */
 
-  val BUILD_HISTORY = "build_history"
+  val engine = "build_history"
+  val log_prefix = engine + "_"
   val META_INFO_MARKER = "\fmeta_info = "
 
 
@@ -198,7 +199,7 @@
       val log_path =
         other_isabelle.isabelle_home_user +
           Build_Log.log_subdir(build_history_date) +
-          Build_Log.log_filename(BUILD_HISTORY, build_history_date,
+          Build_Log.log_filename(Build_History.engine, build_history_date,
             List(build_host, ml_platform, "M" + threads) ::: build_tags)
 
       val build_info =
@@ -210,16 +211,16 @@
       val store = Sessions.store()
 
       val meta_info =
-        Build_Log.Prop.multiple(Build_Log.Prop.build_tags, build_tags) :::
-        Build_Log.Prop.multiple(Build_Log.Prop.build_args, build_args1) :::
+        Properties.lines_nonempty(Build_Log.Prop.build_tags.name, build_tags) :::
+        Properties.lines_nonempty(Build_Log.Prop.build_args.name, build_args1) :::
         List(
-          Build_Log.Prop.build_group_id -> build_group_id,
-          Build_Log.Prop.build_id -> (build_host + ":" + build_start.time.ms),
-          Build_Log.Prop.build_engine -> BUILD_HISTORY,
-          Build_Log.Prop.build_host -> build_host,
-          Build_Log.Prop.build_start -> Build_Log.print_date(build_start),
-          Build_Log.Prop.build_end -> Build_Log.print_date(build_end),
-          Build_Log.Prop.isabelle_version -> isabelle_version)
+          Build_Log.Prop.build_group_id.name -> build_group_id,
+          Build_Log.Prop.build_id.name -> (build_host + ":" + build_start.time.ms),
+          Build_Log.Prop.build_engine.name -> Build_History.engine,
+          Build_Log.Prop.build_host.name -> build_host,
+          Build_Log.Prop.build_start.name -> Build_Log.print_date(build_start),
+          Build_Log.Prop.build_end.name -> Build_Log.print_date(build_end),
+          Build_Log.Prop.isabelle_version.name -> isabelle_version)
 
       val ml_statistics =
         build_info.finished_sessions.flatMap(session_name =>
--- a/src/Pure/Admin/build_log.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/Admin/build_log.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -1,7 +1,7 @@
 /*  Title:      Pure/Admin/build_log.scala
     Author:     Makarius
 
-Build log parsing for current and historic formats.
+Management of build log files and database storage.
 */
 
 package isabelle
@@ -11,7 +11,9 @@
 import java.time.ZoneId
 import java.time.format.{DateTimeFormatter, DateTimeParseException}
 import java.util.Locale
+import java.sql.PreparedStatement
 
+import scala.collection.immutable.SortedMap
 import scala.collection.mutable
 import scala.util.matching.Regex
 
@@ -24,22 +26,20 @@
 
   object Prop
   {
-    val separator = '\u000b'
-
-    def multiple(name: String, args: List[String]): Properties.T =
-      if (args.isEmpty) Nil
-      else List(name -> args.mkString(separator.toString))
+    val build_tags = SQL.Column.string("build_tags")  // lines
+    val build_args = SQL.Column.string("build_args")  // lines
+    val build_group_id = SQL.Column.string("build_group_id")
+    val build_id = SQL.Column.string("build_id")
+    val build_engine = SQL.Column.string("build_engine")
+    val build_host = SQL.Column.string("build_host")
+    val build_start = SQL.Column.date("build_start")
+    val build_end = SQL.Column.date("build_end")
+    val isabelle_version = SQL.Column.string("isabelle_version")
+    val afp_version = SQL.Column.string("afp_version")
 
-    val build_tags = "build_tags"  // multiple
-    val build_args = "build_args"  // multiple
-    val build_group_id = "build_group_id"
-    val build_id = "build_id"
-    val build_engine = "build_engine"
-    val build_host = "build_host"
-    val build_start = "build_start"
-    val build_end = "build_end"
-    val isabelle_version = "isabelle_version"
-    val afp_version = "afp_version"
+    val all_props: List[SQL.Column] =
+      List(build_tags, build_args, build_group_id, build_id, build_engine,
+        build_host, build_start, build_end, isabelle_version, afp_version)
   }
 
 
@@ -47,9 +47,14 @@
 
   object Settings
   {
-    val build_settings = List("ISABELLE_BUILD_OPTIONS")
-    val ml_settings = List("ML_PLATFORM", "ML_HOME", "ML_SYSTEM", "ML_OPTIONS")
-    val all_settings = build_settings ::: ml_settings
+    val ISABELLE_BUILD_OPTIONS = SQL.Column.string("ISABELLE_BUILD_OPTIONS")
+    val ML_PLATFORM = SQL.Column.string("ML_PLATFORM")
+    val ML_HOME = SQL.Column.string("ML_HOME")
+    val ML_SYSTEM = SQL.Column.string("ML_SYSTEM")
+    val ML_OPTIONS = SQL.Column.string("ML_OPTIONS")
+
+    val ml_settings = List(ML_PLATFORM, ML_HOME, ML_SYSTEM, ML_OPTIONS)
+    val all_settings = ISABELLE_BUILD_OPTIONS :: ml_settings
 
     type Entry = (String, String)
     type T = List[Entry]
@@ -70,7 +75,8 @@
 
     def show(): String =
       cat_lines(
-        build_settings.map(Entry.getenv(_)) ::: List("") ::: ml_settings.map(Entry.getenv(_)))
+        List(Entry.getenv(ISABELLE_BUILD_OPTIONS.name), "") :::
+        ml_settings.map(c => Entry.getenv(c.name)))
   }
 
 
@@ -88,18 +94,6 @@
     Path.explode((engine :: log_date(date) :: more).mkString("", "_", ".log"))
 
 
-  /* log file collections */
-
-  def is_log(file: JFile): Boolean =
-    List(".log", ".log.gz", ".log.xz").exists(ext => file.getName.endsWith(ext))
-
-  def isatest_files(dir: Path): List[JFile] =
-    File.find_files(dir.file, file => is_log(file) && file.getName.startsWith("isatest-makeall-"))
-
-  def afp_test_files(dir: Path): List[JFile] =
-    File.find_files(dir.file, file => is_log(file) && file.getName.startsWith("afp-test-devel-"))
-
-
 
   /** log file **/
 
@@ -107,8 +101,18 @@
 
   object Log_File
   {
+    /* log file */
+
+    def plain_name(name: String): String =
+    {
+      List(".log", ".log.gz", ".log.xz", ".gz", ".xz").find(name.endsWith(_)) match {
+        case Some(s) => Library.try_unsuffix(s, name).get
+        case None => name
+      }
+    }
+
     def apply(name: String, lines: List[String]): Log_File =
-      new Log_File(name, lines)
+      new Log_File(plain_name(name), lines)
 
     def apply(name: String, text: String): Log_File =
       Log_File(name, Library.trim_split_lines(text))
@@ -116,21 +120,32 @@
     def apply(file: JFile): Log_File =
     {
       val name = file.getName
-      val (base_name, text) =
-        Library.try_unsuffix(".gz", name) match {
-          case Some(base_name) => (base_name, File.read_gzip(file))
-          case None =>
-            Library.try_unsuffix(".xz", name) match {
-              case Some(base_name) => (base_name, File.read_xz(file))
-              case None => (name, File.read(file))
-            }
-          }
-      apply(base_name, text)
+      val text =
+        if (name.endsWith(".gz")) File.read_gzip(file)
+        else if (name.endsWith(".xz")) File.read_xz(file)
+        else File.read(file)
+      apply(name, text)
     }
 
     def apply(path: Path): Log_File = apply(path.file)
 
 
+    /* log file collections */
+
+    def is_log(file: JFile,
+      prefixes: List[String] =
+        List(Build_History.log_prefix, Identify.log_prefix, Isatest.log_prefix, AFP_Test.log_prefix),
+      suffixes: List[String] = List(".log", ".log.gz", ".log.xz")): Boolean =
+    {
+      val name = file.getName
+      prefixes.exists(name.startsWith(_)) &&
+      suffixes.exists(name.endsWith(_))
+    }
+
+    def find_files(dirs: Iterable[Path]): List[JFile] =
+      dirs.iterator.flatMap(dir => File.find_files(dir.file, is_log(_))).toList
+
+
     /* date format */
 
     val Date_Format =
@@ -177,7 +192,7 @@
     /* inlined content */
 
     def print_props(marker: String, props: Properties.T): String =
-      marker + YXML.string_of_body(XML.Encode.properties(props))
+      marker + YXML.string_of_body(XML.Encode.properties(Properties.encode_lines(props)))
   }
 
   class Log_File private(val name: String, val lines: List[String])
@@ -223,8 +238,9 @@
         case None => None
       }
 
-    def get_settings(as: List[String]): Settings.T =
-      for { a <- as; entry <- get_setting(a) } yield entry
+    def get_all_settings: Settings.T =
+      for { c <- Settings.all_settings; entry <- get_setting(c.name) }
+      yield entry
 
 
     /* properties (YXML) */
@@ -232,7 +248,7 @@
     val xml_cache = new XML.Cache()
 
     def parse_props(text: String): Properties.T =
-      xml_cache.props(XML.Decode.properties(YXML.parse_body(text)))
+      xml_cache.props(Properties.decode_lines(XML.Decode.properties(YXML.parse_body(text))))
 
     def filter_props(marker: String): List[Properties.T] =
       for {
@@ -268,15 +284,37 @@
   object Meta_Info
   {
     val empty: Meta_Info = Meta_Info(Nil, Nil)
+
+    val log_name = SQL.Column.string("log_name", primary_key = true)
+    val table =
+      SQL.Table("isabelle_build_log_meta_info", log_name :: Prop.all_props ::: Settings.all_settings)
   }
 
-  sealed case class Meta_Info(props: Properties.T, settings: List[(String, String)])
+  sealed case class Meta_Info(props: Properties.T, settings: Settings.T)
   {
     def is_empty: Boolean = props.isEmpty && settings.isEmpty
+
+    def get(c: SQL.Column): Option[String] =
+      Properties.get(props, c.name) orElse
+      Properties.get(settings, c.name)
+
+    def get_date(c: SQL.Column): Option[Date] =
+      get(c).map(Log_File.Date_Format.parse(_))
+  }
+
+  object Identify
+  {
+    val log_prefix = "isabelle_identify_"
+    val engine = "identify"
+    val Start = new Regex("""^isabelle_identify: (.+)$""")
+    val No_End = new Regex("""$.""")
+    val Isabelle_Version = new Regex("""^Isabelle version: (\S+)$""")
+    val AFP_Version = new Regex("""^AFP version: (\S+)$""")
   }
 
   object Isatest
   {
+    val log_prefix = "isatest-makeall-"
     val engine = "isatest"
     val Start = new Regex("""^------------------- starting test --- (.+) --- (.+)$""")
     val End = new Regex("""^------------------- test (?:successful|FAILED) --- (.+) --- .*$""")
@@ -286,6 +324,7 @@
 
   object AFP_Test
   {
+    val log_prefix = "afp-test-devel-"
     val engine = "afp-test"
     val Start = new Regex("""^Start test(?: for .+)? at ([^,]+), (.*)$""")
     val Start_Old = new Regex("""^Start test(?: for .+)? at ([^,]+)$""")
@@ -318,31 +357,35 @@
         val prefix = if (host != "") host else if (engine != "") engine else ""
         (if (prefix == "") "build" else prefix) + ":" + start.time.ms
       }
-      val build_engine = if (engine == "") Nil else List(Prop.build_engine -> engine)
-      val build_host = if (host == "") Nil else List(Prop.build_host -> host)
+      val build_engine = if (engine == "") Nil else List(Prop.build_engine.name -> engine)
+      val build_host = if (host == "") Nil else List(Prop.build_host.name -> host)
 
-      val start_date = List(Prop.build_start -> start.toString)
+      val start_date = List(Prop.build_start.name -> print_date(start))
       val end_date =
         log_file.lines.last match {
           case End(log_file.Strict_Date(end_date)) =>
-            List(Prop.build_end -> end_date.toString)
+            List(Prop.build_end.name -> print_date(end_date))
           case _ => Nil
         }
 
       val isabelle_version =
-        log_file.find_match(Isabelle_Version).map(Prop.isabelle_version -> _)
+        log_file.find_match(Isabelle_Version).map(Prop.isabelle_version.name -> _)
       val afp_version =
-        log_file.find_match(AFP_Version).map(Prop.afp_version -> _)
+        log_file.find_match(AFP_Version).map(Prop.afp_version.name -> _)
 
-      Meta_Info((Prop.build_id -> build_id) :: build_engine ::: build_host :::
+      Meta_Info((Prop.build_id.name -> build_id) :: build_engine ::: build_host :::
           start_date ::: end_date ::: isabelle_version.toList ::: afp_version.toList,
-        log_file.get_settings(Settings.all_settings))
+        log_file.get_all_settings)
     }
 
     log_file.lines match {
       case line :: _ if line.startsWith(Build_History.META_INFO_MARKER) =>
         Meta_Info(log_file.find_props(Build_History.META_INFO_MARKER).get,
-          log_file.get_settings(Settings.all_settings))
+          log_file.get_all_settings)
+
+      case Identify.Start(log_file.Strict_Date(start)) :: _ =>
+        parse(Identify.engine, "", start, Identify.No_End,
+          Identify.Isabelle_Version, Identify.AFP_Version)
 
       case Isatest.Start(log_file.Strict_Date(start), host) :: _ =>
         parse(Isatest.engine, host, start, Isatest.End,
@@ -384,10 +427,7 @@
 
   object Session_Status extends Enumeration
   {
-    val EXISTING = Value("existing")
-    val FINISHED = Value("finished")
-    val FAILED = Value("failed")
-    val CANCELLED = Value("cancelled")
+    val existing, finished, failed, cancelled = Value
   }
 
   sealed case class Session_Entry(
@@ -396,11 +436,36 @@
     threads: Option[Int],
     timing: Timing,
     ml_timing: Timing,
-    ml_statistics: List[Properties.T],
     heap_size: Option[Long],
-    status: Session_Status.Value)
+    status: Session_Status.Value,
+    ml_statistics: List[Properties.T])
+  {
+    def proper_chapter: Option[String] = if (chapter == "") None else Some(chapter)
+    def proper_groups: Option[String] = if (groups.isEmpty) None else Some(cat_lines(groups))
+    def finished: Boolean = status == Session_Status.finished
+  }
+
+  object Build_Info
   {
-    def finished: Boolean = status == Session_Status.FINISHED
+    val session_name = SQL.Column.string("session_name", primary_key = true)
+    val chapter = SQL.Column.string("chapter")
+    val groups = SQL.Column.string("groups")
+    val threads = SQL.Column.int("threads")
+    val timing_elapsed = SQL.Column.long("timing_elapsed")
+    val timing_cpu = SQL.Column.long("timing_cpu")
+    val timing_gc = SQL.Column.long("timing_gc")
+    val ml_timing_elapsed = SQL.Column.long("ml_timing_elapsed")
+    val ml_timing_cpu = SQL.Column.long("ml_timing_cpu")
+    val ml_timing_gc = SQL.Column.long("ml_timing_gc")
+    val heap_size = SQL.Column.long("heap_size")
+    val status = SQL.Column.string("status")
+    val ml_statistics = SQL.Column.bytes("ml_statistics")
+
+    val table = SQL.Table("isabelle_build_log_build_info",
+      List(Meta_Info.log_name, session_name, chapter, groups, threads, timing_elapsed, timing_cpu,
+        timing_gc, ml_timing_elapsed, ml_timing_cpu, ml_timing_gc, heap_size, status, ml_statistics))
+
+    val table0 = table.copy(columns = table.columns.take(2))
   }
 
   sealed case class Build_Info(sessions: Map[String, Session_Entry])
@@ -454,12 +519,12 @@
     var started = Set.empty[String]
     var failed = Set.empty[String]
     var cancelled = Set.empty[String]
+    var heap_sizes = Map.empty[String, Long]
     var ml_statistics = Map.empty[String, List[Properties.T]]
-    var heap_sizes = Map.empty[String, Long]
 
     def all_sessions: Set[String] =
       chapter.keySet ++ groups.keySet ++ threads.keySet ++ timing.keySet ++ ml_timing.keySet ++
-      failed ++ cancelled ++ started ++ ml_statistics.keySet ++ heap_sizes.keySet
+      failed ++ cancelled ++ started ++ heap_sizes.keySet ++ ml_statistics.keySet
 
 
     for (line <- log_file.lines) {
@@ -498,7 +563,7 @@
         case Heap(name, Value.Long(size)) =>
           heap_sizes += (name -> size)
 
-        case _ if line.startsWith(ML_STATISTICS_MARKER) =>
+        case _ if line.startsWith(ML_STATISTICS_MARKER) && YXML.detect(line) =>
           val (name, props) =
             Library.try_unprefix(ML_STATISTICS_MARKER, line).map(log_file.parse_props(_)) match {
               case Some((SESSION_NAME, session_name) :: props) => (session_name, props)
@@ -514,12 +579,12 @@
       Map(
         (for (name <- all_sessions.toList) yield {
           val status =
-            if (failed(name)) Session_Status.FAILED
-            else if (cancelled(name)) Session_Status.CANCELLED
+            if (failed(name)) Session_Status.failed
+            else if (cancelled(name)) Session_Status.cancelled
             else if (timing.isDefinedAt(name) || ml_timing.isDefinedAt(name))
-              Session_Status.FINISHED
-            else if (started(name)) Session_Status.FAILED
-            else Session_Status.EXISTING
+              Session_Status.finished
+            else if (started(name)) Session_Status.failed
+            else Session_Status.existing
           val entry =
             Session_Entry(
               chapter.getOrElse(name, ""),
@@ -527,9 +592,9 @@
               threads.get(name),
               timing.getOrElse(name, Timing.zero),
               ml_timing.getOrElse(name, Timing.zero),
-              ml_statistics.getOrElse(name, Nil).reverse,
               heap_sizes.get(name),
-              status)
+              status,
+              ml_statistics.getOrElse(name, Nil).reverse)
           (name -> entry)
         }):_*)
     Build_Info(sessions)
@@ -557,4 +622,199 @@
       ml_statistics = if (ml_statistics) log_file.filter_props(ML_STATISTICS_MARKER) else Nil,
       task_statistics = if (task_statistics) log_file.filter_props("\ftask_statistics = ") else Nil)
   }
+
+
+
+  /** persistent store **/
+
+  def store(options: Options): Store = new Store(options)
+
+  class Store private[Build_Log](options: Options) extends Properties.Store
+  {
+    def open_database(
+      user: String = options.string("build_log_database_user"),
+      password: String = options.string("build_log_database_password"),
+      database: String = options.string("build_log_database_name"),
+      host: String = options.string("build_log_database_host"),
+      port: Int = options.int("build_log_database_port"),
+      ssh_host: String = options.string("build_log_ssh_host"),
+      ssh_user: String = options.string("build_log_ssh_user"),
+      ssh_port: Int = options.int("build_log_ssh_port")): PostgreSQL.Database =
+    {
+      PostgreSQL.open_database(
+        user = user, password = password, database = database, host = host, port = port,
+        ssh =
+          if (ssh_host == "") None
+          else Some(SSH.init_context(options).open_session(ssh_host, ssh_user, port)))
+    }
+
+    def write_info(db: SQL.Database, files: List[JFile])
+    {
+      write_meta_info(db, files)
+      write_build_info(db, files)
+    }
+
+    def filter_files(db: SQL.Database, table: SQL.Table, files: List[JFile]): List[JFile] =
+    {
+      db.transaction {
+        db.create_table(table)
+
+        val key = Meta_Info.log_name
+        val known_files =
+          using(db.select(table, List(key), distinct = true))(stmt =>
+            SQL.iterator(stmt.executeQuery)(rs => db.string(rs, key)).toSet)
+
+        val unique_files =
+          (Map.empty[String, JFile] /: files.iterator)({ case (m, file) =>
+            val name = Log_File.plain_name(file.getName)
+            if (known_files(name)) m else m + (name -> file)
+          })
+
+        unique_files.iterator.map(_._2).toList
+      }
+    }
+
+    def write_meta_info(db: SQL.Database, files: List[JFile])
+    {
+      for (file_group <- filter_files(db, Meta_Info.table, files).grouped(1000)) {
+        db.transaction {
+          for (file <- file_group) {
+            val log_file = Log_File(file)
+            val meta_info = log_file.parse_meta_info()
+
+            using(db.delete(Meta_Info.table, Meta_Info.log_name.sql_where_equal(log_file.name)))(
+              _.execute)
+            using(db.insert(Meta_Info.table))(stmt =>
+            {
+              db.set_string(stmt, 1, log_file.name)
+              for ((c, i) <- Meta_Info.table.columns.tail.zipWithIndex) {
+                if (c.T == SQL.Type.Date)
+                  db.set_date(stmt, i + 2, meta_info.get_date(c))
+                else
+                  db.set_string(stmt, i + 2, meta_info.get(c))
+              }
+              stmt.execute()
+            })
+          }
+        }
+      }
+    }
+
+    def read_meta_info(db: SQL.Database, log_name: String): Option[Meta_Info] =
+    {
+      val cs = Meta_Info.table.columns.tail
+      using(db.select(Meta_Info.table, cs, Meta_Info.log_name.sql_where_equal(log_name)))(stmt =>
+      {
+        val rs = stmt.executeQuery
+        if (!rs.next) None
+        else {
+          val results =
+            cs.map(c => c.name ->
+              (if (c.T == SQL.Type.Date)
+                db.get(rs, c, db.date _).map(Log_File.Date_Format(_))
+               else
+                db.get(rs, c, db.string _)))
+          val n = Prop.all_props.length
+          val props = for ((x, Some(y)) <- results.take(n)) yield (x, y)
+          val settings = for ((x, Some(y)) <- results.drop(n)) yield (x, y)
+          Some(Meta_Info(props, settings))
+        }
+      })
+    }
+
+    def write_build_info(db: SQL.Database, files: List[JFile])
+    {
+      for (file_group <- filter_files(db, Build_Info.table, files).grouped(100)) {
+        db.transaction {
+          for (file <- file_group) {
+            val log_file = Log_File(file)
+            val build_info = log_file.parse_build_info()
+
+            using(db.delete(Build_Info.table, Meta_Info.log_name.sql_where_equal(log_file.name)))(
+              _.execute)
+            if (build_info.sessions.isEmpty) {
+              using(db.insert(Build_Info.table0))(stmt =>
+              {
+                db.set_string(stmt, 1, log_file.name)
+                db.set_string(stmt, 2, "")
+                stmt.execute()
+              })
+            }
+            else {
+              using(db.insert(Build_Info.table))(stmt =>
+              {
+                for ((session_name, session) <- build_info.sessions.iterator) {
+                  db.set_string(stmt, 1, log_file.name)
+                  db.set_string(stmt, 2, session_name)
+                  db.set_string(stmt, 3, session.proper_chapter)
+                  db.set_string(stmt, 4, session.proper_groups)
+                  db.set_int(stmt, 5, session.threads)
+                  db.set_long(stmt, 6, session.timing.elapsed.proper_ms)
+                  db.set_long(stmt, 7, session.timing.cpu.proper_ms)
+                  db.set_long(stmt, 8, session.timing.gc.proper_ms)
+                  db.set_long(stmt, 9, session.ml_timing.elapsed.proper_ms)
+                  db.set_long(stmt, 10, session.ml_timing.cpu.proper_ms)
+                  db.set_long(stmt, 11, session.ml_timing.gc.proper_ms)
+                  db.set_long(stmt, 12, session.heap_size)
+                  db.set_string(stmt, 13, session.status.toString)
+                  db.set_bytes(stmt, 14, compress_properties(session.ml_statistics).proper)
+                  stmt.execute()
+                }
+              })
+            }
+          }
+        }
+      }
+    }
+
+    def read_build_info(
+      db: SQL.Database,
+      log_name: String,
+      session_names: List[String] = Nil,
+      ml_statistics: Boolean = false): Build_Info =
+    {
+      val columns =
+        Build_Info.table.columns.filter(c =>
+          c != Meta_Info.log_name && (ml_statistics || c != Build_Info.ml_statistics))
+
+      val where0 =
+        Meta_Info.log_name.sql_where_equal(log_name) + " AND " +
+          Build_Info.session_name.sql_name + " <> ''"
+      val where =
+        if (session_names.isEmpty) where0
+        else
+          where0 + " AND " +
+          session_names.map(a => Build_Info.session_name.sql_name + " = " + SQL.quote_string(a)).
+            mkString("(", " OR ", ")")
+
+      val sessions =
+        using(db.select(Build_Info.table, columns, where))(stmt =>
+        {
+          SQL.iterator(stmt.executeQuery)(rs =>
+          {
+            val session_name = db.string(rs, Build_Info.session_name)
+            val session_entry =
+              Session_Entry(
+                chapter = db.string(rs, Build_Info.chapter),
+                groups = split_lines(db.string(rs, Build_Info.groups)),
+                threads = db.get(rs, Build_Info.threads, db.int _),
+                timing =
+                  Timing(Time.ms(db.long(rs, Build_Info.timing_elapsed)),
+                    Time.ms(db.long(rs, Build_Info.timing_cpu)),
+                    Time.ms(db.long(rs, Build_Info.timing_gc))),
+                ml_timing =
+                  Timing(Time.ms(db.long(rs, Build_Info.ml_timing_elapsed)),
+                    Time.ms(db.long(rs, Build_Info.ml_timing_cpu)),
+                    Time.ms(db.long(rs, Build_Info.ml_timing_gc))),
+                heap_size = db.get(rs, Build_Info.heap_size, db.long _),
+                status = Session_Status.withName(db.string(rs, Build_Info.status)),
+                ml_statistics =
+                  if (ml_statistics) uncompress_properties(db.bytes(rs, Build_Info.ml_statistics))
+                  else Nil)
+            session_name -> session_entry
+          }).toMap
+        })
+      Build_Info(sessions)
+    }
+  }
 }
--- a/src/Pure/Admin/isabelle_cronjob.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/Admin/isabelle_cronjob.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -89,7 +89,7 @@
   sealed case class Remote_Build(
     host: String,
     user: String = "",
-    port: Int = SSH.default_port,
+    port: Int = 0,
     shared_home: Boolean = true,
     options: String = "",
     args: String = "")
--- a/src/Pure/Admin/remote_dmg.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/Admin/remote_dmg.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -32,14 +32,14 @@
     Isabelle_Tool("remote_dmg", "build dmg on remote Mac OS X system", args =>
     {
       Command_Line.tool0 {
-        var port = SSH.default_port
+        var port = 0
         var volume_name = ""
 
         val getopts = Getopts("""
 Usage: isabelle remote_dmg [OPTIONS] USER@HOST TAR_GZ_FILE DMG_FILE
 
   Options are:
-    -p PORT      alternative SSH port (default: """ + SSH.default_port + """)
+    -p PORT      alternative SSH port
     -V NAME      specify volume name
 
   Turn the contents of a tar.gz file into a dmg file -- produced on a remote
--- a/src/Pure/General/bytes.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/General/bytes.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -121,6 +121,9 @@
 
   def isEmpty: Boolean = length == 0
 
+  def proper: Option[Bytes] = if (isEmpty) None else Some(this)
+  def proper_text: Option[String] = if (isEmpty) None else Some(text)
+
   def +(other: Bytes): Bytes =
     if (other.isEmpty) this
     else if (isEmpty) other
--- a/src/Pure/General/file.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/General/file.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -154,7 +154,7 @@
 
   /* read */
 
-  def read(file: JFile): String = Bytes.read(file).toString
+  def read(file: JFile): String = Bytes.read(file).text
   def read(path: Path): String = read(path.file)
 
 
--- a/src/Pure/General/properties.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/General/properties.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -9,6 +9,8 @@
 
 object Properties
 {
+  /* entries */
+
   type Entry = (java.lang.String, java.lang.String)
   type T = List[Entry]
 
@@ -31,7 +33,21 @@
   }
 
 
-  /* named entries */
+  /* multi-line entries */
+
+  val separator = '\u000b'
+
+  def encode_lines(s: java.lang.String): java.lang.String = s.replace('\n', separator)
+  def decode_lines(s: java.lang.String): java.lang.String = s.replace(separator, '\n')
+
+  def encode_lines(props: T): T = props.map({ case (a, b) => (a, encode_lines(b)) })
+  def decode_lines(props: T): T = props.map({ case (a, b) => (a, decode_lines(b)) })
+
+  def lines_nonempty(x: java.lang.String, ys: List[java.lang.String]): Properties.T =
+    if (ys.isEmpty) Nil else List((x, cat_lines(ys)))
+
+
+  /* entry types */
 
   class String(val name: java.lang.String)
   {
@@ -79,5 +95,32 @@
         case Some((_, value)) => Value.Double.unapply(value)
       }
   }
+
+
+  /* cached store */
+
+  trait Store
+  {
+    val xml_cache: XML.Cache = new XML.Cache()
+
+    def encode_properties(ps: T): Bytes =
+      Bytes(YXML.string_of_body(XML.Encode.properties(ps)))
+
+    def decode_properties(bs: Bytes): T =
+      xml_cache.props(XML.Decode.properties(YXML.parse_body(bs.text)))
+
+    def compress_properties(ps: List[T], options: XZ.Options = XZ.options()): Bytes =
+    {
+      if (ps.isEmpty) Bytes.empty
+      else Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).compress(options)
+    }
+
+    def uncompress_properties(bs: Bytes): List[T] =
+    {
+      if (bs.isEmpty) Nil
+      else
+        XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress().text)).
+          map(xml_cache.props(_))
+    }
+  }
 }
-
--- a/src/Pure/General/sql.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/General/sql.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -91,6 +91,9 @@
     def sql_decl(sql_type: Type.Value => String): String =
       sql_name + " " + sql_type(T) + (if (strict || primary_key) " NOT NULL" else "")
 
+    def sql_where_eq: String = "WHERE " + sql_name + " = "
+    def sql_where_equal(s: String): String = sql_where_eq + quote_string(s)
+
     override def toString: String = sql_decl(sql_type_default)
   }
 
@@ -195,57 +198,84 @@
 
     def statement(sql: String): PreparedStatement = connection.prepareStatement(sql)
 
-    def insert_statement(table: Table): PreparedStatement = statement(table.sql_insert)
+    def insert(table: Table): PreparedStatement = statement(table.sql_insert)
 
-    def delete_statement(table: Table, sql: String = ""): PreparedStatement =
+    def delete(table: Table, sql: String = ""): PreparedStatement =
       statement(table.sql_delete + (if (sql == "") "" else " " + sql))
 
-    def select_statement(table: Table, columns: List[Column],
-        sql: String = "", distinct: Boolean = false): PreparedStatement =
+    def select(table: Table, columns: List[Column], sql: String = "", distinct: Boolean = false)
+        : PreparedStatement =
       statement(table.sql_select(columns, distinct) + (if (sql == "") "" else " " + sql))
 
 
     /* input */
 
     def set_bool(stmt: PreparedStatement, i: Int, x: Boolean) { stmt.setBoolean(i, x) }
+    def set_bool(stmt: PreparedStatement, i: Int, x: Option[Boolean])
+    {
+      if (x.isDefined) set_bool(stmt, i, x.get)
+      else stmt.setNull(i, java.sql.Types.BOOLEAN)
+    }
+
     def set_int(stmt: PreparedStatement, i: Int, x: Int) { stmt.setInt(i, x) }
+    def set_int(stmt: PreparedStatement, i: Int, x: Option[Int])
+    {
+      if (x.isDefined) set_int(stmt, i, x.get)
+      else stmt.setNull(i, java.sql.Types.INTEGER)
+    }
+
     def set_long(stmt: PreparedStatement, i: Int, x: Long) { stmt.setLong(i, x) }
+    def set_long(stmt: PreparedStatement, i: Int, x: Option[Long])
+    {
+      if (x.isDefined) set_long(stmt, i, x.get)
+      else stmt.setNull(i, java.sql.Types.BIGINT)
+    }
+
     def set_double(stmt: PreparedStatement, i: Int, x: Double) { stmt.setDouble(i, x) }
+    def set_double(stmt: PreparedStatement, i: Int, x: Option[Double])
+    {
+      if (x.isDefined) set_double(stmt, i, x.get)
+      else stmt.setNull(i, java.sql.Types.DOUBLE)
+    }
+
     def set_string(stmt: PreparedStatement, i: Int, x: String) { stmt.setString(i, x) }
+    def set_string(stmt: PreparedStatement, i: Int, x: Option[String]): Unit =
+      set_string(stmt, i, x.orNull)
+
     def set_bytes(stmt: PreparedStatement, i: Int, bytes: Bytes)
-    { stmt.setBinaryStream(i, bytes.stream(), bytes.length) }
-    def set_date(stmt: PreparedStatement, i: Int, date: Date)
+    {
+      if (bytes == null) stmt.setBytes(i, null)
+      else stmt.setBinaryStream(i, bytes.stream(), bytes.length)
+    }
+    def set_bytes(stmt: PreparedStatement, i: Int, bytes: Option[Bytes]): Unit =
+      set_bytes(stmt, i, bytes.orNull)
+
+    def set_date(stmt: PreparedStatement, i: Int, date: Date): Unit
+    def set_date(stmt: PreparedStatement, i: Int, date: Option[Date]): Unit =
+      set_date(stmt, i, date.orNull)
 
 
     /* output */
 
-    def bool(rs: ResultSet, name: String): Boolean = rs.getBoolean(name)
-    def int(rs: ResultSet, name: String): Int = rs.getInt(name)
-    def long(rs: ResultSet, name: String): Long = rs.getLong(name)
-    def double(rs: ResultSet, name: String): Double = rs.getDouble(name)
-    def string(rs: ResultSet, name: String): String =
+    def bool(rs: ResultSet, column: Column): Boolean = rs.getBoolean(column.name)
+    def int(rs: ResultSet, column: Column): Int = rs.getInt(column.name)
+    def long(rs: ResultSet, column: Column): Long = rs.getLong(column.name)
+    def double(rs: ResultSet, column: Column): Double = rs.getDouble(column.name)
+    def string(rs: ResultSet, column: Column): String =
     {
-      val s = rs.getString(name)
+      val s = rs.getString(column.name)
       if (s == null) "" else s
     }
-    def bytes(rs: ResultSet, name: String): Bytes =
+    def bytes(rs: ResultSet, column: Column): Bytes =
     {
-      val bs = rs.getBytes(name)
+      val bs = rs.getBytes(column.name)
       if (bs == null) Bytes.empty else Bytes(bs)
     }
-    def date(rs: ResultSet, name: String): Date
+    def date(rs: ResultSet, column: Column): Date
 
-    def bool(rs: ResultSet, column: Column): Boolean = bool(rs, column.name)
-    def int(rs: ResultSet, column: Column): Int = int(rs, column.name)
-    def long(rs: ResultSet, column: Column): Long = long(rs, column.name)
-    def double(rs: ResultSet, column: Column): Double = double(rs, column.name)
-    def string(rs: ResultSet, column: Column): String = string(rs, column.name)
-    def bytes(rs: ResultSet, column: Column): Bytes = bytes(rs, column.name)
-    def date(rs: ResultSet, column: Column): Date = date(rs, column.name)
-
-    def get[A, B](rs: ResultSet, a: A, f: (ResultSet, A) => B): Option[B] =
+    def get[A](rs: ResultSet, column: Column, f: (ResultSet, Column) => A): Option[A] =
     {
-      val x = f(rs, a)
+      val x = f(rs, column)
       if (rs.wasNull) None else Some(x)
     }
 
@@ -299,9 +329,11 @@
     def sql_type(T: SQL.Type.Value): String = SQL.sql_type_sqlite(T)
 
     def set_date(stmt: PreparedStatement, i: Int, date: Date): Unit =
-      set_string(stmt, i, date_format(date))
-    def date(rs: ResultSet, name: String): Date =
-      date_format.parse(string(rs, name))
+      if (date == null) set_string(stmt, i, null: String)
+      else set_string(stmt, i, date_format(date))
+
+    def date(rs: ResultSet, column: SQL.Column): Date =
+      date_format.parse(string(rs, column))
 
     def rebuild { using(statement("VACUUM"))(_.execute()) }
   }
@@ -322,7 +354,7 @@
     password: String,
     database: String = "",
     host: String = "",
-    port: Int = default_port,
+    port: Int = 0,
     ssh: Option[SSH.Session] = None): Database =
   {
     init_jdbc
@@ -330,7 +362,7 @@
     require(user != "")
 
     val db_host = if (host != "") host else "localhost"
-    val db_port = if (port != default_port) ":" + port else ""
+    val db_port = if (port > 0 && port != default_port) ":" + port else ""
     val db_name = "/" + (if (database != "") database else user)
 
     val (url, name, port_forwarding) =
@@ -341,7 +373,9 @@
           val name = user + "@" + spec
           (url, name, None)
         case Some(ssh) =>
-          val fw = ssh.port_forwarding(remote_host = db_host, remote_port = port)
+          val fw =
+            ssh.port_forwarding(remote_host = db_host,
+              remote_port = if (port > 0) port else default_port)
           val url = "jdbc:postgresql://localhost:" + fw.local_port + db_name
           val name = user + "@" + fw + db_name + " via ssh " + ssh
           (url, name, Some(fw))
@@ -363,9 +397,11 @@
 
     // see https://jdbc.postgresql.org/documentation/head/8-date-time.html
     def set_date(stmt: PreparedStatement, i: Int, date: Date): Unit =
-      stmt.setObject(i, OffsetDateTime.from(date.to_utc.rep))
-    def date(rs: ResultSet, name: String): Date =
-      Date.instant(rs.getObject(name, classOf[OffsetDateTime]).toInstant)
+      if (date == null) stmt.setObject(i, null)
+      else stmt.setObject(i, OffsetDateTime.from(date.to_utc.rep))
+
+    def date(rs: ResultSet, column: SQL.Column): Date =
+      Date.instant(rs.getObject(column.name, classOf[OffsetDateTime]).toInstant)
 
     override def close() { super.close; port_forwarding.foreach(_.close) }
   }
--- a/src/Pure/General/ssh.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/General/ssh.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -74,9 +74,10 @@
   {
     def update_options(new_options: Options): Context = new Context(new_options, jsch)
 
-    def open_session(host: String, user: String = "", port: Int = default_port): Session =
+    def open_session(host: String, user: String = "", port: Int = 0): Session =
     {
-      val session = jsch.getSession(if (user == "") null else user, host, port)
+      val session =
+        jsch.getSession(if (user == "") null else user, host, if (port > 0) port else default_port)
 
       session.setUserInfo(No_User_Info)
       session.setServerAliveInterval(alive_interval(options))
--- a/src/Pure/General/time.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/General/time.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -31,6 +31,8 @@
 
 final class Time private(val ms: Long) extends AnyVal
 {
+  def proper_ms: Option[Long] = if (ms == 0) None else Some(ms)
+
   def seconds: Double = ms / 1000.0
   def minutes: Double = ms / 60000.0
 
--- a/src/Pure/System/isabelle_tool.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/System/isabelle_tool.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -74,7 +74,7 @@
       file_name <- File.read_dir(dir) if is_external(dir, file_name)
     } yield {
       val source = File.read(dir + Path.explode(file_name))
-      val name = Library.try_unsuffix(".scala", file_name) getOrElse file_name
+      val name = Library.perhaps_unsuffix(".scala", file_name)
       val description =
         split_lines(source).collectFirst({ case Description(s) => s }) getOrElse ""
       (name, description)
--- a/src/Pure/Thy/sessions.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/Thy/sessions.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -747,21 +747,11 @@
     val build_columns = List(sources, input_heaps, output_heap, return_code)
 
     val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
-
-    def where_session_name(name: String): String =
-      "WHERE " + session_name.sql_name + " = " + SQL.quote_string(name)
-
-    def select_statement(db: SQL.Database, name: String, columns: List[SQL.Column])
-        : PreparedStatement =
-      db.select_statement(table, columns, where_session_name(name))
-
-    def delete_statement(db: SQL.Database, name: String): PreparedStatement =
-      db.delete_statement(table, where_session_name(name))
   }
 
   def store(system_mode: Boolean = false): Store = new Store(system_mode)
 
-  class Store private[Sessions](system_mode: Boolean)
+  class Store private[Sessions](system_mode: Boolean) extends Properties.Store
   {
     /* file names */
 
@@ -772,30 +762,9 @@
 
     /* SQL database content */
 
-    val xml_cache: XML.Cache = new XML.Cache()
-
-    def encode_properties(ps: Properties.T): Bytes =
-      Bytes(YXML.string_of_body(XML.Encode.properties(ps)))
-
-    def decode_properties(bs: Bytes): Properties.T =
-      xml_cache.props(XML.Decode.properties(YXML.parse_body(bs.text)))
-
-    def compress_properties(ps: List[Properties.T], options: XZ.Options = XZ.options()): Bytes =
-    {
-      if (ps.isEmpty) Bytes.empty
-      else Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).compress(options)
-    }
-
-    def uncompress_properties(bs: Bytes): List[Properties.T] =
-    {
-      if (bs.isEmpty) Nil
-      else
-        XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress().text)).
-          map(xml_cache.props(_))
-    }
-
     def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
-      using(Session_Info.select_statement(db, name, List(column)))(stmt =>
+      using(db.select(Session_Info.table, List(column),
+        Session_Info.session_name.sql_where_equal(name)))(stmt =>
       {
         val rs = stmt.executeQuery
         if (!rs.next) Bytes.empty else db.bytes(rs, column)
@@ -852,8 +821,9 @@
     {
       db.transaction {
         db.create_table(Session_Info.table)
-        using(Session_Info.delete_statement(db, name))(_.execute)
-        using(db.insert_statement(Session_Info.table))(stmt =>
+        using(db.delete(Session_Info.table, Session_Info.session_name.sql_where_equal(name)))(
+          _.execute)
+        using(db.insert(Session_Info.table))(stmt =>
         {
           db.set_string(stmt, 1, name)
           db.set_bytes(stmt, 2, encode_properties(build_log.session_timing))
@@ -894,7 +864,8 @@
     }
 
     def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] =
-      using(Session_Info.select_statement(db, name, Session_Info.build_columns))(stmt =>
+      using(db.select(Session_Info.table, Session_Info.build_columns,
+        Session_Info.session_name.sql_where_equal(name)))(stmt =>
       {
         val rs = stmt.executeQuery
         if (!rs.next) None
--- a/src/Pure/library.scala	Thu Apr 27 15:59:00 2017 +0100
+++ b/src/Pure/library.scala	Sat Apr 29 11:06:46 2017 +0200
@@ -131,6 +131,9 @@
   def try_unsuffix(sffx: String, s: String): Option[String] =
     if (s.endsWith(sffx)) Some(s.substring(0, s.length - sffx.length)) else None
 
+  def perhaps_unprefix(prfx: String, s: String): String = try_unprefix(prfx, s) getOrElse s
+  def perhaps_unsuffix(sffx: String, s: String): String = try_unsuffix(sffx, s) getOrElse s
+
   def trim_line(s: String): String =
     if (s.endsWith("\r\n")) s.substring(0, s.length - 2)
     else if (s.endsWith("\r") || s.endsWith("\n")) s.substring(0, s.length - 1)
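
Not part of the changeset above: a minimal, hypothetical usage sketch of the new Build_Log.Store API introduced in build_log.scala (open_database, write_info) together with Log_File.find_files. It assumes an Isabelle/Scala environment where Options.init() and Path.explode are available and the build_log_* options added in etc/options have been configured; the object name Build_Log_Import is invented for illustration.

/* hypothetical sketch -- not part of this changeset */

import isabelle._

object Build_Log_Import
{
  def main(args: Array[String])
  {
    val options = Options.init()                       // picks up the new build_log_* options
    val store = Build_Log.store(options)
    val db = store.open_database()                     // PostgreSQL, optionally via SSH tunnel
    try {
      val dirs = args.toList.map(Path.explode)
      val files = Build_Log.Log_File.find_files(dirs)  // *.log, *.log.gz, *.log.xz with known prefixes
      store.write_info(db, files)                      // fills the meta_info and build_info tables
    }
    finally { db.close() }
  }
}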