author paulson
date Tue, 26 Mar 2019 17:01:55 +0000
changeset 69987 e7648f104d6b
parent 69985 8e916ed23d17
parent 69986 f2d327275065
child 69993 3fd083253a1c
merged
--- a/src/HOL/Data_Structures/Array_Braun.thy	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/HOL/Data_Structures/Array_Braun.thy	Tue Mar 26 17:01:55 2019 +0000
@@ -516,22 +516,29 @@
 text \<open>The code and the proof are originally due to Thomas Sewell (except running time).\<close>
 
 function list_fast_rec :: "'a tree list \<Rightarrow> 'a list" where
-"list_fast_rec ts = (if ts = [] then [] else
-  let us = filter (\<lambda>t. t \<noteq> Leaf) ts
-  in map value us @ list_fast_rec (map left us @ map right us))"
+"list_fast_rec ts = (let us = filter (\<lambda>t. t \<noteq> Leaf) ts in
+  if us = [] then [] else
+  map value us @ list_fast_rec (map left us @ map right us))"
 by (pat_completeness, auto)
 
-lemma list_fast_rec_term: "\<lbrakk> ts \<noteq> []; us = filter (\<lambda>t. t \<noteq> \<langle>\<rangle>) ts \<rbrakk> \<Longrightarrow>
-  (map left us @ map right us, ts) \<in> measure (sum_list \<circ> map (\<lambda>t. 2 * size t + 1))"
-apply (clarsimp simp: sum_list_addf[symmetric] sum_list_map_filter')
-apply (rule sum_list_strict_mono; simp)
-apply (case_tac x; simp)
-done
+lemma list_fast_rec_term1: "ts \<noteq> [] \<Longrightarrow> Leaf \<notin> set ts \<Longrightarrow>
+  sum_list (map (size o left) ts) + sum_list (map (size o right) ts) < sum_list (map size ts)"
+  apply (clarsimp simp: sum_list_addf[symmetric] sum_list_map_filter')
+  apply (rule sum_list_strict_mono; clarsimp?)
+  apply (case_tac x; simp)
+  done
+
+lemma list_fast_rec_term: "us \<noteq> [] \<Longrightarrow> us = filter (\<lambda>t. t \<noteq> \<langle>\<rangle>) ts \<Longrightarrow>
+  sum_list (map (size o left) us) + sum_list (map (size o right) us) < sum_list (map size ts)"
+  apply (rule order_less_le_trans, rule list_fast_rec_term1, simp_all)
+  apply (rule sum_list_filter_le_nat)
+  done
 
 termination
-apply (relation "measure (sum_list o map (\<lambda>t. 2 * size t + 1))")
- apply simp
-using list_fast_rec_term by auto
+  apply (relation "measure (sum_list o map size)")
+   apply simp
+  apply (simp add: list_fast_rec_term)
+  done
 
 declare list_fast_rec.simps[simp del]
 
@@ -539,19 +546,19 @@
 "list_fast t = list_fast_rec [t]"
 
 function t_list_fast_rec :: "'a tree list \<Rightarrow> nat" where
-"t_list_fast_rec ts = (if ts = [] then 0 else
-  let us = filter (\<lambda>t. t \<noteq> Leaf) ts
-  in length ts + 5 * length us + t_list_fast_rec (map left us @ map right us))"
+"t_list_fast_rec ts = (let us = filter (\<lambda>t. t \<noteq> Leaf) ts
+  in length ts + (if us = [] then 0 else
+  5 * length us + t_list_fast_rec (map left us @ map right us)))"
 by (pat_completeness, auto)
 
 termination
-apply (relation "measure (sum_list o map (\<lambda>t. 2 * size t + 1))")
- apply simp
-using list_fast_rec_term by auto
+  apply (relation "measure (sum_list o map size)")
+   apply simp
+  apply (simp add: list_fast_rec_term)
+  done
 
 declare t_list_fast_rec.simps[simp del]
 
-
 paragraph "Functional Correctness"
 
 lemma list_fast_rec_all_Leaf:
@@ -588,7 +595,7 @@
       apply(auto simp: nth_append braun_list_not_Nil take_nths_eq_single braun_list_Nil hd_drop_conv_nth)
       done
     thus ?thesis
-      by (clarsimp simp: list_fast_rec.simps[of ts] o_def list_fast_rec_all_Leaf)
+      by (clarsimp simp: list_fast_rec.simps[of ts] o_def list_fast_rec_all_Leaf Let_def)
   next
     case False
     with less.prems(2) have *:
@@ -614,7 +621,6 @@
   "braun t \<Longrightarrow> list_fast t = list t"
 by (simp add: list_fast_def take_nths_00 braun_list_eq list_fast_rec_correct[where k=0])
 
-
 paragraph "Running Time Analysis"
 
 lemma sum_tree_list_children: "\<forall>t \<in> set ts. t \<noteq> Leaf \<Longrightarrow>
@@ -623,21 +629,21 @@
 
 theorem t_list_fast_rec_ub:
   "t_list_fast_rec ts \<le> sum_list (map (\<lambda>t. 7*size t + 1) ts)"
-proof (induction ts rule: measure_induct_rule[where f="sum_list o map (\<lambda>t. 2*size t + 1)"])
+proof (induction ts rule: measure_induct_rule[where f="sum_list o map size"])
   case (less ts)
+  let ?us = "filter (\<lambda>t. t \<noteq> Leaf) ts"
   show ?case
   proof cases
-    assume "ts = []"
-    thus ?thesis using t_list_fast_rec.simps[of ts] by(simp add: Let_def)
-  next
-    assume "ts \<noteq> []"
-    let ?us = "filter (\<lambda>t. t \<noteq> Leaf) ts"
+    assume "?us = []"
+    thus ?thesis using t_list_fast_rec.simps[of ts]
+      by(simp add: Let_def sum_list_Suc)
+    next
+    assume "?us \<noteq> []"
     let ?children = "map left ?us @ map right ?us"
     have "t_list_fast_rec ts = t_list_fast_rec ?children + 5 * length ?us + length ts"
-     using \<open>ts \<noteq> []\<close> t_list_fast_rec.simps[of ts] by(simp add: Let_def)
+     using \<open>?us \<noteq> []\<close> t_list_fast_rec.simps[of ts] by(simp add: Let_def)
     also have "\<dots> \<le> (\<Sum>t\<leftarrow>?children. 7 * size t + 1) + 5 * length ?us + length ts"
-      using less[of "map left ?us @ map right ?us"]
-        list_fast_rec_term[of ts, OF \<open>ts \<noteq> []\<close>]
+      using less[of "?children"] list_fast_rec_term[of "?us"] \<open>?us \<noteq> []\<close>
       by (simp)
     also have "\<dots> = (\<Sum>t\<leftarrow>?children. 7*size t) + 7 * length ?us + length ts"
       by(simp add: sum_list_Suc o_def)
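
Note on the revised termination argument (editorial sketch, not part of the changeset):
the measure drops from sum_list o map (\<lambda>t. 2 * size t + 1) to the plain
sum_list o map size.  With us = filter (\<lambda>t. t \<noteq> Leaf) ts and us \<noteq> [],
every t in us satisfies size t = size (left t) + size (right t) + 1, so in LaTeX notation

  \sum_{t \leftarrow us} \bigl(|\mathit{left}\;t| + |\mathit{right}\;t|\bigr)
    \;=\; \sum_{t \leftarrow us} \bigl(|t| - 1\bigr)
    \;<\; \sum_{t \leftarrow us} |t|
    \;\le\; \sum_{t \leftarrow ts} |t|

which is list_fast_rec_term1 (strict decrease over the non-Leaf trees, needing us \<noteq> [])
chained with sum_list_filter_le_nat (filtering can only shrink the sum), i.e. exactly
lemma list_fast_rec_term as used by both termination proofs in this theory.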
--- a/src/Pure/Admin/afp.scala	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/Pure/Admin/afp.scala	Tue Mar 26 17:01:55 2019 +0000
@@ -7,6 +7,10 @@
 package isabelle
 
 
+import java.time.LocalDate
+import scala.collection.immutable.SortedMap
+
+
 object AFP
 {
   val repos_source = "https://isabelle.sketis.net/repos/afp-devel"
@@ -21,7 +25,41 @@
   def init(options: Options, base_dir: Path = Path.explode("$AFP_BASE")): AFP =
     new AFP(options, base_dir)
 
-  sealed case class Entry(name: String, sessions: List[String])
+
+  /* entries */
+
+  def parse_date(s: String): Date =
+  {
+    val t = Date.Formatter.pattern("uuuu-MM-dd").parse(s)
+    Date(LocalDate.from(t).atStartOfDay(Date.timezone_berlin))
+  }
+
+  sealed case class Entry(name: String, metadata: Properties.T, sessions: List[String])
+  {
+    def get(prop: String): Option[String] = Properties.get(metadata, prop)
+    def get_string(prop: String): String = get(prop).getOrElse("")
+    def get_strings(prop: String): List[String] =
+      Library.space_explode(',', get_string(prop)).map(_.trim).filter(_.nonEmpty)
+
+    def title: String = get_string("title")
+    def authors: List[String] = get_strings("author")
+    def date: Date =
+      parse_date(get("date").getOrElse(error("Missing date for entry " + quote(name))))
+    def topics: List[String] = get_strings("topic")
+    def `abstract`: String = get_string("abstract").trim
+    def maintainers: List[String] = get_strings("notify")
+    def contributors: List[String] = get_strings("contributors")
+    def license: String = get("license").getOrElse("BSD")
+
+    def rdf_meta_data: Properties.T =
+      RDF.meta_data(
+        proper_string(title).map(Markup.META_TITLE -> _).toList :::
+        authors.map(Markup.META_CREATOR -> _) :::
+        contributors.map(Markup.META_CONTRIBUTOR -> _) :::
+        List(Markup.META_DATE -> RDF.date_format(date)) :::
+        List(Markup.META_LICENSE -> license) :::
+        proper_string(`abstract`).map(Markup.META_DESCRIPTION -> _).toList)
+  }
 }
 
 class AFP private(options: Options, val base_dir: Path)
@@ -31,14 +69,86 @@
   val main_dir: Path = base_dir + Path.explode("thys")
 
 
-  /* entries and sessions */
+  /* metadata */
+
+  private val entry_metadata: Map[String, Properties.T] =
+  {
+    val metadata_file = base_dir + Path.explode("metadata/metadata")
+
+    var result = Map.empty[String, Properties.T]
+    var section = ""
+    var props = List.empty[Properties.Entry]
+
+    val Section = """^\[(\S+)\]\s*$""".r
+    val Property = """^(\S+)\s*=(.*)$""".r
+    val Extra_Line = """^\s+(.*)$""".r
+    val Blank_Line = """^\s*$""".r
+
+    def flush()
+    {
+      if (section != "") result += (section -> props.reverse.filter(p => p._2.nonEmpty))
+      section = ""
+      props = Nil
+    }
+
+    for ((line, i) <- split_lines(File.read(metadata_file)).zipWithIndex)
+    {
+      def err(msg: String): Nothing =
+        error(msg + Position.here(Position.Line_File(i + 1, metadata_file.expand.implode)))
 
-  val entries: List[AFP.Entry] =
-    (for (name <- Sessions.parse_roots(main_dir + Sessions.ROOTS)) yield {
-      val sessions =
-        Sessions.parse_root_entries(main_dir + Path.explode(name) + Sessions.ROOT).map(_.name)
-      AFP.Entry(name, sessions)
-    }).sortBy(_.name)
+      line match {
+        case Section(name) => flush(); section = name
+        case Property(a, b) =>
+          if (section == "") err("Property without a section")
+          props = (a -> b.trim) :: props
+        case Extra_Line(line) =>
+          props match {
+            case Nil => err("Extra line without a property")
+            case (a, b) :: rest => props = (a, b + "\n" + line.trim) :: rest
+          }
+        case Blank_Line() =>
+        case _ => err("Bad input")
+      }
+    }
+
+    flush()
+    result
+  }
+
+
+  /* entries */
+
+  val entries_map: SortedMap[String, AFP.Entry] =
+  {
+    val entries =
+      for (name <- Sessions.parse_roots(main_dir + Sessions.ROOTS)) yield {
+        val metadata =
+          entry_metadata.getOrElse(name, error("Entry without metadata: " + quote(name)))
+        val sessions =
+          Sessions.parse_root_entries(main_dir + Path.explode(name) + Sessions.ROOT).map(_.name)
+        AFP.Entry(name, metadata, sessions)
+      }
+
+    val entries_map =
+      (SortedMap.empty[String, AFP.Entry] /: entries)({ case (m, e) => m + (e.name -> e) })
+
+    val extra_metadata =
+      (for ((name, _) <- entry_metadata.iterator if !entries_map.isDefinedAt(name)) yield name).
+        toList.sorted
+    if (extra_metadata.nonEmpty)
+      error("Meta data without entry: " + commas_quote(extra_metadata))
+
+    entries_map
+  }
+
+  val entries: List[AFP.Entry] = entries_map.toList.map(_._2)
+
+
+  /* sessions */
+
+  val sessions_map: SortedMap[String, AFP.Entry] =
+    (SortedMap.empty[String, AFP.Entry] /: entries)(
+      { case (m1, e) => (m1 /: e.sessions)({ case (m2, s) => m2 + (s -> e) }) })
 
   val sessions: List[String] = entries.flatMap(_.sessions)
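
A minimal usage sketch for the new metadata interface (illustration only, not part of the
changeset; it assumes Options.init() and the usual $AFP_BASE setup, and the object name is
hypothetical):

  package isabelle

  object AFP_Metadata_Demo  // hypothetical helper, for illustration
  {
    def print_entries(): Unit =
    {
      val afp = AFP.init(Options.init())
      for (entry <- afp.entries) {
        // title, authors and license are read from metadata/metadata via Entry.get_string etc.
        println(entry.name + ": " + entry.title +
          " by " + entry.authors.mkString(", ") + " [" + entry.license + "]")
        // entry.rdf_meta_data yields the Properties.T that RDF.meta_data (rdf.scala) consumes
      }
    }
  }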
 
--- a/src/Pure/Admin/build_log.scala	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/Pure/Admin/build_log.scala	Tue Mar 26 17:01:55 2019 +0000
@@ -8,7 +8,6 @@
 
 
 import java.io.{File => JFile}
-import java.time.ZoneId
 import java.time.format.{DateTimeFormatter, DateTimeParseException}
 import java.util.Locale
 
@@ -157,7 +156,7 @@
           List(Locale.ENGLISH, Locale.GERMAN)) :::
         List(
           DateTimeFormatter.RFC_1123_DATE_TIME,
-          Date.Formatter.pattern("EEE MMM d HH:mm:ss yyyy").withZone(ZoneId.of("Europe/Berlin")))
+          Date.Formatter.pattern("EEE MMM d HH:mm:ss yyyy").withZone(Date.timezone_berlin))
 
       def tune_timezone(s: String): String =
         s match {
@@ -429,7 +428,7 @@
               log_file.lines.takeWhile(_ != Jenkins.CONFIGURATION).collectFirst({
                 case Jenkins.Host(a, b) => a + "." + b
               }).getOrElse("")
-            parse(Jenkins.engine, host, start.to(ZoneId.of("Europe/Berlin")), Jenkins.No_End,
+            parse(Jenkins.engine, host, start.to(Date.timezone_berlin), Jenkins.No_End,
               Jenkins.Isabelle_Version, Jenkins.AFP_Version)
           case _ => Meta_Info.empty
         }
--- a/src/Pure/Admin/jenkins.scala	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/Pure/Admin/jenkins.scala	Tue Mar 26 17:01:55 2019 +0000
@@ -8,7 +8,6 @@
 
 
 import java.net.URL
-import java.time.ZoneId
 
 import scala.util.matching.Regex
 
@@ -71,7 +70,7 @@
     main_log: URL,
     session_logs: List[(String, String, URL)])
   {
-    val date: Date = Date(Time.ms(timestamp), ZoneId.of("Europe/Berlin"))
+    val date: Date = Date(Time.ms(timestamp), Date.timezone_berlin)
 
     def log_filename: Path =
       Build_Log.log_filename(Build_Log.Jenkins.engine, date, List(job_name))
--- a/src/Pure/General/date.scala	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/Pure/General/date.scala	Tue Mar 26 17:01:55 2019 +0000
@@ -75,6 +75,9 @@
 
   /* date operations */
 
+  def timezone_utc: ZoneId = ZoneId.of("UTC")
+  def timezone_berlin: ZoneId = ZoneId.of("Europe/Berlin")
+
   def timezone(): ZoneId = ZoneId.systemDefault
 
   def now(zone: ZoneId = timezone()): Date = new Date(ZonedDateTime.now(zone))
@@ -91,7 +94,6 @@
     new Date(ZonedDateTime.of(rep.toLocalDate, LocalTime.MIDNIGHT, rep.getZone))
 
   def to(zone: ZoneId): Date = new Date(rep.withZoneSameInstant(zone))
-  def to_utc: Date = to(ZoneId.of("UTC"))
 
   def unix_epoch: Long = rep.toEpochSecond
   def instant: Instant = Instant.from(rep)
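
Migration sketch for the removed Date.to_utc (illustration only, not part of the changeset):

  val stamp = Date.now(Date.timezone_berlin)   // replaces ZoneId.of("Europe/Berlin") at call sites
  val utc = stamp.to(Date.timezone_utc)        // replaces the removed stamp.to_utc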
--- a/src/Pure/General/rdf.scala	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/Pure/General/rdf.scala	Tue Mar 26 17:01:55 2019 +0000
@@ -72,29 +72,31 @@
   def long(x: Long): XML.Body = string(Value.Long(x))
   def double(x: Double): XML.Body = string(Value.Double(x))
   def seconds(x: Time): XML.Body = double(x.seconds)
-  def date_time_stamp(x: Date): XML.Body = string(Date.Format("uuuu-MM-dd'T'HH:mm:ss.SSSxxx")(x))
+
+  val date_format: Date.Format = Date.Format("uuuu-MM-dd'T'HH:mm:ss.SSSxxx")
+  def date_time_stamp(x: Date): XML.Body = string(date_format(x))
 
 
   /* predicates */
 
   object Property  // binary relation with plain value
   {
-    val title: String = dcterms("title")
-    val creator: String = dcterms("creator")
-    val contributor: String = dcterms("contributor")
-    val date: String = dcterms("date")
-    val description: String = dcterms("description")
-    val license: String = dcterms("license")
+    def title: String = dcterms("title")
+    def creator: String = dcterms("creator")
+    def contributor: String = dcterms("contributor")
+    def date: String = dcterms("date")
+    def license: String = dcterms("license")
+    def description: String = dcterms("description")
   }
 
-  private val meta_data_table =
+  private lazy val meta_data_table =
     Map(
       Markup.META_TITLE -> Property.title,
       Markup.META_CREATOR -> Property.creator,
       Markup.META_CONTRIBUTOR -> Property.contributor,
       Markup.META_DATE -> Property.date,
-      Markup.META_DESCRIPTION -> Property.description,
-      Markup.META_LICENSE -> Property.license)
+      Markup.META_LICENSE -> Property.license,
+      Markup.META_DESCRIPTION -> Property.description)
 
   def meta_data(props: Properties.T): Properties.T =
     props.flatMap({ case (a, b) => meta_data_table.get(a).map((_, b)) })
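
How the AFP and RDF sides fit together (illustration only, with hypothetical values, within
the isabelle package): Entry.rdf_meta_data builds a Properties.T keyed by Markup.META_*,
which RDF.meta_data translates into Dublin Core predicates:

  val props: Properties.T =
    List(
      Markup.META_TITLE -> "Example Entry",   // hypothetical values
      Markup.META_CREATOR -> "A. Author",
      Markup.META_LICENSE -> "BSD")

  val dc: Properties.T = RDF.meta_data(props)
  // dc pairs each value with its predicate URI, e.g. RDF.Property.title -> "Example Entry",
  // RDF.Property.creator -> "A. Author", RDF.Property.license -> "BSD"
  // (order follows the input props, since meta_data flatMaps over them)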
--- a/src/Pure/General/sql.scala	Tue Mar 26 17:01:36 2019 +0000
+++ b/src/Pure/General/sql.scala	Tue Mar 26 17:01:55 2019 +0000
@@ -497,7 +497,7 @@
     // see https://jdbc.postgresql.org/documentation/head/8-date-time.html
     def update_date(stmt: SQL.Statement, i: Int, date: Date): Unit =
       if (date == null) stmt.rep.setObject(i, null)
-      else stmt.rep.setObject(i, OffsetDateTime.from(date.to_utc.rep))
+      else stmt.rep.setObject(i, OffsetDateTime.from(date.to(Date.timezone_utc).rep))
 
     def date(res: SQL.Result, column: SQL.Column): Date =
     {