changeset:   68093:b98c5877b0f3
parent:      68085:7fe53815cce9
parent:      68092:888d35a19866
child:       68094:0b66aca9c965
author:      wenzelm
date:        Sun, 06 May 2018 23:03:43 +0200
summary:     merged
--- a/src/Pure/General/bytes.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/General/bytes.scala	Sun May 06 23:03:43 2018 +0200
@@ -39,6 +39,23 @@
     }
 
 
+  def hex(s: String): Bytes =
+  {
+    def err(): Nothing = error("Malformed hexadecimal representation of bytes\n" + s)
+    val len = s.length
+    if (len % 2 != 0) err()
+
+    val n = len / 2
+    val a = new Array[Byte](n)
+    for (i <- 0 until n) {
+      val j = 2 * i
+      try { a(i) = Integer.parseInt(s.substring(j, j + 2), 16).toByte }
+      catch { case _: NumberFormatException => err() }
+    }
+    new Bytes(a, 0, n)
+  }
+
+
   /* read */
 
   def read_stream(stream: InputStream, limit: Int = Integer.MAX_VALUE, hint: Int = 1024): Bytes =
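
The new Bytes.hex decoder is the Scala-side inverse of the hex_string encoder added to Pure/library.ML further down in this changeset. A minimal stand-alone round-trip sketch (the encoder is re-implemented here purely for illustration; it is not part of the change):

  // two lower-case hex digits per byte, matching the output of library.ML's hex_string
  def to_hex(bytes: Array[Byte]): String =
    bytes.iterator.map(b => "%02x".format(b & 0xff)).mkString

  val original = "theory export".getBytes("UTF-8")
  val decoded = isabelle.Bytes.hex(to_hex(original))  // holds the same 13 bytes as original
  // an odd-length or non-hexadecimal string makes Bytes.hex fail with
  // "Malformed hexadecimal representation of bytes"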
--- a/src/Pure/General/sha1.ML	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/General/sha1.ML	Sun May 06 23:03:43 2018 +0200
@@ -159,7 +159,6 @@
 
 (* digesting *)
 
-fun hex_digit i = if i < 10 then chr (Char.ord #"0" + i) else chr (Char.ord #"a" + i - 10);
 fun hex_string byte = op ^ (apply2 hex_digit (Integer.div_mod (Word8.toInt byte) 16));
 
 in
--- a/src/Pure/General/sql.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/General/sql.scala	Sun May 06 23:03:43 2018 +0200
@@ -120,7 +120,8 @@
     def defined: String = ident + " IS NOT NULL"
     def undefined: String = ident + " IS NULL"
 
-    def where_equal(s: String): Source = "WHERE " + ident + " = " + string(s)
+    def equal(s: String): Source = ident + " = " + string(s)
+    def where_equal(s: String): Source = "WHERE " + equal(s)
 
     override def toString: Source = ident
   }
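
Factoring equal out of where_equal lets callers conjoin several per-column conditions; Export.Data.where_equal in the new Pure/Thy/export.scala below does exactly that. A hypothetical sketch of the pattern (column names chosen for illustration):

  val session_name = SQL.Column.string("session_name")
  val theory_name = SQL.Column.string("theory_name")

  // "WHERE session_name = ... AND theory_name = ..."
  def where_both(session: String, theory: String): SQL.Source =
    "WHERE " + session_name.equal(session) + " AND " + theory_name.equal(theory)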
--- a/src/Pure/PIDE/markup.ML	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/PIDE/markup.ML	Sun May 06 23:03:43 2018 +0200
@@ -202,6 +202,9 @@
   val task_statistics: Properties.entry
   val command_timing: Properties.entry
   val theory_timing: Properties.entry
+  val exportN: string
+  type export_args = {id: string option, theory_name: string, name: string, compress: bool}
+  val export: export_args -> Properties.T
   val loading_theory: string -> Properties.T
   val dest_loading_theory: Properties.T -> string option
   val build_session_finished: Properties.T
@@ -638,6 +641,12 @@
 
 val theory_timing = (functionN, "theory_timing");
 
+val exportN = "export";
+type export_args = {id: string option, theory_name: string, name: string, compress: bool}
+fun export ({id, theory_name, name, compress}: export_args) =
+  [(functionN, exportN), (idN, the_default "" id),
+    ("theory_name", theory_name), (nameN, name), ("compress", Value.print_bool compress)];
+
 fun loading_theory name = [("function", "loading_theory"), ("name", name)];
 
 fun dest_loading_theory [("function", "loading_theory"), ("name", name)] = SOME name
--- a/src/Pure/PIDE/markup.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/PIDE/markup.scala	Sun May 06 23:03:43 2018 +0200
@@ -568,6 +568,31 @@
       }
   }
 
+  val EXPORT = "export"
+  object Export
+  {
+    sealed case class Args(id: Option[String], theory_name: String, name: String, compress: Boolean)
+
+    val THEORY_NAME = "theory_name"
+    val COMPRESS = "compress"
+
+    def dest_inline(props: Properties.T): Option[(Args, Bytes)] =
+      props match {
+        case
+          List((THEORY_NAME, theory_name), (NAME, name), (COMPRESS, Value.Boolean(compress)),
+            (EXPORT, hex)) => Some((Args(None, theory_name, name, compress), Bytes.hex(hex)))
+        case _ => None
+      }
+
+    def unapply(props: Properties.T): Option[Args] =
+      props match {
+        case List((FUNCTION, EXPORT), (ID, id),
+          (THEORY_NAME, theory_name), (NAME, name), (COMPRESS, Value.Boolean(compress))) =>
+            Some(Args(proper_string(id), theory_name, name, compress))
+        case _ => None
+      }
+  }
+
   val BUILD_SESSION_FINISHED = "build_session_finished"
   val Build_Session_Finished: Properties.T = List((FUNCTION, BUILD_SESSION_FINISHED))
 
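
For orientation, the unapply above matches property lists of the shape produced by Markup.export on the ML side; a hypothetical example (all values invented):

  val props: Properties.T =
    List((Markup.FUNCTION, Markup.EXPORT), (Markup.ID, "12345"),
      (Markup.Export.THEORY_NAME, "HOL.Nat"), (Markup.NAME, "facts"),
      (Markup.Export.COMPRESS, "true"))

  val args: Option[Markup.Export.Args] =
    props match {
      case Markup.Export(a) => Some(a)  // Args(Some("12345"), "HOL.Nat", "facts", compress = true)
      case _ => None
    }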
--- a/src/Pure/PIDE/session.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/PIDE/session.scala	Sun May 06 23:03:43 2018 +0200
@@ -435,6 +435,9 @@
               case Protocol.Theory_Timing(_, _) =>
                 // FIXME
 
+              case Markup.Export(_) =>
+                // FIXME
+
               case Markup.Assign_Update =>
                 msg.text match {
                   case Protocol.Assign_Update(id, update) =>
--- a/src/Pure/ROOT.ML	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/ROOT.ML	Sun May 06 23:03:43 2018 +0200
@@ -300,6 +300,7 @@
 ML_file "PIDE/command.ML";
 ML_file "PIDE/query_operation.ML";
 ML_file "PIDE/resources.ML";
+ML_file "Thy/export.ML";
 ML_file "Thy/present.ML";
 ML_file "Thy/thy_info.ML";
 ML_file "Thy/sessions.ML";
--- a/src/Pure/System/process_result.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/System/process_result.scala	Sun May 06 23:03:43 2018 +0200
@@ -15,7 +15,8 @@
 {
   def out: String = cat_lines(out_lines)
   def err: String = cat_lines(err_lines)
-  def error(s: String): Process_Result = copy(err_lines = err_lines ::: List(s))
+  def errors(errs: List[String]): Process_Result = copy(err_lines = err_lines ::: errs)
+  def error(err: String): Process_Result = errors(List(err))
 
   def was_timeout: Process_Result = copy(rc = 1, timeout = true)
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Thy/export.ML	Sun May 06 23:03:43 2018 +0200
@@ -0,0 +1,26 @@
+(*  Title:      Pure/Thy/export.ML
+    Author:     Makarius
+
+Manage theory exports.
+*)
+
+signature EXPORT =
+sig
+  val export: theory -> string -> Output.output -> unit
+  val export_uncompressed: theory -> string -> Output.output -> unit
+end;
+
+structure Export: EXPORT =
+struct
+
+fun gen_export compress thy name output =
+  (Output.try_protocol_message o Markup.export)
+   {id = Position.get_id (Position.thread_data ()),
+    theory_name = Context.theory_long_name thy,
+    name = name,
+    compress = compress} [output];
+
+val export = gen_export true;
+val export_uncompressed = gen_export false;
+
+end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Thy/export.scala	Sun May 06 23:03:43 2018 +0200
@@ -0,0 +1,119 @@
+/*  Title:      Pure/Thy/export.scala
+    Author:     Makarius
+
+Manage theory exports.
+*/
+
+package isabelle
+
+object Export
+{
+  /* SQL data model */
+
+  object Data
+  {
+    val session_name = SQL.Column.string("session_name").make_primary_key
+    val theory_name = SQL.Column.string("theory_name").make_primary_key
+    val name = SQL.Column.string("name").make_primary_key
+    val compressed = SQL.Column.bool("compressed")
+    val body = SQL.Column.bytes("body")
+
+    val table =
+      SQL.Table("isabelle_exports", List(session_name, theory_name, name, compressed, body))
+
+    def where_equal(session_name: String, theory_name: String): SQL.Source =
+      "WHERE " + Data.session_name.equal(session_name) +
+      " AND " + Data.theory_name.equal(theory_name)
+  }
+
+  def read_names(db: SQL.Database, session_name: String, theory_name: String): List[String] =
+  {
+    val select = Data.table.select(List(Data.name), Data.where_equal(session_name, theory_name))
+    db.using_statement(select)(stmt =>
+      stmt.execute_query().iterator(res => res.string(Data.name)).toList)
+  }
+
+  sealed case class Entry(
+    session_name: String, theory_name: String, name: String, compressed: Boolean, body: Bytes)
+  {
+    override def toString: String = theory_name + ":" + name
+
+    def message(msg: String): String =
+      msg + " " + quote(name) + " for theory " + quote(theory_name)
+
+    lazy val compressed_body: Bytes = if (compressed) body else body.compress()
+    lazy val uncompressed_body: Bytes = if (compressed) body.uncompress() else body
+
+    def write(db: SQL.Database)
+    {
+      db.using_statement(Data.table.insert())(stmt =>
+      {
+        stmt.string(1) = session_name
+        stmt.string(2) = theory_name
+        stmt.string(3) = name
+        stmt.bool(4) = compressed
+        stmt.bytes(5) = body
+        stmt.execute()
+      })
+    }
+  }
+
+  def read_entry(db: SQL.Database, session_name: String, theory_name: String, name: String): Entry =
+  {
+    val select =
+      Data.table.select(List(Data.compressed, Data.body),
+        Data.where_equal(session_name, theory_name) + " AND " + Data.name.equal(name))
+    db.using_statement(select)(stmt =>
+    {
+      val res = stmt.execute_query()
+      if (res.next()) {
+        val compressed = res.bool(Data.compressed)
+        val body = res.bytes(Data.body)
+        Entry(session_name, theory_name, name, compressed, body)
+      }
+      else error(Entry(session_name, theory_name, name, false, Bytes.empty).message("Bad export"))
+    })
+  }
+
+
+  /* database consumer thread */
+
+  def consumer(db: SQL.Database): Consumer = new Consumer(db)
+
+  class Consumer private[Export](db: SQL.Database)
+  {
+    db.create_table(Data.table)
+
+    private val export_errors = Synchronized[List[String]](Nil)
+
+    private val consumer =
+      Consumer_Thread.fork(name = "export")(consume = (future: Future[Entry]) =>
+        {
+          val entry = future.join
+
+          db.transaction {
+            if (read_names(db, entry.session_name, entry.theory_name).contains(entry.name)) {
+              export_errors.change(errs => entry.message("Duplicate export") :: errs)
+            }
+            else entry.write(db)
+          }
+          true
+        })
+
+    def apply(session_name: String, args: Markup.Export.Args, body: Bytes)
+    {
+      consumer.send(
+        if (args.compress)
+          Future.fork(Entry(session_name, args.theory_name, args.name, true, body.compress()))
+        else
+          Future.value(Entry(session_name, args.theory_name, args.name, false, body)))
+    }
+
+    def shutdown(close: Boolean = false): List[String] =
+    {
+      consumer.shutdown()
+      if (close) db.close()
+      export_errors.value.reverse
+    }
+  }
+}
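
A hypothetical usage sketch of the consumer thread (the database path and export values are invented; Bytes.hex is the decoder added above, and "68656c6c6f" is "hello"):

  val db = SQLite.open_database(Path.explode("$ISABELLE_HOME_USER/test_exports.db"))
  val consumer = Export.consumer(db)

  consumer("HOL", Markup.Export.Args(None, "HOL.Nat", "facts", compress = false),
    Bytes.hex("68656c6c6f"))

  // collect accumulated "Duplicate export" errors and close the database
  val errors: List[String] = consumer.shutdown(close = true)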
--- a/src/Pure/Thy/sessions.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/Thy/sessions.scala	Sun May 06 23:03:43 2018 +0200
@@ -1003,6 +1003,10 @@
 
     def prepare_output() { Isabelle_System.mkdirs(output_dir + Path.basic("log")) }
 
+    def output_database(name: String): Path = output_dir + database(name)
+    def output_log(name: String): Path = output_dir + log(name)
+    def output_log_gz(name: String): Path = output_dir + log_gz(name)
+
 
     /* input */
 
@@ -1028,6 +1032,15 @@
 
     /* session info */
 
+    def init_session_info(db: SQL.Database, name: String)
+    {
+      db.transaction {
+        db.create_table(Session_Info.table)
+        db.using_statement(
+          Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute)
+      }
+    }
+
     def write_session_info(
       db: SQL.Database,
       name: String,
@@ -1035,9 +1048,6 @@
       build: Build.Session_Info)
     {
       db.transaction {
-        db.create_table(Session_Info.table)
-        db.using_statement(
-          Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute)
         db.using_statement(Session_Info.table.insert())(stmt =>
         {
           stmt.string(1) = name
--- a/src/Pure/Tools/build.ML	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/Tools/build.ML	Sun May 06 23:03:43 2018 +0200
@@ -94,6 +94,10 @@
         end
       else if function = Markup.theory_timing then
         inline_message (#2 function) args
+      else if function = (Markup.functionN, Markup.exportN) andalso
+        not (null args) andalso #1 (hd args) = Markup.idN
+      then
+        inline_message (#2 function) (tl args @ [(Markup.exportN, hex_string (implode output))])
       else
         (case Markup.dest_loading_theory props of
           SOME name => writeln ("\floading_theory = " ^ name)
--- a/src/Pure/Tools/build.scala	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/Tools/build.scala	Sun May 06 23:03:43 2018 +0200
@@ -32,10 +32,11 @@
 
   private object Queue
   {
-    def load_timings(progress: Progress, store: Sessions.Store, name: String)
-      : (List[Properties.T], Double) =
+    type Timings = (List[Properties.T], Double)
+
+    def load_timings(progress: Progress, store: Sessions.Store, name: String): Timings =
     {
-      val no_timings: (List[Properties.T], Double) = (Nil, 0.0)
+      val no_timings: Timings = (Nil, 0.0)
 
       store.find_database(name) match {
         case None => no_timings
@@ -184,7 +185,6 @@
   {
     val output = store.output_dir + Path.basic(name)
     def output_path: Option[Path] = if (do_output) Some(output) else None
-    output.file.delete
 
     val options =
       numa_node match {
@@ -195,6 +195,8 @@
     private val graph_file = Isabelle_System.tmp_file("session_graph", "pdf")
     isabelle.graphview.Graph_File.write(options, graph_file, deps(name).session_graph_display)
 
+    private val export_consumer = Export.consumer(SQLite.open_database(store.output_database(name)))
+
     private val future_result: Future[Process_Result] =
       Future.thread("build") {
         val parent = info.parent.getOrElse("")
@@ -282,6 +284,11 @@
               Library.try_unprefix("\floading_theory = ", line) match {
                 case Some(theory) => progress.theory(name, theory)
                 case None =>
+                  for {
+                    text <- Library.try_unprefix("\fexport = ", line)
+                    (args, body) <-
+                      Markup.Export.dest_inline(XML.Decode.properties(YXML.parse_body(text)))
+                  } { export_consumer(name, args, body) }
               },
             progress_limit =
               options.int("process_output_limit") match {
@@ -305,8 +312,14 @@
     def join: Process_Result =
     {
       val result = future_result.join
+      val export_result =
+        export_consumer.shutdown(close = true).map(Output.error_message_text(_)) match {
+          case Nil => result
+          case errs if result.ok => result.copy(rc = 1).errors(errs)
+          case errs => result.errors(errs)
+        }
 
-      if (result.ok)
+      if (export_result.ok)
         Present.finish(progress, store.browser_info, graph_file, info, name)
 
       graph_file.delete
@@ -317,11 +330,11 @@
           case Some(request) => !request.cancel
         }
 
-      if (result.interrupted) {
-        if (was_timeout) result.error(Output.error_message_text("Timeout")).was_timeout
-        else result.error(Output.error_message_text("Interrupt"))
+      if (export_result.interrupted) {
+        if (was_timeout) export_result.error(Output.error_message_text("Timeout")).was_timeout
+        else export_result.error(Output.error_message_text("Interrupt"))
       }
-      else result
+      else export_result
     }
   }
 
@@ -448,14 +461,19 @@
 
     store.prepare_output()
 
-    // optional cleanup
+    // cleanup
+    def cleanup(name: String, echo: Boolean = false)
+    {
+      val files =
+        List(Path.basic(name), store.database(name), store.log(name), store.log_gz(name)).
+          map(store.output_dir + _).filter(_.is_file)
+      if (files.nonEmpty) progress.echo_if(echo, "Cleaning " + name + " ...")
+      val res = files.map(p => p.file.delete)
+      if (!res.forall(ok => ok)) progress.echo_if(echo, name + " FAILED to delete")
+    }
     if (clean_build) {
       for (name <- full_sessions.build_descendants(full_sessions.build_selection(selection1))) {
-        val files =
-          List(Path.basic(name), store.database(name), store.log(name), store.log_gz(name)).
-            map(store.output_dir + _).filter(_.is_file)
-        if (files.nonEmpty) progress.echo("Cleaning " + name + " ...")
-        if (!files.forall(p => p.file.delete)) progress.echo(name + " FAILED to delete")
+        cleanup(name, echo = true)
       }
     }
 
@@ -507,7 +525,7 @@
               val tail = job.info.options.int("process_output_tail")
               process_result.copy(
                 out_lines =
-                  "(see also " + (store.output_dir + store.log(name)).file.toString + ")" ::
+                  "(see also " + store.output_log(name).file.toString + ")" ::
                   (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
             }
 
@@ -515,29 +533,22 @@
             // write log file
             val heap_stamp =
               if (process_result.ok) {
-                (store.output_dir + store.log(name)).file.delete
                 val heap_stamp =
                   for (path <- job.output_path if path.is_file)
                     yield Sessions.write_heap_digest(path)
 
-                File.write_gzip(store.output_dir + store.log_gz(name), terminate_lines(log_lines))
+                File.write_gzip(store.output_log_gz(name), terminate_lines(log_lines))
 
                 heap_stamp
               }
               else {
-                (store.output_dir + Path.basic(name)).file.delete
-                (store.output_dir + store.log_gz(name)).file.delete
-
-                File.write(store.output_dir + store.log(name), terminate_lines(log_lines))
+                File.write(store.output_log(name), terminate_lines(log_lines))
 
                 None
               }
 
             // write database
             {
-              val database = store.output_dir + store.database(name)
-              database.file.delete
-
               val build_log =
                 Build_Log.Log_File(name, process_result.out_lines).
                   parse_session_info(
@@ -546,7 +557,7 @@
                     ml_statistics = true,
                     task_statistics = true)
 
-              using(SQLite.open_database(database))(db =>
+              using(SQLite.open_database(store.output_database(name)))(db =>
                 store.write_session_info(db, name,
                   build_log =
                     if (process_result.timeout) build_log.error("Timeout") else build_log,
@@ -609,8 +620,13 @@
                     results + (name -> Result(false, heap_stamp, Some(Process_Result(1)), info)))
                 }
                 else if (ancestor_results.forall(_.ok) && !progress.stopped) {
+                  progress.echo((if (do_output) "Building " else "Running ") + name + " ...")
+
+                  cleanup(name)
+                  using(SQLite.open_database(store.output_database(name)))(db =>
+                    store.init_session_info(db, name))
+
                   val numa_node = numa_nodes.next(used_node(_))
-                  progress.echo((if (do_output) "Building " else "Running ") + name + " ...")
                   val job =
                     new Job(progress, name, info, selected_sessions, deps, store, do_output,
                       verbose, pide, numa_node, queue.command_timings(name))
--- a/src/Pure/build-jars	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/build-jars	Sun May 06 23:03:43 2018 +0200
@@ -129,6 +129,7 @@
   System/system_channel.scala
   System/tty_loop.scala
   Thy/bibtex.scala
+  Thy/export.scala
   Thy/html.scala
   Thy/latex.scala
   Thy/present.scala
--- a/src/Pure/library.ML	Sun May 06 15:29:11 2018 +0200
+++ b/src/Pure/library.ML	Sun May 06 23:03:43 2018 +0200
@@ -116,6 +116,7 @@
   (*integers*)
   val upto: int * int -> int list
   val downto: int * int -> int list
+  val hex_digit: int -> string
   val radixpand: int * int -> int list
   val radixstring: int * string * int -> string
   val string_of_int: int -> string
@@ -154,6 +155,7 @@
   val translate_string: (string -> string) -> string -> string
   val encode_lines: string -> string
   val decode_lines: string -> string
+  val hex_string: string -> string
   val align_right: string -> int -> string -> string
   val match_string: string -> string -> bool
 
@@ -613,6 +615,10 @@
 
 (* convert integers to strings *)
 
+(*hexadecimal*)
+fun hex_digit i =
+  if i < 10 then chr (Char.ord #"0" + i) else chr (Char.ord #"a" + i - 10);
+
 (*expand the number in the given base;
   example: radixpand (2, 8) gives [1, 0, 0, 0]*)
 fun radixpand (base, num) : int list =
@@ -771,6 +777,12 @@
 val encode_lines = translate_string (fn "\n" => "\v" | c => c);
 val decode_lines = translate_string (fn "\v" => "\n" | c => c);
 
+fun hex_string s =
+  fold_string (fn c =>
+    let val (a, b) = IntInf.divMod (ord c, 16)
+    in cons (hex_digit a) #> cons (hex_digit b) end) s []
+  |> rev |> implode;
+
 fun align_right c k s =
   let
     val _ = if size c <> 1 orelse size s > k