--- a/src/Pure/Admin/build_history.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/Admin/build_history.scala Fri Mar 17 23:24:04 2017 +0100
@@ -207,6 +207,8 @@
/* output log */
+ val store = Sessions.store()
+
val meta_info =
Build_Log.Prop.multiple(Build_Log.Prop.build_tags, build_tags) :::
Build_Log.Prop.multiple(Build_Log.Prop.build_args, build_args1) :::
@@ -222,12 +224,20 @@
val ml_statistics =
build_info.finished_sessions.flatMap(session_name =>
{
- val session_log = isabelle_output_log + Path.explode(session_name).ext("gz")
- if (session_log.is_file) {
- Build_Log.Log_File(session_log).parse_session_info(ml_statistics = true).
- ml_statistics.map(props => (Build_Log.SESSION_NAME -> session_name) :: props)
- }
- else Nil
+ val database = isabelle_output + store.database(session_name)
+ val log_gz = isabelle_output + store.log_gz(session_name)
+
+ val properties =
+ if (database.is_file) {
+ using(SQLite.open_database(database))(db =>
+ store.read_build_log(db, session_name, ml_statistics = true)).ml_statistics
+ }
+ else if (log_gz.is_file) {
+ Build_Log.Log_File(log_gz).
+ parse_session_info(session_name, ml_statistics = true).ml_statistics
+ }
+ else Nil
+ properties.map(props => (Build_Log.SESSION_NAME -> session_name) :: props)
})
val heap_sizes =
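
Usage sketch (illustrative, not part of the changeset) of the extraction scheme introduced above: ML statistics are taken from the per-session SQLite database when present, with the compressed session log as fallback. This assumes an Isabelle/Scala context (e.g. the "isabelle scala" console); the session name is made up, and the fallback uses the store's output directory rather than the isabelle_output of build_history.

    import isabelle._

    // read ML statistics of one finished session, preferring the database
    def session_ml_statistics(session_name: String): List[Properties.T] =
    {
      val store = Sessions.store()
      store.find_database(session_name) match {
        case Some(database) =>
          // per-session SQLite database written by "isabelle build"
          using(SQLite.open_database(database))(db =>
            store.read_build_log(db, session_name, ml_statistics = true)).ml_statistics
        case None =>
          // fallback: old-style compressed session log
          val log_gz = store.output_dir + store.log_gz(session_name)
          if (log_gz.is_file)
            Build_Log.Log_File(log_gz).
              parse_session_info(session_name, ml_statistics = true).ml_statistics
          else Nil
      }
    }

    // e.g. session_ml_statistics("HOL-Library").length
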
--- a/src/Pure/Admin/build_log.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/Admin/build_log.scala Fri Mar 17 23:24:04 2017 +0100
@@ -265,7 +265,7 @@
- /** meta info **/
+ /** digested meta info: produced by Admin/build_history in log.xz file **/
object Meta_Info
{
@@ -379,7 +379,7 @@
- /** build info: produced by isabelle build or build_history **/
+ /** build info: toplevel output of isabelle build or Admin/build_history **/
val ML_STATISTICS_MARKER = "\fML_statistics = "
val SESSION_NAME = "session_name"
@@ -539,7 +539,7 @@
- /** session info: produced by "isabelle build" **/
+ /** session info: produced by isabelle build as session log.gz file **/
sealed case class Session_Info(
session_name: String,
@@ -555,22 +555,16 @@
ml_statistics: Boolean,
task_statistics: Boolean): Session_Info =
{
- val xml_cache = new XML.Cache()
-
- val session_name =
- log_file.find_line("\fSession.name = ") match {
- case None => default_name
- case Some(name) if default_name == "" || default_name == name => name
- case Some(name) => log_file.err("log from different session " + quote(name))
- }
- val session_timing = log_file.find_props("\fTiming = ") getOrElse Nil
- val command_timings_ =
- if (command_timings) log_file.filter_props("\fcommand_timing = ") else Nil
- val ml_statistics_ =
- if (ml_statistics) log_file.filter_props(ML_STATISTICS_MARKER) else Nil
- val task_statistics_ =
- if (task_statistics) log_file.filter_props("\ftask_statistics = ") else Nil
-
- Session_Info(session_name, session_timing, command_timings_, ml_statistics_, task_statistics_)
+ Session_Info(
+ session_name =
+ log_file.find_line("\fSession.name = ") match {
+ case None => default_name
+ case Some(name) if default_name == "" || default_name == name => name
+ case Some(name) => log_file.err("log from different session " + quote(name))
+ },
+ session_timing = log_file.find_props("\fTiming = ") getOrElse Nil,
+ command_timings = if (command_timings) log_file.filter_props("\fcommand_timing = ") else Nil,
+ ml_statistics = if (ml_statistics) log_file.filter_props(ML_STATISTICS_MARKER) else Nil,
+ task_statistics = if (task_statistics) log_file.filter_props("\ftask_statistics = ") else Nil)
}
}
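
The "\f..." markers consumed by parse_session_info carry YXML-encoded property lists; the same encoding is used by the encode_properties/compress_properties helpers added to Sessions.Store further below. A minimal round-trip sketch (illustrative, not part of the changeset; the property values are made up):

    import isabelle._

    val props: Properties.T = List("elapsed" -> "1.23", "cpu" -> "4.56")

    // encode as YXML text, as it appears after a "\f..." marker in the log
    val encoded: String = YXML.string_of_body(XML.Encode.properties(props))
    val line: String = Build_Log.ML_STATISTICS_MARKER + encoded

    // decode again, as the property parsing on the reading side does
    val decoded: Properties.T = XML.Decode.properties(YXML.parse_body(encoded))
    assert(decoded == props)
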
--- a/src/Pure/General/bytes.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/General/bytes.scala Fri Mar 17 23:24:04 2017 +0100
@@ -110,9 +110,12 @@
lazy val sha1_digest: SHA1.Digest = SHA1.digest(bytes)
+ def text: String =
+ UTF8.decode_chars(s => s, bytes, offset, offset + length).toString
+
override def toString: String =
{
- val str = UTF8.decode_chars(s => s, bytes, offset, offset + length).toString
+ val str = text
if (str.contains('\uFFFD')) "Bytes(" + length + ")" else str
}
--- a/src/Pure/General/exn.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/General/exn.scala Fri Mar 17 23:24:04 2017 +0100
@@ -20,7 +20,7 @@
}
override def hashCode: Int = message.hashCode
- override def toString: String = Output.error_text(message)
+ override def toString: String = "\n" + Output.error_text(message)
}
object ERROR
--- a/src/Pure/General/sql.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/General/sql.scala Fri Mar 17 23:24:04 2017 +0100
@@ -68,24 +68,24 @@
object Column
{
- def bool(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def bool(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.Boolean, strict, primary_key)
- def int(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def int(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.Int, strict, primary_key)
- def long(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def long(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.Long, strict, primary_key)
- def double(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def double(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.Double, strict, primary_key)
- def string(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def string(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.String, strict, primary_key)
- def bytes(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def bytes(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.Bytes, strict, primary_key)
- def date(name: String, strict: Boolean = true, primary_key: Boolean = false): Column =
+ def date(name: String, strict: Boolean = false, primary_key: Boolean = false): Column =
Column(name, Type.Date, strict, primary_key)
}
sealed case class Column(
- name: String, T: Type.Value, strict: Boolean = true, primary_key: Boolean = false)
+ name: String, T: Type.Value, strict: Boolean = false, primary_key: Boolean = false)
{
def sql_name: String = quote_ident(name)
def sql_decl(sql_type: Type.Value => String): String =
@@ -240,17 +240,17 @@
def tables: List[String] =
iterator(connection.getMetaData.getTables(null, null, "%", null))(_.getString(3)).toList
- def create_table(table: Table, strict: Boolean = true, rowid: Boolean = true): Unit =
+ def create_table(table: Table, strict: Boolean = false, rowid: Boolean = true): Unit =
using(statement(table.sql_create(strict, rowid, sql_type)))(_.execute())
- def drop_table(table: Table, strict: Boolean = true): Unit =
+ def drop_table(table: Table, strict: Boolean = false): Unit =
using(statement(table.sql_drop(strict)))(_.execute())
def create_index(table: Table, name: String, columns: List[Column],
- strict: Boolean = true, unique: Boolean = false): Unit =
+ strict: Boolean = false, unique: Boolean = false): Unit =
using(statement(table.sql_create_index(name, columns, strict, unique)))(_.execute())
- def drop_index(table: Table, name: String, strict: Boolean = true): Unit =
+ def drop_index(table: Table, name: String, strict: Boolean = false): Unit =
using(statement(table.sql_drop_index(name, strict)))(_.execute())
}
}
@@ -264,8 +264,11 @@
// see https://www.sqlite.org/lang_datefunc.html
val date_format: Date.Format = Date.Format("uuuu-MM-dd HH:mm:ss.SSS x")
+ lazy val init_jdbc: Unit = Class.forName("org.sqlite.JDBC")
+
def open_database(path: Path): Database =
{
+ init_jdbc
val path0 = path.expand
val s0 = File.platform_path(path0)
val s1 = if (Platform.is_windows) s0.replace('\\', '/') else s0
@@ -296,6 +299,8 @@
{
val default_port = 5432
+ lazy val init_jdbc: Unit = Class.forName("org.postgresql.Driver")
+
def open_database(
user: String,
password: String,
@@ -304,6 +309,8 @@
port: Int = default_port,
ssh: Option[SSH.Session] = None): Database =
{
+ init_jdbc
+
require(user != "")
val db_host = if (host != "") host else "localhost"
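
Sketch of the relaxed SQL.Column defaults (strict = false, i.e. nullable) together with the driver initialization via open_database (illustrative, not part of the changeset; the table layout and the database path "example.db" are made up):

    import isabelle._

    object Example_Table
    {
      val name = SQL.Column.string("name", primary_key = true)
      val timing = SQL.Column.bytes("timing")            // nullable by default now
      val return_code = SQL.Column.int("return_code")
      val table = SQL.Table("example", List(name, timing, return_code))
    }

    // open_database forces init_jdbc, i.e. loads org.sqlite.JDBC on first use
    using(SQLite.open_database(Path.explode("example.db")))(db =>
      db.transaction {
        db.create_table(Example_Table.table)
        using(db.insert_statement(Example_Table.table))(stmt =>
        {
          db.set_string(stmt, 1, "HOL")
          db.set_bytes(stmt, 2, Bytes.empty)
          db.set_int(stmt, 3, 0)
          stmt.execute()
        })
      })
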
--- a/src/Pure/Thy/sessions.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/Thy/sessions.scala Fri Mar 17 23:24:04 2017 +0100
@@ -515,13 +515,80 @@
/** persistent store **/
- def log(name: String): Path = Path.basic("log") + Path.basic(name)
- def log_gz(name: String): Path = log(name).ext("gz")
+ object Session_Info
+ {
+ // Build_Log.Session_Info
+ val session_name = SQL.Column.string("session_name")
+ val session_timing = SQL.Column.bytes("session_timing")
+ val command_timings = SQL.Column.bytes("command_timings")
+ val ml_statistics = SQL.Column.bytes("ml_statistics")
+ val task_statistics = SQL.Column.bytes("task_statistics")
+ val build_log_columns =
+ List(session_name, session_timing, command_timings, ml_statistics, task_statistics)
+
+ // Build.Session_Info
+ val sources = SQL.Column.string("sources")
+ val input_heaps = SQL.Column.string("input_heaps")
+ val output_heap = SQL.Column.string("output_heap")
+ val return_code = SQL.Column.int("return_code")
+ val build_columns = List(sources, input_heaps, output_heap, return_code)
+
+ val table = SQL.Table("isabelle_session_info", build_log_columns ::: build_columns)
+ }
def store(system_mode: Boolean = false): Store = new Store(system_mode)
class Store private[Sessions](system_mode: Boolean)
{
+ /* file names */
+
+ def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db")
+ def log(name: String): Path = Path.basic("log") + Path.basic(name)
+ def log_gz(name: String): Path = log(name).ext("gz")
+
+
+ /* SQL database content */
+
+ val xml_cache: XML.Cache = new XML.Cache()
+
+ def encode_properties(ps: Properties.T): Bytes =
+ Bytes(YXML.string_of_body(XML.Encode.properties(ps)))
+
+ def decode_properties(bs: Bytes): Properties.T =
+ xml_cache.props(XML.Decode.properties(YXML.parse_body(bs.text)))
+
+ def compress_properties(ps: List[Properties.T], options: XZ.Options = XZ.options()): Bytes =
+ {
+ if (ps.isEmpty) Bytes.empty
+ else Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).compress(options)
+ }
+
+ def uncompress_properties(bs: Bytes): List[Properties.T] =
+ {
+ if (bs.isEmpty) Nil
+ else
+ XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress().text)).
+ map(xml_cache.props(_))
+ }
+
+ def read_string(db: SQL.Database, table: SQL.Table, column: SQL.Column): String =
+ using(db.select_statement(table, List(column)))(stmt =>
+ {
+ val rs = stmt.executeQuery
+ if (!rs.next) "" else db.string(rs, column.name)
+ })
+
+ def read_bytes(db: SQL.Database, table: SQL.Table, column: SQL.Column): Bytes =
+ using(db.select_statement(table, List(column)))(stmt =>
+ {
+ val rs = stmt.executeQuery
+ if (!rs.next) Bytes.empty else db.bytes(rs, column.name)
+ })
+
+ def read_properties(db: SQL.Database, table: SQL.Table, column: SQL.Column)
+ : List[Properties.T] = uncompress_properties(read_bytes(db, table, column))
+
+
/* output */
val browser_info: Path =
@@ -532,6 +599,8 @@
if (system_mode) Path.explode("~~/heaps/$ML_IDENTIFIER")
else Path.explode("$ISABELLE_OUTPUT")
+ override def toString: String = "Store(output_dir = " + output_dir.expand + ")"
+
def prepare_output() { Isabelle_System.mkdirs(output_dir + Path.basic("log")) }
@@ -544,22 +613,87 @@
output_dir :: Path.split(Isabelle_System.getenv_strict("ISABELLE_PATH")).map(_ + ml_ident)
}
- def find(name: String): Option[(Path, Option[String])] =
- input_dirs.find(dir => (dir + log_gz(name)).is_file).map(dir =>
- (dir + log_gz(name), read_heap_digest(dir + Path.basic(name))))
+ def find_database_heap(name: String): Option[(Path, Option[String])] =
+ input_dirs.find(dir => (dir + database(name)).is_file).map(dir =>
+ (dir + database(name), read_heap_digest(dir + Path.basic(name))))
- def find_log(name: String): Option[Path] =
- input_dirs.map(_ + log(name)).find(_.is_file)
-
- def find_log_gz(name: String): Option[Path] =
- input_dirs.map(_ + log_gz(name)).find(_.is_file)
-
- def find_heap(name: String): Option[Path] =
- input_dirs.map(_ + Path.basic(name)).find(_.is_file)
+ def find_database(name: String): Option[Path] =
+ input_dirs.map(_ + database(name)).find(_.is_file)
def heap(name: String): Path =
- find_heap(name) getOrElse
+ input_dirs.map(_ + Path.basic(name)).find(_.is_file) getOrElse
error("Unknown logic " + quote(name) + " -- no heap file found in:\n" +
cat_lines(input_dirs.map(dir => " " + dir.expand.implode)))
+
+
+ /* session info */
+
+ def write_session_info(
+ db: SQL.Database, build_log: Build_Log.Session_Info, build: Build.Session_Info)
+ {
+ db.transaction {
+ db.drop_table(Session_Info.table)
+ db.create_table(Session_Info.table)
+ using(db.insert_statement(Session_Info.table))(stmt =>
+ {
+ db.set_string(stmt, 1, build_log.session_name)
+ db.set_bytes(stmt, 2, encode_properties(build_log.session_timing))
+ db.set_bytes(stmt, 3, compress_properties(build_log.command_timings))
+ db.set_bytes(stmt, 4, compress_properties(build_log.ml_statistics))
+ db.set_bytes(stmt, 5, compress_properties(build_log.task_statistics))
+ db.set_string(stmt, 6, cat_lines(build.sources))
+ db.set_string(stmt, 7, cat_lines(build.input_heaps))
+ db.set_string(stmt, 8, build.output_heap getOrElse "")
+ db.set_int(stmt, 9, build.return_code)
+ stmt.execute()
+ })
+ }
+ }
+
+ def read_session_timing(db: SQL.Database): Properties.T =
+ decode_properties(read_bytes(db, Session_Info.table, Session_Info.session_timing))
+
+ def read_command_timings(db: SQL.Database): List[Properties.T] =
+ read_properties(db, Session_Info.table, Session_Info.command_timings)
+
+ def read_ml_statistics(db: SQL.Database): List[Properties.T] =
+ read_properties(db, Session_Info.table, Session_Info.ml_statistics)
+
+ def read_task_statistics(db: SQL.Database): List[Properties.T] =
+ read_properties(db, Session_Info.table, Session_Info.task_statistics)
+
+ def read_build_log(db: SQL.Database,
+ default_name: String = "",
+ command_timings: Boolean = false,
+ ml_statistics: Boolean = false,
+ task_statistics: Boolean = false): Build_Log.Session_Info =
+ {
+ val name = read_string(db, Session_Info.table, Session_Info.session_name)
+ Build_Log.Session_Info(
+ session_name =
+ if (name == "") default_name
+ else if (default_name == "" || default_name == name) name
+ else error("Database from different session " + quote(name)),
+ session_timing = read_session_timing(db),
+ command_timings = if (command_timings) read_command_timings(db) else Nil,
+ ml_statistics = if (ml_statistics) read_ml_statistics(db) else Nil,
+ task_statistics = if (task_statistics) read_task_statistics(db) else Nil)
+ }
+
+ def read_build(db: SQL.Database): Option[Build.Session_Info] =
+ using(db.select_statement(Session_Info.table, Session_Info.build_columns))(stmt =>
+ {
+ val rs = stmt.executeQuery
+ if (!rs.next) None
+ else {
+ Some(
+ Build.Session_Info(
+ split_lines(db.string(rs, Session_Info.sources.name)),
+ split_lines(db.string(rs, Session_Info.input_heaps.name)),
+ db.string(rs,
+ Session_Info.output_heap.name) match { case "" => None case s => Some(s) },
+ db.int(rs, Session_Info.return_code.name)))
+ }
+ })
}
}
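
Usage sketch for the new Store methods above (illustrative, not part of the changeset): open the per-session database of a finished build and read both parts of its content, the Build.Session_Info (sources/heap stamps, return code) and the Build_Log.Session_Info (timings, statistics). The session name is made up.

    import isabelle._

    val store = Sessions.store()
    val session_name = "HOL"

    store.find_database(session_name) match {
      case None => Output.warning("No database for session " + quote(session_name))
      case Some(database) =>
        using(SQLite.open_database(database))(db =>
        {
          // sources/heaps stamps and return code
          for (build <- store.read_build(db))
            println("return code: " + build.return_code)

          // timings and statistics, decoded on demand
          val build_log = store.read_build_log(db, session_name, command_timings = true)
          val elapsed = Markup.Elapsed.unapply(build_log.session_timing) getOrElse 0.0
          println("elapsed: " + elapsed + "s, commands timed: " + build_log.command_timings.length)
        })
    }
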
--- a/src/Pure/Tools/build.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Pure/Tools/build.scala Fri Mar 17 23:24:04 2017 +0100
@@ -21,16 +21,53 @@
{
/** auxiliary **/
- /* queue */
+ /* persistent build info */
+
+ sealed case class Session_Info(
+ sources: List[String],
+ input_heaps: List[String],
+ output_heap: Option[String],
+ return_code: Int)
+
+
+ /* queue with scheduling information */
private object Queue
{
- def apply(tree: Sessions.Tree, load_timings: String => (List[Properties.T], Double)): Queue =
+ def load_timings(store: Sessions.Store, name: String): (List[Properties.T], Double) =
+ {
+ val no_timings: (List[Properties.T], Double) = (Nil, 0.0)
+
+ store.find_database(name) match {
+ case None => no_timings
+ case Some(database) =>
+ def ignore_error(msg: String) =
+ {
+ Output.warning("Ignoring bad database: " + database + (if (msg == "") "" else "\n" + msg))
+ no_timings
+ }
+ try {
+ using(SQLite.open_database(database))(db =>
+ {
+ val build_log = store.read_build_log(db, name, command_timings = true)
+ val session_timing = Markup.Elapsed.unapply(build_log.session_timing) getOrElse 0.0
+ (build_log.command_timings, session_timing)
+ })
+ }
+ catch {
+ case ERROR(msg) => ignore_error(msg)
+ case exn: java.lang.Error => ignore_error(Exn.message(exn))
+ case _: XML.Error => ignore_error("")
+ }
+ }
+ }
+
+ def apply(tree: Sessions.Tree, store: Sessions.Store): Queue =
{
val graph = tree.graph
val sessions = graph.keys
- val timings = Par_List.map((name: String) => (name, load_timings(name)), sessions)
+ val timings = sessions.map(name => (name, load_timings(store, name)))
val command_timings =
Map(timings.map({ case (name, (ts, _)) => (name, ts) }): _*).withDefaultValue(Nil)
val session_timing =
@@ -201,46 +238,6 @@
}
- /* sources and heaps */
-
- private val SOURCES = "sources: "
- private val INPUT_HEAP = "input_heap: "
- private val OUTPUT_HEAP = "output_heap: "
- private val LOG_START = "log:"
- private val line_prefixes = List(SOURCES, INPUT_HEAP, OUTPUT_HEAP, LOG_START)
-
- private def sources_stamp(digests: List[SHA1.Digest]): String =
- digests.map(_.toString).sorted.mkString(SOURCES, " ", "")
-
- private def read_stamps(path: Path): Option[(String, List[String], List[String])] =
- if (path.is_file) {
- val stream = new GZIPInputStream(new BufferedInputStream(new FileInputStream(path.file)))
- val reader = new BufferedReader(new InputStreamReader(stream, UTF8.charset))
- val lines =
- {
- val lines = new mutable.ListBuffer[String]
- try {
- var finished = false
- while (!finished) {
- val line = reader.readLine
- if (line != null && line_prefixes.exists(line.startsWith(_)))
- lines += line
- else finished = true
- }
- }
- finally { reader.close }
- lines.toList
- }
-
- if (!lines.isEmpty && lines.last.startsWith(LOG_START)) {
- lines.find(_.startsWith(SOURCES)).map(s =>
- (s, lines.filter(_.startsWith(INPUT_HEAP)), lines.filter(_.startsWith(OUTPUT_HEAP))))
- }
- else None
- }
- else None
-
-
/** build with results **/
@@ -323,56 +320,22 @@
val deps =
Sessions.dependencies(progress, true, verbose, list_files, check_keywords, selected_tree)
- def session_sources_stamp(name: String): String =
- sources_stamp(selected_tree(name).meta_digest :: deps.sources(name))
-
- val store = Sessions.store(system_mode)
-
-
- /* queue with scheduling information */
-
- def load_timings(name: String): (List[Properties.T], Double) =
- {
- val (path, text) =
- store.find_log_gz(name) match {
- case Some(path) => (path, File.read_gzip(path))
- case None =>
- store.find_log(name) match {
- case Some(path) => (path, File.read(path))
- case None => (Path.current, "")
- }
- }
-
- def ignore_error(msg: String): (List[Properties.T], Double) =
- {
- Output.warning("Ignoring bad log file: " + path + (if (msg == "") "" else "\n" + msg))
- (Nil, 0.0)
- }
-
- try {
- val info = Build_Log.Log_File(name, text).parse_session_info(name, command_timings = true)
- val session_timing = Markup.Elapsed.unapply(info.session_timing) getOrElse 0.0
- (info.command_timings, session_timing)
- }
- catch {
- case ERROR(msg) => ignore_error(msg)
- case exn: java.lang.Error => ignore_error(Exn.message(exn))
- case _: XML.Error => ignore_error("")
- }
- }
-
- val queue = Queue(selected_tree, load_timings)
+ def sources_stamp(name: String): List[String] =
+ (selected_tree(name).meta_digest :: deps.sources(name)).map(_.toString).sorted
/* main build process */
+ val store = Sessions.store(system_mode)
+ val queue = Queue(selected_tree, store)
+
store.prepare_output()
// optional cleanup
if (clean_build) {
for (name <- full_tree.graph.all_succs(selected)) {
val files =
- List(Path.basic(name), Sessions.log(name), Sessions.log_gz(name)).
+ List(Path.basic(name), store.database(name), store.log(name), store.log_gz(name)).
map(store.output_dir + _).filter(_.is_file)
if (files.nonEmpty) progress.echo("Cleaning " + name + " ...")
if (!files.forall(p => p.file.delete)) progress.echo(name + " FAILED to delete")
@@ -422,44 +385,53 @@
if (process_result.ok)
progress.echo("Finished " + name + " (" + process_result.timing.message_resources + ")")
+ val log_lines = process_result.out_lines.filterNot(_.startsWith("\f"))
val process_result_tail =
{
- val lines = process_result.out_lines.filterNot(_.startsWith("\f"))
val tail = job.info.options.int("process_output_tail")
- val lines1 = if (tail == 0) lines else lines.drop(lines.length - tail max 0)
process_result.copy(
out_lines =
- "(see also " + (store.output_dir + Sessions.log(name)).file.toString + ")" ::
- lines1)
+ "(see also " + (store.output_dir + store.log(name)).file.toString + ")" ::
+ (if (tail == 0) log_lines else log_lines.drop(log_lines.length - tail max 0)))
}
val heap_stamp =
if (process_result.ok) {
- (store.output_dir + Sessions.log(name)).file.delete
+ (store.output_dir + store.log(name)).file.delete
val heap_stamp =
for (path <- job.output_path if path.is_file)
yield Sessions.write_heap_digest(path)
- File.write_gzip(store.output_dir + Sessions.log_gz(name),
- terminate_lines(
- session_sources_stamp(name) ::
- input_heaps.map(INPUT_HEAP + _) :::
- heap_stamp.toList.map(OUTPUT_HEAP + _) :::
- List(LOG_START) ::: process_result.out_lines))
+ File.write_gzip(store.output_dir + store.log_gz(name), terminate_lines(log_lines))
heap_stamp
}
else {
(store.output_dir + Path.basic(name)).file.delete
- (store.output_dir + Sessions.log_gz(name)).file.delete
+ (store.output_dir + store.log_gz(name)).file.delete
- File.write(store.output_dir + Sessions.log(name),
- terminate_lines(process_result.out_lines))
+ File.write(store.output_dir + store.log(name), terminate_lines(log_lines))
progress.echo(name + " FAILED")
if (!process_result.interrupted) progress.echo(process_result_tail.out)
None
}
+
+ // write database
+ {
+ val database = store.output_dir + store.database(name)
+ database.file.delete
+
+ using(SQLite.open_database(database))(db =>
+ store.write_session_info(db,
+ build_log =
+ Build_Log.Log_File(name, process_result.out_lines).
+ parse_session_info(name,
+ command_timings = true, ml_statistics = true, task_statistics = true),
+ build =
+ Session_Info(sources_stamp(name), input_heaps, heap_stamp, process_result.rc)))
+ }
+
loop(pending - name, running - name,
results + (name -> Result(false, heap_stamp, Some(process_result_tail), job.info)))
//}}}
@@ -474,15 +446,16 @@
val (current, heap_stamp) =
{
- store.find(name) match {
- case Some((log_gz, heap_stamp)) =>
- read_stamps(log_gz) match {
- case Some((sources, input_heaps, output_heaps)) =>
+ store.find_database_heap(name) match {
+ case Some((database, heap_stamp)) =>
+ using(SQLite.open_database(database))(store.read_build(_)) match {
+ case Some(build) =>
val current =
- sources == session_sources_stamp(name) &&
- input_heaps == ancestor_heaps.map(INPUT_HEAP + _) &&
- output_heaps == heap_stamp.toList.map(OUTPUT_HEAP + _) &&
- !(do_output && heap_stamp.isEmpty)
+ build.sources == sources_stamp(name) &&
+ build.input_heaps == ancestor_heaps &&
+ build.output_heap == heap_stamp &&
+ !(do_output && heap_stamp.isEmpty) &&
+ build.return_code == 0
(current, heap_stamp)
case None => (false, None)
}
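
The up-to-date check now compares the recorded Build.Session_Info against freshly computed values instead of re-parsing header lines of log.gz. A standalone sketch of that check (illustrative, not part of the changeset), assuming sources_stamp and ancestor_heaps are computed as in the build process above:

    import isabelle._

    def session_is_current(
      store: Sessions.Store,
      name: String,
      sources_stamp: List[String],
      ancestor_heaps: List[String],
      do_output: Boolean): Boolean =
    {
      store.find_database_heap(name) match {
        case Some((database, heap_stamp)) =>
          using(SQLite.open_database(database))(store.read_build(_)) match {
            case Some(build) =>
              build.sources == sources_stamp &&
              build.input_heaps == ancestor_heaps &&
              build.output_heap == heap_stamp &&
              !(do_output && heap_stamp.isEmpty) &&
              build.return_code == 0
            case None => false
          }
        case None => false
      }
    }
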
--- a/src/Tools/jEdit/src/plugin.scala Thu Mar 16 16:02:18 2017 +0000
+++ b/src/Tools/jEdit/src/plugin.scala Fri Mar 17 23:24:04 2017 +0100
@@ -23,19 +23,6 @@
object PIDE
{
- /* plugin instance */
-
- @volatile var _plugin: Plugin = null
-
- def plugin: Plugin =
- if (_plugin == null) error("Uninitialized Isabelle/jEdit plugin")
- else _plugin
-
- def options: JEdit_Options = plugin.options
- def resources: JEdit_Resources = plugin.resources
- def session: Session = plugin.session
-
-
/* semantic document content */
def snapshot(view: View): Document.Snapshot = GUI_Thread.now
@@ -54,9 +41,21 @@
case None => error("No document view for current text area")
}
}
+
+
+ /* plugin instance */
+
+ @volatile var _plugin: Plugin = null
+
+ def plugin: Plugin =
+ if (_plugin == null) error("Uninitialized Isabelle/jEdit plugin")
+ else _plugin
+
+ def options: JEdit_Options = plugin.options
+ def resources: JEdit_Resources = plugin.resources
+ def session: Session = plugin.session
}
-
class Plugin extends EBPlugin
{
/* options */
@@ -415,7 +414,7 @@
// adhoc patch of confusing message
val orig_plugin_error = jEdit.getProperty("plugin-error.start-error")
- jEdit.setProperty("plugin-error.start-error", "Cannot start plugin:\n{0}")
+ jEdit.setProperty("plugin-error.start-error", "Cannot start plugin: {0}")
init_options()
init_resources()