# HG changeset patch
# User wenzelm
# Date 1688827695 -7200
# Node ID 45a9f5066e07bc5b93cb24109bbdcdf955585b04
# Parent  45381e6bd3adffeca8095125d0d2d048212d7c62
# Parent  4be047eaee2ba8aa682614d82c0aa8f287eaf020
merged

diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/Admin/build_log.scala
--- a/src/Pure/Admin/build_log.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/Admin/build_log.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -658,21 +658,23 @@
     val known = SQL.Column.bool("known")
 
     val meta_info_table =
-      make_table("meta_info", log_name :: Prop.all_props ::: Settings.all_settings)
+      make_table(log_name :: Prop.all_props ::: Settings.all_settings, name = "meta_info")
 
     val sessions_table =
-      make_table("sessions",
+      make_table(
         List(log_name, session_name, chapter, groups, threads, timing_elapsed, timing_cpu,
           timing_gc, timing_factor, ml_timing_elapsed, ml_timing_cpu, ml_timing_gc, ml_timing_factor,
-          heap_size, status, errors, sources))
+          heap_size, status, errors, sources),
+        name = "sessions")
 
     val theories_table =
-      make_table("theories",
+      make_table(
         List(log_name, session_name, theory_name, theory_timing_elapsed, theory_timing_cpu,
-          theory_timing_gc))
+          theory_timing_gc),
+        name = "theories")
 
     val ml_statistics_table =
-      make_table("ml_statistics", List(log_name, session_name, ml_statistics))
+      make_table(List(log_name, session_name, ml_statistics), name = "ml_statistics")
 
 
     /* AFP versions */
@@ -680,9 +682,11 @@
     val isabelle_afp_versions_table: SQL.Table = {
       val version1 = Prop.isabelle_version
       val version2 = Prop.afp_version
-      make_table("isabelle_afp_versions", List(version1.make_primary_key, version2),
-        SQL.select(List(version1, version2), distinct = true) + meta_info_table +
-        SQL.where_and(version1.defined, version2.defined))
+      make_table(List(version1.make_primary_key, version2),
+        body =
+          SQL.select(List(version1, version2), distinct = true) + meta_info_table +
+          SQL.where_and(version1.defined, version2.defined),
+        name = "isabelle_afp_versions")
     }
 
 
@@ -697,12 +701,14 @@
         if (afp) ("afp_pull_date", List(Prop.isabelle_version, Prop.afp_version))
        else ("pull_date", List(Prop.isabelle_version))
 
-      make_table(name, versions.map(_.make_primary_key) ::: List(pull_date(afp)),
-        "SELECT " + versions.mkString(", ") +
-        ", min(" + Prop.build_start + ") AS " + pull_date(afp) +
-        " FROM " + meta_info_table +
-        " WHERE " + SQL.AND((versions ::: List(Prop.build_start)).map(_.defined)) +
-        " GROUP BY " + versions.mkString(", "))
+      make_table(versions.map(_.make_primary_key) ::: List(pull_date(afp)),
+        body =
+          "SELECT " + versions.mkString(", ") +
+          ", min(" + Prop.build_start + ") AS " + pull_date(afp) +
+          " FROM " + meta_info_table +
+          " WHERE " + SQL.AND((versions ::: List(Prop.build_start)).map(_.defined)) +
+          " GROUP BY " + versions.mkString(", "),
+        name = name)
     }
 
 
@@ -794,14 +800,13 @@
           b_table.query_named + SQL.join_inner + sessions_table +
           " ON " + log_name(b_table) + " = " + log_name(sessions_table))
 
-      make_table("", c_columns ::: List(ml_statistics),
-        {
+      make_table(c_columns ::: List(ml_statistics),
+        body =
           SQL.select(c_columns.map(_.apply(c_table)) ::: List(ml_statistics)) +
           c_table.query_named + SQL.join_outer + ml_statistics_table + " ON " +
           SQL.and(
             log_name(c_table).ident + " = " + log_name(ml_statistics_table).ident,
-            session_name(c_table).ident + " = " + session_name(ml_statistics_table).ident)
-        })
+            session_name(c_table).ident + " = " + session_name(ml_statistics_table).ident))
     }
   }
 
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/General/sql.scala
--- a/src/Pure/General/sql.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/General/sql.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -252,18 +252,14 @@
 
     def transaction_lock[A](
       db: Database,
-      more_tables: Tables = Tables.empty,
       create: Boolean = false,
       synchronized: Boolean = false,
     )(body: => A): A = {
-      def run: A = db.transaction { (tables ::: more_tables).lock(db, create = create); body }
+      def run: A = db.transaction { tables.lock(db, create = create); body }
       if (synchronized) db.synchronized { run } else run
     }
 
-    def vacuum(db: Database, more_tables: Tables = Tables.empty): Unit =
-      db.vacuum(tables = tables ::: more_tables)
-
-    def make_table(name: String, columns: List[Column], body: String = ""): Table = {
+    def make_table(columns: List[Column], body: String = "", name: String = ""): Table = {
       val table_name =
         List(proper_string(table_prefix), proper_string(name)).flatten.mkString("_")
       require(table_name.nonEmpty, "Undefined database table name")
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/ML/ml_heap.scala
--- a/src/Pure/ML/ml_heap.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/ML/ml_heap.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -55,7 +55,7 @@
       val size = SQL.Column.long("size")
       val digest = SQL.Column.string("digest")
 
-      val table = make_table("", List(name, size, digest))
+      val table = make_table(List(name, size, digest))
     }
 
     object Slices {
@@ -63,7 +63,7 @@
       val slice = SQL.Column.int("slice").make_primary_key
       val content = SQL.Column.bytes("content")
 
-      val table = make_table("slices", List(name, slice, content))
+      val table = make_table(List(name, slice, content), name = "slices")
     }
 
     def get_entry(db: SQL.Database, name: String): Option[SHA1.Digest] =
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/System/host.scala
--- a/src/Pure/System/host.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/System/host.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -104,7 +104,7 @@
     val hostname = SQL.Column.string("hostname").make_primary_key
     val numa_next = SQL.Column.int("numa_next")
 
-    val table = make_table("node_info", List(hostname, numa_next))
+    val table = make_table(List(hostname, numa_next), name = "node_info")
   }
 
   def read_numa_next(db: SQL.Database, hostname: String): Int =
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/System/progress.scala
--- a/src/Pure/System/progress.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/System/progress.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -51,7 +51,7 @@
       val context = SQL.Column.long("context").make_primary_key
       val stopped = SQL.Column.bool("stopped")
 
-      val table = make_table("", List(context_uuid, context, stopped))
+      val table = make_table(List(context_uuid, context, stopped))
     }
 
     object Agents {
@@ -66,8 +66,10 @@
       val stop = SQL.Column.date("stop")
       val seen = SQL.Column.long("seen")
 
-      val table = make_table("agents",
-        List(agent_uuid, context_uuid, kind, hostname, java_pid, java_start, start, stamp, stop, seen))
+      val table =
+        make_table(
+          List(agent_uuid, context_uuid, kind, hostname, java_pid, java_start, start, stamp, stop, seen),
+          name = "agents")
     }
 
     object Messages {
@@ -80,7 +82,7 @@
       val text = SQL.Column.string("text")
       val verbose = SQL.Column.bool("verbose")
 
-      val table = make_table("messages", List(context, serial, kind, text, verbose))
+      val table = make_table(List(context, serial, kind, text, verbose), name = "messages")
     }
 
     def read_progress_context(db: SQL.Database, context_uuid: String): Option[Long] =
@@ -289,7 +291,7 @@
             stmt.long(10) = 0L
           })
       }
-      if (context_uuid == _agent_uuid) Progress.Data.vacuum(db)
+      if (context_uuid == _agent_uuid) db.vacuum(Progress.Data.tables)
     }
 
   def exit(close: Boolean = false): Unit = synchronized {
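The sql.scala hunk above is the pivot of this changeset: make_table now takes the column list first, while body and name become optional named parameters, so the many tables without a name suffix (ml_heap.scala, host.scala, progress.scala) simply drop the former empty-string argument. A minimal, self-contained sketch of the naming behaviour; Column, Table, Data and the "isabelle_progress" prefix are hypothetical stand-ins for illustration, not the real classes from sql.scala:

// Hypothetical stand-ins for SQL.Column / SQL.Table, for illustration only.
final case class Column(name: String)
final case class Table(name: String, columns: List[Column], body: String = "")

class Data(table_prefix: String = "") {
  // Reordered signature, as in sql.scala above: columns first,
  // "body" and "name" optional and passed by name at call sites.
  def make_table(columns: List[Column], body: String = "", name: String = ""): Table = {
    // Concatenate the per-Data prefix with the optional table suffix.
    val table_name = List(table_prefix, name).filter(_.nonEmpty).mkString("_")
    require(table_name.nonEmpty, "Undefined database table name")
    Table(table_name, columns, body)
  }
}

object Make_Table_Demo extends App {
  val data = new Data(table_prefix = "isabelle_progress")
  println(data.make_table(List(Column("context_uuid"))).name)                 // isabelle_progress
  println(data.make_table(List(Column("agent_uuid")), name = "agents").name)  // isabelle_progress_agents
}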
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/Thy/document_build.scala
--- a/src/Pure/Thy/document_build.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/Thy/document_build.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -54,58 +54,63 @@
 
   /* SQL data model */
 
-  object Data {
-    val session_name = SQL.Column.string("session_name").make_primary_key
-    val name = SQL.Column.string("name").make_primary_key
-    val sources = SQL.Column.string("sources")
-    val log_xz = SQL.Column.bytes("log_xz")
-    val pdf = SQL.Column.bytes("pdf")
+  object Data extends SQL.Data("isabelle_documents") {
+    override lazy val tables = SQL.Tables(Base.table)
 
-    val table = SQL.Table("isabelle_documents", List(session_name, name, sources, log_xz, pdf))
+    object Base {
+      val session_name = SQL.Column.string("session_name").make_primary_key
+      val name = SQL.Column.string("name").make_primary_key
+      val sources = SQL.Column.string("sources")
+      val log_xz = SQL.Column.bytes("log_xz")
+      val pdf = SQL.Column.bytes("pdf")
+
+      val table = make_table(List(session_name, name, sources, log_xz, pdf))
+    }
 
     def where_equal(session_name: String, name: String = ""): SQL.Source =
       SQL.where_and(
-        Data.session_name.equal(session_name),
-        if_proper(name, Data.name.equal(name)))
+        Base.session_name.equal(session_name),
+        if_proper(name, Base.name.equal(name)))
+
+    def clean_session(db: SQL.Database, session_name: String): Unit =
+      db.execute_statement(Base.table.delete(sql = Base.session_name.where_equal(session_name)))
+
+    def read_document(
+      db: SQL.Database,
+      session_name: String,
+      name: String
+    ): Option[Document_Output] = {
+      db.execute_query_statementO[Document_Output](
+        Base.table.select(sql = where_equal(session_name, name = name)),
+        { res =>
+          val name = res.string(Base.name)
+          val sources = res.string(Base.sources)
+          val log_xz = res.bytes(Base.log_xz)
+          val pdf = res.bytes(Base.pdf)
+          Document_Output(name, SHA1.fake_shasum(sources), log_xz, pdf)
+        }
+      )
+    }
+
+    def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit =
+      db.execute_statement(Base.table.insert(), body =
+        { stmt =>
+          stmt.string(1) = session_name
+          stmt.string(2) = doc.name
+          stmt.string(3) = doc.sources.toString
+          stmt.bytes(4) = doc.log_xz
+          stmt.bytes(5) = doc.pdf
+        })
   }
 
-  def read_documents(db: SQL.Database, session_name: String): List[Document_Input] =
-    db.execute_query_statement(
-      Data.table.select(List(Data.name, Data.sources), sql = Data.where_equal(session_name)),
-      List.from[Document_Input],
-      { res =>
-        val name = res.string(Data.name)
-        val sources = res.string(Data.sources)
-        Document_Input(name, SHA1.fake_shasum(sources))
-      }
-    )
+  def clean_session(db: SQL.Database, session_name: String): Unit =
+    Data.transaction_lock(db, create = true) { Data.clean_session(db, session_name) }
 
-  def read_document(
-    db: SQL.Database,
-    session_name: String,
-    name: String
-  ): Option[Document_Output] = {
-    db.execute_query_statementO[Document_Output](
-      Data.table.select(sql = Data.where_equal(session_name, name)),
-      { res =>
-        val name = res.string(Data.name)
-        val sources = res.string(Data.sources)
-        val log_xz = res.bytes(Data.log_xz)
-        val pdf = res.bytes(Data.pdf)
-        Document_Output(name, SHA1.fake_shasum(sources), log_xz, pdf)
-      }
-    )
-  }
+  def read_document(db: SQL.Database, session_name: String, name: String): Option[Document_Output] =
+    Data.transaction_lock(db) { Data.read_document(db, session_name, name) }
 
   def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit =
-    db.execute_statement(Data.table.insert(), body =
-      { stmt =>
-        stmt.string(1) = session_name
-        stmt.string(2) = doc.name
-        stmt.string(3) = doc.sources.toString
-        stmt.bytes(4) = doc.log_xz
-        stmt.bytes(5) = doc.pdf
-      })
+    Data.transaction_lock(db) { Data.write_document(db, session_name, doc) }
 
 
   /* background context */
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/Thy/export.scala
--- a/src/Pure/Thy/export.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/Thy/export.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -51,6 +51,9 @@
         if_proper(theory_name, Base.theory_name.equal(theory_name)),
         if_proper(name, Base.name.equal(name)))
 
+    def clean_session(db: SQL.Database, session_name: String): Unit =
+      db.execute_statement(Base.table.delete(sql = where_equal(session_name)))
+
     def readable_entry(db: SQL.Database, entry_name: Entry_Name): Boolean = {
       db.execute_query_statementB(
         Base.table.select(List(Base.name),
@@ -200,6 +203,9 @@
       (entry_name: Entry_Name) => regs.exists(_.matches(entry_name.compound_name))
     }
 
+  def clean_session(db: SQL.Database, session_name: String): Unit =
+    Data.transaction_lock(db, create = true) { Data.clean_session(db, session_name) }
+
   def read_theory_names(db: SQL.Database, session_name: String): List[String] =
     Data.transaction_lock(db) { Data.read_theory_names(db, session_name) }
 
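document_build.scala and export.scala above apply the same refactoring recipe: the raw SQL operations move into the SQL data model (object Data), and every public entry point wraps its Data-level counterpart in Data.transaction_lock. A sketch of that discipline under deliberately simplified assumptions; Database, lock_tables, My_Data and My_Module are hypothetical, and only the transaction_lock shape follows the sql.scala hunk shown earlier:

// Hypothetical, simplified Database: a real driver would BEGIN/COMMIT here.
trait Database {
  def transaction[A](body: => A): A = body
}

abstract class Data {
  // Placeholder for SQL.Tables(...).lock(db, create = ...) in the real code.
  def lock_tables(db: Database, create: Boolean): Unit = ()

  // Same shape as SQL.Data.transaction_lock after this patch.
  def transaction_lock[A](
    db: Database,
    create: Boolean = false,
    synchronized: Boolean = false
  )(body: => A): A = {
    def run: A = db.transaction { lock_tables(db, create); body }
    if (synchronized) db.synchronized { run } else run
  }
}

object My_Data extends Data {
  // Data-level operation: assumes the caller already holds the lock.
  def clean_session(db: Database, session_name: String): Unit =
    println(s"DELETE FROM my_table WHERE session_name = '$session_name'")
}

object My_Module {
  // Public entry point: take the lock first, then run the Data operation,
  // mirroring clean_session in document_build.scala and export.scala above.
  def clean_session(db: Database, session_name: String): Unit =
    My_Data.transaction_lock(db, create = true) { My_Data.clean_session(db, session_name) }
}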
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/Thy/store.scala
--- a/src/Pure/Thy/store.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/Thy/store.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -78,9 +78,7 @@
   /* SQL data model */
 
   object Data extends SQL.Data() {
-    override lazy val tables =
-      SQL.Tables(Session_Info.table, Sources.table,
-        Export.Data.Base.table, Document_Build.Data.table)
+    override lazy val tables = SQL.Tables(Session_Info.table, Sources.table)
 
     object Session_Info {
       val session_name = SQL.Column.string("session_name").make_primary_key
@@ -127,6 +125,54 @@
           if_proper(name, Sources.name.equal(name)))
     }
 
+    def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
+      db.execute_query_statementO[Bytes](
+        Session_Info.table.select(List(column), sql = Session_Info.session_name.where_equal(name)),
+        res => res.bytes(column)
+      ).getOrElse(Bytes.empty)
+
+    def read_properties(
+      db: SQL.Database, name: String, column: SQL.Column, cache: Term.Cache
+    ): List[Properties.T] = Properties.uncompress(read_bytes(db, name, column), cache = cache)
+
+    def read_session_timing(db: SQL.Database, name: String, cache: Term.Cache): Properties.T =
+      Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache)
+
+    def read_command_timings(db: SQL.Database, name: String): Bytes =
+      read_bytes(db, name, Session_Info.command_timings)
+
+    def read_theory_timings(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
+      read_properties(db, name, Session_Info.theory_timings, cache)
+
+    def read_ml_statistics(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
+      read_properties(db, name, Session_Info.ml_statistics, cache)
+
+    def read_task_statistics(db: SQL.Database, name: String, cache: Term.Cache): List[Properties.T] =
+      read_properties(db, name, Session_Info.task_statistics, cache)
+
+    def read_errors(db: SQL.Database, name: String, cache: Term.Cache): List[String] =
+      Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache)
+
+    def read_build(db: SQL.Database, name: String): Option[Store.Build_Info] = {
+      if (db.tables.contains(Session_Info.table.name)) {
+        db.execute_query_statementO[Store.Build_Info](
+          Session_Info.table.select(sql = Session_Info.session_name.where_equal(name)),
+          { res =>
+            val uuid =
+              try { Option(res.string(Session_Info.uuid)).getOrElse("") }
+              catch { case _: SQLException => "" }
+            Store.Build_Info(
+              SHA1.fake_shasum(res.string(Session_Info.sources)),
+              SHA1.fake_shasum(res.string(Session_Info.input_heaps)),
+              SHA1.fake_shasum(res.string(Session_Info.output_heap)),
+              res.int(Session_Info.return_code),
+              uuid)
+          }
+        )
+      }
+      else None
+    }
+
     def write_session_info(
       db: SQL.Database,
       cache: Compress.Cache,
@@ -134,7 +180,7 @@
       build_log: Build_Log.Session_Info,
       build: Build_Info
     ): Unit = {
-      db.execute_statement(Store.Data.Session_Info.table.insert(), body =
+      db.execute_statement(Session_Info.table.insert(), body =
        { stmt =>
          stmt.string(1) = session_name
          stmt.bytes(2) = Properties.encode(build_log.session_timing)
@@ -165,9 +211,9 @@
 
     def read_sources(
       db: SQL.Database,
-      cache: Compress.Cache,
       session_name: String,
-      name: String = ""
+      name: String,
+      cache: Compress.Cache
     ): List[Source_File] = {
       db.execute_query_statement(
         Sources.table.select(
@@ -361,19 +407,6 @@
   }
 
 
-  /* SQL database content */
-
-  def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes =
-    db.execute_query_statementO[Bytes](
-      Store.Data.Session_Info.table.select(List(column),
-        sql = Store.Data.Session_Info.session_name.where_equal(name)),
-      res => res.bytes(column)
-    ).getOrElse(Bytes.empty)
-
-  def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] =
-    Properties.uncompress(read_bytes(db, name, column), cache = cache)
-
-
   /* session info */
 
   def session_info_exists(db: SQL.Database): Boolean = {
@@ -386,7 +419,10 @@
       Store.Data.Session_Info.table.select(List(Store.Data.Session_Info.session_name),
         sql = Store.Data.Session_Info.session_name.where_equal(name)))
 
-  def clean_session_info(db: SQL.Database, name: String): Boolean =
+  def clean_session_info(db: SQL.Database, name: String): Boolean = {
+    Export.clean_session(db, name)
+    Document_Build.clean_session(db, name)
+
     Store.Data.transaction_lock(db, create = true, synchronized = true) {
       val already_defined = session_info_defined(db, name)
 
@@ -398,14 +434,9 @@
       db.execute_statement(Store.Data.Sources.table.delete(
         sql = Store.Data.Sources.where_equal(name)))
 
-      db.execute_statement(
-        Export.Data.Base.table.delete(sql = Export.Data.Base.session_name.where_equal(name)))
-
-      db.execute_statement(
-        Document_Build.Data.table.delete(sql = Document_Build.Data.session_name.where_equal(name)))
-
       already_defined
     }
+  }
 
   def write_session_info(
     db: SQL.Database,
@@ -420,48 +451,30 @@
     }
   }
 
-  def read_session_timing(db: SQL.Database, name: String): Properties.T =
-    Properties.decode(read_bytes(db, name, Store.Data.Session_Info.session_timing), cache = cache)
+  def read_session_timing(db: SQL.Database, session: String): Properties.T =
+    Store.Data.transaction_lock(db) { Store.Data.read_session_timing(db, session, cache) }
 
-  def read_command_timings(db: SQL.Database, name: String): Bytes =
-    read_bytes(db, name, Store.Data.Session_Info.command_timings)
-
-  def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] =
-    read_properties(db, name, Store.Data.Session_Info.theory_timings)
+  def read_command_timings(db: SQL.Database, session: String): Bytes =
+    Store.Data.transaction_lock(db) { Store.Data.read_command_timings(db, session) }
 
-  def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] =
-    read_properties(db, name, Store.Data.Session_Info.ml_statistics)
-
-  def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] =
-    read_properties(db, name, Store.Data.Session_Info.task_statistics)
+  def read_theory_timings(db: SQL.Database, session: String): List[Properties.T] =
+    Store.Data.transaction_lock(db) { Store.Data.read_theory_timings(db, session, cache) }
 
-  def read_theories(db: SQL.Database, name: String): List[String] =
-    read_theory_timings(db, name).flatMap(Markup.Name.unapply)
-
-  def read_errors(db: SQL.Database, name: String): List[String] =
-    Build_Log.uncompress_errors(read_bytes(db, name, Store.Data.Session_Info.errors), cache = cache)
+  def read_ml_statistics(db: SQL.Database, session: String): List[Properties.T] =
+    Store.Data.transaction_lock(db) { Store.Data.read_ml_statistics(db, session, cache) }
 
-  def read_build(db: SQL.Database, name: String): Option[Store.Build_Info] = {
-    if (db.tables.contains(Store.Data.Session_Info.table.name)) {
-      db.execute_query_statementO[Store.Build_Info](
-        Store.Data.Session_Info.table.select(
-          sql = Store.Data.Session_Info.session_name.where_equal(name)),
-        { res =>
-          val uuid =
-            try { Option(res.string(Store.Data.Session_Info.uuid)).getOrElse("") }
-            catch { case _: SQLException => "" }
-          Store.Build_Info(
-            SHA1.fake_shasum(res.string(Store.Data.Session_Info.sources)),
-            SHA1.fake_shasum(res.string(Store.Data.Session_Info.input_heaps)),
-            SHA1.fake_shasum(res.string(Store.Data.Session_Info.output_heap)),
-            res.int(Store.Data.Session_Info.return_code),
-            uuid)
-        }
-      )
-    }
-    else None
-  }
+  def read_task_statistics(db: SQL.Database, session: String): List[Properties.T] =
+    Store.Data.transaction_lock(db) { Store.Data.read_task_statistics(db, session, cache) }
+
+  def read_theories(db: SQL.Database, session: String): List[String] =
+    read_theory_timings(db, session).flatMap(Markup.Name.unapply)
+
+  def read_errors(db: SQL.Database, session: String): List[String] =
+    Store.Data.transaction_lock(db) { Store.Data.read_errors(db, session, cache) }
+
+  def read_build(db: SQL.Database, session: String): Option[Store.Build_Info] =
+    Store.Data.transaction_lock(db) { Store.Data.read_build(db, session) }
 
   def read_sources(db: SQL.Database, session: String, name: String = ""): List[Store.Source_File] =
-    Store.Data.read_sources(db, cache.compress, session, name = name)
+    Store.Data.transaction_lock(db) { Store.Data.read_sources(db, session, name, cache.compress) }
 }
diff -r 45381e6bd3ad -r 45a9f5066e07 src/Pure/Tools/build_process.scala
--- a/src/Pure/Tools/build_process.scala	Fri Jul 07 14:05:05 2023 +0100
+++ b/src/Pure/Tools/build_process.scala	Sat Jul 08 16:48:15 2023 +0200
@@ -288,7 +288,7 @@
       old_data: Map[String, A]
     ): Map[String, A] = {
       val dom = data_domain -- old_data.keysIterator
-      val data = old_data -- old_data.keysIterator.filterNot(dom)
+      val data = old_data -- old_data.keysIterator.filterNot(data_domain)
       if (dom.isEmpty) data
       else data_iterator(dom).foldLeft(data) { case (map, a) => map + (a.name -> a) }
     }
@@ -342,7 +342,7 @@
      val start = SQL.Column.date("start")
      val stop = SQL.Column.date("stop")
 
-      val table = make_table("", List(build_uuid, ml_platform, options, start, stop))
+      val table = make_table(List(build_uuid, ml_platform, options, start, stop))
     }
 
     def read_builds(db: SQL.Database, build_uuid: String = ""): List[Build] = {
@@ -413,9 +413,11 @@
       val old_command_timings = SQL.Column.bytes("old_command_timings")
       val build_uuid = Generic.build_uuid
 
-      val table = make_table("sessions",
-        List(name, deps, ancestors, options, sources, timeout,
-          old_time, old_command_timings, build_uuid))
+      val table =
+        make_table(
+          List(name, deps, ancestors, options, sources, timeout,
+            old_time, old_command_timings, build_uuid),
+          name = "sessions")
     }
 
     def read_sessions_domain(db: SQL.Database, build_uuid: String = ""): Set[String] =
@@ -485,7 +487,8 @@
       val stop = SQL.Column.date("stop")
       val serial = SQL.Column.long("serial")
 
-      val table = make_table("workers", List(worker_uuid, build_uuid, start, stamp, stop, serial))
+      val table =
+        make_table(List(worker_uuid, build_uuid, start, stamp, stop, serial), name = "workers")
     }
 
     def read_serial(db: SQL.Database): Long =
@@ -575,7 +578,7 @@
       val info = SQL.Column.string("info")
       val build_uuid = Generic.build_uuid
 
-      val table = make_table("pending", List(name, deps, info, build_uuid))
+      val table = make_table(List(name, deps, info, build_uuid), name = "pending")
     }
 
     def read_pending(db: SQL.Database): List[Task] =
@@ -622,7 +625,8 @@
       val hostname = SQL.Column.string("hostname")
       val numa_node = SQL.Column.int("numa_node")
 
-      val table = make_table("running", List(name, worker_uuid, build_uuid, hostname, numa_node))
+      val table =
+        make_table(List(name, worker_uuid, build_uuid, hostname, numa_node), name = "running")
     }
 
     def read_running(db: SQL.Database): State.Running =
@@ -683,9 +687,10 @@
       val current = SQL.Column.bool("current")
 
       val table =
-        make_table("results",
+        make_table(
           List(name, worker_uuid, build_uuid, hostname, numa_node,
-            rc, out, err, timing_elapsed, timing_cpu, timing_gc, output_shasum, current))
+            rc, out, err, timing_elapsed, timing_cpu, timing_gc, output_shasum, current),
+          name = "results")
     }
 
     def read_results_domain(db: SQL.Database): Set[String] =
@@ -728,8 +733,12 @@
       )
 
     def update_results(db: SQL.Database, results: State.Results): Boolean = {
-      val old_results = read_results_domain(db)
-      val insert = results.valuesIterator.filterNot(res => old_results.contains(res.name)).toList
+      val insert =
+        if (results.isEmpty) Nil
+        else {
+          val old_results = read_results_domain(db)
+          results.valuesIterator.filterNot(res => old_results.contains(res.name)).toList
+        }
 
       for (result <- insert) {
         val process_result = result.process_result
@@ -845,12 +854,12 @@
   private val _build_database: Option[SQL.Database] =
     try {
       for (db <- store.maybe_open_build_database()) yield {
-        val more_tables = if (db.is_postgresql) Store.Data.tables else SQL.Tables.empty
+        val store_tables = if (db.is_postgresql) Store.Data.tables else SQL.Tables.empty
         Build_Process.Data.transaction_lock(db, create = true) {
           Build_Process.Data.clean_build(db)
-          more_tables.lock(db, create = true)
+          store_tables.lock(db, create = true)
         }
-        Build_Process.Data.vacuum(db, more_tables = more_tables)
+        db.vacuum(Build_Process.Data.tables ::: store_tables)
        db
      }
    }
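Beyond the schema refactoring, build_process.scala carries what appears to be a small bug fix in the generic cache-update helper: stale entries must be detected against data_domain (the names currently present in the database), not against dom (only the names that are new relative to the cache). A worked illustration with hypothetical values:

object Prune_Demo extends App {
  val data_domain = Set("a", "b")                 // names currently in the database
  val old_data = Map("b" -> 1, "c" -> 2)          // cached entries from the previous round
  val dom = data_domain -- old_data.keysIterator  // genuinely new names: Set("a")

  // Old code: no cached key is in dom, so filterNot(dom) keeps them all
  // and the subtraction wipes the entire cache.
  val buggy = old_data -- old_data.keysIterator.filterNot(dom)
  println(buggy)  // Map()

  // Fixed code: only "c", which has left the domain, is discarded.
  val fixed = old_data -- old_data.keysIterator.filterNot(data_domain)
  println(fixed)  // Map(b -> 1)
}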