# HG changeset patch
# User wenzelm
# Date 1687350471 -7200
# Node ID 2df0f3604a6721c57988c90a56e1fe741b80bc87
# Parent  721c118f7001e0efef35e93806498b718351c40a
clarified signature: more explicit class SQL.Data;

diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/Admin/build_log.scala
--- a/src/Pure/Admin/build_log.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/Admin/build_log.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -630,11 +630,7 @@
 
   /* SQL data model */
 
-  object Data {
-    def build_log_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
-      SQL.Table("isabelle_build_log" + if_proper(name, "_" + name), columns, body)
-
-
+  object Data extends SQL.Data("isabelle_build_log") {
     /* main content */
 
     val log_name = SQL.Column.string("log_name").make_primary_key
@@ -662,21 +658,21 @@
     val known = SQL.Column.bool("known")
 
     val meta_info_table =
-      build_log_table("meta_info", log_name :: Prop.all_props ::: Settings.all_settings)
+      make_table("meta_info", log_name :: Prop.all_props ::: Settings.all_settings)
 
     val sessions_table =
-      build_log_table("sessions",
+      make_table("sessions",
         List(log_name, session_name, chapter, groups, threads, timing_elapsed, timing_cpu,
           timing_gc, timing_factor, ml_timing_elapsed, ml_timing_cpu, ml_timing_gc,
           ml_timing_factor, heap_size, status, errors, sources))
 
     val theories_table =
-      build_log_table("theories",
+      make_table("theories",
         List(log_name, session_name, theory_name, theory_timing_elapsed, theory_timing_cpu,
           theory_timing_gc))
 
     val ml_statistics_table =
-      build_log_table("ml_statistics", List(log_name, session_name, ml_statistics))
+      make_table("ml_statistics", List(log_name, session_name, ml_statistics))
 
 
     /* AFP versions */
@@ -684,7 +680,7 @@
     val isabelle_afp_versions_table: SQL.Table = {
       val version1 = Prop.isabelle_version
       val version2 = Prop.afp_version
-      build_log_table("isabelle_afp_versions", List(version1.make_primary_key, version2),
+      make_table("isabelle_afp_versions", List(version1.make_primary_key, version2),
         SQL.select(List(version1, version2), distinct = true) + meta_info_table +
           SQL.where_and(version1.defined, version2.defined))
     }
@@ -701,7 +697,7 @@
         if (afp) ("afp_pull_date", List(Prop.isabelle_version, Prop.afp_version))
         else ("pull_date", List(Prop.isabelle_version))
 
-      build_log_table(name, versions.map(_.make_primary_key) ::: List(pull_date(afp)),
+      make_table(name, versions.map(_.make_primary_key) ::: List(pull_date(afp)),
         "SELECT " + versions.mkString(", ") +
           ", min(" + Prop.build_start + ") AS " + pull_date(afp) +
         " FROM " + meta_info_table +
@@ -798,7 +794,7 @@
           b_table.query_named + SQL.join_inner + sessions_table +
           " ON " + log_name(b_table) + " = " + log_name(sessions_table))
 
-      build_log_table("", c_columns ::: List(ml_statistics),
+      make_table("", c_columns ::: List(ml_statistics),
         {
          SQL.select(c_columns.map(_.apply(c_table)) ::: List(ml_statistics)) +
          c_table.query_named + SQL.join_outer + ml_statistics_table + " ON " +
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/General/sql.scala
--- a/src/Pure/General/sql.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/General/sql.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -247,6 +247,26 @@
     }
   }
 
+  abstract class Data(table_prefix: String = "") {
+    def tables: Tables = Tables.empty
+
+    def transaction_lock[A](
+      db: Database,
+      more_tables: Tables = Tables.empty,
+      create: Boolean = false
+    )(body: => A): A = db.transaction { (tables ::: more_tables).lock(db, create = create); body }
+
+    def vacuum(db: Database, more_tables: Tables = Tables.empty): Unit =
+      db.vacuum(tables = tables ::: more_tables)
+
+    def make_table(name: String, columns: List[Column], body: String = ""): Table = {
+      val table_name =
+        List(proper_string(table_prefix), proper_string(name)).flatten.mkString("_")
+      require(table_name.nonEmpty, "Undefined database table name")
+      Table(table_name, columns, body = body)
+    }
+  }
+
 
 
   /** SQL database operations **/
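
For orientation, a minimal sketch (not part of the changeset) of how a module adopts the new
SQL.Data base class introduced above; the object Example_Data, the prefix "isabelle_example"
and the table/column names are hypothetical:

  object Example_Data extends SQL.Data("isabelle_example") {
    object Item {
      val name = SQL.Column.string("name").make_primary_key
      val value = SQL.Column.string("value")

      // make_table applies the prefix: this table is named "isabelle_example_item"
      val table = make_table("item", List(name, value))
    }

    // tables registered here are locked by transaction_lock and compacted by vacuum
    override lazy val tables = SQL.Tables(Item.table)
  }

Call sites then write Example_Data.transaction_lock(db, create = true) { ... } rather than
passing an explicit SQL.Tables value to db.transaction_lock, which is the rewrite applied
uniformly in the remaining files of this changeset.
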
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/ML/ml_heap.scala
--- a/src/Pure/ML/ml_heap.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/ML/ml_heap.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -43,9 +43,8 @@
 
   /* SQL data model */
 
-  object Data {
-    def make_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
-      SQL.Table("isabelle_heaps" + if_proper(name, "_" + name), columns, body = body)
+  object Data extends SQL.Data("isabelle_heaps") {
+    override lazy val tables = SQL.Tables(Base.table, Slices.table)
 
     object Generic {
       val name = SQL.Column.string("name").make_primary_key
@@ -107,12 +106,10 @@
           stmt.long(1) = size
           stmt.string(2) = digest.toString
         })
-
-    val all_tables: SQL.Tables = SQL.Tables(Base.table, Slices.table)
   }
 
   def clean_entry(db: SQL.Database, name: String): Unit =
-    db.transaction_lock(Data.all_tables, create = true) { Data.clean_entry(db, name) }
+    Data.transaction_lock(db, create = true) { Data.clean_entry(db, name) }
 
   def write_digest(
     database: Option[SQL.Database],
@@ -130,7 +127,7 @@
         val step = (size.toDouble / slices.toDouble).ceil.toLong
 
         try {
-          db.transaction_lock(Data.all_tables, create = true) { Data.prepare_entry(db, name) }
+          Data.transaction_lock(db, create = true) { Data.prepare_entry(db, name) }
 
           for (i <- 0 until slices) {
             val j = i + 1
@@ -139,13 +136,13 @@
             val content = Bytes.read_file(heap.file, offset = offset, limit = limit)
               .compress(cache = cache)
-            db.transaction_lock(Data.all_tables) { Data.write_entry(db, name, i, content) }
+            Data.transaction_lock(db) { Data.write_entry(db, name, i, content) }
           }
 
-          db.transaction_lock(Data.all_tables) { Data.finish_entry(db, name, size, digest) }
+          Data.transaction_lock(db) { Data.finish_entry(db, name, size, digest) }
         }
         catch {
           case exn: Throwable =>
-            db.transaction_lock(Data.all_tables, create = true) { Data.clean_entry(db, name) }
+            Data.transaction_lock(db, create = true) { Data.clean_entry(db, name) }
             throw exn
         }
       case None =>
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/System/host.scala
--- a/src/Pure/System/host.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/System/host.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -95,11 +95,10 @@
 
   /* SQL data model */
 
-  object Data {
+  object Data extends SQL.Data("isabelle_host") {
     val database: Path = Path.explode("$ISABELLE_HOME_USER/host.db")
 
-    def make_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
-      SQL.Table("isabelle_host" + if_proper(name, "_" + name), columns, body = body)
+    override lazy val tables = SQL.Tables(Node_Info.table)
 
     object Node_Info {
       val hostname = SQL.Column.string("hostname").make_primary_key
@@ -108,8 +107,6 @@
       val numa_next = SQL.Column.int("numa_next")
       val table = make_table("node_info", List(hostname, numa_next))
     }
 
-    val all_tables: SQL.Tables = SQL.Tables(Node_Info.table)
-
     def read_numa_next(db: SQL.Database, hostname: String): Int =
       db.execute_query_statementO[Int](
         Node_Info.table.select(List(Node_Info.numa_next),
@@ -137,7 +134,7 @@
     else {
       val available = available_nodes.zipWithIndex
       val used = used_nodes
-      db.transaction_lock(Data.all_tables, create = true) {
+      Data.transaction_lock(db, create = true) {
         val numa_next = Data.read_numa_next(db, hostname)
         val numa_index = available.collectFirst({ case (n, i) if n == numa_next => i }).getOrElse(0)
         val candidates = available.drop(numa_index) ::: available.take(numa_index)
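
The remaining files in this changeset repeat the same call-site rewrite. Shown once in
isolation (a sketch only, with Host.Data standing in for any of the refactored Data objects,
and db: SQL.Database, hostname: String given):

  // before: callers had to name the table set explicitly
  db.transaction_lock(Host.Data.all_tables, create = true) {
    Host.Data.read_numa_next(db, hostname)
  }

  // after: the Data object knows its own tables
  Host.Data.transaction_lock(db, create = true) {
    Host.Data.read_numa_next(db, hostname)
  }
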
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/System/progress.scala
--- a/src/Pure/System/progress.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/System/progress.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -41,11 +41,10 @@
 
   /* SQL data model */
 
-  object Data {
+  object Data extends SQL.Data("isabelle_progress") {
     val database: Path = Path.explode("$ISABELLE_HOME_USER/progress.db")
 
-    def make_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
-      SQL.Table("isabelle_progress" + if_proper(name, "_" + name), columns, body = body)
+    override lazy val tables = SQL.Tables(Base.table, Agents.table, Messages.table)
 
     object Base {
       val context_uuid = SQL.Column.string("context_uuid").make_primary_key
@@ -83,8 +82,6 @@
       val table = make_table("messages", List(context, serial, kind, text, verbose))
     }
 
-    val all_tables: SQL.Tables = SQL.Tables(Base.table, Agents.table, Messages.table)
-
     def read_progress_context(db: SQL.Database, context_uuid: String): Option[Long] =
       db.execute_query_statementO(
         Base.table.select(List(Base.context),
@@ -252,11 +249,8 @@
 
   def agent_uuid: String = synchronized { _agent_uuid }
 
-  private def transaction_lock[A](body: => A, create: Boolean = false): A =
-    db.transaction_lock(Progress.Data.all_tables, create = create)(body)
-
   private def init(): Unit = synchronized {
-    transaction_lock(create = true, body = {
+    Progress.Data.transaction_lock(db, create = true) {
       Progress.Data.read_progress_context(db, context_uuid) match {
         case Some(context) =>
           _context = context
@@ -286,13 +280,13 @@
           stmt.date(8) = None
           stmt.long(9) = 0L
         })
-    })
-    if (context_uuid == _agent_uuid) db.vacuum(Progress.Data.all_tables)
+    }
+    if (context_uuid == _agent_uuid) Progress.Data.vacuum(db)
   }
 
   def exit(): Unit = synchronized {
     if (_context > 0) {
-      transaction_lock {
+      Progress.Data.transaction_lock(db) {
         Progress.Data.update_agent(db, _agent_uuid, _seen, stop = true)
       }
       _context = 0
@@ -301,7 +295,7 @@
 
   private def sync_database[A](body: => A): A = synchronized {
     require(_context > 0)
-    transaction_lock {
+    Progress.Data.transaction_lock(db) {
       val stopped_db = Progress.Data.read_progress_stopped(db, _context)
       val stopped = base_progress.stopped
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/Thy/store.scala
--- a/src/Pure/Thy/store.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/Thy/store.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -77,7 +77,10 @@
 
   /* SQL data model */
 
-  object Data {
+  object Data extends SQL.Data() {
+    override lazy val tables =
+      SQL.Tables(Session_Info.table, Sources.table, Export.Data.table, Document_Build.Data.table)
+
     object Session_Info {
       val session_name = SQL.Column.string("session_name").make_primary_key
@@ -178,9 +181,6 @@
         }
       )
     }
-
-    val all_tables: SQL.Tables =
-      SQL.Tables(Session_Info.table, Sources.table, Export.Data.table, Document_Build.Data.table)
   }
 }
@@ -367,7 +367,7 @@
 
   def session_info_exists(db: SQL.Database): Boolean = {
     val tables = db.tables
-    Store.Data.all_tables.forall(table => tables.contains(table.name))
+    Store.Data.tables.forall(table => tables.contains(table.name))
   }
 
   def session_info_defined(db: SQL.Database, name: String): Boolean =
@@ -376,7 +376,7 @@
       sql = Store.Data.Session_Info.session_name.where_equal(name)))
 
   def init_session_info(db: SQL.Database, name: String): Boolean =
-    db.transaction_lock(Store.Data.all_tables, create = true) {
+    Store.Data.transaction_lock(db, create = true) {
       val already_defined = session_info_defined(db, name)
 
       db.execute_statement(
@@ -403,7 +403,7 @@
     build_log: Build_Log.Session_Info,
     build: Store.Build_Info
   ): Unit = {
-    db.transaction_lock(Store.Data.all_tables) {
+    Store.Data.transaction_lock(db) {
       Store.Data.write_sources(db, session_name, sources)
       Store.Data.write_session_info(db, cache.compress, session_name, build_log, build)
     }
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/Tools/build_process.scala
--- a/src/Pure/Tools/build_process.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/Tools/build_process.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -124,11 +124,11 @@
   def prepare_database(): Unit = {
     using_option(store.maybe_open_build_database(Data.database)) { db =>
       val shared_db = db.is_postgresql
-      db.transaction_lock(Data.all_tables, create = true) {
+      Data.transaction_lock(db, create = true) {
         Data.clean_build(db)
-        if (shared_db) Store.Data.all_tables.lock(db, create = true)
+        if (shared_db) Store.Data.tables.lock(db, create = true)
       }
-      db.vacuum(Data.all_tables ::: (if (shared_db) Store.Data.all_tables else SQL.Tables.empty))
+      Data.vacuum(db, more_tables = if (shared_db) Store.Data.tables else SQL.Tables.empty)
     }
   }
@@ -267,12 +267,9 @@
 
   /** SQL data model **/
 
-  object Data {
+  object Data extends SQL.Data("isabelle_build") {
     val database: Path = Path.explode("$ISABELLE_HOME_USER/build.db")
 
-    def make_table(name: String, columns: List[SQL.Column], body: String = ""): SQL.Table =
-      SQL.Table("isabelle_build" + if_proper(name, "_" + name), columns, body = body)
-
     def pull_data[A <: Library.Named](
       data_domain: Set[String],
       data_iterator: Set[String] => Iterator[A],
@@ -727,7 +724,7 @@
 
     /* collective operations */
 
-    val all_tables: SQL.Tables =
+    override val tables =
       SQL.Tables(
         Base.table,
         Workers.table,
@@ -737,7 +734,7 @@
         Results.table)
 
     val build_uuid_tables =
-      all_tables.filter(table =>
+      tables.filter(table =>
         table.columns.exists(column => column.name == Generic.build_uuid.name))
 
     def pull_database(
@@ -845,7 +842,7 @@
     _database match {
       case None => body
       case Some(db) =>
-        db.transaction_lock(Build_Process.Data.all_tables) {
+        Build_Process.Data.transaction_lock(db) {
           progress.asInstanceOf[Database_Progress].sync()
           _state = Build_Process.Data.pull_database(db, worker_uuid, hostname, _state)
           val res = body
diff -r 721c118f7001 -r 2df0f3604a67 src/Pure/Tools/server.scala
--- a/src/Pure/Tools/server.scala	Wed Jun 21 11:42:11 2023 +0200
+++ b/src/Pure/Tools/server.scala	Wed Jun 21 14:27:51 2023 +0200
@@ -363,7 +363,7 @@
 
   val default_name = "isabelle"
 
-  object Data {
+  object Data extends SQL.Data() {
     val database = Path.explode("$ISABELLE_HOME_USER/servers.db")
 
     val name = SQL.Column.string("name").make_primary_key
@@ -371,7 +371,7 @@
     val password = SQL.Column.string("password")
     val table = SQL.Table("isabelle_servers", List(name, port, password))
 
-    val tables = SQL.Tables(table)
+    override val tables = SQL.Tables(table)
   }
 
   def list(db: SQLite.Database): List[Info] =
@@ -398,11 +398,11 @@
     log: Logger = No_Logger
   ): (Info, Option[Server]) = {
     using(SQLite.open_database(Data.database, restrict = true)) { db =>
-      db.transaction_lock(Data.tables, create = true) {
+      Data.transaction_lock(db, create = true) {
         list(db).filterNot(_.active).foreach(server_info =>
           db.execute_statement(Data.table.delete(sql = Data.name.where_equal(server_info.name))))
       }
-      db.transaction_lock(Data.tables) {
+      Data.transaction_lock(db) {
         find(db, name) match {
           case Some(server_info) => (server_info, None)
           case None =>
@@ -428,7 +428,7 @@
   def exit(name: String = default_name): Boolean = {
     using(SQLite.open_database(Data.database)) { db =>
-      db.transaction_lock(Data.tables) {
+      Data.transaction_lock(db) {
         find(db, name) match {
           case Some(server_info) =>
             using(server_info.connection())(_.write_line_message("shutdown"))
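
Finally, a sketch (not part of the changeset) of the more_tables parameter of the new
signatures. The changeset itself passes more_tables only to Data.vacuum in
build_process.scala and locks Store.Data.tables explicitly inside the transaction body, but
the parameter supports the same shared-database case directly; db: SQL.Database is assumed
to hold both the build_process and store tables:

  // lock and compact Build_Process.Data tables together with Store.Data tables
  Build_Process.Data.transaction_lock(db, more_tables = Store.Data.tables, create = true) {
    Build_Process.Data.clean_build(db)
  }
  Build_Process.Data.vacuum(db, more_tables = Store.Data.tables)
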