# HG changeset patch
# User wenzelm
# Date 1689702672 -7200
# Node ID 7853d9072d1bb60f07e897ccf2ec2ed663058119
# Parent  c39819e3adc5c2db7201f22f04053752365f2570
renamed object Data to private_data, to emphasize its intended scope (but it is publicly accessible in the database);

diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/ML/ml_heap.scala
--- a/src/Pure/ML/ml_heap.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/ML/ml_heap.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -43,7 +43,7 @@

   /* SQL data model */

-  object Data extends SQL.Data("isabelle_heaps") {
+  object private_data extends SQL.Data("isabelle_heaps") {
     override lazy val tables = SQL.Tables(Base.table, Slices.table)

     object Generic {
@@ -123,13 +123,13 @@
   }

   def clean_entry(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true, label = "ML_Heap.clean_entry") {
-      Data.clean_entry(db, session_name)
+    private_data.transaction_lock(db, create = true, label = "ML_Heap.clean_entry") {
+      private_data.clean_entry(db, session_name)
     }

   def get_entry(db: SQL.Database, session_name: String): Option[SHA1.Digest] =
-    Data.transaction_lock(db, create = true, label = "ML_Heap.get_entry") {
-      Data.get_entry(db, session_name)
+    private_data.transaction_lock(db, create = true, label = "ML_Heap.get_entry") {
+      private_data.get_entry(db, session_name)
     }

   def store(
@@ -149,8 +149,8 @@
     val step = (size.toDouble / slices.toDouble).ceil.toLong

     try {
-      Data.transaction_lock(db, create = true, label = "ML_Heap.store1") {
-        Data.prepare_entry(db, session_name)
+      private_data.transaction_lock(db, create = true, label = "ML_Heap.store1") {
+        private_data.prepare_entry(db, session_name)
       }

       for (i <- 0 until slices) {
@@ -160,18 +160,18 @@
         val content =
           Bytes.read_file(heap.file, offset = offset, limit = limit)
             .compress(cache = cache)
-        Data.transaction_lock(db, label = "ML_Heap.store2") {
-          Data.write_entry(db, session_name, i, content)
+        private_data.transaction_lock(db, label = "ML_Heap.store2") {
+          private_data.write_entry(db, session_name, i, content)
         }
       }

-      Data.transaction_lock(db, label = "ML_Heap.store3") {
-        Data.finish_entry(db, session_name, size, digest)
+      private_data.transaction_lock(db, label = "ML_Heap.store3") {
+        private_data.finish_entry(db, session_name, size, digest)
       }
     }
     catch { case exn: Throwable =>
-      Data.transaction_lock(db, create = true, label = "ML_Heap.store4") {
-        Data.clean_entry(db, session_name)
+      private_data.transaction_lock(db, create = true, label = "ML_Heap.store4") {
+        private_data.clean_entry(db, session_name)
       }
       throw exn
     }
@@ -188,14 +188,14 @@
       database match {
        case None =>
        case Some(db) =>
-          Data.transaction_lock(db, create = true, label = "ML_Heap.restore") {
-            val db_digest = Data.get_entry(db, session_name)
+          private_data.transaction_lock(db, create = true, label = "ML_Heap.restore") {
+            val db_digest = private_data.get_entry(db, session_name)
            val file_digest = read_file_digest(heap)

            if (db_digest.isDefined && db_digest != file_digest) {
              Isabelle_System.make_directory(heap.expand.dir)
              Bytes.write(heap, Bytes.empty)
-              for (slice <- Data.read_entry(db, session_name)) {
+              for (slice <- private_data.read_entry(db, session_name)) {
                Bytes.append(heap, slice.uncompress(cache = cache))
              }
              val digest = write_file_digest(heap)
diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/System/host.scala
--- a/src/Pure/System/host.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/System/host.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -95,7 +95,7 @@

   /* SQL data model */

-  object Data extends SQL.Data("isabelle_host") {
SQL.Data("isabelle_host") { + object private_data extends SQL.Data("isabelle_host") { val database: Path = Path.explode("$ISABELLE_HOME_USER/host.db") override lazy val tables = SQL.Tables(Node_Info.table) @@ -134,8 +134,8 @@ else { val available = available_nodes.zipWithIndex val used = used_nodes - Data.transaction_lock(db, create = true, label = "Host.next_numa_node") { - val numa_next = Data.read_numa_next(db, hostname) + private_data.transaction_lock(db, create = true, label = "Host.next_numa_node") { + val numa_next = private_data.read_numa_next(db, hostname) val numa_index = available.collectFirst({ case (n, i) if n == numa_next => i }).getOrElse(0) val candidates = available.drop(numa_index) ::: available.take(numa_index) val (n, i) = @@ -143,7 +143,7 @@ candidates.find({ case (n, _) => !used(n) }) getOrElse candidates.head val numa_next1 = available_nodes((i + 1) % available_nodes.length) - if (numa_next != numa_next1) Data.write_numa_next(db, hostname, numa_next1) + if (numa_next != numa_next1) private_data.write_numa_next(db, hostname, numa_next1) Some(n) } diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/System/progress.scala --- a/src/Pure/System/progress.scala Tue Jul 18 13:34:18 2023 +0200 +++ b/src/Pure/System/progress.scala Tue Jul 18 19:51:12 2023 +0200 @@ -41,7 +41,7 @@ /* SQL data model */ - object Data extends SQL.Data("isabelle_progress") { + object private_data extends SQL.Data("isabelle_progress") { val database: Path = Path.explode("$ISABELLE_HOME_USER/progress.db") override lazy val tables = SQL.Tables(Base.table, Agents.table, Messages.table) @@ -259,21 +259,21 @@ def agent_uuid: String = synchronized { _agent_uuid } private def init(): Unit = synchronized { - Progress.Data.transaction_lock(db, create = true, label = "Database_Progress.init") { - Progress.Data.read_progress_context(db, context_uuid) match { + Progress.private_data.transaction_lock(db, create = true, label = "Database_Progress.init") { + Progress.private_data.read_progress_context(db, context_uuid) match { case Some(context) => _context = context _agent_uuid = UUID.random().toString case None => - _context = Progress.Data.next_progress_context(db) + _context = Progress.private_data.next_progress_context(db) _agent_uuid = context_uuid - db.execute_statement(Progress.Data.Base.table.insert(), { stmt => + db.execute_statement(Progress.private_data.Base.table.insert(), { stmt => stmt.string(1) = context_uuid stmt.long(2) = _context stmt.bool(3) = false }) } - db.execute_statement(Progress.Data.Agents.table.insert(), { stmt => + db.execute_statement(Progress.private_data.Agents.table.insert(), { stmt => val java = ProcessHandle.current() val java_pid = java.pid val java_start = Date.instant(java.info.startInstant.get) @@ -291,13 +291,13 @@ stmt.long(10) = 0L }) } - if (context_uuid == _agent_uuid) db.vacuum(Progress.Data.tables.list) + if (context_uuid == _agent_uuid) db.vacuum(Progress.private_data.tables.list) } def exit(close: Boolean = false): Unit = synchronized { if (_context > 0) { - Progress.Data.transaction_lock(db, label = "Database_Progress.exit") { - Progress.Data.update_agent(db, _agent_uuid, _seen, stop_now = true) + Progress.private_data.transaction_lock(db, label = "Database_Progress.exit") { + Progress.private_data.update_agent(db, _agent_uuid, _seen, stop_now = true) } _context = 0 } @@ -308,19 +308,19 @@ if (_context < 0) throw new IllegalStateException("Database_Progress before init") if (_context == 0) throw new IllegalStateException("Database_Progress after exit") - 
-    Progress.Data.transaction_lock(db, label = "Database_Progress.sync") {
-      val stopped_db = Progress.Data.read_progress_stopped(db, _context)
+    Progress.private_data.transaction_lock(db, label = "Database_Progress.sync") {
+      val stopped_db = Progress.private_data.read_progress_stopped(db, _context)
       val stopped = base_progress.stopped

       if (stopped_db && !stopped) base_progress.stop()
-      if (stopped && !stopped_db) Progress.Data.write_progress_stopped(db, _context, true)
+      if (stopped && !stopped_db) Progress.private_data.write_progress_stopped(db, _context, true)

-      val messages = Progress.Data.read_messages(db, _context, seen = _seen)
+      val messages = Progress.private_data.read_messages(db, _context, seen = _seen)
       for ((seen, message) <- messages) {
         if (base_progress.do_output(message)) base_progress.output(message)
         _seen = _seen max seen
       }
-      if (messages.nonEmpty) Progress.Data.update_agent(db, _agent_uuid, _seen)
+      if (messages.nonEmpty) Progress.private_data.update_agent(db, _agent_uuid, _seen)

       body
     }
@@ -330,13 +330,13 @@

   private def output_database(message: Progress.Message, body: => Unit): Unit =
     sync_database {
-      val serial = Progress.Data.next_messages_serial(db, _context)
-      Progress.Data.write_messages(db, _context, serial, message)
+      val serial = Progress.private_data.next_messages_serial(db, _context)
+      Progress.private_data.write_messages(db, _context, serial, message)

       body

       _seen = _seen max serial
-      Progress.Data.update_agent(db, _agent_uuid, _seen)
+      Progress.private_data.update_agent(db, _agent_uuid, _seen)
     }

   override def output(message: Progress.Message): Unit =
diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/Thy/document_build.scala
--- a/src/Pure/Thy/document_build.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/Thy/document_build.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -54,7 +54,7 @@

   /* SQL data model */

-  object Data extends SQL.Data("isabelle_documents") {
+  object private_data extends SQL.Data("isabelle_documents") {
     override lazy val tables = SQL.Tables(Base.table)

     object Base {
@@ -104,18 +104,18 @@
   }

   def clean_session(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true, label = "Document_Build.clean_session") {
-      Data.clean_session(db, session_name)
+    private_data.transaction_lock(db, create = true, label = "Document_Build.clean_session") {
+      private_data.clean_session(db, session_name)
     }

   def read_document(db: SQL.Database, session_name: String, name: String): Option[Document_Output] =
-    Data.transaction_lock(db, label = "Document_Build.read_document") {
-      Data.read_document(db, session_name, name)
+    private_data.transaction_lock(db, label = "Document_Build.read_document") {
+      private_data.read_document(db, session_name, name)
     }

   def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit =
-    Data.transaction_lock(db, label = "Document_Build.write_document") {
-      Data.write_document(db, session_name, doc)
+    private_data.transaction_lock(db, label = "Document_Build.write_document") {
+      private_data.write_document(db, session_name, doc)
     }
diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/Thy/export.scala
--- a/src/Pure/Thy/export.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/Thy/export.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -29,7 +29,7 @@

   /* SQL data model */

-  object Data extends SQL.Data() {
+  object private_data extends SQL.Data() {
     override lazy val tables = SQL.Tables(Base.table)

     object Base {
@@ -63,7 +63,7 @@
     def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
       db.execute_query_statementO[Entry](
         Base.table.select(List(Base.executable, Base.compressed, Base.body),
-          sql = Data.where_equal(entry_name.session, entry_name.theory, entry_name.name)),
+          sql = private_data.where_equal(entry_name.session, entry_name.theory, entry_name.name)),
         { res =>
           val executable = res.bool(Base.executable)
           val compressed = res.bool(Base.compressed)
@@ -88,13 +88,13 @@
     def read_theory_names(db: SQL.Database, session_name: String): List[String] =
       db.execute_query_statement(
         Base.table.select(List(Base.theory_name), distinct = true,
-          sql = Data.where_equal(session_name) + SQL.order_by(List(Base.theory_name))),
+          sql = private_data.where_equal(session_name) + SQL.order_by(List(Base.theory_name))),
         List.from[String], res => res.string(Base.theory_name))

     def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
       db.execute_query_statement(
         Base.table.select(List(Base.theory_name, Base.name),
-          sql = Data.where_equal(session_name)) + SQL.order_by(List(Base.theory_name, Base.name)),
+          sql = private_data.where_equal(session_name)) + SQL.order_by(List(Base.theory_name, Base.name)),
         List.from[Entry_Name],
         { res =>
           Entry_Name(
@@ -204,23 +204,23 @@
   }

   def clean_session(db: SQL.Database, session_name: String): Unit =
-    Data.transaction_lock(db, create = true, label = "Export.clean_session") {
-      Data.clean_session(db, session_name)
+    private_data.transaction_lock(db, create = true, label = "Export.clean_session") {
+      private_data.clean_session(db, session_name)
     }

   def read_theory_names(db: SQL.Database, session_name: String): List[String] =
-    Data.transaction_lock(db, label = "Export.read_theory_names") {
-      Data.read_theory_names(db, session_name)
+    private_data.transaction_lock(db, label = "Export.read_theory_names") {
+      private_data.read_theory_names(db, session_name)
     }

   def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
-    Data.transaction_lock(db, label = "Export.read_entry_names") {
-      Data.read_entry_names(db, session_name)
+    private_data.transaction_lock(db, label = "Export.read_entry_names") {
+      private_data.read_entry_names(db, session_name)
     }

   def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
-    Data.transaction_lock(db, label = "Export.read_entry") {
-      Data.read_entry(db, entry_name, cache)
+    private_data.transaction_lock(db, label = "Export.read_entry") {
+      private_data.read_entry(db, entry_name, cache)
     }
@@ -238,21 +238,21 @@
       consume =
         { (args: List[(Entry, Boolean)]) =>
           val results =
-            Data.transaction_lock(db, label = "Export.consumer(" + args.length + ")") {
+            private_data.transaction_lock(db, label = "Export.consumer(" + args.length + ")") {
               for ((entry, strict) <- args)
               yield {
                 if (progress.stopped) {
                   entry.cancel()
                   Exn.Res(())
                 }
-                else if (Data.readable_entry(db, entry.entry_name)) {
+                else if (private_data.readable_entry(db, entry.entry_name)) {
                   if (strict) {
                     val msg = message("Duplicate export", entry.theory_name, entry.name)
                     errors.change(msg :: _)
                   }
                   Exn.Res(())
                 }
-                else Exn.capture { Data.write_entry(db, entry) }
+                else Exn.capture { private_data.write_entry(db, entry) }
               }
             }
           (results, true)
diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/Thy/store.scala
--- a/src/Pure/Thy/store.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/Thy/store.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -77,7 +77,7 @@

   /* SQL data model */

-  object Data extends SQL.Data() {
+  object private_data extends SQL.Data() {
     override lazy val tables = SQL.Tables(Session_Info.table, Sources.table)

     object Session_Info {
@@ -417,26 +417,26 @@
   /* session info */

   def session_info_exists(db: SQL.Database): Boolean =
-    Store.Data.tables.forall(db.exists_table)
+    Store.private_data.tables.forall(db.exists_table)

   def session_info_defined(db: SQL.Database, name: String): Boolean =
     db.execute_query_statementB(
-      Store.Data.Session_Info.table.select(List(Store.Data.Session_Info.session_name),
-        sql = Store.Data.Session_Info.session_name.where_equal(name)))
+      Store.private_data.Session_Info.table.select(List(Store.private_data.Session_Info.session_name),
+        sql = Store.private_data.Session_Info.session_name.where_equal(name)))

   def clean_session_info(db: SQL.Database, name: String): Boolean = {
     Export.clean_session(db, name)
     Document_Build.clean_session(db, name)

-    Store.Data.transaction_lock(db, create = true, label = "Store.clean_session_info") {
+    Store.private_data.transaction_lock(db, create = true, label = "Store.clean_session_info") {
       val already_defined = session_info_defined(db, name)

       db.execute_statement(
-        Store.Data.Session_Info.table.delete(
-          sql = Store.Data.Session_Info.session_name.where_equal(name)))
+        Store.private_data.Session_Info.table.delete(
+          sql = Store.private_data.Session_Info.session_name.where_equal(name)))

-      db.execute_statement(Store.Data.Sources.table.delete(
-        sql = Store.Data.Sources.where_equal(name)))
+      db.execute_statement(Store.private_data.Sources.table.delete(
+        sql = Store.private_data.Sources.where_equal(name)))

       already_defined
     }
@@ -449,52 +449,52 @@
     build_log: Build_Log.Session_Info,
     build: Store.Build_Info
   ): Unit = {
-    Store.Data.transaction_lock(db, label = "Store.write_session_info") {
-      Store.Data.write_sources(db, session_name, sources)
-      Store.Data.write_session_info(db, cache.compress, session_name, build_log, build)
+    Store.private_data.transaction_lock(db, label = "Store.write_session_info") {
+      Store.private_data.write_sources(db, session_name, sources)
+      Store.private_data.write_session_info(db, cache.compress, session_name, build_log, build)
     }
   }

   def read_session_timing(db: SQL.Database, session: String): Properties.T =
-    Store.Data.transaction_lock(db, label = "Store.read_session_timing") {
-      Store.Data.read_session_timing(db, session, cache)
+    Store.private_data.transaction_lock(db, label = "Store.read_session_timing") {
+      Store.private_data.read_session_timing(db, session, cache)
     }

   def read_command_timings(db: SQL.Database, session: String): Bytes =
-    Store.Data.transaction_lock(db, label = "Store.read_command_timings") {
-      Store.Data.read_command_timings(db, session)
+    Store.private_data.transaction_lock(db, label = "Store.read_command_timings") {
+      Store.private_data.read_command_timings(db, session)
     }

   def read_theory_timings(db: SQL.Database, session: String): List[Properties.T] =
-    Store.Data.transaction_lock(db, label = "Store.read_theory_timings") {
-      Store.Data.read_theory_timings(db, session, cache)
+    Store.private_data.transaction_lock(db, label = "Store.read_theory_timings") {
+      Store.private_data.read_theory_timings(db, session, cache)
     }

   def read_ml_statistics(db: SQL.Database, session: String): List[Properties.T] =
-    Store.Data.transaction_lock(db, label = "Store.read_ml_statistics") {
-      Store.Data.read_ml_statistics(db, session, cache)
+    Store.private_data.transaction_lock(db, label = "Store.read_ml_statistics") {
+      Store.private_data.read_ml_statistics(db, session, cache)
    }

   def read_task_statistics(db: SQL.Database, session: String): List[Properties.T] =
-    Store.Data.transaction_lock(db, label = "Store.read_task_statistics") {
-      Store.Data.read_task_statistics(db, session, cache)
+    Store.private_data.transaction_lock(db, label = "Store.read_task_statistics") {
+      Store.private_data.read_task_statistics(db, session, cache)
     }

   def read_theories(db: SQL.Database, session: String): List[String] =
     read_theory_timings(db, session).flatMap(Markup.Name.unapply)

   def read_errors(db: SQL.Database, session: String): List[String] =
-    Store.Data.transaction_lock(db, label = "Store.read_errors") {
-      Store.Data.read_errors(db, session, cache)
+    Store.private_data.transaction_lock(db, label = "Store.read_errors") {
+      Store.private_data.read_errors(db, session, cache)
     }

   def read_build(db: SQL.Database, session: String): Option[Store.Build_Info] =
-    Store.Data.transaction_lock(db, label = "Store.read_build") {
-      if (session_info_exists(db)) Store.Data.read_build(db, session) else None
+    Store.private_data.transaction_lock(db, label = "Store.read_build") {
+      if (session_info_exists(db)) Store.private_data.read_build(db, session) else None
     }

   def read_sources(db: SQL.Database, session: String, name: String = ""): List[Store.Source_File] =
-    Store.Data.transaction_lock(db, label = "Store.read_sources") {
-      Store.Data.read_sources(db, session, name, cache.compress)
+    Store.private_data.transaction_lock(db, label = "Store.read_sources") {
+      Store.private_data.read_sources(db, session, name, cache.compress)
     }
 }
diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/Tools/build_process.scala
--- a/src/Pure/Tools/build_process.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/Tools/build_process.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -283,7 +283,7 @@

   /** SQL data model **/

-  object Data extends SQL.Data("isabelle_build") {
+  object private_data extends SQL.Data("isabelle_build") {
     val database: Path = Path.explode("$ISABELLE_HOME_USER/build.db")

     def pull[A <: Library.Named](
@@ -364,7 +364,7 @@
         })

       for (build <- builds.sortBy(_.start)(Date.Ordering)) yield {
-        val sessions = Data.read_sessions_domain(db, build_uuid = build.build_uuid)
+        val sessions = private_data.read_sessions_domain(db, build_uuid = build.build_uuid)
         build.copy(sessions = sessions.toList.sorted)
       }
     }
@@ -828,8 +828,8 @@
   }

   def read_builds(db: SQL.Database): List[Build] =
-    Data.transaction_lock(db, create = true, label = "Build_Process.read_builds") {
-      Data.read_builds(db)
+    private_data.transaction_lock(db, create = true, label = "Build_Process.read_builds") {
+      private_data.read_builds(db)
     }
 }
@@ -862,16 +862,16 @@
     try {
       for (db <- store.maybe_open_build_database(server = server)) yield {
         val store_tables = db.is_postgresql
-        Build_Process.Data.transaction_lock(db,
+        Build_Process.private_data.transaction_lock(db,
           create = true,
           label = "Build_Process.build_database"
         ) {
-          Build_Process.Data.clean_build(db)
-          if (store_tables) Store.Data.tables.lock(db, create = true)
+          Build_Process.private_data.clean_build(db)
+          if (store_tables) Store.private_data.tables.lock(db, create = true)
         }
         if (build_context.master) {
-          db.vacuum(Build_Process.Data.tables.list)
-          if (store_tables) db.vacuum(Store.Data.tables.list)
+          db.vacuum(Build_Process.private_data.tables.list)
+          if (store_tables) db.vacuum(Store.private_data.tables.list)
         }
         db
       }
@@ -879,7 +879,7 @@
     catch { case exn: Throwable => close(); throw exn }

   private val _host_database: Option[SQL.Database] =
-    try { store.maybe_open_build_database(path = Host.Data.database, server = server) }
+    try { store.maybe_open_build_database(path = Host.private_data.database, server = server) }
     catch { case exn: Throwable => close(); throw exn }

   protected val (progress, worker_uuid) = synchronized {
@@ -887,7 +887,7 @@
       case None => (build_progress, UUID.random().toString)
       case Some(db) =>
         try {
-          val progress_db = store.open_build_database(Progress.Data.database, server = server)
+          val progress_db = store.open_build_database(Progress.private_data.database, server = server)
           val progress =
             new Database_Progress(progress_db, build_progress,
               hostname = hostname,
@@ -922,11 +922,11 @@
       case None => body
       case Some(db) =>
         progress.asInstanceOf[Database_Progress].sync()
-        Build_Process.Data.transaction_lock(db, label = label) {
-          _state = Build_Process.Data.pull_database(db, worker_uuid, hostname, _state)
+        Build_Process.private_data.transaction_lock(db, label = label) {
+          _state = Build_Process.private_data.pull_database(db, worker_uuid, hostname, _state)
           val res = body
           _state =
-            Build_Process.Data.update_database(db, worker_uuid, build_uuid, hostname, _state)
+            Build_Process.private_data.update_database(db, worker_uuid, build_uuid, hostname, _state)
           res
         }
     }
@@ -1046,27 +1046,27 @@

   protected final def start_build(): Unit = synchronized_database("Build_Process.start_build") {
     for (db <- _build_database) {
-      Build_Process.Data.start_build(db, build_uuid, build_context.ml_platform,
+      Build_Process.private_data.start_build(db, build_uuid, build_context.ml_platform,
         build_context.sessions_structure.session_prefs)
     }
   }

   protected final def stop_build(): Unit = synchronized_database("Build_Process.stop_build") {
     for (db <- _build_database) {
-      Build_Process.Data.stop_build(db, build_uuid)
+      Build_Process.private_data.stop_build(db, build_uuid)
     }
   }

   protected final def start_worker(): Unit = synchronized_database("Build_Process.start_worker") {
     for (db <- _build_database) {
       _state = _state.inc_serial
-      Build_Process.Data.start_worker(db, worker_uuid, build_uuid, _state.serial)
+      Build_Process.private_data.start_worker(db, worker_uuid, build_uuid, _state.serial)
     }
   }

   protected final def stop_worker(): Unit = synchronized_database("Build_Process.stop_worker") {
     for (db <- _build_database) {
-      Build_Process.Data.stamp_worker(db, worker_uuid, _state.serial, stop_now = true)
+      Build_Process.private_data.stamp_worker(db, worker_uuid, _state.serial, stop_now = true)
     }
   }
@@ -1145,8 +1145,8 @@
       _build_database match {
         case None => (Nil, Nil)
         case Some(db) =>
-          (Build_Process.Data.read_builds(db),
-            Build_Process.Data.read_workers(db))
+          (Build_Process.private_data.read_builds(db),
+            Build_Process.private_data.read_workers(db))
       }
     Build_Process.Snapshot(
       builds = builds,
diff -r c39819e3adc5 -r 7853d9072d1b src/Pure/Tools/server.scala
--- a/src/Pure/Tools/server.scala	Tue Jul 18 13:34:18 2023 +0200
+++ b/src/Pure/Tools/server.scala	Tue Jul 18 19:51:12 2023 +0200
@@ -363,7 +363,7 @@

   val default_name = "isabelle"

-  object Data extends SQL.Data() {
+  object private_data extends SQL.Data() {
     val database = Path.explode("$ISABELLE_HOME_USER/servers.db")

     val name = SQL.Column.string("name").make_primary_key
@@ -375,13 +375,13 @@
   }

   def list(db: SQLite.Database): List[Info] =
-    if (db.exists_table(Data.table)) {
-      db.execute_query_statement(Data.table.select(),
+    if (db.exists_table(private_data.table)) {
+      db.execute_query_statement(private_data.table.select(),
         List.from[Info],
         { res =>
-          val name = res.string(Data.name)
-          val port = res.int(Data.port)
-          val password = res.string(Data.password)
+          val name = res.string(private_data.name)
+          val port = res.int(private_data.port)
+          val password = res.string(private_data.password)
           Info(name, port, password)
         }
       ).sortBy(_.name)
@@ -397,12 +397,12 @@
     existing_server: Boolean = false,
     log: Logger = No_Logger
   ): (Info, Option[Server]) = {
-    using(SQLite.open_database(Data.database, restrict = true)) { db =>
-      Data.transaction_lock(db, create = true) {
+    using(SQLite.open_database(private_data.database, restrict = true)) { db =>
+      private_data.transaction_lock(db, create = true) {
         list(db).filterNot(_.active).foreach(server_info =>
-          db.execute_statement(Data.table.delete(sql = Data.name.where_equal(server_info.name))))
+          db.execute_statement(private_data.table.delete(sql = private_data.name.where_equal(server_info.name))))
       }
-      Data.transaction_lock(db) {
+      private_data.transaction_lock(db) {
         find(db, name) match {
           case Some(server_info) => (server_info, None)
           case None =>
@@ -411,8 +411,8 @@
             val server = new Server(port, log)
             val server_info = Info(name, server.port, server.password)

-            db.execute_statement(Data.table.delete(sql = Data.name.where_equal(name)))
-            db.execute_statement(Data.table.insert(), body =
+            db.execute_statement(private_data.table.delete(sql = private_data.name.where_equal(name)))
+            db.execute_statement(private_data.table.insert(), body =
               { stmt =>
                 stmt.string(1) = server_info.name
                 stmt.int(2) = server_info.port
@@ -427,8 +427,8 @@
   }

   def exit(name: String = default_name): Boolean = {
-    using(SQLite.open_database(Data.database)) { db =>
-      Data.transaction_lock(db) {
+    using(SQLite.open_database(private_data.database)) { db =>
+      private_data.transaction_lock(db) {
         find(db, name) match {
           case Some(server_info) =>
             using(server_info.connection())(_.write_line_message("shutdown"))
@@ -481,7 +481,7 @@

     if (operation_list) {
       for {
-        server_info <- using(SQLite.open_database(Data.database))(list)
+        server_info <- using(SQLite.open_database(private_data.database))(list)
         if server_info.active
       } Output.writeln(server_info.toString, stdout = true)
     }
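
Every file touched above follows the same SQL.Data discipline: the data model lives in one singleton (now spelled private_data to signal its intended scope), and each public operation funnels its private_data calls through transaction_lock. The following self-contained Scala sketch approximates that shape with an in-memory map standing in for the database; Demo_Store and all of its members are hypothetical names invented for illustration, and Isabelle's real SQL.Data/SQL.Database API is considerably richer (SQL tables, SQLite/PostgreSQL connections, create flags).

object Demo_Store {
  // Analogue of the SQL.Data singletons above: the data model is private by
  // convention, even though Scala object members remain publicly reachable.
  object private_data {
    private val lock = new Object
    private var table = Map.empty[String, String]  // stands in for a SQL table

    // Analogue of SQL.Data.transaction_lock: all access is serialized here.
    // The label is diagnostic only, mirroring labels like "Store.read_build";
    // a real implementation would open/commit a database transaction.
    def transaction_lock[A](label: String)(body: => A): A =
      lock.synchronized { body }

    def write_entry(name: String, value: String): Unit = table += (name -> value)
    def read_entry(name: String): Option[String] = table.get(name)
  }

  // Public operations wrap their private_data calls in a transaction,
  // mirroring e.g. ML_Heap.get_entry or Document_Build.write_document above.
  def write_entry(name: String, value: String): Unit =
    private_data.transaction_lock("Demo_Store.write_entry") {
      private_data.write_entry(name, value)
    }

  def read_entry(name: String): Option[String] =
    private_data.transaction_lock("Demo_Store.read_entry") {
      private_data.read_entry(name)
    }
}

object Demo {
  def main(args: Array[String]): Unit = {
    Demo_Store.write_entry("session", "HOL")
    println(Demo_Store.read_entry("session"))  // prints: Some(HOL)
  }
}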