--- a/src/Pure/Thy/sessions.scala Tue Mar 07 12:50:27 2023 +0100
+++ b/src/Pure/Thy/sessions.scala Tue Mar 07 16:23:48 2023 +0100
@@ -14,6 +14,48 @@
object Sessions {
+ /* main operations */
+
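+ // Top-level entry points; background and deps delegate to Background.load and Deps.load below.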
+ def background0(session: String): Background = Background.empty(session)
+
+ def background(options: Options,
+ session: String,
+ progress: Progress = new Progress,
+ dirs: List[Path] = Nil,
+ include_sessions: List[String] = Nil,
+ session_ancestor: Option[String] = None,
+ session_requirements: Boolean = false
+ ): Background = {
+ Background.load(options, session, progress = progress, dirs = dirs,
+ include_sessions = include_sessions, session_ancestor = session_ancestor,
+ session_requirements = session_requirements)
+ }
+
+ def load_structure(
+ options: Options,
+ dirs: List[Path] = Nil,
+ select_dirs: List[Path] = Nil,
+ infos: List[Info] = Nil,
+ augment_options: String => List[Options.Spec] = _ => Nil
+ ): Structure = {
+ val roots = load_root_files(dirs = dirs, select_dirs = select_dirs)
+ Structure.make(options, augment_options, roots = roots, infos = infos)
+ }
+
+ def deps(sessions_structure: Structure,
+ progress: Progress = new Progress,
+ inlined_files: Boolean = false,
+ list_files: Boolean = false,
+ check_keywords: Set[String] = Set.empty
+ ): Deps = {
+ Deps.load(sessions_structure, progress = progress, inlined_files = inlined_files,
+ list_files = list_files, check_keywords = check_keywords)
+ }
+
+ def store(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
+ new Store(options, cache)
+
+
/* session and theory names */
val ROOTS: Path = Path.explode("ROOTS")
@@ -167,6 +209,93 @@
/* background context */
+ object Background {
+ def empty(session: String): Background =
+ Background(Base(session_name = session))
+
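+ // Load the full session structure and compute the Background of the given session; with
+ // session_requirements, a helper session covering theory imports from other sessions may be synthesized.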
+ def load(options: Options,
+ session: String,
+ progress: Progress = new Progress,
+ dirs: List[Path] = Nil,
+ include_sessions: List[String] = Nil,
+ session_ancestor: Option[String] = None,
+ session_requirements: Boolean = false
+ ): Background = {
+ val full_sessions = load_structure(options, dirs = dirs)
+
+ val selected_sessions =
+ full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
+ val info = selected_sessions(session)
+ val ancestor = session_ancestor orElse info.parent
+
+ val (session1, infos1) =
+ if (session_requirements && ancestor.isDefined) {
+ val deps = Sessions.deps(selected_sessions, progress = progress)
+ val base = deps(session)
+
+ val ancestor_loaded =
+ deps.get(ancestor.get) match {
+ case Some(ancestor_base)
+ if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
+ ancestor_base.loaded_theories.defined _
+ case _ =>
+ error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
+ }
+
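+ // theories loaded by the session that the ancestor does not provide and that are not qualified by the session itself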
+ val required_theories =
+ for {
+ thy <- base.loaded_theories.keys
+ if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
+ }
+ yield thy
+
+ if (required_theories.isEmpty) (ancestor.get, Nil)
+ else {
+ val other_name = info.name + "_requirements(" + ancestor.get + ")"
+ Isabelle_System.isabelle_tmp_prefix()
+
+ (other_name,
+ List(
+ Info.make(
+ Chapter_Defs.empty,
+ info.options,
+ augment_options = _ => Nil,
+ dir_selected = false,
+ dir = Path.explode("$ISABELLE_TMP_PREFIX"),
+ chapter = info.chapter,
+ Session_Entry(
+ pos = info.pos,
+ name = other_name,
+ groups = info.groups,
+ path = ".",
+ parent = ancestor,
+ description = "Required theory imports from other sessions",
+ options = Nil,
+ imports = info.deps,
+ directories = Nil,
+ theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
+ document_theories = Nil,
+ document_files = Nil,
+ export_files = Nil,
+ export_classpath = Nil))))
+ }
+ }
+ else (session, Nil)
+
+ val full_sessions1 =
+ if (infos1.isEmpty) full_sessions
+ else load_structure(options, dirs = dirs, infos = infos1)
+
+ val selected_sessions1 =
+ full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))
+
+ val deps1 = Sessions.deps(selected_sessions1, progress = progress)
+
+ Background(deps1(session1), sessions_structure = full_sessions1,
+ errors = deps1.errors, infos = infos1)
+ }
+ }
+
sealed case class Background(
base: Base,
sessions_structure: Structure = Structure.empty,
@@ -181,94 +310,253 @@
else error(cat_lines(errors))
}
- def background0(session: String): Background =
- Background(Base(session_name = session))
- def background(options: Options,
- session: String,
- progress: Progress = new Progress,
- dirs: List[Path] = Nil,
- include_sessions: List[String] = Nil,
- session_ancestor: Option[String] = None,
- session_requirements: Boolean = false
- ): Background = {
- val full_sessions = load_structure(options, dirs = dirs)
-
- val selected_sessions =
- full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
- val info = selected_sessions(session)
- val ancestor = session_ancestor orElse info.parent
-
- val (session1, infos1) =
- if (session_requirements && ancestor.isDefined) {
- val deps = Sessions.deps(selected_sessions, progress = progress)
- val base = deps(session)
-
- val ancestor_loaded =
- deps.get(ancestor.get) match {
- case Some(ancestor_base)
- if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
- ancestor_base.loaded_theories.defined _
- case _ =>
- error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
- }
+ /* source dependencies */
- val required_theories =
- for {
- thy <- base.loaded_theories.keys
- if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
+ object Deps {
+ def load(sessions_structure: Structure,
+ progress: Progress = new Progress,
+ inlined_files: Boolean = false,
+ list_files: Boolean = false,
+ check_keywords: Set[String] = Set.empty
+ ): Deps = {
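+ // digest each source file at most once, sharing results across all sessions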
+ var cache_sources = Map.empty[JFile, SHA1.Digest]
+ def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
+ for {
+ path <- paths
+ file = path.file
+ if cache_sources.isDefinedAt(file) || file.isFile
+ }
+ yield {
+ cache_sources.get(file) match {
+ case Some(digest) => (path, digest)
+ case None =>
+ val digest = SHA1.digest(file)
+ cache_sources = cache_sources + (file -> digest)
+ (path, digest)
}
- yield thy
-
- if (required_theories.isEmpty) (ancestor.get, Nil)
- else {
- val other_name = info.name + "_requirements(" + ancestor.get + ")"
- Isabelle_System.isabelle_tmp_prefix()
-
- (other_name,
- List(
- Info.make(
- Chapter_Defs.empty,
- info.options,
- augment_options = _ => Nil,
- dir_selected = false,
- dir = Path.explode("$ISABELLE_TMP_PREFIX"),
- chapter = info.chapter,
- Session_Entry(
- pos = info.pos,
- name = other_name,
- groups = info.groups,
- path = ".",
- parent = ancestor,
- description = "Required theory imports from other sessions",
- options = Nil,
- imports = info.deps,
- directories = Nil,
- theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
- document_theories = Nil,
- document_files = Nil,
- export_files = Nil,
- export_classpath = Nil))))
}
}
- else (session, Nil)
+
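+ // process sessions in topological order of imports, building each Base on top of its dependencies' bases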
+ val session_bases =
+ sessions_structure.imports_topological_order.foldLeft(Map(Base.bootstrap.session_entry)) {
+ case (session_bases, session_name) =>
+ progress.expose_interrupt()
+
+ val info = sessions_structure(session_name)
+ try {
+ val deps_base = info.deps_base(session_bases)
+ val session_background =
+ Background(base = deps_base, sessions_structure = sessions_structure)
+ val resources = new Resources(session_background)
+
+ progress.echo(
+ "Session " + info.chapter + "/" + session_name +
+ if_proper(info.groups, info.groups.mkString(" (", " ", ")")),
+ verbose = !list_files)
+
+ val dependencies = resources.session_dependencies(info)
+
+ val overall_syntax = dependencies.overall_syntax
+
+ val proper_session_theories =
+ dependencies.theories.filter(name =>
+ sessions_structure.theory_qualifier(name) == session_name)
+
+ val theory_files = dependencies.theories.map(_.path)
+
+ val (load_commands, load_commands_errors) =
+ try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
+ catch { case ERROR(msg) => (Nil, List(msg)) }
+
+ val theory_load_commands =
+ (for ((name, span) <- load_commands.iterator) yield name.theory -> span).toMap
+
+ val loaded_files: List[(String, List[Path])] =
+ for ((name, spans) <- load_commands) yield {
+ val (theory, files) = dependencies.loaded_files(name, spans)
+ theory -> files.map(file => Path.explode(file.node))
+ }
+
+ val document_files =
+ for ((path1, path2) <- info.document_files)
+ yield info.dir + path1 + path2
+
+ val session_files =
+ (theory_files ::: loaded_files.flatMap(_._2) ::: document_files).map(_.expand)
+
+ val imported_files = if (inlined_files) dependencies.imported_files else Nil
- val full_sessions1 =
- if (infos1.isEmpty) full_sessions
- else load_structure(options, dirs = dirs, infos = infos1)
+ if (list_files) {
+ progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))
+ }
+
+ if (check_keywords.nonEmpty) {
+ Check_Keywords.check_keywords(
+ progress, overall_syntax.keywords, check_keywords, theory_files)
+ }
+
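+ // display graph: theories of this session appear as individual nodes, other sessions are collapsed into "[name]" nodes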
+ val session_graph_display: Graph_Display.Graph = {
+ def session_node(name: String): Graph_Display.Node =
+ Graph_Display.Node("[" + name + "]", "session." + name)
+
+ def node(name: Document.Node.Name): Graph_Display.Node = {
+ val qualifier = sessions_structure.theory_qualifier(name)
+ if (qualifier == info.name) {
+ Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
+ }
+ else session_node(qualifier)
+ }
+
+ val required_sessions =
+ dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
+ .map(theory => sessions_structure.theory_qualifier(theory))
+ .filter(name => name != info.name && sessions_structure.defined(name))
- val selected_sessions1 =
- full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))
+ val required_subgraph =
+ sessions_structure.imports_graph
+ .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
+ .transitive_closure
+ .restrict(required_sessions.toSet)
+ .transitive_reduction_acyclic
+
+ val graph0 =
+ required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
+ case (g, session) =>
+ val a = session_node(session)
+ val bs = required_subgraph.imm_preds(session).toList.map(session_node)
+ bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
+ }
+
+ dependencies.entries.foldLeft(graph0) {
+ case (g, entry) =>
+ val a = node(entry.name)
+ val bs = entry.header.imports.map(node).filterNot(_ == a)
+ bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
+ }
+ }
+
+ val known_theories =
+ dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
+ foldLeft(deps_base.known_theories)(_ + _)
+
+ val known_loaded_files = deps_base.known_loaded_files ++ loaded_files
- val deps1 = Sessions.deps(selected_sessions1, progress = progress)
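+ // an import is flagged if its qualifying session is not covered by the imports requirements of this session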
+ val import_errors = {
+ val known_sessions =
+ sessions_structure.imports_requirements(List(session_name)).toSet
+ for {
+ name <- dependencies.theories
+ qualifier = sessions_structure.theory_qualifier(name)
+ if !known_sessions(qualifier)
+ } yield "Bad import of theory " + quote(name.toString) +
+ ": need to include sessions " + quote(qualifier) + " in ROOT"
+ }
+
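+ // document theories must be known, must not stem from this session itself, and must be properly imported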
+ val document_errors =
+ info.document_theories.flatMap(
+ {
+ case (thy, pos) =>
+ val build_hierarchy =
+ if (sessions_structure.build_graph.defined(session_name)) {
+ sessions_structure.build_hierarchy(session_name)
+ }
+ else Nil
+
+ def err(msg: String): Option[String] =
+ Some(msg + " " + quote(thy) + Position.here(pos))
+
+ known_theories.get(thy).map(_.name) match {
+ case None => err("Unknown document theory")
+ case Some(name) =>
+ val qualifier = sessions_structure.theory_qualifier(name)
+ if (proper_session_theories.contains(name)) {
+ err("Redundant document theory from this session:")
+ }
+ else if (
+ !build_hierarchy.contains(qualifier) &&
+ !dependencies.theories.contains(name)
+ ) {
+ err("Document theory from other session not imported properly:")
+ }
+ else None
+ }
+ })
+ val document_theories =
+ info.document_theories.map({ case (thy, _) => known_theories(thy).name })
+
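+ // theory files must reside within the declared session directories; also check existence and coherent theory file imports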
+ val dir_errors = {
+ val ok = info.dirs.map(_.canonical_file).toSet
+ val bad =
+ (for {
+ name <- proper_session_theories.iterator
+ path = Path.explode(name.master_dir)
+ if !ok(path.canonical_file)
+ path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
+ } yield (path1, name)).toList
+ val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted
- Background(deps1(session1), sessions_structure = full_sessions1,
- errors = deps1.errors, infos = infos1)
+ val errs1 =
+ for { (path1, name) <- bad }
+ yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
+ val errs2 =
+ if (bad_dirs.isEmpty) Nil
+ else List("Implicit use of session directories: " + commas(bad_dirs))
+ val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
+ val errs4 =
+ (for {
+ name <- proper_session_theories.iterator
+ name1 <- resources.find_theory_node(name.theory)
+ if name.node != name1.node
+ } yield {
+ "Incoherent theory file import:\n " + quote(name.node) +
+ " vs. \n " + quote(name1.node)
+ }).toList
+
+ errs1 ::: errs2 ::: errs3 ::: errs4
+ }
+
+ val sources_errors =
+ for (p <- session_files if !p.is_file) yield "No such file: " + p
+
+ val path_errors =
+ try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
+ catch { case ERROR(msg) => List(msg) }
+
+ val bibtex_errors = info.bibtex_entries.errors
+
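+ // assemble the resulting Base, collecting all accumulated errors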
+ val base =
+ Base(
+ session_name = info.name,
+ session_pos = info.pos,
+ proper_session_theories = proper_session_theories,
+ document_theories = document_theories,
+ loaded_theories = dependencies.loaded_theories,
+ used_theories = dependencies.theories_adjunct,
+ theory_load_commands = theory_load_commands,
+ known_theories = known_theories,
+ known_loaded_files = known_loaded_files,
+ overall_syntax = overall_syntax,
+ imported_sources = check_sources(imported_files),
+ session_sources = check_sources(session_files),
+ session_graph_display = session_graph_display,
+ errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
+ document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
+ bibtex_errors)
+
+ session_bases + base.session_entry
+ }
+ catch {
+ case ERROR(msg) =>
+ cat_error(msg, "The error(s) above occurred in session " +
+ quote(info.name) + Position.here(info.pos))
+ }
+ }
+
+ new Deps(sessions_structure, session_bases)
+ }
}
-
- /* source dependencies */
-
final class Deps private[Sessions](
val sessions_structure: Structure,
val session_bases: Map[String, Base]
@@ -306,248 +594,6 @@
override def toString: String = "Sessions.Deps(" + sessions_structure + ")"
}
- def deps(sessions_structure: Structure,
- progress: Progress = new Progress,
- inlined_files: Boolean = false,
- list_files: Boolean = false,
- check_keywords: Set[String] = Set.empty
- ): Deps = {
- var cache_sources = Map.empty[JFile, SHA1.Digest]
- def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
- for {
- path <- paths
- file = path.file
- if cache_sources.isDefinedAt(file) || file.isFile
- }
- yield {
- cache_sources.get(file) match {
- case Some(digest) => (path, digest)
- case None =>
- val digest = SHA1.digest(file)
- cache_sources = cache_sources + (file -> digest)
- (path, digest)
- }
- }
- }
-
- val session_bases =
- sessions_structure.imports_topological_order.foldLeft(Map(Base.bootstrap.session_entry)) {
- case (session_bases, session_name) =>
- progress.expose_interrupt()
-
- val info = sessions_structure(session_name)
- try {
- val deps_base = info.deps_base(session_bases)
- val session_background =
- Background(base = deps_base, sessions_structure = sessions_structure)
- val resources = new Resources(session_background)
-
- progress.echo(
- "Session " + info.chapter + "/" + session_name +
- if_proper(info.groups, info.groups.mkString(" (", " ", ")")),
- verbose = !list_files)
-
- val dependencies = resources.session_dependencies(info)
-
- val overall_syntax = dependencies.overall_syntax
-
- val proper_session_theories =
- dependencies.theories.filter(name =>
- sessions_structure.theory_qualifier(name) == session_name)
-
- val theory_files = dependencies.theories.map(_.path)
-
- val (load_commands, load_commands_errors) =
- try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
- catch { case ERROR(msg) => (Nil, List(msg)) }
-
- val theory_load_commands =
- (for ((name, span) <- load_commands.iterator) yield name.theory -> span).toMap
-
- val loaded_files: List[(String, List[Path])] =
- for ((name, spans) <- load_commands) yield {
- val (theory, files) = dependencies.loaded_files(name, spans)
- theory -> files.map(file => Path.explode(file.node))
- }
-
- val document_files =
- for ((path1, path2) <- info.document_files)
- yield info.dir + path1 + path2
-
- val session_files =
- (theory_files ::: loaded_files.flatMap(_._2) ::: document_files).map(_.expand)
-
- val imported_files = if (inlined_files) dependencies.imported_files else Nil
-
- if (list_files) {
- progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))
- }
-
- if (check_keywords.nonEmpty) {
- Check_Keywords.check_keywords(
- progress, overall_syntax.keywords, check_keywords, theory_files)
- }
-
- val session_graph_display: Graph_Display.Graph = {
- def session_node(name: String): Graph_Display.Node =
- Graph_Display.Node("[" + name + "]", "session." + name)
-
- def node(name: Document.Node.Name): Graph_Display.Node = {
- val qualifier = sessions_structure.theory_qualifier(name)
- if (qualifier == info.name) {
- Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
- }
- else session_node(qualifier)
- }
-
- val required_sessions =
- dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
- .map(theory => sessions_structure.theory_qualifier(theory))
- .filter(name => name != info.name && sessions_structure.defined(name))
-
- val required_subgraph =
- sessions_structure.imports_graph
- .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
- .transitive_closure
- .restrict(required_sessions.toSet)
- .transitive_reduction_acyclic
-
- val graph0 =
- required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
- case (g, session) =>
- val a = session_node(session)
- val bs = required_subgraph.imm_preds(session).toList.map(session_node)
- bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
- }
-
- dependencies.entries.foldLeft(graph0) {
- case (g, entry) =>
- val a = node(entry.name)
- val bs = entry.header.imports.map(node).filterNot(_ == a)
- bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
- }
- }
-
- val known_theories =
- dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
- foldLeft(deps_base.known_theories)(_ + _)
-
- val known_loaded_files = deps_base.known_loaded_files ++ loaded_files
-
- val import_errors = {
- val known_sessions =
- sessions_structure.imports_requirements(List(session_name)).toSet
- for {
- name <- dependencies.theories
- qualifier = sessions_structure.theory_qualifier(name)
- if !known_sessions(qualifier)
- } yield "Bad import of theory " + quote(name.toString) +
- ": need to include sessions " + quote(qualifier) + " in ROOT"
- }
-
- val document_errors =
- info.document_theories.flatMap(
- {
- case (thy, pos) =>
- val build_hierarchy =
- if (sessions_structure.build_graph.defined(session_name)) {
- sessions_structure.build_hierarchy(session_name)
- }
- else Nil
-
- def err(msg: String): Option[String] =
- Some(msg + " " + quote(thy) + Position.here(pos))
-
- known_theories.get(thy).map(_.name) match {
- case None => err("Unknown document theory")
- case Some(name) =>
- val qualifier = sessions_structure.theory_qualifier(name)
- if (proper_session_theories.contains(name)) {
- err("Redundant document theory from this session:")
- }
- else if (
- !build_hierarchy.contains(qualifier) &&
- !dependencies.theories.contains(name)
- ) {
- err("Document theory from other session not imported properly:")
- }
- else None
- }
- })
- val document_theories =
- info.document_theories.map({ case (thy, _) => known_theories(thy).name })
-
- val dir_errors = {
- val ok = info.dirs.map(_.canonical_file).toSet
- val bad =
- (for {
- name <- proper_session_theories.iterator
- path = Path.explode(name.master_dir)
- if !ok(path.canonical_file)
- path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
- } yield (path1, name)).toList
- val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted
-
- val errs1 =
- for { (path1, name) <- bad }
- yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
- val errs2 =
- if (bad_dirs.isEmpty) Nil
- else List("Implicit use of session directories: " + commas(bad_dirs))
- val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
- val errs4 =
- (for {
- name <- proper_session_theories.iterator
- name1 <- resources.find_theory_node(name.theory)
- if name.node != name1.node
- } yield {
- "Incoherent theory file import:\n " + quote(name.node) +
- " vs. \n " + quote(name1.node)
- }).toList
-
- errs1 ::: errs2 ::: errs3 ::: errs4
- }
-
- val sources_errors =
- for (p <- session_files if !p.is_file) yield "No such file: " + p
-
- val path_errors =
- try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
- catch { case ERROR(msg) => List(msg) }
-
- val bibtex_errors = info.bibtex_entries.errors
-
- val base =
- Base(
- session_name = info.name,
- session_pos = info.pos,
- proper_session_theories = proper_session_theories,
- document_theories = document_theories,
- loaded_theories = dependencies.loaded_theories,
- used_theories = dependencies.theories_adjunct,
- theory_load_commands = theory_load_commands,
- known_theories = known_theories,
- known_loaded_files = known_loaded_files,
- overall_syntax = overall_syntax,
- imported_sources = check_sources(imported_files),
- session_sources = check_sources(session_files),
- session_graph_display = session_graph_display,
- errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
- document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
- bibtex_errors)
-
- session_bases + base.session_entry
- }
- catch {
- case ERROR(msg) =>
- cat_error(msg, "The error(s) above occurred in session " +
- quote(info.name) + Position.here(info.pos))
- }
- }
-
- new Deps(sessions_structure, session_bases)
- }
-
/* cumulative session info */
@@ -1257,17 +1303,6 @@
seen_roots.valuesIterator.toList.sortBy(_._2).map(_._1)
}
- def load_structure(
- options: Options,
- dirs: List[Path] = Nil,
- select_dirs: List[Path] = Nil,
- infos: List[Info] = Nil,
- augment_options: String => List[Options.Spec] = _ => Nil
- ): Structure = {
- val roots = load_root_files(dirs = dirs, select_dirs = select_dirs)
- Structure.make(options, augment_options, roots = roots, infos = infos)
- }
-
/* Isabelle tool wrapper */
@@ -1373,9 +1408,6 @@
" ADD COLUMN IF NOT EXISTS " + uuid.decl(SQL.sql_type_postgresql)
}
- def store(options: Options, cache: Term.Cache = Term.Cache.make()): Store =
- new Store(options, cache)
-
class Store private[Sessions](val options: Options, val cache: Term.Cache) {
store =>