# HG changeset patch
# User wenzelm
# Date 1570440943 -7200
# Node ID 2739631ac3685907d49a2af37ce63577720c77a2
# Parent  a90e4011887444c0a1d48165e665bbdae135c3c3
discontinued pointless dump_checkpoint and share_common_data -- superseded by base logic image in Isabelle/MMT;

diff -r a90e40118874 -r 2739631ac368 etc/options
--- a/etc/options	Mon Oct 07 10:51:20 2019 +0200
+++ b/etc/options	Mon Oct 07 11:35:43 2019 +0200
@@ -225,9 +225,6 @@
 option headless_load_limit : int = 100
   -- "limit for loaded theories (0 = unlimited)"
 
-option dump_checkpoint : bool = false
-  -- "mark individual theories to share common data in ML"
-
 
 section "Miscellaneous Tools"
diff -r a90e40118874 -r 2739631ac368 src/Doc/System/Sessions.thy
--- a/src/Doc/System/Sessions.thy	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Doc/System/Sessions.thy	Mon Oct 07 11:35:43 2019 +0200
@@ -546,7 +546,6 @@
   Options are:
     -A NAMES     dump named aspects (default: ...)
     -B NAME      include session NAME and all descendants
-    -C           observe option dump_checkpoint for theories
     -D DIR       include session directory and select its sessions
     -O DIR       output directory for dumped files (default: "dump")
     -R           operate on requirements of selected sessions
@@ -583,11 +582,6 @@
   \<^verbatim>\<open>isabelle.Dump.dump()\<close> takes aspects as user-defined operations on the
   final PIDE state and document version. This allows to imitate Prover IDE
   rendering under program control.
-
-  \<^medskip> Option \<^verbatim>\<open>-C\<close> observes option \<^verbatim>\<open>dump_checkpoint\<close> within the
-  \isakeyword{theories} specification of session ROOT definitions. This helps
-  to structure the load process of large collections of theories, and thus
-  reduce overall resource requirements.
 \<close>
diff -r a90e40118874 -r 2739631ac368 src/HOL/ROOT
--- a/src/HOL/ROOT	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/HOL/ROOT	Mon Oct 07 11:35:43 2019 +0200
@@ -6,9 +6,8 @@
   "
   options [strict_facts]
   directories "../Tools"
-  theories [dump_checkpoint]
+  theories
     Main (global)
-  theories
     Complex_Main (global)
   document_files
     "root.bib"
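For context: each "theories ... [options]" group of a session ROOT entry reaches Isabelle/Scala as a pair of an options record and a list of theory names, which is how the deleted Resources.dump_checkpoints (see the resources.scala hunk further down) collected the marked theories. A schematic sketch with stand-in types (Opts and Info here are simplified stand-ins, not the real isabelle.Options and Sessions.Info):

    object Root_Options_Sketch
    {
      // stand-in for isabelle.Options: boolean lookup only
      case class Opts(set: Set[String]) { def bool(name: String): Boolean = set(name) }
      // stand-in for Sessions.Info.theories: one entry per "theories ... [opts]" group
      case class Info(theories: List[(Opts, List[String])])

      // how a per-group boolean option such as dump_checkpoint was collected
      def dump_checkpoints(info: Info): Set[String] =
        (for {
          (options, thys) <- info.theories.iterator
          if options.bool("dump_checkpoint")
          thy <- thys.iterator
        } yield thy).toSet
    }

With the option gone, the two theories groups in src/HOL/ROOT above collapse into a single group containing Main and Complex_Main.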
diff -r a90e40118874 -r 2739631ac368 src/Pure/PIDE/headless.scala
--- a/src/Pure/PIDE/headless.scala	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Pure/PIDE/headless.scala	Mon Oct 07 11:35:43 2019 +0200
@@ -44,15 +44,12 @@
       (nodes.iterator ++ nodes_committed.iterator).forall({ case (_, st) => st.ok })
   }
 
-  private type Load = (List[Document.Node.Name], Boolean)
-  private val no_load: Load = (Nil, false)
-
   private sealed abstract class Load_State
   {
     def next(
       limit: Int,
       dep_graph: Document.Node.Name.Graph[Unit],
-      finished: Document.Node.Name => Boolean): (Load, Load_State) =
+      finished: Document.Node.Name => Boolean): (List[Document.Node.Name], Load_State) =
     {
       def make_pending(maximals: List[Document.Node.Name]): List[Document.Node.Name] =
       {
@@ -64,60 +61,40 @@
         }
       }
 
-      def load_checkpoints(checkpoints: List[Document.Node.Name]): (Load, Load_State) =
-        Load_Init(checkpoints).next(limit, dep_graph, finished)
-
-      def load_requirements(
-        pending: List[Document.Node.Name],
-        checkpoints: List[Document.Node.Name] = Nil,
-        share_common_data: Boolean = false): (Load, Load_State) =
+      def load_requirements(pending: List[Document.Node.Name])
+        : (List[Document.Node.Name], Load_State) =
       {
-        if (pending.isEmpty) load_checkpoints(checkpoints)
+        if (pending.isEmpty) (Nil, Load_Finished)
         else if (limit == 0) {
           val requirements = dep_graph.all_preds(pending).reverse
-          ((requirements, share_common_data), Load_Bulk(pending, Nil, checkpoints))
+          (requirements, Load_Bulk(pending, Nil))
         }
         else {
           val reachable =
             dep_graph.reachable_limit(limit, _ => 1, dep_graph.imm_preds, pending)
           val (pending1, pending2) = pending.partition(reachable)
           val requirements = dep_graph.all_preds(pending1).reverse
-          ((requirements, share_common_data), Load_Bulk(pending1, pending2, checkpoints))
+          (requirements, Load_Bulk(pending1, pending2))
         }
       }
 
-      val result: (Load, Load_State) =
+      val (load_theories, st1) =
         this match {
-          case Load_Init(Nil) =>
+          case Load_Init =>
            val pending = make_pending(dep_graph.maximals)
-            if (pending.isEmpty) (no_load, Load_Finished)
+            if (pending.isEmpty) (Nil, Load_Finished)
             else load_requirements(pending)
-          case Load_Init(target :: checkpoints) =>
-            val requirements = dep_graph.all_preds(List(target)).reverse
-            ((requirements, false), Load_Target(target, checkpoints))
-          case Load_Target(pending, checkpoints) if finished(pending) =>
-            val dep_graph1 =
-              if (checkpoints.isEmpty) dep_graph
-              else dep_graph.exclude(dep_graph.all_succs(checkpoints).toSet)
-            val dep_graph2 =
-              dep_graph1.restrict(dep_graph.all_succs(List(pending)).toSet)
-            val pending2 = make_pending(dep_graph.maximals.filter(dep_graph2.defined))
-            load_requirements(pending2, checkpoints = checkpoints, share_common_data = true)
-          case Load_Bulk(pending, remaining, checkpoints) if pending.forall(finished) =>
-            load_requirements(remaining, checkpoints = checkpoints)
-          case st => (no_load, st)
+          case Load_Bulk(pending, remaining) if pending.forall(finished) =>
+            load_requirements(remaining)
+          case st => (Nil, st)
         }
-      val ((load_theories, share_common_data), st1) = result
-      ((load_theories.filterNot(finished), share_common_data), st1)
+      (load_theories.filterNot(finished), st1)
     }
   }
 
-  private case class Load_Init(checkpoints: List[Document.Node.Name]) extends Load_State
-  private case class Load_Target(
-    pending: Document.Node.Name, checkpoints: List[Document.Node.Name]) extends Load_State
+  private case object Load_Init extends Load_State
   private case class Load_Bulk(
     pending: List[Document.Node.Name],
-    remaining: List[Document.Node.Name],
-    checkpoints: List[Document.Node.Name]) extends Load_State
+    remaining: List[Document.Node.Name]) extends Load_State
   private case object Load_Finished extends Load_State
 
   class Session private[Headless](
@@ -224,7 +201,7 @@
     }
 
     def check(state: Document.State, version: Document.Version, beyond_limit: Boolean)
-      : ((List[Document.Node.Name], Boolean), Use_Theories_State) =
+      : (List[Document.Node.Name], Use_Theories_State) =
     {
       val already_committed1 =
         commit match {
@@ -272,9 +249,10 @@
         }
         else result
 
-      val (load, load_state1) = load_state.next(load_limit, dep_graph, finished_theory(_))
+      val (load_theories, load_state1) =
+        load_state.next(load_limit, dep_graph, finished_theory(_))
 
-      (load,
+      (load_theories,
         copy(already_committed = already_committed1, result = result1, load_state = load_state1))
     }
   }
@@ -289,7 +267,6 @@
     watchdog_timeout: Time = default_watchdog_timeout,
     nodes_status_delay: Time = default_nodes_status_delay,
     id: UUID.T = UUID.random(),
-    checkpoints: Set[Document.Node.Name] = Set.empty,
     // commit: must not block, must not fail
     commit: Option[(Document.Snapshot, Document_Status.Node_Status) => Unit] = None,
     commit_cleanup_delay: Time = default_commit_cleanup_delay,
@@ -309,24 +286,16 @@
         map(path => Document.Node.Name(resources.append("", path)))
 
       val use_theories_state =
-      {
-        val load_state =
-          Load_Init(
-            if (checkpoints.isEmpty) Nil
-            else dependencies.theory_graph.topological_order.filter(checkpoints(_)))
         Synchronized(
-          Use_Theories_State(dependencies.theory_graph, load_state,
-            watchdog_timeout, commit))
-      }
+          Use_Theories_State(dependencies.theory_graph, Load_Init, watchdog_timeout, commit))
 
      def check_state(beyond_limit: Boolean = false)
      {
        val state = session.get_state()
        for (version <- state.stable_tip_version) {
-          val (load_theories, share_common_data) =
-            use_theories_state.change_result(_.check(state, version, beyond_limit))
+          val load_theories = use_theories_state.change_result(_.check(state, version, beyond_limit))
          if (load_theories.nonEmpty) {
-            resources.load_theories(
-              session, id, load_theories, dep_files, unicode_symbols, share_common_data, progress)
+            resources.load_theories(session, id, load_theories, dep_files, unicode_symbols, progress)
          }
        }
      }
@@ -648,7 +617,6 @@
       theories: List[Document.Node.Name],
       files: List[Document.Node.Name],
       unicode_symbols: Boolean,
-      share_common_data: Boolean,
       progress: Progress)
     {
       val loaded_theories =
@@ -682,8 +650,7 @@
             for { node_name <- files if doc_blobs1.changed(node_name) }
               yield st1.blob_edits(node_name, st.blobs.get(node_name))
 
-          session.update(doc_blobs1, theory_edits.flatMap(_._1) ::: file_edits.flatten,
-            share_common_data = share_common_data)
+          session.update(doc_blobs1, theory_edits.flatMap(_._1) ::: file_edits.flatten)
           st1.update_theories(theory_edits.map(_._2))
         })
     }
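The net effect on the headless loader: the Load_Target phase (reach a checkpoint theory first, then exclude its successors and share common data) is gone, leaving a plain Init -> Bulk -> Finished progression over the theory dependency graph. A minimal self-contained model of the surviving behaviour, with plain Strings and Maps as stand-ins for Document.Node.Name and Isabelle's Graph (the load_limit partitioning of Load_Bulk into pending/remaining is elided):

    object Load_Model
    {
      // theory -> direct imports
      type Graph = Map[String, List[String]]

      // transitive predecessors (requirements) of the given theories
      def all_preds(graph: Graph, names: List[String]): Set[String] =
      {
        def loop(seen: Set[String], todo: List[String]): Set[String] =
          todo match {
            case Nil => seen
            case n :: rest if seen(n) => loop(seen, rest)
            case n :: rest => loop(seen + n, graph.getOrElse(n, Nil) ::: rest)
          }
        loop(Set.empty, names)
      }

      sealed abstract class Load_State
      {
        // next batch of theories to load, plus the successor state
        def next(graph: Graph, finished: String => Boolean): (List[String], Load_State) =
          this match {
            case Load_Init =>
              // maximal nodes: theories that no other theory imports
              val maximals =
                graph.keys.toList.filterNot(n => graph.values.exists(_.contains(n)))
              if (maximals.isEmpty) (Nil, Load_Finished)
              else (all_preds(graph, maximals).toList.filterNot(finished), Load_Bulk(maximals))
            case Load_Bulk(pending) if pending.forall(finished) => (Nil, Load_Finished)
            case st => (Nil, st)
          }
      }
      case object Load_Init extends Load_State
      case class Load_Bulk(pending: List[String]) extends Load_State
      case object Load_Finished extends Load_State

      def main(args: Array[String]): Unit =
      {
        val graph: Graph =
          Map("Pure" -> Nil, "Main" -> List("Pure"), "Complex_Main" -> List("Main"))
        val (batch, st) = Load_Init.next(graph, _ => false)
        println(batch.sorted)  // List(Complex_Main, Main, Pure)
        println(st)            // Load_Bulk(List(Complex_Main))
      }
    }

The real implementation additionally orders requirements before their dependents (all_preds(...).reverse) and re-filters finished theories on every step, as visible in the hunks above.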
diff -r a90e40118874 -r 2739631ac368 src/Pure/PIDE/resources.scala
--- a/src/Pure/PIDE/resources.scala	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Pure/PIDE/resources.scala	Mon Oct 07 11:35:43 2019 +0200
@@ -165,13 +165,6 @@
     } yield theory_node
   }
 
-  def dump_checkpoints(info: Sessions.Info): Set[Document.Node.Name] =
-    (for {
-      (options, thys) <- info.theories.iterator
-      if options.bool("dump_checkpoint")
-      (thy, _) <- thys.iterator
-    } yield import_name(info, thy)).toSet
-
   def complete_import_name(context_name: Document.Node.Name, s: String): List[String] =
   {
     val context_session = session_base.theory_qualifier(context_name)
@@ -262,10 +255,8 @@
     previous: Document.Version,
     doc_blobs: Document.Blobs,
     edits: List[Document.Edit_Text],
-    consolidate: List[Document.Node.Name],
-    share_common_data: Boolean): Session.Change =
-    Thy_Syntax.parse_change(
-      resources, reparse_limit, previous, doc_blobs, edits, consolidate, share_common_data)
+    consolidate: List[Document.Node.Name]): Session.Change =
+    Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate)
 
   def commit(change: Session.Change) { }
diff -r a90e40118874 -r 2739631ac368 src/Pure/PIDE/session.scala
--- a/src/Pure/PIDE/session.scala	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Pure/PIDE/session.scala	Mon Oct 07 11:35:43 2019 +0200
@@ -53,7 +53,6 @@
     deps_changed: Boolean,
     doc_edits: List[Document.Edit_Command],
     consolidate: List[Document.Node.Name],
-    share_common_data: Boolean,
     version: Document.Version)
 
   case object Change_Flush
@@ -65,8 +64,7 @@
   case class Statistics(props: Properties.T)
   case class Global_Options(options: Options)
   case object Caret_Focus
-  case class Raw_Edits(
-    doc_blobs: Document.Blobs, edits: List[Document.Edit_Text], share_common_data: Boolean)
+  case class Raw_Edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text])
   case class Dialog_Result(id: Document_ID.Generic, serial: Long, result: String)
   case class Build_Theories(id: String, master_dir: Path, theories: List[(Options, List[Path])])
   case class Commands_Changed(
@@ -232,17 +230,15 @@
     doc_blobs: Document.Blobs,
     text_edits: List[Document.Edit_Text],
     consolidate: List[Document.Node.Name],
-    share_common_data: Boolean,
     version_result: Promise[Document.Version])
 
   private val change_parser = Consumer_Thread.fork[Text_Edits]("change_parser", daemon = true)
   {
-    case Text_Edits(previous, doc_blobs, text_edits, consolidate, share_common_data, version_result) =>
+    case Text_Edits(previous, doc_blobs, text_edits, consolidate, version_result) =>
       val prev = previous.get_finished
       val change =
         Timing.timeit("parse_change", timing) {
-          resources.parse_change(
-            reparse_limit, prev, doc_blobs, text_edits, consolidate, share_common_data)
+          resources.parse_change(reparse_limit, prev, doc_blobs, text_edits, consolidate)
         }
       version_result.fulfill(change.version)
       manager.send(change)
@@ -393,8 +389,7 @@
   def handle_raw_edits(
     doc_blobs: Document.Blobs = Document.Blobs.empty,
     edits: List[Document.Edit_Text] = Nil,
-    consolidate: List[Document.Node.Name] = Nil,
-    share_common_data: Boolean = false)
+    consolidate: List[Document.Node.Name] = Nil)
   //{{{
   {
     require(prover.defined)
@@ -405,9 +400,8 @@
     val version = Future.promise[Document.Version]
     global_state.change(_.continue_history(previous, edits, version))
 
-    raw_edits.post(Session.Raw_Edits(doc_blobs, edits, share_common_data))
-    change_parser.send(
-      Text_Edits(previous, doc_blobs, edits, consolidate, share_common_data, version))
+    raw_edits.post(Session.Raw_Edits(doc_blobs, edits))
+    change_parser.send(Text_Edits(previous, doc_blobs, edits, consolidate, version))
   }
   //}}}
@@ -419,10 +413,6 @@
   {
     require(prover.defined)
 
-    if (change.share_common_data) {
-      prover.get.protocol_command("ML_Heap.share_common_data")
-    }
-
     // define commands
     {
       val id_commands = new mutable.ListBuffer[Command]
@@ -636,9 +626,8 @@
         case Cancel_Exec(exec_id) if prover.defined =>
           prover.get.cancel_exec(exec_id)
 
-        case Session.Raw_Edits(doc_blobs, edits, share_common_data) if prover.defined =>
-          handle_raw_edits(doc_blobs = doc_blobs, edits = edits,
-            share_common_data = share_common_data)
+        case Session.Raw_Edits(doc_blobs, edits) if prover.defined =>
+          handle_raw_edits(doc_blobs = doc_blobs, edits = edits)
 
         case Session.Dialog_Result(id, serial, result) if prover.defined =>
           prover.get.dialog_result(serial, result)
@@ -744,12 +733,9 @@
   def cancel_exec(exec_id: Document_ID.Exec) { manager.send(Cancel_Exec(exec_id)) }
 
-  def update(
-    doc_blobs: Document.Blobs,
-    edits: List[Document.Edit_Text],
-    share_common_data: Boolean = false)
+  def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text])
   {
-    if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits, share_common_data))
+    if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits))
   }
 
   def update_options(options: Options)
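On the session side the change is pure plumbing: Raw_Edits and Text_Edits lose the flag, and prover.get.protocol_command("ML_Heap.share_common_data") is no longer issued when a change is applied. A toy model of the remaining pipeline, using a plain daemon thread and a java.util.concurrent queue as stand-ins for Isabelle's Consumer_Thread (all names and types here are illustrative only, not the real Session API):

    import java.util.concurrent.LinkedBlockingQueue

    object Edit_Pipeline
    {
      // stand-in for Session.Raw_Edits after this changeset:
      // just blobs and edits, no share_common_data flag
      case class Raw_Edits(doc_blobs: Map[String, String], edits: List[String])

      private val queue = new LinkedBlockingQueue[Raw_Edits]

      // stand-in for the change_parser Consumer_Thread
      private val change_parser = new Thread {
        override def run(): Unit =
          while (true) {
            val msg = queue.take()
            println("parse_change: " + msg.edits.mkString(", "))
          }
      }
      change_parser.setDaemon(true)

      // stand-in for Session.update: callers no longer decide anything
      // about ML heap sharing
      def update(doc_blobs: Map[String, String], edits: List[String]): Unit =
        if (edits.nonEmpty) queue.put(Raw_Edits(doc_blobs, edits))

      def main(args: Array[String]): Unit =
      {
        change_parser.start()
        update(Map.empty, List("edit A", "edit B"))
        Thread.sleep(100)  // let the daemon thread drain the queue
      }
    }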
diff -r a90e40118874 -r 2739631ac368 src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Pure/Thy/sessions.scala	Mon Oct 07 11:35:43 2019 +0200
@@ -54,7 +54,6 @@
     global_theories: Map[String, String] = Map.empty,
     loaded_theories: Graph[String, Outer_Syntax] = Graph.string,
     used_theories: List[(Document.Node.Name, Options)] = Nil,
-    dump_checkpoints: Set[Document.Node.Name] = Set.empty,
     known_theories: Map[String, Document.Node.Entry] = Map.empty,
     known_loaded_files: Map[String, List[Path]] = Map.empty,
     overall_syntax: Outer_Syntax = Outer_Syntax.empty,
@@ -98,12 +97,6 @@
     def imported_sources(name: String): List[SHA1.Digest] =
       session_bases(name).imported_sources.map(_._2)
 
-    def dump_checkpoints: Set[Document.Node.Name] =
-      (for {
-        (_, base) <- session_bases.iterator
-        name <- base.dump_checkpoints.iterator
-      } yield name).toSet
-
     def used_theories_condition(default_options: Options, progress: Progress = No_Progress)
       : List[(Document.Node.Name, Options)] =
     {
@@ -219,8 +212,6 @@
             val dependencies = resources.session_dependencies(info)
 
-            val dump_checkpoints = resources.dump_checkpoints(info)
-
             val overall_syntax = dependencies.overall_syntax
 
             val theory_files = dependencies.theories.map(_.path)
@@ -338,7 +329,6 @@
               global_theories = sessions_structure.global_theories,
               loaded_theories = dependencies.loaded_theories,
               used_theories = used_theories,
-              dump_checkpoints = dump_checkpoints,
               known_theories = known_theories,
               known_loaded_files = known_loaded_files,
               overall_syntax = overall_syntax,
diff -r a90e40118874 -r 2739631ac368 src/Pure/Thy/thy_syntax.scala
--- a/src/Pure/Thy/thy_syntax.scala	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Pure/Thy/thy_syntax.scala	Mon Oct 07 11:35:43 2019 +0200
@@ -296,8 +296,7 @@
       previous: Document.Version,
       doc_blobs: Document.Blobs,
       edits: List[Document.Edit_Text],
-      consolidate: List[Document.Node.Name],
-      share_common_data: Boolean): Session.Change =
+      consolidate: List[Document.Node.Name]): Session.Change =
   {
     val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits)
 
@@ -358,7 +357,6 @@
     }
 
     Session.Change(
-      previous, syntax_changed, syntax_changed.nonEmpty, doc_edits,
-      consolidate, share_common_data, version)
+      previous, syntax_changed, syntax_changed.nonEmpty, doc_edits, consolidate, version)
   }
 }
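The remaining hunks strip the corresponding -C option from the isabelle dump tool, so a programmatic invocation mentions no checkpoints at all. A hedged usage sketch, assuming the usual Isabelle/Scala entry points of this era (Options.init, Console_Progress) together with the dump parameters visible in the hunks below; any parameters not shown keep their defaults:

    import isabelle._

    object Dump_Example
    {
      def main(args: Array[String]): Unit =
      {
        val options = Options.init()
        // after this changeset: no dump_checkpoints argument any more
        Dump.dump(options, logic = "Pure",
          progress = new Console_Progress(),
          selection = Sessions.Selection(sessions = List("HOL")))
      }
    }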
diff -r a90e40118874 -r 2739631ac368 src/Pure/Tools/dump.scala
--- a/src/Pure/Tools/dump.scala	Mon Oct 07 10:51:20 2019 +0200
+++ b/src/Pure/Tools/dump.scala	Mon Oct 07 11:35:43 2019 +0200
@@ -88,7 +88,6 @@
     def apply(
       options: Options,
       logic: String,
-      dump_checkpoints: Boolean = false,
       aspects: List[Aspect] = Nil,
       progress: Progress = No_Progress,
       log: Logger = No_Logger,
@@ -96,15 +95,13 @@
       select_dirs: List[Path] = Nil,
       selection: Sessions.Selection = Sessions.Selection.empty): Session =
     {
-      new Session(
-        options, logic, dump_checkpoints, aspects, progress, log, dirs, select_dirs, selection)
+      new Session(options, logic, aspects, progress, log, dirs, select_dirs, selection)
     }
   }
 
   class Session private(
     options: Options,
     logic: String,
-    dump_checkpoints: Boolean,
     aspects: List[Aspect],
     progress: Progress,
     log: Logger,
@@ -215,7 +212,6 @@
           session.use_theories(used_theories.map(_.theory),
             unicode_symbols = unicode_symbols,
             progress = progress,
-            checkpoints = if (dump_checkpoints) deps.dump_checkpoints else Set.empty,
             commit = Some(Consumer.apply _))
 
         val bad_theories = Consumer.shutdown()
@@ -247,7 +243,6 @@
   def dump(
     options: Options,
     logic: String,
-    dump_checkpoints: Boolean = false,
     aspects: List[Aspect] = Nil,
     progress: Progress = No_Progress,
     log: Logger = No_Logger,
@@ -257,9 +252,8 @@
     selection: Sessions.Selection = Sessions.Selection.empty)
   {
     val session =
-      Session(options, logic, dump_checkpoints = dump_checkpoints, aspects = aspects,
-        progress = progress, log = log, dirs = dirs, select_dirs = select_dirs,
-        selection = selection)
+      Session(options, logic, aspects = aspects, progress = progress, log = log, dirs = dirs,
+        select_dirs = select_dirs, selection = selection)
 
     session.run((args: Args) =>
       {
@@ -280,7 +274,6 @@
     var aspects: List[Aspect] = known_aspects
     var base_sessions: List[String] = Nil
     var select_dirs: List[Path] = Nil
-    var dump_checkpoints = false
     var output_dir = default_output_dir
     var requirements = false
     var exclude_session_groups: List[String] = Nil
@@ -298,7 +291,6 @@
 Options are:
   -A NAMES     dump named aspects (default: """ + known_aspects.mkString("\"", ",", "\"") + """)
   -B NAME      include session NAME and all descendants
-  -C           observe option dump_checkpoint for theories
   -D DIR       include session directory and select its sessions
   -O DIR       output directory for dumped files (default: """ + default_output_dir + """)
   -R           operate on requirements of selected sessions
@@ -316,7 +308,6 @@
 """ + Library.prefix_lines("  ", show_aspects) + "\n",
       "A:" -> (arg => aspects = Library.distinct(space_explode(',', arg)).map(the_aspect(_))),
       "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
-      "C" -> (_ => dump_checkpoints = true),
       "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
       "O:" -> (arg => output_dir = Path.explode(arg)),
       "R" -> (_ => requirements = true),
@@ -335,7 +326,6 @@
     progress.interrupt_handler {
       dump(options, logic,
-        dump_checkpoints = dump_checkpoints,
         aspects = aspects,
         progress = progress,
         dirs = dirs,