# HG changeset patch
# User wenzelm
# Date 1683618581 -7200
# Node ID f589c50e54a0096fa4852c9bdab32c602611fb70
# Parent  bdb5de00379a2fdf588fdfe701ca0cf53a195f3d
# Parent  ec850750db877dada42a781ed773c6676d1e98e2
merged

diff -r bdb5de00379a -r f589c50e54a0 src/Pure/Concurrent/synchronized.ML
--- a/src/Pure/Concurrent/synchronized.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/Concurrent/synchronized.ML	Tue May 09 09:49:41 2023 +0200
@@ -14,6 +14,9 @@
   val guarded_access: 'a var -> ('a -> ('b * 'a) option) -> 'b
   val change_result: 'a var -> ('a -> 'b * 'a) -> 'b
   val change: 'a var -> ('a -> 'a) -> unit
+  type 'a cache
+  val cache: (unit -> 'a) -> 'a cache
+  val cache_eval: 'a cache -> 'a
 end;
 
 structure Synchronized: SYNCHRONIZED =
@@ -92,4 +95,24 @@
 
 end;
 
+
+(* cached evaluation via weak_ref *)
+
+abstype 'a cache =
+  Cache of {expr: unit -> 'a, var: 'a Unsynchronized.weak_ref option var}
+with
+
+fun cache expr =
+  Cache {expr = expr, var = var "Synchronized.cache" NONE};
+
+fun cache_eval (Cache {expr, var}) =
+  change_result var (fn state =>
+    (case state of
+      SOME (Unsynchronized.ref (SOME (Unsynchronized.ref result))) => (result, state)
+    | _ =>
+        let val result = expr ()
+        in (result, SOME (Unsynchronized.weak_ref result)) end));
+
 end;
+
+end;
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/Concurrent/unsynchronized.ML
--- a/src/Pure/Concurrent/unsynchronized.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/Concurrent/unsynchronized.ML	Tue May 09 09:49:41 2023 +0200
@@ -15,6 +15,8 @@
   val dec: int ref -> int
   val add: int ref -> int -> int
   val setmp: 'a ref -> 'a -> ('b -> 'c) -> 'b -> 'c
+  type 'a weak_ref = 'a ref option ref
+  val weak_ref: 'a -> 'a weak_ref
 end;
 
 structure Unsynchronized: UNSYNCHRONIZED =
@@ -41,6 +43,9 @@
       val _ = flag := orig_value;
     in Exn.release result end) ();
 
+type 'a weak_ref = 'a ref option ref;
+fun weak_ref a = Weak.weak (SOME (ref a));
+
 end;
 
 ML_Name_Space.forget_val "ref";
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/General/long_name.ML
--- a/src/Pure/General/long_name.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/General/long_name.ML	Tue May 09 09:49:41 2023 +0200
@@ -108,18 +108,15 @@
 fun range_length r = r mod range_limit;
 fun range_string s r = String.substring (s, range_index r, range_length r);
 
-val range_empty = 0;
-val ranges_empty: int vector = Vector.fromList [];
-
 in
 
 abstype chunks = Chunks of {range0: int, ranges: int list (*reverse*), string: string}
 with
 
-val empty_chunks = Chunks {range0 = range_empty, ranges = [], string = ""};
+val empty_chunks = Chunks {range0 = 0, ranges = [], string = ""};
 
 fun is_empty_chunks (Chunks {range0, ranges, ...}) =
-  range0 = range_empty andalso null ranges;
+  range0 = 0 andalso null ranges;
 
 fun count_chunks (chunks as Chunks {ranges, ...}) =
   if is_empty_chunks chunks then 0
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/General/name_space.ML
--- a/src/Pure/General/name_space.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/General/name_space.ML	Tue May 09 09:49:41 2023 +0200
@@ -134,7 +134,8 @@
 
 type internals = string list Long_Name.Chunks.T; (*external name -> internal names*)
 
-val merge_internals : internals * internals -> internals = Long_Name.Chunks.merge_list (op =);
+val merge_internals : internals * internals -> internals =
+  Long_Name.Chunks.merge_list (op =);
 
 fun add_internals name xname : internals -> internals =
   Long_Name.Chunks.update_list (op =) (xname, name);
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/Syntax/parser.ML
--- a/src/Pure/Syntax/parser.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/Syntax/parser.ML	Tue May 09 09:49:41 2023 +0200
@@ -7,6 +7,7 @@
 
 signature PARSER =
 sig
+  val timing: bool Unsynchronized.ref
   type gram
   val empty_gram: gram
   val extend_gram: Syntax_Ext.xprod list -> gram -> gram
@@ -415,6 +416,8 @@
 
 (** operations on grammars **)
 
+val timing = Unsynchronized.ref true;
+
 val empty_gram =
   Gram
    {nt_count = 0,
@@ -426,8 +429,8 @@
 
 
 (*Add productions to a grammar*)
-fun extend_gram [] gram = gram
-  | extend_gram xprods (Gram {nt_count, prod_count, tags, chains, lambdas, prods}) =
+fun extend_gram0 [] gram = gram
+  | extend_gram0 xprods (Gram {nt_count, prod_count, tags, chains, lambdas, prods}) =
   let
     (*Get tag for existing nonterminal or create a new one*)
     fun get_tag nt_count tags nt =
@@ -490,6 +493,12 @@
       prods = Array.vector prods'}
   end;
 
+fun extend_gram xprods gram =
+  if ! timing then
+    Timing.cond_timeit true ("Parser.extend_gram" ^ Position.here (Position.thread_data ()))
+      (fn () => extend_gram0 xprods gram)
+  else extend_gram0 xprods gram;
+
 fun make_gram xprods = extend_gram xprods empty_gram;
 
 
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/Syntax/syntax_ext.ML
--- a/src/Pure/Syntax/syntax_ext.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/Syntax/syntax_ext.ML	Tue May 09 09:49:41 2023 +0200
@@ -19,7 +19,7 @@
     En
   datatype xprod = XProd of string * xsymb list * string * int
   val chain_pri: int
-  val delims_of: xprod list -> string list list
+  val delims_of: xprod list -> Symbol.symbol list list
   datatype syn_ext =
     Syn_Ext of {
      xprods: xprod list,
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/Tools/find_theorems.ML
--- a/src/Pure/Tools/find_theorems.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/Tools/find_theorems.ML	Tue May 09 09:49:41 2023 +0200
@@ -236,7 +236,7 @@
     val thy = Proof_Context.theory_of ctxt;
     fun matches obj ctxt' = Pattern.matches thy (pat, obj) orelse
       (case obj of
-        Abs (_, T, t) =>
+        Abs _ =>
           let val ((_, t'), ctxt'') = Variable.dest_abs obj ctxt'
           in matches t' ctxt'' end
       | t $ u => matches t ctxt' orelse matches u ctxt'
diff -r bdb5de00379a -r f589c50e54a0 src/Pure/context.ML
--- a/src/Pure/context.ML	Mon May 08 17:26:33 2023 +0200
+++ b/src/Pure/context.ML	Tue May 09 09:49:41 2023 +0200
@@ -53,8 +53,6 @@
   val proper_subthy: theory * theory -> bool
   val subthy_id: theory_id * theory_id -> bool
   val subthy: theory * theory -> bool
-  val trace_theories: bool Unsynchronized.ref
-  val theories_trace: unit -> {active_positions: Position.T list, active: int, total: int}
   val join_thys: theory list -> theory
   val begin_thy: string -> theory list -> theory
   val finish_thy: theory -> theory
@@ -69,6 +67,16 @@
   val join_certificate: certificate * certificate -> certificate
   (*generic context*)
   datatype generic = Theory of theory | Proof of Proof.context
+  val trace_theories: bool Unsynchronized.ref
+  val trace_proofs: bool Unsynchronized.ref
+  val allocations_trace: unit ->
+   {contexts: generic list,
+    active_contexts: int,
+    active_theories: int,
+    active_proofs: int,
+    total_contexts: int,
+    total_theories: int,
+    total_proofs: int}
   val cases: (theory -> 'a) -> (Proof.context -> 'a) -> generic -> 'a
   val mapping: (theory -> theory) -> (Proof.context -> Proof.context) -> generic -> generic
   val mapping_result: (theory -> 'a * theory) -> (Proof.context -> 'a * Proof.context) ->
@@ -114,9 +122,9 @@
 structure Context: PRIVATE_CONTEXT =
 struct
 
-(*** theory context ***)
+(*** type definitions ***)
 
-(* theory data *)
+(* context data *)
 
 (*private copy avoids potential conflict of table exceptions*)
 structure Datatab = Table(type key = int val ord = int_ord);
@@ -125,9 +133,24 @@
 val data_kind = Counter.make ();
 
 
-(** datatype theory **)
+(* theory identity *)
+
+type id = int;
+val new_id = Counter.make ();
 
-(* implicit state *)
+abstype theory_id =
+  Thy_Id of
+   {id: id, (*identifier*)
+    ids: Intset.T, (*cumulative identifiers -- symbolic body content*)
+    name: string, (*official theory name*)
+    stage: int} (*index for anonymous updates*)
+with
+  fun rep_theory_id (Thy_Id args) = args;
+  val make_theory_id = Thy_Id;
+end;
+
+
+(* theory allocation state *)
 
 type state = {stage: int} Synchronized.var;
 
@@ -138,44 +161,99 @@
   Synchronized.change_result state (fn {stage} => (stage + 1, {stage = stage + 1}));
 
 
-(* theory_id *)
+(* theory and proof context *)
+
+datatype theory =
+  Thy_Undef
+| Thy of
+   (*allocation state*)
+   state *
+   (*identity*)
+   {theory_id: theory_id,
+    theory_token: theory Unsynchronized.ref} *
+   (*ancestry*)
+   {parents: theory list, (*immediate predecessors*)
+    ancestors: theory list} * (*all predecessors -- canonical reverse order*)
+   (*data*)
+   Any.T Datatab.table; (*body content*)
 
-type id = int;
-val new_id = Counter.make ();
+datatype proof =
+  Prf_Undef
+| Prf of
+   (*identity*)
+   proof Unsynchronized.ref *
+   theory *
+   (*data*)
+   Any.T Datatab.table;
+
+structure Proof = struct type context = proof end;
+
+datatype generic = Theory of theory | Proof of Proof.context;
+
+
+(* heap allocations *)
+
+val trace_theories = Unsynchronized.ref false;
+val trace_proofs = Unsynchronized.ref false;
+
+local
 
-abstype theory_id =
-  Theory_Id of
-   {id: id, (*identifier*)
-    ids: Intset.T, (*cumulative identifiers -- symbolic body content*)
-    name: string, (*official theory name*)
-    stage: int} (*index for anonymous updates*)
-with
+fun make_token guard var token0 =
+  if ! guard then
+    let
+      val token = Unsynchronized.ref (! token0);
+      val _ = Synchronized.change var (cons (Weak.weak (SOME token)));
+    in (token, fn res => (token := res; res)) end
+  else (token0, I);
+
+val theory_tokens = Synchronized.var "theory_tokens" ([]: theory Unsynchronized.weak_ref list);
+val proof_tokens = Synchronized.var "proof_tokens" ([]: Proof.context Unsynchronized.weak_ref list);
+
+val theory_token0 = Unsynchronized.ref Thy_Undef;
+val proof_token0 = Unsynchronized.ref Prf_Undef;
+
+in
+
+fun theory_token () = make_token trace_theories theory_tokens theory_token0;
+fun proof_token () = make_token trace_proofs proof_tokens proof_token0;
 
-fun rep_theory_id (Theory_Id args) = args;
-val make_theory_id = Theory_Id;
+fun allocations_trace () =
+  let
+    val _ = ML_Heap.full_gc ();
+    val trace1 = Synchronized.value theory_tokens;
+    val trace2 = Synchronized.value proof_tokens;
+
+    fun cons1 (Unsynchronized.ref (SOME (Unsynchronized.ref (thy as Thy _)))) = cons (Theory thy)
+      | cons1 _ = I;
+    fun cons2 (Unsynchronized.ref (SOME (Unsynchronized.ref (ctxt as Prf _)))) = cons (Proof ctxt)
+      | cons2 _ = I;
+
+    val contexts = build (fold cons1 trace1 #> fold cons2 trace2);
+    val active_theories = fold (fn Theory _ => Integer.add 1 | _ => I) contexts 0;
+    val active_proofs = fold (fn Proof _ => Integer.add 1 | _ => I) contexts 0;
+
+    val total_theories = length trace1;
+    val total_proofs = length trace2;
+  in
+   {contexts = contexts,
+    active_contexts = active_theories + active_proofs,
+    active_theories = active_theories,
+    active_proofs = active_proofs,
+    total_contexts = total_theories + total_proofs,
+    total_theories = total_theories,
+    total_proofs = total_proofs}
+  end;
 
 end;
 
 
-(* theory *)
 
-datatype theory =
-  Theory of
-   (*allocation state*)
-   state *
-   (*identity*)
-   {theory_id: theory_id,
-    token: Position.T Unsynchronized.ref} *
-   (*ancestry*)
-   {parents: theory list, (*immediate predecessors*)
-    ancestors: theory list} * (*all predecessors -- canonical reverse order*)
-   (*data*)
-   Any.T Datatab.table; (*body content*)
+(*** theory operations ***)
+
+fun rep_theory (Thy args) = args;
 
 exception THEORY of string * theory list;
 
-fun rep_theory (Theory args) = args;
-
 val state_of = #1 o rep_theory;
 val theory_identity = #2 o rep_theory;
 val theory_id = #theory_id o theory_identity;
@@ -343,45 +421,12 @@
 
 (* create theory *)
 
-val trace_theories = Unsynchronized.ref false;
-
-local
-
-val theories =
-  Synchronized.var "theory_tokens"
-    ([]: Position.T Unsynchronized.ref option Unsynchronized.ref list);
-
-val dummy_token = Unsynchronized.ref Position.none;
-
-fun make_token () =
-  if ! trace_theories then
-    let
-      val token = Unsynchronized.ref (Position.thread_data ());
-      val _ = Synchronized.change theories (cons (Weak.weak (SOME token)));
-    in token end
-  else dummy_token;
-
-in
-
-fun theories_trace () =
-  let
-    val trace = Synchronized.value theories;
-    val _ = ML_Heap.full_gc ();
-    val active_positions =
-      fold (fn Unsynchronized.ref (SOME pos) => cons (! pos) | _ => I) trace [];
-  in
-    {active_positions = active_positions,
-     active = length active_positions,
-     total = length trace}
-  end;
-
 fun create_thy state ids name stage ancestry data =
   let
    val theory_id = make_theory_id {id = new_id (), ids = ids, name = name, stage = stage};
-    val identity = {theory_id = theory_id, token = make_token ()};
-  in Theory (state, identity, ancestry, data) end;
-
-end;
+    val (token, assign) = theory_token ();
+    val identity = {theory_id = theory_id, theory_token = token};
+  in assign (Thy (state, identity, ancestry, data)) end;
 
 
 (* primitives *)
@@ -397,7 +442,7 @@
 fun change_thy finish f thy =
   let
     val {name, stage, ...} = identity_of thy;
-    val Theory (state, _, ancestry, data) = thy;
+    val Thy (state, _, ancestry, data) = thy;
     val ancestry' =
       if stage_final stage then
         make_ancestry [thy] (extend_ancestors thy (ancestors_of thy))
@@ -489,14 +534,6 @@
 
 (*** proof context ***)
 
-(* datatype Proof.context *)
-
-structure Proof =
-struct
-  datatype context = Context of Any.T Datatab.table * theory;
-end;
-
-
 (* proof data kinds *)
 
 local
@@ -518,16 +555,21 @@
 
 in
 
-fun raw_transfer thy' (Proof.Context (data, thy)) =
-  let
-    val _ = subthy (thy, thy') orelse error "Cannot transfer proof context: not a super theory";
-    val data' = init_new_data thy' data;
-  in Proof.Context (data', thy') end;
+fun raw_transfer thy' (ctxt as Prf (_, thy, data)) =
+  if eq_thy (thy, thy') then ctxt
+  else if proper_subthy (thy, thy') then
+    let
+      val (token', assign) = proof_token ();
+      val data' = init_new_data thy' data;
+    in assign (Prf (token', thy', data')) end
+  else error "Cannot transfer proof context: not a super theory";
 
 structure Proof_Context =
 struct
-  fun theory_of (Proof.Context (_, thy)) = thy;
-  fun init_global thy = Proof.Context (init_data thy, thy);
+  fun theory_of (Prf (_, thy, _)) = thy;
+  fun init_global thy =
+    let val (token, assign) = proof_token ()
+    in assign (Prf (token, thy, init_data thy)) end;
   fun get_global long thy name = init_global (get_theory long thy name);
 end;
@@ -540,13 +582,14 @@
     val _ = Synchronized.change kinds (Datatab.update (k, init));
   in k end;
 
-fun get k dest (Proof.Context (data, thy)) =
+fun get k dest (Prf (_, thy, data)) =
   (case Datatab.lookup data k of
     SOME x => x
   | NONE => init_fallback k thy)
   |> dest;
 
-fun put k make x (Proof.Context (data, thy)) =
-  Proof.Context (Datatab.update (k, make x) data, thy);
+fun put k make x (Prf (token, thy, data)) =
+  let val (token, assign) = proof_token ()
+  in assign (Prf (token, thy, Datatab.update (k, make x) data)) end;
 
 end;
@@ -583,8 +626,6 @@
 
 (*** generic context ***)
 
-datatype generic = Theory of theory | Proof of Proof.context;
-
 fun cases f _ (Theory thy) = f thy
   | cases _ g (Proof prf) = g prf;