changeset 78015 93f294ad42e6
parent 78013 f5b67198b019 (diff)
parent 78014 24f0cd70790b (current diff)
child 78016 b0ef3aae2bdb
author: desharna
date: Wed, 10 May 2023 08:20:53 +0200

merged
--- a/Admin/components/components.sha1	Tue May 09 22:00:36 2023 +0200
+++ b/Admin/components/components.sha1	Wed May 10 08:20:53 2023 +0200
@@ -182,6 +182,7 @@
 260f5e03e8fc7185f7987a6d2961a23abdce6a0b jdk-17.0.4.1+1.tar.gz
 8f417fcbe5d0fef3a958aeb9740499230aa00046 jdk-17.0.5.tar.gz
 e904e85d0b5f6552344aa385c90f3ca528dc3514 jdk-17.0.6.tar.gz
+ee31c8ac65d5828d8c426fa3eedeb467cfa497ab jdk-17.0.7.tar.gz
 8d83e433c1419e0c0cc5fd1762903d11b4a5752c jdk-6u31.tar.gz
 38d2d2a91c66714c18430e136e7e5191af3996e6 jdk-7u11.tar.gz
 d765bc4ad2f34d494429b2a8c1563c49db224944 jdk-7u13.tar.gz
--- a/Admin/components/main	Tue May 09 22:00:36 2023 +0200
+++ b/Admin/components/main	Wed May 10 08:20:53 2023 +0200
@@ -12,7 +12,7 @@
 idea-icons-20210508
 isabelle_fonts-20211004
 isabelle_setup-20230206
-jdk-17.0.6
+jdk-17.0.7
 jedit-20211103
 jfreechart-1.5.3
 jortho-1.0-2
--- a/src/Pure/Admin/component_jdk.scala	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/Admin/component_jdk.scala	Wed May 10 08:20:53 2023 +0200
@@ -33,8 +33,8 @@
   /* build jdk */
 
   val default_base_url = "https://cdn.azul.com/zulu/bin"
-  val default_jdk_version = "17.0.6"
-  val default_zulu_version = "17.40.19-ca"
+  val default_jdk_version = "17.0.7"
+  val default_zulu_version = "17.42.19-ca"
 
   def build_jdk(
     target_dir: Path = Path.current,
--- a/src/Pure/Concurrent/synchronized.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/Concurrent/synchronized.ML	Wed May 10 08:20:53 2023 +0200
@@ -16,6 +16,7 @@
   val change: 'a var -> ('a -> 'a) -> unit
   type 'a cache
   val cache: (unit -> 'a) -> 'a cache
+  val cache_peek: 'a cache -> 'a option
   val cache_eval: 'a cache -> 'a
 end;
 
@@ -105,13 +106,16 @@
 fun cache expr =
   Cache {expr = expr, var = var "Synchronized.cache" NONE};
 
+fun cache_peek (Cache {var, ...}) =
+  Option.mapPartial Unsynchronized.weak_peek (value var);
+
 fun cache_eval (Cache {expr, var}) =
   change_result var (fn state =>
-    (case state of
-      SOME (Unsynchronized.ref (SOME (Unsynchronized.ref result))) => (result, state)
-    | _ =>
-      let val result = expr ()
-      in (result, SOME (Unsynchronized.weak_ref result)) end));
+    (case Option.mapPartial Unsynchronized.weak_peek state of
+      SOME result => (result, state)
+    | NONE =>
+        let val result = expr ()
+        in (result, SOME (Unsynchronized.weak_ref result)) end));
 
 end;
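
Note: the new cache_peek inspects a Synchronized.cache without forcing it — it returns NONE both for a cache that has never been evaluated and for one whose weakly referenced result has since been reclaimed by the garbage collector; cache_eval is reformulated over the same weak_peek operation. A minimal Isabelle/ML usage sketch of the API added here (the folded sum is just a placeholder workload):

  val c = Synchronized.cache (fn () => fold (curry op +) (1 upto 1000) 0);

  val peek0 = Synchronized.cache_peek c;  (*NONE: nothing evaluated yet*)
  val n = Synchronized.cache_eval c;      (*forces evaluation, stores a weak result*)
  val peek1 = Synchronized.cache_peek c;  (*SOME n, unless a GC has already cleared the weak ref*)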
 
--- a/src/Pure/Concurrent/unsynchronized.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/Concurrent/unsynchronized.ML	Wed May 10 08:20:53 2023 +0200
@@ -17,11 +17,14 @@
   val setmp: 'a ref -> 'a -> ('b -> 'c) -> 'b -> 'c
   type 'a weak_ref = 'a ref option ref
   val weak_ref: 'a -> 'a weak_ref
+  val weak_peek: 'a weak_ref -> 'a option
 end;
 
 structure Unsynchronized: UNSYNCHRONIZED =
 struct
 
+(* regular references *)
+
 datatype ref = datatype ref;
 
 val op := = op :=;
@@ -43,9 +46,18 @@
       val _ = flag := orig_value;
     in Exn.release result end) ();
 
+
+(* weak references *)
+
+(*see also $ML_SOURCES/basis/Weak.sml*)
+
 type 'a weak_ref = 'a ref option ref;
+
 fun weak_ref a = Weak.weak (SOME (ref a));
 
+fun weak_peek (ref (SOME (ref a))) = SOME a
+  | weak_peek _ = NONE;
+
 end;
 
 ML_Name_Space.forget_val "ref";
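
Note: weak_peek performs the double dereference through the 'a ref option ref representation in a single pattern match — SOME a while the inner ref cell is still alive, NONE after the garbage collector has cleared it. A standalone Poly/ML sketch of the pattern (cf. $ML_SOURCES/basis/Weak.sml, as referenced in the comment above):

  type 'a weak_ref = 'a ref option ref;

  fun weak_peek (ref (SOME (ref a))) = SOME a
    | weak_peek _ = NONE;

  val strong = ref "payload";                        (*strong handle keeps the cell alive*)
  val w: string weak_ref = Weak.weak (SOME strong);

  val now = weak_peek w;  (*SOME "payload" while strong is still reachable*)
  (*once strong becomes unreachable and a full GC has run, weak_peek w yields NONE*)
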
--- a/src/Pure/ROOT.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/ROOT.ML	Wed May 10 08:20:53 2023 +0200
@@ -370,3 +370,4 @@
 ML_file "Tools/jedit.ML";
 ML_file "Tools/ghc.ML";
 ML_file "Tools/generated_files.ML";
+
--- a/src/Pure/ROOT.scala	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/ROOT.scala	Wed May 10 08:20:53 2023 +0200
@@ -27,3 +27,4 @@
   def if_proper[A](x: Iterable[A], body: => String): String = Library.if_proper(x, body)
   def if_proper(b: Boolean, body: => String): String = Library.if_proper(b, body)
 }
+
--- a/src/Pure/Syntax/parser.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/Syntax/parser.ML	Wed May 10 08:20:53 2023 +0200
@@ -7,11 +7,9 @@
 
 signature PARSER =
 sig
-  val timing: bool Unsynchronized.ref
   type gram
   val empty_gram: gram
-  val extend_gram: Syntax_Ext.xprod list -> gram -> gram
-  val make_gram: Syntax_Ext.xprod list -> gram
+  val make_gram: Syntax_Ext.xprod list -> Syntax_Ext.xprod list -> gram option -> gram
   val pretty_gram: gram -> Pretty.T list
   datatype parsetree =
     Node of string * parsetree list |
@@ -37,7 +35,8 @@
 fun tags_content (tags: tags) = sort_by #1 (Symtab.dest tags);
 fun tags_lookup (tags: tags) = Symtab.lookup tags;
 fun tags_insert tag (tags: tags) = Symtab.update_new tag tags;
-fun tags_name (tags: tags) = the o Inttab.lookup (Inttab.make (map swap (Symtab.dest tags)));
+fun tags_name (tags: tags) =
+  the o Inttab.lookup (Inttab.build (Symtab.fold (Inttab.update_new o swap) tags));
 
 type nts = Intset.T;
 val nts_empty: nts = Intset.empty;
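
Note: the reworked tags_name inverts the symbol table by folding directly into an Inttab, avoiding the intermediate association list of the old Inttab.make (map swap ...) formulation; Inttab.update_new additionally asserts that no tag number is assigned twice. An Isabelle/ML sketch with a hypothetical two-entry table:

  val tags = Symtab.make [("logic", 0), ("type", 1)];
  val names = Inttab.build (Symtab.fold (Inttab.update_new o swap) tags);
  val name1 = Inttab.lookup names 1;  (*SOME "type"*)
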
@@ -110,7 +109,7 @@
     SOME tk => tk
   | NONE => unknown_start);
 
-fun add_production prods (lhs, new_prod as (rhs, _, pri)) (chains, lambdas) =
+fun add_production array_prods (lhs, new_prod as (rhs, _, pri)) (chains, lambdas) =
   let
     (*store chain if it does not already exist*)
     val (chain, new_chain, chains') =
@@ -133,16 +132,16 @@
       if not new_chain then ([], lambdas)
       else
         let (*lookahead of chain's source*)
-          val ((_, from_tks), _) = Array.nth prods (the chain);
+          val ((_, from_tks), _) = Array.nth array_prods (the chain);
 
           (*copy from's lookahead to chain's destinations*)
           fun copy_lookahead to =
             let
-              val ((to_nts, to_tks), ps) = Array.nth prods to;
+              val ((to_nts, to_tks), ps) = Array.nth array_prods to;
 
               val new_tks = Tokens.subtract to_tks from_tks;  (*added lookahead tokens*)
               val to_tks' = Tokens.merge (to_tks, new_tks);
-              val _ = Array.upd prods to ((to_nts, to_tks'), ps);
+              val _ = Array.upd array_prods to ((to_nts, to_tks'), ps);
             in tokens_add (to, new_tks) end;
 
           val tos = chains_all_succs chains' [lhs];
@@ -171,7 +170,7 @@
           else (NONE, nts_insert nt nts);
 
     (*get all known starting tokens for a nonterminal*)
-    fun starts_for_nt nt = snd (fst (Array.nth prods nt));
+    fun starts_for_nt nt = snd (fst (Array.nth array_prods nt));
 
     (*update prods, lookaheads, and lambdas according to new lambda NTs*)
     val (added_starts', lambdas') =
@@ -181,7 +180,7 @@
           | propagate_lambda (l :: ls) added_starts lambdas =
               let
                 (*get lookahead for lambda NT*)
-                val ((dependent, l_starts), _) = Array.nth prods l;
+                val ((dependent, l_starts), _) = Array.nth array_prods l;
 
                 (*check productions whose lookahead may depend on lambda NT*)
                 fun examine_prods [] add_lambda nt_dependencies added_tks nt_prods =
@@ -222,7 +221,7 @@
                 (*check each NT whose lookahead depends on new lambda NT*)
                 fun process_nts nt (added_lambdas, added_starts) =
                   let
-                    val ((old_nts, old_tks), nt_prods) = Array.nth prods nt;
+                    val ((old_nts, old_tks), nt_prods) = Array.nth array_prods nt;
 
                     (*existing productions whose lookahead may depend on l*)
                     val tk_prods = prods_lookup nt_prods (get_start l_starts);
@@ -236,7 +235,7 @@
                     val new_tks = Tokens.merge (old_tks, added_tks);
 
                     val added_lambdas' = added_lambdas |> add_lambda ? cons nt;
-                    val _ = Array.upd prods nt ((new_nts, new_tks), nt_prods');
+                    val _ = Array.upd array_prods nt ((new_nts, new_tks), nt_prods');
                       (*N.B. that because the tks component
                         is used to access existing
                         productions we have to add new
@@ -280,9 +279,9 @@
           (*add lhs NT to list of dependent NTs in lookahead*)
           fun add_nts nt initial =
             (if initial then
-              let val ((old_nts, old_tks), ps) = Array.nth prods nt in
+              let val ((old_nts, old_tks), ps) = Array.nth array_prods nt in
                 if nts_member old_nts lhs then ()
-                else Array.upd prods nt ((nts_insert lhs old_nts, old_tks), ps)
+                else Array.upd array_prods nt ((nts_insert lhs old_nts, old_tks), ps)
               end
              else (); false);
 
@@ -291,7 +290,7 @@
           fun add_tks [] added = added
             | add_tks (nt :: nts) added =
                 let
-                  val ((old_nts, old_tks), nt_prods) = Array.nth prods nt;
+                  val ((old_nts, old_tks), nt_prods) = Array.nth array_prods nt;
 
                   val new_tks = Tokens.subtract old_tks start_tks;
 
@@ -307,7 +306,7 @@
                     |> nt = lhs ? Tokens.fold store start_tks';
                   val _ =
                     if not changed andalso Tokens.is_empty new_tks then ()
-                    else Array.upd prods nt ((old_nts, Tokens.merge (old_tks, new_tks)), nt_prods');
+                    else Array.upd array_prods nt ((old_nts, Tokens.merge (old_tks, new_tks)), nt_prods');
                 in add_tks nts (tokens_add (nt, new_tks) added) end;
           val _ = nts_fold add_nts start_nts true;
         in add_tks (chains_all_succs chains' [lhs]) [] end;
@@ -364,7 +363,7 @@
                 (*copy existing productions for new starting tokens*)
                 fun process_nts nt =
                   let
-                    val ((nts, tks), nt_prods) = Array.nth prods nt;
+                    val ((nts, tks), nt_prods) = Array.nth array_prods nt;
 
                     val tk_prods = prods_lookup nt_prods key;
 
@@ -378,10 +377,10 @@
 
                     val added_tks = Tokens.subtract tks new_tks;
                     val tks' = Tokens.merge (tks, added_tks);
-                    val _ = Array.upd prods nt ((nts, tks'), nt_prods'');
+                    val _ = Array.upd array_prods nt ((nts, tks'), nt_prods'');
                   in tokens_add (nt, added_tks) end;
 
-                val ((dependent, _), _) = Array.nth prods changed_nt;
+                val ((dependent, _), _) = Array.nth array_prods changed_nt;
               in add_starts (starts @ nts_fold process_nts dependent []) end;
       in add_starts added_starts' end;
   in (chains', lambdas') end;
@@ -416,8 +415,6 @@
 
 (** operations on grammars **)
 
-val timing = Unsynchronized.ref true;
-
 val empty_gram =
   Gram
    {nt_count = 0,
@@ -427,79 +424,68 @@
     lambdas = nts_empty,
     prods = Vector.fromList [nt_gram_empty]};
 
-
-(*Add productions to a grammar*)
-fun extend_gram0 [] gram = gram
-  | extend_gram0 xprods (Gram {nt_count, prod_count, tags, chains, lambdas, prods}) =
-      let
-        (*Get tag for existing nonterminal or create a new one*)
-        fun get_tag nt_count tags nt =
-          (case tags_lookup tags nt of
-            SOME tag => (nt_count, tags, tag)
-          | NONE => (nt_count + 1, tags_insert (nt, nt_count) tags, nt_count));
+fun extend_gram xprods (Gram {nt_count, prod_count, tags, chains, lambdas, prods}) =
+  let
+    (*Get tag for existing nonterminal or create a new one*)
+    fun get_tag nt_count tags nt =
+      (case tags_lookup tags nt of
+        SOME tag => (nt_count, tags, tag)
+      | NONE => (nt_count + 1, tags_insert (nt, nt_count) tags, nt_count));
 
-        (*Convert symbols to the form used by the parser;
-          delimiters and predefined terms are stored as terminals,
-          nonterminals are converted to integer tags*)
-        fun symb_of [] nt_count tags result = (nt_count, tags, rev result)
-          | symb_of (Syntax_Ext.Delim s :: ss) nt_count tags result =
-              symb_of ss nt_count tags (Terminal (Lexicon.literal s) :: result)
-          | symb_of (Syntax_Ext.Argument (s, p) :: ss) nt_count tags result =
-              let
-                val (nt_count', tags', new_symb) =
-                  (case Lexicon.get_terminal s of
-                    NONE =>
-                      let val (nt_count', tags', s_tag) = get_tag nt_count tags s;
-                      in (nt_count', tags', Nonterminal (s_tag, p)) end
-                  | SOME tk => (nt_count, tags, Terminal tk));
-              in symb_of ss nt_count' tags' (new_symb :: result) end
-          | symb_of (_ :: ss) nt_count tags result = symb_of ss nt_count tags result;
+    (*Convert symbols to the form used by the parser;
+      delimiters and predefined terms are stored as terminals,
+      nonterminals are converted to integer tags*)
+    fun symb_of [] nt_count tags result = (nt_count, tags, rev result)
+      | symb_of (Syntax_Ext.Delim s :: ss) nt_count tags result =
+          symb_of ss nt_count tags (Terminal (Lexicon.literal s) :: result)
+      | symb_of (Syntax_Ext.Argument (s, p) :: ss) nt_count tags result =
+          let
+            val (nt_count', tags', new_symb) =
+              (case Lexicon.get_terminal s of
+                NONE =>
+                  let val (nt_count', tags', s_tag) = get_tag nt_count tags s;
+                  in (nt_count', tags', Nonterminal (s_tag, p)) end
+              | SOME tk => (nt_count, tags, Terminal tk));
+          in symb_of ss nt_count' tags' (new_symb :: result) end
+      | symb_of (_ :: ss) nt_count tags result = symb_of ss nt_count tags result;
 
-        (*Convert list of productions by invoking symb_of for each of them*)
-        fun prod_of [] nt_count prod_count tags result =
-              (nt_count, prod_count, tags, result)
-          | prod_of (Syntax_Ext.XProd (lhs, xsymbs, const, pri) :: ps)
-                nt_count prod_count tags result =
-              let
-                val (nt_count', tags', lhs_tag) = get_tag nt_count tags lhs;
-                val (nt_count'', tags'', prods) = symb_of xsymbs nt_count' tags' [];
-              in
-                prod_of ps nt_count'' (prod_count + 1) tags''
-                  ((lhs_tag, (prods, const, pri)) :: result)
-              end;
-
-        val (nt_count', prod_count', tags', xprods') =
-          prod_of xprods nt_count prod_count tags [];
-
-        (*Copy array containing productions of old grammar;
-          this has to be done to preserve the old grammar while being able
-          to change the array's content*)
-        val prods' =
+    (*Convert list of productions by invoking symb_of for each of them*)
+    fun prod_of [] nt_count prod_count tags result =
+          (nt_count, prod_count, tags, result)
+      | prod_of (Syntax_Ext.XProd (lhs, xsymbs, const, pri) :: ps)
+            nt_count prod_count tags result =
           let
-            fun get_prod i =
-              if i < nt_count then Vector.nth prods i
-              else nt_gram_empty;
-          in Array.tabulate (nt_count', get_prod) end;
+            val (nt_count', tags', lhs_tag) = get_tag nt_count tags lhs;
+            val (nt_count'', tags'', prods) = symb_of xsymbs nt_count' tags' [];
+          in
+            prod_of ps nt_count'' (prod_count + 1) tags''
+              ((lhs_tag, (prods, const, pri)) :: result)
+          end;
+
+    val (nt_count', prod_count', tags', xprods') =
+      prod_of xprods nt_count prod_count tags [];
+
+    val array_prods' =
+      Array.tabulate (nt_count', fn i =>
+        if i < nt_count then Vector.nth prods i
+        else nt_gram_empty);
 
-        val (chains', lambdas') =
-          (chains, lambdas) |> fold (add_production prods') xprods';
-      in
-        Gram
-         {nt_count = nt_count',
-          prod_count = prod_count',
-          tags = tags',
-          chains = chains',
-          lambdas = lambdas',
-          prods = Array.vector prods'}
-      end;
+    val (chains', lambdas') =
+      (chains, lambdas) |> fold (add_production array_prods') xprods';
+  in
+    Gram
+     {nt_count = nt_count',
+      prod_count = prod_count',
+      tags = tags',
+      chains = chains',
+      lambdas = lambdas',
+      prods = Array.vector array_prods'}
+  end;
 
-fun extend_gram xprods gram =
-  if ! timing then
-    Timing.cond_timeit true ("Parser.extend_gram" ^ Position.here (Position.thread_data ()))
-      (fn () => extend_gram0 xprods gram)
-  else extend_gram0 xprods gram;
-
-fun make_gram xprods = extend_gram xprods empty_gram;
+fun make_gram [] _ (SOME gram) = gram
+  | make_gram new_xprods _ (SOME gram) = extend_gram new_xprods gram
+  | make_gram [] [] NONE = empty_gram
+  | make_gram new_xprods old_xprods NONE = extend_gram (new_xprods @ old_xprods) empty_gram;
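
Note: make_gram now takes the new productions, the old productions, and an optional base grammar. With a base at hand, only the new productions are added; with no base and no productions, it yields the empty grammar; otherwise it builds from all productions in a single pass — so old_xprods is consulted only when no previously evaluated grammar is available. A standalone sketch of this dispatch, modelling a "grammar" as a plain list purely for illustration:

  type gram = string list;
  val empty_gram: gram = [];
  fun extend_gram new_prods (gram: gram) = new_prods @ gram;

  fun make_gram [] _ (SOME gram) = gram         (*nothing new: reuse the base as-is*)
    | make_gram new_prods _ (SOME gram) = extend_gram new_prods gram
    | make_gram [] [] NONE = empty_gram
    | make_gram new_prods old_prods NONE =      (*no base: full build from everything*)
        extend_gram (new_prods @ old_prods) empty_gram;

  val g1 = make_gram ["p1", "p2"] [] NONE;           (*["p1", "p2"]*)
  val g2 = make_gram ["p3"] ["p1", "p2"] (SOME g1);  (*incremental: ["p3", "p1", "p2"]*)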
 
 
 
--- a/src/Pure/Syntax/syntax.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/Syntax/syntax.ML	Wed May 10 08:20:53 2023 +0200
@@ -73,7 +73,6 @@
   val pretty_flexpair: Proof.context -> term * term -> Pretty.T list
   type syntax
   val eq_syntax: syntax * syntax -> bool
-  val force_syntax: syntax -> unit
   datatype approx = Prefix of string | Infix of {assoc: Printer.assoc, delim: string, pri: int}
   val get_approx: syntax -> string -> approx option
   val lookup_const: syntax -> string -> string option
@@ -409,7 +408,7 @@
   Syntax of {
     input: Syntax_Ext.xprod list,
     lexicon: Scan.lexicon,
-    gram: Parser.gram lazy,
+    gram: Parser.gram Synchronized.cache,
     consts: string Symtab.table,
     prmodes: string list,
     parse_ast_trtab: ((Proof.context -> Ast.ast list -> Ast.ast) * stamp) Symtab.table,
@@ -422,8 +421,6 @@
 
 fun eq_syntax (Syntax (_, s1), Syntax (_, s2)) = s1 = s2;
 
-fun force_syntax (Syntax ({gram, ...}, _)) = ignore (Lazy.force gram);
-
 datatype approx = Prefix of string | Infix of {assoc: Printer.assoc, delim: string, pri: int};
 fun get_approx (Syntax ({prtabs, ...}, _)) c =
   (case Printer.get_infix prtabs c of
@@ -437,7 +434,7 @@
 fun lookup_const (Syntax ({consts, ...}, _)) = Symtab.lookup consts;
 fun is_keyword (Syntax ({lexicon, ...}, _)) = Scan.is_literal lexicon o Symbol.explode;
 fun tokenize (Syntax ({lexicon, ...}, _)) = Lexicon.tokenize lexicon;
-fun parse (Syntax ({gram, ...}, _)) = Parser.parse (Lazy.force gram);
+fun parse (Syntax ({gram, ...}, _)) = Parser.parse (Synchronized.cache_eval gram);
 
 fun parse_ast_translation (Syntax ({parse_ast_trtab, ...}, _)) = lookup_tr parse_ast_trtab;
 fun parse_translation (Syntax ({parse_trtab, ...}, _)) = lookup_tr parse_trtab;
@@ -452,13 +449,20 @@
 val mode_default = ("", true);
 val mode_input = (Print_Mode.input, true);
 
+fun extend_gram new_xprods old_xprods gram =
+  Synchronized.cache (fn () =>
+    Parser.make_gram new_xprods old_xprods (Synchronized.cache_peek gram));
+
+fun new_gram new_xprods =
+  Synchronized.cache (fn () => Parser.make_gram new_xprods [] NONE);
+
 
 (* empty_syntax *)
 
 val empty_syntax = Syntax
   ({input = [],
     lexicon = Scan.empty_lexicon,
-    gram = Lazy.value Parser.empty_gram,
+    gram = Synchronized.cache (fn () => Parser.empty_gram),
     consts = Symtab.empty,
     prmodes = [],
     parse_ast_trtab = Symtab.empty,
@@ -491,7 +495,7 @@
     Syntax
     ({input = new_xprods @ input,
       lexicon = fold Scan.extend_lexicon (Syntax_Ext.delims_of new_xprods) lexicon,
-      gram = if null new_xprods then gram else Lazy.map (Parser.extend_gram new_xprods) gram,
+      gram = if null new_xprods then gram else extend_gram new_xprods input gram,
       consts = fold update_const consts2 consts1,
       prmodes = insert (op =) mode prmodes,
       parse_ast_trtab =
@@ -520,7 +524,7 @@
     Syntax
     ({input = input',
       lexicon = if changed then Scan.make_lexicon (Syntax_Ext.delims_of input') else lexicon,
-      gram = if changed then Lazy.value (Parser.make_gram input') else gram,
+      gram = if changed then new_gram input' else gram,
       consts = consts,
       prmodes = prmodes,
       parse_ast_trtab = remove_trtab (if_inout parse_ast_translation) parse_ast_trtab,
@@ -557,7 +561,7 @@
             else
               let
                 val input' = new_xprods2 @ input1;
-                val gram' = Lazy.lazy_name "Syntax.merge_syntax" (fn () => Parser.make_gram input');
+                val gram' = new_gram input';
               in (input', gram') end);
   in
     Syntax
@@ -592,7 +596,7 @@
   in
     [Pretty.block (Pretty.breaks
       (Pretty.str "lexicon:" :: map (Pretty.quote o Pretty.keyword1) (Scan.dest_lexicon lexicon))),
-      Pretty.big_list "productions:" (Parser.pretty_gram (Lazy.force gram)),
+      Pretty.big_list "productions:" (Parser.pretty_gram (Synchronized.cache_eval gram)),
       pretty_strs_qs "print modes:" prmodes']
   end;
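
Note: the grammar field thus changes from Parser.gram lazy to a Synchronized.cache. Each syntax extension wraps a fresh cache whose evaluation first peeks at the previous one: an already-forced (and not yet garbage-collected) grammar is extended incrementally, while a cold cache triggers one full build from new_xprods @ old_xprods, instead of forcing the whole Lazy.map chain as before. An Isabelle/ML sketch of this chaining, with an int and a hypothetical build function standing in for grammars:

  fun build all_prods = length all_prods;  (*placeholder for a full grammar construction*)

  fun extend_cache new_prods old_prods base =
    Synchronized.cache (fn () =>
      (case Synchronized.cache_peek base of
        SOME n => n + length new_prods            (*incremental: extend the peeked value*)
      | NONE => build (new_prods @ old_prods)));  (*cold cache: one full build*)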
 
--- a/src/Pure/context.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/context.ML	Wed May 10 08:20:53 2023 +0200
@@ -70,7 +70,7 @@
   val trace_theories: bool Unsynchronized.ref
   val trace_proofs: bool Unsynchronized.ref
   val allocations_trace: unit ->
-   {contexts: generic list,
+   {contexts: (generic * Position.T) list,
     active_contexts: int,
     active_theories: int,
     active_proofs: int,
@@ -170,7 +170,8 @@
     state *
     (*identity*)
     {theory_id: theory_id,
-     theory_token: theory Unsynchronized.ref} *
+     theory_token: theory Unsynchronized.ref,
+     theory_token_pos: Position.T} *
     (*ancestry*)
     {parents: theory list,         (*immediate predecessors*)
      ancestors: theory list} *     (*all predecessors -- canonical reverse order*)
@@ -181,7 +182,8 @@
   Prf_Undef
 | Prf of
     (*identity*)
-    proof Unsynchronized.ref *
+    proof Unsynchronized.ref *  (*token*)
+    Position.T *                (*token_pos*)
     theory *
     (*data*)
     Any.T Datatab.table;
@@ -202,9 +204,11 @@
   if ! guard then
     let
       val token = Unsynchronized.ref (! token0);
-      val _ = Synchronized.change var (cons (Weak.weak (SOME token)));
-    in (token, fn res => (token := res; res)) end
-  else (token0, I);
+      val pos = Position.thread_data ();
+      fun assign res =
+        (token := res; Synchronized.change var (cons (Weak.weak (SOME token))); res);
+    in (token, pos, assign) end
+  else (token0, Position.none, I);
 
 val theory_tokens = Synchronized.var "theory_tokens" ([]: theory Unsynchronized.weak_ref list);
 val proof_tokens = Synchronized.var "proof_tokens" ([]: Proof.context Unsynchronized.weak_ref list);
@@ -223,14 +227,20 @@
     val trace1 = Synchronized.value theory_tokens;
     val trace2 = Synchronized.value proof_tokens;
 
-    fun cons1 (Unsynchronized.ref (SOME (Unsynchronized.ref (thy as Thy _)))) = cons (Theory thy)
-      | cons1 _ = I;
-    fun cons2 (Unsynchronized.ref (SOME (Unsynchronized.ref (ctxt as Prf _)))) = cons (Proof ctxt)
-      | cons2 _ = I;
+    fun cons1 r =
+      (case Unsynchronized.weak_peek r of
+        SOME (thy as Thy (_, {theory_token_pos, ...}, _, _)) =>
+          cons (Theory thy, theory_token_pos)
+      | _ => I);
+    fun cons2 r =
+      (case Unsynchronized.weak_peek r of
+        SOME (ctxt as Prf (_, proof_token_pos, _, _)) =>
+          cons (Proof ctxt, proof_token_pos)
+      | _ => I);
 
     val contexts = build (fold cons1 trace1 #> fold cons2 trace2);
-    val active_theories = fold (fn Theory _ => Integer.add 1 | _ => I) contexts 0;
-    val active_proofs = fold (fn Proof _ => Integer.add 1 | _ => I) contexts 0;
+    val active_theories = fold (fn (Theory _, _) => Integer.add 1 | _ => I) contexts 0;
+    val active_proofs = fold (fn (Proof _, _) => Integer.add 1 | _ => I) contexts 0;
 
     val total_theories = length trace1;
     val total_proofs = length trace2;
@@ -424,8 +434,8 @@
 fun create_thy state ids name stage ancestry data =
   let
     val theory_id = make_theory_id {id = new_id (), ids = ids, name = name, stage = stage};
-    val (token, assign) = theory_token ();
-    val identity = {theory_id = theory_id, theory_token = token};
+    val (token, pos, assign) = theory_token ();
+    val identity = {theory_id = theory_id, theory_token = token, theory_token_pos = pos};
   in assign (Thy (state, identity, ancestry, data)) end;
 
 
@@ -555,21 +565,21 @@
 
 in
 
-fun raw_transfer thy' (ctxt as Prf (_, thy, data)) =
+fun raw_transfer thy' (ctxt as Prf (_, _, thy, data)) =
   if eq_thy (thy, thy') then ctxt
   else if proper_subthy (thy, thy') then
     let
-      val (token', assign) = proof_token ();
+      val (token', pos', assign) = proof_token ();
       val data' = init_new_data thy' data;
-    in assign (Prf (token', thy', data')) end
+    in assign (Prf (token', pos', thy', data')) end
   else error "Cannot transfer proof context: not a super theory";
 
 structure Proof_Context =
 struct
-  fun theory_of (Prf (_, thy, _)) = thy;
+  fun theory_of (Prf (_, _, thy, _)) = thy;
   fun init_global thy =
-    let val (token, assign) = proof_token ()
-    in assign (Prf (token, thy, init_data thy)) end;
+    let val (token, pos, assign) = proof_token ()
+    in assign (Prf (token, pos, thy, init_data thy)) end;
   fun get_global long thy name = init_global (get_theory long thy name);
 end;
 
@@ -582,14 +592,16 @@
     val _ = Synchronized.change kinds (Datatab.update (k, init));
   in k end;
 
-fun get k dest (Prf (_, thy, data)) =
+fun get k dest (Prf (_, _, thy, data)) =
   (case Datatab.lookup data k of
     SOME x => x
   | NONE => init_fallback k thy) |> dest;
 
-fun put k make x (Prf (token, thy, data)) =
-  let val (token, assign) = proof_token ()
-  in assign (Prf (token, thy, Datatab.update (k, make x) data)) end;
+fun put k make x (Prf (_, _, thy, data)) =
+  let
+    val (token', pos', assign) = proof_token ();
+    val data' = Datatab.update (k, make x) data;
+  in assign (Prf (token', pos', thy, data')) end;
 
 end;
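
Note: theory and proof tokens now carry the source position of their creation, and registration in the global token trace happens only once a context is actually assigned, so allocations_trace can report each live context together with where it was allocated. A standalone sketch of such a position-tagged registry (positions are plain strings here, and the synchronization that the real code gets from Synchronized.change is omitted):

  type 'a weak_ref = 'a ref option ref;
  fun weak_peek (ref (SOME (ref a))) = SOME a
    | weak_peek _ = NONE;

  val registry: (string weak_ref * string) list ref = ref [];

  fun register pos x =
    let val token = ref x
    in registry := (Weak.weak (SOME token), pos) :: ! registry; token end;

  fun live_allocations () =
    List.mapPartial (fn (w, pos) => Option.map (fn a => (a, pos)) (weak_peek w))
      (! registry);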
 
--- a/src/Pure/theory.ML	Tue May 09 22:00:36 2023 +0200
+++ b/src/Pure/theory.ML	Wed May 10 08:20:53 2023 +0200
@@ -189,7 +189,6 @@
       |> Sign.init_naming
       |> Sign.local_path
       |> apply_wrappers wrappers
-      |> tap (Syntax.force_syntax o Sign.syn_of)
     end;
 
 fun end_theory thy =