# HG changeset patch
# User wenzelm
# Date 1509811881 -3600
# Node ID af72fa58f71badb363e75a28321cb11f25c9fb35
# Parent  49850a679c2c8de0be825a22371983e3938a92ef
clarified lazy Completion within Outer_Syntax: measurable speedup of Sessions.deps;

diff -r 49850a679c2c -r af72fa58f71b src/Pure/General/completion.scala
--- a/src/Pure/General/completion.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Pure/General/completion.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -253,12 +253,13 @@
   }
 
 
-  /* init */
+  /* make */
 
   val empty: Completion = new Completion()
 
-  lazy val init: Completion =
-    empty.add_symbols.add_abbrevs(Completion.symbol_abbrevs ::: Completion.default_abbrevs)
+  def make(keywords: List[String], abbrevs: List[(String, String)]): Completion =
+    empty.add_symbols.add_keywords(keywords).add_abbrevs(
+      Completion.symbol_abbrevs ::: Completion.default_abbrevs ::: abbrevs)
 
 
   /* word parsers */
@@ -337,39 +338,12 @@
 }
 
 final class Completion private(
-  protected val keywords: Set[String] = Set.empty,
-  protected val words_lex: Scan.Lexicon = Scan.Lexicon.empty,
-  protected val words_map: Multi_Map[String, String] = Multi_Map.empty,
-  protected val abbrevs_lex: Scan.Lexicon = Scan.Lexicon.empty,
-  protected val abbrevs_map: Multi_Map[String, (String, String)] = Multi_Map.empty)
+  keywords: Set[String] = Set.empty,
+  words_lex: Scan.Lexicon = Scan.Lexicon.empty,
+  words_map: Multi_Map[String, String] = Multi_Map.empty,
+  abbrevs_lex: Scan.Lexicon = Scan.Lexicon.empty,
+  abbrevs_map: Multi_Map[String, (String, String)] = Multi_Map.empty)
 {
-  /* merge */
-
-  def is_empty: Boolean =
-    keywords.isEmpty &&
-    words_lex.is_empty &&
-    words_map.isEmpty &&
-    abbrevs_lex.is_empty &&
-    abbrevs_map.isEmpty
-
-  def ++ (other: Completion): Completion =
-    if (this eq other) this
-    else if (is_empty) other
-    else {
-      val keywords1 =
-        if (keywords eq other.keywords) keywords
-        else if (keywords.isEmpty) other.keywords
-        else (keywords /: other.keywords) { case (ks, k) => if (ks(k)) ks else ks + k }
-      val words_lex1 = words_lex ++ other.words_lex
-      val words_map1 = words_map ++ other.words_map
-      val abbrevs_lex1 = abbrevs_lex ++ other.abbrevs_lex
-      val abbrevs_map1 = abbrevs_map ++ other.abbrevs_map
-      if ((keywords eq keywords1) && (words_lex eq words_lex1) && (words_map eq words_map1) &&
-        (abbrevs_lex eq abbrevs_lex1) && (abbrevs_map eq abbrevs_map1)) this
-      else new Completion(keywords1, words_lex1, words_map1, abbrevs_lex1, abbrevs_map1)
-    }
-
-
   /* keywords */
 
   private def is_symbol(name: String): Boolean = Symbol.names.isDefinedAt(name)
@@ -380,6 +354,13 @@
   def add_keyword(keyword: String): Completion =
     new Completion(keywords + keyword, words_lex, words_map, abbrevs_lex, abbrevs_map)
 
+  def add_keywords(names: List[String]): Completion =
+  {
+    val keywords1 = (keywords /: names) { case (ks, k) => if (ks(k)) ks else ks + k }
+    if (keywords eq keywords1) this
+    else new Completion(keywords1, words_lex, words_map, abbrevs_lex, abbrevs_map)
+  }
+
 
   /* symbols and abbrevs */
 
diff -r 49850a679c2c -r af72fa58f71b src/Pure/Isar/outer_syntax.scala
--- a/src/Pure/Isar/outer_syntax.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -16,8 +16,6 @@
 
   val empty: Outer_Syntax = new Outer_Syntax()
 
-  lazy val init: Outer_Syntax = new Outer_Syntax(completion = Completion.init)
-
   def merge(syns: List[Outer_Syntax]): Outer_Syntax =
     (empty /: syns)(_ ++ _)
 
@@ -47,7 +45,6 @@
 
 final class Outer_Syntax private(
   val keywords: Keyword.Keywords = Keyword.Keywords.empty,
-  val completion: Completion = Completion.empty,
   val rev_abbrevs: Thy_Header.Abbrevs = Nil,
   val language_context: Completion.Language_Context = Completion.Language_Context.outer,
   val has_tokens: Boolean = true)
@@ -60,16 +57,8 @@
   /* keywords */
 
   def + (name: String, kind: String = "", exts: List[String] = Nil): Outer_Syntax =
-  {
-    val keywords1 = keywords + (name, kind, exts)
-    val completion1 =
-      completion.add_keyword(name).
-        add_abbrevs(
-          (if (Keyword.theory_block.contains(kind)) List((name, name + "\nbegin\n\u0007\nend"))
-           else Nil) :::
-          (if (Completion.Word_Parsers.is_word(name)) List((name, name)) else Nil))
-    new Outer_Syntax(keywords1, completion1, rev_abbrevs, language_context, true)
-  }
+    new Outer_Syntax(
+      keywords + (name, kind, exts), rev_abbrevs, language_context, has_tokens = true)
 
   def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
     (this /: keywords) {
@@ -87,17 +76,31 @@
   def add_abbrevs(new_abbrevs: Thy_Header.Abbrevs): Outer_Syntax =
     if (new_abbrevs.isEmpty) this
     else {
-      val completion1 =
-        completion.add_abbrevs(
-          (for ((a, b) <- new_abbrevs) yield {
+      val rev_abbrevs1 = Library.distinct(new_abbrevs) reverse_::: rev_abbrevs
+      new Outer_Syntax(keywords, rev_abbrevs1, language_context, has_tokens)
+    }
+
+
+  /* completion */
+
+  lazy val completion: Completion =
+  {
+    val completion_keywords = (keywords.minor.iterator ++ keywords.major.iterator).toList
+    val completion_abbrevs =
+      completion_keywords.flatMap(name =>
+        (if (Keyword.theory_block.contains(keywords.kinds(name)))
+          List((name, name + "\nbegin\n\u0007\nend"))
+         else Nil) :::
+        (if (Completion.Word_Parsers.is_word(name)) List((name, name)) else Nil)) :::
+      abbrevs.flatMap(
+        { case (a, b) =>
             val a1 = Symbol.decode(a)
             val a2 = Symbol.encode(a)
             val b1 = Symbol.decode(b)
             List((a1, b1), (a2, b1))
-          }).flatten)
-      val rev_abbrevs1 = Library.distinct(new_abbrevs) reverse_::: rev_abbrevs
-      new Outer_Syntax(keywords, completion1, rev_abbrevs1, language_context, has_tokens)
-    }
+        })
+    Completion.make(completion_keywords, completion_abbrevs)
+  }
 
 
   /* build */
@@ -110,11 +113,9 @@
     else if (this eq Outer_Syntax.empty) other
     else {
       val keywords1 = keywords ++ other.keywords
-      val completion1 = completion ++ other.completion
       val rev_abbrevs1 = Library.merge(rev_abbrevs, other.rev_abbrevs)
-      if ((keywords eq keywords1) && (completion eq completion1) && (rev_abbrevs eq rev_abbrevs1))
-        this
-      else new Outer_Syntax(keywords1, completion1, rev_abbrevs1, language_context, has_tokens)
+      if ((keywords eq keywords1) && (rev_abbrevs eq rev_abbrevs1)) this
+      else new Outer_Syntax(keywords1, rev_abbrevs1, language_context, has_tokens)
     }
 
 
@@ -127,13 +128,12 @@
   /* language context */
 
   def set_language_context(context: Completion.Language_Context): Outer_Syntax =
-    new Outer_Syntax(keywords, completion, rev_abbrevs, context, has_tokens)
+    new Outer_Syntax(keywords, rev_abbrevs, context, has_tokens)
 
   def no_tokens: Outer_Syntax =
   {
     require(keywords.is_empty)
     new Outer_Syntax(
-      completion = completion,
       rev_abbrevs = rev_abbrevs,
       language_context = language_context,
       has_tokens = false)
diff -r 49850a679c2c -r af72fa58f71b src/Pure/System/options.scala
--- a/src/Pure/System/options.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Pure/System/options.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -71,13 +71,13 @@
   private val PREFS_DIR = Path.explode("$ISABELLE_HOME_USER/etc")
   private val PREFS = PREFS_DIR + Path.basic("preferences")
 
-  lazy val options_syntax =
-    Outer_Syntax.init + ":" + "=" + "--" + Symbol.comment + Symbol.comment_decoded +
+  val options_syntax =
+    Outer_Syntax.empty + ":" + "=" + "--" + Symbol.comment + Symbol.comment_decoded +
     (SECTION, Keyword.DOCUMENT_HEADING) +
     (PUBLIC, Keyword.BEFORE_COMMAND) +
     (OPTION, Keyword.THY_DECL)
 
-  lazy val prefs_syntax = Outer_Syntax.init + "="
+  val prefs_syntax = Outer_Syntax.empty + "="
 
   trait Parser extends Parse.Parser
   {
diff -r 49850a679c2c -r af72fa58f71b src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Pure/Thy/sessions.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -654,8 +654,8 @@
   private val GLOBAL = "global"
   private val DOCUMENT_FILES = "document_files"
 
-  lazy val root_syntax =
-    Outer_Syntax.init + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN +
+  val root_syntax =
+    Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN +
     (CHAPTER, Keyword.THY_DECL) +
     (SESSION, Keyword.THY_DECL) +
     (DESCRIPTION, Keyword.QUASI_COMMAND) +
diff -r 49850a679c2c -r af72fa58f71b src/Pure/Thy/thy_header.scala
--- a/src/Pure/Thy/thy_header.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Pure/Thy/thy_header.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -65,8 +65,8 @@
   private val bootstrap_keywords =
     Keyword.Keywords.empty.add_keywords(bootstrap_header)
 
-  lazy val bootstrap_syntax: Outer_Syntax =
-    Outer_Syntax.init.add_keywords(bootstrap_header)
+  val bootstrap_syntax: Outer_Syntax =
+    Outer_Syntax.empty.add_keywords(bootstrap_header)
 
 
   /* file name vs. theory name */
diff -r 49850a679c2c -r af72fa58f71b src/Tools/VSCode/src/document_model.scala
--- a/src/Tools/VSCode/src/document_model.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Tools/VSCode/src/document_model.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -240,5 +240,5 @@
   /* syntax */
 
   def syntax(): Outer_Syntax =
-    if (is_theory) session.recent_syntax(node_name) else Outer_Syntax.init
+    if (is_theory) session.recent_syntax(node_name) else Outer_Syntax.empty
 }
diff -r 49850a679c2c -r af72fa58f71b src/Tools/jEdit/src/completion_popup.scala
--- a/src/Tools/jEdit/src/completion_popup.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Tools/jEdit/src/completion_popup.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -421,7 +421,7 @@
     name: String = null,
     instant_popups: Boolean = false,
     enter_adds_to_history: Boolean = true,
-    syntax: Outer_Syntax = Outer_Syntax.init) extends
+    syntax: Outer_Syntax = Outer_Syntax.empty) extends
   HistoryTextField(name, instant_popups, enter_adds_to_history)
 {
   text_field =>
diff -r 49850a679c2c -r af72fa58f71b src/Tools/jEdit/src/isabelle.scala
--- a/src/Tools/jEdit/src/isabelle.scala	Sat Nov 04 15:24:40 2017 +0100
+++ b/src/Tools/jEdit/src/isabelle.scala	Sat Nov 04 17:11:21 2017 +0100
@@ -37,16 +37,16 @@
     "isabelle-root",  // session ROOT
     "sml")  // Standard ML (not Isabelle/ML)
 
-  private lazy val ml_syntax: Outer_Syntax =
-    Outer_Syntax.init.no_tokens.
+  private val ml_syntax: Outer_Syntax =
+    Outer_Syntax.empty.no_tokens.
       set_language_context(Completion.Language_Context.ML_outer)
 
-  private lazy val sml_syntax: Outer_Syntax =
-    Outer_Syntax.init.no_tokens.
+  private val sml_syntax: Outer_Syntax =
+    Outer_Syntax.empty.no_tokens.
       set_language_context(Completion.Language_Context.SML_outer)
 
-  private lazy val news_syntax: Outer_Syntax =
-    Outer_Syntax.init.no_tokens
+  private val news_syntax: Outer_Syntax =
+    Outer_Syntax.empty.no_tokens
 
   def mode_syntax(mode: String): Option[Outer_Syntax] =
     mode match {
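
Note on the pattern (not part of the patch itself): the change replaces the eagerly maintained Completion field of Outer_Syntax, which was updated on every "+" and merged on every "++", with a lazy val completion derived from the accumulated keywords and abbrevs on first use. The sketch below is a minimal, hypothetical Scala illustration of that pattern; the names Syntax, completion_index and Lazy_Completion_Demo are invented for the example and are not Isabelle/Scala identifiers.

// Hypothetical sketch of the "derived data as lazy val" pattern applied by this patch;
// the identifiers here are invented for illustration only.
object Lazy_Completion_Demo
{
  final class Syntax private(val keywords: Set[String])
  {
    // cheap incremental construction, analogous to Outer_Syntax.+ and ++
    def + (name: String): Syntax =
      if (keywords(name)) this else new Syntax(keywords + name)

    def ++ (other: Syntax): Syntax =
      if (this eq other) this else new Syntax(keywords ++ other.keywords)

    // derived table, computed at most once per value and only when requested;
    // before the patch the analogous Completion was rebuilt eagerly on every "+"
    lazy val completion_index: Map[Char, List[String]] =
    {
      println("building completion index")  // demo side effect to show when it runs
      keywords.toList.sorted.groupBy(_.head)
    }
  }

  object Syntax { val empty: Syntax = new Syntax(Set.empty) }

  def main(args: Array[String]): Unit =
  {
    // many additions and merges (as in dependency analysis): no index is built here
    val syntax = (Syntax.empty + "theory" + "lemma" + "proof") ++ (Syntax.empty + "qed")

    // built on first access only, then memoized for this value
    println(syntax.completion_index('l'))
    println(syntax.completion_index('q'))
  }
}

With this shape, code paths that only build and merge syntaxes never pay for completion tables, while interactive callers still get a memoized table per resulting value, which is consistent with the "measurable speedup of Sessions.deps" stated in the commit message.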