src/Pure/Isar/keyword.scala
author wenzelm
Sun Nov 26 13:19:52 2017 +0100
changeset 67090 0ec94bb9cec4
parent 66919 1f93e376aeb6
child 69913 ca515cf61651
permissions -rw-r--r--
clarified lazy lexicons within Keywords: measurable speedup of Sessions.deps;
/*  Title:      Pure/Isar/keyword.scala
    Author:     Makarius

Isar keyword classification.
*/

package isabelle


object Keyword
{
  /** keyword classification **/

  /* kinds */

  val DIAG = "diag"
  val DOCUMENT_HEADING = "document_heading"
  val DOCUMENT_BODY = "document_body"
  val DOCUMENT_RAW = "document_raw"
  val THY_BEGIN = "thy_begin"
  val THY_END = "thy_end"
  val THY_DECL = "thy_decl"
  val THY_DECL_BLOCK = "thy_decl_block"
  val THY_LOAD = "thy_load"
  val THY_GOAL = "thy_goal"
  val QED = "qed"
  val QED_SCRIPT = "qed_script"
  val QED_BLOCK = "qed_block"
  val QED_GLOBAL = "qed_global"
  val PRF_GOAL = "prf_goal"
  val PRF_BLOCK = "prf_block"
  val NEXT_BLOCK = "next_block"
  val PRF_OPEN = "prf_open"
  val PRF_CLOSE = "prf_close"
  val PRF_CHAIN = "prf_chain"
  val PRF_DECL = "prf_decl"
  val PRF_ASM = "prf_asm"
  val PRF_ASM_GOAL = "prf_asm_goal"
  val PRF_SCRIPT = "prf_script"
  val PRF_SCRIPT_GOAL = "prf_script_goal"
  val PRF_SCRIPT_ASM_GOAL = "prf_script_asm_goal"

  val BEFORE_COMMAND = "before_command"
  val QUASI_COMMAND = "quasi_command"


  /* command categories */

  val vacous = Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW)

  val diag = Set(DIAG)

  val document_heading = Set(DOCUMENT_HEADING)
  val document_body = Set(DOCUMENT_BODY)
  val document_raw = Set(DOCUMENT_RAW)
  val document = Set(DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW)

  val theory_begin = Set(THY_BEGIN)
  val theory_end = Set(THY_END)

  val theory_load = Set(THY_LOAD)

  val theory = Set(THY_BEGIN, THY_END, THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_GOAL)

  val theory_block = Set(THY_BEGIN, THY_DECL_BLOCK)

  val theory_body = Set(THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_GOAL)

  val prf_script = Set(PRF_SCRIPT)

  val proof =
    Set(QED, QED_SCRIPT, QED_BLOCK, QED_GLOBAL, PRF_GOAL, PRF_BLOCK, NEXT_BLOCK, PRF_OPEN,
      PRF_CLOSE, PRF_CHAIN, PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_SCRIPT, PRF_SCRIPT_GOAL,
      PRF_SCRIPT_ASM_GOAL)

  val proof_body =
    Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW, PRF_BLOCK, NEXT_BLOCK, PRF_OPEN,
      PRF_CLOSE, PRF_CHAIN, PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_SCRIPT, PRF_SCRIPT_GOAL,
      PRF_SCRIPT_ASM_GOAL)

  val theory_goal = Set(THY_GOAL)
  val proof_goal = Set(PRF_GOAL, PRF_ASM_GOAL, PRF_SCRIPT_GOAL, PRF_SCRIPT_ASM_GOAL)
  val qed = Set(QED, QED_SCRIPT, QED_BLOCK)
  val qed_global = Set(QED_GLOBAL)

  val proof_open = proof_goal + PRF_OPEN
  val proof_close = qed + PRF_CLOSE
  val proof_enclose = Set(PRF_BLOCK, NEXT_BLOCK, QED_BLOCK, PRF_CLOSE)

  val close_structure = Set(NEXT_BLOCK, QED_BLOCK, PRF_CLOSE, THY_END)
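
  /* Illustrative note, not part of the original file: proof_open and proof_close are derived
     sets, i.e. the kinds that open a nested proof context (the goal kinds plus PRF_OPEN) and
     the kinds that close one again (the qed kinds plus PRF_CLOSE); close_structure additionally
     covers commands that end an enclosing block, including THY_END.  These Set[String] values
     are intended to be used as predicates, e.g. via is_command below. */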



  /** keyword tables **/

  object Spec
  {
    val none: Spec = Spec("")
  }
  sealed case class Spec(kind: String, exts: List[String] = Nil, tags: List[String] = Nil)
  {
    def is_none: Boolean = kind == ""

    override def toString: String =
      kind +
        (if (exts.isEmpty) "" else " (" + commas_quote(exts) + ")") +
        (if (tags.isEmpty) "" else tags.map(quote).mkString(" % ", " % ", ""))
  }
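
  /* Illustrative sketch, not part of the original file: how Spec renders, assuming the
     library helpers quote/commas_quote wrap strings in ASCII double-quotes and join them
     with ", " (hypothetical values):

       Spec("thy_load", exts = List("ML")).toString   yields   thy_load ("ML")
       Spec("qed", tags = List("proof")).toString     yields   qed % "proof"
  */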

  object Keywords
  {
    def empty: Keywords = new Keywords()
  }

  class Keywords private(
    val kinds: Map[String, String] = Map.empty,
    val load_commands: Map[String, List[String]] = Map.empty)
  {
    override def toString: String =
    {
      val entries =
        for ((name, kind) <- kinds.toList.sortBy(_._1)) yield {
          val exts = load_commands.getOrElse(name, Nil)
          val kind_decl =
            if (kind == "") ""
            else " :: " + quote(kind) + (if (exts.isEmpty) "" else " (" + commas_quote(exts) + ")")
          quote(name) + kind_decl
        }
      entries.mkString("keywords\n  ", " and\n  ", "")
    }
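
    /* Illustrative sketch, not part of the original file: the printed form mirrors the
       theory-header "keywords" syntax.  For a hypothetical table with
       kinds = Map("ML_file" -> "thy_load", "lemma" -> "thy_goal") and
       load_commands = Map("ML_file" -> List("ML")), toString yields:

         keywords
           "ML_file" :: "thy_load" ("ML") and
           "lemma" :: "thy_goal"
    */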


    /* merge */

    def is_empty: Boolean = kinds.isEmpty

    def ++ (other: Keywords): Keywords =
      if (this eq other) this
      else if (is_empty) other
      else {
        val kinds1 =
          if (kinds eq other.kinds) kinds
          else if (kinds.isEmpty) other.kinds
          else (kinds /: other.kinds) { case (m, e) => if (m.isDefinedAt(e._1)) m else m + e }
        val load_commands1 =
          if (load_commands eq other.load_commands) load_commands
          else if (load_commands.isEmpty) other.load_commands
          else
            (load_commands /: other.load_commands) {
              case (m, e) => if (m.isDefinedAt(e._1)) m else m + e }
        new Keywords(kinds1, load_commands1)
      }
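
    /* Illustrative note, not part of the original file: the merge is left-biased, i.e. for a
       keyword present in both tables the entry of `this` wins, so (a ++ b) keeps a's kind and
       load-command extensions for any shared name.  The preceding `eq` and isEmpty checks only
       avoid rebuilding maps when nothing would change. */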


    /* add keywords */

    def + (name: String, kind: String = "", exts: List[String] = Nil): Keywords =
    {
      val kinds1 = kinds + (name -> kind)
      val load_commands1 =
        if (kind == THY_LOAD) {
          if (!Symbol.iterator(name).forall(Symbol.is_ascii(_)))
            error("Bad theory load command " + quote(name))
          load_commands + (name -> exts)
        }
        else load_commands
      new Keywords(kinds1, load_commands1)
    }
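
    /* Illustrative sketch, not part of the original file (hypothetical command name): adding
       a load command registers it in both tables, e.g.

         Keywords.empty + ("my_file_cmd", THY_LOAD, List("txt"))

       maps "my_file_cmd" to kind "thy_load" and to the extensions List("txt"); a THY_LOAD
       name containing non-ASCII symbols is rejected via error(...) above. */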

    def add_keywords(header: Thy_Header.Keywords): Keywords =
      (this /: header) {
        case (keywords, (name, spec)) =>
          if (spec.is_none)
            keywords + Symbol.decode(name) + Symbol.encode(name)
          else
            keywords +
              (Symbol.decode(name), spec.kind, spec.exts) +
              (Symbol.encode(name), spec.kind, spec.exts)
      }
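
    /* Illustrative note, not part of the original file: each header keyword is entered twice,
       once in its Symbol-decoded (Unicode) spelling and once in its Symbol-encoded ("\<...>")
       spelling, so token sources match under either representation.  For a keyword without
       special symbols both spellings coincide and the second addition overwrites the first
       with an identical entry. */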


    /* command kind */

    def is_command(token: Token, check_kind: String => Boolean): Boolean =
      token.is_command &&
        (kinds.get(token.source) match { case Some(k) => check_kind(k) case None => false })
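
    /* Illustrative sketch, not part of the original file: check_kind is typically one of the
       category sets above, used as a predicate on the kind string (hypothetical `keywords`
       table and `tok` token):

         keywords.is_command(tok, Keyword.theory_goal)   // tok starts a theory-level goal
         keywords.is_command(tok, Keyword.qed)           // tok concludes a proof
    */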

    def is_before_command(token: Token): Boolean =
      token.is_keyword && kinds.get(token.source) == Some(BEFORE_COMMAND)

    def is_quasi_command(token: Token): Boolean =
      token.is_keyword && kinds.get(token.source) == Some(QUASI_COMMAND)

    def is_indent_command(token: Token): Boolean =
      token.is_begin_or_command || is_quasi_command(token)


    /* load commands */

    def load_commands_in(text: String): Boolean =
      load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
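
    /* Illustrative note, not part of the original file: this is a cheap over-approximation
       that looks for the load-command name as a plain substring of the given text (so it also
       fires inside string literals or comments); a caller can use it as a quick pre-check
       before a precise token scan.  E.g. with "my_file_cmd" registered (hypothetical name),
       load_commands_in("my_file_cmd \"a.txt\"") is true. */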


    /* lexicons */

    private def make_lexicon(is_minor: Boolean): Scan.Lexicon =
      (Scan.Lexicon.empty /: kinds)(
        {
          case (lex, (name, kind)) =>
            if ((kind == "" || kind == BEFORE_COMMAND || kind == QUASI_COMMAND) == is_minor)
              lex + name
            else lex
        })

    lazy val minor: Scan.Lexicon = make_lexicon(true)
    lazy val major: Scan.Lexicon = make_lexicon(false)
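
    /* Illustrative note, not part of the original file: `minor` collects keywords without a
       proper command kind (plain keywords, BEFORE_COMMAND, QUASI_COMMAND), `major` the actual
       command keywords.  Both lexicons are lazy, so merging Keywords tables (as in
       Sessions.deps, cf. the changeset message above) does not pay for lexicon construction
       until a parser first needs them. */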
  }
}