explicit type Keyword.Keywords;
author wenzelm
Wed Nov 05 16:57:12 2014 +0100 (2014-11-05)
changeset 58900 1435cc20b022
parent 58899 0a793c580685
child 58901 47809a811eba
explicit type Keyword.Keywords;
src/Pure/Isar/keyword.ML
src/Pure/Isar/keyword.scala
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/token.scala
src/Pure/Thy/thy_header.scala
src/Tools/jEdit/src/rendering.scala
src/Tools/jEdit/src/structure_matching.scala
     1.1 --- a/src/Pure/Isar/keyword.ML	Wed Nov 05 15:32:11 2014 +0100
     1.2 +++ b/src/Pure/Isar/keyword.ML	Wed Nov 05 16:57:12 2014 +0100
     1.3 @@ -1,7 +1,7 @@
     1.4  (*  Title:      Pure/Isar/keyword.ML
     1.5      Author:     Makarius
     1.6  
     1.7 -Isar command keyword classification and global keyword tables.
     1.8 +Isar keyword classification.
     1.9  *)
    1.10  
    1.11  signature KEYWORD =
     2.1 --- a/src/Pure/Isar/keyword.scala	Wed Nov 05 15:32:11 2014 +0100
     2.2 +++ b/src/Pure/Isar/keyword.scala	Wed Nov 05 16:57:12 2014 +0100
     2.3 @@ -1,7 +1,7 @@
     2.4  /*  Title:      Pure/Isar/keyword.scala
     2.5      Author:     Makarius
     2.6  
     2.7 -Isar command keyword classification and keyword tables.
     2.8 +Isar keyword classification.
     2.9  */
    2.10  
    2.11  package isabelle
    2.12 @@ -9,6 +9,8 @@
    2.13  
    2.14  object Keyword
    2.15  {
    2.16 +  /** keyword classification **/
    2.17 +
    2.18    /* kinds */
    2.19  
    2.20    val MINOR = "minor"
    2.21 @@ -60,5 +62,70 @@
    2.22    val proof_goal = Set(PRF_GOAL, PRF_ASM_GOAL, PRF_ASM_GOAL_SCRIPT)
    2.23    val qed = Set(QED, QED_SCRIPT, QED_BLOCK)
    2.24    val qed_global = Set(QED_GLOBAL)
    2.25 +
    2.26 +
    2.27 +  type Spec = ((String, List[String]), List[String])
    2.28 +
    2.29 +
    2.30 +
    2.31 +  /** keyword tables **/
    2.32 +
    2.33 +  object Keywords
    2.34 +  {
    2.35 +    def empty: Keywords = new Keywords()
    2.36 +
    2.37 +    def apply(names: List[String]): Keywords =
    2.38 +      (empty /: names)({ case (keywords, a) => keywords + (a, (MINOR, Nil)) })
    2.39 +  }
    2.40 +
    2.41 +  class Keywords private(
    2.42 +    keywords: Map[String, (String, List[String])] = Map.empty,
    2.43 +    val minor: Scan.Lexicon = Scan.Lexicon.empty,
    2.44 +    val major: Scan.Lexicon = Scan.Lexicon.empty)
    2.45 +  {
    2.46 +    /* content */
    2.47 +
    2.48 +    override def toString: String =
    2.49 +      (for ((name, (kind, files)) <- keywords) yield {
    2.50 +        if (kind == MINOR) quote(name)
    2.51 +        else
    2.52 +          quote(name) + " :: " + quote(kind) +
    2.53 +          (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
    2.54 +      }).toList.sorted.mkString("keywords\n  ", " and\n  ", "")
    2.55 +
    2.56 +    def is_empty: Boolean = keywords.isEmpty
    2.57 +
    2.58 +    def + (name: String, kind: (String, List[String])): Keywords =
    2.59 +    {
    2.60 +      val keywords1 = keywords + (name -> kind)
    2.61 +      val (minor1, major1) =
    2.62 +        if (kind._1 == MINOR) (minor + name, major) else (minor, major + name)
    2.63 +      new Keywords(keywords1, minor1, major1)
    2.64 +    }
    2.65 +
    2.66 +
    2.67 +    /* kind */
    2.68 +
    2.69 +    def kind(name: String): Option[String] = keywords.get(name).map(_._1)
    2.70 +
    2.71 +    def command_kind(token: Token, pred: String => Boolean): Boolean =
    2.72 +      token.is_command &&
    2.73 +        (kind(token.source) match { case Some(k) => k != MINOR && pred(k) case None => false })
    2.74 +
    2.75 +
    2.76 +    /* load commands */
    2.77 +
    2.78 +    def load_command(name: String): Option[List[String]] =
    2.79 +      keywords.get(name) match {
    2.80 +        case Some((THY_LOAD, exts)) => Some(exts)
    2.81 +        case _ => None
    2.82 +      }
    2.83 +
    2.84 +    private lazy val load_commands: List[(String, List[String])] =
    2.85 +      (for ((name, (THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList
    2.86 +
    2.87 +    def load_commands_in(text: String): Boolean =
    2.88 +      load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
    2.89 +  }
    2.90  }
    2.91  
     3.1 --- a/src/Pure/Isar/outer_syntax.scala	Wed Nov 05 15:32:11 2014 +0100
     3.2 +++ b/src/Pure/Isar/outer_syntax.scala	Wed Nov 05 16:57:12 2014 +0100
     3.3 @@ -74,66 +74,25 @@
     3.4  }
     3.5  
     3.6  final class Outer_Syntax private(
     3.7 -  keywords: Map[String, (String, List[String])] = Map.empty,
     3.8 -  minor: Scan.Lexicon = Scan.Lexicon.empty,
     3.9 -  major: Scan.Lexicon = Scan.Lexicon.empty,
    3.10 +  val keywords: Keyword.Keywords = Keyword.Keywords.empty,
    3.11    val completion: Completion = Completion.empty,
    3.12    val language_context: Completion.Language_Context = Completion.Language_Context.outer,
    3.13    val has_tokens: Boolean = true) extends Prover.Syntax
    3.14  {
    3.15    /** syntax content **/
    3.16  
    3.17 -  override def toString: String =
    3.18 -    (for ((name, (kind, files)) <- keywords) yield {
    3.19 -      if (kind == Keyword.MINOR) quote(name)
    3.20 -      else
    3.21 -        quote(name) + " :: " + quote(kind) +
    3.22 -        (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
    3.23 -    }).toList.sorted.mkString("keywords\n  ", " and\n  ", "")
    3.24 -
    3.25 -
    3.26 -  /* keyword kind */
    3.27 -
    3.28 -  def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name)
    3.29 -  def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1)
    3.30 -
    3.31 -  def is_command(name: String): Boolean =
    3.32 -    keyword_kind(name) match {
    3.33 -      case Some(kind) => kind != Keyword.MINOR
    3.34 -      case None => false
    3.35 -    }
    3.36 -
    3.37 -  def command_kind(token: Token, pred: String => Boolean): Boolean =
    3.38 -    token.is_command && is_command(token.source) &&
    3.39 -      pred(keyword_kind(token.source).get)
    3.40 -
    3.41 -
    3.42 -  /* load commands */
    3.43 -
    3.44 -  def load_command(name: String): Option[List[String]] =
    3.45 -    keywords.get(name) match {
    3.46 -      case Some((Keyword.THY_LOAD, exts)) => Some(exts)
    3.47 -      case _ => None
    3.48 -    }
    3.49 -
    3.50 -  val load_commands: List[(String, List[String])] =
    3.51 -    (for ((name, (Keyword.THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList
    3.52 -
    3.53 -  def load_commands_in(text: String): Boolean =
    3.54 -    load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
    3.55 +  override def toString: String = keywords.toString
    3.56  
    3.57  
    3.58    /* add keywords */
    3.59  
    3.60    def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
    3.61    {
    3.62 -    val keywords1 = keywords + (name -> kind)
    3.63 -    val (minor1, major1) =
    3.64 -      if (kind._1 == Keyword.MINOR) (minor + name, major) else (minor, major + name)
    3.65 +    val keywords1 = keywords + (name, kind)
    3.66      val completion1 =
    3.67        if (replace == Some("")) completion
    3.68        else completion + (name, replace getOrElse name)
    3.69 -    new Outer_Syntax(keywords1, minor1, major1, completion1, language_context, true)
    3.70 +    new Outer_Syntax(keywords1, completion1, language_context, true)
    3.71    }
    3.72  
    3.73    def + (name: String, kind: (String, List[String])): Outer_Syntax =
    3.74 @@ -157,14 +116,20 @@
    3.75      }
    3.76  
    3.77  
    3.78 +  /* load commands */
    3.79 +
    3.80 +  def load_command(name: String): Option[List[String]] = keywords.load_command(name)
    3.81 +  def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)
    3.82 +
    3.83 +
    3.84    /* language context */
    3.85  
    3.86    def set_language_context(context: Completion.Language_Context): Outer_Syntax =
    3.87 -    new Outer_Syntax(keywords, minor, major, completion, context, has_tokens)
    3.88 +    new Outer_Syntax(keywords, completion, context, has_tokens)
    3.89  
    3.90    def no_tokens: Outer_Syntax =
    3.91    {
    3.92 -    require(keywords.isEmpty && minor.isEmpty && major.isEmpty)
    3.93 +    require(keywords.is_empty)
    3.94      new Outer_Syntax(
    3.95        completion = completion,
    3.96        language_context = language_context,
    3.97 @@ -184,7 +149,7 @@
    3.98      val command1 = tokens.exists(_.is_command)
    3.99  
   3.100      val depth1 =
   3.101 -      if (tokens.exists(tok => command_kind(tok, Keyword.theory))) 0
   3.102 +      if (tokens.exists(tok => keywords.command_kind(tok, Keyword.theory))) 0
   3.103        else if (command1) struct.after_span_depth
   3.104        else struct.span_depth
   3.105  
   3.106 @@ -192,11 +157,16 @@
   3.107        ((struct.span_depth, struct.after_span_depth) /: tokens) {
   3.108          case ((x, y), tok) =>
   3.109            if (tok.is_command) {
   3.110 -            if (command_kind(tok, Keyword.theory_goal)) (2, 1)
   3.111 -            else if (command_kind(tok, Keyword.theory)) (1, 0)
   3.112 -            else if (command_kind(tok, Keyword.proof_goal) || tok.is_begin_block) (y + 2, y + 1)
   3.113 -            else if (command_kind(tok, Keyword.qed) || tok.is_end_block) (y + 1, y - 1)
   3.114 -            else if (command_kind(tok, Keyword.qed_global)) (1, 0)
   3.115 +            if (keywords.command_kind(tok, Keyword.theory_goal))
   3.116 +              (2, 1)
   3.117 +            else if (keywords.command_kind(tok, Keyword.theory))
   3.118 +              (1, 0)
   3.119 +            else if (keywords.command_kind(tok, Keyword.proof_goal) || tok.is_begin_block)
   3.120 +              (y + 2, y + 1)
   3.121 +            else if (keywords.command_kind(tok, Keyword.qed) || tok.is_end_block)
   3.122 +              (y + 1, y - 1)
   3.123 +            else if (keywords.command_kind(tok, Keyword.qed_global))
   3.124 +              (1, 0)
   3.125              else (x, y)
   3.126            }
   3.127            else (x, y)
   3.128 @@ -211,7 +181,7 @@
   3.129    def scan(input: CharSequence): List[Token] =
   3.130    {
   3.131      val in: Reader[Char] = new CharSequenceReader(input)
   3.132 -    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(minor, major)), in) match {
   3.133 +    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(keywords)), in) match {
   3.134        case Token.Parsers.Success(tokens, _) => tokens
   3.135        case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
   3.136      }
   3.137 @@ -223,7 +193,7 @@
   3.138      val toks = new mutable.ListBuffer[Token]
   3.139      var ctxt = context
   3.140      while (!in.atEnd) {
   3.141 -      Token.Parsers.parse(Token.Parsers.token_line(minor, major, ctxt), in) match {
   3.142 +      Token.Parsers.parse(Token.Parsers.token_line(keywords, ctxt), in) match {
   3.143          case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
   3.144          case Token.Parsers.NoSuccess(_, rest) =>
   3.145            error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
   3.146 @@ -284,7 +254,7 @@
   3.147        case "subsection" => Some(2)
   3.148        case "subsubsection" => Some(3)
   3.149        case _ =>
   3.150 -        keyword_kind(command.name) match {
   3.151 +        keywords.kind(command.name) match {
   3.152            case Some(kind) if Keyword.theory(kind) => Some(4)
   3.153            case _ => None
   3.154          }
     4.1 --- a/src/Pure/Isar/token.scala	Wed Nov 05 15:32:11 2014 +0100
     4.2 +++ b/src/Pure/Isar/token.scala	Wed Nov 05 16:57:12 2014 +0100
     4.3 @@ -51,7 +51,7 @@
     4.4        string | (alt_string | (verb | (cart | cmt)))
     4.5      }
     4.6  
     4.7 -    private def other_token(minor: Scan.Lexicon, major: Scan.Lexicon): Parser[Token] =
     4.8 +    private def other_token(keywords: Keyword.Keywords): Parser[Token] =
     4.9      {
    4.10        val letdigs1 = many1(Symbol.is_letdig)
    4.11        val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
    4.12 @@ -80,8 +80,8 @@
    4.13          (x => Token(Token.Kind.SYM_IDENT, x))
    4.14  
    4.15        val keyword =
    4.16 -        literal(minor) ^^ (x => Token(Token.Kind.KEYWORD, x)) |||
    4.17 -        literal(major) ^^ (x => Token(Token.Kind.COMMAND, x))
    4.18 +        literal(keywords.minor) ^^ (x => Token(Token.Kind.KEYWORD, x)) |||
    4.19 +        literal(keywords.major) ^^ (x => Token(Token.Kind.COMMAND, x))
    4.20  
    4.21        val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
    4.22  
    4.23 @@ -98,10 +98,10 @@
    4.24            keyword) | bad))
    4.25      }
    4.26  
    4.27 -    def token(minor: Scan.Lexicon, major: Scan.Lexicon): Parser[Token] =
    4.28 -      delimited_token | other_token(minor, major)
    4.29 +    def token(keywords: Keyword.Keywords): Parser[Token] =
    4.30 +      delimited_token | other_token(keywords)
    4.31  
    4.32 -    def token_line(minor: Scan.Lexicon, major: Scan.Lexicon, ctxt: Scan.Line_Context)
    4.33 +    def token_line(keywords: Keyword.Keywords, ctxt: Scan.Line_Context)
    4.34        : Parser[(Token, Scan.Line_Context)] =
    4.35      {
    4.36        val string =
    4.37 @@ -111,7 +111,7 @@
    4.38        val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
    4.39        val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
    4.40        val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
    4.41 -      val other = other_token(minor, major) ^^ { case x => (x, Scan.Finished) }
    4.42 +      val other = other_token(keywords) ^^ { case x => (x, Scan.Finished) }
    4.43  
    4.44        string | (alt_string | (verb | (cart | (cmt | other))))
    4.45      }
     5.1 --- a/src/Pure/Thy/thy_header.scala	Wed Nov 05 15:32:11 2014 +0100
     5.2 +++ b/src/Pure/Thy/thy_header.scala	Wed Nov 05 16:57:12 2014 +0100
     5.3 @@ -27,9 +27,10 @@
     5.4    val AND = "and"
     5.5    val BEGIN = "begin"
     5.6  
     5.7 -  private val lexicon =
     5.8 -    Scan.Lexicon("%", "(", ")", ",", "::", "==", AND, BEGIN,
     5.9 -      HEADER, CHAPTER, SECTION, SUBSECTION, SUBSUBSECTION, IMPORTS, KEYWORDS, THEORY)
    5.10 +  private val keywords =
    5.11 +    Keyword.Keywords(
    5.12 +      List("%", "(", ")", ",", "::", "==", AND, BEGIN,
    5.13 +        HEADER, CHAPTER, SECTION, SUBSECTION, SUBSUBSECTION, IMPORTS, KEYWORDS, THEORY))
    5.14  
    5.15  
    5.16    /* theory file name */
    5.17 @@ -95,7 +96,7 @@
    5.18  
    5.19    def read(reader: Reader[Char]): Thy_Header =
    5.20    {
    5.21 -    val token = Token.Parsers.token(lexicon, Scan.Lexicon.empty)
    5.22 +    val token = Token.Parsers.token(keywords)
    5.23      val toks = new mutable.ListBuffer[Token]
    5.24  
    5.25      @tailrec def scan_to_begin(in: Reader[Char])
    5.26 @@ -121,7 +122,7 @@
    5.27  
    5.28    /* keywords */
    5.29  
    5.30 -  type Keywords = List[(String, Option[((String, List[String]), List[String])], Option[String])]
    5.31 +  type Keywords = List[(String, Option[Keyword.Spec], Option[String])]
    5.32  }
    5.33  
    5.34  
     6.1 --- a/src/Tools/jEdit/src/rendering.scala	Wed Nov 05 15:32:11 2014 +0100
     6.2 +++ b/src/Tools/jEdit/src/rendering.scala	Wed Nov 05 16:57:12 2014 +0100
     6.3 @@ -85,7 +85,7 @@
     6.4    }
     6.5  
     6.6    def token_markup(syntax: Outer_Syntax, token: Token): Byte =
     6.7 -    if (token.is_command) command_style(syntax.keyword_kind(token.content).getOrElse(""))
     6.8 +    if (token.is_command) command_style(syntax.keywords.kind(token.content).getOrElse(""))
     6.9      else if (token.is_delimiter) JEditToken.OPERATOR
    6.10      else token_style(token.kind)
    6.11  
     7.1 --- a/src/Tools/jEdit/src/structure_matching.scala	Wed Nov 05 15:32:11 2014 +0100
     7.2 +++ b/src/Tools/jEdit/src/structure_matching.scala	Wed Nov 05 16:57:12 2014 +0100
     7.3 @@ -44,6 +44,9 @@
     7.4          case Some(syntax) =>
     7.5            val limit = PIDE.options.value.int("jedit_structure_limit") max 0
     7.6  
     7.7 +          def command_kind(token: Token, pred: String => Boolean): Boolean =
     7.8 +            syntax.keywords.command_kind(token, pred)
     7.9 +
    7.10            def iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
    7.11              Token_Markup.line_token_iterator(syntax, buffer, line, line + lim).
    7.12                filter(_.info.is_proper)
    7.13 @@ -60,45 +63,45 @@
    7.14  
    7.15            iterator(caret_line, 1).find(info => info.range.touches(caret))
    7.16            match {
    7.17 -            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.theory_goal) =>
    7.18 +            case Some(Text.Info(range1, tok)) if command_kind(tok, Keyword.theory_goal) =>
    7.19                find_block(
    7.20 -                syntax.command_kind(_, Keyword.proof_goal),
    7.21 -                syntax.command_kind(_, Keyword.qed),
    7.22 -                syntax.command_kind(_, Keyword.qed_global),
    7.23 +                command_kind(_, Keyword.proof_goal),
    7.24 +                command_kind(_, Keyword.qed),
    7.25 +                command_kind(_, Keyword.qed_global),
    7.26                  t =>
    7.27 -                  syntax.command_kind(t, Keyword.diag) ||
    7.28 -                  syntax.command_kind(t, Keyword.proof),
    7.29 +                  command_kind(t, Keyword.diag) ||
    7.30 +                  command_kind(t, Keyword.proof),
    7.31                  caret_iterator())
    7.32  
    7.33 -            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.proof_goal) =>
    7.34 +            case Some(Text.Info(range1, tok)) if command_kind(tok, Keyword.proof_goal) =>
    7.35                find_block(
    7.36 -                syntax.command_kind(_, Keyword.proof_goal),
    7.37 -                syntax.command_kind(_, Keyword.qed),
    7.38 +                command_kind(_, Keyword.proof_goal),
    7.39 +                command_kind(_, Keyword.qed),
    7.40                  _ => false,
    7.41                  t =>
    7.42 -                  syntax.command_kind(t, Keyword.diag) ||
    7.43 -                  syntax.command_kind(t, Keyword.proof),
    7.44 +                  command_kind(t, Keyword.diag) ||
    7.45 +                  command_kind(t, Keyword.proof),
    7.46                  caret_iterator())
    7.47  
    7.48 -            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed_global) =>
    7.49 -              rev_caret_iterator().find(info => syntax.command_kind(info.info, Keyword.theory))
    7.50 +            case Some(Text.Info(range1, tok)) if command_kind(tok, Keyword.qed_global) =>
    7.51 +              rev_caret_iterator().find(info => command_kind(info.info, Keyword.theory))
    7.52                match {
    7.53                  case Some(Text.Info(range2, tok))
    7.54 -                if syntax.command_kind(tok, Keyword.theory_goal) => Some((range1, range2))
    7.55 +                if command_kind(tok, Keyword.theory_goal) => Some((range1, range2))
    7.56                  case _ => None
    7.57                }
    7.58  
    7.59 -            case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed) =>
    7.60 +            case Some(Text.Info(range1, tok)) if command_kind(tok, Keyword.qed) =>
    7.61                find_block(
    7.62 -                syntax.command_kind(_, Keyword.qed),
    7.63 +                command_kind(_, Keyword.qed),
    7.64                  t =>
    7.65 -                  syntax.command_kind(t, Keyword.proof_goal) ||
    7.66 -                  syntax.command_kind(t, Keyword.theory_goal),
    7.67 +                  command_kind(t, Keyword.proof_goal) ||
    7.68 +                  command_kind(t, Keyword.theory_goal),
    7.69                  _ => false,
    7.70                  t =>
    7.71 -                  syntax.command_kind(t, Keyword.diag) ||
    7.72 -                  syntax.command_kind(t, Keyword.proof) ||
    7.73 -                  syntax.command_kind(t, Keyword.theory_goal),
    7.74 +                  command_kind(t, Keyword.diag) ||
    7.75 +                  command_kind(t, Keyword.proof) ||
    7.76 +                  command_kind(t, Keyword.theory_goal),
    7.77                  rev_caret_iterator())
    7.78  
    7.79              case Some(Text.Info(range1, tok)) if tok.is_begin =>
    7.80 @@ -114,7 +117,7 @@
    7.81                      find(info => info.info.is_command || info.info.is_begin)
    7.82                    match {
    7.83                      case Some(Text.Info(range3, tok)) =>
    7.84 -                      if (syntax.command_kind(tok, Keyword.theory_block)) Some((range1, range3))
    7.85 +                      if (command_kind(tok, Keyword.theory_block)) Some((range1, range3))
    7.86                        else Some((range1, range2))
    7.87                      case None => None
    7.88                    }