more uniform ML keyword markup;
author wenzelm
Sat Feb 15 18:28:18 2014 +0100 (2014-02-15)
changeset 55505 2a1ca7f6607b
parent 55504 4b6a5068a128
child 55506 46f3e31c5a87
more uniform ML keyword markup;
tuned;
src/Pure/Isar/token.scala
src/Pure/ML/ml_lex.ML
src/Pure/ML/ml_lex.scala
src/Pure/PIDE/markup.ML
src/Pure/PIDE/markup.scala
src/Tools/jEdit/etc/options
src/Tools/jEdit/src/rendering.scala
     1.1 --- a/src/Pure/Isar/token.scala	Sat Feb 15 17:10:57 2014 +0100
     1.2 +++ b/src/Pure/Isar/token.scala	Sat Feb 15 18:28:18 2014 +0100
     1.3 @@ -155,7 +155,7 @@
     1.4  {
     1.5    def is_command: Boolean = kind == Token.Kind.COMMAND
     1.6    def is_keyword: Boolean = kind == Token.Kind.KEYWORD
     1.7 -  def is_operator: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
     1.8 +  def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
     1.9    def is_ident: Boolean = kind == Token.Kind.IDENT
    1.10    def is_sym_ident: Boolean = kind == Token.Kind.SYM_IDENT
    1.11    def is_string: Boolean = kind == Token.Kind.STRING
     2.1 --- a/src/Pure/ML/ml_lex.ML	Sat Feb 15 17:10:57 2014 +0100
     2.2 +++ b/src/Pure/ML/ml_lex.ML	Sat Feb 15 18:28:18 2014 +0100
     2.3 @@ -6,6 +6,7 @@
     2.4  
     2.5  signature ML_LEX =
     2.6  sig
     2.7 +  val keywords: string list
     2.8    datatype token_kind =
     2.9      Keyword | Ident | LongIdent | TypeVar | Word | Int | Real | Char | String |
    2.10      Space | Comment | Error of string | EOF
    2.11 @@ -20,7 +21,6 @@
    2.12    val content_of: token -> string
    2.13    val check_content_of: token -> string
    2.14    val flatten: token list -> string
    2.15 -  val keywords: string list
    2.16    val source: (Symbol.symbol, 'a) Source.source ->
    2.17      (token, (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source)
    2.18        Source.source) Source.source
    2.19 @@ -31,6 +31,28 @@
    2.20  structure ML_Lex: ML_LEX =
    2.21  struct
    2.22  
    2.23 +(** keywords **)
    2.24 +
    2.25 +val keywords =
    2.26 + ["#", "(", ")", ",", "->", "...", ":", ":>", ";", "=", "=>", "[",
    2.27 +  "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as", "case",
    2.28 +  "datatype", "do", "else", "end", "eqtype", "exception", "fn", "fun",
    2.29 +  "functor", "handle", "if", "in", "include", "infix", "infixr",
    2.30 +  "let", "local", "nonfix", "of", "op", "open", "orelse", "raise",
    2.31 +  "rec", "sharing", "sig", "signature", "struct", "structure", "then",
    2.32 +  "type", "val", "where", "while", "with", "withtype"];
    2.33 +
    2.34 +val keywords2 =
    2.35 + ["case", "do", "else", "end", "if", "in", "let", "local", "of",
    2.36 +  "sig", "struct", "then", "while", "with"];
    2.37 +
    2.38 +val keywords3 =
    2.39 + ["handle", "open", "raise"];
    2.40 +
    2.41 +val lexicon = Scan.make_lexicon (map raw_explode keywords);
    2.42 +
    2.43 +
    2.44 +
    2.45  (** tokens **)
    2.46  
    2.47  (* datatype token *)
    2.48 @@ -69,6 +91,12 @@
    2.49  fun content_of (Token (_, (_, x))) = x;
    2.50  fun token_leq (tok, tok') = content_of tok <= content_of tok';
    2.51  
    2.52 +fun is_keyword (Token (_, (Keyword, _))) = true
    2.53 +  | is_keyword _ = false;
    2.54 +
    2.55 +fun is_delimiter (Token (_, (Keyword, x))) = not (Symbol.is_ascii_identifier x)
    2.56 +  | is_delimiter _ = false;
    2.57 +
    2.58  fun is_regular (Token (_, (Error _, _))) = false
    2.59    | is_regular (Token (_, (EOF, _))) = false
    2.60    | is_regular _ = true;
    2.61 @@ -104,7 +132,7 @@
    2.62  local
    2.63  
    2.64  val token_kind_markup =
    2.65 - fn Keyword   => (Markup.ML_keyword, "")
    2.66 + fn Keyword   => (Markup.empty, "")
    2.67    | Ident     => (Markup.empty, "")
    2.68    | LongIdent => (Markup.empty, "")
    2.69    | TypeVar   => (Markup.ML_tvar, "")
    2.70 @@ -118,15 +146,16 @@
    2.71    | Error msg => (Markup.bad, msg)
    2.72    | EOF       => (Markup.empty, "");
    2.73  
    2.74 -fun token_markup kind x =
    2.75 -  if kind = Keyword andalso exists_string (not o Symbol.is_ascii_letter) x
    2.76 -  then (Markup.ML_delimiter, "")
    2.77 -  else token_kind_markup kind;
    2.78 -
    2.79  in
    2.80  
    2.81 -fun report_of_token (Token ((pos, _), (kind, x))) =
    2.82 -  let val (markup, txt) = token_markup kind x
    2.83 +fun report_of_token (tok as Token ((pos, _), (kind, x))) =
    2.84 +  let
    2.85 +    val (markup, txt) =
    2.86 +      if not (is_keyword tok) then token_kind_markup kind
    2.87 +      else if is_delimiter tok then (Markup.ML_delimiter, "")
    2.88 +      else if member (op =) keywords2 x then (Markup.ML_keyword2, "")
    2.89 +      else if member (op =) keywords3 x then (Markup.ML_keyword3, "")
    2.90 +      else (Markup.ML_keyword1, "");
    2.91    in ((pos, markup), txt) end;
    2.92  
    2.93  end;
    2.94 @@ -142,20 +171,6 @@
    2.95  fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg);
    2.96  
    2.97  
    2.98 -(* keywords *)
    2.99 -
   2.100 -val keywords = ["#", "(", ")", ",", "->", "...", ":", ":>", ";", "=",
   2.101 -  "=>", "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
   2.102 -  "case", "datatype", "do", "else", "end", "eqtype", "exception", "fn",
   2.103 -  "fun", "functor", "handle", "if", "in", "include", "infix", "infixr",
   2.104 -  "let", "local", "nonfix", "of", "op", "open", "orelse", "raise", "rec",
   2.105 -  "sharing", "sig", "signature", "struct", "structure", "then", "type",
   2.106 -  "val", "where", "while", "with", "withtype"];
   2.107 -
   2.108 -val lex = Scan.make_lexicon (map raw_explode keywords);
   2.109 -fun scan_keyword x = Scan.literal lex x;
   2.110 -
   2.111 -
   2.112  (* identifiers *)
   2.113  
   2.114  local
   2.115 @@ -258,7 +273,7 @@
   2.116    scan_blanks1 >> token Space ||
   2.117    Symbol_Pos.scan_comment err_prefix >> token Comment ||
   2.118    Scan.max token_leq
   2.119 -   (scan_keyword >> token Keyword)
   2.120 +   (Scan.literal lexicon >> token Keyword)
   2.121     (scan_word >> token Word ||
   2.122      scan_real >> token Real ||
   2.123      scan_int >> token Int ||
     3.1 --- a/src/Pure/ML/ml_lex.scala	Sat Feb 15 17:10:57 2014 +0100
     3.2 +++ b/src/Pure/ML/ml_lex.scala	Sat Feb 15 18:28:18 2014 +0100
     3.3 @@ -13,6 +13,29 @@
     3.4  
     3.5  object ML_Lex
     3.6  {
     3.7 +  /** keywords **/
     3.8 +
     3.9 +  val keywords: Set[String] =
    3.10 +    Set("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=", "=>",
    3.11 +      "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
    3.12 +      "case", "datatype", "do", "else", "end", "eqtype", "exception",
    3.13 +      "fn", "fun", "functor", "handle", "if", "in", "include",
    3.14 +      "infix", "infixr", "let", "local", "nonfix", "of", "op", "open",
    3.15 +      "orelse", "raise", "rec", "sharing", "sig", "signature",
    3.16 +      "struct", "structure", "then", "type", "val", "where", "while",
    3.17 +      "with", "withtype")
    3.18 +
    3.19 +  val keywords2: Set[String] =
    3.20 +    Set("case", "do", "else", "end", "if", "in", "let", "local", "of",
    3.21 +      "sig", "struct", "then", "while", "with")
    3.22 +
    3.23 +  val keywords3: Set[String] =
    3.24 +    Set("handle", "open", "raise")
    3.25 +
    3.26 +  private val lexicon: Scan.Lexicon = Scan.Lexicon(keywords.toList: _*)
    3.27 +
    3.28 +
    3.29 +
    3.30    /** tokens **/
    3.31  
    3.32    object Kind extends Enumeration
    3.33 @@ -34,7 +57,7 @@
    3.34    sealed case class Token(val kind: Kind.Value, val source: String)
    3.35    {
    3.36      def is_keyword: Boolean = kind == Kind.KEYWORD
    3.37 -    def is_operator: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
    3.38 +    def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
    3.39    }
    3.40  
    3.41  
    3.42 @@ -43,15 +66,6 @@
    3.43  
    3.44    case object ML_String extends Scan.Context
    3.45  
    3.46 -  private val lexicon =
    3.47 -    Scan.Lexicon("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=",
    3.48 -      "=>", "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
    3.49 -      "case", "datatype", "do", "else", "end", "eqtype", "exception", "fn",
    3.50 -      "fun", "functor", "handle", "if", "in", "include", "infix", "infixr",
    3.51 -      "let", "local", "nonfix", "of", "op", "open", "orelse", "raise", "rec",
    3.52 -      "sharing", "sig", "signature", "struct", "structure", "then", "type",
    3.53 -      "val", "where", "while", "with", "withtype")
    3.54 -
    3.55    private object Parsers extends Scan.Parsers
    3.56    {
    3.57      /* string material */
     4.1 --- a/src/Pure/PIDE/markup.ML	Sat Feb 15 17:10:57 2014 +0100
     4.2 +++ b/src/Pure/PIDE/markup.ML	Sat Feb 15 18:28:18 2014 +0100
     4.3 @@ -68,7 +68,9 @@
     4.4    val propN: string val prop: T
     4.5    val sortingN: string val sorting: T
     4.6    val typingN: string val typing: T
     4.7 -  val ML_keywordN: string val ML_keyword: T
     4.8 +  val ML_keyword1N: string val ML_keyword1: T
     4.9 +  val ML_keyword2N: string val ML_keyword2: T
    4.10 +  val ML_keyword3N: string val ML_keyword3: T
    4.11    val ML_delimiterN: string val ML_delimiter: T
    4.12    val ML_tvarN: string val ML_tvar: T
    4.13    val ML_numeralN: string val ML_numeral: T
    4.14 @@ -325,7 +327,9 @@
    4.15  
    4.16  (* ML syntax *)
    4.17  
    4.18 -val (ML_keywordN, ML_keyword) = markup_elem "ML_keyword";
    4.19 +val (ML_keyword1N, ML_keyword1) = markup_elem "ML_keyword1";
    4.20 +val (ML_keyword2N, ML_keyword2) = markup_elem "ML_keyword2";
    4.21 +val (ML_keyword3N, ML_keyword3) = markup_elem "ML_keyword3";
    4.22  val (ML_delimiterN, ML_delimiter) = markup_elem "ML_delimiter";
    4.23  val (ML_tvarN, ML_tvar) = markup_elem "ML_tvar";
    4.24  val (ML_numeralN, ML_numeral) = markup_elem "ML_numeral";
     5.1 --- a/src/Pure/PIDE/markup.scala	Sat Feb 15 17:10:57 2014 +0100
     5.2 +++ b/src/Pure/PIDE/markup.scala	Sat Feb 15 18:28:18 2014 +0100
     5.3 @@ -169,7 +169,9 @@
     5.4  
     5.5    /* ML syntax */
     5.6  
     5.7 -  val ML_KEYWORD = "ML_keyword"
     5.8 +  val ML_KEYWORD1 = "ML_keyword1"
     5.9 +  val ML_KEYWORD2 = "ML_keyword2"
    5.10 +  val ML_KEYWORD3 = "ML_keyword3"
    5.11    val ML_DELIMITER = "ML_delimiter"
    5.12    val ML_TVAR = "ML_tvar"
    5.13    val ML_NUMERAL = "ML_numeral"
     6.1 --- a/src/Tools/jEdit/etc/options	Sat Feb 15 17:10:57 2014 +0100
     6.2 +++ b/src/Tools/jEdit/etc/options	Sat Feb 15 18:28:18 2014 +0100
     6.3 @@ -83,6 +83,7 @@
     6.4  option active_result_color : string = "999966FF"
     6.5  option keyword1_color : string = "006699FF"
     6.6  option keyword2_color : string = "009966FF"
     6.7 +option keyword3_color : string = "0099FFFF"
     6.8  
     6.9  option tfree_color : string = "A020F0FF"
    6.10  option tvar_color : string = "A020F0FF"
     7.1 --- a/src/Tools/jEdit/src/rendering.scala	Sat Feb 15 17:10:57 2014 +0100
     7.2 +++ b/src/Tools/jEdit/src/rendering.scala	Sat Feb 15 18:28:18 2014 +0100
     7.3 @@ -108,23 +108,17 @@
     7.4  
     7.5    def token_markup(syntax: Outer_Syntax, token: Token): Byte =
     7.6      if (token.is_command) command_style(syntax.keyword_kind(token.content).getOrElse(""))
     7.7 -    else if (token.is_operator) JEditToken.OPERATOR
     7.8 +    else if (token.is_delimiter) JEditToken.OPERATOR
     7.9      else token_style(token.kind)
    7.10  
    7.11  
    7.12    /* Isabelle/ML token markup */
    7.13  
    7.14 -  private val ml_keyword2: Set[String] =
    7.15 -    Set("case", "do", "else", "end", "if", "in", "let", "local", "of",
    7.16 -      "sig", "struct", "then", "while", "with")
    7.17 -
    7.18 -  private val ml_keyword3: Set[String] =
    7.19 -    Set("handle", "open", "raise")
    7.20 -
    7.21    private val ml_token_style: Map[ML_Lex.Kind.Value, Byte] =
    7.22    {
    7.23      import JEditToken._
    7.24      Map[ML_Lex.Kind.Value, Byte](
    7.25 +      ML_Lex.Kind.KEYWORD -> NULL,
    7.26        ML_Lex.Kind.IDENT -> NULL,
    7.27        ML_Lex.Kind.LONG_IDENT -> NULL,
    7.28        ML_Lex.Kind.TYPE_VAR -> NULL,
    7.29 @@ -141,9 +135,9 @@
    7.30  
    7.31    def ml_token_markup(token: ML_Lex.Token): Byte =
    7.32      if (!token.is_keyword) ml_token_style(token.kind)
    7.33 -    else if (token.is_operator) JEditToken.OPERATOR
    7.34 -    else if (ml_keyword2(token.source)) JEditToken.KEYWORD2
    7.35 -    else if (ml_keyword3(token.source)) JEditToken.KEYWORD3
    7.36 +    else if (token.is_delimiter) JEditToken.OPERATOR
    7.37 +    else if (ML_Lex.keywords2(token.source)) JEditToken.KEYWORD2
    7.38 +    else if (ML_Lex.keywords3(token.source)) JEditToken.KEYWORD3
    7.39      else JEditToken.KEYWORD1
    7.40  }
    7.41  
    7.42 @@ -183,6 +177,7 @@
    7.43    val active_result_color = color_value("active_result_color")
    7.44    val keyword1_color = color_value("keyword1_color")
    7.45    val keyword2_color = color_value("keyword2_color")
    7.46 +  val keyword3_color = color_value("keyword3_color")
    7.47  
    7.48    val tfree_color = color_value("tfree_color")
    7.49    val tvar_color = color_value("tvar_color")
    7.50 @@ -634,7 +629,9 @@
    7.51        Markup.INNER_CARTOUCHE -> inner_cartouche_color,
    7.52        Markup.INNER_COMMENT -> inner_comment_color,
    7.53        Markup.DYNAMIC_FACT -> dynamic_color,
    7.54 -      Markup.ML_KEYWORD -> keyword1_color,
    7.55 +      Markup.ML_KEYWORD1 -> keyword1_color,
    7.56 +      Markup.ML_KEYWORD2 -> keyword2_color,
    7.57 +      Markup.ML_KEYWORD3 -> keyword3_color,
    7.58        Markup.ML_DELIMITER -> Color.BLACK,
    7.59        Markup.ML_NUMERAL -> inner_numeral_color,
    7.60        Markup.ML_CHAR -> inner_quoted_color,