src/Pure/Isar/outer_syntax.scala
changeset 58899 0a793c580685
parent 58868 c5e1cce7ace3
child 58900 1435cc20b022
--- a/src/Pure/Isar/outer_syntax.scala	Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Wed Nov 05 15:32:11 2014 +0100
@@ -75,7 +75,8 @@
 
 final class Outer_Syntax private(
   keywords: Map[String, (String, List[String])] = Map.empty,
-  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
+  minor: Scan.Lexicon = Scan.Lexicon.empty,
+  major: Scan.Lexicon = Scan.Lexicon.empty,
   val completion: Completion = Completion.empty,
   val language_context: Completion.Language_Context = Completion.Language_Context.outer,
   val has_tokens: Boolean = true) extends Prover.Syntax
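
The hunk above splits the constructor's single lexicon into two disjoint ones, so the
scanner can classify minor keywords and major (command) keywords by lexicon membership
alone, without a separate is_command predicate. A minimal sketch of the idea, using
plain Sets as a hypothetical stand-in for Isabelle's Scan.Lexicon:

// Hypothetical stand-in: the real Scan.Lexicon is a trie-like structure
// over strings, not a Set, but the two-lexicon split works the same way.
final class SyntaxTable private(
  val minor: Set[String],  // non-command keywords, e.g. "and", "where"
  val major: Set[String])  // command keywords that open a new command span
{
  def + (name: String, is_minor: Boolean): SyntaxTable =
    if (is_minor) new SyntaxTable(minor + name, major)
    else new SyntaxTable(minor, major + name)

  // membership in `major` replaces the old is_command predicate
  def is_command(name: String): Boolean = major.contains(name)
}

object SyntaxTable
{
  val empty: SyntaxTable = new SyntaxTable(Set.empty, Set.empty)
}
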
@@ -127,11 +128,12 @@
   def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
   {
     val keywords1 = keywords + (name -> kind)
-    val lexicon1 = lexicon + name
+    val (minor1, major1) =
+      if (kind._1 == Keyword.MINOR) (minor + name, major) else (minor, major + name)
     val completion1 =
       if (replace == Some("")) completion
       else completion + (name, replace getOrElse name)
-    new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true)
+    new Outer_Syntax(keywords1, minor1, major1, completion1, language_context, true)
   }
 
   def + (name: String, kind: (String, List[String])): Outer_Syntax =
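
In the reworked + operation, the keyword kind decides which lexicon grows:
kind._1 == Keyword.MINOR routes the name into `minor`, anything else into `major`.
A hedged, self-contained sketch of that routing step (the string "minor" stands in
for Keyword.MINOR, and the kind values below are purely illustrative):

object KeywordRouting
{
  val MINOR = "minor"  // stand-in for Keyword.MINOR

  // same routing as the patched `+`: minor kinds grow the minor lexicon,
  // every other kind is treated as a major (command) keyword
  def add(minor: Set[String], major: Set[String],
    name: String, kind: (String, List[String])): (Set[String], Set[String]) =
    if (kind._1 == MINOR) (minor + name, major) else (minor, major + name)

  def demo(): Unit =
  {
    // illustrative kinds: "and" as a minor keyword, "theorem" as a command
    val (minor1, major1) = add(Set.empty, Set.empty, "and", (MINOR, Nil))
    val (minor2, major2) = add(minor1, major1, "theorem", ("thy_goal", Nil))
    assert(minor2 == Set("and") && major2 == Set("theorem"))
  }
}
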
@@ -158,11 +160,11 @@
   /* language context */
 
   def set_language_context(context: Completion.Language_Context): Outer_Syntax =
-    new Outer_Syntax(keywords, lexicon, completion, context, has_tokens)
+    new Outer_Syntax(keywords, minor, major, completion, context, has_tokens)
 
   def no_tokens: Outer_Syntax =
   {
-    require(keywords.isEmpty && lexicon.isEmpty)
+    require(keywords.isEmpty && minor.isEmpty && major.isEmpty)
     new Outer_Syntax(
       completion = completion,
       language_context = language_context,
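
no_tokens produces a syntax that keeps only completion information; the strengthened
require now has to check both lexicons, since a keyword recorded in either one would
otherwise be dropped silently. A small sketch of the invariant (names mirror the
patch; the signature itself is illustrative):

object NoTokensCheck
{
  // Illustrative precondition: a token-free syntax must start from an empty
  // keyword table and two empty lexicons, or keywords would vanish.
  def apply(keywords: Map[String, (String, List[String])],
    minor: Set[String], major: Set[String]): Unit =
    require(keywords.isEmpty && minor.isEmpty && major.isEmpty,
      "no_tokens: syntax already contains keywords")
}
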
@@ -209,8 +211,7 @@
   def scan(input: CharSequence): List[Token] =
   {
     val in: Reader[Char] = new CharSequenceReader(input)
-    Token.Parsers.parseAll(
-        Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match {
+    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(minor, major)), in) match {
       case Token.Parsers.Success(tokens, _) => tokens
       case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
     }
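
The scan hunk follows the standard pattern from the scala-parser-combinators library
(a separate module since Scala 2.11): repeat a token parser with rep and insist on
consuming the whole input with parseAll. A self-contained sketch under that
assumption; the trivial token parser here is illustrative and far simpler than
Isabelle's Token.Parsers.token:

import scala.util.parsing.combinator.RegexParsers

object MiniScan extends RegexParsers
{
  override val skipWhitespace = false  // whitespace runs are tokens too

  // illustrative token parser: words, whitespace runs, or single symbols
  val token: Parser[String] = """\w+""".r | """\s+""".r | """[\s\S]""".r

  def scan(input: CharSequence): List[String] =
    parseAll(rep(token), input) match {
      case Success(tokens, _) => tokens
      case _ => sys.error("Unexpected failure of tokenizing input:\n" + input)
    }
}

For example, MiniScan.scan("lemma foo") yields List("lemma", " ", "foo"); since the
alternatives cover every character, parseAll cannot fail on ordinary input, which is
why an unexpected NoSuccess is reported as an error rather than handled.
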
@@ -222,7 +223,7 @@
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match {
+      Token.Parsers.parse(Token.Parsers.token_line(minor, major, ctxt), in) match {
         case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
         case Token.Parsers.NoSuccess(_, rest) =>
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
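
scan_line keeps the incremental variant: parse one token at a time with parse (rather
than parseAll) and thread a line context through the loop, so scanning can stop at a
line boundary and resume later in the same context. A sketch under the same library
assumption as above; Ctxt and token_line are illustrative stand-ins for Isabelle's
Scan.Line_Context and Token.Parsers.token_line:

import scala.collection.mutable
import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.input.{CharSequenceReader, Reader}

object MiniLineScan extends RegexParsers
{
  override val skipWhitespace = false

  type Ctxt = Int  // stand-in for Scan.Line_Context, e.g. comment nesting depth

  // illustrative: a real token_line would return an updated context for
  // comments or quotations left open at the end of the line
  def token_line(ctxt: Ctxt): Parser[(String, Ctxt)] =
    ("""\S+""".r | """\s+""".r) ^^ (tok => (tok, ctxt))

  def scan_line(input: CharSequence, context: Ctxt): (List[String], Ctxt) =
  {
    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[String]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_line(ctxt), in) match {
        case Success((x, c), rest) => toks += x; ctxt = c; in = rest
        case NoSuccess(_, rest) =>
          sys.error("Unexpected failure of tokenizing input:\n" + rest.source)
      }
    }
    (toks.toList, ctxt)
  }
}
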