author wenzelm
date Wed Nov 05 15:32:11 2014 +0100 (2014-11-05)
changeset 58899 0a793c580685
parent 58897 527bd5a7e9f8
child 58900 1435cc20b022
clarified minor/major lexicon (like ML version);
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/token.scala
src/Pure/Thy/thy_header.scala
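
In outline: instead of one keyword lexicon plus an is_command predicate, Outer_Syntax now carries two lexicons, minor and major, and a recognized literal is classified by which lexicon matched it (KEYWORD vs. COMMAND). A minimal standalone sketch of that idea, using plain Scala sets as hypothetical stand-ins for Scan.Lexicon (not the Isabelle API):

    object MinorMajorDemo {
      // Hypothetical keyword sets standing in for Scan.Lexicon.
      val minor = Set("and", "imports", "begin")    // inner keywords
      val major = Set("theory", "theorem", "end")   // command starters

      sealed trait Kind
      case object KEYWORD extends Kind
      case object COMMAND extends Kind

      // Membership decides the token kind, mirroring the split in the diff.
      def classify(s: String): Option[Kind] =
        if (minor(s)) Some(KEYWORD)
        else if (major(s)) Some(COMMAND)
        else None

      def main(args: Array[String]): Unit =
        List("imports", "theorem", "foo").foreach(s => println(s + " -> " + classify(s)))
    }
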
--- a/src/Pure/Isar/outer_syntax.scala	Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Wed Nov 05 15:32:11 2014 +0100
@@ -75,7 +75,8 @@
 
 final class Outer_Syntax private(
   keywords: Map[String, (String, List[String])] = Map.empty,
-  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
+  minor: Scan.Lexicon = Scan.Lexicon.empty,
+  major: Scan.Lexicon = Scan.Lexicon.empty,
   val completion: Completion = Completion.empty,
   val language_context: Completion.Language_Context = Completion.Language_Context.outer,
   val has_tokens: Boolean = true) extends Prover.Syntax
@@ -127,11 +128,12 @@
   def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
   {
     val keywords1 = keywords + (name -> kind)
-    val lexicon1 = lexicon + name
+    val (minor1, major1) =
+      if (kind._1 == Keyword.MINOR) (minor + name, major) else (minor, major + name)
     val completion1 =
       if (replace == Some("")) completion
       else completion + (name, replace getOrElse name)
-    new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true)
+    new Outer_Syntax(keywords1, minor1, major1, completion1, language_context, true)
   }
 
   def + (name: String, kind: (String, List[String])): Outer_Syntax =
@@ -158,11 +160,11 @@
   /* language context */
 
   def set_language_context(context: Completion.Language_Context): Outer_Syntax =
-    new Outer_Syntax(keywords, lexicon, completion, context, has_tokens)
+    new Outer_Syntax(keywords, minor, major, completion, context, has_tokens)
 
   def no_tokens: Outer_Syntax =
   {
-    require(keywords.isEmpty && lexicon.isEmpty)
+    require(keywords.isEmpty && minor.isEmpty && major.isEmpty)
     new Outer_Syntax(
       completion = completion,
       language_context = language_context,
@@ -209,8 +211,7 @@
   def scan(input: CharSequence): List[Token] =
   {
     val in: Reader[Char] = new CharSequenceReader(input)
-    Token.Parsers.parseAll(
-        Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match {
+    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(minor, major)), in) match {
       case Token.Parsers.Success(tokens, _) => tokens
       case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
     }
@@ -222,7 +223,7 @@
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match {
+      Token.Parsers.parse(Token.Parsers.token_line(minor, major, ctxt), in) match {
        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case Token.Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
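
For orientation, building a syntax and scanning might look as follows; this is a sketch, and Outer_Syntax.init() plus the concrete keyword-kind string "thy_begin" are assumptions about the surrounding API rather than part of this diff:

    // Sketch only: assumes an empty initial syntax and keyword kinds such as
    // Keyword.MINOR; every non-minor kind lands in the major lexicon.
    val syntax =
      Outer_Syntax.init() +
        ("imports", (Keyword.MINOR, Nil)) +   // added to minor
        ("theory", ("thy_begin", Nil))        // added to major
    val tokens = syntax.scan("theory Foo imports Main")
    // expected kinds (ignoring SPACE tokens): COMMAND, IDENT, KEYWORD, IDENT
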
--- a/src/Pure/Isar/token.scala	Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Isar/token.scala	Wed Nov 05 15:32:11 2014 +0100
@@ -51,8 +51,7 @@
       string | (alt_string | (verb | (cart | cmt)))
     }
 
-    private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean)
-      : Parser[Token] =
+    private def other_token(minor: Scan.Lexicon, major: Scan.Lexicon): Parser[Token] =
     {
       val letdigs1 = many1(Symbol.is_letdig)
       val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
@@ -80,9 +79,9 @@
         (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
         (x => Token(Token.Kind.SYM_IDENT, x))
 
-      val command_keyword =
-        literal(lexicon) ^^
-          (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
+      val keyword =
+        literal(minor) ^^ (x => Token(Token.Kind.KEYWORD, x)) |||
+        literal(major) ^^ (x => Token(Token.Kind.COMMAND, x))
 
       val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
 
@@ -96,13 +95,13 @@
 
       space | (recover_delimited |
         (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
-          command_keyword) | bad))
+          keyword) | bad))
     }
 
-    def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
-      delimited_token | other_token(lexicon, is_command)
+    def token(minor: Scan.Lexicon, major: Scan.Lexicon): Parser[Token] =
+      delimited_token | other_token(minor, major)
 
-    def token_line(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Line_Context)
+    def token_line(minor: Scan.Lexicon, major: Scan.Lexicon, ctxt: Scan.Line_Context)
       : Parser[(Token, Scan.Line_Context)] =
     {
       val string =
@@ -112,7 +111,7 @@
       val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
       val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
       val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
-      val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
+      val other = other_token(minor, major) ^^ { case x => (x, Scan.Finished) }
 
       string | (alt_string | (verb | (cart | (cmt | other))))
     }
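
The ||| in the new keyword parser is the longest-match alternative of Scala's parser combinators: if a minor keyword is a proper prefix of a major one, the longer match wins regardless of the order of the alternatives, which plain | would not guarantee. A standalone, runnable illustration with hypothetical keywords (not the Isabelle lexicons):

    import scala.util.parsing.combinator.RegexParsers

    object LongestMatchDemo extends RegexParsers {
      // "in" is a proper prefix of "instance", so with plain | the result
      // would depend on the order of alternatives; ||| keeps the longest.
      val minor: Parser[String] = literal("in") ^^ (x => "KEYWORD " + x)
      val major: Parser[String] = literal("instance") ^^ (x => "COMMAND " + x)
      val keyword: Parser[String] = minor ||| major

      def main(args: Array[String]): Unit = {
        println(parse(keyword, "instance").get)  // COMMAND instance
        println(parse(keyword, "in").get)        // KEYWORD in
      }
    }
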
--- a/src/Pure/Thy/thy_header.scala	Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Thy/thy_header.scala	Wed Nov 05 15:32:11 2014 +0100
@@ -95,7 +95,7 @@
 
   def read(reader: Reader[Char]): Thy_Header =
   {
-    val token = Token.Parsers.token(lexicon, _ => false)
+    val token = Token.Parsers.token(lexicon, Scan.Lexicon.empty)
     val toks = new mutable.ListBuffer[Token]
 
     @tailrec def scan_to_begin(in: Reader[Char])
@@ -138,7 +138,7 @@
     val f = Symbol.decode _
     Thy_Header(f(name), imports.map(f),
       keywords.map({ case (a, b, c) =>
-        (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f))  }), c.map(f)) }))
+        (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) }))
   }
 }
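
Note on the thy_header.scala hunks: the header scanner previously passed the predicate _ => false, meaning no recognized literal was ever a command; with the two-lexicon interface the same effect falls out of passing Scan.Lexicon.empty as the major lexicon, so every header keyword still tokenizes as KEYWORD. The second hunk is whitespace cleanup only.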