--- a/src/Pure/Isar/outer_syntax.scala Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala Wed Nov 05 15:32:11 2014 +0100
@@ -75,7 +75,8 @@
final class Outer_Syntax private(
keywords: Map[String, (String, List[String])] = Map.empty,
- lexicon: Scan.Lexicon = Scan.Lexicon.empty,
+ minor: Scan.Lexicon = Scan.Lexicon.empty,
+ major: Scan.Lexicon = Scan.Lexicon.empty,
val completion: Completion = Completion.empty,
val language_context: Completion.Language_Context = Completion.Language_Context.outer,
val has_tokens: Boolean = true) extends Prover.Syntax
@@ -127,11 +128,12 @@
def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
{
val keywords1 = keywords + (name -> kind)
- val lexicon1 = lexicon + name
+ val (minor1, major1) =
+ if (kind._1 == Keyword.MINOR) (minor + name, major) else (minor, major + name)
val completion1 =
if (replace == Some("")) completion
else completion + (name, replace getOrElse name)
- new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true)
+ new Outer_Syntax(keywords1, minor1, major1, completion1, language_context, true)
}
def + (name: String, kind: (String, List[String])): Outer_Syntax =
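This hunk replaces the single `lexicon` (plus the external `is_command` predicate) with two lexicons, so a keyword is classified once, at the point where it is added. A minimal sketch of the classification step, not Isabelle code: `Set[String]` stands in for `Scan.Lexicon`, and the plain string "minor" for `Keyword.MINOR`.

final case class SyntaxSketch(
  keywords: Map[String, String] = Map.empty,
  minor: Set[String] = Set.empty,
  major: Set[String] = Set.empty)
{
  // mirrors Outer_Syntax.+ above: route the new name into one of the two lexicons
  def + (name: String, kind: String): SyntaxSketch = {
    val (minor1, major1) =
      if (kind == "minor") (minor + name, major) else (minor, major + name)
    copy(keywords = keywords + (name -> kind), minor = minor1, major = major1)
  }
}

// SyntaxSketch() + ("where", "minor") + ("lemma", "major")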
@@ -158,11 +160,11 @@
/* language context */
def set_language_context(context: Completion.Language_Context): Outer_Syntax =
- new Outer_Syntax(keywords, lexicon, completion, context, has_tokens)
+ new Outer_Syntax(keywords, minor, major, completion, context, has_tokens)
def no_tokens: Outer_Syntax =
{
- require(keywords.isEmpty && lexicon.isEmpty)
+ require(keywords.isEmpty && minor.isEmpty && major.isEmpty)
new Outer_Syntax(
completion = completion,
language_context = language_context,
@@ -209,8 +211,7 @@
def scan(input: CharSequence): List[Token] =
{
val in: Reader[Char] = new CharSequenceReader(input)
- Token.Parsers.parseAll(
- Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match {
+ Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(minor, major)), in) match {
case Token.Parsers.Success(tokens, _) => tokens
case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
}
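With both lexicons in hand, `scan` no longer needs the `is_command` test: the token kind is determined by which lexicon the literal came from. A simplified, self-contained model of that idea, not the Isabelle implementation, assuming the scala-parser-combinators library and modeling only spaces, identifiers and keywords (`Set[String]` again stands in for `Scan.Lexicon`):

import scala.util.parsing.combinator.RegexParsers

object ScanSketch extends RegexParsers {
  override val skipWhitespace = false

  sealed trait Kind
  case object KEYWORD extends Kind  // minor keyword
  case object COMMAND extends Kind  // major keyword
  case object IDENT extends Kind
  case object SPACE extends Kind
  final case class Tok(kind: Kind, source: String)

  // longest-match union of a (non-empty) set of literals
  private def lit(lexicon: Set[String]): Parser[String] =
    lexicon.toList.map(literal).reduceLeft(_ ||| _)

  // as in other_token: identifiers compete with keywords via |||, and on a
  // tie the keyword alternative (the right operand of |||) wins
  def token(minor: Set[String], major: Set[String]): Parser[Tok] =
    ("""\s+""".r ^^ (Tok(SPACE, _))) |
    (("""[A-Za-z_][A-Za-z0-9_']*""".r ^^ (Tok(IDENT, _))) |||
      (lit(minor) ^^ (Tok(KEYWORD, _)) ||| lit(major) ^^ (Tok(COMMAND, _))))

  def scan(minor: Set[String], major: Set[String], input: String): List[Tok] =
    parseAll(rep(token(minor, major)), input) match {
      case Success(toks, _) => toks
      case bad => sys.error("tokenizing failed: " + bad)
    }
}

// ScanSketch.scan(Set("and"), Set("lemma"), "lemma foo and bar") yields
// COMMAND "lemma", SPACE, IDENT "foo", SPACE, KEYWORD "and", SPACE, IDENT "bar"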
@@ -222,7 +223,7 @@
val toks = new mutable.ListBuffer[Token]
var ctxt = context
while (!in.atEnd) {
- Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match {
+ Token.Parsers.parse(Token.Parsers.token_line(minor, major, ctxt), in) match {
case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
case Token.Parsers.NoSuccess(_, rest) =>
error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
--- a/src/Pure/Isar/token.scala Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Isar/token.scala Wed Nov 05 15:32:11 2014 +0100
@@ -51,8 +51,7 @@
string | (alt_string | (verb | (cart | cmt)))
}
- private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean)
- : Parser[Token] =
+ private def other_token(minor: Scan.Lexicon, major: Scan.Lexicon): Parser[Token] =
{
val letdigs1 = many1(Symbol.is_letdig)
val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
@@ -80,9 +79,9 @@
(many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
(x => Token(Token.Kind.SYM_IDENT, x))
- val command_keyword =
- literal(lexicon) ^^
- (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
+ val keyword =
+ literal(minor) ^^ (x => Token(Token.Kind.KEYWORD, x)) |||
+ literal(major) ^^ (x => Token(Token.Kind.COMMAND, x))
val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
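The two `literal` parsers are combined with `|||`, the longest-match alternative, rather than plain `|`: if an entry in one lexicon is a proper prefix of an entry in the other, the longer match must win regardless of which lexicon it lives in. A small sketch with hypothetical lexicon entries ("note" minor, "notepad" major; the classification is made up for illustration):

import scala.util.parsing.combinator.RegexParsers

object LongestMatch extends RegexParsers {
  override val skipWhitespace = false

  val keyword: Parser[(String, String)] =
    literal("note") ^^ (x => ("keyword", x)) |||
    literal("notepad") ^^ (x => ("command", x))
}

// LongestMatch.parse(LongestMatch.keyword, "notepad").get == ("command", "notepad")
// with plain | the first branch would already succeed on the prefix "note"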
@@ -96,13 +95,13 @@
space | (recover_delimited |
(((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
- command_keyword) | bad))
+ keyword) | bad))
}
- def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
- delimited_token | other_token(lexicon, is_command)
+ def token(minor: Scan.Lexicon, major: Scan.Lexicon): Parser[Token] =
+ delimited_token | other_token(minor, major)
- def token_line(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Line_Context)
+ def token_line(minor: Scan.Lexicon, major: Scan.Lexicon, ctxt: Scan.Line_Context)
: Parser[(Token, Scan.Line_Context)] =
{
val string =
@@ -112,7 +111,7 @@
val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
- val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
+ val other = other_token(minor, major) ^^ { case x => (x, Scan.Finished) }
string | (alt_string | (verb | (cart | (cmt | other))))
}
--- a/src/Pure/Thy/thy_header.scala Tue Nov 04 18:19:38 2014 +0100
+++ b/src/Pure/Thy/thy_header.scala Wed Nov 05 15:32:11 2014 +0100
@@ -95,7 +95,7 @@
def read(reader: Reader[Char]): Thy_Header =
{
- val token = Token.Parsers.token(lexicon, _ => false)
+ val token = Token.Parsers.token(lexicon, Scan.Lexicon.empty)
val toks = new mutable.ListBuffer[Token]
@tailrec def scan_to_begin(in: Reader[Char])
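The header parser used to disable command classification with the constant predicate `_ => false`; under the new signature the same effect is obtained by passing `Scan.Lexicon.empty` as the major lexicon, so header keywords always tokenize as KEYWORD and never as COMMAND. A sketch of that equivalence, with `Set[String]` standing in for `Scan.Lexicon`:

object HeaderKindSketch {
  def kind_old(is_command: String => Boolean)(x: String): String =
    if (is_command(x)) "command" else "keyword"

  def kind_new(major: Set[String])(x: String): String =
    if (major(x)) "command" else "keyword"
}

// HeaderKindSketch.kind_old(_ => false)("theory")  == "keyword"
// HeaderKindSketch.kind_new(Set.empty)("theory")   == "keyword"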
@@ -138,7 +138,7 @@
val f = Symbol.decode _
Thy_Header(f(name), imports.map(f),
keywords.map({ case (a, b, c) =>
- (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) }))
+ (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) }))
}
}