--- a/src/Pure/General/scan.scala Fri Feb 14 16:11:14 2014 +0100
+++ b/src/Pure/General/scan.scala Fri Feb 14 16:25:30 2014 +0100
@@ -86,7 +86,7 @@
(("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString)
}
- private def quoted(quote: Symbol.Symbol): Parser[String] =
+ def quoted(quote: Symbol.Symbol): Parser[String] =
{
quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z }
}.named("quoted")
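
With quoted now public, callers outside Scan.Parsers can invoke it directly.
A minimal sketch of such a call (illustrative only, not part of the changeset),
assuming the parseAll entry point that Scan.Parsers inherits from Scala's
RegexParsers:

    // Illustrative: `quoted` returns the source text including the quote symbols.
    Scan.Parsers.parseAll(Scan.Parsers.quoted("\""), "\"hello world\"") match {
      case Scan.Parsers.Success(source, _) => println(source)
      case bad => println(bad)
    }
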
@@ -128,7 +128,7 @@
private def verbatim_body: Parser[String] =
rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString)
- private def verbatim: Parser[String] =
+ def verbatim: Parser[String] =
{
"{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z }
}.named("verbatim")
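
The now-public verbatim parser is used the same way; again a hedged sketch,
with the {* ... *} delimiters kept in the result:

    // Illustrative: the parsed source includes the {* and *} delimiters.
    Scan.Parsers.parseAll(Scan.Parsers.verbatim, "{* some text *}") match {
      case Scan.Parsers.Success(source, _) => println(source)
      case bad => println(bad)
    }
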
@@ -288,86 +288,6 @@
else Success(result, in.drop(result.length))
}
}.named("keyword")
-
-
- /* outer syntax tokens */
-
- private def delimited_token: Parser[Token] =
- {
- val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
- val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
- val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
- val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
- val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))
-
- string | (alt_string | (verb | (cart | cmt)))
- }
-
- private def other_token(lexicon: Lexicon, is_command: String => Boolean)
- : Parser[Token] =
- {
- val letdigs1 = many1(Symbol.is_letdig)
- val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
- val id =
- one(Symbol.is_letter) ~
- (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^
- { case x ~ y => x + y }
-
- val nat = many1(Symbol.is_digit)
- val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
- val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
-
- val ident = id ~ rep("." ~> id) ^^
- { case x ~ Nil => Token(Token.Kind.IDENT, x)
- case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
-
- val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
- val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
- val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
- val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
- val float =
- ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
-
- val sym_ident =
- (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
- (x => Token(Token.Kind.SYM_IDENT, x))
-
- val command_keyword =
- keyword(lexicon) ^^
- (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
-
- val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
-
- val recover_delimited =
- (recover_quoted("\"") |
- (recover_quoted("`") |
- (recover_verbatim |
- (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
-
- val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
-
- space | (recover_delimited |
- (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
- command_keyword) | bad))
- }
-
- def token(lexicon: Lexicon, is_command: String => Boolean): Parser[Token] =
- delimited_token | other_token(lexicon, is_command)
-
- def token_context(lexicon: Lexicon, is_command: String => Boolean, ctxt: Context)
- : Parser[(Token, Context)] =
- {
- val string =
- quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
- val alt_string =
- quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
- val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
- val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
- val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
- val other = other_token(lexicon, is_command) ^^ { case x => (x, Finished) }
-
- string | (alt_string | (verb | (cart | (cmt | other))))
- }
}
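
For reference, the keyword parser visible in the context lines above stays
behind in Scan.Parsers and matches the longest lexicon entry at the front of
the input. A hedged sketch, assuming the varargs Scan.Lexicon constructor used
elsewhere in Pure and a publicly accessible keyword parser:

    // Hypothetical lexicon; `keyword` succeeds on the longest matching entry.
    val lexicon = Scan.Lexicon("theory", "imports", "begin")
    Scan.Parsers.parseAll(Scan.Parsers.keyword(lexicon), "imports") match {
      case Scan.Parsers.Success(kw, _) => println(kw)  // prints: imports
      case bad => println(bad)
    }
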
--- a/src/Pure/Isar/outer_syntax.scala Fri Feb 14 16:11:14 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala Fri Feb 14 16:25:30 2014 +0100
@@ -128,8 +128,8 @@
def scan(input: Reader[Char]): List[Token] =
{
- Scan.Parsers.parseAll(Scan.Parsers.rep(Scan.Parsers.token(lexicon, is_command)), input) match {
- case Scan.Parsers.Success(tokens, _) => tokens
+ Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), input) match {
+ case Token.Parsers.Success(tokens, _) => tokens
case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
}
}
@@ -143,9 +143,9 @@
val toks = new mutable.ListBuffer[Token]
var ctxt = context
while (!in.atEnd) {
- Scan.Parsers.parse(Scan.Parsers.token_context(lexicon, is_command, ctxt), in) match {
- case Scan.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
- case Scan.Parsers.NoSuccess(_, rest) =>
+ Token.Parsers.parse(Token.Parsers.token_context(lexicon, is_command, ctxt), in) match {
+ case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+ case Token.Parsers.NoSuccess(_, rest) =>
error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
}
}
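
Client code is unaffected by the move; a sketch of calling scan, where the
Outer_Syntax value syntax and its keyword table are assumed to be in scope:

    // Hypothetical client: tokenize a source string with the relocated parsers.
    import scala.util.parsing.input.CharSequenceReader
    val tokens: List[Token] =
      syntax.scan(new CharSequenceReader("theory Foo imports Main begin"))
    for (tok <- tokens) println(tok.kind.toString + " " + tok.source)
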
--- a/src/Pure/Isar/token.scala Fri Feb 14 16:11:14 2014 +0100
+++ b/src/Pure/Isar/token.scala Fri Feb 14 16:25:30 2014 +0100
@@ -34,6 +34,91 @@
}
+ /* parsers */
+
+ object Parsers extends Parsers
+
+ trait Parsers extends Scan.Parsers
+ {
+ private def delimited_token: Parser[Token] =
+ {
+ val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
+ val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
+ val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
+ val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
+ val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))
+
+ string | (alt_string | (verb | (cart | cmt)))
+ }
+
+ private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean)
+ : Parser[Token] =
+ {
+ val letdigs1 = many1(Symbol.is_letdig)
+ val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
+ val id =
+ one(Symbol.is_letter) ~
+ (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^
+ { case x ~ y => x + y }
+
+ val nat = many1(Symbol.is_digit)
+ val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
+ val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
+
+ val ident = id ~ rep("." ~> id) ^^
+ { case x ~ Nil => Token(Token.Kind.IDENT, x)
+ case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
+
+ val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
+ val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
+ val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
+ val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
+ val float =
+ ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
+
+ val sym_ident =
+ (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
+ (x => Token(Token.Kind.SYM_IDENT, x))
+
+ val command_keyword =
+ keyword(lexicon) ^^
+ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
+
+ val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
+
+ val recover_delimited =
+ (recover_quoted("\"") |
+ (recover_quoted("`") |
+ (recover_verbatim |
+ (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
+
+ val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
+
+ space | (recover_delimited |
+ (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
+ command_keyword) | bad))
+ }
+
+ def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
+ delimited_token | other_token(lexicon, is_command)
+
+ def token_context(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Context)
+ : Parser[(Token, Scan.Context)] =
+ {
+ val string =
+ quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
+ val alt_string =
+ quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
+ val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
+ val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
+ val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
+ val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
+
+ string | (alt_string | (verb | (cart | (cmt | other))))
+ }
+ }
+
+
/* token reader */
class Position(val line: Int, val file: String) extends scala.util.parsing.input.Position
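
token_context threads a Scan.Context through successive parse calls, which is
what the scanning loop in outer_syntax.scala above relies on for delimited
tokens that span chunk boundaries. A sketch of one step of that protocol,
assuming Scan.Finished as the initial context and a lexicon/is_command pair in
scope (both hypothetical here):

    // Hypothetical driver: scan one token, then carry the resulting context
    // forward so an unterminated string or comment can resume in a later chunk.
    import scala.util.parsing.input.{CharSequenceReader, Reader}
    var ctxt: Scan.Context = Scan.Finished
    var in: Reader[Char] = new CharSequenceReader("\"a string missing its close quote")
    Token.Parsers.parse(Token.Parsers.token_context(lexicon, is_command, ctxt), in) match {
      case Token.Parsers.Success((tok, c), rest) => { ctxt = c; in = rest }
      case Token.Parsers.NoSuccess(msg, _) => println(msg)
    }
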
--- a/src/Pure/Thy/thy_header.scala Fri Feb 14 16:11:14 2014 +0100
+++ b/src/Pure/Thy/thy_header.scala Fri Feb 14 16:25:30 2014 +0100
@@ -82,13 +82,13 @@
def read(reader: Reader[Char]): Thy_Header =
{
- val token = Scan.Parsers.token(lexicon, _ => false)
+ val token = Token.Parsers.token(lexicon, _ => false)
val toks = new mutable.ListBuffer[Token]
@tailrec def scan_to_begin(in: Reader[Char])
{
token(in) match {
- case Scan.Parsers.Success(tok, rest) =>
+ case Token.Parsers.Success(tok, rest) =>
toks += tok
if (!tok.is_begin) scan_to_begin(rest)
case _ =>
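
A sketch of the corresponding call site (illustrative only; the name field of
the resulting Thy_Header is assumed):

    // Hypothetical caller: parse a theory header from raw source text.
    import scala.util.parsing.input.CharSequenceReader
    val header = Thy_Header.read(new CharSequenceReader("theory Foo imports Main begin"))
    println(header.name)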