--- a/src/Pure/ML/ml_lex.scala Sat Feb 15 21:11:29 2014 +0100
+++ b/src/Pure/ML/ml_lex.scala Sun Feb 16 13:18:08 2014 +0100
@@ -64,7 +64,7 @@
/** parsers **/
- case object ML_String extends Scan.Context
+ case object ML_String extends Scan.Line_Context
private object Parsers extends Scan.Parsers
{
@@ -107,9 +107,9 @@
private val ml_string: Parser[Token] =
"\"" ~ ml_string_body ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.STRING, x + y + z) }
- private def ml_string_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+ private def ml_string_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
{
- def result(x: String, c: Scan.Context) = (Token(Kind.STRING, x), c)
+ def result(x: String, c: Scan.Line_Context) = (Token(Kind.STRING, x), c)
ctxt match {
case Scan.Finished =>
@@ -130,8 +130,8 @@
private val ml_comment: Parser[Token] =
comment ^^ (x => Token(Kind.COMMENT, x))
- private def ml_comment_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
- comment_context(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }
+ private def ml_comment_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
+ comment_line(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }
/* delimited token */
@@ -203,11 +203,11 @@
def token: Parser[Token] = delimited_token | other_token
- def token_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+ def token_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
{
val other = (ml_char | other_token) ^^ (x => (x, Scan.Finished))
- ml_string_context(ctxt) | (ml_comment_context(ctxt) | other)
+ ml_string_line(ctxt) | (ml_comment_line(ctxt) | other)
}
}
@@ -222,13 +222,14 @@
}
}
- def tokenize_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
+ def tokenize_line(input: CharSequence, context: Scan.Line_Context)
+ : (List[Token], Scan.Line_Context) =
{
var in: Reader[Char] = new CharSequenceReader(input)
val toks = new mutable.ListBuffer[Token]
var ctxt = context
while (!in.atEnd) {
- Parsers.parse(Parsers.token_context(ctxt), in) match {
+ Parsers.parse(Parsers.token_line(ctxt), in) match {
case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
case Parsers.NoSuccess(_, rest) =>
error("Unexpected failure of tokenizing input:\n" + rest.source.toString)