src/Pure/ML/ml_lex.scala
changeset 64824 330ec9bc4b75
parent 63610 4b40b8196dc7
child 67095 91ffe1f8bf5c
--- a/src/Pure/ML/ml_lex.scala	Sat Jan 07 19:36:40 2017 +0100
+++ b/src/Pure/ML/ml_lex.scala	Sat Jan 07 20:01:05 2017 +0100
@@ -8,7 +8,7 @@
 
 
 import scala.collection.mutable
-import scala.util.parsing.input.{Reader, CharSequenceReader}
+import scala.util.parsing.input.Reader
 
 
 object ML_Lex
@@ -265,17 +265,15 @@
   /* tokenize */
 
   def tokenize(input: CharSequence): List[Token] =
-  {
-    Parsers.parseAll(Parsers.rep(Parsers.token), new CharSequenceReader(input)) match {
+    Parsers.parseAll(Parsers.rep(Parsers.token), Scan.char_reader(input)) match {
       case Parsers.Success(tokens, _) => tokens
       case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
     }
-  }
 
   def tokenize_line(SML: Boolean, input: CharSequence, context: Scan.Line_Context)
     : (List[Token], Scan.Line_Context) =
   {
-    var in: Reader[Char] = new CharSequenceReader(input)
+    var in: Reader[Char] = Scan.char_reader(input)
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
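
Note on the helper used above: Scan.char_reader appears to be a small convenience in Isabelle's Scan module that presents a CharSequence as a parser-combinator Reader[Char], which is why the direct import of CharSequenceReader can be dropped in the first hunk. The stand-alone sketch below is hypothetical (the object name Scan_Sketch and its placement are not part of this changeset) and merely assumes the helper is such a thin wrapper:

  import scala.util.parsing.input.{Reader, CharSequenceReader}

  object Scan_Sketch
  {
    /* present arbitrary character input as a Reader[Char] for the parser combinators */
    def char_reader(input: CharSequence): Reader[Char] =
      new CharSequenceReader(input)
  }

Under that assumption, a call site would read Parsers.parseAll(Parsers.rep(Parsers.token), Scan_Sketch.char_reader(input)) instead of constructing the CharSequenceReader inline, which is exactly the simplification both hunks of this changeset apply to tokenize and tokenize_line.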