src/Pure/Isar/outer_syntax.scala
changeset 52066 83b7b88770c9
parent 50428 7a78a74139f5
child 52439 4cf3f6153eb8
--- a/src/Pure/Isar/outer_syntax.scala	Sat May 18 12:41:31 2013 +0200
+++ b/src/Pure/Isar/outer_syntax.scala	Sat May 18 13:00:05 2013 +0200
@@ -112,34 +112,32 @@
   /* tokenize */
 
   def scan(input: Reader[Char]): List[Token] =
-    Exn.recover  // FIXME !?
-    {
-      import lexicon._
+  {
+    import lexicon._
 
-      parseAll(rep(token(is_command)), input) match {
-        case Success(tokens, _) => tokens
-        case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
-      }
+    parseAll(rep(token(is_command)), input) match {
+      case Success(tokens, _) => tokens
+      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
     }
+  }
 
   def scan(input: CharSequence): List[Token] =
     scan(new CharSequenceReader(input))
 
   def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
-    Exn.recover  // FIXME !?
-    {
-      import lexicon._
+  {
+    import lexicon._
 
-      var in: Reader[Char] = new CharSequenceReader(input)
-      val toks = new mutable.ListBuffer[Token]
-      var ctxt = context
-      while (!in.atEnd) {
-        parse(token_context(is_command, ctxt), in) match {
-          case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-          case NoSuccess(_, rest) =>
-            error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
-        }
+    var in: Reader[Char] = new CharSequenceReader(input)
+    val toks = new mutable.ListBuffer[Token]
+    var ctxt = context
+    while (!in.atEnd) {
+      parse(token_context(is_command, ctxt), in) match {
+        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+        case NoSuccess(_, rest) =>
+          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
       }
-      (toks.toList, ctxt)
     }
+    (toks.toList, ctxt)
+  }
 }