# HG changeset patch
# User wenzelm
# Date 1368874805 -7200
# Node ID 83b7b88770c9048ca1e4a18860d5c153be32861e
# Parent  78f2475aa1267439f069bb9d8bf4168a2ce6f9b4
discontinued odd workaround for scala-2.9.2, which is hopefully obsolete in scala-2.10.x;

diff -r 78f2475aa126 -r 83b7b88770c9 src/Pure/General/exn.scala
--- a/src/Pure/General/exn.scala	Sat May 18 12:41:31 2013 +0200
+++ b/src/Pure/General/exn.scala	Sat May 18 13:00:05 2013 +0200
@@ -47,23 +47,5 @@
     user_message(exn) getOrElse
       (if (exn.isInstanceOf[InterruptedException]) "Interrupt"
        else exn.toString)
-
-
-  /* recover from spurious crash */
-
-  def recover[A](e: => A): A =
-  {
-    capture(e) match {
-      case Res(x) => x
-      case Exn(exn) =>
-        capture(e) match {
-          case Res(x) =>
-            java.lang.System.err.println("Recovered from spurious crash after retry!")
-            exn.printStackTrace()
-            x
-          case Exn(_) => throw exn
-        }
-    }
-  }
 }
diff -r 78f2475aa126 -r 83b7b88770c9 src/Pure/Isar/outer_syntax.scala
--- a/src/Pure/Isar/outer_syntax.scala	Sat May 18 12:41:31 2013 +0200
+++ b/src/Pure/Isar/outer_syntax.scala	Sat May 18 13:00:05 2013 +0200
@@ -112,34 +112,32 @@
   /* tokenize */

   def scan(input: Reader[Char]): List[Token] =
-    Exn.recover // FIXME !?
-    {
-      import lexicon._
+  {
+    import lexicon._

-      parseAll(rep(token(is_command)), input) match {
-        case Success(tokens, _) => tokens
-        case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
-      }
+    parseAll(rep(token(is_command)), input) match {
+      case Success(tokens, _) => tokens
+      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
     }
+  }

   def scan(input: CharSequence): List[Token] =
     scan(new CharSequenceReader(input))

   def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
-    Exn.recover // FIXME !?
-    {
-      import lexicon._
+  {
+    import lexicon._

-      var in: Reader[Char] = new CharSequenceReader(input)
-      val toks = new mutable.ListBuffer[Token]
-      var ctxt = context
-      while (!in.atEnd) {
-        parse(token_context(is_command, ctxt), in) match {
-          case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-          case NoSuccess(_, rest) =>
-            error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
-        }
+    var in: Reader[Char] = new CharSequenceReader(input)
+    val toks = new mutable.ListBuffer[Token]
+    var ctxt = context
+    while (!in.atEnd) {
+      parse(token_context(is_command, ctxt), in) match {
+        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+        case NoSuccess(_, rest) =>
+          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
       }
-      (toks.toList, ctxt)
     }
+    (toks.toList, ctxt)
+  }
 }
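
For context, the discontinued Exn.recover was a retry-once guard built on Exn.capture: evaluate an expression, and if it throws, evaluate it a second time; a successful retry reports the spurious first failure on stderr, while a second failure rethrows the original exception. The following self-contained Scala sketch reproduces that pattern outside the Isabelle sources (the object name RetryOnce and the demo in main are illustrative assumptions, not Isabelle API):

    // Sketch of the removed retry-once workaround; "RetryOnce" is an
    // illustrative name, not part of the Isabelle sources.
    object RetryOnce
    {
      sealed abstract class Result[A]
      case class Res[A](res: A) extends Result[A]
      case class Exn[A](exn: Throwable) extends Result[A]

      // evaluate e, reifying any thrown exception as a value
      def capture[A](e: => A): Result[A] =
        try { Res(e) }
        catch { case exn: Throwable => Exn[A](exn) }

      // evaluate e at most twice: a successful retry logs the spurious
      // first failure, a second failure rethrows the original exception
      def recover[A](e: => A): A =
        capture(e) match {
          case Res(x) => x
          case Exn(exn) =>
            capture(e) match {
              case Res(x) =>
                System.err.println("Recovered from spurious crash after retry!")
                exn.printStackTrace()
                x
              case Exn(_) => throw exn
            }
        }

      // demo: the first evaluation crashes, the retry yields 42
      def main(args: Array[String]): Unit =
      {
        var first = true
        val x = recover { if (first) { first = false; sys.error("spurious") } else 42 }
        println(x)
      }
    }

Because the wrapped expression may run twice, only repeatable (effectively pure) computations such as the tokenizers in outer_syntax.scala were safe to wrap; that constraint, together with the workaround hopefully being unnecessary on scala-2.10.x, is why the changeset can drop it and let scan/scan_context fail directly.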