discontinued odd workaround for scala-2.9.2; the underlying problem is hopefully obsolete in scala-2.10.x;
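
For reference, the discarded Exn.recover combinator evaluated its argument and, on an exception, evaluated it a second time, rethrowing the original exception only if the retry failed as well; its sole uses guarded the tokenizer entry points in outer_syntax.scala against "spurious" crashes under scala-2.9.2. A minimal self-contained sketch of those semantics (the RecoverSketch wrapper and main method are illustrative only; Result, Res, Exn, and capture mirror the definitions in src/Pure/General/exn.scala):

object RecoverSketch {
  sealed abstract class Result[A]
  case class Res[A](res: A) extends Result[A]
  case class Exn[A](exn: Throwable) extends Result[A]

  // evaluate e, turning an exception into a value
  def capture[A](e: => A): Result[A] =
    try { Res(e) }
    catch { case exn: Throwable => Exn[A](exn) }

  // the removed workaround: retry once, report a "spurious" first crash,
  // and rethrow the first exception only if the retry also fails
  def recover[A](e: => A): A =
    capture(e) match {
      case Res(x) => x
      case Exn(exn) =>
        capture(e) match {
          case Res(x) =>
            java.lang.System.err.println("Recovered from spurious crash after retry!")
            exn.printStackTrace()
            x
          case Exn(_) => throw exn
        }
    }

  def main(args: Array[String]): Unit = {
    var first = true
    // fails on the first evaluation only: recover masks the "spurious" crash
    val x = recover { if (first) { first = false; sys.error("spurious") } else 42 }
    println(x)  // 42, after the stack trace on stderr
  }
}
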
--- a/src/Pure/General/exn.scala Sat May 18 12:41:31 2013 +0200
+++ b/src/Pure/General/exn.scala Sat May 18 13:00:05 2013 +0200
@@ -47,23 +47,5 @@
   def message(exn: Throwable): String =
     user_message(exn) getOrElse
       (if (exn.isInstanceOf[InterruptedException]) "Interrupt"
        else exn.toString)
-
-
-  /* recover from spurious crash */
-
-  def recover[A](e: => A): A =
-  {
-    capture(e) match {
-      case Res(x) => x
-      case Exn(exn) =>
-        capture(e) match {
-          case Res(x) =>
-            java.lang.System.err.println("Recovered from spurious crash after retry!")
-            exn.printStackTrace()
-            x
-          case Exn(_) => throw exn
-        }
-    }
-  }
 }
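
In outer_syntax.scala below, both tokenizer entry points drop the Exn.recover wrapper, so a failing parse now raises its error directly instead of being retried. The resulting shape of scan is the standard parseAll idiom of the parser combinators bundled with scala-2.10; a simplified stand-in, with a toy regex token in place of Isabelle's token(is_command):

import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.input.{CharSequenceReader, Reader}

// Toy tokenizer in the shape of Outer_Syntax.scan after this change:
// parse the whole input with rep(token), fail loudly otherwise,
// and use no retry wrapper around the parser combinators.
object ScanSketch extends RegexParsers {
  override def skipWhitespace = false
  def token: Parser[String] = """\s+|\S+""".r  // toy stand-in for token(is_command)

  def scan(input: Reader[Char]): List[String] =
    parseAll(rep(token), input) match {
      case Success(tokens, _) => tokens
      case _ => sys.error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }

  def scan(input: CharSequence): List[String] =
    scan(new CharSequenceReader(input))

  def main(args: Array[String]): Unit =
    println(scan("theory Foo imports Main"))  // whitespace and word tokens
}
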
--- a/src/Pure/Isar/outer_syntax.scala Sat May 18 12:41:31 2013 +0200
+++ b/src/Pure/Isar/outer_syntax.scala Sat May 18 13:00:05 2013 +0200
@@ -112,34 +112,32 @@
   /* tokenize */
 
   def scan(input: Reader[Char]): List[Token] =
-    Exn.recover // FIXME !?
-    {
-      import lexicon._
+  {
+    import lexicon._
 
-      parseAll(rep(token(is_command)), input) match {
-        case Success(tokens, _) => tokens
-        case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
-      }
+    parseAll(rep(token(is_command)), input) match {
+      case Success(tokens, _) => tokens
+      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
     }
+  }
 
   def scan(input: CharSequence): List[Token] =
     scan(new CharSequenceReader(input))
 
   def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
-    Exn.recover // FIXME !?
-    {
-      import lexicon._
+  {
+    import lexicon._
 
-      var in: Reader[Char] = new CharSequenceReader(input)
-      val toks = new mutable.ListBuffer[Token]
-      var ctxt = context
-      while (!in.atEnd) {
-        parse(token_context(is_command, ctxt), in) match {
-          case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-          case NoSuccess(_, rest) =>
-            error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
-        }
+    var in: Reader[Char] = new CharSequenceReader(input)
+    val toks = new mutable.ListBuffer[Token]
+    var ctxt = context
+    while (!in.atEnd) {
+      parse(token_context(is_command, ctxt), in) match {
+        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+        case NoSuccess(_, rest) =>
+          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
       }
-      (toks.toList, ctxt)
     }
+    (toks.toList, ctxt)
+  }
 }
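
The scan_context variant keeps the same no-retry structure, but threads a Scan.Context value through the loop so that piecewise tokenizing can resume mid-construct at a chunk boundary. A toy model of that context-threading pattern (the names and the depth-counting context are invented for illustration; Isabelle's real Scan.Context is richer):

// Toy model of the scan_context pattern: each call processes one chunk and
// returns the scanner state to resume from, so a construct spanning a chunk
// boundary is continued rather than restarted.
object ScanContextSketch {
  type Context = Int  // nesting depth of unclosed (* ... *) comments

  // Return the text outside comments, plus the new nesting depth.
  def scan_context(input: String, context: Context): (String, Context) = {
    var depth = context
    val out = new StringBuilder
    var i = 0
    while (i < input.length) {
      if (input.startsWith("(*", i)) { depth += 1; i += 2 }
      else if (depth > 0 && input.startsWith("*)", i)) { depth -= 1; i += 2 }
      else { if (depth == 0) out += input(i); i += 1 }
    }
    (out.toString, depth)
  }

  def main(args: Array[String]): Unit = {
    // a comment spans the chunk boundary; threading the context keeps it a comment
    val (s1, c1) = scan_context("foo (* bar", 0)
    val (s2, _) = scan_context("baz *) qux", c1)
    println(s1 + s2)  // "foo  qux"
  }
}
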