# HG changeset patch
# User wenzelm
# Date 1354909149 -3600
# Node ID 7a78a74139f54fca594217c24ec7a5da4a1a4a69
# Parent  2b6bd4771fd7c1b0a2fe36488bc42944c4726160
adhoc recovery from spurious NPEs, similar quantum-effect behind 7c8ce63a3c00;

diff -r 2b6bd4771fd7 -r 7a78a74139f5 src/Pure/Isar/outer_syntax.scala
--- a/src/Pure/Isar/outer_syntax.scala	Fri Dec 07 18:20:33 2012 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Fri Dec 07 20:39:09 2012 +0100
@@ -112,32 +112,34 @@
   /* tokenize */
 
   def scan(input: Reader[Char]): List[Token] =
-  {
-    import lexicon._
+    Exn.recover  // FIXME !?
+    {
+      import lexicon._
 
-    parseAll(rep(token(is_command)), input) match {
-      case Success(tokens, _) => tokens
-      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
+      parseAll(rep(token(is_command)), input) match {
+        case Success(tokens, _) => tokens
+        case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
+      }
     }
-  }
 
   def scan(input: CharSequence): List[Token] =
     scan(new CharSequenceReader(input))
 
   def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
-  {
-    import lexicon._
+    Exn.recover  // FIXME !?
+    {
+      import lexicon._
 
-    var in: Reader[Char] = new CharSequenceReader(input)
-    val toks = new mutable.ListBuffer[Token]
-    var ctxt = context
-    while (!in.atEnd) {
-      parse(token_context(is_command, ctxt), in) match {
-        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-        case NoSuccess(_, rest) =>
-          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
+      var in: Reader[Char] = new CharSequenceReader(input)
+      val toks = new mutable.ListBuffer[Token]
+      var ctxt = context
+      while (!in.atEnd) {
+        parse(token_context(is_command, ctxt), in) match {
+          case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+          case NoSuccess(_, rest) =>
+            error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
+        }
      }
+      (toks.toList, ctxt)
     }
-    (toks.toList, ctxt)
-  }
 }
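
Note on Exn.recover (not part of the changeset itself): the combinator wraps
the entire tokenizer body, so that a spurious NullPointerException raised
somewhere inside the Scala parser combinators leads to a recovery attempt
rather than a hard crash; the "FIXME !?" marks this as a stopgap. Its
definition is not shown in this diff (it presumably lives in Isabelle's Exn
module, src/Pure/General/exn.scala); the following is only a minimal sketch
of a retry-once combinator in that spirit, an assumed shape rather than the
actual Isabelle definition:

  object Exn
  {
    /* sketch only: evaluate the by-name argument; on a spurious runtime
       crash (e.g. NullPointerException) evaluate it once more, letting a
       second failure propagate to the caller */
    def recover[A](e: => A): A =
      try { e }
      catch { case _: RuntimeException => e }
  }

Read this way, scan(input) normally succeeds on the first evaluation; only
when the non-deterministic NPE strikes (the "quantum-effect" of the commit
message) does the body run a second time.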