124
125     /* token language */
126
127     def scan(input: CharSequence): List[Token] =
128     {
129 -     var in: Reader[Char] = new CharSequenceReader(input)
129 +     val in: Reader[Char] = new CharSequenceReader(input)
130       Token.Parsers.parseAll(
131         Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match {
132         case Token.Parsers.Success(tokens, _) => tokens
133         case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
134       }
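
The only substantive change in this hunk is on line 129: var in becomes val in. The binding is never reassigned, because each parser step returns a fresh Reader rather than mutating the old one, so an immutable val is the idiomatic choice. The following minimal sketch illustrates the same parseAll/CharSequenceReader pattern using the standard scala-parser-combinators library; the WordScanner object and its word parser are illustrative assumptions, not Isabelle's actual Token.Parsers.

import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.input.{CharSequenceReader, Reader}

// Hypothetical stand-in scanner (not Isabelle's Token.Parsers):
// splits the input into whitespace-separated words.
object WordScanner extends RegexParsers {
  private val word = """\S+""".r

  def scan(input: CharSequence): List[String] = {
    // A val suffices: parsing never reassigns in; each step yields a new Reader.
    val in: Reader[Char] = new CharSequenceReader(input)
    parseAll(rep(word), in) match {
      case Success(words, _) => words
      case failure => sys.error("Unexpected failure of tokenizing input:\n" + failure)
    }
  }
}

// Example: WordScanner.scan("lemma foo: bar") yields List("lemma", "foo:", "bar")

The sketch requires the scala-parser-combinators module as a dependency; only the error handling differs from the original, which calls Isabelle's own error function instead of sys.error.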