

/* tokenize */

/** Tokenize the complete character stream.
  *
  * @param input reader over the characters to tokenize
  * @return all tokens of the input, in order
  *
  * Tokenization is expected to succeed on arbitrary input, so a parse
  * failure here indicates a scanner problem and raises an error.
  */
def scan(input: Reader[Char]): List[Token] =
  Exn.recover // FIXME !?
  {
    import lexicon._

    parseAll(rep(token(is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }
/** Tokenize a plain character sequence, wrapping it in a reader first. */
def scan(input: CharSequence): List[Token] =
  scan(new CharSequenceReader(input))
/** Tokenize input incrementally, threading a scanner context through the
  * token stream (e.g. to continue inside comments or quoted material that
  * span chunk boundaries).
  *
  * @param input   character sequence to tokenize
  * @param context scanner context at the start of this chunk
  * @return the tokens of this chunk together with the resulting context,
  *         to be passed to the next chunk
  *
  * A parse failure indicates a scanner problem and raises an error.
  */
def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  Exn.recover // FIXME !?
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    // Consume one token at a time so the context can be updated after each.
    while (!in.atEnd) {
      parse(token_context(is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }