182 |
181 |
183 Outer_Syntax.Line_Structure(improper1, command1, depth1, span_depth1, after_span_depth1) |
182 Outer_Syntax.Line_Structure(improper1, command1, depth1, span_depth1, after_span_depth1) |
184 } |
183 } |
185 |
184 |
186 |
185 |
/* token language */
|
188 |
|
/* Tokenize the complete input against the current keyword table.
   Fails hard (via error) if the input cannot be fully tokenized. */
def scan(input: CharSequence): List[Token] =
{
  val reader: Reader[Char] = new CharSequenceReader(input)
  val all_tokens = Token.Parsers.rep(Token.Parsers.token(keywords))
  Token.Parsers.parseAll(all_tokens, reader) match {
    case Token.Parsers.Success(tokens, _) => tokens
    case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
  }
}
|
197 |
|
/* Tokenize a single line, threading the scanner context through successive
   tokens; returns the tokens together with the resulting line context.
   Fails hard (via error) on any tokenization failure. */
def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) =
{
  // Tail-recursive consumption of the reader: each successful parse yields
  // one token, an updated context, and the remaining input.
  def consume(reader: Reader[Char], c: Scan.Line_Context, acc: List[Token])
    : (List[Token], Scan.Line_Context) =
  {
    if (reader.atEnd) (acc.reverse, c)
    else
      Token.Parsers.parse(Token.Parsers.token_line(keywords, c), reader) match {
        case Token.Parsers.Success((tok, c1), rest) => consume(rest, c1, tok :: acc)
        case Token.Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
  }
  consume(new CharSequenceReader(input), context, Nil)
}
|
212 |
|
213 |
|
/* command spans */
215 |
187 |
216 def parse_spans(toks: List[Token]): List[Command_Span.Span] = |
188 def parse_spans(toks: List[Token]): List[Command_Span.Span] = |
217 { |
189 { |
218 val result = new mutable.ListBuffer[Command_Span.Span] |
190 val result = new mutable.ListBuffer[Command_Span.Span] |