| author      | wenzelm                         |
| date        | Sun, 18 Jan 2015 23:07:00 +0100 |
| changeset   | 59404 5d08b2332b76              |
| parent      | 59112 e670969f34df              |
| child       | 60215 5fb4990dfc73              |
| permissions | -rw-r--r--                      |
/*  Title:      Pure/ML/ml_lex.scala
    Author:     Makarius

Lexical syntax for Isabelle/ML and Standard ML.
*/

package isabelle


import scala.collection.mutable
import scala.util.parsing.input.{Reader, CharSequenceReader}


object ML_Lex
{
  /** keywords **/

  val keywords: Set[String] =
    Set("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=", "=>",
      "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
      "case", "datatype", "do", "else", "end", "eqtype", "exception",
      "fn", "fun", "functor", "handle", "if", "in", "include",
      "infix", "infixr", "let", "local", "nonfix", "of", "op", "open",
      "orelse", "raise", "rec", "sharing", "sig", "signature",
      "struct", "structure", "then", "type", "val", "where", "while",
      "with", "withtype")

  val keywords2: Set[String] =
    Set("and", "case", "do", "else", "end", "if", "in", "let", "local",
      "of", "sig", "struct", "then", "while", "with")

  val keywords3: Set[String] =
    Set("handle", "open", "raise")

  private val lexicon: Scan.Lexicon = Scan.Lexicon(keywords.toList: _*)
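
  // Note (added commentary, not in the original file): only `keywords` feeds the
  // lexicon used by the scanner below; `keywords2` and `keywords3` are smaller
  // subsets exported for consumers of this module, presumably for finer keyword
  // styling.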


  /** tokens **/

  object Kind extends Enumeration
  {
    val KEYWORD = Value("keyword")
    val IDENT = Value("identifier")
    val LONG_IDENT = Value("long identifier")
    val TYPE_VAR = Value("type variable")
    val WORD = Value("word")
    val INT = Value("integer")
    val REAL = Value("real")
    val CHAR = Value("character")
    val STRING = Value("quoted string")
    val SPACE = Value("white space")
    val CARTOUCHE = Value("text cartouche")
    val COMMENT = Value("comment text")
    val ANTIQ = Value("antiquotation")
    val ANTIQ_START = Value("antiquotation: start")
    val ANTIQ_STOP = Value("antiquotation: stop")
    val ANTIQ_OTHER = Value("antiquotation: other")
    val ANTIQ_STRING = Value("antiquotation: quoted string")
    val ANTIQ_ALT_STRING = Value("antiquotation: back-quoted string")
    val ANTIQ_CARTOUCHE = Value("antiquotation: text cartouche")
    val ERROR = Value("bad input")
  }

  sealed case class Token(val kind: Kind.Value, val source: String)
  {
    def is_keyword: Boolean = kind == Kind.KEYWORD
    def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
  }
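
  // Illustration (added, not in the original file): behaviour of the Token
  // predicates, following the definitions above.
  //
  //   Token(Kind.KEYWORD, "=>").is_delimiter    // true: keyword, not an ASCII identifier
  //   Token(Kind.KEYWORD, "val").is_delimiter   // false: "val" is an ASCII identifier
  //   Token(Kind.IDENT, "foo").is_keyword       // false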


  /** parsers **/

  case object ML_String extends Scan.Line_Context
  case class Antiq(ctxt: Scan.Line_Context) extends Scan.Line_Context

  private object Parsers extends Scan.Parsers with Antiquote.Parsers
  {
    /* string material */

    private val blanks = many(character(Symbol.is_ascii_blank))
    private val blanks1 = many1(character(Symbol.is_ascii_blank))

    private val gap = "\\" ~ blanks1 ~ "\\" ^^ { case x ~ y ~ z => x + y + z }
    private val gap_start = "\\" ~ blanks ~ """\z""".r ^^ { case x ~ y ~ _ => x + y }

    private val escape =
      one(character("\"\\abtnvfr".contains(_))) |
      "^" ~ one(character(c => '@' <= c && c <= '_')) ^^ { case x ~ y => x + y } |
      repeated(character(Symbol.is_ascii_digit), 3, 3)

    private val str =
      one(character(c => c != '"' && c != '\\' && ' ' <= c && c <= '~')) |
      one(s => Symbol.is_symbolic(s) | Symbol.is_control(s)) |
      "\\" ~ escape ^^ { case x ~ y => x + y }
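
    // Commentary (added, not in the original file): `gap` matches a complete SML
    // string gap "\<whitespace>\" -- e.g. the literal "a\   \b" denotes "ab" --
    // while `gap_start` matches a gap that is still open at the end of the current
    // line, which is what lets the line-oriented parsers below continue a string
    // on the next line.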


    /* ML char -- without gaps */

    private val ml_char: Parser[Token] =
      "#\"" ~ str ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.CHAR, x + y + z) }

    private val recover_ml_char: Parser[String] =
      "#\"" ~ opt(str) ^^ { case x ~ Some(y) => x + y case x ~ None => x }


    /* ML string */

    private val ml_string_body: Parser[String] =
      rep(gap | str) ^^ (_.mkString)

    private val recover_ml_string: Parser[String] =
      "\"" ~ ml_string_body ^^ { case x ~ y => x + y }

    private val ml_string: Parser[Token] =
      "\"" ~ ml_string_body ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.STRING, x + y + z) }

    private def ml_string_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
    {
      def result(x: String, c: Scan.Line_Context) = (Token(Kind.STRING, x), c)

      ctxt match {
        case Scan.Finished =>
          "\"" ~ ml_string_body ~ ("\"" | gap_start) ^^
            { case x ~ y ~ z => result(x + y + z, if (z == "\"") Scan.Finished else ML_String) }
        case ML_String =>
          blanks ~ opt_term("\\" ~ ml_string_body ~ ("\"" | gap_start)) ^^
            { case x ~ Some(y ~ z ~ w) =>
                result(x + y + z + w, if (w == "\"") Scan.Finished else ML_String)
              case x ~ None => result(x, ML_String) }
        case _ => failure("")
      }
    }
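
    // Illustration (added, not in the original file): context transitions for a
    // string broken across lines. Starting from Scan.Finished, the input
    //   "a\
    // ends in an open gap, so the parser yields a STRING token together with the
    // ML_String context; on the next line
    //   \b"
    // the gap is closed and the string terminated, returning to Scan.Finished.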


    /* ML cartouche */

    private val ml_cartouche: Parser[Token] =
      cartouche ^^ (x => Token(Kind.CARTOUCHE, x))

    private def ml_cartouche_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      cartouche_line(ctxt) ^^ { case (x, c) => (Token(Kind.CARTOUCHE, x), c) }


    /* ML comment */

    private val ml_comment: Parser[Token] =
      comment ^^ (x => Token(Kind.COMMENT, x))

    private def ml_comment_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      comment_line(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }


    /* delimited token */

    private def delimited_token: Parser[Token] =
      ml_char | (ml_string | (ml_cartouche | ml_comment))

    private val recover_delimited: Parser[Token] =
      (recover_ml_char | (recover_ml_string | (recover_cartouche | recover_comment))) ^^
        (x => Token(Kind.ERROR, x))


    private def other_token: Parser[Token] =
    {
      /* identifiers */

      val letdigs = many(character(Symbol.is_ascii_letdig))

      val alphanumeric =
        one(character(Symbol.is_ascii_letter)) ~ letdigs ^^ { case x ~ y => x + y }

      val symbolic = many1(character("!#$%&*+-/:<=>?@\\^`|~".contains(_)))

      val ident = (alphanumeric | symbolic) ^^ (x => Token(Kind.IDENT, x))

      val long_ident =
        rep1(alphanumeric ~ "." ^^ { case x ~ y => x + y }) ~
          (alphanumeric | (symbolic | "=")) ^^
          { case x ~ y => Token(Kind.LONG_IDENT, x.mkString + y) }

      val type_var = "'" ~ letdigs ^^ { case x ~ y => Token(Kind.TYPE_VAR, x + y) }


      /* numerals */

      val dec = many1(character(Symbol.is_ascii_digit))
      val hex = many1(character(Symbol.is_ascii_hex))
      val sign = opt("~") ^^ { case Some(x) => x case None => "" }
      val decint = sign ~ dec ^^ { case x ~ y => x + y }
      val exp = ("E" | "e") ~ decint ^^ { case x ~ y => x + y }

      val word =
        ("0wx" ~ hex ^^ { case x ~ y => x + y } | "0w" ~ dec ^^ { case x ~ y => x + y }) ^^
          (x => Token(Kind.WORD, x))

      val int =
        sign ~ ("0x" ~ hex ^^ { case x ~ y => x + y } | dec) ^^
          { case x ~ y => Token(Kind.INT, x + y) }

      val real =
        (decint ~ "." ~ dec ~ (opt(exp) ^^ { case Some(x) => x case None => "" }) ^^
          { case x ~ y ~ z ~ w => x + y + z + w } |
         decint ~ exp ^^ { case x ~ y => x + y }) ^^ (x => Token(Kind.REAL, x))


      /* main */

      val space = blanks1 ^^ (x => Token(Kind.SPACE, x))

      val keyword = literal(lexicon) ^^ (x => Token(Kind.KEYWORD, x))

      val ml_antiq = antiq ^^ (x => Token(Kind.ANTIQ, x))

      val bad = one(_ => true) ^^ (x => Token(Kind.ERROR, x))

      space | (recover_delimited | (ml_antiq |
        (((word | (real | (int | (long_ident | (ident | type_var))))) ||| keyword) | bad)))
    }
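
    // Examples (added commentary, not in the original file): literal forms accepted
    // by the numeral parsers above, using SML's "~" as the minus sign.
    //   0w255, 0wxFF        words
    //   42, ~42, 0x2A       integers
    //   3.14, ~1.0E~3       reals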


    /* antiquotations (line-oriented) */

    def ml_antiq_start(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      ctxt match {
        case Scan.Finished => "@{" ^^ (x => (Token(Kind.ANTIQ_START, x), Antiq(Scan.Finished)))
        case _ => failure("")
      }

    def ml_antiq_stop(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      ctxt match {
        case Antiq(Scan.Finished) => "}" ^^ (x => (Token(Kind.ANTIQ_STOP, x), Scan.Finished))
        case _ => failure("")
      }

    def ml_antiq_body(context: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      context match {
        case Antiq(ctxt) =>
          (if (ctxt == Scan.Finished) antiq_other ^^ (x => (Token(Kind.ANTIQ_OTHER, x), context))
           else failure("")) |
          quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_STRING, x), Antiq(c)) } |
          quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_ALT_STRING, x), Antiq(c)) } |
          cartouche_line(ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_CARTOUCHE, x), Antiq(c)) }
        case _ => failure("")
      }
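
    // Illustration (added, not in the original file): in line-oriented mode an
    // antiquotation such as @{make_string} is emitted as separate tokens --
    // ANTIQ_START "@{", ANTIQ_OTHER "make_string", ANTIQ_STOP "}" -- whereas the
    // plain `antiq` parser used in other_token above keeps it as a single ANTIQ
    // token.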


    /* token */

    def token: Parser[Token] = delimited_token | other_token

    def token_line(SML: Boolean, ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
    {
      val other = (ml_char | other_token) ^^ (x => (x, Scan.Finished))

      if (SML) ml_string_line(ctxt) | (ml_comment_line(ctxt) | other)
      else
        ml_string_line(ctxt) |
          (ml_cartouche_line(ctxt) |
            (ml_comment_line(ctxt) |
              (ml_antiq_start(ctxt) | (ml_antiq_stop(ctxt) | (ml_antiq_body(ctxt) | other)))))
    }
  }


  /* tokenize */

  def tokenize(input: CharSequence): List[Token] =
  {
    Parsers.parseAll(Parsers.rep(Parsers.token), new CharSequenceReader(input)) match {
      case Parsers.Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
    }
  }
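
  // Usage sketch (added, not part of the original file):
  //   ML_Lex.tokenize("val x = 1")
  // yields roughly: KEYWORD "val", SPACE " ", IDENT "x", SPACE " ",
  // KEYWORD "=", SPACE " ", INT "1".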
|
|
  def tokenize_line(SML: Boolean, input: CharSequence, context: Scan.Line_Context)
    : (List[Token], Scan.Line_Context) =
  {
    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      Parsers.parse(Parsers.token_line(SML, ctxt), in) match {
        case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
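
  // Usage sketch (added, not part of the original file): the line context returned
  // for one line is threaded into the next call, so delimited tokens may span
  // lines; with SML = true, cartouches and antiquotations are not recognized
  // (cf. token_line above).
  //   val (toks1, ctxt1) = ML_Lex.tokenize_line(false, "fun f x = (*comment", Scan.Finished)
  //   val (toks2, _) = ML_Lex.tokenize_line(false, "still comment*) x", ctxt1)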
|
}