/*  Title:      Pure/ML/ml_lex.scala
    Author:     Makarius

Lexical syntax for Isabelle/ML and Standard ML.

History (from VCS annotation): changeset 62508 d0b68218ea55 by wenzelm,
Thu, 03 Mar 2016 21:59:21 +0100; parent 61596 8323b8e21fe9;
child 63204 921a5be54132; permissions -rw-r--r--.
*/
7 |
package isabelle


import scala.collection.mutable

import scala.util.parsing.input.{Reader, CharSequenceReader}
/** Tokenizer for Isabelle/ML and Standard ML source text.
  *
  * Offers whole-text tokenization (`tokenize`) and line-oriented,
  * context-carrying tokenization (`tokenize_line`) as used by incremental
  * editor highlighting.  The `SML` flag selects plain Standard ML mode
  * (no Isabelle symbols / antiquotations).
  */
object ML_Lex
{
  /** keywords **/

  // Full set of SML reserved words and delimiters.
  val keywords: Set[String] =
    Set("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=", "=>",
      "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
      "case", "datatype", "do", "else", "end", "eqtype", "exception",
      "fn", "fun", "functor", "handle", "if", "in", "include",
      "infix", "infixr", "let", "local", "nonfix", "of", "op", "open",
      "orelse", "raise", "rec", "sharing", "sig", "signature",
      "struct", "structure", "then", "type", "val", "where", "while",
      "with", "withtype")

  // Keyword subclasses -- presumably used by client code for markup
  // styling; no further semantics visible here.
  val keywords2: Set[String] =
    Set("and", "case", "do", "else", "end", "if", "in", "let", "local",
      "of", "sig", "struct", "then", "while", "with")

  val keywords3: Set[String] =
    Set("handle", "open", "raise")

  private val lexicon: Scan.Lexicon = Scan.Lexicon(keywords.toList: _*)



  /** tokens **/

  // Token classification; the string is the human-readable kind name.
  object Kind extends Enumeration
  {
    val KEYWORD = Value("keyword")
    val IDENT = Value("identifier")
    val LONG_IDENT = Value("long identifier")
    val TYPE_VAR = Value("type variable")
    val WORD = Value("word")
    val INT = Value("integer")
    val REAL = Value("real")
    val CHAR = Value("character")
    val STRING = Value("quoted string")
    val SPACE = Value("white space")
    val COMMENT = Value("comment text")
    val CONTROL = Value("control symbol antiquotation")
    val ANTIQ = Value("antiquotation")
    val ANTIQ_START = Value("antiquotation: start")
    val ANTIQ_STOP = Value("antiquotation: stop")
    val ANTIQ_OTHER = Value("antiquotation: other")
    val ANTIQ_STRING = Value("antiquotation: quoted string")
    val ANTIQ_ALT_STRING = Value("antiquotation: back-quoted string")
    val ANTIQ_CARTOUCHE = Value("antiquotation: text cartouche")
    val ERROR = Value("bad input")
  }

  sealed case class Token(kind: Kind.Value, source: String)
  {
    def is_keyword: Boolean = kind == Kind.KEYWORD
    // Delimiters are symbolic keywords such as "(" or "=>", as opposed to
    // alphanumeric reserved words like "val".
    def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
  }



  /** parsers **/

  // Line contexts for multi-line constructs: an unterminated ML string,
  // or an open antiquotation (which carries its own nested context).
  case object ML_String extends Scan.Line_Context
  case class Antiq(ctxt: Scan.Line_Context) extends Scan.Line_Context

  private object Parsers extends Scan.Parsers with Antiquote.Parsers
  {
    /* string material */

    private val blanks = many(character(Symbol.is_ascii_blank))
    private val blanks1 = many1(character(Symbol.is_ascii_blank))

    // SML string "gap": backslash, whitespace, backslash.  gap_start matches
    // a gap that is cut off by the end of the line ("""\z""" = end of input).
    private val gap = "\\" ~ blanks1 ~ "\\" ^^ { case x ~ y ~ z => x + y + z }
    private val gap_start = "\\" ~ blanks ~ """\z""".r ^^ { case x ~ y ~ _ => x + y }

    // SML escape sequences: named escapes, control escapes ^@ .. ^_,
    // or exactly three decimal digits.
    private val escape =
      one(character("\"\\abtnvfr".contains(_))) |
      "^" ~ one(character(c => '@' <= c && c <= '_')) ^^ { case x ~ y => x + y } |
      repeated(character(Symbol.is_ascii_digit), 3, 3)

    // One string element: printable ASCII (except quote/backslash),
    // an Isabelle symbolic/control symbol, or an escape.
    private val str =
      one(character(c => c != '"' && c != '\\' && ' ' <= c && c <= '~')) |
      one(s => Symbol.is_symbolic(s) | Symbol.is_control(s)) |
      "\\" ~ escape ^^ { case x ~ y => x + y }


    /* ML char -- without gaps */

    private val ml_char: Parser[Token] =
      "#\"" ~ str ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.CHAR, x + y + z) }

    // Error recovery: accept a char literal prefix without closing quote.
    private val recover_ml_char: Parser[String] =
      "#\"" ~ opt(str) ^^ { case x ~ Some(y) => x + y case x ~ None => x }


    /* ML string */

    private val ml_string_body: Parser[String] =
      rep(gap | str) ^^ (_.mkString)

    private val recover_ml_string: Parser[String] =
      "\"" ~ ml_string_body ^^ { case x ~ y => x + y }

    private val ml_string: Parser[Token] =
      "\"" ~ ml_string_body ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.STRING, x + y + z) }

    // Line-oriented string scanning: a string may span lines via a gap at
    // end-of-line (gap_start); the returned context records whether the
    // string is still open (ML_String) or closed (Scan.Finished).
    private def ml_string_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
    {
      def result(x: String, c: Scan.Line_Context) = (Token(Kind.STRING, x), c)

      ctxt match {
        case Scan.Finished =>
          "\"" ~ ml_string_body ~ ("\"" | gap_start) ^^
            { case x ~ y ~ z => result(x + y + z, if (z == "\"") Scan.Finished else ML_String) }
        case ML_String =>
          // Continuation line: leading blanks, then the closing "\" of the
          // gap and the rest of the string (or nothing, if the line is blank).
          blanks ~ opt_term("\\" ~ ml_string_body ~ ("\"" | gap_start)) ^^
            { case x ~ Some(y ~ z ~ w) =>
                result(x + y + z + w, if (w == "\"") Scan.Finished else ML_String)
              case x ~ None => result(x, ML_String) }
        case _ => failure("")
      }
    }


    /* ML comment */

    private val ml_comment: Parser[Token] =
      comment ^^ (x => Token(Kind.COMMENT, x))

    private def ml_comment_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      comment_line(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }


    /* delimited token */

    private def delimited_token: Parser[Token] =
      ml_char | (ml_string | ml_comment)

    // Unterminated delimited material becomes a single ERROR token.
    private val recover_delimited: Parser[Token] =
      (recover_ml_char | (recover_ml_string | (recover_cartouche | recover_comment))) ^^
        (x => Token(Kind.ERROR, x))


    private def other_token: Parser[Token] =
    {
      /* identifiers */

      val letdigs = many(character(Symbol.is_ascii_letdig))

      val alphanumeric =
        one(character(Symbol.is_ascii_letter)) ~ letdigs ^^ { case x ~ y => x + y }

      val symbolic = many1(character("!#$%&*+-/:<=>?@\\^`|~".contains(_)))

      val ident = (alphanumeric | symbolic) ^^ (x => Token(Kind.IDENT, x))

      // Qualified name: one or more "structure." prefixes, then a final
      // identifier; a trailing "=" is also admitted here.
      val long_ident =
        rep1(alphanumeric ~ "." ^^ { case x ~ y => x + y }) ~
          (alphanumeric | (symbolic | "=")) ^^
          { case x ~ y => Token(Kind.LONG_IDENT, x.mkString + y) }

      val type_var = "'" ~ letdigs ^^ { case x ~ y => Token(Kind.TYPE_VAR, x + y) }


      /* numerals */

      val dec = many1(character(Symbol.is_ascii_digit))
      val hex = many1(character(Symbol.is_ascii_hex))
      val sign = opt("~") ^^ { case Some(x) => x case None => "" }  // SML negation is "~"
      val decint = sign ~ dec ^^ { case x ~ y => x + y }
      val exp = ("E" | "e") ~ decint ^^ { case x ~ y => x + y }

      val word =
        ("0wx" ~ hex ^^ { case x ~ y => x + y } | "0w" ~ dec ^^ { case x ~ y => x + y }) ^^
          (x => Token(Kind.WORD, x))

      val int =
        sign ~ ("0x" ~ hex ^^ { case x ~ y => x + y } | dec) ^^
          { case x ~ y => Token(Kind.INT, x + y) }

      val real =
        (decint ~ "." ~ dec ~ (opt(exp) ^^ { case Some(x) => x case None => "" }) ^^
          { case x ~ y ~ z ~ w => x + y + z + w } |
         decint ~ exp ^^ { case x ~ y => x + y }) ^^ (x => Token(Kind.REAL, x))


      /* main */

      val space = blanks1 ^^ (x => Token(Kind.SPACE, x))

      val keyword = literal(lexicon) ^^ (x => Token(Kind.KEYWORD, x))

      val ml_control = control ^^ (x => Token(Kind.CONTROL, x))
      val ml_antiq = antiq ^^ (x => Token(Kind.ANTIQ, x))

      // Catch-all: any single remaining symbol is bad input.
      val bad = one(_ => true) ^^ (x => Token(Kind.ERROR, x))

      // "|||" takes the longest match between numbers/identifiers and
      // keywords, so e.g. "andalso" is not split after keyword "and".
      space | (ml_control | (recover_delimited | (ml_antiq |
        (((word | (real | (int | (long_ident | (ident | type_var))))) ||| keyword) | bad))))
    }


    /* antiquotations (line-oriented) */

    def ml_antiq_start(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      ctxt match {
        case Scan.Finished => "@{" ^^ (x => (Token(Kind.ANTIQ_START, x), Antiq(Scan.Finished)))
        case _ => failure("")
      }

    def ml_antiq_stop(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      ctxt match {
        case Antiq(Scan.Finished) => "}" ^^ (x => (Token(Kind.ANTIQ_STOP, x), Scan.Finished))
        case _ => failure("")
      }

    // Material inside an open antiquotation: plain text, quoted strings,
    // back-quoted strings, or text cartouches -- each of which may itself
    // continue across lines via the nested context.
    def ml_antiq_body(context: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
      context match {
        case Antiq(ctxt) =>
          (if (ctxt == Scan.Finished) antiq_other ^^ (x => (Token(Kind.ANTIQ_OTHER, x), context))
           else failure("")) |
          quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_STRING, x), Antiq(c)) } |
          quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_ALT_STRING, x), Antiq(c)) } |
          cartouche_line(ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_CARTOUCHE, x), Antiq(c)) }
        case _ => failure("")
      }


    /* token */

    def token: Parser[Token] = delimited_token | other_token

    // One token on a single line, threading the line context.  SML mode
    // excludes antiquotations entirely.
    def token_line(SML: Boolean, ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
    {
      val other = (ml_char | other_token) ^^ (x => (x, Scan.Finished))

      if (SML) ml_string_line(ctxt) | (ml_comment_line(ctxt) | other)
      else
        ml_string_line(ctxt) |
          (ml_comment_line(ctxt) |
            (ml_antiq_start(ctxt) | (ml_antiq_stop(ctxt) | (ml_antiq_body(ctxt) | other))))
    }
  }


  /* tokenize */

  /** Tokenize complete input; raises an Isabelle error on parser failure
    * (which should not happen, since `bad` accepts any residue). */
  def tokenize(input: CharSequence): List[Token] =
  {
    Parsers.parseAll(Parsers.rep(Parsers.token), new CharSequenceReader(input)) match {
      case Parsers.Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
    }
  }

  /** Tokenize one line, starting in `context` and returning the tokens
    * together with the context to carry into the next line. */
  def tokenize_line(SML: Boolean, input: CharSequence, context: Scan.Line_Context)
    : (List[Token], Scan.Line_Context) =
  {
    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      Parsers.parse(Parsers.token_line(SML, ctxt), in) match {
        case Parsers.Success((x, c), rest) => toks += x; ctxt = c; in = rest
        case Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}