author | wenzelm |
Fri, 03 Oct 2014 23:33:47 +0200 | |
changeset 58528 | 7d6b8f8893e8 |
parent 58527 | 4b190c763097 |
child 58529 | cd4439d8799c |
permissions | -rw-r--r-- |
58523 | 1 |
/* Title: Pure/Tools/bibtex.scala |
2 |
Author: Makarius |
|
3 |
||
4 |
Some support for bibtex files. |
|
5 |
*/ |
|
6 |
||
7 |
package isabelle |
|
8 |
||
9 |
||
58528 | 10 |
import scala.collection.mutable |
58523 | 11 |
import scala.util.parsing.input.{Reader, CharSequenceReader} |
12 |
import scala.util.parsing.combinator.RegexParsers |
|
13 |
||
14 |
||
15 |
object Bibtex |
|
16 |
{ |
|
17 |
/** content **/ |
|
18 |
||
19 |
val months = List( |
|
20 |
"jan", |
|
21 |
"feb", |
|
22 |
"mar", |
|
23 |
"apr", |
|
24 |
"may", |
|
25 |
"jun", |
|
26 |
"jul", |
|
27 |
"aug", |
|
28 |
"sep", |
|
29 |
"oct", |
|
30 |
"nov", |
|
31 |
"dec") |
|
32 |
||
33 |
val commands = List("preamble", "string") |
|
34 |
||
58524 | 35 |
sealed case class Entry( |
58526 | 36 |
kind: String, |
58523 | 37 |
required: List[String], |
38 |
optional_crossref: List[String], |
|
39 |
optional: List[String]) |
|
58524 | 40 |
{ |
41 |
def fields: List[String] = required ::: optional_crossref ::: optional |
|
42 |
def template: String = |
|
58526 | 43 |
"@" + kind + "{,\n" + fields.map(x => " " + x + " = {},\n").mkString + "}\n" |
58524 | 44 |
} |
58523 | 45 |
|
58524 | 46 |
val entries: List[Entry] = |
47 |
List( |
|
48 |
Entry("Article", |
|
49 |
List("author", "title"), |
|
50 |
List("journal", "year"), |
|
51 |
List("volume", "number", "pages", "month", "note")), |
|
52 |
Entry("InProceedings", |
|
53 |
List("author", "title"), |
|
54 |
List("booktitle", "year"), |
|
55 |
List("editor", "volume", "number", "series", "pages", "month", "address", |
|
56 |
"organization", "publisher", "note")), |
|
57 |
Entry("InCollection", |
|
58 |
List("author", "title", "booktitle"), |
|
59 |
List("publisher", "year"), |
|
60 |
List("editor", "volume", "number", "series", "type", "chapter", "pages", |
|
61 |
"edition", "month", "address", "note")), |
|
62 |
Entry("InBook", |
|
63 |
List("author", "editor", "title", "chapter"), |
|
64 |
List("publisher", "year"), |
|
65 |
List("volume", "number", "series", "type", "address", "edition", "month", "pages", "note")), |
|
66 |
Entry("Proceedings", |
|
67 |
List("title", "year"), |
|
68 |
List(), |
|
69 |
List("booktitle", "editor", "volume", "number", "series", "address", "month", |
|
70 |
"organization", "publisher", "note")), |
|
71 |
Entry("Book", |
|
72 |
List("author", "editor", "title"), |
|
73 |
List("publisher", "year"), |
|
74 |
List("volume", "number", "series", "address", "edition", "month", "note")), |
|
75 |
Entry("Booklet", |
|
76 |
List("title"), |
|
77 |
List(), |
|
78 |
List("author", "howpublished", "address", "month", "year", "note")), |
|
79 |
Entry("PhdThesis", |
|
80 |
List("author", "title", "school", "year"), |
|
81 |
List(), |
|
82 |
List("type", "address", "month", "note")), |
|
83 |
Entry("MastersThesis", |
|
84 |
List("author", "title", "school", "year"), |
|
85 |
List(), |
|
86 |
List("type", "address", "month", "note")), |
|
87 |
Entry("TechReport", |
|
88 |
List("author", "title", "institution", "year"), |
|
89 |
List(), |
|
90 |
List("type", "number", "address", "month", "note")), |
|
91 |
Entry("Manual", |
|
92 |
List("title"), |
|
93 |
List(), |
|
94 |
List("author", "organization", "address", "edition", "month", "year", "note")), |
|
95 |
Entry("Unpublished", |
|
96 |
List("author", "title", "note"), |
|
97 |
List(), |
|
98 |
List("month", "year")), |
|
99 |
Entry("Misc", |
|
100 |
List(), |
|
101 |
List(), |
|
102 |
List("author", "title", "howpublished", "month", "year", "note"))) |
|
58523 | 103 |
|
104 |
||
105 |
||
106 |
/** tokens and chunks **/ |
|
107 |
||
108 |
  object Token
  {
    // Classification of lexical tokens within a bibtex chunk.
    // NB: Enumeration value ids depend on declaration order -- do not reorder.
    object Kind extends Enumeration
    {
      val KEYWORD = Value("keyword")         // punctuation: "@", braces, parens, "=", "#", ","
      val NAT = Value("natural number")
      val IDENT = Value("identifier")
      val STRING = Value("string")           // delimited "..." or {...} material
      val SPACE = Value("white space")
      val ERROR = Value("bad input")         // recovery token for malformed input
    }
  }
|
120 |
||
121 |
sealed case class Token(kind: Token.Kind.Value, val source: String) |
|
122 |
{ |
|
123 |
def is_space: Boolean = kind == Token.Kind.SPACE |
|
124 |
def is_error: Boolean = kind == Token.Kind.ERROR |
|
125 |
} |
|
126 |
||
58528 | 127 |
  // A maximal piece of source text: either ignored material or a bibtex item.
  abstract class Chunk
  {
    def size: Int       // length of the underlying source text
    def kind: String    // item kind; "" for non-item material
  }
|
132 |
||
133 |
case class Ignored(source: String) extends Chunk |
|
134 |
{ |
|
135 |
def size: Int = source.size |
|
136 |
def kind: String = "" |
|
137 |
} |
|
138 |
||
139 |
case class Item(kind: String, tokens: List[Token]) extends Chunk |
|
58523 | 140 |
{ |
58526 | 141 |
def size: Int = (0 /: tokens)({ case (n, token) => n + token.source.size }) |
142 |
||
58528 | 143 |
private val wellformed_content: Option[List[Token]] = |
58523 | 144 |
tokens match { |
145 |
case Token(Token.Kind.KEYWORD, "@") :: body |
|
146 |
if !body.isEmpty && !body.exists(_.is_error) => |
|
58526 | 147 |
(body.init.filterNot(_.is_space), body.last) match { |
58528 | 148 |
case (Token(Token.Kind.IDENT, _) :: Token(Token.Kind.KEYWORD, "{") :: toks, |
149 |
Token(Token.Kind.KEYWORD, "}")) => Some(toks) |
|
150 |
case (Token(Token.Kind.IDENT, _) :: Token(Token.Kind.KEYWORD, "(") :: toks, |
|
151 |
Token(Token.Kind.KEYWORD, ")")) => Some(toks) |
|
152 |
case _ => None |
|
58523 | 153 |
} |
58528 | 154 |
case _ => None |
58526 | 155 |
} |
58528 | 156 |
def is_wellformed: Boolean = kind != "" && wellformed_content.isDefined |
157 |
def content_tokens: List[Token] = wellformed_content getOrElse Nil |
|
58526 | 158 |
|
159 |
def name: String = |
|
160 |
content_tokens match { |
|
58528 | 161 |
case Token(Token.Kind.IDENT, id) :: _ if is_wellformed => id |
58523 | 162 |
case _ => "" |
163 |
} |
|
164 |
} |
|
165 |
||
166 |
||
167 |
||
168 |
/** parsing **/ |
|
169 |
||
170 |
// context of partial line-oriented scans |
|
171 |
  // Scanner state carried across line boundaries for partial line-oriented parsing.
  abstract class Line_Context
  case object Ignored_Context extends Line_Context
  // Inside an item of the given kind; `right` is its closing delimiter, "}" or ")".
  case class Item_Context(kind: String, delim: Delimited, right: String) extends Line_Context
  // State of a delimited string: opened by '"'? current {...} nesting depth.
  case class Delimited(quoted: Boolean, depth: Int)
  // No delimited string currently open.
  val Closed = Delimited(false, 0)
|
58523 | 176 |
|
177 |
private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source) |
|
178 |
private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source) |
|
179 |
||
180 |
||
181 |
// See also http://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web |
|
182 |
// module @<Scan for and process a \.{.bib} command or database entry@>. |
|
183 |
||
184 |
  object Parsers extends RegexParsers
  {
    /* white space and comments */

    // Disable implicit whitespace skipping: spaces are explicit tokens here.
    override val whiteSpace = "".r

    private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE)
    // Item headers accept only horizontal space, in accordance with the
    // bibtex modes of Emacs and jEdit (stricter than actual bibtex syntax).
    private val strict_space = """[ \t]+""".r ^^ token(Token.Kind.SPACE)


    /* ignored material outside items */

    // Anything up to the next "@", plus "@comment" pseudo-items.
    private val ignored: Parser[Chunk] =
      rep1("""(?mi)([^@]+|@[ \t]*comment)""".r) ^^ { case ss => Ignored(ss.mkString) }

    // Line-oriented variant: only applicable outside an item.
    private def ignored_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
      ctxt match {
        case Ignored_Context => ignored ^^ { case a => (a, ctxt) }
        case _ => failure("")
      }


    /* delimited string: outermost "..." or {...} and body with balanced {...} */

    // Scan as much of a delimited string as the input provides, resuming from
    // state `delim` (possibly mid-string from a previous line); yields the
    // scanned text and the resulting delimiter state.
    private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] =
      new Parser[(String, Delimited)]
      {
        require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0)

        def apply(in: Input) =
        {
          val start = in.offset
          val end = in.source.length

          var i = start
          var q = delim.quoted      // inside a '"'-opened string?
          var d = delim.depth       // current nesting depth
          var finished = false
          while (!finished && i < end) {
            val c = in.source.charAt(i)
            if (c == '"' && d == 0) { i += 1; d = 1; q = true }            // open "..."
            else if (c == '"' && d == 1) { i += 1; d = 0; q = false; finished = true }  // '"' at depth 1 terminates
            else if (c == '{') { i += 1; d += 1 }                          // nested brace group
            else if (c == '}' && d > 0) { i += 1; d -= 1; if (d == 0) finished = true }
            else if (d > 0) i += 1                                         // ordinary char inside string
            else finished = true                                           // not inside a string: stop
          }
          if (i == start) Failure("bad input", in)
          else {
            val s = in.source.subSequence(start, i).toString
            Success((s, Delimited(q, d)), in.drop(i - start))
          }
        }
      }.named("delimited_depth")

    // Accept only a completely closed delimited string as a STRING token.
    private def delimited: Parser[Token] =
      delimited_depth(Closed) ^?
        { case (s, delim) if delim == Closed => Token(Token.Kind.STRING, s) }

    // Line-oriented variant: continue a (possibly unfinished) string within an
    // item, threading the updated delimiter state through the context.
    private def delimited_line(ctxt: Line_Context): Parser[(Item, Line_Context)] =
    {
      ctxt match {
        case Item_Context(kind, delim, right) =>
          delimited_depth(delim) ^^ { case (s, delim1) =>
            (Item(kind, List(Token(Token.Kind.STRING, s))), Item_Context(kind, delim1, right)) }
        case _ => failure("")
      }
    }

    // Error recovery: swallow a malformed string up to the next "@".
    private def recover_delimited: Parser[Token] =
      """(?m)["{][^@]+""".r ^^ token(Token.Kind.ERROR)


    /* other tokens */

    private val at = "@" ^^ keyword
    private val left_brace = "{" ^^ keyword
    private val right_brace = "}" ^^ keyword
    private val left_paren = "(" ^^ keyword
    private val right_paren = ")" ^^ keyword

    private val nat = "[0-9]+".r ^^ token(Token.Kind.NAT)

    // Identifier: printable ASCII, excluding bibtex punctuation; no leading digit.
    private val ident =
      """[\x21-\x7f&&[^"#%'(),={}0-9]][\x21-\x7f&&[^"#%'(),={}]]*""".r ^^ token(Token.Kind.IDENT)

    // NB: `|` is ordered choice -- keywords are tried before nat/ident/space.
    val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))


    /* items */

    // "@ kind" header: yields the kind name and the tokens consumed so far.
    private val item_start: Parser[(String, List[Token])] =
      at ~ rep(strict_space) ~ ident ~ rep(strict_space) ^^
        { case a ~ b ~ c ~ d => (c.source, List(a) ::: b ::: List(c) ::: d) }

    private val item_body =
      delimited | (recover_delimited | other_token)

    // Complete item: header plus {...} or (...) body; the closing delimiter
    // is optional so that truncated items still produce an Item.
    private val item: Parser[Item] =
      (item_start ~ left_brace ~ rep(item_body) ~ opt(right_brace) |
       item_start ~ left_paren ~ rep(item_body) ~ opt(right_paren)) ^^
        { case (kind, a) ~ b ~ c ~ d => Item(kind, a ::: List(b) ::: c ::: d.toList) }

    // Error recovery: turn "@" plus everything up to the next "@" into an
    // Item with empty kind and an ERROR token.
    private val recover_item: Parser[Item] =
      at ~ "(?m)[^@]+".r ^^ { case a ~ b => Item("", List(a, Token(Token.Kind.ERROR, b))) }

    // Parse one line-portion of an item, threading the line context:
    // item start, string continuation, other tokens, or closing delimiter.
    def item_line(ctxt: Line_Context): Parser[(Item, Line_Context)] =
    {
      ctxt match {
        case Ignored_Context =>
          item_start ~ (left_brace | left_paren) ^^
            { case (kind, a) ~ b =>
                // remember which closing delimiter matches the opening one
                val right = if (b.source == "{") "}" else ")"
                (Item(kind, a ::: List(b)), Item_Context(kind, Closed, right)) }
        case Item_Context(kind, delim, right) =>
          // while inside an unfinished string, only string continuation applies
          if (delim.depth > 0)
            delimited_line(ctxt)
          else {
            delimited_line(ctxt) |
            other_token ^^ { case a => (Item(kind, List(a)), ctxt) } |
            right ^^ { case a => (Item(kind, List(keyword(a))), Ignored_Context) }
          }
        case _ => failure("")
      }
    }


    /* chunks */

    val chunk: Parser[Chunk] = ignored | (item | recover_item)

    def chunk_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
      ignored_line(ctxt) | item_line(ctxt)
  }
|
58523 | 318 |
|
319 |
||
58528 | 320 |
/* parse */ |
58523 | 321 |
|
322 |
def parse(input: CharSequence): List[Chunk] = |
|
323 |
{ |
|
324 |
val in: Reader[Char] = new CharSequenceReader(input) |
|
58528 | 325 |
Parsers.parseAll(Parsers.rep(Parsers.chunk), in) match { |
58523 | 326 |
case Parsers.Success(result, _) => result |
58526 | 327 |
case _ => error("Unexpected failure to parse input:\n" + input.toString) |
58523 | 328 |
} |
329 |
} |
|
58528 | 330 |
|
331 |
def parse_line(input: CharSequence, context: Line_Context): (List[Chunk], Line_Context) = |
|
332 |
{ |
|
333 |
var in: Reader[Char] = new CharSequenceReader(input) |
|
334 |
val chunks = new mutable.ListBuffer[Chunk] |
|
335 |
var ctxt = context |
|
336 |
while (!in.atEnd) { |
|
337 |
Parsers.parse(Parsers.chunk_line(ctxt), in) match { |
|
338 |
case Parsers.Success((x, c), rest) => { chunks += x; ctxt = c; in = rest } |
|
339 |
case Parsers.NoSuccess(_, rest) => |
|
340 |
error("Unepected failure to parse input:\n" + rest.source.toString) |
|
341 |
} |
|
342 |
} |
|
343 |
(chunks.toList, ctxt) |
|
344 |
} |
|
58523 | 345 |
} |
346 |