author      | wenzelm
date        | Sat, 15 May 2010 22:15:57 +0200
changeset   | 36948 | d2cdad45fd14
parent      | 34300 | 3f2e25dc99ab
child       | 36956 | 21be4832c362
permissions | -rw-r--r--

/*  Title:      Pure/Thy/thy_header.scala
    Author:     Makarius

Theory headers -- independent of outer syntax.
*/

package isabelle


import scala.collection.mutable
import scala.util.parsing.input.{Reader, CharSequenceReader}

import java.io.File


object Thy_Header
{
  val HEADER = "header"
  val THEORY = "theory"
  val IMPORTS = "imports"
  val USES = "uses"
  val BEGIN = "begin"

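  // minimal lexicon: just the delimiters and keywords that may occur in a theory header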
  val lexicon = Scan.Lexicon("%", "(", ")", ";", BEGIN, HEADER, IMPORTS, THEORY, USES)

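  // parsed result: theory name, imported theories, and auxiliary files from 'uses'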
  final case class Header(val name: String, val imports: List[String], val uses: List[String])
}


class Thy_Header(symbols: Symbol.Interpretation) extends Parse.Parser
{
  import Thy_Header._


  /* header */

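  // header syntax: optional "header <text> [;]" prefix, followed by
  //   theory <name> imports <name>+ [uses <file>+] begin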
  val header: Parser[Header] =
  {
    val file_name = atom("file name", _.is_name) ^^ Standard_System.decode_permissive_utf8
    val theory_name = atom("theory name", _.is_name) ^^ Standard_System.decode_permissive_utf8

    val file =
      keyword("(") ~! (file_name ~ keyword(")")) ^^ { case _ ~ (x ~ _) => x } | file_name

    val uses = opt(keyword(USES) ~! (rep1(file))) ^^ { case None => Nil case Some(_ ~ xs) => xs }

    val args =
      theory_name ~ (keyword(IMPORTS) ~! (rep1(theory_name) ~ uses ~ keyword(BEGIN))) ^^
        { case x ~ (_ ~ (ys ~ zs ~ _)) => Header(x, ys, zs) }

    (keyword(HEADER) ~ tags) ~!
      ((doc_source ~ rep(keyword(";")) ~ keyword(THEORY) ~ tags) ~> args) ^^ { case _ ~ x => x } |
    (keyword(THEORY) ~ tags) ~! args ^^ { case _ ~ x => x }
  }


  /* read -- lazy scanning */

  def read(file: File): Header =
  {
    val token = lexicon.token(symbols, _ => false)
    val toks = new mutable.ListBuffer[Outer_Lex.Token]

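    // accumulate tokens only up to (and including) the "begin" keyword;
    // the remainder of the file is never scanned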
    def scan_to_begin(in: Reader[Char])
    {
      token(in) match {
        case lexicon.Success(tok, rest) =>
          toks += tok
          if (!(tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == BEGIN))
            scan_to_begin(rest)
        case _ =>
      }
    }
    val reader = Scan.byte_reader(file)
    try { scan_to_begin(reader) } finally { reader.close }

    parse(commit(header), Outer_Lex.reader(toks.toList)) match {
      case Success(result, _) => result
      case bad => error(bad.toString)
    }
  }
}
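
/*
  A minimal usage sketch, assuming an existing Symbol.Interpretation named "symbols"
  and a theory file "A.thy" (both names are placeholders):

    val header = new Thy_Header(symbols).read(new File("A.thy"))
    println(header.name + " imports " + header.imports.mkString(", "))
*/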