# HG changeset patch
# User wenzelm
# Date 1261949807 -3600
# Node ID dfcf667bbfedd442aaadd26211c3316161517163
# Parent  10c5871ec684dd6406752c8b614ac05408ebf7d9
read header by scanning/parsing file;

diff -r 10c5871ec684 -r dfcf667bbfed src/Pure/Thy/thy_header.scala
--- a/src/Pure/Thy/thy_header.scala	Sun Dec 27 22:16:41 2009 +0100
+++ b/src/Pure/Thy/thy_header.scala	Sun Dec 27 22:36:47 2009 +0100
@@ -22,6 +22,8 @@
   val BEGIN = "begin"
 
   val lexicon = Scan.Lexicon("%", "(", ")", ";", BEGIN, HEADER, IMPORTS, THEORY, USES)
+
+  sealed case class Header(val name: String, val imports: List[String], val uses: List[String])
 }
 
 
@@ -32,7 +34,7 @@
 
   /* header */
 
-  val header: Parser[(String, List[String], List[String])] =
+  val header: Parser[Header] =
   {
     val file_name = atom("file name", _.is_name)
     val theory_name = atom("theory name", _.is_name)
@@ -44,7 +46,7 @@
 
     val args =
       theory_name ~ (keyword(IMPORTS) ~! (rep1(theory_name) ~ uses ~ keyword(BEGIN))) ^^
-        { case x ~ (_ ~ (ys ~ zs ~ _)) => (x, ys, zs) }
+        { case x ~ (_ ~ (ys ~ zs ~ _)) => Header(x, ys, zs) }
 
     (keyword(HEADER) ~ tags) ~!
       ((doc_source ~ rep(keyword(";")) ~ keyword(THEORY) ~ tags) ~> args) ^^ { case _ ~ x => x } |
@@ -52,9 +54,9 @@
   }
 
 
-  /* scan -- lazy approximation */
+  /* read -- lazy scanning */
 
-  def scan(file: File): List[Outer_Lex.Token] =
+  def read(file: File): Header =
   {
     val token = lexicon.token(symbols, _ => false)
     val toks = new mutable.ListBuffer[Outer_Lex.Token]
@@ -69,9 +71,12 @@
         case _ =>
       }
     }
-
     val reader = Scan.byte_reader(file)
     try { scan_to_begin(reader) } finally { reader.close }
-    toks.toList
+
+    parse(commit(header), Outer_Lex.reader(toks.toList)) match {
+      case Success(result, _) => result
+      case bad => error(bad.toString)
+    }
   }
 }
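
Usage sketch (not part of the changeset): with this patch, Thy_Header.read scans a theory file only up to the "begin" keyword and then parses the collected tokens, so callers get a structured Header(name, imports, uses) instead of a raw token list. The sketch below shows how the new interface might be called; the wrapper object, the Example.thy path, and the assumption that the Thy_Header constructor still takes a Symbol.Interpretation (as in the parent revision) are illustrative and not part of the patch.

import java.io.File
import isabelle._

object Thy_Header_Example
{
  // symbols: the Symbol.Interpretation assumed to be required by the
  // Thy_Header constructor; path: any theory file, e.g. "Example.thy"
  def print_header(symbols: Symbol.Interpretation, path: String)
  {
    // read scans up to "begin" and parses the header in one step
    val header: Thy_Header.Header = new Thy_Header(symbols).read(new File(path))
    println("theory " + header.name)
    println("imports " + header.imports.mkString(" "))
    println("uses " + header.uses.mkString(" "))
  }
}

Returning a Header case class rather than a (String, List[String], List[String]) tuple lets callers refer to the theory name, imports, and uses by field name.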