# HG changeset patch
# User wenzelm
# Date 1261561273 -3600
# Node ID f5dbdbc86b0facc905a43a5dc3264f248c12fa95
# Parent 412cf41a92a0b00c8d672c448bd2896ad80f79cf
# Parent 7501b2910900dd98286863b19400a9b535006efe
merged

diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/General/scan.scala
--- a/src/Pure/General/scan.scala	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/General/scan.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -135,9 +135,9 @@
   }.named("keyword")
 
 
-  /* symbols */
+  /* symbol range */
 
-  def symbols(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] =
+  def symbol_range(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] =
     new Parser[String]
     {
       def apply(in: Input) =
@@ -161,11 +161,11 @@
         if (count < min_count) Failure("bad input", in)
         else Success(in.source.subSequence(start, i).toString, in.drop(i - start))
       }
-    }.named("symbols")
+    }.named("symbol_range")
 
-  def one(pred: String => Boolean): Parser[String] = symbols(pred, 1, 1)
-  def many(pred: String => Boolean): Parser[String] = symbols(pred, 0, Integer.MAX_VALUE)
-  def many1(pred: String => Boolean): Parser[String] = symbols(pred, 1, Integer.MAX_VALUE)
+  def one(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, 1)
+  def many(pred: String => Boolean): Parser[String] = symbol_range(pred, 0, Integer.MAX_VALUE)
+  def many1(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, Integer.MAX_VALUE)
 
 
   /* quoted strings */
@@ -246,38 +246,41 @@
     def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Outer_Lex.Token] =
     {
-      import Outer_Lex._
+      import Outer_Lex.Token_Kind, Outer_Lex.Token
 
       val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
       val nat = many1(symbols.is_digit)
       val id_nat = id ~ opt("." ~ nat) ^^
        { case x ~ Some(y ~ z) => x + y + z
          case x ~ None => x }
 
       val ident = id ~ rep("." ~> id) ^^
-        { case x ~ Nil => Ident(x)
-          case x ~ ys => Long_Ident((x :: ys).mkString(".")) }
+        { case x ~ Nil => Token(Token_Kind.IDENT, x)
+          case x ~ ys => Token(Token_Kind.LONG_IDENT, (x :: ys).mkString(".")) }
 
-      val var_ = "?" ~ id_nat ^^ { case x ~ y => Var(x + y) }
-      val type_ident = "'" ~ id ^^ { case x ~ y => Type_Ident(x + y) }
-      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Type_Var(x + y) }
-      val nat_ = nat ^^ Nat
+      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.VAR, x + y) }
+      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token_Kind.TYPE_IDENT, x + y) }
+      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.TYPE_VAR, x + y) }
+      val nat_ = nat ^^ (x => Token(Token_Kind.NAT, x))
 
       val sym_ident =
         (many1(symbols.is_symbolic_char) |
-          one(sym => symbols.is_symbolic(sym) & Symbol.is_closed(sym))) ^^ Sym_Ident
+          one(sym => symbols.is_symbolic(sym) & Symbol.is_closed(sym))) ^^
+        (x => Token(Token_Kind.SYM_IDENT, x))
 
-      val space = many1(symbols.is_blank) ^^ Space
+      val space = many1(symbols.is_blank) ^^ (x => Token(Token_Kind.SPACE, x))
 
-      val string = quoted("\"") ^^ String_Token
-      val alt_string = quoted("`") ^^ Alt_String_Token
+      val string = quoted("\"") ^^ (x => Token(Token_Kind.STRING, x))
+      val alt_string = quoted("`") ^^ (x => Token(Token_Kind.ALT_STRING, x))
 
-      val bad_input = many1(sym => !(symbols.is_blank(sym))) ^^ Bad_Input
+      val bad_input = many1(sym => !(symbols.is_blank(sym))) ^^
+        (x => Token(Token_Kind.BAD_INPUT, x))
 
 
       /* tokens */
 
-      (space | (string | (alt_string | (verbatim ^^ Verbatim | comment ^^ Comment)))) |
+      (space | (string | (alt_string | (verbatim ^^ (x => Token(Token_Kind.VERBATIM, x)) |
+        comment ^^ (x => Token(Token_Kind.COMMENT, x)))))) |
       ((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) |||
-        keyword ^^ (x => if (is_command(x)) Command(x) else Keyword(x))) |
+        keyword ^^ (x => Token(if (is_command(x)) Token_Kind.COMMAND else Token_Kind.KEYWORD, x))) |
       bad_input
     }
 }
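The one/many/many1 entry points above are now thin wrappers around the single
bounded scanner symbol_range. For illustration only, a minimal sketch of how the
bounds behave, assuming a Scan.Lexicon instance in scope (the digit predicate is
invented for the example):

    val lexicon = Scan.Lexicon.empty
    import lexicon._

    // many1 = symbol_range(pred, 1, Integer.MAX_VALUE): one or more symbols
    val digits = many1(sym => "0123456789" contains sym)
    parseAll(digits, "2009") match {
      case Success(result, _) => println(result)  // prints "2009"
      case bad => println(bad)
    }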
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/IsaMakefile
--- a/src/Pure/IsaMakefile	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/IsaMakefile	Wed Dec 23 10:41:13 2009 +0100
@@ -125,10 +125,11 @@
   General/linear_set.scala General/markup.scala General/position.scala \
   General/scan.scala General/swing_thread.scala General/symbol.scala \
   General/xml.scala General/yxml.scala Isar/isar_document.scala \
-  Isar/outer_keyword.scala Isar/outer_lex.scala System/cygwin.scala \
-  System/gui_setup.scala System/isabelle_process.scala \
-  System/isabelle_syntax.scala System/isabelle_system.scala \
-  System/platform.scala Thy/completion.scala Thy/html.scala \
+  Isar/outer_keyword.scala Isar/outer_lex.scala Isar/outer_parse.scala \
+  Isar/outer_syntax.scala System/cygwin.scala System/gui_setup.scala \
+  System/isabelle_process.scala System/isabelle_syntax.scala \
+  System/isabelle_system.scala System/platform.scala \
+  System/session_manager.scala Thy/completion.scala Thy/html.scala \
   Thy/thy_header.scala library.scala
 
 JAR_DIR = $(ISABELLE_HOME)/lib/classes
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/Isar/outer_keyword.scala
--- a/src/Pure/Isar/outer_keyword.scala	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/Isar/outer_keyword.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -1,13 +1,13 @@
 /*  Title:      Pure/Isar/outer_keyword.scala
     Author:     Makarius
 
-Isar command keyword classification.
+Isar command keyword classification and keyword tables.
 */
 
 package isabelle
 
 
-object OuterKeyword
+object Outer_Keyword
 {
   val MINOR = "minor"
   val CONTROL = "control"
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/Isar/outer_lex.scala
--- a/src/Pure/Isar/outer_lex.scala	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/Isar/outer_lex.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -9,92 +9,84 @@
 object Outer_Lex
 {
-  sealed abstract class Token
-  {
-    def source: String
-    def content: String = source
-
-    def is_delimited: Boolean = false
-    def is_name: Boolean = false
-    def is_xname: Boolean = false
-    def is_text: Boolean = false
-    def is_space: Boolean = false
-    def is_comment: Boolean = false
-    def is_proper: Boolean = !(is_space || is_comment)
-  }
-
-  case class Command(val source: String) extends Token
-
-  case class Keyword(val source: String) extends Token
+  /* tokens */
 
-  case class Ident(val source: String) extends Token
-  {
-    override def is_name = true
-    override def is_xname = true
-    override def is_text = true
-  }
-
-  case class Long_Ident(val source: String) extends Token
+  object Token_Kind extends Enumeration
   {
-    override def is_xname = true
-    override def is_text = true
-  }
-
-  case class Sym_Ident(val source: String) extends Token
-  {
-    override def is_name = true
-    override def is_xname = true
-    override def is_text = true
+    val COMMAND = Value("command")
+    val KEYWORD = Value("keyword")
+    val IDENT = Value("identifier")
+    val LONG_IDENT = Value("long identifier")
+    val SYM_IDENT = Value("symbolic identifier")
+    val VAR = Value("schematic variable")
+    val TYPE_IDENT = Value("type variable")
+    val TYPE_VAR = Value("schematic type variable")
+    val NAT = Value("number")
+    val STRING = Value("string")
+    val ALT_STRING = Value("back-quoted string")
+    val VERBATIM = Value("verbatim text")
+    val SPACE = Value("white space")
+    val COMMENT = Value("comment text")
+    val BAD_INPUT = Value("bad input")
+    val UNPARSED = Value("unparsed input")
   }
 
-  case class Var(val source: String) extends Token
-
-  case class Type_Ident(val source: String) extends Token
-
-  case class Type_Var(val source: String) extends Token
-
-  case class Nat(val source: String) extends Token
+  sealed case class Token(val kind: Token_Kind.Value, val source: String)
   {
-    override def is_name = true
-    override def is_xname = true
-    override def is_text = true
-  }
+    def is_delimited: Boolean =
+      kind == Token_Kind.STRING ||
+      kind == Token_Kind.ALT_STRING ||
+      kind == Token_Kind.VERBATIM ||
+      kind == Token_Kind.COMMENT
+
+    def is_name: Boolean =
+      kind == Token_Kind.IDENT ||
+      kind == Token_Kind.SYM_IDENT ||
+      kind == Token_Kind.STRING ||
+      kind == Token_Kind.NAT
+
+    def is_xname: Boolean = is_name || kind == Token_Kind.LONG_IDENT
+    def is_text: Boolean = is_xname || kind == Token_Kind.VERBATIM
+    def is_space: Boolean = kind == Token_Kind.SPACE
+    def is_comment: Boolean = kind == Token_Kind.COMMENT
+    def is_proper: Boolean = !(is_space || is_comment)
 
-  case class String_Token(val source: String) extends Token
-  {
-    override def content = Scan.Lexicon.empty.quoted_content("\"", source)
-    override def is_delimited = true
-    override def is_name = true
-    override def is_xname = true
-    override def is_text = true
+    def content: String =
+      if (kind == Token_Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
+      else if (kind == Token_Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
+      else if (kind == Token_Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
+      else if (kind == Token_Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
+      else source
+
+    def text: (String, String) =
+      if (kind == Token_Kind.COMMAND && source == ";") ("terminator", "")
+      else (kind.toString, source)
   }
 
-  case class Alt_String_Token(val source: String) extends Token
-  {
-    override def content = Scan.Lexicon.empty.quoted_content("`", source)
-    override def is_delimited = true
-  }
+
+  /* token reader */
 
-  case class Verbatim(val source: String) extends Token
+  class Line_Position(val line: Int) extends scala.util.parsing.input.Position
   {
-    override def content = Scan.Lexicon.empty.verbatim_content(source)
-    override def is_delimited = true
-    override def is_text = true
+    def column = 0
+    def lineContents = ""
+    override def toString = line.toString
+
+    def advance(token: Token): Line_Position =
+    {
+      var n = 0
+      for (c <- token.content if c == '\n') n += 1
+      if (n == 0) this else new Line_Position(line + n)
+    }
   }
 
-  case class Space(val source: String) extends Token
+  abstract class Reader extends scala.util.parsing.input.Reader[Token]
+
+  private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
   {
-    override def is_space = true
+    def first = tokens.head
+    def rest = new Token_Reader(tokens.tail, pos.advance(first))
+    def atEnd = tokens.isEmpty
   }
 
-  case class Comment(val source: String) extends Token
-  {
-    override def content = Scan.Lexicon.empty.comment_content(source)
-    override def is_delimited = true
-    override def is_comment = true
-  }
-
-  case class Bad_Input(val source: String) extends Token
-  case class Unparsed(val source: String) extends Token
+  def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
 }
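The sealed case class replaces the old subclass-per-token hierarchy, so token
classification is plain data rather than subtyping. A small sketch of the
resulting API (the token values are invented for the example):

    import isabelle.Outer_Lex
    import Outer_Lex.{Token, Token_Kind}

    val str = Token(Token_Kind.STRING, "\"hello\"")
    str.is_name    // true: quoted strings count as name tokens
    str.content    // "hello", delimiters stripped via Scan.Lexicon.empty
    str.is_proper  // true: neither white space nor comment

    val sp = Token(Token_Kind.SPACE, " ")
    sp.is_proper   // false

    // token reader with line positions, for use with parser combinators
    val rd = Outer_Lex.reader(List(str, sp))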
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/Isar/outer_parse.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Isar/outer_parse.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -0,0 +1,74 @@
+/*  Title:      Pure/Isar/outer_parse.scala
+    Author:     Makarius
+
+Generic parsers for Isabelle/Isar outer syntax.
+*/
+
+package isabelle
+
+import scala.util.parsing.combinator.Parsers
+
+
+object Outer_Parse
+{
+  /* parsing tokens */
+
+  trait Parser extends Parsers
+  {
+    type Elem = Outer_Lex.Token
+
+    private def proper(in: Input): Input =
+      if (in.atEnd || in.first.is_proper) in
+      else proper(in.rest)
+
+    def token(s: String, pred: Elem => Boolean): Parser[Elem] = new Parser[Elem]
+    {
+      def apply(raw_input: Input) =
+      {
+        val in = proper(raw_input)
+        if (in.atEnd) Failure(s + " expected (past end-of-file!)", in)
+        else {
+          val token = in.first
+          if (pred(token)) Success(token, proper(in.rest))
+          else
+            token.text match {
+              case (txt, "") =>
+                Failure(s + " expected,\nbut " + txt + " was found", in)
+              case (txt1, txt2) =>
+                Failure(s + " expected,\nbut " + txt1 + " was found:\n" + txt2, in)
+            }
+        }
+      }
+    }
+
+    def atom(s: String, pred: Elem => Boolean): Parser[String] =
+      token(s, pred) ^^ (_.content)
+
+    def not_eof: Parser[Elem] = token("input token", _ => true)
+
+    def keyword(name: String): Parser[String] =
+      atom(Outer_Lex.Token_Kind.KEYWORD.toString + " \"" + name + "\"",
+        tok => tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == name)
+
+    def name: Parser[String] = atom("name declaration", _.is_name)
+    def xname: Parser[String] = atom("name reference", _.is_xname)
+    def text: Parser[String] = atom("text", _.is_text)
+    def ML_source: Parser[String] = atom("ML source", _.is_text)
+    def doc_source: Parser[String] = atom("document source", _.is_text)
+    def path: Parser[String] = atom("file name/path specification", _.is_name)
+
+    private def tag_name: Parser[String] =
+      atom("tag name", tok =>
+        tok.kind == Outer_Lex.Token_Kind.IDENT ||
+        tok.kind == Outer_Lex.Token_Kind.STRING)
+
+    def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)
+
+
+    /* wrappers */
+
+    def parse[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = p(in)
+    def parse_all[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = parse(phrase(p), in)
+  }
+}
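Concrete parsers are meant to mix in this trait; Thy_Header below is the first
client. A hypothetical sketch of another client (Demo_Parser is not part of this
change):

    object Demo_Parser extends Outer_Parse.Parser
    {
      // keyword token "theory" followed by a name declaration
      val thy_name: Parser[String] = keyword("theory") ~> name
    }

    // given tokens: List[Outer_Lex.Token] from a suitable lexicon:
    //   Demo_Parser.parse_all(Demo_Parser.thy_name, Outer_Lex.reader(tokens))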
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/Isar/outer_syntax.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Isar/outer_syntax.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -0,0 +1,54 @@
+/*  Title:      Pure/Isar/outer_syntax.scala
+    Author:     Makarius
+
+Isabelle/Isar outer syntax.
+*/
+
+package isabelle
+
+
+import scala.util.parsing.input.{Reader, CharSequenceReader}
+
+
+class Outer_Syntax(symbols: Symbol.Interpretation)
+{
+  protected val keywords: Map[String, String] = Map((";" -> Outer_Keyword.DIAG))
+  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
+  lazy val completion: Completion = new Completion + symbols  // FIXME !?
+
+  def + (name: String, kind: String): Outer_Syntax =
+  {
+    val new_keywords = keywords + (name -> kind)
+    val new_lexicon = lexicon + name
+    val new_completion = completion + name
+    new Outer_Syntax(symbols) {
+      override val lexicon = new_lexicon
+      override val keywords = new_keywords
+      override lazy val completion = new_completion
+    }
+  }
+
+  def + (name: String): Outer_Syntax = this + (name, Outer_Keyword.MINOR)
+
+  def is_command(name: String): Boolean =
+    keywords.get(name) match {
+      case Some(kind) => kind != Outer_Keyword.MINOR
+      case None => false
+    }
+
+
+  /* tokenize */
+
+  def scan(input: Reader[Char]): List[Outer_Lex.Token] =
+  {
+    import lexicon._
+
+    parseAll(rep(token(symbols, is_command)), input) match {
+      case Success(tokens, _) => tokens
+      case _ => error("Failed to tokenize input:\n" + input.source.toString)
+    }
+  }
+
+  def scan(input: CharSequence): List[Outer_Lex.Token] =
+    scan(new CharSequenceReader(input))
+}
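Keyword tables grow functionally via the + operators, and scan classifies each
keyword as COMMAND or KEYWORD through is_command. A sketch, assuming a
Symbol.Interpretation named symbols; DIAG merely stands in for a realistic
command kind:

    val syntax =
      new Outer_Syntax(symbols) + ("theory", Outer_Keyword.DIAG) + "imports" + "begin"

    // "theory" has a non-MINOR kind, so it scans as a COMMAND token
    val tokens = syntax.scan("theory Foo imports Main begin")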
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/System/isabelle_system.scala
--- a/src/Pure/System/isabelle_system.scala	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/System/isabelle_system.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -8,11 +8,13 @@
 
 import java.util.regex.Pattern
 import java.util.Locale
-import java.io.{BufferedInputStream, FileInputStream, File, IOException}
+import java.io.{BufferedInputStream, FileInputStream, BufferedReader, InputStreamReader,
+  File, IOException}
 import java.awt.{GraphicsEnvironment, Font}
 
 import scala.io.Source
 import scala.util.matching.Regex
+import scala.collection.mutable
 
 
 object Isabelle_System
@@ -43,6 +45,22 @@
     val rc = proc.waitFor
     (output, rc)
   }
+
+
+  /* platform files */
+
+  def read_file(file: File): String =
+  {
+    val buf = new StringBuilder(file.length.toInt)
+    val reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), charset))
+    var c = reader.read
+    while (c != -1) {
+      buf.append(c.toChar)
+      c = reader.read
+    }
+    reader.close
+    buf.toString
+  }
 }
 
 
@@ -303,7 +321,7 @@
   def find_logics(): List[String] =
   {
     val ml_ident = getenv_strict("ML_IDENTIFIER")
-    var logics: Set[String] = Set()
+    val logics = new mutable.ListBuffer[String]
     for (dir <- getenv_strict("ISABELLE_PATH").split(":")) {
       val files = platform_file(dir + "/" + ml_ident).listFiles()
       if (files != null) {
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/System/session_manager.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/System/session_manager.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -0,0 +1,48 @@
+/*  Title:      Pure/System/session_manager.scala
+    Author:     Makarius
+
+Isabelle session manager.
+*/
+
+package isabelle
+
+
+import java.io.{File, FileFilter}
+
+
+class Session_Manager(system: Isabelle_System)
+{
+  val ROOT_NAME = "session.root"
+
+  def is_session_file(file: File): Boolean =
+    file.isFile && file.getName == ROOT_NAME
+
+  def is_session_dir(dir: File): Boolean =
+    dir.isDirectory && (new File(dir, ROOT_NAME)).isFile
+
+
+  // FIXME handle (potentially cyclic) directory graph
+  private def find_sessions(rev_prefix: List[String], rev_sessions: List[List[String]],
+    dir: File): List[List[String]] =
+  {
+    val (rev_prefix1, rev_sessions1) =
+      if (is_session_dir(dir)) {
+        val name = dir.getName  // FIXME from root file
+        val rev_prefix1 = name :: rev_prefix
+        val rev_sessions1 = rev_prefix1.reverse :: rev_sessions
+        (rev_prefix1, rev_sessions1)
+      }
+      else (rev_prefix, rev_sessions)
+
+    val subdirs =
+      dir.listFiles(new FileFilter { def accept(entry: File) = entry.isDirectory })
+    (rev_sessions1 /: subdirs)(find_sessions(rev_prefix1, _, _))
+  }
+
+  def component_sessions(): List[List[String]] =
+  {
+    val toplevel_sessions =
+      system.components().map(system.platform_file(_)).filter(is_session_dir)
+    ((Nil: List[List[String]]) /: toplevel_sessions)(find_sessions(Nil, _, _)).reverse
+  }
+}
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/Thy/thy_header.ML
--- a/src/Pure/Thy/thy_header.ML	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/Thy/thy_header.ML	Wed Dec 23 10:41:13 2009 +0100
@@ -35,7 +35,7 @@
 val file_name = P.group "file name" P.name;
 val theory_name = P.group "theory name" P.name;
 
-val file = (P.$$$ "(" |-- P.!!! (file_name --| P.$$$ ")")) >> rpair false || file_name >> rpair true;
+val file = P.$$$ "(" |-- P.!!! (file_name --| P.$$$ ")") >> rpair false || file_name >> rpair true;
 
 val uses = Scan.optional (P.$$$ usesN |-- P.!!! (Scan.repeat1 file)) [];
 
 val args =
diff -r 412cf41a92a0 -r f5dbdbc86b0f src/Pure/Thy/thy_header.scala
--- a/src/Pure/Thy/thy_header.scala	Wed Dec 23 10:09:06 2009 +0100
+++ b/src/Pure/Thy/thy_header.scala	Wed Dec 23 10:41:13 2009 +0100
@@ -1,12 +1,16 @@
 /*  Title:      Pure/Thy/thy_header.scala
     Author:     Makarius
 
-Theory header keywords.
+Theory headers -- independent of outer syntax.
 */
 
 package isabelle
 
 
+import scala.collection.mutable
+import scala.util.parsing.input.{Reader, CharSequenceReader}
+
+
 object Thy_Header
 {
   val HEADER = "header"
@@ -15,5 +19,54 @@
   val USES = "uses"
   val BEGIN = "begin"
 
-  val keywords = List("%", "(", ")", ";", BEGIN, HEADER, IMPORTS, THEORY, USES)
+  val lexicon = Scan.Lexicon("%", "(", ")", ";", BEGIN, HEADER, IMPORTS, THEORY, USES)
 }
+
+
+class Thy_Header(symbols: Symbol.Interpretation) extends Outer_Parse.Parser
+{
+  import Thy_Header._
+
+
+  /* header */
+
+  val header: Parser[(String, List[String], List[String])] =
+  {
+    val file_name = atom("file name", _.is_name)
+    val theory_name = atom("theory name", _.is_name)
+
+    val file =
+      keyword("(") ~! (file_name ~ keyword(")")) ^^ { case _ ~ (x ~ _) => x } | file_name
+
+    val uses = opt(keyword(USES) ~! (rep1(file))) ^^ { case None => Nil case Some(_ ~ xs) => xs }
+
+    val args =
+      theory_name ~ (keyword(IMPORTS) ~! (rep1(theory_name) ~ uses ~ keyword(BEGIN))) ^^
+        { case x ~ (_ ~ (ys ~ zs ~ _)) => (x, ys, zs) }
+
+    (keyword(HEADER) ~ tags) ~!
+      ((doc_source ~ rep(keyword(";")) ~ keyword(THEORY) ~ tags) ~> args) ^^ { case _ ~ x => x } |
+    (keyword(THEORY) ~ tags) ~! args ^^ { case _ ~ x => x }
+  }
+
+
+  /* scan -- lazy approximation */
+
+  def scan(text: CharSequence): List[Outer_Lex.Token] =
+  {
+    val token = lexicon.token(symbols, _ => false)
+    val toks = new mutable.ListBuffer[Outer_Lex.Token]
+    def scan_until_begin(in: Reader[Char])
+    {
+      token(in) match {
+        case lexicon.Success(tok, rest) =>
+          toks += tok
+          if (!(tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == BEGIN))
+            scan_until_begin(rest)
+        case _ =>
+      }
+    }
+    scan_until_begin(new CharSequenceReader(text))
+    toks.toList
+  }
+}
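Taken together, the new modules allow a theory header to be read without the
full outer syntax. A closing sketch, assuming a Symbol.Interpretation named
symbols:

    val thy_header = new Thy_Header(symbols)
    val tokens = thy_header.scan("theory Foo imports Main begin")

    thy_header.parse_all(thy_header.header, Outer_Lex.reader(tokens)) match {
      case thy_header.Success((name, imports, _), _) =>
        println(name + " imports " + imports.mkString(", "))
      case bad => println(bad)
    }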