--- a/src/Pure/General/scan.scala Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/General/scan.scala Mon May 17 14:23:54 2010 +0200
@@ -258,47 +258,44 @@
/* outer syntax tokens */

- def token(symbols: Symbol.Interpretation, is_command: String => Boolean):
- Parser[Outer_Lex.Token] =
+ def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] =
{
- import Outer_Lex.Token_Kind, Outer_Lex.Token
-
val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
val nat = many1(symbols.is_digit)
val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
val ident = id ~ rep("." ~> id) ^^
- { case x ~ Nil => Token(Token_Kind.IDENT, x)
- case x ~ ys => Token(Token_Kind.LONG_IDENT, (x :: ys).mkString(".")) }
+ { case x ~ Nil => Token(Token.Kind.IDENT, x)
+ case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
- val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.VAR, x + y) }
- val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token_Kind.TYPE_IDENT, x + y) }
- val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.TYPE_VAR, x + y) }
- val nat_ = nat ^^ (x => Token(Token_Kind.NAT, x))
+ val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
+ val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
+ val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
+ val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
val sym_ident =
(many1(symbols.is_symbolic_char) |
one(sym => symbols.is_symbolic(sym) & Symbol.is_wellformed(sym))) ^^
- (x => Token(Token_Kind.SYM_IDENT, x))
+ (x => Token(Token.Kind.SYM_IDENT, x))
- val space = many1(symbols.is_blank) ^^ (x => Token(Token_Kind.SPACE, x))
+ val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
- val string = quoted("\"") ^^ (x => Token(Token_Kind.STRING, x))
- val alt_string = quoted("`") ^^ (x => Token(Token_Kind.ALT_STRING, x))
+ val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
+ val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
val junk = many1(sym => !(symbols.is_blank(sym)))
val bad_delimiter =
- ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token_Kind.BAD_INPUT, x + y) }
- val bad = junk ^^ (x => Token(Token_Kind.BAD_INPUT, x))
+ ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.BAD_INPUT, x + y) }
+ val bad = junk ^^ (x => Token(Token.Kind.BAD_INPUT, x))

/* tokens */

- (space | (string | (alt_string | (verbatim ^^ (x => Token(Token_Kind.VERBATIM, x)) |
- comment ^^ (x => Token(Token_Kind.COMMENT, x)))))) |
+ (space | (string | (alt_string | (verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) |
+ comment ^^ (x => Token(Token.Kind.COMMENT, x)))))) |
bad_delimiter |
((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) |||
- keyword ^^ (x => Token(if (is_command(x)) Token_Kind.COMMAND else Token_Kind.KEYWORD, x))) |
+ keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))) |
bad
}
}
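
Note: a minimal sketch of driving the revised token parser end to end, assuming lexicon: Scan.Lexicon and symbols: Symbol.Interpretation are in scope (as in Outer_Syntax); the tokenize helper and its commands table are hypothetical, not part of this change:

  // hypothetical helper; a Set[String] of command names serves as the
  // is_command predicate (String => Boolean)
  def tokenize(input: CharSequence, commands: Set[String]): List[Token] =
  {
    import lexicon._
    val in = new scala.util.parsing.input.CharSequenceReader(input)
    phrase(rep(token(symbols, commands)))(in) match {
      case Success(toks, _) => toks
      case bad => error(bad.toString)
    }
  }

The junk/bad fallbacks above make the token parser total, so rep never gets stuck on unscannable input.
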
--- a/src/Pure/Isar/outer_lex.scala Mon May 17 10:20:55 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,94 +0,0 @@
-/* Title: Pure/Isar/outer_lex.scala
- Author: Makarius
-
-Outer lexical syntax for Isabelle/Isar.
-*/
-
-package isabelle
-
-
-object Outer_Lex
-{
- /* tokens */
-
- object Token_Kind extends Enumeration
- {
- val COMMAND = Value("command")
- val KEYWORD = Value("keyword")
- val IDENT = Value("identifier")
- val LONG_IDENT = Value("long identifier")
- val SYM_IDENT = Value("symbolic identifier")
- val VAR = Value("schematic variable")
- val TYPE_IDENT = Value("type variable")
- val TYPE_VAR = Value("schematic type variable")
- val NAT = Value("number")
- val STRING = Value("string")
- val ALT_STRING = Value("back-quoted string")
- val VERBATIM = Value("verbatim text")
- val SPACE = Value("white space")
- val COMMENT = Value("comment text")
- val BAD_INPUT = Value("bad input")
- val UNPARSED = Value("unparsed input")
- }
-
- sealed case class Token(val kind: Token_Kind.Value, val source: String)
- {
- def is_command: Boolean = kind == Token_Kind.COMMAND
- def is_delimited: Boolean =
- kind == Token_Kind.STRING ||
- kind == Token_Kind.ALT_STRING ||
- kind == Token_Kind.VERBATIM ||
- kind == Token_Kind.COMMENT
- def is_name: Boolean =
- kind == Token_Kind.IDENT ||
- kind == Token_Kind.SYM_IDENT ||
- kind == Token_Kind.STRING ||
- kind == Token_Kind.NAT
- def is_xname: Boolean = is_name || kind == Token_Kind.LONG_IDENT
- def is_text: Boolean = is_xname || kind == Token_Kind.VERBATIM
- def is_space: Boolean = kind == Token_Kind.SPACE
- def is_comment: Boolean = kind == Token_Kind.COMMENT
- def is_ignored: Boolean = is_space || is_comment
- def is_unparsed: Boolean = kind == Token_Kind.UNPARSED
-
- def content: String =
- if (kind == Token_Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
- else if (kind == Token_Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
- else if (kind == Token_Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
- else if (kind == Token_Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
- else source
-
- def text: (String, String) =
- if (kind == Token_Kind.COMMAND && source == ";") ("terminator", "")
- else (kind.toString, source)
- }
-
-
- /* token reader */
-
- class Line_Position(val line: Int) extends scala.util.parsing.input.Position
- {
- def column = 0
- def lineContents = ""
- override def toString = line.toString
-
- def advance(token: Token): Line_Position =
- {
- var n = 0
- for (c <- token.content if c == '\n') n += 1
- if (n == 0) this else new Line_Position(line + n)
- }
- }
-
- abstract class Reader extends scala.util.parsing.input.Reader[Token]
-
- private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
- {
- def first = tokens.head
- def rest = new Token_Reader(tokens.tail, pos.advance(first))
- def atEnd = tokens.isEmpty
- }
-
- def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
-}
-
--- a/src/Pure/Isar/outer_syntax.scala Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/Isar/outer_syntax.scala Mon May 17 14:23:54 2010 +0200
@@ -39,7 +39,7 @@
/* tokenize */

- def scan(input: Reader[Char]): List[Outer_Lex.Token] =
+ def scan(input: Reader[Char]): List[Token] =
{
import lexicon._
@@ -49,6 +49,6 @@
}
}
- def scan(input: CharSequence): List[Outer_Lex.Token] =
+ def scan(input: CharSequence): List[Token] =
scan(new CharSequenceReader(input))
}
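
An illustrative call site for the retyped overloads, with syntax standing for an assumed Outer_Syntax instance:

  // both scan overloads now yield the top-level Token type directly
  val toks: List[Token] = syntax.scan("theory Foo imports Main begin")
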
--- a/src/Pure/Isar/parse.scala Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/Isar/parse.scala Mon May 17 14:23:54 2010 +0200
@@ -15,7 +15,7 @@
trait Parser extends Parsers
{
- type Elem = Outer_Lex.Token
+ type Elem = Token
def filter_proper = true
@@ -50,8 +50,8 @@
token(s, pred) ^^ (_.content)
def keyword(name: String): Parser[String] =
- atom(Outer_Lex.Token_Kind.KEYWORD.toString + " \"" + name + "\"",
- tok => tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == name)
+ atom(Token.Kind.KEYWORD.toString + " \"" + name + "\"",
+ tok => tok.kind == Token.Kind.KEYWORD && tok.content == name)
def name: Parser[String] = atom("name declaration", _.is_name)
def xname: Parser[String] = atom("name reference", _.is_xname)
@@ -62,16 +62,16 @@
private def tag_name: Parser[String] =
atom("tag name", tok =>
- tok.kind == Outer_Lex.Token_Kind.IDENT ||
- tok.kind == Outer_Lex.Token_Kind.STRING)
+ tok.kind == Token.Kind.IDENT ||
+ tok.kind == Token.Kind.STRING)
def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)

/* wrappers */

- def parse[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = p(in)
- def parse_all[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = parse(phrase(p), in)
+ def parse[T](p: Parser[T], in: Token.Reader): ParseResult[T] = p(in)
+ def parse_all[T](p: Parser[T], in: Token.Reader): ParseResult[T] = parse(phrase(p), in)
}
}
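
A sketch of how the retargeted combinators compose at the token level; the imports grammar is illustrative only, and the Parse.Parser path assumes the trait lives in an enclosing object Parse, as the file name suggests:

  // illustrative grammar, not part of this change
  object Example extends Parse.Parser
  {
    val imports: Parser[List[String]] =
      keyword("imports") ~> rep1(name)
  }

Such a parser would then run over scanned tokens via Example.parse_all(Example.imports, Token.reader(toks)).
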
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Isar/token.scala Mon May 17 14:23:54 2010 +0200
@@ -0,0 +1,95 @@
+/* Title: Pure/Isar/token.scala
+ Author: Makarius
+
+Outer token syntax for Isabelle/Isar.
+*/
+
+package isabelle
+
+
+object Token
+{
+ /* tokens */
+
+ object Kind extends Enumeration
+ {
+ val COMMAND = Value("command")
+ val KEYWORD = Value("keyword")
+ val IDENT = Value("identifier")
+ val LONG_IDENT = Value("long identifier")
+ val SYM_IDENT = Value("symbolic identifier")
+ val VAR = Value("schematic variable")
+ val TYPE_IDENT = Value("type variable")
+ val TYPE_VAR = Value("schematic type variable")
+ val NAT = Value("number")
+ val STRING = Value("string")
+ val ALT_STRING = Value("back-quoted string")
+ val VERBATIM = Value("verbatim text")
+ val SPACE = Value("white space")
+ val COMMENT = Value("comment text")
+ val BAD_INPUT = Value("bad input")
+ val UNPARSED = Value("unparsed input")
+ }
+
+
+ /* token reader */
+
+ class Line_Position(val line: Int) extends scala.util.parsing.input.Position
+ {
+ def column = 0
+ def lineContents = ""
+ override def toString = line.toString
+
+ def advance(token: Token): Line_Position =
+ {
+ var n = 0
+ for (c <- token.content if c == '\n') n += 1
+ if (n == 0) this else new Line_Position(line + n)
+ }
+ }
+
+ abstract class Reader extends scala.util.parsing.input.Reader[Token]
+
+ private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
+ {
+ def first = tokens.head
+ def rest = new Token_Reader(tokens.tail, pos.advance(first))
+ def atEnd = tokens.isEmpty
+ }
+
+ def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
+}
+
+
+sealed case class Token(val kind: Token.Kind.Value, val source: String)
+{
+ def is_command: Boolean = kind == Token.Kind.COMMAND
+ def is_delimited: Boolean =
+ kind == Token.Kind.STRING ||
+ kind == Token.Kind.ALT_STRING ||
+ kind == Token.Kind.VERBATIM ||
+ kind == Token.Kind.COMMENT
+ def is_name: Boolean =
+ kind == Token.Kind.IDENT ||
+ kind == Token.Kind.SYM_IDENT ||
+ kind == Token.Kind.STRING ||
+ kind == Token.Kind.NAT
+ def is_xname: Boolean = is_name || kind == Token.Kind.LONG_IDENT
+ def is_text: Boolean = is_xname || kind == Token.Kind.VERBATIM
+ def is_space: Boolean = kind == Token.Kind.SPACE
+ def is_comment: Boolean = kind == Token.Kind.COMMENT
+ def is_ignored: Boolean = is_space || is_comment
+ def is_unparsed: Boolean = kind == Token.Kind.UNPARSED
+
+ def content: String =
+ if (kind == Token.Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
+ else if (kind == Token.Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
+ else if (kind == Token.Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
+ else if (kind == Token.Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
+ else source
+
+ def text: (String, String) =
+ if (kind == Token.Kind.COMMAND && source == ";") ("terminator", "")
+ else (kind.toString, source)
+}
+
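
A few illustrative uses of the relocated Token API, matching the definitions above:

  // construction and classification
  val id = Token(Token.Kind.IDENT, "foo")
  val str = Token(Token.Kind.STRING, "\"bar\"")
  id.is_name        // true: identifiers count as name declarations
  str.content       // "bar", delimiters stripped via Scan.Lexicon.empty
  str.is_delimited  // true

  // token reader over the list, starting at Line_Position(1)
  val rd: Token.Reader = Token.reader(List(id, str))
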
--- a/src/Pure/PIDE/document.scala Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/PIDE/document.scala Mon May 17 14:23:54 2010 +0200
@@ -46,7 +46,7 @@
/* unparsed dummy commands */
def unparsed(source: String) =
- new Command(null, List(Outer_Lex.Token(Outer_Lex.Token_Kind.UNPARSED, source)))
+ new Command(null, List(Token(Token.Kind.UNPARSED, source)))
def is_unparsed(command: Command) = command.id == null
--- a/src/Pure/Thy/thy_header.scala Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/Thy/thy_header.scala Mon May 17 14:23:54 2010 +0200
@@ -59,14 +59,14 @@
def read(file: File): Header =
{
val token = lexicon.token(symbols, _ => false)
- val toks = new mutable.ListBuffer[Outer_Lex.Token]
+ val toks = new mutable.ListBuffer[Token]
def scan_to_begin(in: Reader[Char])
{
token(in) match {
case lexicon.Success(tok, rest) =>
toks += tok
- if (!(tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == BEGIN))
+ if (!(tok.kind == Token.Kind.KEYWORD && tok.content == BEGIN))
scan_to_begin(rest)
case _ =>
}
@@ -74,7 +74,7 @@
val reader = Scan.byte_reader(file)
try { scan_to_begin(reader) } finally { reader.close }
- parse(commit(header), Outer_Lex.reader(toks.toList)) match {
+ parse(commit(header), Token.reader(toks.toList)) match {
case Success(result, _) => result
case bad => error(bad.toString)
}
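
For intuition, a hypothetical list-based equivalent of scan_to_begin, ignoring that the real loop interleaves scanning so the rest of the file is never tokenized (BEGIN is assumed to be the literal "begin"):

  // keep tokens up to and including the first "begin" keyword
  def to_begin(toks: List[Token]): List[Token] =
    toks.span(tok => !(tok.kind == Token.Kind.KEYWORD && tok.content == "begin")) match {
      case (head, begin :: _) => head ::: List(begin)
      case (head, Nil) => head
    }
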
--- a/src/Pure/Thy/thy_syntax.scala Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/Thy/thy_syntax.scala Mon May 17 14:23:54 2010 +0200
@@ -22,13 +22,13 @@
}
}
- type Span = List[Outer_Lex.Token]
+ type Span = List[Token]
- def parse_spans(toks: List[Outer_Lex.Token]): List[Span] =
+ def parse_spans(toks: List[Token]): List[Span] =
{
import parser._
- parse(rep(command_span), Outer_Lex.reader(toks)) match {
+ parse(rep(command_span), Token.reader(toks)) match {
case Success(spans, rest) if rest.atEnd => spans
case bad => error(bad.toString)
}
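
Taken together, an assumed end-to-end pipeline (the object names Outer_Syntax and Thy_Syntax are inferred from the file layout, and syntax is an assumed instance):

  // illustrative: source text -> List[Token] -> command spans
  val spans: List[Thy_Syntax.Span] =
    Thy_Syntax.parse_spans(syntax.scan("theory Foo imports Main begin end"))
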
--- a/src/Pure/build-jars Mon May 17 10:20:55 2010 +0200
+++ b/src/Pure/build-jars Mon May 17 14:23:54 2010 +0200
@@ -34,9 +34,9 @@
General/yxml.scala
Isar/isar_document.scala
Isar/keyword.scala
- Isar/outer_lex.scala
Isar/outer_syntax.scala
Isar/parse.scala
+ Isar/token.scala
PIDE/change.scala
PIDE/command.scala
PIDE/document.scala