renamed class Outer_Lex to Token and Token_Kind to Token.Kind;
author wenzelm
Mon May 17 14:23:54 2010 +0200 (2010-05-17)
changeset 36956 21be4832c362
parent 36955 226fb165833e
child 36957 cdb9e83abfbe
renamed class Outer_Lex to Token and Token_Kind to Token.Kind;
src/Pure/General/scan.scala
src/Pure/Isar/outer_lex.scala
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/parse.scala
src/Pure/Isar/token.scala
src/Pure/PIDE/document.scala
src/Pure/Thy/thy_header.scala
src/Pure/Thy/thy_syntax.scala
src/Pure/build-jars
     1.1 --- a/src/Pure/General/scan.scala	Mon May 17 10:20:55 2010 +0200
     1.2 +++ b/src/Pure/General/scan.scala	Mon May 17 14:23:54 2010 +0200
     1.3 @@ -258,47 +258,44 @@
     1.4  
     1.5      /* outer syntax tokens */
     1.6  
     1.7 -    def token(symbols: Symbol.Interpretation, is_command: String => Boolean):
     1.8 -      Parser[Outer_Lex.Token] =
     1.9 +    def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] =
    1.10      {
    1.11 -      import Outer_Lex.Token_Kind, Outer_Lex.Token
    1.12 -
    1.13        val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
    1.14        val nat = many1(symbols.is_digit)
    1.15        val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
    1.16  
    1.17        val ident = id ~ rep("." ~> id) ^^
    1.18 -        { case x ~ Nil => Token(Token_Kind.IDENT, x)
    1.19 -          case x ~ ys => Token(Token_Kind.LONG_IDENT, (x :: ys).mkString(".")) }
    1.20 +        { case x ~ Nil => Token(Token.Kind.IDENT, x)
    1.21 +          case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
    1.22  
    1.23 -      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.VAR, x + y) }
    1.24 -      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token_Kind.TYPE_IDENT, x + y) }
    1.25 -      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.TYPE_VAR, x + y) }
    1.26 -      val nat_ = nat ^^ (x => Token(Token_Kind.NAT, x))
    1.27 +      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
    1.28 +      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
    1.29 +      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
    1.30 +      val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
    1.31  
    1.32        val sym_ident =
    1.33          (many1(symbols.is_symbolic_char) |
    1.34            one(sym => symbols.is_symbolic(sym) & Symbol.is_wellformed(sym))) ^^
    1.35 -        (x => Token(Token_Kind.SYM_IDENT, x))
    1.36 +        (x => Token(Token.Kind.SYM_IDENT, x))
    1.37  
    1.38 -      val space = many1(symbols.is_blank) ^^ (x => Token(Token_Kind.SPACE, x))
    1.39 +      val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
    1.40  
    1.41 -      val string = quoted("\"") ^^ (x => Token(Token_Kind.STRING, x))
    1.42 -      val alt_string = quoted("`") ^^ (x => Token(Token_Kind.ALT_STRING, x))
    1.43 +      val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
    1.44 +      val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
    1.45  
    1.46        val junk = many1(sym => !(symbols.is_blank(sym)))
    1.47        val bad_delimiter =
    1.48 -        ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token_Kind.BAD_INPUT, x + y) }
    1.49 -      val bad = junk ^^ (x => Token(Token_Kind.BAD_INPUT, x))
    1.50 +        ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.BAD_INPUT, x + y) }
    1.51 +      val bad = junk ^^ (x => Token(Token.Kind.BAD_INPUT, x))
    1.52  
    1.53  
    1.54        /* tokens */
    1.55  
    1.56 -      (space | (string | (alt_string | (verbatim ^^ (x => Token(Token_Kind.VERBATIM, x)) |
    1.57 -        comment ^^ (x => Token(Token_Kind.COMMENT, x)))))) |
    1.58 +      (space | (string | (alt_string | (verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) |
    1.59 +        comment ^^ (x => Token(Token.Kind.COMMENT, x)))))) |
    1.60        bad_delimiter |
    1.61        ((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) |||
    1.62 -        keyword ^^ (x => Token(if (is_command(x)) Token_Kind.COMMAND else Token_Kind.KEYWORD, x))) |
    1.63 +        keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))) |
    1.64        bad
    1.65      }
    1.66    }
     2.1 --- a/src/Pure/Isar/outer_lex.scala	Mon May 17 10:20:55 2010 +0200
     2.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     2.3 @@ -1,94 +0,0 @@
     2.4 -/*  Title:      Pure/Isar/outer_lex.scala
     2.5 -    Author:     Makarius
     2.6 -
     2.7 -Outer lexical syntax for Isabelle/Isar.
     2.8 -*/
     2.9 -
    2.10 -package isabelle
    2.11 -
    2.12 -
    2.13 -object Outer_Lex
    2.14 -{
    2.15 -  /* tokens */
    2.16 -
    2.17 -  object Token_Kind extends Enumeration
    2.18 -  {
    2.19 -    val COMMAND = Value("command")
    2.20 -    val KEYWORD = Value("keyword")
    2.21 -    val IDENT = Value("identifier")
    2.22 -    val LONG_IDENT = Value("long identifier")
    2.23 -    val SYM_IDENT = Value("symbolic identifier")
    2.24 -    val VAR = Value("schematic variable")
    2.25 -    val TYPE_IDENT = Value("type variable")
    2.26 -    val TYPE_VAR = Value("schematic type variable")
    2.27 -    val NAT = Value("number")
    2.28 -    val STRING = Value("string")
    2.29 -    val ALT_STRING = Value("back-quoted string")
    2.30 -    val VERBATIM = Value("verbatim text")
    2.31 -    val SPACE = Value("white space")
    2.32 -    val COMMENT = Value("comment text")
    2.33 -    val BAD_INPUT = Value("bad input")
    2.34 -    val UNPARSED = Value("unparsed input")
    2.35 -  }
    2.36 -
    2.37 -  sealed case class Token(val kind: Token_Kind.Value, val source: String)
    2.38 -  {
    2.39 -    def is_command: Boolean = kind == Token_Kind.COMMAND
    2.40 -    def is_delimited: Boolean =
    2.41 -      kind == Token_Kind.STRING ||
    2.42 -      kind == Token_Kind.ALT_STRING ||
    2.43 -      kind == Token_Kind.VERBATIM ||
    2.44 -      kind == Token_Kind.COMMENT
    2.45 -    def is_name: Boolean =
    2.46 -      kind == Token_Kind.IDENT ||
    2.47 -      kind == Token_Kind.SYM_IDENT ||
    2.48 -      kind == Token_Kind.STRING ||
    2.49 -      kind == Token_Kind.NAT
    2.50 -    def is_xname: Boolean = is_name || kind == Token_Kind.LONG_IDENT
    2.51 -    def is_text: Boolean = is_xname || kind == Token_Kind.VERBATIM
    2.52 -    def is_space: Boolean = kind == Token_Kind.SPACE
    2.53 -    def is_comment: Boolean = kind == Token_Kind.COMMENT
    2.54 -    def is_ignored: Boolean = is_space || is_comment
    2.55 -    def is_unparsed: Boolean = kind == Token_Kind.UNPARSED
    2.56 -
    2.57 -    def content: String =
    2.58 -      if (kind == Token_Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
    2.59 -      else if (kind == Token_Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
    2.60 -      else if (kind == Token_Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
    2.61 -      else if (kind == Token_Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
    2.62 -      else source
    2.63 -
    2.64 -    def text: (String, String) =
    2.65 -      if (kind == Token_Kind.COMMAND && source == ";") ("terminator", "")
    2.66 -      else (kind.toString, source)
    2.67 -  }
    2.68 -
    2.69 -
    2.70 -  /* token reader */
    2.71 -
    2.72 -  class Line_Position(val line: Int) extends scala.util.parsing.input.Position
    2.73 -  {
    2.74 -    def column = 0
    2.75 -    def lineContents = ""
    2.76 -    override def toString = line.toString
    2.77 -
    2.78 -    def advance(token: Token): Line_Position =
    2.79 -    {
    2.80 -      var n = 0
    2.81 -      for (c <- token.content if c == '\n') n += 1
    2.82 -      if (n == 0) this else new Line_Position(line + n)
    2.83 -    }
    2.84 -  }
    2.85 -
    2.86 -  abstract class Reader extends scala.util.parsing.input.Reader[Token]
    2.87 -
    2.88 -  private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
    2.89 -  {
    2.90 -    def first = tokens.head
    2.91 -    def rest = new Token_Reader(tokens.tail, pos.advance(first))
    2.92 -    def atEnd = tokens.isEmpty
    2.93 -  }
    2.94 -
    2.95 -  def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
    2.96 -}
    2.97 -
     3.1 --- a/src/Pure/Isar/outer_syntax.scala	Mon May 17 10:20:55 2010 +0200
     3.2 +++ b/src/Pure/Isar/outer_syntax.scala	Mon May 17 14:23:54 2010 +0200
     3.3 @@ -39,7 +39,7 @@
     3.4  
     3.5    /* tokenize */
     3.6  
     3.7 -  def scan(input: Reader[Char]): List[Outer_Lex.Token] =
     3.8 +  def scan(input: Reader[Char]): List[Token] =
     3.9    {
    3.10      import lexicon._
    3.11  
    3.12 @@ -49,6 +49,6 @@
    3.13      }
    3.14    }
    3.15  
    3.16 -  def scan(input: CharSequence): List[Outer_Lex.Token] =
    3.17 +  def scan(input: CharSequence): List[Token] =
    3.18      scan(new CharSequenceReader(input))
    3.19  }
     4.1 --- a/src/Pure/Isar/parse.scala	Mon May 17 10:20:55 2010 +0200
     4.2 +++ b/src/Pure/Isar/parse.scala	Mon May 17 14:23:54 2010 +0200
     4.3 @@ -15,7 +15,7 @@
     4.4  
     4.5    trait Parser extends Parsers
     4.6    {
     4.7 -    type Elem = Outer_Lex.Token
     4.8 +    type Elem = Token
     4.9  
    4.10      def filter_proper = true
    4.11  
    4.12 @@ -50,8 +50,8 @@
    4.13        token(s, pred) ^^ (_.content)
    4.14  
    4.15      def keyword(name: String): Parser[String] =
    4.16 -      atom(Outer_Lex.Token_Kind.KEYWORD.toString + " \"" + name + "\"",
    4.17 -        tok => tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == name)
    4.18 +      atom(Token.Kind.KEYWORD.toString + " \"" + name + "\"",
    4.19 +        tok => tok.kind == Token.Kind.KEYWORD && tok.content == name)
    4.20  
    4.21      def name: Parser[String] = atom("name declaration", _.is_name)
    4.22      def xname: Parser[String] = atom("name reference", _.is_xname)
    4.23 @@ -62,16 +62,16 @@
    4.24  
    4.25      private def tag_name: Parser[String] =
    4.26        atom("tag name", tok =>
    4.27 -          tok.kind == Outer_Lex.Token_Kind.IDENT ||
    4.28 -          tok.kind == Outer_Lex.Token_Kind.STRING)
    4.29 +          tok.kind == Token.Kind.IDENT ||
    4.30 +          tok.kind == Token.Kind.STRING)
    4.31  
    4.32      def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)
    4.33  
    4.34  
    4.35      /* wrappers */
    4.36  
    4.37 -    def parse[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = p(in)
    4.38 -    def parse_all[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = parse(phrase(p), in)
    4.39 +    def parse[T](p: Parser[T], in: Token.Reader): ParseResult[T] = p(in)
    4.40 +    def parse_all[T](p: Parser[T], in: Token.Reader): ParseResult[T] = parse(phrase(p), in)
    4.41    }
    4.42  }
    4.43  
     5.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     5.2 +++ b/src/Pure/Isar/token.scala	Mon May 17 14:23:54 2010 +0200
     5.3 @@ -0,0 +1,95 @@
     5.4 +/*  Title:      Pure/Isar/token.scala
     5.5 +    Author:     Makarius
     5.6 +
     5.7 +Outer token syntax for Isabelle/Isar.
     5.8 +*/
     5.9 +
    5.10 +package isabelle
    5.11 +
    5.12 +
    5.13 +object Token
    5.14 +{
    5.15 +  /* tokens */
    5.16 +
    5.17 +  object Kind extends Enumeration
    5.18 +  {
    5.19 +    val COMMAND = Value("command")
    5.20 +    val KEYWORD = Value("keyword")
    5.21 +    val IDENT = Value("identifier")
    5.22 +    val LONG_IDENT = Value("long identifier")
    5.23 +    val SYM_IDENT = Value("symbolic identifier")
    5.24 +    val VAR = Value("schematic variable")
    5.25 +    val TYPE_IDENT = Value("type variable")
    5.26 +    val TYPE_VAR = Value("schematic type variable")
    5.27 +    val NAT = Value("number")
    5.28 +    val STRING = Value("string")
    5.29 +    val ALT_STRING = Value("back-quoted string")
    5.30 +    val VERBATIM = Value("verbatim text")
    5.31 +    val SPACE = Value("white space")
    5.32 +    val COMMENT = Value("comment text")
    5.33 +    val BAD_INPUT = Value("bad input")
    5.34 +    val UNPARSED = Value("unparsed input")
    5.35 +  }
    5.36 +
    5.37 +
    5.38 +  /* token reader */
    5.39 +
    5.40 +  class Line_Position(val line: Int) extends scala.util.parsing.input.Position
    5.41 +  {
    5.42 +    def column = 0
    5.43 +    def lineContents = ""
    5.44 +    override def toString = line.toString
    5.45 +
    5.46 +    def advance(token: Token): Line_Position =
    5.47 +    {
    5.48 +      var n = 0
    5.49 +      for (c <- token.content if c == '\n') n += 1
    5.50 +      if (n == 0) this else new Line_Position(line + n)
    5.51 +    }
    5.52 +  }
    5.53 +
    5.54 +  abstract class Reader extends scala.util.parsing.input.Reader[Token]
    5.55 +
    5.56 +  private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
    5.57 +  {
    5.58 +    def first = tokens.head
    5.59 +    def rest = new Token_Reader(tokens.tail, pos.advance(first))
    5.60 +    def atEnd = tokens.isEmpty
    5.61 +  }
    5.62 +
    5.63 +  def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
    5.64 +}
    5.65 +
    5.66 +
    5.67 +sealed case class Token(val kind: Token.Kind.Value, val source: String)
    5.68 +{
    5.69 +  def is_command: Boolean = kind == Token.Kind.COMMAND
    5.70 +  def is_delimited: Boolean =
    5.71 +    kind == Token.Kind.STRING ||
    5.72 +    kind == Token.Kind.ALT_STRING ||
    5.73 +    kind == Token.Kind.VERBATIM ||
    5.74 +    kind == Token.Kind.COMMENT
    5.75 +  def is_name: Boolean =
    5.76 +    kind == Token.Kind.IDENT ||
    5.77 +    kind == Token.Kind.SYM_IDENT ||
    5.78 +    kind == Token.Kind.STRING ||
    5.79 +    kind == Token.Kind.NAT
    5.80 +  def is_xname: Boolean = is_name || kind == Token.Kind.LONG_IDENT
    5.81 +  def is_text: Boolean = is_xname || kind == Token.Kind.VERBATIM
    5.82 +  def is_space: Boolean = kind == Token.Kind.SPACE
    5.83 +  def is_comment: Boolean = kind == Token.Kind.COMMENT
    5.84 +  def is_ignored: Boolean = is_space || is_comment
    5.85 +  def is_unparsed: Boolean = kind == Token.Kind.UNPARSED
    5.86 +
    5.87 +  def content: String =
    5.88 +    if (kind == Token.Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
    5.89 +    else if (kind == Token.Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
    5.90 +    else if (kind == Token.Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
    5.91 +    else if (kind == Token.Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
    5.92 +    else source
    5.93 +
    5.94 +  def text: (String, String) =
    5.95 +    if (kind == Token.Kind.COMMAND && source == ";") ("terminator", "")
    5.96 +    else (kind.toString, source)
    5.97 +}
    5.98 +
     6.1 --- a/src/Pure/PIDE/document.scala	Mon May 17 10:20:55 2010 +0200
     6.2 +++ b/src/Pure/PIDE/document.scala	Mon May 17 14:23:54 2010 +0200
     6.3 @@ -46,7 +46,7 @@
     6.4      /* unparsed dummy commands */
     6.5  
     6.6      def unparsed(source: String) =
     6.7 -      new Command(null, List(Outer_Lex.Token(Outer_Lex.Token_Kind.UNPARSED, source)))
     6.8 +      new Command(null, List(Token(Token.Kind.UNPARSED, source)))
     6.9  
    6.10      def is_unparsed(command: Command) = command.id == null
    6.11  
     7.1 --- a/src/Pure/Thy/thy_header.scala	Mon May 17 10:20:55 2010 +0200
     7.2 +++ b/src/Pure/Thy/thy_header.scala	Mon May 17 14:23:54 2010 +0200
     7.3 @@ -59,14 +59,14 @@
     7.4    def read(file: File): Header =
     7.5    {
     7.6      val token = lexicon.token(symbols, _ => false)
     7.7 -    val toks = new mutable.ListBuffer[Outer_Lex.Token]
     7.8 +    val toks = new mutable.ListBuffer[Token]
     7.9  
    7.10      def scan_to_begin(in: Reader[Char])
    7.11      {
    7.12        token(in) match {
    7.13          case lexicon.Success(tok, rest) =>
    7.14            toks += tok
    7.15 -          if (!(tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == BEGIN))
    7.16 +          if (!(tok.kind == Token.Kind.KEYWORD && tok.content == BEGIN))
    7.17              scan_to_begin(rest)
    7.18          case _ =>
    7.19        }
    7.20 @@ -74,7 +74,7 @@
    7.21      val reader = Scan.byte_reader(file)
    7.22      try { scan_to_begin(reader) } finally { reader.close }
    7.23  
    7.24 -    parse(commit(header), Outer_Lex.reader(toks.toList)) match {
    7.25 +    parse(commit(header), Token.reader(toks.toList)) match {
    7.26        case Success(result, _) => result
    7.27        case bad => error(bad.toString)
    7.28      }
     8.1 --- a/src/Pure/Thy/thy_syntax.scala	Mon May 17 10:20:55 2010 +0200
     8.2 +++ b/src/Pure/Thy/thy_syntax.scala	Mon May 17 14:23:54 2010 +0200
     8.3 @@ -22,13 +22,13 @@
     8.4      }
     8.5    }
     8.6  
     8.7 -  type Span = List[Outer_Lex.Token]
     8.8 +  type Span = List[Token]
     8.9  
    8.10 -  def parse_spans(toks: List[Outer_Lex.Token]): List[Span] =
    8.11 +  def parse_spans(toks: List[Token]): List[Span] =
    8.12    {
    8.13      import parser._
    8.14  
    8.15 -    parse(rep(command_span), Outer_Lex.reader(toks)) match {
    8.16 +    parse(rep(command_span), Token.reader(toks)) match {
    8.17        case Success(spans, rest) if rest.atEnd => spans
    8.18        case bad => error(bad.toString)
    8.19      }
     9.1 --- a/src/Pure/build-jars	Mon May 17 10:20:55 2010 +0200
     9.2 +++ b/src/Pure/build-jars	Mon May 17 14:23:54 2010 +0200
     9.3 @@ -34,9 +34,9 @@
     9.4    General/yxml.scala
     9.5    Isar/isar_document.scala
     9.6    Isar/keyword.scala
     9.7 -  Isar/outer_lex.scala
     9.8    Isar/outer_syntax.scala
     9.9    Isar/parse.scala
    9.10 +  Isar/token.scala
    9.11    PIDE/change.scala
    9.12    PIDE/command.scala
    9.13    PIDE/document.scala