tuned signature (in accordance to ML version);
author wenzelm
Fri Feb 14 16:25:30 2014 +0100 (2014-02-14)
changeset 55494 009b71c1ed23
parent 55493 47cac23e3d22
child 55495 b389f65edc44
tuned signature (in accordance to ML version);
src/Pure/General/scan.scala
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/token.scala
src/Pure/Thy/thy_header.scala
     1.1 --- a/src/Pure/General/scan.scala	Fri Feb 14 16:11:14 2014 +0100
     1.2 +++ b/src/Pure/General/scan.scala	Fri Feb 14 16:25:30 2014 +0100
     1.3 @@ -86,7 +86,7 @@
     1.4          (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString)
     1.5      }
     1.6  
     1.7 -    private def quoted(quote: Symbol.Symbol): Parser[String] =
     1.8 +    def quoted(quote: Symbol.Symbol): Parser[String] =
     1.9      {
    1.10        quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z }
    1.11      }.named("quoted")
    1.12 @@ -128,7 +128,7 @@
    1.13      private def verbatim_body: Parser[String] =
    1.14        rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString)
    1.15  
    1.16 -    private def verbatim: Parser[String] =
    1.17 +    def verbatim: Parser[String] =
    1.18      {
    1.19        "{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z }
    1.20      }.named("verbatim")
    1.21 @@ -288,86 +288,6 @@
    1.22          else Success(result, in.drop(result.length))
    1.23        }
    1.24      }.named("keyword")
    1.25 -
    1.26 -
    1.27 -    /* outer syntax tokens */
    1.28 -
    1.29 -    private def delimited_token: Parser[Token] =
    1.30 -    {
    1.31 -      val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
    1.32 -      val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
    1.33 -      val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
    1.34 -      val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
    1.35 -      val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))
    1.36 -
    1.37 -      string | (alt_string | (verb | (cart | cmt)))
    1.38 -    }
    1.39 -
    1.40 -    private def other_token(lexicon: Lexicon, is_command: String => Boolean)
    1.41 -      : Parser[Token] =
    1.42 -    {
    1.43 -      val letdigs1 = many1(Symbol.is_letdig)
    1.44 -      val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
    1.45 -      val id =
    1.46 -        one(Symbol.is_letter) ~
    1.47 -          (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^
    1.48 -        { case x ~ y => x + y }
    1.49 -
    1.50 -      val nat = many1(Symbol.is_digit)
    1.51 -      val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
    1.52 -      val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
    1.53 -
    1.54 -      val ident = id ~ rep("." ~> id) ^^
    1.55 -        { case x ~ Nil => Token(Token.Kind.IDENT, x)
    1.56 -          case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
    1.57 -
    1.58 -      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
    1.59 -      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
    1.60 -      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
    1.61 -      val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
    1.62 -      val float =
    1.63 -        ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
    1.64 -
    1.65 -      val sym_ident =
    1.66 -        (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
    1.67 -        (x => Token(Token.Kind.SYM_IDENT, x))
    1.68 -
    1.69 -      val command_keyword =
    1.70 -        keyword(lexicon) ^^
    1.71 -          (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
    1.72 -
    1.73 -      val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
    1.74 -
    1.75 -      val recover_delimited =
    1.76 -        (recover_quoted("\"") |
    1.77 -          (recover_quoted("`") |
    1.78 -            (recover_verbatim |
    1.79 -              (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
    1.80 -
    1.81 -      val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
    1.82 -
    1.83 -      space | (recover_delimited |
    1.84 -        (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
    1.85 -          command_keyword) | bad))
    1.86 -    }
    1.87 -
    1.88 -    def token(lexicon: Lexicon, is_command: String => Boolean): Parser[Token] =
    1.89 -      delimited_token | other_token(lexicon, is_command)
    1.90 -
    1.91 -    def token_context(lexicon: Lexicon, is_command: String => Boolean, ctxt: Context)
    1.92 -      : Parser[(Token, Context)] =
    1.93 -    {
    1.94 -      val string =
    1.95 -        quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
    1.96 -      val alt_string =
    1.97 -        quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
    1.98 -      val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
    1.99 -      val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
   1.100 -      val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
   1.101 -      val other = other_token(lexicon, is_command) ^^ { case x => (x, Finished) }
   1.102 -
   1.103 -      string | (alt_string | (verb | (cart | (cmt | other))))
   1.104 -    }
   1.105    }
   1.106  
   1.107  
     2.1 --- a/src/Pure/Isar/outer_syntax.scala	Fri Feb 14 16:11:14 2014 +0100
     2.2 +++ b/src/Pure/Isar/outer_syntax.scala	Fri Feb 14 16:25:30 2014 +0100
     2.3 @@ -128,8 +128,8 @@
     2.4  
     2.5    def scan(input: Reader[Char]): List[Token] =
     2.6    {
     2.7 -    Scan.Parsers.parseAll(Scan.Parsers.rep(Scan.Parsers.token(lexicon, is_command)), input) match {
     2.8 -      case Scan.Parsers.Success(tokens, _) => tokens
     2.9 +    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), input) match {
    2.10 +      case Token.Parsers.Success(tokens, _) => tokens
    2.11        case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    2.12      }
    2.13    }
    2.14 @@ -143,9 +143,9 @@
    2.15      val toks = new mutable.ListBuffer[Token]
    2.16      var ctxt = context
    2.17      while (!in.atEnd) {
    2.18 -      Scan.Parsers.parse(Scan.Parsers.token_context(lexicon, is_command, ctxt), in) match {
    2.19 -        case Scan.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
    2.20 -        case Scan.Parsers.NoSuccess(_, rest) =>
    2.21 +      Token.Parsers.parse(Token.Parsers.token_context(lexicon, is_command, ctxt), in) match {
    2.22 +        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
    2.23 +        case Token.Parsers.NoSuccess(_, rest) =>
    2.24            error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
    2.25        }
    2.26      }
     3.1 --- a/src/Pure/Isar/token.scala	Fri Feb 14 16:11:14 2014 +0100
     3.2 +++ b/src/Pure/Isar/token.scala	Fri Feb 14 16:25:30 2014 +0100
     3.3 @@ -34,6 +34,91 @@
     3.4    }
     3.5  
     3.6  
     3.7 +  /* parsers */
     3.8 +
     3.9 +  object Parsers extends Parsers
    3.10 +
    3.11 +  trait Parsers extends Scan.Parsers
    3.12 +  {
    3.13 +    private def delimited_token: Parser[Token] =
    3.14 +    {
    3.15 +      val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
    3.16 +      val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
    3.17 +      val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
    3.18 +      val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
    3.19 +      val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))
    3.20 +
    3.21 +      string | (alt_string | (verb | (cart | cmt)))
    3.22 +    }
    3.23 +
    3.24 +    private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean)
    3.25 +      : Parser[Token] =
    3.26 +    {
    3.27 +      val letdigs1 = many1(Symbol.is_letdig)
    3.28 +      val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
    3.29 +      val id =
    3.30 +        one(Symbol.is_letter) ~
    3.31 +          (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^
    3.32 +        { case x ~ y => x + y }
    3.33 +
    3.34 +      val nat = many1(Symbol.is_digit)
    3.35 +      val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
    3.36 +      val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
    3.37 +
    3.38 +      val ident = id ~ rep("." ~> id) ^^
    3.39 +        { case x ~ Nil => Token(Token.Kind.IDENT, x)
    3.40 +          case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
    3.41 +
    3.42 +      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
    3.43 +      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
    3.44 +      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
    3.45 +      val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
    3.46 +      val float =
    3.47 +        ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
    3.48 +
    3.49 +      val sym_ident =
    3.50 +        (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
    3.51 +        (x => Token(Token.Kind.SYM_IDENT, x))
    3.52 +
    3.53 +      val command_keyword =
    3.54 +        keyword(lexicon) ^^
    3.55 +          (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
    3.56 +
    3.57 +      val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
    3.58 +
    3.59 +      val recover_delimited =
    3.60 +        (recover_quoted("\"") |
    3.61 +          (recover_quoted("`") |
    3.62 +            (recover_verbatim |
    3.63 +              (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
    3.64 +
    3.65 +      val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
    3.66 +
    3.67 +      space | (recover_delimited |
    3.68 +        (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
    3.69 +          command_keyword) | bad))
    3.70 +    }
    3.71 +
    3.72 +    def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
    3.73 +      delimited_token | other_token(lexicon, is_command)
    3.74 +
    3.75 +    def token_context(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Context)
    3.76 +      : Parser[(Token, Scan.Context)] =
    3.77 +    {
    3.78 +      val string =
    3.79 +        quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
    3.80 +      val alt_string =
    3.81 +        quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
    3.82 +      val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
    3.83 +      val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
    3.84 +      val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
    3.85 +      val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
    3.86 +
    3.87 +      string | (alt_string | (verb | (cart | (cmt | other))))
    3.88 +    }
    3.89 +  }
    3.90 +
    3.91 +
    3.92    /* token reader */
    3.93  
    3.94    class Position(val line: Int, val file: String) extends scala.util.parsing.input.Position
     4.1 --- a/src/Pure/Thy/thy_header.scala	Fri Feb 14 16:11:14 2014 +0100
     4.2 +++ b/src/Pure/Thy/thy_header.scala	Fri Feb 14 16:25:30 2014 +0100
     4.3 @@ -82,13 +82,13 @@
     4.4  
     4.5    def read(reader: Reader[Char]): Thy_Header =
     4.6    {
     4.7 -    val token = Scan.Parsers.token(lexicon, _ => false)
     4.8 +    val token = Token.Parsers.token(lexicon, _ => false)
     4.9      val toks = new mutable.ListBuffer[Token]
    4.10  
    4.11      @tailrec def scan_to_begin(in: Reader[Char])
    4.12      {
    4.13        token(in) match {
    4.14 -        case Scan.Parsers.Success(tok, rest) =>
    4.15 +        case Token.Parsers.Success(tok, rest) =>
    4.16            toks += tok
    4.17            if (!tok.is_begin) scan_to_begin(rest)
    4.18          case _ =>