src/Pure/Isar/parse.scala
author wenzelm
Fri Apr 08 16:34:14 2011 +0200 (2011-04-08)
changeset 42290 b1f544c84040
parent 36956 21be4832c362
child 43283 446e6621762d
permissions -rw-r--r--
discontinued special treatment of structure Lexicon;
     1 /*  Title:      Pure/Isar/parse.scala
     2     Author:     Makarius
     3 
     4 Generic parsers for Isabelle/Isar outer syntax.
     5 */
     6 
     7 package isabelle
     8 
     9 import scala.util.parsing.combinator.Parsers
    10 
    11 
    12 object Parse
    13 {
    14   /* parsing tokens */
    15 
    16   trait Parser extends Parsers
    17   {
    18     type Elem = Token
    19 
    20     def filter_proper = true
    21 
    22     private def proper(in: Input): Input =
    23       if (in.atEnd || !in.first.is_ignored || !filter_proper) in
    24       else proper(in.rest)
    25 
    26     def token(s: String, pred: Elem => Boolean): Parser[Elem] = new Parser[Elem]
    27     {
    28       def apply(raw_input: Input) =
    29       {
    30         val in = proper(raw_input)
    31         if (in.atEnd) Failure(s + " expected (past end-of-file!)", in)
    32         else {
    33           val token = in.first
    34           if (pred(token)) Success(token, proper(in.rest))
    35           else
    36             token.text match {
    37               case (txt, "") =>
    38                 Failure(s + " expected,\nbut " + txt + " was found", in)
    39               case (txt1, txt2) =>
    40                 Failure(s + " expected,\nbut " + txt1 + " was found:\n" + txt2, in)
    41             }
    42         }
    43       }
    44     }
    45 
    46     def not_eof: Parser[Elem] = token("input token", _ => true)
    47     def eof: Parser[Unit] = not(not_eof)
    48 
    49     def atom(s: String, pred: Elem => Boolean): Parser[String] =
    50       token(s, pred) ^^ (_.content)
    51 
    52     def keyword(name: String): Parser[String] =
    53       atom(Token.Kind.KEYWORD.toString + " \"" + name + "\"",
    54         tok => tok.kind == Token.Kind.KEYWORD && tok.content == name)
    55 
    56     def name: Parser[String] = atom("name declaration", _.is_name)
    57     def xname: Parser[String] = atom("name reference", _.is_xname)
    58     def text: Parser[String] = atom("text", _.is_text)
    59     def ML_source: Parser[String] = atom("ML source", _.is_text)
    60     def doc_source: Parser[String] = atom("document source", _.is_text)
    61     def path: Parser[String] = atom("file name/path specification", _.is_name)
    62 
    63     private def tag_name: Parser[String] =
    64       atom("tag name", tok =>
    65           tok.kind == Token.Kind.IDENT ||
    66           tok.kind == Token.Kind.STRING)
    67 
    68     def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)
    69 
    70 
    71     /* wrappers */
    72 
    73     def parse[T](p: Parser[T], in: Token.Reader): ParseResult[T] = p(in)
    74     def parse_all[T](p: Parser[T], in: Token.Reader): ParseResult[T] = parse(phrase(p), in)
    75   }
    76 }
    77