src/Pure/Isar/outer_syntax.scala
author wenzelm
Tue Aug 17 22:57:11 2010 +0200 (2010-08-17)
changeset 38471 0924654b8163
parent 36956 21be4832c362
child 40454 2516ea25a54b
permissions -rw-r--r--
report command token name instead of kind, which can be retrieved later via Outer_Syntax.keyword_kind;
wenzelm@34166
     1
/*  Title:      Pure/Isar/outer_syntax.scala
wenzelm@34166
     2
    Author:     Makarius
wenzelm@34166
     3
wenzelm@34166
     4
Isabelle/Isar outer syntax.
wenzelm@34166
     5
*/
wenzelm@34166
     6
wenzelm@34166
     7
package isabelle
wenzelm@34166
     8
wenzelm@34166
     9
wenzelm@34166
    10
import scala.util.parsing.input.{Reader, CharSequenceReader}
wenzelm@34166
    11
wenzelm@34166
    12
wenzelm@34166
    13
class Outer_Syntax(symbols: Symbol.Interpretation)
{
  /* Known keywords: name -> kind (cf. Keyword).  The semicolon is always
     present as a diagnostic command; all other entries are added via `+`. */
  protected val keywords: Map[String, String] = Map(";" -> Keyword.DIAG)

  /* Scanner lexicon of all known keyword names. */
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty

  /* Completion table seeded with the symbol interpretation. */
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

  /* Kind of the given keyword, or None if the name is unknown. */
  def keyword_kind(name: String): Option[String] = keywords.get(name)

  /* Functional update: a new Outer_Syntax value that additionally knows
     the keyword `name` of the given `kind`.  Implemented by an anonymous
     subclass overriding the extensible tables. */
  def + (name: String, kind: String): Outer_Syntax =
  {
    val keywords1 = keywords + (name -> kind)
    val lexicon1 = lexicon + name
    val completion1 = completion + name
    new Outer_Syntax(symbols) {
      override val lexicon = lexicon1
      override val keywords = keywords1
      override lazy val completion = completion1
    }
  }

  /* Convenience: add a minor (non-command) keyword. */
  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

  /* A name counts as a command iff it is known and its kind is not minor. */
  def is_command(name: String): Boolean =
    keywords.get(name).exists(kind => kind != Keyword.MINOR)


  /* tokenize */

  /* Scan the whole input into outer tokens; the scanner is total over the
     lexicon, so anything else indicates an internal error. */
  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(result, _) => result
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  /* Scan a plain character sequence (wrapped into a reader). */
  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
}