src/Pure/Isar/outer_syntax.scala
author      wenzelm
date        Fri Apr 08 16:34:14 2011 +0200 (2011-04-08)
changeset   42290 b1f544c84040
parent      40533 e38e80686ce5
child       43411 0206466ee473
permissions -rw-r--r--
summary     discontinued special treatment of structure Lexicon;

/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}


class Outer_Syntax(symbols: Symbol.Interpretation)
{
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?
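
  /* Three views of one keyword table: `keywords` records each name's
     kind, `lexicon` drives the token scanner below, and `completion`
     extends symbol completion with keyword names. */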

  def keyword_kind(name: String): Option[String] = keywords.get(name)

  def + (name: String, kind: String, replace: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + (name, replace)
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)
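
  /* Usage sketch: each `+` yields a fresh, extended syntax table and
     leaves the original untouched (purely functional update).  The
     keyword names below are hypothetical; only kinds already used in
     this file are assumed.  Given some symbols: Symbol.Interpretation:

       val base = new Outer_Syntax(symbols)
       val syntax = base + ("print_foo", Keyword.DIAG) + "and"

     "print_foo" becomes a diagnostic command, "and" a minor
     (non-command) keyword. */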

  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }
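
  /* With the sketched `syntax` above, syntax.is_command("print_foo")
     holds, "and" stays a mere minor keyword, and ";" is the one command
     every table starts out with (kind Keyword.DIAG). */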

  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)
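
  /* So the document markup commands map to outline levels 1-5 ("header"
     down to "subsubsection"), any other theory-level command counts as
     level 6, and commands of non-theory kind have no heading level. */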


  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
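
  /* Usage sketch (hypothetical theory text; assumes a table populated
     with the relevant commands):

       val tokens = syntax.scan("theory Foo imports Main begin")

     Keywords for which is_command holds are scanned as command tokens;
     all other material becomes ordinary tokens. */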
}