src/Pure/Isar/outer_syntax.scala
author      wenzelm
date        Sat Jun 18 18:17:08 2011 +0200 (2011-06-18)
changeset   43445 270bbbcda059
parent      43411 0206466ee473
child       43455 4b4b93672f15
permissions -rw-r--r--
comment     hardwired abbreviations for standard control symbols;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


class Outer_Syntax(symbols: Symbol.Interpretation)
{
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = // FIXME odd initialization
    new Completion + symbols +
      ("sub", "\\<^sub>") +
      ("sup", "\\<^sup>") +
      ("isub", "\\<^isub>") +
      ("isup", "\\<^isup>") +
      ("bold", "\\<^bold>") +
      ("loc", "\\<^loc>")

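  /* Note: the entries above are the hardwired completion abbreviations
     for standard control symbols introduced by this changeset, e.g.
     typing "sub" offers the replacement text "\<^sub>".  Keyword-specific
     completions are added later via the + operations below. */
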
  def keyword_kind(name: String): Option[String] = keywords.get(name)

  def + (name: String, kind: String, replace: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + (name, replace)
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

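  /* Example (sketch): Outer_Syntax is a persistent value -- each +
     returns a fresh instance with extended keyword table, lexicon and
     completion, leaving the original untouched.  Hypothetical usage,
     assuming the usual keyword kinds from Pure/Isar/keyword.scala:

       val base = new Outer_Syntax(symbols)
       val syntax = base + ("and", Keyword.MINOR) + ("theory", Keyword.THY_BEGIN)
       syntax.is_command("theory")   // true: kind is not MINOR
       syntax.is_command("and")      // false: minor keyword
  */
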
  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))

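  /* Example (sketch): scanning a complete, self-contained piece of
     source text, e.g.

       val tokens = syntax.scan("lemma foo: \"A --> A\"")

     yields the full token list in one go; scan_context below is the
     variant for input that arrives in chunks (e.g. line-by-line). */
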
  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(symbols, is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
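
  /* Example (sketch): scanning text line-by-line while threading the
     scanner context through, so that quotations and comments spanning
     several lines are tokenized correctly.  Using Scan.Finished as the
     initial context is an assumption, following Pure/General/scan.scala:

       var ctxt: Scan.Context = Scan.Finished
       for (line <- lines) {
         val (toks, ctxt1) = syntax.scan_context(line, ctxt)
         ctxt = ctxt1
         // ... process toks ...
       }
  */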
}