src/Pure/Isar/outer_syntax.scala
author wenzelm
Sat Jun 18 18:17:08 2011 +0200 (2011-06-18)
changeset 43445 270bbbcda059
parent 43411 0206466ee473
child 43455 4b4b93672f15
permissions -rw-r--r--
hardwired abbreviations for standard control symbols;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


class Outer_Syntax(symbols: Symbol.Interpretation)
{
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = // FIXME odd initialization
    new Completion + symbols +
      ("sub", "\\<^sub>") +
      ("sup", "\\<^sup>") +
      ("isub", "\\<^isub>") +
      ("isup", "\\<^isup>") +
      ("bold", "\\<^bold>") +
      ("loc", "\\<^loc>")
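  // Example: with the hardwired table above, the abbreviation "sub" completes to
  // the control symbol "\<^sub>", and likewise for "sup", "isub", "isup",
  // "bold" and "loc".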

  def keyword_kind(name: String): Option[String] = keywords.get(name)

  def + (name: String, kind: String, replace: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + (name, replace)
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)
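  // Rough usage sketch (keyword names are illustrative): each + yields a fresh
  // syntax as an anonymous subclass with extended tables, leaving this one
  // unchanged.  For instance:
  //
  //   val syntax = new Outer_Syntax(symbols) + ("print_state", Keyword.DIAG) + "and"
  //   syntax.is_command("print_state")   // true: kind differs from Keyword.MINOR
  //   syntax.is_command("and")           // false: minor keyword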

  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }
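  // For example, heading_level("section") yields Some(3); commands whose keyword
  // kind satisfies Keyword.theory fall back to Some(6), anything else to None.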

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
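  // Rough usage sketch (input text is illustrative): scan turns source text into
  // a flat token list; which words count as commands depends on the keywords
  // added via + above.
  //
  //   val tokens = syntax.scan("theory Foo imports Main begin")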

  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(symbols, is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
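  // Rough usage sketch: scan_context is meant for piecewise scanning, threading
  // the scanner context (e.g. an unclosed quotation or comment) from one chunk
  // to the next; assuming Scan.Finished is the neutral initial context:
  //
  //   val (toks1, ctxt1) = syntax.scan_context(line1, Scan.Finished)
  //   val (toks2, ctxt2) = syntax.scan_context(line2, ctxt1)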
}