src/Pure/Isar/outer_syntax.scala
author wenzelm
Mon Feb 27 17:13:25 2012 +0100 (2012-02-27)
changeset 46712 8650d9a95736
parent 46626 a02115865bcc
child 46940 a40be2f10ca9
permissions -rw-r--r--
prefer final ADTs -- prevent ooddities;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


object Outer_Syntax
{
  def quote_string(str: String): String =
  {
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= (c.asInstanceOf[Int].toString)
        }
        else result += c
      }
      else result ++= s
    }
    result += '"'
    result.toString
  }
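
  /* Usage sketch (illustrative): quote_string yields a double-quoted form in
     which control characters (other than the YXML markers), backslash and
     double-quote become 3-digit decimal escapes, e.g.

       Outer_Syntax.quote_string("ab\ncd")   // produces the text "ab\010cd" (quotes included)
  */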

  def init(): Outer_Syntax = new Outer_Syntax()
}

final class Outer_Syntax private(
  keywords: Map[String, String] = Map((";" -> Keyword.DIAG)),
  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
  val completion: Completion = Completion.init())
{
  def keyword_kind(name: String): Option[String] = keywords.get(name)

  def + (name: String, kind: String, replace: String): Outer_Syntax =
    new Outer_Syntax(
      keywords + (name -> kind),
      lexicon + name,
      if (Keyword.control(kind)) completion else completion + (name, replace))

  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }
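
  /* Usage sketch (illustrative; the keyword names are arbitrary examples):
     keywords are accumulated via the overloaded "+" operators; only entries
     whose kind differs from Keyword.MINOR count as commands.

       val syntax = Outer_Syntax.init() + "and" + ("print_state", Keyword.DIAG)
       syntax.is_command("print_state")   // true: kind Keyword.DIAG
       syntax.is_command("and")           // false: minor keyword only
  */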

  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)
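
  /* Usage sketch (illustrative): document headings map to fixed levels, while
     other keywords of a theory kind default to level 6.

       Outer_Syntax.init().heading_level("chapter")  == Some(2)
       Outer_Syntax.init().heading_level("section")  == Some(3)
       Outer_Syntax.init().heading_level("lemma")    == None   // "lemma" is not a keyword of the initial syntax
  */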


  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
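
  /* Usage sketch (illustrative; "syntax" stands for some Outer_Syntax value):
     scan splits source text into outer tokens according to the current lexicon.

       syntax.scan("lemma \"A\"")
       // one token for the word "lemma", one for the space, one for the quoted string
  */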

  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
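
  /* Usage sketch (illustrative; "syntax", "chunk1", "chunk2" stand for some
     Outer_Syntax value and pieces of source text; Scan.Finished is assumed to
     be the neutral initial context): scan_context tokenizes one chunk and
     returns the final scanner context, so that scanning may resume when a
     quoted string or comment spans several chunks.

       val (toks1, ctxt1) = syntax.scan_context(chunk1, Scan.Finished)
       val (toks2, ctxt2) = syntax.scan_context(chunk2, ctxt1)
  */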
}