src/Pure/Isar/outer_syntax.scala
author       wenzelm
date         Tue Aug 21 12:15:25 2012 +0200 (2012-08-21)
changeset    48870 4accee106f0f
parent       48864 3ee314ae1e0a
child        48872 6124e0d1120a
permissions  -rw-r--r--
description  clarified initialization of Thy_Load, Thy_Info, Session;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


object Outer_Syntax
{
  /* quoted strings: escape backslash, double quote and (most) control characters
     as 3-digit decimal codes */

  def quote_string(str: String): String =
  {
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= (c.asInstanceOf[Int].toString)
        }
        else result += c
      }
      else result ++= s
    }
    result += '"'
    result.toString
  }
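
  /* Illustrative example (derivable from the code above): an embedded double quote
     is escaped as a 3-digit decimal code, e.g.

       quote_string("ab\"c") == "\"ab\\034c\""
  */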

  val empty: Outer_Syntax = new Outer_Syntax()

  def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())

  def init_pure(): Outer_Syntax =
    init() + ("theory", Keyword.THY_BEGIN) + ("ML_file", Keyword.THY_LOAD)
}

final class Outer_Syntax private(
  keywords: Map[String, (String, List[String])] = Map.empty,
  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
  val completion: Completion = Completion.empty)
{
  override def toString: String =
    (for ((name, (kind, files)) <- keywords) yield {
      if (kind == Keyword.MINOR) quote(name)
      else
        quote(name) + " :: " + quote(kind) +
        (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
    }).toList.sorted.mkString("keywords\n  ", " and\n  ", "")

  def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name)
  def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1)


  /* add keywords */

  def + (name: String, kind: (String, List[String]), replace: String): Outer_Syntax =
    new Outer_Syntax(
      keywords + (name -> kind),
      lexicon + name,
      if (Keyword.control(kind._1)) completion else completion + (name, replace))

  def + (name: String, kind: (String, List[String])): Outer_Syntax = this + (name, kind, name)
  def + (name: String, kind: String): Outer_Syntax = this + (name, (kind, Nil), name)
  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)
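
  /* Illustrative sketch (hypothetical keyword "foo"): syntax is extended immutably,
     one keyword at a time, in the style of init_pure above, e.g.

       val syntax = Outer_Syntax.init() + ("theory", Keyword.THY_BEGIN) + ("foo", Keyword.MINOR)

     where each application of + yields a fresh Outer_Syntax value. */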

  def add_keywords(header: Document.Node.Header): Outer_Syntax =
    (this /: header.keywords) {
      case (syntax, ((name, Some((kind, _))))) =>
        syntax + (Symbol.decode(name), kind) + (Symbol.encode(name), kind)
      case (syntax, ((name, None))) =>
        syntax + Symbol.decode(name) + Symbol.encode(name)
    }


  /* command categories */

  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  def heading_level(name: String): Option[Int] =
  {
    keyword_kind(name) match {
      case _ if name == "header" => Some(0)
      case Some(Keyword.THY_HEADING1) => Some(1)
      case Some(Keyword.THY_HEADING2) | Some(Keyword.PRF_HEADING2) => Some(2)
      case Some(Keyword.THY_HEADING3) | Some(Keyword.PRF_HEADING3) => Some(3)
      case Some(Keyword.THY_HEADING4) | Some(Keyword.PRF_HEADING4) => Some(4)
      case Some(kind) if Keyword.theory(kind) => Some(5)
      case _ => None
    }
  }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
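
  /* Illustrative sketch (hypothetical input text): tokenizing a small source chunk
     with the initial Pure syntax, e.g.

       Outer_Syntax.init_pure().scan("theory Foo imports Main begin")

     yields a flat List[Token] covering command keywords, identifiers and spaces. */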

  /* tokenize with explicit scanner context: the resulting context allows scanning
     to be resumed on subsequent chunks of input */
  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}