src/Pure/Isar/outer_syntax.scala
author wenzelm
Tue Jul 12 14:54:29 2011 +0200 (2011-07-12)
changeset 43774 6dfdb70496fe
parent 43695 5130dfe1b7be
child 44051 2ec66075a75c
permissions -rw-r--r--
added Outer_Syntax.quote_string, which is conceptually a bit different from Token.unparse;
wenzelm@34166
     1
/*  Title:      Pure/Isar/outer_syntax.scala
wenzelm@34166
     2
    Author:     Makarius
wenzelm@34166
     3
wenzelm@34166
     4
Isabelle/Isar outer syntax.
wenzelm@34166
     5
*/
wenzelm@34166
     6
wenzelm@34166
     7
package isabelle
wenzelm@34166
     8
wenzelm@34166
     9
wenzelm@34166
    10
import scala.util.parsing.input.{Reader, CharSequenceReader}
wenzelm@43411
    11
import scala.collection.mutable
wenzelm@34166
    12
wenzelm@34166
    13
wenzelm@43774
    14
object Outer_Syntax
{
  /* Quote a string as an Isar outer-syntax string token, wrapping it in
     double quotes.

     Single-character symbols that are control characters (code < 32,
     except the two YXML marker characters X and Y — presumably kept
     verbatim so embedded YXML markup survives; confirm against YXML),
     as well as backslash and the quote character itself, are escaped as
     a backslash followed by a zero-padded 3-digit decimal character code.
     Multi-character symbols from Symbol.iterator pass through unchanged.

     Note: this is conceptually different from Token.unparse. */
  def quote_string(str: String): String =
  {
    // small slack beyond str.length for the surrounding quotes and escapes
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          // pad the decimal code to exactly 3 digits
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= c.toInt.toString  // idiomatic Char->Int instead of asInstanceOf
        }
        else result += c
      }
      else result ++= s  // multi-character symbol: emit verbatim
    }
    result += '"'
    result.toString
  }
}
wenzelm@43774
    37
wenzelm@43695
    38
/** Isabelle/Isar outer syntax: keyword table, lexicon, completion and
    tokenizer.  Instances are immutable; the `+` operators return extended
    copies via anonymous subclasses that override the protected state. */
class Outer_Syntax
{
  // keyword name -> kind (cf. Keyword); ";" is always present as a DIAG command
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = (new Completion).add_symbols // FIXME odd initialization

  // kind of a known keyword, None if unknown
  def keyword_kind(name: String): Option[String] = keywords.get(name)

  /* Extend the syntax with keyword `name` of the given kind; `replace` is
     the completion replacement text.  Returns a new Outer_Syntax; this one
     is unchanged.  NOTE: completion stays an overridden lazy val so the
     base initializer is never forced on the extended copy. */
  def + (name: String, kind: String, replace: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + (name, replace)
    new Outer_Syntax {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  // keyword whose completion replacement is the name itself
  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  // minor (non-command) keyword
  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

  // a name is a command iff it is a known keyword of non-MINOR kind
  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  /* Document heading level of a command name: 1 for "header" down to 5 for
     "subsubsection"; any other theory-kind keyword counts as level 6;
     None for non-heading commands. */
  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }

  // heading level of a command, by its name
  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* tokenize */

  /* Scan the complete input into a token list; tokenization is total, so a
     parse failure indicates an internal error. */
  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))

  /* Scan input that starts inside scanner context `context` (e.g. within a
     comment or quotation spanning multiple chunks); returns the tokens
     together with the context left at the end of the input. */
  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    // consume one token at a time, threading the context through
    while (!in.atEnd) {
      parse(token_context(is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}