src/Pure/Isar/outer_syntax.scala
author wenzelm
Thu Jun 16 17:25:16 2011 +0200 (2011-06-16)
changeset 43411 0206466ee473
parent 40533 e38e80686ce5
child 43445 270bbbcda059
permissions -rw-r--r--
some support for partial scans with explicit context;
clarified junk vs. junk1;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


class Outer_Syntax(symbols: Symbol.Interpretation)
{
  protected val keywords: Map[String, String] = Map(";" -> Keyword.DIAG)
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

  def keyword_kind(name: String): Option[String] = keywords.get(name)

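  // Editorial note: the keyword table maps names to Keyword kinds; minor
  // keywords are mere syntax markers, while any other kind designates a
  // proper command (cf. is_command below). The "+" operators below extend
  // the syntax without mutating it.
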
  def + (name: String, kind: String, replace: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + (name, replace)
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

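  // Usage sketch (illustrative, not part of the original file): each "+"
  // returns a fresh Outer_Syntax with extended keyword, lexicon, and
  // completion tables, e.g.
  //
  //   val syntax = new Outer_Syntax(symbols) + "and" + ("print_state", Keyword.DIAG)
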
  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)

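  // Editorial note: heading levels order document structure from "header"
  // (level 1) down to "subsubsection" (level 5); any other theory-level
  // command is treated as level 6, i.e. below all explicit sectioning.
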

  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))

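  // Usage sketch (illustrative, not from the original file): scanning a
  // complete source yields the whole token list at once, e.g.
  //
  //   val tokens = syntax.scan("lemma foo")
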
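  // Editorial note: scan_context supports partial scans with explicit context
  // (per the changeset message), threading scanner state such as an open
  // comment or quotation from one chunk to the next. Assuming Scan.Finished
  // as the initial context (an assumption, not shown in this file), a text
  // can be tokenized line by line:
  //
  //   val (toks1, ctxt1) = syntax.scan_context(line1, Scan.Finished)
  //   val (toks2, ctxt2) = syntax.scan_context(line2, ctxt1)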
  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(symbols, is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}