src/Pure/Isar/outer_syntax.scala
author wenzelm
Wed Nov 10 15:00:40 2010 +0100 (2010-11-10)
changeset 40454 2516ea25a54b
parent 38471 0924654b8163
child 40455 e035dad8eca2
permissions -rw-r--r--
some support for nested source structure, based on section headings;
wenzelm@34166
     1
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/
wenzelm@34166
     6
wenzelm@34166
     7
package isabelle
wenzelm@34166
     8
wenzelm@34166
     9
wenzelm@34166
    10
import scala.util.parsing.input.{Reader, CharSequenceReader}
wenzelm@34166
    11
wenzelm@34166
    12
wenzelm@34166
    13
class Outer_Syntax(symbols: Symbol.Interpretation)
{
  /* syntax tables: keyword name -> kind, lexicon for scanning, completion state */

  protected val keywords: Map[String, String] = Map(";" -> Keyword.DIAG)
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

  // kind of a declared keyword, if any
  def keyword_kind(name: String): Option[String] = keywords.get(name)


  /* extension: derived syntax with one additional keyword */

  def + (name: String, kind: String): Outer_Syntax =
  {
    val keywords1 = keywords + (name -> kind)
    val lexicon1 = lexicon + name
    val completion1 = completion + name
    // anonymous subclass: same symbol interpretation, extended tables
    new Outer_Syntax(symbols) {
      override val lexicon = lexicon1
      override val keywords = keywords1
      override lazy val completion = completion1
    }
  }

  // plain addition declares a minor keyword
  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

  // a name is a command iff it is declared with a non-minor kind
  def is_command(name: String): Boolean =
    keyword_kind(name).exists(_ != Keyword.MINOR)


  /* section headings */

  // FIXME avoid hard-wired info
  private val heading_levels: Map[String, Int] =
    Map(
      "header" -> 1,
      "chapter" -> 2,
      "section" -> 3, "sect" -> 3,
      "subsection" -> 4, "subsect" -> 4,
      "subsubsection" -> 5, "subsubsect" -> 5)

  // nesting level of a section heading command name, if it is one
  def heading_level(name: String): Option[Int] = heading_levels.get(name)

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* tokenize */

  // scan complete input into a list of outer syntax tokens
  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(toks, _) => toks
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
}