src/Pure/Isar/outer_syntax.scala
author       wenzelm
date         Sat Oct 18 10:32:19 2014 +0200 (2014-10-18)
changeset    58697 5bc1d6c4a499
parent       58696 6b7445774ce3
child        58700 4717d18cc619
permissions  -rw-r--r--
description  tuned signature;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


object Outer_Syntax
{
  /* Quote a string as a double-quoted outer-syntax string literal: ASCII
     control characters (except the YXML markers), backslash and double-quote
     are escaped as zero-padded decimal "\DDD" sequences. */
  def quote_string(str: String): String =
  {
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= (c.asInstanceOf[Int].toString)
        }
        else result += c
      }
      else result ++= s
    }
    result += '"'
    result.toString
  }
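
  /* Illustrative sketch, assuming plain ASCII input where Symbol.iterator
     yields one character at a time:

       quote_string("a\"b\n")   // printed result: "a\034b\010"

     i.e. control characters, backslash and double-quote become zero-padded
     decimal escapes, everything else is copied literally. */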

  val empty: Outer_Syntax = new Outer_Syntax()

  def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())


  /* line-oriented structure */

  object Line_Structure
  {
    val init = Line_Structure(0, 0)
  }

  sealed case class Line_Structure(depth_before: Int, depth: Int)
}

final class Outer_Syntax private(
  keywords: Map[String, (String, List[String])] = Map.empty,
  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
  val completion: Completion = Completion.empty,
  val language_context: Completion.Language_Context = Completion.Language_Context.outer,
  val has_tokens: Boolean = true) extends Prover.Syntax
{
  override def toString: String =
    (for ((name, (kind, files)) <- keywords) yield {
      if (kind == Keyword.MINOR) quote(name)
      else
        quote(name) + " :: " + quote(kind) +
        (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
    }).toList.sorted.mkString("keywords\n  ", " and\n  ", "")
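
  /* Illustrative sketch of the print form, for a hypothetical keyword table:
     minor keywords are listed bare, command keywords with their kind and
     optional file extensions, e.g.

       keywords
         "::" and
         "ML_file" :: "thy_load" ("ML") and
         "lemma" :: "thy_goal"
  */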


  /* keyword kind */

  def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name)
  def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1)

  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  def command_kind(token: Token, pred: String => Boolean): Boolean =
    token.is_command && is_command(token.source) &&
      pred(keyword_kind(token.source).get)
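
  /* Illustrative sketch: command_kind checks a token both syntactically
     (token.is_command) and against the keyword table, so

       command_kind(tok, Keyword.theory_goal)

     holds only for command tokens whose registered kind satisfies the given
     predicate; this is the basis of the line structure computation below. */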


  /* load commands */

  def load_command(name: String): Option[List[String]] =
    keywords.get(name) match {
      case Some((Keyword.THY_LOAD, exts)) => Some(exts)
      case _ => None
    }

  val load_commands: List[(String, List[String])] =
    (for ((name, (Keyword.THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList

  def load_commands_in(text: String): Boolean =
    load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
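
  /* Illustrative sketch, for a hypothetical keyword table where "ML_file" is
     registered with kind Keyword.THY_LOAD and file extension "ML":

       load_command("ML_file")                        // == Some(List("ML"))
       load_commands_in("ML_file \"Tools/foo.ML\"")   // == true (cheap substring check)
  */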


  /* add keywords */

  def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
  {
    val keywords1 = keywords + (name -> kind)
    val lexicon1 = lexicon + name
    val completion1 =
      if (Keyword.control(kind._1) || replace == Some("")) completion
      else completion + (name, replace getOrElse name)
    new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true)
  }

  def + (name: String, kind: (String, List[String])): Outer_Syntax =
    this + (name, kind, Some(name))
  def + (name: String, kind: String): Outer_Syntax =
    this + (name, (kind, Nil), Some(name))
  def + (name: String, replace: Option[String]): Outer_Syntax =
    this + (name, (Keyword.MINOR, Nil), replace)
  def + (name: String): Outer_Syntax = this + (name, None)

  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
    (this /: keywords) {
      case (syntax, (name, Some((kind, _)), replace)) =>
        syntax +
          (Symbol.decode(name), kind, replace) +
          (Symbol.encode(name), kind, replace)
      case (syntax, (name, None, replace)) =>
        syntax +
          (Symbol.decode(name), replace) +
          (Symbol.encode(name), replace)
    }
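
  /* Illustrative usage sketch, with hypothetical keywords: keywords are added
     incrementally, each step yielding a fresh immutable syntax value:

       val syntax =
         Outer_Syntax.init() +
           "and" +                                        // minor keyword
           ("lemma", (Keyword.THY_GOAL, Nil)) +           // command keyword
           ("ML_file", (Keyword.THY_LOAD, List("ML")))    // load command with file extension
  */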


  /* document headings */

  def heading_level(name: String): Option[Int] =
  {
    keyword_kind(name) match {
      case _ if name == "header" => Some(0)
      case Some(Keyword.THY_HEADING1) => Some(1)
      case Some(Keyword.THY_HEADING2) | Some(Keyword.PRF_HEADING2) => Some(2)
      case Some(Keyword.THY_HEADING3) | Some(Keyword.PRF_HEADING3) => Some(3)
      case Some(Keyword.THY_HEADING4) | Some(Keyword.PRF_HEADING4) => Some(4)
      case Some(kind) if Keyword.theory(kind) => Some(5)
      case _ => None
    }
  }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)
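
  /* Illustrative sketch: "header" always maps to level 0, the THY_/PRF_HEADING
     kinds to levels 1..4, any other theory command to level 5 (so it closes
     all open headings), and everything else to None, e.g.

       heading_level("header")   // == Some(0)
  */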


  /* line-oriented structure */

  // depth_before: the incoming depth, reset to 0 if the line contains a theory
  // command; depth: the result of folding the structure-relevant commands of
  // the line (theory/goal/qed commands) over the incoming depth.
  def line_structure(tokens: List[Token], depth: Int): Outer_Syntax.Line_Structure =
  {
    val depth1 =
      if (tokens.exists(tok => command_kind(tok, Keyword.theory))) 0
      else depth
    val depth2 =
      (depth /: tokens) { case (d, tok) =>
        if (command_kind(tok, Keyword.theory_goal)) 1
        else if (command_kind(tok, Keyword.theory)) 0
        else if (command_kind(tok, Keyword.proof_goal)) d + 1
        else if (command_kind(tok, Keyword.qed)) d - 1
        else if (command_kind(tok, Keyword.qed_global)) 0
        else d
      }
    Outer_Syntax.Line_Structure(depth1, depth2)
  }
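
  /* Illustrative usage sketch, with hypothetical token lists for consecutive
     lines: the resulting depth of one line is threaded into the call for the
     next line, e.g.

       val s1 = line_structure(tokens_of_line1, 0)
       val s2 = line_structure(tokens_of_line2, s1.depth)
  */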
   169 
   170 
   171   /* token language */
   172 
   173   def scan(input: CharSequence): List[Token] =
   174   {
   175     val in: Reader[Char] = new CharSequenceReader(input)
   176     Token.Parsers.parseAll(
   177         Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match {
   178       case Token.Parsers.Success(tokens, _) => tokens
   179       case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
   180     }
   181   }
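
  /* Illustrative sketch, with a hypothetical syntax value and assumed token
     sources: scanning a complete text yields all tokens at once, including
     white space, e.g.

       val tokens = syntax.scan("lemma foo: \"A\"")
       tokens.map(_.source)   // e.g. List("lemma", " ", "foo", ":", " ", "\"A\"")
  */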

  def scan_line(
    input: CharSequence,
    context: Scan.Line_Context,
    structure: Outer_Syntax.Line_Structure)
    : (List[Token], Scan.Line_Context, Outer_Syntax.Line_Structure) =
  {
    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match {
        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case Token.Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    val tokens = toks.toList
    (tokens, ctxt, line_structure(tokens, structure.depth))
  }
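
  /* Illustrative usage sketch, with hypothetical syntax/text values and
     assuming Scan.Finished as the initial context: scan_line supports
     incremental line-by-line tokenization by threading the scanner context
     and the line structure through consecutive lines:

       var ctxt: Scan.Line_Context = Scan.Finished
       var structure = Outer_Syntax.Line_Structure.init
       for (line <- text.split("\n", -1)) {
         val (toks, ctxt1, structure1) = syntax.scan_line(line, ctxt, structure)
         ctxt = ctxt1; structure = structure1
         // ... process toks ...
       }
  */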


  /* parse_spans */

  // Group a token stream into command spans: each command token starts a new
  // span; runs of improper tokens (white space, comments) that directly precede
  // the next command or the end of input are shipped separately as ignored spans.
  def parse_spans(toks: List[Token]): List[Command_Span.Span] =
  {
    val result = new mutable.ListBuffer[Command_Span.Span]
    val content = new mutable.ListBuffer[Token]
    val improper = new mutable.ListBuffer[Token]

    def ship(span: List[Token])
    {
      val kind =
        if (!span.isEmpty && span.head.is_command && !span.exists(_.is_error)) {
          val name = span.head.source
          val pos = Position.Range(Text.Range(0, Symbol.iterator(name).length) + 1)
          Command_Span.Command_Span(name, pos)
        }
        else if (span.forall(_.is_improper)) Command_Span.Ignored_Span
        else Command_Span.Malformed_Span
      result += Command_Span.Span(kind, span)
    }

    def flush()
    {
      if (!content.isEmpty) { ship(content.toList); content.clear }
      if (!improper.isEmpty) { ship(improper.toList); improper.clear }
    }

    for (tok <- toks) {
      if (tok.is_command) { flush(); content += tok }
      else if (tok.is_improper) improper += tok
      else { content ++= improper; improper.clear; content += tok }
    }
    flush()

    result.toList
  }

  def parse_spans(input: CharSequence): List[Command_Span.Span] =
    parse_spans(scan(input))
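
  /* Illustrative sketch, for a hypothetical keyword table where "lemma" and
     "by" are registered as commands:

       syntax.parse_spans("lemma foo: \"A\" by simp\n")

     yields a command span for the lemma statement, an ignored span for the
     separating space, a command span for "by simp", and a trailing ignored
     span for the final newline. */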


  /* language context */

  def set_language_context(context: Completion.Language_Context): Outer_Syntax =
    new Outer_Syntax(keywords, lexicon, completion, context, has_tokens)

  def no_tokens: Outer_Syntax =
  {
    require(keywords.isEmpty && lexicon.isEmpty)
    new Outer_Syntax(
      completion = completion,
      language_context = language_context,
      has_tokens = false)
  }
}