/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable


object Outer_Syntax
{
  /* Quote a string for Isar outer syntax: wrap in double quotes and escape
     backslash, double quote, and control characters (except the YXML markers
     X and Y) as backslash + zero-padded 3-digit decimal code.
     Iterates over Isabelle symbols, so multi-character symbols pass through
     unescaped. */
  def quote_string(str: String): String =
  {
    val result = new StringBuilder(str.length + 10)
    result += '"'
    for (s <- Symbol.iterator(str)) {
      if (s.length == 1) {
        val c = s(0)
        if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') {
          result += '\\'
          // zero-pad to exactly three decimal digits
          if (c < 10) result += '0'
          if (c < 100) result += '0'
          result ++= c.toInt.toString  // idiomatic primitive conversion (was asInstanceOf[Int])
        }
        else result += c
      }
      else result ++= s
    }
    result += '"'
    result.toString
  }

  /* Empty syntax: no keywords, no completion. */
  val empty: Outer_Syntax = new Outer_Syntax()

  /* Initial syntax with default completion. */
  def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())
}

/* Immutable table of outer syntax keywords: each keyword name maps to its
   kind and associated file extensions; the lexicon and completion are kept
   in sync via the `+` operations. */
final class Outer_Syntax private(
  keywords: Map[String, (String, List[String])] = Map.empty,
  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
  val completion: Completion = Completion.empty,
  val has_tokens: Boolean = true)
{
  /* Render in the concrete syntax of a theory header "keywords" section. */
  override def toString: String =
    (for ((name, (kind, files)) <- keywords) yield {
      if (kind == Keyword.MINOR) quote(name)
      else
        quote(name) + " :: " + quote(kind) +
        (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
    }).toList.sorted.mkString("keywords\n ", " and\n ", "")

  def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name)
  def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1)

  /* File extensions of a load command span, or None if the span's command
     is not a THY_LOAD keyword. */
  def thy_load(span: List[Token]): Option[List[String]] =
    keywords.get(Command.name(span)) match {
      case Some((Keyword.THY_LOAD, exts)) => Some(exts)
      case _ => None
    }

  /* All THY_LOAD commands with their file extensions. */
  val thy_load_commands: List[(String, List[String])] =
    (for ((name, (Keyword.THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList

  /* Add a keyword; `replace` is the completion replacement text:
     None suppresses completion, Some("") likewise (and so do control kinds). */
  def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
  {
    val keywords1 = keywords + (name -> kind)
    val lexicon1 = lexicon + name
    val completion1 =
      if (Keyword.control(kind._1) || replace == Some("")) completion
      else completion + (name, replace getOrElse name)
    new Outer_Syntax(keywords1, lexicon1, completion1, true)
  }

  def + (name: String, kind: (String, List[String])): Outer_Syntax =
    this + (name, kind, Some(name))
  def + (name: String, kind: String): Outer_Syntax =
    this + (name, (kind, Nil), Some(name))
  def + (name: String, replace: Option[String]): Outer_Syntax =
    this + (name, (Keyword.MINOR, Nil), replace)
  def + (name: String): Outer_Syntax = this + (name, None)

  /* Add theory-header keywords, in both decoded and encoded symbol form. */
  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
    (this /: keywords) {
      case (syntax, (name, Some((kind, _)), replace)) =>
        syntax +
          (Symbol.decode(name), kind, replace) +
          (Symbol.encode(name), kind, replace)
      case (syntax, (name, None, replace)) =>
        syntax +
          (Symbol.decode(name), replace) +
          (Symbol.encode(name), replace)
    }

  /* A command is any keyword that is not MINOR. */
  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

  /* Sectioning depth of a heading command: "header" is outermost (0),
     then THY/PRF headings 1..4, then any theory command (5). */
  def heading_level(name: String): Option[Int] =
  {
    keyword_kind(name) match {
      case _ if name == "header" => Some(0)
      case Some(Keyword.THY_HEADING1) => Some(1)
      case Some(Keyword.THY_HEADING2) | Some(Keyword.PRF_HEADING2) => Some(2)
      case Some(Keyword.THY_HEADING3) | Some(Keyword.PRF_HEADING3) => Some(3)
      case Some(Keyword.THY_HEADING4) | Some(Keyword.PRF_HEADING4) => Some(4)
      case Some(kind) if Keyword.theory(kind) => Some(5)
      case _ => None
    }
  }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* token language */

  /* Variant without token language, keeping only completion; only legal on
     an otherwise empty syntax. */
  def no_tokens: Outer_Syntax =
  {
    require(keywords.isEmpty && lexicon.isEmpty)
    new Outer_Syntax(completion = completion, has_tokens = false)
  }

  /* Tokenize complete input; tokenization is total, so failure is an
     internal error. */
  def scan(input: Reader[Char]): List[Token] =
  {
    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), input) match {
      case Token.Parsers.Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))

  /* Tokenize incrementally, threading the scanner context (e.g. for
     text spanning multiple input chunks); returns tokens and final context. */
  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      Token.Parsers.parse(Token.Parsers.token_context(lexicon, is_command, ctxt), in) match {
        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case Token.Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}