author wenzelm
date Sun, 16 Feb 2014 13:18:08 +0100
changeset 55510 1585a65aad64
parent 55508 90c42b130652
child 55511 984e210d412e
tuned signature -- emphasize line-oriented aspect;
src/Pure/General/scan.scala
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/token.scala
src/Pure/ML/ml_lex.scala
src/Tools/jEdit/src/token_markup.scala
--- a/src/Pure/General/scan.scala	Sat Feb 15 21:11:29 2014 +0100
+++ b/src/Pure/General/scan.scala	Sun Feb 16 13:18:08 2014 +0100
@@ -17,14 +17,14 @@
 
 object Scan
 {
-  /** context of partial scans (line boundary) **/
+  /** context of partial line-oriented scans **/
 
-  abstract class Context
-  case object Finished extends Context
-  case class Quoted(quote: String) extends Context
-  case object Verbatim extends Context
-  case class Cartouche(depth: Int) extends Context
-  case class Comment(depth: Int) extends Context
+  abstract class Line_Context
+  case object Finished extends Line_Context
+  case class Quoted(quote: String) extends Line_Context
+  case object Verbatim extends Line_Context
+  case class Cartouche(depth: Int) extends Line_Context
+  case class Comment(depth: Int) extends Line_Context
 
 
 
@@ -110,7 +110,7 @@
       else body
     }
 
-    def quoted_context(quote: Symbol.Symbol, ctxt: Context): Parser[(String, Context)] =
+    def quoted_line(quote: Symbol.Symbol, ctxt: Line_Context): Parser[(String, Line_Context)] =
     {
       ctxt match {
         case Finished =>
@@ -123,7 +123,7 @@
               case x ~ None => (x, ctxt) }
         case _ => failure("")
       }
-    }.named("quoted_context")
+    }.named("quoted_line")
 
     def recover_quoted(quote: Symbol.Symbol): Parser[String] =
       quote ~ quoted_body(quote) ^^ { case x ~ y => x + y }
@@ -145,7 +145,7 @@
       source.substring(2, source.length - 2)
     }
 
-    def verbatim_context(ctxt: Context): Parser[(String, Context)] =
+    def verbatim_line(ctxt: Line_Context): Parser[(String, Line_Context)] =
     {
       ctxt match {
         case Finished =>
@@ -158,7 +158,7 @@
               case x ~ None => (x, Verbatim) }
         case _ => failure("")
       }
-    }.named("verbatim_context")
+    }.named("verbatim_line")
 
     val recover_verbatim: Parser[String] =
       "{*" ~ verbatim_body ^^ { case x ~ y => x + y }
@@ -194,7 +194,7 @@
     def cartouche: Parser[String] =
       cartouche_depth(0) ^? { case (x, d) if d == 0 => x }
 
-    def cartouche_context(ctxt: Context): Parser[(String, Context)] =
+    def cartouche_line(ctxt: Line_Context): Parser[(String, Line_Context)] =
     {
       val depth =
         ctxt match {
@@ -258,7 +258,7 @@
     def comment: Parser[String] =
       comment_depth(0) ^? { case (x, d) if d == 0 => x }
 
-    def comment_context(ctxt: Context): Parser[(String, Context)] =
+    def comment_line(ctxt: Line_Context): Parser[(String, Line_Context)] =
     {
       val depth =
         ctxt match {
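
The renamed constructors make the line-oriented contract explicit: a Line_Context value records what is still open when a physical line ends mid-token, so scanning the next line can resume there. A minimal sketch (the describe helper is hypothetical, assuming the Scan object as changed above):

    import isabelle._

    // Hypothetical helper: spell out what each Line_Context constructor
    // encodes about the state at the end of a physical line.
    def describe(ctxt: Scan.Line_Context): String =
      ctxt match {
        case Scan.Finished => "no delimited token left open"
        case Scan.Quoted(quote) => "inside a string opened by " + quote
        case Scan.Verbatim => "inside a {* ... *} verbatim text"
        case Scan.Cartouche(depth) => "inside " + depth + " nested cartouches"
        case Scan.Comment(depth) => "inside " + depth + " nested (* ... *) comments"
      }
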
--- a/src/Pure/Isar/outer_syntax.scala	Sat Feb 15 21:11:29 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Sun Feb 16 13:18:08 2014 +0100
@@ -137,13 +137,13 @@
   def scan(input: CharSequence): List[Token] =
     scan(new CharSequenceReader(input))
 
-  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
+  def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) =
   {
     var in: Reader[Char] = new CharSequenceReader(input)
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      Token.Parsers.parse(Token.Parsers.token_context(lexicon, is_command, ctxt), in) match {
+      Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match {
         case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
         case Token.Parsers.NoSuccess(_, rest) =>
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
--- a/src/Pure/Isar/token.scala	Sat Feb 15 21:11:29 2014 +0100
+++ b/src/Pure/Isar/token.scala	Sun Feb 16 13:18:08 2014 +0100
@@ -102,16 +102,16 @@
     def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
       delimited_token | other_token(lexicon, is_command)
 
-    def token_context(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Context)
-      : Parser[(Token, Scan.Context)] =
+    def token_line(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Line_Context)
+      : Parser[(Token, Scan.Line_Context)] =
     {
       val string =
-        quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
+        quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
       val alt_string =
-        quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
-      val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
-      val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
-      val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
+        quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
+      val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
+      val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
+      val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
       val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
 
       string | (alt_string | (verb | (cart | (cmt | other))))
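
Note that the `other` alternative resets the context to Scan.Finished: only the delimited tokens (strings, verbatim text, cartouches, comments) can span lines. A hedged sketch of a single token_line step on a line that opens a comment without closing it (the empty lexicon and the expected results are assumptions for illustration, not output from this changeset):

    import isabelle._
    import scala.util.parsing.input.CharSequenceReader

    val lexicon = Scan.Lexicon.empty        // assumed minimal lexicon
    val is_command = (_: String) => false   // no commands needed for this sketch
    val parser = Token.Parsers.token_line(lexicon, is_command, Scan.Finished)
    Token.Parsers.parse(parser, new CharSequenceReader("(* open comment")) match {
      case Token.Parsers.Success((tok, ctxt), _) =>
        // expected: tok is a COMMENT token and ctxt == Scan.Comment(1),
        // ready to be threaded into token_line for the next physical line
        println(tok.kind.toString + " / " + ctxt)
      case bad => sys.error("Unexpected: " + bad)
    }
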
--- a/src/Pure/ML/ml_lex.scala	Sat Feb 15 21:11:29 2014 +0100
+++ b/src/Pure/ML/ml_lex.scala	Sun Feb 16 13:18:08 2014 +0100
@@ -64,7 +64,7 @@
 
   /** parsers **/
 
-  case object ML_String extends Scan.Context
+  case object ML_String extends Scan.Line_Context
 
   private object Parsers extends Scan.Parsers
   {
@@ -107,9 +107,9 @@
     private val ml_string: Parser[Token] =
       "\"" ~ ml_string_body ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.STRING, x + y + z) }
 
-    private def ml_string_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+    private def ml_string_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
     {
-      def result(x: String, c: Scan.Context) = (Token(Kind.STRING, x), c)
+      def result(x: String, c: Scan.Line_Context) = (Token(Kind.STRING, x), c)
 
       ctxt match {
         case Scan.Finished =>
@@ -130,8 +130,8 @@
     private val ml_comment: Parser[Token] =
       comment ^^ (x => Token(Kind.COMMENT, x))
 
-    private def ml_comment_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
-      comment_context(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }
+    private def ml_comment_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
+      comment_line(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }
 
 
     /* delimited token */
@@ -203,11 +203,11 @@
 
     def token: Parser[Token] = delimited_token | other_token
 
-    def token_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+    def token_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] =
     {
       val other = (ml_char | other_token) ^^ (x => (x, Scan.Finished))
 
-      ml_string_context(ctxt) | (ml_comment_context(ctxt) | other)
+      ml_string_line(ctxt) | (ml_comment_line(ctxt) | other)
     }
   }
 
@@ -222,13 +222,14 @@
     }
   }
 
-  def tokenize_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
+  def tokenize_line(input: CharSequence, context: Scan.Line_Context)
+    : (List[Token], Scan.Line_Context) =
   {
     var in: Reader[Char] = new CharSequenceReader(input)
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      Parsers.parse(Parsers.token_context(ctxt), in) match {
+      Parsers.parse(Parsers.token_line(ctxt), in) match {
         case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
         case Parsers.NoSuccess(_, rest) =>
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
--- a/src/Tools/jEdit/src/token_markup.scala	Sat Feb 15 21:11:29 2014 +0100
+++ b/src/Tools/jEdit/src/token_markup.scala	Sun Feb 16 13:18:08 2014 +0100
@@ -177,7 +177,7 @@
 
   private val isabelle_rules = new ParserRuleSet("isabelle", "MAIN")
 
-  private class Line_Context(val context: Option[Scan.Context])
+  private class Line_Context(val context: Option[Scan.Line_Context])
     extends TokenMarker.LineContext(isabelle_rules, null)
   {
     override def hashCode: Int = context.hashCode
@@ -204,14 +204,14 @@
       {
         val (styled_tokens, context1) =
           if (mode == "isabelle-ml") {
-            val (tokens, ctxt1) = ML_Lex.tokenize_context(line, line_ctxt.get)
+            val (tokens, ctxt1) = ML_Lex.tokenize_line(line, line_ctxt.get)
             val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source))
             (styled_tokens, new Line_Context(Some(ctxt1)))
           }
           else {
             Isabelle.mode_syntax(mode) match {
               case Some(syntax) if syntax.has_tokens && line_ctxt.isDefined =>
-                val (tokens, ctxt1) = syntax.scan_context(line, line_ctxt.get)
+                val (tokens, ctxt1) = syntax.scan_line(line, line_ctxt.get)
                 val styled_tokens =
                   tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))
                 (styled_tokens, new Line_Context(Some(ctxt1)))
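
On the jEdit side, Token_Markup.Line_Context wraps the per-line Scan.Line_Context, and its hashCode (and equals) overrides give it value semantics. A small illustration of why that matters (the claim about jEdit's incremental TokenMarker, which can stop re-tokenizing once a line's resulting context compares equal to the stored one, is an assumption of this sketch):

    import isabelle._

    // Case classes and case objects compare structurally, so contexts from
    // two tokenization passes over the same text are equal by value.
    assert(Scan.Comment(1) == Scan.Comment(1))
    assert(Scan.Quoted("\"") != Scan.Quoted("`"))
    assert(Scan.Finished == Scan.Finished)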