more explicit Line_Nesting;

author      wenzelm
date        Thu, 16 Oct 2014 21:24:42 +0200
changeset   58696 6b7445774ce3
parent      58695 91839729224e
child       58697 5bc1d6c4a499
files       src/Pure/Isar/outer_syntax.scala
            src/Tools/jEdit/src/bibtex_jedit.scala
            src/Tools/jEdit/src/fold_handling.scala
            src/Tools/jEdit/src/token_markup.scala
--- a/src/Pure/Isar/outer_syntax.scala	Thu Oct 16 12:24:19 2014 +0200
+++ b/src/Pure/Isar/outer_syntax.scala	Thu Oct 16 21:24:42 2014 +0200
@@ -37,6 +37,16 @@
   val empty: Outer_Syntax = new Outer_Syntax()
 
   def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init())
+
+
+  /* line nesting */
+
+  object Line_Nesting
+  {
+    val init = Line_Nesting(0, 0)
+  }
+
+  sealed case class Line_Nesting(depth_before: Int, depth: Int)
 }
 
 final class Outer_Syntax private(
@@ -66,6 +76,10 @@
       case None => false
     }
 
+  def command_kind(token: Token, pred: String => Boolean): Boolean =
+    token.is_command && is_command(token.source) &&
+      pred(keyword_kind(token.source).get)
+
 
   /* load commands */
 
@@ -134,6 +148,26 @@
     heading_level(command.name)
 
 
+  /* line nesting */
+
+  def line_nesting(tokens: List[Token], depth: Int): Outer_Syntax.Line_Nesting =
+  {
+    val depth1 =
+      if (tokens.exists(tok => command_kind(tok, Keyword.theory))) 0
+      else depth
+    val depth2 =
+      (depth /: tokens) { case (d, tok) =>
+        if (command_kind(tok, Keyword.theory_goal)) 1
+        else if (command_kind(tok, Keyword.theory)) 0
+        else if (command_kind(tok, Keyword.proof_goal)) d + 1
+        else if (command_kind(tok, Keyword.qed)) d - 1
+        else if (command_kind(tok, Keyword.qed_global)) 0
+        else d
+      }
+    Outer_Syntax.Line_Nesting(depth1, depth2)
+  }
+
+
   /* token language */
 
   def scan(input: CharSequence): List[Token] =
@@ -146,8 +180,8 @@
     }
   }
 
-  def scan_line(input: CharSequence, context: Scan.Line_Context, depth: Int)
-    : (List[Token], Scan.Line_Context, Int) =
+  def scan_line(input: CharSequence, context: Scan.Line_Context, nesting: Outer_Syntax.Line_Nesting)
+    : (List[Token], Scan.Line_Context, Outer_Syntax.Line_Nesting) =
   {
     var in: Reader[Char] = new CharSequenceReader(input)
     val toks = new mutable.ListBuffer[Token]
@@ -159,9 +193,8 @@
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
       }
     }
-
-    val depth1 = depth // FIXME
-    (toks.toList, ctxt, depth1)
+    val tokens = toks.toList
+    (tokens, ctxt, line_nesting(tokens, nesting.depth))
   }
 
 
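The core of the change is the fold in line_nesting above: theory commands reset the depth, goal commands open a block, qed commands close one, and scan_line now threads the resulting Line_Nesting from line to line instead of a bare Int. Below is a minimal, self-contained sketch of that computation. The string sets are hypothetical stand-ins for Isabelle's Token/Keyword machinery (the real command_kind consults the keyword table); the names and the main driver are illustrative only.

object LineNestingDemo {
  case class Line_Nesting(depth_before: Int, depth: Int)

  // stand-ins for Keyword.theory, Keyword.theory_goal, Keyword.proof_goal,
  // Keyword.qed, Keyword.qed_global (illustrative, not the real keyword kinds)
  val THEORY = Set("theory", "end")
  val THEORY_GOAL = Set("lemma", "theorem")
  val PROOF_GOAL = Set("have", "show")
  val QED = Set("qed", "done")
  val QED_GLOBAL = Set("oops")

  def line_nesting(kinds: List[String], depth: Int): Line_Nesting = {
    // depth_before: the incoming depth, reset to 0 if the line holds a theory command
    val depth1 =
      if (kinds.exists(THEORY)) 0
      else depth
    // depth: fold over the line's commands, mirroring Outer_Syntax.line_nesting
    val depth2 =
      (depth /: kinds) { case (d, k) =>
        if (THEORY_GOAL(k)) 1
        else if (THEORY(k)) 0
        else if (PROOF_GOAL(k)) d + 1
        else if (QED(k)) d - 1
        else if (QED_GLOBAL(k)) 0
        else d
      }
    Line_Nesting(depth1, depth2)
  }

  def main(args: Array[String]): Unit = {
    // depth is threaded from line to line, as scan_line does with nesting.depth
    val lines = List(
      List("theory"),   // reset:              (0, 0)
      List("lemma"),    // open theory goal:   (0, 1)
      List("have"),     // nested proof goal:  (1, 2)
      List("qed"),      // close inner block:  (2, 1)
      List("qed"),      // close the lemma:    (1, 0)
      List("end"))      // theory end:         (0, 0)
    lines.foldLeft(0) { (depth, kinds) =>
      val nesting = line_nesting(kinds, depth)
      println(s"$kinds -> $nesting")
      nesting.depth
    }
  }
}

Here depth_before reflects the nesting at the start of the line, while depth is the value after the whole line has been processed, which scan_line hands on to the next line.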
--- a/src/Tools/jEdit/src/bibtex_jedit.scala	Thu Oct 16 12:24:19 2014 +0200
+++ b/src/Tools/jEdit/src/bibtex_jedit.scala	Thu Oct 16 21:24:42 2014 +0200
@@ -154,7 +154,8 @@
   private val context_rules = new ParserRuleSet("bibtex", "MAIN")
 
   private class Line_Context(context: Option[Bibtex.Line_Context])
-    extends Token_Markup.Generic_Line_Context[Bibtex.Line_Context](context_rules, context, 0)
+    extends Token_Markup.Generic_Line_Context[Bibtex.Line_Context](
+      context_rules, context, Outer_Syntax.Line_Nesting.init)
 
 
   /* token marker */
--- a/src/Tools/jEdit/src/fold_handling.scala	Thu Oct 16 12:24:19 2014 +0200
+++ b/src/Tools/jEdit/src/fold_handling.scala	Thu Oct 16 21:24:42 2014 +0200
@@ -27,7 +27,7 @@
       }
 
     override def getFoldLevel(buffer: JEditBuffer, line: Int, seg: Segment): Int =
-      Token_Markup.buffer_line_depth(buffer, line)
+      Token_Markup.buffer_line_nesting(buffer, line).depth_before
   }
 
 
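For folding, the handler now reports depth_before rather than the end-of-line depth, so a line that opens a block (e.g. a lemma) keeps the outer fold level while the lines after it are nested beneath it. Roughly, jEdit starts a fold wherever a line's fold level exceeds that of the previous line; the following sketch reads a list of depth_before values in that way (plain Scala, not the jEdit API, with levels taken from the example above).

object FoldLevelDemo {
  // a fold starts where the level rises relative to the previous line
  def foldStarts(levels: List[Int]): List[Boolean] =
    levels match {
      case Nil => Nil
      case first :: _ =>
        (first :: levels).zip(levels).map { case (prev, cur) => cur > prev }
    }

  def main(args: Array[String]): Unit = {
    // depth_before per line for: theory / lemma / have / qed / qed / end
    println(foldStarts(List(0, 0, 1, 2, 1, 0)))
    // List(false, false, true, true, false, false)
  }
}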
--- a/src/Tools/jEdit/src/token_markup.scala	Thu Oct 16 12:24:19 2014 +0200
+++ b/src/Tools/jEdit/src/token_markup.scala	Thu Oct 16 21:24:42 2014 +0200
@@ -178,14 +178,14 @@
   class Generic_Line_Context[C](
       rules: ParserRuleSet,
       val context: Option[C],
-      val depth: Int)
+      val nesting: Outer_Syntax.Line_Nesting)
     extends TokenMarker.LineContext(rules, null)
   {
-    override def hashCode: Int = (context, depth).hashCode
+    override def hashCode: Int = (context, nesting).hashCode
     override def equals(that: Any): Boolean =
       that match {
         case other: Generic_Line_Context[_] =>
-          context == other.context && depth == other.depth
+          context == other.context && nesting == other.nesting
         case _ => false
       }
   }
@@ -200,26 +200,29 @@
       case _ => None
     }
 
-  def buffer_line_depth(buffer: JEditBuffer, line: Int): Int =
-    buffer_line_context(buffer, line) match { case Some(c) => c.depth case None => 0 }
+  def buffer_line_nesting(buffer: JEditBuffer, line: Int): Outer_Syntax.Line_Nesting =
+    buffer_line_context(buffer, line) match {
+      case Some(c) => c.nesting
+      case None => Outer_Syntax.Line_Nesting.init
+    }
 
 
   /* token marker */
 
   private val context_rules = new ParserRuleSet("isabelle", "MAIN")
 
-  private class Line_Context(context: Option[Scan.Line_Context], depth: Int)
-    extends Generic_Line_Context[Scan.Line_Context](context_rules, context, depth)
+  private class Line_Context(context: Option[Scan.Line_Context], nesting: Outer_Syntax.Line_Nesting)
+    extends Generic_Line_Context[Scan.Line_Context](context_rules, context, nesting)
 
   class Marker(mode: String) extends TokenMarker
   {
     override def markTokens(context: TokenMarker.LineContext,
         handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext =
     {
-      val (opt_ctxt, depth) =
+      val (opt_ctxt, nesting) =
         context match {
-          case c: Line_Context => (c.context, c.depth)
-          case _ => (Some(Scan.Finished), 0)
+          case c: Line_Context => (c.context, c.nesting)
+          case _ => (Some(Scan.Finished), Outer_Syntax.Line_Nesting.init)
         }
       val line = if (raw_line == null) new Segment else raw_line
 
@@ -230,17 +233,17 @@
             case (Some(ctxt), _) if mode == "isabelle-ml" || mode == "sml" =>
               val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, ctxt)
               val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source))
-              (styled_tokens, new Line_Context(Some(ctxt1), depth))
+              (styled_tokens, new Line_Context(Some(ctxt1), nesting))
 
             case (Some(ctxt), Some(syntax)) if syntax.has_tokens =>
-              val (tokens, ctxt1, depth1) = syntax.scan_line(line, ctxt, depth)
+              val (tokens, ctxt1, nesting1) = syntax.scan_line(line, ctxt, nesting)
               val styled_tokens =
                 tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))
-              (styled_tokens, new Line_Context(Some(ctxt1), depth1))
+              (styled_tokens, new Line_Context(Some(ctxt1), nesting1))
 
             case _ =>
               val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString)
-              (List(styled_token), new Line_Context(None, 0))
+              (List(styled_token), new Line_Context(None, nesting))
           }
 
         val extended = extended_styles(line)
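The hashCode/equals overrides in Generic_Line_Context matter because jEdit compares the context object returned for a line with the one cached previously to decide how far re-tokenization must propagate; by including nesting in the comparison, a change of nesting on one line also invalidates the lines that follow it. A simplified, hypothetical illustration of that value equality (not the actual Generic_Line_Context, which extends TokenMarker.LineContext):

final class Ctx[C](val context: Option[C], val nesting: (Int, Int)) {
  // equality covers both the scanner context and the nesting
  override def hashCode: Int = (context, nesting).hashCode
  override def equals(that: Any): Boolean =
    that match {
      case other: Ctx[_] => context == other.context && nesting == other.nesting
      case _ => false
    }
}

object CtxDemo {
  def main(args: Array[String]): Unit = {
    val a = new Ctx(Some("finished"), (0, 1))
    val b = new Ctx(Some("finished"), (0, 1))
    val c = new Ctx(Some("finished"), (1, 2))
    println(a == b)  // true: same context and nesting, nothing more to re-mark
    println(a == c)  // false: nesting differs, later lines must be re-examined
  }
}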