changeset:   58541:48e23e342415
parent:      58540:872f330a0f8a (diff)
parent:      58522:ad010811f450 (current diff)
child:       58542:19e062fbfea0
author:      wenzelm
date:        Sat, 04 Oct 2014 22:15:31 +0200
summary:     merged
--- a/NEWS	Fri Oct 03 11:55:07 2014 +0200
+++ b/NEWS	Sat Oct 04 22:15:31 2014 +0200
@@ -15,6 +15,12 @@
 semantics due to external visual order vs. internal reverse order.
 
 
+*** Prover IDE -- Isabelle/Scala/jEdit ***
+
+* Support for BibTeX files: context menu, context-sensitive token
+marker, SideKick parser.
+
+
 *** Pure ***
 
 * Command "class_deps" takes optional sort arguments constraining
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Tools/bibtex.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -0,0 +1,385 @@
+/*  Title:      Pure/Tools/bibtex.scala
+    Author:     Makarius
+
+Some support for bibtex files.
+*/
+
+package isabelle
+
+
+import scala.collection.mutable
+import scala.util.parsing.input.{Reader, CharSequenceReader}
+import scala.util.parsing.combinator.RegexParsers
+
+
+object Bibtex
+{
+  /** content **/
+
+  private val months = List(
+    "jan",
+    "feb",
+    "mar",
+    "apr",
+    "may",
+    "jun",
+    "jul",
+    "aug",
+    "sep",
+    "oct",
+    "nov",
+    "dec")
+  def is_month(s: String): Boolean = months.contains(s.toLowerCase)
+
+  private val commands = List("preamble", "string")
+  def is_command(s: String): Boolean = commands.contains(s.toLowerCase)
+
+  sealed case class Entry(
+    kind: String,
+    required: List[String],
+    optional_crossref: List[String],
+    optional_other: List[String])
+  {
+    def is_required(s: String): Boolean = required.contains(s.toLowerCase)
+    def is_optional(s: String): Boolean =
+      optional_crossref.contains(s.toLowerCase) || optional_other.contains(s.toLowerCase)
+
+    def fields: List[String] = required ::: optional_crossref ::: optional_other
+    def template: String =
+      "@" + kind + "{,\n" + fields.map(x => "  " + x + " = {},\n").mkString + "}\n"
+  }
+
+  val entries: List[Entry] =
+    List(
+      Entry("Article",
+        List("author", "title"),
+        List("journal", "year"),
+        List("volume", "number", "pages", "month", "note")),
+      Entry("InProceedings",
+        List("author", "title"),
+        List("booktitle", "year"),
+        List("editor", "volume", "number", "series", "pages", "month", "address",
+          "organization", "publisher", "note")),
+      Entry("InCollection",
+        List("author", "title", "booktitle"),
+        List("publisher", "year"),
+        List("editor", "volume", "number", "series", "type", "chapter", "pages",
+          "edition", "month", "address", "note")),
+      Entry("InBook",
+        List("author", "editor", "title", "chapter"),
+        List("publisher", "year"),
+        List("volume", "number", "series", "type", "address", "edition", "month", "pages", "note")),
+      Entry("Proceedings",
+        List("title", "year"),
+        List(),
+        List("booktitle", "editor", "volume", "number", "series", "address", "month",
+          "organization", "publisher", "note")),
+      Entry("Book",
+        List("author", "editor", "title"),
+        List("publisher", "year"),
+        List("volume", "number", "series", "address", "edition", "month", "note")),
+      Entry("Booklet",
+        List("title"),
+        List(),
+        List("author", "howpublished", "address", "month", "year", "note")),
+      Entry("PhdThesis",
+        List("author", "title", "school", "year"),
+        List(),
+        List("type", "address", "month", "note")),
+      Entry("MastersThesis",
+        List("author", "title", "school", "year"),
+        List(),
+        List("type", "address", "month", "note")),
+      Entry("TechReport",
+        List("author", "title", "institution", "year"),
+        List(),
+        List("type", "number", "address", "month", "note")),
+      Entry("Manual",
+        List("title"),
+        List(),
+        List("author", "organization", "address", "edition", "month", "year", "note")),
+      Entry("Unpublished",
+        List("author", "title", "note"),
+        List(),
+        List("month", "year")),
+      Entry("Misc",
+        List(),
+        List(),
+        List("author", "title", "howpublished", "month", "year", "note")))
+
+  def get_entry(kind: String): Option[Entry] =
+    entries.find(entry => entry.kind.toLowerCase == kind.toLowerCase)
+
+  def is_entry(kind: String): Boolean = get_entry(kind).isDefined
+
+
+
+  /** tokens and chunks **/
+
+  object Token
+  {
+    object Kind extends Enumeration
+    {
+      val COMMAND = Value("command")
+      val ENTRY = Value("entry")
+      val KEYWORD = Value("keyword")
+      val NAT = Value("natural number")
+      val STRING = Value("string")
+      val NAME = Value("name")
+      val IDENT = Value("identifier")
+      val SPACE = Value("white space")
+      val COMMENT = Value("ignored text")
+      val ERROR = Value("bad input")
+    }
+  }
+
+  sealed case class Token(kind: Token.Kind.Value, val source: String)
+  {
+    def is_kind: Boolean =
+      kind == Token.Kind.COMMAND ||
+      kind == Token.Kind.ENTRY ||
+      kind == Token.Kind.IDENT
+    def is_name: Boolean =
+      kind == Token.Kind.NAME ||
+      kind == Token.Kind.IDENT
+    def is_ignored: Boolean =
+      kind == Token.Kind.SPACE ||
+      kind == Token.Kind.COMMENT
+    def is_malformed: Boolean = kind == Token.Kind.ERROR
+  }
+
+  case class Chunk(kind: String, tokens: List[Token])
+  {
+    val source = tokens.map(_.source).mkString
+
+    private val content: Option[List[Token]] =
+      tokens match {
+        case Token(Token.Kind.KEYWORD, "@") :: body if !body.isEmpty =>
+          (body.init.filterNot(_.is_ignored), body.last) match {
+            case (tok :: Token(Token.Kind.KEYWORD, "{") :: toks, Token(Token.Kind.KEYWORD, "}"))
+            if tok.is_kind => Some(toks)
+
+            case (tok :: Token(Token.Kind.KEYWORD, "(") :: toks, Token(Token.Kind.KEYWORD, ")"))
+            if tok.is_kind => Some(toks)
+
+            case _ => None
+          }
+        case _ => None
+      }
+
+    def name: String =
+      content match {
+        case Some(tok :: _) if tok.is_name => tok.source
+        case _ => ""
+      }
+
+    def is_ignored: Boolean = kind == "" && tokens.forall(_.is_ignored)
+    def is_malformed: Boolean = kind == "" || tokens.exists(_.is_malformed)
+    def is_command: Boolean =
+      Bibtex.is_command(kind) && name != "" && content.isDefined && !is_malformed
+    def is_entry: Boolean =
+      Bibtex.is_entry(kind) && name != "" && content.isDefined && !is_malformed
+  }
+
+
+
+  /** parsing **/
+
+  // context of partial line-oriented scans
+  abstract class Line_Context
+  case object Ignored_Context extends Line_Context
+  case class Item_Context(kind: String, delim: Delimited, right: String) extends Line_Context
+  case class Delimited(quoted: Boolean, depth: Int)
+  val Closed = Delimited(false, 0)
+
+  private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source)
+  private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source)
+
+
+  // See also http://ctan.org/tex-archive/biblio/bibtex/base/bibtex.web
+  // module @<Scan for and process a \.{.bib} command or database entry@>.
+
+  object Parsers extends RegexParsers
+  {
+    /* white space and comments */
+
+    override val whiteSpace = "".r
+
+    private val space = """[ \t\n\r]+""".r ^^ token(Token.Kind.SPACE)
+    private val strict_space = """[ \t]+""".r ^^ token(Token.Kind.SPACE)
+
+
+    /* ignored text */
+
+    private val ignored: Parser[Chunk] =
+      rep1("""(?mi)([^@]+|@[ \t]*comment)""".r) ^^ {
+        case ss => Chunk("", List(Token(Token.Kind.COMMENT, ss.mkString))) }
+
+    private def ignored_line: Parser[(Chunk, Line_Context)] =
+      ignored ^^ { case a => (a, Ignored_Context) }
+
+
+    /* delimited string: outermost "..." or {...} and body with balanced {...} */
+
+    // see also bibtex.web: scan_a_field_token_and_eat_white, scan_balanced_braces
+    private def delimited_depth(delim: Delimited): Parser[(String, Delimited)] =
+      new Parser[(String, Delimited)]
+      {
+        require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0)
+
+        def apply(in: Input) =
+        {
+          val start = in.offset
+          val end = in.source.length
+
+          var i = start
+          var q = delim.quoted
+          var d = delim.depth
+          var finished = false
+          while (!finished && i < end) {
+            val c = in.source.charAt(i)
+
+            if (c == '"' && d == 0) { i += 1; d = 1; q = true }
+            else if (c == '"' && d == 1 && q) {
+              i += 1; d = 0; q = false; finished = true
+            }
+            else if (c == '{') { i += 1; d += 1 }
+            else if (c == '}') {
+              if (d == 1 && !q || d > 1) { i += 1; d -= 1; if (d == 0) finished = true }
+              else { i = start; finished = true }
+            }
+            else if (d > 0) i += 1
+            else finished = true
+          }
+          if (i == start) Failure("bad input", in)
+          else {
+            val s = in.source.subSequence(start, i).toString
+            Success((s, Delimited(q, d)), in.drop(i - start))
+          }
+        }
+      }.named("delimited_depth")
+
+    private def delimited: Parser[Token] =
+      delimited_depth(Closed) ^?
+        { case (s, delim) if delim == Closed => Token(Token.Kind.STRING, s) }
+
+    private def recover_delimited: Parser[Token] =
+      """(?m)["{][^@]*""".r ^^ token(Token.Kind.ERROR)
+
+    def delimited_line(item_ctxt: Item_Context): Parser[(Chunk, Line_Context)] =
+      item_ctxt match {
+        case Item_Context(kind, delim, _) =>
+          delimited_depth(delim) ^^ { case (s, delim1) =>
+            (Chunk(kind, List(Token(Token.Kind.STRING, s))), item_ctxt.copy(delim = delim1)) } |
+          recover_delimited ^^ { case a => (Chunk(kind, List(a)), Ignored_Context) }
+        }
+
+
+    /* other tokens */
+
+    private val at = "@" ^^ keyword
+    private val left_brace = "{" ^^ keyword
+    private val right_brace = "}" ^^ keyword
+    private val left_paren = "(" ^^ keyword
+    private val right_paren = ")" ^^ keyword
+
+    private val nat = "[0-9]+".r ^^ token(Token.Kind.NAT)
+
+    private val identifier =
+      """[\x21-\x7f&&[^"#%'(),={}0-9]][\x21-\x7f&&[^"#%'(),={}]]*""".r
+
+    private val ident = identifier ^^ token(Token.Kind.IDENT)
+
+    val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))
+
+
+    /* items: command or entry */
+
+    private val item_kind =
+      identifier ^^ { case a =>
+        val kind =
+          if (is_command(a)) Token.Kind.COMMAND
+          else if (is_entry(a)) Token.Kind.ENTRY
+          else Token.Kind.IDENT
+        Token(kind, a)
+      }
+
+    private val item_start =
+      at ~ rep(strict_space) ~ item_kind ~ rep(strict_space) ^^
+        { case a ~ b ~ c ~ d => (c.source, List(a) ::: b ::: List(c) ::: d) }
+
+    private val item_name =
+      rep(strict_space) ~ identifier ^^
+        { case a ~ b => a ::: List(Token(Token.Kind.NAME, b)) }
+
+    private val item_body =
+      delimited | (recover_delimited | other_token)
+
+    private val item: Parser[Chunk] =
+      (item_start ~ left_brace ~ item_name ~ rep(item_body) ~ opt(right_brace) |
+       item_start ~ left_paren ~ item_name ~ rep(item_body) ~ opt(right_paren)) ^^
+        { case (kind, a) ~ b ~ c ~ d ~ e => Chunk(kind, a ::: List(b) ::: c ::: d ::: e.toList) }
+
+    private val recover_item: Parser[Chunk] =
+      at ~ "(?m)[^@]*".r ^^ { case a ~ b => Chunk("", List(a, Token(Token.Kind.ERROR, b))) }
+
+
+    /* chunks */
+
+    val chunk: Parser[Chunk] = ignored | (item | recover_item)
+
+    def chunk_line(ctxt: Line_Context): Parser[(Chunk, Line_Context)] =
+    {
+      ctxt match {
+        case Ignored_Context =>
+          ignored_line |
+          item_start ~ (left_brace | left_paren) ~ opt(item_name) ^^
+            { case (kind, a) ~ b ~ c =>
+                val right = if (b.source == "{") "}" else ")"
+                val chunk = Chunk(kind, a ::: List(b) ::: (c getOrElse Nil))
+                (chunk, Item_Context(kind, Closed, right)) } |
+          recover_item ^^ { case a => (a, Ignored_Context) }
+        case item_ctxt @ Item_Context(kind, delim, right) =>
+          if (delim.depth > 0)
+            delimited_line(item_ctxt) |
+            ignored_line
+          else {
+            delimited_line(item_ctxt) |
+            other_token ^^ { case a => (Chunk(kind, List(a)), ctxt) } |
+            right ^^ { case a => (Chunk(kind, List(keyword(a))), Ignored_Context) } |
+            ignored_line
+          }
+        case _ => failure("")
+      }
+    }
+  }
+
+
+  /* parse */
+
+  def parse(input: CharSequence): List[Chunk] =
+  {
+    val in: Reader[Char] = new CharSequenceReader(input)
+    Parsers.parseAll(Parsers.rep(Parsers.chunk), in) match {
+      case Parsers.Success(result, _) => result
+      case _ => error("Unexpected failure to parse input:\n" + input.toString)
+    }
+  }
+
+  def parse_line(input: CharSequence, context: Line_Context): (List[Chunk], Line_Context) =
+  {
+    var in: Reader[Char] = new CharSequenceReader(input)
+    val chunks = new mutable.ListBuffer[Chunk]
+    var ctxt = context
+    while (!in.atEnd) {
+      Parsers.parse(Parsers.chunk_line(ctxt), in) match {
+        case Parsers.Success((x, c), rest) => { chunks += x; ctxt = c; in = rest }
+        case Parsers.NoSuccess(_, rest) =>
+          error("Unepected failure to parse input:\n" + rest.source.toString)
+      }
+    }
+    (chunks.toList, ctxt)
+  }
+}
+
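
A hypothetical usage sketch (not part of the changeset) of the Bibtex.parse entry point added above, assuming Pure.jar (package isabelle) is on the classpath; the demo object name and the sample input are invented for illustration. This is essentially what the SideKick parser further below does with the whole buffer text.

// hypothetical demo, assuming package isabelle (Pure.jar) on the classpath
object Bibtex_Parse_Demo
{
  def main(args: Array[String]): Unit =
  {
    val input =
      "@string{jsl = {Journal of Symbolic Logic}}\n" +
      "@Article{Church1940,\n" +
      "  author  = {Alonzo Church},\n" +
      "  title   = {A Formulation of the Simple Theory of Types},\n" +
      "  journal = jsl,\n" +
      "  year    = 1940,\n" +
      "}\n"

    // each non-ignored chunk is a @string/@preamble command or a database entry,
    // carrying its kind (e.g. "Article"), name (citation key) and verbatim source
    for (chunk <- isabelle.Bibtex.parse(input) if !chunk.is_ignored)
      println(chunk.kind + " " + chunk.name)
  }
}
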
--- a/src/Pure/build-jars	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Pure/build-jars	Sat Oct 04 22:15:31 2014 +0200
@@ -85,6 +85,7 @@
   Thy/thy_info.scala
   Thy/thy_structure.scala
   Thy/thy_syntax.scala
+  Tools/bibtex.scala
   Tools/build.scala
   Tools/build_console.scala
   Tools/build_doc.scala
--- a/src/Tools/jEdit/lib/Tools/jedit	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/lib/Tools/jedit	Sat Oct 04 22:15:31 2014 +0200
@@ -9,6 +9,7 @@
 
 declare -a SOURCES=(
   "src/active.scala"
+  "src/bibtex_token_markup.scala"
   "src/completion_popup.scala"
   "src/context_menu.scala"
   "src/dockable.scala"
@@ -41,9 +42,9 @@
   "src/rendering.scala"
   "src/rich_text_area.scala"
   "src/scala_console.scala"
-  "src/sledgehammer_dockable.scala"
   "src/simplifier_trace_dockable.scala"
   "src/simplifier_trace_window.scala"
+  "src/sledgehammer_dockable.scala"
   "src/spell_checker.scala"
   "src/symbols_dockable.scala"
   "src/syslog_dockable.scala"
--- a/src/Tools/jEdit/src/Isabelle.props	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/Isabelle.props	Sat Oct 04 22:15:31 2014 +0200
@@ -84,4 +84,6 @@
 mode.isabelle.sidekick.showStatusWindow.label=true
 mode.ml.sidekick.parser=isabelle
 sidekick.parser.isabelle.label=Isabelle
+mode.bibtex.sidekick.parser=bibtex
+mode.bibtex.folding=sidekick
 
--- a/src/Tools/jEdit/src/active.scala	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/active.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -60,16 +60,18 @@
                 }
 
               case XML.Elem(Markup(Markup.SENDBACK, props), _) =>
-                props match {
-                  case Position.Id(id) =>
-                    Isabelle.edit_command(snapshot, buffer,
-                      props.exists(_ == Markup.PADDING_COMMAND), id, text)
-                  case _ =>
-                    if (props.exists(_ == Markup.PADDING_LINE))
-                      Isabelle.insert_line_padding(text_area, text)
-                    else text_area.setSelectedText(text)
+                if (buffer.isEditable) {
+                  props match {
+                    case Position.Id(id) =>
+                      Isabelle.edit_command(snapshot, buffer,
+                        props.exists(_ == Markup.PADDING_COMMAND), id, text)
+                    case _ =>
+                      if (props.exists(_ == Markup.PADDING_LINE))
+                        Isabelle.insert_line_padding(text_area, text)
+                      else text_area.setSelectedText(text)
+                  }
+                  text_area.requestFocus
                 }
-                text_area.requestFocus
 
               case Protocol.Dialog(id, serial, result) =>
                 model.session.dialog_result(id, serial, result)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/src/bibtex_token_markup.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -0,0 +1,102 @@
+/*  Title:      Tools/jEdit/src/bibtex_token_markup.scala
+    Author:     Makarius
+
+Bibtex token markup.
+*/
+
+package isabelle.jedit
+
+
+import isabelle._
+
+import org.gjt.sp.jedit.syntax.{Token => JEditToken, TokenMarker, TokenHandler, ParserRuleSet}
+
+import javax.swing.text.Segment
+
+
+object Bibtex_Token_Markup
+{
+  /* token style */
+
+  private def token_style(context: String, token: Bibtex.Token): Byte =
+    token.kind match {
+      case Bibtex.Token.Kind.COMMAND => JEditToken.KEYWORD2
+      case Bibtex.Token.Kind.ENTRY => JEditToken.KEYWORD1
+      case Bibtex.Token.Kind.KEYWORD => JEditToken.OPERATOR
+      case Bibtex.Token.Kind.NAT => JEditToken.LITERAL2
+      case Bibtex.Token.Kind.STRING => JEditToken.LITERAL1
+      case Bibtex.Token.Kind.NAME => JEditToken.LABEL
+      case Bibtex.Token.Kind.IDENT =>
+        if (Bibtex.is_month(token.source)) JEditToken.LITERAL3
+        else
+          Bibtex.get_entry(context) match {
+            case Some(entry) if entry.is_required(token.source) => JEditToken.KEYWORD3
+            case Some(entry) if entry.is_optional(token.source) => JEditToken.KEYWORD4
+            case _ => JEditToken.DIGIT
+          }
+      case Bibtex.Token.Kind.SPACE => JEditToken.NULL
+      case Bibtex.Token.Kind.COMMENT => JEditToken.COMMENT1
+      case Bibtex.Token.Kind.ERROR => JEditToken.INVALID
+    }
+
+
+  /* line context */
+
+  private val context_rules = new ParserRuleSet("bibtex", "MAIN")
+
+  private class Line_Context(context: Option[Bibtex.Line_Context])
+    extends Token_Markup.Generic_Line_Context[Bibtex.Line_Context](context_rules, context)
+
+
+  /* token marker */
+
+  class Marker extends TokenMarker
+  {
+    override def markTokens(context: TokenMarker.LineContext,
+        handler: TokenHandler, raw_line: Segment): TokenMarker.LineContext =
+    {
+      val line_ctxt =
+        context match {
+          case c: Line_Context => c.context
+          case _ => Some(Bibtex.Ignored_Context)
+        }
+      val line = if (raw_line == null) new Segment else raw_line
+
+      val context1 =
+      {
+        val (styled_tokens, context1) =
+          line_ctxt match {
+            case Some(ctxt) =>
+              val (chunks, ctxt1) = Bibtex.parse_line(line, ctxt)
+              val styled_tokens =
+                for { chunk <- chunks; tok <- chunk.tokens }
+                yield (token_style(chunk.kind, tok), tok.source)
+              (styled_tokens, new Line_Context(Some(ctxt1)))
+            case None =>
+              val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString)
+              (List(styled_token), new Line_Context(None))
+          }
+
+        var offset = 0
+        for ((style, token) <- styled_tokens) {
+          val length = token.length
+          val end_offset = offset + length
+
+          if ((offset until end_offset).exists(i => line.charAt(i) == '\t')) {
+            for (i <- offset until end_offset)
+              handler.handleToken(line, style, i, 1, context1)
+          }
+          else handler.handleToken(line, style, offset, length, context1)
+
+          offset += length
+        }
+        handler.handleToken(line, JEditToken.END, line.count, 0, context1)
+        context1
+      }
+      val context2 = context1.intern
+      handler.setLineContext(context2)
+      context2
+    }
+  }
+}
+
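
A line-oriented counterpart (hypothetical sketch, not part of the changeset), mirroring what Marker.markTokens above does for each buffer line: Bibtex.parse_line is fed one line at a time and the returned Line_Context (open brace depth, quoting) is carried over to the next line. It assumes package isabelle on the classpath; the sample lines are invented.

// hypothetical snippet: incremental scanning with an explicit line context
val lines =
  List(
    "@Article{key,",
    "  title = {A {nested} title",
    "    spanning two lines},",
    "}")

var ctxt: isabelle.Bibtex.Line_Context = isabelle.Bibtex.Ignored_Context
for (line <- lines) {
  val (chunks, ctxt1) = isabelle.Bibtex.parse_line(line, ctxt)
  // the tokens of these chunks are what token_style turns into jEdit styles
  println(chunks.flatMap(_.tokens).map(_.kind).mkString(", "))
  // e.g. after the second line: Item_Context("Article", Delimited(false, 1), "}")
  ctxt = ctxt1
}
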
--- a/src/Tools/jEdit/src/context_menu.scala	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/context_menu.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -10,13 +10,13 @@
 import isabelle._
 
 
-import java.awt.event.MouseEvent
-import javax.swing.JMenuItem
+import java.awt.event.{ActionListener, ActionEvent, MouseEvent}
+import javax.swing.{JMenu, JMenuItem}
 
 import org.gjt.sp.jedit.menu.EnhancedMenuItem
 import org.gjt.sp.jedit.gui.DynamicContextMenuService
-import org.gjt.sp.jedit.jEdit
-import org.gjt.sp.jedit.textarea.JEditTextArea
+import org.gjt.sp.jedit.{jEdit, Buffer}
+import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea}
 
 
 class Context_Menu extends DynamicContextMenuService
@@ -67,20 +67,50 @@
   }
 
 
+  /* bibtex menu */
+
+  def bibtex_menu(text_area0: JEditTextArea): List[JMenuItem] =
+  {
+    text_area0 match {
+      case text_area: TextArea =>
+        text_area.getBuffer match {
+          case buffer: Buffer
+          if (JEdit_Lib.buffer_name(buffer).endsWith(".bib") && buffer.isEditable) =>
+            val menu = new JMenu("BibTeX entries")
+            for (entry <- Bibtex.entries) {
+              val item = new JMenuItem(entry.kind)
+              item.addActionListener(new ActionListener {
+                def actionPerformed(evt: ActionEvent): Unit =
+                  Isabelle.insert_line_padding(text_area, entry.template)
+              })
+              menu.add(item)
+            }
+            List(menu)
+          case _ => Nil
+        }
+      case _ => Nil
+    }
+  }
+
+
   /* menu service */
 
   def createMenu(text_area: JEditTextArea, evt: MouseEvent): Array[JMenuItem] =
   {
     PIDE.dismissed_popups(text_area.getView)
-    if (evt != null && evt.getSource == text_area.getPainter) {
-      val offset = text_area.xyToOffset(evt.getX, evt.getY)
-      if (offset >= 0) {
-        val items = spell_checker_menu(text_area, offset)
-        if (items.isEmpty) null else items.toArray
+
+    val items1 =
+      if (evt != null && evt.getSource == text_area.getPainter) {
+        val offset = text_area.xyToOffset(evt.getX, evt.getY)
+        if (offset >= 0) spell_checker_menu(text_area, offset)
+        else Nil
       }
-      else null
-    }
-    else null
+      else Nil
+
+    val items2 = bibtex_menu(text_area)
+
+    val items = items1 ::: items2
+    if (items.isEmpty) null else items.toArray
   }
 }
 
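
For illustration (hypothetical snippet, not part of the changeset), the text that such a menu item inserts is Entry.template from Pure/Tools/bibtex.scala above, e.g. for the "Article" entry:

// assuming package isabelle (Pure.jar) on the classpath
print(isabelle.Bibtex.get_entry("Article").get.template)
// prints:
// @Article{,
//   author = {},
//   title = {},
//   journal = {},
//   year = {},
//   volume = {},
//   number = {},
//   pages = {},
//   month = {},
//   note = {},
// }

Isabelle.insert_line_padding then pastes this template at the caret of the editable .bib buffer.
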
--- a/src/Tools/jEdit/src/isabelle.scala	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/isabelle.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -16,6 +16,7 @@
 
 import org.gjt.sp.jedit.{jEdit, View, Buffer}
 import org.gjt.sp.jedit.textarea.JEditTextArea
+import org.gjt.sp.jedit.syntax.TokenMarker
 import org.gjt.sp.jedit.gui.{DockableWindowManager, CompleteWord}
 import org.gjt.sp.jedit.options.PluginOptions
 
@@ -65,10 +66,11 @@
 
   /* token markers */
 
-  private val markers =
-    Map(modes.map(name => (name, new Token_Markup.Marker(name))): _*)
+  private val markers: Map[String, TokenMarker] =
+    Map(modes.map(name => (name, new Token_Markup.Marker(name))): _*) +
+      ("bibtex" -> new Bibtex_Token_Markup.Marker)
 
-  def token_marker(name: String): Option[Token_Markup.Marker] = markers.get(name)
+  def token_marker(name: String): Option[TokenMarker] = markers.get(name)
 
 
   /* dockable windows */
--- a/src/Tools/jEdit/src/isabelle_sidekick.scala	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/isabelle_sidekick.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -23,6 +23,13 @@
   def int_to_pos(offset: Text.Offset): Position =
     new Position { def getOffset = offset; override def toString: String = offset.toString }
 
+  def root_data(buffer: Buffer): SideKickParsedData =
+  {
+    val data = new SideKickParsedData(buffer.getName)
+    data.getAsset(data.root).setEnd(int_to_pos(buffer.getLength))
+    data
+  }
+
   class Asset(name: String, start: Text.Offset, end: Text.Offset) extends IAsset
   {
     protected var _name = name
@@ -71,19 +78,16 @@
     stopped = false
 
     // FIXME lock buffer (!??)
-    val data = new SideKickParsedData(buffer.getName)
-    val root = data.root
-    data.getAsset(root).setEnd(Isabelle_Sidekick.int_to_pos(buffer.getLength))
-
+    val data = Isabelle_Sidekick.root_data(buffer)
     val syntax = Isabelle.mode_syntax(name)
     val ok =
       if (syntax.isDefined) {
         val ok = parser(buffer, syntax.get, data)
-        if (stopped) { root.add(new DefaultMutableTreeNode("<stopped>")); true }
+        if (stopped) { data.root.add(new DefaultMutableTreeNode("<stopped>")); true }
         else ok
       }
       else false
-    if (!ok) root.add(new DefaultMutableTreeNode("<ignored>"))
+    if (!ok) data.root.add(new DefaultMutableTreeNode("<ignored>"))
 
     data
   }
@@ -155,13 +159,13 @@
       }
     opt_snapshot match {
       case Some(snapshot) =>
-        val root = data.root
         for ((command, command_start) <- snapshot.node.command_iterator() if !stopped) {
           val markup =
             snapshot.state.command_markup(
               snapshot.version, command, Command.Markup_Index.markup,
                 command.range, Markup.Elements.full)
-          Isabelle_Sidekick.swing_markup_tree(markup, root, (info: Text.Info[List[XML.Elem]]) =>
+          Isabelle_Sidekick.swing_markup_tree(markup, data.root,
+            (info: Text.Info[List[XML.Elem]]) =>
               {
                 val range = info.range + command_start
                 val content = command.source(info.range).replace('\n', ' ')
@@ -212,3 +216,40 @@
   }
 }
 
+
+class Isabelle_Sidekick_Bibtex extends SideKickParser("bibtex")
+{
+  override def supportsCompletion = false
+
+  private class Asset(
+      label: String, label_html: String, start: Text.Offset, stop: Text.Offset, source: String)
+    extends Isabelle_Sidekick.Asset(label, start, stop) {
+      override def getShortString: String = label_html
+      override def getLongString: String = source
+    }
+
+  def parse(buffer: Buffer, error_source: errorlist.DefaultErrorSource): SideKickParsedData =
+  {
+    val data = Isabelle_Sidekick.root_data(buffer)
+
+    try {
+      var offset = 0
+      for (chunk <- Bibtex.parse(JEdit_Lib.buffer_text(buffer))) {
+        val kind = chunk.kind
+        val name = chunk.name
+        val source = chunk.source
+        if (kind != "") {
+          val label = kind + (if (name == "") "" else " " + name)
+          val label_html =
+            "<html><b>" + kind + "</b>" + (if (name == "") "" else " " + name) + "</html>"
+          val asset = new Asset(label, label_html, offset, offset + source.size, source)
+          data.root.add(new DefaultMutableTreeNode(asset))
+        }
+        offset += source.size
+      }
+      data
+    }
+    catch { case ERROR(_) => null }
+  }
+}
+
--- a/src/Tools/jEdit/src/services.xml	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/services.xml	Sat Oct 04 22:15:31 2014 +0200
@@ -26,6 +26,9 @@
   <SERVICE NAME="isabelle-root" CLASS="sidekick.SideKickParser">
     new isabelle.jedit.Isabelle_Sidekick_Root();
   </SERVICE>
+  <SERVICE NAME="bibtex" CLASS="sidekick.SideKickParser">
+    new isabelle.jedit.Isabelle_Sidekick_Bibtex();
+  </SERVICE>
   <SERVICE CLASS="console.Shell" NAME="Scala">
     new isabelle.jedit.Scala_Console();
   </SERVICE>
--- a/src/Tools/jEdit/src/token_markup.scala	Fri Oct 03 11:55:07 2014 +0200
+++ b/src/Tools/jEdit/src/token_markup.scala	Sat Oct 04 22:15:31 2014 +0200
@@ -15,8 +15,8 @@
 
 import org.gjt.sp.util.SyntaxUtilities
 import org.gjt.sp.jedit.{jEdit, Mode}
-import org.gjt.sp.jedit.syntax.{Token => JEditToken, TokenMarker, TokenHandler,
-  ParserRuleSet, ModeProvider, XModeHandler, SyntaxStyle}
+import org.gjt.sp.jedit.syntax.{Token => JEditToken, TokenMarker, TokenHandler, ParserRuleSet,
+  ModeProvider, XModeHandler, SyntaxStyle}
 import org.gjt.sp.jedit.textarea.{TextArea, Selection}
 import org.gjt.sp.jedit.buffer.JEditBuffer
 
@@ -173,12 +173,10 @@
   }
 
 
-  /* token marker */
+  /* line context */
 
-  private val isabelle_rules = new ParserRuleSet("isabelle", "MAIN")
-
-  private class Line_Context(val context: Option[Scan.Line_Context])
-    extends TokenMarker.LineContext(isabelle_rules, null)
+  class Generic_Line_Context[C](rules: ParserRuleSet, val context: Option[C])
+    extends TokenMarker.LineContext(rules, null)
   {
     override def hashCode: Int = context.hashCode
     override def equals(that: Any): Boolean =
@@ -188,6 +186,14 @@
       }
   }
 
+  private val context_rules = new ParserRuleSet("isabelle", "MAIN")
+
+  private class Line_Context(context: Option[Scan.Line_Context])
+    extends Generic_Line_Context[Scan.Line_Context](context_rules, context)
+
+
+  /* token marker */
+
   class Marker(mode: String) extends TokenMarker
   {
     override def markTokens(context: TokenMarker.LineContext,
@@ -202,11 +208,19 @@
 
       val context1 =
       {
+        def no_context =
+        {
+          val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString)
+          (List(styled_token), new Line_Context(None))
+        }
         val (styled_tokens, context1) =
           if (mode == "isabelle-ml" || mode == "sml") {
-            val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, line_ctxt.get)
-            val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source))
-            (styled_tokens, new Line_Context(Some(ctxt1)))
+            if (line_ctxt.isDefined) {
+              val (tokens, ctxt1) = ML_Lex.tokenize_line(mode == "sml", line, line_ctxt.get)
+              val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source))
+              (styled_tokens, new Line_Context(Some(ctxt1)))
+            }
+            else no_context
           }
           else {
             Isabelle.mode_syntax(mode) match {
@@ -215,9 +229,7 @@
                 val styled_tokens =
                   tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))
                 (styled_tokens, new Line_Context(Some(ctxt1)))
-              case _ =>
-                val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString)
-                (List(styled_token), new Line_Context(None))
+              case _ => no_context
             }
           }