# HG changeset patch # User wenzelm # Date 1308314124 -7200 # Node ID eeba70379f1a35d5a4ff0945c76f2d5791d428f9 # Parent 717880e98e6b550129548fb0c896afde62aa0e50# Parent a26e514c92b29c1c9e9a77c2051f1aa09f3446ba merged diff -r 717880e98e6b -r eeba70379f1a src/Pure/General/scan.scala --- a/src/Pure/General/scan.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Pure/General/scan.scala Fri Jun 17 14:35:24 2011 +0200 @@ -18,6 +18,16 @@ object Scan { + /** context of partial scans **/ + + sealed abstract class Context + case object Finished extends Context + case class Quoted(quote: String) extends Context + case object Verbatim extends Context + case class Comment(depth: Int) extends Context + + + /** Lexicon -- position tree **/ object Lexicon @@ -116,6 +126,12 @@ override val whiteSpace = "".r + /* optional termination */ + + def opt_term[T](p: => Parser[T]): Parser[Option[T]] = + p ^^ (x => Some(x)) | """\z""".r ^^ (_ => None) + + /* keywords from lexicon */ def keyword: Parser[String] = new Parser[String] @@ -178,12 +194,15 @@ /* quoted strings */ + private def quoted_body(quote: String): Parser[String] = + { + rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" | + (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString) + } + private def quoted(quote: String): Parser[String] = { - quote ~ - rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" | - (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ~ - quote ^^ { case x ~ ys ~ z => x + ys.mkString + z } + quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z } }.named("quoted") def quoted_content(quote: String, source: String): String = @@ -199,13 +218,30 @@ else body } + def quoted_context(quote: String, ctxt: Context): Parser[(String, Context)] = + { + ctxt match { + case Finished => + quote ~ quoted_body(quote) ~ opt_term(quote) ^^ + { case x ~ y ~ Some(z) => (x + y + z, Finished) + case x ~ y ~ None => (x + y, Quoted(quote)) } + case Quoted(q) if q == quote => + quoted_body(quote) ~ opt_term(quote) ^^ + { case x ~ Some(y) => (x + y, Finished) + case x ~ None => (x, ctxt) } + case _ => failure("") + } + }.named("quoted_context") + /* verbatim text */ + private def verbatim_body: Parser[String] = + rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString) + private def verbatim: Parser[String] = { - "{*" ~ rep(many1(sym => sym != "*") | """\*(?!\})""".r) ~ "*}" ^^ - { case x ~ ys ~ z => x + ys.mkString + z } + "{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z } }.named("verbatim") def verbatim_content(source: String): String = @@ -214,11 +250,28 @@ source.substring(2, source.length - 2) } + def verbatim_context(ctxt: Context): Parser[(String, Context)] = + { + ctxt match { + case Finished => + "{*" ~ verbatim_body ~ opt_term("*}") ^^ + { case x ~ y ~ Some(z) => (x + y + z, Finished) + case x ~ y ~ None => (x + y, Verbatim) } + case Verbatim => + verbatim_body ~ opt_term("*}") ^^ + { case x ~ Some(y) => (x + y, Finished) + case x ~ None => (x, Verbatim) } + case _ => failure("") + } + }.named("verbatim_context") + /* nested comments */ - def comment: Parser[String] = new Parser[String] + private def comment_depth(depth: Int): Parser[(String, Int)] = new Parser[(String, Int)] { + require(depth >= 0) + val comment_text = rep1(many1(sym => sym != "*" && sym != "(") | """\*(?!\))|\((?!\*)""".r) @@ -232,18 +285,36 @@ case _ => false } } - var depth = 0 + var d = depth var finished = false while (!finished) { - if 
(try_parse("(*")) depth += 1 - else if (depth > 0 && try_parse("*)")) depth -= 1 - else if (depth == 0 || !try_parse(comment_text)) finished = true + if (try_parse("(*")) d += 1 + else if (d > 0 && try_parse("*)")) d -= 1 + else if (d == 0 || !try_parse(comment_text)) finished = true } - if (in.offset < rest.offset && depth == 0) - Success(in.source.subSequence(in.offset, rest.offset).toString, rest) + if (in.offset < rest.offset) + Success((in.source.subSequence(in.offset, rest.offset).toString, d), rest) else Failure("comment expected", in) } - }.named("comment") + }.named("comment_depth") + + def comment: Parser[String] = + comment_depth(0) ^? { case (x, d) if d == 0 => x } + + def comment_context(ctxt: Context): Parser[(String, Context)] = + { + val depth = + ctxt match { + case Finished => 0 + case Comment(d) => d + case _ => -1 + } + if (depth >= 0) + comment_depth(depth) ^^ + { case (x, 0) => (x, Finished) + case (x, d) => (x, Comment(d)) } + else failure("") + } def comment_content(source: String): String = { @@ -254,7 +325,18 @@ /* outer syntax tokens */ - def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] = + private def delimited_token: Parser[Token] = + { + val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x)) + val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x)) + val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) + val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x)) + + string | (alt_string | (verb | cmt)) + } + + private def other_token(symbols: Symbol.Interpretation, is_command: String => Boolean) + : Parser[Token] = { val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y } val nat = many1(symbols.is_digit) @@ -278,23 +360,37 @@ val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x)) - val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x)) - val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x)) + // FIXME check + val junk = many(sym => !(symbols.is_blank(sym))) + val junk1 = many1(sym => !(symbols.is_blank(sym))) - val junk = many1(sym => !(symbols.is_blank(sym))) val bad_delimiter = ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.UNPARSED, x + y) } - val bad = junk ^^ (x => Token(Token.Kind.UNPARSED, x)) + val bad = junk1 ^^ (x => Token(Token.Kind.UNPARSED, x)) + + val command_keyword = + keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x)) + space | (bad_delimiter | + (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) ||| + command_keyword) | bad)) + } - /* tokens */ + def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] = + delimited_token | other_token(symbols, is_command) - (space | (string | (alt_string | (verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) | - comment ^^ (x => Token(Token.Kind.COMMENT, x)))))) | - bad_delimiter | - ((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) ||| - keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))) | - bad + def token_context(symbols: Symbol.Interpretation, is_command: String => Boolean, ctxt: Context) + : Parser[(Token, Context)] = + { + val string = + quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) } + val alt_string = + quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) } + val verb = verbatim_context(ctxt) ^^ { case (x, c) => 
(Token(Token.Kind.VERBATIM, x), c) } + val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) } + val other = other_token(symbols, is_command) ^^ { case x => (x, Finished) } + + string | (alt_string | (verb | (cmt | other))) } } diff -r 717880e98e6b -r eeba70379f1a src/Pure/General/symbol.ML --- a/src/Pure/General/symbol.ML Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Pure/General/symbol.ML Fri Jun 17 14:35:24 2011 +0200 @@ -134,7 +134,7 @@ fun malformed_msg s = "Malformed symbolic character: " ^ quote s; -(* ascii symbols *) +(* ASCII symbols *) fun is_ascii s = is_char s andalso ord s < 128; diff -r 717880e98e6b -r eeba70379f1a src/Pure/General/symbol.scala --- a/src/Pure/General/symbol.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Pure/General/symbol.scala Fri Jun 17 14:35:24 2011 +0200 @@ -28,6 +28,19 @@ } + /* ASCII characters */ + + def is_ascii_letter(c: Char): Boolean = 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' + def is_ascii_digit(c: Char): Boolean = '0' <= c && c <= '9' + def is_ascii_quasi(c: Char): Boolean = c == '_' || c == '\'' + + def is_ascii_letdig(c: Char): Boolean = + is_ascii_letter(c) || is_ascii_digit(c) || is_ascii_quasi(c) + + def is_ascii_identifier(s: String): Boolean = + s.length > 0 && is_ascii_letter(s(0)) && s.substring(1).forall(is_ascii_letdig) + + /* Symbol regexps */ private val plain = new Regex("""(?xs) diff -r 717880e98e6b -r eeba70379f1a src/Pure/Isar/outer_syntax.scala --- a/src/Pure/Isar/outer_syntax.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Pure/Isar/outer_syntax.scala Fri Jun 17 14:35:24 2011 +0200 @@ -8,6 +8,7 @@ import scala.util.parsing.input.{Reader, CharSequenceReader} +import scala.collection.mutable class Outer_Syntax(symbols: Symbol.Interpretation) @@ -73,4 +74,21 @@ def scan(input: CharSequence): List[Token] = scan(new CharSequenceReader(input)) + + def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) = + { + import lexicon._ + + var in: Reader[Char] = new CharSequenceReader(input) + val toks = new mutable.ListBuffer[Token] + var ctxt = context + while (!in.atEnd) { + parse(token_context(symbols, is_command, ctxt), in) match { + case Success((x, c), rest) => { toks += x; ctxt = c; in = rest } + case NoSuccess(_, rest) => + error("Unexpected failure of tokenizing input:\n" + rest.source.toString) + } + } + (toks.toList, ctxt) + } } diff -r 717880e98e6b -r eeba70379f1a src/Pure/Isar/token.scala --- a/src/Pure/Isar/token.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Pure/Isar/token.scala Fri Jun 17 14:35:24 2011 +0200 @@ -64,6 +64,7 @@ sealed case class Token(val kind: Token.Kind.Value, val source: String) { def is_command: Boolean = kind == Token.Kind.COMMAND + def is_keyword: Boolean = kind == Token.Kind.KEYWORD def is_delimited: Boolean = kind == Token.Kind.STRING || kind == Token.Kind.ALT_STRING || diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/lib/Tools/jedit --- a/src/Tools/jEdit/lib/Tools/jedit Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Tools/jEdit/lib/Tools/jedit Fri Jun 17 14:35:24 2011 +0200 @@ -24,6 +24,7 @@ "src/scala_console.scala" "src/session_dockable.scala" "src/text_area_painter.scala" + "src/token_markup.scala" ) declare -a RESOURCES=( @@ -142,6 +143,11 @@ ## dependencies +[ -e "$ISABELLE_HOME/Admin/build" ] && \ + { "$ISABELLE_HOME/Admin/build" "$BUILD_JARS" || exit $?; } + +PURE_JAR="$ISABELLE_HOME/lib/classes/Pure.jar" + pushd "$JEDIT_HOME" >/dev/null || failed 
JEDIT_JAR="$ISABELLE_JEDIT_BUILD_HOME/contrib/$ISABELLE_JEDIT_BUILD_VERSION/jedit.jar" @@ -176,9 +182,9 @@ OUTDATED=true else if [ -n "$ISABELLE_JEDIT_BUILD_HOME" ]; then - declare -a DEPS=("${SOURCES[@]}" "${RESOURCES[@]}" "$JEDIT_JAR" "${JEDIT_JARS[@]}") + declare -a DEPS=("${SOURCES[@]}" "${RESOURCES[@]}" "$PURE_JAR" "$JEDIT_JAR" "${JEDIT_JARS[@]}") else - declare -a DEPS=("${SOURCES[@]}" "${RESOURCES[@]}") + declare -a DEPS=("${SOURCES[@]}" "${RESOURCES[@]}" "$PURE_JAR") fi for DEP in "${DEPS[@]}" do @@ -196,11 +202,6 @@ if [ "$OUTDATED" = true ] then - [ -z "$SCALA_HOME" ] && fail "Unknown SCALA_HOME -- Scala unavailable" - - [ -e "$ISABELLE_HOME/Admin/build" ] && \ - { "$ISABELLE_HOME/Admin/build" "$BUILD_JARS" || exit $?; } - echo "###" echo "### Building Isabelle/jEdit ..." echo "###" @@ -233,9 +234,7 @@ print qq,\n\n,; } print; }' dist/modes/catalog - cp -a "${JEDIT_JARS[@]}" "${SCALA_JARS[@]}" "$ISABELLE_HOME/lib/classes/Pure.jar" \ - dist/jars/. || failed - + cp -a "${JEDIT_JARS[@]}" "${SCALA_JARS[@]}" "$PURE_JAR" dist/jars/. || failed ( for JAR in "$JEDIT_JAR" "${JEDIT_JARS[@]}" "$SCALA_HOME/lib/scala-compiler.jar" do diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/src/document_model.scala --- a/src/Tools/jEdit/src/document_model.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Tools/jEdit/src/document_model.scala Fri Jun 17 14:35:24 2011 +0200 @@ -14,44 +14,13 @@ import org.gjt.sp.jedit.Buffer import org.gjt.sp.jedit.buffer.{BufferAdapter, BufferListener, JEditBuffer} -import org.gjt.sp.jedit.syntax.{SyntaxStyle, Token, TokenMarker, TokenHandler, ParserRuleSet} import org.gjt.sp.jedit.textarea.TextArea import java.awt.font.TextAttribute -import javax.swing.text.Segment; object Document_Model { - object Token_Markup - { - /* extended token styles */ - - private val plain_range: Int = Token.ID_COUNT - private def check_range(i: Int) { require(0 <= i && i < plain_range) } - - def subscript(i: Byte): Byte = { check_range(i); (i + plain_range).toByte } - def superscript(i: Byte): Byte = { check_range(i); (i + 2 * plain_range).toByte } - val hidden: Byte = (3 * plain_range).toByte - - - /* line context */ - - private val dummy_rules = new ParserRuleSet("isabelle", "MAIN") - - class Line_Context(val line: Int, prev: Line_Context) - extends TokenMarker.LineContext(dummy_rules, prev) - { - override def hashCode: Int = line - override def equals(that: Any): Boolean = - that match { - case other: Line_Context => line == other.line - case _ => false - } - } - } - - /* document model of buffer */ private val key = "isabelle.document_model" @@ -160,46 +129,9 @@ } - /* semantic token marker */ - - private val token_marker = new TokenMarker - { - override def markTokens(prev: TokenMarker.LineContext, - handler: TokenHandler, line_segment: Segment): TokenMarker.LineContext = - { - Isabelle.swing_buffer_lock(buffer) { - val snapshot = Document_Model.this.snapshot() - - val previous = prev.asInstanceOf[Document_Model.Token_Markup.Line_Context] - val line = if (prev == null) 0 else previous.line + 1 - val context = new Document_Model.Token_Markup.Line_Context(line, previous) - - val start = buffer.getLineStartOffset(line) - val stop = start + line_segment.count - - def handle_token(style: Byte, offset: Text.Offset, length: Int) = - handler.handleToken(line_segment, style, offset, length, context) + /* token marker */ - val syntax = session.current_syntax() - val tokens = snapshot.select_markup(Text.Range(start, stop))(Isabelle_Markup.tokens(syntax)) - - var last = start - for (token <- 
tokens.iterator) { - val Text.Range(token_start, token_stop) = token.range - if (last < token_start) - handle_token(Token.COMMENT1, last - start, token_start - last) - handle_token(token.info getOrElse Token.NULL, - token_start - start, token_stop - token_start) - last = token_stop - } - if (last < stop) handle_token(Token.COMMENT1, last - start, stop - last) - - handle_token(Token.END, line_segment.count, 0) - handler.setLineContext(context) - context - } - } - } + private val token_marker = Token_Markup.token_marker(session, buffer) /* activation */ diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/src/document_view.scala --- a/src/Tools/jEdit/src/document_view.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Tools/jEdit/src/document_view.scala Fri Jun 17 14:35:24 2011 +0200 @@ -69,22 +69,22 @@ private val session = model.session - /** robust extension body **/ + /* robust extension body */ def robust_body[A](default: A)(body: => A): A = - Swing_Thread.now { - try { - Isabelle.buffer_lock(model.buffer) { - if (model.buffer == text_area.getBuffer) body - else default - } + { + try { + Swing_Thread.require() + if (model.buffer == text_area.getBuffer) body + else { + // FIXME Log.log(Log.ERROR, this, new RuntimeException("Inconsistent document model")) + default } - catch { case t: Throwable => Log.log(Log.ERROR, this, t); default } } + catch { case t: Throwable => Log.log(Log.ERROR, this, t); default } + } - /** token handling **/ - /* visible line ranges */ // simplify slightly odd result of TextArea.getScreenLineEndOffset etc. @@ -110,6 +110,31 @@ } + /* snapshot */ + + // owned by Swing thread + @volatile private var was_outdated = false + @volatile private var was_updated = false + + def update_snapshot(): Document.Snapshot = + { + Swing_Thread.require() + val snapshot = model.snapshot() + was_updated = was_outdated && !snapshot.is_outdated + was_outdated = was_outdated || snapshot.is_outdated + snapshot + } + + def flush_snapshot(): (Boolean, Document.Snapshot) = + { + Swing_Thread.require() + val snapshot = update_snapshot() + val updated = was_updated + if (updated) { was_outdated = false; was_updated = false } + (updated, snapshot) + } + + /* HTML popups */ private var html_popup: Option[Popup] = None @@ -194,7 +219,7 @@ if (!model.buffer.isLoaded) exit_control() else Isabelle.swing_buffer_lock(model.buffer) { - val snapshot = model.snapshot + val snapshot = update_snapshot() if (control) init_popup(snapshot, x, y) @@ -215,7 +240,7 @@ override def getToolTipText(x: Int, y: Int): String = { robust_body(null: String) { - val snapshot = model.snapshot() + val snapshot = update_snapshot() val offset = text_area.xyToOffset(x, y) if (control) { snapshot.select_markup(Text.Range(offset, offset + 1))(Isabelle_Markup.tooltip) match @@ -247,14 +272,14 @@ val width = GutterOptionPane.getSelectionAreaWidth val border_width = jEdit.getIntegerProperty("view.gutter.borderWidth", 3) val FOLD_MARKER_SIZE = 12 - + if (gutter.isSelectionAreaEnabled && !gutter.isExpanded && width >= 12 && line_height >= 12) { Isabelle.swing_buffer_lock(model.buffer) { - val snapshot = model.snapshot() + val snapshot = update_snapshot() for (i <- 0 until physical_lines.length) { if (physical_lines(i) != -1) { val line_range = proper_line_range(start(i), end(i)) - + // gutter icons val icons = (for (Text.Info(_, Some(icon)) <- // FIXME snapshot.cumulate @@ -282,7 +307,7 @@ def selected_command(): Option[Command] = { Swing_Thread.require() - model.snapshot().node.proper_command_at(text_area.getCaretPosition) + 
update_snapshot().node.proper_command_at(text_area.getCaretPosition) } private val caret_listener = new CaretListener { @@ -329,7 +354,7 @@ val buffer = model.buffer Isabelle.buffer_lock(buffer) { - val snapshot = model.snapshot() + val snapshot = update_snapshot() def line_range(command: Command, start: Text.Offset): Option[(Int, Int)] = { @@ -370,26 +395,26 @@ case Session.Commands_Changed(changed) => val buffer = model.buffer Isabelle.swing_buffer_lock(buffer) { - val snapshot = model.snapshot() + val (updated, snapshot) = flush_snapshot() + val visible_range = screen_lines_range() - if (changed.exists(snapshot.node.commands.contains)) + if (updated || changed.exists(snapshot.node.commands.contains)) overview.repaint() - val visible_range = screen_lines_range() - val visible_cmds = snapshot.node.command_range(snapshot.revert(visible_range)).map(_._1) - if (visible_cmds.exists(changed)) { - for { - line <- 0 until text_area.getVisibleLines - val start = text_area.getScreenLineStartOffset(line) if start >= 0 - val end = text_area.getScreenLineEndOffset(line) if end >= 0 - val range = proper_line_range(start, end) - val line_cmds = snapshot.node.command_range(snapshot.revert(range)).map(_._1) - if line_cmds.exists(changed) - } text_area.invalidateScreenLineRange(line, line) - - // FIXME danger of deadlock!? - // FIXME potentially slow!? - model.buffer.propertiesChanged() + if (updated) invalidate_line_range(visible_range) + else { + val visible_cmds = + snapshot.node.command_range(snapshot.revert(visible_range)).map(_._1) + if (visible_cmds.exists(changed)) { + for { + line <- 0 until text_area.getVisibleLines + val start = text_area.getScreenLineStartOffset(line) if start >= 0 + val end = text_area.getScreenLineEndOffset(line) if end >= 0 + val range = proper_line_range(start, end) + val line_cmds = snapshot.node.command_range(snapshot.revert(range)).map(_._1) + if line_cmds.exists(changed) + } text_area.invalidateScreenLineRange(line, line) + } } } diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/src/isabelle_markup.scala --- a/src/Tools/jEdit/src/isabelle_markup.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Tools/jEdit/src/isabelle_markup.scala Fri Jun 17 14:35:24 2011 +0200 @@ -12,7 +12,7 @@ import java.awt.Color import org.lobobrowser.util.gui.ColorFactory -import org.gjt.sp.jedit.syntax.Token +import org.gjt.sp.jedit.syntax.{Token => JEditToken} object Isabelle_Markup @@ -108,6 +108,14 @@ case Text.Info(_, XML.Elem(Markup(Markup.TOKEN_RANGE, _), _)) => light_color } + /* FIXME update + Markup.ML_SOURCE -> COMMENT3, + Markup.DOC_SOURCE -> COMMENT3, + Markup.ANTIQ -> COMMENT4, + Markup.ML_ANTIQ -> COMMENT4, + Markup.DOC_ANTIQ -> COMMENT4, + */ + private val foreground_colors: Map[String, Color] = Map( Markup.TCLASS -> get_color("red"), @@ -172,7 +180,7 @@ private val command_style: Map[String, Byte] = { - import Token._ + import JEditToken._ Map[String, Byte]( Keyword.THY_END -> KEYWORD2, Keyword.THY_SCRIPT -> LABEL, @@ -182,39 +190,32 @@ ).withDefaultValue(KEYWORD1) } - private val token_style: Map[String, Byte] = + private val token_style: Map[Token.Kind.Value, Byte] = { - import Token._ - Map[String, Byte]( - // embedded source text - Markup.ML_SOURCE -> COMMENT3, - Markup.DOC_SOURCE -> COMMENT3, - Markup.ANTIQ -> COMMENT4, - Markup.ML_ANTIQ -> COMMENT4, - Markup.DOC_ANTIQ -> COMMENT4, - // outer syntax - Markup.KEYWORD -> KEYWORD2, - Markup.OPERATOR -> OPERATOR, - Markup.COMMAND -> KEYWORD1, - Markup.IDENT -> NULL, - Markup.VERBATIM -> COMMENT3, - Markup.COMMENT -> COMMENT1, - 
Markup.CONTROL -> COMMENT3, - Markup.MALFORMED -> INVALID, - Markup.STRING -> LITERAL3, - Markup.ALTSTRING -> LITERAL1 + import JEditToken._ + Map[Token.Kind.Value, Byte]( + Token.Kind.KEYWORD -> KEYWORD2, + Token.Kind.IDENT -> NULL, + Token.Kind.LONG_IDENT -> NULL, + Token.Kind.SYM_IDENT -> NULL, + Token.Kind.VAR -> NULL, + Token.Kind.TYPE_IDENT -> NULL, + Token.Kind.TYPE_VAR -> NULL, + Token.Kind.NAT -> NULL, + Token.Kind.FLOAT -> NULL, + Token.Kind.STRING -> LITERAL3, + Token.Kind.ALT_STRING -> LITERAL1, + Token.Kind.VERBATIM -> COMMENT3, + Token.Kind.SPACE -> NULL, + Token.Kind.COMMENT -> COMMENT1, + Token.Kind.UNPARSED -> INVALID ).withDefaultValue(NULL) } - def tokens(syntax: Outer_Syntax): Markup_Tree.Select[Byte] = - { - case Text.Info(_, XML.Elem(Markup(Markup.COMMAND, List((Markup.NAME, name))), _)) - if syntax.keyword_kind(name).isDefined => command_style(syntax.keyword_kind(name).get) - - case Text.Info(_, XML.Elem(Markup(Markup.ENTITY, Markup.Kind(kind)), _)) - if token_style(kind) != Token.NULL => token_style(kind) - - case Text.Info(_, XML.Elem(Markup(name, _), _)) - if token_style(name) != Token.NULL => token_style(name) - } + def token_markup(syntax: Outer_Syntax, token: Token): Byte = + if (token.is_command) + command_style(syntax.keyword_kind(token.content).getOrElse("")) + else if (token.is_keyword && !Symbol.is_ascii_identifier(token.content)) + JEditToken.OPERATOR + else token_style(token.kind) } diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/src/output_dockable.scala --- a/src/Tools/jEdit/src/output_dockable.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Tools/jEdit/src/output_dockable.scala Fri Jun 17 14:35:24 2011 +0200 @@ -34,7 +34,7 @@ Document_View(view.getTextArea) match { case Some(doc_view) => val cmd = current_command.get - val start_ofs = doc_view.model.snapshot().node.command_start(cmd).get + val start_ofs = doc_view.update_snapshot().node.command_start(cmd).get val buffer = view.getBuffer buffer.beginCompoundEdit() buffer.remove(start_ofs, cmd.length) @@ -83,7 +83,7 @@ case Some(doc_view) => current_command match { case Some(cmd) if !restriction.isDefined || restriction.get.contains(cmd) => - val snapshot = doc_view.model.snapshot() + val snapshot = doc_view.update_snapshot() val filtered_results = snapshot.state(cmd).results.iterator.map(_._2) filter { case XML.Elem(Markup(Markup.TRACING, _), _) => show_tracing // FIXME not scalable diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/src/text_area_painter.scala --- a/src/Tools/jEdit/src/text_area_painter.scala Thu Jun 16 13:50:35 2011 +0200 +++ b/src/Tools/jEdit/src/text_area_painter.scala Fri Jun 17 14:35:24 2011 +0200 @@ -53,7 +53,7 @@ start: Array[Int], end: Array[Int], y: Int, line_height: Int) { doc_view.robust_body(()) { - painter_snapshot = model.snapshot() + painter_snapshot = doc_view.update_snapshot() painter_clip = gfx.getClip } } @@ -231,7 +231,11 @@ else { var x1 = x + w for (Text.Info(range, info) <- markup) { - val str = chunk.str.substring(range.start - chunk_offset, range.stop - chunk_offset) + // FIXME proper range!? 
+ val str = + chunk.str.substring( + (range.start - chunk_offset) max 0, + (range.stop - chunk_offset) min chunk_length) gfx.setColor(info.getOrElse(chunk_color)) if (range.contains(caret_offset)) { val astr = new AttributedString(str) diff -r 717880e98e6b -r eeba70379f1a src/Tools/jEdit/src/token_markup.scala --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Tools/jEdit/src/token_markup.scala Fri Jun 17 14:35:24 2011 +0200 @@ -0,0 +1,72 @@ +/* Title: Tools/jEdit/src/token_markup.scala + Author: Makarius + +Outer syntax token markup. +*/ + +package isabelle.jedit + + +import isabelle._ + +import org.gjt.sp.jedit.Buffer +import org.gjt.sp.jedit.syntax.{Token => JToken, TokenMarker, TokenHandler, ParserRuleSet} +import javax.swing.text.Segment + + +object Token_Markup +{ + /* extended jEdit syntax styles */ + + private val plain_range: Int = JToken.ID_COUNT + private def check_range(i: Int) { require(0 <= i && i < plain_range) } + + def subscript(i: Byte): Byte = { check_range(i); (i + plain_range).toByte } + def superscript(i: Byte): Byte = { check_range(i); (i + 2 * plain_range).toByte } + val hidden: Byte = (3 * plain_range).toByte + + + /* token marker */ + + private val isabelle_rules = new ParserRuleSet("isabelle", "MAIN") + + private class Line_Context(val context: Scan.Context, prev: Line_Context) + extends TokenMarker.LineContext(isabelle_rules, prev) + { + override def hashCode: Int = context.hashCode + override def equals(that: Any): Boolean = + that match { + case other: Line_Context => context == other.context + case _ => false + } + } + + def token_marker(session: Session, buffer: Buffer): TokenMarker = + new TokenMarker { + override def markTokens(raw_context: TokenMarker.LineContext, + handler: TokenHandler, line: Segment): TokenMarker.LineContext = + { + val syntax = session.current_syntax() + + val context = raw_context.asInstanceOf[Line_Context] + val ctxt = if (context == null) Scan.Finished else context.context + + val (tokens, ctxt1) = syntax.scan_context(line, ctxt) + val context1 = new Line_Context(ctxt1, context) + + var offset = 0 + for (token <- tokens) { + val style = Isabelle_Markup.token_markup(syntax, token) + val length = token.source.length + handler.handleToken(line, style, offset, length, context1) + offset += length + } + handler.handleToken(line, JToken.END, line.count, 0, context1) + + val context2 = context1.intern + handler.setLineContext(context2) + context2 + } + } +} +
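
Usage sketch (hypothetical, not part of the changeset): the point of the new Scan.Context and of Outer_Syntax.scan_context is that outer-syntax tokenization can stop at an arbitrary line boundary and be resumed later, which is what the jEdit token marker above relies on. Assuming an Outer_Syntax value obtained e.g. from session.current_syntax(), line-by-line scanning could look like this (the Scan_Context_Demo object and its inputs are made up for illustration):

  import isabelle._

  object Scan_Context_Demo
  {
    // Scan a text line-by-line, threading the Scan.Context across line
    // boundaries so that unterminated strings, {* verbatim *} text and
    // nested (* comments *) continue on the next line instead of being
    // reported as UNPARSED junk.
    def scan_lines(syntax: Outer_Syntax, lines: List[String]): List[List[Token]] =
    {
      var ctxt: Scan.Context = Scan.Finished
      val result =
        for (line <- lines) yield {
          val (tokens, ctxt1) = syntax.scan_context(line, ctxt)
          ctxt = ctxt1
          tokens
        }
      // A final ctxt other than Scan.Finished means the text ends inside a
      // delimited token (Quoted, Verbatim or Comment), which is the same
      // information that Token_Markup.Line_Context carries between
      // markTokens invocations.
      result
    }

    // For example (hypothetical input), scanning the three lines
    //   foo "bar
    //   baz" (* note
    //   still a comment *)
    // leaves the context at Quoted("\"") after the first line and at
    // Comment(1) after the second, with the pieces of the string and of the
    // comment reported as STRING / COMMENT tokens on each line; the third
    // line closes the comment and returns to Finished.
  }

This mirrors what Token_Markup.token_marker does for each buffer line, except that there jEdit supplies the previous line's context via TokenMarker.LineContext and compares the context returned for a line with the one stored previously (hence the value-based equals/hashCode of Line_Context, presumably) to decide whether subsequent lines need to be re-tokenized.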